| code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Ch30.TryExcept where
import Control.Exception
onlyReportError :: Show e => IO (Either e a) -> IO ()
onlyReportError action = do
res <- action
case res of
Left e -> print e
Right _ -> return ()
willFail :: Integer -> IO ()
willFail denom =
onlyReportError $ willIFail denom
willIFail :: Integer -> IO (Either ArithException ())
willIFail denom =
try $ print $ div 5 denom
| andrewMacmurray/haskell-book-solutions | src/ch30/TryExcept.hs | mit | 401 | 0 | 11 | 91 | 163 | 78 | 85 | 14 | 2 |
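Aside (not part of the dataset row above): the Ch30.TryExcept sample is easiest to read next to a tiny driver. A minimal sketch, assuming the definitions above are in scope; `demo` is a hypothetical name.

-- Hypothetical driver for the definitions above.
-- Expected output:
--   5                (div 5 1 succeeds, so willIFail prints the quotient)
--   divide by zero   (div 5 0 raises DivideByZero, which `try` catches and onlyReportError prints)
demo :: IO ()
demo = willFail 1 >> willFail 0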
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module JsonLogViewer.Filtration where
import Control.Monad (mzero)
import Data.Aeson ((.:), (.=))
import qualified Data.Aeson as Aeson
import Data.Aeson.Path (JSONPath (..), followPath)
import qualified Data.Aeson.Path.Parser as Parser
import Data.Foldable (toList)
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import qualified Data.Vector as V
newtype IsActive = IsActive {unIsActive :: Bool } deriving Show
data JSONPredicate
= Equals Aeson.Value
| MatchesRegex T.Text
| HasSubstring T.Text
| HasKey T.Text
deriving Show
data LogFilter = LogFilter
{ jsonPath :: JSONPath
, jsonPredicate :: JSONPredicate
, filterName :: T.Text
, filterIsActive :: IsActive} deriving Show
lArray :: [Aeson.Value] -> Aeson.Value
lArray = Aeson.Array . V.fromList
instance Aeson.ToJSON JSONPredicate where
toJSON (Equals jsonVal) = lArray [Aeson.String "Equals", jsonVal]
toJSON (MatchesRegex text) = lArray [Aeson.String "MatchesRegex", Aeson.String text]
toJSON (HasSubstring text) = lArray [Aeson.String "HasSubstring", Aeson.String text]
toJSON (HasKey text) = lArray [Aeson.String "HasKey", Aeson.String text]
instance Aeson.FromJSON JSONPredicate where
parseJSON (Aeson.Array v) = case toList v of
[Aeson.String "Equals", val] -> pure $ Equals val
[Aeson.String "MatchesRegex", Aeson.String text] -> pure $ MatchesRegex text
[Aeson.String "HasSubstring", Aeson.String text] -> pure $ HasSubstring text
[Aeson.String "HasKey", Aeson.String text] -> pure $ HasKey text
_ -> mzero
parseJSON _ = mzero
instance Aeson.ToJSON LogFilter where
toJSON (LogFilter {..}) = Aeson.object [
"name" .= filterName
, "is_active" .= unIsActive filterIsActive
, "path" .= Parser.toString jsonPath
, "predicate" .= Aeson.toJSON jsonPredicate]
instance Aeson.FromJSON LogFilter where
parseJSON (Aeson.Object o) = do
-- I'd *like* to do this applicatively, but I'm not actually sure it's
-- possible at all, given the Either handling below.
name <- o .: "name"
predicate <- o .: "predicate"
pathText <- o .: "path"
isActive <- o .: "is_active"
let parsed = Parser.getPath pathText
case parsed of
Right path -> return LogFilter {filterName=name,
filterIsActive=IsActive isActive,
jsonPredicate=predicate, jsonPath=path}
Left e -> fail ("Error when parsing jsonPath: " ++ show e)
parseJSON _ = mzero
matchPredicate :: JSONPredicate -> Aeson.Value -> Bool
matchPredicate (Equals expected) got = expected == got
matchPredicate (HasSubstring expected) (Aeson.String got) = expected `T.isInfixOf` got
matchPredicate (MatchesRegex _) _ = error "Implement MatchesRegex"
matchPredicate (HasKey expected) (Aeson.Object hm) = HM.member expected hm
matchPredicate _ _ = False
matchFilter :: LogFilter -> Aeson.Value -> Bool
matchFilter (LogFilter {jsonPath, jsonPredicate}) aesonValue
| Just gotValue <- followPath jsonPath aesonValue
= jsonPredicate `matchPredicate` gotValue
matchFilter _ _ = False
| radix/json-log-viewer | src/JsonLogViewer/Filtration.hs | mit | 3,333 | 0 | 14 | 741 | 955 | 509 | 446 | 70 | 1 |
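Aside (not part of the dataset row above): the predicate semantics of `matchPredicate` can be spot-checked with literal aeson values. A minimal sketch, assuming the module's imports (qualified `Aeson`, `(.=)`, OverloadedStrings) are in scope; `predicateExamples` is a hypothetical name and every element should be True.

-- Hypothetical spot checks for matchPredicate.
predicateExamples :: [Bool]
predicateExamples =
  [ matchPredicate (Equals (Aeson.String "error")) (Aeson.String "error")
  , matchPredicate (HasSubstring "oom") (Aeson.String "killed: oom at 12:03")   -- substring match on strings
  , matchPredicate (HasKey "level") (Aeson.object ["level" .= (3 :: Int)])      -- key lookup on objects
  , not (matchPredicate (HasKey "level") (Aeson.String "level"))                -- wrong shape falls through to False
  ]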
-- Algorithms/Dynamic Programming/The Maximum Subarray
module Main where
import qualified HackerRank.Algorithms.MaximumSubarray as M
main :: IO ()
main = M.main
| 4e6/sandbox | haskell/hackerrank/MaximumSubarray.hs | mit | 164 | 0 | 6 | 23 | 31 | 20 | 11 | 4 | 1 |
module Graphics.D3D11Binding.Shader.D3D11PixelShader where
import Graphics.D3D11Binding.Interface.Unknown
data ID3D11PixelShader = ID3D11PixelShader
instance UnknownInterface ID3D11PixelShader
| jwvg0425/d3d11binding | src/Graphics/D3D11Binding/Shader/D3D11PixelShader.hs | mit | 195 | 0 | 5 | 14 | 29 | 18 | 11 | 4 | 0 |
-- Copyright (c) 2016-present, SoundCloud Ltd.
-- All rights reserved.
--
-- This source code is distributed under the terms of a MIT license,
-- found in the LICENSE file.
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module Kubernetes.Model.V1.Probe
( Probe (..)
, exec
, httpGet
, tcpSocket
, initialDelaySeconds
, timeoutSeconds
, periodSeconds
, successThreshold
, failureThreshold
, mkProbe
) where
import Control.Lens.TH (makeLenses)
import Data.Aeson.TH (defaultOptions,
deriveJSON,
fieldLabelModifier)
import GHC.Generics (Generic)
import Kubernetes.Model.V1.ExecAction (ExecAction)
import Kubernetes.Model.V1.HTTPGetAction (HTTPGetAction)
import Kubernetes.Model.V1.TCPSocketAction (TCPSocketAction)
import Prelude hiding (drop, error, max,
min)
import qualified Prelude as P
import Test.QuickCheck (Arbitrary, arbitrary)
import Test.QuickCheck.Instances ()
-- | Probe describes a health check to be performed against a container to determine whether it is alive or ready to receive traffic.
data Probe = Probe
{ _exec :: !(Maybe ExecAction)
, _httpGet :: !(Maybe HTTPGetAction)
, _tcpSocket :: !(Maybe TCPSocketAction)
, _initialDelaySeconds :: !(Maybe Integer)
, _timeoutSeconds :: !(Maybe Integer)
, _periodSeconds :: !(Maybe Integer)
, _successThreshold :: !(Maybe Integer)
, _failureThreshold :: !(Maybe Integer)
} deriving (Show, Eq, Generic)
makeLenses ''Probe
$(deriveJSON defaultOptions{fieldLabelModifier = (\n -> if n == "_type_" then "type" else P.drop 1 n)} ''Probe)
instance Arbitrary Probe where
arbitrary = Probe <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
-- | Use this method to build a Probe
mkProbe :: Probe
mkProbe = Probe Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
| soundcloud/haskell-kubernetes | lib/Kubernetes/Model/V1/Probe.hs | mit | 2,428 | 0 | 14 | 823 | 431 | 254 | 177 | 59 | 1 |
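Aside (not part of the dataset row above): because `makeLenses` generates a lens per field, a probe is normally built from `mkProbe` by setting only the fields of interest. A minimal sketch, assuming `Control.Lens ((&), (?~))` is imported alongside this module; `livenessProbe` is a hypothetical name.

-- Hypothetical usage of the generated lenses.
livenessProbe :: Probe
livenessProbe = mkProbe
  & initialDelaySeconds ?~ 15  -- wait 15s before the first check
  & periodSeconds       ?~ 10  -- then probe every 10s
  & failureThreshold    ?~ 3   -- give up after 3 consecutive failures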
{-# LANGUAGE ForeignFunctionInterface #-}
module Paths_ls (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import Foreign
import Foreign.C
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
version :: Version
version = Version [0,1,0,0] []
prefix, bindirrel :: FilePath
prefix = "C:\\Users\\Nadya\\AppData\\Roaming\\cabal"
bindirrel = "bin"
getBinDir :: IO FilePath
getBinDir = getPrefixDirRel bindirrel
getLibDir :: IO FilePath
getLibDir = getPrefixDirRel "x86_64-windows-ghc-7.10.2\\ls-0.1.0.0-KDYgcpdd1bKJLCwoo4leSO"
getDataDir :: IO FilePath
getDataDir = catchIO (getEnv "ls_datadir") (\_ -> getPrefixDirRel "x86_64-windows-ghc-7.10.2\\ls-0.1.0.0")
getLibexecDir :: IO FilePath
getLibexecDir = getPrefixDirRel "ls-0.1.0.0-KDYgcpdd1bKJLCwoo4leSO"
getSysconfDir :: IO FilePath
getSysconfDir = getPrefixDirRel "etc"
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir `joinFileName` name)
getPrefixDirRel :: FilePath -> IO FilePath
getPrefixDirRel dirRel = try_size 2048 -- plenty, PATH_MAX is 512 under Win32.
where
try_size size = allocaArray (fromIntegral size) $ \buf -> do
ret <- c_GetModuleFileName nullPtr buf size
case ret of
0 -> return (prefix `joinFileName` dirRel)
_ | ret < size -> do
exePath <- peekCWString buf
let (bindir,_) = splitFileName exePath
return ((bindir `minusFileName` bindirrel) `joinFileName` dirRel)
| otherwise -> try_size (size * 2)
foreign import stdcall unsafe "windows.h GetModuleFileNameW"
c_GetModuleFileName :: Ptr () -> CWString -> Int32 -> IO Int32
minusFileName :: FilePath -> String -> FilePath
minusFileName dir "" = dir
minusFileName dir "." = dir
minusFileName dir suffix =
minusFileName (fst (splitFileName dir)) (fst (splitFileName suffix))
joinFileName :: String -> String -> FilePath
joinFileName "" fname = fname
joinFileName "." fname = fname
joinFileName dir "" = dir
joinFileName dir fname
| isPathSeparator (last dir) = dir++fname
| otherwise = dir++pathSeparator:fname
splitFileName :: FilePath -> (String, String)
splitFileName p = (reverse (path2++drive), reverse fname)
where
(path,drive) = case p of
(c:':':p') -> (reverse p',[':',c])
_ -> (reverse p ,"")
(fname,path1) = break isPathSeparator path
path2 = case path1 of
[] -> "."
[_] -> path1 -- don't remove the trailing slash if
-- there is only one character
(c:path') | isPathSeparator c -> path'
_ -> path1
pathSeparator :: Char
pathSeparator = '\\'
isPathSeparator :: Char -> Bool
isPathSeparator c = c == '/' || c == '\\'
| nadyac/haskell-gol | dist/build/autogen/Paths_ls.hs | mit | 3,072 | 0 | 21 | 736 | 889 | 467 | 422 | 72 | 5 |
import Data.List
import Network
import System.IO
import System.Time
import System.Exit
import Control.Arrow
import Control.Monad.Reader
import Control.Exception
import Text.Printf
server = "irc.freenode.org"
port = 6667
chan = "#qutcode"
nick = "qutcodebot"
-- 'Net' monad, a wrapper over IO, carrying the bot's immutable state.
type Net = ReaderT Bot IO
data Bot = Bot { socket :: Handle, starttime :: ClockTime }
-- Set up actions to run on start and end, and run the main loop
main:: IO ()
main = bracket connect disconnect loop
where
disconnect = hClose . socket
loop st = runReaderT run st
-- Connect to the server and return the initial bot state
connect :: IO Bot
connect = notify $ do
t <- getClockTime
h <- connectTo server (PortNumber (fromIntegral port))
hSetBuffering h NoBuffering
return (Bot h t)
where
notify a = bracket_
(printf "Connecting to %s ... " server >> hFlush stdout)
(putStrLn "done.")
a
-- We're in the Net monad now, so we've connected successfully
-- Join a channel, and start processing commands
run:: Net ()
run = do
write "NICK" nick
write "USER" (nick ++ " 0 * :qutcode bot")
write "JOIN" chan
asks socket >>= listen
-- Process each line from the server
listen :: Handle -> Net ()
listen h = forever $ do
s <- init `fmap` io (hGetLine h)
io (putStrLn s)
if ping s then pong s else eval (clean s)
where
forever a = a >> forever a
clean = drop 1 . dropWhile (/= ':') . drop 1
ping x = "PING :" `isPrefixOf` x
pong x = write "PONG" (':' : drop 6 x)
-- Dispatch a command
eval :: String -> Net ()
eval "!quit" = write "QUIT" ":Exiting" >> io (exitWith ExitSuccess)
eval x | "!id" `isPrefixOf` x = privmsg (drop 4 x)
eval "!uptime" = uptime >>= privmsg
eval _ = return () -- ignore everything else
-- Send a privmsg to the current chan + server
privmsg :: String -> Net ()
privmsg s = write "PRIVMSG" (chan ++ " :" ++ s)
-- Send a message out to the server we're currently connected to
write :: String -> String -> Net ()
write s t = do
h <- asks socket
io $ hPrintf h "%s %s\r\n" s t
io $ printf "> %s %s\n" s t
-- Calculate and pretty print the uptime
uptime :: Net String
uptime = do
now <- io getClockTime
zero <- asks starttime
return . pretty $ diffClockTimes now zero
-- Pretty print the date in '1d 9h 9m 17s' format
pretty :: TimeDiff -> String
pretty td = join . intersperse " " . filter (not . null) . map f $
[(years ,"y") ,(months `mod` 12,"m")
,(days `mod` 28,"d") ,(hours `mod` 24,"h")
,(mins `mod` 60,"m") ,(secs `mod` 60,"s")]
where
secs = abs $ tdSec td ; mins = secs `div` 60
hours = mins `div` 60 ; days = hours `div` 24
months = days `div` 28 ; years = months `div` 12
f (i,s) | i == 0 = []
| otherwise = show i ++ s
-- Convenience
io :: IO a -> Net a
io = liftIO
| qutcode/qutcodebot | bot.hs | gpl-2.0 | 3,003 | 0 | 13 | 818 | 1,006 | 524 | 482 | 73 | 2 |
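Aside (not part of the dataset row above): the bot's message handling is easiest to follow on one concrete server line. A hypothetical trace of `clean` and `eval` from the code above (the input is made up; `listen` has already stripped the trailing carriage return with `init`):

-- s       = ":alice!~a@host PRIVMSG #qutcode :!id hello"
-- clean s = "!id hello"                    -- drop 1, dropWhile (/= ':'), drop 1
-- eval "!id hello"                         -- matches the "!id" prefix
--   => privmsg (drop 4 "!id hello") = privmsg "hello"
--   => write "PRIVMSG" "#qutcode :hello"   -- sent as "PRIVMSG #qutcode :hello\r\n"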
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : ./Common/Id.hs
Description : positions, simple and mixfix identifiers
Copyright : (c) Klaus Luettich and Christian Maeder and Uni Bremen 2002-2003
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
This module supplies positions, simple and mixfix identifiers.
A simple identifier is a lexical token given by a string and a start position.
- A 'place' is a special token within mixfix identifiers.
- A mixfix identifier may have a compound list.
This compound list follows the last non-place token!
- Identifiers fixed for all logics
-}
module Common.Id where
import Data.Char
import Data.Data
import Data.List (isPrefixOf)
import Data.Ratio
import qualified Data.Set as Set
-- do use in data types that derive d directly
data Pos = SourcePos
{ sourceName :: String
, sourceLine :: !Int
, sourceColumn :: !Int
} deriving (Eq, Ord, Typeable, Data)
instance Show Pos where
showsPrec _ = showPos
-- | position lists with trivial equality
newtype Range = Range { rangeToList :: [Pos] }
deriving (Typeable, Data)
-- let InlineAxioms recognize positions
instance Show Range where
show _ = "nullRange"
-- ignore all ranges in comparisons
instance Eq Range where
_ == _ = True
-- Ord must be consistent with Eq
instance Ord Range where
compare _ _ = EQ
nullRange :: Range
nullRange = Range []
isNullRange :: Range -> Bool
isNullRange = null . rangeToList
appRange :: Range -> Range -> Range
appRange (Range l1) (Range l2) = Range $ l1 ++ l2
concatMapRange :: (a -> Range) -> [a] -> Range
concatMapRange f = Range . concatMap (rangeToList . f)
-- | construct a new position
newPos :: String -> Int -> Int -> Pos
newPos = SourcePos
-- | increment the column counter
incSourceColumn :: Pos -> Int -> Pos
incSourceColumn (SourcePos s l c) = SourcePos s l . (c +)
-- | show a position
showPos :: Pos -> ShowS
showPos p = let name = sourceName p
line = sourceLine p
column = sourceColumn p
in noShow (null name) (showString name . showChar ':') .
noShow (line == 0 && column == 0)
(shows line . showChar '.' . shows column)
-- * Tokens as 'String's with positions that are ignored for 'Eq' and 'Ord'
-- | tokens as supplied by the scanner
data Token = Token { tokStr :: String
, tokPos :: Range
} deriving (Eq, Ord, Typeable, Data)
instance Show Token where
show = tokStr
instance Read Token where
readsPrec i = map (\ (a, r) -> (mkSimpleId a, r)) . readsPrec i
-- | simple ids are just tokens
type SIMPLE_ID = Token
-- | construct a token without position from a string
mkSimpleId :: String -> Token
mkSimpleId s = Token s nullRange
-- | null token
nullTok :: Token
nullTok = mkSimpleId ""
-- | create a numbered string
mkNumStr :: String -> Int -> String
mkNumStr str n = str ++ show n
-- | create a numbered simple identifier (for variables)
mkNumVar :: String -> Int -> Token
mkNumVar str = mkSimpleId . mkNumStr str
-- | test if the first character indicates a legal simple CASL identifier
isSimpleToken :: Token -> Bool
isSimpleToken t = case tokStr t of
c : r -> isAlpha c || isDigit c && null r || c == '\''
"" -> False
-- | collect positions
catPosAux :: [Token] -> [Pos]
catPosAux = concatMap (rangeToList . getRange)
-- | collect positions as range
catRange :: [Token] -> Range
catRange = Range . catPosAux
-- | shortcut to get positions of surrounding and interspersed tokens
toRange :: Token -> [Token] -> Token -> Range
toRange o l c = catRange $ o : l ++ [c]
-- * placeholder stuff
-- | the special 'place'
place :: String
place = "__"
-- | is a 'place' token
isPlace :: Token -> Bool
isPlace (Token t _) = t == place
placeTok :: Token
placeTok = mkSimpleId place
-- * equality symbols
-- | also a definition indicator
equalS :: String
equalS = "="
-- | mind spacing i.e. in @e =e= e@
exEqual :: String
exEqual = "=e="
-- | token for type annotations
typeTok :: Token
typeTok = mkSimpleId ":"
-- * mixfix identifiers with compound lists and its range
-- | mixfix and compound identifiers
data Id = Id
{ getTokens :: [Token]
, getComps :: [Id]
, rangeOfId :: Range }
deriving (Eq, Ord, Typeable, Data)
-- pos of square brackets and commas of a compound list
instance Show Id where
showsPrec _ = showId
-- | construct an 'Id' from a token list
mkId :: [Token] -> Id
mkId toks = Id toks [] nullRange
mkInfix :: String -> Id
mkInfix s = mkId [placeTok, mkSimpleId s, placeTok]
-- | a prefix for generated names
genNamePrefix :: String
genNamePrefix = "gn_"
-- | create a generated simple identifier
genToken :: String -> Token
genToken = mkSimpleId . (genNamePrefix ++)
-- | create a generated, numbered variable
genNumVar :: String -> Int -> Token
genNumVar str = genToken . mkNumStr str
-- | create a generated identifier
genName :: String -> Id
genName str = mkId [genToken str]
-- | create a generated identifier from a given one excluding characters
mkGenName :: Id -> Id
mkGenName i@(Id ts cs r) = case ts of
t : s -> let st = tokStr t in case st of
c : _ | isAlphaNum c -> Id (genToken st : s) cs r
| isPlace t -> Id (mkSimpleId "gn" : ts) cs r
| c == '\'' -> i
_ -> Id (mkSimpleId "gn_n" : ts) cs r
_ -> i
-- | tests whether a Token is already a generated one
isGeneratedToken :: Token -> Bool
isGeneratedToken = isPrefixOf genNamePrefix . tokStr
{- | append a number to the first token of a (possible compound) Id,
or generate a new identifier for /invisible/ ones -}
appendString :: Id -> String -> Id
appendString (Id tokList idList range) s = let
isAlphaToken tok = case tokStr tok of
c : _ -> isAlpha c
"" -> False
genTok tList tList1 str = case tList of
[] -> [mkSimpleId $ genNamePrefix ++ "n" ++ str]
-- for invisible identifiers
tok : tokens ->
if isPlace tok || not (isAlphaToken tok)
then genTok tokens (tok : tList1) str
else reverse tList1 ++
[tok {tokStr = -- avoid gn_gn_
(if isGeneratedToken tok then "" else genNamePrefix)
++ tokStr tok ++ str}]
{- only underline words may be
prefixed with genNamePrefix or extended with a number -}
++ tokens
in Id (genTok tokList [] s) idList range
-- | the name of injections
injToken :: Token
injToken = genToken "inj"
injName :: Id
injName = mkId [injToken]
mkUniqueName :: Token -> [Id] -> Id
mkUniqueName t is =
Id [foldl (\ (Token s1 r1) (Token s2 r2) ->
Token (s1 ++ "_" ++ s2) $ appRange r1 r2) t
$ concatMap getTokens is]
(let css = filter (not . null) $ map getComps is
in case css of
[] -> []
h : r -> if all (== h) r then h else concat css)
(foldl appRange nullRange $ map rangeOfId is)
-- | the name of projections
projToken :: Token
projToken = genToken "proj"
projName :: Id
projName = mkId [projToken]
mkUniqueProjName :: Id -> Id -> Id
mkUniqueProjName from to = mkUniqueName projToken [from, to]
mkUniqueInjName :: Id -> Id -> Id
mkUniqueInjName from to = mkUniqueName injToken [from, to]
isInjName :: Id -> Bool
isInjName = isPrefixOf (show injName) . show
-- | the postfix type identifier
typeId :: Id
typeId = mkId [placeTok, typeTok]
-- | the invisible application rule with two places
applId :: Id
applId = mkId [placeTok, placeTok]
-- | the infix equality identifier
eqId :: Id
eqId = mkInfix equalS
exEq :: Id
exEq = mkInfix exEqual
-- ** show stuff
-- | shortcut to suppress output for input condition
noShow :: Bool -> ShowS -> ShowS
noShow b s = if b then id else s
-- | intersperse separators
showSepList :: ShowS -> (a -> ShowS) -> [a] -> ShowS
showSepList s f l = case l of
[] -> id
[x] -> f x
x : r -> f x . s . showSepList s f r
-- | shows a compound list
showIds :: [Id] -> ShowS
showIds is = noShow (null is) $ showString "["
. showSepList (showString ",") showId is
. showString "]"
-- | shows an 'Id', puts final places behind a compound list
showId :: Id -> ShowS
showId (Id ts is _) =
let (toks, places) = splitMixToken ts
showToks = showSepList id $ showString . tokStr
in showToks toks . showIds is . showToks places
-- ** splitting identifiers
-- | splits off the front and final places
splitMixToken :: [Token] -> ([Token], [Token])
splitMixToken ts = case ts of
[] -> ([], [])
h : l ->
let (toks, pls) = splitMixToken l
in if isPlace h && null toks
then (toks, h : pls)
else (h : toks, pls)
{- | return open and closing list bracket and a compound list
from a bracket 'Id' (parsed by 'Common.AnnoParser.caslListBrackets') -}
getListBrackets :: Id -> ([Token], [Token], [Id])
getListBrackets (Id b cs _) =
let (b1, rest) = break isPlace b
b2 = if null rest then []
else filter (not . isPlace) rest
in (b1, b2, cs)
-- ** reconstructing token lists
{- | reconstruct a list with surrounding strings and interspersed
commas with proper position information that should be preserved
by the input function -}
expandPos :: (Token -> a) -> (String, String) -> [a] -> Range -> [a]
{- expandPos f ("{", "}") [a,b] [(1,1), (1,3), 1,5)] =
[ t"{" , a , t"," , b , t"}" ] where t = f . Token (and proper positions) -}
expandPos f (o, c) ts (Range ps) =
if null ts then if null ps then map (f . mkSimpleId) [o, c]
else map f (zipWith Token [o, c] [Range [head ps] , Range [last ps]])
else let
n = length ts + 1
diff = n - length ps
commas j = if j == 2 then [c] else "," : commas (j - 1)
ocs = o : commas n
hsep : tseps = map f
$ if diff == 0
then zipWith (\ s p -> Token s (Range [p])) ocs ps
else map mkSimpleId ocs
in hsep : concat (zipWith (\ t s -> [t, s]) ts tseps)
{- | reconstruct the token list of an 'Id'
including square brackets and commas of (nested) compound lists. -}
getPlainTokenList :: Id -> [Token]
getPlainTokenList = getTokenList place
{- | reconstruct the token list of an 'Id'.
Replace top-level places with the input String -}
getTokenList :: String -> Id -> [Token]
getTokenList placeStr (Id ts cs ps) =
let convert = map (\ t -> if isPlace t then t {tokStr = placeStr} else t)
{- reconstruct tokens of a compound list
although positions will be replaced (by scan) -}
getCompoundTokenList comps = concat .
expandPos (: []) ("[", "]") (map getPlainTokenList comps)
in if null cs then convert ts else
let (toks, pls) = splitMixToken ts in
convert toks ++ getCompoundTokenList cs ps ++ convert pls
-- ** conversion from 'SIMPLE_ID'
-- | a 'SIMPLE_ID' as 'Id'
simpleIdToId :: SIMPLE_ID -> Id
simpleIdToId sid = mkId [sid]
-- | a string as 'Id'
stringToId :: String -> Id
stringToId = simpleIdToId . mkSimpleId
-- | efficiently test for a singleton list
isSingle :: [a] -> Bool
isSingle l = case l of
[_] -> True
_ -> False
-- | test for a 'SIMPLE_ID'
isSimpleId :: Id -> Bool
isSimpleId (Id ts cs _) = null cs && case ts of
[t] -> isSimpleToken t
_ -> False
idToSimpleId :: Id -> Token
idToSimpleId i = case i of
Id [t] [] _ -> t
_ -> error $ "idToSimpleId: " ++ show i
-- ** fixity stuff
-- | number of 'place' in 'Id'
placeCount :: Id -> Int
placeCount (Id tops _ _) = length $ filter isPlace tops
-- | has a 'place'
isMixfix :: Id -> Bool
isMixfix (Id tops _ _) = any isPlace tops
-- | 'Id' starts with a 'place'
begPlace :: Id -> Bool
begPlace (Id toks _ _) = not (null toks) && isPlace (head toks)
-- | 'Id' ends with a 'place'
endPlace :: Id -> Bool
endPlace (Id toks _ _) = not (null toks) && isPlace (last toks)
-- | starts with a 'place'
isPostfix :: Id -> Bool
isPostfix (Id tops _ _) = not (null tops) && isPlace (head tops)
&& not (isPlace (last tops))
-- | starts and ends with a 'place'
isInfix :: Id -> Bool
isInfix (Id tops _ _) = not (null tops) && isPlace (head tops)
&& isPlace (last tops)
-- * position stuff
-- | compute a meaningful position from an 'Id' for diagnostics
posOfId :: Id -> Range
posOfId (Id ts _ (Range ps)) =
Range $ let l = filter (not . isPlace) ts
in catPosAux (if null l then ts
-- for invisible "__ __" (only places)
else l) ++ ps
-- | compute start and end position of a Token (or leave it empty)
tokenRange :: Token -> [Pos]
tokenRange (Token str (Range ps)) = case ps of
[p] -> mkTokPos str p
_ -> ps
mkTokPos :: String -> Pos -> [Pos]
mkTokPos str p = let l = length str in
if l > 1 then [p, incSourceColumn p $ length str - 1] else [p]
outerRange :: Range -> [Pos]
outerRange (Range qs) = case qs of
[] -> []
q : _ -> let p = last qs in if p == q then [q] else [q, p]
sortRange :: [Pos] -> [Pos] -> [Pos]
sortRange rs qs = case qs of
[] -> rs
r : _ -> let
ps = filter ((== sourceName r) . sourceName) rs
p = minimum $ ps ++ qs
q = maximum $ ps ++ qs
in if p == q then [p] else [p, q]
joinRanges :: [[Pos]] -> [Pos]
joinRanges = foldr sortRange []
{- | compute start and end position of a declared Id (or leave it empty).
Do not use for applied identifiers where place holders are replaced. -}
idRange :: Id -> [Pos]
idRange (Id ts _ r) =
let (fs, rs) = splitMixToken ts
in joinRanges $ map tokenRange fs ++ [outerRange r] ++ map tokenRange rs
-- -- helper class -------------------------------------------------------
{- | This class is derivable with DrIFT.
Its main purpose is to have a function that operates on
constructors with a 'Range' field. During parsing, mixfix
analysis and ATermConversion this function might be very useful.
-}
class GetRange a where
getRange :: a -> Range
getRange = const nullRange
rangeSpan :: a -> [Pos]
rangeSpan = sortRange [] . getPosList
getPosList :: GetRange a => a -> [Pos]
getPosList = rangeToList . getRange
getRangeSpan :: GetRange a => a -> Range
getRangeSpan = Range . rangeSpan
instance GetRange Token where
getRange = Range . tokenRange
rangeSpan = tokenRange
instance GetRange Id where
getRange = posOfId
rangeSpan = idRange
instance GetRange Range where
getRange = id
rangeSpan = outerRange
-- defaults ok
instance GetRange ()
instance GetRange Char
instance GetRange Bool
instance GetRange Int
instance GetRange Integer
instance GetRange (Ratio a) -- for Rational
instance GetRange a => GetRange (Maybe a) where
getRange = maybe nullRange getRange
rangeSpan = maybe [] rangeSpan
instance GetRange a => GetRange [a] where
getRange = concatMapRange getRange
rangeSpan = joinRanges . map rangeSpan
instance (GetRange a, GetRange b) => GetRange (a, b) where
getRange = getRange . fst
rangeSpan (a, b) = sortRange (rangeSpan a) $ rangeSpan b
instance GetRange a => GetRange (Set.Set a) where
getRange = getRange . Set.toList
rangeSpan = rangeSpan . Set.toList
| gnn/Hets | Common/Id.hs | gpl-2.0 | 15,217 | 0 | 22 | 3,808 | 4,502 | 2,376 | 2,126 | 310 | 5 |
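Aside (not part of the dataset row above): a few concrete values make the mixfix conventions of Common/Id.hs more tangible. A minimal sketch, assuming the module is in scope; `mixfixExamples` is a hypothetical name and every element should be True.

-- Hypothetical spot checks for the mixfix Id machinery.
mixfixExamples :: [Bool]
mixfixExamples =
  [ show (mkInfix "+") == "__+__"                         -- two places around the token "+"
  , placeCount (mkInfix "+") == 2
  , isInfix (mkInfix "+")                                 -- starts and ends with a place
  , isSimpleId (stringToId "x")                           -- one alphanumeric token, no compound list
  , show (appendString (stringToId "x") "1") == "gn_x1"   -- genNamePrefix added, then the suffix
  ]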
{-# LANGUAGE ScopedTypeVariables #-}
-- Copyright (c) 2012, Diego Souza
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
-- * Neither the name of the <ORGANIZATION> nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-- SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-- CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
module SSSFS.Filesystem.ReplicationStorage
( new
) where
import Control.Monad
import Control.Exception as C
import SSSFS.Except
import SSSFS.Filesystem.Types
import SSSFS.Storage as S
import SSSFS.Storage.Helpers
data ReplicationStorage a b = ReplicationStorage { master :: a
, slave :: b
}
new :: (StorageHashLike a, StorageHashLike b) => a -> b -> ReplicationStorage a b
new = ReplicationStorage
onSlave :: ReplicationStorage a b -> (b -> c) -> c
onSlave s f = f (slave s)
onMaster :: ReplicationStorage a b -> (a -> c) -> c
onMaster s f = f (master s)
transferBlocks :: (StorageHashLike a, StorageHashLike b, StorageEnumLike a) => ReplicationStorage a b -> INode -> IO ()
transferBlocks s inum = do { inThere <- onMaster s (flip enumKeys (init $ dFromINode inum 0))
; mapM_ (copyNoOverwrite (slave s) (master s)) (dblocks inThere)
}
where nblocks = (snd (blocks inum)) - 1
dblocks inThere = map (dFromINode inum) (filter ((`notElem` inThere) . ref . show) [0..nblocks])
instance (Storage a, Storage b) => Storage (ReplicationStorage a b) where
shutdown s = onSlave s shutdown >> onMaster s shutdown
instance (StorageHashLike a, StorageEnumLike a, StorageHashLike b) => StorageHashLike (ReplicationStorage a b) where
get s k = C.catch (onSlave s (flip get k)) handler
where handler e
| isNotFound e = do { v <- onMaster s (flip get k)
; onSlave s (\s1 -> put s1 k v)
; return v
}
| otherwise = throw e
head s k = do { r <- onSlave s (flip S.head k)
; if (r)
then return r
else onMaster s (flip S.head k)
}
del s k = onSlave s (flip del k) >> onMaster s (flip del k)
put s k v
| keyToINode k = do { minum <- fmap unitToINode (eulavM v)
; case minum
of Nothing
-> throw (DataCorruptionExcept "medic!")
Just inum
-> when (size inum > 0) (transferBlocks s inum)
; onSlave s (\s1 -> put s1 k v)
; onMaster s (\s1 -> put s1 k v)
}
| keyToDBlock k = onSlave s (\s1 -> put s1 k v)
| otherwise = onSlave s (\s1 -> put s1 k v) >> onMaster s (\s1 -> put s1 k v)
instance (StorageEnumLike a, StorageEnumLike b) => StorageEnumLike (ReplicationStorage a b) where
-- | TODO:figure a better way to implement this
enumKeys s k = onMaster s (flip enumKeys k)
| dgvncsz0f/sssfs | src/SSSFS/Filesystem/ReplicationStorage.hs | gpl-3.0 | 4,414 | 0 | 15 | 1,331 | 1,016 | 528 | 488 | 49 | 1 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Blog.Instances() where
import Database.BlobStorage (BlobId)
import qualified Data.Binary as B
import Data.Hashable
import qualified Data.HashMap.Strict as H
import qualified Data.HashSet as HS
import Data.SafeCopy
import qualified Data.Serialize as C
import qualified Data.Text as T
import Data.Word
import qualified Web.Routes as WR
instance (SafeCopy k, SafeCopy v, Eq k, Hashable k) => SafeCopy (H.HashMap k v) where
getCopy = contain $ fmap H.fromList safeGet
putCopy = contain . safePut . H.toList
instance (SafeCopy k, Eq k, Hashable k) => SafeCopy (HS.HashSet k) where
getCopy = contain $ fmap HS.fromList safeGet
putCopy = contain . safePut . HS.toList
instance SafeCopy BlobId where
getCopy = getBinaryCopy
putCopy = putBinaryCopy
getBinaryCopy :: (B.Binary a) => Contained (C.Get a)
getBinaryCopy =
contain $ do
bytes <- C.get
return $ B.decode bytes
putBinaryCopy :: (B.Binary a) => a -> Contained C.Put
putBinaryCopy = contain . C.put . B.encode
instance WR.PathInfo Word32 where
toPathSegments = WR.toPathSegments . (fromIntegral :: Word32 -> Int)
fromPathSegments = (fromIntegral :: Int -> Word32) `fmap` WR.fromPathSegments
| aslatter/blog | Blog/Instances.hs | gpl-3.0 | 1,242 | 0 | 10 | 227 | 399 | 224 | 175 | 31 | 1 |
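Aside (not part of the dataset row above): the orphan `SafeCopy` instances above serialize the containers as plain lists, so a cereal round trip should reproduce the original map. A minimal sketch, assuming the module's qualified imports (`H`, `C`) and `safePut`/`safeGet` are in scope; `roundTrip` is a hypothetical name.

-- Hypothetical round-trip check for the HashMap instance; expected result: Right m.
roundTrip :: Either String (H.HashMap Int String)
roundTrip = C.runGet safeGet (C.runPut (safePut m))
  where
    m :: H.HashMap Int String
    m = H.fromList [(1, "one"), (2, "two")]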
module RayMarch.Types where
import Control.Monad.Trans.State
import Control.Applicative hiding ((<*>))
import System.Random
type Float3 = (Float, Float, Float)
newtype Vector = Vector Float3
type Point = Vector
newtype Color = Color Float3
type Quaternion = (Vector, Float)
class Arith a where
(<+>) :: a -> a -> a
(<->) :: a -> a -> a
(<*>) :: a -> Float -> a
(</>) :: a -> Float -> a
inv :: a -> a
lerp :: Float -> a -> a -> a
x <*> y = x </> (recip y)
x </> y = x <*> (recip y)
inv v = v <*> (-1)
lerp r a b = a<*>(1-r)<+>b<*>r
class Arith a => Direction a where
len :: a -> Float
unit :: a
norm :: a -> a
norm v = if len v == 0 then unit else v </> len v
class Each a where
each :: (Float -> Float) -> a -> a
fold :: (Float -> Float -> Float) -> a -> Float
infixl 6 <+>
infixl 6 <->
infixl 7 <*>
infixl 7 </>
instance Arith Vector where
(Vector (a,b,c)) <+> (Vector (d,e,f)) = Vector (a+d,b+e,c+f)
(Vector (a,b,c)) <-> (Vector (d,e,f)) = Vector (a-d,b-e,c-f)
(Vector (a,b,c)) <*> r = Vector (a*r,b*r,c*r)
instance Direction Vector where
len (Vector (a,b,c)) = sqrt (a*a+b*b+c*c)
unit = Vector (1,0,0)
instance Each Vector where
each f (Vector (a,b,c)) = Vector (f a,f b,f c)
fold f (Vector (a,b,c)) = f (f a b) c
instance Random Vector where
randomR (Vector (a,b,c), Vector (d,e,f)) g = let
(x,xs) = randomR (a,d) g
(y,ys) = randomR (b,e) xs
(z,zs) = randomR (c,f) ys
in (Vector (x,y,z),zs)
random g = randomR (Vector (0,0,0),Vector (1,1,1)) g
dot :: Vector -> Vector -> Float
dot (Vector (a,b,c)) (Vector (d,e,f)) = a*d+b*e+c*f
dotP :: Vector -> Vector -> Float
dotP a b = max 0 $ a`dot`b
cross :: Vector -> Vector -> Vector
cross (Vector (a,b,c)) (Vector (d,e,f)) = Vector (b*f-c*e,c*d-a*f,a*e-b*d)
crossF :: Vector -> Vector -> Float
crossF x y = len $ cross x y
zero :: Vector
zero = Vector (0,0,0)
instance Arith Color where
(Color (a,b,c)) <+> (Color (d,e,f)) = Color (a+d,b+e,c+f)
(Color (a,b,c)) <-> (Color (d,e,f)) = Color (a-d,b-e,c-f)
(Color (a,b,c)) <*> r = Color (a*r,b*r,c*r)
instance Each Color where
each f (Color (a,b,c)) = Color (f a,f b,f c)
fold f (Color (a,b,c)) = f (f a b) c
type Pixel = (Float,Float)
instance Arith (Float,Float) where
(a,b) <+> (c,d) = (a+c,b+d)
(a,b) <-> (c,d) = (a-c,b-d)
(a,b) <*> r = (a*r,b*r)
instance Direction (Float,Float) where
len (a,b) = sqrt $ a*a+b*b
unit = (1,0)
instance Each (Float,Float) where
each f (a,b) = (f a,f b)
fold f (a,b) = f a b
instance Random (Float,Float) where
randomR ((a,b),(c,d)) g = let
(x,xs) = randomR (a,c) g
(y,ys) = randomR (b,d) xs
in ((x,y),ys)
random g = let
(x,xs) = random g
(y,ys) = random xs
in ((x,y),ys)
data View = View {
position :: Point,
direction :: Quaternion,
lens :: Pixel -> Vector,
ratio :: Float
}
data Config = Config {
fileName :: FilePath,
view :: View,
width :: Float
}
data World s = World {
distancer :: Distance s,
advancer :: Maybe s -> Point -> Vector -> March s Color,
effector :: Point -> Config -> Pixel -> Color -> Color,
viewPoint :: Point,
backGround :: Color,
advanceCount :: Int,
advanceLimit :: Int,
reflectCount :: Int,
reflectLimit :: Int
}
type March s = State (World s)
type Distance s = Point -> (Float, Object s)
type Object s = Point -> Vector -> March s Color
type Field = (Float,Float) -> Float
getAdvanceLimit :: March s Int
getAdvanceLimit = advanceLimit <$> get
getViewPoint :: March s Point
getViewPoint = viewPoint <$> get
backGroundColor :: March s Color
backGroundColor = backGround <$> get
| phi16/RayMarch | RayMarch/Types.hs | gpl-3.0 | 3,627 | 0 | 12 | 836 | 2,196 | 1,218 | 978 | 115 | 1 |
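Aside (not part of the dataset row above): the `Arith`/`Direction` instances for `Vector` behave like ordinary 3D vector algebra. A minimal sketch, assuming the module is in scope; `vectorExamples` is a hypothetical name.

-- Hypothetical spot checks for the Vector instances.
vectorExamples :: (Float, Float, Float, Float)
vectorExamples =
  ( len (Vector (3, 0, 4))                         -- 5.0
  , dot (Vector (1, 2, 3)) (Vector (4, 5, 6))      -- 4 + 10 + 18 = 32.0
  , crossF (Vector (1, 0, 0)) (Vector (0, 1, 0))   -- length of the cross product of two unit axes: 1.0
  , len (lerp 0.5 zero (Vector (2, 0, 0)))         -- midpoint between the origin and (2,0,0): 1.0
  )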
{-# LANGUAGE RankNTypes, MultiParamTypeClasses, TypeFamilies, FlexibleContexts, CPP #-}
-- |
--
-- Module : Parsek
-- Copyright : Koen Claessen 2003
-- License : GPL
--
-- Maintainer : JP Bernardy
-- Stability : provisional
-- Portability : portable
--
-- This module provides the /Parsek/ library developed by Koen Claessen in his
-- functional pearl article /Parallel Parsing Processes/, Journal of Functional
-- Programming, 14(6), 741-757, Cambridge University Press, 2004:
--
-- <http://www.cs.chalmers.se/~koen/pubs/entry-jfp04-parser.html>
module Text.ParserCombinators.Parsek
-- basic parser type
( Parser -- :: * -> * -> *; Functor, Monad, MonadPlus
, Expect -- :: *; = [String]
, module Text.ParserCombinators.Class
-- parsing & parse methods
, ParseMethod
, ParseResult
, mapErrR
, parseFromFile
, parse
, shortestResult
, longestResult
, longestResults
, allResults
, allResultsStaged
, completeResults
, shortestResultWithLeftover
, longestResultWithLeftover
, longestResultsWithLeftover
, allResultsWithLeftover
, module Control.Applicative
, module Control.Monad
-- , completeResultsWithLine -- :: ParseMethod Char a Int [a]
)
where
import Prelude hiding (exp,pred)
import Data.Maybe (listToMaybe)
import Control.Applicative
import Control.Monad.Fail as Fail
import Control.Monad
( MonadPlus(..)
, forM_
, guard
, ap
)
import Text.ParserCombinators.Class
-------------------------------------------------------------------------
-- type Parser
newtype Parser s a
= Parser (forall res. (a -> Expect s -> P s res) -> Expect s -> P s res)
-- | Parsing processes
data P s res
= Skip Int (P s res) -- ^ skip ahead a number of symbols. At end of file, no effect.
| Look ([s] -> P s res)
| Fail (Err s)
| Result res (P s res)
| Kill (P s res) -- ^ This is a high priority process trying to kill a low-priority one. See <<|>.
-- | Suppress 1st kill instruction
noKill :: P s a -> P s a
noKill (Skip n p) = Skip n (noKill p)
noKill (Look fut) = Look $ noKill . fut
noKill (Fail e) = Fail e
noKill (Result res p) = Result res (noKill p)
noKill (Kill p) = p
skip :: Int -> P s a -> P s a
skip 0 = id
skip n = Skip n
-- The boolean indicates how to interpret 'Kill' instructions. If
-- 'True', the lhs process has a high priority and will kill the rhs
-- when it reaches its Kill instruction (otherwise it fails). If
-- 'False', then both processes have the same priority, and 'Kill'
-- instructions are just propagated.
plus' :: Bool -> P s res -> P s res -> P s res
plus' hasKiller p0 q0 = plus p0 q0 where
noKill' = if hasKiller then noKill else id
Kill p `plus` q | hasKiller = p
| otherwise = Kill $ p `plus` noKill q
p `plus` Kill q | hasKiller = error "plus': Impossible"
| otherwise = Kill $ noKill p `plus` q
Skip m p `plus` Skip n q | m <= n = Skip m $ p `plus` skip (n-m) q
| otherwise = Skip n $ skip (m-n) p `plus` skip n q
Fail err1 `plus` Fail err2 = Fail (err1 ++ err2)
p `plus` Result res q = Result res (p `plus` q)
Result res p `plus` q = Result res (p `plus` q)
Look fut1 `plus` Look fut2 = Look (\s -> fut1 s `plus` fut2 s)
Look fut1 `plus` q = Look (\s -> fut1 s `plus` q)
p `plus` Look fut2 = Look (\s -> p `plus` fut2 s)
p@(Skip _ _) `plus` _ = noKill' p
_ `plus` q@(Skip _ _) = q
type Err s = [(Expect s, -- we expect this stuff
String -- but failed for this reason
)]
-- | An intersection (nesting) of things currently expected
type Expect s = [(String, -- Expect this
Maybe s -- Here
)]
-------------------------------------------------------------------------
-- instances
instance Functor (Parser s) where
fmap p (Parser f) =
Parser (\fut -> f (fut . p))
instance Monad (Parser s) where
return a = Parser (\fut -> fut a)
Parser f >>= k =
Parser (\fut -> f (\a -> let Parser g = k a in g fut))
#if !MIN_VERSION_base(4,11,0)
-- Monad(fail) was removed in GHC 8.8.1
fail = Fail.fail
#endif
instance Fail.MonadFail (Parser s) where
fail s = Parser (\_fut exp -> Fail [(exp,s)])
instance MonadPlus (Parser s) where
mzero = Parser (\_fut exp -> Fail [(exp,"mzero")])
mplus (Parser f) (Parser g) =
Parser (\fut exp -> plus' False (f fut exp) (g fut exp))
instance Applicative (Parser s) where
pure = return
(<*>) = ap
instance Alternative (Parser s) where
(<|>) = mplus
empty = mzero
instance IsParser (Parser s) where
type SymbolOf (Parser s) = s
satisfy pred =
Parser $ \fut exp -> Look $ \xs -> case xs of
(c:_) | pred c -> Skip 1 $ fut c exp
_ -> Fail [(exp,"satisfy")]
look = Parser $ \fut exp -> Look $ \s -> fut s exp
label msg (Parser f) =
Parser $ \fut exp ->
Look $ \xs ->
f (\a _ -> fut a exp) -- drop the extra expectation in the future
((msg,listToMaybe xs):exp) -- locally have an extra expectation
Parser f <<|> Parser g = Parser $ \fut exp ->
plus' True (f (\a x -> Kill (fut a x)) exp) (g fut exp)
-------------------------------------------------------------------------
-- type ParseMethod, ParseResult
type ParseMethod s a r = P s a -> [s] -> ParseResult s r
type ParseResult s r
= Either (Err s) r
mapErrR :: (s -> s') -> ParseResult s r -> ParseResult s' r
mapErrR _ (Right x) = Right x
mapErrR f (Left x) = Left (mapErr f x)
first f (a,b) = (f a,b)
second f (a,b) = (a, f b)
mapErr :: (a -> b) -> Err a -> Err b
mapErr f = map (first (mapExpect f))
mapExpect :: (a -> b) -> Expect a -> Expect b
mapExpect f = map (second (fmap f))
-- parse functions
parseFromFile :: Parser Char a -> ParseMethod Char a r -> FilePath -> IO (ParseResult Char r)
parseFromFile p method file =
do s <- readFile file
return (parse p method s)
parse :: Parser s a -> ParseMethod s a r -> [s] -> ParseResult s r
parse (Parser f) method xs = method (f (\a _exp -> Result a (Fail [])) []) xs
-- parse methods
shortestResult :: ParseMethod s a a
shortestResult = scan
where
scan (Skip n p) xs = scan p (drop n xs)
scan (Result res _) _ = Right res
scan (Fail err) _ = Left err
scan (Look f) xs = scan (f xs) xs
longestResult :: ParseMethod s a a
longestResult p0 = scan p0 Nothing
where
scan (Skip n p) mres xs = scan p mres (drop n xs)
scan (Result res p) _ xs = scan p (Just res) xs
scan (Fail err) Nothing _ = Left err
scan (Fail _ ) (Just res) _ = Right res
scan (Look f) mres xs = scan (f xs) mres xs
longestResults :: ParseMethod s a [a]
longestResults p0 = scan p0 [] []
where
scan (Skip n p) [] old xs = scan p [] old (drop n xs)
scan (Skip n p ) new old xs = scan p [] new (drop n xs)
scan (Result res p) new old xs = scan p (res:new) [] xs
scan (Fail err) [] [] _ = Left err
scan (Fail _) [] old _ = Right old
scan (Fail _) new _ _ = Right new
scan (Look f) new old xs = scan (f xs) new old xs
allResultsStaged :: ParseMethod s a [[a]]
allResultsStaged p0 xs0 = Right (scan p0 [] xs0)
where
scan (Skip n p) ys xs = ys : scan p [] (drop n xs)
scan (Result res p) ys xs = scan p (res:ys) xs
scan (Fail _) ys _ = [ys]
scan (Look f) ys xs = scan (f xs) ys xs
allResults :: ParseMethod s a [a]
allResults = scan
where
scan (Skip n p) xs = scan p (drop n xs)
scan (Result res p) xs = Right (res : scan' p xs)
scan (Fail err) _ = Left err
scan (Look f) xs = scan (f xs) xs
scan' p xs =
case scan p xs of
Left _ -> []
Right ress -> ress
completeResults :: ParseMethod s a [a]
completeResults = scan
where
scan (Skip n p) xs = scan p (drop n xs)
scan (Result res p) [] = Right (res : scan' p [])
scan (Result _ p) xs = scan p xs
scan (Fail err) _ = Left err
scan (Look f) xs = scan (f xs) xs
scan' p xs =
case scan p xs of
Left _ -> []
Right ress -> ress
-- with left overs
shortestResultWithLeftover :: ParseMethod s a (a,[s])
shortestResultWithLeftover = scan
where
scan (Skip n p) xs = scan p (drop n xs)
scan (Result res _) xs = Right (res,xs)
scan (Fail err) _ = Left err
scan (Look f) xs = scan (f xs) xs
longestResultWithLeftover :: ParseMethod s a (a,[s])
longestResultWithLeftover p0 = scan p0 Nothing
where
scan (Skip n p) mres xs = scan p mres (drop n xs)
scan (Result res p) _ xs = scan p (Just (res,xs)) xs
scan (Fail err) Nothing _ = Left err
scan (Fail _) (Just resxs) _ = Right resxs
scan (Look f) mres xs = scan (f xs) mres xs
longestResultsWithLeftover :: ParseMethod s a ([a],Maybe [s])
longestResultsWithLeftover p0 = scan p0 empty empty
where
scan (Skip n p) ([],_) old xs = scan p empty old $ drop n xs
scan (Skip n p) new _ xs = scan p empty new $ drop n xs
scan (Result res p) (as,_) _ xs = scan p (res:as,Just xs) empty xs
scan (Fail err) ([],_) ([],_) _ = Left err
scan (Fail _) ([],_) old _ = Right old
scan (Fail _) new _ _ = Right new
scan (Look f) new old xs = scan (f xs) new old xs
empty = ([],Nothing)
allResultsWithLeftover :: ParseMethod s a [(a,[s])]
allResultsWithLeftover = scan
where
scan (Skip n p) xs = scan p $ drop n xs
scan (Result res p) xs = Right ((res,xs) : scan' p xs)
scan (Fail err) [] = Left err
scan (Look f) xs = scan (f xs) xs
scan' p xs =
case scan p xs of
Left _ -> []
Right ress -> ress
| jyp/Parsek | Text/ParserCombinators/Parsek.hs | gpl-3.0 | 10,201 | 0 | 16 | 3,183 | 4,055 | 2,084 | 1,971 | 204 | 12 |
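Aside (not part of the dataset row above): the parse methods differ mainly in how much input they insist on consuming. A minimal sketch, assuming this module's re-exports plus `Data.Char (isDigit)` are in scope; `digits` and the result names are hypothetical.

-- Hypothetical comparison of three parse methods on the same parser.
digits :: Parser Char String
digits = many (satisfy isDigit)

shortest, longest :: ParseResult Char String
shortest = parse digits shortestResult "123ab"   -- Right ""    (succeeds immediately, consuming nothing)
longest  = parse digits longestResult  "123ab"   -- Right "123" (consumes as many digits as it can)

complete :: ParseResult Char [String]
complete = parse digits completeResults "123"    -- Right ["123"] (keeps only parses that reach end of input)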
{-# LANGUAGE OverloadedStrings #-}
-- | Pretty-printing the functional language, parameterised on how to
-- pretty-print variables.
module HipSpec.Lang.PrettyFO where
import Text.PrettyPrint
import HipSpec.Lang.FunctionalFO
import HipSpec.Lang.Type hiding ((:::))
import HipSpec.Lang.PrettyUtils
type Id a = a -> Doc
ppFun :: Id a -> Function a -> Doc
ppFun p (Function f tvs args res b) =
hang (p f <+> "::") 2 pp_type $$
hang (p f) 2 (hang (csv (map (p . fst) args) <+> "=") 2 (ppBody p b))
where
-- | Pretty printing the type
pp_type = pp_forall (pp_args pp_res)
pp_forall = case tvs of
[] -> id
_ -> hang ("forall" <+> sep (map p tvs) <+> ".") 2
pp_args = case map snd args of
[] -> id
as -> \ r -> hang (csv (map (ppType p) as) <+> "->") 2 r
pp_res = ppType p res
ppBody :: Id a -> Body a -> Doc
ppBody p b0 = case b0 of
Case e alts -> hang ("case" <+> ppExpr p e <+> "of") 2
(inside "{ " "; " "}" (map (ppAlt p) alts))
Body e -> ppExpr p e
ppAlt :: Id a -> Alt a -> Doc
ppAlt p (pat,rhs) = hang (ppPat p pat <+> "->") 2 (ppBody p rhs)
ppPat :: Id a -> Pattern a -> Doc
ppPat p pat = case pat of
Default -> "_"
ConPat c tys args -> hang (p c) 2 (ppTysArgs p tys (map (ppTyped p) args))
LitPat i -> integer i
ppTyped :: Id a -> (a,Type a) -> Doc
ppTyped p (x,t) = hang (p x <+> "::") 2 (ppType p t)
ppExpr :: Id a -> Expr a -> Doc
ppExpr p e0 = case e0 of
App t1 t2 e1 e2 -> hang "app" 2 (ppTysArgs p [t1,t2] (map (ppExpr p) [e1,e2]))
Fun f tys args -> hang (p f) 2 (ppTysArgs p tys (map (ppExpr p) args))
Ptr f tys -> hang (p f <> "_ptr") 2 (ppTysArgs p tys [])
Lit x -> integer x
ppTysArgs :: (a -> Doc) -> [Type a] -> [Doc] -> Doc
ppTysArgs _ [] [] = empty
ppTysArgs p tys pp_args = csv $ pp_tys ++ pp_args
where
pp_tys = [ "@" <+> ppType p t | t <- tys ]
ppType :: (a -> Doc) -> Type a -> Doc
ppType p t0 = case t0 of
TyVar x -> p x
ArrTy t1 t2 -> hang "Fn" 2 (csv (map (ppType p) [t1,t2]))
TyCon tc ts -> hang (p tc) 2 (csv (map (ppType p) ts))
Star -> error "PrettyFO.ppType: star"
Forall{} -> error "PrettyFO.ppType: forall"
| danr/hipspec | src/HipSpec/Lang/PrettyFO.hs | gpl-3.0 | 2,244 | 0 | 18 | 646 | 1,097 | 551 | 546 | 50 | 5 |
module L.Pure where
import qualified L.Type as S
import qualified S.Type as S
import L.Eval.Generic ( pure )
import L.Eval.Monadic ( eval )
import L.Reduce (imo)
import Control.Monad ( forM_, when )
import qualified Data.Map.Strict as M
import System.IO
find_pure = do
forM_ ( concat S.normalforms ) $ \ t -> do
case eval (10^3) t of
Nothing -> do
-- putStr "!" ; hFlush stdout
return ()
Just _ -> do
when (pure t) $ do
print t
-- putStr "." ; hFlush stdout
| jwaldmann/s | L/Pure.hs | gpl-3.0 | 584 | 0 | 19 | 211 | 175 | 98 | 77 | 17 | 2 |
{-# LANGUAGE TemplateHaskell, TypeFamilies, MultiParamTypeClasses, UndecidableInstances, DataKinds, GADTs, ConstraintKinds, FlexibleInstances #-}
module Lamdu.Sugar.Types.Expression
( Expr, Body
, Term(..)
, _BodyLam, _BodyLabeledApply, _BodySimpleApply
, _BodyRecord, _BodyFragment, _BodyLeaf, _BodyNullaryInject
, _BodyToNom, _BodyIfElse, _BodyPostfixApply, _BodyPostfixFunc
, Leaf(..), _LeafLiteral, _LeafHole, _LeafGetVar, _LeafInject
, AnnotatedArg(..), aaTag, aaExpr
, OperatorArgs(..), oaLhs, oaRhs, oaSwapArguments
, LabeledApply(..), aFunc, aMOpArgs, aAnnotatedArgs, aPunnedArgs
, PostfixApply(..), pArg, pFunc
, PostfixFunc(..), _PfCase, _PfFromNom, _PfGetField
, App(..), appFunc, appArg
, Lambda(..), lamFunc, lamLightweight, lamApplyLimit
, Nominal(..), nTId, nVal
-- Binders
, Let(..), lValue, lNames, lBody
, Meta.DefinitionState(..)
, BinderParamScopeId(..), bParamScopeId
, Binder(..), bBody, bAddOuterLet
, BinderBody(..), _BinderLet, _BinderTerm
, Function(..), fChosenScopeProp, fParams, fBody, fBodyScopes
, AssignPlain(..), apAddFirstParam, apBody
, Assignment(..), _BodyFunction, _BodyPlain
-- Holes
, Hole(..), holeOptions, holeTagSuffixes
, HoleOpt(..), _HoleBinder, _HoleVarsRecord
, Query(..), qLangInfo, qSearchTerm
, QueryLangInfo(..), qLangId, qLangDir, qCodeTexts, qUITexts, qNameTexts
, hasQueryLangInfo
-- Fragments
, Fragment(..), fExpr, fHeal, fTypeMismatch, fOptions, fOptApply, fTagSuffixes
, FragOpt(..), _FragPostfix, _FragInject, _FragApplyFunc, _FragOp
, FragOperator(..), oFunc, oRightArg, oAnnotatedArgs
-- If/else
, IfElse(..), iIf, iThen, iElse
, Else(..), _SimpleElse, _ElseIf
, ElseIfBody(..), eAddLet, eIfElse
-- Record & Cases
, Composite(..), cList, cPunnedItems, cTail
, CompositeTail(..), _OpenCompositeTail, _ClosedCompositeTail
, PunnedVar(..), pvVar, pvTagEntityId
, MorphWitness(..)
) where
import qualified Control.Lens as Lens
import Control.Monad.Unit (Unit)
import Data.Property (Property)
import Data.Kind (Type)
import Hyper
import Hyper.Syntax (App(..), appFunc, appArg)
import Lamdu.Data.Anchors (BinderParamScopeId(..), bParamScopeId)
import qualified Lamdu.Data.Meta as Meta
import Lamdu.Sugar.Internal.EntityId (EntityId)
import Lamdu.Sugar.Types.Eval (ParamScopes)
import Lamdu.Sugar.Types.GetVar (GetVar)
import Lamdu.Sugar.Types.Parts
import Lamdu.Sugar.Types.Tag
import Lamdu.Sugar.Types.Type (TId)
import qualified Lamdu.Sugar.Types.Type as T
import Lamdu.Prelude
type Body e v name (i :: Type -> Type) o = e v name i o # Annotated (Payload v o)
data AnnotatedArg v name i o k = AnnotatedArg
{ _aaTag :: Tag name
, _aaExpr :: k :# Term v name i o
} deriving Generic
data OperatorArgs v name i o k = OperatorArgs
{ _oaLhs :: k :# Term v name i o
, _oaRhs :: k :# Term v name i o
, _oaSwapArguments :: o Bool -- Returns whether fragment were added or removed
} deriving Generic
-- TODO: func + specialArgs into a single sum type so that field order
-- matches gui order, no need for special traversal code
data LabeledApply v name i o k = LabeledApply
{ _aFunc :: k :# Const (GetVar name o)
, _aMOpArgs :: Maybe (OperatorArgs v name i o k)
, _aAnnotatedArgs :: [AnnotatedArg v name i o k]
, _aPunnedArgs :: [PunnedVar name o k]
} deriving Generic
data PostfixApply v name i o k = PostfixApply
{ _pArg :: k :# Term v name i o
, _pFunc :: k :# PostfixFunc v name i o
} deriving Generic
data Lambda v name i o f = Lambda
{ _lamLightweight :: Bool
, _lamApplyLimit :: FuncApplyLimit
, _lamFunc :: Function v name i o f
} deriving Generic
-- | An expression marked for transformation.
-- Holds an expression to be transformed but acts like a hole.
data Fragment v name i o k = Fragment
{ _fExpr :: k :# Term v name i o
, _fHeal :: o EntityId
, _fTypeMismatch :: Maybe (Annotated EntityId # T.Type name Unit)
, _fOptions :: i (Query -> i [Option FragOpt name i o])
, _fOptApply :: i (Option FragOpt name i o)
-- An option to apply (with a hole).
-- Used for the actions to turn this hole into literal (i.e pressing "5")
, _fTagSuffixes :: TagSuffixes -- See comment for holeTagSuffixes
} deriving Generic
data FragOpt v name i o k
= FragPostfix [k :# PostfixFunc v name i o] -- a single option can suggest chaining of multiple post-fix applications
| FragInject (TagRef name i o)
| FragWrapInRec (TagRef name i o)
| FragApplyFunc (GetVar name o)
| FragOp (FragOperator v name i o k)
| FragToNom (TId name o)
| FragLam
| FragDefer
| FragIf (k :# Term v name i o)
| FragArgument (HoleOpt v name i o k) -- Apply fragmented expr with argument
deriving Generic
data FragOperator v name i o k = FragOperator
{ _oFunc :: k :# Const (GetVar name o)
, -- Argument on right-hand-side (LTR) of operator.
-- (usually a hole, but may be completed to other values)
_oRightArg :: k :# Term v name i o
, _oAnnotatedArgs :: [Tag name]
} deriving Generic
data Hole name i o = Hole
{ _holeOptions ::
i (Query -> i [Option HoleOpt name i o])
-- Inner `i` serves two purposes:
-- Name walk requires monadic place to process names.
-- Hole can prepare results depending on the query and avoid doing work
-- if the query filters it out.
, _holeTagSuffixes :: TagSuffixes
-- When tag suffixes are created by the name pass this is populated,
-- should be given back in the query.
-- TODO: More elegant solution?
} deriving stock Generic
data HoleOpt v name i o k
= HoleBinder (Binder v name i o k)
| HoleVarsRecord [name] -- List of fields
deriving stock Generic
data Else v name i o f
= SimpleElse (Term v name i o f)
| ElseIf (ElseIfBody v name i o f)
deriving Generic
data ElseIfBody v name i o k = ElseIfBody
{ _eAddLet :: o EntityId
, _eIfElse :: IfElse v name i o k
} deriving Generic
data IfElse v name i o k = IfElse
{ _iIf :: k :# Term v name i o
, _iThen :: k :# Term v name i o
, _iElse :: k :# Else v name i o
} deriving Generic
data CompositeTail v name i o k
= OpenCompositeTail (k :# Term v name i o)
| ClosedCompositeTail (ClosedCompositeActions o)
deriving Generic
data Composite v name i o k = Composite
{ _cList :: TaggedList name i o (k :# Term v name i o)
, -- Punned items are like Haskell's NamedFieldPuns
_cPunnedItems :: [PunnedVar name o k]
, _cTail :: CompositeTail v name i o k
} deriving Generic
data Nominal v name i o k = Nominal
{ _nTId :: TId name o
, _nVal :: k :# Binder v name i o
} deriving Generic
data PostfixFunc v name i o k
= PfCase (Composite v name i o k)
| PfFromNom (TId name o)
| PfGetField (TagRef name i o)
deriving Generic
data Leaf name i o
= LeafLiteral (Literal (Property o))
| LeafHole (Hole name i o)
| LeafGetVar (GetVar name o)
| LeafInject (TagRef name i o)
deriving Generic
data Term v name i o k
= BodyLam (Lambda v name i o k)
| BodySimpleApply (App (Term v name i o) k)
| BodyPostfixApply (PostfixApply v name i o k)
| BodyLabeledApply (LabeledApply v name i o k)
| BodyRecord (Composite v name i o k)
| BodyIfElse (IfElse v name i o k)
| BodyToNom (Nominal v name i o k)
| BodyPostfixFunc (PostfixFunc v name i o k)
| BodyNullaryInject (NullaryInject name i o k)
| BodyFragment (Fragment v name i o k)
| BodyLeaf (Leaf name i o)
deriving Generic
data Let v name i o k = Let
{ _lValue :: k :# Assignment v name i o -- "let foo = [[bar]] in x"
, _lNames :: LhsNames name i o v -- let [[foo]] = bar in x
, _lBody :: k :# Binder v name i o -- "let foo = bar in [[x]]"
} deriving Generic
-- An expression with 0 or more let items,
-- Appear in a:
-- * Function: "\x -> [[THIS]]"
-- * ToNom: "«X [[THIS]]"
-- * Definition or let item value: "x = [[THIS]]"
-- * Let-item/redex: "let x = y in [[THIS]]"
data Binder v name i o k = Binder
{ _bAddOuterLet :: o EntityId
, _bBody :: BinderBody v name i o k
} deriving Generic
data BinderBody v name i o k
= BinderLet (Let v name i o k)
| BinderTerm (Term v name i o k)
deriving Generic
data Function v name i o k = Function
{ _fChosenScopeProp :: i (Property o (Maybe BinderParamScopeId))
, _fParams :: LhsNames name i o v
, _fBody :: k :# Binder v name i o
, -- The scope inside a lambda
_fBodyScopes :: ParamScopes
} deriving Generic
data AssignPlain v name i o f = AssignPlain
{ _apAddFirstParam :: o EntityId
, _apBody :: Binder v name i o f
} deriving Generic
data Assignment v name i o f
= BodyFunction (Function v name i o f)
| BodyPlain (AssignPlain v name i o f)
deriving Generic
traverse Lens.makeLenses
[ ''AnnotatedArg, ''AssignPlain, ''Binder
, ''Composite, ''Fragment, ''FragOperator
, ''Function, ''Hole
, ''IfElse, ''ElseIfBody, ''LabeledApply, ''Lambda, ''Let
, ''Nominal, ''OperatorArgs, ''PostfixApply
] <&> concat
traverse Lens.makePrisms
[''Assignment, ''BinderBody, ''CompositeTail, ''Else
, ''FragOpt, ''HoleOpt, ''Leaf, ''PostfixFunc, ''Term
] <&> concat
traverse makeHTraversableAndBases
[ ''AnnotatedArg, ''Assignment, ''AssignPlain, ''Binder, ''BinderBody
, ''Composite, ''CompositeTail, ''Else, ''ElseIfBody
, ''Fragment, ''FragOperator, ''FragOpt, ''Function, ''HoleOpt, ''IfElse
, ''LabeledApply, ''Lambda, ''Let, ''Nominal
, ''OperatorArgs, ''PostfixApply, ''PostfixFunc, ''Term
] <&> concat
traverse makeHMorph
[ ''Composite, ''FragOperator, ''IfElse, ''LabeledApply, ''Let, ''OperatorArgs, ''PostfixApply, ''PostfixFunc
] <&> concat
-- TODO: Replace boilerplate below with TH
instance RNodes (Assignment v name i o)
instance RNodes (Binder v name i o)
instance RNodes (Else v name i o)
instance RNodes (Function v name i o)
instance RNodes (FragOpt v name i o)
instance RNodes (HoleOpt v name i o)
instance RNodes (PostfixFunc v name i o)
instance RNodes (Term v name i o)
type Dep v (c :: HyperType -> Constraint) name i o =
( c (Assignment v name i o)
, c (Binder v name i o)
, c (Const (GetVar name o))
, c (Const (i (TagChoice name o)))
, c (Const (TagRef name i o))
, c (Else v name i o)
, c (PostfixFunc v name i o)
, c (Term v name i o)
)
instance Dep v c name i o => Recursively c (Assignment v name i o)
instance Dep v c name i o => Recursively c (Binder v name i o)
instance Dep v c name i o => Recursively c (Else v name i o)
instance Dep v c name i o => Recursively c (PostfixFunc v name i o)
instance Dep v c name i o => Recursively c (Term v name i o)
instance (Dep v c name i o, c (HoleOpt v name i o)) => Recursively c (HoleOpt v name i o)
instance (Dep v c name i o, c (FragOpt v name i o)) => Recursively c (FragOpt v name i o)
instance (Dep v c name i o, c (Function v name i o)) => Recursively c (Function v name i o)
instance RTraversable (Assignment v name i o)
instance RTraversable (Binder v name i o)
instance RTraversable (Else v name i o)
instance RTraversable (PostfixFunc v name i o)
instance RTraversable (Term v name i o)
|
lamdu/lamdu
|
src/Lamdu/Sugar/Types/Expression.hs
|
gpl-3.0
| 11,527
| 0
| 14
| 2,819
| 3,649
| 2,117
| 1,532
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
-- | Some misc utility functions
module PasteWatch.Utils
(
sendEmail,
stripQuotes
) where
import qualified Data.Text.Lazy as T
import Network.Socket
import Network.Mail.SMTP
import Data.Monoid ( (<>) )
import PasteWatch.Types (MatchText (..) )
-- | Send an email with the given subject and contents
-- using the given (unauthenticated) SMTP server.
-- Originally adapted from the example code in Network.SMTP.Client.
sendEmail::Address -> [Address] -> HostName -> MatchText -> String -> IO()
sendEmail sender recipients smtpServer (MatchText match) content = sendMail smtpServer mail
where
to = recipients
cc = []
bcc = []
body = plainTextPart $ T.pack content
subject = "Pastebin alert. Match on " <> match
mail = simpleMail sender to cc bcc subject [body]
stripQuotes :: String -> String
stripQuotes ('"':s)  | not (null s) && last s == '"'  = init s
                     | otherwise                      = s
stripQuotes ('\'':s) | not (null s) && last s == '\'' = init s
                     | otherwise                      = s
stripQuotes x = x
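-- A few illustrative cases for 'stripQuotes' (the inputs below are
-- hypothetical examples, not real paste content):
--
-- >>> stripQuotes "\"match me\""
-- "match me"
-- >>> stripQuotes "'match me'"
-- "match me"
-- >>> stripQuotes "no quotes"
-- "no quotes"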
|
ArthurClune/pastewatch
|
src/PasteWatch/Utils.hs
|
gpl-3.0
| 1,167
| 0
| 11
| 330
| 288
| 156
| 132
| 24
| 1
|
{-# LANGUAGE CPP #-}
module Import.NoFoundation
( module Import
) where
#if MIN_VERSION_classy_prelude(1,0,0)
import ClassyPrelude.Yesod as Import hiding (Handler)
#else
import ClassyPrelude.Yesod as Import
#endif
import Control.Error.Safe as Import (justZ)
import Database.Persist.Sql as Import
import Model as Import
import Settings as Import
import Settings.StaticFiles as Import
import Yesod.Core.Types as Import (loggerSet)
import Yesod.Default.Config2 as Import
-- import Yesod.Form as Import hiding (parseTime)
|
alexeyzab/cards-with-comrades
|
backend/src/Import/NoFoundation.hs
|
gpl-3.0
| 582
| 0
| 5
| 126
| 89
| 65
| 24
| 11
| 0
|
-- Parse module.
-- By Gregory W. Schwartz
--
{- | Collection of functions for the parsing of a fasta file. Uses the lazy
- ByteString type.
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE BangPatterns #-}
module Data.Fasta.ByteString.Lazy.Parse ( parsecFasta
, parsecCLIPFasta
, attoFasta
, attoCLIPFasta
, pipesFasta
, pipesCLIPFasta
, removeNs
, removeN
, removeCLIPNs ) where
-- Built-in
import Data.Char
import Text.Parsec
import Text.Parsec.ByteString.Lazy
import qualified Data.Map.Strict as Map
import qualified Data.ByteString as BW
import qualified Data.ByteString.Char8 as SB
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Control.Applicative as CA
import Control.Monad (void)
-- Cabal
import qualified Data.Attoparsec.ByteString.Char8 as A
import Pipes
import qualified Pipes.Prelude as P
import qualified Pipes.ByteString as PB
import qualified Pipes.Group as PG
import qualified Pipes.Attoparsec as PA
import Control.Lens (view)
import qualified Control.Foldl as FL
-- Local
import Data.Fasta.ByteString.Lazy.Types
eol :: Parsec B.ByteString u String
eol = choice . map (try . string) $ ["\n\r", "\r\n", "\n", "\r"]
eoe :: Parsec B.ByteString u ()
eoe = do
lookAhead (void $ char '>') <|> eof
fasta :: Parsec B.ByteString u FastaSequence
fasta = do
spaces
char '>'
header <- manyTill (satisfy (/= '>')) eol
fseq <- manyTill anyChar eoe
return (FastaSequence { fastaHeader = B.pack header
, fastaSeq = B.pack . removeWhitespace $ fseq } )
where
removeWhitespace = filter (`notElem` ("\n\r " :: String))
fastaFile :: Parsec B.ByteString u [FastaSequence]
fastaFile = do
spaces
many fasta
fastaCLIP :: Parsec B.ByteString u (FastaSequence, [FastaSequence])
fastaCLIP = do
spaces
char '>'
germline <- fasta
clones <- many $ try fasta
return (germline, clones)
fastaCLIPFile :: Parsec B.ByteString u [(FastaSequence, [FastaSequence])]
fastaCLIPFile = do
spaces
many fastaCLIP
-- | Parse a standard fasta file into a list of fasta sequences
parsecFasta :: B.ByteString -> [FastaSequence]
parsecFasta = eToV . parse fastaFile "error"
where
eToV (Right x) = x
eToV (Left x) = error ("Unable to parse fasta file\n" ++ show x)
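-- | A minimal usage sketch (illustrative only, not part of the library API):
-- parse a small in-memory fasta document with 'parsecFasta' and count its
-- sequences. The inline literal relies on the OverloadedStrings extension
-- enabled above.
exampleSequenceCount :: Int
exampleSequenceCount = length (parsecFasta ">seq1\nACGT\n>seq2\nTTGA\n")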
-- | Parse a CLIP fasta file into a CloneMap
parsecCLIPFasta :: B.ByteString -> CloneMap
parsecCLIPFasta = Map.fromList
. map (\(!x, (!y, !z)) -> ((x, y), z))
. zip [0..]
. eToV
. parse fastaCLIPFile "error"
where
eToV (Right x) = x
eToV (Left x) = error ("Unable to parse fasta file\n" ++ show x)
-- | attoparsec parser for any character, skipping surrounding whitespace
anyButSpace :: A.Parser Char
anyButSpace = do
A.skipSpace
x <- A.anyChar
A.skipSpace
return x
-- | attoparsec parser for a fasta type
fasta' :: A.Parser FastaSequence
fasta' = do
header <- A.takeWhile (\x -> x /= '\n' && x /= '\r')
A.endOfLine
fseq <- A.manyTill anyButSpace (void (A.char '>') CA.<|> A.endOfInput)
return FastaSequence { fastaHeader = B.fromStrict header
, fastaSeq = B.pack fseq }
-- | attoparsec parser for a fasta file
fastaFile' :: A.Parser [FastaSequence]
fastaFile' = do
A.skipSpace
A.char '>'
A.many' fasta'
-- | attoparsec parser for a CLIP fasta sequence
fastaCLIP' :: A.Parser FastaSequence
fastaCLIP' = do
header <- A.takeWhile (\x -> x /= '\n' && x /= '\r')
A.endOfLine
fseq <- A.manyTill anyButSpace (void (A.char '>') CA.<|> A.endOfInput)
return FastaSequence { fastaHeader = B.fromStrict header
, fastaSeq = B.pack fseq }
clone' :: A.Parser (Germline, [FastaSequence])
clone' = do
A.skipSpace
germline <- fastaCLIP'
fseqs <- A.manyTill fasta' (void (A.char '>') CA.<|> A.endOfInput)
return (germline, fseqs)
-- | attoparsec parser for a CLIP fasta file
fastaCLIPFile' :: A.Parser [(Germline, [FastaSequence])]
fastaCLIPFile' = do
A.skipSpace
A.string ">>"
A.many' clone'
-- | Parse a standard fasta file
attoFasta :: BW.ByteString -> [FastaSequence]
attoFasta = eToV . A.parseOnly fastaFile'
where
eToV (Right x) = x
eToV (Left x) = error ("Unable to parse fasta file\n" ++ show x)
-- | Parse a CLIP fasta file into text sequences
attoCLIPFasta :: BW.ByteString -> [(Germline, [FastaSequence])]
attoCLIPFasta = eToV . A.parseOnly fastaCLIPFile'
where
eToV (Right x) = x
eToV (Left x) = error ("Unable to parse fasta file\n" ++ show x)
-- | Parse a standard fasta file into a pipe
pipesFasta :: (MonadIO m)
=> Producer SB.ByteString m ()
-> Producer FastaSequence m ()
pipesFasta p = FL.purely
PG.folds
FL.mconcat
( view (PB.splits (fromIntegral $ ord '>'))
. PB.drop (1 :: Int)
$ p )
>-> P.map toFasta
where
toFasta x = FastaSequence { fastaHeader = B.fromChunks
. take 1
. lines'
$ x
, fastaSeq = B.fromChunks
. tail
. lines'
$ x }
lines' = SB.lines . SB.filter (/= '\r')
-- | Parse a CLIP fasta file into strict text sequences for pipes.
pipesCLIPFasta :: (MonadIO m)
=> Producer BW.ByteString m ()
-> Producer (Germline, [FastaSequence]) m (Either (PA.ParsingError, Producer BW.ByteString m ()) ())
pipesCLIPFasta = PA.parsed clone'
. PB.drop 2
. PB.dropWhile (`BW.elem` "\n\r\t ")
-- | Remove Ns from a collection of sequences
removeNs :: [FastaSequence] -> [FastaSequence]
removeNs = map (\x -> x { fastaSeq = noN . fastaSeq $ x })
where
noN = B.map (\y -> if (y /= 'N' && y /= 'n') then y else '-')
-- | Remove Ns from a sequence
removeN :: FastaSequence -> FastaSequence
removeN x = x { fastaSeq = noN . fastaSeq $ x }
where
noN = B.map (\y -> if (y /= 'N' && y /= 'n') then y else '-')
-- | Remove Ns from a collection of CLIP fasta sequences
removeCLIPNs :: CloneMap -> CloneMap
removeCLIPNs = Map.fromList . map remove . Map.toList
where
remove ((!x, !y), !z) = ((x, newSeq y), map newSeq z)
newSeq !x = x { fastaSeq = noN . fastaSeq $ x }
noN = B.map (\y -> if (y /= 'N' && y /= 'n') then y else '-')
|
GregorySchwartz/fasta
|
src/Data/Fasta/ByteString/Lazy/Parse.hs
|
gpl-3.0
| 6,784
| 0
| 15
| 2,108
| 1,953
| 1,051
| 902
| 150
| 2
|
module AST where
import Data.Text
type Identifier = Text
data CompareOp = Eq | Neq | Gt | Lt | Gte | Lte
deriving (Show,Eq)
data Expr a = LValueId Identifier a
| LValueField (Expr a) Identifier a
| LValueSubscript (Expr a) (Expr a) a
| Nil a
| Seq [Expr a] a -- Has value of the last expr
| Void a -- () or let ... in end;
| IntLit Integer a
| StringLit Text a
| Negation (Expr a) a
| FunctionCall Identifier [Expr a] a -- This has a value if it is a function, none for procedure
| Add (Expr a) (Expr a) a | Sub (Expr a) (Expr a) a
| Mult (Expr a) (Expr a) a | Div (Expr a) (Expr a) a
| Comp CompareOp (Expr a) (Expr a) a
| And (Expr a) (Expr a) a
| Or (Expr a) (Expr a) a
| Record Identifier [(Identifier,Expr a)] a -- typeid {id=exp,id=exp,...}
| Array Identifier (Expr a) (Expr a) a -- typeid [n] of v, duplicates v n times
| Assignment (Expr a) (Expr a) a -- Compounds share backing and are never released
| IfThenElse (Expr a) (Expr a) (Expr a) a -- The second two exprs must have the same type
| IfThen (Expr a) (Expr a) a -- The second expr must not produce a value
| While (Expr a) (Expr a) a -- e2 produces no value
| For Identifier (Expr a) (Expr a) (Expr a) a -- for id := e1 to e2 do e3
| Break a
| Let [Decl a] (Expr a) a -- let decs in exps end
deriving (Show,Eq)
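-- | A minimal illustrative value (not part of the original module): the
-- expression @if x > 0 then 1 else 0@ annotated with @()@, showing how the
-- annotation parameter threads through every node.
exampleIf :: Expr ()
exampleIf =
    IfThenElse (Comp Gt (LValueId (pack "x") ()) (IntLit 0 ()) ())
               (IntLit 1 ())
               (IntLit 0 ())
               ()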
type UniqueId = Integer
data Decl a = TypeDec UniqueId Identifier Type a
| VarDec UniqueId Identifier (Expr a) a
            | TVarDec UniqueId Identifier Type (Expr a) a -- id : type-id := expr
| FunDec UniqueId Identifier [(Identifier,Type)] (Expr a) a
| TFunDec UniqueId Identifier [(Identifier,Type)] Type (Expr a) a -- function id (a1:t1,a2:t2,...) :tr = expr
deriving (Show,Eq)
data Type = NamedType Identifier
| RecType [(Identifier,Type)] -- { f1:t1,f2:t2 ...}
| ArrType Type
| FuncType [Type] Type
| Top -- This is the type of nil
| VoidT -- typeof(())
deriving (Show,Eq)
|
joelwilliamson/modern-compilers-exercises
|
AST.hs
|
gpl-3.0
| 2,202
| 0
| 9
| 726
| 712
| 398
| 314
| 44
| 0
|
module Example.Eg12 (eg12) where
import Graphics.Radian
import ExampleUtils
eg12 :: IO Html
eg12 = do
let x = [0, 4 * pi / 100 .. 4 * pi]
plot = Plot [l1, l2] # [height.=300, aspect.=3, strokeWidth.=2,
axisXLabel.="Time", axisYLabel.="sin(x) / cos(x)"]
l1 = Lines x (map sin x) # [stroke.="red"]
l2 = Lines x (map cos x) # [stroke.="blue"]
source = exampleSource "Eg12.hs"
return [shamlet|
<h3>
Example 12 (functional plots)
^{plot}
^{source}
|]
|
openbrainsrc/hRadian
|
examples/Example/Eg12.hs
|
mpl-2.0
| 518
| 4
| 13
| 145
| 195
| 105
| 90
| -1
| -1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.RegionTargetHTTPSProxies.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a TargetHttpsProxy resource in the specified project and region
-- using the data included in the request.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.regionTargetHttpsProxies.insert@.
module Network.Google.Resource.Compute.RegionTargetHTTPSProxies.Insert
(
-- * REST Resource
RegionTargetHTTPSProxiesInsertResource
-- * Creating a Request
, regionTargetHTTPSProxiesInsert
, RegionTargetHTTPSProxiesInsert
-- * Request Lenses
, rthpiRequestId
, rthpiProject
, rthpiPayload
, rthpiRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.regionTargetHttpsProxies.insert@ method which the
-- 'RegionTargetHTTPSProxiesInsert' request conforms to.
type RegionTargetHTTPSProxiesInsertResource =
"compute" :>
"v1" :>
"projects" :>
Capture "project" Text :>
"regions" :>
Capture "region" Text :>
"targetHttpsProxies" :>
QueryParam "requestId" Text :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] TargetHTTPSProxy :>
Post '[JSON] Operation
-- | Creates a TargetHttpsProxy resource in the specified project and region
-- using the data included in the request.
--
-- /See:/ 'regionTargetHTTPSProxiesInsert' smart constructor.
data RegionTargetHTTPSProxiesInsert =
RegionTargetHTTPSProxiesInsert'
{ _rthpiRequestId :: !(Maybe Text)
, _rthpiProject :: !Text
, _rthpiPayload :: !TargetHTTPSProxy
, _rthpiRegion :: !Text
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RegionTargetHTTPSProxiesInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rthpiRequestId'
--
-- * 'rthpiProject'
--
-- * 'rthpiPayload'
--
-- * 'rthpiRegion'
regionTargetHTTPSProxiesInsert
:: Text -- ^ 'rthpiProject'
-> TargetHTTPSProxy -- ^ 'rthpiPayload'
-> Text -- ^ 'rthpiRegion'
-> RegionTargetHTTPSProxiesInsert
regionTargetHTTPSProxiesInsert pRthpiProject_ pRthpiPayload_ pRthpiRegion_ =
RegionTargetHTTPSProxiesInsert'
{ _rthpiRequestId = Nothing
, _rthpiProject = pRthpiProject_
, _rthpiPayload = pRthpiPayload_
, _rthpiRegion = pRthpiRegion_
}
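-- A minimal construction sketch for 'regionTargetHTTPSProxiesInsert'
-- (illustrative only): the project id, region and @myProxy@ payload below are
-- hypothetical placeholders, and the lens operators @&@ and @.~@ are assumed
-- to be in scope (e.g. from Control.Lens):
--
-- @
-- regionTargetHTTPSProxiesInsert "my-project" myProxy "us-central1"
--   & rthpiRequestId .~ Just "00000000-0000-0000-0000-000000000001"
-- @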
-- | An optional request ID to identify requests. Specify a unique request ID
-- so that if you must retry your request, the server will know to ignore
-- the request if it has already been completed. For example, consider a
-- situation where you make an initial request and the request times out.
-- If you make the request again with the same request ID, the server can
-- check if the original operation with the same request ID was received, and
-- if so, will ignore the second request. This prevents clients from
-- accidentally creating duplicate commitments. The request ID must be a
-- valid UUID with the exception that zero UUID is not supported
-- (00000000-0000-0000-0000-000000000000).
rthpiRequestId :: Lens' RegionTargetHTTPSProxiesInsert (Maybe Text)
rthpiRequestId
= lens _rthpiRequestId
(\ s a -> s{_rthpiRequestId = a})
-- | Project ID for this request.
rthpiProject :: Lens' RegionTargetHTTPSProxiesInsert Text
rthpiProject
= lens _rthpiProject (\ s a -> s{_rthpiProject = a})
-- | Multipart request metadata.
rthpiPayload :: Lens' RegionTargetHTTPSProxiesInsert TargetHTTPSProxy
rthpiPayload
= lens _rthpiPayload (\ s a -> s{_rthpiPayload = a})
-- | Name of the region scoping this request.
rthpiRegion :: Lens' RegionTargetHTTPSProxiesInsert Text
rthpiRegion
= lens _rthpiRegion (\ s a -> s{_rthpiRegion = a})
instance GoogleRequest RegionTargetHTTPSProxiesInsert
where
type Rs RegionTargetHTTPSProxiesInsert = Operation
type Scopes RegionTargetHTTPSProxiesInsert =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient RegionTargetHTTPSProxiesInsert'{..}
= go _rthpiProject _rthpiRegion _rthpiRequestId
(Just AltJSON)
_rthpiPayload
computeService
where go
= buildClient
(Proxy ::
Proxy RegionTargetHTTPSProxiesInsertResource)
mempty
|
brendanhay/gogol
|
gogol-compute/gen/Network/Google/Resource/Compute/RegionTargetHTTPSProxies/Insert.hs
|
mpl-2.0
| 5,277
| 0
| 17
| 1,152
| 559
| 335
| 224
| 89
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Gmail.Users.Settings.Filters.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes a filter.
--
-- /See:/ <https://developers.google.com/gmail/api/ Gmail API Reference> for @gmail.users.settings.filters.delete@.
module Network.Google.Resource.Gmail.Users.Settings.Filters.Delete
(
-- * REST Resource
UsersSettingsFiltersDeleteResource
-- * Creating a Request
, usersSettingsFiltersDelete
, UsersSettingsFiltersDelete
-- * Request Lenses
, usfdXgafv
, usfdUploadProtocol
, usfdAccessToken
, usfdUploadType
, usfdUserId
, usfdId
, usfdCallback
) where
import Network.Google.Gmail.Types
import Network.Google.Prelude
-- | A resource alias for @gmail.users.settings.filters.delete@ method which the
-- 'UsersSettingsFiltersDelete' request conforms to.
type UsersSettingsFiltersDeleteResource =
"gmail" :>
"v1" :>
"users" :>
Capture "userId" Text :>
"settings" :>
"filters" :>
Capture "id" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes a filter.
--
-- /See:/ 'usersSettingsFiltersDelete' smart constructor.
data UsersSettingsFiltersDelete =
UsersSettingsFiltersDelete'
{ _usfdXgafv :: !(Maybe Xgafv)
, _usfdUploadProtocol :: !(Maybe Text)
, _usfdAccessToken :: !(Maybe Text)
, _usfdUploadType :: !(Maybe Text)
, _usfdUserId :: !Text
, _usfdId :: !Text
, _usfdCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'UsersSettingsFiltersDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'usfdXgafv'
--
-- * 'usfdUploadProtocol'
--
-- * 'usfdAccessToken'
--
-- * 'usfdUploadType'
--
-- * 'usfdUserId'
--
-- * 'usfdId'
--
-- * 'usfdCallback'
usersSettingsFiltersDelete
:: Text -- ^ 'usfdId'
-> UsersSettingsFiltersDelete
usersSettingsFiltersDelete pUsfdId_ =
UsersSettingsFiltersDelete'
{ _usfdXgafv = Nothing
, _usfdUploadProtocol = Nothing
, _usfdAccessToken = Nothing
, _usfdUploadType = Nothing
, _usfdUserId = "me"
, _usfdId = pUsfdId_
, _usfdCallback = Nothing
}
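-- A minimal construction sketch for 'usersSettingsFiltersDelete'
-- (illustrative only; the filter id below is a hypothetical placeholder).
-- The smart constructor defaults the user id to \"me\", i.e. the
-- authenticated user:
--
-- @
-- usersSettingsFiltersDelete "ANe1Bmj5example"
-- @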
-- | V1 error format.
usfdXgafv :: Lens' UsersSettingsFiltersDelete (Maybe Xgafv)
usfdXgafv
= lens _usfdXgafv (\ s a -> s{_usfdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
usfdUploadProtocol :: Lens' UsersSettingsFiltersDelete (Maybe Text)
usfdUploadProtocol
= lens _usfdUploadProtocol
(\ s a -> s{_usfdUploadProtocol = a})
-- | OAuth access token.
usfdAccessToken :: Lens' UsersSettingsFiltersDelete (Maybe Text)
usfdAccessToken
= lens _usfdAccessToken
(\ s a -> s{_usfdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
usfdUploadType :: Lens' UsersSettingsFiltersDelete (Maybe Text)
usfdUploadType
= lens _usfdUploadType
(\ s a -> s{_usfdUploadType = a})
-- | User\'s email address. The special value \"me\" can be used to indicate
-- the authenticated user.
usfdUserId :: Lens' UsersSettingsFiltersDelete Text
usfdUserId
= lens _usfdUserId (\ s a -> s{_usfdUserId = a})
-- | The ID of the filter to be deleted.
usfdId :: Lens' UsersSettingsFiltersDelete Text
usfdId = lens _usfdId (\ s a -> s{_usfdId = a})
-- | JSONP
usfdCallback :: Lens' UsersSettingsFiltersDelete (Maybe Text)
usfdCallback
= lens _usfdCallback (\ s a -> s{_usfdCallback = a})
instance GoogleRequest UsersSettingsFiltersDelete
where
type Rs UsersSettingsFiltersDelete = ()
type Scopes UsersSettingsFiltersDelete =
'["https://www.googleapis.com/auth/gmail.settings.basic"]
requestClient UsersSettingsFiltersDelete'{..}
= go _usfdUserId _usfdId _usfdXgafv
_usfdUploadProtocol
_usfdAccessToken
_usfdUploadType
_usfdCallback
(Just AltJSON)
gmailService
where go
= buildClient
(Proxy :: Proxy UsersSettingsFiltersDeleteResource)
mempty
|
brendanhay/gogol
|
gogol-gmail/gen/Network/Google/Resource/Gmail/Users/Settings/Filters/Delete.hs
|
mpl-2.0
| 5,176
| 0
| 20
| 1,245
| 785
| 457
| 328
| 115
| 1
|
-- |
-- Module : Xine.Internal.Handle
-- Copyright : (c) Joachim Fasting 2010
-- License : LGPL (see COPYING)
--
-- Maintainer : Joachim Fasting <joachim.fasting@gmail.com>
-- Stability : unstable
-- Portability : not portable
--
-- A 'Handle'-like interface for a Xine engine instance.
module Xine.Internal.Handle (
-- * Xine engine handle
HandleState(..), XineHandle_(..), XineHandle(..), isClosed,
-- * Using handles
modifyXineHandle, withXineHandle, withStream
) where
import Xine.Foreign
import Xine.Internal.Stream (Streams, StreamId)
import qualified Xine.Internal.Stream as S
import Control.Concurrent.MVar
import Control.Monad (when)
data HandleState = Closed | Open deriving Eq
data XineHandle_ = XineHandle_
{ hEngine :: !Engine
, hAudioPort :: !AudioPort
, hVideoPort :: !VideoPort
, hStreams :: !Streams
, hCurrent :: !(Maybe StreamId)
, hState :: !HandleState
}
-- | A xine-lib handle.
newtype XineHandle = XineHandle (MVar XineHandle_)
-- | Test whether the handle is closed.
isClosed :: XineHandle -> IO Bool
isClosed (XineHandle hv) = withMVar hv $ \h -> return (hState h == Closed)
-- | A helper for modifying the internal handle state.
modifyXineHandle :: XineHandle -> (XineHandle_ -> IO XineHandle_) -> IO ()
modifyXineHandle h@(XineHandle hv) f = do
closed <- isClosed h
when closed (fail "XineHandle is closed")
modifyMVar_ hv f
-- | A helper for functions using the xine-handle wrapper.
withXineHandle :: XineHandle -> (XineHandle_ -> IO a) -> IO a
withXineHandle h@(XineHandle hv) f = do
closed <- isClosed h
when closed (fail "XineHandle is closed")
withMVar hv f
-- | A helper for using a given stream.
withStream :: XineHandle -> StreamId -> (Stream -> IO a) -> IO a
withStream h sid f = withXineHandle h $ \hv ->
case S.lookup sid (hStreams hv) of
Just s -> f s
Nothing -> fail $ "No such stream: " ++ show sid
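-- A minimal usage sketch (illustrative only): @sid@ stands for a 'StreamId'
-- obtained when the stream was opened, and @play@ is a hypothetical stream
-- operation from "Xine.Foreign":
--
-- @
-- playStream h sid = withStream h sid $ \stream -> play stream
-- @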
|
joachifm/hxine
|
Xine/Internal/Handle.hs
|
lgpl-2.1
| 1,951
| 0
| 11
| 404
| 494
| 269
| 225
| 46
| 2
|
module Paths_happstack_test (
version,
getBinDir, getLibDir, getDataDir, getLibexecDir,
getDataFileName, getSysconfDir
) where
import qualified Control.Exception as Exception
import Data.Version (Version(..))
import System.Environment (getEnv)
import Prelude
catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a
catchIO = Exception.catch
version :: Version
version = Version {versionBranch = [0,1,0,0], versionTags = []}
bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath
bindir = "/home/wolfgang/.cabal/bin"
libdir = "/home/wolfgang/.cabal/lib/i386-linux-ghc-7.6.3/happstack-test-0.1.0.0"
datadir = "/home/wolfgang/.cabal/share/i386-linux-ghc-7.6.3/happstack-test-0.1.0.0"
libexecdir = "/home/wolfgang/.cabal/libexec"
sysconfdir = "/home/wolfgang/.cabal/etc"
getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath
getBinDir = catchIO (getEnv "happstack_test_bindir") (\_ -> return bindir)
getLibDir = catchIO (getEnv "happstack_test_libdir") (\_ -> return libdir)
getDataDir = catchIO (getEnv "happstack_test_datadir") (\_ -> return datadir)
getLibexecDir = catchIO (getEnv "happstack_test_libexecdir") (\_ -> return libexecdir)
getSysconfDir = catchIO (getEnv "happstack_test_sysconfdir") (\_ -> return sysconfdir)
getDataFileName :: FilePath -> IO FilePath
getDataFileName name = do
dir <- getDataDir
return (dir ++ "/" ++ name)
|
wginolas/playground
|
haskell/happstack-test/dist/build/autogen/Paths_happstack_test.hs
|
lgpl-3.0
| 1,406
| 0
| 10
| 182
| 371
| 213
| 158
| 28
| 1
|
module Sound.Analysis.Vamp.Feature (
Vector
, Feature(..)
, FeatureList
, TimeStamp(..)
, peekFeature
, peekFeatureList
) where
import Control.Monad
import Bindings.Sound.Analysis.Vamp
import Bindings.Sound.Analysis.Vamp.Version2
import qualified Data.Vector.Storable as SV
import Foreign
import Foreign.C
import Sound.Analysis.Vamp.ApiVersion (ApiVersion(..))
import Sound.Analysis.Vamp.TimeStamp
type Vector = SV.Vector Float
data Feature = Feature {
time :: Maybe TimeStamp
, duration :: Maybe TimeStamp
, values :: Vector
, label :: Maybe String
} deriving (Eq, Show)
type FeatureList = [Feature]
peekVector :: Int -> Ptr CFloat -> IO Vector
peekVector n ptr = do
fptr <- mallocForeignPtrArray n
withForeignPtr fptr (\dst -> copyArray dst (castPtr ptr) n)
return $ SV.unsafeFromForeignPtr fptr 0 n
peekFeature :: C'VampFeature -> Maybe C'VampFeatureV2 -> IO Feature
peekFeature v1 v2 = do
let x1 = if toBool (c'VampFeature'hasTimestamp v1)
then Just $ TimeStamp (fromIntegral (c'VampFeature'sec v1))
(fromIntegral (c'VampFeature'nsec v1))
else Nothing
x2 = case v2 of
Nothing -> Nothing
Just x -> if toBool (c'VampFeatureV2'hasDuration x)
then Just $ TimeStamp (fromIntegral (c'VampFeatureV2'durationSec x))
(fromIntegral (c'VampFeatureV2'durationNsec x))
else Nothing
x3 <- peekVector (fromIntegral (c'VampFeature'valueCount v1))
(c'VampFeature'values v1)
x4 <- maybePeek peekCString (c'VampFeature'label v1)
return $ Feature x1 x2 x3 x4
peekFeatureList :: ApiVersion -> C'VampFeatureList -> IO FeatureList
peekFeatureList version x = do
let n = fromIntegral (c'VampFeatureList'featureCount x)
vs = c'VampFeatureList'features x
v1 <- liftM (map c'VampFeatureUnion'v1) (peekArray n vs)
v2 <- if version >= ApiVersion 2
then liftM (map (Just . c'VampFeatureUnion'v2)) (peekArray n (vs `plusPtr` n))
else return (replicate n Nothing)
zipWithM peekFeature v1 v2
|
kaoskorobase/hvamp
|
Sound/Analysis/Vamp/Feature.hs
|
lgpl-3.0
| 2,292
| 0
| 19
| 673
| 635
| 333
| 302
| 53
| 4
|
{-#LANGUAGE ForeignFunctionInterface#-}
{-#LANGUAGE RankNTypes#-}
-----------------------------------------------------------------------------
-- |
-- Module : Bindings.Verba.FFI
-- Copyright : (c) Macil 2014
-- License : PublicDomain
--
-- Maintainer : Macil.dev@gmail.com
-- Stability : experimental
-- Portability : unportable
--
-- FFI interface
--
-----------------------------------------------------------------------------
module Bindings.Verba.C.FFI where
import Bindings.Verba.C.Types
import Foreign
import Foreign.C
-- * Library initialization
-- | CryptoInit
foreign import stdcall "CryptoInit" c_CryptoInit :: CString -- ^ Path to secret keys storage
-> CString -- ^ Path to open keys storage
-> IO Word16
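-- A minimal calling sketch for 'c_CryptoInit' (illustrative only): the
-- storage paths below are hypothetical placeholders and the returned
-- 'Word16' is the library's raw status code:
--
-- @
-- status <- withCString "./keys/secret" $ \sk ->
--             withCString "./keys/open" $ \ok ->
--               c_CryptoInit sk ok
-- @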
-- | SignInit
foreign import stdcall "SignInit" c_SignInit :: CString -- ^ Path to secret keys storage
-> CString -- ^ Path to open keys storage
-> IO Word16
-- | CryptoDone
foreign import stdcall "CryptoDone" c_CryptoDone :: IO Word16
-- | SignDone
foreign import stdcall "SignDone" c_SignDone :: IO Word16
-- * Encryption
-- | EnCryptFile
foreign import stdcall "EnCryptFile" c_EnCryptFile :: CString -- ^ Source filename
-> CString -- ^ Destination filename
-> Word16 -- ^ Sender key id
-> Ptr Word16 -- ^ Receivers key ids
-> CString -- ^ Key series
-> IO Word16
-- | EnCryptFileEx
foreign import stdcall "EnCryptFileEx" c_EnCryptFileEx :: CString -- ^ Source filename
-> CString -- ^ Destination filename
-> CString -- ^ Sender key id
-> Ptr (Ptr Word8) -- ^ Open keys array
-> Word16 -- ^ Open keys array length
-> Word32 -- ^ Flags (reserved)
-> IO Word16
-- | DeCryptFile
foreign import stdcall "DeCryptFile" c_DeCryptFile :: CString -- ^ Source filename
-> CString -- ^ Destination filename
-> Word16 -- ^ Key id
-> IO Word16
-- | DeCryptFileEx
foreign import stdcall "DeCryptFileEx" c_DeCryptFileEx :: CString -- ^ Source filename
-> CString -- ^ Destination filename
-> CString -- ^ Key id
-> Ptr Word8 -- ^ Public key
-> IO Word16
-- | EnCryptMem
foreign import stdcall "EnCryptMem" c_EnCryptMem :: Ptr Word8 -- ^ Source buffer
-> Word32 -- ^ Source buffer length
-> Ptr Word8 -- ^ Destination buffer
-> Word16 -- ^ Sender key ID
-> Ptr Word16 -- ^ Receivers key ids
-> CString -- ^ Key series
-> IO Word16
-- | DeCryptMem
foreign import stdcall "DeCryptMem" c_DeCryptMem :: Ptr Word8 -- ^ Buffer
-> Ptr Word32 -- ^ Buffer length
-> Word16 -- ^ Receiver key id
-> IO Word16
-- * Signing
-- | SignFile
foreign import stdcall "SignFile" c_SignFile :: CString -- ^ Source filename
-> CString -- ^ Destination filename
-> CString -- ^ Key id
-> IO Word16
-- | SignMem
foreign import stdcall "SignMem" c_SignMem :: Ptr Word8 -- ^ Buffer
-> Word32 -- ^ BufferLength
-> CString -- ^ Key id
-> IO Word16
-- | SignMemEx
foreign import stdcall "SignMemEx" c_SignMemEx :: Ptr Word8 -- ^ Buffer
-> Ptr Word32 -- ^ BufferLength
-> CString -- ^ Key id
-> IO Word16
-- | SignMemSeparate
foreign import stdcall "SignMemSeparate" c_SignMemSeparate :: Ptr Word8 -- ^ Input buffer
-> Word32 -- ^ Input buffer length
-> Word32 -- ^ Output buffer length
-> CString -- ^ Key id
-> Ptr Word8 -- ^ Output buffer
-> IO Word16
-- | SignMemSeparateEx
foreign import stdcall "SignMemSeparateEx" c_SignMemSeparateEx :: Ptr Word8 -- ^ Input buffer
-> Word32 -- ^ Input buffer length
-> Ptr Word32 -- ^ Output buffer length
-> CString -- ^ Key id
-> Ptr Word8 -- ^ Output buffer
-> IO Word16
-- | check_file_sign
foreign import stdcall "check_file_sign" c_check_file_sign :: CString -- ^ Signed filename
-> Ptr Word8 -- ^ Size of result
-> Ptr (Ptr C_Check_Status) -- ^ Result
-> IO Word16
-- | check_mem_sign
foreign import stdcall "check_mem_sign" c_check_mem_sign :: Ptr Word8 -- ^ Input buffer
-> Word32 -- ^ Input buffer length
-> Ptr Word8 -- ^ Size of result
-> Ptr (Ptr C_Check_Status) -- ^ Result
-> IO Word16
-- | CheckMemSeparate
foreign import stdcall "CheckMemSeparate" c_CheckMemSeparate :: Ptr Word8 -- ^ Input buffer (data)
-> Word32 -- ^ Size of data
-> Word32 -- ^ Size of signature
-> Ptr Word8 -- ^ Size of result
-> Ptr (Ptr C_Check_Status) -- ^ Result
-> Ptr Word8 -- ^ Input buffer (signature)
-> IO Word16
-- | DelSign
foreign import stdcall "DelSign" c_DelSign :: CString -- ^ Signed filename
                                           -> Word8 -- ^ Number of signatures to remove (-1 to remove all)
-> IO Word16
-- | Del_Mem_Sign
foreign import stdcall "Del_Mem_Sign" c_Del_Mem_Sign :: Ptr Word8 -- ^ Buffer
-> Ptr Word32 -- ^ Buffer length
-> Word8 -- ^ Number of signatures to remove (-1 to remove all)
-> IO Word16
-- * Miscellaneous
-- | GetDrvInfo
foreign import stdcall "GetDrvInfo" c_GetDrvInfo :: Ptr C_USR_KEYS_INFO -- ^ Result
-> Ptr Word32 -- ^ Size of result
-> IO Word16
-- | GetFileSenderId
foreign import stdcall "GetFileSenderID" c_GetFileSenderId :: CString -- ^ Encrypted filename
-> CString -- ^ Result
-> IO Word16
-- | GetCryptKeysF
foreign import stdcall "GetCryptKeysF" c_GetCryptKeysF :: CString -- ^ Encrypted filename
-> Ptr Word16 -- ^ Size of result array
-> Ptr (Ptr Word16) -- ^ Result (array of keys)
-> CString -- ^ Result (key series)
-> IO Word16
-- | GetMemSenderId
foreign import stdcall "GetMemSenderID" c_GetMemSenderID :: Ptr Word8 -- ^ Input buffer
-> Word32 -- ^ Buffer length
-> CString -- ^ Result
-> IO Word16
-- | GetCryptKeysM
foreign import stdcall "GetCryptKeysM" c_GetCryptKeysM :: Ptr Word8 -- ^ Input buffer
-> Word32 -- ^ Buffer length
-> Ptr Word16 -- ^ Size of result array
-> Ptr (Ptr Word16) -- ^ Result (array of keys)
-> CString -- ^ Result (key series)
-> IO Word16
-- | FreeMemory
foreign import stdcall "FreeMemory" c_Free_Memory :: forall a. Ptr a -> IO ()
-- * Open keys storage manipulation
-- | GetAlias
foreign import stdcall "GetAlias" c_GetAlias :: CString -- ^ Base dir
-> CString -- ^ Key ID
-> CString -- ^ Output buffer
-> IO Word16
-- | SprList
foreign import stdcall "SprList" c_SprList :: CString
-> CString
-> Ptr (Ptr C_SprList)
-> Ptr Word16
-> Word8
-> IO Word16
-- | SignSpr
foreign import stdcall "SignSpr" c_SignSpr :: CString -- ^ Base dir
-> CString -- ^ Series
-> CString -- ^ Key ID
-> Word8 -- ^ Request type
-> IO Word16
-- | CheckSpr
foreign import stdcall "CheckSpr" c_CheckSpr :: CString
-> CString
-> Ptr (Ptr C_SprList)
-> Ptr Word16
-> CString
-> Word8
-> IO Word16
-- | ExtractKey
foreign import stdcall "ExtractKey" c_ExtractKey :: CString -- ^ Base dir
-> CString -- ^ Key ID
-> Ptr Word8 -- ^ Output buffer
-> IO Word16
|
Macil-dev/verhface-ll
|
src/Bindings/Verba/C/FFI.hs
|
unlicense
| 12,378
| 0
| 13
| 7,070
| 1,235
| 690
| 545
| -1
| -1
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE StandaloneDeriving #-}
--------------------------------------------------------------------------------
-- |
-- Module : Blockchain.Node.Transaction
-- Copyright : (c) carbolymer
-- License : Apache-2.0
--
-- Stability : experimental
-- Portability : POSIX
--
-- Transactions model
--
--------------------------------------------------------------------------------
module Blockchain.Node.Transaction (
-- * Data types
Transaction(Transaction)
, Operation(Reward, Transfer)
, NewTransactionRequest(..)
-- * Transaction getters
, operation
, pubKey
, signature
, sender
, recipient
, amount
, time
-- * transaction signature and validation
, sign
, verify
) where
import Data.Aeson (FromJSON(..), ToJSON(..))
import Data.Text (Text)
import Data.Time.Calendar (Day(..))
import Data.Time.Clock (UTCTime(..), DiffTime, diffTimeToPicoseconds, picosecondsToDiffTime)
import Data.Serialize (Serialize, get, encode, put)
import Data.Serialize.Text ()
import GHC.Generics (Generic)
import qualified Blockchain.Node.Signature as Signature
-- | Represents a single transaction between two addresses
data Transaction = Transaction {
_pubKey :: !Signature.PublicKey, -- ^ Sender ECDSA public key
_signature :: !Signature.Signature, -- ^ Operation ECDSA signature
_operation :: !Operation -- ^ Operation details
} deriving (Show, Eq, Generic, Serialize)
pubKey :: Transaction -> Signature.PublicKey
pubKey = _pubKey
operation :: Transaction -> Operation
operation = _operation
signature :: Transaction -> Signature.Signature
signature = _signature
instance Ord Transaction where
compare a b = compare (_operation a) (_operation b)
instance ToJSON Transaction
instance FromJSON Transaction
-- | Represents a single operation: a reward to an address or a transfer between two addresses
data Operation
= Reward {
_recipient :: !Text, -- ^ Recipient address
_amount :: !Int, -- ^ Transferred amount
_time :: !UTCTime -- ^ Transaction time
}
| Transfer {
_sender :: !Text, -- ^ Sender address, has to be derived from the public key in the
-- transaction
_recipient :: !Text, -- ^ Recipient address
_amount :: !Int, -- ^ Transferred amount
_time :: !UTCTime -- ^ Transaction time
} deriving (Show, Eq, Generic)
sender :: Operation -> Maybe Text
sender Reward {} = Nothing
sender t@Transfer {} = Just $ _sender t
recipient :: Operation -> Text
recipient = _recipient
amount :: Operation -> Int
amount = _amount
time :: Operation -> UTCTime
time = _time
instance Ord Operation where
compare a b = compare (_time a) (_time b)
instance Serialize DiffTime where
get = picosecondsToDiffTime <$> get
put = put . diffTimeToPicoseconds
deriving instance Generic Day
instance Serialize Day
deriving instance Generic UTCTime
instance Serialize UTCTime
instance ToJSON Operation
instance FromJSON Operation
instance Serialize Operation
data NewTransactionRequest = NewTransactionRequest {
newAmount :: !Int,
newSender :: !Text,
newRecipient :: !Text
} deriving (Show, Eq, Generic)
instance ToJSON NewTransactionRequest
instance FromJSON NewTransactionRequest
-- | Signs the operation using private key
sign :: Signature.PrivateKey -> Operation -> IO Signature.Signature
sign privKey operation' = Signature.sign privKey (encode operation')
-- | Verifies the transaction signature
verify :: Transaction -> Bool
verify transaction = Signature.verify
(pubKey transaction)
(signature transaction)
(encode $ operation transaction)
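-- | A minimal signing sketch (illustrative only, not part of the original
-- module): assemble a signed 'Transaction' from an already generated key pair
-- and an 'Operation'. Key generation itself is assumed to be provided by
-- "Blockchain.Node.Signature".
signedTransaction :: Signature.PrivateKey -> Signature.PublicKey -> Operation
                  -> IO Transaction
signedTransaction privKey publicKey op = do
  sig <- sign privKey op
  return (Transaction publicKey sig op)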
|
carbolymer/blockchain
|
blockchain-node/src/Blockchain/Node/Transaction.hs
|
apache-2.0
| 3,722
| 0
| 10
| 807
| 783
| 448
| 335
| -1
| -1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE NamedFieldPuns #-}
module Ldap.Client.Internal
( Host(..)
, PortNumber
, Ldap(..)
, ClientMessage(..)
, Type.ResultCode(..)
, Async(..)
, AttrList
-- * Waiting for Request Completion
, wait
, waitSTM
-- * Misc
, Response
, ResponseError(..)
, Request
, raise
, sendRequest
, Dn(..)
, Attr(..)
, AttrValue
, unAttr
-- * Unbind operation
, unbindAsync
, unbindAsyncSTM
) where
import Control.Concurrent.STM (STM, atomically)
import Control.Concurrent.STM.TMVar (TMVar, newEmptyTMVar, readTMVar)
import Control.Concurrent.STM.TQueue (TQueue, writeTQueue)
import Control.Concurrent.STM.TVar (TVar, modifyTVar, readTVar)
import Control.Exception (Exception, throwIO)
import Control.Monad (void)
import Data.ByteString (ByteString)
import Data.List.NonEmpty (NonEmpty)
import Data.Text (Text)
import Data.Typeable (Typeable)
import Network (PortNumber)
import Network.Connection (Connection(..))
import qualified Ldap.Asn1.Type as Type
import qualified Control.Concurrent.Async as Async
-- | LDAP host.
data Host =
Plain String -- ^ Plain LDAP. Do not use!
| Insecure String -- ^ LDAP over TLS without the certificate validity check.
-- Only use for testing!
| Secure String -- ^ LDAP over TLS. Use!
deriving (Show, Eq, Ord)
-- | A token. All functions that interact with the Directory require one.
data Ldap = Ldap
{ client :: TQueue ClientMessage
, counter :: TVar Type.Id
, conn :: Connection
, threads :: [Async.Async ()]
}
data ClientMessage = New Type.Id Request (TMVar (NonEmpty Type.ProtocolServerOp))
type Request = Type.ProtocolClientOp
type InMessage = Type.ProtocolServerOp
type Response = NonEmpty InMessage
-- | Asynchronous LDAP operation. Use 'wait' or 'waitSTM' to wait for its completion.
data Async a = Async Type.Id (STM (Either ResponseError a))
instance Functor Async where
fmap f (Async mid stm) = Async mid (fmap (fmap f) stm)
-- | Unique identifier of an LDAP entry.
newtype Dn = Dn Text
deriving (Show, Eq)
-- | Response indicates a failed operation.
data ResponseError =
ResponseInvalid Request Response -- ^ LDAP server did not follow the protocol, so @ldap-client@ couldn't make sense of the response.
| ResponseErrorCode Request Type.ResultCode Dn Text -- ^ The response contains a result code indicating failure and an error message.
deriving (Show, Eq, Typeable)
instance Exception ResponseError
-- | Attribute name.
newtype Attr = Attr Text
deriving (Show, Eq)
-- | Attribute value.
type AttrValue = ByteString
-- | List of attributes and their values. @f@ is the structure these
-- values are in, e.g. 'NonEmpty'.
type AttrList f = [(Attr, f AttrValue)]
-- | 'Attr' unwrapper. Kept as a separate function so that 'Attr' can retain
-- a clean derived 'Show' instance.
unAttr :: Attr -> Text
unAttr (Attr a) = a
-- | Wait for operation completion.
wait :: Async a -> IO (Either ResponseError a)
wait = atomically . waitSTM
-- | Wait for operation completion inside 'STM'.
--
-- Do not use this inside the same 'STM' transaction the operation was
-- requested in! To give LDAP the chance to respond to it that transaction
-- should commit. After that, applying 'waitSTM' to the corresponding 'Async'
-- starts to make sense.
waitSTM :: Async a -> STM (Either ResponseError a)
waitSTM (Async _ stm) = stm
sendRequest :: Ldap -> (Response -> Either ResponseError a) -> Request -> STM (Async a)
sendRequest l p msg =
do var <- newEmptyTMVar
mid <- newId l
writeRequest l (New mid msg var)
return (Async mid (fmap p (readTMVar var)))
newId :: Ldap -> STM Type.Id
newId Ldap { counter } =
do modifyTVar counter (\(Type.Id mid) -> Type.Id (mid + 1))
readTVar counter
writeRequest :: Ldap -> ClientMessage -> STM ()
writeRequest Ldap { client } = writeTQueue client
raise :: Exception e => Either e a -> IO a
raise = either throwIO return
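-- A minimal usage sketch combining 'wait' and 'raise' (illustrative only):
-- given an 'Async' obtained from 'sendRequest' (usually via one of the
-- higher-level operations), block until the Directory answers and rethrow
-- any failure as an exception.
--
-- @
-- result <- wait anAsync >>= raise
-- @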
-- | Terminate the connection to the Directory.
--
-- Note that 'unbindAsync' does not return an 'Async',
-- because LDAP server never responds to @UnbindRequest@s, hence
-- a call to 'wait' on a hypothetical 'Async' would have resulted
-- in an exception anyway.
unbindAsync :: Ldap -> IO ()
unbindAsync =
atomically . unbindAsyncSTM
-- | Terminate the connection to the Directory.
--
-- Note that 'unbindAsyncSTM' does not return an 'Async',
-- because LDAP server never responds to @UnbindRequest@s, hence
-- a call to 'wait' on a hypothetical 'Async' would have resulted
-- in an exception anyway.
unbindAsyncSTM :: Ldap -> STM ()
unbindAsyncSTM l =
void (sendRequest l die Type.UnbindRequest)
where
die = error "Ldap.Client.Internal: do not wait for the response to UnbindRequest"
|
VictorDenisov/ldap-client
|
src/Ldap/Client/Internal.hs
|
bsd-2-clause
| 4,900
| 0
| 13
| 1,025
| 1,050
| 601
| 449
| 92
| 1
|
{-|
Module: HaskHOL.Lib.Quot
Copyright: (c) Evan Austin 2015
LICENSE: BSD3
Maintainer: e.c.austin@gmail.com
Stability: unstable
Portability: unknown
-}
module HaskHOL.Lib.Quot
( defineQuotientType
, getQuotientType
, liftTheorem
, liftFunction
, getLiftedFunction
) where
import HaskHOL.Core
import qualified HaskHOL.Core.Kernel as K (typeOf)
import HaskHOL.Lib.Bool
import HaskHOL.Lib.Classic
import HaskHOL.Lib.DRule
import HaskHOL.Lib.Equal
import HaskHOL.Lib.Meson
import HaskHOL.Lib.Simp
import HaskHOL.Lib.Tactics
import HaskHOL.Lib.Theorems
import HaskHOL.Lib.Trivia
data LiftedFunctions =
LiftedFunctions !(Map Text (HOLThm, HOLThm)) deriving Typeable
deriveSafeCopy 0 'base ''LiftedFunctions
addLiftedFunction :: Text -> (HOLThm, HOLThm) -> Update LiftedFunctions ()
addLiftedFunction lbl ths =
do (LiftedFunctions m) <- get
put (LiftedFunctions (mapInsert lbl ths m))
getLiftedFunction' :: Text -> Query LiftedFunctions (Maybe (HOLThm, HOLThm))
getLiftedFunction' name =
do (LiftedFunctions m) <- ask
return $! mapAssoc name m
makeAcidic ''LiftedFunctions ['addLiftedFunction, 'getLiftedFunction']
data QuotientTypes =
QuotientTypes !(Map Text (HOLThm, HOLThm)) deriving Typeable
deriveSafeCopy 0 'base ''QuotientTypes
addQuotientType :: Text -> (HOLThm, HOLThm) -> Update QuotientTypes ()
addQuotientType lbl ths =
do (QuotientTypes m) <- get
put (QuotientTypes (mapInsert lbl ths m))
getQuotientType' :: Text -> Query QuotientTypes (Maybe (HOLThm, HOLThm))
getQuotientType' name =
do (QuotientTypes m) <- ask
return $! mapAssoc name m
makeAcidic ''QuotientTypes ['addQuotientType, 'getQuotientType']
defineQuotientType :: (BoolCtxt thry, HOLTermRep tm Theory thry) => Text
-> Text -> Text -> tm
-> HOL Theory thry (HOLThm, HOLThm)
defineQuotientType tyname absname repname tm =
getQuotientType tyname <|> (note "defineQuotientType" $
do eqv <- toHTm tm
case K.typeOf eqv of
(TyApp _ (ty:_)) ->
do pty <- mkFunTy ty tyBool
s <- mkVar "s" pty
x <- mkVar "x" ty
eqvx <- mkComb eqv x
exx <- mkExists x $ mkEq s eqvx
predtm <- mkAbs s exx
th0 <- runConv convBETA $ mkComb predtm eqvx
rtm <- rand $ concl th0
th1 <- ruleEXISTS rtm x $ primREFL eqvx
th2 <- ruleSYM th0
th3 <- primEQ_MP th2 th1
(absth, repth) <- newBasicTypeDefinition tyname
absname repname th3
th4 <- ruleCONV (convLAND convBETA) repth
acid' <- openLocalStateHOL (QuotientTypes mapEmpty)
updateHOL acid' (AddQuotientType tyname (absth, th4))
closeAcidStateHOL acid'
return (absth, th4)
_ -> fail "provided term has bad type")
getQuotientType :: Text -> HOL cls thry (HOLThm, HOLThm)
getQuotientType name =
do acid <- openLocalStateHOL (QuotientTypes mapEmpty)
qth <- queryHOL acid (GetQuotientType' name)
closeAcidStateHOL acid
case qth of
Nothing -> fail "getQuotientType: type not found."
Just res -> return res
thmSELECT_LEMMA :: TriviaCtxt thry => HOL cls thry HOLThm
thmSELECT_LEMMA = cacheProof "thmSELECT_LEMMA" ctxtTrivia $
prove [txt| !x:A. (@y. x = y) = x |] $
tacGEN `_THEN`
tacGEN_REWRITE (convLAND . convBINDER) [thmEQ_SYM_EQ] `_THEN`
tacMATCH_ACCEPT thmSELECT_REFL
liftFunction :: (TriviaCtxt thry, HOLThmRep thm1 Theory thry,
HOLThmRep thm2 Theory thry, HOLThmRep thm3 Theory thry,
HOLThmRep thm4 Theory thry) => thm1 -> thm2 -> thm3
-> (Text, thm4) -> HOL Theory thry (HOLThm, HOLThm)
liftFunction ptybij2 refl_th trans_th (fname, pwth) =
getLiftedFunction fname <|> (note "liftFunction" $
do tybij2 <- toHThm ptybij2
case concl tybij2 of
((Exists xtm (Comb _ eqvx@(Comb eqv _))) :<=>
((Comb dest mrt@(Comb mk _)) := rtm)) ->
do wth <- toHThm pwth
wtm <- repeatM (liftM snd . destForall) $ concl wth
let wfvs = frees wtm
(hyps, con) <- (do (l, r) <- destImp wtm
return (conjuncts l, r)) <|> return ([], wtm)
let (eqs, rels) = partition isEq hyps
rvs <- mapM lHand rels
qvs <- mapM lhs eqs
let ety = typeOf mrt
evs <- variants wfvs `fmap`
mapM (\ (Var v _) -> mkVar v ety) rvs
mems <- map2M (\ rv ev -> mkComb (mkComb dest ev) rv)
rvs evs
(lcon, rcon) <- destComb con
u <- variant (evs ++ wfvs) `fmap` mkVar "u" (typeOf rcon)
ucon <- mkComb lcon u
dbod <- listMkConj (ucon:mems)
detm <- listMkExists rvs dbod
datm <- mkAbs u detm
def <- if isEq con then listMkIComb "@" [datm]
else mkComb mk datm
newargs <- mapM (\ e -> case e of
(l := _) -> return l
(Binary _ l _) -> assoc l $ zip rvs evs
_ -> fail "") hyps
rdef <- listMkAbs newargs def
let ldef = mkVar fname $ typeOf rdef
edef <- mkEq ldef rdef
dth <- newDefinition (fname, edef)
eth <- foldlM (\ th v -> ruleCONV (convRAND convBETA) $
(ruleAP_THM th v)) dth newargs
targs <- mapM (mkComb mk . mkComb eqv) rvs
dme_th <- do th <- primINST [(rtm, eqvx)] tybij2
ltm <- lhs $ concl th
primEQ_MP th . ruleEXISTS ltm xtm $ primREFL eqvx
ith <- primINST (zip evs targs) eth
rths <- mapM (\ v -> primINST [(xtm, v)] dme_th) rvs
jth <- ruleSUBS rths ith
(apop, uxtm) <- destComb $ rand (concl jth)
extm <- body uxtm
let (evs', bod) = stripExists extm
th1 <- primASSUME bod
th2 <- if null evs' then return th1
else do (th2a, th2b) <- ruleCONJ_PAIR th1
as <- ruleCONJUNCTS th2b
bs <- mapM primREFL qvs
let ethlist = as ++ bs
ethlist' <- mapM (\ v -> findM (\ thm ->
do v' <- lHand v
c <- lHand $ concl thm
return $! v' == c) ethlist) hyps
th2c <- foldr1M ruleCONJ ethlist'
th2d <- ruleMATCH_MP wth th2c
th2e <- (primTRANS th2d th2a) <|>
(ruleMATCH_MP trans_th $
ruleCONJ th2d th2a)
foldrM ruleSIMPLE_CHOOSE th2e evs'
th3 <- primASSUME $ concl th2
ths <- mapM (`ruleSPEC` refl_th) rvs
th4 <- foldr1M ruleCONJ (th3:ths)
th5 <- flip (foldrM ruleSIMPLE_EXISTS) evs' =<< primASSUME bod
th6 <- ruleMATCH_MP (ruleDISCH_ALL th5) th4
th7 <- ruleIMP_ANTISYM (ruleDISCH_ALL th2) $ ruleDISCH_ALL th6
th8 <- primTRANS jth . ruleAP_TERM apop $ primABS u th7
let fconv = if isEq con then convREWR thmSELECT_LEMMA
else convRAND convETA
th9 <- ruleGSYM $ ruleCONV (convRAND fconv) th8
acid' <- openLocalStateHOL (LiftedFunctions mapEmpty)
updateHOL acid' (AddLiftedFunction fname (eth, th9))
closeAcidStateHOL acid'
return (eth, th9)
_ -> fail "expected quotient type relation theorem.")
getLiftedFunction :: Text -> HOL cls thry (HOLThm, HOLThm)
getLiftedFunction name =
do acid <- openLocalStateHOL (LiftedFunctions mapEmpty)
qth <- queryHOL acid (GetLiftedFunction' name)
closeAcidStateHOL acid
case qth of
Nothing -> fail "getLiftedFunction: type not found."
Just res -> return res
liftTheorem :: (TriviaCtxt thry, HOLThmRep thm1 cls thry,
HOLThmRep thm2 cls thry, HOLThmRep thm3 cls thry,
HOLThmRep thm4 cls thry, HOLThmRep thm5 cls thry,
HOLThmRep thm6 cls thry) => (thm1, thm1) -> thm2 -> thm3 -> thm4
-> [thm5] -> thm6 -> HOL cls thry HOLThm
liftTheorem ptybij prefl_th psym_th ptrans_th ptrths pthm =
do (tybij1, tybij2) <- pairMapM ruleGEN_ALL ptybij
refl_th <- toHThm prefl_th
sym_th <- toHThm psym_th
trans_th <- toHThm ptrans_th
trths <- mapM toHThm ptrths
cth <- foldr1M ruleCONJ [refl_th, sym_th, trans_th, tybij1, tybij2]
ith <- ruleMATCH_MP liftTheorem_pth cth
ruleREWRITE (ith:trths) pthm
where liftTheorem_pth :: TriviaCtxt thry => HOL cls thry HOLThm
liftTheorem_pth = cacheProof "liftTheorem_pth" ctxtTrivia $
prove [txt| (!x:Repty. R x x) /\
(!x y. R x y <=> R y x) /\
(!x y z. R x y /\ R y z ==> R x z) /\
(!a. mk(dest a) = a) /\
(!r. (?x. r = R x) <=> (dest(mk r) = r))
==> (!x y. R x y <=> (mk(R x) = mk(R y))) /\
(!P. (!x. P(mk(R x))) <=> (!x. P x)) /\
(!P. (?x. P(mk(R x))) <=> (?x. P x)) /\
(!x:Absty. mk(R((@)(dest x))) = x) |] $
tacSTRIP `_THEN`
_SUBGOAL_THEN [txt| !x y. (mk((R:Repty->Repty->bool) x):Absty =
mk(R y)) <=> (R x = R y) |]
tacASSUME `_THENL`
[ tacASM_MESON_NIL
, _ALL
] `_THEN`
tacMATCH_MP (ruleTAUT [txt| (a /\ b /\ c) /\ (b ==> a ==> d) ==>
a /\ b /\ c /\ d |]) `_THEN`
tacCONJ `_THENL`
[ tacASM_REWRITE_NIL `_THEN` tacREWRITE [thmFUN_EQ] `_THEN`
tacASM_MESON_NIL
, _ALL
] `_THEN`
_REPEAT (_DISCH_THEN
(\ th g -> tacREWRITE [ruleGSYM th] g)) `_THEN`
tacX_GEN [txt| x:Repty |] `_THEN`
_SUBGOAL_THEN [txt| dest(mk((R:Repty->Repty->bool) x):Absty) = R x|]
tacSUBST1 `_THENL`
[ tacASM_MESON_NIL
, _ALL
] `_THEN`
tacGEN_REWRITE (convLAND . convRAND) [ruleGSYM axETA] `_THEN`
_FIRST_ASSUM (\ th -> tacGEN_REWRITE id [th]) `_THEN`
tacCONV convSELECT `_THEN`
tacASM_MESON_NIL
|
ecaustin/haskhol-deductive
|
src/HaskHOL/Lib/Quot.hs
|
bsd-2-clause
| 11,261
| 4
| 31
| 4,432
| 3,096
| 1,530
| 1,566
| -1
| -1
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE BinaryLiterals #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
import GHC.Exts
import Data.Array.IO
import System.Directory
import Data.Word
import FileSystems
import System.Posix.Terminal
import Data.Time.Clock
import System.Environment
import Monad6502
import Text.Printf
import State6502
import Control.Monad.State
import Control.Concurrent.MVar
import Control.Lens hiding (noneOf)
import Data.Bits
import Data.Bits.Lens
import qualified Data.IntMap as I
import qualified Data.Map as M
import Data.ByteString as B hiding (putStrLn, putStr, count, head)
import System.IO
import Data.Binary.Get
import Text.Parsec
import Data.Binary
import System.Console.CmdArgs hiding ((+=))
import Numeric
import Control.Monad.Loops
import System.Console.Haskeline
import Core
import Binary
import Intel hiding (hexWord16, fromHex)
import VirtualBBC
import System.Posix.Signals
import KeyInput
import VDUOutput
import TraceLog
import qualified Data.ByteString.Internal as BS (c2w, w2c)
--import Vanilla
--import Atari
data Args = Args { verbose :: Bool,
file :: Maybe String,
org :: String,
entry :: String,
logfile :: Maybe String,
directory :: Maybe String } deriving (Show, Data, Typeable)
clargs :: Args
clargs = Args { verbose = False, org = "0", entry = "C000", file = Nothing,
logfile = Nothing, directory = Nothing }
times :: (Integral n, Monad m) => n -> m a -> m ()
times 0 _ = return ()
times n m = m >> times (n-1) m
data FileSpec = Intel String | Binary String Word16 deriving Show
hexWord16 :: Stream s m Char => ParsecT s u m Word16
hexWord16 = fromHex <$> count 4 hexDigit
fromHex :: (Num a, Eq a) => String -> a
fromHex = fst . head . readHex
filespec = do
a <- anyChar
case a of
'i' -> do
char ':'
filename <- many (noneOf ",")
return (Intel filename)
'b' -> do
char ':'
filename <- many (noneOf ":")
char ':'
address <- hexWord16
return (Binary filename address)
filespecs = filespec `sepBy1` (char ',')
--xxx = getPC
loadFile :: IOUArray Int Word8 -> FileSpec -> IO ()
loadFile arr (Intel f) = readIntel arr f
loadFile arr (Binary f o) = readBinary arr f o
handler interrupted = do
print "SIGINT"
putMVar interrupted 1
-- b:../../roms/BASIC-1.0:8000,b:../../os.bin:c000
{-
getROMSpec :: IO (Maybe (String, String))
getROMSpec = do
case lookupEnv "BBC_LANGUAGE" of
Nothing -> return Nothing
Just languageRom -> case (lookupEnv "BBC_ROM") of
case lookupEnv "BBC_ROM" of
Nothing -> return Nothing
Just osRom -> return (languageRom, osRom)
-}
main :: IO ()
main = do
hSetBuffering stdin NoBuffering
hSetBuffering stdout NoBuffering
-- a <- getTerminalAttributes 0
-- let a' = withCC a (System.Posix.Terminal.Interrupt, '\x1b')
-- setTerminalAttributes 0 a' Immediately
--hSetEcho stdin False
-- hSetEcho stdin False
args <- cmdArgs clargs
--print args
putStrLn "BBC Computer 32K\n"
arr <- newArray (0, 0xffff) 0 :: IO (IOUArray Int Word8)
mSpecString <- case file args of
Nothing -> lookupEnv "BBC_ROMS"
Just specString' -> return $ Just specString'
case mSpecString of
Nothing -> return ()
Just specString -> do
let mSpecs = parse filespecs "" specString
case mSpecs of
Right specs -> forM_ specs $ \spec -> loadFile arr spec
Left _ -> putStrLn $ "Unable to parse ROM specification: " ++ show specString
logHandle <- case logfile args of
Nothing -> return Nothing
Just logName -> do
handle <- openFile logName WriteMode
return $ Just handle
let [(entryPoint, _)] = readHex (entry args)
systime <- getCurrentTime
let state = S { _mem = arr, _clock = 0, _regs = R entryPoint 0 0 0 0 0xff,
_debug = verbose args, _handles = I.empty, _sysclock = systime,
_currentDirectory = '$', _keyQueue = emptyQueue,
_vduQueue = emptyVDUQueue, _logFile = logHandle }
case directory args of
Nothing -> return ()
Just d -> setCurrentDirectory d
interrupted <- newEmptyMVar :: IO (MVar Int)
installHandler sigINT (Catch $ handler interrupted) Nothing
--flip execStateT state $ unM $ forever (inline step)
runInputT defaultSettings $ flip execStateT state $ unM $ do
tracelog $ printf "\nExecuting from address %04x" entryPoint
forever $ do
i <- liftIO $ isEmptyMVar interrupted
when (not i) $ do
writeMemory 0xff 0x80
liftIO $ putStrLn "ESCAPE!!!!!!!!!!!!!!!!!!!!!"
_ <- liftIO $ takeMVar interrupted
return ()
--liftIO $ print "X"
step
return ()
|
dpiponi/Bine
|
app/Main.hs
|
bsd-3-clause
| 5,144
| 0
| 18
| 1,468
| 1,344
| 701
| 643
| 122
| 6
|
{-|
Module : Game.GoreAndAsh.Math
Description : Common mathematic utilities in games
Copyright : (c) Anton Gushcha, 2015-2016
Oganyan Levon, 2016
License : BSD3
Maintainer : ncrashed@gmail.com
Stability : experimental
Portability : POSIX
Defines common math transformations for world, camera, viewport spaces.
-}
module Game.GoreAndAsh.Math(
-- * 3D matrix transformations
scale
, rotationZ
, translate
-- * 2D matrix transformations
, scale2D
, rotation2D
, translate2D
, toHom2D
, fromHom2D
, applyTransform2D
, viewportTransform2D
) where
import Linear
-- | Scale matrix for 3D transformation
scale :: Num a => V3 a -> M44 a
scale (V3 x y z) = V4
(V4 x 0 0 0)
(V4 0 y 0 0)
(V4 0 0 z 0)
(V4 0 0 0 1)
-- | Rotation around Z axis for 3D transformation
rotationZ :: Floating a => a -> M44 a
rotationZ a = V4
(V4 (cos a) (- sin a) 0 0)
(V4 (sin a) ( cos a) 0 0)
(V4 0 0 1 0)
(V4 0 0 0 1)
-- | Translation matrix for 3D transformation
translate :: Num a => V3 a -> M44 a
translate (V3 x y z) = V4
(V4 1 0 0 x)
(V4 0 1 0 y)
(V4 0 0 1 z)
(V4 0 0 0 1)
-- | Scale matrix for 2D transformation
scale2D :: Num a => V2 a -> M33 a
scale2D (V2 x y) = V3
(V3 x 0 0)
(V3 0 y 0)
(V3 0 0 1)
-- | Rotation matrix for 2D transformation
rotation2D :: Floating a => a -> M33 a
rotation2D a = V3
(V3 (cos a) (- sin a) 0)
(V3 (sin a) ( cos a) 0)
(V3 0 0 1)
-- | Translation matrix for 2D transformation
translate2D :: Num a => V2 a -> M33 a
translate2D (V2 x y) = V3
(V3 1 0 x)
(V3 0 1 y)
(V3 0 0 1)
-- | Transform to homogeneous coordinates
toHom2D :: Num a => V2 a -> V3 a
toHom2D (V2 x y) = V3 x y 1
-- | Transform from homogeneous coordinates
fromHom2D :: Floating a => V3 a -> V2 a
fromHom2D (V3 x y w) = V2 (x/w) (y/w)
-- | Applies transformation matrix to vector
applyTransform2D :: Floating a => M33 a -> V2 a -> V2 a
applyTransform2D mt v = fromHom2D $ mt !* toHom2D v
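-- | A minimal worked example (illustrative only, not part of the original
-- module): rotating the unit X vector by 90 degrees lands, up to floating
-- point error, on the unit Y vector.
rotateExample :: V2 Double
rotateExample = applyTransform2D (rotation2D (pi / 2)) (V2 1 0)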
-- | Viewport transformation matrix
viewportTransform2D :: Floating a
=> V2 a -- ^ Viewport left top corner
-> V2 a -- ^ Viewport right bottom corner
-> M33 a
viewportTransform2D (V2 l t) (V2 r b) = V3
(V3 ((r-l)/2) 0 ((r+l)/2))
(V3 0 (-(t-b)/2) ((t+b)/2))
(V3 0 0 1)
!*! scale2D (V2 1 a)
where
a = (r-l)/(t-b)
|
Teaspot-Studio/gore-and-ash
|
src/Game/GoreAndAsh/Math.hs
|
bsd-3-clause
| 2,333
| 2
| 13
| 630
| 942
| 478
| 464
| 61
| 1
|
module MAAM.Classes.MonadStep where
import FP
class (Bind m) => MonadStep m where
type SS m :: * -> *
type SSC m :: * -> Constraint
mstep :: (SSC m a, SSC m b) => (a -> m b) -> SS m a -> SS m b
munit :: (SSC m a) => P m -> a -> SS m a
|
davdar/quals
|
src/MAAM/Classes/MonadStep.hs
|
bsd-3-clause
| 245
| 0
| 11
| 71
| 134
| 72
| 62
| -1
| -1
|
-- Copyright (c) 2014-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file.
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- |
-- Types and operations for statistics and profiling. Most users
-- should import "Haxl.Core" instead of importing this module
-- directly.
--
module Haxl.Core.Stats
(
-- * Data-source stats
Stats(..)
, FetchStats(..)
, Microseconds
, Timestamp
, getTimestamp
, emptyStats
, numFetches
, ppStats
, ppFetchStats
-- * Profiling
, Profile
, emptyProfile
, profile
, ProfileLabel
, ProfileData(..)
, emptyProfileData
, AllocCount
, MemoHitCount
-- * Allocation
, getAllocationCounter
, setAllocationCounter
) where
import Data.Aeson
import Data.HashMap.Strict (HashMap)
import Data.HashSet (HashSet)
import Data.Int
import Data.List (intercalate, maximumBy, minimumBy)
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid
#endif
import Data.Semigroup (Semigroup)
import Data.Ord (comparing)
import Data.Text (Text)
import Data.Time.Clock.POSIX
import Text.Printf
import qualified Data.HashMap.Strict as HashMap
import qualified Data.HashSet as HashSet
import qualified Data.Text as Text
#if __GLASGOW_HASKELL__ >= 710
import GHC.Conc (getAllocationCounter, setAllocationCounter)
#endif
-- ---------------------------------------------------------------------------
-- Measuring time
type Microseconds = Int64
type Timestamp = Microseconds -- since an epoch
getTimestamp :: IO Timestamp
getTimestamp = do
t <- getPOSIXTime -- for now, TODO better
return (round (t * 1000000))
-- ---------------------------------------------------------------------------
-- Stats
-- | Stats that we collect along the way.
newtype Stats = Stats [FetchStats]
deriving (Show, ToJSON, Semigroup, Monoid)
-- | Pretty-print Stats.
ppStats :: Stats -> String
ppStats (Stats rss) =
intercalate "\n"
[ "["
++ [
if fetchWasRunning rs
(minStartTime + (t - 1) * usPerDash)
(minStartTime + t * usPerDash)
then '*'
else '-'
| t <- [1..numDashes]
]
++ "] " ++ show i ++ " - " ++ ppFetchStats rs
| (i, rs) <- zip [(1::Int)..] validFetchStats ]
where
isFetchStats FetchStats{} = True
isFetchStats _ = False
validFetchStats = filter isFetchStats (reverse rss)
numDashes = 50
minStartTime = fetchStart $ minimumBy (comparing fetchStart) validFetchStats
lastFs = maximumBy (comparing (\fs -> fetchStart fs + fetchDuration fs))
validFetchStats
usPerDash = (fetchStart lastFs + fetchDuration lastFs - minStartTime)
`div` numDashes
fetchWasRunning :: FetchStats -> Timestamp -> Timestamp -> Bool
fetchWasRunning fs t1 t2 =
(fetchStart fs + fetchDuration fs) >= t1 && fetchStart fs < t2
-- | Statistics recorded for a single (batched) data fetch and, when
-- profiling, for individual 'dataFetch' calls.
data FetchStats
-- | Timing stats for a (batched) data fetch
= FetchStats
{ fetchDataSource :: Text
, fetchBatchSize :: {-# UNPACK #-} !Int
, fetchStart :: !Timestamp -- TODO should be something else
, fetchDuration :: {-# UNPACK #-} !Microseconds
, fetchSpace :: {-# UNPACK #-} !Int64
, fetchFailures :: {-# UNPACK #-} !Int
}
-- | The stack trace of a call to 'dataFetch'. These are collected
-- only when profiling and reportLevel is 5 or greater.
| FetchCall
{ fetchReq :: String
, fetchStack :: [String]
}
deriving (Show)
-- | Pretty-print a 'FetchStats'.
ppFetchStats :: FetchStats -> String
ppFetchStats FetchStats{..} =
printf "%s: %d fetches (%.2fms, %d bytes, %d failures)"
(Text.unpack fetchDataSource) fetchBatchSize
(fromIntegral fetchDuration / 1000 :: Double) fetchSpace fetchFailures
ppFetchStats (FetchCall r ss) = show r ++ '\n':show ss
instance ToJSON FetchStats where
toJSON FetchStats{..} = object
[ "datasource" .= fetchDataSource
, "fetches" .= fetchBatchSize
, "start" .= fetchStart
, "duration" .= fetchDuration
, "allocation" .= fetchSpace
, "failures" .= fetchFailures
]
toJSON (FetchCall req strs) = object
[ "request" .= req
, "stack" .= strs
]
emptyStats :: Stats
emptyStats = Stats []
numFetches :: Stats -> Int
numFetches (Stats rs) = sum [ fetchBatchSize | FetchStats{..} <- rs ]
-- ---------------------------------------------------------------------------
-- Profiling
type ProfileLabel = Text
type AllocCount = Int64
type MemoHitCount = Int64
newtype Profile = Profile
{ profile :: HashMap ProfileLabel ProfileData
-- ^ Data on individual labels.
}
emptyProfile :: Profile
emptyProfile = Profile HashMap.empty
data ProfileData = ProfileData
{ profileAllocs :: {-# UNPACK #-} !AllocCount
-- ^ allocations made by this label
, profileDeps :: HashSet ProfileLabel
-- ^ labels that this label depends on
, profileFetches :: HashMap Text Int
-- ^ map from datasource name => fetch count
, profileMemoHits :: {-# UNPACK #-} !MemoHitCount
-- ^ number of hits to memoized computation at this label
}
deriving Show
emptyProfileData :: ProfileData
emptyProfileData = ProfileData 0 HashSet.empty HashMap.empty 0
-- -----------------------------------------------------------------------------
-- Allocation accounting
#if __GLASGOW_HASKELL__ < 710
getAllocationCounter :: IO Int64
getAllocationCounter = return 0
setAllocationCounter :: Int64 -> IO ()
setAllocationCounter _ = return ()
#endif
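-- A usage sketch (not part of the original module): build one synthetic
-- 'FetchStats' record and render it with 'ppFetchStats'. All field values
-- below are made up for illustration.
exampleFetchStatsLine :: String
exampleFetchStatsLine = ppFetchStats FetchStats
  { fetchDataSource = Text.pack "ExampleDS"
  , fetchBatchSize  = 3
  , fetchStart      = 0
  , fetchDuration   = 1500
  , fetchSpace      = 4096
  , fetchFailures   = 0
  }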
|
jiayuanmark/Haxl
|
Haxl/Core/Stats.hs
|
bsd-3-clause
| 5,701
| 0
| 19
| 1,145
| 1,157
| 667
| 490
| 126
| 3
|
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE DataKinds #-}
import Test.Hspec
import Data
import DanceView
import Control.Monad
import Data.Generics.Record
rawKps1 = [ 0,0,0,442.1,209.347,0.695157,412.838
, 219.106,0.479542 , 262.192,217.143,0.473475,189.773,268.065
, 0.635878,475.367 , 207.423,0.677412,551.767,191.757,0.792323
, 500.895,133.037 , 0.771379,434.342,365.826,0.563359,383.463
, 483.28,0.467892 , 0,0,0,477.408,367.805
, 0.568175,489.1,489.135,0.643315 , 483.238,631.949,0.657399
, 0,0,0,0,0,0,0
, 0,0,397.158,191.758 , 0.105753
]
rawKps2 = [ 352.19,226.939,0.643588,442.135,211.243,0.653615,412.839
, 215.217,0.279017,0,0,0,0,0
, 0,475.393,207.422,0.669059,555.741 , 187.794,0.825329
, 504.774,129.137,0.751591,446.042,354.142 , 0.493266,491.051
, 479.335,0.443765,490.999,626.073,0.576288 , 481.309,354.172
, 0.546769,479.396,477.38,0.55193,481.283 , 618.331,0.206265
, 350.207,215.221,0.257444,363.899,223.071 , 0.545837,0
, 0,0,397.153,219.145,0.297215
]
main :: IO ()
main = hspec $ do
describe "Matchings" $ do
it "matches the same thing to itself" $ do
let p1 = Person rawKps1 "a"
let matches = matchings [p1] [p1]
matches `shouldBe` [(p1, Just p1, 0)]
it "matches something close from frame to frame" $ do
let p1 = Person rawKps1 "a"
p2 = Person rawKps2 "b"
let matches = matchings [p1] [p2]
[(p1', Just p2', _)] = matches
(p1', p2') `shouldBe` (p1, p2)
it "matches multiple things that should match, to themselves, in any order" $ do
let p1 = Person rawKps1 "a"
p2 = Person rawKps2 "b"
let combs = [ ([p1, p2], [p1, p2])
, ([p2, p1], [p1, p2])
, ([p2, p1], [p2, p1])
, ([p1, p2], [p2, p1])
]
forM_ combs $ \(c1, c2) -> do
let matches = matchings c1 c2
[(a, Just b, _), (c, Just d, _)] = matches
(a, b) `shouldBe` (a, a)
(c, d) `shouldBe` (c, c)
|
silky/DanceView
|
tests/Differences.hs
|
bsd-3-clause
| 2,335
| 9
| 22
| 803
| 764
| 471
| 293
| 48
| 1
|
module Programmers where
data OperatingSystem =
GnuPlusLinux
| OpenBSDPlusNevermindJustBSDStill
| Mac
| Windows
deriving (Eq, Show)
data ProgrammingLanguage =
Haskell
| Agda
| Idris
| PureScript
deriving (Eq, Show)
data Programmer =
Programmer { os :: OperatingSystem
, lang :: ProgrammingLanguage }
deriving (Eq, Show)
allOperatingSystems :: [OperatingSystem]
allOperatingSystems =
[ GnuPlusLinux
, OpenBSDPlusNevermindJustBSDStill
, Mac
, Windows
]
allLanguages :: [ProgrammingLanguage]
allLanguages = [Haskell, Agda, Idris, PureScript]
allProgrammers :: [Programmer]
allProgrammers = [Programmer os lang | os <- allOperatingSystems, lang <- allLanguages]
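-- A small check sketch (not part of the original module): the comprehension
-- above pairs every OS with every language, so it yields 4 * 4 = 16 programmers.
allProgrammersCount :: Int
allProgrammersCount = length allProgrammers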
|
dsaenztagarro/haskellbook
|
src/chapter11/Programmers.hs
|
bsd-3-clause
| 715
| 0
| 8
| 140
| 177
| 106
| 71
| 27
| 1
|
{-# LANGUAGE ForeignFunctionInterface #-}
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.Salsa.CLR
-- Copyright : (c) 2007-2008 Andrew Appleyard
-- Licence : BSD-style (see LICENSE)
--
-- Provides convenient functions for accessing the CLR, including: loading
-- the CLR into the process, releasing .NET object references, and obtaining
-- dynamically-generated stub functions for calling into .NET from Haskell.
--
-----------------------------------------------------------------------------
module Foreign.Salsa.CLR (
withCLR,
startCLR, stopCLR,
ObjectId,
releaseObject,
getMethodStub,
getFieldGetStub,
getFieldSetStub,
getDelegateConstructorStub,
boxString, boxInt32, boxBoolean
) where
import Data.Int
import System.IO.Unsafe ( unsafePerformIO )
import Foreign hiding ( new, newForeignPtr, unsafePerformIO )
import Foreign.C.String
import Foreign.Salsa.CLRHost
-- | Identifies a foreign (.NET) object instance
type ObjectId = Int32
-- | Starts the .NET execution engine, runs the given IO action, and finally
-- stops the execution engine. This can only be performed once in a process.
withCLR :: IO a -> IO a
withCLR action = do
startCLR
r <- action
stopCLR
return r
startCLR :: IO ()
startCLR = do
start_ICorRuntimeHost clrHost
-- Allow .NET to call into Haskell and free unused function pointer wrappers
setFreeHaskellFunPtr
stopCLR :: IO ()
stopCLR = do
-- saveDynamicAssembly -- (for debugging)
-- Prevent .NET finalizers from calling into Haskell (and causing access violations)
clearFreeHaskellFunPtr
stop_ICorRuntimeHost clrHost
return ()
-- | 'clrHost' stores a reference to the ICLRRuntimeHost for the .NET execution
-- engine that is hosted in the process.
{-# NOINLINE clrHost #-}
clrHost :: ICorRuntimeHost
clrHost = unsafePerformIO $ corBindToRuntimeEx
-- | @'unsafeGetPointerToMethod' m@ returns a function pointer to the method @m@
-- as implemented in the Salsa .NET driver assembly (Salsa.dll). It is safe only
-- if the type of the resulting function pointer matches that of the method given.
unsafeGetPointerToMethod :: String -> IO (FunPtr a)
unsafeGetPointerToMethod methodName = do
result <- withCWString methodName $ \methodName' -> getPointerToMethodRaw methodName'
if result == nullFunPtr
then error $ "Unable to execute Salsa.dll method '" ++ methodName ++ "'."
else return result
{-# NOINLINE getPointerToMethodRaw #-}
getPointerToMethodRaw :: GetPointerToMethodDelegate a
getPointerToMethodRaw = makeGetPointerToMethodDelegate $ unsafePerformIO $ loadDriverAndBoot clrHost
type GetPointerToMethodDelegate a = CWString -> IO (FunPtr a)
foreign import stdcall "dynamic" makeGetPointerToMethodDelegate :: FunPtr (GetPointerToMethodDelegate a) ->
GetPointerToMethodDelegate a
-- | Releases the .NET object indicated by the given object id.
{-# NOINLINE releaseObject #-}
releaseObject :: ObjectId -> IO ()
releaseObject = makeReleaseObjectDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "ReleaseObject"
type ReleaseObjectDelegate = ObjectId -> IO ()
foreign import stdcall "dynamic" makeReleaseObjectDelegate :: FunPtr ReleaseObjectDelegate -> ReleaseObjectDelegate
-- | Passes a function pointer to the 'freeHaskellFunPtr' function into .NET so
-- that Haskell FunPtr's can be freed from .NET code.
setFreeHaskellFunPtr :: IO ()
setFreeHaskellFunPtr = do
funPtr <- wrapFreeHaskellFunPtr freeHaskellFunPtr
setFreeHaskellFunPtrRaw funPtr
-- Note: since the function passed into .NET may be used by .NET at any
-- point until the engine is shutdown, and the engine is only loaded
-- once per process, we don't need to free it.
-- | Clears the 'freeHaskellFunPtr' pointer on the .NET side to prevent finalizers from
-- calling into Haskell (and causing access violations).
clearFreeHaskellFunPtr :: IO ()
clearFreeHaskellFunPtr = setFreeHaskellFunPtrRaw nullFunPtr
{-# NOINLINE setFreeHaskellFunPtrRaw #-}
setFreeHaskellFunPtrRaw :: (FunPtr (FunPtr a -> IO ()) -> IO ())
setFreeHaskellFunPtrRaw = makeSetFreeHaskellFunPtrDelegate $ unsafePerformIO $
unsafeGetPointerToMethod "SetFreeHaskellFunPtr"
foreign import stdcall "dynamic" makeSetFreeHaskellFunPtrDelegate ::
FunPtr (FunPtr (FunPtr a -> IO ()) -> IO ()) -> (FunPtr (FunPtr a -> IO ()) -> IO ())
foreign import stdcall "wrapper" wrapFreeHaskellFunPtr ::
(FunPtr a -> IO ()) -> IO (FunPtr (FunPtr a -> IO ()))
-- | 'saveDynamicAssembly' saves the assembly containing the dynamically-generated
-- wrapper stubs to disk (for debugging purposes).
{-# NOINLINE saveDynamicAssembly #-}
saveDynamicAssembly :: IO ()
saveDynamicAssembly = makeSaveDynamicAssemblyDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "SaveDynamicAssembly"
type SaveDynamicAssemblyDelegate = IO ()
foreign import stdcall "dynamic" makeSaveDynamicAssemblyDelegate :: FunPtr SaveDynamicAssemblyDelegate -> SaveDynamicAssemblyDelegate
-- | @'getMethodStub' c m s@ returns a function pointer to a function that, when
-- called, invokes the method with name @m@ and signature @s@ in class @c@.
--
-- @s@ should be a semi-colon delimited list of parameter types indicating the
-- desired overload of the given method.
getMethodStub :: String -> String -> String -> IO (FunPtr f)
getMethodStub className methodName parameterTypeNames = do
withCWString className $ \className' ->
withCWString methodName $ \methodName' ->
withCWString parameterTypeNames $ \parameterTypeNames' ->
return $ getMethodStubRaw className' methodName' parameterTypeNames'
{-# NOINLINE getMethodStubRaw #-}
getMethodStubRaw :: GetMethodStubDelegate a
getMethodStubRaw = makeGetMethodStubDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "GetMethodStub"
type GetMethodStubDelegate a = CWString -> CWString -> CWString -> FunPtr a
foreign import stdcall "dynamic" makeGetMethodStubDelegate :: FunPtr (GetMethodStubDelegate a) ->
(GetMethodStubDelegate a)
-- | @'getFieldGetStub' c f@ returns a function pointer to a function that, when
-- called, gets the value of the field @f@ in class @c@.
getFieldGetStub :: String -> String -> IO (FunPtr f)
getFieldGetStub className fieldName = do
withCWString className $ \className' ->
withCWString fieldName $ \fieldName' ->
return $ getFieldGetStubRaw className' fieldName'
{-# NOINLINE getFieldGetStubRaw #-}
getFieldGetStubRaw :: GetFieldGetStubDelegate a
getFieldGetStubRaw = makeGetFieldGetStubDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "GetFieldGetStub"
type GetFieldGetStubDelegate a = CWString -> CWString -> FunPtr a
foreign import stdcall "dynamic" makeGetFieldGetStubDelegate :: FunPtr (GetFieldGetStubDelegate a) ->
(GetFieldGetStubDelegate a)
-- | @'getFieldSetStub' c f@ returns a function pointer to a function that, when
-- called, sets the value of the field @f@ in class @c@ to the given value.
getFieldSetStub :: String -> String -> IO (FunPtr f)
getFieldSetStub className fieldName = do
withCWString className $ \className' ->
withCWString fieldName $ \fieldName' ->
return $ getFieldSetStubRaw className' fieldName'
{-# NOINLINE getFieldSetStubRaw #-}
getFieldSetStubRaw :: GetFieldSetStubDelegate a
getFieldSetStubRaw = makeGetFieldSetStubDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "GetFieldSetStub"
type GetFieldSetStubDelegate a = CWString -> CWString -> FunPtr a
foreign import stdcall "dynamic" makeGetFieldSetStubDelegate :: FunPtr (GetFieldSetStubDelegate a) ->
(GetFieldSetStubDelegate a)
-- | @'getDelegateConstructorStub' dt wrapper@ returns an action that, given a
-- function, will return a reference to a .NET delegate object that calls the
-- provided function. The delegate constructed will be of the type @dt@.
-- The function @wrapper@ will be called in order to wrap the given function
-- as a function pointer for passing into .NET.
getDelegateConstructorStub :: String -> (f -> IO (FunPtr f)) -> IO (f -> IO ObjectId)
getDelegateConstructorStub delegateTypeName wrapper = do
-- Obtain a function pointer to a function that, when called with a
-- function pointer compatible with the given wrapper function, returns
-- a reference to a .NET delegate object that calls the function.
delegateConstructor <- withCWString delegateTypeName $
\delegateTypeName' -> getDelegateConstructorStubRaw delegateTypeName'
-- Returns a function that accepts a function, 'f' implementing the
-- delegate, converts 'f' to a function pointer, and then wraps it
-- up as a .NET delegate.
return $ \f -> do
fFunPtr <- wrapper f
(makeDelegateConstructor delegateConstructor) fFunPtr
{-# NOINLINE getDelegateConstructorStubRaw #-}
getDelegateConstructorStubRaw :: GetDelegateConstructorStubDelegate a
getDelegateConstructorStubRaw = makeGetDelegateConstructorStubDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "GetDelegateConstructorStub"
type GetDelegateConstructorStubDelegate a = CWString -> IO (FunPtr (FunPtr a -> IO ObjectId))
foreign import stdcall "dynamic" makeGetDelegateConstructorStubDelegate :: FunPtr (GetDelegateConstructorStubDelegate a) ->
(GetDelegateConstructorStubDelegate a)
type DelegateConstructor a = FunPtr a -> IO ObjectId
foreign import stdcall "dynamic" makeDelegateConstructor :: FunPtr (DelegateConstructor a) -> (DelegateConstructor a)
--
-- Boxing support
--
-- | @'getBoxStub' t@ returns a function pointer to a function that, when
-- called, returns a reference to a value boxed as the given type.
getBoxStub :: String -> IO (FunPtr f)
getBoxStub typeName = do
withCWString typeName $ \typeName' -> return $ getBoxStubRaw typeName'
{-# NOINLINE getBoxStubRaw #-}
getBoxStubRaw :: GetBoxStubDelegate a
getBoxStubRaw = makeGetBoxStubDelegate $ unsafePerformIO $ unsafeGetPointerToMethod "GetBoxStub"
type GetBoxStubDelegate a = CWString -> FunPtr a
foreign import stdcall "dynamic" makeGetBoxStubDelegate :: FunPtr (GetBoxStubDelegate a) -> GetBoxStubDelegate a
boxString :: String -> IO ObjectId
boxString s = withCWString s $ \s' -> boxStringStub s'
type BoxStringStub = CWString -> IO ObjectId
foreign import stdcall "dynamic" makeBoxStringStub :: FunPtr BoxStringStub -> BoxStringStub
{-# NOINLINE boxStringStub #-}
boxStringStub :: BoxStringStub
boxStringStub = makeBoxStringStub $ unsafePerformIO $ getBoxStub "System.String"
boxInt32 :: Int32 -> IO ObjectId
boxInt32 = boxInt32Stub
type BoxInt32Stub = Int32 -> IO ObjectId
foreign import stdcall "dynamic" makeBoxInt32Stub :: FunPtr BoxInt32Stub -> BoxInt32Stub
{-# NOINLINE boxInt32Stub #-}
boxInt32Stub :: BoxInt32Stub
boxInt32Stub = makeBoxInt32Stub $ unsafePerformIO $ getBoxStub "System.Int32"
boxBoolean :: Bool -> ObjectId
boxBoolean True = boxedTrue
boxBoolean False = boxedFalse
{-# NOINLINE boxedTrue #-}
boxedTrue = unsafePerformIO $ boxBooleanStub True
{-# NOINLINE boxedFalse #-}
boxedFalse = unsafePerformIO $ boxBooleanStub False
type BoxBooleanStub = Bool -> IO ObjectId
foreign import stdcall "dynamic" makeBoxBooleanStub :: FunPtr BoxBooleanStub -> BoxBooleanStub
{-# NOINLINE boxBooleanStub #-}
boxBooleanStub :: BoxBooleanStub
boxBooleanStub = makeBoxBooleanStub $ unsafePerformIO $ getBoxStub "System.Boolean"
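-- A usage sketch (not part of the original module): box a Haskell 'String' and
-- an 'Int32' into .NET objects and release the references again. This assumes
-- the CLR can actually be hosted in the process, hence the 'withCLR' wrapper.
exampleBoxing :: IO ()
exampleBoxing = withCLR $ do
    sId <- boxString "hello"
    nId <- boxInt32 42
    releaseObject sId
    releaseObject nId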
-- vim:set ts=4 sw=4 expandtab:
|
unfoldr/Salsa
|
Foreign/Salsa/CLR.hs
|
bsd-3-clause
| 11,491
| 0
| 14
| 1,863
| 1,850
| 976
| 874
| 154
| 2
|
{-# LANGUAGE DataKinds #-}
-- | This module provides parts of the JQuery API (<http://api.jquery.com/>).
module Language.Sunroof.JS.JQuery
(
-- * General JQuery API
dollar
, jQuery, jq
-- * DOM
, append
, html, setHtml
, text, setText
-- * CSS
, css, setCss
, addClass, removeClass
-- * Attributes
, attribute, attr'
, setAttr
, removeAttr
-- * Event Handling
, on
-- * Manipulation
, innerWidth
, innerHeight
, outerWidth, outerWidth'
, outerHeight, outerHeight'
, clone, clone'
) where
import Language.Sunroof.Classes
( SunroofArgument(..)
)
import Language.Sunroof.Types
import Language.Sunroof.JS.Object ( JSObject )
import Language.Sunroof.JS.String ( JSString )
import Language.Sunroof.JS.Number ( JSNumber )
import Language.Sunroof.JS.Bool ( JSBool )
-- -----------------------------------------------------------------------
-- JQuery interface
-- -----------------------------------------------------------------------
-- | The dollar function.
-- See <http://api.jquery.com/jQuery/>.
dollar :: JSFunction JSString JSObject
dollar = fun "$"
-- | Calls the JQuery dollar function.
-- See <http://api.jquery.com/jQuery/>.
jQuery :: JSString -> JS t JSObject
jQuery nm = dollar `apply` nm
-- | Short-hand for 'jQuery'.
jq :: JSString -> JS t JSObject
jq = jQuery
-- -----------------------------------------------------------------------
-- Manipulation > DOM
-- -----------------------------------------------------------------------
-- | See <http://api.jquery.com/append/>.
append :: JSObject -> JSObject -> JS t ()
append x = invoke "append" x
-- | See @.html()@ at <http://api.jquery.com/html/>.
html :: JSObject -> JS t JSObject
html = invoke "html" ()
-- | See @.html(htmlString)@ at <http://api.jquery.com/html/>.
setHtml :: JSString -> JSObject -> JS t JSObject
setHtml s = invoke "html" s
-- | See @.text()@ at <http://api.jquery.com/text/>.
text :: JSObject -> JS t JSObject
text = invoke "text" ()
-- | See @.text(textString)@ at <http://api.jquery.com/text/>.
setText :: JSString -> JSObject -> JS t JSObject
setText s = invoke "text" s
-- -------------------------------------------------------------
-- CSS
-- -------------------------------------------------------------
-- | See @.css(propertyName)@ at <http://api.jquery.com/css/>.
css :: JSString -> JSObject -> JS t JSString
css prop = invoke "css" prop
-- | See @.css(propertyName, value)@ at <http://api.jquery.com/css/>.
setCss :: JSString -> JSString -> JSObject -> JS t JSString
setCss prop v = invoke "css" (prop, v)
-- | See <http://api.jquery.com/addClass/>.
addClass :: JSString -> JSObject -> JS t ()
addClass = invoke "addClass"
-- | See <http://api.jquery.com/removeClass/>.
removeClass :: JSString -> JSObject -> JS t ()
removeClass = invoke "removeClass"
-- -------------------------------------------------------------
-- Attributes
-- -------------------------------------------------------------
-- | See @.attr(attributeName)@ at <http://api.jquery.com/attr/>.
-- This binding does not have the original Javascript name,
-- because of the 'attr' function.
attribute :: JSString -> JSObject -> JS t JSString
attribute a = invoke "attr" a
-- | See @.attr(attributeName)@ at <http://api.jquery.com/attr/>.
-- This binding does not have the original Javascript name,
-- because of the 'attr' function.
attr' :: JSString -> JSObject -> JS t JSString
attr' = attribute
-- | See @.attr(attributeName, value)@ at <http://api.jquery.com/attr/>.
setAttr :: JSString -> JSString -> JSObject -> JS t JSString
setAttr a v = invoke "attr" (a, v)
-- | See: <http://api.jquery.com/removeAttr/>
removeAttr :: JSString -> JSObject -> JS t JSObject
removeAttr attrName = invoke "removeAttr" attrName
-- -------------------------------------------------------------
-- Event Handling
-- -------------------------------------------------------------
-- | See <http://api.jquery.com/on/>.
on :: (SunroofArgument a) => JSString -> JSString -> (a -> JS 'B ()) -> JSObject -> JS t ()
on nm sel f o = do
callback <- continuation f
o # invoke "on" (nm,sel,callback)
-- -------------------------------------------------------------
-- Manipulation > Style Properties
-- -------------------------------------------------------------
-- | See <http://api.jquery.com/innerWidth/>.
innerWidth :: JSObject -> JS t JSNumber
innerWidth = invoke "innerWidth" ()
-- | See <http://api.jquery.com/innerHeight/>.
innerHeight :: JSObject -> JS t JSNumber
innerHeight = invoke "innerHeight" ()
-- | See <http://api.jquery.com/outerWidth/>.
outerWidth :: JSObject -> JS t JSNumber
outerWidth = invoke "outerWidth" ()
-- | See <http://api.jquery.com/outerWidth/>.
outerWidth' :: JSBool -> JSObject -> JS t JSNumber
outerWidth' includeMargin = invoke "outerWidth" includeMargin
-- | See <http://api.jquery.com/outerHeight/>.
outerHeight :: JSObject -> JS t JSNumber
outerHeight = invoke "outerHeight" ()
-- | See <http://api.jquery.com/outerHeight/>.
outerHeight' :: JSBool -> JSObject -> JS t JSNumber
outerHeight' includeMargin = invoke "outerHeight" includeMargin
-- | See @.clone()@ at <http://api.jquery.com/clone/>.
clone :: JSObject -> JS t JSObject
clone = invoke "clone" ()
-- | See @.clone(withDataAndEvents, deepWithDataAndEvents)@ at <http://api.jquery.com/clone/>.
clone' :: JSBool -> JSBool -> JSObject -> JS t JSObject
clone' withDataAndEvents deepWithDataAndEvents =
invoke "clone" (withDataAndEvents, deepWithDataAndEvents)
|
ku-fpg/sunroof-compiler
|
Language/Sunroof/JS/JQuery.hs
|
bsd-3-clause
| 5,504
| 0
| 12
| 800
| 1,014
| 561
| 453
| 79
| 1
|
-- | Program to replace everything between brackets by spaces
--
-- This program was originally contributed by Petr Prokhorenkov.
--
module Data.Text.Benchmarks.Micro.StripBrackets
( benchmark
) where
import Criterion.Main (Benchmark, bench)
import qualified Data.Text as T
import Data.Text.Benchmarks.Micro.Util
benchmark :: FilePath -> Benchmark
benchmark = bench "StripBrackets" . withUtf8File stripBrackets
stripBrackets :: T.Text -> T.Text
stripBrackets = snd . T.mapAccumL f (0 :: Int)
where
f depth c =
let depth' = depth + d' c
c' | depth > 0 || depth' > 0 = ' '
| otherwise = c
in (depth', c')
d' '{' = 1
d' '[' = 1
d' '}' = -1
d' ']' = -1
d' _ = 0
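-- A usage sketch (not part of the original module): brackets and everything
-- inside them are replaced by spaces, while text outside is kept unchanged.
exampleStripped :: T.Text
exampleStripped = stripBrackets (T.pack "keep {drop [this]} keep")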
|
JensTimmerman/text-benchmarks
|
src/Data/Text/Benchmarks/Micro/StripBrackets.hs
|
bsd-3-clause
| 747
| 0
| 16
| 204
| 218
| 120
| 98
| 19
| 5
|
{-# LANGUAGE CPP, BangPatterns, Rank2Types #-}
-- |
-- Module : FastPut
-- Copyright : (c) 2010 Simon Meier
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Simon Meier <iridcode@gmail.com>
-- Stability : experimental
-- Portability : tested on GHC only
--
-- Implementation of a 'Put' monad with performance characteristics similar
-- to those of the 'Builder' monoid.
--
module FastPut where
import Foreign
import Data.Monoid
import Control.Monad (unless)
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
#ifdef BYTESTRING_IN_BASE
import Data.ByteString.Base (inlinePerformIO)
import qualified Data.ByteString.Base as S
import qualified Data.ByteString.Lazy.Base as L -- FIXME: is this the right module for access to 'Chunks'?
#else
import Data.ByteString.Internal (inlinePerformIO)
import qualified Data.ByteString.Internal as S
import qualified Data.ByteString.Lazy.Internal as L
#endif
import qualified Blaze.ByteString.Builder.Internal as B
import qualified Blaze.ByteString.Builder.Write as B
import Blaze.ByteString.Builder.Write (Write(..))
import qualified Blaze.ByteString.Builder.Word as B
import Blaze.ByteString.Builder.Word (writeWord8)
import Criterion.Main
------------------------------------------------------------------------------
-- Benchmarks
------------------------------------------------------------------------------
main :: IO ()
main = defaultMain $ concat
[ return $ bench "cost of putBuilder" $ whnf
(L.length . toLazyByteString2 . mapM_ (fromBuilder . fromWord8))
word8s
, benchmark "putBuilder"
(fromBuilder . mconcat . map fromWord8)
(mconcat . map B.fromWord8)
word8s
, benchmark "fromWriteSingleton"
(mapM_ putWord8)
(mconcat . map B.fromWord8)
word8s
, benchmark "fromWrite"
(mapM_ (putWrite . writeWord8))
(mconcat . map (B.fromWrite . writeWord8))
word8s
]
where
benchmark name putF builderF x =
[ bench (name ++ " Put") $
whnf (L.length . toLazyByteString2 . putF) x
, bench (name ++ " Builder") $
whnf (L.length . B.toLazyByteString . builderF) x
]
word8s :: [Word8]
word8s = take 100000 $ cycle [0..]
{-# NOINLINE word8s #-}
------------------------------------------------------------------------------
-- The Put type
------------------------------------------------------------------------------
data BufRange = BufRange {-# UNPACK #-} !(Ptr Word8) {-# UNPACK #-} !(Ptr Word8)
newtype Put a = Put {
unPut :: forall r. (a -> PutStep r) -> PutStep r
}
data PutSignal a =
Done {-# UNPACK #-} !(Ptr Word8) a
| BufferFull
{-# UNPACK #-} !Int
{-# UNPACK #-} !(Ptr Word8)
!(PutStep a)
| InsertByteString
{-# UNPACK #-} !(Ptr Word8)
!S.ByteString
!(PutStep a)
type PutStep a = BufRange -> IO (PutSignal a)
instance Monad Put where
return x = Put $ \k -> k x
{-# INLINE return #-}
m >>= f = Put $ \k -> unPut m (\x -> unPut (f x) k)
{-# INLINE (>>=) #-}
m >> n = Put $ \k -> unPut m (\_ -> unPut n k)
{-# INLINE (>>) #-}
------------------------------------------------------------------------------
-- The Builder type, using the same signals as the Put type
------------------------------------------------------------------------------
newtype Builder = Builder (forall r. PutStep r -> PutStep r)
instance Monoid Builder where
mempty = Builder id
{-# INLINE mempty #-}
(Builder b1) `mappend` (Builder b2) = Builder $ b1 . b2
{-# INLINE mappend #-}
mconcat = foldr mappend mempty
{-# INLINE mconcat #-}
fromBuilder :: Builder -> Put ()
fromBuilder (Builder build) = Put $ \k -> build (k ())
toBuilder :: Put () -> Builder
toBuilder (Put put) = Builder $ \k -> put (\_ -> k)
fromWrite :: Write -> Builder
fromWrite (Write size io) =
Builder step
where
step k (BufRange pf pe)
| pf `plusPtr` size <= pe = do
io pf
let !br' = BufRange (pf `plusPtr` size) pe
k br'
| otherwise = return $ BufferFull size pf (step k)
{-# INLINE fromWrite #-}
fromWriteSingleton :: (a -> Write) -> a -> Builder
fromWriteSingleton write =
mkPut
where
mkPut x = Builder step
where
step k (BufRange pf pe)
| pf `plusPtr` size <= pe = do
io pf
let !br' = BufRange (pf `plusPtr` size) pe
k br'
| otherwise = return $ BufferFull size pf (step k)
where
Write size io = write x
{-# INLINE fromWriteSingleton #-}
fromWord8 :: Word8 -> Builder
fromWord8 = fromWriteSingleton writeWord8
------------------------------------------------------------------------------
-- Implementations
------------------------------------------------------------------------------
putWord8 :: Word8 -> Put ()
putWord8 = putWriteSingleton writeWord8
putWrite :: Write -> Put ()
putWrite (Write size io) =
Put step
where
step k (BufRange pf pe)
| pf `plusPtr` size <= pe = do
io pf
let !br' = BufRange (pf `plusPtr` size) pe
k () br'
| otherwise = return $ BufferFull size pf (step k)
{-# INLINE putWrite #-}
putWriteSingleton :: (a -> Write) -> a -> Put ()
putWriteSingleton write =
mkPut
where
mkPut x = Put step
where
step k (BufRange pf pe)
| pf `plusPtr` size <= pe = do
io pf
let !br' = BufRange (pf `plusPtr` size) pe
k () br'
| otherwise = return $ BufferFull size pf (step k)
where
Write size io = write x
{-# INLINE putWriteSingleton #-}
putBuilder :: B.Builder -> Put ()
putBuilder (B.Builder b) =
Put step
where
finalStep _ pf = return $ B.Done pf
step k = go (b finalStep)
where
go buildStep (BufRange pf pe) = do
signal <- buildStep pf pe
case signal of
B.Done pf' -> do
let !br' = BufRange pf' pe
k () br'
B.BufferFull minSize pf' nextBuildStep ->
return $ BufferFull minSize pf' (go nextBuildStep)
B.ModifyChunks _ _ _ ->
error "putBuilder: ModifyChunks not implemented"
{-
m >>= f = GetC $ \done empty pe ->
runGetC m (\pr' x -> runGetC (f x) done empty pe pr')
(\m' -> empty (m' >>= f))
pe
newtype GetC r a = GetC {
runGetC ::
(Ptr Word8 -> a -> IO r) -> -- done
(GetC r a -> IO r ) -> -- empty buffer
Ptr Word8 -> -- end of buffer
Ptr Word8 -> -- next byte to read
IO r
}
instance Functor (GetC r) where
fmap f g = GetC $ \done empty ->
runGetC g (\pr' x -> done pr' (f x))
(\g' -> empty (fmap f g'))
instance Monad (GetC r) where
return x = GetC $ \done _ _ pr -> done pr x
m >>= f = GetC $ \done empty pe ->
runGetC m (\pr' x -> runGetC (f x) done empty pe pr')
(\m' -> empty (m' >>= f))
pe
-}
------------------------------------------------------------------------------
-- Internal global constants.
------------------------------------------------------------------------------
-- | Default size (~32kb) for the buffer that becomes a chunk of the output
-- stream once it is filled.
--
defaultBufferSize :: Int
defaultBufferSize = 32 * 1024 - overhead -- Copied from Data.ByteString.Lazy.
where overhead = 2 * sizeOf (undefined :: Int)
-- | The minimal length (~4kb) a buffer must have before filling it and
-- outputting it as a chunk of the output stream.
--
-- This size determines when a buffer is spilled after a 'flush' or a direct
-- bytestring insertion. It is also the size of the first chunk generated by
-- 'toLazyByteString'.
defaultMinimalBufferSize :: Int
defaultMinimalBufferSize = 4 * 1024 - overhead
where overhead = 2 * sizeOf (undefined :: Int)
-- | The default length (64) for the first buffer to be allocated when
-- converting a 'Builder' to a lazy bytestring.
--
-- See 'toLazyByteStringWith' for further explanation.
defaultFirstBufferSize :: Int
defaultFirstBufferSize = 64
-- | The maximal number of bytes for which copying is cheaper than direct
-- insertion into the output stream. This takes into account the fragmentation
-- that may occur in the output buffer due to the early 'flush' implied by the
-- direct bytestring insertion.
--
-- @'defaultMaximalCopySize' = 2 * 'defaultMinimalBufferSize'@
--
defaultMaximalCopySize :: Int
defaultMaximalCopySize = 2 * defaultMinimalBufferSize
------------------------------------------------------------------------------
-- Flushing and running a Builder
------------------------------------------------------------------------------
-- | Output all data written in the current buffer and start a new chunk.
--
-- The use of this function depends on how the resulting bytestrings are
-- consumed. 'flush' is possibly not very useful in non-interactive scenarios.
-- However, it is kept for compatibility with the builder provided by
-- Data.Binary.Builder.
--
-- When using 'toLazyByteString' to extract a lazy 'L.ByteString' from a
-- 'Builder', this means that a new chunk will be started in the resulting lazy
-- 'L.ByteString'. The remaining part of the buffer is spilled if the
-- remaining free space is smaller than the minimal desired buffer size.
--
{-
flush :: Builder
flush = Builder $ \k pf _ -> return $ ModifyChunks pf id k
-}
-- | Run a 'Builder' with the given buffer sizes.
--
-- Use this function for integrating the 'Builder' type with other libraries
-- that generate lazy bytestrings.
--
-- Note that the builders should guarantee that on average the desired chunk
-- size is attained. Builders may decide to start a new buffer and not
-- completely fill the existing buffer, if this is faster. However, they should
-- not spill too much of the buffer, if they cannot compensate for it.
--
-- A call @toLazyByteStringWith bufSize minBufSize firstBufSize@ will generate
-- a lazy bytestring according to the following strategy. First, we allocate
-- a buffer of size @firstBufSize@ and start filling it. If it overflows, we
-- allocate a buffer of size @minBufSize@ and copy the first buffer to it in
-- order to avoid generating too small a chunk. Finally, every subsequent buffer
-- will be of size @bufSize@. This slow startup strategy is required to achieve
-- good speed for short (<200 bytes) resulting bytestrings, as for them the
-- allocation cost of a large buffer cannot be compensated for. Moreover, this
-- strategy also allows us to avoid spilling too much memory for short
-- resulting bytestrings.
--
-- Note that setting @firstBufSize >= minBufSize@ implies that the first buffer
-- is no longer copied but allocated and filled directly. Hence, setting
-- @firstBufSize = bufSize@ means that all chunks will use an underlying buffer
-- of size @bufSize@. This is recommended, if you know that you always output
-- more than @minBufSize@ bytes.
toLazyByteStringWith
:: Int -- ^ Buffer size (upper-bounds the resulting chunk size).
-> Int -- ^ Minimal free buffer space for continuing filling
-- the same buffer after a 'flush' or a direct bytestring
-- insertion. This corresponds to the minimal desired
-- chunk size.
-> Int -- ^ Size of the first buffer to be used and copied for
-- larger resulting sequences
-> Put a -- ^ Builder to run.
-> L.ByteString -- ^ Lazy bytestring to output after the builder is
-- finished.
-> L.ByteString -- ^ Resulting lazy bytestring
toLazyByteStringWith bufSize minBufSize firstBufSize (Put b) k =
inlinePerformIO $ fillFirstBuffer (b finalStep)
where
finalStep _ (BufRange pf _) = return $ Done pf undefined
-- fill a first very small buffer, if we need more space then copy it
-- to the new buffer of size 'minBufSize'. This way we don't pay the
-- allocation cost of the big 'bufSize' buffer, when outputting only
-- small sequences.
fillFirstBuffer !step0
| minBufSize <= firstBufSize = fillNewBuffer firstBufSize step0
| otherwise = do
fpbuf <- S.mallocByteString firstBufSize
withForeignPtr fpbuf $ \pf -> do
let !br = BufRange pf (pf `plusPtr` firstBufSize)
mkbs pf' = S.PS fpbuf 0 (pf' `minusPtr` pf)
{-# INLINE mkbs #-}
next <- step0 br
case next of
Done pf' _
| pf' == pf -> return k
| otherwise -> return $ L.Chunk (mkbs pf') k
BufferFull newSize pf' nextStep -> do
let !l = pf' `minusPtr` pf
fillNewBuffer (max (l + newSize) minBufSize) $
\(BufRange pfNew peNew) -> do
copyBytes pfNew pf l
let !brNew = BufRange (pfNew `plusPtr` l) peNew
nextStep brNew
InsertByteString _ _ _ -> error "not yet implemented"
{-
                ModifyChunks pf' bsk nextStep
| pf' == pf ->
return $ bsk (inlinePerformIO $ fillNewBuffer bufSize nextStep)
| otherwise ->
return $ L.Chunk (mkbs pf')
(bsk (inlinePerformIO $ fillNewBuffer bufSize nextStep))
-}
-- allocate and fill a new buffer
fillNewBuffer !size !step0 = do
fpbuf <- S.mallocByteString size
withForeignPtr fpbuf $ fillBuffer fpbuf
where
fillBuffer fpbuf !pbuf = fill pbuf step0
where
!pe = pbuf `plusPtr` size
fill !pf !step = do
let !br = BufRange pf pe
next <- step br
let mkbs pf' = S.PS fpbuf (pf `minusPtr` pbuf) (pf' `minusPtr` pf)
{-# INLINE mkbs #-}
case next of
Done pf' _
| pf' == pf -> return k
| otherwise -> return $ L.Chunk (mkbs pf') k
BufferFull newSize pf' nextStep ->
return $ L.Chunk (mkbs pf')
(inlinePerformIO $
fillNewBuffer (max newSize bufSize) nextStep)
InsertByteString _ _ _ -> error "not yet implemented2"
{-
ModifyChunks pf' bsk nextStep
| pf' == pf ->
return $ bsk (inlinePerformIO $ fill pf' nextStep)
| minBufSize < pe `minusPtr` pf' ->
return $ L.Chunk (mkbs pf')
(bsk (inlinePerformIO $ fill pf' nextStep))
| otherwise ->
return $ L.Chunk (mkbs pf')
(bsk (inlinePerformIO $ fillNewBuffer bufSize nextStep))
-}
-- | Extract the lazy 'L.ByteString' from the builder by running it with default
-- buffer sizes. Use this function, if you do not have any special
-- considerations with respect to buffer sizes.
--
-- @ 'toLazyByteString' b = 'toLazyByteStringWith' 'defaultBufferSize' 'defaultMinimalBufferSize' 'defaultFirstBufferSize' b L.empty@
--
-- Note that @'toLazyByteString'@ is a 'Monoid' homomorphism.
--
-- > toLazyByteString mempty == mempty
-- > toLazyByteString (x `mappend` y) == toLazyByteString x `mappend` toLazyByteString y
--
-- However, in the second equation, the left-hand-side is generally faster to
-- execute.
--
toLazyByteString :: Put a -> L.ByteString
toLazyByteString b = toLazyByteStringWith
defaultBufferSize defaultMinimalBufferSize defaultFirstBufferSize b L.empty
{-# INLINE toLazyByteString #-}
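-- A usage sketch (not part of the original file): run a small 'Put' action and
-- materialise it as a lazy bytestring with the default buffer sizes.
examplePut :: L.ByteString
examplePut = toLazyByteString (mapM_ putWord8 [72, 105, 33])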
------------------------------------------------------------------------------
-- Builder Enumeration
------------------------------------------------------------------------------
data BuildStream a =
BuildChunk S.ByteString (IO (BuildStream a))
| BuildYield
a
(forall b. Bool ->
Either (Maybe S.ByteString) (Put b -> IO (BuildStream b)))
enumPut :: Int -> Put a -> IO (BuildStream a)
enumPut bufSize (Put put0) =
fillBuffer bufSize (put0 finalStep)
where
finalStep :: forall b. b -> PutStep b
finalStep x (BufRange op _) = return $ Done op x
fillBuffer :: forall b. Int -> PutStep b -> IO (BuildStream b)
fillBuffer size step = do
fpbuf <- S.mallocByteString bufSize
let !pbuf = unsafeForeignPtrToPtr fpbuf
-- safe due to later reference of fpbuf
-- BETTER than withForeignPtr, as we lose a tail call otherwise
!br = BufRange pbuf (pbuf `plusPtr` size)
fillStep fpbuf br step
fillPut :: ForeignPtr Word8 -> BufRange ->
Bool -> Either (Maybe S.ByteString) (Put b -> IO (BuildStream b))
fillPut !fpbuf !(BufRange op _) False
| pbuf == op = Left Nothing
| otherwise = Left $ Just $
S.PS fpbuf 0 (op `minusPtr` pbuf)
where
pbuf = unsafeForeignPtrToPtr fpbuf
{-# INLINE pbuf #-}
fillPut !fpbuf !br True =
Right $ \(Put put) -> fillStep fpbuf br (put finalStep)
fillStep :: forall b. ForeignPtr Word8 -> BufRange -> PutStep b -> IO (BuildStream b)
fillStep !fpbuf !br@(BufRange _ ope) step = do
let pbuf = unsafeForeignPtrToPtr fpbuf
{-# INLINE pbuf #-}
signal <- step br
case signal of
Done op' x -> do -- builder completed, buffer partially filled
let !br' = BufRange op' ope
return $ BuildYield x (fillPut fpbuf br')
BufferFull minSize op' nextStep
| pbuf == op' -> do -- nothing written, larger buffer required
fillBuffer (max bufSize minSize) nextStep
| otherwise -> do -- some bytes written, new buffer required
return $ BuildChunk
(S.PS fpbuf 0 (op' `minusPtr` pbuf))
(fillBuffer (max bufSize minSize) nextStep)
InsertByteString op' bs nextStep
| S.null bs -> do -- empty bytestrings are ignored
let !br' = BufRange op' ope
fillStep fpbuf br' nextStep
| pbuf == op' -> do -- no bytes written: just insert bytestring
return $ BuildChunk bs (fillBuffer bufSize nextStep)
| otherwise -> do -- bytes written, insert buffer and bytestring
return $ BuildChunk (S.PS fpbuf 0 (op' `minusPtr` pbuf))
(return $ BuildChunk bs (fillBuffer bufSize nextStep))
toLazyByteString' :: Put () -> L.ByteString
toLazyByteString' put =
inlinePerformIO (consume `fmap` enumPut defaultBufferSize put)
where
consume :: BuildStream () -> L.ByteString
consume (BuildYield _ f) =
case f False of
Left Nothing -> L.Empty
Left (Just bs) -> L.Chunk bs L.Empty
Right _ -> error "toLazyByteString': enumPut violated postcondition"
consume (BuildChunk bs ioStream) =
L.Chunk bs $ inlinePerformIO (consume `fmap` ioStream)
{-
BufferFull minSize pf' nextStep -> do
io $ S.PS fpbuf 0 (pf' `minusPtr` pf)
fillBuffer (max bufSize minSize) nextStep
ModifyChunks pf' bsk nextStep -> do
io $ S.PS fpbuf 0 (pf' `minusPtr` pf)
L.foldrChunks (\bs -> (io bs >>)) (return ()) (bsk L.empty)
fillBuffer bufSize nextStep
-}
------------------------------------------------------------------------------
-- More explicit implementation of running builders
------------------------------------------------------------------------------
data Buffer = Buffer {-# UNPACK #-} !(ForeignPtr Word8) -- underlying pinned array
{-# UNPACK #-} !(Ptr Word8) -- beginning of slice
{-# UNPACK #-} !(Ptr Word8) -- next free byte
{-# UNPACK #-} !(Ptr Word8) -- first byte after buffer
allocBuffer :: Int -> IO Buffer
allocBuffer size = do
fpbuf <- S.mallocByteString size
let !pbuf = unsafeForeignPtrToPtr fpbuf
return $! Buffer fpbuf pbuf pbuf (pbuf `plusPtr` size)
unsafeFreezeBuffer :: Buffer -> S.ByteString
unsafeFreezeBuffer (Buffer fpbuf p0 op _) =
S.PS fpbuf 0 (op `minusPtr` p0)
unsafeFreezeNonEmptyBuffer :: Buffer -> Maybe S.ByteString
unsafeFreezeNonEmptyBuffer (Buffer fpbuf p0 op _)
| p0 == op = Nothing
| otherwise = Just $ S.PS fpbuf 0 (op `minusPtr` p0)
nextSlice :: Int -> Buffer -> Maybe Buffer
nextSlice minSize (Buffer fpbuf _ op ope)
| ope `minusPtr` op <= minSize = Nothing
| otherwise = Just (Buffer fpbuf op op ope)
runPut :: Monad m
=> (IO (PutSignal a) -> m (PutSignal a)) -- lifting of buildsteps
       -> (Int -> Buffer -> m Buffer) -- output function for a guaranteed non-empty buffer; the returned buffer will be filled next
       -> (S.ByteString -> m ()) -- output function for guaranteed non-empty bytestrings that are inserted directly into the stream
-> Put a -- put to execute
-> Buffer -- initial buffer to be used
-> m (a, Buffer) -- result of put and remaining buffer
runPut liftIO outputBuf outputBS (Put put) =
runStep (put finalStep)
where
finalStep x !(BufRange op _) = return $ Done op x
runStep step buf@(Buffer fpbuf p0 op ope) = do
let !br = BufRange op ope
signal <- liftIO $ step br
case signal of
            Done op' x -> -- put completed, buffer partially filled
return (x, Buffer fpbuf p0 op' ope)
BufferFull minSize op' nextStep -> do
buf' <- outputBuf minSize (Buffer fpbuf p0 op' ope)
runStep nextStep buf'
InsertByteString op' bs nextStep
| S.null bs -> -- flushing of buffer required
outputBuf 1 (Buffer fpbuf p0 op' ope) >>= runStep nextStep
| p0 == op' -> do -- no bytes written: just insert bytestring
outputBS bs
runStep nextStep buf
| otherwise -> do -- bytes written, insert buffer and bytestring
buf' <- outputBuf 1 (Buffer fpbuf p0 op' ope)
outputBS bs
runStep nextStep buf'
{-# INLINE runPut #-}
-- | A monad for lazily composing lazy bytestrings using continuations.
newtype LBSM a = LBSM { unLBSM :: (a, L.ByteString -> L.ByteString) }
instance Monad LBSM where
return x = LBSM (x, id)
(LBSM (x,k)) >>= f = let LBSM (x',k') = f x in LBSM (x', k . k')
(LBSM (_,k)) >> (LBSM (x',k')) = LBSM (x', k . k')
-- | Execute a put and return the written buffers as the chunks of a lazy
-- bytestring.
toLazyByteString2 :: Put a -> L.ByteString
toLazyByteString2 put =
k (bufToLBSCont (snd result) L.empty)
where
-- initial buffer
buf0 = inlinePerformIO $ allocBuffer defaultBufferSize
-- run put, but don't force result => we're lazy enough
LBSM (result, k) = runPut liftIO outputBuf outputBS put buf0
-- convert a buffer to a lazy bytestring continuation
bufToLBSCont = maybe id L.Chunk . unsafeFreezeNonEmptyBuffer
-- lifting an io putsignal to a lazy bytestring monad
liftIO io = LBSM (inlinePerformIO io, id)
    -- add buffer as a chunk and prepare allocation of a new one
outputBuf minSize buf = LBSM
( inlinePerformIO $ allocBuffer (max minSize defaultBufferSize)
, bufToLBSCont buf )
-- add bytestring directly as a chunk; exploits postcondition of runPut
-- that bytestrings are non-empty
outputBS bs = LBSM ((), L.Chunk bs)
-- | A Builder that traces a message
traceBuilder :: String -> Builder
traceBuilder msg = Builder $ \k br@(BufRange op ope) -> do
putStrLn $ "traceBuilder " ++ show (op, ope) ++ ": " ++ msg
k br
flushBuilder :: Builder
flushBuilder = Builder $ \k (BufRange op _) -> do
return $ InsertByteString op S.empty k
test2 :: Word8 -> [S.ByteString]
test2 x = L.toChunks $ toLazyByteString2 $ fromBuilder $ mconcat
[ traceBuilder "before flush"
, fromWord8 48
, flushBuilder
, flushBuilder
, traceBuilder "after flush"
, fromWord8 x
]
|
meiersi/blaze-builder
|
benchmarks/FastPut.hs
|
bsd-3-clause
| 25,067
| 0
| 28
| 7,520
| 4,932
| 2,518
| 2,414
| 351
| 5
|
-- | Utilities related to Monad and Applicative classes
-- Mostly for backwards compatibility.
module MonadUtils
( Applicative(..)
, (<$>)
, MonadFix(..)
, MonadIO(..)
, zipWith3M, zipWith3M_, zipWith4M, zipWithAndUnzipM
, mapAndUnzipM, mapAndUnzip3M, mapAndUnzip4M, mapAndUnzip5M
, mapAccumLM
, mapSndM
, concatMapM
, mapMaybeM
, fmapMaybeM, fmapEitherM
, anyM, allM, orM
, foldlM, foldlM_, foldrM
, maybeMapM
, whenM, unlessM
, filterOutM
) where
-------------------------------------------------------------------------------
-- Imports
-------------------------------------------------------------------------------
import GhcPrelude
import Control.Applicative
import Control.Monad
import Control.Monad.Fix
import Control.Monad.IO.Class
import Data.Foldable (sequenceA_, foldlM, foldrM)
import Data.List (unzip4, unzip5, zipWith4)
-------------------------------------------------------------------------------
-- Common functions
-- These are used throughout the compiler
-------------------------------------------------------------------------------
{-
Note [Inline @zipWithNM@ functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The inline principle for 'zipWith3M', 'zipWith4M' and 'zipWith3M_' is the same
as for 'zipWithM' and 'zipWithM_' in "Control.Monad", see
Note [Fusion for zipN/zipWithN] in GHC/List.hs for more details.
The 'zipWithM'/'zipWithM_' functions are inlined so that the `zipWith` and
`sequenceA` functions with which they are defined have an opportunity to fuse.
Furthermore, 'zipWith3M'/'zipWith4M' and 'zipWith3M_' have been explicitly
rewritten in a non-recursive way similarly to 'zipWithM'/'zipWithM_', and for
more than just uniformity: after [D5241](https://phabricator.haskell.org/D5241)
for issue #14037, all @zipN@/@zipWithN@ functions fuse, meaning
'zipWith3M'/'zipWith4M' and 'zipWith3M_' now behave like 'zipWithM' and
'zipWithM_', respectively, with regards to fusion.
As such, since there are not any differences between 2-ary 'zipWithM'/
'zipWithM_' and their n-ary counterparts below aside from the number of
arguments, the `INLINE` pragma should be replicated in the @zipWithNM@
functions below as well.
-}
zipWith3M :: Monad m => (a -> b -> c -> m d) -> [a] -> [b] -> [c] -> m [d]
{-# INLINE zipWith3M #-}
-- Inline so that fusion with 'zipWith3' and 'sequenceA' has a chance to fire.
-- See Note [Inline @zipWithNM@ functions] above.
zipWith3M f xs ys zs = sequenceA (zipWith3 f xs ys zs)
zipWith3M_ :: Monad m => (a -> b -> c -> m d) -> [a] -> [b] -> [c] -> m ()
{-# INLINE zipWith3M_ #-}
-- Inline so that fusion with 'zipWith3' and 'sequenceA_' has a chance to fire.
-- See Note [Inline @zipWithNM@ functions] above.
zipWith3M_ f xs ys zs = sequenceA_ (zipWith3 f xs ys zs)
zipWith4M :: Monad m => (a -> b -> c -> d -> m e)
-> [a] -> [b] -> [c] -> [d] -> m [e]
{-# INLINE zipWith4M #-}
-- Inline so that fusion with 'zipWith4' and 'sequenceA' has a chance to fire.
-- See Note [Inline @zipWithNM@ functions] above.
zipWith4M f xs ys ws zs = sequenceA (zipWith4 f xs ys ws zs)
zipWithAndUnzipM :: Monad m
=> (a -> b -> m (c, d)) -> [a] -> [b] -> m ([c], [d])
{-# INLINABLE zipWithAndUnzipM #-}
-- See Note [flatten_many performance] in TcFlatten for why this
-- pragma is essential.
zipWithAndUnzipM f (x:xs) (y:ys)
= do { (c, d) <- f x y
; (cs, ds) <- zipWithAndUnzipM f xs ys
; return (c:cs, d:ds) }
zipWithAndUnzipM _ _ _ = return ([], [])
{-
Note [Inline @mapAndUnzipNM@ functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The inline principle is the same as 'mapAndUnzipM' in "Control.Monad".
The 'mapAndUnzipM' function is inlined so that the `unzip` and `traverse`
functions with which it is defined have an opportunity to fuse, see
Note [Inline @unzipN@ functions] in Data/OldList.hs for more details.
Furthermore, the @mapAndUnzipNM@ functions have been explicitly rewritten in a
non-recursive way similarly to 'mapAndUnzipM', and for more than just
uniformity: after [D5249](https://phabricator.haskell.org/D5249) for Trac
ticket #14037, all @unzipN@ functions fuse, meaning 'mapAndUnzip3M',
'mapAndUnzip4M' and 'mapAndUnzip5M' now behave like 'mapAndUnzipM' with regards
to fusion.
As such, since there are not any differences between 2-ary 'mapAndUnzipM' and
its n-ary counterparts below aside from the number of arguments, the `INLINE`
pragma should be replicated in the @mapAndUnzipNM@ functions below as well.
-}
-- | mapAndUnzipM for triples
mapAndUnzip3M :: Monad m => (a -> m (b,c,d)) -> [a] -> m ([b],[c],[d])
{-# INLINE mapAndUnzip3M #-}
-- Inline so that fusion with 'unzip3' and 'traverse' has a chance to fire.
-- See Note [Inline @mapAndUnzipNM@ functions] above.
mapAndUnzip3M f xs = unzip3 <$> traverse f xs
mapAndUnzip4M :: Monad m => (a -> m (b,c,d,e)) -> [a] -> m ([b],[c],[d],[e])
{-# INLINE mapAndUnzip4M #-}
-- Inline so that fusion with 'unzip4' and 'traverse' has a chance to fire.
-- See Note [Inline @mapAndUnzipNM@ functions] above.
mapAndUnzip4M f xs = unzip4 <$> traverse f xs
mapAndUnzip5M :: Monad m => (a -> m (b,c,d,e,f)) -> [a] -> m ([b],[c],[d],[e],[f])
{-# INLINE mapAndUnzip5M #-}
-- Inline so that fusion with 'unzip5' and 'traverse' has a chance to fire.
-- See Note [Inline @mapAndUnzipNM@ functions] above.
mapAndUnzip5M f xs = unzip5 <$> traverse f xs
-- | Monadic version of mapAccumL
mapAccumLM :: Monad m
=> (acc -> x -> m (acc, y)) -- ^ combining function
-> acc -- ^ initial state
-> [x] -- ^ inputs
-> m (acc, [y]) -- ^ final state, outputs
mapAccumLM _ s [] = return (s, [])
mapAccumLM f s (x:xs) = do
(s1, x') <- f s x
(s2, xs') <- mapAccumLM f s1 xs
return (s2, x' : xs')
-- | Monadic version of mapSnd
mapSndM :: Monad m => (b -> m c) -> [(a,b)] -> m [(a,c)]
mapSndM _ [] = return []
mapSndM f ((a,b):xs) = do { c <- f b; rs <- mapSndM f xs; return ((a,c):rs) }
-- | Monadic version of concatMap
concatMapM :: Monad m => (a -> m [b]) -> [a] -> m [b]
concatMapM f xs = liftM concat (mapM f xs)
-- | Applicative version of mapMaybe
mapMaybeM :: Applicative m => (a -> m (Maybe b)) -> [a] -> m [b]
mapMaybeM f = foldr g (pure [])
where g a = liftA2 (maybe id (:)) (f a)
-- | Monadic version of fmap
fmapMaybeM :: (Monad m) => (a -> m b) -> Maybe a -> m (Maybe b)
fmapMaybeM _ Nothing = return Nothing
fmapMaybeM f (Just x) = f x >>= (return . Just)
-- | Monadic version of fmap
fmapEitherM :: Monad m => (a -> m b) -> (c -> m d) -> Either a c -> m (Either b d)
fmapEitherM fl _ (Left a) = fl a >>= (return . Left)
fmapEitherM _ fr (Right b) = fr b >>= (return . Right)
-- | Monadic version of 'any', aborts the computation at the first @True@ value
anyM :: Monad m => (a -> m Bool) -> [a] -> m Bool
anyM _ [] = return False
anyM f (x:xs) = do b <- f x
if b then return True
else anyM f xs
-- | Monad version of 'all', aborts the computation at the first @False@ value
allM :: Monad m => (a -> m Bool) -> [a] -> m Bool
allM _ [] = return True
allM f (b:bs) = (f b) >>= (\bv -> if bv then allM f bs else return False)
-- | Monadic version of or
orM :: Monad m => m Bool -> m Bool -> m Bool
orM m1 m2 = m1 >>= \x -> if x then return True else m2
-- | Monadic version of foldl that discards its result
foldlM_ :: (Monad m, Foldable t) => (a -> b -> m a) -> a -> t b -> m ()
foldlM_ = foldM_
-- | Monadic version of fmap specialised for Maybe
maybeMapM :: Monad m => (a -> m b) -> (Maybe a -> m (Maybe b))
maybeMapM _ Nothing = return Nothing
maybeMapM m (Just x) = liftM Just $ m x
-- | Monadic version of @when@, taking the condition in the monad
whenM :: Monad m => m Bool -> m () -> m ()
whenM mb thing = do { b <- mb
; when b thing }
-- | Monadic version of @unless@, taking the condition in the monad
unlessM :: Monad m => m Bool -> m () -> m ()
unlessM condM acc = do { cond <- condM
; unless cond acc }
-- | Like 'filterM', only it reverses the sense of the test.
filterOutM :: (Applicative m) => (a -> m Bool) -> [a] -> m [a]
filterOutM p =
foldr (\ x -> liftA2 (\ flg -> if flg then id else (x:)) (p x)) (pure [])
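-- A usage sketch (not part of the original module): 'mapAccumLM' threads a
-- running sum through a list in IO; the final state is the grand total and
-- each output is the sum of the elements seen before it.
exampleRunningSum :: IO (Int, [Int])
exampleRunningSum = mapAccumLM (\acc x -> return (acc + x, acc)) 0 [1, 2, 3]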
|
sdiehl/ghc
|
compiler/utils/MonadUtils.hs
|
bsd-3-clause
| 8,383
| 0
| 12
| 1,842
| 2,238
| 1,204
| 1,034
| 98
| 2
|
{-# LANGUAGE OverloadedStrings #-}
module Network.Panpipes.HTTP.RequestParser (
method
, Method(..)
, version
, Version(..)
, headers
, request
) where
import Control.Applicative
import Data.Attoparsec
import Data.Attoparsec.Char8 hiding (satisfy, takeTill)
import qualified Data.ByteString as ByteString (unpack, pack)
import Data.ByteString hiding (notElem, map, concat)
import Data.Char (ord, chr, digitToInt)
import Data.Word (Word8(..))
import qualified Network.Panpipes.HTTP.Types as Types (PartialRequest(..))
import Network.Panpipes.HTTP.Types hiding (PartialRequest(..))
request :: Parser Types.PartialRequest
request = Types.PartialRequest
<$> method <* space
<*> uri <* space
<*> version <* crlf
<*> headers
method :: Parser Method
method = Option <$ string "OPTIONS"
<|> Get <$ string "GET"
<|> Head <$ string "HEAD"
<|> Delete <$ string "DELETE"
<|> Trace <$ string "TRACE"
<|> Connect <$ string "CONNECT"
<|> word8' 'P' *> (Post <$ string "OST" <|> Put <$ string "UT")
uri :: Parser ByteString
uri = takeTill (== 32) -- 32 == ' '
version :: Parser Version
version = do
string "HTTP/"
Version <$> major <* word8' '.' <*> minor
where
major = digitToInt . chr . fromIntegral <$> satisfy isDigit_w8
minor = major
headers :: Parser [(ByteString, ByteString)]
headers = many $ header <* crlf
where
header = (,) <$> fieldName <*> (word8' ':' *> many space *> fieldValue)
fieldName = ByteString.pack <$> many1 token
fieldValue = ByteString.pack . concat <$> many (many1 (noneOf "\r\n") <|> lws)
lws = [32] <$ (crlf *> many1 (word8' ' ' <|> word8' '\t')) -- 32 == ' '
token :: Parser Word8
token = satisfy isToken
where
isToken w = w `notElem` [0..31] && w /= 127 && w `notElem` separators
separators = map (fromIntegral . ord) "()<>@,;:\\\"/[]?={} \t"
crlf = string "\r\n"
noneOf :: ByteString -> Parser Word8
noneOf bs = satisfy (`notElem` ByteString.unpack bs)
word8' :: Char -> Parser Word8
word8' = word8 . fromIntegral . ord
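-- A usage sketch (not part of the original module): run the parser over a
-- minimal request with attoparsec's 'parseOnly' (assumed to be in scope via
-- "Data.Attoparsec"). The path and header below are made up for illustration.
exampleParse :: Either String Types.PartialRequest
exampleParse = parseOnly request "GET /index.html HTTP/1.1\r\nHost: example.com\r\n"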
|
quantumman/Panpipes
|
Network/Panpipes/HTTP/RequestParser.hs
|
bsd-3-clause
| 2,126
| 0
| 18
| 488
| 686
| 377
| 309
| 54
| 1
|
{-# LANGUAGE Arrows #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Heed.Query
( addSubscription
, allFeedInfo
, allFeeds
, allItemsRead
, feedUpdateInterval
, getRecentItems
, getSubs
, getUserDb
, getUserFeeds
, getUserItems
, getUserUnreadFeedInfo
, insertFeed
, insertItems
, insertUnread
, insertUserPrefName
, printSql
, readFeed
, runFeedInfoQuery
, saveTokenDb
, showSql
, setFeedLastUpdated
, thisFeed
, updateFeedInterval
, updateUserPrefName
, userFeedName
, verifyToken
) where
import Control.Arrow (returnA)
import Control.Lens hiding (from, un)
import Data.Int (Int64)
import Data.Maybe (fromMaybe)
import Data.Profunctor.Product (p2)
import Data.Profunctor.Product.Default (Default)
import qualified Data.Text as T
import Data.Time (UTCTime)
import Heed.Commands
(FeFeedInfo, FeFeedInfo'(..), FeFeedInfoR, FeItemInfo,
FeItemInfo'(..), FeItemInfoR, feedListId, feedListName)
import Heed.Database
import Heed.DbEnums (ItemsDate(..))
import qualified Opaleye as O
import qualified Opaleye.FunctionalJoin as JOIN
import qualified Opaleye.Trans as OT
showSql
:: Default O.Unpackspec a a
=> O.Query a -> String
showSql = fromMaybe "Empty query" . O.showSqlForPostgres
-- | Print a sql query, for debugging only
printSql
:: Default O.Unpackspec a a
=> O.Query a -> IO ()
printSql = putStrLn . showSql
-- | Query used in 'getRecentItems'
getItemsFromQ :: FeedInfoId Int -> UTCTime -> O.Query FeedItemR
getItemsFromQ feedId from =
proc () ->
do items <- O.queryTable feedItemTable -< ()
O.restrict -<
_feedItemFeedId items O..=== O.constant feedId O..&&
_feedItemDate items
O..>= O.pgUTCTime from
returnA -< items
-- | Query used in 'getRecentItems' when items don't have dates
-- We sort by descending time inserted and only return the last x rows
getLastItemsQ :: FeedInfoId Int -> Int -> O.Query FeedItemR
getLastItemsQ feedId number =
O.limit number . O.orderBy (O.descNullsLast _feedItemDate) $
proc () ->
do items <- O.queryTable feedItemTable -< ()
O.restrict -< _feedItemFeedId items O..=== O.constant feedId
returnA -< items
-- | Get items after a certain 'UTCTime'
getRecentItems
:: FeedInfoHR -- ^ Feed information, we need to know if the feed has dates and if not
-- how many items to fetch from the db
-> UTCTime -- ^ Start time if the feed has dates
-> OT.Transaction [FeedItemHR]
getRecentItems feed time =
case _feedHasItemDate feed of
Present -> OT.query $ getItemsFromQ (_feedInfoId feed) time
Missing -> reverse <$> OT.query (getLastItemsQ (_feedInfoId feed) (rounded (_feedNumberItems feed)))
where
-- The number of items isn't always correct, better round up
rounded :: Int -> Int
rounded x = round $ (fromIntegral x :: Double) * 1.3
-- | Insert new feed
insertFeed
:: FeedInfoHW -- ^ Feed information
-> OT.Transaction [FeedInfoHR]
insertFeed newFeed = OT.insertManyReturning feedInfoTable [O.constant newFeed] id
-- | Insert new items
insertItems
:: [FeedItemHW] -- ^ List of items
-> FeedInfoId Int -- ^ Feed id
-> OT.Transaction [FeedItemHR]
insertItems newItems feedId =
OT.insertManyReturning
feedItemTable
(O.constant <$> (newItems & traverse . feedItemFeedId .~ feedId))
id
-- | Update Feed last updated field, so we can schedule updates
setFeedLastUpdated
:: FeedInfoId Int -- ^ Feed id
-> UTCTime -- ^ Updated 'UTCTime'
-> OT.Transaction Int64 -- ^ Number of updated feeds, should always be 1
setFeedLastUpdated feedId time =
OT.update feedInfoTable (setTime time) (\row -> _feedInfoId row O..=== O.constant feedId)
thisFeed :: FeedInfoHW -> O.Query FeedInfoR
thisFeed fresh =
proc () ->
do feeds <- O.queryTable feedInfoTable -< ()
O.restrict -<
_feedInfoUrl feeds O..== O.constant (_feedInfoUrl fresh)
returnA -< feeds
runFeedInfoQuery :: O.Query FeedInfoR -> OT.Transaction [FeedInfoHR]
runFeedInfoQuery = OT.query
getUser :: T.Text -> O.Query UserR
getUser un =
proc () ->
do user <- O.queryTable userTable -< ()
O.restrict -< _userName user O..=== O.constant un
returnA -< user
getUserDb :: T.Text -> OT.Transaction (Maybe UserH)
getUserDb un = OT.queryFirst (getUser un)
--saveTokenDb :: T.Text -> UserId Int -> OT.Transaction Int64
saveTokenDb :: T.Text -> UserId Int -> OT.Transaction Int64
--saveTokenDb token uid = OT.update authTokenTable (setToken token) (filterUser uid)
saveTokenDb token uid =
OT.update authTokenTable (\x -> x & authTokenToken .~ O.pgStrictText token) (filterUser uid)
filterUser :: UserId Int -> AuthTokenR -> O.Column O.PGBool
filterUser userid auth = _authTokenHeedUserId auth O..=== O.constant userid
verifyToken :: T.Text -> OT.Transaction (Maybe UserH)
verifyToken token =
if token == "invalid"
then return Nothing
else OT.queryFirst (tokenToUser token)
tokenToUser :: T.Text -> O.Query UserR
tokenToUser token =
proc () ->
do users <- O.queryTable userTable -< ()
tokens <- O.queryTable authTokenTable -< ()
O.restrict -<
_userId users O..=== _authTokenHeedUserId tokens O..&&
(_authTokenToken tokens O..=== O.constant token)
returnA -< users
getUserFeedsQ :: UserId Int -> O.Query FeedInfoR
getUserFeedsQ userid =
proc () ->
do sub <- O.queryTable subscriptionTable -< ()
feedWithCustomName <- myLeftJoin -< ()
O.restrict -< _subscriptionUserId sub O..=== O.constant userid
O.restrict -<
_feedInfoId feedWithCustomName O..=== _subscriptionFeedId sub
returnA -< feedWithCustomName
where
myLeftJoin = JOIN.leftJoinF unite id joinOn (O.queryTable feedInfoTable) (getAllUserPref userid)
unite info pref = info & feedInfoName .~ (pref ^. prefName)
joinOn info pref = (info ^. feedInfoId) O..=== (pref ^. prefFeedId)
type FeedInfoIdGrouped = O.Column O.PGInt4
type Count = O.Column O.PGInt8
getUserUnreadItems :: UserId Int -> O.Query (FeedInfoIdGrouped, Count)
getUserUnreadItems userid =
O.aggregate (p2 (O.groupBy, O.count)) $
proc () ->
do unread <- O.queryTable unreadItemTable -< ()
item <- O.queryTable feedItemTable -< ()
O.restrict -< _unreadUserId unread O..=== O.constant userid
O.restrict -< _feedItemId item O..=== _unreadFeedItemId unread
returnA -< (item ^. feedItemFeedId . getFeedInfoId, O.pgInt8 1)
-- Sort after in haskell land since we don't want to use sql's sorting function
getAllUserFeedInfo :: UserId Int -> O.Query FeFeedInfoR
getAllUserFeedInfo uid =
proc () ->
do allfeeds <- getUserFeedsQ uid -< ()
(fIId, unreadCount) <- getUserUnreadItems uid -< ()
let fIName = _feedInfoName allfeeds
O.restrict -< fIId O..=== (_getFeedInfoId . _feedInfoId $ allfeeds)
returnA -< FeFeedInfo' fIId fIName unreadCount
getAllUserPref :: UserId Int -> O.Query UserFeedInfoPrefR
getAllUserPref uid =
proc () ->
do pref <- O.queryTable userPrefTable -< ()
O.restrict -< O.constant uid O..=== (pref ^. prefUserId)
returnA -< pref
getUserUnreadFeedInfo :: UserId Int -> OT.Transaction [FeFeedInfo]
getUserUnreadFeedInfo userid =
OT.query $ JOIN.leftJoinF unite id joinOn (getAllUserFeedInfo userid) (getAllUserPref userid)
where
unite info pref = info & feedListName .~ (pref ^. prefName)
joinOn info pref = (FeedInfoId $ info ^. feedListId) O..=== pref ^. prefFeedId
getUserFeeds :: UserId Int -> OT.Transaction [FeedInfoHR]
getUserFeeds uid = OT.query $ getUserFeedsQ uid
getFeedItemsIds :: FeedInfoId Int -> O.Query FeedItemIdColumnR
getFeedItemsIds fid =
proc () ->
do allItems <- O.queryTable feedItemTable -< ()
O.restrict -< O.constant fid O..=== _feedItemFeedId allItems
returnA -< (_feedItemId allItems)
insertUnread :: [FeedItemHR] -> [UserId Int] -> OT.Transaction Int64
insertUnread newItems uids = OT.insertMany unreadItemTable $ O.constant <$> pairings
where
pairings = do
item <- newItems
user <- uids
return $ UnreadItem (_feedItemId item) user
addSubscription :: UserId Int -> FeedInfoId Int -> OT.Transaction Int64
addSubscription uid fid = OT.insertMany subscriptionTable [O.constant $ Subscription fid uid]
getUserItems :: UserId Int -> FeedInfoId Int -> OT.Transaction [FeItemInfo]
getUserItems uid fid = OT.query (getUserItemsQ (O.constant uid) (O.constant fid))
getUserItemsQ :: UserIdColumnR -> FeedInfoIdColumnR -> O.Query FeItemInfoR
getUserItemsQ uid fid =
O.orderBy (O.asc _itemInfoDate) $
proc () ->
do allItems <- O.queryTable feedItemTable -< ()
allUnread <- O.queryTable unreadItemTable -< ()
O.restrict -< uid O..=== _unreadUserId allUnread
O.restrict -< fid O..=== _feedItemFeedId allItems
O.restrict -<
_unreadFeedItemId allUnread O..=== _feedItemId allItems
let unreadItemId = _getFeedItemId . _feedItemId $ allItems
unreadTitle = _feedItemTitle allItems
unreadLink = _feedItemUrl allItems
unreadDate = _feedItemDate allItems
unreadComments = _feedItemComments allItems
returnA -<
FeItemInfo' unreadItemId unreadTitle unreadLink unreadDate
unreadComments
(O.pgBool False)
readFeed :: UserId Int -> FeedItemId Int -> OT.Transaction Int64
readFeed userid itemid =
OT.delete unreadItemTable $ \cols ->
(_unreadUserId cols O..=== O.constant userid) O..&&
(_unreadFeedItemId cols O..=== O.constant itemid)
allFeeds :: OT.Transaction [FeedInfoHR]
allFeeds = OT.query $ O.queryTable feedInfoTable
getSubs :: FeedInfoId Int -> OT.Transaction [UserId Int]
getSubs fid =
OT.query $
proc () ->
do allSubs <- O.queryTable subscriptionTable -< ()
O.restrict -< O.constant fid O..=== _subscriptionFeedId allSubs
returnA -< _subscriptionUserId allSubs
allItemsRead :: FeedInfoId Int -> UserId Int -> OT.Transaction Int64
allItemsRead fid uid = do
itemsIds :: [FeedItemIdH] <- OT.query $ getFeedItemsIds fid
let feedUnreadFilter (UnreadItem unreadIid unreadUid) =
unreadUid O..=== O.constant uid O..&&
O.in_ (fmap (O.constant . _getFeedItemId) itemsIds) (_getFeedItemId unreadIid)
OT.delete unreadItemTable feedUnreadFilter
allFeedInfo :: FeedInfoIdH -> OT.Transaction [FeedInfoHR]
allFeedInfo fid =
OT.query $
proc () ->
do feed <- O.queryTable feedInfoTable -< ()
O.restrict -< (feed ^. feedInfoId) O..=== O.constant fid
returnA -< feed
userFeedName :: FeedInfoIdH -> UserId Int -> OT.Transaction [UserFeedInfoPrefHR]
userFeedName fid userid =
OT.query $
proc () ->
do pref <- O.queryTable userPrefTable -< ()
O.restrict -<
((pref ^. prefUserId) O..=== O.constant userid) O..&&
((pref ^. prefFeedId) O..=== O.constant fid)
returnA -< pref
insertUserPrefName :: UserFeedInfoPrefHW -> OT.Transaction Int64
insertUserPrefName pref = OT.insertMany userPrefTable [O.constant pref]
updateUserPrefName :: UserFeedInfoPrefHW -> OT.Transaction Int64
updateUserPrefName pref = OT.update userPrefTable (const (O.constant pref)) correctRow
where
correctRow row =
row ^. prefUserId O..=== O.constant (pref ^. prefUserId) O..&& row ^. prefFeedId O..===
O.constant (pref ^. prefFeedId)
feedUpdateInterval :: FeedInfoIdH -> OT.Transaction [Int]
feedUpdateInterval fid = fmap _feedInfoUpdateEvery <$> allFeedInfo fid
updateFeedInterval :: FeedInfoIdH -> Int -> OT.Transaction [FeedInfoHR]
updateFeedInterval fid interval = OT.updateReturning feedInfoTable rToW correctRow id
where
correctRow row = row ^. feedInfoId O..=== O.constant fid
rToW x =
x & feedInfoUpdateEvery .~ O.constant interval & feedInfoId .~ O.constant (Just <$> fid)
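-- A small composition sketch (hypothetical helper, not part of the export
-- list above): mark every item of a feed as read for a user, then return the
-- refreshed unread counters for the front end.
markFeedReadAndRefresh :: FeedInfoId Int -> UserId Int -> OT.Transaction [FeFeedInfo]
markFeedReadAndRefresh fid uid = do
    _ <- allItemsRead fid uid
    getUserUnreadFeedInfo uid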
|
Arguggi/heed
|
heed-backend/src/Heed/Query.hs
|
bsd-3-clause
| 11,986
| 14
| 16
| 2,434
| 3,574
| 1,769
| 1,805
| -1
| -1
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
module Main where
import Graphics.VR.OpenVR
import Graphics.GL.Pal
import Graphics.UI.GLFW.Pal
import Data.Time
import Control.Monad.State
import Control.Lens.Extra
import Halive.Utils
import CubeUniforms
import Cube
import Data.Maybe
data World = World
{ wldKeyboardShowing :: Bool
}
newWorld :: World
newWorld = World
{ wldKeyboardShowing = False
}
data Hand = Hand
{ hndGrip :: Bool
, hndStart :: Bool
, hndTrigger :: GLfloat
, hndXY :: V2 GLfloat
, hndMatrix :: M44 GLfloat
} deriving Show
worldCubes :: [Cube]
worldCubes = [cubeAt x y z | x <- [-5..5], y <- [-5..5], z <- [-5..5] ]
where
cubeAt x y z = Cube
{ _cubMatrix = transformationFromPose $ newPose { _posPosition = V3 x y z }
, _cubColor = color
}
where color = V4 ((y + 2) / 4) 0.4 ((x+2)/4) 1 -- increase redness as y goes up, blueness as x goes up
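-- Size check (sketch): the comprehension above yields an 11 x 11 x 11 grid,
-- i.e. 1331 cubes spaced one unit apart; the geometry built in 'main' makes
-- each cube 0.1 units on a side.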
logIO :: MonadIO m => String -> m ()
logIO = liftIO . putStrLn
main :: IO ()
main = do
(window, _, events) <- reacquire 0 $ createWindow "OpenVR" 1024 768
cubeProg <- createShaderProgram "app/cube.vert" "app/cube.frag"
cubeGeo <- cubeGeometry (0.1 :: V3 GLfloat) (V3 1 1 1)
cubeShape <- makeShape cubeGeo cubeProg
let _ = cubeShape :: Shape Uniforms
glEnable GL_DEPTH_TEST
useProgram (sProgram cubeShape)
mOpenVR <- reacquire 1 $ do
hmdPresent <- isHMDPresent
if hmdPresent then createOpenVR else return Nothing
-- let mOpenVR = Nothing
_ <- flip runStateT newWorld $ case mOpenVR of
Just openVR -> do
forM_ (ovrEyes openVR) $ \eye -> case eiEye eye of
LeftEye -> do
let (_, _, w, h) = eiViewport eye
setWindowSize window (fromIntegral w `div` 2) (fromIntegral h `div` 2)
_ -> return ()
-- If we leave the keyboard showing when we quit an app, we lose input. Hide it at the start to get it back.
hideKeyboard
openVRLoop window events cubeShape openVR
Nothing -> flatLoop window events cubeShape
putStrLn "Done!"
openVRLoop :: (MonadState World m, MonadIO m) => Window -> Events -> Shape Uniforms -> OpenVR -> m ()
openVRLoop window events cubeShape openVR@OpenVR{..} = whileWindow window $ do
_ <- pollNextEvent ovrSystem
(headPose, handPosesByRole) <- waitGetPoses openVR
hands <- forM handPosesByRole $ \(controllerRole, pose) -> do
(x, y, trigger, grip, start) <- getControllerState ovrSystem controllerRole
let hand = Hand
{ hndMatrix = pose
, hndXY = realToFrac <$> V2 x y
, hndTrigger = realToFrac trigger
, hndGrip = grip
, hndStart = start
}
when (trigger > 0.5) $
triggerHapticPulse ovrSystem controllerRole 0 100
when (controllerRole == TrackedControllerRoleRightHand) $ do
keyboardShowing <- gets wldKeyboardShowing
when (grip && not keyboardShowing) $ do
showKeyboard
modify (\world -> world { wldKeyboardShowing = True })
when (not grip) $
modify (\world -> world { wldKeyboardShowing = False })
return hand
let handCubes = flip concatMap hands $ \Hand{..} ->
[ Cube
{ _cubMatrix = hndMatrix !*! translateMatrix (V3 0 0 0.05) !*! scaleMatrix (V3 0.4 0.4 1.6)
, _cubColor = V4 1 0 0 1
}
, Cube
{ _cubMatrix = hndMatrix !*! scaleMatrix (V3 0.1 0.1 0.1) !*! translateMatrix (V3 (hndXY ^. _x * 0.5) 0.4 (-hndXY ^. _y) * 0.5)
, _cubColor = V4 0 1 0 1
}
, Cube
{ _cubMatrix = hndMatrix !*! scaleMatrix (V3 0.1 0.1 0.1) !*! translateMatrix (V3 0 (-hndTrigger - 0.2) 0)
, _cubColor = V4 0 0 1 1
}
, Cube
{ _cubMatrix = hndMatrix !*! scaleMatrix (V3 0.1 0.1 0.1) !*! translateMatrix (V3 0 0.2 0.4)
, _cubColor = if hndStart then V4 1 1 1 1 else V4 0 1 1 1
}
, Cube
{ _cubMatrix = hndMatrix !*! scaleMatrix (V3 0.5 0.1 0.1) !*! translateMatrix (V3 0 0 0.5)
, _cubColor = if hndGrip then V4 1 1 1 1 else V4 1 1 0 1
}
]
now <- (/ 2) . (+ 1) . sin . realToFrac . utctDayTime <$> liftIO getCurrentTime
glClearColor 0.2 0.1 (now * 0.3) 1
let viewM44 = inv44 headPose
-- Render each eye, with multisampling
forM_ ovrEyes $ \EyeInfo{..} -> withMultisamplingFramebuffer eiMultisampleFramebuffer $ do
glClear (GL_COLOR_BUFFER_BIT .|. GL_DEPTH_BUFFER_BIT)
let (x, y, w, h) = eiViewport
finalView = eiEyeHeadTrans !*! viewM44
glViewport x y w h
-- Render the scene
render cubeShape eiProjection finalView (handCubes ++ worldCubes)
-- Submit frames after rendering both
forM_ ovrEyes $ \EyeInfo{..} -> do
let MultisampleFramebuffer{..} = eiMultisampleFramebuffer
submitFrameForEye ovrCompositor eiEye (unTextureID mfbResolveTextureID)
-- Finally, mirror.
forM_ (listToMaybe ovrEyes) $ \eye -> do
(winW, winH) <- getWindowSize window
mirrorOpenVREyeToWindow eye (fromIntegral winW) (fromIntegral winH)
evs <- gatherEvents events
forM_ evs $ closeOnEscape window
swapBuffers window
flatLoop :: (MonadState World m, MonadIO m) => Window -> Events -> Shape Uniforms -> m ()
flatLoop window events cubeShape = do
let viewMat = lookAt (V3 0 2 0) (V3 0 0 3) (V3 0 1 0)
projectionMat <- getWindowProjection window 45 0.1 1000
(x,y,w,h) <- getWindowViewport window
whileWindow window $ do
evs <- gatherEvents events
forM_ evs $ closeOnEscape window
now <- (/ 2) . (+ 1) . sin . realToFrac . utctDayTime <$> liftIO getCurrentTime
glClearColor now 0.2 0.5 1
glClear (GL_COLOR_BUFFER_BIT .|. GL_DEPTH_BUFFER_BIT)
glViewport x y w h
render cubeShape projectionMat viewMat worldCubes
swapBuffers window
render :: (MonadIO m)
=> Shape Uniforms
-> M44 GLfloat
-> M44 GLfloat
-> [Cube]
-> m ()
render cubeShape projection viewMat cubes = do
let Uniforms{..} = sUniforms cubeShape
projectionView = projection !*! viewMat
-- We extract eyePos from the view matrix to get eye-to-head offsets baked in
eyePos = inv44 viewMat ^. translation
uniformV3 uCamera eyePos
withVAO (sVAO cubeShape) $ forM_ cubes $ \cube -> do
uniformV4 uDiffuse (cube ^. cubColor)
draw (cube ^. cubMatrix) projectionView cubeShape
draw :: MonadIO m => M44 GLfloat -> M44 GLfloat -> Shape Uniforms -> m ()
draw model projectionView shape = do
let Uniforms{..} = sUniforms shape
uniformM44 uModelViewProjection (projectionView !*! model)
uniformM44 uModel model
let indexCount = geoIndexCount (sGeometry shape)
glDrawElements GL_TRIANGLES indexCount GL_UNSIGNED_INT nullPtr
|
lukexi/openvr-hs
|
app/Main.hs
|
bsd-3-clause
| 7,182
| 0
| 24
| 2,170
| 2,282
| 1,139
| 1,143
| 147
| 4
|
{-# LANGUAGE TupleSections, OverloadedStrings, QuasiQuotes, TemplateHaskell, TypeFamilies, RecordWildCards,
DeriveGeneric ,MultiParamTypeClasses ,FlexibleInstances #-}
module Protocol.ROC.PointTypes.PointType93 where
import GHC.Generics
import qualified Data.ByteString as BS
import Data.Word
import Data.Binary
import Data.Binary.Get
import Protocol.ROC.Utils
import Data.Time.Clock.POSIX
data PointType93 = PointType93 {
pointType93LicenseInstallationStatus :: !PointType93LicenseInstallationStatus
,pointType93LicenseNumber :: !PointType93LicenseNumber
,pointType93ApplicationName :: !PointType93ApplicationName
,pointType93ApplicationProvider :: !PointType93ApplicationProvider
,pointType93ApplicationCode :: !PointType93ApplicationCode
,pointType93ApplicationVersion :: !PointType93ApplicationVersion
,pointType93QuantityTotal :: !PointType93QuantityTotal
,pointType93QuantityRemaining :: !PointType93QuantityRemaining
,pointType93ExpirationData :: !PointType93ExpirationData
,pointType93LicenseValidityState :: !PointType93LicenseValidityState
,pointType93LiceseCreationDate :: !PointType93LiceseCreationDate
} deriving (Eq, Show, Generic)
type PointType93LicenseInstallationStatus = Bool
type PointType93LicenseNumber = Word8
type PointType93ApplicationName = BS.ByteString
type PointType93ApplicationProvider = BS.ByteString
type PointType93ApplicationCode = Word16
type PointType93ApplicationVersion = BS.ByteString
type PointType93QuantityTotal = Word8
type PointType93QuantityRemaining = Word8
type PointType93ExpirationData = POSIXTime
type PointType93LicenseValidityState = Word8
type PointType93LiceseCreationDate = POSIXTime
pointType93Parser :: Get PointType93
pointType93Parser = do
licenseInstallationStatus <- anyButNull
licenseNumber <- getWord8
applicationName <- getByteString 20
applicationProvider <- getByteString 20
applicationCode <- getWord16le
applicationVersion <- getByteString 10
quantityTotal <- getWord8
quantityRemaining <- getWord8
expirationData <- getPosixTime
licenseValidityState <- getWord8
liceseCreationDate <- getPosixTime
return $ PointType93 licenseInstallationStatus licenseNumber applicationName applicationProvider applicationCode applicationVersion quantityTotal quantityRemaining
expirationData licenseValidityState liceseCreationDate
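-- Decoding sketch (assumption: the caller has the raw point-type bytes as a
-- lazy ByteString); 'runGet' from Data.Binary.Get, imported above, would
-- drive the parser, e.g.
--
--   runGet pointType93Parser rawBytes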
|
jqpeterson/roc-translator
|
src/Protocol/ROC/PointTypes/PointType93.hs
|
bsd-3-clause
| 3,462
| 0
| 9
| 1,352
| 353
| 196
| 157
| 71
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE BangPatterns #-}
module Quote
( QOption(..)
, proccess
) where
import Control.Monad.State
import Data.ByteString (ByteString)
import Data.Enumerator as E hiding (map, span)
import qualified Data.Enumerator.List as EL
import Data.Time
import qualified Data.Sequence as S
import qualified Data.ListLike as LL
import Network.Pcap
import Network.Pcap.Enumerator
import Quote.Data
import Quote.Parser
data QOption = QOption
{ verbose :: Bool
, reorder :: Bool
, ports :: [Int]
, code :: ByteString
, pcap :: FilePath
}
proccess :: QOption -> IO ()
proccess opt = runStateT (run_ iter) S.empty >> return ()
where
iter = enumOffline (pcap opt) $$ only opt =$ conv2Quote =$ printQ
printQ = if reorder opt then printQuoteReorder else printQuote
only :: MonadIO m => QOption -> Enumeratee (PktHdr, ByteString) (PktHdr, ByteString) m b
only opt = EL.filter $ \(_, bs) ->
let (dest, code') = portAndCode bs
in (elem dest (ports opt)) && code' == code opt
conv2Quote :: MonadIO m => Enumeratee (PktHdr, ByteString) Quote m b
conv2Quote = EL.map $ uncurry parseFrame
type QHistory = S.Seq Quote
printQuote :: Iteratee Quote (StateT QHistory IO) ()
printQuote = do
mq <- EL.head
case mq of
Nothing -> return ()
Just q -> do
liftIO $ print q
printQuote
printQuoteReorder :: Iteratee Quote (StateT QHistory IO) ()
printQuoteReorder = do
mq <- EL.head
case mq of
Nothing -> lift get >>= pr
Just q -> do
qs <- lift get
let (!old, new) = flip LL.span qs $ \x ->
(acceptTime x) <= addUTCTime (-3) (pktTime q)
!new' = add new q
lift $ put new'
pr old
printQuoteReorder
where
pr xs = liftIO $ LL.mapM_ print xs
add v e | S.null v = S.singleton e
| e >= LL.last v = LL.snoc v e
| otherwise = LL.snoc (add (LL.init v) e) (LL.last v)
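-- A minimal usage sketch; the port, issue code and file name below are
-- hypothetical placeholders, not values taken from the original project.
exampleRun :: IO ()
exampleRun = proccess QOption
  { verbose = False
  , reorder = True
  , ports   = [15515]
  , code    = "B6034"
  , pcap    = "sample.pcap"
  }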
|
cutsea110/tsr-test
|
Quote.hs
|
bsd-3-clause
| 2,099
| 0
| 20
| 581
| 749
| 388
| 361
| 60
| 2
|
module ProjectEuler (
euler
) where
import qualified Problem1
import qualified Problem2
import qualified Problem3
import qualified Problem4
import qualified Problem5
import qualified Problem6
import qualified Problem7
import qualified Problem8
import qualified Problem9
import qualified Problem10
import qualified Problem11
import qualified Problem12
import qualified Problem13
import qualified Problem14
import qualified Problem15
import qualified Problem16
import qualified Problem17
import qualified Problem18
import qualified Problem19
import qualified Problem20
import qualified Problem21
import qualified Problem22
import qualified Problem24
import qualified Problem25
import qualified Problem28
import qualified Problem29
import qualified Problem30
import qualified Problem31
import qualified Problem33
import qualified Problem34
import qualified Problem35
import qualified Problem36
import qualified Problem37
import qualified Problem40
import qualified Problem41
import qualified Problem42
import qualified Problem43
import qualified Problem48
import qualified Problem50
import qualified Problem53
import qualified Problem56
import qualified Problem59
import qualified Problem67
import qualified Problem71
import qualified Problem73
import qualified Problem79
import qualified Problem97
import qualified Problem99
euler :: Int -> Maybe Integer
euler 1 = Just Problem1.solution
euler 2 = Just Problem2.solution
euler 3 = Just Problem3.solution
euler 4 = Just Problem4.solution
euler 5 = Just Problem5.solution
euler 6 = Just Problem6.solution
euler 7 = Just Problem7.solution
euler 8 = Just Problem8.solution
euler 9 = Just Problem9.solution
euler 10 = Just Problem10.solution
euler 11 = Just Problem11.solution
euler 12 = Just Problem12.solution
euler 13 = Just Problem13.solution
euler 14 = Just Problem14.solution
euler 15 = Just Problem15.solution
euler 16 = Just Problem16.solution
euler 17 = Just Problem17.solution
euler 18 = Just Problem18.solution
euler 19 = Just Problem19.solution
euler 20 = Just Problem20.solution
euler 21 = Just Problem21.solution
euler 22 = Just Problem22.solution
euler 24 = Just Problem24.solution
euler 25 = Just Problem25.solution
euler 28 = Just Problem28.solution
euler 29 = Just Problem29.solution
euler 30 = Just Problem30.solution
euler 31 = Just Problem31.solution
euler 33 = Just Problem33.solution
euler 34 = Just Problem34.solution
euler 35 = Just Problem35.solution
euler 36 = Just Problem36.solution
euler 37 = Just Problem37.solution
euler 40 = Just Problem40.solution
euler 41 = Just Problem41.solution
euler 42 = Just Problem42.solution
euler 43 = Just Problem43.solution
euler 48 = Just Problem48.solution
euler 50 = Just Problem50.solution
euler 53 = Just Problem53.solution
euler 56 = Just Problem56.solution
euler 59 = Just Problem59.solution
euler 67 = Just Problem67.solution
euler 71 = Just Problem71.solution
euler 73 = Just Problem73.solution
euler 79 = Just Problem79.solution
euler 97 = Just Problem97.solution
euler 99 = Just Problem99.solution
euler __ = Nothing -- Problem hasn't been solved yet
-- == PLAYGROUND ==
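-- Usage sketch: @euler 1@ returns @Just Problem1.solution@, while a number
-- with no corresponding module (e.g. @euler 23@) falls through to @Nothing@.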
|
quchen/HaskellEuler
|
src/ProjectEuler.hs
|
bsd-3-clause
| 3,123
| 0
| 6
| 445
| 847
| 450
| 397
| 100
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Program.Controllers.GUI
-- Copyright : (c) Artem Chirkin
-- License : MIT
--
-- Maintainer : Artem Chirkin <chirkin@arch.ethz.ch>
-- Stability : experimental
--
-- Foreign imports for all GUI html elements on qua-server side
--
-----------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Program.Controllers.GUI
( registerLoadingFile
, registerClearGeometry
, displayScenarios
, registerAskLuciForScenario
, registerGetScenarioList
, registerUserConnectToLuci
, showLuciConnected
, showLuciConnecting
, showLuciConnectForm
, registerSaveScenario
, toggleSaveScenarioButton
, registerServiceClear
, registerServiceRun
, toggleServiceClear
, registerColorizeProperty
, showInfo
, registerSubmit
, registerResetCamera
, registerRefreshServiceList
, updateServiceNames
, registerSetActiveService
, registerUpdateSParamValue
, showScenarioServiceResultString
, showScenarioServiceResultPng
) where
--import Control.Concurrent.MVar
import JsHs (JSString, JSVal, LikeJS(..))
import JsHs.Callback
import JsHs.Array (Array)
import JsHs.TypedArray (ArrayBuffer)
import Data.Geometry.Structure.Feature
import Program.Types
-- | Registers two callbacks; comes from Handler.Home.PanelGeometry.
-- onSuccess :: JSON -> IO ()
-- onFailure :: JSString -> IO ()
-- return :: IO ()
registerLoadingFile :: (Either JSString SomeJSONInput -> IO ()) -> IO ()
registerLoadingFile callback = do
callbackSuccess <- asyncCallback1 $ callback . Right . asLikeJS
callbackFailure <- asyncCallback1 $ callback . Left . asLikeJS
js_registerLoadingFile callbackSuccess callbackFailure
foreign import javascript safe "registerLoadingFile($1,$2)"
js_registerLoadingFile :: Callback (JSVal -> IO ()) -> Callback (JSVal -> IO ()) -> IO ()
-- | Registers one callback; comes from Handler.Home.PanelGeometry.
-- onClick :: IO ()
-- return :: IO ()
registerClearGeometry :: (() -> IO ()) -> IO ()
registerClearGeometry callback = do
call <- asyncCallback $ callback ()
js_registerClearGeometry call
foreign import javascript safe "registerClearGeometry($1)"
js_registerClearGeometry :: Callback (IO ()) -> IO ()
-- | Call this when scenarios are parsed; comes from Handler.Home.PanelGeometry.
-- xs :: [{ScenarioDescription, as-is}]
-- return :: IO ()
foreign import javascript safe "displayScenarios($1['scenarios'])" displayScenarios :: JSVal -> IO ()
-- | Registers one callback; comes from Handler.Home.PanelGeometry.
-- h :: ScID -> IO ()
-- return :: IO ()
registerAskLuciForScenario :: (ScenarioId -> ScenarioName -> IO ()) -> IO ()
registerAskLuciForScenario c = asyncCallback2 (\i s -> c (asLikeJS i) (asLikeJS s)) >>= js_registerAskLuciForScenario
foreign import javascript safe "registerAskLuciForScenario($1)" js_registerAskLuciForScenario :: Callback (JSVal -> JSVal -> IO ()) -> IO ()
-- | Registers one callback; comes from Handler.Home.PanelGeometry.
-- onClick :: IO ()
-- return :: IO ()
registerGetScenarioList :: (() -> IO ()) -> IO ()
registerGetScenarioList c = asyncCallback (c ()) >>= js_registerGetScenarioList
foreign import javascript safe "registerGetScenarioList($1)" js_registerGetScenarioList :: Callback (IO ()) -> IO ()
-- | Registers one callback; comes from Handler.Home.PanelServices.
-- onClick :: JSString -> IO () -- address of websocket host
-- return :: IO ()
registerUserConnectToLuci :: (JSString -> IO ()) -> IO ()
registerUserConnectToLuci c = asyncCallback1 (c . asLikeJS) >>= js_registerUserConnectToLuci
foreign import javascript safe "registerUserConnectToLuci($1)" js_registerUserConnectToLuci :: Callback (JSVal -> IO ()) -> IO ()
-- | Display "luci connected message"; comes from Handler.Home.PanelServices.
-- connectedHost :: JSString -- address of websocket host
-- return :: IO ()
foreign import javascript safe "showLuciConnected($1)" showLuciConnected :: JSString -> IO ()
-- | Display "luci connecting message"; comes from Handler.Home.PanelServices.
-- connectedHost :: JSString -- address of websocket host
-- return :: IO ()
foreign import javascript safe "showLuciConnecting($1)" showLuciConnecting :: JSString -> IO ()
-- | Display "connect to luci" form; comes from Handler.Home.PanelServices.
-- defaultHost :: JSString -- default address of websocket host
-- return :: IO ()
foreign import javascript safe "showLuciConnectForm($1)" showLuciConnectForm :: JSString -> IO ()
-- | Registers one callback; comes from Handler.Home.PanelGeometry.
-- sendMsg :: JSString -> IO ()
-- return :: IO ()
registerSaveScenario :: (ScenarioName -> IO ()) -> IO ()
registerSaveScenario c = asyncCallback1 (c . asLikeJS) >>= js_registerSaveScenario
foreign import javascript safe "registerSaveScenario($1)" js_registerSaveScenario :: Callback (JSVal -> IO ()) -> IO ()
-- | call it to setup scenario buttons state; comes from Handler.Home.PanelGeometry.
-- showButton :: Bool -- whether to show "save scenario" button
-- scName :: JSString -- name of the scenario displayed on a panel
-- return :: IO ()
foreign import javascript safe "toggleSaveScenarioButton($1, $2)" toggleSaveScenarioButton :: Bool -> ScenarioName -> IO ()
-- | Registers one callback; comes from Handler.Home.UIButtons.
-- onClick :: IO ()
-- return :: IO ()
registerServiceClear :: (() -> IO ()) -> IO ()
registerServiceClear c = asyncCallback (c ()) >>= js_registerServiceClear
foreign import javascript safe "registerServiceClear($1)" js_registerServiceClear :: Callback (IO ()) -> IO ()
-- | Registers one callback; comes from Handler.Home.UIButtons.
-- onClick :: IO ()
-- return :: IO ()
registerServiceRun :: (() -> IO ()) -> IO ()
registerServiceRun c = asyncCallback (c ()) >>= js_registerServiceRun
foreign import javascript safe "registerServiceRun($1)" js_registerServiceRun :: Callback (IO ()) -> IO ()
-- | Shows or hides button "clear"; comes from Handler.Home.UIButtons.
-- state :: Bool
-- return :: IO ()
foreign import javascript safe "toggleServiceClear($1)" toggleServiceClear :: Bool -> IO ()
-- | Registers one callback; comes from Handler.Home.PanelInfo.
-- f :: JSString -> IO ()
-- return :: IO ()
registerColorizeProperty :: (Maybe JSString -> IO ()) -> IO ()
registerColorizeProperty c = asyncCallback1 (c . f . asLikeJS) >>= js_registerColorizeProperty
where
f (Just "") = Nothing
f x = x
foreign import javascript safe "registerColorizeProperty($1)" js_registerColorizeProperty :: Callback (JSVal -> IO ()) -> IO ()
-- | Show info (pairs of key-value); comes from Handler.Home.PanelInfo.
-- obj :: Object -- all property names and values inside an object
-- return :: IO ()
foreign import javascript safe "showInfo($1)" showInfo :: JSVal -> IO ()
-- | Registers one callback; comes from Handler.Home.UIButtons.
-- onClick :: (submitUrl -> ScenarioJSON -> Image -> IO ()) -> IO ()
-- return :: IO ()
registerSubmit :: (((JSString, ScenarioJSON, JSVal) -> IO ()) -> IO ()) -> IO ()
registerSubmit c = asyncCallback1 (c . (\f (u,d,i) -> f u d i) . js_uncallback3) >>= js_registerSubmit
foreign import javascript safe "registerSubmit($1)" js_registerSubmit :: Callback (JSVal -> IO ()) -> IO ()
foreign import javascript safe "$1($2,$3,$4)"
js_uncallback3 :: JSVal -> JSString -> ScenarioJSON -> JSVal -> IO ()
-- | Registers one callback; comes from Handler.Home.UIButtons.
-- onClick :: IO ()
-- return :: IO ()
registerResetCamera :: (() -> IO ()) -> IO ()
registerResetCamera c = asyncCallback (c ()) >>= js_registerResetCamera
foreign import javascript safe "registerResetCamera($1)" js_registerResetCamera :: Callback (IO ()) -> IO ()
-- | Registers one callback; comes from Handler.Home.PanelServices.
-- onClick :: IO ()
-- return :: IO ()
registerRefreshServiceList :: (() -> IO ()) -> IO ()
registerRefreshServiceList c = asyncCallback (c ()) >>= js_registerRefreshServiceList
foreign import javascript safe "registerRefreshServiceList($1)" js_registerRefreshServiceList :: Callback (IO ()) -> IO ()
-- | Updates visible service list; comes from Handler.Home.PanelServices.
-- xs :: [ServiceName]
-- return :: IO ()
foreign import javascript safe "updateServiceNames($1)" updateServiceNames :: Array ServiceName -> IO ()
-- | Registers one callback; comes from Handler.Home.PanelServices.
-- setActiveService :: String -> IO ()
-- return :: IO ()
registerSetActiveService :: (ServiceName -> IO ()) -> IO ()
registerSetActiveService c = asyncCallback1 (c . asLikeJS) >>= js_registerSetActiveService
foreign import javascript safe "registerSetActiveService($1)" js_registerSetActiveService :: Callback (JSVal -> IO ()) -> IO ()
-- | Registers one callback; comes from Handler.Home.PanelServices.
-- updateParam :: String -> JSVal -> IO ()
-- return :: IO ()
registerUpdateSParamValue :: (JSString -> JSVal -> IO ()) -> IO ()
registerUpdateSParamValue c = asyncCallback2 (c . asLikeJS) >>= js_registerUpdateSParamValue
foreign import javascript safe "registerUpdateSParamValue($1)" js_registerUpdateSParamValue :: Callback (JSVal -> JSVal -> IO ()) -> IO ()
-- | Show service result as a simple string.
-- str :: String -- just a text result
-- return :: IO ()
foreign import javascript safe "showScenarioServiceResultString($1)" showScenarioServiceResultString :: JSString -> IO ()
-- | Show service result as a .png image.
-- buf :: ArrayBuffer -- image content
-- return :: IO ()
foreign import javascript safe "showScenarioServiceResultPng($1)" showScenarioServiceResultPng :: ArrayBuffer -> IO ()
|
achirkin/ghcjs-modeler
|
src/Program/Controllers/GUI.hs
|
bsd-3-clause
| 9,736
| 97
| 13
| 1,551
| 1,867
| 1,003
| 864
| 95
| 2
|
module Game.Bullet(
Bullet(..)
, BulletId(..)
, CreateBullet(..)
, bulletCollectionId
, bulletVelId
, bulletPosId
, bulletPlayerId
, bulletLifeTimeId
) where
import Data.Store
import Game.Player
import GHC.Generics
import Linear
import Game.GoreAndAsh.Sync
-- | Bullet information
data Bullet s = Bullet {
bulletVel :: !(V2 Double) -- ^ Velocity
, bulletPos :: !(V2 Double) -- ^ Position
, bulletPlayer :: !PlayerId -- ^ Owner player
, bulletLifeTime :: !Double -- ^ Time left to live
, bulletCustom :: !s -- ^ Custom data for bullet
} deriving (Generic, Show, Eq)
instance Functor Bullet where
fmap f b = b { bulletCustom = f $ bulletCustom b }
-- | Unique bullet ID
newtype BulletId = BulletId { unBulletId :: Int }
deriving (Generic, Show, Eq, Ord)
instance Store BulletId
-- | Info required to create new bullet
data CreateBullet = CreateBullet {
createBulletPos :: !(V2 Double) -- ^ Bullet spawn position
, createBulletDir :: !(V2 Double) -- ^ Bullet flight direction
, createBulletVel :: !Double -- ^ Initial bullet speed absolute value
, createBulletPlayer :: !PlayerId -- ^ Bullet owner
} deriving (Generic, Show)
instance Store CreateBullet
-- | Unique collection id for bullets
bulletCollectionId :: SyncItemId
bulletCollectionId = 2
-- | IDs for fields of 'Bullet'
bulletVelId, bulletPosId, bulletPlayerId, bulletLifeTimeId :: SyncItemId
bulletVelId = 0
bulletPosId = 1
bulletPlayerId = 2
bulletLifeTimeId = 3
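-- A construction sketch with hypothetical values; 'V2' and 'normalize' come
-- from the Linear import above, and the owning player id is supplied by the
-- caller.
exampleShot :: PlayerId -> CreateBullet
exampleShot pid = CreateBullet
  { createBulletPos    = V2 0 0
  , createBulletDir    = normalize (V2 1 1)
  , createBulletVel    = 20
  , createBulletPlayer = pid
  }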
|
Teaspot-Studio/gore-and-ash-demo
|
src/shared/Game/Bullet.hs
|
bsd-3-clause
| 1,508
| 0
| 11
| 309
| 334
| 201
| 133
| 58
| 1
|
{-# LANGUAGE FlexibleContexts, FlexibleInstances, GADTs #-}
-- | The @esqueleto@ EDSL (embedded domain specific language).
-- This module replaces @Database.Persist@, so instead of
-- importing that module you should just import this one:
--
-- @
-- -- For a module using just esqueleto.
-- import Database.Esqueleto
-- @
--
-- If you need to use @persistent@'s default support for queries
-- as well, either import it qualified:
--
-- @
-- -- For a module that mostly uses esqueleto.
-- import Database.Esqueleto
-- import qualified Database.Persistent as P
-- @
--
-- or import @esqueleto@ itself qualified:
--
-- @
-- -- For a module that uses esqueleto just on some queries.
-- import Database.Persistent
-- import qualified Database.Esqueleto as E
-- @
--
-- Other than identifier name clashes, @esqueleto@ does not
-- conflict with @persistent@ in any way.
module Database.Esqueleto
( -- * Setup
-- $setup
-- * Introduction
-- $introduction
-- * Getting started
-- $gettingstarted
-- * @esqueleto@'s Language
Esqueleto( where_, on, groupBy, orderBy, rand, asc, desc, limit, offset
, distinct, distinctOn, don, distinctOnOrderBy, having, locking
, sub_select, sub_selectDistinct, (^.), (?.)
, val, isNothing, just, nothing, joinV
, countRows, count, countDistinct
, not_, (==.), (>=.), (>.), (<=.), (<.), (!=.), (&&.), (||.)
, (+.), (-.), (/.), (*.)
, random_, round_, ceiling_, floor_
, min_, max_, sum_, avg_, castNum, castNumM
, coalesce, coalesceDefault
, lower_, like, ilike, (%), concat_, (++.), castString
, subList_select, subList_selectDistinct, valList, justList
, in_, notIn, exists, notExists
, set, (=.), (+=.), (-=.), (*=.), (/=.)
, case_, toBaseId)
, ToBaseId(..)
, when_
, then_
, else_
, from
, Value(..)
, unValue
, ValueList(..)
, OrderBy
, DistinctOn
, LockingKind(..)
, SqlString
-- ** Joins
, InnerJoin(..)
, CrossJoin(..)
, LeftOuterJoin(..)
, RightOuterJoin(..)
, FullOuterJoin(..)
, OnClauseWithoutMatchingJoinException(..)
-- * SQL backend
, SqlQuery
, SqlExpr
, SqlEntity
, select
, selectDistinct
, selectSource
, selectDistinctSource
, delete
, deleteCount
, update
, updateCount
, insertSelect
, insertSelectCount
, insertSelectDistinct
, (<#)
, (<&>)
-- * RDBMS-specific modules
-- $rdbmsSpecificModules
-- * Helpers
, valkey
, valJ
-- * Re-exports
-- $reexports
, deleteKey
, module Database.Esqueleto.Internal.PersistentImport
) where
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Trans.Reader (ReaderT)
import Data.Int (Int64)
import Database.Esqueleto.Internal.Language
import Database.Esqueleto.Internal.Sql
import Database.Esqueleto.Internal.PersistentImport
import qualified Database.Persist
-- $setup
--
-- If you're already using @persistent@, then you're ready to use
-- @esqueleto@, no further setup is needed. If you're just
-- starting a new project and would like to use @esqueleto@, take
-- a look at @persistent@'s book first
-- (<http://www.yesodweb.com/book/persistent>) to learn how to
-- define your schema.
----------------------------------------------------------------------
-- $introduction
--
-- The main goals of @esqueleto@ are to:
--
-- * Be easily translatable to SQL. When you take a look at a
-- @esqueleto@ query, you should be able to know exactly how
-- the SQL query will end up. (As opposed to being a
-- relational algebra EDSL such as HaskellDB, which is
-- non-trivial to translate into SQL.)
--
-- * Support the most widely used SQL features. We'd like you to be
-- able to use @esqueleto@ for all of your queries, no
-- exceptions. Send a pull request or open an issue on our
-- project page (<https://github.com/prowdsponsor/esqueleto>) if
-- there's anything missing that you'd like to see.
--
-- * Be as type-safe as possible. We strive to provide as many
-- type checks as possible. If you get bitten by some invalid
-- code that type-checks, please open an issue on our project
-- page so we can take a look.
--
-- However, it is /not/ a goal to be able to write portable SQL.
-- We do not try to hide the differences between DBMSs from you,
-- and @esqueleto@ code that works for one database may not work
-- on another. This is a compromise we have to make in order to
-- give you as much control over the raw SQL as possible without
-- losing too much convenience. This also means that you may
-- type-check a query that doesn't work on your DBMS.
----------------------------------------------------------------------
-- $gettingstarted
--
-- We like clean, easy-to-read EDSLs. However, in order to
-- achieve this goal we've used a lot of type hackery, leading to
-- some hard-to-read type signatures. On this section, we'll try
-- to build some intuition about the syntax.
--
-- For the following examples, we'll use this example schema:
--
-- @
-- share [mkPersist sqlSettings, mkMigrate \"migrateAll\"] [persist|
-- Person
-- name String
-- age Int Maybe
-- deriving Eq Show
-- BlogPost
-- title String
-- authorId PersonId
-- deriving Eq Show
-- Follow
-- follower PersonId
-- followed PersonId
-- deriving Eq Show
-- |]
-- @
--
-- Most of @esqueleto@ was created with @SELECT@ statements in
-- mind, not only because they're the most common but also
-- because they're the most complex kind of statement. The most
-- simple kind of @SELECT@ would be:
--
-- @
-- SELECT *
-- FROM Person
-- @
--
-- In @esqueleto@, we may write the same query above as:
--
-- @
-- do people <- 'select' $
-- 'from' $ \\person -> do
-- return person
-- liftIO $ mapM_ (putStrLn . personName . entityVal) people
-- @
--
-- The expression above has type @SqlPersist m ()@, while
-- @people@ has type @[Entity Person]@. The query above will be
-- translated into exactly the same query we wrote manually, but
-- instead of @SELECT *@ it will list all entity fields (using
-- @*@ is not robust). Note that @esqueleto@ knows that we want
-- an @Entity Person@ just because of the @personName@ that we're
-- printing later.
--
-- However, most of the time we need to filter our queries using
-- @WHERE@. For example:
--
-- @
-- SELECT *
-- FROM Person
-- WHERE Person.name = \"John\"
-- @
--
-- In @esqueleto@, we may write the same query above as:
--
-- @
-- 'select' $
-- 'from' $ \\p -> do
-- 'where_' (p '^.' PersonName '==.' 'val' \"John\")
-- return p
-- @
--
-- Although @esqueleto@'s code is a bit more noisy, it has
-- almost the same structure (save for the @return@).  The
-- @('^.')@ operator is used to project a field from an entity.
-- The field name is the same one generated by @persistent@'s
-- Template Haskell functions. We use 'val' to lift a constant
-- Haskell value into the SQL query.
--
-- Another example would be:
--
-- @
-- SELECT *
-- FROM Person
-- WHERE Person.age >= 18
-- @
--
-- In @esqueleto@, we may write the same query above as:
--
-- @
-- 'select' $
-- 'from' $ \\p -> do
-- 'where_' (p '^.' PersonAge '>=.' 'just' ('val' 18))
-- return p
-- @
--
-- Since @age@ is an optional @Person@ field, we use 'just' to lift
-- @'val' 18 :: SqlExpr (Value Int)@ into @just ('val' 18) ::
-- SqlExpr (Value (Maybe Int))@.
--
-- Implicit joins are represented by tuples. For example, to get
-- the list of all blog posts and their authors, we could write:
--
-- @
-- SELECT BlogPost.*, Person.*
-- FROM BlogPost, Person
-- WHERE BlogPost.authorId = Person.id
-- ORDER BY BlogPost.title ASC
-- @
--
-- In @esqueleto@, we may write the same query above as:
--
-- @
-- 'select' $
-- 'from' $ \\(b, p) -> do
-- 'where_' (b '^.' BlogPostAuthorId '==.' p '^.' PersonId)
-- 'orderBy' ['asc' (b '^.' BlogPostTitle)]
-- return (b, p)
-- @
--
-- However, you may want your results to include people who don't
-- have any blog posts as well using a @LEFT OUTER JOIN@:
--
-- @
-- SELECT Person.*, BlogPost.*
-- FROM Person LEFT OUTER JOIN BlogPost
-- ON Person.id = BlogPost.authorId
-- ORDER BY Person.name ASC, BlogPost.title ASC
-- @
--
-- In @esqueleto@, we may write the same query above as:
--
-- @
-- 'select' $
-- 'from' $ \\(p `'LeftOuterJoin`` mb) -> do
-- 'on' ('just' (p '^.' PersonId) '==.' mb '?.' BlogPostAuthorId)
-- 'orderBy' ['asc' (p '^.' PersonName), 'asc' (mb '?.' BlogPostTitle)]
-- return (p, mb)
-- @
--
-- On a @LEFT OUTER JOIN@ the entity on the right hand side may
-- not exist (i.e. there may be a @Person@ without any
-- @BlogPost@s), so while @p :: SqlExpr (Entity Person)@, we have
-- @mb :: SqlExpr (Maybe (Entity BlogPost))@. The whole
-- expression above has type @SqlPersist m [(Entity Person, Maybe
-- (Entity BlogPost))]@. Instead of using @(^.)@, we used
-- @('?.')@ to project a field from a @Maybe (Entity a)@.
--
-- We are by no means limited to joins of two tables, nor by
-- joins of different tables. For example, we may want a list
-- of the @Follow@ entity:
--
-- @
-- SELECT P1.*, Follow.*, P2.*
-- FROM Person AS P1
-- INNER JOIN Follow ON P1.id = Follow.follower
-- INNER JOIN Person AS P2 ON P2.id = Follow.followed
-- @
--
-- In @esqueleto@, we may write the same query above as:
--
-- @
-- 'select' $
-- 'from' $ \\(p1 `'InnerJoin`` f `'InnerJoin`` p2) -> do
-- 'on' (p2 '^.' PersonId '==.' f '^.' FollowFollowed)
-- 'on' (p1 '^.' PersonId '==.' f '^.' FollowFollower)
-- return (p1, f, p2)
-- @
--
-- /Note carefully that the order of the ON clauses is/
-- /reversed!/ You're required to write your 'on's in reverse
-- order because that helps composability (see the documentation
-- of 'on' for more details).
--
-- We also currently support @UPDATE@ and @DELETE@ statements.
-- For example:
--
-- @
-- do 'update' $ \\p -> do
-- 'set' p [ PersonName '=.' 'val' \"João\" ]
-- 'where_' (p '^.' PersonName '==.' 'val' \"Joao\")
-- 'delete' $
-- 'from' $ \\p -> do
-- 'where_' (p '^.' PersonAge '<.' 'just' ('val' 14))
-- @
--
-- The results of queries can also be used for insertions.
-- In @SQL@, we might write the following, inserting a new blog
-- post for every user:
--
-- @
-- INSERT INTO BlogPost
-- SELECT ('Group Blog Post', id)
-- FROM Person
-- @
--
-- In @esqueleto@, we may write the same query above as:
--
-- @
-- 'insertSelect' $ 'from' $ \\p->
-- return $ BlogPost '<#' \"Group Blog Post\" '<&>' (p '^.' PersonId)
-- @
--
-- Individual insertions can be performed through Persistent's
-- 'insert' function, reexported for convenience.
----------------------------------------------------------------------
-- $reexports
--
-- We re-export many symbols from @persistent@ for convenience:
--
-- * \"Store functions\" from "Database.Persist".
--
-- * Everything from "Database.Persist.Class" except for
-- @PersistQuery@ and @delete@ (use 'deleteKey' instead).
--
-- * Everything from "Database.Persist.Types" except for
-- @Update@, @SelectOpt@, @BackendSpecificFilter@ and @Filter@.
--
-- * Everything from "Database.Persist.Sql" except for
-- @deleteWhereCount@ and @updateWhereCount@.
----------------------------------------------------------------------
-- $rdbmsSpecificModules
--
-- There are many differences between SQL syntax and functions
-- supported by different RDBMSs. Since version 2.2.8,
-- @esqueleto@ includes modules containing functions that are
-- specific to a given RDBMS.
--
-- * PostgreSQL: "Database.Esqueleto.PostgreSQL".
--
-- In order to use these functions, you need to explicitly import
-- their corresponding modules, they're not re-exported here.
----------------------------------------------------------------------
-- | @valkey i = 'val' . 'toSqlKey'@
-- (<https://github.com/prowdsponsor/esqueleto/issues/9>).
valkey :: (Esqueleto query expr backend, ToBackendKey SqlBackend entity, PersistField (Key entity)) =>
Int64 -> expr (Value (Key entity))
valkey = val . toSqlKey
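-- Usage sketch (schema from the introduction above): select a person by a
-- raw 'Int64' key.
--
-- @
-- 'select' $ 'from' $ \\p -> do
--   'where_' (p '^.' PersonId '==.' 'valkey' 42)
--   return p
-- @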
-- | @valJ@ is like @val@ but for something that is already a @Value@. The use
-- case it was written for was: given a @Value@, lift the @Key@ for that @Value@
-- into the query expression in a type-safe way. However, the implementation is
-- more generic than that so we call it @valJ@.
--
-- It's important to note that the input entity and the output entity are
-- constrained to be the same by the type signature on the function
-- (<https://github.com/prowdsponsor/esqueleto/pull/69>).
--
-- /Since: 1.4.2/
valJ :: (Esqueleto query expr backend, PersistField (Key entity)) =>
Value (Key entity) -> expr (Value (Key entity))
valJ = val . unValue
----------------------------------------------------------------------
-- | Synonym for 'Database.Persist.Store.delete' that does not
-- clash with @esqueleto@'s 'delete'.
deleteKey :: ( PersistStore (PersistEntityBackend val)
, MonadIO m
, PersistEntity val )
=> Key val -> ReaderT (PersistEntityBackend val) m ()
deleteKey = Database.Persist.delete
|
prowdsponsor/esqueleto
|
src/Database/Esqueleto.hs
|
bsd-3-clause
| 13,134
| 0
| 11
| 2,634
| 952
| 783
| 169
| 77
| 1
|
module PFDS.Sec5.Ex7 where
import PFDS.Commons.Heap (Heap (..))
import PFDS.Commons.SplayHeap (SplayHeap (..))
sort :: Ord a => [a] -> [a]
sort = inOrder [] . construct
construct :: Ord a => [a] -> SplayHeap a
construct = foldl (flip insert) empty
inOrder :: [a] -> SplayHeap a -> [a]
inOrder xs E = xs
inOrder xs (T a x b) = inOrder (x:bs) a where
bs = inOrder xs b
{-| Doctests for Ex7
>>> mapM_ (print . construct) [[1..x] | x <- [1..5]]
T E 1 E
T (T E 1 E) 2 E
T (T (T E 1 E) 2 E) 3 E
T (T (T (T E 1 E) 2 E) 3 E) 4 E
T (T (T (T (T E 1 E) 2 E) 3 E) 4 E) 5 E
-}
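{-| A further check in the same doctest style (output is ascending regardless
of input order):
>>> sort [3,1,4,5,2]
[1,2,3,4,5]
-}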
|
matonix/pfds
|
src/PFDS/Sec5/Ex7.hs
|
bsd-3-clause
| 599
| 0
| 7
| 169
| 188
| 103
| 85
| 11
| 1
|
{-
%
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[TcExpr]{Typecheck an expression}
-}
{-# LANGUAGE CPP, TupleSections, ScopedTypeVariables #-}
module TcExpr ( tcPolyExpr, tcMonoExpr, tcMonoExprNC,
tcInferSigma, tcInferSigmaNC, tcInferRho, tcInferRhoNC,
tcSyntaxOp, tcSyntaxOpGen, SyntaxOpType(..), synKnownType,
tcCheckId,
addExprErrCtxt,
getFixedTyVars ) where
#include "HsVersions.h"
import {-# SOURCE #-} TcSplice( tcSpliceExpr, tcTypedBracket, tcUntypedBracket )
import THNames( liftStringName, liftName )
import HsSyn
import TcHsSyn
import TcRnMonad
import TcUnify
import BasicTypes
import Inst
import TcBinds ( chooseInferredQuantifiers, tcLocalBinds
, tcUserTypeSig, tcExtendTyVarEnvFromSig )
import TcSimplify ( simplifyInfer )
import FamInst ( tcGetFamInstEnvs, tcLookupDataFamInst )
import FamInstEnv ( FamInstEnvs )
import RnEnv ( addUsedGRE, addNameClashErrRn
, unknownSubordinateErr )
import TcEnv
import TcArrows
import TcMatches
import TcHsType
import TcPatSyn( tcPatSynBuilderOcc, nonBidirectionalErr )
import TcPat
import TcMType
import TcType
import DsMonad
import Id
import IdInfo
import ConLike
import DataCon
import PatSyn
import Name
import RdrName
import TyCon
import Type
import TysPrim ( tYPE )
import TcEvidence
import VarSet
import TysWiredIn
import TysPrim( intPrimTy )
import PrimOp( tagToEnumKey )
import PrelNames
import MkId ( proxyHashId )
import DynFlags
import SrcLoc
import Util
import VarEnv ( emptyTidyEnv )
import ListSetOps
import Maybes
import Outputable
import FastString
import Control.Monad
import Class(classTyCon)
import qualified GHC.LanguageExtensions as LangExt
import Data.Function
import Data.List
import Data.Either
import qualified Data.Set as Set
{-
************************************************************************
* *
\subsection{Main wrappers}
* *
************************************************************************
-}
tcPolyExpr, tcPolyExprNC
:: LHsExpr Name -- Expression to type check
-> TcSigmaType -- Expected type (could be a polytype)
-> TcM (LHsExpr TcId) -- Generalised expr with expected type
-- tcPolyExpr is a convenient (frequent but not too frequent)
-- place to add context information.
-- The NC version does not do so, usually because the caller wants
-- to do so himself.
tcPolyExpr expr res_ty = tc_poly_expr expr (mkCheckExpType res_ty)
tcPolyExprNC expr res_ty = tc_poly_expr_nc expr (mkCheckExpType res_ty)
-- these versions take an ExpType
tc_poly_expr, tc_poly_expr_nc :: LHsExpr Name -> ExpSigmaType -> TcM (LHsExpr TcId)
tc_poly_expr expr res_ty
= addExprErrCtxt expr $
do { traceTc "tcPolyExpr" (ppr res_ty); tc_poly_expr_nc expr res_ty }
tc_poly_expr_nc (L loc expr) res_ty
= do { traceTc "tcPolyExprNC" (ppr res_ty)
; (wrap, expr')
<- tcSkolemiseET GenSigCtxt res_ty $ \ res_ty ->
setSrcSpan loc $
-- NB: setSrcSpan *after* skolemising, so we get better
-- skolem locations
tcExpr expr res_ty
; return $ L loc (mkHsWrap wrap expr') }
---------------
tcMonoExpr, tcMonoExprNC
:: LHsExpr Name -- Expression to type check
-> ExpRhoType -- Expected type
-- Definitely no foralls at the top
-> TcM (LHsExpr TcId)
tcMonoExpr expr res_ty
= addErrCtxt (exprCtxt expr) $
tcMonoExprNC expr res_ty
tcMonoExprNC (L loc expr) res_ty
= setSrcSpan loc $
do { expr' <- tcExpr expr res_ty
; return (L loc expr') }
---------------
tcInferSigma, tcInferSigmaNC :: LHsExpr Name -> TcM ( LHsExpr TcId
, TcSigmaType )
-- Infer a *sigma*-type.
tcInferSigma expr = addErrCtxt (exprCtxt expr) (tcInferSigmaNC expr)
tcInferSigmaNC (L loc expr)
= setSrcSpan loc $
do { (expr', sigma) <- tcInfer (tcExpr expr)
; return (L loc expr', sigma) }
tcInferRho, tcInferRhoNC :: LHsExpr Name -> TcM (LHsExpr TcId, TcRhoType)
-- Infer a *rho*-type. The return type is always (shallowly) instantiated.
tcInferRho expr = addErrCtxt (exprCtxt expr) (tcInferRhoNC expr)
tcInferRhoNC expr
= do { (expr', sigma) <- tcInferSigmaNC expr
; (wrap, rho) <- topInstantiate (exprCtOrigin (unLoc expr)) sigma
; return (mkLHsWrap wrap expr', rho) }
{-
************************************************************************
* *
tcExpr: the main expression typechecker
* *
************************************************************************
NB: The res_ty is always deeply skolemised.
-}
tcExpr :: HsExpr Name -> ExpRhoType -> TcM (HsExpr TcId)
tcExpr (HsVar (L _ name)) res_ty = tcCheckId name res_ty
tcExpr (HsUnboundVar uv) res_ty = tcUnboundId uv res_ty
tcExpr e@(HsApp {}) res_ty = tcApp1 e res_ty
tcExpr e@(HsAppType {}) res_ty = tcApp1 e res_ty
tcExpr e@(HsLit lit) res_ty = do { let lit_ty = hsLitType lit
; tcWrapResult e (HsLit lit) lit_ty res_ty }
tcExpr (HsPar expr) res_ty = do { expr' <- tcMonoExprNC expr res_ty
; return (HsPar expr') }
tcExpr (HsSCC src lbl expr) res_ty
= do { expr' <- tcMonoExpr expr res_ty
; return (HsSCC src lbl expr') }
tcExpr (HsTickPragma src info srcInfo expr) res_ty
= do { expr' <- tcMonoExpr expr res_ty
; return (HsTickPragma src info srcInfo expr') }
tcExpr (HsCoreAnn src lbl expr) res_ty
= do { expr' <- tcMonoExpr expr res_ty
; return (HsCoreAnn src lbl expr') }
tcExpr (HsOverLit lit) res_ty
= do { lit' <- newOverloadedLit lit res_ty
; return (HsOverLit lit') }
tcExpr (NegApp expr neg_expr) res_ty
= do { (expr', neg_expr')
<- tcSyntaxOp NegateOrigin neg_expr [SynAny] res_ty $
\[arg_ty] ->
tcMonoExpr expr (mkCheckExpType arg_ty)
; return (NegApp expr' neg_expr') }
tcExpr e@(HsIPVar x) res_ty
= do { {- Implicit parameters must have a *tau-type* not a
type scheme. We enforce this by creating a fresh
type variable as its type. (Because res_ty may not
be a tau-type.) -}
ip_ty <- newOpenFlexiTyVarTy
; let ip_name = mkStrLitTy (hsIPNameFS x)
; ipClass <- tcLookupClass ipClassName
; ip_var <- emitWantedEvVar origin (mkClassPred ipClass [ip_name, ip_ty])
; tcWrapResult e (fromDict ipClass ip_name ip_ty (HsVar (noLoc ip_var)))
ip_ty res_ty }
where
-- Coerces a dictionary for `IP "x" t` into `t`.
fromDict ipClass x ty = HsWrap $ mkWpCastR $
unwrapIP $ mkClassPred ipClass [x,ty]
origin = IPOccOrigin x
tcExpr e@(HsOverLabel l) res_ty -- See Note [Type-checking overloaded labels]
= do { isLabelClass <- tcLookupClass isLabelClassName
; alpha <- newOpenFlexiTyVarTy
; let lbl = mkStrLitTy l
pred = mkClassPred isLabelClass [lbl, alpha]
; loc <- getSrcSpanM
; var <- emitWantedEvVar origin pred
; let proxy_arg = L loc (mkHsWrap (mkWpTyApps [typeSymbolKind, lbl])
(HsVar (L loc proxyHashId)))
tm = L loc (fromDict pred (HsVar (L loc var))) `HsApp` proxy_arg
; tcWrapResult e tm alpha res_ty }
where
-- Coerces a dictionary for `IsLabel "x" t` into `Proxy# x -> t`.
fromDict pred = HsWrap $ mkWpCastR $ unwrapIP pred
origin = OverLabelOrigin l
tcExpr (HsLam match) res_ty
= do { (match', wrap) <- tcMatchLambda herald match_ctxt match res_ty
; return (mkHsWrap wrap (HsLam match')) }
where
match_ctxt = MC { mc_what = LambdaExpr, mc_body = tcBody }
herald = sep [ text "The lambda expression" <+>
quotes (pprSetDepth (PartWay 1) $
pprMatches (LambdaExpr :: HsMatchContext Name) match),
-- The pprSetDepth makes the abstraction print briefly
text "has"]
tcExpr e@(HsLamCase matches) res_ty
= do { (matches', wrap)
<- tcMatchLambda msg match_ctxt matches res_ty
-- The laziness annotation is because we don't want to fail here
-- if there are multiple arguments
; return (mkHsWrap wrap $ HsLamCase matches') }
where
msg = sep [ text "The function" <+> quotes (ppr e)
, text "requires"]
match_ctxt = MC { mc_what = CaseAlt, mc_body = tcBody }
tcExpr e@(ExprWithTySig expr sig_ty) res_ty
= do { sig_info <- checkNoErrs $ -- Avoid error cascade
tcUserTypeSig sig_ty Nothing
; (expr', poly_ty) <- tcExprSig expr sig_info
; let expr'' = ExprWithTySigOut expr' sig_ty
; tcWrapResult e expr'' poly_ty res_ty }
{-
Note [Type-checking overloaded labels]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Recall that (in GHC.OverloadedLabels) we have
class IsLabel (x :: Symbol) a where
fromLabel :: Proxy# x -> a
When we see an overloaded label like `#foo`, we generate a fresh
variable `alpha` for the type and emit an `IsLabel "foo" alpha`
constraint. Because the `IsLabel` class has a single method, it is
represented by a newtype, so we can coerce `IsLabel "foo" alpha` to
`Proxy# "foo" -> alpha` (just like for implicit parameters). We then
apply it to `proxy#` of type `Proxy# "foo"`.
That is, we translate `#foo` to `fromLabel (proxy# :: Proxy# "foo")`.
-}
{-
************************************************************************
* *
Infix operators and sections
* *
************************************************************************
Note [Left sections]
~~~~~~~~~~~~~~~~~~~~
Left sections, like (4 *), are equivalent to
\ x -> (*) 4 x,
or, if PostfixOperators is enabled, just
(*) 4
With PostfixOperators we don't actually require the function to take
two arguments at all. For example, (x `not`) means (not x); you get
postfix operators! Not Haskell 98, but it's less work and kind of
useful.
Note [Typing rule for ($)]
~~~~~~~~~~~~~~~~~~~~~~~~~~
People write
runST $ blah
so much, where
runST :: (forall s. ST s a) -> a
that I have finally given in and written a special type-checking
rule just for saturated applications of ($).
* Infer the type of the first argument
* Decompose it; should be of form (arg2_ty -> res_ty),
where arg2_ty might be a polytype
* Use arg2_ty to typecheck arg2
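A concrete (purely illustrative) example of why the rule matters:

    import Control.Monad.ST
    import Data.STRef

    two :: Int
    two = runST $ do { r <- newSTRef (0 :: Int)
                     ; modifySTRef r (+2)
                     ; readSTRef r }

Here arg1 is runST, whose argument type (forall s. ST s Int) is a polytype,
so arg2 (the do-block) is checked against that polytype rather than against
a monomorphically-instantiated argument type of ($).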
Note [Typing rule for seq]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to allow
x `seq` (# p,q #)
which suggests this type for seq:
seq :: forall (a:*) (b:Open). a -> b -> b,
with (b:Open) meaning that b can be instantiated with an unboxed
tuple. The trouble is that this might accept a partially-applied
'seq', and I'm just not certain that would work.  I'm only sure it
works when 'seq' is fully applied, so that it turns into
case x of _ -> (# p,q #)
So it seems more uniform to treat 'seq' as if it were a language
construct.
See also Note [seqId magic] in MkId
-}
tcExpr expr@(OpApp arg1 op fix arg2) res_ty
| (L loc (HsVar (L lv op_name))) <- op
, op_name `hasKey` seqIdKey -- Note [Typing rule for seq]
= do { arg1_ty <- newFlexiTyVarTy liftedTypeKind
; let arg2_exp_ty = res_ty
; arg1' <- tcArg op arg1 arg1_ty 1
; arg2' <- addErrCtxt (funAppCtxt op arg2 2) $
tc_poly_expr_nc arg2 arg2_exp_ty
; arg2_ty <- readExpType arg2_exp_ty
; op_id <- tcLookupId op_name
; let op' = L loc (HsWrap (mkWpTyApps [arg1_ty, arg2_ty])
(HsVar (L lv op_id)))
; return $ OpApp arg1' op' fix arg2' }
| (L loc (HsVar (L lv op_name))) <- op
, op_name `hasKey` dollarIdKey -- Note [Typing rule for ($)]
= do { traceTc "Application rule" (ppr op)
; (arg1', arg1_ty) <- tcInferSigma arg1
; let doc = text "The first argument of ($) takes"
orig1 = exprCtOrigin (unLoc arg1)
; (wrap_arg1, [arg2_sigma], op_res_ty) <-
matchActualFunTys doc orig1 (Just arg1) 1 arg1_ty
-- We have (arg1 $ arg2)
-- So: arg1_ty = arg2_ty -> op_res_ty
-- where arg2_sigma may be polymorphic; that's the point
; arg2' <- tcArg op arg2 arg2_sigma 2
-- Make sure that the argument type has kind '*'
-- ($) :: forall (r:RuntimeRep) (a:*) (b:TYPE r). (a->b) -> a -> b
-- Eg we do not want to allow (D# $ 4.0#) Trac #5570
-- (which gives a seg fault)
--
-- The *result* type can have any kind (Trac #8739),
-- so we don't need to check anything for that
; _ <- unifyKind (Just arg2_sigma) (typeKind arg2_sigma) liftedTypeKind
-- ignore the evidence. arg2_sigma must have type * or #,
-- because we know arg2_sigma -> op_res_ty is well-kinded
-- (because otherwise matchActualFunTys would fail)
-- There's no possibility here of, say, a kind family reducing to *.
; wrap_res <- tcSubTypeHR orig1 (Just expr) op_res_ty res_ty
-- op_res -> res
; op_id <- tcLookupId op_name
; res_ty <- readExpType res_ty
; let op' = L loc (HsWrap (mkWpTyApps [ getRuntimeRep "tcExpr ($)" res_ty
, arg2_sigma
, res_ty])
(HsVar (L lv op_id)))
-- arg1' :: arg1_ty
-- wrap_arg1 :: arg1_ty "->" (arg2_sigma -> op_res_ty)
-- wrap_res :: op_res_ty "->" res_ty
-- op' :: (a2_ty -> res_ty) -> a2_ty -> res_ty
-- wrap1 :: arg1_ty "->" (arg2_sigma -> res_ty)
wrap1 = mkWpFun idHsWrapper wrap_res arg2_sigma res_ty
<.> wrap_arg1
; return (OpApp (mkLHsWrap wrap1 arg1') op' fix arg2') }
| (L loc (HsRecFld (Ambiguous lbl _))) <- op
, Just sig_ty <- obviousSig (unLoc arg1)
-- See Note [Disambiguating record fields]
= do { sig_tc_ty <- tcHsSigWcType ExprSigCtxt sig_ty
; sel_name <- disambiguateSelector lbl sig_tc_ty
; let op' = L loc (HsRecFld (Unambiguous lbl sel_name))
; tcExpr (OpApp arg1 op' fix arg2) res_ty
}
| otherwise
= do { traceTc "Non Application rule" (ppr op)
; (wrap, op', [Left arg1', Left arg2'])
<- tcApp (Just $ mk_op_msg op)
op [Left arg1, Left arg2] res_ty
; return (mkHsWrap wrap $ OpApp arg1' op' fix arg2') }
-- Right sections, equivalent to \ x -> x `op` expr, or
-- \ x -> op x expr
tcExpr expr@(SectionR op arg2) res_ty
= do { (op', op_ty) <- tcInferFun op
; (wrap_fun, [arg1_ty, arg2_ty], op_res_ty) <-
matchActualFunTys (mk_op_msg op) SectionOrigin (Just op) 2 op_ty
; wrap_res <- tcSubTypeHR SectionOrigin (Just expr)
(mkFunTy arg1_ty op_res_ty) res_ty
; arg2' <- tcArg op arg2 arg2_ty 2
; return ( mkHsWrap wrap_res $
SectionR (mkLHsWrap wrap_fun op') arg2' ) }
tcExpr expr@(SectionL arg1 op) res_ty
= do { (op', op_ty) <- tcInferFun op
; dflags <- getDynFlags -- Note [Left sections]
; let n_reqd_args | xopt LangExt.PostfixOperators dflags = 1
| otherwise = 2
; (wrap_fn, (arg1_ty:arg_tys), op_res_ty)
<- matchActualFunTys (mk_op_msg op) SectionOrigin (Just op)
n_reqd_args op_ty
; wrap_res <- tcSubTypeHR SectionOrigin (Just expr)
(mkFunTys arg_tys op_res_ty) res_ty
; arg1' <- tcArg op arg1 arg1_ty 1
; return ( mkHsWrap wrap_res $
SectionL arg1' (mkLHsWrap wrap_fn op') ) }
tcExpr expr@(ExplicitTuple tup_args boxity) res_ty
| all tupArgPresent tup_args
= do { let arity = length tup_args
tup_tc = tupleTyCon boxity arity
; res_ty <- expTypeToType res_ty
; (coi, arg_tys) <- matchExpectedTyConApp tup_tc res_ty
-- Unboxed tuples have RuntimeRep vars, which we
-- don't care about here
-- See Note [Unboxed tuple RuntimeRep vars] in TyCon
; let arg_tys' = case boxity of Unboxed -> drop arity arg_tys
Boxed -> arg_tys
; tup_args1 <- tcTupArgs tup_args arg_tys'
; return $ mkHsWrapCo coi (ExplicitTuple tup_args1 boxity) }
| otherwise
= -- The tup_args are a mixture of Present and Missing (for tuple sections)
do { let arity = length tup_args
; arg_tys <- case boxity of
{ Boxed -> newFlexiTyVarTys arity liftedTypeKind
; Unboxed -> replicateM arity newOpenFlexiTyVarTy }
; let actual_res_ty
= mkFunTys [ty | (ty, (L _ (Missing _))) <- arg_tys `zip` tup_args]
(mkTupleTy boxity arg_tys)
; wrap <- tcSubTypeHR (Shouldn'tHappenOrigin "ExpTuple")
(Just expr)
actual_res_ty res_ty
-- Handle tuple sections, where some of the arguments are Missing
; tup_args1 <- tcTupArgs tup_args arg_tys
; return $ mkHsWrap wrap (ExplicitTuple tup_args1 boxity) }
tcExpr (ExplicitList _ witness exprs) res_ty
= case witness of
Nothing -> do { res_ty <- expTypeToType res_ty
; (coi, elt_ty) <- matchExpectedListTy res_ty
; exprs' <- mapM (tc_elt elt_ty) exprs
; return $
mkHsWrapCo coi $ ExplicitList elt_ty Nothing exprs' }
Just fln -> do { ((exprs', elt_ty), fln')
<- tcSyntaxOp ListOrigin fln
[synKnownType intTy, SynList] res_ty $
\ [elt_ty] ->
do { exprs' <-
mapM (tc_elt elt_ty) exprs
; return (exprs', elt_ty) }
; return $ ExplicitList elt_ty (Just fln') exprs' }
where tc_elt elt_ty expr = tcPolyExpr expr elt_ty
tcExpr (ExplicitPArr _ exprs) res_ty -- maybe empty
= do { res_ty <- expTypeToType res_ty
; (coi, elt_ty) <- matchExpectedPArrTy res_ty
; exprs' <- mapM (tc_elt elt_ty) exprs
; return $
mkHsWrapCo coi $ ExplicitPArr elt_ty exprs' }
where
tc_elt elt_ty expr = tcPolyExpr expr elt_ty
{-
************************************************************************
* *
Let, case, if, do
* *
************************************************************************
-}
tcExpr (HsLet (L l binds) expr) res_ty
= do { (binds', expr') <- tcLocalBinds binds $
tcMonoExpr expr res_ty
; return (HsLet (L l binds') expr') }
tcExpr (HsCase scrut matches) res_ty
= do { -- We used to typecheck the case alternatives first.
-- The case patterns tend to give good type info to use
-- when typechecking the scrutinee. For example
-- case (map f) of
-- (x:xs) -> ...
-- will report that map is applied to too few arguments
--
-- But now, in the GADT world, we need to typecheck the scrutinee
-- first, to get type info that may be refined in the case alternatives
(scrut', scrut_ty) <- tcInferRho scrut
; traceTc "HsCase" (ppr scrut_ty)
; matches' <- tcMatchesCase match_ctxt scrut_ty matches res_ty
; return (HsCase scrut' matches') }
where
match_ctxt = MC { mc_what = CaseAlt,
mc_body = tcBody }
tcExpr (HsIf Nothing pred b1 b2) res_ty -- Ordinary 'if'
= do { pred' <- tcMonoExpr pred (mkCheckExpType boolTy)
-- this forces the branches to be fully instantiated
-- (See #10619)
; res_ty <- mkCheckExpType <$> expTypeToType res_ty
; b1' <- tcMonoExpr b1 res_ty
; b2' <- tcMonoExpr b2 res_ty
; return (HsIf Nothing pred' b1' b2') }
tcExpr (HsIf (Just fun) pred b1 b2) res_ty
= do { ((pred', b1', b2'), fun')
<- tcSyntaxOp IfOrigin fun [SynAny, SynAny, SynAny] res_ty $
\ [pred_ty, b1_ty, b2_ty] ->
do { pred' <- tcPolyExpr pred pred_ty
; b1' <- tcPolyExpr b1 b1_ty
; b2' <- tcPolyExpr b2 b2_ty
; return (pred', b1', b2') }
; return (HsIf (Just fun') pred' b1' b2') }
tcExpr (HsMultiIf _ alts) res_ty
= do { res_ty <- if isSingleton alts
then return res_ty
else mkCheckExpType <$> expTypeToType res_ty
-- Just like Note [Case branches must never infer a non-tau type]
-- in TcMatches
; alts' <- mapM (wrapLocM $ tcGRHS match_ctxt res_ty) alts
; res_ty <- readExpType res_ty
; return (HsMultiIf res_ty alts') }
where match_ctxt = MC { mc_what = IfAlt, mc_body = tcBody }
tcExpr (HsDo do_or_lc stmts _) res_ty
= do { expr' <- tcDoStmts do_or_lc stmts res_ty
; return expr' }
tcExpr (HsProc pat cmd) res_ty
= do { (pat', cmd', coi) <- tcProc pat cmd res_ty
; return $ mkHsWrapCo coi (HsProc pat' cmd') }
-- Typechecks the static form and wraps it with a call to 'fromStaticPtr'.
tcExpr (HsStatic expr) res_ty
= do { res_ty <- expTypeToType res_ty
; (co, (p_ty, expr_ty)) <- matchExpectedAppTy res_ty
; (expr', lie) <- captureConstraints $
addErrCtxt (hang (text "In the body of a static form:")
2 (ppr expr)
) $
tcPolyExprNC expr expr_ty
-- Require the type of the argument to be Typeable.
-- The evidence is not used, but asking the constraint ensures that
-- the current implementation is as restrictive as future versions
-- of the StaticPointers extension.
; typeableClass <- tcLookupClass typeableClassName
; _ <- emitWantedEvVar StaticOrigin $
mkTyConApp (classTyCon typeableClass)
[liftedTypeKind, expr_ty]
-- Insert the constraints of the static form in a global list for later
-- validation.
; stWC <- tcg_static_wc <$> getGblEnv
; updTcRef stWC (andWC lie)
-- Wrap the static form with the 'fromStaticPtr' call.
; fromStaticPtr <- newMethodFromName StaticOrigin fromStaticPtrName p_ty
; let wrap = mkWpTyApps [expr_ty]
; loc <- getSrcSpanM
; return $ mkHsWrapCo co $ HsApp (L loc $ mkHsWrap wrap fromStaticPtr)
(L loc (HsStatic expr'))
}
{-
************************************************************************
* *
Record construction and update
* *
************************************************************************
-}
tcExpr expr@(RecordCon { rcon_con_name = L loc con_name
, rcon_flds = rbinds }) res_ty
= do { con_like <- tcLookupConLike con_name
-- Check for missing fields
; checkMissingFields con_like rbinds
; (con_expr, con_sigma) <- tcInferId con_name
; (con_wrap, con_tau) <-
topInstantiate (OccurrenceOf con_name) con_sigma
-- a shallow instantiation should really be enough for
-- a data constructor.
; let arity = conLikeArity con_like
(arg_tys, actual_res_ty) = tcSplitFunTysN con_tau arity
; case conLikeWrapId_maybe con_like of
Nothing -> nonBidirectionalErr (conLikeName con_like)
Just con_id -> do {
res_wrap <- tcSubTypeHR (Shouldn'tHappenOrigin "RecordCon")
(Just expr) actual_res_ty res_ty
; rbinds' <- tcRecordBinds con_like arg_tys rbinds
; return $
mkHsWrap res_wrap $
RecordCon { rcon_con_name = L loc con_id
, rcon_con_expr = mkHsWrap con_wrap con_expr
, rcon_con_like = con_like
, rcon_flds = rbinds' } } }
{-
Note [Type of a record update]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The main complication with RecordUpd is that we need to explicitly
handle the *non-updated* fields. Consider:
data T a b c = MkT1 { fa :: a, fb :: (b,c) }
| MkT2 { fa :: a, fb :: (b,c), fc :: c -> c }
| MkT3 { fd :: a }
upd :: T a b c -> (b',c) -> T a b' c
upd t x = t { fb = x}
The result type should be (T a b' c)
not (T a b c), because 'b' *is not* mentioned in a non-updated field
not (T a b' c'), because 'c' *is* mentioned in a non-updated field
NB that it's not good enough to look at just one constructor; we must
look at them all; cf Trac #3219
After all, upd should be equivalent to:
upd t x = case t of
MkT1 p q -> MkT1 p x
MkT2 a b c -> MkT2 a x c
MkT3 d -> error ...
So we need to give a completely fresh type to the result record,
and then constrain it by the fields that are *not* updated ("p" above).
We call these the "fixed" type variables, and compute them in getFixedTyVars.
Note that because MkT3 doesn't contain all the fields being updated,
its RHS is simply an error, so it doesn't impose any type constraints.
Hence the use of 'relevant_cont'.
Note [Implicit type sharing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
We also take into account any "implicit" non-update fields. For example
data T a b where { MkT { f::a } :: T a a; ... }
So the "real" type of MkT is: forall ab. (a~b) => a -> T a b
Then consider
upd t x = t { f=x }
We infer the type
upd :: T a b -> a -> T a b
upd (t::T a b) (x::a)
= case t of { MkT (co:a~b) (_:a) -> MkT co x }
We can't give it the more general type
upd :: T a b -> c -> T c b
Note [Criteria for update]
~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to allow update for existentials etc, provided the updated
field isn't part of the existential. For example, this should be ok.
data T a where { MkT { f1::a, f2::b->b } :: T a }
f :: T a -> b -> T b
f t b = t { f1=b }
The criterion we use is this:
The types of the updated fields
mention only the universally-quantified type variables
of the data constructor
NB: this is not (quite) the same as being a "naughty" record selector
(See Note [Naughty record selectors] in TcTyClsDecls), at least
in the case of GADTs. Consider
data T a where { MkT { f :: a } :: T [a] }
Then f is not "naughty" because it has a well-typed record selector.
But we don't allow updates for 'f'. (One could consider trying to
allow this, but it makes my head hurt. Badly. And no one has asked
for it.)
In principle one could go further, and allow
g :: T a -> T a
g t = t { f2 = \x -> x }
because the expression is polymorphic...but that seems a bridge too far.
Note [Data family example]
~~~~~~~~~~~~~~~~~~~~~~~~~~
data instance T (a,b) = MkT { x::a, y::b }
--->
data :TP a b = MkT { x::a, y::b }
coTP a b :: T (a,b) ~ :TP a b
Suppose r :: T (t1,t2), e :: t3
Then r { x=e } :: T (t3,t2)
--->
case r |> co1 of
MkT x y -> MkT e y |> co2
where co1 :: T (t1,t2) ~ :TP t1 t2
co2 :: :TP t3 t2 ~ T (t3,t2)
The wrapping with co2 is done by the constructor wrapper for MkT
Outgoing invariants
~~~~~~~~~~~~~~~~~~~
In the outgoing (HsRecordUpd scrut binds cons in_inst_tys out_inst_tys):
* cons are the data constructors to be updated
* in_inst_tys, out_inst_tys have same length, and instantiate the
*representation* tycon of the data cons. In Note [Data
family example], in_inst_tys = [t1,t2], out_inst_tys = [t3,t2]
Note [Mixed Record Field Updates]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider the following pattern synonym.
data MyRec = MyRec { foo :: Int, qux :: String }
pattern HisRec{f1, f2} = MyRec{foo = f1, qux=f2}
This allows updates such as the following
updater :: MyRec -> MyRec
updater a = a {f1 = 1 }
It would also make sense to allow the following update (which we reject).
updater a = a {f1 = 1, qux = "two" } ==? MyRec 1 "two"
This leads to confusing behaviour when the selectors in fact refer to the same
field.
updater a = a {f1 = 1, foo = 2} ==? ???
For this reason, we reject a mixture of pattern synonym and normal record
selectors in the same update block. Although of course we still allow the
following.
updater a = (a {f1 = 1}) {foo = 2}
> updater (MyRec 0 "str")
MyRec 2 "str"
-}
tcExpr expr@(RecordUpd { rupd_expr = record_expr, rupd_flds = rbnds }) res_ty
= ASSERT( notNull rbnds )
do { -- STEP -2: typecheck the record_expr, the record to be updated
(record_expr', record_rho) <- tcInferRho record_expr
-- STEP -1 See Note [Disambiguating record fields]
-- After this we know that rbinds is unambiguous
; rbinds <- disambiguateRecordBinds record_expr record_rho rbnds res_ty
; let upd_flds = map (unLoc . hsRecFieldLbl . unLoc) rbinds
upd_fld_occs = map (occNameFS . rdrNameOcc . rdrNameAmbiguousFieldOcc) upd_flds
sel_ids = map selectorAmbiguousFieldOcc upd_flds
-- STEP 0
-- Check that the field names are really field names
-- and they are all field names for proper records or
-- all field names for pattern synonyms.
; let bad_guys = [ setSrcSpan loc $ addErrTc (notSelector fld_name)
| fld <- rbinds,
-- Excludes class ops
let L loc sel_id = hsRecUpdFieldId (unLoc fld),
not (isRecordSelector sel_id),
let fld_name = idName sel_id ]
; unless (null bad_guys) (sequence bad_guys >> failM)
-- See Note [Mixed Record Field Updates]
; let (data_sels, pat_syn_sels) =
partition isDataConRecordSelector sel_ids
; MASSERT( all isPatSynRecordSelector pat_syn_sels )
; checkTc ( null data_sels || null pat_syn_sels )
( mixedSelectors data_sels pat_syn_sels )
-- STEP 1
-- Figure out the tycon and data cons from the first field name
; let -- It's OK to use the non-tc splitters here (for a selector)
sel_id : _ = sel_ids
mtycon :: Maybe TyCon
mtycon = case idDetails sel_id of
RecSelId (RecSelData tycon) _ -> Just tycon
_ -> Nothing
con_likes :: [ConLike]
con_likes = case idDetails sel_id of
RecSelId (RecSelData tc) _
-> map RealDataCon (tyConDataCons tc)
RecSelId (RecSelPatSyn ps) _
-> [PatSynCon ps]
_ -> panic "tcRecordUpd"
-- NB: for a data type family, the tycon is the instance tycon
relevant_cons = conLikesWithFields con_likes upd_fld_occs
-- A constructor is only relevant to this process if
-- it contains *all* the fields that are being updated
-- Other ones will cause a runtime error if they occur
-- Step 2
-- Check that at least one constructor has all the named fields
-- i.e. has an empty set of bad fields returned by badFields
; checkTc (not (null relevant_cons)) (badFieldsUpd rbinds con_likes)
-- Take apart a representative constructor
; let con1 = ASSERT( not (null relevant_cons) ) head relevant_cons
(con1_tvs, _, _, _prov_theta, req_theta, con1_arg_tys, _)
= conLikeFullSig con1
con1_flds = map flLabel $ conLikeFieldLabels con1
con1_tv_tys = mkTyVarTys con1_tvs
con1_res_ty = case mtycon of
Just tc -> mkFamilyTyConApp tc con1_tv_tys
Nothing -> conLikeResTy con1 con1_tv_tys
-- Check that we're not dealing with a unidirectional pattern
-- synonym
; unless (isJust $ conLikeWrapId_maybe con1)
(nonBidirectionalErr (conLikeName con1))
-- STEP 3 Note [Criteria for update]
-- Check that each updated field is polymorphic; that is, its type
-- mentions only the universally-quantified variables of the data con
; let flds1_w_tys = zipEqual "tcExpr:RecConUpd" con1_flds con1_arg_tys
bad_upd_flds = filter bad_fld flds1_w_tys
con1_tv_set = mkVarSet con1_tvs
bad_fld (fld, ty) = fld `elem` upd_fld_occs &&
not (tyCoVarsOfType ty `subVarSet` con1_tv_set)
; checkTc (null bad_upd_flds) (badFieldTypes bad_upd_flds)
-- STEP 4 Note [Type of a record update]
-- Figure out types for the scrutinee and result
-- Both are of form (T a b c), with fresh type variables, but with
-- common variables where the scrutinee and result must have the same type
-- These are variables that appear in *any* arg of *any* of the
-- relevant constructors *except* in the updated fields
--
; let fixed_tvs = getFixedTyVars upd_fld_occs con1_tvs relevant_cons
is_fixed_tv tv = tv `elemVarSet` fixed_tvs
mk_inst_ty :: TCvSubst -> (TyVar, TcType) -> TcM (TCvSubst, TcType)
-- Deals with instantiation of kind variables
-- c.f. TcMType.newMetaTyVars
mk_inst_ty subst (tv, result_inst_ty)
| is_fixed_tv tv -- Same as result type
= return (extendTvSubst subst tv result_inst_ty, result_inst_ty)
| otherwise -- Fresh type, of correct kind
= do { (subst', new_tv) <- newMetaTyVarX subst tv
; return (subst', mkTyVarTy new_tv) }
; (result_subst, con1_tvs') <- newMetaTyVars con1_tvs
; let result_inst_tys = mkTyVarTys con1_tvs'
; (scrut_subst, scrut_inst_tys) <- mapAccumLM mk_inst_ty emptyTCvSubst
(con1_tvs `zip` result_inst_tys)
; let rec_res_ty = TcType.substTy result_subst con1_res_ty
scrut_ty = TcType.substTyUnchecked scrut_subst con1_res_ty
con1_arg_tys' = map (TcType.substTy result_subst) con1_arg_tys
; wrap_res <- tcSubTypeHR (exprCtOrigin expr)
(Just expr) rec_res_ty res_ty
; co_scrut <- unifyType (Just record_expr) record_rho scrut_ty
-- NB: normal unification is OK here (as opposed to subsumption),
-- because for this to work out, both record_rho and scrut_ty have
-- to be normal datatypes -- no contravariant stuff can go on
-- STEP 5
-- Typecheck the bindings
; rbinds' <- tcRecordUpd con1 con1_arg_tys' rbinds
-- STEP 6: Deal with the stupid theta
; let theta' = substThetaUnchecked scrut_subst (conLikeStupidTheta con1)
; instStupidTheta RecordUpdOrigin theta'
-- Step 7: make a cast for the scrutinee, in the
-- case that it's from a data family
; let fam_co :: HsWrapper -- RepT t1 .. tn ~R scrut_ty
fam_co | Just tycon <- mtycon
, Just co_con <- tyConFamilyCoercion_maybe tycon
= mkWpCastR (mkTcUnbranchedAxInstCo co_con scrut_inst_tys [])
| otherwise
= idHsWrapper
-- Step 8: Check that the req constraints are satisfied
-- For normal data constructors req_theta is empty but we must do
-- this check for pattern synonyms.
; let req_theta' = substThetaUnchecked scrut_subst req_theta
; req_wrap <- instCallConstraints RecordUpdOrigin req_theta'
-- Phew!
; return $
mkHsWrap wrap_res $
RecordUpd { rupd_expr = mkLHsWrap fam_co (mkLHsWrapCo co_scrut record_expr')
, rupd_flds = rbinds'
, rupd_cons = relevant_cons, rupd_in_tys = scrut_inst_tys
, rupd_out_tys = result_inst_tys, rupd_wrap = req_wrap } }
tcExpr (HsRecFld f) res_ty
= tcCheckRecSelId f res_ty
{-
************************************************************************
* *
Arithmetic sequences e.g. [a,b..]
and their parallel-array counterparts e.g. [: a,b.. :]
* *
************************************************************************
-}
tcExpr (ArithSeq _ witness seq) res_ty
= tcArithSeq witness seq res_ty
tcExpr (PArrSeq _ seq@(FromTo expr1 expr2)) res_ty
= do { res_ty <- expTypeToType res_ty
; (coi, elt_ty) <- matchExpectedPArrTy res_ty
; expr1' <- tcPolyExpr expr1 elt_ty
; expr2' <- tcPolyExpr expr2 elt_ty
; enumFromToP <- initDsTc $ dsDPHBuiltin enumFromToPVar
; enum_from_to <- newMethodFromName (PArrSeqOrigin seq)
(idName enumFromToP) elt_ty
; return $
mkHsWrapCo coi $ PArrSeq enum_from_to (FromTo expr1' expr2') }
tcExpr (PArrSeq _ seq@(FromThenTo expr1 expr2 expr3)) res_ty
= do { res_ty <- expTypeToType res_ty
; (coi, elt_ty) <- matchExpectedPArrTy res_ty
; expr1' <- tcPolyExpr expr1 elt_ty
; expr2' <- tcPolyExpr expr2 elt_ty
; expr3' <- tcPolyExpr expr3 elt_ty
; enumFromThenToP <- initDsTc $ dsDPHBuiltin enumFromThenToPVar
; eft <- newMethodFromName (PArrSeqOrigin seq)
(idName enumFromThenToP) elt_ty -- !!!FIXME: chak
; return $
mkHsWrapCo coi $
PArrSeq eft (FromThenTo expr1' expr2' expr3') }
tcExpr (PArrSeq _ _) _
= panic "TcExpr.tcExpr: Infinite parallel array!"
-- the parser shouldn't have generated it and the renamer shouldn't have
-- let it through
{-
************************************************************************
* *
Template Haskell
* *
************************************************************************
-}
tcExpr (HsSpliceE splice) res_ty
= tcSpliceExpr splice res_ty
tcExpr (HsBracket brack) res_ty
= tcTypedBracket brack res_ty
tcExpr (HsRnBracketOut brack ps) res_ty
= tcUntypedBracket brack ps res_ty
{-
************************************************************************
* *
Catch-all
* *
************************************************************************
-}
tcExpr other _ = pprPanic "tcMonoExpr" (ppr other)
-- Include ArrForm, ArrApp, which shouldn't appear at all
-- Also HsTcBracketOut, HsQuasiQuoteE
{-
************************************************************************
* *
Arithmetic sequences [a..b] etc
* *
************************************************************************
-}
tcArithSeq :: Maybe (SyntaxExpr Name) -> ArithSeqInfo Name -> ExpRhoType
-> TcM (HsExpr TcId)
tcArithSeq witness seq@(From expr) res_ty
= do { (wrap, elt_ty, wit') <- arithSeqEltType witness res_ty
; expr' <- tcPolyExpr expr elt_ty
; enum_from <- newMethodFromName (ArithSeqOrigin seq)
enumFromName elt_ty
; return $ mkHsWrap wrap $
ArithSeq enum_from wit' (From expr') }
tcArithSeq witness seq@(FromThen expr1 expr2) res_ty
= do { (wrap, elt_ty, wit') <- arithSeqEltType witness res_ty
; expr1' <- tcPolyExpr expr1 elt_ty
; expr2' <- tcPolyExpr expr2 elt_ty
; enum_from_then <- newMethodFromName (ArithSeqOrigin seq)
enumFromThenName elt_ty
; return $ mkHsWrap wrap $
ArithSeq enum_from_then wit' (FromThen expr1' expr2') }
tcArithSeq witness seq@(FromTo expr1 expr2) res_ty
= do { (wrap, elt_ty, wit') <- arithSeqEltType witness res_ty
; expr1' <- tcPolyExpr expr1 elt_ty
; expr2' <- tcPolyExpr expr2 elt_ty
; enum_from_to <- newMethodFromName (ArithSeqOrigin seq)
enumFromToName elt_ty
; return $ mkHsWrap wrap $
ArithSeq enum_from_to wit' (FromTo expr1' expr2') }
tcArithSeq witness seq@(FromThenTo expr1 expr2 expr3) res_ty
= do { (wrap, elt_ty, wit') <- arithSeqEltType witness res_ty
; expr1' <- tcPolyExpr expr1 elt_ty
; expr2' <- tcPolyExpr expr2 elt_ty
; expr3' <- tcPolyExpr expr3 elt_ty
; eft <- newMethodFromName (ArithSeqOrigin seq)
enumFromThenToName elt_ty
; return $ mkHsWrap wrap $
ArithSeq eft wit' (FromThenTo expr1' expr2' expr3') }
-----------------
arithSeqEltType :: Maybe (SyntaxExpr Name) -> ExpRhoType
-> TcM (HsWrapper, TcType, Maybe (SyntaxExpr Id))
arithSeqEltType Nothing res_ty
= do { res_ty <- expTypeToType res_ty
; (coi, elt_ty) <- matchExpectedListTy res_ty
; return (mkWpCastN coi, elt_ty, Nothing) }
arithSeqEltType (Just fl) res_ty
= do { (elt_ty, fl')
<- tcSyntaxOp ListOrigin fl [SynList] res_ty $
\ [elt_ty] -> return elt_ty
; return (idHsWrapper, elt_ty, Just fl') }
{-
************************************************************************
* *
Applications
* *
************************************************************************
-}
type LHsExprArgIn = Either (LHsExpr Name) (LHsWcType Name)
type LHsExprArgOut = Either (LHsExpr TcId) (LHsWcType Name)
tcApp1 :: HsExpr Name -- either HsApp or HsAppType
-> ExpRhoType -> TcM (HsExpr TcId)
tcApp1 e res_ty
= do { (wrap, fun, args) <- tcApp Nothing (noLoc e) [] res_ty
; return (mkHsWrap wrap $ unLoc $ foldl mk_hs_app fun args) }
where
mk_hs_app f (Left a) = mkHsApp f a
mk_hs_app f (Right a) = mkHsAppTypeOut f a
tcApp :: Maybe SDoc -- like "The function `f' is applied to"
-- or leave out to get exactly that message
-> LHsExpr Name -> [LHsExprArgIn] -- Function and args
-> ExpRhoType -> TcM (HsWrapper, LHsExpr TcId, [LHsExprArgOut])
-- (wrap, fun, args). For an ordinary function application,
-- these should be assembled as (wrap (fun args)).
-- But OpApp is slightly different, so that's why the caller
-- must assemble
tcApp m_herald orig_fun orig_args res_ty
= go orig_fun orig_args
where
go :: LHsExpr Name -> [LHsExprArgIn]
-> TcM (HsWrapper, LHsExpr TcId, [LHsExprArgOut])
go (L _ (HsPar e)) args = go e args
go (L _ (HsApp e1 e2)) args = go e1 (Left e2:args)
go (L _ (HsAppType e t)) args = go e (Right t:args)
go (L loc (HsVar (L _ fun))) args
| fun `hasKey` tagToEnumKey
, count isLeft args == 1
= do { (wrap, expr, args) <- tcTagToEnum loc fun args res_ty
; return (wrap, expr, args) }
| fun `hasKey` seqIdKey
, count isLeft args == 2
= do { (wrap, expr, args) <- tcSeq loc fun args res_ty
; return (wrap, expr, args) }
go (L loc (HsRecFld (Ambiguous lbl _))) args@(Left (L _ arg) : _)
| Just sig_ty <- obviousSig arg
= do { sig_tc_ty <- tcHsSigWcType ExprSigCtxt sig_ty
; sel_name <- disambiguateSelector lbl sig_tc_ty
; go (L loc (HsRecFld (Unambiguous lbl sel_name))) args }
go fun args
= do { -- Type-check the function
; (fun1, fun_sigma) <- tcInferFun fun
; let orig = exprCtOrigin (unLoc fun)
; (wrap_fun, args1, actual_res_ty)
<- tcArgs fun fun_sigma orig args
(m_herald `orElse` mk_app_msg fun)
-- this is just like tcWrapResult, but the types don't line
-- up to call that function
; wrap_res <- addFunResCtxt True (unLoc fun) actual_res_ty res_ty $
tcSubTypeDS_NC_O orig GenSigCtxt
(Just $ foldl mk_hs_app fun args)
actual_res_ty res_ty
; return (wrap_res, mkLHsWrap wrap_fun fun1, args1) }
mk_hs_app f (Left a) = mkHsApp f a
mk_hs_app f (Right a) = mkHsAppType f a
mk_app_msg :: LHsExpr Name -> SDoc
mk_app_msg fun = sep [ text "The function" <+> quotes (ppr fun)
, text "is applied to"]
mk_op_msg :: LHsExpr Name -> SDoc
mk_op_msg op = text "The operator" <+> quotes (ppr op) <+> text "takes"
----------------
tcInferFun :: LHsExpr Name -> TcM (LHsExpr TcId, TcSigmaType)
-- Infer type of a function
tcInferFun (L loc (HsVar (L _ name)))
= do { (fun, ty) <- setSrcSpan loc (tcInferId name)
-- Don't wrap a context around a plain Id
; return (L loc fun, ty) }
tcInferFun (L loc (HsRecFld f))
= do { (fun, ty) <- setSrcSpan loc (tcInferRecSelId f)
-- Don't wrap a context around a plain Id
; return (L loc fun, ty) }
tcInferFun fun
= do { (fun, fun_ty) <- tcInferSigma fun
-- Zonk the function type carefully, to expose any polymorphism
-- E.g. (( \(x::forall a. a->a). blah ) e)
-- We can see the rank-2 type of the lambda in time to generalise e
; fun_ty' <- zonkTcType fun_ty
; return (fun, fun_ty') }
----------------
-- | Type-check the arguments to a function, possibly including visible type
-- applications
tcArgs :: LHsExpr Name -- ^ The function itself (for err msgs only)
-> TcSigmaType -- ^ the (uninstantiated) type of the function
-> CtOrigin -- ^ the origin for the function's type
-> [LHsExprArgIn] -- ^ the args
-> SDoc -- ^ the herald for matchActualFunTys
-> TcM (HsWrapper, [LHsExprArgOut], TcSigmaType)
-- ^ (a wrapper for the function, the tc'd args, result type)
tcArgs fun orig_fun_ty fun_orig orig_args herald
= go [] 1 orig_fun_ty orig_args
where
orig_arity = length orig_args
go _ _ fun_ty [] = return (idHsWrapper, [], fun_ty)
go acc_args n fun_ty (Right hs_ty_arg:args)
= do { (wrap1, upsilon_ty) <- topInstantiateInferred fun_orig fun_ty
-- wrap1 :: fun_ty "->" upsilon_ty
; case tcSplitForAllTy_maybe upsilon_ty of
Just (binder, inner_ty)
| Just tv <- binderVar_maybe binder ->
ASSERT2( binderVisibility binder == Specified
, (vcat [ ppr fun_ty, ppr upsilon_ty, ppr binder
, ppr inner_ty, pprTvBndr tv
, ppr (binderVisibility binder) ]) )
do { let kind = tyVarKind tv
; ty_arg <- tcHsTypeApp hs_ty_arg kind
; let insted_ty = substTyWithUnchecked [tv] [ty_arg] inner_ty
; (inner_wrap, args', res_ty)
<- go acc_args (n+1) insted_ty args
-- inner_wrap :: insted_ty "->" (map typeOf args') -> res_ty
; let inst_wrap = mkWpTyApps [ty_arg]
; return ( inner_wrap <.> inst_wrap <.> wrap1
, Right hs_ty_arg : args'
, res_ty ) }
_ -> ty_app_err upsilon_ty hs_ty_arg }
go acc_args n fun_ty (Left arg : args)
= do { (wrap, [arg_ty], res_ty)
<- matchActualFunTysPart herald fun_orig (Just fun) 1 fun_ty
acc_args orig_arity
-- wrap :: fun_ty "->" arg_ty -> res_ty
; arg' <- tcArg fun arg arg_ty n
; (inner_wrap, args', inner_res_ty)
<- go (arg_ty : acc_args) (n+1) res_ty args
-- inner_wrap :: res_ty "->" (map typeOf args') -> inner_res_ty
; return ( mkWpFun idHsWrapper inner_wrap arg_ty res_ty <.> wrap
, Left arg' : args'
, inner_res_ty ) }
ty_app_err ty arg
= do { (_, ty) <- zonkTidyTcType emptyTidyEnv ty
; failWith $
text "Cannot apply expression of type" <+> quotes (ppr ty) $$
text "to a visible type argument" <+> quotes (ppr arg) }
----------------
tcArg :: LHsExpr Name -- The function (for error messages)
-> LHsExpr Name -- Actual arguments
-> TcRhoType -- expected arg type
-> Int -- # of argument
-> TcM (LHsExpr TcId) -- Resulting argument
tcArg fun arg ty arg_no = addErrCtxt (funAppCtxt fun arg arg_no) $
tcPolyExprNC arg ty
----------------
tcTupArgs :: [LHsTupArg Name] -> [TcSigmaType] -> TcM [LHsTupArg TcId]
tcTupArgs args tys
= ASSERT( equalLength args tys ) mapM go (args `zip` tys)
where
go (L l (Missing {}), arg_ty) = return (L l (Missing arg_ty))
go (L l (Present expr), arg_ty) = do { expr' <- tcPolyExpr expr arg_ty
; return (L l (Present expr')) }
---------------------------
-- See TcType.SyntaxOpType also for commentary
tcSyntaxOp :: CtOrigin
-> SyntaxExpr Name
-> [SyntaxOpType] -- ^ shape of syntax operator arguments
-> ExpType -- ^ overall result type
-> ([TcSigmaType] -> TcM a) -- ^ Type check any arguments
-> TcM (a, SyntaxExpr TcId)
-- ^ Typecheck a syntax operator
-- The operator is always a variable at this stage (i.e. renamer output)
tcSyntaxOp orig expr arg_tys res_ty
= tcSyntaxOpGen orig expr arg_tys (SynType res_ty)
-- | Slightly more general version of 'tcSyntaxOp' that allows the caller
-- to specify the shape of the result of the syntax operator
tcSyntaxOpGen :: CtOrigin
-> SyntaxExpr Name
-> [SyntaxOpType]
-> SyntaxOpType
-> ([TcSigmaType] -> TcM a)
-> TcM (a, SyntaxExpr TcId)
tcSyntaxOpGen orig (SyntaxExpr { syn_expr = HsVar (L _ op) })
arg_tys res_ty thing_inside
= do { (expr, sigma) <- tcInferId op
; (result, expr_wrap, arg_wraps, res_wrap)
<- tcSynArgA orig sigma arg_tys res_ty $
thing_inside
; return (result, SyntaxExpr { syn_expr = mkHsWrap expr_wrap expr
, syn_arg_wraps = arg_wraps
, syn_res_wrap = res_wrap }) }
tcSyntaxOpGen _ other _ _ _ = pprPanic "tcSyntaxOp" (ppr other)
{-
Note [tcSynArg]
~~~~~~~~~~~~~~~
Because of the rich structure of SyntaxOpType, we must do the
contra-/covariant thing when working down arrows, to get the
instantiation vs. skolemisation decisions correct (and, more
obviously, the orientation of the HsWrappers). We thus have
two tcSynArgs.
-}
-- works on "expected" types, skolemising where necessary
-- See Note [tcSynArg]
tcSynArgE :: CtOrigin
-> TcSigmaType
-> SyntaxOpType -- ^ shape it is expected to have
-> ([TcSigmaType] -> TcM a) -- ^ check the arguments
-> TcM (a, HsWrapper)
-- ^ returns a wrapper :: (type of right shape) "->" (type passed in)
tcSynArgE orig sigma_ty syn_ty thing_inside
= do { (skol_wrap, (result, ty_wrapper))
<- tcSkolemise GenSigCtxt sigma_ty $ \ _ rho_ty ->
go rho_ty syn_ty
; return (result, skol_wrap <.> ty_wrapper) }
where
go rho_ty SynAny
= do { result <- thing_inside [rho_ty]
; return (result, idHsWrapper) }
go rho_ty SynRho -- same as SynAny, because we skolemise eagerly
= do { result <- thing_inside [rho_ty]
; return (result, idHsWrapper) }
go rho_ty SynList
= do { (list_co, elt_ty) <- matchExpectedListTy rho_ty
; result <- thing_inside [elt_ty]
; return (result, mkWpCastN list_co) }
go rho_ty (SynFun arg_shape res_shape)
= do { ( ( ( (result, arg_ty, res_ty)
, res_wrapper ) -- :: res_ty_out "->" res_ty
, arg_wrapper1, [], arg_wrapper2 ) -- :: arg_ty "->" arg_ty_out
, match_wrapper ) -- :: (arg_ty -> res_ty) "->" rho_ty
<- matchExpectedFunTys herald 1 (mkCheckExpType rho_ty) $
\ [arg_ty] res_ty ->
do { arg_tc_ty <- expTypeToType arg_ty
; res_tc_ty <- expTypeToType res_ty
-- another nested arrow is too much for now,
-- but I bet we'll never need this
; MASSERT2( case arg_shape of
SynFun {} -> False;
_ -> True
, text "Too many nested arrows in SyntaxOpType" $$
pprCtOrigin orig )
; tcSynArgA orig arg_tc_ty [] arg_shape $
\ arg_results ->
tcSynArgE orig res_tc_ty res_shape $
\ res_results ->
do { result <- thing_inside (arg_results ++ res_results)
; return (result, arg_tc_ty, res_tc_ty) }}
; return ( result
, match_wrapper <.>
mkWpFun (arg_wrapper2 <.> arg_wrapper1) res_wrapper
arg_ty res_ty ) }
where
herald = text "This rebindable syntax expects a function with"
go rho_ty (SynType the_ty)
= do { wrap <- tcSubTypeET orig the_ty rho_ty
; result <- thing_inside []
; return (result, wrap) }
-- works on "actual" types, instantiating where necessary
-- See Note [tcSynArg]
tcSynArgA :: CtOrigin
-> TcSigmaType
-> [SyntaxOpType] -- ^ argument shapes
-> SyntaxOpType -- ^ result shape
-> ([TcSigmaType] -> TcM a) -- ^ check the arguments
-> TcM (a, HsWrapper, [HsWrapper], HsWrapper)
-- ^ returns a wrapper to be applied to the original function,
-- wrappers to be applied to arguments
-- and a wrapper to be applied to the overall expression
tcSynArgA orig sigma_ty arg_shapes res_shape thing_inside
= do { (match_wrapper, arg_tys, res_ty)
<- matchActualFunTys herald orig noThing (length arg_shapes) sigma_ty
-- match_wrapper :: sigma_ty "->" (arg_tys -> res_ty)
; ((result, res_wrapper), arg_wrappers)
<- tc_syn_args_e arg_tys arg_shapes $ \ arg_results ->
tc_syn_arg res_ty res_shape $ \ res_results ->
thing_inside (arg_results ++ res_results)
; return (result, match_wrapper, arg_wrappers, res_wrapper) }
where
herald = text "This rebindable syntax expects a function with"
tc_syn_args_e :: [TcSigmaType] -> [SyntaxOpType]
-> ([TcSigmaType] -> TcM a)
-> TcM (a, [HsWrapper])
-- the wrappers are for arguments
tc_syn_args_e (arg_ty : arg_tys) (arg_shape : arg_shapes) thing_inside
= do { ((result, arg_wraps), arg_wrap)
<- tcSynArgE orig arg_ty arg_shape $ \ arg1_results ->
tc_syn_args_e arg_tys arg_shapes $ \ args_results ->
thing_inside (arg1_results ++ args_results)
; return (result, arg_wrap : arg_wraps) }
tc_syn_args_e _ _ thing_inside = (, []) <$> thing_inside []
tc_syn_arg :: TcSigmaType -> SyntaxOpType
-> ([TcSigmaType] -> TcM a)
-> TcM (a, HsWrapper)
-- the wrapper applies to the overall result
tc_syn_arg res_ty SynAny thing_inside
= do { result <- thing_inside [res_ty]
; return (result, idHsWrapper) }
tc_syn_arg res_ty SynRho thing_inside
= do { (inst_wrap, rho_ty) <- deeplyInstantiate orig res_ty
-- inst_wrap :: res_ty "->" rho_ty
; result <- thing_inside [rho_ty]
; return (result, inst_wrap) }
tc_syn_arg res_ty SynList thing_inside
= do { (inst_wrap, rho_ty) <- topInstantiate orig res_ty
-- inst_wrap :: res_ty "->" rho_ty
; (list_co, elt_ty) <- matchExpectedListTy rho_ty
-- list_co :: [elt_ty] ~N rho_ty
; result <- thing_inside [elt_ty]
; return (result, mkWpCastN (mkTcSymCo list_co) <.> inst_wrap) }
tc_syn_arg _ (SynFun {}) _
= pprPanic "tcSynArgA hits a SynFun" (ppr orig)
tc_syn_arg res_ty (SynType the_ty) thing_inside
= do { wrap <- tcSubTypeO orig GenSigCtxt res_ty the_ty
; result <- thing_inside []
; return (result, wrap) }
{-
Note [Push result type in]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Unify with expected result before type-checking the args so that the
info from res_ty percolates to args. This is when we might detect a
too-few args situation. (One can think of cases when the opposite
order would give a better error message.)
We are experimenting with putting this unification first.
Here's an example where it actually makes a real difference
class C t a b | t a -> b
instance C Char a Bool
data P t a = forall b. (C t a b) => MkP b
data Q t = MkQ (forall a. P t a)
f1, f2 :: Q Char;
f1 = MkQ (MkP True)
f2 = MkQ (MkP True :: forall a. P Char a)
With the change, f1 will type-check, because the 'Char' info from
the signature is propagated into MkQ's argument. With the check
in the other order, the extra signature in f2 is required.
************************************************************************
* *
Expressions with a type signature
expr :: type
* *
********************************************************************* -}
tcExprSig :: LHsExpr Name -> TcIdSigInfo -> TcM (LHsExpr TcId, TcType)
tcExprSig expr sig@(TISI { sig_bndr = s_bndr
, sig_skols = skol_prs
, sig_theta = theta
, sig_tau = tau })
| null skol_prs -- Fast path when there is no quantification at all
, null theta
, CompleteSig {} <- s_bndr
= do { expr' <- tcPolyExprNC expr tau
; return (expr', tau) }
| CompleteSig poly_id <- s_bndr
= do { given <- newEvVars theta
; (ev_binds, expr') <- checkConstraints skol_info skol_tvs given $
tcExtendTyVarEnvFromSig sig $
tcPolyExprNC expr tau
; let poly_wrap = mkWpTyLams skol_tvs
<.> mkWpLams given
<.> mkWpLet ev_binds
; return (mkLHsWrap poly_wrap expr', idType poly_id) }
| PartialSig { sig_name = name } <- s_bndr
= do { (tclvl, wanted, expr') <- pushLevelAndCaptureConstraints $
tcExtendTyVarEnvFromSig sig $
tcPolyExprNC expr tau
; (qtvs, givens, ev_binds)
<- simplifyInfer tclvl False [sig] [(name, tau)] wanted
; tau <- zonkTcType tau
; let inferred_theta = map evVarPred givens
tau_tvs = tyCoVarsOfType tau
; (binders, my_theta) <- chooseInferredQuantifiers inferred_theta
tau_tvs qtvs (Just sig)
; let inferred_sigma = mkInvSigmaTy qtvs inferred_theta tau
my_sigma = mkForAllTys binders (mkPhiTy my_theta tau)
; wrap <- if inferred_sigma `eqType` my_sigma -- NB: eqType ignores vis.
then return idHsWrapper -- Fast path; also avoids complaint when we infer
-- an ambiguous type and have AllowAmbiguousTypes
-- e.g. infer  x :: forall a. F a -> Int
else tcSubType_NC ExprSigCtxt inferred_sigma
(mkCheckExpType my_sigma)
; traceTc "tcExpSig" (ppr qtvs $$ ppr givens $$ ppr inferred_sigma $$ ppr my_sigma)
; let poly_wrap = wrap
<.> mkWpTyLams qtvs
<.> mkWpLams givens
<.> mkWpLet ev_binds
; return (mkLHsWrap poly_wrap expr', my_sigma) }
| otherwise = panic "tcExprSig" -- Can't happen
where
skol_info = SigSkol ExprSigCtxt (mkPhiTy theta tau)
skol_tvs = map snd skol_prs
{- *********************************************************************
* *
tcInferId
* *
********************************************************************* -}
tcCheckId :: Name -> ExpRhoType -> TcM (HsExpr TcId)
tcCheckId name res_ty
= do { (expr, actual_res_ty) <- tcInferId name
; traceTc "tcCheckId" (vcat [ppr name, ppr actual_res_ty, ppr res_ty])
; addFunResCtxt False (HsVar (noLoc name)) actual_res_ty res_ty $
tcWrapResultO (OccurrenceOf name) expr actual_res_ty res_ty }
tcCheckRecSelId :: AmbiguousFieldOcc Name -> ExpRhoType -> TcM (HsExpr TcId)
tcCheckRecSelId f@(Unambiguous (L _ lbl) _) res_ty
= do { (expr, actual_res_ty) <- tcInferRecSelId f
; addFunResCtxt False (HsRecFld f) actual_res_ty res_ty $
tcWrapResultO (OccurrenceOfRecSel lbl) expr actual_res_ty res_ty }
tcCheckRecSelId (Ambiguous lbl _) res_ty
= case tcSplitFunTy_maybe =<< checkingExpType_maybe res_ty of
Nothing -> ambiguousSelector lbl
Just (arg, _) -> do { sel_name <- disambiguateSelector lbl arg
; tcCheckRecSelId (Unambiguous lbl sel_name) res_ty }
------------------------
tcInferRecSelId :: AmbiguousFieldOcc Name -> TcM (HsExpr TcId, TcRhoType)
tcInferRecSelId (Unambiguous (L _ lbl) sel)
= do { (expr', ty) <- tc_infer_id lbl sel
; return (expr', ty) }
tcInferRecSelId (Ambiguous lbl _)
= ambiguousSelector lbl
------------------------
tcInferId :: Name -> TcM (HsExpr TcId, TcSigmaType)
-- Look up an occurrence of an Id
tcInferId id_name
| id_name `hasKey` tagToEnumKey
= failWithTc (text "tagToEnum# must appear applied to one argument")
-- tcApp catches the case (tagToEnum# arg)
| id_name `hasKey` assertIdKey
= do { dflags <- getDynFlags
; if gopt Opt_IgnoreAsserts dflags
then tc_infer_id (nameRdrName id_name) id_name
else tc_infer_assert id_name }
| otherwise
= do { (expr, ty) <- tc_infer_id (nameRdrName id_name) id_name
; traceTc "tcInferId" (ppr id_name <+> dcolon <+> ppr ty)
; return (expr, ty) }
tc_infer_assert :: Name -> TcM (HsExpr TcId, TcSigmaType)
-- Deal with an occurrence of 'assert'
-- See Note [Adding the implicit parameter to 'assert']
tc_infer_assert assert_name
= do { assert_error_id <- tcLookupId assertErrorName
; (wrap, id_rho) <- topInstantiate (OccurrenceOf assert_name)
(idType assert_error_id)
; return (mkHsWrap wrap (HsVar (noLoc assert_error_id)), id_rho)
}
tc_infer_id :: RdrName -> Name -> TcM (HsExpr TcId, TcSigmaType)
tc_infer_id lbl id_name
= do { thing <- tcLookup id_name
; case thing of
ATcId { tct_id = id }
-> do { check_naughty id -- Note [Local record selectors]
; checkThLocalId id
; return_id id }
AGlobal (AnId id)
-> do { check_naughty id
; return_id id }
-- A global cannot possibly be ill-staged
-- nor does it need the 'lifting' treatment
-- hence no checkTh stuff here
AGlobal (AConLike cl) -> case cl of
RealDataCon con -> return_data_con con
PatSynCon ps -> tcPatSynBuilderOcc ps
_ -> failWithTc $
ppr thing <+> text "used where a value identifier was expected" }
where
return_id id = return (HsVar (noLoc id), idType id)
return_data_con con
-- For data constructors, must perform the stupid-theta check
| null stupid_theta
= return_id con_wrapper_id
| otherwise
-- See Note [Instantiating stupid theta]
= do { let (tvs, theta, rho) = tcSplitSigmaTy (idType con_wrapper_id)
; (subst, tvs') <- newMetaTyVars tvs
; let tys' = mkTyVarTys tvs'
theta' = substTheta subst theta
rho' = substTy subst rho
; wrap <- instCall (OccurrenceOf id_name) tys' theta'
; addDataConStupidTheta con tys'
; return (mkHsWrap wrap (HsVar (noLoc con_wrapper_id)), rho') }
where
con_wrapper_id = dataConWrapId con
stupid_theta = dataConStupidTheta con
check_naughty id
| isNaughtyRecordSelector id = failWithTc (naughtyRecordSel lbl)
| otherwise = return ()
tcUnboundId :: UnboundVar -> ExpRhoType -> TcM (HsExpr TcId)
-- Typecheck an occurrence of an unbound Id
--
-- Some of these started life as a true hole "_". Others might simply
-- be variables that accidentally have no binding site
--
-- We turn all of them into HsVar, since HsUnboundVar can't contain an
-- Id; and indeed the evidence for the CHoleCan does bind it, so it's
-- not unbound any more!
tcUnboundId unbound res_ty
= do { ty <- newFlexiTyVarTy liftedTypeKind
; let occ = unboundVarOcc unbound
; name <- newSysName occ
; let ev = mkLocalId name ty
; loc <- getCtLocM HoleOrigin Nothing
; let can = CHoleCan { cc_ev = CtWanted { ctev_pred = ty
, ctev_dest = EvVarDest ev
, ctev_loc = loc}
, cc_hole = ExprHole unbound }
; emitInsoluble can
; tcWrapResultO (UnboundOccurrenceOf occ) (HsVar (noLoc ev)) ty res_ty }
{-
Note [Adding the implicit parameter to 'assert']
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The typechecker transforms (assert e1 e2) to (assertError e1 e2).
This isn't really the Right Thing because there's no way to "undo"
if you want to see the original source code in the typechecker
output. We'll have to fix this in due course, when we care more about
being able to reconstruct the exact original program.
Note [tagToEnum#]
~~~~~~~~~~~~~~~~~
Nasty check to ensure that tagToEnum# is applied to a type that is an
enumeration TyCon. Unification may refine the type later, but this
check won't see that, alas. It's crude, because it relies on our
knowing *now* that the type is ok, which in turn relies on the
eager-unification part of the type checker pushing enough information
here. In theory the Right Thing to do is to have a new form of
constraint but I definitely cannot face that! And it works ok as-is.
Here are two cases that should fail
f :: forall a. a
f = tagToEnum# 0 -- Can't do tagToEnum# at a type variable
g :: Int
g = tagToEnum# 0 -- Int is not an enumeration
When data type families are involved it's a bit more complicated.
data family F a
data instance F [Int] = A | B | C
Then we want to generate something like
tagToEnum# R:FListInt 3# |> co :: R:FListInt ~ F [Int]
Usually that coercion is hidden inside the wrappers for
constructors of F [Int] but here we have to do it explicitly.
It's all grotesquely complicated.
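For the record, a well-behaved call looks something like this (an
illustrative sketch, not part of this module):

    {-# LANGUAGE MagicHash #-}
    import GHC.Exts (Int#, tagToEnum#)

    data Colour = Red | Green | Blue    -- a genuine enumeration TyCon

    toColour :: Int# -> Colour
    toColour i = tagToEnum# i   -- res_ty is Colour, so the check succeeds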
Note [Instantiating stupid theta]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Normally, when we infer the type of an Id, we don't instantiate,
because we wish to allow for visible type application later on.
But if a datacon has a stupid theta, we're a bit stuck. We need
to emit the stupid theta constraints with instantiated types. It's
difficult to defer this to the lazy instantiation, because a stupid
theta has no spot to put it in a type. So we just instantiate eagerly
in this case. Thus, users cannot use visible type application with
a data constructor sporting a stupid theta. I won't feel so bad for
the users that complain.
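For example (an illustrative sketch, assuming -XDatatypeContexts):

    data Eq a => D a = MkD a

MkD carries the stupid theta (Eq a), so tcInferId instantiates MkD's type
eagerly and emits the Eq constraint at the instantiated type; consequently
a visible type application such as (MkD @Int) is not available for MkD.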
-}
tcSeq :: SrcSpan -> Name -> [LHsExprArgIn]
-> ExpRhoType -> TcM (HsWrapper, LHsExpr TcId, [LHsExprArgOut])
-- (seq e1 e2) :: res_ty
-- We need a special typing rule because res_ty can be unboxed
-- See Note [Typing rule for seq]
tcSeq loc fun_name args res_ty
= do { fun <- tcLookupId fun_name
; (arg1_ty, args1) <- case args of
(Right hs_ty_arg1 : args1)
-> do { ty_arg1 <- tcHsTypeApp hs_ty_arg1 liftedTypeKind
; return (ty_arg1, args1) }
_ -> do { arg_ty1 <- newFlexiTyVarTy liftedTypeKind
; return (arg_ty1, args) }
; (arg1, arg2, arg2_exp_ty) <- case args1 of
[Right hs_ty_arg2, Left term_arg1, Left term_arg2]
-> do { rr_ty <- newFlexiTyVarTy runtimeRepTy
; ty_arg2 <- tcHsTypeApp hs_ty_arg2 (tYPE rr_ty)
-- see Note [Typing rule for seq]
; _ <- tcSubTypeDS GenSigCtxt noThing ty_arg2 res_ty
; return (term_arg1, term_arg2, mkCheckExpType ty_arg2) }
[Left term_arg1, Left term_arg2]
-> return (term_arg1, term_arg2, res_ty)
_ -> too_many_args "seq" args
; arg1' <- tcMonoExpr arg1 (mkCheckExpType arg1_ty)
; arg2' <- tcMonoExpr arg2 arg2_exp_ty
; res_ty <- readExpType res_ty -- by now, it's surely filled in
; let fun' = L loc (HsWrap ty_args (HsVar (L loc fun)))
ty_args = WpTyApp res_ty <.> WpTyApp arg1_ty
; return (idHsWrapper, fun', [Left arg1', Left arg2']) }
tcTagToEnum :: SrcSpan -> Name -> [LHsExprArgIn] -> ExpRhoType
-> TcM (HsWrapper, LHsExpr TcId, [LHsExprArgOut])
-- tagToEnum# :: forall a. Int# -> a
-- See Note [tagToEnum#] Urgh!
tcTagToEnum loc fun_name args res_ty
= do { fun <- tcLookupId fun_name
; arg <- case args of
[Right hs_ty_arg, Left term_arg]
-> do { ty_arg <- tcHsTypeApp hs_ty_arg liftedTypeKind
; _ <- tcSubTypeDS GenSigCtxt noThing ty_arg res_ty
-- other than influencing res_ty, we just
-- don't care about a type arg passed in.
-- So drop the evidence.
; return term_arg }
[Left term_arg] -> do { _ <- expTypeToType res_ty
; return term_arg }
_ -> too_many_args "tagToEnum#" args
; res_ty <- readExpType res_ty
; ty' <- zonkTcType res_ty
-- Check that the type is algebraic
; let mb_tc_app = tcSplitTyConApp_maybe ty'
Just (tc, tc_args) = mb_tc_app
; checkTc (isJust mb_tc_app)
(mk_error ty' doc1)
-- Look through any type family
; fam_envs <- tcGetFamInstEnvs
; let (rep_tc, rep_args, coi)
= tcLookupDataFamInst fam_envs tc tc_args
-- coi :: tc tc_args ~R rep_tc rep_args
; checkTc (isEnumerationTyCon rep_tc)
(mk_error ty' doc2)
; arg' <- tcMonoExpr arg (mkCheckExpType intPrimTy)
; let fun' = L loc (HsWrap (WpTyApp rep_ty) (HsVar (L loc fun)))
rep_ty = mkTyConApp rep_tc rep_args
; return (mkWpCastR (mkTcSymCo coi), fun', [Left arg']) }
-- coi is a Representational coercion
where
doc1 = vcat [ text "Specify the type by giving a type signature"
, text "e.g. (tagToEnum# x) :: Bool" ]
doc2 = text "Result type must be an enumeration type"
mk_error :: TcType -> SDoc -> SDoc
mk_error ty what
= hang (text "Bad call to tagToEnum#"
<+> text "at type" <+> ppr ty)
2 what
too_many_args :: String -> [LHsExprArgIn] -> TcM a
too_many_args fun args
= failWith $
hang (text "Too many type arguments to" <+> text fun <> colon)
2 (sep (map pp args))
where
pp (Left e) = pprParendLExpr e
pp (Right (HsWC { hswc_body = L _ t })) = pprParendHsType t
{-
************************************************************************
* *
Template Haskell checks
* *
************************************************************************
-}
checkThLocalId :: Id -> TcM ()
checkThLocalId id
= do { mb_local_use <- getStageAndBindLevel (idName id)
; case mb_local_use of
Just (top_lvl, bind_lvl, use_stage)
| thLevel use_stage > bind_lvl
, isNotTopLevel top_lvl
-> checkCrossStageLifting id use_stage
_ -> return () -- Not a locally-bound thing, or
-- no cross-stage link
}
--------------------------------------
checkCrossStageLifting :: Id -> ThStage -> TcM ()
-- If we are inside typed brackets, and (use_lvl > bind_lvl)
-- we must check whether there's a cross-stage lift to do
-- Examples \x -> [|| x ||]
-- [|| map ||]
-- There is no error-checking to do, because the renamer did that
--
-- This is similar to checkCrossStageLifting in RnSplice, but
-- this code is applied to *typed* brackets.
checkCrossStageLifting id (Brack _ (TcPending ps_var lie_var))
= -- Nested identifiers, such as 'x' in
-- E.g. \x -> [|| h x ||]
-- We must behave as if the reference to x was
-- h $(lift x)
-- We use 'x' itself as the splice proxy, used by
-- the desugarer to stitch it all back together.
-- If 'x' occurs many times we may get many identical
-- bindings of the same splice proxy, but that doesn't
-- matter, although it's a mite untidy.
do { let id_ty = idType id
; checkTc (isTauTy id_ty) (polySpliceErr id)
-- If x is polymorphic, its occurrence sites might
-- have different instantiations, so we can't use plain
-- 'x' as the splice proxy name. I don't know how to
-- solve this, and it's probably unimportant, so I'm
-- just going to flag an error for now
; lift <- if isStringTy id_ty then
do { sid <- tcLookupId THNames.liftStringName
-- See Note [Lifting strings]
; return (HsVar (noLoc sid)) }
else
setConstraintVar lie_var $
-- Put the 'lift' constraint into the right LIE
newMethodFromName (OccurrenceOf (idName id))
THNames.liftName id_ty
-- Update the pending splices
; ps <- readMutVar ps_var
; let pending_splice = PendingTcSplice (idName id) (nlHsApp (noLoc lift) (nlHsVar id))
; writeMutVar ps_var (pending_splice : ps)
; return () }
checkCrossStageLifting _ _ = return ()
polySpliceErr :: Id -> SDoc
polySpliceErr id
= text "Can't splice the polymorphic local variable" <+> quotes (ppr id)
{-
Note [Lifting strings]
~~~~~~~~~~~~~~~~~~~~~~
If we see $(... [| s |] ...) where s::String, we don't want to
generate a mass of Cons (CharL 'x') (Cons (CharL 'y') ...)) etc.
So this conditional short-circuits the lifting mechanism to generate
(liftString "xy") in that case. I didn't want to use overlapping instances
for the Lift class in TH.Syntax, because that can lead to overlapping-instance
errors in a polymorphic situation.
If this check fails (which isn't impossible) we get another chance; see
Note [Converting strings] in Convert.hs
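Concretely (an illustrative sketch, not from this module), in

    {-# LANGUAGE TemplateHaskell #-}
    import Language.Haskell.TH.Syntax (Q, TExp)

    greet :: String -> Q (TExp String)
    greet s = [|| s ++ "!" ||]

the cross-stage occurrence of s has type String, so the pending splice is
built with liftString rather than the generic 'lift' method.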
Local record selectors
~~~~~~~~~~~~~~~~~~~~~~
Record selectors for TyCons in this module are ordinary local bindings,
which show up as ATcIds rather than AGlobals. So we need to check for
naughtiness in both branches. c.f. TcTyClsBindings.mkAuxBinds.
************************************************************************
* *
\subsection{Record bindings}
* *
************************************************************************
-}
getFixedTyVars :: [FieldLabelString] -> [TyVar] -> [ConLike] -> TyVarSet
-- These tyvars must not change across the updates
getFixedTyVars upd_fld_occs univ_tvs cons
= mkVarSet [tv1 | con <- cons
, let (u_tvs, _, eqspec, prov_theta
, req_theta, arg_tys, _)
= conLikeFullSig con
theta = eqSpecPreds eqspec
++ prov_theta
++ req_theta
flds = conLikeFieldLabels con
fixed_tvs = exactTyCoVarsOfTypes fixed_tys
-- fixed_tys: See Note [Type of a record update]
`unionVarSet` tyCoVarsOfTypes theta
-- Universally-quantified tyvars that
-- appear in any of the *implicit*
-- arguments to the constructor are fixed
-- See Note [Implicit type sharing]
fixed_tys = [ty | (fl, ty) <- zip flds arg_tys
, not (flLabel fl `elem` upd_fld_occs)]
, (tv1,tv) <- univ_tvs `zip` u_tvs
, tv `elemVarSet` fixed_tvs ]
{-
Note [Disambiguating record fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When the -XDuplicateRecordFields extension is used, and the renamer
encounters a record selector or update that it cannot immediately
disambiguate (because it involves fields that belong to multiple
datatypes), it will defer resolution of the ambiguity to the
typechecker. In this case, the `Ambiguous` constructor of
`AmbiguousFieldOcc` is used.
Consider the following definitions:
data S = MkS { foo :: Int }
data T = MkT { foo :: Int, bar :: Int }
data U = MkU { bar :: Int, baz :: Int }
When the renamer sees `foo` as a selector or an update, it will not
know which parent datatype is in use.
For selectors, there are two possible ways to disambiguate:
1. Check if the pushed-in type is a function whose domain is a
datatype, for example:
f s = (foo :: S -> Int) s
g :: T -> Int
g = foo
This is checked by `tcCheckRecSelId` when checking `HsRecFld foo`.
2. Check if the selector is applied to an argument that has a type
signature, for example:
h = foo (s :: S)
This is checked by `tcApp`.
Updates are slightly more complex. The `disambiguateRecordBinds`
function tries to determine the parent datatype in three ways:
1. Check for types that have all the fields being updated. For example:
f x = x { foo = 3, bar = 2 }
Here `f` must be updating `T` because neither `S` nor `U` have
both fields. This may also discover that no possible type exists.
For example the following will be rejected:
f' x = x { foo = 3, baz = 3 }
2. Use the type being pushed in, if it is already a TyConApp. The
following are valid updates to `T`:
g :: T -> T
g x = x { foo = 3 }
g' x = x { foo = 3 } :: T
3. Use the type signature of the record expression, if it exists and
is a TyConApp. Thus this is valid update to `T`:
h x = (x :: T) { foo = 3 }
Note that we do not look up the types of variables being updated, and
no constraint-solving is performed, so for example the following will
be rejected as ambiguous:
let bad (s :: S) = foo s
let r :: T
r = blah
in r { foo = 3 }
\r. (r { foo = 3 }, r :: T )
We could add further tests, of a more heuristic nature. For example,
rather than looking for an explicit signature, we could try to infer
the type of the argument to a selector or the record expression being
updated, in case we are lucky enough to get a TyConApp straight
away. However, it might be hard for programmers to predict whether a
particular update is sufficiently obvious for the signature to be
omitted. Moreover, this might change the behaviour of the typechecker in
non-obvious ways.
See also Note [HsRecField and HsRecUpdField] in HsPat.
-}
-- Given a RdrName that refers to multiple record fields, and the type
-- of its argument, try to determine the name of the selector that is
-- meant.
disambiguateSelector :: Located RdrName -> Type -> TcM Name
disambiguateSelector lr@(L _ rdr) parent_type
= do { fam_inst_envs <- tcGetFamInstEnvs
; case tyConOf fam_inst_envs parent_type of
Nothing -> ambiguousSelector lr
Just p ->
do { xs <- lookupParents rdr
; let parent = RecSelData p
; case lookup parent xs of
Just gre -> do { addUsedGRE True gre
; return (gre_name gre) }
Nothing -> failWithTc (fieldNotInType parent rdr) } }
-- This field name really is ambiguous, so add a suitable "ambiguous
-- occurrence" error, then give up.
ambiguousSelector :: Located RdrName -> TcM a
ambiguousSelector (L _ rdr)
= do { env <- getGlobalRdrEnv
; let gres = lookupGRE_RdrName rdr env
; setErrCtxt [] $ addNameClashErrRn rdr gres
; failM }
-- Disambiguate the fields in a record update.
-- See Note [Disambiguating record fields]
disambiguateRecordBinds :: LHsExpr Name -> TcRhoType
-> [LHsRecUpdField Name] -> ExpRhoType
-> TcM [LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name)]
disambiguateRecordBinds record_expr record_rho rbnds res_ty
-- Are all the fields unambiguous?
= case mapM isUnambiguous rbnds of
-- If so, just skip to looking up the Ids
-- Always the case if DuplicateRecordFields is off
Just rbnds' -> mapM lookupSelector rbnds'
Nothing -> -- If not, try to identify a single parent
do { fam_inst_envs <- tcGetFamInstEnvs
-- Look up the possible parents for each field
; rbnds_with_parents <- getUpdFieldsParents
; let possible_parents = map (map fst . snd) rbnds_with_parents
-- Identify a single parent
; p <- identifyParent fam_inst_envs possible_parents
-- Pick the right selector with that parent for each field
; checkNoErrs $ mapM (pickParent p) rbnds_with_parents }
where
-- Extract the selector name of a field update if it is unambiguous
isUnambiguous :: LHsRecUpdField Name -> Maybe (LHsRecUpdField Name, Name)
isUnambiguous x = case unLoc (hsRecFieldLbl (unLoc x)) of
Unambiguous _ sel_name -> Just (x, sel_name)
Ambiguous{} -> Nothing
-- Look up the possible parents and selector GREs for each field
getUpdFieldsParents :: TcM [(LHsRecUpdField Name
, [(RecSelParent, GlobalRdrElt)])]
getUpdFieldsParents
= fmap (zip rbnds) $ mapM
(lookupParents . unLoc . hsRecUpdFieldRdr . unLoc)
rbnds
-- Given the lists of possible parents for each field,
-- identify a single parent
identifyParent :: FamInstEnvs -> [[RecSelParent]] -> TcM RecSelParent
identifyParent fam_inst_envs possible_parents
= case foldr1 intersect possible_parents of
-- No parents for all fields: record update is ill-typed
[] -> failWithTc (noPossibleParents rbnds)
-- Exactly one datatype with all the fields: use that
[p] -> return p
-- Multiple possible parents: try harder to disambiguate
-- Can we get a parent TyCon from the pushed-in type?
_:_ | Just p <- tyConOfET fam_inst_envs res_ty -> return (RecSelData p)
-- Does the expression being updated have a type signature?
-- If so, try to extract a parent TyCon from it
| Just {} <- obviousSig (unLoc record_expr)
, Just tc <- tyConOf fam_inst_envs record_rho
-> return (RecSelData tc)
-- Nothing else we can try...
_ -> failWithTc badOverloadedUpdate
-- Make a field unambiguous by choosing the given parent.
-- Emits an error if the field cannot have that parent,
-- e.g. if the user writes
-- r { x = e } :: T
-- where T does not have field x.
pickParent :: RecSelParent
-> (LHsRecUpdField Name, [(RecSelParent, GlobalRdrElt)])
-> TcM (LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name))
pickParent p (upd, xs)
= case lookup p xs of
-- Phew! The parent is valid for this field.
-- Previously ambiguous fields must be marked as
-- used now that we know which one is meant, but
-- unambiguous ones shouldn't be recorded again
-- (giving duplicate deprecation warnings).
Just gre -> do { unless (null (tail xs)) $ do
let L loc _ = hsRecFieldLbl (unLoc upd)
setSrcSpan loc $ addUsedGRE True gre
; lookupSelector (upd, gre_name gre) }
-- The field doesn't belong to this parent, so report
-- an error but keep going through all the fields
Nothing -> do { addErrTc (fieldNotInType p
(unLoc (hsRecUpdFieldRdr (unLoc upd))))
; lookupSelector (upd, gre_name (snd (head xs))) }
-- Given a (field update, selector name) pair, look up the
-- selector to give a field update with an unambiguous Id
lookupSelector :: (LHsRecUpdField Name, Name)
-> TcM (LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name))
lookupSelector (L l upd, n)
= do { i <- tcLookupId n
; let L loc af = hsRecFieldLbl upd
lbl = rdrNameAmbiguousFieldOcc af
; return $ L l upd { hsRecFieldLbl
= L loc (Unambiguous (L loc lbl) i) } }
-- Extract the outermost TyCon of a type, if there is one; for
-- data families this is the representation tycon (because that's
-- where the fields live).
tyConOf :: FamInstEnvs -> TcSigmaType -> Maybe TyCon
tyConOf fam_inst_envs ty0
= case tcSplitTyConApp_maybe ty of
Just (tc, tys) -> Just (fstOf3 (tcLookupDataFamInst fam_inst_envs tc tys))
Nothing -> Nothing
where
(_, _, ty) = tcSplitSigmaTy ty0
-- Variant of tyConOf that works for ExpTypes
tyConOfET :: FamInstEnvs -> ExpRhoType -> Maybe TyCon
tyConOfET fam_inst_envs ty0 = tyConOf fam_inst_envs =<< checkingExpType_maybe ty0
-- For an ambiguous record field, find all the candidate record
-- selectors (as GlobalRdrElts) and their parents.
lookupParents :: RdrName -> RnM [(RecSelParent, GlobalRdrElt)]
lookupParents rdr
= do { env <- getGlobalRdrEnv
; let gres = lookupGRE_RdrName rdr env
; mapM lookupParent gres }
where
lookupParent :: GlobalRdrElt -> RnM (RecSelParent, GlobalRdrElt)
lookupParent gre = do { id <- tcLookupId (gre_name gre)
; if isRecordSelector id
then return (recordSelectorTyCon id, gre)
else failWithTc (notSelector (gre_name gre)) }
-- A type signature on the argument of an ambiguous record selector or
-- the record expression in an update must be "obvious", i.e. the
-- outermost constructor ignoring parentheses.
obviousSig :: HsExpr Name -> Maybe (LHsSigWcType Name)
obviousSig (ExprWithTySig _ ty) = Just ty
obviousSig (HsPar p) = obviousSig (unLoc p)
obviousSig _ = Nothing
{-
Game plan for record bindings
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1. Find the TyCon for the bindings, from the first field label.
2. Instantiate its tyvars and unify (T a1 .. an) with expected_ty.
For each binding field = value
3. Instantiate the field type (from the field label) using the type
envt from step 2.
4 Type check the value using tcArg, passing the field type as
the expected argument type.
This extends OK when the field types are universally quantified.
-}
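-- A hedged worked example of the game plan above, using a made-up record
-- type (not from GHC's sources):
--
--   data Pair a = MkPair { first :: a, second :: a }
--   p = MkPair { first = 1, second = 2 } :: Pair Int
--
-- 1. The field label `first` leads to the TyCon `Pair`.
-- 2. Instantiating gives (Pair alpha); unifying with the expected type
--    Pair Int fixes alpha := Int.
-- 3. The field type of `first` (namely `a`) instantiates to Int, and
--    likewise for `second`.
-- 4. The arguments `1` and `2` are then checked against Int.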
tcRecordBinds
:: ConLike
-> [TcType] -- Expected type for each field
-> HsRecordBinds Name
-> TcM (HsRecordBinds TcId)
tcRecordBinds con_like arg_tys (HsRecFields rbinds dd)
= do { mb_binds <- mapM do_bind rbinds
; return (HsRecFields (catMaybes mb_binds) dd) }
where
fields = map flLabel $ conLikeFieldLabels con_like
flds_w_tys = zipEqual "tcRecordBinds" fields arg_tys
do_bind :: LHsRecField Name (LHsExpr Name)
-> TcM (Maybe (LHsRecField TcId (LHsExpr TcId)))
do_bind (L l fld@(HsRecField { hsRecFieldLbl = f
, hsRecFieldArg = rhs }))
= do { mb <- tcRecordField con_like flds_w_tys f rhs
; case mb of
Nothing -> return Nothing
Just (f', rhs') -> return (Just (L l (fld { hsRecFieldLbl = f'
, hsRecFieldArg = rhs' }))) }
tcRecordUpd
:: ConLike
-> [TcType] -- Expected type for each field
-> [LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name)]
-> TcM [LHsRecUpdField TcId]
tcRecordUpd con_like arg_tys rbinds = fmap catMaybes $ mapM do_bind rbinds
where
flds_w_tys = zipEqual "tcRecordUpd" (map flLabel $ conLikeFieldLabels con_like) arg_tys
do_bind :: LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name) -> TcM (Maybe (LHsRecUpdField TcId))
do_bind (L l fld@(HsRecField { hsRecFieldLbl = L loc af
, hsRecFieldArg = rhs }))
= do { let lbl = rdrNameAmbiguousFieldOcc af
sel_id = selectorAmbiguousFieldOcc af
f = L loc (FieldOcc (L loc lbl) (idName sel_id))
; mb <- tcRecordField con_like flds_w_tys f rhs
; case mb of
Nothing -> return Nothing
Just (f', rhs') ->
return (Just
(L l (fld { hsRecFieldLbl
= L loc (Unambiguous (L loc lbl)
(selectorFieldOcc (unLoc f')))
, hsRecFieldArg = rhs' }))) }
tcRecordField :: ConLike -> Assoc FieldLabelString Type -> LFieldOcc Name -> LHsExpr Name
-> TcM (Maybe (LFieldOcc Id, LHsExpr Id))
tcRecordField con_like flds_w_tys (L loc (FieldOcc lbl sel_name)) rhs
| Just field_ty <- assocMaybe flds_w_tys field_lbl
= addErrCtxt (fieldCtxt field_lbl) $
do { rhs' <- tcPolyExprNC rhs field_ty
; let field_id = mkUserLocal (nameOccName sel_name)
(nameUnique sel_name)
field_ty loc
-- Yuk: the field_id has the *unique* of the selector Id
-- (so we can find it easily)
-- but is a LocalId with the appropriate type of the RHS
-- (so the desugarer knows the type of local binder to make)
; return (Just (L loc (FieldOcc lbl field_id), rhs')) }
| otherwise
= do { addErrTc (badFieldCon con_like field_lbl)
; return Nothing }
where
field_lbl = occNameFS $ rdrNameOcc (unLoc lbl)
checkMissingFields :: ConLike -> HsRecordBinds Name -> TcM ()
checkMissingFields con_like rbinds
| null field_labels -- Not declared as a record;
-- But C{} is still valid if no strict fields
= if any isBanged field_strs then
-- Illegal if any arg is strict
addErrTc (missingStrictFields con_like [])
else
return ()
| otherwise = do -- A record
unless (null missing_s_fields)
(addErrTc (missingStrictFields con_like missing_s_fields))
warn <- woptM Opt_WarnMissingFields
unless (not (warn && notNull missing_ns_fields))
(warnTc (Reason Opt_WarnMissingFields) True
(missingFields con_like missing_ns_fields))
where
missing_s_fields
= [ flLabel fl | (fl, str) <- field_info,
isBanged str,
not (fl `elemField` field_names_used)
]
missing_ns_fields
= [ flLabel fl | (fl, str) <- field_info,
not (isBanged str),
not (fl `elemField` field_names_used)
]
field_names_used = hsRecFields rbinds
field_labels = conLikeFieldLabels con_like
field_info = zipEqual "missingFields"
field_labels
field_strs
field_strs = conLikeImplBangs con_like
fl `elemField` flds = any (\ fl' -> flSelector fl == fl') flds
{-
************************************************************************
* *
\subsection{Errors and contexts}
* *
************************************************************************
Boring and alphabetical:
-}
addExprErrCtxt :: LHsExpr Name -> TcM a -> TcM a
addExprErrCtxt expr = addErrCtxt (exprCtxt expr)
exprCtxt :: LHsExpr Name -> SDoc
exprCtxt expr
= hang (text "In the expression:") 2 (ppr expr)
fieldCtxt :: FieldLabelString -> SDoc
fieldCtxt field_name
= text "In the" <+> quotes (ppr field_name) <+> ptext (sLit "field of a record")
addFunResCtxt :: Bool -- There is at least one argument
-> HsExpr Name -> TcType -> ExpRhoType
-> TcM a -> TcM a
-- When we have a mis-match in the return type of a function
-- try to give a helpful message about too many/few arguments
--
-- Used for naked variables too; but with has_args = False
addFunResCtxt has_args fun fun_res_ty env_ty
= addLandmarkErrCtxtM (\env -> (env, ) <$> mk_msg)
-- NB: use a landmark error context, so that an empty context
-- doesn't suppress some more useful context
where
mk_msg
= do { mb_env_ty <- readExpType_maybe env_ty
-- by the time the message is rendered, the ExpType
-- will be filled in (except if we're debugging)
; fun_res' <- zonkTcType fun_res_ty
; env' <- case mb_env_ty of
Just env_ty -> zonkTcType env_ty
Nothing ->
do { dumping <- doptM Opt_D_dump_tc_trace
; MASSERT( dumping )
; newFlexiTyVarTy liftedTypeKind }
; let (_, _, fun_tau) = tcSplitSigmaTy fun_res'
(_, _, env_tau) = tcSplitSigmaTy env'
(args_fun, res_fun) = tcSplitFunTys fun_tau
(args_env, res_env) = tcSplitFunTys env_tau
n_fun = length args_fun
n_env = length args_env
info | n_fun == n_env = Outputable.empty
| n_fun > n_env
, not_fun res_env
= text "Probable cause:" <+> quotes (ppr fun)
<+> text "is applied to too few arguments"
| has_args
, not_fun res_fun
= text "Possible cause:" <+> quotes (ppr fun)
<+> text "is applied to too many arguments"
| otherwise
= Outputable.empty -- Never suggest that a naked variable is applied to too many args!
; return info }
where
not_fun ty -- ty is definitely not an arrow type,
-- and cannot conceivably become one
= case tcSplitTyConApp_maybe ty of
Just (tc, _) -> isAlgTyCon tc
Nothing -> False
badFieldTypes :: [(FieldLabelString,TcType)] -> SDoc
badFieldTypes prs
= hang (text "Record update for insufficiently polymorphic field"
<> plural prs <> colon)
2 (vcat [ ppr f <+> dcolon <+> ppr ty | (f,ty) <- prs ])
badFieldsUpd
:: [LHsRecField' (AmbiguousFieldOcc Id) (LHsExpr Name)] -- Field names that don't belong to a single datacon
-> [ConLike] -- Data cons of the type which the first field name belongs to
-> SDoc
badFieldsUpd rbinds data_cons
= hang (text "No constructor has all these fields:")
2 (pprQuotedList conflictingFields)
-- See Note [Finding the conflicting fields]
where
-- A (preferably small) set of fields such that no constructor contains
-- all of them. See Note [Finding the conflicting fields]
conflictingFields = case nonMembers of
-- nonMember belongs to a different type.
(nonMember, _) : _ -> [aMember, nonMember]
[] -> let
-- All of rbinds belong to one type. In this case, repeatedly add
-- a field to the set until no constructor contains the set.
-- Each field, together with a list indicating which constructors
-- have all the fields so far.
growingSets :: [(FieldLabelString, [Bool])]
growingSets = scanl1 combine membership
combine (_, setMem) (field, fldMem)
= (field, zipWith (&&) setMem fldMem)
in
-- Fields that don't change the membership status of the set
-- are redundant and can be dropped.
map (fst . head) $ groupBy ((==) `on` snd) growingSets
aMember = ASSERT( not (null members) ) fst (head members)
(members, nonMembers) = partition (or . snd) membership
-- For each field, which constructors contain the field?
membership :: [(FieldLabelString, [Bool])]
membership = sortMembership $
map (\fld -> (fld, map (Set.member fld) fieldLabelSets)) $
map (occNameFS . rdrNameOcc . rdrNameAmbiguousFieldOcc . unLoc . hsRecFieldLbl . unLoc) rbinds
fieldLabelSets :: [Set.Set FieldLabelString]
fieldLabelSets = map (Set.fromList . map flLabel . conLikeFieldLabels) data_cons
-- Sort in order of increasing number of True, so that a smaller
-- conflicting set can be found.
sortMembership =
map snd .
sortBy (compare `on` fst) .
map (\ item@(_, membershipRow) -> (countTrue membershipRow, item))
countTrue = length . filter id
{-
Note [Finding the conflicting fields]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Suppose we have
data A = A {a0, a1 :: Int}
| B {b0, b1 :: Int}
and we see a record update
x { a0 = 3, a1 = 2, b0 = 4, b1 = 5 }
Then we'd like to find the smallest subset of fields that no
constructor has all of. Here, say, {a0,b0}, or {a0,b1}, etc.
We don't really want to report that no constructor has all of
{a0,a1,b0,b1}, because when there are hundreds of fields it's
hard to see what was really wrong.
We may need more than two fields, though; eg
data T = A { x,y :: Int, v::Int }
| B { y,z :: Int, v::Int }
| C { z,x :: Int, v::Int }
with update
r { x=e1, y=e2, z=e3 }, we need all three of {x,y,z} in the conflicting set: every pair of these fields is contained in some constructor, but no constructor has all three.
Finding the smallest subset is hard, so the code here makes
a decent stab, no more. See Trac #7989.
-}
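-- A hedged trace of the machinery in badFieldsUpd for the two-constructor
-- example in the Note (updating a0, a1, b0 and b1 at once):
--
--   membership  = [ (a0, [True ,False])     -- a0 occurs only in constructor A
--                 , (a1, [True ,False])
--                 , (b0, [False,True ])
--                 , (b1, [False,True ]) ]
--   growingSets = [ (a0, [True ,False])
--                 , (a1, [True ,False])     -- adding a1 changes nothing
--                 , (b0, [False,False])     -- no constructor has the whole set
--                 , (b1, [False,False]) ]
--
-- Grouping by the membership row and keeping one field per group gives the
-- reported conflicting set {a0, b0}.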
naughtyRecordSel :: RdrName -> SDoc
naughtyRecordSel sel_id
= text "Cannot use record selector" <+> quotes (ppr sel_id) <+>
text "as a function due to escaped type variables" $$
text "Probable fix: use pattern-matching syntax instead"
notSelector :: Name -> SDoc
notSelector field
= hsep [quotes (ppr field), text "is not a record selector"]
mixedSelectors :: [Id] -> [Id] -> SDoc
mixedSelectors data_sels@(dc_rep_id:_) pat_syn_sels@(ps_rep_id:_)
= ptext
(sLit "Cannot use a mixture of pattern synonym and record selectors") $$
text "Record selectors defined by"
<+> quotes (ppr (tyConName rep_dc))
<> text ":"
<+> pprWithCommas ppr data_sels $$
text "Pattern synonym selectors defined by"
<+> quotes (ppr (patSynName rep_ps))
<> text ":"
<+> pprWithCommas ppr pat_syn_sels
where
RecSelPatSyn rep_ps = recordSelectorTyCon ps_rep_id
RecSelData rep_dc = recordSelectorTyCon dc_rep_id
mixedSelectors _ _ = panic "TcExpr: mixedSelectors emptylists"
missingStrictFields :: ConLike -> [FieldLabelString] -> SDoc
missingStrictFields con fields
= header <> rest
where
rest | null fields = Outputable.empty -- Happens for non-record constructors
-- with strict fields
| otherwise = colon <+> pprWithCommas ppr fields
header = text "Constructor" <+> quotes (ppr con) <+>
text "does not have the required strict field(s)"
missingFields :: ConLike -> [FieldLabelString] -> SDoc
missingFields con fields
= text "Fields of" <+> quotes (ppr con) <+> ptext (sLit "not initialised:")
<+> pprWithCommas ppr fields
-- callCtxt fun args = text "In the call" <+> parens (ppr (foldl mkHsApp fun args))
noPossibleParents :: [LHsRecUpdField Name] -> SDoc
noPossibleParents rbinds
= hang (text "No type has all these fields:")
2 (pprQuotedList fields)
where
fields = map (hsRecFieldLbl . unLoc) rbinds
badOverloadedUpdate :: SDoc
badOverloadedUpdate = text "Record update is ambiguous, and requires a type signature"
fieldNotInType :: RecSelParent -> RdrName -> SDoc
fieldNotInType p rdr
= unknownSubordinateErr (text "field of type" <+> quotes (ppr p)) rdr
|
tjakway/ghcjvm
|
compiler/typecheck/TcExpr.hs
|
bsd-3-clause
| 103,608
| 228
| 24
| 33,634
| 19,073
| 9,936
| 9,137
| -1
| -1
|
{-# LANGUAGE TypeFamilies #-}
-- | Explorer's local Txp.
module Pos.Explorer.Txp.Local
( eTxProcessTransaction
, eTxNormalize
) where
import Universum
import qualified Data.HashMap.Strict as HM
import Pos.Chain.Genesis as Genesis (Config (..), configEpochSlots)
import Pos.Chain.Txp (ToilVerFailure (..), TxAux (..), TxId,
TxValidationRules, TxpConfiguration, Utxo)
import Pos.Chain.Update (BlockVersionData)
import Pos.Core (EpochIndex, Timestamp)
import Pos.Core.JsonLog (CanJsonLog (..))
import Pos.DB.Txp.Logic (txNormalizeAbstract,
txProcessTransactionAbstract)
import Pos.DB.Txp.MemState (MempoolExt, TxpLocalWorkMode, getTxpExtra,
withTxpLocalData)
import Pos.Infra.Slotting (MonadSlots (getCurrentSlot), getSlotStart)
import Pos.Infra.StateLock (Priority (..), StateLock,
StateLockMetrics, withStateLock)
import Pos.Infra.Util.JsonLog.Events (MemPoolModifyReason (..))
import qualified Pos.Util.Modifier as MM
import Pos.Util.Util (HasLens')
import Pos.Explorer.Core (TxExtra (..))
import Pos.Explorer.Txp.Common (buildExplorerExtraLookup)
import Pos.Explorer.Txp.Toil (ELocalToilM, ExplorerExtraLookup (..),
ExplorerExtraModifier, eNormalizeToil, eProcessTx,
eemLocalTxsExtra)
type ETxpLocalWorkMode ctx m =
( TxpLocalWorkMode ctx m
, MempoolExt m ~ ExplorerExtraModifier
)
eTxProcessTransaction ::
( ETxpLocalWorkMode ctx m
, HasLens' ctx StateLock
, HasLens' ctx (StateLockMetrics MemPoolModifyReason)
, CanJsonLog m
)
=> Genesis.Config
-> TxpConfiguration
-> (TxId, TxAux)
-> m (Either ToilVerFailure ())
eTxProcessTransaction genesisConfig txpConfig itw =
withStateLock LowPriority ProcessTransaction
$ \__tip -> eTxProcessTransactionNoLock genesisConfig txpConfig itw
eTxProcessTransactionNoLock ::
forall ctx m. (ETxpLocalWorkMode ctx m)
=> Genesis.Config
-> TxpConfiguration
-> (TxId, TxAux)
-> m (Either ToilVerFailure ())
eTxProcessTransactionNoLock genesisConfig txpConfig itw = getCurrentSlot epochSlots >>= \case
Nothing -> pure $ Left ToilSlotUnknown
Just slot -> do
-- First get the current @SlotId@ so we can calculate the time.
-- Then get when that @SlotId@ started and use that as a time for @Tx@.
mTxTimestamp <- getSlotStart slot
txProcessTransactionAbstract epochSlots
genesisConfig
buildContext
(processTx' mTxTimestamp)
itw
where
epochSlots = configEpochSlots genesisConfig
buildContext :: Utxo -> TxAux -> m ExplorerExtraLookup
buildContext utxo = buildExplorerExtraLookup utxo . one
processTx' ::
Maybe Timestamp
-> BlockVersionData
-> TxValidationRules
-> EpochIndex
-> (TxId, TxAux)
-> ExceptT ToilVerFailure ELocalToilM ()
processTx' mTxTimestamp bvd txValRules epoch tx = eProcessTx
(configProtocolMagic genesisConfig)
txValRules
txpConfig
bvd
epoch
tx
(TxExtra Nothing mTxTimestamp)
-- | 1. Recompute UtxoView by current MemPool
-- 2. Remove invalid transactions from MemPool
-- 3. Set new tip to txp local data
eTxNormalize
:: forall ctx m . (ETxpLocalWorkMode ctx m)
=> Genesis.Config
-> TxValidationRules
-> TxpConfiguration
-> m ()
eTxNormalize genesisConfig txValRules txpConfig = do
extras <- MM.insertionsMap . view eemLocalTxsExtra <$> withTxpLocalData getTxpExtra
txNormalizeAbstract (configEpochSlots genesisConfig)
buildExplorerExtraLookup
(normalizeToil' extras txValRules)
where
normalizeToil' ::
HashMap TxId TxExtra
-> TxValidationRules
-> BlockVersionData
-> EpochIndex
-> HashMap TxId TxAux
-> ELocalToilM ()
normalizeToil' extras txValRules' bvd epoch txs =
let toNormalize = HM.toList $ HM.intersectionWith (,) txs extras
in eNormalizeToil (configProtocolMagic genesisConfig)
txValRules'
txpConfig
bvd
epoch
toNormalize
|
input-output-hk/pos-haskell-prototype
|
explorer/src/Pos/Explorer/Txp/Local.hs
|
mit
| 4,591
| 0
| 13
| 1,417
| 900
| 504
| 396
| -1
| -1
|
{- |
Module : ./Static/test/TestDGTrans.hs
Copyright : Heng Jiang, Uni Bremen 2004-2006
License : GPLv2 or higher, see LICENSE.txt
Test Logic translation for development graphs.
Follows Sect. IV:4.2 of the CASL Reference Manual.
-}
module Main where
import Logic.Comorphism
import Syntax.AS_Library
import Static.AnalysisLibrary
import Static.DevGraph
import Static.DGTranslation
import Driver.Options
import GUI.ShowGraph
import Comorphisms.CASL2PCFOL
import Comorphisms.CASL2SubCFOL
import Common.Result
import Data.Maybe
import System.Environment
process :: HetcatsOpts -> FilePath -> IO (Maybe (LIB_NAME, LibEnv))
process opts file = do
mResult <- anaLib opts file
case mResult of
Just (libName, gcMap) ->
do ccomor <- compComorphism (Comorphism CASL2PCFOL)
(Comorphism defaultCASL2SubCFOL)
gcMap' <- trans gcMap ccomor
return $ Just (libName, gcMap')
_ -> fail "analib error."
trans :: LibEnv -> AnyComorphism -> IO LibEnv
trans libEnv acm =
case libEnv_translation libEnv acm of
Result diags' maybeLE -> do
printDiags 2 diags'
case maybeLE of
Just libEnv' -> return libEnv'
Nothing -> fail "no translation"
main :: IO ()
main = do
opts <- getArgs >>= hetcatsOpts
case infiles opts of
[hd] -> do
res <- process opts hd
showGraph hd opts res
_ -> error "usage: TestDGTrans filename"
|
spechub/Hets
|
Static/test/TestDGTrans.hs
|
gpl-2.0
| 1,481
| 0
| 15
| 377
| 356
| 176
| 180
| 39
| 2
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ru-RU">
<title>TLS Debug | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/tlsdebug/src/main/javahelp/org/zaproxy/zap/extension/tlsdebug/resources/help_ru_RU/helpset_ru_RU.hs
|
apache-2.0
| 971
| 80
| 66
| 160
| 415
| 210
| 205
| -1
| -1
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1993-1998
\section[WwLib]{A library for the ``worker\/wrapper'' back-end to the strictness analyser}
-}
{-# LANGUAGE CPP #-}
module WwLib ( mkWwBodies, mkWWstr, mkWorkerArgs
, deepSplitProductType_maybe, findTypeShape
) where
#include "HsVersions.h"
import CoreSyn
import CoreUtils ( exprType, mkCast )
import Id
import IdInfo ( vanillaIdInfo )
import DataCon
import Demand
import MkCore ( mkRuntimeErrorApp, aBSENT_ERROR_ID, mkCoreUbxTup )
import MkId ( voidArgId, voidPrimId )
import TysPrim ( voidPrimTy )
import TysWiredIn ( tupleDataCon )
import VarEnv ( mkInScopeSet )
import Type
import Coercion
import FamInstEnv
import BasicTypes ( Boxity(..) )
import Literal ( absentLiteralOf )
import TyCon
import UniqSupply
import Unique
import Maybes
import Util
import Outputable
import DynFlags
import FastString
import ListSetOps
{-
************************************************************************
* *
\subsection[mkWrapperAndWorker]{@mkWrapperAndWorker@}
* *
************************************************************************
Here's an example. The original function is:
\begin{verbatim}
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
From this, we want to produce:
\begin{verbatim}
-- wrapper (an unfolding)
g :: forall a . Int -> [a] -> a
g = \/\ a -> \ x ys ->
case x of
I# x# -> $wg a x# ys
-- call the worker; don't forget the type args!
-- worker
$wg :: forall a . Int# -> [a] -> a
$wg = \/\ a -> \ x# ys ->
let
x = I# x#
in
case x of -- note: body of g moved intact
0 -> head ys
_ -> head (tail ys)
\end{verbatim}
Something we have to be careful about: Here's an example:
\begin{verbatim}
-- "f" strictness: U(P)U(P)
f (I# a) (I# b) = a +# b
g = f -- "g" strictness same as "f"
\end{verbatim}
\tr{f} will get a worker all nice and friendly-like; that's good.
{\em But we don't want a worker for \tr{g}}, even though it has the
same strictness as \tr{f}. Doing so could break laziness, at best.
Consequently, we insist that the number of strictness-info items is
exactly the same as the number of lambda-bound arguments. (This is
probably slightly paranoid, but OK in practice.) If it isn't the
same, we ``revise'' the strictness info, so that we won't propagate
the unusable strictness-info into the interfaces.
************************************************************************
* *
\subsection{The worker wrapper core}
* *
************************************************************************
@mkWwBodies@ is called when doing the worker\/wrapper split inside a module.
-}
mkWwBodies :: DynFlags
-> FamInstEnvs
-> Type -- Type of original function
-> [Demand] -- Strictness of original function
-> DmdResult -- Info about function result
-> UniqSM (Maybe ([Demand], -- Demands for worker (value) args
Id -> CoreExpr, -- Wrapper body, lacking only the worker Id
CoreExpr -> CoreExpr)) -- Worker body, lacking the original function rhs
-- wrap_fn_args E = \x y -> E
-- work_fn_args E = E x y
-- wrap_fn_str E = case x of { (a,b) ->
-- case a of { (a1,a2) ->
-- E a1 a2 b y }}
-- work_fn_str E = \a1 a2 b y ->
-- let a = (a1,a2) in
-- let x = (a,b) in
-- E
mkWwBodies dflags fam_envs fun_ty demands res_info
= do { let empty_subst = mkEmptyTCvSubst (mkInScopeSet (tyCoVarsOfType fun_ty))
; (wrap_args, wrap_fn_args, work_fn_args, res_ty) <- mkWWargs empty_subst fun_ty demands
; (useful1, work_args, wrap_fn_str, work_fn_str) <- mkWWstr dflags fam_envs wrap_args
-- Do CPR w/w. See Note [Always do CPR w/w]
; (useful2, wrap_fn_cpr, work_fn_cpr, cpr_res_ty)
<- mkWWcpr (gopt Opt_CprAnal dflags) fam_envs res_ty res_info
; let (work_lam_args, work_call_args) = mkWorkerArgs dflags work_args cpr_res_ty
worker_args_dmds = [idDemandInfo v | v <- work_call_args, isId v]
wrapper_body = wrap_fn_args . wrap_fn_cpr . wrap_fn_str . applyToVars work_call_args . Var
worker_body = mkLams work_lam_args. work_fn_str . work_fn_cpr . work_fn_args
; if useful1 && not only_one_void_argument || useful2
then return (Just (worker_args_dmds, wrapper_body, worker_body))
else return Nothing
}
-- We use an INLINE unconditionally, even if the wrapper turns out to be
-- something trivial like
-- fw = ...
-- f = __inline__ (coerce T fw)
-- The point is to propagate the coerce to f's call sites, so even though
-- f's RHS is now trivial (size 1) we still want the __inline__ to prevent
-- fw from being inlined into f's RHS
where
-- Note [Do not split void functions]
only_one_void_argument
| [d] <- demands
, Just (arg_ty1, _) <- splitFunTy_maybe fun_ty
, isAbsDmd d && isVoidTy arg_ty1
= True
| otherwise
= False
{-
Note [Always do CPR w/w]
~~~~~~~~~~~~~~~~~~~~~~~~
At one time we refrained from doing CPR w/w for thunks, on the grounds that
we might duplicate work. But that is already handled by the demand analyser,
which doesn't give the CPR property if w/w might waste work: see
Note [CPR for thunks] in DmdAnal.
And if something *has* been given the CPR property and we don't w/w, it's
a disaster, because then the enclosing function might say it has the CPR
property, but now doesn't, and there is a cascade of disaster. A good example
is Trac #5920.
************************************************************************
* *
\subsection{Making wrapper args}
* *
************************************************************************
During worker-wrapper stuff we may end up with an unlifted thing
which we want to let-bind without losing laziness. So we
add a void argument. E.g.
f = /\a -> \x y z -> E::Int# -- E does not mention x,y,z
==>
fw = /\ a -> \void -> E
f = /\ a -> \x y z -> fw realworld
We use the state-token type which generates no code.
-}
mkWorkerArgs :: DynFlags -> [Var]
-> Type -- Type of body
-> ([Var], -- Lambda bound args
[Var]) -- Args at call site
mkWorkerArgs dflags args res_ty
| any isId args || not needsAValueLambda
= (args, args)
| otherwise
= (args ++ [voidArgId], args ++ [voidPrimId])
where
needsAValueLambda =
isUnliftedType res_ty
|| not (gopt Opt_FunToThunk dflags)
-- see Note [Protecting the last value argument]
{-
Note [Protecting the last value argument]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the user writes (\_ -> E), they might be intentionally disallowing
the sharing of E. Since absence analysis and worker-wrapper are keen
to remove such unused arguments, we add in a void argument to prevent
the function from becoming a thunk.
The user can avoid adding the void argument with the -ffun-to-thunk
flag. However, this can create sharing, which may be bad in two ways. 1) It can
create a space leak. 2) It can prevent inlining *under a lambda*. If w/w
removes the last argument from a function f, then f now looks like a thunk, and
so f can't be inlined *under a lambda*.
************************************************************************
* *
\subsection{Coercion stuff}
* *
************************************************************************
We really want to "look through" coerces.
Reason: I've seen this situation:
let f = coerce T (\s -> E)
in \x -> case x of
p -> coerce T' f
q -> \s -> E2
r -> coerce T' f
If only we w/w'd f, we'd get
let f = coerce T (\s -> fw s)
fw = \s -> E
in ...
Now we'll inline f to get
let fw = \s -> E
in \x -> case x of
p -> fw
q -> \s -> E2
r -> fw
Now we'll see that fw has arity 1, and will arity expand
the \x to get what we want.
-}
-- mkWWargs just does eta expansion; it is driven off the function type and arity.
-- It chomps off foralls, arrows, and newtypes,
-- and keeps repeating that until it has satisfied the supplied arity
mkWWargs :: TCvSubst -- Freshening substitution to apply to the type
-- See Note [Freshen type variables]
-> Type -- The type of the function
-> [Demand] -- Demands and one-shot info for value arguments
-> UniqSM ([Var], -- Wrapper args
CoreExpr -> CoreExpr, -- Wrapper fn
CoreExpr -> CoreExpr, -- Worker fn
Type) -- Type of wrapper body
mkWWargs subst fun_ty demands
| null demands
= return ([], id, id, substTy subst fun_ty)
| (dmd:demands') <- demands
, Just (arg_ty, fun_ty') <- splitFunTy_maybe fun_ty
= do { uniq <- getUniqueM
; let arg_ty' = substTy subst arg_ty
id = mk_wrap_arg uniq arg_ty' dmd
; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst fun_ty' demands'
; return (id : wrap_args,
Lam id . wrap_fn_args,
work_fn_args . (`App` varToCoreExpr id),
res_ty) }
| Just (tv, fun_ty') <- splitForAllTy_maybe fun_ty
= do { let (subst', tv') = substTyVarBndr subst tv
-- This substTyVarBndr clones the type variable when necessary
-- See Note [Freshen type variables]
; (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst' fun_ty' demands
; return (tv' : wrap_args,
Lam tv' . wrap_fn_args,
work_fn_args . (`mkTyApps` [mkTyVarTy tv']),
res_ty) }
| Just (co, rep_ty) <- topNormaliseNewType_maybe fun_ty
-- The newtype case is for when the function has
-- a newtype after the arrow (rare)
--
-- It's also important when we have a function returning (say) a pair
-- wrapped in a newtype, at least if CPR analysis can look
-- through such newtypes, which it probably can since they are
-- simply coerces.
= do { (wrap_args, wrap_fn_args, work_fn_args, res_ty)
<- mkWWargs subst rep_ty demands
; return (wrap_args,
\e -> Cast (wrap_fn_args e) (mkSymCo co),
\e -> work_fn_args (Cast e co),
res_ty) }
| otherwise
= WARN( True, ppr fun_ty ) -- Should not happen: if there is a demand
return ([], id, id, substTy subst fun_ty) -- then there should be a function arrow
applyToVars :: [Var] -> CoreExpr -> CoreExpr
applyToVars vars fn = mkVarApps fn vars
mk_wrap_arg :: Unique -> Type -> Demand -> Id
mk_wrap_arg uniq ty dmd
= mkSysLocalOrCoVar (fsLit "w") uniq ty
`setIdDemandInfo` dmd
{-
Note [Freshen type variables]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we do a worker/wrapper split, we must not use shadowed names,
else we'll get
f = /\ a /\a. fw a a
which is obviously wrong. Type variables can in principle shadow,
within a type (e.g. forall a. a -> forall a. a->a). But type
variables *are* mentioned in <blah>, so we must substitute.
That's why we carry the TCvSubst through mkWWargs
************************************************************************
* *
\subsection{Strictness stuff}
* *
************************************************************************
-}
mkWWstr :: DynFlags
-> FamInstEnvs
-> [Var] -- Wrapper args; have their demand info on them
-- *Includes type variables*
-> UniqSM (Bool, -- Is this useful
[Var], -- Worker args
CoreExpr -> CoreExpr, -- Wrapper body, lacking the worker call
-- and without its lambdas
-- This fn adds the unboxing
CoreExpr -> CoreExpr) -- Worker body, lacking the original body of the function,
-- and lacking its lambdas.
-- This fn does the reboxing
mkWWstr _ _ []
= return (False, [], nop_fn, nop_fn)
mkWWstr dflags fam_envs (arg : args) = do
(useful1, args1, wrap_fn1, work_fn1) <- mkWWstr_one dflags fam_envs arg
(useful2, args2, wrap_fn2, work_fn2) <- mkWWstr dflags fam_envs args
return (useful1 || useful2, args1 ++ args2, wrap_fn1 . wrap_fn2, work_fn1 . work_fn2)
{-
Note [Unpacking arguments with product and polymorphic demands]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The argument is unpacked in a case if it has a product type and has a
strict *and* used demand put on it. I.e., arguments, with demands such
as the following ones:
<S,U(U, L)>
<S(L,S),U>
will be unpacked, but
<S,U> or <B,U>
will not, because the pieces aren't used. This is quite important otherwise
we end up unpacking massive tuples passed to the bottoming function. Example:
f :: ((Int,Int) -> String) -> (Int,Int) -> a
f g pr = error (g pr)
main = print (f fst (1, error "no"))
Does 'main' print "error 1" or "error no"? We don't really want 'f'
to unbox its second argument. This actually happened in GHC's own
source code, in Packages.applyPackageFlag, which ended up un-boxing
the enormous DynFlags tuple, and being strict in the
as-yet-un-filled-in pkgState files.
-}
----------------------
-- mkWWstr_one wrap_arg = (useful, work_args, wrap_fn, work_fn)
-- * wrap_fn assumes wrap_arg is in scope,
-- brings into scope work_args (via cases)
-- * work_fn assumes work_args are in scope, and
-- brings into scope wrap_arg (via lets)
mkWWstr_one :: DynFlags -> FamInstEnvs -> Var
-> UniqSM (Bool, [Var], CoreExpr -> CoreExpr, CoreExpr -> CoreExpr)
mkWWstr_one dflags fam_envs arg
| isTyVar arg
= return (False, [arg], nop_fn, nop_fn)
-- See Note [Worker-wrapper for bottoming functions]
| isAbsDmd dmd
, Just work_fn <- mk_absent_let dflags arg
-- Absent case. We can't always handle absence for arbitrary
-- unlifted types, so we need to choose just the cases we can
--- (that's what mk_absent_let does)
= return (True, [], nop_fn, work_fn)
-- See Note [Worthy functions for Worker-Wrapper split]
| isSeqDmd dmd -- `seq` demand; evaluate in wrapper in the hope
-- of dropping seqs in the worker
= let arg_w_unf = arg `setIdUnfolding` evaldUnfolding
-- Tell the worker arg that it's sure to be evaluated
-- so that internal seqs can be dropped
in return (True, [arg_w_unf], mk_seq_case arg, nop_fn)
-- Pass the arg, anyway, even if it is in theory discarded
-- Consider
-- f x y = x `seq` y
-- x gets a (Eval (Poly Abs)) demand, but if we fail to pass it to the worker
-- we ABSOLUTELY MUST record that x is evaluated in the wrapper.
-- Something like:
-- f x y = x `seq` fw y
-- fw y = let x{Evald} = error "oops" in (x `seq` y)
-- If we don't pin on the "Evald" flag, the seq doesn't disappear, and
-- we end up evaluating the absent thunk.
-- But the Evald flag is pretty weird, and I worry that it might disappear
-- during simplification, so for now I've just nuked this whole case
| isStrictDmd dmd
, Just cs <- splitProdDmd_maybe dmd
-- See Note [Unpacking arguments with product and polymorphic demands]
, Just (data_con, inst_tys, inst_con_arg_tys, co)
<- deepSplitProductType_maybe fam_envs (idType arg)
, cs `equalLength` inst_con_arg_tys
-- See Note [mkWWstr and unsafeCoerce]
= do { (uniq1:uniqs) <- getUniquesM
; let unpk_args = zipWith mk_ww_local uniqs inst_con_arg_tys
unpk_args_w_ds = zipWithEqual "mkWWstr" setIdDemandInfo unpk_args cs
unbox_fn = mkUnpackCase (Var arg) co uniq1
data_con unpk_args
rebox_fn = Let (NonRec arg con_app)
con_app = mkConApp2 data_con inst_tys unpk_args `mkCast` mkSymCo co
; (_, worker_args, wrap_fn, work_fn) <- mkWWstr dflags fam_envs unpk_args_w_ds
; return (True, worker_args, unbox_fn . wrap_fn, work_fn . rebox_fn) }
-- Don't pass the arg, rebox instead
| otherwise -- Other cases
= return (False, [arg], nop_fn, nop_fn)
where
dmd = idDemandInfo arg
----------------------
nop_fn :: CoreExpr -> CoreExpr
nop_fn body = body
{-
Note [mkWWstr and unsafeCoerce]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
By using unsafeCoerce, it is possible to make the number of demands fail to
match the number of constructor arguments; this happened in Trac #8037.
If so, the worker/wrapper split doesn't work right and we get a Core Lint
bug. The fix here is simply to decline to do w/w if that happens.
************************************************************************
* *
Type scrutiny that is specific to demand analysis
* *
************************************************************************
Note [Do not unpack class dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have
f :: Ord a => [a] -> Int -> a
{-# INLINABLE f #-}
and we worker/wrapper f, we'll get a worker with an INLINABLE pragma
(see Note [Worker-wrapper for INLINABLE functions] in WorkWrap), which
can still be specialised by the type-class specialiser, something like
fw :: Ord a => [a] -> Int# -> a
BUT if f is strict in the Ord dictionary, we might unpack it, to get
fw :: (a->a->Bool) -> [a] -> Int# -> a
and the type-class specialiser can't specialise that. An example is
Trac #6056.
Moreover, dictionaries can have a lot of fields, so unpacking them can
increase closure sizes.
Conclusion: don't unpack dictionaries.
-}
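-- A hedged illustration of what deepSplitProductType_maybe (below) returns
-- for a plain product type, assuming no data families are involved:
--
--   deepSplitProductType_maybe envs (Int, Bool)
--     = Just ( (,)          -- the data constructor
--            , [Int, Bool]  -- its type arguments
--            , [Int, Bool]  -- the instantiated field types
--            , co )         -- here a reflexive coercion (Int, Bool) ~ (Int, Bool)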
deepSplitProductType_maybe :: FamInstEnvs -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If deepSplitProductType_maybe ty = Just (dc, tys, arg_tys, co)
-- then dc @ tys (args::arg_tys) :: rep_ty
-- co :: ty ~ rep_ty
deepSplitProductType_maybe fam_envs ty
| let (co, ty1) = topNormaliseType_maybe fam_envs ty
`orElse` (mkRepReflCo ty, ty)
, Just (tc, tc_args) <- splitTyConApp_maybe ty1
, Just con <- isDataProductTyCon_maybe tc
, not (isClassTyCon tc) -- See Note [Do not unpack class dictionaries]
= Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitProductType_maybe _ _ = Nothing
deepSplitCprType_maybe :: FamInstEnvs -> ConTag -> Type -> Maybe (DataCon, [Type], [Type], Coercion)
-- If deepSplitCprType_maybe n ty = Just (dc, tys, arg_tys, co)
-- then dc @ tys (args::arg_tys) :: rep_ty
-- co :: ty ~ rep_ty
deepSplitCprType_maybe fam_envs con_tag ty
| let (co, ty1) = topNormaliseType_maybe fam_envs ty
`orElse` (mkRepReflCo ty, ty)
, Just (tc, tc_args) <- splitTyConApp_maybe ty1
, isDataTyCon tc
, let cons = tyConDataCons tc
, cons `lengthAtLeast` con_tag -- This might not be true if we import the
-- type constructor via a .hs-boot file (#8743)
, let con = cons `getNth` (con_tag - fIRST_TAG)
= Just (con, tc_args, dataConInstArgTys con tc_args, co)
deepSplitCprType_maybe _ _ _ = Nothing
findTypeShape :: FamInstEnvs -> Type -> TypeShape
-- Uncover the arrow and product shape of a type
-- The data type TypeShape is defined in Demand
-- See Note [Trimming a demand to a type] in Demand
findTypeShape fam_envs ty
| Just (tc, tc_args) <- splitTyConApp_maybe ty
, Just con <- isDataProductTyCon_maybe tc
= TsProd (map (findTypeShape fam_envs) $ dataConInstArgTys con tc_args)
| Just (_, res) <- splitFunTy_maybe ty
= TsFun (findTypeShape fam_envs res)
| Just (_, ty') <- splitForAllTy_maybe ty
= findTypeShape fam_envs ty'
| Just (_, ty') <- topNormaliseType_maybe fam_envs ty
= findTypeShape fam_envs ty'
| otherwise
= TsUnk
{-
************************************************************************
* *
\subsection{CPR stuff}
* *
************************************************************************
@mkWWcpr@ takes the worker/wrapper pair produced from the strictness
info and adds in the CPR transformation. The worker returns an
unboxed tuple containing non-CPR components. The wrapper takes this
tuple and re-produces the correct structured output.
The non-CPR results appear ordered in the unboxed tuple as if by a
left-to-right traversal of the result structure.
-}
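-- A hedged sketch of the shapes mkWWcpr aims for (illustrative names only,
-- not actual Core output):
--
--   f :: Int -> (Int, Bool)              -- result has the CPR property
--
--   $wf :: Int -> (# Int, Bool #)        -- worker returns an unboxed tuple
--
--   f x = case $wf x of                  -- wrapper reboxes the result
--           (# a, b #) -> (a, b)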
mkWWcpr :: Bool
-> FamInstEnvs
-> Type -- function body type
-> DmdResult -- CPR analysis results
-> UniqSM (Bool, -- Is w/w'ing useful?
CoreExpr -> CoreExpr, -- New wrapper
CoreExpr -> CoreExpr, -- New worker
Type) -- Type of worker's body
mkWWcpr opt_CprAnal fam_envs body_ty res
-- CPR explicitly turned off (or in -O0)
| not opt_CprAnal = return (False, id, id, body_ty)
-- CPR is turned on by default for -O and -O2
| otherwise
= case returnsCPR_maybe res of
Nothing -> return (False, id, id, body_ty) -- No CPR info
Just con_tag | Just stuff <- deepSplitCprType_maybe fam_envs con_tag body_ty
-> mkWWcpr_help stuff
| otherwise
-- See Note [non-algebraic or open body type warning]
-> WARN( True, text "mkWWcpr: non-algebraic or open body type" <+> ppr body_ty )
return (False, id, id, body_ty)
mkWWcpr_help :: (DataCon, [Type], [Type], Coercion)
-> UniqSM (Bool, CoreExpr -> CoreExpr, CoreExpr -> CoreExpr, Type)
mkWWcpr_help (data_con, inst_tys, arg_tys, co)
| [arg_ty1] <- arg_tys
, isUnliftedType arg_ty1
-- Special case when there is a single result of unlifted type
--
-- Wrapper: case (..call worker..) of x -> C x
-- Worker: case ( ..body.. ) of C x -> x
= do { (work_uniq : arg_uniq : _) <- getUniquesM
; let arg = mk_ww_local arg_uniq arg_ty1
con_app = mkConApp2 data_con inst_tys [arg] `mkCast` mkSymCo co
; return ( True
, \ wkr_call -> Case wkr_call arg (exprType con_app) [(DEFAULT, [], con_app)]
, \ body -> mkUnpackCase body co work_uniq data_con [arg] (varToCoreExpr arg)
-- varToCoreExpr important here: arg can be a coercion
-- Lacking this caused Trac #10658
, arg_ty1 ) }
| otherwise -- The general case
-- Wrapper: case (..call worker..) of (# a, b #) -> C a b
-- Worker: case ( ...body... ) of C a b -> (# a, b #)
= do { (work_uniq : uniqs) <- getUniquesM
; let (wrap_wild : args) = zipWith mk_ww_local uniqs (ubx_tup_ty : arg_tys)
ubx_tup_ty = exprType ubx_tup_app
ubx_tup_app = mkCoreUbxTup arg_tys (map varToCoreExpr args)
con_app = mkConApp2 data_con inst_tys args `mkCast` mkSymCo co
; return (True
, \ wkr_call -> Case wkr_call wrap_wild (exprType con_app) [(DataAlt (tupleDataCon Unboxed (length arg_tys)), args, con_app)]
, \ body -> mkUnpackCase body co work_uniq data_con args ubx_tup_app
, ubx_tup_ty ) }
mkUnpackCase :: CoreExpr -> Coercion -> Unique -> DataCon -> [Id] -> CoreExpr -> CoreExpr
-- (mkUnpackCase e co uniq Con args body)
-- returns
-- case e |> co of bndr { Con args -> body }
mkUnpackCase (Tick tickish e) co uniq con args body -- See Note [Profiling and unpacking]
= Tick tickish (mkUnpackCase e co uniq con args body)
mkUnpackCase scrut co uniq boxing_con unpk_args body
= Case casted_scrut bndr (exprType body)
[(DataAlt boxing_con, unpk_args, body)]
where
casted_scrut = scrut `mkCast` co
bndr = mk_ww_local uniq (exprType casted_scrut)
{-
Note [non-algebraic or open body type warning]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are a few cases where the W/W transformation is told that something
returns a constructor, but the type at hand doesn't really match this. One
real-world example involves unsafeCoerce:
foo :: IO a
foo = unsafeCoerce c_exit
foreign import ccall "c_exit" c_exit :: IO ()
Here CPR will tell you that `foo` returns a () constructor for sure, but trying
to create a worker/wrapper for type `a` obviously fails.
(This was a real example until ee8e792 in libraries/base.)
It does not seem feasible to avoid all such cases already in the analyser (and
after all, the analysis is not really wrong), so we simply do nothing here in
mkWWcpr. But we still want to emit warning with -DDEBUG, to hopefully catch
other cases where something went avoidably wrong.
Note [Profiling and unpacking]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the original function looked like
f = \ x -> {-# SCC "foo" #-} E
then we want the CPR'd worker to look like
\ x -> {-# SCC "foo" #-} (case E of I# x -> x)
and definitely not
\ x -> case ({-# SCC "foo" #-} E) of I# x -> x)
This transform doesn't move work or allocation
from one cost centre to another.
Later [SDM]: presumably this is because we want the simplifier to
eliminate the case, and the scc would get in the way? I'm ok with
including the case itself in the cost centre, since it is morally
part of the function (post transformation) anyway.
************************************************************************
* *
\subsection{Utilities}
* *
************************************************************************
Note [Absent errors]
~~~~~~~~~~~~~~~~~~~~
We make a new binding for Ids that are marked absent, thus
let x = absentError "x :: Int"
The idea is that this binding will never be used; but if it
buggily is used we'll get a runtime error message.
Coping with absence for *unlifted* types is important; see, for
example, Trac #4306. For these we find a suitable literal,
using Literal.absentLiteralOf. We don't have literals for
every primitive type, so the function is partial.
[I did try the experiment of using an error thunk for unlifted
things too, relying on the simplifier to drop it as dead code,
by making absentError
(a) *not* be a bottoming Id,
(b) be "ok for speculation"
But that relies on the simplifier finding that it really
is dead code, which is fragile, and indeed failed when
profiling is on, which disables various optimisations. So
using a literal will do.]
-}
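-- A hedged illustration of the bindings produced by mk_absent_let (below),
-- following the Note above:
--
--   * absent lifted argument x :: Int
--         let x = absentError "x :: Int" in <worker body>
--   * absent unlifted argument y :: Int#
--         let y = <literal from absentLiteralOf Int#> in <worker body>
--     and if no suitable literal exists we return Nothing, declining to
--     drop the argument.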
mk_absent_let :: DynFlags -> Id -> Maybe (CoreExpr -> CoreExpr)
mk_absent_let dflags arg
| not (isUnliftedType arg_ty)
= Just (Let (NonRec arg abs_rhs))
| Just tc <- tyConAppTyCon_maybe arg_ty
, Just lit <- absentLiteralOf tc
= Just (Let (NonRec arg (Lit lit)))
| arg_ty `eqType` voidPrimTy
= Just (Let (NonRec arg (Var voidPrimId)))
| otherwise
= WARN( True, text "No absent value for" <+> ppr arg_ty )
Nothing
where
arg_ty = idType arg
abs_rhs = mkRuntimeErrorApp aBSENT_ERROR_ID arg_ty msg
msg = showSDoc (gopt_set dflags Opt_SuppressUniques)
(ppr arg <+> ppr (idType arg))
-- We need to suppress uniques here because otherwise they'd
-- end up in the generated code as strings. This is bad for
-- determinism, because with different uniques the strings
-- will have different lengths and hence different costs for
-- the inliner leading to different inlining.
-- See also Note [Unique Determinism] in Unique
mk_seq_case :: Id -> CoreExpr -> CoreExpr
mk_seq_case arg body = Case (Var arg) (sanitiseCaseBndr arg) (exprType body) [(DEFAULT, [], body)]
sanitiseCaseBndr :: Id -> Id
-- The argument we are scrutinising has the right type to be
-- a case binder, so it's convenient to re-use it for that purpose.
-- But we *must* throw away all its IdInfo. In particular, the argument
-- will have demand info on it, and that demand info may be incorrect for
-- the case binder. e.g. case ww_arg of ww_arg { I# x -> ... }
-- Quite likely ww_arg isn't used in '...'. The case may get discarded
-- if the case binder says "I'm demanded". This happened in a situation
-- like (x+y) `seq` ....
sanitiseCaseBndr id = id `setIdInfo` vanillaIdInfo
mk_ww_local :: Unique -> Type -> Id
mk_ww_local uniq ty = mkSysLocalOrCoVar (fsLit "ww") uniq ty
|
vikraman/ghc
|
compiler/stranal/WwLib.hs
|
bsd-3-clause
| 30,608
| 0
| 18
| 9,273
| 3,740
| 2,033
| 1,707
| 259
| 2
|
{-# LANGUAGE PartialTypeSignatures, NamedWildcards #-}
module EveryNamed where
every :: (_a -> Bool) -> [_a] -> Bool
every _ [] = True
every p (x:xs) = p x && every p xs
|
bitemyapp/ghc
|
testsuite/tests/partial-sigs/should_compile/EveryNamed.hs
|
bsd-3-clause
| 171
| 0
| 7
| 33
| 69
| 37
| 32
| 5
| 1
|
{-# LANGUAGE DeriveDataTypeable
, PatternSignatures #-}
{- This program is free software: it is released under the BSD3 open source
license. You can find details of this license in the file LICENSE at the
root of the source tree.
Copyright 2008 Denis Bueno
-}
-- | A Haskell implementation of the basic algorithm, including
-- non-chronological backtracking, from ''SAT-MICRO: petit mais costaud!'' by
-- Sylvain Conchon, Johannes Kanig, and Stephane Lescuyer.
--
-- One interesting thing about this implementation is its use of CPS where the
-- OCaml implementation uses exceptions, to handle control flow.
--
-- Optimisations:
-- non-chronological backtracking;
--
-- Backtracking uses the control stack, so, you may want to invoke with
-- something like @
-- sat-micro cnf-file +RTS -K768M -RTS@,
-- depending on the size of the SAT instance.
module SatMicro where
import Control.Monad.Cont hiding (mapM_)
import Control.Monad.State.Strict hiding ((>=>), mapM_)
import Data.Foldable hiding (sequence_)
import Data.List hiding (elem, concat, foldl', foldl, any, all, foldr, maximumBy)
import Data.Map (Map)
import Data.Ord (comparing)
import Data.Set (Set)
import Debug.Trace()
import Prelude hiding (or, and, all, any, elem, minimum, foldr, splitAt
, concatMap, foldl, catch, mapM_)
import Text.PrettyPrint.HughesPJ
import qualified Data.Foldable as Foldable
import qualified Data.List as L
import qualified Data.Map as Map
import qualified Data.Set as Set
type CNF = [[Lit]]
data Result = Sat [Lit] | Unsat
instance Show Result where
show (Sat lits) = "satisfiable: " ++ intercalate " " (map show lits)
show Unsat = "unsatisfiable"
newtype Lit = L {unLit :: Int} deriving (Eq, Ord)
inLit :: (Int -> Int) -> Lit -> Lit
{-# INLINE inLit #-}
inLit f = L . f . unLit
instance Show Lit where
show = show . unLit
instance Read Lit where
readsPrec i s = map (\(i',s') -> (L i', s')) (readsPrec i s :: [(Int, String)])
instance Num Lit where
_ + _ = error "+ doesn't make sense for literals"
_ - _ = error "- doesn't make sense for literals"
_ * _ = error "* doesn't make sense for literals"
signum _ = error "signum doesn't make sense for literals"
negate = inLit negate
abs = inLit abs
fromInteger l | l == 0 = error "0 is not a literal"
| otherwise = L $ fromInteger l
-- | The state of the search process.
data StateContents = S {
gamma :: Map Lit (Set Lit), -- ^ annotated assignment literals
delta :: [([Lit], Set Lit)] -- ^ annotated CNF
}
getGamma :: Lit -> StateContents -> Set Lit
getGamma l e = Map.findWithDefault (error $ show l ++ ": annotation not found")
l (gamma e)
instance Show StateContents where
show = render . stateDoc
where
stateDoc :: StateContents -> Doc
stateDoc (S {gamma=g, delta=d}) =
brackets (hcat . intersperse space . map (text . show) $ Map.keys g)
<+> braces (hcat
. intersperse (comma <> space)
. map (\(c, a) -> braces (hcat
. intersperse space
. map (text . show) $ c)
<> tups (hcat
. intersperse comma
. map (text . show)
$ Set.toList a))
$ d)
where tups p = char '<' <> p <> char '>'
-- | The entry point to the solver. Searches for a solution to the given
-- satisfiability problem.
dpll :: CNF -> Result
dpll f = (`runCont` id) $ do
r <- callCC $ \bj -> do
(Right env) <- bcp bj (initialState f)
unsat env return
either (const $ return Unsat) (return . Sat) r
dispatch :: t -> [a] -> [(a, t)]
dispatch d = map (\l -> (l, d))
initialState :: [[Lit]] -> StateContents
initialState f = S {gamma = Map.empty,
delta = (dispatch Set.empty f)}
-- bcp either:
-- 1. finds a conflict and returns annotation literals (Left)
-- 2. computes a new environment (Right)
-- | Given an annotated literal, assume it and propagate this information.
-- This may cause other assignments to take place.
assume :: (Monad m) =>
(Either (Set Lit) b -> m StateContents)
-> StateContents
-> (Lit, Set Lit)
-> m (Either a StateContents)
{-# INLINE assume #-}
assume bj env (l, s) = -- update only if not present
if l `Map.member` gamma env
then return (Right env)
else bcp bj env{gamma = Map.insert l s (gamma env)}
-- | Boolean constraint propagation. Under the current assignment, finds any
-- conflicting or unit clauses, and then back jumps or assigns, respectively.
-- If there is no conflict, computes a new environment (@Right@). If this
-- function finds a conflict, calls @bj@ with set of literals annotating the
-- conflicting clause (@Left@).
bcp :: (Monad m) =>
(Either (Set Lit) b -> m StateContents) -- ^ for backjumping
-> StateContents
-> m (Either a StateContents)
bcp bj env = do
env' <-
foldM (\env' (cl, a) -> do
let (cl_neg, cl') =
partition (\l -> negate l `Map.member` gamma env') cl
if any (`Map.member` gamma env') cl'
then return env'
else do
-- update clause annotation
let a' = foldl'
(\set l -> set `Set.union` getGamma (negate l) env')
a cl_neg
case cl' of
[] -> bj (Left a')
[f] -> assume bj env' (f, a') >>= return . fromRight
_ -> return $ env'{delta = (cl', a'):(delta env')})
(env{delta = []})
(delta env)
return $ Right env'
-- | @unsat@ either:
--
-- 1. returns annotation literals (@Left@)
--
-- 2. finds satisfying assignment (@Right@)
unsat :: (MonadCont m) =>
StateContents
-> (Either (Set Lit) [Lit] -> m (Either (Set Lit) [Lit]))
-- ^ the back jump function, allowing conflicts to backtrack to
-- the point where the last involved literal was decided.
-> m (Either (Set Lit) [Lit])
unsat env bj =
case delta env of
[] -> return $ Right $ Map.keys (gamma env)
([_],_):_ -> error "unpropagated unit literal"
([],_):_ -> error "conflict unresolved"
_ -> do
let a = maxSatLit (delta env)
r <- callCC $ \innerBj -> do
(Right env') <- assume innerBj env (a, Set.singleton a)
-- done propagating, no conflicts: continue
unsat env' return
case r of
Left d ->
if not $ a `elem` d
then bj (Left d)
else (callCC $ \innerBj -> do
(Right env') <-
assume innerBj env (negate a, Set.delete a d)
unsat env' bj)
>>= either (bj . Left) (return . Right)
Right _ -> bj r
-- | Returns a literal satisfying a maximal number of clauses.
maxSatLit :: (Foldable t) => t ([Lit], a) -> Lit
maxSatLit cs = (`evalState` Map.empty) $ do
mapM_ (\(c, _) -> mapM_ incr c) cs
freqMap <- get
return $ maximumBy (comparing (valueIn freqMap)) lits
where
valueIn :: (Map Lit Int) -> Lit -> Int
valueIn m l = Map.findWithDefault (error $ "key not found: " ++ show l) l m
lits = foldl (\cs' (c, _) -> cs' `L.union` c) [] cs
-- * Helpers
fromRight :: Either a b -> b
fromRight (Right a) = a
fromRight (Left _) = error "fromRight: Left"
incr :: (Num a) => Lit -> State (Map Lit a) ()
{-# INLINE incr #-}
incr l = modify $! Map.insertWith (\_ i -> 1+i) l 1
-- | An example from the paper.
paper1 :: CNF
paper1 =
[[-1, -3, -4]
,[-1, -3, 4]
,[2, 3, 5]
,[3, 5]
,[3, -5]]
-- | Verify a satisfying assignment.
verifyResult :: Result -> CNF -> Bool
verifyResult (Sat m) cnf =
-- m is well-formed
all (\l -> not $ negate l `elem` m) m
&& all (\cl -> any (`elem` cl) m) cnf
verifyResult Unsat _ = True
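-- A hedged usage sketch (not part of the original module): running the
-- solver on the example instance above and checking the answer in GHCi.
--
--   ghci> let r = dpll paper1
--   ghci> r
--   satisfiable: ...                 -- some model over the literals 1..5
--   ghci> verifyResult r paper1
--   True
--
-- A tiny unsatisfiable instance should behave as expected:
--
--   ghci> dpll [[1], [-1]]
--   unsatisfiable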
|
dbueno/funsat
|
etc/sat-micro/SatMicro.hs
|
bsd-3-clause
| 8,219
| 0
| 25
| 2,569
| 2,449
| 1,327
| 1,122
| 155
| 6
|
module B1.Program.Chart.SymbolEntry
( SymbolEntryInput(..)
, SymbolEntryOutput(..)
, SymbolEntryState
, drawSymbolEntry
, newSymbolEntryState
) where
import Control.Monad
import Data.Char
import Graphics.Rendering.OpenGL
import Graphics.UI.GLFW
import B1.Data.Symbol
import B1.Graphics.Rendering.FTGL.Utils
import B1.Graphics.Rendering.OpenGL.Box
import B1.Graphics.Rendering.OpenGL.Shapes
import B1.Graphics.Rendering.OpenGL.Utils
import B1.Program.Chart.Colors
import B1.Program.Chart.Dirty
import B1.Program.Chart.Resources
data SymbolEntryState = SymbolEntryState
{ pendingSymbol :: Symbol
}
newSymbolEntryState :: SymbolEntryState
newSymbolEntryState = SymbolEntryState { pendingSymbol = "" }
data SymbolEntryInput = SymbolEntryInput
{ bounds :: Box
, inputState :: SymbolEntryState
}
data SymbolEntryOutput = SymbolEntryOutput
{ outputState :: SymbolEntryState
, isDirty :: Dirty
, maybeEnteredSymbol :: Maybe Symbol
}
drawSymbolEntry :: Resources -> SymbolEntryInput -> IO SymbolEntryOutput
drawSymbolEntry resources input = do
renderSymbolEntry resources input
return output
where
currentSymbol = pendingSymbol (inputState input)
checkKeyPress = isKeyPressed resources
maybeLetterKey = getKeyPressed resources $ map CharKey ['A'..'Z']
output
| checkKeyPress (SpecialKey ENTER) = handleEnterKey currentSymbol
| checkKeyPress (SpecialKey BACKSPACE) = handleBackspaceKey currentSymbol
| checkKeyPress (SpecialKey ESC) = handleEscapeKey currentSymbol
| otherwise = handleCharKey maybeLetterKey currentSymbol
renderSymbolEntry :: Resources -> SymbolEntryInput -> IO ()
renderSymbolEntry resources input = do
let symbol = pendingSymbol (inputState input)
unless (null symbol) $ do
let textSpec = TextSpec (font resources) 48 symbol
textBounds <- measureText textSpec
let textBubblePadding = 15
textBubbleWidth = boxWidth textBounds + textBubblePadding * 2
textBubbleHeight = boxHeight textBounds + textBubblePadding * 2
opaqueBubble textBubbleWidth textBubbleHeight textBubblePadding
(black4 1) (blue4 1)
let textWidth = boxWidth textBounds
textHeight = boxHeight textBounds
preservingMatrix $ do
color $ green4 1
translate $ vector3 (-textWidth / 2) (-textHeight / 2) 0
renderText textSpec
handleEnterKey :: Symbol -> SymbolEntryOutput
handleEnterKey currentSymbol
| null currentSymbol = newSymbolEntryOutput currentSymbol False Nothing
| otherwise = newSymbolEntryOutput "" True (Just currentSymbol)
handleBackspaceKey :: Symbol -> SymbolEntryOutput
handleBackspaceKey currentSymbol = newSymbolEntryOutput nextSymbol nextIsDirty Nothing
where
(nextSymbol, nextIsDirty)
| null currentSymbol = (currentSymbol, False)
| otherwise = (init currentSymbol, True)
handleEscapeKey :: Symbol -> SymbolEntryOutput
handleEscapeKey currentSymbol = newSymbolEntryOutput "" True Nothing
handleCharKey :: Maybe Key -> Symbol -> SymbolEntryOutput
handleCharKey (Just (CharKey char)) symbol
| isAlpha char = newSymbolEntryOutput (symbol ++ [char]) True Nothing
| otherwise = newSymbolEntryOutput symbol False Nothing
handleCharKey _ symbol = newSymbolEntryOutput symbol False Nothing
newSymbolEntryOutput :: Symbol -> Dirty -> Maybe Symbol -> SymbolEntryOutput
newSymbolEntryOutput newSymbol newIsDirty maybeSymbol = SymbolEntryOutput
{ outputState = SymbolEntryState { pendingSymbol = newSymbol }
, isDirty = newIsDirty
, maybeEnteredSymbol = maybeSymbol
}
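-- A minimal usage sketch (added here as a comment; not part of the original
-- module). It assumes the caller runs once per frame and threads
-- 'outputState' back in as the next frame's 'inputState':
--
-- > step :: Resources -> Box -> SymbolEntryState -> IO SymbolEntryState
-- > step resources box state = do
-- >   output <- drawSymbolEntry resources
-- >       SymbolEntryInput { bounds = box, inputState = state }
-- >   return (outputState output)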
|
madjestic/b1
|
src/B1/Program/Chart/SymbolEntry.hs
|
bsd-3-clause
| 3,548
| 0
| 17
| 583
| 906
| 468
| 438
| 79
| 1
|
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -Wall -Werror #-}
-- | This module contains common code for generating memory accesses.
--
-- Note: this will probably get absorbed into whatever codegen monad I
-- end up creating.
module IR.FlatIR.LLVMGen.MemAccess(
genLoad,
genStore
) where
import Data.Word
import Data.Interval(Interval(..), toIntervalList, allNumbers)
import IR.FlatIR.Syntax
import qualified LLVM.Core as LLVM
-- Set the volatility of the operation based on its mutability
setVolatility :: Mutability -> LLVM.ValueRef -> IO ()
setVolatility Volatile val = LLVM.setVolatile val True
setVolatility VolatileOnce val = LLVM.setVolatile val True
setVolatility _ _ = return ()
-- | Generate a load, adding the necessary metadata and volatility.
genLoad :: LLVM.ContextRef
-- ^ The LLVM Context
-> LLVM.BuilderRef
-- ^ The LLVM Instruction builder handle
-> LLVM.ValueRef
-- ^ The pointer LLVM value
-> Mutability
-- ^ The Mutability of the value being loaded
-> Type
-- ^ The type of the value being loaded
-> IO LLVM.ValueRef
-- ^ The load instruction LLVM value
genLoad ctx builder addr mut ty =
let
-- Add type-based metadata to the load operation. This includes
-- range and TBAA metadata.
--
-- XXX Actually add the TBAA info here when we have the machinery for it
addTypeMetadata :: LLVM.ValueRef -> Type -> IO ()
addTypeMetadata val IntType { intSize = size, intSigned = signed,
intIntervals = intervals }
| intervals /= allNumbers =
let
-- Lower bounds on integers of the given size
extremelow :: Integer
extremelow
| signed = negate (2 ^ (size - 1))
| otherwise = 0
-- Upper bounds on integers of the given size
extremehigh :: Integer
extremehigh
| signed = (2 ^ (size - 1))
| otherwise = (2 ^ size) + 1
-- Generate the elements in the list of ranges.
--
-- XXX This won't actually work as is. LLVM expects the
-- values in ascending *signed* order, meaning even for
-- unsigned integers, we need to take any values over 2^n and
-- move them to the back of the list.
--
-- Also, we need to make sure that the range doesn't cover the
-- entire set of values for integers of this size. The
-- Intervals datatype will ensure that all contiguous
-- intervals are merged together. Past that, we could just
-- require that the range data either be allNumbers or not
-- cover all possible values.
intervalVals :: LLVM.TypeRef -> Interval Integer -> [LLVM.ValueRef]
intervalVals llvmty (Interval low high) =
[ LLVM.constInt llvmty low signed,
LLVM.constInt llvmty (high + 1) signed ]
intervalVals llvmty (Single single) =
[ LLVM.constInt llvmty single signed,
LLVM.constInt llvmty (single + 1) signed ]
intervalVals llvmty (Min low) =
[ LLVM.constInt llvmty low signed,
LLVM.constInt llvmty extremehigh signed ]
intervalVals llvmty (Max high) =
[ LLVM.constInt llvmty extremelow signed,
LLVM.constInt llvmty high signed ]
in do
mdkind <- LLVM.getMDKindIDInContext ctx "range"
intty <- LLVM.intTypeInContext ctx size
md <- LLVM.mdNodeInContext ctx (concat (map (intervalVals intty)
(toIntervalList intervals)))
LLVM.setMetadata val (mdkind :: Word) md
addTypeMetadata _ _ = return ()
in do
out <- LLVM.buildLoad builder addr ""
setVolatility mut out
addTypeMetadata out ty
return out
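-- A worked example of the interval encoding above (added here as a comment;
-- not part of the original module). For an unsigned 8-bit load whose declared
-- intervals are just the closed interval 1..10, the local 'intervalVals'
-- helper produces the half-open endpoint pair expected by LLVM's @!range@
-- metadata: @Interval 1 10@ becomes the constants (1, 11), and @Single 42@
-- becomes (42, 43).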
-- | Generate a store, setting the necessary volatility.
genStore :: LLVM.ContextRef
            -- ^ The LLVM Context
            -> LLVM.BuilderRef
            -- ^ The LLVM Instruction builder handle
            -> LLVM.ValueRef
            -- ^ The pointer LLVM value being stored to
            -> LLVM.ValueRef
            -- ^ The value being stored
            -> Mutability
            -- ^ The Mutability of the value being stored
            -> Type
            -- ^ The type of the value being stored (not currently used)
            -> IO ()
genStore _ builder addr val mut _ =
do
instr <- LLVM.buildStore builder val addr
setVolatility mut instr
|
emc2/chill
|
src/IR/FlatIR/LLVMGen/MemAccess.hs
|
bsd-3-clause
| 6,011
| 0
| 21
| 1,627
| 832
| 451
| 381
| 69
| 5
|
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.IBC (loadIBC, loadPkgIndex,
writeIBC, writePkgIndex,
hasValidIBCVersion) where
import Idris.Core.Evaluate
import Idris.Core.TT
import Idris.Core.Binary
import Idris.Core.CaseTree
import Idris.AbsSyntax
import Idris.Imports
import Idris.Error
import Idris.DeepSeq
import Idris.Delaborate
import qualified Idris.Docstrings as D
import Idris.Docstrings (Docstring)
import Idris.Output
import IRTS.System (getIdrisLibDir)
import Paths_idris
import qualified Cheapskate.Types as CT
import Data.Binary
import Data.Functor
import Data.Vector.Binary
import Data.List as L
import Data.Maybe (catMaybes)
import Data.ByteString.Lazy as B hiding (length, elem, map)
import qualified Data.Text as T
import qualified Data.Set as S
import Control.Monad
import Control.DeepSeq
import Control.Monad.State.Strict hiding (get, put)
import qualified Control.Monad.State.Strict as ST
import System.FilePath
import System.Directory
import Codec.Archive.Zip
ibcVersion :: Word16
ibcVersion = 128
data IBCFile = IBCFile { ver :: Word16,
sourcefile :: FilePath,
ibc_imports :: ![(Bool, FilePath)],
ibc_importdirs :: ![FilePath],
ibc_implicits :: ![(Name, [PArg])],
ibc_fixes :: ![FixDecl],
ibc_statics :: ![(Name, [Bool])],
ibc_classes :: ![(Name, ClassInfo)],
ibc_records :: ![(Name, RecordInfo)],
ibc_instances :: ![(Bool, Bool, Name, Name)],
ibc_dsls :: ![(Name, DSL)],
ibc_datatypes :: ![(Name, TypeInfo)],
ibc_optimise :: ![(Name, OptInfo)],
ibc_syntax :: ![Syntax],
ibc_keywords :: ![String],
ibc_objs :: ![(Codegen, FilePath)],
ibc_libs :: ![(Codegen, String)],
ibc_cgflags :: ![(Codegen, String)],
ibc_dynamic_libs :: ![String],
ibc_hdrs :: ![(Codegen, String)],
ibc_access :: ![(Name, Accessibility)],
ibc_total :: ![(Name, Totality)],
ibc_totcheckfail :: ![(FC, String)],
ibc_flags :: ![(Name, [FnOpt])],
ibc_fninfo :: ![(Name, FnInfo)],
ibc_cg :: ![(Name, CGInfo)],
ibc_defs :: ![(Name, Def)],
ibc_docstrings :: ![(Name, (Docstring D.DocTerm, [(Name, Docstring D.DocTerm)]))],
ibc_moduledocs :: ![(Name, Docstring D.DocTerm)],
ibc_transforms :: ![(Name, (Term, Term))],
ibc_errRev :: ![(Term, Term)],
ibc_coercions :: ![Name],
ibc_lineapps :: ![(FilePath, Int, PTerm)],
ibc_namehints :: ![(Name, Name)],
ibc_metainformation :: ![(Name, MetaInformation)],
ibc_errorhandlers :: ![Name],
ibc_function_errorhandlers :: ![(Name, Name, Name)], -- fn, arg, handler
ibc_metavars :: ![(Name, (Maybe Name, Int, [Name], Bool))],
ibc_patdefs :: ![(Name, ([([(Name, Term)], Term, Term)], [PTerm]))],
ibc_postulates :: ![Name],
ibc_externs :: ![(Name, Int)],
ibc_parsedSpan :: !(Maybe FC),
ibc_usage :: ![(Name, Int)],
ibc_exports :: ![Name],
ibc_autohints :: ![(Name, Name)],
ibc_deprecated :: ![(Name, String)]
}
deriving Show
{-!
deriving instance Binary IBCFile
!-}
initIBC :: IBCFile
initIBC = IBCFile ibcVersion "" [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] [] Nothing [] [] [] []
hasValidIBCVersion :: FilePath -> Idris Bool
hasValidIBCVersion fp = do
archiveFile <- runIO $ B.readFile fp
case toArchiveOrFail archiveFile of
Left _ -> return False
Right archive -> do ver <- getEntry 0 "ver" archive
return (ver == ibcVersion)
loadIBC :: Bool -- ^ True = reexport, False = make everything private
-> FilePath -> Idris ()
loadIBC reexport fp
= do imps <- getImported
let redo = case lookup fp imps of
Nothing -> True
Just p -> not p && reexport
when redo $
do logIBC 1 $ "Loading ibc " ++ fp ++ " " ++ show reexport
archiveFile <- runIO $ B.readFile fp
case toArchiveOrFail archiveFile of
Left _ -> ifail $ fp ++ " isn't loadable, it may have an old ibc format.\n"
++ "Please clean and rebuild it."
Right archive -> do process reexport archive fp
addImported reexport fp
-- | Load an entire package from its index file
loadPkgIndex :: String -> Idris ()
loadPkgIndex pkg = do ddir <- runIO $ getIdrisLibDir
addImportDir (ddir </> pkg)
fp <- findPkgIndex pkg
loadIBC True fp
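-- A minimal usage sketch (added here as a comment; not part of the original
-- module): a caller that wants to skip files written by a different compiler
-- version, rather than failing inside 'process', can probe the version first.
--
-- > loadIfCurrent :: FilePath -> Idris ()
-- > loadIfCurrent fp = do
-- >   ok <- hasValidIBCVersion fp
-- >   when ok $ loadIBC True fp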
makeEntry :: (Binary b) => String -> [b] -> Maybe Entry
makeEntry name val = if L.null val
then Nothing
else Just $ toEntry name 0 (encode val)
entries :: IBCFile -> [Entry]
entries i = catMaybes [Just $ toEntry "ver" 0 (encode $ ver i),
makeEntry "sourcefile" (sourcefile i),
makeEntry "ibc_imports" (ibc_imports i),
makeEntry "ibc_importdirs" (ibc_importdirs i),
makeEntry "ibc_implicits" (ibc_implicits i),
makeEntry "ibc_fixes" (ibc_fixes i),
makeEntry "ibc_statics" (ibc_statics i),
makeEntry "ibc_classes" (ibc_classes i),
makeEntry "ibc_records" (ibc_records i),
makeEntry "ibc_instances" (ibc_instances i),
makeEntry "ibc_dsls" (ibc_dsls i),
makeEntry "ibc_datatypes" (ibc_datatypes i),
makeEntry "ibc_optimise" (ibc_optimise i),
makeEntry "ibc_syntax" (ibc_syntax i),
makeEntry "ibc_keywords" (ibc_keywords i),
makeEntry "ibc_objs" (ibc_objs i),
makeEntry "ibc_libs" (ibc_libs i),
makeEntry "ibc_cgflags" (ibc_cgflags i),
makeEntry "ibc_dynamic_libs" (ibc_dynamic_libs i),
makeEntry "ibc_hdrs" (ibc_hdrs i),
makeEntry "ibc_access" (ibc_access i),
makeEntry "ibc_total" (ibc_total i),
makeEntry "ibc_totcheckfail" (ibc_totcheckfail i),
makeEntry "ibc_flags" (ibc_flags i),
makeEntry "ibc_fninfo" (ibc_fninfo i),
makeEntry "ibc_cg" (ibc_cg i),
makeEntry "ibc_defs" (ibc_defs i),
makeEntry "ibc_docstrings" (ibc_docstrings i),
makeEntry "ibc_moduledocs" (ibc_moduledocs i),
makeEntry "ibc_transforms" (ibc_transforms i),
makeEntry "ibc_errRev" (ibc_errRev i),
makeEntry "ibc_coercions" (ibc_coercions i),
makeEntry "ibc_lineapps" (ibc_lineapps i),
makeEntry "ibc_namehints" (ibc_namehints i),
makeEntry "ibc_metainformation" (ibc_metainformation i),
makeEntry "ibc_errorhandlers" (ibc_errorhandlers i),
makeEntry "ibc_function_errorhandlers" (ibc_function_errorhandlers i),
makeEntry "ibc_metavars" (ibc_metavars i),
makeEntry "ibc_patdefs" (ibc_patdefs i),
makeEntry "ibc_postulates" (ibc_postulates i),
makeEntry "ibc_externs" (ibc_externs i),
toEntry "ibc_parsedSpan" 0 . encode <$> ibc_parsedSpan i,
makeEntry "ibc_usage" (ibc_usage i),
makeEntry "ibc_exports" (ibc_exports i),
makeEntry "ibc_autohints" (ibc_autohints i),
makeEntry "ibc_deprecated" (ibc_deprecated i)]
writeArchive :: FilePath -> IBCFile -> Idris ()
writeArchive fp i = do let a = L.foldl (\x y -> addEntryToArchive y x) emptyArchive (entries i)
runIO $ B.writeFile fp (fromArchive a)
writeIBC :: FilePath -> FilePath -> Idris ()
writeIBC src f
= do logIBC 1 $ "Writing ibc " ++ show f
i <- getIState
-- case (Data.List.map fst (idris_metavars i)) \\ primDefs of
-- (_:_) -> ifail "Can't write ibc when there are unsolved metavariables"
-- [] -> return ()
resetNameIdx
ibcf <- mkIBC (ibc_write i) (initIBC { sourcefile = src })
idrisCatch (do runIO $ createDirectoryIfMissing True (dropFileName f)
writeArchive f ibcf
logIBC 1 "Written")
(\c -> do logIBC 1 $ "Failed " ++ pshow i c)
return ()
-- Write a package index containing all the imports in the current IState
-- Used for ':search' of an entire package, to ensure everything is loaded.
writePkgIndex :: FilePath -> Idris ()
writePkgIndex f
= do i <- getIState
let imps = map (\ (x, y) -> (True, x)) $ idris_imported i
logIBC 1 $ "Writing package index " ++ show f ++ " including\n" ++
show (map snd imps)
resetNameIdx
let ibcf = initIBC { ibc_imports = imps }
idrisCatch (do runIO $ createDirectoryIfMissing True (dropFileName f)
writeArchive f ibcf
logIBC 1 "Written")
(\c -> do logIBC 1 $ "Failed " ++ pshow i c)
return ()
mkIBC :: [IBCWrite] -> IBCFile -> Idris IBCFile
mkIBC [] f = return f
mkIBC (i:is) f = do ist <- getIState
logIBC 5 $ show i ++ " " ++ show (L.length is)
f' <- ibc ist i f
mkIBC is f'
ibc :: IState -> IBCWrite -> IBCFile -> Idris IBCFile
ibc i (IBCFix d) f = return f { ibc_fixes = d : ibc_fixes f }
ibc i (IBCImp n) f = case lookupCtxtExact n (idris_implicits i) of
Just v -> return f { ibc_implicits = (n,v): ibc_implicits f }
_ -> ifail "IBC write failed"
ibc i (IBCStatic n) f
= case lookupCtxtExact n (idris_statics i) of
Just v -> return f { ibc_statics = (n,v): ibc_statics f }
_ -> ifail "IBC write failed"
ibc i (IBCClass n) f
= case lookupCtxtExact n (idris_classes i) of
Just v -> return f { ibc_classes = (n,v): ibc_classes f }
_ -> ifail "IBC write failed"
ibc i (IBCRecord n) f
= case lookupCtxtExact n (idris_records i) of
Just v -> return f { ibc_records = (n,v): ibc_records f }
_ -> ifail "IBC write failed"
ibc i (IBCInstance int res n ins) f
= return f { ibc_instances = (int, res, n, ins) : ibc_instances f }
ibc i (IBCDSL n) f
= case lookupCtxtExact n (idris_dsls i) of
Just v -> return f { ibc_dsls = (n,v): ibc_dsls f }
_ -> ifail "IBC write failed"
ibc i (IBCData n) f
= case lookupCtxtExact n (idris_datatypes i) of
Just v -> return f { ibc_datatypes = (n,v): ibc_datatypes f }
_ -> ifail "IBC write failed"
ibc i (IBCOpt n) f = case lookupCtxtExact n (idris_optimisation i) of
Just v -> return f { ibc_optimise = (n,v): ibc_optimise f }
_ -> ifail "IBC write failed"
ibc i (IBCSyntax n) f = return f { ibc_syntax = n : ibc_syntax f }
ibc i (IBCKeyword n) f = return f { ibc_keywords = n : ibc_keywords f }
ibc i (IBCImport n) f = return f { ibc_imports = n : ibc_imports f }
ibc i (IBCImportDir n) f = return f { ibc_importdirs = n : ibc_importdirs f }
ibc i (IBCObj tgt n) f = return f { ibc_objs = (tgt, n) : ibc_objs f }
ibc i (IBCLib tgt n) f = return f { ibc_libs = (tgt, n) : ibc_libs f }
ibc i (IBCCGFlag tgt n) f = return f { ibc_cgflags = (tgt, n) : ibc_cgflags f }
ibc i (IBCDyLib n) f = return f {ibc_dynamic_libs = n : ibc_dynamic_libs f }
ibc i (IBCHeader tgt n) f = return f { ibc_hdrs = (tgt, n) : ibc_hdrs f }
ibc i (IBCDef n) f
= do f' <- case lookupDefExact n (tt_ctxt i) of
Just v -> return f { ibc_defs = (n,v) : ibc_defs f }
_ -> ifail "IBC write failed"
case lookupCtxtExact n (idris_patdefs i) of
Just v -> return f' { ibc_patdefs = (n,v) : ibc_patdefs f }
_ -> return f' -- Not a pattern definition
ibc i (IBCDoc n) f = case lookupCtxtExact n (idris_docstrings i) of
Just v -> return f { ibc_docstrings = (n,v) : ibc_docstrings f }
_ -> ifail "IBC write failed"
ibc i (IBCCG n) f = case lookupCtxtExact n (idris_callgraph i) of
Just v -> return f { ibc_cg = (n,v) : ibc_cg f }
_ -> ifail "IBC write failed"
ibc i (IBCCoercion n) f = return f { ibc_coercions = n : ibc_coercions f }
ibc i (IBCAccess n a) f = return f { ibc_access = (n,a) : ibc_access f }
ibc i (IBCFlags n a) f = return f { ibc_flags = (n,a) : ibc_flags f }
ibc i (IBCFnInfo n a) f = return f { ibc_fninfo = (n,a) : ibc_fninfo f }
ibc i (IBCTotal n a) f = return f { ibc_total = (n,a) : ibc_total f }
ibc i (IBCTrans n t) f = return f { ibc_transforms = (n, t) : ibc_transforms f }
ibc i (IBCErrRev t) f = return f { ibc_errRev = t : ibc_errRev f }
ibc i (IBCLineApp fp l t) f
= return f { ibc_lineapps = (fp,l,t) : ibc_lineapps f }
ibc i (IBCNameHint (n, ty)) f
= return f { ibc_namehints = (n, ty) : ibc_namehints f }
ibc i (IBCMetaInformation n m) f = return f { ibc_metainformation = (n,m) : ibc_metainformation f }
ibc i (IBCErrorHandler n) f = return f { ibc_errorhandlers = n : ibc_errorhandlers f }
ibc i (IBCFunctionErrorHandler fn a n) f =
return f { ibc_function_errorhandlers = (fn, a, n) : ibc_function_errorhandlers f }
ibc i (IBCMetavar n) f =
case lookup n (idris_metavars i) of
Nothing -> return f
Just t -> return f { ibc_metavars = (n, t) : ibc_metavars f }
ibc i (IBCPostulate n) f = return f { ibc_postulates = n : ibc_postulates f }
ibc i (IBCExtern n) f = return f { ibc_externs = n : ibc_externs f }
ibc i (IBCTotCheckErr fc err) f = return f { ibc_totcheckfail = (fc, err) : ibc_totcheckfail f }
ibc i (IBCParsedRegion fc) f = return f { ibc_parsedSpan = Just fc }
ibc i (IBCModDocs n) f = case lookupCtxtExact n (idris_moduledocs i) of
Just v -> return f { ibc_moduledocs = (n,v) : ibc_moduledocs f }
_ -> ifail "IBC write failed"
ibc i (IBCUsage n) f = return f { ibc_usage = n : ibc_usage f }
ibc i (IBCExport n) f = return f { ibc_exports = n : ibc_exports f }
ibc i (IBCAutoHint n h) f = return f { ibc_autohints = (n, h) : ibc_autohints f }
ibc i (IBCDeprecate n r) f = return f { ibc_deprecated = (n, r) : ibc_deprecated f }
getEntry :: (Binary b, NFData b) => b -> FilePath -> Archive -> Idris b
getEntry alt f a = case findEntryByPath f a of
Nothing -> return alt
Just e -> return $! (force . decode . fromEntry) e
process :: Bool -- ^ Reexporting
-> Archive -> FilePath -> Idris ()
process reexp i fn = do
ver <- getEntry 0 "ver" i
when (ver /= ibcVersion) $ do
logIBC 1 "ibc out of date"
let e = if ver < ibcVersion
then " an earlier " else " a later "
ifail $ "Incompatible ibc version.\nThis library was built with"
++ e ++ "version of Idris.\n" ++ "Please clean and rebuild."
source <- getEntry "" "sourcefile" i
srcok <- runIO $ doesFileExist source
when srcok $ timestampOlder source fn
pImportDirs =<< getEntry [] "ibc_importdirs" i
pImports reexp =<< getEntry [] "ibc_imports" i
pImps =<< getEntry [] "ibc_implicits" i
pFixes =<< getEntry [] "ibc_fixes" i
pStatics =<< getEntry [] "ibc_statics" i
pClasses =<< getEntry [] "ibc_classes" i
pRecords =<< getEntry [] "ibc_records" i
pInstances =<< getEntry [] "ibc_instances" i
pDSLs =<< getEntry [] "ibc_dsls" i
pDatatypes =<< getEntry [] "ibc_datatypes" i
pOptimise =<< getEntry [] "ibc_optimise" i
pSyntax =<< getEntry [] "ibc_syntax" i
pKeywords =<< getEntry [] "ibc_keywords" i
pObjs =<< getEntry [] "ibc_objs" i
pLibs =<< getEntry [] "ibc_libs" i
pCGFlags =<< getEntry [] "ibc_cgflags" i
pDyLibs =<< getEntry [] "ibc_dynamic_libs" i
pHdrs =<< getEntry [] "ibc_hdrs" i
pDefs reexp =<< getEntry [] "ibc_defs" i
pPatdefs =<< getEntry [] "ibc_patdefs" i
pAccess reexp =<< getEntry [] "ibc_access" i
pFlags =<< getEntry [] "ibc_flags" i
pFnInfo =<< getEntry [] "ibc_fninfo" i
pTotal =<< getEntry [] "ibc_total" i
pTotCheckErr =<< getEntry [] "ibc_totcheckfail" i
pCG =<< getEntry [] "ibc_cg" i
pDocs =<< getEntry [] "ibc_docstrings" i
pMDocs =<< getEntry [] "ibc_moduledocs" i
pCoercions =<< getEntry [] "ibc_coercions" i
pTrans =<< getEntry [] "ibc_transforms" i
pErrRev =<< getEntry [] "ibc_errRev" i
pLineApps =<< getEntry [] "ibc_lineapps" i
pNameHints =<< getEntry [] "ibc_namehints" i
pMetaInformation =<< getEntry [] "ibc_metainformation" i
pErrorHandlers =<< getEntry [] "ibc_errorhandlers" i
pFunctionErrorHandlers =<< getEntry [] "ibc_function_errorhandlers" i
pMetavars =<< getEntry [] "ibc_metavars" i
pPostulates =<< getEntry [] "ibc_postulates" i
pExterns =<< getEntry [] "ibc_externs" i
pParsedSpan =<< getEntry Nothing "ibc_parsedSpan" i
pUsage =<< getEntry [] "ibc_usage" i
pExports =<< getEntry [] "ibc_exports" i
pAutoHints =<< getEntry [] "ibc_autohints" i
pDeprecate =<< getEntry [] "ibc_deprecated" i
timestampOlder :: FilePath -> FilePath -> Idris ()
timestampOlder src ibc = do srct <- runIO $ getModificationTime src
ibct <- runIO $ getModificationTime ibc
if (srct > ibct)
then ifail $ "Needs reloading " ++ show (srct, ibct)
else return ()
pPostulates :: [Name] -> Idris ()
pPostulates ns = updateIState
(\i -> i { idris_postulates = idris_postulates i `S.union` S.fromList ns })
pExterns :: [(Name, Int)] -> Idris ()
pExterns ns = updateIState (\i -> i{ idris_externs = idris_externs i `S.union` S.fromList ns })
pParsedSpan :: Maybe FC -> Idris ()
pParsedSpan fc = updateIState (\i -> i { idris_parsedSpan = fc })
pUsage :: [(Name, Int)] -> Idris ()
pUsage ns = updateIState (\i -> i { idris_erasureUsed = ns ++ idris_erasureUsed i })
pExports :: [Name] -> Idris ()
pExports ns = updateIState (\i -> i { idris_exports = ns ++ idris_exports i })
pAutoHints :: [(Name, Name)] -> Idris ()
pAutoHints ns = mapM_ (\(n,h) -> addAutoHint n h) ns
pDeprecate :: [(Name, String)] -> Idris ()
pDeprecate ns = mapM_ (\(n,reason) -> addDeprecated n reason) ns
pImportDirs :: [FilePath] -> Idris ()
pImportDirs fs = mapM_ addImportDir fs
pImports :: Bool -> [(Bool, FilePath)] -> Idris ()
pImports reexp fs
= do mapM_ (\(re, f) ->
do i <- getIState
ibcsd <- valIBCSubDir i
ids <- allImportDirs
fp <- findImport ids ibcsd f
-- if (f `elem` imported i)
-- then logLvl 1 $ "Already read " ++ f
putIState (i { imported = f : imported i })
case fp of
LIDR fn -> do
logIBC 1 $ "Failed at " ++ fn
ifail "Must be an ibc"
IDR fn -> do
logIBC 1 $ "Failed at " ++ fn
ifail "Must be an ibc"
IBC fn src -> loadIBC (reexp && re) fn)
fs
pImps :: [(Name, [PArg])] -> Idris ()
pImps imps = mapM_ (\ (n, imp) ->
do i <- getIState
case lookupDefAccExact n False (tt_ctxt i) of
Just (n, Hidden) -> return ()
_ -> putIState (i { idris_implicits
= addDef n imp (idris_implicits i) }))
imps
pFixes :: [FixDecl] -> Idris ()
pFixes f = do i <- getIState
putIState (i { idris_infixes = sort $ f ++ idris_infixes i })
pStatics :: [(Name, [Bool])] -> Idris ()
pStatics ss = mapM_ (\ (n, s) ->
do i <- getIState
putIState (i { idris_statics
= addDef n s (idris_statics i) }))
ss
pClasses :: [(Name, ClassInfo)] -> Idris ()
pClasses cs = mapM_ (\ (n, c) ->
do i <- getIState
-- Don't lose instances from previous IBCs, which
-- could have loaded in any order
let is = case lookupCtxtExact n (idris_classes i) of
Just (CI _ _ _ _ _ ins _) -> ins
_ -> []
let c' = c { class_instances =
class_instances c ++ is }
putIState (i { idris_classes
= addDef n c' (idris_classes i) }))
cs
pRecords :: [(Name, RecordInfo)] -> Idris ()
pRecords rs = mapM_ (\ (n, r) ->
do i <- getIState
putIState (i { idris_records
= addDef n r (idris_records i) }))
rs
pInstances :: [(Bool, Bool, Name, Name)] -> Idris ()
pInstances cs = mapM_ (\ (i, res, n, ins) -> addInstance i res n ins) cs
pDSLs :: [(Name, DSL)] -> Idris ()
pDSLs cs = mapM_ (\ (n, c) -> updateIState (\i ->
i { idris_dsls = addDef n c (idris_dsls i) })) cs
pDatatypes :: [(Name, TypeInfo)] -> Idris ()
pDatatypes cs = mapM_ (\ (n, c) -> updateIState (\i ->
i { idris_datatypes = addDef n c (idris_datatypes i) })) cs
pOptimise :: [(Name, OptInfo)] -> Idris ()
pOptimise cs = mapM_ (\ (n, c) -> updateIState (\i ->
i { idris_optimisation = addDef n c (idris_optimisation i) })) cs
pSyntax :: [Syntax] -> Idris ()
pSyntax s = updateIState (\i -> i { syntax_rules = updateSyntaxRules s (syntax_rules i) })
pKeywords :: [String] -> Idris ()
pKeywords k = updateIState (\i -> i { syntax_keywords = k ++ syntax_keywords i })
pObjs :: [(Codegen, FilePath)] -> Idris ()
pObjs os = mapM_ (\ (cg, obj) -> do dirs <- allImportDirs
o <- runIO $ findInPath dirs obj
addObjectFile cg o) os
pLibs :: [(Codegen, String)] -> Idris ()
pLibs ls = mapM_ (uncurry addLib) ls
pCGFlags :: [(Codegen, String)] -> Idris ()
pCGFlags ls = mapM_ (uncurry addFlag) ls
pDyLibs :: [String] -> Idris ()
pDyLibs ls = do res <- mapM (addDyLib . return) ls
mapM_ checkLoad res
return ()
where checkLoad (Left _) = return ()
checkLoad (Right err) = ifail err
pHdrs :: [(Codegen, String)] -> Idris ()
pHdrs hs = mapM_ (uncurry addHdr) hs
pPatdefs :: [(Name, ([([(Name, Term)], Term, Term)], [PTerm]))] -> Idris ()
pPatdefs ds = mapM_ (\ (n, d) -> updateIState (\i ->
i { idris_patdefs = addDef n (force d) (idris_patdefs i) })) ds
pDefs :: Bool -> [(Name, Def)] -> Idris ()
pDefs reexp ds
= mapM_ (\ (n, d) ->
do d' <- updateDef d
case d' of
TyDecl _ _ -> return ()
_ -> do logIBC 1 $ "SOLVING " ++ show n
solveDeferred n
updateIState (\i -> i { tt_ctxt = addCtxtDef n d' (tt_ctxt i) })
-- logLvl 1 $ "Added " ++ show (n, d')
if (not reexp) then do logIBC 1 $ "Not exporting " ++ show n
setAccessibility n Hidden
else logIBC 1 $ "Exporting " ++ show n) ds
where
updateDef (CaseOp c t args o s cd)
= do o' <- mapM updateOrig o
cd' <- updateCD cd
return $ CaseOp c t args o' s cd'
updateDef t = return t
updateOrig (Left t) = liftM Left (update t)
updateOrig (Right (l, r)) = do l' <- update l
r' <- update r
return $ Right (l', r')
updateCD (CaseDefs (ts, t) (cs, c) (is, i) (rs, r))
= do t' <- updateSC t
c' <- updateSC c
i' <- updateSC i
r' <- updateSC r
return $ CaseDefs (ts, t') (cs, c') (is, i') (rs, r')
updateSC (Case t n alts) = do alts' <- mapM updateAlt alts
return (Case t n alts')
updateSC (ProjCase t alts) = do alts' <- mapM updateAlt alts
return (ProjCase t alts')
updateSC (STerm t) = do t' <- update t
return (STerm t')
updateSC c = return c
updateAlt (ConCase n i ns t) = do t' <- updateSC t
return (ConCase n i ns t')
updateAlt (FnCase n ns t) = do t' <- updateSC t
return (FnCase n ns t')
updateAlt (ConstCase c t) = do t' <- updateSC t
return (ConstCase c t')
updateAlt (SucCase n t) = do t' <- updateSC t
return (SucCase n t')
updateAlt (DefaultCase t) = do t' <- updateSC t
return (DefaultCase t')
update (P t n ty) = do n' <- getSymbol n
return $ P t n' ty
update (App s f a) = liftM2 (App s) (update f) (update a)
update (Bind n b sc) = do b' <- updateB b
sc' <- update sc
return $ Bind n b' sc'
where
updateB (Let t v) = liftM2 Let (update t) (update v)
updateB b = do ty' <- update (binderTy b)
return (b { binderTy = ty' })
update (Proj t i) = do t' <- update t
return $ Proj t' i
update t = return t
pDocs :: [(Name, (Docstring D.DocTerm, [(Name, Docstring D.DocTerm)]))] -> Idris ()
pDocs ds = mapM_ (\(n, a) -> addDocStr n (fst a) (snd a)) ds
pMDocs :: [(Name, Docstring D.DocTerm)] -> Idris ()
pMDocs ds = mapM_ (\ (n, d) -> updateIState (\i ->
i { idris_moduledocs = addDef n d (idris_moduledocs i) })) ds
pAccess :: Bool -- ^ Reexporting?
-> [(Name, Accessibility)] -> Idris ()
pAccess reexp ds
= mapM_ (\ (n, a_in) ->
do let a = if reexp then a_in else Hidden
logIBC 3 $ "Setting " ++ show (a, n) ++ " to " ++ show a
updateIState (\i -> i { tt_ctxt = setAccess n a (tt_ctxt i) })) ds
pFlags :: [(Name, [FnOpt])] -> Idris ()
pFlags ds = mapM_ (\ (n, a) -> setFlags n a) ds
pFnInfo :: [(Name, FnInfo)] -> Idris ()
pFnInfo ds = mapM_ (\ (n, a) -> setFnInfo n a) ds
pTotal :: [(Name, Totality)] -> Idris ()
pTotal ds = mapM_ (\ (n, a) -> updateIState (\i -> i { tt_ctxt = setTotal n a (tt_ctxt i) })) ds
pTotCheckErr :: [(FC, String)] -> Idris ()
pTotCheckErr es = updateIState (\i -> i { idris_totcheckfail = idris_totcheckfail i ++ es })
pCG :: [(Name, CGInfo)] -> Idris ()
pCG ds = mapM_ (\ (n, a) -> addToCG n a) ds
pCoercions :: [Name] -> Idris ()
pCoercions ns = mapM_ (\ n -> addCoercion n) ns
pTrans :: [(Name, (Term, Term))] -> Idris ()
pTrans ts = mapM_ (\ (n, t) -> addTrans n t) ts
pErrRev :: [(Term, Term)] -> Idris ()
pErrRev ts = mapM_ addErrRev ts
pLineApps :: [(FilePath, Int, PTerm)] -> Idris ()
pLineApps ls = mapM_ (\ (f, i, t) -> addInternalApp f i t) ls
pNameHints :: [(Name, Name)] -> Idris ()
pNameHints ns = mapM_ (\ (n, ty) -> addNameHint n ty) ns
pMetaInformation :: [(Name, MetaInformation)] -> Idris ()
pMetaInformation ds = mapM_ (\ (n, m) -> updateIState (\i ->
i { tt_ctxt = setMetaInformation n m (tt_ctxt i) })) ds
pErrorHandlers :: [Name] -> Idris ()
pErrorHandlers ns = updateIState (\i ->
i { idris_errorhandlers = idris_errorhandlers i ++ ns })
pFunctionErrorHandlers :: [(Name, Name, Name)] -> Idris ()
pFunctionErrorHandlers ns = mapM_ (\ (fn,arg,handler) ->
addFunctionErrorHandlers fn arg [handler]) ns
pMetavars :: [(Name, (Maybe Name, Int, [Name], Bool))] -> Idris ()
pMetavars ns = updateIState (\i -> i { idris_metavars = L.reverse ns ++ idris_metavars i })
----- For Cheapskate and docstrings
instance Binary a => Binary (D.Docstring a) where
put (D.DocString opts lines) = do put opts ; put lines
get = do opts <- get
lines <- get
return (D.DocString opts lines)
instance Binary CT.Options where
put (CT.Options x1 x2 x3 x4) = do put x1 ; put x2 ; put x3 ; put x4
get = do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (CT.Options x1 x2 x3 x4)
instance Binary D.DocTerm where
put D.Unchecked = putWord8 0
put (D.Checked t) = putWord8 1 >> put t
put (D.Example t) = putWord8 2 >> put t
put (D.Failing e) = putWord8 3 >> put e
get = do i <- getWord8
case i of
0 -> return D.Unchecked
1 -> fmap D.Checked get
2 -> fmap D.Example get
3 -> fmap D.Failing get
_ -> error "Corrupted binary data for DocTerm"
instance Binary a => Binary (D.Block a) where
put (D.Para lines) = do putWord8 0 ; put lines
put (D.Header i lines) = do putWord8 1 ; put i ; put lines
put (D.Blockquote bs) = do putWord8 2 ; put bs
put (D.List b t xs) = do putWord8 3 ; put b ; put t ; put xs
put (D.CodeBlock attr txt src) = do putWord8 4 ; put attr ; put txt ; put src
put (D.HtmlBlock txt) = do putWord8 5 ; put txt
put D.HRule = putWord8 6
get = do i <- getWord8
case i of
0 -> fmap D.Para get
1 -> liftM2 D.Header get get
2 -> fmap D.Blockquote get
3 -> liftM3 D.List get get get
4 -> liftM3 D.CodeBlock get get get
5 -> liftM D.HtmlBlock get
6 -> return D.HRule
_ -> error "Corrupted binary data for Block"
instance Binary a => Binary (D.Inline a) where
put (D.Str txt) = do putWord8 0 ; put txt
put D.Space = putWord8 1
put D.SoftBreak = putWord8 2
put D.LineBreak = putWord8 3
put (D.Emph xs) = putWord8 4 >> put xs
put (D.Strong xs) = putWord8 5 >> put xs
put (D.Code xs tm) = putWord8 6 >> put xs >> put tm
put (D.Link a b c) = putWord8 7 >> put a >> put b >> put c
put (D.Image a b c) = putWord8 8 >> put a >> put b >> put c
put (D.Entity a) = putWord8 9 >> put a
put (D.RawHtml x) = putWord8 10 >> put x
get = do i <- getWord8
case i of
0 -> liftM D.Str get
1 -> return D.Space
2 -> return D.SoftBreak
3 -> return D.LineBreak
4 -> liftM D.Emph get
5 -> liftM D.Strong get
6 -> liftM2 D.Code get get
7 -> liftM3 D.Link get get get
8 -> liftM3 D.Image get get get
9 -> liftM D.Entity get
10 -> liftM D.RawHtml get
_ -> error "Corrupted binary data for Inline"
instance Binary CT.ListType where
put (CT.Bullet c) = putWord8 0 >> put c
put (CT.Numbered nw i) = putWord8 1 >> put nw >> put i
get = do i <- getWord8
case i of
0 -> liftM CT.Bullet get
1 -> liftM2 CT.Numbered get get
_ -> error "Corrupted binary data for ListType"
instance Binary CT.CodeAttr where
put (CT.CodeAttr a b) = put a >> put b
get = liftM2 CT.CodeAttr get get
instance Binary CT.NumWrapper where
put (CT.PeriodFollowing) = putWord8 0
put (CT.ParenFollowing) = putWord8 1
get = do i <- getWord8
case i of
0 -> return CT.PeriodFollowing
1 -> return CT.ParenFollowing
_ -> error "Corrupted binary data for NumWrapper"
----- Generated by 'derive'
instance Binary SizeChange where
put x
= case x of
Smaller -> putWord8 0
Same -> putWord8 1
Bigger -> putWord8 2
Unknown -> putWord8 3
get
= do i <- getWord8
case i of
0 -> return Smaller
1 -> return Same
2 -> return Bigger
3 -> return Unknown
_ -> error "Corrupted binary data for SizeChange"
instance Binary CGInfo where
put (CGInfo x1 x2 x3 x4 x5)
= do put x1
put x2
-- put x3 -- Already used SCG info for totality check
put x4
put x5
get
= do x1 <- get
x2 <- get
x4 <- get
x5 <- get
return (CGInfo x1 x2 [] x4 x5)
instance Binary CaseType where
put x = case x of
Updatable -> putWord8 0
Shared -> putWord8 1
get = do i <- getWord8
case i of
0 -> return Updatable
1 -> return Shared
_ -> error "Corrupted binary data for CaseType"
instance Binary SC where
put x
= case x of
Case x1 x2 x3 -> do putWord8 0
put x1
put x2
put x3
ProjCase x1 x2 -> do putWord8 1
put x1
put x2
STerm x1 -> do putWord8 2
put x1
UnmatchedCase x1 -> do putWord8 3
put x1
ImpossibleCase -> do putWord8 4
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
return (Case x1 x2 x3)
1 -> do x1 <- get
x2 <- get
return (ProjCase x1 x2)
2 -> do x1 <- get
return (STerm x1)
3 -> do x1 <- get
return (UnmatchedCase x1)
4 -> return ImpossibleCase
_ -> error "Corrupted binary data for SC"
instance Binary CaseAlt where
put x
= {-# SCC "putCaseAlt" #-}
case x of
ConCase x1 x2 x3 x4 -> do putWord8 0
put x1
put x2
put x3
put x4
ConstCase x1 x2 -> do putWord8 1
put x1
put x2
DefaultCase x1 -> do putWord8 2
put x1
FnCase x1 x2 x3 -> do putWord8 3
put x1
put x2
put x3
SucCase x1 x2 -> do putWord8 4
put x1
put x2
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (ConCase x1 x2 x3 x4)
1 -> do x1 <- get
x2 <- get
return (ConstCase x1 x2)
2 -> do x1 <- get
return (DefaultCase x1)
3 -> do x1 <- get
x2 <- get
x3 <- get
return (FnCase x1 x2 x3)
4 -> do x1 <- get
x2 <- get
return (SucCase x1 x2)
_ -> error "Corrupted binary data for CaseAlt"
instance Binary CaseDefs where
put (CaseDefs x1 x2 x3 x4)
= do -- don't need totality checked or inlined versions
put x2
put x4
get
= do x2 <- get
x4 <- get
return (CaseDefs x2 x2 x2 x4)
instance Binary CaseInfo where
put x@(CaseInfo x1 x2 x3) = do put x1
put x2
put x3
get = do x1 <- get
x2 <- get
x3 <- get
return (CaseInfo x1 x2 x3)
instance Binary Def where
put x
= {-# SCC "putDef" #-}
case x of
Function x1 x2 -> do putWord8 0
put x1
put x2
TyDecl x1 x2 -> do putWord8 1
put x1
put x2
-- all primitives just get added at the start, don't write
Operator x1 x2 x3 -> do return ()
CaseOp x1 x2 x2a x3 x3a x4 -> do putWord8 3
put x1
put x2
put x2a
put x3
-- no x3a
put x4
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
return (Function x1 x2)
1 -> do x1 <- get
x2 <- get
return (TyDecl x1 x2)
-- Operator isn't written, don't read
3 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
-- x3 <- get always []
x5 <- get
return (CaseOp x1 x2 x3 x4 [] x5)
_ -> error "Corrupted binary data for Def"
instance Binary Accessibility where
put x
= case x of
Public -> putWord8 0
Frozen -> putWord8 1
Hidden -> putWord8 2
get
= do i <- getWord8
case i of
0 -> return Public
1 -> return Frozen
2 -> return Hidden
_ -> error "Corrupted binary data for Accessibility"
safeToEnum :: (Enum a, Bounded a, Integral int) => String -> int -> a
safeToEnum label x' = result
where
x = fromIntegral x'
result
| x < fromEnum (minBound `asTypeOf` result)
|| x > fromEnum (maxBound `asTypeOf` result)
= error $ label ++ ": corrupted binary representation in IBC"
| otherwise = toEnum x
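-- A minimal sketch of how 'safeToEnum' is intended to be used (added here as
-- a comment; not part of the original module). @SomeEnumTy@ is a hypothetical
-- type with 'Enum' and 'Bounded' instances:
--
-- > instance Binary SomeEnumTy where
-- >   put = putWord8 . fromIntegral . fromEnum
-- >   get = safeToEnum "SomeEnumTy" `fmap` getWord8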
instance Binary PReason where
put x
= case x of
Other x1 -> do putWord8 0
put x1
Itself -> putWord8 1
NotCovering -> putWord8 2
NotPositive -> putWord8 3
Mutual x1 -> do putWord8 4
put x1
NotProductive -> putWord8 5
BelieveMe -> putWord8 6
UseUndef x1 -> do putWord8 7
put x1
ExternalIO -> putWord8 8
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Other x1)
1 -> return Itself
2 -> return NotCovering
3 -> return NotPositive
4 -> do x1 <- get
return (Mutual x1)
5 -> return NotProductive
6 -> return BelieveMe
7 -> do x1 <- get
return (UseUndef x1)
8 -> return ExternalIO
_ -> error "Corrupted binary data for PReason"
instance Binary Totality where
put x
= case x of
Total x1 -> do putWord8 0
put x1
Partial x1 -> do putWord8 1
put x1
Unchecked -> do putWord8 2
Productive -> do putWord8 3
Generated -> do putWord8 4
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Total x1)
1 -> do x1 <- get
return (Partial x1)
2 -> return Unchecked
3 -> return Productive
4 -> return Generated
_ -> error "Corrupted binary data for Totality"
instance Binary MetaInformation where
put x
= case x of
EmptyMI -> do putWord8 0
DataMI x1 -> do putWord8 1
put x1
get = do i <- getWord8
case i of
0 -> return EmptyMI
1 -> do x1 <- get
return (DataMI x1)
_ -> error "Corrupted binary data for MetaInformation"
instance Binary DataOpt where
put x = case x of
Codata -> putWord8 0
DefaultEliminator -> putWord8 1
DataErrRev -> putWord8 2
DefaultCaseFun -> putWord8 3
get = do i <- getWord8
case i of
0 -> return Codata
1 -> return DefaultEliminator
2 -> return DataErrRev
3 -> return DefaultCaseFun
_ -> error "Corrupted binary data for DataOpt"
instance Binary FnOpt where
put x
= case x of
Inlinable -> putWord8 0
TotalFn -> putWord8 1
Dictionary -> putWord8 2
AssertTotal -> putWord8 3
Specialise x -> do putWord8 4
put x
Coinductive -> putWord8 5
PartialFn -> putWord8 6
Implicit -> putWord8 7
Reflection -> putWord8 8
ErrorHandler -> putWord8 9
ErrorReverse -> putWord8 10
CoveringFn -> putWord8 11
NoImplicit -> putWord8 12
Constructor -> putWord8 13
CExport x1 -> do putWord8 14
put x1
AutoHint -> putWord8 15
PEGenerated -> putWord8 16
get
= do i <- getWord8
case i of
0 -> return Inlinable
1 -> return TotalFn
2 -> return Dictionary
3 -> return AssertTotal
4 -> do x <- get
return (Specialise x)
5 -> return Coinductive
6 -> return PartialFn
7 -> return Implicit
8 -> return Reflection
9 -> return ErrorHandler
10 -> return ErrorReverse
11 -> return CoveringFn
12 -> return NoImplicit
13 -> return Constructor
14 -> do x1 <- get
return $ CExport x1
15 -> return AutoHint
16 -> return PEGenerated
_ -> error "Corrupted binary data for FnOpt"
instance Binary Fixity where
put x
= case x of
Infixl x1 -> do putWord8 0
put x1
Infixr x1 -> do putWord8 1
put x1
InfixN x1 -> do putWord8 2
put x1
PrefixN x1 -> do putWord8 3
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Infixl x1)
1 -> do x1 <- get
return (Infixr x1)
2 -> do x1 <- get
return (InfixN x1)
3 -> do x1 <- get
return (PrefixN x1)
_ -> error "Corrupted binary data for Fixity"
instance Binary FixDecl where
put (Fix x1 x2)
= do put x1
put x2
get
= do x1 <- get
x2 <- get
return (Fix x1 x2)
instance Binary ArgOpt where
put x
= case x of
HideDisplay -> putWord8 0
InaccessibleArg -> putWord8 1
AlwaysShow -> putWord8 2
UnknownImp -> putWord8 3
get
= do i <- getWord8
case i of
0 -> return HideDisplay
1 -> return InaccessibleArg
2 -> return AlwaysShow
3 -> return UnknownImp
_ -> error "Corrupted binary data for Static"
instance Binary Static where
put x
= case x of
Static -> putWord8 0
Dynamic -> putWord8 1
get
= do i <- getWord8
case i of
0 -> return Static
1 -> return Dynamic
_ -> error "Corrupted binary data for Static"
instance Binary Plicity where
put x
= case x of
Imp x1 x2 x3 x4 ->
do putWord8 0
put x1
put x2
put x3
put x4
Exp x1 x2 x3 ->
do putWord8 1
put x1
put x2
put x3
Constraint x1 x2 ->
do putWord8 2
put x1
put x2
TacImp x1 x2 x3 ->
do putWord8 3
put x1
put x2
put x3
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (Imp x1 x2 x3 x4)
1 -> do x1 <- get
x2 <- get
x3 <- get
return (Exp x1 x2 x3)
2 -> do x1 <- get
x2 <- get
return (Constraint x1 x2)
3 -> do x1 <- get
x2 <- get
x3 <- get
return (TacImp x1 x2 x3)
_ -> error "Corrupted binary data for Plicity"
instance (Binary t) => Binary (PDecl' t) where
put x
= case x of
PFix x1 x2 x3 -> do putWord8 0
put x1
put x2
put x3
PTy x1 x2 x3 x4 x5 x6 x7 x8
-> do putWord8 1
put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
PClauses x1 x2 x3 x4 -> do putWord8 2
put x1
put x2
put x3
put x4
PData x1 x2 x3 x4 x5 x6 ->
do putWord8 3
put x1
put x2
put x3
put x4
put x5
put x6
PParams x1 x2 x3 -> do putWord8 4
put x1
put x2
put x3
PNamespace x1 x2 x3 -> do putWord8 5
put x1
put x2
put x3
PRecord x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 ->
do putWord8 6
put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
put x9
put x10
put x11
put x12
PClass x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12
-> do putWord8 7
put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
put x9
put x10
put x11
put x12
PInstance x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 ->
do putWord8 8
put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
put x9
put x10
put x11
PDSL x1 x2 -> do putWord8 9
put x1
put x2
PCAF x1 x2 x3 -> do putWord8 10
put x1
put x2
put x3
PMutual x1 x2 -> do putWord8 11
put x1
put x2
PPostulate x1 x2 x3 x4 x5 x6 x7 x8
-> do putWord8 12
put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
PSyntax x1 x2 -> do putWord8 13
put x1
put x2
PDirective x1 -> error "Cannot serialize PDirective"
PProvider x1 x2 x3 x4 x5 x6 ->
do putWord8 15
put x1
put x2
put x3
put x4
put x5
put x6
PTransform x1 x2 x3 x4 -> do putWord8 16
put x1
put x2
put x3
put x4
PRunElabDecl x1 x2 x3 -> do putWord8 17
put x1
put x2
put x3
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
return (PFix x1 x2 x3)
1 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
return (PTy x1 x2 x3 x4 x5 x6 x7 x8)
2 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PClauses x1 x2 x3 x4)
3 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (PData x1 x2 x3 x4 x5 x6)
4 -> do x1 <- get
x2 <- get
x3 <- get
return (PParams x1 x2 x3)
5 -> do x1 <- get
x2 <- get
x3 <- get
return (PNamespace x1 x2 x3)
6 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
x9 <- get
x10 <- get
x11 <- get
x12 <- get
return (PRecord x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12)
7 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
x9 <- get
x10 <- get
x11 <- get
x12 <- get
return (PClass x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12)
8 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
x9 <- get
x10 <- get
x11 <- get
return (PInstance x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11)
9 -> do x1 <- get
x2 <- get
return (PDSL x1 x2)
10 -> do x1 <- get
x2 <- get
x3 <- get
return (PCAF x1 x2 x3)
11 -> do x1 <- get
x2 <- get
return (PMutual x1 x2)
12 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
return (PPostulate x1 x2 x3 x4 x5 x6 x7 x8)
13 -> do x1 <- get
x2 <- get
return (PSyntax x1 x2)
14 -> do error "Cannot deserialize PDirective"
15 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (PProvider x1 x2 x3 x4 x5 x6)
16 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PTransform x1 x2 x3 x4)
17 -> do x1 <- get
x2 <- get
x3 <- get
return (PRunElabDecl x1 x2 x3)
_ -> error "Corrupted binary data for PDecl'"
instance Binary t => Binary (ProvideWhat' t) where
put (ProvTerm x1 x2) = do putWord8 0
put x1
put x2
put (ProvPostulate x1) = do putWord8 1
put x1
get = do y <- getWord8
case y of
0 -> do x1 <- get
x2 <- get
return (ProvTerm x1 x2)
1 -> do x1 <- get
return (ProvPostulate x1)
_ -> error "Corrupted binary data for ProvideWhat"
instance Binary Using where
put (UImplicit x1 x2) = do putWord8 0; put x1; put x2
put (UConstraint x1 x2) = do putWord8 1; put x1; put x2
get = do i <- getWord8
case i of
0 -> do x1 <- get; x2 <- get; return (UImplicit x1 x2)
1 -> do x1 <- get; x2 <- get; return (UConstraint x1 x2)
_ -> error "Corrupted binary data for Using"
instance Binary SyntaxInfo where
put (Syn x1 x2 x3 x4 _ _ x5 x6 _ _ x7 _)
= do put x1
put x2
put x3
put x4
put x5
put x6
put x7
get
= do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
return (Syn x1 x2 x3 x4 [] id x5 x6 Nothing 0 x7 0)
instance (Binary t) => Binary (PClause' t) where
put x
= case x of
PClause x1 x2 x3 x4 x5 x6 -> do putWord8 0
put x1
put x2
put x3
put x4
put x5
put x6
PWith x1 x2 x3 x4 x5 x6 x7 -> do putWord8 1
put x1
put x2
put x3
put x4
put x5
put x6
put x7
PClauseR x1 x2 x3 x4 -> do putWord8 2
put x1
put x2
put x3
put x4
PWithR x1 x2 x3 x4 x5 -> do putWord8 3
put x1
put x2
put x3
put x4
put x5
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (PClause x1 x2 x3 x4 x5 x6)
1 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
return (PWith x1 x2 x3 x4 x5 x6 x7)
2 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PClauseR x1 x2 x3 x4)
3 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PWithR x1 x2 x3 x4 x5)
_ -> error "Corrupted binary data for PClause'"
instance (Binary t) => Binary (PData' t) where
put x
= case x of
PDatadecl x1 x2 x3 x4 -> do putWord8 0
put x1
put x2
put x3
put x4
PLaterdecl x1 x2 x3 -> do putWord8 1
put x1
put x2
put x3
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PDatadecl x1 x2 x3 x4)
1 -> do x1 <- get
x2 <- get
x3 <- get
return (PLaterdecl x1 x2 x3)
_ -> error "Corrupted binary data for PData'"
instance Binary PunInfo where
put x
= case x of
TypeOrTerm -> putWord8 0
IsType -> putWord8 1
IsTerm -> putWord8 2
get
= do i <- getWord8
case i of
0 -> return TypeOrTerm
1 -> return IsType
2 -> return IsTerm
_ -> error "Corrupted binary data for PunInfo"
instance Binary PTerm where
put x
= case x of
PQuote x1 -> do putWord8 0
put x1
PRef x1 x2 x3 -> do putWord8 1
put x1
put x2
put x3
PInferRef x1 x2 x3 -> do putWord8 2
put x1
put x2
put x3
PPatvar x1 x2 -> do putWord8 3
put x1
put x2
PLam x1 x2 x3 x4 x5 -> do putWord8 4
put x1
put x2
put x3
put x4
put x5
PPi x1 x2 x3 x4 x5 -> do putWord8 5
put x1
put x2
put x3
put x4
put x5
PLet x1 x2 x3 x4 x5 x6 -> do putWord8 6
put x1
put x2
put x3
put x4
put x5
put x6
PTyped x1 x2 -> do putWord8 7
put x1
put x2
PAppImpl x1 x2 -> error "PAppImpl in final term"
PApp x1 x2 x3 -> do putWord8 8
put x1
put x2
put x3
PAppBind x1 x2 x3 -> do putWord8 9
put x1
put x2
put x3
PMatchApp x1 x2 -> do putWord8 10
put x1
put x2
PCase x1 x2 x3 -> do putWord8 11
put x1
put x2
put x3
PTrue x1 x2 -> do putWord8 12
put x1
put x2
PResolveTC x1 -> do putWord8 15
put x1
PRewrite x1 x2 x3 x4 -> do putWord8 17
put x1
put x2
put x3
put x4
PPair x1 x2 x3 x4 x5 -> do putWord8 18
put x1
put x2
put x3
put x4
put x5
PDPair x1 x2 x3 x4 x5 x6 -> do putWord8 19
put x1
put x2
put x3
put x4
put x5
put x6
PAlternative x1 x2 x3 -> do putWord8 20
put x1
put x2
put x3
PHidden x1 -> do putWord8 21
put x1
PType x1 -> do putWord8 22
put x1
PGoal x1 x2 x3 x4 -> do putWord8 23
put x1
put x2
put x3
put x4
PConstant x1 x2 -> do putWord8 24
put x1
put x2
Placeholder -> putWord8 25
PDoBlock x1 -> do putWord8 26
put x1
PIdiom x1 x2 -> do putWord8 27
put x1
put x2
PReturn x1 -> do putWord8 28
put x1
PMetavar x1 x2 -> do putWord8 29
put x1
put x2
PProof x1 -> do putWord8 30
put x1
PTactics x1 -> do putWord8 31
put x1
PImpossible -> putWord8 33
PCoerced x1 -> do putWord8 34
put x1
PUnifyLog x1 -> do putWord8 35
put x1
PNoImplicits x1 -> do putWord8 36
put x1
PDisamb x1 x2 -> do putWord8 37
put x1
put x2
PUniverse x1 -> do putWord8 38
put x1
PRunElab x1 x2 x3 -> do putWord8 39
put x1
put x2
put x3
PAs x1 x2 x3 -> do putWord8 40
put x1
put x2
put x3
PElabError x1 -> do putWord8 41
put x1
PQuasiquote x1 x2 -> do putWord8 42
put x1
put x2
PUnquote x1 -> do putWord8 43
put x1
PQuoteName x1 x2 x3 -> do putWord8 44
put x1
put x2
put x3
PIfThenElse x1 x2 x3 x4 -> do putWord8 45
put x1
put x2
put x3
put x4
PConstSugar x1 x2 -> do putWord8 46
put x1
put x2
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (PQuote x1)
1 -> do x1 <- get
x2 <- get
x3 <- get
return (PRef x1 x2 x3)
2 -> do x1 <- get
x2 <- get
x3 <- get
return (PInferRef x1 x2 x3)
3 -> do x1 <- get
x2 <- get
return (PPatvar x1 x2)
4 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PLam x1 x2 x3 x4 x5)
5 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PPi x1 x2 x3 x4 x5)
6 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (PLet x1 x2 x3 x4 x5 x6)
7 -> do x1 <- get
x2 <- get
return (PTyped x1 x2)
8 -> do x1 <- get
x2 <- get
x3 <- get
return (PApp x1 x2 x3)
9 -> do x1 <- get
x2 <- get
x3 <- get
return (PAppBind x1 x2 x3)
10 -> do x1 <- get
x2 <- get
return (PMatchApp x1 x2)
11 -> do x1 <- get
x2 <- get
x3 <- get
return (PCase x1 x2 x3)
12 -> do x1 <- get
x2 <- get
return (PTrue x1 x2)
15 -> do x1 <- get
return (PResolveTC x1)
17 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PRewrite x1 x2 x3 x4)
18 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PPair x1 x2 x3 x4 x5)
19 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (PDPair x1 x2 x3 x4 x5 x6)
20 -> do x1 <- get
x2 <- get
x3 <- get
return (PAlternative x1 x2 x3)
21 -> do x1 <- get
return (PHidden x1)
22 -> do x1 <- get
return (PType x1)
23 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PGoal x1 x2 x3 x4)
24 -> do x1 <- get
x2 <- get
return (PConstant x1 x2)
25 -> return Placeholder
26 -> do x1 <- get
return (PDoBlock x1)
27 -> do x1 <- get
x2 <- get
return (PIdiom x1 x2)
28 -> do x1 <- get
return (PReturn x1)
29 -> do x1 <- get
x2 <- get
return (PMetavar x1 x2)
30 -> do x1 <- get
return (PProof x1)
31 -> do x1 <- get
return (PTactics x1)
33 -> return PImpossible
34 -> do x1 <- get
return (PCoerced x1)
35 -> do x1 <- get
return (PUnifyLog x1)
36 -> do x1 <- get
return (PNoImplicits x1)
37 -> do x1 <- get
x2 <- get
return (PDisamb x1 x2)
38 -> do x1 <- get
return (PUniverse x1)
39 -> do x1 <- get
x2 <- get
x3 <- get
return (PRunElab x1 x2 x3)
40 -> do x1 <- get
x2 <- get
x3 <- get
return (PAs x1 x2 x3)
41 -> do x1 <- get
return (PElabError x1)
42 -> do x1 <- get
x2 <- get
return (PQuasiquote x1 x2)
43 -> do x1 <- get
return (PUnquote x1)
44 -> do x1 <- get
x2 <- get
x3 <- get
return (PQuoteName x1 x2 x3)
45 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PIfThenElse x1 x2 x3 x4)
46 -> do x1 <- get
x2 <- get
return (PConstSugar x1 x2)
_ -> error "Corrupted binary data for PTerm"
instance Binary PAltType where
put x
= case x of
ExactlyOne x1 -> do putWord8 0
put x1
FirstSuccess -> putWord8 1
TryImplicit -> putWord8 2
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (ExactlyOne x1)
1 -> return FirstSuccess
2 -> return TryImplicit
_ -> error "Corrupted binary data for PAltType"
instance (Binary t) => Binary (PTactic' t) where
put x
= case x of
Intro x1 -> do putWord8 0
put x1
Focus x1 -> do putWord8 1
put x1
Refine x1 x2 -> do putWord8 2
put x1
put x2
Rewrite x1 -> do putWord8 3
put x1
LetTac x1 x2 -> do putWord8 4
put x1
put x2
Exact x1 -> do putWord8 5
put x1
Compute -> putWord8 6
Trivial -> putWord8 7
Solve -> putWord8 8
Attack -> putWord8 9
ProofState -> putWord8 10
ProofTerm -> putWord8 11
Undo -> putWord8 12
Try x1 x2 -> do putWord8 13
put x1
put x2
TSeq x1 x2 -> do putWord8 14
put x1
put x2
Qed -> putWord8 15
ApplyTactic x1 -> do putWord8 16
put x1
Reflect x1 -> do putWord8 17
put x1
Fill x1 -> do putWord8 18
put x1
Induction x1 -> do putWord8 19
put x1
ByReflection x1 -> do putWord8 20
put x1
ProofSearch x1 x2 x3 x4 x5 x6 -> do putWord8 21
put x1
put x2
put x3
put x4
put x5
put x6
DoUnify -> putWord8 22
CaseTac x1 -> do putWord8 23
put x1
SourceFC -> putWord8 24
Intros -> putWord8 25
Equiv x1 -> do putWord8 26
put x1
Claim x1 x2 -> do putWord8 27
put x1
put x2
Unfocus -> putWord8 28
MatchRefine x1 -> do putWord8 29
put x1
LetTacTy x1 x2 x3 -> do putWord8 30
put x1
put x2
put x3
TCInstance -> putWord8 31
GoalType x1 x2 -> do putWord8 32
put x1
put x2
TCheck x1 -> do putWord8 33
put x1
TEval x1 -> do putWord8 34
put x1
TDocStr x1 -> do putWord8 35
put x1
TSearch x1 -> do putWord8 36
put x1
Skip -> putWord8 37
TFail x1 -> do putWord8 38
put x1
Abandon -> putWord8 39
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Intro x1)
1 -> do x1 <- get
return (Focus x1)
2 -> do x1 <- get
x2 <- get
return (Refine x1 x2)
3 -> do x1 <- get
return (Rewrite x1)
4 -> do x1 <- get
x2 <- get
return (LetTac x1 x2)
5 -> do x1 <- get
return (Exact x1)
6 -> return Compute
7 -> return Trivial
8 -> return Solve
9 -> return Attack
10 -> return ProofState
11 -> return ProofTerm
12 -> return Undo
13 -> do x1 <- get
x2 <- get
return (Try x1 x2)
14 -> do x1 <- get
x2 <- get
return (TSeq x1 x2)
15 -> return Qed
16 -> do x1 <- get
return (ApplyTactic x1)
17 -> do x1 <- get
return (Reflect x1)
18 -> do x1 <- get
return (Fill x1)
19 -> do x1 <- get
return (Induction x1)
20 -> do x1 <- get
return (ByReflection x1)
21 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (ProofSearch x1 x2 x3 x4 x5 x6)
22 -> return DoUnify
23 -> do x1 <- get
return (CaseTac x1)
24 -> return SourceFC
25 -> return Intros
26 -> do x1 <- get
return (Equiv x1)
27 -> do x1 <- get
x2 <- get
return (Claim x1 x2)
28 -> return Unfocus
29 -> do x1 <- get
return (MatchRefine x1)
30 -> do x1 <- get
x2 <- get
x3 <- get
return (LetTacTy x1 x2 x3)
31 -> return TCInstance
32 -> do x1 <- get
x2 <- get
return (GoalType x1 x2)
33 -> do x1 <- get
return (TCheck x1)
34 -> do x1 <- get
return (TEval x1)
35 -> do x1 <- get
return (TDocStr x1)
36 -> do x1 <- get
return (TSearch x1)
37 -> return Skip
38 -> do x1 <- get
return (TFail x1)
39 -> return Abandon
_ -> error "Corrupted binary data for PTactic'"
instance (Binary t) => Binary (PDo' t) where
put x
= case x of
DoExp x1 x2 -> do putWord8 0
put x1
put x2
DoBind x1 x2 x3 x4 -> do putWord8 1
put x1
put x2
put x3
put x4
DoBindP x1 x2 x3 x4 -> do putWord8 2
put x1
put x2
put x3
put x4
DoLet x1 x2 x3 x4 x5 -> do putWord8 3
put x1
put x2
put x3
put x4
put x5
DoLetP x1 x2 x3 -> do putWord8 4
put x1
put x2
put x3
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
return (DoExp x1 x2)
1 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (DoBind x1 x2 x3 x4)
2 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (DoBindP x1 x2 x3 x4)
3 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (DoLet x1 x2 x3 x4 x5)
4 -> do x1 <- get
x2 <- get
x3 <- get
return (DoLetP x1 x2 x3)
_ -> error "Corrupted binary data for PDo'"
instance (Binary t) => Binary (PArg' t) where
put x
= case x of
PImp x1 x2 x3 x4 x5 ->
do putWord8 0
put x1
put x2
put x3
put x4
put x5
PExp x1 x2 x3 x4 ->
do putWord8 1
put x1
put x2
put x3
put x4
PConstraint x1 x2 x3 x4 ->
do putWord8 2
put x1
put x2
put x3
put x4
PTacImplicit x1 x2 x3 x4 x5 ->
do putWord8 3
put x1
put x2
put x3
put x4
put x5
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PImp x1 x2 x3 x4 x5)
1 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PExp x1 x2 x3 x4)
2 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
return (PConstraint x1 x2 x3 x4)
3 -> do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (PTacImplicit x1 x2 x3 x4 x5)
_ -> error "Corrupted binary data for PArg'"
instance Binary ClassInfo where
put (CI x1 x2 x3 x4 x5 _ x6)
= do put x1
put x2
put x3
put x4
put x5
put x6
get
= do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
return (CI x1 x2 x3 x4 x5 [] x6)
instance Binary RecordInfo where
put (RI x1 x2 x3)
= do put x1
put x2
put x3
get
= do x1 <- get
x2 <- get
x3 <- get
return (RI x1 x2 x3)
instance Binary OptInfo where
put (Optimise x1 x2)
= do put x1
put x2
get
= do x1 <- get
x2 <- get
return (Optimise x1 x2)
instance Binary FnInfo where
put (FnInfo x1)
= put x1
get
= do x1 <- get
return (FnInfo x1)
instance Binary TypeInfo where
put (TI x1 x2 x3 x4 x5) = do put x1
put x2
put x3
put x4
put x5
get = do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
return (TI x1 x2 x3 x4 x5)
instance Binary SynContext where
put x
= case x of
PatternSyntax -> putWord8 0
TermSyntax -> putWord8 1
AnySyntax -> putWord8 2
get
= do i <- getWord8
case i of
0 -> return PatternSyntax
1 -> return TermSyntax
2 -> return AnySyntax
_ -> error "Corrupted binary data for SynContext"
instance Binary Syntax where
put (Rule x1 x2 x3)
= do putWord8 0
put x1
put x2
put x3
put (DeclRule x1 x2)
= do putWord8 1
put x1
put x2
get
= do i <- getWord8
case i of
0 -> do x1 <- get
x2 <- get
x3 <- get
return (Rule x1 x2 x3)
1 -> do x1 <- get
x2 <- get
return (DeclRule x1 x2)
_ -> error "Corrupted binary data for Syntax"
instance (Binary t) => Binary (DSL' t) where
put (DSL x1 x2 x3 x4 x5 x6 x7 x8 x9 x10)
= do put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
put x9
put x10
get
= do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
x9 <- get
x10 <- get
return (DSL x1 x2 x3 x4 x5 x6 x7 x8 x9 x10)
instance Binary SSymbol where
put x
= case x of
Keyword x1 -> do putWord8 0
put x1
Symbol x1 -> do putWord8 1
put x1
Expr x1 -> do putWord8 2
put x1
SimpleExpr x1 -> do putWord8 3
put x1
Binding x1 -> do putWord8 4
put x1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Keyword x1)
1 -> do x1 <- get
return (Symbol x1)
2 -> do x1 <- get
return (Expr x1)
3 -> do x1 <- get
return (SimpleExpr x1)
4 -> do x1 <- get
return (Binding x1)
_ -> error "Corrupted binary data for SSymbol"
instance Binary Codegen where
put x
= case x of
Via str -> do putWord8 0
put str
Bytecode -> putWord8 1
get
= do i <- getWord8
case i of
0 -> do x1 <- get
return (Via x1)
1 -> return Bytecode
_ -> error "Corrupted binary data for Codegen"
|
aaronc/Idris-dev
|
src/Idris/IBC.hs
|
bsd-3-clause
| 95,229
| 0
| 19
| 54,321
| 26,500
| 12,261
| 14,239
| 2,225
| 17
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Geometry_msgs.TransformStamped where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.HeaderSupport
import qualified Ros.Geometry_msgs.Transform as Transform
import qualified Ros.Std_msgs.Header as Header
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
data TransformStamped = TransformStamped { _header :: Header.Header
, _child_frame_id :: P.String
, _transform :: Transform.Transform
} deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)
$(makeLenses ''TransformStamped)
instance RosBinary TransformStamped where
put obj' = put (_header obj') *> put (_child_frame_id obj') *> put (_transform obj')
get = TransformStamped <$> get <*> get <*> get
putMsg = putStampedMsg
instance HasHeader TransformStamped where
getSequence = view (header . Header.seq)
getFrame = view (header . Header.frame_id)
getStamp = view (header . Header.stamp)
setSequence = set (header . Header.seq)
instance MsgInfo TransformStamped where
sourceMD5 _ = "b5764a33bfeb3588febc2682852579b0"
msgTypeName _ = "geometry_msgs/TransformStamped"
instance D.Default TransformStamped
|
acowley/roshask
|
msgs/Geometry_msgs/Ros/Geometry_msgs/TransformStamped.hs
|
bsd-3-clause
| 1,612
| 1
| 10
| 322
| 389
| 228
| 161
| 36
| 0
|
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Network/Wai/Handler/Warp/FdCache.hs" #-}
{-# LANGUAGE BangPatterns, CPP #-}
-- | File descriptor cache to avoid locks in the kernel.
module Network.Wai.Handler.Warp.FdCache (
withFdCache
, Fd
, Refresh
, openFile
, closeFile
, setFileCloseOnExec
) where
import Control.Exception (bracket)
import Network.Wai.Handler.Warp.IORef
import Network.Wai.Handler.Warp.MultiMap
import Control.Reaper
import System.Posix.IO (openFd, OpenFileFlags(..), defaultFileFlags, OpenMode(ReadOnly), closeFd, FdOption(CloseOnExec), setFdOption)
import System.Posix.Types (Fd)
----------------------------------------------------------------
type Hash = Int
-- | An action to activate a Fd cache entry.
type Refresh = IO ()
getFdNothing :: Hash -> FilePath -> IO (Maybe Fd, Refresh)
getFdNothing _ _ = return (Nothing, return ())
----------------------------------------------------------------
-- | Creating 'MutableFdCache' and executing the action in the second
-- argument. The first argument is a cache duration in seconds.
withFdCache :: Int -> ((Hash -> FilePath -> IO (Maybe Fd, Refresh)) -> IO a) -> IO a
withFdCache 0 action = action getFdNothing
withFdCache duration action = bracket (initialize duration)
terminate
(\mfc -> action (getFd mfc))
----------------------------------------------------------------
data Status = Active | Inactive
newtype MutableStatus = MutableStatus (IORef Status)
status :: MutableStatus -> IO Status
status (MutableStatus ref) = readIORef ref
newActiveStatus :: IO MutableStatus
newActiveStatus = MutableStatus <$> newIORef Active
refresh :: MutableStatus -> Refresh
refresh (MutableStatus ref) = writeIORef ref Active
inactive :: MutableStatus -> IO ()
inactive (MutableStatus ref) = writeIORef ref Inactive
----------------------------------------------------------------
data FdEntry = FdEntry !FilePath !Fd !MutableStatus
openFile :: FilePath -> IO Fd
openFile path = do
fd <- openFd path ReadOnly Nothing defaultFileFlags{nonBlock=False}
setFileCloseOnExec fd
return fd
closeFile :: Fd -> IO ()
closeFile = closeFd
newFdEntry :: FilePath -> IO FdEntry
newFdEntry path = FdEntry path <$> openFile path <*> newActiveStatus
setFileCloseOnExec :: Fd -> IO ()
setFileCloseOnExec fd = setFdOption fd CloseOnExec True
----------------------------------------------------------------
type FdCache = MMap FdEntry
-- | Mutable Fd cacher.
newtype MutableFdCache = MutableFdCache (Reaper FdCache (Hash, FdEntry))
fdCache :: MutableFdCache -> IO FdCache
fdCache (MutableFdCache reaper) = reaperRead reaper
look :: MutableFdCache -> FilePath -> Hash -> IO (Maybe FdEntry)
look mfc path key = searchWith key check <$> fdCache mfc
where
check (FdEntry path' _ _) = path == path'
----------------------------------------------------------------
-- The first argument is a cache duration in seconds.
initialize :: Int -> IO MutableFdCache
initialize duration = MutableFdCache <$> mkReaper settings
where
settings = defaultReaperSettings {
reaperAction = clean
, reaperDelay = duration
, reaperCons = uncurry insert
, reaperNull = isEmpty
, reaperEmpty = empty
}
clean :: FdCache -> IO (FdCache -> FdCache)
clean old = do
new <- pruneWith old prune
return $ merge new
where
prune (FdEntry _ fd mst) = status mst >>= act
where
act Active = inactive mst >> return True
act Inactive = closeFd fd >> return False
----------------------------------------------------------------
terminate :: MutableFdCache -> IO ()
terminate (MutableFdCache reaper) = do
!t <- reaperStop reaper
mapM_ closeIt $ toList t
where
closeIt (FdEntry _ fd _) = closeFd fd
----------------------------------------------------------------
-- | Getting 'Fd' and 'Refresh' from the mutable Fd cacher.
getFd :: MutableFdCache -> Hash -> FilePath -> IO (Maybe Fd, Refresh)
getFd mfc@(MutableFdCache reaper) h path = look mfc path h >>= get
where
get Nothing = do
ent@(FdEntry _ fd mst) <- newFdEntry path
reaperAdd reaper (h, ent)
return (Just fd, refresh mst)
get (Just (FdEntry _ fd mst)) = do
refresh mst
return (Just fd, refresh mst)
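-- The block below is an added usage sketch, not part of the original
-- module: it shows how a caller might combine 'withFdCache' with the
-- lookup action it hands back. The 30-second duration and the use of the
-- path length as a stand-in 'Hash' are illustrative assumptions only.
_exampleFdCacheUsage :: FilePath -> IO ()
_exampleFdCacheUsage path = withFdCache 30 $ \look' -> do
    (mfd, refreshIt) <- look' (length path) path  -- look up (or open) the file
    case mfd of
      Nothing  -> return ()  -- only happens when caching is disabled (duration 0)
      Just _fd -> refreshIt  -- mark the cached descriptor as recently used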
|
phischu/fragnix
|
tests/packages/scotty/Network.Wai.Handler.Warp.FdCache.hs
|
bsd-3-clause
| 4,462
| 0
| 14
| 969
| 1,152
| 599
| 553
| 89
| 2
|
main :: IO ()
main = return ()
|
metasepi/chibios-arafura
|
demos/ARMCM4-STM32F407-LWIP_hs/hs_src/Main.hs
|
gpl-3.0
| 32
| 0
| 6
| 9
| 21
| 10
| 11
| 2
| 1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ja-JP">
<title>Groovy Support</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/groovy/src/main/javahelp/org/zaproxy/zap/extension/groovy/resources/help_ja_JP/helpset_ja_JP.hs
|
apache-2.0
| 959
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fr-FR">
<title>Script Console</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Rechercher</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
kingthorin/zap-extensions
|
addOns/scripts/src/main/javahelp/org/zaproxy/zap/extension/scripts/resources/help_fr_FR/helpset_fr_FR.hs
|
apache-2.0
| 964
| 77
| 66
| 156
| 407
| 206
| 201
| -1
| -1
|
module C3 (Tree(..), SameOrNot(..)) where
data Tree a = Leaf a | Branch (Tree a) (Tree a)
sumTree :: Num a => (Tree a) -> a
sumTree (Leaf x) = x
sumTree (Branch left right)
= (sumTree left) + (sumTree right)
class SameOrNot a
where
isSame :: a -> a -> Bool
isNotSame :: a -> a -> Bool
instance SameOrNot Int
where
isSame a b = a == b
isNotSame a b = a /= b
|
mpickering/HaRe
|
old/testing/moveDefBtwMods/C3_AstOut.hs
|
bsd-3-clause
| 416
| 0
| 8
| 136
| 188
| 100
| 88
| 12
| 1
|
{-# Language TypeFamilyDependencies #-}
{-# Language RankNTypes #-}
{-# Language KindSignatures #-}
{-# Language DataKinds #-}
{-# Language PolyKinds #-}
{-# Language GADTs #-}
import Data.Kind (Type)
data Code = I
type family
Interp (a :: Code) = (res :: Type) | res -> a where
Interp I = Bool
data T :: forall a. Interp a -> Type where
MkNat :: T False
instance Show (T a) where show _ = "MkNat"
main = do
print MkNat
|
sdiehl/ghc
|
testsuite/tests/typecheck/should_compile/T13643.hs
|
bsd-3-clause
| 497
| 0
| 7
| 155
| 119
| 70
| 49
| -1
| -1
|
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE RecordWildCards #-}
module Distribution.Client.Dependency.Modular.Dependency (
-- * Variables
Var(..)
, simplifyVar
, showVar
, varPI
-- * Conflict sets
, ConflictSet
, showCS
-- * Constrained instances
, CI(..)
, showCI
, merge
-- * Flagged dependencies
, FlaggedDeps
, FlaggedDep(..)
, TrueFlaggedDeps
, FalseFlaggedDeps
, Dep(..)
, showDep
, flattenFlaggedDeps
, QualifyOptions(..)
, qualifyDeps
-- ** Setting/forgetting components
, forgetCompOpenGoal
, setCompFlaggedDeps
-- ** Selecting subsets
, nonSetupDeps
, setupDeps
, select
-- * Reverse dependency map
, RevDepMap
-- * Goals
, Goal(..)
, GoalReason(..)
, GoalReasonChain
, QGoalReasonChain
, ResetGoal(..)
, toConflictSet
, goalReasonToVars
, goalReasonChainToVars
, goalReasonChainsToVars
-- * Open goals
, OpenGoal(..)
, close
  -- * Version ranges paired with origins (goals)
, VROrigin
, collapse
) where
import Prelude hiding (pi)
import Data.List (intercalate)
import Data.Map (Map)
import Data.Maybe (mapMaybe)
import Data.Set (Set)
import qualified Data.List as L
import qualified Data.Set as S
import Distribution.Client.Dependency.Modular.Flag
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.Version
import Distribution.Client.ComponentDeps (Component(..))
{-------------------------------------------------------------------------------
Variables
-------------------------------------------------------------------------------}
-- | The type of variables that play a role in the solver.
-- Note that the tree currently does not use this type directly,
-- and rather has separate tree nodes for the different types of
-- variables. This fits better with the fact that in most cases,
-- these have to be treated differently.
--
-- TODO: This isn't the ideal location to declare the type,
-- but we need it for constrained instances.
data Var qpn = P qpn | F (FN qpn) | S (SN qpn)
deriving (Eq, Ord, Show, Functor)
-- | For computing conflict sets, we map flag choice vars to a
-- single flag choice. This means that all flag choices are treated
-- as interdependent. So if one flag of a package ends up in a
-- conflict set, then all flags are being treated as being part of
-- the conflict set.
simplifyVar :: Var qpn -> Var qpn
simplifyVar (P qpn) = P qpn
simplifyVar (F (FN pi _)) = F (FN pi (mkFlag "flag"))
simplifyVar (S qsn) = S qsn
showVar :: Var QPN -> String
showVar (P qpn) = showQPN qpn
showVar (F qfn) = showQFN qfn
showVar (S qsn) = showQSN qsn
-- | Extract the package instance from a Var
varPI :: Var QPN -> (QPN, Maybe I)
varPI (P qpn) = (qpn, Nothing)
varPI (F (FN (PI qpn i) _)) = (qpn, Just i)
varPI (S (SN (PI qpn i) _)) = (qpn, Just i)
{-------------------------------------------------------------------------------
Conflict sets
-------------------------------------------------------------------------------}
type ConflictSet qpn = Set (Var qpn)
showCS :: ConflictSet QPN -> String
showCS = intercalate ", " . L.map showVar . S.toList
{-------------------------------------------------------------------------------
Constrained instances
-------------------------------------------------------------------------------}
-- | Constrained instance. If the choice has already been made, this is
-- a fixed instance, and we record, for convenience, the package name for
-- which the choice was made. Otherwise, it is a list of version ranges paired with
-- the goals / variables that introduced them.
data CI qpn = Fixed I (Goal qpn) | Constrained [VROrigin qpn]
deriving (Eq, Show, Functor)
showCI :: CI QPN -> String
showCI (Fixed i _) = "==" ++ showI i
showCI (Constrained vr) = showVR (collapse vr)
-- | Merge constrained instances. We currently adopt a lazy strategy for
-- merging, i.e., we only perform actual checking if one of the two choices
-- is fixed. If the merge fails, we return a conflict set indicating the
-- variables responsible for the failure, as well as the two conflicting
-- fragments.
--
-- Note that while there may be more than one conflicting pair of version
-- ranges, we only return the first we find.
--
-- TODO: Different pairs might have different conflict sets. We're
-- obviously interested in returning a conflict that has a "better" conflict
-- set in the sense that it contains variables that allow us to backjump
-- further. We might apply some heuristics here, such as changing the
-- order in which we check the constraints.
merge :: Ord qpn => CI qpn -> CI qpn -> Either (ConflictSet qpn, (CI qpn, CI qpn)) (CI qpn)
merge c@(Fixed i g1) d@(Fixed j g2)
| i == j = Right c
| otherwise = Left (S.union (toConflictSet g1) (toConflictSet g2), (c, d))
merge c@(Fixed (I v _) g1) (Constrained rs) = go rs -- I tried "reverse rs" here, but it seems to slow things down ...
where
go [] = Right c
go (d@(vr, g2) : vrs)
| checkVR vr v = go vrs
| otherwise = Left (S.union (toConflictSet g1) (toConflictSet g2), (c, Constrained [d]))
merge c@(Constrained _) d@(Fixed _ _) = merge d c
merge (Constrained rs) (Constrained ss) = Right (Constrained (rs ++ ss))
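-- Added illustration (not part of the original module): merging a 'Fixed'
-- choice with itself trivially succeeds and returns the left operand,
-- whereas two different fixed instances would yield a conflict set built
-- from both goals. The arguments here are hypothetical.
_mergeFixedWithItself :: Ord qpn => I -> Goal qpn -> Either (ConflictSet qpn, (CI qpn, CI qpn)) (CI qpn)
_mergeFixedWithItself i g = merge (Fixed i g) (Fixed i g)  -- == Right (Fixed i g)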
{-------------------------------------------------------------------------------
Flagged dependencies
-------------------------------------------------------------------------------}
-- | Flagged dependencies
--
-- 'FlaggedDeps' is the modular solver's view of a package's dependencies:
-- rather than having the dependencies indexed by component, each dependency
-- defines what component it is in.
--
-- However, top-level goals are also modelled as dependencies, but of course
-- these don't actually belong in any component of any package. Therefore, we
-- parameterize 'FlaggedDeps' and derived datatypes with a type argument that
-- specifies whether or not we have a component: we only ever instantiate this
-- type argument with @()@ for top-level goals, or 'Component' for everything
-- else (we could express this as a kind at the type-level, but that would
-- require a very recent GHC).
--
-- Note however, crucially, that independent of the type parameters, the list
-- of dependencies underneath a flag or stanza choice _always_ uses
-- Component as the type argument. This is important: when we pick a value for
-- a flag, we _must_ know what component the new dependencies belong to, or
-- else we won't be able to construct fine-grained reverse dependencies.
type FlaggedDeps comp qpn = [FlaggedDep comp qpn]
-- | Flagged dependencies can either be plain dependency constraints,
-- or flag-dependent dependency trees.
data FlaggedDep comp qpn =
Flagged (FN qpn) FInfo (TrueFlaggedDeps qpn) (FalseFlaggedDeps qpn)
| Stanza (SN qpn) (TrueFlaggedDeps qpn)
| Simple (Dep qpn) comp
deriving (Eq, Show, Functor)
-- | Conservatively flatten out flagged dependencies
--
-- NOTE: We do not filter out duplicates.
flattenFlaggedDeps :: FlaggedDeps Component qpn -> [(Dep qpn, Component)]
flattenFlaggedDeps = concatMap aux
where
aux :: FlaggedDep Component qpn -> [(Dep qpn, Component)]
aux (Flagged _ _ t f) = flattenFlaggedDeps t ++ flattenFlaggedDeps f
aux (Stanza _ t) = flattenFlaggedDeps t
aux (Simple d c) = [(d, c)]
type TrueFlaggedDeps qpn = FlaggedDeps Component qpn
type FalseFlaggedDeps qpn = FlaggedDeps Component qpn
-- | A dependency (constraint) associates a package name with a
-- constrained instance.
data Dep qpn = Dep qpn (CI qpn)
deriving (Eq, Show, Functor)
showDep :: Dep QPN -> String
showDep (Dep qpn (Fixed i (Goal v _)) ) =
(if P qpn /= v then showVar v ++ " => " else "") ++
showQPN qpn ++ "==" ++ showI i
showDep (Dep qpn (Constrained [(vr, Goal v _)])) =
showVar v ++ " => " ++ showQPN qpn ++ showVR vr
showDep (Dep qpn ci ) =
showQPN qpn ++ showCI ci
-- | Options for goal qualification (used in 'qualifyDeps')
--
-- See also 'defaultQualifyOptions'
data QualifyOptions = QO {
-- | Do we have a version of base relying on another version of base?
qoBaseShim :: Bool
      -- | Should dependencies of the setup script be treated as independent?
, qoSetupIndependent :: Bool
}
deriving Show
-- | Apply built-in rules for package qualifiers
--
-- NOTE: It's the _dependencies_ of a package that may or may not be independent
-- from the package itself. Package flag choices must of course be consistent.
qualifyDeps :: QualifyOptions -> QPN -> FlaggedDeps Component PN -> FlaggedDeps Component QPN
qualifyDeps QO{..} (Q pp' pn) = go
where
-- The Base qualifier does not get inherited
pp :: PP
pp = (if qoBaseShim then stripBase else id) pp'
go :: FlaggedDeps Component PN -> FlaggedDeps Component QPN
go = map go1
go1 :: FlaggedDep Component PN -> FlaggedDep Component QPN
go1 (Flagged fn nfo t f) = Flagged (fmap (Q pp) fn) nfo (go t) (go f)
go1 (Stanza sn t) = Stanza (fmap (Q pp) sn) (go t)
go1 (Simple dep comp) = Simple (goD dep comp) comp
goD :: Dep PN -> Component -> Dep QPN
goD dep comp
| qBase dep = fmap (Q (Base pn pp)) dep
| qSetup comp = fmap (Q (Setup pn pp)) dep
| otherwise = fmap (Q pp ) dep
-- Should we qualify this goal with the 'Base' package path?
qBase :: Dep PN -> Bool
qBase (Dep dep _ci) = qoBaseShim && unPackageName dep == "base"
    -- Should we qualify this goal with the 'Setup' package path?
qSetup :: Component -> Bool
qSetup comp = qoSetupIndependent && comp == ComponentSetup
{-------------------------------------------------------------------------------
Setting/forgetting the Component
-------------------------------------------------------------------------------}
forgetCompOpenGoal :: OpenGoal Component -> OpenGoal ()
forgetCompOpenGoal = mapCompOpenGoal $ const ()
setCompFlaggedDeps :: Component -> FlaggedDeps () qpn -> FlaggedDeps Component qpn
setCompFlaggedDeps = mapCompFlaggedDeps . const
{-------------------------------------------------------------------------------
Auxiliary: Mapping over the Component goal
We don't export these, because the only type instantiations for 'a' and 'b'
here should be () or Component. (We could express this at the type level
if we relied on newer versions of GHC.)
-------------------------------------------------------------------------------}
mapCompOpenGoal :: (a -> b) -> OpenGoal a -> OpenGoal b
mapCompOpenGoal g (OpenGoal d gr) = OpenGoal (mapCompFlaggedDep g d) gr
mapCompFlaggedDeps :: (a -> b) -> FlaggedDeps a qpn -> FlaggedDeps b qpn
mapCompFlaggedDeps = L.map . mapCompFlaggedDep
mapCompFlaggedDep :: (a -> b) -> FlaggedDep a qpn -> FlaggedDep b qpn
mapCompFlaggedDep _ (Flagged fn nfo t f) = Flagged fn nfo t f
mapCompFlaggedDep _ (Stanza sn t ) = Stanza sn t
mapCompFlaggedDep g (Simple pn a ) = Simple pn (g a)
{-------------------------------------------------------------------------------
Selecting FlaggedDeps subsets
(Correspond to the functions with the same names in ComponentDeps).
-------------------------------------------------------------------------------}
nonSetupDeps :: FlaggedDeps Component a -> FlaggedDeps Component a
nonSetupDeps = select (/= ComponentSetup)
setupDeps :: FlaggedDeps Component a -> FlaggedDeps Component a
setupDeps = select (== ComponentSetup)
-- | Select the dependencies of a given component
--
-- The modular solver kind of flattens the dependency trees from the .cabal
-- file, putting the component of each dependency at the leaves, rather than
-- indexing per component. For instance, package C might have flagged deps that
-- look something like
--
-- > Flagged <flagName> ..
-- > [Simple <package A> ComponentLib]
-- > [Simple <package B> ComponentLib]
--
-- indicating that the library component of C relies on either A or B, depending
-- on the flag. This makes it somewhat awkward however to extract certain kinds
-- of dependencies. In particular, extracting, say, the setup dependencies from
-- the above set of dependencies could either return the empty list, or else
--
-- > Flagged <flagName> ..
-- > []
-- > []
--
-- Both answers are reasonable; we opt to return the empty list in this
-- case, as it results in simpler search trees in the builder.
--
-- (Note that the builder already introduces separate goals for all flags of a
-- package, independently of whether or not they are used in any component, so
-- we don't have to worry about preserving flags here.)
select :: (Component -> Bool) -> FlaggedDeps Component a -> FlaggedDeps Component a
select p = mapMaybe go
where
go :: FlaggedDep Component a -> Maybe (FlaggedDep Component a)
go (Flagged fn nfo t f) = let t' = mapMaybe go t
f' = mapMaybe go f
in if null t' && null f'
then Nothing
else Just $ Flagged fn nfo t' f'
go (Stanza sn t ) = let t' = mapMaybe go t
in if null t'
then Nothing
else Just $ Stanza sn t'
go (Simple pn comp ) = if p comp then Just $ Simple pn comp
else Nothing
{-------------------------------------------------------------------------------
Reverse dependency map
-------------------------------------------------------------------------------}
-- | A map containing reverse dependencies between qualified
-- package names.
type RevDepMap = Map QPN [(Component, QPN)]
{-------------------------------------------------------------------------------
Goals
-------------------------------------------------------------------------------}
-- | Goals are solver variables paired with information about
-- why they have been introduced.
data Goal qpn = Goal (Var qpn) (GoalReasonChain qpn)
deriving (Eq, Show, Functor)
-- | Reasons why a goal can be added to a goal set.
data GoalReason qpn =
UserGoal
| PDependency (PI qpn)
| FDependency (FN qpn) Bool
| SDependency (SN qpn)
deriving (Eq, Show, Functor)
-- | The first element is the immediate reason. The rest are the reasons
-- for the reasons ...
type GoalReasonChain qpn = [GoalReason qpn]
type QGoalReasonChain = GoalReasonChain QPN
class ResetGoal f where
resetGoal :: Goal qpn -> f qpn -> f qpn
instance ResetGoal CI where
resetGoal g (Fixed i _) = Fixed i g
resetGoal g (Constrained vrs) = Constrained (L.map (\ (x, y) -> (x, resetGoal g y)) vrs)
instance ResetGoal Dep where
resetGoal g (Dep qpn ci) = Dep qpn (resetGoal g ci)
instance ResetGoal Goal where
resetGoal = const
-- | Compute a conflict set from a goal. The conflict set contains the
-- closure of goal reasons as well as the variable of the goal itself.
toConflictSet :: Ord qpn => Goal qpn -> ConflictSet qpn
toConflictSet (Goal g grs) = S.insert (simplifyVar g) (goalReasonChainToVars grs)
goalReasonToVars :: GoalReason qpn -> ConflictSet qpn
goalReasonToVars UserGoal = S.empty
goalReasonToVars (PDependency (PI qpn _)) = S.singleton (P qpn)
goalReasonToVars (FDependency qfn _) = S.singleton (simplifyVar (F qfn))
goalReasonToVars (SDependency qsn) = S.singleton (S qsn)
goalReasonChainToVars :: Ord qpn => GoalReasonChain qpn -> ConflictSet qpn
goalReasonChainToVars = S.unions . L.map goalReasonToVars
goalReasonChainsToVars :: Ord qpn => [GoalReasonChain qpn] -> ConflictSet qpn
goalReasonChainsToVars = S.unions . L.map goalReasonChainToVars
{-------------------------------------------------------------------------------
Open goals
-------------------------------------------------------------------------------}
-- | For open goals as they occur during the build phase, we need to store
-- additional information about flags.
data OpenGoal comp = OpenGoal (FlaggedDep comp QPN) QGoalReasonChain
deriving (Eq, Show)
-- | Closes a goal, i.e., removes all the extraneous information that we
-- need only during the build phase.
close :: OpenGoal comp -> Goal QPN
close (OpenGoal (Simple (Dep qpn _) _) gr) = Goal (P qpn) gr
close (OpenGoal (Flagged qfn _ _ _ ) gr) = Goal (F qfn) gr
close (OpenGoal (Stanza qsn _) gr) = Goal (S qsn) gr
{-------------------------------------------------------------------------------
Version ranges paired with origins
-------------------------------------------------------------------------------}
type VROrigin qpn = (VR, Goal qpn)
-- | Helper function to collapse a list of version ranges with origins into
-- a single, simplified, version range.
collapse :: [VROrigin qpn] -> VR
collapse = simplifyVR . L.foldr (.&&.) anyVR . L.map fst
|
plow-technologies/cabal
|
cabal-install/Distribution/Client/Dependency/Modular/Dependency.hs
|
bsd-3-clause
| 16,983
| 0
| 12
| 3,494
| 3,486
| 1,868
| 1,618
| 201
| 6
|
{-# LANGUAGE UnboxedTuples #-}
-- See Note [Float coercions (unlifted)] in Simplify
-- This one gave a CoreLint error when compiled optimised
--
-- See also Trac #1718, of which this is a simplified version
module ShouldCompile where
bar :: Bool -> Int
bar x = case (case x of { True -> (# 2,3 #); False -> error "urk" }) of
(# p,q #) -> p+q
|
olsner/ghc
|
testsuite/tests/simplCore/should_compile/simpl018.hs
|
bsd-3-clause
| 361
| 0
| 10
| 86
| 75
| 44
| 31
| 5
| 2
|
module F where
import System.Environment
foreign export ccall f :: IO ()
f = do
getProgName >>= print
getArgs >>= print
|
urbanslug/ghc
|
testsuite/tests/rts/T6006.hs
|
bsd-3-clause
| 125
| 0
| 7
| 26
| 43
| 23
| 20
| 6
| 1
|
-- Nada Victor, Fatma Ziwar, Fred Morcos
data LogicExpr = Prop Char | Neg LogicExpr | And LogicExpr LogicExpr | Or LogicExpr LogicExpr | Impl LogicExpr LogicExpr deriving Show
evaluate :: LogicExpr -> [(Char,Bool)] -> Bool
evaluate (Prop p) [] = error "Element Not in List"
evaluate (Prop p) ((x,y):xs)
  | p == x    = y
  | otherwise = evaluate (Prop p) xs
evaluate (Neg l) [] = error "Empty List"
evaluate (Neg l) ((x,y):xs)
  | evaluate l ((x,y):xs) == True = False
  | otherwise                     = True
evaluate (And l n) [] = error "Empty List"
evaluate (And l n) ((x,y):xs)
  | evaluate l ((x,y):xs) == True && evaluate n ((x,y):xs) == True = True
  | otherwise                                                      = False
evaluate (Or l n) [] = error "Empty List"
evaluate (Or l n) ((x,y):xs)
  | evaluate l ((x,y):xs) == False && evaluate n ((x,y):xs) == False = False
  | otherwise                                                        = True
evaluate (Impl l n) [] = error "Empty List"
evaluate (Impl l n) ((x,y):xs)
  | evaluate l ((x,y):xs) == False && evaluate n ((x,y):xs) == False = True
  | evaluate l ((x,y):xs) == True  && evaluate n ((x,y):xs) == True  = True
  | evaluate l ((x,y):xs) == False && evaluate n ((x,y):xs) == True  = True
  | otherwise                                                        = False
clausalNF :: LogicExpr -> LogicExpr
clausalNF e = simplify e
--clausalNF e = (simplifyDist (simplifyDeMorg (simplifyDeMor (simplifyNeg (simplifyImp e)))))
--clausalNF e | (checkSimplify e)==True = clausalNF (simplify e)
-- | otherwise =e
simplifyImp :: LogicExpr -> LogicExpr
simplifyImp (Impl l n) =(Or (Neg (simplifyImp l)) (simplifyImp n))
simplifyImp (Prop p) =(Prop p)
simplifyImp (Neg l) =(Neg (simplifyImp l))
simplifyImp (And l n) =(And (simplifyImp l) (simplifyImp n))
simplifyImp (Or l n) =(Or (simplifyImp l) (simplifyImp n))
simplifyNeg :: LogicExpr -> LogicExpr
simplifyNeg (Neg (Neg l)) =(simplifyNeg l)
simplifyNeg (Prop p) =(Prop p)
simplifyNeg (Neg l) =(Neg (simplifyNeg l))
simplifyNeg (And l n) =(And (simplifyNeg l) (simplifyNeg n))
simplifyNeg (Or l n) =(Or (simplifyNeg l) (simplifyNeg n))
simplifyNeg (Impl l n) =(Impl (simplifyNeg l) (simplifyNeg n))
simplifyDeMor :: LogicExpr -> LogicExpr
simplifyDeMor (Neg (Or l n)) =(And (Neg (simplifyDeMor l)) (Neg (simplifyDeMor n)))
simplifyDeMor (Prop p) =(Prop p)
simplifyDeMor (Neg l) =(Neg (simplifyDeMor l))
simplifyDeMor (And l n) =(And (simplifyDeMor l) (simplifyDeMor n))
simplifyDeMor (Or l n) =(Or (simplifyDeMor l) (simplifyDeMor n))
simplifyDeMor (Impl l n) =(Impl (simplifyDeMor l) (simplifyDeMor n))
simplifyDeMorg :: LogicExpr -> LogicExpr
simplifyDeMorg (Neg (And l n)) =(Or (Neg (simplifyDeMorg l)) (Neg (simplifyDeMorg n)))
simplifyDeMorg (Prop p) =(Prop p)
simplifyDeMorg (Neg l) =(Neg (simplifyDeMorg l))
simplifyDeMorg (And l n) =(And (simplifyDeMorg l) (simplifyDeMorg n))
simplifyDeMorg (Or l n) =(Or (simplifyDeMorg l) (simplifyDeMorg n))
simplifyDeMorg (Impl l n) =(Impl (simplifyDeMorg l) (simplifyDeMorg n))
simplifyDist :: LogicExpr -> LogicExpr
simplifyDist (Or (And p c) d) =(And (Or (simplifyDist p) (simplifyDist d)) (Or (simplifyDist c) (simplifyDist d)))
simplifyDist (Prop p) =(Prop p)
simplifyDist (Neg l) =(Neg (simplifyDist l))
simplifyDist (And l n) =(And (simplifyDist l) (simplifyDist n))
simplifyDist (Or l n) =(Or (simplifyDist l) (simplifyDist n))
simplifyDist (Impl l n) =(Impl (simplifyDist l) (simplifyDist n))
simplify :: LogicExpr -> LogicExpr
simplify x = simplifyDist (simplifyDeMorg (simplifyDeMorg (simplifyNeg (simplifyImp x))))
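-- Added illustration (not part of the original script): running the full
-- pipeline on a single implication rewrites it as a disjunction with a
-- negated antecedent; the later De Morgan and distribution passes leave it
-- unchanged.
exampleSimplify :: LogicExpr
exampleSimplify = simplify (Impl (Prop 'p') (Prop 'q'))
-- evaluates to: Or (Neg (Prop 'p')) (Prop 'q')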
--simplify (Prop p) =(Prop p)
--simplify (Impl l n) =(Or (Neg (simplify l)) (simplify n))
--simplify (Neg (Neg l)) = simplify l
--simplify (Neg (Or l n)) =(And (Neg (simplify l)) (Neg (simplify n)))
--simplify (Neg (And l n)) =(Or (Neg (simplify l)) (Neg (simplify n)))
--simplify (Or (And p c) d) =(And (Or (simplify p) (simplify d)) (Or (simplify c) (simplify d)))
--checkSimplify :: LogicExpr -> Bool
--checkSimplify (Impl l n)=True
--checkSimplify (Neg (Neg l))=True
--checkSimplify (Neg (Or l n))=True
--checkSimplify (Neg (And l n))=True
--checkSimplify (Or (And p c) d)=True
--checkSimplify (Prop p)= False
countVar :: LogicExpr-> [Char]
countVar (Prop p)=[p]
countVar (Neg l)= countVar l
countVar (And l n)= countVar l ++ countVar n
countVar (Or l n)= countVar l ++ countVar n
countVar (Impl l n)= countVar l ++ countVar n
--getBinDig:: Int->Int
--getBinDig n=length(convertToBin n)
fillSpace :: Int->[Int]->[Int]
fillSpace n l | (length l)==n =l
| (length l)<n =fillSpace n (0:l)
convertToBin :: Int ->[Int]
convertToBin 0=[0]
convertToBin 1 =[1]
convertToBin n = convertToBin (div n 2)++[mod n 2]
convertToBool :: [Int]->[Bool]
convertToBool [0]=[False]
convertToBool [1]=[True]
convertToBool (x:xs)= convertToBool [x] ++ convertToBool xs
numOfVar :: Int-> Int
numOfVar 0=0
numOfVar 1=1
numOfVar n= (2^n)-1
count ::Int -> [Int]
count 0=[0]
count n= (count (n-1))++[n]
--generate :: [Int]->[[Bool]]
generateTTbool :: Int->[[Bool]]
--generateTTbool x= map (convertToBool) (map (convertToBin) (count (numOfVar x)))
generateTTbool x= map (convertToBool) (map (fillSpace x) (map (convertToBin) (count (numOfVar x))))
--generateTTbool x= map (convertToBool) map (fillSpace (map (convertToBin) (fillSpace (count (numOfVar x)) (numOfVar x)))
generateTT :: LogicExpr->[[(Char,Bool)]]
generateTT x=(map (zip (countVar x)) ((generateTTbool (length (countVar x)))))
generate :: LogicExpr->[Bool]
generate x=(map (evaluate x) (generateTT x))
getR :: Int-> [Char]
getR 0=[]
getR 1=['r']
getR n=['r']++(getR (n-1))
generating :: LogicExpr->[(Char,Bool)]
generating x= (zip (getR (length (generate x)))) (generate x)
--getHead :: [(Char,Bool)]->(Char,Bool)
--goThrough [x]=x
--sget
generateTruthTable :: LogicExpr -> [[(Char,Bool)]]
generateTruthTable x= makeresult (generating x) (generateTT x)
--generateTruthTable x=map (++ generating x) (generateTT x)
makeresult :: [(Char,Bool)] -> [[(Char,Bool)]] -> [[(Char,Bool)]]
makeresult [] []=[]
makeresult (x:xs) ((y:ys):l) = ((y:ys)++[x]) : makeresult xs l
--generate x=map (zip ['r']) (map (evaluate x) (generateTT x))
--generateTT x= (map (: ('r',evaluate x (l))) (l:ls)) where (l:ls)=(map (zip (countVar x)) ((generateTTbool (length (countVar x)))))
--generateTT (Prop x)= (generateTTbool 1)++(evaluate )
--generateTT x= map (++(evaluate x (head (generateTTbool (length (countVar x)))))) generateTTbool (length (countVar x))
--clausalNF(And(Or(Prop 'p')(Or(Prop 'q')(Neg(Prop 's'))))(And (Impl (Prop 'q') (Prop 'r')) (Neg(Impl(Neg(Prop 'q')) (Prop 's')))))
|
fredmorcos/attic
|
projects/logic-expr-eval/logic-expression-evaluator.hs
|
isc
| 6,457
| 58
| 14
| 977
| 2,805
| 1,465
| 1,340
| 98
| 1
|
{-# LANGUAGE FlexibleContexts, FlexibleInstances, TypeSynonymInstances #-}
-- | Functions for inspecting and constructing various types.
module Futhark.Representation.AST.Attributes.Types
(
rankShaped
, arrayRank
, arrayShape
, modifyArrayShape
, setArrayShape
, existential
, uniqueness
, setUniqueness
, unifyUniqueness
, unique
, staticShapes
, staticShapes1
, primType
, arrayOf
, arrayOfRow
, arrayOfShape
, setOuterSize
, setDimSize
, setOuterDim
, setDim
, setArrayDims
, setArrayExtDims
, peelArray
, stripArray
, arrayDims
, arrayExtDims
, shapeSize
, arraySize
, arraysSize
, rowType
, elemType
, transposeType
, rearrangeType
, diet
, subtypeOf
, subtypesOf
, toDecl
, fromDecl
, extractShapeContext
, shapeContext
, shapeContextSize
, hasStaticShape
, hasStaticShapes
, generaliseExtTypes
, existentialiseExtTypes
, shapeMapping
, shapeMapping'
, shapeExtMapping
-- * Abbreviations
, int8, int16, int32, int64
, float32, float64
-- * The Typed typeclass
, Typed (..)
, DeclTyped (..)
, ExtTyped (..)
, DeclExtTyped (..)
, SetType (..)
, FixExt (..)
)
where
import Control.Applicative
import Control.Monad.State
import Data.Maybe
import Data.Monoid
import Data.List (elemIndex)
import qualified Data.Set as S
import qualified Data.Map.Strict as M
import Prelude
import Futhark.Representation.AST.Syntax.Core
import Futhark.Representation.AST.Attributes.Constants
import Futhark.Representation.AST.Attributes.Rearrange
-- | Remove shape information from a type.
rankShaped :: ArrayShape shape => TypeBase shape u -> TypeBase Rank u
rankShaped (Array et sz u) = Array et (Rank $ shapeRank sz) u
rankShaped (Prim et) = Prim et
rankShaped (Mem size space) = Mem size space
-- | Return the dimensionality of a type. For non-arrays, this is
-- zero. For a one-dimensional array it is one, for a two-dimensional
-- it is two, and so forth.
arrayRank :: ArrayShape shape => TypeBase shape u -> Int
arrayRank = shapeRank . arrayShape
-- | Return the shape of a type - for non-arrays, this is the
-- 'mempty'.
arrayShape :: ArrayShape shape => TypeBase shape u -> shape
arrayShape (Array _ ds _) = ds
arrayShape _ = mempty
-- | Modify the shape of an array - for non-arrays, this does nothing.
modifyArrayShape :: ArrayShape newshape =>
(oldshape -> newshape)
-> TypeBase oldshape u
-> TypeBase newshape u
modifyArrayShape f (Array t ds u)
| shapeRank ds' == 0 = Prim t
| otherwise = Array t (f ds) u
where ds' = f ds
modifyArrayShape _ (Prim t) = Prim t
modifyArrayShape _ (Mem size space) = Mem size space
-- | Set the shape of an array. If the given type is not an
-- array, return the type unchanged.
setArrayShape :: ArrayShape newshape =>
TypeBase oldshape u
-> newshape
-> TypeBase newshape u
setArrayShape t ds = modifyArrayShape (const ds) t
-- | True if the given type has a dimension that is existentially sized.
existential :: ExtType -> Bool
existential = any ext . shapeDims . arrayShape
where ext (Ext _) = True
ext (Free _) = False
-- | Return the uniqueness of a type.
uniqueness :: TypeBase shape Uniqueness -> Uniqueness
uniqueness (Array _ _ u) = u
uniqueness _ = Nonunique
-- | @unique t@ is 'True' if the type of the argument is unique.
unique :: TypeBase shape Uniqueness -> Bool
unique = (==Unique) . uniqueness
-- | Set the uniqueness attribute of a type.
setUniqueness :: TypeBase shape Uniqueness
-> Uniqueness
-> TypeBase shape Uniqueness
setUniqueness (Array et dims _) u = Array et dims u
setUniqueness t _ = t
-- | Unify the uniqueness attributes and aliasing information of two
-- types. The two types must otherwise be identical. The resulting
-- alias set will be the 'mappend' of the two input types' aliasing sets,
-- and the uniqueness will be 'Unique' only if both of the input types
-- are unique.
unifyUniqueness :: Monoid u =>
TypeBase shape u
-> TypeBase shape u
-> TypeBase shape u
unifyUniqueness (Array et dims u1) (Array _ _ u2) =
Array et dims (u1 <> u2)
unifyUniqueness t1 _ = t1
-- | Convert types with non-existential shapes to types with
-- non-existential shapes. Only the representation is changed, so all
-- the shapes will be 'Free'.
staticShapes :: [TypeBase Shape u] -> [TypeBase ExtShape u]
staticShapes = map staticShapes1
-- | As 'staticShapes', but on a single type.
staticShapes1 :: TypeBase Shape u -> TypeBase ExtShape u
staticShapes1 (Prim bt) =
Prim bt
staticShapes1 (Array bt (Shape shape) u) =
Array bt (Shape $ map Free shape) u
staticShapes1 (Mem size space) =
Mem size space
-- | @arrayOf t s u@ constructs an array type. The convenience
-- compared to using the 'Array' constructor directly is that @t@ can
-- itself be an array. If @t@ is an @n@-dimensional array, and @s@ is
-- a shape of rank @m@, the resulting type has @n+m@ dimensions.
-- The uniqueness of the new array will be @u@, no matter the
-- uniqueness of @t@. If the shape @s@ has rank 0, then @t@ will
-- be returned, although if it is an array, with the uniqueness
-- changed to @u@.
arrayOf :: ArrayShape shape =>
TypeBase shape u_unused -> shape -> u -> TypeBase shape u
arrayOf (Array et size1 _) size2 u =
Array et (size2 <> size1) u
arrayOf (Prim et) s _
| 0 <- shapeRank s = Prim et
arrayOf (Prim et) size u =
Array et size u
arrayOf Mem{} _ _ =
error "arrayOf Mem"
-- | Construct an array whose rows are the given type, and the outer
-- size is the given 'SubExp'. This is just a convenient wrapper
-- around 'arrayOf'.
arrayOfRow :: Type -> SubExp -> Type
arrayOfRow t size = arrayOf t (Shape [size]) NoUniqueness
-- | Construct an array whose rows are the given type, and the outer
-- size is the given 'Shape'. This is just a convenient wrapper
-- around 'arrayOf'.
arrayOfShape :: Type -> Shape -> Type
arrayOfShape t shape = arrayOf t shape NoUniqueness
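-- Added illustration (not part of the original module): a two-dimensional
-- int32 array type built row by row with 'arrayOfRow'. The sizes @n@ and
-- @m@ are assumed to be 'SubExp's supplied by the caller.
_exampleMatrixType :: SubExp -> SubExp -> Type
_exampleMatrixType n m = arrayOfRow (arrayOfRow (Prim int32) m) n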
-- | Set the dimensions of an array. If the given type is not an
-- array, return the type unchanged.
setArrayDims :: TypeBase oldshape u -> [SubExp] -> TypeBase Shape u
setArrayDims t dims = t `setArrayShape` Shape dims
-- | Set the existential dimensions of an array. If the given type is
-- not an array, return the type unchanged.
setArrayExtDims :: TypeBase oldshape u -> [ExtSize] -> TypeBase ExtShape u
setArrayExtDims t dims = t `setArrayShape` Shape dims
-- | Replace the size of the outermost dimension of an array. If the
-- given type is not an array, it is returned unchanged.
setOuterSize :: ArrayShape (ShapeBase d) =>
TypeBase (ShapeBase d) u -> d -> TypeBase (ShapeBase d) u
setOuterSize = setDimSize 0
-- | Replace the size of the given dimension of an array. If the
-- given type is not an array, it is returned unchanged.
setDimSize :: ArrayShape (ShapeBase d) =>
Int -> TypeBase (ShapeBase d) u -> d -> TypeBase (ShapeBase d) u
setDimSize i t e = t `setArrayShape` setDim i (arrayShape t) e
-- | Replace the outermost dimension of an array shape.
setOuterDim :: ShapeBase d -> d -> ShapeBase d
setOuterDim = setDim 0
-- | Replace the specified dimension of an array shape.
setDim :: Int -> ShapeBase d -> d -> ShapeBase d
setDim i (Shape ds) e = Shape $ take i ds ++ e : drop (i+1) ds
-- | @peelArray n t@ returns the type resulting from peeling the first
-- @n@ array dimensions from @t@. Returns @Nothing@ if @t@ has less
-- than @n@ dimensions.
peelArray :: ArrayShape shape =>
Int -> TypeBase shape u -> Maybe (TypeBase shape u)
peelArray 0 t = Just t
peelArray n (Array et shape u)
| shapeRank shape == n = Just $ Prim et
| shapeRank shape > n = Just $ Array et (stripDims n shape) u
peelArray _ _ = Nothing
-- | @stripArray n t@ removes the @n@ outermost layers of the array.
-- Essentially, it is the type of indexing an array of type @t@ with
-- @n@ indexes.
stripArray :: ArrayShape shape => Int -> TypeBase shape u -> TypeBase shape u
stripArray n (Array et shape u)
| n < shapeRank shape = Array et (stripDims n shape) u
| otherwise = Prim et
stripArray _ t = t
-- | Return the size of the given dimension. If the dimension does
-- not exist, the zero constant is returned.
shapeSize :: Int -> Shape -> SubExp
shapeSize i shape = case drop i $ shapeDims shape of
e : _ -> e
[] -> constant (0 :: Int32)
-- | Return the dimensions of a type - for non-arrays, this is the
-- empty list.
arrayDims :: TypeBase Shape u -> [SubExp]
arrayDims = shapeDims . arrayShape
-- | Return the existential dimensions of a type - for non-arrays,
-- this is the empty list.
arrayExtDims :: TypeBase ExtShape u -> [ExtSize]
arrayExtDims = shapeDims . arrayShape
-- | Return the size of the given dimension. If the dimension does
-- not exist, the zero constant is returned.
arraySize :: Int -> TypeBase Shape u -> SubExp
arraySize i = shapeSize i . arrayShape
-- | Return the size of the given dimension in the first element of
-- the given type list. If the dimension does not exist, or no types
-- are given, the zero constant is returned.
arraysSize :: Int -> [TypeBase Shape u] -> SubExp
arraysSize _ [] = constant (0 :: Int32)
arraysSize i (t:_) = arraySize i t
-- | Return the immediate row-type of an array. For @[[int]]@, this
-- would be @[int]@.
rowType :: ArrayShape shape => TypeBase shape u -> TypeBase shape u
rowType = stripArray 1
-- | A type is a primitive type if it is not an array or memory block.
primType :: TypeBase shape u -> Bool
primType Array{} = False
primType Mem{} = False
primType _ = True
-- | Returns the bottommost type of an array. For @[[int]]@, this
-- would be @int@. If the given type is not an array, it is returned.
elemType :: TypeBase shape u -> PrimType
elemType (Array t _ _) = t
elemType (Prim t) = t
elemType Mem{} = error "elemType Mem"
-- | Swap the two outer dimensions of the type.
transposeType :: Type -> Type
transposeType = rearrangeType [1,0]
-- | Rearrange the dimensions of the type. If the length of the
-- permutation does not match the rank of the type, the permutation
-- will be extended with identity.
rearrangeType :: [Int] -> Type -> Type
rearrangeType perm t =
t `setArrayShape` Shape (rearrangeShape perm' $ arrayDims t)
where perm' = perm ++ [length perm .. arrayRank t - 1]
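-- Added illustration (not part of the original module): transposing the
-- two-dimensional type from the sketch above is a fixed [1,0] rearrangement
-- of its shape.
_exampleTransposed :: SubExp -> SubExp -> Type
_exampleTransposed n m = transposeType (_exampleMatrixType n m)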
-- | @diet t@ returns a description of how a function parameter of
-- type @t@ might consume its argument.
diet :: TypeBase shape Uniqueness -> Diet
diet (Prim _) = Observe
diet (Array _ _ Unique) = Consume
diet (Array _ _ Nonunique) = Observe
diet Mem{} = Observe
-- | @x \`subtypeOf\` y@ is true if @x@ is a subtype of @y@ (or equal to
-- @y@), meaning @x@ is valid whenever @y@ is.
subtypeOf :: (Ord u, ArrayShape shape) =>
TypeBase shape u
-> TypeBase shape u
-> Bool
subtypeOf (Array t1 shape1 u1) (Array t2 shape2 u2) =
u2 <= u1 &&
t1 == t2 &&
shape1 `subShapeOf` shape2
subtypeOf (Prim t1) (Prim t2) = t1 == t2
subtypeOf (Mem _ space1) (Mem _ space2) = space1 == space2
subtypeOf _ _ = False
-- | @xs \`subtypesOf\` ys@ is true if @xs@ is the same size as @ys@,
-- and each element in @xs@ is a subtype of the corresponding element
-- in @ys@.
subtypesOf :: (Ord u, ArrayShape shape) =>
[TypeBase shape u]
-> [TypeBase shape u]
-> Bool
subtypesOf xs ys = length xs == length ys &&
and (zipWith subtypeOf xs ys)
toDecl :: TypeBase shape NoUniqueness
-> Uniqueness
-> TypeBase shape Uniqueness
toDecl (Prim bt) _ = Prim bt
toDecl (Array et shape _) u = Array et shape u
toDecl (Mem size space) _ = Mem size space
fromDecl :: TypeBase shape Uniqueness
-> TypeBase shape NoUniqueness
fromDecl (Prim bt) = Prim bt
fromDecl (Array et shape _) = Array et shape NoUniqueness
fromDecl (Mem size space) = Mem size space
-- | Given the existential return type of a function, and the shapes
-- of the values returned by the function, return the existential
-- shape context. That is, those sizes that are existential in the
-- return type.
extractShapeContext :: [TypeBase ExtShape u] -> [[a]] -> [a]
extractShapeContext ts shapes =
evalState (concat <$> zipWithM extract ts shapes) S.empty
where extract t shape =
catMaybes <$> zipWithM extract' (shapeDims $ arrayShape t) shape
extract' (Ext x) v = do
seen <- gets $ S.member x
if seen then return Nothing
else do modify $ S.insert x
return $ Just v
extract' (Free _) _ = return Nothing
-- | The set of identifiers used for the shape context in the given
-- 'ExtType's.
shapeContext :: [TypeBase ExtShape u] -> S.Set Int
shapeContext = S.fromList
. concatMap (mapMaybe ext . shapeDims . arrayShape)
where ext (Ext x) = Just x
ext (Free _) = Nothing
-- | The size of the set that would be returned by 'shapeContext'.
shapeContextSize :: [ExtType] -> Int
shapeContextSize = S.size . shapeContext
-- | If all dimensions of the given 'RetType' are statically known,
-- return the corresponding list of 'Type'.
hasStaticShape :: ExtType -> Maybe Type
hasStaticShape (Prim bt) =
Just $ Prim bt
hasStaticShape (Mem size space) =
Just $ Mem size space
hasStaticShape (Array bt (Shape shape) u) =
Array bt <$> (Shape <$> mapM isFree shape) <*> pure u
where isFree (Free s) = Just s
isFree (Ext _) = Nothing
hasStaticShapes :: [ExtType] -> Maybe [Type]
hasStaticShapes = mapM hasStaticShape
-- | Given two lists of 'ExtType's of the same length, return a list
-- of 'ExtType's that is a subtype (as per 'subtypeOf') of the two
-- operands.
generaliseExtTypes :: [TypeBase ExtShape u]
-> [TypeBase ExtShape u]
-> [TypeBase ExtShape u]
generaliseExtTypes rt1 rt2 =
evalState (zipWithM unifyExtShapes rt1 rt2) (0, M.empty)
where unifyExtShapes t1 t2 =
setArrayShape t1 . Shape <$>
zipWithM unifyExtDims
(shapeDims $ arrayShape t1)
(shapeDims $ arrayShape t2)
unifyExtDims (Free se1) (Free se2)
| se1 == se2 = return $ Free se1 -- Arbitrary
| otherwise = do (n,m) <- get
put (n + 1, m)
return $ Ext n
unifyExtDims (Ext x) (Ext y)
| x == y = Ext <$> (maybe (new x) return =<<
gets (M.lookup x . snd))
unifyExtDims (Ext x) _ = Ext <$> new x
unifyExtDims _ (Ext x) = Ext <$> new x
new x = do (n,m) <- get
put (n + 1, M.insert x n m)
return n
-- | Given a list of 'ExtType's and a list of "forbidden" names,
-- modify the dimensions of the 'ExtType's such that they are 'Ext'
-- where they were previously 'Free' with a variable in the set of
-- forbidden names.
existentialiseExtTypes :: [VName] -> [ExtType] -> [ExtType]
existentialiseExtTypes inaccessible = map makeBoundShapesFree
where makeBoundShapesFree =
modifyArrayShape $ fmap checkDim
checkDim (Free (Var v))
| Just i <- v `elemIndex` inaccessible =
Ext i
checkDim d = d
-- | In the call @shapeMapping ts1 ts2@, the lists @ts1@ and @ts2@ must
-- be of equal length and their corresponding elements have the same
-- types modulo exact dimensions (but matching array rank is
-- important). The result is a mapping from named dimensions of @ts1@
-- to the corresponding dimension in @ts2@.
--
-- This function is useful when @ts1@ are the value parameters of some
-- function and @ts2@ are the value arguments, and we need to figure
-- out which shape context to pass.
shapeMapping :: [TypeBase Shape u0] -> [TypeBase Shape u1] -> M.Map VName SubExp
shapeMapping ts = shapeMapping' ts . map arrayDims
-- | Like @shapeMapping@, but works with explicit dimensions.
shapeMapping' :: [TypeBase Shape u] -> [[a]] -> M.Map VName a
shapeMapping' = dimMapping arrayDims id match
where match Constant{} _ = M.empty
match (Var v) dim = M.singleton v dim
-- | Like 'shapeMapping', but produces a mapping for the dimensions context.
shapeExtMapping :: [TypeBase ExtShape u] -> [TypeBase Shape u1] -> M.Map Int SubExp
shapeExtMapping = dimMapping arrayExtDims arrayDims match
where match Free{} _ = mempty
match (Ext i) dim = M.singleton i dim
dimMapping :: Monoid res =>
(t1 -> [dim1]) -> (t2 -> [dim2]) -> (dim1 -> dim2 -> res)
-> [t1] -> [t2]
-> res
dimMapping getDims1 getDims2 f ts1 ts2 =
mconcat $ concat $ zipWith (zipWith f) (map getDims1 ts1) (map getDims2 ts2)
int8 :: PrimType
int8 = IntType Int8
int16 :: PrimType
int16 = IntType Int16
int32 :: PrimType
int32 = IntType Int32
int64 :: PrimType
int64 = IntType Int64
float32 :: PrimType
float32 = FloatType Float32
float64 :: PrimType
float64 = FloatType Float64
-- | Typeclass for things that contain 'Type's.
class Typed t where
typeOf :: t -> Type
instance Typed Type where
typeOf = id
instance Typed DeclType where
typeOf = fromDecl
instance Typed Ident where
typeOf = identType
instance Typed attr => Typed (Param attr) where
typeOf = typeOf . paramAttr
instance Typed attr => Typed (PatElemT attr) where
typeOf = typeOf . patElemAttr
instance Typed b => Typed (a,b) where
typeOf = typeOf . snd
-- | Typeclass for things that contain 'DeclType's.
class DeclTyped t where
declTypeOf :: t -> DeclType
instance DeclTyped DeclType where
declTypeOf = id
instance DeclTyped attr => DeclTyped (Param attr) where
declTypeOf = declTypeOf . paramAttr
-- | Typeclass for things that contain 'ExtType's.
class FixExt t => ExtTyped t where
extTypeOf :: t -> ExtType
instance ExtTyped ExtType where
extTypeOf = id
-- | Typeclass for things that contain 'DeclExtType's.
class FixExt t => DeclExtTyped t where
declExtTypeOf :: t -> DeclExtType
instance DeclExtTyped DeclExtType where
declExtTypeOf = id
-- | Typeclass for things whose type can be changed.
class Typed a => SetType a where
setType :: a -> Type -> a
instance SetType Type where
setType _ t = t
instance SetType b => SetType (a, b) where
setType (a, b) t = (a, setType b t)
instance SetType attr => SetType (PatElemT attr) where
setType (PatElem name bindage attr) t =
PatElem name bindage $ setType attr t
-- | Something with an existential context that can be (partially)
-- fixed.
class FixExt t where
  -- | Fix the given existential variable to the indicated free
-- value.
fixExt :: Int -> SubExp -> t -> t
instance (FixExt shape, ArrayShape shape) => FixExt (TypeBase shape u) where
fixExt i se = modifyArrayShape $ fixExt i se
instance FixExt d => FixExt (ShapeBase d) where
fixExt i se = fmap $ fixExt i se
instance FixExt a => FixExt [a] where
fixExt i se = fmap $ fixExt i se
instance FixExt ExtSize where
fixExt i se (Ext j) | j > i = Ext $ j - 1
| j == i = Free se
| otherwise = Ext j
fixExt _ _ (Free x) = Free x
instance FixExt () where
fixExt _ _ () = ()
|
ihc/futhark
|
src/Futhark/Representation/AST/Attributes/Types.hs
|
isc
| 19,557
| 0
| 15
| 4,837
| 4,963
| 2,564
| 2,399
| 366
| 4
|
module Main where
import Prelude hiding (lookup)
import Network.Wai.Handler.Warp (run)
import System.Environment (lookupEnv)
import Database.Persist.Postgresql (runSqlPool)
import Data.Yaml.Config (load, subconfig, lookupDefault, lookup)
import Data.Text.Encoding (encodeUtf8)
import Control.Applicative ((<$>))
import Qy.Config
import Qy.App (app)
import Qy.Model (doMigrations)
import Qy.Types
import Qy.Chat.Simple (appWithSocket)
main :: IO ()
main = do
config <- load "./chatqy.yaml"
qyConfig <- subconfig "chatqy" config
let port = lookupDefault "port" 8081 qyConfig
poolNum = lookupDefault "poolNum" 1 qyConfig
connStr <- encodeUtf8 <$> lookup "connStr" qyConfig
env <- lookup "env" qyConfig
pool <- makePool env connStr poolNum
rmap <- makeTokenMap
tmap <- makeRoomMap
let cfg = defaultConfig { getPool = pool
, getEnv = env
, getTokenMap = rmap
, getRoomMap = tmap}
logger = setLogger env
runSqlPool doMigrations pool
run port . logger . appWithSocket cfg $ app cfg
|
realli/chatqy
|
src/Main.hs
|
mit
| 1,142
| 0
| 11
| 298
| 326
| 175
| 151
| 31
| 1
|
module Rebase.Data.Bifunctor
(
module Data.Bifunctor,
mapLeft,
mapRight,
)
where
import Data.Bifunctor
-- |
-- A more meaningful and conflict-free alias for 'first'.
{-# INLINE mapLeft #-}
mapLeft :: Bifunctor p => (a -> b) -> p a c -> p b c
mapLeft =
first
-- |
-- A more meaningful and conflict-free alias for 'second'.
{-# INLINE mapRight #-}
mapRight :: Bifunctor p => (b -> c) -> p a b -> p a c
mapRight =
second
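-- |
-- Added usage sketch (not part of the original module): on 'Either',
-- 'mapLeft' rewrites the 'Left' payload and leaves any 'Right' untouched.
_mapLeftExample :: Either String ()
_mapLeftExample = mapLeft show (Left 1 :: Either Int ())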
|
nikita-volkov/rebase
|
library/Rebase/Data/Bifunctor.hs
|
mit
| 432
| 0
| 8
| 93
| 117
| 66
| 51
| 14
| 1
|
module Plow.Extras.Aeson.Internal
(
) where
|
plow-technologies/plow-extras
|
plow-extras-aeson/src/Plow/Extras/Aeson/Internal.hs
|
mit
| 52
| 0
| 3
| 13
| 11
| 8
| 3
| 2
| 0
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Data.Song where
import Data.Aeson
import Data.String
import GHC.Generics
import Web.Scotty
import qualified Data.Map as M
import qualified Network.MPD as MPD
import qualified Data.Text.Lazy as L
import qualified Data.Text as T
data Song = Song { title :: String
, artist :: String
, length :: Integer
, path :: Path
, index :: Maybe Int
} deriving (Show, Generic)
instance ToJSON Song
instance FromJSON Song
toSong :: MPD.Song -> Song
toSong s = Song
(getOrDefault s MPD.Title "")
(getOrDefault s MPD.Artist "")
(MPD.sgLength s)
(Path $ MPD.sgFilePath s)
(MPD.sgIndex s)
newtype Path = Path MPD.Path deriving (Show)
instance ToJSON Path where
toJSON (Path p) = String $ T.pack $ MPD.toString p
instance FromJSON Path where
parseJSON (String t) = return $ Path . fromString $ T.unpack t
parseJSON _ = fail "Should be string"
toPath :: L.Text -> MPD.Path
toPath t = fromString $ L.unpack t
instance Parsable Path where
parseParam t = Right (Path $ toPath t)
fromPath :: Path -> MPD.Path
fromPath (Path p) = p
addSong :: Song -> MPD.MPD ()
addSong = MPD.add . fromPath . path
type Playlist = [Song]
toPlaylist :: [MPD.Song] -> Playlist
toPlaylist = map toSong
playlist :: MPD.MPD Playlist
playlist = do
songs <- MPD.playlistInfo Nothing
return $ toPlaylist songs
getOrDefault :: MPD.Song -> MPD.Metadata -> String -> String
getOrDefault s m def = case M.lookup m tags of
Just value -> MPD.toString $ head value
Nothing -> def
where tags = MPD.sgTags s
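-- Added illustration (not part of the original module): a 'Path' serialises
-- to a plain JSON string of the underlying MPD path. The file name is a
-- made-up example.
_examplePathJson :: Value
_examplePathJson = toJSON (Path (fromString "music/track01.mp3"))
-- evaluates to: String "music/track01.mp3"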
|
kalhauge/vagnplayer
|
src/Data/Song.hs
|
mit
| 1,726
| 0
| 9
| 456
| 577
| 306
| 271
| 52
| 2
|
import Drawing
import Geometry.Exercises
import Geometry (find,beyond,dist,sameside,line_circle,circle_circle)
main = drawPicture myPicture
myPicture points =
drawCircle (o,p) &
drawPoints good &
message $ "Computing PI=" ++ show (4 * approx_pi4)
++ " using " ++ show nsamples ++ " samples"
where o = (0,0)
p = (5,0)
nsamples = 100000
samples = take nsamples points
good = [(x,y) | (x,y) <- samples, x*x+y*y <= 25 ]
area = fromIntegral (length good)
total = fromIntegral (length samples)
approx_pi4 = area/total;
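          -- The radius-5 circle covers pi/4 of the surrounding 10x10 square,
          -- so, assuming the Geometry point stream is uniform over that
          -- square, 4 * approx_pi4 estimates pi.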
|
alphalambda/k12math
|
prog/demo/pimc.hs
|
mit
| 618
| 0
| 13
| 183
| 226
| 124
| 102
| 17
| 1
|
module Tictactoe.HTTPHelper (
getMove,
makeMove,
TictactoePlayer(Attacker,Defender),
TictactoeCType(BencodeList,BencodeDict),
TictactoeReq(TictactoeReq)
) where
import Network.HTTP
import Network.URI
import Network.BufferType
import Tictactoe.Base
import Tictactoe.Bencode.Encoder as BencodeList
import Tictactoe.Bencode.Decoder as BencodeList
import Tictactoe.BencodeDict.Encoder as BencodeDict
import Tictactoe.BencodeDict.Decoder as BencodeDict
data TictactoePlayer = Attacker | Defender
instance Show TictactoePlayer where
show Attacker = "1"
show Defender = "2"
data TictactoeCType = BencodeList | BencodeDict
instance Show TictactoeCType where
show BencodeList = "application/bencode+list"
show BencodeDict = "application/bencode+map"
data TictactoeReq = TictactoeReq {
player :: TictactoePlayer
, gameName :: String
, contentType :: TictactoeCType
}
gameHost :: String
gameHost = "http://tictactoe.homedir.eu/game"
fullUrl :: TictactoeReq -> String
fullUrl req = gameHost ++ "/" ++ (gameName req) ++ "/player/" ++ (show (player req))
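-- For example, fullUrl (TictactoeReq Attacker "mygame" BencodeList)
-- is "http://tictactoe.homedir.eu/game/mygame/player/1".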
toBufOps :: BufferType a => Request a -> BufferOp a
toBufOps _ = bufferOps
getMoveRequest :: BufferType ty => URI -> String -> Request ty
getMoveRequest uri acceptType =
Request { rqURI = uri
, rqBody = buf_empty (bufferOps)
, rqHeaders = [ Header HdrContentLength "0"
, Header HdrUserAgent defaultUserAgent
, Header HdrAccept acceptType
]
, rqMethod = GET
}
getMoveRequestString :: String -> String -> Request_String
getMoveRequestString urlString acceptType =
case parseURI urlString of
Nothing -> error ("getRequest: Not a valid URL - " ++ urlString)
Just uri -> getMoveRequest uri acceptType
boardifyString :: String -> TictactoeCType -> Board
boardifyString strBoard cType =
case cType of
BencodeList -> BencodeList.parseBoard strBoard
BencodeDict -> BencodeDict.parseBoard strBoard
strBoard :: Board -> TictactoeCType -> String
strBoard board cType =
case cType of
BencodeDict -> BencodeDict.stringifyBoard board
BencodeList -> BencodeList.stringifyBoard board
getMove :: TictactoeReq -> IO Board
getMove req = do
resp <- simpleHTTP (getMoveRequestString (fullUrl req) (show (contentType req))) >>= getResponseBody
return $ boardifyString resp (contentType req)
makeMove :: TictactoeReq -> Board -> IO String
makeMove req board = simpleHTTP (postRequestWithBody (fullUrl req) (show (contentType req)) (strBoard board (contentType req))) >>= getResponseBody
|
viktorasl/tictactoe-bot
|
src/Tictactoe/HTTPHelper.hs
|
mit
| 2,654
| 0
| 15
| 552
| 677
| 362
| 315
| 67
| 2
|
{-# LANGUAGE CPP #-}
module GHCJS.DOM.CryptoKey (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.CryptoKey
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.CryptoKey
#else
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/CryptoKey.hs
|
mit
| 340
| 0
| 5
| 33
| 33
| 26
| 7
| 4
| 0
|
module Bad where
{-# LANGUAGE ViewPatterns, TemplateHaskell #-}
{-# LANGUAGE GeneralizedNewtypeDeriving, ViewPatterns,
ScopedTypeVariables #-}
import Control.Applicative ((<$>))
import System.Directory (doesFileExist)
import qualified Data.Map as M
import Data.Map ((!), keys, Map)
data Point = Point
{ pointX,pointY :: Double
, pointName :: String
} deriving (Show)
|
vzaccaria/haskell-format-atom
|
pippo.hs
|
mit
| 397
| 0
| 8
| 75
| 84
| 56
| 28
| -1
| -1
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Shake.Configure.Finder
( FindMethod (..)
, Finder
, tryFindMethod
, runFinder
, finderPackage
, defPackage
) where
import Control.Applicative
import Control.Monad
import Control.Monad.Trans.Maybe
import Control.Monad.IO.Class
import Data.Monoid
import Shake.Configure.Package
-- | A find method specifies a way to find a package. It can fail using Nothing, or succeed with a value of type a. Multiple
-- FindMethods can be combined with the Monoid, Alternative or MonadPlus instance. The combined find method will try the first
-- method, and if it fails, try the second one. FindMethods can also make use of IO by lifting IO actions with 'liftIO'.
newtype FindMethod a = FindMethod { runFindMethod :: MaybeT IO a } deriving (Functor, Applicative, Monad, Alternative, MonadPlus, MonadIO)
-- | Try a given method to find a package. Returns Nothing if the package could not be found.
tryFindMethod :: FindMethod a -> IO (Maybe a)
tryFindMethod = runMaybeT . runFindMethod
-- | The monoid instance behaves the same as the Alternative/MonadPlus instance for Maybe.
instance Monoid (FindMethod a) where
mempty = FindMethod $ MaybeT (return Nothing)
mappend = (<|>)
type Finder = (String, FindMethod PackageConfig)
-- | Run a finder. Returns Nothing if the package could not be found.
runFinder :: Finder -> IO (Maybe PackageConfig)
runFinder (_, method) = tryFindMethod method
-- | Get the name of the package that a given finder tries to find.
finderPackage :: Finder -> String
finderPackage (name, _) = name
-- | Define a new package finder.
defPackage :: String -- ^ Name of the package to define
-> FindMethod PackageConfig -- ^ Method(s) to use for finding the package
-> Finder
defPackage = (,)
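-- A minimal usage sketch, assuming the caller supplies some way to build a
-- 'PackageConfig' (its constructor lives in Shake.Configure.Package and is not
-- shown here) and that 'doesFileExist' is imported from System.Directory:
--
-- > zlibFinder :: (FilePath -> PackageConfig) -> Finder
-- > zlibFinder mkConfig = defPackage "zlib" $
-- >       probe "/usr/lib/libz.so" <|> probe "/usr/local/lib/libz.so"
-- >   where
-- >     probe path = do
-- >       exists <- liftIO (doesFileExist path)
-- >       guard exists
-- >       return (mkConfig path)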
|
bennofs/shake-configure
|
Shake/Configure/Finder.hs
|
mit
| 1,812
| 0
| 9
| 341
| 289
| 170
| 119
| 29
| 1
|
-- General Sequence Classifier
module Main (main) where
-- Classifies a set of sequences using a classifier produced by gsc_mk.
-- module Main where
import System.Environment (getArgs)
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.IO as LTIO
import qualified Data.Text as ST
import qualified Data.Text.IO as STIO
import Text.Printf
import qualified Text.Parse as TP
import Control.Applicative
import Control.Monad.Reader
import Options.Applicative
import Codec.Compression.GZip (decompress)
import qualified Data.ByteString.Lazy as LB
import Data.Map (Map, fromList, findWithDefault)
import Data.Binary (decodeFile, decode)
import Data.Tree
import Data.Char
import Data.List
import Data.Ord
import MlgscTypes
import FastA
import Trim (trimSeq, maskFlaky)
import PWMModel
import Align
import Classifier (Classifier(..), classifySequence, classifySequenceMulti,
classifySequenceAll, StoredClassifier(..))
import Output
-- distinguish MlgscAlignMode (which includes no alignment) from
-- AlignMode (which is exported by Align), in which no alignment makes little
-- sense.
data MlgscAlignMode = DoAlignment AlignMode | NoAlignment
data MaskMode = None | Trim | MaskFlaky
data TreeTraversalMode = BestTraversal
| FullTraversal
| RecoverTraversal Int
| SingleNodeTraversal CladeName
data Params = Params {
optTreeTraversalMode :: TreeTraversalMode
, optNoAlign :: Bool
, optOutFmtString :: String
, optStepFmtString :: String
, optERCutoff :: Int -- for Best mode (TODO: could be an argument to the BestTraversal c'tor)
, optMaskMode :: MaskMode
, optAlnMode :: MlgscAlignMode
, queryFname :: String
, clsfrFname :: String
}
parseTreeTraversal :: Monad m => String -> m TreeTraversalMode
parseTreeTraversal optString
| 'b' == initC = return BestTraversal
| 'a' == initC = return FullTraversal
| 'r' == initC = do
let (Right num,_) = TP.runParser TP.parseDec $ tail optString
-- TODO: handle bad parse
return $ RecoverTraversal num
where initC = toLower $ head optString
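-- The first character selects the mode: "b"/"best" give BestTraversal,
-- "a"/"all" give FullTraversal, and e.g. "r5" gives RecoverTraversal 5
-- (the digits after 'r' become the tie threshold).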
parseMaskMode :: Monad m => String -> m MaskMode
parseMaskMode optString
| 't' == initC = return Trim
| 'n' == initC = return None
| 'f' == initC = return MaskFlaky
| otherwise = return None -- TODO: warn about unrecognized opt
where initC = toLower $ head optString
parseAlignMode :: Monad m => String -> m MlgscAlignMode
parseAlignMode optString
| 'g' == initC = return $ DoAlignment AlignGlobal
| 's' == initC = return $ DoAlignment AlignSemiglobal
| 'n' == initC = return NoAlignment
| otherwise = error ("invalid alignment mode: " ++ optString)
where initC = toLower $ head optString
parseOptions :: Parser Params
parseOptions = Params
<$> option (str >>= parseTreeTraversal)
(long "traversal-mode"
<> short 'm'
<> help "tree traversal mode (b|a|r<int>)"
<> value BestTraversal)
-- NOTE: the -A switch is deprecated; use -a n instead.
-- We keep it for compatibility
<*> switch
(long "no-align"
<> short 'A'
<> help "do not align query sequences")
<*> option str
(long "output-format"
<> short 'f'
<> help "printf-like format string for output."
<> value "%h -> %p")
<*> option str
(long "step-format"
<> short 's'
<> help "printf-like format string for step (path element)"
<> value "%t (%s)")
<*> option auto
(long "ER-cutoff"
<> short 'e'
<> help "drop clades with ER lower than this"
<> value 0)
<*> option (str >>= parseMaskMode)
(long "mask-mode"
<> short 'M'
<> help "mask aligned query: n)one* | t)trim | mask f)laky"
<> value None)
<*> option (str >>= parseAlignMode)
(long "align-mode"
<> short 'a'
<> help "alignment mode: g)lobal* | s)emiglobal | n)one"
<> value (DoAlignment AlignGlobal))
<*> argument str (metavar "<query seq file>")
<*> argument str (metavar "<classifier file>")
parseOptionsInfo :: ParserInfo Params
parseOptionsInfo = info (helper <*> parseOptions)
( fullDesc
<> progDesc "classify sequences according to a model"
<> Options.Applicative.header
"mlgsc - maximum-likelihood general sequence classifier")
-- Some common format options have names (e.g. "simple" -> "%h -> %P (%s)"). These
-- must be translated using the following map (some short forms are possible)
fmtMap :: Map String String
fmtMap = fromList [
("minimal", "%i\t%P"),
("min", "%i\t%P"),
("m", "%i\t%P"),
("simple", "%h -> %P (%s)"),
("s", "%h -> %P (%s)")
]
translateFmtKw :: Params -> IO Params
translateFmtKw params = do
let origFmt = optOutFmtString params
let fmt = findWithDefault origFmt origFmt fmtMap
return params { optOutFmtString = fmt }
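-- Known keywords are replaced ("min" becomes "%i\t%P", "simple" becomes
-- "%h -> %P (%s)"); any other format string passes through unchanged, since
-- findWithDefault falls back to the original value.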
main :: IO ()
main = do
params <- execParser parseOptionsInfo >>= translateFmtKw
queryFastA <- LTIO.readFile $ queryFname params
let queryRecs = fastATextToRecords queryFastA
let clsfrFn = clsfrFname params
storedClassifier <- if isSuffixOf ".gz" clsfrFn
then do
z <- LB.readFile clsfrFn
return (decode $ decompress z) :: IO StoredClassifier
else (decodeFile clsfrFn) :: IO StoredClassifier
let (StoredClassifier classifier@(PWMClassifier modTree scale) _) = storedClassifier
let rootMod = rootLabel modTree
-- TODO: replace the magic "2" below by a meaningful constant/param
let scoringScheme = ScoringScheme (-2) (scoringSchemeMap (absentResScore rootMod))
let headers = map FastA.header queryRecs
let processedQueries =
map (
-- All the transformations from Fasta record to ready-to-score
-- query. Note that some (mb*) depend on params among other
-- things.
mbMask params
. mbAlign params scoringScheme rootMod
. ST.toUpper
. LT.toStrict
. FastA.sequence
) queryRecs
let outlines =
case optTreeTraversalMode params of
BestTraversal -> bestTraversal params classifier queryRecs processedQueries
(RecoverTraversal _) -> recoverTraversal params classifier queryRecs processedQueries
FullTraversal -> fullTraversal params classifier queryRecs processedQueries
mapM_ STIO.putStrLn outlines
-- Returns an alignment step (technically, a ST.Text -> ST.Text function), or
-- just id if option 'optNoAlign' is set. NOTE: for now there are two ways of
-- specifying 'no alignment', because we introduced semiglobal, yet either
-- alignment mode is incompatible with 'no alignment', so in reality it's either
-- global, semiglobal, or not at all, which are all governed by -a.
-- We could just drop -A, but this could break existing code. For now we just
-- handle both here, in the future there should be a distinction between CLI
-- params and run params, with the former essentially the same as the current
-- Params, and a function to map them to the latter.
mbAlign params scsc rootMod =
if optNoAlign params
then id
else case (optAlnMode params) of
NoAlignment -> id
(DoAlignment mode) -> msalign mode scsc rootMod
-- Returns a masking step (a ST.Text -> ST.Text function), or just id if no
-- masking was requested. TODO: this function could be called during
-- command-line argument parsing...
mbMask params =
case optMaskMode params of
None -> id
Trim -> trimSeq
MaskFlaky -> maskFlaky
bestTraversal :: Params -> Classifier -> [FastA] -> [Sequence] -> [ST.Text]
bestTraversal params classifier queryRecs processedQueries =
getZipList $ (formatResultWrapper params)
<$> ZipList queryRecs
<*> ZipList processedQueries
<*> ZipList predictions
where
log10ER = optERCutoff params
predictions = map (classifySequence classifier log10ER) processedQueries
recoverTraversal :: Params -> Classifier -> [FastA] -> [Sequence] -> [ST.Text]
recoverTraversal params classifier queryRecs processedQueries =
getZipList $ (formatResultWrapper params)
<$> ZipList queryRecs
<*> ZipList processedQueries
<*> ZipList bestPredictions
where
(RecoverTraversal tieThreshold) = optTreeTraversalMode params
bestPredictions = map getBestPrediction predictions
predictions = map (classifySequenceMulti classifier tieThreshold) processedQueries
getBestPrediction :: [Trail] -> Trail
getBestPrediction trails = maximumBy (comparing lastScore) trails
lastScore :: Trail -> Score
lastScore trail = bestScore $ last trail
fullTraversal :: Params -> Classifier -> [FastA] -> [Sequence] -> [ST.Text]
fullTraversal params classifier queryRecs processedQueries =
concat $ getZipList $ (fullTraversalFmt1Query params)
<$> ZipList queryRecs
<*> ZipList processedQueries
<*> ZipList predictions
where
predictions = map (classifySequenceAll classifier) processedQueries
fullTraversalFmt1Query :: Params -> FastA -> Sequence -> [Trail] -> [ST.Text]
fullTraversalFmt1Query params queryRec processQuery trails =
map (formatResultWrapper params queryRec processQuery) trails
{- I like to apply the output formatter in applicative style to the lists of
- arguments. However, I'm not sure how to make this play with the Reader monad,
- except by using a wrapper like below. -}
formatResultWrapper :: Params -> FastA -> Sequence -> Trail -> ST.Text
formatResultWrapper params query alnQry trail =
runReader (formatResultReader query alnQry trail) params
{- This works, but it's not quite clear what should go in Output,
- OutputFormatStringParser, or just plain here in Main. -}
formatResultReader :: FastA -> Sequence -> Trail -> Reader Params ST.Text
formatResultReader query alnQry trail = do
fmtString <- asks optOutFmtString
stepFmtString <- asks optStepFmtString
let (Right format) = parseOutputFormatString fmtString
let (Right stepfmt) = parseStepFormatString stepFmtString
return $ ST.concat $ map (evalFmtComponent query alnQry trail stepfmt) format
|
tjunier/mlgsc
|
src/mlgsc.hs
|
mit
| 11,299
| 0
| 19
| 3,365
| 2,221
| 1,136
| 1,085
| 202
| 4
|
module GHCJS.DOM.StorageUsageCallback (
) where
|
manyoo/ghcjs-dom
|
ghcjs-dom-webkit/src/GHCJS/DOM/StorageUsageCallback.hs
|
mit
| 50
| 0
| 3
| 7
| 10
| 7
| 3
| 1
| 0
|
{-# LANGUAGE CPP #-}
module Document.Tests.TrainStationRefinement
( test, test_case, path3 )
where
-- Modules
import Document.Tests.Suite
-- Libraries
#if MIN_VERSION_semigroups(0,18,0)
import Data.List.NonEmpty as NE
#else
import Data.List.NonEmpty as NE hiding (unlines)
#endif
import Test.UnitTest
test_case :: TestCase
test_case = test
test :: TestCase
test = test_cases
"train station example, with refinement"
[ poCase "verify machine m0 (ref)" (verify path0 0) result0
, poCase "verify machine m1 (ref)" (verify path0 1) result1
, stringCase "Feasibility in m1" case6 result6
, poCase "verify machine m2 (ref)" (verify path0 2) result2
, poCase "verify machine m2 (ref), in many files"
(verifyFiles (NE.fromList [path1,path1']) 2) result2
, stringCase "cyclic proof of liveness through 3 refinements" (find_errors path3) result3
, stringCase "refinement of undefined machine" (find_errors path4) result4
, stringCase "repeated imports" case5 result5
]
result0 :: String
result0 = unlines
[ " o m0/m0:enter/FIS/in@prime"
, " o m0/m0:leave/FIS/in@prime"
, " o m0/m0:prog0/LIVE/discharge/tr/lhs"
, " o m0/m0:prog0/LIVE/discharge/tr/rhs"
, " o m0/m0:tr0/TR/WFIS/t/t@prime"
, " o m0/m0:tr0/TR/m0:leave/EN"
, " o m0/m0:tr0/TR/m0:leave/NEG"
, "passed 7 / 7"
]
result1 :: String
result1 = unlines
[ " o m1/INIT/INV/inv0"
, " o m1/m0:enter/FIS/in@prime"
, " o m1/m0:enter/FIS/loc@prime"
, " o m1/m0:enter/INV/inv0"
, " o m1/m0:enter/SAF/m1:saf0"
, " o m1/m0:enter/SAF/m1:saf1"
, " o m1/m0:enter/SAF/m1:saf2"
, " o m1/m0:enter/SAF/m1:saf3"
, " o m1/m0:enter/SCH/ent:grd1"
, " o m1/m0:leave/C_SCH/delay/0/prog/m1:prog0/lhs"
, " o m1/m0:leave/C_SCH/delay/0/prog/m1:prog0/rhs/lv:c1"
, " o m1/m0:leave/C_SCH/delay/0/saf/m0:enter/SAF/m0:leave"
, " o m1/m0:leave/C_SCH/delay/0/saf/m0:leave/SAF/m0:leave"
, " o m1/m0:leave/C_SCH/delay/0/saf/m1:movein/SAF/m0:leave"
, " o m1/m0:leave/C_SCH/delay/0/saf/m1:moveout/SAF/m0:leave"
, " o m1/m0:leave/FIS/in@prime"
, " o m1/m0:leave/FIS/loc@prime"
, " o m1/m0:leave/INV/inv0"
, " o m1/m0:leave/SAF/m1:saf0"
, " o m1/m0:leave/SAF/m1:saf1"
, " o m1/m0:leave/SAF/m1:saf2"
, " o m1/m0:leave/SAF/m1:saf3"
, " o m1/m0:leave/SCH/lv:grd0"
, " o m1/m0:leave/SCH/lv:grd1"
, " o m1/m0:leave/WD/C_SCH"
, " o m1/m0:leave/WD/GRD"
, " o m1/m1:movein/FIS/loc@prime"
, " o m1/m1:movein/INV/inv0"
, " o m1/m1:movein/SAF/m1:saf0"
, " o m1/m1:movein/SAF/m1:saf1"
, " o m1/m1:movein/SAF/m1:saf2"
, " o m1/m1:movein/SAF/m1:saf3"
, " o m1/m1:movein/SCH"
, " o m1/m1:movein/SCH/b"
, " o m1/m1:movein/WD/C_SCH"
, " o m1/m1:movein/WD/GRD"
, " o m1/m1:moveout/FIS/loc@prime"
, " o m1/m1:moveout/INV/inv0"
, " o m1/m1:moveout/SAF/m1:saf0"
, " o m1/m1:moveout/SAF/m1:saf1"
, " o m1/m1:moveout/SAF/m1:saf2"
, " o m1/m1:moveout/SAF/m1:saf3"
, " o m1/m1:moveout/SCH/mo:g1"
, " o m1/m1:moveout/SCH/mo:g2"
, " o m1/m1:moveout/WD/C_SCH"
, " o m1/m1:moveout/WD/GRD"
, " o m1/m1:prog0/LIVE/disjunction/lhs"
, " o m1/m1:prog0/LIVE/disjunction/rhs"
, " o m1/m1:prog0/PROG/WD/rhs"
, " o m1/m1:prog1/LIVE/transitivity/lhs"
, " o m1/m1:prog1/LIVE/transitivity/mhs/0/1"
, " o m1/m1:prog1/LIVE/transitivity/rhs"
, " o m1/m1:prog1/PROG/WD/lhs"
, " o m1/m1:prog1/PROG/WD/rhs"
, " o m1/m1:prog2/LIVE/implication"
, " o m1/m1:prog2/PROG/WD/lhs"
, " o m1/m1:prog2/PROG/WD/rhs"
, " o m1/m1:prog3/LIVE/discharge/saf/lhs"
, " o m1/m1:prog3/LIVE/discharge/saf/rhs"
, " o m1/m1:prog3/LIVE/discharge/tr"
, " o m1/m1:prog3/PROG/WD/lhs"
, " o m1/m1:prog3/PROG/WD/rhs"
, " o m1/m1:prog4/LIVE/discharge/saf/lhs"
, " o m1/m1:prog4/LIVE/discharge/saf/rhs"
, " o m1/m1:prog4/LIVE/discharge/tr"
, " o m1/m1:prog4/PROG/WD/lhs"
, " o m1/m1:prog4/PROG/WD/rhs"
, " o m1/m1:saf0/SAF/WD/rhs"
, " o m1/m1:saf1/SAF/WD/lhs"
, " o m1/m1:saf1/SAF/WD/rhs"
, " o m1/m1:saf2/SAF/WD/lhs"
, " o m1/m1:saf2/SAF/WD/rhs"
, " o m1/m1:saf3/SAF/WD/lhs"
, " o m1/m1:tr0/TR/WD"
, " o m1/m1:tr0/TR/WFIS/t/t@prime"
, " o m1/m1:tr0/TR/m1:moveout/EN"
, " o m1/m1:tr0/TR/m1:moveout/NEG"
, " o m1/m1:tr1/TR/WD"
, " o m1/m1:tr1/TR/WFIS/t/t@prime"
, " o m1/m1:tr1/TR/m1:movein/EN"
, " o m1/m1:tr1/TR/m1:movein/NEG"
, "passed 81 / 81"
]
result2 :: String
result2 = unlines
[ " o m2/INIT/INV/m2:inv0"
, " o m2/INV/WD"
, " o m2/m0:enter/FIS/in@prime"
, " o m2/m0:enter/FIS/loc@prime"
, " o m2/m0:enter/INV/m2:inv0"
, " o m2/m0:enter/SAF/m2:saf1"
, " o m2/m0:enter/SAF/m2:saf2"
, " o m2/m0:enter/SCH/et:g1"
, " o m2/m0:enter/WD/GRD"
, " o m2/m0:leave/FIS/in@prime"
, " o m2/m0:leave/FIS/loc@prime"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp3/easy"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp4/easy"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp5/easy"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp6/easy"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp7/goal"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp7/hypotheses"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp7/relation"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp7/step 1"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp7/step 2"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp7/step 3"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp8/goal"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp8/hypotheses"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp8/relation"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp8/step 1"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp8/step 2"
, " o m2/m0:leave/INV/m2:inv0/assertion/hyp8/step 3"
, " o m2/m0:leave/INV/m2:inv0/main goal/goal"
, " o m2/m0:leave/INV/m2:inv0/main goal/hypotheses"
, " o m2/m0:leave/INV/m2:inv0/main goal/relation"
, " o m2/m0:leave/INV/m2:inv0/main goal/step 1"
, " o m2/m0:leave/INV/m2:inv0/main goal/step 2"
, " o m2/m0:leave/INV/m2:inv0/main goal/step 3"
, " o m2/m0:leave/INV/m2:inv0/new assumption"
, " o m2/m0:leave/SAF/m2:saf1"
, " o m2/m0:leave/SAF/m2:saf2"
, " o m2/m0:leave/WD/GRD"
, " o m2/m1:movein/C_SCH/delay/0/prog/m2:prog0/rhs/mi:c0"
, " o m2/m1:movein/C_SCH/delay/0/saf/m0:enter/SAF/m1:movein"
, " o m2/m1:movein/C_SCH/delay/0/saf/m0:leave/SAF/m1:movein"
, " o m2/m1:movein/C_SCH/delay/0/saf/m1:movein/SAF/m1:movein"
, " o m2/m1:movein/C_SCH/delay/0/saf/m1:moveout/SAF/m1:movein"
, " o m2/m1:movein/FIS/loc@prime"
, " o m2/m1:movein/INV/m2:inv0"
, " o m2/m1:movein/SAF/m2:saf1"
, " o m2/m1:movein/SAF/m2:saf2"
, " o m2/m1:movein/SCH"
, " o m2/m1:movein/SCH/b"
, " o m2/m1:movein/WD/C_SCH"
, " o m2/m1:movein/WD/GRD"
, " o m2/m1:moveout/FIS/loc@prime"
, " o m2/m1:moveout/F_SCH/replace/prog/m2:prog1/rhs/mo:f0"
, " o m2/m1:moveout/INV/m2:inv0"
, " o m2/m1:moveout/SAF/m2:saf1"
, " o m2/m1:moveout/SAF/m2:saf2"
, " o m2/m1:moveout/SCH/mo:g3"
, " o m2/m1:moveout/WD/F_SCH"
, " o m2/m1:moveout/WD/GRD"
, " o m2/m2:prog0/LIVE/trading/lhs"
, " o m2/m2:prog0/LIVE/trading/rhs"
, " o m2/m2:prog0/PROG/WD/rhs"
, " o m2/m2:prog1/LIVE/trading/lhs"
, " o m2/m2:prog1/LIVE/trading/rhs"
, " o m2/m2:prog1/PROG/WD/rhs"
, " o m2/m2:prog2/LIVE/disjunction/lhs"
, " o m2/m2:prog2/LIVE/disjunction/rhs"
, " o m2/m2:prog2/PROG/WD/lhs"
, " o m2/m2:prog2/PROG/WD/rhs"
, " o m2/m2:prog3/LIVE/discharge/saf/lhs"
, " o m2/m2:prog3/LIVE/discharge/saf/rhs"
, " o m2/m2:prog3/LIVE/discharge/tr"
, " o m2/m2:prog3/PROG/WD/lhs"
, " o m2/m2:prog3/PROG/WD/rhs"
, " o m2/m2:prog4/LIVE/monotonicity/lhs"
, " o m2/m2:prog4/LIVE/monotonicity/rhs"
, " o m2/m2:prog4/PROG/WD/lhs"
, " o m2/m2:prog4/PROG/WD/rhs"
, " o m2/m2:prog5/LIVE/disjunction/lhs"
, " o m2/m2:prog5/LIVE/disjunction/rhs"
, " o m2/m2:prog5/PROG/WD/lhs"
, " o m2/m2:prog5/PROG/WD/rhs"
, " o m2/m2:prog6/LIVE/discharge/saf/lhs"
, " o m2/m2:prog6/LIVE/discharge/saf/rhs"
, " o m2/m2:prog6/LIVE/discharge/tr"
, " o m2/m2:prog6/PROG/WD/lhs"
, " o m2/m2:prog6/PROG/WD/rhs"
, " o m2/m2:saf1/SAF/WD/lhs"
, " o m2/m2:saf1/SAF/WD/rhs"
, " o m2/m2:saf2/SAF/WD/lhs"
, " o m2/m2:saf2/SAF/WD/rhs"
, " o m2/m2:tr0/TR/WD"
, " o m2/m2:tr0/TR/WFIS/t/t@prime"
, " o m2/m2:tr0/TR/m0:leave/EN"
, " o m2/m2:tr0/TR/m0:leave/NEG"
, " o m2/m2:tr1/TR/WD"
, " o m2/m2:tr1/TR/WFIS/t/t@prime"
, " o m2/m2:tr1/TR/leadsto/lhs"
, " o m2/m2:tr1/TR/leadsto/rhs"
, " o m2/m2:tr1/TR/m1:moveout/EN"
, " o m2/m2:tr1/TR/m1:moveout/NEG"
, "passed 100 / 100"
]
path0 :: FilePath
path0 = [path|Tests/train-station-ref.tex|]
path1 :: FilePath
path1 = [path|Tests/train-station-ref/main.tex|]
path1' :: FilePath
path1' = [path|Tests/train-station-ref/ref0.tex|]
path3 :: FilePath
path3 = [path|Tests/train-station-ref-err0.tex|]
result3 :: String
result3 = unlines
[ "A cycle exists in the liveness proof"
, "error 42:1:"
, "\tProgress property p0 (refined in m0)"
, ""
, "error 51:1:"
, "\tEvent evt (refined in m1)"
, ""
]
path4 :: FilePath
path4 = [path|Tests/train-station-ref-err1.tex|]
result4 :: String
result4 = unlines
[ "error 31:1:"
, " Machine m0 refines a non-existant machine: mm"
]
-- parse :: FilePath -> IO String
-- parse path = do
-- r <- parse_machine path
-- return $ case r of
-- Right _ -> "ok"
-- Left xs -> unlines $ map report xs
path5 :: FilePath
path5 = [path|Tests/train-station-ref-err2.tex|]
result5 :: String
result5 = unlines
[ "Theory imported multiple times"
, "error 38:1:"
, "\tsets"
, ""
, "error 88:1:"
, "\tsets"
, ""
, "error 444:1:"
, "\tsets"
, ""
, "error 445:1:"
, "\tsets"
, ""
, ""
, "Theory imported multiple times"
, "error 89:1:"
, "\tfunctions"
, ""
, "error 446:1:"
, "\tfunctions"
, ""
]
case5 :: IO String
case5 = find_errors path5
case6 :: IO String
case6 = proof_obligation path0 "m1/m1:moveout/FIS/loc@prime" 1
result6 :: String
result6 = unlines
[ "; m1/m1:moveout/FIS/loc@prime"
, "(set-option :auto-config false)"
, "(set-option :smt.timeout 3000)"
, "(declare-datatypes (a) ( (Maybe (Just (fromJust a)) Nothing) ))"
, "(declare-datatypes () ( (Null null) ))"
, "(declare-datatypes (a b) ( (Pair (pair (first a) (second b))) ))"
, "(define-sort guarded (a) (Maybe a))"
, "(declare-sort sl$Blk 0)"
, "; comment: we don't need to declare the sort Bool"
, "; comment: we don't need to declare the sort Int"
, "; comment: we don't need to declare the sort Real"
, "(declare-sort sl$Train 0)"
, "(define-sort pfun (a b) (Array a (Maybe b)))"
, "(define-sort set (a) (Array a Bool))"
, "(declare-const ent sl$Blk)"
, "(declare-const ext sl$Blk)"
, "(declare-const in (set sl$Train))"
, "(declare-const in@prime (set sl$Train))"
, "(declare-const loc (pfun sl$Train sl$Blk))"
, "(declare-const loc@prime (pfun sl$Train sl$Blk))"
, "(declare-const plf (set sl$Blk))"
, "(declare-const t sl$Train)"
, "(declare-fun apply@@sl$Train@@sl$Blk"
, " ( (pfun sl$Train sl$Blk)"
, " sl$Train )"
, " sl$Blk)"
, "(declare-fun card@@sl$Blk ( (set sl$Blk) ) Int)"
, "(declare-fun card@@sl$Train ( (set sl$Train) ) Int)"
, "(declare-fun dom@@sl$Train@@sl$Blk"
, " ( (pfun sl$Train sl$Blk) )"
, " (set sl$Train))"
, "(declare-fun dom-rest@@sl$Train@@sl$Blk"
, " ( (set sl$Train)"
, " (pfun sl$Train sl$Blk) )"
, " (pfun sl$Train sl$Blk))"
, "(declare-fun dom-subt@@sl$Train@@sl$Blk"
, " ( (set sl$Train)"
, " (pfun sl$Train sl$Blk) )"
, " (pfun sl$Train sl$Blk))"
, "(declare-fun empty-fun@@sl$Train@@sl$Blk"
, " ()"
, " (pfun sl$Train sl$Blk))"
, "(declare-fun finite@@sl$Blk ( (set sl$Blk) ) Bool)"
, "(declare-fun finite@@sl$Train ( (set sl$Train) ) Bool)"
, "(declare-fun injective@@sl$Train@@sl$Blk"
, " ( (pfun sl$Train sl$Blk) )"
, " Bool)"
, "(declare-fun mk-fun@@sl$Train@@sl$Blk"
, " (sl$Train sl$Blk)"
, " (pfun sl$Train sl$Blk))"
, "(declare-fun mk-set@@sl$Blk (sl$Blk) (set sl$Blk))"
, "(declare-fun mk-set@@sl$Train (sl$Train) (set sl$Train))"
, "(declare-fun ovl@@sl$Train@@sl$Blk"
, " ( (pfun sl$Train sl$Blk)"
, " (pfun sl$Train sl$Blk) )"
, " (pfun sl$Train sl$Blk))"
, "(declare-fun ran@@sl$Train@@sl$Blk"
, " ( (pfun sl$Train sl$Blk) )"
, " (set sl$Blk))"
, "(define-fun all@@sl$Blk"
, " ()"
, " (set sl$Blk)"
, " ( (as const (set sl$Blk))"
, " true ))"
, "(define-fun all@@sl$Train"
, " ()"
, " (set sl$Train)"
, " ( (as const (set sl$Train))"
, " true ))"
, "(define-fun compl@@sl$Blk"
, " ( (s1 (set sl$Blk)) )"
, " (set sl$Blk)"
, " ( (_ map not)"
, " s1 ))"
, "(define-fun compl@@sl$Train"
, " ( (s1 (set sl$Train)) )"
, " (set sl$Train)"
, " ( (_ map not)"
, " s1 ))"
, "(define-fun elem@@sl$Blk"
, " ( (x sl$Blk)"
, " (s1 (set sl$Blk)) )"
, " Bool"
, " (select s1 x))"
, "(define-fun elem@@sl$Train"
, " ( (x sl$Train)"
, " (s1 (set sl$Train)) )"
, " Bool"
, " (select s1 x))"
, "(define-fun empty-set@@sl$Blk"
, " ()"
, " (set sl$Blk)"
, " ( (as const (set sl$Blk))"
, " false ))"
, "(define-fun empty-set@@sl$Train"
, " ()"
, " (set sl$Train)"
, " ( (as const (set sl$Train))"
, " false ))"
, "(define-fun set-diff@@sl$Blk"
, " ( (s1 (set sl$Blk))"
, " (s2 (set sl$Blk)) )"
, " (set sl$Blk)"
, " (intersect s1 ( (_ map not) s2 )))"
, "(define-fun set-diff@@sl$Train"
, " ( (s1 (set sl$Train))"
, " (s2 (set sl$Train)) )"
, " (set sl$Train)"
, " (intersect s1 ( (_ map not) s2 )))"
, "(define-fun st-subset@@sl$Blk"
, " ( (s1 (set sl$Blk))"
, " (s2 (set sl$Blk)) )"
, " Bool"
, " (and (subset s1 s2) (not (= s1 s2))))"
, "(define-fun st-subset@@sl$Train"
, " ( (s1 (set sl$Train))"
, " (s2 (set sl$Train)) )"
, " Bool"
, " (and (subset s1 s2) (not (= s1 s2))))"
, "(define-fun sl$Blk"
, " ()"
, " (set sl$Blk)"
, " ( (as const (set sl$Blk))"
, " true ))"
, "(define-fun sl$Train"
, " ()"
, " (set sl$Train)"
, " ( (as const (set sl$Train))"
, " true ))"
, "(assert (forall ( (r (set sl$Blk)) )"
, " (! (=> (finite@@sl$Blk r) (<= 0 (card@@sl$Blk r)))"
, " :pattern"
, " ( (<= 0 (card@@sl$Blk r)) ))))"
, "(assert (forall ( (r (set sl$Train)) )"
, " (! (=> (finite@@sl$Train r) (<= 0 (card@@sl$Train r)))"
, " :pattern"
, " ( (<= 0 (card@@sl$Train r)) ))))"
, "(assert (forall ( (r (set sl$Blk)) )"
, " (! (= (= (card@@sl$Blk r) 0) (= r empty-set@@sl$Blk))"
, " :pattern"
, " ( (card@@sl$Blk r) ))))"
, "(assert (forall ( (r (set sl$Train)) )"
, " (! (= (= (card@@sl$Train r) 0)"
, " (= r empty-set@@sl$Train))"
, " :pattern"
, " ( (card@@sl$Train r) ))))"
, "(assert (forall ( (x sl$Blk) )"
, " (! (= (card@@sl$Blk (mk-set@@sl$Blk x)) 1)"
, " :pattern"
, " ( (card@@sl$Blk (mk-set@@sl$Blk x)) ))))"
, "(assert (forall ( (x sl$Train) )"
, " (! (= (card@@sl$Train (mk-set@@sl$Train x)) 1)"
, " :pattern"
, " ( (card@@sl$Train (mk-set@@sl$Train x)) ))))"
, "(assert (forall ( (r (set sl$Blk)) )"
, " (! (= (= (card@@sl$Blk r) 1)"
, " (exists ( (x sl$Blk) ) (and true (= r (mk-set@@sl$Blk x)))))"
, " :pattern"
, " ( (card@@sl$Blk r) ))))"
, "(assert (forall ( (r (set sl$Train)) )"
, " (! (= (= (card@@sl$Train r) 1)"
, " (exists ( (x sl$Train) )"
, " (and true (= r (mk-set@@sl$Train x)))))"
, " :pattern"
, " ( (card@@sl$Train r) ))))"
, "(assert (forall ( (r (set sl$Blk))"
, " (r0 (set sl$Blk)) )"
, " (! (=> (= (intersect r r0) empty-set@@sl$Blk)"
, " (= (card@@sl$Blk (union r r0))"
, " (+ (card@@sl$Blk r) (card@@sl$Blk r0))))"
, " :pattern"
, " ( (card@@sl$Blk (union r r0)) ))))"
, "(assert (forall ( (r (set sl$Train))"
, " (r0 (set sl$Train)) )"
, " (! (=> (= (intersect r r0) empty-set@@sl$Train)"
, " (= (card@@sl$Train (union r r0))"
, " (+ (card@@sl$Train r) (card@@sl$Train r0))))"
, " :pattern"
, " ( (card@@sl$Train (union r r0)) ))))"
, "(assert (= (dom@@sl$Train@@sl$Blk empty-fun@@sl$Train@@sl$Blk)"
, " empty-set@@sl$Train))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk)) )"
, " (! (= (ovl@@sl$Train@@sl$Blk f1 empty-fun@@sl$Train@@sl$Blk)"
, " f1)"
, " :pattern"
, " ( (ovl@@sl$Train@@sl$Blk f1 empty-fun@@sl$Train@@sl$Blk) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk)) )"
, " (! (= (ovl@@sl$Train@@sl$Blk empty-fun@@sl$Train@@sl$Blk f1)"
, " f1)"
, " :pattern"
, " ( (ovl@@sl$Train@@sl$Blk empty-fun@@sl$Train@@sl$Blk f1) ))))"
, "(assert (forall ( (x sl$Train)"
, " (y sl$Blk) )"
, " (! (= (dom@@sl$Train@@sl$Blk (mk-fun@@sl$Train@@sl$Blk x y))"
, " (mk-set@@sl$Train x))"
, " :pattern"
, " ( (dom@@sl$Train@@sl$Blk (mk-fun@@sl$Train@@sl$Blk x y)) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (f2 (pfun sl$Train sl$Blk))"
, " (x sl$Train) )"
, " (! (=> (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f2))"
, " (= (apply@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 f2) x)"
, " (apply@@sl$Train@@sl$Blk f2 x)))"
, " :pattern"
, " ( (apply@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 f2) x) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (f2 (pfun sl$Train sl$Blk))"
, " (x sl$Train) )"
, " (! (=> (and (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f1))"
, " (not (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f2))))"
, " (= (apply@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 f2) x)"
, " (apply@@sl$Train@@sl$Blk f1 x)))"
, " :pattern"
, " ( (apply@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 f2) x) ))))"
, "(assert (forall ( (x sl$Train)"
, " (y sl$Blk) )"
, " (! (= (apply@@sl$Train@@sl$Blk (mk-fun@@sl$Train@@sl$Blk x y) x)"
, " y)"
, " :pattern"
, " ( (apply@@sl$Train@@sl$Blk (mk-fun@@sl$Train@@sl$Blk x y) x) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (s1 (set sl$Train))"
, " (x sl$Train) )"
, " (! (=> (and (elem@@sl$Train x s1)"
, " (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f1)))"
, " (= (apply@@sl$Train@@sl$Blk (dom-rest@@sl$Train@@sl$Blk s1 f1) x)"
, " (apply@@sl$Train@@sl$Blk f1 x)))"
, " :pattern"
, " ( (apply@@sl$Train@@sl$Blk (dom-rest@@sl$Train@@sl$Blk s1 f1) x) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (s1 (set sl$Train))"
, " (x sl$Train) )"
, " (! (=> (elem@@sl$Train x"
, " (set-diff@@sl$Train (dom@@sl$Train@@sl$Blk f1) s1))"
, " (= (apply@@sl$Train@@sl$Blk (dom-subt@@sl$Train@@sl$Blk s1 f1) x)"
, " (apply@@sl$Train@@sl$Blk f1 x)))"
, " :pattern"
, " ( (apply@@sl$Train@@sl$Blk (dom-subt@@sl$Train@@sl$Blk s1 f1) x) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (f2 (pfun sl$Train sl$Blk)) )"
, " (! (= (dom@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 f2))"
, " (union (dom@@sl$Train@@sl$Blk f1)"
, " (dom@@sl$Train@@sl$Blk f2)))"
, " :pattern"
, " ( (dom@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 f2)) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (s1 (set sl$Train)) )"
, " (! (= (dom@@sl$Train@@sl$Blk (dom-rest@@sl$Train@@sl$Blk s1 f1))"
, " (intersect s1 (dom@@sl$Train@@sl$Blk f1)))"
, " :pattern"
, " ( (dom@@sl$Train@@sl$Blk (dom-rest@@sl$Train@@sl$Blk s1 f1)) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (s1 (set sl$Train)) )"
, " (! (= (dom@@sl$Train@@sl$Blk (dom-subt@@sl$Train@@sl$Blk s1 f1))"
, " (set-diff@@sl$Train (dom@@sl$Train@@sl$Blk f1) s1))"
, " :pattern"
, " ( (dom@@sl$Train@@sl$Blk (dom-subt@@sl$Train@@sl$Blk s1 f1)) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (x sl$Train)"
, " (y sl$Blk) )"
, " (! (= (and (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f1))"
, " (= (apply@@sl$Train@@sl$Blk f1 x) y))"
, " (= (select f1 x) (Just y)))"
, " :pattern"
, " ( (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f1))"
, " (apply@@sl$Train@@sl$Blk f1 x)"
, " (select f1 x)"
, " (Just y) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (x sl$Train)"
, " (x2 sl$Train)"
, " (y sl$Blk) )"
, " (! (=> (not (= x x2))"
, " (= (apply@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 (mk-fun@@sl$Train@@sl$Blk x y))"
, " x2)"
, " (apply@@sl$Train@@sl$Blk f1 x2)))"
, " :pattern"
, " ( (apply@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 (mk-fun@@sl$Train@@sl$Blk x y))"
, " x2) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (x sl$Train)"
, " (y sl$Blk) )"
, " (! (= (apply@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 (mk-fun@@sl$Train@@sl$Blk x y))"
, " x)"
, " y)"
, " :pattern"
, " ( (apply@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 (mk-fun@@sl$Train@@sl$Blk x y))"
, " x) ))))"
, "(assert (= (ran@@sl$Train@@sl$Blk empty-fun@@sl$Train@@sl$Blk)"
, " empty-set@@sl$Blk))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (y sl$Blk) )"
, " (! (= (elem@@sl$Blk y (ran@@sl$Train@@sl$Blk f1))"
, " (exists ( (x sl$Train) )"
, " (and true"
, " (and (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f1))"
, " (= (apply@@sl$Train@@sl$Blk f1 x) y)))))"
, " :pattern"
, " ( (elem@@sl$Blk y (ran@@sl$Train@@sl$Blk f1)) ))))"
, "(assert (forall ( (x sl$Train)"
, " (y sl$Blk) )"
, " (! (= (ran@@sl$Train@@sl$Blk (mk-fun@@sl$Train@@sl$Blk x y))"
, " (mk-set@@sl$Blk y))"
, " :pattern"
, " ( (ran@@sl$Train@@sl$Blk (mk-fun@@sl$Train@@sl$Blk x y)) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk)) )"
, " (! (= (injective@@sl$Train@@sl$Blk f1)"
, " (forall ( (x sl$Train)"
, " (x2 sl$Train) )"
, " (=> (and (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f1))"
, " (elem@@sl$Train x2 (dom@@sl$Train@@sl$Blk f1)))"
, " (=> (= (apply@@sl$Train@@sl$Blk f1 x)"
, " (apply@@sl$Train@@sl$Blk f1 x2))"
, " (= x x2)))))"
, " :pattern"
, " ( (injective@@sl$Train@@sl$Blk f1) ))))"
, "(assert (injective@@sl$Train@@sl$Blk empty-fun@@sl$Train@@sl$Blk))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (x sl$Train) )"
, " (! (=> (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f1))"
, " (elem@@sl$Blk (apply@@sl$Train@@sl$Blk f1 x)"
, " (ran@@sl$Train@@sl$Blk f1)))"
, " :pattern"
, " ( (elem@@sl$Blk (apply@@sl$Train@@sl$Blk f1 x)"
, " (ran@@sl$Train@@sl$Blk f1)) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (s1 (set sl$Train))"
, " (x sl$Train) )"
, " (! (=> (elem@@sl$Train x"
, " (set-diff@@sl$Train (dom@@sl$Train@@sl$Blk f1) s1))"
, " (elem@@sl$Blk (apply@@sl$Train@@sl$Blk f1 x)"
, " (ran@@sl$Train@@sl$Blk (dom-subt@@sl$Train@@sl$Blk s1 f1))))"
, " :pattern"
, " ( (elem@@sl$Blk (apply@@sl$Train@@sl$Blk f1 x)"
, " (ran@@sl$Train@@sl$Blk (dom-subt@@sl$Train@@sl$Blk s1 f1))) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (s1 (set sl$Train))"
, " (x sl$Train) )"
, " (! (=> (elem@@sl$Train x (intersect (dom@@sl$Train@@sl$Blk f1) s1))"
, " (elem@@sl$Blk (apply@@sl$Train@@sl$Blk f1 x)"
, " (ran@@sl$Train@@sl$Blk (dom-rest@@sl$Train@@sl$Blk s1 f1))))"
, " :pattern"
, " ( (elem@@sl$Blk (apply@@sl$Train@@sl$Blk f1 x)"
, " (ran@@sl$Train@@sl$Blk (dom-rest@@sl$Train@@sl$Blk s1 f1))) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (x sl$Train)"
, " (y sl$Blk) )"
, " (! (=> (and (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f1))"
, " (injective@@sl$Train@@sl$Blk f1))"
, " (= (ran@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 (mk-fun@@sl$Train@@sl$Blk x y)))"
, " (union (set-diff@@sl$Blk (ran@@sl$Train@@sl$Blk f1)"
, " (mk-set@@sl$Blk (apply@@sl$Train@@sl$Blk f1 x)))"
, " (mk-set@@sl$Blk y))))"
, " :pattern"
, " ( (ran@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 (mk-fun@@sl$Train@@sl$Blk x y))) ))))"
, "(assert (forall ( (f1 (pfun sl$Train sl$Blk))"
, " (x sl$Train)"
, " (y sl$Blk) )"
, " (! (=> (not (elem@@sl$Train x (dom@@sl$Train@@sl$Blk f1)))"
, " (= (ran@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 (mk-fun@@sl$Train@@sl$Blk x y)))"
, " (union (ran@@sl$Train@@sl$Blk f1) (mk-set@@sl$Blk y))))"
, " :pattern"
, " ( (ran@@sl$Train@@sl$Blk (ovl@@sl$Train@@sl$Blk f1 (mk-fun@@sl$Train@@sl$Blk x y))) ))))"
, "(assert (forall ( (x sl$Blk)"
, " (y sl$Blk) )"
, " (! (= (elem@@sl$Blk x (mk-set@@sl$Blk y)) (= x y))"
, " :pattern"
, " ( (elem@@sl$Blk x (mk-set@@sl$Blk y)) ))))"
, "(assert (forall ( (x sl$Train)"
, " (y sl$Train) )"
, " (! (= (elem@@sl$Train x (mk-set@@sl$Train y)) (= x y))"
, " :pattern"
, " ( (elem@@sl$Train x (mk-set@@sl$Train y)) ))))"
, "(assert (forall ( (s1 (set sl$Blk))"
, " (s2 (set sl$Blk)) )"
, " (! (=> (finite@@sl$Blk s1)"
, " (finite@@sl$Blk (set-diff@@sl$Blk s1 s2)))"
, " :pattern"
, " ( (finite@@sl$Blk (set-diff@@sl$Blk s1 s2)) ))))"
, "(assert (forall ( (s1 (set sl$Train))"
, " (s2 (set sl$Train)) )"
, " (! (=> (finite@@sl$Train s1)"
, " (finite@@sl$Train (set-diff@@sl$Train s1 s2)))"
, " :pattern"
, " ( (finite@@sl$Train (set-diff@@sl$Train s1 s2)) ))))"
, "(assert (forall ( (s1 (set sl$Blk))"
, " (s2 (set sl$Blk)) )"
, " (! (=> (and (finite@@sl$Blk s1) (finite@@sl$Blk s2))"
, " (finite@@sl$Blk (union s1 s2)))"
, " :pattern"
, " ( (finite@@sl$Blk (union s1 s2)) ))))"
, "(assert (forall ( (s1 (set sl$Train))"
, " (s2 (set sl$Train)) )"
, " (! (=> (and (finite@@sl$Train s1) (finite@@sl$Train s2))"
, " (finite@@sl$Train (union s1 s2)))"
, " :pattern"
, " ( (finite@@sl$Train (union s1 s2)) ))))"
, "(assert (forall ( (s1 (set sl$Blk))"
, " (s2 (set sl$Blk)) )"
, " (! (=> (and (finite@@sl$Blk s2) (not (finite@@sl$Blk s1)))"
, " (not (finite@@sl$Blk (set-diff@@sl$Blk s1 s2))))"
, " :pattern"
, " ( (finite@@sl$Blk (set-diff@@sl$Blk s1 s2)) ))))"
, "(assert (forall ( (s1 (set sl$Train))"
, " (s2 (set sl$Train)) )"
, " (! (=> (and (finite@@sl$Train s2) (not (finite@@sl$Train s1)))"
, " (not (finite@@sl$Train (set-diff@@sl$Train s1 s2))))"
, " :pattern"
, " ( (finite@@sl$Train (set-diff@@sl$Train s1 s2)) ))))"
, "(assert (forall ( (x sl$Blk) )"
, " (! (finite@@sl$Blk (mk-set@@sl$Blk x))"
, " :pattern"
, " ( (finite@@sl$Blk (mk-set@@sl$Blk x)) ))))"
, "(assert (forall ( (x sl$Train) )"
, " (! (finite@@sl$Train (mk-set@@sl$Train x))"
, " :pattern"
, " ( (finite@@sl$Train (mk-set@@sl$Train x)) ))))"
, "(assert (finite@@sl$Blk empty-set@@sl$Blk))"
, "(assert (finite@@sl$Train empty-set@@sl$Train))"
, "(assert (not (exists ( (loc@prime (pfun sl$Train sl$Blk)) )"
, " (and true"
, " (= loc@prime"
, " (ovl@@sl$Train@@sl$Blk loc (mk-fun@@sl$Train@@sl$Blk t ext)))))))"
, "; SKIP:in"
, "(assert (= in@prime in))"
, "; asm0"
, "(assert (and (not (elem@@sl$Blk ext plf)) (not (= ext ent))))"
, "; asm1"
, "(assert (forall ( (b sl$Blk) )"
, " (! (= (elem@@sl$Blk b sl$Blk)"
, " (or (elem@@sl$Blk b plf) (= b ent) (= b ext)))"
, " :pattern"
, " ( (elem@@sl$Blk b sl$Blk) ))))"
, "; asm2"
, "(assert (exists ( (b sl$Blk) ) (and true (elem@@sl$Blk b plf))))"
, "; asm3"
, "(assert (not (elem@@sl$Blk ent plf)))"
, "; c1"
, "(assert (and (elem@@sl$Train t in)"
, " (elem@@sl$Blk (apply@@sl$Train@@sl$Blk loc t) plf)))"
, "; inv0"
, "(assert (= (dom@@sl$Train@@sl$Blk loc) in))"
, "; mo:g1"
, "(assert (elem@@sl$Train t in))"
, "; mo:g2"
, "(assert (elem@@sl$Blk (apply@@sl$Train@@sl$Blk loc t) plf))"
, "(assert (not true))"
, "(check-sat-using (or-else (then qe smt)"
, " (then simplify smt)"
, " (then skip smt)"
, " (then (using-params simplify :expand-power true) smt)))"
, "; m1/m1:moveout/FIS/loc@prime"
]
|
literate-unitb/literate-unitb
|
src/Document/Tests/TrainStationRefinement.hs
|
mit
| 35,169
| 0
| 12
| 13,115
| 2,442
| 1,594
| 848
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
module Vessel where
import Config
import Control.Monad.Logger
import Control.Monad.Reader
import Data.Bool
import Data.Monoid
import Data.Text as T
import Registry
import Container
runApp :: Config -> App a -> IO a
runApp config =
runStdoutLoggingT
. flip runReaderT config
. unApp
vessel :: App ()
vessel = do
pull
start
return ()
|
faineance/vessel
|
src/Vessel.hs
|
mit
| 470
| 0
| 8
| 158
| 112
| 61
| 51
| 20
| 1
|
module Utils (loadImage) where
import Data.Monoid (mempty)
import Diagrams.Backend.Rasterific.CmdLine (B)
import Diagrams.Prelude (Diagram)
import Diagrams.TwoD.Image (loadImageEmb, image)
loadImage :: FilePath -> IO (Diagram B)
loadImage path = do
img <- loadImageEmb path
case img of
Left _ -> putStrLn ("Invalid image path " ++ path) >> return mempty
Right i -> return $ image i
|
mihaimaruseac/petulant-octo-avenger
|
src-draw/Utils.hs
|
mit
| 397
| 0
| 13
| 70
| 140
| 74
| 66
| 11
| 2
|
import qualified Data.List as List
import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as IO
data Tile = Safe | Trap
deriving (Eq)
instance Show Tile where
show Safe = "."
show Trap = "^"
rowCount = 40
main = do
input <- Text.strip <$> IO.getContents
let firstRow = parseInput input
let rows = iterate nextRow firstRow
let safeTileCount = length $ filter (== Safe) $ concat $ take rowCount rows
mapM_ (putStrLn . concatMap show) $ take rowCount rows
print safeTileCount
parseInput :: Text -> [Tile]
parseInput = map parseTile . Text.unpack
where
parseTile '.' = Safe
parseTile '^' = Trap
nextRow :: [Tile] -> [Tile]
nextRow row = map nextTile previous
where
previous = windows 3 (Safe : row ++ [Safe])
nextTile [Trap, Trap, Safe] = Trap
nextTile [Safe, Trap, Trap] = Trap
nextTile [Trap, Safe, Safe] = Trap
nextTile [Safe, Safe, Trap] = Trap
nextTile _ = Safe
windows :: Int -> [a] -> [[a]]
windows n = takeWhile (\window -> length window == n) . List.transpose . take n . List.tails
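-- For reference, windows 3 [1,2,3,4,5] == [[1,2,3],[2,3,4],[3,4,5]]: transposing
-- the first n tails yields the sliding windows, and takeWhile drops the short
-- ones at the end of the list.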
|
SamirTalwar/advent-of-code
|
2016/AOC_18_1.hs
|
mit
| 1,082
| 7
| 14
| 239
| 436
| 231
| 205
| 31
| 5
|
-- |
-- Module : BattleHack.Piano
-- Description :
-- Copyright : (c) Jonatan H Sundqvist, 2015
-- License : MIT
-- Maintainer : Jonatan H Sundqvist
-- Stability : experimental
-- Portability : POSIX
-- Created September 12 2015
-- TODO | - Split up into several modules
-- -
-- SPEC | -
-- -
--------------------------------------------------------------------------------------------------------------------------------------------
-- GHC pragmas
--------------------------------------------------------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------------------------------------------------------
-- API
--------------------------------------------------------------------------------------------------------------------------------------------
module BattleHack.Piano where
--------------------------------------------------------------------------------------------------------------------------------------------
-- We'll need these
--------------------------------------------------------------------------------------------------------------------------------------------
import Control.Lens hiding (inside)
import Data.List (find)
import Data.Complex
import BattleHack.Types
import BattleHack.Lenses
import BattleHack.Utilities.Vector
import BattleHack.Utilities.General
--------------------------------------------------------------------------------------------------------------------------------------------
-- Data
--------------------------------------------------------------------------------------------------------------------------------------------
-- |
naturals :: Integral n => [n]
naturals = [0, 2, 4, 5, 7, 9, 11]
-- |
accidentals :: Integral n => [n]
accidentals = [1, 3, 6, 8, 10]
-- | The indices of every natural key in order, starting at C0 (index 0)
allnaturals :: [Int]
allnaturals = zipWith (\i key -> div i 7 * 12 + key) [0..] $ cycle naturals
-- | The indices of every accidental key in order, starting at C#0 (index 1)
allaccidentals :: [Int]
allaccidentals = zipWith (\i key -> div i 5 * 12 + key) [0..] $ cycle accidentals
-- |
chordlayout :: [KeyLayout]
chordlayout = [KeyRight, KeyAccidental, KeyBoth, KeyAccidental, KeyLeft, KeyRight, KeyAccidental, KeyBoth, KeyAccidental, KeyBoth, KeyAccidental, KeyLeft]
-- | Horizontal offset for each key in the octave, relative to the very first key
keysteps :: RealFloat r => [r]
keysteps = scanl1 (+) [0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1]
--------------------------------------------------------------------------------------------------------------------------------------------
-- Functions
--------------------------------------------------------------------------------------------------------------------------------------------
-- |
keyorigin :: PianoSettings -> Int -> Vector
keyorigin piano i = piano-->origin + (sx*shiftx :+ 0)
where
(sx:+_) = piano-->keysize
shiftx = 7*octaveFromKeyIndex i + (keysteps !! (i `mod` 12))
-- |
keylayout :: Int -> KeyLayout
keylayout i = chordlayout !! (i `mod` 12)
-- | Is the point inside the bounds of the given key?
-- Note that the function assumes that the key is pinned to the origin.
-- TODO: Bugs ahead (swat them!)
-- TODO: Rename (?)
inside :: PianoSettings -> KeyLayout -> Vector -> Bool
inside piano KeyLeft p = insideBounds piano p && not (insideLeft piano p)
inside piano KeyRight p = insideBounds piano p && not (insideRight piano p)
inside piano KeyBoth p = insideBounds piano p && not (insideLeft piano p || insideRight piano p)
inside piano KeyAccidental p = insideMiddle piano p -- --insideLeft piano p || insideRight piano p
-- | Is the point within the rectangular bounding box of the key?
insideBounds :: PianoSettings -> Vector -> Bool
insideBounds piano (x:+y) = let (dx:+dy) = piano-->keysize in between 0 dx x && between 0 dy y
-- | Is the point within the left indent of the key?
insideLeft :: PianoSettings -> Vector -> Bool
insideLeft piano (x:+y) = let (dx:+dy) = dotwise (*) (piano-->indent :+ piano-->mid) (piano-->keysize) in between 0 dx x && between 0 dy y
-- | Is the point within the right indent of the key?
-- TODO: Buggy!
insideRight :: PianoSettings -> Vector -> Bool
insideRight piano p = let shiftx = piano-->keysize.real * (piano-->indent - 1) in insideLeft piano $ p + (shiftx:+0)
-- insideRight piano p = let shiftx = piano-->keysize.real * (1 - piano-->indent) in insideLeft piano $ p - (piano-->keysize.real:+0) + ((piano-->keysize.real*piano-->indent):+0)
-- insideRight piano (x:+y) = between ()
-- |
insideMiddle :: PianoSettings -> Vector -> Bool
insideMiddle piano (x:+y) = let (ox:+oy, dx:+dy) = keybounds piano KeyAccidental in between ox (ox+dx) x && between oy (oy+dy) y
-- |
-- TODO: Move to Piano
-- TODO: Rename (?)
layout :: RealFloat r => Complex r -> r -> r -> KeyLayout -> [Complex r]
layout (sx:+sy) indent mid which = case which of
KeyRight -> [nw, nei, rmi, rm, se, sw]
KeyBoth -> [nwi, nei, rmi, rm, se, sw, lm, lmi]
KeyLeft -> [nwi, ne, se, sw, lm, lmi]
KeyAccidental -> [nei, rmi, (sx:+0)+lmi, (sx:+0)+nwi]
where
indent' = sx*indent
mid' = sy*mid
nw = 0:+0 -- North west
ne = sx:+0 -- North east
se = sx:+sy -- South east
sw = 0:+sy -- South west
nwi = indent':+0 -- North west indented
nei = (sx-indent'):+0 -- North east indented
lmi = indent':+mid' -- Left middle indent
rmi = (sx-indent'):+mid' -- Right middle indent
lm = 0:+mid' -- Left middle
rm = sx:+mid' -- Right middle
-- | Rectangular bounds of a key (currently as a topleft, size tuple)
-- TODO: Use Bounding Box type (?)
keybounds :: PianoSettings -> KeyLayout -> (Vector, Vector)
keybounds piano layout = case layout of
KeyAccidental -> (piano-->keysize.real' - (indent':+0), (2*indent'):+mid')
_ -> (0:+0, piano-->keysize)
where
(indent':+mid') = dotwise (*) (piano-->keysize) (piano-->indent:+piano-->mid)
-- |
-- TODO: Simplify
-- TODO: Don't hard-code the range
-- TODO: This is a pretty dumb algorithm for finding hovered-over keys
findKeyAt :: Vector -> PianoSettings -> Maybe Int
findKeyAt p piano' = find (insideFromKeyIndex piano' p) (zipWith const [0..] (piano'-->keys))
-- | Is the point inside the key at the given index?
-- TODO: Rename (?)
insideFromKeyIndex :: PianoSettings -> Vector -> Int -> Bool
insideFromKeyIndex piano' p i = inside piano' (keylayout i) (p-keyorigin piano' i)
-- |
pitchFromKeyIndex :: RealFloat r => Int -> r
pitchFromKeyIndex i = let i' = i+4+49 in 440.0*2.0**((fromIntegral i' - 49)/12.0) -- TODO: Make sure this is correct, elaborate on the meaning of the different index conversions
-- pitchFromKeyIndex i = 440*2**((fromIntegral $ i+3 + 12*4-49)/12)
-- | Which octave does the key belong to (the 0th octave starts with C0, the 1st with C1; no overlap between octaves)
octaveFromKeyIndex :: RealFloat r => Int -> r
octaveFromKeyIndex = fromIntegral . (`div` 12)
-- |
-- TODO: Refactor
-- TODO: Use Unicode (?)
notenameFromKeyIndex :: Int -> String
notenameFromKeyIndex i = ["C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"] !! mod i 12
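-- For example, notenameFromKeyIndex 0 == "C" and notenameFromKeyIndex 13 == "C#"
-- (indices wrap around every 12 keys).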
|
SwiftsNamesake/BattleHack-2015
|
src/BattleHack/Piano.hs
|
mit
| 7,357
| 0
| 13
| 1,253
| 1,734
| 977
| 757
| -1
| -1
|
module Main where
import Data.Char (ord)
import Data.Bits ((.&.))
import Numeric (showHex)
import System.Environment (getArgs, getProgName)
import System.Exit (exitWith, ExitCode(..))
data TrimType = Trim
| TrimSafe
| TrimNone
deriving (Eq)
modifyLength :: String -> String
modifyLength s
| length s `mod` 4 == 0 = s
| otherwise = modifyLength $ '/' : s
splitInFours :: String -> [String]
splitInFours "" = []
splitInFours s = take 4 s : splitInFours (drop 4 s)
toHexByte :: Char -> String
toHexByte c =
if length hexNum == 1
then '0' : hexNum
else hexNum
where hexNum = showHex (ord c .&. 0xff) ""
hexify :: String -> String
hexify s = "0x" ++ concat (hexify' s)
where hexify' = map toHexByte
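-- Each character becomes two hex digits of its low byte, e.g.
-- hexify "/sh" == "0x2f7368".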
pushStr :: TrimType -> String -> String
pushStr t s = "push " ++ if t == TrimSafe && length s == 6
then "word " ++ s
else s
usage :: IO ()
usage = do
progName <- getProgName
putStrLn $ "Usage : " ++ progName ++ " [--trim | --trim-safe] <string>"
splitHead :: [String] -> [String]
splitHead [] = []
splitHead str@(x:xs)
| lenX == 3 = (reverse . modSplit . splitAt 2 . reverse) x ++ xs
| otherwise = str
where lenX = length x
modSplit (a, b) = reverse a : [b]
pushify :: String -> TrimType -> IO ()
pushify s t = mapM_ putStrLn $ hexList s
where splitArg
| t == TrimSafe = splitHead . reverse . map reverse . splitInFours
| otherwise = splitInFours . reverse . modifyLength
hexList arg = map (pushStr t . hexify) $ splitArg arg
trimLength :: String -> IO ()
trimLength s
| l == 4 = return ()
| otherwise = putStrLn $ "add esp, 0x" ++ show l
where l = 4 - length s `mod` 4
parse :: [String] -> IO ()
parse ["--help"] = usage
parse ["--trim", x] = pushify x Trim >> trimLength x
parse ["--trim-safe", x] = pushify x TrimSafe
parse [x] = pushify x TrimNone
parse _ = usage >> exitWith (ExitFailure 1)
main :: IO ()
main = parse =<< getArgs
|
reubensammut/SLAE32
|
Scripts/reverse.hs
|
mit
| 2,040
| 17
| 11
| 564
| 642
| 365
| 277
| 60
| 2
|
module Tema_23f_Escalada_Monedas_Spec (main, spec) where
import Tema_23.Escalada_Monedas
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "sucesoresMonedas" $
it "e1" $
sucesoresMonedas (199,[]) `shouldBe`
[(198,[1]),(197,[2]),(194,[5]),(189,[10]),
(179,[20]),(149,[50]),(99,[100])]
describe "cambio" $
it "e1" $
cambio 199 `shouldBe` [2,2,5,20,20,50,100]
|
jaalonso/I1M-Cod-Temas
|
test/Tema_23f_Escalada_Monedas_Spec.hs
|
gpl-2.0
| 435
| 0
| 11
| 90
| 215
| 128
| 87
| 15
| 1
|
module Main where
import Genetics
import Graphics.Gloss
import Data.Array
main = simulate window magenta 1 world worldToPicture step
|
aflag/haskell-group
|
genetics/src/Main.hs
|
gpl-2.0
| 135
| 0
| 5
| 20
| 35
| 20
| 15
| 5
| 1
|
-- -- {{{1
--
-- File : Flx/Math/Comb.hs
-- Maintainer : Felix C. Stegerman <flx@obfusk.net>
-- Date : 2012-06-28
--
-- Copyright : Copyright (C) 2012 Felix C. Stegerman
-- Licence : GPLv2
--
-- Depends : ...
-- Description : ...
--
-- TODO : ...
--
-- Links:
-- http://en.wikipedia.org/wiki/Combinatorics
--
-- -- }}}1
module Flx.Math.Comb ( -- {{{1
fac, facs, choices
) where -- }}}1
--
import Data.List (genericIndex)
--
fac :: Integral a => a -> a
fac = genericIndex facs
facs :: Integral a => [a]
facs = scanl (*) 1 [1..]
choices :: Integral a => a -> a -> a
choices n r = (fac n) `div` (fac r * fac (n - r))
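-- For example, fac 5 == 120 and choices 5 2 == 10 (the binomial coefficient
-- "5 choose 2").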
-- vim: set tw=70 sw=2 sts=2 et fdm=marker :
|
obfusk/hs-flx
|
src/Flx/Math/Comb.hs
|
gpl-2.0
| 929
| 0
| 10
| 400
| 169
| 103
| 66
| 9
| 1
|
module Main where
import System.Console.GetOpt
import System.Environment
import System.Directory (doesDirectoryExist)
import System.IO
import System.Exit
import Control.Applicative
import Process
import Poll
data Options = Options { optCodeDir :: String
, optDocsDir :: String
, optCss :: Maybe String
, optCode :: Bool
, optHtml :: Bool
, optMarkdown :: Bool
, optWatch :: Bool
}
startOptions :: Options
startOptions = Options { optCodeDir = "./"
, optDocsDir = "./"
, optCss = Nothing
, optCode = False
, optHtml = False
, optMarkdown = False
, optWatch = False
}
options :: [ OptDescr (Options -> IO Options) ]
options =
[ Option "h" ["html"]
(NoArg (\opt -> return opt { optHtml = True }))
"Generate html"
, Option "m" ["markdown"]
(NoArg (\opt -> return opt { optMarkdown = True }))
"Generate markdown"
, Option "c" ["code"]
(NoArg (\opt -> return opt { optCode = True }))
"Generate code by file extension"
, Option "" ["css"]
(ReqArg
(\arg opt -> return opt { optCss = Just arg })
"FILE")
"Specify a css file for html generation"
, Option "" ["docs-dir"]
(ReqArg
(\arg opt -> return opt { optDocsDir = arg })
"DIR")
"Directory for generated docs"
, Option "" ["code-dir"]
(ReqArg
(\arg opt -> return opt { optCodeDir = arg })
"DIR")
"Directory for generated code"
, Option "w" ["watch"]
(NoArg
(\opt -> return opt { optWatch = True}))
"Watch for file changes, automatically run lit"
, Option "v" ["version"]
(NoArg
(\_ -> do
hPutStrLn stderr "Version 0.01"
exitWith ExitSuccess))
"Print version"
, Option "" ["help"]
(NoArg
(\_ -> do
prg <- getProgName
hPutStrLn stderr (usageInfo usage options)
exitWith ExitSuccess))
"Display help"
]
usage = "Usage: lit OPTIONS... FILES..."
help = "Try: lit --help"
main = do
args <- getArgs
-- Parse options, getting a list of option actions
let (actions, files, errors) = getOpt Permute options args
opts <- foldl (>>=) (return startOptions) actions
let Options { optCodeDir = codeDir
, optDocsDir = docsDir
, optMarkdown = markdown
, optCode = code
, optHtml = html
, optCss = mCss
, optWatch = watching
} = opts
codeDirCheck <- doesDirectoryExist codeDir
docsDirCheck <- doesDirectoryExist docsDir
let htmlPipe = if html then [Process.htmlPipeline docsDir mCss] else []
mdPipe = if markdown then [Process.mdPipeline docsDir mCss] else []
codePipe = if code then [Process.codePipeline codeDir mCss] else []
pipes = htmlPipe ++ mdPipe ++ codePipe
maybeWatch = if watching then Poll.watch else mapM_
errors' = if codeDirCheck then [] else ["Directory: " ++ codeDir ++ " does not exist\n"]
errors'' = if docsDirCheck then [] else ["Directory: " ++ docsDir ++ " does not exist\n"]
allErr = errors ++ errors' ++ errors''
if allErr /= [] || (not html && not code && not markdown) || files == []
then hPutStrLn stderr ((concat allErr) ++ help)
else (maybeWatch (Process.process pipes)) files
|
beni55/lit
|
src/lit.hs
|
gpl-2.0
| 3,819
| 0
| 15
| 1,458
| 980
| 538
| 442
| 94
| 8
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
module HsType where
--import Debug.Trace
import Control.Arrow
import Control.Monad
import Control.Monad.RWS
import Control.DeepSeq
import Data.List
import Data.Typeable (TypeRep)
import Data.Hashable
import Data.Text (Text)
import Data.Map.Strict (Map)
import Data.HashMap.Strict (HashMap)
import qualified Data.Text as T
import qualified Data.Map.Strict as Map
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Traversable
import qualified Data.Typeable
import HyphenBase
import HyphenKinds
import HyphenTyCon
-- | This module provides a Haskell representation of Haskell
-- types. It is similar to the Data.Typeable built-in module, but it
-- differs in that it can represent polymorphic types (that is, types
-- with type variables in them), and that the type constructors within
-- the types are encoded using our HyphenTyCon.TyCon type rather than
-- Data.Typeable.TyCon: ours contains a bit more information (to wit,
-- the kind and some information on how the type constructor can be
-- 'found' in importable modules; see comments in HyphenTyCon for
-- details).
--
-- Although (as mentioned above) we can represent polymorphic types,
-- every type we represent has a fixed kind: we don't handle
-- Kind-polymorphic beasts like @(a Text)@ where @a@ is a type
-- variable. (This could have kind @*@, if @a@ has Kind @* -> *@, but
-- could also have Kind @* -> *@, if @a@ had kind @* -> * -> *@, and
-- so on...)
----
-- | Simple wrapper type to represent a Variable. Just contains a Text
-- giving the name of the variable.
newtype Var = Var {getVar :: Text} deriving (Eq, Ord, Show)
instance NFData (Var) where
rnf (Var v) = rnf v
instance Hashable (Var) where
hashWithSalt salt (Var v) = hashWithSalt salt v
----
-- | Our representation for a Haskell type. The only 'true' fields
-- here are typeHead and typeTail; all the other fields are computed
-- from them and only present in the constructor for efficiency (so
-- that, once computed, we remember their values). The head of a type
-- is either a Type constructor, or a Variable with a kind. (Types
-- like @((a :: * -> * -> *) Text Text)@ are legal in Haskell, but
-- very rare. Note that in this example the Kind of the Variable must
-- be fixed because we do not support Kind polymorphism of any nature:
-- see second paragraph of comment at the top of the file.) The tail
-- of the type is the set of other types to which the constructor has
-- been applied.
--
-- Note that not all combinations of typeHead and typeTail are
-- allowed: there might be a kind mismatch.
--
-- The smart constructors mkHsTypeSafe and mkHsType are the preferred
-- way of building HsTypes from the 'true' fields.
--
-- typeHash is a hash or fingerprint for the type, stored once for
-- fast access. typeKind is the Kind of the type as a whole: this can
-- be figured out by taking the kind of the head and imagining that
-- @N@ arguments have already been provided to it, where @N@ is the
-- number of elements of the typeTail. typeFreeVars are all the free
-- type variables in the type, together with the kinds they must have
-- for the expression to make sense; this can be gleaned by scanning
-- recursively for variables. typeName is the name of the type in
-- standard Haskell notation.
data HsType
= HsType {
typeHash :: {-# UNPACK #-} !Int,
typeHead :: Either TyCon (Var, Kind),
typeTail :: [HsType],
typeKind :: Kind,
typeFreeVars :: Map Var Kind,
typeName :: Text
} deriving (Ord, Show)
instance Hashable (HsType) where
hash HsType {typeHash=th} = th
hashWithSalt salt (HsType {typeHead=head, typeTail=tail} )
= case head of (Left c) -> hashWithSalt salt (c, tail)
(Right v) -> hashWithSalt salt (("tyvar", v), tail)
bracketedTypeName :: HsType -> Text
bracketedTypeName hst | never_bracket (typeHead hst) = typeName hst
| null (typeTail hst) = typeName hst
| otherwise = bracket $ typeName hst
where never_bracket (Left tyc) = isListTyCon tyc || isTupTyCon tyc
never_bracket (Right _) = False
-- | Convenience function: put brackets round a string
bracket :: Text -> Text
bracket t = T.concat [T.pack "(", t, T.pack ")"]
instance Eq (HsType) where
a == b = hash a == hash b
-- | map the Variable names used in an HsType; useful for renaming
-- variables.
mapVars :: (Text -> Text) -> HsType -> HsType
mapVars fn = process
where process hst = let rest = (map process $ typeTail hst) in case typeHead hst of
Right (Var v, k) -> mkHsType (Right (Var (fn v), k)) rest
tycon -> mkHsType tycon rest
-- | Secondary way of making an HsType from the 'true' fields, the
-- head and the tail. As mentioned in the docstring for HsType itself,
-- not all combinations of a head and a tail will be legal (some fail
-- to Kind-check); in case of failure, this function gives an IO
-- exception.
mkHsType :: Either TyCon (Var, Kind) -> [HsType] -> HsType
mkHsType head = either (error . T.unpack . getErrMsg) id . mkHsTypeSafe head
-- | Main way of making an HsType from the 'true' fields, the head and
-- the tail. As mentioned in the docstring for HsType itself, not all
-- combinations of a head and a tail will be legal (some fail to
-- Kind-check); in case of failure, this function gives a nice,
-- monadic error message.
mkHsTypeSafe :: Either TyCon (Var, Kind) -> [HsType] -> Either ErrMsg HsType
mkHsTypeSafe head tail = let
thash = case head of (Left c) -> hash (c, tail)
(Right v) -> hash (("tyvar", v), tail)
fvMaps = headVars head : zipWith tailVars [1..] tail
headVars (Right (v,k)) = Map.fromList [(v, Right (k,"in the head of the new type"))]
headVars _ = Map.empty
  tailVars i hst = let f k = Right (k, "in argument " ++ show i ++ " of the new type")
in f <$> typeFreeVars hst
fvMap = foldl (Map.unionWithKey combine) Map.empty fvMaps
combine _ a@(Left _) _ = a
combine _ _ a@(Left _) = a
combine v a@(Right (k1, r1)) (Right (k2, r2))
| k1 == k2 = a
| otherwise = Left (
"Error in attempting to construct type " ++ T.unpack name ++ "; type variable " ++
T.unpack (getVar v) ++ " has kind " ++ kindString k1 ++
" " ++ r1 ++ ", but has kind " ++ kindString k2 ++ " " ++ r2 ++ ".")
(errMsgIntro, headKind) = case head of
(Left c) -> ("Type constructor " ++ T.unpack (tyConName c), tyConKind c)
(Right (Var v, k)) -> ("Type variable " ++ T.unpack v, k )
resultKind = Kind . drop (length tail) . kindArgKinds $ headKind
kindArityCheck :: Either ErrMsg ()
kindArityCheck = let
arity = (length (kindArgKinds headKind)) :: Int
in when (arity < length tail) $ report (
errMsgIntro ++ " applied to too many type variables. It has kind " ++
      kindString headKind ++ ", so should be applied to at most " ++ show arity ++
      " arguments; instead, it is applied to the " ++ show (length tail :: Int) ++
" arguments " ++ (unwords $ map (T.unpack . typeName) tail) ++ ".")
argKindCheck :: Int -> Kind -> Kind -> Either ErrMsg ()
argKindCheck i kExpected kActual
= unless (kExpected == kActual) $ report (
errMsgIntro ++ " has kind " ++ kindString headKind ++ ", so its parameter number "
++ show i ++ " should have kind " ++ kindString kExpected ++ " but the actual "
++ "parameter " ++ T.unpack (typeName (tail !! (i - 1))) ++ " has kind "
++ kindString kActual)
name | either isTupTyCon (const False) head = bracket (
T.intercalate (T.pack ", ") (map typeName tail))
| either isListTyCon (const False) head = T.concat [
T.pack "[", T.intercalate (T.pack ", ") (map typeName tail), T.pack "]"]
| head == Left fnTyCon = T.concat $ case tail of
[frty, toty] ->
if typeHead frty == Left fnTyCon
then [bracketedTypeName frty, T.pack " -> ", typeName toty]
else [typeName frty, T.pack " -> ", typeName toty]
| otherwise = let headName (Left c) = tyConFullName c
headName (Right (Var v, k)) = v
in T.unwords (headName head:map bracketedTypeName tail)
in do kindArityCheck
sequence $ zipWith3 argKindCheck [1..] (kindArgKinds headKind) (map typeKind tail)
fvs <- Data.Traversable.mapM (either report (return . fst)) fvMap
return $ HsType thash (force head) tail resultKind fvs name
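-- Note (added): the checks above run in order: 'kindArityCheck' rejects a
-- head applied to more arguments than its kind allows, 'argKindCheck'
-- compares each argument's kind against the corresponding argument kind of
-- the head, and only then are the cached hash, kind, free-variable map and
-- printed name assembled into the record.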
-- | Is this type monomorphic (free of type variables)?
isMonoType :: HsType -> Bool
isMonoType = Map.null . typeFreeVars
-- | Given HsTypes representing types @A@ and @B@, construct the type
-- of functions taking @A@ and returning @B@.
fnHsType :: HsType -> HsType -> HsType
fnHsType a b = mkHsType (Left fnTyCon) [a, b]
-- | Given an HsType which we hope describes the type of functions from
-- some type @A@ to some type @B@, return a pair containing HsTypes
-- representing @A@ and @B@ respectively, or else a friendly error
-- message saying why it couldn't be done.
breakFnType :: HsType -> Either ErrMsg (HsType, HsType)
breakFnType (HsType {typeHead=Left tc, typeTail=[fr, to]})
| tc == fnTyCon = return (fr, to)
breakFnType ty = report $ "Trying to apply object of type '" ++ T.unpack (typeName ty)
++ "', but this is not a function type."
-- | Given an HsType, think of it as a function type A_1 -> A_2 ->
-- .. -> A_n -> R, and return a list of HsTypes representing the
-- argument types A_1, ..., A_n and an HsType representing the result
-- type R; this is always possible because we may take the list A_i as
-- empty (although we always make it as long as possible).
breakFnTypeRec :: HsType -> ([HsType], HsType)
breakFnTypeRec hst = go hst []
where go hst sofar = case breakFnType hst of
Left _ -> (reverse sofar, hst)
Right (fr, to) -> go to (fr:sofar)
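-- Illustration (added): writing HsTypes informally by their names, if @t@
-- stands for @Int -> Bool -> Char@ then @breakFnTypeRec t@ yields
-- @([Int, Bool], Char)@; a non-function type comes back unchanged, paired
-- with an empty argument list.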
-- | Given an HsType which we hope describes the type of functions from
-- some type @A@ to some type @B@, return a pair containing HsTypes
-- representing @A@ and @B@ respectively, or else panic.
breakFnTypeUnsafe :: HsType -> (HsType, HsType)
breakFnTypeUnsafe (HsType {typeHead=Left tc, typeTail=[fr, to]})
| tc == fnTyCon = (fr, to)
breakFnTypeUnsafe ty = error $ "breakFnTypeUnsafe:'" ++ T.unpack (typeName ty)
++ "' is not a function type."
-- | Find and replace for types. Given a map of variables to types,
-- and an HsType X, replace the variables with the chosen new types in
-- X. If you try to use transformType to replace a variable with a
-- type whose kind doesn't match the variable's kind, then
-- transformType will panic as HsType's constructor complains. If you
-- want to check to make sure this will not happen, see
-- transformTypeAllowedCheck below.
transformType :: Map Var HsType -> HsType -> HsType
transformType dict = go
where go HsType {typeHead=Left con, typeTail=tail}
= mkHsType (Left con) (map go tail)
go HsType {typeHead=Right (var, kind), typeTail=tail} = case Map.lookup var dict of
Nothing -> mkHsType (Right (var, kind)) (map go tail)
Just HsType {typeHead=head', typeTail=tail'}
-> mkHsType head' (tail' ++ map go tail)
-- | If you try to use transformType to replace a variable with a type
-- whose kind doesn't match the variable's kind, then transformType
-- will panic as HsType's constructor complains. If you want to check
-- to make sure this will not happen, use this
-- transformTypeAllowedCheck function, which either returns a friendly
-- error message explaining why the requested transformType is bogus,
-- or () if it's safe to call transformType.
transformTypeAllowedCheck :: Map Var HsType -> HsType -> Either ErrMsg ()
transformTypeAllowedCheck substs original = do
let usableKinds = typeFreeVars original
unusable = map fst . Map.toList $ substs `Map.difference` usableKinds
mapM_ (\ (Var v) -> report $ "A substitution is given for the variable " ++ T.unpack v
                               ++ " but that variable isn't used in the type into which we're "
++ "making the substitution") unusable
let checkKindCompatibility :: Var -> Kind -> HsType -> Either ErrMsg ()
checkKindCompatibility (Var v) k t =
unless (k == typeKind t) (
        report $ "The substitution offered for the variable " ++ T.unpack v ++
        " ought to have kind " ++ kindString k ++ " but you provided something " ++
"of kind " ++ kindString (typeKind t) ++ ", viz " ++ T.unpack (typeName t))
sequence_ . map snd . Map.toList $
Map.intersectionWithKey checkKindCompatibility usableKinds substs
-- | Given an HsType, think of it as a function type A_1 -> A_2 ->
-- .. -> A_n -> R, and return a list of HsTypes representing the
-- argument types A_1, ..., A_n and an HsType representing the result
-- type R; this is always possible because we may take the list A_i as
-- empty. We take n to be the provided integer max_peel if possible;
-- if it's not possible, we make it as large as we can.
peelArgTypes :: HsType -> Int -> ([HsType], HsType)
peelArgTypes ty max_peel
| max_peel == 0 = ([], ty)
| otherwise = if typeHead ty /= Left fnTyCon then ([], ty) else
let [arg1, rest] = typeTail ty
(args', ret) = peelArgTypes rest (max_peel-1)
in (arg1:args', ret)
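-- Illustration (added): with @t@ standing for @Int -> Bool -> Char@,
-- @peelArgTypes t 1@ gives @([Int], Bool -> Char)@ and @peelArgTypes t 2@
-- gives @([Int, Bool], Char)@; asking for more arguments than the type has
-- simply stops at the final result type.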
-- | Create an HsType from a TypeRep, in a way which only works in the
-- special case that all the Type Constructors used in the TypeRep are
-- fully saturated. (That is, every argument passed to a type
-- constructor has kind *, and the final result has kind *.) Do not
-- call outside this case!
hsTypeFromSimpleTypeRep :: TypeRep -> HsType
hsTypeFromSimpleTypeRep tr = let
(con, args) = Data.Typeable.splitTyConApp tr
args' = map hsTypeFromSimpleTypeRep args
con' = tyConFromTypeableTyCon (simplKnd $ length args) con
in mkHsType (Left con') args'
-- | Get a string representation of the HsType that is suitable to use
-- as its Python @repr@ (that is a string that, if executed in
-- python+hyphen with appropriate imports, will evaluate to a python
-- object which represents the HsType in question). Note that this
-- depends on the high-level haskell/python bridge, so there's a bit
-- of something like leaky abstraction here...
hsTypeRepr :: HsType -> Text
hsTypeRepr hst
= let tailRep = map hsTypeRepr $ typeTail hst
in case typeHead hst of
Right (var, _) -> T.concat $ [
T.pack "hyphen.HsType(", T.intercalate (T.pack ", ") (
[T.concat[T.pack "\"", getVar var, T.pack "\""]] ++ tailRep ++
if typeKind hst /= Kind [] then
[T.concat [T.pack "kind=\"", T.pack . kindString $ typeKind hst, T.pack "\""]]
else []), T.pack ")"]
Left tyc -> T.concat $ [
tyConRepr' True tyc, T.pack "(", T.intercalate (T.pack ", ") tailRep, T.pack ")"]
-- | A very technical but very important role in hyphen is played by
-- *type forcers*, which exist only for *monomorphic* types. The idea
-- of a type forcer is that it's a string that we can pass to the
-- haskell compiler which will evaluate to the identity function on a
-- particular type in question. The point of this is that we can apply
-- such a thing to an polymorphic expression to produce a monomorphic
-- realization of (the object encoded by) that expression.
--
-- In simple cases, this is just a matter of writing @(id :: <type> ->
-- <type>)@. Not all cases are this simple, however, because sometimes
-- we just can't name the type in question (so we can't write
-- something that would go in the spot @<type>@ there). This is
-- because it's possible for a Haskell module to locally define a type
-- constructor X and to export an entity whose type involves X
-- without exporting X. (This is considered by some to be a wart.) In
-- our world, we see this in that not every @TyCLocation@ is built
-- using @InExplicitModuleNamed@. So in more general cases, we end up
-- doing something like the following. Pretend that [] is never
-- exported. Then we can force something to have type [Int] by passing
-- it through:
--
-- @
-- (((const $ id) :: (x a -> a) -> x Int -> x Int) head)
-- @
--
-- The basic idea here is that since head has type @[u] -> u@,
-- applying the @(const $ id)@ to head forces the @x@ in the
-- explicitly given type signature to mean @[]@, so the @id@ we get is
-- actually @id :: [Int] -> [Int]@. Tada!
--
-- The rest of the code here is just a matter of doing this in
-- general. We write a function from HsTypes to Texts that builds the
-- name of the type which we want to force to (@x Int@) assuming that
-- certain type variables (@x@ in this case) have been forced to be
-- synonyms for certain type constructors. This code is in a WriterT
-- monad transformer, and as it goes along it writes out pairs like
-- @('head', 'x a')@ which means that @x@ will be forced be a synonym
-- for the type constructor we want if we ensure that @x a@ matches
-- the type of @head@. As we build this, we need a good supply of type
-- variables, so we also have a state monad transformer to keep track
-- of the next fresh variable. We actually (for human convenience) use
-- two stocks of variables: 'x variables' (which are variables that
-- will eventually be set to type constructors we want to use) and 'a
-- variables' which are just padding.
makeTypeForcer :: HsType -> Text
makeTypeForcer hst = let
(expr, _, constraints) = runRWS (makeTypeForcerM hst) () (0, 0)
nConstr = length constraints
core = bracket . T.concat $ (
(replicate nConstr $ T.pack "Prelude.const Prelude.$ ") ++ [T.pack "Prelude.id"])
typedCore = bracket . T.unwords $ (
[core, T.pack "::"] ++ intersperse (T.pack "->") (map snd constraints ++ [expr, expr]))
in bracket . T.unwords $ typedCore : map fst constraints
type TypeForcerM = RWS () [(Text, Text)] (Int, Int)
-- | Make a fresh 'x variable'; see above for what this means
makeXVar :: TypeForcerM Text
makeXVar = do (i0, i1) <- get
put (i0, i1+1)
return $ T.concat [T.pack "x", T.pack $ show i1]
-- | Make a fresh 'a variable'; see above for what this means
makeAVar :: TypeForcerM Text
makeAVar = do (i0, i1) <- get
put (i0+1, i1)
return $ T.concat [T.pack "a", T.pack $ show i0]
-- | Emit a constraint (like @('head', 'x a')@ in the example above)
emitConstraint :: (Text, Text) -> TypeForcerM ()
emitConstraint pair = tell [pair]
typeHeadForcerRenames :: HashMap (Text, Text) Text
typeHeadForcerRenames = HashMap.fromList $ map ((T.pack *** T.pack) *** T.pack) $ [
#if __GLASGOW_HASKELL__ >= 808
(("GHC.Integer.Type", "Integer"), ("GHC.Integer.Integer"))
#endif
]
-- | Make a forcer for a type constructor
makeTypeHeadForcer :: HsType -> TypeForcerM Text
makeTypeHeadForcer hst = case typeHead hst of
Right (Var v, k)
-> error "makeTypeForcer: expected monomorphic type"
Left tyc@(TyCon {tyConName = oname,
tyConLocation = (InExplicitModuleNamed mname)})
-> if isTupTyCon tyc || isListTyCon tyc || tyc == fnTyCon
then return oname
else case HashMap.lookup (mname, oname) typeHeadForcerRenames of
Nothing -> return $ T.concat [mname, T.pack ".", oname]
Just answer -> return answer
Left (TyCon {tyConLocation = (ImplicitlyVia tycLocObj False tycLocPath)})
-> do var <- makeXVar
pp <- processPath var tycLocPath
emitConstraint (tycLocObj, pp)
return var
Left (TyCon {tyConLocation = (ImplicitlyVia tycLocTyp True tycLocPath)})
-> do var <- makeXVar
pp <- processPath var tycLocPath
emitConstraint (T.concat [T.pack "(undefined :: ", tycLocTyp, T.pack ")"], pp)
return var
makeTypeForcerM :: HsType -> TypeForcerM Text
makeTypeForcerM hst =
do headImg <- makeTypeHeadForcer hst
tailImg <- mapM makeTypeForcerM $ typeTail hst
return . bracket . T.unwords $ headImg : tailImg
processPath :: Text -> [Int] -> TypeForcerM Text
processPath v [i] = do
tail <- sequence $ genericReplicate i makeAVar
return . bracket . T.unwords $ v : tail
processPath v (i:is@(_:_)) = do
hVar <- makeAVar
mid <- processPath v is
tail <- sequence $ genericReplicate i makeAVar
return . bracket . T.unwords $ hVar : mid : tail
|
tbarnetlamb/hyphen
|
hyphen/lowlevel_src/HsType.hs
|
gpl-2.0
| 20,899
| 0
| 25
| 4,991
| 4,460
| 2,350
| 2,110
| 235
| 8
|
{- |
Module : $Header$
Description : Normalization w.r.t. associativity and commutativity
Copyright : (c) Immanuel Normann, Uni Bremen 2007
License : GPLv2 or higher, see LICENSE.txt
Maintainer : inormann@jacobs-university.de
Stability : provisional
Portability : portable
-}
module Search.Common.ACStandardization where
import Search.Utils.List (updateListAndGetIndex)
type Arity = Int
type AC = Bool
data Symbol l v p = Logical l Arity AC
| Parameter p Arity AC
| Variable v Arity AC
| Lambda l Arity deriving (Show,Eq,Ord)
type State l v p = (([v], [p]), [Symbol l Int Int], [Symbol l v p])
acStandardize :: (Eq v, Ord l) =>
[Symbol l v p] -> ([Symbol l Int Int], [[Symbol l v p]])
acStandardize termStr = (acSkeleton,acParametrizations)
where initMorph = ([],[])
initPrefix = []
initSuffix = termStr
finalStates = pd [(initMorph,initPrefix,initSuffix)]
((_,acSkeleton,_):_) = finalStates
third (_,_,p) = p
acParametrizations = map third finalStates
pd states = case (head states)
of (_,_,[]) -> states
_ -> pd $ prun $ dist states
prun :: (Ord l) => [State l v p] -> [State l v p]
prun (s:states) = prun' [s] states
where prun' minStates [] = minStates
prun' (ms:minStates) (s:states) =
let compStates (_,(p:_),_) (_,(q:_),_) = compare p q
in case compStates ms s
of LT -> prun' (ms:minStates) states
EQ -> prun' (s:ms:minStates) states
GT -> prun' [s] states
dist :: (Eq v) => [State l v p] -> [State l v p]
dist states = concatMap dist1 states -- nub
where dist1 (morph,prefix,suffix) =
[succState (morph,prefix,suffix') | suffix' <- acPermuteSuffix suffix]
succState :: (Eq v) => State l v p -> State l v p
succState (morph,prefix,s:suffix) = (morph',(s':prefix),suffix)
where (morph',s') = updateMorph morph s
updateMorph :: (Eq v) =>
([v], [p]) -> Symbol l v p -> (([v], [p]), Symbol l Int Int)
updateMorph (vlst,plst) s =
case s
of (Lambda l ar) -> ((vlst,plst),(Lambda l ar))
(Logical l ar ac) -> ((vlst,plst),(Logical l ar ac))
(Parameter p ar ac) -> ((vlst,p:plst),
(Parameter ((length plst) + 1) ar ac))
(Variable v ar ac) -> ((vlst',plst),(Variable i ar ac))
where (vlst',i) = updateListAndGetIndex v vlst
acPermuteSuffix :: [Symbol l v p] -> [[Symbol l v p]]
acPermuteSuffix [] = error "ac permutation requires a non-empty list"
acPermuteSuffix suffix = if (isAC $ head suffix) then suffixes else [suffix]
where (args,rest) = subterms suffix
suffixes = map (((head suffix):) . (++rest)) (rotate args)
rotate :: [[a]] -> [[a]]
rotate lst = map concat $ map (per lst) [1..(length lst)]
where per lst n = ys++xs where (xs,ys) = splitAt n lst
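-- Illustration (added): rotate [[1],[2],[3]] == [[2,3,1],[3,1,2],[1,2,3]],
-- i.e. every cyclic shift of the blocks, each concatenated into one list.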
subterms :: [Symbol l v p] -> ([[Symbol l v p]], [Symbol l v p])
subterms lst = (reverse fss,bs)
where (fss,bs) = sublists (arity $ head lst) ([],tail lst)
sublists n (fs,bs) = (fs',bs')
where (fs',bs') = if n>=1 then sublists (n-1) (f:fs,bs'') else (fs,bs)
(f,bs'') = subterm bs
subterm :: [Symbol l v p] -> ([Symbol l v p], [Symbol l v p])
subterm lst = ((reverse front),back)
where (front,back) = sublist 0 ([],lst)
sublist k (ms,n:ns) =
if k>=0
then sublist ((arity n)+k-1) (n:ms,ns)
else (ms,n:ns)
arity s = case s
of (Lambda _ ar) -> ar
(Logical _ ar _) -> ar
(Parameter _ ar _) -> ar
(Variable _ ar _) -> ar
isAC s = case s
of (Lambda _ _) -> False
(Logical _ _ ac) -> ac
(Parameter _ _ ac) -> ac
(Variable _ _ ac) -> ac
{- test
lst = [3,2,0,1,0,2,3,0,0,0,2,0,0,2,0,0,8,9,0]
-}
{- todo:
move this to Search.Common.Normalization.hs
implement fromList
-}
data Formula c v = Const c [Formula c v] | Var v [Formula c v]
| Binder c [v] (Formula c v) deriving (Eq,Ord)
data Constant c = TrueAtom | FalseAtom | Not | And | Or | Imp | Eqv | Xor | Equal
| All | Some | LogicDependent c deriving (Eq,Ord)
data ScopeLabel a = Scope Int a deriving (Eq,Ord)
toList :: (Eq c) =>
Formula (Constant c) (ScopeLabel p) -> [Symbol (Constant c) (ScopeLabel p) p]
toList (Binder c vs f) = ((Lambda c (length vs)):(map toList' vs))++(toList f)
where toList' v = (Variable v 0 False)
-- problem: arity of var in binding position unknown! Assume arity 0 i.e. FOL.
toList (Const c args) = (Logical c (length args) ac):(concatMap toList args)
where ac = elem c [And,Or,Eqv,Xor,Equal]
toList (Var (Scope n v) args) =
case n of 0 -> (Parameter v (length args) False):(concatMap toList args)
_ -> (Variable (Scope n v) (length args) False):(concatMap toList args)
|
nevrenato/Hets_Fork
|
Search/Common/ACStandardization.hs
|
gpl-2.0
| 5,001
| 0
| 15
| 1,451
| 2,221
| 1,208
| 1,013
| 93
| 4
|
module Topology.MetricSpace where
import Notes
import Functions.Basics.Macro
import Functions.Distances.Terms
import Sets.Basics.Terms
import Topology.MetricSpace.Terms
metricSpaceS :: Note
metricSpaceS = section "Metric Spaces" $ do
pseudometricSpacesSS
metricSpacesSS
pseudometricSpacesSS :: Note
pseudometricSpacesSS = subsection "Pseudometric Spaces" $ do
pseudometricSpaceDefinition
metricSpacesSS :: Note
metricSpacesSS = subsection "Metric Spaces" $ do
metricSpaceDefinition
metricSpaceExamples
pseudometricSpaceDefinition :: Note
pseudometricSpaceDefinition = de $ do
lab pseudometricSpaceDefinitionLabel
s ["Let ", m topset, " be a ", set, and, m toppm, " a ", pseudometric_, on, m topset]
s ["The tuple ", m toppms, " is called a ", pseudometricSpace']
metricSpaceDefinition :: Note
metricSpaceDefinition = de $ do
lab metricSpaceDefinitionLabel
s ["Let ", m topset, " be a ", set, and, m topm, " a ", metric_, on , m topset]
s ["The tuple ", m topms, " is called a ", metricSpace']
metricSpaceExamples :: Note
metricSpaceExamples = do
ex $ do
s [m $ topms_ reals (func2 topm reals reals realsp a b (av $ a - b)), " is a ", metricSpace]
toprove
where
a = "a"
b = "b"
|
NorfairKing/the-notes
|
src/Topology/MetricSpace.hs
|
gpl-2.0
| 1,307
| 0
| 18
| 302
| 356
| 190
| 166
| 34
| 1
|
{-|
Module : Graphics.QML.Transient.Property
License : BSD3
Maintainer : marcin.jan.mrotek@email.com
Stability : experimental
Bindings to the property mechanism of QML, and functions to manipulate them.
'Prop's can be mapped and applied (think 'Functor' and 'Applicative') using 'Lens'es.
If you only need to read the value, you can use 'readonly' to obtain a 'PropRO', which is a regular 'Functor' and 'Applicative'.
-}
{-# LANGUAGE
DeriveFunctor
, RankNTypes
, TypeFamilies
#-}
module Graphics.QML.Transient.Property where
import Graphics.QML.Transient.Internal.Types
import Graphics.QML.Transient.Internal
import Control.Concurrent.STM
import Control.Concurrent.MVar
import Control.Lens
import Control.Monad.IO.Class
import Control.Monad.Reader
import Control.Monad.Transient hiding (spawn)
import Data.Foldable
import qualified Data.IntMap.Strict as Map
import Data.IORef
import Data.Typeable
import Graphics.QML
{-| A property is a mutable cell of memory, usually accessible to the QML side.
Getting and setting actions are kept separate: when a value is set, only actions using 'get' are run.
-}
data Prop a = Prop
  { get :: React a -- ^A callback that is run whenever its value changes.
, put :: a -> Qml () -- ^An action that changes the value.
  , peek :: IO a -- ^Look at the current value of the property without triggering the computations that run when it changes.
}
modifyProp :: Prop a -> (a -> a) -> Qml ()
-- ^Apply a function to the contents of a 'Prop', triggering any computations that depend on it.
modifyProp (Prop _ s p) f = s =<< f <$> liftIO p
mapProp :: Lens' a b -> Prop a -> Prop b
{-^ A 'Functor' 'fmap' analog for properties - a mapping from 'Lens'es into functions of properties.
As the 'Prop' type is both covariant and contravariant in its argument,
it cannot be mapped with a single function, only with a lens;
'peek' is used internally to retrieve the value of the property at the moment of setting.
The 'get' callback and the 'put' action are left uncoupled.
-}
mapProp l (Prop g s p) = Prop
{ get = view l <$> g
, put = \b -> s =<< (liftIO p <&> l .~ b)
, peek = view l <$> p
}
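{- Illustration (added; hypothetical usage): given @p :: Prop (Int, Text)@,
@mapProp _1 p :: Prop Int@ reads only the first component through 'get' and
'peek', while its 'put' overwrites just that component, using 'peek' on the
original property to supply the untouched second half of the pair. -}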
applyProp :: Prop (ALens' a b) -> Prop a -> Prop b
-- ^An 'Applicative' '<*>' analog.
applyProp (Prop gl _ pl) (Prop g s p) = Prop
{ get = (^#) <$> g <*> gl
, put = \b -> do
l <- liftIO pl
a <- liftIO p
s $ a & l #~ b
, peek = (^#) <$> p <*> pl
}
pairProp :: Prop a -> Prop b -> Prop (a,b)
{-^ A pairing combinator, provided as an alternative to 'applyProp'.
As curried 'Iso's are uncommon, using 'applyProp' with multiple 'Prop's can be cumbersome.
-}
pairProp (Prop g1 s1 p1) (Prop g2 s2 p2) = Prop { get = (,) <$> g1 <*> g2 , put = \(a,b) -> s1 a >> s2 b , peek = (,) <$> p1 <*> p2 }
tupleProp2 :: Prop a -> Prop b -> Prop (a,b)
-- ^Synonym to 'pairProp'.
tupleProp2 = pairProp
tupleProp3 :: Prop a -> Prop b -> Prop c -> Prop (a, b, c)
tupleProp3 (Prop g1 s1 p1) (Prop g2 s2 p2) (Prop g3 s3 p3) = Prop
{ get = (,,) <$> g1 <*> g2 <*> g3
, put = \(a,b,c) -> s1 a >> s2 b >> s3 c
, peek = (,,) <$> p1 <*> p2 <*> p3
}
tupleProp4 :: Prop a -> Prop b -> Prop c -> Prop d -> Prop (a, b, c, d)
tupleProp4 (Prop g1 s1 p1) (Prop g2 s2 p2) (Prop g3 s3 p3) (Prop g4 s4 p4) = Prop
{ get = (,,,) <$> g1 <*> g2 <*> g3 <*> g4
, put = \(a,b,c,d) -> s1 a >> s2 b >> s3 c >> s4 d
, peek = (,,,) <$> p1 <*> p2 <*> p3 <*> p4
}
tupleProp5 :: Prop a -> Prop b -> Prop c -> Prop d -> Prop e -> Prop (a, b, c, d, e)
tupleProp5 (Prop g1 s1 p1) (Prop g2 s2 p2) (Prop g3 s3 p3) (Prop g4 s4 p4) (Prop g5 s5 p5) = Prop
{ get = (,,,,) <$> g1 <*> g2 <*> g3 <*> g4 <*> g5
, put = \(a,b,c,d,e) -> s1 a >> s2 b >> s3 c >> s4 d >> s5 e
, peek = (,,,,) <$> p1 <*> p2 <*> p3 <*> p4 <*> p5
}
tupleProp6 :: Prop a -> Prop b -> Prop c -> Prop d -> Prop e -> Prop f -> Prop (a, b, c, d, e, f)
tupleProp6 (Prop g1 s1 p1) (Prop g2 s2 p2) (Prop g3 s3 p3) (Prop g4 s4 p4) (Prop g5 s5 p5) (Prop g6 s6 p6) = Prop
{ get = (,,,,,) <$> g1 <*> g2 <*> g3 <*> g4 <*> g5 <*> g6
, put = \(a,b,c,d,e,f) -> s1 a >> s2 b >> s3 c >> s4 d >> s5 e >> s6 f
, peek = (,,,,,) <$> p1 <*> p2 <*> p3 <*> p4 <*> p5 <*> p6
}
data PropRO a = PropRO { getRO :: React a , peekRO :: IO a }
deriving Functor
instance Applicative PropRO where
pure = PropRO <$> pure <*> pure
PropRO fg fp <*> PropRO xg xp = PropRO (fg <*> xg) (fp <*> xp)
readonly :: Prop a -> PropRO a
-- ^Any property can be referenced as if it were read-only.
readonly (Prop g _ p) = PropRO g p
pureProp :: a -> Build (Prop a)
{-^ Analogous to 'pure'.
-}
pureProp a = do
store'ir <- liftIO $ newIORef a
handlers'tv <- liftIO $ newTVarIO Map.empty
let addHandler = do
mv <- liftIO $ do
mv <- newMVar ()
atomically $ do
handlers <- readTVar handlers'tv
let handlerN = length handlers
modifyTVar handlers'tv
$ Map.insert handlerN mv
pure ()
pure mv
waitEvents $ takeMVar mv
liftIO $ readIORef store'ir
write val = do
atomicWriteIORef store'ir val
handlers <- readTVarIO handlers'tv
for_ handlers $ \mv -> putMVar mv ()
pure $ Prop (React addHandler) (liftIO.write) (readIORef store'ir)
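{- Design note (added): 'pureProp' keeps its value in an 'IORef' and wakes
each registered listener through that listener's own 'MVar': 'put' writes
the IORef and signals every MVar, after which each waiting 'get'
computation re-reads the stored value. Unlike 'propertyRO' and 'propertyRW'
below, no QML member or signal key is created, so this property is not
visible from the QML side. -}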
propertyConst
:: ( Typeable a
, Marshal a
, CanReturnTo a ~ Yes
)
=> String -> a -> Build ()
-- ^A constant field of a Qml object.
propertyConst name =
addMember
. defPropertyConst' name
. const . return
propertySelf
:: String -> Build ()
-- ^A self referencing field, useful for connecting signals in QML.
propertySelf name = addMember $ defPropertyConst' name return
propertyRO
:: ( Typeable a
, Marshal a
, CanReturnTo a ~ Yes
)
=> String -> a -> Build (Prop a)
-- ^A field that is read-only from the QML side.
propertyRO name initial = do
handlers'tv <- liftIO $ newTVarIO Map.empty
skey <- liftIO newSignalKey
store'ir <- liftIO $ newIORef initial
let write val = do
atomicWriteIORef store'ir val
handlers <- readTVarIO handlers'tv
traverse_ ($ val) handlers
callback i action =
atomically
. modifyTVar' handlers'tv
$ Map.insert i action
putQml t a = liftIO $ do
write a
fireSignal skey t
addHandler = do
handlerN <- liftIO . atomically $ do
handlers <- readTVar handlers'tv
let handlerN = length handlers
modifyTVar handlers'tv
$ Map.insert handlerN (const $ pure ())
pure handlerN
liftTrans
. react (callback handlerN)
$ return ()
addMember $ defPropertySigRO' name skey (const $ readIORef store'ir)
return
$ Prop (React addHandler)
( \a -> Qml . ReaderT $ \t -> putQml t a )
( readIORef store'ir )
propertyRW
:: ( Typeable a
, Marshal a
, CanReturnTo a ~ Yes
, CanGetFrom a ~ Yes
)
=> String -> a -> Build (Prop a)
-- ^A field that is readable and writable from both sides.
propertyRW name initial = do
handlers'tv <- liftIO $ newTVarIO Map.empty
skey <- liftIO newSignalKey
store'ir <- liftIO $ newIORef initial
let write val = do
atomicWriteIORef store'ir val
handlers <- readTVarIO handlers'tv
traverse_ ($ val) handlers
callback i action =
atomically
. modifyTVar' handlers'tv
$ Map.insert i action
putQml t a = liftIO $ do
write a
fireSignal skey t
addHandler = do
handlerN <- liftIO . atomically $ do
handlers <- readTVar handlers'tv
let handlerN = length handlers
modifyTVar handlers'tv
$ Map.insert handlerN (const $ pure ())
pure handlerN
liftTrans
. react (callback handlerN)
$ return ()
addMember $ defPropertySigRW' name skey (const $ readIORef store'ir) (const write)
return
$ Prop (React addHandler)
( \a -> Qml . ReaderT $ \t -> putQml t a )
( readIORef store'ir )
|
marcinmrotek/hsqml-transient
|
src/Graphics/QML/Transient/Property.hs
|
gpl-3.0
| 8,049
| 0
| 22
| 2,194
| 2,754
| 1,393
| 1,361
| 172
| 1
|
module Syntax.Util where
import Common
import Syntax.Types
namespace :: BindName -> Namespace
namespace (BindName _ (x : _)) | isUpper x = NsTys
namespace _ = NsValues
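-- Note (added): 'namespace' puts names that start with an upper-case letter
-- in the type namespace and everything else in the value namespace.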
allowInCycles :: Decl t -> Bool
allowInCycles (BindLocal _ _) = True
allowInCycles (BindVal _ _) = True
allowInCycles (Data _ _ _ _ _) = True
allowInCycles _ = False
binderName :: Binder t -> BindName
binderName (Binder n _) = n
bindingName :: Binding t -> BindName
bindingName (Binding b _) = binderName b
class Binds a where
binds :: a -> [BindName]
instance Binds (Binder t) where
binds = (: []) . binderName
instance Binds (Binding t) where
binds = (: []) . bindingName
instance Binds (Decl t) where
binds (Constraint _ _ _ _) = []
binds (ValField _ n _) = [n]
binds (ModField _ n _) = [n]
binds (TyField _ n _) = [n]
binds (BindLocal _ b) = [bindingName b]
binds (BindMod _ b) = [bindingName b]
binds (BindSig _ b) = [bindingName b]
binds (BindVal _ b) = [bindingName b]
binds (BindTy _ b) = [bindingName b]
binds (Infix _ _ _ _) = []
binds (Data _ _ n _ _) = [n]
instance Binds Pat where
binds (PatParams _ ps) = ps >>= binds
binds (PatBind _ n) = [n]
binds (PatApp _ _ ps) = ps >>= binds
binds (PatLit _ _) = []
binds (PatIgnore _) = []
|
ktvoelker/FLang
|
src/Syntax/Util.hs
|
gpl-3.0
| 1,262
| 0
| 9
| 288
| 634
| 324
| 310
| 39
| 1
|
{-
/****************************************************************************
** NgnTrafficParser 2.0
** Copyright (C) 2011 Granin A.S.
** Contact: Granin A.S. (graninas@gmail.com)
**
** This file is part of NgnTrafficParser 2.0.
**
** GNU General Public License Usage
** This file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL3 included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/licenses/gpl.html.
**
** If you have questions regarding the use of this file, please contact
** author (graninas@gmail.com).
**
****************************************************************************/
-}
module Tools where
import qualified System.Time as T (TimeDiff(..), CalendarTime(..), ClockTime(..), addToClockTime, toClockTime)
import Data.Char (toUpper, toLower)
import Types
firstMonthDay :: T.CalendarTime -> T.CalendarTime
firstMonthDay ct = ct {T.ctDay = 1}
prevMonthBegin :: T.CalendarTime -> T.ClockTime
prevMonthBegin curMonth = T.addToClockTime (T.TimeDiff 0 (-1) 0 0 0 0 0) (T.toClockTime $ firstMonthDay curMonth)
toYearMonth :: T.CalendarTime -> YearMonth
toYearMonth (T.CalendarTime y m _ _ _ _ _ _ _ _ _ _)= (y, fromEnum m + 1)
capitalize :: String -> String
capitalize [] = []
capitalize (l:ls) = toUpper l : map toLower ls
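-- Illustration (added): capitalize "hELLO" == "Hello", and capitalize "" == "".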
|
graninas/Haskell-Algorithms
|
Programs/NgnTraffic/Tools.hs
|
gpl-3.0
| 1,536
| 0
| 9
| 256
| 264
| 146
| 118
| 13
| 1
|
----------------------------------------------------------------------
-- |
-- Module : Main
-- Maintainer : Markus Forsberg
-- Stability : (stability)
-- Portability : (portability)
--
-- > CVS $Date: 2006/06/05 15:25:00 $
-- > CVS $Author: markus $
-- > CVS $Revision: 1.1 $
--
-----------------------------------------------------------------------------
module Command where
import System.Console.GetOpt
import Maybe(fromMaybe)
import List(isSuffixOf)
import System(getProgName)
import Dict.ErrM
import IO
import UTF8
import Char
import General
import Tokenize(tokens,norm)
import Dictionary
import Frontend
import List(intersperse)
import qualified Data.Map as Map
-- import Frontend
{- |Does the filename end with the suffix .dict? -}
is_dictionary :: FilePath -> Bool
is_dictionary = isSuffixOf ".dict"
{- |Does the filename end with the suffix .lexicon? -}
is_lexicon :: FilePath -> Bool
is_lexicon = isSuffixOf ".lexicon"
output :: [Flag] -> Maybe FilePath
output xs = case [f | Output f <- xs] of
[f] -> Just f
_ -> Nothing
printer :: [Flag] -> Maybe String
printer xs =
case [p | (Print p) <- xs] of
(x:_) -> return x
_ -> Nothing
apply_encoding l flags d = case [x | Encoding x <- flags] of
[] -> d
(x:_) -> case Map.lookup x (encoding l) of
Nothing -> error $ "Unknown encoding: " ++ x
Just t -> transform_dictionary t d
output_write :: [Flag] -> (String -> IO())
output_write xs = case output xs of
Nothing -> putStr . encodeUTF8
Just f -> writeFile f . encodeUTF8
dictionary_needed :: [Flag] -> Bool
dictionary_needed [] = True
dictionary_needed xs = not $ or [elem x nodict | x <- xs]
where nodict = Infl : Help : Version : [Print s | s <- ["paradigms","paradigms_compact", "paradigms_latex","tagset", "core","extract","compound"]]
is_mode :: [Flag] -> Bool
is_mode xs = case [f | Mode f <- xs] of
[_] -> True
_ -> False
--is_fullform :: [Flag] -> Bool
--is_fullform xs = False -- case [f | Fullform f <- xs] of
-- [_] -> True
-- _ -> False
is_help :: [Flag] -> Bool
is_help = elem Help
is_version :: [Flag] -> Bool
is_version = elem Version
is_compound :: [Flag] -> Bool
is_compound xs = case [f | Compound f <- xs] of
[_] -> True
_ -> False
data Comp = All | Min | Max | None | Unknown
deriving Eq
pr_comp :: Comp -> String
pr_comp c = case c of
All -> "all"
Min -> "min"
Max -> "max"
None -> "none"
--Length n -> "minlen=" ++ show n
Unknown -> "unknown (defaults to all)"
get_compound :: [Flag] -> Comp
get_compound xs = case [f | Compound f <- xs] of
(f@("none"):_) -> None
(f@("all"):_) -> All
(f@("min"):_) -> Min
(f@("max"):_) -> Max
-- (f@('m':'i':'n':'l':'e':'n':'=':n):_) | all isDigit n -> Length (read n)
_ -> Unknown
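-- Illustration (added): get_compound [Compound "min"] == Min, while a
-- missing or unrecognised compound flag falls through to Unknown.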
is_quality :: [Flag] -> Bool
is_quality xs = case [ x | Quality x <- xs ] of
[_] -> True
_ -> False
is_undef :: [Flag] -> Bool
is_undef = elem (Quality "undef")
is_argc :: [Flag] -> Bool
is_argc = elem (Quality "argc")
is_unused :: [Flag] -> Bool
is_unused = elem (Quality "pop")
is_duplicated :: [Flag] -> Bool
is_duplicated = elem (Quality "dup")
is_dict :: [Flag] -> Bool
is_dict = elem (Quality "dict")
is_all :: [Flag] -> Bool
is_all = elem (Quality "all")
is_test :: [Flag] -> Bool
is_test = elem (Quality "test")
is_net :: [Flag] -> Bool
is_net fs = not $ null [x | Net x <- fs]
get_port :: [Flag] -> Maybe Int
get_port fs = case [x | Net x <- fs] of
(x:_) | all isDigit x -> return $ read x
_ -> Nothing
get_quality :: [Flag] -> String
get_quality xs = case [ x | Quality x <- xs ] of
(x:_) -> x
invalid_quality :: [Flag] -> Bool
invalid_quality xs = or [not (elem x ["undef", "pop","dup","dict","argc","all","test"]) | Quality x <- xs ]
get_mode :: [Flag] -> String
get_mode xs = case [f | Mode f <- xs] of
(f:_) -> f
is_printer :: [Flag] -> Bool
is_printer xs = case [f | Print f <- xs] of
[_] -> True
_ -> False
get_length :: [Flag] -> Int
get_length xs = case [n | Length n <- xs] of
(n:_) | all isDigit n -> read n
_ -> 0
get_number :: [Flag] -> Maybe Int
get_number fs = case [n | Number n <- fs] of
(n:_) | all isDigit n -> Just $ read n
_ -> Nothing
get_tokenizer :: (String -> [Tok]) -> [Flag] -> (String -> [Tok])
get_tokenizer tokf fs = case get_tokenizer_name fs of
"words" -> (map W . words)
"lines" -> (map W . lines)
"norm" -> norm . lines
"default" -> tokens
x -> error $ "unknown tokenizer: " ++ x
invalid_tokenizer :: [Flag] -> Bool
invalid_tokenizer fs
| elem (get_tokenizer_name fs) ["words","norm", "lines","default"] = False
| otherwise = True
get_tokenizer_name :: [Flag] -> String
get_tokenizer_name fs = case [t | (Tokenizer t) <- fs] of
(t:_) -> t
_ -> "default"
{-|Data type for the Command line arguments. -}
data Flag =
Help |
Synth |
Quality String |
Compound String |
Encoding String |
Infl |
Length String |
DupID |
Tag |
Number String |
Net String |
Version |
Tokenizer String |
Mode String |
Print String |
Output String
deriving (Show,Eq)
{- |Lists all possible arguments and their explainations -}
options :: Language l => l -> [OptDescr Flag]
options l =
[
Option ['i'] ["inflection"] (NoArg Infl) "run inflection engine"
, Option ['s'] ["synthesiser"] (NoArg Synth) "enter synthesizer mode"
, Option ['a'] ["analysis"] (NoArg Tag) "pos tagging"
, Option ['c'] ["compound"] (ReqArg Compound "COMPOUND") "activate compound analysis (none,min,max,all)"
, Option ['l'] ["length"] (ReqArg Length "MINLENGTH") "the minimum length of word forms in compounds"
, Option ['n'] ["number"] (ReqArg Number "MAX") "the maximum number of analyses"
, Option ['t'] ["tokenizer"] (ReqArg Tokenizer "TOKENIZER") "select mode (default, words, lines, norm)"
, Option ['m'] ["mode"] (ReqArg Mode "MODE") "select mode (fail, lexfail, nocomp, lexcomp)"
, Option ['p'] ["printer"] (ReqArg Print "PRINTER") "print using PRINTER (core, paradigms, paradigms_compact, paradigms_latex, compound, tagset, words, lex, tables, extract, gf, xml, sfst, sfstlex, sfstheader, lexc, xfst, sql, hundict, hunaffix, lmf)"
, Option ['e'] ["encoding"] (ReqArg Encoding "ENCODING") ("select another morphosyntactic encoding (" ++ (concat (intersperse ", " (Map.keys (encoding l)))) ++ ")")
, Option ['q'] ["quality"] (ReqArg Quality "QUALITY") "run tests (all, test, dup, undef, pop, argc, dict)"
--, Option ['o'] ["output"] (ReqArg Output "FILE") "output printer content to FILE"
-- , Option ['g'] ["go"] (ReqArg Net "PORTNUMBER") "Go online with FM server on port PORTNUMBER"
, Option ['h'] ["help"] (NoArg Help) "display this message"
, Option ['v'] ["version"] (NoArg Version) "display version information"
]
{-
outp = Output . fromMaybe "stdout"
inp = Input . fromMaybe "stdin"
-}
{- |Collect the valid arguments. Raises an IO error if it fails. -}
compilerOpts :: Language l => l -> [String] -> IO ([Flag], [String])
compilerOpts l argv =
case getOpt Permute (options l) argv of
(o,xs,[] ) -> return (o,xs)
(_,_,errs) ->
do head <- header
ioError (userError (concat errs ++ usageInfo head (options l)))
header :: IO String
header= do prg <- getProgName
return $ "Usage: " ++ prg ++ " [OPTION...] dictionary_file(s)..."
help :: Language l => l -> IO String
help l = do head <- header
return $ usageInfo head (options l)
retrieve :: Language l => l -> [String] -> IO (Either String ([Flag],[FilePath]))
retrieve l xs = do res <- try (compilerOpts l xs)
case res of
Left io_err -> return $ Left $ ioeGetErrorString io_err
Right res -> return $ Right res
|
isido/functional-morphology-for-koine-greek
|
lib/Command.hs
|
gpl-3.0
| 9,116
| 4
| 18
| 3,063
| 2,670
| 1,424
| 1,246
| 180
| 5
|