code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
--
-- 14.DataComplex.hs
-- R_Functional_Programming
--
-- Created by RocKK on 2/13/14.
-- Copyright (c) 2014 RocKK.
-- All rights reserved.
--
-- Redistribution and use in source and binary forms are permitted
-- provided that the above copyright notice and this paragraph are
-- duplicated in all such forms and that any documentation,
-- advertising materials, and other materials related to such
-- distribution and use acknowledge that the software was developed
-- by the RocKK. The name of the
-- RocKK may not be used to endorse or promote products derived
-- from this software without specific prior written permission.
-- THIS SOFTWARE IS PROVIDED ''AS IS'' AND WITHOUT ANY EXPRESS OR
-- IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
import Data.Complex
-- | A sample complex number: 3 + 4i.
number = 3 :+ 4
main = do
print number
print $ realPart number
print $ imagPart number
print $ polar number
print $ magnitude number
print $ phase number
print $ conjugate number | RocKK-MD/R_Functional_Programming | Sources/14.DataComplex.hs | bsd-2-clause | 1,078 | 2 | 9 | 215 | 112 | 56 | 56 | 10 | 1 |
-- NOTE(review): this module looks like a parser/printer test fixture for
-- WARNING/DEPRECATED pragma layouts (mixed-case pragma names, multi-line
-- message lists) -- confirm before reformatting, as exact layout may matter.
module Warning
{-# WARNINg ["This is a module warning",
             "multi-line"] #-}
  where

{-# Warning foo , bar
        ["This is a multi-line",
         "deprecation message",
         "for foo"] #-}

-- | Carries the WARNING pragma declared above.
foo :: Int
foo = 4

bar :: Char
bar = 'c'
| mpickering/ghc-exactprint | tests/examples/ghc710/Warning.hs | bsd-3-clause | 257 | 0 | 4 | 89 | 26 | 17 | 9 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-|
Module : Idris.ASTUtils
Description : This implements just a few basic lens-like concepts to ease state updates. Similar to fclabels in approach, just without the extra dependency.
Copyright :
License : BSD3
Maintainer : The Idris Community.
This implements just a few basic lens-like concepts to ease state updates.
Similar to fclabels in approach, just without the extra dependency.
We don't include an explicit export list
because everything here is meant to be exported.
Short synopsis:
---------------
@
f :: Idris ()
f = do
-- these two steps:
detaggable <- fgetState (opt_detaggable . ist_optimisation typeName)
fputState (opt_detaggable . ist_optimisation typeName) (not detaggable)
-- are equivalent to:
fmodifyState (opt_detaggable . ist_optimisation typeName) not
-- of course, the long accessor can be put in a variable;
-- everything is first-class
let detag n = opt_detaggable . ist_optimisation n
fputState (detag n1) True
fputState (detag n2) False
-- Note that all these operations handle missing items consistently
-- and transparently, as prescribed by the default values included
-- in the definitions of the ist_* functions.
--
-- Especially, it's no longer necessary to have initial values of
-- data structures copied (possibly inconsistently) all over the compiler.
@
-}
module Idris.ASTUtils(
Field(), cg_usedpos, ctxt_lookup, fgetState, fmodifyState
, fputState, idris_fixities, ist_callgraph, ist_optimisation
, known_interfaces, known_terms, opt_detaggable, opt_inaccessible
, opt_forceable, opts_idrisCmdline, repl_definitions
) where
import Idris.AbsSyntaxTree
import Idris.Core.Evaluate
import Idris.Core.TT
import Prelude hiding (id, (.))
import Control.Applicative
import Control.Category
import Control.Monad.State.Class
import Data.Maybe
-- | A minimal lens: a getter paired with a setter for a field @fld@
-- inside a record @rec@. Fields compose via the 'Category' instance
-- below ('id', '.').
data Field rec fld = Field
    { fget :: rec -> fld          -- ^ read the focused field
    , fset :: fld -> rec -> rec   -- ^ write the focused field
    }

-- | Apply a function to the field focused by the given lens.
fmodify :: Field rec fld -> (fld -> fld) -> rec -> rec
fmodify field f x = fset field (f $ fget field x) x

instance Category Field where
    id = Field id const
    -- Compose outer lens (g2, s2) after inner lens (g1, s1):
    -- read with g2 . g1; write by updating the inner record and
    -- storing it back into the outer one.
    Field g2 s2 . Field g1 s1 = Field (g2 . g1) (\v2 x1 -> s1 (s2 v2 $ g1 x1) x1)

-- | Read the focused field from the monadic state.
fgetState :: MonadState s m => Field s a -> m a
fgetState field = gets $ fget field

-- | Overwrite the focused field in the monadic state.
fputState :: MonadState s m => Field s a -> a -> m ()
fputState field x = fmodifyState field (const x)

-- | Modify the focused field in the monadic state.
fmodifyState :: MonadState s m => Field s a -> (a -> a) -> m ()
fmodifyState field f = modify $ fmodify field f
-- | Exact-name context lookup; uses Nothing for deleted values
-- (read+write!).
--
-- Reading a non-existing value yields Nothing,
-- writing Nothing deletes the value (if it existed).
ctxt_lookup :: Name -> Field (Ctxt a) (Maybe a)
ctxt_lookup n = Field
    { fget = lookupCtxtExact n
      -- writing Just inserts/overwrites; writing Nothing deletes
    , fset = \newVal -> case newVal of
        Just x  -> addDef n x
        Nothing -> deleteDefExact n
    }
-- Maybe-lens with a default value.
-- | Lens from @Maybe a@ to @a@: reading substitutes the supplied
-- default for 'Nothing'; writing always stores 'Just' the new value.
maybe_default :: a -> Field (Maybe a) a
maybe_default dflt = Field
    { fget = fromMaybe dflt
    , fset = \new _old -> Just new
    }
-----------------------------------
-- Individual records and fields --
-----------------------------------
--
-- These could probably be generated; let's use lazy addition for now.
--
-- OptInfo
----------
-- | The optimisation record for the given (exact) name.
-- Missing entries read as the default (no optimisations recorded);
-- the default is materialised in the context on the first write.
ist_optimisation :: Name -> Field IState OptInfo
ist_optimisation n =
    maybe_default Optimise
        { inaccessible = []
        , detaggable = False
        , forceable = []
        }
    . ctxt_lookup n
    . Field idris_optimisation (\v ist -> ist{ idris_optimisation = v })

-- | Three fields of the optimisation record.
opt_inaccessible :: Field OptInfo [(Int, Name)]
opt_inaccessible = Field inaccessible (\v opt -> opt{ inaccessible = v })

opt_detaggable :: Field OptInfo Bool
opt_detaggable = Field detaggable (\v opt -> opt{ detaggable = v })

opt_forceable :: Field OptInfo [Int]
opt_forceable = Field forceable (\v opt -> opt{ forceable = v })

-- | Callgraph record for the given (exact) name; missing entries read
-- as an empty callgraph.
ist_callgraph :: Name -> Field IState CGInfo
ist_callgraph n =
    maybe_default CGInfo
        { calls = [], allCalls = Nothing, scg = [], usedpos = []
        }
    . ctxt_lookup n
    . Field idris_callgraph (\v ist -> ist{ idris_callgraph = v })

cg_usedpos :: Field CGInfo [(Int, [UsageReason])]
cg_usedpos = Field usedpos (\v cg -> cg{ usedpos = v })

-- | Commandline flags.
opts_idrisCmdline :: Field IState [Opt]
opts_idrisCmdline =
    Field opt_cmdline (\v opts -> opts{ opt_cmdline = v })
    . Field idris_options (\v ist -> ist{ idris_options = v })
-- | TT Context
--
-- This has a terrible name, but I'm not sure of a better one that
-- isn't confusingly close to tt_ctxt
known_terms :: Field IState (Ctxt (Def, RigCount, Injectivity, Accessibility, Totality, MetaInformation))
known_terms = Field (definitions . tt_ctxt)
                    -- write back through the nested TT context record
                    (\v state -> state {tt_ctxt = (tt_ctxt state) {definitions = v}})
-- | Interface info records, indexed by name.
--
-- BUG FIX: the setter previously wrote back the *old* map
-- (@idris_interfaces state@) instead of the new value @v@, so any write
-- through this lens was silently dropped. Now matches the pattern used
-- by every other field lens in this module.
known_interfaces :: Field IState (Ctxt InterfaceInfo)
known_interfaces = Field idris_interfaces (\v state -> state {idris_interfaces = v})
-- | Names defined at the repl.
repl_definitions :: Field IState [Name]
repl_definitions = Field idris_repl_defs (\v state -> state {idris_repl_defs = v})

-- | Fixity declarations in an IState.
idris_fixities :: Field IState [FixDecl]
idris_fixities = Field idris_infixes (\v state -> state {idris_infixes = v})
| jmitchell/Idris-dev | src/Idris/ASTUtils.hs | bsd-3-clause | 5,482 | 0 | 12 | 1,120 | 1,223 | 678 | 545 | 74 | 2 |
module A5 where
import B5
import C5
import D5
-- | Check that summing squares over the whole fringe agrees with the
-- sum over the B5 and C5 fringes taken separately.
main :: Tree Int ->Bool
main t = isSame whole (fromB + fromC)
  where
    whole = sumSquares (fringe t)
    fromB = sumSquares (B5.myFringe t)
    fromC = sumSquares (C5.myFringe t)
| kmate/HaRe | old/testing/renaming/A5.hs | bsd-3-clause | 186 | 0 | 11 | 46 | 79 | 41 | 38 | 7 | 1 |
{-# LANGUAGE DeriveDataTypeable, GeneralizedNewtypeDeriving, TemplateHaskell, NamedFieldPuns #-}
module Distribution.Server.Users.Users (
-- * Users type
Users,
-- * Construction
emptyUsers,
addUserEnabled,
addUserDisabled,
addUser,
insertUserAccount,
-- * Modification
deleteUser,
setUserEnabledStatus,
setUserAuth,
setUserName,
-- * Lookup
lookupUserId,
lookupUserName,
-- ** Lookup utils
userIdToName,
-- * Enumeration
enumerateAllUsers,
enumerateActiveUsers,
-- * Error codes
ErrUserNameClash(..),
ErrUserIdClash(..),
ErrNoSuchUserId(..),
ErrDeletedUser(..),
) where
import Distribution.Server.Users.Types
import Distribution.Server.Framework.Instances ()
import Distribution.Server.Framework.MemSize
import Control.Monad (guard)
import Data.Maybe (fromMaybe)
import Data.List (sort, group)
import qualified Data.Map as Map
import qualified Data.IntMap as IntMap
import Data.SafeCopy (base, deriveSafeCopy)
import Data.Typeable (Typeable)
import Control.Exception (assert)
-- | The entire collection of users. Manages the mapping between 'UserName'
-- and 'UserId'.
--
data Users = Users {
    -- | A map from UserId to UserInfo
    userIdMap :: !(IntMap.IntMap UserInfo),
    -- | A map from active UserNames to the UserId for that name.
    -- Deleted accounts are absent here, freeing their names for re-use.
    userNameMap :: !(Map.Map UserName UserId),
    -- | The next available UserId; ids are allocated sequentially and
    -- never re-used (see the comments below 'invariant').
    nextId :: !UserId
  }
  deriving (Eq, Typeable, Show)

instance MemSize Users where
    memSize (Users a b c) = memSize3 a b c

$(deriveSafeCopy 0 'base ''Users)
-- | Wrap a 'Users' value in an assertion of the structural 'invariant'.
-- Every mutating operation below passes its result through this.
checkinvariant :: Users -> Users
checkinvariant users = assert (invariant users) users

-- | Structural consistency of 'Users'; see the numbered conditions inline.
invariant :: Users -> Bool
invariant Users{userIdMap, userNameMap, nextId} =
       nextIdIsRight
    && noUserNameOverlap
    && userNameMapComplete
    && userNameMapConsistent
  where
    nextIdIsRight =
      -- 1) the next id should be 0 if the userIdMap is empty
      --    or one bigger than the maximum allocated id
      let UserId nextid = nextId
      in nextid == case IntMap.maxViewWithKey userIdMap of
                     Nothing                     -> 0
                     Just ((maxAllocatedId,_),_) -> maxAllocatedId + 1
    noUserNameOverlap =
      -- 2) there must be no overlap in the user names of active accounts
      --    (active are enabled or disabled but not deleted)
      all (\g -> length g == 1)
      . group . sort
      . map userName . filter (isActiveAccount . userStatus)
      . IntMap.elems
      $ userIdMap
    userNameMapComplete =
      -- 3) the userNameMap must map every active user name to the id of the
      --    corresponding user info
      Map.keys userNameMap
        == sort [ userName uinfo
                | uinfo <- IntMap.elems userIdMap
                , isActiveAccount (userStatus uinfo)]
    userNameMapConsistent =
      -- 4) every name-map entry must point at an existing user whose
      --    recorded name matches the key
      and [ case IntMap.lookup uid userIdMap of
              Nothing    -> False
              Just uinfo -> userName uinfo == uname
          | (uname, UserId uid) <- Map.toList userNameMap ]
-- the point is, user names can be recycled but user ids never are
-- this simplifies things because other user groups in the system do not
-- need to be adjusted when an account is enabled/disabled/deleted
-- it also allows us to track historical info, like name of uploader
-- even if that user name has been recycled, the user ids will be distinct.
-- | The empty user database: no accounts, ids start at 0.
emptyUsers :: Users
emptyUsers = Users {
    userIdMap   = IntMap.empty,
    userNameMap = Map.empty,
    nextId      = UserId 0
  }

-- Error codes for the update operations below; they are given SafeCopy
-- instances so they can appear in persisted state/transactions.
data ErrUserNameClash = ErrUserNameClash deriving Typeable
data ErrUserIdClash   = ErrUserIdClash   deriving Typeable
data ErrNoSuchUserId  = ErrNoSuchUserId  deriving Typeable
data ErrDeletedUser   = ErrDeletedUser   deriving Typeable

$(deriveSafeCopy 0 'base ''ErrUserNameClash)
$(deriveSafeCopy 0 'base ''ErrUserIdClash)
$(deriveSafeCopy 0 'base ''ErrNoSuchUserId)
$(deriveSafeCopy 0 'base ''ErrDeletedUser)
-- | Turn a 'Maybe' into an 'Either', tagging 'Nothing' with the given
-- error value.
(?!) :: Maybe a -> e -> Either e a
Nothing ?! e = Left e
Just a  ?! _ = Right a
-- | Fetch the account info for a user id, if such an account exists.
lookupUserId :: UserId -> Users -> Maybe UserInfo
lookupUserId (UserId uid) users = IntMap.lookup uid (userIdMap users)

-- | Resolve an active user name to its id and account info.
--
-- Relies on the invariant that every name in the name map points at an
-- existing entry in the id map.
lookupUserName :: UserName -> Users -> Maybe (UserId, UserInfo)
lookupUserName uname users =
    fmap withInfo (Map.lookup uname (userNameMap users))
  where
    withInfo uid = (uid, fromMaybe impossible (lookupUserId uid users))
    impossible   = error "lookupUserName: invariant violation"
-- | Convert a 'UserId' to a 'UserName'. If the user id doesn't exist,
-- an ugly placeholder is used instead.
--
-- | Convert a 'UserId' to a 'UserName'. If the user id doesn't exist,
-- an ugly placeholder name is produced instead of failing.
userIdToName :: Users -> UserId -> UserName
userIdToName users userId@(UserId idNum) =
    maybe placeholder userName (lookupUserId userId users)
  where
    placeholder = UserName ("~id#" ++ show idNum)
-- | Add a new user account, in the enabled state.
--
addUserEnabled :: UserName -> UserAuth -> Users
               -> Either ErrUserNameClash (Users, UserId)
addUserEnabled name auth = addUser name (AccountEnabled auth)

-- | Add a new user account, in the disabled state and with no password.
--
addUserDisabled :: UserName -> Users
                -> Either ErrUserNameClash (Users, UserId)
addUserDisabled name = addUser name (AccountDisabled Nothing)

-- | Add a new user account with the given user status.
--
-- Fails if the name is already owned by an active account; otherwise
-- allocates the next fresh 'UserId'.
addUser :: UserName -> UserStatus -> Users -> Either ErrUserNameClash (Users, UserId)
addUser name status users =
    case Map.lookup name (userNameMap users) of
      Just _  -> Left ErrUserNameClash
      Nothing -> users' `seq` Right (users', userid)
  where
    userid@(UserId uid) = nextId users
    uinfo = UserInfo {
              userName = name,
              userStatus = status
            }
    -- force through checkinvariant so a violation is raised here,
    -- not at some later use site
    users' = checkinvariant users {
               userIdMap   = IntMap.insert uid uinfo (userIdMap users),
               userNameMap = Map.insert name userid (userNameMap users),
               nextId      = UserId (uid + 1)
             }
-- | Insert pre-existing user info. This should only be used for constructing
-- a user db manually or from a backup.
--
insertUserAccount :: UserId -> UserInfo -> Users
                  -> Either (Either ErrUserIdClash ErrUserNameClash) Users
insertUserAccount userId@(UserId uid) uinfo users = do
    guard (not userIdInUse)                    ?! Left ErrUserIdClash
    guard (not userNameInUse || isUserDeleted) ?! Right ErrUserNameClash
    return $! checkinvariant users {
      userIdMap   = IntMap.insert uid uinfo (userIdMap users),
      -- deleted accounts do not own their name, so don't index them
      userNameMap = if isUserDeleted
                      then userNameMap users
                      else Map.insert (userName uinfo) userId (userNameMap users),
      -- keep nextId strictly above every inserted id
      nextId      = let UserId nextid = nextId users
                    in UserId (max nextid (uid + 1))
    }
  where
    userIdInUse   = IntMap.member uid (userIdMap users)
    userNameInUse = Map.member (userName uinfo) (userNameMap users)
    isUserDeleted = case userStatus uinfo of
                      AccountDeleted -> True
                      _              -> False
-- | Delete a user account.
--
-- Prevents the given user from performing authenticated operations.
-- This operation is idempotent but not reversible. Deleting an account forgets
-- any authentication credentials and the user name becomes available for
-- re-use in a new account.
--
-- Unlike 'UserName's, 'UserId's are never actually deleted or re-used. This is
-- what distinguishes disabling and deleting an account; a disabled account can
-- be enabled again and a disabled account does not release the user name for
-- re-use.
--
deleteUser :: UserId -> Users -> Either ErrNoSuchUserId Users
deleteUser (UserId userId) users = do
    userInfo <- lookupUserId (UserId userId) users ?! ErrNoSuchUserId
    let userInfo' = userInfo { userStatus = AccountDeleted }
    return $! checkinvariant users {
      -- keep the id entry (marked deleted) but release the name
      userIdMap   = IntMap.insert userId userInfo' (userIdMap users),
      userNameMap = Map.delete (userName userInfo) (userNameMap users)
    }
-- | Change the status of a user account to enabled or disabled.
--
-- Prevents the given user from performing any authenticated operations.
-- This operation is idempotent and reversable. Use 'enable' to re-enable a
-- disabled account.
--
-- The disabled state is intended to be temporary. Use 'delete' to permanently
-- delete the account and release the user name to be re-used.
--
setUserEnabledStatus :: UserId -> Bool -> Users -> Either (Either ErrNoSuchUserId ErrDeletedUser) Users
setUserEnabledStatus (UserId uid) enable users = do
    userInfo  <- lookupUserId (UserId uid) users ?! Left ErrNoSuchUserId
    userInfo' <- changeStatus userInfo           ?! Right ErrDeletedUser
    return $! checkinvariant users {
      userIdMap = IntMap.insert uid userInfo' (userIdMap users)
    }
  where
    -- enabling: a disabled account can only be re-enabled if it still
    -- has stored credentials; deleted accounts never come back
    changeStatus userInfo | enable = case userStatus userInfo of
      AccountEnabled  _           -> Just userInfo
      AccountDisabled (Just auth) -> Just userInfo { userStatus = AccountEnabled auth }
      AccountDisabled Nothing     -> Nothing
      AccountDeleted              -> Nothing
    -- disabling: keep the credentials so the account can be re-enabled
    changeStatus userInfo = case userStatus userInfo of
      AccountEnabled  auth -> Just userInfo { userStatus = AccountDisabled (Just auth) }
      AccountDisabled _    -> Just userInfo
      AccountDeleted       -> Nothing
-- | Replace the user authentication for the given user.
--
-- Fails on deleted accounts; on disabled accounts the new credentials
-- are stored for when the account is re-enabled.
setUserAuth :: UserId -> UserAuth -> Users -> Either (Either ErrNoSuchUserId ErrDeletedUser) Users
setUserAuth (UserId uid) newauth users = do
    userInfo  <- lookupUserId (UserId uid) users ?! Left ErrNoSuchUserId
    userInfo' <- changeAuth userInfo             ?! Right ErrDeletedUser
    return $! checkinvariant users {
      userIdMap = IntMap.insert uid userInfo' (userIdMap users)
    }
  where
    changeAuth userInfo = case userStatus userInfo of
      AccountEnabled  _oldauth -> Just $ userInfo { userStatus = AccountEnabled newauth }
      AccountDisabled _oldauth -> Just $ userInfo { userStatus = AccountDisabled (Just newauth) }
      AccountDeleted           -> Nothing
-- | Change the username for a user account. The new name must not be in use.
--
setUserName :: UserId -> UserName -> Users
            -> Either (Either ErrNoSuchUserId ErrUserNameClash) Users
setUserName (UserId uid) newname users = do
    userinfo <- lookupUserId (UserId uid) users ?! Left ErrNoSuchUserId
    guard (not (userNameInUse newname)) ?! Right ErrUserNameClash
    let oldname   = userName userinfo
        userinfo' = userinfo { userName = newname }
    return $! checkinvariant users {
      userIdMap   = IntMap.insert uid userinfo' (userIdMap users),
      -- re-key the name index: insert the new name, drop the old one
      userNameMap = Map.insert newname (UserId uid) . Map.delete oldname $ userNameMap users
    }
  where
    userNameInUse uname = Map.member uname (userNameMap users)
-- | All accounts, including disabled and deleted ones.
enumerateAllUsers :: Users -> [(UserId, UserInfo)]
enumerateAllUsers users =
    [ (UserId uid, uinfo) | (uid, uinfo) <- IntMap.assocs (userIdMap users) ]

-- | Only active (enabled or disabled, i.e. not deleted) accounts.
enumerateActiveUsers :: Users -> [(UserId, UserInfo)]
enumerateActiveUsers users =
    [ (UserId uid, uinfo) | (uid, uinfo) <- IntMap.assocs (userIdMap users)
                          , isActiveAccount (userStatus uinfo) ]
| ocharles/hackage-server | Distribution/Server/Users/Users.hs | bsd-3-clause | 11,335 | 0 | 17 | 2,784 | 2,595 | 1,357 | 1,238 | 189 | 7 |
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
module Options (injectDefaults) where
import Control.Applicative
import qualified Control.Exception as E
import Control.Monad
import Control.Monad.Trans.Except
import Control.Monad.Trans.Reader
import Data.Char (isAlphaNum, isSpace, toLower)
import Data.List (foldl')
import Data.List.Split (splitOn)
import qualified Data.Map as M
import Data.Maybe (mapMaybe)
import Data.Monoid
import Options.Applicative
import Options.Applicative.Types
import System.Directory
import System.Environment
import System.FilePath ((</>))
-- | inject defaults from either files or environments
-- in order of priority:
-- 1. command line arguments: --long-option=value
-- 2. environment variables: PREFIX_COMMAND_LONGOPTION=value
-- 3. $HOME/.prefix/config: prefix.command.longoption=value
--
-- note: this automatically injects values for standard options and flags
-- (also inside subcommands), but not for more complex parsers that use BindP
-- (like `many'). As a workaround a single special case is supported,
-- for `many' arguments that generate a list of strings.
injectDefaults :: String                          -- ^ prefix, program name
               -> [(String, a -> [String] -> a)]  -- ^ append extra options for arguments that are lists of strings
               -> ParserInfo a                    -- ^ original parsers
               -> IO (ParserInfo a)
injectDefaults prefix lenses parser = do
  e      <- getEnvironment
  -- best-effort read of the per-user config file; missing or
  -- unreadable files are treated as empty
  config <- (readFile . (</> "config") =<< getAppUserDataDirectory prefix)
              `E.catch` \(_::E.SomeException) -> return ""
  -- keep only keys whose first path component is the prefix; since
  -- M.fromList retains the *last* value for duplicate keys, env vars
  -- (listed second) override config entries
  let env = M.fromList . filter ((==[prefix]) . take 1 . fst) $
              configLines config <>                                -- config first
              map (\(k,v) -> (splitOn "_" $ map toLower k, v)) e   -- env vars override config
      p'  = parser { infoParser = injectDefaultP env [prefix] (infoParser parser) }
  -- additionally inject values for `many'-style list arguments via the lenses
  return $ foldl' (\p (key,l) -> fmap (updateA env key l) p) p' lenses
-- | Apply one list-valued default: look up the dotted key in the
-- environment map and, when present, split its value on ':' and feed
-- the pieces to the supplied update function. Absent keys leave the
-- value untouched.
updateA :: M.Map [String] String -> String -> (a -> [String] -> a) -> a -> a
updateA env key upd a =
    maybe a (upd a . splitOn ":") (M.lookup (splitOn "." key) env)
-- | Really simple key/value file reader: @x.y = z@ -> @(["x","y"],"z")@.
-- '#' starts a comment; lines without '=' are ignored; keys and values
-- are trimmed of surrounding whitespace.
configLines :: String -> [([String], String)]
configLines = mapMaybe (mkLine . takeWhile (/='#')) . lines
  where
    trim = let f = reverse . dropWhile isSpace in f . f
    mkLine l | (k, '=':v) <- break (=='=') l = Just (splitOn "." (trim k), trim v)
             | otherwise                     = Nothing
-- | inject the environment into the parser
-- the map contains the paths with the value that's passed into the reader if the
-- command line parser gives no result
injectDefaultP :: M.Map [String] String -> [String] -> Parser a -> Parser a
injectDefaultP _env _path n@(NilP{}) = n
injectDefaultP env path p@(OptP o)
#if MIN_VERSION_optparse_applicative(0,13,0)
  | (Option (CmdReader _ cmds f) props) <- o =
#else
  | (Option (CmdReader cmds f) props) <- o =
#endif
    -- subcommand: recurse into every sub-parser, extending the key path
    -- with the normalized command name
    let cmdMap = M.fromList (map (\c -> (c, mkCmd c)) cmds)
        mkCmd cmd =
          let (Just parseri) = f cmd
          in parseri { infoParser = injectDefaultP env (path ++ [normalizeName cmd]) (infoParser parseri) }
#if MIN_VERSION_optparse_applicative(0,13,0)
    in OptP (Option (CmdReader Nothing cmds (`M.lookup` cmdMap)) props)
#else
    in OptP (Option (CmdReader cmds (`M.lookup` cmdMap)) props)
#endif
  | (Option (OptReader names (CReader _ rdr) _) _) <- o =
    -- regular option: if the command line gives no result, run the
    -- option's own reader on the first matching environment value
    p <|> either (const empty)
                 pure
                 (runExcept . msum $
                  map (maybe (throwE $ ErrorMsg "Missing environment variable")
                             (runReaderT (unReadM rdr))
                       . getEnvValue env path)
                      names)
  | (Option (FlagReader names a) _) <- o =
    -- flag: an environment value of exactly "1" switches the flag on
    p <|> if any ((==Just "1") . getEnvValue env path) names then pure a else empty
  | otherwise = p
injectDefaultP env path (MultP p1 p2) =
    MultP (injectDefaultP env path p1) (injectDefaultP env path p2)
injectDefaultP env path (AltP p1 p2) =
    AltP (injectDefaultP env path p1) (injectDefaultP env path p2)
-- BindP (e.g. `many') cannot be traversed generically; handled via the
-- lens mechanism in injectDefaults instead
injectDefaultP _env _path b@(BindP {}) = b
-- | Look up the environment/config value for a long option name at the
-- given key path; short (single-char) options never match.
getEnvValue :: M.Map [String] String -> [String] -> OptName -> Maybe String
getEnvValue env path (OptLong l) = M.lookup (path ++ [normalizeName l]) env
getEnvValue _ _ _ = Nothing
-- | Canonicalize an option/command name for use as a lookup key:
-- keep only alphanumeric characters and lowercase them.
normalizeName :: String -> String
normalizeName name = [toLower c | c <- name, isAlphaNum c]
| s9gf4ult/yesod | yesod-bin/Options.hs | mit | 4,897 | 0 | 18 | 1,446 | 1,339 | 713 | 626 | 73 | 2 |
{-# LANGUAGE TypeFamilies, GeneralizedNewtypeDeriving #-}
-- |Define a lightweight wrapper, 'Angle', for floating point numeric
-- types that comes with a 'Num' instance that wraps around at
-- 0 and 2π, and properly handles angle differencing.
module AngleNum (Angle, angle, fromAngle, toDegrees) where
import Data.Function
import Data.VectorSpace
-- | Bring an angle into the range [0, 2*pi].
--
-- NOTE(review): only a single correction step is applied, so inputs
-- more than one full turn outside the range come back only partially
-- wrapped -- confirm callers never pass values outside (-2*pi, 4*pi).
wrapAngle :: (Floating a, Ord a) => a -> a
wrapAngle theta
  | theta < 0    = theta + 2 * pi
  | theta > 2*pi = theta - 2 * pi
  | otherwise    = theta
-- | Signed difference between two angles, shifted so the result lies
-- in [-pi, pi] (the inputs are first normalised to non-negative form).
angleDiff :: (Floating a, Ord a) => a -> a -> a
angleDiff x y = wrapAngle (x' + pi - y') - pi
  where x' = if x < 0 then x + 2*pi else x
        y' = if y < 0 then y + 2*pi else y
-- |Representation of an angle in radians. The 'Num' instance below
-- wraps arithmetic results back into [0, 2*pi].
newtype Angle a = Angle { fromAngle :: a }
  deriving (Eq, Ord, Show, Fractional, Floating)

-- |Produce an 'Angle' value within the range [0,2pi].
angle :: (Floating a, Ord a) => a -> Angle a
angle = Angle . wrapAngle
-- | Convert an 'Angle' (radians) into a floating point number of degrees.
toDegrees :: Floating a => Angle a -> a
toDegrees a = fromAngle a * (180 / pi)
-- | Arithmetic that wraps every result back into [0, 2*pi]; '(-)' uses
-- proper angular differencing via 'angleDiff'.
instance (Floating a, Ord a) => Num (Angle a) where
  Angle x + Angle y = Angle . wrapAngle $ x + y
  -- multiply the raw radian values, then re-wrap
  (*) = ((Angle . wrapAngle) .) . (*) `on` fromAngle
  Angle x - Angle y = Angle $ angleDiff x y
  negate (Angle x) = Angle (negate x)
  abs (Angle x) = Angle (abs x)
  signum (Angle x) = Angle (signum x)
  fromInteger = Angle . wrapAngle . fromInteger

instance (Floating a, Ord a, AdditiveGroup a) => AdditiveGroup (Angle a) where
  zeroV = Angle 0
  -- add the underlying values, then re-wrap
  (^+^) = ((Angle . wrapAngle) . ) . (^+^) `on` fromAngle
  negateV = negate

instance (Floating a, Ord a, AdditiveGroup a) => VectorSpace (Angle a) where
  type Scalar (Angle a) = a
  -- scalar multiplication on the raw value, re-wrapped
  s *^ Angle x = Angle . wrapAngle $ s * x
| bitemyapp/roshask | Examples/Turtle/src/AngleNum.hs | bsd-3-clause | 1,776 | 0 | 10 | 398 | 689 | 368 | 321 | -1 | -1 |
{-# LANGUAGE BangPatterns, MagicHash, UnboxedTuples #-}
{-# OPTIONS_GHC -O #-}
-- We always optimise this, otherwise performance of a non-optimised
-- compiler is severely affected
-- -----------------------------------------------------------------------------
--
-- (c) The University of Glasgow, 1997-2006
--
-- Character encodings
--
-- -----------------------------------------------------------------------------
module Encoding (
-- * UTF-8
utf8DecodeChar#,
utf8PrevChar,
utf8CharStart,
utf8DecodeChar,
utf8DecodeString,
utf8EncodeChar,
utf8EncodeString,
utf8EncodedLength,
countUTF8Chars,
-- * Z-encoding
zEncodeString,
zDecodeString
) where
import Foreign
import Data.Char
import Numeric
import ExtsCompat46
-- -----------------------------------------------------------------------------
-- UTF-8
-- We can't write the decoder as efficiently as we'd like without
-- resorting to unboxed extensions, unfortunately. I tried to write
-- an IO version of this function, but GHC can't eliminate boxed
-- results from an IO-returning function.
--
-- We assume we can ignore overflow when parsing a multibyte character here.
-- To make this safe, we add extra sentinel bytes to unparsed UTF-8 sequences
-- before decoding them (see StringBuffer.hs).
{-# INLINE utf8DecodeChar# #-}
-- | Decode the UTF-8 sequence starting at the given address, returning
-- the decoded character and the number of bytes consumed. Malformed
-- sequences decode to '\0' (see the note on 'fail' below).
utf8DecodeChar# :: Addr# -> (# Char#, Int# #)
utf8DecodeChar# a# =
  let !ch0 = word2Int# (indexWord8OffAddr# a# 0#) in
  case () of
    _ | ch0 <=# 0x7F# -> (# chr# ch0, 1# #)

      -- two-byte sequence: 110xxxxx 10xxxxxx
      | ch0 >=# 0xC0# && ch0 <=# 0xDF# ->
        let !ch1 = word2Int# (indexWord8OffAddr# a# 1#) in
        if ch1 <# 0x80# || ch1 >=# 0xC0# then fail 1# else
        (# chr# (((ch0 -# 0xC0#) `uncheckedIShiftL#` 6#) +#
                 (ch1 -# 0x80#)),
           2# #)

      -- three-byte sequence: 1110xxxx 10xxxxxx 10xxxxxx
      | ch0 >=# 0xE0# && ch0 <=# 0xEF# ->
        let !ch1 = word2Int# (indexWord8OffAddr# a# 1#) in
        if ch1 <# 0x80# || ch1 >=# 0xC0# then fail 1# else
        let !ch2 = word2Int# (indexWord8OffAddr# a# 2#) in
        if ch2 <# 0x80# || ch2 >=# 0xC0# then fail 2# else
        (# chr# (((ch0 -# 0xE0#) `uncheckedIShiftL#` 12#) +#
                 ((ch1 -# 0x80#) `uncheckedIShiftL#` 6#) +#
                 (ch2 -# 0x80#)),
           3# #)

      -- four-byte sequence: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
      | ch0 >=# 0xF0# && ch0 <=# 0xF8# ->
        let !ch1 = word2Int# (indexWord8OffAddr# a# 1#) in
        if ch1 <# 0x80# || ch1 >=# 0xC0# then fail 1# else
        let !ch2 = word2Int# (indexWord8OffAddr# a# 2#) in
        if ch2 <# 0x80# || ch2 >=# 0xC0# then fail 2# else
        let !ch3 = word2Int# (indexWord8OffAddr# a# 3#) in
        if ch3 <# 0x80# || ch3 >=# 0xC0# then fail 3# else
        (# chr# (((ch0 -# 0xF0#) `uncheckedIShiftL#` 18#) +#
                 ((ch1 -# 0x80#) `uncheckedIShiftL#` 12#) +#
                 ((ch2 -# 0x80#) `uncheckedIShiftL#` 6#) +#
                 (ch3 -# 0x80#)),
           4# #)

      | otherwise -> fail 1#
  where
    -- all invalid sequences end up here:
    fail :: Int# -> (# Char#, Int# #)
    fail nBytes# = (# '\0'#, nBytes# #)
    -- '\xFFFD' would be the usual replacement character, but
    -- that's a valid symbol in Haskell, so will result in a
    -- confusing parse error later on. Instead we use '\0' which
    -- will signal a lexer error immediately.
-- | Boxed wrapper around 'utf8DecodeChar#'.
utf8DecodeChar :: Ptr Word8 -> (Char, Int)
utf8DecodeChar (Ptr a#) =
  case utf8DecodeChar# a# of (# c#, nBytes# #) -> ( C# c#, I# nBytes# )

-- UTF-8 is cleverly designed so that we can always figure out where
-- the start of the current character is, given any position in a
-- stream. This function finds the start of the previous character,
-- assuming there *is* a previous character.
utf8PrevChar :: Ptr Word8 -> IO (Ptr Word8)
utf8PrevChar p = utf8CharStart (p `plusPtr` (-1))

-- | Walk backwards over continuation bytes (0x80-0xBF) to the first
-- byte of the character containing the given position.
utf8CharStart :: Ptr Word8 -> IO (Ptr Word8)
utf8CharStart p = go p
  where go p = do w <- peek p
                  if w >= 0x80 && w < 0xC0
                     then go (p `plusPtr` (-1))
                     else return p

-- | Decode @len@ bytes of UTF-8 starting at @ptr@ into a 'String'.
utf8DecodeString :: Ptr Word8 -> Int -> IO [Char]
utf8DecodeString ptr len
  = unpack ptr
  where
    !end = ptr `plusPtr` len
    unpack p
      | p >= end  = return []
      | otherwise =
          case utf8DecodeChar# (unPtr p) of
            (# c#, nBytes# #) -> do
              chs <- unpack (p `plusPtr#` nBytes#)
              return (C# c# : chs)

-- | Count the characters encoded in @len@ bytes of UTF-8 at @ptr@.
countUTF8Chars :: Ptr Word8 -> Int -> IO Int
countUTF8Chars ptr len = go ptr 0
  where
    !end = ptr `plusPtr` len
    go p !n
      | p >= end  = return n
      | otherwise = do
          case utf8DecodeChar# (unPtr p) of
            (# _, nBytes# #) -> go (p `plusPtr#` nBytes#) (n+1)

-- | Unwrap a boxed pointer to its raw address.
unPtr :: Ptr a -> Addr#
unPtr (Ptr a) = a

-- | 'plusPtr' taking an unboxed byte offset.
plusPtr# :: Ptr a -> Int# -> Ptr a
plusPtr# ptr nBytes# = ptr `plusPtr` (I# nBytes#)
-- | Encode one character at @ptr@, returning the pointer just past the
-- 1-4 bytes written.
utf8EncodeChar :: Char -> Ptr Word8 -> IO (Ptr Word8)
utf8EncodeChar c ptr =
  let x = ord c in
  case () of
    _ | x > 0 && x <= 0x007f -> do
          poke ptr (fromIntegral x)
          return (ptr `plusPtr` 1)
      -- NB. '\0' is encoded as '\xC0\x80', not '\0'. This is so that we
      -- can have 0-terminated UTF-8 strings (see GHC.Base.unpackCStringUtf8).
      | x <= 0x07ff -> do
          poke ptr (fromIntegral (0xC0 .|. ((x `shiftR` 6) .&. 0x1F)))
          pokeElemOff ptr 1 (fromIntegral (0x80 .|. (x .&. 0x3F)))
          return (ptr `plusPtr` 2)
      | x <= 0xffff -> do
          poke ptr (fromIntegral (0xE0 .|. (x `shiftR` 12) .&. 0x0F))
          pokeElemOff ptr 1 (fromIntegral (0x80 .|. (x `shiftR` 6) .&. 0x3F))
          pokeElemOff ptr 2 (fromIntegral (0x80 .|. (x .&. 0x3F)))
          return (ptr `plusPtr` 3)
      | otherwise -> do
          poke ptr (fromIntegral (0xF0 .|. (x `shiftR` 18)))
          pokeElemOff ptr 1 (fromIntegral (0x80 .|. ((x `shiftR` 12) .&. 0x3F)))
          pokeElemOff ptr 2 (fromIntegral (0x80 .|. ((x `shiftR` 6) .&. 0x3F)))
          pokeElemOff ptr 3 (fromIntegral (0x80 .|. (x .&. 0x3F)))
          return (ptr `plusPtr` 4)

-- | Encode a whole 'String' into the buffer at @ptr@; the caller must
-- have reserved at least 'utf8EncodedLength' bytes.
utf8EncodeString :: Ptr Word8 -> String -> IO ()
utf8EncodeString ptr str = go ptr str
  where go !_  []     = return ()
        go ptr (c:cs) = do
          ptr' <- utf8EncodeChar c ptr
          go ptr' cs
-- | Number of bytes needed to encode the string in (modified) UTF-8.
-- '\NUL' counts as 2 bytes, matching its 0xC0 0x80 encoding in
-- 'utf8EncodeChar'.
utf8EncodedLength :: String -> Int
utf8EncodedLength str = sum (map (charWidth . ord) str)
  where
    -- byte width of one code point under this modified UTF-8
    charWidth x
      | x > 0 && x <= 0x007f = 1
      | x <= 0x07ff          = 2
      | x <= 0xffff          = 3
      | otherwise            = 4
-- -----------------------------------------------------------------------------
-- The Z-encoding
{-
This is the main name-encoding and decoding function. It encodes any
string into a string that is acceptable as a C name. This is done
right before we emit a symbol name into the compiled C or asm code.
Z-encoding of strings is cached in the FastString interface, so we
never encode the same string more than once.
The basic encoding scheme is this.
* Tuples (,,,) are coded as Z3T
* Alphabetic characters (upper and lower) and digits
all translate to themselves;
except 'Z', which translates to 'ZZ'
and 'z', which translates to 'zz'
We need both so that we can preserve the variable/tycon distinction
* Most other printable characters translate to 'zx' or 'Zx' for some
alphabetic character x
* The others translate as 'znnnU' where 'nnn' is the decimal number
of the character
Before After
--------------------------
Trak Trak
foo_wib foozuwib
> zg
>1 zg1
foo# foozh
foo## foozhzh
foo##1 foozhzh1
fooZ fooZZ
:+ ZCzp
() Z0T 0-tuple
(,,,,) Z5T 5-tuple
(# #) Z1H unboxed 1-tuple (note the space)
(#,,,,#) Z5H unboxed 5-tuple
(NB: There is no Z1T nor Z0H.)
-}
type UserString = String -- As the user typed it
type EncodedString = String -- Encoded form
-- | Z-encode a user string into a C-acceptable symbol name.
-- Tuple names are special-cased via 'maybe_tuple'; otherwise the first
-- character goes through 'encode_digit_ch' (leading digits must be
-- escaped) and the rest through 'encode_ch'.
zEncodeString :: UserString -> EncodedString
zEncodeString cs = case maybe_tuple cs of
                Just n  -> n            -- Tuples go to Z2T etc
                Nothing -> go cs
          where
                go []     = []
                go (c:cs) = encode_digit_ch c ++ go' cs
                go' []     = []
                go' (c:cs) = encode_ch c ++ go' cs
-- | Characters that pass through the Z-encoding unchanged: ASCII
-- alphanumerics, except 'z' and 'Z' which are the escape characters
-- themselves (they double instead).
unencodedChar :: Char -> Bool   -- True for chars that don't need encoding
unencodedChar c
  | c == 'z' || c == 'Z' = False
  | otherwise            = ('a' <= c && c <= 'z')
                        || ('A' <= c && c <= 'Z')
                        || ('0' <= c && c <= '9')
-- If a digit is at the start of a symbol then we need to encode it.
-- Otherwise package names like 9pH-0.1 give linker errors.
-- | Encode the *first* character of a symbol.  Unlike 'encode_ch',
-- digits are escaped here so that no encoded name starts with a digit
-- (see the comment above about linker errors).
encode_digit_ch :: Char -> EncodedString
encode_digit_ch c | c >= '0' && c <= '9' = encode_as_unicode_char c
encode_digit_ch c | otherwise = encode_ch c
-- | Encode one (non-leading) character.  Alphanumerics pass through
-- unchanged; known punctuation maps to fixed two-character codes
-- (uppercase Z-codes for constructor-ish characters, lowercase z-codes
-- for variable-ish ones); anything else becomes a numeric escape.
encode_ch :: Char -> EncodedString
encode_ch c
  | unencodedChar c = [c]       -- Common case first
  | otherwise = case lookup c codeTable of
      Just code -> code
      Nothing   -> encode_as_unicode_char c
  where
    codeTable =
      [ -- Constructors
        ('(', "ZL")   -- Needed for things like (,), and (->)
      , (')', "ZR")   -- For symmetry with (
      , ('[', "ZM")
      , (']', "ZN")
      , (':', "ZC")
      , ('Z', "ZZ")
        -- Variables
      , ('z', "zz")
      , ('&', "za")
      , ('|', "zb")
      , ('^', "zc")
      , ('$', "zd")
      , ('=', "ze")
      , ('>', "zg")
      , ('#', "zh")
      , ('.', "zi")
      , ('<', "zl")
      , ('-', "zm")
      , ('!', "zn")
      , ('+', "zp")
      , ('\'', "zq")
      , ('\\', "zr")
      , ('/', "zs")
      , ('*', "zt")
      , ('_', "zu")
      , ('%', "zv")
      ]
-- | Numeric escape: @z@ followed by the code point in hex and a
-- terminating @U@.  A leading zero is inserted when the hex string
-- starts with a letter, so decoding can tell escapes from z-codes.
encode_as_unicode_char :: Char -> EncodedString
encode_as_unicode_char c = 'z' : padded
  where
    hex_str = showHex (ord c) "U"
    padded
      | isDigit (head hex_str) = hex_str
      | otherwise              = '0' : hex_str
-- ToDo: we could improve the encoding here in various ways.
-- eg. strings of unicode characters come out as 'z1234Uz5678U', we
-- could remove the 'U' in the middle (the 'z' works as a separator).
-- | Invert 'zEncodeString'.  @Z@ followed by a digit introduces a tuple
-- code, @z@ followed by a digit a numeric escape; otherwise the next
-- character selects an entry in the upper/lower decode tables.
zDecodeString :: EncodedString -> UserString
zDecodeString [] = []
zDecodeString ('Z' : d : rest)
  | isDigit d = decode_tuple d rest
  | otherwise = decode_upper d : zDecodeString rest
zDecodeString ('z' : d : rest)
  | isDigit d = decode_num_esc d rest
  | otherwise = decode_lower d : zDecodeString rest
zDecodeString (c : rest) = c : zDecodeString rest
-- | Decode tables for @Z@-codes (constructor characters) and @z@-codes
-- (variable characters).  Unknown code characters decode to themselves,
-- matching the original fall-through equations.
decode_upper, decode_lower :: Char -> Char

decode_upper ch = maybe ch id (lookup ch upperTable)
  where
    upperTable =
      [ ('L', '('), ('R', ')'), ('M', '['), ('N', ']')
      , ('C', ':'), ('Z', 'Z') ]

decode_lower ch = maybe ch id (lookup ch lowerTable)
  where
    lowerTable =
      [ ('z', 'z'), ('a', '&'), ('b', '|'), ('c', '^')
      , ('d', '$'), ('e', '='), ('g', '>'), ('h', '#')
      , ('i', '.'), ('l', '<'), ('m', '-'), ('n', '!')
      , ('p', '+'), ('q', '\''), ('r', '\\'), ('s', '/')
      , ('t', '*'), ('u', '_'), ('v', '%') ]
-- Characters not having a specific code are coded as z224U (in hex)
-- | Decode a numeric escape @znnnU@: accumulate hex digits until the
-- terminating @U@, then emit the character and resume normal decoding.
-- Malformed escapes are a hard error.
decode_num_esc :: Char -> EncodedString -> UserString
decode_num_esc d rest
  = go (digitToInt d) rest
  where
    go n (c : rest) | isHexDigit c = go (16*n + digitToInt c) rest
    go n ('U' : rest) = chr n : zDecodeString rest
    go n other = error ("decode_num_esc: " ++ show n ++ ' ':other)
-- | Decode a tuple code @ZnT@ (boxed) or @ZnH@ (unboxed), where @n@ is
-- the arity in decimal.  @Z0T@ is the unit tycon and @Z1H@ the unboxed
-- 1-tuple; anything else is a hard error.
decode_tuple :: Char -> EncodedString -> UserString
decode_tuple d rest
  = go (digitToInt d) rest
  where
        -- NB. recurse back to zDecodeString after decoding the tuple, because
        -- the tuple might be embedded in a longer name.
    go n (c : rest) | isDigit c = go (10*n + digitToInt c) rest
    go 0 ('T':rest) = "()" ++ zDecodeString rest
    go n ('T':rest) = '(' : replicate (n-1) ',' ++ ")" ++ zDecodeString rest
    go 1 ('H':rest) = "(# #)" ++ zDecodeString rest
    go n ('H':rest) = '(' : '#' : replicate (n-1) ',' ++ "#)" ++ zDecodeString rest
    go n other = error ("decode_tuple: " ++ show n ++ ' ':other)
{-
Tuples are encoded as
Z3T or Z3H
for 3-tuples or unboxed 3-tuples respectively. No other encoding starts
Z<digit>
* "(# #)" is the tycon for an unboxed 1-tuple (not 0-tuple)
There are no unboxed 0-tuples.
* "()" is the tycon for a boxed 0-tuple.
There are no boxed 1-tuples.
-}
-- | Recognise tuple tycon syntax and produce its encoded form directly:
-- @()@ and @(,,)@-style names become @ZnT@, @(# #)@ and @(#,,#)@-style
-- names become @ZnH@.  Returns 'Nothing' for anything else.
maybe_tuple :: UserString -> Maybe EncodedString
maybe_tuple "(# #)" = Just("Z1H")
maybe_tuple ('(' : '#' : cs) = case count_commas (0::Int) cs of
                                 (n, '#' : ')' : _) -> Just ('Z' : shows (n+1) "H")
                                 _ -> Nothing
maybe_tuple "()" = Just("Z0T")
maybe_tuple ('(' : cs) = case count_commas (0::Int) cs of
                           (n, ')' : _) -> Just ('Z' : shows (n+1) "T")
                           _ -> Nothing
maybe_tuple _ = Nothing
-- | Count leading commas, returning the total (added to the seed @n@)
-- together with the remainder of the string.
count_commas :: Int -> String -> (Int, String)
count_commas n str = case str of
  ',' : rest -> count_commas (n + 1) rest
  _          -> (n, str)
| urbanslug/ghc | compiler/utils/Encoding.hs | bsd-3-clause | 13,485 | 0 | 29 | 4,009 | 3,636 | 1,849 | 1,787 | 240 | 7 |
{-# LANGUAGE GADTs, KindSignatures,
MultiParamTypeClasses, FunctionalDependencies #-}
-- Program from Josef Svenningsson
-- Just a short explanation of the program. It contains
-- some class declarations capturing some definitions from
-- category theory. Further down he have a data type for well typed
-- lambda expressions using GADTs. Finally we have a
-- function defining the semantics for lambda terms called
-- 'interp'.
-- Made GHC 6.4 bleat
-- Quantified type variable `t' is unified with
-- another quantified type variable `terminal'
-- When trying to generalise the type inferred for `interp'
module Bug where
-- | Arrows with identity and (diagrammatic-order) composition.
class Category arr where
  idA :: arr a a
  comp :: arr a b -> arr b c -> arr a c
-- | Categories with a terminal object: a unique arrow from any object.
class Category arr =>
      Terminal terminal arr | arr -> terminal where
  terminal :: arr a terminal
-- | Categories with binary products: projections plus pairing.
class Category arr =>
      ProductCategory prod arr | arr -> prod where
  first :: arr (prod a b) a
  second :: arr (prod a b) b
  pair :: arr a b -> arr a c -> arr a (prod b c)
-- | Categories with binary coproducts: injections plus case analysis.
class Category arr =>
      CoproductCategory coprod arr | arr -> coprod where
  inLeft :: arr a (coprod a b)
  inRight :: arr b (coprod a b)
  ccase :: arr a c -> arr b c -> arr (coprod a b) c
-- | Exponential objects (internal function spaces): evaluation and currying.
class ProductCategory prod arr =>
      Exponential exp prod arr | arr -> exp where
  eval :: arr (prod (exp a b) a) b
  curryA :: arr (prod c a) b -> arr c (exp a b)
-- | A cartesian-closed category: exponentials plus a terminal object.
class (Exponential exp prod arr, Terminal terminal arr) =>
      CartesianClosed terminal exp prod arr | arr -> terminal exp prod
-- | Well-typed de Bruijn variables: 'Z' picks the innermost binding,
-- 'S' skips one binding.
data V prod env t where
  Z :: V prod (prod env t) t
  S :: V prod env t -> V prod (prod env x) t
-- | Well-typed lambda terms, indexed by environment and result type.
data Lambda terminal (exp :: * -> * -> *) prod env t where
  Unit :: Lambda foo exp prod env foo
  Var :: V prod env t -> Lambda terminal exp prod env t
{- Lam :: Lambda terminal exp prod (prod env a) t
     -> Lambda terminal exp prod env (exp a t)
   App :: Lambda terminal exp prod env (exp t t')
     -> Lambda terminal exp prod env t -> Lambda terminal exp prod env t'
-}
-- | Categorical semantics: interpret a term as an arrow in any
-- cartesian-closed category.  (This is the regression-test payload:
-- GHC 6.4 wrongly unified the two quantified type variables here.)
interp :: CartesianClosed terminal exp prod arr =>
          Lambda terminal exp prod s t -> arr s t
interp (Unit) = terminal -- Terminal terminal arr => arr a terminal
-- interp (Var Z) = second
-- interp (Var (S v)) = first `comp` interp (Var v)
-- interp (Lam e) = curryA (interp e)
-- interp (App e1 e2) = pair (interp e1) (interp e2) `comp` eval
-- interp (Var Z) = second
-- interp (Var (S v)) = first `comp` interp (Var v)
-- interp (Lam e) = curryA (interp e)
-- interp (App e1 e2) = pair (interp e1) (interp e2) `comp` eval
| ezyang/ghc | testsuite/tests/gadt/josef.hs | bsd-3-clause | 2,385 | 0 | 11 | 611 | 599 | 313 | 286 | -1 | -1 |
{-# LANGUAGE ExistentialQuantification, FlexibleInstances, UndecidableInstances #-}
-- Tests context reduction for existentials
module TestWrappedNode where
-- | Single-method class used to observe instance-overlap resolution.
class Foo a where { op :: a -> Int }
-- Generic list instance; overlappable so the [Int] instance can win.
instance {-# OVERLAPPABLE #-} Foo a => Foo [a] where -- NB overlap
  op (x:xs) = op x
-- Specific [Int] instance, overlapping the generic one above.
instance {-# OVERLAPPING #-} Foo [Int] where -- NB overlap
  op x = 1
-- Existential wrapper: hides the element type, keeping only its Foo
-- dictionary.
data T = forall a. Foo a => MkT a
-- Needs Foo [a] for an existential 'a'; resolution must commit to the
-- overlappable generic instance since 'a' can never be instantiated.
f :: T -> Int
f (MkT x) = op [x,x]
-- The op [x,x] means we need (Foo [a]). We used to
-- complain, saying that the choice of instance depended on
-- the instantiation of 'a'; but of course it isn't *going*
-- to be instantiated.
| olsner/ghc | testsuite/tests/typecheck/should_compile/tc179.hs | bsd-3-clause | 668 | 0 | 8 | 174 | 142 | 80 | 62 | 10 | 1 |
{-# LANGUAGE BangPatterns, GeneralizedNewtypeDeriving, MagicHash,
UnboxedTuples #-}
-- !!! stress tests of copying/cloning primitive arrays
-- Note: You can run this test manually with an argument (i.e.
-- ./CopySmallArrayStressTest 10000) if you want to run the stress
-- test for longer.
{-
Test strategy
=============
We create an array of arrays of integers. Repeatedly we then either
* allocate a new array in place of an old, or
* copy a random segment of an array into another array (which might be
the source array).
By running this process long enough we hope to trigger any bugs
related to garbage collection or edge cases.
We only test copySmallMutableArray# and cloneSmallArray# as they are
representative of all the primops.
-}
module Main ( main ) where
import Debug.Trace (trace)
import Control.Exception (assert)
import Control.Monad
import Control.Monad.Trans.State.Strict
import Control.Monad.Trans.Class
import GHC.Exts hiding (IsList(..))
import GHC.ST hiding (liftST)
import Prelude hiding (length, read)
import qualified Prelude as P
import qualified Prelude as P
import System.Environment
import System.Random
-- | Entry point: run both stress tests.  An optional single command-line
-- argument overrides the number of copy/clone operations performed.
main :: IO ()
main = do
    args <- getArgs
    -- Number of copies to perform
    -- NOTE(review): 'P.read' is partial and >1 argument hits an
    -- inexhaustive case; acceptable for a manual test driver.
    let numMods = case args of
            [] -> 100
            [n] -> P.read n :: Int
    putStr (test_copyMutableArray numMods ++ "\n" ++
            test_cloneMutableArray numMods ++ "\n"
           )
-- Number of (sub-)arrays held in the outer array built by 'setup'.
numArrays :: Int
numArrays = 100
-- Maximum length of a sub-array
maxLen :: Int
maxLen = 1024
-- Create an array of arrays, with each sub-array having random length
-- and content.
setup :: Rng s (MArray s (MArray s Int))
setup = do
    len <- rnd (1, numArrays)
    marr <- liftST $ new_ len
    -- Fill every slot; sub-array lengths are at least 1 here.
    let go i
            | i >= len = return ()
            | otherwise = do
                n <- rnd (1, maxLen)
                subarr <- liftST $ fromList [j*j | j <- [(0::Int)..n-1]]
                liftST $ write marr i subarr
                go (i+1)
    go 0
    return marr
-- Replace one of the sub-arrays with a newly allocated array.
allocate :: MArray s (MArray s Int) -> Rng s ()
allocate marr = do
    ix <- rnd (0, length marr - 1)
    n <- rnd (1, maxLen)
    subarr <- liftST $ fromList [j*j | j <- [(0::Int)..n-1]]
    liftST $ write marr ix subarr
-- A copy primitive: src, src offset, dst, dst offset, element count.
type CopyFunction s a =
    MArray s a -> Int -> MArray s a -> Int -> Int -> ST s ()
-- Copy a random segment of an array onto another array, using the
-- supplied copy function.  Returns the chosen indices/offsets/length so
-- failures can be reported.
copy :: MArray s (MArray s a) -> CopyFunction s a
     -> Rng s (Int, Int, Int, Int, Int)
copy marr f = do
    six <- rnd (0, length marr - 1)
    dix <- rnd (0, length marr - 1)
    src <- liftST $ read marr six
    dst <- liftST $ read marr dix
    let srcLen = length src
    -- NOTE(review): unlike 'clone' below this does not clamp with
    -- 'max 0', so a zero-length sub-array (installable by a 0-element
    -- clone) makes this 'rnd (0, -1)' -- confirm randomR's behaviour on
    -- an empty range is acceptable here.
    srcOff <- rnd (0, srcLen - 1)
    let dstLen = length dst
    dstOff <- rnd (0, dstLen - 1)
    n <- rnd (0, min (srcLen - srcOff) (dstLen - dstOff))
    liftST $ f src srcOff dst dstOff n
    return (six, dix, srcOff, dstOff, n)
-- A clone primitive: src, offset, element count.
type CloneFunction s a = MArray s a -> Int -> Int -> ST s (MArray s a)
-- Clone a random segment of an array, replacing another array, using
-- the supplied clone function.
clone :: MArray s (MArray s a) -> CloneFunction s a
      -> Rng s (Int, Int, Int, Int)
clone marr f = do
    six <- rnd (0, length marr - 1)
    dix <- rnd (0, length marr - 1)
    src <- liftST $ read marr six
    let srcLen = length src
    -- N.B. The array length might be zero if we previously cloned
    -- zero elements from some array.
    srcOff <- rnd (0, max 0 (srcLen - 1))
    n <- rnd (0, srcLen - srcOff)
    dst <- liftST $ f src srcOff n
    liftST $ write marr dix dst
    return (six, dix, srcOff, n)
------------------------------------------------------------------------
-- copySmallMutableArray#
-- Copy a slice of the source array into a destination array and check
-- that the copy succeeded.
-- The same random operations are replayed against the primop
-- ('copyMArray') and a reference implementation ('copyMArraySlow');
-- any divergence between the two array-of-arrays states is a failure.
test_copyMutableArray :: Int -> String
test_copyMutableArray numMods = runST $ run $ do
    marr <- local setup
    marrRef <- setup
    let go i
            | i >= numMods = return "test_copyMutableArray: OK"
            | otherwise = do
                -- Either allocate or copy
                alloc <- rnd (True, False)
                if alloc then doAlloc else doCopy
                go (i+1)
        doAlloc = do
            local $ allocate marr
            allocate marrRef
        doCopy = do
            inp <- liftST $ asList marr
            _ <- local $ copy marr copyMArray
            (six, dix, srcOff, dstOff, n) <- copy marrRef copyMArraySlow
            el <- liftST $ asList marr
            elRef <- liftST $ asList marrRef
            when (el /= elRef) $
                fail inp el elRef six dix srcOff dstOff n
    go 0
  where
    -- NB: this 'fail' is a local binding (it shadows Monad's fail) and
    -- aborts with a diagnostic dump of both states.
    fail inp el elRef six dix srcOff dstOff n =
        error $ "test_copyMutableArray: FAIL\n"
            ++ "   Input: " ++ unlinesShow inp
            ++ "    Copy: six: " ++ show six ++ " dix: " ++ show dix ++ " srcOff: "
            ++ show srcOff ++ " dstOff: " ++ show dstOff ++ " n: " ++ show n ++ "\n"
            ++ "Expected: " ++ unlinesShow elRef
            ++ "  Actual: " ++ unlinesShow el
-- | Snapshot the whole array-of-arrays as nested lists, for comparison.
asList :: MArray s (MArray s a) -> ST s [[a]]
asList marr = toListM =<< mapArrayM toListM marr
-- | Render every element on its own line (used in failure reports).
unlinesShow :: Show a => [a] -> String
unlinesShow = unlines . map show
------------------------------------------------------------------------
-- cloneSmallMutableArray#
-- Copy a slice of the source array into a destination array and check
-- that the copy succeeded.
-- Mirrors 'test_copyMutableArray' but exercises the clone primop
-- ('cloneMArray') against its reference version ('cloneMArraySlow').
test_cloneMutableArray :: Int -> String
test_cloneMutableArray numMods = runST $ run $ do
    marr <- local setup
    marrRef <- setup
    let go i
            | i >= numMods = return "test_cloneMutableArray: OK"
            | otherwise = do
                -- Either allocate or clone
                alloc <- rnd (True, False)
                if alloc then doAlloc else doClone
                go (i+1)
        doAlloc = do
            local $ allocate marr
            allocate marrRef
        doClone = do
            inp <- liftST $ asList marr
            _ <- local $ clone marr cloneMArray
            (six, dix, srcOff, n) <- clone marrRef cloneMArraySlow
            el <- liftST $ asList marr
            elRef <- liftST $ asList marrRef
            when (el /= elRef) $
                fail inp el elRef six dix srcOff n
    go 0
  where
    -- Local diagnostic failure (shadows Monad's fail on purpose).
    fail inp el elRef six dix srcOff n =
        error $ "test_cloneMutableArray: FAIL\n"
            ++ "   Input: " ++ unlinesShow inp
            ++ "   Clone: six: " ++ show six ++ " dix: " ++ show dix ++ " srcOff: "
            ++ show srcOff ++ " n: " ++ show n ++ "\n"
            ++ "Expected: " ++ unlinesShow elRef
            ++ "  Actual: " ++ unlinesShow el
------------------------------------------------------------------------
-- Convenience wrappers for SmallArray# and SmallMutableArray#
-- Immutable small array paired with its length (primops do not carry one).
data Array a = Array
    { unArray :: SmallArray# a
    , lengthA :: {-# UNPACK #-} !Int}
-- Mutable small array paired with its length.
data MArray s a = MArray
    { unMArray :: SmallMutableArray# s a
    , lengthM :: {-# UNPACK #-} !Int}
-- Overloaded 'length' for both wrappers (shadows Prelude.length here).
class IArray a where
    length :: a -> Int
instance IArray (Array a) where
    length = lengthA
instance IArray (MArray s a) where
    length = lengthM
-- Structural equality via full element lists.
instance Eq a => Eq (Array a) where
    arr1 == arr2 = toList arr1 == toList arr2
-- | Allocate a mutable array of length @n@, every slot filled with @a@.
new :: Int -> a -> ST s (MArray s a)
new n@(I# n#) a =
    assert (n >= 0) $
    ST $ \s# -> case newSmallArray# n# a s# of
        (# s2#, marr# #) -> (# s2#, MArray marr# n #)
-- | Allocate without a meaningful initial element; reading an
-- unwritten slot forces the error thunk.
new_ :: Int -> ST s (MArray s a)
new_ n = new n (error "Undefined element")
-- | Write one slot, with bounds assertions.
write :: MArray s a -> Int -> a -> ST s ()
write marr i@(I# i#) a =
    assert (i >= 0) $
    assert (i < length marr) $
    ST $ \ s# ->
    case writeSmallArray# (unMArray marr) i# a s# of
        s2# -> (# s2#, () #)
-- | Read one slot, with bounds assertions.
read :: MArray s a -> Int -> ST s a
read marr i@(I# i#) =
    assert (i >= 0) $
    assert (i < length marr) $
    ST $ \ s# ->
    readSmallArray# (unMArray marr) i# s#
-- | Pure indexing into an immutable array, with bounds assertions.
index :: Array a -> Int -> a
index arr i@(I# i#) =
    assert (i >= 0) $
    assert (i < length arr) $
    case indexSmallArray# (unArray arr) i# of
        (# a #) -> a
-- | Freeze without copying; the source must not be mutated afterwards.
unsafeFreeze :: MArray s a -> ST s (Array a)
unsafeFreeze marr = ST $ \ s# ->
    case unsafeFreezeSmallArray# (unMArray marr) s# of
        (# s2#, arr# #) -> (# s2#, Array arr# (length marr) #)
-- | All elements of an immutable array, in index order.
toList :: Array a -> [a]
toList arr = go 0
  where
    go i | i >= length arr = []
         | otherwise = index arr i : go (i+1)
-- | Build a mutable array holding exactly the given elements.
fromList :: [e] -> ST s (MArray s e)
fromList es = do
    marr <- new_ n
    let go !_ [] = return ()
        go i (x:xs) = write marr i x >> go (i+1) xs
    go 0 es
    return marr
  where
    n = P.length es
-- | Monadic map over a mutable array, producing a fresh array of the
-- same length.
mapArrayM :: (a -> ST s b) -> MArray s a -> ST s (MArray s b)
mapArrayM f src = do
    dst <- new_ n
    let go i
            | i >= n = return dst
            | otherwise = do
                el <- read src i
                el' <- f el
                write dst i el'
                go (i+1)
    go 0
  where
    n = length src
-- | All elements of a mutable array, in index order.
toListM :: MArray s e -> ST s [e]
toListM marr =
    sequence [read marr i | i <- [0..(length marr)-1]]
------------------------------------------------------------------------
-- Wrappers around copy/clone primops
-- | Thin wrapper over copySmallMutableArray# with range assertions.
copyMArray :: MArray s a -> Int -> MArray s a -> Int -> Int -> ST s ()
copyMArray src six@(I# six#) dst dix@(I# dix#) n@(I# n#) =
    assert (six >= 0) $
    assert (six + n <= length src) $
    assert (dix >= 0) $
    assert (dix + n <= length dst) $
    ST $ \ s# ->
    case copySmallMutableArray# (unMArray src) six# (unMArray dst) dix# n# s# of
        s2# -> (# s2#, () #)
-- | Thin wrapper over cloneSmallMutableArray# with range assertions.
cloneMArray :: MArray s a -> Int -> Int -> ST s (MArray s a)
cloneMArray marr off@(I# off#) n@(I# n#) =
    assert (off >= 0) $
    assert (off + n <= length marr) $
    ST $ \ s# ->
    case cloneSmallMutableArray# (unMArray marr) off# n# s# of
        (# s2#, marr2 #) -> (# s2#, MArray marr2 n #)
------------------------------------------------------------------------
-- Manual versions of copy/clone primops. Used to validate the
-- primops
-- | Element-by-element reference copy.  The copy direction is chosen
-- from the offsets so the result is correct even when source and
-- destination are the same array with overlapping ranges.
copyMArraySlow :: MArray s e -> Int -> MArray s e -> Int -> Int -> ST s ()
copyMArraySlow !src !six !dst !dix n =
    assert (six >= 0) $
    assert (six + n <= length src) $
    assert (dix >= 0) $
    assert (dix + n <= length dst) $
    if six < dix
    then goB (six+n-1) (dix+n-1) 0 -- Copy backwards
    else goF six dix 0 -- Copy forwards
  where
    goF !i !j c
        | c >= n = return ()
        | otherwise = do b <- read src i
                         write dst j b
                         goF (i+1) (j+1) (c+1)
    goB !i !j c
        | c >= n = return ()
        | otherwise = do b <- read src i
                         write dst j b
                         goB (i-1) (j-1) (c+1)
-- | Element-by-element reference clone into a freshly allocated array.
cloneMArraySlow :: MArray s a -> Int -> Int -> ST s (MArray s a)
cloneMArraySlow !marr !off n =
    assert (off >= 0) $
    assert (off + n <= length marr) $ do
    marr2 <- new_ n
    let go !i !j c
            | c >= n = return marr2
            | otherwise = do
                b <- read marr i
                write marr2 j b
                go (i+1) (j+1) (c+1)
    go off 0 0
------------------------------------------------------------------------
-- Utilities for simplifying RNG passing
-- ST computations carrying a StdGen in a state transformer.
newtype Rng s a = Rng { unRng :: StateT StdGen (ST s) a }
                deriving (Functor, Applicative, Monad)
-- Same as 'randomR', but using the RNG state kept in the 'Rng' monad.
rnd :: Random a => (a, a) -> Rng s a
rnd r = Rng $ do
    g <- get
    let (x, g') = randomR r g
    put g'
    return x
-- Run a sub-computation without affecting the RNG state.
local :: Rng s a -> Rng s a
local m = Rng $ do
    g <- get
    x <- unRng m
    put g
    return x
-- Lift a plain ST action into 'Rng'.
liftST :: ST s a -> Rng s a
liftST m = Rng $ lift m
-- Run with a fixed seed so failures are reproducible.
run :: Rng s a -> ST s a
run = flip evalStateT (mkStdGen 13) . unRng
| wxwxwwxxx/ghc | testsuite/tests/codeGen/should_run/CopySmallArrayStressTest.hs | bsd-3-clause | 11,884 | 0 | 24 | 3,678 | 4,316 | 2,123 | 2,193 | 273 | 2 |
-- |
-- Module : Interpolate.Parse
-- Description :
-- Copyright : (c) Jonatan H Sundqvist, 2015
-- License : MIT
-- Maintainer : Jonatan H Sundqvist
-- Stability : experimental|stable
-- Portability : POSIX (not sure)
--
-- Created September 30 2015
-- TODO | - Allow nested formats (?)
-- - Allow interpolation of format parameters (?)
-- - Polymorphic in String
-- - Allow arbitrary monad transformers
--          - Devise some way of specifying what parameters are valid for any given FormatArg type
-- -- Type-specific specifiers
-- - Empty formatting specifiers (?)
-- SPEC | -
-- -
-- cf. https://docs.python.org/3.4/library/string.html#formatspec
--------------------------------------------------------------------------------------------------------------------------------------------
-- GHC Pragmas
--------------------------------------------------------------------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
--------------------------------------------------------------------------------------------------------------------------------------------
-- API
--------------------------------------------------------------------------------------------------------------------------------------------
module Interpolate.Parse where
--------------------------------------------------------------------------------------------------------------------------------------------
-- We'll need these
--------------------------------------------------------------------------------------------------------------------------------------------
import qualified Text.Parsec as Parsec
import Text.Parsec ((<?>), (<|>), ParsecT, Stream)
import Data.Functor ((<$>), (<$))
import Data.Monoid
import Data.String
import Control.Monad.Identity
import Interpolate.Types
--------------------------------------------------------------------------------------------------------------------------------------------
-- Functions
--------------------------------------------------------------------------------------------------------------------------------------------
-- |
-- parseformat :: IsString string => string -> a
-- ParsecT s u m a is a parser with stream type s, user state type u, underlying monad m and return type a.
-- TODO: Escaping '}'
-- TODO: Rename (?)
-- | Parse a whole format string into a token list: literal runs
-- interleaved with @{...}@ format specifiers.
parseformat :: (Stream s' Identity Char) => ParsecT s' u Identity [FormatToken]
parseformat = Parsec.many (Parsec.try plain <|> format)
-- |
-- A maximal run of literal text, with @{{@ and @}}@ unescaped to
-- single braces.
-- TODO: unescape
-- TODO: Rename (?)
-- TODO: Fix backtracking behaviour (should be non-greedy)
literaltext :: (Stream s' Identity Char, Monoid s, IsString s) => ParsecT s' u Identity s
literaltext = mconcat <$> Parsec.many1 (Parsec.try unescaped <|> (Parsec.try openescape <|> closeescape) ) -- TODO: Find a way of flattening
-- | A run of literal text wrapped as a token.
plain :: (Stream s' Identity Char) => ParsecT s' u Identity (FormatToken)
plain = PlainToken <$> literaltext
-- | Any single character except the brace delimiters.
unescaped :: (Stream s' Identity Char, IsString s) => ParsecT s' u Identity s
unescaped = fromString <$> (:[]) <$> Parsec.noneOf "{}"
-- | Escaped open brace: @{{@ yields a literal @{@.
openescape :: (Stream s' Identity Char, IsString s) => ParsecT s' u Identity s
openescape = "{" <$ string "{{"
-- | Escaped close brace: @}}@ yields a literal @}@.
closeescape :: (Stream s' Identity Char, IsString s) => ParsecT s' u Identity s
closeescape = "}" <$ string "}}"
-- |
-- open :: (Stream s' Identity Char, IsString s) => ParsecT s' u Identity s
-- open = string "{"
--
--
-- -- |
-- close :: (Stream s' Identity Char, IsString s) => ParsecT s' u Identity s
-- close = string "}"
-- |
-- | Parse a @{key}@ format field.  The specifier part after @:@ is not
-- parsed yet; a placeholder 'Specifier' is produced instead.
format :: (Stream s' Identity Char) => ParsecT s' u Identity (FormatToken)
format = do
  Parsec.string "{"
  k <- key
  -- string ":"
  -- s <- specifier
  Parsec.string "}"
  return $ SpecifierToken (k, Specifier "undefined")
-- |
-- Parse the key of a field: all digits -> positional 'IndexKey',
-- other non-space text -> 'StringKey', empty -> 'EmptyKey'.
-- 'read' is safe here because 'indexed' matches digits only.
-- TODO: This needs a lot of work
key :: (Stream s' Identity Char) => ParsecT s' u Identity (Key)
key = (IndexKey . read <$> Parsec.try indexed) <|> (StringKey <$> Parsec.try named) <|> return EmptyKey
  where
    indexed = fromString <$> (Parsec.many1 $ Parsec.digit)
    named = fromString <$> (Parsec.many1 $ Parsec.noneOf "{} \n\t")
-- |
-- TOOD: Rename (?)
-- | Parse the raw text of a format specifier (currently unstructured).
-- TOOD: Rename (?)
specifier :: (Stream s' Identity Char) => ParsecT s' u Identity (Specifier)
specifier = do
  -- (key, spec) <- formatspec
  spec <- mconcat <$> Parsec.many1 unescaped
  return $ Specifier spec
-- | Structured specifier parser -- not implemented yet ('undefined').
formatspec :: (Stream s' Identity Char, IsString s) => ParsecT s' u Identity (k s)
formatspec = do
  undefined
  -- [[fill]align][sign][#][0][width][,][.precision][type]
-- |
-- Like 'Parsec.string' but returning any 'IsString' result.
-- generic ::
string :: (Stream s' Identity Char, IsString s) => String -> ParsecT s' u Identity s
string str = fromString <$> Parsec.string str
| SwiftsNamesake/Interpolate | lib/Interpolate/Parse.hs | mit | 4,798 | 0 | 12 | 745 | 817 | 463 | 354 | 42 | 1 |
module Magento.Skin (
addSkin
) where
import Control.Monad (unless)
import Control.Monad (when)
import Data.String.Utils (replace)
import System.Directory (createDirectoryIfMissing, doesFileExist)
import System.FilePath.Posix (joinPath, takeDirectory)

import Magento.Module (
    ModuleInfo,
    getName)
import Magento.Module.Path (skinBasePath)
import Util (lowercase, writeFileAndPrint)
import Util.XML (insertXmlIfMissing)
-- | Public entry point: ensure a skin file with the given name exists
-- for the given scope ("frontend" or "admin") of the module.
addSkin :: ModuleInfo -> String -> String -> IO ()
addSkin info scope name = do
    createSkinIfMissing info scope name
-- | Create the skin file's parent directory (if needed) and then the
-- file itself, never overwriting an existing file.
createSkinIfMissing :: ModuleInfo -> String -> String -> IO ()
createSkinIfMissing info scope name =
    let path = skinPath info scope name in do
        createDirectoryIfMissing True (takeDirectory path)
        writeSkinIfMissing path
-- | Create the skin file at @path@ unless it already exists; existing
-- files are never touched.
writeSkinIfMissing :: FilePath -> IO ()
writeSkinIfMissing path = do
    exists <- doesFileExist path
    -- 'unless exists' replaces the 'when (not exists)' anti-pattern.
    unless exists $ writeSkin path
-- | Write an empty skin file at @path@, printing the path as feedback.
writeSkin :: FilePath -> IO ()
writeSkin path = do
    writeFileAndPrint path ""
-- | Map a user-facing scope name onto the Magento skin directory name.
-- "admin" becomes "adminhtml"; "frontend" stays as-is.  Previously this
-- was partial (any other scope crashed with an inexhaustive pattern);
-- unknown scopes now pass through unchanged.
scopeName :: String -> String
scopeName "frontend" = "frontend"
scopeName "admin" = "adminhtml"
scopeName other = other
-- | Full path of a skin file:
-- @\<skin base for scope\>/\<lowercased module name\>/\<lowercased name\>@.
skinPath :: ModuleInfo -> String -> String -> FilePath
skinPath info scope name =
    joinPath [
        skinBasePath info (scopeName scope),
        lowercase $ getName info,
        (lowercase name)
    ]
| prasmussen/magmod | Magento/Skin.hs | mit | 1,292 | 0 | 11 | 246 | 391 | 202 | 189 | 36 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.List.NonEmpty
import Language.SAL
-- | Render the example SAL context to stdout.
main :: IO ()
main = putStrLn (renderSAL ctx)
-- | A SAL context named "mutex": a scalar PC type plus one module.
ctx :: Context
ctx = Context "mutex" Nothing body
  where
    body = ContextBody $ fromList [tyDecl, modDecl]
    tyDecl = TypeDecl "PC" (Just tyDef)
    tyDef = ScalarType $ fromList ["sleeping", "trying", "critical"]
    modDecl = ModuleDecl proc
-- | The "process" module: one side of the mutual-exclusion protocol.
-- Inputs are the peer's state (pc2, x2); outputs are our own (pc1, x1).
proc :: ModuleDeclaration
proc = ModuleDeclaration "process" modParams $ BaseModule [ins, outs, ini, transBlk]
  where
    pc = "PC" :: Identifier
    modParams = Just (VarDecls (VarDecl "tval" (TyBasic BOOLEAN) :| []))
    ins = InputDecl . VarDecls . fromList $
            [ decl "pc2" pc
            , decl "x2" BOOLEAN
            ]
    outs = OutputDecl . VarDecls . fromList $
            [ decl "pc1" pc
            , decl "x1" BOOLEAN
            ]
    -- Start in the "sleeping" state.
    ini = InitDecl (DOCDef (DefSimple (lhs "pc1" #= rhs "sleeping")) :| [])
    transBlk = TransDecl (trans :| [])
    trans = DOCCom
              (fromList [trWake, trEnter, trLeave])
              Nothing -- no ELSE block
    -- sleeping -> trying, publishing our ticket in x1.
    trWake = NamedCommand (Just "wakening") $
               GuardedCommand gWake aWake
    gWake = "pc1" .= "sleeping"
    aWake = [ lhs' "pc1" #= rhs "trying"
            , lhs' "x1" #= rhs ("x2" .= "tval")
            ]
    -- trying -> critical, when the peer sleeps or yields priority.
    trEnter = NamedCommand (Just "entering_critical") $
                GuardedCommand gEnter aEnter
    gEnter = "pc1" .= "trying" .&& ("pc2" .= "sleeping" .|| "x2" ./= "tval")
    aEnter = [ lhs' "pc1" #= rhs "critical" ]
    -- critical -> sleeping, refreshing x1 on the way out.
    trLeave = NamedCommand (Just "leaving_critical") $
                GuardedCommand gLeave aLeave
    gLeave = "pc1" .= "critical"
    aLeave = [ lhs' "pc1" #= rhs "sleeping"
            , lhs' "x1" #= rhs ("x2" .= "tval")
            ]
| GaloisInc/language-sal | examples/mutex.hs | mit | 1,645 | 0 | 15 | 403 | 557 | 291 | 266 | 41 | 1 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE TypeFamilies, FlexibleContexts #-}
module Database.Persist.Class.PersistStore
( PersistStore (..)
, getJust
, belongsTo
, belongsToJust
) where
import qualified Prelude
import Prelude hiding ((++), show)
import qualified Data.Text as T
import Control.Monad.Trans.Error (Error (..))
import Control.Monad.Trans.Class (lift)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Monoid (Monoid)
import Control.Exception.Lifted (throwIO)
import Data.Conduit.Internal (Pipe, ConduitM)
import Control.Monad.Logger (LoggingT)
import Control.Monad.Trans.Identity ( IdentityT)
import Control.Monad.Trans.List ( ListT )
import Control.Monad.Trans.Maybe ( MaybeT )
import Control.Monad.Trans.Error ( ErrorT )
import Control.Monad.Trans.Reader ( ReaderT )
import Control.Monad.Trans.Cont ( ContT )
import Control.Monad.Trans.State ( StateT )
import Control.Monad.Trans.Writer ( WriterT )
import Control.Monad.Trans.RWS ( RWST )
import Control.Monad.Trans.Resource ( ResourceT)
import qualified Control.Monad.Trans.RWS.Strict as Strict ( RWST )
import qualified Control.Monad.Trans.State.Strict as Strict ( StateT )
import qualified Control.Monad.Trans.Writer.Strict as Strict ( WriterT )
import Database.Persist.Class.PersistEntity
import Database.Persist.Types
-- | Backend-agnostic CRUD operations over a persistent store.
class MonadIO m => PersistStore m where
    -- Associated backend type; constrained to match each entity's
    -- 'PersistEntityBackend' in every method.
    type PersistMonadBackend m
    -- | Get a record by identifier, if available.
    get :: (PersistMonadBackend m ~ PersistEntityBackend val, PersistEntity val)
        => Key val -> m (Maybe val)
    -- | Create a new record in the database, returning an automatically created
    -- key (in SQL an auto-increment id).
    insert :: (PersistMonadBackend m ~ PersistEntityBackend val, PersistEntity val)
           => val -> m (Key val)
    -- | Same as 'insert', but doesn't return a @Key@.
    insert_ :: (PersistMonadBackend m ~ PersistEntityBackend val, PersistEntity val)
            => val -> m ()
    insert_ val = insert val >> return ()
    -- | Create multiple records in the database.
    -- SQL backends currently use the slow default implementation of
    -- @mapM insert@
    insertMany :: (PersistMonadBackend m ~ PersistEntityBackend val, PersistEntity val)
               => [val] -> m [Key val]
    insertMany = mapM insert
    -- | Create a new record in the database using the given key.
    insertKey :: (PersistMonadBackend m ~ PersistEntityBackend val, PersistEntity val)
              => Key val -> val -> m ()
    -- | Put the record in the database with the given key.
    -- Unlike 'replace', if a record with the given key does not
    -- exist then a new record will be inserted.
    repsert :: (PersistMonadBackend m ~ PersistEntityBackend val, PersistEntity val)
            => Key val -> val -> m ()
    -- | Replace the record in the database with the given
    -- key. Note that the result is undefined if such record does
    -- not exist, so you must use 'insertKey' or 'repsert' in
    -- these cases.
    replace :: (PersistMonadBackend m ~ PersistEntityBackend val, PersistEntity val)
            => Key val -> val -> m ()
    -- | Delete a specific record by identifier. Does nothing if record does
    -- not exist.
    delete :: (PersistMonadBackend m ~ PersistEntityBackend val, PersistEntity val)
           => Key val -> m ()
-- | Same as get, but for a non-null (not Maybe) foreign key
-- Unsafe unless your database is enforcing that the foreign key is valid.
-- Throws 'PersistForeignConstraintUnmet' (via 'throwIO') when no row
-- exists for the key.
getJust :: (PersistStore m, PersistEntity val, Show (Key val), PersistMonadBackend m ~ PersistEntityBackend val) => Key val -> m val
getJust key = get key >>= maybe
  (liftIO $ throwIO $ PersistForeignConstraintUnmet $ T.pack $ Prelude.show key)
  return
-- | curry this to make a convenience function that loads an associated model
-- > foreign = belongsTo foreignId
-- | Load the related record referenced by an optional foreign-key
-- field; yields 'Nothing' when the field itself is 'Nothing'.
belongsTo ::
  (PersistStore m
  , PersistEntity ent1
  , PersistEntity ent2
  , PersistMonadBackend m ~ PersistEntityBackend ent2
  ) => (ent1 -> Maybe (Key ent2)) -> ent1 -> m (Maybe ent2)
belongsTo foreignKeyField model = case foreignKeyField model of
    Nothing -> return Nothing
    Just f -> get f
-- | same as belongsTo, but uses @getJust@ and therefore is similarly unsafe
-- (throws when the referenced row is missing).
belongsToJust ::
  (PersistStore m
  , PersistEntity ent1
  , PersistEntity ent2
  , PersistMonadBackend m ~ PersistEntityBackend ent2)
  => (ent1 -> Key ent2) -> ent1 -> m ent2
belongsToJust getForeignKey model = getJust $ getForeignKey model
-- The CPP macros below generate boilerplate 'PersistStore' instances
-- for the standard monad transformers: each instance reuses the
-- underlying monad's backend type and lifts every operation with
-- 'lift'.  GOX is GO with an extra instance-context constraint.
#define DEF(T) { type PersistMonadBackend (T m) = PersistMonadBackend m; insert = lift . insert; insertKey k = lift . insertKey k; repsert k = lift . repsert k; replace k = lift . replace k; delete = lift . delete; get = lift . get }
#define GO(T) instance (PersistStore m) => PersistStore (T m) where DEF(T)
#define GOX(X, T) instance (X, PersistStore m) => PersistStore (T m) where DEF(T)
GO(LoggingT)
GO(IdentityT)
GO(ListT)
GO(MaybeT)
GOX(Error e, ErrorT e)
GO(ReaderT r)
GO(ContT r)
GO(StateT s)
GO(ResourceT)
GO(Pipe l i o u)
GO(ConduitM i o)
GOX(Monoid w, WriterT w)
GOX(Monoid w, RWST r w s)
GOX(Monoid w, Strict.RWST r w s)
GO(Strict.StateT s)
GOX(Monoid w, Strict.WriterT w)
#undef DEF
#undef GO
#undef GOX
| gbwey/persistentold | persistent/Database/Persist/Class/PersistStore.hs | mit | 5,241 | 0 | 11 | 1,074 | 1,258 | 677 | 581 | -1 | -1 |
module Data.CapnProto.Units where
import Data.Word
import Data.Int
import Foreign.Ptr
--------------------------------------------------------------------------------
-- Counts of bits; the unsuffixed alias uses the native 'Word'.
type BitCount = Word
type BitCount8 = Word8
type BitCount16 = Word16
type BitCount32 = Word32
type BitCount64 = Word64
-- Counts of bytes.
type ByteCount = Word
type ByteCount8 = Word8
type ByteCount16 = Word16
type ByteCount32 = Word32
type ByteCount64 = Word64
-- Counts of 64-bit words.
type WordCount = Word
type WordCount8 = Word8
type WordCount16 = Word16
type WordCount32 = Word32
type WordCount64 = Word64
-- Counts of list elements.
type ElementCount = Word
type ElementCount8 = Word8
type ElementCount16 = Word16
type ElementCount32 = Word32
type ElementCount64 = Word64
-- Counts of wire pointers.
type WirePointerCount = Word
type WirePointerCount8 = Word8
type WirePointerCount16 = Word16
type WirePointerCount32 = Word32
type WirePointerCount64 = Word64
--------------------------------------------------------------------------------
-- | Bytes in one Cap'n Proto word; the wire format is organised in 64-bit words.
bytesPerWord :: Int
bytesPerWord = 8
-- | Every pointer occupies exactly one word.
pointerSizeInWords :: Int
pointerSizeInWords = 1
-- | Bits in one byte.
bitsPerByte :: Int
bitsPerByte = 8
-- | Bits in one 64-bit word.
bitsPerWord :: Int
bitsPerWord = 64
-- | Pointers are word-sized, hence 64 bits.
bitsPerPointer :: Int
bitsPerPointer = 64
--------------------------------------------------------------------------------
-- TODO: move elsewhere
-- | Types with a distinguished "null" value that can be tested for.
class Nullable a where
    isNull :: a -> Bool
-- A raw pointer is null exactly when it equals 'nullPtr'.
instance Nullable (Ptr a) where
    isNull ptr = ptr == nullPtr
-- | Types with a canonical zero/default value.  Unlike 'Num' literals this
-- also covers 'Bool', where the default is 'False'.
class Zero a where
    zero :: a
instance Zero Bool where
    zero = False
-- Numeric instances all default to the literal 0.
instance Zero Word8 where
    zero = 0
instance Zero Word16 where
    zero = 0
instance Zero Word32 where
    zero = 0
instance Zero Word64 where
    zero = 0
instance Zero Int8 where
    zero = 0
instance Zero Int16 where
    zero = 0
instance Zero Int32 where
    zero = 0
instance Zero Int64 where
    zero = 0
instance Zero Float where
    zero = 0
instance Zero Double where
    zero = 0
| cstrahan/hs-capnp | src/Data/CapnProto/Units.hs | mit | 1,829 | 0 | 7 | 339 | 427 | 254 | 173 | 67 | 1 |
-- Mouad NACIRI
-- Master IAO
-- TP1: Les n mots les plus fréquents
import Data.Char as Char
-- | The @n@ most frequent words of the input text, paired with their
-- occurrence counts and ordered from most to least frequent.
nCommonWords :: Int -> String -> [(String, Int)]
nCommonWords _ [] = []
nCommonWords n text =
    take n (qSort' (compress (qSort (words text))))
words' :: String -> [String]
words' [] = []
words' xs = let (w, rest) = break Char.isSpace xs in w : (words' . dropWhile Char.isSpace $ rest)
qSort :: (Ord a) => [a] -> [a]
qSort [] = []
qSort (x:xs) = (qSort . filter (<x) $ xs) ++ x : (qSort . filter (>=x) $ xs)
compress :: [String] -> [(String, Int)]
compress [] = []
compress (x:xs) = let (y,ys) = span (==x) xs in (x, length y + 1) : compress ys
qSort' :: (Ord a, Ord b) => [(a,b)] -> [(a,b)]
qSort' [] = []
qSort' ((x,y):xs) = (qSort' . filter (\z -> snd z >= y) $ xs) ++ (x,y) : (qSort' . filter (\z -> snd z < y) $ xs)
| NMouad21/HaskellSamples | CommonWords.hs | mit | 817 | 0 | 14 | 183 | 494 | 267 | 227 | 16 | 1 |
{-|
Module : ParMean2
Description : calculates the mean in parallel
Copyright : (c) Fabrício Olivetti, 2017
License : GPL-3
Maintainer : fabricio.olivetti@gmail.com
Calculates the mean of every line of a list in parallel.
-}
module Main where
import System.IO
import System.Environment
import Control.Parallel.Strategies
import Data.List (concat)
import Data.List.Split (chunksOf)
-- |'parseFile' parses a space separated file
-- to a list of lists of Double
-- | Parse a whitespace-separated numeric file: one inner list of Doubles
-- per input line.  NOTE(review): 'read' raises on malformed tokens, as in
-- the original.
parseFile :: String -> [[Double]]
parseFile contents =
    [ [ read token | token <- words line ] | line <- lines contents ]
-- | Arithmetic mean of each row.  An empty row yields NaN (0/0 on
-- Doubles), matching the original behaviour.
mean :: [[Double]] -> [Double]
mean rows = [ sum row / fromIntegral (length row) | row <- rows ]
-- | Parallel per-row mean: splits the rows into chunks of 1000 and
-- evaluates each chunk's means as one spark, then flattens the results.
meanPar :: [[Double]] -> [Double]
meanPar l = concat lists
  where
    -- One spark per chunk; 'rseq' forces each chunk result only to WHNF.
    -- NOTE(review): deeper forcing (rdeepseq) may be needed for the sparks
    -- to do real work — confirm with profiling.
    lists = map mean chunks `using` parList rseq
    chunks = chunksOf 1000 l
-- | Program entry point: reads the file named by the first command-line
-- argument, parses it as whitespace-separated numbers, and prints the
-- per-row means computed in parallel.
main :: IO ()
main = do
  args <- getArgs
  -- NOTE(review): 'args !! 0' raises an index error when no argument is
  -- supplied; a usage message would be friendlier.
  file <- readFile (args !! 0)
  let dataset = parseFile file
  print (meanPar dataset)
| folivetti/BIGDATA | 03 - Paralelo/ParMean2.hs | mit | 1,076 | 0 | 10 | 230 | 298 | 158 | 140 | 23 | 1 |
-- | The second-to-last element of a list; errors on lists with fewer
-- than two elements.
myButLast :: [a] -> a
myButLast list = case list of
    [penultimate, _] -> penultimate
    (_ : rest)       -> myButLast rest
    _                -> error "Need at least two elements"
| tamasgal/haskell_exercises | 99questions/Problem02.hs | mit | 124 | 0 | 7 | 24 | 56 | 29 | 27 | 4 | 1 |
import Data.Char
import Data.List
ex21 lst = init $ init $ init $ tail lst
initials s1 s2 = head s1 : (". " ++ (head s2 : "."))
letterCount s = sum [length x | x <- words s, length x > 2]
-- | Case-insensitive palindrome test.
isPalindrome word = normalized == reverse normalized
  where normalized = toUppers word
-- | Uppercase every character of a string.
toUppers = map toUpper
flipp xss = concat $ reverse $ [reverse x | x <- xss]
-- | Lattice points (a, b) on the [-10, 10] x [-10, 10] grid lying within
-- distance r of centre (x, y).
inCircle r x y =
    [ (a, b) | a <- grid, b <- grid, sq (a - x) + sq (b - y) <= sq r ]
  where
    grid = [-10 .. 10]
-- | Square a number.
sq n = n * n
-- | Pair each element with its successor: steps [1,2,3] == [(1,2),(2,3)].
steps xs = zip xs (tail xs)
-- | Pair each element with its 1-based position.
index xs = zip [1..] xs
-- | 1-based positions at which @x@ occurs in @xs@.
indices x xs = [pos | (pos, el) <- index xs, el == x]
-- | Prefix every line of the input with its line number.
showLineNumbers s = [show n ++ ": " ++ l | (n, l) <- zip [1..] (lines s)]
-- | True when the two strings agree at some common position.  Replaces the
-- original O(n^2) cross-product over indexed pairs (and its O(n)
-- @length ... > 0@ test) with a single positional comparison; 'or'
-- short-circuits at the first match.
haveAlignment xs ys = or (zipWith (==) xs ys)
| kbiscanic/PUH | lectures/puh-lecture2.hs | mit | 746 | 0 | 11 | 210 | 480 | 235 | 245 | 15 | 1 |
--------------------------------------------------------------------------
-- Copyright (c) 2007-2010, ETH Zurich.
-- All rights reserved.
--
-- This file is distributed under the terms in the attached LICENSE file.
-- If you do not find this file, copies can be found by writing to:
-- ETH Zurich D-INFK, Haldeneggsteig 4, CH-8092 Zurich. Attn: Systems Group.
--
-- Default architecture-specific definitions for Barrelfish
--
--------------------------------------------------------------------------
module ArchDefaults where
import Data.List
import HakeTypes
import Path
import qualified Config
commonFlags = [ Str s | s <- [ "-fno-builtin",
"-nostdinc",
"-U__linux__",
"-Ulinux",
"-Wall",
"-Wshadow",
"-Wmissing-declarations",
"-Wmissing-field-initializers",
"-Wredundant-decls",
"-Werror",
"-imacros" ] ]
++ [ NoDep SrcTree "src" "/include/deputy/nodeputy.h" ]
commonCFlags = [ Str s | s <- [ "-std=c99",
"-U__STRICT_ANSI__", -- for newlib headers
"-Wstrict-prototypes",
"-Wold-style-definition",
"-Wmissing-prototypes" ] ]
++ [ ContStr Config.use_fp "-fno-omit-frame-pointer" ""]
commonCxxFlags = [ Str s | s <- [ "-nostdinc++",
"-std=c++0x",
"-fno-exceptions",
"-I" ] ]
++ [ NoDep SrcTree "src" "/include/cxx" ]
++ [ ContStr Config.use_fp "-fno-omit-frame-pointer" ""]
cFlags = [ Str s | s <- [ "-Wno-packed-bitfield-compat" ] ]
++ commonCFlags
cxxFlags = [ Str s | s <- [ "-Wno-packed-bitfield-compat" ] ]
++ commonCxxFlags
cDefines options = [ Str ("-D"++s) | s <- [ "BARRELFISH" ]]
++ Config.defines
++ Config.arch_defines options
cStdIncs arch archFamily =
[ NoDep SrcTree "src" "/include",
NoDep SrcTree "src" ("/include/arch" ./. archFamily),
NoDep SrcTree "src" Config.libcInc,
NoDep SrcTree "src" "/include/c",
NoDep SrcTree "src" ("/include/target" ./. archFamily),
NoDep SrcTree "src" "/include/ipv4", -- XXX
NoDep InstallTree arch "/include",
NoDep InstallTree arch "/include/dev",
NoDep SrcTree "src" ".",
NoDep BuildTree arch "." ]
ldFlags arch =
[ Str Config.cOptFlags,
In InstallTree arch "/lib/crt0.o",
In InstallTree arch "/lib/crtbegin.o",
Str "-fno-builtin",
Str "-nostdlib" ]
ldCxxFlags arch =
[ Str Config.cOptFlags,
In InstallTree arch "/lib/crt0.o",
In InstallTree arch "/lib/crtbegin.o",
Str "-fno-builtin",
Str "-nostdlib" ]
-- Libraries that are linked to all applications.
stdLibs arch =
[ In InstallTree arch "/lib/libbarrelfish.a",
In InstallTree arch "/lib/liboctopus_parser.a", -- XXX: For NS client in libbarrelfish
In InstallTree arch "/errors/errno.o",
In InstallTree arch ("/lib/lib" ++ Config.libc ++ ".a"),
--In InstallTree arch "/lib/libposixcompat.a",
--In InstallTree arch "/lib/libvfs.a",
--In InstallTree arch "/lib/libnfs.a",
--In InstallTree arch "/lib/liblwip.a",
--In InstallTree arch "/lib/libbarrelfish.a",
--In InstallTree arch "/lib/libcontmng.a",
--In InstallTree arch "/lib/libprocon.a",
In InstallTree arch "/lib/crtend.o" ,
In InstallTree arch "/lib/libcollections.a" ]
stdCxxLibs arch =
[ In InstallTree arch "/lib/libcxx.a",
Str "./libsupc++.a" ]
++ stdLibs arch
options arch archFamily = Options {
optArch = arch,
optArchFamily = archFamily,
optFlags = cFlags,
optCxxFlags = cxxFlags,
optDefines = [ Str "-DBARRELFISH" ] ++ Config.defines,
optIncludes = cStdIncs arch archFamily,
optDependencies =
[ PreDep InstallTree arch "/include/errors/errno.h",
PreDep InstallTree arch "/include/barrelfish_kpi/capbits.h",
PreDep InstallTree arch "/include/asmoffsets.h",
PreDep InstallTree arch "/include/trace_definitions/trace_defs.h" ],
optLdFlags = ldFlags arch,
optLdCxxFlags = ldCxxFlags arch,
optLibs = stdLibs arch,
optCxxLibs = stdCxxLibs arch,
optInterconnectDrivers = ["lmp", "ump", "multihop"],
optFlounderBackends = ["lmp", "ump", "multihop"],
extraFlags = [],
extraDefines = [],
extraIncludes = [],
extraDependencies = [],
extraLdFlags = [],
optSuffix = []
}
------------------------------------------------------------------------
--
-- Now, commands to actually do something
--
------------------------------------------------------------------------
--
-- C compiler
--
cCompiler arch compiler opts phase src obj =
let incls = (optIncludes opts) ++ (extraIncludes opts)
flags = (optFlags opts)
++ (optDefines opts)
++ [ Str f | f <- extraFlags opts ]
++ [ Str f | f <- extraDefines opts ]
deps = (optDependencies opts) ++ (extraDependencies opts)
in
[ Str compiler ] ++ flags ++ [ Str Config.cOptFlags ]
++ concat [ [ NStr "-I", i ] | i <- incls ]
++ [ Str "-o", Out arch obj,
Str "-c", In (if phase == "src" then SrcTree else BuildTree) phase src ]
++ deps
--
-- the C preprocessor, like C compiler but with -E
-- (emits the preprocessed source to @obj@ instead of compiling it)
--
cPreprocessor arch compiler opts phase src obj =
    let incls = (optIncludes opts) ++ (extraIncludes opts)
        flags = (optFlags opts)
                ++ (optDefines opts)
                ++ [ Str f | f <- extraFlags opts ]
                ++ [ Str f | f <- extraDefines opts ]
        deps = (optDependencies opts) ++ (extraDependencies opts)
        -- Strip "-g" from the configured optimisation flags; presumably
        -- debug info is meaningless for preprocessor-only output.
        cOptFlags = unwords ((words Config.cOptFlags) \\ ["-g"])
    in
      [ Str compiler ] ++ flags ++ [ Str cOptFlags ]
      ++ concat [ [ NStr "-I", i ] | i <- incls ]
      ++ [ Str "-o", Out arch obj,
           Str "-E", In (if phase == "src" then SrcTree else BuildTree) phase src ]
      ++ deps
--
-- C++ compiler
--
cxxCompiler arch cxxcompiler opts phase src obj =
let incls = (optIncludes opts) ++ (extraIncludes opts)
flags = (optCxxFlags opts)
++ (optDefines opts)
++ [ Str f | f <- extraFlags opts ]
++ [ Str f | f <- extraDefines opts ]
deps = (optDependencies opts) ++ (extraDependencies opts)
in
[ Str cxxcompiler ] ++ flags ++ [ Str Config.cOptFlags ]
++ concat [ [ NStr "-I", i ] | i <- incls ]
++ [ Str "-o", Out arch obj,
Str "-c", In (if phase == "src" then SrcTree else BuildTree) phase src ]
++ deps
--
-- Create C file dependencies
--
makeDepend arch compiler opts phase src obj depfile =
let incls = (optIncludes opts) ++ (extraIncludes opts)
flags = (optFlags opts)
++ (optDefines opts)
++ [ Str f | f <- extraFlags opts ]
++ [ Str f | f <- extraDefines opts ]
in
[ Str ('@':compiler) ] ++ flags
++ concat [ [ NStr "-I", i ] | i <- incls ]
++ (optDependencies opts) ++ (extraDependencies opts)
++ [ Str "-M -MF",
Out arch depfile,
Str "-MQ", NoDep BuildTree arch obj,
Str "-MQ", NoDep BuildTree arch depfile,
Str "-c", In (if phase == "src" then SrcTree else BuildTree) phase src
]
--
-- Create C++ file dependencies
--
makeCxxDepend arch cxxcompiler opts phase src obj depfile =
let incls = (optIncludes opts) ++ (extraIncludes opts)
flags = (optCxxFlags opts)
++ (optDefines opts)
++ [ Str f | f <- extraFlags opts ]
++ [ Str f | f <- extraDefines opts ]
in
[ Str ('@':cxxcompiler) ] ++ flags
++ concat [ [ NStr "-I", i ] | i <- incls ]
++ (optDependencies opts) ++ (extraDependencies opts)
++ [ Str "-M -MF",
Out arch depfile,
Str "-MQ", NoDep BuildTree arch obj,
Str "-MQ", NoDep BuildTree arch depfile,
Str "-c", In (if phase == "src" then SrcTree else BuildTree) phase src
]
--
-- Compile a C program to assembler
--
cToAssembler :: String -> String -> Options -> String -> String -> String -> String -> [ RuleToken ]
cToAssembler arch compiler opts phase src afile objdepfile =
let incls = (optIncludes opts) ++ (extraIncludes opts)
flags = (optFlags opts)
++ (optDefines opts)
++ [ Str f | f <- extraFlags opts ]
++ [ Str f | f <- extraDefines opts ]
deps = [ Dep BuildTree arch objdepfile ] ++ (optDependencies opts) ++ (extraDependencies opts)
in
[ Str compiler ] ++ flags ++ [ Str Config.cOptFlags ]
++ concat [ [ NStr "-I", i ] | i <- incls ]
++ [ Str "-o ", Out arch afile,
Str "-S ", In (if phase == "src" then SrcTree else BuildTree) phase src ]
++ deps
--
-- Assemble an assembly language file
--
assembler :: String -> String -> Options -> String -> String -> [ RuleToken ]
assembler arch compiler opts src obj =
let incls = (optIncludes opts) ++ (extraIncludes opts)
flags = (optFlags opts)
++ (optDefines opts)
++ [ Str f | f <- extraFlags opts ]
++ [ Str f | f <- extraDefines opts ]
deps = (optDependencies opts) ++ (extraDependencies opts)
in
[ Str compiler ] ++ flags ++ [ Str Config.cOptFlags ]
++ concat [ [ NStr "-I", i ] | i <- incls ]
++ [ Str "-o ", Out arch obj, Str "-c ", In SrcTree "src" src ]
++ deps
--
-- Create a library from a set of object files
--
archive :: String -> Options -> [String] -> [String] -> String -> String -> [ RuleToken ]
archive arch opts objs libs name libname =
[ Str "rm -f ", Out arch libname ]
++
[ NL, Str "ar cr ", Out arch libname ]
++
[ In BuildTree arch o | o <- objs ]
++
if libs == [] then []
else (
[ NL, Str ("rm -fr tmp-" ++ name ++ "; mkdir tmp-" ++ name) ]
++
[ NL, Str ("cd tmp-" ++ name ++ "; for i in ") ]
++
[ In BuildTree arch a | a <- libs ]
++
[ Str "; do ar x ../$$i; done" ]
++
[ NL, Str "ar q ", Out arch libname, Str (" tmp-" ++ name ++ "/*.o") ]
++
[ NL, Str ("rm -fr tmp-" ++ name) ]
)
++
[ NL, Str "ranlib ", Out arch libname ]
--
-- Link an executable: compiler driver, then linker flags (global and
-- per-target), output file, object files, static libraries, and finally
-- the standard libraries configured in the options.
--
linker :: String -> String -> Options -> [String] -> [String] -> String -> [RuleToken]
linker arch compiler opts objs libs bin =
    [ Str compiler ]
    ++ (optLdFlags opts)
    ++
    (extraLdFlags opts)
    ++
    [ Str "-o", Out arch bin ]
    ++
    [ In BuildTree arch o | o <- objs ]
    ++
    [ In BuildTree arch l | l <- libs ]
    ++
    (optLibs opts)
--
-- Link an executable
--
cxxlinker :: String -> String -> Options -> [String] -> [String] -> String -> [RuleToken]
cxxlinker arch cxxcompiler opts objs libs bin =
[ Str cxxcompiler ]
++ (optLdCxxFlags opts)
++
(extraLdFlags opts)
++
[ Str "-o", Out arch bin ]
++
[ In BuildTree arch o | o <- objs ]
++
[ In BuildTree arch l | l <- libs ]
++
(optCxxLibs opts)
| CoryXie/BarrelfishOS | hake/ArchDefaults.hs | mit | 11,796 | 0 | 19 | 3,917 | 3,266 | 1,724 | 1,542 | 235 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE RecordWildCards #-}
import Control.Monad (unless)
import Data.Monoid
import Data.Version (showVersion)
import Options.Applicative
import System.Environment (getEnvironment)
import System.Exit (ExitCode (ExitSuccess), exitWith, exitFailure)
import System.FilePath (splitSearchPath)
import System.Process (rawSystem)
import AddHandler (addHandler)
import Devel (DevelOpts (..), devel, DevelTermOpt(..))
import Keter (keter)
import Options (injectDefaults)
import qualified Paths_yesod_bin
import System.IO (hPutStrLn, stderr)
import HsFile (mkHsFile)
#ifndef WINDOWS
import Build (touch)
touch' :: IO ()
touch' = touch
windowsWarning :: String
windowsWarning = ""
#else
touch' :: IO ()
touch' = return ()
windowsWarning :: String
windowsWarning = " (does not work on Windows)"
#endif
data CabalPgm = Cabal | CabalDev deriving (Show, Eq)
data Options = Options
{ optCabalPgm :: CabalPgm
, optVerbose :: Bool
, optCommand :: Command
}
deriving (Show, Eq)
data Command = Init [String]
| HsFiles
| Configure
| Build { buildExtraArgs :: [String] }
| Touch
| Devel { _develDisableApi :: Bool
, _develSuccessHook :: Maybe String
, _develFailHook :: Maybe String
, _develRescan :: Int
, _develBuildDir :: Maybe String
, develIgnore :: [String]
, develExtraArgs :: [String]
, _develPort :: Int
, _develTlsPort :: Int
, _proxyTimeout :: Int
, _noReverseProxy :: Bool
, _interruptOnly :: Bool
}
| Test
| AddHandler
{ addHandlerRoute :: Maybe String
, addHandlerPattern :: Maybe String
, addHandlerMethods :: [String]
}
| Keter
{ _keterNoRebuild :: Bool
, _keterNoCopyTo :: Bool
, _keterBuildArgs :: [String]
}
| Version
deriving (Show, Eq)
-- | Name of the cabal executable to invoke, honouring the --dev flag.
cabalCommand :: Options -> String
cabalCommand opts = case optCabalPgm opts of
    CabalDev -> "cabal-dev"
    Cabal    -> "cabal"
main :: IO ()
main = do
o <- execParser =<< injectDefaults "yesod"
[ ("yesod.devel.extracabalarg" , \o args -> o { optCommand =
case optCommand o of
d@Devel{} -> d { develExtraArgs = args }
c -> c
})
, ("yesod.devel.ignore" , \o args -> o { optCommand =
case optCommand o of
d@Devel{} -> d { develIgnore = args }
c -> c
})
, ("yesod.build.extracabalarg" , \o args -> o { optCommand =
case optCommand o of
b@Build{} -> b { buildExtraArgs = args }
c -> c
})
] optParser'
let cabal = rawSystem' (cabalCommand o)
case optCommand o of
Init _ -> initErrorMsg
HsFiles -> mkHsFile
Configure -> cabal ["configure"]
Build es -> touch' >> cabal ("build":es)
Touch -> touch'
Keter{..} -> keter (cabalCommand o) _keterNoRebuild _keterNoCopyTo _keterBuildArgs
Version -> putStrLn ("yesod-bin version: " ++ showVersion Paths_yesod_bin.version)
AddHandler{..} -> addHandler addHandlerRoute addHandlerPattern addHandlerMethods
Test -> cabalTest cabal
Devel{..} ->do
(configOpts, menv) <- handleGhcPackagePath
let develOpts = DevelOpts
{ isCabalDev = optCabalPgm o == CabalDev
, forceCabal = _develDisableApi
, verbose = optVerbose o
, eventTimeout = _develRescan
, successHook = _develSuccessHook
, failHook = _develFailHook
, buildDir = _develBuildDir
, develPort = _develPort
, develTlsPort = _develTlsPort
, proxyTimeout = _proxyTimeout
, useReverseProxy = not _noReverseProxy
, terminateWith = if _interruptOnly then TerminateOnlyInterrupt else TerminateOnEnter
, develConfigOpts = configOpts
, develEnv = menv
}
devel develOpts develExtraArgs
where
cabalTest cabal = do
env <- getEnvironment
case lookup "STACK_EXE" env of
Nothing -> do
touch'
_ <- cabal ["configure", "--enable-tests", "-flibrary-only"]
_ <- cabal ["build"]
cabal ["test"]
Just _ -> do
hPutStrLn stderr "'yesod test' is no longer needed with Stack"
hPutStrLn stderr "Instead, please just run 'stack test'"
exitFailure
initErrorMsg = do
mapM_ putStrLn
[ "The init command has been removed."
, "Please use 'stack new <project name> <template>' instead where the"
, "available templates can be found by running 'stack templates'. For"
, "a Yesod based application you should probably choose one of the"
, "pre-canned Yesod templates."
]
exitFailure
-- | Translate a GHC_PACKAGE_PATH environment variable into explicit
-- @--package-db@ flags plus (when the variable is set) a copy of the
-- environment with it removed, for passing to child processes.
handleGhcPackagePath :: IO ([String], Maybe [(String, String)])
handleGhcPackagePath = do
    env <- getEnvironment
    case lookup "GHC_PACKAGE_PATH" env of
        Nothing -> return ([], Nothing)
        Just gpp -> do
            -- The search path entries are emitted in reverse order with the
            -- original last entry dropped — presumably the global db, which
            -- is re-added explicitly above.  TODO(review): confirm.
            let opts = "--package-db=clear"
                     : "--package-db=global"
                     : map ("--package-db=" ++)
                       (drop 1 $ reverse $ splitSearchPath gpp)
            return (opts, Just $ filter (\(x, _) -> x /= "GHC_PACKAGE_PATH") env)
optParser' :: ParserInfo Options
optParser' = info (helper <*> optParser) ( fullDesc <> header "Yesod Web Framework command line utility" )
optParser :: Parser Options
optParser = Options
<$> flag Cabal CabalDev ( long "dev" <> short 'd' <> help "use cabal-dev" )
<*> switch ( long "verbose" <> short 'v' <> help "More verbose output" )
<*> subparser ( command "init" (info (helper <*> initOptions)
(progDesc "Command no longer available, please use 'stack new'"))
<> command "hsfiles" (info (pure HsFiles)
(progDesc "Create a hsfiles file for the current folder"))
<> command "configure" (info (pure Configure)
(progDesc "Configure a project for building"))
<> command "build" (info (helper <*> (Build <$> extraCabalArgs))
(progDesc $ "Build project (performs TH dependency analysis)" ++ windowsWarning))
<> command "touch" (info (pure Touch)
(progDesc $ "Touch any files with altered TH dependencies but do not build" ++ windowsWarning))
<> command "devel" (info (helper <*> develOptions)
(progDesc "Run project with the devel server"))
<> command "test" (info (pure Test)
(progDesc "Build and run the integration tests"))
<> command "add-handler" (info (helper <*> addHandlerOptions)
(progDesc ("Add a new handler and module to the project."
++ " Interactively asks for input if you do not specify arguments.")))
<> command "keter" (info (helper <*> keterOptions)
(progDesc "Build a keter bundle"))
<> command "version" (info (pure Version)
(progDesc "Print the version of Yesod"))
)
initOptions :: Parser Command
initOptions = Init <$> many (argument str mempty)
keterOptions :: Parser Command
keterOptions = Keter
<$> switch ( long "nobuild" <> short 'n' <> help "Skip rebuilding" )
<*> switch ( long "nocopyto" <> help "Ignore copy-to directive in keter config file" )
<*> optStrToList ( long "build-args" <> help "Build arguments" )
where
optStrToList m = option (words <$> str) $ value [] <> m
defaultRescan :: Int
defaultRescan = 10
develOptions :: Parser Command
develOptions = Devel <$> switch ( long "disable-api" <> short 'd'
<> help "Disable fast GHC API rebuilding")
<*> optStr ( long "success-hook" <> short 's' <> metavar "COMMAND"
<> help "Run COMMAND after rebuild succeeds")
<*> optStr ( long "failure-hook" <> short 'f' <> metavar "COMMAND"
<> help "Run COMMAND when rebuild fails")
<*> option auto ( long "event-timeout" <> short 't' <> value defaultRescan <> metavar "N"
<> help ("Force rescan of files every N seconds (default "
++ show defaultRescan
++ ", use -1 to rely on FSNotify alone)") )
<*> optStr ( long "builddir" <> short 'b'
<> help "Set custom cabal build directory, default `dist'")
<*> many ( strOption ( long "ignore" <> short 'i' <> metavar "DIR"
<> help "ignore file changes in DIR" )
)
<*> extraCabalArgs
<*> option auto ( long "port" <> short 'p' <> value 3000 <> metavar "N"
<> help "Devel server listening port" )
<*> option auto ( long "tls-port" <> short 'q' <> value 3443 <> metavar "N"
<> help "Devel server listening port (tls)" )
<*> option auto ( long "proxy-timeout" <> short 'x' <> value 0 <> metavar "N"
<> help "Devel server timeout before returning 'not ready' message (in seconds, 0 for none)" )
<*> switch ( long "disable-reverse-proxy" <> short 'n'
<> help "Disable reverse proxy" )
<*> switch ( long "interrupt-only" <> short 'c'
<> help "Disable exiting when enter is pressed")
extraCabalArgs :: Parser [String]
extraCabalArgs = many (strOption ( long "extra-cabal-arg" <> short 'e' <> metavar "ARG"
<> help "pass extra argument ARG to cabal")
)
addHandlerOptions :: Parser Command
addHandlerOptions = AddHandler
<$> optStr ( long "route" <> short 'r' <> metavar "ROUTE"
<> help "Name of route (without trailing R). Required.")
<*> optStr ( long "pattern" <> short 'p' <> metavar "PATTERN"
<> help "Route pattern (ex: /entry/#EntryId). Defaults to \"\".")
<*> many (strOption ( long "method" <> short 'm' <> metavar "METHOD"
<> help "Takes one method. Use this multiple times to add multiple methods. Defaults to none.")
)
-- | Optional @String@ argument: wraps optparse-applicative's 'option' so
-- that an omitted flag yields 'Nothing' instead of failing the parse.
optStr :: Mod OptionFields (Maybe String) -> Parser (Maybe String)
optStr m = option (Just <$> str) $ value Nothing <> m
-- | Run an external command and, if it exits with a non-success code,
-- terminate the current process with that same code.
rawSystem' :: String -> [String] -> IO ()
rawSystem' cmd args = do
    code <- rawSystem cmd args
    case code of
        ExitSuccess -> return ()
        failed      -> exitWith failed
| erikd/yesod | yesod-bin/main.hs | mit | 12,397 | 0 | 21 | 5,044 | 2,602 | 1,337 | 1,265 | 216 | 15 |
module Main where
import Control.Applicative
import Control.Monad
import Network.Google.Api
import Network.Google.Drive.File
import Network.Google.OAuth2
import System.Environment.XDG.BaseDir
import System.FilePath
import Drync.Client
import Drync.Config
import Drync.Options
import Drync.Sync
-- | Entry point: build options from config file, excludes file and CLI
-- flags, obtain an OAuth token, then sync the configured local directory
-- against the Drive "root" folder.
main :: IO ()
main = do
    options <- join $ getOptions
        <$> (readConfig =<< configFile)
        <*> (readExcludes =<< excludesFile)
    -- With --refresh no cached token file is supplied, forcing a new token.
    mfile <- if oRefresh options
        then return Nothing
        else fmap Just $ tokenFile $ oProfile options
    token <- getAccessToken client scopes mfile
    runApi_ token $ do
        -- NOTE(review): partial pattern — fails if "root" cannot be fetched.
        Just syncTo <- getFile "root"
        syncDirectory options (oSyncFrom options) syncTo
-- | Application name, used for the XDG cache/config directory lookups below.
appName :: String
appName = "drync"
-- | OAuth2 scopes requested: full Google Drive access.
scopes :: [OAuth2Scope]
scopes = ["https://www.googleapis.com/auth/drive"]
-- | Per-profile OAuth token cache file (e.g. @<cache dir>/drync/<profile>.token@).
tokenFile :: String -> IO FilePath
tokenFile profile = do
    cdir <- getUserCacheDir appName
    return $ cdir </> profile <.> "token"
-- | Main configuration file (e.g. @<config dir>/drync/config@).
configFile :: IO FilePath
configFile = do
    cdir <- getUserConfigDir appName
    return $ cdir </> "config"
-- | Sync-exclusion patterns file (e.g. @<config dir>/drync/exclude@).
excludesFile :: IO FilePath
excludesFile = do
    cdir <- getUserConfigDir appName
    return $ cdir </> "exclude"
| pbrisbin/drync | main.hs | mit | 1,199 | 0 | 12 | 245 | 340 | 174 | 166 | 40 | 2 |
module Command.Migrate (migrateCommand, migrate, currentMigration, migrationRequired) where
import Control.Monad (when)
import Data.Char
import Data.List (genericDrop, genericLength)
import Data.Maybe (fromJust)
import Data.UUID (toString)
import Data.UUID.V1 (nextUUID)
import Database.HDBC
import Database.HDBC.Sqlite3
import System.IO
import System.Directory
import System.FilePath ((</>), splitExtension, takeFileName)
import Paths_manila
import Util
data Migration = SchemaMigration FilePath | DataMigration (Connection -> IO ())
migrations :: [Migration]
migrations = [ SchemaMigration "0001.sql"
, DataMigration secondMigration
]
-- | Placeholder data migration; currently only prints a message and
-- ignores the connection.
secondMigration :: Connection -> IO ()
secondMigration conn = putStrLn "Test second migration"
-- | CLI entry point for the migrate command: when migrations are pending,
-- back the project database up and apply them.  Both parameters are
-- currently unused.
migrateCommand :: [String] -> String -> IO ()
migrateCommand args flags = do
    conn <- getDbConnection
    from <- currentMigration conn
    if from < genericLength migrations
        then do
            backupProject
            migrateFrom conn from
            putStrLn "Migration successful."
        else putStrLn "No migration necessary."
    -- NOTE(review): 'disconnect' is skipped if migration throws; consider
    -- bracketing the connection.
    disconnect conn
-- | Apply any pending migrations on an existing connection, without the
-- backup or console output of 'migrateCommand'.
migrate :: Connection -> IO ()
migrate conn = do
    from <- currentMigration conn
    when (from < genericLength migrations) $
        migrateFrom conn from
-- | True when migrations newer than the database's recorded one exist.
migrationRequired :: Connection -> IO Bool
migrationRequired conn =
    fmap (< genericLength migrations) (currentMigration conn)
-- | Migration number recorded in the database: 0 when the query fails
-- (e.g. the schema_migration table does not exist yet) or returns no rows,
-- otherwise the first column of the first row.
currentMigration :: Connection -> IO Integer
currentMigration conn = do
    currentMigrationResult <- trySql $ quickQuery' conn "SELECT * FROM schema_migration WHERE id = 1" []
    return $ case currentMigrationResult of
        Left e -> 0
        Right [] -> 0
        -- NOTE(review): 'head (head x)' assumes the first row is non-empty.
        Right x -> fromSql $ head (head x)
-- | Copy the project database to a uniquely named backup file before
-- migrating, and report the backup's name.
backupProject :: IO ()
backupProject = do
    -- 'nextUUID' can return 'Nothing' (e.g. when called again before the
    -- clock advances); retry instead of crashing in 'fromJust'.
    uuid <- untilJust nextUUID
    let backupFileName = "manila_backup_" ++ toString uuid ++ ".db"
    copyFile "manila.db" backupFileName
    putStrLn $ "A backup of this project has been saved to " ++ backupFileName
  where
    untilJust act = act >>= maybe (untilJust act) return
-- | Apply every migration after index @from@ in order, then record the
-- new latest migration number.
migrateFrom :: Connection -> Integer -> IO ()
migrateFrom conn from = do
    let futureMigrations = genericDrop from migrations
    mapM_ (runMigration conn) futureMigrations
    saveMigrationNumber conn (genericLength migrations)
-- | Apply a single migration against the open connection.
runMigration :: Connection -> Migration -> IO ()
runMigration conn (SchemaMigration fileName) = do
    dataDir <- getDataDir
    -- Resolve the schema file inside the installed data directory and run
    -- that path.  (Previously the bare file name was passed to
    -- 'executeSqlFile' and the computed path was silently discarded.)
    let migrationPath = dataDir </> "migrations" </> fileName
    executeSqlFile conn migrationPath
runMigration conn (DataMigration f) = f conn
-- | Persist the number of the newest applied migration in the single-row
-- schema_migration table: insert on first run, update thereafter.
saveMigrationNumber :: Connection -> Integer -> IO ()
saveMigrationNumber conn migrationNumber = do
    updateOrInsert conn
        ("SELECT * FROM schema_migration WHERE id = 1", [])
        -- The UPDATE statement contains one placeholder, so it must be
        -- given the bound value too; the original passed an empty
        -- parameter list here, which would fail whenever the row existed.
        ("UPDATE schema_migration SET migration_number=? WHERE id = 1", [toSql migrationNumber])
        ("INSERT INTO schema_migration (migration_number) VALUES (?)", [toSql migrationNumber])
    return ()
-- migrateFrom :: Connection -> Integer -> Bool -> IO ()
-- migrateFrom conn from createBackup = do
-- futureMigrationPaths <- futureMigrations from
-- if (length futureMigrationPaths /= 0)
-- then do when createBackup backupProject
-- runMigrations conn futureMigrationPaths
-- putStrLn "Migration successful."
-- else putStrLn "No migration necessary."
-- runMigrations :: Connection -> [FilePath] -> IO ()
-- runMigrations conn migrationPaths = do
-- mapM_ (executeSqlFile conn) migrationPaths
-- let lastMigration = read (filter isDigit (takeFileName (last migrationPaths))) :: Integer
-- updateOrInsert conn
-- ("SELECT * FROM schema_migration WHERE id = 1", [])
-- ("UPDATE schema_migration SET migration_number=? WHERE id = 1", [])
-- ("INSERT INTO schema_migration (migration_number) VALUES (?)", [toSql lastMigration])
-- return ()
-- backupProject :: IO ()
-- backupProject = do
-- uuid <- nextUUID
-- let backupFileName = "manila_backup_" ++ (toString $ fromJust uuid) ++ ".db"
-- copyFile "manila.db" backupFileName
-- putStrLn $ "A backup of this project has been saved to " ++ backupFileName
-- currentMigration :: Connection -> IO Integer
-- currentMigration conn = do
-- currentMigrationResult <- trySql $ quickQuery' conn "SELECT * FROM schema_migration WHERE id = 1" []
-- return $ case currentMigrationResult of
-- Left e -> 0
-- Right [] -> 0
-- Right x -> fromSql $ head (head x)
-- futureMigrations :: Integer -> IO [FilePath]
-- futureMigrations from = do
-- dataDir <- getDataDir
-- let migrationDir = dataDir </> "migrations"
-- directoryFiles <- getDirectoryContents migrationDir
-- let migrationFiles = filter (hasExtension ".sql") directoryFiles
-- let futureMigrations = dropWhile (`stringLEInteger` from) migrationFiles
-- return $ map (migrationDir </>) futureMigrations
-- hasExtension :: String -> FilePath -> Bool
-- hasExtension extension path =
-- let (_, ext) = splitExtension path
-- in extension == ext
-- stringLEInteger :: String -> Integer -> Bool
-- s `stringLEInteger` i = read (filter isDigit s) <= i
-- migrate :: Connection -> Bool -> IO ()
-- migrate conn createBackup = do
-- from <- currentMigration conn
-- migrateFrom conn from createBackup
| jpotterm/manila-hs | src/Command/Migrate.hs | cc0-1.0 | 5,364 | 0 | 14 | 1,209 | 833 | 439 | 394 | 71 | 3 |
module Probability.ProbabilityMeasure.Macro where
import Types
import Macro.Tuple
import Functions.Application.Macro
import Probability.Intro.Macro
import Probability.SigmaAlgebra.Macro
-- * Probability space
-- | Probability space given a universe, sigma algebra and probability measure
prsp :: Note -> Note -> Note -> Note
prsp = triple
-- | Concrete probability space
prsp_ :: Note
prsp_ = prsp univ_ sa_ prm_
-- | Probability space over the reals with the Borel sigma algebra
prbsp :: Note
prbsp = prsp reals boreals prm_
-- | Concrete probability measure
prm_ :: Note
prm_ = "Pr"
-- | Probability with given probability measure
prm :: Note -> Note -> Note
prm = fn -- probability with custom measure
-- | Probability under the concrete measure 'prm_'
prob :: Note -> Note
prob = prm prm_
| NorfairKing/the-notes | src/Probability/ProbabilityMeasure/Macro.hs | gpl-2.0 | 753 | 0 | 7 | 171 | 141 | 84 | 57 | 18 | 1 |
{-# LANGUAGE MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-}
{- |
Module : $Header$
Description : logic instance for FPL
Copyright : (c) Christian Maeder, DFKI GmbH 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable
Instance of class Logic for FPL.
-}
module Fpl.Logic_Fpl where
import Logic.Logic
import Fpl.As
import Fpl.Sign
import Fpl.StatAna
import Fpl.Morphism
import Fpl.ATC_Fpl ()
import CASL.Sign
import CASL.Morphism
import CASL.SymbolMapAnalysis
import CASL.AS_Basic_CASL
import CASL.Parse_AS_Basic
import CASL.SimplifySen
import CASL.SymbolParser
import CASL.Taxonomy
import CASL.Logic_CASL ()
import Common.DocUtils
-- | Tag type naming the FPL logic instance.
data Fpl = Fpl deriving Show
instance Language Fpl where
    description _ = unlines
        [ "logic of functional programs (FPL) as CASL extension" ]
instance SignExtension SignExt where
isSubSignExtension = isSubFplSign
instance Syntax Fpl FplBasicSpec SYMB_ITEMS SYMB_MAP_ITEMS where
parse_basic_spec Fpl = Just $ basicSpec fplReservedWords
parse_symb_items Fpl = Just $ symbItems fplReservedWords
parse_symb_map_items Fpl = Just $ symbMapItems fplReservedWords
instance Sentences Fpl FplForm FplSign FplMor Symbol where
map_sen Fpl m = return . mapFplSen m
sym_of Fpl = symOf
symmap_of Fpl = morphismToSymbMap
sym_name Fpl = symName
simplify_sen Fpl = simplifySen minFplTerm simplifyTermExt . addBuiltins
print_sign Fpl = printSign pretty
instance StaticAnalysis Fpl FplBasicSpec FplForm
SYMB_ITEMS SYMB_MAP_ITEMS
FplSign
FplMor
Symbol RawSymbol where
basic_analysis Fpl = Just basicFplAnalysis
stat_symb_map_items Fpl = statSymbMapItems
stat_symb_items Fpl = statSymbItems
symbol_to_raw Fpl = symbolToRaw
id_to_raw Fpl = idToRaw
matches Fpl = CASL.Morphism.matches
empty_signature Fpl = emptySign emptyFplSign
signature_union Fpl s = return . addSig addFplSign s
signatureDiff Fpl s = return . diffSig diffFplSign s
intersection Fpl s = return . interSig interFplSign s
morphism_union Fpl = plainMorphismUnion addFplSign
final_union Fpl = finalUnion addFplSign
is_subsig Fpl = isSubSig isSubFplSign
subsig_inclusion Fpl = sigInclusion emptyMorExt
cogenerated_sign Fpl = cogeneratedSign emptyMorExt
generated_sign Fpl = generatedSign emptyMorExt
induced_from_morphism Fpl = inducedFromMorphism emptyMorExt
induced_from_to_morphism Fpl =
inducedFromToMorphism emptyMorExt isSubFplSign diffFplSign
theory_to_taxonomy Fpl = convTaxo
instance Logic Fpl ()
FplBasicSpec FplForm SYMB_ITEMS SYMB_MAP_ITEMS
FplSign
FplMor
Symbol RawSymbol () where
stability _ = Unstable
empty_proof_tree _ = ()
| nevrenato/Hets_Fork | Fpl/Logic_Fpl.hs | gpl-2.0 | 3,028 | 0 | 7 | 736 | 579 | 293 | 286 | 67 | 0 |
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module Sound.SC3.Server.State.Monad.Command (
-- * Requests
Request
, R.exec
, R.exec_
, Result
, R.extract
-- * Master controls
, status
, statusM
, PrintLevel(..)
, dumpOSC
, clearSched
, ErrorScope(..)
, ErrorMode(..)
, errorMode
-- * Synth definitions
, SynthDef(name)
-- , d_recv
, d_named
, d_default
, d_recv
, d_load
, d_loadDir
, d_free
-- * Resources
-- ** Nodes
, Node(..)
, AddAction(..)
, AbstractNode
, node
, n_after
, n_before
, n_fill
, n_free
, BusMapping(..)
, n_query_
, n_query
, n_queryM
, n_run_
, n_set
, n_setn
, n_trace
, n_order
-- *** Synths
, Synth(..)
, s_new
, s_new_
, s_release
, s_get
, s_getn
, s_noid
-- *** Groups
, Group(..)
, rootNode
, g_new
, g_new_
, g_deepFree
, g_freeAll
, g_head
, g_tail
, g_dumpTree
--, g_queryTree
-- ** Plugin Commands
, cmd
-- ** Unit Generator Commands
, u_cmd
-- ** Buffers
, Buffer
, bufferId
, b_alloc
, b_allocRead
, b_allocReadChannel
, b_read
, b_readChannel
, SoundFileFormat(..)
, SampleFormat(..)
, b_write
, b_free
, b_zero
, b_set
, b_setn
, b_fill
, b_gen
, b_gen_sine1
, b_gen_sine2
, b_gen_sine3
, b_gen_cheby
, b_gen_copy
, b_close
, b_query
, b_queryM
--, b_get
--, b_getn
-- ** Buses
, Bus(..)
, AudioBus(audioBusIdRange)
, audioBusId
, inputBus
, outputBus
, newAudioBus
, ControlBus(controlBusIdRange)
, controlBusId
, newControlBus
-- *** Control Bus Commands
--, c_set
--, c_setn
--, c_fill
--, c_get
--, c_getn
) where
--import qualified Codec.Compression.BZip as BZip
--import qualified Codec.Digest.SHA as SHA
import Control.Arrow (first)
import Control.Failure (Failure, failure)
import Control.Monad (liftM, unless)
import Control.Monad.IO.Class (MonadIO)
import Data.Int
import Sound.OSC (Datum(..), OSC(..))
import Sound.SC3 (Rate(..), UGen)
import Sound.SC3.Server.Allocator.Range (Range)
import qualified Sound.SC3.Server.Allocator.Range as Range
import qualified Sound.SC3.Server.Synthdef as Synthdef
import Sound.SC3.Server.Allocator (AllocFailure(..))
import Sound.SC3.Server.Enum (AddAction(..), B_Gen, ErrorScope(..), ErrorMode(..), PrintLevel(..))
import qualified Sound.SC3.Server.Command as C
import qualified Sound.SC3.Server.Command.Completion as CC
import Sound.SC3.Server.Enum (SoundFileFormat(..), SampleFormat(..))
import qualified Sound.SC3.Server.Notification as N
import Sound.SC3.Server.Process.Options (ServerOptions(..))
import Sound.SC3.Server.State (AudioBusId, BufferId, ControlBusId, NodeId)
import Sound.SC3.Server.State.Monad (sendOSC)
import qualified Sound.SC3.Server.State.Monad as M
import Sound.SC3.Server.State.Monad.Class (MonadIdAllocator, MonadServer, RequestOSC, serverOption)
import Sound.SC3.Server.State.Monad.Request (Request, Result, after_, finally, mkAsync, mkAsync_, mkSync, waitFor)
import qualified Sound.SC3.Server.State.Monad.Request as R
-- ====================================================================
-- Utils
-- | Construct a dispatch function suitable for 'mkAsync': use the first
-- argument when no completion packet is supplied, otherwise apply the
-- second to it. This is exactly the Prelude's 'maybe' eliminator, so use
-- it instead of hand-rolled pattern matching.
mkC :: OSC o => a -> (o -> a) -> (Maybe o -> a)
mkC f g = maybe f g
-- | Execute a request and immediately extract its result.
get :: (MonadIdAllocator m, RequestOSC m, MonadIO m) => Request m (Result a) -> m a
get req = R.extract =<< R.exec_ req
-- | Send a packet followed by a freshly allocated sync marker, so the
-- request completes only after the server has processed the packet.
withSync :: MonadIdAllocator m => OSC o => o -> Request m ()
withSync packet = do
  sendOSC packet
  sync <- mkSync
  sendOSC sync
-- ====================================================================
-- Master controls
-- | Request server status, waiting for the @status.reply@ notification.
status :: MonadIO m => Request m (Result N.Status)
status = sendOSC C.status >> waitFor N.status_reply

-- | Request server status and extract the result.
statusM :: (MonadIdAllocator m, RequestOSC m, MonadIO m) => m N.Status
statusM = get status
-- | Select printing of incoming Open Sound Control messages.
dumpOSC :: MonadIdAllocator m => PrintLevel -> Request m ()
dumpOSC level = withSync (C.dumpOSC level)

-- | Remove all bundles from the scheduling queue.
clearSched :: Monad m => Request m ()
clearSched = sendOSC C.clearSched

-- | Set error posting scope and mode.
errorMode :: Monad m => ErrorScope -> ErrorMode -> Request m ()
errorMode scope mode = sendOSC (C.errorMode scope mode)
-- ====================================================================
-- Synth definitions
-- | A synth definition, identified by its name on the server.
newtype SynthDef = SynthDef { name :: String } deriving (Eq, Show)

-- | Construct a synth definition from a name.
d_named :: String -> SynthDef
d_named n = SynthDef n

-- | The default synth definition.
d_default :: SynthDef
d_default = d_named "default"
-- | Compute a unique name for a UGen graph.
-- graphName :: UGen -> String
-- graphName = SHA.showBSasHex . SHA.hash SHA.SHA256 . BZip.compress . Synthdef.graphdef . Synthdef.synth
-- | Create a new synth definition.
-- d_new :: Monad m => String -> UGen -> Async m SynthDef
-- d_new prefix ugen
-- | length prefix < 127 = mkAsync $ return (sd, f)
-- | otherwise = error "d_new: name prefix too long, resulting string exceeds 255 characters"
-- where
-- sd = SynthDef (prefix ++ "-" ++ graphName ugen)
-- f osc = (mkC C.d_recv C.d_recv' osc) (Synthdef.synthdef (name sd) ugen)
-- | Create a synth definition from a name and a UGen graph. (Asynchronous)
-- Names of 255 characters or more are rejected.
d_recv :: Monad m => String -> UGen -> Request m SynthDef
d_recv defName ugen
  | length defName >= 255 = error "d_recv: name too long, resulting string exceeds 255 characters"
  | otherwise = mkAsync $ return (SynthDef defName, packet)
  where
    packet osc = mkC C.d_recv CC.d_recv' osc (Synthdef.synthdef defName ugen)
-- | Load a synth definition from a named file. (Asynchronous)
d_load :: Monad m => FilePath -> Request m ()
d_load path = mkAsync_ $ \osc -> mkC C.d_load CC.d_load' osc path

-- | Load a directory of synth definition files. (Asynchronous)
d_loadDir :: Monad m => FilePath -> Request m ()
d_loadDir path = mkAsync_ $ \osc -> mkC C.d_loadDir CC.d_loadDir' osc path
-- | Remove definition once all nodes using it have ended.
d_free :: Monad m => SynthDef -> Request m ()
d_free def = sendOSC (C.d_free [name def])
-- ====================================================================
-- Node
-- | Anything living in the server's node tree.
class Node a where
  -- | The node's server-side identifier.
  nodeId :: a -> NodeId

-- | Existential wrapper erasing the concrete node type.
data AbstractNode = forall n . (Eq n, Node n, Show n) => AbstractNode n

instance Eq AbstractNode where
  AbstractNode a == AbstractNode b = nodeId a == nodeId b

instance Node AbstractNode where
  nodeId (AbstractNode n) = nodeId n

instance Show AbstractNode where
  show (AbstractNode n) = show n

-- | Construct an abstract node wrapper.
node :: (Eq n, Node n, Show n) => n -> AbstractNode
node = AbstractNode
-- | Place node @a@ after node @b@.
n_after :: (Node a, Node b, Monad m) => a -> b -> Request m ()
n_after a b = sendOSC (C.n_after [(fromIntegral (nodeId a), fromIntegral (nodeId b))])
-- | Place node @a@ before node @b@.
--
-- BUG FIX: this previously sent @C.n_after@ (copy-paste error from
-- 'n_after'), which placed the node *after* @b@ instead of before it.
n_before :: (Node a, Node b, Monad m) => a -> b -> Request m ()
n_before a b = sendOSC $ C.n_before [(fromIntegral (nodeId a), fromIntegral (nodeId b))]
-- | Fill ranges of a node's control values.
n_fill :: (Node a, Monad m) => a -> [(String, Int, Double)] -> Request m ()
n_fill n ranges = sendOSC (C.n_fill (fromIntegral (nodeId n)) ranges)

-- | Delete a node, releasing its id once the request finishes.
n_free :: (Node a, MonadIdAllocator m) => a -> Request m ()
n_free n = do
  sendOSC (C.n_free [fromIntegral (nodeId n)])
  finally (M.free M.nodeIdAllocator (nodeId n))
-- | Mapping node controls to buses.
class BusMapping n b where
  -- | Map a node's controls to read from a bus.
  n_map :: (Node n, Bus b, Monad m) => n -> String -> b -> Request m ()
  -- | Remove a control's mapping to a bus.
  n_unmap :: (Node n, Bus b, Monad m) => n -> String -> b -> Request m ()

instance BusMapping n ControlBus where
  -- Multi-channel buses use the n_mapn variant.
  n_map n c b =
    let nid = fromIntegral (nodeId n)
        bid = fromIntegral (controlBusId b)
    in sendOSC $ if numChannels b > 1
                   then C.n_mapn nid [(c, bid, numChannels b)]
                   else C.n_map nid [(c, bid)]
  -- Unmapping is done by mapping to bus index -1.
  n_unmap n c b =
    let nid = fromIntegral (nodeId n)
    in sendOSC $ if numChannels b > 1
                   then C.n_mapn nid [(c, -1, numChannels b)]
                   else C.n_map nid [(c, -1)]

instance BusMapping n AudioBus where
  n_map n c b =
    let nid = fromIntegral (nodeId n)
        bid = fromIntegral (audioBusId b)
    in sendOSC $ if numChannels b > 1
                   then C.n_mapan nid [(c, bid, numChannels b)]
                   else C.n_mapa nid [(c, bid)]
  n_unmap n c b =
    let nid = fromIntegral (nodeId n)
    in sendOSC $ if numChannels b > 1
                   then C.n_mapan nid [(c, -1, numChannels b)]
                   else C.n_mapa nid [(c, -1)]
-- | Query a node (send only, no wait).
n_query_ :: (Node a, Monad m) => a -> Request m ()
n_query_ n = sendOSC $ C.n_query [fromIntegral (nodeId n)]

-- | Query a node and wait for its notification.
n_query :: (Node a, MonadIO m) => a -> Request m (Result N.NodeNotification)
n_query n = n_query_ n >> waitFor (N.n_info (nodeId n))

-- | Query a node, extracting the result.
n_queryM :: (Node a, MonadIdAllocator m, RequestOSC m, MonadIO m) => a -> m N.NodeNotification
n_queryM = get . n_query

-- | Turn node on or off.
n_run_ :: (Node a, Monad m) => a -> Bool -> Request m ()
n_run_ n flag = sendOSC $ C.n_run [(fromIntegral (nodeId n), flag)]

-- | Set a node's control values.
n_set :: (Node a, Monad m) => a -> [(String, Double)] -> Request m ()
n_set n controls = sendOSC $ C.n_set (fromIntegral (nodeId n)) controls

-- | Set ranges of a node's control values.
n_setn :: (Node a, Monad m) => a -> [(String, [Double])] -> Request m ()
n_setn n controls = sendOSC $ C.n_setn (fromIntegral (nodeId n)) controls

-- | Trace a node.
n_trace :: (Node a, Monad m) => a -> Request m ()
n_trace n = sendOSC $ C.n_trace [fromIntegral (nodeId n)]

-- | Move an ordered sequence of nodes relative to node @n@.
n_order :: (Node n, Monad m) => AddAction -> n -> [AbstractNode] -> Request m ()
n_order action n nodes =
  sendOSC $ C.n_order action (fromIntegral (nodeId n)) (map (fromIntegral . nodeId) nodes)
-- ====================================================================
-- Synth
-- | A synth node.
newtype Synth = Synth NodeId deriving (Eq, Ord, Show)

instance Node Synth where
  nodeId (Synth nid) = nid

-- | Create a new synth from a definition, allocating a fresh node id.
s_new :: MonadIdAllocator m => SynthDef -> AddAction -> Group -> [(String, Double)] -> Request m Synth
s_new d a g xs = do
  nid <- M.alloc M.nodeIdAllocator
  sendOSC (C.s_new (name d) (fromIntegral nid) a (fromIntegral (nodeId g)) xs)
  return (Synth nid)

-- | Create a new synth in the root group.
s_new_ :: (MonadServer m, MonadIdAllocator m) => SynthDef -> AddAction -> [(String, Double)] -> Request m Synth
s_new_ d a xs = do
  g <- rootNode
  s_new d a g xs

-- | Release a synth with a "gate" envelope control; the node id is freed
-- once the corresponding end notification arrives.
s_release :: MonadIdAllocator m => Double -> Synth -> Request m ()
s_release r s = do
  let nid = nodeId s
  sendOSC (C.n_set1 (fromIntegral nid) "gate" r)
  after_ (N.n_end_ nid) (M.free M.nodeIdAllocator nid)

-- | Get control values.
s_get :: MonadIO m => Synth -> [String] -> Request m (Result [(Either Int32 String, Float)])
s_get s cs = do
  let nid = nodeId s
  sendOSC (C.s_get (fromIntegral nid) cs)
  waitFor (N.n_set nid)

-- | Get ranges of control values.
s_getn :: MonadIO m => Synth -> [(String, Int)] -> Request m (Result [(Either Int32 String, [Float])])
s_getn s cs = do
  let nid = nodeId s
  sendOSC (C.s_getn (fromIntegral nid) cs)
  waitFor (N.n_setn nid)

-- | Free a synth's ID and auto-reassign it to a reserved value
-- (the node itself is not freed!).
s_noid :: MonadIdAllocator m => Synth -> Request m ()
s_noid s = do
  let nid = nodeId s
  sendOSC (C.s_noid [fromIntegral nid])
  M.free M.nodeIdAllocator nid
-- ====================================================================
-- Group
-- | A group node.
newtype Group = Group NodeId deriving (Eq, Ord, Show)

instance Node Group where
  nodeId (Group nid) = nid

-- | Return the server's root group.
rootNode :: MonadServer m => m Group
rootNode = liftM Group M.rootNodeId

-- | Create a new group, allocating a fresh node id.
g_new :: MonadIdAllocator m => AddAction -> Group -> Request m Group
g_new a parent = do
  nid <- M.alloc M.nodeIdAllocator
  sendOSC (C.g_new [(fromIntegral nid, a, fromIntegral (nodeId parent))])
  return (Group nid)

-- | Create a new group in the top level group.
g_new_ :: (MonadServer m, MonadIdAllocator m) => AddAction -> Request m Group
g_new_ a = do
  root <- rootNode
  g_new a root

-- | Free all synths in this group and all its sub-groups.
g_deepFree :: Monad m => Group -> Request m ()
g_deepFree g = sendOSC (C.g_deepFree [fromIntegral (nodeId g)])

-- | Delete all nodes in a group.
g_freeAll :: Monad m => Group -> Request m ()
g_freeAll g = sendOSC (C.g_freeAll [fromIntegral (nodeId g)])

-- | Add node to head of group.
g_head :: (Node n, Monad m) => Group -> n -> Request m ()
g_head g n = sendOSC (C.g_head [(fromIntegral (nodeId g), fromIntegral (nodeId n))])

-- | Add node to tail of group.
g_tail :: (Node n, Monad m) => Group -> n -> Request m ()
g_tail g n = sendOSC (C.g_tail [(fromIntegral (nodeId g), fromIntegral (nodeId n))])

-- | Post a representation of a group's node subtree, optionally including
-- the current control values for synths.
g_dumpTree :: Monad m => [(Group, Bool)] -> Request m ()
g_dumpTree specs = sendOSC (C.g_dumpTree (map (first (fromIntegral . nodeId)) specs))
-- ====================================================================
-- Plugin Commands
-- | Send a plugin command.
cmd :: Monad m => String -> [Datum] -> Request m ()
cmd s args = sendOSC (C.cmd s args)
-- ====================================================================
-- Unit Generator Commands
-- | Send a command to a unit generator within a node.
u_cmd :: Monad m => AbstractNode -> Int -> String -> [Datum] -> Request m ()
u_cmd n i s args = sendOSC (C.u_cmd (fromIntegral (nodeId n)) i s args)
-- ====================================================================
-- Buffer Commands
-- | A server-side sample buffer.
newtype Buffer = Buffer { bufferId :: BufferId } deriving (Eq, Ord, Show)

-- | Allocate a zero filled buffer with the given number of frames and
-- channels. (Asynchronous)
b_alloc :: MonadIdAllocator m => Int -> Int -> Request m Buffer
b_alloc numFrames numChans = mkAsync $ do
  bid <- M.alloc M.bufferIdAllocator
  let packet osc = mkC C.b_alloc CC.b_alloc' osc (fromIntegral bid) numFrames numChans
  return (Buffer bid, packet)

-- | Allocate buffer space and read a sound file. (Asynchronous)
b_allocRead :: MonadIdAllocator m => FilePath -> Maybe Int -> Maybe Int -> Request m Buffer
b_allocRead path fileOffset numFrames = mkAsync $ do
  bid <- M.alloc M.bufferIdAllocator
  let packet osc = mkC C.b_allocRead CC.b_allocRead' osc
                     (fromIntegral bid) path
                     (maybe 0 id fileOffset)     -- default: start of file
                     (maybe (-1) id numFrames)   -- default: whole file
  return (Buffer bid, packet)

-- | Allocate buffer space and read a sound file, picking specific
-- channels. (Asynchronous)
b_allocReadChannel :: MonadIdAllocator m => FilePath -> Maybe Int -> Maybe Int -> [Int] -> Request m Buffer
b_allocReadChannel path fileOffset numFrames channels = mkAsync $ do
  bid <- M.alloc M.bufferIdAllocator
  let packet osc = mkC C.b_allocReadChannel CC.b_allocReadChannel' osc
                     (fromIntegral bid) path
                     (maybe 0 id fileOffset)
                     (maybe (-1) id numFrames)
                     channels
  return (Buffer bid, packet)
-- | Read sound file data into an existing buffer. (Asynchronous)
b_read :: Monad m =>
     Buffer
  -> FilePath
  -> Maybe Int
  -> Maybe Int
  -> Maybe Int
  -> Bool
  -> Request m ()
b_read (Buffer bid) path fileOffset numFrames bufferOffset leaveOpen =
  mkAsync_ packet
  where
    packet osc = mkC C.b_read CC.b_read' osc
                   (fromIntegral bid) path
                   (maybe 0 id fileOffset)
                   (maybe (-1) id numFrames)
                   (maybe 0 id bufferOffset)
                   leaveOpen
-- | Read sound file data into an existing buffer, picking specific
-- channels. (Asynchronous)
--
-- NOTE: constraint relaxed from 'MonadIO' to 'Monad' for consistency with
-- 'b_read'; 'mkAsync_' itself only requires 'Monad' (cf. 'd_load',
-- 'b_close'). This is backward compatible for all callers.
b_readChannel :: Monad m =>
     Buffer
  -> FilePath
  -> Maybe Int
  -> Maybe Int
  -> Maybe Int
  -> Bool
  -> [Int]
  -> Request m ()
b_readChannel (Buffer bid) path fileOffset numFrames bufferOffset leaveOpen channels =
  mkAsync_ $ \osc -> (mkC C.b_readChannel CC.b_readChannel' osc)
    (fromIntegral bid) path
    (maybe 0 id fileOffset)
    (maybe (-1) id numFrames)
    (maybe 0 id bufferOffset)
    leaveOpen
    channels
-- | Write sound file data. (Asynchronous)
--
-- See 'C.b_write' for the semantics of the individual arguments.
--
-- NOTE: constraint relaxed from 'MonadIO' to 'Monad' for consistency with
-- 'b_read'; 'mkAsync_' itself only requires 'Monad' (cf. 'd_load',
-- 'b_close'). This is backward compatible for all callers.
b_write :: Monad m =>
     Buffer
  -> FilePath
  -> SoundFileFormat
  -> SampleFormat
  -> Maybe Int
  -> Maybe Int
  -> Bool
  -> Request m ()
b_write (Buffer bid) path
        soundFileFormat sampleFormat
        fileOffset numFrames
        leaveOpen = mkAsync_ f
  where
    f osc = (mkC C.b_write CC.b_write' osc)
              (fromIntegral bid) path
              soundFileFormat
              sampleFormat
              (maybe 0 id fileOffset)
              (maybe (-1) id numFrames)
              leaveOpen
-- | Free a buffer and release its id. (Asynchronous)
b_free :: MonadIdAllocator m => Buffer -> Request m ()
b_free (Buffer bid) = mkAsync $ do
  M.free M.bufferIdAllocator bid
  let packet osc = mkC C.b_free CC.b_free' osc (fromIntegral bid)
  return ((), packet)
-- | Zero sample data. (Asynchronous)
--
-- NOTE: constraint relaxed from 'MonadIO' to 'Monad'; 'mkAsync_' only
-- requires 'Monad' (cf. 'd_load', 'b_read', 'b_close'). Backward
-- compatible for all callers.
b_zero :: Monad m => Buffer -> Request m ()
b_zero buffer = mkAsync_ $ \osc ->
  (mkC C.b_zero CC.b_zero' osc)
    (fromIntegral (bufferId buffer))
-- | Set sample values.
b_set :: Monad m => Buffer -> [(Int, Double)] -> Request m ()
b_set buffer assocs = sendOSC (C.b_set (fromIntegral (bufferId buffer)) assocs)

-- | Set ranges of sample values.
b_setn :: Monad m => Buffer -> [(Int, [Double])] -> Request m ()
b_setn buffer assocs = sendOSC (C.b_setn (fromIntegral (bufferId buffer)) assocs)

-- | Fill ranges of sample values.
b_fill :: Monad m => Buffer -> [(Int, Int, Double)] -> Request m ()
b_fill buffer ranges = sendOSC (C.b_fill (fromIntegral (bufferId buffer)) ranges)
-- | Call a command to fill a buffer. (Asynchronous)
-- The parameter is named genCmd to avoid shadowing the top-level 'cmd'.
b_gen :: MonadIdAllocator m => Buffer -> String -> [Datum] -> Request m ()
b_gen buffer genCmd args = withSync (C.b_gen (fromIntegral (bufferId buffer)) genCmd args)
-- | Fill a buffer with partials, specifying amplitudes.
b_gen_sine1 :: MonadIdAllocator m => Buffer -> [B_Gen] -> [Double] -> Request m ()
b_gen_sine1 buffer flags amps =
  withSync (C.b_gen_sine1 (fromIntegral (bufferId buffer)) flags amps)

-- | Fill a buffer with partials, specifying frequencies (in cycles per
-- buffer) and amplitudes.
b_gen_sine2 :: MonadIdAllocator m => Buffer -> [B_Gen] -> [(Double, Double)] -> Request m ()
b_gen_sine2 buffer flags partials =
  withSync (C.b_gen_sine2 (fromIntegral (bufferId buffer)) flags partials)

-- | Fill a buffer with partials, specifying frequencies (in cycles per
-- buffer), amplitudes and phases.
b_gen_sine3 :: MonadIdAllocator m => Buffer -> [B_Gen] -> [(Double, Double, Double)] -> Request m ()
b_gen_sine3 buffer flags partials =
  withSync (C.b_gen_sine3 (fromIntegral (bufferId buffer)) flags partials)

-- | Fill a buffer with a series of chebyshev polynomials, defined as
--
-- > cheby(n) = amplitude * cos(n * acos(x))
--
-- The first value gives the amplitude for n = 1, the second for n = 2,
-- and so on. To eliminate a DC offset when used as a waveshaper, the
-- wavetable is offset so that the center value is zero.
b_gen_cheby :: MonadIdAllocator m => Buffer -> [B_Gen] -> [Double] -> Request m ()
b_gen_cheby buffer flags amps =
  withSync (C.b_gen_cheby (fromIntegral (bufferId buffer)) flags amps)
-- | Copy samples from the source buffer to the destination buffer.
b_gen_copy :: MonadIdAllocator m => Buffer -> Int -> Buffer -> Int -> Maybe Int -> Request m ()
b_gen_copy buffer sampleOffset srcBuffer srcSampleOffset numSamples =
  withSync packet
  where
    packet = C.b_gen_copy (fromIntegral (bufferId buffer))
               sampleOffset
               (fromIntegral (bufferId srcBuffer))
               srcSampleOffset
               numSamples
-- | Close attached soundfile and write header information. (Asynchronous)
b_close :: Monad m => Buffer -> Request m ()
b_close buffer = mkAsync_ $ \osc ->
  mkC C.b_close CC.b_close' osc (fromIntegral (bufferId buffer))

-- | Request 'BufferInfo', waiting for the notification.
b_query :: MonadIO m => Buffer -> Request m (Result N.BufferInfo)
b_query (Buffer bid) =
  sendOSC (C.b_query [fromIntegral bid]) >> waitFor (N.b_info bid)

-- | Request 'BufferInfo', extracting the result.
b_queryM :: (MonadIdAllocator m, RequestOSC m, MonadIO m) => Buffer -> m N.BufferInfo
b_queryM = get . b_query
-- ====================================================================
-- Bus
-- | Abstract interface for control and audio rate buses.
class Bus a where
  -- | Rate of computation.
  rate :: a -> Rate
  -- | Number of channels.
  numChannels :: a -> Int
  -- | Free the bus's id range.
  freeBus :: (MonadServer m, MonadIdAllocator m) => a -> m ()

-- | Audio bus: a contiguous range of audio bus ids.
newtype AudioBus = AudioBus { audioBusIdRange :: Range AudioBusId } deriving (Eq, Show)

-- | Get the first audio bus id of the range.
audioBusId :: AudioBus -> AudioBusId
audioBusId b = Range.begin (audioBusIdRange b)

instance Bus AudioBus where
  rate _ = AR
  numChannels b = Range.size (audioBusIdRange b)
  -- Hardware input/output buses are not owned by the allocator, so they
  -- must not be returned to it.
  freeBus b = do
    hw <- isHardwareBus b
    unless hw (M.freeRange M.audioBusIdAllocator (audioBusIdRange b))

-- | Allocate an audio bus with the specified number of channels.
newAudioBus :: MonadIdAllocator m => Int -> m AudioBus
newAudioBus n = liftM AudioBus (M.allocRange M.audioBusIdAllocator n)
-- | Return 'True' if the bus is a hardware output or input bus, i.e. its
-- id falls within the first (outputs + inputs) channels.
isHardwareBus :: MonadServer m => AudioBus -> m Bool
isHardwareBus b = do
  numOut <- serverOption numberOfOutputBusChannels
  numIn <- serverOption numberOfInputBusChannels
  let bid = audioBusId b
  return (bid >= 0 && bid < fromIntegral (numOut + numIn))
-- | Get a hardware input bus of @n@ channels starting at input index @i@.
-- Input buses follow the output buses in the server's bus numbering.
inputBus :: (MonadServer m, Failure AllocFailure m) => Int -> Int -> m AudioBus
inputBus n i = do
  numOut <- serverOption numberOfOutputBusChannels
  numIn <- serverOption numberOfInputBusChannels
  let r = Range.sized n (fromIntegral (numOut + i))
  if Range.begin r < fromIntegral numOut || Range.end r > fromIntegral (numOut + numIn)
    then failure InvalidId
    else return (AudioBus r)

-- | Get a hardware output bus of @n@ channels starting at output index @i@.
outputBus :: (MonadServer m, Failure AllocFailure m) => Int -> Int -> m AudioBus
outputBus n i = do
  numOut <- serverOption numberOfOutputBusChannels
  let r = Range.sized n (fromIntegral i)
  if Range.begin r < 0 || Range.end r > fromIntegral numOut
    then failure InvalidId
    else return (AudioBus r)
-- | Control bus: a contiguous range of control bus ids.
newtype ControlBus = ControlBus { controlBusIdRange :: Range ControlBusId } deriving (Eq, Show)

-- | Get the first control bus id of the range.
controlBusId :: ControlBus -> ControlBusId
controlBusId b = Range.begin (controlBusIdRange b)

instance Bus ControlBus where
  rate _ = KR
  numChannels b = Range.size (controlBusIdRange b)
  freeBus b = M.freeRange M.controlBusIdAllocator (controlBusIdRange b)

-- | Allocate a control bus with the specified number of channels.
newControlBus :: MonadIdAllocator m => Int -> m ControlBus
newControlBus n = liftM ControlBus (M.allocRange M.controlBusIdAllocator n)
| kaoskorobase/hsc3-server | Sound/SC3/Server/State/Monad/Command.hs | gpl-2.0 | 23,367 | 0 | 15 | 5,126 | 7,054 | 3,716 | 3,338 | 446 | 2 |
module StationCrawler.Queuers where
import StationCrawler.Types
import Types
import qualified Data.Map.Strict as M
-- | Add a train to the visit queue unless it was already seen; seen
-- trains are tracked as keys of the visitedTrains map.
queueTrains :: StationState -> TrainId -> StationState
queueTrains state trainId
  | M.member trainId seen = state
  | otherwise =
      state { trainsLeftToVisit = trainId : trainsLeftToVisit state
            , visitedTrains = M.insert trainId True seen
            }
  where seen = visitedTrains state
-- | Add a station to the visit queue unless it was already seen; seen
-- stations are tracked as keys of the visitedStations map.
queueStations :: StationState -> StationId -> StationState
queueStations state stationId
  | M.member stationId seen = state
  | otherwise =
      state { stationsLeftToVisit = stationId : stationsLeftToVisit state
            , visitedStations = M.insert stationId True seen
            }
  where seen = visitedStations state
| mkawalec/infopassenger-crawler | src/StationCrawler/Queuers.hs | gpl-3.0 | 775 | 0 | 10 | 164 | 197 | 108 | 89 | 18 | 2 |
-- sth-escape: replace non-printable, non-ascii chars on stdin with c escape sequences
module Main where
import System.Exit (exitSuccess)
import System.Environment (getArgs)
import STH.Lib
(charFilter, lineFilter, bsEsc)
-- | Dispatch on the command line: @--char@ escapes character by character,
-- anything else escapes line by line.
main :: IO ()
main = do
  args <- getArgs
  case args of
    ["--char"] -> charFilter bsEsc
    -- FIX: the previous pattern was the identifier 'otherwise', which in a
    -- case alternative is a variable binding (a plain wildcard), not the
    -- Prelude guard; '_' states the intent and avoids a shadowing warning.
    _ -> lineFilter bsEsc
  exitSuccess
| nbloomf/st-haskell | src/STH/Escape/Main.hs | gpl-3.0 | 370 | 0 | 10 | 71 | 94 | 51 | 43 | 12 | 2 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
-----------------------------------------------------------------------------
-- |
-- Module : Text.BlogLiterately.Image
-- Copyright : (c) 2012 Brent Yorgey
-- License : GPL (see LICENSE)
-- Maintainer : Brent Yorgey <byorgey@gmail.com>
--
-- Uploading images embedded in posts to the server.
--
-----------------------------------------------------------------------------
module Text.BlogLiterately.Image
(
uploadAllImages
, uploadIt
, mkMediaObject
) where
import qualified Data.Text as T
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State (StateT, get, modify, runStateT)
import qualified Data.ByteString.Char8 as B
import Data.Char (toLower)
import Data.List (isPrefixOf)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import System.Directory (doesFileExist)
import System.FilePath (takeExtension, takeFileName)
import Network.XmlRpc.Client (remote)
import Network.XmlRpc.Internals (Value (..), toValue)
import Text.Pandoc
import Text.BlogLiterately.Options
type URL = String
-- | Transform a document by uploading any \"local\" images to the
-- server, and replacing their filenames with the URLs returned by
-- the server. Only upload any given image once (determined by file
-- name), even across runs: uploaded images and their associated URL
-- on the server is tracked in a special dotfile,
-- @.BlogLiterately-uploaded-images@.
uploadAllImages :: BlogLiterately -> Pandoc -> IO Pandoc
uploadAllImages bl@(BlogLiterately{..}) p =
    -- Only upload when we have an XML-RPC endpoint and are not in
    -- HTML-only mode; otherwise return the document unchanged.
    case (_blog, _htmlOnly) of
      (Just xmlrpc, h) | h /= Just True -> do
        uploaded <- readUploadedImages
        -- Thread the filename -> URL cache through the traversal so each
        -- image is uploaded at most once.
        (p', uploaded') <- runStateT (bottomUpM (uploadOneImage xmlrpc) p) uploaded
        writeUploadedImages uploaded'
        return p'
      _ -> return p
  where
    -- Upload a single local image, consulting and updating the cache.
    uploadOneImage :: String -> Inline -> StateT (M.Map FilePath URL) IO Inline
    uploadOneImage xmlrpc i@(Image attr altText (imgUrlT, imgTitle))
      | isLocal imgUrl = do
          uploaded <- get
          case M.lookup imgUrl uploaded of
            Just url -> return $ Image attr altText (T.pack url, imgTitle)
            Nothing -> do
              res <- lift $ uploadIt xmlrpc imgUrl bl
              case res of
                -- The server's response struct carries the new URL under "url".
                Just (ValueStruct (lookup "url" -> Just (ValueString newUrl))) -> do
                  modify (M.insert imgUrl newUrl)
                  return $ Image attr altText (T.pack newUrl, imgTitle)
                _ -> do
                  liftIO . putStrLn $ "Warning: upload of " ++ imgUrl ++ " failed."
                  return i
      | otherwise = return i
      where
        imgUrl = T.unpack imgUrlT
    uploadOneImage _ i = return i
    -- A URL is "local" unless it starts with "http" or "/".
    isLocal imgUrl = none (`isPrefixOf` imgUrl) ["http", "/"]
    none pr = all (not . pr)
-- | Dotfile recording images already uploaded and their server URLs.
uploadedImagesFile :: String
uploadedImagesFile = ".BlogLiterately-uploaded-images"

-- | Read the list of previously uploaded images and their associated URLs
-- from 'uploadedImagesFile'; an absent or unparseable file yields an
-- empty map.
readUploadedImages :: IO (M.Map FilePath URL)
readUploadedImages = do
  present <- doesFileExist uploadedImagesFile
  if not present
    then return M.empty
    else do
      txt <- readFile uploadedImagesFile
      let m = fromMaybe M.empty (readMay txt)
      -- Force the whole (lazily read) file before returning.
      length txt `seq` return m
-- | Safe variant of 'read': 'Just' a value only when the input has exactly
-- one complete parse with no leftover text.
readMay :: Read a => String -> Maybe a
readMay input
  | [(parsed, "")] <- reads input = Just parsed
  | otherwise                     = Nothing
-- | Write out the list of uploaded images and their associated URLs
-- to 'uploadedImagesFile'.
writeUploadedImages :: M.Map FilePath URL -> IO ()
writeUploadedImages = writeFile uploadedImagesFile . show
-- | Upload a file using the @metaWeblog.newMediaObject@ XML-RPC method
-- call, returning the server's response struct on success.
uploadIt :: String -> FilePath -> BlogLiterately -> IO (Maybe Value)
uploadIt url filePath (BlogLiterately{..}) = do
  putStr $ "Uploading " ++ filePath ++ "..."
  maybeMedia <- mkMediaObject filePath
  case maybeMedia of
    Nothing -> do
      putStrLn $ "\nFile not found: " ++ filePath
      return Nothing
    Just media -> do
      response <- remote url "metaWeblog.newMediaObject"
                    (fromMaybe "default" _blogid)
                    (fromMaybe "" _user)
                    (fromMaybe "" _password)
                    media
      putStrLn "done."
      return (Just response)
-- | Prepare a file for upload: read its bytes and build the XML-RPC media
-- struct, guessing the MIME type from the file extension (PNG by default).
mkMediaObject :: FilePath -> IO (Maybe Value)
mkMediaObject path = do
  present <- doesFileExist path
  if not present
    then return Nothing
    else do
      contents <- B.readFile path
      return . Just $ ValueStruct
        [ ("name", toValue baseName)
        , ("type", toValue mime)
        , ("bits", ValueBase64 contents)
        ]
  where
    baseName = takeFileName path
    mime = case (map toLower . drop 1 . takeExtension) baseName of
      "png"  -> "image/png"
      "jpg"  -> "image/jpeg"
      "jpeg" -> "image/jpeg"
      "gif"  -> "image/gif"
      _      -> "image/png"
| byorgey/BlogLiterately | src/Text/BlogLiterately/Image.hs | gpl-3.0 | 5,409 | 0 | 25 | 1,571 | 1,257 | 651 | 606 | 103 | 6 |
{- Copyright (C) 2009 John Millikin <jmillikin@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module Network.Protocol.XMPP.Stanzas (
StanzaType(..)
,Stanza(..)
,treeToStanza
,stanzaToTree
) where
import Text.XML.HXT.DOM.Interface (XmlTree)
import Text.XML.HXT.Arrow ((>>>))
import qualified Text.XML.HXT.Arrow as A
import Network.Protocol.XMPP.JID (JID, jidFormat, jidParse)
import Network.Protocol.XMPP.Util (mkElement, mkQName)
import qualified Text.XML.HXT.DOM.XmlNode as XN
-- | Classification of a stanza: the XML element name combined with its
-- @type@ attribute (see 'stanzaTypeMap' for the concrete strings).
data StanzaType =
    MessageNormal
  | MessageChat
  | MessageGroupChat
  | MessageHeadline
  | MessageError
  | PresenceUnavailable
  | PresenceSubscribe
  | PresenceSubscribed
  | PresenceUnsubscribe
  | PresenceUnsubscribed
  | PresenceProbe
  | PresenceError
  | IQGet
  | IQSet
  | IQResult
  | IQError
  deriving (Show, Eq)
-- | A single XMPP stanza.
data Stanza = Stanza
  {
    stanzaType :: StanzaType        -- ^ element name plus @type@ attribute
   ,stanzaTo :: Maybe JID           -- ^ parsed @to@ attribute, if any
   ,stanzaFrom :: Maybe JID         -- ^ parsed @from@ attribute, if any
   ,stanzaID :: String              -- ^ @id@ attribute (may be empty)
   ,stanzaLang :: String            -- ^ @lang@ attribute (may be empty)
   ,stanzaPayloads :: [XmlTree]     -- ^ child elements
  }
  deriving (Show, Eq)
-- | Lookup table relating (namespace, element name, @type@ attribute)
-- triples to 'StanzaType' values.
stanzaTypeMap :: [((String, String, String), StanzaType)]
stanzaTypeMap = concatMap expand [
    ("jabber:client", "message", [
        ("normal", MessageNormal)
       ,("chat", MessageChat)
       ,("groupchat", MessageGroupChat)
       ,("headline", MessageHeadline)
       ,("error", MessageError)
       ])
   ,("jabber:client", "presence", [
        ("unavailable", PresenceUnavailable)
       ,("subscribe", PresenceSubscribe)
       ,("subscribed", PresenceSubscribed)
       ,("unsubscribe", PresenceUnsubscribe)
       ,("unsubscribed", PresenceUnsubscribed)
       ,("probe", PresenceProbe)
       ,("error", PresenceError)
       ])
   ,("jabber:client", "iq", [
        ("get", IQGet)
       ,("set", IQSet)
       ,("result", IQResult)
       ,("error", IQError)
       ])
   ]
  where
    -- Flatten one (namespace, element, [(typeString, type)]) group into
    -- keyed entries, preserving order.
    expand (ns, elementName, typeStrings) =
      [ ((ns, elementName, typeString), type')
      | (typeString, type') <- typeStrings ]
-- | Render a 'StanzaType' as its (namespace, element name, type attribute)
-- triple, taking the first matching entry of 'stanzaTypeMap'.
--
-- FIX: a missing entry previously hit a bare 'undefined'; it now raises an
-- 'error' with a diagnostic message. (Every current constructor appears in
-- 'stanzaTypeMap', so this branch is unreachable today.)
stanzaTypeToStr :: StanzaType -> (String, String, String)
stanzaTypeToStr t =
  case [key | (key, t') <- stanzaTypeMap, t' == t] of
    (key:_) -> key
    [] -> error ("stanzaTypeToStr: no mapping for " ++ show t)
-- | Look up the 'StanzaType' for a (namespace, element name, type
-- attribute) triple; 'Nothing' if the combination is unknown.
--
-- The previous hand-rolled recursion was exactly the Prelude's 'lookup'
-- (first match wins), so use that instead.
stanzaTypeFromStr :: String -> String -> String -> Maybe StanzaType
stanzaTypeFromStr ns elementName typeString =
  lookup (ns, elementName, typeString) stanzaTypeMap
-- | Convert a received XML tree into zero or one 'Stanza' values
-- (the empty list when the element/type combination is unknown).
treeToStanza :: XmlTree -> [Stanza]
treeToStanza tree = do
  to <- map jidParse (A.runLA (A.getAttrValue "to") tree)
  from <- map jidParse (A.runLA (A.getAttrValue "from") tree)
  id' <- A.runLA (A.getAttrValue "id") tree
  lang <- A.runLA (A.getAttrValue "lang") tree
  ns <- A.runLA A.getNamespaceUri tree
  elementName <- A.runLA A.getLocalPart tree
  typeString <- A.runLA (A.getAttrValue "type") tree
  let payloads = A.runLA (A.getChildren >>> A.isElem) tree
  case stanzaTypeFromStr ns elementName typeString of
    Nothing -> []
    Just type' -> [Stanza type' to from id' lang payloads]
-- | Serialise a 'Stanza' back into an XML tree; empty to/from/id/lang
-- values are omitted (see 'autoAttr'), the type attribute is always set.
stanzaToTree :: Stanza -> XmlTree
stanzaToTree s = mkElement (ns, elementName) attrs (stanzaPayloads s)
  where
    (ns, elementName, typeString) = stanzaTypeToStr (stanzaType s)
    attrs = concatMap ($ s) attrGens
    attrGens =
      [ autoAttr "to" (maybe "" jidFormat . stanzaTo)
      , autoAttr "from" (maybe "" jidFormat . stanzaFrom)
      , autoAttr "id" stanzaID
      , autoAttr "xml:lang" stanzaLang
      , const [("", "type", typeString)]
      ]
-- | Build an attribute triple from a stanza field, yielding no attribute
-- at all when the extracted value is empty.
autoAttr :: String -> (Stanza -> String) -> Stanza -> [(String, String, String)]
autoAttr attr extract stanza
  | null value = []
  | otherwise  = [("", attr, value)]
  where value = extract stanza
| astro/network-protocol-xmpp | Network/Protocol/XMPP/Stanzas.hs | gpl-3.0 | 4,169 | 54 | 13 | 810 | 1,256 | 707 | 549 | 105 | 2 |
import Data.List
import Data.Maybe
import Data.Time
import Control.Monad
import Control.Exception
import Control.Applicative
import System.Process
import System.Directory
import System.Environment
import System.Exit
-- | Run an external process, wait for it, and abort with the given
-- message if it exits non-zero.
run :: CreateProcess -> String -> IO ()
run spec msg = do
  (_, _, _, ph) <- createProcess spec
  code <- waitForProcess ph
  when (code /= ExitSuccess) (error msg)
-- | Create (if needed) a dated markdown post named after the single
-- command-line argument, open it in $EDITOR, and render it with pandoc.
main :: IO ()
main = do
  args <- getArgs
  date <- show . utctDay <$> getCurrentTime
  editor <- fromMaybe "open" <$> lookupEnv "EDITOR"
  let title = case args of
        [t] -> t
        _ -> error "Expected a single parameter for the title of the post."
      file = date ++ " " ++ title
      html = file ++ ".html"
      markdown = file ++ ".markdown"
  alreadyThere <- doesFileExist markdown
  -- Seed the file with a title heading and a date subheading.
  when (not alreadyThere) $ writeFile markdown $ unlines
    ['#':title
    ,"##"++date
    ]
  run (proc editor [markdown]) "Failed to run editor"
  run (proc "pandoc" [markdown,"-o",html])
    "Could not run pandoc. Is pandoc installed, and in the path?"
  putStrLn $ "HTML generated at '" ++ html ++ "'"
| mikeplus64/plissken | doc/log/Add.hs | gpl-3.0 | 1,200 | 0 | 13 | 355 | 357 | 178 | 179 | 36 | 2 |
module Export2 where
data Number = Three
three :: Int
three = 2
| roberth/uu-helium | test/correct/Export2.hs | gpl-3.0 | 66 | 0 | 5 | 15 | 21 | 13 | 8 | 4 | 1 |
{- Copyright 2013 Matthew Gordon.
This file is part of Gramophone.
Gramophone is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Gramophone is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Gramophone. If not, see <http://www.gnu.org/licenses/>.
-}
{-# LANGUAGE OverloadedStrings #-}
-- |The "main" gramophone module. This module essentially exposes all application
-- functionality as a library. The GUI module(s) then call this library.
module Gramophone.Core
(
initFiles,
openFiles,
scanDirectoryForAudioFiles,
scanTreeForAudioFiles,
addFileToDatabase,
addAudioFilesFromTree,
module Gramophone.Core.Database,
module Gramophone.Core.MediaController
) where
import qualified Gramophone.Core.MediaController as MC
import Gramophone.Core.MediaController (MediaController,
initMediaController,
readTagsFromFile,
shutDown,
ReadTagsResult(..) )
import Gramophone.Core.Database
import System.FilePath((</>))
import qualified System.FilePath as FilePath
import System.Directory (createDirectoryIfMissing, doesDirectoryExist, getDirectoryContents)
import qualified System.FilePath.Glob as Glob
import Control.Monad
import Control.Applicative
import Control.Monad.IO.Class
import Control.Monad.Trans.State.Strict
import Control.Lens
import Data.Maybe (fromMaybe)
-- | Given a directory, initializes a database and any other files used
-- by Gramophone.
-- | Initialize Gramophone's on-disk files under the given directory:
-- create the directory (and any missing parents), then create a fresh
-- database inside it.
initFiles :: FilePath -> IO (Either CreateError DatabaseRef)
initFiles dir =
    createDirectoryIfMissing True dir >> createDatabase (dir </> "database")
-- | Given the gramophone directory, returns a DatabaseRef pointing
-- to it.
-- | Open an existing Gramophone directory, yielding a reference to
-- the database stored inside it.
openFiles :: FilePath -> IO (Either OpenError DatabaseRef)
openFiles dir = openDatabase (dir </> "database")
-- | Glob patterns (by file extension) that identify audio files.
audioFileGlobs :: [Glob.Pattern]
audioFileGlobs = Glob.compile <$> ["*.flac", "*.mp3", "*.m4a"]
-- | Given a directory, (non-recursively) scans for audio files, based on file extensions.
-- | Non-recursively scan one directory for audio files, matching on
-- file extension via 'audioFileGlobs'.
scanDirectoryForAudioFiles :: FilePath -> IO [FilePath]
scanDirectoryForAudioFiles dir = do
    (matchesPerGlob, _) <- Glob.globDir audioFileGlobs dir
    return (concat matchesPerGlob)
-- | Walk the directory tree rooted at @dir@, adding every audio file
-- found to the database.  Progress is reported on stdout: a line per
-- file, and an in-place (\r-terminated) line per directory scanned.
addAudioFilesFromTree :: MonadDB m => MediaController -> FilePath -> m ()
addAudioFilesFromTree mc dir = scanTreeForAudioFiles dir loop
    where loop :: (MonadDB m) => StateT ScanState m ()
          loop = do
            p <- getNextFile
            case p of
              FoundFile filename -> do
                              liftIO $ putStrLn ("File: " ++ filename)
                              -- result (Maybe Recording) deliberately discarded
                              addFileToDatabase mc filename
                              loop
              ScanningDirectory dirName -> do
                              liftIO $ putStr ("Scanning: " ++ dirName ++ "\r")
                              loop
              ScanDone -> return ()
-- | Read the tags of one audio file and insert a corresponding
-- recording row.  Artist and album rows are looked up by name and
-- created on demand when missing; if tag reading fails, a recording
-- with only the file name is stored.
addFileToDatabase :: MonadDB m => MediaController -> FilePath -> m (Maybe Recording)
addFileToDatabase mc filename = do
  maybeTags <- liftIO $ MC.readTagsFromFile mc filename
  newRecording <- case maybeTags of
    -- No readable tags: record the bare file name.
    TagsFail _ -> return $ NewRecording (AudioFileName filename) Nothing Nothing Nothing Nothing
    TagsSuccess tags -> do
      liftIO $ putStrLn ("File: " ++ filename)
      -- Find-or-create the artist named in the tags (first match wins).
      artistRelation <- case (getTag ArtistName MC.tagArtistName tags) of
        Nothing -> return Nothing
        Just name -> do
          artistRelations <- findArtists name
          case artistRelations of
            (a:_) -> return (Just a)
            [] -> addArtist (NewArtist name)
      -- Find-or-create the album; a new album inherits the artist and
      -- the track count from the tags (0 when the tag is absent).
      albumRelation <- case (getTag AlbumTitle MC.tagAlbumName tags) of
        Nothing -> return Nothing
        Just albumName -> do
          albumRelations <- findAlbums albumName
          case albumRelations of
            (a:_) -> return (Just a)
            [] -> addAlbum (NewAlbum albumName
                              (artistId <$> artistRelation)
                              (TrackCount $ fromMaybe 0 (view MC.tagNumTracks tags)))
      return $ NewRecording (AudioFileName filename)
                 (getTag RecordingTitle MC.tagTrackName tags)
                 (artistId <$> artistRelation)
                 (albumId <$> albumRelation)
                 (getTag TrackNumber MC.tagTrackNumber tags)
  addRecording newRecording
  where
    -- Wrap the value of a tag lens in the given newtype constructor.
    getTag c t ts = c <$> (view t ts)
-- | Work queue for the directory-tree walk: directories still to be
-- expanded, and files already discovered but not yet handed out.
data ScanState = ScanState {
      unscannedDirectories :: [FilePath],
      unscannedFiles :: [FilePath]
    } deriving Show
-- | Run a stateful traversal action over the tree rooted at the given
-- directory: the initial state queues only the root, with no files
-- discovered yet.
scanTreeForAudioFiles :: MonadIO m => FilePath -> StateT ScanState m () -> m ()
scanTreeForAudioFiles root action = evalStateT action (ScanState [root] [])
data ScanResult = FoundFile FilePath | ScanningDirectory FilePath | ScanDone
getNextFile :: MonadIO m => StateT ScanState m ScanResult
getNextFile = do
scanState <- get
case scanState of
ScanState [] [] -> return ScanDone
ScanState dirs (file:files) -> do
put $ ScanState dirs files
return $ FoundFile file
ScanState (dir:dirs) [] -> do
contents <- liftIO $ (filter (`notElem` [".",".."])) <$> getDirectoryContents dir
subDirs <- liftIO $ filterM doesDirectoryExist $ map (FilePath.combine dir) contents
files <- liftIO $ scanDirectoryForAudioFiles dir
put $ ScanState (subDirs++dirs) files
return $ScanningDirectory dir
| matthewscottgordon/gramophone | src/Gramophone/Core.hs | gpl-3.0 | 6,259 | 0 | 30 | 1,972 | 1,277 | 656 | 621 | 103 | 6 |
{-
Name: Njagi Mwaniki
Resources:
Learn You A Haskell: Monoid, Functors, Applicative Functors and Monads
Aeson tutorial - FPcomplete
Aeson Documentation - Hackage
-}
{-# LANGUAGE DeriveGeneric, OverloadedStrings #-}
module HW06 where
import Data.Aeson
import Data.Monoid
import GHC.Generics
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.Text as T
-- import qualified Data.Text.IO as T
import Data.List
-- Location of the raw farmers' market JSON data.
inputFile :: FilePath
inputFile = "Src/Week6/markets.json"
-- Destination used by the commented-out test helpers at the bottom of
-- this module.
outputFile :: FilePath
outputFile = "Src/Week6/outMart.json"
-- | Recursively rewrite every JSON string \"Y\"\/\"N\" into the
-- corresponding 'Bool', leaving all other values untouched.
ynToBool :: Value -> Value
ynToBool value = case value of
    Object o   -> Object (fmap ynToBool o)
    Array a    -> Array (fmap ynToBool a)
    String "Y" -> Bool True
    String "N" -> Bool False
    _          -> value
-- | Decode raw JSON bytes into an Aeson 'Value', normalising the
-- Y\/N booleans on success and passing any decode error through.
parseData :: B.ByteString -> Either String Value
parseData bytes =
    case eitherDecode bytes of
        Left err  -> Left err
        Right val -> Right (ynToBool val)
-- | One farmers' market record.  The x and y fields are coordinates
-- copied straight from the JSON; their axis semantics (which is
-- latitude, which longitude) are not stated in this module --
-- see the note on @instance Ord Market@ below.
data Market = Market { marketname :: T.Text
                     , x :: Float
                     , y :: Float
                     , state :: T.Text
                     } deriving (Show, Eq, Generic)
-- Thanks to deriving Generic we get our type becoming automatic instances of
-- FromJSON and ToJSON and therefore get automatic parsers for this.
-- e.g eitherDecode, decode, encode
-- Generic-derived JSON codecs; field names in the JSON must match the
-- record selectors above.
instance FromJSON Market
instance ToJSON Market
-- | Sentinel value meaning "no market"; used by 'matchName' and
-- filtered back out by 'makeMonoid'.
nullMarket :: Market
nullMarket = Market "" 0 0 ""
-- | Decode raw JSON bytes into a list of 'Market's, running the Y\/N
-- normalisation first.  (Decoding straight to @[Market]@ with
-- @eitherDecode@ would be more direct, but would skip 'ynToBool'.)
parseMarkets :: B.ByteString -> Either String [Market]
parseMarkets bytes = fmap (removeResult . fromJSON) (parseData bytes)
-- | Unwrap an Aeson 'Result'.  A failed conversion is encoded as a
-- single sentinel 'Market' carrying the error text in its state field.
removeResult :: Result [Market] -> [Market]
removeResult (Success markets) = markets
removeResult (Error msg) =
    [Market "Failed Result parsing" 0 0 (T.pack msg)]
-- | Read 'inputFile' and parse it into the full market list.
loadData :: IO [Market]
loadData = fmap (getMarket . parseMarkets) (B.readFile inputFile)
-- Gets [Market] from Either String [Market]
-- NOTE(review): 'fail' is used here at the list monad, where it
-- silently returns [] and throws away the error message -- presumably
-- 'error l' was intended so parse failures are loud; confirm before
-- changing, since callers currently see an empty list on failure.
getMarket :: Either String [Market] -> [Market]
getMarket (Right r) = r
getMarket (Left l) = fail l
-- | List wrapper serving as the carrier of the ordered-list 'Monoid'
-- instance below.  Declared as a newtype (rather than @data@) so the
-- wrapper is zero-cost while keeping exactly the same constructor,
-- selector, and derived-instance interface.
newtype OrdList a = OrdList { getOrdList :: [a] }
                  deriving (Eq, Show)
-- NOTE(review): this orders markets by the x field alone, while the
-- derived Eq compares every field -- so @compare a b == EQ@ does not
-- imply @a == b@.  It also means 'orderedNtoS' sorts on x; if x is
-- longitude and y latitude, a north-to-south ordering would want y
-- instead -- confirm the intended axis against the data set.
instance Ord Market where
  compare (Market _ x1 _ _) (Market _ x2 _ _) = compare x1 x2
-- | List concatenation lifted to 'OrdList'.
--
-- The explicit @mconcat@ override was dropped: it restated the class
-- default (@foldr mappend mempty@) verbatim.
--
-- NOTE(review): the @Ord a@ constraint is never used -- 'mappend'
-- simply appends, so it does not maintain sortedness.  If a
-- sorted-merge monoid was intended, 'mappend' should merge the two
-- lists; confirm against callers ('search'\/'allFound') before
-- changing behaviour.
instance Ord a => Monoid (OrdList a) where
  mempty = OrdList []
  mappend (OrdList xs) (OrdList ys) = OrdList (xs ++ ys)
type Searcher m = T.Text -> [Market] -> m
-- | Core search driver: every market whose name contains the needle
-- is kept (non-matches collapse to 'nullMarket' via 'matchName'),
-- each survivor is converted through @func@, and the pieces are
-- combined monoidally.
--
-- Simplifications from the original: the explicit empty-list clause
-- was redundant (@mconcat [] == mempty@), and
-- @mconcat (map func (fmap (matchName t) xs))@ is just a single map
-- with the two functions composed.
search :: Monoid m => (Market -> m) -> Searcher m
search func needle = mconcat . map (func . matchName needle)
-- | Lift a market into the 'OrdList' monoid, dropping the sentinel
-- 'nullMarket' (it becomes 'mempty').
makeMonoid :: Market -> OrdList Market
makeMonoid market =
  if market == nullMarket then mempty else OrdList [market]
-- | Keep a market whose name contains the needle; otherwise collapse
-- it to the 'nullMarket' sentinel.
--
-- The original immediately applied a lambda to @text@ and matched a
-- 'Bool' with @case@; guards express the same thing directly.
matchName :: T.Text -> Market -> Market
matchName needle market
  | needle `T.isInfixOf` marketname market = market
  | otherwise                              = nullMarket
-- | First matching market in file order, if any.  Pattern matching on
-- the head binds it directly instead of testing for [] and calling
-- 'head'.
firstFound :: Searcher (Maybe Market)
firstFound needle markets =
  case allFound needle markets of
    []      -> Nothing
    (m : _) -> Just m
-- | Last matching market, if any.  The non-empty case is proven by
-- the pattern, so 'last' cannot crash here.
lastFound :: Searcher (Maybe Market)
lastFound needle markets =
  case allFound needle markets of
    [] -> Nothing
    ms -> Just (last ms)
-- | Every matching market, in file order.
allFound :: Searcher [Market]
allFound needle markets = getOrdList (search makeMonoid needle markets)
-- | Count of matching markets.
numberFound :: Searcher Int
numberFound needle = length . allFound needle
-- | Matching markets sorted with the 'Ord' instance above (which
-- compares the x field).
orderedNtoS :: Searcher [Market]
orderedNtoS needle = sort . allFound needle
{-
-- Alternative of matchName which gives Text. Is much better.
f :: Market -> T.Text -> T.Text
f market text =
(\a ->
case (T.isInfixOf a (marketname market)) of True -> a
False -> None) text
-- Test passed.
testLoadData :: IO ()
testLoadData =(fmap encode loadData) >>= B.writeFile outputFile
-- Typechecks
parseMarkets :: B.ByteString -> Either String [Market]
parseMarkets byteStringData = eitherDecode byteStringData
-- To help testParseData get Value from eitherDecode so as to pass it to encode.
getValueFromEither :: Either String Value -> Value
getValueFromEither (Right a) = a
getValueFromEither (Left b) = String (T.pack b)
-- Test passed
testParseData :: IO ()
testParseData = do
file <- B.readFile inputFile
B.writeFile outputFile (encode . getValueFromEither $ parseData file)
-- Test passed
testYnToBool :: IO ()
testYnToBool = do
file <- B.readFile inputFile
B.writeFile outputFile (encode . ynToBool . fromJust $ decode file)
-}
| urbanslug/cs194 | Src/Week6/HW06.hs | gpl-3.0 | 4,846 | 0 | 12 | 1,219 | 1,113 | 584 | 529 | 81 | 2 |
{-
(c) The AQUA Project, Glasgow University, 1994-1998
\section[ErrsUtils]{Utilities for error reporting}
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE BangPatterns #-}
module U.ErrUtils (MsgDoc,
Messages,
ErrMsg,
mkWarnMsg,
makeIntoWarning,
emptyMessages
) where
import Language.Haskell.Utility.Bag
import U.Outputable
import Language.Haskell.Syntax.SrcLoc
import U.DynFlags
-------------------------
type MsgDoc = SDoc
-- -----------------------------------------------------------------------------
-- Basic error messages: just render a message with a source location.
-- Warnings and errors are collected separately, each as a Bag (an
-- unordered collection; errors are sorted by SrcSpan when reported).
type Messages = (WarningMessages, ErrorMessages)
type WarningMessages = Bag WarnMsg
type ErrorMessages = Bag ErrMsg
-- | A single diagnostic: its source location, how to print
-- unqualified names in it, the structured payload, a precomputed
-- one-line rendering, and (for warnings) the flag that produced it.
data ErrMsg = ErrMsg {
   errMsgSpan :: SrcSpan,
   errMsgContext :: PrintUnqualified,
   errMsgDoc :: ErrDoc,
   -- | This has the same text as errDocImportant . errMsgDoc.
   errMsgShortString :: String,
   errMsgReason :: WarnReason
   }
-- The SrcSpan is used for sorting errors into line-number order
-- | Categorise error msgs by their importance. This is so each section can
-- be rendered visually distinct. See Note [Error report] for where these come
-- from.
-- The context and supplementary selectors are intentionally unexported
-- (leading underscore); only errDocImportant is consumed in this
-- module (by mk_err_msg).
data ErrDoc = ErrDoc {
        -- | Primary error msg.
        errDocImportant :: [MsgDoc],
        -- | Context e.g. \"In the second argument of ...\".
        _errDocContext :: [MsgDoc],
        -- | Supplementary information, e.g. \"Relevant bindings include ...\".
        _errDocSupplementary :: [MsgDoc]
        }
type WarnMsg = ErrMsg
-- Debug rendering: just the precomputed one-line summary.
instance Show ErrMsg where
    show = errMsgShortString
-- | Re-tag a message with the given warning reason, leaving every
-- other field untouched.
makeIntoWarning :: WarnReason -> ErrMsg -> ErrMsg
makeIntoWarning reason err = err { errMsgReason = reason }
-- -----------------------------------------------------------------------------
-- Collecting up messages for later ordering and printing.
-- | Assemble an 'ErrMsg', precomputing the one-line rendering of the
-- important part of the payload (the short string mirrors
-- @errDocImportant doc@ by construction).
mk_err_msg :: DynFlags -> SrcSpan -> PrintUnqualified -> ErrDoc -> ErrMsg
mk_err_msg dflags locn print_unqual doc = ErrMsg
    { errMsgSpan        = locn
    , errMsgContext     = print_unqual
    , errMsgDoc         = doc
    , errMsgShortString = short
    , errMsgReason      = NoReason
    }
  where
    short = showSDoc dflags (vcat (errDocImportant doc))
-- ^ A long (multi-line) error message
-- | Build a warning message whose payload is a single important doc
-- (no context or supplementary sections).
mkWarnMsg :: DynFlags -> SrcSpan -> PrintUnqualified -> MsgDoc -> ErrMsg
mkWarnMsg dflags locn unqual msg =
    mk_err_msg dflags locn unqual (ErrDoc [msg] [] [])
----------------
emptyMessages :: Messages
emptyMessages = (emptyBag, emptyBag)
| shayan-najd/HsParser | U/ErrUtils.hs | gpl-3.0 | 2,639 | 0 | 11 | 660 | 396 | 239 | 157 | 44 | 1 |
-- |Implements Weber–Fechner law for per weight perception. Using S0
-- of 1, k of 1. See more information:
-- http://en.wikipedia.org/wiki/Weber%E2%80%93Fechner_law
module WeberFechner (weberFechnerTable,cTable) where
import Data.List (intercalate)
-- Fixed parameters of the mapping; see the formula for 'a' below.
humanBits :: Integer
pwmBits :: Integer
s0 :: Integer
humanBits = 8 -- ^Input value range
pwmBits = 12 -- ^Output value range
s0 = 10 -- ^Threshold of stimulus below which it is not perceived
a = log(2^pwmBits-fromIntegral s0)/(2^humanBits-1)
-- |Modified Weber–Fechner algorithm which saves energy when stimulus can not be
-- perceived.
-- Maps one input intensity (0 .. 2^humanBits-1) to a PWM duty value:
-- an output exactly at the perception threshold s0 is clamped to 0,
-- so the hardware is switched fully off instead of emitting an
-- imperceptible stimulus.
weberFechner :: Integer -> Integer
weberFechner x_i | alg == s0 = 0
                 | otherwise = alg
  where alg = round (exp (a*x)) + (s0-1)
        x = fromIntegral x_i
weberFechnerTable = map weberFechner [0..2^humanBits-1]
-- C-source fragments for emitting the table as an AVR PROGMEM array.
header = "/* Pre-calculated Weber–Fechner table generated by helpers/WeberFechner.hs */"
signature = "const uint16_t weber_fechner_table[] PROGMEM"
-- | Complete C definition of the lookup table, ready to paste into
-- (or pipe to) a C source file.
cTable = header ++ "\n" ++ signature ++ " = {" ++ list ++ "};"
  where list = intercalate "," $ map show weberFechnerTable
main = putStrLn cTable
| elovalo/elovalo | helpers/WeberFechner.hs | gpl-3.0 | 1,151 | 0 | 12 | 223 | 260 | 142 | 118 | 20 | 1 |
module WebParsing.ParsecCombinators
(getCourseFromTag,
findCourseFromTag,
getPostType,
getDepartmentName,
isDepartmentName,
generalCategoryParser,
parseCategory,
postInfoParser,
text, parseAll) where
import qualified Text.Parsec as P
import Text.Parsec ((<|>))
import qualified Data.Text as T
import Text.Parsec.Text (Parser)
import Database.Tables (Post(Post))
import Control.Monad (mapM)
import Database.DataType
-- | Extract the course code from a \"/course/<code>\" tag URL,
-- returning the empty text when the tag does not match that shape.
getCourseFromTag :: T.Text -> T.Text
getCourseFromTag courseTag =
    case P.parse findCourseFromTag "(source)" courseTag of
        Right courseName -> courseName
        Left _           -> ""
-- | Parser for a course tag URL: a literal \"/course/\" prefix
-- followed by at least one character of course code.
findCourseFromTag :: Parser T.Text
findCourseFromTag =
    P.string "/course/" >> fmap T.pack (P.many1 P.anyChar)
-- | Parse a post description into the 'Post' header information plus
-- its list of requirement-category texts.
generalCategoryParser :: T.Text -> Maybe T.Text -> Parser (Post, [T.Text])
generalCategoryParser fullPostName firstCourse = do
    postInfo <- postInfoParser fullPostName firstCourse
    categoryTexts <- splitPrereqText
    return (postInfo, categoryTexts)
-- Post Parsing
-- | Build the 'Post' header: the department name and post type are
-- themselves parsed out of the full post name; on success the program
-- description is parsed from the remaining input, otherwise a
-- placeholder 'Other' post is returned.
postInfoParser :: T.Text -> Maybe T.Text -> Parser Post
postInfoParser fullPostName firstCourse = do
  let parsed = P.parse getDeptNameAndPostType "(source)" fullPostName
  case parsed of
    Right (deptName, postType) -> do
      programDescription <- getRequirements firstCourse
      -- 'read' here assumes postType is one of the strings produced by
      -- getPostType (Specialist/Major/Minor) and that these name
      -- constructors of the post-type enum -- TODO confirm against
      -- Database.DataType.
      return $ Post (read $ T.unpack postType) deptName (T.pack " ") programDescription
    Left _ -> return $ Post Other (fullPostName) (T.pack " ") (T.pack " ")
-- | Split a full post name into its department-name prefix and
-- post-type keyword, skipping any leading whitespace.
getDeptNameAndPostType :: Parser (T.Text, T.Text)
getDeptNameAndPostType = do
    P.spaces
    dept <- getDepartmentName
    post <- getPostType
    return (dept, post)
-- | Consume input up to (but not including) the first post-type
-- keyword; 'P.lookAhead' leaves the keyword itself unconsumed so
-- 'getPostType' can read it afterwards.
getDepartmentName :: Parser T.Text
getDepartmentName =
  P.try (parseUntil (P.try (P.lookAhead (text " Specialist")) <|>
         P.try (P.lookAhead (text " Major")) <|>
         P.try (P.lookAhead (text " Minor"))))
-- | Read the post-type keyword itself (after optional whitespace).
getPostType :: Parser T.Text
getPostType = do
    P.spaces
    P.choice (map (P.try . text) ["Specialist", "Major", "Minor"])
-- | Consume input up to (and including) the given post-type keyword,
-- returning the text that precedes it.
isDepartmentName :: T.Text -> Parser T.Text
isDepartmentName postType = parseUntil (text postType)
-- Post Category Parsing
-- | Skip the preamble of a program description, trying each known
-- section heading in turn and finally falling back to scanning for
-- the first course code.
getRequirements :: Maybe T.Text -> Parser T.Text
getRequirements firstCourse =
  P.try (parseUntil (text "First Year")) <|>
  P.try (parseUntil (text "Program Course Requirements:")) <|>
  P.try (parseUntil (text "Program requirements:")) <|>
  findFirstCourse firstCourse
-- | Consume input up to (but not including) the given first course
-- code; with no course, or when the course never appears, consume
-- everything up to end of input.
findFirstCourse :: Maybe T.Text -> Parser T.Text
findFirstCourse = maybe everything upTo
  where
    everything = parseUntil P.eof
    upTo course =
        P.try (parseUntil (P.lookAhead (text course))) <|> everything
-- | Skip one line beginning with \"Note\" (or the trailing note when
-- no newline follows), returning the skipped text.
parseNoteLine :: Parser T.Text
parseNoteLine =
    P.string "Note" >> (P.try (parseUntil (P.char '\n')) <|> parseUntil P.eof)
-- | Discard a trailing \"Notes\"\/\"NOTES\" section (everything to
-- end of input), yielding the empty text.
parseNotes :: Parser T.Text
parseNotes =
    (P.try (text "Notes") <|> P.try (text "NOTES"))
        >> parseUntil P.eof
        >> return ""
-- | Collect characters until the given parser succeeds, packing them
-- into 'T.Text'; the terminator's own result is discarded.
parseUntil :: Parser a -> Parser T.Text
parseUntil stop = fmap T.pack (P.manyTill P.anyChar (P.try stop))
-- | Split the remaining description into category chunks, dropping
-- note sections along the way; the final 'parseUntil P.eof'
-- alternative sweeps up any trailing text with no separator.
splitPrereqText :: Parser [T.Text]
splitPrereqText = do
  P.manyTill (P.try parseNotes <|> P.try parseNoteLine <|>
              P.try parseCategory <|> parseUntil P.eof) P.eof
-- | One requirement category: text up to a separator, with the
-- separator character itself consumed and discarded.
parseCategory :: Parser T.Text
parseCategory = do
    category <- parseUpToSeparator
    _ <- P.anyChar
    return category
-- Consumes characters while the next character is not a separator:
-- @notFollowedBy (noneOf ";\r\n")@ succeeds exactly when the next
-- character IS one of ';', CR, LF (or input is exhausted), leaving
-- the separator unconsumed for the caller.
parseUpToSeparator :: Parser T.Text
parseUpToSeparator = parseUntil (P.notFollowedBy (P.noneOf ";\r\n"))
-- | Match the given 'T.Text' literally, character by character,
-- returning it on success.
text :: T.Text -> Parser T.Text
text expected = do
    _ <- mapM P.char (T.unpack expected)
    return expected
-- For testing purposed in REPL
parseAll :: Parser [T.Text]
parseAll = P.many parseCategory
| hermish/courseography | app/WebParsing/ParsecCombinators.hs | gpl-3.0 | 3,828 | 0 | 17 | 801 | 1,300 | 641 | 659 | 99 | 2 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.PlusDomains.Circles.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Shut down. See https:\/\/developers.google.com\/+\/api-shutdown for more
-- details.
--
-- /See:/ <https://developers.google.com/+/domains/ Google+ Domains API Reference> for @plusDomains.circles.list@.
module Network.Google.Resource.PlusDomains.Circles.List
(
-- * REST Resource
CirclesListResource
-- * Creating a Request
, circlesList
, CirclesList
-- * Request Lenses
, cUserId
, cPageToken
, cMaxResults
) where
import Network.Google.PlusDomains.Types
import Network.Google.Prelude
-- | A resource alias for @plusDomains.circles.list@ method which the
-- 'CirclesList' request conforms to.
-- Servant-style route: GET plusDomains/v1/people/{userId}/circles
-- with optional pageToken and maxResults query parameters, returning
-- a JSON 'CircleFeed'.
type CirclesListResource =
     "plusDomains" :>
       "v1" :>
         "people" :>
           Capture "userId" Text :>
             "circles" :>
               QueryParam "pageToken" Text :>
                 QueryParam "maxResults" (Textual Word32) :>
                   QueryParam "alt" AltJSON :> Get '[JSON] CircleFeed
-- | Shut down. See https:\/\/developers.google.com\/+\/api-shutdown for more
-- details.
--
-- /See:/ 'circlesList' smart constructor.
-- Fields mirror the REST query/path parameters one-to-one; they are
-- accessed through the lenses below rather than directly.
data CirclesList =
  CirclesList'
    { _cUserId :: !Text
    , _cPageToken :: !(Maybe Text)
    , _cMaxResults :: !(Textual Word32)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CirclesList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cUserId'
--
-- * 'cPageToken'
--
-- * 'cMaxResults'
-- | Smart constructor: only the user id is required; the paging token
-- defaults to absent and the page size to 20.
circlesList
    :: Text -- ^ 'cUserId'
    -> CirclesList
circlesList uid = CirclesList'
    { _cUserId = uid
    , _cPageToken = Nothing
    , _cMaxResults = 20
    }
-- Lens accessors for the request fields.
-- | The ID of the user to get circles for. The special value \"me\" can be
-- used to indicate the authenticated user.
cUserId :: Lens' CirclesList Text
cUserId = lens _cUserId (\ s a -> s{_cUserId = a})
-- | The continuation token, which is used to page through large result sets.
-- To get the next page of results, set this parameter to the value of
-- \"nextPageToken\" from the previous response.
cPageToken :: Lens' CirclesList (Maybe Text)
cPageToken
  = lens _cPageToken (\ s a -> s{_cPageToken = a})
-- | The maximum number of circles to include in the response, which is used
-- for paging. For any response, the actual number returned might be less
-- than the specified maxResults.
-- (_Coerce converts between the raw Word32 and its Textual wrapper.)
cMaxResults :: Lens' CirclesList Word32
cMaxResults
  = lens _cMaxResults (\ s a -> s{_cMaxResults = a}) .
      _Coerce
instance GoogleRequest CirclesList where
type Rs CirclesList = CircleFeed
type Scopes CirclesList =
'["https://www.googleapis.com/auth/plus.circles.read",
"https://www.googleapis.com/auth/plus.login",
"https://www.googleapis.com/auth/plus.me"]
requestClient CirclesList'{..}
= go _cUserId _cPageToken (Just _cMaxResults)
(Just AltJSON)
plusDomainsService
where go
= buildClient (Proxy :: Proxy CirclesListResource)
mempty
| brendanhay/gogol | gogol-plus-domains/gen/Network/Google/Resource/PlusDomains/Circles/List.hs | mpl-2.0 | 3,851 | 0 | 15 | 871 | 489 | 292 | 197 | 70 | 1 |
{-# LANGUAGE TemplateHaskell, CPP #-}
-- |
-- Module : Main
-- Copyright : (c) 2013 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Main (
main
) where
import Control.Applicative
import Control.Exception (SomeException, try)
import Control.Lens
import Control.Monad (liftM)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.ByteString (ByteString)
import Data.Char (toUpper)
import Data.String (IsString(..))
import Ligature.Config
import Ligature.JSON
import Ligature.Splices
import Ligature.Types
import Ligature.URL
import Network.URI (URI)
import Snap.Core
import Snap.Http.Server hiding (Config)
import Snap.Snaplet
import Snap.Snaplet.Heist
import Snap.Util.FileServe
import System.IO
import qualified Data.ByteString.Char8 as BS
import qualified Data.Map as M
import qualified Data.Text as T
import qualified Data.Text.Encoding as E
-- | Application snaplet state: currently only the Heist templating
-- snaplet.  'makeLenses' generates the 'heist' lens used below.
data App = App
    { _heist :: Snaplet (Heist App)
    }

makeLenses ''App
-- Lets Heist helpers (renderWithSplices etc.) find the templating
-- snaplet inside 'App'.
instance HasHeist App where
    heistLens = subSnaplet heist
-- | Types that can be decoded from a mandatory request parameter.
-- Instances fail loudly (via 'error' \/ partial 'read') when the
-- parameter is missing or unparseable.
class RequiredParam a where
    requireParam :: MonadSnap m => ByteString -> m a
-- | Parse a boolean parameter: the first character is upcased so that
-- \"true\"\/\"false\" read as Haskell 'Bool' literals.
--
-- Fix: the original capitaliser used the partial 'head'\/'tail',
-- which crash on an empty parameter value.  Pattern matching handles
-- \"\" (the subsequent 'read' still rejects it, but with a parse
-- error rather than a 'head' exception).
instance RequiredParam Bool where
    requireParam = liftM (read . capitalize) . requireParam
      where
        capitalize ""       = ""
        capitalize (c : cs) = toUpper c : cs
-- | Read an integer parameter.
-- NOTE(review): 'read' is partial -- a malformed value raises an
-- exception; 'readMaybe' with an explicit error message would be
-- clearer.
instance RequiredParam Int where
    requireParam = liftM read . requireParam
-- | Parameter decoded through 'byteKey' (project-specific key type).
instance RequiredParam Key where
    requireParam = liftM byteKey . requireParam
-- | String parameter: the raw bytes unpacked character-wise.
instance RequiredParam String where
    requireParam = fmap BS.unpack . requireParam
-- | Base instance: fetch the raw parameter bytes, erroring out when
-- the parameter is absent from the request.
instance RequiredParam ByteString where
    requireParam name = do
        val <- getParam name
        maybe (error $ "Missing param " ++ BS.unpack name)
              return val
-- | Structured parameter: both the parameter name and its value are
-- decoded as UTF-8 text and handed to the project's 'parseParam'.
instance RequiredParam Param where
    requireParam name = do
        val <- E.decodeUtf8 <$> requireParam name
        return $ parseParam (E.decodeUtf8 name, val)
-- | Read every request parameter except the named ones, converting
-- each through 'requireParam'.
allParamsExcept :: (MonadSnap m, RequiredParam a) => [BS.ByteString] -> m [a]
allParamsExcept excluded = do
    params <- M.toList <$> getParams
    let wanted = [name | (name, _) <- params, name `notElem` excluded]
    mapM requireParam wanted
-- | Entry point: load the snap config, initialise the snaplet, start
-- the HTTP server, and always run the snaplet cleanup afterwards.
main :: IO ()
main = do
    cfg <- snapConfig
    (ms, app, cleanup) <- runSnaplet Nothing . site $ appConfig cfg
    -- runSnaplet's first component is its startup message log.
    hPutStrLn stderr $ T.unpack ms
    print cfg
    print $ appConfig cfg
    -- NOTE(review): every exception from the server is caught and
    -- silently discarded so that 'cleanup' still runs; consider at
    -- least logging it.
    _ <- try $ httpServe cfg app :: IO (Either SomeException ())
    cleanup
-- | Build the application snaplet: Heist templates, colour palettes
-- and dashboards loaded from disk, then routes and navigation
-- splices wired up.
site :: Config -> SnapletInit App App
site Config{..} = makeSnaplet "ligature" "Graphite Dashboards" Nothing $ do
    h <- nestSnaplet "" heist $ heistInit "templates"
    ps <- liftIO $ palettes "palettes"
    m <- liftIO $ dashboards dashboardDir ps
    addRoutes $ routes graphiteUrl m
    addSplices $ navSplices m
    return $ App h
-- | URL routing table; the empty prefix serves static assets out of
-- the public directory.
routes :: HasHeist a => URI -> HashMap Dash -> [(ByteString, Handler a b ())]
routes uri hmap =
    [ ("/dashboards/:dashboard/graphs/:graph", graph uri hmap)
    , ("/dashboards/:dashboard", dashboard hmap)
    , ("", serveDirectory "public")
    ]
-- | Handler for a single graph: every query parameter except the
-- routing captures is forwarded to Graphite, and the resulting bytes
-- are written straight to the response.
graph :: HasHeist a => URI -> HashMap Dash -> Handler a b ()
graph uri hmap = do
    d <- requireParam "dashboard"
    g <- requireParam "graph"
    ps <- allParamsExcept ["dashboard", "graph"]
    liftIO (graphData uri (findGraph hmap d g) ps) >>= writeBS
dashboard :: HasHeist a => HashMap Dash -> Handler a b ()
dashboard hmap = do
d <- requireParam "dashboard"
f <- (maybe "-1day" (fromString . BS.unpack)) <$> getParam "from"
renderWithSplices "dashboard" $ dashSplices (findDash hmap d) f
| brendanhay/ligature | src/Main.hs | mpl-2.0 | 3,982 | 0 | 13 | 1,013 | 1,137 | 583 | 554 | -1 | -1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Compute.RegionOperations.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the specified region-specific Operations resource.
--
-- /See:/ <https://developers.google.com/compute/docs/reference/latest/ Compute Engine API Reference> for @compute.regionOperations.delete@.
module Network.Google.Resource.Compute.RegionOperations.Delete
(
-- * REST Resource
RegionOperationsDeleteResource
-- * Creating a Request
, regionOperationsDelete
, RegionOperationsDelete
-- * Request Lenses
, rodProject
, rodOperation
, rodRegion
) where
import Network.Google.Compute.Types
import Network.Google.Prelude
-- | A resource alias for @compute.regionOperations.delete@ method which the
-- 'RegionOperationsDelete' request conforms to.
-- Servant-style route:
-- DELETE compute/v1/projects/{project}/regions/{region}/operations/{operation}
-- returning no body.
type RegionOperationsDeleteResource =
     "compute" :>
       "v1" :>
         "projects" :>
           Capture "project" Text :>
             "regions" :>
               Capture "region" Text :>
                 "operations" :>
                   Capture "operation" Text :>
                     QueryParam "alt" AltJSON :> Delete '[JSON] ()
-- | Deletes the specified region-specific Operations resource.
--
-- /See:/ 'regionOperationsDelete' smart constructor.
-- Fields mirror the three REST path parameters; access them through
-- the lenses below.
data RegionOperationsDelete =
  RegionOperationsDelete'
    { _rodProject :: !Text
    , _rodOperation :: !Text
    , _rodRegion :: !Text
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'RegionOperationsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rodProject'
--
-- * 'rodOperation'
--
-- * 'rodRegion'
-- | Smart constructor; all three path parameters are required.
regionOperationsDelete
    :: Text -- ^ 'rodProject'
    -> Text -- ^ 'rodOperation'
    -> Text -- ^ 'rodRegion'
    -> RegionOperationsDelete
regionOperationsDelete project operation region = RegionOperationsDelete'
    { _rodProject = project
    , _rodOperation = operation
    , _rodRegion = region
    }
-- Lens accessors for the request fields.
-- | Project ID for this request.
rodProject :: Lens' RegionOperationsDelete Text
rodProject
  = lens _rodProject (\ s a -> s{_rodProject = a})
-- | Name of the Operations resource to delete.
rodOperation :: Lens' RegionOperationsDelete Text
rodOperation
  = lens _rodOperation (\ s a -> s{_rodOperation = a})
-- | Name of the region for this request.
rodRegion :: Lens' RegionOperationsDelete Text
rodRegion
  = lens _rodRegion (\ s a -> s{_rodRegion = a})
instance GoogleRequest RegionOperationsDelete where
type Rs RegionOperationsDelete = ()
type Scopes RegionOperationsDelete =
'["https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/compute"]
requestClient RegionOperationsDelete'{..}
= go _rodProject _rodRegion _rodOperation
(Just AltJSON)
computeService
where go
= buildClient
(Proxy :: Proxy RegionOperationsDeleteResource)
mempty
| brendanhay/gogol | gogol-compute/gen/Network/Google/Resource/Compute/RegionOperations/Delete.hs | mpl-2.0 | 3,758 | 0 | 16 | 858 | 467 | 278 | 189 | 76 | 1 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QRegion.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:34
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QRegion (
RegionType, eEllipse
)
where
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
-- Phantom-tagged wrapper: 'RegionType' is a 'QEnum' over an Int
-- payload, constructed via 'ieRegionType'.
data CRegionType a = CRegionType a
type RegionType = QEnum(CRegionType Int)
-- | Wrap a raw enum value as a 'RegionType'.
ieRegionType :: Int -> RegionType
ieRegionType x = QEnum (CRegionType x)
-- Conversions between the wrapped enum and raw integers, plus helpers
-- that lift the conversion over an IO action returning one value or a
-- list of values.
instance QEnumC (CRegionType Int) where
  qEnum_toInt (QEnum (CRegionType x)) = x
  qEnum_fromInt x = QEnum (CRegionType x)
  withQEnumResult x
    = do
        ti <- x
        return $ qEnum_fromInt $ fromIntegral ti
  withQEnumListResult x
    = do
        til <- x
        return $ map qEnum_fromInt til
-- Slot connection for handlers taking a 'RegionType' argument.  The
-- Haskell handler is wrapped as a C function pointer and kept alive
-- via a StablePtr; both are passed to the C-side connect call.
instance Qcs (QObject c -> RegionType -> IO ()) where
 connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
  = do
    funptr <- wrapSlotHandler_int slotHandlerWrapper_int
    stptr <- newStablePtr (Wrap _handler)
    withObjectPtr _qsig_obj $ \cobj_sig ->
        withCWString _qsig_nam $ \cstr_sig ->
            withObjectPtr _qslt_obj $ \cobj_slt ->
                withCWString _qslt_nam $ \cstr_slt ->
                    qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
    return ()
  where
    -- Called back from C with the raw int; a null object signals
    -- disconnection, at which point the function pointer and stable
    -- pointer are freed to avoid leaking them.
    slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
    slotHandlerWrapper_int funptr stptr qobjptr cint
     = do qobj <- qObjectFromPtr qobjptr
          let hint = fromCInt cint
          if (objectIsNull qobj)
           then do when (stptr/=ptrNull)
                    (freeStablePtr (castPtrToStablePtr stptr))
                   when (funptr/=ptrNull)
                    (freeHaskellFunPtr (castPtrToFunPtr funptr))
           else _handler qobj (qEnum_fromInt hint)
          return ()
-- Enum members: Rectangle = 0 (via the shared QeRectangle class).
instance QeRectangle RegionType where
  eRectangle
    = ieRegionType $ 0
-- Ellipse = 1; defined directly since it has no shared class member.
eEllipse :: RegionType
= ieRegionType $ 1
| uduki/hsQt | Qtc/Enums/Gui/QRegion.hs | bsd-2-clause | 2,334 | 0 | 18 | 520 | 589 | 298 | 291 | 51 | 1 |
module Lecture.DT.April15Spec (main, spec) where
import Lecture.DT.April15
import Test.Hspec
import Test.QuickCheck
import Test.QuickCheck.Instances
-- | Standalone entry point so this spec can be run directly.
main :: IO ()
main = hspec spec
-- | Test tree: currently one QuickCheck property over 'someFunction'.
spec :: Spec
spec = do
  describe "someFunction" $ do
    it "should work fine" $ do
      property someFunction
someFunction :: Bool -> Bool -> Property
someFunction x y = x === y
| athanclark/dt-haskell-intro | test/Lecture/DT/April15Spec.hs | bsd-3-clause | 367 | 0 | 13 | 70 | 118 | 63 | 55 | 14 | 1 |
{-# OPTIONS_GHC -Wall #-}
module Kitesys where
import Classy
--import Classy.State hiding ( run )
--import Control.Monad.State
--import qualified Data.HashMap.Lazy as HM
-- | Build the dynamics of a kite tethered to a rotating carousel arm:
-- the arm rotates about the Newtonian z-axis by coordinate d, and the
-- kite is a rigid body located relative to the arm tip, subject to
-- aerodynamic forces\/moments and a tether-tension constraint force.
-- Generalized effective forces for the translational speeds are
-- printed as a side effect.
carouselSys :: IO System
carouselSys = getSystemT $ do
  n <- newtonianBases
  -- Carousel arm: length parameter and rotation coordinate/speed.
  rArm <- addParam "rArm"
  delta <- addCoord "d"
  _ <- derivIsSpeed delta
  carouselBases <- rotZ n delta "C"
  -- Kite position relative to the arm tip, plus tether length r.
  x <- addCoord "x"
  y <- addCoord "y"
  z <- addCoord "z"
  [x',y',z'] <- mapM derivIsSpeed [x,y,z]
  r <- addCoord "r"
  -- Kite inertia and body angular-velocity speeds.
  jx <- addParam "Jx"
  jy <- addParam "Jy"
  jz <- addParam "Jz"
  wx <- addSpeed "wx"
  wy <- addSpeed "wy"
  wz <- addSpeed "wz"
  m <- addParam "m"
  kiteBases <- basesWithAngVel n (wx,wy,wz) "B"
  let r'n0'k = relativePoint N0 $ xyzVec ((rArm + x), y, z) carouselBases
  -- External force/moment components and tether tension.
  tension <- addParam "T"
  fx <- addParam "Fx"
  fy <- addParam "Fy"
  fz <- addParam "Fz"
  mx <- addParam "Tx"
  my <- addParam "Ty"
  mz <- addParam "Tz"
  -- carousel arm rigid body
  cJxx <- addParam "cJxx"
  cMass <- addParam "cm"
  let armCm = relativePoint N0 $ xVec (rArm/2) carouselBases
  _ <- addRigidBody cMass (simpleDyadic cJxx 0 0 carouselBases) armCm carouselBases
  -- kite rigid body
  kite <- addRigidBody m (simpleDyadic jx jy jz kiteBases) r'n0'k kiteBases
  -- external forces/torques
  addForce kite r'n0'k (xyzVec (fx,fy,fz) kiteBases)
  addMoment kite (xyzVec (mx,my,mz) kiteBases)
  -- constraint force
  addForce kite r'n0'k (xyzVec (-tension*x/r, -tension*y/r, -tension*z/r) carouselBases)
  liftIO $ print $ generalizedEffectiveForce x' kite
  liftIO $ print $ generalizedEffectiveForce y' kite
  liftIO $ print $ generalizedEffectiveForce z' kite
-- | Build the carousel system and print Kane's equations for it.
carousel :: IO ()
carousel =
  carouselSys >>= \sys -> do
    putStrLn "\n--------------- kane's eqs: ------------------"
    print (kanes sys)
-- | Build the free-flying (crosswind) kite system: a single rigid body at
-- (x,y,z) in the Newtonian frame with external forces/moments and a
-- tether-tension constraint force toward the origin, then print the
-- generalized effective forces for x', y', z'.
crosswindSys :: IO System
crosswindSys = getSystemT $ do
  n <- newtonianBases
  x <- addCoord "x"
  y <- addCoord "y"
  z <- addCoord "z"
  [x',y',z'] <- mapM derivIsSpeed [x,y,z]
  r <- addCoord "r"
  jx <- addParam "Jx"
  jy <- addParam "Jy"
  jz <- addParam "Jz"
  wx <- addSpeed "wx"
  wy <- addSpeed "wy"
  wz <- addSpeed "wz"
  m <- addParam "m"
  -- kite body frame with angular velocity (wx,wy,wz) w.r.t. N
  kiteBases <- basesWithAngVel n (wx,wy,wz) "B"
  -- kite position expressed directly in the Newtonian frame
  let r'n0'k = relativePoint N0 $ xyzVec (x, y, z) n
  tension <- addParam "T"
  fx <- addParam "Fx"
  fy <- addParam "Fy"
  fz <- addParam "Fz"
  mx <- addParam "Mx"
  my <- addParam "My"
  mz <- addParam "Mz"
  -- kite rigid body
  kite <- addRigidBody m (simpleDyadic jx jy jz kiteBases) r'n0'k kiteBases
  -- external forces/torques
  addForce kite r'n0'k (xyzVec (fx,fy,fz) kiteBases)
  addMoment kite (xyzVec (mx,my,mz) kiteBases)
  -- constraint force: tether tension toward the origin, magnitude T, along -r
  addForce kite r'n0'k (xyzVec (-tension*x/r, -tension*y/r, -tension*z/r) n)
  liftIO $ print $ generalizedEffectiveForce x' kite
  liftIO $ print $ generalizedEffectiveForce y' kite
  liftIO $ print $ generalizedEffectiveForce z' kite
-- | Build the crosswind system and print Kane's equations for it.
crosswind :: IO ()
crosswind =
  crosswindSys >>= \sys -> do
    putStrLn "\n--------------- kane's eqs: ------------------"
    print (kanes sys)
| ghorn/classy-dvda | src/Kitesys.hs | bsd-3-clause | 3,195 | 0 | 15 | 664 | 1,177 | 549 | 628 | 84 | 1 |
--*- haskell -*-
-- Replacement Prelude for the hs2lazy compiler (appears to be compiled by a
-- restricted Haskell dialect — explicit if/case, no guards). Keep that style.
data Maybe a = Nothing | Just a
data Ordering = LT | EQ | GT
-- | Difference-list style show continuation, as in the real Prelude.
type ShowS = String -> String
-- Minimal Show class: each method has a default in terms of the other.
class Show a where
    showsPrec :: Int -> a -> ShowS
    show :: a -> String
    showsPrec _ x s = show x ++ s
    show x = showsPrec 0 x ""
-- Decimal rendering of Int, digit by digit via div/mod 10.
-- NOTE(review): for the most negative Int, (0-n) overflows on a fixed-width
-- Int — acceptable only if hs2lazy Ints are unbounded; confirm.
instance Show Int where
    showsPrec p n = if n < 0
                    then showChar '-' . shows (0-n)
                    else let d = chr (ord '0' + mod n 10)
                             m = div n 10
                         in if m == 0
                            then showChar d
                            else showsPrec p m . showChar d
-- Structural list equality; (/=) defined as the negation of (==).
instance Eq a => Eq [a] where
    (==) [] [] = True
    (==) [] _ = False
    (==) _ [] = False
    (==) (x:xs) (y:ys) = x == y && xs == ys
    (/=) xs ys = not (xs == ys)
-- | shows = showsPrec at precedence 0, as in the real Prelude.
shows :: (Show a) => a -> ShowS
shows = showsPrec 0
showChar :: Char -> ShowS
showChar = (:)
showString :: String -> ShowS
showString = (++)
-- | Low-precedence application operator.
($) :: (a -> b) -> a -> b
($) f x = f x
fst :: (a, b) -> a
fst (a, b) = a
snd :: (a, b) -> b
snd (a, b) = b
-- Standard list combinators, written out recursively for the hs2lazy dialect.
map :: (a -> b) -> [a] -> [b]
map f (x:xs) = f x : map f xs
map f [] = []
filter :: (a -> Bool) -> [a] -> [a]
filter p [] = []
filter p (x:xs) = if p x
                  then x : filter p xs
                  else filter p xs
null :: [a] -> Bool
null [] = True
null (_:_) = False
-- head/tail/last/init are partial on [], matching the real Prelude.
head :: [a] -> a
head [] = error "head []"
head (x:xs) = x
tail :: [a] -> [a]
tail [] = error "tail []"
tail (x:xs) = xs
last :: [a] -> a
last [x] = x
last (_:xs) = last xs
last [] = error "Prelude.last: empty list"
init :: [a] -> [a]
init [x] = []
init (x:xs) = x : init xs
init [] = error "Prelude.init: empty list"
length :: [a] -> Int
length [] = 0
length (_:l) = 1 + length l
-- reverse via a left fold with flipped cons.
reverse :: [a] -> [a]
reverse = foldl (\x y -> y : x) []
concat :: [[a]] -> [a]
concat xss = foldr (++) [] xss
concatMap :: (a -> [b]) -> [a] -> [b]
concatMap f = concat . map f
-- NOTE(review): unlike the real Prelude (n <= 0 returns xs), a negative n
-- here recurses until the list is exhausted and returns [] — confirm callers
-- never pass negative counts.
drop :: Int -> [a] -> [a]
drop 0 xs = xs
drop _ [] = []
drop n (_:xs) = drop (n-1) xs
-- ASCII-only character classification via ord ranges.
isDigit, isUpper, isLower :: Char -> Bool
isDigit c = let { o = ord c } in o >= ord '0' && o <= ord '9'
isUpper c = let { o = ord c } in o >= ord 'A' && o <= ord 'Z'
isLower c = let { o = ord c } in o >= ord 'a' && o <= ord 'z'
and, or :: [Bool] -> Bool
and = foldr (&&) True
or = foldr (||) False
foldl :: (a -> b -> a) -> a -> [b] -> a
foldl f z [] = z
foldl f z (x:xs) = foldl f (f z x) xs
-- foldr written with null/head/tail rather than pattern matching.
foldr :: (a -> b -> b) -> b -> [a] -> b
foldr f z xs = if null xs
               then z
               else f (head xs) (foldr f z (tail xs))
elem :: Eq a => a -> [a] -> Bool
elem e [] = False
elem e (x:xs) = e == x || elem e xs
-- First match wins, as in the real Prelude.
lookup :: Eq a => a -> [(a, b)] -> Maybe b
lookup x ((key, val) : ys) = if x == key then Just val else lookup x ys
lookup x [] = Nothing
-- Int-only comparison (no Ord class in this mini-Prelude).
compare :: Int -> Int -> Ordering
compare x y = if x == y then EQ else if x <= y then LT else GT
-- | Infinite list of x, tied as a cyclic knot for sharing.
repeat :: a -> [a]
repeat x = let { xs = x:xs } in xs
not :: Bool -> Bool
not True = False
not False = True
-- Truncates to the shorter list, as in the real Prelude.
zipWith :: (a -> b -> c) -> [a] -> [b] -> [c]
zipWith z (a:as) (b:bs) = z a b : zipWith z as bs
zipWith _ _ _ = []
span, break :: (a -> Bool) -> [a] -> ([a],[a])
span p [] = ([],[])
span p xs@(x:xs') = if p x
                    then case span p xs' of
                           (ys, zs) -> (x:ys,zs)
                    else ([], xs)
break p = span (not . p)
-- | Lexicographic string less-or-equal via character codes.
le :: String -> String -> Bool
le [] _ = True
le _ [] = False
le (x:xs) (y:ys) = case compare (ord x) (ord y) of
                     LT -> True
                     EQ -> le xs ys
                     GT -> False
-- Split on '\n'; a trailing newline does not create an empty final line.
lines :: String -> [String]
lines [] = []
lines s = case break ((==) '\n') s of
            (l, s') -> l : case s' of
                             [] -> []
                             (_:s'') -> lines s''
unlines :: [String] -> String
unlines = concatMap (\s -> s ++ "\n")
listToMaybe :: [a] -> Maybe a
listToMaybe [] = Nothing
listToMaybe (a:_) = Just a
find :: (a -> Bool) -> [a] -> Maybe a
find p = listToMaybe . filter p
otherwise :: Bool
otherwise = True
-- Infinite character stream used to model I/O; the sentinel character with
-- code 256 (one past 8-bit range) marks end-of-input.
data Stream = Stream Char Stream
eof = chr 256
-- | Convert a stream back to a String, stopping at the eof sentinel.
fromStream :: Stream -> String
fromStream (Stream c cs) = if 256 <= ord c then [] else c : fromStream cs
-- | Convert a String to a stream, appending '\n' then the eof sentinel.
toStream :: String -> Stream
toStream [] = Stream '\n' $ Stream eof (toStream [])
toStream (c:cs) = Stream c (toStream cs)
-- putStr ignores its input stream and just emits s.
putStr :: String -> Stream -> Stream
putStr s _ = toStream s
-- | interact maps the whole input stream through a pure String function.
interact :: (String -> String) -> Stream -> Stream
interact f = toStream . f . fromStream
-- Mock IO operations
-- (>>) simply discards its first argument in this pure I/O model.
(>>) :: a -> b -> b
(>>) x y = y
hSetBuffering x y = x
stdout = 1
data Buffering = NoBuffering
| irori/hs2lazy | examples/hs2lazy-prelude.hs | bsd-3-clause | 4,571 | 19 | 14 | 1,548 | 2,631 | 1,360 | 1,271 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.ByteString hiding (map)
import qualified System.Network.ZMQ.MDP.Worker as W
import Data.Foldable
import Control.Concurrent.Thread.Group as TG
import System.Posix.Signals
import qualified Control.Concurrent as CC
import qualified Data.ByteString.Char8 as BS
-- | Run each action in its own thread (tracked in a thread group) and block
-- until all of them finish. A one-shot SIGINT handler kills every forked
-- thread so 'TG.wait' can return and the process exits cleanly; a second
-- Ctrl-C falls through to the default handler (CatchOnce).
threaded :: [IO ()] -> IO ()
threaded actions = do
  tg <- TG.new
  tids <- mapM (TG.forkIO tg) actions
  _ <- installHandler sigINT (CatchOnce $ do
         Prelude.putStrLn "worker caught an interrupt"
         -- fst picks the ThreadId out of each forkIO result pair;
         -- each id is printed before being killed.
         forM_ tids ((\x -> print x >> CC.killThread x) . fst)
       ) Nothing
  Prelude.putStrLn "waiting..."
  TG.wait tg
  Prelude.putStrLn "all dead"
-- | Start four identical MDP echo workers against the local broker.
-- NOTE(review): 'tid' is bound but unused — the four workers are identical;
-- presumably intended for per-worker identification.
main :: IO ()
main = threaded $ flip map [1..4] $ \tid ->
  W.withWorker "tcp://127.0.0.1:5773" "echo"
    (\msgs -> return $ "hi there, ":msgs)
| mwotton/majordomo | src/echoworker.hs | bsd-3-clause | 872 | 0 | 20 | 199 | 274 | 147 | 127 | 24 | 1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module YX.Type.BuildTool
where
import Prelude (Bounded, Enum)
import Control.Applicative (empty, pure)
import Data.Eq (Eq)
import Data.Function (($), (.))
import Data.Maybe (Maybe(Just, Nothing), maybe)
import Data.Ord (Ord)
import Data.String (IsString)
import GHC.Generics (Generic)
import Text.Read (Read)
import Text.Show (Show)
import Data.Aeson (FromJSON(parseJSON), ToJSON(toJSON))
import qualified Data.Aeson as Aeson (withText)
import qualified Data.CaseInsensitive as CI (FoldCase, mk)
import Data.Specifiable (Specifiable)
import Data.Text (Text)
-- | A build tool that is either one of the known tools below or an
-- arbitrary, user-specified textual name.
type SomeBuildTool = Specifiable Text BuildTool

-- | The build tools YX knows about natively.
data BuildTool
    = Cabal
    | Stack
  deriving (Bounded, Enum, Eq, Generic, Ord, Read, Show)
-- | Render a 'BuildTool' as its canonical, capitalised name in any
-- string-like type.
toString :: IsString s => BuildTool -> s
toString Cabal = "Cabal"
toString Stack = "Stack"
{-# INLINEABLE toString #-}
-- | Parse a build-tool name case-insensitively; 'Nothing' when the string
-- names no known tool.
fromString :: (Eq s, IsString s, CI.FoldCase s) => s -> Maybe BuildTool
fromString s
  | ci == "cabal" = Just Cabal
  | ci == "stack" = Just Stack
  | otherwise     = Nothing
  where
    ci = CI.mk s
{-# INLINEABLE fromString #-}
-- | 'Text' specialisation of 'toString'.
toText :: BuildTool -> Text
toText = toString
{-# INLINE toText #-}
-- | 'Text' specialisation of 'fromString'.
fromText :: Text -> Maybe BuildTool
fromText = fromString
{-# INLINE fromText #-}
-- JSON round-trips through the textual name; unknown names fail to parse.
instance FromJSON BuildTool where
    parseJSON = Aeson.withText "BuildTool" $ maybe empty pure . fromText
instance ToJSON BuildTool where
    toJSON = toJSON . toText
| trskop/yx | src/YX/Type/BuildTool.hs | bsd-3-clause | 1,683 | 0 | 9 | 287 | 450 | 262 | 188 | 44 | 3 |
module Tct.Its.Processor.Sizebounds
(
sizebounds
, sizeboundsDeclaration
) where
--import qualified Data.Graph.Inductive.Dot as Gr
import Data.Maybe (fromMaybe)
import qualified Tct.Core.Common.Pretty as PP
import qualified Tct.Core.Common.Xml as Xml
import Tct.Core (withProblem, (.>>>))
import qualified Tct.Core.Data as T
import Tct.Common.ProofCombinators
import Tct.Its.Data.Problem
import Tct.Its.Data.LocalSizebounds (LocalSizebounds)
import qualified Tct.Its.Data.LocalSizebounds as LB (compute)
import Tct.Its.Data.ResultVariableGraph (RVGraph)
import qualified Tct.Its.Data.ResultVariableGraph as RVG (compute)
import Tct.Its.Data.Rule
import Tct.Its.Data.Sizebounds (Sizebounds)
import qualified Tct.Its.Data.Sizebounds as SB (initialise, updateSizebounds)
-- | Computes local sizebounds; Initialises global sizebounds.
localSizebound :: ItsStrategy
localSizebound = T.Apply LocalSizeboundsProc
-- | Sets localSizebounds, rvgraph, sizebounds if not already defined.
initialiseSizebounds :: Its -> T.TctM Its
initialiseSizebounds prob = case localSizebounds_ prob of
Just _ -> return prob
Nothing -> newprob
where
newprob = do
lbounds <- LB.compute (domain prob) (irules_ prob)
let
rvgraph = RVG.compute (tgraph_ prob) lbounds
sbounds = SB.initialise lbounds
-- liftIO $ writeFile "/tmp/rvgraph.dot" $ maybe "Gr" (Gr.showDot . Gr.fglToDot) (rvgraph_ prob)
return $ prob {localSizebounds_ = Just lbounds, rvgraph_ = Just rvgraph, sizebounds_ = Just sbounds}
data LocalSizeboundsProcessor = LocalSizeboundsProc deriving Show
data LocalSizeboundsProof
= LocalSizeboundsProof (Vars, LocalSizebounds) RVGraph
| LocalSizeboundsFail
deriving Show
instance PP.Pretty LocalSizeboundsProof where
pretty (LocalSizeboundsProof vlbounds _) =
PP.text "LocalSizebounds generated; rvgraph"
PP.<$$> PP.indent 2 (PP.pretty vlbounds)
pretty LocalSizeboundsFail = PP.text "LocalSizebounds: no progress."
instance Xml.Xml LocalSizeboundsProof where
toXml _ = Xml.elt "localsizebounds" []
instance T.Processor LocalSizeboundsProcessor where
type ProofObject LocalSizeboundsProcessor = ApplicationProof LocalSizeboundsProof
type In LocalSizeboundsProcessor = Its
type Out LocalSizeboundsProcessor = Its
type Forking LocalSizeboundsProcessor = T.Optional T.Id
execute LocalSizeboundsProc prob | isClosed prob = closedProof prob
execute LocalSizeboundsProc prob = do
nprob <- initialiseSizebounds prob
let pproof = LocalSizeboundsProof (domain prob, error "proc sizeb" `fromMaybe` localSizebounds_ nprob) (error "proc rv" `fromMaybe` rvgraph_ nprob)
if localSizebounds_ prob /= localSizebounds_ nprob
then progress (Progress nprob) (Applicable pproof)
else progress NoProgress (Applicable LocalSizeboundsFail)
data SizeboundsProcessor = SizeboundsProc deriving Show
data SizeboundsProof
= SizeboundsProof (Vars, Sizebounds)
| SizeboundsFail
deriving Show
instance PP.Pretty SizeboundsProof where
pretty (SizeboundsProof vsbounds) =
PP.text "Sizebounds computed:"
PP.<$$> PP.indent 2 (PP.pretty vsbounds)
pretty SizeboundsFail = PP.text "Sizebounds: no progress."
instance Xml.Xml SizeboundsProof where
toXml _ = Xml.elt "sizebounds" []
instance T.Processor SizeboundsProcessor where
type ProofObject SizeboundsProcessor = ApplicationProof SizeboundsProof
type In SizeboundsProcessor = Its
type Out SizeboundsProcessor = Its
type Forking SizeboundsProcessor = T.Optional T.Id
execute SizeboundsProc prob | isClosed prob = closedProof prob
execute SizeboundsProc prob =
if sizebounds_ prob /= sizebounds_ nprob
then progress (Progress nprob) (Applicable pproof)
else progress NoProgress (Applicable SizeboundsFail)
where
nprob = updateSizebounds prob
pproof = SizeboundsProof (domain prob, error "sizebound" `fromMaybe` sizebounds_ nprob)
updateSizebounds :: Its -> Its
updateSizebounds prob = prob {sizebounds_ = Just sbounds'} where
sbounds' = SB.updateSizebounds
(tgraph_ prob)
(error "update rvgraph" `fromMaybe` rvgraph_ prob)
(timebounds_ prob)
(error "update sizebounds" `fromMaybe` sizebounds_ prob)
(error "update localsizebounds" `fromMaybe` localSizebounds_ prob)
-- | Updates sizebounds.
sizebounds :: ItsStrategy
sizebounds = withProblem $
\prob -> if sizeIsDefined prob then sb else localSizebound .>>> sb
where sb = T.Apply SizeboundsProc
sizeboundsDeclaration :: T.Declaration ('[] T.:-> ItsStrategy)
sizeboundsDeclaration = T.declare "sizebounds" [desc] () sizebounds
where desc = "Computes global sizebounds using timebounds."
| ComputationWithBoundedResources/tct-its | src/Tct/Its/Processor/Sizebounds.hs | bsd-3-clause | 4,865 | 0 | 14 | 940 | 1,160 | 621 | 539 | -1 | -1 |
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.IBM.RasterposClip
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.IBM.RasterposClip (
-- * Extension Support
glGetIBMRasterposClip,
gl_IBM_rasterpos_clip,
-- * Enums
pattern GL_RASTER_POSITION_UNCLIPPED_IBM
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| haskell-opengl/OpenGLRaw | src/Graphics/GL/IBM/RasterposClip.hs | bsd-3-clause | 663 | 0 | 5 | 91 | 47 | 36 | 11 | 7 | 0 |
{-# LANGUAGE OverloadedStrings #-}
-- -----------------------------------------------------------------------------
-- |
-- Module : Ircfs.Filesystem
-- Copyright : (c) Andreas-Christoph Bernstein 2011
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : andreas.bernstein@googlemail.com
-- Stability : unstable
-- Portability : not portable
--
-- Ircfs filesystem
--
--------------------------------------------------------------------------------
module Ircfs.Filesystem
(
readF
, showFilepath
, parsePath
, fileStat
, stat
, rootDirFiles
, subDirFiles
, readDir
, readDir'
, write
, append
, substitute
, touch
, insertChannel
, removeChannel
, rm
, event
) where
import Prelude hiding ((.), id, read)
import qualified Prelude as P
import Control.Arrow
import Control.Applicative
import Control.Category
import Control.Monad.State (modify)
import qualified Data.Lens.Common as L
import Data.Monoid
import Data.Char (isNumber)
import Data.Maybe (maybeToList, fromMaybe)
import qualified Data.ByteString.Char8 as B
import qualified Data.IntMap as IM
import qualified Data.Map as M
import Foreign.C.Types (CTime)
import qualified System.Fuse as F
--import qualified System.Fuse.Request as F
import qualified System.Posix.Types as S
import System.FilePath
import Ircfs.Types
import Ircfs.Misc
import Ircfs.Inode
import qualified Network.IRC.Message as I
-- | Map a file name inside a target directory to its filesystem node.
-- Connection-global files ("event", "nick", "raw", "pong") ignore the
-- directory index @n@; per-target files carry it. Unknown names give
-- 'Nothing'.
fileToQreq :: Int -> String -> Maybe Qreq
fileToQreq n file = case file of
  "event" -> Just Qevent
  "nick"  -> Just Qnick
  "raw"   -> Just Qraw
  "pong"  -> Just Qpong
  "ctl"   -> Just (Qctl n)
  "data"  -> Just (Qdata n)
  "name"  -> Just (Qname n)
  "users" -> Just (Qusers n)
  _       -> Nothing
-- | Parse an absolute path inside the mounted filesystem into a node.
-- Depth 1 holds the connection-global files, depth-2 all-numeric components
-- are target directories, depth-3 paths are files inside a target directory.
-- NOTE(review): the explicit "/0" case is already covered by the length-2
-- branch, so it is redundant; P.read is guarded by 'all isNumber' so it
-- cannot fail on the matched component.
parsePath :: FilePath -> Maybe Qreq
parsePath p
  | p == "" = Nothing
  | p == "/" = Just Qroot
  | p == "/ctl" = Just Qrootctl
  | p == "/event" = Just Qevent
  | p == "/raw" = Just Qraw
  | p == "/nick" = Just Qnick
  | p == "/pong" = Just Qpong
  | p == "/0" = Just (Qdir 0)
  | 2 == length (splitDirectories p) =
      let ds = splitDirectories p
          ok = all isNumber x
          x = last ds
      in if ok then (Just . Qdir . P.read) x else Nothing
  | 3 == length (splitDirectories p) =
      let ds = splitDirectories p
          ok = all isNumber x
          x = head (tail ds)
      in if ok then fileToQreq (P.read x) (last ds) else Nothing
  | otherwise = Nothing

-- | Files present in the filesystem root.
rootDirFiles :: [Qreq]
rootDirFiles = [Qrootctl, Qevent, Qnick, Qraw, Qpong]

-- | Files present in every target directory (index is a placeholder 0).
subDirFiles :: [Qreq]
subDirFiles = [Qctl 0, Qdata 0, Qname 0, Qusers 0]
{-
files :: [Qreq]
files = [ Qroot, Qrootctl, Qevent, Qraw, Qnick, Qpong, Qdir 0, Qctl 0, Qname 0
, Qusers 0, Qdata 0
]
-}
-- XXX change to full FilePath ???
-- | File name (basename only) for each node; directories render as "".
showFile :: Qreq -> B.ByteString
showFile Qroot = "/"
showFile Qrootctl = "ctl"
showFile Qevent = "event"
showFile Qraw = "raw"
showFile Qnick = "nick"
showFile Qpong = "pong"
showFile (Qdir _) = ""
showFile (Qctl _) = "ctl"
showFile (Qname _) = "name"
showFile (Qusers _) = "users"
showFile (Qdata _) = "data"
-- XXX
showFilepath :: Qreq -> FilePath
showFilepath = B.unpack . showFile
-- | Unix permission bits for each node (dirs r-x, ctl files write-only,
-- status files read-only, raw/data read-write).
filemode :: Qreq -> S.FileMode
filemode Qroot = 0o555 -- DIR
filemode Qrootctl = 0o222
filemode Qevent = 0o444
filemode Qraw = 0o666
filemode Qnick = 0o444
filemode Qpong = 0o444
filemode Qdir {} = 0o555 -- DIR
filemode Qctl {} = 0o222
filemode Qname {} = 0o444
filemode Qusers{} = 0o444
filemode Qdata {} = 0o666
-- | Default FUSE stat for a node (no per-connection ownership or size).
fileStat :: Qreq -> F.FileStat
fileStat Qroot = defDirStat { F.statFileMode = filemode Qroot }
fileStat Qdir {} = defDirStat { F.statFileMode = filemode Qroot }
fileStat Qctl {} = defFileStat { F.statFileMode = filemode Qrootctl }
fileStat q = defFileStat { F.statFileMode = filemode q }
{-
stat :: Fs -> FilePath -> Maybe F.FileStat
stat st p = maybePlus1 <$> m <*> x
where m = parsePath p
x = stat' st =<< m
maybePlus1 _ s = s
stat' :: Fs -> Qreq -> Maybe F.FileStat
stat' f Qroot = Just $ defDirStat
{ F.statFileMode = filemode Qroot
, F.statFileOwner = fromIntegral $ userID f
, F.statFileGroup = fromIntegral $ groupID f
}
stat' f Qdir {} = Just $ defDirStat
{ F.statFileMode = filemode Qroot
, F.statFileOwner = fromIntegral $ userID f
, F.statFileGroup = fromIntegral $ groupID f
}
stat' f Qctl {} = Just $ F.defaultFileStat
{ F.statFileMode = filemode Qrootctl
, F.statFileOwner = fromIntegral $ userID f
, F.statFileGroup = fromIntegral $ groupID f
}
stat' f q =
let mn = fromIntegral . B.length <$> read' f q
s = F.defaultFileStat { F.statFileMode = filemode q
, F.statFileOwner = fromIntegral $ userID f
, F.statFileGroup = fromIntegral $ groupID f
}
in (\n -> L.setL statFileSizeL n s) <$> mn
statFileSizeL :: L.Lens F.FileStat S.FileOffset
statFileSizeL = L.lens F.statFileSize (\x s -> s { F.statFileSize = x })
-}
-- | FUSE read: resolve the path, fetch the node's contents, then slice out
-- the requested (offset, count) window.
readF :: Fs -> FilePath -> S.ByteCount -> S.FileOffset -> Maybe B.ByteString
readF s p bc off = cut <$> (read' s =<< parsePath p)
  where cut = B.take (fromIntegral bc) . B.drop (fromIntegral off)

-- | Contents of a single node; directories have none, ctl files read as
-- empty rather than failing.
read' :: Fs -> Qreq -> Maybe B.ByteString
read' _ Qroot = Nothing
read' con Qrootctl = L.getL (dataL Qrootctl) con
read' con Qevent = L.getL (dataL Qevent) con
read' con Qpong = L.getL (dataL Qpong) con
read' con Qraw = L.getL (dataL Qraw) con
read' con Qnick = L.getL (dataL Qnick) con
read' con (Qname k) = L.getL (dataL (Qname k)) con
read' con (Qusers k) = L.getL (dataL (Qusers k)) con
read' con (Qdata k) = L.getL (dataL (Qdata k)) con
read' _ Qctl {} = Just mempty
read' _ Qdir {} = Nothing
read' _ _ = Nothing
-- | Directory listing for a node: the root lists its fixed files plus one
-- numeric subdirectory per connected target; target dirs list the fixed
-- per-target files; everything else is empty.
readDir' :: Fs -> Qreq -> [(FilePath, F.FileStat)]
readDir' st Qroot =
  let ks = IM.keys (targets st)
      rootDir = map (showFilepath &&& fileStat) rootDirFiles
      subDirs = map (\x -> (show x,defDirStat)) ks
  in [(".", defDirStat), ("..", defDirStat)]
        ++ rootDir ++ subDirs
readDir' _ Qdir {} =
  let subDir = map (showFilepath &&& fileStat) subDirFiles
  in [(".", defDirStat), ("..",defDirStat)] ++ subDir
readDir' _ _ = []

-- | Path-based wrapper around 'readDir''; unknown paths list as empty.
readDir :: Fs -> FilePath -> [(FilePath, F.FileStat)]
readDir st p = maybe [] (readDir' st) (parsePath p)
-- | Append bytes to a node's contents. Uses the Maybe monoid, so appending
-- to a node with no contents (Nothing) yields Just s.
append :: Qreq -> B.ByteString -> Endomorphism Fs
append p s = L.modL (dataL p) (`mappend` Just s)
-- | Replace a node's contents wholesale.
substitute :: Qreq -> B.ByteString -> Endomorphism Fs
substitute p s = L.setL (dataL p) (Just s)
-- | Update a node's timestamps (no-op if the inode does not exist).
touch :: Qreq -> CTime -> Endomorphism Fs
touch p t = L.modL (inodeL p) (fmap (setTimes t))
-- | Pre-rendered timestamp prefix used for chat lines.
type Timestamp = B.ByteString
-- | Apply a write to a filesystem node: returns the updated filesystem plus
-- the IRC messages that must be sent as a consequence. Writes to a channel's
-- data file append a timestamped local-echo line ("stamp < nick> text") and
-- produce a PRIVMSG to that channel; writes elsewhere only mutate state.
-- Fixes: dropped the unused binding @n@ and the unused timestamp bindings.
write :: Fs -> Timestamp -> B.ByteString -> Qreq -> (Fs, [I.Message])
write st _ _ Qrootctl = (st, mempty)
write st _ xs Qevent = (append Qevent xs st,[])
write st _ xs Qnick = (substitute Qnick xs st,[])
write st _ xs Qpong = (append Qpong xs st,[])
write st stamp xs p@(Qdata k) =
  let targets = maybeToList . L.getL (dataL (Qname k)) $ st
      me = fromMaybe mempty (L.getL nickLens st)
      -- local echo line, e.g. "12:34 < nick> hello"
      line = mconcat [stamp, " < ",me,"> ", xs]
  in (append p line st, [privmsg targets xs])
write st _ _ _ = (st, mempty)
-- | Build an IRC PRIVMSG to the given targets.
privmsg :: [B.ByteString] -> B.ByteString -> I.Message
privmsg targets x = I.Message Nothing I.PRIVMSG (I.Params targets (Just x))

-- | Create the directory tree for a newly joined channel: allocate the
-- lowest free target index, create its name/users/data/ctl inodes with the
-- appropriate modes, register it in both target maps, and log a
-- "new <k> <name>" line to the event file. No-op if the channel exists.
insertChannel :: B.ByteString -> CTime -> Endomorphism Fs
insertChannel name time st =
  let
    k = minfree (IM.keys (targets st))
    target = Target k TChannel
    s = B.pack $ "new " ++ show k ++ " "
    emptyNode = setTimes time . chmod 0o440
                    $ mkInode (defaultFileStat st)
    rwNode = chmod 0o660 emptyNode
    wNode = chmod 0o220 emptyNode
    nameNode = L.setL iDataL name emptyNode
    dirNode = setTimes time $ mkInode (defaultDirStat st)
    insert =
        L.setL (targetLens k) (Just target)
      . L.setL (targetMapLens' name) (Just k)
      . L.setL (inodeL (Qname k)) (Just nameNode)
      . L.setL (inodeL (Qusers k)) (Just emptyNode)
      . L.setL (inodeL (Qdata k)) (Just rwNode)
      . L.setL (inodeL (Qctl k)) (Just wNode)
      . L.setL (inodeL (Qdir k)) (Just dirNode)
      . append Qevent (s `mappend` name `mappend` "\n")
  in if M.member name (targetMap st) then st else insert st

-- | Tear down a channel's directory tree and log "del <k> <name>" to the
-- event file. No-op if the name is unknown.
-- NOTE(review): the 'time' argument is unused — the event file's timestamps
-- are not touched here; confirm whether that is intended.
removeChannel :: B.ByteString -> CTime -> Endomorphism Fs
removeChannel name time st =
  let
    str k = B.pack $ "del " ++ show k ++ " "
    text k = (mconcat [str k,name,"\n"])
    del k = rmdir' (Qdir k)
          . L.setL (targetMapLens' name) Nothing
          . append Qevent (text k)
  in maybe st (`del` st) (L.getL (targetMapLens' name) st)
-- writeF :: FilePath -> S.ByteCount -> B.ByteString -> Ircfs [I.Message]
-- (B.ByteString -> Maybe a, a -> B.ByteString)
-- | Remove a single inode.
rm :: Qreq -> Endomorphism Fs
rm q = L.setL (inodeL q) Nothing
-- | Remove a target directory inode and its entry in the target table.
rmdir :: Qreq -> Endomorphism Fs
rmdir (Qdir k) = L.setL (targetLens k) Nothing . rm (Qdir k)
rmdir _ = id
-- works like rm -rf
rmdir' :: Qreq -> Endomorphism Fs
rmdir' (Qdir k) = rmdir (Qdir k) . rm (Qname k) . rm (Qusers k)
               . rm (Qdata k) . rm (Qctl k)
rmdir' _ = id

-- | FUSE getattr: look up the inode and derive its stat.
stat :: Fs -> Qreq -> Maybe F.FileStat
--stat st RootCtl = Just $ defaultFileStat st
stat st p = statFromInode <$> M.lookup p (inodes st)
-- Regular files report their stored data length as size; directories keep
-- their stat as-is. NOTE(review): missing a top-level type signature.
statFromInode (Inode st d) =
  if F.statEntryType st `eqEntryType` F.Directory then st
  else st { F.statFileSize = fromIntegral (B.length d) }
-- | Default directory stat: r-x, two links, conventional 4096 size.
defDirStat :: F.FileStat
defDirStat = defFileStat
  { F.statEntryType = F.Directory
  , F.statFileMode = 0o555
  , F.statLinkCount = 2
  , F.statFileSize = 4096
  }
-- | Default regular-file stat: write-only until a mode is set, zeroed
-- ownership and timestamps.
defFileStat :: F.FileStat
defFileStat = F.FileStat
  { F.statEntryType = F.RegularFile
  , F.statFileMode = 0o222
  , F.statLinkCount = 1
  , F.statFileOwner = 0
  , F.statFileGroup = 0
  , F.statSpecialDeviceID = 0
  , F.statFileSize = 0
  , F.statBlocks = 1
  , F.statAccessTime = 0
  , F.statModificationTime = 0
  , F.statStatusChangeTime = 0
  }
-- Structural equality for F.EntryType (which presumably lacks an Eq
-- instance); any mismatched pair falls through to False.
-- NOTE(review): missing a top-level type signature.
eqEntryType F.Unknown F.Unknown = True
eqEntryType F.NamedPipe F.NamedPipe = True
eqEntryType F.CharacterSpecial F.CharacterSpecial = True
eqEntryType F.Directory F.Directory = True
eqEntryType F.BlockSpecial F.BlockSpecial = True
eqEntryType F.RegularFile F.RegularFile = True
eqEntryType F.SymbolicLink F.SymbolicLink = True
eqEntryType F.Socket F.Socket = True
eqEntryType _ _ = False
-- | Append @text@ to the event file and bump its timestamps to @time@.
event :: CTime -> B.ByteString -> Endomorphism Fs
event time text fs = append Qevent text (touch Qevent time fs)
| bernstein/ircfs | Ircfs/Filesystem.hs | bsd-3-clause | 11,312 | 0 | 19 | 3,239 | 3,508 | 1,827 | 1,681 | 233 | 3 |
{-# LANGUAGE OverloadedStrings #-}
module Text.Domain.Parser
( Domain
, domain
)
where
import Prelude hiding (takeWhile)
import Control.Applicative
import Control.Monad
import Data.Char (isAscii, isDigit)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Attoparsec.Text
import qualified Data.Attoparsec.ByteString.Char8 as PB
type Domain = Text

-- | Parses preferred domain names according to RFC 3696.
--
-- Unicode characters are supported as per RFC 5890, but the parser does not
-- implement RFCs 5892 and 5893 correctly, allowing any non-ASCII character.
domain :: Parser Domain
domain = do
  r <- label `sepBy1` char '.'
  -- 'last r' is safe: sepBy1 guarantees at least one label.
  when (all isDigit $ T.unpack $ last r) $ fail "Top-level domain name cannot be all-numeric"
  return (T.intercalate "." r)
-- | Prepend the character, when present, onto the text.
maybeCons :: Maybe Char -> Text -> Text
maybeCons Nothing t = t
maybeCons (Just c) t = T.cons c t
-- | One domain label: an alphanumeric character followed by runs of
-- alphanumerics optionally separated by single hyphens (so a label can
-- neither start with '-' nor contain "--"-free enforcement beyond singles).
label :: Parser Text
label = do
  chr <- satisfy isAlnum
  mid <- many (maybeCons <$> optional (char '-') <*> takeWhile1 isAlnum)
  return $ T.concat (T.singleton chr : mid)

-- | ASCII letters plus every non-ASCII character (permissive per RFC 5890).
isAlpha :: Char -> Bool
isAlpha x = PB.isAlpha_ascii x || not (isAscii x)

isAlnum :: Char -> Bool
isAlnum x = isDigit x || isAlpha x
| abbradar/smtp | src/Text/Domain/Parser.hs | bsd-3-clause | 1,200 | 0 | 14 | 237 | 359 | 190 | 169 | 29 | 1 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE OverlappingInstances #-}
module Math.Metric.Metric where
import Math.Coordinate.Coordinate --(SpaceOf, CoordConversion, AutoConversion, convertCoord)
-- | A metric computes a distance-like 'result' between two values of type
-- 'el'; the result type is determined by the metric and element types.
class Metric metric el result | metric el -> result where
    distanceBase :: metric -> el -> el -> result

-- | Each metric declares the coordinate system it measures in.
class MetricCoord metric coord | metric -> coord where
    metricCoord :: metric -> coord

--distance :: (MetricCoord metric coord
--            , CoordConversion AutoConversion coord a t
--            , CoordConversion AutoConversion coord b t
--            , space ~ SpaceOf a
--            , space ~ SpaceOf b
--            , Metric metric t) =>
--             metric -> space -> a -> b -> Double
-- Convert both points into the metric's coordinate system, then measure.
-- NOTE(review): the intended type signature is commented out above, so the
-- inferred type may be more general than intended; confirm before exporting.
distance metric space a b = distanceBase metric (convertCoord coord space a) (convertCoord coord space b)
    where coord = metricCoord metric
| wdanilo/algebraic | src/Math/Metric/Metric.hs | bsd-3-clause | 1,032 | 0 | 9 | 229 | 142 | 83 | 59 | 15 | 1 |
module Module3.Task6 where
-- | Split a list into maximal runs of equal adjacent elements.
--
-- >>> groupElems [1,2,2,3,3,3]
-- [[1],[2,2],[3,3,3]]
groupElems :: Eq a => [a] -> [[a]]
groupElems [] = []
groupElems (x:rest) = (x : same) : groupElems different
  where
    (same, different) = span (== x) rest
| dstarcev/stepic-haskell | src/Module3/Task6.hs | bsd-3-clause | 282 | 0 | 13 | 88 | 187 | 98 | 89 | 8 | 3 |
{-# LANGUAGE TypeFamilies #-}
----------------------------------------------------------------------
-- |
-- Module : Data.Functor.Representable.Trie.Vector
-- Copyright : (c) Edward Kmett 2011
-- License : BSD3
--
-- Maintainer : ekmett@gmail.com
-- Stability : experimental
--
-- a trie for zeroless-binary vectors
----------------------------------------------------------------------
module Data.Functor.Representable.Trie.Vector (
VectorTrie (..)
) where
import Control.Applicative
import Data.Distributive
import Data.Functor.Representable
import Data.Functor.Bind
import Data.Foldable
import Data.Traversable
import Data.Semigroup
import Data.Semigroup.Foldable
import Data.Semigroup.Traversable
import Data.Key
import Prelude hiding (lookup)
-- A trie indexed by zeroless-binary vectors: T0 is a leaf, T1/T2 carry one
-- or two layers of f around tries over Product f f.
-- NOTE(review): this whole section is non-compiling work-in-progress (the
-- file's own "TODO: fix below here" marker applies); specific defects are
-- flagged inline. Left byte-identical pending the author's intent.
-- NOTE(review): T0's result type 'VectorTrie D0' drops the f and n
-- parameters, and D1/D2 are applied to (n a) — arities look wrong.
data VectorTrie f n a where
  T0 :: a -> VectorTrie D0
  T1 :: f (VectorTrie (Product f f) n a) -> VectorTrie f (D1 n a)
  T2 :: f (f (VectorTrie (Product f f) n a)) -> VectorTrie f (D2 n a)
type instance Key (VectorTrie n f) = Vector n (Key f)
instance Functor f => Functor (VectorTrie f) where
  fmap f (T0 a) = T0 (f a)
  -- TODO: fix below here
  fmap f (T1 as) = T1 (fmap (fmap f) as)
  -- NOTE(review): pattern binds 'ass' but body uses 'as', and rebuilds with
  -- T1 instead of T2.
  fmap f (T2 ass) = T1 (fmap (fmap (fmap f)) as)
  -- b <$ _ = pure b
instance Apply f => Apply (VectorTrie f) where
  -- NOTE(review): T0 is applied to no argument here; presumably T0 (a b).
  T0 a <.> T0 b = T0
  T1 as <.> T1 bs = T1 ((<.>) <$> as <.> bs)
  -- NOTE(review): binds ass/bss but uses as/bs; rebuilds with T1 not T2.
  T2 ass <.> T2 bss = T1 (liftF2 (<.>) <$> as <.> bs)
  a <. _ = a
  _ .> b = b
instance Applicative (VectorTrie f D0) where
  pure a = T0 a
  T0 a <*> T0 b = T0 (a b)
  a <* _ = a
  _ *> a = a
instance (Applicative f, Applicative (VectorTrie f n)) => Applicative (VectorTrie f (D1 n)) where
  pure a = T1 (pure (pure a))
  -- NOTE(review): 'T' is not a constructor of this type (T1 intended?).
  T1 as <*> T1 bs = T ((<*>) <$> as <*> bs)
  a <* _ = a
  _ *> a = a
-- NOTE(review): duplicate instance head — this should presumably be the
-- (D2 n) instance; also the 'pure' line has unbalanced parentheses.
instance (Applicative f, Applicative (VectorTrie f n)) => Applicative (VectorTrie f (D1 n)) where
  pure a = T2 (pure (pure (pure a))
  T2 ass <*> T2 bss = T1 (liftA2 (<*>) <$> as <*> bs)
  a <* _ = a
  _ *> a = a
{-
instance Representable f => Bind (ListTrie f) where
(>>-) = bindRep
instance Representable f => Monad (ListTrie f) where
return = pure
(>>=) = bindRep
_ >> a = a
-}
-- NOTE(review): from here down the instances reference 'ListTrie', which is
-- not defined in this module — apparently copied from the list-trie module
-- as a template. The Keyed instance below is syntactically truncated
-- ('mapWihKey' typo, unclosed parentheses). Left byte-identical.
instance Keyed f => Keyed (VectorTrie f n) where
  mapWithKey f (T0 a) = T0 (f V0 a)
  mapWithKey f (T1 as) = T1 (mapWithKey (\x -> mapWihKey (f . V1
    ListTrie (f [] a) (mapWithKey (\x -> mapWithKey (f . (x:))) as)
instance Foldable f => Foldable (ListTrie f) where
  foldMap f (ListTrie a as) = f a `mappend` foldMap (foldMap f) as
instance Foldable1 f => Foldable1 (ListTrie f) where
  foldMap1 f (ListTrie a as) = f a <> foldMap1 (foldMap1 f) as
instance Traversable f => Traversable (ListTrie f) where
  traverse f (ListTrie a as) = ListTrie <$> f a <*> traverse (traverse f) as
instance Traversable1 f => Traversable1 (ListTrie f) where
  traverse1 f (ListTrie a as) = ListTrie <$> f a <.> traverse1 (traverse1 f) as
instance FoldableWithKey f => FoldableWithKey (ListTrie f) where
  foldMapWithKey f (ListTrie a as) = f [] a `mappend` foldMapWithKey (\x -> foldMapWithKey (f . (x:))) as
instance FoldableWithKey1 f => FoldableWithKey1 (ListTrie f) where
  foldMapWithKey1 f (ListTrie a as) = f [] a <> foldMapWithKey1 (\x -> foldMapWithKey1 (f . (x:))) as
instance TraversableWithKey f => TraversableWithKey (ListTrie f) where
  traverseWithKey f (ListTrie a as) = ListTrie <$> f [] a <*> traverseWithKey (\x -> traverseWithKey (f . (x:))) as
instance TraversableWithKey1 f => TraversableWithKey1 (ListTrie f) where
  traverseWithKey1 f (ListTrie a as) = ListTrie <$> f [] a <.> traverseWithKey1 (\x -> traverseWithKey1 (f . (x:))) as
instance Representable f => Distributive (ListTrie f) where
  distribute = distributeRep
instance Indexable f => Indexable (ListTrie f) where
  index (ListTrie x _) [] = x
  index (ListTrie _ xs) (a:as) = index (index xs a) as
instance Adjustable f => Adjustable (ListTrie f) where
  adjust f [] (ListTrie x xs) = ListTrie (f x) xs
  adjust f (a:as) (ListTrie x xs) = ListTrie x (adjust (adjust f as) a xs)
instance Lookup f => Lookup (ListTrie f) where
  lookup [] (ListTrie x _) = Just x
  lookup (a:as) (ListTrie _ xs) = lookup a xs >>= lookup as
instance Representable f => Representable (ListTrie f) where
  tabulate f = ListTrie (f []) (tabulate (\x -> tabulate (f . (x:))))
| ekmett/representable-tries | src/Data/Functor/Representable/Trie/Vector.hs | bsd-3-clause | 4,284 | 8 | 23 | 884 | 1,829 | 929 | 900 | -1 | -1 |
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.ZM.ADT.NonEmptyList.Kbf2d1c86eb20 (NonEmptyList(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
-- Machine-generated ZM ADT (hash Kbf2d1c86eb20): a list with at least one
-- element. Keep the fully-qualified recursive reference and derivations as
-- emitted by the generator.
data NonEmptyList a = Elem a
                    | Cons a (Test.ZM.ADT.NonEmptyList.Kbf2d1c86eb20.NonEmptyList a)
 deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)

instance ( Data.Model.Model a ) => Data.Model.Model ( NonEmptyList a )
| tittoassini/typed | test/Test/ZM/ADT/NonEmptyList/Kbf2d1c86eb20.hs | bsd-3-clause | 527 | 0 | 9 | 80 | 142 | 87 | 55 | 11 | 0 |
{-# LANGUAGE TupleSections #-}
import CoreSyn
import CoreUtils
import Id
import Type
import MkCore
import CallArity (callArityRHS)
import MkId
import SysTools
import DynFlags
import ErrUtils
import Outputable
import TysWiredIn
import Literal
import GHC
import Control.Monad
import Control.Monad.IO.Class
import System.Environment( getArgs )
import VarSet
import PprCore
import Unique
import CoreLint
import FastString
-- Build IDs. use mkTemplateLocal, more predictable than proper uniques
go, go2, x, d, n, y, z, scrut :: Id
[go, go2, x,d, n, y, z, scrut, f] = mkTestIds
(words "go go2 x d n y z scrut f")
[ mkFunTys [intTy, intTy] intTy
, mkFunTys [intTy, intTy] intTy
, intTy
, mkFunTys [intTy] intTy
, mkFunTys [intTy] intTy
, intTy
, intTy
, boolTy
, mkFunTys [intTy, intTy] intTy -- protoypical external function
]
exprs :: [(String, CoreExpr)]
exprs =
[ ("go2",) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $ mkLams [z] $ Var d `mkVarApps` [x]) $
go `mkLApps` [0, 0]
, ("nested_go2",) $
mkRFun go [x]
(mkLet n (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)) $
mkACase (Var n) $
mkFun go2 [y]
(mkLet d
(mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y) ) $
mkLams [z] $ Var d `mkVarApps` [x] )$
Var go2 `mkApps` [mkLit 1] ) $
go `mkLApps` [0, 0]
, ("d0 (go 2 would be bad)",) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $
mkLams [z] $ Var f `mkApps` [ Var d `mkVarApps` [x], Var d `mkVarApps` [x] ]) $
go `mkLApps` [0, 0]
, ("go2 (in case crut)",) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $ mkLams [z] $ Var d `mkVarApps` [x]) $
Case (go `mkLApps` [0, 0]) z intTy
[(DEFAULT, [], Var f `mkVarApps` [z,z])]
, ("go2 (in function call)",) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $ mkLams [z] $ Var d `mkVarApps` [x]) $
f `mkLApps` [0] `mkApps` [go `mkLApps` [0, 0]]
, ("go2 (using surrounding interesting let)",) $
mkLet n (f `mkLApps` [0]) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $ mkLams [z] $ Var d `mkVarApps` [x]) $
Var f `mkApps` [n `mkLApps` [0], go `mkLApps` [0, 0]]
, ("go2 (using surrounding boring let)",) $
mkLet z (mkLit 0) $
mkRFun go [x]
(mkLet d (mkACase (Var go `mkVarApps` [x])
(mkLams [y] $ Var y)
) $ mkLams [z] $ Var d `mkVarApps` [x]) $
Var f `mkApps` [Var z, go `mkLApps` [0, 0]]
, ("two calls, one from let and from body (d 1 would be bad)",) $
mkLet d (mkACase (mkLams [y] $ mkLit 0) (mkLams [y] $ mkLit 0)) $
mkFun go [x,y] (mkVarApps (Var d) [x]) $
mkApps (Var d) [mkLApps go [1,2]]
, ("a thunk in a recursion (d 1 would be bad)",) $
mkRLet n (mkACase (mkLams [y] $ mkLit 0) (Var n)) $
mkRLet d (mkACase (mkLams [y] $ mkLit 0) (Var d)) $
Var n `mkApps` [d `mkLApps` [0]]
, ("two thunks, one called multiple times (both arity 1 would be bad!)",) $
mkLet n (mkACase (mkLams [y] $ mkLit 0) (f `mkLApps` [0])) $
mkLet d (mkACase (mkLams [y] $ mkLit 0) (f `mkLApps` [0])) $
Var n `mkApps` [Var d `mkApps` [Var d `mkApps` [mkLit 0]]]
, ("two functions, not thunks",) $
mkLet go (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var f `mkVarApps` [x]))) $
mkLet go2 (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var f `mkVarApps` [x]))) $
Var go `mkApps` [go2 `mkLApps` [0,1], mkLit 0]
, ("a thunk, called multiple times via a forking recursion (d 1 would be bad!)",) $
mkLet d (mkACase (mkLams [y] $ mkLit 0) (f `mkLApps` [0])) $
mkRLet go2 (mkLams [x] (mkACase (Var go2 `mkApps` [Var go2 `mkApps` [mkLit 0, mkLit 0]]) (Var d))) $
go2 `mkLApps` [0,1]
, ("a function, one called multiple times via a forking recursion",) $
mkLet go (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var f `mkVarApps` [x]))) $
mkRLet go2 (mkLams [x] (mkACase (Var go2 `mkApps` [Var go2 `mkApps` [mkLit 0, mkLit 0]]) (go `mkLApps` [0]))) $
go2 `mkLApps` [0,1]
, ("two functions (recursive)",) $
mkRLet go (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go `mkVarApps` [x]))) $
mkRLet go2 (mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go2 `mkVarApps` [x]))) $
Var go `mkApps` [go2 `mkLApps` [0,1], mkLit 0]
, ("mutual recursion (thunks), called mutiple times (both arity 1 would be bad!)",) $
Let (Rec [ (n, mkACase (mkLams [y] $ mkLit 0) (Var d))
, (d, mkACase (mkLams [y] $ mkLit 0) (Var n))]) $
Var n `mkApps` [Var d `mkApps` [Var d `mkApps` [mkLit 0]]]
, ("mutual recursion (functions), but no thunks",) $
Let (Rec [ (go, mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go2 `mkVarApps` [x])))
, (go2, mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go `mkVarApps` [x])))]) $
Var go `mkApps` [go2 `mkLApps` [0,1], mkLit 0]
, ("mutual recursion (functions), one boring (d 1 would be bad)",) $
mkLet d (f `mkLApps` [0]) $
Let (Rec [ (go, mkLams [x, y] (Var d `mkApps` [go2 `mkLApps` [1,2]]))
, (go2, mkLams [x] (mkACase (mkLams [y] $ mkLit 0) (Var go `mkVarApps` [x])))]) $
Var d `mkApps` [go2 `mkLApps` [0,1]]
, ("a thunk (non-function-type), called twice, still calls once",) $
mkLet d (f `mkLApps` [0]) $
mkLet x (d `mkLApps` [1]) $
Var f `mkVarApps` [x, x]
, ("a thunk (function type), called multiple times, still calls once",) $
mkLet d (f `mkLApps` [0]) $
mkLet n (Var f `mkApps` [d `mkLApps` [1]]) $
mkLams [x] $ Var n `mkVarApps` [x]
, ("a thunk (non-function-type), in mutual recursion, still calls once (d 1 would be good)",) $
mkLet d (f `mkLApps` [0]) $
Let (Rec [ (x, Var d `mkApps` [go `mkLApps` [1,2]])
, (go, mkLams [x] $ mkACase (mkLams [z] $ Var x) (Var go `mkVarApps` [x]) ) ]) $
Var go `mkApps` [mkLit 0, go `mkLApps` [0,1]]
, ("a thunk (function type), in mutual recursion, still calls once (d 1 would be good)",) $
mkLet d (f `mkLApps` [0]) $
Let (Rec [ (n, Var go `mkApps` [d `mkLApps` [1]])
, (go, mkLams [x] $ mkACase (Var n) (Var go `mkApps` [Var n `mkVarApps` [x]]) ) ]) $
Var go `mkApps` [mkLit 0, go `mkLApps` [0,1]]
, ("a thunk (non-function-type) co-calls with the body (d 1 would be bad)",) $
mkLet d (f `mkLApps` [0]) $
mkLet x (d `mkLApps` [1]) $
Var d `mkVarApps` [x]
]
main = do
[libdir] <- getArgs
runGhc (Just libdir) $ do
getSessionDynFlags >>= setSessionDynFlags . flip gopt_set Opt_SuppressUniques
dflags <- getSessionDynFlags
liftIO $ forM_ exprs $ \(n,e) -> do
case lintExpr dflags [f,scrut] e of
Just msg -> putMsg dflags (msg $$ text "in" <+> text n)
Nothing -> return ()
putMsg dflags (text n <> char ':')
-- liftIO $ putMsg dflags (ppr e)
let e' = callArityRHS e
let bndrs = varSetElems (allBoundIds e')
-- liftIO $ putMsg dflags (ppr e')
forM_ bndrs $ \v -> putMsg dflags $ nest 4 $ ppr v <+> ppr (idCallArity v)
-- Utilities

-- | Apply a sequence of integer literals as arguments to a variable.
mkLApps :: Id -> [Integer] -> CoreExpr
mkLApps v args = mkApps (Var v) (map mkLit args)

-- | Case analysis on the shared boolean scrutinee 'scrut'.
mkACase = mkIfThenElse (Var scrut)

-- | A deterministic local binder built from a built-in unique, so that
-- the test output is stable across runs.
mkTestId :: Int -> String -> Type -> Id
mkTestId i s = mkSysLocal (mkFastString s) (mkBuiltinUnique i)

-- | Pair names with types, numbering the binders from zero.
mkTestIds :: [String] -> [Type] -> [Id]
mkTestIds = zipWith3 mkTestId [0..]

-- | Non-recursive let binding.
mkLet :: Id -> CoreExpr -> CoreExpr -> CoreExpr
mkLet v rhs = Let (NonRec v rhs)

-- | Self-recursive let with a single binder.
mkRLet :: Id -> CoreExpr -> CoreExpr -> CoreExpr
mkRLet v rhs = Let (Rec [(v, rhs)])

-- | Non-recursive function binding: @let v = \xs -> rhs in body@.
mkFun :: Id -> [Id] -> CoreExpr -> CoreExpr -> CoreExpr
mkFun v xs rhs = mkLet v (mkLams xs rhs)

-- | Recursive function binding.
mkRFun :: Id -> [Id] -> CoreExpr -> CoreExpr -> CoreExpr
mkRFun v xs rhs = mkRLet v (mkLams xs rhs)

-- | An Int-typed literal expression.
mkLit :: Integer -> CoreExpr
mkLit n = Lit (mkLitInteger n intTy)
-- Collects all let-bound IDs
allBoundIds :: CoreExpr -> VarSet
allBoundIds expr = case expr of
    Let (NonRec v rhs) body ->
        allBoundIds rhs `unionVarSet` allBoundIds body `extendVarSet` v
    Let (Rec binds) body ->
        allBoundIds body `unionVarSet`
        unionVarSets [ allBoundIds rhs `extendVarSet` v | (v, rhs) <- binds ]
    App fun arg ->
        allBoundIds fun `unionVarSet` allBoundIds arg
    Case scr _ _ alts ->
        allBoundIds scr `unionVarSet`
        unionVarSets [ allBoundIds rhs | (_, _, rhs) <- alts ]
    Lam _ body  -> allBoundIds body
    Tick _ body -> allBoundIds body
    Cast body _ -> allBoundIds body
    -- Var, Lit, Type and Coercion bind nothing.
    _           -> emptyVarSet
| gcampax/ghc | testsuite/tests/callarity/unittest/CallArity1.hs | bsd-3-clause | 9,340 | 0 | 25 | 2,812 | 4,145 | 2,281 | 1,864 | 195 | 2 |
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses, FlexibleInstances #-}
module OfficeClimate.TemperatureReport where
import Database.HDBC.Query.TH (defineTableFromDB)
import Database.HDBC.Schema.PostgreSQL (driverPostgreSQL)
import Database.Record.TH (derivingShow)
import OfficeClimate.Connection (connect)
-- Generate the record type and instances for the "temperature_report"
-- table in schema "office_climate", reading the table definition from
-- PostgreSQL (via 'connect') at compile time.
$(defineTableFromDB connect driverPostgreSQL "office_climate" "temperature_report" [derivingShow])
| igrep/office-climate | src/OfficeClimate/TemperatureReport.hs | bsd-3-clause | 414 | 0 | 8 | 31 | 73 | 44 | 29 | 7 | 0 |
{- |
- Module : Types.Internal.Username
- Description : Representation of an IRC username.
- Copyright : (c) Magnus Stavngaard, 2017
- License : BSD-3
- Maintainer : magnus@stavngaard.dk
- Stability : experimental
- Portability : POSIX
-
- An IRC username is a non-empty string of characters not including '\0', '\r',
- '\n', ' ', '@', '%'.
-}
module Types.Internal.Username where
import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
import Test.QuickCheck.Gen (suchThat)
import Data.Aeson (ToJSON(..), FromJSON(..), withText)
import qualified Data.Aeson.Types as Aeson
import qualified Parsers.Utils as PU
import qualified Text.Parsec as P
import Data.Maybe (isJust)
import qualified Data.Text as T
{- | IRC username. -}
newtype Username = Username String deriving (Show, Read, Eq)
{- | Smart constructor for Username's: runs the username grammar over the
 - whole input and wraps the result on success. -}
username :: String
         -- ^ Source of username.
         -> Maybe Username
username raw = either (const Nothing) (Just . Username) parsed
  where
    -- The parser must consume the entire input; trailing junk is rejected
    -- by the 'P.eof'.
    parsed = P.parse (PU.username <* P.eof) "(username source)" raw
{- | Get the actual username. -}
getUsername :: Username -> String
getUsername (Username user) = user
{- | Construct arbitrary IRC Username's. -}
-- Rejection sampling: generates arbitrary strings and keeps only those the
-- 'username' smart constructor accepts.
instance Arbitrary Username where
    arbitrary = Username <$> suchThat arbitrary (isJust . username)
    -- TODO: shrink.
{- | Convert Username's to JSON. -}
instance ToJSON Username where
    toJSON (Username user) = Aeson.String . T.pack $ user
{- | Parse Username's from JSON. -}
-- NOTE(review): this wraps the raw text with the 'Username' constructor
-- directly, bypassing the validation in 'username' — confirm that JSON
-- input is trusted, or route this through the smart constructor.
instance FromJSON Username where
    parseJSON = withText "user" $ return . Username . T.unpack
| bus000/Dikunt | src/Types/Internal/Username.hs | bsd-3-clause | 1,594 | 0 | 9 | 298 | 316 | 182 | 134 | 23 | 2 |
--
-- Unshipping Docker
--
-- Copyright © 2014 Operational Dynamics Consulting, Pty Ltd and Others
-- Copyright © 2014 Anchor Systems, Pty Ltd and Others
--
-- The code in this file, and the program it is a part of, is
-- made available to you by its authors as open source software:
-- you can redistribute it and/or modify it under the terms of
-- the 3-clause BSD licence.
--
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Concurrent.Async
import Data.Monoid
import Options.Applicative
import System.Log.Logger
import Package (package, version)
import Linux.Program
--
-- Component line option parsing
--
data Options = Options
{ broker :: String
, debug :: Bool
, component :: Component }
data Component =
Status
| Two { raw :: Bool }
-- | Infix alias for 'mappend', used below to chain option modifiers.
(<+>) :: Monoid θ => θ -> θ -> θ
a <+> b = mappend a b
commandLineParser :: ParserInfo Options
commandLineParser = info (helper <*> optionsParser) fullDesc
optionsParser :: Parser Options
optionsParser = Options <$> parseSocket
<*> parseDebug
<*> parseComponents
where
parseSocket = strOption $
long "socket" <+>
short 's' <+>
metavar "SOCKET" <+>
value "/var/run/docker.sock" <+>
showDefault <+>
help "Location of Docker's remote control API endpoint"
parseDebug = switch $
long "debug" <+>
short 'd' <+>
help "Output lots of debugging information"
parseComponents = subparser
(parseOneComponent <+>
parseTwoComponent)
parseOneComponent =
componentHelper "status" (pure Status) "Get status of currently running containers"
parseTwoComponent =
componentHelper "two" readOptionsParser "Takes two to tango"
componentHelper cmd_name parser desc =
command cmd_name (info (helper <*> parser) (progDesc desc))
readOptionsParser :: Parser Component
readOptionsParser = Two <$> parseRaw
where
parseRaw = switch $
long "raw"
<> short 'r'
<> help "Output values in a raw form (human-readable otherwise)"
--
-- Actual tools
--
--
-- Main program entry point
--
-- | Parse the command line, initialise logging, and dispatch the selected
-- component on a worker thread.
main :: IO ()
main = do
    Options{..} <- execParser commandLineParser
    let level = if debug
                    then Debug
                    else Quiet
    -- NOTE(review): 'quit' is bound but never used below, although the
    -- comment further down mentions blocking on a quit semaphore —
    -- confirm whether 'wait a' was meant to be a wait on 'quit'.
    quit <- initializeProgram (package ++ "-" ++ version) level
    -- Run selected component.
    debugM "Main.main" "Running command"
    -- Although none of the commands are daemons, we get off of the main thread
    -- so that we can block the main thread on the quit semaphore, such that a
    -- user interrupt will kill the program.
    a <- async $ do
        case component of
            Status ->
                putStrLn "Checking status"
            -- NOTE(review): the "two" subcommand is unimplemented and will
            -- crash at runtime with an 'undefined' error.
            Two _ ->
                undefined
    _ <- wait a
    debugM "Main.main" "End"
| afcowie/detour | src/RuntimeProgram.hs | bsd-3-clause | 2,977 | 0 | 14 | 819 | 527 | 284 | 243 | 68 | 3 |
module BowlingGame.KataSpec (spec) where
import Test.Hspec
import BowlingGame.Kata (roll, score, startGame, Game)
-- | Scoring scenarios for the bowling-game kata: gutter game, constant
-- pins, a spare (bonus: next roll), a strike (bonus: next two rolls), and
-- a perfect game of twelve strikes.
spec :: Spec
spec =
    describe "Bowling Game" $ do
        it "processes gutter game" $
            score (rollMany 20 0 startGame) `shouldBe` 0
        it "processes all ones" $
            score (rollMany 20 1 startGame) `shouldBe` 20
        it "processes one spare" $
            score (rollSpare $ roll 3 $ rollMany 17 0 startGame) `shouldBe` 16
        it "processes one strike" $
            score (rollStrike $ roll 4 $ roll 3 $ rollMany 16 0 startGame) `shouldBe` 24
        it "processes perfect game" $
            score (rollMany 12 10 startGame) `shouldBe` 300
-- | Roll the same pin count the given number of times.  The guard uses
-- (<= 0) rather than a literal 0 pattern so that a negative count
-- terminates immediately instead of recursing forever.
rollMany :: Int -> Int -> Game -> Game
rollMany times pin game
    | times <= 0 = game
    | otherwise  = rollMany (times - 1) pin (roll pin game)
-- | Roll a spare: 6 pins followed by 4 pins.
rollSpare :: Game -> Game
rollSpare game = roll 4 (roll 6 game)

-- | Roll a strike: all 10 pins at once.
rollStrike :: Game -> Game
rollStrike game = roll 10 game
| Alex-Diez/haskell-tdd-kata | BowlingGameKata/BowlingGameDay03/test/BowlingGame/KataSpec.hs | bsd-3-clause | 1,020 | 0 | 15 | 345 | 330 | 167 | 163 | 23 | 1 |
{-# LANGUAGE CPP #-}
module Rumpus.Types where
import Control.Monad.Trans
import Graphics.GL.Pal
import Data.IORef
import Data.Time
-- | Human-readable application version.
versionString :: String
versionString = "0.2.0"
-- | True when compiled with the RUMPUS_RELEASE CPP flag.
isInReleaseMode :: Bool
#if defined(RUMPUS_RELEASE)
isInReleaseMode = True
#else
isInReleaseMode = False
#endif
-- | True when compiled with the RUMPUS_PROFILE CPP flag.
isBeingProfiled :: Bool
#if defined(RUMPUS_PROFILE)
isBeingProfiled = True
#else
isBeingProfiled = False
#endif
-- Profiling shims: swap the commented alternatives to enable/disable the
-- underlying instrumentation without touching call sites.
-- Currently forwards to profileMS (millisecond timing).
profileMS' :: (MonadIO m) => String -> Int -> m a -> m a
profileMS' = profileMS
--profileMS' _ _ = id
--profileMS' name _ act = putStrLnIO ("About to run " ++ name ++ "...") >> act
-- Currently a no-op; the label and depth arguments are ignored.
profileFPS' :: (MonadIO m) => String -> Int -> m a -> m a
--profileFPS' = profileFPS
profileFPS' _ _ = id
-- | Build an action that, each time it is run, prints the frame rate
-- computed from the wall-clock time since its previous invocation.
makeCheckFPS :: MonadIO m => m (m ())
makeCheckFPS = do
    lastTickRef <- liftIO (newIORef =<< getCurrentTime)
    return . liftIO $ do
        now      <- getCurrentTime
        previous <- readIORef lastTickRef
        writeIORef lastTickRef now
        let elapsed = now `diffUTCTime` previous
        putStrLn ("FPS: " ++ show (1 / elapsed))
| lukexi/rumpus | src/Rumpus/Types.hs | bsd-3-clause | 1,080 | 0 | 18 | 241 | 275 | 145 | 130 | 26 | 1 |
module Gittins.Main where
import Gittins.Config
import Gittins.FilePath
import Gittins.Interpreter
import Gittins.Process
import Gittins.Types
import Data.Either (partitionEithers)
import Data.List (isPrefixOf, nub)
import Data.Maybe (isJust)
import Options.Applicative
import Options.Applicative.Types (Completer(..))
import System.Exit (ExitCode(..))
import Text.Regex (Regex, matchRegex, mkRegex)
-- | Type alias for options passed through to Git
type GitOpt = String
type Force = Bool
data Opts = Opts RuntimeConfig Command
data Command = Register [FilePath] [GroupId] Force
| Unregister [FilePath]
| List [GroupId]
| AddToGroup [GroupId] [FilePath]
| RemoveFromGroup [GroupId] [FilePath]
| Status [GroupId] [GitOpt]
| Pull [GroupId] [GitOpt]
| Diff [GroupId] [GitOpt]
deriving (Eq, Ord, Show)
-- | Top-level command-line parser: global worker options followed by a
-- subcommand.  Fixes the user-facing typo "one ore more" in the
-- unregister help text.
parseOpts :: Parser Opts
parseOpts = Opts <$> workerOpts <*> subparser (
  -- Manage repositories and groups
     command "register" (info registerOpts
                              (progDesc "Register one or more repositories"))
  <> command "unregister" (info unregisterOpts
                                (progDesc "Unregister one or more repositories"))
  <> command "add-group" (info addToGroupOpts
                               (progDesc "Add one or more repositories to a group"))
  <> command "rm-group" (info removeFromGroupOpts
                              (progDesc "Remove one or more repositories from a group"))
  <> command "list" (info listOpts (progDesc "List registered repositories"))

  -- Git commands
  <> command "status" (info statusOpts fullDesc)
  <> command "pull" (info pullOpts fullDesc)
  <> command "diff" (info diffOpts fullDesc))

  where
    registerOpts = Register <$> paths <*> groupIds <*> force
    unregisterOpts = Unregister <$> paths
    addToGroupOpts = AddToGroup <$> groupIds <*> paths
    removeFromGroupOpts = RemoveFromGroup <$> groupIds <*> paths
    listOpts = List <$> groupIds
    -- Group filters are shared by several subcommands; tab completion
    -- draws from the groups already present in the configuration.
    groupIds = many $ strOption (short 'g' <> long "group" <> metavar "GROUP"
                                 <> completer completeGroups)
    -- Default to the current directory when no paths are given.
    paths = some (strArgument (metavar "PATH")) <|> pure ["."]
    statusOpts = Status <$> groupIds <*> gitOpts
    pullOpts = Pull <$> groupIds <*> gitOpts
    diffOpts = Diff <$> groupIds <*> gitOpts
    -- Remaining positional arguments are handed straight through to git.
    gitOpts = many (strArgument (metavar "GIT_OPT"))
    force = switch (short 'f' <> long "force")

    workerOpts = RuntimeConfig <$>
      (option auto ( short 'j'
                   <> metavar "CONCURRENCY"
                   <> help "Max number of Git processes to run concurrently" ) <|> pure 1)
completeGroups :: Completer
completeGroups = Completer $ \prefix -> runIO runtimeConfig $ do
Config repos <- getConfig
return $ nub $ filter (prefix `isPrefixOf`) (concatMap repoGroups repos)
where runtimeConfig = RuntimeConfig 1
-- | Entry point for register command
-- Adds each path to the configuration, skipping paths already registered.
-- Unless forced, only paths that look like git working trees are accepted.
register :: [GroupId] -> [FilePath] -> Force -> Act ()
register groupIds paths force = mapM_ addRepo paths where
  addRepo :: FilePath -> Act ()
  addRepo p = do
    Config repos <- getConfig
    let name = mkRepoName p
    if any ((== p) . repoPath) repos
      then putLog (AlreadyRegistered p)
      else do
        shouldRegister <- isWorkingTree p
        if force || shouldRegister
          then do putLog (Registering p)
                  putConfig $ Config (Repository name p groupIds : repos)
          else putLog (NotAGitRepository p)
-- | Entry point for unregister command
unregister :: [FilePath] -> Act ()
unregister = mapM_ removeRepo where

  -- Drop a single repository from the configuration, logging the outcome.
  removeRepo :: FilePath -> Act ()
  removeRepo path = do
    Config repos <- getConfig
    if any ((== path) . repoPath) repos
      then do
        putLog (Unregistering path)
        putConfig (Config (filter ((/= path) . repoPath) repos))
      else putLog (NotRegistered path)
-- | Entry point for add-to-group command
-- Adds each path's repository to the given groups; logs a warning for
-- paths that are not registered (when 'addToGroups' returns Nothing).
addToGroup :: [GroupId] -> [FilePath] -> Act ()
addToGroup groupIds = mapM_ addGroup where
  addGroup :: FilePath -> Act ()
  addGroup path = do
    config <- getConfig
    case addToGroups groupIds path config of
      Just c -> putConfig c
      Nothing -> putLog (NotRegistered path)
-- | Entry point for remove-from-group command
removeFromGroup :: [GroupId] -> [FilePath] -> Act ()
removeFromGroup groupIds = mapM_ rmGroup where
rmGroup :: FilePath -> Act ()
rmGroup path = do
config <- getConfig
let config' = modifyRepository (\(Repository n p gs) ->
if p == path || n == path
then Just $ Repository n p (filter (not . flip elem groupIds) gs)
else Nothing) config
case config' of
Just c -> putConfig c
Nothing -> putLog (NotRegistered path)
-- | Entry point for list command
list :: [GroupId] -> Act ()
list groupIds = do
  Config repos <- getConfig
  -- With no group filter every repository matches; otherwise a repository
  -- matches when it belongs to at least one requested group.
  let wanted (Repository _ _ gs) = null groupIds || any (`elem` groupIds) gs
  putLog (RepositoriesSummary (filter wanted repos))
-- | Entry point for status command
status :: [GroupId] -> [GitOpt] -> Act ()
status = gitCommand "status"
-- | Entry point for pull command
pull :: [GroupId] -> [GitOpt] -> Act ()
pull groupIds gitOpts = do
repos <- getReposForGroup groupIds
outputs <- concurrentFor repos $ \repo@(Repository _ path _) ->
do result <- git path "pull" gitOpts
putLog (GitOutput repo result)
return (repo, result)
putLog (pullSummary outputs)
where
pullSummary = uncurry PullSummary
. partitionEithers
. map (uncurry toEither)
. filter (not . isUpToDate . snd)
isUpToDate (ProcessResult ExitSuccess out _) =
out == "Already up-to-date.\n" || isJust (matchRegex upToDateRegex out)
isUpToDate _ = False
toEither repo (ProcessResult exit _ _) =
case exit of ExitSuccess -> Right repo
ExitFailure _ -> Left repo
upToDateRegex :: Regex
upToDateRegex = mkRegex "Current branch .+ is up to date."
-- | Entry point for diff
diff :: [GroupId] -> [GitOpt] -> Act ()
diff = gitCommand "diff"
-- | Shared driver: run the given git subcommand in every repository that
-- matches the group filter, logging each repository's output.  The
-- repositories are processed concurrently via 'concurrentFor_'.
gitCommand :: String -> [GroupId] -> [GitOpt] -> Act ()
gitCommand cmd groupIds gitOpts = do
  repos <- getReposForGroup groupIds
  concurrentFor_ repos $ \repo@(Repository _ p _) ->
    do result <- git p cmd gitOpts
       putLog (GitOutput repo result)
-- | Git command
-- Runs @git <cmd> <opts>@ with the given working directory.
git :: FilePath -> String -> [GitOpt] -> Act ProcessResult
git cwd cmd opts = process cwd "git" (cmd : opts)
-- | Main entry point
gittinsMain :: IO ()
gittinsMain = do
Opts runtimeConfig cmd <- execParser $ info (helper <*> parseOpts) fullDesc
let run = runIO runtimeConfig
case cmd of
Register paths groupIds force -> do paths' <- canonicalize paths
run $ register groupIds paths' force
Unregister paths -> canonicalize paths >>= run . unregister
AddToGroup groupIds paths -> canonicalize paths >>= run . addToGroup groupIds
RemoveFromGroup groupIds paths -> canonicalize paths >>= run . removeFromGroup groupIds
List groupIds -> run $ list groupIds
Status groupIds gitOpts -> run $ status groupIds gitOpts
Pull groupIds gitOpts -> run $ pull groupIds gitOpts
Diff groupIds gitOpts -> run $ diff groupIds gitOpts
where
canonicalize = mapM safeCanonicalize
| bmjames/gittins | src/Gittins/Main.hs | bsd-3-clause | 7,463 | 0 | 22 | 2,007 | 2,264 | 1,139 | 1,125 | 158 | 8 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main where
import Control.Monad
import Control.Monad.Remote.JSON
import Control.Applicative
import Data.Monoid
import Data.Aeson
import Data.Maybe
import Data.Text (Text)
import qualified Data.Text.IO as IO
import System.Random
import Server
-- | Fire two "say" notifications over the session, then fetch the
-- temperature and print it.
main :: IO ()
main = do
    temperature <- send session $ do
        notification "say" [String "Hello!"]
        notification "say" [String "Hello!"]
        method "temperature" []
    print temperature
| roboguy13/remote-json | example/Weak/Untyped.hs | bsd-3-clause | 597 | 0 | 13 | 184 | 138 | 74 | 64 | 20 | 1 |
module Utils where
import Data.Function (on)
import Data.List (sortBy)
import System.Random.MWC (GenIO,uniform)
import Control.Monad.Primitive (PrimMonad, PrimState)
import Numeric.LinearAlgebra.HMatrix (Vector)
import qualified Data.Vector.Storable as V
-- | Draw one sample from a multinomial distribution.  The result is a
-- one-hot vector with a 1 at the sampled outcome's index.
multinomial :: Vector Double -- ^ Probabilities of each of the p different outcomes. These should sum to 1.
            -> GenIO
            -> IO (Vector Int)
multinomial pvals gen
    -- NOTE(review): this only rejects sums noticeably above 1; a sum well
    -- below 1 is silently accepted — TODO confirm that is intended.
    | V.sum pvals > 1.01 =
        error "Utils.multinomial: sum of probabilities should sum to 1"
    | otherwise = do
        threshold <- uniform gen
        let (_, sample, _) = pick threshold
        return sample
  where
    outcomes = V.length pvals

    -- Outcome indices paired with their probabilities, sorted by ascending
    -- probability.  Because 'pick' folds with foldr, accumulation
    -- effectively starts from the end of this list, i.e. with the most
    -- probable outcome.
    sorted = sortBy (compare `on` snd) (zip [0 ..] (V.toList pvals))

    -- Walk the cumulative probability until it reaches the drawn value,
    -- then flip the corresponding entry of a zero vector to 1.
    pick :: Double -> (Double, Vector Int, Bool)
    pick x = foldr step (0.0, V.replicate outcomes 0, False) sorted
      where
        step (j, p) (acc, v, done)
            | done         = (acc, v, done)
            | acc + p >= x = (acc, v V.// [(j, 1)], True)
            | otherwise    = (acc + p, v, done)
| DbIHbKA/mnistvb | src/Utils.hs | bsd-3-clause | 1,403 | 0 | 15 | 676 | 369 | 207 | 162 | 35 | 3 |
module Parser where
import Control.Applicative
import Control.Monad
import Data.Attoparsec.ByteString (IResult (..), Parser (..),
Result (..))
import qualified Data.Attoparsec.ByteString.Char8 as A
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.ByteString.Char8 (pack, unpack)
import Data.List (intercalate)
import Data.Time (FormatTime (..), UTCTime,
defaultTimeLocale,
formatTime, parseTimeM)
import Numeric.Natural
import System.IO as IO
import Pipes as P
import qualified Pipes.Prelude as P
import qualified Pipes.ByteString as PB
import Types
import Debug.Trace
type Line = (UTCTime, (Natural, Natural), Int, Code)
type Code = ByteString
--------------------------------------------------------------------------------
-- XXX Kinda arbitrary
chunkSize :: Int
chunkSize = 25
parser :: MonadIO m => Producer LogEntry m ()
parser = PB.hGet chunkSize IO.stdin >-> parseLazy ""
-- Attoparsec isn't lazy enough, but Pipes are nice.
parseLazy :: Monad m => ByteString -> Pipe ByteString LogEntry m ()
parseLazy rem = do
r <- A.parseWith await parseLine rem
case r of Done rem le -> yield le >> parseLazy rem
Fail rem _ _ -> parseLazy rem
Partial cb -> error "parseWith invariant failed!"
-- | Render each incoming 'LogEntry' with 'pretty'; when the flag is set,
-- terminate every record with a newline.
prettyPipe :: Monad m => Bool -> Pipe LogEntry ByteString m ()
prettyPipe appendNewline = forever (await >>= render)
  where
    render entry =
      let txt = pretty entry
      in yield (pack (if appendNewline then txt ++ "\n" else txt))
--------------------------------------------------------------------------------
pretty :: LogEntry -> String
pretty (LogEntry time (Measurement st (Location x y) temp)) =
intercalate "|" [ti, loc, te, sta]
where ti = formatTime defaultTimeLocale timeFormat time
loc = show (round x) ++ "," ++ show (round y)
te = show (round temp)
sta = show st
-- |Produce an entry with correct units from raw digits and an observatory code.
mkEntry :: UTCTime -> (Natural, Natural) -> Integer -> Station -> LogEntry
mkEntry time (lx, ly) temp sta = LogEntry time
$ mkMeasure sta (fromIntegral lx, fromIntegral ly) (fromIntegral temp)
parseLine :: Parser LogEntry
parseLine = do
time <- parseTime
pipe
loc <- location
pipe
temp <- temperature
pipe
sta <- station
A.many' newline
return (mkEntry time loc temp sta)
-- yyyy-MM-ddThh:mm in UTC
timeFormat :: String
timeFormat = "%Y-%m-%dT%H:%M"
parseTime :: Parser UTCTime
parseTime = do
str <- A.take 16
parseTimeM False defaultTimeLocale timeFormat (unpack str)
-- | Parse comma-separated grid coordinates, e.g. @"12,34"@.
location :: Parser (Natural, Natural)
location = (,) <$> (A.decimal <* A.char ',') <*> A.decimal
temperature :: Parser Integer
temperature = A.signed A.decimal
-- Allow only ASCII alphabet in stations
-- | Parse a two-letter observatory code, mapping the known codes to their
-- constructors and anything else to 'Other'.
station :: Parser Station
station = do
  a <- A.satisfy A.isAlpha_ascii
  b <- A.satisfy A.isAlpha_ascii
  return (classify [a, b])
  where
    classify "AU" = AUS
    classify "US" = USA
    classify "FR" = FRA
    classify code = Other code
pipe :: Parser Char
pipe = A.char '|'
newline :: Parser Char
newline = A.char '\n'
| thumphries/weather | src/Parser.hs | bsd-3-clause | 3,452 | 0 | 12 | 991 | 995 | 521 | 474 | -1 | -1 |
import Distribution.MacOSX
import Distribution.Simple
import System.Cmd
import System.Exit
import System.Directory
import System.FilePath
import System.Info (os)
import Control.Monad
import Distribution.PackageDescription
import Distribution.Simple.Setup
import Distribution.Simple
import Distribution.Simple.Utils
import Distribution.Simple.LocalBuildInfo
main :: IO ()
main = do
resources <- getAllDirectoryContents "res"
let usefulres = flip filter resources $ \r -> r /= ("res" </> "images" </> "icon" </> "hpage.icns")
defaultMainWithHooks $ simpleUserHooks {
postBuild = appBundleBuildHook $ [guiApp usefulres], -- no-op if not MacOS X
postInst = appBundleInstall $ guiApp usefulres,
runTests = runTests'
}
guiApp :: [FilePath] -> MacApp
guiApp rs = MacApp "hpage"
(Just $ "res" </> "images" </> "icon" </> "hpage.icns")
Nothing -- Build a default Info.plist for the icon.
rs
[] -- No other binaries.
(ChaseWith $ "libstdc++." : defaultExclusions)
-- | Recursively list all non-hidden files beneath a directory, returning
-- paths prefixed with the given root.  Hidden entries (".", "..", and
-- dotfiles) are skipped via a total pattern instead of the partial 'head'.
getAllDirectoryContents :: FilePath -> IO [FilePath]
getAllDirectoryContents p =
    do
      allContents <- getDirectoryContents p
      let visible = [ p </> entry | entry <- allContents, not (hidden entry) ]
      recContents <- forM visible $ \path -> do
          isDir <- doesDirectoryExist path
          if isDir
            then getAllDirectoryContents path
            else return [path]
      return (concat recContents)
  where
    -- Total replacement for @head path /= '.'@; also safe on "".
    hidden ('.':_) = True
    hidden _       = False
appBundleInstall :: MacApp -> Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO ()
appBundleInstall app _ _ pkg localb =
case os of
"darwin" ->
do
ExitSuccess <- copyDirectory appPath theBindir
writeFile scriptFile scriptText
_ ->
return ()
where
theBindir = bindir $ absoluteInstallDirs pkg localb NoCopyDest
appPath = buildDir localb </> appName app <.> "app"
scriptFile = theBindir </> appName app
finalAppPath = theBindir </> takeFileName appPath
scriptText = finalAppPath </> "Contents/MacOS" </> appName app ++ " \"$@\""
-- scriptText = "open " ++ finalAppPath
copyDirectory :: FilePath -> FilePath -> IO ExitCode
copyDirectory dir newLocation = rawSystem "cp" ["-rf", dir, newLocation]
-- | Custom Cabal test hook: run the hPage server test suite with
-- runhaskell.  Unlike the previous version, the exit code of the shell
-- command is checked, so a failing test suite now fails the build step
-- instead of being silently ignored.
runTests' :: Args -> Bool -> PackageDescription -> LocalBuildInfo -> IO ()
runTests' _ _ _ lbi = do
    built <- doesDirectoryExist $ buildDir lbi
    unless built $ die "Run the 'build' command first."
    exitCode <- system "cd src && runhaskell -i../dist/build/autogen HPage.Test.Server && cd .."
    unless (exitCode == ExitSuccess) $ die "Test suite failed."
return () | elbrujohalcon/hPage | Setup.hs | bsd-3-clause | 2,888 | 0 | 16 | 879 | 685 | 346 | 339 | 61 | 2 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
Type checking of type signatures in interface files
-}
{-# LANGUAGE CPP #-}
module TcIface (
tcLookupImported_maybe,
importDecl, checkWiredInTyCon, tcHiBootIface, typecheckIface,
tcIfaceDecl, tcIfaceInst, tcIfaceFamInst, tcIfaceRules,
tcIfaceVectInfo, tcIfaceAnnotations,
tcIfaceExpr, -- Desired by HERMIT (Trac #7683)
tcIfaceGlobal
) where
#include "HsVersions.h"
import TcTypeNats(typeNatCoAxiomRules)
import IfaceSyn
import LoadIface
import IfaceEnv
import BuildTyCl
import TcRnMonad
import TcType
import Type
import Coercion
import CoAxiom
import TyCoRep -- needs to build types & coercions in a knot
import HscTypes
import Annotations
import InstEnv
import FamInstEnv
import CoreSyn
import CoreUtils
import CoreUnfold
import CoreLint
import MkCore
import Id
import MkId
import IdInfo
import Class
import TyCon
import ConLike
import DataCon
import PrelNames
import TysWiredIn
import Literal
import Var
import VarEnv
import VarSet
import Name
import NameEnv
import NameSet
import OccurAnal ( occurAnalyseExpr )
import Demand
import Module
import UniqFM
import UniqSupply
import Outputable
import Maybes
import SrcLoc
import DynFlags
import Util
import FastString
import BasicTypes hiding ( SuccessFlag(..) )
import ListSetOps
import Data.List
import Control.Monad
import qualified Data.Map as Map
{-
This module takes
IfaceDecl -> TyThing
IfaceType -> Type
etc
An IfaceDecl is populated with RdrNames, and these are not renamed to
Names before typechecking, because there should be no scope errors etc.
-- For (b) consider: f = \$(...h....)
-- where h is imported, and calls f via an hi-boot file.
-- This is bad! But it is not seen as a staging error, because h
-- is indeed imported. We don't want the type-checker to black-hole
-- when simplifying and compiling the splice!
--
-- Simple solution: discard any unfolding that mentions a variable
-- bound in this module (and hence not yet processed).
-- The discarding happens when forkM finds a type error.
************************************************************************
* *
Type-checking a complete interface
* *
************************************************************************
Suppose we discover we don't need to recompile. Then we must type
check the old interface file. This is a bit different to the
incremental type checking we do as we suck in interface files. Instead
we do things similarly as when we are typechecking source decls: we
bring into scope the type envt for the interface all at once, using a
knot. Remember, the decls aren't necessarily in dependency order --
and even if they were, the type decls might be mutually recursive.
-}
-- | Type-check a complete interface file in one go. Used when we decide
-- an existing interface is still up to date and need not be recompiled.
-- The knot is tied through a freshly allocated mutable type environment
-- ('tc_env_var'), so the decls may refer to each other in any order.
typecheckIface :: ModIface      -- Get the decls from here
               -> TcRnIf gbl lcl ModDetails
typecheckIface iface
  = initIfaceTc iface $ \ tc_env_var -> do
        -- The tc_env_var is freshly allocated, private to
        -- type-checking this particular interface
        {       -- Get the right set of decls and rules. If we are compiling without -O
                -- we discard pragmas before typechecking, so that we don't "see"
                -- information that we shouldn't. From a versioning point of view
                -- It's not actually *wrong* to do so, but in fact GHCi is unable
                -- to handle unboxed tuples, so it must not see unfoldings.
          ignore_prags <- goptM Opt_IgnoreInterfacePragmas

                -- Typecheck the decls. This is done lazily, so that the knot-tying
                -- within this single module work out right. In the If monad there is
                -- no global envt for the current interface; instead, the knot is tied
                -- through the if_rec_types field of IfGblEnv
        ; names_w_things <- loadDecls ignore_prags (mi_decls iface)
        ; let type_env = mkNameEnv names_w_things
        ; writeMutVar tc_env_var type_env

                -- Now do those rules, instances and annotations
        ; insts     <- mapM tcIfaceInst (mi_insts iface)
        ; fam_insts <- mapM tcIfaceFamInst (mi_fam_insts iface)
        ; rules     <- tcIfaceRules ignore_prags (mi_rules iface)
        ; anns      <- tcIfaceAnnotations (mi_anns iface)

                -- Vectorisation information
        ; vect_info <- tcIfaceVectInfo (mi_module iface) type_env (mi_vect_info iface)

                -- Exports
        ; exports <- ifaceExportNames (mi_exports iface)

                -- Finished
        ; traceIf (vcat [text "Finished typechecking interface for" <+> ppr (mi_module iface),
                         -- Careful! If we tug on the TyThing thunks too early
                         -- we'll infinite loop with hs-boot. See #10083 for
                         -- an example where this would cause non-termination.
                         text "Type envt:" <+> ppr (map fst names_w_things)])
        ; return $ ModDetails { md_types     = type_env
                              , md_insts     = insts
                              , md_fam_insts = fam_insts
                              , md_rules     = rules
                              , md_anns      = anns
                              , md_vect_info = vect_info
                              , md_exports   = exports
                              }
    }
{-
************************************************************************
* *
Type and class declarations
* *
************************************************************************
-}
-- | Load and type-check the hi-boot interface for the module being
-- compiled, if one exists in the transitive closure of imports.
tcHiBootIface :: HscSource -> Module -> TcRn SelfBootInfo
-- Load the hi-boot iface for the module being compiled,
-- if it indeed exists in the transitive closure of imports
-- Return the ModDetails; Nothing if no hi-boot iface
tcHiBootIface hsc_src mod
  | HsBootFile <- hsc_src            -- Already compiling a hs-boot file
  = return NoSelfBoot
  | otherwise
  = do  { traceIf (text "loadHiBootInterface" <+> ppr mod)

        ; mode <- getGhcMode
        ; if not (isOneShot mode)
                -- In --make and interactive mode, if this module has an hs-boot file
                -- we'll have compiled it already, and it'll be in the HPT
                --
                -- We check whether the interface is a *boot* interface.
                -- It can happen (when using GHC from Visual Studio) that we
                -- compile a module in TypecheckOnly mode, with a stable,
                -- fully-populated HPT. In that case the boot interface isn't there
                -- (it's been replaced by the mother module) so we can't check it.
                -- And that's fine, because if M's ModInfo is in the HPT, then
                -- it's been compiled once, and we don't need to check the boot iface
          then do { hpt <- getHpt
                  ; case lookupHpt hpt (moduleName mod) of
                      Just info | mi_boot (hm_iface info)
                                -> mkSelfBootInfo (hm_iface info) (hm_details info)
                      _ -> return NoSelfBoot }
          else do

        -- OK, so we're in one-shot mode.
        -- Re #9245, we always check if there is an hi-boot interface
        -- to check consistency against, rather than just when we notice
        -- that an hi-boot is necessary due to a circular import.
        { read_result <- findAndReadIface
                                need mod
                                True    -- Hi-boot file

        ; case read_result of {
            Succeeded (iface, _path) -> do { tc_iface <- typecheckIface iface
                                           ; mkSelfBootInfo iface tc_iface } ;
            Failed err               ->

        -- There was no hi-boot file. But if there is circularity in
        -- the module graph, there really should have been one.
        -- Since we've read all the direct imports by now,
        -- eps_is_boot will record if any of our imports mention the
        -- current module, which either means a module loop (not
        -- a SOURCE import) or that our hi-boot file has mysteriously
        -- disappeared.
    do  { eps <- getEps
        ; case lookupUFM (eps_is_boot eps) (moduleName mod) of
            Nothing -> return NoSelfBoot -- The typical case
            Just (_, False) -> failWithTc moduleLoop
                -- Someone below us imported us!
                -- This is a loop with no hi-boot in the way

            Just (_mod, True) -> failWithTc (elaborate err)
                -- The hi-boot file has mysteriously disappeared.
    }}}}
  where
    need = text "Need the hi-boot interface for" <+> ppr mod
                 <+> text "to compare against the Real Thing"

    moduleLoop = text "Circular imports: module" <+> quotes (ppr mod)
                     <+> text "depends on itself"

    elaborate err = hang (text "Could not find hi-boot interface for" <+>
                          quotes (ppr mod) <> colon) 4 err
-- | Package an hi-boot interface and its typechecked details into a
-- 'SelfBootInfo', recording which declared names are type constructors.
mkSelfBootInfo :: ModIface -> ModDetails -> TcRn SelfBootInfo
mkSelfBootInfo iface mds
  = do -- NB: This is computed DIRECTLY from the ModIface rather
       -- than from the ModDetails, so that we can query 'sb_tcs'
       -- WITHOUT forcing the contents of the interface.
       tcs <- mapM (lookupOrig (mi_module iface) . ifName)
            . filter isIfaceTyCon
            . map snd
            $ mi_decls iface
       return $ SelfBoot { sb_mds = mds
                         , sb_tcs = mkNameSet tcs }
  where
    -- | Returns @True@ if, when you call 'tcIfaceDecl' on
    -- this 'IfaceDecl', an ATyCon would be returned.
    -- NB: This code assumes that a TyCon cannot be implicit.
    isIfaceTyCon IfaceId{}      = False
    isIfaceTyCon IfaceData{}    = True
    isIfaceTyCon IfaceSynonym{} = True
    isIfaceTyCon IfaceFamily{}  = True
    isIfaceTyCon IfaceClass{}   = True
    isIfaceTyCon IfaceAxiom{}   = False
    isIfaceTyCon IfacePatSyn{}  = False
{-
************************************************************************
* *
Type and class declarations
* *
************************************************************************
When typechecking a data type decl, we *lazily* (via forkM) typecheck
the constructor argument types. This is in the hope that we may never
poke on those argument types, and hence may never need to load the
interface files for types mentioned in the arg types.
E.g.
data Foo.S = MkS Baz.T
Maybe we can get away without even loading the interface for Baz!
This is not just a performance thing. Suppose we have
data Foo.S = MkS Baz.T
data Baz.T = MkT Foo.S
(in different interface files, of course).
Now, first we load and typecheck Foo.S, and add it to the type envt.
If we do explore MkS's argument, we'll load and typecheck Baz.T.
If we explore MkT's argument we'll find Foo.S already in the envt.
If we typechecked constructor args eagerly, when loading Foo.S we'd try to
typecheck the type Baz.T. So we'd fault in Baz.T... and then need Foo.S...
which isn't done yet.
All very cunning. However, there is a rather subtle gotcha which bit
me when developing this stuff. When we typecheck the decl for S, we
extend the type envt with S, MkS, and all its implicit Ids. Suppose
(a bug, but it happened) that the list of implicit Ids depended in
turn on the constructor arg types. Then the following sequence of
events takes place:
* we build a thunk <t> for the constructor arg tys
* we build a thunk for the extended type environment (depends on <t>)
* we write the extended type envt into the global EPS mutvar
Now we look something up in the type envt
* that pulls on <t>
* which reads the global type envt out of the global EPS mutvar
* but that depends in turn on <t>
It's subtle, because, it'd work fine if we typechecked the constructor args
eagerly -- they don't need the extended type envt. They just get the extended
type envt by accident, because they look at it later.
What this means is that the implicitTyThings MUST NOT DEPEND on any of
the forkM stuff.
-}
-- | Type-check a single top-level interface declaration into a
-- 'TyThing'. Top-level declarations have no enclosing class, hence
-- the 'Nothing' parent passed to the worker.
tcIfaceDecl :: Bool     -- ^ True <=> discard IdInfo on IfaceId bindings
            -> IfaceDecl
            -> IfL TyThing
tcIfaceDecl ignore_prags decl = tc_iface_decl Nothing ignore_prags decl
tc_iface_decl :: Maybe Class  -- ^ For associated type/data family declarations
              -> Bool         -- ^ True <=> discard IdInfo on IfaceId bindings
              -> IfaceDecl
              -> IfL TyThing
-- An Id binding: build a GlobalId from the interface type, details
-- and (unless being ignored) the pragma/unfolding info.
tc_iface_decl _ ignore_prags (IfaceId {ifName = occ_name, ifType = iface_type,
                                       ifIdDetails = details, ifIdInfo = info})
  = do  { name <- lookupIfaceTop occ_name
        ; ty <- tcIfaceType iface_type
        ; details <- tcIdDetails ty details
        ; info <- tcIdInfo ignore_prags name ty info
        ; return (AnId (mkGlobalId details name ty info)) }
-- A data/newtype declaration. NB the 'fixM' knot: the constructors
-- mention the TyCon being built, so the TyCon is tied back into the
-- computation of its own constructors.
tc_iface_decl _ _ (IfaceData {ifName = occ_name,
                              ifCType = cType,
                              ifBinders = binders,
                              ifResKind = res_kind,
                              ifRoles = roles,
                              ifCtxt = ctxt, ifGadtSyntax = gadt_syn,
                              ifCons = rdr_cons,
                              ifParent = mb_parent })
  = bindIfaceTyConBinders_AT binders $ \ binders' -> do
    { tc_name <- lookupIfaceTop occ_name
    ; res_kind' <- tcIfaceType res_kind
    ; tycon <- fixM $ \ tycon -> do
            { stupid_theta <- tcIfaceCtxt ctxt
            ; parent' <- tc_parent tc_name mb_parent
            ; cons <- tcIfaceDataCons tc_name tycon binders' rdr_cons
            ; return (mkAlgTyCon tc_name binders' res_kind'
                                 roles cType stupid_theta
                                 cons parent' gadt_syn) }
    ; traceIf (text "tcIfaceDecl4" <+> ppr tycon)
    ; return (ATyCon tycon) }
  where
    -- Decide whether this is a vanilla data type or a data-family instance,
    -- type-checking the representation axiom in the latter case.
    tc_parent :: Name -> IfaceTyConParent -> IfL AlgTyConFlav
    tc_parent tc_name IfNoParent
      = do { tc_rep_name <- newTyConRepName tc_name
           ; return (VanillaAlgTyCon tc_rep_name) }
    tc_parent _ (IfDataInstance ax_name _ arg_tys)
      = do { ax <- tcIfaceCoAxiom ax_name
           ; let fam_tc  = coAxiomTyCon ax
                 ax_unbr = toUnbranchedAxiom ax
           ; lhs_tys <- tcIfaceTcArgs arg_tys
           ; return (DataFamInstTyCon ax_unbr fam_tc lhs_tys) }
-- A type synonym. The kind is read eagerly but the RHS is deferred
-- via 'forkM'; see Note [Synonym kind loop].
tc_iface_decl _ _ (IfaceSynonym {ifName = occ_name,
                                 ifRoles = roles,
                                 ifSynRhs = rhs_ty,
                                 ifBinders = binders,
                                 ifResKind = res_kind })
   = bindIfaceTyConBinders_AT binders $ \ binders' -> do
     { tc_name  <- lookupIfaceTop occ_name
     ; res_kind' <- tcIfaceType res_kind     -- Note [Synonym kind loop]
     ; rhs      <- forkM (mk_doc tc_name) $
                   tcIfaceType rhs_ty
     ; let tycon = mkSynonymTyCon tc_name binders' res_kind' roles rhs
     ; return (ATyCon tycon) }
   where
     mk_doc n = text "Type synonym" <+> ppr n
-- A type/data family. Like synonyms, the kind is eager and the
-- flavour (RHS) is deferred via 'forkM'; see Note [Synonym kind loop].
tc_iface_decl parent _ (IfaceFamily {ifName = occ_name,
                                     ifFamFlav = fam_flav,
                                     ifBinders = binders,
                                     ifResKind = res_kind,
                                     ifResVar = res, ifFamInj = inj })
   = bindIfaceTyConBinders_AT binders $ \ binders' -> do
     { tc_name   <- lookupIfaceTop occ_name
     ; res_kind' <- tcIfaceType res_kind    -- Note [Synonym kind loop]
     ; rhs       <- forkM (mk_doc tc_name) $
                    tc_fam_flav tc_name fam_flav
     ; res_name <- traverse (newIfaceName . mkTyVarOccFS) res
     ; let tycon = mkFamilyTyCon tc_name binders' res_kind' res_name rhs parent inj
     ; return (ATyCon tycon) }
   where
     -- NB: the doc string says "Type synonym" although this is a family;
     -- it only affects the debug/error message of the forkM thunk.
     mk_doc n = text "Type synonym" <+> ppr n

     -- Translate the family flavour; the closed-family case needs its
     -- defining axiom, looked up lazily inside the enclosing forkM.
     tc_fam_flav :: Name -> IfaceFamTyConFlav -> IfL FamTyConFlav
     tc_fam_flav tc_name IfaceDataFamilyTyCon
       = do { tc_rep_name <- newTyConRepName tc_name
            ; return (DataFamilyTyCon tc_rep_name) }
     tc_fam_flav _ IfaceOpenSynFamilyTyCon= return OpenSynFamilyTyCon
     tc_fam_flav _ (IfaceClosedSynFamilyTyCon mb_ax_name_branches)
       = do { ax <- traverse (tcIfaceCoAxiom . fst) mb_ax_name_branches
            ; return (ClosedSynFamilyTyCon ax) }
     tc_fam_flav _ IfaceAbstractClosedSynFamilyTyCon
       = return AbstractClosedSynFamilyTyCon
     tc_fam_flav _ IfaceBuiltInSynFamTyCon
       = pprPanic "tc_iface_decl"
                  (text "IfaceBuiltInSynFamTyCon in interface file")
-- A class declaration. Superclass and method types are deferred via
-- forkM; the class itself is knot-tied with its associated types via fixM.
tc_iface_decl _parent ignore_prags
            (IfaceClass {ifCtxt = rdr_ctxt, ifName = tc_occ,
                         ifRoles = roles,
                         ifBinders = binders,
                         ifFDs = rdr_fds,
                         ifATs = rdr_ats, ifSigs = rdr_sigs,
                         ifMinDef = mindef_occ })
-- ToDo: in hs-boot files we should really treat abstract classes specially,
--       as we do abstract tycons
  = bindIfaceTyConBinders binders $ \ binders' -> do
    { tc_name <- lookupIfaceTop tc_occ
    ; traceIf (text "tc-iface-class1" <+> ppr tc_occ)
    ; ctxt <- mapM tc_sc rdr_ctxt
    ; traceIf (text "tc-iface-class2" <+> ppr tc_occ)
    ; sigs <- mapM tc_sig rdr_sigs
    ; fds  <- mapM tc_fd rdr_fds
    ; traceIf (text "tc-iface-class3" <+> ppr tc_occ)
    ; mindef <- traverse (lookupIfaceTop . mkVarOccFS) mindef_occ
    ; cls  <- fixM $ \ cls -> do
              { ats  <- mapM (tc_at cls) rdr_ats
              ; traceIf (text "tc-iface-class4" <+> ppr tc_occ)
              ; buildClass tc_name binders' roles ctxt fds ats sigs mindef }
    ; return (ATyCon (classTyCon cls)) }
  where
   tc_sc pred = forkM (mk_sc_doc pred) (tcIfaceType pred)
        -- The *length* of the superclasses is used by buildClass, and hence must
        -- not be inside the thunk. But the *content* may be recursive and hence
        -- must be lazy (via forkM). Example:
        --     class C (T a) => D a where
        --       data T a
        -- Here the associated type T is knot-tied with the class, and
        -- so we must not pull on T too eagerly. See Trac #5970

   -- Type-check one class method signature.
   tc_sig :: IfaceClassOp -> IfL TcMethInfo
   tc_sig (IfaceClassOp occ rdr_ty dm)
     = do { op_name <- lookupIfaceTop occ
          ; let doc = mk_op_doc op_name rdr_ty
          ; op_ty  <- forkM (doc <+> text "ty") $ tcIfaceType rdr_ty
                -- Must be done lazily for just the same reason as the
                -- type of a data con; to avoid sucking in types that
                -- it mentions unless it's necessary to do so
          ; dm'    <- tc_dm doc dm
          ; return (op_name, op_ty, dm') }

   -- Type-check a default-method spec; generic defaults carry a type.
   tc_dm :: SDoc
         -> Maybe (DefMethSpec IfaceType)
         -> IfL (Maybe (DefMethSpec (SrcSpan, Type)))
   tc_dm _   Nothing               = return Nothing
   tc_dm _   (Just VanillaDM)      = return (Just VanillaDM)
   tc_dm doc (Just (GenericDM ty))
        = do { -- Must be done lazily to avoid sucking in types
             ; ty' <- forkM (doc <+> text "dm") $ tcIfaceType ty
             ; return (Just (GenericDM (noSrcSpan, ty'))) }

   -- Type-check an associated type, passing the (knot-tied) class as parent.
   tc_at cls (IfaceAT tc_decl if_def)
     = do ATyCon tc <- tc_iface_decl (Just cls) ignore_prags tc_decl
          mb_def <- case if_def of
                      Nothing  -> return Nothing
                      Just def -> forkM (mk_at_doc tc)                 $
                                  extendIfaceTyVarEnv (tyConTyVars tc) $
                                  do { tc_def <- tcIfaceType def
                                     ; return (Just (tc_def, noSrcSpan)) }
                  -- Must be done lazily in case the RHS of the defaults mention
                  -- the type constructor being defined here
                  -- e.g.   type AT a; type AT b = AT [b]   Trac #8002
          return (ATI tc mb_def)

   mk_sc_doc pred = text "Superclass" <+> ppr pred
   mk_at_doc tc = text "Associated type" <+> ppr tc
   mk_op_doc op_name op_ty = text "Class op" <+> sep [ppr op_name, ppr op_ty]

   -- Resolve both sides of a functional dependency to type variables.
   tc_fd (tvs1, tvs2) = do { tvs1' <- mapM tcIfaceTyVar tvs1
                           ; tvs2' <- mapM tcIfaceTyVar tvs2
                           ; return (tvs1', tvs2') }
-- A (possibly branched) coercion axiom, e.g. from a closed type family
-- or a newtype; interface axioms are never implicit.
tc_iface_decl _ _ (IfaceAxiom { ifName = ax_occ, ifTyCon = tc
                              , ifAxBranches = branches, ifRole = role })
  = do { tc_name     <- lookupIfaceTop ax_occ
       ; tc_tycon    <- tcIfaceTyCon tc
       ; tc_branches <- tc_ax_branches branches
       ; let axiom = CoAxiom { co_ax_unique   = nameUnique tc_name
                             , co_ax_name     = tc_name
                             , co_ax_tc       = tc_tycon
                             , co_ax_role     = role
                             , co_ax_branches = manyBranches tc_branches
                             , co_ax_implicit = False }
       ; return (ACoAxiom axiom) }
-- A pattern synonym. The matcher/builder Ids and the pattern synonym
-- itself are built lazily (forkM) to avoid pulling in their types eagerly.
tc_iface_decl _ _ (IfacePatSyn{ ifName = occ_name
                              , ifPatMatcher = if_matcher
                              , ifPatBuilder = if_builder
                              , ifPatIsInfix = is_infix
                              , ifPatUnivBndrs = univ_bndrs
                              , ifPatExBndrs = ex_bndrs
                              , ifPatProvCtxt = prov_ctxt
                              , ifPatReqCtxt = req_ctxt
                              , ifPatArgs = args
                              , ifPatTy = pat_ty
                              , ifFieldLabels = field_labels })
  = do { name <- lookupIfaceTop occ_name
       ; traceIf (text "tc_iface_decl" <+> ppr name)
       ; matcher <- tc_pr if_matcher
       ; builder <- fmapMaybeM tc_pr if_builder
       ; bindIfaceForAllBndrs univ_bndrs $ \univ_tvs -> do
       { bindIfaceForAllBndrs ex_bndrs $ \ex_tvs -> do
       { patsyn <- forkM (mk_doc name) $
             do { prov_theta <- tcIfaceCtxt prov_ctxt
                ; req_theta  <- tcIfaceCtxt req_ctxt
                ; pat_ty     <- tcIfaceType pat_ty
                ; arg_tys    <- mapM tcIfaceType args
                ; return $ buildPatSyn name is_infix matcher builder
                                       (univ_tvs, req_theta)
                                       (ex_tvs, prov_theta)
                                       arg_tys pat_ty field_labels }
       ; return $ AConLike . PatSynCon $ patsyn }}}
  where
     mk_doc n = text "Pattern synonym" <+> ppr n
     -- Look up a matcher/builder Id, keeping its "needs dummy args" flag.
     tc_pr :: (IfExtName, Bool) -> IfL (Id, Bool)
     tc_pr (nm, b) = do { id <- forkM (ppr nm) (tcIfaceExtId nm)
                        ; return (id, b) }
-- | Type-check the branches of an axiom in order. A left fold is
-- essential: each branch's 'cab_incomps' field indexes into the list
-- of already-checked earlier branches.
tc_ax_branches :: [IfaceAxBranch] -> IfL [CoAxBranch]
tc_ax_branches if_branches = foldlM tc_ax_branch [] if_branches

-- | Type-check one axiom branch, given all preceding branches (needed
-- to resolve the incompatible-branch indices).
tc_ax_branch :: [CoAxBranch] -> IfaceAxBranch -> IfL [CoAxBranch]
tc_ax_branch prev_branches
             (IfaceAxBranch { ifaxbTyVars = tv_bndrs, ifaxbCoVars = cv_bndrs
                            , ifaxbLHS = lhs, ifaxbRHS = rhs
                            , ifaxbRoles = roles, ifaxbIncomps = incomps })
  = bindIfaceTyConBinders_AT
      (map (\b -> TvBndr b (NamedTCB Inferred)) tv_bndrs) $ \ tvs ->
         -- The _AT variant is needed here; see Note [CoAxBranch type variables] in CoAxiom
    bindIfaceIds cv_bndrs $ \ cvs -> do
    { tc_lhs <- tcIfaceTcArgs lhs
    ; tc_rhs <- tcIfaceType rhs
    ; let br = CoAxBranch { cab_loc     = noSrcSpan
                          , cab_tvs     = binderVars tvs
                          , cab_cvs     = cvs
                          , cab_lhs     = tc_lhs
                          , cab_roles   = roles
                          , cab_rhs     = tc_rhs
                          , cab_incomps = map (prev_branches `getNth`) incomps }
    ; return (prev_branches ++ [br]) }
-- | Type-check the constructors of a data/newtype declaration into the
-- TyCon's RHS. NB: 'tycon' is the knot-tied representation TyCon, so
-- constructor payloads are typechecked lazily (forkM + lazy pattern).
tcIfaceDataCons :: Name -> TyCon -> [TyConBinder] -> IfaceConDecls -> IfL AlgTyConRhs
tcIfaceDataCons tycon_name tycon tc_tybinders if_cons
  = case if_cons of
        IfAbstractTyCon dis -> return (AbstractTyCon dis)
        IfDataTyCon cons _ _ -> do  { field_lbls <- mapM (traverse lookupIfaceTop) (ifaceConDeclFields if_cons)
                                    ; data_cons  <- mapM (tc_con_decl field_lbls) cons
                                    ; return (mkDataTyConRhs data_cons) }
        IfNewTyCon  con  _ _ -> do  { field_lbls <- mapM (traverse lookupIfaceTop) (ifaceConDeclFields if_cons)
                                    ; data_con  <- tc_con_decl field_lbls con
                                    ; mkNewTyConRhs tycon_name tycon data_con }
  where
    univ_tv_bndrs :: [TyVarBinder]
    univ_tv_bndrs = mkDataConUnivTyVarBinders tc_tybinders

    -- Type-check one data constructor declaration.
    tc_con_decl field_lbls (IfCon { ifConInfix = is_infix,
                         ifConExTvs = ex_bndrs,
                         ifConOcc = occ, ifConCtxt = ctxt, ifConEqSpec = spec,
                         ifConArgTys = args, ifConFields = my_lbls,
                         ifConStricts = if_stricts,
                         ifConSrcStricts = if_src_stricts})
     = -- Universally-quantified tyvars are shared with
       -- parent TyCon, and are already in scope
       bindIfaceForAllBndrs ex_bndrs    $ \ ex_tv_bndrs -> do
        { traceIf (text "Start interface-file tc_con_decl" <+> ppr occ)
        ; dc_name  <- lookupIfaceTop occ

        -- Read the context and argument types, but lazily for two reasons
        -- (a) to avoid looking tugging on a recursive use of
        --     the type itself, which is knot-tied
        -- (b) to avoid faulting in the component types unless
        --     they are really needed
        ; ~(eq_spec, theta, arg_tys, stricts) <- forkM (mk_doc dc_name) $
             do { eq_spec <- tcIfaceEqSpec spec
                ; theta   <- tcIfaceCtxt ctxt
                ; arg_tys <- mapM tcIfaceType args
                ; stricts <- mapM tc_strict if_stricts
                        -- The IfBang field can mention
                        -- the type itself; hence inside forkM
                ; return (eq_spec, theta, arg_tys, stricts) }

        -- Look up the field labels for this constructor; note that
        -- they should be in the same order as my_lbls!
        ; let lbl_names = map find_lbl my_lbls
              find_lbl x = case find (\ fl -> nameOccName (flSelector fl) == x) field_lbls of
                             Just fl -> fl
                             Nothing -> error $ "find_lbl missing " ++ occNameString x

        -- Remember, tycon is the representation tycon
        ; let orig_res_ty = mkFamilyTyConApp tycon
                              (substTyVars (mkTvSubstPrs (map eqSpecPair eq_spec))
                                           (binderVars tc_tybinders))

        ; prom_rep_name <- newTyConRepName dc_name

        ; con <- buildDataCon (pprPanic "tcIfaceDataCons: FamInstEnvs" (ppr dc_name))
                       dc_name is_infix prom_rep_name
                       (map src_strict if_src_stricts)
                       (Just stricts)
                       -- Pass the HsImplBangs (i.e. final
                       -- decisions) to buildDataCon; it'll use
                       -- these to guide the construction of a
                       -- worker.
                       -- See Note [Bangs on imported data constructors] in MkId
                       lbl_names
                       univ_tv_bndrs ex_tv_bndrs
                       eq_spec theta
                       arg_tys orig_res_ty tycon
        ; traceIf (text "Done interface-file tc_con_decl" <+> ppr dc_name)
        ; return con }
    mk_doc con_name = text "Constructor" <+> ppr con_name

    -- Decode the *implementation* strictness annotations.
    tc_strict :: IfaceBang -> IfL HsImplBang
    tc_strict IfNoBang = return (HsLazy)
    tc_strict IfStrict = return (HsStrict)
    tc_strict IfUnpack = return (HsUnpack Nothing)
    tc_strict (IfUnpackCo if_co) = do { co <- tcIfaceCo if_co
                                      ; return (HsUnpack (Just co)) }

    -- Decode the *source-level* strictness annotations.
    src_strict :: IfaceSrcBang -> HsSrcBang
    src_strict (IfSrcBang unpk bang) = HsSrcBang Nothing unpk bang
-- | Convert an interface-file GADT equality spec into the in-memory
-- form: each pair names a type variable and gives the type it must equal.
tcIfaceEqSpec :: IfaceEqSpec -> IfL [EqSpec]
tcIfaceEqSpec = mapM tc_pair
  where
    tc_pair (occ, if_ty)
      = mkEqSpec <$> tcIfaceTyVar occ <*> tcIfaceType if_ty
{-
Note [Synonym kind loop]
~~~~~~~~~~~~~~~~~~~~~~~~
Notice that we eagerly grab the *kind* from the interface file, but
build a forkM thunk for the *rhs* (and family stuff). To see why,
consider this (Trac #2412)
M.hs: module M where { import X; data T = MkT S }
X.hs: module X where { import {-# SOURCE #-} M; type S = T }
M.hs-boot: module M where { data T }
When kind-checking M.hs we need S's kind. But we do not want to
find S's kind from (typeKind S-rhs), because we don't want to look at
S-rhs yet! Since S is imported from X.hi, S gets just one chance to
be defined, and we must not do that until we've finished with M.T.
Solution: record S's kind in the interface file; now we can safely
look at it.
************************************************************************
* *
Instances
* *
************************************************************************
-}
-- | Type-check a class-instance declaration from an interface file.
-- The dfun Id is looked up lazily (forkM) to avoid pulling in its type.
tcIfaceInst :: IfaceClsInst -> IfL ClsInst
tcIfaceInst (IfaceClsInst { ifDFun = dfun_name, ifOFlag = oflag
                          , ifInstCls = cls, ifInstTys = mb_tcs
                          , ifInstOrph = orph })
  = do { dfun <- forkM (text "Dict fun" <+> ppr dfun_name) $
                 tcIfaceExtId dfun_name
       ; let mb_tcs' = map (fmap ifaceTyConName) mb_tcs
       ; return (mkImportedInstance cls mb_tcs' dfun_name dfun oflag orph) }
-- | Type-check a family-instance declaration; its axiom is looked up
-- lazily and converted to the unbranched form expected of instances.
tcIfaceFamInst :: IfaceFamInst -> IfL FamInst
tcIfaceFamInst (IfaceFamInst { ifFamInstFam = fam, ifFamInstTys = mb_tcs
                             , ifFamInstAxiom = axiom_name } )
    = do { axiom' <- forkM (text "Axiom" <+> ppr axiom_name) $
                     tcIfaceCoAxiom axiom_name
             -- will panic if branched, but that's OK
         ; let axiom'' = toUnbranchedAxiom axiom'
               mb_tcs' = map (fmap ifaceTyConName) mb_tcs
         ; return (mkImportedFamInst fam mb_tcs' axiom'') }
{-
************************************************************************
* *
Rules
* *
************************************************************************
We move a IfaceRule from eps_rules to eps_rule_base when all its LHS free vars
are in the type environment. However, remember that typechecking a Rule may
(as a side effect) augment the type envt, and so we may need to iterate the process.
-}
-- | Type-check the RULES of an interface file. When pragmas are being
-- ignored (e.g. compiling without -O) no rules are produced at all.
tcIfaceRules :: Bool            -- True <=> ignore rules
             -> [IfaceRule]
             -> IfL [CoreRule]
tcIfaceRules ignore_prags if_rules
  = if ignore_prags
       then return []
       else mapM tcIfaceRule if_rules
-- | Type-check one RULE. The rule's binders, LHS args and RHS are
-- typechecked lazily (forkM) in the hope they are never demanded;
-- only the rough-match names are computed eagerly.
tcIfaceRule :: IfaceRule -> IfL CoreRule
tcIfaceRule (IfaceRule {ifRuleName = name, ifActivation = act, ifRuleBndrs = bndrs,
                        ifRuleHead = fn, ifRuleArgs = args, ifRuleRhs = rhs,
                        ifRuleAuto = auto, ifRuleOrph = orph })
  = do  { ~(bndrs', args', rhs') <-
                -- Typecheck the payload lazily, in the hope it'll never be looked at
                forkM (text "Rule" <+> pprRuleName name) $
                bindIfaceBndrs bndrs                      $ \ bndrs' ->
                do { args' <- mapM tcIfaceExpr args
                   ; rhs'  <- tcIfaceExpr rhs
                   ; return (bndrs', args', rhs') }
        ; let mb_tcs = map ifTopFreeName args
        ; this_mod <- getIfModule
        ; return (Rule { ru_name = name, ru_fn = fn, ru_act = act,
                          ru_bndrs = bndrs', ru_args = args',
                          ru_rhs = occurAnalyseExpr rhs',
                          ru_rough = mb_tcs,
                          ru_origin = this_mod,
                          ru_orphan = orph,
                          ru_auto = auto,
                          ru_local = False }) } -- An imported RULE is never for a local Id
                                                -- or, even if it is (module loop, perhaps)
                                                -- we'll just leave it in the non-local set
  where
        -- This function *must* mirror exactly what Rules.roughTopNames does
        -- We could have stored the ru_rough field in the iface file
        -- but that would be redundant, I think.
        -- The only wrinkle is that we must not be deceived by
        -- type synonyms at the top of a type arg. Since
        -- we can't tell at this point, we are careful not
        -- to write them out in coreRuleToIfaceRule
    ifTopFreeName :: IfaceExpr -> Maybe Name
    ifTopFreeName (IfaceType (IfaceTyConApp tc _ )) = Just (ifaceTyConName tc)
    ifTopFreeName (IfaceType (IfaceTupleTy s _ ts)) = Just (tupleTyConName s (length (tcArgsIfaceTypes ts)))
    ifTopFreeName (IfaceApp f _)                    = ifTopFreeName f
    ifTopFreeName (IfaceExt n)                      = Just n
    ifTopFreeName _                                 = Nothing
{-
************************************************************************
* *
Annotations
* *
************************************************************************
-}
-- | Type-check every ANN declaration recorded in the interface file.
tcIfaceAnnotations :: [IfaceAnnotation] -> IfL [Annotation]
tcIfaceAnnotations anns = mapM tcIfaceAnnotation anns
-- | Type-check a single annotation. Only the target needs resolving;
-- the serialized payload is carried through unchanged.
tcIfaceAnnotation :: IfaceAnnotation -> IfL Annotation
tcIfaceAnnotation (IfaceAnnotation target serialized)
  = do { target' <- tcIfaceAnnTarget target
       ; return Annotation { ann_target = target'
                           , ann_value  = serialized } }
-- | Resolve an annotation target: named targets need a name lookup;
-- module targets are already fully resolved.
tcIfaceAnnTarget :: IfaceAnnTarget -> IfL (AnnTarget Name)
tcIfaceAnnTarget (NamedTarget occ)  = NamedTarget <$> lookupIfaceTop occ
tcIfaceAnnTarget (ModuleTarget mod) = return (ModuleTarget mod)
{-
************************************************************************
* *
Vectorisation information
* *
************************************************************************
-}
-- We need access to the type environment as we need to look up information about type constructors
-- (i.e., their data constructors and whether they are class type constructors). If a vectorised
-- type constructor or class is defined in the same module as where it is vectorised, we cannot
-- look that information up from the type constructor that we obtained via a 'forkM'ed
-- 'tcIfaceTyCon' without recursively loading the interface that we are already type checking again
-- and again and again...
--
-- | Type-check the vectorisation information of an interface file.
-- The supplied 'TypeEnv' lets us resolve tycons defined in the module
-- being checked itself, avoiding a recursive interface load.
tcIfaceVectInfo :: Module -> TypeEnv -> IfaceVectInfo -> IfL VectInfo
tcIfaceVectInfo mod typeEnv (IfaceVectInfo
                             { ifaceVectInfoVar            = vars
                             , ifaceVectInfoTyCon          = tycons
                             , ifaceVectInfoTyConReuse     = tyconsReuse
                             , ifaceVectInfoParallelVars   = parallelVars
                             , ifaceVectInfoParallelTyCons = parallelTyCons
                             })
  = do { let parallelTyConsSet = mkNameSet parallelTyCons
       ; vVars         <- mapM vectVarMapping                  vars
       ; let varsSet = mkVarSet (map fst vVars)
       ; tyConRes1     <- mapM (vectTyConVectMapping varsSet)  tycons
       ; tyConRes2     <- mapM (vectTyConReuseMapping varsSet) tyconsReuse
       ; vParallelVars <- mapM vectVar                         parallelVars
       ; let (vTyCons, vDataCons, vScSels) = unzip3 (tyConRes1 ++ tyConRes2)
       ; return $ VectInfo
                  { vectInfoVar            = mkDVarEnv vVars `extendDVarEnvList` concat vScSels
                  , vectInfoTyCon          = mkNameEnv vTyCons
                  , vectInfoDataCon        = mkNameEnv (concat vDataCons)
                  , vectInfoParallelVars   = mkDVarSet vParallelVars
                  , vectInfoParallelTyCons = parallelTyConsSet
                  }
       }
  where
    -- Pair a variable with its vectorised counterpart; both Ids are
    -- looked up lazily (forkM).
    vectVarMapping name
      = do { vName <- lookupIfaceTop (mkLocalisedOccName mod mkVectOcc name)
           ; var   <- forkM (text "vect var"  <+> ppr name)  $
                        tcIfaceExtId name
           ; vVar  <- forkM (text "vect vVar [mod =" <+>
                             ppr mod <> text "; nameModule =" <+>
                             ppr (nameModule name) <> text "]" <+> ppr vName) $
                       tcIfaceExtId vName
           ; return (var, (var, vVar))
           }
      -- where
      --   lookupLocalOrExternalId name
      --     = do { let mb_id = lookupTypeEnv typeEnv name
      --          ; case mb_id of
      --                -- id is local
      --              Just (AnId id) -> return id
      --                -- name is not an Id => internal inconsistency
      --              Just _         -> notAnIdErr
      --                -- Id is external
      --              Nothing        -> tcIfaceExtId name
      --          }
      --
      --   notAnIdErr = pprPanic "TcIface.tcIfaceVectInfo: not an id" (ppr name)

    vectVar name
      = forkM (text "vect scalar var"  <+> ppr name)  $
          tcIfaceExtId name

    vectTyConVectMapping vars name
      = do { vName  <- lookupIfaceTop (mkLocalisedOccName mod mkVectTyConOcc name)
           ; vectTyConMapping vars name vName
           }

    vectTyConReuseMapping vars name
      = vectTyConMapping vars name name

    vectTyConMapping vars name vName
      = do { tycon  <- lookupLocalOrExternalTyCon name
           ; vTycon <- forkM (text "vTycon of" <+> ppr vName) $
                         lookupLocalOrExternalTyCon vName

               -- Map the data constructors of the original type constructor to those of the
               -- vectorised type constructor /unless/ the type constructor was vectorised
               -- abstractly; if it was vectorised abstractly, the workers of its data constructors
               -- do not appear in the set of vectorised variables.
               --
               -- NB: This is lazy!  We don't pull at the type constructors before we actually use
               --     the data constructor mapping.
           ; let isAbstract | isClassTyCon tycon = False
                            | datacon:_ <- tyConDataCons tycon
                                                 = not $ dataConWrapId datacon `elemVarSet` vars
                            | otherwise          = True
                 vDataCons  | isAbstract = []
                            | otherwise  = [ (dataConName datacon, (datacon, vDatacon))
                                           | (datacon, vDatacon) <- zip (tyConDataCons tycon)
                                                                        (tyConDataCons vTycon)
                                           ]

                   -- Map the (implicit) superclass and methods selectors as they don't occur in
                   -- the var map.
                 vScSels    | Just cls  <- tyConClass_maybe tycon
                            , Just vCls <- tyConClass_maybe vTycon
                            = [ (sel, (sel, vSel))
                              | (sel, vSel) <- zip (classAllSelIds cls) (classAllSelIds vCls)
                              ]
                            | otherwise
                            = []

           ; return ( (name, (tycon, vTycon))          -- (T, T_v)
                    , vDataCons                        -- list of (Ci, Ci_v)
                    , vScSels                          -- list of (seli, seli_v)
                    )
           }
      where
          -- we need a fully defined version of the type constructor to be able to extract
          -- its data constructors etc.
        lookupLocalOrExternalTyCon name
          = do { let mb_tycon = lookupTypeEnv typeEnv name
               ; case mb_tycon of
                     -- tycon is local
                   Just (ATyCon tycon) -> return tycon
                     -- name is not a tycon => internal inconsistency
                   Just _              -> notATyConErr
                     -- tycon is external
                   Nothing             -> tcIfaceTyConByName name
               }

        notATyConErr = pprPanic "TcIface.tcIfaceVectInfo: not a tycon" (ppr name)
{-
************************************************************************
* *
Types
* *
************************************************************************
-}
-- | Convert an 'IfaceType' to a 'Type'. Forall binders are brought
-- into scope via the CPS-style 'bindIfaceForAllBndr' before the body
-- is translated.
tcIfaceType :: IfaceType -> IfL Type
tcIfaceType = go
  where
    go (IfaceTyVar n)         = TyVarTy <$> tcIfaceTyVar n
    go (IfaceAppTy t1 t2)     = AppTy <$> go t1 <*> go t2
    go (IfaceLitTy l)         = LitTy <$> tcIfaceTyLit l
    go (IfaceFunTy t1 t2)     = FunTy <$> go t1 <*> go t2
    go (IfaceDFunTy t1 t2)    = FunTy <$> go t1 <*> go t2
    go (IfaceTupleTy s i tks) = tcIfaceTupleTy s i tks
    go (IfaceTyConApp tc tks)
      = do { tc' <- tcIfaceTyCon tc
           ; tks' <- mapM go (tcArgsIfaceTypes tks)
           ; return (mkTyConApp tc' tks') }
    go (IfaceForAllTy bndr t)
      = bindIfaceForAllBndr bndr $ \ tv' vis ->
        ForAllTy (TvBndr tv' vis) <$> go t
    go (IfaceCastTy ty co)   = CastTy <$> go ty <*> tcIfaceCo co
    go (IfaceCoercionTy co)  = CoercionTy <$> tcIfaceCo co
-- | Convert an interface tuple type. For a promoted tuple data
-- constructor we must re-supply the kind arguments, which are not
-- stored in the interface file.
tcIfaceTupleTy :: TupleSort -> IfaceTyConInfo -> IfaceTcArgs -> IfL Type
tcIfaceTupleTy sort info args
 = do { args' <- tcIfaceTcArgs args
      ; let arity = length args'
      ; base_tc <- tcTupleTyCon True sort arity
      ; case info of
          NoIfaceTyConInfo
            -> return (mkTyConApp base_tc args')

          IfacePromotedDataCon
            -> do { let tc        = promoteDataCon (tyConSingleDataCon base_tc)
                        kind_args = map typeKind args'
                  ; return (mkTyConApp tc (kind_args ++ args')) } }
-- See Note [Unboxed tuple RuntimeRep vars] in TyCon
-- See Note [Unboxed tuple RuntimeRep vars] in TyCon
tcTupleTyCon :: Bool    -- True <=> typechecking a *type* (vs. an expr)
             -> TupleSort
             -> Arity   -- the number of args. *not* the tuple arity.
             -> IfL TyCon
tcTupleTyCon in_type sort arity
  = case sort of
      ConstraintTuple -> do { thing <- tcIfaceGlobal (cTupleTyConName arity)
                            ; return (tyThingTyCon thing) }
      BoxedTuple   -> return (tupleTyCon Boxed   arity)
      UnboxedTuple -> return (tupleTyCon Unboxed arity')
        -- In a *type*, unboxed tuple applications carry one RuntimeRep
        -- argument per component before the component types, so halve
        -- the argument count to get the true tuple arity.
        where arity' | in_type   = arity `div` 2
                     | otherwise = arity
                      -- in expressions, we only have term args
-- | Type-check the argument list of an interface type-constructor
-- application, flattening it to a plain list of types first.
tcIfaceTcArgs :: IfaceTcArgs -> IfL [Type]
tcIfaceTcArgs args = mapM tcIfaceType (tcArgsIfaceTypes args)
-----------------------------------------
-----------------------------------------
-- | Type-check a context: simply each predicate type in turn.
tcIfaceCtxt :: IfaceContext -> IfL ThetaType
tcIfaceCtxt = mapM tcIfaceType
-----------------------------------------
tcIfaceTyLit :: IfaceTyLit -> IfL TyLit
tcIfaceTyLit (IfaceNumTyLit n) = return (NumTyLit n)
tcIfaceTyLit (IfaceStrTyLit n) = return (StrTyLit n)
{-
%************************************************************************
%* *
Coercions
* *
************************************************************************
-}
-- | Typecheck an interface coercion into a Core 'Coercion'.
-- Structural, mirroring 'tcIfaceType'.  Coercion variables are always
-- local in an interface coercion and are looked up with 'tcIfaceLclId'.
tcIfaceCo :: IfaceCoercion -> IfL Coercion
tcIfaceCo = go
  where
    go (IfaceReflCo r t)         = Refl r <$> tcIfaceType t
    go (IfaceFunCo r c1 c2)      = mkFunCo r <$> go c1 <*> go c2
    go (IfaceTyConAppCo r tc cs)
      = TyConAppCo r <$> tcIfaceTyCon tc <*> mapM go cs
    go (IfaceAppCo c1 c2)        = AppCo <$> go c1 <*> go c2
    go (IfaceForAllCo tv k c)    = do { k' <- go k
                                      -- The kind coercion is translated
                                      -- *outside* the scope of tv.
                                      ; bindIfaceTyVar tv $ \ tv' ->
                                        ForAllCo tv' k' <$> go c }
    go (IfaceCoVarCo n)          = CoVarCo <$> go_var n
    go (IfaceAxiomInstCo n i cs) = AxiomInstCo <$> tcIfaceCoAxiom n <*> pure i <*> mapM go cs
    go (IfaceUnivCo p r t1 t2)   = UnivCo <$> tcIfaceUnivCoProv p <*> pure r
                                          <*> tcIfaceType t1 <*> tcIfaceType t2
    go (IfaceSymCo c)            = SymCo    <$> go c
    go (IfaceTransCo c1 c2)      = TransCo  <$> go c1
                                            <*> go c2
    go (IfaceInstCo c1 t2)       = InstCo   <$> go c1
                                            <*> go t2
    go (IfaceNthCo d c)          = NthCo d  <$> go c
    go (IfaceLRCo lr c)          = LRCo lr  <$> go c
    go (IfaceCoherenceCo c1 c2)  = CoherenceCo <$> go c1
                                               <*> go c2
    go (IfaceKindCo c)           = KindCo   <$> go c
    go (IfaceSubCo c)            = SubCo    <$> go c
    go (IfaceAxiomRuleCo ax cos) = AxiomRuleCo <$> go_axiom_rule ax
                                               <*> mapM go cos

    go_var :: FastString -> IfL CoVar
    go_var = tcIfaceLclId

    -- Built-in (type-nat) axiom rules are identified by name only;
    -- an unknown name is a malformed interface, hence the panic.
    go_axiom_rule :: FastString -> IfL CoAxiomRule
    go_axiom_rule n =
      case Map.lookup n typeNatCoAxiomRules of
        Just ax -> return ax
        _  -> pprPanic "go_axiom_rule" (ppr n)

-- | Translate the provenance of a universal coercion.
tcIfaceUnivCoProv :: IfaceUnivCoProv -> IfL UnivCoProvenance
tcIfaceUnivCoProv IfaceUnsafeCoerceProv     = return UnsafeCoerceProv
tcIfaceUnivCoProv (IfacePhantomProv kco)    = PhantomProv <$> tcIfaceCo kco
tcIfaceUnivCoProv (IfaceProofIrrelProv kco) = ProofIrrelProv <$> tcIfaceCo kco
tcIfaceUnivCoProv (IfacePluginProv str)     = return $ PluginProv str
{-
************************************************************************
* *
Core
* *
************************************************************************
-}
-- | Typecheck an interface expression into a 'CoreExpr'.  Local
-- binders get fresh Names/uniques as they are brought into scope;
-- types and coercions are translated with 'tcIfaceType'/'tcIfaceCo'.
tcIfaceExpr :: IfaceExpr -> IfL CoreExpr
tcIfaceExpr (IfaceType ty)
  = Type <$> tcIfaceType ty

tcIfaceExpr (IfaceCo co)
  = Coercion <$> tcIfaceCo co

tcIfaceExpr (IfaceCast expr co)
  = Cast <$> tcIfaceExpr expr <*> tcIfaceCo co

tcIfaceExpr (IfaceLcl name)
  = Var <$> tcIfaceLclId name

tcIfaceExpr (IfaceExt gbl)
  = Var <$> tcIfaceExtId gbl

tcIfaceExpr (IfaceLit lit)
  = do lit' <- tcIfaceLit lit
       return (Lit lit')

-- Foreign calls get a brand-new unique for the FCallId.
tcIfaceExpr (IfaceFCall cc ty) = do
    ty' <- tcIfaceType ty
    u <- newUnique
    dflags <- getDynFlags
    return (Var (mkFCallId dflags u cc ty'))

-- Tuples are serialised without their type arguments; rebuild the
-- saturated application of the tuple data constructor's worker here.
tcIfaceExpr (IfaceTuple sort args)
  = do { args' <- mapM tcIfaceExpr args
       ; tc <- tcTupleTyCon False sort arity
       ; let con_tys = map exprType args'
             some_con_args = map Type con_tys ++ args'
             con_args = case sort of
               -- Unboxed tuples additionally need one RuntimeRep
               -- argument per component, ahead of the type args.
               UnboxedTuple -> map (Type . getRuntimeRep "tcIfaceExpr") con_tys ++ some_con_args
               _            -> some_con_args
                  -- Put the missing type arguments back in
             con_id   = dataConWorkId (tyConSingleDataCon tc)
       ; return (mkApps (Var con_id) con_args) }
  where
    arity = length args

tcIfaceExpr (IfaceLam (bndr, os) body)
  = bindIfaceBndr bndr $ \bndr' ->
    Lam (tcIfaceOneShot os bndr') <$> tcIfaceExpr body
  where
    -- Restore the one-shot flag recorded in the interface file.
    tcIfaceOneShot IfaceOneShot b = setOneShotLambda b
    tcIfaceOneShot _            b = b

tcIfaceExpr (IfaceApp fun arg)
  = App <$> tcIfaceExpr fun <*> tcIfaceExpr arg

-- Empty case: the scrutinee is bottom, so just cast it to the
-- result type.
tcIfaceExpr (IfaceECase scrut ty)
  = do { scrut' <- tcIfaceExpr scrut
       ; ty' <- tcIfaceType ty
       ; return (castBottomExpr scrut' ty') }

tcIfaceExpr (IfaceCase scrut case_bndr alts)  = do
  scrut' <- tcIfaceExpr scrut
  case_bndr_name <- newIfaceName (mkVarOccFS case_bndr)
  let
    scrut_ty   = exprType scrut'
    case_bndr' = mkLocalIdOrCoVar case_bndr_name scrut_ty
    tc_app     = splitTyConApp scrut_ty
        -- NB: Won't always succeed (polymorphic case)
        --     but won't be demanded in those cases
        -- NB: not tcSplitTyConApp; we are looking at Core here
        --     look through non-rec newtypes to find the tycon that
        --     corresponds to the datacon in this case alternative
  extendIfaceIdEnv [case_bndr'] $ do
    alts' <- mapM (tcIfaceAlt scrut' tc_app) alts
    return (Case scrut' case_bndr' (coreAltsType alts') alts')

tcIfaceExpr (IfaceLet (IfaceNonRec (IfLetBndr fs ty info) rhs) body)
  = do  { name    <- newIfaceName (mkVarOccFS fs)
        ; ty'     <- tcIfaceType ty
        ; id_info <- tcIdInfo False {- Don't ignore prags; we are inside one! -}
                              name ty' info
        ; let id = mkLocalIdOrCoVarWithInfo name ty' id_info
        ; rhs' <- tcIfaceExpr rhs
        -- Non-recursive: the binder only scopes over the body.
        ; body' <- extendIfaceIdEnv [id] (tcIfaceExpr body)
        ; return (Let (NonRec id rhs') body') }

tcIfaceExpr (IfaceLet (IfaceRec pairs) body)
  = do { ids <- mapM tc_rec_bndr (map fst pairs)
       -- Recursive: all binders scope over every RHS and the body.
       ; extendIfaceIdEnv ids $ do
       { pairs' <- zipWithM tc_pair pairs ids
       ; body' <- tcIfaceExpr body
       ; return (Let (Rec pairs') body') } }
 where
   tc_rec_bndr (IfLetBndr fs ty _)
     = do { name <- newIfaceName (mkVarOccFS fs)
          ; ty'  <- tcIfaceType ty
          ; return (mkLocalIdOrCoVar name ty') }
   -- The IdInfo is attached in a second pass, once the binders exist.
   tc_pair (IfLetBndr _ _ info, rhs) id
     = do { rhs' <- tcIfaceExpr rhs
          ; id_info <- tcIdInfo False {- Don't ignore prags; we are inside one! -}
                                (idName id) (idType id) info
          ; return (setIdInfo id id_info, rhs') }

tcIfaceExpr (IfaceTick tickish expr) = do
    expr' <- tcIfaceExpr expr
    -- If debug flag is not set: Ignore source notes
    dbgLvl <- fmap debugLevel getDynFlags
    -- NOTE(review): the comment above says source notes are ignored
    -- when debugging is *not* enabled, but this guard drops the tick
    -- when dbgLvl > 0 -- this looks inverted; compare with upstream
    -- GHC before relying on it.
    case tickish of
      IfaceSource{} | dbgLvl > 0
                    -> return expr'
      _otherwise    -> do
        tickish' <- tcIfaceTickish tickish
        return (Tick tickish' expr')
-------------------------
-- | Translate an interface tick annotation (HPC tick, cost centre,
-- or source note) into its Core 'Tickish' form.
tcIfaceTickish :: IfaceTickish -> IfM lcl (Tickish Id)
tcIfaceTickish (IfaceHpcTick modl ix)   = return (HpcTick modl ix)
tcIfaceTickish (IfaceSCC  cc tick push) = return (ProfNote cc tick push)
tcIfaceTickish (IfaceSource src name)   = return (SourceNote src name)

-------------------------
tcIfaceLit :: Literal -> IfL Literal
-- Integer literals deserialise to (LitInteger i <error thunk>)
-- so tcIfaceLit just fills in the type.
-- See Note [Integer literals] in Literal
tcIfaceLit (LitInteger i _)
  = do t <- tcIfaceTyConByName integerTyConName
       return (mkLitInteger i (mkTyConTy t))
-- All other literals carry their type already and pass through.
tcIfaceLit lit = return lit
-------------------------
-- | Typecheck one case alternative.  The scrutinee (for error
-- messages) and its decomposed tycon application are passed in so
-- data-constructor alternatives can be instantiated at the
-- scrutinee's type arguments.
tcIfaceAlt :: CoreExpr -> (TyCon, [Type])
           -> (IfaceConAlt, [FastString], IfaceExpr)
           -> IfL (AltCon, [TyVar], CoreExpr)
tcIfaceAlt _ _ (IfaceDefault, names, rhs)
  = ASSERT( null names ) do
    rhs' <- tcIfaceExpr rhs
    return (DEFAULT, [], rhs')

tcIfaceAlt _ _ (IfaceLitAlt lit, names, rhs)
  = ASSERT( null names ) do
    lit' <- tcIfaceLit lit
    rhs' <- tcIfaceExpr rhs
    return (LitAlt lit', [], rhs')

-- A case alternative is made quite a bit more complicated
-- by the fact that we omit type annotations because we can
-- work them out.  True enough, but its not that easy!
tcIfaceAlt scrut (tycon, inst_tys) (IfaceDataAlt data_occ, arg_strs, rhs)
  = do { con <- tcIfaceDataCon data_occ
       -- Debug-build sanity check: the constructor must belong to
       -- the scrutinee's tycon.
       ; when (debugIsOn && not (con `elem` tyConDataCons tycon))
              (failIfM (ppr scrut $$ ppr con $$ ppr tycon $$ ppr (tyConDataCons tycon)))
       ; tcIfaceDataAlt con inst_tys arg_strs rhs }

-- | Typecheck a data-constructor alternative: invent uniques for the
-- pattern binders, instantiate the constructor's representation
-- pattern at the scrutinee's type arguments, and check the RHS with
-- the binders in scope.
tcIfaceDataAlt :: DataCon -> [Type] -> [FastString] -> IfaceExpr
               -> IfL (AltCon, [TyVar], CoreExpr)
tcIfaceDataAlt con inst_tys arg_strs rhs
  = do { us <- newUniqueSupply
       ; let uniqs = uniqsFromSupply us
       ; let (ex_tvs, arg_ids)
              = dataConRepFSInstPat arg_strs uniqs con inst_tys

       ; rhs' <- extendIfaceEnvs  ex_tvs  $
                 extendIfaceIdEnv arg_ids $
                 tcIfaceExpr rhs
       ; return (DataAlt con, ex_tvs ++ arg_ids, rhs') }
{-
************************************************************************
* *
IdInfo
* *
************************************************************************
-}
-- | Typecheck the 'IdDetails' of an interface Id (vanilla, dictionary
-- function, or record selector).
tcIdDetails :: Type -> IfaceIdDetails -> IfL IdDetails
tcIdDetails _  IfVanillaId = return VanillaId
tcIdDetails ty IfDFunId
  = return (DFunId (isNewTyCon (classTyCon cls)))
  where
    -- Recover the class from the dfun's type to see whether it is a
    -- newtype-class dictionary.
    (_, _, cls, _) = tcSplitDFunTy ty
tcIdDetails _ (IfRecSelId tc naughty)
  = do { tc' <- either (fmap RecSelData . tcIfaceTyCon)
                       (fmap (RecSelPatSyn . tyThingPatSyn) . tcIfaceDecl False)
                       tc
       ; return (RecSelId { sel_tycon = tc', sel_naughty = naughty }) }
  where
    -- A record selector's parent is either a tycon or a pattern synonym.
    tyThingPatSyn (AConLike (PatSynCon ps)) = ps
    tyThingPatSyn _ = panic "tcIdDetails: expecting patsyn"
-- | Typecheck an Id's 'IdInfo'.  When @ignore_prags@ is set, all
-- pragma-derived info from the interface file is dropped and the Id
-- gets 'vanillaIdInfo'.
tcIdInfo :: Bool -> Name -> Type -> IfaceIdInfo -> IfL IdInfo
tcIdInfo ignore_prags name ty info
  | ignore_prags = return vanillaIdInfo
  | otherwise    = case info of
                     NoInfo       -> return vanillaIdInfo
                     HasInfo info -> foldlM tcPrag init_info info
  where
    -- Set the CgInfo to something sensible but uninformative before
    -- we start; default assumption is that it has CAFs
    init_info = vanillaIdInfo

    -- Fold one interface info item into the accumulated IdInfo.
    tcPrag :: IdInfo -> IfaceInfoItem -> IfL IdInfo
    tcPrag info HsNoCafRefs        = return (info `setCafInfo`   NoCafRefs)
    tcPrag info (HsArity arity)    = return (info `setArityInfo` arity)
    tcPrag info (HsStrictness str) = return (info `setStrictnessInfo` str)
    tcPrag info (HsInline prag)    = return (info `setInlinePragInfo` prag)

        -- The next two are lazy, so they don't transitively suck stuff in
    tcPrag info (HsUnfold lb if_unf)
      = do { unf <- tcUnfolding name ty info if_unf
           ; let info1 | lb        = info `setOccInfo` strongLoopBreaker
                       | otherwise = info
           ; return (info1 `setUnfoldingInfoLazily` unf) }
-- | Typecheck an interface unfolding.  The payload expression is
-- typechecked *lazily* via 'tcPragExpr'/'forkM_maybe', so unfoldings
-- that are never looked at cost nothing; if typechecking fails we
-- quietly fall back to 'NoUnfolding'.
tcUnfolding :: Name -> Type -> IdInfo -> IfaceUnfolding -> IfL Unfolding
tcUnfolding name _ info (IfCoreUnfold stable if_expr)
  = do  { dflags <- getDynFlags
        ; mb_expr <- tcPragExpr name if_expr
        ; let unf_src | stable    = InlineStable
                      | otherwise = InlineRhs
        ; return $ case mb_expr of
            Nothing   -> NoUnfolding
            Just expr -> mkUnfolding dflags unf_src
                           True {- Top level -}
                           (isBottomingSig strict_sig)
                           expr
        }
  where
     -- Strictness should occur before unfolding!
    strict_sig = strictnessInfo info
tcUnfolding name _ _ (IfCompulsory if_expr)
  = do { mb_expr <- tcPragExpr name if_expr
       ; return (case mb_expr of
                    Nothing   -> NoUnfolding
                    Just expr -> mkCompulsoryUnfolding expr) }

tcUnfolding name _ _ (IfInlineRule arity unsat_ok boring_ok if_expr)
  = do { mb_expr <- tcPragExpr name if_expr
       ; return (case mb_expr of
                   Nothing   -> NoUnfolding
                   Just expr -> mkCoreUnfolding InlineStable True expr guidance )}
  where
    -- INLINE-pragma unfoldings fire according to recorded guidance.
    guidance = UnfWhen { ug_arity = arity, ug_unsat_ok = unsat_ok, ug_boring_ok = boring_ok }

tcUnfolding name dfun_ty _ (IfDFunUnfold bs ops)
  = bindIfaceBndrs bs $ \ bs' ->
    do { mb_ops1 <- forkM_maybe doc $ mapM tcIfaceExpr ops
       ; return (case mb_ops1 of
                    Nothing   -> noUnfolding
                    Just ops1 -> mkDFunUnfolding bs' (classDataCon cls) ops1) }
  where
    doc = text "Class ops for dfun" <+> ppr name
    (_, _, cls, _) = tcSplitDFunTy dfun_ty
{-
For unfoldings we try to do the job lazily, so that we never type check
an unfolding that isn't going to be looked at.
-}
-- | Typecheck a pragma expression (an unfolding body) lazily: the
-- work is deferred with 'forkM_maybe', and 'Nothing' is returned if
-- it fails.  With -dcore-lint the resulting Core is also linted.
tcPragExpr :: Name -> IfaceExpr -> IfL (Maybe CoreExpr)
tcPragExpr name expr
  = forkM_maybe doc $ do
    core_expr' <- tcIfaceExpr expr

                -- Check for type consistency in the unfolding
    whenGOptM Opt_DoCoreLinting $ do
        in_scope <- get_in_scope
        dflags   <- getDynFlags
        case lintUnfolding dflags noSrcLoc in_scope core_expr' of
          Nothing       -> return ()
          Just fail_msg -> do { mod <- getIfModule
                              ; pprPanic "Iface Lint failure"
                                  (vcat [ text "In interface for" <+> ppr mod
                                        , hang doc 2 fail_msg
                                        , ppr name <+> equals <+> ppr core_expr'
                                        , text "Iface expr =" <+> ppr expr ]) }
    return core_expr'
  where
    doc = text "Unfolding of" <+> ppr name

    get_in_scope :: IfL VarSet -- Totally disgusting; but just for linting
    get_in_scope
      = do { (gbl_env, lcl_env) <- getEnvs
           -- Include the Ids of the module being compiled (if any),
           -- pulled out of the knot-tying environment.
           ; rec_ids <- case if_rec_types gbl_env of
                          Nothing -> return []
                          Just (_, get_env) -> do
                             { type_env <- setLclEnv () get_env
                             ; return (typeEnvIds type_env) }
           ; return (bindingsVars (if_tv_env lcl_env) `unionVarSet`
                     bindingsVars (if_id_env lcl_env) `unionVarSet`
                     mkVarSet rec_ids) }

    bindingsVars :: FastStringEnv Var -> VarSet
    bindingsVars ufm = mkVarSet $ nonDetEltsUFM ufm
      -- It's OK to use nonDetEltsUFM here because we immediately forget
      -- the ordering by creating a set
{-
************************************************************************
* *
Getting from Names to TyThings
* *
************************************************************************
-}
-- | Look up a global 'Name', consulting in order: the wired-in table,
-- the knot-tying environment for the module currently being compiled
-- (see Note [Tying the knot] below -- this MUST come before the HPT
-- lookup), the HPT/EPS, and finally the interface files themselves.
tcIfaceGlobal :: Name -> IfL TyThing
tcIfaceGlobal name
  | Just thing <- wiredInNameTyThing_maybe name
        -- Wired-in things include TyCons, DataCons, and Ids
        -- Even though we are in an interface file, we want to make
        -- sure the instances and RULES of this thing (particularly TyCon) are loaded
        -- Imagine: f :: Double -> Double
  = do { ifCheckWiredInThing thing; return thing }

  | otherwise
  = do  { env <- getGblEnv
        ; case if_rec_types env of {    -- Note [Tying the knot]
            Just (mod, get_type_env)
                | nameIsLocalOrFrom mod name
                -> do           -- It's defined in the module being compiled
                { type_env <- setLclEnv () get_type_env         -- yuk
                ; case lookupNameEnv type_env name of
                    Just thing -> return thing
                    Nothing   ->
                      pprPanic "tcIfaceGlobal (local): not found"
                               (ifKnotErr name (if_doc env) type_env)
                }

          ; _ -> do

        { hsc_env <- getTopEnv
        ; mb_thing <- liftIO (lookupTypeHscEnv hsc_env name)
        ; case mb_thing of {
            Just thing -> return thing ;
            Nothing    -> do

        { mb_thing <- importDecl name   -- It's imported; go get it
        ; case mb_thing of
            Failed err      -> failIfM err
            Succeeded thing -> return thing
        }}}}}

-- | The panic message displayed when forcing a knot-tied 'TyThing'
-- thunk fails to find the 'Name' in the in-flight type environment.
ifKnotErr :: Name -> SDoc -> TypeEnv -> SDoc
ifKnotErr name env_doc type_env = vcat
  [ text "You are in a maze of twisty little passages, all alike."
  , text "While forcing the thunk for TyThing" <+> ppr name
  , text "which was lazily initialized by" <+> env_doc <> text ","
  , text "I tried to tie the knot, but I couldn't find" <+> ppr name
  , text "in the current type environment."
  , text "If you are developing GHC, please read Note [Tying the knot]"
  , text "and Note [Type-checking inside the knot]."
  , text "Consider rebuilding GHC with profiling for a better stack trace."
  , hang (text "Contents of current type environment:")
       2 (ppr type_env)
  ]
-- Note [Tying the knot]
-- ~~~~~~~~~~~~~~~~~~~~~
-- The if_rec_types field is used in two situations:
--
-- a) Compiling M.hs, which indirectly imports Foo.hi, which mentions M.T
-- Then we look up M.T in M's type environment, which is splatted into if_rec_types
-- after we've built M's type envt.
--
-- b) In ghc --make, during the upsweep, we encounter M.hs, whose interface M.hi
-- is up to date. So we call typecheckIface on M.hi. This splats M.T into
-- if_rec_types so that the (lazily typechecked) decls see all the other decls
--
-- In case (b) it's important to do the if_rec_types check *before* looking in the HPT
-- Because if M.hs also has M.hs-boot, M.T will *already be* in the HPT, but in its
-- emasculated form (e.g. lacking data constructors).
-- | Look up an external 'Name' known to denote a 'TyCon'.
tcIfaceTyConByName :: IfExtName -> IfL TyCon
tcIfaceTyConByName name = tyThingTyCon <$> tcIfaceGlobal name

-- | Resolve an 'IfaceTyCon', promoting the data constructor when the
-- interface records that this occurrence is a promoted one.
tcIfaceTyCon :: IfaceTyCon -> IfL TyCon
tcIfaceTyCon (IfaceTyCon name info) = do
    thing <- tcIfaceGlobal name
    case info of
      NoIfaceTyConInfo     -> return (tyThingTyCon thing)
      IfacePromotedDataCon -> return (promoteDataCon (tyThingDataCon thing))

-- | Look up a (branched) coercion axiom by name.
tcIfaceCoAxiom :: Name -> IfL (CoAxiom Branched)
tcIfaceCoAxiom name = tyThingCoAxiom <$> tcIfaceGlobal name

-- | Look up a data constructor; panics if the name resolves to
-- anything else.
tcIfaceDataCon :: Name -> IfL DataCon
tcIfaceDataCon name = do
    thing <- tcIfaceGlobal name
    case thing of
      AConLike (RealDataCon dc) -> return dc
      _ -> pprPanic "tcIfaceExtDC" (ppr name $$ ppr thing)

-- | Look up an imported Id; panics if the name resolves to
-- anything else.
tcIfaceExtId :: Name -> IfL Id
tcIfaceExtId name = do
    thing <- tcIfaceGlobal name
    case thing of
      AnId id -> return id
      _       -> pprPanic "tcIfaceExtId" (ppr name $$ ppr thing)
{-
************************************************************************
* *
Bindings
* *
************************************************************************
-}
-- | Bind an interface Id binder: invent a fresh 'Name', typecheck the
-- binder's type, and run the continuation with the Id in scope.
bindIfaceId :: IfaceIdBndr -> (Id -> IfL a) -> IfL a
bindIfaceId (fs, ty) thing_inside
  = do  { name <- newIfaceName (mkVarOccFS fs)
        ; ty' <- tcIfaceType ty
        ; let id = mkLocalIdOrCoVar name ty'
        ; extendIfaceIdEnv [id] (thing_inside id) }

-- | 'bindIfaceId' lifted to a list of binders; each binder is in
-- scope for all the later ones.
bindIfaceIds :: [IfaceIdBndr] -> ([Id] -> IfL a) -> IfL a
bindIfaceIds [] thing_inside = thing_inside []
bindIfaceIds (b:bs) thing_inside
  = bindIfaceId b   $ \b'  ->
    bindIfaceIds bs $ \bs' ->
    thing_inside (b':bs')

-- | Bind a single binder, dispatching on whether it is an Id or a
-- type variable.
bindIfaceBndr :: IfaceBndr -> (CoreBndr -> IfL a) -> IfL a
bindIfaceBndr (IfaceIdBndr bndr) thing_inside
  = bindIfaceId bndr thing_inside
bindIfaceBndr (IfaceTvBndr bndr) thing_inside
  = bindIfaceTyVar bndr thing_inside

-- | 'bindIfaceBndr' lifted to a list (left-to-right scoping).
bindIfaceBndrs :: [IfaceBndr] -> ([CoreBndr] -> IfL a) -> IfL a
bindIfaceBndrs []     thing_inside = thing_inside []
bindIfaceBndrs (b:bs) thing_inside
  = bindIfaceBndr b $ \ b' ->
    bindIfaceBndrs bs $ \ bs' ->
    thing_inside (b':bs')

-----------------------
-- | Bind a telescope of forall binders, rebuilding the
-- 'TyVarBinder's with their visibility flags preserved.
bindIfaceForAllBndrs :: [IfaceForAllBndr] -> ([TyVarBinder] -> IfL a) -> IfL a
bindIfaceForAllBndrs [] thing_inside = thing_inside []
bindIfaceForAllBndrs (bndr:bndrs) thing_inside
  = bindIfaceForAllBndr bndr $ \tv vis ->
    bindIfaceForAllBndrs bndrs $ \bndrs' ->
    thing_inside (mkTyVarBinder vis tv : bndrs')

-- | Bind a single forall binder, passing its visibility through.
bindIfaceForAllBndr :: IfaceForAllBndr -> (TyVar -> ArgFlag -> IfL a) -> IfL a
bindIfaceForAllBndr (TvBndr tv vis) thing_inside
  = bindIfaceTyVar tv $ \tv' -> thing_inside tv' vis

-- | Bind an interface type variable: fresh 'Name', typechecked kind.
bindIfaceTyVar :: IfaceTvBndr -> (TyVar -> IfL a) -> IfL a
bindIfaceTyVar (occ,kind) thing_inside
  = do  { name <- newIfaceName (mkTyVarOccFS occ)
        ; tyvar <- mk_iface_tyvar name kind
        ; extendIfaceTyVarEnv [tyvar] (thing_inside tyvar) }

-- Helper: build a TyVar from a Name and an interface kind.
mk_iface_tyvar :: Name -> IfaceKind -> IfL TyVar
mk_iface_tyvar name ifKind
   = do { kind <- tcIfaceType ifKind
        ; return (Var.mkTyVar name kind) }

-- | Bind the binders of a tycon one after another, so each binder is
-- in scope for the kinds of the later ones.
bindIfaceTyConBinders :: [IfaceTyConBinder]
                      -> ([TyConBinder] -> IfL a) -> IfL a
bindIfaceTyConBinders [] thing_inside = thing_inside []
bindIfaceTyConBinders (b:bs) thing_inside
  = bindIfaceTyConBinderX bindIfaceTyVar b $ \ b'  ->
    bindIfaceTyConBinders bs              $ \ bs' ->
    thing_inside (b':bs')

bindIfaceTyConBinders_AT :: [IfaceTyConBinder]
                         -> ([TyConBinder] -> IfL a) -> IfL a
-- Used for type variable in nested associated data/type declarations
-- where some of the type variables are already in scope
--    class C a where { data T a b }
-- Here 'a' is in scope when we look at the 'data T'
bindIfaceTyConBinders_AT [] thing_inside
  = thing_inside []
bindIfaceTyConBinders_AT (b : bs) thing_inside
  = bindIfaceTyConBinderX bind_tv b $ \b' ->
    bindIfaceTyConBinders_AT bs     $ \bs' ->
    thing_inside (b':bs')
  where
    -- Reuse a tyvar already in scope (from the enclosing class);
    -- otherwise bind it afresh.
    bind_tv tv thing
      = do { mb_tv <- lookupIfaceTyVar tv
           ; case mb_tv of
               Just b' -> thing b'
               Nothing -> bindIfaceTyVar tv thing }

-- | Worker shared by the two functions above, parameterised over how
-- a single type variable is brought into scope.
bindIfaceTyConBinderX :: (IfaceTvBndr -> (TyVar -> IfL a) -> IfL a)
                      -> IfaceTyConBinder
                      -> (TyConBinder -> IfL a) -> IfL a
bindIfaceTyConBinderX bind_tv (TvBndr tv vis) thing_inside
  = bind_tv tv $ \tv' ->
    thing_inside (TvBndr tv' vis)
| sgillespie/ghc | compiler/iface/TcIface.hs | bsd-3-clause | 66,733 | 299 | 28 | 23,060 | 11,720 | 6,283 | 5,437 | -1 | -1 |
{-# LANGUAGE BangPatterns #-}
import Haha.Term
{-Church Number -}
-- | Church numerals: a number @n@ is the function applying its first
-- argument @n@ times to its second.
type ChNum t = (t -> t) -> t -> t

-- Zero applies the step function no times at all (K I).
ch0 :: ChNum t
ch0 = cK cI

-- Small numerals, each the successor of the previous one.
ch1 :: ChNum t
ch1 = chSucc ch0
ch2 :: ChNum t
ch2 = chSucc ch1
ch3 :: ChNum t
ch3 = chSucc ch2
ch4 :: ChNum t
ch4 = chSucc ch3

-- | Numeral for an arbitrary non-negative Int.
ch :: Int -> ChNum t
ch 0 = cK cI
ch i = chSucc (ch (i - 1))

-- | Successor: compose one extra application of the step function
-- (S B n = \f x -> f (n f x)).
chSucc :: ChNum t -> ChNum t
chSucc = cS cB
--
-- | Substitute @t@ for the outermost de Bruijn variable ('Var Zero');
-- indices above Zero are decremented by one.
--
-- NOTE(review): going under 'Abs' neither shifts the indices in @t@
-- nor raises the cutoff for 'Var Zero', so free variables of @t@ can
-- be captured by inner binders -- presumably this is only ever called
-- with closed @t@; verify against callers.
subs :: Term -> Term -> Term
subs Atom t = Atom
subs (Abs bodyTerm) t = Abs (subs bodyTerm t)
subs (App bodyTerm argTerm) t = App (subs bodyTerm t) (subs argTerm t)
subs (Var Zero) t = t
subs (Var (Succ n)) t = Var n

-- | Call-by-value evaluation: the argument is evaluated first (the
-- bang forces the recursive call) and only then substituted.
evalCbV :: Term -> Term
evalCbV Atom = Atom
evalCbV (Var n) = Var n
evalCbV (Abs lt) = Abs lt
evalCbV (App (Abs bodyT) argT) = let !t' = (evalCbV argT) in
                                  evalCbV $ subs bodyT t'
evalCbV (App bodyT argT) = evalCbV $ App (evalCbV bodyT) (evalCbV argT)

-- | Call-by-name evaluation: the argument is substituted unevaluated.
evalCbN :: Term -> Term
evalCbN Atom = Atom
evalCbN (Var n) = Var n
evalCbN (Abs lt) = Abs lt
evalCbN (App (Abs bodyT) argT) = evalCbN $ subs bodyT argT
evalCbN (App bodyT argT) = evalCbN $ App (evalCbN bodyT) (evalCbN argT)
{- Combinators -}
-- Standard combinators, given explicit type signatures (best practice
-- for top-level bindings; also documents their principal types).

-- | I: identity.
cI :: a -> a
cI = \x -> x

-- | K: constant function; discards its second argument.
cK :: a -> b -> a
cK = \x y -> x

-- | W: duplicates its argument.
cW :: (a -> a -> b) -> a -> b
cW = \f x -> f x x

-- | C: flips the argument order.
cC :: (a -> b -> c) -> b -> a -> c
cC = \f x y -> f y x

-- | B: function composition.
cB :: (b -> c) -> (a -> b) -> a -> c
cB = \f g x -> f (g x)

-- | S: applies both functions to the argument and combines.
cS :: (a -> b -> c) -> (a -> b) -> a -> c
cS = \f g x -> f x (g x)
-- The same combinators encoded as 'Term' values, built with the
-- 'fun' / 'v0'..'v2' / '<>' constructors from Haha.Term.
-- NOTE(review): '<>' here appears to be Haha.Term's application
-- operator (not Semigroup append), and v0 seems to denote the
-- *outermost* binder (cf. myW/myS) -- confirm against Haha.Term.
myI = fun v0
myK = fun (fun v0)
myW = fun $ fun $ (v0 <> v1) <> v1
myC = fun $ fun $ fun $ (v0 <> v2) <> v1
myB = fun $ fun $ fun $ v0 <> (v1 <> v2)
myS = fun $ fun $ fun $ (v0 <> v2) <> (v1 <> v2)
-- Self-application, and Omega: the classic divergent term.
myo = fun (v0 <> v0)
myO = myo <> myo
-- | Pretty-print the S combinator term.
main :: IO ()
main = putStrLn (pretty myS)
| jaiyalas/haha | experiment/v0.hs | mit | 1,509 | 32 | 10 | 415 | 994 | 452 | 542 | 53 | 1 |
-- |
-- Copyright : (c) 2017 Egor Tensin <Egor.Tensin@gmail.com>
-- License : MIT
-- Maintainer : Egor.Tensin@gmail.com
-- Stability : experimental
-- Portability : Windows-only
module Utils.Path
( ExpandedPath(..)
, pathExpandValue
, pathExpandAll
, pathAnyExpanded
) where
import Control.Monad.Trans.Except (ExceptT)
import qualified WindowsEnv
-- | A path entry as stored in the environment, paired with its
-- variable-expanded form.
data ExpandedPath = ExpandedPath
    { pathOriginal :: String -- ^ the entry exactly as stored
    , pathExpanded :: String -- ^ the entry after variable expansion
    } deriving (Eq, Show)
-- | Split an environment value into its path entries and pair each
-- entry with its expanded form.
--
-- For expandable values we expand the whole value once and split the
-- result; if that yields a different number of entries than splitting
-- the raw value (an expanded variable can itself contain separators),
-- we fall back to expanding each raw entry separately so that the
-- original/expanded pairs stay aligned.  Non-expandable values simply
-- expand to themselves.
pathExpandValue :: WindowsEnv.Value -> ExceptT IOError IO [ExpandedPath]
pathExpandValue value
    | WindowsEnv.valueExpandable value = do
        expanded <- expandOnce
        zipWith ExpandedPath split <$>
            if length expanded == length split
                then return expanded
                else expandEach
    | otherwise = return $ zipWith ExpandedPath split split
  where
    joined = WindowsEnv.valueString value
    split = WindowsEnv.pathSplit joined
    expandOnce = WindowsEnv.pathSplit <$> WindowsEnv.expand joined
    expandEach = WindowsEnv.expandAll split
-- | Expand every path and pair each input with its expansion.
pathExpandAll :: [String] -> ExceptT IOError IO [ExpandedPath]
pathExpandAll paths = do
    expanded <- WindowsEnv.expandAll paths
    return (zipWith ExpandedPath paths expanded)

-- | Did expansion change this entry at all?
pathIsExpanded :: ExpandedPath -> Bool
pathIsExpanded entry = pathOriginal entry /= pathExpanded entry

-- | Did expansion change any entry in the list?
pathAnyExpanded :: [ExpandedPath] -> Bool
pathAnyExpanded entries = any pathIsExpanded entries
| egor-tensin/windows-env | app/Utils/Path.hs | mit | 1,410 | 0 | 11 | 296 | 317 | 168 | 149 | 30 | 2 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
-- | Module for working with dependency trees.
module NLP.Partage.AStar.DepTree
( Tree (..)
, Dep (..)
, mapDep
, discard
, toRose
, fromDeriv
) where
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import qualified Data.Tree as R
import qualified NLP.Partage.AStar.Deriv.Gorn as D
import qualified NLP.Partage.Tree.Other as O
-- import qualified NLP.Partage.EdgeTree as Edge
---------------------------------------------------
-- Dependency Tree
---------------------------------------------------
-- | A dependency tree with node (token) labels of type `a` and
-- arc labels of type `b`. We use a `Data.Map` to represent
-- dependency subtrees because (i) their ordering doesn't
-- matter, (ii) we assume they cannot repeat.
data Tree a b = Tree
  -- { root :: S.Set (Tok t)
  -- { root :: S.Set a
  { root :: a
  -- ^ Label assigned to the root of a dependency tree
  , children :: M.Map (Tree a b) b
  -- ^ Children dependency trees and the corresponding dependency labels
  } deriving (Show, Eq, Ord)
-- type Tree n t = Edge.Tree (S.Set t) (Dep n)
-- | Dependency label: the kind of grammar operation that attached a
-- child tree to its head.
data Dep
  = Arg
  -- ^ Argument dependency (related to substitution)
  | Mod
  -- ^ Modifier dependency (related to adjunction)
  -- | Top
  -- -- ^ Dummy dependency to be used with top-level (root) nodes
  deriving (Show, Eq, Ord)
-- | Apply a function to every arc label in the tree.  This cannot be
-- an implementation of `fmap` because rebuilding the children map
-- requires `Ord c`.
mapDep :: (Ord a, Ord c) => (b -> c) -> Tree a b -> Tree a c
mapDep f (Tree rootLabel childMap) = Tree rootLabel relabelled
  where
    relabelled = M.fromList
      [ (mapDep f subTree, f dep)
      | (subTree, dep) <- M.toList childMap ]
-- | Transform the dependency tree to a rose tree; each node carries
-- its label together with the arc label that attached it to its
-- parent (`Nothing` at the root).
toRose :: Tree a b -> R.Tree (a, Maybe b)
toRose = convert Nothing
  where
    convert arc (Tree lbl childMap) =
      R.Node (lbl, arc)
        [ convert (Just dep) subTree
        | (subTree, dep) <- M.toList childMap ]
-- | Discard dependency nodes which satisfy the given predicate; the
-- children of a discarded node are promoted in its place, so the
-- result is a forest (empty when every node is discarded).
discard :: (Ord a, Ord b) => (a -> Bool) -> Tree a b -> [Tree a b]
discard p tree
  | p (root tree) = map fst kept
  | otherwise     = [Tree (root tree) (M.fromList kept)]
  where
    kept = concatMap promote (M.toList (children tree))
    -- Recursively filter one child, keeping its arc label for every
    -- surviving subtree.
    promote (child, dep) = [(child', dep) | child' <- discard p child]
-- -- | Transform a rose tree to a dependency tree.
-- -- Top-level arc dependency label is ignored.
-- fromRose :: R.Tree (a, b) -> Tree a b
-- fromRose = undefined
---------------------------------------------------
-- Derivation -> Dependency Tree Conversion
---------------------------------------------------
-- | Create a dependency tree from a derivation tree.
-- Terminals become nodes of the dependency tree, enriched
-- with information about the position in the input sentence.
-- Non-terminals assigned to roots of the individual ETs
-- become arcs.
fromDeriv :: (Ord t) => D.Deriv n t -> Tree (S.Set t) Dep
fromDeriv D.Deriv{..} = Tree
  { root = S.fromList (O.project rootET)
  , children = M.fromList
    [ (fromDeriv deriv, modTyp gorn rootET)
    | (gorn, derivs) <- M.toList modifs
    , deriv <- derivs ] }
  where
    -- Decide the dependency type from the node at the given Gorn
    -- address: a leaf was a substitution site (Arg), an inner node an
    -- adjunction site (Mod).
    modTyp gorn tree = case O.follow gorn tree of
      -- Fixed typo in the error message ("incorrenct" -> "incorrect").
      Nothing -> error "fromDeriv.modTyp: incorrect Gorn address"
      Just subTree -> case R.subForest subTree of
        [] -> Arg -- meaning that substitution was used
        _  -> Mod -- meaning that adjunction was used
-- fromDeriv rootDeriv =
-- go rootDeriv (Top $ rootNT rootDeriv)
-- where
-- go D.Deriv{..} edgeLabel =
-- R.Node
-- { R.rootLabel = Edge.Node
-- { Edge.nodeLabel = S.fromList (O.project rootET)
-- , Edge.edgeLabel = edgeLabel }
-- , R.subForest =
-- [ go deriv (Top $ rootNT deriv)
-- -- TODO: above we take `Top` for granted
-- | (_gorn, derivs) <- M.toList modifs
-- , deriv <- derivs ]
-- }
-- | Obtain the non-terminal in the root of the given derivation.
_rootNT :: D.Deriv n t -> n
_rootNT deriv = case R.rootLabel (D.rootET deriv) of
  O.NonTerm x -> x
  _ -> error "rootNT: ET's root not a non-terminal"
| kawu/partage | src/NLP/Partage/AStar/DepTree.hs | bsd-2-clause | 4,446 | 0 | 13 | 1,080 | 857 | 485 | 372 | 61 | 3 |
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for ganeti-htools.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.HTools.Backend.Text (testHTools_Backend_Text) where
import Test.QuickCheck
import qualified Data.Map as Map
import Data.List
import Data.Maybe
import System.Time (ClockTime(..))
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Test.Ganeti.TestHTools
import Test.Ganeti.HTools.Instance (genInstanceSmallerThanNode,
genInstanceOnNodeList)
import Test.Ganeti.HTools.Node (genNode, genOnlineNode, genUniqueNodeList)
import Ganeti.BasicTypes
import qualified Ganeti.HTools.Backend.Text as Text
import qualified Ganeti.HTools.Cluster as Cluster
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Loader as Loader
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Types as Types
import qualified Ganeti.Utils as Utils
-- * Instance text loader tests
-- | Build the textual (pipe-separated) representation of an instance
-- by hand, load it with 'Text.loadInst', and check that every field
-- round-trips.  Also checks that a malformed row (only 9 fields,
-- duplicated primary node) is rejected.
prop_Load_Instance :: String -> Int -> Int -> Int -> Types.InstanceStatus
                   -> NonEmptyList Char -> String
                   -> NonNegative Int -> NonNegative Int -> Bool
                   -> Types.DiskTemplate -> Int -> Property
prop_Load_Instance name mem dsk vcpus status
                   (NonEmpty pnode) snode
                   (NonNegative pdx) (NonNegative sdx) autobal dt su =
  pnode /= snode && pdx /= sdx ==>
  let vcpus_s = show vcpus
      dsk_s = show dsk
      mem_s = show mem
      su_s = show su
      status_s = Types.instanceStatusToRaw status
      -- An empty secondary-node name means no secondary at all.
      ndx = if null snode
              then [(pnode, pdx)]
              else [(pnode, pdx), (snode, sdx)]
      nl = Map.fromList ndx
      tags = ""
      sbal = if autobal then "Y" else "N"
      sdt = Types.diskTemplateToRaw dt
      inst = Text.loadInst nl
             [name, mem_s, dsk_s, vcpus_s, status_s,
              sbal, pnode, snode, sdt, tags, su_s]
      -- Deliberately malformed: too few fields.
      fail1 = Text.loadInst nl
              [name, mem_s, dsk_s, vcpus_s, status_s,
               sbal, pnode, pnode, tags]
  in case inst of
       Bad msg -> failTest $ "Failed to load instance: " ++ msg
       Ok (_, i) -> printTestCase "Mismatch in some field while\
                                  \ loading the instance" $
                    Instance.name i == name &&
                    Instance.vcpus i == vcpus &&
                    Instance.mem i == mem &&
                    Instance.pNode i == pdx &&
                    Instance.sNode i == (if null snode
                                           then Node.noSecondary
                                           else sdx) &&
                    Instance.autoBalance i == autobal &&
                    Instance.spindleUse i == su &&
                    isBad fail1

-- | Loading an instance row with the wrong number of fields (valid
-- rows have 10-12) must fail with the expected error message.
prop_Load_InstanceFail :: [(String, Int)] -> [String] -> Property
prop_Load_InstanceFail ktn fields =
  length fields < 10 || length fields > 12 ==>
    case Text.loadInst nl fields of
      Ok _ -> failTest "Managed to load instance from invalid data"
      Bad msg -> printTestCase ("Unrecognised error message: " ++ msg) $
                 "Invalid/incomplete instance data: '" `isPrefixOf` msg
  where nl = Map.fromList ktn

-- | Generator producing an instance placed on a generated list of
-- unique online nodes, plus the node name association.
genInstanceNodes :: Gen (Instance.Instance, Node.List, Types.NameAssoc)
genInstanceNodes = do
    (nl, na) <- genUniqueNodeList genOnlineNode
    inst <- genInstanceOnNodeList nl
    return (inst, nl, na)

-- | Serialising an instance and loading it back is the identity.
prop_InstanceLSIdempotent :: Property
prop_InstanceLSIdempotent =
  forAll genInstanceNodes $ \(inst, nl, assoc) ->
    (Text.loadInst assoc . Utils.sepSplit '|' . Text.serializeInstance nl)
      inst ==? Ok (Instance.name inst, inst)
-- | Build a node's textual fields ('?' stands for a negative, i.e.
-- unknown, value) and check that 'Text.loadNode' either marks the
-- node offline (when forced offline or any field is broken) or
-- round-trips every field.
prop_Load_Node :: String -> Int -> Int -> Int -> Int -> Int
               -> Int -> Bool -> Bool
prop_Load_Node name tm nm fm td fd tc fo =
  let conv v = if v < 0
                 then "?"
                 else show v
      tm_s = conv tm
      nm_s = conv nm
      fm_s = conv fm
      td_s = conv td
      fd_s = conv fd
      tc_s = conv tc
      fo_s = if fo
               then "Y"
               else "N"
      any_broken = any (< 0) [tm, nm, fm, td, fd, tc]
      gid = Group.uuid defGroup
  in case Text.loadNode defGroupAssoc
       [name, tm_s, nm_s, fm_s, td_s, fd_s, tc_s, fo_s, gid] of
       Nothing -> False
       Just (name', node) ->
         if fo || any_broken
           then Node.offline node
           else Node.name node == name' && name' == name &&
                Node.alias node == name &&
                Node.tMem node == fromIntegral tm &&
                Node.nMem node == nm &&
                Node.fMem node == fm &&
                Node.tDsk node == fromIntegral td &&
                Node.fDsk node == fd &&
                Node.tCpu node == fromIntegral tc

-- | Loading a node row with a field count other than 8 must fail.
prop_Load_NodeFail :: [String] -> Property
prop_Load_NodeFail fields =
  length fields /= 8 ==> isNothing $ Text.loadNode Map.empty fields

-- | Serialising a node and loading it back is the identity, once the
-- fields that 'loadNode' sets itself (failN1, offline, policy) are
-- normalised to the loader's defaults.
prop_NodeLSIdempotent :: Property
prop_NodeLSIdempotent =
  forAll (genNode (Just 1) Nothing) $ \node ->
  -- override failN1 to what loadNode returns by default
  let n = Node.setPolicy Types.defIPolicy $
          node { Node.failN1 = True, Node.offline = False }
  in
    (Text.loadNode defGroupAssoc.
         Utils.sepSplit '|' . Text.serializeNode defGroupList) n ==?
    Just (Node.name n, n)
prop_ISpecIdempotent :: Types.ISpec -> Property
prop_ISpecIdempotent ispec =
case Text.loadISpec "dummy" . Utils.sepSplit ',' .
Text.serializeISpec $ ispec of
Bad msg -> failTest $ "Failed to load ispec: " ++ msg
Ok ispec' -> ispec ==? ispec'
prop_MultipleMinMaxISpecsIdempotent :: [Types.MinMaxISpecs] -> Property
prop_MultipleMinMaxISpecsIdempotent minmaxes =
case Text.loadMultipleMinMaxISpecs "dummy" . Utils.sepSplit ';' .
Text.serializeMultipleMinMaxISpecs $ minmaxes of
Bad msg -> failTest $ "Failed to load min/max ispecs: " ++ msg
Ok minmaxes' -> minmaxes ==? minmaxes'
-- | Instance-policy serialise/load round trip.
prop_IPolicyIdempotent :: Types.IPolicy -> Property
prop_IPolicyIdempotent ipol =
  case Text.loadIPolicy . Utils.sepSplit '|' $
       Text.serializeIPolicy owner ipol of
    -- BUG FIX: the failure message used to say "ispec" (copied from
    -- 'prop_ISpecIdempotent'); this property is about instance policies.
    Bad msg -> failTest $ "Failed to load ipolicy: " ++ msg
    Ok res -> (owner, ipol) ==? res
  where owner = "dummy"
-- | This property, while being in the text tests, does more than just
-- test end-to-end the serialisation and loading back workflow; it
-- also tests the Loader.mergeData and the actual
-- Cluster.iterateAlloc (for well-behaving w.r.t. instance
-- allocations, not for the business logic). As such, it's a quite
-- complex and slow test, and that's the reason we restrict it to
-- small cluster sizes.
--
-- Steps: allocate up to @maxiter@ copies of a random instance on a
-- small cluster, serialise the resulting cluster, parse it back, and
-- check that groups, nodes, instances, tags and the policy survive
-- the round trip unchanged.
prop_CreateSerialise :: Property
prop_CreateSerialise =
  forAll genTags $ \ctags ->
  forAll (choose (1, 20)) $ \maxiter ->
  forAll (choose (2, 10)) $ \count ->
  forAll genOnlineNode $ \node ->
  forAll (genInstanceSmallerThanNode node) $ \inst ->
  let nl = makeSmallCluster node count
      reqnodes = Instance.requiredNodes $ Instance.diskTemplate inst
  in case Cluster.genAllocNodes defGroupList nl reqnodes True >>= \allocn ->
     Cluster.iterateAlloc nl Container.empty (Just maxiter) inst allocn [] []
     of
       Bad msg -> failTest $ "Failed to allocate: " ++ msg
       Ok (_, _, _, [], _) -> printTestCase
                              "Failed to allocate: no allocations" False
       Ok (_, nl', il', _, _) ->
         let cdata = Loader.ClusterData defGroupList nl' il' ctags
                     Types.defIPolicy
             saved = Text.serializeCluster cdata
         in case Text.parseData saved >>= Loader.mergeData [] [] [] [] (TOD 0 0)
            of
              Bad msg -> failTest $ "Failed to load/merge: " ++ msg
              Ok (Loader.ClusterData gl2 nl2 il2 ctags2 cpol2) ->
                conjoin [ ctags ==? ctags2
                        , Types.defIPolicy ==? cpol2
                        , il' ==? il2
                        , defGroupList ==? gl2
                        , nl' ==? nl2
                        ]
-- Template Haskell splice generating the test suite registration from
-- the quoted property names above.
testSuite "HTools/Backend/Text"
            [ 'prop_Load_Instance
            , 'prop_Load_InstanceFail
            , 'prop_InstanceLSIdempotent
            , 'prop_Load_Node
            , 'prop_Load_NodeFail
            , 'prop_NodeLSIdempotent
            , 'prop_ISpecIdempotent
            , 'prop_MultipleMinMaxISpecsIdempotent
            , 'prop_IPolicyIdempotent
            , 'prop_CreateSerialise
            ]
| apyrgio/snf-ganeti | test/hs/Test/Ganeti/HTools/Backend/Text.hs | bsd-2-clause | 9,754 | 0 | 29 | 2,673 | 2,226 | 1,190 | 1,036 | 180 | 5 |
-- |Note: I copied a lot of this code from another project called
-- "tn". So some of the things are named @tnThis@ and @tnThat@. Changing
-- the variable names seemed more trouble than it was worth.
module Main where
import Desbot
import Control.Concurrent
import Control.Monad (forM_)
import Control.Lens
import qualified Data.ByteString as B
import Data.FileEmbed
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Time
import Options.Applicative
import System.Directory
-- |Possible commands we can run
data Command = Run (Maybe FilePath) -- ^Run the bot; 'Nothing' means use the default config path
             | Scaffold -- ^Create necessary configuration files
             | ShowVersion -- ^Print out the version
  deriving (Eq, Show)
-- | Main entry point: parse the command line and dispatch on the
-- selected 'Command' ('ShowVersion', 'Scaffold' or 'Run').
main :: IO ()
main =
  do result <- customExecParser tnPrefs (infoHelper tnParser tnInfo)
     case result of
       ShowVersion -> T.putStrLn versionText
       Scaffold ->
         do c <- confDir
            mkdir_p c
            d <- dataDir
            mkdir_p d
            cfp <- confFilePath
            scfp <- sampleConfFilePath
            B.writeFile scfp sampleConfigText
            -- BUG FIX: message previously read "You chould edit".
            putStrLn (mconcat [ "You should edit "
                              , scfp
                              , " now, and move it to "
                              , cfp
                              ])
       Run fp ->
         do cfp <- case fp of
                     Just p -> return p
                     Nothing -> confFilePath
            readPrivateConf cfp >>= runConfig
  where tnPrefs = prefs (mconcat [ disambiguate
                                 , showHelpOnError
                                 ])
        -- BUG FIX: "evalutation" -> "evaluation" in the --help text.
        tnInfo = mconcat [ fullDesc
                         , progDesc "Haskell evaluation IRC bot"
                         ]
        -- The version command is special, because it's a top-level
        -- flag rather than a subcommand.
        tnParser = versionCmd <|> tnParser'
        tnParser' = subconcat [ scaffoldCmd
                              , runCmd
                              ]
        subconcat = altConcat . fmap subparser
        x </> y = mconcat [x, "/", y]
        mkdir_p = createDirectoryIfMissing True
        sampleConfigText = $(embedFile "config/config.sample.yaml")
        confDir = getXdgDirectory XdgConfig "desbot"
        dataDir = getXdgDirectory XdgData "desbot"
        confFilePath = fmap (</> "config.yaml") confDir
        sampleConfFilePath = fmap (</> "config.sample.yaml") confDir
        runConfig conf =
          do -- We're changing the logFile if the user didn't specify one
             --
             -- It will look something like desbot-irc.freenode.net-20160404-213333.log
             --
             -- This is to prevent collisions.
             let nick' = T.unpack (conf ^. nick)
                 host' = T.unpack (conf ^. host)
             conf' <- case conf ^. logFile of
                        Just _ -> return conf
                        Nothing ->
                          do dp <- dataDir
                             currentTime <- getCurrentTime
                             let formattedTime = formatTime defaultTimeLocale
                                                            "%Y%m%d-%H%M%S"
                                                            currentTime
                                 logFileName = mconcat [ nick'
                                                       , "-"
                                                       , host'
                                                       , "-"
                                                       , formattedTime
                                                       , ".log"
                                                       ]
                                 finalFileName = dp </> logFileName
                             return (set logFile (Just finalFileName) conf)
             -- Finally, run the bot
             connect'' conf'
-- | The top-level @desbot --version@ flag.
versionCmd :: Parser Command
versionCmd = flag' ShowVersion mods
  where
    mods = mconcat [ help "Show the version"
                   , long "version"
                   ]
-- | The @desbot scaffold@ subcommand.
scaffoldCmd :: Mod CommandFields Command
scaffoldCmd = command "scaffold" (infoHelper scaffoldParser scaffoldInfo)
  where
    scaffoldParser = pure Scaffold
    scaffoldInfo = mconcat [ fullDesc
                           , progDesc "Create necessary configuration files and directories."
                           ]
-- | The @desbot run@ subcommand, with an optional @--config@/@-c@
-- path to the configuration file.
runCmd :: Mod CommandFields Command
runCmd = command "run" (infoHelper runParser runInfo)
  where
    runParser = Run <$> altConcat [ fmap Just configOption
                                  , pure Nothing
                                  ]
    runInfo = mconcat [ fullDesc
                      , progDesc "Actually run the bot."
                      ]
    configOption = strOption (mconcat [ long "config"
                                      , short 'c'
                                      , help "Path to a configuration file"
                                      ])
-- |Helper function that is for some reason not in
-- optparse-applicative. It just shortens typing a bit, so you don't
-- have to type
--
-- > info (helper <*> a) ...
--
-- Instead, it's just
--
-- > infoHelper a ...
infoHelper :: Parser a -> InfoMod a -> ParserInfo a
-- 'helper' injects the standard --help option into the given parser.
infoHelper a = info (helper <*> a)
-- | Collapse a collection of alternatives into a single one, keeping
-- the first success (roughly 'mconcat' for 'Alternative').
altConcat :: (Foldable t, Alternative f) => t (f a) -> f a
altConcat alternatives = foldr step empty alternatives
  where
    -- Try the current alternative first, falling back to the rest.
    step current rest = current <|> rest
| pharpend/desbot | bin/Main.hs | bsd-3-clause | 5,647 | 0 | 19 | 2,561 | 924 | 486 | 438 | -1 | -1 |
{-# LANGUAGE QuasiQuotes #-}
module System.Console.CmdArgs.Test.SplitJoin(test) where
import System.Console.CmdArgs.Explicit
import System.Console.CmdArgs.Test.Util
import Control.Monad
-- | Round-trip checks over the pregenerated cases: 'splitArgs' must
-- reproduce the expected parse, and 'joinArgs' must be stable under a
-- further split/join round trip.
test = do
    forM_ tests $ \(src,parsed) -> do
        let a = splitArgs src
            b1 = joinArgs parsed
            b2 = joinArgs $ splitArgs b1
        -- Idiom: 'unless' instead of 'if c then return () else act'.
        unless (a == parsed) $
            failure "splitArgs" [("Given ",src),("Expected",show parsed),("Found ",show a)]
        unless (b1 == b2) $
            failure "joinArgs" [("Given ",show parsed),("Expected",b1),("Found ",b2)]
    success
{-
newtype CmdLine = CmdLine String deriving Show
instance Arbitrary CmdLine where
arbitrary = fmap CmdLine $ listOf $ elements "abcd \\/\'\""
generateTests :: IO ()
generateTests = withTempFile $ \src -> do
writeFile src "import System.Environment\nmain = print =<< getArgs\n"
quickCheckWith stdArgs{chatty=False} $ \(CmdLine x) -> unsafePerformIO $ do
putStr $ ",(,) " ++ (show x) ++ " "
system $ "runhaskell \"" ++ src ++ "\" " ++ x
return True
withTempFile :: (FilePath -> IO a) -> IO a
withTempFile f = bracket
(do (file,h) <- openTempFile "." "cmdargs.hs"; hClose h; return file)
removeFile
f
-}
-- Pregenerate the QuickCheck tests and run them through the system console
-- Not done each time for three reasons
-- * Avoids an extra dependency on QuickCheck + process
-- * Slow to run through the command line
-- * Can't figure out how to read the output, without adding more escaping (which breaks the test)
-- Each pair is (raw command line, expected result of 'splitArgs').
tests =
  [(,) "" []
  ,(,) "c" ["c"]
  ,(,) "b" ["b"]
  ,(,) "\\" ["\\"]
  ,(,) "'//" ["'//"]
  ,(,) "a" ["a"]
  ,(,) "cda" ["cda"]
  ,(,) "b'" ["b'"]
  ,(,) "" []
  ,(,) " " []
  ,(,) "/b" ["/b"]
  ,(,) "\"b/\"d a'b'b" ["b/d","a'b'b"]
  ,(,) "d'c a\"/\\" ["d'c","a/\\"]
  ,(,) "d" ["d"]
  ,(,) "bb' " ["bb'"]
  ,(,) "b'\\" ["b'\\"]
  ,(,) "\"\\ac" ["\\ac"]
  ,(,) "\\'\"abbb\"c/''' \\ c" ["\\'abbbc/'''","\\","c"]
  ,(,) "/bbdbb a " ["/bbdbb","a"]
  ,(,) "b\" d" ["b d"]
  ,(,) "" []
  ,(,) "\\cc/''\\b\\ccc\\'\\b\\" ["\\cc/''\\b\\ccc\\'\\b\\"]
  ,(,) "/" ["/"]
  ,(,) "///\"b\\c/b\"cd//c'\"" ["///b\\c/bcd//c'"]
  ,(,) "\\\"d\\\\' /d\\\\/bb'a /\\d" ["\"d\\\\'","/d\\\\/bb'a","/\\d"]
  ,(,) "c/ \\''/c b\\'" ["c/","\\''/c","b\\'"]
  ,(,) "dd'b\\\\\\' /c'aaa\"" ["dd'b\\\\\\'","/c'aaa"]
  ,(,) "b'd''\\/ b\\'b'db/'cd " ["b'd''\\/","b\\'b'db/'cd"]
  ,(,) "a\"ba\\/\\ " ["aba\\/\\ "]
  ,(,) "b\"'dd'c /b/c\"bbd \"\"\\ad'\"c\\\"" ["b'dd'c /b/cbbd","\\ad'c\""]
  ,(,) "da 'c\\\\acd/'dbaaa///dccbc a \\" ["da","'c\\\\acd/'dbaaa///dccbc","a","\\"]
  ,(,) "a'ac \"da\"" ["a'ac","da"]
  ,(,) "\"'\\\"/\"\"b\\b \"'\"\"ccd'a\"/c /da " ["'\"/\"b\\b","'\"ccd'a/c /da "]
  ,(,) "d\"\\c\\\\cb c/\"aa' b\"\\/d \"'c c/" ["d\\c\\\\cb c/aa'","b\\/d 'c","c/"]
  ,(,) "dbc\\/\"\"//c/\"accda" ["dbc\\///c/accda"]
  ,(,) "aca a'' \\ c b'\\/d\\" ["aca","a''","\\","c","b'\\/d\\"]
  ,(,) "dc\"bc/a\\ccdd\\\\aad\\c'ab '\\cddcdba" ["dcbc/a\\ccdd\\\\aad\\c'ab '\\cddcdba"]
  ,(,) " c'\"ba \"b\\dc\"" ["c'ba b\\dc"]
  ,(,) "a\\acd/a \"'c /'c'" ["a\\acd/a","'c /'c'"]
  ,(,) " ac ddc/\"\"a/\\bd\\d c'cac\"c\\a/a''c" ["ac","ddc/a/\\bd\\d","c'cacc\\a/a''c"]
  ,(,) "b/cd\"//bb\"/daaab/ b b \"' d\"a\" 'd b" ["b/cd//bb/daaab/","b","b","' da 'd b"]
  ,(,) "a\"cc'cd\"\\'ad '\"dcc acb\"\\\\" ["acc'cd\\'ad","'dcc acb\\\\"]
  ,(,) "/bc/bc'/\"d \"a/\"\\ad aba\\da" ["/bc/bc'/d a/\\ad aba\\da"]
  ,(,) "b\\a" ["b\\a"]
  ,(,) "/dc ''c'a\"'/'\\ /'cd\\'d/'db/b\"' cabacaaa\"\"dd" ["/dc","''c'a'/'\\ /'cd\\'d/'db/b'","cabacaaadd"]
  ,(,) "\"ac\\\"c'/c'b\"b\"b'd\"c\"\"" ["ac\"c'/c'bbb'dc"]
  ,(,) "/ 'ccc\"d\\dc'\"'\\ b" ["/","'cccd\\dc''\\","b"]
  ,(,) " '\"/\\cc\\/c '\\\\" ["'/\\cc\\/c '\\\\"]
  ,(,) "\\ \\' ' /d \"cc\\\\//da\"d'a/a\"ca\\\\\"\\cb c\"d'b 'acb" ["\\","\\'","'","/d","cc\\\\//dad'a/aca\\\\cb","cd'b 'acb"]
  ,(,) "a\"\"d'\"a\"\\ \\c db'da/d\\c\"a/ aa c/db" ["ad'a\\","\\c","db'da/d\\ca/ aa c/db"]
  ,(,) " d\\" ["d\\"]
  ,(,) "d c b'/\\/'\"/'a'aa\"a\"/ad\\/" ["d","c","b'/\\/'/'a'aaa/ad\\/"]
  ,(,) " a \\' /" ["a","\\'","/"]
  ,(,) "'/ c" ["'/","c"]
  ,(,) "acd 'bcab /ba'daa'/ba/\"dcdadbcacb" ["acd","'bcab","/ba'daa'/ba/dcdadbcacb"]
  ,(,) "a\\\"dd'a c\"a\"\"ac\\" ["a\"dd'a","ca\"ac\\"]
  ,(,) "\"dba /'bb\\ d ba '/c' \"dd\\' cbcd c /b/\\b///" ["dba /'bb\\ d ba '/c' dd\\'","cbcd","c","/b/\\b///"]
  ,(,) "a'c/c \"ccb '/d\\abd/bc " ["a'c/c","ccb '/d\\abd/bc "]
  ,(,) "\\da\"\\//add\\\\ c" ["\\da\\//add\\\\ c"]
  ,(,) "c/\\\"// a/\"ac\"//''ba\"c/\\bc\\\"d\"bc/d" ["c/\"//","a/ac//''bac/\\bc\"dbc/d"]
  ,(,) "/d/ a dc'\\ \"" ["/d/","a","dc'\\",""]
  ,(,) " \"dc//b\\cd/ \\ac\"b\"b\"d\"\"\"dd\"\" ' a\\'/ \"/'/\\a/abd\\ddd" ["dc//b\\cd/ \\acbbd\"dd","'","a\\'/","/'/\\a/abd\\ddd"]
  ,(,) "\\' ' d\"b bbc" ["\\'","'","db bbc"]
  ,(,) "'ba\\a'db/bd d\\'b\\ \\/a'da' " ["'ba\\a'db/bd","d\\'b\\","\\/a'da'"]
  ,(,) "\\b\\cc\"\"d' dd ddcb\"d" ["\\b\\ccd'","dd","ddcbd"]
  ,(,) "d\"dc'\\d\"/'\\\"b\\c'c\" db' \\'b/\"a' / da'\"/ab'\\ c\\bc\\//dbcb\\" ["ddc'\\d/'\"b\\c'c db' \\'b/a'","/","da'/ab'\\ c\\bc\\//dbcb\\"]
  ,(,) " b ddbbbbc\"da\\c\"'\\" ["b","ddbbbbcda\\c'\\"]
  ,(,) "b/\"d dacd'/'\\\"''a a /'\\c'b ab\\ dda\\c'abdd'a\"//d \\\\\\ d\"\"" ["b/d dacd'/'\"''a a /'\\c'b ab\\ dda\\c'abdd'a//d","\\\\\\","d"]
  ,(,) "/c\"\" dd'a'/b\\/'\"'/" ["/c","dd'a'/b\\/''/"]
  ,(,) "/\"'\"\"'cc a a\\dd''\\'b" ["/'\"'cc","a","a\\dd''\\'b"]
  ,(,) "c\"dcd''aba\" \" /'" ["cdcd''aba"," /'"]
  ,(,) "'\"/''\\\\d'/ad\\baadabdca\\ /\\'''bd\\/\"'/' aca \\ \\a'\\ cd\"d /bdcd''cac" ["'/''\\\\d'/ad\\baadabdca\\ /\\'''bd\\/'/'","aca","\\","\\a'\\","cdd /bdcd''cac"]
  ,(,) "\" /\"da" [" /da"]
  ,(,) "'\"ca/'d/d/d\\ca\"/\"\" ddac cc\" ''a c''bd\"bc'dc\\/\"b\"a\\\"\"a/\\ " ["'ca/'d/d/d\\ca/","ddac","cc ''a c''bdbc'dc\\/ba\"a/\\ "]
  ,(,) "\\\\d'ad ' ''\"cd/a \"\"\\'\\\"'dc\\" ["\\\\d'ad","'","''cd/a \"\\'\"'dc\\"]
  ,(,) " ab c'\\a" ["ab","c'\\a"]
  ,(,) "b" ["b"]
  ,(,) "''c dc c\\'d'ab'd\"\\\"cca\"b'da\"dbcdbd\"cd'/d \\cd'\"d \"\"b cdc''/\\\"b'" ["''c","dc","c\\'d'ab'd\"ccab'dadbcdbdcd'/d","\\cd'd \"b","cdc''/\"b'"]
  ,(,) " \"'cb dbddbdd/" ["'cb dbddbdd/"]
  ,(,) "a/\"d// dd/cc/\"cc\"d\" d\\/a a \\c\" \\\\/\"\\ bcc'ac'\"\\c//d\"da/\\aac\\b\"c/'b\"\"bbd/\\" ["a/d// dd/cc/ccd","d\\/a","a","\\c \\\\/\\","bcc'ac'\\c//dda/\\aac\\bc/'b\"bbd/\\"]
  ,(,) "b\"ddccd\"a\"/ba\"" ["bddccda/ba"]
  ,(,) " \" c/b/'/bdd cb d'c a'\"'a d\\\\db//\\\"' c'/'c\\/aa" [" c/b/'/bdd cb d'c a''a","d\\\\db//\"'","c'/'c\\/aa"]
  ,(,) "\\caab" ["\\caab"]
  ,(,) "bb\"'\"/d'bad 'd\\/'\\b//\\\\ \\d''c\"c b\\b/\\" ["bb'/d'bad","'d\\/'\\b//\\\\","\\d''cc b\\b/\\"]
  ,(,) " c'a\" \\cab\"bd\"dcd\"/cb/\"\"b\"b'\"d" ["c'a \\cabbddcd/cb/bb'd"]
  ,(,) "\\/ \"c'ca" ["\\/","c'ca"]
  ,(,) " d' /c'bc\"'/'\\\\dca'cc\"'\"''/d cb//'a \"bd ab\"dcaadc\\\"'d\\\"/a\"a\\\"ba//b/ d/dbac/d\\caa\"bc/ " ["d'","/c'bc'/'\\\\dca'cc'''/d cb//'a bd","abdcaadc\"'d\"/aa\"ba//b/","d/dbac/d\\caabc/ "]
  ,(,) "/\"\\db'd/ ca\"ad b\\\\\"cd/a bbc\\ " ["/\\db'd/ caad","b\\cd/a bbc\\ "]
  ,(,) "cdc bd'/\"c''c d \\\"aa \\d\\ bb'b/ /b/a/c'acda\\'\"\"c \"bbbaa/'/a \\aca\"'/ac' " ["cdc","bd'/c''c d \"aa \\d\\ bb'b/ /b/a/c'acda\\'\"c","bbbaa/'/a \\aca'/ac'"]
  ,(,) "ad/'b\\d /cc\"\"ab \\ \"' ''b\\\"/\\ a\"'d\"\\ddacdbbabb b b //' acd\"c\\d'd\\b\"'\\\"aaba/bda/c'// \\b" ["ad/'b\\d","/ccab","\\","' ''b\"/\\ a'd\\ddacdbbabb b b //' acdc\\d'd\\b'\"aaba/bda/c'// \\b"]
  ,(,) "bac cc \"ac\"/ca/ '\"\" b/b d /cd'\\'bb\" \\ \"b '/ b c ' c''\"a/ad\\ " ["bac","cc","ac/ca/","'","b/b","d","/cd'\\'bb \\ b","'/","b","c","'","c''a/ad\\ "]
  ,(,) "baa' b'b''\\dab/'c" ["baa'","b'b''\\dab/'c"]
  ,(,) "cb\\\\ " ["cb\\\\"]
  ,(,) "/b'a''d\"b\" 'c'b ba\\'b\" bb" ["/b'a''db","'c'b","ba\\'b bb"]
  ,(,) "b /\"ca\\cbac " ["b","/ca\\cbac "]
  ,(,) " \"\"/\"bcaa\"\"a' \\/bb \"a\\\"'\"" ["/bcaa\"a'","\\/bb","a\"'"]
  ,(,) "\"c /''c\"\\badc/\\daa/\\ c\"a c\\ \\/cab \"b\"\\ ba\"\"/d/cd'a ad'c/ad\"' a\\d/d\\c\\'cdccd/\"a'/\"b///ac\"" ["c /''c\\badc/\\daa/\\","ca c\\ \\/cab b\\ ba\"/d/cd'a","ad'c/ad' a\\d/d\\c\\'cdccd/a'/b///ac"]
  ,(,) "/cbbd\"/b' /dd\"/c\\ca/'\"\\ cc \\d\"aca/\"b caa\\d\\'\"b'b dc\"cd\\'c\" 'd/ac\"cacc\"" ["/cbbd/b' /dd/c\\ca/'\\ cc \\daca/b caa\\d\\'b'b","dccd\\'c","'d/accacc"]
  ,(,) "bc/bd\\ca\\bcacca\"\"\\c/\\ /\"\"a/\"c'//b'\\d/a/'ab/cbd/cacb//b \\d\"aac\\d'\"/" ["bc/bd\\ca\\bcacca\\c/\\","/a/c'//b'\\d/a/'ab/cbd/cacb//b \\daac\\d'/"]
  ,(,) "bbac bdc/d\\\"/db\"dbdb\"a \" /\"/'a\\acacbcc c'//\\//b\"ca\"bcca c\\/aaa/c/bccbccaa \"\" cdccc/bddcbc c''" ["bbac","bdc/d\"/dbdbdba"," //'a\\acacbcc","c'//\\//bcabcca","c\\/aaa/c/bccbccaa","","cdccc/bddcbc","c''"]
  ]
| copland/cmdargs | System/Console/CmdArgs/Test/SplitJoin.hs | bsd-3-clause | 8,813 | 0 | 15 | 1,444 | 2,021 | 1,193 | 828 | 114 | 3 |
{-# LANGUAGE CPP,
GeneralizedNewtypeDeriving,
DeriveDataTypeable,
DeriveFunctor,
DeriveTraversable,
DeriveFoldable,
MultiParamTypeClasses,
TypeFamilies
#-}
-- |
-- Defines two variants of @(,)@ with lifted instances for the standard type classes.
--
-- The 'Functor', 'Applicative' and 'Comonad' instances are the standard instances. The
-- 'Monad' instances are not in base (but should argubly be there). All of these instances
-- are equivalent to 'Writer' in transformers.
--
-- 'Applicative' is used to lift 'Monoid' and the standard numeric classes.
--
-- The only difference between 'Twain' and 'Couple' is the handling of 'Eq' and 'Ord':
-- 'Twain' compares only the second value, while 'Couple' compares both. Thus 'Couple' needs
-- an extra @Ord b@ constraint for all sub-classes of 'Ord'.
--
module Data.Functor.Couple (Twain(..), Couple(..)) where
import Data.Bifunctor
import Data.Functor.Product
import Data.Functor.Identity
import Data.Foldable
import Data.Traversable
import Data.Functor.Adjunction (unzipR)
import Data.Semigroup
import Data.Typeable
import Control.Applicative
import Control.Comonad
import Data.PairMonad ()
import Control.Lens (Wrapped(..), Rewrapped(..), iso)
-- |
-- A variant of pair/writer with lifted instances for the numeric classes, using 'Applicative'.
--
newtype Twain b a = Twain { getTwain :: (b, a) }
  deriving (Show, Functor, Traversable, Foldable, Typeable, Applicative, Monad, Comonad, Semigroup, Monoid)
-- Lens 'Wrapped'/'Rewrapped' boilerplate for the newtype.
instance Wrapped (Twain b a) where
  type Unwrapped (Twain b a) = (b, a)
  _Wrapped' = iso getTwain Twain
instance Rewrapped (Twain c a) (Twain c b)
-- | Pointwise lifting of the numeric operations; 'fromInteger' pairs
-- the value with 'mempty' (via 'pure' for the writer pair).
instance (Monoid b, Num a) => Num (Twain b a) where
  (+) = liftA2 (+)
  (*) = liftA2 (*)
  (-) = liftA2 (-)
  abs = fmap abs
  signum = fmap signum
  fromInteger = pure . fromInteger
-- | Pointwise lifting of the fractional operations.
instance (Monoid b, Fractional a) => Fractional (Twain b a) where
  recip = fmap recip
  fromRational = pure . fromRational
-- | Pointwise lifting of the floating-point operations.
instance (Monoid b, Floating a) => Floating (Twain b a) where
  pi = pure pi
  sqrt = fmap sqrt
  exp = fmap exp
  log = fmap log
  sin = fmap sin
  cos = fmap cos
  asin = fmap asin
  atan = fmap atan
  acos = fmap acos
  sinh = fmap sinh
  cosh = fmap cosh
  asinh = fmap asinh
  atanh = fmap atanh
  -- BUG FIX: was 'fmap acos', i.e. 'acosh' computed the wrong function.
  acosh = fmap acosh
-- | 'toEnum' pairs with 'mempty'; 'fromEnum' reads the second
-- component via 'extract'.
instance (Monoid b, Enum a) => Enum (Twain b a) where
  toEnum = pure . toEnum
  fromEnum = fromEnum . extract
-- | Bounds are lifted pointwise (paired with 'mempty').
instance (Monoid b, Bounded a) => Bounded (Twain b a) where
  minBound = pure minBound
  maxBound = pure maxBound
--
-- Eq, Ord and their subclasses
--
-- If comparison takes both values into account, we must add and (Ord b)
-- constraint to all of the following instances. Instead, follow the
-- spirit of the Num et al instances to compare just the second argument.
--
-- | Equality on the /second/ component only (see the comment above).
instance Eq a => Eq (Twain b a) where
  Twain (_,a) == Twain (_,a') = a == a'
-- | Ordering on the /second/ component only.
--
-- BUG FIX: the instance previously defined only ('<'), but the minimal
-- complete definition of 'Ord' is 'compare' (or '(<=)').  With only
-- ('<') given, the default 'compare' and '(<=)' call each other and
-- loop forever (e.g. in 'min', 'max' or 'Data.List.sort').
instance Ord a => Ord (Twain b a) where
  compare (Twain (_,a)) (Twain (_,a')) = compare a a'
  Twain (_,a) < Twain (_,a') = a < a'
-- | Pointwise lifting; 'quotRem' unzips the pair of results back into
-- a pair of 'Twain's via 'unzipR'.
instance (Monoid b, Real a, Enum a, Integral a) => Integral (Twain b a) where
  quot = liftA2 quot
  rem = liftA2 rem
  quotRem = fmap (fmap unzipR) (liftA2 quotRem)
  toInteger = toInteger . extract
-- | Reads the second component via 'extract'.
instance (Monoid b, Real a) => Real (Twain b a) where
  toRational = toRational . extract
-- | The integral part of the result is taken from the second
-- component; the fractional part stays wrapped.
instance (Monoid b, RealFrac a) => RealFrac (Twain b a) where
  properFraction = first extract . unzipR . fmap properFraction
-- |
-- A variant of pair/writer with lifted instances for the numeric classes, using 'Applicative'.
--
-- Unlike 'Twain', 'Couple' compares /both/ components in 'Eq'/'Ord'.
newtype Couple b a = Couple { getCouple :: (b, a) }
  deriving (Show, Functor, Foldable, Traversable, Typeable, Applicative, Monad, Comonad, Semigroup, Monoid)
-- Lens 'Wrapped'/'Rewrapped' boilerplate for the newtype.
instance Wrapped (Couple b a) where
  type Unwrapped (Couple b a) = (b, a)
  _Wrapped' = iso getCouple Couple
instance Rewrapped (Couple c a) (Couple c b)
-- | Pointwise lifting; 'fromInteger' pairs the value with 'mempty'.
instance (Monoid b, Num a) => Num (Couple b a) where
  (+) = liftA2 (+)
  (*) = liftA2 (*)
  (-) = liftA2 (-)
  abs = fmap abs
  signum = fmap signum
  fromInteger = pure . fromInteger
-- | Pointwise lifting of the fractional operations.
instance (Monoid b, Fractional a) => Fractional (Couple b a) where
  recip = fmap recip
  fromRational = pure . fromRational
-- | Pointwise lifting of the floating-point operations.
instance (Monoid b, Floating a) => Floating (Couple b a) where
  pi = pure pi
  sqrt = fmap sqrt
  exp = fmap exp
  log = fmap log
  sin = fmap sin
  cos = fmap cos
  asin = fmap asin
  atan = fmap atan
  acos = fmap acos
  sinh = fmap sinh
  cosh = fmap cosh
  asinh = fmap asinh
  atanh = fmap atanh
  -- BUG FIX: was 'fmap acos', i.e. 'acosh' computed the wrong function.
  acosh = fmap acosh
-- | 'toEnum' pairs with 'mempty'; 'fromEnum' reads the second
-- component via 'extract'.
instance (Monoid b, Enum a) => Enum (Couple b a) where
  toEnum = pure . toEnum
  fromEnum = fromEnum . extract
-- | Bounds are lifted pointwise (paired with 'mempty').
instance (Monoid b, Bounded a) => Bounded (Couple b a) where
  minBound = pure minBound
  maxBound = pure maxBound
-- | Equality on /both/ components (the extra @Eq b@ constraint is the
-- whole point of 'Couple' versus 'Twain').
instance (Eq b, Eq a) => Eq (Couple b a) where
  Couple p == Couple p' = p == p'
-- | Ordering on both components.
--
-- BUG FIX: the instance previously defined only ('<'), but the minimal
-- complete definition of 'Ord' is 'compare' (or '(<=)').  With only
-- ('<') given, the default 'compare' and '(<=)' call each other and
-- loop forever (e.g. in 'min', 'max' or 'Data.List.sort').
instance (Ord b, Ord a) => Ord (Couple b a) where
  compare (Couple p) (Couple p') = compare p p'
  Couple p < Couple p' = p < p'
-- | Pointwise lifting; 'quotRem' unzips the pair of results back into
-- a pair of 'Couple's via 'unzipR'.
instance (Monoid b, Ord b, Real a, Enum a, Integral a) => Integral (Couple b a) where
  quot = liftA2 quot
  rem = liftA2 rem
  quotRem = fmap (fmap unzipR) (liftA2 quotRem)
  toInteger = toInteger . extract
-- | Reads the second component via 'extract'.
instance (Monoid b, Ord b, Real a) => Real (Couple b a) where
  toRational = toRational . extract
-- | The integral part of the result is taken from the second
-- component; the fractional part stays wrapped.
instance (Monoid b, Ord b, RealFrac a) => RealFrac (Couple b a) where
  properFraction = first extract . unzipR . fmap properFraction
| music-suite/music-score | src/Data/Functor/Couple.hs | bsd-3-clause | 5,405 | 0 | 9 | 1,228 | 1,819 | 983 | 836 | 121 | 0 |
{-# LANGUAGE DeriveDataTypeable, TemplateHaskell #-}
module Grammatik.Config where
import Language.Syntax
import qualified Grammatik.Type as G
import Grammatik.Property
import Autolib.ToDoc
import Autolib.Reader
import Data.Typeable
-- | Configuration of a grammar exercise.
data Config =
     Config { lang :: Language.Syntax.Type
            , max_length :: Int -- ^ words at most this long
            , max_num :: Int -- ^ at most this many words
            , properties :: [ Property ]
            , cut :: Int -- ^ this many steps (of the machine)
            , start :: G.Grammatik
            }
     deriving ( Typeable )

-- Derive 'Reader' and 'ToDoc' instances for 'Config'.
$(derives [makeReader, makeToDoc] [''Config])
-- | A ready-made example configuration.
example :: Config
example = Config
    { lang = Ordered_Gleich "ab"
    , max_length = 10 -- words at most this long
    , max_num = 100 -- at most this many words
    , properties = []
    , cut = 30 -- this many steps (of the machine)
    , start = G.example
    }
-- local variables:
-- mode: haskell
-- end:
| Erdwolf/autotool-bonn | src/Grammatik/Config.hs | gpl-2.0 | 901 | 6 | 9 | 218 | 188 | 119 | 69 | 24 | 1 |
-- | A streaming XML parser, using a method known as SAX. SAX isn't really a
-- standard, but an implementation, so it's just an \"SAX-like\" parser.
-- This module allows you parse an XML document without having to evaluate
-- it as a whole. This is needed for protocols like jabber, which use xml
-- streams for communication.
module Text.XML.HaXml.SAX
( SaxElement(..)
, saxParse
) where
import Text.XML.HaXml.Types
import Text.XML.HaXml.Parse
import Text.XML.HaXml.Lex
import Text.ParserCombinators.Poly.State
-- | One event of the SAX stream.
data SaxElement
  = SaxDocTypeDecl DocTypeDecl
    -- ^ A doctype declaration occurred (\<!DOCTYPE\>)
  | SaxProcessingInstruction ProcessingInstruction
    -- ^ A processing instruction occurred (\<??\>)
  | SaxComment String -- ^ A comment occurred (\<!-- --\>)
  | SaxElementOpen Name [Attribute] -- ^ An element was opened (\<\>)
  | SaxElementClose Name -- ^ An element was closed (\<\/\>)
  | SaxElementTag Name [Attribute]
    -- ^ An element without content occurred (\<\/\>)
  | SaxCharData CharData -- ^ Some string data occurred
  | SaxReference Reference -- ^ A reference occurred
-- | @saxParse file content@ takes a filename and the string content of that
-- file and generates a stream of @SaxElement@s. If an error occurs, the
-- parsing stops and a string is returned using the @Maybe@ type.
saxParse :: String -- ^ The filename
         -> String -- ^ The content of the file
         -> ([SaxElement],Maybe String)
         -- ^ A tuple of the parsed elements and @Nothing@, if no
         -- error occured, or @Just@ @String@ if an error occured.
-- The filename is only used by the lexer for error reporting.
saxParse file cntnt = parseStream sax emptySTs
                                  (xmlLex file cntnt)
-- | Repeatedly run the given parser over the token stream, threading
-- the user state through, until the input is exhausted or the first
-- parse error ends the stream (lazily producing results on the way).
parseStream :: Parser s t a -> s -> [t] -> ([a], Maybe String)
parseStream _ _ [] = ([],Nothing)
parseStream p state toks = case runParser p state toks of
        (Left err, _, _) -> ([],Just err)
        (Right res, nstate, rest) -> (res:moreres, err)
                where (moreres,err) = parseStream p nstate rest
-- | Parse a single SAX event by trying each event parser in turn.
sax :: XParser SaxElement
sax = oneOf [ saxelementopen
            , saxelementclose
            , saxprocessinginstruction
            , saxcomment
            , saxdoctypedecl
            , saxreference
            , saxchardata
            ]
      `adjustErr` (++("\nLooking for a SAX event:\n"
                    ++"  elem-open, elem-close, PI, comment, DTD, ref, or chardata"))
-- | An opening tag: yields 'SaxElementTag' for an empty element
-- (@\<x\/\>@) and 'SaxElementOpen' for a normal one (@\<x\>@).
saxelementopen :: XParser SaxElement
saxelementopen = do
    tok TokAnyOpen
    (ElemTag (N n) as) <- elemtag -- no QN ever generated during parsing
    (( do tok TokEndClose
          return (SaxElementTag n as)) `onFail`
     ( do tok TokAnyClose
          return (SaxElementOpen n as))
       `onFail` fail "missing > or /> in element tag")
-- | A closing tag (@\<\/x\>@).
saxelementclose :: XParser SaxElement
saxelementclose = do
    tok TokEndOpen
    n <- name
    tok TokAnyClose
    return (SaxElementClose n)
-- | A comment event.
saxcomment :: XParser SaxElement
saxcomment = do c <- comment
                return (SaxComment c)
-- | A character-data event: either a CDATA section or plain text.
saxchardata :: XParser SaxElement
saxchardata = do cd <- cdsect `onFail` chardata
                 return (SaxCharData cd)
-- | A reference event.
saxreference :: XParser SaxElement
saxreference = do r <- reference
                  return (SaxReference r)
-- | A doctype-declaration event.
saxdoctypedecl :: XParser SaxElement
saxdoctypedecl = do d <- doctypedecl
                    return (SaxDocTypeDecl d)
-- | A processing-instruction event.
saxprocessinginstruction :: XParser SaxElement
saxprocessinginstruction = do p <- processinginstruction
                              return (SaxProcessingInstruction p)
| alexbaluta/courseography | dependencies/HaXml-1.25.3/src/Text/XML/HaXml/SAX.hs | gpl-3.0 | 3,311 | 16 | 15 | 697 | 678 | 376 | 302 | 65 | 2 |
#!/usr/bin/env runhugs
import Distribution.Simple
-- | Delegate to Cabal's default build behaviour (no custom hooks).
main = defaultMain
| binesiyu/ifl | examples/ch23/Setup.hs | mit | 71 | 1 | 4 | 9 | 15 | 7 | 8 | 2 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TupleSections #-}
-- | Names for packages.
module Stack.Types.PackageName
(PackageName
,PackageNameParseFail(..)
,packageNameParser
,parsePackageName
,parsePackageNameFromString
,packageNameByteString
,packageNameString
,packageNameText
,fromCabalPackageName
,toCabalPackageName
,parsePackageNameFromFilePath
,mkPackageName
,packageNameArgument)
where
import Control.Applicative
import Control.DeepSeq
import Control.Monad
import Control.Monad.Catch
import Data.Aeson.Extended
import Data.Attoparsec.ByteString.Char8
import Data.Attoparsec.Combinators
import Data.Binary (Binary)
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as S8
import Data.Char (isLetter)
import Data.Data
import Data.Hashable
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text.Encoding as T
import qualified Distribution.Package as Cabal
import GHC.Generics
import Language.Haskell.TH
import Language.Haskell.TH.Syntax
import Path
import qualified Options.Applicative as O
-- | A parse fail.
data PackageNameParseFail
  = PackageNameParseFail ByteString    -- ^ the raw input that failed to parse
  | CabalFileNameParseFail FilePath    -- ^ a file path not ending in @.cabal@
  deriving (Typeable)
instance Exception PackageNameParseFail
instance Show PackageNameParseFail where
  show (PackageNameParseFail bs) = "Invalid package name: " ++ show bs
  show (CabalFileNameParseFail fp) = "Invalid file path for cabal file: " ++ fp
-- | A package name.  The payload is kept as a 'ByteString'; the smart
-- parsers below are the only intended way to construct one.
newtype PackageName =
  PackageName ByteString
  deriving (Eq,Ord,Typeable,Data,Generic,Hashable,Binary,NFData)
-- Template Haskell lift: rebuilds the constructor application from the
-- unpacked string.
instance Lift PackageName where
  lift (PackageName n) =
    appE (conE 'PackageName)
         (stringE (S8.unpack n))
instance Show PackageName where
  show (PackageName n) = S8.unpack n
instance ToJSON PackageName where
  toJSON = toJSON . packageNameText
-- JSON decoding goes through the validating parser, so malformed names
-- are rejected with a parse failure.
instance FromJSON PackageName where
  parseJSON j =
    do s <- parseJSON j
       case parsePackageNameFromString s of
         Nothing ->
           fail ("Couldn't parse package name: " ++ s)
         Just ver -> return ver
-- | Attoparsec parser for a package name from bytestring.
--
-- Grammar: one or more alphanumerics, then any mix of alphanumerics
-- and hyphens, where each hyphen must be followed by a letter (so a
-- name cannot end in a hyphen and hyphen-digit is rejected).
packageNameParser :: Parser PackageName
packageNameParser =
  fmap (PackageName . S8.pack)
       (appending (many1 (satisfy isAlphaNum))
                  (concating (many (alternating
                                      (pured (satisfy isAlphaNum))
                                      (appending (pured (satisfy (== '-')))
                                                 (pured (satisfy isLetter)))))))
  where
    isAlphaNum c = isLetter c || isDigit c
-- | Make a package name.
-- Template Haskell helper: validates at compile time and aborts
-- compilation (via 'error') on an invalid name.
mkPackageName :: String -> Q Exp
mkPackageName s =
  case parsePackageNameFromString s of
    Nothing -> error ("Invalid package name: " ++ show s)
    Just pn -> [|pn|]
-- | Convenient way to parse a package name from a bytestring.
-- The whole input must be consumed ('endOfInput'); on failure a
-- 'PackageNameParseFail' carrying the input is thrown.
parsePackageName :: MonadThrow m => ByteString -> m PackageName
parsePackageName x = go x
  where go =
          either (const (throwM (PackageNameParseFail x))) return .
          parseOnly (packageNameParser <* endOfInput)
-- | Migration function.
parsePackageNameFromString :: MonadThrow m => String -> m PackageName
parsePackageNameFromString =
  parsePackageName . S8.pack
-- | Produce a bytestring representation of a package name.
packageNameByteString :: PackageName -> ByteString
packageNameByteString (PackageName n) = n
-- | Produce a string representation of a package name.
packageNameString :: PackageName -> String
packageNameString (PackageName n) = S8.unpack n
-- | Produce a 'Text' representation of a package name.
packageNameText :: PackageName -> Text
packageNameText (PackageName n) = T.decodeUtf8 n
-- | Convert from a Cabal package name.
fromCabalPackageName :: Cabal.PackageName -> PackageName
fromCabalPackageName (Cabal.PackageName name) =
  let !x = S8.pack name
  in PackageName x
-- | Convert to a Cabal package name.
toCabalPackageName :: PackageName -> Cabal.PackageName
toCabalPackageName (PackageName name) =
  let !x = S8.unpack name
  in Cabal.PackageName x
-- | Parse a package name from a file path.
--
-- The pattern @('l':'a':'b':'a':'c':'.':xs)@ matches the /reversed/
-- file name: it strips a trailing @.cabal@ extension, and 'clean'
-- reverses the remainder back into the bare package name.
parsePackageNameFromFilePath :: MonadThrow m => Path a File -> m PackageName
parsePackageNameFromFilePath fp =
  clean (toFilePath (filename fp)) >>= parsePackageNameFromString
  where clean = liftM reverse . strip . reverse
        strip ('l':'a':'b':'a':'c':'.':xs) = return xs
        strip _ = throwM (CabalFileNameParseFail (toFilePath fp))
-- JSON maps are keyed by the textual package name.
instance ToJSON a => ToJSON (Map PackageName a) where
  toJSON = toJSON . Map.mapKeysWith const packageNameText
-- Decoding re-parses (and thereby validates) every key; an invalid key
-- fails the whole parse.
instance FromJSON a => FromJSON (Map PackageName a) where
    parseJSON val = do
      m <- parseJSON val
      fmap Map.fromList $ mapM go $ Map.toList m
      where
        go (k, v) = fmap (, v) $ either (fail . show) return $ parsePackageNameFromString k
-- | A command-line argument which accepts a valid package name.
-- (Invalid input is rejected with a reader error.)
packageNameArgument :: O.Mod O.ArgumentFields PackageName
                    -> O.Parser PackageName
packageNameArgument =
    O.argument
      (do s <- O.str
          either O.readerError return (p s))
  where
    p s =
      case parsePackageNameFromString s of
        Just x -> Right x
        Nothing -> Left ("Expected valid package name, but got: " ++ s)
| DanielG/stack | src/Stack/Types/PackageName.hs | bsd-3-clause | 5,675 | 0 | 20 | 1,302 | 1,372 | 720 | 652 | 129 | 2 |
module Foo where
{-@ LIQUID "--totality" @-}
-- NOTE: the guards are deliberately non-exhaustive (no @x < y@ case).
-- The LIQUID "--totality" pragma above indicates this file is a
-- negative test for the totality checker, so the hole must NOT be
-- "fixed".
bar :: Int -> Int -> Int
bar x y | x > y = 1
        | x == y = 0
| mightymoose/liquidhaskell | tests/neg/NoExhaustiveGuardsError.hs | bsd-3-clause | 113 | 0 | 8 | 39 | 51 | 26 | 25 | 4 | 1 |
module Distribution.Server.Framework.AuthCrypt (
PasswdPlain(..),
PasswdHash(..),
newPasswdHash,
checkBasicAuthInfo,
BasicAuthInfo(..),
checkDigestAuthInfo,
DigestAuthInfo(..),
QopInfo(..),
) where
import Distribution.Server.Framework.AuthTypes
import Distribution.Server.Users.Types (UserName(..))
import Data.Digest.Pure.MD5 (md5)
import qualified Data.ByteString.Lazy.Char8 as BS.Lazy -- Only used for ASCII data
import Data.List (intercalate)
-- Hashed passwords are stored in the format:
--
-- @md5 (username ++ ":" ++ realm ++ ":" ++ password)@.
--
-- This format enables us to use either the basic or digest
-- HTTP authentication methods.
-- | Create a new 'PasswdHash' suitable for safe permanent storage.
--
-- This is @md5(user:realm:password)@ — the "HA1" value of RFC 2617 —
-- which works for both HTTP basic and digest authentication checks.
newPasswdHash :: RealmName -> UserName -> PasswdPlain -> PasswdHash
newPasswdHash (RealmName realmName) (UserName userName) (PasswdPlain passwd) =
    PasswdHash $ md5HexDigest [userName, realmName, passwd]
------------------
-- HTTP Basic auth
--
-- | The credentials presented by an HTTP Basic auth request.
data BasicAuthInfo = BasicAuthInfo {
       basicRealm :: RealmName,
       basicUsername :: UserName,
       basicPasswd :: PasswdPlain
     }
-- | Check Basic credentials by re-hashing them and comparing against
-- the stored hash.
checkBasicAuthInfo :: PasswdHash -> BasicAuthInfo -> Bool
checkBasicAuthInfo hash (BasicAuthInfo realmName userName pass) =
    newPasswdHash realmName userName pass == hash
------------------
-- HTTP Digest auth
--
-- | The fields of an HTTP Digest auth request needed for verification.
data DigestAuthInfo = DigestAuthInfo {
       digestUsername :: UserName,
       digestNonce :: String,     -- ^ server nonce echoed by the client
       digestResponse :: String,  -- ^ the client's response hash (hex)
       digestURI :: String,
       digestRqMethod :: String,  -- ^ HTTP request method, e.g. @GET@
       digestQoP :: QopInfo
     }
  deriving Show
-- | Quality-of-protection variant used by the request.
data QopInfo = QopNone
             | QopAuth {
                   digestNonceCount :: String,
                   digestClientNonce :: String
               }
--           | QopAuthInt
  deriving Show
-- See RFC 2617 http://www.ietf.org/rfc/rfc2617
--
-- | Verify an HTTP Digest response against the stored password hash:
-- @response = md5(HA1:nonce[:nc:cnonce:"auth"]:HA2)@.
checkDigestAuthInfo :: PasswdHash -> DigestAuthInfo -> Bool
checkDigestAuthInfo (PasswdHash passwdHash)
                    (DigestAuthInfo _username nonce response uri method qopinfo) =
    hash3 == response
  where
    -- HA1 is the stored hash: md5(user:realm:password).
    hash1 = passwdHash
    -- HA2 covers the request: md5(method:uri).
    hash2 = md5HexDigest [method, uri]
    -- NOTE(review): (==) on hex strings is not a constant-time
    -- comparison; consider whether timing side channels matter here.
    hash3 = case qopinfo of
              QopNone -> md5HexDigest [hash1, nonce, hash2]
              QopAuth nc cnonce -> md5HexDigest [hash1, nonce, nc, cnonce, "auth", hash2]
------------------
-- Utils
--
-- | Join the fields with @':'@ (as in the RFC 2617 digest strings) and
-- return the MD5 digest rendered via 'show'.
md5HexDigest :: [String] -> String
md5HexDigest fields = show (md5 (BS.Lazy.pack joined))
  where
    joined = intercalate ":" fields
| mpickering/hackage-server | Distribution/Server/Framework/AuthCrypt.hs | bsd-3-clause | 2,474 | 0 | 11 | 568 | 506 | 308 | 198 | 48 | 2 |
{-# LANGUAGE MagicHash, BangPatterns #-}
import GHC.Base
-- | Return the unboxed second argument unchanged; the boxed first
-- argument is ignored.
f :: Int -> Int# -> Int#
f x i = i

-- Bind the literal 3 strictly to expose its unboxed representation,
-- pass it through 'f', and rebox the result.
test = let !(I# i) = 3 in I# (f 2 i)
{-# LANGUAGE GADTs, ExistentialQuantification, ScopedTypeVariables,
RankNTypes #-}
-- Here's an example from Doaitse Swiestra (Sept 06)
-- which requires use of scoped type variables
--
-- It's a cut-down version of a larger program
--
-- It's also one which was sensitive to syntactic order
-- in GHC 6.4; but not in 6.6
module ShouldCompile where
data Exists f = forall a . Exists (f a)
data Ref env a where
Zero :: Ref (a,env') a
Suc :: Ref env' a -> Ref (x,env') a
data Find env final = Find (forall a . Ref env a -> Maybe (Ref final a))
-- | Proof of type equality: a value @Eq :: Equal a b@ witnesses @a ~ b@.
data Equal a b where
  Eq :: Equal a a

-- | Equality is symmetric: matching on 'Eq' refines @a@ and @b@ to the
-- same type, so the same constructor proves @Equal b a@.
sym :: Equal a b -> Equal b a
sym Eq = Eq

-- | Trivially accepts references into any two environments.
match' :: Ref env' a -> Ref env'' a -> Bool
match' _ _ = True

-- | Compare two references into the same environment; if they denote the
-- same position, produce a proof that their result types coincide.
match :: Ref env a -> Ref env b -> Maybe (Equal a b)
match Zero Zero = Just Eq
match (Suc x)(Suc y) = match x y
match _ _ = Nothing
-- Notice the essential type sig for the argument to Exists
-- | Build a 'Find' that recognises exactly the wrapped reference: the
-- lookup answers @Just Zero@ when 'match' proves the queried reference
-- equal to @ref1@, and @Nothing@ otherwise. The pattern signature on
-- @ref1@ brings the existential type @b@ into scope for the ascription.
f1 :: forall env. (Exists (Ref env)) -> Exists (Find env)
f1 (Exists (ref1 :: Ref env b))
  = Exists ( Find (\ ref2 -> case match ref2 ref1 of
                               Just Eq -> Just Zero
                               _       -> Nothing
                   ):: Find env (b,())
           )
-- same as 'f1' except that 'ref1' and 'ref2' are swapped in the
-- application of 'match' (exercising type inference in the other
-- argument order).
f2 :: forall env. (Exists (Ref env)) -> Exists (Find env)
f2 (Exists (ref1 :: Ref env b))
  = Exists (Find (\ ref2 -> case match ref1 ref2 of
                              Just Eq -> Just Zero
                              _       -> Nothing
                  ) :: Find env (b,())
           )
| nkaretnikov/ghc | testsuite/tests/gadt/doaitse.hs | bsd-3-clause | 1,615 | 0 | 13 | 543 | 524 | 275 | 249 | 30 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE Strict #-}
-- | The Futhark source language AST definition. Many types, such as
-- 'ExpBase'@, are parametrised by type and name representation.
-- E.g. in a value of type @ExpBase f vn@, annotations are wrapped in
-- the functor @f@, and all names are of type @vn@. See the
-- @https://futhark.readthedocs.org@ for a language reference, or this
-- module may be a little hard to understand.
module Language.Futhark.Syntax
( module Language.Futhark.Core,
pretty,
-- * Types
Uniqueness (..),
IntType (..),
FloatType (..),
PrimType (..),
ArrayDim (..),
DimDecl (..),
ShapeDecl (..),
shapeRank,
stripDims,
TypeName (..),
typeNameFromQualName,
qualNameFromTypeName,
TypeBase (..),
TypeArg (..),
DimExp (..),
TypeExp (..),
TypeArgExp (..),
PName (..),
ScalarTypeBase (..),
RetTypeBase (..),
PatType,
StructType,
StructRetType,
PatRetType,
ValueType,
Diet (..),
TypeDeclBase (..),
-- * Values
IntValue (..),
FloatValue (..),
PrimValue (..),
IsPrimValue (..),
Value (..),
-- * Abstract syntax tree
AttrInfo (..),
AttrAtom (..),
BinOp (..),
IdentBase (..),
Inclusiveness (..),
DimIndexBase (..),
SliceBase,
SizeBinder (..),
AppExpBase (..),
AppRes (..),
ExpBase (..),
FieldBase (..),
CaseBase (..),
LoopFormBase (..),
PatLit (..),
PatBase (..),
-- * Module language
SpecBase (..),
SigExpBase (..),
TypeRefBase (..),
SigBindBase (..),
ModExpBase (..),
ModBindBase (..),
ModParamBase (..),
-- * Definitions
DocComment (..),
ValBindBase (..),
EntryPoint (..),
EntryType (..),
EntryParam (..),
Liftedness (..),
TypeBindBase (..),
TypeParamBase (..),
typeParamName,
ProgBase (..),
DecBase (..),
-- * Miscellaneous
Showable,
NoInfo (..),
Info (..),
Alias (..),
Aliasing,
QualName (..),
)
where
import Control.Applicative
import Control.Monad
import Data.Array
import Data.Bifoldable
import Data.Bifunctor
import Data.Bitraversable
import Data.Foldable
import qualified Data.List.NonEmpty as NE
import qualified Data.Map.Strict as M
import Data.Monoid hiding (Sum)
import Data.Ord
import qualified Data.Set as S
import Data.Traversable
import Futhark.IR.Primitive
( FloatType (..),
FloatValue (..),
IntType (..),
IntValue (..),
)
import Futhark.Util.Loc
import Futhark.Util.Pretty
import Language.Futhark.Core
import Prelude
-- | Convenience class for deriving 'Show' instances for the AST.
class
( Show vn,
Show (f VName),
Show (f (Diet, Maybe VName)),
Show (f String),
Show (f [VName]),
Show (f ([VName], [VName])),
Show (f PatType),
Show (f (PatType, [VName])),
Show (f (StructType, [VName])),
Show (f (StructRetType, [VName])),
Show (f EntryPoint),
Show (f StructType),
Show (f StructRetType),
Show (f PatRetType),
Show (f (StructType, Maybe VName)),
Show (f (PName, StructType)),
Show (f (PName, StructType, Maybe VName)),
Show (f (Aliasing, StructRetType)),
Show (f (M.Map VName VName)),
Show (f AppRes)
) =>
Showable f vn
-- | No information functor. Usually used for placeholder type- or
-- aliasing information.
data NoInfo a = NoInfo
deriving (Eq, Ord, Show)
instance Show vn => Showable NoInfo vn
instance Functor NoInfo where
fmap _ NoInfo = NoInfo
instance Foldable NoInfo where
foldr _ b NoInfo = b
instance Traversable NoInfo where
traverse _ NoInfo = pure NoInfo
-- | Some information. The dual to 'NoInfo'
newtype Info a = Info {unInfo :: a}
deriving (Eq, Ord, Show)
instance Show vn => Showable Info vn
instance Functor Info where
fmap f (Info x) = Info $ f x
instance Foldable Info where
foldr f b (Info x) = f x b
instance Traversable Info where
traverse f (Info x) = Info <$> f x
-- | Low-level primitive types.
data PrimType
= Signed IntType
| Unsigned IntType
| FloatType FloatType
| Bool
deriving (Eq, Ord, Show)
-- | Non-array values.
data PrimValue
= SignedValue !IntValue
| UnsignedValue !IntValue
| FloatValue !FloatValue
| BoolValue !Bool
deriving (Eq, Ord, Show)
-- | A class for converting ordinary Haskell values to primitive
-- Futhark values.
class IsPrimValue v where
primValue :: v -> PrimValue
instance IsPrimValue Int where
primValue = SignedValue . Int32Value . fromIntegral
instance IsPrimValue Int8 where
primValue = SignedValue . Int8Value
instance IsPrimValue Int16 where
primValue = SignedValue . Int16Value
instance IsPrimValue Int32 where
primValue = SignedValue . Int32Value
instance IsPrimValue Int64 where
primValue = SignedValue . Int64Value
instance IsPrimValue Word8 where
primValue = UnsignedValue . Int8Value . fromIntegral
instance IsPrimValue Word16 where
primValue = UnsignedValue . Int16Value . fromIntegral
instance IsPrimValue Word32 where
primValue = UnsignedValue . Int32Value . fromIntegral
instance IsPrimValue Word64 where
primValue = UnsignedValue . Int64Value . fromIntegral
instance IsPrimValue Float where
primValue = FloatValue . Float32Value
instance IsPrimValue Double where
primValue = FloatValue . Float64Value
instance IsPrimValue Bool where
primValue = BoolValue
-- | The value of an 'AttrAtom'.
data AttrAtom vn
= AtomName Name
| AtomInt Integer
deriving (Eq, Ord, Show)
-- | The payload of an attribute.
data AttrInfo vn
= AttrAtom (AttrAtom vn) SrcLoc
| AttrComp Name [AttrInfo vn] SrcLoc
deriving (Eq, Ord, Show)
-- | A type class for things that can be array dimensions.
class Eq dim => ArrayDim dim where
-- | @unifyDims x y@ combines @x@ and @y@ to contain their maximum
-- common information, and fails if they conflict.
unifyDims :: dim -> dim -> Maybe dim
instance ArrayDim () where
unifyDims () () = Just ()
-- | Declaration of a dimension size.
data DimDecl vn
= -- | The size of the dimension is this name, which
-- must be in scope. In a return type, this will
-- give rise to an assertion.
NamedDim (QualName vn)
| -- | The size is a constant.
ConstDim Int
| -- | No known size - but still possibly given a unique name, so we
-- can recognise e.g. @type square [n] = [n][n]i32@ and make
-- @square []@ do the right thing. If @Nothing@, then this is a
-- name distinct from any other. The type checker should _never_
-- produce these - they are a (hopefully temporary) thing
-- introduced by defunctorisation and monomorphisation.
AnyDim (Maybe vn)
deriving (Show)
deriving instance Eq (DimDecl VName)
deriving instance Ord (DimDecl VName)
instance Functor DimDecl where
fmap = fmapDefault
instance Foldable DimDecl where
foldMap = foldMapDefault
instance Traversable DimDecl where
traverse f (NamedDim qn) = NamedDim <$> traverse f qn
traverse _ (ConstDim x) = pure $ ConstDim x
traverse f (AnyDim v) = AnyDim <$> traverse f v
-- Note that the notion of unifyDims here is intentionally not what we
-- use when we do real type unification in the type checker.
instance ArrayDim (DimDecl VName) where
  -- An unknown size unifies with anything; prefer the known side.
  unifyDims AnyDim {} y = Just y
  unifyDims x AnyDim {} = Just x
  -- Two known sizes unify only when they are literally the same
  -- name/constant; anything else is a conflict.
  unifyDims (NamedDim x) (NamedDim y) | x == y = Just $ NamedDim x
  unifyDims (ConstDim x) (ConstDim y) | x == y = Just $ ConstDim x
  unifyDims _ _ = Nothing
-- | The size of an array type is a list of its dimension sizes. If
-- 'Nothing', that dimension is of a (statically) unknown size.
newtype ShapeDecl dim = ShapeDecl {shapeDims :: [dim]}
deriving (Eq, Ord, Show)
instance Foldable ShapeDecl where
foldr f x (ShapeDecl ds) = foldr f x ds
instance Traversable ShapeDecl where
traverse f (ShapeDecl ds) = ShapeDecl <$> traverse f ds
instance Functor ShapeDecl where
fmap f (ShapeDecl ds) = ShapeDecl $ map f ds
instance Semigroup (ShapeDecl dim) where
ShapeDecl l1 <> ShapeDecl l2 = ShapeDecl $ l1 ++ l2
instance Monoid (ShapeDecl dim) where
mempty = ShapeDecl []
-- | The number of dimensions contained in a shape.
shapeRank :: ShapeDecl dim -> Int
shapeRank (ShapeDecl ds) = length ds
-- | @stripDims n shape@ strips the outer @n@ dimensions from
-- @shape@, returning 'Nothing' if this would result in zero or
-- fewer dimensions.
stripDims :: Int -> ShapeDecl dim -> Maybe (ShapeDecl dim)
stripDims n (ShapeDecl ds) =
  if n < length ds
    then Just (ShapeDecl (drop n ds))
    else Nothing
-- | A type name consists of qualifiers (for error messages) and a
-- 'VName' (for equality checking).
data TypeName = TypeName {typeQuals :: [VName], typeLeaf :: VName}
deriving (Show)
instance Eq TypeName where
TypeName _ x == TypeName _ y = x == y
instance Ord TypeName where
TypeName _ x `compare` TypeName _ y = x `compare` y
-- | Convert a 'QualName' to a 'TypeName', preserving the qualifiers.
typeNameFromQualName :: QualName VName -> TypeName
typeNameFromQualName qn = TypeName (qualQuals qn) (qualLeaf qn)

-- | Convert a 'TypeName' back to a 'QualName'.
qualNameFromTypeName :: TypeName -> QualName VName
qualNameFromTypeName tn = QualName (typeQuals tn) (typeLeaf tn)
-- | The name (if any) of a function parameter. The 'Eq' and 'Ord'
-- instances always compare values of this type equal.
data PName = Named VName | Unnamed
deriving (Show)
instance Eq PName where
_ == _ = True
instance Ord PName where
_ <= _ = True
-- | Types that can appear to the right of a function arrow. This
-- just means they can be existentially quantified.
data RetTypeBase dim as = RetType
{ retDims :: [VName],
retType :: TypeBase dim as
}
deriving (Eq, Ord, Show)
instance Bitraversable RetTypeBase where
bitraverse f g (RetType dims t) = RetType dims <$> bitraverse f g t
instance Bifunctor RetTypeBase where
bimap = bimapDefault
instance Bifoldable RetTypeBase where
bifoldMap = bifoldMapDefault
-- | Types that can be elements of arrays. This representation does
-- allow arrays of records of functions, which is nonsensical, but it
-- convolutes the code too much if we try to statically rule it out.
data ScalarTypeBase dim as
= Prim PrimType
| TypeVar as Uniqueness TypeName [TypeArg dim]
| Record (M.Map Name (TypeBase dim as))
| Sum (M.Map Name [TypeBase dim as])
| -- | The aliasing corresponds to the lexical
-- closure of the function.
Arrow as PName (TypeBase dim as) (RetTypeBase dim as)
deriving (Eq, Ord, Show)
instance Bitraversable ScalarTypeBase where
bitraverse _ _ (Prim t) = pure $ Prim t
bitraverse f g (Record fs) = Record <$> traverse (bitraverse f g) fs
bitraverse f g (TypeVar als u t args) =
TypeVar <$> g als <*> pure u <*> pure t <*> traverse (traverse f) args
bitraverse f g (Arrow als v t1 t2) =
Arrow <$> g als <*> pure v <*> bitraverse f g t1 <*> bitraverse f g t2
bitraverse f g (Sum cs) = Sum <$> (traverse . traverse) (bitraverse f g) cs
instance Bifunctor ScalarTypeBase where
bimap = bimapDefault
instance Bifoldable ScalarTypeBase where
bifoldMap = bifoldMapDefault
-- | An expanded Futhark type is either an array, or something that
-- can be an element of an array. When comparing types for equality,
-- function parameter names are ignored. This representation permits
-- some malformed types (arrays of functions), but importantly rules
-- out arrays-of-arrays.
data TypeBase dim as
= Scalar (ScalarTypeBase dim as)
| Array as Uniqueness (ScalarTypeBase dim ()) (ShapeDecl dim)
deriving (Eq, Ord, Show)
instance Bitraversable TypeBase where
bitraverse f g (Scalar t) = Scalar <$> bitraverse f g t
bitraverse f g (Array a u t shape) =
Array <$> g a <*> pure u <*> bitraverse f pure t <*> traverse f shape
instance Bifunctor TypeBase where
bimap = bimapDefault
instance Bifoldable TypeBase where
bifoldMap = bifoldMapDefault
-- | An argument passed to a type constructor.
data TypeArg dim
= TypeArgDim dim SrcLoc
| TypeArgType (TypeBase dim ()) SrcLoc
deriving (Eq, Ord, Show)
instance Traversable TypeArg where
traverse f (TypeArgDim v loc) = TypeArgDim <$> f v <*> pure loc
traverse f (TypeArgType t loc) = TypeArgType <$> bitraverse f pure t <*> pure loc
instance Functor TypeArg where
fmap = fmapDefault
instance Foldable TypeArg where
foldMap = foldMapDefault
-- | A variable that is aliased. Can be still in-scope, or have gone
-- out of scope and be free. In the latter case, it behaves more like
-- an equivalence class. See uniqueness-error18.fut for an example of
-- why this is necessary.
data Alias
= AliasBound {aliasVar :: VName}
| AliasFree {aliasVar :: VName}
deriving (Eq, Ord, Show)
-- | Aliasing for a type, which is a set of the variables that are
-- aliased.
type Aliasing = S.Set Alias
-- | A type with aliasing information and shape annotations, used for
-- describing the type patterns and expressions.
type PatType = TypeBase (DimDecl VName) Aliasing
-- | A "structural" type with shape annotations and no aliasing
-- information, used for declarations.
type StructType = TypeBase (DimDecl VName) ()
-- | A value type contains full, manifest size information.
type ValueType = TypeBase Int64 ()
-- | The return type version of 'StructType'.
type StructRetType = RetTypeBase (DimDecl VName) ()
-- | The return type version of 'PatType'.
type PatRetType = RetTypeBase (DimDecl VName) Aliasing
-- | A dimension declaration expression for use in a 'TypeExp'.
data DimExp vn
= -- | The size of the dimension is this name, which
-- must be in scope.
DimExpNamed (QualName vn) SrcLoc
| -- | The size is a constant.
DimExpConst Int SrcLoc
| -- | No dimension declaration.
DimExpAny
deriving (Show)
deriving instance Eq (DimExp Name)
deriving instance Eq (DimExp VName)
deriving instance Ord (DimExp Name)
deriving instance Ord (DimExp VName)
-- | An unstructured type with type variables and possibly shape
-- declarations - this is what the user types in the source program.
-- These are used to construct 'TypeBase's in the type checker.
data TypeExp vn
= TEVar (QualName vn) SrcLoc
| TETuple [TypeExp vn] SrcLoc
| TERecord [(Name, TypeExp vn)] SrcLoc
| TEArray (TypeExp vn) (DimExp vn) SrcLoc
| TEUnique (TypeExp vn) SrcLoc
| TEApply (TypeExp vn) (TypeArgExp vn) SrcLoc
| TEArrow (Maybe vn) (TypeExp vn) (TypeExp vn) SrcLoc
| TESum [(Name, [TypeExp vn])] SrcLoc
| TEDim [vn] (TypeExp vn) SrcLoc
deriving (Show)
deriving instance Eq (TypeExp Name)
deriving instance Eq (TypeExp VName)
deriving instance Ord (TypeExp Name)
deriving instance Ord (TypeExp VName)
instance Located (TypeExp vn) where
locOf (TEArray _ _ loc) = locOf loc
locOf (TETuple _ loc) = locOf loc
locOf (TERecord _ loc) = locOf loc
locOf (TEVar _ loc) = locOf loc
locOf (TEUnique _ loc) = locOf loc
locOf (TEApply _ _ loc) = locOf loc
locOf (TEArrow _ _ _ loc) = locOf loc
locOf (TESum _ loc) = locOf loc
locOf (TEDim _ _ loc) = locOf loc
-- | A type argument expression passed to a type constructor.
data TypeArgExp vn
= TypeArgExpDim (DimExp vn) SrcLoc
| TypeArgExpType (TypeExp vn)
deriving (Show)
deriving instance Eq (TypeArgExp Name)
deriving instance Eq (TypeArgExp VName)
deriving instance Ord (TypeArgExp Name)
deriving instance Ord (TypeArgExp VName)
instance Located (TypeArgExp vn) where
locOf (TypeArgExpDim _ loc) = locOf loc
locOf (TypeArgExpType t) = locOf t
-- | A declaration of the type of something.
data TypeDeclBase f vn = TypeDecl
{ -- | The type declared by the user.
declaredType :: TypeExp vn,
-- | The type deduced by the type checker.
expandedType :: f StructType
}
deriving instance Showable f vn => Show (TypeDeclBase f vn)
deriving instance Eq (TypeDeclBase NoInfo VName)
deriving instance Ord (TypeDeclBase NoInfo VName)
instance Located (TypeDeclBase f vn) where
locOf = locOf . declaredType
-- | Information about which parts of a value/type are consumed.
data Diet
= -- | Consumes these fields in the record.
RecordDiet (M.Map Name Diet)
| -- | A function that consumes its argument(s) like this.
-- The final 'Diet' should always be 'Observe', as there
-- is no way for a function to consume its return value.
FuncDiet Diet Diet
| -- | Consumes this value.
Consume
| -- | Only observes value in this position, does
-- not consume.
Observe
deriving (Eq, Show)
-- | Simple Futhark values. Values are fully evaluated and their type
-- is always unambiguous.
data Value
= PrimValue !PrimValue
| -- | It is assumed that the array is 0-indexed. The type
-- is the full type.
ArrayValue !(Array Int Value) ValueType
deriving (Eq, Show)
-- | An identifier consists of its name and the type of the value
-- bound to the identifier.
data IdentBase f vn = Ident
{ identName :: vn,
identType :: f PatType,
identSrcLoc :: SrcLoc
}
deriving instance Showable f vn => Show (IdentBase f vn)
instance Eq vn => Eq (IdentBase ty vn) where
x == y = identName x == identName y
instance Ord vn => Ord (IdentBase ty vn) where
compare = comparing identName
instance Located (IdentBase ty vn) where
locOf = locOf . identSrcLoc
-- | Default binary operators.
data BinOp
= -- | A pseudo-operator standing in for any normal
-- identifier used as an operator (they all have the
-- same fixity).
-- Binary Ops for Numbers
Backtick
| Plus
| Minus
| Pow
| Times
| Divide
| Mod
| Quot
| Rem
| ShiftR
| ShiftL
| Band
| Xor
| Bor
| LogAnd
| LogOr
| -- Relational Ops for all primitive types at least
Equal
| NotEqual
| Less
| Leq
| Greater
| Geq
| -- Some functional ops.
-- | @|>@
PipeRight
| -- | @<|@
-- Misc
PipeLeft
deriving (Eq, Ord, Show, Enum, Bounded)
-- | Whether a bound for an end-point of a 'DimSlice' or a range
-- literal is inclusive or exclusive.
data Inclusiveness a
= DownToExclusive a
| -- | May be "down to" if step is negative.
ToInclusive a
| UpToExclusive a
deriving (Eq, Ord, Show)
instance Located a => Located (Inclusiveness a) where
locOf (DownToExclusive x) = locOf x
locOf (ToInclusive x) = locOf x
locOf (UpToExclusive x) = locOf x
instance Functor Inclusiveness where
fmap = fmapDefault
instance Foldable Inclusiveness where
foldMap = foldMapDefault
instance Traversable Inclusiveness where
traverse f (DownToExclusive x) = DownToExclusive <$> f x
traverse f (ToInclusive x) = ToInclusive <$> f x
traverse f (UpToExclusive x) = UpToExclusive <$> f x
-- | An indexing of a single dimension.
data DimIndexBase f vn
= DimFix (ExpBase f vn)
| DimSlice
(Maybe (ExpBase f vn))
(Maybe (ExpBase f vn))
(Maybe (ExpBase f vn))
deriving instance Showable f vn => Show (DimIndexBase f vn)
deriving instance Eq (DimIndexBase NoInfo VName)
deriving instance Ord (DimIndexBase NoInfo VName)
-- | A slicing of an array (potentially multiple dimensions).
type SliceBase f vn = [DimIndexBase f vn]
-- | A name qualified with a breadcrumb of module accesses.
data QualName vn = QualName
{ qualQuals :: ![vn],
qualLeaf :: !vn
}
deriving (Show)
instance Eq (QualName Name) where
QualName qs1 v1 == QualName qs2 v2 = qs1 == qs2 && v1 == v2
instance Eq (QualName VName) where
QualName _ v1 == QualName _ v2 = v1 == v2
instance Ord (QualName Name) where
QualName qs1 v1 `compare` QualName qs2 v2 = compare (qs1, v1) (qs2, v2)
instance Ord (QualName VName) where
QualName _ v1 `compare` QualName _ v2 = compare v1 v2
instance Functor QualName where
fmap = fmapDefault
instance Foldable QualName where
foldMap = foldMapDefault
instance Traversable QualName where
traverse f (QualName qs v) = QualName <$> traverse f qs <*> f v
-- | A binding of a size in a pattern (essentially a size parameter in
-- a @let@ expression).
data SizeBinder vn = SizeBinder {sizeName :: !vn, sizeLoc :: !SrcLoc}
deriving (Eq, Ord, Show)
instance Located (SizeBinder vn) where
locOf = locOf . sizeLoc
-- | An "application expression" is a semantic (not syntactic)
-- grouping of expressions that have "funcall-like" semantics, mostly
-- meaning that they can return existential sizes. In our type
-- theory, these are all thought to be bound to names (*Administrative
-- Normal Form*), but as this is not practical in a real language, we
-- instead use an annotation ('AppRes') that stores the information we
-- need, so we can pretend that an application expression was really
-- bound to a name.
data AppExpBase f vn
= -- | The @Maybe VName@ is a possible existential size that is
-- instantiated by this argument. May have duplicates across the
-- program, but they will all produce the same value (the
-- expressions will be identical).
Apply
(ExpBase f vn)
(ExpBase f vn)
(f (Diet, Maybe VName))
SrcLoc
| -- | Size coercion: @e :> t@.
Coerce (ExpBase f vn) (TypeDeclBase f vn) SrcLoc
| Range
(ExpBase f vn)
(Maybe (ExpBase f vn))
(Inclusiveness (ExpBase f vn))
SrcLoc
| LetPat
[SizeBinder vn]
(PatBase f vn)
(ExpBase f vn)
(ExpBase f vn)
SrcLoc
| LetFun
vn
( [TypeParamBase vn],
[PatBase f vn],
Maybe (TypeExp vn),
f StructRetType,
ExpBase f vn
)
(ExpBase f vn)
SrcLoc
| If (ExpBase f vn) (ExpBase f vn) (ExpBase f vn) SrcLoc
| DoLoop
[VName] -- Size parameters.
(PatBase f vn) -- Merge variable pattern.
(ExpBase f vn) -- Initial values of merge variables.
(LoopFormBase f vn) -- Do or while loop.
(ExpBase f vn) -- Loop body.
SrcLoc
| BinOp
(QualName vn, SrcLoc)
(f PatType)
(ExpBase f vn, f (StructType, Maybe VName))
(ExpBase f vn, f (StructType, Maybe VName))
SrcLoc
| LetWith
(IdentBase f vn)
(IdentBase f vn)
(SliceBase f vn)
(ExpBase f vn)
(ExpBase f vn)
SrcLoc
| Index (ExpBase f vn) (SliceBase f vn) SrcLoc
| -- | A match expression.
Match (ExpBase f vn) (NE.NonEmpty (CaseBase f vn)) SrcLoc
deriving instance Showable f vn => Show (AppExpBase f vn)
deriving instance Eq (AppExpBase NoInfo VName)
deriving instance Ord (AppExpBase NoInfo VName)
instance Located (AppExpBase f vn) where
locOf (Range _ _ _ pos) = locOf pos
locOf (BinOp _ _ _ _ loc) = locOf loc
locOf (If _ _ _ loc) = locOf loc
locOf (Coerce _ _ loc) = locOf loc
locOf (Apply _ _ _ loc) = locOf loc
locOf (LetPat _ _ _ _ loc) = locOf loc
locOf (LetFun _ _ _ loc) = locOf loc
locOf (LetWith _ _ _ _ _ loc) = locOf loc
locOf (Index _ _ loc) = locOf loc
locOf (DoLoop _ _ _ _ _ loc) = locOf loc
locOf (Match _ _ loc) = locOf loc
-- | An annotation inserted by the type checker on constructs that are
-- "function calls" (either literally or conceptually). This
-- annotation encodes the result type, as well as any existential
-- sizes that are generated here.
data AppRes = AppRes
{ appResType :: PatType,
appResExt :: [VName]
}
deriving (Eq, Ord, Show)
-- | The Futhark expression language.
--
-- This allows us to encode whether or not the expression has been
-- type-checked in the Haskell type of the expression. Specifically,
-- the parser will produce expressions of type @Exp 'NoInfo' 'Name'@,
-- and the type checker will convert these to @Exp 'Info' 'VName'@, in
-- which type information is always present and all names are unique.
data ExpBase f vn
= Literal PrimValue SrcLoc
| -- | A polymorphic integral literal.
IntLit Integer (f PatType) SrcLoc
| -- | A polymorphic decimal literal.
FloatLit Double (f PatType) SrcLoc
| -- | A string literal is just a fancy syntax for an array
-- of bytes.
StringLit [Word8] SrcLoc
| Var (QualName vn) (f PatType) SrcLoc
| -- | A parenthesized expression.
Parens (ExpBase f vn) SrcLoc
| QualParens (QualName vn, SrcLoc) (ExpBase f vn) SrcLoc
| -- | Tuple literals, e.g., @{1+3, {x, y+z}}@.
TupLit [ExpBase f vn] SrcLoc
| -- | Record literals, e.g. @{x=2,y=3,z}@.
RecordLit [FieldBase f vn] SrcLoc
| -- | Array literals, e.g., @[ [1+x, 3], [2, 1+4] ]@.
-- Second arg is the row type of the rows of the array.
ArrayLit [ExpBase f vn] (f PatType) SrcLoc
| -- | An attribute applied to the following expression.
Attr (AttrInfo vn) (ExpBase f vn) SrcLoc
| Project Name (ExpBase f vn) (f PatType) SrcLoc
| -- | Numeric negation (ugly special case; Haskell did it first).
Negate (ExpBase f vn) SrcLoc
| -- | Logical and bitwise negation.
Not (ExpBase f vn) SrcLoc
| -- | Fail if the first expression does not return true,
-- and return the value of the second expression if it
-- does.
Assert (ExpBase f vn) (ExpBase f vn) (f String) SrcLoc
| -- | An n-ary value constructor.
Constr Name [ExpBase f vn] (f PatType) SrcLoc
| Update (ExpBase f vn) (SliceBase f vn) (ExpBase f vn) SrcLoc
| RecordUpdate (ExpBase f vn) [Name] (ExpBase f vn) (f PatType) SrcLoc
| Lambda
[PatBase f vn]
(ExpBase f vn)
(Maybe (TypeExp vn))
(f (Aliasing, StructRetType))
SrcLoc
| -- | @+@; first two types are operands, third is result.
OpSection (QualName vn) (f PatType) SrcLoc
| -- | @2+@; first type is operand, second is result.
OpSectionLeft
(QualName vn)
(f PatType)
(ExpBase f vn)
(f (PName, StructType, Maybe VName), f (PName, StructType))
(f PatRetType, f [VName])
SrcLoc
| -- | @+2@; first type is operand, second is result.
OpSectionRight
(QualName vn)
(f PatType)
(ExpBase f vn)
(f (PName, StructType), f (PName, StructType, Maybe VName))
(f PatRetType)
SrcLoc
| -- | Field projection as a section: @(.x.y.z)@.
ProjectSection [Name] (f PatType) SrcLoc
| -- | Array indexing as a section: @(.[i,j])@.
IndexSection (SliceBase f vn) (f PatType) SrcLoc
| -- | Type ascription: @e : t@.
Ascript (ExpBase f vn) (TypeDeclBase f vn) SrcLoc
| AppExp (AppExpBase f vn) (f AppRes)
deriving instance Showable f vn => Show (ExpBase f vn)
deriving instance Eq (ExpBase NoInfo VName)
deriving instance Ord (ExpBase NoInfo VName)
instance Located (ExpBase f vn) where
locOf (Literal _ loc) = locOf loc
locOf (IntLit _ _ loc) = locOf loc
locOf (FloatLit _ _ loc) = locOf loc
locOf (Parens _ loc) = locOf loc
locOf (QualParens _ _ loc) = locOf loc
locOf (TupLit _ pos) = locOf pos
locOf (RecordLit _ pos) = locOf pos
locOf (Project _ _ _ pos) = locOf pos
locOf (ArrayLit _ _ pos) = locOf pos
locOf (StringLit _ loc) = locOf loc
locOf (Var _ _ loc) = locOf loc
locOf (Ascript _ _ loc) = locOf loc
locOf (Negate _ pos) = locOf pos
locOf (Not _ pos) = locOf pos
locOf (Update _ _ _ pos) = locOf pos
locOf (RecordUpdate _ _ _ _ pos) = locOf pos
locOf (Lambda _ _ _ _ loc) = locOf loc
locOf (OpSection _ _ loc) = locOf loc
locOf (OpSectionLeft _ _ _ _ _ loc) = locOf loc
locOf (OpSectionRight _ _ _ _ _ loc) = locOf loc
locOf (ProjectSection _ _ loc) = locOf loc
locOf (IndexSection _ _ loc) = locOf loc
locOf (Assert _ _ _ loc) = locOf loc
locOf (Constr _ _ _ loc) = locOf loc
locOf (Attr _ _ loc) = locOf loc
locOf (AppExp e _) = locOf e
-- | An entry in a record literal.
data FieldBase f vn
= RecordFieldExplicit Name (ExpBase f vn) SrcLoc
| RecordFieldImplicit vn (f PatType) SrcLoc
deriving instance Showable f vn => Show (FieldBase f vn)
deriving instance Eq (FieldBase NoInfo VName)
deriving instance Ord (FieldBase NoInfo VName)
instance Located (FieldBase f vn) where
locOf (RecordFieldExplicit _ _ loc) = locOf loc
locOf (RecordFieldImplicit _ _ loc) = locOf loc
-- | A case in a match expression.
data CaseBase f vn = CasePat (PatBase f vn) (ExpBase f vn) SrcLoc
deriving instance Showable f vn => Show (CaseBase f vn)
deriving instance Eq (CaseBase NoInfo VName)
deriving instance Ord (CaseBase NoInfo VName)
instance Located (CaseBase f vn) where
locOf (CasePat _ _ loc) = locOf loc
-- | Whether the loop is a @for@-loop or a @while@-loop.
data LoopFormBase f vn
= For (IdentBase f vn) (ExpBase f vn)
| ForIn (PatBase f vn) (ExpBase f vn)
| While (ExpBase f vn)
deriving instance Showable f vn => Show (LoopFormBase f vn)
deriving instance Eq (LoopFormBase NoInfo VName)
deriving instance Ord (LoopFormBase NoInfo VName)
-- | A literal in a pattern.
data PatLit
= PatLitInt Integer
| PatLitFloat Double
| PatLitPrim PrimValue
deriving (Eq, Ord, Show)
-- | A pattern as used most places where variables are bound (function
-- parameters, @let@ expressions, etc).
data PatBase f vn
= TuplePat [PatBase f vn] SrcLoc
| RecordPat [(Name, PatBase f vn)] SrcLoc
| PatParens (PatBase f vn) SrcLoc
| Id vn (f PatType) SrcLoc
| Wildcard (f PatType) SrcLoc -- Nothing, i.e. underscore.
| PatAscription (PatBase f vn) (TypeDeclBase f vn) SrcLoc
| PatLit PatLit (f PatType) SrcLoc
| PatConstr Name (f PatType) [PatBase f vn] SrcLoc
| PatAttr (AttrInfo vn) (PatBase f vn) SrcLoc
deriving instance Showable f vn => Show (PatBase f vn)
deriving instance Eq (PatBase NoInfo VName)
deriving instance Ord (PatBase NoInfo VName)
instance Located (PatBase f vn) where
locOf (TuplePat _ loc) = locOf loc
locOf (RecordPat _ loc) = locOf loc
locOf (PatParens _ loc) = locOf loc
locOf (Id _ _ loc) = locOf loc
locOf (Wildcard _ loc) = locOf loc
locOf (PatAscription _ _ loc) = locOf loc
locOf (PatLit _ _ loc) = locOf loc
locOf (PatConstr _ _ _ loc) = locOf loc
locOf (PatAttr _ _ loc) = locOf loc
-- | Documentation strings, including source location.
data DocComment = DocComment String SrcLoc
deriving (Show)
instance Located DocComment where
locOf (DocComment _ loc) = locOf loc
-- | Part of the type of an entry point. Has an actual type, and
-- maybe also an ascribed type expression.
data EntryType = EntryType
{ entryType :: StructType,
entryAscribed :: Maybe (TypeExp VName)
}
deriving (Show)
-- | A parameter of an entry point.
data EntryParam = EntryParam
{ entryParamName :: Name,
entryParamType :: EntryType
}
deriving (Show)
-- | Information about the external interface exposed by an entry
-- point. The important thing is that that we remember the original
-- source-language types, without desugaring them at all. The
-- annoying thing is that we do not require type annotations on entry
-- points, so the types can be either ascribed or inferred.
data EntryPoint = EntryPoint
{ entryParams :: [EntryParam],
entryReturn :: EntryType
}
deriving (Show)
-- | Function Declarations
data ValBindBase f vn = ValBind
{ -- | Just if this function is an entry point. If so, it also
-- contains the externally visible interface. Note that this may not
-- strictly be well-typed after some desugaring operations, as it
-- may refer to abstract types that are no longer in scope.
valBindEntryPoint :: Maybe (f EntryPoint),
valBindName :: vn,
valBindRetDecl :: Maybe (TypeExp vn),
-- | If 'valBindParams' is null, then the 'retDims' are brought
-- into scope at this point.
valBindRetType :: f StructRetType,
valBindTypeParams :: [TypeParamBase vn],
valBindParams :: [PatBase f vn],
valBindBody :: ExpBase f vn,
valBindDoc :: Maybe DocComment,
valBindAttrs :: [AttrInfo vn],
valBindLocation :: SrcLoc
}
deriving instance Showable f vn => Show (ValBindBase f vn)
instance Located (ValBindBase f vn) where
locOf = locOf . valBindLocation
-- | Type Declarations
data TypeBindBase f vn = TypeBind
{ typeAlias :: vn,
typeLiftedness :: Liftedness,
typeParams :: [TypeParamBase vn],
typeExp :: TypeExp vn,
typeElab :: f StructRetType,
typeDoc :: Maybe DocComment,
typeBindLocation :: SrcLoc
}
deriving instance Showable f vn => Show (TypeBindBase f vn)
instance Located (TypeBindBase f vn) where
locOf = locOf . typeBindLocation
-- | The liftedness of a type parameter. By the @Ord@ instance,
-- @Unlifted < SizeLifted < Lifted@.
-- The ordering makes more permissive kinds compare greater.
data Liftedness
  = -- | May only be instantiated with a zero-order type of (possibly
    -- symbolically) known size.
    Unlifted
  | -- | May only be instantiated with a zero-order type, but the size
    -- can be varying.
    SizeLifted
  | -- | May be instantiated with a functional type.
    Lifted
  deriving (Eq, Ord, Show)
-- | A type parameter.
data TypeParamBase vn
  = -- | A type parameter that must be a size.
    TypeParamDim vn SrcLoc
  | -- | A type parameter that must be a type.
    TypeParamType Liftedness vn SrcLoc
  deriving (Eq, Ord, Show)
-- Functor and Foldable are obtained from the Traversable instance
-- below via the default implementations.
instance Functor TypeParamBase where
  fmap = fmapDefault
instance Foldable TypeParamBase where
  foldMap = foldMapDefault
instance Traversable TypeParamBase where
  traverse f (TypeParamDim v loc) = TypeParamDim <$> f v <*> pure loc
  traverse f (TypeParamType l v loc) = TypeParamType l <$> f v <*> pure loc
instance Located (TypeParamBase vn) where
  locOf (TypeParamDim _ loc) = locOf loc
  locOf (TypeParamType _ _ loc) = locOf loc
-- | Extract the name bound by a type parameter, regardless of its kind.
typeParamName :: TypeParamBase vn -> vn
typeParamName p =
  case p of
    TypeParamDim v _ -> v
    TypeParamType _ v _ -> v
-- | A spec is a component of a module type.
data SpecBase f vn
  = ValSpec
      { specName :: vn,
        specTypeParams :: [TypeParamBase vn],
        specType :: TypeDeclBase f vn,
        specDoc :: Maybe DocComment,
        specLocation :: SrcLoc
      }
  | TypeAbbrSpec (TypeBindBase f vn) -- ^ A type abbreviation in a module type.
  | -- | Abstract type.
    TypeSpec Liftedness vn [TypeParamBase vn] (Maybe DocComment) SrcLoc
  | ModSpec vn (SigExpBase f vn) (Maybe DocComment) SrcLoc -- ^ A submodule spec.
  | IncludeSpec (SigExpBase f vn) SrcLoc -- ^ Inclusion of another module type.
deriving instance Showable f vn => Show (SpecBase f vn)
instance Located (SpecBase f vn) where
  locOf (ValSpec _ _ _ _ loc) = locOf loc
  locOf (TypeAbbrSpec tbind) = locOf tbind
  locOf (TypeSpec _ _ _ _ loc) = locOf loc
  locOf (ModSpec _ _ _ loc) = locOf loc
  locOf (IncludeSpec _ loc) = locOf loc
-- | A module type expression.
data SigExpBase f vn
  = SigVar (QualName vn) (f (M.Map VName VName)) SrcLoc -- ^ Named module type.
  | SigParens (SigExpBase f vn) SrcLoc -- ^ Parenthesised module type.
  | SigSpecs [SpecBase f vn] SrcLoc -- ^ Anonymous module type: a list of specs.
  | SigWith (SigExpBase f vn) (TypeRefBase f vn) SrcLoc -- ^ @with@-refinement.
  | SigArrow (Maybe vn) (SigExpBase f vn) (SigExpBase f vn) SrcLoc -- ^ Functor type.
deriving instance Showable f vn => Show (SigExpBase f vn)
-- | A type refinement.
data TypeRefBase f vn = TypeRef (QualName vn) [TypeParamBase vn] (TypeDeclBase f vn) SrcLoc
deriving instance Showable f vn => Show (TypeRefBase f vn)
instance Located (TypeRefBase f vn) where
  locOf (TypeRef _ _ _ loc) = locOf loc
instance Located (SigExpBase f vn) where
  locOf (SigVar _ _ loc) = locOf loc
  locOf (SigParens _ loc) = locOf loc
  locOf (SigSpecs _ loc) = locOf loc
  locOf (SigWith _ _ loc) = locOf loc
  locOf (SigArrow _ _ _ loc) = locOf loc
-- | Module type binding.
data SigBindBase f vn = SigBind
  { -- | Name bound to the module type.
    sigName :: vn,
    sigExp :: SigExpBase f vn,
    sigDoc :: Maybe DocComment,
    sigLoc :: SrcLoc
  }
deriving instance Showable f vn => Show (SigBindBase f vn)
instance Located (SigBindBase f vn) where
  locOf = locOf . sigLoc
-- | Module expression.
data ModExpBase f vn
  = ModVar (QualName vn) SrcLoc -- ^ Reference to a named module.
  | ModParens (ModExpBase f vn) SrcLoc -- ^ Parenthesised module expression.
  | -- | The contents of another file as a module.
    ModImport FilePath (f FilePath) SrcLoc
  | ModDecs [DecBase f vn] SrcLoc -- ^ Anonymous module: a list of declarations.
  | -- | Functor application. The first mapping is from parameter
    -- names to argument names, while the second maps names in the
    -- constructed module to the names inside the functor.
    ModApply
      (ModExpBase f vn)
      (ModExpBase f vn)
      (f (M.Map VName VName))
      (f (M.Map VName VName))
      SrcLoc
  | ModAscript (ModExpBase f vn) (SigExpBase f vn) (f (M.Map VName VName)) SrcLoc -- ^ Module type ascription.
  | ModLambda
      (ModParamBase f vn)
      (Maybe (SigExpBase f vn, f (M.Map VName VName)))
      (ModExpBase f vn)
      SrcLoc
deriving instance Showable f vn => Show (ModExpBase f vn)
instance Located (ModExpBase f vn) where
  locOf (ModVar _ loc) = locOf loc
  locOf (ModParens _ loc) = locOf loc
  locOf (ModImport _ _ loc) = locOf loc
  locOf (ModDecs _ loc) = locOf loc
  locOf (ModApply _ _ _ _ loc) = locOf loc
  locOf (ModAscript _ _ _ loc) = locOf loc
  locOf (ModLambda _ _ _ loc) = locOf loc
-- | A module binding.
data ModBindBase f vn = ModBind
  { modName :: vn,
    -- | Functor parameters, if any.
    modParams :: [ModParamBase f vn],
    -- | Optional module type ascription plus its name substitutions.
    modSignature :: Maybe (SigExpBase f vn, f (M.Map VName VName)),
    modExp :: ModExpBase f vn,
    modDoc :: Maybe DocComment,
    modLocation :: SrcLoc
  }
deriving instance Showable f vn => Show (ModBindBase f vn)
instance Located (ModBindBase f vn) where
  locOf = locOf . modLocation
-- | A module parameter.
data ModParamBase f vn = ModParam
  { modParamName :: vn,
    modParamType :: SigExpBase f vn,
    -- | Abstract types brought into scope by the parameter.
    modParamAbs :: f [VName],
    modParamLocation :: SrcLoc
  }
deriving instance Showable f vn => Show (ModParamBase f vn)
instance Located (ModParamBase f vn) where
  locOf = locOf . modParamLocation
-- | A top-level binding.
data DecBase f vn
  = ValDec (ValBindBase f vn) -- ^ Value (function) declaration.
  | TypeDec (TypeBindBase f vn) -- ^ Type abbreviation.
  | SigDec (SigBindBase f vn) -- ^ Module type binding.
  | ModDec (ModBindBase f vn) -- ^ Module binding.
  | OpenDec (ModExpBase f vn) SrcLoc -- ^ @open@ of a module expression.
  | LocalDec (DecBase f vn) SrcLoc -- ^ A declaration marked @local@.
  | ImportDec FilePath (f FilePath) SrcLoc -- ^ File import at top level.
deriving instance Showable f vn => Show (DecBase f vn)
instance Located (DecBase f vn) where
  locOf (ValDec d) = locOf d
  locOf (TypeDec d) = locOf d
  locOf (SigDec d) = locOf d
  locOf (ModDec d) = locOf d
  locOf (OpenDec _ loc) = locOf loc
  locOf (LocalDec _ loc) = locOf loc
  locOf (ImportDec _ _ loc) = locOf loc
-- | The program described by a single Futhark file. May depend on
-- other files.
data ProgBase f vn = Prog
  { progDoc :: Maybe DocComment,
    progDecs :: [DecBase f vn]
  }
deriving instance Showable f vn => Show (ProgBase f vn)
--- Some prettyprinting definitions are here because we need them in
--- the Attributes module.
-- Renders primitive types with their surface-syntax names; unsigned
-- integers get a @u@ prefix, signed/float types defer to their own
-- Pretty instances.
instance Pretty PrimType where
  ppr (Unsigned Int8) = text "u8"
  ppr (Unsigned Int16) = text "u16"
  ppr (Unsigned Int32) = text "u32"
  ppr (Unsigned Int64) = text "u64"
  ppr (Signed t) = ppr t
  ppr (FloatType t) = ppr t
  ppr Bool = text "bool"
-- Maps every binary operator to its concrete source-language symbol.
instance Pretty BinOp where
  ppr Backtick = text "``"
  ppr Plus = text "+"
  ppr Minus = text "-"
  ppr Pow = text "**"
  ppr Times = text "*"
  ppr Divide = text "/"
  ppr Mod = text "%"
  ppr Quot = text "//"
  ppr Rem = text "%%"
  ppr ShiftR = text ">>"
  ppr ShiftL = text "<<"
  ppr Band = text "&"
  ppr Xor = text "^"
  ppr Bor = text "|"
  ppr LogAnd = text "&&"
  ppr LogOr = text "||"
  ppr Equal = text "=="
  ppr NotEqual = text "!="
  ppr Less = text "<"
  ppr Leq = text "<="
  ppr Greater = text ">"
  ppr Geq = text ">="
  ppr PipeLeft = text "<|"
  ppr PipeRight = text "|>"
| HIPERFIT/futhark | src/Language/Futhark/Syntax.hs | isc | 38,952 | 0 | 14 | 8,747 | 11,331 | 6,029 | 5,302 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
import GTL.Example.FictitiousPlay.Setup
import GTL.Data.Time
import System.Console.CmdArgs
data PDFArgs = PDFArgs { dir :: FilePath
, endTime :: Time } deriving (Show, Data, Typeable)
pdfArgs = PDFArgs { dir = def &= args &= typ "FILES/DIRS", endTime = 1000 }
main = do
as <- cmdArgs pdfArgs
let d = dir as
t = endTime as
if null $ dir as
then error "A directory name need to be provided"
else pdfAll d t
| dudebout/game-theoretic-learning | GTL/Example/FictitiousPlay/printAll.hs | isc | 494 | 6 | 10 | 130 | 157 | 81 | 76 | 14 | 2 |
module Hage.Geometry where
import qualified Graphics.UI.SDL as SDL (Rect(..))
import Hage.Geometry.Types
-- | A large finite sentinel used in place of true infinity.
-- Note this is 1e9, not IEEE infinity (@1/0@).
infinity :: Float
infinity = 1.0e9
-- | Translate a point by a vector (component-wise addition).
movePoint :: Vector -> Point -> Point
movePoint (dx, dy) (x, y) = (x + dx, y + dy)
-- | The zero vector; translating by it leaves any point unchanged.
nullVector :: Vector
nullVector = (0, 0)
-- | Negate the X component (reflect the vector across the Y axis).
mirrorX :: Vector -> Vector
mirrorX (x, y) = (negate x, y)
-- | Negate the Y component (reflect the vector across the X axis).
mirrorY :: Vector -> Vector
mirrorY (x, y) = (x, negate y)
-- | @isBetween lo hi x@ is true exactly when @x@ lies in the closed
-- interval [lo, hi].
isBetween :: Ord a => a -> a -> a -> Bool
isBetween lo hi x = lo <= x && x <= hi
-- | Build an integral SDL rectangle from a fractional origin and size,
-- truncating each component toward zero.
makeRect :: Point -> Size -> SDL.Rect
makeRect (px, py) (pw, ph) =
  SDL.Rect (truncate px) (truncate py) (truncate pw) (truncate ph)
-- | Point-in-rectangle test, inclusive of the edges.
hasRectPoint :: Rect -> Point -> Bool
hasRectPoint ((left, top), (w, h)) (px, py) =
  isBetween left (left + w) px && isBetween top (top + h) py
-- | Test whether a circle's extreme points touch the rectangle and, if
-- so, on which axis. The horizontal extremes are checked first, so a
-- simultaneous overlap reports 'Horizontal'.
circleAndRectClashing :: Circle -> Rect -> Maybe Clash
circleAndRectClashing ((cx, cy), r) rect =
  if touches [(cx + r, cy), (cx - r, cy)]
    then Just Horizontal
    else if touches [(cx, cy + r), (cx, cy - r)]
      then Just Vertical
      else Nothing
  where
    touches = any (hasRectPoint rect)
| Hinidu/Arkanoid | src/Hage/Geometry.hs | mit | 1,041 | 0 | 11 | 253 | 506 | 281 | 225 | 26 | 1 |
module HsSearch.FileTypesTest
(
getFileTypeTests
, getFileTypeFromNameTests
) where
import HsSearch.FileTypes
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit hiding (Test)
-- | HUnit cases for 'getFileType': extension-based detection, which
-- runs in IO, for one representative file name per file type.
getFileTypeTests :: IO [Test]
getFileTypeTests = do
  archiveFileType <- getFileType "archive.zip"
  binaryFileType <- getFileType "binary.exe"
  codeFileType <- getFileType "FileTypes.hs"
  textFileType <- getFileType "text.txt"
  xmlFileType <- getFileType "markup.xml"
  unknownFileType <- getFileType "unknown.xyz"
  return [ testCase "getFileType archive.zip == Archive" (archiveFileType @?= Archive)
         , testCase "getFileType binary.exe == Binary" (binaryFileType @?= Binary)
         , testCase "getFileType FileTypes.hs == Code" (codeFileType @?= Code)
         , testCase "getFileType text.txt == Text" (textFileType @?= Text)
         , testCase "getFileType markup.xml == Xml" (xmlFileType @?= Xml)
         , testCase "getFileType unknown.xyz == Unknown" (unknownFileType @?= Unknown)
         ]
-- | HUnit cases for 'getFileTypeForName': pure, case-insensitive
-- lookup of a file type by its name.
getFileTypeFromNameTests :: IO [Test]
getFileTypeFromNameTests =
  return [ testCase "getFileTypeForName archive == Archive" (getFileTypeForName "archive" @?= Archive)
         , testCase "getFileTypeForName binarY == Binary" (getFileTypeForName "binarY" @?= Binary)
         , testCase "getFileTypeForName cODe == Code" (getFileTypeForName "cODe" @?= Code)
         , testCase "getFileTypeForName Text == Text" (getFileTypeForName "Text" @?= Text)
         , testCase "getFileTypeForName XML == Xml" (getFileTypeForName "XML" @?= Xml)
         , testCase "getFileTypeForName whoknows == Unknown" (getFileTypeForName "whoknows" @?= Unknown)
         ]
| clarkcb/xsearch | haskell/hssearch/test/HsSearch/FileTypesTest.hs | mit | 2,074 | 0 | 11 | 384 | 452 | 223 | 229 | 42 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
module PrettyPrint where
import DataDynamic
import DataTypeable
import Text.PrettyPrint.HughesPJClass
import StaticLang as SL
-- =======================================================================================
-- | Pretty printer instace for our StaticExp terms. Takes care of indentation
-- | and spaces when necessary.
-- Pretty-printing of typed terms. Lambdas render on one line when the
-- whole rendering fits in 10 columns, otherwise the body goes below.
instance Pretty (StaticExp t) where
  pPrint (Lam str exp) = if length (render short) <= 10 then short else long
    where short = parens $ text ("\\ " ++ str ++ ".") <+> pPrint exp
          long = parens $ text ("\\ " ++ str ++ ".") $$ pPrint exp
  -- Application and integer arithmetic are parenthesised; boolean and
  -- comparison operators are printed bare.
  pPrint (f :@ exp) = parens $ pPrint f <+> pPrint exp
  pPrint (BinInt op e1 e2) = parens $ pPrint e1 <+> intTextOp op <+> pPrint e2
  pPrint (BinBool op e1 e2) = pPrint e1 <+> boolTextOp op <+> pPrint e2
  pPrint (BinIntBool op e1 e2) = pPrint e1 <+> intBoolTextOp op <+> pPrint e2
  pPrint (BinEq e1 e2) = pPrint e1 <+> text "==" <+> pPrint e2
  pPrint (UniNot e) = text "not" <+> pPrint e
  pPrint (UniFst e) = text "fst" <+> parens (pPrint e)
  pPrint (UniSnd e) = text "snd" <+> parens (pPrint e)
  pPrint (Lit x) = text (show x)
  pPrint (Var s) = text s
  -- Conditionals render with condition and both branches nested.
  pPrint (If cond e1 e2) = text "if" <+> nest 1 (pPrint cond) $$
                           nest 1 (pPrint e1) $$
                           nest 1 (pPrint e2)
  pPrint (To exp) = text "to" <+> pPrint exp
  pPrint (From exp) = text "from" <+> pPrint exp
-- | Render an integer-valued binary operator as its surface syntax.
intTextOp :: IntIntOp -> Doc
intTextOp op =
  case op of
    Plus -> text "+"
    Minus -> text "-"
    Mult -> text "*"
-- | Render a boolean binary operator as its surface syntax.
boolTextOp :: BoolBoolOp -> Doc
boolTextOp op =
  case op of
    Or -> text "||"
    And -> text "&&"
-- | Render an integer-comparison operator as its surface syntax.
-- NOTE(review): unlike 'intTextOp' and 'boolTextOp', this binding has
-- no type signature -- consider adding one for consistency.
intBoolTextOp op =
  case op of
    Gte -> text ">="
    Lte -> text "<="
    Lt -> text "<"
    Gt -> text ">"
-- TODO
--textOp SL.EQ = text "=="
--textOp NEQ = text "/="
-- | 'show' renders the pretty-printed surface syntax rather than a
-- derived structural representation.
instance Show (StaticExp t) where
  show exp = render $ pPrint exp
-- | Pretty-print a term's AST to stdout.
printAST :: StaticExp t -> IO()
printAST = putStrLn . show . pPrint
-- =======================================================================================
| plclub/cis670-16fa | projects/DynamicLang/src/PrettyPrint.hs | mit | 2,204 | 0 | 13 | 555 | 739 | 360 | 379 | 41 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGPreserveAspectRatio
(pattern SVG_PRESERVEASPECTRATIO_UNKNOWN,
pattern SVG_PRESERVEASPECTRATIO_NONE,
pattern SVG_PRESERVEASPECTRATIO_XMINYMIN,
pattern SVG_PRESERVEASPECTRATIO_XMIDYMIN,
pattern SVG_PRESERVEASPECTRATIO_XMAXYMIN,
pattern SVG_PRESERVEASPECTRATIO_XMINYMID,
pattern SVG_PRESERVEASPECTRATIO_XMIDYMID,
pattern SVG_PRESERVEASPECTRATIO_XMAXYMID,
pattern SVG_PRESERVEASPECTRATIO_XMINYMAX,
pattern SVG_PRESERVEASPECTRATIO_XMIDYMAX,
pattern SVG_PRESERVEASPECTRATIO_XMAXYMAX,
pattern SVG_MEETORSLICE_UNKNOWN, pattern SVG_MEETORSLICE_MEET,
pattern SVG_MEETORSLICE_SLICE, js_setAlign, setAlign, js_getAlign,
getAlign, js_setMeetOrSlice, setMeetOrSlice, js_getMeetOrSlice,
getMeetOrSlice, SVGPreserveAspectRatio,
castToSVGPreserveAspectRatio, gTypeSVGPreserveAspectRatio)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
-- Numeric constants mirroring the DOM SVG_PRESERVEASPECTRATIO_* and
-- SVG_MEETORSLICE_* enumerations (machine-generated bindings).
pattern SVG_PRESERVEASPECTRATIO_UNKNOWN = 0
pattern SVG_PRESERVEASPECTRATIO_NONE = 1
pattern SVG_PRESERVEASPECTRATIO_XMINYMIN = 2
pattern SVG_PRESERVEASPECTRATIO_XMIDYMIN = 3
pattern SVG_PRESERVEASPECTRATIO_XMAXYMIN = 4
pattern SVG_PRESERVEASPECTRATIO_XMINYMID = 5
pattern SVG_PRESERVEASPECTRATIO_XMIDYMID = 6
pattern SVG_PRESERVEASPECTRATIO_XMAXYMID = 7
pattern SVG_PRESERVEASPECTRATIO_XMINYMAX = 8
pattern SVG_PRESERVEASPECTRATIO_XMIDYMAX = 9
pattern SVG_PRESERVEASPECTRATIO_XMAXYMAX = 10
pattern SVG_MEETORSLICE_UNKNOWN = 0
pattern SVG_MEETORSLICE_MEET = 1
pattern SVG_MEETORSLICE_SLICE = 2
-- Raw JavaScript property accessors; the MonadIO wrappers below are
-- the public API.
foreign import javascript unsafe "$1[\"align\"] = $2;" js_setAlign
        :: SVGPreserveAspectRatio -> Word -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPreserveAspectRatio.align Mozilla SVGPreserveAspectRatio.align documentation>
setAlign :: (MonadIO m) => SVGPreserveAspectRatio -> Word -> m ()
setAlign self val = liftIO (js_setAlign (self) val)
foreign import javascript unsafe "$1[\"align\"]" js_getAlign ::
        SVGPreserveAspectRatio -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPreserveAspectRatio.align Mozilla SVGPreserveAspectRatio.align documentation>
getAlign :: (MonadIO m) => SVGPreserveAspectRatio -> m Word
getAlign self = liftIO (js_getAlign (self))
foreign import javascript unsafe "$1[\"meetOrSlice\"] = $2;"
        js_setMeetOrSlice :: SVGPreserveAspectRatio -> Word -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPreserveAspectRatio.meetOrSlice Mozilla SVGPreserveAspectRatio.meetOrSlice documentation>
setMeetOrSlice ::
               (MonadIO m) => SVGPreserveAspectRatio -> Word -> m ()
setMeetOrSlice self val = liftIO (js_setMeetOrSlice (self) val)
foreign import javascript unsafe "$1[\"meetOrSlice\"]"
        js_getMeetOrSlice :: SVGPreserveAspectRatio -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGPreserveAspectRatio.meetOrSlice Mozilla SVGPreserveAspectRatio.meetOrSlice documentation>
getMeetOrSlice :: (MonadIO m) => SVGPreserveAspectRatio -> m Word
getMeetOrSlice self = liftIO (js_getMeetOrSlice (self))
module Typed.Exception
( module M
, pattern Terroras
, Term (ExceptionTerm)
) where
import Control.Monad
import Control.Monad.Catch
import qualified Data.Map as M
import qualified Data.Tree as T
import Preliminaries
import Typed.Simply as M (pattern Ttrue, pattern Tfalse, pattern Tif, pattern Kbool, pattern Tval, pattern Tvar, pattern Tabs, pattern Tapp, pattern Karr, Term (SimplyTerm))
-- | @error as T@: an exception value annotated with the type it is used at.
pattern Terroras t = T.Node "error as {}" [t]
-- | @try t1 with t2@: evaluate @t1@, falling back to @t2@ on error.
pattern Ttry t1 t2 = T.Node "try {} with {}" [t1,t2]
-- | Errors raised by the type checker; thrown via 'throwM'.
data TypeOfError
  = ArmsOfConditionalHasDifferentTypes
  | GuardOfConditionalNotABoolean
  | ParameterTypeMismatch
  | ArrowTypeExpected
  | ExpectedType StrTree StrTree -- ^ Expected vs. actual type.
  deriving Show
instance Exception TypeOfError
-- Simply-typed lambda calculus with exceptions (TAPL ch. 14): values,
-- typing and small-step evaluation for the "exception" calculus.
instance Calculus "exception" StrTree StrTree (M.Map Var StrTree) where
  data Term "exception" StrTree = ExceptionTerm StrTree deriving (Eq, Show)
  -- Values are exactly the values of the simply-typed fragment.
  isValue (ExceptionTerm t) = isValue (SimplyTerm t)
  -- Typing: an annotated error takes the type it is ascribed.
  typeof ctx (ExceptionTerm t) = go ctx t where
    go ctx (Terroras t) = return t
    go ctx Ttrue = return Kbool
    go ctx Tfalse = return Kbool
    go ctx (Tif t a b) = do
      tt <- go ctx t
      case tt of
        Kbool -> do
          ta <- go ctx a
          tb <- go ctx b
          if ta == tb then return ta else throwM ArmsOfConditionalHasDifferentTypes
        _ -> throwM GuardOfConditionalNotABoolean
    go ctx (Tvar x) = return $ ctx M.! x
    go ctx (Tabs x xt t) = do
      tt <- go (M.insert x xt ctx) t
      return $ Karr xt tt
    go ctx (Tapp tx ty) = do
      txTyp <- go ctx tx
      tyTyp <- go ctx ty
      case txTyp of
        Karr txTyp1 txTyp2 ->
          if tyTyp == txTyp1 then return txTyp2
          else throwM ParameterTypeMismatch
        _ -> throwM ArrowTypeExpected
    -- try/with: both the body and the handler must share one type.
    go ctx (Ttry t1 t2) = join $ liftM2 (expect ExpectedType) (go ctx t1) (go ctx t2)
  -- Small-step evaluation; errors propagate through application and
  -- are caught only by 'Ttry'.
  eval1 (ExceptionTerm t) = fmap ExceptionTerm $ go t where
    go (Tapp (Terroras typ) t) = return $ Terroras typ
    go (Tapp v (Terroras typ)) | isValue (ExceptionTerm v) = return $ Terroras typ
    go (Tif Ttrue t1 t2) = return t1
    go (Tif Tfalse t1 t2) = return t2
    go (Tif t1 t2 t3) = do
      t1' <- go t1
      return $ Tif t1' t2 t3
    go (Tapp (Tabs x typ11 t12) v) = return $ subst x v t12
    go (Tapp tx ty)
      | isValue (SimplyTerm tx) = do
        ty' <- go ty
        return $ Tapp tx ty'
      | otherwise = do
        tx' <- go tx
        return $ Tapp tx' ty
    -- A finished try yields its value; an error switches to the handler.
    go (Ttry v1 t2) | isValue (ExceptionTerm v1) = return v1
    go (Ttry (Terroras _) t2) = return t2
    go (Ttry t1 t2) = Ttry <$> go t1 <*> return t2
    go _ = throwM NoRuleApplies
  -- Capture-unaware substitution of p for variable v.
  subst v p = go where
    go Ttrue = Ttrue
    go Tfalse = Tfalse
    go (Tif b t1 t2) = Tif (go b) (go t1) (go t2)
    go (Tvar y)
      | v == y = p
      | otherwise = Tvar y
    go (Tabs y yt t)
      | v == y = Tabs y yt t
      | otherwise = Tabs y yt (go t)
    go (Tapp t1 t2) = Tapp (go t1) (go t2)
    go (Terroras typ) = Terroras typ
    go (Ttry t1 t2) = Ttry (go t1) (go t2)
| myuon/typed | tapl/src/Typed/Exception.hs | mit | 3,013 | 0 | 16 | 892 | 1,306 | 636 | 670 | -1 | -1 |
{-# htermination (foldl1 :: (a -> a -> a) -> (List a) -> a) #-}
import qualified Prelude
-- Self-contained re-encodings of Bool and [] used by the termination
-- analyser (the htermination pragma above); deliberately Prelude-free.
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
-- Left fold over the custom list type.
foldl :: (a -> b -> a) -> a -> (List b) -> a;
foldl f z Nil = z;
foldl f z (Cons x xs) = foldl f (f z x) xs;
-- Fold seeded with the first element; intentionally partial on 'Nil',
-- mirroring Prelude 'foldl1'.
foldl1 :: (a -> a -> a) -> (List a) -> a;
foldl1 f (Cons x xs) = foldl f x xs;
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/foldl1_1.hs | mit | 376 | 3 | 9 | 121 | 199 | 97 | 102 | 8 | 1 |
{-# LANGUAGE TupleSections #-}
module Examples.GGM where
import Examples.Goldreich
import Circuit
import Circuit.Builder
import Circuit.Utils
import Control.Monad
import Control.Monad.Trans
import Text.Printf
-- | Named circuit families, keyed by suite name; each entry pairs an
-- output file name with the (IO-built) circuit it denotes.
export :: Gate g => [(String, [IO (String, Circuit g)])]
export =
    [ ("big_ggm", [("ggm_4_128" ,) <$> ggm 16 128 16])
    , ("ggm", [ ("ggm_1_32" ,) <$> ggm 4 32 16
              , ("ggm_2_32" ,) <$> ggm 8 32 16
              , ("ggm_3_32" ,) <$> ggm 12 32 16
              , ("ggm_4_32" ,) <$> ggm 16 32 16
              , ("ggm_1_64" ,) <$> ggm 4 64 16
              , ("ggm_2_64" ,) <$> ggm 8 64 16
              , ("ggm_3_64" ,) <$> ggm 12 64 16
              , ("ggm_4_64" ,) <$> ggm 16 64 16
              , ("ggm_1_128" ,) <$> ggm 4 128 16
              , ("ggm_2_128" ,) <$> ggm 8 128 16
              , ("ggm_3_128" ,) <$> ggm 12 128 16
              , ("ggm_4_128" ,) <$> ggm 16 128 16
              ] )
    -- Cartesian product of iteration count, key size and stretch.
    , ("ggm_sigma_more", map (\(niter, keysize, stretch) ->
            let name = printf "ggm_sigma_%d_%d_%d" niter stretch keysize
            in (name ,) <$> ggmSigma niter keysize stretch)
        [ (niter, keysize, stretch) | niter <- [1..4], keysize <- [32,64,128], stretch <- [16,32,64,128,256] ]
      )
    , ("ggm_sigma", [ ("ggm_sigma_1_16_32" ,) <$> ggmSigma 1 32 16
                    , ("ggm_sigma_2_16_32" ,) <$> ggmSigma 2 32 16
                    , ("ggm_sigma_3_16_32" ,) <$> ggmSigma 3 32 16
                    , ("ggm_sigma_4_16_32" ,) <$> ggmSigma 4 32 16
                    , ("ggm_sigma_1_16_64" ,) <$> ggmSigma 1 64 16
                    , ("ggm_sigma_2_16_64" ,) <$> ggmSigma 2 64 16
                    , ("ggm_sigma_3_16_64" ,) <$> ggmSigma 3 64 16
                    , ("ggm_sigma_4_16_64" ,) <$> ggmSigma 4 64 16
                    , ("ggm_sigma_1_16_128" ,) <$> ggmSigma 1 128 16
                    , ("ggm_sigma_2_16_128" ,) <$> ggmSigma 2 128 16
                    , ("ggm_sigma_3_16_128" ,) <$> ggmSigma 3 128 16
                    , ("ggm_sigma_4_16_128" ,) <$> ggmSigma 4 128 16
                    , ("ggm_sigma_1_32_32" ,) <$> ggmSigma 1 32 32
                    , ("ggm_sigma_1_32_64" ,) <$> ggmSigma 1 64 32
                    , ("ggm_sigma_1_32_128" ,) <$> ggmSigma 1 128 32
                    , ("ggm_sigma_2_32_32" ,) <$> ggmSigma 2 32 32
                    , ("ggm_sigma_2_32_64" ,) <$> ggmSigma 2 64 32
                    , ("ggm_sigma_2_32_128" ,) <$> ggmSigma 2 128 32
                    , ("ggm_sigma_1_64_32" ,) <$> ggmSigma 1 32 64
                    , ("ggm_sigma_1_64_64" ,) <$> ggmSigma 1 64 64
                    , ("ggm_sigma_1_64_128" ,) <$> ggmSigma 1 128 64
                    , ("ggm_sigma_2_64_32" ,) <$> ggmSigma 2 32 64
                    , ("ggm_sigma_2_64_64" ,) <$> ggmSigma 2 64 64
                    , ("ggm_sigma_2_64_128" ,) <$> ggmSigma 2 128 64
                    ] )
    , ("ggm_sigma_256", [ ("ggm_sigma_1_256_32" ,) <$> ggmSigma 1 32 256
                        , ("ggm_sigma_1_256_64" ,) <$> ggmSigma 1 64 256
                        , ("ggm_sigma_1_256_128" ,) <$> ggmSigma 1 128 256
                        , ("ggm_sigma_2_256_32" ,) <$> ggmSigma 2 32 256
                        , ("ggm_sigma_2_256_64" ,) <$> ggmSigma 2 64 256
                        , ("ggm_sigma_2_256_128" ,) <$> ggmSigma 2 128 256
                        ] )
    , ("ggm_sigma_1024", [ ("ggm_sigma_2_1024_32" ,) <$> ggmSigma 2 32 1024
                         , ("ggm_sigma_2_1024_64" ,) <$> ggmSigma 2 64 1024
                         , ("ggm_sigma_2_1024_128" ,) <$> ggmSigma 2 128 1024
                         ] )
    ]
--------------------------------------------------------------------------------
-- ggm
-- choose the ith set from xs
-- | Multiplexer: pick the @ix@-th row of @xs@ by building a one-hot
-- selection vector, masking every row with its selection bit, and
-- summing the masked rows column-wise.
choose :: (Gate g, Monad m) => [Ref] -> [[Ref]] -> BuilderT g m [Ref]
choose ix xs = do
    oneHot <- selectionVector ix
    masked <- forM (zip oneHot xs) $ \(bit, row) -> mapM (circMul bit) row
    mapM circSum (transpose masked)
-- | One level of the GGM tree: expand the seed through the PRG into
-- seed-length chunks, then select the chunk addressed by the choice bits.
ggmStep :: (Gate g, Monad m) => Circuit g -> [Ref] -> [Ref] -> BuilderT g m [Ref]
ggmStep prg seed choice = do
    expanded <- subcircuit prg seed
    choose choice (safeChunksOf (length seed) expanded)
-- | GGM PRF circuit: a random secret key is walked down a PRG tree,
-- consuming the input in chunks of @numBits stretch@ bits per level.
ggm :: Gate g => Int -> Int -> Int -> IO (Circuit g)
ggm inputLength keyLength stretch = buildCircuitT $ do
    g <- lift $ prg' keyLength (stretch * keyLength) 5 xorAnd
    keyBits <- lift $ randKeyIO keyLength
    xs <- inputs inputLength
    seed <- secrets keyBits
    res <- foldM (ggmStep g) seed (safeChunksOf (numBits stretch) xs)
    outputs res
-- | Variant of 'ggm' with the PRG replaced by a trivial circuit that
-- just repeats its input @stretch@ times -- useful for sizing tests.
ggmNoPrg :: Gate g => Int -> Int -> Int -> IO (Circuit g)
ggmNoPrg inputLength keyLength stretch = buildCircuitT $ do
    let g = buildCircuit $ do
            xs <- inputs keyLength
            replicateM stretch (outputs xs)
    keyBits <- lift $ randKeyIO keyLength
    xs <- inputs inputLength
    seed <- secrets keyBits
    res <- foldM (ggmStep g) seed (safeChunksOf (numBits stretch) xs)
    outputs res
--------------------------------------------------------------------------------
-- ggm rachel
-- | Rachel-style GGM step: the choice vector is assumed to already be
-- one-hot, so the masked chunks are combined directly without building
-- a selection vector first.
ggmStepR :: (Gate g, Monad m) => Circuit g -> [Ref] -> [Ref] -> BuilderT g m [Ref]
ggmStepR prg seed choice = do
    chunks <- safeChunksOf (length seed) <$> subcircuit prg seed
    when (length choice /= length chunks) $ error "[ggmStepR] wrong input length"
    masked <- zipWithM (\bit row -> mapM (circMul bit) row) choice chunks
    mapM circSum (transpose masked)
-- set noutputs= logBase 2 symlen * num_prg
-- | Sigma-vector GGM: inputs are @num_prg@ one-hot symbols of length
-- @symlen@, each consumed by one 'ggmStepR' level.
ggmSigma :: Gate g => Int -> Int -> Int -> IO (Circuit g)
ggmSigma num_prg keyLength symlen = buildCircuitT $ do
    let outputLength = numBits symlen * num_prg
    g <- lift $ prg' keyLength (keyLength * symlen) 5 xorAnd
    keyBits <- lift $ randKeyIO keyLength
    xs <- replicateM num_prg (symbol symlen)
    -- NOTE(review): [0..SymId num_prg] covers num_prg+1 symbol ids while
    -- only num_prg symbols were created above -- confirm this is intended.
    mapM setSigma [0..SymId num_prg]
    seed <- secrets keyBits
    res <- foldM (ggmStepR g) seed xs
    outputs (take outputLength res)
| spaceships/circuit-synthesis | src/Examples/GGM.hs | mit | 6,024 | 0 | 16 | 2,069 | 1,973 | 1,032 | 941 | 106 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
module WS.Types where
import Control.Applicative ((<$>), (<*>))
import Data.Aeson
import Data.Aeson.Types
import Data.Fixed (Pico)
import Data.Serialize
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import Data.Time (LocalTime(..), Day(..), TimeOfDay(..))
import Database.HDBC.Query.TH (makeRecordPersistableDefault)
import Database.Record.TH (derivingEq, derivingShow)
import Database.Relational.Query
import GHC.Generics (Generic)
import Language.Haskell.TH.Name.CamelCase (conCamelcaseName)
import Prelude hiding (id)
import WS.DB
--data User = User
-- { id :: Int
-- , name :: Text
-- , emailAddress :: Text
-- , createdAt :: LocalTime
-- , lastLoggedinAt :: LocalTime
-- }
-- deriving (Eq, Generic, Show)
-- Template Haskell: generates the 'User' record (shown commented out
-- above) plus relational-query machinery from the @main.user@ table,
-- mapping VARCHAR columns to 'Text' and deriving 'Generic'.
$(defineTable
  [("VARCHAR", [t|Text|])]
  "main"
  "user"
  [derivingEq, derivingShow, conCamelcaseName "Generic"])
-- | Insert projection of 'User': every column except the
-- auto-generated primary key.
data InsertUser = InsertUser
  { insName :: Text
  , insEmailAddress :: Text
  , insCreatedAt :: LocalTime
  , insLastLoggedinAt :: LocalTime
  }
$(makeRecordPersistableDefault ''InsertUser)
-- | Projection path selecting the insertable columns of 'User', in
-- 'InsertUser' field order.
piUser :: Pi User InsertUser
piUser = InsertUser |$| name'
                    |*| emailAddress'
                    |*| createdAt'
                    |*| lastLoggedinAt'
instance Serialize User
-- NOTE(review): the instances below are orphans (neither the class nor
-- the types are declared in this module); consider newtype wrappers.
-- Text round-trips through UTF-8; decodeUtf8 throws on malformed input.
instance Serialize Text where
  put = put . encodeUtf8
  get = decodeUtf8 <$> get
-- Days are serialized as their Modified Julian Day number.
instance Serialize Day where
  put = put . toModifiedJulianDay
  get = ModifiedJulianDay <$> get
instance Serialize TimeOfDay where
  put tod = do
    put $ todHour tod
    put $ todMin tod
    put $ todSec tod
  get = TimeOfDay <$> get <*> get <*> get
-- Pico goes through show/read; NOTE(review): 'read' is partial on
-- corrupt input.
instance Serialize Pico where
  put = put . show
  get = read <$> get
instance Serialize LocalTime where
  put lt = do
    put $ localDay lt
    put $ localTimeOfDay lt
  get = LocalTime <$> get <*> get
-- | Aeson options mapping camelCase record fields to snake_case JSON keys.
options :: Options
options = defaultOptions { fieldLabelModifier = camelTo2 '_' }
-- Generic JSON codecs, all sharing the snake_case 'options'.
instance FromJSON User where
  parseJSON = genericParseJSON options
instance ToJSON User where
  toJSON = genericToJSON options
-- | Registration request payload.
data RegForm = RegForm
  { regName :: Text
  , regEmailAddress :: Text
  }
  deriving (Eq, Generic, Show)
instance FromJSON RegForm where
  parseJSON = genericParseJSON options
instance ToJSON RegForm where
  toJSON = genericToJSON options
-- | Login request payload.
data LoginForm = LoginForm
  { loginName :: Text
  }
  deriving (Eq, Generic, Show)
instance FromJSON LoginForm where
  parseJSON = genericParseJSON options
instance ToJSON LoginForm where
  toJSON = genericToJSON options
| krdlab/haskell-webapp-testing-experimentation | src/WS/Types.hs | mit | 2,801 | 0 | 9 | 659 | 680 | 385 | 295 | 79 | 1 |
{-# Language NoImplicitPrelude #-}
module Peano where
import Prelude
import Data.Monoid
-- | A monoid (addition with 'mempty' as zero) extended with a
-- multiplicative unit 'sone' and product 'stimes'.
class Monoid a => Semiring a where
  sone :: a
  stimes :: a -> a -> a
-- | Unary (Peano) naturals: a number is the count of 'Succ' layers.
data Nat = Zero
         | Succ Nat
         deriving (Show, Read, Eq, Ord)
-- | Additive monoid: 'mappend' is Peano addition, 'mempty' is zero.
-- NOTE(review): modern GHC also requires a 'Semigroup' instance.
instance Monoid Nat where
  mempty = Zero
  mappend = plus
-- | Multiplication as repeated addition: @stimes n m@ sums
-- @fromEnum n@ copies of @m@.
instance Semiring Nat where
  sone = Succ Zero
  stimes n m = mconcat $ replicate (fromEnum n) m
instance Enum Nat where
  succ = successor
  pred = predecessor
  -- Index into the infinite stream Zero, Succ Zero, ... (O(n)).
  toEnum = (iterate successor Zero !!)
  fromEnum Zero = 0
  fromEnum (Succ Zero) = 1  -- redundant with the next equation; kept as-is
  fromEnum (Succ n) = fromEnum n + 1
-- So now you can do
-- fromEnum $ succ . succ . succ . succ $ Zero
-- -> 4
-- | True exactly for 'Zero'.
isZero :: Nat -> Bool
isZero n = case n of
  Zero -> True
  Succ _ -> False
-- | Partial inverse of 'successor'.
--
-- 'Zero' has no predecessor in the naturals; the previous bare
-- 'undefined' is replaced with a descriptive 'error' so failures
-- are diagnosable (both are bottom, so callers are unaffected).
predecessor :: Nat -> Nat
predecessor Zero = error "Peano.predecessor: Zero has no predecessor"
predecessor (Succ n) = n
-- | Wrap a natural in one more 'Succ' layer, i.e. add one.
successor :: Nat -> Nat
successor = Succ
-- | Peano addition by structural recursion on the first argument.
plus :: Nat -> Nat -> Nat
plus Zero m = m
plus (Succ n) m = Succ (n `plus` m)
-- | Subtraction, defined only when the second argument is no larger
-- than the first.
--
-- Subtracting past zero was a bare 'undefined'; it is now a
-- descriptive 'error' (still bottom, so callers are unaffected).
-- The two @Zero@-subtrahend equations are merged into one.
minus :: Nat -> Nat -> Nat
minus n Zero = n
minus Zero (Succ _) = error "Peano.minus: result would be negative"
minus (Succ n) (Succ o) = n `minus` o
| passy/peano | src/Peano.hs | mit | 1,100 | 0 | 9 | 286 | 426 | 226 | 200 | 39 | 1 |
-- specs.hs
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Specs where
import qualified Database.Persist.TH as TH
-- Template Haskell: defines the 'MyRecord' entity (a single Int
-- column) and the 'migrateAll' migration covering it.
TH.share [TH.mkPersist TH.sqlSettings, TH.mkMigrate "migrateAll"] [TH.persistLowerCase|
MyRecord
    value Int
    deriving Show
|]
| kubkon/conduit-persistent-example | specs.hs | mit | 450 | 0 | 8 | 86 | 55 | 36 | 19 | 10 | 0 |
module Main where
import Test.Hspec
import TestNeuron
-- | Entry point: delegate straight to the neuron test suite.
main :: IO()
main = testNeuron
| mckeankylej/hwRecog | test/Main.hs | mit | 93 | 0 | 6 | 19 | 30 | 17 | 13 | 6 | 1 |
{-# LANGUAGE NegativeLiterals #-}
import Data.Int
-- Intermission: Exercises
--1. Given a datatype
-- | Sum of two Bool-carrying constructors: |Big Bool| + |Small Bool|.
data BigSmall =
  Big Bool
  | Small Bool
  deriving (Eq, Show)
-- What is the cardinality of this datatype? Hint: We already know Bool’s
-- cardinality. Show your work as demonstrated earlier.
-- 4
-- (Big: 2 values) + (Small: 2 values) = 4
-- 2. Given a datatype
data NumberOrBool =
  Numba Int8
  | BoolyBool Bool
  deriving (Eq, Show)
-- 258
-- (Numba: 256 Int8 values) + (BoolyBool: 2 values) = 258
-- Requires the NegativeLiterals extension enabled at the top of the file;
-- see the discussion below.
myNumba = Numba (-128)
-- What is the cardinality of NumberOrBool? What happens if you try to create a
-- Numba with a numeric literal larger than 127? And with a numeric literal
-- smaller than (-128)?
-- If you choose (-128) for a value precisely, you’ll notice you get a spurious warning:
-- Prelude> let n = Numba (-128)
-- Literal 128 is out of the Int8 range -128..127
-- If you are trying to write a large negative
-- literal, use NegativeLiterals
-- Now, since -128 is a perfectly valid Int8 value you could choose to ignore
-- this. What happens is that (-128) desugars into (negate 128). The compiler
-- sees that you expect the type Int8, but Int8’s maxBound is 127. So even
-- though you’re negating 128, it hasn’t done that step yet and immediately
-- whines about 128 being larger than 127. One way to avoid the warning is the
-- following:
-- Prelude> let n = (-128)
-- Prelude> let x = Numba n
-- Or you can use the NegativeLiterals extension as it recommends:
-- Prelude> :set -XNegativeLiterals
-- Prelude> let n = Numba (-128)
-- Note that the negative literals extension doesn’t prevent the warning if
-- you use negate.
| diminishedprime/.org | reading-list/haskell_programming_from_first_principles/11_08.hs | mit | 1,583 | 1 | 9 | 313 | 111 | 69 | 42 | 11 | 1 |
-- | Check that every bracket in the string is properly matched and
-- nested. Any character that is not an opening bracket is treated as
-- a closer and must match the bracket on top of the stack.
validBraces :: String -> Bool
validBraces = go []
  where
    go stack [] = null stack
    go stack (c:cs)
      | c `elem` "({[" = go (c : stack) cs
      | otherwise =
          case stack of
            (o:rest) -> matches o c && go rest cs
            [] -> False
    matches '(' ')' = True
    matches '{' '}' = True
    matches '[' ']' = True
    matches _ _ = False
| delta4d/codewars | kata/valid-braces/Braces.hs | mit | 369 | 1 | 15 | 141 | 230 | 107 | 123 | 10 | 4 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeSynonymInstances #-}
module PostgREST.Error (PgError, pgErrResponse, errResponse) where
import Data.Aeson ((.=))
import qualified Data.Aeson as JSON
import Data.String.Conversions (cs)
import Data.String.Utils (replace)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Hasql as H
import qualified Hasql.Postgres as P
import Network.HTTP.Types.Header
import qualified Network.HTTP.Types.Status as HT
import Network.Wai (Response, responseLBS)
-- | Errors produced by Hasql sessions against Postgres.
type PgError = H.SessionError P.Postgres

-- | Build a JSON error response with the given status and message.
--
-- The message is now encoded via Aeson, so quotes, backslashes or
-- control characters in it can no longer break the JSON document
-- (the previous version spliced the raw text into a format string).
errResponse :: HT.Status -> Text -> Response
errResponse status message =
  responseLBS status [(hContentType, "application/json")]
    (JSON.encode (JSON.object ["message" .= message]))
-- | Map a database error to an HTTP response: status from
-- 'httpStatus', body from the error's ToJSON instance.
pgErrResponse :: PgError -> Response
pgErrResponse e =
  responseLBS status [(hContentType, "application/json")] (JSON.encode e)
  where
    status = httpStatus e
-- Orphan instance rendering every Hasql error variant as a JSON object
-- with at least a "message" key; Postgres result errors also expose
-- code, details and hint.
instance JSON.ToJSON PgError where
  toJSON (H.TxError (P.ErroneousResult c m d h)) = JSON.object [
    "code" .= (cs c::T.Text),
    "message" .= (cs m::T.Text),
    "details" .= (fmap cs d::Maybe T.Text),
    "hint" .= (fmap cs h::Maybe T.Text)]
  toJSON (H.TxError (P.NoResult d)) = JSON.object [
    "message" .= ("No response from server"::T.Text),
    "details" .= (fmap cs d::Maybe T.Text)]
  toJSON (H.TxError (P.UnexpectedResult m)) = JSON.object ["message" .= m]
  toJSON (H.TxError P.NotInTransaction) = JSON.object [
    "message" .= ("Not in transaction"::T.Text)]
  toJSON (H.CxError (P.CantConnect d)) = JSON.object [
    "message" .= ("Can't connect to the database"::T.Text),
    "details" .= (fmap cs d::Maybe T.Text)]
  -- Version numbers arrive as e.g. 90401; "0" is used as the separator.
  toJSON (H.CxError (P.UnsupportedVersion v)) = JSON.object [
    "message" .= ("Postgres version "++version++" is not supported") ]
    where version = replace "0" "." (show v)
  toJSON (H.ResultError m) = JSON.object ["message" .= m]
-- | Map a database error to an HTTP status: Postgres SQLSTATE class
-- prefixes (first two characters) pick the status for result errors;
-- everything else defaults as annotated below.
httpStatus :: PgError -> HT.Status
httpStatus (H.TxError (P.ErroneousResult codeBS _ _ _)) =
  let code = cs codeBS in
  case code of
    '0':'8':_ -> HT.status503 -- pg connection err
    '0':'9':_ -> HT.status500 -- triggered action exception
    '0':'L':_ -> HT.status403 -- invalid grantor
    '0':'P':_ -> HT.status403 -- invalid role specification
    '2':'5':_ -> HT.status500 -- invalid tx state
    '2':'8':_ -> HT.status403 -- invalid auth specification
    '2':'D':_ -> HT.status500 -- invalid tx termination
    '3':'8':_ -> HT.status500 -- external routine exception
    '3':'9':_ -> HT.status500 -- external routine invocation
    '3':'B':_ -> HT.status500 -- savepoint exception
    '4':'0':_ -> HT.status500 -- tx rollback
    '5':'3':_ -> HT.status503 -- insufficient resources
    '5':'4':_ -> HT.status413 -- too complex
    '5':'5':_ -> HT.status500 -- obj not on prereq state
    '5':'7':_ -> HT.status500 -- operator intervention
    '5':'8':_ -> HT.status500 -- system error
    'F':'0':_ -> HT.status500 -- conf file error
    'H':'V':_ -> HT.status500 -- foreign data wrapper error
    'P':'0':_ -> HT.status500 -- PL/pgSQL Error
    'X':'X':_ -> HT.status500 -- internal Error
    "42P01"   -> HT.status404 -- undefined table
    "42501"   -> HT.status404 -- insufficient privilege
    _         -> HT.status400
httpStatus (H.TxError (P.NoResult _)) = HT.status503
httpStatus _ = HT.status500
| NikolayS/postgrest | src/PostgREST/Error.hs | mit | 3,484 | 0 | 11 | 752 | 1,137 | 615 | 522 | 69 | 23 |
-- Copyright (c) Microsoft. All rights reserved.
-- Licensed under the MIT license. See LICENSE file in the project root for full license information.
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -fno-warn-incomplete-patterns #-}
module Tests.Codegen
( verifyCodegen
, verifyCppCodegen
, verifyCppGrpcCodegen
, verifyApplyCodegen
, verifyExportsCodegen
, verifyCsCodegen
, verifyCsGrpcCodegen
, verifyJavaCodegen
) where
import System.FilePath
import Control.Monad
import Data.Monoid
import Data.Maybe
import Prelude
import Data.Algorithm.DiffContext
import Data.Text.Lazy (Text, unpack)
import qualified Data.Text.Lazy as LT
import qualified Data.ByteString.Char8 as BS
import Text.PrettyPrint (render, text)
import Test.Tasty
import Test.Tasty.Golden.Advanced
import Language.Bond.Codegen.Templates
import Language.Bond.Codegen.TypeMapping
import Language.Bond.Syntax.Types (Bond(..), Import, Declaration(..))
import Options
import IO
-- | Shape of a codegen template: given a mapping context, the base
-- file name, imports and declarations, it yields the output-file
-- suffix and the generated text.
type Template = MappingContext -> String -> [Import] -> [Declaration] -> (String, Text)

-- | Golden tests for the default C++ codegen.
verifyCppCodegen :: FilePath -> TestTree
verifyCppCodegen = verifyCodegen ["c++"]

-- | Golden tests for the default C# codegen.
verifyCsCodegen :: FilePath -> TestTree
verifyCsCodegen = verifyCodegen ["c#"]

-- | Golden tests for the default Java codegen.
verifyJavaCodegen :: FilePath -> TestTree
verifyJavaCodegen = verifyCodegen ["java"]
-- | Build the golden-test group for one schema file, with the given
-- gbc command-line arguments selecting the language and options.
verifyCodegen :: [String] -> FilePath -> TestTree
verifyCodegen args baseName =
    testGroup baseName (verifyFiles options baseName)
  where
    options = processOptions args
-- | Golden tests for the C++ Apply codegen: generates apply_h and
-- apply_cpp for a fixed set of serialization protocols, comparing
-- against files in the "apply" subfolder.
verifyApplyCodegen :: [String] -> FilePath -> TestTree
verifyApplyCodegen args baseName =
    testGroup baseName $
    map (verifyFile options baseName cppTypeMapping "apply") templates
  where
    options = processOptions args
    -- Header and implementation templates for the Apply overloads.
    templates =
        [ apply_h protocols (export_attribute options)
        , apply_cpp protocols
        ]
    -- Protocol/buffer combinations the generated overloads cover.
    protocols =
        [ ProtocolReader "bond::CompactBinaryReader<bond::InputBuffer>"
        , ProtocolWriter "bond::CompactBinaryWriter<bond::OutputBuffer>"
        , ProtocolWriter "bond::CompactBinaryWriter<bond::OutputCounter>"
        , ProtocolReader "bond::FastBinaryReader<bond::InputBuffer>"
        , ProtocolWriter "bond::FastBinaryWriter<bond::OutputBuffer>"
        , ProtocolReader "bond::SimpleBinaryReader<bond::InputBuffer>"
        , ProtocolWriter "bond::SimpleBinaryWriter<bond::OutputBuffer>"
        ]
-- | Golden test for the dllexport codegen variant: only reflection_h
-- is generated, compared against files in the "exports" subfolder.
verifyExportsCodegen :: [String] -> FilePath -> TestTree
verifyExportsCodegen args baseName = testGroup baseName cases
  where
    options = processOptions args
    mapping = cppExpandAliases (type_aliases_enabled options) cppTypeMapping
    cases =
        [ verifyFile options baseName mapping "exports"
            (reflection_h (export_attribute options))
        ]
-- | Golden tests for the C++ gRPC codegen (grpc_h/grpc_cpp plus
-- types_cpp), compared against the top-level generated folder.
verifyCppGrpcCodegen :: [String] -> FilePath -> TestTree
verifyCppGrpcCodegen args baseName = testGroup baseName cases
  where
    options = processOptions args
    mapping = cppExpandAliases (type_aliases_enabled options) cppTypeMapping
    cases = map (verifyFile options baseName mapping "")
        [ grpc_h (export_attribute options)
        , grpc_cpp
        , types_cpp
        ]
-- | Golden tests for the C# gRPC codegen (grpc_cs plus types_cs with
-- the field style selected by the command-line options).
verifyCsGrpcCodegen :: [String] -> FilePath -> TestTree
verifyCsGrpcCodegen args baseName = testGroup baseName cases
  where
    options = processOptions args
    cases = map (verifyFile options baseName csTypeMapping "")
        [ grpc_cs
        , types_cs Class (fieldMapping options)
        ]
    -- C# field style: read-only properties win over public fields,
    -- plain properties are the default.
    fieldMapping Cs {..}
        | readonly_properties = ReadOnlyProperties
        | fields              = PublicFields
        | otherwise           = Properties
-- | The standard set of golden tests for one schema file: the default
-- templates for the selected language, plus language-specific extra
-- configurations (custom allocators, collection interfaces, ...).
verifyFiles :: Options -> FilePath -> [TestTree]
verifyFiles options baseName =
    map (verify (typeMapping options) "") (templates options)
    <>
    extra options
  where
    verify = verifyFile options baseName
    -- C# field style selected by command-line flags.
    -- NOTE(review): duplicated in verifyCsGrpcCodegen — consider sharing.
    fieldMapping Cs {..} = if readonly_properties
        then ReadOnlyProperties
        else if fields
            then PublicFields
            else Properties
    -- Type mapping for the target language; C++ honours the custom
    -- allocator, scoped-allocator and type-alias options.
    typeMapping Cpp {..} = cppExpandAliases type_aliases_enabled $ maybe cppTypeMapping (cppCustomAllocTypeMapping scoped_alloc_enabled) allocator
    typeMapping Cs {} = csTypeMapping
    typeMapping Java {} = javaTypeMapping
    -- Default templates per language; enum_h only when requested.
    templates Cpp {..} =
        [ (reflection_h export_attribute)
        , types_cpp
        , types_h header enum_header allocator alloc_ctors_enabled type_aliases_enabled scoped_alloc_enabled
        ] <>
        [ enum_h | enum_header]
    templates Cs {..} =
        [ types_cs Class $ fieldMapping options
        ]
    templates Java {} =
        [ javaCatTemplate
        ]
    -- Extra test groups exercising non-default option combinations.
    extra Cs {} =
        [ testGroup "collection interfaces" $
            map (verify csCollectionInterfacesTypeMapping "collection-interfaces") (templates options)
        ]
    -- The allocator-related groups only run when no allocator was
    -- given on the command line (note the list-comprehension guards).
    extra Cpp {..} =
        [ testGroup "custom allocator" $
            map (verify (cppExpandAliasesTypeMapping $ cppCustomAllocTypeMapping False "arena") "allocator")
                (templates $ options { allocator = Just "arena" })
        | isNothing allocator
        ] ++
        [ testGroup "constructors with allocator argument" $
            map (verify (cppExpandAliasesTypeMapping $ cppCustomAllocTypeMapping False "arena") "alloc_ctors")
                (templates $ options { allocator = Just "arena", alloc_ctors_enabled = True })
        | isNothing allocator
        ] ++
        [ testGroup "type aliases" $
            map (verify (cppCustomAllocTypeMapping False "arena") "type_aliases")
                (templates $ options { allocator = Just "arena", type_aliases_enabled = True })
        ] ++
        [ testGroup "scoped allocator" $
            map (verify (cppExpandAliasesTypeMapping $ cppCustomAllocTypeMapping True "arena") "scoped_allocator")
                (templates $ options { allocator = Just "arena", scoped_alloc_enabled = True })
        | isNothing allocator
        ]
    extra Java {} =
        [
        ]
-- | Run one template against one schema as a golden test: parse the
-- .bond file, generate code, and compare against the checked-in file
-- under tests/generated (updating it when the golden runner says so).
verifyFile :: Options -> FilePath -> TypeMapping -> FilePath -> Template -> TestTree
verifyFile options baseName typeMapping subfolder template =
    goldenTest suffix readGolden codegen cmp updateGolden
  where
    -- Probe the template with an empty context just to learn the
    -- output-file suffix it produces.
    (suffix, _) = template (MappingContext typeMapping [] [] []) "" [] []
    golden = "tests" </> "generated" </> subfolder </> baseName ++ suffix
    readGolden = BS.readFile golden
    updateGolden = BS.writeFile golden
    -- Parse the schema and run the template over its declarations.
    codegen = do
        aliasMapping <- parseAliasMappings $ using options
        namespaceMapping <- parseNamespaceMappings $ namespace options
        (Bond imports namespaces declarations) <- parseBondFile [] $ "tests" </> "schema" </> baseName <.> "bond"
        let mappingContext = MappingContext typeMapping aliasMapping namespaceMapping namespaces
        let (_, code) = template mappingContext baseName imports declarations
        -- NOTE(review): BS.pack . unpack goes Text -> String -> Char8
        -- ByteString, which truncates non-Latin-1 characters; fine as
        -- long as generated code is ASCII — confirm.
        return $ BS.pack $ unpack code
    -- Nothing on match; otherwise a 3-line context diff for the report.
    cmp x y = return $ if x == y then Nothing else Just $ diff x y
    diff x y = render $ prettyContextDiff
        (text golden)
        (text "test output")
        (text . BS.unpack)
        (getContextDiff 3 (BS.lines x) (BS.lines y))
-- | Pseudo-template for Java golden tests: Java codegen emits one file
-- per class, so for testing all generated classes and enums are
-- concatenated into a single "_concatenated.java" output.
javaCatTemplate :: MappingContext -> String -> [Import] -> [Declaration] -> (String, Text)
javaCatTemplate mappingContext _ imports declarations = (fileSuffix, body)
  where
    fileSuffix = "_concatenated.java"
    body = LT.concat (mapMaybe renderDecl declarations)
    -- Structs and enums produce code; everything else is skipped.
    renderDecl d@Struct {} = Just (class_java mappingContext imports d)
    renderDecl d@Enum {}   = Just (enum_java mappingContext d)
    renderDecl _           = Nothing
-- | When type aliases are enabled, leave the mapping untouched;
-- otherwise expand aliases via 'cppExpandAliasesTypeMapping'.
cppExpandAliases :: Bool -> TypeMapping -> TypeMapping
cppExpandAliases aliasesEnabled
    | aliasesEnabled = id
    | otherwise      = cppExpandAliasesTypeMapping
| sapek/bond | compiler/tests/Tests/Codegen.hs | mit | 7,770 | 0 | 17 | 1,850 | 1,849 | 972 | 877 | 162 | 9 |
module Transformations.Basic where
import Komposition
import Data.Vect.Float
import Data.Vect.Float.Util.Dim2 (angle2, rotMatrix2)
-- | A planar point transformation.
type Transformation = Vec2 -> Vec2

-- | Apply a transformation to a 'Komposition', Processing-style: the
-- result samples the original image at the transformed point, i.e. the
-- sampling function is pre-composed with @t@.
transform :: Transformation -> Komposition a -> Komposition a
transform t (Komposition f) = Komposition (f . t)
-- | Translate every point by the offset @d@.
translate :: Vec2 -> Transformation
translate d = (d &+)

-- | Express points relative to @p@.
-- NOTE(review): the section @(p &-)@ is @\x -> p &- x@, i.e. it
-- subtracts the point *from* @p@ — the negation of @x - p@.  Confirm
-- this orientation is intended.
relativeTo :: Point -> Transformation
relativeTo p = (p &-)

-- | Uniform scaling about the origin.
scale :: Float -> Transformation
scale = scalarMul

-- | Rotation about the origin by an angle in radians.
-- NOTE(review): @rotate2@ does not appear in the explicit import list
-- above (only angle2 and rotMatrix2 are imported) — confirm it is in
-- scope, otherwise this does not compile.
rotate :: Float -> Transformation
rotate = rotate2
-- Polar / Cartesian conversions.  A polar point is stored in a 'Vec2'
-- as (radius, angle).
-- | Convert a polar point (radius, angle in radians) to Cartesian.
fromPolar :: PolarPoint -> Vec2
fromPolar (Vec2 r t) = Vec2 (r * cos t) (r * sin t)

-- | Convert a Cartesian point to polar (radius, angle) form.
toPolar :: Vec2 -> PolarPoint
toPolar p = Vec2 (len p) (angle2 p)
-- | Shift the origin to the middle of a region of the given size:
-- translates points by minus half the width and height.
center :: Vec2 -> Transformation
center (Vec2 width height) = translate offset
  where
    offset = Vec2 (-width / 2) (-height / 2)
-- | Negate the x coordinate (mirror across the vertical axis).
flipX :: Transformation
flipX (Vec2 px py) = Vec2 (negate px) py

-- | Negate the y coordinate (mirror across the horizontal axis).
flipY :: Transformation
flipY (Vec2 px py) = Vec2 px (negate py)
-- | Inversion in the circle of radius @k@ centred at @x0@: a point at
-- distance @d@ from the centre maps to the point on the same ray at
-- distance @k^2 / d@.
--
-- Uses 'normsqr' for the squared distance (avoiding the previous
-- sqrt-then-square round trip) and a single scalar multiplication
-- instead of two nested ones.
-- NOTE(review): undefined at @x == x0@ (division by zero) — callers
-- must not invert the centre itself.
circleInversion :: Vec2 -> Float -> Transformation
circleInversion x0 k x = x0 &+ scalarMul (k * k / normsqr delta) delta
  where
    delta = x &- x0
-- | Rotate each point by an angle proportional to the logarithm of its
-- distance from the origin, producing a spiral distortion.
-- NOTE(review): at the origin @len p == 0@ so @log 0@ is -Infinity —
-- confirm callers never sample exactly at the origin.
swirl :: Float -> Transformation
swirl f p = rotate (f * log (len p)) p
-- | Express a vector in the coordinate system spanned by the columns
-- of @base@, by multiplying with the inverse matrix.  The inverse is
-- bound once, so partially applying to a base shares it across points.
chBase :: Mat2 -> Transformation
chBase base = (invBase *.)
  where
    invBase = inverse base
| Vetii/Komposition | Transformations/Basic.hs | gpl-2.0 | 1,267 | 0 | 13 | 238 | 506 | 270 | 236 | 31 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.