code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# OPTIONS_GHC -fglasgow-exts #-}
-- -fglasgow-exts for the type constraint of inScope
--
-- Circuit compiler for the Faerieplay hardware-assisted secure
-- computation project at Dartmouth College.
--
-- Copyright (C) 2003-2007, Alexander Iliev <sasho@cs.dartmouth.edu> and
-- Sean W. Smith <sws@cs.dartmouth.edu>
--
-- All rights reserved.
--
-- This code is released under a BSD license.
-- Please see LICENSE.txt for the full license and disclaimers.
--
module Faerieplay.Unroll where
-- import Debug.Trace
import Control.Monad.Error (Error, noMsg, strMsg, throwError, ErrorT(..))
import Control.Monad.Identity (Identity, runIdentity)
import Control.Monad.Writer (Writer, runWriter, tell)
import Data.Bits
import qualified Control.Monad.State as St
import Control.Monad.Trans (lift)
import Maybe (fromJust)
import List (unfoldr, partition)
import qualified Data.Map as Map
import IlievUtils.Misc ((<<), ilog2, unfoldrM,
mapTuple2, iterateWhileM,
fromJustMsg)
import Faerieplay.Stack (Stack(..), maybeLookup)
import qualified Faerieplay.Container as Cont
import Faerieplay.Common
import Faerieplay.Intermediate
import Faerieplay.HoistStms
import qualified Faerieplay.TypeChecker as Tc
-- | Unrolling state: the program's symbol tables paired with the current
-- scope stack.  Threaded through the whole unroll pass.
type MyStateT = (ProgTables , Scope)
-- and accessor functions:

-- | Apply a function to the ProgTables half of the state only.
applyToPT :: (ProgTables -> ProgTables) -> MyStateT -> MyStateT
applyToPT f (pt,scope) = (f pt, scope)
-- | Apply a function to the Scope half of the state only.
applyToScope f (pt,scope) = (pt, f scope)

-- Scope manipulations lifted to the full state pair.
pushScope' = applyToScope pushScope
incrScope' = applyToScope incrScope
popScope' = applyToScope popScope

-- Projections out of the state pair.
getPT = fst
getScope = snd
-- maximum function recursion depth
cMAXSCOPE = 128
-- type StateWithErr = St.StateT MyStateT ErrMonad

-- | The unrolling monad: error reporting (ErrorT) layered over State.
type StateWithErr = ErrorT MyErrorCtx (St.State MyStateT)

-- to throw an error inside the StateWithErr monad
throwErr :: Int -> String -> StateWithErr a
throwErr p msg = throwErrorCtx $ Err p msg

-- | Record a non-fatal error via Writer's 'tell'; does not abort.
logError line msg = tell [Err line msg]
-- the full version, using both State and Error

-- | Unroll an entire program: look up the main function (cMAINNAME) in the
-- function table, unroll each of its statements starting from a fresh
-- scope, and concatenate the results.  An error raised anywhere in the
-- unrolling monad is propagated as 'Left'.
unrollProg :: Prog -> ErrCtxMonad [Stm]
unrollProg (Prog pname pt@(ProgTables {funcs=fs})) =
    let (Func _ _ t form_args stms) = fromJustMsg ("failed to find \"" ++ cMAINNAME
                                                   ++ "\" function") $
                                      Map.lookup cMAINNAME fs
        startScope = pushScope []
        startState = (pt, startScope)
        (val_or_err,state') = St.runState (runErrorT $ mapM unroll stms)
                                          startState
    in case val_or_err of
         Left err -> Left err
         Right stmss -> Right (concat stmss)
-- scope invariants:
-- unroll is called with the correct scope depth, but with the top-level scope
-- number of the previous unroll call. Thus, have to increment the scope at the
-- start, and add a new depth before calling unroll recursively
-- :::::::::::::::::::::
unroll :: Stm -> StateWithErr [Stm]
-- :::::::::::::::::::::
-- unroll s@(SAss lv e@(EFunCall nm args)) = return [s]
{-
Implementation of reference parameters:
Replace all their usages in the body with the actual parameter
-}
-- Function-call assignment: inline the callee's body, scoping its locals,
-- substituting reference args, assigning non-ref args, and finally copying
-- the callee's return variable into the lvalue.
unroll s@(SAss lv e@(EFunCall nm args)) = genericUnroll unrollAss s
    where unrollAss scope (SAss lv e@(EFunCall nm args)) =
              do (Func name locals t form_args stms) <- extractFunc nm
                 let -- pair up the formal vars and their values
                     arg_complex = zip form_args args
                     -- split up reference and non-ref args (paired with their values),
                     -- ditch the formal var type
                     -- annotations, and make var's from the formal names
                     (ref_args,
                      nonref_args) = mapTuple2 (map (\((nm,t),val) ->
                                                         (formarg2var scope nm , val))
                                               ) $
                                     partition (isRefType . snd . fst) $
                                     arg_complex
                     -- prepare the statements
                     stms' = removeRefQuals .
                             replaceRefs ref_args .
                             addLocalScope scope locals $
                             stms
                     -- assignments to the non-ref formal params from their values
                     ass's = [SAss (ExpT t (EVar formarg))
                                   actarg
                              | (formarg,
                                 actarg@(ExpT t _)) <- nonref_args]
                     -- assignment to lval from the function return parameter
                     retass = [SAss lv (fcnRetVar nm scope)]
                 return $ ass's ++ stms' ++ retass
          -- NOTE: this is how formal param vars in the function body are
          -- marked in TypeChecker.hs, except there is no scope to deal with
          -- there. Now, want to re-generate those vars in order to assign them
          -- the actual param values, or replace them in the case of reference
          -- args.
          formarg2var scope name = addScope scope $ Tc.name2var [FormalParam] name
          -- add the local scope to *all* local var occurrences
          -- (the `trace` here is debug logging of the inlined call)
          addLocalScope scope locals stms = map (scopeVars locals scope) stms
                                            `trace`
                                            ("unroll func: name = " << nm <<
                                             "; locals = " << locals <<
                                             "; scope = " << scope)
          -- replace reference local vars with their referents
          -- remove the ExpT on the value before subbing it in, as there should already be
          -- an equivalent ExpT around the var
          replaceRefs ref_args = map (\stm -> foldl (\s (var,val) -> let (ExpT _ val_e) = val
                                                                     in
                                                                       subst var val_e s)
                                                    stm
                                                    ref_args)
          -- remove RefT's in ExpT annotations (as the actual refs are already removed by
          -- replaceRefs)
          removeRefQuals = map stmStripRefQual
-- Blocks: scope the block's local variables into the body statements.
unroll s@(SBlock _ _) = genericUnroll unrollBlock s
    where unrollBlock scope (SBlock locals stms) =
              -- replace all local variables with Scoped ones, in stms
              return $ map (scopeVars locals scope) stms
-- Counted for-loops: replicate the body once per counter value and
-- substitute the concrete counter value into each copy.
unroll s@(SFor _ _ _) = genericUnroll unrollFor s
    where unrollFor :: Scope -> Stm -> StateWithErr [Stm]
          unrollFor scope (SFor countVar countVals stms) =
              do -- the only new local variable in a for-loop body is
                 -- the counter variable, so create a VarSet
                 -- containing just that
                 let stms' = map (scopeVars (Cont.singleton (stripScope countVar))
                                            scope)
                                 stms
                     stmss = replicate (length countVals) stms'
                     --
                     -- and subst correct counter values into all the statements
                     --
                     -- References to the loop counter will have been scoped, so scope
                     -- countVar so it's the same.
                     countVar' = addScope scope countVar
                     -- a version of subst for each unrolled block of stm's
                     substs = [subst countVar' (lint val) | val <- countVals]
                     -- each 'stms' list is mapped a subst with the correct counter value
                     stmss' = zipWith map substs stmss
                 return $ concat stmss'
{-
unroll s@(SFor_C _ _ _ _ _) = genericUnroll unrollFor_C s
    where unrollFor_C scope (SFor_C countVar
                             begin_exp
                             stop_cond
                             update_ass
                             stms) =
              do countVals <- getCountVals countVar begin_exp stop_cond update_ass
                             `logDebug`
                             ("unroll(SFor_C): stop_cond = " << show stop_cond
                              << ", update_ass = " << show update_ass)
                 error ("Got the vals: " ++ show countVals)
          getCountVals countVar begin_exp stop_cond update =
              do begin <- evalStatic begin_exp
                 vals <- iterateWhileM (keepgoing countVar stop_cond)
                                       (nextVal countVar update)
                                       begin
                 return vals
          nextVal countVar (AssStm lval op rval) x =
              do let rval' = substExp countVar (lint x) rval
                 -- FIXME: have to use 'op' here, not just a straight assignment
                 lval' <- evalStatic rval'
                 return lval'
          keepgoing countVar stop_cond x =
              do let cond = substExp countVar (lint x) stop_cond
                 cond_val <- evalStatic cond
                 return (toEnum $ fromIntegral cond_val)
                        `logDebug`
                        ("keepgoing (x=" << x << "): cond_val = " << cond_val)
-}
-- Conditionals: unroll both branches independently; the test expression
-- itself is left as-is.
unroll s@(SIfElse test (locs1,stms1) (locs2,stms2)) =
    do stmss1' <- mapM unroll stms1
       stmss2' <- mapM unroll stms2
       return [SIfElse test (locs1, concat stmss1') (locs2, concat stmss2')]
-- simple leftovers: SAss without an EFunCall on the right, and SPrint
unroll s = return [s]
-- | Shared driver for all unroll cases: bump the scope counter, check the
-- recursion limit, run the case-specific worker @f@, then recursively
-- unroll its output one scope level deeper.
genericUnroll :: (Scope -> Stm -> StateWithErr [Stm]) -> Stm -> StateWithErr [Stm]
genericUnroll f stm = do St.modify incrScope'
                         scope <- St.gets getScope
                         checkScopeDepth scope
                         -- do the "real work"
                         stms <- f scope stm
                         -- and recurse
                         St.modify pushScope'
                         stmss <- mapM unroll stms
                         St.modify popScope'
                         return $ concat stmss
-- checkScopeDepth :: Scope -> ErrCtxMonad ()

-- | Abort when the scope stack exceeds cMAXSCOPE (runaway recursion).
-- NOTE(review): the error location 42 looks like a placeholder, not a
-- real source position.
checkScopeDepth scope
    | length scope > cMAXSCOPE = throwErrorCtx $
                                 Err 42 $ ("while unrolling: Function/Block recursion deeper than "
                                           << cMAXSCOPE)
    | otherwise = return ()
-- set the scope of variables which are local in this block, as indicated by the
-- set 'locals'
scopeVars :: VarSet -> Scope -> Stm -> Stm
scopeVars locals scope s = mapStm f_s f_e s
    where f_e (EVar v)
              | inScope locals v = EVar (addScope scope v)
          f_e e = e
          -- also set the scope of the counter variable inside its For loop
          f_s (SFor counter ctrvals stms) = SFor (addScope scope counter)
                                                 ctrvals
                                                 stms
          f_s s = s

-- | Is the (scope-stripped) variable a member of the local set?
inScope :: (Cont.Container c Var) => c -> Var -> Bool
inScope locals var = Cont.member (stripScope var) locals
-- to reconstruct the return variable of a function in a given scope
-- FIXME: rather awkward, may be better if a Func carried its return var around
-- explicitly
fcnRetVar fname scope = EVar (VScoped scope (VFlagged [RetVar] (VSimple fname)))

-- | Look up a function by name in the program tables; error if absent.
extractFunc name = do ProgTables {funcs=fs} <- St.gets getPT
                      case maybeLookup name [fs] of
                        (Just f) -> return f
                        _ -> throwErr 42 $ "Unroll.extractFunc "
                                           << name << " failed!"
--unrollStms = unrollFor
-- substitute a value for a variable into a statement
-- The only actual substitution is at EVar expressions, this here just
-- needs to recurse the computation through the statement (using mapStm)
subst :: Var -> Exp -> Stm -> Stm
subst var val s = mapStm f_s f_e s
    where f_e = substExp var val
          f_s = id

-- remove all RefT's in an Stm
stmStripRefQual = mapStm id f_e
    where f_e = mapExp f
          -- remove RefT's in an Exp
          f (ExpT (RefT t) e) = (ExpT t e)
          f e = e
------------------
-- Scope utilities
------------------
-- enter a new scope depth (eg. upon entering a function call)
-- uses the Stack class functions
pushScope :: Scope -> Scope
pushScope = push 0

-- | Leave the current scope depth.
popScope :: Scope -> Scope
popScope = pop
-- enter the next scope at the same depth (eg. from one function call to the
-- next)
incrScope = modtop (+1)

-- A small expression used for manual testing in GHCi.
testExp = (BinOp Plus (var "x") (ELit $ LInt 5))
|
ailiev/faerieplay-compiler
|
Faerieplay/Unroll.hs
|
bsd-3-clause
| 13,383
| 0
| 20
| 5,516
| 2,109
| 1,129
| 980
| -1
| -1
|
{-# OPTIONS_HADDOCK hide #-}
-- | Miscellaneous helper functions.
module System.Random.Random123.Misc where
-- | Apply a function to its result sequentially, additionally passing it
-- the current (0-based) iteration number.
--
-- Each intermediate result is forced with @($!)@ to avoid thunk build-up.
-- A count @n <= 0@ performs no iterations and returns the value unchanged;
-- the previous @i == n@ guard looped forever on negative @n@.
apply :: (Int -> a -> a) -> Int -> a -> a
apply f n = applyLoop 0 where
    applyLoop i v
        | i >= n = v
        | otherwise = applyLoop (i + 1) $! f i v
-- | Apply a function to its result sequentially, discarding the iteration
-- counter that 'apply' supplies.
apply_ :: (a -> a) -> Int -> a -> a
apply_ f = apply (const f)
|
fjarri/haskell-random123
|
System/Random/Random123/Misc.hs
|
bsd-3-clause
| 498
| 0
| 11
| 124
| 152
| 80
| 72
| 9
| 1
|
{-# LANGUAGE UnicodeSyntax #-}
module Main where
import Control.Monad.Loops (whileM_)
import System.IO (hFlush, hIsEOF, stdin, stdout)
import Typed.Parser (parseExpr)
import Typed.Semantics (Binding (NameBind), Context, Term (..),
Ty (..), eval, removeNames, typeOf)
main ∷ IO ()
-- | Tiny REPL for the simply typed lambda calculus: prompt with "λ ",
-- read a line until EOF, convert it to nameless (de Bruijn) form,
-- type-check it, and on success print "(value) : type"; on a type error
-- print the error message instead.
main = do
  putStr "λ "
  hFlush stdout
  whileM_ (fmap not $ hIsEOF stdin) $ do
    hFlush stdout
    input ← getLine
    -- removeNames converts the parsed term to de Bruijn indices.
    let expr = removeNames [] (parseExpr input)
    case typeOf [] expr of
      Right τ → let expr'     = eval [] expr
                    exprWType = (parens $ exprToString [] $ expr')
                                ++ " : " ++ typeToString τ
                in putStrLn exprWType
      Left err → putStrLn err
    putStr "λ "
    hFlush stdout
  putStrLn "Bye."
-- | Wrap a string in one pair of parentheses.
parens :: String -> String
parens s = concat ["(", s, ")"]
-- | Render a type.  NOTE(review): nested arrows are not parenthesised, so
-- @(Bool → Bool) → Bool@ and @Bool → Bool → Bool@ print identically —
-- fixing this would change user-visible output, so it is only flagged here.
typeToString ∷ Ty → String
typeToString (TyArr τ1 τ2)
  = typeToString τ1 ++ " → " ++ typeToString τ2
typeToString TyBool = "Bool"
-- | True when the given name is already bound somewhere in the context.
isNameBound :: Context -> String -> Bool
isNameBound ctx x = any ((== x) . fst) ctx
-- | Derive a name not yet bound in the context by appending primes, and
-- return the context extended with that fresh name bound as 'NameBind'.
pickFreshName :: Context -> String -> (Context, String)
pickFreshName ctx x =
  if isNameBound ctx x
    then pickFreshName ctx (x ++ "'")
    else ((x, NameBind) : ctx, x)
-- | Pretty-print a nameless term, inventing fresh surface names for
-- abstractions.  NOTE(review): the variable case uses @ctx !! x@, which
-- crashes on an out-of-range de Bruijn index — callers must supply a
-- context consistent with the term.
exprToString ∷ Context → Term → String
exprToString ctx (TmVar x)
  = case ctx !! x of (x', _) → x'
exprToString ctx (TmAbs x τ t)
  = let (ctx', x') = pickFreshName ctx x
    in "λ" ++ x' ++ " : " ++ typeToString τ ++ ". " ++ exprToString ctx' t
exprToString ctx (TmApp t1 t2)
  = exprToString ctx t1 ++ " " ++ exprToString ctx t2
exprToString _ TmTrue
  = "true"
exprToString _ TmFalse
  = "false"
exprToString ctx (TmIf t1 t2 t3)
  = let t1str = exprToString ctx t1
        t2str = exprToString ctx t2
        t3str = exprToString ctx t3
    in "if "    ++ t1str ++
       " then " ++ t2str ++
       " else " ++ t3str
|
ayberkt/TAPL
|
src/Typed/Main.hs
|
bsd-3-clause
| 2,066
| 0
| 23
| 622
| 763
| 382
| 381
| 59
| 2
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
module Duckling.Ordinal.UK.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.UK.Corpus
import Duckling.Testing.Asserts
-- | Corpus-driven tests for the Ukrainian (UK) Ordinal dimension.
tests :: TestTree
tests = testGroup "UK Tests"
  [ makeCorpusTest [This Ordinal] corpus
  ]
|
rfranek/duckling
|
tests/Duckling/Ordinal/UK/Tests.hs
|
bsd-3-clause
| 600
| 0
| 9
| 96
| 80
| 51
| 29
| 11
| 1
|
import Criterion
import Progression.Main
-- Benchmark the naive factorial at n = 10000, 20000, 30000 (nf forces
-- the full Integer result).
main = defaultMain (bgroup "fac" [bench (show n) (nf fac n) | n <- ns])
 where ns = [10^4 * k | k <- [1,2,3]] :: [Integer]
-- Naive recursive factorial; fac 0 = 1, and it diverges for negative
-- input (same contract as before).
fac n
  | n == 0    = 1
  | otherwise = n * fac (n - 1)
|
sebfisch/haskell-barchart
|
examples/factorial.hs
|
bsd-3-clause
| 199
| 0
| 11
| 46
| 126
| 67
| 59
| 6
| 1
|
module Parser (
parseExpr
) where
import Data.Functor.Identity
import Text.Parsec
import Text.Parsec.String (Parser)
import qualified Text.Parsec.Expr as Ex
import Lexer
import Syntax
-- Expressions

-- | An expression is one or more terms; juxtaposition parses as
-- left-associative application.
expr :: Parser Expr
expr = foldl1 App <$> many1 term

-- | Atomic expression forms.
term :: Parser Expr
term =
      lambda
  <|> literal
  <|> variable
  <|> parens expr
-- | Parse a lambda abstraction of the form @\\x : T. body@.
lambda :: Parser Expr
lambda = do
  reservedOp "\\"
  arg <- identifier
  reservedOp ":"
  -- renamed from 'argtype <- argtype', which shadowed the top-level parser
  ty <- argtype
  reservedOp "."
  body <- expr
  return $ Lam arg ty body
-- | A literal is a natural number or a boolean.
literal :: Parser Expr
literal = number <|> bool

-- | Natural-number literal.
number :: Parser Expr
number = Lit . LInt . fromIntegral <$> natural

-- | Boolean literal.  'read' is safe here: 'symbol' only ever yields the
-- exact strings "True" or "False".
bool :: Parser Expr
bool = Lit . LBool . read <$> (symbol "True" <|> symbol "False")

-- | A variable reference.
variable :: Parser Expr
variable = Var <$> identifier
-- Types

-- | Type expressions: atoms combined with right-associative @->@.
argtype :: Parser Type
argtype = Ex.buildExpressionParser typeOps typeAtom
  where
    infixOp x f = Ex.Infix (reservedOp x >> return f)
    typeOps = [
        [infixOp "->" TArr Ex.AssocRight]
      ]

-- | A type atom: a base type or a parenthesised type.
typeAtom :: Parser Type
typeAtom = typeLiteral <|> parens argtype

-- | Base types.  NOTE(review): "Bool"/"Int" are matched with reservedOp
-- (operator tokens) rather than reserved words — confirm the Lexer
-- registers them as operators.
typeLiteral :: Parser Type
typeLiteral =
      (reservedOp "Bool" >> return TBool)
  <|> (reservedOp "Int" >> return TInt)
-- Interface

-- | Parse a complete expression, returning a parse error on failure.
-- The source name "<stdin>" is used in error messages.
parseExpr :: String -> Either ParseError Expr
parseExpr s = parse (contents expr) "<stdin>" s
|
zanesterling/haskell-compiler
|
src/Parser.hs
|
bsd-3-clause
| 1,253
| 0
| 10
| 271
| 417
| 213
| 204
| 45
| 1
|
import Control.Monad
main :: IO ()
-- | Ask one colour question per number, collect the four answers in
-- order, then echo them back.
main = do
  colors <- forM [1,2,3,4] $ \a -> do
    putStrLn $ "Which color do you associate with the number " ++ show a ++ "?"
    -- 'color <- getLine; return color' was redundant; getLine is the result
    getLine
  putStrLn "The colors that you associate with 1, 2, 3 and 4 are: "
  -- mapM_ because the putStrLn results are discarded
  mapM_ putStrLn colors
|
ku00/h-book
|
src/WhichColor.hs
|
bsd-3-clause
| 297
| 0
| 14
| 88
| 87
| 41
| 46
| 8
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
module Aria.RaceController where
import Aria.Types
import Aria.RaceHistory
import Aria.Repo
import Aria.Scripts
import Data.Maybe
import Data.Acid.Advanced
import Control.Monad
import Control.Lens
import GHC.Generics hiding (to)
import System.Process
import System.IO
import Control.Exception
import Control.Monad.State
import Control.Monad.Reader
import Control.Concurrent
import Control.Concurrent.STM
import Control.Concurrent.STM.TVar
import System.FilePath ((</>))
import Control.Monad.Catch
import Data.Data
import Data.Monoid (Any(..),getAny)
-- | The race lifecycle: configured but not started, running (with one
-- optional process handle per racer), or stopped.
data RacingStatus = RaceSetup RaceHistoryData
                  | RaceStarted RaceHistoryData [Maybe ProcessHandle]
                  | RaceStopped

-- | Thrown when a process handle is expected but absent.
data NoHandleException = NoHandleException
  deriving (Eq,Ord,Show,Read,Data,Typeable)

instance Exception NoHandleException

-- | Thrown when a lane index is out of range.
data BadIndexError =
  BadIndexError Int
  deriving (Eq,Ord,Show,Read,Data,Typeable)

instance Exception BadIndexError

-- | Thrown when a racer has no build selected at race setup.
data NoSelectedBuildError = NoSelectedBuildError RacerId
  deriving (Eq,Ord,Show,Read,Data,Typeable)

instance Exception NoSelectedBuildError
-- | Run the action only when the current status is 'RaceStopped'.
whenRaceStopped :: (MonadState RacingStatus m) => m () -> m ()
whenRaceStopped x = do
  hist <- get
  case hist of
    RaceStopped -> x
    _ -> return ()

-- | Run the action only when the current status is 'RaceSetup'.
whenRaceSetup :: (MonadState RacingStatus m) => m () -> m ()
whenRaceSetup x = do
  hist <- get
  case hist of
    (RaceSetup _) -> x
    _ -> return ()

-- | Run the action only while a race is in progress ('RaceStarted').
whenRacing :: (MonadState RacingStatus m) => m () -> m ()
whenRacing x = do
  hist <- get
  case hist of
    (RaceStarted _ _) -> x
    _ -> return ()

-- | Run the action only when no race is in progress.
whenNotRacing :: (MonadState RacingStatus m) => m () -> m ()
whenNotRacing x = do
  hist <- get
  case hist of
    (RaceStarted _ _) -> return ()
    _ -> x
-- | Move from 'RaceStopped' to 'RaceSetup': gather each racer's selected
-- build (throwing 'NoSelectedBuildError' if one is missing) and build the
-- race history record.
setupRace :: (MonadIO m, MonadThrow m, MonadReader RepoAcid m, MonadState RacingStatus m) => [RacerId] -> m ()
setupRace rids = whenRaceStopped $ forM rids getRacerBuild >>= makeRaceHistory >>= put . RaceSetup
  where
    getRacerBuild rid = withRacer rid $ \racer -> guardMaybe (NoSelectedBuildError $ racer^.racerId) (racer^.selectedBuild) $ \bName -> return (racer^.racerId,bName)

-- | Move from 'RaceSetup' to 'RaceStarted': launch each racer's code
-- against its robot IP and start a clock per lane.
startRace :: (MonadThrow m, MonadIO m, MonadReader RepoAcid m, MonadState RacingStatus m) => m ()
startRace = whenRaceSetup $ do
  ips <- getRacerAcid >>= flip query' GetRobotIps
  (RaceSetup hist) <- get
  (rds,threads) <- unzip <$> forM (zip (hist^.histRaceData) ips) startRacer
  put $ RaceStarted (hist & histRaceData .~ rds) threads
  where
    -- NOTE(review): the lambda binding 'ips' below is unused (and shadows
    -- nothing useful) — the getRacerAcid result is simply discarded.
    startRacer (raceData,ip) = do
      ph <- getRacerAcid >>= \ips -> runRacerCode ip (raceData^.rdRId)
      clk <- startClock
      return (raceData & rdTime .~ clk,Just ph)

-- | Launch a racer's built binary as an external process in its own
-- process group, redirecting its stdout to a per-robot log in /tmp.
runRacerCode :: (MonadReader RepoAcid m, MonadIO m) => String -> RacerId -> m ProcessHandle
runRacerCode robotIp rid = do
  runPath <- getRacerAcid >>= fmap (_scriptCwd) . flip query' GetScriptConfig
  let cmdPath = runPath</>(show $ rid^.unRacerId)</>"build"</>"ariaracer"</>"ariaracer"
  logHandle <- liftIO $ openFile ("/tmp/robot_"++(show $ rid^.unRacerId)++"_log.txt") WriteMode
  (_,_,_,ph) <- liftIO . createProcess_ "Running Robot" $ (proc cmdPath ["-rh",robotIp]) {std_out = UseHandle logHandle, create_group = True, new_session = True}
  return ph
-- | Stop one or all lanes per the 'StopCommand': stop the affected
-- clocks, interrupt the affected process groups, and either stay in
-- 'RaceStarted' (some lanes still running) or persist the finished race
-- history and transition to 'RaceStopped'.
stopRace :: (MonadIO m, MonadThrow m, Monad m, MonadReader RepoAcid m, MonadState RacingStatus m) => StopCommand -> m ()
stopRace cmd = whenRacing $ do
  (RaceStarted hist phs) <- get
  newHist <- stopRaceClocks cmd hist
  newPhs <- foldM stopRacer phs $ take (hist^.histRaceData . (to length)) $ toLaneNumbers cmd
  let raceFlag = stillRacing newHist newPhs
  unless raceFlag $ do
    getRacerAcid >>= flip update' (AddRaceHistory newHist)
    put RaceStopped
  when raceFlag $ put $ RaceStarted newHist newPhs
  where
    -- Interrupt lane i's process group (if any) and clear its handle;
    -- throws BadIndexError when i is out of range.
    stopRacer :: (MonadIO m, MonadThrow m) => [Maybe ProcessHandle] -> Int -> m [Maybe ProcessHandle]
    stopRacer ph i = do
      guardMaybe (BadIndexError i) (ph ^? ix i) $ maybe (return ()) (liftIO . interruptProcessGroupOf)
      return $ ph & ix i .~ Nothing
    -- Translate a command into the (0-based) lane indices it affects.
    toLaneNumbers Abort = [0,1]
    toLaneNumbers (AbortLane i) = [i-1]
    toLaneNumbers (StopLane i) = [i-1]
    -- The race continues while some clock runs and some handle remains.
    stillRacing hist phs = (not . allStopped $ hist) && (getAny . mconcat . fmap (Any . isJust) $ phs)
-- | True exactly when the current status is 'RaceStarted'.
isRacing :: (Monad m, MonadState RacingStatus m) => m Bool
isRacing = gets started
  where
    started (RaceStarted _ _) = True
    started _                 = False
|
theNerd247/ariaRacer
|
src/Aria/RaceController.hs
|
bsd-3-clause
| 4,428
| 0
| 16
| 801
| 1,666
| 848
| 818
| -1
| -1
|
import System.Time.Extra
import System.Process.Extra
main :: IO ()
-- | CI driver: regenerate the hint summary, run hlint's self-test with a
-- tight stack (to catch deep recursion), then time an hlint run over its
-- own sources and report the duration.
main = do
  system_ "hlint --generate-summary"
  system_ "hlint --test +RTS -K512K"
  (time,_) <- duration $ system_ "hlint src --with-group=extra --with-group=future" -- "UNIPLATE_VERBOSE=-1 hlint src +RTS -K1K"
  putStrLn $ "Running HLint on self took " ++ showDuration time
|
ndmitchell/hlint
|
travis.hs
|
bsd-3-clause
| 355
| 0
| 9
| 64
| 75
| 37
| 38
| 8
| 1
|
module TestToAST where
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.HUnit
import Insomnia.SurfaceSyntax.ToAST (example1, example1_expected,
example2, example2_expected,
example3, example3_expected,
example4, example4_expected)
-- | Unit tests pairing each ToAST infix-parsing example with its
-- expected result.
infixParsingUnits :: TestTree
infixParsingUnits = testGroup "infix parsing "
  [ testCase "example1" $ example1 () @?= example1_expected
  , testCase "example2" $ example2 () @?= example2_expected
  , testCase "example3" $ example3 () @?= example3_expected
  , testCase "example4" $ example4 () @?= example4_expected
  ]

-- | All ToAST test groups.
units :: TestTree
units = testGroup "ToAST" [
  infixParsingUnits
  ]
|
lambdageek/insomnia
|
tests/TestToAST.hs
|
bsd-3-clause
| 749
| 0
| 10
| 203
| 164
| 90
| 74
| 16
| 1
|
module Generate where
-- needs: haskell-src-exts, curl >= 1.3.8
import Language.Haskell.Exts.Syntax
import Language.Haskell.Exts.Pretty
import Data.Char (toUpper)
import Network.HTTP
import Data.Text (Text, pack, unpack, breakOn, splitOn)
import qualified Data.Text as T
import Data.Maybe
import Data.List
-- | Pretty-print a generated Data.LanguageCodes module from the scraped
-- (name, bibliographic-code, two-letter-code) triples: an ISO639_1 data
-- type plus toChars / fromChars / language functions.
-- NOTE(review): variable/constructor names below are built with 'Symbol'
-- (operator names) where 'Ident' would normally be expected — confirm
-- the pretty-printer renders these as intended.
gen triples = prettyPrint $
  -- Module SrcLoc ModuleName [ModulePragma] (Maybe WarningText) (Maybe [ExportSpec]) [ImportDecl] [Decl]
  Module (SrcLoc n 0 0) (ModuleName n) [] Nothing Nothing
    [ImportDecl loc (ModuleName "Prelude") False False False Nothing Nothing
      (Just (True, [IThingAll (Ident "Ordering") ])) -- hidding Ordering because of the symbol LT
    ]
    [iso639_1, toCharsFun, fromCharsFun, languageFun]
  where
    n = "Data.LanguageCodes"
    loc = SrcLoc n 0 0
    -- The ISO639_1 enumeration, one constructor per two-letter code.
    iso639_1 = DataDecl loc DataType [] (Ident "ISO639_1") []
                 (codes languages_1) -- constructors
                 derivingClasses -- deriving
    code (_,c) = QualConDecl loc [] []
                 $ ConDecl (Ident $ map toUpper c) []
    codes lst = map code lst
    -- Keep only languages that actually have a two-letter code.
    languages_1 = catMaybes $ map (\(l,_,c) -> fmap (\a -> (unpack l, unpack a)) c) triples
    derivingClasses = map (\a -> (UnQual $ Ident a, []) )
                          ["Show", "Read", "Eq", "Enum", "Ord"]
    -- fromChars c1 c2: case over the character pair, Nothing as fallback.
    fromCharsFun = FunBind
      [ Match loc (Ident "fromChars")
          [PVar (Symbol "c1"), PVar (Symbol "c2")]
          Nothing
          ( UnGuardedRhs
              (Case ( Tuple Boxed [ Var $ UnQual $ Symbol "c1"
                                  , Var $ UnQual $ Symbol "c2"
                                  ]
                    ) (reverse $ languageNoneAlt
                                 : (map languageCodeAlt languages_1)
                      )
              )
          )
          (BDecls [])
      ]
    languageCodeAlt (_,c) =
      Alt loc (PTuple Boxed [ PLit Signless $ Char $ c !! 0
                            , PLit Signless $ Char $ c !! 1
                            ])
          (UnGuardedRhs $ App (Con $ UnQual $ Symbol "Just")
                              (Con $ UnQual $ Symbol $ map toUpper c))
          (BDecls [])
    languageNoneAlt =
      Alt loc PWildCard (UnGuardedRhs (Con $ UnQual $ Symbol "Nothing"))
          (BDecls [])
    -- toChars code: case over the constructor, yielding the char pair.
    toCharsFun = FunBind
      [ Match loc (Ident "toChars")
          [PVar (Symbol "code")]
          Nothing
          ( UnGuardedRhs (Case ( Var $ UnQual $ Symbol "code" )
                               ( map languageAlt languages_1)
                         )
          )
          (BDecls [])
      ]
    languageAlt (_,c) =
      Alt loc (PApp (UnQual $ Symbol $ map toUpper c) [])
          (UnGuardedRhs $ Tuple Boxed [ Lit (Char $ c !! 0)
                                      , Lit (Char $ c !! 1)
                                      ]
          )
          (BDecls [])
    -- language code: case over the constructor, yielding the English name.
    languageFun = FunBind
      [ Match loc (Ident "language")
          [PVar (Symbol "code")]
          Nothing
          ( UnGuardedRhs (Case ( Var $ UnQual $ Symbol "code" )
                               ( map languageNameAlt languages_1 )
                         )
          )
          (BDecls [])
      ]
    languageNameAlt (n,c) =
      Alt loc (PApp (UnQual $ Symbol $ map toUpper c) [])
          (UnGuardedRhs $ Lit $ String n)
          (BDecls [])
-- fetch table from official site

-- | Download the ISO 639-2 English language list as Text.
-- Performs a plain HTTP GET; any network failure surfaces as an IO error.
fetchTable = do
  str <- simpleHTTP (getRequest url) >>= getResponseBody
  let site = pack str
  return site
  where
    url = "http://www.loc.gov/standards/iso639-2/php/English_list.php"
-- dirty table parsing

-- | Scrape (english-name, 639-2 code, maybe 639-1 code) triples out of the
-- raw HTML table.  NOTE(review): relies on the exact page layout — 'tail',
-- '!!' on fixed cell positions, and a literal "&nbsp;" marker for a
-- missing two-letter code; any layout change breaks this.
parseTable html = map (rowToTriple . T.lines) $ rows html
  where
    rows h = filter isValid $ map fixLine (tail $ T.splitOn (pack "</tr>") h)
    -- Text between the first '>' and the following '<' of a cell.
    tagTxt = T.tail . T.takeWhile (/= '<') . T.dropWhile (/= '>')
    isValid t = T.take 3 t == T.pack "<tr"
    fixLine = T.replace (T.pack "><") (T.pack ">\n<")
              . T.dropWhile (/= '<')
    rowToTriple l = (tagTxt (l !! 1), tagTxt (l !! 4)
                    , if tagTxt (l !! 5) == pack "&nbsp;"
                        then Nothing
                        else Just (tagTxt (l !! 5))
                    )
-- | End-to-end generator: fetch the table, dedupe by two-letter code,
-- sort, and write the generated module to ../Data/LanguageCodes.hs.
doIt = do
  t <- fetchTable
  let uniqueLang f = nubBy (\(_,_,c1) (_,_,c2) -> c1 == c2) f
      sortedUnique = sortBy (\(_,_,c1) (_,_,c2) -> compare c1 c2) . uniqueLang
  writeFile "../Data/LanguageCodes.hs" $ gen (sortedUnique $ parseTable t)
|
HugoDaniel/iso639
|
generator/Generate.hs
|
bsd-3-clause
| 4,949
| 0
| 18
| 2,179
| 1,471
| 767
| 704
| 87
| 2
|
{-# LANGUAGE FlexibleContexts, TypeSynonymInstances, FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
module SimplePreprocessor (runPreprocessor) where
import Language.Preprocessor.Cpphs
import Control.Monad.IO.Class
-- | Fixed cpphs configuration: plain macro expansion with no extra
-- defines or include paths; no #line markers in the output; end-of-line
-- comments stripped.  (Added the previously missing type signature.)
options :: CpphsOptions
options = CpphsOptions { infiles = []
                       , outfiles = []
                       , defines = []
                       , includes = []
                       , preInclude = []
                       , boolopts = BoolOptions { macros = True
                                                , locations = False
                                                , hashline = True
                                                , pragma = False
                                                , stripEol = True
                                                , stripC89 = False
                                                , lang = True
                                                , ansi = False
                                                , layout = True
                                                , literate = False
                                                , warnings = True}
                       }

-- | Run cpphs over the given source text with 'options'; the input
-- filename passed to cpphs is empty.
runPreprocessor :: MonadIO m => String -> m String
runPreprocessor str = liftIO (runCpphs options "" str)
|
abakst/symmetry
|
checker/src/simple-parser/SimplePreprocessor.hs
|
mit
| 1,277
| 0
| 8
| 732
| 185
| 114
| 71
| 23
| 1
|
--
--
--
-----------------
-- Exercise 4.14.
-----------------
--
--
--
module E'4'14 where
import B'C'4
(
Move ( Rock , Paper , Scissors )
)
import E'4'11 ( Result ( Win , Lose , Draw ) )
import E'4'12 ( outcome )
import Test.QuickCheck hiding ( Result )
import Test.QuickCheck ( quickCheck )
-- | Exhaustive specification of 'outcome' over all nine move pairings
-- (same move draws; Rock < Paper < Scissors < Rock).
prop_outcome :: Bool
prop_outcome = and
  [ outcome Rock     Rock     == Draw
  , outcome Rock     Paper    == Lose
  , outcome Rock     Scissors == Win
  , outcome Paper    Paper    == Draw
  , outcome Paper    Rock     == Win
  , outcome Paper    Scissors == Lose
  , outcome Scissors Scissors == Draw
  , outcome Scissors Rock     == Lose
  , outcome Scissors Paper    == Win
  ]
-- GHCi> quickCheck prop_outcome
-- Note: This test data is quite manageable manually (3 * 3 test cases),
-- BUT in general we should write smarter tests and/or generate most
-- of it dynamically.
|
pascal-knodel/haskell-craft
|
_/links/E'4'14.hs
|
mit
| 976
| 0
| 23
| 292
| 228
| 134
| 94
| 19
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : $Header$
Copyright : Francisc-Nicolae Bungiu, Jacobs University Bremen
License : GPLv2 or higher, see LICENSE.txt
RDF signature and sentences
-}
module RDF.Sign where
import RDF.AS
import Common.Result
import Data.Data
import qualified Data.Set as Set
-- | An RDF signature: the sets of terms occurring in subject, predicate
-- and object position.
data Sign = Sign
  { subjects :: Set.Set Term
  , predicates :: Set.Set Term
  , objects :: Set.Set Term
  } deriving (Show, Eq, Ord, Typeable, Data)

-- | The signature with all three component sets empty.
emptySign :: Sign
emptySign = Sign
  { subjects = Set.empty
  , predicates = Set.empty
  , objects = Set.empty
  }
-- | Componentwise set difference of two signatures (a minus b).
diffSig :: Sign -> Sign -> Sign
diffSig a b = Sign
    { subjects   = subjects a `Set.difference` subjects b
    , predicates = predicates a `Set.difference` predicates b
    , objects    = objects a `Set.difference` objects b
    }
-- | Componentwise union: merge the first signature into the second.
addSign :: Sign -> Sign -> Sign
addSign toIns totalSign = Sign
    { subjects   = subjects totalSign `Set.union` subjects toIns
    , predicates = predicates totalSign `Set.union` predicates toIns
    , objects    = objects totalSign `Set.union` objects toIns
    }
-- | Componentwise subset test: is every component of a contained in b?
isSubSign :: Sign -> Sign -> Bool
isSubSign a b = and
    [ subjects a `Set.isSubsetOf` subjects b
    , predicates a `Set.isSubsetOf` predicates b
    , objects a `Set.isSubsetOf` objects b
    ]
-- | Union of two signatures; always succeeds (never adds diagnostics).
uniteSign :: Sign -> Sign -> Result Sign
uniteSign s1 s2 = return $ addSign s1 s2

-- | All entities of a signature, each tagged with its syntactic role.
symOf :: Sign -> Set.Set RDFEntity
symOf s = Set.unions
  [ Set.map (RDFEntity SubjectEntity) $ subjects s
  , Set.map (RDFEntity PredicateEntity) $ predicates s
  , Set.map (RDFEntity ObjectEntity) $ objects s ]
|
keithodulaigh/Hets
|
RDF/Sign.hs
|
gpl-2.0
| 1,596
| 0
| 10
| 348
| 530
| 279
| 251
| 38
| 1
|
{- |
Module : $Id: Isabelle.hs 13959 2010-08-31 22:15:26Z cprodescu $
Description : logic for the interactive higher order theorem prover Isabelle
Copyright : (c) Christian Maeder, DFKI Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable (except Isabelle.Logic_Isabelle)
This folder contains the interface to the Isabelle theorem prover.
"Isabelle.IsaSign" provides data structures for Isabelle signatures,
formulas and theories. These resemble the ML data structures that
Isabelle uses. However, the emphasis is on outputting theories
with the pretty printer ("Isabelle.IsaPrint"); hence, not only the
kernel language of Isabelle is supported. Because the Isabelle
logic is only used for proving, no parser and static analysis are provided.
"Isabelle.IsaProve" is an interactive interface to the Isabelle prover.
"Isabelle.CreateTheories" is the batch version.
"Isabelle.Logic_Isabelle" provides the Isabelle instance of
type class 'Logic.Logic.Logic'.
"Isabelle.IsaConsts" and
"Isabelle.Translate" are auxiliary modules used in the comorphisms
into Isabelle, as well as in the prover module.
-}
module Isabelle where
|
nevrenato/HetsAlloy
|
Isabelle.hs
|
gpl-2.0
| 1,235
| 0
| 2
| 189
| 5
| 4
| 1
| 1
| 0
|
{-# language TypeSynonymInstances, MultiParamTypeClasses #-}
module Grammatik.Akzeptor where
import Language.Type
import Language.Inter
import Grammatik.Type
import Grammatik.Property
import Grammatik.Machine
import Grammatik.Config as GC
import qualified Machine.Acceptor.Type as A
import qualified Machine.Acceptor.Inter
import Inter.Types
import Inter.Quiz
import Autolib.ToDoc
import Autolib.Informed
import Autolib.Util.Zufall
-- | The "Grammatik" acceptor task, instantiated from the example config.
acceptor :: Make
acceptor = quiz ( A.Acceptor "Grammatik" ) GC.example

-- | Acceptor instance type: grammars judged on String words with
-- grammar 'Property' constraints.
type Accept = A.Type Grammatik String Property

-- The task instance is projected unchanged.
instance Project A.Acceptor Accept Accept where
    project _ i = i
-- | Generate a task instance from the config: sample positive and
-- negative words from the configured language, keep only words up to
-- the configured maximum length, and package them with the required
-- grammar properties.
instance Generator A.Acceptor GC.Config Accept where
    generator _ config key = do
        let l = inter $ GC.lang config
            m = GC.max_num config
            e = GC.max_length config
            -- keep only words within the configured length bound
            small = \ w -> length w <= e
        yeah <- lift $ samples l m 0
        noh <- lift $ anti_samples l m 0
        return $ A.Make
               { A.machine_desc = text "Grammatik (als Akzeptor)"
               , A.data_desc = info $ GC.lang config
               , A.yeah = take m $ filter small yeah
               , A.noh = take m $ filter small noh
               , A.cut = GC.cut config
               , A.properties = Typ 0 : Monoton : GC.properties config
               , A.start = GC.start config
               }
|
florianpilz/autotool
|
src/Grammatik/Akzeptor.hs
|
gpl-2.0
| 1,337
| 0
| 13
| 361
| 393
| 209
| 184
| 36
| 1
|
module Test.QuickFuzz.Gen.Pki
( module Test.QuickFuzz.Gen.Pki.ASN1,
module Test.QuickFuzz.Gen.Pki.X509,
module Test.QuickFuzz.Gen.Pki.CRL
) where
import Test.QuickFuzz.Gen.Pki.ASN1
import Test.QuickFuzz.Gen.Pki.X509
import Test.QuickFuzz.Gen.Pki.CRL
|
elopez/QuickFuzz
|
src/Test/QuickFuzz/Gen/Pki.hs
|
gpl-3.0
| 255
| 0
| 5
| 22
| 61
| 46
| 15
| 7
| 0
|
{-#LANGUAGE GADTs, UndecidableInstances, RankNTypes, FlexibleContexts, PatternSynonyms, TypeSynonymInstances, FlexibleInstances, MultiParamTypeClasses #-}
module Carnap.Languages.ModalFirstOrder.Logic.Rules where
import Data.List (intercalate)
import Data.Typeable (Typeable)
import Data.Maybe (catMaybes)
import Text.Parsec
import Carnap.Core.Data.Util (scopeHeight)
import Carnap.Core.Unification.Unification (applySub,subst)
import Carnap.Core.Data.Classes
import Carnap.Core.Data.Types
import Carnap.Core.Data.Optics
import Carnap.Languages.ModalPropositional.Syntax
import Carnap.Languages.ModalFirstOrder.Syntax
import Carnap.Languages.PureFirstOrder.Logic.Rules (globalOldConstraint, globalNewConstraint)
import Carnap.Languages.ClassicalSequent.Syntax
import Carnap.Languages.ClassicalSequent.Parser
import Carnap.Languages.ModalFirstOrder.Parser
import Carnap.Languages.Util.LanguageClasses
import Carnap.Languages.Util.GenericConstructors
import Carnap.Calculi.NaturalDeduction.Syntax (DeductionLine(..),depth,assertion)
type FirstOrderModalSequentCalcOverWith b a = ClassicalSequentOver (ModalFirstOrderLexOverWith b a)
type FirstOrderModalSequentCalcLexOverWith b a = ClassicalSequentLexOver (ModalFirstOrderLexOverWith b a)
type FirstOrderModalIndexedSequentCalcWith a = ClassicalSequentOver (IndexedModalFirstOrderLexWith a)
type FirstOrderModalIndexedSequentCalcLexWith a = ClassicalSequentLexOver (IndexedModalFirstOrderLexWith a)
type FirstOrderModalIndexedSequentCalc = FirstOrderModalIndexedSequentCalcWith EndLang
pattern SeqQuant q = FX (Lx2 (Lx1 (Lx2 (Bind q))))
pattern SeqVar c a = FX (Lx2 (Lx1 (Lx5 (Function c a))))
pattern SeqTau c a = FX (Lx2 (Lx1 (Lx6 (Function c a))))
pattern SeqV s = SeqVar (Var s) AZero
pattern SeqT n = SeqTau (SFunc AZero n) AZero
pattern SeqSchemIdx c a = FX (Lx2 (Lx1 (Lx1 (Lx2 (Lx4 (Function c a))))))
pattern SeqSchmIdx n = SeqSchemIdx (SFunc AZero n) AZero
pattern SomeWorld = SeqSchmIdx 0
instance UniformlyEq (FirstOrderModalSequentCalcOverWith b a) => Eq (FirstOrderModalSequentCalcOverWith b a c) where
(==) = (=*)
liftAbsRule (SequentRule p c) = map (liftAbsSeq SomeWorld) p ∴ liftAbsSeq SomeWorld c
liftAbsSeq :: IndexingLang (FirstOrderModalIndexedSequentCalcLexWith a) (Term World) (Form Bool) (Form (World -> Bool)) =>
FirstOrderModalIndexedSequentCalcWith a (Term World)
-> FirstOrderModalIndexedSequentCalcWith a (Sequent (Form (World -> Bool)))
-> FirstOrderModalIndexedSequentCalcWith a (Sequent (Form Bool))
liftAbsSeq w (a :|-: s) = atSomeAnt a :|-: atSomeSuc s
where
--atSomeAnt :: FirstOrderModalIndexedSequentCalcWith a (Antecedent (Form (World -> Bool))) -> FirstOrderModalIndexedSequentCalcWith a (Antecedent (Form Bool))
atSomeAnt (x :+: y) = atSomeAnt x :+: atSomeAnt y
atSomeAnt (SA x) = SA (x `atWorld` w)
atSomeAnt (GammaV n) = GammaV n
atSomeAnt Top = Top
--atSomeSuc :: FirstOrderModalIndexedSequentCalcWith a (Succedent (Form (World -> Bool))) -> FirstOrderModalIndexedSequentCalcWith a (Succedent (Form Bool))
atSomeSuc (x :-: y) = atSomeSuc x :-: atSomeSuc y
atSomeSuc (SS x) = SS (x `atWorld` w)
atSomeSuc Bot = Bot
someWorld :: IndexingLang lex (Term World) (Form c) (Form (World -> Bool)) => FixLang lex (Term World)
someWorld = worldScheme 0
someOtherWorld :: IndexingLang lex (Term World) (Form c) (Form (World -> Bool)) => FixLang lex (Term World)
someOtherWorld = worldScheme 1
someThirdWorld :: IndexingLang lex (Term World) (Form c) (Form (World -> Bool)) => FixLang lex (Term World)
someThirdWorld = worldScheme 2
globalOldIdxConstraint cs ded lineno sub = globalOldConstraint (filter (\x -> not (applySub sub x =* world 0)) cs) ded lineno sub
globalNewIdxConstraint cs ded lineno sub = case globalNewConstraint cs ded lineno sub of
Nothing -> if world 0 `elem` (map (applySub sub) cs)
then Just "This rule requires new indicies, but the index 0 is never new"
else Nothing
k -> k
indexedModalFOSeqParser = liftAbsSeq (world 0) <$> (seqFormulaParser :: Parsec String u (FirstOrderModalIndexedSequentCalc (Sequent (Form (World -> Bool)))))
instance IndexedSchemeConstantLanguage (FirstOrderModalSequentCalcOverWith b a (Term Int)) where
taun = SeqT
instance ( Schematizable (a (FirstOrderModalIndexedSequentCalcWith a))
, StaticVar (FirstOrderModalIndexedSequentCalcWith a)
) => CopulaSchema (FirstOrderModalIndexedSequentCalcWith a) where
appSchema (SeqQuant (All x)) (LLam f) e = schematize (All x) (show (f $ SeqV x) : e)
appSchema (SeqQuant (Some x)) (LLam f) e = schematize (Some x) (show (f $ SeqV x) : e)
appSchema x y e = schematize x (show y : e)
lamSchema = defaultLamSchema
|
opentower/carnap
|
Carnap/src/Carnap/Languages/ModalFirstOrder/Logic/Rules.hs
|
gpl-3.0
| 5,077
| 0
| 18
| 1,040
| 1,471
| 769
| 702
| 70
| 6
|
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE UndecidableSuperClasses #-}
{-# LANGUAGE UnicodeSyntax #-}
{-# OPTIONS_GHC -Wextra -Wno-unused-imports -Wno-unticked-promoted-constructors -Wno-type-defaults -Wno-missing-signatures #-}
module Main where
import Control.Monad.IO.Class
import qualified GHC.Generics as GHC
import Generics.SOP
import qualified Generics.SOP as SOP
class ( GHC.Generic a, SOP.Generic a, HasDatatypeInfo a
, forall xs. Code a ~ '[xs]) => Foo m a where
instance ( GHC.Generic a, SOP.Generic a, HasDatatypeInfo a
, forall xs. Code a ~ '[xs]
, Monad m) => Foo m a where
-- error:
-- • Could not deduce: Code a ~ '[xs]
-- arising from the superclasses of an instance declaration
-- from the context: (GHC.Generic a, HasDatatypeInfo a,
-- forall (xs :: [*]). Code a ~ '[xs], Monad m)
-- bound by the instance declaration
-- at /run/user/1000/danteazsW1Q.hs:(22,10)-(24,47)
-- • In the instance declaration for ‘Foo m a’
main ∷ IO ()
main = do
putStrLn "You are standing at the end of a road before a small brick building."
putStrLn "Around you is a forest. A small stream flows out of the building and"
putStrLn "down a gully."
|
deepfire/mood
|
experiments/ghc/Main.hs
|
agpl-3.0
| 1,438
| 4
| 10
| 343
| 204
| 116
| 88
| -1
| -1
|
module Examples.ScoProd
( scowpspExample
, scolpspExample
, scospspExample
, scolphcspExample
) where
import Utils
import Graph
import LaTeX
import Algebra.Matrix
import Algebra.Semiring
import Algebra.Constructs.Scoped
import Algebra.Constructs.Lexico
import Policy.WidestPath
import Policy.ShortestPath
import Policy.LocalPreference
scowpspExample 0 = toMatrix (G (10,
[ (1,2, Int (mulId, SP 4)), (1,3, Int (mulId, SP 1))
, (2,1, Int (mulId, SP 4)), (2,3, Int (mulId, SP 1)), (2,4, Ext (WP 100, SP 0))
, (3,1, Int (mulId, SP 1)), (3,2, Int (mulId, SP 1)), (3,8, Ext (WP 10, SP 0))
, (4,2, Ext (WP 100, SP 0)), (4,5, Int (mulId, SP 1)), (4,6, Int (mulId, SP 1))
, (5,4, Int (mulId, SP 1)), (5,6, Int (mulId, SP 2))
, (6,4, Int (mulId, SP 1)), (6,5, Int (mulId, SP 2)), (6,7, Int (mulId, SP 4))
, (7,6, Int (mulId, SP 4)), (7,9, Ext (WP 100, SP 0))
, (8,3, Ext (WP 10, SP 0)), (8,9, Int (mulId, SP 3)), (8,10, Int (mulId, SP 1))
, (9,7, Ext (WP 100, SP 0)), (9,8, Int (mulId, SP 3)), (9,10, Int (mulId, SP 1))
, (10,8, Int (mulId, SP 1)), (10,9, Int (mulId, SP 1))
]))
scolpspExample 0 = toMatrix (G (10,
[ (1,2, Int (mulId, SP 4)), (1,3, Int (mulId, SP 1))
, (2,1, Int (mulId, SP 4)), (2,3, Int (mulId, SP 1)), (2,4, Ext (LP 100, SP 0))
, (3,1, Int (mulId, SP 1)), (3,2, Int (mulId, SP 1)), (3,8, Ext (LP 10, SP 0))
, (4,2, Ext (LP 100, SP 0)), (4,5, Int (mulId, SP 1)), (4,6, Int (mulId, SP 1))
, (5,4, Int (mulId, SP 1)), (5,6, Int (mulId, SP 2))
, (6,4, Int (mulId, SP 1)), (6,5, Int (mulId, SP 2)), (6,7, Int (mulId, SP 4))
, (7,6, Int (mulId, SP 4)), (7,9, Ext (LP 100, SP 0))
, (8,3, Ext (LP 100, SP 0)), (8,9, Int (mulId, SP 3)), (8,10, Int (mulId, SP 1))
, (9,7, Ext (LP 100, SP 0)), (9,8, Int (mulId, SP 3)), (9,10, Int (mulId, SP 1))
, (10,8, Int (mulId, SP 1)), (10,9, Int (mulId, SP 1))
]))
scolpspExample 1 = toMatrix (G (10,
[ (1,2, Int (mulId, SP 4)), (1,3, Int (mulId, SP 1))
, (2,1, Int (mulId, SP 4)), (2,3, Int (mulId, SP 1)), (2,4, Ext (LP 100, SP 0))
, (3,1, Int (mulId, SP 1)), (3,2, Int (mulId, SP 1)), (3,8, Ext (LP 10, SP 0))
, (4,2, Ext (LP 100, SP 0)), (4,5, Int (mulId, SP 1)), (4,6, Int (mulId, SP 1))
, (5,4, Int (mulId, SP 1)), (5,6, Int (mulId, SP 2))
, (6,4, Int (mulId, SP 1)), (6,5, Int (mulId, SP 2)), (6,7, Int (mulId, SP 4))
, (7,6, Int (mulId, SP 4)), (7,9, Ext (LP 100, SP 0))
, (8,3, Ext (LP 10, SP 0)), (8,9, Int (mulId, SP 3)), (8,10, Int (mulId, SP 1))
, (9,7, Ext (LP 100, SP 0)), (9,8, Int (mulId, SP 3)), (9,10, Int (mulId, SP 1))
, (10,8, Int (mulId, SP 1)), (10,9, Int (mulId, SP 1))
]))
scolphcspExample 0 = toMatrix (G (10,
[ (1,2, Int (mulId, SP 4)), (1,3, Int (mulId, SP 1))
, (2,1, Int (mulId, SP 4)), (2,3, Int (mulId, SP 1)), (2,4, Ext (Lex(LP 10, SP 1), SP 0))
, (3,1, Int (mulId, SP 1)), (3,2, Int (mulId, SP 1)), (3,8, Ext (Lex(LP 5, SP 1), SP 0))
, (4,2, Ext (Lex(LP 10, SP 1), SP 0)), (4,5, Int (mulId, SP 1)), (4,6, Int (mulId, SP 1))
, (5,4, Int (mulId, SP 1)), (5,6, Int (mulId, SP 2))
, (6,4, Int (mulId, SP 1)), (6,5, Int (mulId, SP 2)), (6,7, Int (mulId, SP 4))
, (7,6, Int (mulId, SP 4)), (7,9, Ext (Lex(LP 10, SP 1), SP 0))
, (8,3, Ext (Lex(LP 5, SP 1), SP 0)), (8,9, Int (mulId, SP 3)), (8,10, Int (mulId, SP 1))
, (9,7, Ext (Lex(LP 10, SP 1), SP 0)), (9,8, Int (mulId, SP 3)), (9,10, Int (mulId, SP 1))
, (10,8, Int (mulId, SP 1)), (10,9, Int (mulId, SP 1))
]))
scospspExample 0 = toMatrix (G (29,
-- Domain 1
[ (1,2, Int (mulId, SP 1)), (1,3, Int (mulId, SP 3))
, (2,1, Int (mulId, SP 1)), (2,3, Int (mulId, SP 1)), (2,5, Int (mulId, SP 1)), (2,6, Ext (SP 1, mulId))
, (3,1, Int (mulId, SP 3)), (3,2, Int (mulId, SP 1)), (3,4, Int (mulId, SP 2)), (3,5, Int (mulId, SP 2))
, (4,3, Int (mulId, SP 2)), (4,5, Int (mulId, SP 4)), (4,18, Ext (SP 1, mulId))
, (5,2, Int (mulId, SP 1)), (5,3, Int (mulId, SP 2)), (5,4, Int (mulId, SP 4)), (5,12, Ext (SP 2, mulId))
-- Domain 2
, (6,2, Ext (SP 1, mulId)), (6,7, Int (mulId, SP 1)), (6,8, Int (mulId, SP 4))
, (7,6, Int (mulId, SP 1)), (7,10, Int (mulId, SP 1))
, (8,6, Int (mulId, SP 4)), (8,9, Int (mulId, SP 1)), (8,10, Int (mulId, SP 1)), (8,11, Int (mulId, SP 4))
, (9,8, Int (mulId, SP 1)), (9,14, Ext (SP 1, mulId))
, (10,7, Int (mulId, SP 1)), (10,8, Int (mulId, SP 1)), (10,11, Int (mulId, SP 2))
, (11,8, Int (mulId, SP 4)), (11,10, Int (mulId, SP 2)), (11,24, Ext (SP 3, mulId))
-- Domain 3
, (12,5, Ext (SP 2, mulId)), (12,13, Int (mulId, SP 2))
, (13,12, Int (mulId, SP 2)), (13,14, Int (mulId, SP 5)), (13,16, Int (mulId, SP 1))
, (14,9, Ext (SP 1, mulId)), (14,13, Int (mulId, SP 4)), (14,17, Int (mulId, SP 1))
, (15,16, Int (mulId, SP 1)), (15,19, Ext (SP 4, mulId))
, (16,13, Int (mulId, SP 1)), (16,15, Int (mulId, SP 1)), (16,17, Int (mulId, SP 2))
, (17,14, Int (mulId, SP 1)), (17,16, Int (mulId, SP 2)), (17,24, Ext (SP 1, mulId))
-- Domain 4
, (18,4, Ext (SP 1, mulId)), (18,20, Int (mulId, SP 2))
, (19,15, Ext (SP 4, mulId)), (19,20, Int (mulId, SP 1)), (19,25, Ext (SP 2, mulId))
, (20,18, Int (mulId, SP 2)), (20,19, Int (mulId, SP 1)), (20,21, Int (mulId, SP 1)), (20,22, Int (mulId, SP 1))
, (21,20, Int (mulId, SP 1)), (21,23, Int (mulId, SP 4))
, (22,20, Int (mulId, SP 1)), (22,23, Int (mulId, SP 1)), (22,28, Ext (SP 1, mulId))
, (23,21, Int (mulId, SP 4)), (23,22, Int (mulId, SP 1))
-- Domain 5
, (24,11, Ext (SP 3, mulId)), (24,25, Int (mulId, SP 2)), (24,26, Int (mulId, SP 1)), (24,27, Int (mulId, SP 1))
, (25,19, Ext (SP 2, mulId)), (25,24, Int (mulId, SP 2)), (25,27, Int (mulId, SP 1)), (25,28, Int (mulId, SP 1))
, (26,24, Int (mulId, SP 1)), (26,27, Int (mulId, SP 1)), (26,29, Int (mulId, SP 2))
, (27,24, Int (mulId, SP 1)), (27,25, Int (mulId, SP 1)), (27,26, Int (mulId, SP 1)), (27,29, Int (mulId, SP 1))
, (28,22, Ext (SP 1, mulId)), (28,25, Int (mulId, SP 1)), (28,29, Int (mulId, SP 4))
, (29,26, Int (mulId, SP 2)), (29,27, Int (mulId, SP 1)), (29,28, Int (mulId, SP 4))
]))
scospspExample 1 = toMatrix (G (17,
-- Domain 1
[ (1,2, Int (mulId, SP 1)), (1,3, Int (mulId, SP 3))
, (2,1, Int (mulId, SP 1)), (2,3, Int (mulId, SP 1)), (2,5, Int (mulId, SP 1)), (2,6, Ext (SP 1, mulId))
, (3,1, Int (mulId, SP 3)), (3,2, Int (mulId, SP 1)), (3,4, Int (mulId, SP 2)), (3,5, Int (mulId, SP 2))
, (4,3, Int (mulId, SP 2)), (4,5, Int (mulId, SP 4))
, (5,2, Int (mulId, SP 1)), (5,3, Int (mulId, SP 2)), (5,4, Int (mulId, SP 4)), (5,12, Ext (SP 3, mulId))
-- Domain 2
, (6,2, Ext (SP 1, mulId)), (6,7, Int (mulId, SP 1)), (6,8, Int (mulId, SP 4))
, (7,6, Int (mulId, SP 1)), (7,10, Int (mulId, SP 1))
, (8,6, Int (mulId, SP 4)), (8,9, Int (mulId, SP 1)), (8,10, Int (mulId, SP 1)), (8,11, Int (mulId, SP 4))
, (9,8, Int (mulId, SP 1)), (9,14, Ext (SP 1, mulId))
, (10,7, Int (mulId, SP 1)), (10,8, Int (mulId, SP 1)), (10,11, Int (mulId, SP 2))
, (11,8, Int (mulId, SP 4)), (11,10, Int (mulId, SP 2))
-- Domain 3
, (12,5, Ext (SP 1, mulId)), (12,13, Int (mulId, SP 2))
, (13,12, Int (mulId, SP 2)), (13,14, Int (mulId, SP 5)), (13,16, Int (mulId, SP 1))
, (14,9, Ext (SP 3, mulId)), (14,13, Int (mulId, SP 4)), (14,17, Int (mulId, SP 1))
, (15,16, Int (mulId, SP 1))
, (16,13, Int (mulId, SP 1)), (16,15, Int (mulId, SP 1)), (16,17, Int (mulId, SP 2))
, (17,14, Int (mulId, SP 1)), (17,16, Int (mulId, SP 2))
]))
|
sdynerow/Semirings-Library
|
haskell/Examples/ScoProd.hs
|
apache-2.0
| 7,739
| 24
| 15
| 1,974
| 6,068
| 3,512
| 2,556
| 107
| 1
|
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
{-# LANGUAGE OverloadedStrings, RecordWildCards, ScopedTypeVariables #-}
module Main where
import Control.Exception
import Control.Monad
import Data.Functor
import Data.List.Split
import Data.String
import Network
import Network.URI
import System.Environment
import System.Exit
import qualified Data.ByteString.Lazy as LBS
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Vector as Vector
import qualified System.IO as IO
import ThriftTest_Iface
import ThriftTest_Types
import qualified ThriftTest_Client as Client
import Thrift.Transport
import Thrift.Transport.Framed
import Thrift.Transport.Handle
import Thrift.Transport.HttpClient
import Thrift.Protocol
import Thrift.Protocol.Binary
import Thrift.Protocol.Compact
import Thrift.Protocol.JSON
data Options = Options
{ host :: String
, port :: Int
, domainSocket :: String
, transport :: String
, protocol :: ProtocolType
-- TODO: Haskell lib does not have SSL support
, ssl :: Bool
, testLoops :: Int
}
deriving (Show, Eq)
data TransportType = Buffered IO.Handle
| Framed (FramedTransport IO.Handle)
| Http HttpClient
| NoTransport String
getTransport :: String -> String -> Int -> (IO TransportType)
getTransport "buffered" host port = do
h <- hOpen (host, PortNumber $ fromIntegral port)
IO.hSetBuffering h $ IO.BlockBuffering Nothing
return $ Buffered h
getTransport "framed" host port = do
h <- hOpen (host, PortNumber $ fromIntegral port)
t <- openFramedTransport h
return $ Framed t
getTransport "http" host port = let uriStr = "http://" ++ host ++ ":" ++ show port in
case parseURI uriStr of
Nothing -> do return (NoTransport $ "Failed to parse URI: " ++ uriStr)
Just(uri) -> do
t <- openHttpClient uri
return $ Http t
getTransport t host port = do return (NoTransport $ "Unsupported transport: " ++ t)
data ProtocolType = Binary
| Compact
| JSON
deriving (Show, Eq)
getProtocol :: String -> ProtocolType
getProtocol "binary" = Binary
getProtocol "compact" = Compact
getProtocol "json" = JSON
getProtocol p = error $ "Unsupported Protocol: " ++ p
defaultOptions :: Options
defaultOptions = Options
{ port = 9090
, domainSocket = ""
, host = "localhost"
, transport = "buffered"
, protocol = Binary
, ssl = False
, testLoops = 1
}
runClient :: (Protocol p, Transport t) => p t -> IO ()
runClient p = do
let prot = (p,p)
putStrLn "Starting Tests"
-- VOID Test
putStrLn "testVoid"
Client.testVoid prot
-- String Test
putStrLn "testString"
s <- Client.testString prot "Test"
when (s /= "Test") exitFailure
-- Bool Test
putStrLn "testBool"
bool <- Client.testBool prot True
when (not bool) exitFailure
putStrLn "testBool"
bool <- Client.testBool prot False
when (bool) exitFailure
-- Byte Test
putStrLn "testByte"
byte <- Client.testByte prot 1
when (byte /= 1) exitFailure
-- I32 Test
putStrLn "testI32"
i32 <- Client.testI32 prot (-1)
when (i32 /= -1) exitFailure
-- I64 Test
putStrLn "testI64"
i64 <- Client.testI64 prot (-34359738368)
when (i64 /= -34359738368) exitFailure
-- Double Test
putStrLn "testDouble"
dub <- Client.testDouble prot (-5.2098523)
when (abs (dub + 5.2098523) > 0.001) exitFailure
-- Binary Test
putStrLn "testBinary"
bin <- Client.testBinary prot (LBS.pack . reverse $ [-128..127])
when ((reverse [-128..127]) /= LBS.unpack bin) exitFailure
-- Struct Test
let structIn = Xtruct{ xtruct_string_thing = "Zero"
, xtruct_byte_thing = 1
, xtruct_i32_thing = -3
, xtruct_i64_thing = -5
}
putStrLn "testStruct"
structOut <- Client.testStruct prot structIn
when (structIn /= structOut) exitFailure
-- Nested Struct Test
let nestIn = Xtruct2{ xtruct2_byte_thing = 1
, xtruct2_struct_thing = structIn
, xtruct2_i32_thing = 5
}
putStrLn "testNest"
nestOut <- Client.testNest prot nestIn
when (nestIn /= nestOut) exitSuccess
-- Map Test
let mapIn = Map.fromList $ map (\i -> (i, i-10)) [1..5]
putStrLn "testMap"
mapOut <- Client.testMap prot mapIn
when (mapIn /= mapOut) exitSuccess
-- Set Test
let setIn = Set.fromList [-2..3]
putStrLn "testSet"
setOut <- Client.testSet prot setIn
when (setIn /= setOut) exitFailure
-- List Test
let listIn = Vector.fromList [-2..3]
putStrLn "testList"
listOut <- Client.testList prot listIn
when (listIn /= listOut) exitFailure
-- Enum Test
putStrLn "testEnum"
numz1 <- Client.testEnum prot ONE
when (numz1 /= ONE) exitFailure
putStrLn "testEnum"
numz2 <- Client.testEnum prot TWO
when (numz2 /= TWO) exitFailure
putStrLn "testEnum"
numz5 <- Client.testEnum prot FIVE
when (numz5 /= FIVE) exitFailure
-- Typedef Test
putStrLn "testTypedef"
uid <- Client.testTypedef prot 309858235082523
when (uid /= 309858235082523) exitFailure
-- Nested Map Test
putStrLn "testMapMap"
_ <- Client.testMapMap prot 1
-- Exception Test
putStrLn "testException"
exn1 <- try $ Client.testException prot "Xception"
case exn1 of
Left (Xception _ _) -> return ()
_ -> putStrLn (show exn1) >> exitFailure
putStrLn "testException"
exn2 <- try $ Client.testException prot "TException"
case exn2 of
Left (_ :: SomeException) -> return ()
Right _ -> exitFailure
putStrLn "testException"
exn3 <- try $ Client.testException prot "success"
case exn3 of
Left (_ :: SomeException) -> exitFailure
Right _ -> return ()
-- Multi Exception Test
putStrLn "testMultiException"
multi1 <- try $ Client.testMultiException prot "Xception" "test 1"
case multi1 of
Left (Xception _ _) -> return ()
_ -> exitFailure
putStrLn "testMultiException"
multi2 <- try $ Client.testMultiException prot "Xception2" "test 2"
case multi2 of
Left (Xception2 _ _) -> return ()
_ -> exitFailure
putStrLn "testMultiException"
multi3 <- try $ Client.testMultiException prot "success" "test 3"
case multi3 of
Left (_ :: SomeException) -> exitFailure
Right _ -> return ()
main :: IO ()
main = do
options <- flip parseFlags defaultOptions <$> getArgs
case options of
Nothing -> showHelp
Just Options{..} -> do
trans <- Main.getTransport transport host port
case trans of
Buffered t -> runTest testLoops protocol t
Framed t -> runTest testLoops protocol t
Http t -> runTest testLoops protocol t
NoTransport err -> putStrLn err
where
makeClient p t = case p of
Binary -> runClient $ BinaryProtocol t
Compact -> runClient $ CompactProtocol t
JSON -> runClient $ JSONProtocol t
runTest loops p t = do
let client = makeClient p t
replicateM_ loops client
putStrLn "COMPLETED SUCCESSFULLY"
parseFlags :: [String] -> Options -> Maybe Options
parseFlags (flag : flags) opts = do
let pieces = splitOn "=" flag
case pieces of
"--port" : arg : _ -> parseFlags flags opts{ port = read arg }
"--domain-socket" : arg : _ -> parseFlags flags opts{ domainSocket = read arg }
"--host" : arg : _ -> parseFlags flags opts{ host = arg }
"--transport" : arg : _ -> parseFlags flags opts{ transport = arg }
"--protocol" : arg : _ -> parseFlags flags opts{ protocol = getProtocol arg }
"-n" : arg : _ -> parseFlags flags opts{ testLoops = read arg }
"--h" : _ -> Nothing
"--help" : _ -> Nothing
"--ssl" : _ -> parseFlags flags opts{ ssl = True }
"--processor-events" : _ -> parseFlags flags opts
_ -> Nothing
parseFlags [] opts = Just opts
showHelp :: IO ()
showHelp = putStrLn
"Allowed options:\n\
\ -h [ --help ] produce help message\n\
\ --host arg (=localhost) Host to connect\n\
\ --port arg (=9090) Port number to connect\n\
\ --domain-socket arg Domain Socket (e.g. /tmp/ThriftTest.thrift),\n\
\ instead of host and port\n\
\ --transport arg (=buffered) Transport: buffered, framed, http\n\
\ --protocol arg (=binary) Protocol: binary, compact, json\n\
\ --ssl Encrypted Transport using SSL\n\
\ -n [ --testloops ] arg (=1) Number of Tests"
|
BluechipSystems/thrift
|
test/hs/TestClient.hs
|
apache-2.0
| 9,513
| 0
| 15
| 2,471
| 2,527
| 1,249
| 1,278
| 212
| 11
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="de-DE">
<title>>Run Applications | ZAP Extensions</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Suche</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
veggiespam/zap-extensions
|
addOns/invoke/src/main/javahelp/org/zaproxy/zap/extension/invoke/resources/help_de_DE/helpset_de_DE.hs
|
apache-2.0
| 982
| 83
| 41
| 160
| 404
| 212
| 192
| -1
| -1
|
module Lang.Hask.Semantics where
import FP
import Lang.Hask.Time
import Lang.Hask.CPS hiding (atom)
import Name
import Literal
import DataCon
import CoreSyn (AltCon(..))
-- Values
data Moment lτ dτ = Moment
{ timeLex :: lτ
, timeDyn :: dτ
} deriving (Eq, Ord)
makeLenses ''Moment
instance (Time ψ lτ, Time ψ dτ) => Bot (Moment lτ dτ) where bot = Moment tzero tzero
data Addr lτ dτ = Addr
{ addrName :: Name
, addrTime :: Moment lτ dτ
} deriving (Eq, Ord)
type Env lτ dτ = Map Name (Addr lτ dτ)
type Store ν lτ dτ = Map (Addr lτ dτ) (ν lτ dτ)
data ArgVal lτ dτ =
AddrVal (Addr lτ dτ)
| LitVal Literal
| TypeVal
deriving (Eq, Ord)
data Data lτ dτ = Data
{ dataCon :: DataCon
, dataArgs :: [ArgVal lτ dτ]
} deriving (Eq, Ord)
data FunClo lτ dτ = FunClo
{ funCloLamArg :: Name
, funCloKonArg :: Name
, funCloBody :: Call
, funCloEnv :: Env lτ dτ
, funCloTime :: lτ
} deriving (Eq, Ord)
data Ref lτ dτ = Ref
{ refAddr :: Addr lτ dτ
} deriving (Eq, Ord)
data KonClo lτ dτ = KonClo
{ konCloArg :: Name
, konCloBody :: Call
, konCloEnv :: Env lτ dτ
} deriving (Eq, Ord)
data ThunkClo lτ dτ = ThunkClo
{ thunkCloKonXLoc :: Int
, thunkCloKonXArg :: Name
, thunkCloKonKArg :: Name
, thunkCloFun :: Pico
, thunkCloArg :: Pico
, thunkCloEnv :: Env lτ dτ
, thunkCloTime :: lτ
} deriving (Eq, Ord)
data KonMemoClo lτ dτ = KonMemoClo
{ konMemoCloLoc :: Addr lτ dτ
, konMemoCloThunk :: ThunkClo lτ dτ
, konMemoCloArg :: Name
, konMemoCloBody :: Call
, konMemoCloEnv :: Env lτ dτ
} deriving (Eq, Ord)
data Forced lτ dτ = Forced
{ forcedVal :: ArgVal lτ dτ
} deriving (Eq, Ord)
class Val lτ dτ γν αν | αν -> γν where
botI :: αν lτ dτ
litI :: Literal -> αν lτ dτ
litTestE :: Literal -> αν lτ dτ -> γν Bool
dataI :: Data lτ dτ -> αν lτ dτ
dataAnyI :: DataCon -> αν lτ dτ
dataE :: αν lτ dτ -> γν (Data lτ dτ)
funCloI :: FunClo lτ dτ -> αν lτ dτ
funCloE :: αν lτ dτ -> γν (FunClo lτ dτ)
refI :: Ref lτ dτ -> αν lτ dτ
refAnyI :: αν lτ dτ
refE :: αν lτ dτ -> γν (Ref lτ dτ)
konCloI :: KonClo lτ dτ -> αν lτ dτ
konCloE :: αν lτ dτ -> γν (KonClo lτ dτ)
konMemoCloI :: KonMemoClo lτ dτ -> αν lτ dτ
konMemoCloE :: αν lτ dτ -> γν (KonMemoClo lτ dτ)
thunkCloI :: ThunkClo lτ dτ -> αν lτ dτ
thunkCloE :: αν lτ dτ -> γν (ThunkClo lτ dτ)
forcedI :: Forced lτ dτ -> αν lτ dτ
forcedE :: αν lτ dτ -> γν (Forced lτ dτ)
-- State Space
data 𝒮 ν lτ dτ = 𝒮
{ 𝓈Env :: Env lτ dτ
, 𝓈Store :: Store ν lτ dτ
, 𝓈Time :: Moment lτ dτ
} deriving (Eq, Ord)
instance (Time ψ lτ, Time ψ dτ) => Bot (𝒮 ν lτ dτ) where bot = 𝒮 bot bot bot
makeLenses ''𝒮
-- Analysis effects and constraints
type TimeC lτ dτ = (Ord lτ, Ord dτ, Time Int lτ, Time Int dτ)
type ValC ν lτ dτ = (JoinLattice (ν lτ dτ), Meet (ν lτ dτ), Neg (ν lτ dτ), Val lτ dτ SetWithTop ν)
type MonadC ν lτ dτ m = (Monad m, MonadBot m, MonadTop m, MonadPlus m, MonadState (𝒮 ν lτ dτ) m)
class ( MonadC ν lτ dτ m , ValC ν lτ dτ , TimeC lτ dτ) => Analysis ν lτ dτ m | m -> ν , m -> lτ , m -> dτ
-- Moment management
tickLex :: (Analysis ν lτ dτ m) => Call -> m ()
tickLex = modifyL (timeLexL <.> 𝓈TimeL) . tick . stampedFixID
tickDyn :: (Analysis ν lτ dτ m) => Call -> m ()
tickDyn = modifyL (timeDynL <.> 𝓈TimeL) . tick . stampedFixID
alloc :: (Analysis ν lτ dτ m) => Name -> m (Addr lτ dτ)
alloc x = do
τ <- getL 𝓈TimeL
return $ Addr x τ
-- Updating values in the store
bindJoin :: (Analysis ν lτ dτ m) => Name -> ν lτ dτ -> m ()
bindJoin x v = do
𝓁 <- alloc x
modifyL 𝓈EnvL $ mapInsert x 𝓁
modifyL 𝓈StoreL $ mapInsertWith (\/) 𝓁 v
updateRef :: (Analysis ν lτ dτ m) => Addr lτ dτ -> ν lτ dτ -> ν lτ dτ -> m ()
updateRef 𝓁 vOld vNew = modifyL 𝓈StoreL $ \ σ ->
mapModify (\ v -> v /\ neg vOld) 𝓁 σ \/ mapSingleton 𝓁 vNew
-- Refinement and extraction
refine :: (Analysis ν lτ dτ m) => ArgVal lτ dτ -> ν lτ dτ -> m ()
refine (AddrVal 𝓁) v = modifyL 𝓈StoreL $ mapInsertWith (/\) 𝓁 v
refine (LitVal _) _ = return ()
refine TypeVal _ = return ()
extract :: (Analysis ν lτ dτ m) => (a -> ν lτ dτ) -> (ν lτ dτ -> SetWithTop a) -> ArgVal lτ dτ -> m a
extract intro elim av = do
v <- argVal av
a <- setWithTopElim mtop mset $ elim v
refine av $ intro a
return a
extractIsLit :: (Analysis ν lτ dτ m) => Literal -> ArgVal lτ dτ -> m ()
extractIsLit l av = do
v <- argVal av
b <- setWithTopElim mtop mset $ litTestE l v
guard b
refine av $ litI l
-- Denotations
addr :: (Analysis ν lτ dτ m) => Addr lτ dτ -> m (ν lτ dτ)
addr 𝓁 = do
σ <- getL 𝓈StoreL
maybeZero $ σ # 𝓁
argVal :: (Analysis ν lτ dτ m) => ArgVal lτ dτ -> m (ν lτ dτ)
argVal (AddrVal 𝓁) = addr 𝓁
argVal (LitVal l) = return $ litI l
argVal TypeVal = return botI
varAddr :: (Analysis ν lτ dτ m) => Name -> m (Addr lτ dτ)
varAddr x = do
ρ <- getL 𝓈EnvL
maybeZero $ ρ # x
var :: (Analysis ν lτ dτ m) => Name -> m (ν lτ dτ)
var = addr *. varAddr
pico :: (Analysis ν lτ dτ m) => Pico -> m (ν lτ dτ)
pico = \ case
Var n -> var n
Lit l -> return $ litI l
Type -> return botI
picoArg :: (Analysis ν lτ dτ m) => Pico -> m (ArgVal lτ dτ)
picoArg (Var x) = AddrVal ^$ varAddr x
picoArg (Lit l) = return $ LitVal l
picoArg Type = return TypeVal
atom :: (Analysis ν lτ dτ m) => Atom -> m (ν lτ dτ)
atom = \ case
Pico p -> pico p
LamF x k c -> do
ρ <- getL 𝓈EnvL
lτ <- getL $ timeLexL <.> 𝓈TimeL
return $ funCloI $ FunClo x k c ρ lτ
LamK x c -> do
ρ <- getL 𝓈EnvL
return $ konCloI $ KonClo x c ρ
Thunk r xi x k p₁ p₂ -> do
ρ <- getL 𝓈EnvL
lτ <- getL $ timeLexL <.> 𝓈TimeL
𝓁 <- alloc r
updateRef 𝓁 botI $ thunkCloI $ ThunkClo xi x k p₁ p₂ ρ lτ
return $ refI $ Ref 𝓁
forceThunk :: forall ν lτ dτ m. (Analysis ν lτ dτ m) => Name -> ArgVal lτ dτ -> Call -> m Call
forceThunk x av c = do
Ref 𝓁 <- extract refI refE av
msum
[ do
Forced av' <- extract forcedI forcedE $ AddrVal 𝓁
v' <- argVal av'
bindJoin x v'
return c
, do
t@(ThunkClo xi' x' k p₁' p₂' ρ' lτ') <- extract thunkCloI thunkCloE $ AddrVal 𝓁
ρ <- getL 𝓈EnvL
let kv = konMemoCloI $ KonMemoClo 𝓁 t x c ρ
putL 𝓈EnvL ρ'
putL (timeLexL <.> 𝓈TimeL) lτ'
bindJoin k kv
return $ StampedFix xi' $ AppF xi' x' p₁' p₂' $ Var k
]
call :: (Analysis ν lτ dτ m) => Call -> m Call
call c = do
tickDyn c
case stampedFix c of
Let x a c' -> do
v <- atom a
bindJoin x v
return c'
Rec rxs c' -> do
traverseOn rxs $ \ (r,x) -> do
𝓁 <- alloc r
bindJoin x $ refI $ Ref 𝓁
return c'
Letrec xas c' -> do
traverseOn xas $ \ (x, a) -> do
av <- picoArg $ Var x
Ref 𝓁 <- extract refI refE av
updateRef 𝓁 botI *$ atom a
return c'
AppK p₁ p₂ -> do
av₁ <- picoArg p₁
v₂ <- pico p₂
msum
[ do
KonClo x c' ρ <- extract konCloI konCloE av₁
putL 𝓈EnvL ρ
bindJoin x v₂
return c'
, do
KonMemoClo 𝓁 th x c' ρ <- extract konMemoCloI konMemoCloE av₁
updateRef 𝓁 (thunkCloI th) . forcedI . Forced *$ picoArg p₂
putL 𝓈EnvL ρ
bindJoin x v₂
return c'
]
AppF xi' x' p₁ p₂ p₃ -> do
av₁ <- picoArg p₁
v₂ <- pico p₂
v₃ <- pico p₃
msum
[ do
FunClo x k c' ρ lτ <- extract funCloI funCloE av₁
putL 𝓈EnvL ρ
putL (timeLexL <.> 𝓈TimeL) lτ
bindJoin x v₂
bindJoin k v₃
return c'
, forceThunk x' av₁ $ StampedFix xi' $ AppF xi' x' (Var x') p₂ p₃
]
Case xi' x' p bs0 -> do
av <- picoArg p
msum
[ do
-- loop through the alternatives
let loop bs = do
(CaseBranch acon xs c', bs') <- maybeZero $ view consL bs
case acon of
DataAlt con -> msum
-- The alt is a Data and the value is a Data with the same
-- tag; jump to the alt body.
[ do
Data dcon 𝓁s <- extract dataI dataE av
guard $ con == dcon
x𝓁s <- maybeZero $ zip xs 𝓁s
traverseOn x𝓁s $ \ (x, av') -> do
v' <- argVal av'
bindJoin x v'
return c'
-- The alt is a Data and the value is not a Data with the
-- same tag; try the next branch.
, do
refine av $ neg $ dataAnyI con
loop bs'
]
LitAlt l -> msum
-- The alt is a Lit and the value is the same lit; jump to
-- the alt body.
[ do
extractIsLit l av
return c'
-- The alt is a Lit and and the value is not the same lit;
-- try the next branch.
, do
refine av $ neg $ litI l
loop bs'
]
-- The alt is the default branch; jump to the body _only if
-- the value is not a ref_.
DEFAULT -> do
refine av $ neg $ refAnyI
return c
loop bs0
, forceThunk x' av $ StampedFix xi' $ Case xi' x' (Var x') bs0
]
Halt _ -> return c
|
FranklinChen/maam
|
src/Lang/Hask/Semantics.hs
|
bsd-3-clause
| 10,237
| 200
| 33
| 3,480
| 4,559
| 2,272
| 2,287
| -1
| -1
|
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.SetupWrapper
-- Copyright : (c) The University of Glasgow 2006,
-- Duncan Coutts 2008
--
-- Maintainer : cabal-devel@haskell.org
-- Stability : alpha
-- Portability : portable
--
-- An interface to building and installing Cabal packages.
-- If the @Built-Type@ field is specified as something other than
-- 'Custom', and the current version of Cabal is acceptable, this performs
-- setup actions directly. Otherwise it builds the setup script and
-- runs it with the given arguments.
module Distribution.Client.SetupWrapper (
setupWrapper,
SetupScriptOptions(..),
defaultSetupScriptOptions,
) where
import qualified Distribution.Make as Make
import qualified Distribution.Simple as Simple
import Distribution.Version
( Version(..), VersionRange, anyVersion
, intersectVersionRanges, orLaterVersion
, withinRange )
import Distribution.InstalledPackageInfo (installedPackageId)
import Distribution.Package
( InstalledPackageId(..), PackageIdentifier(..), PackageId,
PackageName(..), Package(..), packageName
, packageVersion, Dependency(..) )
import Distribution.PackageDescription
( GenericPackageDescription(packageDescription)
, PackageDescription(..), specVersion
, BuildType(..), knownBuildTypes, defaultRenaming )
import Distribution.PackageDescription.Parse
( readPackageDescription )
import Distribution.Simple.Configure
( configCompilerEx )
import Distribution.Compiler
( buildCompilerId, CompilerFlavor(GHC, GHCJS) )
import Distribution.Simple.Compiler
( Compiler(compilerId), compilerFlavor, PackageDB(..), PackageDBStack )
import Distribution.Simple.PreProcess
( runSimplePreProcessor, ppUnlit )
import Distribution.Simple.Program
( ProgramConfiguration, emptyProgramConfiguration
, getProgramSearchPath, getDbProgramOutput, runDbProgram, ghcProgram
, ghcjsProgram )
import Distribution.Simple.Program.Find
( programSearchPathAsPATHVar )
import Distribution.Simple.Program.Run
( getEffectiveEnvironment )
import qualified Distribution.Simple.Program.Strip as Strip
import Distribution.Simple.BuildPaths
( defaultDistPref, exeExtension )
import Distribution.Simple.Command
( CommandUI(..), commandShowOptions )
import Distribution.Simple.Program.GHC
( GhcMode(..), GhcOptions(..), renderGhcOptions )
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import Distribution.Client.Config
( defaultCabalDir )
import Distribution.Client.IndexUtils
( getInstalledPackages )
import Distribution.Client.JobControl
( Lock, criticalSection )
import Distribution.Simple.Setup
( Flag(..) )
import Distribution.Simple.Utils
( die, debug, info, cabalVersion, tryFindPackageDesc, comparing
, createDirectoryIfMissingVerbose, installExecutableFile
, copyFileVerbose, rewriteFile, intercalate )
import Distribution.Client.Utils
( inDir, tryCanonicalizePath
, existsAndIsMoreRecentThan, moreRecentFile
#if mingw32_HOST_OS
, canonicalizePathNoThrow
#endif
)
import Distribution.System ( Platform(..), buildPlatform )
import Distribution.Text
( display )
import Distribution.Utils.NubList
( toNubListR )
import Distribution.Verbosity
( Verbosity )
import Distribution.Compat.Exception
( catchIO )
import System.Directory ( doesFileExist )
import System.FilePath ( (</>), (<.>) )
import System.IO ( Handle, hPutStr )
import System.Exit ( ExitCode(..), exitWith )
import System.Process ( runProcess, waitForProcess )
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ( (<$>), (<*>) )
import Data.Monoid ( mempty )
#endif
import Control.Monad ( when, unless )
import Data.List ( foldl1' )
import Data.Maybe ( fromMaybe, isJust )
import Data.Char ( isSpace )
import Distribution.Client.Compat.ExecutablePath ( getExecutablePath )
#ifdef mingw32_HOST_OS
import Distribution.Simple.Utils
( withTempDirectory )
import Control.Exception ( bracket )
import System.FilePath ( equalFilePath, takeDirectory )
import System.Directory ( doesDirectoryExist )
import qualified System.Win32 as Win32
#endif
-- | Options controlling where a package's setup script comes from, how
-- it is compiled and how it is invoked.
data SetupScriptOptions = SetupScriptOptions {
    -- | Acceptable versions of the Cabal library to link the setup
    -- script against.
    useCabalVersion          :: VersionRange,
    -- | Compiler to build the setup script with; 'Nothing' means one is
    -- configured on demand (see 'configureCompiler').
    useCompiler              :: Maybe Compiler,
    -- | Target platform; 'Nothing' means the build platform.
    usePlatform              :: Maybe Platform,
    -- | Package DB stack consulted when looking for an installed Cabal.
    usePackageDB             :: PackageDBStack,
    -- | Cached installed-package index, to avoid re-reading the DBs.
    usePackageIndex          :: Maybe InstalledPackageIndex,
    -- | Program database used to locate and run ghc/ghcjs etc.
    useProgramConfig         :: ProgramConfiguration,
    -- | The @dist@ prefix under which setup build artifacts live.
    useDistPref              :: FilePath,
    -- | When 'Just', build output is redirected to this handle.
    useLoggingHandle         :: Maybe Handle,
    -- | Directory to run the setup script in ('Nothing' = current dir).
    useWorkingDir            :: Maybe FilePath,
    -- | Always use the external setup method, even when an in-process
    -- call would otherwise be chosen.
    forceExternalSetupMethod :: Bool,

    -- | List of dependencies to use when building Setup.hs
    useDependencies :: [(InstalledPackageId, PackageId)],

    -- | Is the list of setup dependencies exclusive?
    --
    -- This is here for legacy reasons. Before the introduction of the explicit
    -- setup stanza in .cabal files we compiled Setup.hs scripts with all
    -- packages in the environment visible, but we needed to restrict
    -- _some_ packages; in particular, we need to restrict the version of Cabal
    -- that the setup script gets linked against (this was the only "dependency
    -- constraint" that we had previously for Setup scripts).
    useDependenciesExclusive :: Bool,

    -- | Used only by 'cabal clean' on Windows.
    --
    -- Note: win32 clean hack
    -------------------------
    -- On Windows, running './dist/setup/setup clean' doesn't work because the
    -- setup script will try to delete itself (which causes it to fail horribly,
    -- unlike on Linux). So we have to move the setup exe out of the way first
    -- and then delete it manually. This applies only to the external setup
    -- method.
    useWin32CleanHack        :: Bool,

    -- | Used only when calling setupWrapper from parallel code to serialise
    -- access to the setup cache; should be Nothing otherwise.
    --
    -- Note: setup exe cache
    ------------------------
    -- When we are installing in parallel, we always use the external setup
    -- method. Since compiling the setup script each time adds noticeable
    -- overhead, we use a shared setup script cache
    -- ('~/.cabal/setup-exe-cache'). For each (compiler, platform, Cabal
    -- version) combination the cache holds a compiled setup script
    -- executable. This only affects the Simple build type; for the Custom,
    -- Configure and Make build types we always compile the setup script anew.
    setupCacheLock           :: Maybe Lock
  }
-- | Sensible defaults: accept any Cabal version, use the global and user
-- package DBs, no output redirection, no explicit setup dependencies and
-- no cache lock.
defaultSetupScriptOptions :: SetupScriptOptions
defaultSetupScriptOptions = SetupScriptOptions
  { useCabalVersion          = anyVersion
  , useCompiler              = Nothing
  , usePlatform              = Nothing
  , usePackageDB             = [GlobalPackageDB, UserPackageDB]
  , usePackageIndex          = Nothing
  , useProgramConfig         = emptyProgramConfiguration
  , useDistPref              = defaultDistPref
  , useLoggingHandle         = Nothing
  , useWorkingDir            = Nothing
  , forceExternalSetupMethod = False
  , useDependencies          = []
  , useDependenciesExclusive = False
  , useWin32CleanHack        = False
  , setupCacheLock           = Nothing
  }
-- | Run a setup command for a package, dispatching to the internal,
-- self-exec or external setup method depending on the package's build
-- type and the given options.
setupWrapper :: Verbosity
             -> SetupScriptOptions
             -> Maybe PackageDescription -- ^ read from disk when 'Nothing'
             -> CommandUI flags
             -> (Version -> flags)       -- ^ flags for a given Cabal lib version
             -> [String]                 -- ^ extra command-line arguments
             -> IO ()
setupWrapper verbosity options mpkg cmd flags extraArgs = do
  pkg <- maybe getPkg return mpkg
  let setupMethod = determineSetupMethod options' buildType'
      -- narrow the acceptable Cabal range by the package's spec-version
      options'    = options {
                      useCabalVersion = intersectVersionRanges
                                          (useCabalVersion options)
                                          (orLaterVersion (specVersion pkg))
                    }
      buildType'  = fromMaybe Custom (buildType pkg)
      mkArgs cabalLibVersion = commandName cmd
                             : commandShowOptions cmd (flags cabalLibVersion)
                            ++ extraArgs
  checkBuildType buildType'
  setupMethod verbosity options' (packageId pkg) buildType' mkArgs
  where
    -- locate and parse the .cabal file in the working directory
    getPkg = tryFindPackageDesc (fromMaybe "." (useWorkingDir options))
         >>= readPackageDescription verbosity
         >>= return . packageDescription
    checkBuildType (UnknownBuildType name) =
      die $ "The build-type '" ++ name ++ "' is not known. Use one of: "
         ++ intercalate ", " (map display knownBuildTypes) ++ "."
    checkBuildType _ = return ()
-- | Decide if we're going to be able to do a direct internal call to the
-- entry point in the Cabal library or if we're going to have to compile
-- and execute an external Setup.hs script.
--
determineSetupMethod :: SetupScriptOptions -> BuildType -> SetupMethod
determineSetupMethod options buildType'
  -- This order is picked so that it's stable. The build type and
  -- required cabal version are external info, coming from .cabal
  -- files and the command line. Those do switch between the
  -- external and self & internal methods, but that info itself can
  -- be considered stable. The logging and force-external conditions
  -- are internally generated choices but now these only switch
  -- between the self and internal setup methods, which are
  -- consistent with each other.
  | buildType' == Custom      = externalSetupMethod
  | not (cabalVersion `withinRange`
         useCabalVersion options) = externalSetupMethod
  | isJust (useLoggingHandle options)
    -- Forcing is done to use an external process e.g. due to parallel
    -- build concerns.
    || forceExternalSetupMethod options = selfExecSetupMethod
  | otherwise                 = internalSetupMethod
-- | A setup method takes the verbosity, the (already adjusted) options,
-- the package id, the build type and a function producing command-line
-- arguments for a given Cabal library version, and runs the command.
type SetupMethod = Verbosity
                -> SetupScriptOptions
                -> PackageIdentifier
                -> BuildType
                -> (Version -> [String]) -> IO ()
-- ------------------------------------------------------------
-- * Internal SetupMethod
-- ------------------------------------------------------------

-- | Call the Cabal library entry point directly, in-process, with the
-- version of Cabal this executable was built against.
internalSetupMethod :: SetupMethod
internalSetupMethod verbosity options _ bt mkargs = do
  let args = mkargs cabalVersion
  debug verbosity $ "Using internal setup method with build-type " ++ show bt
                 ++ " and args:\n " ++ show args
  inDir (useWorkingDir options) $
    buildTypeAction bt args
-- | The Cabal-library main function corresponding to each standard
-- build type. Custom and unknown build types have no in-process entry
-- point and are errors here.
buildTypeAction :: BuildType -> ([String] -> IO ())
buildTypeAction bt = case bt of
  Simple             -> Simple.defaultMainArgs
  Configure          -> Simple.defaultMainWithHooksArgs
                          Simple.autoconfUserHooks
  Make               -> Make.defaultMainArgs
  Custom             -> error "buildTypeAction Custom"
  UnknownBuildType _ -> error "buildTypeAction UnknownBuildType"
-- ------------------------------------------------------------
-- * Self-Exec SetupMethod
-- ------------------------------------------------------------

-- | Re-run the current executable as a child process in @act-as-setup@
-- mode, instead of compiling a separate setup script.
selfExecSetupMethod :: SetupMethod
selfExecSetupMethod verbosity options _pkg bt mkargs = do
  let args = ["act-as-setup",
              "--build-type=" ++ display bt,
              "--"] ++ mkargs cabalVersion
  debug verbosity $ "Using self-exec internal setup method with build-type "
                 ++ show bt ++ " and args:\n " ++ show args
  path <- getExecutablePath
  info verbosity $ unwords (path : args)
  case useLoggingHandle options of
    Nothing        -> return ()
    Just logHandle -> info verbosity $ "Redirecting build log to "
                                    ++ show logHandle
  -- make sure the child sees the same program search path we use
  searchpath <- programSearchPathAsPATHVar
                (getProgramSearchPath (useProgramConfig options))
  env        <- getEffectiveEnvironment [("PATH", Just searchpath)]
  process <- runProcess path args
             (useWorkingDir options) env Nothing
             (useLoggingHandle options) (useLoggingHandle options)
  exitCode <- waitForProcess process
  unless (exitCode == ExitSuccess) $ exitWith exitCode
-- ------------------------------------------------------------
-- * External SetupMethod
-- ------------------------------------------------------------

-- | Compile (or fetch from the cache) an external Setup executable and
-- run it as a child process.
externalSetupMethod :: SetupMethod
externalSetupMethod verbosity options pkg bt mkargs = do
  debug verbosity $ "Using external setup method with build-type " ++ show bt
  debug verbosity $ "Using explicit dependencies: " ++ show (useDependenciesExclusive options)
  createDirectoryIfMissingVerbose verbosity True setupDir
  (cabalLibVersion, mCabalLibInstalledPkgId, options') <- cabalLibVersionToUse
  debug verbosity $ "Using Cabal library version " ++ display cabalLibVersion
  path <- if useCachedSetupExecutable
          then getCachedSetupExecutable options'
                 cabalLibVersion mCabalLibInstalledPkgId
          else compileSetupExecutable options'
                 cabalLibVersion mCabalLibInstalledPkgId False
  invokeSetupScript options' path (mkargs cabalLibVersion)

  where
  -- Paths of the setup script and its build products under dist/.
  workingDir       = case fromMaybe "" (useWorkingDir options) of
                       []  -> "."
                       dir -> dir
  setupDir         = workingDir </> useDistPref options </> "setup"
  setupVersionFile = setupDir   </> "setup" <.> "version"
  setupHs          = setupDir   </> "setup" <.> "hs"
  setupProgFile    = setupDir   </> "setup" <.> exeExtension
  platform         = fromMaybe buildPlatform (usePlatform options)

  -- Only the non-Custom build types may share a cached setup executable
  -- (see 'Note: setup exe cache' above).
  useCachedSetupExecutable = (bt == Simple || bt == Configure || bt == Make)
  -- | Return the cached package index when we have one, otherwise read
  -- it from the configured package DB stack.
  maybeGetInstalledPackages :: SetupScriptOptions -> Compiler
                            -> ProgramConfiguration -> IO InstalledPackageIndex
  maybeGetInstalledPackages options' comp conf =
    case usePackageIndex options' of
      Just index -> return index
      Nothing    -> getInstalledPackages verbosity
                      comp (usePackageDB options') conf
  -- | Decide which Cabal library version to build the setup script
  -- against, reusing a previously compiled setup executable when the
  -- version it recorded is still within the acceptable range.
  cabalLibVersionToUse :: IO (Version, (Maybe InstalledPackageId)
                             ,SetupScriptOptions)
  cabalLibVersionToUse = do
    savedVer <- savedVersion
    case savedVer of
      Just version | version `withinRange` useCabalVersion options
        -> do updateSetupScript version bt
              -- Does the previously compiled setup executable still exist and
              -- is it up-to date?
              useExisting <- canUseExistingSetup version
              if useExisting
                then return (version, Nothing, options)
                else installedVersion
      _ -> installedVersion
    where
      -- This check duplicates the checks in 'getCachedSetupExecutable' /
      -- 'compileSetupExecutable'. Unfortunately, we have to perform it twice
      -- because the selected Cabal version may change as a result of this
      -- check.
      canUseExistingSetup :: Version -> IO Bool
      canUseExistingSetup version =
        if useCachedSetupExecutable
          then do
            (_, cachedSetupProgFile) <- cachedSetupDirAndProg options version
            doesFileExist cachedSetupProgFile
          else
            (&&) <$> setupProgFile `existsAndIsMoreRecentThan` setupHs
                 <*> setupProgFile `existsAndIsMoreRecentThan` setupVersionFile

      -- Pick an installed Cabal library, (re)write the setup script and
      -- record the chosen version on disk for the next run.
      installedVersion :: IO (Version, Maybe InstalledPackageId
                             ,SetupScriptOptions)
      installedVersion = do
        (comp, conf, options') <- configureCompiler options
        (version, mipkgid, options'') <- installedCabalVersion options' comp conf
        updateSetupScript version bt
        writeFile setupVersionFile (show version ++ "\n")
        return (version, mipkgid, options'')

      -- Version recorded by a previous run, if present and parseable.
      savedVersion :: IO (Maybe Version)
      savedVersion = do
        versionString <- readFile setupVersionFile `catchIO` \_ -> return ""
        case reads versionString of
          [(version,s)] | all isSpace s -> return (Just version)
          _                             -> return Nothing
  -- | Update a Setup.hs script, creating it if necessary.
  updateSetupScript :: Version -> BuildType -> IO ()
  updateSetupScript _ Custom = do
    useHs  <- doesFileExist customSetupHs
    useLhs <- doesFileExist customSetupLhs
    unless (useHs || useLhs) $ die
      "Using 'build-type: Custom' but there is no Setup.hs or Setup.lhs script."
    let src = (if useHs then customSetupHs else customSetupLhs)
    srcNewer <- src `moreRecentFile` setupHs
    -- Setup.lhs needs unlit'ing before it can be compiled as .hs
    when srcNewer $ if useHs
                    then copyFileVerbose verbosity src setupHs
                    else runSimplePreProcessor ppUnlit src setupHs verbosity
    where
      customSetupHs  = workingDir </> "Setup.hs"
      customSetupLhs = workingDir </> "Setup.lhs"

  updateSetupScript cabalLibVersion _ =
    rewriteFile setupHs (buildTypeScript cabalLibVersion)

  -- | The canonical one-line Setup.hs for each standard build type.
  buildTypeScript :: Version -> String
  buildTypeScript cabalLibVersion = case bt of
    Simple    -> "import Distribution.Simple; main = defaultMain\n"
    Configure -> "import Distribution.Simple; main = defaultMainWithHooks "
              ++ if cabalLibVersion >= Version [1,3,10] []
                 then "autoconfUserHooks\n"
                 else "defaultUserHooks\n"
    Make      -> "import Distribution.Make; main = defaultMain\n"
    Custom             -> error "buildTypeScript Custom"
    UnknownBuildType _ -> error "buildTypeScript UnknownBuildType"
  -- | Choose an installed Cabal library within the acceptable range,
  -- preferring the version this cabal was built against, then the same
  -- major version, then stable, then latest (see 'bestVersion'). When
  -- the package being built *is* Cabal, use its own version directly.
  installedCabalVersion :: SetupScriptOptions -> Compiler -> ProgramConfiguration
                        -> IO (Version, Maybe InstalledPackageId
                              ,SetupScriptOptions)
  installedCabalVersion options' _ _ | packageName pkg == PackageName "Cabal" =
    return (packageVersion pkg, Nothing, options')
  installedCabalVersion options' compiler conf = do
    index <- maybeGetInstalledPackages options' compiler conf
    let cabalDep  = Dependency (PackageName "Cabal") (useCabalVersion options')
        options'' = options' { usePackageIndex = Just index }
    case PackageIndex.lookupDependency index cabalDep of
      []   -> die $ "The package '" ++ display (packageName pkg)
                 ++ "' requires Cabal library version "
                 ++ display (useCabalVersion options)
                 ++ " but no suitable version is installed."
      -- 'head' is safe here: lookupDependency returned a non-empty list
      pkgs -> let ipkginfo = head . snd . bestVersion fst $ pkgs
              in return (packageVersion ipkginfo
                        ,Just . installedPackageId $ ipkginfo, options'')

  bestVersion :: (a -> Version) -> [a] -> a
  bestVersion f = firstMaximumBy (comparing (preference . f))
    where
      -- Like maximumBy, but picks the first maximum element instead of the
      -- last. In general, we expect the preferred version to go first in the
      -- list. For the default case, this has the effect of choosing the version
      -- installed in the user package DB instead of the global one. See #1463.
      --
      -- Note: firstMaximumBy could be written as just
      -- `maximumBy cmp . reverse`, but the problem is that the behaviour of
      -- maximumBy is not fully specified in the case when there is not a single
      -- greatest element.
      firstMaximumBy :: (a -> a -> Ordering) -> [a] -> a
      firstMaximumBy _ [] =
        error "Distribution.Client.firstMaximumBy: empty list"
      firstMaximumBy cmp xs = foldl1' maxBy xs
        where
          maxBy x y = case cmp x y of { GT -> x; EQ -> x; LT -> y; }

      -- Lexicographic preference key; larger tuples compare as better.
      preference version = (sameVersion, sameMajorVersion
                           ,stableVersion, latestVersion)
        where
          sameVersion      = version == cabalVersion
          sameMajorVersion = majorVersion version == majorVersion cabalVersion
          majorVersion     = take 2 . versionBranch
          -- even second component = stable release series
          stableVersion    = case versionBranch version of
                               (_:x:_) -> even x
                               _       -> False
          latestVersion    = version
  -- | Configure a compiler for building the setup script when one was
  -- not supplied, caching both it and the package index in the options.
  configureCompiler :: SetupScriptOptions
                    -> IO (Compiler, ProgramConfiguration, SetupScriptOptions)
  configureCompiler options' = do
    (comp, conf) <- case useCompiler options' of
      Just comp -> return (comp, useProgramConfig options')
      Nothing   -> do (comp, _, conf) <-
                        configCompilerEx (Just GHC) Nothing Nothing
                          (useProgramConfig options') verbosity
                      return (comp, conf)
    -- Whenever we need to call configureCompiler, we also need to access the
    -- package index, so let's cache it in SetupScriptOptions.
    index <- maybeGetInstalledPackages options' comp conf
    return (comp, conf, options' { useCompiler      = Just comp,
                                   usePackageIndex  = Just index,
                                   useProgramConfig = conf })
  -- | Path to the setup exe cache directory and path to the cached setup
  -- executable.
  cachedSetupDirAndProg :: SetupScriptOptions -> Version
                        -> IO (FilePath, FilePath)
  cachedSetupDirAndProg options' cabalLibVersion = do
    cabalDir <- defaultCabalDir
    let setupCacheDir       = cabalDir </> "setup-exe-cache"
        -- the cache key covers build type, Cabal version, platform and
        -- compiler version
        cachedSetupProgFile = setupCacheDir
                              </> ("setup-" ++ buildTypeString ++ "-"
                                   ++ cabalVersionString ++ "-"
                                   ++ platformString ++ "-"
                                   ++ compilerVersionString)
                              <.> exeExtension
    return (setupCacheDir, cachedSetupProgFile)
      where
        buildTypeString       = show bt
        cabalVersionString    = "Cabal-" ++ (display cabalLibVersion)
        compilerVersionString = display $
                                fromMaybe buildCompilerId
                                  (fmap compilerId . useCompiler $ options')
        platformString        = display platform
  -- | Look up the setup executable in the cache; update the cache if the setup
  -- executable is not found.
  getCachedSetupExecutable :: SetupScriptOptions
                           -> Version -> Maybe InstalledPackageId
                           -> IO FilePath
  getCachedSetupExecutable options' cabalLibVersion
                           maybeCabalLibInstalledPkgId = do
    (setupCacheDir, cachedSetupProgFile) <-
      cachedSetupDirAndProg options' cabalLibVersion
    cachedSetupExists <- doesFileExist cachedSetupProgFile
    if cachedSetupExists
      then debug verbosity $
             "Found cached setup executable: " ++ cachedSetupProgFile
      else criticalSection' $ do
        -- The cache may have been populated while we were waiting.
        cachedSetupExists' <- doesFileExist cachedSetupProgFile
        if cachedSetupExists'
          then debug verbosity $
                 "Found cached setup executable: " ++ cachedSetupProgFile
          else do
            debug verbosity $ "Setup executable not found in the cache."
            src <- compileSetupExecutable options'
                     cabalLibVersion maybeCabalLibInstalledPkgId True
            createDirectoryIfMissingVerbose verbosity True setupCacheDir
            installExecutableFile verbosity src cachedSetupProgFile
            -- Do not strip if we're using GHCJS, since the result may be a script
            when (maybe True ((/=GHCJS).compilerFlavor) $ useCompiler options') $
              Strip.stripExe verbosity platform (useProgramConfig options')
                cachedSetupProgFile
    return cachedSetupProgFile
      where
        -- serialise cache updates when a lock was supplied
        criticalSection' = fromMaybe id
                           (fmap criticalSection $ setupCacheLock options')
  -- | If the Setup.hs is out of date wrt the executable then recompile it.
  -- Currently this is GHC/GHCJS only. It should really be generalised.
  --
  compileSetupExecutable :: SetupScriptOptions
                         -> Version -> Maybe InstalledPackageId -> Bool
                         -> IO FilePath
  compileSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId
                         forceCompile = do
    setupHsNewer      <- setupHs          `moreRecentFile` setupProgFile
    cabalVersionNewer <- setupVersionFile `moreRecentFile` setupProgFile
    let outOfDate = setupHsNewer || cabalVersionNewer
    when (outOfDate || forceCompile) $ do
      debug verbosity "Setup executable needs to be updated, compiling..."
      (compiler, conf, options'') <- configureCompiler options'
      let cabalPkgid = PackageIdentifier (PackageName "Cabal") cabalLibVersion
          (program, extraOpts)
            = case compilerFlavor compiler of
                GHCJS -> (ghcjsProgram, ["-build-runner"])
                _     -> (ghcProgram,   ["-threaded"])
          cabalDep = maybe [] (\ipkgid -> [(ipkgid, cabalPkgid)])
                       maybeCabalLibInstalledPkgId
          addRenaming (ipid, pid) = (ipid, pid, defaultRenaming)
          ghcOptions = mempty {
              ghcOptVerbosity       = Flag verbosity
            , ghcOptMode            = Flag GhcModeMake
            , ghcOptInputFiles      = toNubListR [setupHs]
            , ghcOptOutputFile      = Flag setupProgFile
            , ghcOptObjDir          = Flag setupDir
            , ghcOptHiDir           = Flag setupDir
            , ghcOptSourcePathClear = Flag True
            , ghcOptSourcePath      = toNubListR [workingDir]
            , ghcOptPackageDBs      = usePackageDB options''
            , ghcOptHideAllPackages = Flag (useDependenciesExclusive options')
              -- the implicit Cabal dependency is added only in the
              -- legacy (non-exclusive) mode; an explicit setup stanza
              -- must list Cabal itself
            , ghcOptPackages        = toNubListR $
                map addRenaming $
                  if useDependenciesExclusive options'
                    then useDependencies options'
                    else useDependencies options'
                      ++ cabalDep
            , ghcOptExtra           = toNubListR extraOpts
            }
      let ghcCmdLine = renderGhcOptions compiler ghcOptions
      case useLoggingHandle options of
        Nothing          -> runDbProgram verbosity program conf ghcCmdLine
        -- If build logging is enabled, redirect compiler output to the log file.
        (Just logHandle) -> do output <- getDbProgramOutput verbosity program
                                           conf ghcCmdLine
                               hPutStr logHandle output
    return setupProgFile
  -- | Run the compiled setup executable with the given arguments,
  -- honouring the working-dir and logging-handle options. Exits with
  -- the child's exit code on failure.
  invokeSetupScript :: SetupScriptOptions -> FilePath -> [String] -> IO ()
  invokeSetupScript options' path args = do
    info verbosity $ unwords (path : args)
    case useLoggingHandle options' of
      Nothing        -> return ()
      Just logHandle -> info verbosity $ "Redirecting build log to "
                                      ++ show logHandle

    -- Since useWorkingDir can change the relative path, the path argument must
    -- be turned into an absolute path. On some systems, runProcess will take
    -- path as relative to the new working directory instead of the current
    -- working directory.
    path' <- tryCanonicalizePath path

    -- See 'Note: win32 clean hack' above.
#if mingw32_HOST_OS
    -- setupProgFile may not exist if we're using a cached program
    setupProgFile' <- canonicalizePathNoThrow setupProgFile
    let win32CleanHackNeeded = (useWin32CleanHack options')
                               -- Skip when a cached setup script is used.
                               && setupProgFile' `equalFilePath` path'
    if win32CleanHackNeeded then doWin32CleanHack path' else doInvoke path'
#else
    doInvoke path'
#endif

    where
      doInvoke path' = do
        searchpath <- programSearchPathAsPATHVar
                      (getProgramSearchPath (useProgramConfig options'))
        env        <- getEffectiveEnvironment [("PATH", Just searchpath)]
        process <- runProcess path' args
                     (useWorkingDir options') env Nothing
                     (useLoggingHandle options') (useLoggingHandle options')
        exitCode <- waitForProcess process
        unless (exitCode == ExitSuccess) $ exitWith exitCode

#if mingw32_HOST_OS
      doWin32CleanHack path' = do
        info verbosity $ "Using the Win32 clean hack."
        -- Recursively removes the temp dir on exit.
        withTempDirectory verbosity workingDir "cabal-tmp" $ \tmpDir ->
            bracket (moveOutOfTheWay tmpDir path')
                    (maybeRestore path')
                    doInvoke

      moveOutOfTheWay tmpDir path' = do
        let newPath = tmpDir </> "setup" <.> exeExtension
        Win32.moveFile path' newPath
        return newPath

      maybeRestore oldPath path' = do
        let oldPathDir = takeDirectory oldPath
        oldPathDirExists <- doesDirectoryExist oldPathDir
        -- 'setup clean' didn't complete, 'dist/setup' still exists.
        when oldPathDirExists $
          Win32.moveFile path' oldPath
#endif
|
corngood/cabal
|
cabal-install/Distribution/Client/SetupWrapper.hs
|
bsd-3-clause
| 29,342
| 0
| 23
| 8,257
| 5,032
| 2,690
| 2,342
| 431
| 29
|
-- In this example, make the imported items explicit in 'import D1'
-- NOTE(review): this looks like an expected-output fixture for a
-- refactoring test ("TokOut"); exact token layout may matter to its
-- harness — confirm before reformatting further.
module A1 where
import D1 (sumSquares, fringe)
import C1
import B1

-- Check that summing the squares of the whole fringe equals the sum of
-- the squares of the two sub-fringes (Tree/isSame come from the
-- imported test modules).
main :: Tree Int -> Bool
main t = isSame (sumSquares (fringe t))
                (sumSquares (B1.myFringe t)+sumSquares (C1.myFringe t))
|
kmate/HaRe
|
old/testing/mkImpExplicit/A1_TokOut.hs
|
bsd-3-clause
| 275
| 0
| 11
| 59
| 88
| 47
| 41
| 7
| 1
|
-- a fast, straightforward points to analysis
-- meant to determine nodes that are always in whnf
-- and find out evals or applys that always
-- apply to a known value
module Grin.NodeAnalyze(nodeAnalyze) where
import Control.Monad.Identity hiding(join)
import Control.Monad.RWS hiding(join)
import Data.Maybe
import Text.Printf
import qualified Data.Map as Map
import qualified Data.Set as Set
import Grin.Grin hiding(V)
import Grin.Noodle
import Options
import StringTable.Atom
import Support.CanType
import Support.FreeVars
import Support.Tickle
import Util.Gen
import Util.SetLike
import Util.UnionSolve
import Util.UniqueMonad
import qualified Stats
-- | Abstract evaluation state of a grin value.
data NodeType
    = WHNF -- ^ guaranteed to be a WHNF
    | Lazy -- ^ a suspension, a WHNF, or an indirection to a WHNF
    deriving(Eq,Ord,Show)

-- | Lattice element tracked per variable: a 'NodeType' together with
-- the set of node tags it may carry ('Top' = any tag).
data N = N !NodeType (Topped (Set.Set Atom))
    deriving(Eq)
-- | Render as e.g. @WHNF-[tags]@, or @-[?]@ when the tag set is unknown.
instance Show N where
    show (N nt ts) = show nt ++ "-" ++ renderTags ts
      where
        renderTags Top      = "[?]"
        renderTags (Only x) = show (Set.toList x)
-- | 'NodeType' is a two-point lattice: WHNF is bottom, Lazy is top;
-- join and meet are max and min under the derived ordering.
instance Fixable NodeType where
    isBottom = (== WHNF)
    isTop    = (== Lazy)
    join     = max
    meet     = min
    eq       = (==)
    lte      = (<=)
-- | Componentwise (product) lattice structure on 'N'.
instance Fixable N where
    isBottom (N a b) = isBottom a && isBottom b
    isTop (N a b) = isTop a && isTop b
    join (N x y) (N x' y') = N (join x x') (join y y')
    meet (N x y) (N x' y') = N (meet x x') (meet y y')
    lte (N x y) (N x' y') = lte x x' && lte y y'
    eq (N x y) (N x' y') = eq x x' && eq y y'
    showFixable n = show n
-- | A constraint variable with its grin type, or a placeholder for
-- values the analysis does not track.
data V = V Va Ty | VIgnore
    deriving(Eq,Ord)

-- | What a constraint variable stands for.
data Va =
    Vr !Var          -- ^ an ordinary grin variable
    | Fa !Atom !Int  -- ^ the i'th argument of the named function
    | Fr !Atom !Int  -- ^ the i'th result of the named function
    deriving(Eq,Ord)
-- Shorthand constructors for the three flavours of constraint variable.
vr v = V (Vr v)
fa n i = V (Fa n i)
fr n i = V (Fr n i)
-- | Things whose type tells us whether the analysis should track them;
-- only node-typed values (TyNode / TyINode) are interesting.
class NodeLike a where
    isGood :: a -> Bool

instance NodeLike Ty where
    isGood TyNode = True
    isGood TyINode = True
    isGood _ = False

instance NodeLike Val where
    isGood v = isGood (getType v)

instance NodeLike V where
    isGood (V _ t) = isGood t
    isGood _ = False

-- constraint sides: a Left variable is good iff its type is; Right
-- (constant) sides are always acceptable
instance NodeLike (Either V b) where
    isGood (Left n) = isGood n
    isGood _ = True
instance Show V where
showsPrec _ (V (Vr v) ty) = shows (Var v ty)
showsPrec _ (V (Fa a i) _) = shows (a,i)
showsPrec _ (V (Fr a i) _) = shows (i,a)
showsPrec _ VIgnore = showString "IGN"
-- | Constraint-collection monad: reads the grin type environment,
-- writes constraints over 'N'-valued variables, carries an Int state.
newtype M a = M (RWS TyEnv (C N V) Int a)
    deriving(Monad,Functor,MonadWriter (C N V))

-- | Run the collection monad, keeping only the gathered constraints.
runM :: Grin -> M a -> C N V
runM grin (M w) = case runRWS w (grinTypeEnv grin) 1 of
    (_,_,w) -> w
{-# NOINLINE nodeAnalyze #-}
-- | Entry point: collect constraints from every function and CAF of the
-- (freshly renamed) program, solve them, and rewrite the program
-- according to the solution.
nodeAnalyze :: Grin -> IO Grin
nodeAnalyze grin' = do
    let cs = runM grin $ do
            mapM_ doFunc (grinFuncs grin)
            mapM_ docaf (grinCafs grin)
            doFunc (toAtom "@initcafs",[] :-> initCafs grin)
        grin = renameUniqueGrin grin'
        -- every CAF cell is conservatively pinned to top
        docaf (v,tt) | True = tell $ Right top `equals` Left (V (Vr v) TyINode)
                     | otherwise = return ()

    --putStrLn "----------------------------"
    --print cs
    --putStrLn "----------------------------"

    --putStrLn "-- NodeAnalyze"
    (rm,res) <- solve (const (return ())) cs
    --(rm,res) <- solve putStrLn cs
    --putStrLn "----------------------------"
    --mapM_ (\ (x,y) -> putStrLn $ show x ++ " -> " ++ show y) (Map.toList rm)
    --putStrLn "----------------------------"
    --mapM_ print (Map.elems res)
    --putStrLn "----------------------------"
    -- map each constraint variable to its solved lattice value
    let cmap = Map.map (fromJust . flip Map.lookup res) rm
    (grin',stats) <- Stats.runStatT $ tickleM (fixupfs cmap (grinTypeEnv grin)) grin
    return $ transformFuncs (fixupFuncs (grinSuspFunctions grin) (grinPartFunctions grin) cmap) grin' { grinStats = stats `mappend` grinStats grin' }
data Todo = Todo !Bool [V] | TodoNothing
-- | Expression that stores each CAF's node into its cell, one Overwrite
-- per CAF sequenced with (:>>=), ending in an empty Return.
initCafs grin = foldr overwriteOne (Return []) (grinCafs grin) where
    overwriteOne (v,node) rest =
        BaseOp Overwrite [(Var v TyINode),node] :>>= [] :-> rest
-- | Emit constraints for one function definition: tie formal arguments
-- and result slots to per-function variables, then walk the body.
doFunc :: (Atom,Lam) -> M ()
doFunc (name,arg :-> body) = ans where
    ans :: M ()
    ans = do
        let rts = getType body
        -- one variable per result slot
        forMn_ rts $ \ (t,i) -> dVar (fr name i t) t
        -- each formal argument equals the function's i'th argument var
        forMn_ arg $ \ (~(Var v vt),i) -> do
            dVar (vr v vt) vt
            tell $ cAnnotate "FunArg" $ Left (fa name i vt) `equals` Left (vr v vt)
        fn (Todo True [ fr name i t | i <- naturals | t <- rts ]) body

    -- restrict values of TyNode type to be in WHNF
    dVar v TyNode = do
        tell $ Left v `islte` Right (N WHNF Top)
    dVar _ _ = return ()

    -- set concrete values for vars based on their type only
    -- should only be used in patterns
    zVar s v TyNode = tell $ cAnnotate ("zVar - tynode " ++ s) $ Left (vr v TyNode) `equals` Right (N WHNF Top)
    zVar s v t = tell $ cAnnotate ("zVar - inode " ++ s) $ Left (vr v t) `equals` Right top

    -- walk a statement sequence, threading the Todo for the final value
    fn :: Todo -> Exp -> M ()
    fn ret body = f body where
        f (x :>>= [Var v vt] :-> rest) = do
            dVar (vr v vt) vt
            gn (Todo True [vr v vt]) x
            f rest
        f (x :>>= vs@(_:_:_) :-> rest) = do
            vs' <- forM vs $ \ (Var v vt) -> do
                dVar (vr v vt) vt
                return $ vr v vt
            gn (if all (== VIgnore) vs' then TodoNothing else Todo True vs') x
            f rest
        -- non-variable pattern: bound vars get type-based defaults
        f (x :>>= v :-> rest) = do
            forM_ (Set.toList $ freeVars v) $ \ (v,vt) -> zVar "Bind" v vt
            gn TodoNothing x
            f rest
        f body = gn ret body

    -- constrain a result against the Todo's variable: equality for an
    -- exact binding, >= for merged control flow, nothing when discarded
    isfn _ x y | not (isGood x) = mempty
    isfn (Todo True _) x y = cAnnotate "isfn True" $ Left x `equals` y
    isfn (Todo False _) x y = cAnnotate "isfn False" $ Left x `isgte` y
    --isfn (Todo _ _) x y = Left x `isgte` y
    isfn TodoNothing x y = mempty

    -- solver combinators that silently drop untracked (non-node) values
    equals x y | isGood x && isGood y = Util.UnionSolve.equals x y
               | otherwise = mempty
    isgte x y | isGood x && isGood y = Util.UnionSolve.isgte x y
              | otherwise = mempty
    islte x y | isGood x && isGood y = Util.UnionSolve.islte x y
              | otherwise = mempty
    -- emit constraints for one (non-bind) expression, feeding its
    -- result(s) into the current Todo
    gn ret head = f head where
        -- a case alternative: pattern vars get type-based defaults
        fl ret (v :-> body) = do
            forM_ (Set.toList $ freeVars v) $ \ (v,vt) -> zVar "Alt" v vt
            fn ret body
        -- unknown results: top, except TyNode slots which are still WHNF
        dunno ty = do
            dres [Right (if TyNode == t then N WHNF Top else top) | t <- ty ]
        -- feed concrete result values into the Todo's variables
        dres res = do
            case ret of
                Todo b vs | length res /= length vs -> error "lengths don't match!"
                Todo b vs -> forM_ (zip vs res) $ \ (v,r) -> tell (isfn ret v r)
                _ -> return ()
        f (_ :>>= _) = error $ "Grin.NodeAnalyze: :>>="
        f (Case v as)
            | Todo _ n <- ret = mapM_ (fl (Todo False n)) as
            | TodoNothing <- ret = mapM_ (fl TodoNothing) as
        f (BaseOp Eval [x]) = do
            dres [Right (N WHNF Top)]
        f (BaseOp (Apply ty) xs) = do
            mapM_ convertVal xs
            dunno ty
        -- calls: actuals flow into the callee's argument variables,
        -- results come from its result variables
        f (App { expFunction = fn, expArgs = vs, expType = ty }) = do
            vs' <- mapM convertVal vs
            forMn_ (zip vs vs') $ \ ((tv,v),i) -> do
                tell $ v `islte` Left (fa fn i (getType tv))
            dres [Left $ fr fn i t | i <- [ 0 .. ] | t <- ty ]
        f (Call { expValue = Item fn _, expArgs = vs, expType = ty }) = do
            vs' <- mapM convertVal vs
            forMn_ (zip vs vs') $ \ ((tv,v),i) -> do
                tell $ v `islte` Left (fa fn i (getType tv))
            dres [Left $ fr fn i t | i <- [ 0 .. ] | t <- ty ]
        f (Return x) = do mapM convertVal x >>= dres
        f (BaseOp (StoreNode _) w) = do mapM convertVal w >>= dres
        f (BaseOp Promote [w]) = do
            ww <- convertVal w
            tell $ ww `islte` Right (N WHNF Top)
            dres [ww]
        f (BaseOp Demote [w]) = do
            ww <- convertVal w
            tell $ ww `islte` Right (N WHNF Top)
            dres [ww]
        f Error {} = return ()
        f Prim { expArgs = as, expType = ty } = mapM_ convertVal as >> dunno ty
        f Alloc { expValue = v } | getType v == TyNode = do
            v' <- convertVal v
            dres [v']
        f Alloc { expValue = v } | getType v == tyINode = do
            convertVal v
            dunno [TyPtr tyINode]
        f NewRegion { expLam = _ :-> body } = fn ret body
        -- overwriting an INode cell widens the cell's variable
        f (BaseOp Overwrite [Var vname ty,v]) | ty == TyINode = do
            v' <- convertVal v
            tell $ Left (vr vname ty) `isgte` v'
            dres []
        f e@(BaseOp Overwrite vs) = do mapM_ convertVal vs >> dunno (getType e)
        f e@(BaseOp PokeVal vs) = do mapM_ convertVal vs >> dunno (getType e)
        f e@(BaseOp PeekVal vs) = do mapM_ convertVal vs >> dunno (getType e)
        f Let { expDefs = ds, expBody = e } = do
            mapM_ doFunc (map (\x -> (funcDefName x, funcDefBody x)) ds)
            fn ret e
        f exp = error $ "NodeAnalyze.f: " ++ show exp

    -- translate a grin value into a constraint-side (variable or constant)
    convertVal (Const n@(NodeC _ _)) = convertVal n
    convertVal (Const _) = return $ Right (N WHNF Top)
    convertVal (NodeC t vs) = case tagUnfunction t of
        Nothing -> do
            mapM_ convertVal vs
            return $ Right (N WHNF (Only $ Set.singleton t))
        -- partial application node: present args flow into the function's
        -- argument vars, missing ones are top
        Just (n,fn) -> do
            vs' <- mapM convertVal vs
            forMn_ (zip vs vs') $ \ ((vt,v),i) -> do
                tell $ v `islte` Left (fa fn i (getType vt))
            forM_ [0 .. n - 1 ] $ \i -> do
                tell $ Right top `islte` Left (fa fn (length vs + i) TyINode)
            return $ Right (N (if n == 0 then Lazy else WHNF) (Only $ Set.singleton t))
    convertVal (Var v t) = return $ Left (vr v t)
    convertVal v | isGood v = return $ Right (N Lazy Top)
    convertVal Lit {} = return $ Left VIgnore
    convertVal ValPrim {} = return $ Left VIgnore
    convertVal Index {} = return $ Left VIgnore
    convertVal Item {} = return $ Left VIgnore
    convertVal ValUnknown {} = return $ Left VIgnore
    convertVal v = error $ "convertVal " ++ show v
bottom = N WHNF (Only (Set.empty))
top = N Lazy Top
-- | Per-argument (or per-result) rewrite plan produced by the node analysis
-- and consumed by 'transformFuncs'.
data WhatToDo
    = WhatDelete                             -- ^ drop this argument/result entirely
    | WhatUnchanged                          -- ^ leave it as it is
    | WhatConstant Val                       -- ^ replace it with a known constant
    | WhatSubs Ty (Val -> Exp) (Val -> Exp)
      -- ^ change its type; the two functions build the coercions to and
      -- from the new type (used as to/from at call sites and definitions)
--isWhatUnchanged WhatUnchanged = True
--isWhatUnchanged _ = False
-- | Rewrite every function definition, call site, and stored suspended-call
-- node in a 'Grin' according to the per-argument / per-result 'WhatToDo'
-- plan returned by @fn@.  @fn@ receives the function name, its argument
-- types and (when the results are used) its result types; a 'Nothing'
-- component means "leave that side unchanged".
transformFuncs :: (Atom -> [Ty] -> Maybe [Ty] -> (Maybe [WhatToDo],Maybe [WhatToDo])) -> Grin -> Grin
transformFuncs fn grin = grin'' where
    -- the signatures may have changed, so rebuild the type environment
    grin'' = grin' { grinTypeEnv = extendTyEnv (grinFunctions grin') (grinTypeEnv grin') }
    grin' = setGrinFunctions (nfs $ grinFuncs grin) grin
    nfs ds = map fs ds
    fs (n,l@(ps :-> e)) = (n,f (fn n (map getType ps) (Just $ getType e)) l)
    -- nothing to do for this function: still rewrite its body's call sites
    f (Nothing,Nothing) (p :-> e) = p :-> j e
    f (Just ats,rts') (p :-> e) = p' :-> e' where
        rts = maybe (map (const WhatUnchanged) (getType e)) id rts'
        -- p': the new (possibly narrowed / retyped) parameter list
        p' = concatMap f (zip p ats) where
            f (v,WhatUnchanged) = [v]
            f (_,WhatDelete) = []
            f (_,WhatConstant _) = []
            f (Var v _,WhatSubs nty _ _) = [Var v nty]
            f _ = error "NodeAnalyze.transformFuncs: f bad."
        -- e': rebind constant/substituted parameters in front of the old
        -- body, then adapt the returned values
        e' = g (zip p ats) (j e)
        g ((_,WhatUnchanged):xs) e = g xs e
        g ((_,WhatDelete):xs) e = g xs e
        g ((vr,WhatConstant c):xs) e = Return [c] :>>= [vr] :-> g xs e
        g ((Var v vt,WhatSubs nt _ ft):xs) e = ft (Var v nt) :>>= [Var v vt] :-> g xs e
        g [] e = e :>>= rvs :-> h (zip rvs rts) (drop (length (getType e)) [v1 .. ]) [] where
            rvs = zipWith Var [v1 .. ] (getType e)
        g _ _ = error "NodeAnalyze.transformFuncs: g bad."
        -- h: coerce/drop the old results, accumulating the new result list
        h ((r,WhatUnchanged):xs) vs rs = h xs vs (r:rs)
        h ((r,WhatDelete):xs) vs rs = h xs vs rs
        h ((r,WhatConstant _):xs) vs rs = h xs vs rs
        h ((r,WhatSubs nty tt _):xs) (v:vs) rs = tt r :>>= [Var v nty] :-> h xs vs (Var v nty:rs)
        h [] _ rs = Return (reverse rs)
        h _ _ _ = error "NodeAnalyze.transformFuncs: h bad."
    f _ _ = error "NodeAnalyze.transformFuncs: f bad."
    -- j: rewrite call sites inside a body.
    -- A stored suspended function node: transform its captured arguments.
    j app@(BaseOp (StoreNode False) [NodeC a xs]) = res where
        res = if isNothing ats' then app else e'
        ats = maybe (repeat WhatUnchanged) id ats'
        (ats',_) = fn (tagFlipFunction a) (map getType xs) Nothing
        lvars = zipWith Var [ v1 .. ] (map getType xs)
        e' = Return xs :>>= lvars :-> f (zip lvars ats) []
        f ((v,WhatUnchanged):xs) rs = f xs (v:rs)
        f ((_,WhatDelete):xs) rs = f xs rs
        f ((_,WhatConstant _):xs) rs = f xs rs
        f ((Var v oty,WhatSubs nty tt _):xs) rs = tt (Var v oty) :>>= [Var v nty] :-> f xs (Var v nty:rs)
        f [] rs = BaseOp (StoreNode False) [NodeC a (reverse rs)]
        f _ _ = error "NodeAnalyze.transformFuncs: f bad."
    -- A direct application: coerce the arguments in, and the results out.
    j app@(App a xs ts) = res where
        res = if isNothing ats' && isNothing rts' then app else e'
        ats = maybe (repeat WhatUnchanged) id ats'
        rts = maybe (repeat WhatUnchanged) id rts'
        (ats',rts') = fn a (map getType xs) (Just ts)
        lvars = zipWith Var [ v1 .. ] (map getType xs)
        e' = Return xs :>>= lvars :-> f (zip lvars ats) []
        f ((v,WhatUnchanged):xs) rs = f xs (v:rs)
        f ((_,WhatDelete):xs) rs = f xs rs
        f ((_,WhatConstant _):xs) rs = f xs rs
        f ((Var v oty,WhatSubs nty tt _):xs) rs = tt (Var v oty) :>>= [Var v nty] :-> f xs (Var v nty:rs)
        f [] rs = App a (reverse rs) ts' :>>= rvars :-> g (zip rvars' rts) rvars []
        f _ _ = error "NodeAnalyze.transformFuncs: f bad."
        -- g: rebuild the full, original-typed result list the callers expect
        g [] [] rs = Return (reverse rs)
        g ((_,WhatUnchanged):xs) (n:ns) rs = g xs ns (n:rs)
        g ((v,WhatDelete):xs) vs rs = Return [ValUnknown (getType v)] :>>= [v] :-> g xs vs (v:rs)
        g ((v,WhatConstant c):xs) vs rs = Return [c] :>>= [v] :-> g xs vs (v:rs)
        g ((v,WhatSubs _ _ ft):xs) (n:ns) rs = ft n :>>= [v] :-> g xs ns (v:rs)
        g _ _ _ = error "NodeAnalyze.transformFuncs: g bad."
        rvars = zipWith Var [ v1 .. ] ts'
        rvars' = zipWith Var (drop (length rvars) [ v1 .. ]) ts
        -- ts': the new result types after deletions/substitutions
        ts' = concatMap g (zip ts rts) where
            g (t,WhatUnchanged) = [t]
            g (t,WhatConstant _) = []
            g (t,WhatDelete) = []
            g (t,WhatSubs nty _ _) = [nty]
    -- locally-defined functions get the same treatment as top level ones
    j Let { expDefs = ds, expBody = e } = grinLet [ updateFuncDefProps d { funcDefBody = snd $ fs (funcDefName d, funcDefBody d) } | d <- ds ] (j e)
    j e = runIdentity $ mapExpExp (return . j) e
-- | Build the 'WhatToDo' plan consumed by 'transformFuncs' from the solved
-- constraint map @cmap@: arguments/results proven to be WHNF nodes are
-- retyped from TyINode to TyNode, with Promote/Demote coercions inserted at
-- the boundaries.  Functions in @pfuncs@ are left alone entirely; functions
-- in @sfuncs@ only have their arguments changed.
fixupFuncs sfuncs pfuncs cmap = ans where
    ans a as jrs | a `Set.member` pfuncs = (Nothing,Nothing)
                 | a `Set.member` sfuncs = (Just aargs,Nothing)
                 | otherwise = (Just aargs,fmap rargs jrs) where
        aargs = map (bool pnode WhatUnchanged) largs
        largs = map (lupArg fa a) (zip as [0 .. ])
        rargs rs = map (bool pnode WhatUnchanged) (map (lupArg fr a) (zip rs [0 .. ]))
    -- True when the solver proved the i-th slot's lower bound is a WHNF
    -- node (or left it unconstrained) and it currently has type TyINode
    lupArg fa a (x,i) = case (x,Map.lookup (fa a i x) cmap) of
        (TyINode,Just (ResultJust _ (N WHNF _))) -> True
        (TyINode,Just ResultBounded { resultLB = Just (N WHNF _) }) -> True
        (TyINode,Just ResultBounded { resultLB = Nothing }) -> True
        _ -> False
    -- the substitution applied to strengthened slots
    pnode = WhatSubs TyNode (\v -> BaseOp Promote [v]) (\v -> BaseOp Demote [v])
-- | Use the solved constraint map to simplify a function body: Evals of
-- values known to already be in WHNF become Promotes, and Applys whose
-- operand is a single known partial application are inlined.
fixupfs cmap tyEnv l = tickleM f (l::Lam) where
    -- look up the solver's lower bound for a variable, if any
    lupVar (Var v t) = case Map.lookup (vr v t) cmap of
        _ | v < v0 -> fail "nocafyet"  -- don't touch CAFs yet
        Just (ResultJust _ lb) -> return lb
        Just ResultBounded { resultLB = Just lb } -> return lb
        Just ResultBounded { resultLB = Nothing } -> return bottom
        _ -> fail "lupVar"
    lupVar _ = fail "lupVar2"
    pstuff x arg n@(N w t) = liftIO $ when verbose (printf "-- %s %s %s\n" x (show arg) (show n))
    -- eval of something already in WHNF is just a Promote
    f a@(BaseOp Eval [arg]) | Just n <- lupVar arg = case n of
        N WHNF _ -> do
            pstuff "eval" arg n
            Stats.mtick (toAtom "Optimize.NodeAnalyze.eval-promote")
            return (BaseOp Promote [arg])
        _ -> return a
    -- apply whose head is a single, statically known partial application:
    -- unpack the PApp node and either call the function directly (one
    -- argument missing) or rebuild a smaller partial application
    f a@(BaseOp (Apply ty) (papp:args)) | Just nn <- lupVar papp = case nn of
        N WHNF tset | Only set <- tset, [sv] <- Set.toList set, TagPApp n fn <- tagInfo sv, Just (ts,_) <- findArgsType tyEnv sv -> do
            pstuff "apply" papp nn
            case (n,args) of
                (1,[arg]) -> do
                    Stats.mtick (toAtom "Optimize.NodeAnalyze.apply-inline")
                    let va = Var v1 (getType arg)
                        vars = zipWith Var [ v2 .. ] ts
                    return $ Return [arg,papp] :>>= [va,NodeC sv vars] :-> App fn (vars ++ [va]) ty
                (1,[]) -> do
                    Stats.mtick (toAtom "Optimize.NodeAnalyze.apply-inline")
                    let vars = zipWith Var [ v2 .. ] ts
                    return $ Return [papp] :>>= [NodeC sv vars] :-> App fn vars ty
                (pn,[arg]) -> do
                    Stats.mtick (toAtom "Optimize.NodeAnalyze.apply-inline")
                    let va = Var v1 (getType arg)
                        vars = zipWith Var [ v2 .. ] ts
                    return $ Return [arg,papp] :>>= [va,NodeC sv vars] :-> dstore (NodeC (partialTag fn (pn - 1)) (vars ++ [va]))
                (pn,[]) -> do
                    Stats.mtick (toAtom "Optimize.NodeAnalyze.apply-inline")
                    let vars = zipWith Var [ v2 .. ] ts
                    return $ Return [papp] :>>= [NodeC sv vars] :-> dstore (NodeC (partialTag fn (pn - 1)) vars)
                _ -> return a
        _ -> return a
    f e = mapExpExp f e
dstore x = BaseOp (StoreNode True) [x]
-- | Give every Let-bound local function a globally unique name, rewriting
-- its call sites accordingly.  The Reader layer carries the renaming in
-- scope, the State layer the set of names already taken (seeded with all
-- top-level function names).
renameUniqueGrin :: Grin -> Grin
renameUniqueGrin grin = res where
    (res,()) = evalRWS (execUniqT 1 ans) ( mempty :: Map.Map Atom Atom) (fromList [ x | (x,_) <- grinFuncs grin ] :: Set.Set Atom)
    ans = do tickleM f grin
    f (l :-> b) = g b >>= return . (l :->)
    -- apply the current renaming at direct and indirect call sites
    g a@App { expFunction = fn } = do
        m <- lift ask
        case mlookup fn m of
            Just fn' -> return a { expFunction = fn' }
            _ -> return a
    g a@Call { expValue = Item fn t } = do
        m <- lift ask
        case mlookup fn m of
            Just fn' -> return a { expValue = Item fn' t }
            _ -> return a
    -- rename the local definitions, then rewrite the body (and the
    -- definitions themselves) under the extended renaming
    g (e@Let { expDefs = defs }) = do
        (defs',rs) <- liftM unzip $ flip mapM defs $ \d -> do
            (nn,rs) <- newName (funcDefName d)
            return (d { funcDefName = nn },rs)
        local (fromList rs `mappend`) $ mapExpExp g e { expDefs = defs' }
    g b = mapExpExp g b
    -- claim a fresh name, appending "-<uniq>" until it is unused
    newName a = do
        m <- lift get
        case member a m of
            False -> do lift $ modify (insert a); return (a,(a,a))
            True -> do
                let cfname = do
                        uniq <- newUniq
                        let fname = toAtom $ show a ++ "-" ++ show uniq
                        if fname `member` (m :: Set.Set Atom) then cfname else return fname
                nn <- cfname
                lift $ modify (insert nn)
                return (nn,(a,nn))
bool x y b = if b then x else y
|
m-alvarez/jhc
|
src/Grin/NodeAnalyze.hs
|
mit
| 18,749
| 3
| 26
| 6,231
| 8,677
| 4,351
| 4,326
| -1
| -1
|
module Main where
import Debug.Trace
-- Entry point of the regression test: the printed result (together with the
-- traceShow output from 'alg') forms the expected output of the test.
main :: IO ()
main = print (alg 3 1)
-- | Subtraction-based loop on 'Word': each step replaces the larger of the
-- pair by the difference @c = a - b@ until both are equal.  The 'traceShow'
-- guard logs every (a, b) pair without affecting control flow (it always
-- evaluates to False, so the next guard is tried).
alg :: Word -> Word -> Word
alg a b
  | traceShow (a, b) False = undefined
  | c < b = alg b c
  | c > b = alg c b
  | otherwise = c
  where
    c = a - b
|
shlevy/ghc
|
testsuite/tests/codeGen/should_run/T14754.hs
|
bsd-3-clause
| 232
| 0
| 9
| 75
| 131
| 65
| 66
| 11
| 1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="da-DK">
<title>WebSockets | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Søg</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
msrader/zap-extensions
|
src/org/zaproxy/zap/extension/websocket/resources/help_da_DK/helpset_da_DK.hs
|
apache-2.0
| 970
| 80
| 66
| 159
| 416
| 210
| 206
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="ko-KR">
<title>Active Scan Rules | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
msrader/zap-extensions
|
src/org/zaproxy/zap/extension/ascanrules/resources/help_ko_KR/helpset_ko_KR.hs
|
apache-2.0
| 979
| 80
| 66
| 161
| 417
| 211
| 206
| -1
| -1
|
{-# LANGUAGE PartialTypeSignatures #-}
module Uncurry where
-- | 'uncurry' with every type left as a wildcard: this module is a
-- compile-only test that the partial type signature is completed by
-- inference, so the holes are intentional and must stay.
unc :: (_ -> _ -> _) -> (_, _) -> _
unc = uncurry
|
urbanslug/ghc
|
testsuite/tests/partial-sigs/should_compile/Uncurry.hs
|
bsd-3-clause
| 111
| 0
| 8
| 23
| 40
| 24
| 16
| 4
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Common where
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.QQ as Aeson.QQ
import qualified Data.ByteString.Lazy as ByteString.Lazy
import Data.Semigroup ((<>))
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text.Encoding
import qualified Data.Text.Lazy.IO as Text.Lazy.IO
import qualified Network.HTTP.Client as HTTP.Client
import qualified Network.HTTP.Simple as HTTP.Simple
import qualified Text.Pretty.Simple as Pretty.Simple
-- | Pretty-print a JSON value to stdout.
pPrintJSON :: Aeson.Value -> IO ()
pPrintJSON
  = Text.Lazy.IO.putStrLn
  . Pretty.Simple.pString
  . Text.unpack
  . Text.Encoding.decodeUtf8
  . ByteString.Lazy.toStrict
  . Aeson.encode
-- | Pretty-print anything with a 'Aeson.ToJSON' instance.
pPrintToJSON :: Aeson.ToJSON a => a -> IO ()
pPrintToJSON = pPrintJSON . Aeson.toJSON
-- | Log the method, path and query string of a request, then pretty-print
-- its body when the body is held in memory and parses as JSON.
printRequest :: HTTP.Client.Request -> IO ()
printRequest request = do
  putStrLn
    $ (show . HTTP.Client.method) request
    ++ " "
    ++ (show . HTTP.Client.path) request
    ++ (Text.unpack . Text.Encoding.decodeUtf8 . HTTP.Client.queryString) request
  let
    -- best effort: bodies that are not valid JSON are silently skipped
    printJSON bytes =
      case Aeson.decode bytes of
        Nothing -> return ()
        Just (json :: Aeson.Value) -> pPrintJSON json
  case HTTP.Client.requestBody request of
    HTTP.Client.RequestBodyLBS lazyBody -> printJSON lazyBody
    HTTP.Client.RequestBodyBS body -> printJSON . ByteString.Lazy.fromStrict $ body
    _ -> return ()  -- streaming bodies cannot be inspected here
-- | Pretty-print the decoded JSON body of a response.
pPrintResponse :: HTTP.Client.Response Aeson.Value -> IO ()
pPrintResponse = pPrintJSON . HTTP.Client.responseBody
-- | Print a section banner preceded by a blank line:
--
-- > ***
-- > *** <name>
-- > ***
printHeader :: Text.Text -> IO ()
printHeader name =
  mapM_ putStrLn ["", "***", "*** " <> Text.unpack name, "***"]
-- | Host the example requests are sent to.
host :: Text.Text
host = "127.0.0.1"
-- | Port of the search server (9200 is the Elasticsearch default).
port :: Int
port = 9200
-- | GET request against @host:port@; every helper below refines this.
baseRequest :: HTTP.Simple.Request
baseRequest
  = HTTP.Simple.setRequestMethod "GET"
  . HTTP.Simple.setRequestPort port
  . HTTP.Simple.setRequestHost (Text.Encoding.encodeUtf8 host)
  $ HTTP.Simple.defaultRequest
-- | @PUT \/\<name\>@ with the given settings as the JSON body.
createIndex :: Aeson.ToJSON a => Text.Text -> a -> IO ()
createIndex name settings
  = sendRequest_
  . HTTP.Simple.setRequestBodyJSON settings
  . HTTP.Simple.setRequestMethod "PUT"
  . HTTP.Simple.setRequestPath (Text.Encoding.encodeUtf8 ("/" <> name))
  $ baseRequest
-- | @DELETE \/\<name\>@: drop an index.
deleteIndex :: Text.Text -> IO ()
deleteIndex name
  = sendRequest_
  . HTTP.Simple.setRequestMethod "DELETE"
  . HTTP.Simple.setRequestPath (Text.Encoding.encodeUtf8 ("/" <> name))
  $ baseRequest
-- | @POST \/\<name\>\/_refresh@: make recent writes visible to search.
refreshIndex :: Text.Text -> IO ()
refreshIndex name
  = sendRequest_
  . HTTP.Simple.setRequestMethod "POST"
  . HTTP.Simple.setRequestPath (Text.Encoding.encodeUtf8 ("/" <> name <> "/_refresh"))
  $ baseRequest
-- | @PUT \/\<index\>\/\<type\>\/\<id\>@: store one document as JSON.
indexDocument :: Aeson.ToJSON a => Text.Text -> Text.Text -> Text.Text -> a -> IO ()
indexDocument indexName typeName documentId document
  = sendRequest_
  . HTTP.Simple.setRequestBodyJSON document
  . HTTP.Simple.setRequestMethod "PUT"
  . HTTP.Simple.setRequestPath (Text.Encoding.encodeUtf8 ("/" <> indexName <> "/" <> typeName <> "/" <> documentId))
  $ baseRequest
-- | Run text through a named analyzer of an index (@\/\<index\>\/_analyze@).
--
-- NOTE(review): unlike the other helpers this one sets no explicit request
-- method, so it inherits GET from 'baseRequest' — confirm that is intended.
analyzeText :: Text.Text -> Text.Text -> Text.Text -> IO ()
analyzeText indexName analyzerName text
  = sendRequest_
  . HTTP.Simple.setRequestBodyJSON [Aeson.QQ.aesonQQ|
{
  "analyzer": #{analyzerName},
  "text": #{text}
}
  |]
  . HTTP.Simple.setRequestPath (Text.Encoding.encodeUtf8 ("/" <> indexName <> "/_analyze"))
  -- refer to the sibling binding unqualified, like every other helper here
  -- (it previously used the self-qualified name Common.baseRequest)
  $ baseRequest
-- | @GET \/\<index\>\/\<type\>\/_search@ with the query as the JSON body.
searchJSON :: Aeson.ToJSON a => Text.Text -> Text.Text -> a -> IO ()
searchJSON indexName typeName query
  = sendRequest_
  . HTTP.Simple.setRequestBodyJSON query
  . HTTP.Simple.setRequestMethod "GET"
  . HTTP.Simple.setRequestPath (Text.Encoding.encodeUtf8 ("/" <> indexName <> "/" <> typeName <> "/_search"))
  $ baseRequest
-- | Like 'sendRequest' but discard the response.
sendRequest_ :: HTTP.Simple.Request -> IO ()
sendRequest_ request = sendRequest request >> return ()
-- | Log the request, send it, pretty-print the JSON response, and return
-- the response for further inspection.
sendRequest :: HTTP.Simple.Request -> IO (HTTP.Simple.Response Aeson.Value)
sendRequest request = do
  printRequest request
  response <- HTTP.Simple.httpJSON request
  -- refer to the sibling binding unqualified, consistent with the rest of
  -- the module (was the self-qualified Common.pPrintResponse)
  pPrintResponse response
  putStrLn ""
  return response
|
scott-fleischman/relevant-haskell
|
src/Common.hs
|
mit
| 4,174
| 0
| 16
| 728
| 1,242
| 653
| 589
| 108
| 4
|
-- | A peg is identified by its name.
type Peg = String

-- | One disc move: (from, to).
type Move = (Peg, Peg)

-- | Classic Tower of Hanoi: the sequence of moves that transfers @n@ discs
-- from @src@ to @dst@, using @aux@ as the spare peg (2^n - 1 moves).
hanoi :: Integer -> Peg -> Peg -> Peg -> [Move]
hanoi 0 _ _ _ = []
hanoi n src dst aux =
  hanoi (n - 1) src aux dst ++ (src, dst) : hanoi (n - 1) aux dst src
|
ransingh/cs194-haskell-course
|
week-1/tower-of-hanoi.hs
|
mit
| 176
| 0
| 9
| 48
| 122
| 66
| 56
| 5
| 1
|
{-
@Author: Felipe Rabelo
@Date: Oct 27 2017
-}
{-
This module is kind of useless, provided that an implemantation of an actual linked list,
would conflitct with one of the fundamental functional programming paradigms (the
immutability one). Therefor, this module is nothing but a toy module with the sole purpose
of being a study object on datatypes and its behavior.
-}
module List
( insertL
, insertR
) where
data List a = Empty | Node (List a) a (List a) deriving (Show, Read, Eq, Ord)
-- | A detached node with no neighbours.
singleton :: a -> List a
singleton value = Node Empty value Empty

-- | A node whose right link points at the given (previous) node.
singletonL :: a -> List a -> List a
singletonL value prevValue = Node Empty value prevValue

-- | A node whose left link points at the given (next) node.
singletonR :: a -> List a -> List a
singletonR value nextValue = Node nextValue value Empty
-- | Insert a value at the left end of the list.
insertL :: (Ord a) => a -> List a -> List a
insertL value Empty = singleton value
insertL value prevNode@(Node left a right) = Node (insertL' value left prevNode) a right

-- | Walk left until 'Empty' and attach the new node there; the third
-- argument carries the node we descended from.
-- NOTE(review): the new node links back to @rightNode@ as captured *before*
-- the rebuild, so cross-links are only consistent along the rebuilt spine —
-- presumably acceptable for this self-described toy module; confirm.
insertL' :: (Ord a) => a -> List a -> List a -> List a
insertL' value Empty rightNode@(Node left a right) = singletonL value rightNode
insertL' value prevNode@(Node left a right) _ = Node (insertL' value left prevNode) a right
-- | Insert a value at the right end of the list.
insertR :: (Ord a) => a -> List a -> List a
insertR value Empty = singleton value
insertR value nextNode@(Node left a right) = Node left a (insertR' value right nextNode)

-- | Intended mirror of 'insertL''.
-- NOTE(review): the argument order here does not match the call in
-- 'insertR' (which passes the subtree second and the parent third), and the
-- recursive call swaps the pair again — asymmetric w.r.t. 'insertL'' and
-- worth double-checking before any real use.
insertR' :: (Ord a) => a -> List a -> List a -> List a
insertR' value leftNode@(Node left a right) Empty = singletonR value leftNode
insertR' value _ nextNode@(Node left a right) = Node left a (insertR' value nextNode right)
|
KHs000/haskellToys
|
src/modules/List.hs
|
mit
| 1,564
| 0
| 9
| 310
| 537
| 269
| 268
| 22
| 1
|
{-|
Module: Y2018.D04
Description: Advent of Code Day 03 Solutions.
License: MIT
Maintainer: @tylerjl
Solutions to the day 04 set of problems for <adventofcode.com>.
-}
module Y2018.D04
( laziestGuard
, laziestMinute
)
where
import Y2015.Util (regularParse, intParser)
import qualified Data.Map.Strict as Map
import Control.Applicative ((<|>))
import Data.List (foldl', maximumBy, sort)
import Data.Ord (comparing)
import Text.Parsec.String (Parser)
import Text.Parsec.Char (endOfLine)
import Text.Parsec
( ParseError
, many
, optional
, space
, string
)
-- | One parsed line of the guard log: when it happened and what happened.
data Log = Log
  { timeStamp :: TimeStamp
  , entry :: Entry
  } deriving (Eq, Ord, Show)

-- | Minute-precision timestamp.  Only 'minute' is consumed by the solution;
-- the remaining fields exist so that derived 'Ord' sorts chronologically.
data TimeStamp = TimeStamp
  { _year :: Int
  , _month :: Int
  , _day :: Int
  , _hour :: Int
  , minute :: Int
  } deriving (Eq, Ord, Show)

-- | The three possible log events.
data Entry = StartShift Guard
           | Sleep
           | Wake
           deriving (Eq, Ord, Show)

-- | Guard identifier, as written after \"Guard #\".
type Guard = Int

-- | Fold state: the guard currently on shift (if any known yet) plus each
-- guard's accumulated shift data.
type GuardHistory = (Maybe Guard, Map.Map Guard Shift)

-- | Per-guard accumulator: how many times each minute 0..59 was slept
-- through, and the timestamp of the last state change.
data Shift = Shift
  { minutesSlept :: Map.Map Int Int
  , lastChange :: TimeStamp
  } deriving (Eq, Ord, Show)
-- | Part-two answer: pick the guard most frequently asleep during one and
-- the same minute, and return guard id * that minute.
--
-- Simplified from an explicit case on the 'Either' to '<$>', which already
-- maps Right and passes Left through unchanged.
laziestMinute :: String -> Either ParseError Int
laziestMinute input = logFilter maximum <$> parseLog input
-- | Part-one answer: pick the guard with the greatest total minutes asleep,
-- and return guard id * his sleepiest minute.
--
-- Simplified from an explicit case on the 'Either' to '<$>', which already
-- maps Right and passes Left through unchanged.
laziestGuard :: String -> Either ParseError Int
laziestGuard input = logFilter (Map.foldl (+) 0) <$> parseLog input
-- | Fold the (sorted) logs into per-guard sleep histograms, select the
-- guard maximising @f@ over his histogram, and multiply that guard's id by
-- his most-slept minute.
--
-- The hand-written comparison lambda is replaced by the equivalent
-- @maximumBy (comparing snd)@ — 'comparing' is already imported and used in
-- the sibling expression below.
logFilter :: Ord a => (Map.Map Int Int -> a) -> [Log] -> Guard
logFilter f logs =
  let -- minute with the highest sleep count for the chosen guard
      sleepiestMinute =
          fst
            $ maximumBy (comparing snd)
            $ Map.toList
            $ minutesSlept
            $ snd guard
      guardID = fst guard
      -- the (id, Shift) pair maximising f over the histogram
      guard =
          maximumBy (comparing (f . minutesSlept . snd))
            $ Map.toList
            $ snd
            $ foldl' recordLog (Nothing, Map.empty)
            $ sort logs
  in  guardID * sleepiestMinute
-- | Fold one chronologically ordered log line into the history.
-- A guard starting a shift gets a fresh all-zero histogram (or, if already
-- known, just an updated 'lastChange').
recordLog :: GuardHistory -> Log -> GuardHistory
recordLog (_current, h) (Log { timeStamp = ts, entry = (StartShift g) }) =
  (Just g, Map.insertWith shiftChange g toShift h)
  where toShift =
          Shift
            { minutesSlept = Map.fromList $ zip [0 .. 59] $ repeat 0
            , lastChange = ts
            }
        shiftChange _newShift oldShift =
          oldShift { lastChange = ts }
-- Waking up: credit one sleep to every minute in [fell-asleep .. wake - 1].
recordLog (Just current, h) (Log { timeStamp = ts@(TimeStamp { minute = m }), entry = (Wake) }) =
  (Just current, Map.adjust transition current h)
  where transition oldShift@(Shift { lastChange = (TimeStamp { minute = m' }), minutesSlept = minutes }) =
          oldShift
            { lastChange = ts
            , minutesSlept = Map.unionWith (+) minutes $ Map.fromList $ zip [m' .. (m - 1)] $ repeat 1
            }
-- Falling asleep: only remember when it happened.
recordLog (Just current, h) (Log { timeStamp = ts, entry = (Sleep) }) =
  (Just current, Map.adjust transition current h)
  where transition oldShift = oldShift { lastChange = ts }
-- Sleep/Wake before any guard has started a shift: ignored.
recordLog gh@(Nothing, _) _ = gh
-- Parsing
-- | Parse the raw puzzle input into (unsorted) 'Log' entries.
parseLog :: String
         -> Either ParseError [Log]
parseLog = regularParse logParser
-- | Zero or more log lines, each optionally terminated by a newline.
logParser :: Parser [Log]
logParser = many (parseRawLog <* optional endOfLine)

-- | One line: @[timestamp] \<entry\>@.
parseRawLog :: Parser Log
parseRawLog = Log <$> (parseTimeStamp <* space) <*> parseEntry

-- | Bracketed timestamp, e.g. @[1518-11-01 00:05]@.
parseTimeStamp :: Parser TimeStamp
parseTimeStamp = TimeStamp <$ string "[" <*> intParser <* string "-"
                           <*> intParser <* string "-"
                           <*> intParser <* space
                           <*> intParser <* string ":"
                           <*> intParser <* string "]"

-- | One of the three event forms of the log.
parseEntry :: Parser Entry
parseEntry = StartShift <$ string "Guard #" <*> intParser <* string " begins shift"
         <|> Sleep <$ string "falls asleep"
         <|> Wake <$ string "wakes up"
|
tylerjl/adventofcode
|
src/Y2018/D04.hs
|
mit
| 3,825
| 0
| 18
| 1,106
| 1,229
| 666
| 563
| 97
| 2
|
-- | Fibonacci numbers with the 1,1,2,3,5,... indexing (@fibRec 0 == 1@).
--
-- Rewritten from the naive double recursion (exponential time) to a
-- linear-time accumulator loop; results are unchanged for n >= 0.
fibRec :: Integer -> Integer
fibRec n = go n 1 1
  where
    -- go k a b: a is the value at the current position, b the next one;
    -- k counts down to 0.
    go 0 a _ = a
    go k a b = go (k - 1) b (a + b)
|
samidarko/algorithms
|
fib/fig.hs
|
mit
| 92
| 0
| 8
| 19
| 58
| 29
| 29
| 4
| 1
|
{-# htermination lookupFM :: Ord a => FiniteMap (Maybe a) b -> (Maybe a) -> Maybe b #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_lookupFM_9.hs
|
mit
| 105
| 0
| 3
| 20
| 5
| 3
| 2
| 1
| 0
|
{-|
Purely functional red-black trees.
* Chris Okasaki, \"Red-Black Trees in a Functional Setting\",
Journal of Functional Programming, 9(4), pp 471-477, July 1999
<http://www.eecs.usma.edu/webs/people/okasaki/pubs.html#jfp99>
* Stefan Kahrs, \"Red-black trees with types\",
Journal of functional programming, 11(04), pp 425-432, July 2001
-}
module Data.Set.RBTree (
-- * Data structures
RBTree(..)
, Color(..)
, BlackHeight
-- * Creating red-black trees
, empty
, singleton
, insert
, fromList
-- * Converting to a list
, toList
-- * Membership
, member
-- * Deleting
, delete
, deleteMin
, deleteMax
-- * Checking
, null
-- * Set operations
, union
, intersection
, difference
-- * Helper functions
, join
, merge
, split
, minimum
, maximum
, valid
, showSet
, printSet
) where
import Data.List (foldl')
import Prelude hiding (minimum, maximum, null)
----------------------------------------------------------------
-- Part to be shared
----------------------------------------------------------------
-- | Red-black tree; 'Leaf' is implicitly black.  Each 'Node' caches its
-- black height so rebalancing never has to recompute it.
data RBTree a = Leaf -- color is Black
              | Node Color !BlackHeight !(RBTree a) a !(RBTree a)
              deriving (Show)

data Color = B -- ^ Black
           | R -- ^ Red
           deriving (Eq,Show)

{-|
  Red nodes have the same BlackHeight of their parent.
-}
type BlackHeight = Int
----------------------------------------------------------------
-- | Trees compare by their sorted element lists, so structurally different
-- but element-equal trees are equal.
instance (Eq a) => Eq (RBTree a) where
    t1 == t2 = toList t1 == toList t2
----------------------------------------------------------------
-- | Cached black height of the root; 0 for the empty tree.
height :: RBTree a -> BlackHeight
height Leaf = 0
height (Node _ h _ _ _) = h
----------------------------------------------------------------
{-|
See if the red black tree is empty.
>>> Data.Set.RBTree.null empty
True
>>> Data.Set.RBTree.null (singleton 1)
False
-}
null :: Eq a => RBTree a -> Bool
null t = t == Leaf  -- via the element-list Eq instance
----------------------------------------------------------------
{-| Empty tree.
>>> height empty
0
-}
empty :: RBTree a
empty = Leaf  -- the empty tree is a single (black) leaf
{-| Singleton tree.
>>> height (singleton 'a')
1
-}
singleton :: Ord a => a -> RBTree a
singleton x = Node B 1 Leaf x Leaf  -- one black node, black height 1
----------------------------------------------------------------
{-| Creating a tree from a list. O(N log N)
>>> empty == fromList []
True
>>> singleton 'a' == fromList ['a']
True
>>> fromList [5,3,5] == fromList [5,3]
True
-}
fromList :: Ord a => [a] -> RBTree a
fromList = foldl' (flip insert) empty  -- strict fold; duplicates collapse
----------------------------------------------------------------
{-| Creating a list from a tree. O(N)
>>> toList (fromList [5,3])
[3,5]
>>> toList empty
[]
-}
toList :: RBTree a -> [a]
toList t = inorder t []
  where
    -- accumulator-style in-order walk: O(N), no quadratic appends
    inorder Leaf xs = xs
    inorder (Node _ _ l x r) xs = inorder l (x : inorder r xs)
----------------------------------------------------------------
{-| Checking if this element is a member of a tree?
>>> member 5 (fromList [5,3])
True
>>> member 1 (fromList [5,3])
False
-}
member :: Ord a => a -> RBTree a -> Bool
member _ Leaf = False
member key (Node _ _ lhs x rhs)
  | key < x = member key lhs
  | key > x = member key rhs
  | otherwise = True  -- found it
----------------------------------------------------------------
-- | Both red-black invariants hold: uniform black counts and no red-red
-- parent/child pair.
isBalanced :: RBTree a -> Bool
isBalanced t = isBlackSame t && isRedSeparate t

-- | Every root-to-leaf path carries the same number of black nodes.
isBlackSame :: RBTree a -> Bool
isBlackSame t = all (n==) ns
  where
    -- 'blacks' always yields at least one element, so this cannot fail
    n:ns = blacks t

-- | Black-node count of every root-to-leaf path (leaves count as black).
blacks :: RBTree a -> [Int]
blacks = blacks' 0
  where
    blacks' n Leaf = [n+1]
    blacks' n (Node R _ l _ r) = blacks' n l ++ blacks' n r
    blacks' n (Node B _ l _ r) = blacks' n' l ++ blacks' n' r
      where
        n' = n + 1

-- | No red node has a red child.
isRedSeparate :: RBTree a -> Bool
isRedSeparate = reds B

-- | Walk the tree remembering the parent's colour; red-over-red fails.
reds :: Color -> RBTree t -> Bool
reds _ Leaf = True
reds R (Node R _ _ _ _) = False
reds _ (Node c _ l _ r) = reds c l && reds c r
-- | The in-order traversal is strictly increasing (so no duplicates).
isOrdered :: Ord a => RBTree a -> Bool
isOrdered t = ordered $ toList t
  where
    ordered [] = True
    ordered [_] = True
    ordered (x:y:xys) = x < y && ordered (y:xys)
-- | Check that the cached 'BlackHeight' fields are consistent with the
-- structure; requires a black (or empty) root.
blackHeight :: RBTree a -> Bool
blackHeight Leaf = True
blackHeight t@(Node B i _ _ _) = bh i t
  where
    bh n Leaf = n == 0
    -- red nodes share their parent's recorded height
    bh n (Node R h l _ r) = n == h' && bh n l && bh n r
      where
        h' = h - 1
    -- black nodes decrement the expected height for their children
    bh n (Node B h l _ r) = n == h && bh n' l && bh n' r
      where
        n' = n - 1
blackHeight _ = error "blackHeight"  -- red root: invariant already broken
----------------------------------------------------------------
-- | Force the root red; partial (the tree must be non-empty).
turnR :: RBTree a -> RBTree a
turnR Leaf = error "turnR"
turnR (Node _ h l x r) = Node R h l x r

-- | Force the root black; partial (the tree must be non-empty).
turnB :: RBTree a -> RBTree a
turnB Leaf = error "turnB"
turnB (Node _ h l x r) = Node B h l x r

-- | Force the root black; total variant ('Leaf' is left alone).
turnB' :: RBTree a -> RBTree a
turnB' Leaf = Leaf
turnB' (Node _ h l x r) = Node B h l x r
----------------------------------------------------------------
{-| Finding the minimum element. O(log N)
>>> minimum (fromList [3,5,1])
1
>>> minimum empty
*** Exception: minimum
-}
minimum :: RBTree a -> a
minimum (Node _ _ Leaf x _) = x  -- leftmost element
minimum (Node _ _ l _ _) = minimum l
minimum _ = error "minimum"      -- empty tree
{-| Finding the maximum element. O(log N)
>>> maximum (fromList [3,5,1])
5
>>> maximum empty
*** Exception: maximum
-}
maximum :: RBTree a -> a
maximum (Node _ _ _ x Leaf) = x  -- rightmost element
maximum (Node _ _ _ _ r) = maximum r
maximum _ = error "maximum"      -- empty tree
----------------------------------------------------------------
-- | Render the tree structure, one node per line, children indented.
showSet :: Show a => RBTree a -> String
showSet = showSet' ""

-- | Worker: @pref@ is the indentation accumulated so far.
showSet' :: Show a => String -> RBTree a -> String
showSet' _ Leaf = "\n"
showSet' pref (Node k h l x r) = show k ++ " " ++ show x ++ " (" ++ show h ++ ")\n"
                                 ++ pref ++ "+ " ++ showSet' pref' l
                                 ++ pref ++ "+ " ++ showSet' pref' r
  where
    pref' = " " ++ pref

-- | 'showSet' printed to stdout.
printSet :: Show a => RBTree a -> IO ()
printSet = putStr . showSet
----------------------------------------------------------------
-- | Is the root of this (sub)tree red?
isRed :: RBTree a -> Bool
isRed (Node R _ _ _ _ ) = True
isRed _ = False
----------------------------------------------------------------
-- Basic operations
----------------------------------------------------------------
{-| Checking validity of a tree.
-}
valid :: Ord a => RBTree a -> Bool
valid t = isBalanced t && blackHeight t && isOrdered t  -- all invariants
----------------------------------------------------------------
-- Chris Okasaki
--
{-| Insertion. O(log N)
>>> insert 5 (fromList [5,3]) == fromList [3,5]
True
>>> insert 7 (fromList [5,3]) == fromList [3,5,7]
True
>>> insert 5 empty == singleton 5
True
-}
insert :: Ord a => a -> RBTree a -> RBTree a
insert kx t = turnB (insert' kx t)  -- the root is always re-blackened

-- | Insert below a possibly-red node; may return a red root, which the
-- wrapper 'insert' fixes up.
insert' :: Ord a => a -> RBTree a -> RBTree a
insert' kx Leaf = Node R 1 Leaf kx Leaf
insert' kx s@(Node B h l x r) = case compare kx x of
    LT -> balanceL' h (insert' kx l) x r
    GT -> balanceR' h l x (insert' kx r)
    EQ -> s  -- element already present: tree unchanged
insert' kx s@(Node R h l x r) = case compare kx x of
    LT -> Node R h (insert' kx l) x r
    GT -> Node R h l x (insert' kx r)
    EQ -> s
-- | Okasaki's insertion rebalance, left side: a red-red violation in the
-- left child becomes a red parent with two black children.
balanceL' :: BlackHeight -> RBTree a -> a -> RBTree a -> RBTree a
balanceL' h (Node R _ (Node R _ a x b) y c) z d =
    Node R (h+1) (Node B h a x b) y (Node B h c z d)
balanceL' h (Node R _ a x (Node R _ b y c)) z d =
    Node R (h+1) (Node B h a x b) y (Node B h c z d)
balanceL' h l x r = Node B h l x r

-- | Mirror image of 'balanceL''.
balanceR' :: BlackHeight -> RBTree a -> a -> RBTree a -> RBTree a
balanceR' h a x (Node R _ b y (Node R _ c z d)) =
    Node R (h+1) (Node B h a x b) y (Node B h c z d)
balanceR' h a x (Node R _ (Node R _ b y c) z d) =
    Node R (h+1) (Node B h a x b) y (Node B h c z d)
balanceR' h l x r = Node B h l x r
----------------------------------------------------------------
-- | Like 'balanceL'' but parameterised by the colour being built: only a
-- black parent triggers the rotation.
balanceL :: Color -> BlackHeight -> RBTree a -> a -> RBTree a -> RBTree a
balanceL B h (Node R _ (Node R _ a x b) y c) z d =
    Node R (h+1) (Node B h a x b) y (Node B h c z d)
balanceL B h (Node R _ a x (Node R _ b y c)) z d =
    Node R (h+1) (Node B h a x b) y (Node B h c z d)
balanceL k h l x r = Node k h l x r

-- | Mirror image of 'balanceL'.
balanceR :: Color -> BlackHeight -> RBTree a -> a -> RBTree a -> RBTree a
balanceR B h a x (Node R _ b y (Node R _ c z d)) =
    Node R (h+1) (Node B h a x b) y (Node B h c z d)
balanceR B h a x (Node R _ (Node R _ b y c) z d) =
    Node R (h+1) (Node B h a x b) y (Node B h c z d)
balanceR k h l x r = Node k h l x r
----------------------------------------------------------------
-- | Deletion result: the tree paired with True when it has lost one
-- black node and the caller must keep rebalancing upward.
type RBTreeBDel a = (RBTree a, Bool)

-- Rebalancing used when the RIGHT subtree has lost one black node:
-- borrow from a black left sibling, or (second clause) rotate a red
-- left sibling so its black right child can donate.
unbalancedL :: Color -> BlackHeight -> RBTree a -> a -> RBTree a -> RBTreeBDel a
unbalancedL c h l@(Node B _ _ _ _) x r
    = (balanceL B h (turnR l) x r, c == B)
unbalancedL B h (Node R lh ll lx lr@(Node B _ _ _ _)) x r
    = (Node B lh ll lx (balanceL B h (turnR lr) x r), False)
unbalancedL _ _ _ _ _ = error "unbalancedL"
-- Rebalancing used when the LEFT subtree has lost one black node.
unbalancedR :: Color -> BlackHeight -> RBTree a -> a -> RBTree a -> (RBTree a, Bool)
-- Borrow from a black right sibling; still deficient iff c was black.
unbalancedR c h l x r@(Node B _ _ _ _)
    = (balanceR B h l x (turnR r), c == B)
-- Right sibling is red: rotate so its black left child can donate.
unbalancedR B h l x (Node R rh rl@(Node B _ _ _ _) rx rr)
    = (Node B rh (balanceR B h l x (turnR rl)) rx rr, False)
unbalancedR _ _ _ _ _ = error "unbalancedR"
----------------------------------------------------------------
{-| Deleting the minimum element. O(log N)

>>> deleteMin (fromList [5,3,7]) == fromList [5,7]
True
>>> deleteMin empty == empty
True
-}
deleteMin :: RBTree a -> RBTree a
deleteMin Leaf = empty
deleteMin t =
    -- discard the deficiency flag and the removed element;
    -- repainting the root black absorbs any remaining imbalance
    let ((s, _), _) = deleteMin' t
    in turnB' s
-- | Worker for 'deleteMin': removes the minimum and returns it together
-- with the remaining tree and a flag that is True when the black height
-- shrank and the caller must rebalance (via 'unbalancedR').
deleteMin' :: RBTree a -> (RBTreeBDel a, a)
deleteMin' Leaf = error "deleteMin'" -- unreachable: 'deleteMin' handles Leaf
deleteMin' (Node B _ Leaf x Leaf) = ((Leaf, True), x)
deleteMin' (Node B _ Leaf x r@(Node R _ _ _ _)) = ((turnB r, False), x)
deleteMin' (Node R _ Leaf x r) = ((r, False), x)
deleteMin' (Node c h l x r) = if d then (tD, m) else (tD', m)
  where
    ((l',d),m) = deleteMin' l
    -- left subtree lost a black node: rebalance here
    tD  = unbalancedR c (h-1) l' x r
    tD' = (Node c h l' x r, False)
----------------------------------------------------------------
{-| Deleting the maximum element. O(log N)

>>> deleteMax (fromList [5,3,7]) == fromList [3,5]
True
>>> deleteMax empty == empty
True
-}
deleteMax :: RBTree a -> RBTree a
deleteMax Leaf = empty
deleteMax t = turnB' s
  where
    -- discard the deficiency flag and the removed element
    ((s, _), _) = deleteMax' t
-- | Mirror of 'deleteMin'': removes the maximum; the flag is True when
-- the black height shrank and the caller must rebalance
-- (via 'unbalancedL').
deleteMax' :: RBTree a -> (RBTreeBDel a, a)
deleteMax' Leaf = error "deleteMax'" -- unreachable: 'deleteMax' handles Leaf
deleteMax' (Node B _ Leaf x Leaf) = ((Leaf, True), x)
deleteMax' (Node B _ l@(Node R _ _ _ _) x Leaf) = ((turnB l, False), x)
deleteMax' (Node R _ l x Leaf) = ((l, False), x)
deleteMax' (Node c h l x r) = if d then (tD, m) else (tD', m)
  where
    ((r',d),m) = deleteMax' r
    -- right subtree lost a black node: rebalance here
    tD  = unbalancedL c (h-1) l x r'
    tD' = (Node c h l x r', False)
----------------------------------------------------------------
-- | Absorb a missing black node by repainting a red root black.
-- Returns False (deficiency repaired) for a red root, True (still
-- one black short) otherwise.
blackify :: RBTree a -> RBTreeBDel a
blackify t = case t of
    Node R _ _ _ _ -> (turnB t, False)
    _              -> (t, True)
{-| Deleting this element from a tree. O(log N)

>>> delete 5 (fromList [5,3]) == singleton 3
True
>>> delete 7 (fromList [5,3]) == fromList [3,5]
True
>>> delete 5 empty == empty
True
-}
delete :: Ord a => a -> RBTree a -> RBTree a
delete x t =
    -- drop the deficiency flag; repainting the root black restores
    -- the invariant
    let (s, _) = delete' x t
    in turnB' s
-- | Worker for 'delete'.  The Bool is True when the subtree lost a
-- black node and the parent must rebalance.
delete' :: Ord a => a -> RBTree a -> RBTreeBDel a
delete' _ Leaf = (Leaf, False)
delete' x (Node c h l y r) = case compare x y of
    LT -> let (l',d) = delete' x l
              t = Node c h l' y r
          in if d then unbalancedR c (h-1) l' y r else (t, False)
    GT -> let (r',d) = delete' x r
              t = Node c h l y r'
          in if d then unbalancedL c (h-1) l y r' else (t, False)
    EQ -> case r of
        -- no right child: splice this node out, repainting if needed
        Leaf -> if c == B then blackify l else (l, False)
        -- otherwise replace this element with the minimum of r
        _ -> let ((r',d),m) = deleteMin' r
                 t = Node c h l m r'
             in if d then unbalancedL c (h-1) l m r' else (t, False)
----------------------------------------------------------------
-- Set operations
----------------------------------------------------------------
{-| Joining two trees with an element. O(log N)

Each element of the left tree must be less than the element.
Each element of the right tree must be greater than the element.
Both trees must have black roots.
-}
join :: Ord a => RBTree a -> a -> RBTree a -> RBTree a
join Leaf k t2 = insert k t2
join t1 k Leaf = insert k t1
-- Graft the shorter tree onto the spine of the taller one at the
-- level where their black heights match.
join t1 k t2 = case compare h1 h2 of
    LT -> turnB $ joinLT t1 k t2 h1
    GT -> turnB $ joinGT t1 k t2 h2
    EQ -> Node B (h1+1) t1 k t2
  where
    h1 = height t1
    h2 = height t2
-- The root of the result must be red.
-- Descend the left spine of the taller right tree until its black
-- height equals h1, graft t1 there, and rebalance on the way back up.
joinLT :: Ord a => RBTree a -> a -> RBTree a -> BlackHeight -> RBTree a
joinLT t1 g t2@(Node c h l x r) h1
    | h == h1   = Node R (h+1) t1 g t2
    | otherwise = balanceL c h (joinLT t1 g l h1) x r
joinLT _ _ _ _ = error "joinLT" -- unreachable: h1 < height t2 on entry
-- The root of the result must be red.
-- Mirror of 'joinLT': descend the right spine of the taller left tree.
joinGT :: Ord a => RBTree a -> a -> RBTree a -> BlackHeight -> RBTree a
joinGT t1@(Node c h l x r) g t2 h2
    | h == h2   = Node R (h+1) t1 g t2
    | otherwise = balanceR c h l x (joinGT r g t2 h2)
joinGT _ _ _ _ = error "joinGT" -- unreachable: h2 < height t1 on entry
----------------------------------------------------------------
{-| Merging two trees. O(log N)

Each element of the left tree must be less than each element of
the right tree. Both trees must have black roots.
-}
merge :: Ord a => RBTree a -> RBTree a -> RBTree a
merge Leaf t2 = t2
merge t1 Leaf = t1
-- Like 'join' but without a bridging element: descend the taller tree
-- to matching black height, then merge equal-height trees.
merge t1 t2 = case compare h1 h2 of
    LT -> turnB $ mergeLT t1 t2 h1
    GT -> turnB $ mergeGT t1 t2 h2
    EQ -> turnB $ mergeEQ t1 t2
  where
    h1 = height t1
    h2 = height t2
-- | Merge when the right tree is taller: walk its left spine down to
-- black height h1, merge there, and rebalance on the way back up.
mergeLT :: Ord a => RBTree a -> RBTree a -> BlackHeight -> RBTree a
mergeLT t1 t2@(Node c h l x r) h1
    | h == h1   = mergeEQ t1 t2
    | otherwise = balanceL c h (mergeLT t1 l h1) x r
mergeLT _ _ _ = error "mergeLT" -- unreachable: h1 < height t2 on entry
-- | Mirror of 'mergeLT': the left tree is taller; walk its right spine.
mergeGT :: Ord a => RBTree a -> RBTree a -> BlackHeight -> RBTree a
mergeGT t1@(Node c h l x r) t2 h2
    | h == h2   = mergeEQ t1 t2
    | otherwise = balanceR c h l x (mergeGT r t2 h2)
mergeGT _ _ _ = error "mergeGT" -- unreachable: h2 < height t1 on entry
{-
Merging two trees whose black heights are the same.
The root of the result is either
    a red node with height + 1
or
    a black node with height
-}
-- | Merge two trees of equal black height, bridging with the minimum
-- element of the right tree.
mergeEQ :: Ord a => RBTree a -> RBTree a -> RBTree a
mergeEQ Leaf Leaf = Leaf
mergeEQ t1@(Node _ h l x r) t2
    | h == h2'  = Node R (h+1) t1 m t2'
    | isRed l   = Node R (h+1) (turnB l) x (Node B h r m t2')
    -- unnecessary for LL
    | isRed r   = Node B h (Node R h l x rl) rx (Node R h rr m t2')
    | otherwise = Node B h (turnR t1) m t2'
  where
    m   = minimum t2   -- bridge element: smallest of the right tree
    t2' = deleteMin t2
    h2' = height t2'
    -- lazy partial match: only demanded in the "isRed r" branch
    Node R _ rl rx rr = r
mergeEQ _ _ = error "mergeEQ" -- unreachable for trees of equal height
----------------------------------------------------------------
{-| Splitting a tree. O(log N)

Returns (elements < kx, elements > kx); kx itself is dropped.

>>> split 2 (fromList [5,3]) == (empty, fromList [3,5])
True
>>> split 3 (fromList [5,3]) == (empty, singleton 5)
True
>>> split 4 (fromList [5,3]) == (singleton 3, singleton 5)
True
>>> split 5 (fromList [5,3]) == (singleton 3, empty)
True
>>> split 6 (fromList [5,3]) == (fromList [3,5], empty)
True
-}
split :: Ord a => a -> RBTree a -> (RBTree a, RBTree a)
split _ Leaf = (Leaf,Leaf)
split kx (Node _ _ l x r) = case compare kx x of
    -- 'turnB'' restores the black-root precondition of 'join'
    LT -> (lt, join gt x (turnB' r)) where (lt,gt) = split kx l
    GT -> (join (turnB' l) x lt, gt) where (lt,gt) = split kx r
    EQ -> (turnB' l, turnB' r)
{- LL
split :: Ord a => a -> RBTree a -> (RBTree a, RBTree a)
split _ Leaf = (Leaf,Leaf)
split kx (Node _ _ l x r) = case compare kx x of
LT -> (lt, join gt x r) where (lt,gt) = split kx l
GT -> (join l x lt, gt) where (lt,gt) = split kx r
EQ -> (turnB' l, r)
-}
----------------------------------------------------------------
{-| Creating a union tree from two trees. O(N + M)

>>> union (fromList [5,3]) (fromList [5,7]) == fromList [3,5,7]
True
-}
union :: Ord a => RBTree a -> RBTree a -> RBTree a
union t1 Leaf = t1 -- ensured Black thanks to split
union Leaf t2 = turnB' t2
-- split t1 around x, then join the unions of the matching halves
union t1 (Node _ _ l x r) = join (union l' l) x (union r' r)
  where
    (l',r') = split x t1
{-| Creating an intersection tree from two trees. O(N + M)

>>> intersection (fromList [5,3]) (fromList [5,7]) == singleton 5
True
-}
intersection :: Ord a => RBTree a -> RBTree a -> RBTree a
intersection Leaf _ = Leaf
intersection _ Leaf = Leaf
-- keep x only when it also occurs in t1; recurse on matching halves
intersection t1 (Node _ _ l x r)
    | member x t1 = join (intersection l' l) x (intersection r' r)
    | otherwise   = merge (intersection l' l) (intersection r' r)
  where
    (l',r') = split x t1
{-| Creating a difference tree from two trees (t1 \\ t2). O(N + M)

>>> difference (fromList [5,3]) (fromList [5,7]) == singleton 3
True
-}
difference :: Ord a => RBTree a -> RBTree a -> RBTree a
difference Leaf _ = Leaf
difference t1 Leaf = t1 -- ensured Black thanks to split
-- x belongs to t2, so it is dropped; recurse on matching halves
difference t1 (Node _ _ l x r) = merge (difference l' l) (difference r' r)
  where
    (l',r') = split x t1
|
rickyhan/rbtree
|
src/rbtree.hs
|
mit
| 16,953
| 0
| 16
| 4,376
| 6,263
| 3,161
| 3,102
| 294
| 8
|
{-# LANGUAGE ConstraintKinds, DeriveFunctor, DeriveFoldable, DeriveTraversable, FlexibleContexts, FlexibleInstances, GeneralizedNewtypeDeriving, ImplicitParams, LambdaCase, MultiParamTypeClasses, OverloadedStrings, PackageImports, ScopedTypeVariables, TemplateHaskell #-}
module Formura.MPICxx.Translate where
import Control.Applicative
import Control.Concurrent(threadDelay)
import qualified Control.Exception as X
import Control.Lens
import Control.Monad
import "mtl" Control.Monad.RWS
import Data.Char (toUpper, isAlphaNum)
import Data.Foldable (toList)
import Data.Function (on)
import Data.List (zip4, isPrefixOf, sort, groupBy, sortBy)
import qualified Data.Map as M
import Data.Maybe
import Data.String
import Data.String.ToString
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Text.Lens as T
import qualified Data.Text.IO as T
import System.Directory
import System.FilePath.Lens
import System.Process
import Text.Trifecta (failed, raiseErr)
import Formura.Utilities (readYamlDef, zipWithFT)
import qualified Formura.Annotation as A
import Formura.Annotation.Boundary
import Formura.Annotation.Representation
import Formura.Compiler
import Formura.CommandLineOption
import Formura.Geometry
import Formura.GlobalEnvironment
import Formura.Language.Combinator (subFix)
import Formura.NumericalConfig
import Formura.OrthotopeMachine.Graph
import Formura.OrthotopeMachine.TemporalBlocking
import Formura.Syntax
import Formura.Vec
import qualified Formura.MPICxx.Language as C
import Formura.MPICxx.Cut hiding (cut)
-- | A piece of generated C source used as a node's variable-name
-- annotation.
newtype VariableName = VariableName C.Src
-- | The struct for generating unique names, and holds already given names.
data NamingState = NamingState
  { _alreadyGivenNames :: S.Set C.Src            -- ^ global names handed out so far
  , _alreadyGivenLocalNames :: S.Set C.Src       -- ^ names handed out in the current local scope
  , _alreadyDeclaredResourceNames :: S.Set C.Src -- ^ resource names already declared in the C output
  , _freeNameCounter :: Integer                  -- ^ suffix counter for resolving global name clashes
  , _freeLocalNameCounter :: Integer             -- ^ suffix counter for resolving local name clashes
  , _nodeIDtoLocalName :: M.Map MMNodeID C.Src   -- ^ graph node -> local C name (or inlined expression)
  , _loopIndexNames :: Vec C.Src                 -- ^ per-axis loop index identifiers
  , _loopIndexOffset :: Vec Int                  -- ^ index + offset = physical address (see genMMInstruction)
  , _loopExtentNames :: Vec C.Src                -- ^ per-axis loop extent identifiers
  }
makeClassy ''NamingState
-- | Initial 'NamingState': no names taken, counters at zero, empty
-- node-name map, placeholder loop index/extent names.
-- (Fix: added the missing top-level type signature.)
defaultNamingState :: NamingState
defaultNamingState = NamingState
  { _alreadyGivenNames = S.empty
  , _alreadyGivenLocalNames = S.empty
  , _alreadyDeclaredResourceNames = S.empty
  , _freeNameCounter = 0
  , _freeLocalNameCounter = 0
  , _nodeIDtoLocalName = M.empty
  , _loopIndexNames = PureVec ""
  , _loopIndexOffset = 0
  , _loopExtentNames = PureVec ""
  }
-- | Selects which entry of '_tsMPIPlanMap' is currently active.
type MPIPlanSelector = Bool
-- | Full state of the C code generator.
data TranState = TranState
  { _tranSyntacticState :: CompilerSyntacticState
  , _tsNumericalConfig :: NumericalConfig
  , _tsNamingState :: NamingState
  , _theProgram :: Program
  , _theMMProgram :: MMProgram
  , _theGraph :: MMGraph            -- ^ graph consulted by 'lookupNode'
  , _tsMPIPlanSelection :: MPIPlanSelector
  , _tsMPIPlanMap :: M.Map MPIPlanSelector MPIPlan
  , _tsCommonStaticBox :: Box
  , _tsCommonOMNodeBox :: Box
  , _tsCxxTemplateWithMacro :: C.Src
  }
makeClassy ''TranState
instance HasCompilerSyntacticState TranState where
  compilerSyntacticState = tranSyntacticState
instance HasNumericalConfig TranState where
  numericalConfig = tsNumericalConfig
instance HasMachineProgram TranState MMInstruction OMNodeType where
  machineProgram = theMMProgram
instance HasNamingState TranState where
  namingState = tsNamingState
-- | The active 'MPIPlan' is the entry of '_tsMPIPlanMap' keyed by the
-- current '_tsMPIPlanSelection'.
instance HasMPIPlan TranState where
  mPIPlan =
    let
      -- NOTE(review): 'fromJust' is partial — this crashes if no plan
      -- was inserted for the current selection; confirm both plans are
      -- always populated before this lens is read.
      gettr s = fromJust $ M.lookup (s^.tsMPIPlanSelection) (s^.tsMPIPlanMap)
      settr s a = s & tsMPIPlanMap %~ M.insert (s^.tsMPIPlanSelection) a
    in lens gettr settr
-- | Output of code generation: header content, source content, and any
-- auxiliary files keyed by path.  Parameterised so it can be traversed.
data CProgramF a = CProgram { _headerFileContent :: a, _sourceFileContent :: a,
                              _auxFilesContent :: M.Map FilePath a}
  deriving (Eq, Ord, Show, Functor, Foldable, Traversable)
-- | The concrete output type: each part is generated C source.
type CProgram = CProgramF C.Src
makeLenses ''CProgramF
-- | Emit text into the generated header file only.
tellH :: (MonadWriter CProgram m) => C.Src -> m ()
tellH txt = tell (CProgram txt "" M.empty)
-- | Emit text into the generated source file only.
tellC :: (MonadWriter CProgram m) => C.Src -> m ()
tellC txt = tell (CProgram "" txt M.empty)
-- | Emit the same text into both header and source.
tellBoth :: (MonadWriter CProgram m) => C.Src -> m ()
tellBoth txt = tell (CProgram txt txt M.empty)
-- | Emit text into the named auxiliary file.
tellF :: (MonadWriter CProgram m) => FilePath -> C.Src -> m ()
tellF fn txt = tell (CProgram "" "" (M.singleton fn txt))
-- | Newline-terminated variants of the emitters above.
tellHLn :: (MonadWriter CProgram m) => C.Src -> m ()
tellHLn = tellH . (<> "\n")
tellCLn :: (MonadWriter CProgram m) => C.Src -> m ()
tellCLn = tellC . (<> "\n")
tellBothLn :: (MonadWriter CProgram m) => C.Src -> m ()
tellBothLn = tellBoth . (<> "\n")
tellFLn :: (MonadWriter CProgram m) => FilePath -> C.Src -> m ()
tellFLn fn = tellF fn . (<> "\n")
-- | Concatenation of generated outputs: header/source texts are
-- appended, auxiliary files are merged per path.
instance Monoid CProgram where
  mempty = CProgram "" "" M.empty
  mappend (CProgram h1 c1 f1) (CProgram h2 c2 f2) = CProgram (h1 <> h2) (c1 <> c2) (M.unionWith (<>) f1 f2)
-- | The code-generation monad: global environment, 'CProgram' writer,
-- 'TranState' state.
type TranM = CompilerMonad GlobalEnvironment CProgram TranState
-- * Parallel code generation
-- | generate new free global name based on given identifier,
-- and prevent further generation of that name
genFreeName :: IdentName -> TranM C.Src
genFreeName = genFreeName' True
-- | generate new free local name based on given identifier,
-- and prevent further generation of that name within current scope
genFreeLocalName :: IdentName -> TranM C.Src
genFreeLocalName = genFreeName' False
-- | base function for giving names.
-- If the candidate clashes with any already-given (global or local)
-- name, append "_<counter>" and bump the counter until fresh.
genFreeName' :: Bool -> IdentName -> TranM C.Src
genFreeName' isGlobal ident = do
  aggNames <- use alreadyGivenNames
  aglNames <- use alreadyGivenLocalNames
  let initName = fromString ident
      agNames = aggNames <> aglNames
      nCounter :: Lens' TranState Integer
      nCounter = if isGlobal then freeNameCounter else freeLocalNameCounter
      go = do
        ctr <- use nCounter
        let tmpName = initName <> "_" <> C.show ctr
        if S.member tmpName agNames
          then (nCounter += 1) >> go
          else return tmpName
  givenName <- if S.member initName agNames then go else return initName
  -- record the chosen name in the appropriate (global or local) set
  (if isGlobal then alreadyGivenNames else alreadyGivenLocalNames) %= S.insert givenName
  return givenName
-- | read all numerical config from the Formura source program.
-- Validates that mpi_grid_shape and intra_node_shape each have exactly
-- one entry per problem dimension, failing compilation otherwise.
-- (Fix: removed the unused binding of 'axesNames' and the redundant
-- trailing @return ()@.)
setNumericalConfig :: WithCommandLineOption => TranM ()
setNumericalConfig = do
  dim <- view dimension
  prog <- use theProgram
  let nc = prog ^. programNumericalConfig
  tsNumericalConfig .= nc
  -- fail early on malformed configuration
  when (length (nc ^. ncMPIGridShape) /= dim) $
    raiseErr $ failed $ "mpi_grid_shape needs exactly " ++ show dim ++ " elements."
  when (length (nc ^. ncIntraNodeShape) /= dim) $
    raiseErr $ failed $ "intra_node_shape needs exactly " ++ show dim ++ " elements."
-- | prepare unique name for everyone.
-- State-array names are reserved first, then loop-index ("i<axis>") and
-- loop-extent ("N<AXIS>") names; finally every node of the init and
-- step graphs is annotated with a fresh 'VariableName'.
setNamingState :: TranM ()
setNamingState = do
  stateVars <- use omStateSignature
  alreadyGivenNames .= (S.fromList $ map fromString $ M.keys stateVars)
  ans <- view axesNames
  lins <- traverse (genFreeName . ("i"++)) ans
  loopIndexNames .= lins
  luns <- traverse (genFreeName . ("N"++) . map toUpper) ans
  loopExtentNames .= luns
  let nameNode :: MMNode -> TranM MMNode
      nameNode nd = do
        -- prefer the node's source-program name; fall back to "g"
        let initName = case A.viewMaybe nd of
              Just (SourceName n) -> n
              _ -> "g"
        cName <- genFreeName initName
        return $ nd & A.annotation %~ A.set (VariableName cName)
  gr <- use omInitGraph
  gr2 <- flip traverse gr $ nameNode
  omInitGraph .= gr2
  -- (gr/gr2 deliberately shadowed for the second graph)
  gr <- use omStepGraph
  gr2 <- flip traverse gr $ nameNode
  omStepGraph .= gr2
-- | Generate C type declaration for given language.
-- "void" yields an empty declaration, "Rational" is lowered to double,
-- and grid types append one C array dimension per intra-node axis.
genTypeDecl :: IdentName -> TypeExpr -> TranM C.Src
genTypeDecl name typ = case typ of
  ElemType "void" -> return ""
  ElemType "Rational" -> return $ "double " <> fromString name
  ElemType x -> return $ fromString x <> " " <> fromString name
  GridType _ x -> do
    body <- genTypeDecl name x
    if body == "" then return ""
      else do
        sz <- use ncIntraNodeShape
        let szpt = foldMap (C.brackets . C.show) sz
        return $ body <> szpt
  _ -> raiseErr $ failed $ "Cannot translate type to C: " ++ show typ
-- | The element (scalar) type stored in a resource.
-- NOTE(review): the map lookups and the case matches are partial;
-- unknown state names or node types other than ElemType/GridType
-- crash here — confirm these cannot occur after type checking.
elemTypeOfResource :: ResourceT a b -> TranM TypeExpr
elemTypeOfResource (ResourceStatic sname _) = do
  ssMap <- use omStateSignature
  let Just typ = M.lookup sname ssMap
  case typ of
    ElemType _ -> return typ
    GridType _ etyp -> return etyp
elemTypeOfResource (ResourceOMNode nid _) = do
  mmProg <- use omStepGraph
  let Just nd = M.lookup nid mmProg
  case nd ^.nodeType of
    ElemType x -> return $ ElemType x
    GridType _ etyp -> return $ subFix etyp
-- | Declare an @MPI_Request@ variable (extern in the header, defined
-- in both outputs) at most once; repeated calls for the same name
-- are no-ops.
tellMPIRequestDecl :: C.Src -> TranM ()
tellMPIRequestDecl name = do
  declared <- use alreadyDeclaredResourceNames
  unless (S.member name declared) $ do
    alreadyDeclaredResourceNames %= S.insert name
    tellH "extern "
    tellBothLn $ "MPI_Request "<>name<>";\n"
-- | Declare a resource array (extern in the header, defined in both
-- outputs), at most once per name.
tellResourceDecl :: C.Src -> ResourceT a b -> Box -> TranM ()
tellResourceDecl = tellResourceDecl' False
-- | Worker: when @isInClass@ is True the declaration goes into a struct
-- body (header only, no extern).
tellResourceDecl' :: Bool -> C.Src -> ResourceT a b -> Box -> TranM ()
tellResourceDecl' isInClass name rsc box0 = do
  adrn <- use alreadyDeclaredResourceNames
  case S.member name adrn || name == "" of
    True -> return ()
    False -> do
      alreadyDeclaredResourceNames %= S.insert name
      typ <- elemTypeOfResource rsc
      -- array extent per axis, taken from the allocation box
      let szpt = foldMap (C.brackets . C.show) sz
          sz = box0 ^.upperVertex - box0 ^. lowerVertex
      decl <- case typ of
        ElemType "void" -> return ""
        ElemType "Rational" -> return $ "double " <> name <> szpt
        ElemType x -> return $ fromString x <> " " <> name <> szpt
        _ -> raiseErr $ failed $ "Cannot translate type to C: " ++ show typ
      when (decl /= "") $ do
        when isInClass $ do
          tellH decl
          tellHLn ";"
        when (not isInClass) $ do
          tellH "extern "
          tellBoth decl
          tellBothLn ";"
      -- case rsc of
      --   ResourceStatic _ _ -> do
      --     tellH "extern "
      --     tellBoth decl
      --     tellBothLn ";"
      --   _ -> do
      --     when (decl /= "") $ tellCLn $ decl <> ";"
-- | Emit the struct declaration for a facet with one member per ridge,
-- plus its @_Send@/@_Recv@ instances (extern in header, defined in
-- source).
tellFacetDecl :: FacetID -> [RidgeID] -> TranM ()
tellFacetDecl f rs = do
  let name = fromString $ toCName f
  tellH $ "struct " <> name <> "{"
  ralloc <- use planRidgeAlloc
  forM_ rs $ \rk -> do
    -- NOTE(review): this inner 'name' shadows the struct name above
    name <- nameRidgeResource' True rk SendRecv
    let Just box0 = M.lookup rk ralloc
    tellResourceDecl' True name (rk ^. ridgeDelta) box0
  tellH "};"
  tellH $ "extern struct " <> name <> " " <> name <> "_Send;"
  tellH $ "extern struct " <> name <> " " <> name <> "_Recv;"
  tellC $ "struct " <> name <> " " <> name <> "_Send;"
  tellC $ "struct " <> name <> " " <> name <> "_Recv;"
  return ()
-- | Turn any Showable value into a legal C identifier: collapse runs of
-- non-alphanumeric characters into single underscores and abbreviate
-- the verbose constructor names that appear in Show output.
toCName :: Show a => a -> IdentName
toCName a = postfix $ fix $ go False $ prefix $ show a
  where
    -- replace each run of non-alphanumeric chars with a single '_'
    go _ [] = []
    go b (x:xs) = case isAlphaNum x of
      True -> x : go False xs
      False -> if b then go b xs else '_' : go True xs
    -- drop trailing underscores
    postfix :: IdentName -> IdentName
    postfix = reverse . dropWhile (=='_') . reverse
    -- rewrite '-' (e.g. from negative numbers) before underscoring
    prefix :: IdentName -> IdentName
    prefix = T.packed %~ (T.replace "-" "m")
    -- shorten known constructor names
    fix :: IdentName -> IdentName
    fix = T.packed %~ (T.replace "ResourceOMNode" "Om" .
                       T.replace "ResourceStatic" "St" .
                       T.replace "IRank" "r".
                       T.replace "ridgeDelta_" "".
                       T.replace "MPIRank" "".
                       T.replace "RidgeID_ridgeDeltaMPI_MPIRank" "Ridge" .
                       T.replace "facetIRSrc_IRank" "src" .
                       T.replace "facetIRDest_IRank" "dest" .
                       T.replace "FacetID_facetDeltaMPI_" "Facet".
                       T.replace "IRankCompareStraight" "".
                       T.replace "IRankCompareReverse" "".
                       id
                      )
-- | Give name to Resources.
-- Static resources keep their source name; OMNode resources get a
-- shared, generated "Rsc<n>" name through the resource-sharing plan.
-- Either way the chosen name is memoised in 'planResourceNames'.
nameArrayResource :: (ResourceT () IRank) -> TranM C.Src
nameArrayResource rsc = case rsc of
  ResourceStatic sn _ -> do
    let ret = fromString sn
    planResourceNames %= M.insert rsc ret
    return ret
  _ -> do
    sharing <- use planResourceSharing
    dict <- use planResourceNames -- NOTE(review): bound but unused here
    sdict <- use planSharedResourceNames
    ret <- case M.lookup rsc sharing of
      Nothing -> return "" -- These are OMNode for Store instruction; do not need array decl
      Just rsid -> do
        ret <- case M.lookup rsid sdict of
          Just ret -> return ret
          Nothing -> do
            genFreeName $ "Rsc" ++ show (fromResourceSharingID rsid)
        planSharedResourceNames %= M.insert rsid ret
        return ret
    planResourceNames %= M.insert rsc ret
    return ret
-- | Memoised C name for a ridge buffer.
nameRidgeResource :: RidgeID -> SendOrRecv -> TranM C.Src
nameRidgeResource = nameRidgeResource' False
-- | Worker: ridges that travel over MPI live inside a facet struct, so
-- their name is qualified with "<facet>." — unless we are generating
-- the struct body itself (@isInClass@).
nameRidgeResource' :: Bool -> RidgeID -> SendOrRecv -> TranM C.Src
nameRidgeResource' isInClass r sr0 = do
  dict <- use planRidgeNames
  fdict <- use planFacetAssignment
  prefix <- if not (doesRidgeNeedMPI r) || isInClass
            then return ""
            else do
              let Just f = M.lookup r fdict
              fname <- nameFacet f sr0
              return $ fname <> "."
  -- currently Send and Recv share one unsuffixed name
  let (sr1, suffix) = (SendRecv, "")
  -- let (sr1, suffix) = case doesRidgeNeedMPI r of
  --       True -> (sr0, "_" ++ show sr0)
  --       False -> (SendRecv, "")
  case M.lookup (r,sr1) dict of
    Just ret -> return $ prefix <> ret
    Nothing -> do
      ret <- genFreeName $ toCName r ++ suffix
      planRidgeNames %= M.insert (r,sr1) ret
      return $ prefix <> ret
-- | Look up (or mint and memoise) the C identifier of the MPI_Request
-- belonging to a facet.
nameFacetRequest :: FacetID -> TranM C.Src
nameFacetRequest f = do
  known <- use planMPIRequestNames
  case M.lookup f known of
    Just nm -> return nm
    Nothing -> do
      nm <- genFreeName ("req_" ++ toCName f)
      planMPIRequestNames %= M.insert f nm
      return nm
-- | C identifier of the neighbour-rank field for an MPI-rank delta.
nameDeltaMPIRank :: MPIRank -> C.Src
nameDeltaMPIRank delta = mconcat ["mpi_rank_", fromString (toCName delta)]
-- | Name of a facet struct; the Send/Recv instances carry a suffix,
-- the type itself (SendRecv) does not.
nameFacet :: FacetID -> SendOrRecv -> TranM C.Src
nameFacet f sr =
  let base = fromString (toCName f)
  in return $ case sr of
       SendRecv -> base
       _        -> base <> "_" <> C.show sr
-- | Generate Declaration for State Arrays
-- Emits the shared-surface typedef, one declaration per allocated
-- array resource, facet structs with their MPI_Request variables, and
-- standalone buffers for intra-node ridges.
tellArrayDecls :: TranM ()
tellArrayDecls = do
  aalloc <- use planArrayAlloc
  commonBox <- use planSharedResourceExtent
  -- typedef for the shared-resource surface type (first axis dropped)
  let szpt = foldMap (C.brackets . C.show) (drop 1 $ toList sz)
      sz = commonBox ^.upperVertex - commonBox ^. lowerVertex
  tellHLn $ "typedef double " <> C.raw rscSfcTypename <> szpt <> ";"
  forM_ (M.toList aalloc) $ \(rsc, box0) -> do
    name <- nameArrayResource rsc
    -- OMNode resources are all declared with the common extent
    let box1 = case rsc of
          ResourceOMNode _ _ -> commonBox
          _ -> box0
    tellResourceDecl name rsc box1
  falloc <- use planFacetAlloc
  forM_ (M.toList falloc) $ \(fr@(f, rs)) -> do
    tellFacetDecl f rs
    name <- nameFacetRequest f
    tellMPIRequestDecl name
  ralloc <- use planRidgeAlloc
  forM_ (M.toList ralloc) $ \(rk@(RidgeID _ rsc), box0) -> do
    -- MPI ridges already live inside their facet structs
    when (not $ doesRidgeNeedMPI rk) $ do
      name <- nameRidgeResource rk SendRecv
      tellResourceDecl name rsc box0
-- | Generate Declarations for intermediate variables
-- One static C declaration per annotated node of the init and step
-- graphs; void-typed nodes produce no declaration.
tellIntermediateVariables :: TranM ()
tellIntermediateVariables = do
  g1 <- use omInitGraph
  g2 <- use omStepGraph
  forM_ [g1, g2] $ \gr -> do
    forM_ (M.toList gr) $ \(_, node) -> do
      -- NOTE(review): partial pattern — every node is assumed to carry
      -- a VariableName annotation (set in 'setNamingState')
      let typ = subFix $ node ^. nodeType
          Just (VariableName vname) = A.viewMaybe node
      decl <- genTypeDecl (toString vname) typ
      when (decl /= "") $ tellCLn $ "static " <> decl <> ";"
-- | lookup node by its index
-- Also moves the compiler focus to the node's metadata (if any) so
-- that subsequent error messages point at the right source location.
lookupNode :: OMNodeID -> TranM MMNode
lookupNode i = do
  g <- use theGraph
  case M.lookup i g of
    Nothing -> raiseErr $ failed $ "out-of-bound node reference: #" ++ show i
    Just n -> do
      case A.viewMaybe n of
        Just meta -> compilerFocus %= (meta <|>)
        Nothing -> return ()
      return n
-- | Render the C expression @i+(d)@, emitting @d@ as a tunable
-- int-typed parameter hole rather than a literal.
nPlusK :: C.Src -> Int -> C.Src
nPlusK i d = mconcat [i, "+", C.parens (C.parameter "int" d)]
-- | generate bindings, and the final expression that contains the result of evaluation.
-- Emits the micro-instructions of one OM node in source order: each
-- multiply-referenced node gets a fresh local binding; single-use /
-- immediate nodes are inlined as expressions.  Returns the binding
-- text and (tail name, cursor offset) pairs for the store loop.
genMMInstruction :: (?ncOpts :: [String]) => IRank -> MMInstruction -> TranM (C.Src, [(C.Src,Vec Int)])
genMMInstruction ir0 mminst = do
  axvars <- fmap fromString <$> view axesNames
  indNames <- use loopIndexNames
  indOffset <- use loopIndexOffset -- indNames + indOffset = real addr
  arrayDict <- use planArrayAlloc
  resourceDict <- use planResourceNames
  let
    -- how to access physical coordinate indNames + indOffset
    -- in array allocated with margin box0
    accAtMargin :: Box -> Vec Int -> C.Src
    accAtMargin box0 vi = accAt (indOffset + vi - (box0 ^. lowerVertex))
    accAt :: Vec Int -> C.Src
    accAt v = foldMap C.brackets $ nPlusK <$> indNames <*> v
  -- reset local-name generation for this instruction sequence
  alreadyGivenLocalNames .= S.empty
  freeLocalNameCounter .= 0
  nodeIDtoLocalName .= M.empty
  let refCount :: MMNodeID -> Int
      refCount nid = fromMaybe 0 $ M.lookup nid refCntMap
      -- how many times each node is referenced by other instructions
      refCntMap :: M.Map MMNodeID Int
      refCntMap = M.unionsWith (+) $
        concat $
        map (map (flip M.singleton 1) . genRefCnt . _nodeInst) $
        M.elems mminst
      genRefCnt :: MicroInstruction -> [MMNodeID]
      genRefCnt (Imm _) = []
      genRefCnt (Uniop _ a) = [a]
      genRefCnt (Binop _ a b) = [a,b]
      genRefCnt (Triop _ a b c) = [a,b,c]
      -- "<%" expands each operand twice below, so count them twice
      genRefCnt (Naryop "<%" xs) = xs ++ xs
      genRefCnt (Naryop _ xs) = xs
      genRefCnt (Store _ x) = [x]
      genRefCnt (LoadIndex _) = []
      genRefCnt (LoadExtent _) = []
      genRefCnt (LoadCursor _ _) = []
      genRefCnt (LoadCursorStatic _ _) = []
      doesSpine :: MMNodeID -> Bool
      doesSpine nid = case A.viewMaybe $ fromJust $ M.lookup nid mminst of
        Just (NBUSpine False) -> False
        _ -> True
      -- whether a node's value is bound to a fresh local variable
      doesBind :: MMNodeID -> Bool
      doesBind nid = doesBind' (refCount nid) (fromJust (M.lookup nid mminst) ^. nodeInst)
      doesBind' :: Int -> MicroInstruction -> Bool
      doesBind' _ (Imm _) = False
      doesBind' _ (Store _ x) = False
      doesBind' n _ = n >= 1 -- TODO : Implement CSE and then reduce n
  let orderedMMInst :: [(MMNodeID, MicroNode)]
      orderedMMInst = sortBy (compare `on` (loc . snd)) $ M.toList mminst
      -- NOTE(review): partial; every node is assumed annotated with MMLocation
      loc :: MicroNode -> MMLocation
      loc = fromJust . A.viewMaybe
  txts <- forM orderedMMInst $ \(nid0, Node inst microTyp _) -> do
    microTypDecl <- genTypeDecl "" (subFix microTyp)
    let -- bind the generated code to a local, or record it for inlining
        thisEq :: C.Src -> TranM C.Src
        thisEq code =
          case doesBind nid0 of
            True -> do
              thisName <- genFreeLocalName "a"
              nodeIDtoLocalName %= M.insert nid0 thisName
              return $ microTypDecl <> " " <> thisName <> "=" <> code
                <> "/*"<> C.show (doesSpine nid0) <> "*/" <> ";\n"
            False -> do
              nodeIDtoLocalName %= M.insert nid0 code
              return ""
        -- look up the name/expression of a previously emitted node
        query :: MMNodeID -> TranM C.Src
        query nid1 = do
          nmap <- use nodeIDtoLocalName
          case M.lookup nid1 nmap of
            Just vname -> return vname
            Nothing -> raiseErr $ failed $ "genExpr: missing graph node " ++ show nid1
    case inst of
      LoadCursorStatic vi name -> do
        let key = ResourceStatic name () :: ArrayResourceKey
        let Just abox = M.lookup key arrayDict
            Just rscName = M.lookup key resourceDict
        thisEq $ rscName <> accAtMargin abox vi
      LoadCursor vi nid -> do
        node <- lookupNode nid
        let Just abox = M.lookup key arrayDict
            Just rscName0 = M.lookup key resourceDict
            key = ResourceOMNode nid ir0
            rscName :: C.Src
            rscName = C.typedHole rscPtrTypename (C.toText rscName0)
        case node ^. nodeType of
          ElemType _ -> thisEq $ rscName
          _ -> thisEq $ rscName <> accAtMargin abox vi
      Imm r -> thisEq $ C.show (realToFrac r :: Double)
      Uniop op a -> do
        a_code <- query a
        -- "external-call/foo" becomes a call to foo(...)
        if "external-call/" `isPrefixOf` op
          then thisEq $ C.parens $ fromString (T.packed %~ T.replace "external-call/" "" $ op) <> C.parens a_code
          else thisEq $ C.parens $ fromString op <> a_code
      Binop op a b -> do
        a_code <- query a
        b_code <- query b
        case op of
          "**" -> thisEq $ ("pow"<>) $ C.parens $ a_code <> "," <> b_code
          _ -> thisEq $ C.parens $ a_code <> fromString op <> b_code
      Triop "ite" a b c -> do
        a_code <- query a
        b_code <- query b
        c_code <- query c
        thisEq $ C.parens $ a_code <> "?" <> b_code <> ":" <> c_code
      Naryop op xs -> do
        xs_code <- mapM query xs
        -- right-fold the operands into nested fmin/fmax calls
        let chain fname cs = foldr1 (\a b -> fname <> C.parens (a <> "," <> b) ) cs
        case op of
          ">?" -> thisEq $ chain "fmax" xs_code
          "<?" -> thisEq $ chain "fmin" xs_code
          "<%" -> thisEq $ chain "fmin" ["0.0", chain "fmax" xs_code] <> "+" <>
                  chain "fmax" ["0.0", chain "fmin" xs_code]
          _ -> raiseErr $ failed $ "unsupported N-ary operator: " ++ show op
      LoadIndex ax -> do
        let ofs_i = "navi.offset_" <> i
            i = toList axvars !! ax
            ix= toList indNames !! ax
        thisEq $ C.parens $ nPlusK (ofs_i <> "+" <> ix) (toList indOffset !! ax)
      Store _ x -> do
        x_code <- query x
        thisEq x_code
      x -> raiseErr $ failed $ "mpicxx codegen unimplemented for keyword: " ++ show x
  nmap <- use nodeIDtoLocalName
  -- NOTE(review): the outer tailName binding is shadowed (and thus
  -- unused) by the comprehension generator below
  let (tailID, _) = M.findMax mminst
      Just tailName = M.lookup tailID nmap
      retPairs = [ (tailName,c)
                 | (i,c) <- mmFindTailIDs mminst
                 , tailName <- maybeToList $ M.lookup i nmap]
  return $ (C.unwords txts, retPairs)
-- | All node IDs whose MMLocation shares the OMNodeID of the maximum
-- (tail) node, paired with their cursor offsets.
mmFindTailIDs :: MMInstruction -> [(MMNodeID, Vec Int)]
mmFindTailIDs mminst = rets
  where
    rets =
      [ (i, c)
      | (i,nd) <- M.toList mminst,
        let Just (MMLocation omnid2 c) = A.viewMaybe nd,
        omnid2==omnid ]
    -- NOTE(review): partial — assumes mminst is non-empty and that the
    -- max node carries an MMLocation annotation
    Just (MMLocation omnid _) = A.viewMaybe maxNode
    maxNode :: MicroNode
    maxNode = snd $ M.findMax mminst
-- | Per-loop OpenMP pragma: emits "collapse(n)" when the
-- "omp-collapse" option is enabled, nothing otherwise.
ompEveryLoopPragma :: (?ncOpts :: [String]) => Int -> C.Src
ompEveryLoopPragma n =
  if "omp-collapse" `elem` ?ncOpts
    then "#pragma omp for collapse(" <> C.show n <> ")"
    else ""
-- | Wrap generated code with fine-grained benchmark hooks
-- (start/stop_collection and/or fapp_start/stop) when the respective
-- options are enabled; otherwise the code passes through unchanged.
withFineBench :: (?ncOpts :: [String]) => C.Src -> C.Src -> C.Src
withFineBench benchLabel = addColl . addFapp
  where
    addColl src = case "bench-fine-collection" `elem` ?ncOpts of
      False -> src
      True -> C.unlines ["start_collection(\"" <> benchLabel <> "\");"
                        , src
                        , "stop_collection(\"" <> benchLabel <> "\");"
                        ]
    addFapp src = case "bench-fine-fapp" `elem` ?ncOpts of
      False -> src
      True -> C.unlines ["fapp_start(\"" <> benchLabel <> "\",0,0);"
                        , src
                        , "fapp_stop(\"" <> benchLabel <> "\",0,0);"
                        ]
-- | generate a formura function body.
-- Builds the strided loop nest over the planned region for one
-- (rank, node) pair, writing into the destination resource.
genComputation :: (?ncOpts :: [String]) => (IRank, OMNodeID) -> ArrayResourceKey -> TranM C.Src
genComputation (ir0, nid0) destRsc0 = do
  dim <- view dimension
  ivars <- use loopIndexNames
  regionDict <- use planRegionAlloc
  arrayDict <- use planArrayAlloc
  stepGraph <- use omStepGraph
  nc <- view envNumericalConfig
  let
    regionBox :: Box
    marginBox :: Box
    -- NOTE(review): partial patterns — missing plan entries crash here
    Just regionBox = M.lookup (ir0, nid0) regionDict
    Just marginBox = M.lookup destRsc0 arrayDict
    -- loop bounds relative to the allocated (margin) box
    loopFroms :: Vec Int
    loopFroms = regionBox^.lowerVertex - marginBox^.lowerVertex
    loopTos :: Vec Int
    loopTos = regionBox^.upperVertex - marginBox^.lowerVertex
    mmInst :: MMInstruction
    Just (Node mmInst typ annot) = M.lookup nid0 stepGraph
  loopIndexOffset .= marginBox^. lowerVertex
  systemOffset0 <- use planSystemOffset
  -- per-axis loop strides from the NBU configuration
  let nbux = nbuSize "x" nc
      nbuy = nbuSize "y" nc
      nbuz = nbuSize "z" nc
      gridStride = [nbux, nbuy, nbuz]
  let
    genGrid useSystemOffset lhsName2 = do
      let openLoops =
            [ C.unwords
              ["for (int ", i, "=", C.parameter "int" l ,";", i, "<", C.parameter "int" h, ";", i, "+=", C.show s ,"){"]
            | (i,s,l,h) <- zip4 (toList ivars) gridStride (toList loopFroms) (toList loopTos)]
          closeLoops =
            ["}" | _ <- toList ivars]
      (letBs,rhss) <- genMMInstruction ir0 mmInst
      -- one store per tail expression, each at its own cursor offset
      let bodyExpr = C.unlines
            [ lhsName2 <> foldMap C.brackets (nPlusK <$> ivarExpr <*> c) <> "=" <> rhs <> ";"
            | (rhs, c) <- rhss ]
          ivarExpr
            | useSystemOffset = nPlusK <$> ivars <*> negate systemOffset0
            | otherwise = ivars
      return $ C.potentialSubroutine $ C.unlines $
        [ompEveryLoopPragma $ dim-1] ++
        openLoops ++ [letBs,bodyExpr] ++ closeLoops
  case typ of
    -- void nodes write straight into a static state array
    ElemType "void" ->
      case head $ mmInstTails mmInst of
        Store n _ -> do
          lhsName <- nameArrayResource (ResourceStatic n ())
          genGrid True lhsName
        _ -> return "// void"
    GridType _ typ -> do
      lhsName <- nameArrayResource (ResourceOMNode nid0 ir0)
      genGrid False (C.typedHole rscPtrTypename (C.toText lhsName))
    _ -> do
      return $ fromString $ "// dunno how gen " ++ show mmInst
-- | generate a staging/unstaging code
-- @isStaging == True@ copies array -> ridge buffer (before a send);
-- @False@ copies ridge buffer -> array (after a receive).
genStagingCode :: (?ncOpts :: [String]) => Bool -> RidgeID -> TranM C.Src
genStagingCode isStaging rid = do
  dim <- view dimension
  ridgeDict <- use planRidgeAlloc
  arrDict <- use planArrayAlloc
  intraShape <- use ncIntraNodeShape
  let Just box0 = M.lookup rid ridgeDict
      -- the array this ridge stages from / unstages into
      src :: ArrayResourceKey
      src = case rid of
        RidgeID _ (ResourceOMNode nid (irS,irD)) -> ResourceOMNode nid (if isStaging then irS else irD)
        RidgeID _ (ResourceStatic sn ()) -> ResourceStatic sn ()
      Just box1 = M.lookup src arrDict
      MPIRank mpivec = rid ^. ridgeDeltaMPI
  arrName <- nameArrayResource src
  rdgNameSend <- nameRidgeResource rid Send
  rdgNameRecv <- nameRidgeResource rid Recv
  ivars <- use loopIndexNames
  let offset :: Vec Int
      offset = box0^.lowerVertex
      -- loops run over the ridge box translated to start at zero
      loopFroms :: Vec Int
      loopFroms = box0^.lowerVertex - offset
      loopTos :: Vec Int
      loopTos = box0^.upperVertex - offset
      -- translation from ridge coordinates into array coordinates;
      -- when staging, also shift by the MPI-neighbour displacement
      otherOffset :: Vec Int
      otherOffset = offset - box1^.lowerVertex
                    - (if isStaging then mpivec * intraShape else 0)
  let openLoops =
        [ C.unwords
          ["for (int ", i, "=", C.show l ,";", i, "<", C.show h, ";++", i, "){"]
        | (i,(l,h)) <- (toList ivars) `zip`
                       zip (toList loopFroms) (toList loopTos)]
      closeLoops =
        ["}" | _ <- toList ivars]
      rdgName = if isStaging then rdgNameSend else rdgNameRecv
      rdgTerm = rdgName <> foldMap C.brackets ivars
      arrTerm = arrName <> foldMap C.brackets (liftVec2 nPlusK ivars otherOffset)
      body
        | isStaging = rdgTerm <> "=" <> arrTerm
        | otherwise = arrTerm <> "=" <> rdgTerm
  let pragma =
        if "collapse-ridge" `elem` ?ncOpts then ompEveryLoopPragma dim
        else ompEveryLoopPragma (dim -1)
  return $ pragma <> "\n" <>
    C.unlines openLoops <> body <> ";" <> C.unlines closeLoops
-- | Emit the non-blocking MPI exchange for a facet: one @MPI_Irecv@
-- from the neighbor at @dmpi@ followed by one @MPI_Isend@ toward the
-- opposite neighbor.  Both calls transfer the whole facet struct as raw
-- bytes and share the same request handle.
genMPISendRecvCode :: FacetID -> TranM C.Src
genMPISendRecvCode f = do
  reqName <- nameFacetRequest f
  facetNameSend <- nameFacet f Send
  facetNameRecv <- nameFacet f Recv
  facetTypeName <- nameFacet f SendRecv
  mpiTagDict <- use planFacetMPITag
  let dmpi = f ^. facetDeltaMPI
      -- NOTE(review): partial pattern — assumes every facet has a tag.
      mpiTag = let Just t = M.lookup f mpiTagDict in C.show t
      -- One MPI call rendered as a word list; @fun@ is the MPI function
      -- name, @buf@ the facet buffer, @rankDelta@ the neighbor offset.
      mpiCall fun buf rankDelta =
        [ fun <> "( (void*) &" <> buf, ","
        , "sizeof(struct " <> facetTypeName <> ") ,"
        , "MPI_BYTE,"
        , "navi->" <> nameDeltaMPIRank rankDelta <> ","
        , mpiTag, ","
        , "navi->mpi_comm,"
        , "&" <> reqName <> " );\n"]
  return $ C.unwords $
    mpiCall "MPI_Irecv" facetNameRecv dmpi ++
    mpiCall "MPI_Isend" facetNameSend (negate dmpi)
-- | Emit an @MPI_Wait@ on the request handle of facet @f@, completing
-- the Isend/Irecv pair issued by 'genMPISendRecvCode'.
--
-- Fix: removed the dead local binding of the facet's MPI displacement,
-- which was computed but never used.
genMPIWaitCode :: FacetID -> TranM C.Src
genMPIWaitCode f = do
  reqName <- nameFacetRequest f
  -- Only completion matters here — the received bytes were already
  -- deposited into the recv buffer — so the status is ignored.
  return $ C.unwords
    ["MPI_Wait(&" <> reqName <> ",MPI_STATUS_IGNORE);\n"]
-- | Generate a distributed program: drop no-op instructions, group
-- adjacent instructions that may share one generated C function
-- ("stick" together), generate code for each instruction, and wrap each
-- group in a callable subroutine.
genDistributedProgram :: (?ncOpts :: [String]) => [DistributedInst] -> TranM C.Src
genDistributedProgram insts0 = do
  stepGraph <- use omStepGraph
  theGraph .= stepGraph
  let insts1 = filter (not . isNop) insts0
      insts2 = grp [] $ insts1
  -- Sanity check: grouping must be a partition that preserves order.
  when (insts1 /= concat insts2) $
    raiseErr $ failed $ "Detected instruction order mismatch!"
  bodies <- mapM (mapM go2) $ insts2
  ps <- mapM genCall bodies
  return $ mconcat ps
  where
    isNop (FreeResource _) = True
    isNop _ = False
    -- Grouping policy is selected by numerical-config options.
    sticks :: DistributedInst -> DistributedInst -> Bool
    sticks | "stick-all-comp" `elem` ?ncOpts = sticksB
           | "stick-single-comp" `elem` ?ncOpts = sticksA
           | otherwise = sticksB
    -- Policy A: only fuse specific unstage/compute/stage sequences.
    sticksA :: DistributedInst -> DistributedInst -> Bool
    sticksA (Unstage _) (Unstage _ ) = True
    sticksA (Unstage _) (Computation _ _ ) = True
    sticksA (Computation _ _ ) (Stage _) = True
    sticksA (Stage _) (Stage _) = True
    sticksA _ _ = False
    -- Policy B: fuse any two non-communication instructions.
    sticksB :: DistributedInst -> DistributedInst -> Bool
    sticksB a b =
      let isComp (CommunicationWait _) = False
          isComp (CommunicationSendRecv _) = False
          isComp _ = True
      in isComp a && isComp b
    -- Greedy left-to-right grouping; accum is kept reversed.
    grp :: [DistributedInst] -> [DistributedInst] -> [[DistributedInst]]
    grp accum [] = [reverse accum]
    grp [] (x:xs) = grp [x] xs
    grp accum@(a:aa) (x:xs)
      | sticks a x = grp (x:accum) xs
      | otherwise = reverse accum : grp [] (x:xs)
    -- Pair each instruction with its generated code.
    go2 :: DistributedInst -> TranM (DistributedInst, C.Src)
    go2 i = do
      j <- go i
      return (i,j)
    -- CJK identifiers: 剔算 = "knock out computation",
    -- 剔通 = "knock out communication" (debug/benchmark switches).
    剔算 = "knockout-computation" `elem` ?ncOpts
    剔通 = "knockout-communication" `elem` ?ncOpts
    -- Wrap generated code in a fine-grained benchmark region.
    m ⏲ str = withFineBench str <$> m
    -- Replace generated code by an empty string when knocked out.
    knockout :: Bool -> TranM C.Src -> TranM C.Src
    knockout flag m = do
      t <- m
      return $ if flag then "" else t
    go :: DistributedInst -> TranM C.Src
    go (Computation cmp destRsc) = knockout 剔算 $ genComputation cmp destRsc ⏲ "computation"
    go (Unstage rid) = knockout 剔算 $ genStagingCode False rid ⏲ "stageOut"
    go (Stage rid) = knockout 剔算 $ genStagingCode True rid ⏲ "stageIn"
    go (FreeResource _) = knockout 剔算 $ return ""
    go (CommunicationSendRecv f) = knockout 剔通 $ genMPISendRecvCode f ⏲ "mpiSendrecv"
    go (CommunicationWait f) = knockout 剔通 $ genMPIWaitCode f ⏲ "mpiWait"
    -- Emit a group as a standalone C function in its own file, except
    -- lone communication instructions which are inlined at the call
    -- site (they must not run inside an omp parallel region).
    genCall :: [(DistributedInst, C.Src)] -> TranM C.Src
    genCall instPairs = do
      let body = map snd instPairs
          isGenerateFunction = case map fst instPairs of
            [(CommunicationWait _)] -> False
            [(CommunicationSendRecv _)] -> False
            _ -> True
      case isGenerateFunction of
        True -> do
          funName <- genFreeName "Formura_internal"
          tellH $ "void "<> funName <> "();\n"
          tellF (toString $ funName <> ".c") $ C.unlines $
            ["void "<> funName <> "(){"]
            ++ (if "omp" `elem` ?ncOpts then ["#pragma omp parallel\n{"] else [])
            ++ body
            ++ (if "omp" `elem` ?ncOpts then ["}"] else [])
            ++ ["}"]
          return $ funName <> "();\n"
        False -> do
          return $ C.unlines $ body
-- | Let the plans collaborate: compute one common bounding box for all
-- static arrays across every MPI plan (padded by the blocking margin)
-- and rewrite each plan to allocate static arrays with that shared box.
collaboratePlans :: TranM ()
collaboratePlans = do
  plans0 <- use tsMPIPlanMap
  nc <- view envNumericalConfig
  -- Margin derived from the blocking-unit sizes per axis; the exact
  -- "+2" padding rationale is not evident here — TODO confirm.
  let nbux = nbuSize "x" nc
      nbuy = nbuSize "y" nc
      nbuz = nbuSize "z" nc
      nbuMargin = Vec [nbux-1+2, nbuy-1+2, nbuz-1+2]
  -- NOTE(review): foldr1 is partial — requires at least one static
  -- resource across the plans; crashes otherwise.
  let commonStaticBox :: Box
      commonStaticBox =
        upperVertex %~ (+nbuMargin) $
        foldr1 (|||)
        [ b
        | p <- M.elems plans0
        , (ResourceStatic snName (), b) <- M.toList $ p ^. planArrayAlloc
        ]
      newPlans = M.map rewritePlan plans0
      rewritePlan :: MPIPlan -> MPIPlan
      rewritePlan p = p
        & planArrayAlloc %~ M.mapWithKey go
        -- & planSharedResourceExtent .~ commonRscBox -- TODO: Flipping the comment of this line changes the behavior.
      -- Only static resources are forced onto the common box.
      go (ResourceStatic snName ()) _ = commonStaticBox
      go _ b = b
      -- Currently unused (see the commented-out line above).
      commonRscBox =
        upperVertex %~ (+nbuMargin) $
        foldr1 (|||)
        [ p ^. planSharedResourceExtent
        | p <- M.elems plans0]
  tsCommonStaticBox .= commonStaticBox
  tsMPIPlanMap .= newPlans
-- | The main translation logic.  Builds both MPI plans (normal and
-- wall-inverted), harmonizes them, then emits the generated header and
-- C sources: array declarations, the Formura_Navigator struct, MPI rank
-- encode/decode helpers, Formura_Init, and Formura_Forward.
tellProgram :: WithCommandLineOption => TranM ()
tellProgram = do
  setNumericalConfig
  setNamingState
  nc <- use tsNumericalConfig
  let ?ncOpts = nc ^. ncOptionStrings
  mpiGrid0 <- use ncMPIGridShape
  mmprog <- use theMMProgram
  (ivars :: Vec C.Src) <- fmap fromString <$> view axesNames
  intraExtents <- use ncIntraNodeShape
  let cxxTemplateWithMacro :: C.Src
      cxxTemplateWithMacro = cxxTemplate
  tsCxxTemplateWithMacro .= cxxTemplateWithMacro
  -- Build the two plans: selection False = normal, True = wall-inverted.
  tsMPIPlanSelection .= False
  plan <- liftIO $ makePlan nc mmprog
  mPIPlan .= plan
  tsMPIPlanSelection .= True
  plan <- liftIO $ makePlan (nc & ncWallInverted .~ Just True) mmprog
  mPIPlan .= plan
  collaboratePlans
  tellH $ C.unlines
    [ ""
    , "#pragma once"
    , "#ifdef __cplusplus"
    , "extern \"C\""
    , "{"
    , "#endif"
    ]
  tellH $ C.unlines ["#include <mpi.h>"]
  tellC $ cxxTemplateWithMacro
  tellBoth "\n\n"
  -- #define NX/NY/NZ: global extent = intra-node extent * MPI grid.
  tellH $ C.unlines
    [ "#define " <> nx <> " " <> C.show (i*g)
    | (x,i,g) <- zip3 (toList ivars) (toList intraExtents) (toList mpiGrid0)
    , let nx = "N" <> (fromString $ map toUpper $ toString x)
    ]
  -- Array declarations for both plans; shared-resource names are copied
  -- from the first plan so both plans use identical buffers.
  tsMPIPlanSelection .= False
  tellArrayDecls
  srmap0 <- use planSharedResourceNames
  tsMPIPlanSelection .= True
  planSharedResourceNames .= srmap0 -- share the shared resource among plans
  tellArrayDecls
  tellBoth "\n"
  allRidges0 <- use planRidgeAlloc
  -- All neighbor displacements, closed under negation and deduplicated.
  let deltaMPIs :: [MPIRank]
      deltaMPIs = S.toList $ S.fromList $ concat [ [dmpi, negate dmpi]
                  | rdg <- M.keys allRidges0
                  , let dmpi = rdg ^. ridgeDeltaMPI]
  tellHLn $ "struct Formura_Navigator {"
  tellHLn $ "int time_step;"
  forM_ ivars $ \i -> do
    tellHLn $ "int lower_" <> i <> ";"
    tellHLn $ "int upper_" <> i <> ";"
    tellHLn $ "int offset_" <> i <> ";"
  tellHLn $ "MPI_Comm mpi_comm;"
  tellHLn $ "int mpi_my_rank;"
  forM_ deltaMPIs $ \r -> do
    tellHLn $ "int " <> nameDeltaMPIRank r <> ";"
  tellHLn $ "};"
  tellH "extern struct Formura_Navigator navi;"
  tellC "struct Formura_Navigator navi;"
  tellBoth "\n\n"
  -- Rank <-> grid-coordinate conversion, row-major over the MPI grid.
  tellCLn $ "void Formura_decode_mpi_rank (int r" <> C.unwords[", int *i" <> x | x<-toList ivars] <> "){"
  tellCLn "int s=r;"
  forM_ (zip (reverse $ toList ivars) (reverse $ toList mpiGrid0)) $ \(x, g) -> do
    tellCLn $ "*i" <> x <> "=s%" <> C.show g <> ";"
    tellCLn $ "s=s/" <> C.show g <> ";"
  tellCLn "}"
  tellCLn $ "int Formura_encode_mpi_rank (" <> C.intercalate "," [" int i" <> x | x<-toList ivars] <> "){"
  tellCLn "int s = 0;"
  forM_ (zip (toList ivars) (toList mpiGrid0)) $ \(x, ig) -> do
    let g=C.show ig
    tellCLn $ "s *= " <>g<>";"
    -- (i % g + g) % g keeps coordinates in range for negative i (torus).
    tellCLn $ "s += (i"<>x<>"%"<>g<>"+"<>g<>")%"<>g<>";"
  tellCLn "return s;}"
  tellBoth "int Formura_Init (struct Formura_Navigator *navi, MPI_Comm comm)"
  tellH ";"
  csb0 <- use tsCommonStaticBox
  let mpiivars = fmap ("i"<>) ivars
      lower_offset = negate $ csb0 ^.lowerVertex
  tellCLn "{"
  tellCLn $ "int " <> C.intercalate "," (toList mpiivars) <> ";"
  tellCLn $ "navi->mpi_comm = comm;"
  tellCLn $ "{int r; MPI_Comm_rank(comm,&r);navi->mpi_my_rank = r;}"
  tellCLn $ "Formura_decode_mpi_rank( navi->mpi_my_rank" <> C.unwords [ ", &" <> x| x<- toList mpiivars] <> ");"
  -- Precompute the rank of every neighbor displacement.
  forM_ deltaMPIs $ \r@(MPIRank rv) -> do
    let terms = zipWith nPlusK (toList mpiivars) (toList rv)
    tellC $ "navi->" <> nameDeltaMPIRank r <> "="
    tellCLn $ "Formura_encode_mpi_rank( " <> C.intercalate "," terms <> ");"
  tellCLn "navi->time_step=0;"
  forM_ (zip3 (toList ivars) (toList intraExtents) (toList lower_offset)) $ \(x, e, o) -> do
    tellCLn $ "navi->offset_" <> x <> "=" <> "i"<> x <> "*"<>C.show e <> "-" <> C.show o <> ";"
    tellCLn $ "navi->lower_" <> x <> "=" <> C.show o<>";"
    tellCLn $ "navi->upper_" <> x <> "=" <> C.show o <> "+"<>C.show e <> ";"
  tellCLn "return 0;}"
  tellBoth "\n\n"
  -- Generate both half-step programs (normal and wall-inverted plan).
  cprogcon <- forM [False, True] $ \ mps -> do
    tsMPIPlanSelection .= mps
    dProg <- use planDistributedProgram
    genDistributedProgram dProg
  tellH $ "/*INSERT SUBROUTINES HERE*/\n"
  monitorInterval0 <- use ncMonitorInterval
  temporalBlockingInterval0 <- use ncTemporalBlockingInterval
  timeStepVarName <- genFreeName "timestep"
  when ((monitorInterval0`mod`(2*temporalBlockingInterval0))/=0) $
    liftIO $ putStrLn "Warning : Monitor interval must be multiple of (2 * temporal blocking interval)"
  -- Round the monitor interval up to the next valid multiple.
  let monitorInterval2 = head $ filter (\x -> x`mod`(2*temporalBlockingInterval0)==0)[monitorInterval0 ..]
  let openTimeLoop = "for(int " <> timeStepVarName <> "=0;" <>
                     timeStepVarName <> "<"
                     <> C.show (monitorInterval2`div`(2*temporalBlockingInterval0))
                     <> ";" <> "++" <> timeStepVarName <> "){"
      closeTimeLoop = "}"
  tellBoth "int Formura_Forward (struct Formura_Navigator *navi)"
  tellH ";"
  tellC $ C.unlines
    [ "{"
    , openTimeLoop
    , C.unlines [cprogcon!!0,"/* HALFWAYS */" , cprogcon!!1]
    , closeTimeLoop
    , "navi->time_step += " <> C.show monitorInterval2 <> ";"
    , "return 0;}"
    ]
  tellH $ C.unlines
    [ ""
    , "#ifdef __cplusplus"
    , "}"
    , "#endif"
    ]
-- | Rewrite every source in the program, replacing each potential
-- subroutine occurrence with a call to its named subroutine.
useSubroutineCalls :: WithCommandLineOption => M.Map C.Src String -> CProgram -> IO CProgram
useSubroutineCalls subroutineMap = traverse (useSubroutineInSrc subroutineMap)
-- | Replace each 'C.PotentialSubroutine' word in a source with a call
-- to the subroutine registered for its template; other words pass
-- through unchanged.
useSubroutineInSrc :: WithCommandLineOption => M.Map C.Src String -> C.Src -> IO C.Src
useSubroutineInSrc subroutineMap (C.Src xs) = C.Src <$> mapM go xs
  where
    go :: C.Word -> IO C.Word
    go x@(C.Raw _) = return x
    go x@(C.Hole _) = return x
    go (C.PotentialSubroutine pssrc) = do
      -- NOTE(review): partial pattern — assumes the template of every
      -- potential subroutine was registered in subroutineMap.
      let tmpl = C.template pssrc
          Just funName = M.lookup tmpl subroutineMap
          -- The call's arguments are the hole expressions, in order.
          argList :: [T.Text]
          argList = [(argN ^. C.holeExpr) | argN <-toList pssrc]
      return $ C.Raw $ fromString funName <> "(" <> T.intercalate "," argList <> ");\n"
-- | Deduplicate generated code: collect all potential subroutines,
-- group them by template, generate one real subroutine per template,
-- replace the occurrences with calls, and splice the declarations into
-- the header.
joinSubroutines :: WithCommandLineOption => CProgram -> IO CProgram
joinSubroutines cprog0 = do
  -- Optional diagnostics, controlled by command-line flags.
  when (?commandLineOption ^. verbose) $ do
    putStrLn $ "## Subroutine Analysis"
    when (elem "show-subroutines" $ ?commandLineOption ^. auxFlags) $ do
      forM_ (zip [1..] subs1) $ \(i, ss) -> do
        forM_ (zip [1..] ss) $ \(j, s) -> do
          putStrLn $ "# Subroutine group" ++ show i ++ ": member " ++ show j
          T.putStrLn $ C.pretty s
          putStrLn $ show $ C.template s
          print $ sum $ map fromEnum $ show $ C.template s
    putStrLn $ "Found " ++ show (length subs0) ++ " subroutines."
    putStrLn $ "Found " ++ show (length subs1) ++ " subroutine groups."
    forM_ (zip [1..] subs1) $ \(i, ss) -> do
      let C.Src xs = head ss
          cnt (C.Hole _) = 1
          cnt _ = 0
      print ("Count of typed holes #",i, sum $ map cnt xs)
      -- forM_ (take 2 ss) $ T.putStrLn . C.pretty
  cprog1 <- useSubroutineCalls subroutineNameMap cprog0
  return $ cprog1
    & headerFileContent %~ (C.replace "/*INSERT SUBROUTINES HERE*/" hxxSubroutineDecls)
    & auxFilesContent %~ (M.union auxSubroutineDefs)
  where
    -- Every potential subroutine occurring anywhere in the program.
    subs0 :: [C.Src]
    subs0 = foldMap getSub cprog0
    getSub :: C.Src -> [C.Src]
    getSub (C.Src xs) = xs >>= toSub
    toSub :: C.Word -> [C.Src]
    toSub (C.PotentialSubroutine s) = [s]
    toSub _ = []
    -- (subroutine template, the list of codes that uses the subroutine)
    submap1 = M.unionsWith (++)
      [ M.singleton (C.template s) [s] | s <- subs0]
    subs1 :: [[C.Src]]
    subs1 = M.elems $ submap1
    subTemplates :: [C.Src]
    subTemplates = M.keys submap1
    -- map a Potential Subroutine template to its subroutine name
    subroutineNameMap :: M.Map C.Src String
    subroutineNameMap = M.fromList
      [(tmpl, "Formura_subroutine_" ++ show i) | (i,tmpl) <- zip [0..] subTemplates]
    -- Fresh argument names argx0, argx1, ... for the holes.
    argvNames :: [C.Src]
    argvNames = ["argx" <> C.show i | i <- [0..]]
    -- Returns (declaration, definition) for one subroutine.
    genSubroutine :: String -> C.Src -> (C.Src, C.Src)
    genSubroutine fname tmpl = let
        header = "void " <> fromString fname <> "(" <> C.intercalate "," argvList <> ")"
        argvList = [C.raw (h ^. C.holeType) <> " " <> argN | (h, argN) <- zip (toList tmpl) argvNames]
        -- The body is the template with each hole filled by its argument.
        sbody :: C.Src
        sbody = zipWithFT (\arg hole -> hole & C.holeExpr .~ C.toText arg) argvNames tmpl
      in (header <> ";", header <> C.braces sbody)
    -- Each subroutine is written to its own .c file.
    subroutineCodes :: [(String, C.Src, C.Src)]
    subroutineCodes =
      [ (fnBody ++ ".c", hxx, cxx)
      | (tmpl, fnBody) <- M.toList subroutineNameMap
      , let (hxx,cxx) = genSubroutine fnBody tmpl]
    hxxSubroutineDecls :: C.Src
    hxxSubroutineDecls = C.unlines [ hc ^. _2 | hc <- subroutineCodes]
    auxSubroutineDefs :: M.Map FilePath C.Src
    auxSubroutineDefs = M.fromList [ (hc ^. _1, hc ^. _3) | hc <- subroutineCodes]
-- | Top-level driver: apply temporal blocking, run the translator,
-- optionally join subroutines, write the header/source/auxiliary files,
-- and finally run GNU indent over all of them (best-effort).
genCxxFiles :: WithCommandLineOption => Program -> MMProgram -> IO ()
genCxxFiles formuraProg mmProg0 = do
  let
    nc = formuraProg ^. programNumericalConfig
    tbFoldingNumber = nc ^. ncTemporalBlockingInterval
    mmProgTB = temporalBlocking tbFoldingNumber mmProg0
    tranState0 = TranState
      { _tranSyntacticState = defaultCompilerSyntacticState{ _compilerStage = "C++ code generation"}
      , _tsNamingState = defaultNamingState
      , _theProgram = formuraProg
      , _theMMProgram = mmProgTB
      , _tsNumericalConfig = nc
      , _theGraph = M.empty
      , _tsMPIPlanSelection = False
      , _tsMPIPlanMap = M.empty
      -- These two are filled in by 'tellProgram' before first use.
      , _tsCommonStaticBox = error "_tsCommonStaticBox is unset"
      , _tsCxxTemplateWithMacro = error "_tsCxxTemplateWithMacro is unset"
      }
  (_, tranState1 , cprog0)
    <- runCompilerRight tellProgram
       (mmProgTB ^. omGlobalEnvironment)
       tranState0
  (CProgram hxxContent cxxContent auxFilesContent) <-
    if (elem "no-subroutine" $ tranState1 ^. ncOptionStrings) then return cprog0
    else joinSubroutines cprog0
  createDirectoryIfMissing True (cxxFilePath ^. directory)
  T.writeFile hxxFilePath $ C.toText hxxContent
  T.writeFile cxxFilePath $ C.toText cxxContent
  -- Pack auxiliary sources into files of at most ~64kB each, so that
  -- no single generated translation unit grows too large.
  let funcs = cluster [] $ M.elems auxFilesContent
      cluster :: [C.Src] -> [C.Src] -> [C.Src]
      cluster accum [] = reverse accum
      cluster [] (x:xs) = cluster [x] xs
      cluster (ac:acs) (x:xs)
        | ac /= "" && C.length (ac<>x) > 64000 = cluster ("":ac:acs) (x:xs)
        | otherwise = cluster (ac <> x : acs) xs
      writeAuxFile i con = do
        let fn = cxxFileBodyPath ++ "_internal_" ++ show i ++ ".c"
        T.writeFile fn $ C.toText $ (tranState1 ^. tsCxxTemplateWithMacro) <> con
        return fn
  auxFilePaths <- zipWithM writeAuxFile [0..] funcs
  -- Optional pause (e.g. to let a filesystem watcher catch up).
  let wait = ?commandLineOption ^. sleepAfterGen
  when (wait>0) $ threadDelay (1000000 * wait)
  mapM_ indent ([hxxFilePath, cxxFilePath] ++ auxFilePaths)
  where
    -- Indentation is cosmetic; any failure (e.g. indent not installed)
    -- is deliberately swallowed.
    indent fn = X.handle ignore $ callProcess "indent" ["-gnu", "-i2", "-nut","-br", "-nlp","-ip0","-l80", fn]
    ignore :: X.SomeException -> IO ()
    ignore _ = return ()
-- | Common prologue prepended to every generated C source file.
cxxTemplate :: (WithCommandLineOption, ?ncOpts :: [String]) => C.Src
cxxTemplate = C.unlines
  [ ""
  , "#include <mpi.h>"
  , "#include <math.h>"
  , "#include <stdbool.h>"
  , benchHeaders
  , "#include \"" <> fromString hxxFileName <> "\""
  , ""
  ]
  where
    isBenchFine = "bench-fine-collection" `elem` ?ncOpts || "bench-fine-fapp" `elem` ?ncOpts
    -- Fujitsu profiler headers are only included when fine-grained
    -- benchmarking is requested via the numerical-config options.
    benchHeaders
      | isBenchFine = C.unlines ["#include <fj_tool/fapp.h>" , "#include <fjcoll.h>"]
      | otherwise = ""
-- | C type text used when passing resource arrays by pointer; the
-- __restrict qualifier asserts the pointer arguments do not alias.
rscPtrTypename :: T.Text
rscPtrTypename = rscSfcTypename <> " * __restrict "

-- | Element type name for resource surfaces — presumably a typedef
-- emitted elsewhere in the generated C code; TODO confirm.
rscSfcTypename :: T.Text
rscSfcTypename = "rsc_surface"
|
nushio3/formura
|
src/Formura/MPICxx/Translate.hs
|
mit
| 44,969
| 16
| 28
| 12,175
| 14,529
| 7,186
| 7,343
| -1
| -1
|
import Parser
import AST
import Text.Parsec (Parsec, ParseError, parse, runParser)
import Data.Text (splitOn, pack, unpack)
import Data.List (intercalate)
-- pretty print
-- | Pair every element with its 1-based position.
enumerate :: [a] -> [(Int, a)]
enumerate xs = zip [1 ..] xs
-- | Render the input with a 1-based "n|" prefix on each
-- newline-separated line.
printLineNumbers :: String -> String
printLineNumbers input =
  intercalate "\n"
    [ show n ++ "|" ++ unpack t
    | (n, t) <- zip [(1 :: Int) ..] (splitOn (pack "\n") (pack input))
    ]
-- | Parse a source file with the given parser, first echoing the
-- numbered input to stdout as a debugging aid.
parseFromFile :: Parsec String () Program -> String -> IO (Either ParseError Program)
parseFromFile p fname = do
  source <- readFile fname
  putStrLn "Input"
  putStrLn "========================="
  putStrLn $ printLineNumbers source
  putStrLn ""
  return (runParser p () fname source)
-- | Entry point: parse the hard-coded test file and dump the AST.
-- NOTE(review): the local name @either@ shadows 'Prelude.either'.
main =
  do
    either <- parseFromFile parseProgram "test.488"
    putStrLn "AST"
    putStrLn "========================="
    putStrLn $ show either
|
dat2/488-compiler-haskell
|
Main.hs
|
mit
| 905
| 0
| 12
| 173
| 314
| 155
| 159
| 27
| 1
|
{- | Representations of Rubik's cube.
-}
module Rubik.Cube (
module Rubik.Cube.Facelet,
module Rubik.Cube.Coord,
module Rubik.Cube.Cubie,
module Rubik.Cube.Moves,
) where
import Rubik.Cube.Facelet
import Rubik.Cube.Coord
import Rubik.Cube.Cubie
import Rubik.Cube.Moves
|
Lysxia/twentyseven
|
src/Rubik/Cube.hs
|
mit
| 288
| 0
| 5
| 47
| 62
| 43
| 19
| 9
| 0
|
{-# LANGUAGE NoImplicitPrelude #-}
-- | BasicPrelude mostly re-exports
-- several key libraries in their entirety.
-- The exception is Data.List,
-- where various functions are replaced
-- by similar versions that are either
-- generalized, operate on Text,
-- or are implemented strictly.
module BasicPrelude
( -- * Module exports
module CorePrelude
, module Data.List
, module Control.Monad
-- ** Folds and traversals
, Foldable
(
foldMap
, foldr
, foldr'
, foldl
, foldl'
, foldr1
, foldl1
)
-- In base-4.8, these are instance methods.
, elem
, maximum
, minimum
, Traversable
(
traverse
, sequenceA
, mapM
, sequence
)
-- * Enhanced exports
-- ** Simpler name for a typeclassed operation
, map
, empty
, (++)
, concat
, intercalate
-- ** Strict implementation
, BasicPrelude.sum
, BasicPrelude.product
-- ** Text for Read and Show operations
, show
, fromShow
, read
, readIO
-- ** FilePath for file operations
, readFile
, writeFile
, appendFile
-- * Text exports
-- ** Text operations (Pure)
, Text.lines
, Text.words
, Text.unlines
, Text.unwords
, textToString
, ltextToString
, fpToText
, fpFromText
, fpToString
, encodeUtf8
, decodeUtf8
-- ** Text operations (IO)
, Text.getLine
, LText.getContents
, LText.interact
-- * Miscellaneous prelude re-exports
-- ** Math
, Prelude.gcd
, Prelude.lcm
-- ** Show and Read
, Prelude.ShowS
, Prelude.showsPrec
, Prelude.showList
, Prelude.shows
, Prelude.showChar
, Prelude.showString
, Prelude.showParen
, Prelude.ReadS
, Prelude.readsPrec
, Prelude.readList
, Prelude.reads
, Prelude.readParen
, Prelude.lex
, readMay
-- ** IO operations
, Prelude.putChar
, Prelude.getChar
, Prelude.readLn
) where
import CorePrelude
import Data.List hiding
( -- prefer monoid versions instead
(++)
, concat
, intercalate
-- prefer Text versions instead
, lines
, words
, unlines
, unwords
-- prefer map = fmap instead
, map
-- prefer strict versions
, sum
, product
-- prefer Foldable versions
, elem
, foldl
, foldl'
, foldl1
, foldr
, foldr'
, foldr1
, maximum
, minimum
)
-- Import *all of the things* from Control.Monad,
-- specifically, the list-based things that
-- CorePrelude doesn't export
import Control.Monad hiding
( -- Also exported by Data.Traversable.
mapM
, sequence
)
import Data.Foldable (Foldable(..), elem, maximum, minimum)
import Data.Traversable (Traversable(..))
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import qualified Data.Text.Lazy as LText
import qualified Data.Text.Lazy.IO as LText
import qualified Prelude
import Data.Text.Encoding (encodeUtf8, decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import qualified Safe
-- | Generalized map over any 'Functor'.
--
-- > map = fmap
map :: (Functor f) => (a -> b) -> f a -> f b
map g xs = fmap g xs
-- | > empty = mempty
empty :: Monoid w => w
empty = mempty
{-# DEPRECATED empty "Use mempty" #-}
infixr 5 ++
-- | > (++) = mappend
(++) :: Monoid w => w -> w -> w
(++) = mappend
-- | > concat = mconcat
concat :: Monoid w => [w] -> w
concat = mconcat
-- | Join the pieces, placing the separator between adjacent elements.
--
-- > intercalate = mconcat .: intersperse
intercalate :: Monoid w => w -> [w] -> w
intercalate sep pieces = mconcat (Data.List.intersperse sep pieces)
-- | Compute the sum of a finite list of numbers, forcing the
-- accumulator at each step so no thunk chain is built.
sum :: Num a => [a] -> a
sum = go 0
  where
    go acc [] = acc
    go acc (n : ns) = let acc' = acc + n in acc' `seq` go acc' ns
-- | Compute the product of a finite list of numbers, forcing the
-- accumulator at each step so no thunk chain is built.
product :: Num a => [a] -> a
product = go 1
  where
    go acc [] = acc
    go acc (n : ns) = let acc' = acc * n in acc' `seq` go acc' ns
-- | Convert a value to readable Text
show :: Show a => a -> Text
show = Text.pack . Prelude.show
-- | Convert a value to readable IsString
--
-- Since 0.3.12
fromShow :: (Show a, IsString b) => a -> b
fromShow = fromString . Prelude.show
-- | Parse Text to a value
read :: Read a => Text -> a
read = Prelude.read . Text.unpack
-- | The readIO function is similar to read
-- except that it signals parse failure to the IO monad
-- instead of terminating the program.
readIO :: Read a => Text -> IO a
readIO = Prelude.readIO . Text.unpack
-- | Read a file and return the contents of the file as Text.
-- The entire file is read strictly.
readFile :: FilePath -> IO Text
readFile = Text.readFile
-- | Write Text to a file.
-- The file is truncated to zero length before writing begins.
writeFile :: FilePath -> Text -> IO ()
writeFile = Text.writeFile
-- | Write Text to the end of a file.
appendFile :: FilePath -> Text -> IO ()
appendFile = Text.appendFile
textToString :: Text -> Prelude.String
textToString = Text.unpack
ltextToString :: LText -> Prelude.String
ltextToString = LText.unpack
-- | This function assumes file paths are encoded in UTF8. If it
-- cannot decode the 'FilePath', the result is just an approximation.
--
-- Since 0.3.13
fpToText :: FilePath -> Text
fpToText = Text.pack
{-# DEPRECATED fpToText "Use Data.Text.pack" #-}
-- |
-- Since 0.3.13
fpFromText :: Text -> FilePath
fpFromText = Text.unpack
{-# DEPRECATED fpFromText "Use Data.Text.unpack" #-}
-- |
-- Since 0.3.13
fpToString :: FilePath -> Prelude.String
fpToString = id
{-# DEPRECATED fpToString "Use id" #-}
-- | Note that this is /not/ the standard @Data.Text.Encoding.decodeUtf8@. That
-- function will throw impure exceptions on any decoding errors. This function
-- instead uses @decodeLenient@.
decodeUtf8 :: ByteString -> Text
decodeUtf8 = decodeUtf8With lenientDecode
readMay :: Read a => Text -> Maybe a
readMay = Safe.readMay . Text.unpack
|
PierreR/basic-prelude
|
BasicPrelude.hs
|
mit
| 5,605
| 0
| 8
| 1,235
| 1,075
| 668
| 407
| 155
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ExistentialQuantification #-}
module Unpack where
import Control.Monad.Except
import Data.Either.Combinators
import Types
-- | Existential wrapper: some unpacker from 'LispVal' into any
-- 'Eq'-comparable result type.
data Unpacker m = forall a. Eq a => AnyUnpacker (LispVal -> m a)

-- | Coercing unpackers: numbers/bools/strings convert where sensible.
laxUnpackers :: (MonadError LispError m) => [Unpacker m]
laxUnpackers = [ AnyUnpacker unpackNum
               , AnyUnpacker extractBool
               , AnyUnpacker unpackString ]

-- | Strict unpackers: only exact constructor matches succeed.
strictUnpackers :: (MonadError LispError m) => [Unpacker m]
strictUnpackers = [ AnyUnpacker extractNum
                  , AnyUnpacker extractBool
                  , AnyUnpacker extractString ]
-- | True iff both values unpack successfully with the given unpacker
-- and the unpacked results are equal; any unpack error yields False.
unpackEquals :: LispVal -> LispVal -> Unpacker (Except LispError) -> Bool
unpackEquals x y (AnyUnpacker unpacker) =
  either (const False) id . runExcept $
    (==) <$> unpacker x <*> unpacker y
-- | Coercing string unpack: numbers and bools are rendered via 'show'.
unpackString :: (MonadError LispError m) => LispVal -> m String
unpackString (String s) = return s
unpackString (Number n) = return $ show n
unpackString (Bool b) = return $ show b
unpackString arg = throwError $ TypeMismatch "string" arg

-- | Coercing numeric unpack: parses numeric strings (via 'reads') and
-- unwraps singleton lists; anything else is a type error.
unpackNum :: (MonadError LispError m) => LispVal -> m Integer
unpackNum (Number n) = return n
unpackNum arg@(String s) =
  case reads s of ((n,_):_) -> return n
                  _ -> throwError $ TypeMismatch "number" arg
unpackNum (List [n]) = unpackNum n
unpackNum arg = throwError $ TypeMismatch "number" arg

-- | Strict numeric unpack: only the Number constructor is accepted.
extractNum :: (MonadError LispError m) => LispVal -> m Integer
extractNum (Number n) = return n
extractNum nonNum = throwError $ TypeMismatch "number" nonNum

-- | Strict boolean unpack.
extractBool :: (MonadError LispError m) => LispVal -> m Bool
extractBool (Bool b) = return b
extractBool nonBool = throwError $ TypeMismatch "bool" nonBool

-- | Strict string unpack.
extractString :: (MonadError LispError m) => LispVal -> m String
extractString (String s) = return s
extractString nonString = throwError $ TypeMismatch "string" nonString
|
ublubu/wyascheme
|
src/Unpack.hs
|
mit
| 1,869
| 0
| 10
| 374
| 637
| 321
| 316
| 42
| 2
|
/*Owner & Copyrights: Vance King Saxbe. A.*//* Copyright (c) <2014> Author Vance King Saxbe. A, and contributors Power Dominion Enterprise, Precieux Consulting and other contributors. Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting and GoldSax Technologies email @vsaxbe@yahoo.com. Development teams from Power Dominion Enterprise, Precieux Consulting. Project sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager.*/module GoldSaxMachineModule14.SimpleNoAG where
-- | Arithmetic over letter counts: 'AmountOf c' counts occurrences of
-- the character c in the "program" string; Plus/Times combine counts.
data Expr = Plus Expr Expr
          | Times Expr Expr
          | AmountOf Char

-- | Evaluate an expression against the given character sequence.
meaning :: Expr -> [Char] -> Int
meaning e p = go e
  where
    go (Plus l r)   = go l + go r
    go (Times l r)  = go l * go r
    go (AmountOf c) = length [ x | x <- p, x == c ]
/*email to provide support at vancekingsaxbe@powerdominionenterprise.com, businessaffairs@powerdominionenterprise.com, For donations please write to fundraising@powerdominionenterprise.com*/
|
VanceKingSaxbeA/GoldSaxMachineStore
|
GoldSaxMachineModule14/src/Chapter14/SimpleNoAG.hs
|
mit
| 998
| 23
| 16
| 162
| 380
| 192
| 188
| 8
| 1
|
module Anagram ( anagramsFor ) where
import Data.List
import Data.Char
import Control.Arrow
-- | Canonical form of a word: lowercase every character, then sort.
sortLower :: String -> String
sortLower w = sort [ toLower c | c <- w ]
-- | Two words are anagrams when their canonical (lowercased, sorted)
-- forms coincide.
checkAnagram :: String -> String -> Bool
checkAnagram st x = canon st == canon x
  where canon = sort . map toLower
-- | Candidates from @xs@ that are anagrams of @st@, excluding exact
-- (case-sensitive) matches of @st@ itself.
anagramsFor :: String -> [ String ] -> [ String ]
anagramsFor st xs =
  [ x | x <- xs, x /= st, canon x == canon st ]
  where canon = sort . map toLower
|
mukeshtiwari/Excercism
|
haskell/anagram/Anagram.hs
|
mit
| 373
| 0
| 9
| 77
| 136
| 73
| 63
| 10
| 1
|
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE InstanceSigs #-}
-- | Fixed point of a functor: a 'Fix f' is an @f@ whose recursive
-- positions are again 'Fix f'.
newtype Fix f = Fix { unFix :: f (Fix f) }

-- | Catamorphism: fold a 'Fix f' bottom-up with an algebra @f a -> a@.
cata :: Functor f => (f a -> a) -> (Fix f -> a)
cata f = f . fmap (cata f) . unFix

-- | Anamorphism: unfold a seed with a coalgebra @a -> f a@.
ana :: Functor f => (a -> f a) -> (a -> Fix f)
ana f = Fix . fmap (ana f) . f

---

-- | Base functor of lists; @b@ marks the recursive tail position.
data L a b = Nil | Cons a b

-- | Lists as the fixed point of 'L a'.
type List a = Fix (L a)

instance Functor (L a) where
  --fmap :: (b -> c) -> (L a b) -> (L a c)
  fmap f x = case x of
    Nil -> Nil
    Cons a b -> Cons a (f b)

-- | The empty list.
empty :: List a
empty = Fix Nil

-- | Prepend an element.
cons :: a -> List a -> List a
cons a xs = Fix (Cons a xs)

-- | Singleton list.
unit :: a -> List a
unit a = cons a empty

-- | Length as a catamorphism. NOTE: shadows 'Prelude.length'.
length :: List a -> Int
length = cata $ \x -> case x of
  Nil -> 0
  Cons _ n -> n + 1

-- | Sum as a catamorphism. NOTE: shadows 'Prelude.sum'.
sum :: Num a => List a -> a
sum = cata $ \x -> case x of
  Nil -> 0
  Cons a s -> a + s

main::IO ()
main = do
  putStrLn "hello world"
|
eulerfx/learnfp
|
listf.hs
|
mit
| 913
| 0
| 11
| 305
| 450
| 229
| 221
| 31
| 2
|
module DMatrix where
import qualified Data.List as L
import qualified Text.Read as R
import qualified Data.Maybe as D
-- | A raw CSV-like table: column names plus rows of string cells.
data DataFrame = DataFrame {
  headers :: [String], body :: [[String]] } deriving (Show)

-- | Fully numeric design matrix.
type DMatrix = [[Float]]
-- | Drop duplicates, keeping first occurrences (O(n^2) via 'nub').
unique :: [String] -> [String]
unique xs = L.nub xs
-- | Parse a 'Float'; unparseable input maps to the sentinel value 9.9.
strToFloat :: String -> Float
strToFloat s = maybe 9.9 realToFrac (R.readMaybe s :: Maybe Float)
-- | Strip the header (first) and footer (last) rows of a raw table.
--
-- Fix: the original @tail $ init xs@ crashed on tables with fewer than
-- two rows; such tables now yield the empty list.
listBody :: [[String]] -> [[String]]
listBody xs = case xs of
  (_ : rest@(_ : _)) -> init rest
  _ -> []
-- | The i-th cell of every row, as a column of strings.
-- NOTE(review): partial — @(!! i)@ errors on rows shorter than i+1.
index :: Int -> DataFrame -> [String]
index i df = map (!! i) (body df)

-- | Position of a column name among the headers, if present.
indexByColumn :: String -> DataFrame -> Maybe Int
indexByColumn col df = L.elemIndex col (headers df)

-- | Column by name; [] when the name is unknown.
column :: String -> DataFrame -> [String]
column colname df = case indexByColumn colname df of
  Just i -> index i df
  Nothing -> []
-- | Encode strings as Floats by the index of their first occurrence.
-- The 9.9 fallback is effectively unreachable: every element occurs in
-- its own de-duplicated list.
factorize :: [String] -> [Float]
factorize xs = [ maybe 9.9 fromIntegral (L.elemIndex x levels) | x <- xs ]
  where levels = L.nub xs
-- | One indicator column ("1"/"0" per row) for each distinct value of
-- the input column, in first-occurrence order.
dummyLists :: [String] -> [[String]]
dummyLists col =
  [ [ if v == level then "1" else "0" | v <- col ] | level <- L.nub col ]
-- | Expand a categorical column into indicator columns; a numeric
-- column passes through as a single column.
processColumn :: [String] -> [[String]]
processColumn col = case columnIsFactors col of
  False -> [col] --map strToFloat c
  True -> dummyLists col

-- | Apply 'processColumn' to every column of a column-major table.
dummyDf :: [[String]] -> [[[String]]]
dummyDf [] = []
dummyDf (x:xs) = (processColumn x) : (dummyDf xs)
-- | A typed column, and one step of a column-munging pipeline.
data Column = Float [Float] | Character [String]
type Mungestep = [Column] -> [Column]

-- | Apply the munge steps left to right.
munge :: [Mungestep] -> [Column] -> [Column]
munge [] df = df
munge (x:xs) df = munge xs $ munge1 x df

-- | Apply one step.
-- NOTE(review): @foldl (++) (m df) []@ folds over an empty list, so
-- this is just @m df@ — the fold looks like leftover scaffolding.
munge1 :: Mungestep -> [Column] -> [Column]
munge1 m df = foldl (++) (m df) []
-- | A column is categorical when any entry fails to parse as a Float.
columnIsFactors :: [String] -> Bool
columnIsFactors col =
  any (D.isNothing . (R.readMaybe :: String -> Maybe Float)) col
-- | Indices [0 .. n-1] for a list of length n.
seqAlong :: [a] -> [Int]
seqAlong xs = zipWith const [0 ..] xs
--TODO: I ended up not needing this, but I don't want to get rid of it.
-- | Positions of the True entries.
which :: [Bool] -> [Int]
which bools = [ i | (i, flag) <- zip [0 ..] bools, flag ]
-- | Column of strings to column of Floats: factorize categorical
-- columns, parse numeric ones (sentinel 9.9 on parse failure).
convertColumn :: [String] -> [Float]
convertColumn col = case columnIsFactors col of
  False -> map strToFloat col
  True -> factorize col

-- | Whole DataFrame to a numeric matrix; rows become columns via
-- 'transpose' so each inner list is one original column converted.
toDMatrix :: DataFrame -> DMatrix
toDMatrix xs = map convertColumn (L.transpose $ body xs)
|
peterhurford/DMatrix.hs
|
src/DMatrix.hs
|
mit
| 2,219
| 0
| 10
| 467
| 962
| 519
| 443
| 58
| 2
|
-- | Simple API for multivariate polynomials.
module Math.Poly.Multi (
MPoly, terms, x_, totalDegree, monomial, pderiv, lderiv,
evalAts, linMonoMap, numVars
) where
import Prelude ()
import Math.Base
import Data.Monoid
import Control.Arrow (first)
type Mono = [Int]
-- Named instances perhaps?
newtype Mono' = M Mono deriving (Eq)
type RawPoly a = [(a, Mono)]
newtype MPoly a = P { terms :: RawPoly a } deriving (Eq)
-- This is wrapped in M instead of being a type just so this ordering
-- can be customized, which I can't think of any reason to do.
instance Ord Mono' where
compare (M a) (M b) = (sum a `compare` sum b) `mappend` (a `compare` b)
instance (Eq a, Num a) => Num (MPoly a) where
P x + P y = P $ gen_add 1 x y
P x * P y = P $ foldl (gen_add 1) []
[ [ (cx*cy, mx`mmul`my) | (cy, my) <- y ] | (cx, mx) <- x ]
negate (P x) = P $ gen_add (-1) [] x
P x - P y = P $ gen_add (-1) x y
abs (P x) = P x; signum _ = 1
fromInteger = unit . fromInteger
instance (Show a, Num a) => Show (MPoly a) where
show (P x) = show x
instance Extension MPoly where
unit 0 = P[]; unit x = P[(x, [])]
project (P x) = maybe 0 fst $ find (null . snd) x
nmap f (P x) = P . filternz $ map (first f) x
instance Foldable MPoly where
foldr f v = foldr f v . map fst . terms
filternz :: (Eq a, Num a) => RawPoly a -> RawPoly a
filternz = filter ((/=0).fst)
mmul :: Mono -> Mono -> Mono
mmul = zipWith' (+)
-- There may be a more efficient way to do this, but efficiency
-- is not hugely important here.
-- | Merge two term lists ordered descending by monomial (via 'Mono''),
-- scaling the second list's coefficients by @f@; terms whose combined
-- coefficient is zero are dropped.
gen_add :: (Eq a, Num a) => a -> RawPoly a -> RawPoly a -> RawPoly a
gen_add f = ga where
  ga x [] = x
  ga [] x = map (first (f*)) x
  ga z@(b@(c1,t1):x1) w@((c2,t2):x2) =
    case M t1 `compare` M t2 of
      GT -> b : ga x1 w
      LT -> (f*c2, t2) : ga z x2
      EQ -> case c1+f*c2 of 0 -> ga x1 x2; n -> (n, t1) : ga x1 x2
-- | Build a raw monomial from coefficient and exponent sequence.
-- It could be defined as
--
-- @
-- monomial c es = c * product [ x_ i ^ e | (i, e) <- zip [0..] es ]
-- @
--
-- but is more efficient.
-- It is something like an opposite of `terms`. Trailing zeros are
-- eliminated.
monomial :: Num a => a -> [Int] -> MPoly a
monomial c es = P [(c, dz es)] where
dz l = case span (==0) l of (_, []) -> []; (x1, x:xs) -> x1++x:dz xs
-- | Simple utility function to give the @n@th variable as a polynomial. This
-- type is overloaded, so it is useful to have utility functions such as
--
-- @
-- x0 :: MPoly Integer
-- x0 = x_ 0
-- @
x_ :: (Num a) => Int -> MPoly a
x_ i = 1 `monomial` (replicate i 0 ++ [1])
-- | The total degree of the polynomial is the maximum total degree of its
-- constituent monomial;
-- the total degree of a monomial is the sum of the degrees of its component
-- variables.
totalDegree :: (Num a) => MPoly a -> Int
totalDegree (P x) = maximum $ 0 : map (sum.snd) x
-- | A general map which is linear in monomials. It takes a coercion
-- function and a function
-- from exponent sequences to a numeric type and returns a function
-- from polynomials to whatever.
linMonoMap :: (Num a, Num b) => (a -> b) -> ([Int] -> b) -> MPoly a -> b
linMonoMap u f (P p) = sum [ u c * f e | (c, e) <- p ]
-- | Partial derivative.
pderiv :: (Eq a, Num a) => Int -> MPoly a -> MPoly a
pderiv n = linMonoMap unit $ \es ->
case splitAt n es of
(_, []) -> 0; (_, 0:_) -> 0
(e1, x:e2) -> fromIntegral x `monomial` (e1++(x-1):e2)
-- | Linear derivative, in a given direction.
lderiv :: (Eq a, Num a) => (Int, MPoly a) -> [a] -> MPoly a
lderiv (n, p) = let ds = map (flip pderiv p) [0..n-1] in
\ v -> sum [ unit e * d | (e, d) <- zip v ds {-, e/=0 -} ]
-- This could be changed to use linMonoMap.
-- | Substitute a list of values into the variables. The second argument may be
-- infinite, but a failure will result if it has insufficient variables.
evalAts :: Num a => MPoly a -> [a] -> a
evalAts (P ms) ps =
  sum [ c * product [ w!!p | (w, p) <- zip powers vec ] | (c, vec) <- ms ]
  -- Each variable's power table is shared across all monomials.
  where powers = map (\p -> iterate (p*) 1) ps
-- | Simple utility function gives one more than the index of the largest
-- variable in a polynomial.
numVars :: MPoly a -> Int
numVars = maximum . map (length . snd) . terms
|
galenhuntington/etr
|
Math/Poly/Multi.hs
|
mit
| 4,253
| 0
| 15
| 1,070
| 1,721
| 927
| 794
| 65
| 6
|
import Plot.VectorField
import Utils.MatrixIO
import Utils.Grid
import Data.Matrix
-- A 1x1 matrix; apparently a leftover/example value.
mat = fromLists [[1]]
-- Example planar vector fields: each maps a point to a velocity vector.
f (x,y) = (y - x^2 - 1, 1 + x - y^2)
-- Damped, torque-driven pendulum in state (angle, angular velocity).
-- NOTE(review): the mass parameter @m@ is unused and the torque term is not
-- scaled by inertia — confirm whether that is intended.
pendulum_dynamics g m l b torque (th, th_dot)
    = (th_dot, torque - g * sin( th ) / l - b * th_dot)
g (x1,x2) = (((-6) * x1 /(1 + x1^2)^2) + 2*x2, (-2) * ( x1 + x2 ) / ( 1+ x1^2 )^2)
h (x1,x2) = (x2, x1**3 -x1 - x2)
j (x1,x2) = (x2 - (x1^3), (-1)*(x2 ^ 3 + x1))
-- Render stream plots of the pendulum field on a fixed (theta, theta-dot)
-- grid, for two parameter settings.
main
    = do
      let gridspec = (FloatRange (-6.28) 6.28 30, FloatRange (-4) 4 30)
      let (x,y) = mgrid gridspec
      let (u,v) = mapOverGrid (pendulum_dynamics 10 30 5.0 0.1 0) gridspec
      raw_streamplot x y u v
      -- NOTE(review): this second (u,v) shadows the first binding; each
      -- raw_streamplot call uses the (u,v) bound immediately above it.
      let (u,v) = mapOverGrid (pendulum_dynamics 10 30 4.0 0.1 0) gridspec
      raw_streamplot x y u v
      --raw_quiverplot x y u v
|
Zomega/thesis
|
Wurm/PlotTest.hs
|
mit
| 782
| 1
| 13
| 214
| 468
| 250
| 218
| 19
| 1
|
module Graph where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Monad
import Data.Attoparsec.Char8
import Data.ByteString.Char8 (pack)
import Data.Maybe (fromJust)
import Debug.Trace
-- | A node of the (possibly cyclic) graph.
data Node = Node { label :: String              -- ^ unique node name
                 , text :: String               -- ^ body text of the node
                 , neighbors :: [(String,Node)] -- ^ (link text, target node)
                 }
-- | Tie the knot: build a (possibly cyclic) graph from
-- (label, text, [(link text, target label)]) triples and return the node
-- labelled "root".  'lookupNode' is referenced lazily while the lookup list
-- is still being constructed, which is what permits cycles.
-- NOTE(review): 'fromJust' turns a dangling target label (or a missing
-- "root") into a runtime crash.
mkGraph :: [(String, String, [(String,String)])] -> Node
mkGraph links = lookupNode "root"
  where
    mkNode (lbl, txt, adj) = (lbl, Node lbl txt $ map (fst &&& lookupNode . snd) adj)
    nodeLookupList = map mkNode links
    lookupNode lbl = fromJust $ lookup lbl nodeLookupList
-- | Parse a node header of the form @[label]@, consuming trailing newlines.
nodeLabel :: Parser String
nodeLabel = do
  char '['
  label <- many1 (notChar ']')
  char ']'
  many endOfLine
  return label
-- | Parse the node body: lines up to the next @[@ (start of a label or
-- link), joined together; trailing newlines are consumed.
nodeText :: Parser String
nodeText = do
  text <- (many . satisfy $ \c ->
            c /= '\n' &&
            c /= '\r' &&
            c /= '['
          ) `sepBy` endOfLine
  many endOfLine
  return (join text)
-- | Parse a link of the form @[[target]description]@;
-- returns (description, target).
nodeLink :: Parser (String, String)
nodeLink = do
  char '['
  skipSpace
  char '['
  target <- many1 (notChar ']')
  char ']'
  desc <- many1 (notChar ']')
  char ']'
  many endOfLine
  return (desc, target)
-- | A complete node: label, body text, then zero or more outgoing links.
nodeSpec :: Parser (String, String, [(String, String)])
nodeSpec = (,,) <$> nodeLabel <*> nodeText <*> many nodeLink
-- | Parse a whole graph description and build the graph.
-- Calls 'error' on malformed input; the parser's own message is now
-- included instead of being silently discarded.
loadFromString :: String -> Node
loadFromString str = case parseOnly (many nodeSpec) (pack str) of
  Left err -> error ("loadFromString: improper string: " ++ err)
  Right l  -> mkGraph l
|
mplamann/interactive-graph
|
src/Graph.hs
|
mit
| 1,461
| 0
| 16
| 369
| 553
| 286
| 267
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-opsworks-layer-recipes.html
module Stratosphere.ResourceProperties.OpsWorksLayerRecipes where
import Stratosphere.ResourceImports
-- | Full data type definition for OpsWorksLayerRecipes. See
-- 'opsWorksLayerRecipes' for a more convenient constructor.
-- All five recipe lists are optional.
data OpsWorksLayerRecipes =
  OpsWorksLayerRecipes
  { _opsWorksLayerRecipesConfigure :: Maybe (ValList Text)
  , _opsWorksLayerRecipesDeploy :: Maybe (ValList Text)
  , _opsWorksLayerRecipesSetup :: Maybe (ValList Text)
  , _opsWorksLayerRecipesShutdown :: Maybe (ValList Text)
  , _opsWorksLayerRecipesUndeploy :: Maybe (ValList Text)
  } deriving (Show, Eq)
-- Serialise to a CloudFormation JSON object; 'catMaybes' drops fields left
-- as Nothing, so absent properties are omitted from the output entirely.
instance ToJSON OpsWorksLayerRecipes where
  toJSON OpsWorksLayerRecipes{..} =
    object $
    catMaybes
    [ fmap (("Configure",) . toJSON) _opsWorksLayerRecipesConfigure
    , fmap (("Deploy",) . toJSON) _opsWorksLayerRecipesDeploy
    , fmap (("Setup",) . toJSON) _opsWorksLayerRecipesSetup
    , fmap (("Shutdown",) . toJSON) _opsWorksLayerRecipesShutdown
    , fmap (("Undeploy",) . toJSON) _opsWorksLayerRecipesUndeploy
    ]
-- | Constructor for 'OpsWorksLayerRecipes' containing required fields as
-- arguments.  All fields are optional, so this takes no arguments and sets
-- every recipe list to Nothing; override fields with the lenses below.
opsWorksLayerRecipes
  :: OpsWorksLayerRecipes
opsWorksLayerRecipes =
  OpsWorksLayerRecipes
  { _opsWorksLayerRecipesConfigure = Nothing
  , _opsWorksLayerRecipesDeploy = Nothing
  , _opsWorksLayerRecipesSetup = Nothing
  , _opsWorksLayerRecipesShutdown = Nothing
  , _opsWorksLayerRecipesUndeploy = Nothing
  }
-- Lenses for each optional recipe list.
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-opsworks-layer-recipes.html#cfn-opsworks-layer-customrecipes-configure
owlrConfigure :: Lens' OpsWorksLayerRecipes (Maybe (ValList Text))
owlrConfigure = lens _opsWorksLayerRecipesConfigure (\s a -> s { _opsWorksLayerRecipesConfigure = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-opsworks-layer-recipes.html#cfn-opsworks-layer-customrecipes-deploy
owlrDeploy :: Lens' OpsWorksLayerRecipes (Maybe (ValList Text))
owlrDeploy = lens _opsWorksLayerRecipesDeploy (\s a -> s { _opsWorksLayerRecipesDeploy = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-opsworks-layer-recipes.html#cfn-opsworks-layer-customrecipes-setup
owlrSetup :: Lens' OpsWorksLayerRecipes (Maybe (ValList Text))
owlrSetup = lens _opsWorksLayerRecipesSetup (\s a -> s { _opsWorksLayerRecipesSetup = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-opsworks-layer-recipes.html#cfn-opsworks-layer-customrecipes-shutdown
owlrShutdown :: Lens' OpsWorksLayerRecipes (Maybe (ValList Text))
owlrShutdown = lens _opsWorksLayerRecipesShutdown (\s a -> s { _opsWorksLayerRecipesShutdown = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-opsworks-layer-recipes.html#cfn-opsworks-layer-customrecipes-undeploy
owlrUndeploy :: Lens' OpsWorksLayerRecipes (Maybe (ValList Text))
owlrUndeploy = lens _opsWorksLayerRecipesUndeploy (\s a -> s { _opsWorksLayerRecipesUndeploy = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/OpsWorksLayerRecipes.hs
|
mit
| 3,240
| 0
| 12
| 349
| 537
| 304
| 233
| 42
| 1
|
-- Tree module
-- By Gregory W. Schwartz
-- | Collects all functions pertaining to the creation of the shared mutation
-- tree
{-# LANGUAGE BangPatterns #-}
module Tree where
-- Built-in
import Data.Maybe
import Data.List
import qualified Data.Map as M
import Data.Tree
import qualified Data.Sequence as Seq
import qualified Data.Foldable as F
-- Local
import Types
import Utility
-- | Create the subforest from the most common mutations: one subtree for
-- the sequences carrying the current most common mutation, then recurse on
-- the remaining sequences at the same tree level.
getSubForest :: ParentSeq -> [SuperFasta] -> [Tree TreeInfo]
getSubForest _ [] = []
getSubForest parentSeq fastaList =
    createTree (Just mutFreq) parentSeq (fst lineage)
  : getSubForest parentSeq (snd lineage)
  where
    -- (sequences with the mutation, sequences without it)
    lineage = iterateLineage (fst mutFreq) fastaList
    mutFreq = mostCommonMutation fastaList
-- | Create the lineage tree by finding the most common mutations.
-- @mutFreq@ is the (mutation, count) that created this node (Nothing at the
-- root); @parentSeq@ is the parent's sequence, to which the mutation is
-- applied to obtain this node's sequence.
createTree :: Maybe (Mutation, Int)
           -> ParentSeq
           -> [SuperFasta]
           -> Tree TreeInfo
createTree mutFreq parentSeq fastaList =
    Node { rootLabel = TreeInfo { sequences = map
                                              superFastaToPrintFasta
                                              fastaList
                                , nodeSequence = F.toList newSeq
                                  -- [] when mutFreq is Nothing, otherwise a
                                  -- single printed mutation
                                , nodeMutations = catMaybes
                                                . (: [])
                                                . fmap (printMutation . fst)
                                                $ mutFreq
                                , number = printNumber mutFreq }
           -- recurse only on sequences that still have mutations left
         , subForest = getSubForest newSeq
                     . filter (not . M.null . mutations)
                     $ fastaList }
  where
    printMutation (!x, (!y, !z)) = intercalate "-" [[y], show x, [z]]
    printNumber Nothing = 0
    printNumber (Just (_, x)) = x
    -- apply the node's mutation (1-based position) to the parent sequence
    newSeq = mutate mutFreq parentSeq
    mutate Nothing = id
    mutate (Just ((p, (_, x)), _)) = Seq.update (p - 1) x
-- | Collapse nodes where there are no observed sequences, as we don't know
-- what order the mutations happened in.  @muts@ accumulates the mutations
-- of collapsed ancestors and is attached to the first kept node.
collapseTree :: [String] -> Tree TreeInfo -> Tree TreeInfo
-- Node introduced no mutations: nothing to merge, just recurse.
collapseTree _ tree@(Node { rootLabel = TreeInfo { nodeMutations = [] }
                          , subForest = ts }) =
    tree { subForest = map (collapseTree []) ts }
collapseTree muts tree@(Node { rootLabel = rl, subForest = ts })
    -- observed here, or several children: keep the node, attach accumulated
    -- mutations.  NOTE(review): the @not anyObserved &&@ conjunct is
    -- redundant inside this guard, and @tail ts@ assumes at least one child
    -- when nothing is observed — confirm leaves always carry observations.
    | anyObserved || (not anyObserved && (not . null . tail $ ts)) =
        tree { rootLabel = rl { nodeMutations = nodeMutations rl ++ muts }
             , subForest = map (collapseTree []) ts }
    -- unobserved with a single child: fold this node into the child.
    | not anyObserved && (null . tail $ ts) =
        collapseTree (muts ++ nodeMutations rl) . head $ ts
    | otherwise = tree { subForest = map (collapseTree []) ts }
  where
    -- a sequence with 0 remaining mutations was observed at this node
    anyObserved = any (== 0) . map remainingMutations . sequences $ rl
|
GregorySchwartz/lineage
|
src/Tree.hs
|
gpl-2.0
| 2,762
| 0
| 14
| 893
| 758
| 413
| 345
| 52
| 3
|
module P1 where
-- | Project Euler #1: sum of all positive integers below @n@ that are
-- divisible by 3 or by 5.
solve :: (Integral i) => i -> i
solve n = sum [ k | k <- takeWhile (< n) [1..], k `mod` 3 == 0 || k `mod` 5 == 0 ]
|
hsinhuang/codebase
|
ProjectEuler/P1.hs
|
gpl-2.0
| 138
| 0
| 13
| 37
| 85
| 48
| 37
| 3
| 1
|
module T where
import Tests.KesterelBasis
-- Instantaneously terminating loop body.
e = arr (\() -> ()) >>> loopE nothingE
-- The compiled circuit under test.
c = runE e
-- Each simulation step should emit 'false' paired with its input.
prop_correct = property (\xs -> simulate c xs
                           == zip (repeat false) xs)
-- NOTE(review): per the file name this loop is expected to be
-- non-constructive; 'isConstructive' returning Nothing is the pass case.
ok_constructive = isNothing (isConstructive c)
ok_netlist = runNL c
|
peteg/ADHOC
|
Tests/08_Kesterel/013_loop_nothing_non_constructive.hs
|
gpl-2.0
| 306
| 0
| 11
| 77
| 103
| 54
| 49
| 8
| 1
|
{- |
Module : $Header$
Description : XML Printer function for FreeCAD datatypes
Copyright : (c) Robert Savu and Uni Bremen 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Robert.Savu@dfki.de
Stability : experimental
Portability : portable
Declaration of the abstract datatypes of FreeCAD terms
-}
module FreeCAD.XMLPrinter where
import Text.XML.Light
import FreeCAD.As
import qualified Data.Set as Set
-- | Pretty-print a whole FreeCAD signature as an XML document.
exportXMLFC :: Sign -> String
exportXMLFC = ppTopElement . doc2XML . Set.toList . objects
-- | Build an XML attribute from a plain key/value string pair.
makeAttr :: String -> String -> Attr
makeAttr key value = Attr (unqual key) value
-- | Wrap all named objects in a top-level @document@ element.
doc2XML :: Document -> Element
doc2XML list = unode "document" (map sendNamedObj list)
-- | One @Object@ element per named object, carrying its name as attribute.
sendNamedObj :: NamedObject -> Element
sendNamedObj no = add_attr att (unode "Object" (getNOChildren no)) where
    att = Attr (unqual "name") (name no)
-- | Children of an object element: its placement followed by its shape.
getNOChildren :: NamedObject -> [Element]
getNOChildren no = (makePlaceElem place):(makeObjElem obj):[] where
    pobj = object no
    obj = o pobj
    place = p pobj
-- | Render a 'Placement' as a @placement@ element whose attributes are the
-- position coordinates (x, y, z) and the orientation quaternion (q0..q3).
makePlaceElem :: Placement -> Element
makePlaceElem pl = unode "placement" attrList
    where
        -- The original defined two byte-identical helpers (mkp and mko);
        -- a single helper suffices.
        mk a b = Attr (unqual a) (show b)
        pos = position pl
        ori = orientation pl
        attrList = [ mk "x" (x pos), mk "y" (y pos), mk "z" (z pos)
                   , mk "q0" (q0 ori), mk "q1" (q1 ori)
                   , mk "q2" (q2 ori), mk "q3" (q3 ori) ]
-- | Attribute whose value is rendered with 'show' (numeric parameters).
mkNumAtt :: Show a => String -> a -> Attr
mkNumAtt key num = (Attr (unqual key) (show num))
-- | Render an 'Object': either a primitive, or a boolean/extrusion
-- operation referencing previously defined objects by name.
makeObjElem :: Object -> Element
makeObjElem obj = case obj of
    BaseObject bo -> makeBOElem bo
    Cut eo1 eo2 -> mk2refs "cut" eo1 eo2
    Common eo1 eo2 -> mk2refs "common" eo1 eo2
    Fusion eo1 eo2 -> mk2refs "fusion" eo1 eo2
    Section eo1 eo2 -> mk2refs "section" eo1 eo2
    Extrusion eo1 v3 -> mk1refs "extrusion" eo1 v3
    where
        mkRefAtt key eo = (Attr (unqual key) (getEORef eo))
        -- binary op: "base" and "tool" object references
        mk2refs consType ref1 ref2 =
            unode consType ((mkRefAtt "base" ref1):(mkRefAtt "tool" ref2):[])
        -- extrusion: a base reference plus a direction vector
        mk1refs consType ref v3 =
            unode consType ((mkRefAtt "base" ref):(mkNumAtt "xval" (x v3)):
                            (mkNumAtt "yval" (y v3)):(mkNumAtt "zval" (z v3)):[])
-- | Extract the name out of an object reference.
-- NOTE(review): partial — a 'Placed' value makes this 'error'.
getEORef :: ExtendedObject -> String
getEORef eo = case eo of
    Ref s -> s
    Placed _ -> error "cannot get reference"
-- | Render a primitive ('BaseObject') as an element carrying its numeric
-- parameters as attributes.
makeBOElem :: BaseObject -> Element
makeBOElem obj = case obj of
    Box a1 a2 a3 ->
        unode "box" [ mkNumAtt "height" a1
                    , mkNumAtt "width" a2
                    , mkNumAtt "length" a3 ]
    Cylinder a1 a2 a3 ->
        unode "cylinder" [ mkNumAtt "angle" a1
                         , mkNumAtt "height" a2
                         , mkNumAtt "radius" a3 ]
    Sphere a1 a2 a3 a4 ->
        unode "sphere" [ mkNumAtt "angle1" a1
                       , mkNumAtt "angle2" a2
                       , mkNumAtt "angle3" a3
                       , mkNumAtt "radius" a4 ]
    Cone a1 a2 a3 a4 ->
        unode "cone" [ mkNumAtt "angle" a1
                     , mkNumAtt "radius1" a2
                     , mkNumAtt "radius2" a3
                     , mkNumAtt "height" a4 ]
    Torus a1 a2 a3 a4 a5 ->
        unode "torus" [ mkNumAtt "angle1" a1
                      , mkNumAtt "angle2" a2
                      , mkNumAtt "angle3" a3
                      , mkNumAtt "radius1" a4
                      , mkNumAtt "radius2" a5 ]
    Line a1 -> unode "line" (mkNumAtt "length" a1)
    Circle a1 a2 a3 ->
        unode "circle" [ mkNumAtt "startang" a1
                       , mkNumAtt "endang" a2
                       , mkNumAtt "radius" a3 ]
    Rectangle a1 a2 ->
        unode "rectangle" [ mkNumAtt "height" a1
                          , mkNumAtt "length" a2 ]
|
nevrenato/Hets_Fork
|
FreeCAD/XMLPrinter.hs
|
gpl-2.0
| 4,102
| 0
| 16
| 1,412
| 1,378
| 685
| 693
| 77
| 8
|
import System (getArgs)
import System.Directory (getDirectoryContents, getCurrentDirectory)
import Char (isDigit)
import Text.Printf (printf)
import qualified Time as T
type Year = Int
type Month = Int
-- | (year, month) pair selecting the files to merge; month is 1-based.
type DateRange = (Year, Month)
-- | Date range for "one month ago".  CalendarTime months are an Enum
-- starting at 0, hence the @+ 1@.
defaultDateRange :: T.CalendarTime -> DateRange
defaultDateRange (T.CalendarTime y m _ _ _ _ _ _ _ _ _ _)= (y, fromEnum m + 1)
-- | Parse a value with 'reads', yielding Nothing unless there is exactly
-- one parse.  (Equivalent in spirit to 'Text.Read.readMaybe'.)
readDateRange :: Read a => String -> Maybe a
readDateRange input =
    case reads input of
        [(parsed, _)] -> Just parsed
        _             -> Nothing
-- | Does a directory entry belong to the given (year, month)?
-- Only names of exactly 25 characters whose first 21 are all digits are
-- considered (apparently a timestamp-style prefix — confirm the exact
-- naming scheme); the year sits at offsets 5..8 and the month at 9..10.
-- 'read' is partial in general, but the all-digits guard makes these two
-- parses safe for matching names.
isFileForMerge :: String -> DateRange -> Bool
isFileForMerge s (y, m) | (25 == length s) && (all isDigit (take 21 s)) =
    let
        ry = read ((take 4 . drop 5) s)
        rm = read ((take 2 . drop 9) s)
    in ry == y && rm == m
isFileForMerge _ _ | otherwise = False
-- | True iff the name's last three characters are "txt".
isTxt :: String -> Bool
isTxt name = lastThree == "txt"
  where lastThree = drop (length name - 3) name
-- | Keep only the names that end in "txt" AND match the date range.
filesToMerge :: [String] -> DateRange -> [String]
filesToMerge names dr = [ n | n <- names, isTxt n, isFileForMerge n dr ]
-- | Read the contents of every file to be merged.  Uses lazy 'readFile',
-- so handles may stay open until each contents string is fully consumed.
merge :: [String] -> IO [String]
merge fsToMerge = mapM readFile fsToMerge
-- | Name of the merged output file: "<year>.<zero-padded month>.txt",
-- e.g. (2011, 3) -> "2011.03.txt".  The original used @%02s@ on 'show'n
-- values — a string conversion, which does not zero-pad numbers; @%02d@ on
-- the Int itself produces the intended two-digit month.
newFileName :: DateRange -> String
newFileName (y, m) = printf "%d.%02d.txt" y m
-- | Merge all matching files in the current directory into a single
-- "<year>.<month>.txt" file, plus a ".log" file listing what was merged.
-- The date range comes from the first two command-line arguments, or
-- defaults to one month before now.
main :: IO ()
main = do
    args <- getArgs
    curDir <- getCurrentDirectory
    dirContents <- getDirectoryContents curDir
    curTime <- T.getClockTime
    -- NOTE(review): '<- return $' is just a roundabout 'let'.
    monthAgoTime <- return $ T.addToClockTime (T.TimeDiff 0 (-1) 0 0 0 0 0) curTime
    calendarMonthAgoTime <- T.toCalendarTime monthAgoTime
    let maybeDateRange = case args of
            (a:b:_) -> readDateRange (unwords [a, b])
            _ -> Just $ defaultDateRange calendarMonthAgoTime
    case maybeDateRange of
        Just dr -> do
            let fsToMerge = filesToMerge dirContents dr
            let fsToMergeCountStr = show $ length fsToMerge
            let mergeLog = (newFileName dr ++ ".log")
            let dateRangeMsg = "DateRange: " ++ show dr
            fsContents <- merge fsToMerge
            -- write the merged data, then the log of merged file names
            writeFile (newFileName dr) (unlines fsContents)
            writeFile mergeLog (unlines fsToMerge ++ printf "\n%s\nTotal files: %s" dateRangeMsg fsToMergeCountStr)
            putStrLn (unlines fsContents)
            putStrLn dateRangeMsg
            --putStrLn ("Files to merge: " ++ unlines fsToMerge)
            putStrLn (printf "Count of files: %s. See %s for file list." fsToMergeCountStr mergeLog)
        Nothing -> putStrLn ("Invalid date range.")
|
graninas/Haskell-Algorithms
|
Materials/Haskell в реальном мире - Статья/merger 0.14.hs
|
gpl-3.0
| 2,320
| 3
| 18
| 546
| 862
| 427
| 435
| 53
| 3
|
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Grid.Helpers
(
gridModifyTick,
gridModifyCamera,
gridClearEvents,
gridCameraCmdsIsComplete,
gridPushCameraCmds,
gridPushCameraCmd,
gridSetCameraCmds,
gridClearCameraCmds,
turnFromDir,
dirNodeNode,
turnMultNode,
module Game.Grid.Helpers.Path,
module Game.Grid.Helpers.Segment,
module Game.Grid.Helpers.Camera,
) where
import MyPrelude
import Game
import Game.Grid.GridWorld
import Game.Grid.Helpers.Path
import Game.Grid.Helpers.Segment
import Game.Grid.Helpers.Camera
--------------------------------------------------------------------------------
--
-- | Clear the event queues: both the world-level events and the events
-- recorded on path A.
gridClearEvents :: GridWorld -> GridWorld
gridClearEvents grid =
    grid
    {
        gridPathA = pathClearEvents (gridPathA grid),
        gridEvents = []
    }
--------------------------------------------------------------------------------
--
-- | Apply a function to the world's camera.
gridModifyCamera :: GridWorld -> (Camera -> Camera) -> GridWorld
gridModifyCamera grid f =
    grid { gridCamera = f (gridCamera grid) }
-- | Apply a function to the world's tick state.
gridModifyTick :: GridWorld -> (TickT -> TickT) -> GridWorld
gridModifyTick grid f =
    grid { gridTick = f (gridTick grid) }
--------------------------------------------------------------------------------
-- CameraCommand
-- fixme: also GridEvent?
-- | True when no camera commands are queued and the command tick has been
-- reached, i.e. every issued command has finished playing.
gridCameraCmdsIsComplete :: GridWorld -> Bool
gridCameraCmdsIsComplete grid =
    (null $ gridCameraCommands grid) && (gridCameraCommandTick grid <= worldTick grid)
-- | Queue a single camera command (appended after pending ones).
gridPushCameraCmd :: GridWorld -> CameraCommand -> GridWorld
gridPushCameraCmd grid cmd =
    gridPushCameraCmds grid [cmd]
-- | Queue several camera commands, preserving their order.
gridPushCameraCmds :: GridWorld -> [CameraCommand] -> GridWorld
gridPushCameraCmds grid cmds =
    grid
    {
        gridCameraCommands = gridCameraCommands grid ++ cmds
    }
-- | Replace the whole camera command queue.
gridSetCameraCmds :: GridWorld -> [CameraCommand] -> GridWorld
gridSetCameraCmds grid cmds =
    grid
    {
        gridCameraCommands = cmds
    }
-- | Drop all queued camera commands.
gridClearCameraCmds :: GridWorld -> GridWorld
gridClearCameraCmds grid =
    gridSetCameraCmds grid []
--------------------------------------------------------------------------------
--
-- | Direction from one node towards another: the componentwise sign of
-- their difference.
dirNodeNode :: Node -> Node -> Dir
dirNodeNode node node' =
    case nodeDiff node node' of
        Node x y z -> Dir (fI $ signum x) (fI $ signum y) (fI $ signum z)
-- | Apply a 'Turn' (a 3x3 integer matrix in row-major a/b/c rows) to a
-- node, treating the node as a row vector.
turnMultNode :: Turn -> Node -> Node
turnMultNode (Turn a0 a1 a2 b0 b1 b2 c0 c1 c2) (Node n0 n1 n2) =
    Node (n0 * fI a0 + n1 * fI b0 + n2 * fI c0)
         (n0 * fI a1 + n1 * fI b1 + n2 * fI c1)
         (n0 * fI a2 + n1 * fI b2 + n2 * fI c2)
-- | Turn resulting from steering the current 'Turn' towards a direction:
-- express the direction in the turn's local frame (via the inverse turn),
-- map it to one of the six axis turns, and compose.  The zero direction is
-- treated as straight ahead; any non-axis direction is an error.
turnFromDir :: Turn -> Dir -> Turn
turnFromDir turn dir =
    let diff = case helper (turnInverse turn) dir of
            Dir 1 0 0       -> straightTurn
            Dir 0 1 0       -> upTurn
            Dir 0 0 1       -> rightTurn
            Dir (-1) 0 0    -> backTurn
            Dir 0 (-1) 0    -> downTurn
            Dir 0 0 (-1)    -> leftTurn
            Dir 0 0 0       -> straightTurn
            _               -> error "turnFromDir: no such direction"
    in  diff `mappend` turn
    where
      -- multiply a direction (row vector) by the turn matrix
      helper (Turn x0 x1 x2
                   y0 y1 y2
                   z0 z1 z2)
             (Dir d0 d1 d2) =
          Dir (x0 * d0 + y0 * d1 + z0 * d2)
              (x1 * d0 + y1 * d1 + z1 * d2)
              (x2 * d0 + y2 * d1 + z2 * d2)
|
karamellpelle/grid
|
designer/source/Game/Grid/Helpers.hs
|
gpl-3.0
| 4,054
| 0
| 14
| 1,060
| 967
| 521
| 446
| 78
| 8
|
module Plugins.Str(str) where
import Language.Haskell.TH
import Language.Haskell.TH.Quote
-- | Quasiquoter that turns its raw contents into a string literal.
-- Only expression context is supported; the pattern, type and declaration
-- positions are left 'undefined' and will crash if used.
-- (Adds the previously missing top-level type signature.)
str :: QuasiQuoter
str = QuasiQuoter {
   quoteExp = stringE
  ,quotePat = undefined
  ,quoteType = undefined
  ,quoteDec = undefined
 }
|
xpika/interpreter-haskell
|
Plugins/Str.hs
|
gpl-3.0
| 211
| 0
| 6
| 37
| 55
| 36
| 19
| 8
| 1
|
module Lambda.Utils (
removeDups,
newNameForVar,
joinBySpace
) where
import Data.Char
-- | Prepend an element unless it already occurs in the list.
addIfNotIn :: (Eq a) => a -> [a] -> [a]
addIfNotIn x xs = if x `elem` xs then xs else x : xs
-- | Remove duplicates.  Implemented as a right fold, so for each duplicated
-- element the LAST occurrence is the one kept, e.g.
-- @removeDups "abca" == "bca"@.
removeDups :: (Eq a) => [a] -> [a]
removeDups = foldr keep []
  where
    keep x seen
        | x `elem` seen = seen
        | otherwise     = x : seen
-- | True when incrementing the character's code point goes past 'z'
-- (code 122), i.e. for 'z' itself and everything above it.
stringWillOverflowZ :: Char -> Bool
stringWillOverflowZ c = ord c + 1 >= 123
-- | Successor of a variable name in a base-26-like scheme: bump the last
-- character; when it would pass 'z', recurse on the prefix and append "a"
-- (so "z" -> "aa", "az" -> "ba").  The empty name yields "a".
nextValue :: String -> String
nextValue [] = "a"
nextValue xs
    | stringWillOverflowZ $ last xs = nextValue (init xs) ++ "a"
    | otherwise = let next = chr ((+1) . ord $ last xs) in init xs ++ [next]
-- | A name not occurring among the free variables: the successor of their
-- lexicographic maximum.  The first argument is ignored.
newNameForVar :: String -> [String] -> String
newNameForVar _ fv = nextValue $ foldr max "" fv
-- | Join two strings with a single space between them.
joinBySpace :: String -> String -> String
joinBySpace left right = unwords [left, right]
|
KuroAku/lambda
|
src/Lambda/Utils.hs
|
gpl-3.0
| 720
| 0
| 14
| 158
| 329
| 172
| 157
| 22
| 1
|
{-# LANGUAGE DeriveDataTypeable, ScopedTypeVariables, GADTs, NoMonomorphismRestriction, ImplicitParams, TypeFamilies, TypeOperators, StandaloneDeriving, FlexibleContexts, FlexibleInstances, TemplateHaskell, UndecidableInstances, GeneralizedNewtypeDeriving, FunctionalDependencies, MultiParamTypeClasses, TypeSynonymInstances, ViewPatterns #-}
{-# LANGUAGE Rank2Types #-}
-- {-# OPTIONS -ddump-splices #-}
{-# OPTIONS -Wall #-}
module Tetrahedron.NormalDisc(
module Tetrahedron,
module Tetrahedron.NormalArc,
-- * Normal discs
NormalDisc,eitherND,
MakeNormalDisc(..), allNormalDiscs,
NormalDiscs(..),normalDiscList,
normalDiscsContainingNormalCorner,
normalDiscsContainingNormalArc,
adjacentNormalCorners,
-- ** GADT sillyness
DiscShape(..),
IsDiscShape(..),
getShape,
getShape1,
eitherND',
-- * Normal triangles
NormalTri(..),
NormalTris(..),
normalTriList,
-- ** Construction
allNormalTris, allNormalTris',
MakeNormalTri(..),
normalTriByNormalArc,
adjacentNormalCornersInTri,
-- ** Properties
normalTriGetVertex,
normalTriGetNormalCorners,
normalTriGetNormalCornersAsc,
normalTriGetNormalArcs,
-- * Normal quadrilaterals
NormalQuad(..), allNormalQuads, allNormalQuads',
normalQuadByDisjointEdge,
normalQuadByVertexAndTriangle,
normalQuadByNormalArc,
normalQuadsByIntersectedEdge,
normalQuadGetIntersectedEdges,
normalQuadGetDisjointEdges,
otherNormalQuads,
NormalQuads(..),normalQuadList,
adjacentNormalCornersInQuad,
normalQuadGetNormalCornersInOrder,
normalQuadGetNormalArcsInOrder,
) where
import Tetrahedron
import Control.Applicative
import Control.Arrow((&&&))
import Control.Monad
import Data.Maybe
import Data.SumType
import Element
import HomogenousTuples
import Language.Haskell.TH.Syntax
import Tetrahedron.NormalArc
import Prelude hiding(catch,lookup)
import PrettyUtil
import Quote
import Test.QuickCheck
import TupleTH
import Util
import Data.Ix
import Control.DeepSeq
import Control.DeepSeq.TH
import Language.Haskell.TH.Lift
import Data.Typeable
import ShortShow
import Data.Char
-- | A normal triangle type, identified by the tetrahedron vertex it cuts off.
newtype NormalTri = NormalTri Vertex deriving(Enum,Bounded,Eq,Ord,Arbitrary,Finite,Ix,NFData,Typeable)
instance Show NormalTri where
    showsPrec = prettyShowsPrec
-- | All four normal triangle types, as a list.
allNormalTris :: [NormalTri]
allNormalTris = asList allNormalTris'
allNormalTris' :: (NormalTri, NormalTri, NormalTri, NormalTri)
allNormalTris' = map4 NormalTri allVertices'
-- | The three normal quadrilateral types, each named by a pair of edges it
-- misses.
data NormalQuad =
    -- | The quad disjoint from the edges 'eAB' and 'eCD'.
    Q_ab |
    -- | The quad disjoint from the edges 'eAC' and 'eBD'.
    Q_ac |
    -- | The quad disjoint from the edges 'eAD' and 'eBC'.
    Q_ad
    deriving(Enum,Bounded,Eq,Ord,Show,Ix,Typeable)
deriveNFData ''NormalQuad
-- show q = "{Normal quad separating "++show v0++","++show v1++" from "++show v2++","++show v3++"}"
--     where
--         (vertices -> (v0,v1), vertices -> (v2,v3)) = normalQuadGetDisjointEdges q
instance MakeNormalDisc NormalTri where normalDisc = NormalDisc . Left
instance MakeNormalDisc NormalQuad where normalDisc = NormalDisc . Right
instance Arbitrary NormalQuad where arbitrary = elements [minBound .. maxBound]
allNormalQuads :: [NormalQuad]
allNormalQuads = asList allNormalQuads'
allNormalQuads' :: Triple NormalQuad
allNormalQuads' = (Q_ab,Q_ac,Q_ad)
-- | All seven normal disc types: the four triangles, then the three quads.
allNormalDiscs :: [NormalDisc]
allNormalDiscs = (normalDisc <$> allNormalTris) ++ (normalDisc <$> allNormalQuads)
-- Discs enumerate triangles first (0..3), then quads offset by 4.
instance Enum NormalDisc where
    toEnum x | x < 4 = NormalDisc . Left $ toEnum x
             | otherwise = NormalDisc . Right $ toEnum (x - 4)
    fromEnum = eitherND fromEnum ((+4) . fromEnum)
instance Bounded NormalDisc where
    minBound = NormalDisc . Left $ minBound
    maxBound = NormalDisc . Right $ maxBound
-- | The vertex a normal triangle cuts off.
normalTriGetVertex :: NormalTri -> Vertex
normalTriGetVertex (NormalTri x) = x
instance MakeVertex NormalTri where
    vertex = normalTriGetVertex
class MakeNormalTri a where
    normalTri :: a -> NormalTri
-- | Construct a normal triangle by the vertex it encloses
instance MakeNormalTri Vertex where
    normalTri = NormalTri
-- | Construct a normal triangle by the vertex it encloses
instance MakeNormalDisc Vertex where
    normalDisc = normalDisc . normalTri
-- | The two tetrahedron edges a quad type does not meet.
normalQuadGetDisjointEdges :: NormalQuad -> (Edge, Edge)
normalQuadGetDisjointEdges q = case q of
    Q_ab -> (edge (vA,vB),edge (vC,vD))
    Q_ac -> (edge (vA,vC),edge (vB,vD))
    Q_ad -> (edge (vA,vD),edge (vB,vC))
-- | Edges (circularly) adjacent in the returned tuple have nonempty intersections.
normalQuadGetIntersectedEdges :: NormalQuad -> Quadruple Edge
normalQuadGetIntersectedEdges q =
    case q of
        Q_ab -> go (vA,vC,vB,vD)
        Q_ac -> go (vA,vB,vC,vD)
        Q_ad -> go (vA,vB,vD,vC)
    where
        -- pair each vertex with its cyclic predecessor to form the edges
        go vs = $(zipTupleWith 4) (curry edge) (rotate4_1 vs) vs
-- | Constructs a normal quad specified by one of the two edges disjoint
-- from it (each quad is hit by two of the six edge cases below).
normalQuadByDisjointEdge :: Edge -> NormalQuad
normalQuadByDisjointEdge e = r
    where
        r | e == eAB = Q_ab
          | e == eAC = Q_ac
          | e == eAD = Q_ad
          | e == eCD = Q_ab
          | e == eBD = Q_ac
          | e == eBC = Q_ad
          | otherwise = error ("normalQuadByDisjointEdge: Unexpected edge")
-- | The two quad types other than the given one.
otherNormalQuads :: NormalQuad -> (Pair NormalQuad)
otherNormalQuads Q_ab = (Q_ac,Q_ad)
otherNormalQuads Q_ac = (Q_ab,Q_ad)
otherNormalQuads Q_ad = (Q_ab,Q_ac)
-- | Quad types that intersect an edge = all quads except the one disjoint
-- from it.
normalQuadsByIntersectedEdge :: Edge -> Pair NormalQuad
normalQuadsByIntersectedEdge = otherNormalQuads . normalQuadByDisjointEdge
--     case $(filterTuple 3) (\q -> $(elemTuple 2) e (normalQuadGetDisjointEdges q)) allNormalQuads' of
--         [q] -> q
--         xs -> error (unwords ["normalQuadByDisjointEdge",show e ++":", "impossible:", "q =",show xs])
instance Show NormalDisc where show = eitherND show show
instance Pretty NormalDisc where pretty = eitherND pretty pretty
instance Pretty NormalQuad where pretty = green . text . show
instance Pretty NormalTri where pretty = green . text . show . normalTriGetVertex
instance Quote NormalTri where quote nt = "nt" ++ show (normalTriGetVertex nt)
instance Quote NormalQuad where quote = show
-- | The normal quad type having a normal arc of type 'normalArcByTriangleAndVertex'
normalQuadByVertexAndTriangle :: Vertex -> Triangle -> NormalQuad
normalQuadByVertexAndTriangle v f = normalQuadByDisjointEdge (edgeByOppositeVertexAndTriangle v f)
-- | Returns the unique normal quad type containing a normal arc of the given type
normalQuadByNormalArc :: NormalArc -> NormalQuad
normalQuadByNormalArc na = normalQuadByVertexAndTriangle (normalArcGetVertex na) (normalArcGetTriangle na)
-- Subface relations between normal arcs/corners and discs, plus the
-- collection classes (NormalTris/NormalQuads/NormalDiscs).
instance NormalArcs NormalTri (Triple NormalArc) where
    normalArcs = normalTriGetNormalArcs
instance Edges NormalTri where
    type Eds NormalTri = (Triple NormalArc)
    edges = normalTriGetNormalArcs
instance NormalArcs NormalDisc [NormalArc] where
    normalArcs = eitherND normalArcList normalArcList
instance IsSubface NormalArc NormalTri where
    isSubface nat ntt = normalTriGetVertex ntt == normalArcGetVertex nat
instance IsSubface NormalArc NormalQuad where
    isSubface nat nqt = elem4 nat (normalArcs nqt)
instance IsSubface NormalArc NormalDisc where
    isSubface nat = eitherND (isSubface nat) (isSubface nat)
instance IsSubface NormalCorner NormalTri where
    isSubface nc nt = isVertexOfEdge (vertex nt) (edge nc)
instance IsSubface NormalCorner NormalQuad where
    isSubface nc nq = elem4 nc (normalCorners nq)
instance IsSubface NormalCorner NormalDisc where
    isSubface nat = eitherND (isSubface nat) (isSubface nat)
-- deriveCollectionKeyClass ''NormalArc
-- deriveCollectionKeyClass ''NormalCorner
-- deriveCollectionKeyClass ''NormalDisc
class AsList tuple => NormalTris a tuple | a -> tuple where
    normalTris :: a -> tuple
normalTriList :: NormalTris a b => a -> [Element b]
normalTriList = asList . normalTris
class AsList tuple => NormalQuads a tuple | a -> tuple where
    normalQuads :: a -> tuple
normalQuadList :: NormalQuads a b => a -> [Element b]
normalQuadList = asList . normalQuads
class AsList tuple => NormalDiscs a tuple | a -> tuple where
    normalDiscs :: a -> tuple
normalDiscList :: NormalDiscs a b => a -> [Element b]
normalDiscList = asList . normalDiscs
-- | Normal triangles meeting a given edge
instance NormalTris Edge (Pair NormalTri) where
    normalTris = map2 normalTri . vertices
-- | Normal triangles meeting a given edge
instance NormalTris OEdge (Pair NormalTri) where
    normalTris = map2 normalTri . vertices
-- | = normalTriGetNormalCorners
instance NormalCorners NormalTri (Triple NormalCorner) where
    normalCorners = normalTriGetNormalCorners
-- | = normalTriGetNormalCorners
instance Vertices NormalTri where
    type Verts NormalTri = Triple NormalCorner
    vertices = normalTriGetNormalCorners
-- | In order corresponding to 'edgesContainingVertex'.
normalTriGetNormalCorners :: NormalTri -> Triple NormalCorner
normalTriGetNormalCorners =
    map3 normalCorner . unAsc . edgesContainingVertexAsc . normalTriGetVertex
-- | As 'normalTriGetNormalCorners', with the ascending-order witness.
normalTriGetNormalCornersAsc :: NormalTri -> Asc3 NormalCorner
normalTriGetNormalCornersAsc =
    unsafeAsc . normalTriGetNormalCorners
-- Ascending
normalTriGetNormalArcs :: NormalTri -> Triple NormalArc
normalTriGetNormalArcs =
--     \(normalTriGetVertex -> v) -> sort3 (map3 (\t -> normalArc (t, v)) (star v (TwoSkeleton AbsTet)))
    -- DO NOT TOUCH THE ORDERING without adjusting the QuadHalf module
    \x -> map3 (uncurry normalArcByTriangleAndVertex) $ case normalTriGetVertex x of
        A -> ((tABC, vA), (tABD, vA), (tACD, vA))
        B -> ((tABC, vB), (tABD, vB), (tBCD, vB))
        C -> ((tABC, vC), (tACD, vC), (tBCD, vC))
        D -> ((tABD, vD), (tACD, vD), (tBCD, vD))
-- | Elements (circularly) adjacent in the returned tuple are adjacent corners
normalQuadGetNormalCornersInOrder
  :: NormalQuad
     -> (Quadruple NormalCorner)
normalQuadGetNormalCornersInOrder =
    --map4 normalCorner . normalQuadGetIntersectedEdges
    -- DO NOT TOUCH THE ORDERING without adjusting the QuadHalf module
    \x -> map4 normalCorner $ case x of
        Q_ab -> (eAD, eAC, eBC, eBD)
        Q_ac -> (eAD, eAB, eBC, eCD)
        Q_ad -> (eAC, eAB, eBD, eCD)
-- | All eight dihedral symmetries of a 4-tuple: the four rotations, each
-- with and without reversal.
dihedral4 :: (t, t, t, t) -> [(t, t, t, t)]
dihedral4 x = do
    x' <- [x,
           $(rotateTuple 4 1) x,
           $(rotateTuple 4 2) x,
           $(rotateTuple 4 3) x
          ]
    [x',$(reverseTuple 4) x']
-- | = 'normalQuadGetNormalCornersInOrder'
instance NormalCorners NormalQuad (Quadruple NormalCorner) where
    normalCorners = normalQuadGetNormalCornersInOrder
instance Vertices NormalQuad where
    type Verts NormalQuad = Quadruple NormalCorner
    vertices = normalQuadGetNormalCornersInOrder
-- | @i@th component of the result spans from the @i-1@th component of 'normalQuadGetNormalCornersInOrder' to its @i@th component
normalQuadGetNormalArcsInOrder :: NormalQuad -> Quadruple NormalArc
normalQuadGetNormalArcsInOrder q =
    case normalQuadGetNormalCornersInOrder q of
        cs -> map4
                normalArc
                ($(zipTuple 4) cs (rotate4_1 cs))
instance NormalArcs NormalQuad (Quadruple NormalArc) where
    normalArcs = normalQuadGetNormalArcsInOrder
-- | Returns the unique normal tri type containing a normal arc of the given type
normalTriByNormalArc :: NormalArc -> NormalTri
normalTriByNormalArc = normalTri . normalArcGetVertex
-- | The two triangle types touching a corner: one per endpoint of the
-- corner's containing edge.
normalTrisContainingNormalCorner :: NormalCorner -> Pair NormalTri
normalTrisContainingNormalCorner = map2 normalTri . vertices . normalCornerGetContainingEdge
normalQuadsContainingNormalCorner :: NormalCorner -> Pair NormalQuad
normalQuadsContainingNormalCorner = normalQuadsByIntersectedEdge . normalCornerGetContainingEdge
-- | The four disc types containing a corner: two triangles then two quads.
normalDiscsContainingNormalCorner :: NormalCorner -> Quadruple NormalDisc
normalDiscsContainingNormalCorner = liftM2 ($(catTuples 2 2))
    (map2 normalDisc . normalTrisContainingNormalCorner)
    (map2 normalDisc . normalQuadsContainingNormalCorner)
-- | The two corners adjacent to a given corner in a triangle-like thing
-- (the remaining two of its three corners); errors if not contained.
adjacentNormalCornersInTri
  :: (Eq a, Show a, Show a1, NormalCorners a1 (a, a, a)) =>
     a -> a1 -> (a, a)
adjacentNormalCornersInTri nc nt =
    fromMaybe _err
        (deleteTuple3 nc (normalCorners nt))
    where
        _err = (error (unwords ["link",show nc,show nt]
                        ++": Normal corner not contained in normal tri"))
-- | The (previous, next) corners around the quad's corner cycle; errors if
-- the corner is not on the quad.
adjacentNormalCornersInQuad
  :: NormalCorner -> NormalQuad -> (NormalCorner, NormalCorner)
adjacentNormalCornersInQuad nc nq =
    case normalQuadGetNormalCornersInOrder nq of
        (a,b,c,d)
            | nc == a -> (d,b)
            | nc == b -> (a,c)
            | nc == c -> (b,d)
            | nc == d -> (c,a)
            | otherwise ->
                error (unwords ["link",show nc,show nq]
                        ++": Normal corner not contained in normal quad")
adjacentNormalCorners
  :: NormalCorner -> NormalDisc -> (NormalCorner, NormalCorner)
adjacentNormalCorners = liftA2 eitherND adjacentNormalCornersInTri adjacentNormalCornersInQuad
-- | = 'adjacentNormalCornersInTri'
instance Link NormalCorner NormalTri (Pair NormalCorner) where
    link = adjacentNormalCornersInTri
-- | = 'adjacentNormalCornersInQuad'
instance Link NormalCorner NormalQuad (Pair NormalCorner) where
    link = adjacentNormalCornersInQuad
instance Link NormalCorner NormalDisc (Pair NormalCorner) where
    link nc = eitherND (link nc) (link nc)
instance Lift NormalTri where
    lift (NormalTri v) = [| NormalTri v |]
instance Lift NormalDisc where
    lift (NormalDisc x) = [| NormalDisc x |]
instance Quote NormalDisc where
    quotePrec prec x =
        quoteParen (prec > 10) $
            "normalDisc " ++ (eitherND (quotePrec 11) (quotePrec 11) x)
instance Finite NormalQuad
instance Finite NormalDisc
-- Functions out of these finite types can be shown pointwise.
instance Show a => Show (NormalDisc -> a) where show = showFiniteFunc "nd"
instance Show a => Show (NormalQuad -> a) where show = showFiniteFunc "q"
instance Show a => Show (NormalTri -> a) where show = showFiniteFunc "t"
-- | The (triangle, quad) disc pair containing a given arc type.
normalDiscsContainingNormalArc
  :: NormalArc -> (Pair NormalDisc)
normalDiscsContainingNormalArc =
    normalDisc . normalTriByNormalArc &&&
    normalDisc . normalQuadByNormalArc
-- | Singleton GADT witnessing whether a disc type is a triangle or a quad.
data DiscShape :: (* -> *) where
    Tri :: DiscShape NormalTri
    Quad :: DiscShape NormalQuad
class MakeNormalDisc a => IsDiscShape a where isDiscShapeProof :: DiscShape a
instance IsDiscShape NormalTri where isDiscShapeProof = Tri
instance IsDiscShape NormalQuad where isDiscShapeProof = Quad
-- | Convenience function; ignores its first arg
getShape :: IsDiscShape a => a -> DiscShape a
getShape = const isDiscShapeProof
-- | Convenience function; ignores its first arg
getShape1 :: IsDiscShape a => f a -> DiscShape a
getShape1 = const isDiscShapeProof
-- | Rank-2 case analysis: apply one shape-polymorphic function to either half.
eitherND' :: (forall a. IsDiscShape a => a -> r) -> NormalDisc -> r
eitherND' k = eitherND k k
-- 'Ix' over the sum order in which every tri precedes every quad
-- (the derived 'Ord' of the underlying 'Either').
instance Ix NormalDisc where
    -- Enumerate the range by case-splitting each bound on its shape.
    range (x,y) =
        either'
            (\xt ->
                either'
                    -- tri .. tri
                    (\yt -> map left' (range (xt, yt)))
                    -- tri .. quad: all tris from xt up, then all quads up to yq
                    (\yq -> map left' (range (xt, maxBound)) ++
                            map right' (range (minBound, yq)))
                    y)
            (\xq ->
                either'
                    -- quad lower bound but tri upper bound: empty range
                    (const [])
                    -- quad .. quad
                    (\yq -> map right' (range (xq, yq)))
                    y)
            x
    -- Position/membership follow directly from the global 'fromEnum' order.
    index (x,_) z = fromEnum z - fromEnum x
    inRange (x,y) z = x <= z && z <= y
    rangeSize (x,y) = max 0 (fromEnum y - fromEnum x + 1)
-- Both shapes delegate to the same corner-listing helper.
instance NormalCorners NormalDisc [NormalCorner] where
    normalCorners = eitherND normalCornerList normalCornerList

type instance L NormalDisc = NormalTri
type instance R NormalDisc = NormalQuad

-- | A normal disc is either a normal triangle or a normal quadrilateral.
newtype NormalDisc = NormalDisc { unNormalDisc :: Either NormalTri NormalQuad }
    deriving(Eq,Ord,Arbitrary,SubSumTy,SuperSumTy,NFData,Typeable)

-- | Case analysis on a 'NormalDisc' (analogue of 'either').
eitherND
  :: (NormalTri -> r) -> (NormalQuad -> r) -> NormalDisc -> r
eitherND kt kq = either kt kq . unNormalDisc

-- | Things that can be injected into 'NormalDisc'.
class MakeNormalDisc a where
    normalDisc :: a -> NormalDisc

deriveLift ''NormalQuad

-- NOTE(review): 'drop 2' presumably strips a constructor-name prefix from
-- the 'Show' output — confirm against the NormalQuad Show instance.
instance ShortShow NormalQuad where
    shortShow = map toUpper . drop 2 . show

instance ShortShow NormalTri where
    shortShow = shortShow . normalTriGetVertex

instance ShortShow NormalDisc where
    shortShow = eitherND shortShow shortShow

instance QuoteConstPat NormalQuad where
    quoteConstPat = show

-- Tris are quoted via their vertex; the view pattern reconstructs it.
instance QuoteConstPat NormalTri where
    quoteConstPat = quoteConstPat . normalTriGetVertex
    quoteConstPat_view _ x = "normalTriGetVertex "++x

instance Edges NormalQuad where
    type Eds NormalQuad = Quadruple NormalArc
    edges = normalQuadGetNormalArcsInOrder
|
DanielSchuessler/hstri
|
Tetrahedron/NormalDisc.hs
|
gpl-3.0
| 17,248
| 0
| 16
| 3,866
| 4,090
| 2,208
| 1,882
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
-- | Abstract syntax for a subset of the OpenSCAD solid-modelling language.
module DSL.Scad
    ( Scad(..),
      Distance(..),
      CubeSize(..),
    )
where

-- | One node of a scene tree.
data Scad = Sphere Distance
          | Cube CubeSize
            -- | height, bottom radius\/diameter, optional second
            -- radius\/diameter, and whether the shape is centered.
          | Cylinder Height Distance (Maybe Distance) Center
          | Union
          | Difference
          | Intersection
          | Translate X Y Z
          | Rotate X Y Z
          | Root deriving(Show, Read)

-- | A cube given either by a single edge length or by explicit dimensions.
data CubeSize = Size Float | Dimension Width Depth Height deriving(Show, Read)

-- | A radial measure, expressed as a radius or a diameter.
data Distance = Radius Float | Diameter Float deriving(Show, Read)

-- Plain synonyms: they document intent but add no type safety.
type Width = Float
type Height = Float
type Depth = Float
type Center = Bool
type X = Float
type Y = Float
type Z = Float
|
Norberg/gui_scad
|
src/DSL/Scad.hs
|
gpl-3.0
| 647
| 0
| 8
| 185
| 199
| 121
| 78
| 23
| 0
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
-- | A module that defines widgets that print the schematics (gates and wires) for a given formula.
module Dep.Ui.Schematics (
schematicsWidget,
schematicsTick,
sopWidget,
) where
import Control.Monad(mapM)
import Data.Bits
import Data.Function(on)
import Data.Hashable(Hashable())
import qualified Data.HashMap.Strict as HM
import Data.IORef(IORef(),readIORef)
import Data.List
import qualified Data.Text as T
import Data.Word(Word8)
import Debug.Trace
import Graphics.Vty.Attributes(MaybeDefault(SetTo),Color(ISOColor))
import Graphics.Vty.Image(Image(..),Attr(..),char,string,charFill,(<|>),(<->),emptyImage,imageWidth,imageHeight,crop)
import Graphics.Vty.Input.Events
import Graphics.Vty.Prelude
import Graphics.Vty.Widgets.All(newWidget,WidgetImpl(..),Widget(..),RenderContext(..),getState)
import Graphics.Vty.Widgets.Events
import Dep.Algorithms(calcSop)
import Dep.Printing
import Dep.Structures
import Dep.Ui.Utils(KeyContextHandler(..),handleKeyWidget,swapAttr,Decorator(..),UiDecorator(..),imageReplicate,WidgetKeyHandling(..),taplines,inboxH,mapHImg,calcRoutImg,lineLabel,vlineILabel,linC,shiftCursorWithPosition,wireAttr,highlightWireAttr)
import Dep.Ui.Utils.Scrollable(autoScrollable,ScrollSt())
import Dep.Utils(replicateFoldl1,ordNub,hashItemIndex,mapN,hashGenerator)
import Dep.Utils.IORefFun(IORefFun(),readReference)
-- Plain Int state consumes no keys.
instance WidgetKeyHandling Int
instance KeyContextHandler Int a where
    handleKeyCtx _ _ _ _ = Nothing

-- | Widget state for a schematic: the cursor position plus a reference to
-- the schematic data being displayed.
data Schmtc inr a = Schmtc { cx :: Int, cy :: Int, schematic :: IORefFun inr a }

instance Show (Schmtc inr a) where
    show x = "[Schematic "++show (cx x,cy x)++"]"

instance WidgetKeyHandling (Schmtc inr [CombElem])

-- Arrow keys (with no modifiers) move the cursor; everything else is
-- left unhandled ('Nothing') so outer handlers can see it.
instance KeyContextHandler (Schmtc inr a) b where
    handleKeyCtx KLeft [] _ s = Just $ s { cx = cx s-1}
    handleKeyCtx KRight [] _ s = Just $ s { cx = cx s+1}
    handleKeyCtx KUp [] _ s = Just $ s { cy = cy s-1}
    handleKeyCtx KDown [] _ s = Just $ s { cy = cy s+1}
    handleKeyCtx _ _ _ _ = Nothing
-- | Cursor position callback: the stored (cx, cy), shifted by the widget's
-- own position on screen.
schmGcp :: Widget (Schmtc inr a) -> IO (Maybe DisplayRegion)
schmGcp wg = do
    st <- getState wg
    shiftCursorWithPosition wg $ Just (cx st, cy st)

-- | Render callback for the SOP widget: read the current list of
-- combinatorial elements through the state's reference, draw them with
-- 'dispSop', and append the output-wire labels underneath.
displaySop :: Widget (Schmtc inr [CombElem]) -> DisplayRegion -> RenderContext -> IO Image
displaySop w d c = do
    sopRef <- getState w
    sops <- readReference $ schematic sopRef
    let (ima,oPosO) = (dispSop d c $ map (\(SOP x) -> x) sops) in return $ ima <-> vlineILabel norm (imageWidth ima) oPosO
  where norm = normalAttr c
-- | Build the image of a sum-of-products schematic (input taps, labels,
-- negations, AND array, routing, OR array, stacked top to bottom) together
-- with the positions/labels of the output wires.
dispSop :: DisplayRegion -> RenderContext -> [[[Int]]] -> (Image,[(Int,String,Attr)])
dispSop d@(iw,ih) c ts = ((tap_layer <|> lbl_layer) <-> neg_layer <-> and_layer <-> ano_layer <-> orr_layer,oPosO)
    where norm = normalAttr c
          -- Wire attributes, highlighted according to the computed value
          -- (calcValue1 for AND terms, calcValue2 for OR terms).
          wana x = highlightWireAttr norm (calcValue1 (<0) x) x
          wano x = highlightWireAttr norm (calcValue2 (<0) x) x
          ihf = inboxH (<|>) norm
          -- al: distinct AND terms; ol: per-output sorted term indices.
          al = nub $ concat ts
          ol = map sort $ indexInvert al ts
          -- NOTE(review): 'xs' appears unused in this where-block.
          xs = xused al
          aIn = ihf $ map (\ai -> (wana ai,ai)) al
          oIn = ihf $ map (\oi -> (wireAttr norm oi,oi)) ol
          aPos = snd aIn
          aPosC = concat aPos
          -- Midpoint column of each AND/OR gate's input group.
          aPosO = map (\x -> snd (head x) + div (length x-1) 2) aPos
          oPos = snd oIn
          oPosC = concat oPos
          oPosCI = map (\((_,x),y) -> (x,y)) oPosC
          oPosX = map (\x -> snd (head x) + div (length x-1) 2) oPos
          oPosO = zipWith (\x y -> (x,'f':show y,norm)) oPosX [0..]
          wdth = on max (maximum . map snd) aPosC oPosCI
          imgc = fst aIn
          imgd = fst oIn
          -- Negative literal indices get an inverter glyph ('0').
          typN i | i < 0 = ai '0'
                 | otherwise = ai '\x2502'
              where ai = char $ wana [abs i]
          -- Gate top, middle ('&' or '|') and bottom rows from line-drawing codes.
          gtU n = linC 40:replicate n (linC 137)++[linC 160]
          -- NOTE(review): this 'c' shadows the RenderContext argument.
          gtM c n = linC 34:replicate n c++[linC 34]
          gtL n = linC 10:replicate dn (linC 136)++linC 152:replicate (n-dn-1) (linC 136)++[linC 130]
              where dn = div (n-1) 2
          tap_layer = taplines norm wdth (wireInvert wana 0 aPosC)
          lbl_layer = lineLabel $ wireLabel wana 'x' $ nub $ map (abs . fst) aPosC
          neg_layer = mapHImg norm typN aPosC
          and_layer = imgc '\x2502' gtU <-> imgc '\x2502' (gtM '&') <-> imgc '\x2502' gtL
          ano_layer = calcRoutImg norm wdth $ wireMatch wana aPosO oPosC
          orr_layer = imgd '\x2502' gtU <-> imgd '\x2502' (gtM '|') <-> imgd '\x2502' gtL
-- | True iff every literal in a single AND term satisfies the predicate
-- (vacuously true for an empty term).
calcValue1 :: (Int -> Bool) -> [Int] -> Bool
calcValue1 p term = all p term

-- | True iff at least one AND term of the sum-of-products evaluates to
-- true under 'calcValue1' (false for an empty sum).
calcValue2 :: (Int -> Bool) -> [[Int]] -> Bool
calcValue2 p = any (calcValue1 p)
-- | Group the tap positions per source wire: for each first occurrence of a
-- wire, collect the columns of all taps on that wire (by absolute index)
-- and pair them with the wire's attribute, then recurse on the rest.
-- NOTE(review): the Int counter is threaded but never read — confirm it is
-- intentionally unused.
wireInvert :: ([Int] -> Attr) -> Int -> [(Int,Int)] -> [([Int],Attr)]
wireInvert lut i ((a,b):xs) = (b:map snd (filter (\(x,_) -> abs x == aba) xs),lut [aba]):wireInvert lut (i+1) (filter (\(x,_) -> abs x /= aba) xs)
    where aba = abs a
wireInvert _ _ [] = []

-- | Produce a label (prefix char + zero-based wire number) and attribute
-- for each wire index; indices are taken by absolute value.
wireLabel :: ([Int] -> Attr) -> Char -> [Int] -> [(String,Attr)]
wireLabel lut c (a:as) = (c:show (aba-1),lut [aba]):wireLabel lut c as
    where aba = abs a
wireLabel _ _ [] = []

-- | For each AND-gate output column, collect the OR-input columns routed to
-- it (those whose tag equals the running index) with the matching attribute.
-- Partial: 'head fli' assumes every index has at least one match.
wireMatch :: ([Int] -> Attr) -> [Int] -> [(([Int],Int),Int)] -> [([Int],Attr)]
wireMatch lut = wm 0
    where wm i (xa:aps) ops = (xa:map snd fli,lut $ fst $ fst $ head fli) : wm (i+1) aps (filter ((/=) i . snd . fst) ops)
              where fli = filter ((==) i . snd . fst) ops
          wm _ [] _ = []

-- | Annotate every element of the nested list with its index in the lookup
-- list (via a hash map built by 'hashItemIndex'); '$(mapN 2)' maps two
-- levels deep.
indexInvert :: (Hashable a,Eq a) => [a] -> [[a]] -> [[(a,Int)]]
indexInvert ixf = $(mapN 2) $ \x -> (x,(HM.!) (hashItemIndex 0 ixf HM.empty) x)

-- | Assign each wire group a display attribute: active groups (all
-- literals satisfy @lu@) get highlighted colors, others the base style.
attrWireHash :: Attr -> (Int -> Bool) -> [[[Int]]] -> HM.HashMap [Int] Attr
attrWireHash (Attr aa _ ac) lu xs = hashGenerator (\i a -> Attr (hli a) (SetTo $ ISOColor $ cola a i) ac) 0 hs HM.empty
    where fs = nub $ concat xs
          -- singletons for every literal, plus the full terms themselves
          gs = map (:[]) $ concat fs
          hs = nub $ gs++fs
          hl = SetTo 32
          -- distinct color cycles for active vs. inactive wires
          col True = cycle [3,6,5]
          col False = cycle [1,2,4]
          cola as i = cycle (col $ all lu as) !! i
          hli as = if all lu as then hl else aa

-- | The sorted, de-duplicated set of wire numbers used (by absolute value).
xused :: [[Int]] -> [Int]
xused = ordNub . sort . concatMap (map abs)
-- | Construct a `Widget` that renders the given list of combinatorial elements by using an AND array and OR array.
sopWidget :: IORefFun inr [CombElem] -- ^ The given reference to a list of combinatorial elements.
    -> IO (Widget (ScrollSt (Schmtc inr [CombElem]))) -- ^ The returning widget that renders the combinatorial elements as a SOP and does this inside an automatic scroll element.
sopWidget c = do
    -- Fixed-size widget: scrolling is provided by the wrapper below.
    wid <- newWidget (Schmtc 0 0 c) $ \x -> x {
        growHorizontal_ = const $ return False,
        growVertical_ = const $ return False,
        render_ = displaySop,
        keyEventHandler = handleKeyWidget,
        getCursorPosition_ = schmGcp
    }
    autoScrollable wid

-- | A Widget that diesplays the given schematics and allows the user to edit the schematics accordingly.
schematicsWidget :: [Int] -- ^ The given schematics that must be rendered.
    -> IO (Widget (ScrollSt (Decorator [Int] UiDecorator))) -- ^ The returning widget: a Scrollable with the widget renderer internally.
schematicsWidget ct = do
    -- Growable in both directions; cursor handling comes from the decorators.
    wid <- newWidget (Decorator ct [CursorX 0 3 0,CursorY 0 3 0]) $ \x -> x {
        growHorizontal_ = const $ return True,
        growVertical_ = const $ return True,
        render_ = displaySchematics,
        keyEventHandler = handleKeyWidget,
        getCursorPosition_ = const $ return Nothing
    }
    autoScrollable wid
-- | Render the schematics editor: currently just tiles the cross-hair grid
-- mask over the whole display region.
displaySchematics :: Widget (Decorator [Int] UiDecorator) -> DisplayRegion -> RenderContext -> IO Image
displaySchematics w d c = return $ imageReplicate (crossMask norm) d
    where norm = normalAttr c

-- | A constant that defines the number of spaces between two ticks for the schematics editor, must be even.
schematicsTick :: Int -- ^ The number of spaces between two ticks for the schematics editor, must be even.
schematicsTick = 2

-- | One tile of the background grid: a cross glyph surrounded by spacing,
-- sized by 'schematicsTick', suitable for tiling with 'imageReplicate'.
crossMask :: Attr -> Image
crossMask na = hl <-> (sd <|> st) <-> hl
    where n = schematicsTick
          st = char na '\x253c'
          sp = char na ' '
          sd = replicateFoldl1 (<|>) n sp
          md = replicateFoldl1 (<->) n sd
          hl = md <|> sp <|> md
|
KommuSoft/dep-software
|
Dep.Ui.Schematics.hs
|
gpl-3.0
| 8,264
| 0
| 17
| 1,997
| 3,271
| 1,769
| 1,502
| 153
| 3
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.FusionTables.Table.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates an existing table. Unless explicitly requested, only the name,
-- description, and attribution will be updated.
--
-- /See:/ <https://developers.google.com/fusiontables Fusion Tables API Reference> for @fusiontables.table.update@.
module Network.Google.Resource.FusionTables.Table.Update
(
-- * REST Resource
TableUpdateResource
-- * Creating a Request
, tableUpdate
, TableUpdate
-- * Request Lenses
, tabPayload
, tabReplaceViewDefinition
, tabTableId
) where
import Network.Google.FusionTables.Types
import Network.Google.Prelude
-- | A resource alias for @fusiontables.table.update@ method which the
-- 'TableUpdate' request conforms to.
-- PUT fusiontables/v2/tables/{tableId}?replaceViewDefinition=…&alt=json
type TableUpdateResource =
     "fusiontables" :>
       "v2" :>
         "tables" :>
           Capture "tableId" Text :>
             QueryParam "replaceViewDefinition" Bool :>
               QueryParam "alt" AltJSON :>
                 ReqBody '[JSON] Table :> Put '[JSON] Table
-- | Updates an existing table. Unless explicitly requested, only the name,
-- description, and attribution will be updated.
--
-- /See:/ 'tableUpdate' smart constructor.
data TableUpdate = TableUpdate'
    { _tabPayload :: !Table                      -- request body
    , _tabReplaceViewDefinition :: !(Maybe Bool) -- optional query parameter
    , _tabTableId :: !Text                       -- path segment
    } deriving (Eq,Show,Data,Typeable,Generic)

-- | Creates a value of 'TableUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'tabPayload'
--
-- * 'tabReplaceViewDefinition'
--
-- * 'tabTableId'
tableUpdate
    :: Table -- ^ 'tabPayload'
    -> Text -- ^ 'tabTableId'
    -> TableUpdate
tableUpdate pTabPayload_ pTabTableId_ =
    TableUpdate'
    { _tabPayload = pTabPayload_
    , _tabReplaceViewDefinition = Nothing
    , _tabTableId = pTabTableId_
    }

-- | Multipart request metadata.
tabPayload :: Lens' TableUpdate Table
tabPayload
  = lens _tabPayload (\ s a -> s{_tabPayload = a})

-- | Whether the view definition is also updated. The specified view
-- definition replaces the existing one. Only a view can be updated with a
-- new definition.
tabReplaceViewDefinition :: Lens' TableUpdate (Maybe Bool)
tabReplaceViewDefinition
  = lens _tabReplaceViewDefinition
      (\ s a -> s{_tabReplaceViewDefinition = a})

-- | ID of the table that is being updated.
tabTableId :: Lens' TableUpdate Text
tabTableId
  = lens _tabTableId (\ s a -> s{_tabTableId = a})
-- Wires the record fields onto the 'TableUpdateResource' route.
instance GoogleRequest TableUpdate where
        type Rs TableUpdate = Table
        type Scopes TableUpdate =
             '["https://www.googleapis.com/auth/fusiontables"]
        requestClient TableUpdate'{..}
          = go _tabTableId _tabReplaceViewDefinition
              (Just AltJSON)
              _tabPayload
              fusionTablesService
          where go
                  = buildClient (Proxy :: Proxy TableUpdateResource)
                      mempty
|
rueshyna/gogol
|
gogol-fusiontables/gen/Network/Google/Resource/FusionTables/Table/Update.hs
|
mpl-2.0
| 3,747
| 0
| 14
| 869
| 466
| 279
| 187
| 72
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.IAM.GetContextKeysForCustomPolicy
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Gets a list of all of the context keys referenced in 'Condition'
-- elements in the input policies. The policies are supplied as a list of
-- one or more strings. To get the context keys from policies associated
-- with an IAM user, group, or role, use GetContextKeysForPrincipalPolicy.
--
-- Context keys are variables maintained by AWS and its services that
-- provide details about the context of an API query request, and can be
-- evaluated by using the 'Condition' element of an IAM policy. Use
-- GetContextKeysForCustomPolicy to understand what key names and values
-- you must supply when you call SimulateCustomPolicy. Note that all
-- parameters are shown in unencoded form here for clarity, but must be URL
-- encoded to be included as a part of a real HTML request.
--
-- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_GetContextKeysForCustomPolicy.html AWS API Reference> for GetContextKeysForCustomPolicy.
module Network.AWS.IAM.GetContextKeysForCustomPolicy
(
-- * Creating a Request
getContextKeysForCustomPolicy
, GetContextKeysForCustomPolicy
-- * Request Lenses
, gckfcpPolicyInputList
-- * Destructuring the Response
, getContextKeysForPolicyResponse
, GetContextKeysForPolicyResponse
-- * Response Lenses
, gckfpContextKeyNames
) where
import Network.AWS.IAM.Types
import Network.AWS.IAM.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'getContextKeysForCustomPolicy' smart constructor.
newtype GetContextKeysForCustomPolicy = GetContextKeysForCustomPolicy'
    { _gckfcpPolicyInputList :: [Text]  -- policy documents to inspect
    } deriving (Eq,Read,Show,Data,Typeable,Generic)

-- | Creates a value of 'GetContextKeysForCustomPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gckfcpPolicyInputList'
getContextKeysForCustomPolicy
    :: GetContextKeysForCustomPolicy
getContextKeysForCustomPolicy =
    GetContextKeysForCustomPolicy'
    { _gckfcpPolicyInputList = mempty
    }

-- | A list of policies for which you want list of context keys used in
-- 'Condition' elements.
gckfcpPolicyInputList :: Lens' GetContextKeysForCustomPolicy [Text]
gckfcpPolicyInputList = lens _gckfcpPolicyInputList (\ s a -> s{_gckfcpPolicyInputList = a}) . _Coerce;
-- POST query request; the response body is the wrapped XML result element.
instance AWSRequest GetContextKeysForCustomPolicy
         where
        type Rs GetContextKeysForCustomPolicy =
             GetContextKeysForPolicyResponse
        request = postQuery iAM
        response
          = receiveXMLWrapper
              "GetContextKeysForCustomPolicyResult"
              (\ s h x -> parseXML x)

instance ToHeaders GetContextKeysForCustomPolicy
         where
        toHeaders = const mempty

instance ToPath GetContextKeysForCustomPolicy where
        toPath = const "/"

-- Serialises the action name, API version and the policy list as
-- form/query parameters.
instance ToQuery GetContextKeysForCustomPolicy where
        toQuery GetContextKeysForCustomPolicy'{..}
          = mconcat
              ["Action" =:
                 ("GetContextKeysForCustomPolicy" :: ByteString),
               "Version" =: ("2010-05-08" :: ByteString),
               "PolicyInputList" =:
                 toQueryList "member" _gckfcpPolicyInputList]
|
olorin/amazonka
|
amazonka-iam/gen/Network/AWS/IAM/GetContextKeysForCustomPolicy.hs
|
mpl-2.0
| 3,979
| 0
| 10
| 803
| 356
| 223
| 133
| 51
| 1
|
-- This file is part of khph.
--
-- Copyright 2016 Bryan Gardiner <bog@khumba.net>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
-- You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE CPP, OverloadedStrings #-}
-- | Project configuration data types and parser.
module Khph.Config (
Config,
defaultConfig,
configSourceDirs,
configIgnoredPaths,
readProjectFile,
) where
#if !MIN_VERSION_base(4,8,0)
import Control.Applicative ((<$>))
#endif
import Control.Monad (unless, when)
import Data.Aeson.Types (typeMismatch)
import qualified Data.HashMap.Strict as HashMap
import qualified Data.Set as Set
import Data.Set (Set)
import Data.Text (Text)
import Data.Yaml (
(.:?),
(.!=),
FromJSON (parseJSON),
Object,
ParseException (UnexpectedEvent, _received, _expected),
Parser,
Value (Object),
decodeFileEither,
)
import Khph.Project.Base
import Khph.Util
import System.FilePath (isAbsolute)
import Text.Libyaml (Event (EventDocumentStart, EventStreamEnd))
-- | Project configuration as read from the project file.
data Config = Config
    { configSourceDirs :: Set ProjectPath   -- ^ relative source directories
    , configIgnoredPaths :: [ProjectPath]   -- ^ relative paths to ignore
    } deriving (Show)
-- | Fallback configuration used when the config file is empty or absent:
-- no source directories, nothing ignored.
defaultConfig :: Config
defaultConfig =
  Config
  { configSourceDirs = Set.empty
  , configIgnoredPaths = []
  }
-- | Parse a 'Config' from a YAML mapping.  Recognised keys are
-- @sourceDirs@ and @ignore@ (anything else fails via 'expectKeys'); both
-- default to the empty list.  Trailing path separators are stripped and
-- absolute paths are rejected with a descriptive error.
instance FromJSON Config where
  parseJSON (Object o) = do
    [sourceDirsKey, ignoredPathsKey] <- expectKeys "Config" o ["sourceDirs", "ignore"]
    sourceDirs <- map stripTrailingPathSeparators <$> o .:? sourceDirsKey .!= []
    ignoredPaths <- map stripTrailingPathSeparators <$> o .:? ignoredPathsKey .!= []
    let absoluteSourceDirs = filter isAbsolute sourceDirs
    -- 'unless (null …)' is the idiomatic form of 'when (not $ null …)'.
    unless (null absoluteSourceDirs) $
      fail $ concat
      ["sourceDirs: Source directories must be relative paths (to the project root): ",
       show absoluteSourceDirs]
    let absoluteIgnoredPaths = filter isAbsolute ignoredPaths
    unless (null absoluteIgnoredPaths) $
      fail $ concat
      ["ignore: Ignored paths must be relative paths (to the project root): ",
       show absoluteIgnoredPaths]
    return Config
      { configSourceDirs = Set.fromList $ map toProjectPath sourceDirs
      , configIgnoredPaths = map toProjectPath ignoredPaths
      }
  parseJSON value = typeMismatch "Config" value
-- | Read and parse the project file at the given path.  An empty YAML
-- document (stream end where a document start was expected) is treated as
-- 'defaultConfig' rather than an error; any other parse failure is
-- returned as a 'Left' with the error rendered via 'show'.
readProjectFile :: FilePath -> IO (Either String Config)
readProjectFile path =
  either checkError Right <$> decodeFileEither path
  where checkError err =
          let isEmptyFile = case err of
                UnexpectedEvent {_received = r, _expected = e} ->
                  r == Just EventStreamEnd && e == Just EventDocumentStart
                _ -> False
          in if isEmptyFile then Right defaultConfig else Left $ show err
-- | Returns the given list of keys, but first checks that an object doesn't
-- contain more keys than those in the list. If it does, then the parse is
-- failed with a message containing the extraneous keys.
expectKeys :: String -> Object -> [Text] -> Parser [Text]
expectKeys typeName o keys = do
  -- set difference: keys present in the object but not in the whitelist
  let extraKeys = Set.fromList (HashMap.keys o) `Set.difference` Set.fromList keys
  unless (Set.null extraKeys) $
    fail $ typeName ++ " has unknown keys: " ++ show (Set.toList extraKeys)
  return keys
|
khumba/khph
|
src/Khph/Config.hs
|
agpl-3.0
| 3,745
| 0
| 17
| 732
| 756
| 418
| 338
| 67
| 3
|
{-# LANGUAGE OverloadedStrings #-}
module Haskoin.TransactionSpec (spec) where
import qualified Data.ByteString as B
import Data.Bytes.Get
import Data.Bytes.Put
import Data.Bytes.Serial
import Data.Either
import Data.Maybe
import Data.String (fromString)
import Data.String.Conversions
import Data.Text (Text)
import Data.Word (Word32, Word64)
import Haskoin.Address
import Haskoin.Constants
import Haskoin.Keys
import Haskoin.Script
import Haskoin.Transaction
import Haskoin.Util
import Haskoin.Util.Arbitrary
import Test.HUnit
import Test.Hspec
import Test.Hspec.QuickCheck
import Test.QuickCheck
-- Generators whose binary (Serial) round-trip is checked by testIdentity.
serialVals :: [SerialBox]
serialVals =
    [ SerialBox $ arbitraryTx =<< arbitraryNetwork
    , SerialBox $ arbitraryWitnessTx =<< arbitraryNetwork
    , SerialBox $ arbitraryLegacyTx =<< arbitraryNetwork
    , SerialBox $ arbitraryTxIn =<< arbitraryNetwork
    , SerialBox $ arbitraryTxOut =<< arbitraryNetwork
    , SerialBox arbitraryOutPoint
    ]

-- Generators whose Read/Show round-trip is checked by testIdentity.
readVals :: [ReadBox]
readVals =
    [ ReadBox arbitraryTxHash
    , ReadBox $ arbitraryTx =<< arbitraryNetwork
    , ReadBox $ arbitraryTxIn =<< arbitraryNetwork
    , ReadBox $ arbitraryTxOut =<< arbitraryNetwork
    , ReadBox arbitraryOutPoint
    ]

-- Generators whose JSON round-trip is checked by testIdentity.
jsonVals :: [JsonBox]
jsonVals =
    [ JsonBox arbitraryTxHash
    , JsonBox $ arbitraryTx =<< arbitraryNetwork
    , JsonBox $ arbitraryWitnessTx =<< arbitraryNetwork
    , JsonBox $ arbitraryLegacyTx =<< arbitraryNetwork
    , JsonBox $ arbitraryTxIn =<< arbitraryNetwork
    , JsonBox $ arbitraryTxOut =<< arbitraryNetwork
    , JsonBox arbitraryOutPoint
    ]
-- | Top-level test tree: identity round-trips, QuickCheck properties over
-- arbitrary networks, and fixed test vectors.
spec :: Spec
spec = do
    testIdentity serialVals readVals jsonVals []
    describe "Transaction properties" $ do
        prop "decode and encode txid" $
            forAll arbitraryTxHash $ \h -> hexToTxHash (txHashToHex h) == Just h
        prop "from string transaction id" $
            forAll arbitraryTxHash $ \h -> fromString (cs $ txHashToHex h) == h
        prop "building address tx" $
            forAll arbitraryNetwork $ \net ->
            forAll arbitraryAddress $
            forAll (arbitrarySatoshi net) . testBuildAddrTx net
        prop "guess transaction size" $
            forAll arbitraryNetwork $ \net ->
            forAll (arbitraryAddrOnlyTxFull net) (testGuessSize net)
        prop "choose coins" $
            forAll arbitraryNetwork $ \net ->
            forAll (listOf (arbitrarySatoshi net)) testChooseCoins
        prop "choose multisig coins" $
            forAll arbitraryNetwork $ \net ->
            forAll arbitraryMSParam $
            forAll (listOf (arbitrarySatoshi net)) . testChooseMSCoins
        prop "sign and validate transaction" $
            forAll arbitraryNetwork $ \net ->
            forAll (arbitrarySigningData net) (testDetSignTx net)
        prop "sign and validate (nested) transaction" $
            forAll arbitraryNetwork $ \net ->
            forAll (arbitrarySigningData net) (testDetSignNestedTx net)
        prop "merge partially signed transactions" $
            forAll arbitraryNetwork $ \net ->
            property $ forAll (arbitraryPartialTxs net) (testMergeTx net)
    describe "Transaction vectors" $ do
        it "compute txid from tx" $ mapM_ testTxidVector txidVectors
        it "build pkhash transaction (generated from bitcoind)" $
            mapM_ testPKHashVector pkHashVectors
-- Txid Vectors

-- | Check that deserialising the hex-encoded raw transaction and hashing it
-- reproduces the expected txid (both hex 'Text').
testTxidVector :: (Text, Text) -> Assertion
testTxidVector (tid, tx) =
    assertEqual "txid" (Just tid) (txHashToHex . txHash <$> txM)
  where
    txM = eitherToMaybe . runGetS deserialize =<< decodeHex tx

-- (expected txid, raw transaction hex) pairs taken from mainnet.
txidVectors :: [(Text, Text)]
txidVectors =
    [ ( "23b397edccd3740a74adb603c9756370fafcde9bcc4483eb271ecad09a94dd63"
      , "0100000001b14bdcbc3e01bdaad36cc08e81e69c82e1060bc14e518db2b49aa4\
        \3ad90ba26000000000490047304402203f16c6f40162ab686621ef3000b04e75\
        \418a0c0cb2d8aebeac894ae360ac1e780220ddc15ecdfc3507ac48e1681a33eb\
        \60996631bf6bf5bc0a0682c4db743ce7ca2b01ffffffff0140420f0000000000\
        \1976a914660d4ef3a743e3e696ad990364e555c271ad504b88ac00000000"
      )
    , ( "c99c49da4c38af669dea436d3e73780dfdb6c1ecf9958baa52960e8baee30e73"
      , "01000000010276b76b07f4935c70acf54fbf1f438a4c397a9fb7e633873c4dd3\
        \bc062b6b40000000008c493046022100d23459d03ed7e9511a47d13292d3430a\
        \04627de6235b6e51a40f9cd386f2abe3022100e7d25b080f0bb8d8d5f878bba7\
        \d54ad2fda650ea8d158a33ee3cbd11768191fd004104b0e2c879e4daf7b9ab68\
        \350228c159766676a14f5815084ba166432aab46198d4cca98fa3e9981d0a90b\
        \2effc514b76279476550ba3663fdcaff94c38420e9d5000000000100093d0000\
        \0000001976a9149a7b0f3b80c6baaeedce0a0842553800f832ba1f88ac000000\
        \00"
      )
    , ( "f7fdd091fa6d8f5e7a8c2458f5c38faffff2d3f1406b6e4fe2c99dcc0d2d1cbb"
      , "01000000023d6cf972d4dff9c519eff407ea800361dd0a121de1da8b6f4138a2\
        \f25de864b4000000008a4730440220ffda47bfc776bcd269da4832626ac332ad\
        \fca6dd835e8ecd83cd1ebe7d709b0e022049cffa1cdc102a0b56e0e04913606c\
        \70af702a1149dc3b305ab9439288fee090014104266abb36d66eb4218a6dd31f\
        \09bb92cf3cfa803c7ea72c1fc80a50f919273e613f895b855fb7465ccbc8919a\
        \d1bd4a306c783f22cd3227327694c4fa4c1c439affffffff21ebc9ba20594737\
        \864352e95b727f1a565756f9d365083eb1a8596ec98c97b7010000008a473044\
        \0220503ff10e9f1e0de731407a4a245531c9ff17676eda461f8ceeb8c06049fa\
        \2c810220c008ac34694510298fa60b3f000df01caa244f165b727d4896eb84f8\
        \1e46bcc4014104266abb36d66eb4218a6dd31f09bb92cf3cfa803c7ea72c1fc8\
        \0a50f919273e613f895b855fb7465ccbc8919ad1bd4a306c783f22cd32273276\
        \94c4fa4c1c439affffffff01f0da5200000000001976a914857ccd42dded6df3\
        \2949d4646dfa10a92458cfaa88ac00000000"
      )
    , ( "afd9c17f8913577ec3509520bd6e5d63e9c0fd2a5f70c787993b097ba6ca9fae"
      , "010000000370ac0a1ae588aaf284c308d67ca92c69a39e2db81337e563bf40c5\
        \9da0a5cf63000000006a4730440220360d20baff382059040ba9be98947fd678\
        \fb08aab2bb0c172efa996fd8ece9b702201b4fb0de67f015c90e7ac8a193aeab\
        \486a1f587e0f54d0fb9552ef7f5ce6caec032103579ca2e6d107522f012cd00b\
        \52b9a65fb46f0c57b9b8b6e377c48f526a44741affffffff7d815b6447e35fbe\
        \a097e00e028fb7dfbad4f3f0987b4734676c84f3fcd0e804010000006b483045\
        \022100c714310be1e3a9ff1c5f7cacc65c2d8e781fc3a88ceb063c6153bf9506\
        \50802102200b2d0979c76e12bb480da635f192cc8dc6f905380dd4ac1ff35a4f\
        \68f462fffd032103579ca2e6d107522f012cd00b52b9a65fb46f0c57b9b8b6e3\
        \77c48f526a44741affffffff3f1f097333e4d46d51f5e77b53264db8f7f5d2e1\
        \8217e1099957d0f5af7713ee010000006c493046022100b663499ef73273a378\
        \8dea342717c2640ac43c5a1cf862c9e09b206fcb3f6bb8022100b09972e75972\
        \d9148f2bdd462e5cb69b57c1214b88fc55ca638676c07cfc10d8032103579ca2\
        \e6d107522f012cd00b52b9a65fb46f0c57b9b8b6e377c48f526a44741affffff\
        \ff0380841e00000000001976a914bfb282c70c4191f45b5a6665cad1682f2c9c\
        \fdfb88ac80841e00000000001976a9149857cc07bed33a5cf12b9c5e0500b675\
        \d500c81188ace0fd1c00000000001976a91443c52850606c872403c0601e69fa\
        \34b26f62db4a88ac00000000"
      )
    ]
-- Build address transactions vectors generated from bitcoin-core raw tx API

-- | Build a pay-to-pubkey-hash transaction from (txid, vout) inputs and
-- (address, value) outputs, serialise it, and compare to the expected hex.
testPKHashVector :: ([(Text, Word32)], [(Text, Word64)], Text) -> Assertion
testPKHashVector (is, os, res) =
    assertEqual
        "Build PKHash Tx"
        (Right res)
        (encodeHex . runPutS . serialize <$> txE)
  where
    txE = buildAddrTx btc (map f is) os
    f (tid, ix) = OutPoint (fromJust $ hexToTxHash tid) ix

-- (inputs, outputs, expected serialised tx hex); includes empty-input and
-- empty-output cases.
pkHashVectors :: [([(Text, Word32)], [(Text, Word64)], Text)]
pkHashVectors =
    [ ( [ ( "eb29eba154166f6541ebcc9cbdf5088756e026af051f123bcfb526df594549db"
          , 14)
        ]
      , [("14LsRquZfURNFrzpcLVGdaHTfAPjjwiSPb", 90000000)]
      , "0100000001db494559df26b5cf3b121f05af26e0568708f5bd9ccceb41656f1654\
        \a1eb29eb0e00000000ffffffff01804a5d05000000001976a91424aa604689cc58\
        \2292b97668bedd91dd5bf9374c88ac00000000")
    , ( [ ( "eb29eba154166f6541ebcc9cbdf5088756e026af051f123bcfb526df594549db"
          , 0)
        , ( "0001000000000000000000000000000000000000000000000000000000000000"
          , 2147483647)
        ]
      , [ ("14LsRquZfURNFrzpcLVGdaHTfAPjjwiSPb", 1)
        , ("19VCgS642vzEA1sdByoSn6GsWBwraV8D4n", 2100000000000000)
        ]
      , "0100000002db494559df26b5cf3b121f05af26e0568708f5bd9ccceb41656f1654\
        \a1eb29eb0000000000ffffffff0000000000000000000000000000000000000000\
        \000000000000000000000100ffffff7f00ffffffff0201000000000000001976a9\
        \1424aa604689cc582292b97668bedd91dd5bf9374c88ac0040075af07507001976\
        \a9145d16672f53981ff21c5f42b40d1954993cbca54f88ac00000000")
    , ( [ ( "eb29eba154166f6541ebcc9cbdf5088756e026af051f123bcfb526df594549db"
          , 0)
        , ( "0001000000000000000000000000000000000000000000000000000000000000"
          , 2147483647)
        ]
      , []
      , "0100000002db494559df26b5cf3b121f05af26e0568708f5bd9ccceb41656f1654a\
        \1eb29eb0000000000ffffffff000000000000000000000000000000000000000000\
        \0000000000000000000100ffffff7f00ffffffff0000000000")
    , ( []
      , [ ("14LsRquZfURNFrzpcLVGdaHTfAPjjwiSPb", 1)
        , ("19VCgS642vzEA1sdByoSn6GsWBwraV8D4n", 2100000000000000)
        ]
      , "01000000000201000000000000001976a91424aa604689cc582292b97668bedd91d\
        \d5bf9374c88ac0040075af07507001976a9145d16672f53981ff21c5f42b40d1954\
        \993cbca54f88ac00000000")
    ]
-- Transaction Properties --

-- | Property: a transaction built for the given address pays to the
-- expected script (P2PKH for pubkey addresses, P2SH for script addresses).
-- The generator is expected to produce only these two address kinds; any
-- other kind aborts with a descriptive error instead of a bare 'undefined'
-- so property failures are diagnosable.
testBuildAddrTx :: Network -> Address -> TestCoin -> Bool
testBuildAddrTx net a (TestCoin v)
    | isPubKeyAddress a = Right (PayPKHash (getAddrHash160 a)) == out
    | isScriptAddress a = Right (PayScriptHash (getAddrHash160 a)) == out
    | otherwise = error "testBuildAddrTx: unexpected address type"
  where
    tx = buildAddrTx net [] [(fromJust (addrToText net a), v)]
    out =
        decodeOutputBS $
        scriptOutput $
        head $ txOut (fromRight (error "Could not build transaction") tx)
-- We compute an upper bound but it should be close enough to the real size
-- We give 2 bytes of slack on every signature (1 on r and 1 on s)
testGuessSize :: Network -> Tx -> Bool
testGuessSize net tx =
    guess >= len && guess <= len + 2 * delta
  where
    -- one signature per P2PKH input plus each multisig's required count
    delta = pki + sum (map fst msi)
    guess = guessTxSize pki msi pkout msout
    len = B.length $ runPutS $ serialize tx
    ins = map f $ txIn tx
    f i =
        fromRight (error "Could not decode input") $
        decodeInputBS net $ scriptInput i
    pki = length $ filter isSpendPKHash ins
    msi = concatMap shData ins
    -- (required sigs, total keys) for each P2SH multisig input
    shData (ScriptHashInput _ (PayMulSig keys r)) = [(r, length keys)]
    shData _ = []
    out =
        map
            (fromRight (error "Could not decode transaction output") .
             decodeOutputBS . scriptOutput) $
        txOut tx
    pkout = length $ filter isPayPKHash out
    msout = length $ filter isPayScriptHash out
-- | Property for 'chooseCoins': on success the chosen coins cover
-- target + change + fee exactly; on failure either the target was zero or
-- the available total could not cover target + fee.
testChooseCoins :: [TestCoin] -> Word64 -> Word64 -> Int -> Property
testChooseCoins coins target byteFee nOut = nOut >= 0 ==>
    case chooseCoins target byteFee nOut True coins of
        Right (chosen, change) ->
            let outSum = sum $ map coinValue chosen
                fee = guessTxFee byteFee nOut (length chosen)
            in outSum == target + change + fee
        Left _ ->
            let fee = guessTxFee byteFee nOut (length coins)
            in target == 0 || s < target + fee
  where
    s = sum $ map coinValue coins

-- | Same accounting property for 'chooseMSCoins', with the fee estimated
-- for an (m-of-n) multisig spend.
testChooseMSCoins :: (Int, Int) -> [TestCoin]
                  -> Word64 -> Word64 -> Int -> Property
testChooseMSCoins (m, n) coins target byteFee nOut = nOut >= 0 ==>
    case chooseMSCoins target byteFee (m,n) nOut True coins of
        Right (chosen,change) ->
            let outSum = sum $ map coinValue chosen
                fee = guessMSTxFee byteFee (m,n) nOut (length chosen)
            in outSum == target + change + fee
        Left _ ->
            let fee = guessMSTxFee byteFee (m,n) nOut (length coins)
            in target == 0 || s < target + fee
  where
    s = sum $ map coinValue coins
{- Signing Transactions -}
testDetSignTx :: Network -> (Tx, [SigInput], [SecKeyI]) -> Bool
testDetSignTx net (tx, sigis, prv) =
not (verifyStdTx net tx verData) &&
not (verifyStdTx net txSigP verData) &&
verifyStdTx net txSigC verData
where
txSigP =
fromRight (error "Could not decode transaction") $
signTx net tx sigis (map secKeyData (tail prv))
txSigC =
fromRight (error "Could not decode transaction") $
signTx net txSigP sigis [secKeyData (head prv)]
verData = map (\(SigInput s v o _ _) -> (s, v, o)) sigis
testDetSignNestedTx :: Network -> (Tx, [SigInput], [SecKeyI]) -> Bool
testDetSignNestedTx net (tx, sigis, prv) =
not (verifyStdTx net tx verData) &&
not (verifyStdTx net txSigP verData) &&
verifyStdTx net txSigC verData
where
txSigP =
fromRight (error "Could not decode transaction") $
signNestedWitnessTx net tx sigis (secKeyData <$> tail prv)
txSigC =
fromRight (error "Could not decode transaction") $
signNestedWitnessTx net txSigP sigis [secKeyData (head prv)]
verData = handleSegwit <$> sigis
handleSegwit (SigInput s v o _ _)
| isSegwit s = (toP2SH $ encodeOutput s, v, o)
| otherwise = (s, v, o)
testMergeTx :: Network -> ([Tx], [(ScriptOutput, Word64, OutPoint, Int, Int)]) -> Bool
testMergeTx net (txs, os) =
and [ isRight mergeRes
, length (txIn mergedTx) == length os
, if enoughSigs
then isValid
else not isValid
-- Signature count == min (length txs) (sum required signatures)
, sum (map snd sigMap) == min (length txs) (sum (map fst sigMap))
]
where
outs = map (\(so, val, op, _, _) -> (so, val, op)) os
mergeRes = mergeTxs net txs outs
mergedTx = fromRight (error "Could not merge") mergeRes
isValid = verifyStdTx net mergedTx outs
enoughSigs = all (\(m, c) -> c >= m) sigMap
sigMap =
map (\((_, _, _, m, _), inp) -> (m, sigCnt inp)) $
zip os $ txIn mergedTx
sigCnt inp =
case decodeInputBS net $ scriptInput inp of
Right (RegularInput (SpendMulSig sigs)) -> length sigs
Right (ScriptHashInput (SpendMulSig sigs) _) -> length sigs
_ -> error "Invalid input script type"
|
haskoin/haskoin
|
test/Haskoin/TransactionSpec.hs
|
unlicense
| 14,657
| 0
| 18
| 3,454
| 2,967
| 1,562
| 1,405
| 229
| 4
|
-- 100
import Data.Ratio((%), denominator)
import Euler(digitUsage)
kk = 2
-- must have a single digit in common
-- two digits in common would either be one or cancel to one
-- that digit cannot be zero
-- remaining fraction cannot divide by zero
isReduceable n d
| sum cc /= 1 || cc !! 0 == 1 || d2 == 0 = False
| otherwise = n % d == n2 % d2
where nn = digitUsage n
dd = digitUsage d
cc = zipWith min nn dd
n2 = length $ takeWhile (==0) $ zipWith (-) nn cc
d2 = length $ takeWhile (==0) $ zipWith (-) dd cc
calcFracResult k = denominator $ product
[n % d | n <- [10^(k-1)..10^k-1], d <- [n+1..10^k-1], isReduceable n d]
main = putStrLn $ show $ calcFracResult kk
|
higgsd/euler
|
hs/33.hs
|
bsd-2-clause
| 729
| 7
| 14
| 202
| 297
| 153
| 144
| 14
| 1
|
{-# LANGUAGE DataKinds #-}
module Parse.Pattern (term, expr) where
import Parse.ParsecAdapter ((<|>), (<?>), char, choice, optionMaybe, try)
import AST.V0_16
import AST.Structure
import qualified Data.Indexed as I
import ElmVersion
import Parse.Helpers
import qualified Parse.Literal as Literal
import Reporting.Annotation (Located)
import qualified Reporting.Annotation as A
import Parse.IParser
import Parse.Whitespace
import qualified Parse.ParsecAdapter as Parsec
basic :: ElmVersion -> IParser (ASTNS Located [UppercaseIdentifier] 'PatternNK)
basic elmVersion =
fmap I.Fix $ addLocation $
choice
[ char '_' >> return Anything
, VarPattern <$> lowVar elmVersion
, chunksToPattern <$> dotSep1 (capVar elmVersion)
, LiteralPattern <$> Literal.literal
]
where
chunksToPattern chunks =
case reverse chunks of
[UppercaseIdentifier "True"] ->
LiteralPattern (Boolean True)
[UppercaseIdentifier "False"] ->
LiteralPattern (Boolean False)
(last:rest) ->
DataPattern (reverse rest, last) []
[] -> error "dotSep1 returned empty list"
asPattern :: ElmVersion -> IParser (FixAST Located typeRef ctorRef varRef 'PatternNK) -> IParser (FixAST Located typeRef ctorRef varRef 'PatternNK)
asPattern elmVersion patternParser =
do (start, pattern, _) <- located patternParser
maybeAlias <- optionMaybe asAlias
case maybeAlias of
Just (postPattern, alias) ->
do end <- Parsec.getPosition
return $ I.Fix $ A.at start end $ Alias (C postPattern pattern) alias
Nothing ->
return pattern
where
asAlias =
do preAs <- try (whitespace <* reserved elmVersion "as")
postAs <- whitespace
var <- lowVar elmVersion
return (preAs, C postAs var)
record :: ElmVersion -> IParser (FixAST Located typeRef ctorRef varRef 'PatternNK)
record elmVersion =
fmap I.Fix $ addLocation $
do
result <- surround'' '{' '}' (lowVar elmVersion)
return $
case result of
Left comments ->
EmptyRecordPattern comments
Right fields ->
RecordPattern fields
tuple :: ElmVersion -> IParser (ASTNS Located [UppercaseIdentifier] 'PatternNK)
tuple elmVersion =
do (start, patterns, end) <- located $ parens'' (expr elmVersion)
return $
case patterns of
Left comments ->
I.Fix $ A.at start end $ UnitPattern comments
Right [] ->
I.Fix $ A.at start end $ UnitPattern []
Right [C ([], []) pattern] ->
pattern
Right [pattern] ->
I.Fix $ A.at start end $ PatternParens pattern
Right patterns ->
I.Fix $ A.at start end $ TuplePattern patterns
list :: ElmVersion -> IParser (ASTNS Located [UppercaseIdentifier] 'PatternNK)
list elmVersion =
fmap I.Fix $ addLocation $
do
result <- braces'' (expr elmVersion)
return $
case result of
Left comments ->
EmptyListPattern comments
Right patterns ->
ListPattern patterns
term :: ElmVersion -> IParser (ASTNS Located [UppercaseIdentifier] 'PatternNK)
term elmVersion =
choice [ record elmVersion, tuple elmVersion, list elmVersion, basic elmVersion ]
<?> "a pattern"
patternConstructor :: ElmVersion -> IParser (ASTNS Located [UppercaseIdentifier] 'PatternNK)
patternConstructor elmVersion =
fmap I.Fix $ addLocation $
do v <- dotSep1 (capVar elmVersion)
case reverse v of
[UppercaseIdentifier "True"] -> return $ LiteralPattern (Boolean True)
[UppercaseIdentifier "False"] -> return $ LiteralPattern (Boolean False)
(last:rest) -> DataPattern (reverse rest, last) <$> spacePrefix (term elmVersion)
[] -> error "dotSep1 returned empty list"
expr :: ElmVersion -> IParser (ASTNS Located [UppercaseIdentifier] 'PatternNK)
expr elmVersion =
asPattern elmVersion subPattern <?> "a pattern"
where
subPattern =
do
result <- separated cons (patternConstructor elmVersion <|> term elmVersion)
return $
case result of
Left pattern ->
pattern
Right (region, first, rest, _) ->
I.Fix $ A.At region $ ConsPattern first rest
|
avh4/elm-format
|
elm-format-lib/src/Parse/Pattern.hs
|
bsd-3-clause
| 4,392
| 0
| 16
| 1,213
| 1,346
| 668
| 678
| 108
| 5
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
module EFA.Test.Sweep where
import qualified EFA.Application.Optimisation.Sweep as Sweep
import qualified EFA.Equation.Arithmetic as Arith
import EFA.Equation.Arithmetic ((~+), (~-), (~*), (~/))
import EFA.TestUtility (Func(Func))
import Test.QuickCheck.All (quickCheckAll)
import qualified Data.Vector.Unboxed as UV
--------------------------------------------------------------
type Sweep = Sweep.Sweep UV.Vector Double
--------------------------------------------------------------
eps :: Double
eps = 10^^(-9 :: Integer)
(===) :: Sweep -> Sweep -> Bool
(Sweep.Sweep xs) === (Sweep.Sweep ys) =
UV.all (< eps) (UV.zipWith (\x y -> abs (x-y)) xs ys)
(====) :: Double -> Double -> Bool
x ==== y = abs (x - y) < eps
--------------------------------------------------------------
prop_length :: [Double] -> Bool
prop_length xs =
length xs == Sweep.length (Sweep.fromList xs :: Sweep)
prop_map_fusion ::
(Func Double) ->
(Func Double) ->
Sweep ->
Bool
prop_map_fusion (Func f) (Func g) sweep =
Sweep.map f (Sweep.map g sweep) == Sweep.map (f . g) sweep
prop_conversion :: Sweep -> Bool
prop_conversion sweep = Sweep.fromList (Sweep.toList sweep) == sweep
prop_conversion2 :: [Double] -> Bool
prop_conversion2 xs = Sweep.toList (Sweep.fromList xs :: Sweep) == xs
prop_recip :: Sweep -> Bool
prop_recip sweep = Arith.recip (Arith.recip sweep) === sweep
prop_negate :: Sweep -> Bool
prop_negate sweep = Arith.negate (Arith.negate sweep) == sweep
prop_integrate :: Sweep -> Bool
prop_integrate sweep = Arith.integrate sweep == sweep
prop_allZeros :: Sweep -> Bool
prop_allZeros sweep =
if Arith.allZeros sweep then sum (Sweep.toList sweep) == 0 else True
{-
prop_allZeros2 :: Sweep -> QC.Property
prop_allZeros2 sweep =
(Arith.allZeros sweep)
==>
(sum (Sweep.toList sweep) == 0)
-}
prop_constOne :: Sweep -> Bool
prop_constOne sweep =
sum (Sweep.toList (Arith.constOne sweep)) == fromIntegral (Sweep.length sweep)
prop_replicate :: Sweep -> Double -> Bool
prop_replicate sweep x =
sum (Sweep.toList (Sweep.replicate sweep x))
====
(fromIntegral (Sweep.length sweep) * x)
prop_fromDouble_replicate :: Sweep -> Double -> Bool
prop_fromDouble_replicate sweep x =
Sweep.replicate sweep x == Sweep.fromRational (Sweep.length sweep) x
prop_add :: Sweep -> Bool
prop_add sweep = sweep ~+ Arith.negate sweep == sweep ~- sweep
prop_mul :: Sweep -> Bool
prop_mul sweep = (sweep ~* Arith.recip sweep) === (sweep ~/ sweep)
runTests :: IO Bool
runTests = $quickCheckAll
main :: IO ()
main = runTests >>= print
|
energyflowanalysis/efa-2.1
|
test/EFA/Test/Sweep.hs
|
bsd-3-clause
| 2,694
| 0
| 12
| 428
| 875
| 470
| 405
| 61
| 2
|
-- | Take configuration, produce 'Travis'.
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS_GHC -Wno-unused-matches #-}
module HaskellCI.Travis (
makeTravis,
travisHeader,
) where
import HaskellCI.Prelude
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import qualified Distribution.Fields.Pretty as C
import qualified Distribution.Package as C
import qualified Distribution.Pretty as C
import qualified Distribution.Types.VersionRange as C
import qualified Distribution.Version as C
import Cabal.Project
import HaskellCI.Auxiliary
import HaskellCI.Compiler
import HaskellCI.Config
import HaskellCI.Config.ConstraintSet
import HaskellCI.Config.Doctest
import HaskellCI.Config.Folds
import HaskellCI.Config.HLint
import HaskellCI.Config.Installed
import HaskellCI.Config.Jobs
import HaskellCI.Config.PackageScope
import HaskellCI.Config.Validity
import HaskellCI.HeadHackage
import HaskellCI.Jobs
import HaskellCI.List
import HaskellCI.MonadErr
import HaskellCI.Package
import HaskellCI.Sh
import HaskellCI.ShVersionRange
import HaskellCI.Tools
import HaskellCI.Travis.Yaml
import HaskellCI.VersionInfo
-------------------------------------------------------------------------------
-- Travis header
-------------------------------------------------------------------------------
travisHeader :: Bool -> [String] -> [String]
travisHeader insertVersion argv =
[ "This Travis job script has been generated by a script via"
, ""
, " haskell-ci " ++ unwords [ "'" ++ a ++ "'" | a <- argv ]
, ""
, "To regenerate the script (for example after adjusting tested-with) run"
, ""
, " haskell-ci regenerate"
, ""
, "For more information, see https://github.com/haskell-CI/haskell-ci"
, ""
] ++
if insertVersion then
[ "version: " ++ haskellCIVerStr
, ""
] else []
-------------------------------------------------------------------------------
-- Generate travis configuration
-------------------------------------------------------------------------------
{-
Travis CI–specific notes:
* We use -j2 for parallelism, as Travis' virtual environments use 2 cores, per
https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system.
-}
makeTravis
:: [String]
-> Config
-> Project URI Void Package
-> JobVersions
-> Either HsCiError Travis -- TODO: writer
makeTravis argv config@Config {..} prj jobs@JobVersions {..} = do
-- before caching: clear some redundant stuff
beforeCache <- runSh $ when cfgCache $ do
sh "rm -fv $CABALHOME/packages/hackage.haskell.org/build-reports.log"
comment "remove files that are regenerated by 'cabal update'"
sh "rm -fv $CABALHOME/packages/hackage.haskell.org/00-index.*" -- legacy
sh "rm -fv $CABALHOME/packages/hackage.haskell.org/*.json" -- TUF meta-data
sh "rm -fv $CABALHOME/packages/hackage.haskell.org/01-index.cache"
sh "rm -fv $CABALHOME/packages/hackage.haskell.org/01-index.tar"
sh "rm -fv $CABALHOME/packages/hackage.haskell.org/01-index.tar.idx"
sh "rm -rfv $CABALHOME/packages/head.hackage" -- if we cache, it will break builds.
-- before install: we set up the environment, install GHC/cabal on OSX
beforeInstall <- runSh $ do
-- Validity checks
checkConfigValidity config jobs
-- This have to be first
when anyGHCJS $ sh $ unlines
[ "if echo $CC | grep -q ghcjs; then"
, " GHCJS=true; GHCJSARITH=1;"
, "else"
, " GHCJS=false; GHCJSARITH=0;"
, "fi"
]
-- Adjust $HC
sh "HC=$(echo \"/opt/$CC/bin/ghc\" | sed 's/-/\\//')"
sh "WITHCOMPILER=\"-w $HC\""
shForJob RangeGHCJS "HC=${HC}js"
shForJob RangeGHCJS "WITHCOMPILER=\"--ghcjs ${WITHCOMPILER}js\""
-- Needed to work around haskell/cabal#6214
sh "HADDOCK=$(echo \"/opt/$CC/bin/haddock\" | sed 's/-/\\//')"
unless (null macosVersions) $ do
sh $ "if [ \"$TRAVIS_OS_NAME\" = \"osx\" ]; then HADDOCK=$(echo $HADDOCK | sed \"s:^/opt:$HOME/.ghc-install:\"); fi"
-- Hack: happy needs ghc. Let's install version matching GHCJS.
-- At the moment, there is only GHCJS-8.4, so we install GHC-8.4.4
when anyGHCJS $ do
shForJob RangeGHCJS $ "PATH=\"/opt/ghc/8.4.4/bin:$PATH\""
sh "HCPKG=\"$HC-pkg\""
sh "unset CC"
-- cabal
sh "CABAL=/opt/ghc/bin/cabal"
sh "CABALHOME=$HOME/.cabal"
-- PATH
sh "export PATH=\"$CABALHOME/bin:$PATH\""
-- rootdir is useful for manual script additions
sh "TOP=$(pwd)"
-- macOS installing
let haskellOnMacos = "https://haskell.futurice.com/haskell-on-macos.py"
unless (null macosVersions) $ do
sh $ "if [ \"$TRAVIS_OS_NAME\" = \"osx\" ]; then curl " ++ haskellOnMacos ++ " | python3 - --make-dirs --install-dir=$HOME/.ghc-install --cabal-alias=3.2.0.0 install cabal-install-3.2.0.0 ${TRAVIS_COMPILER}; fi"
sh' [2034,2039] "if [ \"$TRAVIS_OS_NAME\" = \"osx\" ]; then HC=$HOME/.ghc-install/ghc/bin/$TRAVIS_COMPILER; WITHCOMPILER=\"-w $HC\"; HCPKG=$HOME/.ghc-install/ghc/bin/${TRAVIS_COMPILER/ghc/ghc-pkg}; CABAL=$HOME/.ghc-install/ghc/bin/cabal; fi"
-- HCNUMVER, numeric HC version, e.g. ghc 7.8.4 is 70804 and 7.10.3 is 71003
sh "HCNUMVER=$(${HC} --numeric-version|perl -ne '/^(\\d+)\\.(\\d+)\\.(\\d+)(\\.(\\d+))?$/; print(10000 * $1 + 100 * $2 + ($3 == 0 ? $5 != 1 : $3))')"
sh "echo $HCNUMVER"
-- verbose in .cabal/config is not respected
-- https://github.com/haskell/cabal/issues/5956
sh "CABAL=\"$CABAL -vnormal+nowrap\""
-- SC2039: In POSIX sh, set option pipefail is undefined. Travis is bash, so it's fine :)
sh' [2039, 3040] "set -o pipefail"
sh "TEST=--enable-tests"
shForJob (invertCompilerRange $ Range cfgTests) "TEST=--disable-tests"
sh "BENCH=--enable-benchmarks"
shForJob (invertCompilerRange $ Range cfgBenchmarks) "BENCH=--disable-benchmarks"
sh "HEADHACKAGE=false"
shForJob (Range cfgHeadHackage \/ RangePoints (S.singleton GHCHead)) "HEADHACKAGE=true"
-- create ~/.cabal/config
sh "rm -f $CABALHOME/config"
cat "$CABALHOME/config"
[ "verbose: normal +nowrap +markoutput" -- https://github.com/haskell/cabal/issues/5956
, "remote-build-reporting: anonymous"
, "write-ghc-environment-files: never"
, "remote-repo-cache: $CABALHOME/packages"
, "logs-dir: $CABALHOME/logs"
, "world-file: $CABALHOME/world"
, "extra-prog-path: $CABALHOME/bin"
, "symlink-bindir: $CABALHOME/bin"
, "installdir: $CABALHOME/bin"
, "build-summary: $CABALHOME/logs/build.log"
, "store-dir: $CABALHOME/store"
, "install-dirs user"
, " prefix: $CABALHOME"
, "repository hackage.haskell.org"
, " url: http://hackage.haskell.org/"
]
-- Add head.hackage repository to ~/.cabal/config
-- (locally you want to add it to cabal.project)
unless (S.null headGhcVers) $ sh $ unlines $
[ "if $HEADHACKAGE; then"
] ++
lines (catCmd Double "$CABALHOME/config" headHackageRepoStanza) ++
[ "fi"
]
-- in install step we install tools and dependencies
install <- runSh $ do
sh "${CABAL} --version"
sh "echo \"$(${HC} --version) [$(${HC} --print-project-git-commit-id 2> /dev/null || echo '?')]\""
when anyGHCJS $ do
sh "node --version"
sh "echo $GHCJS"
-- Cabal jobs
for_ (cfgJobs >>= cabalJobs) $ \n ->
sh $ "echo 'jobs: " ++ show n ++ "' >> $CABALHOME/config"
-- GHC jobs + ghc-options
for_ (cfgJobs >>= ghcJobs) $ \m -> do
shForJob (Range $ C.orLaterVersion (C.mkVersion [7,8])) $ "GHCJOBS=-j" ++ show m
cat "$CABALHOME/config"
[ "program-default-options"
, " ghc-options: $GHCJOBS +RTS -M6G -RTS"
]
-- output config for debugging purposes
sh "cat $CABALHOME/config"
-- remove project own cabal.project files
sh "rm -fv cabal.project cabal.project.local cabal.project.freeze"
-- Update hackage index.
sh "travis_retry ${CABAL} v2-update -v"
-- Install doctest
let doctestVersionConstraint
| C.isAnyVersion (cfgDoctestVersion cfgDoctest) = ""
| otherwise = " --constraint='doctest " ++ C.prettyShow (cfgDoctestVersion cfgDoctest) ++ "'"
when doctestEnabled $
shForJob (Range (cfgDoctestEnabled cfgDoctest) /\ doctestJobVersionRange) $
cabal $ "v2-install $WITHCOMPILER --ignore-project -j2 doctest" ++ doctestVersionConstraint
-- Install hlint
let hlintVersionConstraint
| C.isAnyVersion (cfgHLintVersion cfgHLint) = ""
| otherwise = " --constraint='hlint " ++ C.prettyShow (cfgHLintVersion cfgHLint) ++ "'"
when (cfgHLintEnabled cfgHLint) $ do
let forHLint = shForJob (hlintJobVersionRange allVersions cfgHeadHackage (cfgHLintJob cfgHLint))
if cfgHLintDownload cfgHLint
then do
-- install --dry-run and use perl regex magic to find a hlint version
-- -v is important
forHLint $ "HLINTVER=$(cd /tmp && (${CABAL} v2-install -v $WITHCOMPILER --dry-run hlint " ++ hlintVersionConstraint ++ " | perl -ne 'if (/\\bhlint-(\\d+(\\.\\d+)*)\\b/) { print \"$1\"; last; }')); echo \"HLint version $HLINTVER\""
forHLint $ "if [ ! -e $HOME/.hlint/hlint-$HLINTVER/hlint ]; then " ++ unwords
[ "echo \"Downloading HLint version $HLINTVER\";"
, "mkdir -p $HOME/.hlint;"
, "curl --write-out 'Status Code: %{http_code} Redirects: %{num_redirects} Total time: %{time_total} Total Dsize: %{size_download}\\n' --silent --location --output $HOME/.hlint/hlint-$HLINTVER.tar.gz \"https://github.com/ndmitchell/hlint/releases/download/v$HLINTVER/hlint-$HLINTVER-x86_64-linux.tar.gz\";"
, "tar -xzv -f $HOME/.hlint/hlint-$HLINTVER.tar.gz -C $HOME/.hlint;"
, "fi"
]
forHLint "mkdir -p $CABALHOME/bin && ln -sf \"$HOME/.hlint/hlint-$HLINTVER/hlint\" $CABALHOME/bin/hlint"
forHLint "hlint --version"
else forHLint $ cabal $ "v2-install $WITHCOMPILER --ignore-project -j2 hlint" ++ hlintVersionConstraint
-- Install cabal-plan (for ghcjs tests)
when (anyGHCJS && cfgGhcjsTests) $ do
shForJob RangeGHCJS $ cabal "v2-install -w ghc-8.4.4 --ignore-project -j2 cabal-plan --constraint='cabal-plan ^>=0.6.0.0' --constraint='cabal-plan +exe'"
-- Install happy
when anyGHCJS $ for_ cfgGhcjsTools $ \t ->
shForJob RangeGHCJS $ cabal $ "v2-install -w ghc-8.4.4 --ignore-project -j2" ++ C.prettyShow t
-- create cabal.project file
generateCabalProject False
-- autoreconf
for_ pkgs $ \Pkg{pkgDir} ->
sh $ "if [ -f \"" ++ pkgDir ++ "/configure.ac\" ]; then (cd \"" ++ pkgDir ++ "\" && autoreconf -i); fi"
-- dump install plan
sh $ cabal "v2-freeze $WITHCOMPILER ${TEST} ${BENCH}"
sh "cat cabal.project.freeze | sed -E 's/^(constraints: *| *)//' | sed 's/any.//'"
sh "rm cabal.project.freeze"
-- Install dependencies
when cfgInstallDeps $ do
-- install dependencies
sh $ cabalTW "v2-build $WITHCOMPILER ${TEST} ${BENCH} --dep -j2 all"
-- install dependencies for no-test-no-bench
shForJob (Range cfgNoTestsNoBench) $ cabalTW "v2-build $WITHCOMPILER --disable-tests --disable-benchmarks --dep -j2 all"
-- Here starts the actual work to be performed for the package under test;
-- any command which exits with a non-zero exit code causes the build to fail.
script <- runSh $ do
sh "DISTDIR=$(mktemp -d /tmp/dist-test.XXXX)"
-- sdist
foldedSh FoldSDist "Packaging..." cfgFolds $ do
sh $ cabal "v2-sdist all"
-- unpack
foldedSh FoldUnpack "Unpacking..." cfgFolds $ do
sh "mv dist-newstyle/sdist/*.tar.gz ${DISTDIR}/"
sh "cd ${DISTDIR} || false" -- fail explicitly, makes SC happier
sh "find . -maxdepth 1 -type f -name '*.tar.gz' -exec tar -xvf '{}' \\;"
sh "find . -maxdepth 1 -type f -name '*.tar.gz' -exec rm '{}' \\;"
for_ pkgs $ \Pkg{pkgName} -> do
sh $ pkgNameDirVariable' pkgName ++ "=\"$(find . -maxdepth 1 -type d -regex '.*/" ++ pkgName ++ "-[0-9.]*')\""
generateCabalProject True
when (anyGHCJS && cfgGhcjsTests) $ sh $ unlines $
[ "pkgdir() {"
, " case $1 in"
] ++
[ " " ++ pkgName ++ ") echo " ++ pkgNameDirVariable pkgName ++ " ;;"
| Pkg{pkgName} <- pkgs
] ++
[ " esac"
, "}"
]
-- build no-tests no-benchmarks
unless (equivVersionRanges C.noVersion cfgNoTestsNoBench) $ foldedSh FoldBuild "Building..." cfgFolds $ do
comment "this builds all libraries and executables (without tests/benchmarks)"
shForJob (Range cfgNoTestsNoBench) $ cabal "v2-build $WITHCOMPILER --disable-tests --disable-benchmarks all"
-- build everything
foldedSh FoldBuildEverything "Building with tests and benchmarks..." cfgFolds $ do
comment "build & run tests, build benchmarks"
sh $ cabal "v2-build $WITHCOMPILER ${TEST} ${BENCH} all --write-ghc-environment-files=always"
-- cabal v2-test fails if there are no test-suites.
foldedSh FoldTest "Testing..." cfgFolds $ do
shForJob (RangeGHC /\ Range (cfgTests /\ cfgRunTests) /\ hasTests) $
cabal $ "v2-test $WITHCOMPILER ${TEST} ${BENCH} all" ++ testShowDetails
when cfgGhcjsTests $ shForJob (RangeGHCJS /\ hasTests) $ unwords
[ "cabal-plan list-bins '*:test:*' | while read -r line; do"
, "testpkg=$(echo \"$line\" | perl -pe 's/:.*//');"
, "testexe=$(echo \"$line\" | awk '{ print $2 }');"
, "echo \"testing $textexe in package $textpkg\";"
, "(cd \"$(pkgdir $testpkg)\" && nodejs \"$testexe\".jsexe/all.js);"
, "done"
]
-- doctest
when doctestEnabled $ foldedSh FoldDoctest "Doctest..." cfgFolds $ do
let doctestOptions = unwords $ cfgDoctestOptions cfgDoctest
sh $ "$CABAL v2-build $WITHCOMPILER ${TEST} ${BENCH} all --dry-run"
unless (null $ cfgDoctestFilterEnvPkgs cfgDoctest) $ do
-- cabal-install mangles unit ids on the OSX,
-- removing the vowels to make filepaths shorter
let manglePkgNames :: String -> [String]
manglePkgNames n
| null macosVersions = [n]
| otherwise = [n, filter notVowel n]
where
notVowel c = notElem c ("aeiou" :: String)
let filterPkgs = intercalate "|" $ concatMap (manglePkgNames . C.unPackageName) $ cfgDoctestFilterEnvPkgs cfgDoctest
sh $ "perl -i -e 'while (<ARGV>) { print unless /package-id\\s+(" ++ filterPkgs ++ ")-\\d+(\\.\\d+)*/; }' .ghc.environment.*"
for_ pkgs $ \Pkg{pkgName,pkgGpd,pkgJobs} ->
when (C.mkPackageName pkgName `notElem` cfgDoctestFilterSrcPkgs cfgDoctest) $ do
for_ (doctestArgs pkgGpd) $ \args -> do
let args' = unwords args
let vr = Range (cfgDoctestEnabled cfgDoctest)
/\ doctestJobVersionRange
/\ RangePoints pkgJobs
unless (null args) $ shForJob vr $
"(cd " ++ pkgNameDirVariable pkgName ++ " && doctest " ++ doctestOptions ++ " " ++ args' ++ ")"
-- hlint
when (cfgHLintEnabled cfgHLint) $ foldedSh FoldHLint "HLint.." cfgFolds $ do
let "" <+> ys = ys
xs <+> "" = xs
xs <+> ys = xs ++ " " ++ ys
prependSpace "" = ""
prependSpace xs = " " ++ xs
let hlintOptions = prependSpace $ maybe "" ("-h ${TOP}/" ++) (cfgHLintYaml cfgHLint) <+> unwords (cfgHLintOptions cfgHLint)
for_ pkgs $ \Pkg{pkgName,pkgGpd,pkgJobs} -> do
for_ (hlintArgs pkgGpd) $ \args -> do
let args' = unwords args
unless (null args) $
shForJob (hlintJobVersionRange allVersions cfgHeadHackage (cfgHLintJob cfgHLint) /\ RangePoints pkgJobs) $
"(cd " ++ pkgNameDirVariable pkgName ++ " && hlint" ++ hlintOptions ++ " " ++ args' ++ ")"
-- cabal check
when cfgCheck $ foldedSh FoldCheck "cabal check..." cfgFolds $ do
for_ pkgs $ \Pkg{pkgName,pkgJobs} -> shForJob (RangePoints pkgJobs) $
"(cd " ++ pkgNameDirVariable pkgName ++ " && ${CABAL} -vnormal check)"
-- haddock
when (hasLibrary && not (equivVersionRanges C.noVersion cfgHaddock)) $
foldedSh FoldHaddock "haddock..." cfgFolds $
shForJob (RangeGHC /\ Range cfgHaddock) $ cabal $ "v2-haddock $WITHCOMPILER " ++ withHaddock ++ " ${TEST} ${BENCH} all"
-- unconstained build
-- Have to build last, as we remove cabal.project.local
unless (equivVersionRanges C.noVersion cfgUnconstrainted) $
foldedSh FoldBuildInstalled "Building without installed constraints for packages in global-db..." cfgFolds $ do
shForJob (Range cfgUnconstrainted) "rm -f cabal.project.local"
shForJob (Range cfgUnconstrainted) $ cabal "v2-build $WITHCOMPILER --disable-tests --disable-benchmarks all"
-- and now, as we don't have cabal.project.local;
-- we can test with other constraint sets
unless (null cfgConstraintSets) $ do
comment "Constraint sets"
sh "rm -f cabal.project.local"
for_ cfgConstraintSets $ \cs -> do
let name = csName cs
let shForCs = shForJob (Range (csGhcVersions cs))
let shForCs' r = shForJob (Range (csGhcVersions cs) /\ r)
let testFlag = if csTests cs then "--enable-tests" else "--disable-tests"
let benchFlag = if csBenchmarks cs then "--enable-benchmarks" else "--disable-benchmarks"
let constraintFlags = map (\x -> "--constraint='" ++ x ++ "'") (csConstraints cs)
let allFlags = unwords (testFlag : benchFlag : constraintFlags)
foldedSh' FoldConstraintSets name ("Constraint set " ++ name) cfgFolds $ do
shForCs $ cabal $ "v2-build $WITHCOMPILER " ++ allFlags ++ " all"
when (csRunTests cs) $
shForCs' hasTests $ cabal $ "v2-test $WITHCOMPILER " ++ allFlags ++ " all --test-show-details=direct"
when (hasLibrary && csHaddock cs) $
shForCs $ cabal $ "v2-haddock $WITHCOMPILER " ++ withHaddock ++ " " ++ allFlags ++ " all"
-- At the end, we allow some raw travis scripts
unless (null cfgRawTravis) $ do
comment "Raw travis commands"
traverse_ sh
[ l
| l <- lines cfgRawTravis
, not (null l)
]
-- assemble travis configuration
return Travis
{ travisLanguage = "c"
, travisUbuntu = cfgUbuntu
, travisGit = TravisGit
{ tgSubmodules = cfgSubmodules
}
, travisCache = TravisCache
{ tcDirectories = buildList $ when cfgCache $ do
item "$HOME/.cabal/packages"
item "$HOME/.cabal/store"
item "$HOME/.hlint"
-- on OSX ghc is installed in $HOME so we can cache it
-- independently of linux
when (cfgCache && not (null macosVersions)) $ do
item "$HOME/.ghc-install"
}
, travisBranches = TravisBranches
{ tbOnly = cfgOnlyBranches
}
, travisNotifications = TravisNotifications
{ tnIRC = justIf (not $ null cfgIrcChannels) $ TravisIRC
{ tiChannels = cfgIrcChannels
, tiSkipJoin = True
, tiTemplate =
[ "\x0313" ++ projectName ++ "\x03/\x0306%{branch}\x03 \x0314%{commit}\x03 %{build_url} %{message}"
]
, tiNick = cfgIrcNickname
, tiPassword = cfgIrcPassword
}
, tnEmail = cfgEmailNotifications
}
, travisServices = buildList $ do
when cfgPostgres $ item "postgresql"
, travisAddons = TravisAddons
{ taApt = TravisApt [] []
, taPostgres = if cfgPostgres then Just "10" else Nothing
, taGoogleChrome = cfgGoogleChrome
}
, travisMatrix = TravisMatrix
{ tmInclude = buildList $ do
let tellJob :: Bool -> CompilerVersion -> ListBuilder TravisJob ()
tellJob osx gv = do
let cvs = dispCabalVersion $ correspondingCabalVersion cfgCabalInstallVersion gv
let gvs = dispGhcVersion gv
-- https://docs.travis-ci.com/user/installing-dependencies/#adding-apt-sources
let hvrppa :: TravisAptSource
hvrppa = TravisAptSourceLine ("deb http://ppa.launchpad.net/hvr/ghc/ubuntu " ++ C.prettyShow cfgUbuntu ++ " main") (Just "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x063dab2bdc0b3f9fcebc378bff3aeacef6f88286")
let ghcjsAptSources :: [TravisAptSource]
ghcjsAptSources | not (isGHCJS gv) = []
| otherwise =
[ TravisAptSourceLine ("deb http://ppa.launchpad.net/hvr/ghcjs/ubuntu " ++ C.prettyShow cfgUbuntu ++ " main") Nothing
, TravisAptSourceLine ("deb https://deb.nodesource.com/node_10.x " ++ C.prettyShow cfgUbuntu ++ " main") (Just "https://deb.nodesource.com/gpgkey/nodesource.gpg.key")
]
let ghcjsPackages :: [String]
ghcjsPackages = case maybeGHCJS gv of
Just v -> [ "ghc-" ++ C.prettyShow v', "nodejs" ] where
-- TODO: partial maximum
v' = maximum $ filter (`C.withinRange` C.withinVersion v) $ knownGhcVersions
Nothing -> []
item TravisJob
{ tjCompiler = gvs
, tjOS = if osx then "osx" else "linux"
, tjEnv = case gv of
GHC v -> M.lookup v cfgEnv
_ -> Nothing
, tjAddons = TravisAddons
{ taApt = TravisApt
{ taPackages = gvs : ("cabal-install-" ++ cvs) : ghcjsPackages ++ S.toList cfgApt
, taSources = hvrppa : ghcjsAptSources
}
, taPostgres = Nothing
, taGoogleChrome = False
}
}
for_ (reverse $ S.toList linuxVersions) $ tellJob False
for_ (reverse $ S.toList macosVersions) $ tellJob True
, tmAllowFailures =
[ TravisAllowFailure $ dispGhcVersion compiler
| compiler <- toList allVersions
, previewGHC cfgHeadHackage compiler || maybeGHC False (`C.withinRange` cfgAllowFailures) compiler
]
}
, travisBeforeCache = beforeCache
, travisBeforeInstall = beforeInstall
, travisInstall = install
, travisScript = script
}
where
Auxiliary {..} = auxiliary config prj jobs
justIf True x = Just x
justIf False _ = Nothing
-- TODO: should this be part of MonadSh ?
foldedSh label = foldedSh' label ""
anyGHCJS = any isGHCJS allVersions
-- https://github.com/travis-ci/docs-travis-ci-com/issues/949#issuecomment-276755003
-- https://github.com/travis-ci/travis-rubies/blob/9f7962a881c55d32da7c76baefc58b89e3941d91/build.sh#L38-L44
-- https://github.com/travis-ci/travis-build/blob/91bf066/lib/travis/build/shell/dsl.rb#L58-L63
foldedSh' :: Fold -> String -> String -> Set Fold -> ShM () -> ShM ()
foldedSh' label sfx plabel labels block
| label `S.notMember` labels = commentedBlock plabel block
| otherwise = case runSh block of
Left err -> throwErr err
Right shs
| all isComment shs -> pure ()
| otherwise -> ShM $ \shs1 -> Right $
( shs1
. (Comment plabel :)
. (Sh ("echo '" ++ plabel ++ "' && echo -en 'travis_fold:start:" ++ label' ++ "\\\\r'") :)
. (shs ++)
. (Sh ("echo -en 'travis_fold:end:" ++ label' ++ "\\\\r'") :)
-- return ()
, ()
)
where
label' | null sfx = showFold label
| otherwise = showFold label ++ "-" ++ sfx
-- GHC versions which need head.hackage
headGhcVers :: Set CompilerVersion
headGhcVers = S.filter (previewGHC cfgHeadHackage) allVersions
cabal :: String -> String
cabal cmd = "${CABAL} " ++ cmd
cabalTW :: String -> String
cabalTW cmd = "travis_wait 40 ${CABAL} " ++ cmd
forJob :: CompilerRange -> String -> Maybe String
forJob vr cmd
| all (`compilerWithinRange` vr) allVersions = Just cmd
| not $ any (`compilerWithinRange` vr) allVersions = Nothing
| otherwise = Just $ unwords
[ "if"
, compilerVersionPredicate allVersions vr
, "; then"
, cmd
, "; fi"
]
shForJob :: CompilerRange -> String -> ShM ()
shForJob vr cmd = maybe (pure ()) sh (forJob vr cmd)
-- catForJob vr fp contents = shForJob vr (catCmd Double fp contents)
generateCabalProject :: Bool -> ShM ()
generateCabalProject dist = do
comment "Generate cabal.project"
sh "rm -rf cabal.project cabal.project.local cabal.project.freeze"
sh "touch cabal.project"
sh $ unlines
[ cmd
| pkg <- pkgs
, let p | dist = pkgNameDirVariable (pkgName pkg)
| otherwise = pkgDir pkg
, cmd <- toList $ forJob (RangePoints $ pkgJobs pkg) $
"echo \"packages: " ++ p ++ "\" >> cabal.project"
]
case cfgErrorMissingMethods of
PackageScopeNone -> pure ()
PackageScopeLocal -> for_ pkgs $ \Pkg{pkgName,pkgJobs} -> do
shForJob (Range (C.orLaterVersion (C.mkVersion [8,2])) /\ RangePoints pkgJobs) $
"echo 'package " ++ pkgName ++ "' >> cabal.project"
shForJob (Range (C.orLaterVersion (C.mkVersion [8,2])) /\ RangePoints pkgJobs) $
"echo ' ghc-options: -Werror=missing-methods' >> cabal.project"
PackageScopeAll -> do
sh "echo 'package *' >> cabal.project"
sh "echo ' ghc-options: -Werror=missing-methods' >> cabal.project"
cat "cabal.project" $ lines $ C.showFields' (const []) (const id) 2 extraCabalProjectFields
-- If using head.hackage, allow building with newer versions of GHC boot libraries.
-- Note that we put this in a cabal.project file, not ~/.cabal/config, in order to avoid
-- https://github.com/haskell/cabal/issues/7291.
unless (S.null headGhcVers) $ sh $ unlines $
[ "if $HEADHACKAGE; then"
, "echo \"allow-newer: $($HCPKG list --simple-output | sed -E 's/([a-zA-Z-]+)-[0-9.]+/*:\\1,/g')\" >> $CABALHOME/config"
, "fi"
]
-- also write cabal.project.local file with
-- @
-- constraints: base installed
-- constraints: array installed
-- ...
--
-- omitting any local package names
case normaliseInstalled cfgInstalled of
InstalledDiff pns -> sh $ unwords
[ "for pkg in $($HCPKG list --simple-output); do"
, "echo $pkg"
, "| sed 's/-[^-]*$//'"
, "| (grep -vE -- " ++ re ++ " || true)"
, "| sed 's/^/constraints: /'"
, "| sed 's/$/ installed/'"
, ">> cabal.project.local; done"
]
where
pns' = S.map C.unPackageName pns `S.union` foldMap (S.singleton . pkgName) pkgs
re = "'^(" ++ intercalate "|" (S.toList pns') ++ ")$'"
InstalledOnly pns | not (null pns') -> sh' [2043] $ unwords
[ "for pkg in " ++ unwords (S.toList pns') ++ "; do"
, "echo \"constraints: $pkg installed\""
, ">> cabal.project.local; done"
]
where
pns' = S.map C.unPackageName pns `S.difference` foldMap (S.singleton . pkgName) pkgs
-- otherwise: nothing
_ -> pure ()
sh "cat cabal.project || true"
sh "cat cabal.project.local || true"
-- Needed to work around haskell/cabal#6214
withHaddock :: String
withHaddock = "--with-haddock $HADDOCK"
data Quotes = Single | Double
escape :: Quotes -> String -> String
escape Single xs = "'" ++ concatMap f xs ++ "'" where
f '\0' = ""
f '\'' = "'\"'\"'"
f x = [x]
escape Double xs = show xs
catCmd :: Quotes -> FilePath -> [String] -> String
catCmd q fp contents = unlines
[ "echo " ++ escape q l ++ replicate (maxLength - length l) ' ' ++ " >> " ++ fp
| l <- contents
]
where
maxLength = foldl' (\a l -> max a (length l)) 0 contents
{-
-- https://travis-ci.community/t/multiline-commands-have-two-spaces-in-front-breaks-heredocs/2756
catCmd fp contents = unlines $
[ "cat >> " ++ fp ++ " << HEREDOC" ] ++
contents ++
[ "HEREDOC" ]
-}
cat :: FilePath -> [String] -> ShM ()
cat fp contents = sh $ catCmd Double fp contents
|
hvr/multi-ghc-travis
|
src/HaskellCI/Travis.hs
|
bsd-3-clause
| 31,590
| 0
| 35
| 10,285
| 5,489
| 2,726
| 2,763
| 454
| 17
|
-- {-# OPTIONS_GHC -cpp -DDEBUG #-}
{-# OPTIONS_GHC -cpp #-}
-- uncomment one of the two above lines to turn debugging on/off for this module
-----------------------------------------------------------------------------
-- |
-- Module : Berp.Base.Class
-- Copyright : (c) 2010 Bernie Pope
-- License : BSD-style
-- Maintainer : florbitous@gmail.com
-- Stability : experimental
-- Portability : ghc
--
-- Implementation of the Python "class" keyword. We call it "klass" (with a k)
-- because "class" is a keyword in Haskell.
--
-----------------------------------------------------------------------------
#include "BerpDebug.h"
module Berp.Base.Class (klass) where
import Berp.Base.LiftedIO (liftIO, MonadIO, readIORef)
import Berp.Base.Ident
import Berp.Base.SemanticTypes (Eval, Object (..), ObjectRef)
#ifdef DEBUG
import Berp.Base.Prims (printObject)
#endif
import Berp.Base.Hash (Hashed)
import Berp.Base.Attributes (mkAttributesList)
import Berp.Base.StdTypes.Type (newType)
import Berp.Base.StdTypes.String (string)
import Berp.Base.StdTypes.Tuple (tuple)
-- import Berp.Base.StdTypes.None (none)
import Berp.Base.StdTypes.Object (object)
-- | Implement Python's @class@ statement: construct a new type object from
--   the class name, the (possibly empty) list of base classes, and a
--   computation that yields the class attributes.
klass :: Ident -> [Object] -> Eval [(Hashed String, ObjectRef)] -> Eval Object
klass className srcBases attributesComp = do
   -- if the source lists no bases for the class, then force it to be (object)
   let trueBases = if null srcBases then [object] else srcBases
   attributes <- attributesComp
   attributesObjects <- mapM getIdentObj attributes
   classDict <- mkAttributesList attributesObjects
   typeObject <- liftIO $ newType [string className, tuple trueBases, classDict]
   -- Compiled out unless DEBUG is defined (see BerpDebug.h).
   IF_DEBUG((printObject $ object_mro typeObject) >> putStr "\n")
   return typeObject
   where
   -- Dereference each attribute's ObjectRef so the class dictionary stores
   -- plain Objects rather than references.
   getIdentObj :: MonadIO m => (a, ObjectRef) -> m (a, Object)
   getIdentObj (ident, ref) = do
      obj <- readIORef ref
      return (ident, obj)
|
bjpop/berp
|
libs/src/Berp/Base/Class.hs
|
bsd-3-clause
| 1,895
| 0
| 12
| 293
| 390
| 225
| 165
| 24
| 2
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DeriveGeneric #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Setup
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : lemmih@gmail.com
-- Stability : provisional
-- Portability : portable
--
--
-----------------------------------------------------------------------------
module Distribution.Client.Setup
( globalCommand, GlobalFlags(..), defaultGlobalFlags
, RepoContext(..), withRepoContext
, configureCommand, ConfigFlags(..), filterConfigureFlags
, configPackageDB', configCompilerAux'
, configureExCommand, ConfigExFlags(..), defaultConfigExFlags
, buildCommand, BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..)
, replCommand, testCommand, benchmarkCommand
, configureExOptions, reconfigureCommand
, installCommand, InstallFlags(..), installOptions, defaultInstallFlags
, defaultSolver, defaultMaxBackjumps
, listCommand, ListFlags(..)
, updateCommand, UpdateFlags(..)
, upgradeCommand
, uninstallCommand
, infoCommand, InfoFlags(..)
, fetchCommand, FetchFlags(..)
, freezeCommand, FreezeFlags(..)
, genBoundsCommand
, outdatedCommand, OutdatedFlags(..), IgnoreMajorVersionBumps(..)
, getCommand, unpackCommand, GetFlags(..)
, checkCommand
, formatCommand
, uploadCommand, UploadFlags(..), IsCandidate(..)
, reportCommand, ReportFlags(..)
, runCommand
, initCommand, IT.InitFlags(..)
, sdistCommand, SDistFlags(..), SDistExFlags(..), ArchiveFormat(..)
, win32SelfUpgradeCommand, Win32SelfUpgradeFlags(..)
, actAsSetupCommand, ActAsSetupFlags(..)
, sandboxCommand, defaultSandboxLocation, SandboxFlags(..)
, execCommand, ExecFlags(..)
, userConfigCommand, UserConfigFlags(..)
, manpageCommand
, applyFlagDefaults
, parsePackageArgs
--TODO: stop exporting these:
, showRepo
, parseRepo
, readRepo
) where
import Prelude ()
import Distribution.Client.Compat.Prelude hiding (get)
import Distribution.Client.Types
( Username(..), Password(..), RemoteRepo(..)
, AllowNewer(..), AllowOlder(..), RelaxDeps(..)
)
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.Dependency.Types
( PreSolver(..) )
import Distribution.Client.IndexUtils.Timestamp
( IndexState(..) )
import qualified Distribution.Client.Init.Types as IT
( InitFlags(..), PackageType(..) )
import Distribution.Client.Targets
( UserConstraint, readUserConstraint )
import Distribution.Utils.NubList
( NubList, toNubList, fromNubList)
import Distribution.Solver.Types.ConstraintSource
import Distribution.Solver.Types.Settings
import Distribution.Simple.Compiler ( Compiler, PackageDB, PackageDBStack )
import Distribution.Simple.Program (ProgramDb, defaultProgramDb)
import Distribution.Simple.Command hiding (boolOpt, boolOpt')
import qualified Distribution.Simple.Command as Command
import Distribution.Simple.Configure
( configCompilerAuxEx, interpretPackageDbFlags, computeEffectiveProfiling )
import qualified Distribution.Simple.Setup as Cabal
import Distribution.Simple.Setup
( ConfigFlags(..), BuildFlags(..), ReplFlags
, TestFlags(..), BenchmarkFlags(..)
, SDistFlags(..), HaddockFlags(..)
, readPackageDbList, showPackageDbList
, Flag(..), toFlag, flagToMaybe, flagToList, maybeToFlag
, BooleanFlag(..), optionVerbosity
, boolOpt, boolOpt', trueArg, falseArg
, optionNumJobs )
import Distribution.Simple.InstallDirs
( PathTemplate, InstallDirs(..)
, toPathTemplate, fromPathTemplate, combinePathTemplate )
import Distribution.Version
( Version, mkVersion, nullVersion, anyVersion, thisVersion )
import Distribution.Package
( PackageIdentifier, PackageName, packageName, packageVersion )
import Distribution.Types.Dependency
import Distribution.PackageDescription
( BuildType(..), RepoKind(..) )
import Distribution.System ( Platform )
import Distribution.Text
( Text(..), display )
import Distribution.ReadE
( ReadE(..), readP_to_E, succeedReadE )
import qualified Distribution.Compat.ReadP as Parse
( ReadP, char, munch1, pfail, sepBy1, (+++) )
import Distribution.ParseUtils
( readPToMaybe )
import Distribution.Verbosity
( Verbosity, lessVerbose, normal, verboseNoFlags, verboseNoTimestamp )
import Distribution.Simple.Utils
( wrapText, wrapLine )
import Distribution.Client.GlobalFlags
( GlobalFlags(..), defaultGlobalFlags
, RepoContext(..), withRepoContext
)
import Data.List
( deleteFirstsBy )
import System.FilePath
( (</>) )
import Network.URI
( parseAbsoluteURI, uriToString )
-- | Fill in any unset flags with their command defaults.  Defaults are the
--   left operand of '<>', so flags the user actually supplied win.
applyFlagDefaults :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
                  -> (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
applyFlagDefaults (cfg, cfgEx, inst, hadd) =
    ( defaulted (commandDefaultFlags configureCommand) cfg
    , defaulted defaultConfigExFlags                   cfgEx
    , defaulted defaultInstallFlags                    inst
    , defaulted Cabal.defaultHaddockFlags              hadd
    )
  where
    -- user-supplied flags (right) override the defaults (left)
    defaulted defs flags = defs <> flags
-- | The top-level command-line UI for cabal itself: the global flags plus the
--   hand-curated @--help@ listing of every subcommand, grouped by category.
globalCommand :: [Command action] -> CommandUI GlobalFlags
globalCommand commands = CommandUI {
    commandName = "",
    commandSynopsis =
      "Command line interface to the Haskell Cabal infrastructure.",
    commandUsage = \pname ->
      "See http://www.haskell.org/cabal/ for more information.\n"
      ++ "\n"
      ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n",
    commandDescription = Just $ \pname ->
      let
        commands' = commands ++ [commandAddAction helpCommandUI undefined]
        cmdDescs = getNormalCommandDescriptions commands'
        -- if new commands are added, we want them to appear even if they
        -- are not included in the custom listing below. Thus, we calculate
        -- the `otherCmds` list and append it under the `other` category.
        -- Alternatively, a new testcase could be added that ensures that
        -- the set of commands listed here is equal to the set of commands
        -- that are actually available.
        otherCmds = deleteFirstsBy (==) (map fst cmdDescs)
          [ "help"
          , "update"
          , "install"
          , "fetch"
          , "list"
          , "info"
          , "user-config"
          , "get"
          , "init"
          , "configure"
          , "reconfigure"
          , "build"
          , "clean"
          , "run"
          , "repl"
          , "test"
          , "bench"
          , "check"
          , "sdist"
          , "upload"
          , "report"
          , "freeze"
          , "gen-bounds"
          , "outdated"
          , "doctest"
          , "haddock"
          , "hscolour"
          , "copy"
          , "register"
          , "sandbox"
          , "exec"
          , "new-build"
          , "new-configure"
          , "new-repl"
          , "new-freeze"
          , "new-run"
          , "new-test"
          , "new-bench"
          , "new-haddock"
          ]
        -- pad every command name to the width of the longest one
        maxlen = maximum $ [length name | (name, _) <- cmdDescs]
        align str = str ++ replicate (maxlen - length str) ' '
        startGroup n = " ["++n++"]"
        par = ""
        addCmd n = case lookup n cmdDescs of
          Nothing -> ""
          Just d -> " " ++ align n ++ " " ++ d
        addCmdCustom n d = case lookup n cmdDescs of -- make sure that the
                                                     -- command still exists.
          Nothing -> ""
          Just _ -> " " ++ align n ++ " " ++ d
      in
        "Commands:\n"
        ++ unlines (
          [ startGroup "global"
          , addCmd "update"
          , addCmd "install"
          , par
          , addCmd "help"
          , addCmd "info"
          , addCmd "list"
          , addCmd "fetch"
          , addCmd "user-config"
          , par
          , startGroup "package"
          , addCmd "get"
          , addCmd "init"
          , par
          , addCmd "configure"
          , addCmd "build"
          , addCmd "clean"
          , par
          , addCmd "run"
          , addCmd "repl"
          , addCmd "test"
          , addCmd "bench"
          , par
          , addCmd "check"
          , addCmd "sdist"
          , addCmd "upload"
          , addCmd "report"
          , par
          , addCmd "freeze"
          , addCmd "gen-bounds"
          , addCmd "outdated"
          , addCmd "doctest"
          , addCmd "haddock"
          , addCmd "hscolour"
          , addCmd "copy"
          , addCmd "register"
          , addCmd "reconfigure"
          , par
          , startGroup "sandbox"
          , addCmd "sandbox"
          , addCmd "exec"
          , addCmdCustom "repl" "Open interpreter with access to sandbox packages."
          , par
          , startGroup "new-style projects (beta)"
          , addCmd "new-build"
          , addCmd "new-configure"
          , addCmd "new-repl"
          , addCmd "new-run"
          , addCmd "new-test"
          , addCmd "new-bench"
          , addCmd "new-freeze"
          , addCmd "new-haddock"
          ] ++ if null otherCmds then [] else par
              :startGroup "other"
              :[addCmd n | n <- otherCmds])
        ++ "\n"
        ++ "For more information about a command use:\n"
        ++ " " ++ pname ++ " COMMAND --help\n"
        ++ "or " ++ pname ++ " help COMMAND\n"
        ++ "\n"
        ++ "To install Cabal packages from hackage use:\n"
        ++ " " ++ pname ++ " install foo [--dry-run]\n"
        ++ "\n"
        ++ "Occasionally you need to update the list of available packages:\n"
        ++ " " ++ pname ++ " update\n",
    commandNotes = Nothing,
    commandDefaultFlags = mempty,
    commandOptions = args
  }
  where
    -- In --help we only show the common options; when actually parsing we
    -- also accept the hidden ones below.
    args :: ShowOrParseArgs -> [OptionField GlobalFlags]
    args ShowArgs  = argsShown
    args ParseArgs = argsShown ++ argsNotShown
    -- arguments we want to show in the help
    argsShown :: [OptionField GlobalFlags]
    argsShown = [
        option ['V'] ["version"]
          "Print version information"
          globalVersion (\v flags -> flags { globalVersion = v })
          trueArg
       ,option [] ["numeric-version"]
          "Print just the version number"
          globalNumericVersion (\v flags -> flags { globalNumericVersion = v })
          trueArg
       ,option [] ["config-file"]
          "Set an alternate location for the config file"
          globalConfigFile (\v flags -> flags { globalConfigFile = v })
          (reqArgFlag "FILE")
       ,option [] ["sandbox-config-file"]
          "Set an alternate location for the sandbox config file (default: './cabal.sandbox.config')"
          globalSandboxConfigFile (\v flags -> flags { globalSandboxConfigFile = v })
          (reqArgFlag "FILE")
       ,option [] ["default-user-config"]
          "Set a location for a cabal.config file for projects without their own cabal.config freeze file."
          globalConstraintsFile (\v flags -> flags {globalConstraintsFile = v})
          (reqArgFlag "FILE")
       ,option [] ["require-sandbox"]
          "requiring the presence of a sandbox for sandbox-aware commands"
          globalRequireSandbox (\v flags -> flags { globalRequireSandbox = v })
          (boolOpt' ([], ["require-sandbox"]) ([], ["no-require-sandbox"]))
       ,option [] ["ignore-sandbox"]
          "Ignore any existing sandbox"
          globalIgnoreSandbox (\v flags -> flags { globalIgnoreSandbox = v })
          trueArg
       ,option [] ["ignore-expiry"]
          "Ignore expiry dates on signed metadata (use only in exceptional circumstances)"
          globalIgnoreExpiry (\v flags -> flags { globalIgnoreExpiry = v })
          trueArg
       ,option [] ["http-transport"]
          "Set a transport for http(s) requests. Accepts 'curl', 'wget', 'powershell', and 'plain-http'. (default: 'curl')"
          globalHttpTransport (\v flags -> flags { globalHttpTransport = v })
          (reqArgFlag "HttpTransport")
       ,option [] ["nix"]
          "Nix integration: run commands through nix-shell if a 'shell.nix' file exists"
          globalNix (\v flags -> flags { globalNix = v })
          (boolOpt [] [])
       ]
    -- arguments we don't want shown in the help
    argsNotShown :: [OptionField GlobalFlags]
    argsNotShown = [
        option [] ["remote-repo"]
          "The name and url for a remote repository"
          globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v })
          (reqArg' "NAME:URL" (toNubList . maybeToList . readRepo) (map showRepo . fromNubList))
       ,option [] ["remote-repo-cache"]
          "The location where downloads from all remote repos are cached"
          globalCacheDir (\v flags -> flags { globalCacheDir = v })
          (reqArgFlag "DIR")
       ,option [] ["local-repo"]
          "The location of a local repository"
          globalLocalRepos (\v flags -> flags { globalLocalRepos = v })
          (reqArg' "DIR" (\x -> toNubList [x]) fromNubList)
       ,option [] ["logs-dir"]
          "The location to put log files"
          globalLogsDir (\v flags -> flags { globalLogsDir = v })
          (reqArgFlag "DIR")
       ,option [] ["world-file"]
          "The location of the world file"
          globalWorldFile (\v flags -> flags { globalWorldFile = v })
          (reqArgFlag "FILE")
       ]
-- ------------------------------------------------------------
-- * Config flags
-- ------------------------------------------------------------
-- | cabal's @configure@ command: the stock Cabal configure command with
--   cabal-install specific usage examples appended to the notes.
configureCommand :: CommandUI ConfigFlags
configureCommand = c
  { commandDefaultFlags = mempty
  , commandNotes = Just $ \pname -> (case commandNotes c of
       Nothing -> ""
       Just n  -> n pname ++ "\n")
       ++ "Examples:\n"
       ++ " " ++ pname ++ " configure\n"
       ++ " Configure with defaults;\n"
       ++ " " ++ pname ++ " configure --enable-tests -fcustomflag\n"
       ++ " Configure building package including tests,\n"
       ++ " with some package-specific flag.\n"
  }
  where
    -- reuse the Cabal library's configure command as the base
    c = Cabal.configureCommand defaultProgramDb
-- | The option fields of the @configure@ command (delegates to
--   'configureCommand').
configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags]
configureOptions = commandOptions configureCommand
-- | Given some 'ConfigFlags' for the version of Cabal that
-- cabal-install was built with, and a target older 'Version' of
-- Cabal that we want to pass these flags to, convert the
-- flags into a form that will be accepted by the older
-- Setup script. Generally speaking, this just means filtering
-- out flags that the old Cabal library doesn't understand, but
-- in some cases it may also mean "emulating" a feature using
-- some more legacy flags.
filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags
filterConfigureFlags flags cabalLibVersion
  -- NB: we expect the latest version to be the most common case,
  -- so test it first.
  | cabalLibVersion >= mkVersion [2,1,0]  = flags_latest
  -- The naming convention is that flags_version gives flags with
  -- all flags *introduced* in version eliminated.
  -- It is NOT the latest version of Cabal library that
  -- these flags work for; version of introduction is a more
  -- natural metric.
  | cabalLibVersion < mkVersion [1,3,10]  = flags_1_3_10
  | cabalLibVersion < mkVersion [1,10,0]  = flags_1_10_0
  | cabalLibVersion < mkVersion [1,12,0]  = flags_1_12_0
  | cabalLibVersion < mkVersion [1,14,0]  = flags_1_14_0
  | cabalLibVersion < mkVersion [1,18,0]  = flags_1_18_0
  | cabalLibVersion < mkVersion [1,19,1]  = flags_1_19_1
  | cabalLibVersion < mkVersion [1,19,2]  = flags_1_19_2
  | cabalLibVersion < mkVersion [1,21,1]  = flags_1_21_1
  | cabalLibVersion < mkVersion [1,22,0]  = flags_1_22_0
  | cabalLibVersion < mkVersion [1,23,0]  = flags_1_23_0
  | cabalLibVersion < mkVersion [1,25,0]  = flags_1_25_0
  | cabalLibVersion < mkVersion [2,1,0]   = flags_2_1_0
  | otherwise = flags_latest
  where
    -- Note: each flags_X_Y_Z below is built from the next-newer one, so the
    -- downgrades accumulate as we go further back in Cabal history.
    flags_latest = flags {
      -- Cabal >= 1.19.1 uses '--dependency' and does not need '--constraint'.
      configConstraints = []
      }
    flags_2_1_0 = flags_latest {
      -- Cabal < 2.1 doesn't know about -v +timestamp modifier
      configVerbosity = fmap verboseNoTimestamp (configVerbosity flags_latest)
      -- Cabal < 2.1 doesn't know about --<enable|disable>-static
      , configStaticLib = NoFlag
      }
    flags_1_25_0 = flags_2_1_0 {
      -- Cabal < 1.25.0 doesn't know about --dynlibdir.
      configInstallDirs = configInstallDirs_1_25_0,
      -- Cabal < 1.25 doesn't have extended verbosity syntax
      configVerbosity = fmap verboseNoFlags (configVerbosity flags_2_1_0),
      -- Cabal < 1.25 doesn't support --deterministic
      configDeterministic = mempty
      }
    configInstallDirs_1_25_0 = let dirs = configInstallDirs flags in
      dirs { dynlibdir = NoFlag
           , libexecsubdir = NoFlag
           , libexecdir = maybeToFlag $
               combinePathTemplate <$> flagToMaybe (libexecdir dirs)
                                   <*> flagToMaybe (libexecsubdir dirs)
           }
    -- Cabal < 1.23 doesn't know about '--profiling-detail'.
    -- Cabal < 1.23 has a hacked up version of 'enable-profiling'
    -- which we shouldn't use.
    (tryLibProfiling, tryExeProfiling) = computeEffectiveProfiling flags
    flags_1_23_0 = flags_1_25_0 { configProfDetail = NoFlag
                                , configProfLibDetail = NoFlag
                                , configIPID = NoFlag
                                , configProf = NoFlag
                                , configProfExe = Flag tryExeProfiling
                                , configProfLib = Flag tryLibProfiling
                                }
    -- Cabal < 1.22 doesn't know about '--disable-debug-info'.
    flags_1_22_0 = flags_1_23_0 { configDebugInfo = NoFlag }
    -- Cabal < 1.21.1 doesn't know about 'disable-relocatable'
    -- Cabal < 1.21.1 doesn't know about 'enable-profiling'
    -- (but we already dealt with it in flags_1_23_0)
    flags_1_21_1 =
      flags_1_22_0 { configRelocatable = NoFlag
                   , configCoverage = NoFlag
                   , configLibCoverage = configCoverage flags
                   }
    -- Cabal < 1.19.2 doesn't know about '--exact-configuration' and
    -- '--enable-library-stripping'.
    flags_1_19_2 = flags_1_21_1 { configExactConfiguration = NoFlag
                                , configStripLibs = NoFlag }
    -- Cabal < 1.19.1 uses '--constraint' instead of '--dependency'.
    flags_1_19_1 = flags_1_19_2 { configDependencies = []
                                , configConstraints = configConstraints flags }
    -- Cabal < 1.18.0 doesn't know about --extra-prog-path and --sysconfdir.
    flags_1_18_0 = flags_1_19_1 { configProgramPathExtra = toNubList []
                                , configInstallDirs = configInstallDirs_1_18_0}
    configInstallDirs_1_18_0 = (configInstallDirs flags_1_19_1) { sysconfdir = NoFlag }
    -- Cabal < 1.14.0 doesn't know about '--disable-benchmarks'.
    flags_1_14_0 = flags_1_18_0 { configBenchmarks = NoFlag }
    -- Cabal < 1.12.0 doesn't know about '--enable/disable-executable-dynamic'
    -- and '--enable/disable-library-coverage'.
    flags_1_12_0 = flags_1_14_0 { configLibCoverage = NoFlag
                                , configDynExe = NoFlag }
    -- Cabal < 1.10.0 doesn't know about '--disable-tests'.
    flags_1_10_0 = flags_1_12_0 { configTests = NoFlag }
    -- Cabal < 1.3.10 does not grok the '--constraints' flag.
    flags_1_3_10 = flags_1_10_0 { configConstraints = [] }
-- | Get the package database settings from 'ConfigFlags', accounting for
-- @--package-db@ and @--user@ flags.
-- | Compute the effective package-db stack from the config flags,
--   accounting for @--package-db@ and the user\/global install choice.
configPackageDB' :: ConfigFlags -> PackageDBStack
configPackageDB' cfg =
    let -- user install is the default unless explicitly disabled
        perUser = Cabal.fromFlagOrDefault True (configUserInstall cfg)
    in  interpretPackageDbFlags perUser (configPackageDBs cfg)
-- | Configure the compiler, but reduce verbosity during this step.
-- | Configure the compiler, but reduce verbosity during this step.
--   (The record update applies to the flags argument, not the result.)
configCompilerAux' :: ConfigFlags -> IO (Compiler, Platform, ProgramDb)
configCompilerAux' configFlags = configCompilerAuxEx quieter
  where
    --FIXME: make configCompilerAux use a sensible verbosity
    quieter = configFlags
      { configVerbosity = fmap lessVerbose (configVerbosity configFlags) }
-- ------------------------------------------------------------
-- * Config extra flags
-- ------------------------------------------------------------
-- | cabal configure takes some extra flags beyond runghc Setup configure
--
-- | Extra configure flags understood by cabal-install itself (beyond what
--   @runghc Setup configure@ accepts).
data ConfigExFlags = ConfigExFlags {
    configCabalVersion :: Flag Version,  -- ^ which Cabal lib version to build with
    configExConstraints:: [(UserConstraint, ConstraintSource)],  -- ^ constraints plus where they came from
    configPreferences  :: [Dependency],  -- ^ soft version preferences
    configSolver       :: Flag PreSolver,  -- ^ dependency solver selection
    configAllowNewer   :: Maybe AllowNewer,  -- ^ relax upper version bounds
    configAllowOlder   :: Maybe AllowOlder   -- ^ relax lower version bounds
  }
  deriving (Eq, Generic)
-- | Default extra configure flags: everything unset except the solver.
defaultConfigExFlags :: ConfigExFlags
defaultConfigExFlags = mempty { configSolver = Flag defaultSolver }
-- | The @configure@ command extended with cabal-install's extra flags.
--   Options handled by the solver ("constraint", "dependency",
--   "exact-configuration") are filtered out of the plain Cabal set so they
--   are only accepted via the extended options.
configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags)
configureExCommand = configureCommand {
    commandDefaultFlags = (mempty, defaultConfigExFlags),
    commandOptions = \showOrParseArgs ->
         liftOptions fst setFst
         (filter ((`notElem` ["constraint", "dependency", "exact-configuration"])
                  . optionName) $ configureOptions showOrParseArgs)
      ++ liftOptions snd setSnd
         (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag)
  }
  where
    setFst a (_,b) = (a,b)
    setSnd b (a,_) = (a,b)
-- | cabal-install's extra configure options.  @src@ records where any
--   @--constraint@ came from (command line vs. a config file).
configureExOptions :: ShowOrParseArgs
                   -> ConstraintSource
                   -> [OptionField ConfigExFlags]
configureExOptions _showOrParseArgs src =
  [ option [] ["cabal-lib-version"]
    ("Select which version of the Cabal lib to use to build packages "
     ++ "(useful for testing).")
    configCabalVersion (\v flags -> flags { configCabalVersion = v })
    (reqArg "VERSION" (readP_to_E ("Cannot parse cabal lib version: "++)
                                  (fmap toFlag parse))
                      (map display . flagToList))
  , option [] ["constraint"]
    "Specify constraints on a package (version, installed/source, flags)"
    configExConstraints (\v flags -> flags { configExConstraints = v })
    (reqArg "CONSTRAINT"
            ((\x -> [(x, src)]) `fmap` ReadE readUserConstraint)
            (map $ display . fst))
  , option [] ["preference"]
    "Specify preferences (soft constraints) on the version of a package"
    configPreferences (\v flags -> flags { configPreferences = v })
    (reqArg "CONSTRAINT"
            (readP_to_E (const "dependency expected")
                        (fmap (\x -> [x]) parse))
            (map display))
  , optionSolver configSolver (\v flags -> flags { configSolver = v })
  , option [] ["allow-older"]
    -- BUG FIX: --allow-older relaxes *lower* bounds; the previous text said
    -- "upper bounds", copy-pasted from the --allow-newer option below.
    ("Ignore lower bounds in all dependencies or DEPS")
    (fmap unAllowOlder . configAllowOlder)
    (\v flags -> flags { configAllowOlder = fmap AllowOlder v})
    (optArg "DEPS"
            (readP_to_E ("Cannot parse the list of packages: " ++) relaxDepsParser)
            (Just RelaxDepsAll) relaxDepsPrinter)
  , option [] ["allow-newer"]
    ("Ignore upper bounds in all dependencies or DEPS")
    (fmap unAllowNewer . configAllowNewer)
    (\v flags -> flags { configAllowNewer = fmap AllowNewer v})
    (optArg "DEPS"
            (readP_to_E ("Cannot parse the list of packages: " ++) relaxDepsParser)
            (Just RelaxDepsAll) relaxDepsPrinter)
  ]
-- | Parse a comma-separated, non-empty list of package names into a
--   'RelaxDepsSome' value.
relaxDepsParser :: Parse.ReadP r (Maybe RelaxDeps)
relaxDepsParser =
    fmap (Just . RelaxDepsSome) (Parse.sepBy1 parse (Parse.char ','))
-- | Render a 'RelaxDeps' setting back into optional command-line arguments:
--   @[]@ for none, @[Nothing]@ for the bare flag, one 'Just' per package.
relaxDepsPrinter :: (Maybe RelaxDeps) -> [Maybe String]
relaxDepsPrinter md = case md of
    Nothing                   -> []
    Just RelaxDepsNone        -> []
    Just RelaxDepsAll         -> [Nothing]
    Just (RelaxDepsSome pkgs) -> map (Just . display) pkgs
-- Field-wise merging derived from the Generic instance.
instance Monoid ConfigExFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup ConfigExFlags where
  (<>) = gmappend
-- | The @reconfigure@ command: same options as extended configure, but
--   re-runs configuration with the most recently used flags.
reconfigureCommand :: CommandUI (ConfigFlags, ConfigExFlags)
reconfigureCommand
  = configureExCommand
    { commandName         = "reconfigure"
    , commandSynopsis     = "Reconfigure the package if necessary."
    , commandDescription  = Just $ \pname -> wrapText $
         "Run `configure` with the most recently used flags, or append FLAGS "
         ++ "to the most recently used configuration. "
         ++ "Accepts the same flags as `" ++ pname ++ " configure'. "
         ++ "If the package has never been configured, the default flags are "
         ++ "used."
    , commandNotes        = Just $ \pname ->
         "Examples:\n"
         ++ " " ++ pname ++ " reconfigure\n"
         ++ " Configure with the most recently used flags.\n"
         ++ " " ++ pname ++ " reconfigure -w PATH\n"
         ++ " Reconfigure with the most recently used flags,\n"
         ++ " but use the compiler at PATH.\n\n"
    , commandUsage        = usageAlternatives "reconfigure" [ "[FLAGS]" ]
    , commandDefaultFlags = mempty
    }
-- ------------------------------------------------------------
-- * Build flags
-- ------------------------------------------------------------
-- | Whether @build@ should skip checking if add-source dependencies need
--   reinstalling first (sandbox feature; set by @--only@).
data SkipAddSourceDepsCheck =
  SkipAddSourceDepsCheck | DontSkipAddSourceDepsCheck
  deriving Eq
-- | Extra flags for @build@ beyond the plain Cabal 'BuildFlags'.
data BuildExFlags = BuildExFlags {
  buildOnly :: Flag SkipAddSourceDepsCheck  -- ^ set by @--only@
} deriving Generic
-- | The (single) extra option of the @build@ command: @--only@, which skips
--   reinstalling add-source dependencies in a sandbox.
buildExOptions :: ShowOrParseArgs -> [OptionField BuildExFlags]
buildExOptions _showOrParseArgs =
    [ option [] ["only"]
        "Don't reinstall add-source dependencies (sandbox-only)"
        buildOnly (\v flags -> flags { buildOnly = v })
        (noArg (Flag SkipAddSourceDepsCheck))
    ]
-- | The @build@ command: Cabal's build command paired with the extra
--   sandbox-related options.
buildCommand :: CommandUI (BuildFlags, BuildExFlags)
buildCommand = parent {
    commandDefaultFlags = (commandDefaultFlags parent, mempty),
    commandOptions =
      \showOrParseArgs -> liftOptions fst setFst
                          (commandOptions parent showOrParseArgs)
                          ++
                          liftOptions snd setSnd (buildExOptions showOrParseArgs)
  }
  where
    setFst a (_,b) = (a,b)
    setSnd b (a,_) = (a,b)
    parent = Cabal.buildCommand defaultProgramDb
-- Field-wise merging derived from the Generic instance.
instance Monoid BuildExFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup BuildExFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Repl command
-- ------------------------------------------------------------
-- | The @repl@ command: Cabal's repl command paired with the extra
--   sandbox-related options.
replCommand :: CommandUI (ReplFlags, BuildExFlags)
replCommand = parent {
    commandDefaultFlags = (commandDefaultFlags parent, mempty),
    commandOptions =
      \showOrParseArgs -> liftOptions fst setFst
                          (commandOptions parent showOrParseArgs)
                          ++
                          liftOptions snd setSnd (buildExOptions showOrParseArgs)
  }
  where
    setFst a (_,b) = (a,b)
    setSnd b (a,_) = (a,b)
    parent = Cabal.replCommand defaultProgramDb
-- ------------------------------------------------------------
-- * Test command
-- ------------------------------------------------------------
-- | The @test@ command: Cabal's test flags combined with build flags and the
--   extra sandbox-related options, lifted into a triple.
testCommand :: CommandUI (TestFlags, BuildFlags, BuildExFlags)
testCommand = parent {
    commandDefaultFlags = (commandDefaultFlags parent,
                           Cabal.defaultBuildFlags, mempty),
    commandOptions =
      \showOrParseArgs -> liftOptions get1 set1
                          (commandOptions parent showOrParseArgs)
                          ++
                          liftOptions get2 set2
                          (Cabal.buildOptions progDb showOrParseArgs)
                          ++
                          liftOptions get3 set3 (buildExOptions showOrParseArgs)
  }
  where
    -- lenses into the triple for liftOptions
    get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c)
    get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c)
    get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c)
    parent = Cabal.testCommand
    progDb = defaultProgramDb
-- ------------------------------------------------------------
-- * Bench command
-- ------------------------------------------------------------
-- | The @bench@ command: Cabal's benchmark flags combined with build flags
--   and the extra sandbox-related options, lifted into a triple.
benchmarkCommand :: CommandUI (BenchmarkFlags, BuildFlags, BuildExFlags)
benchmarkCommand = parent {
    commandDefaultFlags = (commandDefaultFlags parent,
                           Cabal.defaultBuildFlags, mempty),
    commandOptions =
      \showOrParseArgs -> liftOptions get1 set1
                          (commandOptions parent showOrParseArgs)
                          ++
                          liftOptions get2 set2
                          (Cabal.buildOptions progDb showOrParseArgs)
                          ++
                          liftOptions get3 set3 (buildExOptions showOrParseArgs)
  }
  where
    -- lenses into the triple for liftOptions
    get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c)
    get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c)
    get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c)
    parent = Cabal.benchmarkCommand
    progDb = defaultProgramDb
-- ------------------------------------------------------------
-- * Fetch command
-- ------------------------------------------------------------
-- | Flags for the @fetch@ command (download packages without installing).
data FetchFlags = FetchFlags {
--  fetchOutput :: Flag FilePath,
    fetchDeps :: Flag Bool,             -- ^ also fetch dependencies?
    fetchDryRun :: Flag Bool,           -- ^ only print what would be fetched
    fetchSolver :: Flag PreSolver,      -- ^ solver options below mirror install's
    fetchMaxBackjumps :: Flag Int,
    fetchReorderGoals :: Flag ReorderGoals,
    fetchCountConflicts :: Flag CountConflicts,
    fetchIndependentGoals :: Flag IndependentGoals,
    fetchShadowPkgs :: Flag ShadowPkgs,
    fetchStrongFlags :: Flag StrongFlags,
    fetchAllowBootLibInstalls :: Flag AllowBootLibInstalls,
    fetchVerbosity :: Flag Verbosity
  }
-- | Defaults for @fetch@: fetch dependencies too, default solver settings.
defaultFetchFlags :: FetchFlags
defaultFetchFlags = FetchFlags {
--  fetchOutput  = mempty,
    fetchDeps    = toFlag True,
    fetchDryRun  = toFlag False,
    fetchSolver           = Flag defaultSolver,
    fetchMaxBackjumps     = Flag defaultMaxBackjumps,
    fetchReorderGoals     = Flag (ReorderGoals False),
    fetchCountConflicts   = Flag (CountConflicts True),
    fetchIndependentGoals = Flag (IndependentGoals False),
    fetchShadowPkgs       = Flag (ShadowPkgs False),
    fetchStrongFlags      = Flag (StrongFlags False),
    fetchAllowBootLibInstalls = Flag (AllowBootLibInstalls False),
    fetchVerbosity = toFlag normal
  }
-- | The @fetch@ command UI: download packages (and optionally their
--   dependencies) for later installation.
fetchCommand :: CommandUI FetchFlags
fetchCommand = CommandUI {
    commandName         = "fetch",
    commandSynopsis     = "Downloads packages for later installation.",
    commandUsage        = usageAlternatives "fetch" [ "[FLAGS] PACKAGES"
                                                    ],
    commandDescription  = Just $ \_ ->
          "Note that it currently is not possible to fetch the dependencies for a\n"
       ++ "package in the current directory.\n",
    commandNotes        = Nothing,
    commandDefaultFlags = defaultFetchFlags,
    commandOptions      = \ showOrParseArgs -> [
         optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v })

--     , option "o" ["output"]
--         "Put the package(s) somewhere specific rather than the usual cache."
--         fetchOutput (\v flags -> flags { fetchOutput = v })
--         (reqArgFlag "PATH")

       , option [] ["dependencies", "deps"]
         "Resolve and fetch dependencies (default)"
         fetchDeps (\v flags -> flags { fetchDeps = v })
         trueArg

       , option [] ["no-dependencies", "no-deps"]
         "Ignore dependencies"
         fetchDeps (\v flags -> flags { fetchDeps = v })
         falseArg

       , option [] ["dry-run"]
         "Do not install anything, only print what would be installed."
         fetchDryRun (\v flags -> flags { fetchDryRun = v })
         trueArg

       ] ++

       -- shared solver options, same set as install's
       optionSolver      fetchSolver           (\v flags -> flags { fetchSolver           = v }) :
       optionSolverFlags showOrParseArgs
                         fetchMaxBackjumps     (\v flags -> flags { fetchMaxBackjumps     = v })
                         fetchReorderGoals     (\v flags -> flags { fetchReorderGoals     = v })
                         fetchCountConflicts   (\v flags -> flags { fetchCountConflicts   = v })
                         fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v })
                         fetchShadowPkgs       (\v flags -> flags { fetchShadowPkgs       = v })
                         fetchStrongFlags      (\v flags -> flags { fetchStrongFlags      = v })
                         fetchAllowBootLibInstalls (\v flags -> flags { fetchAllowBootLibInstalls = v })
  }
-- ------------------------------------------------------------
-- * Freeze command
-- ------------------------------------------------------------
-- | Flags for the @freeze@ (and @gen-bounds@) commands.
data FreezeFlags = FreezeFlags {
    freezeDryRun :: Flag Bool,          -- ^ only print, don't write cabal.config
    freezeTests :: Flag Bool,           -- ^ include test-suite dependencies
    freezeBenchmarks :: Flag Bool,      -- ^ include benchmark dependencies
    freezeSolver :: Flag PreSolver,     -- ^ solver options below mirror install's
    freezeMaxBackjumps :: Flag Int,
    freezeReorderGoals :: Flag ReorderGoals,
    freezeCountConflicts :: Flag CountConflicts,
    freezeIndependentGoals :: Flag IndependentGoals,
    freezeShadowPkgs :: Flag ShadowPkgs,
    freezeStrongFlags :: Flag StrongFlags,
    freezeAllowBootLibInstalls :: Flag AllowBootLibInstalls,
    freezeVerbosity :: Flag Verbosity
  }
-- | Defaults for @freeze@: exclude tests and benchmarks, default solver.
defaultFreezeFlags :: FreezeFlags
defaultFreezeFlags = FreezeFlags {
    freezeDryRun     = toFlag False,
    freezeTests      = toFlag False,
    freezeBenchmarks = toFlag False,
    freezeSolver           = Flag defaultSolver,
    freezeMaxBackjumps     = Flag defaultMaxBackjumps,
    freezeReorderGoals     = Flag (ReorderGoals False),
    freezeCountConflicts   = Flag (CountConflicts True),
    freezeIndependentGoals = Flag (IndependentGoals False),
    freezeShadowPkgs       = Flag (ShadowPkgs False),
    freezeStrongFlags      = Flag (StrongFlags False),
    freezeAllowBootLibInstalls = Flag (AllowBootLibInstalls False),
    freezeVerbosity = toFlag normal
  }
-- | The @freeze@ command UI: compute a consistent install plan and pin the
--   chosen dependency versions into @cabal.config@ so later installs reuse
--   exactly the same versions.
freezeCommand :: CommandUI FreezeFlags
freezeCommand = CommandUI {
    commandName         = "freeze",
    commandSynopsis     = "Freeze dependencies.",
    commandDescription  = Just $ \_ -> wrapText $
         "Calculates a valid set of dependencies and their exact versions. "
      ++ "If successful, saves the result to the file `cabal.config`.\n"
      ++ "\n"
      ++ "The package versions specified in `cabal.config` will be used for "
      ++ "any future installs.\n"
      ++ "\n"
      ++ "An existing `cabal.config` is ignored and overwritten.\n",
    commandNotes        = Nothing,
    commandUsage        = usageFlags "freeze",
    commandDefaultFlags = defaultFreezeFlags,
    commandOptions      = \ showOrParseArgs -> [
         optionVerbosity freezeVerbosity
         (\v flags -> flags { freezeVerbosity = v })

       , option [] ["dry-run"]
         "Do not freeze anything, only print what would be frozen"
         freezeDryRun (\v flags -> flags { freezeDryRun = v })
         trueArg

       -- typo fix: "tests suites" -> "test suites"
       , option [] ["tests"]
         ("freezing of the dependencies of any test suites "
          ++ "in the package description file.")
         freezeTests (\v flags -> flags { freezeTests = v })
         (boolOpt [] [])

       -- typo fix: "benchmarks suites" -> "benchmark suites"
       , option [] ["benchmarks"]
         ("freezing of the dependencies of any benchmark suites "
          ++ "in the package description file.")
         freezeBenchmarks (\v flags -> flags { freezeBenchmarks = v })
         (boolOpt [] [])

       ] ++

       -- shared solver options, same set as install's
       optionSolver
         freezeSolver           (\v flags -> flags { freezeSolver           = v }):
       optionSolverFlags showOrParseArgs
         freezeMaxBackjumps     (\v flags -> flags { freezeMaxBackjumps     = v })
         freezeReorderGoals     (\v flags -> flags { freezeReorderGoals     = v })
         freezeCountConflicts   (\v flags -> flags { freezeCountConflicts   = v })
         freezeIndependentGoals (\v flags -> flags { freezeIndependentGoals = v })
         freezeShadowPkgs       (\v flags -> flags { freezeShadowPkgs       = v })
         freezeStrongFlags      (\v flags -> flags { freezeStrongFlags      = v })
         freezeAllowBootLibInstalls (\v flags -> flags { freezeAllowBootLibInstalls = v })
  }
-- ------------------------------------------------------------
-- * 'gen-bounds' command
-- ------------------------------------------------------------
-- | The @gen-bounds@ command UI: suggest version bounds for dependencies
--   that currently have none (reuses 'FreezeFlags').
genBoundsCommand :: CommandUI FreezeFlags
genBoundsCommand = CommandUI {
    commandName         = "gen-bounds",
    commandSynopsis     = "Generate dependency bounds.",
    commandDescription  = Just $ \_ -> wrapText $
         "Generates bounds for all dependencies that do not currently have them. "
      ++ "Generated bounds are printed to stdout. "
      ++ "You can then paste them into your .cabal file.\n"
      ++ "\n",
    commandNotes        = Nothing,
    commandUsage        = usageFlags "gen-bounds",
    commandDefaultFlags = defaultFreezeFlags,
    commandOptions      = \ _ -> [
      optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v })
      ]
  }
-- ------------------------------------------------------------
-- * 'outdated' command
-- ------------------------------------------------------------
-- | Which packages should have major version bumps ignored by the
-- @outdated@ command: none, all, or an explicit list.
data IgnoreMajorVersionBumps = IgnoreMajorVersionBumpsNone
                             | IgnoreMajorVersionBumpsAll
                             | IgnoreMajorVersionBumpsSome [PackageName]
instance Monoid IgnoreMajorVersionBumps where
  mempty  = IgnoreMajorVersionBumpsNone
  mappend = (<>)
-- | Later values win, except that combining two explicit lists
-- concatenates them; 'IgnoreMajorVersionBumpsAll' absorbs lists.
instance Semigroup IgnoreMajorVersionBumps where
  IgnoreMajorVersionBumpsNone       <> r = r
  l@IgnoreMajorVersionBumpsAll      <> _ = l
  l@(IgnoreMajorVersionBumpsSome _) <> IgnoreMajorVersionBumpsNone = l
  (IgnoreMajorVersionBumpsSome _)   <> r@IgnoreMajorVersionBumpsAll = r
  (IgnoreMajorVersionBumpsSome a)   <> (IgnoreMajorVersionBumpsSome b) =
    IgnoreMajorVersionBumpsSome (a ++ b)
-- | Flags accepted by the @outdated@ command.
data OutdatedFlags = OutdatedFlags {
  outdatedVerbosity     :: Flag Verbosity,
  outdatedFreezeFile    :: Flag Bool,
  outdatedNewFreezeFile :: Flag Bool,
  outdatedSimpleOutput  :: Flag Bool,
  outdatedExitCode      :: Flag Bool,
  outdatedQuiet         :: Flag Bool,
  outdatedIgnore        :: [PackageName],
  outdatedMinor         :: Maybe IgnoreMajorVersionBumps
  }
-- | Defaults for 'OutdatedFlags': normal verbosity, everything else empty.
defaultOutdatedFlags :: OutdatedFlags
defaultOutdatedFlags = OutdatedFlags {
  outdatedVerbosity     = toFlag normal,
  outdatedFreezeFile    = mempty,
  outdatedNewFreezeFile = mempty,
  outdatedSimpleOutput  = mempty,
  outdatedExitCode      = mempty,
  outdatedQuiet         = mempty,
  outdatedIgnore        = mempty,
  outdatedMinor         = mempty
  }
-- | Command-line UI for the @outdated@ command: reports dependencies in
-- the package description or freeze file that have newer versions
-- available.
outdatedCommand :: CommandUI OutdatedFlags
outdatedCommand = CommandUI {
  commandName = "outdated",
  commandSynopsis = "Check for outdated dependencies",
  commandDescription  = Just $ \_ -> wrapText $
    "Checks for outdated dependencies in the package description file "
    ++ "or freeze file",
  commandNotes = Nothing,
  commandUsage = usageFlags "outdated",
  commandDefaultFlags = defaultOutdatedFlags,
  commandOptions      = \ _ -> [
    optionVerbosity outdatedVerbosity
      (\v flags -> flags { outdatedVerbosity = v })
    ,option [] ["freeze-file"]
     "Act on the freeze file"
     outdatedFreezeFile (\v flags -> flags { outdatedFreezeFile = v })
     trueArg
    ,option [] ["new-freeze-file"]
     "Act on the new-style freeze file"
     outdatedNewFreezeFile (\v flags -> flags { outdatedNewFreezeFile = v })
     trueArg
    ,option [] ["simple-output"]
     "Only print names of outdated dependencies, one per line"
     outdatedSimpleOutput (\v flags -> flags { outdatedSimpleOutput = v })
     trueArg
    ,option [] ["exit-code"]
     "Exit with non-zero when there are outdated dependencies"
     outdatedExitCode (\v flags -> flags { outdatedExitCode = v })
     trueArg
    ,option ['q'] ["quiet"]
     "Don't print any output. Implies '--exit-code' and '-v0'"
     outdatedQuiet (\v flags -> flags { outdatedQuiet = v })
     trueArg
    ,option [] ["ignore"]
     "Packages to ignore"
     outdatedIgnore (\v flags -> flags { outdatedIgnore = v })
     (reqArg "PKGS" pkgNameListParser (map display))
    ,option [] ["minor"]
     "Ignore major version bumps for these packages"
     outdatedMinor (\v flags -> flags { outdatedMinor = v })
     -- '--minor' with no argument means "ignore major bumps for all
     -- packages"; with an argument, only for the listed packages.
     (optArg "PKGS" ignoreMajorVersionBumpsParser
      (Just IgnoreMajorVersionBumpsAll) ignoreMajorVersionBumpsPrinter)
    ]
  }
  where
    -- Pretty-printer for the '--minor' argument, used when showing the
    -- flag back to the user (e.g. in saved config).
    ignoreMajorVersionBumpsPrinter :: (Maybe IgnoreMajorVersionBumps)
                                   -> [Maybe String]
    ignoreMajorVersionBumpsPrinter Nothing = []
    ignoreMajorVersionBumpsPrinter (Just IgnoreMajorVersionBumpsNone)= []
    ignoreMajorVersionBumpsPrinter (Just IgnoreMajorVersionBumpsAll) = [Nothing]
    ignoreMajorVersionBumpsPrinter (Just (IgnoreMajorVersionBumpsSome pkgs)) =
      map (Just . display) $ pkgs
    ignoreMajorVersionBumpsParser  =
      (Just . IgnoreMajorVersionBumpsSome) `fmap` pkgNameListParser
    -- Parses a comma-separated, non-empty list of package names.
    pkgNameListParser = readP_to_E
      ("Couldn't parse the list of package names: " ++)
      (Parse.sepBy1 parse (Parse.char ','))
-- ------------------------------------------------------------
-- * Update command
-- ------------------------------------------------------------
-- | Flags accepted by the @update@ command.
data UpdateFlags
    = UpdateFlags {
        updateVerbosity  :: Flag Verbosity,
        -- ^ Index state to update to; defaults to HEAD.
        updateIndexState :: Flag IndexState
    } deriving Generic
-- | Defaults for 'UpdateFlags': normal verbosity, HEAD index state.
defaultUpdateFlags :: UpdateFlags
defaultUpdateFlags
    = UpdateFlags {
        updateVerbosity  = toFlag normal,
        updateIndexState = toFlag IndexStateHead
    }
-- | Command-line UI for the @update@ command: downloads the package
-- list from all known remote repositories.
updateCommand  :: CommandUI UpdateFlags
updateCommand = CommandUI {
    commandName         = "update",
    commandSynopsis     = "Updates list of known packages.",
    commandDescription  = Just $ \_ ->
      "For all known remote repositories, download the package list.\n",
    commandNotes        = Just $ \_ ->
      relevantConfigValuesText ["remote-repo"
                               ,"remote-repo-cache"
                               ,"local-repo"],
    commandUsage        = usageFlags "update",
    commandDefaultFlags = defaultUpdateFlags,
    commandOptions      = \_ -> [
        optionVerbosity updateVerbosity (\v flags -> flags { updateVerbosity = v }),
        option [] ["index-state"]
          ("Update the source package index to its state as it existed at a previous time. " ++
           "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++
           "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD').")
          updateIndexState (\v flags -> flags { updateIndexState = v })
          (reqArg "STATE" (readP_to_E (const $ "index-state must be a  " ++
                                       "unix-timestamps (e.g. '@1474732068'), " ++
                                       "a ISO8601 UTC timestamp " ++
                                       "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'")
                                      (toFlag `fmap` parse))
                          (flagToList . fmap display))
    ]
  }
-- ------------------------------------------------------------
-- * Other commands
-- ------------------------------------------------------------
-- | Command-line UI for the (disabled) @upgrade@ command.  Kept so the
-- command name still parses, but it only tells users to use @install@.
-- Shares its flag types and options with 'installCommand'.
upgradeCommand  :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
upgradeCommand = configureCommand {
    commandName         = "upgrade",
    commandSynopsis     = "(command disabled, use install instead)",
    commandDescription  = Nothing,
    commandUsage        = usageFlagsOrPackages "upgrade",
    commandDefaultFlags = (mempty, mempty, mempty, mempty),
    commandOptions      = commandOptions installCommand
  }
{-
cleanCommand :: CommandUI ()
cleanCommand = makeCommand name shortDesc longDesc emptyFlags options
where
name = "clean"
shortDesc = "Removes downloaded files"
longDesc = Nothing
emptyFlags = ()
options _ = []
-}
-- | Command-line UI for the @check@ command: lints the .cabal file in
-- the current directory against Hackage's acceptance requirements.
checkCommand  :: CommandUI (Flag Verbosity)
checkCommand = CommandUI {
    commandName         = "check",
    commandSynopsis     = "Check the package for common mistakes.",
    commandDescription  = Just $ \_ -> wrapText $
         "Expects a .cabal package file in the current directory.\n"
      ++ "\n"
      ++ "The checks correspond to the requirements to packages on Hackage. "
      ++ "If no errors and warnings are reported, Hackage will accept this "
      ++ "package.\n",
    commandNotes        = Nothing,
    commandUsage        = \pname -> "Usage: " ++ pname ++ " check\n",
    commandDefaultFlags = toFlag normal,
    commandOptions      = \_ -> []
  }
-- | Command-line UI for the @format@ command: rewrites the .cabal file
-- in the standard style.
formatCommand  :: CommandUI (Flag Verbosity)
formatCommand = CommandUI {
    commandName         = "format",
    commandSynopsis     = "Reformat the .cabal file using the standard style.",
    commandDescription  = Nothing,
    commandNotes        = Nothing,
    commandUsage        = usageAlternatives "format" ["[FILE]"],
    commandDefaultFlags = toFlag normal,
    commandOptions      = \_ -> []
  }
-- | Command-line UI for the @uninstall@ stub: exists only to warn that
-- uninstalling is not implemented.
uninstallCommand  :: CommandUI (Flag Verbosity)
uninstallCommand = CommandUI {
    commandName         = "uninstall",
    commandSynopsis     = "Warn about 'uninstall' not being implemented.",
    commandDescription  = Nothing,
    commandNotes        = Nothing,
    commandUsage        = usageAlternatives "uninstall" ["PACKAGES"],
    commandDefaultFlags = toFlag normal,
    commandOptions      = \_ -> []
  }
-- | Command-line UI for the @manpage@ command: writes manpage source to
-- stdout.
manpageCommand :: CommandUI (Flag Verbosity)
manpageCommand = CommandUI {
    commandName         = "manpage",
    commandSynopsis     = "Outputs manpage source.",
    commandDescription  = Just $ \_ ->
      "Output manpage source to STDOUT.\n",
    commandNotes        = Nothing,
    commandUsage        = usageFlags "manpage",
    commandDefaultFlags = toFlag normal,
    -- The flags type here is @Flag Verbosity@ itself, so the getter is
    -- 'id' and the setter ('const') replaces the whole flags value.
    commandOptions      = \_ -> [optionVerbosity id const]
  }
-- | Command-line UI for the @run@ command: builds and then runs an
-- executable from the package.  Options are the Cabal @build@ options
-- plus the cabal-install build extras, lifted into a pair.
runCommand :: CommandUI (BuildFlags, BuildExFlags)
runCommand = CommandUI {
    commandName         = "run",
    commandSynopsis     = "Builds and runs an executable.",
    commandDescription  = Just $ \pname -> wrapText $
         "Builds and then runs the specified executable. If no executable is "
      ++ "specified, but the package contains just one executable, that one "
      ++ "is built and executed.\n"
      ++ "\n"
      ++ "Use `" ++ pname ++ " test --show-details=streaming` to run a "
      ++ "test-suite and get its full output.\n",
    commandNotes        = Just $ \pname ->
          "Examples:\n"
       ++ "  " ++ pname ++ " run\n"
       ++ "    Run the only executable in the current package;\n"
       ++ "  " ++ pname ++ " run foo -- --fooflag\n"
       ++ "    Works similar to `./foo --fooflag`.\n",
    commandUsage        = usageAlternatives "run"
        ["[FLAGS] [EXECUTABLE] [-- EXECUTABLE_FLAGS]"],
    commandDefaultFlags = mempty,
    -- Combine Cabal's build options (acting on 'fst') with the extra
    -- cabal-install build options (acting on 'snd').
    commandOptions      =
      \showOrParseArgs -> liftOptions fst setFst
                          (commandOptions parent showOrParseArgs)
                          ++
                          liftOptions snd setSnd
                          (buildExOptions showOrParseArgs)
  }
 where
  setFst a (_,b) = (a,b)
  setSnd b (a,_) = (a,b)
  parent = Cabal.buildCommand defaultProgramDb
-- ------------------------------------------------------------
-- * Report flags
-- ------------------------------------------------------------
-- | Flags accepted by the @report@ command (uploading build reports).
data ReportFlags = ReportFlags {
    reportUsername  :: Flag Username,
    reportPassword  :: Flag Password,
    reportVerbosity :: Flag Verbosity
  } deriving Generic
-- | Defaults for 'ReportFlags': no credentials, normal verbosity.
defaultReportFlags :: ReportFlags
defaultReportFlags = ReportFlags {
    reportUsername  = mempty,
    reportPassword  = mempty,
    reportVerbosity = toFlag normal
  }
-- | Command-line UI for the @report@ command: uploads build reports to
-- a remote server using the given Hackage credentials.
reportCommand :: CommandUI ReportFlags
reportCommand = CommandUI {
    commandName         = "report",
    commandSynopsis     = "Upload build reports to a remote server.",
    commandDescription  = Nothing,
    commandNotes        = Just $ \_ ->
         "You can store your Hackage login in the ~/.cabal/config file\n",
    commandUsage        = usageAlternatives "report" ["[FLAGS]"],
    commandDefaultFlags = defaultReportFlags,
    commandOptions      = \_ ->
      [optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v })

      ,option ['u'] ["username"]
        "Hackage username."
        reportUsername (\v flags -> flags { reportUsername = v })
        (reqArg' "USERNAME" (toFlag . Username)
                            (flagToList . fmap unUsername))

      ,option ['p'] ["password"]
        "Hackage password."
        reportPassword (\v flags -> flags { reportPassword = v })
        (reqArg' "PASSWORD" (toFlag . Password)
                            (flagToList . fmap unPassword))
      ]
  }
instance Monoid ReportFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup ReportFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Get flags
-- ------------------------------------------------------------
-- | Flags accepted by the @get@ (and deprecated @unpack@) command.
data GetFlags = GetFlags {
    getDestDir          :: Flag FilePath,
    -- ^ When set, unpack the original tarball rather than applying the
    -- latest .cabal revision.
    getPristine         :: Flag Bool,
    getIndexState       :: Flag IndexState,
    -- ^ @Flag Nothing@ means "use the repo kind the package specifies".
    getSourceRepository :: Flag (Maybe RepoKind),
    getVerbosity        :: Flag Verbosity
  } deriving Generic
-- | Defaults for 'GetFlags': everything empty, normal verbosity.
defaultGetFlags :: GetFlags
defaultGetFlags = GetFlags {
    getDestDir          = mempty,
    getPristine         = mempty,
    getIndexState       = mempty,
    getSourceRepository = mempty,
    getVerbosity        = toFlag normal
   }
-- | Command-line UI for the @get@ command: downloads and unpacks a
-- package's source tarball, or (with @-s@) clones its source
-- repository.
getCommand :: CommandUI GetFlags
getCommand = CommandUI {
    commandName         = "get",
    commandSynopsis     = "Download/Extract a package's source code (repository).",
    commandDescription  = Just $ \_ -> wrapText $
          "Creates a local copy of a package's source code. By default it gets "
       ++ "the source\ntarball and unpacks it in a local subdirectory. "
       ++ "Alternatively, with -s it will\nget the code from the source "
       ++ "repository specified by the package.\n",
    commandNotes        = Just $ \pname ->
          "Examples:\n"
       ++ "  " ++ pname ++ " get hlint\n"
       ++ "    Download the latest stable version of hlint;\n"
       ++ "  " ++ pname ++ " get lens --source-repository=head\n"
       ++ "    Download the source repository (i.e. git clone from github).\n",
    commandUsage        = usagePackages "get",
    commandDefaultFlags = defaultGetFlags,
    commandOptions      = \_ -> [
        optionVerbosity getVerbosity (\v flags -> flags { getVerbosity = v })

       ,option "d" ["destdir"]
         "Where to place the package source, defaults to the current directory."
         getDestDir (\v flags -> flags { getDestDir = v })
         (reqArgFlag "PATH")

       ,option "s" ["source-repository"]
         "Copy the package's source repository (ie git clone, darcs get, etc as appropriate)."
         getSourceRepository (\v flags -> flags { getSourceRepository = v })
        -- Optional argument: bare '-s' selects 'Flag Nothing', i.e. any
        -- repository kind the package declares.
        (optArg "[head|this|...]" (readP_to_E (const "invalid source-repository")
                                              (fmap (toFlag . Just) parse))
                                  (Flag Nothing)
                                  (map (fmap show) . flagToList))

       , option [] ["index-state"]
         ("Use source package index state as it existed at a previous time. " ++
          "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++
          "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD'). " ++
          "This determines which package versions are available as well as " ++
          ".cabal file revision is selected (unless --pristine is used).")
         getIndexState (\v flags -> flags { getIndexState = v })
         (reqArg "STATE" (readP_to_E (const $ "index-state must be a  " ++
                                      "unix-timestamps (e.g. '@1474732068'), " ++
                                      "a ISO8601 UTC timestamp " ++
                                      "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'")
                                     (toFlag `fmap` parse))
                         (flagToList . fmap display))

       ,option [] ["pristine"]
           ("Unpack the original pristine tarball, rather than updating the "
           ++ ".cabal file with the latest revision from the package archive.")
           getPristine (\v flags -> flags { getPristine = v })
           trueArg
       ]
  }
-- 'cabal unpack' is a deprecated alias for 'cabal get'.
unpackCommand :: CommandUI GetFlags
unpackCommand = getCommand {
    commandName  = "unpack",
    commandUsage = usagePackages "unpack"
  }
instance Monoid GetFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup GetFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * List flags
-- ------------------------------------------------------------
-- | Flags accepted by the @list@ command.
data ListFlags = ListFlags {
    listInstalled    :: Flag Bool,
    listSimpleOutput :: Flag Bool,
    listVerbosity    :: Flag Verbosity,
    -- ^ Stack of package databases to consult; 'Nothing' entries mean
    -- "clear the stack" (see the --package-db help text below).
    listPackageDBs   :: [Maybe PackageDB]
  } deriving Generic
-- | Defaults for 'ListFlags': list everything, normal verbosity.
defaultListFlags :: ListFlags
defaultListFlags = ListFlags {
    listInstalled    = Flag False,
    listSimpleOutput = Flag False,
    listVerbosity    = toFlag normal,
    listPackageDBs   = []
  }
-- | Command-line UI for the @list@ command: lists all packages, or
-- those matching the given search strings, from the selected package
-- database(s).
--
-- Fix: the @--simple-output@ help text read "in a easy-to-parse";
-- corrected to "in an easy-to-parse".
listCommand  :: CommandUI ListFlags
listCommand = CommandUI {
    commandName         = "list",
    commandSynopsis     = "List packages matching a search string.",
    commandDescription  = Just $ \_ -> wrapText $
         "List all packages, or all packages matching one of the search"
      ++ " strings.\n"
      ++ "\n"
      ++ "If there is a sandbox in the current directory and "
      ++ "config:ignore-sandbox is False, use the sandbox package database. "
      ++ "Otherwise, use the package database specified with --package-db. "
      ++ "If not specified, use the user package database.\n",
    commandNotes        = Just $ \pname ->
         "Examples:\n"
      ++ "  " ++ pname ++ " list pandoc\n"
      ++ "    Will find pandoc, pandoc-citeproc, pandoc-lens, ...\n",
    commandUsage        = usageAlternatives "list" [ "[FLAGS]"
                                                  , "[FLAGS] STRINGS"],
    commandDefaultFlags = defaultListFlags,
    commandOptions      = \_ -> [
        optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v })

        , option [] ["installed"]
            "Only print installed packages"
            listInstalled (\v flags -> flags { listInstalled = v })
            trueArg

        , option [] ["simple-output"]
            "Print in an easy-to-parse format"
            listSimpleOutput (\v flags -> flags { listSimpleOutput = v })
            trueArg

        , option "" ["package-db"]
          (   "Append the given package database to the list of package"
           ++ " databases used (to satisfy dependencies and register into)."
           ++ " May be a specific file, 'global' or 'user'. The initial list"
           ++ " is ['global'], ['global', 'user'], or ['global', $sandbox],"
           ++ " depending on context. Use 'clear' to reset the list to empty."
           ++ " See the user guide for details.")
          listPackageDBs (\v flags -> flags { listPackageDBs = v })
          (reqArg' "DB" readPackageDbList showPackageDbList)
        ]
  }
-- Generic monoid: field-wise combination, later flags override.
instance Monoid ListFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup ListFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Info flags
-- ------------------------------------------------------------
-- | Flags accepted by the @info@ command.
data InfoFlags = InfoFlags {
    infoVerbosity  :: Flag Verbosity,
    -- ^ Stack of package databases; 'Nothing' entries reset the stack.
    infoPackageDBs :: [Maybe PackageDB]
  } deriving Generic
-- | Defaults for 'InfoFlags': normal verbosity, default databases.
defaultInfoFlags :: InfoFlags
defaultInfoFlags = InfoFlags {
    infoVerbosity  = toFlag normal,
    infoPackageDBs = []
  }
-- | Command-line UI for the @info@ command: shows detailed information
-- about the named packages.
infoCommand  :: CommandUI InfoFlags
infoCommand = CommandUI {
    commandName         = "info",
    commandSynopsis     = "Display detailed information about a particular package.",
    commandDescription  = Just $ \_ -> wrapText $
         "If there is a sandbox in the current directory and "
      ++ "config:ignore-sandbox is False, use the sandbox package database. "
      ++ "Otherwise, use the package database specified with --package-db. "
      ++ "If not specified, use the user package database.\n",
    commandNotes        = Nothing,
    commandUsage        = usageAlternatives "info" ["[FLAGS] PACKAGES"],
    commandDefaultFlags = defaultInfoFlags,
    commandOptions      = \_ -> [
        optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v })

        , option "" ["package-db"]
          (   "Append the given package database to the list of package"
           ++ " databases used (to satisfy dependencies and register into)."
           ++ " May be a specific file, 'global' or 'user'. The initial list"
           ++ " is ['global'], ['global', 'user'], or ['global', $sandbox],"
           ++ " depending on context. Use 'clear' to reset the list to empty."
           ++ " See the user guide for details.")
          infoPackageDBs (\v flags -> flags { infoPackageDBs = v })
          (reqArg' "DB" readPackageDbList showPackageDbList)

        ]
  }
instance Monoid InfoFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup InfoFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Install flags
-- ------------------------------------------------------------
-- | Install takes the same flags as configure along with a few extras.
--
-- | Install takes the same flags as configure along with a few extras.
--
data InstallFlags = InstallFlags {
    installDocumentation    :: Flag Bool,
    installHaddockIndex     :: Flag PathTemplate,
    installDryRun           :: Flag Bool,
    -- Dependency-solver configuration:
    installMaxBackjumps     :: Flag Int,
    installReorderGoals     :: Flag ReorderGoals,
    installCountConflicts   :: Flag CountConflicts,
    installIndependentGoals :: Flag IndependentGoals,
    installShadowPkgs       :: Flag ShadowPkgs,
    installStrongFlags      :: Flag StrongFlags,
    installAllowBootLibInstalls :: Flag AllowBootLibInstalls,
    -- Reinstall behaviour:
    installReinstall        :: Flag Bool,
    installAvoidReinstalls  :: Flag AvoidReinstalls,
    installOverrideReinstall :: Flag Bool,
    installUpgradeDeps      :: Flag Bool,
    installOnly             :: Flag Bool,
    installOnlyDeps         :: Flag Bool,
    installIndexState       :: Flag IndexState,
    installRootCmd          :: Flag String,
    -- Logging / reporting:
    installSummaryFile      :: NubList PathTemplate,
    installLogFile          :: Flag PathTemplate,
    installBuildReports     :: Flag ReportLevel,
    installReportPlanningFailure :: Flag Bool,
    installSymlinkBinDir    :: Flag FilePath,
    installPerComponent     :: Flag Bool,
    installOneShot          :: Flag Bool,
    installNumJobs          :: Flag (Maybe Int),
    installKeepGoing        :: Flag Bool,
    installRunTests         :: Flag Bool,
    installOfflineMode      :: Flag Bool,
    -- | The cabal project file name; defaults to @cabal.project@.
    -- The name itself denotes the cabal project file name, but it also
    -- is the base of auxiliary project files, such as
    -- @cabal.project.local@ and @cabal.project.freeze@ which are also
    -- read and written out in some cases.  If the path is not found
    -- in the current working directory, we will successively probe
    -- relative to parent directories until this name is found.
    installProjectFileName   :: Flag FilePath
  }
  deriving (Eq, Generic)
instance Binary InstallFlags
-- | Defaults for 'InstallFlags'.  Note the solver-related defaults
-- (e.g. 2000 max backjumps, goal reordering off) and that the haddock
-- index template deliberately avoids @$pkgid@.
defaultInstallFlags :: InstallFlags
defaultInstallFlags = InstallFlags {
    installDocumentation   = Flag False,
    installHaddockIndex    = Flag docIndexFile,
    installDryRun          = Flag False,
    installMaxBackjumps    = Flag defaultMaxBackjumps,
    installReorderGoals    = Flag (ReorderGoals False),
    installCountConflicts  = Flag (CountConflicts True),
    installIndependentGoals= Flag (IndependentGoals False),
    installShadowPkgs      = Flag (ShadowPkgs False),
    installStrongFlags     = Flag (StrongFlags False),
    installAllowBootLibInstalls = Flag (AllowBootLibInstalls False),
    installReinstall       = Flag False,
    installAvoidReinstalls = Flag (AvoidReinstalls False),
    installOverrideReinstall = Flag False,
    installUpgradeDeps     = Flag False,
    installOnly            = Flag False,
    installOnlyDeps        = Flag False,
    installIndexState      = mempty,
    installRootCmd         = mempty,
    installSummaryFile     = mempty,
    installLogFile         = mempty,
    installBuildReports    = Flag NoReports,
    installReportPlanningFailure = Flag False,
    installSymlinkBinDir   = mempty,
    installPerComponent    = Flag True,
    installOneShot         = Flag False,
    installNumJobs         = mempty,
    installKeepGoing       = Flag False,
    installRunTests        = mempty,
    installOfflineMode     = Flag False,
    installProjectFileName = mempty
  }
  where
    -- Central documentation index location template (no $pkgid, since
    -- it is shared across packages).
    docIndexFile = toPathTemplate ("$datadir" </> "doc"
                                   </> "$arch-$os-$compiler" </> "index.html")
-- | Default limit on dependency-solver backjumps.
defaultMaxBackjumps :: Int
defaultMaxBackjumps = 2000
-- | Default dependency solver.
defaultSolver :: PreSolver
defaultSolver = AlwaysModular
-- | Comma-separated list of all solver names, for help text.
allSolvers :: String
allSolvers = intercalate ", " (map display ([minBound .. maxBound] :: [PreSolver]))
-- | Command-line UI for the @install@ command.  Its flags are a
-- 4-tuple: Cabal configure flags, cabal-install configure extras,
-- install flags, and (prefixed) haddock flags, each lifted into the
-- tuple via the get/set helpers in the where clause.
installCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
installCommand = CommandUI {
  commandName         = "install",
  commandSynopsis     = "Install packages.",
  commandUsage        = usageAlternatives "install" [ "[FLAGS]"
                                                    , "[FLAGS] PACKAGES"
                                                    ],
  commandDescription  = Just $ \_ -> wrapText $
        "Installs one or more packages. By default, the installed package"
     ++ " will be registered in the user's package database or, if a sandbox"
     ++ " is present in the current directory, inside the sandbox.\n"
     ++ "\n"
     ++ "If PACKAGES are specified, downloads and installs those packages."
     ++ " Otherwise, install the package in the current directory (and/or its"
     ++ " dependencies) (there must be exactly one .cabal file in the current"
     ++ " directory).\n"
     ++ "\n"
     ++ "When using a sandbox, the flags for `install` only affect the"
     ++ " current command and have no effect on future commands. (To achieve"
     ++ " that, `configure` must be used.)\n"
     ++ " In contrast, without a sandbox, the flags to `install` are saved and"
     ++ " affect future commands such as `build` and `repl`. See the help for"
     ++ " `configure` for a list of commands being affected.\n"
     ++ "\n"
     ++ "Installed executables will by default (and without a sandbox)"
     ++ " be put into `~/.cabal/bin/`."
     ++ " If you want installed executable to be available globally, make"
     ++ " sure that the PATH environment variable contains that directory.\n"
     ++ "When using a sandbox, executables will be put into"
     ++ " `$SANDBOX/bin/` (by default: `./.cabal-sandbox/bin/`).\n"
     ++ "\n"
     ++ "When specifying --bindir, consider also specifying --datadir;"
     ++ " this way the sandbox can be deleted and the executable should"
     ++ " continue working as long as bindir and datadir are left untouched.",
  commandNotes        = Just $ \pname ->
        ( case commandNotes
               $ Cabal.configureCommand defaultProgramDb
          of Just desc -> desc pname ++ "\n"
             Nothing   -> ""
        )
     ++ "Examples:\n"
     ++ "  " ++ pname ++ " install                 "
     ++ "    Package in the current directory\n"
     ++ "  " ++ pname ++ " install foo             "
     ++ "    Package from the hackage server\n"
     ++ "  " ++ pname ++ " install foo-1.0         "
     ++ "    Specific version of a package\n"
     ++ "  " ++ pname ++ " install 'foo < 2'       "
     ++ "    Constrained package version\n"
     ++ "  " ++ pname ++ " install haddock --bindir=$HOME/hask-bin/ --datadir=$HOME/hask-data/\n"
     ++ "  " ++ (map (const ' ') pname)
     ++ "                         "
     ++ "    Change installation destination\n",
  commandDefaultFlags = (mempty, mempty, mempty, mempty),
  commandOptions      = \showOrParseArgs ->
       -- Some configure options are excluded because install manages
       -- them itself (constraints, dependencies, exact-configuration).
       liftOptions get1 set1
       (filter ((`notElem` ["constraint", "dependency"
                           , "exact-configuration"])
                . optionName) $
                              configureOptions showOrParseArgs)
    ++ liftOptions get2 set2 (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag)
    ++ liftOptions get3 set3 (installOptions showOrParseArgs)
    ++ liftOptions get4 set4 (haddockOptions showOrParseArgs)
  }
  where
    get1 (a,_,_,_) = a; set1 a (_,b,c,d) = (a,b,c,d)
    get2 (_,b,_,_) = b; set2 b (a,_,c,d) = (a,b,c,d)
    get3 (_,_,c,_) = c; set3 c (a,b,_,d) = (a,b,c,d)
    get4 (_,_,_,d) = d; set4 d (a,b,c,_) = (a,b,c,d)
-- | A whitelisted subset of Cabal's haddock options, re-exposed to
-- @install@ with a @haddock-@ prefix on both the option name and its
-- long flags (e.g. @--haddock-hoogle@).
haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags]
haddockOptions showOrParseArgs
  = [ opt { optionName = "haddock-" ++ name,
            optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr
                          | descr <- optionDescr opt] }
    | opt <- commandOptions Cabal.haddockCommand showOrParseArgs
    , let name = optionName opt
    , name `elem` ["hoogle", "html", "html-location"
                  ,"executables", "tests", "benchmarks", "all", "internal", "css"
                  ,"hyperlink-source", "hscolour-css"
                  ,"contents-location", "for-hackage"]
    ]
  where
    -- Apply a renaming function to the short/long flag names stored in
    -- each 'OptDescr' constructor, leaving everything else intact.
    fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a
    fmapOptFlags modify (ReqArg d f p r w)    = ReqArg d (modify f) p r w
    fmapOptFlags modify (OptArg d f p r i w)  = OptArg d (modify f) p r i w
    fmapOptFlags modify (ChoiceOpt xs)        = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs]
    fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w
-- | The option fields specific to 'InstallFlags'.  The solver options
-- are spliced in via 'optionSolverFlags'; the @--only@ option is only
-- accepted when parsing (hidden from @--help@ output).
installOptions :: ShowOrParseArgs -> [OptionField InstallFlags]
installOptions showOrParseArgs =
      [ option "" ["documentation"]
          "building of documentation"
          installDocumentation (\v flags -> flags { installDocumentation = v })
          (boolOpt [] [])

      , option [] ["doc-index-file"]
          "A central index of haddock API documentation (template cannot use $pkgid)"
          installHaddockIndex (\v flags -> flags { installHaddockIndex = v })
          (reqArg' "TEMPLATE" (toFlag.toPathTemplate)
                              (flagToList . fmap fromPathTemplate))

      , option [] ["dry-run"]
          "Do not install anything, only print what would be installed."
          installDryRun (\v flags -> flags { installDryRun = v })
          trueArg
      ] ++

      -- Shared dependency-solver options (max-backjumps, reorder-goals, ...).
      optionSolverFlags showOrParseArgs
                        installMaxBackjumps     (\v flags -> flags { installMaxBackjumps     = v })
                        installReorderGoals     (\v flags -> flags { installReorderGoals     = v })
                        installCountConflicts   (\v flags -> flags { installCountConflicts   = v })
                        installIndependentGoals (\v flags -> flags { installIndependentGoals = v })
                        installShadowPkgs       (\v flags -> flags { installShadowPkgs       = v })
                        installStrongFlags      (\v flags -> flags { installStrongFlags      = v })
                        installAllowBootLibInstalls (\v flags -> flags { installAllowBootLibInstalls = v }) ++

      [ option [] ["reinstall"]
          "Install even if it means installing the same version again."
          installReinstall (\v flags -> flags { installReinstall = v })
          (yesNoOpt showOrParseArgs)

      , option [] ["avoid-reinstalls"]
          "Do not select versions that would destructively overwrite installed packages."
          (fmap asBool . installAvoidReinstalls)
          (\v flags -> flags { installAvoidReinstalls = fmap AvoidReinstalls v })
          (yesNoOpt showOrParseArgs)

      , option [] ["force-reinstalls"]
          "Reinstall packages even if they will most likely break other installed packages."
          installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v })
          (yesNoOpt showOrParseArgs)

      , option [] ["upgrade-dependencies"]
          "Pick the latest version for all dependencies, rather than trying to pick an installed version."
          installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v })
          (yesNoOpt showOrParseArgs)

      , option [] ["only-dependencies"]
          "Install only the dependencies necessary to build the given packages"
          installOnlyDeps (\v flags -> flags { installOnlyDeps = v })
          (yesNoOpt showOrParseArgs)

      -- Both spellings set the same 'installOnlyDeps' field.
      , option [] ["dependencies-only"]
          "A synonym for --only-dependencies"
          installOnlyDeps (\v flags -> flags { installOnlyDeps = v })
          (yesNoOpt showOrParseArgs)

      , option [] ["index-state"]
          ("Use source package index state as it existed at a previous time. " ++
           "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++
           "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD').")
          installIndexState (\v flags -> flags { installIndexState = v })
          (reqArg "STATE" (readP_to_E (const $ "index-state must be a  " ++
                                       "unix-timestamps (e.g. '@1474732068'), " ++
                                       "a ISO8601 UTC timestamp " ++
                                       "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'")
                                      (toFlag `fmap` parse))
                          (flagToList . fmap display))

      , option [] ["root-cmd"]
          "(No longer supported, do not use.)"
          installRootCmd (\v flags -> flags { installRootCmd = v })
          (reqArg' "COMMAND" toFlag flagToList)

      , option [] ["symlink-bindir"]
          "Add symlinks to installed executables into this directory."
           installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v })
           (reqArgFlag "DIR")

      , option [] ["build-summary"]
          "Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)"
          installSummaryFile (\v flags -> flags { installSummaryFile = v })
          (reqArg' "TEMPLATE" (\x -> toNubList [toPathTemplate x]) (map fromPathTemplate . fromNubList))

      , option [] ["build-log"]
          "Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)"
          installLogFile (\v flags -> flags { installLogFile = v })
          (reqArg' "TEMPLATE" (toFlag.toPathTemplate)
                              (flagToList . fmap fromPathTemplate))

      , option [] ["remote-build-reporting"]
          "Generate build reports to send to a remote server (none, anonymous or detailed)."
          installBuildReports (\v flags -> flags { installBuildReports = v })
          (reqArg "LEVEL" (readP_to_E (const $ "report level must be 'none', "
                                            ++ "'anonymous' or 'detailed'")
                                      (toFlag `fmap` parse))
                          (flagToList . fmap display))

      , option [] ["report-planning-failure"]
          "Generate build reports when the dependency solver fails. This is used by the Hackage build bot."
          installReportPlanningFailure (\v flags -> flags { installReportPlanningFailure = v })
          trueArg

      , option "" ["per-component"]
          "Per-component builds when possible"
          installPerComponent (\v flags -> flags { installPerComponent = v })
          (boolOpt [] [])

      , option [] ["one-shot"]
          "Do not record the packages in the world file."
          installOneShot (\v flags -> flags { installOneShot = v })
          (yesNoOpt showOrParseArgs)

      , option [] ["run-tests"]
          "Run package test suites during installation."
          installRunTests (\v flags -> flags { installRunTests = v })
          trueArg

      , optionNumJobs
        installNumJobs (\v flags -> flags { installNumJobs = v })

      , option [] ["keep-going"]
          "After a build failure, continue to build other unaffected packages."
          installKeepGoing (\v flags -> flags { installKeepGoing = v })
          trueArg

      , option [] ["offline"]
          "Don't download packages from the Internet."
          installOfflineMode (\v flags -> flags { installOfflineMode = v })
          (yesNoOpt showOrParseArgs)

      , option [] ["project-file"]
          "Set the name of the cabal.project file to search for in parent directories"
          installProjectFileName (\v flags ->flags {installProjectFileName = v})
          (reqArgFlag "FILE")

      ] ++ case showOrParseArgs of      -- TODO: remove when "cabal install"
                                        -- avoids
          ParseArgs ->
            [ option [] ["only"]
              "Only installs the package in the current directory."
              installOnly (\v flags -> flags { installOnly = v })
              trueArg ]
          _ -> []
-- Generic monoid: field-wise combination, later flags override.
instance Monoid InstallFlags where
  mempty = gmempty
  mappend = (<>)
instance Semigroup InstallFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Upload flags
-- ------------------------------------------------------------
-- | Is this a candidate package or a package to be published?
data IsCandidate = IsCandidate | IsPublished
deriving Eq
data UploadFlags = UploadFlags {
uploadCandidate :: Flag IsCandidate,
uploadDoc :: Flag Bool,
uploadUsername :: Flag Username,
uploadPassword :: Flag Password,
uploadPasswordCmd :: Flag [String],
uploadVerbosity :: Flag Verbosity
} deriving Generic
defaultUploadFlags :: UploadFlags
defaultUploadFlags = UploadFlags {
uploadCandidate = toFlag IsCandidate,
uploadDoc = toFlag False,
uploadUsername = mempty,
uploadPassword = mempty,
uploadPasswordCmd = mempty,
uploadVerbosity = toFlag normal
}
-- | Command-line UI for @cabal upload@: pushes source tarballs or
-- documentation to Hackage (as a candidate unless @--publish@ is given).
uploadCommand :: CommandUI UploadFlags
uploadCommand = CommandUI {
    commandName         = "upload",
    commandSynopsis     = "Uploads source packages or documentation to Hackage.",
    commandDescription  = Nothing,
    commandNotes        = Just $ \_ ->
         "You can store your Hackage login in the ~/.cabal/config file\n"
      ++ relevantConfigValuesText ["username", "password"],
    commandUsage        = \pname ->
         "Usage: " ++ pname ++ " upload [FLAGS] TARFILES\n",
    commandDefaultFlags = defaultUploadFlags,
    commandOptions      = \_ ->
      [optionVerbosity uploadVerbosity
       (\v flags -> flags { uploadVerbosity = v })

      ,option [] ["publish"]
        "Publish the package instead of uploading it as a candidate."
        uploadCandidate (\v flags -> flags { uploadCandidate = v })
        (noArg (Flag IsPublished))

      ,option ['d'] ["documentation"]
        ("Upload documentation instead of a source package. "
        ++ "By default, this uploads documentation for a package candidate. "
        ++ "To upload documentation for "
        ++ "a published package, combine with --publish.")
        uploadDoc (\v flags -> flags { uploadDoc = v })
        trueArg

      ,option ['u'] ["username"]
        "Hackage username."
        uploadUsername (\v flags -> flags { uploadUsername = v })
        (reqArg' "USERNAME" (toFlag . Username)
                            (flagToList . fmap unUsername))

      ,option ['p'] ["password"]
        "Hackage password."
        uploadPassword (\v flags -> flags { uploadPassword = v })
        (reqArg' "PASSWORD" (toFlag . Password)
                            (flagToList . fmap unPassword))

      ,option ['P'] ["password-command"]
        "Command to get Hackage password."
        -- The argument string is split on whitespace into an argv list.
        uploadPasswordCmd (\v flags -> flags { uploadPasswordCmd = v })
        (reqArg' "PASSWORD" (Flag . words) (fromMaybe [] . flagToMaybe))
      ]
  }
-- Generics-based field-wise merge boilerplate for 'UploadFlags'.
instance Monoid UploadFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup UploadFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Init flags
-- ------------------------------------------------------------
-- | All-unset 'IT.InitFlags' (everything left to interactive guessing).
emptyInitFlags :: IT.InitFlags
emptyInitFlags = mempty

-- | Defaults for @init@: only the verbosity is pre-set.
defaultInitFlags :: IT.InitFlags
defaultInitFlags = emptyInitFlags { IT.initVerbosity = toFlag normal }
-- | Command-line UI for @cabal init@, which (interactively by default)
-- creates a skeleton @.cabal@ file, @Setup.hs@, and optionally a LICENSE.
-- Every flag here corresponds to a field of 'IT.InitFlags'; unset fields
-- are filled in by guessing/prompting.
initCommand :: CommandUI IT.InitFlags
initCommand = CommandUI {
    commandName = "init",
    commandSynopsis = "Create a new .cabal package file (interactively).",
    commandDescription = Just $ \_ -> wrapText $
         "Cabalise a project by creating a .cabal, Setup.hs, and "
      ++ "optionally a LICENSE file.\n"
      ++ "\n"
      ++ "Calling init with no arguments (recommended) uses an "
      ++ "interactive mode, which will try to guess as much as "
      ++ "possible and prompt you for the rest. Command-line "
      ++ "arguments are provided for scripting purposes. "
      ++ "If you don't want interactive mode, be sure to pass "
      ++ "the -n flag.\n",
    commandNotes = Nothing,
    commandUsage = \pname ->
         "Usage: " ++ pname ++ " init [FLAGS]\n",
    commandDefaultFlags = defaultInitFlags,
    commandOptions = \_ ->
      [ option ['n'] ["non-interactive"]
        "Non-interactive mode."
        IT.nonInteractive (\v flags -> flags { IT.nonInteractive = v })
        trueArg
      , option ['q'] ["quiet"]
        "Do not generate log messages to stdout."
        IT.quiet (\v flags -> flags { IT.quiet = v })
        trueArg
      , option [] ["no-comments"]
        "Do not generate explanatory comments in the .cabal file."
        IT.noComments (\v flags -> flags { IT.noComments = v })
        trueArg
      , option ['m'] ["minimal"]
        "Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments."
        IT.minimal (\v flags -> flags { IT.minimal = v })
        trueArg
      , option [] ["overwrite"]
        "Overwrite any existing .cabal, LICENSE, or Setup.hs files without warning."
        IT.overwrite (\v flags -> flags { IT.overwrite = v })
        trueArg
      , option [] ["package-dir"]
        "Root directory of the package (default = current directory)."
        IT.packageDir (\v flags -> flags { IT.packageDir = v })
        (reqArgFlag "DIRECTORY")
      , option ['p'] ["package-name"]
        "Name of the Cabal package to create."
        IT.packageName (\v flags -> flags { IT.packageName = v })
        (reqArg "PACKAGE" (readP_to_E ("Cannot parse package name: "++)
                                      (toFlag `fmap` parse))
                          (flagToList . fmap display))
      , option [] ["version"]
        "Initial version of the package."
        IT.version (\v flags -> flags { IT.version = v })
        (reqArg "VERSION" (readP_to_E ("Cannot parse package version: "++)
                                      (toFlag `fmap` parse))
                          (flagToList . fmap display))
      , option [] ["cabal-version"]
        "Required version of the Cabal library."
        IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v })
        (reqArg "VERSION_RANGE" (readP_to_E ("Cannot parse Cabal version range: "++)
                                            (toFlag `fmap` parse))
                                (flagToList . fmap display))
      , option ['l'] ["license"]
        "Project license."
        IT.license (\v flags -> flags { IT.license = v })
        (reqArg "LICENSE" (readP_to_E ("Cannot parse license: "++)
                                      (toFlag `fmap` parse))
                          (flagToList . fmap display))
      , option ['a'] ["author"]
        "Name of the project's author."
        IT.author (\v flags -> flags { IT.author = v })
        (reqArgFlag "NAME")
      , option ['e'] ["email"]
        "Email address of the maintainer."
        IT.email (\v flags -> flags { IT.email = v })
        (reqArgFlag "EMAIL")
      , option ['u'] ["homepage"]
        "Project homepage and/or repository."
        IT.homepage (\v flags -> flags { IT.homepage = v })
        (reqArgFlag "URL")
      , option ['s'] ["synopsis"]
        "Short project synopsis."
        IT.synopsis (\v flags -> flags { IT.synopsis = v })
        (reqArgFlag "TEXT")
      , option ['c'] ["category"]
        "Project category."
        -- A known category parses to Right (via readMaybe); an unknown
        -- string is kept verbatim as Left.
        IT.category (\v flags -> flags { IT.category = v })
        (reqArg' "CATEGORY" (\s -> toFlag $ maybe (Left s) Right (readMaybe s))
                            (flagToList . fmap (either id show)))
      , option ['x'] ["extra-source-file"]
        "Extra source file to be distributed with tarball."
        IT.extraSrc (\v flags -> flags { IT.extraSrc = v })
        (reqArg' "FILE" (Just . (:[]))
                        (fromMaybe []))
      , option [] ["is-library"]
        "Build a library."
        IT.packageType (\v flags -> flags { IT.packageType = v })
        (noArg (Flag IT.Library))
      , option [] ["is-executable"]
        "Build an executable."
        IT.packageType
        (\v flags -> flags { IT.packageType = v })
        (noArg (Flag IT.Executable))
      , option [] ["main-is"]
        "Specify the main module."
        IT.mainIs
        (\v flags -> flags { IT.mainIs = v })
        (reqArgFlag "FILE")
      , option [] ["language"]
        "Specify the default language."
        IT.language
        (\v flags -> flags { IT.language = v })
        (reqArg "LANGUAGE" (readP_to_E ("Cannot parse language: "++)
                                       (toFlag `fmap` parse))
                           (flagToList . fmap display))
      , option ['o'] ["expose-module"]
        "Export a module from the package."
        -- Repeatable: each occurrence contributes a singleton list that is
        -- merged into the accumulated module list.
        IT.exposedModules
        (\v flags -> flags { IT.exposedModules = v })
        (reqArg "MODULE" (readP_to_E ("Cannot parse module name: "++)
                                     ((Just . (:[])) `fmap` parse))
                         (maybe [] (fmap display)))
      , option [] ["extension"]
        "Use a LANGUAGE extension (in the other-extensions field)."
        IT.otherExts
        (\v flags -> flags { IT.otherExts = v })
        (reqArg "EXTENSION" (readP_to_E ("Cannot parse extension: "++)
                                        ((Just . (:[])) `fmap` parse))
                            (maybe [] (fmap display)))
      , option ['d'] ["dependency"]
        "Package dependency."
        IT.dependencies (\v flags -> flags { IT.dependencies = v })
        (reqArg "PACKAGE" (readP_to_E ("Cannot parse dependency: "++)
                                      ((Just . (:[])) `fmap` parse))
                          (maybe [] (fmap display)))
      , option [] ["source-dir"]
        "Directory containing package source."
        IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v })
        (reqArg' "DIR" (Just . (:[]))
                       (fromMaybe []))
      , option [] ["build-tool"]
        "Required external build tool."
        IT.buildTools (\v flags -> flags { IT.buildTools = v })
        (reqArg' "TOOL" (Just . (:[]))
                        (fromMaybe []))
      , optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v })
      ]
  }
-- ------------------------------------------------------------
-- * SDist flags
-- ------------------------------------------------------------
-- | Extra flags to @sdist@ beyond runghc Setup sdist
--
data SDistExFlags = SDistExFlags {
    sDistFormat :: Flag ArchiveFormat  -- ^ which archive format to produce
  }
  deriving (Show, Generic)

-- | Supported sdist archive formats.
data ArchiveFormat = TargzFormat | ZipFormat -- ...
  deriving (Show, Eq)

-- | Default: produce a @.tar.gz@, the format Hackage requires.
defaultSDistExFlags :: SDistExFlags
defaultSDistExFlags = SDistExFlags {
    sDistFormat = Flag TargzFormat
  }
-- | Command-line UI for @cabal sdist@: wraps the Cabal library's sdist
-- command, pairing its flags with the cabal-install-specific extras and
-- lifting each option set onto the corresponding tuple component.
sdistCommand :: CommandUI (SDistFlags, SDistExFlags)
sdistCommand = Cabal.sdistCommand {
    commandDefaultFlags = (commandDefaultFlags Cabal.sdistCommand, defaultSDistExFlags),
    commandOptions = \showOrParseArgs ->
         liftOptions fst setFst (commandOptions Cabal.sdistCommand showOrParseArgs)
      ++ liftOptions snd setSnd sdistExOptions
  }
  where
    setFst a (_,b) = (a,b)
    setSnd b (a,_) = (a,b)

    sdistExOptions =
      [option [] ["archive-format"] "archive-format"
        sDistFormat (\v flags -> flags { sDistFormat = v })
        (choiceOpt
          [ (Flag TargzFormat, ([], ["targz"]),
               "Produce a '.tar.gz' format archive (default and required for uploading to hackage)")
          , (Flag ZipFormat, ([], ["zip"]),
               "Produce a '.zip' format archive")
          ])
      ]
-- Generics-based field-wise merge boilerplate for 'SDistExFlags'.
instance Monoid SDistExFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup SDistExFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Win32SelfUpgrade flags
-- ------------------------------------------------------------
-- | Flags for the (internal) @win32selfupgrade@ command; verbosity only.
data Win32SelfUpgradeFlags = Win32SelfUpgradeFlags {
    win32SelfUpgradeVerbosity :: Flag Verbosity
  } deriving Generic

defaultWin32SelfUpgradeFlags :: Win32SelfUpgradeFlags
defaultWin32SelfUpgradeFlags = Win32SelfUpgradeFlags {
    win32SelfUpgradeVerbosity = toFlag normal
  }
-- | Command-line UI for the internal @win32selfupgrade@ command, used on
-- Windows to replace the running cabal executable (takes PID and PATH).
win32SelfUpgradeCommand :: CommandUI Win32SelfUpgradeFlags
win32SelfUpgradeCommand = CommandUI {
    commandName         = "win32selfupgrade",
    commandSynopsis     = "Self-upgrade the executable on Windows",
    commandDescription  = Nothing,
    commandNotes        = Nothing,
    commandUsage        = \pname ->
         "Usage: " ++ pname ++ " win32selfupgrade PID PATH\n",
    commandDefaultFlags = defaultWin32SelfUpgradeFlags,
    commandOptions      = \_ ->
      [optionVerbosity win32SelfUpgradeVerbosity
       (\v flags -> flags { win32SelfUpgradeVerbosity = v})
      ]
  }
-- Generics-based field-wise merge boilerplate for 'Win32SelfUpgradeFlags'.
instance Monoid Win32SelfUpgradeFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup Win32SelfUpgradeFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * ActAsSetup flags
-- ------------------------------------------------------------
-- | Flags for the @act-as-setup@ command (which build type to emulate).
data ActAsSetupFlags = ActAsSetupFlags {
    actAsSetupBuildType :: Flag BuildType
  } deriving Generic

-- | Default: behave as a 'Simple' build-type @Setup.hs@.
defaultActAsSetupFlags :: ActAsSetupFlags
defaultActAsSetupFlags = ActAsSetupFlags {
    actAsSetupBuildType = toFlag Simple
  }
-- | Command-line UI for @cabal act-as-setup@: makes the cabal executable
-- behave as if it were a build-type-specific @Setup.hs@ program.
actAsSetupCommand :: CommandUI ActAsSetupFlags
actAsSetupCommand = CommandUI {
    commandName         = "act-as-setup",
    commandSynopsis     = "Run as-if this was a Setup.hs",
    commandDescription  = Nothing,
    commandNotes        = Nothing,
    commandUsage        = \pname ->
         "Usage: " ++ pname ++ " act-as-setup\n",
    commandDefaultFlags = defaultActAsSetupFlags,
    commandOptions      = \_ ->
      -- NB: '[]' (no short flags) rather than the '""' used previously;
      -- they are the same value, but '[]' matches every other 'option'
      -- call in this module.
      [option [] ["build-type"]
        "Use the given build type."
        actAsSetupBuildType (\v flags -> flags { actAsSetupBuildType = v })
        (reqArg "BUILD-TYPE" (readP_to_E ("Cannot parse build type: "++)
                                         (fmap toFlag parse))
                             (map display . flagToList))
      ]
  }
-- Generics-based field-wise merge boilerplate for 'ActAsSetupFlags'.
instance Monoid ActAsSetupFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup ActAsSetupFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Sandbox-related flags
-- ------------------------------------------------------------
-- | Flags shared by all @sandbox@ subcommands.
data SandboxFlags = SandboxFlags {
    sandboxVerbosity :: Flag Verbosity,
    sandboxSnapshot  :: Flag Bool, -- FIXME: this should be an 'add-source'-only
                                   -- flag.
    sandboxLocation  :: Flag FilePath  -- ^ where the sandbox directory lives
  } deriving Generic

-- | Default sandbox directory, relative to the project root.
defaultSandboxLocation :: FilePath
defaultSandboxLocation = ".cabal-sandbox"

defaultSandboxFlags :: SandboxFlags
defaultSandboxFlags = SandboxFlags {
    sandboxVerbosity = toFlag normal,
    sandboxSnapshot  = toFlag False,
    sandboxLocation  = toFlag defaultSandboxLocation
  }
-- | Command-line UI for @cabal sandbox@ and its subcommands
-- (init/delete/add-source/delete-source/list-sources/hc-pkg).  The long
-- description is assembled from the 'paragraph'/'headLine'/'indentParagraph'
-- helpers defined later in this module.
sandboxCommand :: CommandUI SandboxFlags
sandboxCommand = CommandUI {
    commandName         = "sandbox",
    commandSynopsis     = "Create/modify/delete a sandbox.",
    commandDescription  = Just $ \pname -> concat
      [ paragraph $ "Sandboxes are isolated package databases that can be used"
        ++ " to prevent dependency conflicts that arise when many different"
        ++ " packages are installed in the same database (i.e. the user's"
        ++ " database in the home directory)."
      , paragraph $ "A sandbox in the current directory (created by"
        ++ " `sandbox init`) will be used instead of the user's database for"
        ++ " commands such as `install` and `build`. Note that (a directly"
        ++ " invoked) GHC will not automatically be aware of sandboxes;"
        ++ " only if called via appropriate " ++ pname
        ++ " commands, e.g. `repl`, `build`, `exec`."
      , paragraph $ "Currently, " ++ pname ++ " will not search for a sandbox"
        ++ " in folders above the current one, so cabal will not see the sandbox"
        ++ " if you are in a subfolder of a sandbox."
      , paragraph "Subcommands:"
      , headLine "init:"
      , indentParagraph $ "Initialize a sandbox in the current directory."
        ++ " An existing package database will not be modified, but settings"
        ++ " (such as the location of the database) can be modified this way."
      , headLine "delete:"
      , indentParagraph $ "Remove the sandbox; deleting all the packages"
        ++ " installed inside."
      , headLine "add-source:"
      , indentParagraph $ "Make one or more local packages available in the"
        ++ " sandbox. PATHS may be relative or absolute."
        ++ " Typical usecase is when you need"
        ++ " to make a (temporary) modification to a dependency: You download"
        ++ " the package into a different directory, make the modification,"
        ++ " and add that directory to the sandbox with `add-source`."
      , indentParagraph $ "Unless given `--snapshot`, any add-source'd"
        ++ " dependency that was modified since the last build will be"
        ++ " re-installed automatically."
      , headLine "delete-source:"
      , indentParagraph $ "Remove an add-source dependency; however, this will"
        ++ " not delete the package(s) that have been installed in the sandbox"
        ++ " from this dependency. You can either unregister the package(s) via"
        ++ " `" ++ pname ++ " sandbox hc-pkg unregister` or re-create the"
        ++ " sandbox (`sandbox delete; sandbox init`)."
      , headLine "list-sources:"
      , indentParagraph $ "List the directories of local packages made"
        ++ " available via `" ++ pname ++ " add-source`."
      , headLine "hc-pkg:"
      , indentParagraph $ "Similar to `ghc-pkg`, but for the sandbox package"
        ++ " database. Can be used to list specific/all packages that are"
        ++ " installed in the sandbox. For subcommands, see the help for"
        ++ " ghc-pkg. Affected by the compiler version specified by `configure`."
      ],
    commandNotes        = Just $ \pname ->
         relevantConfigValuesText ["require-sandbox"
                                  ,"ignore-sandbox"]
      ++ "\n"
      ++ "Examples:\n"
      ++ "  Set up a sandbox with one local dependency, located at ../foo:\n"
      ++ "    " ++ pname ++ " sandbox init\n"
      ++ "    " ++ pname ++ " sandbox add-source ../foo\n"
      ++ "    " ++ pname ++ " install --only-dependencies\n"
      ++ "  Reset the sandbox:\n"
      ++ "    " ++ pname ++ " sandbox delete\n"
      ++ "    " ++ pname ++ " sandbox init\n"
      ++ "    " ++ pname ++ " install --only-dependencies\n"
      ++ "  List the packages in the sandbox:\n"
      ++ "    " ++ pname ++ " sandbox hc-pkg list\n"
      ++ "  Unregister the `broken` package from the sandbox:\n"
      ++ "    " ++ pname ++ " sandbox hc-pkg -- --force unregister broken\n",
    commandUsage        = usageAlternatives "sandbox"
      [ "init          [FLAGS]"
      , "delete        [FLAGS]"
      , "add-source    [FLAGS] PATHS"
      , "delete-source [FLAGS] PATHS"
      , "list-sources  [FLAGS]"
      , "hc-pkg        [FLAGS] [--] COMMAND [--] [ARGS]"
      ],
    commandDefaultFlags = defaultSandboxFlags,
    commandOptions      = \_ ->
      [ optionVerbosity sandboxVerbosity
        (\v flags -> flags { sandboxVerbosity = v })

      , option [] ["snapshot"]
        "Take a snapshot instead of creating a link (only applies to 'add-source')"
        sandboxSnapshot (\v flags -> flags { sandboxSnapshot = v })
        trueArg

      , option [] ["sandbox"]
        "Sandbox location (default: './.cabal-sandbox')."
        sandboxLocation (\v flags -> flags { sandboxLocation = v })
        (reqArgFlag "DIR")
      ]
  }
-- Generics-based field-wise merge boilerplate for 'SandboxFlags'.
instance Monoid SandboxFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup SandboxFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * Exec Flags
-- ------------------------------------------------------------
-- | Flags for the @exec@ command.
data ExecFlags = ExecFlags {
    execVerbosity :: Flag Verbosity,
    execDistPref  :: Flag FilePath   -- ^ the @dist@ directory prefix to use
  } deriving Generic

defaultExecFlags :: ExecFlags
defaultExecFlags = ExecFlags {
    execVerbosity = toFlag normal,
    execDistPref  = NoFlag
  }
-- | Command-line UI for @cabal exec@: runs COMMAND with the environment
-- (notably GHC_PACKAGE_PATH) adjusted so the sandbox package database is
-- visible; with no sandbox it simply runs COMMAND unchanged.
execCommand :: CommandUI ExecFlags
execCommand = CommandUI {
    commandName         = "exec",
    commandSynopsis     = "Give a command access to the sandbox package repository.",
    commandDescription  = Just $ \pname -> wrapText $
         -- TODO: this is too GHC-focused for my liking..
         "A directly invoked GHC will not automatically be aware of any"
      ++ " sandboxes: the GHC_PACKAGE_PATH environment variable controls what"
      ++ " GHC uses. `" ++ pname ++ " exec` can be used to modify this variable:"
      ++ " COMMAND will be executed in a modified environment and thereby uses"
      ++ " the sandbox package database.\n"
      ++ "\n"
      ++ "If there is no sandbox, behaves as identity (executing COMMAND).\n"
      ++ "\n"
      ++ "Note that other " ++ pname ++ " commands change the environment"
      ++ " variable appropriately already, so there is no need to wrap those"
      ++ " in `" ++ pname ++ " exec`. But with `" ++ pname ++ " exec`, the user"
      ++ " has more control and can, for example, execute custom scripts which"
      ++ " indirectly execute GHC.\n"
      ++ "\n"
      ++ "Note that `" ++ pname ++ " repl` is different from `" ++ pname
      ++ " exec -- ghci` as the latter will not forward any additional flags"
      ++ " being defined in the local package to ghci.\n"
      ++ "\n"
      ++ "See `" ++ pname ++ " sandbox`.\n",
    commandNotes        = Just $ \pname ->
         "Examples:\n"
      ++ "  " ++ pname ++ " exec -- ghci -Wall\n"
      ++ "    Start a repl session with sandbox packages and all warnings;\n"
      ++ "  " ++ pname ++ " exec gitit -- -f gitit.cnf\n"
      ++ "    Give gitit access to the sandbox packages, and pass it a flag;\n"
      ++ "  " ++ pname ++ " exec runghc Foo.hs\n"
      ++ "    Execute runghc on Foo.hs with runghc configured to use the\n"
      ++ "    sandbox package database (if a sandbox is being used).\n",
    commandUsage        = \pname ->
         "Usage: " ++ pname ++ " exec [FLAGS] [--] COMMAND [--] [ARGS]\n",
    commandDefaultFlags = defaultExecFlags,
    commandOptions      = \showOrParseArgs ->
      [ optionVerbosity execVerbosity
        (\v flags -> flags { execVerbosity = v })
      , Cabal.optionDistPref
        execDistPref (\d flags -> flags { execDistPref = d })
        showOrParseArgs
      ]
  }
-- Generics-based field-wise merge boilerplate for 'ExecFlags'.
instance Monoid ExecFlags where
  mempty = gmempty
  mappend = (<>)

instance Semigroup ExecFlags where
  (<>) = gmappend
-- ------------------------------------------------------------
-- * UserConfig flags
-- ------------------------------------------------------------
-- | Flags for the @user-config@ command.
data UserConfigFlags = UserConfigFlags {
    userConfigVerbosity :: Flag Verbosity,
    userConfigForce     :: Flag Bool  -- ^ overwrite an existing config file
  } deriving Generic

-- NOTE(review): unlike the other flag types in this module, 'mempty' here
-- carries concrete defaults instead of neutral values, so the Monoid
-- right-identity law likely fails (x <> mempty need not equal x).
-- Presumably intentional, to bake the defaults in -- confirm before reuse.
instance Monoid UserConfigFlags where
  mempty = UserConfigFlags {
    userConfigVerbosity = toFlag normal,
    userConfigForce     = toFlag False
  }
  mappend = (<>)

instance Semigroup UserConfigFlags where
  (<>) = gmappend
-- | Command-line UI for @cabal user-config@ (subcommands: init, diff,
-- update), which manages the user's global ~/.cabal/config file.
userConfigCommand :: CommandUI UserConfigFlags
userConfigCommand = CommandUI {
    commandName         = "user-config",
    commandSynopsis     = "Display and update the user's global cabal configuration.",
    commandDescription  = Just $ \_ -> wrapText $
         "When upgrading cabal, the set of configuration keys and their default"
      ++ " values may change. This command provides means to merge the existing"
      ++ " config in ~/.cabal/config"
      ++ " (i.e. all bindings that are actually defined and not commented out)"
      ++ " and the default config of the new version.\n"
      ++ "\n"
      ++ "init: Creates a new config file at either ~/.cabal/config or as"
      ++ " specified by --config-file, if given. An existing file won't be "
      ++ " overwritten unless -f or --force is given.\n"
      ++ "diff: Shows a pseudo-diff of the user's ~/.cabal/config file and"
      ++ " the default configuration that would be created by cabal if the"
      ++ " config file did not exist.\n"
      ++ "update: Applies the pseudo-diff to the configuration that would be"
      ++ " created by default, and write the result back to ~/.cabal/config.",
    commandNotes        = Nothing,
    commandUsage        = usageAlternatives "user-config" ["init", "diff", "update"],
    commandDefaultFlags = mempty,
    commandOptions      = \ _ -> [
        optionVerbosity userConfigVerbosity (\v flags -> flags { userConfigVerbosity = v })
      , option ['f'] ["force"]
        "Overwrite the config file if it already exists."
        userConfigForce (\v flags -> flags { userConfigForce = v })
        trueArg
      ]
  }
-- ------------------------------------------------------------
-- * GetOpt Utils
-- ------------------------------------------------------------
-- | Shorthand for a required-argument option whose value is wrapped in
-- 'Flag': parsing always succeeds, and pretty-printing unwraps via
-- 'flagToList'.
reqArgFlag :: ArgPlaceHolder ->
              MkOptDescr (b -> Flag String) (Flag String -> b -> b) b
reqArgFlag placeHolder = reqArg placeHolder (succeedReadE Flag) flagToList
-- | Lift a list of options over a getter/setter pair, so options defined
-- against a component type @a@ can act on a composite flags type @b@.
liftOptions :: (b -> a) -> (a -> b -> b)
            -> [OptionField a] -> [OptionField b]
liftOptions getter setter fields =
  [ liftOption getter setter field | field <- fields ]
-- | A boolean option that, when parsing real command lines, also accepts
-- a "no-"-prefixed negative form for each long flag; when merely showing
-- args it degenerates to a plain 'trueArg'.
yesNoOpt :: ShowOrParseArgs -> MkOptDescr (b -> Flag Bool) (Flag Bool -> b -> b) b
yesNoOpt ShowArgs = trueArg
yesNoOpt _        = \shortFlags longFlags ->
  Command.boolOpt' flagToMaybe Flag
                   (shortFlags, longFlags)
                   ([], map ("no-" ++) longFlags)
                   shortFlags longFlags
-- | The @--solver@ option, shared by several commands: selects the
-- dependency solver by name (parsed via 'parse' for 'PreSolver').
optionSolver :: (flags -> Flag PreSolver)
             -> (Flag PreSolver -> flags -> flags)
             -> OptionField flags
optionSolver get set =
  option [] ["solver"]
    ("Select dependency solver to use (default: " ++ display defaultSolver ++ "). Choices: " ++ allSolvers ++ ".")
    get set
    (reqArg "SOLVER" (readP_to_E (const $ "solver must be one of: " ++ allSolvers)
                                 (toFlag `fmap` parse))
                     (flagToList . fmap display))
-- | The common block of solver-tuning options (--max-backjumps,
-- --reorder-goals, etc.), parameterised by getter/setter pairs so each
-- command can wire them to its own flags record.  The boolean options use
-- 'yesNoOpt' and so also accept "no-"-prefixed negative forms; the newtype
-- wrappers (ReorderGoals etc.) are unwrapped with 'asBool' for parsing.
optionSolverFlags :: ShowOrParseArgs
                  -> (flags -> Flag Int   ) -> (Flag Int    -> flags -> flags)
                  -> (flags -> Flag ReorderGoals) -> (Flag ReorderGoals -> flags -> flags)
                  -> (flags -> Flag CountConflicts) -> (Flag CountConflicts -> flags -> flags)
                  -> (flags -> Flag IndependentGoals) -> (Flag IndependentGoals -> flags -> flags)
                  -> (flags -> Flag ShadowPkgs) -> (Flag ShadowPkgs -> flags -> flags)
                  -> (flags -> Flag StrongFlags) -> (Flag StrongFlags -> flags -> flags)
                  -> (flags -> Flag AllowBootLibInstalls) -> (Flag AllowBootLibInstalls -> flags -> flags)
                  -> [OptionField flags]
optionSolverFlags showOrParseArgs getmbj setmbj getrg setrg getcc setcc getig setig
                  getsip setsip getstrfl setstrfl getib setib =
  [ option [] ["max-backjumps"]
      ("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.")
      getmbj setmbj
      (reqArg "NUM" (readP_to_E ("Cannot parse number: "++) (fmap toFlag parse))
                    (map show . flagToList))
  , option [] ["reorder-goals"]
      "Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages."
      (fmap asBool . getrg)
      (setrg . fmap ReorderGoals)
      (yesNoOpt showOrParseArgs)
  , option [] ["count-conflicts"]
      "Try to speed up solving by preferring goals that are involved in a lot of conflicts (default)."
      (fmap asBool . getcc)
      (setcc . fmap CountConflicts)
      (yesNoOpt showOrParseArgs)
  , option [] ["independent-goals"]
      "Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen."
      (fmap asBool . getig)
      (setig . fmap IndependentGoals)
      (yesNoOpt showOrParseArgs)
  , option [] ["shadow-installed-packages"]
      "If multiple package instances of the same version are installed, treat all but one as shadowed."
      (fmap asBool . getsip)
      (setsip . fmap ShadowPkgs)
      (yesNoOpt showOrParseArgs)
  , option [] ["strong-flags"]
      "Do not defer flag choices (this used to be the default in cabal-install <= 1.20)."
      (fmap asBool . getstrfl)
      (setstrfl . fmap StrongFlags)
      (yesNoOpt showOrParseArgs)
  , option [] ["allow-boot-library-installs"]
      "Allow cabal to install base, ghc-prim, integer-simple, integer-gmp, and template-haskell."
      (fmap asBool . getib)
      (setib . fmap AllowBootLibInstalls)
      (yesNoOpt showOrParseArgs)
  ]
-- | Usage text for commands accepting either flags or package arguments.
usageFlagsOrPackages :: String -> String -> String
usageFlagsOrPackages name pname =
  concat [ "Usage: ", pname, " ", name, " [FLAGS]\n"
         , "   or: ", pname, " ", name, " [PACKAGES]\n"
         ]
-- | Usage text for commands that take package arguments only.
usagePackages :: String -> String -> String
usagePackages name pname =
  concat ["Usage: ", pname, " ", name, " [PACKAGES]\n"]
-- | Usage text for commands that take flags only.
usageFlags :: String -> String -> String
usageFlags name pname =
  concat ["Usage: ", pname, " ", name, " [FLAGS]\n"]
--TODO: do we want to allow per-package flags?
-- | Parse each command-line argument as a package name or dependency,
-- preserving order; the first unparsable argument aborts with a 'Left'
-- describing it.
parsePackageArgs :: [String] -> Either String [Dependency]
parsePackageArgs = traverse parseOne
  where
    parseOne arg =
      case readPToMaybe parseDependencyOrPackageId arg of
        Just dep -> Right dep
        Nothing  -> Left $
          show arg ++ " is not valid syntax for a package name or"
                   ++ " package dependency."
-- | Parse either a dependency (name plus version range) or a bare package
-- identifier; a bare name (null version) is read as "any version".
parseDependencyOrPackageId :: Parse.ReadP r Dependency
parseDependencyOrPackageId =
    parse Parse.+++ fmap pkgidToDependency parse
  where
    pkgidToDependency :: PackageIdentifier -> Dependency
    pkgidToDependency pkgid
      | packageVersion pkgid == nullVersion =
          Dependency (packageName pkgid) anyVersion
      | otherwise =
          Dependency (packageName pkgid) (thisVersion (packageVersion pkgid))
-- | Render a remote repo as @name:uri@ (inverse of 'readRepo').
showRepo :: RemoteRepo -> String
showRepo repo =
  concat [remoteRepoName repo, ":", uriToString id (remoteRepoURI repo) ""]
-- | Parse a @name:uri@ remote-repo specification (inverse of 'showRepo').
readRepo :: String -> Maybe RemoteRepo
readRepo str = readPToMaybe parseRepo str
-- | ReadP parser for a @name:uri@ remote repo.  The URI must be absolute;
-- security-related fields are left at their "unknown/insecure" defaults.
parseRepo :: Parse.ReadP r RemoteRepo
parseRepo = do
  repoName <- Parse.munch1 isNameChar
  _        <- Parse.char ':'
  rawUri   <- Parse.munch1 isUriChar
  repoUri  <- maybe Parse.pfail return (parseAbsoluteURI rawUri)
  return RemoteRepo {
    remoteRepoName           = repoName,
    remoteRepoURI            = repoUri,
    remoteRepoSecure         = Nothing,
    remoteRepoRootKeys       = [],
    remoteRepoKeyThreshold   = 0,
    remoteRepoShouldTryHttps = False
  }
  where
    isNameChar c = isAlphaNum c || c `elem` "_-."
    isUriChar  c = isAlphaNum c || c `elem` "+-=._/*()@'$:;&!?~"
-- ------------------------------------------------------------
-- * Helpers for Documentation
-- ------------------------------------------------------------
-- | Re-wrap a heading to 79 columns, one heading word-group per line.
headLine :: String -> String
headLine text = unlines (map unwords (wrapLine 79 (words text)))
-- | Re-wrap body text to 79 columns and terminate it with a blank line.
paragraph :: String -> String
paragraph text = unlines (map unwords (wrapLine 79 (words text))) ++ "\n"
-- | Re-wrap body text to 77 columns, indent each line by two spaces, and
-- terminate with a blank line.
indentParagraph :: String -> String
indentParagraph text =
  unlines (map (("  " ++) . unwords) (wrapLine 77 (words text)) ++ [""])
-- | Render a "Relevant global configuration keys:" help section, one
-- two-space-indented key per line.
relevantConfigValuesText :: [String] -> String
relevantConfigValuesText keys =
  unlines ("Relevant global configuration keys:" : map ("  " ++) keys)
|
themoritz/cabal
|
cabal-install/Distribution/Client/Setup.hs
|
bsd-3-clause
| 105,008
| 0
| 40
| 29,015
| 20,681
| 11,689
| 8,992
| 1,999
| 5
|
{-# LANGUAGE DuplicateRecordFields, OverloadedStrings #-}
import Test.HUnit
import Utils
import MotionDetector.Internal as MD
import WSServer.Internal
-- | Run the HUnit suite and discard the summary counts.
main = runTestTT tests >> return ()
-- HUnit test tree covering the motion detector and the WebSocket-server
-- acceleration parser.  ('|>' is a pipeline operator from the local Utils
-- module -- presumably flipped application; confirm there.)
tests = test [
  "MotionDetector.detectMotion" ~: [
    "should change position from Top to Bottom once border is reached" ~: let
      -- an x-acceleration of -6 from lastx = -3 crosses the border
      acc = zeroAcceleration {accx=(-6)}
      ms = MotionScaler 0 $ [1..20] |> fmap (\_ -> 0)
      -- expected scaler state: new sample pushed, window stays 20 long
      ms' = ms {motions=(accx acc):([1..19] |> fmap (\_ -> 0))}
      s = MDState {
        motion=Running, position=Top, lastChange=0,
        lastx=(-3), jumpIgnore=0, scaler=ms
        }
      s' = s {position=Bottom, lastx=accx acc, scaler=ms'}
      in (Running, s') ~=? (detectMotion s acc)
  ],
  "WSServer.parseAcceleration" ~: [
    -- input format: six semicolon-separated numbers
    "should parse proper acceleration" ~:
      (parseAcceleration "1;2;3;4;5;6") ~=? (Just $ MD.Acceleration 1 2 3 4 5 6),
    "should fail on invalid input" ~: (parseAcceleration "abc") ~=? Nothing
  ]
  ]
|
mat-hek/jump
|
spec/HSpec.hs
|
bsd-3-clause
| 1,021
| 1
| 21
| 273
| 329
| 183
| 146
| 23
| 1
|
-----------------------------------------------------------------------------
--
-- Module : Control.Monad.Supervisor.Transactions
-- Copyright :
-- License : BSD3
--
-- Maintainer : agocorona@gmail.com
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
{-#LANGUAGE FlexibleInstances, MultiParamTypeClasses,
GeneralizedNewtypeDeriving, FlexibleContexts
,UndecidableInstances, DeriveDataTypeable #-}
module Control.Monad.Supervisor.Transactions (
) where
import Control.Monad.State
import Control.Monad.Supervisor hiding (breturn)
import qualified Control.Monad.Supervisor as Sup(breturn)
import Data.Monoid
import Control.Monad.Trans
import Control.Monad.Supervisor.Session hiding (runs)
import Data.Typeable
-- | Checkpointing return that first clears the session's 'BackTracking'
-- flag (marking execution as moving forward) before delegating to
-- 'Sup.breturn'.
breturn x = setSessionData (BackTracking False) >> Sup.breturn x
-- | When the supervisor unwinds ('supBack'), record the fact in the
-- session so 'isGoingBack' can observe it.  The argument @s@ is ignored.
-- ('!>' is presumably a debug-trace operator from the Supervisor package
-- -- confirm there.)
instance MonadState SessionData m => Supervise SessionData m where
  supBack s = setSessionData (BackTracking True) !> "supBack"
  -- supervise _ mx= setSessionData (BackTracking False) >> mx !> "supervise"
--instance (MonadState a m ,MonadState b m)=> MonadState (a,b) m where
-- get= do
-- x <- get
-- y <- get
-- return (x,y)
--
-- put (x,y)= put x >> put y
-- | Monads that can report whether execution is currently unwinding
-- (backtracking) rather than moving forward.
class Monad m => BackTrackingStatus m where
  isGoingBack :: m Bool
-- | Monoids whose elements can be cancelled out by an inverse element.
class Monoid t => HasInverse t where
  cancel :: t -> t
  -- property: forall w, w => v <> t <> w <> cancel t === v <> w
-- | Session-stored marker: 'True' while the computation is unwinding.
newtype BackTracking = BackTracking Bool deriving Typeable
-- | Apply a reversible event to a resource.  Moving forward, the event is
-- combined in and a checkpoint is set ('breturn'); while backtracking, the
-- event's inverse ('cancel') is applied instead and @fail ""@ keeps the
-- unwinding going.  ('!>' is presumably a debug-trace operator -- confirm.)
processReversibleEvent :: (Supervise SessionData m, HasInverse a) => a -> a -> (a -> Sup SessionData m ()) -> Sup SessionData m ()
processReversibleEvent resource event update = do
  now <- isGoingBack
  if now !> ("back="++ show now) then update (resource <> cancel event) >> fail ""
  else update (resource <> event) >> breturn ()
--processIrreversibleEvent :: (BackTrackingStatus m, Monoid a)=> a -> a -> Sup m a -> Sup m a
--processIrreversibleEvent resource event failAction= do
-- now <- lift $ isGoingBack
-- if not now
-- then breturn $ resource <> event
-- else do
-- breturn() -- will not go back beyond this
-- failAction
-- NOTE(review): orphan 'Monoid Int' instance (the sum monoid), required so
-- 'Potatoes' below can newtype-derive 'Monoid'.  Orphans like this clash
-- with any other module defining the same instance; a 'Sum Int'-style
-- newtype would be safer.
instance Monoid Int where mempty=0; mappend =(+)
-- | Example resource: a count of potatoes in the shopping cart.
newtype Potatoes = Potatoes Int deriving (Num, Monoid, Typeable)

-- | Cancelling a potato purchase removes the same amount.
instance HasInverse Potatoes where cancel = negate
-- | Read the 'BackTracking' marker from the session; an absent marker
-- means the computation is moving forward.
instance Supervise SessionData m => BackTrackingStatus m where
  isGoingBack = do
    BackTracking b <- getSessionData `onNothing` (return (BackTracking False) !> "backtracking not set" ) !> "isgoingback"
    return b
-- | Run a session-state computation with the given initial session data.
runs = flip evalStateT
-- | Interactive demo of reversible transactions: add potatoes to a cart,
-- then trigger backtracking (via @fail ""@) so the addition is undone by
-- 'processReversibleEvent' on the way back.
main = do
  print "hi"
  runs emptySessionData $ runSup $ do
    setSessionData (BackTracking True)
    getSessionData `onNothing` error "not set" >>= \(BackTracking b) -> liftIO (print b)
    Potatoes potatoes <- getSessionData `onNothing` return (Potatoes 0)
    liftIO . print $ " Now you have"++ show potatoes ++ " in your shopping cart"
    liftIO . print $ "press enter for more potatoes"
    liftIO $ getLine
    liftIO . print $ " you want to add potatoes to the shopping cart"
    processReversibleEvent (Potatoes potatoes) (Potatoes 5) setSessionData
    liftIO . print $ "but wait!. you do not need potatoes, you need oranges!. "
    liftIO . print $ "No problem. press enter to drop the potatoes from the cart"
    liftIO $ getLine
    -- deliberately fail to start the backtracking pass
    fail ""
|
agocorona/control-monad-supervisor
|
Control/Monad/Supervisor/Transactions.hs
|
bsd-3-clause
| 3,402
| 0
| 15
| 694
| 706
| 373
| 333
| 47
| 2
|
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[Simplify]{The main module of the simplifier}
-}
{-# LANGUAGE CPP #-}
module Simplify ( simplTopBinds, simplExpr, simplRules ) where
#include "HsVersions.h"
import DynFlags
import SimplMonad
import Type hiding ( substTy, substTyVar, extendTvSubst, extendCvSubst )
import SimplEnv
import SimplUtils
import FamInstEnv ( FamInstEnv )
import Literal ( litIsLifted ) --, mkMachInt ) -- temporarily commented out. See #8326
import Id
import MkId ( seqId, voidPrimId )
import MkCore ( mkImpossibleExpr, castBottomExpr )
import IdInfo
import Name ( Name, mkSystemVarName, isExternalName, getOccFS )
import Coercion hiding ( substCo, substCoVar )
import OptCoercion ( optCoercion )
import FamInstEnv ( topNormaliseType_maybe )
import DataCon ( DataCon, dataConWorkId, dataConRepStrictness
, isMarkedStrict, dataConRepArgTys ) --, dataConTyCon, dataConTag, fIRST_TAG )
--import TyCon ( isEnumerationTyCon ) -- temporarily commented out. See #8326
import CoreMonad ( Tick(..), SimplifierMode(..) )
import CoreSyn
import Demand ( StrictSig(..), dmdTypeDepth, isStrictDmd )
import PprCore ( pprCoreExpr )
import CoreUnfold
import CoreUtils
import CoreArity
--import PrimOp ( tagToEnumKey ) -- temporarily commented out. See #8326
import Rules ( mkRuleInfo, lookupRule, getRules )
import TysPrim ( voidPrimTy ) --, intPrimTy ) -- temporarily commented out. See #8326
import BasicTypes ( TopLevelFlag(..), isTopLevel, RecFlag(..) )
import MonadUtils ( foldlM, mapAccumLM, liftIO )
import Maybes ( orElse )
--import Unique ( hasKey ) -- temporarily commented out. See #8326
import Control.Monad
import Outputable
import FastString
import Pair
import Util
import ErrUtils
{-
The guts of the simplifier is in this module, but the driver loop for
the simplifier is in SimplCore.hs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
The simplifier used to guarantee that the output had no shadowing, but
it does not do so any more. (Actually, it never did!) The reason is
documented with simplifyArgs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
Many parts of the simplifier return a bunch of "floats" as well as an
expression. This is wrapped as a datatype SimplUtils.FloatsWith.
All "floats" are let-binds, not case-binds, but some non-rec lets may
be unlifted (with RHS ok-for-speculation).
-----------------------------------------
ORGANISATION OF FUNCTIONS
-----------------------------------------
simplTopBinds
- simplify all top-level binders
- for NonRec, call simplRecOrTopPair
- for Rec, call simplRecBind
------------------------------
simplExpr (applied lambda) ==> simplNonRecBind
simplExpr (Let (NonRec ...) ..) ==> simplNonRecBind
simplExpr (Let (Rec ...) ..) ==> simplify binders; simplRecBind
------------------------------
simplRecBind [binders already simplified]
- use simplRecOrTopPair on each pair in turn
simplRecOrTopPair [binder already simplified]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
Returns:
- check for PreInlineUnconditionally
- simplLazyBind
simplNonRecBind
Used for: non-top-level non-recursive bindings
beta reductions (which amount to the same thing)
Because it can deal with strict arts, it takes a
"thing-inside" and returns an expression
- check for PreInlineUnconditionally
- simplify binder, including its IdInfo
- if strict binding
simplStrictArg
mkAtomicArgs
completeNonRecX
else
simplLazyBind
addFloats
simplNonRecX: [given a *simplified* RHS, but an *unsimplified* binder]
Used for: binding case-binder and constr args in a known-constructor case
- check for PreInLineUnconditionally
- simplify binder
- completeNonRecX
------------------------------
simplLazyBind: [binder already simplified, RHS not]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
non-top-level, but *lazy* non-recursive bindings
[must not be strict or unboxed]
Returns floats + an augmented environment, not an expression
- substituteIdInfo and add result to in-scope
[so that rules are available in rec rhs]
- simplify rhs
- mkAtomicArgs
- float if exposes constructor or PAP
- completeBind
completeNonRecX: [binder and rhs both simplified]
- if the thing needs case binding (unlifted and not ok-for-spec)
build a Case
else
completeBind
addFloats
completeBind: [given a simplified RHS]
[used for both rec and non-rec bindings, top level and not]
- try PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
Right hand sides and arguments
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In many ways we want to treat
(a) the right hand side of a let(rec), and
(b) a function argument
in the same way. But not always! In particular, we would
like to leave these arguments exactly as they are, so they
will match a RULE more easily.
f (g x, h x)
g (+ x)
It's harder to make the rule match if we ANF-ise the constructor,
or eta-expand the PAP:
f (let { a = g x; b = h x } in (a,b))
g (\y. + x y)
On the other hand if we see the let-defns
p = (g x, h x)
q = + x
then we *do* want to ANF-ise and eta-expand, so that p and q
can be safely inlined.
Even floating lets out is a bit dubious. For let RHS's we float lets
out if that exposes a value, so that the value can be inlined more vigorously.
For example
r = let x = e in (x,x)
Here, if we float the let out we'll expose a nice constructor. We did experiments
that showed this to be a generally good thing. But it was a bad thing to float
lets out unconditionally, because that meant they got allocated more often.
For function arguments, there's less reason to expose a constructor (it won't
get inlined). Just possibly it might make a rule match, but I'm pretty skeptical.
So for the moment we don't float lets out of function arguments either.
Eta expansion
~~~~~~~~~~~~~~
For eta expansion, we want to catch things like
case e of (a,b) -> \x -> case a of (p,q) -> \y -> r
If the \x was on the RHS of a let, we'd eta expand to bring the two
lambdas together. And in general that's a good thing to do. Perhaps
we should eta expand wherever we find a (value) lambda? Then the eta
expansion at a let RHS can concentrate solely on the PAP case.
************************************************************************
* *
\subsection{Bindings}
* *
************************************************************************
-}
simplTopBinds :: SimplEnv -> [InBind] -> SimplM SimplEnv
-- ^ Simplify all the top-level bindings of a module in order, threading
-- the environment (and hence the in-scope set) left to right.
simplTopBinds env0 binds0
  = do  { -- Put all the top-level binders into scope at the start
          -- so that if a transformation rule has unexpectedly brought
          -- anything into scope, then we don't get a complaint about that.
          -- It's rather as if the top-level binders were imported.
          -- See note [Glomming] in OccurAnal.
        ; env1 <- simplRecBndrs env0 (bindersOfBinds binds0)
        ; env2 <- simpl_binds env1 binds0
        ; freeTick SimplifierDone
        ; return env2 }
  where
        -- We need to track the zapped top-level binders, because
        -- they should have their fragile IdInfo zapped (notably occurrence info)
        -- That's why we run down binds and bndrs' simultaneously.
        --
        simpl_binds :: SimplEnv -> [InBind] -> SimplM SimplEnv
        simpl_binds env []           = return env
        simpl_binds env (bind:binds) = do { env' <- simpl_bind env bind
                                          ; simpl_binds env' binds }

        -- A Rec group is simplified wholesale; a top-level NonRec is
        -- treated via simplRecOrTopPair after its (substituted) RULES
        -- have been attached to the binder.
        simpl_bind env (Rec pairs)  = simplRecBind env TopLevel pairs
        simpl_bind env (NonRec b r) = do { (env', b') <- addBndrRules env b (lookupRecBndr env b)
                                         ; simplRecOrTopPair env' TopLevel NonRecursive b b' r }
{-
************************************************************************
* *
\subsection{Lazy bindings}
* *
************************************************************************
simplRecBind is used for
* recursive bindings only
-}
-- | Simplify a recursive group of bindings (used for recursive bindings
-- only).  The binders have already been simplified; here we attach their
-- substituted RULES and then simplify each pair in turn, threading the
-- environment through the whole group.
simplRecBind :: SimplEnv -> TopLevelFlag
             -> [(InId, InExpr)]
             -> SimplM SimplEnv
simplRecBind env0 top_lvl pairs0
  = do  { (env_with_info, triples) <- mapAccumLM add_rules env0 pairs0
        ; env1 <- go (zapFloats env_with_info) triples
        ; return (env0 `addRecFloats` env1) }
          -- addFloats adds the floats from env1,
          -- _and_ updates env0 with the in-scope set from env1
  where
    add_rules :: SimplEnv -> (InBndr,InExpr) -> SimplM (SimplEnv, (InBndr, OutBndr, InExpr))
    -- Add the (substituted) rules to the binder
    add_rules env (bndr, rhs)
        = do { (env', bndr') <- addBndrRules env bndr (lookupRecBndr env bndr)
             ; return (env', (bndr, bndr', rhs)) }

    -- Simplify each (old binder, new binder, rhs) triple, accumulating
    -- the extended environment.
    go env [] = return env
    go env ((old_bndr, new_bndr, rhs) : pairs)
        = do { env' <- simplRecOrTopPair env top_lvl Recursive old_bndr new_bndr rhs
             ; go env' pairs }
{-
simplRecOrTopPair is used for
* recursive bindings (whether top level or not)
* top-level non-recursive bindings
It assumes the binder has already been simplified, but not its IdInfo.
-}
simplRecOrTopPair :: SimplEnv
                  -> TopLevelFlag -> RecFlag
                  -> InId -> OutBndr -> InExpr  -- Binder and rhs
                  -> SimplM SimplEnv    -- Returns an env that includes the binding
-- Used for recursive bindings (top level and nested) and for top-level
-- non-recursive bindings.  Assumes the binder has already been
-- simplified, but not its IdInfo.  First tries pre-inline-unconditionally;
-- otherwise falls through to simplLazyBind.
simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs
  = do { dflags <- getDynFlags
       ; trace_bind dflags $
         if preInlineUnconditionally dflags env top_lvl old_bndr rhs
                    -- Check for unconditional inline
         then do tick (PreInlineUnconditionally old_bndr)
                 return (extendIdSubst env old_bndr (mkContEx env rhs))
         else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env }
  where
    trace_bind dflags thing_inside
      | not (dopt Opt_D_verbose_core2core dflags)
      = thing_inside
      | otherwise
      = pprTrace "SimplBind" (ppr old_bndr) thing_inside
        -- trace_bind emits a trace for each top-level binding, which
        -- helps to locate the tracing for inlining and rule firing
{-
simplLazyBind is used for
* [simplRecOrTopPair] recursive bindings (whether top level or not)
* [simplRecOrTopPair] top-level non-recursive bindings
* [simplNonRecE] non-top-level *lazy* non-recursive bindings
Nota bene:
1. It assumes that the binder is *already* simplified,
and is in scope, and its IdInfo too, except unfolding
2. It assumes that the binder type is lifted.
3. It does not check for pre-inline-unconditionally;
that should have been done already.
-}
simplLazyBind :: SimplEnv
              -> TopLevelFlag -> RecFlag
              -> InId -> OutId          -- Binder, both pre-and post simpl
                                        -- The OutId has IdInfo, except arity, unfolding
              -> InExpr -> SimplEnv     -- The RHS and its environment
              -> SimplM SimplEnv
-- Precondition: rhs obeys the let/app invariant
-- Used for recursive bindings, top-level non-recursive bindings, and
-- non-top-level *lazy* non-recursive bindings.  The binder is already
-- simplified and in scope; pre-inline-unconditionally has been done.
simplLazyBind env top_lvl is_rec bndr bndr1 rhs rhs_se
  = -- pprTrace "simplLazyBind" ((ppr bndr <+> ppr bndr1) $$ ppr rhs $$ ppr (seIdSubst rhs_se)) $
    do  { let   rhs_env     = rhs_se `setInScope` env
                (tvs, body) = case collectTyAndValBinders rhs of
                                (tvs, [], body)
                                  | surely_not_lam body -> (tvs, body)
                                _                       -> ([], rhs)

                surely_not_lam (Lam {})     = False
                surely_not_lam (Tick t e)
                  | not (tickishFloatable t) = surely_not_lam e
                     -- eta-reduction could float
                surely_not_lam _            = True
                -- Do not do the "abstract tyvar" thing if there's
                -- a lambda inside, because it defeats eta-reduction
                --    f = /\a. \x. g a x
                -- should eta-reduce.

        ; (body_env, tvs') <- simplBinders rhs_env tvs
                -- See Note [Floating and type abstraction] in SimplUtils

        -- Simplify the RHS
        ; let rhs_cont = mkRhsStop (substTy body_env (exprType body))
        ; (body_env1, body1) <- simplExprF body_env body rhs_cont

        -- ANF-ise a constructor or PAP rhs
        ; (body_env2, body2) <- prepareRhs top_lvl body_env1 bndr1 body1

        -- Decide whether to float the RHS's lets out past the binding
        ; (env', rhs')
            <-  if not (doFloatFromRhs top_lvl is_rec False body2 body_env2)
                then                    -- No floating, revert to body1
                     do { rhs' <- mkLam tvs' (wrapFloats body_env1 body1) rhs_cont
                        ; return (env, rhs') }

                else if null tvs then   -- Simple floating
                     do { tick LetFloatFromLet
                        ; return (addFloats env body_env2, body2) }

                else                    -- Do type-abstraction first
                     do { tick LetFloatFromLet
                        ; (poly_binds, body3) <- abstractFloats tvs' body_env2 body2
                        ; rhs' <- mkLam tvs' body3 rhs_cont
                        ; env' <- foldlM (addPolyBind top_lvl) env poly_binds
                        ; return (env', rhs') }

        ; completeBind env' top_lvl bndr bndr1 rhs' }
{-
A specialised variant of simplNonRec used when the RHS is already simplified,
notably in knownCon. It uses case-binding where necessary.
-}
simplNonRecX :: SimplEnv
             -> InId            -- Old binder
             -> OutExpr         -- Simplified RHS
             -> SimplM SimplEnv
-- A specialised variant of simplNonRec used when the RHS is already
-- simplified, notably in knownCon.  It uses case-binding where necessary.
-- Precondition: rhs satisfies the let/app invariant
simplNonRecX env bndr new_rhs
  | isDeadBinder bndr   -- Not uncommon; e.g. case (a,b) of c { (p,q) -> p }
  = return env          -- Here c is dead, and we avoid creating
                        -- the binding c = (a,b)

  | Coercion co <- new_rhs
  = return (extendCvSubst env bndr co)

  | otherwise
  = do  { (env', bndr') <- simplBinder env bndr
        ; completeNonRecX NotTopLevel env' (isStrictId bndr) bndr bndr' new_rhs }
          -- simplNonRecX is only used for NotTopLevel things
completeNonRecX :: TopLevelFlag -> SimplEnv
                -> Bool          -- Strict binding?  (passed on to doFloatFromRhs)
                -> InId          -- Old binder
                -> OutId         -- New binder
                -> OutExpr       -- Simplified RHS
                -> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
completeNonRecX top_lvl env is_strict old_bndr new_bndr new_rhs
  = do  { (env1, rhs1) <- prepareRhs top_lvl (zapFloats env) new_bndr new_rhs
        ; (env2, rhs2) <-
                if doFloatFromRhs NotTopLevel NonRecursive is_strict rhs1 env1
                then do { tick LetFloatFromLet
                        ; return (addFloats env env1, rhs1) }   -- Add the floats to the main env
                else return (env, wrapFloats env1 rhs1)         -- Wrap the floats around the RHS
        ; completeBind env2 NotTopLevel old_bndr new_bndr rhs2 }
{-
{- No, no, no! Do not try preInlineUnconditionally in completeNonRecX
Doing so risks exponential behaviour, because new_rhs has been simplified once already
In the cases described by the following comment, postInlineUnconditionally will
catch many of the relevant cases.
-- This happens; for example, the case_bndr during case of
-- known constructor: case (a,b) of x { (p,q) -> ... }
-- Here x isn't mentioned in the RHS, so we don't want to
-- create the (dead) let-binding let x = (a,b) in ...
--
-- Similarly, single occurrences can be inlined vigourously
-- e.g. case (f x, g y) of (a,b) -> ....
-- If a,b occur once we can avoid constructing the let binding for them.
Furthermore in the case-binding case preInlineUnconditionally risks extra thunks
-- Consider case I# (quotInt# x y) of
-- I# v -> let w = J# v in ...
-- If we gaily inline (quotInt# x y) for v, we end up building an
-- extra thunk:
-- let w = J# (quotInt# x y) in ...
-- because quotInt# can fail.
| preInlineUnconditionally env NotTopLevel bndr new_rhs
= thing_inside (extendIdSubst env bndr (DoneEx new_rhs))
-}
----------------------------------
prepareRhs takes a putative RHS, checks whether it's a PAP or
constructor application and, if so, converts it to ANF, so that the
resulting thing can be inlined more easily. Thus
x = (f a, g b)
becomes
t1 = f a
t2 = g b
x = (t1,t2)
We also want to deal well cases like this
v = (f e1 `cast` co) e2
Here we want to make e1,e2 trivial and get
x1 = e1; x2 = e2; v = (f x1 `cast` co) v2
That's what the 'go' loop in prepareRhs does
-}
prepareRhs :: TopLevelFlag -> SimplEnv -> OutId -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Adds new floats to the env iff that allows us to return a good RHS,
-- i.e. converts a constructor application or PAP to ANF so the result
-- can be inlined more easily.
prepareRhs top_lvl env id (Cast rhs co)    -- Note [Float coercions]
  | Pair ty1 _ty2 <- coercionKind co       -- Do *not* do this if rhs has an unlifted type
  , not (isUnliftedType ty1)               -- see Note [Float coercions (unlifted)]
  = do  { (env', rhs') <- makeTrivialWithInfo top_lvl env (getOccFS id) sanitised_info rhs
        ; return (env', Cast rhs' co) }
  where
    -- Keep strictness/demand info on the new binder;
    -- see Note [Preserve strictness when floating coercions]
    sanitised_info = vanillaIdInfo `setStrictnessInfo` strictnessInfo info
                                   `setDemandInfo` demandInfo info
    info = idInfo id

prepareRhs top_lvl env0 id rhs0
  = do  { (_is_exp, env1, rhs1) <- go 0 env0 rhs0
        ; return (env1, rhs1) }
  where
    -- 'go' walks down the application spine counting value arguments.
    -- It returns (is_exp, env, rhs'), where is_exp is True when the head
    -- is an expandable application (constructor or PAP), in which case
    -- the value arguments have been made trivial.
    go n_val_args env (Cast rhs co)
        = do { (is_exp, env', rhs') <- go n_val_args env rhs
             ; return (is_exp, env', Cast rhs' co) }
    go n_val_args env (App fun (Type ty))
        = do { (is_exp, env', rhs') <- go n_val_args env fun
             ; return (is_exp, env', App rhs' (Type ty)) }
    go n_val_args env (App fun arg)
        = do { (is_exp, env', fun') <- go (n_val_args+1) env fun
             ; case is_exp of
                True  -> do { (env'', arg') <- makeTrivial top_lvl env' (getOccFS id) arg
                            ; return (True, env'', App fun' arg') }
                -- Not expandable: revert to the *original* env and
                -- expression, discarding any floats made on the way down
                False -> return (False, env, App fun arg) }
    go n_val_args env (Var fun)
        = return (is_exp, env, Var fun)
        where
          is_exp = isExpandableApp fun n_val_args   -- The fun a constructor or PAP
                        -- See Note [CONLIKE pragma] in BasicTypes
                        -- The definition of is_exp should match that in
                        -- OccurAnal.occAnalApp

    go n_val_args env (Tick t rhs)
        -- We want to be able to float bindings past this
        -- tick. Non-scoping ticks don't care.
        | tickishScoped t == NoScope
        = do { (is_exp, env', rhs') <- go n_val_args env rhs
             ; return (is_exp, env', Tick t rhs') }

        -- On the other hand, for scoping ticks we need to be able to
        -- copy them on the floats, which in turn is only allowed if
        -- we can obtain non-counting ticks.
        | not (tickishCounts t) || tickishCanSplit t
        = do { (is_exp, env', rhs') <- go n_val_args (zapFloats env) rhs
             ; let tickIt (id, expr) = (id, mkTick (mkNoCount t) expr)
                   floats' = seFloats $ env `addFloats` mapFloats env' tickIt
             ; return (is_exp, env' { seFloats = floats' }, Tick t rhs') }

    go _ env other
        = return (False, env, other)
{-
Note [Float coercions]
~~~~~~~~~~~~~~~~~~~~~~
When we find the binding
x = e `cast` co
we'd like to transform it to
x' = e
x = x `cast` co -- A trivial binding
There's a chance that e will be a constructor application or function, or something
like that, so moving the coercion to the usage site may well cancel the coercions
and lead to further optimisation. Example:
data family T a :: *
data instance T Int = T Int
foo :: Int -> Int -> Int
foo m n = ...
where
x = T m
go 0 = 0
go n = case x of { T m -> go (n-m) }
-- This case should optimise
Note [Preserve strictness when floating coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the Note [Float coercions] transformation, keep the strictness info.
Eg
f = e `cast` co -- f has strictness SSL
When we transform to
f' = e -- f' also has strictness SSL
f = f' `cast` co -- f still has strictness SSL
Its not wrong to drop it on the floor, but better to keep it.
Note [Float coercions (unlifted)]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BUT don't do [Float coercions] if 'e' has an unlifted type.
This *can* happen:
foo :: Int = (error (# Int,Int #) "urk")
`cast` CoUnsafe (# Int,Int #) Int
If do the makeTrivial thing to the error call, we'll get
foo = case error (# Int,Int #) "urk" of v -> v `cast` ...
But 'v' isn't in scope!
These strange casts can happen as a result of case-of-case
bar = case (case x of { T -> (# 2,3 #); F -> error "urk" }) of
(# p,q #) -> p+q
-}
makeTrivialArg :: SimplEnv -> ArgSpec -> SimplM (SimplEnv, ArgSpec)
-- ANF-ise the expression held in a ValArg; CastBy and TyArg
-- arguments are returned unchanged.
makeTrivialArg env arg_spec =
  case arg_spec of
    ValArg e -> do { (env1, e1) <- makeTrivial NotTopLevel env (fsLit "arg") e
                   ; return (env1, ValArg e1) }
    _        -> return (env, arg_spec)          -- CastBy, TyArg
makeTrivial :: TopLevelFlag -> SimplEnv
            -> FastString  -- ^ a "friendly name" to build the new binder from
            -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Bind the expression to a fresh variable unless it is already trivial,
-- returning the (possibly new) trivial expression.  Delegates to
-- makeTrivialWithInfo with vanilla IdInfo.
makeTrivial top_lvl env context =
  makeTrivialWithInfo top_lvl env context vanillaIdInfo
makeTrivialWithInfo :: TopLevelFlag -> SimplEnv
                    -> FastString
                       -- ^ a "friendly name" to build the new binder from
                    -> IdInfo -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Propagate strictness and demand info to the new binder
-- Note [Preserve strictness when floating coercions]
-- Returned SimplEnv has same substitution as incoming one
makeTrivialWithInfo top_lvl env context info expr
  | exprIsTrivial expr                          -- Already trivial
  || not (bindingOk top_lvl expr expr_ty)       -- Cannot trivialise
                                                --   See Note [Cannot trivialise]
  = return (env, expr)
  | otherwise           -- See Note [Take care] below
  = do  { uniq <- getUniqueM
        ; let name = mkSystemVarName uniq context
              var = mkLocalIdOrCoVarWithInfo name expr_ty info
        ; env' <- completeNonRecX top_lvl env False var var expr
        ; expr' <- simplVar env' var
        ; return (env', expr') }
        -- The simplVar is needed because we're constructing a new binding
        --     a = rhs
        -- And if rhs is of form (rhs1 |> co), then we might get
        --     a1 = rhs1
        --     a = a1 |> co
        -- and now a's RHS is trivial and can be substituted out, and that
        -- is what completeNonRecX will do
        -- To put it another way, it's as if we'd simplified
        --     let var = e in var
  where
    expr_ty = exprType expr
bindingOk :: TopLevelFlag -> CoreExpr -> Type -> Bool
-- True iff we can have a binding of this expression at this level.
-- Top level cannot bind unlifted values; nested lets always can.
-- Precondition: the type is the type of the expression.
bindingOk top_lvl _ expr_ty =
  not (isTopLevel top_lvl && isUnliftedType expr_ty)
{-
Note [Cannot trivialise]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f :: Int -> Addr#
foo :: Bar
foo = Bar (f 3)
Then we can't ANF-ise foo, even though we'd like to, because
we can't make a top-level binding for the Addr# (f 3). And if
so we don't want to turn it into
foo = let x = f 3 in Bar x
because we'll just end up inlining x back, and that makes the
simplifier loop. Better not to ANF-ise it at all.
A case in point is literal strings (a MachStr is not regarded as
trivial):
foo = Ptr "blob"#
We don't want to ANF-ise this.
************************************************************************
* *
\subsection{Completing a lazy binding}
* *
************************************************************************
completeBind
* deals only with Ids, not TyVars
* takes an already-simplified binder and RHS
* is used for both recursive and non-recursive bindings
* is used for both top-level and non-top-level bindings
It does the following:
- tries discarding a dead binding
- tries PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
It does *not* attempt to do let-to-case. Why? Because it is used for
- top-level bindings (when let-to-case is impossible)
- many situations where the "rhs" is known to be a WHNF
(so let-to-case is inappropriate).
Nor does it do the atomic-argument thing
-}
completeBind :: SimplEnv
             -> TopLevelFlag            -- Flag stuck into unfolding
             -> InId                    -- Old binder
             -> OutId -> OutExpr        -- New binder and RHS
             -> SimplM SimplEnv
-- completeBind may choose to do its work
--      * by extending the substitution (e.g. let x = y in ...)
--      * or by adding to the floats in the envt
--
-- Precondition: rhs obeys the let/app invariant
completeBind env top_lvl old_bndr new_bndr new_rhs
 | isCoVar old_bndr
 = case new_rhs of
     Coercion co -> return (extendCvSubst env old_bndr co)
     _           -> return (addNonRec env new_bndr new_rhs)

 | otherwise
 = ASSERT( isId new_bndr )
   do { let old_info = idInfo old_bndr
            old_unf  = unfoldingInfo old_info
            occ_info = occInfo old_info

        -- Do eta-expansion on the RHS of the binding
        -- See Note [Eta-expanding at let bindings] in SimplUtils
      ; (new_arity, final_rhs) <- tryEtaExpandRhs env new_bndr new_rhs

        -- Simplify the unfolding
      ; new_unfolding <- simplLetUnfolding env top_lvl old_bndr final_rhs old_unf

      ; dflags <- getDynFlags
      ; if postInlineUnconditionally dflags env top_lvl new_bndr occ_info
                                     final_rhs new_unfolding

                        -- Inline and discard the binding
        then do { tick (PostInlineUnconditionally old_bndr)
                ; return (extendIdSubst env old_bndr (DoneEx final_rhs)) }
                -- Use the substitution to make quite, quite sure that the
                -- substitution will happen, since we are going to discard the binding

        else
   do { let info1 = idInfo new_bndr `setArityInfo` new_arity

              -- Unfolding info: Note [Setting the new unfolding]
            info2 = info1 `setUnfoldingInfo` new_unfolding

              -- Demand info: Note [Setting the demand info]
              --
              -- We also have to nuke demand info if for some reason
              -- eta-expansion *reduces* the arity of the binding to less
              -- than that of the strictness sig. This can happen: see Note [Arity decrease].
            info3 | isEvaldUnfolding new_unfolding
                    || (case strictnessInfo info2 of
                          StrictSig dmd_ty -> new_arity < dmdTypeDepth dmd_ty)
                  = zapDemandInfo info2 `orElse` info2
                  | otherwise
                  = info2

            final_id = new_bndr `setIdInfo` info3

      ; -- pprTrace "Binding" (ppr final_id <+> ppr new_unfolding) $
        return (addNonRec env final_id final_rhs) } }
                -- The addNonRec adds it to the in-scope set too
------------------------------
addPolyBind :: TopLevelFlag -> SimplEnv -> OutBind -> SimplM SimplEnv
-- Add a new binding to the environment, complete with its unfolding
-- but *do not* do postInlineUnconditionally, because we have already
-- processed some of the scope of the binding
-- We still want the unfolding though.  Consider
--      let
--            x = /\a. let y = ... in Just y
--      in body
-- Then we float the y-binding out (via abstractFloats and addPolyBind)
-- but 'x' may well then be inlined in 'body' in which case we'd like the
-- opportunity to inline 'y' too.
--
-- INVARIANT: the arity is correct on the incoming binders
addPolyBind top_lvl env (NonRec poly_id rhs)
  = do  { unfolding <- simplLetUnfolding env top_lvl poly_id rhs noUnfolding
                        -- Assumes that poly_id did not have an INLINE prag
                        -- which is perhaps wrong.  ToDo: think about this
        ; let final_id = setIdInfo poly_id $
                         idInfo poly_id `setUnfoldingInfo` unfolding
        ; return (addNonRec env final_id rhs) }

addPolyBind _ env bind@(Rec _)
  = return (extendFloats env bind)
        -- Hack: letrecs are more awkward, so we extend "by steam"
        -- without adding unfoldings etc.  At worst this leads to
        -- more simplifier iterations
{- Note [Arity decrease]
~~~~~~~~~~~~~~~~~~~~~~~~
Generally speaking the arity of a binding should not decrease. But it *can*
legitimately happen because of RULES. Eg
f = g Int
where g has arity 2, will have arity 2. But if there's a rewrite rule
g Int --> h
where h has arity 1, then f's arity will decrease. Here's a real-life example,
which is in the output of Specialise:
Rec {
$dm {Arity 2} = \d.\x. op d
{-# RULES forall d. $dm Int d = $s$dm #-}
dInt = MkD .... opInt ...
opInt {Arity 1} = $dm dInt
$s$dm {Arity 0} = \x. op dInt }
Here opInt has arity 1; but when we apply the rule its arity drops to 0.
That's why Specialise goes to a little trouble to pin the right arity
on specialised functions too.
Note [Setting the demand info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the unfolding is a value, the demand info may
go pear-shaped, so we nuke it. Example:
let x = (a,b) in
case x of (p,q) -> h p q x
Here x is certainly demanded. But after we've nuked
the case, we'll get just
let x = (a,b) in h a b x
and now x is not demanded (I'm assuming h is lazy)
This really happens. Similarly
let f = \x -> e in ...f..f...
After inlining f at some of its call sites the original binding may
(for example) be no longer strictly demanded.
The solution here is a bit ad hoc...
************************************************************************
* *
\subsection[Simplify-simplExpr]{The main function: simplExpr}
* *
************************************************************************
The reason for this OutExprStuff stuff is that we want to float *after*
simplifying a RHS, not before. If we do so naively we get quadratic
behaviour as things float out.
To see why it's important to do it after, consider this (real) example:
let t = f x
in fst t
==>
let t = let a = e1
b = e2
in (a,b)
in fst t
==>
let a = e1
b = e2
t = (a,b)
in
a -- Can't inline a this round, cos it appears twice
==>
e1
Each of the ==> steps is a round of simplification. We'd save a
whole round if we float first. This can cascade. Consider
let f = g d
in \x -> ...f...
==>
let f = let d1 = ..d.. in \y -> e
in \x -> ...f...
==>
let d1 = ..d..
in \x -> ...(\y ->e)...
Only in this second round can the \y be applied, and it
might do the same again.
-}
simplExpr :: SimplEnv -> CoreExpr -> SimplM CoreExpr
-- | Simplify a complete expression with a boring ('Stop') continuation.
-- The continuation carries the *output* type of the expression, i.e.
-- the input type after applying the environment's substitution.
simplExpr env expr = simplExprC env expr boring_stop
  where
    boring_stop :: SimplCont
    boring_stop = mkBoringStop out_ty
    out_ty :: OutType
    out_ty = substTy env (exprType expr)
simplExprC :: SimplEnv -> CoreExpr -> SimplCont -> SimplM CoreExpr
-- Simplify an expression, given a continuation.  We start with an
-- empty float set, and wrap whatever floats the simplification
-- produces back around the result expression.
simplExprC env expr cont
  = do { (env_with_floats, expr') <- simplExprF (zapFloats env) expr cont
       ; return (wrapFloats env_with_floats expr') }
--------------------------------------------------
--------------------------------------------------
simplExprF :: SimplEnv -> InExpr -> SimplCont
           -> SimplM (SimplEnv, OutExpr)
-- Entry point for simplifying an expression against a continuation.
-- This wrapper exists only as a convenient place to hang a debug
-- trace (of the expression, continuation and substitutions) before
-- dispatching on the expression's shape in simplExprF1.
simplExprF env e cont
  = simplExprF1 env e cont
simplExprF1 :: SimplEnv -> InExpr -> SimplCont
            -> SimplM (SimplEnv, OutExpr)
-- Worker for simplExprF: dispatch on the syntactic shape of the
-- expression, delegating each form to its specialised simplifier.
simplExprF1 env (Var v) cont = simplIdF env v cont
simplExprF1 env (Lit lit) cont = rebuild env (Lit lit) cont
simplExprF1 env (Tick t expr) cont = simplTick env t expr cont
simplExprF1 env (Cast body co) cont = simplCast env body co cont
simplExprF1 env (Coercion co) cont = simplCoercionF env co cont
simplExprF1 env (Type ty) cont = ASSERT( contIsRhsOrArg cont )
                                 rebuild env (Type (substTy env ty)) cont
-- An application pushes the argument onto the continuation and
-- simplifies the function in that extended context.
simplExprF1 env (App fun arg) cont
  = simplExprF env fun $
    case arg of
      Type ty -> ApplyToTy { sc_arg_ty = substTy env ty
                           , sc_hole_ty = substTy env (exprType fun)
                           , sc_cont = cont }
      _ -> ApplyToVal { sc_arg = arg, sc_env = env
                      , sc_dup = NoDup, sc_cont = cont }
simplExprF1 env expr@(Lam {}) cont
  = simplLam env zapped_bndrs body cont
        -- The main issue here is under-saturated lambdas
        --   (\x1. \x2. e) arg1
        -- Here x1 might have "occurs-once" occ-info, because occ-info
        -- is computed assuming that a group of lambdas is applied
        -- all at once. If there are too few args, we must zap the
        -- occ-info, UNLESS the remaining binders are one-shot
  where
    (bndrs, body) = collectBinders expr
    zapped_bndrs | need_to_zap = map zap bndrs
                 | otherwise = bndrs
    need_to_zap = any zappable_bndr (drop n_args bndrs)
    n_args = countArgs cont
        -- NB: countArgs counts all the args (incl type args)
        -- and likewise drop counts all binders (incl type lambdas)
    zappable_bndr b = isId b && not (isOneShotBndr b)
    zap b | isTyVar b = b
          | otherwise = zapLamIdInfo b
-- A case expression becomes a Select continuation on the scrutinee.
simplExprF1 env (Case scrut bndr _ alts) cont
  = simplExprF env scrut (Select { sc_dup = NoDup, sc_bndr = bndr
                                 , sc_alts = alts
                                 , sc_env = env, sc_cont = cont })
simplExprF1 env (Let (Rec pairs) body) cont
  = do { env' <- simplRecBndrs env (map fst pairs)
                 -- NB: bndrs' don't have unfoldings or rules
                 -- We add them as we go down
       ; env'' <- simplRecBind env' NotTopLevel pairs
       ; simplExprF env'' body cont }
simplExprF1 env (Let (NonRec bndr rhs) body) cont
  = simplNonRecE env bndr (rhs, env) ([], body) cont
---------------------------------
simplType :: SimplEnv -> InType -> SimplM OutType
-- Apply the environment's type substitution.  Kept monadic just so
-- we can force the result with seqType before returning it.
simplType env ty
  = let new_ty = substTy env ty
    in seqType new_ty `seq` return new_ty
---------------------------------
simplCoercionF :: SimplEnv -> InCoercion -> SimplCont
               -> SimplM (SimplEnv, OutExpr)
-- Simplify a coercion appearing in expression position, then hand
-- the (Coercion co') expression to the rebuilder.
simplCoercionF env co cont
  = simplCoercion env co >>= \co' -> rebuild env (Coercion co') cont
simplCoercion :: SimplEnv -> InCoercion -> SimplM OutCoercion
-- Optimise a coercion under the current type/coercion substitution,
-- forcing the optimised form before returning it.
simplCoercion env co
  = seqCo opt_co `seq` return opt_co
  where
    opt_co = optCoercion (getTCvSubst env) co
-----------------------------------
-- | Push a TickIt context outwards past applications and cases, as
-- long as this is a non-scoping tick, to let case and application
-- optimisations apply.
simplTick :: SimplEnv -> Tickish Id -> InExpr -> SimplCont
          -> SimplM (SimplEnv, OutExpr)
simplTick env tickish expr cont
  -- A scoped tick turns into a continuation, so that we can spot
  -- (scc t (\x . e)) in simplLam and eliminate the scc. If we didn't do
  -- it this way, then it would take two passes of the simplifier to
  -- reduce ((scc t (\x . e)) e').
  -- NB, don't do this with counting ticks, because if the expr is
  -- bottom, then rebuildCall will discard the continuation.
-- XXX: we cannot do this, because the simplifier assumes that
-- the context can be pushed into a case with a single branch. e.g.
--    scc<f>  case expensive of p -> e
-- becomes
--    case expensive of p -> scc<f> e
--
-- So I'm disabling this for now. It just means we will do more
-- simplifier iterations than necessary in some cases.
--  | tickishScoped tickish && not (tickishCounts tickish)
--  = simplExprF env expr (TickIt tickish cont)
  -- For unscoped or soft-scoped ticks, we are allowed to float in new
  -- cost, so we simply push the continuation inside the tick. This
  -- has the effect of moving the tick to the outside of a case or
  -- application context, allowing the normal case and application
  -- optimisations to fire.
  | tickish `tickishScopesLike` SoftScope
  = do { (env', expr') <- simplExprF env expr cont
       ; return (env', mkTick tickish expr')
       }
  -- Push tick inside if the context looks like this will allow us to
  -- do a case-of-case - see Note [case-of-scc-of-case]
  | Select {} <- cont, Just expr' <- push_tick_inside
  = simplExprF env expr' cont
  -- We don't want to move the tick, but we might still want to allow
  -- floats to pass through with appropriate wrapping (or not, see
  -- wrap_floats below)
  --- | not (tickishCounts tickish) || tickishCanSplit tickish
  -- = wrap_floats
  | otherwise
  = no_floating_past_tick
 where
  -- Try to push tick inside a case, see Note [case-of-scc-of-case].
  push_tick_inside =
     case expr0 of
       Case scrut bndr ty alts
          -> Just $ Case (tickScrut scrut) bndr ty (map tickAlt alts)
       _other -> Nothing
   where (ticks, expr0) = stripTicksTop movable (Tick tickish expr)
         movable t = not (tickishCounts t) ||
                     t `tickishScopesLike` NoScope ||
                     tickishCanSplit t
         tickScrut e = foldr mkTick e ticks
         -- Alternatives get annotated with all ticks that scope in some way,
         -- but we don't want to count entries.
         tickAlt (c,bs,e) = (c,bs, foldr mkTick e ts_scope)
         ts_scope = map mkNoCount $
                      filter (not . (`tickishScopesLike` NoScope)) ticks
  -- Keep the tick where it is: simplify the expression with the inner
  -- part of the continuation (sans floats), wrap the floats, re-apply
  -- the tick, then rebuild with the outer continuation.
  no_floating_past_tick =
    do { let (inc,outc) = splitCont cont
       ; (env', expr') <- simplExprF (zapFloats env) expr inc
       ; let tickish' = simplTickish env tickish
       ; (env'', expr'') <- rebuild (zapFloats env')
                                    (wrapFloats env' expr')
                                    (TickIt tickish' outc)
       ; return (addFloats env env'', expr'')
       }
  -- Alternative version that wraps outgoing floats with the tick. This
  -- results in ticks being duplicated, as we don't make any attempt to
  -- eliminate the tick if we re-inline the binding (because the tick
  -- semantics allows unrestricted inlining of HNFs), so I'm not doing
  -- this any more. FloatOut will catch any real opportunities for
  -- floating.
  --
  -- wrap_floats =
  --   do { let (inc,outc) = splitCont cont
  --      ; (env', expr') <- simplExprF (zapFloats env) expr inc
  --      ; let tickish' = simplTickish env tickish
  --      ; let wrap_float (b,rhs) = (zapIdStrictness (setIdArity b 0),
  --                                  mkTick (mkNoCount tickish') rhs)
  --             -- when wrapping a float with mkTick, we better zap the Id's
  --             -- strictness info and arity, because it might be wrong now.
  --      ; let env'' = addFloats env (mapFloats env' wrap_float)
  --      ; rebuild env'' expr' (TickIt tickish' outc)
  --      }
  -- Apply the Id substitution to the Ids carried by a Breakpoint tick;
  -- other tick kinds carry no Ids and pass through unchanged.
  simplTickish env tickish
    | Breakpoint n ids <- tickish
          = Breakpoint n (map (getDoneId . substId env) ids)
    | otherwise = tickish
  -- Push type application and coercion inside a tick
  splitCont :: SimplCont -> (SimplCont, SimplCont)
  splitCont cont@(ApplyToTy { sc_cont = tail }) = (cont { sc_cont = inc }, outc)
    where (inc,outc) = splitCont tail
  splitCont (CastIt co c) = (CastIt co inc, outc)
    where (inc,outc) = splitCont c
  splitCont other = (mkBoringStop (contHoleType other), other)
  getDoneId (DoneId id) = id
  getDoneId (DoneEx e) = getIdFromTrivialExpr e -- Note [substTickish] in CoreSubst
  getDoneId other = pprPanic "getDoneId" (ppr other)
-- Note [case-of-scc-of-case]
-- It's pretty important to be able to transform case-of-case when
-- there's an SCC in the way. For example, the following comes up
-- in nofib/real/compress/Encode.hs:
--
-- case scctick<code_string.r1>
-- case $wcode_string_r13s wild_XC w1_s137 w2_s138 l_aje
-- of _ { (# ww1_s13f, ww2_s13g, ww3_s13h #) ->
-- (ww1_s13f, ww2_s13g, ww3_s13h)
-- }
-- of _ { (ww_s12Y, ww1_s12Z, ww2_s130) ->
-- tick<code_string.f1>
-- (ww_s12Y,
-- ww1_s12Z,
-- PTTrees.PT
-- @ GHC.Types.Char @ GHC.Types.Int wild2_Xj ww2_s130 r_ajf)
-- }
--
-- We really want this case-of-case to fire, because then the 3-tuple
-- will go away (indeed, the CPR optimisation is relying on this
-- happening). But the scctick is in the way - we need to push it
-- inside to expose the case-of-case. So we perform this
-- transformation on the inner case:
--
-- scctick c (case e of { p1 -> e1; ...; pn -> en })
-- ==>
-- case (scctick c e) of { p1 -> scc c e1; ...; pn -> scc c en }
--
-- So we've moved a constant amount of work out of the scc to expose
-- the case. We only do this when the continuation is interesting:
-- for now, it has to be another Case (maybe generalise this later).
{-
************************************************************************
* *
\subsection{The main rebuilder}
* *
************************************************************************
-}
rebuild :: SimplEnv -> OutExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- At this point the substitution in the SimplEnv should be irrelevant
-- only the in-scope set and floats should matter
-- Plug the already-simplified expression back into the continuation,
-- consuming one continuation frame per recursive call.
rebuild env expr cont
  = case cont of
      Stop {} -> return (env, expr)
      TickIt t cont -> rebuild env (mkTick t expr) cont
      CastIt co cont -> rebuild env (mkCast expr co) cont
         -- NB: mkCast implements the (Coercion co |> g) optimisation
      Select { sc_bndr = bndr, sc_alts = alts, sc_env = se, sc_cont = cont }
        -> rebuildCase (se `setFloats` env) expr bndr alts cont
      StrictArg info _ cont -> rebuildCall env (info `addValArgTo` expr) cont
      StrictBind b bs body se cont -> do { env' <- simplNonRecX (se `setFloats` env) b expr
                                           -- expr satisfies let/app since it started life
                                           -- in a call to simplNonRecE
                                         ; simplLam env' bs body cont }
      ApplyToTy { sc_arg_ty = ty, sc_cont = cont}
        -> rebuild env (App expr (Type ty)) cont
      ApplyToVal { sc_arg = arg, sc_env = se, sc_dup = dup_flag, sc_cont = cont}
        -- See Note [Avoid redundant simplification]
        | isSimplified dup_flag -> rebuild env (App expr arg) cont
        | otherwise -> do { arg' <- simplExpr (se `setInScope` env) arg
                          ; rebuild env (App expr arg') cont }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
-}
simplCast :: SimplEnv -> InExpr -> Coercion -> SimplCont
          -> SimplM (SimplEnv, OutExpr)
-- Simplify (body |> co0) against cont0: optimise the coercion, push
-- it into the continuation (possibly cancelling or splitting it),
-- and then simplify the body in the resulting continuation.
simplCast env body co0 cont0
  = do { co1 <- simplCoercion env co0
       ; cont1 <- addCoerce co1 cont0
       ; simplExprF env body cont1 }
  where
    addCoerce co cont = add_coerce co (coercionKind co) cont
    -- A reflexive coercion is dropped entirely.
    add_coerce _co (Pair s1 k1) cont -- co :: ty~ty
      | s1 `eqType` k1 = return cont -- is a no-op
    add_coerce co1 (Pair s1 _k2) (CastIt co2 cont)
      | (Pair _l1 t1) <- coercionKind co2
             --  e |> (g1 :: S1~L) |> (g2 :: L~T1)
             -- ==>
             --  e,                       if S1=T1
             --  e |> (g1 . g2 :: S1~T1)  otherwise
             --
             -- For example, in the initial form of a worker
             -- we may find (coerce T (coerce S (\x.e))) y
             -- and we'd like it to simplify to e[y/x] in one round
             -- of simplification
      , s1 `eqType` t1 = return cont -- The coerces cancel out
      | otherwise = return (CastIt (mkTransCo co1 co2) cont)
    add_coerce co (Pair s1s2 _t1t2) cont@(ApplyToTy { sc_arg_ty = arg_ty, sc_cont = tail })
             -- (f |> g) ty ---> (f ty) |> (g @ ty)
             -- This implements the PushT rule from the paper
      | isForAllTy s1s2
      = do { cont' <- addCoerce new_cast tail
           ; return (cont { sc_cont = cont' }) }
      where
        new_cast = mkInstCo co (mkNomReflCo arg_ty)
    add_coerce co (Pair s1s2 t1t2) (ApplyToVal { sc_arg = arg, sc_env = arg_se
                                               , sc_dup = dup, sc_cont = cont })
      | isFunTy s1s2 -- This implements the Push rule from the paper
      , isFunTy t1t2 -- Check t1t2 to ensure 'arg' is a value arg
             -- (e |> (g :: s1s2 ~ t1->t2)) f
             -- ===>
             -- (e (f |> (arg g :: t1~s1))
             -- |> (res g :: s2->t2)
             --
             -- t1t2 must be a function type, t1->t2, because it's applied
             -- to something but s1s2 might conceivably not be
             --
             -- When we build the ApplyTo we can't mix the out-types
             -- with the InExpr in the argument, so we simply substitute
             -- to make it all consistent. It's a bit messy.
             -- But it isn't a common case.
             --
             -- Example of use: Trac #995
      = do { (dup', arg_se', arg') <- simplArg env dup arg_se arg
           ; cont' <- addCoerce co2 cont
           ; return (ApplyToVal { sc_arg = mkCast arg' (mkSymCo co1)
                                , sc_env = arg_se'
                                , sc_dup = dup'
                                , sc_cont = cont' }) }
      where
        -- we split coercion t1->t2 ~ s1->s2 into t1 ~ s1 and
        -- t2 ~ s2 with left and right on the curried form:
        -- (->) t1 t2 ~ (->) s1 s2
        [co1, co2] = decomposeCo 2 co
    -- Fallback: no cancellation or pushing possible; keep the cast.
    add_coerce co _ cont = return (CastIt co cont)
simplArg :: SimplEnv -> DupFlag -> StaticEnv -> CoreExpr
         -> SimplM (DupFlag, StaticEnv, OutExpr)
-- Simplify an argument unless it has already been simplified.
-- A freshly simplified argument is marked Simplified and carries a
-- zapped substitution environment, so it is never re-substituted.
simplArg env dup arg_env arg
  | isSimplified dup
  = return (dup, arg_env, arg)
  | otherwise
  = do { arg' <- simplExpr (arg_env `setInScope` env) arg
       ; return (Simplified, zapSubstEnv arg_env, arg') }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
Note [Zap unfolding when beta-reducing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lambda-bound variables can have stable unfoldings, such as
$j = \x. \b{Unf=Just x}. e
See Note [Case binders and join points] below; the unfolding for b lets
us optimise e better. However when we beta-reduce it we want to
revert to using the actual value, otherwise we can end up in the
stupid situation of
let x = blah in
let b{Unf=Just x} = y
in ...b...
Here it'd be far better to drop the unfolding and use the actual RHS.
-}
simplLam :: SimplEnv -> [InId] -> InExpr -> SimplCont
         -> SimplM (SimplEnv, OutExpr)
-- Simplify a multi-binder lambda against its continuation, performing
-- beta reduction where the continuation supplies arguments.
simplLam env [] body cont = simplExprF env body cont
-- Beta reduction
simplLam env (bndr:bndrs) body (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
  = do { tick (BetaReduction bndr)
       ; simplLam (extendTvSubst env bndr arg_ty) bndrs body cont }
simplLam env (bndr:bndrs) body (ApplyToVal { sc_arg = arg, sc_env = arg_se
                                           , sc_cont = cont })
  = do { tick (BetaReduction bndr)
       ; simplNonRecE env' (zap_unfolding bndr) (arg, arg_se) (bndrs, body) cont }
  where
    -- A coercion argument goes straight into the coercion substitution;
    -- other arguments are handled by simplNonRecE.
    env' | Coercion co <- arg
         = extendCvSubst env bndr co
         | otherwise
         = env
    zap_unfolding bndr -- See Note [Zap unfolding when beta-reducing]
      | isId bndr, isStableUnfolding (realIdUnfolding bndr)
      = setIdUnfolding bndr NoUnfolding
      | otherwise = bndr
-- discard a non-counting tick on a lambda. This may change the
-- cost attribution slightly (moving the allocation of the
-- lambda elsewhere), but we don't care: optimisation changes
-- cost attribution all the time.
simplLam env bndrs body (TickIt tickish cont)
  | not (tickishCounts tickish)
  = simplLam env bndrs body cont
-- Not enough args, so there are real lambdas left to put in the result
simplLam env bndrs body cont
  = do { (env', bndrs') <- simplLamBndrs env bndrs
       ; body' <- simplExpr env' body
       ; new_lam <- mkLam bndrs' body' cont
       ; rebuild env' new_lam cont }
simplLamBndrs :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
-- Simplify a group of lambda binders, threading the environment
-- left to right.
simplLamBndrs = mapAccumLM simplLamBndr
-------------
simplLamBndr :: SimplEnv -> Var -> SimplM (SimplEnv, Var)
-- Used for lambda binders. These sometimes have unfoldings added by
-- the worker/wrapper pass that must be preserved, because they can't
-- be reconstructed from context. For example:
--  f x = case x of (a,b) -> fw a b x
--  fw a b x{=(a,b)} = ...
-- The "{=(a,b)}" is an unfolding we can't reconstruct otherwise.
simplLamBndr env bndr
  | not has_special_unf
  = simplBinder env bndr       -- Normal case
  | otherwise                  -- Preserve the existing unfolding
  = do { (env1, bndr1) <- simplBinder env bndr
       ; unf' <- simplUnfolding env1 NotTopLevel bndr old_unf
       ; let bndr2 = bndr1 `setIdUnfolding` unf'
       ; return (modifyInScope env1 bndr2, bndr2) }
  where
    old_unf = idUnfolding bndr
    has_special_unf = isId bndr && hasSomeUnfolding old_unf
------------------
simplNonRecE :: SimplEnv
             -> InBndr -- The binder
             -> (InExpr, SimplEnv) -- Rhs of binding (or arg of lambda)
             -> ([InBndr], InExpr) -- Body of the let/lambda
                                   -- \xs.e
             -> SimplCont
             -> SimplM (SimplEnv, OutExpr)
-- simplNonRecE is used for
--  * non-top-level non-recursive lets in expressions
--  * beta reduction
--
-- It deals with strict bindings, via the StrictBind continuation,
-- which may abort the whole process
--
-- Precondition: rhs satisfies the let/app invariant
--               Note [CoreSyn let/app invariant] in CoreSyn
--
-- The "body" of the binding comes as a pair of ([InId],InExpr)
-- representing a lambda; so we recurse back to simplLam
-- Why?  Because of the binder-occ-info-zapping done before
--       the call to simplLam in simplExprF (Lam ...)
-- First deal with type applications and type lets
--    (/\a. e) (Type ty)   and   (let a = Type ty in e)
simplNonRecE env bndr (Type ty_arg, rhs_se) (bndrs, body) cont
  = ASSERT( isTyVar bndr )
    do { ty_arg' <- simplType (rhs_se `setInScope` env) ty_arg
       ; simplLam (extendTvSubst env bndr ty_arg') bndrs body cont }
simplNonRecE env bndr (rhs, rhs_se) (bndrs, body) cont
  = do dflags <- getDynFlags
       case () of
         -- Inline unconditionally: record the substitution and go on.
         _ | preInlineUnconditionally dflags env NotTopLevel bndr rhs
           -> do { tick (PreInlineUnconditionally bndr)
                 ; -- pprTrace "preInlineUncond" (ppr bndr <+> ppr rhs) $
                   simplLam (extendIdSubst env bndr (mkContEx rhs_se rhs)) bndrs body cont }
         -- Strict binder: simplify the RHS first, with a StrictBind
         -- continuation that can discard the rest on bottom.
           | isStrictId bndr -- Includes coercions
           -> simplExprF (rhs_se `setFloats` env) rhs
                         (StrictBind bndr bndrs body env cont)
         -- Ordinary lazy binding.
           | otherwise
           -> ASSERT( not (isTyVar bndr) )
              do { (env1, bndr1) <- simplNonRecBndr env bndr
                 ; (env2, bndr2) <- addBndrRules env1 bndr bndr1
                 ; env3 <- simplLazyBind env2 NotTopLevel NonRecursive bndr bndr2 rhs rhs_se
                 ; simplLam env3 bndrs body cont }
{-
************************************************************************
* *
Variables
* *
************************************************************************
-}
simplVar :: SimplEnv -> InVar -> SimplM OutExpr
-- Look up an InVar in the environment and return its simplified form.
-- Type and coercion variables are handled by direct substitution;
-- term variables go through the Id substitution.
simplVar env var
  | isTyVar var = return (Type (substTyVar env var))
  | isCoVar var = return (Coercion (substCoVar env var))
  | otherwise
  = case substId env var of
      ContEx tvs cvs ids e -> simplExpr (setSubstEnv env tvs cvs ids) e
      DoneEx e             -> return e
      DoneId out_id        -> return (Var out_id)
simplIdF :: SimplEnv -> InId -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- Simplify an occurrence of an Id in the given continuation.
-- A fully-substituted Id proceeds to the call-site machinery;
-- substituted expressions are re-simplified in the right sub-env.
simplIdF env var cont =
  case substId env var of
    DoneId out_id        -> completeCall env out_id cont
    DoneEx e             -> simplExprF (zapSubstEnv env) e cont
    ContEx tvs cvs ids e -> simplExprF (setSubstEnv env tvs cvs ids) e cont
-- Note [zapSubstEnv]
-- The template is already simplified, so don't re-substitute.
-- This is VITAL. Consider
-- let x = e in
-- let y = \z -> ...x... in
-- \ x -> ...y...
-- We'll clone the inner \x, adding x->x' in the id_subst
-- Then when we inline y, we must *not* replace x by x' in
-- the inlined copy!!
---------------------------------------------------------
-- Dealing with a call site
completeCall :: SimplEnv -> OutId -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- Handle a call of an out-Id: first try inlining via callSiteInline;
-- failing that, gather the function's rules and rebuild the call.
completeCall env var cont
  = do { ------------- Try inlining ----------------
         dflags <- getDynFlags
       ; let (lone_variable, arg_infos, call_cont) = contArgs cont
             n_val_args = length arg_infos
             interesting_cont = interestingCallContext call_cont
             unfolding = activeUnfolding env var
             maybe_inline = callSiteInline dflags var unfolding
                                           lone_variable arg_infos interesting_cont
       ; case maybe_inline of {
           Just expr -- There is an inlining!
             -> do { checkedTick (UnfoldingDone var)
                   ; dump_inline dflags expr cont
                   ; simplExprF (zapSubstEnv env) expr cont }
           ; Nothing -> do -- No inlining!
             { rule_base <- getSimplRules
             ; let info = mkArgInfo var (getRules rule_base var) n_val_args call_cont
             ; rebuildCall env info cont
             }}}
  where
    -- Emit inlining information when the relevant dump flags are on.
    dump_inline dflags unfolding cont
      | not (dopt Opt_D_dump_inlinings dflags) = return ()
      | not (dopt Opt_D_verbose_core2core dflags)
      = when (isExternalName (idName var)) $
        liftIO $ printOutputForUser dflags alwaysQualify $
        sep [text "Inlining done:", nest 4 (ppr var)]
      | otherwise
      = liftIO $ printOutputForUser dflags alwaysQualify $
        sep [text "Inlining done: " <> ppr var,
             nest 4 (vcat [text "Inlined fn: " <+> nest 2 (ppr unfolding),
                           text "Cont: " <+> ppr cont])]
rebuildCall :: SimplEnv
            -> ArgInfo
            -> SimplCont
            -> SimplM (SimplEnv, OutExpr)
-- Rebuild a call, consuming argument frames from the continuation
-- and accumulating them in the ArgInfo; finally try rewrite rules.
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_strs = [] }) cont
  -- When we run out of strictness args, it means
  -- that the call is definitely bottom; see SimplUtils.mkArgInfo
  -- Then we want to discard the entire strict continuation. E.g.
  --  * case (error "hello") of { ... }
  --  * (error "Hello") arg
  --  * f (error "Hello") where f is strict
  --  etc
  -- Then, especially in the first of these cases, we'd like to discard
  -- the continuation, leaving just the bottoming expression. But the
  -- type might not be right, so we may have to add a coerce.
  | not (contIsTrivial cont) -- Only do this if there is a non-trivial
  = return (env, castBottomExpr res cont_ty) -- contination to discard, else we do it
  where -- again and again!
    res = argInfoExpr fun rev_args
    cont_ty = contResultType cont
rebuildCall env info (CastIt co cont)
  = rebuildCall env (addCastTo info co) cont
rebuildCall env info (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
  = rebuildCall env (info `addTyArgTo` arg_ty) cont
rebuildCall env info@(ArgInfo { ai_encl = encl_rules, ai_type = fun_ty
                              , ai_strs = str:strs, ai_discs = disc:discs })
            (ApplyToVal { sc_arg = arg, sc_env = arg_se
                        , sc_dup = dup_flag, sc_cont = cont })
  | isSimplified dup_flag -- See Note [Avoid redundant simplification]
  = rebuildCall env (addValArgTo info' arg) cont
  | str -- Strict argument
  = -- pprTrace "Strict Arg" (ppr arg $$ ppr (seIdSubst env) $$ ppr (seInScope env)) $
    simplExprF (arg_se `setFloats` env) arg
               (StrictArg info' cci cont)
                -- Note [Shadowing]
  | otherwise -- Lazy argument
        -- DO NOT float anything outside, hence simplExprC
        -- There is no benefit (unlike in a let-binding), and we'd
        -- have to be very careful about bogus strictness through
        -- floating a demanded let.
  = do { arg' <- simplExprC (arg_se `setInScope` env) arg
                            (mkLazyArgStop (funArgTy fun_ty) cci)
       ; rebuildCall env (addValArgTo info' arg') cont }
  where
    info' = info { ai_strs = strs, ai_discs = discs }
    -- Choose the argument context from the surrounding rule/discount info.
    cci | encl_rules = RuleArgCtxt
        | disc > 0 = DiscArgCtxt -- Be keener here
        | otherwise = BoringCtxt -- Nothing interesting
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_rules = rules }) cont
  | null rules
  = rebuild env (argInfoExpr fun rev_args) cont -- No rules, common case
  | otherwise
  = do { -- We've accumulated a simplified call in <fun,rev_args>
         -- so try rewrite rules; see Note [RULEs apply to simplified arguments]
         -- See also Note [Rules for recursive functions]
       ; let env' = zapSubstEnv env -- See Note [zapSubstEnv];
                    -- and NB that 'rev_args' are all fully simplified
       ; mb_rule <- tryRules env' rules fun (reverse rev_args) cont
       ; case mb_rule of {
           Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
           -- Rules don't match
           ; Nothing -> rebuild env (argInfoExpr fun rev_args) cont -- No rules
         } }
{-
Note [RULES apply to simplified arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very desirable to try RULES once the arguments have been simplified, because
doing so ensures that rule cascades work in one pass. Consider
{-# RULES g (h x) = k x
f (k x) = x #-}
...f (g (h x))...
Then we want to rewrite (g (h x)) to (k x) and only then try f's rules. If
we match f's rules against the un-simplified RHS, it won't match. This
makes a particularly big difference when superclass selectors are involved:
op ($p1 ($p2 (df d)))
We want all this to unravel in one sweep.
Note [Avoid redundant simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because RULES apply to simplified arguments, there's a danger of repeatedly
simplifying already-simplified arguments. An important example is that of
(>>=) d e1 e2
Here e1, e2 are simplified before the rule is applied, but don't really
participate in the rule firing. So we mark them as Simplified to avoid
re-simplifying them.
Note [Shadowing]
~~~~~~~~~~~~~~~~
This part of the simplifier may break the no-shadowing invariant
Consider
f (...(\a -> e)...) (case y of (a,b) -> e')
where f is strict in its second arg
If we simplify the innermost one first we get (...(\a -> e)...)
Simplifying the second arg makes us float the case out, so we end up with
case y of (a,b) -> f (...(\a -> e)...) e'
So the output does not have the no-shadowing invariant. However, there is
no danger of getting name-capture, because when the first arg was simplified
we used an in-scope set that at least mentioned all the variables free in its
static environment, and that is enough.
We can't just do innermost first, or we'd end up with a dual problem:
case x of (a,b) -> f e (...(\a -> e')...)
I spent hours trying to recover the no-shadowing invariant, but I just could
not think of an elegant way to do it. The simplifier is already knee-deep in
continuations. We have to keep the right in-scope set around; AND we have
to get the effect that finding (error "foo") in a strict arg position will
discard the entire application and replace it with (error "foo"). Getting
all this at once is TOO HARD!
************************************************************************
* *
Rewrite rules
* *
************************************************************************
-}
tryRules :: SimplEnv -> [CoreRule]
         -> Id -> [ArgSpec] -> SimplCont
         -> SimplM (Maybe (CoreExpr, SimplCont))
-- The SimplEnv already has zapSubstEnv applied to it
-- Try to fire a rewrite rule for fn applied to args; on success
-- return the rule RHS and the continuation holding any leftover args.
tryRules env rules fn args call_cont
  | null rules
  = return Nothing
{- Disabled until we fix #8326
  | fn `hasKey` tagToEnumKey -- See Note [Optimising tagToEnum#]
  , [_type_arg, val_arg] <- args
  , Select dup bndr ((_,[],rhs1) : rest_alts) se cont <- call_cont
  , isDeadBinder bndr
  = do { dflags <- getDynFlags
       ; let enum_to_tag :: CoreAlt -> CoreAlt
             -- Takes K -> e into tagK# -> e
             -- where tagK# is the tag of constructor K
             enum_to_tag (DataAlt con, [], rhs)
               = ASSERT( isEnumerationTyCon (dataConTyCon con) )
                 (LitAlt tag, [], rhs)
               where
                 tag = mkMachInt dflags (toInteger (dataConTag con - fIRST_TAG))
             enum_to_tag alt = pprPanic "tryRules: tagToEnum" (ppr alt)
             new_alts = (DEFAULT, [], rhs1) : map enum_to_tag rest_alts
             new_bndr = setIdType bndr intPrimTy
                 -- The binder is dead, but should have the right type
       ; return (Just (val_arg, Select dup new_bndr new_alts se cont)) }
-}
  | otherwise
  = do { dflags <- getDynFlags
       ; case lookupRule dflags (getUnfoldingInRuleMatch env) (activeRule env)
                         fn (argInfoAppArgs args) rules of {
           Nothing ->
             do { nodump dflags -- This ensures that an empty file is written
                ; return Nothing } ; -- No rule matches
           Just (rule, rule_rhs) ->
             do { checkedTick (RuleFired (ru_name rule))
                ; let cont' = pushSimplifiedArgs env
                                                 (drop (ruleArity rule) args)
                                                 call_cont
                      -- (ruleArity rule) says how many args the rule consumed
                ; dump dflags rule rule_rhs
                ; return (Just (rule_rhs, cont')) }}}
  where
    -- Dump-flag-controlled reporting of rule firings.
    dump dflags rule rule_rhs
      | dopt Opt_D_dump_rule_rewrites dflags
      = log_rule dflags Opt_D_dump_rule_rewrites "Rule fired" $ vcat
          [ text "Rule:" <+> ftext (ru_name rule)
          , text "Before:" <+> hang (ppr fn) 2 (sep (map ppr args))
          , text "After: " <+> pprCoreExpr rule_rhs
          , text "Cont: " <+> ppr call_cont ]
      | dopt Opt_D_dump_rule_firings dflags
      = log_rule dflags Opt_D_dump_rule_firings "Rule fired:" $
          ftext (ru_name rule)
      | otherwise
      = return ()
    nodump dflags
      | dopt Opt_D_dump_rule_rewrites dflags
      = liftIO $ dumpSDoc dflags alwaysQualify Opt_D_dump_rule_rewrites "" empty
      | dopt Opt_D_dump_rule_firings dflags
      = liftIO $ dumpSDoc dflags alwaysQualify Opt_D_dump_rule_firings "" empty
      | otherwise
      = return ()
    log_rule dflags flag hdr details
      = liftIO . dumpSDoc dflags alwaysQualify flag "" $
          sep [text hdr, nest 4 details]
{-
Note [Optimising tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an enumeration data type:
data Foo = A | B | C
Then we want to transform
case tagToEnum# x of ==> case x of
A -> e1 DEFAULT -> e1
B -> e2 1# -> e2
C -> e3 2# -> e3
thereby getting rid of the tagToEnum# altogether. If there was a DEFAULT
alternative we retain it (remember it comes first). If not the case must
be exhaustive, and we reflect that in the transformed version by adding
a DEFAULT. Otherwise Lint complains that the new case is not exhaustive.
See #8317.
Note [Rules for recursive functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You might think that we shouldn't apply rules for a loop breaker:
doing so might give rise to an infinite loop, because a RULE is
rather like an extra equation for the function:
RULE: f (g x) y = x+y
Eqn: f a y = a-y
But it's too drastic to disable rules for loop breakers.
Even the foldr/build rule would be disabled, because foldr
is recursive, and hence a loop breaker:
foldr k z (build g) = g k z
So it's up to the programmer: rules can cause divergence
************************************************************************
* *
Rebuilding a case expression
* *
************************************************************************
Note [Case elimination]
~~~~~~~~~~~~~~~~~~~~~~~
The case-elimination transformation discards redundant case expressions.
Start with a simple situation:
case x# of ===> let y# = x# in e
y# -> e
(when x#, y# are of primitive type, of course). We can't (in general)
do this for algebraic cases, because we might turn bottom into
non-bottom!
The code in SimplUtils.prepareAlts has the effect of generalising this
idea to look for a case where we're scrutinising a variable, and we
know that only the default case can match. For example:
case x of
0# -> ...
DEFAULT -> ...(case x of
0# -> ...
DEFAULT -> ...) ...
Here the inner case is first trimmed to have only one alternative, the
DEFAULT, after which it's an instance of the previous case. This
really only shows up in eliminating error-checking code.
Note that SimplUtils.mkCase combines identical RHSs. So
case e of ===> case e of DEFAULT -> r
True -> r
False -> r
Now again the case may be eliminated by the CaseElim transformation.
This includes things like (==# a# b#)::Bool so that we simplify
case ==# a# b# of { True -> x; False -> x }
to just
x
This particular example shows up in default methods for
comparison operations (e.g. in (>=) for Int.Int32)
Note [Case elimination: lifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a case over a lifted type has a single alternative, and is being used
as a strict 'let' (all isDeadBinder bndrs), we may want to do this
transformation:
case e of r ===> let r = e in ...r...
_ -> ...r...
(a) 'e' is already evaluated (it may be so if e is a variable)
Specifically we check (exprIsHNF e). In this case
we can just allocate the WHNF directly with a let.
or
(b) 'x' is not used at all and e is ok-for-speculation
The ok-for-spec bit checks that we don't lose any
exceptions or divergence.
NB: it'd be *sound* to switch from case to let if the
scrutinee was not yet WHNF but was guaranteed to
converge; but sticking with case means we won't build a
thunk
or
(c) 'x' is used strictly in the body, and 'e' is a variable
Then we can just substitute 'e' for 'x' in the body.
See Note [Eliminating redundant seqs]
For (b), the "not used at all" test is important. Consider
case (case a ># b of { True -> (p,q); False -> (q,p) }) of
r -> blah
The scrutinee is ok-for-speculation (it looks inside cases), but we do
not want to transform to
let r = case a ># b of { True -> (p,q); False -> (q,p) }
in blah
because that builds an unnecessary thunk.
Note [Eliminating redundant seqs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have this:
case x of r { _ -> ..r.. }
where 'r' is used strictly in (..r..), the case is effectively a 'seq'
on 'x', but since 'r' is used strictly anyway, we can safely transform to
(...x...)
Note that this can change the error behaviour. For example, we might
transform
case x of { _ -> error "bad" }
--> error "bad"
which might be puzzling if 'x' is currently lambda-bound, but later gets
let-bound to (error "good").
Nevertheless, the paper "A semantics for imprecise exceptions" allows
this transformation. If you want to fix the evaluation order, use
'pseq'. See Trac #8900 for an example where the loss of this
transformation bit us in practice.
See also Note [Empty case alternatives] in CoreSyn.
Just for reference, the original code (added Jan 13) looked like this:
|| case_bndr_evald_next rhs
case_bndr_evald_next :: CoreExpr -> Bool
-- See Note [Case binder next]
case_bndr_evald_next (Var v) = v == case_bndr
case_bndr_evald_next (Cast e _) = case_bndr_evald_next e
case_bndr_evald_next (App e _) = case_bndr_evald_next e
case_bndr_evald_next (Case e _ _ _) = case_bndr_evald_next e
case_bndr_evald_next _ = False
(This came up when fixing Trac #7542. See also Note [Eta reduction of
an eval'd function] in CoreUtils.)
Note [Case elimination: unlifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case a +# b of r -> ...r...
Then we do case-elimination (to make a let) followed by inlining,
to get
.....(a +# b)....
If we have
case indexArray# a i of r -> ...r...
we might like to do the same, and inline the (indexArray# a i).
But indexArray# is not okForSpeculation, so we don't build a let
in rebuildCase (lest it get floated *out*), so the inlining doesn't
happen either.
This really isn't a big deal I think. The let can be
Further notes about case elimination
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider: test :: Integer -> IO ()
test = print
Turns out that this compiles to:
Print.test
= \ eta :: Integer
eta1 :: Void# ->
case PrelNum.< eta PrelNum.zeroInteger of wild { __DEFAULT ->
case hPutStr stdout
(PrelNum.jtos eta ($w[] @ Char))
eta1
of wild1 { (# new_s, a4 #) -> PrelIO.lvl23 new_s }}
Notice the strange '<' which has no effect at all. This is a funny one.
It started like this:
f x y = if x < 0 then jtos x
else if y==0 then "" else jtos x
At a particular call site we have (f v 1). So we inline to get
if v < 0 then jtos x
else if 1==0 then "" else jtos x
Now simplify the 1==0 conditional:
if v<0 then jtos v else jtos v
Now common-up the two branches of the case:
case (v<0) of DEFAULT -> jtos v
Why don't we drop the case? Because it's strict in v. It's technically
wrong to drop even unnecessary evaluations, and in practice they
may be a result of 'seq' so we *definitely* don't want to drop those.
I don't really know how to improve this situation.
-}
---------------------------------------------------------
--      Eliminate the case if possible
---------------------------------------------------------

-- rebuildCase: given a simplified scrutinee, the (as yet unsimplified)
-- case binder and alternatives, and the continuation, try to eliminate
-- the case before falling back to reallyRebuildCase.
rebuildCase, reallyRebuildCase
   :: SimplEnv
   -> OutExpr          -- Scrutinee
   -> InId             -- Case binder
   -> [InAlt]          -- Alternatives (increasing order)
   -> SimplCont
   -> SimplM (SimplEnv, OutExpr)

--------------------------------------------------
--      1. Eliminate the case if there's a known constructor
--------------------------------------------------

rebuildCase env scrut case_bndr alts cont
  | Lit lit <- scrut    -- No need for same treatment as constructors
                        -- because literals are inlined more vigorously
  , not (litIsLifted lit)
  = do  { tick (KnownBranch case_bndr)
        ; case findAlt (LitAlt lit) alts of
            Nothing           -> missingAlt env case_bndr alts cont
            Just (_, bs, rhs) -> simple_rhs bs rhs }

  | Just (con, ty_args, other_args) <- exprIsConApp_maybe (getUnfoldingInRuleMatch env) scrut
        -- Works when the scrutinee is a variable with a known unfolding
        -- as well as when it's an explicit constructor application
  = do  { tick (KnownBranch case_bndr)
        ; case findAlt (DataAlt con) alts of
            Nothing                 -> missingAlt env case_bndr alts cont
            Just (DEFAULT, bs, rhs) -> simple_rhs bs rhs
            Just (_, bs, rhs)       -> knownCon env scrut con ty_args other_args
                                                case_bndr bs rhs cont
        }
  where
    -- The DEFAULT (or literal) alternative binds nothing; just bind the
    -- case binder to the scrutinee and simplify the RHS.
    simple_rhs bs rhs = ASSERT( null bs )
                        do { env' <- simplNonRecX env case_bndr scrut
                               -- scrut is a constructor application,
                               -- hence satisfies let/app invariant
                           ; simplExprF env' rhs cont }

--------------------------------------------------
--      2. Eliminate the case if scrutinee is evaluated
--------------------------------------------------

rebuildCase env scrut case_bndr alts@[(_, bndrs, rhs)] cont
  -- See if we can get rid of the case altogether
  -- See Note [Case elimination]
  -- mkCase made sure that if all the alternatives are equal,
  -- then there is now only one (DEFAULT) rhs

  -- 2a.  Dropping the case altogether, if
  --      a) it binds nothing (so it's really just a 'seq')
  --      b) evaluating the scrutinee has no side effects
  | is_plain_seq
  , exprOkForSideEffects scrut
          -- The entire case is dead, so we can drop it
          -- if the scrutinee converges without having imperative
          -- side effects or raising a Haskell exception
          -- See Note [PrimOp can_fail and has_side_effects] in PrimOp
  = simplExprF env rhs cont

  -- 2b.  Turn the case into a let, if
  --      a) it binds only the case-binder
  --      b) unlifted case: the scrutinee is ok-for-speculation
  --           lifted case: the scrutinee is in HNF (or will later be demanded)
  | all_dead_bndrs
  , if is_unlifted
    then exprOkForSpeculation scrut  -- See Note [Case elimination: unlifted case]
    else exprIsHNF scrut             -- See Note [Case elimination: lifted case]
      || scrut_is_demanded_var scrut
  = do { tick (CaseElim case_bndr)
       ; env' <- simplNonRecX env case_bndr scrut
       ; simplExprF env' rhs cont }

  -- 2c. Try the seq rules if
  --      a) it binds only the case binder
  --      b) a rule for seq applies
  -- See Note [User-defined RULES for seq] in MkId
  | is_plain_seq
  = do { let scrut_ty = exprType scrut
             rhs_ty   = substTy env (exprType rhs)
             -- Arguments for a saturated application of 'seq', so that
             -- user-written RULES for seq can fire against this case.
             out_args = [ TyArg { as_arg_ty  = scrut_ty
                                , as_hole_ty = seq_id_ty }
                        , TyArg { as_arg_ty  = rhs_ty
                                , as_hole_ty = piResultTy seq_id_ty scrut_ty }
                        , ValArg scrut]
             rule_cont = ApplyToVal { sc_dup = NoDup, sc_arg = rhs
                                    , sc_env = env, sc_cont = cont }
             env' = zapSubstEnv env
             -- Lazily evaluated, so we don't do most of this

       ; rule_base <- getSimplRules
       ; mb_rule <- tryRules env' (getRules rule_base seqId) seqId out_args rule_cont
       ; case mb_rule of
           Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
           Nothing                -> reallyRebuildCase env scrut case_bndr alts cont }
  where
    is_unlifted    = isUnliftedType (idType case_bndr)
    all_dead_bndrs = all isDeadBinder bndrs   -- bndrs are [InId]
    is_plain_seq   = all_dead_bndrs && isDeadBinder case_bndr -- Evaluation *only* for effect
    seq_id_ty      = idType seqId

    scrut_is_demanded_var :: CoreExpr -> Bool
    -- See Note [Eliminating redundant seqs]
    scrut_is_demanded_var (Cast s _) = scrut_is_demanded_var s
    scrut_is_demanded_var (Var _)    = isStrictDmd (idDemandInfo case_bndr)
    scrut_is_demanded_var _          = False

-- Catch-all: none of the eliminations applied; rebuild a real case.
rebuildCase env scrut case_bndr alts cont
  = reallyRebuildCase env scrut case_bndr alts cont
--------------------------------------------------
--      3. Catch-all case
--------------------------------------------------

-- Actually reconstruct a case expression: split the continuation into a
-- dupable part (pushed into each alternative) and a non-dupable part
-- (wrapped around the whole case), then simplify the alternatives.
reallyRebuildCase env scrut case_bndr alts cont
  = do  {       -- Prepare the continuation;
                --      The new subst_env is in place
          (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont

        -- Simplify the alternatives
        ; (scrut', case_bndr', alts') <- simplAlts env' scrut case_bndr alts dup_cont

        ; dflags <- getDynFlags
        ; let alts_ty' = contResultType dup_cont
        ; case_expr <- mkCase dflags scrut' case_bndr' alts_ty' alts'

        -- Notice that rebuild gets the in-scope set from env', not alt_env
        -- (which in any case is only built in simplAlts)
        -- The case binder *must not* scope over the whole returned case-expression
        ; rebuild env' case_expr nodup_cont }
{-
simplCaseBinder checks whether the scrutinee is a variable, v. If so,
try to eliminate uses of v in the RHSs in favour of case_bndr; that
way, there's a chance that v will now only be used once, and hence
inlined.
Historical note: we use to do the "case binder swap" in the Simplifier
so there were additional complications if the scrutinee was a variable.
Now the binder-swap stuff is done in the occurrence analyser; see
OccurAnal Note [Binder swap].
Note [knownCon occ info]
~~~~~~~~~~~~~~~~~~~~~~~~
If the case binder is not dead, then neither are the pattern bound
variables:
case <any> of x { (a,b) ->
case x of { (p,q) -> p } }
Here (a,b) both look dead, but come alive after the inner case is eliminated.
The point is that we bring into the envt a binding
let x = (a,b)
after the outer case, and that makes (a,b) alive. At least we do unless
the case binder is guaranteed dead.
Note [Case alternative occ info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are simply reconstructing a case (the common case), we always
zap the occurrence info on the binders in the alternatives. Even
if the case binder is dead, the scrutinee is usually a variable, and *that*
can bring the case-alternative binders back to life.
See Note [Add unfolding for scrutinee]
Note [Improving seq]
~~~~~~~~~~~~~~~~~~~
Consider
type family F :: * -> *
type instance F Int = Int
... case e of x { DEFAULT -> rhs } ...
where x::F Int. Then we'd like to rewrite (F Int) to Int, getting
case e `cast` co of x'::Int
I# x# -> let x = x' `cast` sym co
in rhs
so that 'rhs' can take advantage of the form of x'.
Notice that Note [Case of cast] (in OccurAnal) may then apply to the result.
Nota Bene: We only do the [Improving seq] transformation if the
case binder 'x' is actually used in the rhs; that is, if the case
is *not* a *pure* seq.
a) There is no point in adding the cast to a pure seq.
b) There is a good reason not to: doing so would interfere
with seq rules (Note [Built-in RULES for seq] in MkId).
In particular, this [Improving seq] thing *adds* a cast
while [Built-in RULES for seq] *removes* one, so they
just flip-flop.
You might worry about
case v of x { __DEFAULT ->
... case (v `cast` co) of y { I# -> ... }}
This is a pure seq (since x is unused), so [Improving seq] won't happen.
But it's ok: the simplifier will replace 'v' by 'x' in the rhs to get
case v of x { __DEFAULT ->
... case (x `cast` co) of y { I# -> ... }}
Now the outer case is not a pure seq, so [Improving seq] will happen,
and then the inner case will disappear.
The need for [Improving seq] showed up in Roman's experiments. Example:
foo :: F Int -> Int -> Int
foo t n = t `seq` bar n
where
bar 0 = 0
bar n = bar (n - case t of TI i -> i)
Here we'd like to avoid repeatedly evaluating t inside the loop, by
taking advantage of the `seq`.
At one point I did transformation in LiberateCase, but it's more
robust here. (Otherwise, there's a danger that we'll simply drop the
'seq' altogether, before LiberateCase gets to see it.)
-}
-- Simplify the alternatives of a case: simplify the case binder, try the
-- [Improving seq] transformation, trim impossible alternatives, and
-- simplify each remaining alternative with the (dupable) continuation.
simplAlts :: SimplEnv
          -> OutExpr
          -> InId                 -- Case binder
          -> [InAlt]              -- Non-empty
          -> SimplCont
          -> SimplM (OutExpr, OutId, [OutAlt])  -- Includes the continuation
-- Like simplExpr, this just returns the simplified alternatives;
-- it does not return an environment
-- The returned alternatives can be empty, none are possible

simplAlts env scrut case_bndr alts cont'
  = do  { let env0 = zapFloats env

        ; (env1, case_bndr1) <- simplBinder env0 case_bndr

        ; fam_envs <- getFamEnvs
        ; (alt_env', scrut', case_bndr') <- improveSeq fam_envs env1 scrut
                                                       case_bndr case_bndr1 alts

        ; (imposs_deflt_cons, in_alts) <- prepareAlts scrut' case_bndr' alts
          -- NB: it's possible that the returned in_alts is empty: this is handled
          -- by the caller (rebuildCase) in the missingAlt function

        ; alts' <- mapM (simplAlt alt_env' (Just scrut') imposs_deflt_cons case_bndr' cont') in_alts
        ; -- pprTrace "simplAlts" (ppr case_bndr $$ ppr alts_ty $$ ppr alts_ty' $$ ppr alts $$ ppr cont') $
          return (scrut', case_bndr', alts') }
------------------------------------
-- For a non-pure seq (single DEFAULT alternative whose case binder is
-- used), rewrite the scrutinee's type through type-family reductions so
-- the alternatives see the normalised type.  See Note [Improving seq].
improveSeq :: (FamInstEnv, FamInstEnv) -> SimplEnv
           -> OutExpr -> InId -> OutId -> [InAlt]
           -> SimplM (SimplEnv, OutExpr, OutId)
-- Note [Improving seq]
improveSeq fam_envs env scrut case_bndr case_bndr1 [(DEFAULT,_,_)]
  | not (isDeadBinder case_bndr) -- Not a pure seq!  See Note [Improving seq]
  , Just (co, ty2) <- topNormaliseType_maybe fam_envs (idType case_bndr1)
  = do { case_bndr2 <- newId (fsLit "nt") ty2
        -- The original case binder is replaced (via the substitution) by
        -- the new binder cast back to the original type.
        ; let rhs  = DoneEx (Var case_bndr2 `Cast` mkSymCo co)
              env2 = extendIdSubst env case_bndr rhs
        ; return (env2, scrut `Cast` co, case_bndr2) }

-- Otherwise: nothing to improve; return everything unchanged.
improveSeq _ env scrut _ case_bndr1 _
  = return (env, scrut, case_bndr1)
------------------------------------
-- Simplify one case alternative: bind the pattern variables, record
-- suitable unfoldings for the case binder (and scrutinee variable), and
-- simplify the RHS against the given (dupable) continuation.
simplAlt :: SimplEnv
         -> Maybe OutExpr  -- The scrutinee
         -> [AltCon]       -- These constructors can't be present when
                           -- matching the DEFAULT alternative
         -> OutId          -- The case binder
         -> SimplCont
         -> InAlt
         -> SimplM OutAlt

simplAlt env _ imposs_deflt_cons case_bndr' cont' (DEFAULT, bndrs, rhs)
  = ASSERT( null bndrs )
    do  { let env' = addBinderUnfolding env case_bndr'
                                        (mkOtherCon imposs_deflt_cons)
                -- Record the constructors that the case-binder *can't* be.
        ; rhs' <- simplExprC env' rhs cont'
        ; return (DEFAULT, [], rhs') }

simplAlt env scrut' _ case_bndr' cont' (LitAlt lit, bndrs, rhs)
  = ASSERT( null bndrs )
    do  { env' <- addAltUnfoldings env scrut' case_bndr' (Lit lit)
        ; rhs' <- simplExprC env' rhs cont'
        ; return (LitAlt lit, [], rhs') }

simplAlt env scrut' _ case_bndr' cont' (DataAlt con, vs, rhs)
  = do  {       -- Deal with the pattern-bound variables
                -- Mark the ones that are in ! positions in the
                -- data constructor as certainly-evaluated.
                -- NB: simplLamBinders preserves this eval info
        ; let vs_with_evals = add_evals (dataConRepStrictness con)
        ; (env', vs') <- simplLamBndrs env vs_with_evals

                -- Bind the case-binder to (con args)
        ; let inst_tys' = tyConAppArgs (idType case_bndr')
              con_app :: OutExpr
              con_app   = mkConApp2 con inst_tys' vs'

        ; env'' <- addAltUnfoldings env' scrut' case_bndr' con_app
        ; rhs' <- simplExprC env'' rhs cont'
        ; return (DataAlt con, vs', rhs') }
  where
        -- add_evals records the evaluated-ness of the bound variables of
        -- a case pattern.  This is *important*.  Consider
        --      data T = T !Int !Int
        --
        --      case x of { T a b -> T (a+1) b }
        --
        -- We really must record that b is already evaluated so that we don't
        -- go and re-evaluate it when constructing the result.
        -- See Note [Data-con worker strictness] in MkId.hs
    add_evals the_strs
      = go vs the_strs
      where
        go [] [] = []
        -- Type variables carry no strictness mark; pass them through.
        go (v:vs') strs | isTyVar v = v : go vs' strs
        go (v:vs') (str:strs)
          | isMarkedStrict str = eval v : go vs' strs
          | otherwise          = zap v  : go vs' strs
        go _ _ = pprPanic "cat_evals"
                          (ppr con $$
                           ppr vs $$
                           ppr_with_length the_strs $$
                           ppr_with_length (dataConRepArgTys con) $$
                           ppr_with_length (dataConRepStrictness con))
          where
            ppr_with_length list
              = ppr list <+> parens (text "length =" <+> ppr (length list))
                 -- NB: If this panic triggers, note that
                 --     NoStrictnessMark doesn't print!

    zap v  = zapIdOccInfo v   -- See Note [Case alternative occ info]
    eval v = zap v `setIdUnfolding` evaldUnfolding
-- Within one case alternative, record that the case binder -- and, when
-- the scrutinee is a (possibly cast) variable, that variable too -- is
-- known to equal the given constructor application.
addAltUnfoldings :: SimplEnv -> Maybe OutExpr -> OutId -> OutExpr -> SimplM SimplEnv
addAltUnfoldings env scrut case_bndr con_app
  = do { dflags <- getDynFlags
       ; let unf_for e  = mkSimpleUnfolding dflags e
             bndr_env   = addBinderUnfolding env case_bndr (unf_for con_app)
             -- See Note [Add unfolding for scrutinee]
             scrut_env  = case scrut of
                            Just (Var v)           -> addBinderUnfolding bndr_env v
                                                          (unf_for con_app)
                            Just (Cast (Var v) co) -> addBinderUnfolding bndr_env v
                                                          (unf_for (Cast con_app (mkSymCo co)))
                            _                      -> bndr_env
       ; traceSmpl "addAltUnf" (vcat [ppr case_bndr <+> ppr scrut, ppr con_app])
       ; return scrut_env }
-- Attach an unfolding to a binder in the in-scope set.  In debug builds,
-- warn if the unfolding's type disagrees with the binder's type.
addBinderUnfolding :: SimplEnv -> Id -> Unfolding -> SimplEnv
addBinderUnfolding env bndr unf
  | debugIsOn, Just tmpl <- maybeUnfoldingTemplate unf
  = WARN( not (eqType (idType bndr) (exprType tmpl)),
          ppr bndr $$ ppr (idType bndr) $$ ppr tmpl $$ ppr (exprType tmpl) )
    modifyInScope env (bndr `setIdUnfolding` unf)

  | otherwise
  = modifyInScope env (bndr `setIdUnfolding` unf)
-- Zap the occurrence (deadness) info on a pattern-bound Id, unless the
-- caller asks to keep it.
-- Consider  case e of b { (a,b) -> ... }
-- Then if we bind b to (a,b) in "...", and b is not dead,
-- then we must zap the deadness info on a,b
zapBndrOccInfo :: Bool -> Id -> Id
zapBndrOccInfo keep_occ_info pat_id
  = if keep_occ_info then pat_id else zapIdOccInfo pat_id
{-
Note [Add unfolding for scrutinee]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general it's unlikely that a variable scrutinee will appear
in the case alternatives case x of { ...x unlikely to appear... }
because the binder-swap in OccAnal has got rid of all such occurrences
See Note [Binder swap] in OccAnal.
BUT it is still VERY IMPORTANT to add a suitable unfolding for a
variable scrutinee, in simplAlt. Here's why
case x of y
(a,b) -> case b of c
I# v -> ...(f y)...
There is no occurrence of 'b' in the (...(f y)...). But y gets
the unfolding (a,b), and *that* mentions b. If f has a RULE
RULE f (p, I# q) = ...
we want that rule to match, so we must extend the in-scope env with a
suitable unfolding for 'y'. It's *essential* for rule matching; but
it's also good for case-elimination -- suppose that 'f' was inlined
and did multi-level case analysis, then we'd solve it in one
simplifier sweep instead of two.
Exactly the same issue arises in SpecConstr;
see Note [Add scrutinee to ValueEnv too] in SpecConstr
HOWEVER, given
case x of y { Just a -> r1; Nothing -> r2 }
we do not want to add the unfolding x -> y to 'x', which might seem cool,
since 'y' itself has different unfoldings in r1 and r2. Reason: if we
did that, we'd have to zap y's deadness info and that is a very useful
piece of information.
So instead we add the unfolding x -> Just a, and x -> Nothing in the
respective RHSs.
************************************************************************
* *
\subsection{Known constructor}
* *
************************************************************************
We are a bit careful with occurrence info. Here's an example
(\x* -> case x of (a*, b) -> f a) (h v, e)
where the * means "occurs once". This effectively becomes
case (h v, e) of (a*, b) -> f a)
and then
let a* = h v; b = e in f a
and then
f (h v)
All this should happen in one sweep.
-}
-- The scrutinee is a known constructor application matching this
-- alternative: bind the pattern variables to the constructor's argument
-- pieces, bind the case binder, and simplify the RHS directly.
knownCon :: SimplEnv
         -> OutExpr                             -- The scrutinee
         -> DataCon -> [OutType] -> [OutExpr]   -- The scrutinee (in pieces)
         -> InId -> [InBndr] -> InExpr          -- The alternative
         -> SimplCont
         -> SimplM (SimplEnv, OutExpr)

knownCon env scrut dc dc_ty_args dc_args bndr bs rhs cont
  = do  { env'  <- bind_args env bs dc_args
        ; env'' <- bind_case_bndr env'
        ; simplExprF env'' rhs cont }
  where
    zap_occ = zapBndrOccInfo (isDeadBinder bndr)    -- bndr is an InId

                  -- Ugh!
    bind_args env' [] _  = return env'

    bind_args env' (b:bs') (Type ty : args)
      = ASSERT( isTyVar b )
        bind_args (extendTvSubst env' b ty) bs' args

    bind_args env' (b:bs') (Coercion co : args)
      = ASSERT( isCoVar b )
        bind_args (extendCvSubst env' b co) bs' args

    bind_args env' (b:bs') (arg : args)
      = ASSERT( isId b )
        do { let b' = zap_occ b
             -- Note that the binder might be "dead", because it doesn't
             -- occur in the RHS; and simplNonRecX may therefore discard
             -- it via postInlineUnconditionally.
             -- Nevertheless we must keep it if the case-binder is alive,
             -- because it may be used in the con_app.  See Note [knownCon occ info]
           ; env'' <- simplNonRecX env' b' arg  -- arg satisfies let/app invariant
           ; bind_args env'' bs' args }

    -- Mismatched binders/arguments: a compiler invariant has been broken.
    bind_args _ _ _ =
      pprPanic "bind_args" $ ppr dc $$ ppr bs $$ ppr dc_args $$
                             text "scrut:" <+> ppr scrut

       -- It's useful to bind bndr to scrut, rather than to a fresh
       -- binding      x = Con arg1 .. argn
       -- because very often the scrut is a variable, so we avoid
       -- creating, and then subsequently eliminating, a let-binding
       -- BUT, if scrut is a not a variable, we must be careful
       -- about duplicating the arg redexes; in that case, make
       -- a new con-app from the args
    bind_case_bndr env
      | isDeadBinder bndr   = return env
      | exprIsTrivial scrut = return (extendIdSubst env bndr (DoneEx scrut))
      | otherwise           = do { dc_args <- mapM (simplVar env) bs
                                         -- dc_ty_args are already OutTypes,
                                         -- but bs are InBndrs
                                 ; let con_app = Var (dataConWorkId dc)
                                                 `mkTyApps` dc_ty_args
                                                 `mkApps`   dc_args
                                 ; simplNonRecX env bndr con_app }
-------------------
-- Called when a known-constructor (or literal) scrutinee matches none of
-- the alternatives; produce an "impossible" expression of the right type.
missingAlt :: SimplEnv -> Id -> [InAlt] -> SimplCont -> SimplM (SimplEnv, OutExpr)
                -- This isn't strictly an error, although it is unusual.
                -- It's possible that the simplifier might "see" that
                -- an inner case has no accessible alternatives before
                -- it "sees" that the entire branch of an outer case is
                -- inaccessible.  So we simply put an error case here instead.
missingAlt env case_bndr _ cont
  = WARN( True, text "missingAlt" <+> ppr case_bndr )
    return (env, mkImpossibleExpr (contResultType cont))
{-
************************************************************************
* *
\subsection{Duplicating continuations}
* *
************************************************************************
-}
prepareCaseCont :: SimplEnv
                -> [InAlt] -> SimplCont
                -> SimplM (SimplEnv,
                           SimplCont,   -- Dupable part
                           SimplCont)   -- Non-dupable part
-- We are considering
--     K[case _ of { p1 -> r1; ...; pn -> rn }]
-- where K is some enclosing continuation for the case
-- Goal: split K into two pieces Kdup,Knodup so that
--       a) Kdup can be duplicated
--       b) Knodup[Kdup[e]] = K[e]
-- The idea is that we'll transform thus:
--       Knodup[ (case _ of { p1 -> Kdup[r1]; ...; pn -> Kdup[rn] }
--
-- We may also return some extra bindings in SimplEnv (that scope over
-- the entire continuation)
--
-- When case-of-case is off, just make the entire continuation non-dupable
prepareCaseCont env alts cont
  | not (sm_case_case (getMode env))
  = return (env, mkBoringStop (contHoleType cont), cont)
  | many_alts alts
  = mkDupableCont env cont
  | otherwise
  = return (env, cont, mkBoringStop (contResultType cont))
  where
    -- True iff strictly more than one non-bottoming alternative;
    -- bottoming ones will disappear, see Note [Bottom alternatives]
    many_alts :: [InAlt] -> Bool
    many_alts as = case as of
                     []       -> False    -- See Note [Bottom alternatives]
                     [_]      -> False
                     (a:rest)
                       | is_bot_alt a -> many_alts rest
                       | otherwise    -> not (all is_bot_alt rest)

    is_bot_alt (_,_,rhs) = exprIsBottom rhs
{-
Note [Bottom alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have
case (case x of { A -> error .. ; B -> e; C -> error ..)
of alts
then we can just duplicate those alts because the A and C cases
will disappear immediately. This is more direct than creating
join points and inlining them away; and in some cases we would
not even create the join points (see Note [Single-alternative case])
and we would keep the case-of-case which is silly. See Trac #4930.
-}
-- Split a continuation into a dupable prefix and a non-dupable suffix,
-- possibly emitting bindings (e.g. join points, trivialised arguments)
-- into the returned SimplEnv.
mkDupableCont :: SimplEnv -> SimplCont
              -> SimplM (SimplEnv, SimplCont, SimplCont)

mkDupableCont env cont
  | contIsDupable cont
  = return (env, cont, mkBoringStop (contResultType cont))

mkDupableCont _ (Stop {}) = panic "mkDupableCont" -- Handled by previous eqn

-- A cast is free to duplicate; recurse on the tail.
mkDupableCont env (CastIt ty cont)
  = do  { (env', dup, nodup) <- mkDupableCont env cont
        ; return (env', CastIt ty dup, nodup) }

-- Duplicating ticks for now, not sure if this is good or not
mkDupableCont env cont@(TickIt{})
  = return (env, mkBoringStop (contHoleType cont), cont)

mkDupableCont env cont@(StrictBind {})
  = return (env, mkBoringStop (contHoleType cont), cont)
        -- See Note [Duplicating StrictBind]

mkDupableCont env (StrictArg info cci cont)
        -- See Note [Duplicating StrictArg]
  = do { (env', dup, nodup) <- mkDupableCont env cont
       ; (env'', args')     <- mapAccumLM makeTrivialArg env' (ai_args info)
       ; return (env'', StrictArg (info { ai_args = args' }) cci dup, nodup) }

-- Type application carries no work; recurse on the tail.
mkDupableCont env cont@(ApplyToTy { sc_cont = tail })
  = do  { (env', dup_cont, nodup_cont) <- mkDupableCont env tail
        ; return (env', cont { sc_cont = dup_cont }, nodup_cont ) }

mkDupableCont env (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_env = se, sc_cont = cont })
  =     -- e.g.         [...hole...] (...arg...)
        --      ==>
        --              let a = ...arg...
        --              in [...hole...] a
    do  { (env', dup_cont, nodup_cont) <- mkDupableCont env cont
        ; (_, se', arg') <- simplArg env' dup se arg
        -- Let-bind the argument so the application itself is cheap to duplicate.
        ; (env'', arg'') <- makeTrivial NotTopLevel env' (fsLit "karg") arg'
        ; let app_cont = ApplyToVal { sc_arg = arg'', sc_env = se'
                                    , sc_dup = OkToDup, sc_cont = dup_cont }
        ; return (env'', app_cont, nodup_cont) }

mkDupableCont env cont@(Select { sc_bndr = case_bndr, sc_alts = [(_, bs, _rhs)] })
--  See Note [Single-alternative case]
--  | not (exprIsDupable rhs && contIsDupable case_cont)
--  | not (isDeadBinder case_bndr)
  | all isDeadBinder bs  -- InIds
    && not (isUnliftedType (idType case_bndr))
    -- Note [Single-alternative-unlifted]
  = return (env, mkBoringStop (contHoleType cont), cont)

mkDupableCont env (Select { sc_bndr = case_bndr, sc_alts = alts
                          , sc_env = se, sc_cont = cont })
  =     -- e.g.         (case [...hole...] of { pi -> ei })
        --      ===>
        --              let ji = \xij -> ei
        --              in case [...hole...] of { pi -> ji xij }
    do  { tick (CaseOfCase case_bndr)

        ; (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
                -- NB: We call prepareCaseCont here.  If there is only one
                -- alternative, then dup_cont may be big, but that's ok
                -- because we push it into the single alternative, and then
                -- use mkDupableAlt to turn that simplified alternative into
                -- a join point if it's too big to duplicate.
                -- And this is important: see Note [Fusing case continuations]

        ; let alt_env = se `setInScope` env'

        ; (alt_env', case_bndr') <- simplBinder alt_env case_bndr
        ; alts' <- mapM (simplAlt alt_env' Nothing [] case_bndr' dup_cont) alts
        -- Safe to say that there are no handled-cons for the DEFAULT case
                -- NB: simplBinder does not zap deadness occ-info, so
                -- a dead case_bndr' will still advertise its deadness
                -- This is really important because in
                --      case e of b { (# p,q #) -> ... }
                -- b is always dead, and indeed we are not allowed to bind b to (# p,q #),
                -- which might happen if e was an explicit unboxed pair and b wasn't marked dead.
                -- In the new alts we build, we have the new case binder, so it must retain
                -- its deadness.
        -- NB: we don't use alt_env further; it has the substEnv for
        --     the alternatives, and we don't want that

        ; (env'', alts'') <- mkDupableAlts env' case_bndr' alts'
        ; return (env'',  -- Note [Duplicated env]
                  Select { sc_dup = OkToDup
                         , sc_bndr = case_bndr', sc_alts = alts''
                         , sc_env = zapSubstEnv env''
                         , sc_cont = mkBoringStop (contHoleType nodup_cont) },
                  nodup_cont) }
mkDupableAlts :: SimplEnv -> OutId -> [InAlt]
              -> SimplM (SimplEnv, [InAlt])
-- Absorbs the continuation into the new alternatives, threading the
-- environment left-to-right through mkDupableAlt for each alternative.
mkDupableAlts env case_bndr' the_alts
  = mapAccumLM (\cur_env alt -> mkDupableAlt cur_env case_bndr' alt) env the_alts
-- Make one alternative cheap to duplicate: small RHSs are left alone;
-- big ones are replaced by a call to a freshly let-bound join point.
mkDupableAlt :: SimplEnv -> OutId -> (AltCon, [CoreBndr], CoreExpr)
             -> SimplM (SimplEnv, (AltCon, [CoreBndr], CoreExpr))
mkDupableAlt env case_bndr (con, bndrs', rhs') = do
  dflags <- getDynFlags
  if exprIsDupable dflags rhs'  -- Note [Small alternative rhs]
   then return (env, (con, bndrs', rhs'))
   else
     do  { let rhs_ty'  = exprType rhs'
               scrut_ty = idType case_bndr
               case_bndr_w_unf
                 = case con of
                     DEFAULT    -> case_bndr
                     DataAlt dc -> setIdUnfolding case_bndr unf
                         where
                                -- See Note [Case binders and join points]
                            unf = mkInlineUnfolding Nothing rhs
                            rhs = mkConApp2 dc (tyConAppArgs scrut_ty) bndrs'

                     LitAlt {} -> WARN( True, text "mkDupableAlt"
                                              <+> ppr case_bndr <+> ppr con )
                                  case_bndr
                          -- The case binder is alive but trivial, so why has
                          -- it not been substituted away?

               -- Abstract over the live pattern binders; include the case
               -- binder too if any RHS might use it.
               used_bndrs' | isDeadBinder case_bndr = filter abstract_over bndrs'
                           | otherwise              = bndrs' ++ [case_bndr_w_unf]

               abstract_over bndr
                   | isTyVar bndr = True -- Abstract over all type variables just in case
                   | otherwise    = not (isDeadBinder bndr)
                        -- The deadness info on the new Ids is preserved by simplBinders

         ; (final_bndrs', final_args)  -- Note [Join point abstraction]
               <- if (any isId used_bndrs')
                  then return (used_bndrs', varsToCoreExprs used_bndrs')
                  else do { rw_id <- newId (fsLit "w") voidPrimTy
                          ; return ([setOneShotLambda rw_id], [Var voidPrimId]) }

         ; join_bndr <- newId (fsLit "$j") (mkLamTypes final_bndrs' rhs_ty')
                 -- Note [Funky mkLamTypes]

         ; let  -- We make the lambdas into one-shot-lambdas.  The
                -- join point is sure to be applied at most once, and doing so
                -- prevents the body of the join point being floated out by
                -- the full laziness pass
                really_final_bndrs     = map one_shot final_bndrs'
                one_shot v | isId v    = setOneShotLambda v
                           | otherwise = v
                join_rhs   = mkLams really_final_bndrs rhs'
                join_arity = exprArity join_rhs
                join_call  = mkApps (Var join_bndr) final_args

         ; env' <- addPolyBind NotTopLevel env (NonRec (join_bndr `setIdArity` join_arity) join_rhs)
         ; return (env', (con, bndrs', join_call)) }
                -- See Note [Duplicated env]
-- See Note [Duplicated env]
{-
Note [Fusing case continuations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's important to fuse two successive case continuations when the
first has one alternative. That's why we call prepareCaseCont here.
Consider this, which arises from thunk splitting (see Note [Thunk
splitting] in WorkWrap):
let
x* = case (case v of {pn -> rn}) of
I# a -> I# a
in body
The simplifier will find
(Var v) with continuation
Select (pn -> rn) (
Select [I# a -> I# a] (
StrictBind body Stop
So we'll call mkDupableCont on
Select [I# a -> I# a] (StrictBind body Stop)
There is just one alternative in the first Select, so we want to
simplify the rhs (I# a) with continuation (StrictBind body Stop)
Supposing that body is big, we end up with
let $j a = <let x = I# a in body>
in case v of { pn -> case rn of
I# a -> $j a }
This is just what we want because the rn produces a box that
the case rn cancels with.
See Trac #4957 for a fuller example.
Note [Case binders and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
case (case .. ) of c {
I# c# -> ....c....
If we make a join point with c but not c# we get
$j = \c -> ....c....
But if later inlining scrutinises the c, thus
$j = \c -> ... case c of { I# y -> ... } ...
we won't see that 'c' has already been scrutinised. This actually
happens in the 'tabulate' function in wave4main, and makes a significant
difference to allocation.
An alternative plan is this:
$j = \c# -> let c = I# c# in ...c....
but that is bad if 'c' is *not* later scrutinised.
So instead we do both: we pass 'c' and 'c#' , and record in c's inlining
(a stable unfolding) that it's really I# c#, thus
$j = \c# -> \c[=I# c#] -> ...c....
Absence analysis may later discard 'c'.
NB: take great care when doing strictness analysis;
see Note [Lambda-bound unfoldings] in DmdAnal.
Also note that we can still end up passing stuff that isn't used. Before
strictness analysis we have
let $j x y c{=(x,y)} = (h c, ...)
in ...
After strictness analysis we see that h is strict, we end up with
let $j x y c{=(x,y)} = ($wh x y, ...)
and c is unused.
Note [Duplicated env]
~~~~~~~~~~~~~~~~~~~~~
Some of the alternatives are simplified, but have not been turned into a join point
So they *must* have a zapped subst-env. So we can't use completeNonRecX to
bind the join point, because it might do PostInlineUnconditionally, and
we'd lose that when zapping the subst-env. We could have a per-alt subst-env,
but zapping it (as we do in mkDupableCont, the Select case) is safe, and
at worst delays the join-point inlining.
Note [Small alternative rhs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is worth checking for a small RHS because otherwise we
get extra let bindings that may cause an extra iteration of the simplifier to
inline back in place. Quite often the rhs is just a variable or constructor.
The Ord instance of Maybe in PrelMaybe.hs, for example, took several extra
iterations because the version with the let bindings looked big, and so wasn't
inlined, but after the join points had been inlined it looked smaller, and so
was inlined.
NB: we have to check the size of rhs', not rhs.
Duplicating a small InAlt might invalidate occurrence information
However, if it *is* dupable, we return the *un* simplified alternative,
because otherwise we'd need to pair it up with an empty subst-env....
but we only have one env shared between all the alts.
(Remember we must zap the subst-env before re-simplifying something).
Rather than do this we simply agree to re-simplify the original (small) thing later.
Note [Funky mkLamTypes]
~~~~~~~~~~~~~~~~~~~~~~
Notice the funky mkLamTypes. If the constructor has existentials
it's possible that the join point will be abstracted over
type variables as well as term variables.
Example: Suppose we have
data T = forall t. C [t]
Then faced with
case (case e of ...) of
C t xs::[t] -> rhs
We get the join point
let j :: forall t. [t] -> ...
j = /\t \xs::[t] -> rhs
in
case (case e of ...) of
C t xs::[t] -> j t xs
Note [Join point abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Join points always have at least one value argument,
for several reasons
* If we try to lift a primitive-typed something out
for let-binding-purposes, we will *caseify* it (!),
with potentially-disastrous strictness results. So
instead we turn it into a function: \v -> e
where v::Void#. The value passed to this function is void,
which generates (almost) no code.
* CPR. We used to say "&& isUnliftedType rhs_ty'" here, but now
we make the join point into a function whenever used_bndrs'
is empty. This makes the join-point more CPR friendly.
Consider: let j = if .. then I# 3 else I# 4
in case .. of { A -> j; B -> j; C -> ... }
Now CPR doesn't w/w j because it's a thunk, so
that means that the enclosing function can't w/w either,
which is a lose. Here's the example that happened in practice:
kgmod :: Int -> Int -> Int
kgmod x y = if x > 0 && y < 0 || x < 0 && y > 0
then 78
else 5
* Let-no-escape. We want a join point to turn into a let-no-escape
so that it is implemented as a jump, and one of the conditions
for LNE is that it's not updatable. In CoreToStg, see
Note [What is a non-escaping let]
* Floating. Since a join point will be entered once, no sharing is
gained by floating out, but something might be lost by doing
so because it might be allocated.
I have seen a case alternative like this:
True -> \v -> ...
It's a bit silly to add the realWorld dummy arg in this case, making
$j = \s v -> ...
True -> $j s
(the \v alone is enough to make CPR happy) but I think it's rare
There's a slight infelicity here: we pass the overall
case_bndr to all the join points if it's used in *any* RHS,
because we don't know its usage in each RHS separately
Note [Duplicating StrictArg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The original plan had (where E is a big argument)
e.g. f E [..hole..]
==> let $j = \a -> f E a
in $j [..hole..]
But this is terrible! Here's an example:
&& E (case x of { T -> F; F -> T })
Now, && is strict so we end up simplifying the case with
an ArgOf continuation. If we let-bind it, we get
let $j = \v -> && E v
in simplExpr (case x of { T -> F; F -> T })
(ArgOf (\r -> $j r)
And after simplifying more we get
let $j = \v -> && E v
in case x of { T -> $j F; F -> $j T }
Which is a Very Bad Thing
What we do now is this
f E [..hole..]
==> let a = E
in f a [..hole..]
Now if the thing in the hole is a case expression (which is when
we'll call mkDupableCont), we'll push the function call into the
branches, which is what we want. Now RULES for f may fire, and
call-pattern specialisation. Here's an example from Trac #3116
go (n+1) (case l of
1 -> bs'
_ -> Chunk p fpc (o+1) (l-1) bs')
If we can push the call for 'go' inside the case, we get
call-pattern specialisation for 'go', which is *crucial* for
this program.
Here is the (&&) example:
&& E (case x of { T -> F; F -> T })
==> let a = E in
case x of { T -> && a F; F -> && a T }
Much better!
Notice that
* Arguments to f *after* the strict one are handled by
the ApplyToVal case of mkDupableCont. Eg
f [..hole..] E
* We can only do the let-binding of E because the function
part of a StrictArg continuation is an explicit syntax
tree. In earlier versions we represented it as a function
(CoreExpr -> CoreExpr) which we couldn't take apart.
Do *not* duplicate StrictBind and StrictArg continuations. We gain
nothing by propagating them into the expressions, and we do lose a
lot.
The desire not to duplicate is the entire reason that
mkDupableCont returns a pair of continuations.
Note [Duplicating StrictBind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unlike StrictArg, there doesn't seem anything to gain from
duplicating a StrictBind continuation, so we don't.
Note [Single-alternative cases]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This case is just like the ArgOf case. Here's an example:
data T a = MkT !a
...(MkT (abs x))...
Then we get
case (case x of I# x' ->
case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
Because the (case x) has only one alternative, we'll transform to
case x of I# x' ->
case (case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
But now we do *NOT* want to make a join point etc, giving
case x of I# x' ->
let $j = \y -> MkT y
in case x' <# 0# of
True -> $j (I# (negate# x'))
False -> $j (I# x')
In this case the $j will inline again, but suppose there was a big
strict computation enclosing the original call to MkT. Then, it won't
"see" the MkT any more, because it's big and won't get duplicated.
And, what is worse, nothing was gained by the case-of-case transform.
So, in circumstances like these, we don't want to build join points
and push the outer case into the branches of the inner one. Instead,
don't duplicate the continuation.
When should we use this strategy? We should not use it on *every*
single-alternative case:
e.g. case (case ....) of (a,b) -> (# a,b #)
Here we must push the outer case into the inner one!
Other choices:
* Match [(DEFAULT,_,_)], but in the common case of Int,
the alternative-filling-in code turned the outer case into
case (...) of y { I# _ -> MkT y }
* Match on single alternative plus (not (isDeadBinder case_bndr))
Rationale: pushing the case inwards won't eliminate the construction.
But there's a risk of
case (...) of y { (a,b) -> let z=(a,b) in ... }
Now y looks dead, but it'll come alive again. Still, this
seems like the best option at the moment.
* Match on single alternative plus (all (isDeadBinder bndrs))
Rationale: this is essentially seq.
* Match when the rhs is *not* duplicable, and hence would lead to a
join point. This catches the disaster-case above. We can test
the *un-simplified* rhs, which is fine. It might get bigger or
smaller after simplification; if it gets smaller, this case might
fire next time round. NB also that we must test contIsDupable
case_cont *too*, because case_cont might be big!
HOWEVER: I found that this version doesn't work well, because
we can get let x = case (...) of { small } in ...case x...
When x is inlined into its full context, we find that it was a bad
idea to have pushed the outer case inside the (...) case.
There is a cost to not doing case-of-case; see Trac #10626.
Note [Single-alternative-unlifted]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here's another single-alternative where we really want to do case-of-case:
data Mk1 = Mk1 Int# | Mk2 Int#
M1.f =
\r [x_s74 y_s6X]
case
case y_s6X of tpl_s7m {
M1.Mk1 ipv_s70 -> ipv_s70;
M1.Mk2 ipv_s72 -> ipv_s72;
}
of
wild_s7c
{ __DEFAULT ->
case
case x_s74 of tpl_s7n {
M1.Mk1 ipv_s77 -> ipv_s77;
M1.Mk2 ipv_s79 -> ipv_s79;
}
of
wild1_s7b
{ __DEFAULT -> ==# [wild1_s7b wild_s7c];
};
};
So the outer case is doing *nothing at all*, other than serving as a
join-point. In this case we really want to do case-of-case and decide
whether to use a real join point or just duplicate the continuation:
let $j s7c = case x of
Mk1 ipv77 -> (==) s7c ipv77
Mk1 ipv79 -> (==) s7c ipv79
in
case y of
Mk1 ipv70 -> $j ipv70
Mk2 ipv72 -> $j ipv72
Hence: check whether the case binder's type is unlifted, because then
the outer case is *not* a seq.
************************************************************************
* *
Unfoldings
* *
************************************************************************
-}
-- | Work out the unfolding to attach to a let-bound identifier whose
-- right-hand side has just been simplified.  A stable unfolding (from an
-- INLINE\/INLINABLE pragma) is handled by 'simplUnfolding'; anything else
-- gets a fresh unfolding built from the new RHS.
simplLetUnfolding :: SimplEnv-> TopLevelFlag
                  -> InId
                  -> OutExpr
                  -> Unfolding -> SimplM Unfolding
simplLetUnfolding env top_lvl id new_rhs unf
  | isStableUnfolding unf = simplUnfolding env top_lvl id unf
  | otherwise             = bottoming `seq` fresh_unfolding
                            -- See Note [Force bottoming field]
  where
    bottoming = isBottomingId id

    -- We make an unfolding *even for loop-breakers*.
    -- Reason: (a) It might be useful to know that they are WHNF
    --         (b) In TidyPgm we currently assume that, if we want to
    --             expose the unfolding then indeed we *have* an unfolding
    --             to expose.  (We could instead use the RHS, but currently
    --             we don't.)  The simple thing is always to have one.
    fresh_unfolding
      = do { dflags <- getDynFlags
           ; return (mkUnfolding dflags InlineRhs (isTopLevel top_lvl)
                                 bottoming new_rhs) }
-- | Simplify a *stable* unfolding (one attached by a pragma or a DFun).
-- 'NoUnfolding' and 'OtherCon' pass through untouched; DFun and stable
-- Core unfoldings have their templates re-simplified inside 'rule_env';
-- unstable Core unfoldings are discarded.
simplUnfolding :: SimplEnv-> TopLevelFlag -> InId -> Unfolding -> SimplM Unfolding
-- Note [Setting the new unfolding]
simplUnfolding env top_lvl id unf
  = case unf of
      NoUnfolding -> return unf
      OtherCon {} -> return unf

      DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args }
        -> do { (env', bndrs') <- simplBinders rule_env bndrs
              ; args' <- mapM (simplExpr env') args
              ; return (mkDFunUnfolding bndrs' con args') }

      CoreUnfolding { uf_tmpl = expr, uf_src = src, uf_guidance = guide }
        | isStableSource src
        -> do { expr' <- simplExpr rule_env expr
              ; case guide of
                  UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok }  -- Happens for INLINE things
                    -> let guide' = UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok
                                            , ug_boring_ok = inlineBoringOk expr' }
                       -- Refresh the boring-ok flag, in case expr'
                       -- has got small. This happens, notably in the inlinings
                       -- for dfuns for single-method classes; see
                       -- Note [Single-method classes] in TcInstDcls.
                       -- A test case is Trac #4138
                       in return (mkCoreUnfolding src is_top_lvl expr' guide')
                       -- See Note [Top-level flag on inline rules] in CoreUnfold

                  _other              -- Happens for INLINABLE things
                    -> bottoming `seq` -- See Note [Force bottoming field]
                       do { dflags <- getDynFlags
                          ; return (mkUnfolding dflags src is_top_lvl bottoming expr') } }
                -- If the guidance is UnfIfGoodArgs, this is an INLINABLE
                -- unfolding, and we need to make sure the guidance is kept up
                -- to date with respect to any changes in the unfolding.

        | otherwise -> return noUnfolding   -- Discard unstable unfoldings
  where
    bottoming  = isBottomingId id
    is_top_lvl = isTopLevel top_lvl
    act        = idInlineActivation id
    rule_env   = updMode (updModeForStableUnfoldings act) env
        -- See Note [Simplifying inside stable unfoldings] in SimplUtils
{-
Note [Force bottoming field]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to force bottoming, or the new unfolding holds
on to the old unfolding (which is part of the id).
Note [Setting the new unfolding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* If there's an INLINE pragma, we simplify the RHS gently. Maybe we
should do nothing at all, but simplifying gently might get rid of
more crap.
* If not, we make an unfolding from the new RHS. But *only* for
non-loop-breakers. Making loop breakers not have an unfolding at all
means that we can avoid tests in exprIsConApp, for example. This is
important: if exprIsConApp says 'yes' for a recursive thing, then we
can get into an infinite loop
If there's a stable unfolding on a loop breaker (which happens for
INLINEABLE), we hang on to the inlining. It's pretty dodgy, but the
user did say 'INLINE'. May need to revisit this choice.
************************************************************************
* *
Rules
* *
************************************************************************
Note [Rules in a letrec]
~~~~~~~~~~~~~~~~~~~~~~~~
After creating fresh binders for the binders of a letrec, we
substitute the RULES and add them back onto the binders; this is done
*before* processing any of the RHSs. This is important. Manuel found
cases where he really, really wanted a RULE for a recursive function
to apply in that function's own right-hand side.
See Note [Loop breaking and RULES] in OccAnal.
-}
-- | Transfer the RULES attached to 'in_id' onto the freshly-made
-- 'out_id', simplifying them first; see Note [Rules in a letrec].
addBndrRules :: SimplEnv -> InBndr -> OutBndr -> SimplM (SimplEnv, OutBndr)
-- Rules are added back into the binder
addBndrRules env in_id out_id =
  case ruleInfoRules (idSpecialisation in_id) of
    []        -> return (env, out_id)   -- Nothing to transfer
    old_rules ->
      do { new_rules <- simplRules env (Just (idName out_id)) old_rules
         ; let final_id = out_id `setIdSpecialisation` mkRuleInfo new_rules
         ; return (modifyInScope env final_id, final_id) }
-- | Simplify a batch of rewrite rules, optionally retargeting them to a
-- new function name (used when the binder has been renamed).  Built-in
-- rules carry no syntax of their own and pass through unchanged.
simplRules :: SimplEnv -> Maybe Name -> [CoreRule] -> SimplM [CoreRule]
simplRules env mb_new_nm rules = mapM one_rule rules
  where
    one_rule rule@(BuiltinRule {}) = return rule

    one_rule rule@(Rule { ru_bndrs = bndrs, ru_args = args
                        , ru_fn = fn_name, ru_rhs = rhs })
      = do { (env', bndrs') <- simplBinders env bndrs
           ; let rule_env = updMode updModeForRules env'
           ; args' <- mapM (simplExpr rule_env) args
           ; rhs'  <- simplExpr rule_env rhs
           ; return (rule { ru_bndrs = bndrs'
                          , ru_fn   = mb_new_nm `orElse` fn_name
                          , ru_args = args'
                          , ru_rhs  = rhs' }) }
|
vTurbine/ghc
|
compiler/simplCore/Simplify.hs
|
bsd-3-clause
| 124,417
| 20
| 25
| 38,197
| 15,479
| 8,190
| 7,289
| -1
| -1
|
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.NV.TexgenReflection
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.NV.TexgenReflection (
-- * Extension Support
glGetNVTexgenReflection,
gl_NV_texgen_reflection,
-- * Enums
pattern GL_NORMAL_MAP_NV,
pattern GL_REFLECTION_MAP_NV
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
|
haskell-opengl/OpenGLRaw
|
src/Graphics/GL/NV/TexgenReflection.hs
|
bsd-3-clause
| 687
| 0
| 5
| 95
| 52
| 39
| 13
| 8
| 0
|
module Main where

import MainW2 (mainW)
import Reflex.Dom (run)

-- | Entry point: hand the Reflex-DOM widget 'mainW' to 'run', which
-- hosts it (see Reflex.Dom documentation for the hosting backend).
main :: IO ()
main = run mainW
|
gspia/reflex-dom-htmlea
|
exampleTbl/app/Main.hs
|
bsd-3-clause
| 104
| 0
| 6
| 26
| 40
| 23
| 17
| 5
| 1
|
import Control.Monad
import Data.Aeson
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Maybe
import System.Console.GetOpt
import System.Environment
import System.Exit
import Types
import Game hiding (id)
import qualified Game
import Play
import Util
import qualified Tactics.RandomWalk as RandomWalk
import qualified Tactics.RandomWalk2 as RandomWalk2
import qualified Tactics.RandomZigZag as RandomZigZag
import qualified Tactics.Reynolds as Reynolds
import qualified Tactics.Hybrid as Hybrid
import qualified Tactics.SearchAllLocking as SearchAllLocking
-- | Parsed command-line options for the davar solver driver.
data Options
  = Options
  { optInputs :: [FilePath]      -- ^ JSON problem files (-f), accumulated
  , optTimeLimit :: Maybe Int    -- ^ -t: time limit in seconds (unused in 'main' here)
  , optMemoryLimit :: Maybe Int  -- ^ -m: memory limit in megabytes (unused in 'main' here)
  , optNCores :: Maybe Int       -- ^ -c: processor cores available (unused in 'main' here)
  , optPhrases :: [String]       -- ^ -p: phrases of power, accumulated
  , optHelp :: Bool              -- ^ -h/--help: print usage and exit
  }
-- | Starting option record: no inputs, no limits, no phrases, help off.
-- Each parsed flag is folded onto this in 'main'.
defaultOptions :: Options
defaultOptions
  = Options
  { optInputs = []
  , optTimeLimit = Nothing
  , optMemoryLimit = Nothing
  , optNCores = Nothing
  , optPhrases = []
  , optHelp = False
  }
-- | GetOpt descriptors.  Each flag yields an @Options -> Options@
-- transformer; -f and -p are repeatable and prepend to their lists.
-- Note: -t/-m/-c use 'read', so a non-numeric argument will crash when
-- the field is forced.
options :: [OptDescr (Options -> Options)]
options =
  [ Option "f" [] (ReqArg (\val opt -> opt{ optInputs = val : optInputs opt }) "FILENAME") "File containing JSON encoded input"
  , Option "t" [] (ReqArg (\val opt -> opt{ optTimeLimit = Just (read val) }) "NUMBER") "Time limit, in seconds, to produce output"
  , Option "m" [] (ReqArg (\val opt -> opt{ optMemoryLimit = Just (read val) }) "NUMBER") "Memory limit, in megabytes, to produce output"
  , Option "c" [] (ReqArg (\val opt -> opt{ optNCores = Just (read val) }) "NUMBER") "Number of processor cores available"
  , Option "p" [] (ReqArg (\val opt -> opt{ optPhrases = val : optPhrases opt }) "STRING") "Phrase of power"
  , Option "h" ["help"] (NoArg (\opt -> opt{ optHelp = True }) ) "Print help message"
  ]
-- | Parse options, then for every input problem play each initial game
-- state with the Hybrid tactic and emit the collected outputs as one
-- JSON array on stdout.  Option-parse errors print to stdout and exit
-- non-zero; -h prints usage.
main :: IO ()
main = do
  args <- getArgs
  case getOpt Permute options args of
    (_,_,errs@(_:_)) -> do
      mapM_ putStrLn errs
      exitFailure
    (o,args2,[]) -> do
      -- Apply each flag's transformer to the defaults, left to right.
      let opt = foldl (flip id) defaultOptions o
      if optHelp opt then do
        help
      else do
        player <- Hybrid.newPlayer 300
        os <- liftM concat $ forM (optInputs opt) $ \fname -> do
          -- NOTE(review): irrefutable 'Just' pattern — an unreadable
          -- problem file makes this pattern-match failure crash.
          Just input <- readProblem fname
          -- One game per seed; 'sd' indexes the seed within the problem.
          forM (zip (initGameStates input (optPhrases opt)) [0..]) $ \(gm, sd) -> do
            let gm2 = autoPlay2 player gm
            tag <- genTag (Game.id input) sd
            return $ dumpOutputItem gm2 tag
        LBS.putStrLn $ encode os
    _ -> help
-- | Print the command-line usage message built from 'options'.
help :: IO ()
help = putStrLn usageText
  where
    usageText = usageInfo "USAGE: davar [OPTIONS] -p STRING -p STRING .. -f FILENAME -f FILENAME .." options
|
msakai/icfpc2015
|
tools/davar.hs
|
bsd-3-clause
| 2,554
| 15
| 28
| 567
| 863
| 463
| 400
| 66
| 4
|
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE OverloadedStrings #-}
module Events
( appEvent
, startEvent
)
where
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Monad.Trans (liftIO)
import Control.Concurrent (forkIO, writeChan, threadDelay)
import Control.Lens
import qualified Data.Vector as V
import Data.Monoid
import Graphics.Vty
import System.Process
import System.Posix.Env (getEnvDefault)
import Brick.Main
import Brick.Types
import Brick.Widgets.List
import Brick.Widgets.Edit
import Data.Text.Zipper
import Database.Schema.Migrations
( createNewMigration
)
import Database.Schema.Migrations.Migration (Migration(..), newMigration)
import qualified Database.Schema.Migrations.Backend as B
import qualified Database.Schema.Migrations.Store as S
import Types
-- | Top-level event dispatcher: choose the handler for the current UI
-- mode and apply it to the incoming event.
appEvent :: St -> AppEvent -> EventM (Next St)
appEvent st e = handler st e
  where
    handler = case st^.uiMode of
                MigrationListing -> migrationListingEvent
                EditMigration    -> editMigrationEvent
-- | Run @act@ with the index of the list's current selection; when
-- nothing is selected, continue with the state unchanged.
withSelectedMigration :: St -> Lens' St (List a) -> (Int -> EventM (Next St)) -> EventM (Next St)
withSelectedMigration st listLens act =
    maybe (continue st) act (st^.listLens.listSelectedL)
-- | Event handler for the migration-listing screen: quit (q/Esc),
-- navigate (Up/Down), edit in-app (e), create new (n), or edit in an
-- external $EDITOR (E).  Status-clear ticks just clear the status line.
migrationListingEvent :: St -> AppEvent -> EventM (Next St)
migrationListingEvent st (VtyEvent e) =
    case e of
        -- Quit the program
        EvKey (KChar 'q') [] -> halt st
        EvKey KEsc [] -> halt st
        -- Up and down select migrations
        EvKey KUp [] -> continue =<< handleEventLensed st migrationList e
        EvKey KDown [] -> continue =<< handleEventLensed st migrationList e
        -- Edit a migration inside the program
        EvKey (KChar 'e') [] ->
            withSelectedMigration st migrationList $ \i -> do
                result <- liftIO $ S.loadMigration (st^.store) (st^.migrationList.listElementsL.ix i)
                case result of
                    -- Load failures are silently ignored; stay in listing.
                    Left _ -> continue st
                    Right m ->
                        -- Switch to edit mode pre-populated with the
                        -- migration's name and dependency selections.
                        continue $ st & uiMode .~ EditMigration
                                      & editMigrationName.editContentsL .~ (stringZipper [mId m] $ Just 1)
                                      & editMigrationDeps .~ migrationDepsList st (mDeps m)
                                      & editingMigration .~ Just m
        -- Create a new migration
        EvKey (KChar 'n') [] -> continue $ st & uiMode .~ EditMigration
                                              & editMigrationName.editContentsL .~ (stringZipper [] $ Just 1)
                                              & editMigrationDeps .~ migrationDepsList st []
                                              & editingMigration .~ Nothing
        -- Spawn an external editor to edit the migration
        EvKey (KChar 'E') [] ->
            withSelectedMigration st migrationList $ \i -> do
                result <- liftIO $ S.loadMigration (st^.store) (st^.migrationList.listElementsL.ix i)
                case result of
                    Left _ -> continue st
                    Right m -> suspendAndResume $ do
                        -- Suspend the TUI while the external editor runs.
                        editorPath <- getEnvDefault "EDITOR" "vi"
                        path <- S.fullMigrationName (st^.store) (mId m)
                        callProcess editorPath [path]
                        return st
        _ -> continue st
migrationListingEvent st (ClearStatus msg) = continue $ clearStatus msg st
-- | Build the dependency-selection widget for the migration editor:
-- one (selected?, name) entry per available migration, flagged True
-- when the migration is already in @deps@.
migrationDepsList :: St -> [String] -> List (Bool, String)
migrationDepsList st deps = list (Name "editMigrationDeps") depElems 1
  where
    depElems = V.fromList [ (m `elem` deps, m) | m <- st^.availableMigrations ]
-- | Event handler for the migration editor: Esc cancels, Up/Down move
-- through the dependency list, Space toggles a dependency, Enter saves
-- (creating a new migration or updating the one being edited), and
-- other keys feed the name editor only for brand-new migrations.
editMigrationEvent :: St -> AppEvent -> EventM (Next St)
editMigrationEvent st (VtyEvent e) =
    -- Getter for the current contents of the name editor, flattened
    -- to a single String.
    let migrationNameL = editMigrationName.to getEditContents.to concat
    in case e of
        -- Esc takes you back to the migration listing
        EvKey KEsc [] -> continue $ st & uiMode .~ MigrationListing
        -- Up and down navigate the dependency list
        EvKey KUp [] -> continue =<< handleEventLensed st editMigrationDeps e
        EvKey KDown [] -> continue =<< handleEventLensed st editMigrationDeps e
        -- Toggle the dependency state of the selected dependency
        EvKey (KChar ' ') [] ->
            withSelectedMigration st editMigrationDeps $ \i ->
                continue $ st & editMigrationDeps.listElementsL.ix i._1 %~ not
        -- Ignore enter keypresses if the migration name editor is empty
        EvKey KEnter [] | length (st^.migrationNameL) == 0 -> continue st
        -- Enter saves the migration being created or modified
        EvKey KEnter [] -> do
            result <- case st^.editingMigration of
                -- No existing migration: create a fresh one from the
                -- entered name and ticked dependencies.
                Nothing -> do
                    let newM = (newMigration (st^.migrationNameL))
                              { mDeps = snd <$> (filter fst $ V.toList $ st^.editMigrationDeps.listElementsL)
                              }
                    liftIO $ createNewMigration (st^.store) newM
                -- Editing: keep the migration, replace its deps.
                Just m -> do
                    let updatedM = m { mDeps = snd <$> (filter fst $ V.toList $ st^.editMigrationDeps.listElementsL)
                                     }
                    liftIO $ S.saveMigration (st^.store) updatedM
                    return $ Right updatedM
            case result of
                Left err -> continue =<< setStatus ("Error: " <> err) st
                Right _ -> continue
                    =<< setStatus "Migration saved."
                    =<< (reloadMigrations $ st & uiMode .~ MigrationListing)
        -- Only honor text input if we are editing a new migration; we
        -- don't permit the name of existing migrations to be changed
        _ -> case st^.editingMigration of
            Nothing -> continue =<< handleEventLensed st editMigrationName e
            Just _ -> continue st
editMigrationEvent st (ClearStatus msg) = continue $ clearStatus msg st
-- | Startup hook: load migrations from the backend/store before the
-- first draw.
startEvent :: St -> EventM St
startEvent = reloadMigrations
-- | Refresh both migration collections from the database backend and
-- the on-disk store, and rebuild the listing widget from the available
-- set.
reloadMigrations :: St -> EventM St
reloadMigrations st = do
    installedMs <- liftIO $ B.getMigrations $ st^.backend
    availableMs <- liftIO $ S.getMigrations $ st^.store
    return $ st & installedMigrations .~ installedMs
                & availableMigrations .~ availableMs
                & migrationList .~ list (Name "migrationList") (V.fromList availableMs) 1
-- | Show @msg@ in the status line and schedule it to be cleared after
-- seven seconds via a 'ClearStatus' event on the status channel.
setStatus :: String -> St -> EventM St
setStatus msg st = do
    -- set the status now, fork a thread that clears it later
    void $ liftIO $ forkIO $ do
        threadDelay $ 1000000 * 7   -- 7 seconds, in microseconds
        writeChan (st^.statusChan) (ClearStatus msg)
    return $ st & status .~ Just msg
-- | Clear the status line, but only if it still shows @msg@ — a newer
-- status set in the meantime is left alone.
clearStatus :: String -> St -> St
clearStatus msg st
    | st^.status == Just msg = st & status .~ Nothing
    | otherwise              = st
|
jtdaugherty/dbmigrations-client
|
src/Events.hs
|
bsd-3-clause
| 6,791
| 0
| 28
| 2,116
| 1,845
| 929
| 916
| -1
| -1
|
module Main where

-- | Placeholder action that just prints its own name.
someFunc :: IO ()
someFunc = putStrLn "someFunc"

-- | Entry point: delegates to 'someFunc'.
main :: IO ()
main = someFunc
|
cosmicexplorer/clue-solver
|
app/Main.hs
|
bsd-3-clause
| 99
| 0
| 6
| 20
| 37
| 20
| 17
| 5
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module EFA.Test.Stack where
import EFA.Test.Arithmetic (Triple)
import qualified EFA.Equation.Stack as Stack
import qualified EFA.Equation.MultiValue as MV
import EFA.Equation.Stack (Stack)
import qualified EFA.Equation.Arithmetic as Arith
import EFA.Equation.Arithmetic ((~+), (~-), (~*), (~/))
import qualified Data.Map as Map
import Data.Map (Map)
import Control.Applicative (liftA2)
import qualified Test.QuickCheck.Property.Generic as Law
import qualified Test.QuickCheck as QC
import Test.QuickCheck.Modifiers (Positive, getPositive)
import Test.QuickCheck.All (quickCheckAll)
-- Shorthands for the concrete Stack/MultiValue instantiations the
-- properties below are checked at.
type IntStack = Stack Char Integer
type IntMultiValue = MV.MultiValue Char Integer
type RatioStack = Stack Char Rational
type PosRatioMultiValue = MV.MultiValue Char (Positive Rational)

-- | A 'Map' wrapper whose sole purpose is carrying an 'Arbitrary'
-- instance (generate/shrink via the association list).
newtype AMap k a = AMap (Map k a) deriving (Show)

instance
   (Ord k, QC.Arbitrary k, QC.Arbitrary a) =>
      QC.Arbitrary (AMap k a) where
   arbitrary = fmap (AMap . Map.fromList) QC.arbitrary
   shrink (AMap m) = fmap (AMap . Map.fromList) $ QC.shrink $ Map.toList m
-- Filtering laws: 'Stack.filter' should act like a projection —
-- idempotent, order-independent, compatible with merged conditions,
-- and (via 'Stack.filtered') a homomorphism for (+).

-- | Filtering with the empty condition map changes nothing.
prop_filterIdentity :: IntStack -> Bool
prop_filterIdentity x =
   case Stack.startFilter x of
      fx -> Stack.filter Map.empty fx == Just fx

-- | 'filterNaive' is idempotent.
prop_filterProjectNaive :: AMap Char Stack.Branch -> IntStack -> Bool
prop_filterProjectNaive (AMap c) x =
   Stack.filterNaive c x == Stack.filterNaive c (Stack.filterNaive c x)

-- | 'Stack.filter' is idempotent (up to the Maybe layer).
prop_filterProject :: AMap Char Stack.Branch -> IntStack -> Bool
prop_filterProject (AMap c) x =
   case Stack.startFilter x of
      fx ->
         Stack.filter c fx
         ==
         (Stack.filter c =<< Stack.filter c fx)

-- | Applying two condition maps commutes.
prop_filterCommutative ::
   AMap Char Stack.Branch -> AMap Char Stack.Branch -> IntStack -> Bool
prop_filterCommutative (AMap c0) (AMap c1) x =
   case Stack.startFilter x of
      fx ->
         (Stack.filter c0 =<< Stack.filter c1 fx)
         ==
         (Stack.filter c1 =<< Stack.filter c0 fx)

-- | Sequential filtering equals filtering by the merged conditions.
prop_filterMerge :: AMap Char Stack.Branch -> AMap Char Stack.Branch -> IntStack -> Bool
prop_filterMerge (AMap c0) (AMap c1) x =
   case Stack.startFilter x of
      fx ->
         (Stack.filter c1 =<< Stack.filter c0 fx)
         ==
         (flip Stack.filter fx =<< Stack.mergeConditions c0 c1)

-- | Filtering distributes over addition of stacks.
prop_filterPlus :: AMap Char Stack.Branch -> IntStack -> IntStack -> Bool
prop_filterPlus (AMap c) x y =
   let filt = fmap Stack.filtered . Stack.filter c . Stack.startFilter
   in filt (x + y) == liftA2 (+) (filt x) (filt y)
-- Conversion laws: 'Stack.fromMultiValue'/'Stack.toMultiValue' form an
-- isomorphism that respects the arithmetic operations.

-- | Round-trip MultiValue -> Stack -> MultiValue is the identity.
prop_multiValueConvert :: IntMultiValue -> Bool
prop_multiValueConvert x =
   x == Stack.toMultiValue (Stack.fromMultiValue x)

-- | 'fromMultiValue' preserves addition.
prop_multiValuePlus :: IntMultiValue -> IntMultiValue -> Bool
prop_multiValuePlus x y =
   Stack.fromMultiValue (x+y)
   ==
   Stack.fromMultiValue x + Stack.fromMultiValue y

-- | 'fromMultiValue' preserves multiplication.
prop_multiValueTimes :: IntMultiValue -> IntMultiValue -> Bool
prop_multiValueTimes x y =
   Stack.fromMultiValue (x*y)
   ==
   Stack.fromMultiValue x * Stack.fromMultiValue y

-- | 'fromMultiValue' preserves negation.
prop_multiValueNegate :: IntMultiValue -> Bool
prop_multiValueNegate x =
   Stack.fromMultiValue (negate x) == negate (Stack.fromMultiValue x)

-- | 'fromMultiValue' preserves reciprocal (positive rationals only,
-- so division by zero cannot occur).
prop_multiValueRecip :: PosRatioMultiValue -> Bool
prop_multiValueRecip px =
   case fmap getPositive px of
      x -> Stack.fromMultiValue (recip x) == recip (Stack.fromMultiValue x)

-- | 'fromMultiValue' commutes with 'Arith.integrate'.
prop_multiValueIntegrate :: MV.MultiValue Char (Triple Integer) -> Bool
prop_multiValueIntegrate x =
   Stack.fromMultiValue (Arith.integrate x)
   ==
   Arith.integrate (Stack.fromMultiValue x)
-- The EFA arithmetic operators (~+ etc.) must agree with the Num /
-- Fractional instances, and the instances must satisfy the usual
-- algebraic laws (checked via quickcheck-property-generic).

prop_arithmeticPlus :: IntStack -> IntStack -> Bool
prop_arithmeticPlus x y = x+y == x~+y

prop_arithmeticMinus :: IntStack -> IntStack -> Bool
prop_arithmeticMinus x y = x-y == x~-y

prop_arithmeticNegate :: IntStack -> Bool
prop_arithmeticNegate x = negate x == Arith.negate x

prop_arithmeticTimes :: RatioStack -> RatioStack -> Bool
prop_arithmeticTimes x y = x*y == x~*y

-- | Division agrees; the divisor is built from positive rationals so it
-- is never zero.
prop_arithmeticDivide :: RatioStack -> PosRatioMultiValue -> Bool
prop_arithmeticDivide x py =
   case Stack.fromMultiValue $ fmap getPositive py of
      y -> x/y == x~/y

prop_arithmeticRecip :: PosRatioMultiValue -> Bool
prop_arithmeticRecip px =
   case Stack.fromMultiValue $ fmap getPositive px of
      x -> recip x == Arith.recip x

-- Standard algebraic laws for (+) and (*).
prop_commutativePlus :: IntStack -> IntStack -> Bool
prop_commutativePlus = Law.eq $ Law.prop_Commutative (+) Law.T

prop_commutativeTimes :: IntStack -> IntStack -> Bool
prop_commutativeTimes = Law.eq $ Law.prop_Commutative (*) Law.T

prop_associativePlus :: IntStack -> IntStack -> IntStack -> Bool
prop_associativePlus = Law.eq $ Law.prop_Associative (+) Law.T

prop_associativeTimes :: IntStack -> IntStack -> IntStack -> Bool
prop_associativeTimes = Law.eq $ Law.prop_Associative (*) Law.T

prop_identityPlus :: IntStack -> Bool
prop_identityPlus = Law.eq $ Law.prop_Identity 0 (+) Law.T

prop_identityTimes :: IntStack -> Bool
prop_identityTimes = Law.eq $ Law.prop_Identity 1 (*) Law.T

prop_associativeMinus :: IntStack -> IntStack -> IntStack -> Bool
prop_associativeMinus x y z = (x+y)-z == x+(y-z)

prop_swapMinus :: IntStack -> IntStack -> Bool
prop_swapMinus x y = (x-y) == negate (y-x)

-- | Additive inverse, checked up to the relaxed numeric equality.
prop_inversePlus :: IntStack -> Bool
prop_inversePlus =
   Law.eqWith Stack.eqRelaxedNum $
   Law.prop_GroupInverse 0 (+) negate Law.T

prop_distributivePlus :: IntStack -> IntStack -> IntStack -> Bool
prop_distributivePlus x y z = (x+y)*z == x*z + y*z

prop_distributiveMinus :: IntStack -> IntStack -> IntStack -> Bool
prop_distributiveMinus x y z = (x-y)*z == x*z - y*z

-- | Template-Haskell driver: run every prop_* in this module.
runTests :: IO Bool
runTests = $quickCheckAll
|
energyflowanalysis/efa-2.1
|
test/EFA/Test/Stack.hs
|
bsd-3-clause
| 5,539
| 0
| 12
| 983
| 1,887
| 983
| 904
| 125
| 1
|
{-# OPTIONS_GHC -W #-}
module Parse.Pattern (term, expr) where
import Control.Applicative ((<$>))
import Data.Char (isUpper)
import qualified Data.List as List
import Text.Parsec hiding (newline,spaces,State)
import Parse.Helpers
import Parse.Literal
import SourceSyntax.Literal
import qualified SourceSyntax.Pattern as P
-- | Parse an atomic pattern: wildcard @_@, a variable (where an
-- upper-case initial means a nullary data constructor, and the literals
-- True/False become boolean literal patterns), or a literal.
basic :: IParser P.Pattern
basic = choice
    [ char '_' >> return P.Anything
    , do v <- var
         return $ case v of
                    "True"          -> P.Literal (Boolean True)
                    "False"         -> P.Literal (Boolean False)
                    c:_ | isUpper c -> P.Data v []
                    _               -> P.Var v
    , P.Literal <$> literal
    ]
-- | Optionally wrap a pattern in an alias when it is followed by an
-- @as@ clause (e.g. @(x,y) as pair@); otherwise return it unchanged.
asPattern :: P.Pattern -> IParser P.Pattern
asPattern pattern = do
  mAlias <- optionMaybe (try (whitespace >> reserved "as" >> whitespace >> lowVar))
  return (maybe pattern (\v -> P.Alias v pattern) mAlias)
-- | Parse a record pattern: one or more lower-case field names inside
-- brackets.
record :: IParser P.Pattern
record = P.Record <$> brackets (commaSep1 lowVar)
-- | Parse a parenthesised pattern: a single element is just that
-- pattern (plain grouping); several elements form a tuple pattern.
tuple :: IParser P.Pattern
tuple = do
  contents <- parens (commaSep expr)
  case contents of
    [single] -> return single
    several  -> return (P.tuple several)
-- | Parse a list pattern: comma-separated patterns inside braces.
list :: IParser P.Pattern
list = P.list <$> braces (commaSep expr)
-- | A pattern term: record, tuple, list, or an atomic pattern.
term :: IParser P.Pattern
term =
  (choice [ record, tuple, list, basic ]) <?> "pattern"
-- | Parse a (possibly qualified) constructor pattern applied to term
-- arguments; the names True/False are treated as boolean literals.
patternConstructor :: IParser P.Pattern
patternConstructor = do
  v <- List.intercalate "." <$> dotSep1 capVar
  case v of
    "True"  -> return $ P.Literal (Boolean True)
    "False" -> return $ P.Literal (Boolean False)
    _       -> P.Data v <$> spacePrefix term
-- | Parse a full pattern: cons-separated constructor/term patterns
-- folded right into a cons chain, with an optional trailing @as@ alias.
-- 'foldr1' is safe here because 'consSep1' yields a non-empty list.
expr :: IParser P.Pattern
expr = do
  patterns <- consSep1 (patternConstructor <|> term)
  asPattern (foldr1 P.cons patterns) <?> "pattern"
|
deadfoxygrandpa/Elm
|
compiler/Parse/Pattern.hs
|
bsd-3-clause
| 1,747
| 0
| 16
| 477
| 624
| 316
| 308
| 50
| 4
|
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
module RW.Chap6.JSONClass where
import RW.Chap5.SimpleJSON
-- Human-readable description of a conversion failure.
type JSONError = String

-- | Types that can be converted to and from a 'JValue'.
class JSON a where
    toJValue :: a -> JValue
    fromJValue :: JValue -> Either JSONError a

-- | Trivial instance: a 'JValue' already is JSON.
instance JSON JValue where
    toJValue = id
    fromJValue = Right

instance JSON Bool where
    toJValue = JBool
    fromJValue (JBool b) = Right b
    fromJValue _ = Left "Could not parse a JBool"

instance JSON String where
    toJValue = JString
    fromJValue (JString s) = Right s
    fromJValue _ = Left "Could not parse a JString"
-- | Apply a conversion to the payload of a 'JNumber'; any other
-- 'JValue' is a conversion error.
doubleToJValue :: (Double -> a) -> JValue -> Either JSONError a
doubleToJValue convert value =
    case value of
      JNumber n -> Right (convert n)
      _         -> Left "not a JSON number"
-- Numeric instances all go through 'JNumber' (a Double); Int and
-- Integer round on the way back, so values outside Double's
-- exactly-representable integer range can lose precision.
instance JSON Int where
    toJValue = JNumber . realToFrac
    fromJValue = doubleToJValue round

instance JSON Integer where
    toJValue = JNumber . realToFrac
    fromJValue = doubleToJValue round

instance JSON Double where
    toJValue = JNumber
    fromJValue = doubleToJValue id
|
ChrisCoffey/rwh
|
src/RW/Chap6/JSONClass.hs
|
bsd-3-clause
| 1,018
| 0
| 8
| 208
| 287
| 149
| 138
| 31
| 1
|
module Foreign.Mms.Core(Mode(..), writeMms) where
import Control.Monad.State.Strict
import Foreign.Mms.Class(Mms(..))
import Foreign.Mms.Put(evalPut)
import qualified Data.ByteString.Lazy as L
-- Two storage modes for mms data.  NOTE(review): the exact semantics
-- (heap-allocated vs memory-mapped) are inferred from the constructor
-- names — confirm against the consumers of 'Mode'.
data Mode = Allocated | Mapped
-- | Serialise a value to a lazy 'L.ByteString' using its 'Mms'
-- instance: the data part is written first, then the field part.
writeMms :: Mms a m => a -> L.ByteString
writeMms value = evalPut (writeData value >> writeFields value)
|
eeight/haskell-mms
|
src/Foreign/Mms/Core.hs
|
bsd-3-clause
| 321
| 0
| 7
| 44
| 113
| 68
| 45
| 8
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main (main) where
import Data.Aeson.Types (toJSON)
import Data.List.Split (splitOn)
import QS (qs)
import Web.Scotty (get, json, param, scotty)
-- | Run a Scotty server on port 3000.  GET / splits the comma-separated
-- @items@ query parameter, runs the pieces through 'qs' (presumably a
-- quicksort — confirm in module QS), and replies with the result as
-- JSON.
main :: IO ()
main = scotty 3000 $ get "/" $ do
  (parameter :: String) <- param "items"
  json . toJSON . qs $ splitOn "," parameter
|
jbirch/fuckin-about-with-hs
|
src/Main.hs
|
bsd-3-clause
| 438
| 0
| 10
| 140
| 126
| 70
| 56
| 11
| 1
|
{-- snippet fragment --}
-- | Arithmetic mean of a list of 'Double's.
--
-- Computed in a single strict pass carrying a running sum and count,
-- so the list can be consumed lazily and garbage-collected as we go.
-- The original @sum xs / fromIntegral (length xs)@ traverses the list
-- twice and therefore retains the entire list in memory — the classic
-- space leak for large inputs.  An empty list yields @0/0 = NaN@,
-- matching the original behaviour.
mean :: [Double] -> Double
mean = go 0 (0 :: Int)
  where
    go s n []     = s / fromIntegral n
    go s n (x:xs) = let s' = s + x
                        n' = n + 1
                    in s' `seq` n' `seq` go s' n' xs
{-- /snippet fragment --}
|
binesiyu/ifl
|
examples/ch25/Fragment.hs
|
mit
| 122
| 0
| 8
| 22
| 42
| 21
| 21
| 2
| 1
|
{-|
This module defines a generic web application interface. It is a common
protocol between web servers and web applications.
The overriding design principles here are performance and generality. To
address performance, this library uses a streaming interface for request and
response bodies, paired with bytestring's 'Builder' type. The advantages of a
streaming API over lazy IO have been debated elsewhere and so will not be
addressed here. However, helper functions like 'responseLBS' allow you to
continue using lazy IO if you so desire.
Generality is achieved by removing many variables commonly found in similar
projects that are not universal to all servers. The goal is that the 'Request'
object contains only data which is meaningful in all circumstances.
Please remember when using this package that, while your application may
compile without a hitch against many different servers, there are other
considerations to be taken when moving to a new backend. For example, if you
transfer from a CGI application to a FastCGI one, you might suddenly find you
have a memory leak. Conversely, a FastCGI application would be well served to
preload all templates from disk when first starting; this would kill the
performance of a CGI application.
This package purposely provides very little functionality. You can find various
middlewares, backends and utilities on Hackage. Some of the most commonly used
include:
[warp] <http://hackage.haskell.org/package/warp>
[wai-extra] <http://hackage.haskell.org/package/wai-extra>
-}
-- Ignore deprecations, because this module needs to use the deprecated requestBody to construct a response.
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Network.Wai
(
-- * Types
Application
, Middleware
, ResponseReceived
-- * Request
, Request
, defaultRequest
, RequestBodyLength (..)
-- ** Request accessors
, requestMethod
, httpVersion
, rawPathInfo
, rawQueryString
, requestHeaders
, isSecure
, remoteHost
, pathInfo
, queryString
, getRequestBodyChunk
, requestBody
, vault
, requestBodyLength
, requestHeaderHost
, requestHeaderRange
, requestHeaderReferer
, requestHeaderUserAgent
-- $streamingRequestBodies
, strictRequestBody
, consumeRequestBodyStrict
, lazyRequestBody
, consumeRequestBodyLazy
-- * Response
, Response
, StreamingBody
, FilePart (..)
-- ** Response composers
, responseFile
, responseBuilder
, responseLBS
, responseStream
, responseRaw
-- ** Response accessors
, responseStatus
, responseHeaders
-- ** Response modifiers
, responseToStream
, mapResponseHeaders
, mapResponseStatus
-- * Middleware composition
, ifRequest
, modifyResponse
) where
import Data.ByteString.Builder (Builder, lazyByteString)
import Data.ByteString.Builder (byteString)
import Control.Monad (unless)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as L
import qualified Data.ByteString.Lazy.Internal as LI
import Data.ByteString.Lazy.Internal (defaultChunkSize)
import Data.ByteString.Lazy.Char8 ()
import Data.Function (fix)
import qualified Network.HTTP.Types as H
import Network.Socket (SockAddr (SockAddrInet))
import Network.Wai.Internal
import qualified System.IO as IO
import System.IO.Unsafe (unsafeInterleaveIO)
----------------------------------------------------------------
-- | Create a 'Response' that serves a file from disk, optionally restricted
-- to the byte range described by the 'FilePart'.
responseFile :: H.Status -> H.ResponseHeaders -> FilePath -> Maybe FilePart -> Response
responseFile status headers path part = ResponseFile status headers path part
-- | Creating 'Response' from 'Builder'.
--
-- Some questions and answers about the usage of 'Builder' here:
--
-- Q1. Shouldn't it be at the user's discretion to use Builders internally and
-- then create a stream of ByteStrings?
--
-- A1. That would be less efficient, as we wouldn't get cheap concatenation
-- with the response headers.
--
-- Q2. Isn't it really inefficient to convert from ByteString to Builder, and
-- then right back to ByteString?
--
-- A2. No. If the ByteStrings are small, then they will be copied into a larger
-- buffer, which should be a performance gain overall (less system calls). If
-- they are already large, then an insert operation is used
-- to avoid copying.
--
-- Q3. Doesn't this prevent us from creating comet-style servers, since data
-- will be cached?
--
-- A3. You can force a Builder to output a ByteString before it is an
-- optimal size by sending a flush command.
-- | Create a 'Response' from a 'Builder' (see the Q&A above for why Builder
-- is the primitive body representation).
responseBuilder :: H.Status -> H.ResponseHeaders -> Builder -> Response
responseBuilder status headers builder = ResponseBuilder status headers builder
-- | Create a 'Response' from a lazy 'L.ByteString'; a convenience wrapper
-- that converts the body with 'lazyByteString'.
responseLBS :: H.Status -> H.ResponseHeaders -> L.ByteString -> Response
responseLBS status headers body = ResponseBuilder status headers (lazyByteString body)
-- | Creating 'Response' from a stream of values.
--
-- In order to allocate resources in an exception-safe manner, you can use the
-- @bracket@ pattern outside of the call to @responseStream@. As a trivial
-- example:
--
-- @
-- app :: Application
-- app req respond = bracket_
-- (putStrLn \"Allocating scarce resource\")
-- (putStrLn \"Cleaning up\")
-- $ respond $ responseStream status200 [] $ \\write flush -> do
-- write $ byteString \"Hello\\n\"
-- flush
-- write $ byteString \"World\\n\"
-- @
--
-- Note that in some cases you can use @bracket@ from inside @responseStream@
-- as well. However, placing the call on the outside allows your status value
-- and response headers to depend on the scarce resource.
--
-- Since 3.0.0
-- Thin alias for the 'ResponseStream' constructor; the 'StreamingBody' is
-- later invoked by the server backend with write and flush actions.
responseStream :: H.Status
               -> H.ResponseHeaders
               -> StreamingBody
               -> Response
responseStream = ResponseStream
-- | Create a response for a raw application. This is useful for \"upgrade\"
-- situations such as WebSockets, where an application requests for the server
-- to grant it raw network access.
--
-- This function requires a backup response to be provided, for the case where
-- the handler in question does not support such upgrading (e.g., CGI apps).
--
-- In the event that you read from the request body before returning a
-- @responseRaw@, behavior is undefined.
--
-- Since 2.1.0
-- The first argument receives the raw receive/send actions; the second is
-- the backup 'Response' used when the backend cannot grant raw access.
responseRaw :: (IO B.ByteString -> (B.ByteString -> IO ()) -> IO ())
            -> Response
            -> Response
responseRaw = ResponseRaw
----------------------------------------------------------------
-- | Extract the 'H.Status' of a 'Response'.  For a raw response, this is the
-- status of its backup response.
responseStatus :: Response -> H.Status
responseStatus response = case response of
    ResponseFile    s _ _ _ -> s
    ResponseBuilder s _ _   -> s
    ResponseStream  s _ _   -> s
    ResponseRaw _ fallback  -> responseStatus fallback
-- | Extract the 'H.ResponseHeaders' of a 'Response'.  For a raw response,
-- these are the headers of its backup response.
responseHeaders :: Response -> H.ResponseHeaders
responseHeaders response = case response of
    ResponseFile    _ hs _ _ -> hs
    ResponseBuilder _ hs _   -> hs
    ResponseStream  _ hs _   -> hs
    ResponseRaw _ fallback   -> responseHeaders fallback
-- | Converting the body information in 'Response' to a 'StreamingBody'.
--
-- The third tuple element is a continuation-passing wrapper: it owns any
-- resource (here, an open file handle) for exactly the duration of the
-- callback, via 'IO.withBinaryFile'.
responseToStream :: Response
                 -> ( H.Status
                    , H.ResponseHeaders
                    , (StreamingBody -> IO a) -> IO a
                    )
responseToStream (ResponseStream s h b) = (s, h, ($ b))
-- File with an explicit range: seek to the part's offset, then send at most
-- the requested byte count in 'defaultChunkSize' pieces.
responseToStream (ResponseFile s h fp (Just part)) =
    ( s
    , h
    , \withBody -> IO.withBinaryFile fp IO.ReadMode $ \handle -> withBody $ \sendChunk _flush -> do
        IO.hSeek handle IO.AbsoluteSeek $ filePartOffset part
        let loop remaining | remaining <= 0 = return ()
            loop remaining = do
                bs <- B.hGetSome handle defaultChunkSize
                -- 'unless' also terminates the loop on EOF (empty chunk).
                unless (B.null bs) $ do
                    let x = B.take remaining bs
                    sendChunk $ byteString x
                    loop $ remaining - B.length x
        loop $ fromIntegral $ filePartByteCount part
    )
-- Whole file: stream fixed-size chunks until EOF.
responseToStream (ResponseFile s h fp Nothing) =
    ( s
    , h
    , \withBody -> IO.withBinaryFile fp IO.ReadMode $ \handle ->
        withBody $ \sendChunk _flush -> fix $ \loop -> do
            bs <- B.hGetSome handle defaultChunkSize
            unless (B.null bs) $ do
                sendChunk $ byteString bs
                loop
    )
-- In-memory body: emit the builder as a single chunk.
responseToStream (ResponseBuilder s h b) =
    (s, h, \withBody -> withBody $ \sendChunk _flush -> sendChunk b)
-- Raw responses delegate to their backup response.
responseToStream (ResponseRaw _ res) = responseToStream res
-- | Apply the provided function to the response header list of the Response.
-- A 'ResponseRaw' carries no header list, so it is returned unchanged.
mapResponseHeaders :: (H.ResponseHeaders -> H.ResponseHeaders) -> Response -> Response
mapResponseHeaders f response = case response of
    ResponseFile st hs path part -> ResponseFile st (f hs) path part
    ResponseBuilder st hs b      -> ResponseBuilder st (f hs) b
    ResponseStream st hs body    -> ResponseStream st (f hs) body
    raw@(ResponseRaw _ _)        -> raw
-- | Apply the provided function to the response status of the Response.
--
-- NOTE(review): unlike 'responseStatus', the 'ResponseRaw' case does not
-- recurse into the backup response — the function is simply ignored there.
-- Confirm that asymmetry is intended before relying on it.
mapResponseStatus :: (H.Status -> H.Status) -> Response -> Response
mapResponseStatus f (ResponseFile s h b1 b2) = ResponseFile (f s) h b1 b2
mapResponseStatus f (ResponseBuilder s h b) = ResponseBuilder (f s) h b
mapResponseStatus f (ResponseStream s h b) = ResponseStream (f s) h b
mapResponseStatus _ r@(ResponseRaw _ _) = r
----------------------------------------------------------------
-- | The WAI application.
--
-- Note that, since WAI 3.0, this type is structured in continuation passing
-- style to allow for proper safe resource handling. This was handled in the
-- past via other means (e.g., @ResourceT@). As a demonstration:
--
-- @
-- app :: Application
-- app req respond = bracket_
-- (putStrLn \"Allocating scarce resource\")
-- (putStrLn \"Cleaning up\")
-- (respond $ responseLBS status200 [] \"Hello World\")
-- @
type Application = Request -> (Response -> IO ResponseReceived) -> IO ResponseReceived
-- | A default, blank request.
--
-- Since 2.0.0
defaultRequest :: Request
-- Every field is the neutral/empty value for its type: GET over HTTP/1.0,
-- empty paths and query, no headers, placeholder peer address 0.0.0.0:0,
-- and a body that yields no chunks (length 0).
defaultRequest = Request
    { requestMethod = H.methodGet
    , httpVersion = H.http10
    , rawPathInfo = B.empty
    , rawQueryString = B.empty
    , requestHeaders = []
    , isSecure = False
    , remoteHost = SockAddrInet 0 0
    , pathInfo = []
    , queryString = []
    , requestBody = return B.empty
    , vault = mempty
    , requestBodyLength = KnownLength 0
    , requestHeaderHost = Nothing
    , requestHeaderRange = Nothing
    , requestHeaderReferer = Nothing
    , requestHeaderUserAgent = Nothing
    }
-- | A @Middleware@ is a component that sits between the server and application.
--
-- It can modify both the 'Request' and 'Response',
-- to provide simple transformations that are required for all (or most of)
-- your web server’s routes.
--
-- = Users of middleware
--
-- If you are trying to apply one or more 'Middleware's to your 'Application',
-- just call them as functions.
--
-- For example, if you have @corsMiddleware@ and @authorizationMiddleware@,
-- and you want to authorize first, you can do:
--
-- @
-- let allMiddleware app = authorizationMiddleware (corsMiddleware app)
-- @
--
-- to get a new 'Middleware', which first authorizes, then sets, CORS headers.
-- The “outer” middleware is called first.
--
-- You can also chain them via '(.)':
--
-- @
-- let allMiddleware =
-- authorizationMiddleware
-- . corsMiddleware
-- . … more middleware here …
-- @
--
-- Then, once you have an @app :: Application@, you can wrap it
-- in your middleware:
--
-- @
-- let myApp = allMiddleware app :: Application
-- @
--
-- and run it as usual:
--
-- @
-- Warp.run port myApp
-- @
--
-- = Authors of middleware
--
-- When fully expanded, 'Middleware' has the type signature:
--
-- > (Request -> (Response -> IO ResponseReceived) -> IO ResponseReceived) -> Request -> (Response -> IO ResponseReceived) -> IO ResponseReceived
--
-- or if we shorten to @type Respond = Response -> IO ResponseReceived@:
--
-- > (Request -> Respond -> IO ResponseReceived) -> Request -> Respond -> IO ResponseReceived
--
-- so a middleware definition takes 3 arguments, an inner application, a request and a response callback.
--
-- Compare with the type of a simple `Application`:
--
-- > Request -> Respond -> IO ResponseReceived
--
-- It takes the 'Request' and @Respond@, but not the extra application.
--
-- Said differently, a middleware has the power of a normal 'Application'
-- — it can inspect the 'Request' and return a 'Response' —
-- but it can (and in many cases it /should/) also call the 'Application' which was passed to it.
--
-- == Modifying the 'Request'
--
-- A lot of middleware just looks at the request and does something based on its values.
--
-- For example, the @authorizationMiddleware@ from above could look at the @Authorization@
-- HTTP header and run <https://jwt.io/ JWT> verification logic against the database.
--
-- @
-- authorizationMiddleware app req respond = do
-- case verifyJWT ('requestHeaders' req) of
-- InvalidJWT err -> respond (invalidJWTResponse err)
-- ValidJWT -> app req respond
-- @
--
-- Notice how the inner app is called when the validation was successful.
-- If it was not, we can respond
-- e.g. with <https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/401 HTTP 401 Unauthorized>,
-- by constructing a 'Response' with 'responseLBS' and passing it to @respond@.
--
-- == Passing arguments to and from your 'Middleware'
--
-- Middleware must often be configurable.
-- Let’s say you have a type @JWTSettings@ that you want to be passed to the middleware.
-- Simply pass an extra argument to your middleware. Then your middleware type turns into:
--
-- @
-- authorizationMiddleware :: JWTSettings -> Application -> Request -> Respond -> IO ResponseReceived
-- authorizationMiddleware jwtSettings req respond =
-- case verifyJWT jwtSettings ('requestHeaders' req) of
-- InvalidJWT err -> respond (invalidJWTResponse err)
-- ValidJWT -> app req respond
-- @
--
-- or alternatively:
--
-- @
-- authorizationMiddleware :: JWTSettings -> Middleware
-- @
--
-- Perhaps less intuitively, you can also /pass on/ data from middleware to the wrapped 'Application':
--
-- @
-- authorizationMiddleware :: JWTSettings -> (JWT -> Application) -> Request -> Respond -> IO ResponseReceived
-- authorizationMiddleware jwtSettings req respond =
-- case verifyJWT jwtSettings ('requestHeaders' req) of
-- InvalidJWT err -> respond (invalidJWTResponse err)
-- ValidJWT jwt -> app jwt req respond
-- @
--
-- although then, chaining different middleware has to take this extra argument into account:
--
-- @
-- let finalApp =
-- authorizationMiddleware
-- (\\jwt -> corsMiddleware
-- (… more middleware here …
-- (app jwt)))
-- @
--
-- == Modifying the 'Response'
--
-- 'Middleware' can also modify the 'Response' that is returned by the inner application.
--
-- This is done by taking the @respond@ callback, using it to define a new @respond'@,
-- and passing this new @respond'@ to the @app@:
--
-- @
-- gzipMiddleware app req respond = do
-- let respond' resp = do
-- resp' <- gzipResponseBody resp
-- respond resp'
-- app req respond'
-- @
--
-- However, modifying the response (especially the response body) is not trivial,
-- so in order to get a sense of how to do it (dealing with the type of 'responseToStream'),
-- it’s best to look at an example, for example <https://hackage.haskell.org/package/wai-extra/docs/src/Network.Wai.Middleware.Gzip.html#gzip the GZIP middleware of wai-extra>.
type Middleware = Application -> Application
-- | Lift a pure 'Response' transformation into a 'Middleware': the inner
-- application runs as usual, and its response is transformed by @f@ just
-- before being handed to the server's respond callback.
modifyResponse :: (Response -> Response) -> Middleware
modifyResponse f app req respond = app req (\res -> respond (f res))
-- | Apply a 'Middleware' only to requests matching the predicate; all other
-- requests go straight to the wrapped application.
ifRequest :: (Request -> Bool) -> Middleware -> Middleware
ifRequest rpred middle app req =
    if rpred req
        then middle app req
        else app req
-- $streamingRequestBodies
--
-- == Streaming Request Bodies
--
-- WAI is designed for streaming in request bodies, which allows you to process them incrementally.
-- You can stream in the request body using functions like 'getRequestBodyChunk',
-- the @wai-conduit@ package, or Yesod's @rawRequestBody@.
--
-- In the normal case, incremental processing is more efficient, since it
-- reduces maximum total memory usage.
-- In the worst case, it helps protect your server against denial-of-service (DOS) attacks, in which
-- an attacker sends huge request bodies to your server.
--
-- Consider these tips to avoid reading the entire request body into memory:
--
-- * Look for library functions that support incremental processing. Sometimes these will use streaming
-- libraries like @conduit@, @pipes@, or @streaming@.
-- * Any attoparsec parser supports streaming input. For an example of this, see the
-- "Data.Conduit.Attoparsec" module in @conduit-extra@.
-- * Consider streaming directly to a file on disk. For an example of this, see the
-- "Data.Conduit.Binary" module in @conduit-extra@.
-- * If you need to direct the request body to multiple destinations, you can stream to both those
-- destinations at the same time.
-- For example, if you wanted to run an HMAC on the request body as well as parse it into JSON,
-- you could use Conduit's @zipSinks@ to send the data to @cryptonite-conduit@'s 'sinkHMAC' and
-- @aeson@'s Attoparsec parser.
-- * If possible, avoid processing large data on your server at all.
-- For example, instead of uploading a file to your server and then to AWS S3,
-- you can have the browser upload directly to S3.
--
-- That said, sometimes it is convenient, or even necessary to read the whole request body into memory.
-- For these purposes, functions like 'strictRequestBody' or 'lazyRequestBody' can be used.
-- When this is the case, consider these strategies to mitigating potential DOS attacks:
--
-- * Set a limit on the request body size you allow.
-- If certain endpoints need larger bodies, whitelist just those endpoints for the large size.
-- Be especially cautious about endpoints that don't require authentication, since these are easier to DOS.
-- You can accomplish this with @wai-extra@'s @requestSizeLimitMiddleware@ or Yesod's @maximumContentLength@.
-- * Consider rate limiting not just on total requests, but also on total bytes sent in.
-- * Consider using services that allow you to identify and blacklist attackers.
-- * Minimize the amount of time the request body stays in memory.
-- * If you need to share request bodies across middleware and your application, you can do so using Wai's 'vault'.
-- If you do this, remove the request body from the vault as soon as possible.
--
-- Warning: Incremental processing will not always be sufficient to prevent a DOS attack.
-- For example, if an attacker sends you a JSON body with a 2MB long string inside,
-- even if you process the body incrementally, you'll still end up with a 2MB-sized 'Text'.
--
-- To mitigate this, employ some of the countermeasures listed above,
-- and try to reject such payloads as early as possible in your codebase.
-- | Get the request body as a lazy ByteString. However, do /not/ use any lazy
-- I\/O, instead reading the entire body into memory strictly.
--
-- Note: Since this function consumes the request body, future calls to it will return the empty string.
--
-- Since 3.0.1
strictRequestBody :: Request -> IO L.ByteString
-- Drains the request body eagerly, accumulating chunks as a difference list
-- of lazy-ByteString spine constructors so the result is built in order
-- without quadratic append.
strictRequestBody req = go id
  where
    go acc = do
        chunk <- getRequestBodyChunk req
        if B.null chunk
            then return (acc LI.Empty)
            else go (acc . LI.Chunk chunk)
-- | Synonym for 'strictRequestBody'.
-- The name signals that the call consumes (drains) the request body and is
-- therefore not idempotent: a second call returns the empty string.
--
-- @since 3.2.3
consumeRequestBodyStrict :: Request -> IO L.ByteString
consumeRequestBodyStrict = strictRequestBody
-- | Get the request body as a lazy ByteString. This uses lazy I\/O under the
-- surface, and therefore all typical warnings regarding lazy I/O apply.
--
-- Note: Since this function consumes the request body, future calls to it will return the empty string.
--
-- Since 1.4.1
lazyRequestBody :: Request -> IO L.ByteString
lazyRequestBody req =
    loop
  where
    -- 'unsafeInterleaveIO' defers reading each chunk until the spine of the
    -- lazy ByteString is actually demanded — classic lazy I/O, with all the
    -- caveats noted in the Haddock above (errors surface late, ordering is
    -- demand-driven).
    loop = unsafeInterleaveIO $ do
        bs <- getRequestBodyChunk req
        if B.null bs
            then return LI.Empty
            else do
                bss <- loop
                return $ LI.Chunk bs bss
-- | Synonym for 'lazyRequestBody'.
-- The name signals that the call consumes (drains) the request body and is
-- therefore not idempotent: a second call returns the empty string.
--
-- @since 3.2.3
consumeRequestBodyLazy :: Request -> IO L.ByteString
consumeRequestBodyLazy = lazyRequestBody
|
kazu-yamamoto/wai
|
wai/Network/Wai.hs
|
mit
| 21,051
| 0
| 25
| 4,394
| 2,185
| 1,309
| 876
| 171
| 2
|
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE IncoherentInstances #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
module Baum.Heap.Central where
import Baum.Heap.Config
import Baum.Heap.Class
import Baum.Heap.Op
import Baum.Heap.Inter
import Baum.Heap.Generate
import qualified Tree
-- import qualified Baum.Binary as B
-- import qualified Baum.ZweiDrei as Z
import Challenger.Partial
import Autolib.Reporter
import Autolib.ToDoc
import Autolib.Dot
import Autolib.Reader
import Inter.Types
import Inter.Quiz
import Data.Typeable
-- | Newtype wrapper that tags problem instances with the tree flavour 't'.
newtype T t = T t deriving (Typeable)
-- Show/Read delegate to the wrapped tag, so the wrapper is invisible in
-- serialized form.
instance Show t => Show (T t) where
    show (T t) = show t
instance Read t => Read (T t) where
    readsPrec d s = [(T t, s') | (t, s') <- readsPrec d s]
-- No ordering of scores for this task type.
instance OrderScore (T t) where
    scoringOrder _ = None
instance ( Tag t baum a ) =>
         Measure (T t) ( Instanz baum a ) [ Op a ] where
    -- A submission's score is simply the number of operations it uses.
    measure _ _ ops = fromIntegral (length ops)
instance ( Tag t baum a ) =>
         Partial (T t) ( Instanz baum a ) [ Op a ] where
    -- Present the task: the start tree, the operation plan (with Any holes
    -- the student must fill), and the target tree.  User-facing strings are
    -- German by design — do not translate.
    report _ ( start, plan, end ) = do
        inform $ text "Auf den Baum:"
        peng start
        inform $ vcat
              [ text "sollen diese Operationen angewendet werden"
              , text "(wobei Sie Any geeignet ersetzen sollen):"
              , nest 4 $ toDoc plan
              , text "so daß dieser Baum entsteht:"
              ]
        peng end
    -- The student's starting point is the given plan as-is.
    initial _ ( start, plan, end ) =
        plan
    -- Grade a submission: replay the operations from the start tree and
    -- require the result to be 'equal' to the target tree.
    total _ ( start, plan, end ) ops = do
        inform $ text "Beginne mit"
        peng start
        c <- steps start plan ops
        inform $ text "Stimmt überein mit Aufgabenstellung?"
        peng end
        assert ( c `equal` end) $ Autolib.ToDoc.empty
-- Random instance generation is delegated to 'Baum.Heap.Generate.generate';
-- the RNG key argument is unused here.
instance Tag t baum a
    => Generator (T t) ( Config a ) ( Instanz baum a ) where
    generator t conf key = Baum.Heap.Generate.generate conf
-- The generated instance is used verbatim as the problem statement.
instance Project (T t) ( Instanz baum a ) ( Instanz baum a ) where
    project t i = i
-- | Type proxy: never evaluated, only used with 'asTypeOf' below to pin down
-- the concrete tree type associated with tag 't'.
baum_type :: Tag t baum Int => t -> baum Int
baum_type = undefined
-- | Fixed (non-randomized) task: start from the tree containing {3,4}; the
-- hidden solution (masked as two 'Any' steps) inserts 2, then deletes the
-- minimum, producing the target tree.
make_fixed :: ( Tag t baum Int ) => t -> Make
make_fixed t =
    let start = ( flip insert 4 $ flip insert 3
                $ Baum.Heap.Class.empty )
              `asTypeOf` baum_type t
        end = deleteMin $ flip insert 2
            $ start
    in direct ( T t ) ( start
            , [ Any, Any ] :: [ Op Int ]
            , end
            )
-- | Randomized (quiz) variant, drawing instances from the example 'Config'.
make_quiz :: ( Tag t baum Int ) => t -> Make
make_quiz t =
    quiz (T t) Baum.Heap.Config.example
|
Erdwolf/autotool-bonn
|
src/Baum/Heap/Central.hs
|
gpl-2.0
| 2,593
| 22
| 13
| 745
| 811
| 433
| 378
| 72
| 1
|
{-|
A 'Ledger' is derived from a 'Journal' by applying a filter specification
to select 'Transaction's and 'Posting's of interest. It contains the
filtered journal and knows the resulting chart of accounts, account
balances, and postings in each account.
-}
{-# LANGUAGE OverloadedStrings #-}
module Hledger.Data.Ledger (
nullledger
,ledgerFromJournal
,ledgerAccountNames
,ledgerAccount
,ledgerRootAccount
,ledgerTopAccounts
,ledgerLeafAccounts
,ledgerPostings
,ledgerDateSpan
,ledgerCommodities
,tests_Ledger
)
where
import qualified Data.Map as M
import Safe (headDef)
import Text.Printf
import Test.Tasty (testGroup)
import Test.Tasty.HUnit ((@?=), testCase)
import Hledger.Data.Types
import Hledger.Data.Account
import Hledger.Data.Journal
import Hledger.Query
-- Debug-oriented rendering: counts all transaction kinds (regular, modifier,
-- periodic) plus the account names; the account-tree rendering is left
-- commented out.
instance Show Ledger where
    show l = printf "Ledger with %d transactions, %d accounts\n" --"%s"
             (length (jtxns $ ljournal l) +
              length (jtxnmodifiers $ ljournal l) +
              length (jperiodictxns $ ljournal l))
             (length $ ledgerAccountNames l)
             -- (showtree $ ledgerAccountNameTree l)
-- | The empty ledger: a null journal and no accounts.
nullledger :: Ledger
nullledger = Ledger {
  ljournal = nulljournal,
  laccounts = []
}
-- | Filter a journal's transactions with the given query, then build
-- a "Ledger", containing the journal plus the tree of all its
-- accounts with their subaccount-inclusive and subaccount-exclusive
-- balances. If the query includes a depth limit, the ledger's journal
-- will be depth limited, but the ledger's account tree will not.
ledgerFromJournal :: Query -> Journal -> Ledger
ledgerFromJournal q j = nullledger{ljournal=j'', laccounts=as}
  where
    -- Split the query: everything except the depth limit is applied before
    -- accounts are derived; the depth limit is applied to the journal only
    -- afterwards, so the account tree itself stays un-truncated.
    (q',depthq) = (filterQuery (not . queryIsDepth) q, filterQuery queryIsDepth q)
    j' = filterJournalAmounts (filterQuery queryIsSym q) $ -- remove amount parts which the query's sym: terms would exclude
         filterJournalPostings q' j
    as = accountsFromPostings $ journalPostings j'
    j'' = filterJournalPostings depthq j'
-- | List a ledger's account names, excluding the synthetic root account
-- (which is always first in the list).
ledgerAccountNames :: Ledger -> [AccountName]
ledgerAccountNames l = drop 1 [aname a | a <- laccounts l]
-- | Get the named account from a ledger, or Nothing if it is absent.
ledgerAccount :: Ledger -> AccountName -> Maybe Account
ledgerAccount l a = lookupAccount a $ laccounts l
-- | Get this ledger's root account, which is a dummy "root" account
-- above all others. This should always be first in the account list,
-- if somehow not this returns a null account.
-- ('headDef' keeps this total even when the account list is empty.)
ledgerRootAccount :: Ledger -> Account
ledgerRootAccount = headDef nullacct . laccounts
-- | List a ledger's top-level accounts (the ones below the root), in tree order.
-- Returns [] when the ledger has no accounts at all, rather than crashing on
-- the partial 'head' as before — consistent with 'ledgerRootAccount''s
-- total, null-account fallback behaviour.
ledgerTopAccounts :: Ledger -> [Account]
ledgerTopAccounts l = case laccounts l of
    root:_ -> asubs root
    []     -> []
-- | List a ledger's bottom-level (subaccount-less) accounts, in tree order.
ledgerLeafAccounts :: Ledger -> [Account]
ledgerLeafAccounts l = [a | a <- laccounts l, null (asubs a)]
-- | List a ledger's postings, in the order parsed.
-- (These come from the filtered journal stored inside the ledger.)
ledgerPostings :: Ledger -> [Posting]
ledgerPostings = journalPostings . ljournal
-- | The (fully specified) date span containing all the ledger's (filtered) transactions,
-- or DateSpan Nothing Nothing if there are none.
ledgerDateSpan :: Ledger -> DateSpan
ledgerDateSpan = journalDateSpanBothDates . ljournal
-- | All commodities used in this ledger, taken from the journal's inferred
-- commodity map (keys only).
ledgerCommodities :: Ledger -> [CommoditySymbol]
ledgerCommodities = M.keys . jinferredcommodities . ljournal
-- tests
-- Unit tests: expected posting counts for the empty journal, the sample
-- journal, and the sample journal restricted to depth 2.
tests_Ledger =
  testGroup "Ledger" [
    testCase "ledgerFromJournal" $ do
      length (ledgerPostings $ ledgerFromJournal Any nulljournal) @?= 0
      length (ledgerPostings $ ledgerFromJournal Any samplejournal) @?= 13
      length (ledgerPostings $ ledgerFromJournal (Depth 2) samplejournal) @?= 7
  ]
|
simonmichael/hledger
|
hledger-lib/Hledger/Data/Ledger.hs
|
gpl-3.0
| 3,775
| 0
| 16
| 685
| 664
| 370
| 294
| 61
| 1
|
{-# LANGUAGE RecordWildCards #-}
module Graphics.GL.Pal.Geometries.Triangle where
import Graphics.GL
import Graphics.GL.Pal.Types
import Graphics.GL.Pal.Geometry
import Linear hiding (trace)
import Control.Lens hiding (indices)
import Data.Foldable
import Control.Monad.Trans
-- | Build the 'GeometryData' for a single triangle from three vertex
-- positions @p1 p2 p3@ and one shared normal @n1@ (applied to all vertices).
--
-- NOTE(review): 'size' on the gdPositions line is not bound by this function
-- (parameters are p1 p2 p3 n1) — unless a top-level 'size' exists elsewhere
-- in the project, this cannot typecheck as written; confirm intent.
-- NOTE(review): the local 'p' is unused.
triangleData :: V3 GLfloat -> V3 GLfloat -> V3 GLfloat -> V3 GLfloat -> GeometryData
triangleData p1 p2 p3 n1 = GeometryData{..}
  where
    p = 1/sqrt 2
    -- The base Array of vertices
    vertList = [ p1
               , p2
               , p3
               ]
    -- One face, winding through vertices 1,2,0.
    faceList = [ V3 1 2 0 ]
    gdNumVerts = fromIntegral (length vertList)
    gdPositions = makeTrianglePositions size vertList
    -- UVs: bottom-left, top-middle, bottom-right.
    gdUVs = [V2 0 0, V2 0.5 1, V2 1 0]
    gdIndices = concatMap toList faceList
    -- Same normal for every vertex; tangents zeroed out.
    gdNormals = replicate (length vertList) n1
    gdTangents = replicate (length vertList) 0
triangleGeometry :: MonadIO m => GLfloat -> m Geometry
-- NOTE(review): 'triangleData' takes four arguments (three positions and a
-- normal) but is applied to a single 'size' here — as visible in this file
-- this cannot typecheck; confirm the intended API before use.
triangleGeometry size = geometryFromData $ triangleData size
|
lukexi/gl-pal
|
src/Graphics/GL/Pal/Geometries/Triangle.hs
|
bsd-3-clause
| 1,028
| 0
| 9
| 282
| 285
| 155
| 130
| 24
| 1
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
-------------------------------------------------------------------------------------
-- |
-- Copyright : (c) Hans Hoglund 2012-2014
--
-- License : BSD-style
--
-- Maintainer : hans@hanshoglund.se
-- Stability : experimental
-- Portability : non-portable (TF,GNTD)
--
-- Provides special barlines as meta-data.
--
-- (Ordinary barlines are generated automatically, see also "Music.Score.Meta.Time").
--
-------------------------------------------------------------------------------------
module Music.Score.Meta.Barline (
-- * Barline type
BarlineType(..),
Barline,
-- ** Adding barlines to scores
barline,
doubleBarline,
finalBarline,
barlineDuring,
-- ** Extracting barlines
withBarline,
) where
import Control.Lens (view)
import Control.Monad.Plus
import Data.Foldable (Foldable)
import qualified Data.Foldable as F
import qualified Data.List as List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Semigroup
import Data.Set (Set)
import qualified Data.Set as Set
import Data.String
import Data.Traversable (Traversable)
import qualified Data.Traversable as T
import Data.Typeable
import Music.Pitch.Literal
import Music.Score.Meta
import Music.Score.Part
import Music.Score.Pitch
import Music.Score.Internal.Util
import Music.Time
import Music.Time.Reactive
-- | Represents a barline.
--
-- TODO repeats
data Barline = Barline BarlineType
    deriving (Eq, Ord, Show, Typeable)
-- | The supported barline flavours: ordinary, double, and final.
data BarlineType = StandardBarline | DoubleBarline | FinalBarline
    deriving (Eq, Ord, Show, Typeable)
-- | Add a barline over the whole score (i.e. spanning the score's era).
barline :: (HasMeta a, HasPosition a) => Barline -> a -> a
barline c x = barlineDuring (_era x) c x
-- | Add a double barline over the whole score.
--
-- NOTE(review): unimplemented stub ('undefined') — calling this is a runtime
-- error.  The signature also takes an explicit 'Barline' argument, which the
-- name does not suggest; confirm the intended API before implementing.
doubleBarline :: (HasMeta a, HasPosition a) => Barline -> a -> a
doubleBarline = undefined
-- | Add a final barline over the whole score.
--
-- NOTE(review): unimplemented stub ('undefined') — calling this is a runtime
-- error.  Same API question as 'doubleBarline'.
finalBarline :: (HasMeta a, HasPosition a) => Barline -> a -> a
finalBarline = undefined
-- | Add a barline to the given score, attached as meta-data over the span.
-- ('Last' semantics: a later barline note in the same span wins.)
barlineDuring :: HasMeta a => Span -> Barline -> a -> a
barlineDuring s c = addMetaNote $ view event (s, (Option $ Just $ Last c))
-- | Extract barlines from the given score, handing each to the callback;
-- spans without a barline are left unchanged ('maybe id').
withBarline :: (Barline -> Score a -> Score a) -> Score a -> Score a
withBarline f = withMeta (maybe id f . fmap getLast . getOption)
|
music-suite/music-score
|
src/Music/Score/Meta/Barline.hs
|
bsd-3-clause
| 3,212
| 0
| 10
| 890
| 537
| 324
| 213
| 55
| 1
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Register
-- Copyright : Isaac Jones 2003-2004
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This module deals with registering and unregistering packages. There are a
-- couple ways it can do this, one is to do it directly. Another is to generate
-- a script that can be run later to do it. The idea here being that the user
-- is shielded from the details of what command to use for package registration
-- for a particular compiler. In practice this aspect was not especially
-- popular so we also provide a way to simply generate the package registration
-- file which then must be manually passed to @ghc-pkg@. It is possible to
-- generate registration information for where the package is to be installed,
-- or alternatively to register the package in place in the build tree. The
-- latter is occasionally handy, and will become more important when we try to
-- build multi-package systems.
--
-- This module does not delegate anything to the per-compiler modules but just
-- mixes it all in in this module, which is rather unsatisfactory. The script
-- generation and the unregister feature are not well used or tested.
module Distribution.Simple.Register (
register,
unregister,
initPackageDB,
doesPackageDBExist,
createPackageDB,
deletePackageDB,
invokeHcPkg,
registerPackage,
generateRegistrationInfo,
inplaceInstalledPackageInfo,
absoluteInstalledPackageInfo,
generalInstalledPackageInfo,
) where
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.BuildPaths
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.GHCJS as GHCJS
import qualified Distribution.Simple.LHC as LHC
import qualified Distribution.Simple.UHC as UHC
import qualified Distribution.Simple.HaskellSuite as HaskellSuite
import Distribution.Simple.Compiler
import Distribution.Simple.Program
import Distribution.Simple.Program.Script
import qualified Distribution.Simple.Program.HcPkg as HcPkg
import Distribution.Simple.Setup
import Distribution.PackageDescription
import Distribution.Package
import qualified Distribution.InstalledPackageInfo as IPI
import Distribution.InstalledPackageInfo (InstalledPackageInfo)
import Distribution.Simple.Utils
import Distribution.System
import Distribution.Text
import Distribution.Verbosity as Verbosity
import System.FilePath ((</>), (<.>), isAbsolute)
import System.Directory
( getCurrentDirectory, removeDirectoryRecursive, removeFile
, doesDirectoryExist, doesFileExist )
import Data.Version
import Control.Monad (when)
import Data.Maybe
( isJust, fromMaybe, maybeToList )
import Data.List
( partition, nub )
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
-- -----------------------------------------------------------------------------
-- Registration
register :: PackageDescription -> LocalBuildInfo
         -> RegisterFlags -- ^Install in the user's database?; verbose
         -> IO ()
-- Only packages with a library get registered; executables fall through to
-- the catch-all clause at the bottom.
register pkg@PackageDescription { library = Just lib } lbi regFlags
  = do
    let clbi = getComponentLocalBuildInfo lbi CLibName
    absPackageDBs <- absolutePackageDBPaths packageDbs
    installedPkgInfo <- generateRegistrationInfo
                           verbosity pkg lib lbi clbi inplace reloc distPref
                           (registrationPackageDB absPackageDBs)
    -- Optionally print the unit id before doing anything else.
    when (fromFlag (regPrintId regFlags)) $ do
      putStrLn (display (IPI.installedUnitId installedPkgInfo))
    -- Three different modes:
    case () of
     _ | modeGenerateRegFile -> writeRegistrationFile installedPkgInfo
       | modeGenerateRegScript -> writeRegisterScript installedPkgInfo
       | otherwise -> do
           setupMessage verbosity "Registering" (packageId pkg)
           registerPackage verbosity (compiler lbi) (withPrograms lbi) False
                           packageDbs installedPkgInfo
  where
    modeGenerateRegFile = isJust (flagToMaybe (regGenPkgConf regFlags))
    -- Target file for --gen-pkg-config; defaults to "<pkgid>.conf".
    regFile = fromMaybe (display (packageId pkg) <.> "conf")
                        (fromFlag (regGenPkgConf regFlags))
    modeGenerateRegScript = fromFlag (regGenScript regFlags)
    inplace = fromFlag (regInPlace regFlags)
    reloc = relocatable lbi
    -- FIXME: there's really no guarantee this will work.
    -- registering into a totally different db stack can
    -- fail if dependencies cannot be satisfied.
    packageDbs = nub $ withPackageDB lbi
                    ++ maybeToList (flagToMaybe (regPackageDB regFlags))
    distPref = fromFlag (regDistPref regFlags)
    verbosity = fromFlag (regVerbosity regFlags)
    writeRegistrationFile installedPkgInfo = do
      notice verbosity ("Creating package registration file: " ++ regFile)
      writeUTF8File regFile (IPI.showInstalledPackageInfo installedPkgInfo)
    -- Some compilers register via their package tool; others need no script.
    writeRegisterScript installedPkgInfo =
      case compilerFlavor (compiler lbi) of
        JHC -> notice verbosity "Registration scripts not needed for jhc"
        UHC -> notice verbosity "Registration scripts not needed for uhc"
        _   -> withHcPkg
               "Registration scripts are not implemented for this compiler"
               (compiler lbi) (withPrograms lbi)
               (writeHcPkgRegisterScript verbosity installedPkgInfo packageDbs)
-- Catch-all: no library means nothing to register.
register _ _ regFlags = notice verbosity "No package to register"
  where
    verbosity = fromFlag (regVerbosity regFlags)
-- | Compute the 'InstalledPackageInfo' to register, choosing between the
-- in-place (build tree), relocatable, and absolute (final install) layouts.
-- Also computes the library ABI hash where the compiler supports it
-- (GHC >= 6.11 and GHCJS); other compilers get an empty hash.
generateRegistrationInfo :: Verbosity
                         -> PackageDescription
                         -> Library
                         -> LocalBuildInfo
                         -> ComponentLocalBuildInfo
                         -> Bool        -- ^ register in-place (build tree)?
                         -> Bool        -- ^ relocatable package?
                         -> FilePath    -- ^ dist prefix
                         -> PackageDB
                         -> IO InstalledPackageInfo
generateRegistrationInfo verbosity pkg lib lbi clbi inplace reloc distPref packageDb = do
  --TODO: eliminate pwd!
  pwd <- getCurrentDirectory

  --TODO: the method of setting the UnitId is compiler specific
  --      this aspect should be delegated to a per-compiler helper.
  let comp = compiler lbi
  abi_hash <-
    case compilerFlavor comp of
     GHC | compilerVersion comp >= Version [6,11] [] -> do
            fmap AbiHash $ GHC.libAbiHash verbosity pkg lbi lib clbi
     GHCJS -> do
            fmap AbiHash $ GHCJS.libAbiHash verbosity pkg lbi lib clbi
     -- other compilers do not expose an ABI hash
     _ -> return (AbiHash "")

  installedPkgInfo <-
    if inplace
      -- in-place takes priority over relocatable
      then return (inplaceInstalledPackageInfo pwd distPref
                     pkg abi_hash lib lbi clbi)
      else if reloc
        then relocRegistrationInfo verbosity
                     pkg lib lbi clbi abi_hash packageDb
        else return (absoluteInstalledPackageInfo
                     pkg abi_hash lib lbi clbi)

  -- record the computed ABI hash in the result as well
  return installedPkgInfo{ IPI.abiHash = abi_hash }
-- | Registration info for a relocatable package.  Only implemented for
-- GHC, where the package root is obtained from the compiler.
relocRegistrationInfo :: Verbosity
                      -> PackageDescription
                      -> Library
                      -> LocalBuildInfo
                      -> ComponentLocalBuildInfo
                      -> AbiHash
                      -> PackageDB
                      -> IO InstalledPackageInfo
relocRegistrationInfo verbosity pkg lib lbi clbi abi_hash packageDb =
  case (compilerFlavor (compiler lbi)) of
    GHC -> do fs <- GHC.pkgRoot verbosity lbi packageDb
              return (relocatableInstalledPackageInfo
                        pkg abi_hash lib lbi clbi fs)
    _   -> die "Distribution.Simple.Register.relocRegistrationInfo: \
               \not implemented for this compiler"
-- | Create an empty package DB at the given location, without requesting
-- the compatibility-mode preference (see 'createPackageDB').
initPackageDB :: Verbosity -> Compiler -> ProgramConfiguration -> FilePath -> IO ()
initPackageDB verbosity comp progdb =
  createPackageDB verbosity comp progdb False
-- | Create an empty package DB at the specified location.
-- The @preferCompat@ flag is only honoured for GHC; the other
-- compilers always get @False@.
createPackageDB :: Verbosity -> Compiler -> ProgramConfiguration -> Bool
                -> FilePath -> IO ()
createPackageDB verbosity comp progdb preferCompat dbPath =
  case compilerFlavor comp of
    GHC            -> HcPkg.init (GHC.hcPkgInfo   progdb) verbosity preferCompat dbPath
    GHCJS          -> HcPkg.init (GHCJS.hcPkgInfo progdb) verbosity False dbPath
    LHC            -> HcPkg.init (LHC.hcPkgInfo   progdb) verbosity False dbPath
    -- UHC needs no separate package DB initialisation step
    UHC            -> return ()
    HaskellSuite _ -> HaskellSuite.initPackageDB verbosity progdb dbPath
    _              -> die $ "Distribution.Simple.Register.createPackageDB: "
                         ++ "not implemented for this compiler"
-- | Check whether a package DB exists at the given path, as either
-- a directory or a single file.
doesPackageDBExist :: FilePath -> IO Bool
doesPackageDBExist dbPath = do
  -- currently one impl for all compiler flavours, but could change if needed
  isDir <- doesDirectoryExist dbPath
  if isDir
    then return True
    else doesFileExist dbPath
-- | Remove a package DB at the given path, whether it is a directory
-- or a single file; do nothing if neither exists.
deletePackageDB :: FilePath -> IO ()
deletePackageDB dbPath = do
  -- currently one impl for all compiler flavours, but could change if needed
  isDir <- doesDirectoryExist dbPath
  if isDir
    then removeDirectoryRecursive dbPath
    else do isFile <- doesFileExist dbPath
            when isFile $ removeFile dbPath
-- | Run @hc-pkg@ using a given package DB stack, directly forwarding the
-- provided command-line arguments to it.
invokeHcPkg :: Verbosity -> Compiler -> ProgramConfiguration -> PackageDBStack
            -> [String] -> IO ()
invokeHcPkg verbosity comp conf dbStack extraArgs =
  withHcPkg "invokeHcPkg" comp conf $ \hcPkgInfo ->
    HcPkg.invoke hcPkgInfo verbosity dbStack extraArgs
-- | Run an action with the compiler's @hc-pkg@ tool information, or fail
-- with an informative error (prefixed by @name@) for compilers that have
-- no @hc-pkg@.
withHcPkg :: String -> Compiler -> ProgramConfiguration
          -> (HcPkg.HcPkgInfo -> IO a) -> IO a
withHcPkg name comp conf f =
  case compilerFlavor comp of
    GHC   -> f (GHC.hcPkgInfo conf)
    GHCJS -> f (GHCJS.hcPkgInfo conf)
    LHC   -> f (LHC.hcPkgInfo conf)
    -- Note: the original string gap swallowed the space after the colon,
    -- yielding "...:not implemented"; keep the space inside the first
    -- string fragment.
    _     -> die ("Distribution.Simple.Register." ++ name ++ ": \
                  \not implemented for this compiler")
-- | Register the package with the compiler, dispatching on compiler
-- flavour.  Multi-instance registration (several instances of the same
-- package version) is only supported by GHC and GHCJS.  Note the
-- wildcard-with-guard alternative below: it only fires when
-- @multiInstance@ is set; otherwise matching falls through to the
-- remaining flavours.
registerPackage :: Verbosity
                -> Compiler
                -> ProgramConfiguration
                -> Bool            -- ^ multi-instance registration?
                -> PackageDBStack
                -> InstalledPackageInfo
                -> IO ()
registerPackage verbosity comp progdb multiInstance packageDbs installedPkgInfo =
  case compilerFlavor comp of
    GHC   -> GHC.registerPackage   verbosity progdb multiInstance packageDbs installedPkgInfo
    GHCJS -> GHCJS.registerPackage verbosity progdb multiInstance packageDbs installedPkgInfo
    _ | multiInstance
          -> die "Registering multiple package instances is not yet supported for this compiler"
    LHC   -> LHC.registerPackage verbosity      progdb packageDbs installedPkgInfo
    UHC   -> UHC.registerPackage verbosity comp progdb packageDbs installedPkgInfo
    JHC   -> notice verbosity "Registering for jhc (nothing to do)"
    HaskellSuite {} ->
      HaskellSuite.registerPackage verbosity progdb packageDbs installedPkgInfo
    _     -> die "Registering is not implemented for this compiler"
-- | Write a shell\/batch script that (re)registers the package via
-- @hc-pkg@, and mark the script executable.  Note that the script is
-- generated with 'Verbosity.normal' regardless of the current verbosity.
writeHcPkgRegisterScript :: Verbosity
                         -> InstalledPackageInfo
                         -> PackageDBStack
                         -> HcPkg.HcPkgInfo
                         -> IO ()
writeHcPkgRegisterScript verbosity installedPkgInfo packageDbs hpi = do
  let invocation = HcPkg.reregisterInvocation hpi Verbosity.normal
                     packageDbs (Right installedPkgInfo)
      regScript  = invocationAsSystemScript buildOS invocation

  notice verbosity ("Creating package registration script: " ++ regScriptFileName)
  writeUTF8File regScriptFileName regScript
  setFileExecutable regScriptFileName

-- | Platform-dependent name of the generated registration script.
regScriptFileName :: FilePath
regScriptFileName = case buildOS of
                        Windows -> "register.bat"
                        _       -> "register.sh"
-- -----------------------------------------------------------------------------
-- Making the InstalledPackageInfo
-- | Construct 'InstalledPackageInfo' for a library in a package, given a set
-- of installation directories.
--
generalInstalledPackageInfo
  :: ([FilePath] -> [FilePath]) -- ^ Translate relative include dir paths to
                                -- absolute paths.
  -> PackageDescription
  -> AbiHash
  -> Library
  -> LocalBuildInfo
  -> ComponentLocalBuildInfo
  -> InstallDirs FilePath
  -> InstalledPackageInfo
generalInstalledPackageInfo adjustRelIncDirs pkg abi_hash lib lbi clbi installDirs =
  IPI.InstalledPackageInfo {
    IPI.sourcePackageId    = packageId pkg,
    IPI.installedUnitId    = componentUnitId clbi,
    IPI.compatPackageKey   = componentCompatPackageKey clbi,
    IPI.license            = license pkg,
    IPI.copyright          = copyright pkg,
    IPI.maintainer         = maintainer pkg,
    IPI.author             = author pkg,
    IPI.stability          = stability pkg,
    IPI.homepage           = homepage pkg,
    IPI.pkgUrl             = pkgUrl pkg,
    IPI.synopsis           = synopsis pkg,
    IPI.description        = description pkg,
    IPI.category           = category pkg,
    IPI.abiHash            = abi_hash,
    IPI.exposed            = libExposed lib,
    IPI.exposedModules     = componentExposedModules clbi,
    IPI.hiddenModules      = otherModules bi,
    IPI.trusted            = IPI.trusted IPI.emptyInstalledPackageInfo,
    -- only advertise an import dir when the library has modules at all
    IPI.importDirs         = [ libdir installDirs | hasModules ],
    -- Note. the libsubdir and datasubdir templates have already been expanded
    -- into libdir and datadir.
    IPI.libraryDirs        = if hasLibrary
                               then libdir installDirs : extraLibDirs bi
                               else extraLibDirs bi,
    IPI.dataDir            = datadir installDirs,
    IPI.hsLibraries        = if hasLibrary
                               then [getHSLibraryName (componentUnitId clbi)]
                               else [],
    IPI.extraLibraries     = extraLibs bi,
    IPI.extraGHCiLibraries = extraGHCiLibs bi,
    IPI.includeDirs        = absinc ++ adjustRelIncDirs relinc,
    IPI.includes           = includes bi,
    IPI.depends            = map fst (componentPackageDeps clbi),
    IPI.ccOptions          = [], -- Note. NOT ccOptions bi!
                                 -- We don't want cc-options to be propagated
                                 -- to C compilations in other packages.
    IPI.ldOptions          = ldOptions bi,
    IPI.frameworkDirs      = [],
    IPI.frameworks         = frameworks bi,
    IPI.haddockInterfaces  = [haddockdir installDirs </> haddockName pkg],
    IPI.haddockHTMLs       = [htmldir installDirs],
    IPI.pkgRoot            = Nothing
  }
  where
    bi = libBuildInfo lib
    -- relative include dirs need translating; absolute ones pass through
    (absinc, relinc) = partition isAbsolute (includeDirs bi)
    hasModules = not $ null (libModules lib)
    -- a real library archive exists if there are modules, C sources, or
    -- (for GHCJS) JavaScript sources
    hasLibrary = hasModules || not (null (cSources bi))
                            || (not (null (jsSources bi)) &&
                                compilerFlavor (compiler lbi) == GHCJS)
-- | Construct 'InstalledPackageInfo' for a library that is in place in the
-- build tree.
--
-- This function knows about the layout of in place packages.
--
inplaceInstalledPackageInfo :: FilePath -- ^ top of the build tree
                            -> FilePath -- ^ location of the dist tree
                            -> PackageDescription
                            -> AbiHash
                            -> Library
                            -> LocalBuildInfo
                            -> ComponentLocalBuildInfo
                            -> InstalledPackageInfo
inplaceInstalledPackageInfo inplaceDir distPref pkg abi_hash lib lbi clbi =
    generalInstalledPackageInfo adjustRelativeIncludeDirs
                                pkg abi_hash lib lbi clbi installDirs
  where
    -- in-place include dirs are relative to the top of the build tree
    adjustRelativeIncludeDirs = map (inplaceDir </>)
    -- the main library lives directly in buildDir; other library components
    -- get a subdirectory named after their unit id
    libTargetDir
      | componentUnitId clbi == localUnitId lbi = buildDir lbi
      | otherwise = buildDir lbi </> display (componentUnitId clbi)
    installDirs =
      (absoluteInstallDirs pkg lbi NoCopyDest) {
        libdir     = inplaceDir </> libTargetDir,
        datadir    = inplaceDir </> dataDir pkg,
        docdir     = inplaceDocdir,
        htmldir    = inplaceHtmldir,
        haddockdir = inplaceHtmldir
      }
    inplaceDocdir  = inplaceDir </> distPref </> "doc"
    inplaceHtmldir = inplaceDocdir </> "html" </> display (packageName pkg)
-- | Construct 'InstalledPackageInfo' for the final install location of a
-- library package.
--
-- This function knows about the layout of installed packages.
--
absoluteInstalledPackageInfo :: PackageDescription
                             -> AbiHash
                             -> Library
                             -> LocalBuildInfo
                             -> ComponentLocalBuildInfo
                             -> InstalledPackageInfo
absoluteInstalledPackageInfo pkg abi_hash lib lbi clbi =
    generalInstalledPackageInfo adjustRelativeIncludeDirs
                                pkg abi_hash lib lbi clbi installDirs
  where
    -- For installed packages we install all include files into one dir,
    -- whereas in the build tree they may live in multiple local dirs.
    adjustRelativeIncludeDirs _
      | null (installIncludes bi) = []
      | otherwise                 = [includedir installDirs]
    bi          = libBuildInfo lib
    installDirs = absoluteInstallDirs pkg lbi NoCopyDest
-- | Construct 'InstalledPackageInfo' for a relocatable install of a
-- library package: every install dir is re-expressed relative to
-- @${pkgroot}@.
relocatableInstalledPackageInfo :: PackageDescription
                                -> AbiHash
                                -> Library
                                -> LocalBuildInfo
                                -> ComponentLocalBuildInfo
                                -> FilePath
                                -> InstalledPackageInfo
relocatableInstalledPackageInfo pkg abi_hash lib lbi clbi pkgroot =
    generalInstalledPackageInfo adjustRelativeIncludeDirs
                                pkg abi_hash lib lbi clbi installDirs
  where
    -- For installed packages we install all include files into one dir,
    -- whereas in the build tree they may live in multiple local dirs.
    adjustRelativeIncludeDirs _
      | null (installIncludes bi) = []
      | otherwise                 = [includedir installDirs]
    bi          = libBuildInfo lib
    installDirs = fmap (("${pkgroot}" </>) . shortRelativePath pkgroot)
                $ absoluteInstallDirs pkg lbi NoCopyDest
-- -----------------------------------------------------------------------------
-- Unregistration

-- | Unregister the package from the chosen package DB, or (with
-- @--gen-script@) write a script that does so.  Only implemented via
-- @hc-pkg@ (GHC and GHCJS).
unregister :: PackageDescription -> LocalBuildInfo -> RegisterFlags -> IO ()
unregister pkg lbi regFlags = do
  let pkgid     = packageId pkg
      genScript = fromFlag (regGenScript regFlags)
      verbosity = fromFlag (regVerbosity regFlags)
      packageDb = fromFlagOrDefault (registrationPackageDB (withPackageDB lbi))
                                    (regPackageDB regFlags)
      unreg hpi =
        let invocation = HcPkg.unregisterInvocation
                           hpi Verbosity.normal packageDb pkgid
        in if genScript
             then writeFileAtomic unregScriptFileName
                    (BS.Char8.pack $ invocationAsSystemScript buildOS invocation)
             else runProgramInvocation verbosity invocation
  setupMessage verbosity "Unregistering" pkgid
  withHcPkg "unregistering is only implemented for GHC and GHCJS"
    (compiler lbi) (withPrograms lbi) unreg

-- | Platform-dependent name of the generated unregistration script.
unregScriptFileName :: FilePath
unregScriptFileName = case buildOS of
                          Windows -> "unregister.bat"
                          _       -> "unregister.sh"
|
edsko/cabal
|
Cabal/src/Distribution/Simple/Register.hs
|
bsd-3-clause
| 19,706
| 0
| 16
| 5,650
| 3,523
| 1,826
| 1,697
| 335
| 8
|
{-# LANGUAGE CPP #-}
{- |
Module : $Header$
Description : CMDL interface commands
Copyright : uni-bremen and DFKI
License : GPLv2 or higher, see LICENSE.txt
Maintainer : r.pascanu@jacobs-university.de
Stability : provisional
Portability : portable
CMDL.ProveCommands contains all commands (except prove\/consistency check)
related to prove mode
-}
module CMDL.ProveCommands
( cTranslate
, cDropTranslations
, cGoalsAxmGeneral
, cDoLoop
, cProve
, cDisprove
, cProveAll
, cSetUseThms
, cSetSave2File
, cEndScript
, cStartScript
, cTimeLimit
, cNotACommand
, cShowOutput
) where
import CMDL.DataTypes (CmdlState (intState), CmdlGoalAxiom (..),
CmdlListAction (..), ProveCmdType (..))
import CMDL.DataTypesUtils (add2hist, genMsgAndCode, genMessage,
genAddMessage, getIdComorphism)
import CMDL.DgCommands (selectANode)
import CMDL.ProveConsistency (doLoop, sigIntHandler)
import CMDL.Utils (checkIntString)
import Common.Result (Result (Result))
import Common.Utils (trim, splitOn)
import Data.List (find, nub)
import Data.Maybe (fromMaybe)
import Comorphisms.LogicGraph (lookupComorphism_in_LG)
import Proofs.AbstractState
import Logic.Comorphism (compComorphism, AnyComorphism (..))
import Logic.Logic (language_name)
import Control.Concurrent (forkIO)
import Control.Concurrent.MVar (newEmptyMVar, newMVar, takeMVar)
import Control.Monad (foldM)
#ifdef UNIX
import System.Posix.Signals (Handler (Catch), installHandler, sigINT)
#endif
import Interfaces.GenericATPState (ATPTacticScript (tsTimeLimit, tsExtraOpts))
import Interfaces.DataTypes (ListChange (..), IntIState (..), Int_NodeInfo (..),
UndoRedoElem (..), IntState (i_state))
-- | Drops any selected comorphism, resetting the selection back to the
-- identity comorphism of the currently selected elements.  The previous
-- comorphism is pushed onto the history so the action can be undone.
cDropTranslations :: CmdlState -> IO CmdlState
cDropTranslations state =
 case i_state $ intState state of
   Nothing -> return $ genMsgAndCode "Nothing selected" 1 state
   Just pS ->
     case cComorphism pS of
       -- no comorphism selected: nothing to drop
       Nothing -> return state
       Just _ -> return $
           add2hist [CComorphismChange $ cComorphism pS] $
             state {
               intState = (intState state) {
                 i_state = Just $ pS {
                   cComorphism = getIdComorphism $ elements pS } }
               }
-- | select comorphisms: composes each comorphism named in the input
-- (separated by whitespace, ':' or ';') onto the currently selected
-- comorphism, updating the state and the undo history after each step.
cTranslate :: String -> CmdlState -> IO CmdlState
cTranslate input state' =
  foldM (\ state c -> case i_state $ intState state of
    -- nothing selected !
    Nothing -> return $ genMsgAndCode "Nothing selected" 1 state
    Just pS -> case lookupComorphism_in_LG c of
      -- the name does not resolve to a comorphism in the logic graph
      Result m Nothing -> return $ genMsgAndCode (show m) 1 state
      Result _ (Just cm) ->
        case cComorphism pS of
          {- when selecting some theory the Id comorphism is automatically
          generated -}
          Nothing -> return $ genMsgAndCode "No theory selected" 1 state
          Just ocm ->
            -- compose the new comorphism onto the currently selected one
            case compComorphism ocm cm of
              Result _ Nothing ->
                return $ genMsgAndCode "Can not add comorphism" 1 state
              Result _ (Just smth) ->
                return $ genAddMessage [] ("Adding comorphism " ++
                            (\ (Comorphism c') -> language_name c') cm)
                  $ add2hist [CComorphismChange $ cComorphism pS] $
                      state {
                        intState = (intState state) {
                          i_state = Just pS {
                            cComorphism = Just smth } }
                        }) (genMessage "" "" state')
    -- split the input on whitespace and on ':' / ';' separators
    (concatMap (splitOn ';') $
     concatMap (splitOn ':') $
     words $ trim input)
-- | Worker for 'cGoalsAxmGeneral': applies the list action (set, add,
-- delete, ...) for the given goal/axiom names to each selected node,
-- accumulating the updated nodes and the matching undo-history entries.
parseElements :: CmdlListAction -> [String] -> CmdlGoalAxiom
              -> [Int_NodeInfo]
              -> ([Int_NodeInfo], [ListChange])
              -> ([Int_NodeInfo], [ListChange])
parseElements action gls gls_axm elems (acc1, acc2)
 = case elems of
    [] -> (acc1, acc2)
    Element st nb : ll ->
      let -- all goal/axiom names available in this node
          allgls = case gls_axm of
                    ChangeGoals -> map fst $ getGoals st
                    ChangeAxioms -> map fst $ getAxioms st
          -- the currently selected names in this node
          selgls = case gls_axm of
                    ChangeGoals -> selectedGoals st
                    ChangeAxioms -> includedAxioms st
          fn' x y = x == y
          -- membership predicate, inverted when deleting
          fn ks x = case find (fn' x) ks of
                     Just _ ->
                       case action of
                        ActionDel -> False
                        _ -> True
                     Nothing ->
                       case action of
                        ActionDel -> True
                        _ -> False
          -- the new selection resulting from the action
          gls' = case action of
                  ActionDelAll -> []
                  ActionDel -> filter (fn selgls) gls
                  ActionSetAll -> allgls
                  ActionSet -> filter (fn allgls) gls
                  ActionAdd -> nub $ selgls ++ filter (fn allgls) gls
          nwelm = case gls_axm of
                   ChangeGoals -> Element (st {selectedGoals = gls'}) nb
                   ChangeAxioms -> Element (st {includedAxioms = gls'}) nb
          -- record the old selection so the change can be undone
          hchg = case gls_axm of
                  ChangeGoals -> GoalsChange (selectedGoals st) nb
                  ChangeAxioms -> AxiomsChange (includedAxioms st) nb
       in parseElements action gls gls_axm ll (nwelm : acc1, hchg : acc2)
{- | A general function that implements the actions of setting,
adding or deleting goals or axioms from the selection list -}
cGoalsAxmGeneral :: CmdlListAction -> CmdlGoalAxiom ->
                    String -> CmdlState
                 -> IO CmdlState
cGoalsAxmGeneral action gls_axm input state
 = case i_state $ intState state of
    Nothing -> return $ genMsgAndCode "Nothing selected" 1 state
    Just pS ->
     case elements pS of
      [] -> return $ genMsgAndCode "Nothing selected" 1 state
      ls ->
       do
        -- the names to act upon are whitespace separated
        let gls = words input
        let (ls', hst) = parseElements action gls
                           gls_axm
                           ls ([], [])
        -- record the history entries and install the updated nodes
        return $ add2hist [ListChange hst] $
          state {
            intState = (intState state) {
              i_state = Just pS {
                elements = ls'
                }
              }
            }
-- | Shared driver for the prove/disprove commands: forks a worker thread
-- running 'doLoop' over the selected nodes, installs a SIGINT handler
-- (on Unix only) so the attempt can be interrupted, blocks until the
-- worker reports a result, and records the axiom/goal changes in the
-- undo history.
cDoLoop ::
  ProveCmdType
  -> CmdlState
  -> IO CmdlState
cDoLoop proveCmdType state
 = case i_state $ intState state of
    Nothing -> return $ genMsgAndCode "Nothing selected" 1 state
    Just pS ->
        case elements pS of
         [] -> return $ genMsgAndCode "Nothing selected" 1 state
         ls ->
           do
            -- create initial mVars to communicate
            miSt <- newMVar $ intState state
            mSt <- newMVar Nothing
            mThr <- newMVar Nothing
            mW <- newEmptyMVar
            -- fork
            thrID <- forkIO (doLoop miSt mThr mSt mW ls proveCmdType)
            -- install the handler that waits for SIG_INT
#ifdef UNIX
            oldHandler <- installHandler sigINT (Catch $
                     sigIntHandler mThr miSt mSt thrID mW (i_ln pS)
                     ) Nothing
#endif
            -- block and wait for answers
            answ <- takeMVar mW
#ifdef UNIX
            -- restore the previous SIGINT handler
            installHandler sigINT oldHandler Nothing
#endif
            let nwpS = fromMaybe pS (i_state answ)
                nwls = concatMap (\ (Element _ x) -> selectANode x nwpS) ls
                -- remember old axiom/goal selections for undo
                hst = concatMap (\ (Element stt x) ->
                                  [ AxiomsChange (includedAxioms stt) x
                                  , GoalsChange (selectedGoals stt) x ]) ls
            return $ add2hist [ListChange hst] $
                   state { intState = answ {
                             i_state = Just $ nwpS { elements = nwls }}}
{- | Proves only selected goals from all nodes using selected
axioms -}
cProve :: CmdlState -> IO CmdlState
cProve = cDoLoop Prove

-- | Disproves only selected goals from all nodes using selected axioms
cDisprove :: CmdlState -> IO CmdlState
cDisprove = cDoLoop Disprove
{- | Proves all goals in the nodes selected using all axioms and
theorems -}
cProveAll :: CmdlState -> IO CmdlState
cProveAll state
 = case i_state $ intState state of
    Nothing -> return $ genMsgAndCode "Nothing selected" 1 state
    Just pS ->
       case elements pS of
        [] -> return $ genMsgAndCode "Nothing selected" 1 state
        ls ->
          -- reset every node's selection so that all goals/axioms are
          -- used, then delegate to 'cProve'
          let ls' = map (\ (Element st nb) ->
                             Element (resetSelection st) nb) ls
              nwSt = add2hist [ListChange [NodesChange $ elements pS]] $
                       state {
                         intState = (intState state) {
                           i_state = Just $ pS { elements = ls' } } }
           in cProve nwSt
-- | Sets the use theorems flag of the interface
cSetUseThms :: Bool -> CmdlState -> IO CmdlState
cSetUseThms val state =
  return $ case i_state $ intState state of
    Nothing -> genMsgAndCode "Nothing selected" 1 state
    Just pS ->
      add2hist [UseThmChange $ useTheorems pS] $
        state { intState = (intState state)
                  { i_state = Just pS { useTheorems = val } } }
-- | Sets the flag controlling whether prover output is shown
cShowOutput :: Bool -> CmdlState -> IO CmdlState
cShowOutput b state =
  return $ case i_state $ intState state of
    Nothing -> genMsgAndCode "Nothing selected" 1 state
    Just pS ->
      add2hist [ChShowOutput $ showOutput pS] $
        state { intState = (intState state)
                  { i_state = Just pS { showOutput = b } } }
-- | Sets the save2File value to either true or false
cSetSave2File :: Bool -> CmdlState -> IO CmdlState
cSetSave2File val state =
  return $ case i_state $ intState state of
    Nothing -> genMsgAndCode "Nothing selected" 1 state
    Just ps ->
      add2hist [Save2FileChange $ save2file ps] $
        state { intState = (intState state)
                  { i_state = Just ps { save2file = val } } }
{- | The function is called every time the input could
not be parsed as a command -}
cNotACommand :: String -> CmdlState -> IO CmdlState
cNotACommand input state
 = case input of
    -- if input line is empty do nothing
    [] -> return state
    -- anything else: see if we are inside a block of commands (a script)
    s ->
      case i_state $ intState state of
        Nothing -> return $ genMsgAndCode ("Error on input line :" ++ s) 1 state
        Just pS ->
          if loadScript pS
            then
              do
                -- in script mode: append the line to the script's extra options
                let olds = script pS
                    oldextOpts = tsExtraOpts olds
                let nwSt = state {
                      intState = (intState state) {
                        i_state = Just pS {
                          script = olds { tsExtraOpts = s : oldextOpts }
                          } } }
                return $ add2hist [ScriptChange $ script pS] nwSt
            else return $ genMsgAndCode ("Error on input line :" ++ s) 1 state
-- | Function to signal the interface that the script has ended
cEndScript :: CmdlState -> IO CmdlState
cEndScript state
 = case i_state $ intState state of
    Nothing -> return $ genMsgAndCode "Nothing selected" 1 state
    Just ps ->
      if loadScript ps
        then
          do
            -- leave script mode and record the change for undo
            let nwSt = state {
                  intState = (intState state) {
                    i_state = Just ps {
                      loadScript = False } } }
            return $ add2hist [LoadScriptChange $ loadScript ps] nwSt
        else return $ genMsgAndCode "No previous call of begin-script" 1 state
{- | Function to signal the interface that a script starts so it should
not try to parse the input -}
cStartScript :: CmdlState -> IO CmdlState
cStartScript state
 = case i_state $ intState state of
    Nothing -> return $ genMsgAndCode "Nothing selected" 1 state
    Just ps ->
      -- record the old flag for undo, then enter script mode
      return $ add2hist [LoadScriptChange $ loadScript ps] $
        state {
          intState = (intState state) {
            i_state = Just ps {
              loadScript = True } } }
-- | Sets the time limit (in seconds) of the prover's tactic script
cTimeLimit :: String -> CmdlState -> IO CmdlState
cTimeLimit input state
 = case i_state $ intState state of
    Nothing -> return $ genMsgAndCode "Nothing selected" 1 state
    Just ps ->
      if checkIntString $ trim input
        then
          do
            -- NOTE(review): 'read' is partial; this relies on
            -- checkIntString accepting only valid integer strings - confirm
            let inpVal = read $ trim input
            let oldS = script ps
            return $ add2hist [ScriptChange $ script ps] $
              state {
                intState = (intState state) {
                  i_state = Just ps {
                    script = oldS { tsTimeLimit = inpVal } } } }
        else return $ genMsgAndCode "Please insert a number of seconds" 1 state
|
keithodulaigh/Hets
|
CMDL/ProveCommands.hs
|
gpl-2.0
| 12,902
| 0
| 30
| 4,702
| 3,170
| 1,638
| 1,532
| 261
| 13
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}
{-# LANGUAGE MultiWayIf #-}
-- |
-- Module : Yi.Buffer.HighLevel
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- High level operations on buffers.
module Yi.Buffer.HighLevel
( atEof
, atEol
, atLastLine
, atSol
, atSof
, bdeleteB
, bdeleteLineB
, bkillWordB
, botB
, bufInfoB
, BufferFileInfo (..)
, capitaliseWordB
, deleteBlankLinesB
, deleteHorizontalSpaceB
, deleteRegionWithStyleB
, deleteToEol
, deleteTrailingSpaceB
, downFromTosB
, downScreenB
, downScreensB
, exchangePointAndMarkB
, fillParagraph
, findMatchingPairB
, firstNonSpaceB
, flipRectangleB
, getBookmarkB
, getLineAndCol
, getLineAndColOfPoint
, getNextLineB
, getNextNonBlankLineB
, getRawestSelectRegionB
, getSelectionMarkPointB
, getSelectRegionB
, gotoCharacterB
, hasWhiteSpaceBefore
, incrementNextNumberByB
, insertRopeWithStyleB
, isCurrentLineAllWhiteSpaceB
, isCurrentLineEmptyB
, isNumberB
, killWordB
, lastNonSpaceB
, leftEdgesOfRegionB
, leftOnEol
, lineMoveVisRel
, linePrefixSelectionB
, lineStreamB
, lowercaseWordB
, middleB
, modifyExtendedSelectionB
, moveNonspaceOrSol
, movePercentageFileB
, moveToMTB
, moveToEol
, moveToSol
, moveXorEol
, moveXorSol
, nextCExc
, nextCInc
, nextCInLineExc
, nextCInLineInc
, nextNParagraphs
, nextWordB
, prevCExc
, prevCInc
, prevCInLineExc
, prevCInLineInc
, prevNParagraphs
, prevWordB
, readCurrentWordB
, readLnB
, readPrevWordB
, readRegionRopeWithStyleB
, replaceBufferContent
, revertB
, rightEdgesOfRegionB
, scrollB
, scrollCursorToBottomB
, scrollCursorToTopB
, scrollScreensB
, scrollToCursorB
, scrollToLineAboveWindowB
, scrollToLineBelowWindowB
, setSelectionMarkPointB
, setSelectRegionB
, shapeOfBlockRegionB
, sortLines
, sortLinesWithRegion
, snapInsB
, snapScreenB
, splitBlockRegionToContiguousSubRegionsB
, swapB
, switchCaseChar
, test3CharB
, testHexB
, toggleCommentB
, topB
, unLineCommentSelectionB
, upFromBosB
, uppercaseWordB
, upScreenB
, upScreensB
, vimScrollB
, vimScrollByB
, markWord
) where
import Control.Applicative (Applicative ((<*>)), (<$>))
import Control.Lens (assign, over, use, (%=), (.=))
import Control.Lens.Cons (_last)
import Control.Monad (forM, forM_, liftM, replicateM_, unless, void, when)
import Control.Monad.RWS.Strict (ask)
import Control.Monad.State (gets)
import Data.Char (isDigit, isHexDigit, isOctDigit, isSpace, isUpper, toLower, toUpper)
import Data.List (intersperse, sort)
import Data.Maybe (catMaybes, fromMaybe, listToMaybe)
import Data.Monoid (Monoid (mempty), (<>))
import qualified Data.Text as T (Text, toLower, toUpper, unpack)
import Data.Time (UTCTime)
import Data.Tuple (swap)
import Numeric (readHex, readOct, showHex, showOct)
import Yi.Buffer.Basic (Direction (..), Mark, Point (..), Size (Size))
import Yi.Buffer.Misc
import Yi.Buffer.Normal
import Yi.Buffer.Region
import Yi.Config.Misc (ScrollStyle (SingleLine))
import Yi.Rope (YiString)
import qualified Yi.Rope as R
import Yi.String (capitalizeFirst, fillText, isBlank, mapLines, onLines, overInit)
import Yi.Utils (SemiNum ((+~), (-~)))
import Yi.Window (Window (actualLines, width, wkey))
-- ---------------------------------------------------------------------
-- Movement operations

-- | Move point between the middle, top and bottom of the screen:
-- if the point is on the middle line it moves to the top,
-- else if it is on the top line it moves to the bottom,
-- otherwise it moves to the middle.
moveToMTB :: BufferM ()
moveToMTB = (==) <$> curLn <*> screenMidLn >>= \case
  True -> downFromTosB 0
  _ -> (==) <$> curLn <*> screenTopLn >>= \case
    True -> upFromBosB 0
    _ -> downFromTosB =<< (-) <$> screenMidLn <*> screenTopLn
-- | Move point to start of line
moveToSol :: BufferM ()
moveToSol = maybeMoveB Line Backward

-- | Move point to end of line
moveToEol :: BufferM ()
moveToEol = maybeMoveB Line Forward

-- | Move cursor to origin (point 0, the start of the buffer)
topB :: BufferM ()
topB = moveTo 0

-- | Move cursor to end of buffer
botB :: BufferM ()
botB = moveTo =<< sizeB

-- | Move left if on eol, but not on blank line
leftOnEol :: BufferM ()
-- @savingPrefCol@ is needed, because deep down @leftB@ contains @forgetPrefCol@
-- which messes up vertical cursor motion in Vim normal mode
leftOnEol = savingPrefCol $ do
  eol <- atEol
  sol <- atSol
  when (eol && not sol) leftB

-- | Move @x@ chars back, or to the sol, whichever is less
moveXorSol :: Int -> BufferM ()
moveXorSol x = replicateM_ x $ do c <- atSol; unless c leftB

-- | Move @x@ chars forward, or to the eol, whichever is less
moveXorEol :: Int -> BufferM ()
moveXorEol x = replicateM_ x $ do c <- atEol; unless c rightB

-- | Move to first char of next word forwards
nextWordB :: BufferM ()
nextWordB = moveB unitWord Forward

-- | Move to first char of next word backwards
prevWordB :: BufferM ()
prevWordB = moveB unitWord Backward
-- * Char-based movement actions.

-- | Move to the next (or previous, per @dir@) occurrence of @c@.  With
-- @stopAtLineBreaks@ the search is confined to the current line and the
-- point is restored if a newline is hit first.  With an 'Exclusive'
-- style the point stops one step short of the found character.
gotoCharacterB :: Char -> Direction -> RegionStyle -> Bool -> BufferM ()
gotoCharacterB c dir style stopAtLineBreaks = do
  start <- pointB
  let predicate = if stopAtLineBreaks then (`elem` [c, '\n']) else (== c)
      (move, moveBack) = if dir == Forward then (rightB, leftB) else (leftB, rightB)
  doUntilB_ (predicate <$> readB) move
  b <- readB
  if stopAtLineBreaks && b == '\n'
    -- hit a line break first: restore the original position
    then moveTo start
    else when (style == Exclusive && b == c) moveBack

-- | Move to the next occurrence of @c@
nextCInc :: Char -> BufferM ()
nextCInc c = gotoCharacterB c Forward Inclusive False

-- | Move to the next occurrence of @c@ within the current line
nextCInLineInc :: Char -> BufferM ()
nextCInLineInc c = gotoCharacterB c Forward Inclusive True

-- | Move to the character before the next occurrence of @c@
nextCExc :: Char -> BufferM ()
nextCExc c = gotoCharacterB c Forward Exclusive False

-- | Like 'nextCExc', but restricted to the current line
nextCInLineExc :: Char -> BufferM ()
nextCInLineExc c = gotoCharacterB c Forward Exclusive True

-- | Move to the previous occurrence of @c@
prevCInc :: Char -> BufferM ()
prevCInc c = gotoCharacterB c Backward Inclusive False

-- | Move to the previous occurrence of @c@ within the current line
prevCInLineInc :: Char -> BufferM ()
prevCInLineInc c = gotoCharacterB c Backward Inclusive True

-- | Move to the character after the previous occurrence of @c@
prevCExc :: Char -> BufferM ()
prevCExc c = gotoCharacterB c Backward Exclusive False

-- | Like 'prevCExc', but restricted to the current line
prevCInLineExc :: Char -> BufferM ()
prevCInLineExc c = gotoCharacterB c Backward Exclusive True
-- | Move to first non-space character in this line
firstNonSpaceB :: BufferM ()
firstNonSpaceB = do
  moveToSol
  untilB_ ((||) <$> atEol <*> ((not . isSpace) <$> readB)) rightB

-- | Move to the last non-space character in this line
lastNonSpaceB :: BufferM ()
lastNonSpaceB = do
  moveToEol
  untilB_ ((||) <$> atSol <*> ((not . isSpace) <$> readB)) leftB

-- | Go to the first non space character in the line;
-- if already there, then go to the beginning of the line.
moveNonspaceOrSol :: BufferM ()
moveNonspaceOrSol = do
  prev <- readPreviousOfLnB
  if R.all isSpace prev then moveToSol else firstNonSpaceB

-- | True if current line consists of just a newline (no whitespace)
isCurrentLineEmptyB :: BufferM Bool
isCurrentLineEmptyB = savingPointB $ moveToSol >> atEol

-- | True if the current line contains only whitespace characters.
-- Note: Returns False if line doesn't have any characters besides a newline
isCurrentLineAllWhiteSpaceB :: BufferM Bool
isCurrentLineAllWhiteSpaceB = savingPointB $ do
  isEmpty <- isCurrentLineEmptyB
  if isEmpty
    then return False
    else do
      -- walk the line; stop with False at the first non-space character
      let go = do
            eol <- atEol
            if eol
              then return True
              else do
                c <- readB
                if isSpace c
                  then rightB >> go
                  else return False
      moveToSol
      go
------------

-- | Move down by the next @n@ paragraphs.
nextNParagraphs :: Int -> BufferM ()
nextNParagraphs count = replicateM_ count (moveB unitEmacsParagraph Forward)

-- | Move up by the previous @n@ paragraphs.
prevNParagraphs :: Int -> BufferM ()
prevNParagraphs count = replicateM_ count (moveB unitEmacsParagraph Backward)
-- | Move to the nearest unmatched bracket in the given direction.
--
-- Examples:
-- @goUnmatchedB Backward '(' ')'@
-- Move to the previous unmatched '('
-- @goUnmatchedB Forward '{' '}'@
-- Move to the next unmatched '}'
goUnmatchedB :: Direction -> Char -> Char -> BufferM ()
goUnmatchedB dir cStart' cStop' = getLineAndCol >>= \position ->
    stepB >> readB >>= go position (0::Int)
  where
    -- Scan character by character, counting still-open brackets; stop
    -- on an unmatched closer, or restore the starting position when the
    -- scan runs into the start/end of the buffer.
    go pos opened c
      | c == cStop && opened == 0 = return ()
      | c == cStop                = goIfNotEofSof pos (opened-1)
      | c == cStart               = goIfNotEofSof pos (opened+1)
      | otherwise                 = goIfNotEofSof pos opened
    goIfNotEofSof pos opened = atEof >>= \eof -> atSof >>= \sof ->
      if not eof && not sof
        then stepB >> readB >>= go pos opened
        else gotoLn (fst pos) >> moveToColB (snd pos)
    -- When scanning backward the roles of open/close characters swap.
    (stepB, cStart, cStop) | dir == Forward = (rightB, cStart', cStop')
                           | otherwise      = (leftB, cStop', cStart')
-----------------------------------------------------------------------
-- Queries

-- | Return true if the current point is the start of a line
atSol :: BufferM Bool
atSol = atBoundaryB Line Backward

-- | Return true if the current point is the end of a line
atEol :: BufferM Bool
atEol = atBoundaryB Line Forward

-- | True if point at start of file
atSof :: BufferM Bool
atSof = atBoundaryB Document Backward

-- | True if point at end of file
atEof :: BufferM Bool
atEof = atBoundaryB Document Forward

-- | True if the point is on the last line (the end of that line
-- coincides with the end of the buffer).
atLastLine :: BufferM Bool
atLastLine = savingPointB $ do
  moveToEol
  (==) <$> sizeB <*> pointB

-- | Get the current line and column number
getLineAndCol :: BufferM (Int, Int)
getLineAndCol = (,) <$> curLn <*> curCol

-- | Get the line and column number at the given point.
getLineAndColOfPoint :: Point -> BufferM (Int, Int)
getLineAndColOfPoint p = savingPointB $ moveTo p >> getLineAndCol

-- | Read the line the point is on
readLnB :: BufferM YiString
readLnB = readUnitB Line

-- | Read from point to beginning of line
readPreviousOfLnB :: BufferM YiString
readPreviousOfLnB = readRegionB =<< regionOfPartB Line Backward

-- | True when the character immediately before point is whitespace.
hasWhiteSpaceBefore :: BufferM Bool
hasWhiteSpaceBefore = liftM isSpace (prevPointB >>= readAtB)

-- | Get the previous point, unless at the beginning of the file
prevPointB :: BufferM Point
prevPointB = do
  sof <- atSof
  if sof then pointB
         else do p <- pointB
                 return $ Point (fromPoint p - 1)

-- | Reads in word at point.
readCurrentWordB :: BufferM YiString
readCurrentWordB = readUnitB unitWord

-- | Reads in word before point.
readPrevWordB :: BufferM YiString
readPrevWordB = readPrevUnitB unitViWordOnLine
-------------------------
-- Deletes

-- | Delete one character backward
bdeleteB :: BufferM ()
bdeleteB = deleteB Character Backward

-- | Delete forward whitespace or non-whitespace depending on
-- the character under point.
killWordB :: BufferM ()
killWordB = deleteB unitWord Forward

-- | Delete backward whitespace or non-whitespace depending on
-- the character before point.
bkillWordB :: BufferM ()
bkillWordB = deleteB unitWord Backward

-- | Delete backward to the sof or the new line character
bdeleteLineB :: BufferM ()
bdeleteLineB = atSol >>= \sol -> if sol then bdeleteB else deleteB Line Backward

-- UnivArgument is in Yi.Keymap.Emacs.Utils but we can't import it due
-- to cyclic imports.
-- | emacs' @delete-horizontal-space@ with the optional argument.
deleteHorizontalSpaceB :: Maybe Int -> BufferM ()
deleteHorizontalSpaceB u = do
  c <- curCol
  reg <- regionOfB Line
  text <- readRegionB reg
  let (r, jb) = deleteSpaces c text
  modifyRegionB (const r) reg
  -- Jump backwards to where the now-deleted spaces have started so
  -- it's consistent and feels natural instead of leaving us somewhere
  -- in the text.
  moveToColB $ c - jb
  where
    -- Remove the spaces around column @c@: always those before the
    -- column, and also those after it unless an argument was supplied.
    -- Returns the cleaned line and the number of characters deleted
    -- before the column.
    deleteSpaces :: Int -> R.YiString -> (R.YiString, Int)
    deleteSpaces c l =
      let (f, b) = R.splitAt c l
          f' = R.dropWhileEnd isSpace f
          cleaned = f' <> case u of
            Nothing -> R.dropWhile isSpace b
            Just _ -> b
      -- We only want to jump back the number of spaces before the
      -- point, not the total number of characters we're removing.
      in (cleaned, R.length f - R.length f')
----------------------------------------
-- Transform operations

-- | Upper-case the word under the cursor.
uppercaseWordB :: BufferM ()
uppercaseWordB = transformB (R.withText T.toUpper) unitWord Forward

-- | Lower-case the word under the cursor.
lowercaseWordB :: BufferM ()
lowercaseWordB = transformB (R.withText T.toLower) unitWord Forward

-- | Capitalise the first letter of the word under the cursor.
capitaliseWordB :: BufferM ()
capitaliseWordB = transformB capitalizeFirst unitWord Forward
-- | Flip the case of a character: upper-case becomes lower-case,
-- anything else is upper-cased (characters without a case mapping are
-- left unchanged by 'toUpper').
switchCaseChar :: Char -> Char
switchCaseChar ch
  | isUpper ch = toLower ch
  | otherwise  = toUpper ch
-- | Delete to the end of line, excluding it.
deleteToEol :: BufferM ()
deleteToEol = deleteRegionB =<< regionOfPartB Line Forward

-- | Transpose two characters, (the Emacs C-t action)
swapB :: BufferM ()
swapB = do eol <- atEol
           -- Step off the line end first so the transpose acts on the
           -- last two characters of the line.
           when eol leftB
           transposeB Character Forward

-- | Delete trailing whitespace from all lines. Uses 'savingPositionB'
-- to get back to where it was.
deleteTrailingSpaceB :: BufferM ()
deleteTrailingSpaceB =
  regionOfB Document >>=
  savingPositionB . modifyRegionB (tru . mapLines stripEnd)
  where
    -- Strips the space from the end of each line, preserving
    -- newlines.
    stripEnd :: R.YiString -> R.YiString
    stripEnd x = case R.last x of
      Nothing -> x
      Just '\n' -> (`R.snoc` '\n') $ R.dropWhileEnd isSpace x
      _ -> R.dropWhileEnd isSpace x

    -- | Cut off trailing newlines, making sure to preserve one.
    tru :: R.YiString -> R.YiString
    tru x = if R.length x == 0
            then x
            else (`R.snoc` '\n') $ R.dropWhileEnd (== '\n') x
-- ----------------------------------------------------
-- | Marks

-- | Set the current buffer selection mark
setSelectionMarkPointB :: Point -> BufferM ()
setSelectionMarkPointB p = (.= p) . markPointA =<< selMark <$> askMarks

-- | Get the current buffer selection mark
getSelectionMarkPointB :: BufferM Point
getSelectionMarkPointB = use . markPointA =<< selMark <$> askMarks

-- | Exchange point & mark.
exchangePointAndMarkB :: BufferM ()
exchangePointAndMarkB = do m <- getSelectionMarkPointB
                           p <- pointB
                           setSelectionMarkPointB p
                           moveTo m

-- | Get the mark for the bookmark with the given name.
getBookmarkB :: String -> BufferM Mark
getBookmarkB = getMarkB . Just

-- ---------------------------------------------------------------------
-- Buffer operations

-- | A snapshot of buffer statistics, e.g. for mode-line display.
data BufferFileInfo =
     BufferFileInfo { bufInfoFileName :: FilePath -- ^ name identifying the buffer
                    , bufInfoSize :: Int          -- ^ buffer size in characters
                    , bufInfoLineNo :: Int        -- ^ current line number
                    , bufInfoColNo :: Int         -- ^ current column number
                    , bufInfoCharNo :: Point      -- ^ current point
                    , bufInfoPercent :: T.Text    -- ^ position as a percentage of the buffer
                    , bufInfoModified :: Bool     -- ^ whether the buffer has unsaved changes
                    }
-- | Collect the current buffer's file info: size in characters, line
-- and column numbers, char number, percentage and modified status.
bufInfoB :: BufferM BufferFileInfo
bufInfoB = do
  bufSize   <- sizeB
  point     <- pointB
  unchanged <- gets isUnchangedBuffer
  line      <- curLn
  col       <- curCol
  name      <- gets identString
  return BufferFileInfo
    { bufInfoFileName = T.unpack name
    , bufInfoSize     = fromIntegral bufSize
    , bufInfoLineNo   = line
    , bufInfoColNo    = col
    , bufInfoCharNo   = point
    , bufInfoPercent  = getPercent point bufSize
    , bufInfoModified = not unchanged
    }
-----------------------------
-- Window-related operations

-- | Scroll up by @n@ screens.
upScreensB :: Int -> BufferM ()
upScreensB = scrollScreensB . negate

-- | Scroll down by @n@ screens.
downScreensB :: Int -> BufferM ()
downScreensB = scrollScreensB

-- | Scroll up 1 screen
upScreenB :: BufferM ()
upScreenB = scrollScreensB (-1)

-- | Scroll down 1 screen
downScreenB :: BufferM ()
downScreenB = scrollScreensB 1

-- | Scroll by n screens (negative for up)
scrollScreensB :: Int -> BufferM ()
scrollScreensB n = do
  h <- askWindow actualLines
  scrollB $ n * max 0 (h - 1) -- subtract some amount to get some overlap (emacs-like).

-- | Same as scrollB, but also moves the cursor
vimScrollB :: Int -> BufferM ()
vimScrollB n = do scrollB n
                  void $ lineMoveRel n

-- | Same as scrollByB, but also moves the cursor
vimScrollByB :: (Int -> Int) -> Int -> BufferM ()
vimScrollByB f n = do h <- askWindow actualLines
                      vimScrollB $ n * f h

-- | Scroll so the cursor ends up on the middle line of the screen.
scrollToCursorB :: BufferM ()
scrollToCursorB = do
  MarkSet f i _ <- markLines
  h <- askWindow actualLines
  -- m is the line currently in the middle of the window.
  let m = f + (h `div` 2)
  scrollB $ i - m

-- | Move cursor to the top of the screen
scrollCursorToTopB :: BufferM ()
scrollCursorToTopB = do
  MarkSet f i _ <- markLines
  scrollB $ i - f

-- | Move cursor to the bottom of the screen
scrollCursorToBottomB :: BufferM ()
scrollCursorToBottomB = do
  MarkSet _ i _ <- markLines
  r <- winRegionB
  t <- lineOf (regionEnd r - 1)
  scrollB $ i - t

-- | Scroll by n lines.
scrollB :: Int -> BufferM ()
scrollB n = do
  MarkSet fr _ _ <- askMarks
  -- Move the "from" (window-top) mark by n lines without disturbing
  -- the point.
  savingPointB $ do
    moveTo =<< use (markPointA fr)
    void $ gotoLnFrom n
    (markPointA fr .=) =<< pointB
  w <- askWindow wkey
  -- NOTE(review): marks this window as True in the point-follows-window
  -- predicate while preserving other windows' flags — confirm the
  -- intended semantics against 'pointFollowsWindowA' users.
  (%=) pointFollowsWindowA (\old w' -> ((w == w') || old w'))
-- | Scroll so the line just above the window ends up at its bottom.
scrollToLineAboveWindowB :: BufferM ()
scrollToLineAboveWindowB = do
  downFromTosB 0
  lineUp -- was @replicateM_ 1 lineUp@; replicateM_ 1 f is just f
  scrollCursorToBottomB

-- | Scroll so the line just below the window ends up at its top.
scrollToLineBelowWindowB :: BufferM ()
scrollToLineBelowWindowB = do
  upFromBosB 0
  lineDown -- was @replicateM_ 1 lineDown@; replicateM_ 1 f is just f
  scrollCursorToTopB
-- | Move the point to inside the viewable region
snapInsB :: BufferM ()
snapInsB = do
  movePoint <- use pointFollowsWindowA
  w <- askWindow wkey
  when (movePoint w) $ do
    r <- winRegionB
    p <- pointB
    -- Clamp the point into the window's visible region.
    moveTo $ max (regionStart r) $ min (regionEnd r) p

-- | return index of Sol on line @n@ above current line
indexOfSolAbove :: Int -> BufferM Point
indexOfSolAbove n = pointAt $ gotoLnFrom (negate n)

-- | Position of a point relative to a region of the screen.
data RelPosition = Above | Below | Within
  deriving (Show)
-- | Return the position of the point @p@ relative to the region
-- delimited by the points @rs@ and @re@.
pointScreenRelPosition :: Point -> Point -> Point -> RelPosition
pointScreenRelPosition p rs re
  | rs > p && p > re = Within -- inverted region (rs > re): anything between is inside
  | p < rs           = Above
  | p > re           = Below
  -- Explicit catch-all guard instead of the previous warning-suppressing
  -- second equation; the function is now visibly total.
  | otherwise        = Within
-- | Move the visible region to include the point
snapScreenB :: Maybe ScrollStyle -> BufferM Bool
snapScreenB style = do
  movePoint <- use pointFollowsWindowA
  w <- askWindow wkey
  -- Nothing to do if the point already follows this window, or is
  -- already visible.
  if movePoint w then return False else do
    inWin <- pointInWindowB =<< pointB
    if inWin then return False else do
      h <- askWindow actualLines
      r <- winRegionB
      p <- pointB
      -- gap: how many lines above the point the new window top goes —
      -- 0 (point at top), h-1 (point at bottom) or h/2 (centred).
      let gap = case style of
            Just SingleLine -> case pointScreenRelPosition p (regionStart r) (regionEnd r) of
              Above  -> 0
              Below  -> h - 1
              Within -> 0 -- Impossible but handle it anyway
            _ -> h `div` 2
      i <- indexOfSolAbove gap
      f <- fromMark <$> askMarks
      markPointA f .= i
      return True

-- | Move to @n@ lines down from top of screen
downFromTosB :: Int -> BufferM ()
downFromTosB n = do
  moveTo =<< use . markPointA =<< fromMark <$> askMarks
  replicateM_ n lineDown

-- | Move to @n@ lines up from the bottom of the screen
upFromBosB :: Int -> BufferM ()
upFromBosB n = do
  r <- winRegionB
  moveTo (regionEnd r - 1)
  moveToSol
  replicateM_ n lineUp

-- | Move to middle line in screen
middleB :: BufferM ()
middleB = do
  w <- ask
  f <- fromMark <$> askMarks
  moveTo =<< use (markPointA f)
  replicateM_ (actualLines w `div` 2) lineDown

-- | Is the given point within the visible window region?
pointInWindowB :: Point -> BufferM Bool
pointInWindowB p = nearRegion p <$> winRegionB

-----------------------------
-- Region-related operations

-- | Return the region between point and mark
getRawestSelectRegionB :: BufferM Region
getRawestSelectRegionB = do
  m <- getSelectionMarkPointB
  p <- pointB
  return $ mkRegion p m

-- | Return the empty region if the selection is not visible.
getRawSelectRegionB :: BufferM Region
getRawSelectRegionB = do
  s <- use highlightSelectionA
  if s then getRawestSelectRegionB else do
    p <- pointB
    return $ mkRegion p p

-- | Get the current region boundaries. Extended to the current selection unit.
getSelectRegionB :: BufferM Region
getSelectRegionB = do
  regionStyle <- getRegionStyle
  r <- getRawSelectRegionB
  convertRegionToStyleB r regionStyle

-- | Select the given region: set the selection mark at the 'regionStart'
-- and the current point at the 'regionEnd'.
setSelectRegionB :: Region -> BufferM ()
setSelectRegionB region = do
  assign highlightSelectionA True
  setSelectionMarkPointB $ regionStart region
  moveTo $ regionEnd region
------------------------------------------
-- Some line related movements/operations

-- | If the current line is blank, delete the whole surrounding run of
-- blank lines.
deleteBlankLinesB :: BufferM ()
deleteBlankLinesB = do
  isThisBlank <- isBlank <$> readLnB
  when isThisBlank $ do
    p <- pointB
    -- go up to the 1st blank line in the group
    void $ whileB (R.null <$> getNextLineB Backward) lineUp
    q <- pointB
    -- delete the whole blank region.
    deleteRegionB $ mkRegion p q

-- | Get a (lazy) stream of lines in the buffer, starting at the /next/ line
-- in the given direction.
lineStreamB :: Direction -> BufferM [YiString]
lineStreamB dir = fmap rev . R.lines <$> (streamB dir =<< pointB)
  where
    -- Reverse each line back when streaming backward.
    rev = case dir of
      Forward -> id
      Backward -> R.reverse

-- | Get the next line of text in the given direction. This returns
-- simply 'Nothing' if there no such line.
getMaybeNextLineB :: Direction -> BufferM (Maybe YiString)
getMaybeNextLineB dir = listToMaybe <$> lineStreamB dir

-- | The same as 'getMaybeNextLineB' but avoids the use of the 'Maybe'
-- type in the return by returning the empty string if there is no
-- next line.
getNextLineB :: Direction -> BufferM YiString
getNextLineB dir = fromMaybe R.empty <$> getMaybeNextLineB dir

-- | Get closest line to the current line (not including the current
-- line) in the given direction which satisfies the given condition.
-- Returns 'Nothing' if there is no line which satisfies the
-- condition.
getNextLineWhichB :: Direction -> (YiString -> Bool) -> BufferM (Maybe YiString)
getNextLineWhichB dir cond = listToMaybe . filter cond <$> lineStreamB dir

-- | Returns the closest line to the current line which is non-blank,
-- in the given direction. Returns the empty string if there is no
-- such line (for example if we are on the top line already).
getNextNonBlankLineB :: Direction -> BufferM YiString
getNextNonBlankLineB dir =
  fromMaybe R.empty <$> getNextLineWhichB dir (not . R.null)
------------------------------------------------
-- Some more utility functions involving
-- regions (generally that which is selected)

-- | Apply a text transform to the current selection, extended to whole
-- units of the given kind.
modifyExtendedSelectionB :: TextUnit -> (R.YiString -> R.YiString) -> BufferM ()
modifyExtendedSelectionB unit transform
  = modifyRegionB transform =<< unitWiseRegion unit =<< getSelectRegionB

-- | Prefix each line in the selection using the given string.
linePrefixSelectionB :: R.YiString -- ^ The string that starts a line comment
                     -> BufferM ()
linePrefixSelectionB s =
  modifyExtendedSelectionB Line . overInit $ mapLines (s <>)

-- | Uncomments the selection using the given line comment
-- starting string. This only works for the comments which
-- begin at the start of the line.
unLineCommentSelectionB :: R.YiString -- ^ The string which begins a
                                      -- line comment
                        -> R.YiString -- ^ A potentially shorter
                                      -- string that begins a comment
                        -> BufferM ()
unLineCommentSelectionB s1 s2 =
  modifyExtendedSelectionB Line $ mapLines unCommentLine
  where
    (l1, l2) = (R.length s1, R.length s2)

    -- Strip the longer prefix when present, otherwise the shorter
    -- one, otherwise leave the line untouched.
    unCommentLine :: R.YiString -> R.YiString
    unCommentLine line = case (R.splitAt l1 line, R.splitAt l2 line) of
      ((f, s) , (f', s')) | s1 == f   -> s
                          | s2 == f'  -> s'
                          | otherwise -> line

-- | Just like 'toggleCommentSelectionB' but automatically inserts a
-- whitespace suffix to the inserted comment string. In fact:
toggleCommentB :: R.YiString -> BufferM ()
toggleCommentB c = toggleCommentSelectionB (c `R.snoc` ' ') c

-- | Toggle line comments in the selection by adding or removing a
-- prefix to each line.
toggleCommentSelectionB :: R.YiString -> R.YiString -> BufferM ()
toggleCommentSelectionB insPrefix delPrefix = do
  l <- readUnitB Line
  if delPrefix == R.take (R.length delPrefix) l
    then unLineCommentSelectionB insPrefix delPrefix
    else linePrefixSelectionB insPrefix

-- | Replace the contents of the buffer with some string
replaceBufferContent :: YiString -> BufferM ()
replaceBufferContent newvalue = do
  r <- regionOfB Document
  replaceRegionB r newvalue

-- | Fill the text in the region so it fits nicely 80 columns.
fillRegion :: Region -> BufferM ()
fillRegion = modifyRegionB (R.unlines . fillText 80)

-- | Fill the current paragraph so it fits in 80 columns.
fillParagraph :: BufferM ()
fillParagraph = fillRegion =<< regionOfB unitParagraph

-- | Sort the lines of the region.
sortLines :: BufferM ()
sortLines = modifyExtendedSelectionB Line (onLines sort)
-- | Forces an extra newline into the region (if one exists)
modifyExtendedLRegion :: Region -> (R.YiString -> R.YiString) -> BufferM ()
modifyExtendedLRegion region transform = do
    reg <- unitWiseRegion Line region
    modifyRegionB transform (fixR reg)
  where fixR reg = mkRegion (regionStart reg) $ regionEnd reg + 1

-- | Sort the lines covered by the given region.
sortLinesWithRegion :: Region -> BufferM ()
sortLinesWithRegion region = modifyExtendedLRegion region (onLines sort')
    where sort' [] = []
          sort' lns =
              -- Give the last line a newline while sorting so ordering
              -- is uniform, then strip it back off afterwards.
              if hasnl (last lns)
                  then sort lns
                  else over _last
                      -- should be completely safe since every element contains newline
                      (fromMaybe (error "sortLinesWithRegion fromMaybe") . R.init) . sort $
                          over _last (`R.snoc` '\n') lns
          hasnl t | R.last t == Just '\n' = True
                  | otherwise = False

-- | Helper function: revert the buffer contents to its on-disk version
revertB :: YiString -> Maybe R.ConverterName -> UTCTime -> BufferM ()
revertB s cn now = do
  r <- regionOfB Document
  replaceRegionB r s
  encodingConverterNameA .= cn
  markSavedB now
-- | Get lengths of the parts covered by a block region.
--
-- Consider a block region starting at 'o' and ending at 'z':
--
--      start
--        |
--       \|/
--   def foo(bar):
--       baz
--
--       ab
--       xyz0
--        /|\
--         |
--       finish
--
-- shapeOfBlockRegionB returns (regionStart, [2, 2, 0, 1, 2])
--
-- TODO: accept stickToEol flag
shapeOfBlockRegionB :: Region -> BufferM (Point, [Int])
shapeOfBlockRegionB reg = savingPointB $ do
  (l0, c0) <- getLineAndColOfPoint $ regionStart reg
  (l1, c1) <- getLineAndColOfPoint $ regionEnd reg
  let (left, top, bottom, right) = (min c0 c1, min l0 l1, max l0 l1, max c0 c1)
  lengths <- forM [top .. bottom] $ \l -> do
    void $ gotoLn l
    moveToColB left
    currentLeft <- curCol
    -- A line too short to reach the left column contributes nothing.
    if currentLeft /= left
      then return 0
      else do
        moveToColB right
        rightAtEol <- atEol
        leftOnEol
        currentRight <- curCol
        return $ if currentRight == 0 && rightAtEol
                 then 0
                 else currentRight - currentLeft + 1
  startingPoint <- pointOfLineColB top left
  return (startingPoint, lengths)
-- | Points of the left edge of each line touched by the region,
-- interpreted in the given region style.
leftEdgesOfRegionB :: RegionStyle -> Region -> BufferM [Point]
leftEdgesOfRegionB Block reg = savingPointB $ do
  (l0, _) <- getLineAndColOfPoint $ regionStart reg
  (l1, _) <- getLineAndColOfPoint $ regionEnd reg
  moveTo $ regionStart reg
  -- Lines that end before the block's left column are skipped (the
  -- relative line move lands on their end-of-line).
  fmap catMaybes $ forM [0 .. abs (l0 - l1)] $ \i -> savingPointB $ do
    void $ lineMoveRel i
    p <- pointB
    eol <- atEol
    return (if not eol then Just p else Nothing)
leftEdgesOfRegionB LineWise reg = savingPointB $ do
  lastSol <- do
    moveTo $ regionEnd reg
    moveToSol
    pointB
  -- Collect each line's start-of-line point until the last one.
  let go acc p = do moveTo p
                    moveToSol
                    edge <- pointB
                    if edge >= lastSol
                      then return $ reverse (edge:acc)
                      else do
                        void $ lineMoveRel 1
                        go (edge:acc) =<< pointB
  go [] (regionStart reg)
leftEdgesOfRegionB _ r = return [regionStart r]

-- | Points of the right edge of each line touched by the region,
-- interpreted in the given region style.
rightEdgesOfRegionB :: RegionStyle -> Region -> BufferM [Point]
rightEdgesOfRegionB Block reg = savingPointB $ do
  (l0, _) <- getLineAndColOfPoint $ regionStart reg
  (l1, _) <- getLineAndColOfPoint $ regionEnd reg
  moveTo $ 1 + regionEnd reg
  -- Walk upward from the last line, then reverse into document order.
  fmap reverse $ forM [0 .. abs (l0 - l1)] $ \i -> savingPointB $ do
    void $ lineMoveRel $ -i
    pointB
rightEdgesOfRegionB LineWise reg = savingPointB $ do
  lastEol <- do
    moveTo $ regionEnd reg
    moveToEol
    pointB
  -- Collect each line's end-of-line point until the last one.
  let go acc p = do moveTo p
                    moveToEol
                    edge <- pointB
                    if edge >= lastEol
                      then return $ reverse (edge:acc)
                      else do
                        void $ lineMoveRel 1
                        go (edge:acc) =<< pointB
  go [] (regionStart reg)
rightEdgesOfRegionB _ reg = savingPointB $ do
  moveTo $ regionEnd reg
  leftOnEol
  fmap return pointB
-- | Split a block region into one contiguous sub-region per line.
splitBlockRegionToContiguousSubRegionsB :: Region -> BufferM [Region]
splitBlockRegionToContiguousSubRegionsB reg = savingPointB $ do
  (start, lengths) <- shapeOfBlockRegionB reg
  moveTo start
  forM lengths $ \l -> do
    p0 <- pointB
    moveXorEol l
    p1 <- pointB
    let subRegion = mkRegion p0 p1
    moveTo p0
    void $ lineMoveRel 1
    return subRegion

-- | Delete the region interpreted in the given style, returning the
-- point where the deletion started.
deleteRegionWithStyleB :: Region -> RegionStyle -> BufferM Point
deleteRegionWithStyleB reg Block = savingPointB $ do
  (start, lengths) <- shapeOfBlockRegionB reg
  moveTo start
  -- Delete the covered span of each line, re-anchoring at the block
  -- start before dropping to the next line.
  forM_ (zip [1..] lengths) $ \(i, l) -> do
    deleteN l
    moveTo start
    lineMoveRel i
  return start
deleteRegionWithStyleB reg style = savingPointB $ do
  effectiveRegion <- convertRegionToStyleB reg style
  deleteRegionB effectiveRegion
  return $! regionStart effectiveRegion

-- | Read the region interpreted in the given style; for Block style the
-- per-line chunks are joined with newlines.
readRegionRopeWithStyleB :: Region -> RegionStyle -> BufferM YiString
readRegionRopeWithStyleB reg Block = savingPointB $ do
  (start, lengths) <- shapeOfBlockRegionB reg
  moveTo start
  chunks <- forM lengths $ \l ->
    if l == 0
      then lineMoveRel 1 >> return mempty
      else do
        p <- pointB
        r <- readRegionB $ mkRegion p (p +~ Size l)
        void $ lineMoveRel 1
        return r
  return $ R.intersperse '\n' chunks
readRegionRopeWithStyleB reg style = readRegionB =<< convertRegionToStyleB reg style
-- | Insert the rope at point, interpreted in the given region style.
insertRopeWithStyleB :: YiString -> RegionStyle -> BufferM ()
insertRopeWithStyleB rope Block = savingPointB $ do
  let ls = R.lines rope
      -- Step to the next line; when already on the buffer's last line,
      -- create a fresh line padded with spaces up to the current column.
      advanceLine = atLastLine >>= \case
        False -> void $ lineMoveRel 1
        True -> do
          col <- curCol
          moveToEol
          newlineB
          insertN $ R.replicateChar col ' '
  sequence_ $ intersperse advanceLine $ fmap (savingPointB . insertN) ls
insertRopeWithStyleB rope LineWise = do
  moveToSol
  savingPointB $ insertN rope
insertRopeWithStyleB rope _ = insertN rope

-- consider the following buffer content
--
--   123456789
--   qwertyuio
--   asdfgh
--
-- The following examples use characters from that buffer as points.
-- h' denotes the newline after h
--
--   1 r -> 4 q
--   9 q -> 1 o
--   q h -> y a
--   a o -> h' q
--   o a -> q h'
--   1 a -> 1 a
--
-- property: fmap swap (flipRectangleB a b) = flipRectangleB b a
flipRectangleB :: Point -> Point -> BufferM (Point, Point)
flipRectangleB p0 p1 = savingPointB $ do
  (_, c0) <- getLineAndColOfPoint p0
  (_, c1) <- getLineAndColOfPoint p1
  case compare c0 c1 of
    EQ -> return (p0, p1)
    GT -> swap <$> flipRectangleB p1 p0
    LT -> do
      -- now we know that c0 < c1
      moveTo p0
      moveXorEol $ c1 - c0
      flippedP0 <- pointB
      return (flippedP0, p1 -~ Size (c1 - c0))
-- | Move to the given percentage of the way through the file (clamped
-- to [0, 100]) and land on the first non-space character of that line.
movePercentageFileB :: Int -> BufferM ()
movePercentageFileB i = do
  let f :: Double
      f = case fromIntegral i / 100.0 of
        x | x > 1.0 -> 1.0
          | x < 0.0 -> 0.0 -- Impossible?
          | otherwise -> x
  lineCount <- lineCountB
  void $ gotoLn $ floor (fromIntegral lineCount * f)
  firstNonSpaceB

-- | Jump to the bracket matching (or unmatched-enclosing) the bracket
-- under point; restore the point when nothing matches.
findMatchingPairB :: BufferM ()
findMatchingPairB = do
    let go dir a b = goUnmatchedB dir a b >> return True
        goToMatch = do
          c <- readB
          case c of '(' -> go Forward '(' ')'
                    ')' -> go Backward '(' ')'
                    '{' -> go Forward '{' '}'
                    '}' -> go Backward '{' '}'
                    '[' -> go Forward '[' ']'
                    ']' -> go Backward '[' ']'
                    _   -> otherChar
        -- Not on a bracket: scan rightward within the line for one.
        otherChar = do eof <- atEof
                       eol <- atEol
                       if eof || eol
                           then return False
                           else rightB >> goToMatch
    p <- pointB
    foundMatch <- goToMatch
    unless foundMatch $ moveTo p
-- Vim numbers

-- | Increase (or decrease if negative) next number on line by n.
incrementNextNumberByB :: Int -> BufferM ()
incrementNextNumberByB n = do
  start <- pointB
  -- Back out of any number we are currently inside, then seek the
  -- start of the next one on the line.
  untilB_ (not <$> isNumberB) $ moveXorSol 1
  untilB_ isNumberB $ moveXorEol 1
  begin <- pointB
  beginIsEol <- atEol
  untilB_ (not <$> isNumberB) $ moveXorEol 1
  end <- pointB
  -- No number found before end of line: restore the original point.
  if beginIsEol then moveTo start
  else do modifyRegionB (increment n) (mkRegion begin end)
          moveXorSol 1

-- | Increment number in string by n.
--
-- NOTE(review): relies on 'head'/'readHex'/'readOct'/'read' succeeding —
-- the input is assumed to be a well-formed decimal, 0x- or 0o-prefixed
-- literal (as pre-selected by 'isNumberB'); anything else crashes.
increment :: Int -> R.YiString -> R.YiString
increment n l = R.fromString $ go (R.toString l)
  where
    go ('0':'x':xs) = (\ys -> '0':'x':ys) . (`showHex` "") . (+ n) . fst . head . readHex $ xs
    go ('0':'o':xs) = (\ys -> '0':'o':ys) . (`showOct` "") . (+ n) . fst . head . readOct $ xs
    go s = show . (+ n) . (\x -> read x :: Int) $ s
-- | Is character under cursor a number.
isNumberB :: BufferM Bool
isNumberB = do
  eol <- atEol
  sol <- atSol
  if sol then isDigit <$> readB
  else if eol then return False
  else test3CharB

-- | Used by isNumber to test if current character under cursor is a number.
-- Inspects the previous, current and next characters together so the
-- @0x@/@0o@ prefixes and a leading minus sign are recognised.
test3CharB :: BufferM Bool
test3CharB = do
  moveXorSol 1
  previous <- readB
  moveXorEol 2
  next <- readB
  moveXorSol 1
  current <- readB
  if | previous == '0' && current == 'o' && isOctDigit next -> return True -- octal format
     | previous == '0' && current == 'x' && isHexDigit next -> return True -- hex format
     | current == '-' && isDigit next -> return True -- negative numbers
     | isDigit current -> return True -- all decimal digits
     | isHexDigit current -> testHexB -- ['a'..'f'] for hex
     | otherwise -> return False
-- | Characters ['a'..'f'] are part of a hex number only if preceded by 0x.
-- Test if the current occurrence of ['a'..'f'] is part of a hex number.
testHexB :: BufferM Bool
testHexB = savingPointB $ do
  -- Walk left past the run of hex digits, then look at the two
  -- characters immediately before it.
  untilB_ (not . isHexDigit <$> readB) (moveXorSol 1)
  leftChar <- readB
  moveXorSol 1
  leftToLeftChar <- readB
  -- Return the comparison directly instead of the redundant
  -- @if c then return True else return False@.
  return (leftChar == 'x' && leftToLeftChar == '0')
-- | Move point down by @n@ lines
-- If line extends past width of window, count moving
-- a single line as moving width points to the right.
lineMoveVisRel :: Int -> BufferM ()
lineMoveVisRel = movingToPrefVisCol . lineMoveVisRelUp

-- NOTE(review): despite the name, the positive case here advances the
-- point toward the end of the buffer (gotoLnFrom 1) — confirm naming
-- against the callers.
lineMoveVisRelUp :: Int -> BufferM ()
lineMoveVisRelUp 0 = return ()
lineMoveVisRelUp n | n < 0 = lineMoveVisRelDown $ negate n
                   | otherwise = do
  wid <- width <$> use lastActiveWindowA
  col <- curCol
  len <- pointB >>= eolPointB >>= colOf
  -- jumps: visual rows remaining within the current buffer line.
  let jumps = (len `div` wid) - (col `div` wid)
      next = n - jumps
  if next <= 0
    then moveXorEol (n * wid)
    else do moveXorEol (jumps * wid)
            void $ gotoLnFrom 1
            lineMoveVisRelUp $ next - 1

lineMoveVisRelDown :: Int -> BufferM ()
lineMoveVisRelDown 0 = return ()
lineMoveVisRelDown n | n < 0 = lineMoveVisRelUp $ negate n
                     | otherwise = do
  wid <- width <$> use lastActiveWindowA
  col <- curCol
  -- jumps: visual rows of the current buffer line lying before the point.
  let jumps = col `div` wid
      next = n - jumps
  if next <= 0
    then leftN (n * wid)
    else do leftN (jumps * wid)
            void $ gotoLnFrom $ -1
            moveToEol
            lineMoveVisRelDown $ next - 1
-- | Implements the same logic that emacs' `mark-word` does.
-- Checks the mark point and moves it forth (or backward) for one word.
markWord :: BufferM ()
markWord = do
  curPos <- pointB
  curMark <- getSelectionMarkPointB
  isVisible <- getVisibleSelection
  savingPointB $ do
    if not isVisible
      then nextWordB
      else do
        -- Extend the existing selection by one word, on the side of
        -- the mark (away from the point).
        moveTo curMark
        if curMark < curPos
          then prevWordB
          else nextWordB
    setVisibleSelection True
    pointB >>= setSelectionMarkPointB
|
TOSPIO/yi
|
src/library/Yi/Buffer/HighLevel.hs
|
gpl-2.0
| 39,291
| 0
| 22
| 11,049
| 9,468
| 4,825
| 4,643
| 822
| 8
|
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad
import qualified Data.ByteString.Char8 as B8
import Data.List
import Data.Maybe
import Data.Monoid
import OpenSSL
import OpenSSL.EVP.Cipher
import OpenSSL.EVP.Open
import OpenSSL.EVP.PKey
import OpenSSL.EVP.Seal
import OpenSSL.PEM
import OpenSSL.RSA
import Text.Printf
-- | Envelope-encryption demo: generate an RSA keypair, seal a short
-- message with DES-CBC and the public key, then open it again and
-- print every intermediate value in hex.
main = withOpenSSL $
       do putStrLn "cipher: DES-CBC"
          -- fromJust: assumes libcrypto always knows "DES-CBC" —
          -- crashes here if the cipher is unavailable (fine for a demo).
          des <- liftM fromJust $ getCipherByName "DES-CBC"
          putStrLn "generating RSA keypair..."
          rsa <- generateRSAKey 512 65537 Nothing
          let plainText = "Hello, world!"
          B8.putStrLn ("plain text to encrypt: " `mappend` plainText)
          putStrLn ""
          putStrLn "encrypting..."
          -- sealBS presumably yields one encrypted session key per
          -- recipient key; we passed one, hence the single-element
          -- pattern — verify against the HsOpenSSL docs.
          (encrypted, [encKey], iv) <- sealBS des [fromPublicKey rsa] plainText
          B8.putStrLn ("encrypted symmetric key: " `mappend` binToHex encKey)
          B8.putStrLn ("IV: " `mappend` binToHex iv)
          B8.putStrLn ("encrypted message: " `mappend` binToHex encrypted)
          putStrLn ""
          putStrLn "decrypting..."
          let decrypted = openBS des encKey iv rsa encrypted
          B8.putStrLn ("decrypted message: " `mappend` decrypted)
-- | Render a binary string as colon-separated two-digit hex octets,
-- e.g. @"AB" -> "41:42"@.
binToHex :: B8.ByteString -> B8.ByteString
binToHex bs = B8.pack (intercalate ":" hexOctets)
  where
    hexOctets = map (printf "%02x" . fromEnum) (B8.unpack bs)
|
phonohawk/HsOpenSSL
|
examples/HelloWorld.hs
|
cc0-1.0
| 1,326
| 0
| 11
| 322
| 354
| 180
| 174
| 33
| 1
|
module Main (main) where
import Test.Framework (defaultMain)
import qualified Properties
import qualified UnitTests
-- | Gather the IO-backed unit tests, then run them together with the
-- property tests and the pure unit tests under one driver.
main :: IO ()
main = UnitTests.ioTests >>= \dynamicTests ->
  defaultMain (Properties.tests : UnitTests.tests : dynamicTests)
|
abbradar/aeson
|
tests/Tests.hs
|
bsd-3-clause
| 238
| 0
| 11
| 39
| 72
| 40
| 32
| 8
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
module API where
import Data.Dynamic
-- | A minimal two-field record exposed through the plugin API.
data Null = Null { a, b :: Int }
            deriving (Typeable, Show)

-- | A sample 'Null' value (shadows 'Prelude.null' within this module).
null :: Null
null = Null { a = 42 , b = 1 }
|
Changaco/haskell-plugins
|
testsuite/unloadAll/null/api/API.hs
|
lgpl-2.1
| 184
| 0
| 8
| 45
| 62
| 39
| 23
| 7
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Buffer.TextUnit
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- Working with blocks (units) of text.
--
module Yi.Buffer.TextUnit
( TextUnit(..)
, outsideUnit
, leftBoundaryUnit
, unitWord
, unitViWord
, unitViWORD
, unitViWordAnyBnd
, unitViWORDAnyBnd
, unitViWordOnLine
, unitViWORDOnLine
, unitDelimited
, unitSentence, unitEmacsParagraph, unitParagraph
, isAnySep, unitSep, unitSepThisLine, isWordChar
, moveB, maybeMoveB
, transformB, transposeB
, regionOfB, regionOfNonEmptyB, regionOfPartB
, regionWithTwoMovesB
, regionOfPartNonEmptyB, regionOfPartNonEmptyAtB
, readPrevUnitB, readUnitB
, untilB, doUntilB_, untilB_, whileB, doIfCharB
, atBoundaryB
, numberOfB
, deleteB, genMaybeMoveB
, genMoveB, BoundarySide(..), genAtBoundaryB
, checkPeekB
, halfUnit
, deleteUnitB
) where
import Control.Monad (void, when, (<=<))
import Data.Char (GeneralCategory (LineSeparator, ParagraphSeparator, Space),
generalCategory, isAlphaNum, isSeparator, isSpace)
import Data.Typeable (Typeable)
import Yi.Buffer.Basic (Direction (..), Point (Point), mayReverse, reverseDir)
import Yi.Buffer.Misc
import Yi.Buffer.Region
import Yi.Rope (YiString)
import qualified Yi.Rope as R (head, reverse, tail, toString)
-- | Designate a given "unit" of text.
data TextUnit = Character -- ^ a single character
              | Line      -- ^ a line of text (between newlines)
              | VLine     -- ^ a "vertical" line of text (area of text between two characters at the same column number)
              | Document  -- ^ the whole document
              | GenUnit {genEnclosingUnit :: TextUnit,
                         -- ^ unit that bounds the search for the boundary
                         genUnitBoundary :: Direction -> BufferM Bool}
                         -- ^ whether the point sits on this unit's boundary, looking in the given direction
              -- there could be more text units, like Page, Searched, etc. it's probably a good
              -- idea to use GenUnit though.
                deriving Typeable
-- | Turns a unit into its "negative" by inverting the boundaries. For
-- example, @outsideUnit unitViWord@ is the unit of spaces between
-- words. For units without boundaries ('Character', 'Document', ...),
-- this is the identity function.
outsideUnit :: TextUnit -> TextUnit
outsideUnit unit = case unit of
  GenUnit enclosing boundary -> GenUnit enclosing (boundary . reverseDir)
  other                      -> other -- for a lack of better definition
-- | Common boundary checking function: run the condition on @len@
-- characters in specified direction shifted by specified offset.
genBoundary :: Int -- ^ Offset from current position
            -> Int -- ^ Look-ahead
            -> (YiString -> Bool) -- ^ predicate
            -> Direction -- ^ Direction to look in
            -> BufferM Bool
genBoundary ofs len condition dir = condition <$> peekB
  where
    -- Fetch the @len@ characters under test, relative to the point;
    -- the offset is negated when looking backward.
    peekB = do
      Point p' <- pointB
      let pt@(Point p) = Point (p' + mayNegate ofs)
      case dir of
        Forward -> betweenB pt (Point $ max 0 p + len)
        -- NOTE(review): (Point $ p - len) can go negative near the
        -- start of the buffer — presumably 'betweenB' clamps; confirm.
        Backward -> R.reverse <$> betweenB (Point $ p - len) pt
    mayNegate = case dir of
      Forward -> id
      Backward -> negate
-- | a word as in use in Emacs (fundamental mode)
unitWord :: TextUnit
unitWord = GenUnit Document boundary
  where
    -- one character back: a word character followed by a non-word
    -- character, checked in the requested direction
    boundary = checkPeekB (-1) [isWordChar, not . isWordChar]
-- | delimited on the left and right by given characters, boolean
-- argument tells if whether those are included.
unitDelimited :: Char -> Char -> Bool -> TextUnit
unitDelimited left right included = GenUnit Document $ \direction ->
  case (included,direction) of
    (False, Backward) -> do
      -- if sitting on the left delimiter itself, step inside the
      -- delimited area first, then test for the delimiter behind us
      isCursorOnLeftChar <- (== left) <$> readB
      when isCursorOnLeftChar rightB
      checkPeekB 0 [(== left)] Backward
    (False, Forward) -> do
      -- NOTE(review): when the left delimiter immediately precedes the
      -- point AND we sit on the right delimiter (an empty pair), the
      -- point steps left before reporting a boundary — verify intent
      -- against callers before changing.
      isCursorOnRightChar <- (== right) <$> readB
      isTextUnitBlank <- checkPeekB 0 [(== left)] Backward
      if isTextUnitBlank && isCursorOnRightChar
        then leftB >> return True
        else return isCursorOnRightChar
    (True, Backward) -> checkPeekB 0 [(== left)] Forward
    (True, Forward)  -> rightB >> checkPeekB 0 [(== right)] Backward
-- | Word constituent: alphanumeric or underscore.
isWordChar :: Char -> Bool
isWordChar c = c == '_' || isAlphaNum c

-- | Is the character a newline?
isNl :: Char -> Bool
isNl c = c == '\n'

-- | Tells if a char can end a sentence ('.', '!', '?').
isEndOfSentence :: Char -> Bool
isEndOfSentence c = c `elem` ".!?"
-- | Verifies that the string matches all the predicates, pairwise. If
-- the string is "too small", then return 'False'. Note the length of
-- predicates has to be finite.
checks :: [Char -> Bool] -> YiString -> Bool
checks preds text = walk preds (R.toString text)
  where
    walk allPs str = case (allPs, str) of
      ([], _)          -> True   -- every predicate satisfied
      (_, [])          -> False  -- ran out of characters first
      (p:ps, c:cs)     -> p c && walk ps cs

-- | Probe @length conds@ characters at the given offset and check
-- them pairwise against @conds@.
checkPeekB :: Int -> [Char -> Bool] -> Direction -> BufferM Bool
checkPeekB offset conds dir = genBoundary offset (length conds) (checks conds) dir
-- | Helper that takes first two characters of YiString. Faster than
-- take 2 and string conversion.
firstTwo :: YiString -> Maybe (Char, Char)
firstTwo t = do
  c    <- R.head t
  rest <- R.tail t
  c'   <- R.head rest
  Just (c, c')
-- | At a vi word boundary: between two newlines (empty line), or at a
-- character-class transition not starting from whitespace.
atViWordBoundary :: (Char -> Int) -> Direction -> BufferM Bool
atViWordBoundary charType = genBoundary (-1) 2 (maybe True boundary . firstTwo)
  where
    boundary (c1, c2) =
         (isNl c1 && isNl c2) -- stop at empty lines
      || (not (isSpace c1) && charType c1 /= charType c2)

-- | At any vi word transition, including into/out of whitespace runs.
atAnyViWordBoundary :: (Char -> Int) -> Direction -> BufferM Bool
atAnyViWordBoundary charType = genBoundary (-1) 2 (maybe True boundary . firstTwo)
  where
    boundary (c1, c2) = isNl c1 || isNl c2 || charType c1 /= charType c2

-- | Like 'atViWordBoundary', but any newline also counts as a
-- boundary, confining motion to the current line.
atViWordBoundaryOnLine :: (Char -> Int) -> Direction -> BufferM Bool
atViWordBoundaryOnLine charType = genBoundary (-1) 2 (maybe True boundary . firstTwo)
  where
    boundary (c1, c2) =
      isNl c1 || isNl c2 || (not (isSpace c1) && charType c1 /= charType c2)
-- | A vi word: runs classified by 'viWordCharType'
-- (whitespace / word chars / other).
unitViWord :: TextUnit
unitViWord = GenUnit Document $ atViWordBoundary viWordCharType

-- | A vi WORD: runs classified by 'viWORDCharType'
-- (whitespace vs. everything else).
unitViWORD :: TextUnit
unitViWORD = GenUnit Document $ atViWordBoundary viWORDCharType

-- | As 'unitViWord', but whitespace runs also bound units.
unitViWordAnyBnd :: TextUnit
unitViWordAnyBnd = GenUnit Document $ atAnyViWordBoundary viWordCharType

-- | As 'unitViWORD', but whitespace runs also bound units.
unitViWORDAnyBnd :: TextUnit
unitViWORDAnyBnd = GenUnit Document $ atAnyViWordBoundary viWORDCharType

-- | As 'unitViWord', restricted to the current line.
unitViWordOnLine :: TextUnit
unitViWordOnLine = GenUnit Document $ atViWordBoundaryOnLine viWordCharType

-- | As 'unitViWORD', restricted to the current line.
unitViWORDOnLine :: TextUnit
unitViWORDOnLine = GenUnit Document $ atViWordBoundaryOnLine viWORDCharType
-- | Character class used for vi word motions:
-- 1 = whitespace, 2 = word constituent, 3 = anything else.
viWordCharType :: Char -> Int
viWordCharType c =
  if isSpace c
    then 1
    else if isWordChar c then 2 else 3
-- | Character class used for vi WORD motions:
-- 1 = whitespace, 2 = anything else.
viWORDCharType :: Char -> Int
viWORDCharType c = if isSpace c then 1 else 2
-- | Separator characters (space, tab, unicode separators). Most of
-- the units above attempt to identify "words" with various
-- punctuation and symbols included or excluded. This set of units is
-- a simple inverse: it is true for "whitespace" or "separators" and
-- false for anything that is not (letters, numbers, symbols,
-- punctuation, whatever).
isAnySep :: Char -> Bool
isAnySep c =
     isSeparator c
  || isSpace c
  || generalCategory c `elem` separatorCategories
  where
    separatorCategories = [Space, LineSeparator, ParagraphSeparator]
-- | Boundary test for separator units: a newline on either side, or a
-- transition between separator and non-separator characters.
atSepBoundary :: Direction -> BufferM Bool
atSepBoundary = genBoundary (-1) 2 (maybe True boundary . firstTwo)
  where
    boundary (c1, c2) = isNl c1 || isNl c2 || isAnySep c1 /= isAnySep c2
-- | unitSep is true for any kind of whitespace/separator
unitSep :: TextUnit
unitSep = GenUnit Document atSepBoundary

-- | unitSepThisLine is true for any kind of whitespace/separator on this line only
-- (same boundary test as 'unitSep', but enclosed in 'Line' rather than 'Document')
unitSepThisLine :: TextUnit
unitSepThisLine = GenUnit Line atSepBoundary
-- | Is the point at a @Unit@ boundary in the specified @Direction@?
atBoundary :: TextUnit -> Direction -> BufferM Bool
atBoundary Document Backward = (== 0) <$> pointB           -- start of buffer
atBoundary Document Forward  = (>=) <$> pointB <*> sizeB   -- at (or past) end of buffer
atBoundary Character _ = return True   -- every position bounds a character
atBoundary VLine _ = return True -- a fallacy; this needs a little refactoring.
atBoundary Line direction = checkPeekB 0 [isNl] direction
atBoundary (GenUnit _ atBound) dir = atBound dir
-- | The unit enclosing the given unit: the declared enclosing unit
-- for 'GenUnit', 'Document' for every primitive unit.
enclosingUnit :: TextUnit -> TextUnit
enclosingUnit unit = case unit of
  GenUnit enclosing _ -> enclosing
  _                   -> Document
-- | Is the point at a boundary of the unit, or of any of its
-- (transitively) enclosing units?
atBoundaryB :: TextUnit -> Direction -> BufferM Bool
atBoundaryB Document d = atBoundary Document d
-- recursion terminates: 'enclosingUnit' eventually yields 'Document'
atBoundaryB u d = (||) <$> atBoundary u d <*> atBoundaryB (enclosingUnit u) d
-- | Paragraph to implement emacs-like forward-paragraph/backward-paragraph
unitEmacsParagraph :: TextUnit
unitEmacsParagraph = GenUnit Document $ checkPeekB (-2) [not . isNl, isNl, isNl]

-- | Paragraph that begins and ends in the paragraph, not the empty lines surrounding it.
unitParagraph :: TextUnit
unitParagraph = GenUnit Document $ checkPeekB (-1) [not . isNl, isNl, isNl]

-- | A sentence: bounded by sentence-ending punctuation followed by a
-- space (order reversed for backward motion); enclosed in
-- 'unitEmacsParagraph'.
unitSentence :: TextUnit
unitSentence = GenUnit unitEmacsParagraph $ \dir -> checkPeekB (if dir == Forward then -1 else 0) (mayReverse dir [isEndOfSentence, isSpace]) dir
-- | Unit that have its left and right boundaries at the left boundary of the argument unit.
-- (The direction argument is ignored: both sides test the Backward boundary.)
leftBoundaryUnit :: TextUnit -> TextUnit
leftBoundaryUnit u = GenUnit Document (\_dir -> atBoundaryB u Backward)
-- | @genAtBoundaryB u d s@ returns whether the point is at a given boundary @(d,s)@ .
-- Boundary @(d,s)@ , taking Word as example, means:
--    Word
--   ^^  ^^
--   12  34
-- 1: (Backward,OutsideBound)
-- 2: (Backward,InsideBound)
-- 3: (Forward,InsideBound)
-- 4: (Forward,OutsideBound)
--
-- rules:
-- genAtBoundaryB u Backward InsideBound = atBoundaryB u Backward
-- genAtBoundaryB u Forward OutsideBound = atBoundaryB u Forward
genAtBoundaryB :: TextUnit -> Direction -> BoundarySide -> BufferM Bool
genAtBoundaryB u d s = withOffset (off u d s) $ atBoundaryB u d
    where withOffset 0 f = f
          -- temporarily shift the point by @ofs@, run the test, restore
          withOffset ofs f = savingPointB (((ofs +) <$> pointB) >>= moveTo >> f)
          -- offset that reduces an Inside/Outside query to 'atBoundaryB'
          -- (see the rules above; the unit argument is unused)
          off _ Backward InsideBound = 0
          off _ Backward OutsideBound = 1
          off _ Forward InsideBound = 1
          off _ Forward OutsideBound = 0
-- | How many @unit@-moves fit inside the @containingUnit@ around the
-- point.  The point is restored afterwards ('savingPointB').
numberOfB :: TextUnit -> TextUnit -> BufferM Int
numberOfB unit containingUnit = savingPointB $ do
  maybeMoveB containingUnit Backward
  start <- pointB
  moveB containingUnit Forward
  end <- pointB
  moveTo start
  -- count forward unit-moves until the point passes the container end
  length <$> untilB ((>= end) <$> pointB) (moveB unit Forward)
-- | Repeat an action while the condition holds (dual of 'untilB');
-- the action may run zero times.
whileB :: BufferM Bool -> BufferM a -> BufferM [a]
whileB cond = untilB (not <$> cond)

-- | Repeat an action until the condition is fulfilled or the cursor
-- stops moving. The Action may be performed zero times.
untilB :: BufferM Bool -> BufferM a -> BufferM [a]
untilB cond f = do
  stop <- cond
  if stop then return [] else doUntilB cond f
-- | Repeat an action until the condition is fulfilled or the cursor
-- stops moving. The Action is performed at least once.
doUntilB :: BufferM Bool -> BufferM a -> BufferM [a]
doUntilB cond f = loop
  where loop = do
          p <- pointB
          x <- f
          p' <- pointB
          stop <- cond
          -- keep iterating only while the action still moves the point
          -- (guards against infinite loops at buffer edges) and the
          -- stop condition has not yet been met
          (x:) <$> if p /= p' && not stop
                     then loop
                     else return []

-- | 'doUntilB' with the results discarded.
doUntilB_ :: BufferM Bool -> BufferM a -> BufferM ()
doUntilB_ cond f = void (doUntilB cond f) -- maybe do an optimized version?

-- | 'untilB' with the results discarded.
untilB_ :: BufferM Bool -> BufferM a -> BufferM ()
untilB_ cond f = void (untilB cond f) -- maybe do an optimized version?
-- | Do an action if the current buffer character passes the predicate
doIfCharB :: (Char -> Bool) -> BufferM a -> BufferM ()
doIfCharB p action = do
  c <- readB
  when (p c) (void action)
-- | Boundary side: whether the position is just inside or just
-- outside the unit (see 'genAtBoundaryB' for the picture).
data BoundarySide = InsideBound | OutsideBound
    deriving Eq
-- | Generic move operation
-- Warning: moving To the (OutsideBound, Backward) bound of Document is impossible (offset -1!)
-- @genMoveB u b d@: move in direction d until encountering boundary b or unit u. See 'genAtBoundaryB' for boundary explanation.
genMoveB :: TextUnit -> (Direction, BoundarySide) -> Direction -> BufferM ()
-- optimized special cases for the whole document
genMoveB Document (Forward,InsideBound) Forward = moveTo =<< subtract 1 <$> sizeB
genMoveB Document _ Forward = moveTo =<< sizeB
genMoveB Document _ Backward = moveTo 0 -- impossible to go outside beginning of doc.
genMoveB Character _ Forward = rightB
genMoveB Character _ Backward = leftB
genMoveB VLine _ Forward = do
  ofs <- lineMoveRel 1
  -- on the last line lineMoveRel cannot advance a whole line; fall
  -- back to moving to the end of the current line
  when (ofs < 1) (maybeMoveB Line Forward)
genMoveB VLine _ Backward = lineUp
-- general case: step one character at a time until the boundary
genMoveB unit (boundDir, boundSide) moveDir =
  doUntilB_ (genAtBoundaryB unit boundDir boundSide) (moveB Character moveDir)
-- | Generic maybe move operation.
-- As genMoveB, but don't move if we are at boundary already.
genMaybeMoveB :: TextUnit -> (Direction, BoundarySide) -> Direction -> BufferM ()
-- optimized case for Document
genMaybeMoveB Document boundSpec moveDir = genMoveB Document boundSpec moveDir
-- optimized case for start/end of Line
genMaybeMoveB Line (Backward, InsideBound) Backward = moveTo =<< solPointB =<< pointB
genMaybeMoveB Line (Forward, OutsideBound) Forward = moveTo =<< eolPointB =<< pointB
-- general case: 'untilB_' (unlike 'doUntilB_') checks before the
-- first step, so an already-at-boundary point stays put
genMaybeMoveB unit (boundDir, boundSide) moveDir =
  untilB_ (genAtBoundaryB unit boundDir boundSide) (moveB Character moveDir)
-- | Move to the next unit boundary
moveB :: TextUnit -> Direction -> BufferM ()
moveB u d = genMoveB u (d, sideFor d) d
  where
    sideFor Forward  = OutsideBound
    sideFor Backward = InsideBound

-- | As 'moveB', unless the point is at a unit boundary
-- So for example here moveToEol = maybeMoveB Line Forward;
-- in that it will move to the end of current line and nowhere if we
-- are already at the end of the current line. Similarly for moveToSol.
maybeMoveB :: TextUnit -> Direction -> BufferM ()
maybeMoveB u d = genMaybeMoveB u (d, sideFor d) d
  where
    sideFor Forward  = OutsideBound
    sideFor Backward = InsideBound
-- | Swap the unit on the near side of the point with the next unit in
-- the given direction, leaving the point after the swapped pair.
transposeB :: TextUnit -> Direction -> BufferM ()
transposeB unit direction = do
  -- delimit the unit behind the point (relative to @direction@)...
  moveB unit (reverseDir direction)
  w0 <- pointB
  moveB unit direction
  w0' <- pointB
  -- ...and the following unit
  moveB unit direction
  w1' <- pointB
  moveB unit (reverseDir direction)
  w1 <- pointB
  swapRegionsB (mkRegion w0 w0') (mkRegion w1 w1')
  moveTo w1'
-- | Transforms the region given by 'TextUnit' in the 'Direction' with
-- user-supplied function.
transformB :: (YiString -> YiString) -> TextUnit -> Direction -> BufferM ()
transformB f unit direction = do
  p <- pointB
  moveB unit direction
  q <- pointB
  let r = mkRegion p q
  -- read, transform, and write the region back in place
  replaceRegionB r =<< f <$> readRegionB r
-- | Delete between point and next unit boundary.
deleteB :: TextUnit -> Direction -> BufferM ()
deleteB unit dir = regionOfPartNonEmptyB unit dir >>= deleteRegionB
-- | Region delimited by the points reached by two moves, performed in
-- order; the point is restored afterwards ('savingPointB').
regionWithTwoMovesB :: BufferM a -> BufferM b -> BufferM Region
regionWithTwoMovesB move1 move2 =
    savingPointB $ mkRegion <$> (move1 >> pointB) <*> (move2 >> pointB)
-- | Region of the whole textunit where the current point is.
regionOfB :: TextUnit -> BufferM Region
regionOfB unit = regionWithTwoMovesB (maybeMoveB unit Backward) (maybeMoveB unit Forward)
-- An alternate definition would be the following, but it can return two units if the current point is between them.
-- eg. "word1 ^ word2" would return both words.
-- regionOfB unit = mkRegion
--                  <$> pointAfter (maybeMoveB unit Backward)
--                  <*> destinationOfMoveB (maybeMoveB unit Forward)
-- | Non empty region of the whole textunit where the current point is.
-- (Uses 'moveB' forward, so the forward edge always advances at least one boundary.)
regionOfNonEmptyB :: TextUnit -> BufferM Region
regionOfNonEmptyB unit = savingPointB $
  mkRegion <$> (maybeMoveB unit Backward >> pointB) <*> (moveB unit Forward >> pointB)
-- | Region between the point and the next boundary.
-- The region is empty if the point is at the boundary.
regionOfPartB :: TextUnit -> Direction -> BufferM Region
regionOfPartB unit dir = mkRegion <$> pointB <*> destinationOfMoveB (maybeMoveB unit dir)

-- | Non empty region between the point and the next boundary,
-- In fact the region can be empty if we are at the end of file.
regionOfPartNonEmptyB :: TextUnit -> Direction -> BufferM Region
regionOfPartNonEmptyB unit dir = mkRegion <$> pointB <*> destinationOfMoveB (moveB unit dir)
-- | Non empty region between the given point and the next boundary;
-- the current point is saved and restored around the computation.
regionOfPartNonEmptyAtB :: TextUnit -> Direction -> Point -> BufferM Region
regionOfPartNonEmptyAtB unit dir p = do
  saved <- pointB
  moveTo p
  region <- regionOfPartNonEmptyB unit dir
  moveTo saved
  return region
-- | Read the content of the unit ending at the point.
readPrevUnitB :: TextUnit -> BufferM YiString
readPrevUnitB unit = regionOfPartNonEmptyB unit Backward >>= readRegionB

-- | Read the content of the unit the point is in.
readUnitB :: TextUnit -> BufferM YiString
readUnitB unit = regionOfB unit >>= readRegionB
-- | Restrict a 'GenUnit' boundary test to one direction: queries in
-- the other direction never report a boundary.  Non-generic units are
-- returned unchanged.
halfUnit :: Direction -> TextUnit -> TextUnit
halfUnit dir unit = case unit of
  GenUnit enclosing boundary ->
    GenUnit enclosing $ \d -> if d == dir then boundary d else return False
  other -> other
-- | Delete the non-empty part of the unit from the point to the next boundary.
deleteUnitB :: TextUnit -> Direction -> BufferM ()
deleteUnitB unit dir = regionOfPartNonEmptyB unit dir >>= deleteRegionB
|
formrre/yi
|
yi-core/src/Yi/Buffer/TextUnit.hs
|
gpl-2.0
| 17,293
| 0
| 16
| 3,821
| 4,174
| 2,185
| 1,989
| 278
| 5
|
{-# LANGUAGE OverloadedStrings #-}
import Network.Wai.Handler.Warp
import Control.Exception (bracket)
import Control.Monad (forever)
import Network (sClose)
import Network.Socket (accept)
import Control.Monad.IO.Class (liftIO)
import qualified Data.Enumerator as E
import qualified Data.Enumerator.Binary as EB
import Control.Concurrent (forkIO)
import Network.Wai (responseLBS)
import Network.HTTP.Types (status200)
import Data.Enumerator (($$), run_)
-- | WAI application: ignore the request and answer with a fixed plain-text body.
app _request = return $ responseLBS status200 [("Content-type", "text/plain")] "This is not kept alive under any circumstances"
-- | Accept connections in a loop, serving each on its own thread with
-- NO HTTP keep-alive: the socket is closed after a single response.
main = withManager 30000000 $ \man -> bracket
    (bindPort (settingsPort set) (settingsHost set))
    sClose
    (\socket -> forever $ do
        (conn, sa) <- accept socket
        -- timeout handle; NOTE(review): presumably kills the worker
        -- thread when the timeout manager fires — confirm against the
        -- warp timeout-manager API of this era
        th <- liftIO $ registerKillThread man
        _ <- forkIO $ do
            run_ $ enumSocket th 4096 conn $$ do
                -- pause the timeout while parsing, resume before running the app
                liftIO $ pause th
                (len, env) <- parseRequest (settingsPort set) sa
                liftIO $ resume th
                -- feed exactly @len@ request-body bytes to the application
                res <- E.joinI $ EB.isolate len $$ app env
                _ <- liftIO $ sendResponse th env conn res
                -- no keep-alive: close the connection unconditionally
                liftIO $ sClose conn
            return ()
        )
    where
        set = defaultSettings
|
erikd/wai
|
warp/attic/server-no-keepalive.hs
|
mit
| 1,224
| 0
| 24
| 313
| 380
| 202
| 178
| 30
| 1
|
{-
Attr: Mackerel attribute semantics
Part of Mackerel: a strawman device definition DSL for Barrelfish
Copyright (c) 2007, 2008, ETH Zurich.
All rights reserved.
This file is distributed under the terms in the attached LICENSE file.
If you do not find this file, copies can be found by writing to:
ETH Zurich D-INFK, Haldeneggsteig 4, CH-8092 Zurich. Attn: Systems Group.
-}
module Attr where
-- | Access attributes of a register field.
data Attr = RO      -- Read Only
          | WO      -- Write Only
          | RC      -- Read Only, Read Clears
          | ROS     -- Read Only Sticky
          | RW      -- Read Write
          | RWC     -- Read Write (1 to) Clear
          | RWZC    -- Read Write Zero to Clear
          | RWO     -- Write once
          | RWCS    -- R/W clear sticky
          | RWS     -- Read Write Sticky
          | RWL     -- Read Write locked
          | MBZ     -- Must be Zero
          | MB1     -- Must be one
          | RSVD    -- Reserved
          | NOATTR  -- Default No attribute
            deriving (Show, Eq)

-- User can reasonably read from this register
attr_user_can_read :: Attr -> Bool
attr_user_can_read a = a `notElem` [WO, MBZ, MB1, RSVD]

-- User can reasonably write to this register
attr_user_can_write :: Attr -> Bool
attr_user_can_write a = a `notElem` [RO, RC, ROS, MBZ, MB1, RSVD]

-- The field accepts writes at all (NOATTR and read-only flavours do not)
attr_is_writeable :: Attr -> Bool
attr_is_writeable a = a `elem` [WO, RW, RWC, RWZC, RWO, RWCS, RWS, RWL]

-- The field can be read at all (NOATTR and write-only flavours cannot)
attr_is_readable :: Attr -> Bool
attr_is_readable a = a `elem` [RO, RC, ROS, RW, RWC, RWZC, RWO, RWCS, RWS, RWL]

-- The field is strictly write-only
attr_is_writeonly :: Attr -> Bool
attr_is_writeonly a = a == WO

-- Field must always be written with a value read from the register.
attr_preserve_on_write :: Attr -> Bool
attr_preserve_on_write a = a == RSVD

-- Field can be preserved by reading from the register
attr_can_init_from_reg :: Attr -> Bool
attr_can_init_from_reg a = a `elem` [RW, RSVD, RWS, RWL]

-- Field must always be written as zero
attr_zero_before_write :: Attr -> Bool
attr_zero_before_write a = a == MBZ

-- Field must always be written as one
attr_set_before_write :: Attr -> Bool
attr_set_before_write a = a == MB1
|
daleooo/barrelfish
|
tools/mackerel/Attr.hs
|
mit
| 3,770
| 0
| 6
| 828
| 711
| 380
| 331
| 94
| 1
|
{-# LANGUAGE TypeFamilies #-}
module T13915a_Foo where
-- | An open data family indexed by @a@; instances are supplied per index type.
data family T a
-- | Family instance at 'Int' with a single nullary constructor.
data instance T Int = MkT
|
ezyang/ghc
|
testsuite/tests/typecheck/should_compile/T13915a_Foo.hs
|
bsd-3-clause
| 98
| 0
| 5
| 18
| 22
| 14
| 8
| 4
| 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.