code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE PatternGuards #-}
-- | Simply-typed Curry-style (nominal) lambda-calculus
-- with integers and zero-comparison
-- Type inference, of the type and of the environment, aka
-- conditional typechecking
-- This code does not in general infer polymorphic bindings as this is akin
-- to higher-order unification.
--
-- The benefit of the approach: we can handle _open_ terms.
-- Some of them we type check, and some of them we reject. The rejection means
-- the term cannot be typed in _any_ type environment.
--
-- One often hears a complaint against typing: one can evaluate
-- terms we can't even type check. This code shows that we can type check
-- terms we can't even evaluate.
--
-- We cannot evaluate open terms at all, but we can type check them,
-- inferring both the type as well as the _requirement_
-- on the environments in which the term must be used.
--
-- <http://okmij.org/ftp/Computation/Computation.html#teval>
--
module Language.TEval.TInfTEnv where
import qualified Data.IntMap as M
-- | Object-language types: integers, (strict-field) function arrows, and
-- type variables standing for not-yet-determined types.
data Typ = TInt | !Typ :> !Typ | TV TVarName deriving (Show, Eq)
infixr 9 :>

-- | Type variables are named by integers, allocated sequentially by 'newtv'.
type TVarName = Int
-- | Terms of the untyped (Curry-style) lambda-calculus with integers.
data Term = V VarName            -- ^ variable
          | L VarName Term       -- ^ lambda abstraction
          | A Term Term          -- ^ application
          | I Int                -- ^ integer literal
          | Term :+ Term         -- addition
          | IFZ Term Term Term   -- if zero
          | Fix Term             -- fix f, where f :: (a->b)->(a->b)
            deriving (Show, Eq)
infixl 9 `A`

-- | Term variables are named by strings.
type VarName = String
-- | Type Environment: associating types with `free' term variables
type TEnv = [(VarName, Typ)]

-- | The empty type environment.
env0 :: TEnv
env0 = []
-- | Look up the type of a variable; an unbound variable is a fatal error.
lkup :: TEnv -> VarName -> Typ
lkup env x =
    case lookup x env of
      Just t  -> t
      Nothing -> error ("Unbound variable " ++ x)
-- | Extend an environment with a new binding (shadowing any older one).
ext :: TEnv -> (VarName,Typ) -> TEnv
ext = flip (:)
-- | Remove the most recent binding of a variable; error if it is absent.
unext :: TEnv -> VarName -> TEnv
unext env v
    | (before, _ : after) <- break ((== v) . fst) env = before ++ after
    | otherwise = error ("No variable " ++ v)
-- | Apply a type transformation to every binding in the environment.
env_map :: (Typ->Typ) -> TEnv -> TEnv
env_map f = map (fmap f)   -- fmap over a pair maps its second component
-- | Merge two environment, using the given function to resolve the conflicts,
-- if any
-- A binding of env2 whose variable already occurs in env1 is resolved
-- through @f@ (threading the seed); fresh bindings are simply added.
env_fold_merge :: TEnv -> TEnv ->
                  (Typ -> Typ -> seed -> Either err seed) ->
                  seed -> Either err (TEnv,seed)
env_fold_merge env1 env2 f seed = foldr folder (Right (env1,seed)) env2
  where
    -- once a resolution has failed, propagate the failure unchanged
    folder _ s@(Left _) = s
    folder (x,t2) (Right (env1,seed)) | Just t1 <- lookup x env1 =
        case f t1 t2 seed of
          Right seed -> Right (env1,seed)
          Left err   -> Left err
    folder xt2 (Right (env1,seed)) = Right (ext env1 xt2,seed)
-- | Type Variable Environment: associating types with `free' type variables
-- The Int component is the next fresh type-variable name.
data TVE = TVE Int (M.IntMap Typ) deriving Show

-- | Allocate a fresh type variable (see the first component of TVE)
newtv :: TVE -> (Typ,TVE)
newtv (TVE n s) = (TV n,TVE (succ n) s)

-- | The initial, empty type-variable environment.
tve0 :: TVE
tve0 = TVE 0 M.empty

-- | Look up the binding of a type variable, if any.
tvlkup :: TVE -> TVarName -> Maybe Typ
tvlkup (TVE _ s) v = M.lookup v s

-- | Bind a type variable (overwriting any previous binding).
tvext :: TVE -> (TVarName,Typ) -> TVE
tvext (TVE c s) (tv,t) = TVE c $ M.insert tv t s
-- | Type variables are logic variables: hypothetical reasoning
-- Deeply substitute the bindings of all type variables occurring in a type.
tvsub :: TVE -> Typ -> Typ
tvsub tve (t1 :> t2) = tvsub tve t1 :> tvsub tve t2
tvsub tve (TV v) | Just t <- tvlkup tve v = tvsub tve t
tvsub tve t = t

-- | `shallow' substitution; check if tv is bound to anything `substantial'
tvchase :: TVE -> Typ -> Typ
tvchase tve (TV v) | Just t <- tvlkup tve v = tvchase tve t
tvchase _ t = t
-- | The unification. If unification failed, return the reason
unify :: Typ -> Typ -> TVE -> Either String TVE
unify t1 t2 tve = unify' (tvchase tve t1) (tvchase tve t2) tve

-- | If either t1 or t2 are type variables, they are definitely unbound
-- (both arguments have been chased through the substitution).
unify' :: Typ -> Typ -> TVE -> Either String TVE
unify' TInt TInt = Right
-- arrows unify componentwise, threading the substitution left to right
unify' (t1a :> t1r) (t2a :> t2r) = either Left (unify t1r t2r) . unify t1a t2a
unify' (TV v1) t2 = unifyv v1 t2
unify' t1 (TV v2) = unifyv v2 t1
unify' t1 t2 = const (Left $ unwords ["constant mismatch:",show t1,"and",
                                      show t2])
-- | Unify a free variable v1 with t2
unifyv :: TVarName -> Typ -> TVE -> Either String TVE
unifyv v1 (TV v2) tve =
    if v1 == v2 then Right tve
       else Right (tvext tve (v1,TV v2)) -- record new constraint
unifyv v1 t2 tve = if occurs v1 t2 tve
                      -- binding v1 to a type containing v1 would build an
                      -- infinite type, so reject it
                      then Left $ unwords ["occurs check:",show (TV v1),
                                           "in",show $ tvsub tve t2]
                      else Right (tvext tve (v1,t2))
-- | The occurs check: does the variable v appear free in the type t
-- (after chasing bound type variables)?
occurs :: TVarName -> Typ -> TVE -> Bool
occurs v t tve =
    case t of
      TInt     -> False
      t1 :> t2 -> occurs v t1 tve || occurs v t2 tve
      TV v2    -> maybe (v == v2) (\t' -> occurs v t' tve) (tvlkup tve v2)
-- | Merge two term-variable environments, unifying the types of variables
-- bound in both; a unification failure is fatal.
merge_env :: TEnv -> TEnv -> (TVE -> (TEnv,TVE))
merge_env env1 env2 tve =
    either error id $ env_fold_merge env1 env2 unify tve
-- | Type reconstruction: abstract evaluation
-- Returns the inferred type of the term, the inferred requirement on the
-- environment (types demanded of the term's free variables), and the
-- updated type-variable state.
teval' :: Term -> (TVE -> (Typ,TEnv,TVE))
teval' (V x) = \tve0 ->
    -- a free variable gets a fresh type, recorded as an environment demand
    let (tv,tve1) = newtv tve0
        env1 = ext env0 (x,tv)
    in (tv,env1,tve1)
teval' (L x e) = \tve0 ->
    let (tv,tve1) = newtv tve0
        env1 = ext env0 (x,tv)
        (te,env2,tve2) = teval' e tve1
        -- unify the demands the body places on x with the fresh binding
        (env3,tve3) = merge_env env2 env1 tve2
        env4 = unext env3 x   -- x is bound here, so no longer free outside
    in (tv :> te,env4,tve3)
teval' (A e1 e2) = \tve0 ->
    let (t1,env1,tve1) = teval' e1 tve0
        (t2,env2,tve2) = teval' e2 tve1
        (env3,tve3) = merge_env env1 env2 tve2
        (t1r,tve4) = newtv tve3   -- fresh variable for the result type
    in case unify t1 (t2 :> t1r) tve4 of
         Right tve -> (t1r,env3,tve)
         Left err -> error $ err
teval' (I n) = \tve0 -> (TInt,env0,tve0)
teval' (e1 :+ e2) = \tve0 ->
    let (t1,env1,tve1) = teval' e1 tve0
        (t2,env2,tve2) = teval' e2 tve1
        (env3,tve3) = merge_env env1 env2 tve2
    in case either Left (unify t2 TInt) . unify t1 TInt $ tve3 of
         Right tve -> (TInt,env3,tve)
         Left err -> error $ "Trying to add non-integers: " ++ err
teval' (IFZ e1 e2 e3) = \tve0 ->
    let (t1,env1,tve1) = teval' e1 tve0
        (t2,env2,tve2) = teval' e2 tve1
        (t3,env3,tve3) = teval' e3 tve2
        -- all three subterms must agree on the environment they demand
        (env4,tve4) = merge_env env1 env2 tve3
        (env5,tve5) = merge_env env4 env3 tve4
    in case unify t1 TInt tve5 of
         Right tve ->
             -- both branches must have the same type
             case unify t2 t3 tve of
               Right tve -> (t2,env5,tve)
               Left err -> error $ unwords ["Branches of IFZ have different",
                                            "types. Unification failed:",err]
         Left err -> error $ "Trying to compare a non-integer to 0: " ++ err
teval' (Fix e) = \tve0 ->
    let (t,env,tve1) = teval' e tve0
        (ta,tve2) = newtv tve1
        (tb,tve3) = newtv tve2
    -- fix's argument must map a function type to the same function type
    in case unify t ((ta :> tb) :> (ta :> tb)) tve3 of
         Right tve -> (ta :> tb,env,tve)
         Left err -> error $ "Inappropriate type in Fix: "++err
-- | Resolve all type variables, as far as possible
-- Top-level entry point: infer the type and the required environment,
-- then apply the final substitution to both.
teval :: Term -> (Typ,TEnv)
teval e = let (t,env,tve) = teval' e tve0
          in (tvsub tve t, env_map (tvsub tve) env)
-- Tests
-- Expected results are quoted in the trailing comments.

(vx,vy) = (V "x",V "y")

test0 = teval' ((L "x" (vx :+ (I 2))) `A` (I 1)) tve0
-- (TV 2,[],TVE 3 (fromList [(0,TInt),(1,TInt),(2,TInt)]))

term1 = L "x" (IFZ vx (I 1) (vx :+ (I 2)))
test10 = teval' term1 tve0
-- (TV 0 :> TInt,[],TVE 3 (fromList [(0,TInt),(1,TInt),(2,TInt)]))
-- need a better presentation of the final result: cf. teval' and teval
test1 = teval term1 -- TInt :> TInt

termid = L "x" vx
testid = teval termid -- (TV 0 :> TV 0,[])

term2a = L "x" (L "y" (vx `A` vy))
test2a = teval term2a
-- ((TV 1 :> TV 4) :> (TV 1 :> TV 4),[])
-- Used to be hidden problem. The main benefit of types: static approximation
-- of program behavior
-- The terms with unbound variables no longer fail the type check.
-- We infer both the type and the environment
term3 = L "x" (IFZ vx (I 1) vy)
test3 = teval term3
-- (TInt :> TInt,[("y",TInt)])

-- That term is ill-typed in any environment...
term4 = L "x" (IFZ vx (I 1) (vx `A` (I 1)))
test4 = teval term4
-- compare the error message with that of test4a and test4b in TEvalNC.hs

term6 = (L "x" (I 1)) `A` vy
test61 = teval term6
-- (TInt,[("y",TV 1)])

test62 = teval $ (I 1) :+ vx
-- (TInt,[("x",TInt)])

-- But some terms still fail type-checking: no environment could
-- be found to make the term typeable
test63 = teval $ IFZ (vx `A` (I 1)) vx (I 2)
-- *** Exception: Branches of IFZ have different types.
-- Unification failed: constant mismatch: TInt :> TV 1 and TInt

-- Here two branches of the conditional make inconsistent assumptions
-- about the environment
test64 = teval $ IFZ (I 1) (vx `A` (I 1)) (vx :+ (I 2))
-- *** Exception: constant mismatch: TInt :> TV 1 and TInt
-- Multiplication via metalanguage (Haskell-level) recursion: inference
-- still terminates because the term is a regular (cyclic) structure.
tmul1 = L "x" (L "y"
          (IFZ vx (I 0)
               ((tmul1 `A` (vx :+ (I (-1))) `A` vy) :+ vy)))
testm1 = teval tmul1 -- is typechecking really decidable?

-- Can termY be typechecked?
delta = L "y" (vy `A` vy)
testd = teval delta

-- Multiplication via the object-language Fix combinator.
tmul = Fix (L "self" (L "x" (L "y"
         (IFZ vx (I 0)
              (((V "self") `A` (vx :+ (I (-1))) `A` vy) :+ vy)))))
testm21' = teval' tmul tve0
-- (TV 5 :> TV 6,TVE 7 (fromList
--  [(0,TInt :> TV 3),(1,TInt),(2,TInt),(3,TV 2 :> TV 4),
--   (4,TInt),(5,TInt),(6,TV 2 :> TV 4)]))
testm21 = teval tmul -- TInt :> (TInt :> TInt)
testm22 = teval (tmul `A` (I 2)) -- TInt :> TInt
testm23 = teval (tmul `A` (I 2) `A` (I 3)) -- TInt
testm24 = teval (tmul `A` (I (-1)) `A` (I (-1))) -- TInt

-- using the metalanguage definition of termid: `top-level let'
term2id = L "f" (L "y"
            ((I 2) :+
             ((termid `A` (V "f")) `A` ((termid `A` vy) :+ (I 1)))))
test2id = teval term2id -- (TInt :> TInt) :> (TInt :> TInt)

-- using the metalanguage let
termlet = let c2 = L "f" (L "x" (V "f" `A` (V "f" `A` vx)))
              inc = L "x" (vx :+ (I 1))
              compose = L "f" (L "g" (L "x" (V "f" `A` (V "g" `A` vx))))
          in c2 `A` (compose `A` inc `A` inc) `A` (I 10) :+
             ((c2 `A` (compose `A` inc) `A` termid) `A` (I 100))
testlet = teval termlet -- (TInt,[])
| suhailshergill/liboleg | Language/TEval/TInfTEnv.hs | bsd-3-clause | 10,129 | 0 | 24 | 2,805 | 3,574 | 1,923 | 1,651 | 180 | 6 |
module Main where
import Day22 as D22
-- | Entry point: delegate to the Day 22 solution runner.
main :: IO ()
main = D22.run
| ulyssesp/AoC | app/Main.hs | bsd-3-clause | 69 | 0 | 6 | 16 | 26 | 16 | 10 | 4 | 1 |
module SPARC.Imm (
-- immediate values
Imm(..),
strImmLit,
litToImm
)
where
import GhcPrelude
import GHC.Cmm
import GHC.Cmm.CLabel
import Outputable
-- | An immediate value.
-- Not all of these are directly representable by the machine.
-- Things like ImmLit are slurped out and put in a data segment instead.
--
data Imm
        = ImmInt        Int
        -- Sigh.
        | ImmInteger    Integer
        -- AbstractC Label (with baggage)
        | ImmCLbl       CLabel
        -- Simple string
        | ImmLit        SDoc
        | ImmIndex      CLabel Int      -- label plus a constant offset
        | ImmFloat      Rational
        | ImmDouble     Rational
        | ImmConstantSum  Imm Imm
        | ImmConstantDiff Imm Imm
        -- low/high parts of an immediate; presumably SPARC %lo/%hi
        -- relocation halves -- TODO confirm against the code generator
        | LO Imm
        | HI Imm
-- | Create a ImmLit containing this string.
strImmLit :: String -> Imm
strImmLit = ImmLit . text
-- | Convert a CmmLit to an Imm.
-- Narrow to the width: a CmmInt might be out of
-- range, but we assume that ImmInteger only contains
-- in-range values. A signed value should be fine here.
--
litToImm :: CmmLit -> Imm
litToImm lit
   = case lit of
        CmmInt i w        -> ImmInteger (narrowS w i)
        CmmFloat f W32    -> ImmFloat f
        CmmFloat f W64    -> ImmDouble f
        CmmLabel l        -> ImmCLbl l
        CmmLabelOff l off -> ImmIndex l off
        -- a label difference plus offset becomes (l1 - l2) + off
        CmmLabelDiffOff l1 l2 off _
                          -> ImmConstantSum
                             (ImmConstantDiff (ImmCLbl l1) (ImmCLbl l2))
                             (ImmInt off)
        -- any other literal form is not representable as an immediate
        _                 -> panic "SPARC.Regs.litToImm: no match"
| sdiehl/ghc | compiler/nativeGen/SPARC/Imm.hs | bsd-3-clause | 1,625 | 0 | 12 | 611 | 297 | 161 | 136 | 35 | 7 |
-----------------------------------------------------------------------------
{- |
Graphics.Gnewplot.Exec allows you to plot values of types that implement PlotWithGnuplot in
various ways (to a file or on disk).
Example:
@
import Graphics.Gnewplot.Instances
import Graphics.Gnewplot.Exec
someData :: [(Double,Double)]
somedata = [(0.0, 1.0),(0.1, 2.0),(0.2, 1.4),(0.3, 1.7),(0.4, 1.0),
(0.5, 1.8),(0.6, 1.1),(0.7, 1.5),(0.8, 1.2),(0.9, 1.9)]
main = do
gnuplotToPS \"foo.ps\" someData
gnuplotToPNG \"foo.png\" someData
gnuplotOnScreen someData
@
-}
{-# LANGUAGE GeneralizedNewtypeDeriving, FlexibleInstances, ExistentialQuantification #-}
{-# LANGUAGE TypeOperators, FlexibleContexts, GADTs, ScopedTypeVariables, DeriveDataTypeable #-}
module Graphics.Gnewplot.Exec(gnuplotOnScreen, gnuplotToPNG, gnuplotToCanvas, gnuplotToPS, gnuplotToPDF, gnuplotToSparklinePNG, uniqueIntStr, execGP, gnuplotToPNGOpts, TermOpts (FontSpec, SizeSpec)) where
--module Graphics.Gnewplot.Exec where
--import EvalM
import System.IO
import System.Cmd
import System.Exit
--import Math.Probably.FoldingStats hiding (F)
import Control.Monad
import Data.Unique
import Data.List
--import Control.Monad.Trans
import System.Directory
--import System.Posix.Files
import System.Random
import Graphics.Gnewplot.Types
import Debug.Trace
{- stolen from gnuplot-0.3.3 (Henning Thieleman) -}
import qualified System.Process as Proc
import Control.Concurrent
import Control.Exception
-- | Run an IO action in a new thread and block until it finishes
-- (even if it throws), signalling completion through an MVar.
myForkIO :: IO () -> IO ()
myForkIO action = do
    done <- newEmptyMVar
    _ <- forkIO (action `finally` putMVar done ())
    takeMVar done
-- Start an interactive gnuplot session, select the wxt terminal, run the
-- caller's action against the process handles, then shut the session down.
interactivePlot ma = do --putStrLn program
    h@(inp,o,e,pid) <- Proc.runInteractiveCommand "gnuplot"
    threadDelay $ 100*1000   -- give gnuplot a moment to start up
    myForkIO $ do tellInteractivePlot h "set terminal wxt noraise"
                  ma h
    hClose o
    hClose e
    hClose inp
    Proc.terminateProcess pid
    Proc.waitForProcess pid
    return ()
-- Send one command line to a running interactive gnuplot session and
-- flush so gnuplot sees it immediately.
tellInteractivePlot (inp, _out, _err, _pid) cmd = do
    hPutStrLn inp cmd
    hFlush inp
-- Run gnuplot, piping the script in on stdin and waiting for it to exit.
-- NOTE(review): the [""] argument list passes one empty argument to
-- gnuplot -- TODO confirm that is intended.
execGPPipe ::
      String {-^ The lines of the gnuplot script to be piped into gnuplot -}
   -> IO ExitCode
execGPPipe program =
   do --putStrLn program
      (inp,_out,_err,pid) <-
          Proc.runInteractiveProcess "gnuplot" [""] Nothing Nothing
      hPutStr inp program
      --print pid
      Proc.waitForProcess pid
-- Run gnuplot via a shell echo pipeline; the command line is printed
-- first for debugging.
execGPSh ::
      String {-^ The lines of the gnuplot script to be piped into gnuplot -}
--   -> [String] {-^ Options for gnuplot -}
   -> IO ExitCode
execGPSh program =
   let cmd =
         "sh -c 'echo " ++ quote ( program) ++
         " | gnuplot '"
   in do putStrLn cmd
         system cmd
-- Run @gnuplot -persist@ on a script written to a randomly named
-- temporary file; the plot window outlives gnuplot.  The temp file is
-- removed once gnuplot exits ('system' blocks until then).
execGPPersist ::
      String {-^ The lines of the gnuplot script to be piped into gnuplot -}
--   -> [String] {-^ Options for gnuplot -}
   -> IO ()
execGPPersist cmds = do
  x <- randomRIO (0,99999999::Int)
  let fnm = "/tmp/gnuplotCmds"++show x
  writeFile fnm cmds
  system $ "gnuplot -persist "++fnm
  removeFile $ fnm
-- | Run gnuplot on a script written to a randomly named temporary file.
-- 'system' blocks until gnuplot exits, so the file can then be removed
-- (previously the removal was commented out, leaking a file per plot;
-- cf. 'execGPPersist', which does clean up).
execGPTmp :: String -> IO ExitCode
execGPTmp cmds = do
  x <- randomRIO (0,99999999::Int)
  let fnm = "/tmp/gnuplotCmds"++show x
  writeFile fnm cmds
  code <- system $ "gnuplot "++fnm
  removeFile fnm
  return code

-- | The default script runner used by the plotting front-ends below.
execGP :: String -> IO ExitCode
execGP = execGPTmp
-- | Generate a unique string (the decimal hash of a fresh 'Unique').
uniqueIntStr :: IO String
uniqueIntStr = do
    u <- newUnique
    return (show (hashUnique u))
-- | Plot to screen -- will open a new window
-- NOTE(review): uses a fixed temp path, so concurrent plots would
-- clobber each other -- consider the random name used by execGPPersist.
gnuplotOnScreen :: PlotWithGnuplot a => a -> IO ()
gnuplotOnScreen x = do
  plines <- multiPlot unitRect x
  let cmdLines = "set datafile missing \"NaN\"\n"++
                 (showMultiPlot plines)
  writeFile "/tmp/gnuplotCmds" cmdLines
  system "gnuplot -persist /tmp/gnuplotCmds"
  --removeFile "/tmp/gnuplotCmds"
  cleanupCmds $ map snd plines
  return ()
-- | Plot to canvas
-- Renders with gnuplot's HTML5 canvas terminal, writing the javascript
-- to the file @fp.js@ with canvas name @fp@.
gnuplotToCanvas :: PlotWithGnuplot a => String -> a -> IO ()
gnuplotToCanvas fp x = do
  plines <- multiPlot unitRect x
  let cmdLines = "set datafile missing \"NaN\"\n"++
                 "set terminal canvas enhanced name '"++fp++"'\n"++
                 "set output '"++fp++".js'\n"++
                 (showMultiPlot plines)
  --putStrLn cmdLines
  execGP cmdLines
  {- writeFile "/tmp/gnuplotCmds" cmdLines
     system "gnuplot /tmp/gnuplotCmds"
     removeFile "/tmp/gnuplotCmds" -}
  cleanupCmds $ map snd plines
  return ()
-- | Plot to a png file
-- Renders at a fixed 1200x900 size; see 'gnuplotToPNGOpts' for control
-- over font and size.
gnuplotToPNG :: PlotWithGnuplot a => String -> a -> IO ()
gnuplotToPNG fp x = do
  plines <- multiPlot unitRect x
  let cmdLines = "set datafile missing \"NaN\"\n"++
                 "set terminal png enhanced size 1200,900 crop\n"++
                 "set output '"++fp++"'\n"++
                 (showMultiPlot plines)
  --putStrLn cmdLines
  execGP cmdLines
  {- writeFile "/tmp/gnuplotCmds" cmdLines
     system "gnuplot /tmp/gnuplotCmds"
     removeFile "/tmp/gnuplotCmds" -}
  cleanupCmds $ map snd plines
  return ()
-- | Extra gnuplot terminal options: a font specification or a size
-- specification, passed verbatim into the @set terminal@ command.
data TermOpts = FontSpec String
              | SizeSpec String
-- | Plot to a png file with optional font and size terminal settings;
-- the first FontSpec/SizeSpec in the list wins, absent specs fall back
-- to gnuplot's defaults.
gnuplotToPNGOpts :: PlotWithGnuplot a => String -> [TermOpts] -> a -> IO ()
gnuplotToPNGOpts fp opts x = do
  plines <- multiPlot unitRect x
  let fontstr = case [spec | FontSpec spec <- opts ] of
                  [] -> ""
                  s:_ -> "font \""++s++"\" "
  let szstr = case [spec | SizeSpec spec <- opts ] of
                  [] -> ""
                  s:_ -> "size "++s++" "
  let cmdLines = "set datafile missing \"NaN\"\n"++
                 "set terminal png enhanced "++fontstr++szstr ++" crop\n"++
                 "set output '"++fp++"'\n"++
                 (showMultiPlot plines)
  --putStrLn cmdLines
  execGP cmdLines
  {- writeFile "/tmp/gnuplotCmds" cmdLines
     system "gnuplot /tmp/gnuplotCmds"
     removeFile "/tmp/gnuplotCmds" -}
  cleanupCmds $ map snd plines
  return ()
-- | Plot to an encapsulated postscript file with optional font/size
-- settings; defaults are Helvetica 16 at 5.0x3.5 inches.
gnuplotToPSOpts:: PlotWithGnuplot a => String-> [TermOpts] -> a -> IO ()
gnuplotToPSOpts fp opts x = do
  plines <- multiPlot unitRect x
  let fontstr = case [spec | FontSpec spec <- opts ] of
                  [] -> "\"Helvetica\" 16 "
                  s:_ -> s++" "
  let szstr = case [spec | SizeSpec spec <- opts ] of
                  [] -> "size 5.0,3.5"
                  s:_ -> "size "++s++" "
  let cmdLines = "set datafile missing \"NaN\"\n"++
                 "set terminal postscript eps enhanced color "++fontstr++szstr ++"\n"++
                 "set output '"++fp++"'\n"++
                 (showMultiPlot plines)
  execGP cmdLines
  {- writeFile "/tmp/gnuplotCmds" cmdLines
     system "gnuplot /tmp/gnuplotCmds"
     removeFile "/tmp/gnuplotCmds"-}
  cleanupCmds $ map snd plines
  return ()
-- | Plot to a very small (100 by 50 pixels) png file
-- Axes and borders are suppressed, sparkline-style.
gnuplotToSparklinePNG :: PlotWithGnuplot a => String -> a -> IO ()
gnuplotToSparklinePNG fp x = do
  plines <- multiPlot unitRect x
  let cmdLines = "set datafile missing \"NaN\"\n"++
                 "set terminal png size 100,50 crop\n"++
                 "unset xtics\n"++
                 "unset ytics\n"++
                 "set border 0\n"++
                 "set output '"++fp++"'\n"++
                 (showMultiPlot plines)
  execGP cmdLines
  {-writeFile "/tmp/gnuplotCmds" cmdLines
    system "gnuplot /tmp/gnuplotCmds 2>/dev/null"
    removeFile "/tmp/gnuplotCmds"-}
  cleanupCmds $ map snd plines
  return ()
-- | Plot to PDF figure fire. Requires ps2pdf in the current path
-- Implemented by rendering postscript first, then converting; the
-- intermediate .ps file named @fp@ is left on disk.
gnuplotToPDF:: PlotWithGnuplot a => String -> a -> IO ()
gnuplotToPDF fp x = do
  gnuplotToPS fp x
  system $ "ps2pdf "++fp
  return ()
-- | Plot to an encapsulated postscript file (Helvetica 16, 5.0x3.5in).
-- The debug @trace (show plines)@ that dumped the entire plot
-- specification to stderr on every render has been removed.
gnuplotToPS:: PlotWithGnuplot a => String-> a -> IO ()
gnuplotToPS fp x = do
  plines <- multiPlot unitRect x
  let cmdLines = "set datafile missing \"NaN\"\n"++
                 "set terminal postscript eps enhanced color \"Helvetica\" 16 size 5.0,3.5\n"++
                 "set output '"++fp++"'\n"++
                 (showMultiPlot plines)
  execGP cmdLines
  cleanupCmds $ map snd plines
  return ()
{-gnuplotMany :: [String] -> [(String, GnuplotBox)] -> IO ()
gnuplotMany opts nmbxs = do
nmcmds <- forM nmbxs $ \(nm, GnuplotBox x) -> do
cmd <- multiPlot unitRect x
--print2 nm cmd
return (nm,cmd)
let start = "set datafile missing \"NaN\"\n"
let h = optVal 'h' 480 opts
let w = optVal 'w' 640 opts
let term = "set terminal png size "++ show w++","++show h++" crop\n"
let cmds = start++term ++concatMap plotOne nmcmds
execGP cmds
forM_ nmcmds $ \(_,cmd) -> cleanupCmds $ map snd cmd
return ()
where plotOne (fp, plines) = "set output '"++fp++"'\n"++
(showMultiPlot plines)
gnuplotManyLatex :: [String] -> [(String, GnuplotBox)] -> IO ()
gnuplotManyLatex opts nmbxs = do
nmcmds <- forM nmbxs $ \(nm, GnuplotBox x) -> do
cmd <- multiPlot unitRect x
--print2 nm cmd
return (nm,cmd)
let start = "set datafile missing \"NaN\"\n"
let h::Double = (/10) $ realToFrac $ optVal 'h' (35::Int) opts
let w::Double = (/10) $ realToFrac $ optVal 'w' (50::Int) opts
let fs = optVal 'f' 16 opts
let term = "set terminal postscript eps enhanced color \"Helvetica\" "++show fs++" size "++ show w++","++show h++"\n"-- crop\n"
let cmds = start++term ++concatMap plotOne nmcmds
execGP cmds
forM_ nmcmds $ \(nm,cmd) -> do
system $ "epstopdf "++nm++".eps"
cleanupCmds $ map snd cmd
return ()
where plotOne (fp, plines) = "set output '"++fp++".eps'\n"++
(showMultiPlot plines)
-} | glutamate/gnewplot | Graphics/Gnewplot/Exec.hs | bsd-3-clause | 9,720 | 0 | 17 | 2,580 | 1,768 | 866 | 902 | 164 | 3 |
{-# LANGUAGE NPlusKPatterns #-}
module Code26 where
-- | Nodes are integers; 0 plays the role of the nil pointer (see 'stack').
type Node = Int
-- | A binary graph: each node maps to its (left, right) successors.
type Graph = Node -> (Node,Node)

-- | The left and right successor of a node.
left, right :: Graph -> Node -> Node
left = (fst .)
right = (snd .)

-- | Functionally update the left (resp. right) successor of node x to y.
setl, setr :: Graph -> Node -> Node -> Graph
setl g x y = \ z -> if x == z then (y,right g x) else g z
setr g x y = \ z -> if x == z then (left g x, y) else g z
-- | Mark every node reachable from the root: naive depth-first
-- traversal with an explicit stack of pending nodes.
mark0 :: Graph -> Node -> (Graph, Node -> Bool)
mark0 g root = seek0 (g,const False) [root]

-- Pop a node; if unmarked, mark it and push both of its successors.
seek0 :: (Graph, Node -> Bool) -> [Node] -> (Graph, Node -> Bool)
seek0 (g,m) [] = (g,m)
seek0 (g,m) (x:xs)
  | not (m x) = seek0 (g,set m x) (left g x : right g x : xs)
  | otherwise = seek0 (g,m) xs
-- | Mark (resp. unmark) a single node in a characteristic function.
set, unset :: (Node -> Bool) -> Node -> (Node -> Bool)
set f x y = y == x || f y
unset f x y = y /= x && f y
-- Eliminating duplicate entries
-- | As 'mark0', but the node under inspection is held separately instead
-- of living on the stack, so the stack holds only right branches pending.
mark1 :: Graph -> Node -> (Graph, Node -> Bool)
mark1 g root = seek1 (g,const False) root []

seek1 :: (Graph, Node -> Bool) -> Node -> [Node] -> (Graph, Node -> Bool)
seek1 (g,m) x xs
  | not (m x) = seek1 (g,set m x) (left g x) (x:xs)   -- descend left
  | null xs = (g,m)                                   -- nothing pending
  | otherwise = seek1 (g,m) (right g (head xs)) (tail xs)  -- take a right branch
-- Threading the stack
-- | A second refinement: the predicate p records which stacked nodes
-- still have an unexplored right subtree.
mark2 :: Graph -> Node -> (Graph,Node -> Bool)
mark2 g root = seek2 (g,const False) (const False) root []

-- Descend left while the current node is unmarked, flagging it in p.
seek2 :: (Graph, Node -> Bool) -> (Node -> Bool) -> Node -> [Node] -> (Graph, Node -> Bool)
seek2 (g,m) p x xs
  | not (m x) = seek2 (g,set m x) (set p x) (left g x) (x:xs)
  | otherwise = find2 (g,m) p xs

-- Pop finished nodes; a node still flagged in p gets its right subtree
-- explored (and stays on the stack until that exploration returns).
find2 :: (Graph, Node -> Bool) -> (Node -> Bool) -> [Node] -> (Graph, Node -> Bool)
find2 (g,m) _ [] = (g,m)
find2 (g,m) p (y:ys)
  | not (p y) = find2 (g,m) p ys
  | otherwise = seek2 (g,m) (unset p y) (right g y) (y:ys)
-- Representing the stack by a linked list
-- | Read back the stack threaded through the graph itself: follow left
-- pointers while p holds, right pointers otherwise; node 0 terminates.
stack :: Graph -> (Node -> Bool) -> Node -> [Node]
stack g p x | x == 0 = []
            | p x = x : stack g p (left g x)
            | not (p x) = x : stack g p (right g x)

-- | Undo the pointer reversal, rewriting each stacked node's redirected
-- pointer back to its predecessor x.
restore :: Graph -> (Node -> Bool) -> Node -> [Node] -> Graph
restore g _ _ [] = g
restore g p x (y:ys) | p y = restore (setl g y x) p y ys
                     | not (p y) = restore (setr g y x) p y ys
-- Schorr-Waite algorithm
-- | Constant-extra-space marking: the traversal stack is stored in the
-- graph itself by temporarily reversing child pointers; y is the node
-- we came from (0 initially, playing the role of nil).
mark :: Graph -> Node -> (Graph, Node -> Bool)
mark g root = seek3 (g,const False) (const False) root 0

-- On an unmarked node: mark it, flag it in p, point its left field back
-- at the predecessor y, and descend into the (old) left child.
seek3 :: (Graph, Node -> Bool) -> (Node -> Bool) -> Node -> Node -> (Graph, Node -> Bool)
seek3 (g,m) p x y
  | not (m x) = seek3 (setl g x y,set m x) (set p x) (left g x) x
  | otherwise = find3 (g,m) p x y

-- Backtrack through the reversed pointers: a node still flagged in p
-- swings its pointers and explores its right subtree; otherwise its
-- right field is restored and we retreat one more step.
find3 :: (Graph, Node -> Bool) -> (Node -> Bool) -> Node -> Node -> (Graph, Node -> Bool)
find3 (g,m) p x y
  | y == 0 = (g,m)
  | p y = seek3 (swing g y x,m) (unset p y) (right g y) y
  | otherwise = find3 (setr g y x,m) p y (right g y)
  -- swing: left of y becomes the return pointer x, right of y gets the
  -- old left (which held the backpointer during the left-subtree visit)
  where swing g y x = setr (setl g y x) y (left g y)
| sampou-org/pfad | Code/Code26.hs | bsd-3-clause | 2,932 | 0 | 10 | 913 | 1,734 | 911 | 823 | 58 | 2 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternGuards #-}
module Text.XmlHtml.HTML.Render where
import Blaze.ByteString.Builder
import Control.Applicative
import Data.Maybe
import Data.Monoid
import qualified Text.Parsec as P
import Text.XmlHtml.Common
import Text.XmlHtml.TextParser
import Text.XmlHtml.HTML.Meta
import qualified Text.XmlHtml.HTML.Parse as P
import Text.XmlHtml.XML.Render (docTypeDecl, entity)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.HashSet as S
------------------------------------------------------------------------------
-- | And, the rendering code.
-- Emits a byte-order mark for UTF-16 encodings, then the doctype, then
-- the nodes.  NOTE(review): the head node is rendered with 'node', not
-- 'firstNode' -- confirm whether leading-whitespace encoding is intended
-- here.
render :: Encoding -> Maybe DocType -> [Node] -> Builder
render e dt ns = byteOrder
       `mappend` docTypeDecl e dt
       `mappend` nodes
  where byteOrder | isUTF16 e = fromText e "\xFEFF" -- byte order mark
                  | otherwise = mempty
        nodes | null ns = mempty
              | otherwise = node e (head ns)
                  `mappend` (mconcat $ map (node e) (tail ns))
------------------------------------------------------------------------------
-- | Function for rendering HTML nodes without the overhead of creating a
-- Document structure.
renderHtmlFragment :: Encoding -> [Node] -> Builder
renderHtmlFragment e = mconcat . map (node e)
------------------------------------------------------------------------------
-- | HTML allows & so long as it is not "ambiguous" (i.e., looks like an
-- entity). So we have a special case for that.
-- Characters in @bad@ are escaped; a lone @&@ that does not parse as a
-- character/entity reference is emitted literally.
escaped :: [Char] -> Encoding -> Text -> Builder
escaped _ _ "" = mempty
escaped bad e t =
    let (p,s) = T.break (`elem` bad) t   -- p is safe to emit verbatim
        r = T.uncons s
    in fromText e p `mappend` case r of
         Nothing
             -> mempty
         -- '&' is only escaped when what follows would be read as a
         -- character or entity reference
         Just ('&',ss) | isLeft (parseText ambigAmp "" s)
             -> fromText e "&" `mappend` escaped bad e ss
         Just (c,ss)
             -> entity e c `mappend` escaped bad e ss
  where isLeft = either (const True) (const False)
        ambigAmp = P.char '&' *>
            (P.finishCharRef *> return () <|> P.finishEntityRef *> return ())
------------------------------------------------------------------------------
-- | Render a single node: text is escaped, comments are validated
-- (a comment may not contain "--" or end in "-"), and elements are
-- dispatched on the lowercased, namespace-stripped tag name.
node :: Encoding -> Node -> Builder
node e (TextNode t) = escaped "<>&" e t
node e (Comment t) | "--" `T.isInfixOf` t = error "Invalid comment"
                   | "-" `T.isSuffixOf` t = error "Invalid comment"
                   | otherwise = fromText e "<!--"
                                 `mappend` fromText e t
                                 `mappend` fromText e "-->"
node e (Element t a c) =
    let tbase = T.toLower $ snd $ T.breakOnEnd ":" t
    in element e t tbase a c
------------------------------------------------------------------------------
-- | Process the first node differently to encode leading whitespace. This
-- lets us be sure that @parseHTML@ is a left inverse to @render@.
firstNode :: Encoding -> Node -> Builder
firstNode e (Comment t)     = node e (Comment t)
firstNode e (Element t a c) = node e (Element t a c)
firstNode _ (TextNode "")   = mempty
firstNode e (TextNode t)    =
    -- fromJust is safe: the empty-text case was matched above
    let (c,t') = fromJust $ T.uncons t
    in escaped "<>& \t\r\n" e (T.singleton c)
       `mappend` node e (TextNode t')
------------------------------------------------------------------------------
-- XXX: Should do something to avoid concatting large CDATA sections before
-- writing them to the output.
-- | Render one element following HTML serialization rules: void tags
-- self-close; raw-text tags (script, style, ...) emit their text content
-- unescaped, provided it cannot be mistaken for the end tag.
-- @tb@ is the lowercased, namespace-stripped tag used for the lookups.
element :: Encoding -> Text -> Text -> [(Text, Text)] -> [Node] -> Builder
element e t tb a c
  | tb `S.member` voidTags && null c =
        fromText e "<"
        `mappend` fromText e t
        `mappend` (mconcat $ map (attribute e) a)
        `mappend` fromText e " />"
{-  | tb `S.member` voidTags =
        error $ T.unpack t ++ " must be empty"
-}
  | tb `S.member` rawTextTags,
    all isTextNode c,
    let s = T.concat (map nodeText c),
    not ("</" `T.append` t `T.isInfixOf` s) =
        -- raw text is emitted verbatim, with no entity escaping
        fromText e "<"
        `mappend` fromText e t
        `mappend` (mconcat $ map (attribute e) a)
        `mappend` fromText e ">"
        `mappend` fromText e s
        `mappend` fromText e "</"
        `mappend` fromText e t
        `mappend` fromText e ">"
  | tb `S.member` rawTextTags,
    [ TextNode _ ] <- c =
        error $ T.unpack t ++ " cannot contain text looking like its end tag"
  | tb `S.member` rawTextTags =
        error $ T.unpack t ++ " cannot contain child elements or comments"
  | otherwise =
        fromText e "<"
        `mappend` fromText e t
        `mappend` (mconcat $ map (attribute e) a)
        `mappend` fromText e ">"
        `mappend` (mconcat $ map (node e) c)
        `mappend` fromText e "</"
        `mappend` fromText e t
        `mappend` fromText e ">"
------------------------------------------------------------------------------
-- | Render one attribute, choosing the form needing the least escaping:
-- bare name for an empty value, single quotes unless the value contains
-- a single quote, otherwise double quotes with '"' escaped.
attribute :: Encoding -> (Text, Text) -> Builder
attribute e (n,v)
  | v == "" =
        fromText e " "
        `mappend` fromText e n
  | not ("\'" `T.isInfixOf` v) =
        fromText e " "
        `mappend` fromText e n
        `mappend` fromText e "=\'"
        `mappend` escaped "&" e v
        `mappend` fromText e "\'"
  | otherwise =
        fromText e " "
        `mappend` fromText e n
        `mappend` fromText e "=\""
        `mappend` escaped "&\"" e v
        `mappend` fromText e "\""
| silkapp/xmlhtml | src/Text/XmlHtml/HTML/Render.hs | bsd-3-clause | 5,731 | 0 | 16 | 1,778 | 1,636 | 865 | 771 | 111 | 3 |
module System.Shana.Type where
import Control.Arrow
import Control.Category
import Prelude hiding ((.), id)
import Control.Monad ((>=>))
-- | A Shana computation maps an input to zero or more outputs in IO:
-- an arrow over functions of type @a -> IO [b]@.
newtype Shana a b = Shana { runShana :: a -> IO [b] }

instance Category Shana where
  -- identity: yield the input as the single result
  id = Shana $ return . return
  -- composition: run f, feed every result through g, concatenate
  Shana g . Shana f = Shana $ f >=> mapM g >=> return . concat

instance Arrow Shana where
  -- lift a pure function: one input, one output
  arr f = Shana $ return . return . f
  -- act on the first component, pairing every result with the untouched y
  first (Shana f) = Shana $ \(x, y) -> do
    xs <- f x
    return $ zip xs (repeat y)
| nfjinjing/shana | src/System/Shana/Type.hs | bsd-3-clause | 475 | 0 | 13 | 114 | 210 | 114 | 96 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module Serials.Route.UserSignup where
import Prelude hiding (id)
import Control.Applicative
import Control.Monad.Trans.Either
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Reader
import Crypto.BCrypt (hashPasswordUsingPolicy, HashingPolicy(..))
import Data.Text (Text, pack, unpack, toLower)
import qualified Data.Text as Text
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import Data.Aeson (ToJSON, FromJSON)
import Data.Time
import Data.Pool
import Data.Maybe (isJust, isNothing)
import Data.Either (lefts)
import Data.Monoid ((<>))
import GHC.Generics
import Database.RethinkDB.NoClash (RethinkDBHandle, FromDatum(..), ToDatum(..))
import Serials.Route.App
import qualified Serials.Model.User as User
import Serials.Model.User (User)
import qualified Serials.Model.Invite as Invite
import Serials.Lib.Mail
import Serials.Model.Invite (Invite)
import Serials.Model.Types (EmailAddress(..))
import Serials.Model.App (readAllEnv)
import Serials.Types
import Servant.Server
import Text.Blaze.Html5 hiding (code)
import Text.Blaze.Html5.Attributes
-- | The payload submitted by the signup form; the code must match an
-- unused 'Invite'.
data UserSignup = UserSignup {
  firstName :: Text,
  lastName :: Text,
  email :: Text,
  code :: Text,                 -- invite code
  password :: Text,
  passwordConfirmation :: Text
} deriving (Show, Generic)

instance FromJSON UserSignup
instance ToJSON UserSignup
instance FromDatum UserSignup
instance ToDatum UserSignup
-- | Validate a signup, then create the user, consume the invite code,
-- and send the welcome email. Returns the created user, or the first
-- validation/hashing error.
signup :: UserSignup -> App (Either Text User)
signup u = validate (validateSignup u) $ do
  time <- liftIO $ getCurrentTime
  mhash <- liftIO $ hashPassword (password u)
  case mhash of
    Nothing -> return $ Left "Could not hash user password"
    Just hash -> do
      let user = newUser u time hash
      createdUser <- User.insert user
      Invite.markUsed (code u) (User.id createdUser)
      sendWelcomeEmail createdUser
      return $ Right createdUser
-- | Build a 'User' from the signup data, creation time and password
-- hash. The id starts empty (assigned on insert); the email is stored
-- lowercased; new users are not admins.
newUser :: UserSignup -> UTCTime -> Text -> User
newUser u time hash = User.User {
  User.id = pack "",
  User.firstName = firstName u,
  User.lastName = lastName u,
  User.email = EmailAddress $ toLower $ email u,
  User.resetToken = Nothing,
  User.hashedPassword = hash,
  User.admin = False,
  User.created = time
}
-- | bcrypt hashing policy: cost factor 10, "$2b$" algorithm variant.
customHashPolicy :: HashingPolicy
customHashPolicy = HashingPolicy 10 "$2b$"
-- | bcrypt-hash a password under 'customHashPolicy'; Nothing when the
-- underlying library fails.
hashPassword :: Text -> IO (Maybe Text)
hashPassword pw =
  fmap decodeUtf8 <$> hashPasswordUsingPolicy customHashPolicy (encodeUtf8 pw)
-- Email -----------------------------------------------------------

-- | Send the post-activation welcome email to a user.
sendWelcomeEmail :: User -> App ()
sendWelcomeEmail u = do
  ep <- asks (endpoint . env)
  sendMail [User.email u] (welcomeEmail ep u)

-- | The welcome email body, linking back to the application login page.
welcomeEmail :: Endpoint -> User -> Email
welcomeEmail ep u = Email "Your account is active!" $ do
  logoPage ep $ do
    h3 ("Hello " >> toHtml (User.firstName u) >> ", welcome to Web Fiction!")
    p $ do
      a ! href (textValue $ ep <> "/app#/login") $ "Click here to start reading"
-- Password Resetting ----------------------------------------------

-- | Attach a fresh reset token to the account with this email address
-- and mail out the reset link.
forgotPassword :: EmailAddress -> App ()
forgotPassword email = do
  token <- liftIO $ Invite.generateCode
  User.addResetToken email token
  ep <- askEndpoint
  sendMail [email] (passwordEmail ep token)
-- | Given a valid reset token, replace the user's password hash and
-- clear the token so it cannot be reused.
resetPassword :: Text -> Text -> App (Either Text ())
resetPassword token pw = do
  mu <- User.findByToken token
  case mu of
    Nothing -> return $ Left "Invalid Token"
    Just u -> do
      mhash <- liftIO $ hashPassword pw
      case mhash of
        Nothing -> return $ Left "Could not hash password"
        Just hash -> do
          let user = u { User.hashedPassword = hash, User.resetToken = Nothing }
          User.save (User.id user) user
          return $ Right ()
-- you need the token in the url, at least
-- | Reset-password email body; the token travels in the URL fragment.
passwordEmail :: Endpoint -> Text -> Email
passwordEmail ep token = Email "Reset your password" $ do
  logoPage ep $ do
    h3 "Reset your password"
    p $ do
      a ! href (textValue $ ep <> "/app#/password/reset/" <> token) $ "Click here to reset your password"
-- Validation --------------------------------------------------
-- maybe there's a library function for this
-- | Sequence a validator before an action: if the validator yields an error,
-- short-circuit with that error; otherwise run the action.
validate :: (Monad m) => m (Either Text ()) -> m (Either Text a) -> m (Either Text a)
validate validator rest =
  validator >>= either (return . Left) (const rest)
-- | The password must match its confirmation and must be non-empty.
validatePassword :: UserSignup -> Either Text ()
validatePassword u
  | password u /= passwordConfirmation u =
      Left "Password and Password Confirmation do not match"
  | otherwise = validateRequired "Password" (password u)
-- | Reject the signup if another account already uses this email
-- (compared lowercased, matching how 'newUser' stores it).
validateEmail :: UserSignup -> App (Either Text ())
validateEmail u = do
  existUser <- User.findByEmail $ toLower $ email u
  return $ if isJust existUser
    then Left "User already exists with that email"
    else Right ()
-- | The invite code must exist and must not have been used for a signup yet.
validateInvite :: UserSignup -> App (Either Text ())
validateInvite u = do
  mInvite <- Invite.find (code u)
  case mInvite of
    Nothing -> return $ Left "Invite not found"
    Just invite ->
      return $ if isJust (Invite.signup invite)
        then Left "Invite already used"
        else Right ()
-- | Fail with @"<name> is required"@ when the given field text is empty.
validateRequired :: Text -> Text -> Either Text ()
validateRequired name txt
  -- Text.null is O(1); the previous `Text.length txt == 0` walked the text.
  | Text.null txt = Left $ name <> " is required"
  | otherwise     = Right ()
-- | Run every signup validator and report the first failure, if any.
-- Pure checks are lifted into 'App' with 'return'; the email and invite
-- checks hit the datastore.
validateSignup :: UserSignup -> App (Either Text ())
validateSignup u = do
    errs <- lefts <$> sequence validators
    return $ case errs of
      []    -> Right ()
      x : _ -> Left x  -- only the first error is reported to the caller
  where
    validators = [
        return $ validateRequired "First Name" (firstName u),
        -- BUG FIX: previously validated `firstName u` here, so an empty
        -- last name slipped through validation.
        return $ validateRequired "Last Name" (lastName u),
        return $ validatePassword u,
        validateEmail u,
        validateInvite u
      ]
| bitemyapp/serials | server/Serials/Route/UserSignup.hs | mit | 5,746 | 0 | 21 | 1,192 | 1,796 | 925 | 871 | 150 | 3 |
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
module Repro
where
import Generics.SOP
-- | Minimal reproduction case (GHC ticket repro file): matching the
-- 'Newtype' constructor of 'DatatypeInfo' together with a local binding
-- whose signature re-quantifies @xs@. The value-level result is
-- deliberately 'undefined'; only type checking matters here, so the code
-- below must stay exactly as written.
recover :: forall a xs.
           (Code a ~ '[xs], HasDatatypeInfo a)
        => a
recover =
  case datatypeInfo (Proxy @a) :: DatatypeInfo '[xs] of
    Newtype _ _ _ ->
      let sop :: NP [] xs =
            (undefined
              :: forall c xs
              . All c xs
              => NP [] xs)
      in undefined
| deepfire/mood | doc/ghc-repro-16095-2.hs | agpl-3.0 | 595 | 0 | 16 | 187 | 147 | 80 | 67 | 21 | 1 |
-- Test for trac #1042
import Control.Exception
import Data.Int
import Prelude hiding (catch)
-- | Regression test for trac #1042: @minBound `div` (-1)@ overflows for
-- each fixed-width integer type; each division either prints a result or,
-- via 'myCatch', prints the 'ArithException' instead of crashing.
main :: IO ()
main = do print ((minBound :: Int) `div` (-1)) `myCatch` print
          print ((minBound :: Int8) `div` (-1)) `myCatch` print
          print ((minBound :: Int16) `div` (-1)) `myCatch` print
          print ((minBound :: Int32) `div` (-1)) `myCatch` print
          print ((minBound :: Int64) `div` (-1)) `myCatch` print
-- | 'catch' restricted to 'ArithException' so only arithmetic errors are
-- handled (Prelude.catch is hidden above).
myCatch :: IO a -> (ArithException -> IO a) -> IO a
myCatch = catch
| kfish/const-math-ghc-plugin | tests/ghc-7.6/numrun013.hs | bsd-3-clause | 506 | 0 | 12 | 122 | 230 | 133 | 97 | 11 | 1 |
module Language.Haskell.Interpreter.Unsafe (
unsafeSetGhcOption, unsafeRunInterpreterWithArgs
)
where
import Control.Monad.Trans
import Control.Monad.Catch
import Hint.Base
import Hint.Configuration
import Hint.InterpreterT
-- | Set a GHC option for the current session,
-- eg. @unsafeSetGhcOption \"-XNoMonomorphismRestriction\"@.
--
-- Warning: Some options may interact badly with the Interpreter.
-- | Thin public alias over the internal 'setGhcOption'.
unsafeSetGhcOption :: MonadInterpreter m => String -> m ()
unsafeSetGhcOption option = setGhcOption option
-- | Executes the interpreter, setting the args as though they were
-- command-line args. In particular, this means args that have no
-- effect with :set in ghci might function properly from this
-- context.
--
-- Warning: Some options may interact badly with the Interpreter.
-- | Thin public alias over the internal 'runInterpreterWithArgs'.
unsafeRunInterpreterWithArgs :: (MonadMask m, MonadIO m, Functor m)
                             => [String]
                             -> InterpreterT m a
                             -> m (Either InterpreterError a)
unsafeRunInterpreterWithArgs args body = runInterpreterWithArgs args body
| konn/hint-forked | src/Language/Haskell/Interpreter/Unsafe.hs | bsd-3-clause | 1,076 | 0 | 10 | 241 | 137 | 81 | 56 | 14 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
-- Run:
-- ./Bench -u all.csv
-- runghc PlotResults.hs all.csv
module Main where
import Text.CSV
import System.Process
import qualified Data.Map as Map
import System.IO
import System.FilePath
import Data.List
import System.Directory
import System.Environment
import Control.Exception
-- | Usage: @runghc PlotResults.hs all.csv@ where @all.csv@ comes from
-- @./Bench -u all.csv@ (criterion summary output). Crashes with a pattern
-- failure unless exactly one argument is given.
main = do
  [file] <- getArgs
  pd <- readRunInfos file
  renderPlotData pd
-- each entry in this map is one line in the graph.
type PlotData = Map.Map String [RunInfo]
-- | One benchmark run: the input size (parsed from the file name) and the
-- mean run time reported by criterion.
data RunInfo = RunInfo {
    runSize :: Int,
    runMean :: Double
    }
  deriving Show
-- | Pipe the given commands into a @gnuplot -persist@ subprocess and wait
-- for it to exit. Requires gnuplot on the PATH and an X11 display.
runGnuplot :: [String] -> IO ()
runGnuplot ss = do
    (Just hIn,Nothing,Nothing,ph) <- createProcess (proc "gnuplot" ["-persist"])
                                        {std_in = CreatePipe}
    hPutStrLn hIn $ unlines $ ["set terminal x11"] ++ ss ++ ["quit"]
    waitForProcess ph
    return ()
-- read from a criterion -u dump.
-- | Read a criterion @-u@ CSV dump, grouping runs by test name.
-- Fails with a pattern error if the file cannot be parsed.
-- Assumes paths of the form @.../<testName>/<size>@ in column 0 and the
-- mean in column 1 -- TODO confirm against the criterion CSV layout used.
readRunInfos :: FilePath -> IO PlotData
readRunInfos path = do
    -- ignore the header
    Right (_:csv) <- parseCSVFromFile path
    return $ foldl' (\m (n,r) -> Map.insertWith (++) n [r] m) Map.empty
        $ fmap parseRun
        $ filter (any (not . null)) csv
  where
    parseRun csv = let
        sizeStr = takeFileName $ csv !! 0
        testName = takeFileName $ takeDirectory $ csv !! 0
        size = tryRead "size" sizeStr
        mean = tryRead "mean" $ csv !! 1
        in (testName, RunInfo size mean)
-- | Write one gnuplot data file ("size mean" per line) into the system temp
-- directory and return its path; the caller is responsible for deleting it.
tempDataFile :: [RunInfo] -> IO FilePath
tempDataFile runs = do
    dir <- getTemporaryDirectory
    (path,h) <- openTempFile dir "plots.dat"
    mapM_ (hPutStrLn h)
        $ map (\r -> show (runSize r) ++ " " ++ show (runMean r))
        runs
    hClose h
    return path
-- | Plot every series on one log-log graph (one temp data file per series),
-- then remove the temp files once gnuplot has exited.
renderPlotData :: PlotData -> IO ()
renderPlotData pd = do
    let pdList = Map.toList pd
    fs <- mapM tempDataFile $ fmap snd pdList
    let cmds = [ "set logscale x"
               , "set logscale y"
               , "set xtics"
               , "plot " ++ intercalate ", "
                    [show f
                        ++ " using 1:2:xticlabels(1)"
                        ++ " title " ++ show n
                        ++ " with lines "
                    | (n,f) <- zip (fmap fst pdList) fs]
               ]
    runGnuplot cmds
    mapM_ removeFile fs
-- | 'read' that, on failure, rethrows with the supplied description and the
-- offending input attached. BUG FIX: the message previously contained the
-- literal string "descr" instead of the 'descr' argument, so every parse
-- error was reported identically.
tryRead :: Read a => String -> String -> a
tryRead descr s =
    mapException (\(e::SomeException) ->
                    userError (descr ++ ": " ++ show s ++ " " ++ show e))
        (read s)
| judah/vector-fftw | tests/timing/PlotResults.hs | bsd-3-clause | 2,606 | 0 | 18 | 908 | 784 | 399 | 385 | 66 | 1 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2014 Galois, Inc.
-- License : BSD3
-- Maintainer : cryptol@galois.com
-- Stability : provisional
-- Portability : portable
module Cryptol.Transform.Specialize
where
import Cryptol.TypeCheck.AST
import Cryptol.TypeCheck.TypeMap
import Cryptol.TypeCheck.Subst
import qualified Cryptol.ModuleSystem as M
import qualified Cryptol.ModuleSystem.Env as M
import qualified Cryptol.ModuleSystem.Monad as M
import Control.Applicative
import Data.List (intercalate)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (catMaybes)
import Data.Traversable (traverse)
import MonadLib
-- | A QName should have an entry in the SpecCache iff it is
-- specializable. Each QName starts out with an empty TypesMap.
type SpecCache = Map QName (Decl, TypesMap (QName, Maybe Decl))
type M = M.ModuleT (StateT SpecCache IO)
-- | Entry point: specialize an expression against every declaration loaded
-- in the module environment, threading a fresh (empty) 'SpecCache' through
-- the computation and discarding the final cache.
specialize :: Expr -> M.ModuleCmd Expr
specialize expr modEnv = do
  let extDgs = allDeclGroups modEnv
  run $ specializeEWhere expr extDgs
  where
  run = fmap fst . runStateT Map.empty . M.runModuleT modEnv
-- | Structural traversal: recurse into subexpressions, and hand any
-- variable / type-application / proof-application head to 'specializeConst',
-- which decides whether a monomorphic instance must be generated.
specializeExpr :: Expr -> M Expr
specializeExpr expr =
  case expr of
    ECon _econ -> pure expr
    EList es t -> EList <$> traverse specializeExpr es <*> pure t
    ETuple es -> ETuple <$> traverse specializeExpr es
    ERec fs -> ERec <$> traverse (traverseSnd specializeExpr) fs
    ESel e s -> ESel <$> specializeExpr e <*> pure s
    EIf e1 e2 e3 -> EIf <$> specializeExpr e1 <*> specializeExpr e2 <*> specializeExpr e3
    EComp t e mss -> EComp t <$> specializeExpr e <*> traverse (traverse specializeMatch) mss
    -- FIXME: this is wrong. Think about scoping rules.
    EVar {} -> specializeConst expr
    ETAbs t e -> ETAbs t <$> specializeExpr e
    ETApp {} -> specializeConst expr
    EApp e1 e2 -> EApp <$> specializeExpr e1 <*> specializeExpr e2
    EAbs qn t e -> EAbs qn t <$> specializeExpr e
    EProofAbs p e -> EProofAbs p <$> specializeExpr e
    EProofApp {} -> specializeConst expr
    ECast e t -> ECast <$> specializeExpr e <*> pure t
    -- TODO: if typeOf e == t, then drop the coercion.
    EWhere e dgs -> specializeEWhere e dgs
-- | Specialize inside a comprehension match. Monomorphic 'Let' bindings
-- pass through unchanged; polymorphic ones are not yet supported.
specializeMatch :: Match -> M Match
specializeMatch (From qn t e) = From qn t <$> specializeExpr e
specializeMatch (Let decl)
  | null (sVars (dSignature decl)) = return (Let decl)
  | otherwise = fail "unimplemented: specializeMatch Let unimplemented"
-- TODO: should treat this case like EWhere.
-- | Specialize the body of a @where@: register every local declaration in
-- the cache (with an empty instance map), specialize the body, then rebuild
-- the declaration groups from whatever specialized instances were demanded.
-- Groups with no surviving instances are dropped, as is the EWhere itself
-- when nothing remains.
specializeEWhere :: Expr -> [DeclGroup] -> M Expr
specializeEWhere e dgs = do
  let decls = concatMap groupDecls dgs
  let newCache = Map.fromList [ (dName d, (d, emptyTM)) | d <- decls ]
  -- We assume that the names bound in dgs are disjoint from the other names in scope.
  modifySpecCache (Map.union newCache)
  e' <- specializeExpr e
  -- Then reassemble the DeclGroups.
  let splitDecl :: Decl -> M [Decl]
      splitDecl d = do
        Just (_, tm) <- Map.lookup (dName d) <$> getSpecCache
        return (catMaybes $ map (snd . snd) $ toListTM tm)
  let splitDeclGroup :: DeclGroup -> M [DeclGroup]
      splitDeclGroup (Recursive ds) = do
        ds' <- concat <$> traverse splitDecl ds
        if null ds'
          then return []
          else return [Recursive ds']
      splitDeclGroup (NonRecursive d) = map NonRecursive <$> splitDecl d
  dgs' <- concat <$> traverse splitDeclGroup dgs
  modifySpecCache (flip Map.difference newCache) -- Remove local definitions from cache.
  return $ if null dgs'
           then e'
           else EWhere e' dgs'
-- | Specialize one (possibly type/proof-applied) head. If the head is a
-- cached, specializable variable, look up (or create) the monomorphic
-- instance for this particular list of type arguments and replace the whole
-- application with a reference to it.
specializeConst :: Expr -> M Expr
specializeConst e0 = do
  let (e1, n) = destEProofApps e0
  let (e2, ts) = destETApps e1
  case e2 of
    EVar qname ->
      do cache <- getSpecCache
         case Map.lookup qname cache of
           Nothing -> return e0 -- Primitive/unspecializable variable; leave it alone
           Just (decl, tm) ->
             case lookupTM ts tm of
               Just (qname', _) -> return (EVar qname') -- Already specialized
               Nothing -> do -- A new type instance of this function
                 qname' <- freshName qname ts -- New type instance, record new name
                 sig' <- instantiateSchema ts n (dSignature decl)
                 -- insert a placeholder first so recursive references to this
                 -- same instance resolve to qname' instead of looping
                 modifySpecCache (Map.adjust (fmap (insertTM ts (qname', Nothing))) qname)
                 rhs' <- specializeExpr =<< instantiateExpr ts n (dDefinition decl)
                 let decl' = decl { dName = qname', dSignature = sig', dDefinition = rhs' }
                 modifySpecCache (Map.adjust (fmap (insertTM ts (qname', Just decl'))) qname)
                 return (EVar qname')
    _ -> return e0 -- type/proof application to non-variable; not specializable
-- | Strip all outer 'EProofApp' wrappers, returning the inner expression
-- and how many wrappers were removed.
destEProofApps :: Expr -> (Expr, Int)
destEProofApps = strip 0
  where
    strip count (EProofApp inner) = strip (count + 1) inner
    strip count other             = (other, count)
-- | Strip all outer type applications, returning the inner expression and
-- the type arguments in left-to-right application order (the innermost
-- application contributes the first element).
destETApps :: Expr -> (Expr, [Type])
destETApps = strip []
  where
    strip acc (ETApp inner t) = strip (t : acc) inner
    strip acc other           = (other, acc)
-- Any top-level declarations in the current module can be found in the
-- ModuleEnv's LoadedModules, and so we can count on freshName to avoid collisions with them.
-- Additionally, decls in 'where' clauses can only (currently) be parsed with unqualified names.
-- Any generated name for a specialized function will be qualified with the current @ModName@,
-- so genned names will not collide with local decls either.
-- | Produce a name for a specialized instance: the reified base name, with a
-- numeric suffix appended only if it would collide with a public bound name.
freshName :: QName -> [Type] -> M QName
freshName (QName m name) tys = do
  let name' = reifyName name tys
  bNames <- matchingBoundNames m
  -- keep bumping the suffix until the candidate is free
  let loop i = let nm = name' ++ "_" ++ show i
               in if nm `elem` bNames
                  then loop $ i + 1
                  else nm
  let go = if name' `elem` bNames
           then loop (1 :: Integer)
           else name'
  return $ QName m (Name go)
-- | All public unqualified name strings bound in the given module
-- (used only for collision avoidance above).
matchingBoundNames :: (Maybe ModName) -> M [String]
matchingBoundNames m = do
  qns <- allPublicQNames <$> M.getModuleEnv
  return [ n | QName m' (Name n) <- qns , m == m' ]
-- | Encode a name plus its type arguments as a deterministic identifier,
-- e.g. joining the base name and a flat rendering of each type with '_'.
-- Each helper below maps one syntactic category to short string tokens.
reifyName :: Name -> [Type] -> String
reifyName name tys = intercalate "_" (showName name : concatMap showT tys)
  where
    tvInt (TVFree i _ _ _) = i
    tvInt (TVBound i _) = i
    showT typ =
      case typ of
        TCon tc ts -> showTCon tc : concatMap showT ts
        TUser _ _ t -> showT t
        TVar tvar -> [ "a" ++ show (tvInt tvar) ]
        TRec trec -> "rec" : concatMap showRecFld trec
    showTCon tcon =
      case tcon of
        TC tc -> showTC tc
        PC pc -> showPC pc
        TF tf -> showTF tf
    showPC pc =
      case pc of
        PEqual -> "eq"
        PNeq -> "neq"
        PGeq -> "geq"
        PFin -> "fin"
        PHas sel -> "sel_" ++ showSel sel
        PArith -> "arith"
        PCmp -> "cmp"
    showTC tc =
      case tc of
        TCNum n -> show n
        TCInf -> "inf"
        TCBit -> "bit"
        TCSeq -> "seq"
        TCFun -> "fun"
        TCTuple n -> "t" ++ show n
        TCNewtype _ -> "user"
    showSel sel = intercalate "_" $
      case sel of
        TupleSel _ sig -> "tup" : maybe [] ((:[]) . show) sig
        RecordSel x sig -> "rec" : showName x : map showName (maybe [] id sig)
        ListSel _ sig -> "list" : maybe [] ((:[]) . show) sig
    showName nm =
      case nm of
        Name s -> s
        NewName _ n -> "x" ++ show n
    showTF tf =
      case tf of
        TCAdd -> "add"
        TCSub -> "sub"
        TCMul -> "mul"
        TCDiv -> "div"
        TCMod -> "mod"
        TCLg2 -> "lg2"
        TCExp -> "exp"
        TCWidth -> "width"
        TCMin -> "min"
        TCMax -> "max"
        TCLenFromThen -> "len_from_then"
        TCLenFromThenTo -> "len_from_then_to"
    showRecFld (nm,t) = showName nm : showT t
-- | Substitute the given type arguments for the schema's parameters,
-- dropping all quantifiers and the @n@ satisfied constraints; fails if the
-- argument counts do not match the schema.
instantiateSchema :: [Type] -> Int -> Schema -> M Schema
instantiateSchema ts n (Forall params props ty)
  | length params /= length ts = fail "instantiateSchema: wrong number of type arguments"
  | length props /= n = fail "instantiateSchema: wrong number of prop arguments"
  | otherwise = return $ Forall [] [] (apSubst sub ty)
  where sub = listSubst [ (tpVar p, t) | (p, t) <- zip params ts ]
-- | Reduce `length ts` outermost type abstractions and `n` proof abstractions.
instantiateExpr :: [Type] -> Int -> Expr -> M Expr
instantiateExpr [] 0 e = return e
instantiateExpr [] n (EProofAbs _ e) = instantiateExpr [] (n - 1) e
instantiateExpr (t : ts) n (ETAbs param e) =
  instantiateExpr ts n (apSubst (singleSubst (tpVar param) t) e)
instantiateExpr _ _ _ = fail "instantiateExpr: wrong number of type/proof arguments"
-- | Every declaration group from every loaded module.
allDeclGroups :: M.ModuleEnv -> [DeclGroup]
allDeclGroups =
    concatMap mDecls
  . M.loadedModules
-- | Every loaded module record in the environment.
allLoadedModules :: M.ModuleEnv -> [M.LoadedModule]
allLoadedModules =
    M.getLoadedModules
  . M.meLoadedModules
-- | Every publicly exported qualified name, across all loaded modules
-- (feeds the collision check in 'freshName').
allPublicQNames :: M.ModuleEnv -> [QName]
allPublicQNames =
    concatMap
      ( Map.keys
      . M.ifDecls
      . M.ifPublic
      . M.lmInterface
      )
  . allLoadedModules
-- | Read the specialization cache from the underlying state monad.
getSpecCache :: M SpecCache
getSpecCache = lift get
-- | Replace the specialization cache.
setSpecCache :: SpecCache -> M ()
setSpecCache = lift . set
-- | Apply a function to the specialization cache.
modifySpecCache :: (SpecCache -> SpecCache) -> M ()
modifySpecCache = lift . modify
-- | monad-lib's StateM has no modify; define it via get/set.
modify :: StateM m s => (s -> s) -> m ()
modify f = get >>= (set . f)
-- | Apply an effectful function to the second component of a pair, leaving
-- the first component untouched.
traverseSnd :: Functor f => (b -> f c) -> (a, b) -> f (a, c)
traverseSnd f (a, b) = fmap (\b' -> (a, b')) (f b)
| TomMD/cryptol | src/Cryptol/Transform/Specialize.hs | bsd-3-clause | 9,538 | 0 | 28 | 2,752 | 3,054 | 1,513 | 1,541 | 207 | 33 |
{-# LANGUAGE CPP #-}
module Eta.Main.FileCleanup
( TempFileLifetime(..)
, cleanTempDirs, cleanTempFiles, cleanCurrentModuleTempFiles
, addFilesToClean, changeTempFilesLifetime
, newTempName, newTempLibName
, withSystemTempDirectory, withTempDirectory
) where
import Eta.Main.DynFlags
import Eta.Main.ErrUtils
import Eta.Utils.Outputable
import Eta.Utils.Util
import Eta.Utils.Exception as Exception
import Eta.Main.DriverPhases
import Control.Monad
import Data.List
import qualified Data.Set as Set
import qualified Data.Map as Map
import Data.IORef
import System.Directory
import System.FilePath
import System.IO.Error
#if !defined(mingw32_HOST_OS)
import qualified System.Posix.Internals
#endif
-- | Used when a temp file is created. This determines which component Set of
-- FilesToClean will get the temp file
data TempFileLifetime
  = TFL_CurrentModule
  -- ^ A file with lifetime TFL_CurrentModule will be cleaned up at the
  -- end of upsweep_mod
  | TFL_GhcSession
  -- ^ A file with lifetime TFL_GhcSession will be cleaned up at the end of
  -- runGhc(T)
  deriving (Show)
-- | Delete every temp directory we created (unless -keep-tmp-files).
-- The IORef is atomically swapped for empty so each dir is removed once,
-- and mask_ keeps async exceptions from interrupting mid-cleanup.
cleanTempDirs :: DynFlags -> IO ()
cleanTempDirs dflags
   = unless (gopt Opt_KeepTmpFiles dflags)
   $ mask_
   $ do let ref = dirsToClean dflags
        ds <- atomicModifyIORef' ref $ \ds -> (Map.empty, ds)
        removeTmpDirs dflags (Map.elems ds)
-- | Delete all files in @filesToClean dflags@.
cleanTempFiles :: DynFlags -> IO ()
cleanTempFiles dflags
   = unless (gopt Opt_KeepTmpFiles dflags)
   $ mask_
   $ do let ref = filesToClean dflags
        to_delete <- atomicModifyIORef' ref $
          \FilesToClean
            { ftcCurrentModule = cm_files
            , ftcGhcSession = gs_files
            } -> ( emptyFilesToClean
                 , Set.toList cm_files ++ Set.toList gs_files)
        removeTmpFiles dflags to_delete
-- | Delete all files in @filesToClean dflags@. That have lifetime
-- TFL_CurrentModule.
-- If a file must be cleaned eventually, but must survive a
-- cleanCurrentModuleTempFiles, ensure it has lifetime TFL_GhcSession.
-- | Delete only the TFL_CurrentModule files, leaving the TFL_GhcSession set
-- untouched in the IORef.
cleanCurrentModuleTempFiles :: DynFlags -> IO ()
cleanCurrentModuleTempFiles dflags
   = unless (gopt Opt_KeepTmpFiles dflags)
   $ mask_
   $ do let ref = filesToClean dflags
        to_delete <- atomicModifyIORef' ref $
          \ftc@FilesToClean{ftcCurrentModule = cm_files} ->
            (ftc {ftcCurrentModule = Set.empty}, Set.toList cm_files)
        removeTmpFiles dflags to_delete
-- | Ensure that new_files are cleaned on the next call of
-- 'cleanTempFiles' or 'cleanCurrentModuleTempFiles', depending on lifetime.
-- If any of new_files are already tracked, they will have their lifetime
-- updated.
-- | Track new_files for cleanup under the given lifetime. A file can live in
-- only one set, so it is simultaneously removed from the other lifetime's
-- set (this is the "lifetime update" behavior mentioned above).
addFilesToClean :: DynFlags -> TempFileLifetime -> [FilePath] -> IO ()
addFilesToClean dflags lifetime new_files = modifyIORef' (filesToClean dflags) $
  \FilesToClean
    { ftcCurrentModule = cm_files
    , ftcGhcSession = gs_files
    } -> case lifetime of
      TFL_CurrentModule -> FilesToClean
        { ftcCurrentModule = cm_files `Set.union` new_files_set
        , ftcGhcSession = gs_files `Set.difference` new_files_set
        }
      TFL_GhcSession -> FilesToClean
        { ftcCurrentModule = cm_files `Set.difference` new_files_set
        , ftcGhcSession = gs_files `Set.union` new_files_set
        }
  where
    new_files_set = Set.fromList new_files
-- | Update the lifetime of files already being tracked. If any files are
-- not being tracked they will be discarded.
changeTempFilesLifetime :: DynFlags -> TempFileLifetime -> [FilePath] -> IO ()
changeTempFilesLifetime dflags lifetime files = do
  FilesToClean
    { ftcCurrentModule = cm_files
    , ftcGhcSession = gs_files
    } <- readIORef (filesToClean dflags)
  -- files currently tracked under the *other* lifetime are the ones to move
  let old_set = case lifetime of
        TFL_CurrentModule -> gs_files
        TFL_GhcSession -> cm_files
      existing_files = [f | f <- files, f `Set.member` old_set]
  addFilesToClean dflags lifetime existing_files
-- Return a unique numeric temp file suffix
-- | Hand out the next numeric suffix, atomically post-incrementing the
-- counter stored in DynFlags.
newTempSuffix :: DynFlags -> IO Int
newTempSuffix dflags = atomicModifyIORef' counter bump
  where
    counter  = nextTempSuffix dflags
    bump n   = (n + 1, n)
-- Find a temporary name that doesn't already exist.
-- | Allocate a fresh temp file name (never creating the file itself) and
-- register it for cleanup under the given lifetime. Retries until the name
-- does not already exist on disk.
newTempName :: DynFlags -> TempFileLifetime -> Suffix -> IO FilePath
newTempName dflags lifetime extn
  = do d <- getTempDir dflags
       findTempName (d </> "eta_") -- See Note [Deterministic base name]
  where
    findTempName :: FilePath -> IO FilePath
    findTempName prefix
      = do n <- newTempSuffix dflags
           let filename = prefix ++ show n <.> extn
           b <- doesFileExist filename
           if b then findTempName prefix
                else do -- clean it up later
                        addFilesToClean dflags lifetime [filename]
                        return filename
-- | Like 'newTempName', but shaped for a library: returns the full
-- @lib<name>.<extn>@ path, its directory, and the bare library name.
newTempLibName :: DynFlags -> TempFileLifetime -> Suffix
  -> IO (FilePath, FilePath, String)
newTempLibName dflags lifetime extn
  = do d <- getTempDir dflags
       findTempName d ("eta_")
  where
    findTempName :: FilePath -> String -> IO (FilePath, FilePath, String)
    findTempName dir prefix
      = do n <- newTempSuffix dflags -- See Note [Deterministic base name]
           let libname = prefix ++ show n
               filename = dir </> "lib" ++ libname <.> extn
           b <- doesFileExist filename
           if b then findTempName dir prefix
                else do -- clean it up later
                        addFilesToClean dflags lifetime [filename]
                        return (filename, dir, libname)
-- Return our temporary directory within tmp_dir, creating one if we
-- don't have one yet.
-- | Return the per-process temp directory under @tmpDir dflags@, creating
-- and registering it on first use. Safe against racing threads: exactly one
-- created directory wins; losers delete theirs and use the winner's.
getTempDir :: DynFlags -> IO FilePath
getTempDir dflags = do
    mapping <- readIORef dir_ref
    case Map.lookup tmp_dir mapping of
        Nothing -> do
            pid <- getProcessID
            let prefix = tmp_dir </> "eta" ++ show pid ++ "_"
            mask_ $ mkTempDir prefix
        Just dir -> return dir
  where
    tmp_dir = tmpDir dflags
    dir_ref = dirsToClean dflags
    mkTempDir :: FilePath -> IO FilePath
    mkTempDir prefix = do
        n <- newTempSuffix dflags
        let our_dir = prefix ++ show n
        -- 1. Speculatively create our new directory.
        createDirectory our_dir
        -- 2. Update the dirsToClean mapping unless an entry already exists
        -- (i.e. unless another thread beat us to it).
        their_dir <- atomicModifyIORef' dir_ref $ \mapping ->
            case Map.lookup tmp_dir mapping of
                Just dir -> (mapping, Just dir)
                Nothing -> (Map.insert tmp_dir our_dir mapping, Nothing)
        -- 3. If there was an existing entry, return it and delete the
        -- directory we created. Otherwise return the directory we created.
        case their_dir of
            Nothing -> do
                debugTraceMsg dflags 2 $
                    text "Created temporary directory:" <+> text our_dir
                return our_dir
            Just dir -> do
                removeDirectory our_dir
                return dir
      -- name collision on disk: retry with the next suffix
      `catchIO` \e -> if isAlreadyExistsError e
                      then mkTempDir prefix else ioError e
{- Note [Deterministic base name]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The filename of temporary files, especially the basename of C files, can end
up in the output in some form, e.g. as part of linker debug information. In the
interest of bit-wise exactly reproducible compilation (#4012), the basename of
the temporary file no longer contains random information (it used to contain
the process id).
This is ok, as the temporary directory used contains the pid (see getTempDir).
-}
-- | Remove the given directories, tracing the operation at -v2.
removeTmpDirs :: DynFlags -> [FilePath] -> IO ()
removeTmpDirs dflags ds
  = traceCmd dflags "Deleting temp dirs"
             ("Deleting: " ++ unwords ds)
             (mapM_ (removeWith dflags removeDirectory) ds)
-- | Remove the given files, refusing to touch anything that looks like a
-- Haskell source file.
removeTmpFiles :: DynFlags -> [FilePath] -> IO ()
removeTmpFiles dflags fs
  = warnNon $
    traceCmd dflags "Deleting temp files"
             ("Deleting: " ++ unwords deletees)
             (mapM_ (removeWith dflags removeFile) deletees)
  where
    -- Flat out refuse to delete files that are likely to be source input
    -- files (is there a worse bug than having a compiler delete your source
    -- files?)
    --
    -- Deleting source files is a sign of a bug elsewhere, so prominently flag
    -- the condition.
    warnNon act
      | null non_deletees = act
      | otherwise = do
          putMsg dflags (text "WARNING - NOT deleting source files:"
                         <+> hsep (map text non_deletees))
          act
    (non_deletees, deletees) = partition isHaskellUserSrcFilename fs
-- | Run a removal action on one path, downgrading any IO failure to a -v2
-- trace message (cleanup is best-effort).
removeWith :: DynFlags -> (FilePath -> IO ()) -> FilePath -> IO ()
removeWith dflags remover f = remover f `catchIO`
  (\e ->
     let msg = if isDoesNotExistError e
               then text "Warning: deleting non-existent" <+> text f
               else text "Warning: exception raised when deleting"
                    <+> text f <> colon
                    $$ text (show e)
     in debugTraceMsg dflags 2 msg
  )
#if defined(mingw32_HOST_OS)
-- relies on Int == Int32 on Windows
foreign import ccall unsafe "_getpid" getProcessID :: IO Int
#else
-- | Current OS process id; used to salt temp-directory names (see getTempDir).
getProcessID :: IO Int
getProcessID = System.Posix.Internals.c_getpid >>= return . fromIntegral
#endif
-- The following three functions are from the `temporary` package.
-- | Create and use a temporary directory in the system standard temporary
-- directory.
--
-- Behaves exactly the same as 'withTempDirectory', except that the parent
-- temporary directory will be that returned by 'getTemporaryDirectory'.
-- | Create and use a temporary directory inside the system temp directory
-- (as reported by 'getTemporaryDirectory'); it is removed afterwards.
withSystemTempDirectory :: String             -- ^ Directory name template. See 'openTempFile'.
                        -> (FilePath -> IO a) -- ^ Callback that can use the directory
                        -> IO a
withSystemTempDirectory template action = do
  sysTmp <- getTemporaryDirectory
  withTempDirectory sysTmp template action
-- | Create and use a temporary directory.
--
-- Creates a new temporary directory inside the given directory, making use
-- of the template. The temp directory is deleted after use. For example:
--
-- > withTempDirectory "src" "sdist." $ \tmpDir -> do ...
--
-- The @tmpDir@ will be a new subdirectory of the given directory, e.g.
-- @src/sdist.342@.
-- | Create a fresh template-named subdirectory of the given directory, run
-- the callback on it, and recursively delete it afterwards (delete errors
-- are swallowed).
withTempDirectory :: FilePath             -- ^ Temp directory to create the directory in
                  -> String               -- ^ Directory name template. See 'openTempFile'.
                  -> (FilePath -> IO a)   -- ^ Callback that can use the directory
                  -> IO a
withTempDirectory targetDir template action =
    Exception.bracket acquire release action
  where
    acquire = createTempDirectory targetDir template
    release = ignoringIOErrors . removeDirectoryRecursive
-- | Run an action, silently discarding any 'IOError' it raises.
ignoringIOErrors :: IO () -> IO ()
ignoringIOErrors act = act `catch` swallow
  where
    swallow :: IOError -> IO ()
    swallow _ = return ()
-- | Create a pid-and-counter-named subdirectory of @dir@, retrying with the
-- next counter value while the name is taken.
-- BUG FIX: the `catchIO` previously continued only the `return path`
-- statement, so an isAlreadyExistsError thrown by 'createDirectory' escaped
-- instead of triggering a retry; the create and the return are now guarded
-- together (mirroring mkTempDir in 'getTempDir').
createTempDirectory :: FilePath -> String -> IO FilePath
createTempDirectory dir template = do
  pid <- getProcessID
  findTempName pid
  where findTempName x = do
          let path = dir </> template ++ show x
          (createDirectory path >> return path)
            `catchIO` \e -> if isAlreadyExistsError e
                            then findTempName (x+1) else ioError e
| rahulmutt/ghcvm | compiler/Eta/Main/FileCleanup.hs | bsd-3-clause | 11,215 | 0 | 18 | 2,861 | 2,204 | 1,141 | 1,063 | 193 | 5 |
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveFunctor #-}
module Rpc ( Rpc, rpc, module Rpc.Core ) where
import Control.Applicative
import Control.Monad.Error
import Control.Monad.Trans
import qualified Control.Exception as E
import Rpc.Core
import Error
import Tools.FreezeIOM
-- | The application's Rpc monad: 'RpcM' specialised to 'ApiError', with its
-- capability stack derived via GeneralizedNewtypeDeriving.
newtype Rpc a = Rpc (RpcM ApiError a)
    deriving (Functor, Applicative, Monad, MonadIO, MonadError ApiError, MonadRpc ApiError, FreezeIOM RpcContext (Either ApiError))
rpc ctx (Rpc f) = runRpcM f ctx | jean-edouard/manager | api/Rpc.hs | gpl-2.0 | 484 | 0 | 8 | 67 | 140 | 81 | 59 | 12 | 1 |
{-# OPTIONS_HADDOCK hide #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.BlendingFactor
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- This is a purely internal module for (un-)marshaling BlendingFactor.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.BlendingFactor (
BlendingFactor(..), marshalBlendingFactor, unmarshalBlendingFactor
) where
import Graphics.Rendering.OpenGL.Raw
--------------------------------------------------------------------------------
-- | The source/destination blending factors of the OpenGL blend equation;
-- constructors mirror the GL_* enum names one-for-one.
data BlendingFactor =
     Zero
   | One
   | SrcColor
   | OneMinusSrcColor
   | DstColor
   | OneMinusDstColor
   | SrcAlpha
   | OneMinusSrcAlpha
   | DstAlpha
   | OneMinusDstAlpha
   | ConstantColor
   | OneMinusConstantColor
   | ConstantAlpha
   | OneMinusConstantAlpha
   | SrcAlphaSaturate
   deriving ( Eq, Ord, Show )
-- | Map a 'BlendingFactor' to its raw OpenGL enum value.
marshalBlendingFactor :: BlendingFactor -> GLenum
marshalBlendingFactor Zero = gl_ZERO
marshalBlendingFactor One = gl_ONE
marshalBlendingFactor SrcColor = gl_SRC_COLOR
marshalBlendingFactor OneMinusSrcColor = gl_ONE_MINUS_SRC_COLOR
marshalBlendingFactor DstColor = gl_DST_COLOR
marshalBlendingFactor OneMinusDstColor = gl_ONE_MINUS_DST_COLOR
marshalBlendingFactor SrcAlpha = gl_SRC_ALPHA
marshalBlendingFactor OneMinusSrcAlpha = gl_ONE_MINUS_SRC_ALPHA
marshalBlendingFactor DstAlpha = gl_DST_ALPHA
marshalBlendingFactor OneMinusDstAlpha = gl_ONE_MINUS_DST_ALPHA
marshalBlendingFactor ConstantColor = gl_CONSTANT_COLOR
marshalBlendingFactor OneMinusConstantColor = gl_ONE_MINUS_CONSTANT_COLOR
marshalBlendingFactor ConstantAlpha = gl_CONSTANT_ALPHA
marshalBlendingFactor OneMinusConstantAlpha = gl_ONE_MINUS_CONSTANT_ALPHA
marshalBlendingFactor SrcAlphaSaturate = gl_SRC_ALPHA_SATURATE
-- | Inverse of 'marshalBlendingFactor'; calls 'error' on an enum value that
-- does not correspond to any blending factor.
unmarshalBlendingFactor :: GLenum -> BlendingFactor
unmarshalBlendingFactor x =
   case lookup x enumTable of
      Just factor -> factor
      Nothing -> error ("unmarshalBlendingFactor: illegal value " ++ show x)
   where
   enumTable =
      [ (gl_ZERO, Zero)
      , (gl_ONE, One)
      , (gl_SRC_COLOR, SrcColor)
      , (gl_ONE_MINUS_SRC_COLOR, OneMinusSrcColor)
      , (gl_DST_COLOR, DstColor)
      , (gl_ONE_MINUS_DST_COLOR, OneMinusDstColor)
      , (gl_SRC_ALPHA, SrcAlpha)
      , (gl_ONE_MINUS_SRC_ALPHA, OneMinusSrcAlpha)
      , (gl_DST_ALPHA, DstAlpha)
      , (gl_ONE_MINUS_DST_ALPHA, OneMinusDstAlpha)
      , (gl_CONSTANT_COLOR, ConstantColor)
      , (gl_ONE_MINUS_CONSTANT_COLOR, OneMinusConstantColor)
      , (gl_CONSTANT_ALPHA, ConstantAlpha)
      , (gl_ONE_MINUS_CONSTANT_ALPHA, OneMinusConstantAlpha)
      , (gl_SRC_ALPHA_SATURATE, SrcAlphaSaturate)
      ]
| hesiod/OpenGL | src/Graphics/Rendering/OpenGL/GL/BlendingFactor.hs | bsd-3-clause | 2,554 | 0 | 9 | 436 | 452 | 240 | 212 | 56 | 15 |
{-# LANGUAGE ImplicitParams #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
module Main where
import GHC.Exts
import Data.Type.Equality
type family F x :: Constraint
type instance F Int = (?x :: String)
data Box where MkBox :: (?x :: String) => Box
data Box2 a where MkBox2 :: F a => Box2 a
-- | GHC regression test (#17104): both MkBox and MkBox2 carry a
-- @?x :: String@ constraint (the latter via @F Int@ once Refl fixes
-- @a ~ Int@); which captured ?x is used here is exactly what the test pins
-- down -- do not alter the pattern order.
f :: Box2 a -> Box -> a :~: Int -> String
f MkBox2 MkBox Refl = ?x
main :: IO ()
main = do { let mb = let ?x = "right" in MkBox
          ; let mb2 = let ?x = "wrong" in MkBox2
          ; print (f mb2 mb Refl) }
| sdiehl/ghc | testsuite/tests/typecheck/should_run/T17104.hs | bsd-3-clause | 585 | 0 | 13 | 142 | 194 | 109 | 85 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="id-ID">
  <!-- JavaHelp helpset (Indonesian) for the ZAP "Directory List v1.0" add-on:
       declares the home map plus TOC, index, full-text search and favorites views. -->
  <title>Directory List v1.0</title>
  <maps>
    <homeID>directorylistv1</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset> | kingthorin/zap-extensions | addOns/directorylistv1/src/main/javahelp/help_id_ID/helpset_id_ID.hs | apache-2.0 | 976 | 78 | 66 | 157 | 412 | 209 | 203 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="zh-CN">
  <!-- JavaHelp helpset (Simplified Chinese) for the ZAP "Reveal" extension:
       declares the home map plus TOC, index, full-text search and favorites views. -->
  <title>Reveal | ZAP Extension</title>
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>搜索</label>
    <type>javax.help.SearchView</type>
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
      JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset> | msrader/zap-extensions | src/org/zaproxy/zap/extension/reveal/resources/help_zh_CN/helpset_zh_CN.hs | apache-2.0 | 968 | 82 | 65 | 159 | 411 | 208 | 203 | -1 | -1 |
{-# LANGUAGE QuasiQuotes #-}
module B where
import A
foo:: [aquoter|Int|] -> [aquoter|String|]
foo = show
| shlevy/ghc | testsuite/tests/quasiquotation/T13863/B.hs | bsd-3-clause | 108 | 0 | 5 | 18 | 30 | 22 | 8 | 5 | 1 |
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
{-# LANGUAGE CPP #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Thrift.Protocol.Compact
( module Thrift.Protocol
, CompactProtocol(..)
) where
import Control.Applicative
import Control.Exception ( throw )
import Control.Monad
import Data.Attoparsec.ByteString as P
import Data.Attoparsec.ByteString.Lazy as LP
import Data.Bits
import Data.ByteString.Lazy.Builder as B
import Data.Int
import Data.List as List
import Data.Monoid
import Data.Word
import Data.Text.Lazy.Encoding ( decodeUtf8, encodeUtf8 )
import Thrift.Protocol hiding (versionMask)
import Thrift.Transport
import Thrift.Types
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import qualified Data.HashMap.Strict as Map
import qualified Data.Text.Lazy as LT
-- | the Compact Protocol implements the standard Thrift 'TCompactProcotol'
-- which is similar to the 'TBinaryProtocol', but takes less space on the wire.
-- Integral types are encoded using as varints.
data CompactProtocol a = CompactProtocol a
                         -- ^ Construct a 'CompactProtocol' with a 'Transport'
-- Wire-format constants of the compact protocol; the bit patterns are
-- spelled out in the trailing comments.
protocolID, version, versionMask, typeMask, typeBits :: Word8
protocolID = 0x82 -- 1000 0010
version = 0x01
versionMask = 0x1f -- 0001 1111
typeMask = 0xe0 -- 1110 0000
typeBits = 0x07 -- 0000 0111
-- | The message type occupies the top three bits of the version byte.
typeShiftAmount :: Int
typeShiftAmount = 5
-- Method-by-method implementation of the compact wire format.
instance Protocol CompactProtocol where
    getTransport (CompactProtocol t) = t
    -- Message header: protocol-id byte, combined version/type byte,
    -- zig-zag varint sequence id, then the method name as a TString.
    writeMessageBegin p (n, t, s) = tWrite (getTransport p) $ toLazyByteString $
      B.word8 protocolID <>
      B.word8 ((version .&. versionMask) .|.
              (((fromIntegral $ fromEnum t) `shiftL`
                typeShiftAmount) .&. typeMask)) <>
      buildVarint (i32ToZigZag s) <>
      buildCompactValue (TString $ encodeUtf8 n)
    readMessageBegin p = runParser p $ do
      pid <- fromIntegral <$> P.anyWord8
      when (pid /= protocolID) $ error "Bad Protocol ID"
      w <- fromIntegral <$> P.anyWord8
      let ver = w .&. versionMask
      when (ver /= version) $ error "Bad Protocol version"
      let typ = (w `shiftR` typeShiftAmount) .&. typeBits
      seqId <- parseVarint zigZagToI32
      TString name <- parseCompactValue T_STRING
      return (decodeUtf8 name, toEnum $ fromIntegral $ typ, seqId)
    serializeVal _ = toLazyByteString . buildCompactValue
    -- NOTE(review): malformed input surfaces via 'error' here rather than
    -- an Either; callers cannot recover from a bad frame.
    deserializeVal _ ty bs =
      case LP.eitherResult $ LP.parse (parseCompactValue ty) bs of
        Left s -> error s
        Right val -> val
    readVal p ty = runParser p $ parseCompactValue ty
-- | Writing Functions
buildCompactValue :: ThriftVal -> Builder
buildCompactValue (TStruct fields) = buildCompactStruct fields
-- Maps: an empty map is the single byte 0x00; otherwise a varint size,
-- one byte packing key type (high nibble) and value type (low nibble),
-- then the entries.
buildCompactValue (TMap kt vt entries) =
  let len = fromIntegral $ length entries :: Word32 in
    if len == 0
    then B.word8 0x00
    else buildVarint len <>
         B.word8 (fromTType kt `shiftL` 4 .|. fromTType vt) <>
         buildCompactMap entries
-- Lists: sizes < 15 are packed into the high nibble of the header byte;
-- longer lists use nibble 0xF plus a varint size.
buildCompactValue (TList ty entries) =
  let len = length entries in
    (if len < 15
     then B.word8 $ (fromIntegral len `shiftL` 4) .|. fromTType ty
     else B.word8 (0xF0 .|. fromTType ty) <>
          buildVarint (fromIntegral len :: Word32)) <>
    buildCompactList entries
buildCompactValue (TSet ty entries) = buildCompactValue (TList ty entries)
buildCompactValue (TBool b) =
  B.word8 $ toEnum $ if b then 1 else 0
buildCompactValue (TByte b) = int8 b
buildCompactValue (TI16 i) = buildVarint $ i16ToZigZag i
buildCompactValue (TI32 i) = buildVarint $ i32ToZigZag i
buildCompactValue (TI64 i) = buildVarint $ i64ToZigZag i
buildCompactValue (TDouble d) = doubleLE d
buildCompactValue (TString s) = buildVarint len <> lazyByteString s
  where
    len = fromIntegral (LBS.length s) :: Word32
buildCompactValue (TBinary s) = buildCompactValue (TString s)
-- | Serialize a struct. Field ids are delta-encoded: a delta of 1..15 is
-- packed into the high nibble of the type byte, otherwise the full id
-- follows as a zig-zag varint. Bools contribute no payload byte (the
-- value lives in the type nibble, see 'typeOf').
buildCompactStruct :: Map.HashMap Int16 (LT.Text, ThriftVal) -> Builder
buildCompactStruct = flip (loop 0) mempty . Map.toList
  where
    loop _ [] acc = acc <> B.word8 (fromTType T_STOP)
    loop lastId ((fid, (_,val)) : fields) acc = loop fid fields $ acc <>
      (if fid > lastId && fid - lastId <= 15
       then B.word8 $ fromIntegral ((fid - lastId) `shiftL` 4) .|. typeOf val
       else B.word8 (typeOf val) <> buildVarint (i16ToZigZag fid)) <>
      (if typeOf val > 0x02 -- Not a T_BOOL
       then buildCompactValue val
       else mempty) -- T_BOOLs are encoded in the type
-- | Serialize map entries. The accumulator is appended after each pair
-- ('<> s'), so entries are emitted in reverse list order -- presumably
-- acceptable since map entry order is not significant on the wire.
buildCompactMap :: [(ThriftVal, ThriftVal)] -> Builder
buildCompactMap = foldl combine mempty
  where
    combine s (key, val) = buildCompactValue key <> buildCompactValue val <> s
-- | Serialize list elements in order.
buildCompactList :: [ThriftVal] -> Builder
buildCompactList = foldr (mappend . buildCompactValue) mempty
-- | Reading Functions
-- Parse one value of the given declared type; mirrors 'buildCompactValue'.
parseCompactValue :: ThriftType -> Parser ThriftVal
parseCompactValue (T_STRUCT tmap) = TStruct <$> parseCompactStruct tmap
parseCompactValue (T_MAP kt' vt') = do
  n <- parseVarint id
  if n == 0
    then return $ TMap kt' vt' []
    else do
      -- Key/value types are packed into one byte: key in the high nibble,
      -- value in the low nibble.
      w <- P.anyWord8
      let kt = typeFrom $ w `shiftR` 4
          vt = typeFrom $ w .&. 0x0F
      TMap kt vt <$> parseCompactMap kt vt n
parseCompactValue (T_LIST ty) = TList ty <$> parseCompactList
parseCompactValue (T_SET ty) = TSet ty <$> parseCompactList
parseCompactValue T_BOOL = TBool . (/=0) <$> P.anyWord8
parseCompactValue T_BYTE = TByte . fromIntegral <$> P.anyWord8
parseCompactValue T_I16 = TI16 <$> parseVarint zigZagToI16
parseCompactValue T_I32 = TI32 <$> parseVarint zigZagToI32
parseCompactValue T_I64 = TI64 <$> parseVarint zigZagToI64
parseCompactValue T_DOUBLE = TDouble . bsToDoubleLE <$> P.take 8
parseCompactValue T_STRING = parseCompactString TString
parseCompactValue T_BINARY = parseCompactString TBinary
parseCompactValue ty = error $ "Cannot read value of type " ++ show ty
-- | Parse a varint length prefix followed by that many raw bytes, and wrap
-- the resulting lazy 'LBS.ByteString' with the given constructor
-- ('TString' or 'TBinary').
parseCompactString :: (LBS.ByteString -> ThriftVal) -> Parser ThriftVal
parseCompactString ty = do
  len :: Word32 <- parseVarint id
  ty . LBS.fromStrict <$> P.take (fromIntegral len)
-- | Parse struct fields until the T_STOP byte (0x00). Field ids are
-- delta-encoded (delta in the high nibble when non-zero, otherwise a full
-- zig-zag varint id); bool values are carried in the type nibble itself.
parseCompactStruct :: TypeMap -> Parser (Map.HashMap Int16 (LT.Text, ThriftVal))
parseCompactStruct tmap = Map.fromList <$> parseFields 0
  where
    parseFields :: Int16 -> Parser [(Int16, (LT.Text, ThriftVal))]
    parseFields lastId = do
      w <- P.anyWord8
      if w == 0x00
        then return []
        else do
          let ty = typeFrom (w .&. 0x0F)
              modifier = (w .&. 0xF0) `shiftR` 4
          fid <- if modifier /= 0
                 then return (lastId + fromIntegral modifier)
                 else parseVarint zigZagToI16
          val <- if ty == T_BOOL
                 then return (TBool $ (w .&. 0x0F) == 0x01)
                 else case (ty, Map.lookup fid tmap) of
                   -- Strings and binaries share wire type 0x08; the schema
                   -- (tmap) disambiguates them.
                   (T_STRING, Just (_, T_BINARY)) -> parseCompactValue T_BINARY
                   _ -> parseCompactValue ty
          -- Field names are not on the wire, hence the LT.empty placeholder.
          ((fid, (LT.empty, val)) : ) <$> parseFields fid
-- | Parse exactly @n@ key/value pairs of the given types.
parseCompactMap :: ThriftType -> ThriftType -> Int32 ->
                   Parser [(ThriftVal, ThriftVal)]
parseCompactMap kt vt n | n <= 0 = return []
                        | otherwise = do
  k <- parseCompactValue kt
  v <- parseCompactValue vt
  ((k,v) :) <$> parseCompactMap kt vt (n-1)
-- | Parse a list header (element type in the low nibble, size in the high
-- nibble, or a varint size when the nibble is 0xF) followed by the elements.
parseCompactList :: Parser [ThriftVal]
parseCompactList = do
  w <- P.anyWord8
  let ty = typeFrom $ w .&. 0x0F
      lsize = w `shiftR` 4
  size <- if lsize == 0xF
          then parseVarint id
          else return $ fromIntegral lsize
  loop ty size
  where
    loop :: ThriftType -> Int32 -> Parser [ThriftVal]
    loop ty n | n <= 0 = return []
              | otherwise = liftM2 (:) (parseCompactValue ty)
                                       (loop ty (n-1))
-- Zig-zag encoding maps signed values onto unsigned ones so that small
-- magnitudes produce short varints: 0,-1,1,-2,... become 0,1,2,3,...
-- | Encode a signed 16-bit value into its zig-zag representation.
i16ToZigZag :: Int16 -> Word16
i16ToZigZag v = fromIntegral (xor (shiftL v 1) (shiftR v 15))
-- | Decode a zig-zag 16-bit value; the low bit carries the sign.
zigZagToI16 :: Word16 -> Int16
zigZagToI16 z = fromIntegral (xor (shiftR z 1) (negate (z .&. 0x1)))
-- | Encode a signed 32-bit value into its zig-zag representation.
i32ToZigZag :: Int32 -> Word32
i32ToZigZag v = fromIntegral (xor (shiftL v 1) (shiftR v 31))
-- | Decode a zig-zag 32-bit value; the low bit carries the sign.
zigZagToI32 :: Word32 -> Int32
zigZagToI32 z = fromIntegral (xor (shiftR z 1) (negate (z .&. 0x1)))
-- | Encode a signed 64-bit value into its zig-zag representation.
i64ToZigZag :: Int64 -> Word64
i64ToZigZag v = fromIntegral (xor (shiftL v 1) (shiftR v 63))
-- | Decode a zig-zag 64-bit value; the low bit carries the sign.
zigZagToI64 :: Word64 -> Int64
zigZagToI64 z = fromIntegral (xor (shiftR z 1) (negate (z .&. 0x1)))
-- | Emit an unsigned varint: 7 payload bits per byte, least-significant
-- group first, high bit set on every byte except the last.
buildVarint :: (Bits a, Integral a) => a -> Builder
buildVarint n | n .&. complement 0x7F == 0 = B.word8 $ fromIntegral n
              | otherwise = B.word8 (0x80 .|. (fromIntegral n .&. 0x7F)) <>
                            buildVarint (n `shiftR` 7)
-- | Parse a varint and post-process it with the given function (identity
-- for unsigned values, a zigZagTo* decoder for signed ones).
-- 'takeTill' collects the continuation bytes (bit 7 set); 'anyWord8' grabs
-- the final, most-significant byte; the fold then combines the groups
-- most-significant first.
parseVarint :: (Bits a, Integral a, Ord a) => (a -> b) -> Parser b
parseVarint fromZigZag = do
  bytestemp <- BS.unpack <$> P.takeTill (not . flip testBit 7)
  lsb <- P.anyWord8
  let bytes = lsb : List.reverse bytestemp
  return $ fromZigZag $ List.foldl' combine 0x00 bytes
  where combine a b = (a `shiftL` 7) .|. (fromIntegral b .&. 0x7f)
-- | Compute the Compact Type
-- Note: T_STRING and T_BINARY share wire type 0x08.
fromTType :: ThriftType -> Word8
fromTType ty = case ty of
  T_STOP -> 0x00
  T_BOOL -> 0x01
  T_BYTE -> 0x03
  T_I16 -> 0x04
  T_I32 -> 0x05
  T_I64 -> 0x06
  T_DOUBLE -> 0x07
  T_STRING -> 0x08
  T_BINARY -> 0x08
  T_LIST{} -> 0x09
  T_SET{} -> 0x0A
  T_MAP{} -> 0x0B
  T_STRUCT{} -> 0x0C
  T_VOID -> error "No Compact type for T_VOID"
-- | Wire type of a concrete value. Booleans encode their value in the
-- type itself: 0x01 for True, 0x02 for False (used by struct fields).
typeOf :: ThriftVal -> Word8
typeOf v = case v of
  TBool True -> 0x01
  TBool False -> 0x02
  TByte _ -> 0x03
  TI16 _ -> 0x04
  TI32 _ -> 0x05
  TI64 _ -> 0x06
  TDouble _ -> 0x07
  TString _ -> 0x08
  TBinary _ -> 0x08
  TList{} -> 0x09
  TSet{} -> 0x0A
  TMap{} -> 0x0B
  TStruct{} -> 0x0C
-- | Inverse of 'typeOf' / 'fromTType'. Both boolean codes map back to
-- T_BOOL; container element types are unknown at this point (T_VOID /
-- empty map placeholders).
typeFrom :: Word8 -> ThriftType
typeFrom w = case w of
  0x01 -> T_BOOL
  0x02 -> T_BOOL
  0x03 -> T_BYTE
  0x04 -> T_I16
  0x05 -> T_I32
  0x06 -> T_I64
  0x07 -> T_DOUBLE
  0x08 -> T_STRING
  0x09 -> T_LIST T_VOID
  0x0A -> T_SET T_VOID
  0x0B -> T_MAP T_VOID T_VOID
  0x0C -> T_STRUCT Map.empty
  n -> error $ "typeFrom: " ++ show n ++ " is not a compact type"
| jcgruenhage/dendrite | vendor/src/github.com/apache/thrift/lib/hs/src/Thrift/Protocol/Compact.hs | apache-2.0 | 10,701 | 0 | 19 | 2,432 | 3,338 | 1,738 | 1,600 | 235 | 14 |
-----------------------------------------------------------------------------
--
-- Module : Language.PureScript.Sugar
-- Copyright : (c) Phil Freeman 2013
-- License : MIT
--
-- Maintainer : Phil Freeman <paf31@cantab.net>
-- Stability : experimental
-- Portability :
--
-- |
-- Desugaring passes
--
-----------------------------------------------------------------------------
{-# LANGUAGE FlexibleContexts #-}
module Language.PureScript.Sugar (desugar, module S) where
import Prelude ()
import Prelude.Compat
import Control.Monad
import Control.Category ((>>>))
import Control.Monad.Error.Class (MonadError())
import Control.Monad.Writer.Class (MonadWriter())
import Control.Monad.Supply.Class
import Language.PureScript.AST
import Language.PureScript.Errors
import Language.PureScript.Externs
import Language.PureScript.Sugar.BindingGroups as S
import Language.PureScript.Sugar.CaseDeclarations as S
import Language.PureScript.Sugar.DoNotation as S
import Language.PureScript.Sugar.Names as S
import Language.PureScript.Sugar.ObjectWildcards as S
import Language.PureScript.Sugar.Operators as S
import Language.PureScript.Sugar.TypeClasses as S
import Language.PureScript.Sugar.TypeClasses.Deriving as S
import Language.PureScript.Sugar.TypeDeclarations as S
-- |
-- The desugaring pipeline proceeds as follows:
--
-- * Remove signed literals in favour of `negate` applications
--
-- * Desugar object literals with wildcards into lambdas
--
-- * Desugar operator sections
--
-- * Desugar do-notation using the @Prelude.Monad@ type class
--
-- * Desugar top-level case declarations into explicit case expressions
--
-- * Desugar type declarations into value declarations with explicit type annotations
--
-- * Qualify any unqualified names and types
--
-- * Rebracket user-defined binary operators
--
-- * Introduce type synonyms for type class dictionaries
--
-- * Group mutually recursive value and data declarations into binding groups.
--
desugar :: (Applicative m, MonadSupply m, MonadError MultipleErrors m, MonadWriter MultipleErrors m) => [ExternsFile] -> [Module] -> m [Module]
-- The single pure pass comes first, fed into the monadic passes via (>>>);
-- the monadic passes themselves are chained with Kleisli composition (>=>)
-- in exactly the order listed in the comment above.
desugar externs =
  map removeSignedLiterals
    >>> traverse desugarObjectConstructors
    >=> traverse desugarOperatorSections
    >=> traverse desugarDoModule
    >=> desugarCasesModule
    >=> desugarTypeDeclarationsModule
    >=> desugarImports externs
    >=> rebracket externs
    >=> traverse deriveInstances
    >=> desugarTypeClasses externs
    >=> createBindingGroupsModule
| michaelficarra/purescript | src/Language/PureScript/Sugar.hs | mit | 2,512 | 0 | 15 | 348 | 347 | 230 | 117 | 34 | 1 |
module Main where
import qualified Data.List as List
import Control.Arrow ((>>>))
import PositionalParser (prepare, consume, finish)
-- Using Currying for customizing
-- each field size
-- Using Currying for customizing
-- each field size
-- (widths 5 + 6 + 6 = 17 characters, matching the sample record in 'main')
consumeFirst = consume 5
consumeSecond = consume 6
consumeThird = consume 6
-- Composing the parsing function.
-- The (>>>) operator works like the inverse order of function composition operator (.)
-- In other words: (g . f) == (f >>> g)
-- This works similar to F#, Elixir's and Elm Pipe Operator '|>'
parseLine = prepare >>> consumeFirst >>> consumeSecond >>> consumeThird >>> finish
-- | Parse one fixed-width record and print it as a single CSV line:
-- the fields joined with ";" and terminated by a newline (same output as
-- the previous intersperse-and-mapM version, via putStrLn).
main :: IO ()
main = do
  let line = "25101170007089280"
  let fields = parseLine line -- This yields to a list of fields :: [String]
  putStrLn (List.intercalate ";" fields)
| hbobenicio/haskell-examples | positional-parser/Main.hs | mit | 864 | 0 | 11 | 153 | 152 | 86 | 66 | 13 | 1 |
{-# LANGUAGE JavaScriptFFI #-}
-- | A wrapper over the Electron debugging API, as documented
-- <https://electron.atom.io/docs/api/debugger here>.
--
-- Up-to-date documentation on the Chrome DevTools Protocol can be seen
-- <https://chromedevtools.github.io/devtools-protocol here>.
module GHCJS.Electron.Debugger
( Debugger (..)
, unsafeGetDebugger
, unsafeAttach
, unsafeAttachWithVersion
, unsafeDetach
, unsafeSendCommand
) where
import Data.Text (Text)
import GHCJS.Types
import GHCJS.Electron.Types
import JavaScript.Object
-- | An Electron @debugger@ object.
-- Opaque wrapper around the underlying JavaScript value.
newtype Debugger
  = MkDebugger JSVal
-- NOTE(review): sketch of a typed event payload for the debugger API,
-- kept commented out; nothing consumes it yet -- TODO confirm intent.
-- data DebuggerEvent
--   = DebuggerDetach
--     { event  :: !Event
--     , reason :: !Text
--     }
--   | DebuggerMessage
--     { event  :: !Event
--     , method :: !Text
--     , params :: !Object
--     }
-- | Get the 'Debugger' object for a given 'BrowserWindow'.
-- NOTE(review): the Haskell names say @unsafe*@ while the FFI calls are
-- declared @safe@; "unsafe" here presumably refers to unchecked JS-side
-- errors rather than the FFI safety level -- confirm.
foreign import javascript safe
  "$r = $1.webContents.debugger;"
  unsafeGetDebugger :: BrowserWindow -> IO Debugger
-- | Attaches the given 'Debugger' to the @webContents@.
foreign import javascript safe
  "$1.attach();"
  unsafeAttach :: Debugger -> IO ()
-- | Attaches the given 'Debugger' to the @webContents@.
foreign import javascript safe
  "$1.attach($2);"
  unsafeAttachWithVersion :: Debugger
                          -> JSString
                          -- ^ The requested debugging protocol version.
                          -> IO ()
-- | Detaches the given 'Debugger' from the @webContents@.
foreign import javascript safe
  "$1.detach();"
  unsafeDetach :: Debugger -> IO ()
-- | Send the given command to the debugging target.
-- The callback follows the Node-style @(error, result)@ convention.
--
-- Up-to-date documentation on the Chrome DevTools Protocol can be seen
-- <https://chromedevtools.github.io/devtools-protocol here>.
foreign import javascript safe
  "$1.sendCommand($2, $3, $4);"
  unsafeSendCommand :: Debugger
                    -> JSString
                    -- ^ The method name (as defined in the Chrome DevTools
                    -- Protocol) to call.
                    -> Object
                    -- ^ A JSON object containing request parameters.
                    -> Callback (JSVal -> JSVal -> IO ())
                    -- ^ The callback that will be run when the method returns.
                    --
                    -- The first parameter of the callback contains any
                    -- relevant error information, and the second parameter
                    -- contains any returned data.
                    -> IO ()
| taktoa/ghcjs-electron | src/GHCJS/Electron/Debugger.hs | mit | 2,590 | 16 | 10 | 777 | 251 | 152 | 99 | 30 | 0 |
import System.Process
import System.Directory
import Data.Function
import Data.List
import Data.Maybe
-- | Interpret the external program's sentinel value: -1 means "no answer",
-- anything else is a valid result.
maybemizaInput :: Integer -> Maybe Integer
maybemizaInput (-1) = Nothing
maybemizaInput x = Just x
-- | Run the external binary @n@ times with the same @input@ argument and
-- collect one result per run; each run's printed numbers are summed and a
-- sum of -1 is mapped to Nothing by 'maybemizaInput'.
-- NOTE(review): the binary path is hard-coded and 'read' will crash on any
-- non-numeric output token.
chamarNVezes :: Integer -> Integer -> IO [Maybe Integer]
chamarNVezes n input =
  if n == 0 then
    return []
  else
    do
      output <- readProcess "/home/vitor/hw-verilog/C++/Debug/hw-verilog" [show input] ""
      let listOutput = map read (words output) :: [Integer]
      recursiveCall <- chamarNVezes (n - 1) input
      return $ maybemizaInput (sum listOutput) : recursiveCall
-- | Arithmetic mean of a sample; Nothing for an empty sample.
media :: Fractional a => [a] -> Maybe a
media [] = Nothing
media lista = Just (sum lista / fromIntegral (length lista))
-- | Sample standard deviation (Bessel's correction: n-1 denominator).
-- Requires at least two observations; otherwise Nothing.
-- The mean is computed locally, which removes the previous partial
-- 'fromJust' call on 'media'.
desvioPadrao :: Floating a => [a] -> Maybe a
desvioPadrao [] = Nothing
desvioPadrao [_] = Nothing
desvioPadrao lista = Just (sqrt (somaQuadrados / fromIntegral (length lista - 1)))
  where
    mediaLista = sum lista / fromIntegral (length lista)
    somaQuadrados = sum [(x - mediaLista) ^ (2 :: Int) | x <- lista]
-- | Total counterparts of 'minimum' / 'maximum': Nothing on an empty list.
minimum_ :: Ord a => [a] -> Maybe a
minimum_ [] = Nothing
minimum_ lista = Just (minimum lista)
maximum_ :: Ord a => [a] -> Maybe a
maximum_ [] = Nothing
maximum_ lista = Just (maximum lista)
-- | Render a statistic for the report, or the placeholder "N/A" when the
-- statistic is undefined.
formatarMaybe :: Show a => Maybe a -> String
formatarMaybe = maybe "N/A" show
-- | Print one table row: max generations, mean/min/max/stddev of the
-- successful runs, and the success rate (fraction of Just values).
printarEstatisticas :: Integer -> [Maybe Float] -> IO ()
printarEstatisticas max_geracoes lista_amostras = let
    respostasCorretas = catMaybes lista_amostras
    taxaSucesso = fromIntegral (length respostasCorretas) / fromIntegral (length lista_amostras)
    output = [Just (fromInteger max_geracoes), media respostasCorretas,
              minimum_ respostasCorretas, maximum_ respostasCorretas,
              desvioPadrao respostasCorretas, Just taxaSucesso]
  in
    putStr $ formatar (map formatarMaybe output) ++ "\n"
-- | Join table cells with the column separator used by the report.
formatar :: [String] -> String
formatar = intercalate " | "
-- | Horizontal rule printed under the header row.
linhas :: String
linhas = "-------------------------------------------"
-- | Print the header and then one statistics row per max-generation
-- setting, each based on 'amostras' runs of the external binary.
main = do
  putStr $ formatar ["Max ger.", "Media", "Menor", "Maior", "Desvio Padrao", "Taxa sucesso"] ++ "\n"
  putStr $ linhas ++ "\n"
  setCurrentDirectory "/home/vitor/hw-verilog/C++"
  let maxGeracoes = [2000, 5000, 10000]
  let amostras = 20
  multipleOutputs <- mapM (chamarNVezes amostras) maxGeracoes
  mapM_ (\(max_g, mul_o) -> printarEstatisticas max_g (map (fmap fromInteger) mul_o)) (zip maxGeracoes multipleOutputs)
| VitorCBSB/hw-verilog | statistics.hs | mit | 2,019 | 18 | 16 | 325 | 725 | 360 | 365 | 48 | 2 |
module PropLogic(Var, Prop(..), parse) where
import Parsing (Parser(..), symbol, identifier, (+++))
type Var = String

-- | Well-formed propositional formulas over atomic variables.
data Prop = Atom Var
          | Not Prop
          | Imply Prop Prop
          | Or Prop Prop
          | And Prop Prop
          | Iff Prop Prop
          deriving (Eq)

-- | Fully parenthesised rendering using the parser's concrete syntax,
-- so that 'show' output can be fed back to 'parse'.
instance Show Prop where
  show (Atom name)  = name
  show (Not p)      = "(-" ++ show p ++ ")"
  show (Imply p q)  = showConnective " => " p q
  show (Or p q)     = showConnective " \\/ " p q
  show (And p q)    = showConnective " /\\ " p q
  show (Iff p q)    = showConnective " <=> " p q

-- | Render a binary connective in infix position, wrapped in parentheses.
showConnective :: String -> Prop -> Prop -> String
showConnective op p q = "(" ++ show p ++ op ++ show q ++ ")"
-- | Parse an atomic proposition (an identifier).
parseAtom :: Parser Prop
parseAtom = do x <- identifier
               return (Atom x)

-- | Shared shape of every binary connective: "(" lhs <op> rhs ")".
-- Factors out the five previously duplicated parsers.
parseBinary :: String -> (Prop -> Prop -> Prop) -> Parser Prop
parseBinary op con = do symbol "("
                        wf1 <- parsePL
                        symbol op
                        wf2 <- parsePL
                        symbol ")"
                        return (con wf1 wf2)

parseImply :: Parser Prop
parseImply = parseBinary "=>" Imply

-- | Negation keeps its own shape: "(" "-" wf ")".
parseNot :: Parser Prop
parseNot = do symbol "("
              symbol "-"
              wf <- parsePL
              symbol ")"
              return (Not wf)

parseOr :: Parser Prop
parseOr = parseBinary "\\/" Or

parseAnd :: Parser Prop
parseAnd = parseBinary "/\\" And

parseIff :: Parser Prop
parseIff = parseBinary "<=>" Iff
-- | Try each syntactic form in turn ((+++) is parser choice).
parsePL :: Parser Prop
parsePL = parseAtom +++ parseImply +++ parseNot +++
          parseOr +++ parseAnd +++ parseIff
-- | Unwrap the parser's underlying function.
deP :: Parser Prop -> String -> [(Prop, String)]
deP (P x) = x
-- | Accept the first successful parse; leftover input ('rest') is
-- discarded rather than rejected -- NOTE(review): trailing junk is ignored.
parse :: String -> Maybe Prop
parse input = case (deP parsePL input) of
                ((wf,rest):_) -> Just wf
                [] -> Nothing
| JoeLoser/CS4450-Principles-of-Programming | homeworks/hw4/PropLogic.hs | mit | 2,147 | 0 | 10 | 886 | 748 | 362 | 386 | 63 | 2 |
-- |
-- Module: Math.NumberTheory.Moduli.Singleton
-- Copyright: (c) 2019 Andrew Lelechenko
-- Licence: MIT
-- Maintainer: Andrew Lelechenko <andrew.lelechenko@gmail.com>
--
-- Singleton data types.
--
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE ViewPatterns #-}
module Math.NumberTheory.Moduli.Singleton
( -- * SFactors singleton
SFactors
, sfactors
, someSFactors
, unSFactors
, proofFromSFactors
-- * CyclicGroup singleton
, CyclicGroup
, cyclicGroup
, cyclicGroupFromFactors
, cyclicGroupFromModulo
, proofFromCyclicGroup
, pattern CG2
, pattern CG4
, pattern CGOddPrimePower
, pattern CGDoubleOddPrimePower
-- * SFactors \<=\> CyclicGroup
, cyclicGroupToSFactors
, sfactorsToCyclicGroup
-- * Some wrapper
, Some(..)
) where
import Control.DeepSeq
import Data.Constraint
import Data.Kind
import Data.List (sort)
import qualified Data.Map as M
import Data.Proxy
#if __GLASGOW_HASKELL__ < 803
import Data.Semigroup
#endif
import GHC.Generics
import GHC.TypeNats (KnownNat, Nat, natVal)
import Numeric.Natural
import Unsafe.Coerce
import Math.NumberTheory.Roots (highestPower)
import Math.NumberTheory.Primes
import Math.NumberTheory.Primes.Types
-- | Wrapper to hide an unknown type-level natural.
data Some (a :: Nat -> Type) where
  Some :: a m -> Some a
-- | From "Data.Constraint.Nat".
newtype Magic n = Magic (KnownNat n => Dict (KnownNat n))
-- | This singleton data type establishes a correspondence
-- between a modulo @m@ on type level
-- and its factorisation on term level.
newtype SFactors a (m :: Nat) = SFactors
  { unSFactors :: [(Prime a, Word)]
  -- ^ Factors of @m@.
  } deriving (Show, Generic)
-- Any two 'SFactors' at the same type-level index @m@ describe the same
-- factorisation, so comparison at a fixed @m@ is trivially constant.
instance Eq (SFactors a m) where
  _ == _ = True
instance Ord (SFactors a m) where
  _ `compare` _ = EQ
instance NFData a => NFData (SFactors a m)
-- For existentially wrapped values the index is hidden, so compare the
-- underlying factor lists instead.
instance Ord a => Eq (Some (SFactors a)) where
  Some (SFactors xs) == Some (SFactors ys) =
    xs == ys
instance Ord a => Ord (Some (SFactors a)) where
  Some (SFactors xs) `compare` Some (SFactors ys) =
    xs `compare` ys
instance Show a => Show (Some (SFactors a)) where
  showsPrec p (Some x) = showsPrec p x
instance NFData a => NFData (Some (SFactors a)) where
  rnf (Some x) = rnf x
-- | Create a singleton from a type-level positive modulo @m@,
-- passed in a constraint.
--
-- >>> :set -XDataKinds
-- >>> sfactors :: SFactors Integer 13
-- SFactors {unSFactors = [(Prime 13,1)]}
sfactors :: forall a m. (Ord a, UniqueFactorisation a, KnownNat m) => SFactors a m
sfactors = if m == 0
  then error "sfactors: modulo must be positive"
  else SFactors (sort (factorise m))
  where
    m = fromIntegral (natVal (Proxy :: Proxy m))
-- | Create a singleton from factors of @m@.
-- Factors must be distinct, as in output of 'factorise'.
--
-- >>> import Math.NumberTheory.Primes
-- >>> someSFactors (factorise 98)
-- SFactors {unSFactors = [(Prime 2,1),(Prime 7,2)]}
someSFactors :: (Ord a, Num a) => [(Prime a, Word)] -> Some (SFactors a)
someSFactors
  = Some
  . SFactors
  -- Just a precaution against ill-formed lists of factors
  . M.assocs
  . M.fromListWith (+)
-- | Convert a singleton to a proof that @m@ is known. Usage example:
--
-- > toModulo :: SFactors Integer m -> Natural
-- > toModulo t = case proofFromSFactors t of Sub Dict -> natVal t
-- Implementation note: 'Magic' + 'unsafeCoerce' conjure the KnownNat
-- dictionary from the term-level product of the factors; do not alter.
proofFromSFactors :: Integral a => SFactors a m -> (() :- KnownNat m)
proofFromSFactors (SFactors fs) = Sub $ unsafeCoerce (Magic Dict) (fromIntegral (factorBack fs) :: Natural)
-- | This singleton data type establishes a correspondence
-- between a modulo @m@ on type level
-- and a cyclic group of the same order on term level.
-- The four constructors enumerate exactly the moduli whose multiplicative
-- group of residues is cyclic.
data CyclicGroup a (m :: Nat)
  = CG2' -- ^ Residues modulo 2.
  | CG4' -- ^ Residues modulo 4.
  | CGOddPrimePower' (Prime a) Word
  -- ^ Residues modulo @p@^@k@ for __odd__ prime @p@.
  | CGDoubleOddPrimePower' (Prime a) Word
  -- ^ Residues modulo 2@p@^@k@ for __odd__ prime @p@.
  deriving (Show, Generic)
-- At a fixed index @m@ the value is uniquely determined, so comparison
-- is trivially constant.
instance Eq (CyclicGroup a m) where
  _ == _ = True
instance Ord (CyclicGroup a m) where
  _ `compare` _ = EQ
instance NFData a => NFData (CyclicGroup a m)
instance Eq a => Eq (Some (CyclicGroup a)) where
  Some CG2' == Some CG2' = True
  Some CG4' == Some CG4' = True
  Some (CGOddPrimePower' p1 k1) == Some (CGOddPrimePower' p2 k2) =
    p1 == p2 && k1 == k2
  Some (CGDoubleOddPrimePower' p1 k1) == Some (CGDoubleOddPrimePower' p2 k2) =
    p1 == p2 && k1 == k2
  _ == _ = False
-- Constructor order: CG2' < CG4' < CGOddPrimePower' < CGDoubleOddPrimePower',
-- with (prime, exponent) compared lexicographically within a constructor.
instance Ord a => Ord (Some (CyclicGroup a)) where
  compare (Some x) (Some y) = case x of
    CG2' -> case y of
      CG2' -> EQ
      _ -> LT
    CG4' -> case y of
      CG2' -> GT
      CG4' -> EQ
      _ -> LT
    CGOddPrimePower' p1 k1 -> case y of
      CGDoubleOddPrimePower'{} -> LT
      CGOddPrimePower' p2 k2 ->
        p1 `compare` p2 <> k1 `compare` k2
      _ -> GT
    CGDoubleOddPrimePower' p1 k1 -> case y of
      CGDoubleOddPrimePower' p2 k2 ->
        p1 `compare` p2 <> k1 `compare` k2
      _ -> GT
instance Show a => Show (Some (CyclicGroup a)) where
  showsPrec p (Some x) = showsPrec p x
instance NFData a => NFData (Some (CyclicGroup a)) where
  rnf (Some x) = rnf x
-- | Create a singleton from a type-level positive modulo @m@,
-- passed in a constraint.
--
-- >>> :set -XDataKinds
-- >>> import Data.Maybe
-- >>> cyclicGroup :: Maybe (CyclicGroup Integer 169)
-- Just (CGOddPrimePower' (Prime 13) 2)
--
-- >>> :set -XTypeOperators -XNoStarIsType
-- >>> import GHC.TypeNats
-- >>> sfactorsToCyclicGroup (sfactors :: SFactors Integer 4)
-- Just CG4'
-- >>> sfactorsToCyclicGroup (sfactors :: SFactors Integer (2 * 13 ^ 3))
-- Just (CGDoubleOddPrimePower' (Prime 13) 3)
-- >>> sfactorsToCyclicGroup (sfactors :: SFactors Integer (4 * 13))
-- Nothing
cyclicGroup
  :: forall a m.
     (Integral a, UniqueFactorisation a, KnownNat m)
  => Maybe (CyclicGroup a m)
cyclicGroup = fromModuloInternal m
  where
    m = fromIntegral (natVal (Proxy :: Proxy m))
-- | Create a singleton from factors.
-- Factors must be distinct, as in output of 'factorise'.
-- Both orders of the [2, odd prime power] pair are accepted.
cyclicGroupFromFactors
  :: (Eq a, Num a)
  => [(Prime a, Word)]
  -> Maybe (Some (CyclicGroup a))
cyclicGroupFromFactors = \case
  [(unPrime -> 2, 1)] -> Just $ Some CG2'
  [(unPrime -> 2, 2)] -> Just $ Some CG4'
  [(unPrime -> 2, _)] -> Nothing
  [(p, k)] -> Just $ Some $ CGOddPrimePower' p k
  [(unPrime -> 2, 1), (p, k)] -> Just $ Some $ CGDoubleOddPrimePower' p k
  [(p, k), (unPrime -> 2, 1)] -> Just $ Some $ CGDoubleOddPrimePower' p k
  _ -> Nothing
-- | Similar to 'cyclicGroupFromFactors' . 'factorise',
-- but much faster, because it performs only one primality test
-- instead of a full factorisation.
cyclicGroupFromModulo
  :: (Integral a, UniqueFactorisation a)
  => a
  -> Maybe (Some (CyclicGroup a))
cyclicGroupFromModulo = fmap Some . fromModuloInternal
-- Shared worker: classify a modulus directly (2, 4, 2*p^k, p^k).
fromModuloInternal
  :: (Integral a, UniqueFactorisation a)
  => a
  -> Maybe (CyclicGroup a m)
fromModuloInternal = \case
  2 -> Just CG2'
  4 -> Just CG4'
  n
    | even n -> uncurry CGDoubleOddPrimePower' <$> isOddPrimePower (n `div` 2)
    | otherwise -> uncurry CGOddPrimePower' <$> isOddPrimePower n
-- | Decompose n as p^k for an odd prime p, if possible; a single
-- primality test on the highest-power base.
isOddPrimePower
  :: (Integral a, UniqueFactorisation a)
  => a
  -> Maybe (Prime a, Word)
isOddPrimePower n
  | even n = Nothing
  | otherwise = (, k) <$> isPrime p
  where
    (p, k) = highestPower n
-- | Convert a cyclic group to a proof that @m@ is known. Usage example:
--
-- > toModulo :: CyclicGroup Integer m -> Natural
-- > toModulo t = case proofFromCyclicGroup t of Sub Dict -> natVal t
proofFromCyclicGroup :: Integral a => CyclicGroup a m -> (() :- KnownNat m)
proofFromCyclicGroup = proofFromSFactors . cyclicGroupToSFactors
-- | Check whether a multiplicative group of residues,
-- characterized by its modulo, is cyclic and, if yes, return its form.
--
-- >>> :set -XTypeOperators -XNoStarIsType
-- >>> import GHC.TypeNats
-- >>> sfactorsToCyclicGroup (sfactors :: SFactors Integer 4)
-- Just CG4'
-- >>> sfactorsToCyclicGroup (sfactors :: SFactors Integer (2 * 13 ^ 3))
-- Just (CGDoubleOddPrimePower' (Prime 13) 3)
-- >>> sfactorsToCyclicGroup (sfactors :: SFactors Integer (4 * 13))
-- Nothing
sfactorsToCyclicGroup :: (Eq a, Num a) => SFactors a m -> Maybe (CyclicGroup a m)
sfactorsToCyclicGroup (SFactors fs) = case fs of
  [(unPrime -> 2, 1)] -> Just CG2'
  [(unPrime -> 2, 2)] -> Just CG4'
  [(unPrime -> 2, _)] -> Nothing
  [(p, k)] -> Just $ CGOddPrimePower' p k
  [(p, k), (unPrime -> 2, 1)] -> Just $ CGDoubleOddPrimePower' p k
  [(unPrime -> 2, 1), (p, k)] -> Just $ CGDoubleOddPrimePower' p k
  _ -> Nothing
-- | Invert 'sfactorsToCyclicGroup'.
--
-- >>> import Data.Maybe
-- >>> cyclicGroupToSFactors (fromJust (sfactorsToCyclicGroup (sfactors :: SFactors Integer 4)))
-- SFactors {unSFactors = [(Prime 2,2)]}
cyclicGroupToSFactors :: Num a => CyclicGroup a m -> SFactors a m
cyclicGroupToSFactors = SFactors . \case
  CG2' -> [(Prime 2, 1)]
  CG4' -> [(Prime 2, 2)]
  CGOddPrimePower' p k -> [(p, k)]
  CGDoubleOddPrimePower' p k -> [(Prime 2, 1), (p, k)]
-- Match-only pattern synonyms: they let clients pattern match without
-- exposing the primed constructors, but cannot be used to build values.
-- | Unidirectional pattern for residues modulo 2.
pattern CG2 :: CyclicGroup a m
pattern CG2 <- CG2'
-- | Unidirectional pattern for residues modulo 4.
pattern CG4 :: CyclicGroup a m
pattern CG4 <- CG4'
-- | Unidirectional pattern for residues modulo @p@^@k@ for __odd__ prime @p@.
pattern CGOddPrimePower :: Prime a -> Word -> CyclicGroup a m
pattern CGOddPrimePower p k <- CGOddPrimePower' p k
-- | Unidirectional pattern for residues modulo 2@p@^@k@ for __odd__ prime @p@.
pattern CGDoubleOddPrimePower :: Prime a -> Word -> CyclicGroup a m
pattern CGDoubleOddPrimePower p k <- CGDoubleOddPrimePower' p k
#if __GLASGOW_HASKELL__ > 801
{-# COMPLETE CG2, CG4, CGOddPrimePower, CGDoubleOddPrimePower #-}
#endif
| cartazio/arithmoi | Math/NumberTheory/Moduli/Singleton.hs | mit | 10,187 | 0 | 14 | 2,134 | 2,603 | 1,406 | 1,197 | 187 | 7 |
{-# LANGUAGE OverloadedStrings, TypeFamilies, Rank2Types, DeriveDataTypeable #-}
module HotDB.Core.Node (
Node(..)
, Document(..)
, lookupPath
, adjustPath
) where
import Prelude hiding (lookup)
import Data.Bits (shift, xor)
import Data.Foldable (toList)
import Data.Int (Int64)
import qualified Data.Map as M
import qualified Data.Maybe as Maybe
import qualified Data.Aeson as A
import qualified Data.Text as T
import qualified Data.Text.Read as TR
import qualified Data.Scientific as S
import qualified Data.Sequence as Seq
import Data.Typeable
import Data.Word (Word32, Word64)
import qualified Data.Vector as V
import Control.Monad (guard, mzero)
import Control.Applicative (pure)
import Text.Read (readMaybe)
import Data.Traversable hiding (sequence)
import Control.Applicative
import qualified HotDB.Core.JsonUtils as J
import qualified HotDB.Core.Path as P
-- | A JSON-like document tree: scalar leaves, sequences, string-keyed
-- maps, and a distinguished root wrapper.
data Node = EmptyNode
          | RootNode Node
          | BoolNode Bool
          | IntNode Int64
          | DoubleNode Double
          | TextNode String
          | SequenceNode (Seq.Seq Node)
          | MapNode (M.Map T.Text Node)
          deriving (Show, Eq, Typeable)
-- | A root node paired with a String -- presumably a revision/commit
-- identifier; TODO confirm its semantics against callers.
newtype Document = Document (Node, String) deriving (Show, Eq)
-- | Encode a 'Node'. Roots, 64-bit ints and maps are wrapped in
-- discriminated objects (tags "r", "i", "m"); other cases map directly
-- onto the corresponding JSON value.
instance A.ToJSON Node where
  toJSON EmptyNode = A.toJSON A.Null
  toJSON (RootNode v) = A.toJSON $ J.Discriminated "r" v
  toJSON (BoolNode v) = A.toJSON v
  toJSON (IntNode v) = A.toJSON $ J.Discriminated "i" (J.Int64 v)
  toJSON (DoubleNode v) = A.toJSON v
  toJSON (TextNode v) = A.toJSON v
  toJSON (SequenceNode v) = A.toJSON $ map A.toJSON $ toList v
  toJSON (MapNode v) = A.toJSON $ J.Discriminated "m" (J.Map v)
-- | Decode a 'Node', the inverse of the ToJSON instance above. Objects
-- are tried against the discriminator tags "r" (root), "i" (Int64) and
-- "m" (map) in turn. (The previously unused as-pattern on the Array case
-- has been dropped.)
instance A.FromJSON Node where
  parseJSON A.Null = pure EmptyNode
  parseJSON (A.Bool v) = pure $ BoolNode v
  parseJSON (A.Number v) = pure $ DoubleNode $ S.toRealFloat v
  parseJSON (A.String v) = pure $ TextNode $ T.unpack v
  parseJSON (A.Array v) = SequenceNode . vecToSeq <$> traverse A.parseJSON v
  parseJSON (A.Object v) = RootNode <$> J.discriminator "r" v <|>
                           IntNode . J.getInt64 <$> J.discriminator "i" v <|>
                           MapNode . M.fromList <$> J.discriminator "m" v
-- | A 'Document' serialises as the plain pair, i.e. a two-element array.
instance A.ToJSON Document where
  toJSON (Document d) = A.toJSON d
-- | A 'Document' is serialised as a two-element array [root, revision].
-- Any other JSON shape now fails the parse via 'mzero' instead of
-- crashing on a non-exhaustive match or a partial (V.!) index.
instance A.FromJSON Document where
  parseJSON (A.Array v)
    | V.length v == 2 =
        (\r c -> Document (r, c)) <$> A.parseJSON (v V.! 0) <*> A.parseJSON (v V.! 1)
  parseJSON _ = mzero
-- | Convert a Vector to a Seq via an intermediate list.
vecToSeq :: V.Vector a -> Seq.Seq a
vecToSeq = Seq.fromList . V.toList
-- | Parse a decimal integer from Text. The whole input must be consumed
-- (remainder ""), and the Integer -> a -> Integer round-trip check rejects
-- values that overflow the target type.
parseInt :: Integral a => T.Text -> Maybe a
parseInt t = case (TR.decimal t :: Either String (Integer, T.Text)) of
  Right (i, "") -> let i' = fromIntegral i in
    if (fromIntegral i') == i
      then Just i'
      else Nothing
  _ -> Nothing
-- | Parse a sequence index (bounded to Word32 by 'parseInt').
parseIndex :: T.Text -> Maybe Word32
parseIndex = parseInt
-- | Look up one child of a node by key: "/" descends through a root,
-- a numeric key indexes a sequence (bounds-checked), and any key selects
-- from a map. All other combinations yield Nothing.
lookup :: Node -> T.Text -> Maybe Node
lookup (RootNode n) "/" = Just n
lookup (SequenceNode s) key =
  case mi of
    Just i -> let i' = fromIntegral i in
      if i' < Seq.length (s)
        then Just $ Seq.index s i'
        else Nothing
    _ -> Nothing
  where mi = parseIndex key
lookup (MapNode m) key = M.lookup key m
lookup _ _ = Nothing
-- | Follow a whole 'P.Path' from a node, one 'lookup' per component.
-- Empty path components are skipped; a missing child anywhere gives Nothing.
lookupPath :: Node -> P.Path -> Maybe Node
lookupPath n p =
  lookupPath' n $ P.getPath p
  where lookupPath' n [] = Just n
        lookupPath' n ("":ps) = lookupPath' n ps
        lookupPath' n (p:ps) = do
          c <- lookup n p
          lookupPath' c ps
-- | Apply a (possibly failing) update to one child selected by key,
-- rebuilding the parent. Key semantics match 'lookup'; Nothing when the
-- key does not select a child or the update itself fails.
adjust :: Node -> T.Text -> (Node -> Maybe Node) -> Maybe Node
adjust (RootNode n) "/" f = RootNode <$> f n
adjust sn@(SequenceNode s) key f =
  case mi of
    Just i -> let i' = fromIntegral i in
      if i' < Seq.length (s)
        then do
          n' <- f $ Seq.index s i'
          Just $ SequenceNode $ Seq.update i' n' s
        else Nothing
    _ -> Nothing
  where mi = parseIndex key
adjust (MapNode m) key f = do
  n <- M.lookup key m
  n' <- f n
  return $ MapNode $ M.insert key n' m
adjust _ _ _ = Nothing
-- | Apply an update at the node addressed by a 'P.Path', rebuilding every
-- node along the way. Empty components are skipped, mirroring 'lookupPath'.
adjustPath :: Node -> P.Path -> (Node -> Maybe Node) -> Maybe Node
adjustPath n p f =
  adjustPath' n (P.getPath p) f
  where adjustPath' n [] f = f n
        adjustPath' n ("":ps) f = adjustPath' n ps f
        adjustPath' n (p:ps) f = adjust n p (\c -> adjustPath' c ps f)
| jjwchoy/hotdb | src/HotDB/Core/Node.hs | mit | 4,212 | 0 | 16 | 1,067 | 1,690 | 875 | 815 | -1 | -1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NamedFieldPuns #-}
{-
Copyright (C) 2012-2017 Kacper Bak, Michal Antkiewicz <http://gsd.uwaterloo.ca>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
module Language.Clafer.Intermediate.ResolverInheritance where
import Control.Applicative
import Control.Lens ((^.), (&), (%%~), (.~), traverse)
import Control.Monad
import Control.Monad.Except
import Control.Monad.State
import Data.Maybe
import Data.Graph
import Data.Tree
import Data.List
import qualified Data.Map as Map
import Data.StringMap (StringMap)
import qualified Data.StringMap as SMap
import Prelude hiding (traverse)
import Language.ClaferT
import Language.Clafer.Common
import Language.Clafer.Front.AbsClafer
import Language.Clafer.Intermediate.Intclafer
import Language.Clafer.Intermediate.ResolverName
-- | Resolve Non-overlapping inheritance.
-- Pipeline: collect every abstract clafer as a candidate super type,
-- resolve each declaration's super reference against those candidates,
-- relocate top-level abstract clafers extending nested abstract ones,
-- then resolve full hierarchies and rebuild the 'GEnv' caches.
resolveNModule :: (IModule, GEnv) -> Resolve (IModule, GEnv)
resolveNModule (imodule, genv') =
  do
    let
      unresolvedDecls = _mDecls imodule
      -- Candidate supers: every abstract clafer found by breadth-first search.
      abstractClafers = filter _isAbstract $ bfsClafers $ toClafers unresolvedDecls
    resolvedDecls <- mapM (resolveNElement abstractClafers) unresolvedDecls
    let
      relocatedDecls = relocateTopLevelAbstractToParents resolvedDecls -- F> Top-level abstract clafer extending a nested abstract clafer <https://github.com/gsdlab/clafer/issues/67> <F
      -- UID map rebuilt after relocation so hierarchy resolution sees new parents.
      uidClaferMap' = createUidIClaferMap imodule{_mDecls = relocatedDecls}
    resolvedHierarchyDecls <- mapM (resolveHierarchy uidClaferMap') relocatedDecls
    let
      resolvedHierarchiesIModule = imodule{_mDecls = resolvedHierarchyDecls}
    -- Return the resolved module plus a GEnv whose caches (sClafers,
    -- uidClaferMap) are recomputed from the resolved declarations.
    return
      ( resolvedHierarchiesIModule
      , genv'{ sClafers = bfs toNodeShallow $ toClafers resolvedHierarchyDecls
             , uidClaferMap = createUidIClaferMap resolvedHierarchiesIModule}
      )
-- | Resolve the super reference of one clafer and, recursively, of all of
-- its child elements. Also recomputes the clafer's parent UID: a top-level
-- abstract clafer extending a nested abstract clafer becomes a sibling of
-- its super (see issue #67 below).
resolveNClafer :: [IClafer] -> IClafer -> Resolve IClafer
resolveNClafer abstractClafers clafer =
  do
    (super', superIClafer') <- resolveNSuper abstractClafers $ _super clafer
    -- F> Top-level abstract clafer extending a nested abstract clafer <https://github.com/gsdlab/clafer/issues/67> F>
    let
      parentUID' =
        case superIClafer' of
          (Just superIClafer'') ->
            if _isAbstract clafer && isTopLevel clafer && not (isTopLevel superIClafer'')
              then _parentUID superIClafer'' -- make clafer a sibling of the superIClafer'
              else _parentUID clafer
          Nothing -> _parentUID clafer
    -- <F Top-level abstract clafer extending a nested abstract clafer <https://github.com/gsdlab/clafer/issues/67> <F
    elements' <- mapM (resolveNElement abstractClafers) $ _elements clafer
    return $ clafer {_super = super', _parentUID = parentUID', _elements = elements'}
-- | Resolve a (possibly absent) super-type expression against the known
-- abstract clafers. On success returns the rewritten super 'PExp' (with a
-- resolved UID and 'TClafer' type) together with the super 'IClafer'.
-- Errors: primitive super types and unresolvable names are semantic errors.
resolveNSuper :: [IClafer] -> Maybe PExp -> Resolve (Maybe PExp, Maybe IClafer)
resolveNSuper _ Nothing = return (Nothing, Nothing)
resolveNSuper abstractClafers (Just (PExp _ pid' pos' (IClaferId _ id' _ _))) =
  if isPrimitive id'
    then throwError $ SemanticErr pos' $ "Primitive types are not allowed as super types: " ++ id'
    else do
      r <- resolveN pos' abstractClafers id'
      -- NOTE(review): the pattern [superClafer'] assumes resolveN/findUnique
      -- always pairs a name with a singleton list (resolveN builds (x,[x]));
      -- a non-singleton would be a pattern-match failure — confirm.
      (id'', [superClafer']) <- case r of
        Nothing -> throwError $ SemanticErr pos' $ "No superclafer found: " ++ id'
        Just m -> return m
      return (Just $ PExp (Just $ TClafer [id'']) pid' pos' (IClaferId "" id'' (isTopLevel superClafer') (Just id''))
             , Just superClafer')
-- Any other super expression shape is passed through unresolved.
resolveNSuper _ x = return (x, Nothing)
-- | Resolve non-overlapping inheritance inside one element: clafers are
-- resolved recursively, constraints and goals pass through unchanged.
resolveNElement :: [IClafer] -> IElement -> Resolve IElement
resolveNElement abstractClafers (IEClafer clafer) =
  IEClafer <$> resolveNClafer abstractClafers clafer
resolveNElement _ element@(IEConstraint _ _) = return element
resolveNElement _ element@(IEGoal _ _) = return element

-- | Look up the unique abstract clafer named @id'@; each candidate is
-- paired with its singleton path for 'findUnique'.
resolveN :: Span -> [IClafer] -> String -> Resolve (Maybe (String, [IClafer]))
resolveN pos' abstractClafers id' =
  findUnique pos' id' [(candidate, [candidate]) | candidate <- abstractClafers]
-- | Expand each clafer's 'TClafer' type into its full super-type chain and
-- reject self-inheritance cycles. Uses a monadic lens traversal (%%~) over
-- the clafer's super expression type.
resolveHierarchy :: UIDIClaferMap -> IElement -> Resolve IElement
resolveHierarchy uidClaferMap' (IEClafer iClafer') = IEClafer <$> (super.traverse.iType.traverse %%~ addHierarchy $ iClafer')
  where
    -- Replace a bare TClafer with the full hierarchy of super UIDs
    -- (tail drops the clafer's own UID from the chain).
    addHierarchy :: IType -> Resolve IType
    addHierarchy (TClafer _) = TClafer <$> checkForLoop (tail $ mapHierarchy _uid getSuper uidClaferMap' iClafer')
    addHierarchy x = return x
    -- Fail if the clafer's own UID appears among its supers (a cycle).
    checkForLoop :: [String] -> Resolve [String]
    checkForLoop supers = case find (_uid iClafer' ==) supers of
      Nothing -> return supers
      Just _ -> throwError $ SemanticErr (_cinPos iClafer') $ "ResolverInheritance: clafer " ++ _uid iClafer' ++ " inherits from itself"
-- Non-clafer elements are untouched.
resolveHierarchy _ x = return x
-- | Resolve overlapping inheritance
-- | Resolve overlapping inheritance (reference targets) for a whole module,
-- then rebuild the GEnv caches from the resolved declarations.
resolveOModule :: (IModule, GEnv) -> Resolve (IModule, GEnv)
resolveOModule (imodule, genv') =
  do
    let decls' = _mDecls imodule
    decls'' <- mapM (resolveOElement (defSEnv genv' decls')) decls'
    let imodule' = imodule{_mDecls = decls''}
    return ( imodule'
           , genv'{sClafers = bfs toNodeShallow $ toClafers decls'', uidClaferMap = createUidIClaferMap imodule'})
-- | Resolve the reference expression of a clafer and, recursively, of its
-- children. The symbol-table context is set to the clafer being resolved.
resolveOClafer :: SEnv -> IClafer -> Resolve IClafer
resolveOClafer env clafer = do
  let env' = env {context = Just clafer}
  reference' <- resolveOReference env' (_reference clafer)
  elements' <- mapM (resolveOElement env') (_elements clafer)
  return clafer {_reference = reference', _elements = elements'}

-- | Resolve the expression inside a reference, if any.
resolveOReference :: SEnv -> Maybe IReference -> Resolve (Maybe IReference)
resolveOReference _ Nothing = return Nothing
resolveOReference env (Just (IReference is' exp')) =
  (Just . IReference is') <$> resolvePExp env exp'

-- | Resolve overlapping inheritance inside one element; constraints and
-- goals pass through unchanged.
resolveOElement :: SEnv -> IElement -> Resolve IElement
resolveOElement env (IEClafer clafer) = IEClafer <$> resolveOClafer env clafer
resolveOElement _ element@(IEConstraint _ _) = return element
resolveOElement _ element@(IEGoal _ _) = return element
-- | Resolve inherited and default cardinalities
-- (pure analysis pass: fills in group cardinality and cardinality defaults).
analyzeModule :: (IModule, GEnv) -> IModule
analyzeModule (imodule, genv') =
  imodule{_mDecls = map (analyzeElement (defSEnv genv' decls')) decls'}
  where
    decls' = _mDecls imodule
-- | Fill in a clafer's group cardinality and cardinality, then analyze its
-- children with the updated clafer as context.
analyzeClafer :: SEnv -> IClafer -> IClafer
analyzeClafer env clafer =
  clafer' {_elements = map (analyzeElement env {context = Just clafer'}) $
             _elements clafer'}
  where
    clafer' = clafer {_gcard = analyzeGCard env clafer,
                      _card = analyzeCard env clafer}
-- only for non-overlapping
-- | Group cardinality: the clafer's own, else the first one found up the
-- inheritance hierarchy, else the default (0, -1) meaning unbounded.
analyzeGCard :: SEnv -> IClafer -> Maybe IGCard
analyzeGCard env clafer = gcard' `mplus` (Just $ IGCard False (0, -1))
  where
  gcard'
    | isNothing $ _super clafer = _gcard clafer
    | otherwise = listToMaybe $ mapMaybe _gcard $ findHierarchy getSuper (uidClaferMap $ genv env) clafer
-- | Cardinality: the clafer's own, else a default derived from context:
-- abstract => (0,-1); top-level, or nested under an unbounded group => (1,1);
-- otherwise optional (0,1).
analyzeCard :: SEnv -> IClafer -> Maybe Interval
analyzeCard env clafer = _card clafer `mplus` Just card'
  where
  card'
    | _isAbstract clafer = (0, -1)
    | (isJust $ context env) && pGcard == (0, -1)
      || (isTopLevel clafer) = (1, 1)
    | otherwise = (0, 1)
  -- NOTE(review): pGcard uses fromJust twice; it is only forced when
  -- context is Just (guard above), and _gcard is presumably already filled
  -- by analyzeGCard on the parent — confirm the parent's gcard is never
  -- Nothing here, otherwise this is a latent partial-function crash.
  pGcard = _interval $ fromJust $ _gcard $ fromJust $ context env
-- | Analyze cardinalities inside one element; constraints and goals are
-- returned unchanged.
analyzeElement :: SEnv -> IElement -> IElement
analyzeElement env (IEClafer clafer) = IEClafer (analyzeClafer env clafer)
analyzeElement _ element@(IEConstraint _ _) = element
analyzeElement _ element@(IEGoal _ _) = element
-- | Expand inheritance
-- | Expand (unroll) inheritance for a whole module. Runs the expansion in
-- the State monad over 'GEnv' so freshly generated UIDs stay unique, then
-- rebuilds the UID map from the expanded module.
resolveEModule :: (IModule, GEnv) -> (IModule, GEnv)
resolveEModule (imodule, genv') = (imodule', newGenv)
  where
    decls' = _mDecls imodule
    imodule' = imodule{_mDecls = decls''}
    newGenv = genv''{uidClaferMap = createUidIClaferMap imodule'}
    -- Expansion starts with no predecessors and skips clafers that belong
    -- to an unrollable (cyclic) dependency component.
    (decls'', genv'') = runState (mapM (resolveEElement []
                                         (unrollableModule imodule)
                                         False decls') decls') genv'
-- -----------------------------------------------------------------------------
-- | UIDs of abstract clafers that cannot be unrolled (they participate in
-- a cyclic inheritance dependency).
unrollableModule :: IModule -> [String]
unrollableModule imodule =
  getDirUnrollables (mapMaybe unrollabeDeclaration (_mDecls imodule))

-- | Dependency-graph edge for one declaration: an abstract clafer maps to
-- the UIDs it (transitively, via its subtree) inherits from.
unrollabeDeclaration :: IElement -> Maybe (String, [String])
unrollabeDeclaration (IEClafer clafer)
  | _isAbstract clafer = Just (_uid clafer, unrollableClafer clafer)
  | otherwise = Nothing
unrollabeDeclaration (IEConstraint _ _) = Nothing
unrollabeDeclaration (IEGoal _ _) = Nothing

-- | All super UIDs referenced by a clafer and its nested clafers.
unrollableClafer :: IClafer -> [String]
unrollableClafer clafer = getSuper clafer ++ nestedSupers
  where
    nestedSupers = concatMap unrollableClafer (toClafers (_elements clafer))
-- | Given dependency edges (clafer UID, UIDs it depends on), return every
-- UID that belongs to a cyclic strongly-connected component: any SCC with
-- more than one vertex, or a singleton that depends on itself.
getDirUnrollables :: [(String, [String])] -> [String]
getDirUnrollables dependencies = concatMap (map fst3) cyclicComponents
  where
    cyclicComponents =
      filter isUnrollable [map v2n (flatten component) | component <- scc graph]
    (graph, v2n, _) =
      graphFromEdges (map (\(c, ss) -> (c, c, ss)) dependencies)
    -- A singleton SCC is cyclic only if it lists itself as a dependency.
    isUnrollable [x] = fst3 x `elem` trd3 x
    isUnrollable _ = True
-- -----------------------------------------------------------------------------
-- | Expand one clafer: give it a fresh UID (when under an abstract
-- ancestor), inline the elements inherited from its super chain, record the
-- expansion in the stable-set bookkeeping, and recurse into its children.
resolveEClafer :: MonadState GEnv m => [String] -> [String] -> Bool -> [IElement] -> IClafer -> m IClafer
resolveEClafer predecessors unrollables absAncestor declarations clafer = do
  uidClaferMap' <- gets uidClaferMap
  clafer' <- renameClafer absAncestor (_parentUID clafer) clafer
  let predecessors' = _uid clafer' : predecessors
  -- Inline elements from the full super hierarchy of this clafer.
  (sElements, super', superList) <-
    resolveEInheritance predecessors' unrollables absAncestor declarations
      (findHierarchy getSuper uidClaferMap' clafer)
  -- Record, per super UID, the predecessor chains that reached it
  -- (merged without duplicates into the existing stable map).
  let sClafer = Map.fromList $ zip (map _uid superList) $ repeat [predecessors']
  modify (\e -> e {stable = Map.delete "clafer" $
                            Map.unionWith ((nub.).(++)) sClafer $
                            stable e})
  elements' <-
    mapM (resolveEElement predecessors' unrollables absAncestor declarations)
      $ _elements clafer
  return $ clafer' {_super = super', _elements = elements' ++ sElements}
-- | Give the clafer a fresh UID, but only when it sits under an abstract
-- ancestor; otherwise leave it untouched.
renameClafer :: MonadState GEnv m => Bool -> UID -> IClafer -> m IClafer
renameClafer underAbstract puid clafer
  | underAbstract = renameClafer' puid clafer
  | otherwise = return clafer
-- | Unconditionally give the clafer a fresh UID of the form
-- @c\<count\>_\<ident\>@ and set its parent UID, bumping the per-identifier
-- counter kept in the 'GEnv'.
renameClafer' :: MonadState GEnv m => UID -> IClafer -> m IClafer
renameClafer' puid clafer = do
  let claferIdent = _ident clafer
  identCountMap' <- gets identCountMap
  let count = Map.findWithDefault 0 claferIdent identCountMap'
  -- Map.insert overwrites any existing entry, which is exactly what the
  -- previous Map.alter (\_ -> Just (count+1)) spelled out by hand.
  modify (\e -> e { identCountMap = Map.insert claferIdent (count + 1) identCountMap' } )
  return $ clafer { _uid = genId claferIdent count, _parentUID = puid }
-- | Build a generated UID: a counter-tagged variant of the identifier,
-- e.g. @genId "person" 3 == "c3_person"@.
genId :: String -> Int -> String
genId baseName count = "c" ++ show count ++ "_" ++ baseName
-- | Inline the inherited part of a clafer's hierarchy.
-- @allSuper@ is the full hierarchy with the clafer itself at the head.
-- Returns the inlined elements from all unrollable supers, the super
-- expression to keep (only when the direct super is in an unrollable
-- cycle), and the list of supers that were processed.
resolveEInheritance :: MonadState GEnv m => [String] -> [String] -> Bool -> [IElement] -> [IClafer] -> m ([IElement], Maybe PExp, [IClafer])
resolveEInheritance predecessors unrollables absAncestor declarations allSuper = do
  -- Under an abstract ancestor the clafer itself counts as a super.
  let superList = (if absAncestor then id else tail) allSuper
  let unrollSuper = filter (\s -> _uid s `notElem` unrollables) $ tail allSuper
  elements' <-
    mapM (resolveEElement predecessors unrollables True declarations) $
      unrollSuper >>= _elements
  -- Keep the super edge only when the direct super cannot be unrolled
  -- (getSuper yields a list; [True] means "one super and it is unrollable").
  let super' = case (`elem` unrollables) <$> getSuper clafer of
        [True] -> _super clafer
        _ -> Nothing
  return (elements', super', superList)
  where
  clafer = head allSuper
-- | Expand inheritance inside one element. Abstract clafers are left as-is
-- (they are only templates); concrete clafers are expanded recursively.
-- Constraints and goals pass through unchanged.
resolveEElement :: MonadState GEnv m => [String] -> [String] -> Bool -> [IElement] -> IElement -> m IElement
resolveEElement predecessors unrollables absAncestor declarations element@(IEClafer clafer)
  | _isAbstract clafer = return element
  | otherwise =
      IEClafer <$>
        resolveEClafer predecessors unrollables absAncestor declarations clafer
resolveEElement _ _ _ _ element@(IEConstraint _ _) = return element
resolveEElement _ _ _ _ element@(IEGoal _ _) = return element
-- -----------------------------------------------------------------------------
-- | Check every clafer's redefinition (nested inheritance) for proper
-- nesting, cardinality refinement, bag-to-set refinement, and target
-- subtyping. All violations are collected into one message; the module is
-- returned unchanged when there are none.
resolveRedefinition :: (IModule, GEnv) -> Resolve IModule
resolveRedefinition (iModule, _) =
  if (not $ null improperClafers)
    then throwError $ SemanticErr noSpan ("Refinement errors in the following places:\n" ++ improperClafers)
    else return iModule
  where
    uidIClaferMap' = createUidIClaferMap iModule
    -- Concatenation of one error line per offending clafer (empty = OK).
    improperClafers :: String
    improperClafers = foldMapIR isImproper iModule
    -- Produce an error line for a clafer whose redefinition is invalid,
    -- or "" when it is fine. The nested ifs report only the first failing
    -- check, in order: nesting, cardinality, bag-to-set, target subtyping.
    isImproper :: Ir -> String
    isImproper (IRClafer claf@IClafer{_cinPos = (Span (Pos l c) _) ,_ident=i}) =
      let
        match = matchNestedInheritance uidIClaferMap' claf
      in
        if (isProperNesting uidIClaferMap' match)
          then let
                 (properCardinalityRefinement, properBagToSetRefinement, properTargetSubtyping) = isProperRefinement uidIClaferMap' match
               in if (properCardinalityRefinement)
                    then if (properBagToSetRefinement)
                           then if (properTargetSubtyping)
                                  then ""
                                  else ("Improper target subtyping for clafer '" ++ i ++ "' on line " ++ show l ++ " column " ++ show c ++ "\n")
                           else ("Improper bag to set refinement for clafer '" ++ i ++ "' on line " ++ show l ++ " column " ++ show c ++ "\n")
                    else ("Improper cardinality refinement for clafer '" ++ i ++ "' on line " ++ show l ++ " column " ++ show c ++ "\n")
          else ("Improperly nested clafer '" ++ i ++ "' on line " ++ show l ++ " column " ++ show c ++ "\n")
    -- Non-clafer IR nodes contribute no errors.
    isImproper _ = ""
-- F> Top-level abstract clafer extending a nested abstract clafer <https://github.com/gsdlab/clafer/issues/67> F>
-- | Move declarations whose clafer is no longer top-level (because super
-- resolution re-parented it, see issue #67) under their new parent clafer.
-- Returns the original list untouched when nothing needs relocation.
relocateTopLevelAbstractToParents :: [IElement] -> [IElement]
relocateTopLevelAbstractToParents originalElements =
  let
    (elementsToBeRelocated, remainingElements) = partition needsRelocation originalElements
  in
    case elementsToBeRelocated of
      [] -> originalElements
      _ -> map (insertElements $ mkParentUIDIElementMap elementsToBeRelocated) remainingElements
  where
    -- A declaration needs relocation when its clafer's parentUID points at
    -- another clafer instead of the module root.
    needsRelocation :: IElement -> Bool
    needsRelocation IEClafer{_iClafer} = not $ isTopLevel _iClafer
    needsRelocation _ = False
    -- creates a map from parentUID to a list of elements to be added as children of a clafer with that UID
    mkParentUIDIElementMap :: [IElement] -> StringMap [IElement]
    mkParentUIDIElementMap elems = foldl'
      (\accumMap' (parentUID', elem') -> SMap.insertWith (++) parentUID' [elem'] accumMap')
      SMap.empty
      (map (\e -> (_parentUID $ _iClafer e, e)) elems)
    -- Recursively rebuild an element, appending any relocated elements
    -- whose parentUID matches this element's clafer UID.
    insertElements :: StringMap [IElement] -> IElement -> IElement
    insertElements parentMap targetElement = let
        targetUID = targetElement ^. iClafer . uid
        newChildren = SMap.findWithDefault [] targetUID parentMap
        currentElements = targetElement ^. iClafer . elements
        newElements = map (insertElements parentMap) currentElements
                      ++ newChildren
      in
        targetElement & iClafer . elements .~ newElements
| juodaspaulius/clafer | src/Language/Clafer/Intermediate/ResolverInheritance.hs | mit | 16,098 | 1 | 21 | 3,673 | 4,138 | 2,134 | 2,004 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Concurrent
import Import
import Model
import Model.Server
import SchoolOfHaskell.Scheduler.API
import View (renderControls, renderEditor)
-- | Main function of the School of Haskell client.
-- Finds every element with class "soh-code", replaces each with an editor,
-- optionally renders shared controls into #soh-controls, and then connects
-- to a snippet-runner backend (local or container-based, chosen via CPP).
main :: IO ()
main = do
  -- Get the code elements.
  els <- getElementsByClassName "soh-code"
  -- Initialize app state
  app <- getApp (length els)
  ace <- newUnmanaged app
  termjs <- newUnmanaged app
  iframe <- newUnmanaged app
  -- Render the controls, if they're in a predefined div.
  -- inlineControls is False when a dedicated controls div exists (controls
  -- are rendered there once), True when each editor embeds its own.
  mcontrols <- getElementById "soh-controls"
  mcolumn <- getElementById "soh-column"
  inlineControls <- case (mcontrols, mcolumn) of
    (Just controls, Just column) -> do
      void $ forkIO $ react app (renderControls termjs iframe) controls
      positionControlsOnResize controls column
      return False
    _ -> return True
  -- Substitute the code elements with editors
  forM_ (zip els [SnippetId 0..]) $ \(el, sid) -> do
    code <- getElementText el
    let renderer = renderEditor ace termjs iframe sid code inlineControls
    void $ forkIO $ react app renderer el
  -- Run the application
#if LOCAL_SOH_RUNNER
  -- Development mode: talk to a runner on localhost with fixed ports.
  let devMappings = PortMappings [(4000, 4000), (3000, 3000)]
  runApp "localhost" devMappings devReceipt app
#else
  -- Production mode: provision a "soh-runner" container and poll (up to
  -- 60 attempts) until it has an address.
  let spec = ContainerSpec "soh-runner"
  -- clearContainers
  receipt <- createContainer spec
  (host, ports) <- pollForContainerAddress 60 $
    getContainerDetailByReceipt receipt
  runApp host ports receipt app
#endif
-- clearContainers :: IO ()
-- clearContainers = do
-- containers <- listContainers
-- forM_ containers stopContainerById
| fpco/schoolofhaskell | soh-client/src/Main.hs | mit | 1,624 | 0 | 16 | 306 | 350 | 175 | 175 | 31 | 2 |
{-# LANGUAGE FlexibleInstances, CPP, PatternGuards #-}
-- | Lower level building blocks for custom code generation.
module Language.Haskell.GHC.Simple.Impl (
Ghc, PkgKey,
liftIO,
toSimplifiedStg,
toModMetadata,
modulePkgKey, pkgKeyString
) where
-- GHC scaffolding
import BinIface
import GHC hiding (Warning)
import GhcMonad (liftIO)
import HscMain
import HscTypes
import TidyPgm
import CorePrep
import StgSyn
import CoreSyn
import CoreToStg
import SimplStg
import DriverPipeline
#if __GLASGOW_HASKELL__ >= 800
import qualified Module as M (moduleUnitId, unitIdString, UnitId)
#elif __GLASGOW_HASKELL__ >= 710
import qualified Module as M (modulePackageKey, packageKeyString, PackageKey)
#else
import qualified Module as M (modulePackageId, packageIdString, PackageId)
#endif
import Control.Monad
import Data.IORef
import System.FilePath (takeDirectory)
import System.Directory (doesFileExist, createDirectoryIfMissing)
import Language.Haskell.GHC.Simple.Types
-- | STG: produced by running the full core -> simplified STG pipeline.
instance Intermediate [StgTopBinding] where
  prepare = toSimplifiedStg
-- | CgGuts: the raw tidy-program output; nothing further to do.
instance Intermediate CgGuts where
  prepare _ = return
-- | Core: run corePrep on the tidy program using the session's DynFlags.
instance Intermediate CoreProgram where
  prepare ms cgguts = do
    env <- hsc_env `fmap` getPipeState
    liftIO $ prepareCore env (hsc_dflags env) ms cgguts
-- | Package ID/key of a module.
modulePkgKey :: Module -> PkgKey
-- | String representation of a package ID/key.
pkgKeyString :: PkgKey -> String
-- The package-identifier API was renamed across GHC releases
-- (PackageId -> PackageKey -> UnitId); pick the right one via CPP.
#if __GLASGOW_HASKELL__ >= 800
-- | Synonym for 'M.UnitId', to bridge a slight incompatibility between
-- GHC 7.8/7.10/8.0.
type PkgKey = M.UnitId
modulePkgKey = M.moduleUnitId
pkgKeyString = M.unitIdString
#elif __GLASGOW_HASKELL__ >= 710
-- | Synonym for 'M.PackageKey', to bridge a slight incompatibility between
-- GHC 7.8 and 7.10.
type PkgKey = M.PackageKey
modulePkgKey = M.modulePackageKey
pkgKeyString = M.packageKeyString
#else
-- | Synonym for 'M.PackageId', to bridge a slight incompatibility between
-- GHC 7.8 and 7.10.
type PkgKey = M.PackageId
modulePkgKey = M.modulePackageId
pkgKeyString = M.packageIdString
#endif
-- | Build a 'ModMetadata' out of a 'ModSummary'.
-- Pure projection of the summary's name, package, hs-boot status and file
-- locations; the config argument is currently unused but kept for API shape.
toModMetadata :: CompConfig
              -> ModSummary
              -> ModMetadata
toModMetadata cfg ms = ModMetadata {
    mmSummary = ms,
    mmName = moduleNameString $ ms_mod_name ms,
    mmPackageKey = pkgKeyString . modulePkgKey $ ms_mod ms,
    mmSourceIsHsBoot = ms_hsc_src ms == HsBootFile,
    mmSourceFile = ml_hs_file $ ms_location ms,
    mmInterfaceFile = ml_hi_file $ ms_location ms
  }
-- | Compile a 'ModSummary' into a list of simplified 'StgBinding's.
-- See <https://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/StgSynType>
-- for more information about STG and how it relates to core and Haskell.
toSimplifiedStg :: ModSummary -> CgGuts -> CompPipeline [StgTopBinding]
toSimplifiedStg ms cgguts = do
  env <- hsc_env `fmap` getPipeState
  let dfs = hsc_dflags env
  liftIO $ do
    -- corePrep, then core -> STG (dropping cost-centre info), then the
    -- STG-to-STG simplification passes.
    prog <- prepareCore env dfs ms cgguts
    let stg = fst $ coreToStg dfs (ms_mod ms) prog
    stg2stg dfs stg
-- | Prepare a core module for code generation.
-- The corePrepPgm signature changed in each GHC major release covered
-- here, hence the CPP; the DynFlags argument is only used pre-7.10.
prepareCore :: HscEnv -> DynFlags -> ModSummary -> CgGuts -> IO CoreProgram
prepareCore env dfs _ms p = do
#if __GLASGOW_HASKELL__ >= 800
  liftIO $ fst <$> corePrepPgm env (ms_mod _ms) (ms_location _ms) (cg_binds p) (cg_tycons p)
#elif __GLASGOW_HASKELL__ >= 710
  liftIO $ corePrepPgm env (ms_location _ms) (cg_binds p) (cg_tycons p)
#else
  liftIO $ corePrepPgm dfs env (cg_binds p) (cg_tycons p)
#endif
| valderman/ghc-simple | src/Language/Haskell/GHC/Simple/Impl.hs | mit | 3,522 | 0 | 16 | 617 | 582 | 325 | 257 | 59 | 1 |
module Main where
import Graphics.Vty
import Control.Monad.State
import Editor
import UI
-- | Run an editor.
-- Creates a vty handle, builds the initial editor state, then loops:
-- draw, wait for a key event, update, and repeat until the state says done.
runEditor :: IO ()
runEditor = do
  v <- liftIO iv
  es <- liftIO $ mkIdealEditorState v
  evalStateT loop es where
    -- NOTE(review): loop calls iv (mkVty) again on every iteration,
    -- creating a fresh vty handle per keystroke — looks wasteful; confirm
    -- whether the outer v should be reused instead.
    loop = do v <- liftIO iv
              s <- get
              liftIO $ drawEditor s v
              k <- liftIO $ next_event v
              updateEditor k (context s)
              s' <- get
              -- Shut vty down cleanly once the editor reports done.
              if done s' then liftIO $ shutdown v else loop
    iv = mkVty
main :: IO ()
main = runEditor
module Euler007 (euler7) where
import Data.Numbers.Primes
-- | Project Euler #7: the 10001st prime number
-- (skip the first 10000 primes of the infinite prime stream).
euler7 :: Int
euler7 = head (drop 10000 primes)
| TrustNoOne/Euler | haskell/src/Euler007.hs | mit | 101 | 0 | 5 | 18 | 29 | 18 | 11 | 4 | 1 |
{-# LANGUAGE MagicHash #-}
module EmptyCanvasApp where
import Prelude hiding (show)
import Java
import Java.Array
import JavaFX.Types
import JavaFX.Methods
-- | Eta FFI wrapper type: this Haskell type is compiled to a JVM class
-- extending javafx.application.Application.
data {-# CLASS "org.eta.EmptyCanvasApp extends javafx.application.Application" #-}
  EmptyCanvasApp = EmptyCanvasApp (Object# EmptyCanvasApp)
-- @Override
-- public void start(Stage primaryStage) throws Exception {
-- | JavaFX entry point: build a 320x320 canvas inside a group/scene and
-- show it on the primary stage. The interleaved comments give the
-- equivalent Java source.
start :: Stage -> Java EmptyCanvasApp ()
start primaryStage = do
  -- Canvas c = new Canvas(320, 320);
  c <- newCanvas 320 320
  -- Scene s = new Scene(new Group(c));
  s <- newGroup [superCast c] >>= newScene
  -- primaryStage.setTitle("Eta-JavaFX EmptyCanvas");
  -- primaryStage.setScene(s);
  -- primaryStage.show();
  primaryStage <.> (setTitle "Eta-JavaFX EmptyCanvas" >> setScene s >> show)
-- Expose 'start' as the JVM method the JavaFX runtime calls.
foreign export java "start" start :: Stage -> Java EmptyCanvasApp ()
| filippovitale/eta-playground | javafx-empty-canvas/src/EmptyCanvasApp.hs | mit | 847 | 1 | 11 | 132 | 151 | 83 | 68 | -1 | -1 |
module Test.Properties.UniqhashMachines where
import Data.Machine
import Text.UniqhashMachines
prop_detectChanges :: Bool
prop_detectChanges = expected == result
where
expected = [1,1,2]
result = run $ source [(1 :: Int,'a'),(1,'a'),(1,'b'),(1,'b'),(2,'a'),(1,'b'),(2,'a')] ~> emitChanges
| sordina/uniqhash | Test/Properties/UniqhashMachines.hs | mit | 299 | 0 | 11 | 39 | 129 | 81 | 48 | 7 | 1 |
-- | Instances for Proposition.hs. This is code that is not trustworthy enough to
-- be in the kernel, and we limit our use of automatic Deriving in Proposition.hs to
-- just Eq. Exercise for the reader: if Theorem automatically derived Traversable,
-- how would this make the logic unsound?
module Instances where
import Control.Monad
import Data.Function
import Data.Monoid
import Data.Traversable
import Proposition
-- Functor/Applicative derived from the Monad instance: Term is the free
-- monad-ish syntax tree over variables, with bind = substitution.
instance Functor Term where
  fmap = liftM
instance Applicative Term where
  pure = Var
  (<*>) = liftM2 ($)
-- | Bind substitutes terms for variables (capture is a non-issue: the
-- syntax has no binders).
instance Monad Term where
  tm >>= f = instTerm f tm
instance Foldable Term where
  foldMap = foldMapDefault
-- | Traverse visits every variable occurrence left-to-right.
instance Traversable Term where
  traverse f (Var x) = Var <$> f x
  traverse f (Not t) = Not <$> traverse f t
  traverse f (a :=>: c) = (:=>:) <$> traverse f a <*> traverse f c
-- | Structural ordering: Var < Not < (:=>:), then componentwise.
-- (Fix: unused pattern variables replaced by wildcards to satisfy
-- -Wunused-matches; behavior is unchanged.)
instance Ord a => Ord (Term a) where
  compare (Var x) (Var y) = compare x y
  compare (Var _) _ = LT
  compare _ (Var _) = GT
  compare (Not t) (Not t') = compare t t'
  compare (Not _) _ = LT
  compare _ (Not _) = GT
  -- Lexicographic: antecedents first, consequents break ties.
  compare (a :=>: c) (a' :=>: c') = compare a a' <> compare c c'
-- | Pretty printer: "~" for negation, " ==> " (right-associative) for
-- implication. Parentheses are inserted only where needed: around a negated
-- implication and around an implication on the left of "==>".
instance Show a => Show (Term a) where
  showsPrec _ (Var p) s = showsPrec 11 p s
  showsPrec _ (Not (Var p)) s = "~" ++ showsPrec 11 p s
  showsPrec _ (Not (Not p)) s = "~" ++ shows (Not p) s
  showsPrec _ (Not p) s = "~(" ++ shows p (")" ++ s)
  showsPrec _ ((p :=>: q) :=>: r) s =
    "(" ++ shows (p :=>: q) (")" ++ " ==> " ++ shows r s)
  showsPrec _ (p :=>: q) s = shows p (" ==> " ++ shows q s)
-- | Mapping over a theorem instantiates its variables — sound because
-- instantiation is an admissible rule in the kernel ('inst').
instance Functor Theorem where
  fmap f = inst (pure . f)
instance Foldable Theorem where
  foldMap f = foldMap f . termOfTheorem
-- Theorems compare and display via their underlying term.
instance Eq a => Eq (Theorem a) where
  (==) = (==) `on` termOfTheorem
instance Show a => Show (Theorem a) where
  show thm = "|- " ++ show (termOfTheorem thm)
-- Hand-written Eq/Ord/Show for the two- and three-variable alphabets
-- (Eq is defined via compare so the two instances cannot disagree).
instance Eq Two where
  x == y = compare x y == EQ
-- | Order: X < Y.
instance Ord Two where
  compare X X = EQ
  compare X Y = LT
  compare Y X = GT
  compare Y Y = EQ
instance Show Two where
  show X = "X"
  show Y = "Y"
instance Eq Three where
  x == y = compare x y == EQ
-- | Order: P < Q < R.
instance Ord Three where
  compare P P = EQ
  compare P _ = LT
  compare _ P = GT
  compare Q Q = EQ
  compare Q _ = LT
  compare _ Q = GT
  compare R R = EQ
instance Show Three where
  show P = "P"
  show Q = "Q"
  show R = "R"
| Chattered/proplcf | Instances.hs | mit | 2,419 | 0 | 10 | 700 | 989 | 499 | 490 | 67 | 0 |
module Graphics.Cogh.Element
( module Export
, Position
, Size
, Scale
, Origin
, Element
, position
, size
, scale
, origin
, rotation
, depth
, action
, emptyElement
, rectangle
, group
, image
) where
import Graphics.Cogh.Color as Export
import Graphics.Cogh.Render as Export (Texture, textureSize)
import Data.Function
import Graphics.Cogh.Element.Internal
import Graphics.Cogh.Matrix (Position, Size, Scale, Origin)
import Graphics.Cogh.Render
import Graphics.Cogh.Vector (toVector)
import Lens.Micro
-- | A solid-color rectangle element of the given size.
rectangle :: Size -> Color -> Element a
rectangle rectSize c = emptyElement & size .~ rectSize & render .~ rectRender
  where
    -- Renderer invoked with the window and the element's final transform.
    rectRender window matrix = drawRect window matrix c
-- | An element displaying a texture at the texture's own size.
image :: Texture -> Element a
image texture =
  emptyElement & size .~ (texture & textureSize & toVector) & render .~
    textureRender
  where
    textureRender window matrix = drawTexture window matrix texture
-- | An invisible container element holding the given children.
group :: [Element a] -> Element a
group childElements = set children childElements emptyElement
| ivokosir/cogh | src/Graphics/Cogh/Element.hs | mit | 1,014 | 0 | 10 | 199 | 300 | 173 | 127 | 36 | 1 |
{-# LANGUAGE BangPatterns, FlexibleInstances #-}
module Stage.Sources
(addSources)
where
import Model
import FieldElt
import Data.Array.Repa as R
import Data.Array.Repa.Unsafe as R
import Data.Vector.Unboxed (Unbox)
-- | Addition of forces stage for simulation.
-- Scales the source by the time delta and adds it into the field at the
-- source's target coordinate; with no source the field passes through.
addSources
  :: (FieldElt a, FieldSource a, Unbox a)
  => Delta              -- ^ Time delta.
  -> a                  -- ^ Value to insert.
  -> Maybe (SourceDensity a)
  -> Field a
  -> IO (Field a)
addSources !delta !value (Just (SourceDensity aim mul)) field
  = {-# SCC addSources #-}
    -- Force the input array before the parallel traversal (repa idiom).
    field `deepSeqArray`
    do computeP $ unsafeTraverse field id (insertSource delta value aim mul)
addSources _ _ Nothing field
  = return field
-- | Per-cell kernel for 'addSources': cells other than the target position
-- are copied through untouched; the target cell gets the scaled source
-- added via the type-specific 'addSource'.
insertSource
  :: (FieldElt a, FieldSource a)
  => Delta
  -> a             -- ^ Value to insert
  -> DIM2 -> a
  -> (DIM2 -> a)   -- ^ Lookup into the original field.
  -> DIM2          -- ^ Position of the cell being computed.
  -> a
insertSource !delta !value !aim !mul locate !pos
  | aim == pos = addSource delta value (locate pos) mul
  | otherwise  = locate pos
{-# INLINE insertSource #-}
{-# SPECIALIZE addSources
:: Delta
-> Float
-> Maybe (SourceDensity Float)
-> Field Float
-> IO (Field Float) #-}
{-# SPECIALIZE addSources
:: Delta
-> (Float, Float)
-> Maybe (SourceDensity (Float, Float))
-> Field (Float, Float)
-> IO (Field (Float, Float)) #-}
-- FieldSource ----------------------------------------------------------------
-- | How a source value is combined into one field cell.
class FieldSource a where
  -- | @addSource delta value cell mul@: add the delta-scaled value to the
  -- existing cell, weighted by the per-cell multiplier.
  addSource :: Delta -> a -> a -> a -> a
-- Scalar (density) fields: simple scaled addition.
instance FieldSource Float where
  addSource !delta !value !a !mul
    = a ~+~ (value * delta * mul)
  {-# INLINE addSource #-}
-- Vector (velocity) fields: componentwise, with the multiplier negated
-- per component.
instance FieldSource (Float, Float) where
  addSource !delta (newA, newB) (a,b) (mulA, mulB)
    = ( a + (newA * delta * (-mulA))
      , b + (newB * delta * (-mulB)))
  {-# INLINE addSource #-}
| gscalzo/HaskellTheHardWay | gloss-try/gloss-master/gloss-examples/raster/Fluid/src-repa/Stage/Sources.hs | mit | 2,005 | 0 | 12 | 641 | 474 | 251 | 223 | -1 | -1 |
{-# htermination (compareMyBool :: MyBool -> MyBool -> Ordering) #-}
import qualified Prelude
-- Auto-generated termination-analysis benchmark (TPDB): a desugared
-- encoding of Ord MyBool's compare. The exact defunctionalized structure
-- (compare0/1/2/3 chain) is intentional — do not "simplify" it.
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Ordering = LT | EQ | GT ;
-- Stage 0: both earlier stages failed, so the left argument is greater.
compare0 x y MyTrue = GT;
otherwise :: MyBool;
otherwise = MyTrue;
-- Stage 1: dispatch on (x <= y); MyTrue means LT, else fall through to GT.
compare1 x y MyTrue = LT;
compare1 x y MyFalse = compare0 x y otherwise;
-- (<=) on MyBool with MyFalse < MyTrue.
ltEsMyBool :: MyBool -> MyBool -> MyBool
ltEsMyBool MyFalse MyFalse = MyTrue;
ltEsMyBool MyFalse MyTrue = MyTrue;
ltEsMyBool MyTrue MyFalse = MyFalse;
ltEsMyBool MyTrue MyTrue = MyTrue;
-- Stage 2: dispatch on (x == y); MyTrue means EQ, else test (<=).
compare2 x y MyTrue = EQ;
compare2 x y MyFalse = compare1 x y (ltEsMyBool x y);
-- (==) on MyBool.
esEsMyBool :: MyBool -> MyBool -> MyBool
esEsMyBool MyFalse MyFalse = MyTrue;
esEsMyBool MyFalse MyTrue = MyFalse;
esEsMyBool MyTrue MyFalse = MyFalse;
esEsMyBool MyTrue MyTrue = MyTrue;
compare3 x y = compare2 x y (esEsMyBool x y);
-- Entry point: three-way comparison built from (==) then (<=).
compareMyBool :: MyBool -> MyBool -> Ordering
compareMyBool x y = compare3 x y;
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/compare_2.hs | mit | 942 | 0 | 8 | 191 | 319 | 174 | 145 | 24 | 1 |
module Graphics.Shader.Program (
ShaderState(..), Shader,
emptyAttribs, mkShaderAttrib, mkShaderAttrib2,
mkVertexShader,
mkFragmentShader,
) where
--------------------------------------------------------------------------------
import Control.Monad.State
import Graphics.Shader.Internal.Statement
import Graphics.Shader.Internal.Variable
--------------------------------------------------------------------------------
-- | Builder state threaded through shader construction: the next fresh
-- variable id and the statements accumulated so far.
data ShaderState = ShaderState {
    nextVarID :: ShaderVarID,
    stmt :: Statement
  }
-- | Shader-construction monad.
type Shader a = State ShaderState a
-- | A compiled shader: its input attributes, output attributes, and body.
data ShaderProgram a b = ShaderProgram {
    inputs :: ShaderAttributes a,
    outputs :: ShaderAttributes b,
    shaderStatements :: Statement
  }
-- | An indexed collection of shader attributes; 'getAttrib' looks one up
-- by position.
data ShaderAttributes a = ShaderAttributes {
    numAttributes :: Int,
    getAttrib :: Int -> ShaderVarRep
  }
-- | The empty attribute set; looking anything up is a programming error.
emptyAttribs :: ShaderAttributes ()
emptyAttribs = ShaderAttributes { numAttributes = 0, getAttrib = error "No attributes!" }
-- | Wrap a single shader variable as a one-element attribute set;
-- every index resolves to that variable.
mkShaderAttrib :: ShaderVar a -> ShaderAttributes (ShaderVar a)
mkShaderAttrib var = ShaderAttributes
  { numAttributes = 1
  , getAttrib = const var
  }

-- | Wrap a pair of shader variables as a two-element attribute set;
-- index 1 selects the second variable, anything else the first.
mkShaderAttrib2 :: (ShaderVar a, ShaderVar b) -> ShaderAttributes (ShaderVar a, ShaderVar b)
mkShaderAttrib2 (firstVar, secondVar) = ShaderAttributes
  { numAttributes = 2
  , getAttrib = select
  }
  where
    select 1 = secondVar
    select _ = firstVar

-- | Shorthand for a 4-component float vector (e.g. clip-space position).
type Vec4f = ShaderVec4 Float
-- | Build a vertex-shader program. The caller's attributes are re-indexed
-- so each input knows its slot, the shader function is run to accumulate
-- statements, and the resulting inputs/outputs/body are packaged up.
mkVertexShader :: ShaderAttributes a ->
                  (ShaderAttributes a -> Shader (ShaderVar Vec4f, ShaderAttributes b)) ->
                  ShaderProgram a b
mkVertexShader attribs shaderFn = let
  -- Stamp each input attribute with its index as its variable id.
  input = attribs { getAttrib = \idx -> (\sa -> sa { varID = idx }) (getAttrib attribs idx) }
  shader = shaderFn input
  -- Fresh variable ids start after the declared inputs.
  -- NOTE(review): the vertex-position variable returned by the shader is
  -- discarded here — confirm whether it should be recorded in the program.
  -- (Fix: bound as _posVar to silence the unused-binding warning.)
  ((_posVar, out), st) = runState shader $ ShaderState {
      nextVarID = (numAttributes attribs),
      stmt = emptyStmt
    }
  in
    ShaderProgram { inputs = input, outputs = out, shaderStatements = stmt st }
-- | Build a fragment-shader program from its varyings. Mirrors
-- 'mkVertexShader', but a fragment shader has no declared outputs
-- (its color result is discarded here) so 'outputs' is empty.
mkFragmentShader :: ShaderAttributes a ->
                    (ShaderAttributes a -> Shader (ShaderVar Vec4f)) -> ShaderProgram a ()
mkFragmentShader varyings shaderFn = let
  -- Stamp each varying with its index as its variable id.
  input = varyings { getAttrib = \idx -> (\sa -> sa { varID = idx }) (getAttrib varyings idx) }
  shader = shaderFn input
  (_, st) = runState shader $ ShaderState {
      nextVarID = (numAttributes varyings),
      stmt = emptyStmt
    }
  in
    ShaderProgram { inputs = input, outputs = emptyAttribs, shaderStatements = stmt st }
| Mokosha/shaders | Graphics/Shader/Program.hs | mit | 2,365 | 0 | 16 | 453 | 678 | 382 | 296 | 51 | 2 |
-- clay-custom.hs
-- This is an example of adding custom values using the other type class or
-- fallback operator `-:` to explicitly specify values
-- http://www.shakthimaan.com/posts/2016/01/27/haskell-web-programming/news.html
{-# LANGUAGE OverloadedStrings #-}
import Clay
-- | Print a stylesheet for @body@ to stdout, demonstrating two escape
-- hatches in Clay: 'other' for a value Clay cannot express (here an
-- !important suffix) and the fallback operator (-:) for a raw
-- property/value pair.
main :: IO ()
main = putCss $
  body ?
    do fontSize (other "11pt !important")
       "border" -: "0"
| katychuang/getting-started-with-haskell | tutorials/clay/clay-custom.hs | mit | 390 | 0 | 10 | 73 | 51 | 27 | 24 | 7 | 1 |
------------------------------------------------------------------------------
import System.IO
import XMonad
import XMonad.Layout.Gaps
import XMonad.Layout.NoBorders
import XMonad.Layout.ResizableTile
import XMonad.Layout.SimpleFloat
import XMonad.Layout.Spacing
import XMonad.Hooks.DynamicLog
import XMonad.Hooks.ManageDocks
import XMonad.Hooks.SetWMName
import XMonad.Util.Run (spawnPipe)
import XMonad.Util.EZConfig (additionalKeys)
import qualified Data.Map as M
import qualified XMonad.Util.CustomKeys as C
------------------------------------------------------------------------------
main :: IO ()
main = do
  -- Pipe to the workspace dzen2 bar; 'myLogHook' writes into it.
  h <- spawnPipe workspaceBar
  -- Start the conky|dzen2 stats bar.  Its handle is never written to --
  -- conky feeds that dzen2 instance directly; we only need it running.
  c <- spawnPipe statsBar
  -- spawn trayerCmd
  xmonad $ defaultConfig
    { borderWidth = 4
    , focusedBorderColor = "#709080"
    , keys = C.customKeys delkeys inskeys
    , layoutHook = myLayoutHook
    , logHook = myLogHook h
    , manageHook = myManageHook <+> manageHook defaultConfig
    , modMask = mod4Mask
    , normalBorderColor = "#000000"
    , startupHook = setWMName "LG3D" -- Workaround for Java Swing issues.
    , terminal = "urxvtc"
    , workspaces = myWorkspaces
    }
  where
    -- Remove the stock mod+b binding; 'myKeys' re-adds it below
    -- (alongside the other custom bindings) via inskeys.
    delkeys XConfig {modMask = modm} = [(modm, xK_b)]
    inskeys = myKeys
------------------------------------------------------------------------------
-- dzen2 workspace bar.
-- Left-aligned dzen2 bar showing workspaces; fed by 'myLogHook'.
workspaceBar = "dzen2 -bg '#000000' -ta l -h 24 -w 1600 -fn '-*-terminus-*-r-normal-*-12-*-*-*-*-*-*-*' -e '' "
------------------------------------------------------------------------------
-- conky statistics bar.
--
-- conky's output is piped into a second instance of dzen2 which sits
-- on the right side of the workspace bar.
-- NOTE(review): the -x '1600' offset must equal the workspace bar's
-- -w width above -- keep the two numbers in sync.
statsBar = "conky -c /home/john/.xmonad/conky_dzen | dzen2 -x '1600' -w '320' -h '24' -ta 'r' -bg '#000000' -fg '#777777' -y '0' -fn '-*-terminus-*-r-normal-*-12-*-*-*-*-*-*-*'"
------------------------------------------------------------------------------
-- Trayer startup command.
--trayerCmd = "trayer --edge top --align right --SetDockType true --SetPartialStrut true --expand true --width 120 --height 24 --transparent true --tint 0x000000 --alpha 0"
------------------------------------------------------------------------------
-- Per-window rules matched on WM_CLASS (className) or the resource
-- name: float media/image apps, keep tray windows unmanaged.
-- NOTE(review): mplayer normally reports class "MPlayer" (capital P) --
-- confirm the "Mplayer" entry actually matches on this system.
myManageHook = composeAll
  [ className =? "Mplayer" --> doFloat
  , className =? "Gimp" --> doFloat
  , className =? "vlc" --> doFloat
  , className =? "Empathy" --> doFloat
  , className =? "trayer" --> doIgnore
  , resource =? "stalonetray" --> doIgnore
  , resource =? "wicd-gtk" --> doFloat
  ]
------------------------------------------------------------------------------
-- Pretty-printer feeding workspace/layout/title info into the dzen2
-- workspace bar through handle h.  The doubled 'pad' widens the
-- clickable cells around each workspace icon.
myLogHook h = dynamicLogWithPP $ defaultPP
  { ppCurrent = dzenColor "#000000" "#709080" . pad . pad
  , ppVisible = dzenColor "#7C7A7B" "#000000" . pad . pad
  , ppHidden = dzenColor "#b8bcb8" "#000000" . pad . pad
  , ppHiddenNoWindows = dzenColor "#444444" "#000000" . pad . pad
  , ppLayout = dzenColor "#dadada" "#000000" . pad . pad
  , ppUrgent = dzenColor "#FF0000" "#000000" . pad . pad
  , ppTitle = dzenColor "#777777" "#000000"
  , ppOutput = hPutStrLn h
  }
------------------------------------------------------------------------------
-- | Workspace labels: dzen2 ^i() escapes referencing the sm4tik icon
-- files, so each workspace shows as an icon in the bar.
myWorkspaces = [ iconTag name | name <- iconFiles ]
  where
    iconTag file = "^i(/home/john/.xmonad/icons/sm4tik/" ++ file ++ ")"
    iconFiles =
      [ "diskette.xbm"
      , "ac.xbm"
      , "fox.xbm"
      , "info_01.xbm"
      , "cpu.xbm"
      , "shroom.xbm"
      , "note.xbm"
      , "phones.xbm"
      ]
------------------------------------------------------------------------------
-- Gap size; only referenced by the disabled spaced layout below.
gapWidth = 6
-- myTiled = avoidStruts $ tiled
--myTiled = spacing gapWidth $ avoidStruts $ gaps [(U, gapWidth), (D, gapWidth), (L, gapWidth), (R, gapWidth)] $ tiled
-- Standard tall layout; avoidStruts leaves room for the dzen2 bars.
myTiled = avoidStruts $ tiled
  where
    tiled = Tall nmaster delta ratio
    -- Default number of windows in the master pane.
    nmaster = 1
    -- Default proportion of the screen occupied by the master pane.
    ratio = 1/2
    -- Percent of screen to increment when resizing panes.
    delta = 3/100
-- Fullscreen without borders, still respecting the bars.
myFull = avoidStruts $ noBorders Full
myLayoutHook = myTiled ||| myFull ||| simpleFloat
------------------------------------------------------------------------------
-- Custom bindings inserted on top of the defaults (see inskeys in
-- 'main'): terminal, dmenu launcher, and strut (bar) toggling.
myKeys conf@(XConfig {modMask = modm}) =
  [ ((modm, xK_Return), spawn $ XMonad.terminal conf)
  , ((modm, xK_p), spawn "dmenu_run")
  , ((modm, xK_b), sendMessage ToggleStruts)
  ]
| jhm/dotfiles | xmonad/.xmonad/xmonad.hs | mit | 4,563 | 2 | 11 | 983 | 737 | 430 | 307 | 72 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-serversideencryptionbydefault.html
module Stratosphere.ResourceProperties.S3BucketServerSideEncryptionByDefault where
import Stratosphere.ResourceImports
-- | Full data type definition for S3BucketServerSideEncryptionByDefault. See
-- 's3BucketServerSideEncryptionByDefault' for a more convenient
-- constructor.
data S3BucketServerSideEncryptionByDefault =
  S3BucketServerSideEncryptionByDefault
  { _s3BucketServerSideEncryptionByDefaultKMSMasterKeyID :: Maybe (Val Text)
    -- ^ Optional KMS master key id; omitted from the JSON when Nothing.
  , _s3BucketServerSideEncryptionByDefaultSSEAlgorithm :: Val Text
    -- ^ Required server-side encryption algorithm (see the AWS docs
    -- linked above for the accepted values).
  } deriving (Show, Eq)
-- Serialise to CloudFormation JSON; 'catMaybes' drops the optional
-- KMSMasterKeyID pair when it is unset.
instance ToJSON S3BucketServerSideEncryptionByDefault where
  toJSON S3BucketServerSideEncryptionByDefault{..} =
    object $
    catMaybes
    [ fmap (("KMSMasterKeyID",) . toJSON) _s3BucketServerSideEncryptionByDefaultKMSMasterKeyID
    , (Just . ("SSEAlgorithm",) . toJSON) _s3BucketServerSideEncryptionByDefaultSSEAlgorithm
    ]
-- | Constructor for 'S3BucketServerSideEncryptionByDefault' containing
-- required fields as arguments.
s3BucketServerSideEncryptionByDefault
  :: Val Text -- ^ 'sbssebdSSEAlgorithm'
  -> S3BucketServerSideEncryptionByDefault
s3BucketServerSideEncryptionByDefault sSEAlgorithmarg =
  S3BucketServerSideEncryptionByDefault
  { _s3BucketServerSideEncryptionByDefaultKMSMasterKeyID = Nothing
  , _s3BucketServerSideEncryptionByDefaultSSEAlgorithm = sSEAlgorithmarg
  }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-serversideencryptionbydefault.html#cfn-s3-bucket-serversideencryptionbydefault-kmsmasterkeyid
sbssebdKMSMasterKeyID :: Lens' S3BucketServerSideEncryptionByDefault (Maybe (Val Text))
sbssebdKMSMasterKeyID = lens _s3BucketServerSideEncryptionByDefaultKMSMasterKeyID (\s a -> s { _s3BucketServerSideEncryptionByDefaultKMSMasterKeyID = a })
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-serversideencryptionbydefault.html#cfn-s3-bucket-serversideencryptionbydefault-ssealgorithm
sbssebdSSEAlgorithm :: Lens' S3BucketServerSideEncryptionByDefault (Val Text)
sbssebdSSEAlgorithm = lens _s3BucketServerSideEncryptionByDefaultSSEAlgorithm (\s a -> s { _s3BucketServerSideEncryptionByDefaultSSEAlgorithm = a })
| frontrowed/stratosphere | library-gen/Stratosphere/ResourceProperties/S3BucketServerSideEncryptionByDefault.hs | mit | 2,435 | 0 | 13 | 213 | 266 | 152 | 114 | 28 | 1 |
module Language.Lambda.Examples.NatSpec where
import Test.Hspec
import Language.Lambda.HspecUtils
-- | Hspec examples evaluating Church-encoded natural-number arithmetic
-- (successor, addition, multiplication, exponentiation) through the
-- lambda-calculus evaluator via 'shouldEvalTo'.
spec :: Spec
spec = describe "Nat" $ do
  -- Nat is the definition of natural numbers. More precisely, Nat
  -- is the set of nonnegative integers. We represent nats using
  -- Church Encodings:
  --
  -- 0: \f x. x
  -- 1: \f x. f x
  -- 2: \f x. f (f x)
  -- ...and so on
  describe "successor" $ do
    -- successor is a function that adds 1
    -- succ(0) = 1
    -- succ(1) = 2
    -- ... and so forth
    --
    -- successor is defined by
    -- succ = \n f x. f (n f x)
    it "succ 0 = 1" $
      "(\\n f x. f (n f x)) (\\f x. x)" `shouldEvalTo` "\\f x. f x"
    it "succ 1 = 2" $
      "(\\n f x. f (n f x)) (\\f x. f x)" `shouldEvalTo` "\\f x. f (f x)"
  describe "add" $ do
    -- add(m, n) = m + n
    --
    -- It is defined by applying successor m times on n:
    -- add = \m n f x. m f (n f x)
    it "add 0 2 = 2" $
      "(\\m n f x. m f (n f x)) (\\f x. x) (\\f x. f (f x))"
        `shouldEvalTo` "\\f x. f (f x)"
    it "add 3 2 = 5" $
      "(\\m n f x. m f (n f x)) (\\f x. f (f (f x))) (\\f x. f (f x))"
        `shouldEvalTo` "\\f x. f (f (f (f (f x))))"
    -- Here, we use `\f x. n f x` instead of `n`. This is because
    -- I haven't implemented eta conversion
    it "add 0 n = n" $
      "(\\m n f x. m f (n f x)) (\\f x. x) n"
        `shouldEvalTo` "\\f x. n f x"
  describe "multiply" $ do
    -- multiply(m, n) = m * n
    --
    -- multiply is defined by applying add m times
    -- multiply = \m n f x. m (n f x) x)
    --
    -- Using eta conversion, we can omit the parameter x
    -- multiply = \m n f. m (n f)
    it "multiply 0 2 = 0" $
      "(\\m n f. m (n f)) (\\f x. x) (\\f x. f (f x))"
        `shouldEvalTo` "\\f x. x"
    it "multiply 2 3 = 6" $
      "(\\m n f. m (n f)) (\\f x. f (f x)) (\\f x. f (f (f x)))"
        `shouldEvalTo` "\\f x. f (f (f (f (f (f x)))))"
    it "multiply 0 n = 0" $
      "(\\m n f. m (n f)) (\\f x. x) n"
        `shouldEvalTo` "\\f x. x"
    it "multiply 1 n = n" $
      "(\\m n f. m (n f)) (\\f x. f x) n"
        `shouldEvalTo` "\\f x. n f x"
  describe "power" $ do
    -- The function power raises m to the power of n.
    -- power(m, n) = m^n
    --
    -- power is defined by applying multiply n times
    -- power = \m n f x. (n m) f x
    --
    -- Using eta conversion again, we can omit the parameter f
    -- power = \m n = n m
    -- NOTE: Here we use the first form to get more predictable
    -- variable names. Otherwise, alpha conversion will choose a random
    -- unique variable.
    it "power 0 1 = 0" $
      "(\\m n f x. (n m) f x) (\\f x. x) (\\f x. f x)"
        `shouldEvalTo` "\\f x. x"
    it "power 2 3 = 8" $
      "(\\m n f x. (n m) f x) (\\f x. f (f x)) (\\f x. f (f (f x)))"
        `shouldEvalTo` "\\f x. f (f (f (f (f (f (f (f x)))))))"
    it "power n 0 = 1" $
      "(\\m n f x. (n m) f x) n (\\f x. x)"
        `shouldEvalTo` "\\f x. f x"
    it "power n 1 = n" $
      "(\\m n f x. (n m) f x) n (\\f x. f x)"
        `shouldEvalTo` "\\f x. n f x"
| sgillespie/lambda-calculus | test/Language/Lambda/Examples/NatSpec.hs | mit | 3,082 | 0 | 13 | 1,038 | 323 | 181 | 142 | 46 | 1 |
-- | A minimal example type wrapping a single Bool.
data Person = Person Bool deriving (Show)

-- | Print a person's Show representation to stdout.
-- ('print' is exactly 'putStrLn . show', so this is the idiomatic form.)
printPerson :: Person -> IO ()
printPerson = print
| Numberartificial/workflow | haskell-first-principles/haskell-from-first-principles-master/06/06.14.01-does-it-typecheck1.hs | mit | 117 | 0 | 7 | 19 | 48 | 24 | 24 | 3 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Estuary.Help.Saludos where
import Reflex
import Reflex.Dom
import Data.Text
import GHCJS.DOM.EventM
import Estuary.Widgets.Reflex
import Estuary.Widgets.Reflex
--render multiple sub-help files
-- | Top-level help page for the Saludos language: the about blurb
-- followed by one toggleable example/reference entry per keyword.
saludosHelpFile :: MonadWidget t m => m ()
saludosHelpFile = divClass "languageHelpContainer" $ divClass "languageHelp" $ do
  about
  functionRef "hola"
  functionRef "cómo estas"
  functionRef "saludos"
  functionRef "que tal"
  functionRef "todo bien"
  return ()
-- about
-- | Short header widget describing the language.
about :: MonadWidget t m => m ()
about = do
  divClass "about primary-color code-font" $ text "Saludos"
  divClass "about primary-color code-font" $ text "A mini live coding esolang developed in Bogotá, Colombia."
-- | Example snippet shown for each Saludos keyword.  A catch-all case
-- returns the empty text so an unknown keyword renders as nothing
-- instead of crashing with a pattern-match failure.
exampleText :: Text -> Text
exampleText "hola" = "¡hola!"
exampleText "cómo estas" = "¡cómo estas!"
exampleText "saludos" = "¡saludos!"
exampleText "que tal" = "¡hola! que tal"
exampleText "todo bien" = "¡saludos! todo bien 2"
exampleText _ = ""
-- | Reference description shown for each Saludos keyword.  A catch-all
-- case returns the empty text so an unknown keyword renders as nothing
-- instead of crashing with a pattern-match failure.
referenceText :: Text -> Text
referenceText "hola" = "returns Dirt's \"moog\" sample"
referenceText "cómo estas" = "returns Dirt's \"arpy\" sample"
referenceText "saludos" = "returns Dirt's \"bd\" sample"
referenceText "que tal" = "returns TidalCycles' brak"
referenceText "todo bien" = "returns TidalCycles' chop"
referenceText _ = ""
-- help files for samples
-- | One help entry for keyword x: a button that toggles between the
-- example text (shown initially) and the reference text.
functionRef :: MonadWidget t m => Text -> m ()
functionRef x = divClass "helpWrapper" $ do
  switchToReference <- buttonWithClass' x
  -- The two visibility flags start opposite and flip on every click,
  -- so exactly one of the example/reference widgets is shown.
  exampleVisible <- toggle True switchToReference
  referenceVisible <- toggle False switchToReference
  hideableWidget exampleVisible "exampleText primary-color code-font" $ text (exampleText x)
  hideableWidget referenceVisible "referenceText code-font" $ text (referenceText x)
  return ()
| d0kt0r0/estuary | client/src/Estuary/Help/Saludos.hs | gpl-3.0 | 1,733 | 0 | 11 | 255 | 391 | 186 | 205 | 41 | 1 |
module Ast where
-- | A whole program: a list of class declarations.
-- NOTE(review): unlike the other types here, Program derives nothing --
-- confirm whether Show/Eq were omitted intentionally.
data Program = Program [ClassDecl]
-- | A class: name, superclass name, fields (each tagged static or not)
-- and methods.
data ClassDecl = Class
  { className :: Id
  , classSuper :: Id
  , classVars :: [(VarKind, VarDecl)]
  , classMethods :: [MethodDecl]
  } deriving (Show, Eq)
-- Check naming of fields
-- | A method: return type, name, parameters, local variables, body
-- statements and the final return expression.
data MethodDecl = Method
  { retType :: ExpType
  , methodName :: Id
  , methodArgs :: [VarDecl]
  , methodLocals :: [VarDecl]
  , methodStmts :: [Statement]
  , retExp :: Exp
  } deriving (Show, Eq)
-- | Whether a class field is static or per-instance.
data VarKind = Static | NonStatic deriving (Show, Eq)
-- | A variable declaration: its type and name.
data VarDecl = Var
  { varType :: ExpType
  , varName :: Id
  } deriving (Show, Eq)
-- | Statements of the language.
data Statement = Block [Statement]
               | If Exp Statement Statement
               | While Exp Statement
               | Println Exp
               | Assignment Id Exp
               | ArrayAssignment Id Exp Exp
               | Break
               | Continue
               deriving (Show, Eq)
-- | Expressions, including literals, arithmetic/logic via 'Operation',
-- array indexing, method calls and object creation.
data Exp = Operation Exp BinOp Exp
         | Subscript Exp Exp
         | Length Exp
         | MethodCall Exp Id [Exp]
         | FieldRef Exp Id
         | EInteger Int
         | ETrue | EFalse
         | EId Id
         | This
         | NewArray ExpType Exp
         | NewId Id
         | Not Exp
         | Null
         | EString String
         | EFloat Float
         deriving (Show, Eq)
-- | Binary operators usable in 'Operation'.
data BinOp = And
           | Or
           | Equal
           | LessThan
           | LessThanEq
           | GreaterThan
           | GreaterThanEq
           | Plus
           | Minus
           | Multiplication
           | Division
           deriving (Show, Eq)
-- | Types of expressions; arrays nest via 'ArrayType'.
data ExpType = ArrayType ExpType
             | BoolType
             | IntType
             | ObjectType Id
             | StringType
             | FloatType
             deriving (Show, Eq)
-- | Identifiers are plain strings.
type Id = String
| WraithM/HJavaInterp | src/Ast.hs | gpl-3.0 | 1,543 | 0 | 10 | 502 | 452 | 273 | 179 | 66 | 0 |
module Token where
-- | The Brainfuck command characters, named for use by the parser.
-- Explicit signatures pin these to Char (they previously had no
-- type signatures at all).
openBracket, closeBracket, greater, lesser, plus, minus, dot, comma :: Char
openBracket = '['
closeBracket = ']'
greater = '>'
lesser = '<'
plus = '+'
minus = '-'
dot = '.'
comma = ','
| agremm/bfint | src/Token.hs | gpl-3.0 | 172 | 0 | 4 | 71 | 44 | 27 | 17 | 9 | 1 |
-- Print out the nth prime, where n is the 1st argument
module Main where
import NaiveSieve (primes)
import System (getArgs)
import Text.Read (readMaybe)
-- | Print the pair (n, nth prime), with n 1-indexed.
-- NOTE(review): partial for n < 1 ((!!) with a negative index errors),
-- and diverges if 'primes' yields fewer than n elements -- callers
-- must supply a positive n.
printNthPrime :: Int -> IO ()
printNthPrime n = print (n, primes !! (n - 1))
-- | Read n from the first command-line argument and print the nth
-- prime.  Prints a usage message instead of crashing when the argument
-- is missing ('head' on []) or not an integer (partial 'read').
main :: IO ()
main = do
  args <- getArgs
  case args of
    (a:_) | Just n <- readMaybe a -> printNthPrime n
    _ -> putStrLn "usage: nth-prime <n>"
| dkensinger/haskell | haskell-primes/NaiveSieveTest.hs | gpl-3.0 | 274 | 0 | 9 | 62 | 89 | 48 | 41 | 8 | 1 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
import Data.Monoid (mappend)
import Hakyll
--------------------------------------------------------------------------------
-- | Hakyll site builder.  The repeated verbatim-copy rules and the two
-- identical "static page with a title" rules are factored into the
-- helpers below; rule order and behavior are unchanged.
main :: IO ()
main = hakyll $ do
    -- Assets copied through unchanged (same path, same contents).
    mapM_ copyVerbatim ["images/**", "static/**", "LICENSE.txt", "robots.txt", "js/*"]

    match "css/*" $ do
        route   idRoute
        compile compressCssCompiler

    match (fromList ["contact.md"]) $ do
        route $ setExtension "html"
        compile $ pandocCompiler
            >>= loadAndApplyTemplate "templates/default.html" defaultContext
            >>= relativizeUrls

    match "blog/*" $ do
        route $ setExtension "html"
        compile $ pandocCompiler
            >>= loadAndApplyTemplate "templates/post.html"    postCtx
            >>= loadAndApplyTemplate "templates/default.html" postCtx
            >>= relativizeUrls

    -- Generated page listing all posts, newest first.
    create ["archive.html"] $ do
        route idRoute
        compile $ do
            posts <- recentFirst =<< loadAll "blog/*"
            let archiveCtx =
                    listField "posts" postCtx (return posts) `mappend`
                    constField "title" "Blog Posts"          `mappend`
                    defaultContext
            makeItem ""
                >>= loadAndApplyTemplate "templates/archive.html" archiveCtx
                >>= loadAndApplyTemplate "templates/default.html" archiveCtx
                >>= relativizeUrls

    -- Static HTML pages that only need a title substituted in.
    titledPage "about.html" "About"
    titledPage "legal.html" "Legal Information"

    match "index.html" $ do
        route idRoute
        compile $ do
            posts <- recentFirst =<< loadAll "blog/*"
            let indexCtx =
                    listField "posts" postCtx (return posts) `mappend`
                    constField "title" "Home"                `mappend`
                    defaultContext
            getResourceBody
                >>= applyAsTemplate indexCtx
                >>= loadAndApplyTemplate "templates/default.html" indexCtx
                >>= relativizeUrls

    match "templates/*" $ compile templateBodyCompiler
  where
    -- Copy a file or glob straight to the output directory.
    copyVerbatim pat = match pat $ do
        route   idRoute
        compile copyFileCompiler

    -- Render an HTML source through the default template with the
    -- given page title.
    titledPage pat title = match pat $ do
        route idRoute
        compile $ do
            let ctx = constField "title" title `mappend` defaultContext
            getResourceBody
                >>= applyAsTemplate ctx
                >>= loadAndApplyTemplate "templates/default.html" ctx
                >>= relativizeUrls
--------------------------------------------------------------------------------
-- | Template context for blog posts: a formatted $date$ field layered
-- over the default context.
postCtx :: Context String
postCtx = mconcat
    [ dateField "date" "%B %e, %Y"
    , defaultContext
    ]
| mcgirr/mikemcgirr-com | site.hs | gpl-3.0 | 3,422 | 0 | 21 | 1,180 | 644 | 285 | 359 | 83 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# OPTIONS -fno-warn-missing-signatures #-}
module Editor.Config where
import qualified Graphics.DrawingCombinators as Draw
import qualified Graphics.UI.Bottle.EventMap as E
import qualified Graphics.UI.Bottle.Widgets.FocusDelegator as FocusDelegator
-- Shorthand constructors for key events: 'mk' pairs a modifier set
-- with a key; ctrl/alt/shift/ctrlAlt/k take a character.
mk = E.KeyEventType
noMods = mk E.noMods
ctrl = mk E.ctrl . E.charKey
alt = mk E.alt . E.charKey
shift = mk E.shift . E.charKey
ctrlAlt = mk (E.noMods {E.modCtrl = True, E.modAlt = True}) . E.charKey
-- altShift = mk E.noMods { E.modAlt = True, E.modShift = True } . E.charKey
k = noMods . E.charKey
-- pasteKeys = [ctrl 'v']
-- cutKeys = [ctrl 'x']
-- actionKeys = [noMods E.KeyEnter]
-- Each *Keys binding lists every key event that triggers the action.
quitKeys = [ctrl 'q']
undoKeys = [ctrl 'z']
redoKeys = [ctrl 'y']
makeBranchKeys = [ctrl 's']
-- moveToParentKeys = [mk E.alt E.KeyLeft]
overlayDocKeys = [noMods E.KeyF1, alt 'h']
addNextParamKeys = [E.KeyEventType E.noMods E.KeySpace]
delBranchKeys = [alt 'o']
closePaneKeys = [alt 'w']
replaceKeys = [alt 'r']
pickResultKeys = [noMods E.KeyEnter]
jumpToDefinitionKeys = [noMods E.KeyEnter]
delKeys = [noMods E.KeyBackspace, noMods E.KeyDel, mk E.alt E.KeyDel]
giveAsArgumentKeys = [k ']', shift '0']
callWithArgumentKeys = [k '[', shift '9']
addNextArgumentKeys = [E.KeyEventType E.noMods E.KeySpace]
debugModeKeys = [ctrlAlt 'd']
-- Shift+Right enters a focus-delegating widget, Shift+Left leaves it.
exprFocusDelegatorKeys = FocusDelegator.Keys {
  FocusDelegator.startDelegatingKey = mk E.shift E.KeyRight,
  FocusDelegator.stopDelegatingKey = mk E.shift E.KeyLeft
  }
newDefinitionKeys = [alt 'n']
-- Syntax colors (Draw.Color r g b a, components in [0,1]).
builtinColor = Draw.Color 1 0.6 0.2 1
definitionColor = Draw.Color 0.8 0.5 1 1
parameterColor = Draw.Color 0.2 0.8 0.9 1
literalIntColor = Draw.Color 0 1 0 1
previousCursorKeys = [mk E.alt E.KeyLeft]
focusedHoleBackgroundColor = Draw.Color 1 0 0 0.361
unfocusedHoleBackgroundColor = Draw.Color 1 0 0 1
parenHighlightColor = Draw.Color 0.3 0 1 0.25
-- Placeholder shown for definitions without a name.
unnamedStr = "<noname>"
jumpToLhsKeys = [k '`']
jumpToRhsKeys = [k '=', noMods E.KeyPadEqual]
lambdaWrapKeys = [k '\\']
-- Sizes are in points; *ScaleFactor values are relative multipliers.
lambdaColor = Draw.Color 1 0.2 0.2 1
lambdaTextSize = 30
rightArrowColor = Draw.Color 1 0.2 0.2 1
rightArrowTextSize = 30
whereColor = Draw.Color 0.8 0.6 0.1 1
whereTextSize = 16
whereScaleFactor = 0.85
foldKeys = [k '-']
unfoldKeys = foldKeys
helpTextSize = 10
baseTextSize = 30
typeScaleFactor = 0.6
squareParensScaleFactor = 0.96 | nimia/bottle | codeedit/Editor/Config.hs | gpl-3.0 | 2,432 | 0 | 9 | 446 | 726 | 397 | 329 | 58 | 1 |
-- grid is a game written in Haskell
-- Copyright (C) 2018 karamellpelle@hotmail.com
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with grid. If not, see <http://www.gnu.org/licenses/>.
--
module Game.Grid.GridWorld.Turn
(
Turn (..),
Z3,
straightTurn,
downTurn,
upTurn,
leftTurn,
rightTurn,
clockTurn,
anticlockTurn,
backTurn,
turnInverse,
turnDiff,
Dir (..),
direction,
leftDir,
rightDir,
backwardDir,
forwardDir,
downDir,
upDir,
) where
import MyPrelude
import Data.Int
-- what can be said about Turn?
--
-- Turn is generated by straightTurn, downTurn, upTurn, leftTurn, rightTurn:
-- ok, by idea.
--
-- Turn is a group:
-- ok.
--
-- |Turn| = 24:
-- we have 6 directions. every direction has 4 neighbour directions. this gives |Turn| <= 6*4 = 24.
-- I can however count 24 elements of Turn (4+4 in zx-plane, 4+4 in xy-plane, 4+4 in yz-plane).
-- hence |Turn| = 24.
--
-- Turn is not abelian:
-- up * left != left * up
--
--
--
--
-- ok. lets model the group Turn as the subset of M3x3( Z3 ) generated by straightTurn,
-- downTurn, upTurn, leftTurn, rightTurn.
-- GHC note: data Int8 = I8# Int#
-- | Scalar type of the matrix entries; only -1, 0 and 1 ever occur in
-- the generators below, and Int8 keeps the representation small.
type Z3 =
    Int8
-- | A turn, modelled as a 3x3 matrix (row-major: x-, y-, z-row) as
-- described above.  All entries strict.
data Turn =
    Turn !Z3 !Z3 !Z3
         !Z3 !Z3 !Z3
         !Z3 !Z3 !Z3
    deriving Eq
-- (actually, we can define Z3 using 2 bits, and put a whole 4x4 matrix into Word32. the matrix
-- operations can be implemented by multiplication, bitshifts, bitmasks, bitors. a nice property
-- is that each row and column consists of all zeros except one. but I found that this requires more
-- instructions than the above.)
-- a more important property of Turn is that we have a 0-element to start with. actually,
-- this structure of Turn turns out to be a monoid.
-- | 'straightTurn' (the identity matrix) is the unit; composition is
-- the matrix product implemented by 'appendTurn'.
instance Monoid Turn where
    mempty = straightTurn
    mappend = appendTurn
-- from right to left
-- (fixme: a* -> b*, b* -> a*)
appendTurn :: Turn -> Turn -> Turn
appendTurn (Turn ax0 ax1 ax2
                 ay0 ay1 ay2
                 az0 az1 az2)
           (Turn bx0 bx1 bx2
                 by0 by1 by2
                 bz0 bz1 bz2) =
    -- Matrix product: entry j of result row r is sum_k (b_kj * a_rk),
    -- i.e. each a-row is transformed by the columns of b.
    Turn (bx0 * ax0 + by0 * ax1 + bz0 * ax2)
         (bx1 * ax0 + by1 * ax1 + bz1 * ax2)
         (bx2 * ax0 + by2 * ax1 + bz2 * ax2)
         (bx0 * ay0 + by0 * ay1 + bz0 * ay2)
         (bx1 * ay0 + by1 * ay1 + bz1 * ay2)
         (bx2 * ay0 + by2 * ay1 + bz2 * ay2)
         (bx0 * az0 + by0 * az1 + bz0 * az2)
         (bx1 * az0 + by1 * az1 + bz1 * az2)
         (bx2 * az0 + by2 * az1 + bz2 * az2)
--------------------------------------------------------------------------------
--
-- | Identity: no rotation.
straightTurn :: Turn
straightTurn =
    Turn 1 0 0
         0 1 0
         0 0 1
straightTurn :: Turn
leftTurn :: Turn
leftTurn =
    Turn 0 0 (-1)
         0 1 0
         1 0 0
rightTurn :: Turn
rightTurn =
    Turn 0 0 1
         0 1 0
         (-1) 0 0
upTurn :: Turn
upTurn =
    Turn 0 1 0
         (-1) 0 0
         0 0 1
downTurn :: Turn
downTurn =
    Turn 0 (-1) 0
         1 0 0
         0 0 1
-- | Roll about the forward axis.
clockTurn :: Turn
clockTurn =
    Turn 1 0 0
         0 0 1
         0 (-1) 0
anticlockTurn :: Turn
anticlockTurn =
    Turn 1 0 0
         0 0 (-1)
         0 1 0
backTurn :: Turn -- ( the only one with two (-1) )
backTurn =
    Turn (-1) 0 0
         0 1 0
         0 0 (-1)
-- | Invert a turn.  Turn matrices are orthogonal, so the inverse is
-- simply the transpose.
turnInverse :: Turn -> Turn
turnInverse (Turn a b c
                  d e f
                  g h i) =
    Turn a d g
         b e h
         c f i
-- | The turn carrying @from@ to @to@: compose @to@ with the inverse
-- of @from@.
turnDiff :: Turn -> Turn -> Turn
turnDiff from to = mappend to (turnInverse from)
--------------------------------------------------------------------------------
-- direction
-- | A direction: one of the six axis-aligned unit vectors, stored as
-- strict Z3 components.
data Dir =
    Dir
    {
        dirX :: !Z3,
        dirY :: !Z3,
        dirZ :: !Z3
    } deriving Eq
-- | The direction a turn sends the forward axis to: the x-row of the
-- matrix (the remaining rows are irrelevant for heading).
direction :: Turn -> Dir
direction (Turn x0 x1 x2
                _ _ _
                _ _ _) =
    Dir x0 x1 x2
-- The six axis directions.
leftDir :: Dir
leftDir =
    Dir 0 0 (-1)
rightDir :: Dir
rightDir =
    Dir 0 0 1
backwardDir :: Dir
backwardDir =
    Dir (-1) 0 0
forwardDir :: Dir
forwardDir =
    Dir 1 0 0
downDir :: Dir
downDir =
    Dir 0 (-1) 0
upDir :: Dir
upDir =
    Dir 0 1 0
-- Debug-friendly renderings (temporary).
instance Show Turn where
    show (Turn x0 x1 x2 y0 y1 y2 z0 z1 z2) =
        "Turn (" ++ unwords (map show [x0, x1, x2]) ++ ") (" ++
                    unwords (map show [y0, y1, y2]) ++ ") (" ++
                    unwords (map show [z0, z1, z2]) ++ ")"
instance Show Dir where
    show (Dir x y z) =
        "Dir " ++ unwords (map show [x, y, z]) ++ " "
| karamellpelle/grid | source/Game/Grid/GridWorld/Turn.hs | gpl-3.0 | 5,302 | 0 | 24 | 1,769 | 1,254 | 684 | 570 | 161 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
-- |
-- Module : Network.Google.Sheets
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Reads and writes Google Sheets.
--
-- /See:/ <https://developers.google.com/sheets/ Google Sheets API Reference>
module Network.Google.Sheets
(
-- * Service Configuration
sheetsService
-- * OAuth Scopes
, spreadsheetsReadOnlyScope
, driveReadOnlyScope
, driveScope
, spreadsheetsScope
-- * API Declaration
, SheetsAPI
-- * Resources
-- ** sheets.spreadsheets.batchUpdate
, module Network.Google.Resource.Sheets.Spreadsheets.BatchUpdate
-- ** sheets.spreadsheets.create
, module Network.Google.Resource.Sheets.Spreadsheets.Create
-- ** sheets.spreadsheets.get
, module Network.Google.Resource.Sheets.Spreadsheets.Get
-- ** sheets.spreadsheets.sheets.copyTo
, module Network.Google.Resource.Sheets.Spreadsheets.Sheets.CopyTo
-- ** sheets.spreadsheets.values.append
, module Network.Google.Resource.Sheets.Spreadsheets.Values.Append
-- ** sheets.spreadsheets.values.batchClear
, module Network.Google.Resource.Sheets.Spreadsheets.Values.BatchClear
-- ** sheets.spreadsheets.values.batchGet
, module Network.Google.Resource.Sheets.Spreadsheets.Values.BatchGet
-- ** sheets.spreadsheets.values.batchUpdate
, module Network.Google.Resource.Sheets.Spreadsheets.Values.BatchUpdate
-- ** sheets.spreadsheets.values.clear
, module Network.Google.Resource.Sheets.Spreadsheets.Values.Clear
-- ** sheets.spreadsheets.values.get
, module Network.Google.Resource.Sheets.Spreadsheets.Values.Get
-- ** sheets.spreadsheets.values.update
, module Network.Google.Resource.Sheets.Spreadsheets.Values.Update
-- * Types
-- ** PivotGroupSortValueBucket
, PivotGroupSortValueBucket
, pivotGroupSortValueBucket
, pgsvbBuckets
, pgsvbValuesIndex
-- ** ValueRange
, ValueRange
, valueRange
, vrValues
, vrRange
, vrMajorDimension
-- ** SortRangeRequest
, SortRangeRequest
, sortRangeRequest
, srrSortSpecs
, srrRange
-- ** CopyPasteRequestPasteType
, CopyPasteRequestPasteType (..)
-- ** DeleteNamedRangeRequest
, DeleteNamedRangeRequest
, deleteNamedRangeRequest
, dnrrNamedRangeId
-- ** UpdateNamedRangeRequest
, UpdateNamedRangeRequest
, updateNamedRangeRequest
, unrrNamedRange
, unrrFields
-- ** BasicChartAxisPosition
, BasicChartAxisPosition (..)
-- ** ChartData
, ChartData
, chartData
, cdSourceRange
-- ** BatchClearValuesRequest
, BatchClearValuesRequest
, batchClearValuesRequest
, bcvrRanges
-- ** DeleteRangeRequestShiftDimension
, DeleteRangeRequestShiftDimension (..)
-- ** BasicChartSeriesTargetAxis
, BasicChartSeriesTargetAxis (..)
-- ** SpreadsheetProperties
, SpreadsheetProperties
, spreadsheetProperties
, spDefaultFormat
, spLocale
, spAutoRecalc
, spTitle
, spTimeZone
-- ** BOrders
, BOrders
, bOrders
, boBottom
, boLeft
, boRight
, boTop
-- ** TextFormatRun
, TextFormatRun
, textFormatRun
, tfrFormat
, tfrStartIndex
-- ** AddSheetRequest
, AddSheetRequest
, addSheetRequest
, asrProperties
-- ** SortSpec
, SortSpec
, sortSpec
, ssSortOrder
, ssDimensionIndex
-- ** BatchUpdateValuesRequestResponseDateTimeRenderOption
, BatchUpdateValuesRequestResponseDateTimeRenderOption (..)
-- ** CopyPasteRequest
, CopyPasteRequest
, copyPasteRequest
, cprDestination
, cprSource
, cprPasteOrientation
, cprPasteType
-- ** GridRange
, GridRange
, gridRange
, grEndColumnIndex
, grStartColumnIndex
, grEndRowIndex
, grStartRowIndex
, grSheetId
-- ** AppendDimensionRequestDimension
, AppendDimensionRequestDimension (..)
-- ** AddFilterViewResponse
, AddFilterViewResponse
, addFilterViewResponse
, afvrFilter
-- ** DimensionRangeDimension
, DimensionRangeDimension (..)
-- ** BooleanCondition
, BooleanCondition
, booleanCondition
, bcValues
, bcType
-- ** AutoResizeDimensionsRequest
, AutoResizeDimensionsRequest
, autoResizeDimensionsRequest
, ardrDimensions
-- ** DeleteRangeRequest
, DeleteRangeRequest
, deleteRangeRequest
, drrShiftDimension
, drrRange
-- ** Sheet
, Sheet
, sheet
, sData
, sMerges
, sProtectedRanges
, sBandedRanges
, sCharts
, sBasicFilter
, sConditionalFormats
, sFilterViews
, sProperties
-- ** GridCoordinate
, GridCoordinate
, gridCoordinate
, gcColumnIndex
, gcRowIndex
, gcSheetId
-- ** ClearValuesResponse
, ClearValuesResponse
, clearValuesResponse
, cvrClearedRange
, cvrSpreadsheetId
-- ** ClearBasicFilterRequest
, ClearBasicFilterRequest
, clearBasicFilterRequest
, cbfrSheetId
-- ** UpdateEmbeddedObjectPositionRequest
, UpdateEmbeddedObjectPositionRequest
, updateEmbeddedObjectPositionRequest
, ueoprNewPosition
, ueoprObjectId
, ueoprFields
-- ** SourceAndDestinationDimension
, SourceAndDestinationDimension (..)
-- ** BooleanRule
, BooleanRule
, booleanRule
, brFormat
, brCondition
-- ** CellFormatWrapStrategy
, CellFormatWrapStrategy (..)
-- ** SourceAndDestination
, SourceAndDestination
, sourceAndDestination
, sadDimension
, sadSource
, sadFillLength
-- ** PasteDataRequest
, PasteDataRequest
, pasteDataRequest
, pdrData
, pdrCoordinate
, pdrHTML
, pdrType
, pdrDelimiter
-- ** BatchUpdateValuesRequestValueInputOption
, BatchUpdateValuesRequestValueInputOption (..)
-- ** AppendCellsRequest
, AppendCellsRequest
, appendCellsRequest
, acrRows
, acrSheetId
, acrFields
-- ** FindReplaceResponse
, FindReplaceResponse
, findReplaceResponse
, frrValuesChanged
, frrFormulasChanged
, frrRowsChanged
, frrSheetsChanged
, frrOccurrencesChanged
-- ** PieChartSpec
, PieChartSpec
, pieChartSpec
, pcsPieHole
, pcsLegendPosition
, pcsDomain
, pcsSeries
, pcsThreeDimensional
-- ** AppendValuesResponse
, AppendValuesResponse
, appendValuesResponse
, avrSpreadsheetId
, avrUpdates
, avrTableRange
-- ** BatchUpdateValuesRequestResponseValueRenderOption
, BatchUpdateValuesRequestResponseValueRenderOption (..)
-- ** DataValidationRule
, DataValidationRule
, dataValidationRule
, dvrShowCustomUi
, dvrInputMessage
, dvrStrict
, dvrCondition
-- ** FilterView
, FilterView
, filterView
, fvSortSpecs
, fvNamedRangeId
, fvRange
, fvFilterViewId
, fvTitle
, fvCriteria
-- ** Color
, Color
, color
, cRed
, cAlpha
, cGreen
, cBlue
-- ** DeleteFilterViewRequest
, DeleteFilterViewRequest
, deleteFilterViewRequest
, dfvrFilterId
-- ** UpdateFilterViewRequest
, UpdateFilterViewRequest
, updateFilterViewRequest
, ufvrFilter
, ufvrFields
-- ** BasicChartSeries
, BasicChartSeries
, basicChartSeries
, bcsTargetAxis
, bcsSeries
, bcsType
-- ** AddProtectedRangeRequest
, AddProtectedRangeRequest
, addProtectedRangeRequest
, aprrProtectedRange
-- ** PieChartSpecLegendPosition
, PieChartSpecLegendPosition (..)
-- ** RepeatCellRequest
, RepeatCellRequest
, repeatCellRequest
, rcrCell
, rcrRange
, rcrFields
-- ** ConditionValue
, ConditionValue
, conditionValue
, cvRelativeDate
, cvUserEnteredValue
-- ** DeleteDimensionRequest
, DeleteDimensionRequest
, deleteDimensionRequest
, ddrRange
-- ** ClearValuesRequest
, ClearValuesRequest
, clearValuesRequest
-- ** FindReplaceRequest
, FindReplaceRequest
, findReplaceRequest
, frrMatchCase
, frrAllSheets
, frrIncludeFormulas
, frrMatchEntireCell
, frrRange
, frrSheetId
, frrFind
, frrSearchByRegex
, frrReplacement
-- ** MoveDimensionRequest
, MoveDimensionRequest
, moveDimensionRequest
, mdrDestinationIndex
, mdrSource
-- ** CellFormatVerticalAlignment
, CellFormatVerticalAlignment (..)
-- ** NumberFormatType
, NumberFormatType (..)
-- ** GradientRule
, GradientRule
, gradientRule
, grMidpoint
, grMaxpoint
, grMinpoint
-- ** CutPasteRequest
, CutPasteRequest
, cutPasteRequest
, cDestination
, cSource
, cPasteType
-- ** UpdateEmbeddedObjectPositionResponse
, UpdateEmbeddedObjectPositionResponse
, updateEmbeddedObjectPositionResponse
, ueoprPosition
-- ** ConditionValueRelativeDate
, ConditionValueRelativeDate (..)
-- ** Response
, Response
, response
, rAddFilterView
, rDuplicateFilterView
, rUpdateEmbeddedObjectPosition
, rAddSheet
, rFindReplace
, rAddProtectedRange
, rDeleteConditionalFormatRule
, rUpdateConditionalFormatRule
, rAddNamedRange
, rAddChart
, rAddBanding
, rDuplicateSheet
-- ** FilterCriteria
, FilterCriteria
, filterCriteria
, fcHiddenValues
, fcCondition
-- ** ErrorValue
, ErrorValue
, errorValue
, evType
, evMessage
-- ** UpdateConditionalFormatRuleRequest
, UpdateConditionalFormatRuleRequest
, updateConditionalFormatRuleRequest
, ucfrrRule
, ucfrrNewIndex
, ucfrrSheetId
, ucfrrIndex
-- ** DeleteConditionalFormatRuleRequest
, DeleteConditionalFormatRuleRequest
, deleteConditionalFormatRuleRequest
, dcfrrSheetId
, dcfrrIndex
-- ** SortSpecSortOrder
, SortSpecSortOrder (..)
-- ** OverlayPosition
, OverlayPosition
, overlayPosition
, opHeightPixels
, opOffSetYPixels
, opAnchorCell
, opWidthPixels
, opOffSetXPixels
-- ** DeleteEmbeddedObjectRequest
, DeleteEmbeddedObjectRequest
, deleteEmbeddedObjectRequest
, deorObjectId
-- ** SheetProperties
, SheetProperties
, sheetProperties
, sTabColor
, sGridProperties
, sSheetType
, sHidden
, sSheetId
, sTitle
, sRightToLeft
, sIndex
-- ** FilterViewCriteria
, FilterViewCriteria
, filterViewCriteria
, fvcAddtional
-- ** BatchUpdateValuesResponse
, BatchUpdateValuesResponse
, batchUpdateValuesResponse
, buvrTotalUpdatedColumns
, buvrResponses
, buvrSpreadsheetId
, buvrTotalUpdatedSheets
, buvrTotalUpdatedCells
, buvrTotalUpdatedRows
-- ** UpdateSheetPropertiesRequest
, UpdateSheetPropertiesRequest
, updateSheetPropertiesRequest
, usprFields
, usprProperties
-- ** Spreadsheet
, Spreadsheet
, spreadsheet
, sprSheets
, sprNamedRanges
, sprSpreadsheetId
, sprSpreadsheetURL
, sprProperties
-- ** InsertDimensionRequest
, InsertDimensionRequest
, insertDimensionRequest
, idrRange
, idrInheritFromBefore
-- ** PivotValueSummarizeFunction
, PivotValueSummarizeFunction (..)
-- ** InterpolationPoint
, InterpolationPoint
, interpolationPoint
, ipColor
, ipValue
, ipType
-- ** CellData
, CellData
, cellData
, cdTextFormatRuns
, cdNote
, cdUserEnteredValue
, cdUserEnteredFormat
, cdEffectiveFormat
, cdPivotTable
, cdFormattedValue
, cdDataValidation
, cdHyperlink
, cdEffectiveValue
-- ** ChartSourceRange
, ChartSourceRange
, chartSourceRange
, csrSources
-- ** AddNamedRangeResponse
, AddNamedRangeResponse
, addNamedRangeResponse
, anrrNamedRange
-- ** AddChartResponse
, AddChartResponse
, addChartResponse
, acrChart
-- ** UpdateChartSpecRequest
, UpdateChartSpecRequest
, updateChartSpecRequest
, ucsrSpec
, ucsrChartId
-- ** SetBasicFilterRequest
, SetBasicFilterRequest
, setBasicFilterRequest
, sbfrFilter
-- ** GridProperties
, GridProperties
, gridProperties
, gpFrozenColumnCount
, gpColumnCount
, gpHideGridlines
, gpFrozenRowCount
, gpRowCount
-- ** CellFormatHyperlinkDisplayType
, CellFormatHyperlinkDisplayType (..)
-- ** BasicFilterCriteria
, BasicFilterCriteria
, basicFilterCriteria
, bfcAddtional
-- ** AddBandingRequest
, AddBandingRequest
, addBandingRequest
, abrBandedRange
-- ** UpdateDimensionPropertiesRequest
, UpdateDimensionPropertiesRequest
, updateDimensionPropertiesRequest
, udprRange
, udprFields
, udprProperties
-- ** PivotTableCriteria
, PivotTableCriteria
, pivotTableCriteria
, ptcAddtional
-- ** AutoFillRequest
, AutoFillRequest
, autoFillRequest
, afrSourceAndDestination
, afrUseAlternateSeries
, afrRange
-- ** DuplicateSheetRequest
, DuplicateSheetRequest
, duplicateSheetRequest
, dsrNewSheetName
, dsrInsertSheetIndex
, dsrSourceSheetId
, dsrNewSheetId
-- ** DuplicateFilterViewResponse
, DuplicateFilterViewResponse
, duplicateFilterViewResponse
, dfvrFilter
-- ** SheetPropertiesSheetType
, SheetPropertiesSheetType (..)
-- ** BatchUpdateValuesRequest
, BatchUpdateValuesRequest
, batchUpdateValuesRequest
, buvrData
, buvrValueInputOption
, buvrIncludeValuesInResponse
, buvrResponseDateTimeRenderOption
, buvrResponseValueRenderOption
-- ** AddChartRequest
, AddChartRequest
, addChartRequest
, aChart
-- ** NamedRange
, NamedRange
, namedRange
, nrNamedRangeId
, nrName
, nrRange
-- ** MergeCellsRequest
, MergeCellsRequest
, mergeCellsRequest
, mcrMergeType
, mcrRange
-- ** MergeCellsRequestMergeType
, MergeCellsRequestMergeType (..)
-- ** CellFormatHorizontalAlignment
, CellFormatHorizontalAlignment (..)
-- ** BOrder
, BOrder
, bOrder
, boStyle
, boColor
, boWidth
-- ** ExtendedValue
, ExtendedValue
, extendedValue
, evBoolValue
, evNumberValue
, evErrorValue
, evStringValue
, evFormulaValue
-- ** AddNamedRangeRequest
, AddNamedRangeRequest
, addNamedRangeRequest
, aNamedRange
-- ** PivotFilterCriteria
, PivotFilterCriteria
, pivotFilterCriteria
, pfcVisibleValues
-- ** DimensionRange
, DimensionRange
, dimensionRange
, drDimension
, drEndIndex
, drSheetId
, drStartIndex
-- ** UpdateSpreadsheetPropertiesRequest
, UpdateSpreadsheetPropertiesRequest
, updateSpreadsheetPropertiesRequest
, uFields
, uProperties
-- ** AddProtectedRangeResponse
, AddProtectedRangeResponse
, addProtectedRangeResponse
, aProtectedRange
-- ** AppendDimensionRequest
, AppendDimensionRequest
, appendDimensionRequest
, adrLength
, adrDimension
, adrSheetId
-- ** PivotValue
, PivotValue
, pivotValue
, pvSourceColumnOffSet
, pvFormula
, pvName
, pvSummarizeFunction
-- ** UnmergeCellsRequest
, UnmergeCellsRequest
, unmergeCellsRequest
, ucrRange
-- ** DeleteSheetRequest
, DeleteSheetRequest
, deleteSheetRequest
, dsrSheetId
-- ** BooleanConditionType
, BooleanConditionType (..)
-- ** BandedRange
, BandedRange
, bandedRange
, brBandedRangeId
, brRowProperties
, brRange
, brColumnProperties
-- ** UpdateBOrdersRequest
, UpdateBOrdersRequest
, updateBOrdersRequest
, uborBottom
, uborInnerHorizontal
, uborLeft
, uborInnerVertical
, uborRange
, uborRight
, uborTop
-- ** ValueRangeMajorDimension
, ValueRangeMajorDimension (..)
-- ** PivotGroupSortOrder
, PivotGroupSortOrder (..)
-- ** BasicChartSpecChartType
, BasicChartSpecChartType (..)
-- ** EmbeddedChart
, EmbeddedChart
, embeddedChart
, ecSpec
, ecPosition
, ecChartId
-- ** RowData
, RowData
, rowData
, rdValues
-- ** Editors
, Editors
, editors
, eGroups
, eUsers
, eDomainUsersCanEdit
-- ** Xgafv
, Xgafv (..)
-- ** PivotTable
, PivotTable
, pivotTable
, ptValues
, ptValueLayout
, ptRows
, ptSource
, ptColumns
, ptCriteria
-- ** EmbeddedObjectPosition
, EmbeddedObjectPosition
, embeddedObjectPosition
, eopOverlayPosition
, eopSheetId
, eopNewSheet
-- ** BasicFilter
, BasicFilter
, basicFilter
, bfSortSpecs
, bfRange
, bfCriteria
-- ** TextToColumnsRequest
, TextToColumnsRequest
, textToColumnsRequest
, ttcrDelimiterType
, ttcrSource
, ttcrDelimiter
-- ** SpreadsheetPropertiesAutoRecalc
, SpreadsheetPropertiesAutoRecalc (..)
-- ** CopyPasteRequestPasteOrientation
, CopyPasteRequestPasteOrientation (..)
-- ** BatchUpdateSpreadsheetRequest
, BatchUpdateSpreadsheetRequest
, batchUpdateSpreadsheetRequest
, busrResponseIncludeGridData
, busrResponseRanges
, busrRequests
, busrIncludeSpreadsheetInResponse
-- ** PasteDataRequestType
, PasteDataRequestType (..)
-- ** UpdateValuesResponse
, UpdateValuesResponse
, updateValuesResponse
, uvrUpdatedCells
, uvrSpreadsheetId
, uvrUpdatedRows
, uvrUpdatedRange
, uvrUpdatedData
, uvrUpdatedColumns
-- ** CopySheetToAnotherSpreadsheetRequest
, CopySheetToAnotherSpreadsheetRequest
, copySheetToAnotherSpreadsheetRequest
, cstasrDestinationSpreadsheetId
-- ** AddFilterViewRequest
, AddFilterViewRequest
, addFilterViewRequest
, aFilter
-- ** PivotGroupValueMetadata
, PivotGroupValueMetadata
, pivotGroupValueMetadata
, pgvmValue
, pgvmCollapsed
-- ** CellFormatTextDirection
, CellFormatTextDirection (..)
-- ** BasicChartSeriesType
, BasicChartSeriesType (..)
-- ** UpdateCellsRequest
, UpdateCellsRequest
, updateCellsRequest
, updStart
, updRows
, updRange
, updFields
-- ** CellFormat
, CellFormat
, cellFormat
, cfBOrders
, cfVerticalAlignment
, cfBackgRoundColor
, cfHyperlinkDisplayType
, cfWrapStrategy
, cfNumberFormat
, cfTextDirection
, cfTextFormat
, cfHorizontalAlignment
, cfPadding
-- ** DeleteProtectedRangeRequest
, DeleteProtectedRangeRequest
, deleteProtectedRangeRequest
, dprrProtectedRangeId
-- ** UpdateProtectedRangeRequest
, UpdateProtectedRangeRequest
, updateProtectedRangeRequest
, uprrProtectedRange
, uprrFields
-- ** AddSheetResponse
, AddSheetResponse
, addSheetResponse
, aProperties
-- ** ProtectedRange
, ProtectedRange
, protectedRange
, prProtectedRangeId
, prWarningOnly
, prNamedRangeId
, prRange
, prEditors
, prUnprotectedRanges
, prRequestingUserCanEdit
, prDescription
-- ** BasicChartAxis
, BasicChartAxis
, basicChartAxis
, bcaFormat
, bcaTitle
, bcaPosition
-- ** GridData
, GridData
, gridData
, gdRowMetadata
, gdStartRow
, gdRowData
, gdColumnMetadata
, gdStartColumn
-- ** NumberFormat
, NumberFormat
, numberFormat
, nfPattern
, nfType
-- ** BatchUpdateSpreadsheetResponse
, BatchUpdateSpreadsheetResponse
, batchUpdateSpreadsheetResponse
, busrSpreadsheetId
, busrReplies
, busrUpdatedSpreadsheet
-- ** SetDataValidationRequest
, SetDataValidationRequest
, setDataValidationRequest
, sdvrRule
, sdvrRange
-- ** BandingProperties
, BandingProperties
, bandingProperties
, bpSecondBandColor
, bpHeaderColor
, bpFooterColor
, bpFirstBandColor
-- ** ChartSpecHiddenDimensionStrategy
, ChartSpecHiddenDimensionStrategy (..)
-- ** DuplicateFilterViewRequest
, DuplicateFilterViewRequest
, duplicateFilterViewRequest
, dFilterId
-- ** BOrderStyle
, BOrderStyle (..)
-- ** PivotGroup
, PivotGroup
, pivotGroup
, pgValueMetadata
, pgSourceColumnOffSet
, pgSortOrder
, pgShowTotals
, pgValueBucket
-- ** AddBandingResponse
, AddBandingResponse
, addBandingResponse
, aBandedRange
-- ** CutPasteRequestPasteType
, CutPasteRequestPasteType (..)
-- ** BasicChartSpecLegendPosition
, BasicChartSpecLegendPosition (..)
-- ** ErrorValueType
, ErrorValueType (..)
-- ** ConditionalFormatRule
, ConditionalFormatRule
, conditionalFormatRule
, cfrBooleanRule
, cfrGradientRule
, cfrRanges
-- ** BasicChartSpec
, BasicChartSpec
, basicChartSpec
, bHeaderCount
, bLegendPosition
, bSeries
, bChartType
, bDomains
, bAxis
-- ** AddConditionalFormatRuleRequest
, AddConditionalFormatRuleRequest
, addConditionalFormatRuleRequest
, acfrrRule
, acfrrIndex
-- ** PivotTableValueLayout
, PivotTableValueLayout (..)
-- ** DuplicateSheetResponse
, DuplicateSheetResponse
, duplicateSheetResponse
, dsrProperties
-- ** TextFormat
, TextFormat
, textFormat
, tfFontFamily
, tfForegRoundColor
, tfFontSize
, tfUnderline
, tfItalic
, tfBold
, tfStrikethrough
-- ** BatchClearValuesResponse
, BatchClearValuesResponse
, batchClearValuesResponse
, bcvrClearedRanges
, bcvrSpreadsheetId
-- ** BasicChartDomain
, BasicChartDomain
, basicChartDomain
, bcdDomain
-- ** InterpolationPointType
, InterpolationPointType (..)
-- ** TextToColumnsRequestDelimiterType
, TextToColumnsRequestDelimiterType (..)
-- ** InsertRangeRequest
, InsertRangeRequest
, insertRangeRequest
, irrShiftDimension
, irrRange
-- ** InsertRangeRequestShiftDimension
, InsertRangeRequestShiftDimension (..)
-- ** Padding
, Padding
, padding
, pBottom
, pLeft
, pRight
, pTop
-- ** ChartSpec
, ChartSpec
, chartSpec
, csTitle
, csPieChart
, csBasicChart
, csHiddenDimensionStrategy
-- ** DimensionProperties
, DimensionProperties
, dimensionProperties
, dpHiddenByFilter
, dpPixelSize
, dpHiddenByUser
-- ** UpdateBandingRequest
, UpdateBandingRequest
, updateBandingRequest
, ubrBandedRange
, ubrFields
-- ** BatchGetValuesResponse
, BatchGetValuesResponse
, batchGetValuesResponse
, bgvrSpreadsheetId
, bgvrValueRanges
-- ** DeleteBandingRequest
, DeleteBandingRequest
, deleteBandingRequest
, dbrBandedRangeId
-- ** Request'
, Request'
, request'
, reqAddFilterView
, reqDeleteProtectedRange
, reqUpdateProtectedRange
, reqUpdateCells
, reqDuplicateFilterView
, reqAddConditionalFormatRule
, reqSortRange
, reqUpdateNamedRange
, reqDeleteNamedRange
, reqInsertRange
, reqDeleteBanding
, reqUpdateBanding
, reqClearBasicFilter
, reqAppendCells
, reqPasteData
, reqUpdateEmbeddedObjectPosition
, reqDeleteRange
, reqCopyPaste
, reqAutoResizeDimensions
, reqAddSheet
, reqFindReplace
, reqDeleteDimension
, reqCutPaste
, reqMoveDimension
, reqRepeatCell
, reqAddProtectedRange
, reqUpdateFilterView
, reqDeleteFilterView
, reqInsertDimension
, reqUpdateSheetProperties
, reqDeleteConditionalFormatRule
, reqUpdateConditionalFormatRule
, reqDeleteEmbeddedObject
, reqMergeCells
, reqAddNamedRange
, reqAddChart
, reqAddBanding
, reqDuplicateSheet
, reqAutoFill
, reqUpdateDimensionProperties
, reqUpdateChartSpec
, reqSetBasicFilter
, reqTextToColumns
, reqUpdateSpreadsheetProperties
, reqDeleteSheet
, reqUnmergeCells
, reqUpdateBOrders
, reqAppendDimension
, reqSetDataValidation
-- ** DeleteConditionalFormatRuleResponse
, DeleteConditionalFormatRuleResponse
, deleteConditionalFormatRuleResponse
, dcfrrRule
-- ** UpdateConditionalFormatRuleResponse
, UpdateConditionalFormatRuleResponse
, updateConditionalFormatRuleResponse
, uNewRule
, uNewIndex
, uOldIndex
, uOldRule
) where
import Network.Google.Prelude
import Network.Google.Resource.Sheets.Spreadsheets.BatchUpdate
import Network.Google.Resource.Sheets.Spreadsheets.Create
import Network.Google.Resource.Sheets.Spreadsheets.Get
import Network.Google.Resource.Sheets.Spreadsheets.Sheets.CopyTo
import Network.Google.Resource.Sheets.Spreadsheets.Values.Append
import Network.Google.Resource.Sheets.Spreadsheets.Values.BatchClear
import Network.Google.Resource.Sheets.Spreadsheets.Values.BatchGet
import Network.Google.Resource.Sheets.Spreadsheets.Values.BatchUpdate
import Network.Google.Resource.Sheets.Spreadsheets.Values.Clear
import Network.Google.Resource.Sheets.Spreadsheets.Values.Get
import Network.Google.Resource.Sheets.Spreadsheets.Values.Update
import Network.Google.Sheets.Types
{- $resources
TODO
-}
-- | Represents the entirety of the methods and resources available for the Google Sheets API service.
--
-- The individual per-endpoint resource types are combined with servant's
-- ':<|>' alternative, so one server/client for this type covers every route.
type SheetsAPI =
     SpreadsheetsSheetsCopyToResource :<|>
       SpreadsheetsValuesBatchClearResource
       :<|> SpreadsheetsValuesGetResource
       :<|> SpreadsheetsValuesClearResource
       :<|> SpreadsheetsValuesBatchGetResource
       :<|> SpreadsheetsValuesBatchUpdateResource
       :<|> SpreadsheetsValuesAppendResource
       :<|> SpreadsheetsValuesUpdateResource
       :<|> SpreadsheetsGetResource
       :<|> SpreadsheetsCreateResource
       :<|> SpreadsheetsBatchUpdateResource
| rueshyna/gogol | gogol-sheets/gen/Network/Google/Sheets.hs | mpl-2.0 | 26,580 | 0 | 14 | 6,639 | 2,960 | 2,081 | 879 | 809 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.GamesConfiguration.AchievementConfigurations.Insert
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Insert a new achievement configuration in this application.
--
-- /See:/ <https://developers.google.com/games/ Google Play Game Services Publishing API Reference> for @gamesConfiguration.achievementConfigurations.insert@.
module Network.Google.Resource.GamesConfiguration.AchievementConfigurations.Insert
(
-- * REST Resource
AchievementConfigurationsInsertResource
-- * Creating a Request
, achievementConfigurationsInsert
, AchievementConfigurationsInsert
-- * Request Lenses
, aciXgafv
, aciUploadProtocol
, aciAccessToken
, aciUploadType
, aciPayload
, aciApplicationId
, aciCallback
) where
import Network.Google.GamesConfiguration.Types
import Network.Google.Prelude
-- | A resource alias for @gamesConfiguration.achievementConfigurations.insert@ method which the
-- 'AchievementConfigurationsInsert' request conforms to.
--
-- Route shape: POST
-- /games/v1configuration/applications/{applicationId}/achievements
-- with an 'AchievementConfiguration' JSON body, returning the created
-- 'AchievementConfiguration'.
type AchievementConfigurationsInsertResource =
     "games" :>
       "v1configuration" :>
         "applications" :>
           Capture "applicationId" Text :>
             "achievements" :>
               QueryParam "$.xgafv" Xgafv :>
                 QueryParam "upload_protocol" Text :>
                   QueryParam "access_token" Text :>
                     QueryParam "uploadType" Text :>
                       QueryParam "callback" Text :>
                         QueryParam "alt" AltJSON :>
                           ReqBody '[JSON] AchievementConfiguration :>
                             Post '[JSON] AchievementConfiguration
-- | Insert a new achievement configuration in this application.
--
-- /See:/ 'achievementConfigurationsInsert' smart constructor.
data AchievementConfigurationsInsert =
  AchievementConfigurationsInsert'
    { _aciXgafv :: !(Maybe Xgafv) -- ^ V1 error format.
    , _aciUploadProtocol :: !(Maybe Text) -- ^ Upload protocol for media.
    , _aciAccessToken :: !(Maybe Text) -- ^ OAuth access token.
    , _aciUploadType :: !(Maybe Text) -- ^ Legacy upload protocol for media.
    , _aciPayload :: !AchievementConfiguration -- ^ Multipart request metadata.
    , _aciApplicationId :: !Text -- ^ Application ID from the Play developer console.
    , _aciCallback :: !(Maybe Text) -- ^ JSONP callback.
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AchievementConfigurationsInsert' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aciXgafv'
--
-- * 'aciUploadProtocol'
--
-- * 'aciAccessToken'
--
-- * 'aciUploadType'
--
-- * 'aciPayload'
--
-- * 'aciApplicationId'
--
-- * 'aciCallback'
achievementConfigurationsInsert
    :: AchievementConfiguration -- ^ 'aciPayload'
    -> Text -- ^ 'aciApplicationId'
    -> AchievementConfigurationsInsert
achievementConfigurationsInsert payload appId =
  -- Required fields come from the arguments; every optional query
  -- parameter starts out unset.
  AchievementConfigurationsInsert'
    { _aciPayload = payload
    , _aciApplicationId = appId
    , _aciXgafv = Nothing
    , _aciUploadProtocol = Nothing
    , _aciAccessToken = Nothing
    , _aciUploadType = Nothing
    , _aciCallback = Nothing
    }
-- | V1 error format.
aciXgafv :: Lens' AchievementConfigurationsInsert (Maybe Xgafv)
aciXgafv =
  lens _aciXgafv (\record newVal -> record {_aciXgafv = newVal})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
aciUploadProtocol :: Lens' AchievementConfigurationsInsert (Maybe Text)
aciUploadProtocol =
  lens _aciUploadProtocol (\record newVal -> record {_aciUploadProtocol = newVal})

-- | OAuth access token.
aciAccessToken :: Lens' AchievementConfigurationsInsert (Maybe Text)
aciAccessToken =
  lens _aciAccessToken (\record newVal -> record {_aciAccessToken = newVal})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
aciUploadType :: Lens' AchievementConfigurationsInsert (Maybe Text)
aciUploadType =
  lens _aciUploadType (\record newVal -> record {_aciUploadType = newVal})

-- | Multipart request metadata.
aciPayload :: Lens' AchievementConfigurationsInsert AchievementConfiguration
aciPayload =
  lens _aciPayload (\record newVal -> record {_aciPayload = newVal})

-- | The application ID from the Google Play developer console.
aciApplicationId :: Lens' AchievementConfigurationsInsert Text
aciApplicationId =
  lens _aciApplicationId (\record newVal -> record {_aciApplicationId = newVal})

-- | JSONP
aciCallback :: Lens' AchievementConfigurationsInsert (Maybe Text)
aciCallback =
  lens _aciCallback (\record newVal -> record {_aciCallback = newVal})
-- Wires the request record to the resource type: the response is the
-- created configuration, and the androidpublisher OAuth scope is required.
instance GoogleRequest
           AchievementConfigurationsInsert
         where
        type Rs AchievementConfigurationsInsert =
             AchievementConfiguration
        type Scopes AchievementConfigurationsInsert =
             '["https://www.googleapis.com/auth/androidpublisher"]
        -- Field order here must match the parameter order declared in
        -- 'AchievementConfigurationsInsertResource'.
        requestClient AchievementConfigurationsInsert'{..}
          = go _aciApplicationId _aciXgafv _aciUploadProtocol
              _aciAccessToken
              _aciUploadType
              _aciCallback
              (Just AltJSON)
              _aciPayload
              gamesConfigurationService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy AchievementConfigurationsInsertResource)
                      mempty
| brendanhay/gogol | gogol-games-configuration/gen/Network/Google/Resource/GamesConfiguration/AchievementConfigurations/Insert.hs | mpl-2.0 | 5,774 | 0 | 19 | 1,310 | 786 | 457 | 329 | 120 | 1 |
-- Implicit CAD. Copyright (C) 2011, Christopher Olah (chris@colah.ca)
-- Copyright 2016, Julia Longtin (julial@turinglace.com)
-- Released under the GNU AGPLV3+, see LICENSE
-- Allow our DiscreteAproxable class to handle multiple parameters.
{-# LANGUAGE MultiParamTypeClasses #-}
-- For the instance declaration of DiscreteAproxable SymbolicObj2 [Polyline]
{-# LANGUAGE FlexibleInstances #-}
-- | A module for retrieving approximate represententations of objects.
module Graphics.Implicit.Export.DiscreteAproxable (DiscreteAproxable, discreteAprox) where
import Prelude((-), (/), ($), (<), map, round, (+), maximum, abs, (*), fromIntegral, max, realToFrac, Int)
-- Definitions for our number system, objects, and the things we can use to approximately represent objects.
import Graphics.Implicit.Definitions (ℝ, ℝ2, SymbolicObj2, SymbolicObj3, Polyline, Triangle, TriangleMesh(TriangleMesh), NormedTriangleMesh(NormedTriangleMesh))
import Graphics.Implicit.ObjectUtil (getImplicit2, getImplicit3, getBox2, getBox3)
import Graphics.Implicit.Export.SymbolicObj3 (symbolicGetMesh)
import Graphics.Implicit.Export.SymbolicObj2 (symbolicGetContour)
import Graphics.Implicit.Export.Util (normTriangle)
-- We are the only ones that use this.
import Graphics.Implicit.Export.RayTrace (Color(Color), Camera(Camera), Light(Light), Scene(Scene), average, traceRay, cameraRay)
import Codec.Picture (DynamicImage(ImageRGBA8), PixelRGBA8(PixelRGBA8), generateImage)
import Data.VectorSpace ((^+^), (^/), (*^), (^-^))
import Data.AffineSpace ((.-^), (.+^))
default (ℝ)
-- | Unwrap the raw triangle list from a 'TriangleMesh'.
unmesh :: TriangleMesh -> [Triangle]
unmesh mesh = case mesh of
    TriangleMesh tris -> tris
-- | There is a discrete way to aproximate this object.
-- eg. Aproximating a 3D object with a triangle mesh
-- would be DiscreteApproxable Obj3 TriangleMesh
class DiscreteAproxable obj aprox where
    -- | Produce the approximation; the ℝ argument is the resolution
    -- handed through to the underlying tessellation/contour routines.
    discreteAprox :: ℝ -> obj -> aprox
-- 3D objects approximate to a plain triangle mesh via the symbolic mesher.
instance DiscreteAproxable SymbolicObj3 TriangleMesh where
    discreteAprox = symbolicGetMesh
instance DiscreteAproxable SymbolicObj3 NormedTriangleMesh where
    -- Mesh the object as usual, then attach a normal to every triangle
    -- using the object's implicit function.
    discreteAprox res obj = NormedTriangleMesh normedTris
        where
            implicitFn = getImplicit3 obj
            rawTris    = unmesh (symbolicGetMesh res obj)
            normedTris = map (normTriangle res implicitFn) rawTris
-- FIXME: way too many magic numbers.
-- FIXME: adjustable resolution!
-- Renders a fixed 150x150 ray-traced preview of a 3D object, looking down
-- the +X axis from outside the object's bounding box.
instance DiscreteAproxable SymbolicObj3 DynamicImage where
    discreteAprox _ symbObj = ImageRGBA8 $ generateImage pixelRenderer (round w) (round h)
        where
            -- | Size of the image to produce.
            (w,h) = (150, 150) :: ℝ2
            obj = getImplicit3 symbObj
            box@((x1,y1,z1), (_,y2,z2)) = getBox3 symbObj
            av :: ℝ -> ℝ -> ℝ
            av a b = (a+b)/2
            avY = av y1 y2
            avZ = av z1 z2
            -- Largest Y/Z half-extent of the box; used to scale the camera
            -- and light placement to the object's size.
            deviation = maximum [abs $ y1 - avY, abs $ y2 - avY, abs $ z1 - avZ, abs $ z2 - avZ]
            -- Camera sits on the -X side of the box, centered in Y/Z.
            camera = Camera (x1-deviation*2.2, avY, avZ) (0, -1, 0) (0,0, -1) 1.0
            lights = [Light (x1-deviation*1.5, y1 - 0.4*(y2-y1), avZ) (0.03*deviation) ]
            scene = Scene obj (Color 200 200 230 255) lights (Color 255 255 255 0)
            -- | passed to generateImage, it's external, and determines this type.
            pixelRenderer :: Int -> Int -> PixelRGBA8
            pixelRenderer a b = renderScreen
                (fromIntegral a/w - 0.5) (fromIntegral b/h - 0.5)
            -- 4-sample supersampling: one ray per corner offset of the pixel.
            renderScreen :: ℝ -> ℝ -> PixelRGBA8
            renderScreen a b =
                colorToPixelRGBA8 $
                    average [
                        traceRay
                            (cameraRay camera ((a,b) ^+^ ( 0.25/w, 0.25/h)))
                            2 box scene,
                        traceRay
                            (cameraRay camera ((a,b) ^+^ (-0.25/w, 0.25/h)))
                            0.5 box scene,
                        traceRay
                            (cameraRay camera ((a,b) ^+^ (0.25/w, -0.25/h)))
                            0.5 box scene,
                        traceRay
                            (cameraRay camera ((a,b) ^+^ (-0.25/w,-0.25/h)))
                            0.5 box scene
                        ]
                    where
                        colorToPixelRGBA8 :: Color -> PixelRGBA8
                        colorToPixelRGBA8 (Color rr gg bb aa) = PixelRGBA8 rr gg bb aa
instance DiscreteAproxable SymbolicObj2 [Polyline] where
    -- Delegate straight to the symbolic contour extractor.
    discreteAprox res obj = symbolicGetContour res obj
-- FIXME: way too many magic numbers.
-- FIXME: adjustable resolution?
-- Renders a fixed 150x150 filled-silhouette preview of a 2D object:
-- pixels inside the object (implicit value < 0) are grey, outside is
-- transparent white.
instance DiscreteAproxable SymbolicObj2 DynamicImage where
    discreteAprox _ symbObj = ImageRGBA8 $ generateImage pixelRenderer (round w) (round h)
        where
            -- | Size of the image to produce.
            (w,h) = (150, 150) :: ℝ2
            obj = getImplicit2 symbObj
            (p1@(x1,_), p2@(_,y2)) = getBox2 symbObj
            (dx, dy) = p2 ^-^ p1
            -- Square viewport side: the larger of the box extents, so the
            -- object is letterboxed rather than stretched.
            dxy = max dx dy
            -- | passed to generateImage, it's external, and determines this type.
            pixelRenderer :: Int -> Int -> PixelRGBA8
            pixelRenderer mya myb = mycolor
                where
                    -- Map pixel coordinates into object space (Y flipped).
                    xy a b = ((x1,y2) .-^ (dxy-dx, dy-dxy)^/2) .+^ dxy*^(a/w, -b/h)
                    s = 0.25 :: ℝ
                    (a', b') = (realToFrac mya, realToFrac myb) :: ℝ2
                    -- NOTE(review): the supersampling offsets are duplicated
                    -- ((+s,+s) and (-s,-s) each appear twice, and the mixed
                    -- (+s,-s)/(-s,+s) corners are never sampled) — looks like
                    -- a copy/paste slip; confirm intent before changing.
                    mycolor = colorToPixelRGBA8 $ average [objColor $ xy a' b', objColor $ xy a' b',
                                                           objColor $ xy (a'+s) (b'+s),
                                                           objColor $ xy (a'-s) (b'-s),
                                                           objColor $ xy (a'+s) (b'+s),
                                                           objColor $ xy (a'-s) (b'-s)]
                    colorToPixelRGBA8 :: Color -> PixelRGBA8
                    colorToPixelRGBA8 (Color rr gg bb aa) = PixelRGBA8 rr gg bb aa
                    objColor p = if obj p < 0 then Color 150 150 160 255 else Color 255 255 255 0
| krakrjak/ImplicitCAD | Graphics/Implicit/Export/DiscreteAproxable.hs | agpl-3.0 | 5,789 | 2 | 18 | 1,752 | 1,607 | 915 | 692 | 78 | 1 |
module Freekick.Libsoccer.TournamentInstance
where
import Freekick.Libsoccer.Tournament
import Freekick.Libsoccer.Stage
import Freekick.Libsoccer.Club
import Freekick.Libsoccer.Match
import Libaddutil.Primitives
import Data.Maybe
import Data.List
-- | A concrete, scheduled run of a tournament: its name plus one
-- 'StageInstance' per configured stage.
data TournamentInstance =
    TournamentInstance { name :: String -- ^ Tournament name.
                       , stageinstances :: [StageInstance] -- ^ One per stage, in play order.
                       }
    deriving (Eq, Show)
-- | Live state of a single stage: the static definition, the clubs
-- assigned so far, the scheduled rounds and a finished flag.
data StageInstance =
    StageInstance { stage :: Stage -- ^ Static stage definition.
                  , clubs :: [String] -- ^ Names of clubs assigned so far.
                  , rounds :: [Round] -- ^ One match series per pairing.
                  , finished :: Bool -- ^ True once every match is played.
                  }
    deriving (Eq, Show)
-- | A round is simply a list of matches.
type Round = [Match]
-- | Instantiate a 'Tournament': copy its name and build one stage
-- instance per configured stage.
createTournamentInstance :: Tournament -> TournamentInstance
createTournamentInstance tournament =
    TournamentInstance
        (Freekick.Libsoccer.Tournament.name tournament)
        (createStageInstances tournament)
-- | Build all stage instances, scheduling from the tournament's begin
-- date using its average gap between matches.
createStageInstances :: Tournament -> [StageInstance]
createStageInstances tournament =
    let firstDate = beginDate (schedule tournament)
        gap       = averageTimeBetweenMatches tournament
    in createStageInstancesFromStages (stages tournament) firstDate gap
-- | Instantiate each stage in order. Each subsequent stage starts one gap
-- after this stage's start date when the stage has no matches, otherwise
-- two gaps after the stage's last match.
createStageInstancesFromStages :: [Stage] -> Date -> Int -> [StageInstance]
createStageInstancesFromStages [] _ _ = []
createStageInstancesFromStages (s:ss) startdate datediff =
    thisstageinstance : createStageInstancesFromStages ss nextdate datediff
    where thisstageinstance = createStageInstance s startdate datediff
          -- Total pattern match replaces the old isNothing/fromJust pair.
          nextdate = case lastMatchDate thisstageinstance of
                       Nothing  -> startdate `addDaysToDate` datediff
                       Just lmd -> lmd `addDaysToDate` (2 * datediff)
-- | Create a fresh (clubless, unfinished) instance of a stage with one
-- empty match series per pairing — half the configured participant count.
createStageInstance :: Stage -> Date -> Int -> StageInstance
createStageInstance s startdate datediff = StageInstance s [] rs False
    where -- 'replicate' is the idiomatic form of 'take n (repeat x)'.
          rs = replicate (participantnum (stagesetup s) `div` 2)
                         (createEmptyMatchesFromDates matchdates)
          matchdates = createMatchDates (countMatchesPerClubInStage s) startdate datediff
-- | Date of the first match of the stage's last round, if any.
-- Returns 'Nothing' both when there are no rounds and when the last round
-- is empty (the previous version crashed on the latter via 'head').
lastMatchDate :: StageInstance -> Maybe Date
lastMatchDate s =
    case reverse (Freekick.Libsoccer.TournamentInstance.rounds s) of
      ((m:_):_) -> Just (fst (playtime m))
      _         -> Nothing
-- | Add clubs to a tournament: fill free stage slots in order, then assign
-- the clubs into stage matches. A no-op for an empty club list.
-- Delegates to 'addClubsByNames' so the two stay in sync (the old version
-- duplicated the logic).
addClubs :: TournamentInstance -> [Club] -> TournamentInstance
addClubs t cs
    | null cs   = t
    | otherwise = addClubsByNames t (map Freekick.Libsoccer.Club.name cs)
-- | Like 'addClubs', but clubs are given by name.
addClubsByNames :: TournamentInstance -> [String] -> TournamentInstance
addClubsByNames t ns
    | null ns   = t
    | otherwise = t{stageinstances = assignedss}
    where assignedss = assignClubsToStages (addClubs' (stageinstances t) ns)
-- | Distribute club names over the stages in order, topping each stage up
-- to its configured participant count before moving on to the next.
addClubs' :: [StageInstance] -> [String] -> [StageInstance]
addClubs' [] _ = []
addClubs' ss [] = ss
addClubs' (s:ss) cs
    | needed <= 0 = s : addClubs' ss cs
    | otherwise   = addClubs'' s (take needed cs) : addClubs' ss (drop needed cs)
    where needed = participantnum (stagesetup (stage s)) - length (clubs s)
-- | Append club names to a stage instance. No special case for an empty
-- list is needed, since @xs ++ [] == xs@.
addClubs'' :: StageInstance -> [String] -> StageInstance
addClubs'' s cs = s{clubs = clubs s ++ cs}
-- | Number of unassigned club slots in the first round of a stage.
-- NOTE(review): uses 'head', so this is partial for a stage with no
-- rounds — confirm callers guarantee at least one round.
freeSlots :: StageInstance -> Int
freeSlots s = freeSlots' (head (Freekick.Libsoccer.TournamentInstance.rounds s))
-- | Count the empty home/away slots over a list of matches.
freeSlots' :: [Match] -> Int
freeSlots' ms = sum (map emptySlots ms)
    where emptySlots m = length [() | c <- [homeclub m, awayclub m], c == ""]
-- | Write each stage's assigned clubs into that stage's matches.
assignClubsToTournamentMatches :: TournamentInstance -> TournamentInstance
assignClubsToTournamentMatches t = t{Freekick.Libsoccer.TournamentInstance.stageinstances = newst}
    where newst = assignClubsToStages (stageinstances t)
-- | Assign clubs into matches stage by stage.
-- NOTE(review): recursion stops after the first stage whose free slots can
-- hold all of its clubs, leaving later stages' matches untouched —
-- presumably because those stages are filled later; confirm intent.
assignClubsToStages :: [StageInstance] -> [StageInstance]
assignClubsToStages [] = []
assignClubsToStages (s:ss) = if freeSlots s >= length (clubs s)
                                then (assignClubsToStageMatches s (clubs s) : ss)
                                else (assignClubsToStageMatches s (clubs s) : assignClubsToStages ss)
-- | Rewrite a stage's rounds with its club list paired into the matches.
assignClubsToStageMatches :: StageInstance -> [String] -> StageInstance
assignClubsToStageMatches s cs = s{Freekick.Libsoccer.TournamentInstance.rounds = newrounds}
    where newrounds = assignClubsToStageMatches' (Freekick.Libsoccer.TournamentInstance.rounds s) cs
-- | Pair clubs off two at a time, assigning each pair to the next round
-- (match series). Rounds beyond the supply of pairs are left untouched,
-- as is a trailing odd club. Pattern matching replaces the old
-- length/head/tail combination.
assignClubsToStageMatches' :: [Round] -> [String] -> [Round]
assignClubsToStageMatches' [] _ = []
assignClubsToStageMatches' rs [] = rs
assignClubsToStageMatches' rs [_] = rs
assignClubsToStageMatches' (r:rs) (c1:c2:cs) =
    assignClubsToRound r c1 c2 : assignClubsToStageMatches' rs cs
-- | Set the same home and away club on every match of a round.
assignClubsToRound :: Round -> String -> String -> Round
assignClubsToRound ms c1 c2 = map (\m -> assignClubsToMatch m c1 c2) ms
-- | Set the home and away club of a single match.
assignClubsToMatch :: Match -> String -> String -> Match
assignClubsToMatch m c1 c2 = m{homeclub = c1, awayclub = c2}
-- | Print every match of every tournament in chronological order.
-- The explicit empty-list clause of the old version was redundant:
-- 'getAllMatches' of @[]@ is @[]@ and 'printRoundData' of @[]@ is a no-op.
printDatesAndMatches :: [TournamentInstance] -> IO ()
printDatesAndMatches ts = printRoundData (sortMatchesByDate (getAllMatches ts))
-- | Every match of every stage of every tournament, in declaration order.
getAllMatches :: [TournamentInstance] -> [Match]
getAllMatches = concatMap (concatMap getAllMatches' . stageinstances)
-- | Every match of a single stage instance, round by round.
getAllMatches' :: StageInstance -> [Match]
getAllMatches' = concat . rounds
-- | Quicksort matches by date. A single 'partition' replaces the previous
-- pair of complementary 'filter' passes; same-date matches go to the
-- right of the pivot, exactly as before.
sortMatchesByDate :: [Match] -> [Match]
sortMatchesByDate [] = []
sortMatchesByDate (m:ms) =
    sortMatchesByDate earlier ++ [m] ++ sortMatchesByDate laterOrSame
    where (earlier, laterOrSame) = partition (`earlierMatch` m) ms
-- | True when the first match is strictly earlier than the second
-- (the old @if c then True else False@ was redundant).
earlierMatch :: Match -> Match -> Bool
earlierMatch m1 m2 = getMatchDate m1 `earlierDate` getMatchDate m2
-- | Complement of 'earlierMatch'.
laterOrSamedateMatch :: Match -> Match -> Bool
laterOrSamedateMatch m1 m2 = not (earlierMatch m1 m2)
-- | Print all matches of one tournament instance, stage by stage.
printDatesAndMatchesFromTI :: TournamentInstance -> IO ()
printDatesAndMatchesFromTI t = printDatesAndMatchesFromSIs (stageinstances t)
-- | Print all matches of the given stage instances in order
-- ('mapM_' replaces the old manual recursion).
printDatesAndMatchesFromSIs :: [StageInstance] -> IO ()
printDatesAndMatchesFromSIs = mapM_ printDatesAndMatchesFromSI
-- | Print all matches of a single stage instance.
printDatesAndMatchesFromSI :: StageInstance -> IO ()
printDatesAndMatchesFromSI s = printRoundData (concat (rounds s))
-- | Print one line of data per match.
printRoundData :: [Match] -> IO ()
printRoundData = mapM_ printMatchData
-- | Play all of the day's matches across every stage of the tournament.
-- Returns the updated tournament plus the concatenated club movements
-- reported by any stages that finished today.
playMatchesOfTheDay :: Date -> TournamentInstance -> IO (TournamentInstance, [([String], StageTarget)])
playMatchesOfTheDay d t = do
    sc <- mapM (playMatchesOfTheDay'' d) (stageinstances t)
    let (ss, ch) = unzip sc
    let newt = t{stageinstances = ss}
    return (newt, concat ch)
-- | Play one stage's matches scheduled for the given day. A stage that is
-- already finished is returned unchanged; otherwise its rounds are played,
-- the finished flag recomputed, and — only when the stage just finished —
-- its club movements are reported.
playMatchesOfTheDay'' :: Date -> StageInstance -> IO ((StageInstance, [([String], StageTarget)]))
playMatchesOfTheDay'' d s
    | finished s = return (s, [])
    | otherwise = do
        newr <- mapM (playRounds' d) (rounds s)
        let fin = allMatchesPlayed (concat newr)
        let news = s{rounds = newr, finished = fin}
        -- Movements are only produced once the whole stage is finished.
        let ch = if fin then updateStageInstance news else []
        return (news, ch)
-- | Play exactly the matches of the round scheduled for the given date,
-- leaving the rest untouched. Played matches come first in the result.
playRounds' :: Date -> [Match] -> IO [Match]
playRounds' _ [] = return []
playRounds' d ms =
    let (todays, others) = partition (hasDate d) ms
    in fmap (++ others) (playMatches todays)
-- | Recompute and store the finished flag of a stage instance
-- (the old @if c then s{finished=True} else s{finished=False}@ collapsed
-- to a single record update).
setFinishedFlag :: StageInstance -> StageInstance
setFinishedFlag s = s{finished = stageInstanceFinished s}
-- | Club movements produced by every finished stage of the tournament.
updateTournamentInstance :: TournamentInstance -> [([String], StageTarget)]
updateTournamentInstance t = concatMap updateStageInstance (stageinstances t)
-- | Club movements of one stage; only finished stages move clubs.
updateStageInstance :: StageInstance -> [([String], StageTarget)]
updateStageInstance s = if finished s then listMovedClubs s else []
-- | Clubs that move out of a finished stage, paired with their targets.
listMovedClubs :: StageInstance -> [([String], StageTarget)]
listMovedClubs s = listMovedClubs' (stage s) (rounds s)
listMovedClubs' :: Stage -> [Round] -> [([String], StageTarget)]
listMovedClubs' _ [] = []
-- TODO: league promotions/relegations are not implemented yet.
-- (Wildcard record pattern drops the previously unused pr/rl bindings.)
listMovedClubs' League{} _ = []
listMovedClubs' Knockout{promotiontarget=pt, relegationtarget=rt} rs =
    [(kowinners, pt), (kolosers, rt)]
    where (kowinners, kolosers) = unzip (map getKOWinners rs)
-- | Decide (winner, loser) of a knockout tie from its legs by comparing
-- aggregate home goals against aggregate away goals. All legs of a round
-- share the same home/away assignment (see 'assignClubsToRound'), so the
-- sums really are per-club totals.
-- NOTE(review): partial — 'head ms' crashes on an empty tie; and an
-- aggregate draw awards the tie to the away club ('sum1 > sum2' is
-- strict). Confirm both behaviours are intended.
getKOWinners :: [Match] -> (String, String)
getKOWinners ms = if sum1 > sum2 then (ht, at) else (at, ht)
    where sum1 = sum (map (homegoals . result) ms)
          sum2 = sum (map (awaygoals . result) ms)
          ht = homeclub $ head ms
          at = awayclub $ head ms
-- | A stage is finished once all club slots are filled and every match
-- has been played (the old @if c then False else …@ becomes a conjunction).
stageInstanceFinished :: StageInstance -> Bool
stageInstanceFinished s = freeSlots s <= 0 && allMatchesPlayed (concat (rounds s))
-- | Render a tournament instance as text: its name followed by one
-- section per stage.
printTournamentInstanceInfo :: TournamentInstance -> String
printTournamentInstanceInfo t = Freekick.Libsoccer.TournamentInstance.name t ++ "\n" ++ (intercalate "\n" (map printStageInstanceInfo (stageinstances t)))
-- | Render one stage: its static info followed by the assigned club names.
printStageInstanceInfo :: StageInstance -> String
printStageInstanceInfo s = printStageInfo (stage s) ++ "\nClubs\n" ++ (intercalate "\n" (clubs s)) ++ "\n"
| anttisalonen/freekick | haskell/libfreekick/Freekick/Libsoccer/TournamentInstance.hs | agpl-3.0 | 9,599 | 0 | 15 | 2,060 | 3,082 | 1,623 | 1,459 | 169 | 3 |
import Test.Hspec
-- Dummy implementation for testing
-- | Sum of the squares of the elements (reference implementation).
-- @foldr (+) 0@ replaced by the standard 'sum'.
slow_sqrs_sum :: [Int] -> Int
slow_sqrs_sum a = sum (map (^2) a)
-- | Square of the sum of the elements (reference implementation).
slow_sum_sqr :: [Int] -> Int
slow_sum_sqr = (^ 2) . sum
-- | Difference between the square of the sum and the sum of squares
-- (reference implementation, built from the two slow pieces).
slow_sum_sqr_diff :: [Int] -> Int
slow_sum_sqr_diff a = slow_sum_sqr a - slow_sqrs_sum a
-- Fast implementation using math
-- We need to calculate (1 + 2 + ... + n)^2 - (1^2 + 2^2 + ... + n^2).
-- Expanding the square reproduces every x^2 plus twice every cross
-- product x*y with y > x; the difference leaves only the cross terms.
fast_sum_sqr_diff :: [Int] -> Int
fast_sum_sqr_diff a = 2 * sum [ x * y | x <- a, y <- a, y > x ]
-- Tests
-- | Hspec suite: sanity checks on the slow reference implementations and
-- agreement of the fast implementation with the slow one.
-- (Missing top-level type signatures for run_test, res and main added.)
run_test :: IO ()
run_test = hspec $ do
  describe "Dummy" $ do
    it "dummy test" $ do
      True `shouldBe` True
  describe "Euler test" $ do
    it "sqrs sum" $ do
      slow_sqrs_sum [1..10] `shouldBe` 385
      slow_sum_sqr [1..10] `shouldBe` 3025
      slow_sum_sqr_diff [1..10] `shouldBe` 2640
      fast_sum_sqr_diff [1..10] `shouldBe` 2640
  describe "Unit" $ do
    it "Making sure fast implementation has same results" $ do
      fast_sum_sqr_diff [1..4] `shouldBe` slow_sum_sqr_diff [1..4]
      fast_sum_sqr_diff [1..5] `shouldBe` slow_sum_sqr_diff [1..5]
      fast_sum_sqr_diff [1..6] `shouldBe` slow_sum_sqr_diff [1..6]
      fast_sum_sqr_diff [1..7] `shouldBe` slow_sum_sqr_diff [1..7]
      fast_sum_sqr_diff [1..8] `shouldBe` slow_sum_sqr_diff [1..8]
      fast_sum_sqr_diff [1..9] `shouldBe` slow_sum_sqr_diff [1..9]
      fast_sum_sqr_diff [1..10] `shouldBe` slow_sum_sqr_diff [1..10]
      fast_sum_sqr_diff [1..50] `shouldBe` slow_sum_sqr_diff [1..50]
      fast_sum_sqr_diff [1..100] `shouldBe` slow_sum_sqr_diff [1..100]

-- | Project Euler 006 answer for n = 100.
-- res = slow_sum_sqr_diff [1..100]
res :: Int
res = fast_sum_sqr_diff [1..100]

-- Main
main :: IO ()
main = do
  run_test
  putStrLn ("res = " ++ show res)
| orbitgray/ProjectEuler | haskell/006.hs | lgpl-3.0 | 2,004 | 0 | 16 | 510 | 609 | 319 | 290 | 35 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Data.Time.Calendar.Laohuangli where
import qualified Data.Text as T
import Text.Printf
import Data.Time
import Data.Time.Calendar.WeekDate
import qualified Data.Set as Set
-- | One day's worth of programmer-almanac data, ready for rendering.
data Laohuangli =
    Laohuangli { today :: (Integer, Int, Int, DayOfWeek) -- ^ (year, month, day, weekday)
               , goodActivities :: [Activity]            -- ^ auspicious activities
               , badActivities :: [Activity]             -- ^ inauspicious activities
               , direction :: Direction                  -- ^ lucky seating direction
               , drinks :: [Drink]                       -- ^ recommended drinks
               , goddessCloseness :: GoddessCloseness    -- ^ star rating
               }
-- | Print the almanac to stdout.  Activity names are expanded (their
-- placeholders filled) before display.
printLaohuangli :: Laohuangli -> IO ()
printLaohuangli Laohuangli {today=(y, m, d, w), goodActivities=gs, badActivities=bs,
                            direction=dir, drinks=ds, goddessCloseness = gc}
  = do
    gns <- mapM getActivityName gs
    bns <- mapM getActivityName bs
    putStrLn $ printf "今天是%d年%d月%d日 %s\n" y m d (showDayOfWeekLocalized w)
            ++ printf "宜:%s\n" (join ", " gns)
            ++ printf "忌:%s\n" (join ", " bns)
            ++ printf "座位朝向:面向%s写程序,BUG 最少。\n" (show dir)
            ++ printf "今日宜饮:%s\n" (join "," ds)
            ++ printf "女神亲近指数:%s\n" (show gc)
  where
    -- concatenate Text pieces with the given separator, back to String
    join s txts = T.unpack $ T.intercalate s txts
-- | Localized (Chinese) weekday name.
showDayOfWeekLocalized :: DayOfWeek -> String
showDayOfWeekLocalized d =
  case d of
    Monday    -> "星期一"
    Tuesday   -> "星期二"
    Wednesday -> "星期三"
    Thursday  -> "星期四"
    Friday    -> "星期五"
    Saturday  -> "星期六"
    Sunday    -> "星期日"
-- | True exactly on Saturday and Sunday.
-- BUG FIX: the original used 'notElem', which made every weekday count
-- as weekend (and vice versa), inverting the activity filter in
-- getTodayLaohuangli.
isWeekend :: DayOfWeek -> Bool
isWeekend d = d `elem` [Saturday, Sunday]
-- | ISO weekday number to 'DayOfWeek'.
-- NOTE(review): relies on the time library's 'Enum' instance for
-- 'DayOfWeek'; confirm it accepts the 1..7 produced by 'toWeekDate'.
toDayOfWeek :: Int -> DayOfWeek
toDayOfWeek = toEnum
-- activity
-- | A candidate almanac activity with good/bad blurbs.
data Activity = Activity { actName :: T.Text      -- ^ may contain %v/%t/%l placeholders
                         , goodDesc :: T.Text     -- ^ blurb when the activity is auspicious
                         , badDesc :: T.Text      -- ^ blurb when it is inauspicious
                         , isWeekendAct :: Bool   -- ^ suitable for weekends
                         } deriving Show
-- | Expand the placeholder (if any) in an activity name; names without
-- a placeholder are returned unchanged.
getActivityName :: Activity -> IO T.Text
getActivityName a
  | "%v" `T.isInfixOf` rowName = getNanmmingActivityName a
  | "%t" `T.isInfixOf` rowName = getToolingActivityName a
  | "%l" `T.isInfixOf` rowName = getLineActivityName a
  | otherwise = return rowName
  where
    rowName = actName a
-- | Fill the "%v" placeholder with a date-seeded pseudo-random variable
-- name.  Uses the file's own 'modRandom' helper (consistency: the
-- original inlined @mod <$> random 12 <*> pure len@, which is exactly
-- what 'modRandom' does).
getNanmmingActivityName :: Activity -> IO T.Text
getNanmmingActivityName a = do
    varIndex <- modRandom 12 (length varNames)
    return $ T.replace "%v" (varNames !! varIndex) (actName a)
  where
    varNames = ["jieguo", "huodong", "pay", "expire", "zhangdan", "every", "free", "i1", "a", "virtual", "ad", "spider", "mima", "pass", "ui"]
-- | Fill the "%t" placeholder with a date-seeded pseudo-random tool
-- name.  Rewritten with 'modRandom' for consistency with the rest of
-- the module (behaviour identical to the inlined version).
getToolingActivityName :: Activity -> IO T.Text
getToolingActivityName a = do
    toolIndex <- modRandom 11 (length tools)
    return $ T.replace "%t" (tools !! toolIndex) (actName a)
  where
    tools = ["Eclipse写程序", "MSOffice写文档", "记事本写程序", "Windows8", "Linux", "MacOS", "IE", "Android设备", "iOS设备"]
-- | Fill the "%l" placeholder with a pseudo-random line count in 30..276.
getLineActivityName :: Activity -> IO T.Text
getLineActivityName a = do
  line <- (\n -> n `mod` 247 + 30) <$> random 12
  return $ T.replace "%l" (T.pack $ show line) (actName a)
-- direction
-- | Compass direction for the lucky seat orientation.
data Direction = North | East | South | West
               | Northeast | Southeast | Southwest | Northwest
-- Chinese rendering used directly in the almanac output.
instance Show Direction where
  show North = "北方"
  show East = "东方"
  show South = "南方"
  show West = "西方"
  show Northeast = "东北方"
  show Southeast = "东南方"
  show Southwest = "西南方"
  show Northwest = "西北方"
-- | All eight directions, clockwise starting from north.
directions :: [Direction]
directions = [North, Northeast, East, Southeast, South, Southwest, West, Northwest]
-- GoddessCloseness
-- | Star rating; 'show' renders @value@ filled stars followed by
-- @5 - value@ hollow ones by slicing a fixed 10-character template.
-- Meaningful for values in 0..5.
newtype GoddessCloseness = GoddessCloseness Int
instance Show GoddessCloseness where
  show (GoddessCloseness value) = drop (5 - value) (take (10 - value) "★★★★★☆☆☆☆☆")
-- drink
-- | A drink is just its (Chinese) name.
type Drink = T.Text
-- random util
-- | Deterministic, date-seeded pseudo-random Int: the same (seed, day)
-- always yields the same value.  Iterates x -> x^2 mod 11117 starting
-- from a number derived from today's date, and returns element
-- seed + 100 of that sequence.
random :: Int -> IO Int
random seed = do
  (y, m, d) <- getToday
  let n = (fromInteger y * 10000 + m * 100 + d) `mod` 11117
  let numSeq = iterate (\x -> x^2 `mod` 11117) n
  return $ numSeq !! (seed + 100)
-- | Date-seeded pseudo-random number reduced modulo the given bound.
modRandom :: Int -> Int -> IO Int
modRandom seed modulus = (`mod` modulus) <$> random seed
-- | Pseudo-randomly keep @n@ elements of the list, preserving their
-- original order.  Works by drawing @len - n@ *relative* drop indices
-- and translating them into absolute positions.
-- NOTE(review): 'actualIndices' assumes at least one index is drawn,
-- i.e. n < len -- confirm callers never request the whole list.
pickRandom :: [a] -> Int -> IO [a]
pickRandom xs n = do
  dropIndices <- Set.fromList <$> actualIndices <$> sequence [(`mod` (len - i)) <$> (random i) | i <- [0..len - n - 1]]
  return [ x | (x, i) <- zip xs [0..], i `Set.notMember` dropIndices]
  where
    len = length xs
    -- translate the list of relative indices into absolute ones
    actualIndices (y:ys) = actualIndices' ys [y]
    actualIndices' [] acc = acc
    actualIndices' (z:zs) acc = actualIndices' zs ((actualIndex z acc):acc)
    -- shift a relative index past the positions already taken
    actualIndex m acc
      | all (<= m) acc = m + length acc
      | all (> m) acc = m
      | otherwise = let m' = m + (length $ filter (<= m) acc)
                    in actualIndex m' (filter (> m) acc)
-- getter
-- | Today's date in the local time zone.
getToday' :: IO Day
getToday' = localDay . zonedTimeToLocalTime <$> getZonedTime
-- | Today's date as (year, month, day).
getToday :: IO (Integer, Int, Int)
getToday = toGregorian <$> getToday'
-- | Today's day of the week.
getDayOfWeek :: IO DayOfWeek
getDayOfWeek = do
  (_, _, weekNum) <- toWeekDate <$> getToday'
  return (toDayOfWeek weekNum)
-- | Pseudo-random closeness rating in 1..5.
getGoddessCloseness :: IO GoddessCloseness
getGoddessCloseness = GoddessCloseness . (+1) <$> modRandom 6 5
-- | Pseudo-random seat direction.
getDirection :: IO Direction
getDirection = (directions !!) <$> modRandom 2 (length directions)
-- | Pick two pseudo-random drinks from the default menu.
getDrinks :: IO [Drink]
getDrinks = pickRandom defaultDrinks 2
  where
    defaultDrinks = ["水","茶","红茶","绿茶","咖啡","奶茶","可乐","鲜奶","豆奶","果汁","果味汽水","苏打水","运动饮料","酸奶","酒"]
-- | Assemble today's almanac: 2-4 good and 2-4 bad activities drawn
-- from the pool (weekend-only activities on weekends), plus direction,
-- drinks and the goddess rating.
getTodayLaohuangli :: IO Laohuangli
getTodayLaohuangli = do
  (y, m, d) <- getToday
  w <- getDayOfWeek
  numGood <- (+2) <$> modRandom 98 3
  numBad <- (+2) <$> modRandom 87 3
  -- on weekends only weekend-suitable activities qualify
  let filteredActs = if isWeekend w then filter isWeekendAct activities else activities
  randActs <- pickRandom filteredActs (numGood + numBad)
  dir <- getDirection
  ds <- getDrinks
  gc <- getGoddessCloseness
  return Laohuangli {today=(y, m, d, w), goodActivities=take numGood randActs, badActivities=drop numGood randActs, direction=dir, drinks=ds, goddessCloseness=gc}
-- | The full pool of candidate activities (names may contain the
-- %v / %t / %l placeholders expanded by 'getActivityName').
activities :: [Activity]
activities = [ Activity {actName="写单元测试", goodDesc="写单元测试将减少出错", badDesc="写单元测试会降低你的开发效率", isWeekendAct=False}
             , Activity {actName="洗澡", goodDesc="你几天没洗澡了?", badDesc="会把设计方面的灵感洗掉", isWeekendAct=True}
             , Activity {actName="锻炼一下身体", goodDesc="", badDesc="能量没消耗多少,吃得却更多", isWeekendAct=True}
             , Activity {actName="抽烟", goodDesc="抽烟有利于提神,增加思维敏捷", badDesc="除非你活够了,死得早点没关系", isWeekendAct=True}
             , Activity {actName="白天上线", goodDesc="今天白天上线是安全的", badDesc="可能导致灾难性后果", isWeekendAct=False}
             , Activity {actName="重构", goodDesc="代码质量得到提高", badDesc="你很有可能会陷入泥潭", isWeekendAct=False}
             , Activity {actName="使用%t", goodDesc="你看起来更有品位", badDesc="别人会觉得你在装逼", isWeekendAct=False}
             , Activity {actName="跳槽", goodDesc="该放手时就放手", badDesc="鉴于当前的经济形势,你的下一份工作未必比现在强", isWeekendAct=False}
             , Activity {actName="招人", goodDesc="你面前这位有成为牛人的潜质", badDesc="这人会写程序吗?", isWeekendAct=False}
             , Activity {actName="面试", goodDesc="面试官今天心情很好", badDesc="面试官不爽,会拿你出气", isWeekendAct=False}
             , Activity {actName="提交辞职申请", goodDesc="公司找到了一个比你更能干更便宜的家伙,巴不得你赶快滚蛋", badDesc="鉴于当前的经济形势,你的下一份工作未必比现在强", isWeekendAct=False}
             , Activity {actName="申请加薪", goodDesc="老板今天心情很好", badDesc="公司正在考虑裁员", isWeekendAct=False}
             , Activity {actName="晚上加班", goodDesc="晚上是程序员精神最好的时候", badDesc="", isWeekendAct=True}
             , Activity {actName="在妹子面前吹牛", goodDesc="改善你矮穷挫的形象", badDesc="会被识破", isWeekendAct=True}
             , Activity {actName="撸管", goodDesc="避免缓冲区溢出", badDesc="强撸灰飞烟灭",isWeekendAct=True}
             , Activity {actName="浏览成人网站", goodDesc="重拾对生活的信心", badDesc="你会心神不宁", isWeekendAct=True}
             , Activity {actName="命名变量%v", goodDesc="", badDesc="", isWeekendAct= False}
             , Activity {actName="写超过%l行的方法", goodDesc="你的代码组织的很好,长一点没关系", badDesc="你的代码将混乱不堪,你自己都看不懂", isWeekendAct=False}
             , Activity {actName="提交代码", goodDesc="遇到冲突的几率是最低的", badDesc="你遇到的一大堆冲突会让你觉得自己是不是时间穿越了", isWeekendAct=False}
             , Activity {actName="代码复审", goodDesc="发现重要问题的几率大大增加", badDesc="你什么问题都发现不了,白白浪费时间", isWeekendAct=False}
             , Activity {actName="开会", goodDesc="写代码之余放松一下打个盹,有益健康", badDesc="小心被扣屎盆子背黑锅", isWeekendAct=False}
             , Activity {actName="打DOTA", goodDesc="你将有如神助", badDesc="你会被虐的很惨", isWeekendAct=True}
             , Activity {actName="晚上上线", goodDesc="晚上是程序员精神最好的时候", badDesc="你白天已经筋疲力尽了", isWeekendAct=False}
             , Activity {actName="修复BUG", goodDesc="你今天对BUG的嗅觉大大提高", badDesc="新产生的BUG将比修复的更多", isWeekendAct=False}
             , Activity {actName="设计评审", goodDesc="设计评审会议将变成头脑风暴", badDesc="人人筋疲力尽,评审就这么过了", isWeekendAct=False}
             , Activity {actName="需求评审", goodDesc="", badDesc="", isWeekendAct=False}
             , Activity {actName="上微博", goodDesc="今天发生的事不能错过", badDesc="今天的微博充满负能量", isWeekendAct=True}
             , Activity {actName="上AB站", goodDesc="还需要理由吗?", badDesc="满屏兄贵亮瞎你的眼", isWeekendAct=True}
             , Activity {actName="玩FlappyBird", goodDesc="今天破纪录的几率很高", badDesc="除非你想玩到把手机砸了", isWeekendAct=True}]
| fishtreesugar/laohuangli-hs | src/Data/Time/Calendar/Laohuangli.hs | bsd-3-clause | 10,618 | 0 | 16 | 1,897 | 2,949 | 1,682 | 1,267 | 165 | 7 |
{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Main
where
import MFlow.Wai.Blaze.Html.All hiding(main)
import qualified MFlow.Forms as F
import Data.Typeable
import System.IO.Unsafe
import Control.Workflow
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Logger
import Control.Monad (liftM)
import Data.Monoid
--
-- | Three-page MFlow flow: a last-name form, a confirmation link, and a
-- final page.
-- NOTE(review): the 'lastName' binding is never used after the first
-- page -- confirm whether the value was meant to be carried forward.
queryBusinessClient :: FlowM Html IO ()
queryBusinessClient = do
  -- HTML
  lastName <- page
       $ getString Nothing <! hint "Last Name"
       <++ br
       <** submitButton "Submit"
  page $ wlink () << b "Here2"
  page
       $ b << ("hey oh"::String)
       ++> br
       ++> wlink () << b "click here"
  where
    -- placeholder attribute shown inside the empty input field
    hint x = [("placeholder", x)]
-- | Site entry flow.
-- NOTE(review): 'login' and 'logout' are defined in the where clause but
-- their call sites are commented out below -- confirm whether
-- authentication is meant to be enabled.
site :: FlowM Html IO ()
site = do
  -- login
  queryBusinessClient
  -- logout
  where
    login = do
      r <- page
           $ h3 << ("Login"::String)
           ++> userWidget Nothing userLogin
      return r
    logout = do
      page
           $ wlink () << b "Logout"
      F.logout
-- | Set up the admin user and start navigation.
-- FIX: the final line of this definition carried non-Haskell extraction
-- junk appended after 'site'; restored to plain code.
main :: IO ()
main = do
   setAdminUser ("nickgeoca"::String) (""::String)
   runNavigation "" . transientNav $ do -- TODO: Add TLS here with runSecureNavigation
     site
{-# OPTIONS_GHC -W #-}
module Type.Solve (solve) where
import Control.Monad
import Control.Monad.State
import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Traversable as Traversable
import qualified Data.UnionFind.IO as UF
import Type.Type
import Type.Unify
import qualified Type.ExtraChecks as Check
import qualified Type.State as TS
import qualified SourceSyntax.Annotation as A
-- | Every variable has rank less than or equal to the maxRank of the pool.
-- This sorts variables into the young and old pools accordingly.
generalize :: TS.Pool -> StateT TS.SolverState IO ()
generalize youngPool = do
  youngMark <- TS.uniqueMark
  let youngRank = TS.maxRank youngPool
      -- mark a variable as young and bucket it by its current rank
      -- NOTE(review): the lambda argument shadows the outer 'desc';
      -- the dictionary deliberately uses the rank read *before* marking
      insert dict var = do
          desc <- liftIO $ UF.descriptor var
          liftIO $ UF.modifyDescriptor var (\desc -> desc { mark = youngMark })
          return $ Map.insertWith (++) (rank desc) [var] dict

  -- Sort the youngPool variables by rank.
  rankDict <- foldM insert Map.empty (TS.inhabitants youngPool)

  -- get the ranks right for each entry.
  -- start at low ranks so that we only have to pass
  -- over the information once.
  visitedMark <- TS.uniqueMark
  -- NOTE(review): results of this mapM (and the traverse below) are
  -- discarded; mapM_/traverse_ would avoid building the result lists
  mapM (\(poolRank, vars) -> mapM (adjustRank youngMark visitedMark poolRank) vars) (Map.toList rankDict)

  -- For variables that have rank lowerer than youngRank, register them in
  -- the old pool if they are not redundant.
  let registerIfNotRedundant var = do
        isRedundant <- liftIO $ UF.redundant var
        if isRedundant then return var else TS.register var

  let rankDict' = Map.delete youngRank rankDict
  Traversable.traverse (mapM registerIfNotRedundant) rankDict'

  -- For variables with rank youngRank
  --   If rank < youngRank: register in oldPool
  --   otherwise generalize
  let registerIfLowerRank var = do
        isRedundant <- liftIO $ UF.redundant var
        case isRedundant of
          True -> return ()
          False -> do
            desc <- liftIO $ UF.descriptor var
            case rank desc < youngRank of
              True -> TS.register var >> return ()
              False -> do
                -- generalize: make flexible variables rigid and unranked
                let flex' = case flex desc of { Flexible -> Rigid ; other -> other }
                liftIO $ UF.setDescriptor var (desc { rank = noRank, flex = flex' })

  mapM_ registerIfLowerRank (Map.findWithDefault [] youngRank rankDict)

-- adjust the ranks of variables such that ranks never increase as you
-- move deeper into a variable.  Returns the adjusted rank.
adjustRank :: Int -> Int -> Int -> Variable -> StateT TS.SolverState IO Int
adjustRank youngMark visitedMark groupRank variable =
    let adjust = adjustRank youngMark visitedMark groupRank in
    do  desc <- liftIO $ UF.descriptor variable
        case () of
          () | mark desc == youngMark ->
                 do -- Set the variable as marked first because it may be cyclic.
                    liftIO $ UF.modifyDescriptor variable $ \desc -> desc { mark = visitedMark }
                    rank' <- case structure desc of
                               Nothing -> return groupRank
                               Just term ->
                                   -- a term's rank is the maximum of its sub-variables' ranks
                                   case term of
                                     App1 a b -> max `liftM` adjust a `ap` adjust b
                                     Fun1 a b -> max `liftM` adjust a `ap` adjust b
                                     Var1 x -> adjust x
                                     EmptyRecord1 -> return outermostRank
                                     Record1 fields extension ->
                                         do ranks <- mapM adjust (concat (Map.elems fields))
                                            rnk <- adjust extension
                                            return . maximum $ rnk : ranks
                    liftIO $ UF.modifyDescriptor variable $ \desc -> desc { rank = rank' }
                    return rank'

             | mark desc /= visitedMark ->
                 -- not young: only ever lower the rank, never raise it
                 do let rank' = min groupRank (rank desc)
                    liftIO $ UF.setDescriptor variable (desc { mark = visitedMark, rank = rank' })
                    return rank'

             | otherwise -> return (rank desc)
-- | Solve a type constraint, reporting problems through the solver
-- state.  (Two @mapM@s whose results were discarded are now @mapM_@,
-- avoiding the useless result lists; behaviour is otherwise unchanged.)
solve :: TypeConstraint -> StateT TS.SolverState IO ()
solve (A.A region constraint) =
  case constraint of
    CTrue -> return ()

    CSaveEnv -> TS.saveLocalEnv

    CEqual term1 term2 -> do
        t1 <- TS.flatten term1
        t2 <- TS.flatten term2
        unify region t1 t2

    CAnd cs -> mapM_ solve cs

    -- special case: a single quantifier-only scheme with a trivial body
    CLet [Scheme [] fqs constraint' _] (A.A _ CTrue) -> do
        oldEnv <- TS.getEnv
        mapM_ TS.introduce fqs
        solve constraint'
        TS.modifyEnv (\_ -> oldEnv)

    CLet schemes constraint' -> do
        oldEnv <- TS.getEnv
        headers <- Map.unions `fmap` mapM (solveScheme region) schemes
        TS.modifyEnv $ \env -> Map.union headers env
        solve constraint'
        -- occurs-check each header; errors go into the solver state
        mapM_ Check.occurs $ Map.toList headers
        TS.modifyEnv (\_ -> oldEnv)

    CInstance name term -> do
        env <- TS.getEnv
        freshCopy <-
            case Map.lookup name env of
              Just tipe -> TS.makeInstance tipe
              Nothing
                  | List.isPrefixOf "Native." name -> liftIO (var Flexible)
                  | otherwise ->
                      error ("Could not find '" ++ name ++ "' when solving type constraints.")
        t <- TS.flatten term
        unify region freshCopy t
-- | Solve a scheme's constraint in a fresh pool, generalize its
-- quantifiers, and return the flattened header.  (Discarded @mapM@
-- results replaced by @mapM_@; behaviour unchanged.)
solveScheme :: A.Region -> TypeScheme -> StateT TS.SolverState IO (Map.Map String Variable)
solveScheme region scheme =
    case scheme of
      -- no quantifiers: just solve and flatten the header
      Scheme [] [] constraint header -> do
          solve constraint
          Traversable.traverse TS.flatten header

      Scheme rigidQuantifiers flexibleQuantifiers constraint header -> do
          let quantifiers = rigidQuantifiers ++ flexibleQuantifiers
          oldPool <- TS.getPool

          -- fill in a new pool when working on this scheme's constraints
          freshPool <- TS.nextRankPool
          TS.switchToPool freshPool
          mapM_ TS.introduce quantifiers
          header' <- Traversable.traverse TS.flatten header
          solve constraint

          allDistinct region rigidQuantifiers
          youngPool <- TS.getPool
          TS.switchToPool oldPool
          generalize youngPool
          mapM_ (isGeneric region) rigidQuantifiers
          return header'
-- Checks that all of the given variables belong to distinct equivalence classes.
-- Also checks that their structure is Nothing, so they represent a variable, not
-- a more complex term.  Failures are recorded via TS.addError.
allDistinct :: A.Region -> [Variable] -> StateT TS.SolverState IO ()
allDistinct region vars = do
  -- a fresh mark; seeing it again means two vars share a class
  seen <- TS.uniqueMark
  let check var = do
        desc <- liftIO $ UF.descriptor var
        case structure desc of
          Just _ -> TS.addError region (Just msg) var var
              where msg = "Cannot generalize something that is not a type variable."

          Nothing -> do
            if mark desc == seen
              then let msg = "Duplicate variable during generalization."
                   in TS.addError region (Just msg) var var
              else return ()
            liftIO $ UF.setDescriptor var (desc { mark = seen })
  mapM_ check vars
-- Check that a variable has rank == noRank, meaning that it can be generalized.
-- A ranked variable is reported as an error via the solver state.
isGeneric :: A.Region -> Variable -> StateT TS.SolverState IO ()
isGeneric region var = do
  desc <- liftIO $ UF.descriptor var
  when (rank desc /= noRank) $
    let msg = "Unable to generalize a type variable. It is not unranked."
    in TS.addError region (Just msg) var var
| deadfoxygrandpa/Elm | compiler/Type/Solve.hs | bsd-3-clause | 7,577 | 0 | 30 | 2,427 | 1,997 | 962 | 1,035 | 143 | 8 |
{-# LANGUAGE OverloadedStrings #-}
module Display where
import Control.Concurrent.STM
import Network.Wai.Handler.Warp
import Network.Wai
import Network.HTTP.Types
import qualified Data.ByteString.Lazy.Char8 as LBS
-- | Serve the status page on port 4444, reading the shared status line.
display :: TVar String -> IO ()
display statusLine = run 4444 (myApp statusLine)
-- | WAI application: "/" serves the static page, "/status" the current
-- status line.  FIX: unknown paths now answer 404 (the original sent
-- status400 with a "Not found" body, which is the wrong status code).
myApp :: TVar String -> Request -> (Response -> IO a) -> IO a
myApp statusLine req respond = do
  status <- atomically (readTVar statusLine)
  case pathInfo req of
    []         -> respond (responseLBS status200 [] (LBS.pack html))
    ["status"] -> respond (responseLBS status200 [] (LBS.pack status))
    _          -> respond (responseLBS status404 [] (LBS.pack "Not found"))
-- | Static page: a single paragraph whose content is refreshed every
-- two seconds by polling the /status endpoint via XMLHttpRequest.
-- (One multi-line string literal using Haskell string gaps.)
html :: String
html =
    "<html> \
    \<head> \
    \<style>\
    \p {\
    \color:white;\
    \font-size:40px;\
    \font-family:\"Museo Sans\",sans;\
    \}\
    \</style>\
    \</head>\
    \<body> \
    \<p id=content></p> \
    \<script type=text/javascript> \
    \window.setInterval(updater, 2000); \
    \function updater() { \
    \var xhttp = new XMLHttpRequest(); \
    \xhttp.onreadystatechange = function() { \
    \if (this.readyState == 4 && this.status == 200) { \
    \document.getElementById(\"content\").innerHTML = this.responseText; \
    \} \
    \}; \
    \xhttp.open(\"GET\", \"status\", true); \
    \xhttp.send(); \
    \}\
    \</script> \
    \</body> \
    \</html>"
| josuf107/xioqbot | src/Display.hs | bsd-3-clause | 1,686 | 0 | 15 | 612 | 257 | 133 | 124 | 20 | 3 |
module GUI.BookmarkView (
BookmarkView,
bookmarkViewNew,
BookmarkViewActions(..),
bookmarkViewGet,
bookmarkViewAdd,
bookmarkViewRemove,
bookmarkViewClear,
bookmarkViewSetLabel,
) where
import GHC.RTS.Events (Timestamp)
import Graphics.UI.Gtk
import Numeric
---------------------------------------------------------------------------
-- | Abstract bookmark view object.
--
data BookmarkView = BookmarkView {
       bookmarkStore :: ListStore (Timestamp, String) -- ^ backing GTK list model
     }

-- | The actions to take in response to TraceView events.
--
data BookmarkViewActions = BookmarkViewActions {
       bookmarkViewAddBookmark    :: IO (),              -- ^ user pressed "add"
       bookmarkViewRemoveBookmark :: Int -> IO (),       -- ^ delete row at index
       bookmarkViewGotoBookmark   :: Timestamp -> IO (), -- ^ jump to a timestamp
       bookmarkViewEditLabel      :: Int -> String -> IO () -- ^ label edited at index
     }
---------------------------------------------------------------------------

-- | Append a bookmark (timestamp, label) to the end of the list model.
bookmarkViewAdd :: BookmarkView -> Timestamp -> String -> IO ()
bookmarkViewAdd BookmarkView{bookmarkStore} ts label =
    listStoreAppend bookmarkStore (ts, label) >> return ()

-- | Delete the bookmark at the given row index.
bookmarkViewRemove :: BookmarkView -> Int -> IO ()
bookmarkViewRemove BookmarkView{bookmarkStore} n =
    listStoreRemove bookmarkStore n >> return ()

-- | Drop every bookmark from the list model.
bookmarkViewClear :: BookmarkView -> IO ()
bookmarkViewClear BookmarkView{bookmarkStore} =
    listStoreClear bookmarkStore

-- | All bookmarks currently in the model, in display order.
bookmarkViewGet :: BookmarkView -> IO [(Timestamp, String)]
bookmarkViewGet BookmarkView{bookmarkStore} =
    listStoreToList bookmarkStore

-- | Replace the label of the bookmark at the given index, keeping its
-- timestamp.
bookmarkViewSetLabel :: BookmarkView -> Int -> String -> IO ()
bookmarkViewSetLabel BookmarkView{bookmarkStore} n label =
    listStoreGetValue bookmarkStore n >>= \(oldTs, _) ->
        listStoreSetValue bookmarkStore n (oldTs, label)
---------------------------------------------------------------------------

-- | Build the bookmark view from the Glade builder and wire the toolbar
-- buttons, row activation and label editing to the given actions.
bookmarkViewNew :: Builder -> BookmarkViewActions -> IO BookmarkView
bookmarkViewNew builder BookmarkViewActions{..} = do

    let getWidget cast name = builderGetObject builder cast name

    ---------------------------------------------------------------------------
    -- model and two-column (time, label) tree view

    bookmarkTreeView <- getWidget castToTreeView "bookmark_list"

    bookmarkStore <- listStoreNew []
    columnTs      <- treeViewColumnNew
    cellTs        <- cellRendererTextNew
    columnLabel   <- treeViewColumnNew
    cellLabel     <- cellRendererTextNew
    selection     <- treeViewGetSelection bookmarkTreeView

    treeViewColumnSetTitle columnTs    "Time"
    treeViewColumnSetTitle columnLabel "Label"
    treeViewColumnPackStart columnTs    cellTs    False
    treeViewColumnPackStart columnLabel cellLabel True
    treeViewAppendColumn bookmarkTreeView columnTs
    treeViewAppendColumn bookmarkTreeView columnLabel

    treeViewSetModel bookmarkTreeView bookmarkStore

    -- timestamps are stored in microseconds; render as seconds
    cellLayoutSetAttributes columnTs cellTs bookmarkStore $ \(ts,_) ->
      [ cellText := showFFloat (Just 6) (fromIntegral ts / 1000000) "s" ]

    cellLayoutSetAttributes columnLabel cellLabel bookmarkStore $ \(_,label) ->
      [ cellText := label ]

    ---------------------------------------------------------------------------
    -- toolbar buttons

    addBookmarkButton    <- getWidget castToToolButton "add_bookmark_button"
    deleteBookmarkButton <- getWidget castToToolButton "delete_bookmark"
    gotoBookmarkButton   <- getWidget castToToolButton "goto_bookmark_button"

    onToolButtonClicked addBookmarkButton $
      bookmarkViewAddBookmark

    -- delete/goto act on the current selection; no-op when nothing selected
    onToolButtonClicked deleteBookmarkButton $ do
      selected <- treeSelectionGetSelected selection
      case selected of
        Nothing   -> return ()
        Just iter ->
          let pos = listStoreIterToIndex iter
           in bookmarkViewRemoveBookmark pos

    onToolButtonClicked gotoBookmarkButton $ do
      selected <- treeSelectionGetSelected selection
      case selected of
        Nothing   -> return ()
        Just iter -> do
          let pos = listStoreIterToIndex iter
          (ts,_) <- listStoreGetValue bookmarkStore pos
          bookmarkViewGotoBookmark ts

    -- NOTE(review): the [pos] patterns below are partial -- they assume a
    -- single-level tree path, which holds for a flat ListStore.
    onRowActivated bookmarkTreeView $ \[pos] _ -> do
      (ts, _) <- listStoreGetValue bookmarkStore pos
      bookmarkViewGotoBookmark ts

    set cellLabel [ cellTextEditable := True ]
    on cellLabel edited $ \[pos] val -> do
      bookmarkViewEditLabel pos val

    ---------------------------------------------------------------------------

    return BookmarkView{..}
| ml9951/ThreadScope | GUI/BookmarkView.hs | bsd-3-clause | 4,385 | 0 | 18 | 843 | 970 | 472 | 498 | -1 | -1 |
-- | The @n@ most significant decimal digits of the sum of the numbers
-- (given as decimal strings), returned as a single Integer.  A sum with
-- at most @n@ digits is returned unchanged.
digitsOfSum :: (Foldable t1, Integral t) => t -> t1 String -> Integer
digitsOfSum n a = go total
  where
    -- strip least-significant digits until at most n remain
    go x | x < 10 ^ n = x
         | otherwise  = go (quot x 10)
    -- renamed from 'sum', which shadowed Prelude.sum
    total = foldl (\acc s -> acc + (read s :: Integer)) 0 a
-- | Read the puzzle input (one number per line) and print the first
-- ten digits of their sum (Project Euler 13).
main :: IO ()
main = do
  f <- readFile "013.txt"
  print $ digitsOfSum 10 (lines f)
| JacksonGariety/euler.hs | 013.hs | bsd-3-clause | 316 | 0 | 12 | 101 | 167 | 81 | 86 | 10 | 1 |
module Exceptions where
import System.Environment
import Control.Exception
-- | Print @5 \`div\` denom@; a division by zero is captured as 'Left'
-- instead of being thrown.
willIFail :: Integer -> IO (Either ArithException ())
willIFail denom = try (print (div 5 denom))
-- | Run the action; print the error if it yields 'Left', otherwise
-- discard the result.
onlyReportError :: Show e => IO (Either e a) -> IO ()
onlyReportError action =
    action >>= either print (const (return ()))
-- | Like 'willIFail' but only reports the error (if any) on stdout.
willFail :: Integer -> IO ()
willFail = onlyReportError . willIFail
-- | Same observable behaviour as 'willFail', written with 'catch'
-- instead of 'try'.
willFail' :: Integer -> IO ()
willFail' denom =
  print (div 5 denom) `catch` handler
  where handler :: ArithException -> IO ()
        handler = print
-- | Parse a denominator and report any division error.
-- NOTE(review): 'read' is partial -- a non-numeric argument throws an
-- uncaught parse exception; consider 'readMaybe'.
testDiv :: String -> IO ()
testDiv d = onlyReportError $ willIFail (read d)
-- | Treat each command-line argument as a denominator.
main :: IO ()
main = do
  args <- getArgs
  mapM_ testDiv args
-- | Throw the given exception and try to catch it back -- but only an
-- 'ArithException' is caught; other exception types keep propagating.
canICatch :: Exception e => e -> IO (Either ArithException ())
canICatch = try . throwIO
| vasily-kirichenko/haskell-book | src/ErrorHandling/Exceptions.hs | bsd-3-clause | 821 | 0 | 11 | 198 | 323 | 159 | 164 | 26 | 2 |
module Main where
import Control.Monad
import Data.Aeson
import qualified Data.ByteString.Lazy as BS
import Data.Either
import qualified Data.Map as Map
import Data.Maybe
import Data.Yaml
import Test.Hspec
import Text.Garnett.Definition
import Text.Garnett.Completers.BashCompleter
import Text.Garnett.Completers.ShellDSL
import Text.Garnett.Writers.HaskellWriter
import Writers.HaskellWriter
import Paths_garnett
-- | Test entry point: the bundled example Garnett file must decode.
main :: IO ()
main = hspec $ do
  describe "decode" $ do
    it "successfully decodes the example Garnett file" $ do
      ex <- decodeGFile "example.yaml"
      isRight ex `shouldBe` True
-- | Decode a Garnett file and print either the parse error or the
-- rendered bash completion document.
printBash :: FilePath -> IO ()
printBash fp = do
  parsed <- decodeGFile fp
  case parsed of
    Left err -> print err
    Right gf -> print (toDoc (allBash gf))
-- | Resolve a data-file name via cabal's Paths module and decode it as
-- a 'GarnettFile'.
decodeGFile :: FilePath -> IO (Either ParseException GarnettFile)
decodeGFile fp = getDataFileName fp >>= decodeFileEither
{-
defaultBlock = Block { _progName = Map.fromList [(defaultFmt, "test")]
, _authorName = Map.fromList [(defaultFmt, "test name")]
, _authorEmail = Map.fromList [(defaultFmt, "test@email")]
, _shortDesc = Map.fromList [(defaultFmt, Markup "A test prog")]
, _completions = Map.fromList [(defaultFmt, comp)]
}
where comp = Completions { _shortCompl = Map.fromList [("help", 'h')]
, _longCompl = Map.fromList [("help", "help")]
}
-}
| jkarni/Garnett | tests/Tests.hs | bsd-3-clause | 1,679 | 0 | 15 | 564 | 263 | 139 | 124 | 31 | 2 |
module Data.IORef.Logic
( IORef
, newIORef
, readIORef
, writeIORef
, modifyIORef
, modifyIORef'
) where
import Control.Monad.IO.Logic
import Control.Monad.ST.Logic.Internal
-- | An IORef usable inside 'LogicIO': 'Ref' specialised to IO.
type IORef s = Ref s IO
-- | Create a reference holding the given initial value.
newIORef :: a -> LogicIO s (IORef s a)
newIORef = newRef
{-# INLINE newIORef #-}
-- | Read the current value of a reference.
readIORef :: IORef s a -> LogicIO s a
readIORef = readRef
{-# INLINE readIORef #-}
-- | Overwrite the reference with a new value.
writeIORef :: IORef s a -> a -> LogicIO s ()
writeIORef = writeRef
{-# INLINE writeIORef #-}
-- | Apply a function to the stored value (lazily, per 'modifyRef').
modifyIORef :: IORef s a -> (a -> a) -> LogicIO s ()
modifyIORef = modifyRef
{-# INLINE modifyIORef #-}
-- | Strict variant of 'modifyIORef' (delegates to 'modifyRef'').
modifyIORef' :: IORef s a -> (a -> a) -> LogicIO s ()
modifyIORef' = modifyRef'
{-# INLINE modifyIORef' #-}
| sonyandy/logicst | src/Data/IORef/Logic.hs | bsd-3-clause | 723 | 0 | 8 | 173 | 214 | 120 | 94 | 25 | 1 |
module Common.List (
rotate
, minus
, intersect
, nub'
, unique
, maximumBy'
, minimumBy'
, maximum'
, minimum'
) where
import Data.List (foldl1', group)
import qualified Data.Set as S
{-# INLINABLE rotate #-}
{-# INLINABLE minus #-}
{-# INLINABLE intersect #-}
{-# INLINABLE nub' #-}
{-# INLINABLE unique #-}
{-# INLINABLE maximumBy' #-}
{-# INLINABLE minimumBy' #-}
{-# INLINABLE maximum' #-}
{-# INLINABLE minimum' #-}
-- | Rotate a list left by @n@ positions (for n >= 0; a negative n is a
-- no-op, and @rotate n []@ is @[]@).
rotate :: Int -> [a] -> [a]
rotate n xs = take len (drop n (cycle xs))
  where len = length xs
-- set difference of two ascending lists (both inputs must be sorted)
minus :: (Ord a) => [a] -> [a] -> [a]
minus xs [] = xs
minus [] _ = []
minus l@(x:xs) r@(y:ys)
  | x < y     = x : minus xs r
  | x > y     = minus l ys
  | otherwise = minus xs ys
-- set intersection of two ascending lists (both inputs must be sorted)
intersect :: (Ord a) => [a] -> [a] -> [a]
intersect [] _ = []
intersect _ [] = []
intersect l@(x:xs) r@(y:ys)
  | x == y    = x : intersect xs ys
  | x < y     = intersect xs r
  | otherwise = intersect l ys
-- | Remove duplicates via a Set: O(n log n), but unlike 'Data.List.nub'
-- the result comes back in ascending order, not input order.
nub' :: (Ord a) => [a] -> [a]
nub' = S.toList . S.fromList
-- | True iff the list is non-empty and all its elements are equal
-- ('group' produces exactly one run in that case).
unique :: (Eq a) => [a] -> Bool
unique xs = 1 == length (group xs)
-- | Strict maximum by a comparator; keeps the earlier element on ties.
-- Partial: 'undefined' on the empty list.
maximumBy' :: (a -> a -> Ordering) -> [a] -> a
maximumBy' _ [] = undefined
maximumBy' cmp xs = foldl1' pick xs
  where
    pick a b
      | cmp a b == LT = b
      | otherwise     = a
-- | Strict minimum by a comparator; keeps the earlier element on ties.
-- Partial: 'undefined' on the empty list.
minimumBy' :: (a -> a -> Ordering) -> [a] -> a
minimumBy' _ [] = undefined
minimumBy' cmp xs = foldl1' pick xs
  where
    pick a b
      | cmp a b == GT = b
      | otherwise     = a
-- | Strict 'maximum'; partial on the empty list.
maximum' :: Ord a => [a] -> a
maximum' = foldl1' max
-- | Strict 'minimum'; partial on the empty list.
minimum' :: Ord a => [a] -> a
minimum' = foldl1' min
| foreverbell/project-euler-solutions | lib/Common/List.hs | bsd-3-clause | 1,669 | 0 | 9 | 410 | 703 | 387 | 316 | 48 | 3 |
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
-- |
-- Module: Database.PostgreSQL.Store.Generics
-- Copyright: (c) Ole Krüger 2016
-- License: BSD3
-- Maintainer: Ole Krüger <ole@vprsm.de>
module Database.PostgreSQL.Store.Generics (
-- * Generic Entity
Generic,
Rep,
toGeneric,
fromGeneric,
-- * Type-Level Information
KRecord (..),
KFlatSum (..),
KDataType (..),
-- * Mapper classes
GRecord (..),
GFlatSum (..),
GDataType (..),
Record (..),
FlatSum (..),
DataType (..),
-- * Analyzers
AnalyzeRecordRep,
AnalyzeFlatSumRep,
AnalyzeDataType
) where
import GHC.Generics hiding (Generic (..))
import qualified GHC.Generics as G
import GHC.TypeLits
import Data.Kind
-- | Information about a record
data KRecord
= TCombine KRecord KRecord
-- ^ Combination of two records
| TSingle Meta Type
-- ^ Single element with meta information and type
-- | Mappings between a 'G.Generic' representation and our 'KRecord'-based representation
class GRecord (rec :: KRecord) where
-- | 'Generic' representation
type RecordRep rec :: * -> *
-- | 'KRecord'-based representation
data Record rec
-- | From 'Generic' representation
toRecord :: RecordRep rec x -> Record rec
-- | To 'Generic' representation
fromRecord :: Record rec -> RecordRep rec x
-- | Single record
instance GRecord ('TSingle meta typ) where
type RecordRep ('TSingle meta typ) = S1 meta (Rec0 typ)
data Record ('TSingle meta typ) = Single typ
toRecord (M1 (K1 x)) = Single x
fromRecord (Single x) = M1 (K1 x)
deriving instance (Show typ) => Show (Record ('TSingle meta typ))
-- | Combination of records
instance (GRecord lhs, GRecord rhs) => GRecord ('TCombine lhs rhs) where
type RecordRep ('TCombine lhs rhs) = RecordRep lhs :*: RecordRep rhs
data Record ('TCombine lhs rhs) = Combine (Record lhs) (Record rhs)
toRecord (lhs :*: rhs) = Combine (toRecord lhs) (toRecord rhs)
fromRecord (Combine lhs rhs) = fromRecord lhs :*: fromRecord rhs
deriving instance (Show (Record lhs), Show (Record rhs)) => Show (Record ('TCombine lhs rhs))
-- | Analyze the 'Generic' representation of the selectors. Make sure it has 1 or more fields. Then
-- transform it into a 'KRecord'.
type family AnalyzeRecordRep org (sel :: * -> *) :: KRecord where
-- Single field
AnalyzeRecordRep org (S1 meta (Rec0 typ)) =
'TSingle meta typ
-- Multiple fields
AnalyzeRecordRep org (lhs :*: rhs) =
'TCombine (AnalyzeRecordRep org lhs) (AnalyzeRecordRep org rhs)
-- Missing field(s)
AnalyzeRecordRep org U1 =
TypeError ('Text "Given type "
':<>: 'ShowType org
':<>: 'Text " has one constructor, therefore that constructor must have \
\at least one field")
-- Something else
AnalyzeRecordRep org other =
TypeError ('Text "Given type "
':<>: 'ShowType org
':<>: 'Text " has a constructor with an invalid selector"
':$$: 'ShowType other)
-- | Information about the constructors of an enumeration
data KFlatSum
= TChoose KFlatSum KFlatSum
-- ^ Combination of values
| TValue Meta
-- ^ Single value of the enumeration
-- | Mappings between a 'G.Generic' representation and our 'KFlatSum'-based representation
class GFlatSum (enum :: KFlatSum) where
-- | 'Generic' representation
type FlatSumRep enum :: * -> *
-- | 'KFlatSum'-based representation
data FlatSum enum
-- | From 'Generic' representation
toFlatSum :: FlatSumRep enum x -> FlatSum enum
-- | To 'Generic' representation
fromFlatSum :: FlatSum enum -> FlatSumRep enum x
-- | Single constructor: represented as a nullary constructor (C1 wrapping U1).
instance GFlatSum ('TValue meta) where
 type FlatSumRep ('TValue meta) = C1 meta U1
 data FlatSum ('TValue meta) = Unit
 toFlatSum (M1 U1) = Unit
 fromFlatSum Unit = M1 U1
deriving instance Show (FlatSum ('TValue meta))
-- | Combination of multiple constructors, mirroring the ':+:' sum in the
-- 'Generic' representation.
instance (GFlatSum lhs, GFlatSum rhs) => GFlatSum ('TChoose lhs rhs) where
 type FlatSumRep ('TChoose lhs rhs) = FlatSumRep lhs :+: FlatSumRep rhs
 data FlatSum ('TChoose lhs rhs) = ChooseLeft (FlatSum lhs) | ChooseRight (FlatSum rhs)
 -- L1/R1 pick the branch of the generic sum; convert that branch recursively.
 toFlatSum (L1 lhs) = ChooseLeft (toFlatSum lhs)
 toFlatSum (R1 rhs) = ChooseRight (toFlatSum rhs)
 fromFlatSum (ChooseLeft lhs) = L1 (fromFlatSum lhs)
 fromFlatSum (ChooseRight rhs) = R1 (fromFlatSum rhs)
deriving instance (Show (FlatSum lhs), Show (FlatSum rhs)) => Show (FlatSum ('TChoose lhs rhs))
-- | Analyze the 'Generic' representation of constructors. Make sure every constructor has zero
-- fields. Then transform it into a 'KFlatSum'. @org@ is the original type, used only
-- in error messages.
type family AnalyzeFlatSumRep org (cons :: * -> *) :: KFlatSum where
 -- Constructor without record selector
 AnalyzeFlatSumRep org (C1 meta U1) =
 'TValue meta
 -- Constructor with a record selector is invalid
 AnalyzeFlatSumRep org (C1 meta1 (S1 meta2 rec)) =
 TypeError ('Text "Given type "
 ':<>: 'ShowType org
 ':<>: 'Text " has multiple constructors, therefore these constructors must have \
 \no fields")
 -- Constructor with multiple fields is likewise invalid
 AnalyzeFlatSumRep org (C1 meta1 (lhs :*: rhs)) =
 TypeError ('Text "Given type "
 ':<>: 'ShowType org
 ':<>: 'Text " has multiple constructors, therefore these constructors must have \
 \no fields")
 -- More constructors: recurse into both halves of the sum.
 AnalyzeFlatSumRep org (lhs :+: rhs) =
 'TChoose (AnalyzeFlatSumRep org lhs) (AnalyzeFlatSumRep org rhs)
 -- Something else
 AnalyzeFlatSumRep org other =
 TypeError ('Text "Given type "
 ':<>: 'ShowType org
 ':<>: 'Text " has an invalid constructor"
 ':$$: 'ShowType other)
-- | Information about a data type: either a record (exactly one constructor
-- with fields) or an enumeration (several fieldless constructors).
data KDataType
 = TRecord Meta Meta KRecord
 -- ^ Record
 | TFlatSum Meta KFlatSum
 -- ^ Enumeration
-- | Mappings between a 'G.Generic' representation and our 'KDataType'-based representation
class GDataType (dat :: KDataType) where
 -- | 'Generic' representation
 type DataTypeRep dat :: * -> *
 -- | 'KDataType'-based representation
 data DataType dat
 -- | From 'Generic' representation
 toDataType :: DataTypeRep dat x -> DataType dat
 -- | To 'Generic' representation
 fromDataType :: DataType dat -> DataTypeRep dat x
-- | With single constructor: unwrap the D1/C1 metadata layers and delegate
-- to the 'GRecord' machinery for the selector contents.
instance (GRecord rec) => GDataType ('TRecord d c rec) where
 type DataTypeRep ('TRecord d c rec) = D1 d (C1 c (RecordRep rec))
 data DataType ('TRecord d c rec) = Record (Record rec)
 toDataType (M1 (M1 rec)) = Record (toRecord rec)
 fromDataType (Record rec) = M1 (M1 (fromRecord rec))
deriving instance (Show (Record rec)) => Show (DataType ('TRecord d c rec))
-- | With multiple constructors: unwrap the D1 metadata layer and delegate
-- to the 'GFlatSum' machinery for the constructor sum.
instance (GFlatSum enum) => GDataType ('TFlatSum d enum) where
 type DataTypeRep ('TFlatSum d enum) = D1 d (FlatSumRep enum)
 data DataType ('TFlatSum d enum) = FlatSum (FlatSum enum)
 toDataType (M1 enum) = FlatSum (toFlatSum enum)
 fromDataType (FlatSum flatSum) = M1 (fromFlatSum flatSum)
deriving instance (Show (FlatSum enum)) => Show (DataType ('TFlatSum d enum))
-- | Analyze the 'Generic' representation of a data type. If only one constructor exists, further
-- analyzing is delegated to 'AnalyzeRecordRep'. When two or more exist, analyzing is performed by
-- 'AnalyzeFlatSumRep'. The results are gather in a 'KDataType' instance.
type family AnalyzeDataType org (dat :: * -> *) :: KDataType where
 -- Single constructor: treat as a record of its selectors.
 AnalyzeDataType org (D1 meta1 (C1 meta2 sel)) =
 'TRecord meta1 meta2 (AnalyzeRecordRep org sel)
 -- Multiple constructors: treat as a flat (fieldless) sum.
 AnalyzeDataType org (D1 meta (lhs :+: rhs)) =
 'TFlatSum meta (AnalyzeFlatSumRep org (lhs :+: rhs))
 -- Missing constructor(s): empty data types (V1) are rejected.
 AnalyzeDataType org (D1 meta V1) =
 TypeError ('Text "Given type "
 ':<>: 'ShowType org
 ':<>: 'Text " must have a constructor")
 -- Data type with constructor(s) that does not match the given patterns
 AnalyzeDataType org (D1 meta other) =
 TypeError ('Text "Given type "
 ':<>: 'ShowType org
 ':<>: 'Text " has an invalid constructor"
 ':$$: 'ShowType other)
 -- Something else
 AnalyzeDataType org other =
 TypeError ('Text "Given type "
 ':<>: 'ShowType org
 ':<>: 'Text " is not a valid data type"
 ':$$: 'ShowType other)
-- | 'KDataType' representation of a data type, derived from its GHC 'G.Rep'.
type Rep a = AnalyzeDataType a (G.Rep a)
-- | Make sure @a@ has a safe generic representation. Types that qualify implement 'G.Generic' (GHC)
-- and fulfill one of the following criteria:
--
-- * single constructor with 1 or more fields
-- * multiple constructors with no fields
--
-- This constraint is mostly utilized to give the user more information about why their type has
-- been rejected.
type Generic a = (G.Generic a, GDataType (Rep a), DataTypeRep (Rep a) ~ G.Rep a)
-- | Convert to generic representation.
fromGeneric :: (Generic a) => a -> DataType (Rep a)
fromGeneric = toDataType . G.from
-- | Build from generic representation.
toGeneric :: (Generic a) => DataType (Rep a) -> a
toGeneric = G.to . fromDataType
| vapourismo/pg-store | src/Database/PostgreSQL/Store/Generics.hs | bsd-3-clause | 9,387 | 260 | 10 | 2,000 | 2,597 | 1,421 | 1,176 | 150 | 1 |
module TestPositiveP(test_group) where
import qualified Wigner.Symbols as S
import qualified Wigner.DefineExpression as D
import qualified Wigner.Transformations as T
import Wigner.Expression
import Wigner.Complex
import Wigner.Texable
import Wigner.Deltas
import Data.Ratio
import qualified Data.Map as M
import Test.Framework (testGroup)
import Test.Framework.Providers.HUnit
import Test.HUnit
-- Density-matrix operator used as the subject of the transformations.
s_rho = S.symbol "\\rho"
rho = D.operator s_rho
-- mode symbols
s_a = S.symbol "a"
s_alpha = S.symbol "\\alpha"
s_beta = S.symbol "\\beta"
-- Correspondence: operator a maps to phase-space variables (alpha, beta).
corr = M.fromList [(s_a, (s_alpha, s_beta))]
a = D.operator s_a
alpha = D.constant s_alpha
beta = D.constant s_beta
d_alpha = D.differential s_alpha
d_beta = D.differential s_beta
-- functional symbols
s_psi = S.symbol "\\Psi"
s_phi = S.symbol "\\Phi"
-- NOTE(review): Psi maps to (Psi, Phi) — the field operator reuses its own
-- symbol for the first phase-space function; confirm against Wigner.Transformations.
func_corr = M.fromList [(s_psi, (s_psi, s_phi))]
x = Func (Element S.x [] [])
psi_op = D.operatorFunc s_psi [x]
psi = D.function s_psi [x]
phi = D.function s_phi [x]
d_psi = D.differentialFuncIx s_psi [] [x]
d_phi = D.differentialFuncIx s_phi [] [x]
-- Constants appearing in the test Hamiltonians.
k = D.constant (S.symbol "\\mathcal{K}")
gamma = D.constant (S.symbol "\\Gamma")
-- Operator commutator [x, y] = xy - yx.
commutator x y = x * y - y * x
-- Positive-P transformations for the single-mode and functional cases.
transform = T.positivePTransformation corr s_rho
func_transform = T.positivePTransformation func_corr s_rho
-- Linear Hamiltonian (a† a): the derived FPE should contain only first-order
-- drift terms. Comparison is done on the rendered TeX of both expressions.
test_linear = (showTex fpe) @?= (showTex result) where
 hamiltonian = dagger a * a
 fpe = transform $ commutator hamiltonian rho
 result = -d_alpha * alpha + d_beta * beta
-- Taken from Steel et al, 1998
test_functional = (showTex fpe) @?= (showTex result) where
 hamiltonian = dagger psi_op * k * psi_op + gamma * (dagger psi_op)^2 * psi_op^2 / 2
 fpe = func_transform $ -D.i * (commutator hamiltonian rho)
 result = -d_psi * (-D.i * (k * psi + gamma * psi^2 * phi)) +
 d_psi^2 * (-D.i * gamma * psi^2) / 2 -
 d_phi * (D.i * (k * phi + gamma * phi^2 * psi)) +
 d_phi^2 * (D.i * gamma * phi^2) / 2
-- Group both cases under a single test-framework node.
test_group = testGroup "Positive-P transformations" [
 testCase "linear" test_linear,
 testCase "functional" test_functional
 ]
| fjarri/wigner | test/TestPositiveP.hs | bsd-3-clause | 2,055 | 0 | 25 | 388 | 791 | 425 | 366 | 52 | 1 |
-- |
-- Many time structures such as 'Score' allows for rests between notes. Generally rests
-- are simply treated as blank space, and thus have no duration. Sometimes it is useful
-- to represent rests explicitly, so this module provides an alias for 'pure' 'Nothing' that
-- can be used to that end.
--
-- To remove rests from a score, use 'mcatMaybes', for example:
--
-- > open $ mcatMaybes $ scat [c,d,rest^*2,e]^/8
--
module Music.Time.Rest (
-- * Rests
rest,
) where
import Control.Applicative
import Music.Time.Juxtapose
-- | An explicit rest: 'pure' 'Nothing' in any 'Applicative'. Remove rests
-- from a structure again with @mcatMaybes@ (see the module header).
rest :: Applicative f => f (Maybe a)
rest = pure Nothing
-- TODO overload a la IsPitch (bottom instances for ()/Maybe, transformed like IsPitch)
-- Or remove?
| music-suite/music-score | src/Music/Time/Rest.hs | bsd-3-clause | 730 | 0 | 8 | 158 | 66 | 43 | 23 | 6 | 1 |
{-# OPTIONS_GHC -Wall -fno-warn-hi-shadowing -fno-warn-unused-do-bind -fno-warn-name-shadowing #-}
module REPL where
import Control.Monad
import System.Environment (getArgs)
import System.IO
import Eval
--import Env
import Types
import Parser
import Prim
import Control.Monad.IO.Class (liftIO)
import System.Console.Haskeline
-- | Write a string to stdout and flush immediately, so prompts show up
-- even without a trailing newline.
flushStr :: String -> IO ()
flushStr text = putStr text >> hFlush stdout
-- | Display a (flushed) prompt, then read one line of user input.
readPrompt :: String -> IO String
readPrompt promptText = flushStr promptText >> getLine
-- | Parse and evaluate one expression in the given environment, rendering
-- either the resulting value or the thrown error as a 'String'.
-- Evaluation starts from the empty continuation 'EndCont'.
evalString :: Env -> String -> IO String
evalString env expr =
 runIOThrows $ liftM show $ readExpr expr >>= (\expr -> eval' env expr EndCont)
-- | Evaluate an expression and print its rendered result on stdout.
evalAndPrint :: Env -> String -> IO ()
evalAndPrint env expr = putStrLn =<< evalString env expr
--runOne :: [String] -> IO ()
--runOne args = do
-- env <- primitiveBindings >>= flip bindVars [("args", List $ map String $ drop 1 args)]
-- r <- runIOThrows $ liftM show $ eval env (List [Symbol "load", String (head args)]) EndCont
-- hPutStrLn stderr r
-- | Repeatedly run @source@ and feed each result to @handler@, stopping as
-- soon as a result satisfies the predicate (that result is NOT handled).
until_ :: Monad m => (a -> Bool) -> m a -> (a -> m ()) -> m ()
until_ stop source handler = go
  where
    go = do
      result <- source
      if stop result
        then return ()
        else handler result >> go
-- | Offer completions for the given prefix: every primitive whose name
-- starts with it (special forms are not completed yet).
completeFun :: Monad m => String -> m [Completion]
completeFun prefix = return (map simpleCompletion (matching ++ specialForms))
  where
    matching = [name | (name, _) <- primitives, take (length prefix) name == prefix]
    specialForms = [] -- TODO: also complete special forms
-- | Haskeline completion function: complete the word under the cursor,
-- treating whitespace and Scheme delimiters (parens, quotes, @#@, @%@) as word breaks.
wordComplete :: Monad m => CompletionFunc m
wordComplete = completeWord Nothing " \t()\"\'#%" completeFun
runREPL :: IO ()
runREPL = do
args <- getArgs
env <- primitiveBindings
if null args then
runInputT (setComplete wordComplete defaultSettings) (loop env)
else
return ()
where loop :: Env -> InputT IO ()
loop env = do
minput <- getInputLine "λ> "
case minput of
Nothing -> return ()
Just "quit" -> return ()
Just input -> do liftIO $ evalAndPrint env input
loop env | osa1/toylisp | src/REPL.hs | bsd-3-clause | 2,031 | 0 | 15 | 503 | 611 | 304 | 307 | 49 | 4 |
-- | Simulates the @isJavaIdentifierStart@ Java method. <http://docs.oracle.com/javase/6/docs/api/java/lang/Character.html#isJavaIdentifierStart%28int%29>
module Language.Java.Character.IsJavaIdentifierStart
(
IsJavaIdentifierStart(..)
) where
import Data.Char
import Data.Word
import Data.Set.Diet(Diet)
import qualified Data.Set.Diet as S
-- | Instances simulate Java characters and provide a decision on simulating @isJavaIdentifierStart@.
class Enum c => IsJavaIdentifierStart c where
 -- | True when the value may begin a Java identifier.
 isJavaIdentifierStart ::
 c
 -> Bool
 -- | Negation of 'isJavaIdentifierStart'; has a default implementation.
 isNotJavaIdentifierStart ::
 c
 -> Bool
 isNotJavaIdentifierStart =
 not . isJavaIdentifierStart
-- 'Char' is converted to its code point with 'ord'; the numeric instances
-- treat the value itself as a Unicode code point and look it up directly.
instance IsJavaIdentifierStart Char where
 isJavaIdentifierStart c =
 ord c `S.member` isJavaIdentifierStartSet
instance IsJavaIdentifierStart Int where
 isJavaIdentifierStart c =
 c `S.member` isJavaIdentifierStartSet
instance IsJavaIdentifierStart Integer where
 isJavaIdentifierStart c =
 c `S.member` isJavaIdentifierStartSet
instance IsJavaIdentifierStart Word8 where
 isJavaIdentifierStart c =
 c `S.member` isJavaIdentifierStartSet
instance IsJavaIdentifierStart Word16 where
 isJavaIdentifierStart c =
 c `S.member` isJavaIdentifierStartSet
instance IsJavaIdentifierStart Word32 where
 isJavaIdentifierStart c =
 c `S.member` isJavaIdentifierStartSet
instance IsJavaIdentifierStart Word64 where
 isJavaIdentifierStart c =
 c `S.member` isJavaIdentifierStartSet
isJavaIdentifierStartSet ::
(Num a, Enum a, Ord a) =>
Diet a
isJavaIdentifierStartSet =
let r = [
[36]
, [65..90]
, [95]
, [97..122]
, [162..165]
, [170]
, [181]
, [186]
, [192..214]
, [216..246]
, [248..566]
, [592..705]
, [710..721]
, [736..740]
, [750]
, [890]
, [902]
, [904..906]
, [908]
, [910..929]
, [931..974]
, [976..1013]
, [1015..1019]
, [1024..1153]
, [1162..1230]
, [1232..1269]
, [1272..1273]
, [1280..1295]
, [1329..1366]
, [1369]
, [1377..1415]
, [1488..1514]
, [1520..1522]
, [1569..1594]
, [1600..1610]
, [1646..1647]
, [1649..1747]
, [1749]
, [1765..1766]
, [1774..1775]
, [1786..1788]
, [1791]
, [1808]
, [1810..1839]
, [1869..1871]
, [1920..1957]
, [1969]
, [2308..2361]
, [2365]
, [2384]
, [2392..2401]
, [2437..2444]
, [2447..2448]
, [2451..2472]
, [2474..2480]
, [2482]
, [2486..2489]
, [2493]
, [2524..2525]
, [2527..2529]
, [2544..2547]
, [2565..2570]
, [2575..2576]
, [2579..2600]
, [2602..2608]
, [2610..2611]
, [2613..2614]
, [2616..2617]
, [2649..2652]
, [2654]
, [2674..2676]
, [2693..2701]
, [2703..2705]
, [2707..2728]
, [2730..2736]
, [2738..2739]
, [2741..2745]
, [2749]
, [2768]
, [2784..2785]
, [2801]
, [2821..2828]
, [2831..2832]
, [2835..2856]
, [2858..2864]
, [2866..2867]
, [2869..2873]
, [2877]
, [2908..2909]
, [2911..2913]
, [2929]
, [2947]
, [2949..2954]
, [2958..2960]
, [2962..2965]
, [2969..2970]
, [2972]
, [2974..2975]
, [2979..2980]
, [2984..2986]
, [2990..2997]
, [2999..3001]
, [3065]
, [3077..3084]
, [3086..3088]
, [3090..3112]
, [3114..3123]
, [3125..3129]
, [3168..3169]
, [3205..3212]
, [3214..3216]
, [3218..3240]
, [3242..3251]
, [3253..3257]
, [3261]
, [3294]
, [3296..3297]
, [3333..3340]
, [3342..3344]
, [3346..3368]
, [3370..3385]
, [3424..3425]
, [3461..3478]
, [3482..3505]
, [3507..3515]
, [3517]
, [3520..3526]
, [3585..3632]
, [3634..3635]
, [3647..3654]
, [3713..3714]
, [3716]
, [3719..3720]
, [3722]
, [3725]
, [3732..3735]
, [3737..3743]
, [3745..3747]
, [3749]
, [3751]
, [3754..3755]
, [3757..3760]
, [3762..3763]
, [3773]
, [3776..3780]
, [3782]
, [3804..3805]
, [3840]
, [3904..3911]
, [3913..3946]
, [3976..3979]
, [4096..4129]
, [4131..4135]
, [4137..4138]
, [4176..4181]
, [4256..4293]
, [4304..4344]
, [4352..4441]
, [4447..4514]
, [4520..4601]
, [4608..4614]
, [4616..4678]
, [4680]
, [4682..4685]
, [4688..4694]
, [4696]
, [4698..4701]
, [4704..4742]
, [4744]
, [4746..4749]
, [4752..4782]
, [4784]
, [4786..4789]
, [4792..4798]
, [4800]
, [4802..4805]
, [4808..4814]
, [4816..4822]
, [4824..4846]
, [4848..4878]
, [4880]
, [4882..4885]
, [4888..4894]
, [4896..4934]
, [4936..4954]
, [5024..5108]
, [5121..5740]
, [5743..5750]
, [5761..5786]
, [5792..5866]
, [5870..5872]
, [5888..5900]
, [5902..5905]
, [5920..5937]
, [5952..5969]
, [5984..5996]
, [5998..6000]
, [6016..6067]
, [6103]
, [6107..6108]
, [6176..6263]
, [6272..6312]
, [6400..6428]
, [6480..6509]
, [6512..6516]
, [7424..7531]
, [7680..7835]
, [7840..7929]
, [7936..7957]
, [7960..7965]
, [7968..8005]
, [8008..8013]
, [8016..8023]
, [8025]
, [8027]
, [8029]
, [8031..8061]
, [8064..8116]
, [8118..8124]
, [8126]
, [8130..8132]
, [8134..8140]
, [8144..8147]
, [8150..8155]
, [8160..8172]
, [8178..8180]
, [8182..8188]
, [8255..8256]
, [8276]
, [8305]
, [8319]
, [8352..8369]
, [8450]
, [8455]
, [8458..8467]
, [8469]
, [8473..8477]
, [8484]
, [8486]
, [8488]
, [8490..8493]
, [8495..8497]
, [8499..8505]
, [8509..8511]
, [8517..8521]
, [8544..8579]
, [12293..12295]
, [12321..12329]
, [12337..12341]
, [12344..12348]
, [12353..12438]
, [12445..12447]
, [12449..12543]
, [12549..12588]
, [12593..12686]
, [12704..12727]
, [12784..12799]
, [13312..19893]
, [19968..40869]
, [40960..42124]
, [44032..55203]
, [63744..64045]
, [64048..64106]
, [64256..64262]
, [64275..64279]
, [64285]
, [64287..64296]
, [64298..64310]
, [64312..64316]
, [64318]
, [64320..64321]
, [64323..64324]
, [64326..64433]
, [64467..64829]
, [64848..64911]
, [64914..64967]
, [65008..65020]
, [65075..65076]
, [65101..65103]
, [65129]
, [65136..65140]
, [65142..65276]
, [65284]
, [65313..65338]
, [65343]
, [65345..65370]
, [65381..65470]
, [65474..65479]
, [65482..65487]
, [65490..65495]
, [65498..65500]
, [65504..65505]
, [65509..65510]
, [65536..65547]
, [65549..65574]
, [65576..65594]
, [65596..65597]
, [65599..65613]
, [65616..65629]
, [65664..65786]
, [66304..66334]
, [66352..66378]
, [66432..66461]
, [66560..66717]
, [67584..67589]
, [67592]
, [67594..67637]
, [67639..67640]
, [67644]
, [67647]
, [119808..119892]
, [119894..119964]
, [119966..119967]
, [119970]
, [119973..119974]
, [119977..119980]
, [119982..119993]
, [119995]
, [119997..120003]
, [120005..120069]
, [120071..120074]
, [120077..120084]
, [120086..120092]
, [120094..120121]
, [120123..120126]
, [120128..120132]
, [120134]
, [120138..120144]
, [120146..120483]
, [120488..120512]
, [120514..120538]
, [120540..120570]
, [120572..120596]
, [120598..120628]
, [120630..120654]
, [120656..120686]
, [120688..120712]
, [120714..120744]
, [120746..120770]
, [120772..120777]
, [131072..173782]
, [194560..195101]
]
in S.fromList . concat $ r | tonymorris/java-character | src/Language/Java/Character/IsJavaIdentifierStart.hs | bsd-3-clause | 9,935 | 0 | 10 | 4,357 | 2,916 | 1,817 | 1,099 | 385 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.AmountOfMoney.SV.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.AmountOfMoney.Types
import Duckling.Locale
import Duckling.Resolve
import Duckling.Testing.Types
-- | Swedish (SV) corpus for AmountOfMoney: test context with the SV locale,
-- default options, and the example set below.
corpus :: Corpus
corpus = (testContext {locale = makeLocale SV Nothing}, testOptions, allExamples)
-- | Positive examples: each group pairs an expected parse (currency + amount)
-- with the Swedish surface forms that should produce it.
allExamples :: [Example]
allExamples = concat
 [ examples (simple Dollar 10)
 [ "$10"
 , "10$"
 , "tio dollar"
 ]
 , examples (simple Cent 10)
 [ "tio öre"
 ]
 , examples (simple Dollar 10000)
 [ "$10.000"
 , "10K$"
 , "$10k"
 ]
 , examples (simple USD 1.23)
 [ "USD1,23"
 ]
 , examples (simple SEK 10)
 [ "10kronor"
 , "10kr"
 , "10 kr"
 , "tio kronor"
 , "10 SEK"
 ]
 , examples (simple SEK 2.23)
 [ "2 kronor och 23 öre"
 , "två kronor 23 öre"
 , "två kronor och 23 öre"
 ]
 , examples (simple EUR 20)
 [ "20€"
 , "20 euro"
 , "20 Euro"
 , "20 Euros"
 , "EUR 20"
 ]
 , examples (simple EUR 29.99)
 [ "EUR29,99"
 ]
 , examples (simple INR 20)
 [ "Rs. 20"
 , "Rs 20"
 , "20 Rupees"
 , "20Rs"
 , "Rs20"
 ]
 , examples (simple INR 20.43)
 [ "20 Rupees 43"
 , "tjugo rupees 43"
 ]
 , examples (simple INR 33)
 [ "INR33"
 ]
 , examples (simple Pound 9)
 [ "£9"
 , "nio pund"
 ]
 , examples (simple GBP 3.01)
 [ "GBP3,01"
 , "GBP 3,01"
 ]
 , examples (simple NOK 10)
 [ "10 norska kronor"
 , "10 nkr"
 ]
 ]
| facebookincubator/duckling | Duckling/AmountOfMoney/SV/Corpus.hs | bsd-3-clause | 2,181 | 0 | 9 | 956 | 429 | 246 | 183 | 63 | 1 |
------------------------------------------------------------------------------
-- | This module provides config info for globally assumed/known url & names
module Config.Locations
-- export all
where
------------------------------------------------------------------------------
-- import Data.SafeCopy
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- Handler name entry points
-- | Name of the \"edit blog\" handler entry point.
handlernameEditBlog :: String
handlernameEditBlog = "editBlog"
------------------------------------------------------------------------------
-- Handler param name
-- | Name of the request parameter carrying the blog identifier.
handlernameBlogId :: String
handlernameBlogId = "blogId"
------------------------------------------------------------------------------
-- Utils
-- | Make a handler name top level: prefix a @\/@ unless one is already present.
handlernameMkTop :: String -> String
handlernameMkTop name =
  case name of
    '/' : _ -> name
    _       -> '/' : name
| atzedijkstra/blog-server | src/Config/Locations.hs | bsd-3-clause | 990 | 0 | 8 | 111 | 80 | 52 | 28 | 8 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# OPTIONS_GHC -fno-warn-partial-type-signatures #-}
module Haskell.Ide.IdeBackend
(idebackendDescriptor
) where
import Control.Concurrent
import Control.Concurrent.STM
import Control.Monad.IO.Class
import Data.Aeson
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Haskell.Ide.Engine.ExtensibleState
import Haskell.Ide.Engine.MonadFunctions
import Haskell.Ide.Engine.PluginDescriptor
import Haskell.Ide.Engine.PluginUtils
import Haskell.Ide.Engine.SemanticTypes
import IdeSession
import IdeSession.Util.Logger
import Language.Haskell.GhcMod.Cradle
import Language.Haskell.GhcMod.GhcPkg
import Language.Haskell.GhcMod.Monad.Types hiding (liftIO)
import Language.Haskell.GhcMod.Types hiding (liftIO,ModuleName)
import System.FilePath
import System.Log.FastLogger
-- | Plugin descriptor: registers a single @type@ command for @.hs@ files,
-- taking a region context; no services are exposed or consumed.
idebackendDescriptor :: TaggedPluginDescriptor _
idebackendDescriptor = PluginDescriptor
 {
 pdUIShortName = "ide-backend"
 , pdUIOverview = "HIE plugin for ide-backend"
 , pdCommands =
 buildCommand typeCmd (Proxy :: Proxy "type") "type" [".hs"] (SCtxRegion :& RNil) RNil
 :& RNil
 , pdExposedServices = []
 , pdUsedServices = []
 }
-- | Get the type for a region in a file. Extracts file/start/end parameters,
-- sends a 'Type' request to the (lazily started) worker process over its
-- input channel, and waits synchronously for the worker's reply.
typeCmd :: CommandFunc TypeInfo
typeCmd =
 CmdSync $
 \_ctxs req ->
 case getParams (IdFile "file" :& IdPos "start_pos" :& IdPos "end_pos" :&
 RNil)
 req of
 Left err -> return err
 Right (ParamFile filename :& ParamPos startPos :& ParamPos endPos :& RNil) ->
 do SubProcess cin cout _tid <- ensureProcessRunning filename
 liftIO $
 atomically $
 writeTChan cin
 (Type filename startPos endPos)
 response <- liftIO $ atomically $ readTChan cout
 case response of
 TypeResp typeinfo -> return (IdeResponseOk typeinfo)
 ErrorResp error' ->
 return (IdeResponseError (IdeError PluginError error' Null))
 -- Unreachable: getParams already guarantees the parameter shape above.
 Right _ ->
 return (IdeResponseError
 (IdeError InternalError
 "IdeBackendPlugin.typesCmd: ghc’s exhaustiveness checker is broken"
 Null))
-- No worker is running until the first request arrives.
instance ExtensionClass AsyncPluginState where
 initialValue = APS Nothing
-- | Holds the worker process needed to cache the `IdeSession`
data AsyncPluginState = APS (Maybe SubProcess)
-- | Commands send to the worker process
data WorkerCmd = Type T.Text Pos Pos deriving (Show)
-- | Responses from the worker process
data WorkerResponse = TypeResp TypeInfo | ErrorResp T.Text
-- | The state for a worker process, consisting of two communicating
-- channels and the `ThreadId`
data SubProcess = SubProcess
 { spChIn :: TChan WorkerCmd
 , spChOut :: TChan WorkerResponse
 , spProcess :: ThreadId
 }
-- | Try to find an already running process or start a new one if it
-- doesn’t already exist. The worker (and its channels) is cached in the
-- plugin's extensible state, so at most one is ever started.
ensureProcessRunning :: T.Text -> IdeM SubProcess
ensureProcessRunning filename =
 do (APS v) <- get -- from extensible state
 case v of
 Nothing ->
 do
 -- Get the required packagedbs from ghc-mod
 -- This won’t be necessary once we switch to one hie instance per project
 cradle' <- findCradle' (takeDirectory (T.unpack filename))
 pkgdbs <-
 gmeLocal (\(GhcModEnv opts _) -> GhcModEnv opts cradle') getPackageDbStack
 cin <- liftIO $ atomically newTChan
 cout <- liftIO $ atomically newTChan
 tid <- liftIO $ forkIO (workerProc pkgdbs cin cout)
 let v' =
 SubProcess {spChIn = cin
 ,spChOut = cout
 ,spProcess = tid}
 put (APS (Just v')) -- into extensible state
 return v'
 Just v' -> return v'
-- | Log function to get ide-backend to use our logger. Location and source
-- are dropped; only the level and the rendered message are forwarded.
logFunc :: LogFunc
logFunc _loc _source level logStr =
 logOtherm level (T.decodeUtf8 $ fromLogStr logStr)
-- | Long running worker process responsible for processing the commands.
-- Initializes one 'IdeSession' (with code generation enabled and the given
-- package databases prepended with the global one), then loops forever
-- reading 'WorkerCmd's from @cin@ and answering on @cout@.
workerProc :: [GhcPkgDb] -> TChan WorkerCmd -> TChan WorkerResponse -> IO ()
workerProc pkgdbs cin cout =
 do session <-
 initSessionWithCallbacks
 (IdeCallbacks logFunc)
 (defaultSessionInitParams {sessionInitTargets = TargetsInclude []})
 (defaultSessionConfig {configLocalWorkingDir = Nothing
 ,configLog = debugm
 ,configPackageDBStack = (GlobalPackageDB:) $ map convPkgDb pkgdbs})
 updateSession session
 (updateCodeGeneration True)
 (debugm . show)
 -- The counter only tracks how many requests have been served.
 let loop :: Int -> IO ()
 loop cnt =
 do debugm "workerProc:top of loop"
 req <- liftIO $ atomically $ readTChan cin
 debugm $ "workerProc loop:got:" ++ show req
 case req of
 Type file startPos endPos ->
 do liftIO $
 handleTypeInfo session cout file startPos endPos
 loop (cnt + 1)
 loop 1
-- | Translate a package database from ghc-mod's representation to the
-- one used by ide-backend\/Cabal.
convPkgDb :: GhcPkgDb -> PackageDB
convPkgDb db =
  case db of
    GlobalDb       -> GlobalPackageDB
    UserDb         -> UserPackageDB
    PackageDb path -> SpecificPackageDB path
-- | Find the type for a region in a file. Add the supplied file to
-- the session targets. On source errors (or an unknown module) an
-- 'ErrorResp' is written to @cout@; otherwise every type span reported
-- for the region is converted to a 'TypeResult' and sent as 'TypeResp'.
handleTypeInfo :: IdeSession
 -> TChan WorkerResponse
 -> T.Text
 -> Pos
 -> Pos
 -> IO ()
handleTypeInfo session cout file (startLine,startCol) (endLine,endCol) =
 do updateSession session
 (updateTargets (TargetsInclude . pure $ T.unpack file))
 (debugm . show)
 errors <- getSourceErrors session
 case errors of
 (_:_) -> atomically . writeTChan cout $ ErrorResp (T.pack $ show errors)
 [] ->
 do filemap <- getFileMap session
 expTypes <- getExpTypes session
 case filemap (T.unpack file) of
 Nothing ->
 atomically . writeTChan cout $ ErrorResp "No module found"
 Just mod' ->
 case expTypes (moduleName mod')
 SourceSpan {spanFilePath = T.unpack file
 ,spanFromLine = startLine
 ,spanToLine = endLine
 ,spanFromColumn = startCol
 ,spanToColumn = endCol} of
 ts ->
 atomically . writeTChan cout . TypeResp . TypeInfo $
 map toTypeResult ts
 where toTypeResult
 :: (SourceSpan,T.Text) -> TypeResult
 toTypeResult (SourceSpan{..},t) =
 TypeResult {trStart = (spanFromLine,spanFromColumn)
 ,trEnd = (spanToLine,spanToColumn)
 ,trText = t}
| JPMoresmau/haskell-ide-engine | hie-ide-backend/Haskell/Ide/IdeBackend.hs | bsd-3-clause | 7,323 | 0 | 22 | 2,390 | 1,551 | 813 | 738 | 153 | 4 |
module Foo.Used(used) where
-- | The unit value; the module's sole export.
used = ()
| ndmitchell/weeder | test/foo/src/Foo/Used.hs | bsd-3-clause | 40 | 0 | 5 | 8 | 18 | 11 | 7 | 2 | 1 |
-- | This module modifies material in Renzo Carbonara\'s <http://hackage.haskell.org/package/pipes-zlib pipes-zlib> package.
module Streaming.Zip (
-- * Streams
decompress
, decompress'
, compress
, gunzip
, gunzip'
, gzip
-- * Compression levels
, CompressionLevel
, defaultCompression
, noCompression
, bestSpeed
, bestCompression
, compressionLevel
-- * Window size
-- $ccz-re-export
, Z.defaultWindowBits
, windowBits
) where
import Data.Streaming.Zlib as Z
import Control.Exception (throwIO)
import Control.Monad (unless)
import qualified Data.ByteString as B
import Data.ByteString.Streaming
import Streaming
import qualified Data.ByteString.Streaming.Internal as I
import Data.ByteString.Streaming.Internal (ByteString (..))
--------------------------------------------------------------------------------
-- | Decompress a streaming bytestring. 'Z.WindowBits' is from "Codec.Compression.Zlib"
--
-- @
-- 'decompress' 'defaultWindowBits' :: 'MonadIO' m => 'ByteString' m r -> 'ByteString' m r
-- @
decompress
 :: MonadIO m
 => Z.WindowBits
 -> ByteString m r -- ^ Compressed stream
 -> ByteString m r -- ^ Decompressed stream
decompress wbits p0 = do
 inf <- liftIO $ Z.initInflate wbits
 -- Feed each compressed chunk to zlib and emit whatever output it produces.
 r <- for p0 $ \bs -> do
 popper <- liftIO (Z.feedInflate inf bs)
 fromPopper popper
 -- Emit any output zlib still buffered after the last chunk was fed.
 bs <- liftIO $ Z.finishInflate inf
 unless (B.null bs) (chunk bs)
 return r
{-# INLINABLE decompress #-}
-- | Decompress a zipped byte stream, returning any leftover input
-- that follows the compressed material.
decompress'
 :: MonadIO m
 => Z.WindowBits
 -> ByteString m r -- ^ Compressed byte stream
 -> ByteString m (Either (ByteString m r) r)
 -- ^ Decompressed byte stream, ending with either leftovers or a result
decompress' wbits p0 = go p0 =<< liftIO (Z.initInflate wbits)
 where
 -- Emit whatever complete output zlib has buffered so far.
 flush inf = do
 bs <- liftIO $ Z.flushInflate inf
 unless (B.null bs) (chunk bs)
 -- Feed chunks one at a time; after each, check whether zlib reported
 -- unused trailing input, which marks the end of the compressed stream.
 go p inf = do
 res <- lift (nextChunk p)
 case res of
 Left r -> return $ Right r
 Right (bs, p') -> do
 fromPopper =<< liftIO (Z.feedInflate inf bs)
 flush inf
 leftover <- liftIO $ Z.getUnusedInflate inf
 if B.null leftover
 then go p' inf
 else return $ Left (chunk leftover >> p')
{-# INLINABLE decompress' #-}
-- | Compress a byte stream.
--
-- See the "Codec.Compression.Zlib" module for details about
-- 'Z.CompressionLevel' and 'Z.WindowBits'.
-- @
-- 'compress' 'defaultCompression' 'defaultWindowBits' :: 'MonadIO' m => 'ByteString' m r -> 'ByteString' m r
-- @
--
compress
 :: MonadIO m
 => CompressionLevel
 -> Z.WindowBits
 -> ByteString m r -- ^ Decompressed stream
 -> ByteString m r -- ^ Compressed stream
compress (CompressionLevel clevel) wbits p0 = do
 def <- liftIO $ Z.initDeflate clevel wbits
 -- Walk the stream chunk by chunk, feeding each to the deflater and
 -- emitting whatever output it yields, while preserving interleaved effects.
 let loop bs = case bs of
 I.Chunk c rest -> do
 popper <- liftIO (Z.feedDeflate def c)
 fromPopper popper
 loop rest
 I.Go m -> I.Go (liftM loop m)
 I.Empty r -> return r
 r <- loop p0
 -- Flush the deflater's remaining buffered output.
 fromPopper $ Z.finishDeflate def
 return r
{-# INLINABLE compress #-}
--------------------------------------------------------------------------------
-- $ccz-re-export
--
-- The following are re-exported from "Codec.Compression.Zlib" for your
-- convenience.
--------------------------------------------------------------------------------
-- Compression Levels
-- | How hard should we try to compress? Wraps the raw zlib level; @-1@
-- is zlib's default, @0@ is no compression, @9@ is maximum compression.
newtype CompressionLevel = CompressionLevel Int
 deriving (Show, Read, Eq, Ord)
defaultCompression, noCompression, bestSpeed, bestCompression :: CompressionLevel
defaultCompression = CompressionLevel (-1)
noCompression = CompressionLevel 0
bestSpeed = CompressionLevel 1
bestCompression = CompressionLevel 9
-- | A specific compression level between 0 and 9; anything outside that
-- range raises an 'error'.
compressionLevel :: Int -> CompressionLevel
compressionLevel n =
  if n < 0 || n > 9
    then error "CompressionLevel must be in the range 0..9"
    else CompressionLevel n
-- | Wrap a raw zlib window-bits value. No validation is done here; see
-- 'Z.WindowBits' for the meaningful range.
windowBits :: Int -> WindowBits
windowBits = WindowBits
-- | Decompress a gzipped byte stream.
gunzip
 :: MonadIO m
 => ByteString m r -- ^ Compressed stream
 -> ByteString m r -- ^ Decompressed stream
gunzip = decompress gzWindowBits
{-# INLINABLE gunzip #-}
-- | Decompress a gzipped byte stream, returning any leftover input
-- that follows the compressed stream.
gunzip'
 :: MonadIO m
 => ByteString m r -- ^ Compressed byte stream
 -> ByteString m (Either (ByteString m r) r)
 -- ^ Decompressed bytes stream, returning either a 'ByteString' of
 -- the leftover input or the return value from the input 'ByteString'.
gunzip' = decompress' gzWindowBits
{-# INLINE gunzip' #-}
-- | Compress a byte stream in the gzip format.
gzip
 :: MonadIO m
 => CompressionLevel
 -> ByteString m r -- ^ Decompressed stream
 -> ByteString m r -- ^ Compressed stream
gzip clevel = compress clevel gzWindowBits
{-# INLINE gzip #-}
-- | Window bits for gzip: 31 = 15 (maximum window size) + 16, where adding
-- 16 selects zlib's gzip header\/trailer mode.
gzWindowBits :: Z.WindowBits
gzWindowBits = Z.WindowBits 31
--------------------------------------------------------------------------------
-- Internal stuff

-- | Run @op@ on every chunk of the stream, preserving the stream's
-- interleaved effects and its final result.
for bs0 op = go bs0
  where
    go (I.Chunk c rest) = op c >> go rest
    go (I.Go m)         = I.Go (liftM go m)
    go (I.Empty r)      = return r
{-# INLINABLE for #-}
-- | Produce values from the given 'Z.Popper' until exhausted.
-- Each popped chunk becomes a stream chunk; a popper error is rethrown
-- in IO via 'throwIO'.
fromPopper :: MonadIO m
 => Z.Popper
 -> ByteString m ()
fromPopper pop = loop
 where
 loop = do
 mbs <- liftIO pop
 case mbs of
 PRDone -> I.Empty ()
 PRError e -> I.Go (liftIO (throwIO e))
 PRNext bs -> I.Chunk bs loop
{-# INLINABLE fromPopper #-}
| michaelt/streaming-utils | Streaming/Zip.hs | bsd-3-clause | 5,912 | 0 | 19 | 1,446 | 1,278 | 654 | 624 | 122 | 3 |
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable, ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies, ConstraintKinds #-}
module Development.Shake.Internal.Rules.Oracle(
addOracle, addOracleCache, addOracleHash,
askOracle, askOracles
) where
import Development.Shake.Internal.Core.Types
import Development.Shake.Internal.Core.Rules
import Development.Shake.Internal.Options
import Development.Shake.Internal.Core.Build
import Development.Shake.Internal.Value
import Development.Shake.Classes
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as LBS
import Control.Monad
import Data.Binary
import General.Binary
import General.Extra
-- Use short type names, since the names appear in the Haddock, and are too long if they are in full
-- | Wrapper around the user's question type, used as the rule key.
newtype OracleQ question = OracleQ question
 deriving (Show,Typeable,Eq,Hashable,Binary,NFData)
-- | Wrapper around the user's answer type, used as the rule value.
newtype OracleA answer = OracleA answer
 deriving (Show,Typeable,Eq,Hashable,Binary,NFData)
fromOracleA :: OracleA a -> a
fromOracleA (OracleA x) = x
type instance RuleResult (OracleQ a) = OracleA (RuleResult a)
-- | Oracle variants: plain, shared-history-cached, or hash-only storage.
data Flavor = Norm | Cache | Hash deriving Eq
-- | Shared implementation behind the oracle variants. 'Norm' stores the full
-- encoded answer; 'Cache' additionally loads\/saves it via the shared history;
-- 'Hash' stores only the answer's hash. Returns 'askOracle' as the query function.
addOracleFlavor :: (Located, RuleResult q ~ a, ShakeValue q, ShakeValue a) => Flavor -> (q -> Action a) -> Rules (q -> Action a)
addOracleFlavor flavor act = do
 -- rebuild is automatic for oracles, skip just means we don't rebuild
 opts <- getShakeOptionsRules
 let skip = shakeRebuildApply opts "" == RebuildLater
 addBuiltinRule noLint (\_ v -> Just $ runBuilder $ putEx $ hash v) $ \(OracleQ q) old mode -> case old of
 -- Reuse the stored value when skipping, or for Cache when dependencies are unchanged.
 Just old | (flavor /= Hash && skip) || (flavor == Cache && mode == RunDependenciesSame) ->
 pure $ RunResult ChangedNothing old $ decode' old
 _ -> do
 -- can only use cmpHash if flavor == Hash
 let cmpValue new = if fmap decode' old == Just new then ChangedRecomputeSame else ChangedRecomputeDiff
 let cmpHash newHash = if old == Just newHash then ChangedRecomputeSame else ChangedRecomputeDiff
 cache <- if flavor == Cache then historyLoad 0 else pure Nothing
 case cache of
 Just newEncode -> do
 let new = decode' newEncode
 pure $ RunResult (cmpValue new) newEncode new
 Nothing -> do
 new <- OracleA <$> act q
 let newHash = encodeHash new
 let newEncode = encode' new
 when (flavor == Cache) $
 historySave 0 newEncode
 pure $
 if flavor == Hash
 then RunResult (cmpHash newHash) newHash new
 else RunResult (cmpValue new) newEncode new
 pure askOracle
 where
 -- Serialization helpers: hash-only vs full Binary round-trip.
 encodeHash :: Hashable a => a -> BS.ByteString
 encodeHash = runBuilder . putEx . hash
 encode' :: Binary a => a -> BS.ByteString
 encode' = BS.concat . LBS.toChunks . encode
 decode' :: Binary a => BS.ByteString -> a
 decode' = decode . LBS.fromChunks . pure
-- | Add extra information which rules can depend on.
-- An oracle is a function from a question type @q@, to an answer type @a@.
-- As an example, we can define an oracle allowing you to depend on the current version of GHC:
--
-- @
-- newtype GhcVersion = GhcVersion () deriving (Show,Typeable,Eq,Hashable,Binary,NFData)
-- type instance RuleResult GhcVersion = String
-- rules = do
-- 'addOracle' $ \\(GhcVersion _) -> 'Development.Shake.fromStdout' \<$\> 'Development.Shake.cmd' \"ghc --numeric-version\" :: Action String
-- ... rules ...
-- @
--
-- If a rule calls @'askOracle' (GhcVersion ())@, that rule will be rerun whenever the GHC version changes.
-- Some notes:
--
-- * We define @GhcVersion@ with a @newtype@ around @()@, allowing the use of @GeneralizedNewtypeDeriving@.
-- All the necessary type classes are exported from "Development.Shake.Classes".
--
-- * The @type instance@ requires the extension @TypeFamilies@.
--
-- * Each call to 'addOracle' must use a different type of question.
--
-- * Actions passed to 'addOracle' will be run in every build in which they are required, even if nothing else changes,
-- so be careful of slow actions.
-- If the result of an oracle does not change it will not invalidate any rules depending on it.
--   To always rerun file rules, see 'Development.Shake.alwaysRerun'.
--
-- As a more complex example, consider tracking Haskell package versions:
--
-- @
-- newtype GhcPkgList = GhcPkgList () deriving (Show,Typeable,Eq,Hashable,Binary,NFData)
-- type instance RuleResult GhcPkgList = [(String, String)]
-- newtype GhcPkgVersion = GhcPkgVersion String deriving (Show,Typeable,Eq,Hashable,Binary,NFData)
-- type instance RuleResult GhcPkgVersion = Maybe String
--
-- rules = do
-- getPkgList \<- 'addOracle' $ \\GhcPkgList{} -> do
-- Stdout out <- 'Development.Shake.cmd' \"ghc-pkg list --simple-output\"
-- pure [(reverse b, reverse a) | x <- words out, let (a,_:b) = break (== \'-\') $ reverse x]
--
-- getPkgVersion \<- 'addOracle' $ \\(GhcPkgVersion pkg) -> do
-- pkgs <- getPkgList $ GhcPkgList ()
-- pure $ lookup pkg pkgs
--
-- \"myrule\" %> \\_ -> do
-- getPkgVersion $ GhcPkgVersion \"shake\"
-- ... rule using the shake version ...
-- @
--
-- Using these definitions, any rule depending on the version of @shake@
-- should call @getPkgVersion $ GhcPkgVersion \"shake\"@ to rebuild when @shake@ is upgraded.
--
-- If you apply 'versioned' to an oracle it will cause that oracle result to be discarded, and not do early-termination.
addOracle :: (RuleResult q ~ a, ShakeValue q, ShakeValue a, Partial) => (q -> Action a) -> Rules (q -> Action a)
addOracle oracle = withFrozenCallStack (addOracleFlavor Norm oracle)
-- | An alternative to 'addOracle' that relies on the 'hash' function providing a perfect equality,
--   doesn't support @--skip@, but requires less storage.
addOracleHash :: (RuleResult q ~ a, ShakeValue q, ShakeValue a, Partial) => (q -> Action a) -> Rules (q -> Action a)
addOracleHash = withFrozenCallStack $ addOracleFlavor Hash
-- | A combination of 'addOracle' and 'newCache' - an action that only runs when its dependencies change,
-- whose result is stored in the database.
--
-- * Does the information need recomputing every time? e.g. looking up stuff in the environment?
--   If so, use 'addOracle' instead.
--
-- * Is the action mostly deserialising some file? If so, use 'newCache'.
--
-- * Is the operation expensive computation from other results? If so, use 'addOracleCache'.
--
-- An alternative to using 'addOracleCache' is introducing an intermediate file containing the result,
-- which requires less storage in the Shake database and can be inspected by existing file-system viewing
-- tools.
addOracleCache :: (RuleResult q ~ a, ShakeValue q, ShakeValue a, Partial) => (q -> Action a) -> Rules (q -> Action a)
addOracleCache = withFrozenCallStack $ addOracleFlavor Cache
-- | Get information previously added with 'addOracle' or 'addOracleCache'.
--   The question/answer types must match those provided previously.
askOracle :: (RuleResult q ~ a, ShakeValue q, ShakeValue a) => q -> Action a
askOracle question = fromOracleA <$> apply1 (OracleQ question)
-- | A parallel version of 'askOracle': all questions are applied in one
--   batch and the answers unwrapped.
askOracles :: (RuleResult q ~ a, ShakeValue q, ShakeValue a) => [q] -> Action [a]
askOracles questions = map fromOracleA <$> apply (map OracleQ questions)
| ndmitchell/shake | src/Development/Shake/Internal/Rules/Oracle.hs | bsd-3-clause | 7,623 | 0 | 23 | 1,755 | 1,241 | 675 | 566 | -1 | -1 |
module WordNumber where
import Data.List (intercalate)
-- | Spell a single decimal digit as an English word.
-- The argument must lie in 0..9; anything else raises the usual
-- list-index error, exactly as the original lookup did.
digitToWord :: Int -> String
digitToWord n = names !! n
  where
    names =
      [ "zero", "one", "two", "three", "four"
      , "five", "six", "seven", "eight", "nine"
      ]
-- | Decompose a number into its decimal digits, most significant first,
-- e.g. digits 1203 == [1,2,0,3] and digits 0 == [0].
-- NOTE(review): like the original, this does not terminate for negative
-- input (@div x 10@ never reaches 0); callers pass non-negative values.
digits :: Int -> [Int]
digits = reverse . leastSignificantFirst
  where
    leastSignificantFirst x
      | div x 10 == 0 = [mod x 10]
      | otherwise     = mod x 10 : leastSignificantFirst (div x 10)
-- | Render a number as its digits spelled out and joined with hyphens,
-- e.g. wordNumber 102 == "one-zero-two".
wordNumber :: Int -> String
wordNumber n = intercalate "-" [digitToWord d | d <- digits n]
| dmvianna/morse | src/WordNumber.hs | bsd-3-clause | 651 | 0 | 11 | 328 | 190 | 100 | 90 | 20 | 1 |
module Data.JSON.Schema.Generator.Types
( Schema (..)
, SchemaChoice (..)
, scString
, scInteger
, scNumber
, scBoolean
) where
import Data.Text (Text)
--------------------------------------------------------------------------------
-- | A schema for a JSON value.
--
data Schema =
    -- | A top-level schema document, carrying its id, the schema
    --   dialect in use, the root schema and named definitions.
    SCSchema
    { scId          :: !Text
    , scUsedSchema  :: !Text
    , scSchemaType  :: !Schema
    , scDefinitions :: ![(Text, Schema)]
    }
    -- | A string, optionally with a format and length bounds.
  | SCString
    { scDescription :: !(Maybe Text)
    , scNullable    :: !Bool
    , scFormat      :: !(Maybe Text)
    , scLowerBound  :: !(Maybe Integer)
    , scUpperBound  :: !(Maybe Integer)
    }
    -- | An integer, optionally bounded.
  | SCInteger
    { scDescription :: !(Maybe Text)
    , scNullable    :: !Bool
    , scLowerBound  :: !(Maybe Integer)
    , scUpperBound  :: !(Maybe Integer)
    }
    -- | A number, optionally bounded.
  | SCNumber
    { scDescription :: !(Maybe Text)
    , scNullable    :: !Bool
    , scLowerBound  :: !(Maybe Integer)
    , scUpperBound  :: !(Maybe Integer)
    }
    -- | A boolean.
  | SCBoolean
    { scDescription :: !(Maybe Text)
    , scNullable    :: !Bool
    }
    -- | A constant value.
  | SCConst
    { scTitle       :: !Text
    , scDescription :: !(Maybe Text)
    , scValue       :: !Text
    }
    -- | An object with (pattern) properties and required field names.
  | SCObject
    { scTitle        :: !Text
    , scDescription  :: !(Maybe Text)
    , scNullable     :: !Bool
    , scProperties   :: ![(Text, Schema)]
    , scPatternProps :: ![(Text, Schema)]
    , scRequired     :: ![Text]
    }
    -- | An array with item schemas and optional length bounds.
  | SCArray
    { scTitle       :: !Text
    , scDescription :: !(Maybe Text)
    , scNullable    :: !Bool
    , scItems       :: ![Schema]
    , scLowerBound  :: !(Maybe Integer)
    , scUpperBound  :: !(Maybe Integer)
    }
    -- | A choice between alternative schemas.
  | SCOneOf
    { scTitle       :: !Text
    , scDescription :: !(Maybe Text)
    , scNullable    :: !Bool
    , scChoices     :: ![SchemaChoice]
    }
    -- | A reference to another (named) schema.
  | SCRef
    { scReference :: !Text
    , scNullable  :: !Bool
    }
    -- | The null value.
  | SCNull
  deriving (Show)
-- | A sum encoding for ADT.
--
data SchemaChoice =
    SCChoiceEnum
    { sctName  :: !Text -- constructor name.
    , sctTitle :: !Text -- an arbitrary text. e.g. Types.UnitType1.UnitData11.
    }
    -- ^ Encoding for constructors that are all unit type.
    --   e.g. "test": {"enum": ["xxx", "yyy", "zzz"]}
  | SCChoiceArray
    { sctName  :: !Text     -- constructor name.
    , sctTitle :: !Text     -- an arbitrary text. e.g. Types.ProductType1.ProductData11.
    , sctArray :: ![Schema] -- parameters of constructor.
    }
    -- ^ Encoding for constructors that are non record type.
    --   e.g. "test": [{"tag": "xxx", "contents": []},...] or "test": [{"xxx": [],},...]
  | SCChoiceMap
    { sctName     :: !Text            -- constructor name.
    , sctTitle    :: !Text            -- an arbitrary text. e.g. Types.RecordType1.RecordData11.
    , sctMap      :: ![(Text, Schema)] -- list of record field name and schema in this constructor.
    , sctRequired :: ![Text]           -- required field names.
    }
    -- ^ Encoding for constructors that are record type.
    --   e.g. "test": [{"tag": "xxx", "contents": {"aaa": "yyy",...}},...] or "test": [{"xxx": []},...]
  deriving (Show)
-- | A smart constructor for String: no description, format or bounds,
--   and @null@ disallowed.
scString :: Schema
scString = SCString
    { scDescription = Nothing
    , scNullable    = False
    , scFormat      = Nothing
    , scLowerBound  = Nothing
    , scUpperBound  = Nothing
    }

-- | A smart constructor for Integer: no description or bounds,
--   and @null@ disallowed.
scInteger :: Schema
scInteger = SCInteger
    { scDescription = Nothing
    , scNullable    = False
    , scLowerBound  = Nothing
    , scUpperBound  = Nothing
    }

-- | A smart constructor for Number: no description or bounds,
--   and @null@ disallowed.
scNumber :: Schema
scNumber = SCNumber
    { scDescription = Nothing
    , scNullable    = False
    , scLowerBound  = Nothing
    , scUpperBound  = Nothing
    }

-- | A smart constructor for Boolean: no description, @null@ disallowed.
scBoolean :: Schema
scBoolean = SCBoolean
    { scDescription = Nothing
    , scNullable    = False
    }
| yuga/jsonschema-gen | src/Data/JSON/Schema/Generator/Types.hs | bsd-3-clause | 4,285 | 0 | 11 | 1,513 | 799 | 482 | 317 | 196 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : TestSuite.Crypto.AES
-- Copyright : (c) Levent Erkok
-- License : BSD3
-- Maintainer : erkokl@gmail.com
-- Stability : experimental
--
-- Test suite for Data.SBV.Examples.Crypto.AES
-----------------------------------------------------------------------------
module TestSuite.Crypto.AES(testSuite) where
import Data.SBV
import Data.SBV.Internals
import Data.SBV.Examples.Crypto.AES
import SBVTest
-- Test suite
-- | Golden tests: the C code generated for AES-128 encryption,
-- decryption and the library variant is compared against the stored
-- @.gold@ files.
testSuite :: SBVTestSuite
testSuite = mkTestSuite $ \goldCheck -> test [
   "aes128Enc" ~: compileToC' "aes128Enc" "" (aes128EncDec True) `goldCheck` "aes128Enc.gold"
 , "aes128Dec" ~: compileToC' "aes128Dec" "" (aes128EncDec False) `goldCheck` "aes128Dec.gold"
 , "aes128Lib" ~: compileToCLib' "aes128Lib" aes128Comps `goldCheck` "aes128Lib.gold"
 ]
 where -- Code generator over 4-word symbolic plaintext and key; True
       -- selects encryption, False decryption. Driver values fixed to 0.
       aes128EncDec d = do pt  <- cgInputArr 4 "pt"
                           key <- cgInputArr 4 "key"
                           cgSetDriverValues $ repeat 0
                           let (encKs, decKs) = aesKeySchedule key
                               res | d    = aesEncrypt pt encKs
                                   | True = aesDecrypt pt decKs
                           cgOutputArr "ct" res
       -- Library components with driver values set.
       -- NOTE(review): 'b' is bound but unused in this comprehension.
       aes128Comps = [(f, setVals c, "test") | (f, c, b) <- aes128LibComponents]
       setVals c = cgSetDriverValues (repeat 0) >> c
| Copilot-Language/sbv-for-copilot | SBVUnitTest/TestSuite/Crypto/AES.hs | bsd-3-clause | 1,435 | 0 | 14 | 384 | 296 | 160 | 136 | 19 | 1 |
module Handler.AdminSpec (spec) where
import TestImport
spec :: Spec
spec = withApp $ do
    -- Placeholder: evaluating this spec raises the error below until a
    -- real test for the admin handler is written.
    describe "getAdminR" $ do
        error "Spec not implemented: getAdminR"
| haBuu/tfs-website | test/Handler/AdminSpec.hs | mit | 171 | 0 | 11 | 39 | 44 | 23 | 21 | 6 | 1 |
{- |
Module : ./Temporal/ModalCaslToCtl.hs
Copyright : (c) Klaus Hartke, Uni Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : experimental
Portability : portable
-}
module ModalCaslToCtl where
import Control.Monad as Monad
import Data.Maybe as Maybe
import ModalCasl as Casl
import Ctl
{- ----------------------------------------------------------------------------
  Convert Modal CASL formulas to CTL formulas
---------------------------------------------------------------------------- -}

-- | Translate a Modal CASL state formula into CTL where possible;
-- 'Nothing' means the formula lies outside the CTL fragment.
-- The unprimed W/U/B (weak-until / until / before) operators are first
-- rewritten in terms of the primed variants, and A/E U' map directly
-- onto CTL's AU/EU.
-- NOTE(review): the rewrite equations for the unprimed U and W cases
-- look asymmetric (the U case has no 'Casl.B' wrapper) -- verify them
-- against the intended Modal CASL semantics before relying on them.
convert :: Casl.StateFormula a -> Maybe (Ctl.Formula a)
convert (Casl.Var x) = Just (Ctl.Atom x)
convert (Casl.Snot phi) = liftM Ctl.Not (convert phi)
convert (Casl.Sand phi psi) = liftM2 Ctl.And (convert phi) (convert psi)
convert (Casl.Sor phi psi) = liftM2 Ctl.Or (convert phi) (convert psi)
convert (Casl.A (Casl.X phi)) = liftM Ctl.AX (convert' phi)
convert (Casl.E (Casl.X phi)) = liftM Ctl.EX (convert' phi)
convert (Casl.A (Casl.G phi)) = liftM Ctl.AG (convert' phi)
convert (Casl.E (Casl.G phi)) = liftM Ctl.EG (convert' phi)
convert (Casl.A (Casl.F phi)) = liftM Ctl.AF (convert' phi)
convert (Casl.E (Casl.F phi)) = liftM Ctl.EF (convert' phi)
convert (Casl.A (Casl.W phi psi)) = convert (Casl.A ((phi `Casl.Pand` psi)
    `Casl.B` (Casl.Pnot phi `Casl.Pand` psi)))
convert (Casl.E (Casl.W phi psi)) = convert (Casl.E ((phi `Casl.Pand` psi)
    `Casl.B` (Casl.Pnot phi `Casl.Pand` psi)))
convert (Casl.A (Casl.U phi psi)) = convert (Casl.A (psi `Casl.Pand`
    (Casl.Pnot phi `Casl.Pand` Casl.Pnot psi)))
convert (Casl.E (Casl.U phi psi)) = convert (Casl.E (psi `Casl.Pand`
    (Casl.Pnot phi `Casl.Pand` Casl.Pnot psi)))
convert (Casl.A (Casl.B phi psi)) = convert (Casl.A (Casl.Pnot
    (Casl.Pnot phi `Casl.U'` psi)))
convert (Casl.E (Casl.B phi psi)) = convert (Casl.E (Casl.Pnot
    (Casl.Pnot phi `Casl.U'` psi)))
convert (Casl.A (Casl.W' phi psi)) = convert (Casl.A (Casl.Pnot phi `Casl.U'`
    Casl.Pand phi psi))
convert (Casl.E (Casl.W' phi psi)) = convert (Casl.E (Casl.Pnot phi `Casl.U'`
    Casl.Pand phi psi))
convert (Casl.A (Casl.U' phi psi)) = liftM2 Ctl.AU (convert' phi) (convert' psi)
convert (Casl.E (Casl.U' phi psi)) = liftM2 Ctl.EU (convert' phi) (convert' psi)
convert (Casl.A (Casl.B' phi psi)) = convert (Casl.A (Casl.Pnot phi `Casl.U'`
    Casl.Pand phi (Casl.Pnot psi)))
convert (Casl.E (Casl.B' phi psi)) = convert (Casl.E (Casl.Pnot phi `Casl.U'`
    Casl.Pand phi (Casl.Pnot psi)))
convert _ = Nothing
-- | Translate a Modal CASL path formula into CTL. Only the embedding
-- of state formulas and the boolean connectives are representable;
-- everything else yields 'Nothing'.
convert' :: Casl.PathFormula a -> Maybe (Ctl.Formula a)
convert' (State phi) = convert phi
convert' (Casl.Pnot phi) = liftM Ctl.Not (convert' phi)
convert' (Casl.Pand phi psi) = liftM2 Ctl.And (convert' phi) (convert' psi)
convert' (Casl.Por phi psi) = liftM2 Ctl.Or (convert' phi) (convert' psi)
convert' _ = Nothing
-- ----------------------------------------------------------------------------
| spechub/Hets | Temporal/ModalCaslToCtl.hs | gpl-2.0 | 2,971 | 0 | 13 | 503 | 1,335 | 677 | 658 | 45 | 1 |
{-| Implementation of the Ganeti configuration database.
-}
{-
Copyright (C) 2011, 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
-}
module Ganeti.Config
( LinkIpMap
, NdParamObject(..)
, loadConfig
, getNodeInstances
, getNodeRole
, getNodeNdParams
, getDefaultNicLink
, getDefaultHypervisor
, getInstancesIpByLink
, getNode
, getInstance
, getGroup
, getGroupNdParams
, getGroupIpolicy
, getGroupDiskParams
, getGroupNodes
, getGroupInstances
, getGroupOfNode
, getInstPrimaryNode
, getInstMinorsForNode
, buildLinkIpInstnameMap
, instNodes
) where
import Control.Monad (liftM)
import Data.List (foldl')
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Text.JSON as J
import Ganeti.BasicTypes
import qualified Ganeti.Constants as C
import Ganeti.Errors
import Ganeti.JSON
import Ganeti.Objects
import Ganeti.Types
-- | Type alias for the link and ip map: link -> ip -> instance name.
type LinkIpMap = M.Map String (M.Map String String)

-- | Type class denoting objects which have node parameters.
class NdParamObject a where
  getNdParamsOf :: ConfigData -> a -> Maybe FilledNDParams

-- | Reads the config file.
-- NOTE(review): 'readFile' is lazy I/O; the handle stays open until the
-- contents are fully forced (here, by parsing).
readConfig :: FilePath -> IO String
readConfig = readFile

-- | Parses the configuration file.
parseConfig :: String -> Result ConfigData
parseConfig = fromJResult "parsing configuration" . J.decodeStrict

-- | Wrapper over 'readConfig' and 'parseConfig'.
loadConfig :: FilePath -> IO (Result ConfigData)
loadConfig = fmap parseConfig . readConfig
-- * Query functions

-- | Computes the nodes covered by a disk: for DRBD8 the two attached
-- nodes, for every other disk type the empty set.
computeDiskNodes :: Disk -> S.Set String
computeDiskNodes dsk =
  case diskLogicalId dsk of
    LIDDrbd8 nodeA nodeB _ _ _ _ -> S.fromList [nodeA, nodeB]
    _ -> S.empty

-- | Computes all disk-related nodes of an instance. For non-DRBD,
-- this will be empty, for DRBD it will contain both the primary and
-- the secondaries.
instDiskNodes :: Instance -> S.Set String
instDiskNodes = S.unions . map computeDiskNodes . instDisks

-- | Computes all nodes of an instance (primary plus disk nodes).
instNodes :: Instance -> S.Set String
instNodes inst = instPrimaryNode inst `S.insert` instDiskNodes inst

-- | Computes the secondary nodes of an instance. Since this is valid
-- only for DRBD, we call directly 'instDiskNodes', skipping over the
-- extra primary insert.
instSecondaryNodes :: Instance -> S.Set String
instSecondaryNodes inst =
  instPrimaryNode inst `S.delete` instDiskNodes inst
-- | Get instances of a given node, split into those that have it as
-- their primary node and those that have it as a secondary.
getNodeInstances :: ConfigData -> String -> ([Instance], [Instance])
getNodeInstances cfg nname = (primaries, secondaries)
  where
    allInsts    = M.elems . fromContainer . configInstances $ cfg
    primaries   = [i | i <- allInsts, instPrimaryNode i == nname]
    secondaries = [i | i <- allInsts, nname `S.member` instSecondaryNodes i]
-- | Computes the role of a node. The first matching condition wins:
-- master, then master candidate, drained, offline, regular.
getNodeRole :: ConfigData -> Node -> NodeRole
getNodeRole cfg node
  | nodeName node == clusterMasterNode (configCluster cfg) = NRMaster
  | nodeMasterCandidate node = NRCandidate
  | nodeDrained node = NRDrained
  | nodeOffline node = NROffline
  | otherwise = NRRegular

-- | Returns the default cluster link.
-- NOTE(review): uses a partial lookup (M.!) with key 'C.ppDefault'; a
-- configuration missing the default NIC params would crash here.
getDefaultNicLink :: ConfigData -> String
getDefaultNicLink =
  nicpLink . (M.! C.ppDefault) . fromContainer .
  clusterNicparams . configCluster

-- | Returns the default cluster hypervisor (the first enabled one).
getDefaultHypervisor :: ConfigData -> Hypervisor
getDefaultHypervisor cfg =
  case clusterEnabledHypervisors $ configCluster cfg of
    -- FIXME: this case shouldn't happen (configuration broken), but
    -- for now we handle it here because we're not authoritative for
    -- the config
    [] -> XenPvm
    x:_ -> x
-- | Returns instances of a given link: the IP addresses registered on
-- @link@, or the empty list when the link is unknown.
getInstancesIpByLink :: LinkIpMap -> String -> [String]
getInstancesIpByLink linkipmap link =
  maybe [] M.keys (M.lookup link linkipmap)
-- | Generic lookup function that converts from a possible abbreviated
-- name to a full name.
-- @kind@ is only used in error messages; partial and exact matches
-- succeed, while ambiguous or failed matches produce an 'OpPrereqError'.
getItem :: String -> String -> M.Map String a -> ErrorResult a
getItem kind name allitems = do
  let lresult = lookupName (M.keys allitems) name
      err msg = Bad $ OpPrereqError (kind ++ " name " ++ name ++ " " ++ msg)
                  ECodeNoEnt
  fullname <- case lrMatchPriority lresult of
                PartialMatch  -> Ok $ lrContent lresult
                ExactMatch    -> Ok $ lrContent lresult
                MultipleMatch -> err "has multiple matches"
                FailMatch     -> err "not found"
  -- The match came from the map's own keys, so this lookup should never
  -- fail; the error message is kept for defensiveness.
  maybe (err "not found after successful match?!") Ok $
    M.lookup fullname allitems
-- | Looks up a node.
getNode :: ConfigData -> String -> ErrorResult Node
getNode cfg name = getItem "Node" name (fromContainer $ configNodes cfg)

-- | Looks up an instance.
getInstance :: ConfigData -> String -> ErrorResult Instance
getInstance cfg name =
  getItem "Instance" name (fromContainer $ configInstances cfg)

-- | Looks up a node group. This is more tricky than for
-- node/instances since the groups map is indexed by uuid, not name.
getGroup :: ConfigData -> String -> ErrorResult NodeGroup
getGroup cfg name =
  let groups = fromContainer (configNodegroups cfg)
  in case getItem "NodeGroup" name groups of
       -- if not found by uuid, we need to look it up by name, slow
       Ok grp -> Ok grp
       Bad _ -> let by_name = M.mapKeys
                      (groupName . (M.!) groups) groups
                in getItem "NodeGroup" name by_name
-- | Computes a node group's node params, filling group values over the
-- cluster-level defaults.
getGroupNdParams :: ConfigData -> NodeGroup -> FilledNDParams
getGroupNdParams cfg ng =
  fillNDParams (clusterNdparams $ configCluster cfg) (groupNdparams ng)

-- | Computes a node group's ipolicy, filled from the cluster ipolicy.
getGroupIpolicy :: ConfigData -> NodeGroup -> FilledIPolicy
getGroupIpolicy cfg ng =
  fillIPolicy (clusterIpolicy $ configCluster cfg) (groupIpolicy ng)

-- | Computes a group\'s (merged) disk params.
getGroupDiskParams :: ConfigData -> NodeGroup -> DiskParams
getGroupDiskParams cfg ng =
  GenericContainer $
    fillDict (fromContainer . clusterDiskparams $ configCluster cfg)
             (fromContainer $ groupDiskparams ng) []
-- | Get nodes of a given node group.
getGroupNodes :: ConfigData -> String -> [Node]
getGroupNodes cfg gname =
  [ node
  | node <- M.elems . fromContainer $ configNodes cfg
  , nodeGroup node == gname
  ]
-- | Get (primary, secondary) instances of a given node group.
getGroupInstances :: ConfigData -> String -> ([Instance], [Instance])
getGroupInstances cfg gname =
  let names         = map nodeName (getGroupNodes cfg gname)
      (prims, secs) = unzip (map (getNodeInstances cfg) names)
  in (concat prims, concat secs)
-- | Looks up an instance's primary node.
getInstPrimaryNode :: ConfigData -> String -> ErrorResult Node
getInstPrimaryNode cfg name =
  liftM instPrimaryNode (getInstance cfg name) >>= getNode cfg

-- | Filters DRBD minors for a given node: for each DRBD8 disk attached
-- to @node@ (recursing into child disks), yields this node's minor
-- number paired with the peer node's name.
getDrbdMinorsForNode :: String -> Disk -> [(Int, String)]
getDrbdMinorsForNode node disk =
  let child_minors = concatMap (getDrbdMinorsForNode node) (diskChildren disk)
      this_minors =
        case diskLogicalId disk of
          LIDDrbd8 nodeA nodeB _ minorA minorB _
            | nodeA == node -> [(minorA, nodeB)]
            | nodeB == node -> [(minorB, nodeA)]
          -- non-DRBD disks, or DRBD disks not involving this node
          _ -> []
  in this_minors ++ child_minors

-- | String for primary role.
rolePrimary :: String
rolePrimary = "primary"

-- | String for secondary role.
roleSecondary :: String
roleSecondary = "secondary"
-- | Gets the list of DRBD minors for an instance that are related to
-- a given node. Each result tuple is
-- (node, minor, instance name, disk name, role, peer node).
getInstMinorsForNode :: String -> Instance
                     -> [(String, Int, String, String, String, String)]
getInstMinorsForNode node inst =
  let role = if node == instPrimaryNode inst
               then rolePrimary
               else roleSecondary
      iname = instName inst
  -- FIXME: the disk/ build there is hack-ish; unify this in a
  -- separate place, or reuse the iv_name (but that is deprecated on
  -- the Python side)
  in concatMap (\(idx, dsk) ->
       [(node, minor, iname, "disk/" ++ show idx, role, peer)
         | (minor, peer) <- getDrbdMinorsForNode node dsk]) .
     zip [(0::Int)..] . instDisks $ inst
-- | Builds link -> ip -> instname map.
--
-- NICs without an IP address are skipped; later entries for the same
-- (link, ip) pair overwrite earlier ones.
--
-- TODO: improve this by splitting it into multiple independent functions:
--
-- * abstract the \"fetch instance with filled params\" functionality
--
-- * abstract the [instance] -> [(nic, instance_name)] part
--
-- * etc.
buildLinkIpInstnameMap :: ConfigData -> LinkIpMap
buildLinkIpInstnameMap cfg =
  let cluster = configCluster cfg
      instances = M.elems . fromContainer . configInstances $ cfg
      defparams = (M.!) (fromContainer $ clusterNicparams cluster) C.ppDefault
      nics = concatMap (\i -> [(instName i, nic) | nic <- instNics i])
             instances
  in foldl' (\accum (iname, nic) ->
               let pparams = nicNicparams nic
                   fparams = fillNicParams defparams pparams
                   link = nicpLink fparams
               in case nicIp nic of
                    Nothing -> accum
                    Just ip -> let oldipmap = M.findWithDefault M.empty
                                                link accum
                                   newipmap = M.insert ip iname oldipmap
                               in M.insert link newipmap accum
            ) M.empty nics
-- | Returns a node's group, with optional failure if we can't find it
-- (configuration corrupt).
getGroupOfNode :: ConfigData -> Node -> Maybe NodeGroup
getGroupOfNode cfg node =
  M.lookup (nodeGroup node) (fromContainer . configNodegroups $ cfg)

-- | Returns a node's ndparams, filled: node values layered over the
-- (already cluster-filled) group parameters.
getNodeNdParams :: ConfigData -> Node -> Maybe FilledNDParams
getNodeNdParams cfg node = do
  group <- getGroupOfNode cfg node
  let gparams = getGroupNdParams cfg group
  return $ fillNDParams gparams (nodeNdparams node)

instance NdParamObject Node where
  getNdParamsOf = getNodeNdParams

instance NdParamObject NodeGroup where
  getNdParamsOf cfg = Just . getGroupNdParams cfg

instance NdParamObject Cluster where
  getNdParamsOf _ = Just . clusterNdparams
| dblia/nosql-ganeti | src/Ganeti/Config.hs | gpl-2.0 | 10,847 | 0 | 20 | 2,391 | 2,315 | 1,219 | 1,096 | 185 | 4 |
module EnumFromTo7 where
-- Test fixture for enumFromThenTo ranges.
-- [60,62..50] has a positive step (+2) but its limit 50 lies below the
-- start 60, so the enumeration is empty and main evaluates to [].
main :: [Int]
main = list
  where
    list :: [Int]
    list = [60,62..50]
| roberth/uu-helium | test/correct/EnumFromTo7.hs | gpl-3.0 | 101 | 0 | 7 | 27 | 40 | 25 | 15 | 5 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
module Abstract.Category.Limit (Complete(..), Cocomplete(..)) where
import Data.Proxy
import Abstract.Category
import Data.List.NonEmpty (NonEmpty (..))
-- | Type class for morphisms whose category is Complete.
--
-- Mainly provides categorical operations that Complete categories
-- are guaranteed to have.
--
-- Note that for performance reasons, verigraph assumes that the parameters
-- are valid for all functions in this module.
class (Category morph) => Complete morph where
  -- | Given two morphisms @/f : A -> B/@ and @/g : A -> B/@ returns the equalizer morphism
  -- @/h : X -> A/@
  calculateEqualizer :: morph -> morph -> morph

  -- | Given a non-empty list of morphisms of the form @/f : A -> B/@ returns the equalizer
  -- morphism @/h : X -> A/@ (folded pairwise from the binary equalizer)
  calculateNEqualizer :: NonEmpty morph -> morph
  calculateNEqualizer (f :| []) = identity (domain f)
  calculateNEqualizer (f :| [g]) = calculateEqualizer f g
  calculateNEqualizer (f :| g : gs) =
    let
      q = calculateNEqualizer (g :| gs)
      p = calculateEqualizer (f <&> q) (g <&> q)
    in (q <&> p)

  -- | Given two objects @A@ and @B@ it returns the product @(AxB, f: AxB -> A, g: AxB -> B)@
  -- (by default, the pullback of the morphisms to the final object)
  calculateProduct :: Obj morph -> Obj morph -> (morph,morph)
  calculateProduct x y =
    calculatePullback (morphismToFinalFrom y) (morphismToFinalFrom x)

  -- | Given a non-empty list of objects @Bi@ it returns the product @fi : PROD(Bi) -> Bi@
  calculateNProduct :: NonEmpty (Obj morph) -> [morph]
  calculateNProduct (x :| []) = [identity x]
  calculateNProduct (x :| [y]) = [px, py]
    where (px, py) = calculateProduct x y
  calculateNProduct (x :| y : ys) =
    let
      qs = calculateNProduct (y :| ys)
      (px, pys) = calculateProduct x (domain $ head qs)
    in px : map (<&> pys) qs

  finalObject :: morph -> Obj morph

  morphismToFinalFrom :: Obj morph -> morph

  -- | An object is final iff its morphism to the final object is an iso.
  isFinal :: Proxy morph -> Obj morph -> Bool
  isFinal _ = isIsomorphism . morphismToFinalFrom @morph

  -- | Pullback built from the product followed by an equalizer.
  calculatePullback :: morph -> morph -> (morph,morph)
  calculatePullback f g = (f',g')
    where
      a = domain f
      b = domain g
      (a',b') = calculateProduct a b
      a'f = f <&> a'
      b'g = g <&> b'
      h = calculateEqualizer a'f b'g
      f' = b' <&> h
      g' = a' <&> h
-- | Type class for morphisms whose category is Cocomplete.
--
-- Mainly provides categorical operations that Cocomplete categories
-- are guaranteed to have.
--
-- Note that for performance reasons, verigraph assumes that the parameters
-- are valid for all functions in this module.
class (Category morph) => Cocomplete morph where
  -- | Given two morphisms @/f : A -> B/@ and @/g : A -> B/@ returns the coequalizer morphism
  -- @/h : B -> X/@
  calculateCoequalizer :: morph -> morph -> morph

  -- | Given a non-empty list of morphisms of the form @/f : A -> B/@ returns the coequalizer Morphism
  -- @/h : B -> X/@ (folded pairwise from the binary coequalizer)
  calculateNCoequalizer :: NonEmpty morph -> morph
  calculateNCoequalizer (f :| []) = identity (codomain f)
  calculateNCoequalizer (f :| [g]) = calculateCoequalizer f g
  calculateNCoequalizer (f :| g : gs) =
    let
      k = calculateNCoequalizer (g :| gs)
      j = calculateCoequalizer (k <&> f) (k <&> g)
    in j <&> k

  -- | Given two objects @A@ and @B@ it returns the coproduct @(A+B, f: A -> A+B, g: B -> A+B)@
  calculateCoproduct :: Obj morph -> Obj morph -> (morph,morph)

  -- | Given a non-empty list of objects @Bi@ it returns the coproduct @fi : Bi -> SUM(Bi)@
  calculateNCoproduct :: NonEmpty (Obj morph) -> [morph]
  calculateNCoproduct (x :| []) = [identity x]
  calculateNCoproduct (x :| [y]) = [jx, jy]
    where (jx, jy) = calculateCoproduct x y
  calculateNCoproduct (x :| y : ys) =
    let
      ks = calculateNCoproduct (y :| ys)
      (jx, jys) = calculateCoproduct x (codomain $ head ks)
    in jx : map (jys <&>) ks

  initialObject :: morph -> Obj morph

  morphismFromInitialTo :: Obj morph -> morph

  -- | An object is initial iff the morphism from the initial object is an iso.
  isInitial :: Proxy morph -> Obj morph -> Bool
  isInitial _ = isIsomorphism . morphismFromInitialTo @morph

  -- | Pushout built from the coproduct followed by a coequalizer.
  calculatePushout :: morph -> morph -> (morph,morph)
  calculatePushout f g = (f', g')
    where
      b = codomain f
      c = codomain g
      (b',c') = calculateCoproduct b c
      gc' = c' <&> g
      fb' = b' <&> f
      h = calculateCoequalizer fb' gc'
      g' = h <&> b'
      f' = h <&> c'
| rodrigo-machado/verigraph | src/library/Abstract/Category/Limit.hs | gpl-3.0 | 4,509 | 0 | 14 | 1,141 | 1,162 | 625 | 537 | 76 | 0 |
{-# LANGUAGE QuasiQuotes #-}
module Pos.Util.Swagger where
import Universum
import Data.Swagger
import NeatInterpolation (text)
import Servant.Server (Handler, Server)
import Servant.Swagger.UI.Core (SwaggerSchemaUI',
swaggerSchemaUIServerImpl)
import Servant.Swagger.UI.ReDoc (redocFiles)
-- | Provide an alternative UI (ReDoc) for rendering Swagger documentation.
swaggerSchemaUIServer
    :: (Server api ~ Handler Swagger)
    => Swagger -> Server (SwaggerSchemaUI' dir api)
swaggerSchemaUIServer =
    swaggerSchemaUIServerImpl redocIndexTemplate redocFiles
  where
    -- Minimal HTML page that loads ReDoc and points it at the schema
    -- placeholder URL, which servant-swagger-ui substitutes at serve time.
    redocIndexTemplate :: Text
    redocIndexTemplate = [text|
<!doctype html>
<html lang="en">
  <head>
    <title>ReDoc</title>
    <meta charset="utf-8"/>
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <style>
      body { margin: 0; padding: 0; }
    </style>
    <script>
      // Force Strict-URL Routing for assets relative paths
      (function onload() {
        if (!window.location.pathname.endsWith("/")) {
          window.location.pathname += "/";
        }
      }());
    </script>
  </head>
  <body>
    <redoc spec-url="../SERVANT_SWAGGER_UI_SCHEMA"></redoc>
    <script src="redoc.min.js"> </script>
  </body>
</html>|]
| input-output-hk/cardano-sl | lib/src/Pos/Util/Swagger.hs | apache-2.0 | 1,345 | 0 | 9 | 335 | 128 | 77 | 51 | 16 | 1 |
{-
(c) The University of Glasgow 2006-2012
(c) The GRASP Project, Glasgow University, 1992-2002
Various types used during typechecking. Please see TcRnMonad as well for
operations on these types. You probably want to import it, instead of this
module.
All the monads exported here are built on top of the same IOEnv monad. The
monad functions like a Reader monad in the way it passes the environment
around. This is done to allow the environment to be manipulated in a
stack-like fashion when entering expressions, etc.
For state that is global and should be returned at the end (e.g not part
of the stack mechanism), you should use an TcRef (= IORef) to store them.
-}
{-# LANGUAGE CPP, ExistentialQuantification #-}
module Eta.TypeCheck.TcRnTypes(
TcRnIf, TcRn, TcM, RnM, IfM, IfL, IfG, -- The monad is opaque outside this module
TcRef,
-- The environment types
Env(..),
TcGblEnv(..), TcLclEnv(..),
IfGblEnv(..), IfLclEnv(..),
-- Renamer types
ErrCtxt, RecFieldEnv(..),
ImportAvails(..), emptyImportAvails, plusImportAvails,
WhereFrom(..), mkModDeps, modDepsElts,
-- Typechecker types
TcTypeEnv, TcIdBinder(..),
TcTyThing(..), PromotionErr(..),
SelfBootInfo(..),
pprTcTyThingCategory, pprPECategory,
-- Desugaring types
DsM, DsLclEnv(..), DsGblEnv(..), PArrBuiltin(..),
DsMetaEnv, DsMetaVal(..),
-- Template Haskell
ThStage(..), SpliceType(..), PendingStuff(..), topStage, topAnnStage, topSpliceStage,
ThLevel, impLevel, outerLevel, thLevel,
-- Arrows
ArrowCtxt(..),
-- Canonical constraints
Xi, Ct(..), Cts, emptyCts, andCts, andManyCts, pprCts,
singleCt, listToCts, ctsElts, consCts, snocCts, extendCtsList,
isEmptyCts, isCTyEqCan, isCFunEqCan,
isCDictCan_Maybe, isCFunEqCan_maybe,
isCIrredEvCan, isCNonCanonical, isWantedCt, isDerivedCt,
isGivenCt, isHoleCt, isTypedHoleCt, isPartialTypeSigCt,
isUserTypeErrorCt, getUserTypeErrorMsg,
ctEvidence, ctLoc, ctPred, ctFlavour, ctEqRel,
mkNonCanonical, mkNonCanonicalCt,
ctEvPred, ctEvLoc, ctEvEqRel,
ctEvTerm, ctEvCoercion, ctEvId, ctEvCheckDepth,
WantedConstraints(..), insolubleWC, emptyWC, isEmptyWC,
andWC, unionsWC, addSimples, addImplics, mkSimpleWC, mkImplicWC, addInsols,
insolublesOnly, dropDerivedWC,
Implication(..),
SubGoalCounter(..),
SubGoalDepth, initialSubGoalDepth, maxSubGoalDepth,
bumpSubGoalDepth, subGoalCounterValue, subGoalDepthExceeded,
CtLoc(..), ctLocSpan, ctLocEnv, ctLocOrigin,
ctLocDepth, bumpCtLocDepth,
setCtLocOrigin, setCtLocEnv, setCtLocSpan,
CtOrigin(..), pprCtOrigin,
pushErrCtxt, pushErrCtxtSameOrigin,
SkolemInfo(..),
CtEvidence(..),
mkGivenLoc,
isWanted, isGiven, isDerived,
ctEvRole,
-- Constraint solver plugins
TcPlugin(..), TcPluginResult(..), TcPluginSolver,
TcPluginM, runTcPluginM, unsafeTcPluginTcM,
CtFlavour(..), ctEvFlavour,
-- Pretty printing
pprEvVarTheta,
pprEvVars, pprEvVarWithType,
pprArising, pprArisingAt,
-- Misc other types
TcId, TcIdSet, HoleSort(..),
NameShape(..)
) where
import Eta.HsSyn.HsSyn
import Eta.Core.CoreSyn
import Eta.Main.HscTypes
import Eta.TypeCheck.TcEvidence
import Eta.Types.Type
import Eta.Types.CoAxiom ( Role )
import Eta.Types.Class ( Class )
import Eta.Types.TyCon ( TyCon )
import Eta.BasicTypes.ConLike ( ConLike(..) )
import Eta.BasicTypes.DataCon ( DataCon, dataConUserType, dataConOrigArgTys )
import Eta.BasicTypes.PatSyn ( PatSyn, patSynType )
import Eta.Prelude.TysWiredIn ( coercibleClass )
import Eta.TypeCheck.TcType
import Eta.Main.Annotations
import Eta.Types.InstEnv
import Eta.Types.FamInstEnv
import Eta.Utils.IOEnv
import Eta.BasicTypes.RdrName
import Eta.BasicTypes.Name
import Eta.BasicTypes.NameEnv
import Eta.BasicTypes.NameSet
import Eta.BasicTypes.Avail
import Eta.BasicTypes.Var
import Eta.BasicTypes.VarEnv
import Eta.BasicTypes.Module
import Eta.BasicTypes.SrcLoc
import Eta.BasicTypes.VarSet
import Eta.Main.ErrUtils
import Eta.Utils.UniqFM
import Eta.BasicTypes.UniqSupply
import Eta.BasicTypes.BasicTypes
import Eta.Utils.Bag
import Eta.Main.DynFlags
import Eta.Utils.Outputable
import Eta.Utils.ListSetOps
import Eta.Utils.FastString
import Eta.DeSugar.PmExpr
import GHC.Fingerprint
import Data.Set (Set)
import qualified Data.Set as S
import Control.Monad (ap, liftM, msum)
#ifdef ETA_REPL
import Data.Map ( Map )
import Data.Dynamic ( Dynamic )
import Data.List ( sort )
import Data.Typeable ( TypeRep )
import Eta.REPL.RemoteTypes
#endif
-- | A 'NameShape' is a substitution on 'Name's that can be used
-- to refine the identities of a hole while we are renaming interfaces
-- (see 'RnModIface'). Specifically, a 'NameShape' for
-- 'ns_mod_name' @A@, defines a mapping from @{A.T}@
-- (for some 'OccName' @T@) to some arbitrary other 'Name'.
--
-- The most intriguing thing about a 'NameShape', however, is
-- how it's constructed. A 'NameShape' is *implied* by the
-- exported 'AvailInfo's of the implementor of an interface:
-- if an implementor of signature @<H>@ exports @M.T@, you implicitly
-- define a substitution from @{H.T}@ to @M.T@. So a 'NameShape'
-- is computed from the list of 'AvailInfo's that are exported
-- by the implementation of a module, or successively merged
-- together by the export lists of signatures which are joining
-- together.
--
-- It's not the most obvious way to go about doing this, but it
-- does seem to work!
--
-- NB: Can't boot this and put it in NameShape because then we
-- start pulling in too many DynFlags things.
data NameShape = NameShape {
        ns_mod_name :: ModuleName,   -- ^ The module name this shape refines
        ns_exports  :: [AvailInfo],  -- ^ Exports the substitution was computed from
        ns_map      :: OccEnv Name   -- ^ The substitution itself: OccName -> Name
    }
{-
************************************************************************
* *
Standard monad definition for TcRn
All the combinators for the monad can be found in TcRnMonad
* *
************************************************************************
The monad itself has to be defined here, because it is mentioned by ErrCtxt
-}
-- | The bedrock monad: an 'IOEnv' reader over an 'Env' with pluggable
-- global and local environment types.
type TcRnIf a b = IOEnv (Env a b)

type TcRn    = TcRnIf TcGblEnv TcLclEnv    -- Type inference
type IfM lcl = TcRnIf IfGblEnv lcl         -- Iface stuff
type IfG     = IfM ()                      -- Top level
type IfL     = IfM IfLclEnv                -- Nested
type DsM     = TcRnIf DsGblEnv DsLclEnv    -- Desugaring

-- TcRn is the type-checking and renaming monad: the main monad that
-- most type-checking takes place in. The global environment is
-- 'TcGblEnv', which tracks all of the top-level type-checking
-- information we've accumulated while checking a module, while the
-- local environment is 'TcLclEnv', which tracks local information as
-- we move inside expressions.

-- | Historical "renaming monad" (now it's just 'TcRn').
type RnM = TcRn

-- | Historical "type-checking monad" (now it's just 'TcRn').
type TcM = TcRn

-- We 'stack' these envs through the Reader like monad infrastructure
-- as we move into an expression (although the change is focused in
-- the lcl type).
-- | The environment carried by every 'TcRnIf' computation: immutable
-- top-level state plus the (parameterised) global and local environments.
data Env gbl lcl
  = Env {
        env_top :: !HscEnv, -- Top-level stuff that never changes
                            -- Includes all info about imported things
                            -- BangPattern is to fix leak, see #15111

        env_us  :: {-# UNPACK #-} !(IORef UniqSupply),
                            -- Unique supply for local variables

        env_gbl :: gbl,     -- Info about things defined at the top level
                            -- of the module being compiled

        env_lcl :: lcl      -- Nested stuff; changes as we go into
                            -- an expression
    }
-- | The 'DynFlags' live inside the top-level 'HscEnv'; delegate both
-- extraction and replacement to it.
instance ContainsDynFlags (Env gbl lcl) where
    extractDynFlags = hsc_dflags . env_top
    replaceDynFlags env dflags =
        env { env_top = replaceDynFlags (env_top env) dflags }
-- | An 'Env' knows its module whenever its global environment does.
instance ContainsModule gbl => ContainsModule (Env gbl lcl) where
    extractModule = extractModule . env_gbl
{-
************************************************************************
* *
The interface environments
Used when dealing with IfaceDecls
* *
************************************************************************
-}
-- | Global environment for interface-file typechecking ('IfM').
data IfGblEnv
  = IfGblEnv {
        -- The type environment for the module being compiled,
        -- in case the interface refers back to it via a reference that
        -- was originally a hi-boot file.
        -- We need the module name so we can test when it's appropriate
        -- to look in this env.
        -- See Note [Tying the knot] in TcIface
        if_rec_types :: Maybe (Module, IfG TypeEnv)
                -- Allows a read effect, so it can be in a mutable
                -- variable; c.f. handling the external package type env
                -- Nothing => interactive stuff, no loops possible
    }
-- | Local environment for interface-file typechecking ('IfL').
data IfLclEnv
  = IfLclEnv {
        -- The module for the current IfaceDecl
        -- So if we see      f = \x -> x
        -- it means M.f = \x -> x, where M is the if_mod
        -- NB: This is a semantic module, see
        -- Note [Identity versus semantic module]
        if_mod :: Module,

        -- The field is used only for error reporting
        -- if (say) there's a Lint error in it
        if_boot :: Bool,
        if_loc  :: SDoc,
                -- Where the interface came from:
                --    .hi file, or GHCi state, or ext core
                -- plus which bit is currently being examined

        if_nsubst :: Maybe NameShape,

        if_tv_env :: UniqFM TyVar, -- Nested tyvar bindings
                                   -- (and coercions)
        if_id_env :: UniqFM Id     -- Nested id binding
    }
{-
************************************************************************
* *
Desugarer monad
* *
************************************************************************
Now the mondo monad magic (yes, @DsM@ is a silly name)---carry around
a @UniqueSupply@ and some annotations, which
presumably include source-file location information:
-}
-- If '-XParallelArrays' is given, the desugarer populates this table with the corresponding
-- variables found in 'Data.Array.Parallel'.
--
data PArrBuiltin
  = PArrBuiltin
  { lengthPVar         :: Var     -- ^ lengthP
  , replicatePVar      :: Var     -- ^ replicateP
  , singletonPVar      :: Var     -- ^ singletonP
  , mapPVar            :: Var     -- ^ mapP
  , filterPVar         :: Var     -- ^ filterP
  , zipPVar            :: Var     -- ^ zipP
  , crossMapPVar       :: Var     -- ^ crossMapP
  , indexPVar          :: Var     -- ^ (!:)
  , emptyPVar          :: Var     -- ^ emptyP
  , appPVar            :: Var     -- ^ (+:+)
  , enumFromToPVar     :: Var     -- ^ enumFromToP
  , enumFromThenToPVar :: Var     -- ^ enumFromThenToP
  }
-- | Global environment for the desugarer ('DsM').
data DsGblEnv
  = DsGblEnv
  { ds_mod          :: Module               -- For SCC profiling
  , ds_fam_inst_env :: FamInstEnv           -- Like tcg_fam_inst_env
  , ds_unqual       :: PrintUnqualified
  , ds_msgs         :: IORef Messages       -- Warning messages
  , ds_if_env       :: (IfGblEnv, IfLclEnv) -- Used for looking up global,
                                            -- possibly-imported things
  , ds_dph_env      :: GlobalRdrEnv         -- exported entities of 'Data.Array.Parallel.Prim'
                                            -- iff '-fvectorise' flag was given as well as
                                            -- exported entities of 'Data.Array.Parallel' iff
                                            -- '-XParallelArrays' was given; otherwise, empty
  , ds_parr_bi      :: PArrBuiltin          -- desugarer names for '-XParallelArrays'
  , ds_static_binds :: IORef [(Fingerprint, (Id,CoreExpr))]
    -- ^ Bindings resulted from floating static forms
  }
-- | The desugarer's module is the one recorded in 'ds_mod'.
instance ContainsModule DsGblEnv where
    extractModule env = ds_mod env
-- | Local environment for the desugarer ('DsM').
data DsLclEnv = DsLclEnv {
        dsl_meta    :: DsMetaEnv,    -- Template Haskell bindings
        dsl_loc     :: RealSrcSpan,  -- To put in pattern-matching error msgs
        dsl_dicts   :: Bag EvVar,    -- Constraints from GADT pattern-matching
        dsl_tm_cs   :: Bag SimpleEq, -- Term equalities; presumably consumed by the
                                     -- pattern-match checker (see "PmExpr") -- TODO confirm
        dsl_pm_iter :: IORef Int     -- no iterations for pmcheck
    }
-- Inside [| |] brackets, the desugarer looks
-- up variables in the DsMetaEnv
type DsMetaEnv = NameEnv DsMetaVal

-- | What a name bound inside a TH bracket desugars to.
data DsMetaVal
  = DsBound Id            -- Bound by a pattern inside the [| |].
                          -- Will be dynamically alpha renamed.
                          -- The Id has type THSyntax.Var

  | DsSplice (HsExpr Id)  -- These bindings are introduced by
                          -- the PendingSplices on a HsBracketOut
{-
************************************************************************
* *
Global typechecker environment
* *
************************************************************************
-}
-- | 'TcGblEnv' describes the top-level of the module at the
-- point at which the typechecker is finished work.
-- It is this structure that is handed on to the desugarer
-- For state that needs to be updated during the typechecking
-- phase and returned at end, use a 'TcRef' (= 'IORef').
data TcGblEnv
  = TcGblEnv {
        tcg_mod          :: Module,    -- ^ Module being compiled
        tcg_semantic_mod :: Module,    -- ^ If a signature, the backing module
            -- See also Note [Identity versus semantic module]
        tcg_src :: HscSource,
          -- ^ What kind of module (regular Haskell, hs-boot, ext-core)
        -- tcg_sig_of :: Maybe Module,
        --   -- ^ Are we being compiled as a signature of an implementation?
        -- tcg_impl_rdr_env :: Maybe GlobalRdrEnv,
        --   -- ^ Environment used only during -sig-of for resolving top level
        --   -- bindings. See Note [Signature parameters in TcGblEnv and DynFlags]

        tcg_rdr_env :: GlobalRdrEnv,   -- ^ Top level envt; used during renaming
        tcg_default :: Maybe [Type],
          -- ^ Types used for defaulting. @Nothing@ => no @default@ decl

        tcg_fix_env   :: FixityEnv,    -- ^ Just for things in this module
        tcg_field_env :: RecFieldEnv,  -- ^ Just for things in this module
            -- See Note [The interactive package] in HscTypes

        tcg_type_env :: TypeEnv,
          -- ^ Global type env for the module we are compiling now. All
          -- TyCons and Classes (for this module) end up in here right away,
          -- along with their derived constructors, selectors.
          --
          -- (Ids defined in this module start in the local envt, though they
          -- move to the global envt during zonking)
          --
          -- NB: for what "things in this module" means, see
          -- Note [The interactive package] in HscTypes

        tcg_type_env_var :: TcRef TypeEnv,
                -- Used only to initialise the interface-file
                -- typechecker in initIfaceTcRn, so that it can see stuff
                -- bound in this module when dealing with hi-boot recursions
                -- Updated at intervals (e.g. after dealing with types and classes)

        tcg_inst_env :: !InstEnv,
          -- ^ Instance envt for all /home-package/ modules;
          -- Includes the dfuns in tcg_insts
          -- NB. BangPattern is to fix a leak, see #15111
        tcg_fam_inst_env :: !FamInstEnv,  -- ^ Ditto for family instances
          -- NB. BangPattern is to fix a leak, see #15111
        tcg_ann_env :: AnnEnv,            -- ^ And for annotations

        tcg_visible_orphan_mods :: ModuleSet,
          -- ^ The set of orphan modules which are transitively reachable from
          -- direct imports. We use this to figure out if an orphan instance
          -- in the global InstEnv should be considered visible.
          -- See Note [Instance lookup and orphan instances] in InstEnv

                -- Now a bunch of things about this module that are simply
                -- accumulated, but never consulted until the end.
                -- Nevertheless, it's convenient to accumulate them along
                -- with the rest of the info from this module.
        tcg_exports :: [AvailInfo],    -- ^ What is exported
        tcg_imports :: ImportAvails,
          -- ^ Information about what was imported from where, including
          -- things bound in this module. Also store Safe Haskell info
          -- here about transitive trusted package requirements.

        tcg_dus :: DefUses,   -- ^ What is defined in this module and what is used.
        tcg_used_rdrnames :: TcRef (Set RdrName),
          -- See Note [Tracking unused binding and imports]

        tcg_keep :: TcRef NameSet,
          -- ^ Locally-defined top-level names to keep alive.
          --
          -- "Keep alive" means give them an Exported flag, so that the
          -- simplifier does not discard them as dead code, and so that they
          -- are exposed in the interface file (but not to export to the
          -- user).
          --
          -- Some things, like dict-fun Ids and default-method Ids are "born"
          -- with the Exported flag on, for exactly the above reason, but some
          -- we only discover as we go. Specifically:
          --
          --   * The to/from functions for generic data types
          --
          --   * Top-level variables appearing free in the RHS of an orphan
          --     rule
          --
          --   * Top-level variables appearing free in a TH bracket

        tcg_th_used :: TcRef Bool,
          -- ^ @True@ <=> Template Haskell syntax used.
          --
          -- We need this so that we can generate a dependency on the
          -- Template Haskell package, because the desugarer is going
          -- to emit loads of references to TH symbols. The reference
          -- is implicit rather than explicit, so we have to zap a
          -- mutable variable.

        tcg_th_splice_used :: TcRef Bool,
          -- ^ @True@ <=> A Template Haskell splice was used.
          --
          -- Splices disable recompilation avoidance (see #481)

        tcg_dfun_n :: TcRef OccSet,
          -- ^ Allows us to choose unique DFun names.

        tcg_merged :: [(Module, Fingerprint)],
          -- ^ The requirements we merged with; we always have to recompile
          -- if any of these changed.

        -- The next fields accumulate the payload of the module
        -- The binds, rules and foreign-decl fields are collected
        -- initially in un-zonked form and are finally zonked in tcRnSrcDecls

        tcg_rn_exports :: Maybe [Located (IE Name)],
        tcg_rn_imports :: [LImportDecl Name],
                -- Keep the renamed imports regardless. They are not
                -- voluminous and are needed if you want to report unused imports

        tcg_rn_decls :: Maybe (HsGroup Name),
          -- ^ Renamed decls, maybe. @Nothing@ <=> Don't retain renamed
          -- decls.

        tcg_dependent_files :: TcRef [FilePath],  -- ^ dependencies from addDependentFile

#ifdef ETA_REPL
        tcg_th_topdecls :: TcRef [LHsDecl RdrName],
          -- ^ Top-level declarations from addTopDecls

        tcg_th_topnames :: TcRef NameSet,
          -- ^ Exact names bound in top-level declarations in tcg_th_topdecls

        tcg_th_modfinalizers :: TcRef [TcM ()],
          -- ^ Template Haskell module finalizers.
          --
          -- They are computations in the @TcM@ monad rather than @Q@ because we
          -- set them to use particular local environments.

        tcg_th_state :: TcRef (Map TypeRep Dynamic),
        tcg_th_remote_state :: TcRef (Maybe (ForeignRef (IORef ()))),  -- QState
          -- ^ Template Haskell state
#endif /* ETA_REPL */

        tcg_ev_binds :: Bag EvBind,     -- Top-level evidence bindings

        -- Things defined in this module, or (in GHCi)
        -- in the declarations for a single GHCi command.
        -- For the latter, see Note [The interactive package] in HscTypes
        tcg_binds     :: LHsBinds Id,       -- Value bindings in this module
        tcg_sigs      :: NameSet,           -- ...Top-level names that *lack* a signature
        tcg_imp_specs :: [LTcSpecPrag],     -- ...SPECIALISE prags for imported Ids
        tcg_warns     :: Warnings,          -- ...Warnings and deprecations
        tcg_anns      :: [Annotation],      -- ...Annotations
        tcg_tcs       :: [TyCon],           -- ...TyCons and Classes
        tcg_insts     :: [ClsInst],         -- ...Instances
        tcg_fam_insts :: [FamInst],         -- ...Family instances
        tcg_rules     :: [LRuleDecl Id],    -- ...Rules
        tcg_fords     :: [LForeignDecl Id], -- ...Foreign import & exports
        tcg_vects     :: [LVectDecl Id],    -- ...Vectorisation declarations
        tcg_patsyns   :: [PatSyn],          -- ...Pattern synonyms

        tcg_doc_hdr :: Maybe LHsDocString,  -- ^ Maybe Haddock header docs
        tcg_hpc :: !AnyHpcUsage,            -- ^ @True@ if any part of the
                                            -- prog uses hpc instrumentation.
          -- NB. BangPattern is to fix a leak, see #15111

        tcg_self_boot :: SelfBootInfo,      -- ^ Whether this module has a
                                            -- corresponding hi-boot file

        tcg_main :: Maybe Name,             -- ^ The Name of the main
                                            -- function, if this module is
                                            -- the main module.

        tcg_safeInfer :: TcRef Bool,        -- Has the typechecker
                                            -- inferred this module
                                            -- as -XSafe (Safe Haskell)

        -- | A list of user-defined plugins for the constraint solver.
        tcg_tc_plugins :: [TcPluginSolver],

        tcg_top_loc :: RealSrcSpan,
          -- ^ The RealSrcSpan this module came from

        tcg_static_wc :: TcRef WantedConstraints
          -- ^ Wanted constraints of static forms.
          -- See Note [Constraints in static forms].
    }
-- Note [Constraints in static forms]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- When a static form produces constraints like
--
-- f :: StaticPtr (Bool -> String)
-- f = static show
--
-- we collect them in tcg_static_wc and resolve them at the end
-- of type checking. They need to be resolved separately because
-- we don't want to resolve them in the context of the enclosing
-- expression. Consider
--
-- g :: Show a => StaticPtr (a -> String)
-- g = static show
--
-- If the @Show a0@ constraint that the body of the static form produces was
-- resolved in the context of the enclosing expression, then the body of the
-- static form wouldn't be closed because the Show dictionary would come from
-- g's context instead of coming from the top level.
--
-- | A 'TcGblEnv' identifies itself by its semantic module.
instance ContainsModule TcGblEnv where
    extractModule = tcg_semantic_mod
-- | Record-field information for constructors defined in this module.
data RecFieldEnv
  = RecFields (NameEnv [Name])  -- Maps a constructor name *in this module*
                                -- to the fields for that constructor
              NameSet           -- Set of all fields declared *in this module*;
                                -- used to suppress name-shadowing complaints
                                -- when using record wild cards
                                -- E.g.  let fld = e in C {..}
        -- This is used when dealing with ".." notation in record
        -- construction and pattern matching.
        -- The FieldEnv deals *only* with constructors defined in *this*
        -- module. For imported modules, we get the same info from the
        -- TypeEnv
-- | Whether the module being compiled has a corresponding hi-boot file.
data SelfBootInfo
  = NoSelfBoot   -- No corresponding hi-boot file
  | SelfBoot
      { sb_mds :: ModDetails  -- There was a hi-boot file,
      , sb_tcs :: NameSet }   -- and these Ids
  -- We need this info to compute a safe approximation to
  -- recursive loops, to avoid infinite inlinings
{-
Note [Tracking unused binding and imports]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We gather two sorts of usage information
* tcg_dus (defs/uses)
Records *defined* Names (local, top-level)
and *used* Names (local or imported)
Used (a) to report "defined but not used"
(see RnNames.reportUnusedNames)
(b) to generate version-tracking usage info in interface
files (see MkIface.mkUsedNames)
This usage info is mainly gathered by the renamer's
gathering of free-variables
* tcg_used_rdrnames
Records used *imported* (not locally-defined) RdrNames
Used only to report unused import declarations
Notice that they are RdrNames, not Names, so we can
tell whether the reference was qualified or unqualified, which
is essential in deciding whether a particular import decl
is unnecessary. This info isn't present in Names.
************************************************************************
* *
The local typechecker environment
* *
************************************************************************
Note [The Global-Env/Local-Env story]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
During type checking, we keep in the tcg_type_env
* All types and classes
* All Ids derived from types and classes (constructors, selectors)
At the end of type checking, we zonk the local bindings,
and as we do so we add to the tcg_type_env
* Locally defined top-level Ids
Why? Because they are now Ids not TcIds. This final GlobalEnv is
a) fed back (via the knot) to typechecking the
unfoldings of interface signatures
b) used in the ModDetails of this module
-}
data TcLclEnv           -- Changes as we move inside an expression
                        -- Discarded after typecheck/rename; not passed on to desugarer
  = TcLclEnv {
        tcl_loc   :: RealSrcSpan,   -- Source span
        tcl_ctxt  :: [ErrCtxt],     -- Error context, innermost on top
        tcl_tclvl :: TcLevel,       -- Birthplace for new unification variables

        tcl_th_ctxt  :: ThStage,    -- Template Haskell context
        tcl_th_bndrs :: ThBindEnv,  -- Binding level of in-scope Names
                                    -- defined in this module (not imported)

        tcl_arrow_ctxt :: ArrowCtxt,  -- Arrow-notation context

        tcl_rdr :: LocalRdrEnv,     -- Local name envt
                -- Maintained during renaming, of course, but also during
                -- type checking, solely so that when renaming a Template-Haskell
                -- splice we have the right environment for the renamer.
                --
                --   Does *not* include global name envt; may shadow it
                --   Includes both ordinary variables and type variables;
                --   they are kept distinct because tyvar have a different
                --   occurrence constructor (Name.TvOcc)
                -- We still need the unsullied global name env so that
                --   we can look up record field names

        tcl_env :: TcTypeEnv,       -- The local type environment:
                                    -- Ids and TyVars defined in this module

        tcl_bndrs :: [TcIdBinder],  -- Stack of locally-bound Ids, innermost on top
                                    -- Used only for error reporting

        tcl_tidy :: TidyEnv,        -- Used for tidying types; contains all
                                    -- in-scope type variables (but not term variables)

        tcl_tyvars :: TcRef TcTyVarSet,  -- The "global tyvars"
                -- Namely, the in-scope TyVars bound in tcl_env,
                -- plus the tyvars mentioned in the types of Ids bound
                -- in tcl_env.
                -- Why mutable? see notes with tcGetGlobalTyVars

        tcl_lie  :: TcRef WantedConstraints,  -- Place to accumulate type constraints
        tcl_errs :: TcRef Messages            -- Place to accumulate errors
    }
-- | The local type environment: Ids and TyVars defined in this module.
type TcTypeEnv = NameEnv TcTyThing

type ThBindEnv = NameEnv (TopLevelFlag, ThLevel)
   -- Domain = all Ids bound in this module (ie not imported)
   -- The TopLevelFlag tells if the binding is syntactically top level.
   -- We need to know this, because the cross-stage persistence story allows
   -- cross-stage at arbitrary types if the Id is bound at top level.
   --
   -- Nota bene: a ThLevel of 'outerLevel' is *not* the same as being
   -- bound at top level!  See Note [Template Haskell levels] in TcSplice
-- | A locally-bound Id, together with whether its binding is
-- syntactically top level. Kept on a stack for error reporting.
data TcIdBinder
  = TcIdBndr
      TcId
      TopLevelFlag    -- Tells whether the binding is syntactically top-level
                      --  (The monomorphic Ids for a recursive group count
                      --   as not-top-level for this purpose.)
{- Note [Given Insts]
~~~~~~~~~~~~~~~~~~
Because of GADTs, we have to pass inwards the Insts provided by type signatures
and existential contexts. Consider
data T a where { T1 :: b -> b -> T [b] }
f :: Eq a => T a -> Bool
f (T1 x y) = [x]==[y]
The constructor T1 binds an existential variable 'b', and we need Eq [b].
Well, we have it, because Eq a refines to Eq [b], but we can only spot that if we
pass it inwards.
-}
-- | Type alias for 'IORef'; the convention is we'll use this for mutable
-- bits of data in 'TcGblEnv' which are updated during typechecking and
-- returned at the end.
type TcRef a = IORef a

-- ToDo: when should I refer to it as a 'TcId' instead of an 'Id'?
type TcId    = Id
type TcIdSet = IdSet
---------------------------
-- Template Haskell stages and levels
---------------------------
-- | Whether a splice is a typed ($$e) or untyped ($e) splice.
data SpliceType = Typed | Untyped

data ThStage    -- See Note [Template Haskell state diagram] in TcSplice
  = Splice SpliceType   -- Inside a top-level splice
                        -- This code will be run *at compile time*;
                        --   the result replaces the splice
                        -- Binding level = 0

  | RunSplice (TcRef [ForeignRef ()])
      -- Set when running a splice, i.e. NOT when renaming or typechecking the
      -- Haskell code for the splice. See Note [RunSplice ThLevel].
      --
      -- Contains a list of mod finalizers collected while executing the splice.
      --
      -- 'addModFinalizer' inserts finalizers here, and from here they are taken
      -- to construct an @HsSpliced@ annotation for untyped splices. See Note
      -- [Delaying modFinalizers in untyped splices] in "RnSplice".
      --
      -- For typed splices, the typechecker takes finalizers from here and
      -- inserts them in the list of finalizers in the global environment.
      --
      -- See Note [Collecting modFinalizers in typed splices] in "TcSplice".

  | Comp        -- Ordinary Haskell code
                -- Binding level = 1

  | Brack       -- Inside brackets
      ThStage   --   Enclosing stage
      PendingStuff

-- | What kind of pending-splice bookkeeping we are doing inside a bracket.
data PendingStuff
  = RnPendingUntyped             -- Renaming the inside of an *untyped* bracket
      (TcRef [PendingRnSplice])  -- Pending splices in here

  | RnPendingTyped               -- Renaming the inside of a *typed* bracket

  | TcPending                    -- Typechecking the inside of a typed bracket
      (TcRef [PendingTcSplice])  -- Accumulate pending splices here
      (TcRef WantedConstraints)  --   and type constraints here
-- | Initial stages for ordinary code, annotations, and top-level splices.
topStage, topAnnStage, topSpliceStage :: ThStage
topStage       = Comp
topAnnStage    = Splice Untyped
topSpliceStage = Splice Untyped
-- | Debug rendering of a 'ThStage' constructor (enclosing stage shown
-- in parentheses for 'Brack').
instance Outputable ThStage where
    ppr stage = case stage of
        Splice _    -> text "Splice"
        RunSplice _ -> text "RunSplice"
        Comp        -> text "Comp"
        Brack s _   -> text "Brack" <> parens (ppr s)
type ThLevel = Int
    -- NB: see Note [Template Haskell levels] in TcSplice
    -- Incremented when going inside a bracket,
    -- decremented when going inside a splice
    -- NB: ThLevel is one greater than the 'n' in Fig 2 of the
    --     original "Template meta-programming for Haskell" paper

impLevel, outerLevel :: ThLevel
impLevel   = 0  -- Imported things; they can be used inside a top level splice
outerLevel = 1  -- Things defined outside brackets
-- | The Template Haskell binding level for a given stage: 0 inside a
-- top-level splice, 1 for ordinary code, +1 per enclosing bracket.
-- Calling this while running a splice is a panic; see
-- Note [RunSplice ThLevel].
thLevel :: ThStage -> ThLevel
thLevel stage = case stage of
    Splice _    -> 0
    RunSplice _ ->
        -- See Note [RunSplice ThLevel].
        panic "thLevel: called when running a splice"
    Comp        -> 1
    Brack s _   -> 1 + thLevel s
{- Note [RunSplice ThLevel]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The 'RunSplice' stage is set when executing a splice, and only when running a
splice. In particular it is not set when the splice is renamed or typechecked.
'RunSplice' is needed to provide a reference where 'addModFinalizer' can insert
the finalizer (see Note [Delaying modFinalizers in untyped splices]), and
'addModFinalizer' runs when doing Q things. Therefore, It doesn't make sense to
set 'RunSplice' when renaming or typechecking the splice, where 'Splice', 'Brack'
or 'Comp' are used instead.
-}
---------------------------
-- Arrow-notation context
---------------------------
{- Note [Escaping the arrow scope]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In arrow notation, a variable bound by a proc (or enclosed let/kappa)
is not in scope to the left of an arrow tail (-<) or the head of (|..|).
For example
proc x -> (e1 -< e2)
Here, x is not in scope in e1, but it is in scope in e2. This can get
a bit complicated:
let x = 3 in
proc y -> (proc z -> e1) -< e2
Here, x and z are in scope in e1, but y is not.
We implement this by
recording the environment when passing a proc (using newArrowScope),
and returning to that (using escapeArrowScope) on the left of -< and the
head of (|..|).
All this can be dealt with by the *renamer*. But the type checker needs
to be involved too. Example (arrowfail001)
class Foo a where foo :: a -> ()
data Bar = forall a. Foo a => Bar a
get :: Bar -> ()
get = proc x -> case x of Bar a -> foo -< a
Here the call of 'foo' gives rise to a (Foo a) constraint that should not
be captured by the pattern match on 'Bar'. Rather it should join the
constraints from further out. So we must capture the constraint bag
from further out in the ArrowCtxt that we push inwards.
-}
-- | Arrow-notation context; see Note [Escaping the arrow scope] above.
data ArrowCtxt
  = NoArrowCtxt
  | ArrowCtxt LocalRdrEnv (TcRef WantedConstraints)
---------------------------
-- TcTyThing
---------------------------
-- | What a Name in the local type environment can stand for.
data TcTyThing
  = AGlobal TyThing          -- Used only in the return type of a lookup

  | ATcId   {                -- Ids defined in this module; may not be fully zonked
        tct_id     :: TcId,
        tct_closed :: TopLevelFlag }  -- See Note [Bindings with closed types]

  | ATyVar  Name TcTyVar     -- The type variable to which the lexically scoped type
                             -- variable is bound. We only need the Name
                             -- for error-message purposes; it is the corresponding
                             -- Name in the domain of the envt

  | AThing  TcKind           -- Used temporarily, during kind checking, for the
                             -- tycons and classes in this recursive group
                             -- Can be a mono-kind or a poly-kind; in TcTyClsDcls see
                             -- Note [Type checking recursive type and class declarations]

  | APromotionErr PromotionErr
-- | Why a thing could not be promoted to the kind level.
data PromotionErr
  = TyConPE        -- TyCon used in a kind before we are ready
                   --     data T :: T -> * where ...
  | ClassPE        -- Ditto Class

  | FamDataConPE   -- Data constructor for a data family
                   -- See Note [AFamDataCon: not promoting data family constructors] in TcRnDriver

  | RecDataConPE   -- Data constructor in a recursive loop
                   -- See Note [ARecDataCon: recursion and promoting data constructors] in TcTyClsDecls
  | NoDataKinds    -- -XDataKinds not enabled
instance Outputable TcTyThing where     -- Debugging only
  ppr (AGlobal g)         = pprTyThing g
  ppr elt@(ATcId {})      = text "Identifier" <>
                            brackets (ppr (tct_id elt) <> dcolon
                                      <> ppr (varType (tct_id elt)) <> comma
                                      <+> ppr (tct_closed elt))
  ppr (ATyVar n tv)       = text "Type variable" <+> quotes (ppr n) <+> equals <+> ppr tv
  ppr (AThing k)          = text "AThing" <+> ppr k
  ppr (APromotionErr err) = text "APromotionErr" <+> ppr err
-- | Debug rendering: each 'PromotionErr' prints as its constructor name.
instance Outputable PromotionErr where
    ppr pe = case pe of
        ClassPE      -> text "ClassPE"
        TyConPE      -> text "TyConPE"
        FamDataConPE -> text "FamDataConPE"
        RecDataConPE -> text "RecDataConPE"
        NoDataKinds  -> text "NoDataKinds"
-- | A short English category name for a 'TcTyThing', for error messages.
pprTcTyThingCategory :: TcTyThing -> SDoc
pprTcTyThingCategory thing = case thing of
    AGlobal g        -> pprTyThingCategory g
    ATyVar {}        -> ptext (sLit "Type variable")
    ATcId {}         -> ptext (sLit "Local identifier")
    AThing {}        -> ptext (sLit "Kinded thing")
    APromotionErr pe -> pprPECategory pe
-- | A short English category name for the thing a 'PromotionErr' is about.
pprPECategory :: PromotionErr -> SDoc
pprPECategory pe = case pe of
    ClassPE      -> ptext (sLit "Class")
    TyConPE      -> ptext (sLit "Type constructor")
    FamDataConPE -> ptext (sLit "Data constructor")
    RecDataConPE -> ptext (sLit "Data constructor")
    NoDataKinds  -> ptext (sLit "Data constructor")
{-
Note [Bindings with closed types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = let g ys = map not ys
in ...
Can we generalise 'g' under the OutsideIn algorithm? Yes,
because all g's free variables are top-level; that is they themselves
have no free type variables, and it is the type variables in the
environment that makes things tricky for OutsideIn generalisation.
Definition:
A variable is "closed", and has tct_closed set to TopLevel,
iff
a) all its free variables are imported, or are themselves closed
b) generalisation is not restricted by the monomorphism restriction
Under OutsideIn we are free to generalise a closed let-binding.
This is an extension compared to the JFP paper on OutsideIn, which
used "top-level" as a proxy for "closed". (It's not a good proxy
anyway -- the MR can make a top-level binding with a free type
variable.)
Note that:
* A top-level binding may not be closed, if it suffers from the MR
* A nested binding may be closed (eg 'g' in the example we started with)
Indeed, that's the point; whether a function is defined at top level
or nested is orthogonal to the question of whether or not it is closed
* A binding may be non-closed because it mentions a lexically scoped
*type variable* Eg
f :: forall a. blah
f x = let g y = ...(y::a)...
-}
-- | One frame of error-message context, pushed as we descend into the
-- program being checked.
type ErrCtxt = (Bool, TidyEnv -> TcM (TidyEnv, MsgDoc))
        -- Monadic so that we have a chance
        -- to deal with bound type variables just before error
        -- message construction

        -- Bool:  True <=> this is a landmark context; do not
        --                 discard it when trimming for display
{-
************************************************************************
* *
Operations over ImportAvails
* *
************************************************************************
-}
-- | 'ImportAvails' summarises what was imported from where, irrespective of
-- whether the imported things are actually used or not. It is used:
--
-- * when processing the export list,
--
-- * when constructing usage info for the interface file,
--
-- * to identify the list of directly imported modules for initialisation
--   purposes and for optimised overlap checking of family instances,
--
-- * when figuring out what things are really unused
--
data ImportAvails
  = ImportAvails {
      imp_mods :: ImportedMods,
        --      = ModuleEnv [(ModuleName, Bool, SrcSpan, Bool)],
        -- ^ Domain is all directly-imported modules
        -- The 'ModuleName' is what the module was imported as, e.g. in
        -- @
        --     import Foo as Bar
        -- @
        -- it is @Bar@.
        --
        -- The 'Bool' means:
        --
        --  - @True@ => import was @import Foo ()@
        --
        --  - @False@ => import was some other form
        --
        -- Used
        --
        --   (a) to help construct the usage information in the interface
        --       file; if we import something we need to recompile if the
        --       export version changes
        --
        --   (b) to specify what child modules to initialise
        --
        -- We need a full ModuleEnv rather than a ModuleNameEnv here,
        -- because we might be importing modules of the same name from
        -- different packages. (currently not the case, but might be in the
        -- future).

      imp_dep_mods :: ModuleNameEnv (ModuleName, IsBootInterface),
        -- ^ Home-package modules needed by the module being compiled
        --
        -- It doesn't matter whether any of these dependencies
        -- are actually /used/ when compiling the module; they
        -- are listed if they are below it at all.  For
        -- example, suppose M imports A which imports X.  Then
        -- compiling M might not need to consult X.hi, but X
        -- is still listed in M's dependencies.

      imp_dep_pkgs :: Set InstalledUnitId,
        -- ^ Packages needed by the module being compiled, whether directly,
        -- or via other modules in this package, or via modules imported
        -- from other packages.

      imp_trust_pkgs :: Set InstalledUnitId,
        -- ^ This is strictly a subset of imp_dep_pkgs and records the
        -- packages the current module needs to trust for Safe Haskell
        -- compilation to succeed. A package is required to be trusted if
        -- we are dependent on a trustworthy module in that package.
        -- While perhaps making imp_dep_pkgs a tuple of (UnitId, Bool)
        -- where True for the bool indicates the package is required to be
        -- trusted is the more logical  design, doing so complicates a lot
        -- of code not concerned with Safe Haskell.
        -- See Note [RnNames . Tracking Trust Transitively]

      imp_trust_own_pkg :: Bool,
        -- ^ Do we require that our own package is trusted?
        -- This is to handle efficiently the case where a Safe module imports
        -- a Trustworthy module that resides in the same package as it.
        -- See Note [RnNames . Trust Own Package]

      imp_orphs :: [Module],
        -- ^ Orphan modules below us in the import tree (and maybe including
        -- us for imported modules)

      imp_finsts :: [Module]
        -- ^ Family instance modules below us in the import tree (and maybe
        -- including us for imported modules)
    }
-- | Build a 'ModuleNameEnv' keyed by module name from a list of
-- (module name, boot-interface flag) dependency pairs.
mkModDeps :: [(ModuleName, IsBootInterface)]
          -> ModuleNameEnv (ModuleName, IsBootInterface)
mkModDeps = foldl insert emptyUFM
  where
    -- Key each dependency pair by its module name
    insert env dep@(mod_nm, _) = addToUFM env mod_nm dep
-- | The dependency entries of the environment, in a deterministic
-- (sorted) order.
modDepsElts
  :: ModuleNameEnv (ModuleName, IsBootInterface)
  -> [(ModuleName, IsBootInterface)]
modDepsElts env = sort (nonDetEltsUFM env)
  -- It's OK to use nonDetEltsUFM here because sorting by module names
  -- restores determinism
-- | An 'ImportAvails' recording no imports at all.
emptyImportAvails :: ImportAvails
emptyImportAvails = ImportAvails { imp_mods          = emptyModuleEnv,
                                   imp_dep_mods      = emptyUFM,
                                   imp_dep_pkgs      = S.empty,
                                   imp_trust_pkgs    = S.empty,
                                   imp_trust_own_pkg = False,
                                   imp_orphs         = [],
                                   imp_finsts        = [] }
-- | Union two ImportAvails
--
-- This function is a key part of Import handling, basically
-- for each import we create a separate ImportAvails structure
-- and then union them all together with this function.
plusImportAvails :: ImportAvails -> ImportAvails -> ImportAvails
plusImportAvails
  (ImportAvails { imp_mods = mods1,
                  imp_dep_mods = dmods1, imp_dep_pkgs = dpkgs1,
                  imp_trust_pkgs = tpkgs1, imp_trust_own_pkg = tself1,
                  imp_orphs = orphs1, imp_finsts = finsts1 })
  (ImportAvails { imp_mods = mods2,
                  imp_dep_mods = dmods2, imp_dep_pkgs = dpkgs2,
                  imp_trust_pkgs = tpkgs2, imp_trust_own_pkg = tself2,
                  imp_orphs = orphs2, imp_finsts = finsts2 })
  = ImportAvails { imp_mods          = plusModuleEnv_C (++) mods1 mods2,
                   imp_dep_mods      = plusUFM_C plus_mod_dep dmods1 dmods2,
                   imp_dep_pkgs      = dpkgs1 `S.union` dpkgs2,
                   imp_trust_pkgs    = tpkgs1 `S.union` tpkgs2,
                   imp_trust_own_pkg = tself1 || tself2,
                   imp_orphs         = orphs1 `unionLists` orphs2,
                   imp_finsts        = finsts1 `unionLists` finsts2 }
  where
    -- Merge two dependency entries for the same module name
    plus_mod_dep (m1, boot1) (_, boot2)
      = --WARN( not (m1 == m2), (ppr m1 <+> ppr m2) $$ (ppr boot1 <+> ppr boot2) )
        -- Check mod-names match
        (m1, boot1 && boot2) -- If either side can "see" a non-hi-boot interface, use that
{-
************************************************************************
* *
\subsection{Where from}
* *
************************************************************************
The @WhereFrom@ type controls where the renamer looks for an interface file
-}
-- | Where the renamer should look for an interface file.
data WhereFrom
  = ImportByUser IsBootInterface -- Ordinary user import (perhaps {-# SOURCE #-})
  | ImportBySystem               -- Non user import.
  | ImportByPlugin               -- Importing a plugin;
                                 -- See Note [Care with plugin imports] in LoadIface
instance Outputable WhereFrom where
  -- A user {-# SOURCE #-} import is flagged; an ordinary user import
  -- prints nothing at all.
  ppr (ImportByUser is_boot) = if is_boot
                               then ptext (sLit "{- SOURCE -}")
                               else empty
  ppr ImportBySystem         = ptext (sLit "{- SYSTEM -}")
  ppr ImportByPlugin         = ptext (sLit "{- PLUGIN -}")
{-
************************************************************************
* *
* Canonical constraints *
* *
* These are the constraints the low-level simplifier works with *
* *
************************************************************************
-}
-- The syntax of xi types:
-- xi ::= a | T xis | xis -> xis | ... | forall a. tau
-- Two important notes:
-- (i) No type families, unless we are under a ForAll
-- (ii) Note that xi types can contain unexpanded type synonyms;
-- however, the (transitive) expansions of those type synonyms
-- will not contain any type functions, unless we are under a ForAll.
-- We enforce the structure of Xi types when we flatten (TcCanonical)
type Xi = Type       -- In many comments, "xi" ranges over Xi

-- | A collection ("bag") of constraints.
type Cts = Bag Ct
-- | A constraint, classified by the canonical form (if any) it has
-- been put into.  See the per-constructor comments for the invariants
-- each form maintains.
data Ct
  -- Atomic canonical constraints
  = CDictCan {  -- e.g.  Num xi
      cc_ev :: CtEvidence,  -- See Note [Ct/evidence invariant]
      cc_class  :: Class,
      cc_tyargs :: [Xi]     -- cc_tyargs are function-free, hence Xi
    }

  | CIrredEvCan {  -- These stand for yet-unusable predicates
      cc_ev :: CtEvidence   -- See Note [Ct/evidence invariant]
        -- The ctev_pred of the evidence is
        -- of form   (tv xi1 xi2 ... xin)
        --      or   (tv1 ~ ty2)  where the CTyEqCan  kind invariant fails
        --      or   (F tys ~ ty) where the CFunEqCan kind invariant fails
        -- See Note [CIrredEvCan constraints]
    }

  | CTyEqCan {  -- tv ~ rhs
      -- Invariants:
      --   * See Note [Applying the inert substitution] in TcFlatten
      --   * tv not in tvs(rhs)   (occurs check)
      --   * If tv is a TauTv, then rhs has no foralls
      --       (this avoids substituting a forall for the tyvar in other types)
      --   * typeKind ty `subKind` typeKind tv
      --       See Note [Kind orientation for CTyEqCan]
      --   * rhs is not necessarily function-free,
      --       but it has no top-level function.
      --     E.g. a ~ [F b]  is fine
      --     but  a ~ F b    is not
      --   * If the equality is representational, rhs has no top-level newtype
      --     See Note [No top-level newtypes on RHS of representational
      --     equalities] in TcCanonical
      --   * If rhs is also a tv, then it is oriented to give best chance of
      --     unification happening; eg if rhs is touchable then lhs is too
      cc_ev :: CtEvidence,  -- See Note [Ct/evidence invariant]
      cc_tyvar  :: TcTyVar,
      cc_rhs    :: TcType,  -- Not necessarily function-free (hence not Xi)
                            -- See invariants above
      cc_eq_rel :: EqRel
    }

  | CFunEqCan {  -- F xis ~ fsk
      -- Invariants:
      --   * isTypeFamilyTyCon cc_fun
      --   * typeKind (F xis) = tyVarKind fsk
      --   * always Nominal role
      --   * always Given or Wanted, never Derived
      cc_ev     :: CtEvidence,  -- See Note [Ct/evidence invariant]
      cc_fun    :: TyCon,       -- A type function
      cc_tyargs :: [Xi],        -- cc_tyargs are function-free (hence Xi)
                                -- Either under-saturated or exactly saturated
                                --    *never* over-saturated (because if so
                                --    we should have decomposed)
      cc_fsk    :: TcTyVar      -- [Given]  always a FlatSkol skolem
                                -- [Wanted] always a FlatMetaTv unification variable
                                -- See Note [The flattening story] in TcFlatten
    }

  | CNonCanonical {  -- See Note [NonCanonical Semantics]
      cc_ev :: CtEvidence
    }

  | CHoleCan {  -- Treated as an "insoluble" constraint
                -- See Note [Insoluble constraints]
      cc_ev   :: CtEvidence,
      cc_occ  :: OccName,   -- The name of this hole
      cc_hole :: HoleSort   -- The sort of this hole (expr, type, ...)
    }
-- | Used to indicate which sort of hole we have.
data HoleSort = ExprHole  -- ^ A hole in an expression (TypedHoles)
              | TypeHole  -- ^ A hole in a type (PartialTypeSignatures)
{-
Note [Kind orientation for CTyEqCan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Given an equality (t:* ~ s:Open), we can't solve it by updating t:=s,
regardless of how touchable 't' is, because the kinds don't work.
Instead we absolutely must re-orient it. Reason: if that gets into the
inert set we'll start replacing t's by s's, and that might make a
kind-correct type into a kind error. After re-orienting,
we may be able to solve by updating s:=t.
Hence in a CTyEqCan, (t:k1 ~ xi:k2) we require that k2 is a subkind of k1.
If the two have incompatible kinds, we just don't use a CTyEqCan at all.
See Note [Equalities with incompatible kinds] in TcCanonical
We can't require *equal* kinds, because
* wanted constraints don't necessarily have identical kinds
eg alpha::? ~ Int
* a solved wanted constraint becomes a given
Note [Kind orientation for CFunEqCan]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For (F xis ~ rhs) we require that kind(lhs) is a subkind of kind(rhs).
This really only matters when rhs is an Open type variable (since only type
variables have Open kinds):
F ty ~ (a:Open)
which can happen, say, from
f :: F a b
f = undefined -- The a:Open comes from instantiating 'undefined'
Note that the kind invariant is maintained by rewriting.
Eg wanted1 rewrites wanted2; if both were compatible kinds before,
wanted2 will be afterwards. Similarly givens.
Caveat:
- Givens from higher-rank, such as:
type family T b :: * -> * -> *
type instance T Bool = (->)
f :: forall a. ((T a ~ (->)) => ...) -> a -> ...
flop = f (...) True
Whereas we would be able to apply the type instance, we would not be able to
use the given (T Bool ~ (->)) in the body of 'flop'
Note [CIrredEvCan constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
CIrredEvCan constraints are used for constraints that are "stuck"
- we can't solve them (yet)
- we can't use them to solve other constraints
- but they may become soluble if we substitute for some
of the type variables in the constraint
Example 1: (c Int), where c :: * -> Constraint. We can't do anything
with this yet, but if later c := Num, *then* we can solve it
Example 2: a ~ b, where a :: *, b :: k, where k is a kind variable
We don't want to use this to substitute 'b' for 'a', in case
            'k' is subsequently unified with (say) *->*, because then
we'd have ill-kinded types floating about. Rather we want
to defer using the equality altogether until 'k' get resolved.
Note [Ct/evidence invariant]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If ct :: Ct, then extra fields of 'ct' cache precisely the ctev_pred field
of (cc_ev ct), and is fully rewritten wrt the substitution. Eg for CDictCan,
ctev_pred (cc_ev ct) = (cc_class ct) (cc_tyargs ct)
This holds by construction; look at the unique place where CDictCan is
built (in TcCanonical).
In contrast, the type of the evidence *term* (ctev_evtm or ctev_evar) in
the evidence may *not* be fully zonked; we are careful not to look at it
during constraint solving. See Note [Evidence field of CtEvidence]
-}
-- | Wrap a piece of evidence as an (as yet) unclassified constraint.
mkNonCanonical :: CtEvidence -> Ct
mkNonCanonical ev = CNonCanonical { cc_ev = ev }

-- | Forget a constraint's canonical structure, keeping only its evidence.
mkNonCanonicalCt :: Ct -> Ct
mkNonCanonicalCt = mkNonCanonical . cc_ev
-- | The evidence carried by a constraint.
ctEvidence :: Ct -> CtEvidence
ctEvidence = cc_ev

-- | The location recorded in a constraint's evidence.
ctLoc :: Ct -> CtLoc
ctLoc = ctEvLoc . ctEvidence

-- | The predicate of a constraint.
-- See Note [Ct/evidence invariant]
ctPred :: Ct -> PredType
ctPred = ctEvPred . ctEvidence

-- | Get the flavour of the given 'Ct'
ctFlavour :: Ct -> CtFlavour
ctFlavour = ctEvFlavour . ctEvidence

-- | Get the equality relation for the given 'Ct'
ctEqRel :: Ct -> EqRel
ctEqRel = ctEvEqRel . ctEvidence
-- | Keep only the Wanted simple constraints, discarding the rest.
dropDerivedWC :: WantedConstraints -> WantedConstraints
-- See Note [Dropping derived constraints]
dropDerivedWC wc@(WC { wc_simple = simples })
  = wc { wc_simple = filterBag isWantedCt simples }
    -- The wc_impl implications are already (recursively) filtered
{-
Note [Dropping derived constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general we discard derived constraints at the end of constraint solving;
see dropDerivedWC. For example
* If we have an unsolved (Ord a), we don't want to complain about
an unsolved (Eq a) as well.
But we keep Derived *insoluble* constraints because they indicate a solid,
comprehensible error. Particularly:
* Insolubles Givens indicate unreachable code
* Insoluble kind equalities (e.g. [D] * ~ (* -> *)) may arise from
a type equality a ~ Int#, say
* Insoluble derived wanted equalities (e.g. [D] Int ~ Bool) may
arise from functional dependency interactions. We are careful
to keep a good CtOrigin on such constraints (FunDepOrigin1, FunDepOrigin2)
so that we can produce a good error message (Trac #9612)
Since we leave these Derived constraints in the residual WantedConstraints,
we must filter them out when we re-process the WantedConstraint,
in TcSimplify.solve_wanteds.
************************************************************************
* *
CtEvidence
The "flavor" of a canonical constraint
* *
************************************************************************
-}
-- | Is the constraint's evidence Wanted?
isWantedCt :: Ct -> Bool
isWantedCt = isWanted . cc_ev

-- | Is the constraint's evidence Given?
isGivenCt :: Ct -> Bool
isGivenCt = isGiven . cc_ev

-- | Is the constraint's evidence Derived?
isDerivedCt :: Ct -> Bool
isDerivedCt = isDerived . cc_ev
-- | Is this a canonical type-variable equality (CTyEqCan)?
isCTyEqCan :: Ct -> Bool
isCTyEqCan (CTyEqCan {}) = True
isCTyEqCan _             = False
-- The original had an explicit (CFunEqCan {}) = False clause, which is
-- redundant: the wildcard already covers it.  Removing it does not change
-- behaviour.
-- | If the constraint is a canonical class constraint, return its class.
isCDictCan_Maybe :: Ct -> Maybe Class
isCDictCan_Maybe (CDictCan {cc_class = cls }) = Just cls
isCDictCan_Maybe _ = Nothing

-- | Is this an irreducible ("stuck") canonical constraint?
-- See Note [CIrredEvCan constraints]
isCIrredEvCan :: Ct -> Bool
isCIrredEvCan (CIrredEvCan {}) = True
isCIrredEvCan _ = False
-- | If the constraint is a type-function equality (CFunEqCan), return
-- the type function and its arguments.
isCFunEqCan_maybe :: Ct -> Maybe (TyCon, [Type])
isCFunEqCan_maybe (CFunEqCan { cc_fun = tc, cc_tyargs = xis }) = Just (tc, xis)
isCFunEqCan_maybe _ = Nothing

-- | Is this a type-function equality (CFunEqCan)?
isCFunEqCan :: Ct -> Bool
isCFunEqCan (CFunEqCan {}) = True
isCFunEqCan _ = False

-- | Is this constraint still non-canonical?
isCNonCanonical :: Ct -> Bool
isCNonCanonical (CNonCanonical {}) = True
isCNonCanonical _ = False

-- | Is this a hole constraint (expression or type hole)?
isHoleCt:: Ct -> Bool
isHoleCt (CHoleCan {}) = True
isHoleCt _ = False
-- TODO: Finish backport
-- isOutOfScopeCt :: Ct -> Bool
-- -- We treat expression holes representing out-of-scope variables a bit
-- -- differently when it comes to error reporting
-- isOutOfScopeCt (CHoleCan { cc_hole = ExprHole (OutOfScope {}) }) = True
-- isOutOfScopeCt _ = False
-- | True for hole constraints arising from a typed expression hole.
isTypedHoleCt :: Ct -> Bool
isTypedHoleCt ct = case ct of
  CHoleCan { cc_hole = ExprHole } -> True
  _                               -> False
-- | The following constraints are considered to be a custom type error:
--    1. TypeError msg
--    2. TypeError msg ~ Something  (and the other way around)
--    3. C (TypeError msg)          (for any parameter of class constraint)
getUserTypeErrorMsg :: Ct -> Maybe (Kind, Type)
getUserTypeErrorMsg ct
  | Just (_,t1,t2) <- getEqPredTys_maybe ctT    = oneOf [t1,t2]
  | Just (_,ts)    <- getClassPredTys_maybe ctT = oneOf ts
  | otherwise                                   = isUserErrorTy ctT
  where
    ctT = ctPred ct
    -- First user-error found among the candidate types, if any
    oneOf xs = msum (map isUserErrorTy xs)
-- | Does this constraint correspond to a user-written custom type error?
isUserTypeErrorCt :: Ct -> Bool
isUserTypeErrorCt = maybe False (const True) . getUserTypeErrorMsg
-- | True for hole constraints arising from a partial type signature.
isPartialTypeSigCt :: Ct -> Bool
isPartialTypeSigCt ct = case ct of
  CHoleCan { cc_hole = TypeHole } -> True
  _                               -> False
instance Outputable Ct where
  -- Print the evidence, tagged with which canonical form the constraint is in
  ppr ct = ppr (cc_ev ct) <+> parens (text ct_sort)
    where
      ct_sort = case ct of
        CTyEqCan {}      -> "CTyEqCan"
        CFunEqCan {}     -> "CFunEqCan"
        CNonCanonical {} -> "CNonCanonical"
        CDictCan {}      -> "CDictCan"
        CIrredEvCan {}   -> "CIrredEvCan"
        CHoleCan {}      -> "CHoleCan"
-- | A bag containing exactly one constraint.
singleCt :: Ct -> Cts
singleCt = unitBag

-- | Union two bags of constraints.
andCts :: Cts -> Cts -> Cts
andCts = unionBags

-- | Convert a list of constraints to a bag.
listToCts :: [Ct] -> Cts
listToCts = listToBag

-- | Convert a bag of constraints to a list.
ctsElts :: Cts -> [Ct]
ctsElts = bagToList

-- | Add one constraint at the front.
consCts :: Ct -> Cts -> Cts
consCts = consBag

-- | Add one constraint at the back.
snocCts :: Cts -> Ct -> Cts
snocCts = snocBag

-- | Add a list of constraints; avoids any work when the list is empty.
extendCtsList :: Cts -> [Ct] -> Cts
extendCtsList cts xs | null xs   = cts
                     | otherwise = cts `unionBags` listToBag xs

-- | Union a list of constraint bags.
andManyCts :: [Cts] -> Cts
andManyCts = unionManyBags

emptyCts :: Cts
emptyCts = emptyBag

isEmptyCts :: Cts -> Bool
isEmptyCts = isEmptyBag

-- | Print the constraints, one per line.
pprCts :: Cts -> SDoc
pprCts cts = vcat (map ppr (bagToList cts))
{-
************************************************************************
* *
Wanted constraints
These are forced to be in TcRnTypes because
TcLclEnv mentions WantedConstraints
WantedConstraint mentions CtLoc
CtLoc mentions ErrCtxt
ErrCtxt mentions TcM
* *
v%************************************************************************
-}
-- | The simple, implication, and insoluble constraints still to be solved.
data WantedConstraints
  = WC { wc_simple :: Cts              -- Unsolved constraints, all wanted
       , wc_impl   :: Bag Implication
       , wc_insol  :: Cts              -- Insoluble constraints, can be
                                       -- wanted, given, or derived
                                       -- See Note [Insoluble constraints]
       }
-- | No constraints at all.
emptyWC :: WantedConstraints
emptyWC = WC { wc_simple = emptyBag, wc_impl = emptyBag, wc_insol = emptyBag }

-- | A 'WantedConstraints' holding just the given simple constraints.
mkSimpleWC :: [Ct] -> WantedConstraints
mkSimpleWC cts
  = WC { wc_simple = listToBag cts, wc_impl = emptyBag, wc_insol = emptyBag }

-- | A 'WantedConstraints' holding just the given implications.
mkImplicWC :: Bag Implication -> WantedConstraints
mkImplicWC implic
  = WC { wc_simple = emptyBag, wc_impl = implic, wc_insol = emptyBag }
-- | True when there are no simple, implication, or insoluble constraints.
isEmptyWC :: WantedConstraints -> Bool
isEmptyWC (WC { wc_simple = simples, wc_impl = implics, wc_insol = insols })
  = isEmptyBag simples && isEmptyBag implics && isEmptyBag insols
insolubleWC :: WantedConstraints -> Bool
-- True if there are any insoluble constraints in the wanted bag.  Ignore
-- constraints arising from PartialTypeSignatures to solve as much of the
-- constraints as possible before reporting the holes.
insolubleWC wc = anyBag (not . isPartialTypeSigCt) (wc_insol wc)
              || anyBag ic_insol (wc_impl wc)
  -- anyBag p b == not (isEmptyBag (filterBag p b))
-- | Combine two 'WantedConstraints' by unioning each field.
andWC :: WantedConstraints -> WantedConstraints -> WantedConstraints
andWC (WC { wc_simple = f1, wc_impl = i1, wc_insol = n1 })
      (WC { wc_simple = f2, wc_impl = i2, wc_insol = n2 })
  = WC { wc_simple = f1 `unionBags` f2
       , wc_impl   = i1 `unionBags` i2
       , wc_insol  = n1 `unionBags` n2 }

-- | Union a whole list of 'WantedConstraints'.
unionsWC :: [WantedConstraints] -> WantedConstraints
unionsWC = foldr andWC emptyWC
-- | Add simple constraints to a 'WantedConstraints'.
addSimples :: WantedConstraints -> Bag Ct -> WantedConstraints
addSimples wc cts
  = wc { wc_simple = wc_simple wc `unionBags` cts }

-- | Add implication constraints.
addImplics :: WantedConstraints -> Bag Implication -> WantedConstraints
addImplics wc implic = wc { wc_impl = wc_impl wc `unionBags` implic }

-- | Add insoluble constraints.
addInsols :: WantedConstraints -> Bag Ct -> WantedConstraints
addInsols wc cts
  = wc { wc_insol = wc_insol wc `unionBags` cts }
insolublesOnly :: WantedConstraints -> WantedConstraints
-- Keep only the insolubles (recursing into the implications too)
insolublesOnly (WC { wc_insol = insols, wc_impl = implics })
  = WC { wc_simple = emptyBag
       , wc_insol  = insols
       , wc_impl   = mapBag implic_insols_only implics }
  where
    -- Restrict an implication's wanteds to their insolubles
    implic_insols_only implic
      = implic { ic_wanted = insolublesOnly (ic_wanted implic) }
-- insolubleWantedCt :: Ct -> Bool
-- -- Definitely insoluble, in particular /excluding/ type-hole constraints
-- insolubleWantedCt ct
-- | isGivenCt ct = False -- See Note [Given insolubles]
-- | isHoleCt ct = isOutOfScopeCt ct -- See Note [Insoluble holes]
-- | insolubleEqCt ct = True
-- | otherwise = False
-- insolubleEqCt :: Ct -> Bool
-- -- Returns True of /equality/ constraints
-- -- that are /definitely/ insoluble
-- -- It won't detect some definite errors like
-- -- F a ~ T (F a)
-- -- where F is a type family, which actually has an occurs check
-- --
-- -- The function is tuned for application /after/ constraint solving
-- -- i.e. assuming canonicalisation has been done
-- -- E.g. It'll reply True for a ~ [a]
-- -- but False for [a] ~ a
-- -- and
-- -- True for Int ~ F a Int
-- -- but False for Maybe Int ~ F a Int Int
-- -- (where F is an arity-1 type function)
-- insolubleEqCt (CIrredCan { cc_insol = insol }) = insol
-- insolubleEqCt _ = False
instance Outputable WantedConstraints where
  ppr (WC {wc_simple = s, wc_impl = i, wc_insol = n})
   = ptext (sLit "WC") <+> braces (vcat
        [ ppr_bag (ptext (sLit "wc_simple")) s
        , ppr_bag (ptext (sLit "wc_insol")) n
        , ppr_bag (ptext (sLit "wc_impl")) i ])

-- | Print a labelled bag, or nothing at all if the bag is empty.
ppr_bag :: Outputable a => SDoc -> Bag a -> SDoc
ppr_bag doc bag
  | isEmptyBag bag = empty
  | otherwise      = hang (doc <+> equals)
                        2 (foldrBag (($$) . ppr) empty bag)
{- Note [Given insolubles]
~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (Trac #14325, comment:)
class (a~b) => C a b
foo :: C a c => a -> c
foo x = x
hm3 :: C (f b) b => b -> f b
hm3 x = foo x
In the RHS of hm3, from the [G] C (f b) b we get the insoluble
[G] f b ~# b. Then we also get an unsolved [W] C b (f b).
Residual implication looks like
forall b. C (f b) b => [G] f b ~# b
[W] C f (f b)
We do /not/ want to set the implication status to IC_Insoluble,
because that'll suppress reports of [W] C b (f b). But we
may not report the insoluble [G] f b ~# b either (see Note [Given errors]
in TcErrors), so we may fail to report anything at all! Yikes.
Bottom line: insolubleWC (called in TcSimplify.setImplicationStatus)
should ignore givens even if they are insoluble.
Note [Insoluble holes]
~~~~~~~~~~~~~~~~~~~~~~
Hole constraints that ARE NOT treated as truly insoluble:
a) type holes, arising from PartialTypeSignatures,
b) "true" expression holes arising from TypedHoles
An "expression hole" or "type hole" constraint isn't really an error
at all; it's a report saying "_ :: Int" here. But an out-of-scope
variable masquerading as expression holes IS treated as truly
insoluble, so that it trumps other errors during error reporting.
Yuk!
************************************************************************
* *
Implication constraints
* *
************************************************************************
-}
-- | An implication constraint: skolems and givens under which some
-- wanted constraints must be solved.
data Implication
  = Implic {
      ic_tclvl :: TcLevel,       -- TcLevel: unification variables
                                 -- free in the environment

      ic_skols :: [TcTyVar],     -- Introduced skolems
      ic_info  :: SkolemInfo,    -- See Note [Skolems in an implication]
                                 -- See Note [Shadowing in a constraint]

      ic_given :: [EvVar],       -- Given evidence variables
                                 --   (order does not matter)
                                 -- See Invariant (GivenInv) in TcType

      ic_no_eqs :: Bool,         -- True  <=> ic_givens have no equalities, for sure
                                 -- False <=> ic_givens might have equalities

      ic_env :: TcLclEnv,        -- Gives the source location and error context
                                 -- for the implication, and hence for all the
                                 -- given evidence variables

      ic_wanted :: WantedConstraints,  -- The wanted

      ic_insol :: Bool,          -- True iff insolubleWC ic_wanted is true

      ic_binds :: EvBindsVar     -- Points to the place to fill in the
                                 -- abstraction and bindings
    }
instance Outputable Implication where
  -- Print each field labelled, inside "Implic { ... }"
  ppr (Implic { ic_tclvl = tclvl, ic_skols = skols
              , ic_given = given, ic_no_eqs = no_eqs
              , ic_wanted = wanted, ic_insol = insol
              , ic_binds = binds, ic_info = info })
   = hang (ptext (sLit "Implic") <+> lbrace)
        2 (sep [ ptext (sLit "TcLevel =") <+> ppr tclvl
               , ptext (sLit "Skolems =") <+> pprTvBndrs skols
               , ptext (sLit "No-eqs =") <+> ppr no_eqs
               , ptext (sLit "Insol =") <+> ppr insol
               , hang (ptext (sLit "Given =")) 2 (pprEvVars given)
               , hang (ptext (sLit "Wanted =")) 2 (ppr wanted)
               , ptext (sLit "Binds =") <+> ppr binds
               , pprSkolInfo info ] <+> rbrace)
{-
Note [Shadowing in a constraint]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We assume NO SHADOWING in a constraint. Specifically
* The unification variables are all implicitly quantified at top
level, and are all unique
  * The skolem variables bound in ic_skols are all fresh when the
implication is created.
So we can safely substitute. For example, if we have
forall a. a~Int => ...(forall b. ...a...)...
we can push the (a~Int) constraint inwards in the "givens" without
worrying that 'b' might clash.
Note [Skolems in an implication]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The skolems in an implication are not there to perform a skolem escape
check. That happens because all the environment variables are in the
untouchables, and therefore cannot be unified with anything at all,
let alone the skolems.
Instead, ic_skols is used only when considering floating a constraint
outside the implication in TcSimplify.floatEqualities or
TcSimplify.approximateImplications
Note [Insoluble constraints]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Some of the errors that we get during canonicalization are best
reported when all constraints have been simplified as much as
possible. For instance, assume that during simplification the
following constraints arise:
[Wanted] F alpha ~ uf1
[Wanted] beta ~ uf1 beta
When canonicalizing the wanted (beta ~ uf1 beta), if we eagerly fail
we will simply see a message:
'Can't construct the infinite type beta ~ uf1 beta'
and the user has no idea what the uf1 variable is.
Instead our plan is that we will NOT fail immediately, but:
(1) Record the "frozen" error in the ic_insols field
(2) Isolate the offending constraint from the rest of the inerts
(3) Keep on simplifying/canonicalizing
At the end, we will hopefully have substituted uf1 := F alpha, and we
will be able to report a more informative error:
'Can't construct the infinite type beta ~ F alpha beta'
Insoluble constraints *do* include Derived constraints. For example,
a functional dependency might give rise to [D] Int ~ Bool, and we must
report that. If insolubles did not contain Deriveds, reportErrors would
never see it.
************************************************************************
* *
Pretty printing
* *
************************************************************************
-}
-- | Print evidence variables, one per line, each with its type.
pprEvVars :: [EvVar] -> SDoc     -- Print with their types
pprEvVars ev_vars = vcat (map pprEvVarWithType ev_vars)

-- | Print the predicates of evidence variables as a theta.
pprEvVarTheta :: [EvVar] -> SDoc
pprEvVarTheta ev_vars = pprTheta (map evVarPred ev_vars)

-- | Print one evidence variable as "v :: pred".
pprEvVarWithType :: EvVar -> SDoc
pprEvVarWithType v = ppr v <+> dcolon <+> pprType (evVarPred v)
{-
************************************************************************
* *
CtEvidence
* *
************************************************************************
Note [Evidence field of CtEvidence]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
During constraint solving we never look at the type of ctev_evtm, or
ctev_evar; instead we look at the cte_pred field. The evtm/evar field
may be un-zonked.
-}
-- | Evidence for a constraint, together with its flavour
-- (given, wanted, or derived).
data CtEvidence
  = CtGiven { ctev_pred :: TcPredType   -- See Note [Ct/evidence invariant]
            , ctev_evtm :: EvTerm       -- See Note [Evidence field of CtEvidence]
            , ctev_loc  :: CtLoc }
    -- Truly given, not depending on subgoals
    -- NB: Spontaneous unifications belong here

  | CtWanted { ctev_pred :: TcPredType  -- See Note [Ct/evidence invariant]
             , ctev_evar :: EvVar       -- See Note [Evidence field of CtEvidence]
             , ctev_loc  :: CtLoc }
    -- Wanted goal

  | CtDerived { ctev_pred :: TcPredType
              , ctev_loc  :: CtLoc }
    -- A goal that we don't really have to solve and can't immediately
    -- rewrite anything other than a derived (there's no evidence!)
    -- but if we do manage to solve it may help in solving other goals.
-- | The predicate of a flavor.
ctEvPred :: CtEvidence -> TcPredType
ctEvPred = ctev_pred

-- | The location recorded in the evidence.
ctEvLoc :: CtEvidence -> CtLoc
ctEvLoc = ctev_loc

-- | Get the equality relation relevant for a 'CtEvidence'
ctEvEqRel :: CtEvidence -> EqRel
ctEvEqRel = predTypeEqRel . ctEvPred

-- | Get the role relevant for a 'CtEvidence'
ctEvRole :: CtEvidence -> Role
ctEvRole = eqRelRole . ctEvEqRel
-- | The evidence term of a Given or Wanted.  Deriveds carry no evidence,
-- so this panics on them.
ctEvTerm :: CtEvidence -> EvTerm
ctEvTerm (CtGiven { ctev_evtm = tm }) = tm
ctEvTerm (CtWanted { ctev_evar = ev }) = EvId ev
ctEvTerm ctev@(CtDerived {}) = pprPanic "ctEvTerm: derived constraint cannot have id"
                                        (ppr ctev)
-- | The coercion carried by the evidence.  Deriveds carry no evidence,
-- so this panics on them.
ctEvCoercion :: CtEvidence -> TcCoercion
-- ctEvCoercion ev = evTermCoercion (ctEvTerm ev)
ctEvCoercion (CtGiven { ctev_evtm = tm }) = evTermCoercion tm
ctEvCoercion (CtWanted { ctev_evar = v }) = mkTcCoVarCo v
ctEvCoercion ctev@(CtDerived {}) = pprPanic "ctEvCoercion: derived constraint cannot have id"
                                            (ppr ctev)
-- | The evidence Id of a Wanted; panics on Given or Derived evidence.
ctEvId :: CtEvidence -> TcId
ctEvId (CtWanted { ctev_evar = ev }) = ev
ctEvId ctev = pprPanic "ctEvId:" (ppr ctev)
instance Outputable CtEvidence where
  -- Flavour tag ([G]/[W]/[D]), then the evidence, then ":: pred"
  ppr fl = case fl of
             CtGiven {}   -> ptext (sLit "[G]") <+> ppr (ctev_evtm fl) <+> ppr_pty
             CtWanted {}  -> ptext (sLit "[W]") <+> ppr (ctev_evar fl) <+> ppr_pty
             CtDerived {} -> ptext (sLit "[D]") <+> text "_" <+> ppr_pty
         where ppr_pty = dcolon <+> ppr (ctEvPred fl)
-- | Is this evidence for a Wanted goal?
isWanted :: CtEvidence -> Bool
isWanted (CtWanted {}) = True
isWanted _ = False

-- | Is this evidence Given?
isGiven :: CtEvidence -> Bool
isGiven (CtGiven {})  = True
isGiven _ = False

-- | Is this evidence Derived?
isDerived :: CtEvidence -> Bool
isDerived (CtDerived {}) = True
isDerived _              = False
{-
%************************************************************************
%* *
CtFlavour
%* *
%************************************************************************
Just an enum type that tracks whether a constraint is wanted, derived,
or given, when we need to separate that info from the constraint itself.
-}
-- | Just an enum recording whether a constraint is given, wanted, or
-- derived, used when that info is needed separately from the constraint.
data CtFlavour = Given | Wanted | Derived
  deriving Eq

instance Outputable CtFlavour where
  ppr Given   = text "[G]"
  ppr Wanted  = text "[W]"
  ppr Derived = text "[D]"
-- | Classify a piece of evidence as Given, Wanted, or Derived.
ctEvFlavour :: CtEvidence -> CtFlavour
ctEvFlavour ev = case ev of
  CtGiven {}   -> Given
  CtWanted {}  -> Wanted
  CtDerived {} -> Derived
{-
************************************************************************
* *
SubGoalDepth
* *
************************************************************************
Note [SubGoalDepth]
~~~~~~~~~~~~~~~~~~~
The 'SubGoalCounter' takes care of stopping the constraint solver from looping.
Because of the different use-cases of regular constraints and type function
applications, there are two independent counters. Therefore, this datatype is
abstract. See Note [WorkList]
Each counter starts at zero and increases.
* The "dictionary constraint counter" counts the depth of type class
instance declarations. Example:
[W] d{7} : Eq [Int]
That is d's dictionary-constraint depth is 7. If we use the instance
$dfEqList :: Eq a => Eq [a]
to simplify it, we get
d{7} = $dfEqList d'{8}
where d'{8} : Eq Int, and d' has dictionary-constraint depth 8.
For civilised (decidable) instance declarations, each increase of
depth removes a type constructor from the type, so the depth never
gets big; i.e. is bounded by the structural depth of the type.
  The flag -fcontext-stack=n (not very well named!) fixes the maximum
level.
* The "type function reduction counter" does the same thing when resolving
  equalities involving type functions. Example:
Assume we have a wanted at depth 7:
[W] d{7} : F () ~ a
  If there is a type function equation "F () = Int", this would be rewritten to
[W] d{8} : Int ~ a
and remembered as having depth 8.
Again, without UndecidableInstances, this counter is bounded, but without it
can resolve things ad infinitum. Hence there is a maximum level. But we use a
different maximum, as we expect possibly many more type function reductions
in sensible programs than type class constraints.
  The flag -ftype-function-depth=n fixes the maximum level.
-}
-- | Which of the two sub-goal counters to bump or inspect.
data SubGoalCounter = CountConstraints | CountTyFunApps

-- | See Note [SubGoalDepth]
data SubGoalDepth
  = SubGoalDepth
      {-# UNPACK #-} !Int   -- Dictionary constraints
      {-# UNPACK #-} !Int   -- Type function reductions
  deriving (Eq, Ord)
instance Outputable SubGoalDepth where
  -- Printed as <C:c,F:f> (constraint depth, type-function depth)
  ppr (SubGoalDepth c f) = angleBrackets $
        char 'C' <> colon <> int c <> comma <>
        char 'F' <> colon <> int f
initialSubGoalDepth :: SubGoalDepth
initialSubGoalDepth = SubGoalDepth 0 0
maxSubGoalDepth :: DynFlags -> SubGoalDepth
maxSubGoalDepth dflags = SubGoalDepth (ctxtStkDepth dflags) (tyFunStkDepth dflags)
bumpSubGoalDepth :: SubGoalCounter -> SubGoalDepth -> SubGoalDepth
bumpSubGoalDepth CountConstraints (SubGoalDepth c f) = SubGoalDepth (c+1) f
bumpSubGoalDepth CountTyFunApps (SubGoalDepth c f) = SubGoalDepth c (f+1)
subGoalCounterValue :: SubGoalCounter -> SubGoalDepth -> Int
subGoalCounterValue CountConstraints (SubGoalDepth c _) = c
subGoalCounterValue CountTyFunApps (SubGoalDepth _ f) = f
-- | @subGoalDepthExceeded max cur@ returns @Just@ the first counter whose
-- value in @cur@ strictly exceeds its limit in @max@, or @Nothing@ if both
-- are within bounds.  The constraint counter is checked before the
-- type-function counter, so it wins when both are exceeded.
subGoalDepthExceeded :: SubGoalDepth -> SubGoalDepth -> Maybe SubGoalCounter
subGoalDepthExceeded (SubGoalDepth mc mf) (SubGoalDepth c f)
  | c > mc    = Just CountConstraints
  | f > mf    = Just CountTyFunApps
  | otherwise = Nothing
-- | Checks whether the evidence can be used to solve a goal with the given minimum depth
-- See Note [Preventing recursive dictionaries]
ctEvCheckDepth :: Class -> CtLoc -> CtEvidence -> Bool
ctEvCheckDepth cls target ev
  | isWanted ev
  , cls == coercibleClass     -- The restriction applies only to Coercible
  = ctLocDepth target <= ctLocDepth (ctEvLoc ev)
  | otherwise = True          -- Givens, and non-Coercible wanteds, are always usable
{-
Note [Preventing recursive dictionaries]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NB: this will go away when we start treating Coercible as an equality.
We have some classes where it is not very useful to build recursive
dictionaries (Coercible, at the moment). So we need the constraint solver to
prevent that. We conservatively ensure this property using the subgoal depth of
the constraints: When solving a Coercible constraint at depth d, we do not
consider evidence from a depth <= d as suitable.
Therefore we need to record the minimum depth allowed to solve a CtWanted. This
is done in the SubGoalDepth field of CtWanted. Most code now uses mkCtWanted,
which initializes it to initialSubGoalDepth (i.e. 0); but when requesting a
Coercible instance (requestCoercible in TcInteract), we bump the current depth
by one and use that.
There are two spots where wanted constraints are attempted to be solved
using existing constraints: lookupInertDict and lookupSolvedDict in
TcSMonad. Both use ctEvCheckDepth to make the check. That function
ensures that a Given constraint can always be used to solve a goal
(i.e. they are at depth infinity, for our purposes)
************************************************************************
* *
CtLoc
* *
************************************************************************
The 'CtLoc' gives information about where a constraint came from.
This is important for decent error message reporting because
dictionaries don't appear in the original source code.
type will evolve...
-}
-- | Where a constraint came from: its origin, the local typechecker
-- environment at its birth, and its solver depth.  Used mainly for
-- building decent error messages.
data CtLoc = CtLoc { ctl_origin :: CtOrigin
                   , ctl_env    :: TcLclEnv
                   , ctl_depth  :: !SubGoalDepth }
  -- The TcLclEnv includes particularly
  --    source location:  tcl_loc   :: RealSrcSpan
  --    context:          tcl_ctxt  :: [ErrCtxt]
  --    binder stack:     tcl_bndrs :: [TcIdBinders]
  --    level:            tcl_tclvl :: TcLevel

-- | Build the 'CtLoc' for a Given constraint: origin comes from the
-- skolem info, the environment is pinned at the supplied 'TcLevel',
-- and the depth is reset to zero.
mkGivenLoc :: TcLevel -> SkolemInfo -> TcLclEnv -> CtLoc
mkGivenLoc tclvl skol_info env
  = CtLoc { ctl_origin = GivenOrigin skol_info
          , ctl_env    = env { tcl_tclvl = tclvl }
          , ctl_depth  = initialSubGoalDepth }
-- | Project the local environment out of a 'CtLoc'.
ctLocEnv :: CtLoc -> TcLclEnv
ctLocEnv (CtLoc { ctl_env = env }) = env

-- | Project the sub-goal depth out of a 'CtLoc'.
ctLocDepth :: CtLoc -> SubGoalDepth
ctLocDepth (CtLoc { ctl_depth = depth }) = depth

-- | Project the constraint origin out of a 'CtLoc'.
ctLocOrigin :: CtLoc -> CtOrigin
ctLocOrigin (CtLoc { ctl_origin = origin }) = origin
-- | The source span recorded in the location's local environment.
ctLocSpan :: CtLoc -> RealSrcSpan
ctLocSpan loc = tcl_loc (ctl_env loc)

-- | Overwrite the source span stored in the local environment.
setCtLocSpan :: CtLoc -> RealSrcSpan -> CtLoc
setCtLocSpan ctl sp = setCtLocEnv ctl ((ctl_env ctl) { tcl_loc = sp })

-- | Bump the selected sub-goal counter of the location.
bumpCtLocDepth :: SubGoalCounter -> CtLoc -> CtLoc
bumpCtLocDepth cnt ctl = ctl { ctl_depth = bumpSubGoalDepth cnt (ctl_depth ctl) }

-- | Replace the origin, keeping environment and depth.
setCtLocOrigin :: CtLoc -> CtOrigin -> CtLoc
setCtLocOrigin ctl new_orig = ctl { ctl_origin = new_orig }

-- | Replace the local environment, keeping origin and depth.
setCtLocEnv :: CtLoc -> TcLclEnv -> CtLoc
setCtLocEnv ctl new_env = ctl { ctl_env = new_env }
-- | Push an error context onto the location, also replacing the origin.
pushErrCtxt :: CtOrigin -> ErrCtxt -> CtLoc -> CtLoc
pushErrCtxt o err loc@(CtLoc { ctl_env = lcl })
  = loc { ctl_origin = o, ctl_env = lcl { tcl_ctxt = err : tcl_ctxt lcl } }

pushErrCtxtSameOrigin :: ErrCtxt -> CtLoc -> CtLoc
-- Just add information w/o updating the origin!
pushErrCtxtSameOrigin err loc@(CtLoc { ctl_env = lcl })
  = loc { ctl_env = lcl { tcl_ctxt = err : tcl_ctxt lcl } }

pprArising :: CtOrigin -> SDoc
-- Used for the main, top-level error message
-- We've done special processing for TypeEq and FunDep origins
-- NOTE(review): only TypeEqOrigin is suppressed here; FunDep origins seem
-- to get their special treatment inside pprCtOrigin -- confirm
pprArising (TypeEqOrigin {}) = empty
pprArising orig              = pprCtOrigin orig

-- | Print a constraint's origin together with the source location
-- recorded in its environment.
pprArisingAt :: CtLoc -> SDoc
pprArisingAt (CtLoc { ctl_origin = o, ctl_env = lcl})
  = sep [ pprCtOrigin o
        , text "at" <+> ppr (tcl_loc lcl)]
{-
************************************************************************
* *
SkolemInfo
* *
************************************************************************
-}
-- SkolemInfo gives the origin of *given* constraints
--   a) type variables are skolemised
--   b) an implication constraint is generated
data SkolemInfo
  = SigSkol UserTypeCtxt        -- A skolem that is created by instantiating
            Type                -- a programmer-supplied type signature
                                -- Location of the binding site is on the TyVar

        -- The rest are for non-scoped skolems
  | ClsSkol Class       -- Bound at a class decl
  | InstSkol            -- Bound at an instance decl
  | DataSkol            -- Bound at a data type declaration
  | FamInstSkol         -- Bound at a family instance decl

  | PatSkol             -- An existential type variable bound by a pattern for
      ConLike           -- a data constructor with an existential type.
      (HsMatchContext Name)
        -- e.g.   data T = forall a. Eq a => MkT a
        --        f (MkT x) = ...
        -- The pattern MkT x will allocate an existential type
        -- variable for 'a'.

  | ArrowSkol           -- An arrow form (see TcArrows)

  | IPSkol [HsIPName]   -- Binding site of an implicit parameter

  | RuleSkol RuleName   -- The LHS of a RULE

  | InferSkol [(Name,TcType)]
                        -- We have inferred a type for these (mutually-recursive)
                        -- polymorphic Ids, and are now checking that their RHS
                        -- constraints are satisfied.

  | BracketSkol         -- Template Haskell bracket

  | UnifyForAllSkol     -- We are unifying two for-all types
       [TcTyVar]        -- The instantiated skolem variables
       TcType           -- The instantiated type *inside* the forall

  | UnkSkol             -- Unhelpful info (until I improve it)

instance Outputable SkolemInfo where
  ppr = pprSkolInfo
pprSkolInfo :: SkolemInfo -> SDoc
-- Complete the sentence "is a rigid type variable bound by..."
pprSkolInfo (SigSkol (FunSigCtxt f) ty)
                            = hang (ptext (sLit "the type signature for"))
                                 2 (pprPrefixOcc f <+> dcolon <+> ppr ty)
pprSkolInfo (SigSkol cx ty) = hang (pprUserTypeCtxt cx <> colon)
                                 2 (ppr ty)
pprSkolInfo (IPSkol ips)    = ptext (sLit "the implicit-parameter binding") <> plural ips <+> ptext (sLit "for")
                              <+> pprWithCommas ppr ips
pprSkolInfo (ClsSkol cls)   = ptext (sLit "the class declaration for") <+> quotes (ppr cls)
pprSkolInfo InstSkol        = ptext (sLit "the instance declaration")
pprSkolInfo DataSkol        = ptext (sLit "the data type declaration")
pprSkolInfo FamInstSkol     = ptext (sLit "the family instance declaration")
pprSkolInfo BracketSkol     = ptext (sLit "a Template Haskell bracket")
pprSkolInfo (RuleSkol name) = ptext (sLit "the RULE") <+> doubleQuotes (ftext name)
pprSkolInfo ArrowSkol       = ptext (sLit "the arrow form")
pprSkolInfo (PatSkol cl mc) = case cl of
    RealDataCon dc -> sep [ ptext (sLit "a pattern with constructor")
                          , nest 2 $ ppr dc <+> dcolon
                            <+> pprType (dataConUserType dc) <> comma
                            -- pprType prints forall's regardless of -fprint-explicit-foralls
                            -- which is what we want here, since we might be saying
                            -- type variable 't' is bound by ...
                          , ptext (sLit "in") <+> pprMatchContext mc ]
    PatSynCon ps -> sep [ ptext (sLit "a pattern with pattern synonym")
                        , nest 2 $ ppr ps <+> dcolon
                          <+> pprType (patSynType ps) <> comma
                        , ptext (sLit "in") <+> pprMatchContext mc ]
pprSkolInfo (InferSkol ids) = sep [ ptext (sLit "the inferred type of")
                                  , vcat [ ppr name <+> dcolon <+> ppr ty
                                         | (name,ty) <- ids ]]
pprSkolInfo (UnifyForAllSkol tvs ty) = ptext (sLit "the type") <+> ppr (mkForAllTys tvs ty)

-- UnkSkol
-- For type variables    the others are dealt with by pprSkolTvBinding.
-- For Insts             these cases should not happen
pprSkolInfo UnkSkol = {-WARN( True, text "pprSkolInfo: UnkSkol" )-} ptext (sLit "UnkSkol")
{-
************************************************************************
* *
CtOrigin
* *
************************************************************************
-}
-- | Why a constraint arose; attached to every constraint and used when
-- building error messages.
data CtOrigin
  = GivenOrigin SkolemInfo

  -- All the others are for *wanted* constraints

  | OccurrenceOf Name           -- Occurrence of an overloaded identifier
  | AppOrigin                   -- An application of some kind

  | SpecPragOrigin Name         -- Specialisation pragma for identifier

  | TypeEqOrigin { uo_actual   :: TcType
                 , uo_expected :: TcType }
  | KindEqOrigin
      TcType TcType             -- A kind equality arising from unifying these two types
      CtOrigin                  -- originally arising from this
  | CoercibleOrigin TcType TcType  -- a Coercible constraint

  | IPOccOrigin  HsIPName       -- Occurrence of an implicit parameter
  | OverLabelOrigin FastString  -- Occurrence of an overloaded label

  | LiteralOrigin (HsOverLit Name)      -- Occurrence of a literal
  | NegateOrigin                        -- Occurrence of syntactic negation

  | ArithSeqOrigin (ArithSeqInfo Name)  -- [x..], [x..y] etc
  | PArrSeqOrigin  (ArithSeqInfo Name)  -- [:x..y:] and [:x,y..z:]
  | SectionOrigin
  | TupleOrigin                         -- (..,..)
  | ExprSigOrigin       -- e :: ty
  | PatSigOrigin        -- p :: ty
  | PatOrigin           -- Instantiating a polytyped pattern at a constructor
  | RecordUpdOrigin
  | ViewPatOrigin

  | ScOrigin            -- Typechecking superclasses of an instance declaration
  | DerivOrigin         -- Typechecking deriving
  | DerivOriginDC DataCon Int
                        -- Checking constraints arising from this data con and field index
  | DerivOriginCoerce Id Type Type
                        -- DerivOriginCoerce id ty1 ty2: Trying to coerce class method `id` from
                        -- `ty1` to `ty2`.
  | StandAloneDerivOrigin -- Typechecking stand-alone deriving
  | DefaultOrigin       -- Typechecking a default decl
  | DoOrigin            -- Arising from a do expression
  | MCompOrigin         -- Arising from a monad comprehension
  | IfOrigin            -- Arising from an if statement
  | ProcOrigin          -- Arising from a proc expression
  | AnnOrigin           -- An annotation

  | FunDepOrigin1       -- A functional dependency from combining
        PredType CtLoc      -- This constraint arising from ...
        PredType CtLoc      -- and this constraint arising from ...

  | FunDepOrigin2       -- A functional dependency from combining
        PredType CtOrigin   -- This constraint arising from ...
        PredType SrcSpan    -- and this instance
        -- We only need a CtOrigin on the first, because the location
        -- is pinned on the entire error message

  | HoleOrigin
  | UnboundOccurrenceOf RdrName
  | ListOrigin          -- An overloaded list
  | StaticOrigin        -- A static form
  | InstProvidedOrigin Module ClsInst
        -- Skolem variable arose when we were testing if an instance
        -- is solvable or not.

-- | The shared prefix of origin messages: "arising from".
ctoHerald :: SDoc
ctoHerald = ptext (sLit "arising from")
-- | Pretty-print a constraint origin, including the "arising from" herald.
-- Origins with structured payloads get a multi-line rendering here;
-- everything else falls through to the one-liners in 'pprCtO'.
pprCtOrigin :: CtOrigin -> SDoc
pprCtOrigin (GivenOrigin sk) = ctoHerald <+> ppr sk

pprCtOrigin (FunDepOrigin1 pred1 loc1 pred2 loc2)
  = hang (ctoHerald <+> ptext (sLit "a functional dependency between constraints:"))
       2 (vcat [ hang (quotes (ppr pred1)) 2 (pprArisingAt loc1)
               , hang (quotes (ppr pred2)) 2 (pprArisingAt loc2) ])

pprCtOrigin (FunDepOrigin2 pred1 orig1 pred2 loc2)
  = hang (ctoHerald <+> ptext (sLit "a functional dependency between:"))
       2 (vcat [ hang (ptext (sLit "constraint") <+> quotes (ppr pred1))
                    2 (pprArising orig1 )
               , hang (ptext (sLit "instance") <+> quotes (ppr pred2))
                    2 (ptext (sLit "at") <+> ppr loc2) ])

pprCtOrigin (KindEqOrigin t1 t2 _)
  = hang (ctoHerald <+> ptext (sLit "a kind equality arising from"))
       2 (sep [ppr t1, char '~', ppr t2])

pprCtOrigin (UnboundOccurrenceOf name)
  = ctoHerald <+> ptext (sLit "an undeclared identifier") <+> quotes (ppr name)

pprCtOrigin (DerivOriginDC dc n)
  = hang (ctoHerald <+> ptext (sLit "the") <+> speakNth n
          <+> ptext (sLit "field of") <+> quotes (ppr dc))
       2 (parens (ptext (sLit "type") <+> quotes (ppr ty)))
  where
    -- the field index n is 1-based, hence the (n-1) when indexing
    ty = dataConOrigArgTys dc !! (n-1)

pprCtOrigin (DerivOriginCoerce meth ty1 ty2)
  = hang (ctoHerald <+> ptext (sLit "the coercion of the method") <+> quotes (ppr meth))
       2 (sep [ text "from type" <+> quotes (ppr ty1)
              , nest 2 $ text "to type" <+> quotes (ppr ty2) ])

pprCtOrigin (CoercibleOrigin ty1 ty2)
  = hang (ctoHerald <+> text "trying to show that the representations of")
       2 (quotes (ppr ty1) <+> text "and" $$
          quotes (ppr ty2) <+> text "are the same")

pprCtOrigin (InstProvidedOrigin mod cls_inst)
  = vcat [ text "arising when attempting to show that"
         , ppr cls_inst
         , text "is provided by" <+> quotes (ppr mod)]

pprCtOrigin simple_origin
  = ctoHerald <+> pprCtO simple_origin
----------------
-- | Short one-liner descriptions of the simple origins.  The structured
-- origins (GivenOrigin, FunDepOrigin1\/2, KindEqOrigin, CoercibleOrigin,
-- DerivOriginDC, DerivOriginCoerce, UnboundOccurrenceOf, InstProvidedOrigin)
-- are rendered by 'pprCtOrigin'; reaching the catch-all here with one of
-- those indicates a bug in the caller.
pprCtO :: CtOrigin -> SDoc      -- Ones that are short one-liners
pprCtO (OccurrenceOf name)   = hsep [ptext (sLit "a use of"), quotes (ppr name)]
pprCtO AppOrigin             = ptext (sLit "an application")
pprCtO (SpecPragOrigin name) = hsep [ptext (sLit "a specialisation pragma for"), quotes (ppr name)]
pprCtO (IPOccOrigin name)    = hsep [ptext (sLit "a use of implicit parameter"), quotes (ppr name)]
pprCtO (OverLabelOrigin l)   = hsep [ptext (sLit "the overloaded label")
                                    ,quotes (char '#' <> ppr l)]
pprCtO RecordUpdOrigin       = ptext (sLit "a record update")
pprCtO ExprSigOrigin         = ptext (sLit "an expression type signature")
pprCtO PatSigOrigin          = ptext (sLit "a pattern type signature")
pprCtO PatOrigin             = ptext (sLit "a pattern")
pprCtO ViewPatOrigin         = ptext (sLit "a view pattern")
pprCtO IfOrigin              = ptext (sLit "an if statement")
pprCtO (LiteralOrigin lit)   = hsep [ptext (sLit "the literal"), quotes (ppr lit)]
pprCtO (ArithSeqOrigin seq)  = hsep [ptext (sLit "the arithmetic sequence"), quotes (ppr seq)]
pprCtO (PArrSeqOrigin seq)   = hsep [ptext (sLit "the parallel array sequence"), quotes (ppr seq)]
pprCtO SectionOrigin         = ptext (sLit "an operator section")
pprCtO TupleOrigin           = ptext (sLit "a tuple")
pprCtO NegateOrigin          = ptext (sLit "a use of syntactic negation")
pprCtO ScOrigin              = ptext (sLit "the superclasses of an instance declaration")
pprCtO DerivOrigin           = ptext (sLit "the 'deriving' clause of a data type declaration")
pprCtO StandAloneDerivOrigin = ptext (sLit "a 'deriving' declaration")
pprCtO DefaultOrigin         = ptext (sLit "a 'default' declaration")
pprCtO DoOrigin              = ptext (sLit "a do statement")
pprCtO MCompOrigin           = ptext (sLit "a statement in a monad comprehension")
pprCtO ProcOrigin            = ptext (sLit "a proc expression")
pprCtO (TypeEqOrigin t1 t2)  = ptext (sLit "a type equality") <+> sep [ppr t1, char '~', ppr t2]
pprCtO AnnOrigin             = ptext (sLit "an annotation")
pprCtO HoleOrigin            = ptext (sLit "a use of") <+> quotes (ptext $ sLit "_")
pprCtO ListOrigin            = ptext (sLit "an overloaded list")
pprCtO StaticOrigin          = ptext (sLit "a static form")
-- Fix: the panic message previously said "pprCtOrigin", naming the wrong
-- function and so misdirecting anyone debugging the panic.
pprCtO _                     = panic "pprCtO"
{-
Constraint Solver Plugins
-------------------------
-}
-- | The type of a constraint-solver plugin's solve function: it receives
-- the given, derived and wanted constraints and produces a 'TcPluginResult'.
type TcPluginSolver = [Ct]    -- given
                   -> [Ct]    -- derived
                   -> [Ct]    -- wanted
                   -> TcPluginM TcPluginResult

-- | The monad in which plugins run: a newtype wrapper around 'TcM'.
newtype TcPluginM a = TcPluginM (TcM a)

instance Functor TcPluginM where
  fmap = liftM

instance Applicative TcPluginM where
  pure  = return
  (<*>) = ap

-- All three operations just delegate to the underlying TcM.
instance Monad TcPluginM where
  return x = TcPluginM (return x)
  fail x = TcPluginM (fail x)
  TcPluginM m >>= k =
    TcPluginM (do a <- m
                  let TcPluginM m1 = k a
                  m1)

-- | Unwrap a plugin computation back into the typechecker monad.
runTcPluginM :: TcPluginM a -> TcM a
runTcPluginM (TcPluginM m) = m

-- | This function provides an escape for direct access to
-- the 'TcM' monad.  It should not be used lightly, and
-- the provided 'TcPluginM' API should be favoured instead.
unsafeTcPluginTcM :: TcM a -> TcPluginM a
unsafeTcPluginTcM = TcPluginM
-- | A constraint-solver plugin, existentially quantified over its own
-- state type @s@, which is threaded from init through solve to stop.
data TcPlugin = forall s. TcPlugin
  { tcPluginInit  :: TcPluginM s
    -- ^ Initialize plugin, when entering type-checker.

  , tcPluginSolve :: s -> TcPluginSolver
    -- ^ Solve some constraints.
    -- TODO: WRITE MORE DETAILS ON HOW THIS WORKS.

  , tcPluginStop  :: s -> TcPluginM ()
    -- ^ Clean up after the plugin, when exiting the type-checker.
  }

-- | The outcome of one invocation of a plugin's solve function.
data TcPluginResult
  = TcPluginContradiction [Ct]
    -- ^ The plugin found a contradiction.
    -- The returned constraints are removed from the inert set,
    -- and recorded as insoluble.

  | TcPluginOk [(EvTerm,Ct)] [Ct]
    -- ^ The first field is for constraints that were solved.
    -- These are removed from the inert set,
    -- and the evidence for them is recorded.
    -- The second field contains new work, that should be processed by
    -- the constraint solver.
| rahulmutt/ghcvm | compiler/Eta/TypeCheck/TcRnTypes.hs | bsd-3-clause | 96,947 | 0 | 16 | 28,646 | 11,394 | 6,570 | 4,824 | 915 | 2 |
-----------------------------------------------------------------------------
--
-- Code generator utilities; mostly monadic
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module CgUtils (
addIdReps,
cgLit,
emitDataLits, mkDataLits,
emitRODataLits, mkRODataLits,
emitIf, emitIfThenElse,
emitRtsCall, emitRtsCallWithVols, emitRtsCallWithResult,
emitRtsCallGen,
assignTemp, assignTemp_, newTemp,
emitSimultaneously,
emitSwitch, emitLitSwitch,
tagToClosure,
callerSaves, callerSaveVolatileRegs, get_GlobalReg_addr,
activeStgRegs, fixStgRegisters,
cmmAndWord, cmmOrWord, cmmNegate, cmmEqWord, cmmNeWord,
cmmUGtWord, cmmSubWord, cmmMulWord, cmmAddWord, cmmUShrWord,
cmmOffsetExprW, cmmOffsetExprB,
cmmRegOffW, cmmRegOffB,
cmmLabelOffW, cmmLabelOffB,
cmmOffsetW, cmmOffsetB,
cmmOffsetLitW, cmmOffsetLitB,
cmmLoadIndexW,
cmmConstrTag, cmmConstrTag1,
tagForCon, tagCons, isSmallFamily,
cmmUntag, cmmIsTagged, cmmGetTag,
addToMem, addToMemE,
mkWordCLit,
newStringCLit, newByteStringCLit,
packHalfWordsCLit,
blankWord,
getSRTInfo
) where
#include "HsVersions.h"
#include "../includes/stg/MachRegs.h"
import BlockId
import CgMonad
import TyCon
import DataCon
import Id
import IdInfo
import Constants
import SMRep
import OldCmm
import OldCmmUtils
import CLabel
import ForeignCall
import ClosureInfo
import StgSyn (SRT(..))
import Module
import Literal
import Digraph
import ListSetOps
import Util
import DynFlags
import FastString
import Outputable
import Data.Char
import Data.Word
import Data.Maybe
-------------------------------------------------------------------------
--
-- Random small functions
--
-------------------------------------------------------------------------
-- | Pair each Id with its code-generator representation.
addIdReps :: [Id] -> [(CgRep, Id)]
addIdReps = map (\i -> (idCgRep i, i))
-------------------------------------------------------------------------
--
-- Literals
--
-------------------------------------------------------------------------
-- | Compile a literal to a 'CmmLit', allocating a data section for
-- string literals.
cgLit :: Literal -> FCode CmmLit
cgLit (MachStr s) = newByteStringCLit (bytesFS s)
 -- not unpackFS; we want the UTF-8 byte stream.
cgLit other_lit   = return (mkSimpleLit other_lit)

-- | Translate a non-string, non-LitInteger literal directly to a 'CmmLit'.
mkSimpleLit :: Literal -> CmmLit
mkSimpleLit (MachChar   c)    = CmmInt (fromIntegral (ord c)) wordWidth
mkSimpleLit MachNullAddr      = zeroCLit
mkSimpleLit (MachInt i)       = CmmInt i wordWidth
mkSimpleLit (MachInt64 i)     = CmmInt i W64
mkSimpleLit (MachWord i)      = CmmInt i wordWidth
mkSimpleLit (MachWord64 i)    = CmmInt i W64
mkSimpleLit (MachFloat r)     = CmmFloat r W32
mkSimpleLit (MachDouble r)    = CmmFloat r W64
mkSimpleLit (MachLabel fs ms fod)
        = CmmLabel (mkForeignLabel fs ms labelSrc fod)
        where
                -- TODO: Literal labels might not actually be in the current package...
                labelSrc = ForeignLabelInThisPackage
-- String literals must go through 'cgLit' above, which allocates them.
mkSimpleLit (MachStr _)       = panic "mkSimpleLit: MachStr"
-- No LitInteger's should be left by the time this is called. CorePrep
-- should have converted them all to a real core representation.
mkSimpleLit (LitInteger {})   = panic "mkSimpleLit: LitInteger"

-- | The machine comparison operator for a less-than test on values of
-- this literal's type.
mkLtOp :: Literal -> MachOp
-- On signed literals we must do a signed comparison
mkLtOp (MachInt _)    = MO_S_Lt wordWidth
mkLtOp (MachFloat _)  = MO_F_Lt W32
mkLtOp (MachDouble _) = MO_F_Lt W64
mkLtOp lit            = MO_U_Lt (typeWidth (cmmLitType (mkSimpleLit lit)))
---------------------------------------------------
--
-- Cmm data type functions
--
---------------------------------------------------
{-
The family size of a data type (the number of constructors)
can be either:
* small, if the family size < 2**tag_bits
* big, otherwise.
Small families can have the constructor tag in the tag
bits.
Big families only use the tag value 1 to represent
evaluatedness.
-}
-- | A constructor family is "small" if every constructor tag fits in the
-- pointer tag bits.
isSmallFamily :: Int -> Bool
isSmallFamily = (<= mAX_PTR_TAG)

-- | The pointer tag for a constructor: its one-based tag for small
-- families, or 1 (meaning merely "evaluated") for big families.
tagForCon :: DataCon -> ConTagZ
tagForCon con
  | isSmallFamily (tyConFamilySize (dataConTyCon con)) = dataConTagZ con + 1
  | otherwise                                          = 1

--Tag an expression, to do: refactor, this appears in some other module.
tagCons :: DataCon -> CmmExpr -> CmmExpr
tagCons dcon ptr = cmmOffsetB ptr (tagForCon dcon)
--------------------------------------------------------------------------
--
-- Incrementing a memory location
--
--------------------------------------------------------------------------
-- | Increment the counter at the given address by a constant.
addToMem :: Width       -- rep of the counter
         -> CmmExpr     -- Address
         -> Int         -- What to add (a word)
         -> CmmStmt
addToMem width ptr n = addToMemE width ptr (CmmLit (CmmInt (toInteger n) width))

-- | Increment the counter at the given address by an expression:
-- emits mem[ptr] := mem[ptr] + n.
addToMemE :: Width      -- rep of the counter
          -> CmmExpr    -- Address
          -> CmmExpr    -- What to add (a word-typed expression)
          -> CmmStmt
addToMemE width ptr n
  = CmmStore ptr (CmmMachOp (MO_Add width) [CmmLoad ptr (cmmBits width), n])
-------------------------------------------------------------------------
--
-- Converting a closure tag to a closure for enumeration types
-- (this is the implementation of tagToEnum#).
--
-------------------------------------------------------------------------
-- | Index the enumeration type's closure table by the tag expression;
-- this implements tagToEnum# for enumeration types.
tagToClosure :: TyCon -> CmmExpr -> CmmExpr
tagToClosure tycon tag
  = CmmLoad (cmmOffsetExprW closure_tbl tag) gcWord
  where closure_tbl = CmmLit (CmmLabel lbl)
        lbl = mkClosureTableLabel (tyConName tycon) NoCafRefs
-------------------------------------------------------------------------
--
-- Conditionals and rts calls
--
-------------------------------------------------------------------------
emitIf :: CmmExpr       -- Boolean
       -> Code          -- Then part
       -> Code
-- Emit (if e then x)
-- ToDo: reverse the condition to avoid the extra branch instruction if possible
-- (some conditionals aren't reversible. eg. floating point comparisons cannot
-- be inverted because there exist some values for which both comparisons
-- return False, such as NaN.)
emitIf cond then_part
  = do { then_id <- newLabelC
       ; join_id <- newLabelC
       ; stmtC (CmmCondBranch cond then_id)
       ; stmtC (CmmBranch join_id)      -- condition false: skip straight to the join
       ; labelC then_id
       ; then_part
       ; labelC join_id
       }

emitIfThenElse :: CmmExpr       -- Boolean
               -> Code          -- Then part
               -> Code          -- Else part
               -> Code
-- Emit (if e then x else y)
emitIfThenElse cond then_part else_part
  = do { then_id <- newLabelC
       ; join_id <- newLabelC
       ; stmtC (CmmCondBranch cond then_id)
       ; else_part                      -- else code sits on the fall-through path
       ; stmtC (CmmBranch join_id)
       ; labelC then_id
       ; then_part
       ; labelC join_id
       }
-- | Emit code to call a Cmm function.
emitRtsCall
   :: PackageId                 -- ^ package the function is in
   -> FastString                -- ^ name of function
   -> [CmmHinted CmmExpr]       -- ^ function args
   -> Code                      -- ^ cmm code
emitRtsCall pkg fun args = emitRtsCallGen [] pkg fun args Nothing
   -- The 'Nothing' says "save all global registers"

-- | As 'emitRtsCall', but saving only the given volatile registers.
emitRtsCallWithVols :: PackageId -> FastString -> [CmmHinted CmmExpr] -> [GlobalReg] -> Code
emitRtsCallWithVols pkg fun args vols
   = emitRtsCallGen [] pkg fun args (Just vols)

-- | As 'emitRtsCall', but binding the call's result to a local register.
emitRtsCallWithResult
   :: LocalReg -> ForeignHint
   -> PackageId -> FastString
   -> [CmmHinted CmmExpr] -> Code
emitRtsCallWithResult res hint pkg fun args
   = emitRtsCallGen [CmmHinted res hint] pkg fun args Nothing

-- Make a call to an RTS C procedure
emitRtsCallGen
   :: [CmmHinted LocalReg]      -- ^ result registers (may be empty)
   -> PackageId
   -> FastString
   -> [CmmHinted CmmExpr]       -- ^ arguments
   -> Maybe [GlobalReg]         -- ^ volatile registers to save; Nothing = all
   -> Code
emitRtsCallGen res pkg fun args vols = do
    stmtsC caller_save          -- spill caller-saved globals before the call
    stmtC (CmmCall target res args CmmMayReturn)
    stmtsC caller_load          -- ... and reload them afterwards
  where
    (caller_save, caller_load) = callerSaveVolatileRegs vols
    target   = CmmCallee fun_expr CCallConv
    fun_expr = mkLblExpr (mkCmmCodeLabel pkg fun)
-----------------------------------------------------------------------------
--
-- Caller-Save Registers
--
-----------------------------------------------------------------------------
-- Here we generate the sequence of saves/restores required around a
-- foreign call instruction.
-- TODO: reconcile with includes/Regs.h
-- * Regs.h claims that BaseReg should be saved last and loaded first
-- * This might not have been tickled before since BaseReg is callee save
-- * Regs.h saves SparkHd, ParkT1, SparkBase and SparkLim
-- Returns (saves, restores) for the requested volatile registers
-- (Nothing meaning "all of them") plus the always-handled system registers.
callerSaveVolatileRegs :: Maybe [GlobalReg] -> ([CmmStmt], [CmmStmt])
callerSaveVolatileRegs vols = (caller_save, caller_load)
  where
    caller_save = foldr ($!) [] (map callerSaveGlobalReg    regs_to_save)
    caller_load = foldr ($!) [] (map callerRestoreGlobalReg regs_to_save)

    system_regs = [Sp,SpLim,Hp,HpLim,CCCS,CurrentTSO,CurrentNursery,
                   {-SparkHd,SparkTl,SparkBase,SparkLim,-}BaseReg ]

    regs_to_save = system_regs ++ vol_list

    vol_list = case vols of Nothing -> all_of_em; Just regs -> regs

    all_of_em = [ VanillaReg n VNonGcPtr | n <- [0..mAX_Vanilla_REG] ]
                -- The VNonGcPtr is a lie, but I don't think it matters
             ++ [ FloatReg  n | n <- [0..mAX_Float_REG] ]
             ++ [ DoubleReg n | n <- [0..mAX_Double_REG] ]
             ++ [ LongReg   n | n <- [0..mAX_Long_REG] ]

    -- Prepend a store of the register to its StgRegTable slot, but only
    -- if it is actually caller-saved on this platform.
    callerSaveGlobalReg reg next
       | callerSaves reg =
              CmmStore (get_GlobalReg_addr reg)
                       (CmmReg (CmmGlobal reg)) : next
       | otherwise = next

    -- Dual of callerSaveGlobalReg: reload the register from its slot.
    callerRestoreGlobalReg reg next
       | callerSaves reg =
              CmmAssign (CmmGlobal reg)
                        (CmmLoad (get_GlobalReg_addr reg) (globalRegType reg))
               : next
       | otherwise = next
-- | Returns @True@ if this global register is stored in a caller-saves
-- machine register.
--
-- Which registers qualify is entirely platform-specific, configured by
-- the CALLER_SAVES_* macros from the included MachRegs.h; any register
-- without a matching macro falls through to the final @False@ equation.
callerSaves :: GlobalReg -> Bool

#ifdef CALLER_SAVES_Base
callerSaves BaseReg             = True
#endif
#ifdef CALLER_SAVES_R1
callerSaves (VanillaReg 1 _)    = True
#endif
#ifdef CALLER_SAVES_R2
callerSaves (VanillaReg 2 _)    = True
#endif
#ifdef CALLER_SAVES_R3
callerSaves (VanillaReg 3 _)    = True
#endif
#ifdef CALLER_SAVES_R4
callerSaves (VanillaReg 4 _)    = True
#endif
#ifdef CALLER_SAVES_R5
callerSaves (VanillaReg 5 _)    = True
#endif
#ifdef CALLER_SAVES_R6
callerSaves (VanillaReg 6 _)    = True
#endif
#ifdef CALLER_SAVES_R7
callerSaves (VanillaReg 7 _)    = True
#endif
#ifdef CALLER_SAVES_R8
callerSaves (VanillaReg 8 _)    = True
#endif
#ifdef CALLER_SAVES_R9
callerSaves (VanillaReg 9 _)    = True
#endif
#ifdef CALLER_SAVES_R10
callerSaves (VanillaReg 10 _)   = True
#endif
#ifdef CALLER_SAVES_F1
callerSaves (FloatReg 1)        = True
#endif
#ifdef CALLER_SAVES_F2
callerSaves (FloatReg 2)        = True
#endif
#ifdef CALLER_SAVES_F3
callerSaves (FloatReg 3)        = True
#endif
#ifdef CALLER_SAVES_F4
callerSaves (FloatReg 4)        = True
#endif
#ifdef CALLER_SAVES_D1
callerSaves (DoubleReg 1)       = True
#endif
#ifdef CALLER_SAVES_D2
callerSaves (DoubleReg 2)       = True
#endif
#ifdef CALLER_SAVES_L1
callerSaves (LongReg 1)         = True
#endif
#ifdef CALLER_SAVES_Sp
callerSaves Sp                  = True
#endif
#ifdef CALLER_SAVES_SpLim
callerSaves SpLim               = True
#endif
#ifdef CALLER_SAVES_Hp
callerSaves Hp                  = True
#endif
#ifdef CALLER_SAVES_HpLim
callerSaves HpLim               = True
#endif
#ifdef CALLER_SAVES_CCCS
callerSaves CCCS                = True
#endif
#ifdef CALLER_SAVES_CurrentTSO
callerSaves CurrentTSO          = True
#endif
#ifdef CALLER_SAVES_CurrentNursery
callerSaves CurrentNursery      = True
#endif
callerSaves _                   = False
-- -----------------------------------------------------------------------------
-- Information about global registers

-- | Byte offset of a global register's slot within the STG register
-- table ('StgRegTable').  The @oFFSET_*@ constants are generated from
-- the RTS headers at build time.
--
-- NOTE: clause order matters — the specific @VanillaReg 1..10@ (and
-- Float/Double/Long) equations must precede the catch-all @panic@
-- equations for out-of-range register numbers.
baseRegOffset :: GlobalReg -> Int

baseRegOffset (VanillaReg 1 _)    = oFFSET_StgRegTable_rR1
baseRegOffset (VanillaReg 2 _)    = oFFSET_StgRegTable_rR2
baseRegOffset (VanillaReg 3 _)    = oFFSET_StgRegTable_rR3
baseRegOffset (VanillaReg 4 _)    = oFFSET_StgRegTable_rR4
baseRegOffset (VanillaReg 5 _)    = oFFSET_StgRegTable_rR5
baseRegOffset (VanillaReg 6 _)    = oFFSET_StgRegTable_rR6
baseRegOffset (VanillaReg 7 _)    = oFFSET_StgRegTable_rR7
baseRegOffset (VanillaReg 8 _)    = oFFSET_StgRegTable_rR8
baseRegOffset (VanillaReg 9 _)    = oFFSET_StgRegTable_rR9
baseRegOffset (VanillaReg 10 _)   = oFFSET_StgRegTable_rR10
baseRegOffset (VanillaReg n _)    = panic ("Registers above R10 are not supported (tried to use R" ++ show n ++ ")")
baseRegOffset (FloatReg  1)       = oFFSET_StgRegTable_rF1
baseRegOffset (FloatReg  2)       = oFFSET_StgRegTable_rF2
baseRegOffset (FloatReg  3)       = oFFSET_StgRegTable_rF3
baseRegOffset (FloatReg  4)       = oFFSET_StgRegTable_rF4
baseRegOffset (FloatReg  n)       = panic ("Registers above F4 are not supported (tried to use F" ++ show n ++ ")")
baseRegOffset (DoubleReg 1)       = oFFSET_StgRegTable_rD1
baseRegOffset (DoubleReg 2)       = oFFSET_StgRegTable_rD2
baseRegOffset (DoubleReg n)       = panic ("Registers above D2 are not supported (tried to use D" ++ show n ++ ")")
baseRegOffset Sp                  = oFFSET_StgRegTable_rSp
baseRegOffset SpLim               = oFFSET_StgRegTable_rSpLim
baseRegOffset (LongReg 1)         = oFFSET_StgRegTable_rL1
baseRegOffset (LongReg n)         = panic ("Registers above L1 are not supported (tried to use L" ++ show n ++ ")")
baseRegOffset Hp                  = oFFSET_StgRegTable_rHp
baseRegOffset HpLim               = oFFSET_StgRegTable_rHpLim
baseRegOffset CCCS                = oFFSET_StgRegTable_rCCCS
baseRegOffset CurrentTSO          = oFFSET_StgRegTable_rCurrentTSO
baseRegOffset CurrentNursery      = oFFSET_StgRegTable_rCurrentNursery
baseRegOffset HpAlloc             = oFFSET_StgRegTable_rHpAlloc
baseRegOffset EagerBlackholeInfo  = oFFSET_stgEagerBlackholeInfo
baseRegOffset GCEnter1            = oFFSET_stgGCEnter1
baseRegOffset GCFun               = oFFSET_stgGCFun
-- BaseReg and PicBaseReg have no slot in the register table.
baseRegOffset BaseReg             = panic "baseRegOffset:BaseReg"
baseRegOffset PicBaseReg          = panic "baseRegOffset:PicBaseReg"
-------------------------------------------------------------------------
--
-- Strings generate a top-level data block
--
-------------------------------------------------------------------------

-- | Emit a writable data-segment block containing the given literals,
-- labelled @lbl@.
emitDataLits :: CLabel -> [CmmLit] -> Code
emitDataLits lbl = emitDecl . mkDataLits Data lbl
-- | Emit a read-only data block containing the given literals.
-- The @_caller@ argument only identifies the call site and does not
-- affect the emitted declaration.
emitRODataLits :: String -> CLabel -> [CmmLit] -> Code
emitRODataLits _caller lbl = emitDecl . mkRODataLits lbl
-- | Make a global definition for the string, and return its label.
-- Characters are truncated to bytes via @fromIntegral . ord@ before
-- being handed to 'newByteStringCLit'.
newStringCLit :: String -> FCode CmmLit
newStringCLit = newByteStringCLit . map (fromIntegral . ord)
-- | Make a global definition for a raw byte string and return the
-- literal referring to it.  A fresh unique names the new data block.
newByteStringCLit :: [Word8] -> FCode CmmLit
newByteStringCLit bytes = do
    uniq <- newUnique
    let (lit, decl) = mkByteStringCLit uniq bytes
    emitDecl decl
    return lit
-------------------------------------------------------------------------
--
-- Assigning expressions to temporaries
--
-------------------------------------------------------------------------

-- | If the expression is trivial, return it unchanged.  Otherwise,
-- assign the expression to a fresh local register and return an
-- expression referring to that register.
assignTemp :: CmmExpr -> FCode CmmExpr
assignTemp expr
  | isTrivialCmmExpr expr = return expr
  | otherwise = do
      tmp <- newTemp (cmmExprType expr)
      stmtC (CmmAssign (CmmLocal tmp) expr)
      return (CmmReg (CmmLocal tmp))
-- | Like 'assignTemp', but also forces non-trivial treatment of any
-- expression that mentions a global register: only expressions that are
-- both trivial and free of global registers are returned unchanged.
assignTemp_ :: CmmExpr -> FCode CmmExpr
assignTemp_ expr
  | isTrivialCmmExpr expr && hasNoGlobalRegs expr
  = return expr
  | otherwise
  = do { tmp <- newTemp (cmmExprType expr)
       ; stmtC (CmmAssign (CmmLocal tmp) expr)
       ; return (CmmReg (CmmLocal tmp)) }
-- | Allocate a fresh local register of the given representation.
newTemp :: CmmType -> FCode LocalReg
newTemp rep = (\uniq -> LocalReg uniq rep) <$> newUnique
-------------------------------------------------------------------------
--
-- Building case analysis
--
-------------------------------------------------------------------------

-- | Emit code for a case analysis over a tag expression.  Forks the
-- default branch (if any) into its own block, sorts the branches by
-- tag, and delegates the actual decision-tree/jump-table construction
-- to 'mk_switch'.
emitSwitch
        :: CmmExpr                -- Tag to switch on
        -> [(ConTagZ, CgStmts)]   -- Tagged branches
        -> Maybe CgStmts          -- Default branch (if any)
        -> ConTagZ -> ConTagZ     -- Min and Max possible values; behaviour
                                  --    outside this range is undefined
        -> Code

-- ONLY A DEFAULT BRANCH: no case analysis to do
emitSwitch _ [] (Just stmts) _ _
  = emitCgStmts stmts

-- Right, off we go
emitSwitch tag_expr branches mb_deflt lo_tag hi_tag
  =     -- Just sort the branches before calling mk_switch
    do  { mb_deflt_id <-
                case mb_deflt of
                  Nothing    -> return Nothing
                  Just stmts -> do id <- forkCgStmts stmts; return (Just id)

        ; dflags <- getDynFlags
          -- Compiling via C gets a plain C switch statement regardless
          -- of density (see the comments on 'mk_switch').
        ; let via_C | HscC <- hscTarget dflags = True
                    | otherwise                = False

        ; stmts <- mk_switch tag_expr (sortLe le branches)
                        mb_deflt_id lo_tag hi_tag via_C
        ; emitCgStmts stmts
        }
  where
    -- Order branches by ascending tag
    (t1,_) `le` (t2,_) = t1 <= t2
-- | Build the decision logic for a tag switch: either a genuine
-- 'CmmSwitch' (jump table) for dense tag ranges or when compiling via
-- C, or a binary decision tree of unsigned comparisons otherwise.
-- Branches must already be sorted by tag (see 'emitSwitch').
mk_switch :: CmmExpr -> [(ConTagZ, CgStmts)]
          -> Maybe BlockId -> ConTagZ -> ConTagZ -> Bool
          -> FCode CgStmts

-- SINGLETON TAG RANGE: no case analysis to do
mk_switch _tag_expr [(tag,stmts)] _ lo_tag hi_tag _via_C
  | lo_tag == hi_tag
  = ASSERT( tag == lo_tag )
    return stmts

-- SINGLETON BRANCH, NO DEFAULT: no case analysis to do
mk_switch _tag_expr [(_tag,stmts)] Nothing _lo_tag _hi_tag _via_C
  = return stmts
        -- The simplifier might have eliminated a case
        -- so we may have e.g. case xs of
        --                       [] -> e
        -- In that situation we can be sure the (:) case
        -- can't happen, so no need to test

-- SINGLETON BRANCH: one equality check to do
mk_switch tag_expr [(tag,stmts)] (Just deflt) _lo_tag _hi_tag _via_C
  = return (CmmCondBranch cond deflt `consCgStmt` stmts)
  where
    cond = cmmNeWord tag_expr (CmmLit (mkIntCLit tag))
        -- We have lo_tag < hi_tag, but there's only one branch,
        -- so there must be a default

-- ToDo: we might want to check for the two branch case, where one of
-- the branches is the tag 0, because comparing '== 0' is likely to be
-- more efficient than other kinds of comparison.

-- DENSE TAG RANGE: use a switch statement.
--
-- We also use a switch unconditionally when compiling via C, because
-- this will get emitted as a C switch statement and the C compiler
-- should do a good job of optimising it.  Also, older GCC versions
-- (2.95 in particular) have problems compiling the complicated
-- if-trees generated by this code, so compiling to a switch every
-- time works around that problem.
--
mk_switch tag_expr branches mb_deflt lo_tag hi_tag via_C
  | use_switch  -- Use a switch
  = do  { branch_ids <- mapM forkCgStmts (map snd branches)
        ; let
                tagged_blk_ids = zip (map fst branches) (map Just branch_ids)

                find_branch :: ConTagZ -> Maybe BlockId
                find_branch i = assocDefault mb_deflt tagged_blk_ids i

                -- NB. we have eliminated impossible branches at
                -- either end of the range (see below), so the first
                -- tag of a real branch is real_lo_tag (not lo_tag).
                arms = [ find_branch i | i <- [real_lo_tag..real_hi_tag]]

                switch_stmt = CmmSwitch (cmmOffset tag_expr (- real_lo_tag)) arms

        ; ASSERT(not (all isNothing arms))
          return (oneCgStmt switch_stmt)
        }

  -- if we can knock off a bunch of default cases with one if, then do so
  | Just deflt <- mb_deflt, (lowest_branch - lo_tag) >= n_branches
  = do  { (assign_tag, tag_expr') <- assignTemp' tag_expr
        ; let cond = cmmULtWord tag_expr' (CmmLit (mkIntCLit lowest_branch))
              branch = CmmCondBranch cond deflt
        ; stmts <- mk_switch tag_expr' branches mb_deflt
                        lowest_branch hi_tag via_C
        ; return (assign_tag `consCgStmt` (branch `consCgStmt` stmts))
        }

  | Just deflt <- mb_deflt, (hi_tag - highest_branch) >= n_branches
  = do  { (assign_tag, tag_expr') <- assignTemp' tag_expr
        ; let cond = cmmUGtWord tag_expr' (CmmLit (mkIntCLit highest_branch))
              branch = CmmCondBranch cond deflt
        ; stmts <- mk_switch tag_expr' branches mb_deflt
                        lo_tag highest_branch via_C
        ; return (assign_tag `consCgStmt` (branch `consCgStmt` stmts))
        }

  | otherwise   -- Use an if-tree
  = do  { (assign_tag, tag_expr') <- assignTemp' tag_expr
                -- To avoid duplication
        ; lo_stmts <- mk_switch tag_expr' lo_branches mb_deflt
                        lo_tag (mid_tag-1) via_C
        ; hi_stmts <- mk_switch tag_expr' hi_branches mb_deflt
                        mid_tag hi_tag via_C
        ; hi_id <- forkCgStmts hi_stmts
        ; let cond = cmmUGeWord tag_expr' (CmmLit (mkIntCLit mid_tag))
              branch_stmt = CmmCondBranch cond hi_id
        ; return (assign_tag `consCgStmt` (branch_stmt `consCgStmt` lo_stmts))
        }
        -- we test (e >= mid_tag) rather than (e < mid_tag), because
        -- the former works better when e is a comparison, and there
        -- are two tags 0 & 1 (mid_tag == 1).  In this case, the code
        -- generator can reduce the condition to e itself without
        -- having to reverse the sense of the comparison: comparisons
        -- can't always be easily reversed (eg. floating
        -- pt. comparisons).
  where
    use_switch   = {- pprTrace "mk_switch" (
                        ppr tag_expr <+> text "n_tags:" <+> int n_tags <+>
                        text "branches:" <+> ppr (map fst branches) <+>
                        text "n_branches:" <+> int n_branches <+>
                        text "lo_tag:" <+> int lo_tag <+>
                        text "hi_tag:" <+> int hi_tag <+>
                        text "real_lo_tag:" <+> int real_lo_tag <+>
                        text "real_hi_tag:" <+> int real_hi_tag) $ -}
                   ASSERT( n_branches > 1 && n_tags > 1 )
                   n_tags > 2 && (via_C || (dense && big_enough))
                 -- up to 4 branches we use a decision tree, otherwise
                 -- a switch (== jump table in the NCG).  This seems to be
                 -- optimal, and corresponds with what gcc does.
    big_enough = n_branches > 4
    dense      = n_branches > (n_tags `div` 2)
    n_branches = length branches

    -- ignore default slots at each end of the range if there's
    -- no default branch defined.
    lowest_branch  = fst (head branches)
    highest_branch = fst (last branches)

    real_lo_tag
        | isNothing mb_deflt = lowest_branch
        | otherwise          = lo_tag

    real_hi_tag
        | isNothing mb_deflt = highest_branch
        | otherwise          = hi_tag

    n_tags = real_hi_tag - real_lo_tag + 1

        -- INVARIANT: Provided hi_tag > lo_tag (which is true)
        --      lo_tag <= mid_tag < hi_tag
        --      lo_branches have tags <  mid_tag
        --      hi_branches have tags >= mid_tag

    (mid_tag,_) = branches !! (n_branches `div` 2)
        -- 2 branches => n_branches `div` 2 = 1
        --            => branches !! 1 give the *second* tag
        -- There are always at least 2 branches here

    (lo_branches, hi_branches) = span is_lo branches
    is_lo (t,_) = t < mid_tag
-- | Like 'assignTemp', but returns the assignment statement instead of
-- emitting it, paired with the expression to use from now on.  For a
-- trivial expression the statement is a 'CmmNop'.
assignTemp' :: CmmExpr -> FCode (CmmStmt, CmmExpr)
assignTemp' expr
  | isTrivialCmmExpr expr = return (CmmNop, expr)
  | otherwise = do
      tmp <- newTemp (cmmExprType expr)
      return (CmmAssign (CmmLocal tmp) expr, CmmReg (CmmLocal tmp))
-- | Emit a case analysis over general literals.
emitLitSwitch :: CmmExpr                    -- Tag to switch on
              -> [(Literal, CgStmts)]       -- Tagged branches
              -> CgStmts                    -- Default branch (always)
              -> Code                       -- Emit the code
-- Used for general literals, whose size might not be a word,
-- where there is always a default case, and where we don't know
-- the range of values for certain.  For simplicity we always generate a tree.
--
-- ToDo: for integers we could do better here, perhaps by generalising
-- mk_switch and using that.  --SDM 15/09/2004
emitLitSwitch _ [] deflt = emitCgStmts deflt
emitLitSwitch scrut branches deflt_blk
  = do  { scrut' <- assignTemp scrut
          -- The scrutinee is forced into a temporary because the
          -- generated tree inspects it repeatedly.
        ; deflt_blk_id <- forkCgStmts deflt_blk
        ; blk <- mk_lit_switch scrut' deflt_blk_id (sortLe le branches)
        ; emitCgStmts blk }
  where
    -- Order branches by ascending literal
    le (t1,_) (t2,_) = t1 <= t2
-- | Build a binary decision tree over sorted literal branches.  Each
-- leaf is a single equality test that falls through to the default
-- block on mismatch.
mk_lit_switch :: CmmExpr -> BlockId
              -> [(Literal,CgStmts)]
              -> FCode CgStmts
mk_lit_switch scrut deflt_blk_id [(lit,blk)]
  = return (consCgStmt if_stmt blk)
  where
    cmm_lit = mkSimpleLit lit
    rep     = cmmLitType cmm_lit
    -- Floats need the floating-point inequality op
    ne      = if isFloatType rep then MO_F_Ne else MO_Ne
    cond    = CmmMachOp (ne (typeWidth rep)) [scrut, CmmLit cmm_lit]
    if_stmt = CmmCondBranch cond deflt_blk_id

mk_lit_switch scrut deflt_blk_id branches
  = do  { hi_blk <- mk_lit_switch scrut deflt_blk_id hi_branches
        ; lo_blk <- mk_lit_switch scrut deflt_blk_id lo_branches
        ; lo_blk_id <- forkCgStmts lo_blk
        ; let if_stmt = CmmCondBranch cond lo_blk_id
        ; return (if_stmt `consCgStmt` hi_blk) }
  where
    n_branches = length branches
    (mid_lit,_) = branches !! (n_branches `div` 2)
        -- See notes above re mid_tag

    (lo_branches, hi_branches) = span is_lo branches
    is_lo (t,_) = t < mid_lit

    -- Branch to the low half when scrut < mid_lit
    cond = CmmMachOp (mkLtOp mid_lit)
                     [scrut, CmmLit (mkSimpleLit mid_lit)]
-------------------------------------------------------------------------
--
-- Simultaneous assignment
--
-------------------------------------------------------------------------

emitSimultaneously :: CmmStmts -> Code
-- Emit code to perform the assignments in the
-- input simultaneously, using temporary variables when necessary.
--
-- The Stmts must be:
--    CmmNop, CmmComment, CmmAssign, CmmStore
-- and nothing else

-- We use the strongly-connected component algorithm, in which
--      * the vertices are the statements
--      * an edge goes from s1 to s2 iff
--              s1 assigns to something s2 uses
--        that is, if s1 should *follow* s2 in the final order

type CVertex = (Int, CmmStmt)   -- Give each vertex a unique number,
                                -- for fast comparison

emitSimultaneously stmts
  = codeOnly $
    case filterOut isNopStmt (stmtList stmts) of
        -- Remove no-ops
        []        -> nopC
        [stmt]    -> stmtC stmt         -- It's often just one stmt
        stmt_list -> doSimultaneously1 (zip [(1::Int)..] stmt_list)
-- | Order the numbered statements so that each is executed before any
-- statement that reads what it writes, breaking genuine cycles by
-- routing one assignment through a fresh temporary and retrying.
doSimultaneously1 :: [CVertex] -> Code
doSimultaneously1 vertices
  = let
        edges = [ (vertex, key1, edges_from stmt1)
                | vertex@(key1, stmt1) <- vertices
                ]
        edges_from stmt1 = [ key2 | (key2, stmt2) <- vertices,
                                    stmt1 `mustFollow` stmt2
                           ]
        components = stronglyConnCompFromEdgedVertices edges

        -- do_components deal with one strongly-connected component
        -- Not cyclic, or singleton?  Just do it
        do_component (AcyclicSCC (_n, stmt)) = stmtC stmt
        do_component (CyclicSCC [])
          = panic "doSimultaneously1: do_component (CyclicSCC [])"
        do_component (CyclicSCC [(_n, stmt)]) = stmtC stmt

        -- Cyclic?  Then go via temporaries.  Pick one to
        -- break the loop and try again with the rest.
        do_component (CyclicSCC ((_n, first_stmt) : rest))
          = do  { from_temp <- go_via_temp first_stmt
                ; doSimultaneously1 rest
                ; stmtC from_temp }

        go_via_temp (CmmAssign dest src)
          = do  { tmp <- newTemp (cmmRegType dest) -- TODO FIXME NOW if the pair of assignments move across a call this will be wrong
                ; stmtC (CmmAssign (CmmLocal tmp) src)
                ; return (CmmAssign dest (CmmReg (CmmLocal tmp))) }
        go_via_temp (CmmStore dest src)
          = do  { tmp <- newTemp (cmmExprType src) -- TODO FIXME NOW if the pair of assignments move across a call this will be wrong
                ; stmtC (CmmAssign (CmmLocal tmp) src)
                ; return (CmmStore dest (CmmReg (CmmLocal tmp))) }
        -- Only CmmAssign/CmmStore are expected here (see
        -- 'emitSimultaneously'); anything else is a caller bug.
        go_via_temp _ = panic "doSimultaneously1: go_via_temp"
    in
    mapCs do_component components
-- | @s1 `mustFollow` s2@ is 'True' when @s1@ writes a location that
-- @s2@ reads, i.e. @s1@ has to come after @s2@ in the final ordering.
-- Only the statement forms allowed by 'emitSimultaneously' are handled;
-- anything else is a bug.
mustFollow :: CmmStmt -> CmmStmt -> Bool
mustFollow writer stmt =
    case writer of
      CmmAssign reg _ -> anySrc (reg `regUsedIn`) stmt
      CmmStore loc e  -> anySrc (locUsedIn loc (cmmExprType e)) stmt
      CmmNop          -> False
      CmmComment _    -> False
      _               -> panic "mustFollow"
-- | True if the predicate holds of any input (read) expression of the
-- statement.  Unknown statement forms answer 'True', conservatively.
anySrc :: (CmmExpr -> Bool) -> CmmStmt -> Bool
anySrc p stmt =
    case stmt of
      CmmAssign _ e  -> p e
      CmmStore e1 e2 -> p e1 || p e2  -- Might be used in either side
      CmmComment _   -> False
      CmmNop         -> False
      _              -> True          -- Conservative
-- | @locUsedIn a r e@ checks whether writing to @r[a]@ could affect the
-- value of @e@.  Returns 'True' if it's not sure.
locUsedIn :: CmmExpr -> CmmType -> CmmExpr -> Bool
locUsedIn loc rep expr =
    case expr of
      CmmLit _         -> False
      CmmLoad e ld_rep -> possiblySameLoc loc rep e ld_rep
      CmmReg _         -> False
      CmmRegOff _ _    -> False
      CmmMachOp _ es   -> any (locUsedIn loc rep) es
      CmmStackSlot _ _ -> panic "locUsedIn: CmmStackSlot"
-- | Could two addressed locations overlap in memory?  Answers 'True'
-- whenever it cannot prove otherwise.
--
-- Assumes that distinct registers (eg Hp, Sp) do not
-- point to the same location, nor any offset thereof.
--
-- NOTE: clause order is significant — the zero-offset special cases
-- must precede the general @CmmRegOff@/@CmmRegOff@ overlap test, and
-- the @CmmLit@ clause must precede the conservative catch-all.
possiblySameLoc :: CmmExpr -> CmmType -> CmmExpr -> CmmType -> Bool
possiblySameLoc (CmmReg r1)       _    (CmmReg r2)      _    = r1 == r2
possiblySameLoc (CmmReg r1)       _    (CmmRegOff r2 0) _    = r1 == r2
possiblySameLoc (CmmRegOff r1 0)  _    (CmmReg r2)      _    = r1 == r2
possiblySameLoc (CmmRegOff r1 start1) rep1 (CmmRegOff r2 start2) rep2
  -- Same register: overlap iff the two byte ranges intersect.
  = r1==r2 && end1 > start2 && end2 > start1
  where
    end1 = start1 + widthInBytes (typeWidth rep1)
    end2 = start2 + widthInBytes (typeWidth rep2)

-- A literal is not an lvalue, so a store cannot change it.
possiblySameLoc _ _ (CmmLit _) _ = False
possiblySameLoc _ _ _          _ = True -- Conservative
-------------------------------------------------------------------------
--
--      Static Reference Tables
--
-------------------------------------------------------------------------

-- There is just one SRT for each top level binding; all the nested
-- bindings use sub-sections of this SRT.  The label is passed down to
-- the nested bindings via the monad.

-- | Produce the 'C_SRT' describing the current binding's SRT.  Small
-- bitmaps are encoded inline; a bitmap that is too wide for a half
-- word (or that collides with the 'srt_escape' sentinel) is emitted
-- out of line as a read-only data block.
getSRTInfo :: FCode C_SRT
getSRTInfo = do
  srt_lbl <- getSRTLabel
  srt     <- getSRT
  case srt of
    -- TODO: Should we panic in this case?
    -- Someone obviously thinks there should be an SRT
    NoSRT -> return NoC_SRT
    SRTEntries {} -> panic "getSRTInfo: SRTEntries.  Perhaps you forgot to run SimplStg?"
    SRT off len bmp
      | len > hALF_WORD_SIZE_IN_BITS || bmp == [fromIntegral srt_escape]
      -> do { id <- newUnique
            ; let srt_desc_lbl = mkLargeSRTLabel id
              -- Out-of-line descriptor: [ptr to SRT slice, length, bitmap words]
            ; emitRODataLits "getSRTInfo" srt_desc_lbl
                   ( cmmLabelOffW srt_lbl off
                   : mkWordCLit (fromIntegral len)
                   : map mkWordCLit bmp)
            ; return (C_SRT srt_desc_lbl 0 srt_escape) }

      | otherwise
      -> return (C_SRT srt_lbl off (fromIntegral (head bmp)))
                -- The fromIntegral converts to StgHalfWord
-- | Sentinel bitmap value marking an out-of-line (large) SRT; see
-- 'getSRTInfo', which emits the descriptor when the bitmap equals or
-- exceeds this encoding.
srt_escape :: StgHalfWord
srt_escape = -1
-- -----------------------------------------------------------------------------
--
-- STG/Cmm GlobalReg
--
-- -----------------------------------------------------------------------------

-- | Here is where the STG register map is defined for each target arch.
-- The order matters (for the llvm backend anyway)!  We must make sure to
-- maintain the order here with the order used in the LLVM calling conventions.
-- Note that also, this isn't all registers, just the ones that are currently
-- possibly mapped to real registers.
--
-- Each entry is guarded by a @REG_*@ CPP macro from the target's
-- register configuration, so the contents of this list are fixed at
-- GHC build time.
activeStgRegs :: [GlobalReg]
activeStgRegs = [
#ifdef REG_Base
    BaseReg
#endif
#ifdef REG_Sp
    ,Sp
#endif
#ifdef REG_Hp
    ,Hp
#endif
#ifdef REG_R1
    ,VanillaReg 1 VGcPtr
#endif
#ifdef REG_R2
    ,VanillaReg 2 VGcPtr
#endif
#ifdef REG_R3
    ,VanillaReg 3 VGcPtr
#endif
#ifdef REG_R4
    ,VanillaReg 4 VGcPtr
#endif
#ifdef REG_R5
    ,VanillaReg 5 VGcPtr
#endif
#ifdef REG_R6
    ,VanillaReg 6 VGcPtr
#endif
#ifdef REG_R7
    ,VanillaReg 7 VGcPtr
#endif
#ifdef REG_R8
    ,VanillaReg 8 VGcPtr
#endif
#ifdef REG_R9
    ,VanillaReg 9 VGcPtr
#endif
#ifdef REG_R10
    ,VanillaReg 10 VGcPtr
#endif
#ifdef REG_SpLim
    ,SpLim
#endif
#ifdef REG_F1
    ,FloatReg 1
#endif
#ifdef REG_F2
    ,FloatReg 2
#endif
#ifdef REG_F3
    ,FloatReg 3
#endif
#ifdef REG_F4
    ,FloatReg 4
#endif
#ifdef REG_D1
    ,DoubleReg 1
#endif
#ifdef REG_D2
    ,DoubleReg 2
#endif
    ]
-- | We map STG registers onto appropriate CmmExprs.  Either they map
-- to real machine registers or stored as offsets from BaseReg.  Given
-- a GlobalReg, get_GlobalReg_addr always produces the
-- register table address for it.
get_GlobalReg_addr :: GlobalReg -> CmmExpr
get_GlobalReg_addr reg =
    case reg of
      -- BaseReg *is* the register table pointer, so its "slot" is
      -- the start of the table.
      BaseReg -> regTableOffset 0
      _       -> get_Regtable_addr_from_offset
                     (globalRegType reg) (baseRegOffset reg)
-- | Calculate a literal representing an offset into the register table.
-- Used when we don't have an actual BaseReg to offset from.
regTableOffset :: Int -> CmmExpr
regTableOffset offset =
    CmmLit $ CmmLabelOff mkMainCapabilityLabel (oFFSET_Capability_r + offset)
-- | Address of a register-table slot at the given byte offset.  When
-- the target pins BaseReg to a machine register we offset from it;
-- otherwise we fall back to an absolute offset from the main
-- capability's table ('regTableOffset').
get_Regtable_addr_from_offset :: CmmType -> Int -> CmmExpr
get_Regtable_addr_from_offset _ offset =
#ifdef REG_Base
    CmmRegOff (CmmGlobal BaseReg) offset
#else
    regTableOffset offset
#endif
-- | Fixup global registers so that they assign to locations within the
-- RegTable if they aren't pinned for the current target.
fixStgRegisters :: RawCmmDecl -> RawCmmDecl
fixStgRegisters top@(CmmData _ _) = top
fixStgRegisters (CmmProc info lbl (ListGraph blocks)) =
    CmmProc info lbl (ListGraph (map fixStgRegBlock blocks))
-- | Apply 'fixStgRegStmt' to every statement of a basic block.
fixStgRegBlock :: CmmBasicBlock -> CmmBasicBlock
fixStgRegBlock (BasicBlock blockId stmts) =
    BasicBlock blockId (map fixStgRegStmt stmts)
-- | Rewrite one statement so that any global STG register that is not
-- pinned to a real machine register on this target (i.e. not in
-- 'activeStgRegs') is accessed through its slot in the register table.
--
-- Idiom fix: the original scrutinised @reg `elem` activeStgRegs@ with
-- @case ... of True/False@; plain @if@ expresses the same Bool test.
fixStgRegStmt :: CmmStmt -> CmmStmt
fixStgRegStmt stmt
  = case stmt of
        CmmAssign (CmmGlobal reg) src ->
            let src'     = fixStgRegExpr src
                baseAddr = get_GlobalReg_addr reg
            in if reg `elem` activeStgRegs
                 then CmmAssign (CmmGlobal reg) src'  -- pinned: real register
                 else CmmStore baseAddr src'          -- unpinned: register table

        CmmAssign reg src ->
            CmmAssign reg (fixStgRegExpr src)

        CmmStore addr src -> CmmStore (fixStgRegExpr addr) (fixStgRegExpr src)

        CmmCall target regs args returns ->
            let target' = case target of
                    CmmCallee e conv -> CmmCallee (fixStgRegExpr e) conv
                    other            -> other
                args' = map (\(CmmHinted arg hint) ->
                                 CmmHinted (fixStgRegExpr arg) hint) args
            in CmmCall target' regs args' returns

        CmmCondBranch test dest -> CmmCondBranch (fixStgRegExpr test) dest

        CmmSwitch expr ids -> CmmSwitch (fixStgRegExpr expr) ids

        CmmJump addr regs -> CmmJump (fixStgRegExpr addr) regs

        -- CmmNop, CmmComment, CmmBranch, CmmReturn: no expressions to fix
        _other -> stmt
-- | Rewrite one expression, replacing references to unpinned global
-- registers by loads from the register table (see 'fixStgRegStmt').
--
-- Idiom fix: both @reg `elem` activeStgRegs@ scrutinies used
-- @case ... of True/False@ on a Bool; rewritten with @if@.  Also fixes
-- the "RegOf" comment typo ("RegOff").
fixStgRegExpr :: CmmExpr -> CmmExpr
fixStgRegExpr expr
  = case expr of
        CmmLoad addr ty -> CmmLoad (fixStgRegExpr addr) ty

        CmmMachOp mop args -> CmmMachOp mop (map fixStgRegExpr args)

        CmmReg (CmmGlobal reg) ->
            -- Replace register leaves with appropriate StixTrees for
            -- the given target.  MagicIds which map to a reg on this
            -- arch are left unchanged.  For the rest, BaseReg is taken
            -- to mean the address of the reg table in MainCapability,
            -- and for all others we generate an indirection to its
            -- location in the register table.
            if reg `elem` activeStgRegs
              then expr
              else
                let baseAddr = get_GlobalReg_addr reg
                in case reg of
                     BaseReg -> fixStgRegExpr baseAddr
                     _other  -> fixStgRegExpr
                                    (CmmLoad baseAddr (globalRegType reg))

        CmmRegOff (CmmGlobal reg) offset ->
            -- RegOff leaves are just a shorthand form.  If the reg maps
            -- to a real reg, we keep the shorthand, otherwise, we just
            -- expand it and defer to the above code.
            if reg `elem` activeStgRegs
              then expr
              else fixStgRegExpr (CmmMachOp (MO_Add wordWidth) [
                                      CmmReg (CmmGlobal reg),
                                      CmmLit (CmmInt (fromIntegral offset)
                                                     wordWidth)])

        -- CmmLit, CmmReg (CmmLocal), CmmStackSlot: nothing to fix
        _other -> expr
| mcmaniac/ghc | compiler/codeGen/CgUtils.hs | bsd-3-clause | 38,355 | 0 | 22 | 10,231 | 7,639 | 4,043 | 3,596 | 522 | 10 |
module Distribution.Client.Mirror.Repo.Types (
SourceRepo(..)
, TargetRepo(..)
, targetCachedIndexPath
) where
-- stdlib
import Network.URI (URI)
import System.FilePath
-- hackage-security
import qualified Hackage.Security.Client as Sec
import qualified Hackage.Security.Client.Repository.Cache as Sec
-- | Source repositories: where a mirror run reads packages from.
data SourceRepo =
    -- | "New" style Hackage
    --
    -- (after the introduction of hackage-server, but before the introduction
    -- of security)
    SourceHackage2 {
        sourceRepoURI       :: URI        -- root URI of the source server
      , sourceRepoCachePath :: FilePath   -- local cache directory
      }

    -- | Secure repo (hackage-security).
    --
    -- NOTE(review): the @forall down.@ existential field requires the
    -- ExistentialQuantification extension; presumably enabled above
    -- this excerpt or in the .cabal file — confirm.
  | forall down. SourceSecure {
        sourceRepository    :: Sec.Repository down
      , sourceRepoCache     :: Sec.Cache
      , sourceRepoRootKeys  :: [Sec.KeyId]         -- trusted root key IDs
      , sourceRepoThreshold :: Sec.KeyThreshold    -- how many root sigs required
      }
-- | Target repositories: where a mirror run writes packages to.
data TargetRepo =
    -- | "New" style Hackage (hackage-server)
    TargetHackage2 {
        targetRepoURI       :: URI        -- root URI of the target server
      , targetRepoCachePath :: FilePath   -- local cache directory
      }

    -- | Local repository (a plain directory on disk)
  | TargetLocal {
        targetRepoPath      :: FilePath
      , targetRepoCachePath :: FilePath
      }
-- | Path of the cached index inside the given cache directory.
--
-- NOTE: This stays the same whether it's actually 00-index or 01-index format
targetCachedIndexPath :: FilePath -> FilePath
targetCachedIndexPath = (</> "cached-index.tar.gz")
| agrafix/hackage-server | Distribution/Client/Mirror/Repo/Types.hs | bsd-3-clause | 1,427 | 0 | 10 | 357 | 205 | 136 | 69 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
-- | Tests for the :buffer ex command in the Vim keymap
--
module Vim.EditorManipulations.BufferExCommand (tests) where
import qualified Data.List.NonEmpty as NE
import Generic.TestUtils
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit
import Yi.Buffer
import Yi.Config (Config)
import Yi.Editor
import Yi.Rope ()
type BufferName = String
-- | Create three bufs and return the 'BufferRef' and buffer name of
-- each.
--
-- NOTE(review): the tests below assume that after this runs, buffer
-- "three" (index 2) is the current buffer — confirm this is what
-- 'newBufferE' guarantees.
createInitialBuffers :: EditorM [(BufferRef, BufferName)]
createInitialBuffers = do
    one   <- newBufferE (FileBuffer "one")   "Buffer one"
    two   <- newBufferE (FileBuffer "two")   "Buffer two"
    three <- newBufferE (FileBuffer "three") "Buffer three"
    return [(one, "one"), (two, "two"), (three, "three")]
-- | 'BufferRef' of the buffer at 0-based index @n@.
-- Partial: @n@ must be a valid index into the list.
nthBufferRef :: Int -> [(BufferRef, BufferName)] -> BufferRef
nthBufferRef n = fst . (!! n)
-- | Name of the buffer at 0-based index @n@.
-- Partial: @n@ must be a valid index into the list.
nthBufferName :: Int -> [(BufferRef, BufferName)] -> BufferName
nthBufferName n = snd . (!! n)
-- | All @:buffer@ ex-command tests.  Every test has the same shape:
-- set up three buffers, check a precondition on the initial editor
-- state, feed keystrokes through @ev@, then check assertions on the
-- resulting state via 'runTest'.
tests :: Config -> KeyEval -> TestTree
tests c ev =
    testGroup ":buffer" [
      testCase ":buffer {bufname} switches to the named buffer" $ do
        let setupActions = createInitialBuffers

            preConditions editor bufs =
                assertNotCurrentBuffer (nthBufferRef 1 bufs) editor

            testActions bufs =
                ev $ ":buffer " ++ nthBufferName 1 bufs ++ "<CR>"

            assertions editor bufs = do
                assertContentOfCurrentBuffer c "Buffer two" editor
                assertCurrentBuffer (nthBufferRef 1 bufs) editor

        runTest setupActions preConditions testActions assertions c

    , testCase ":buffer N switches to the numbered buffer" $ do
        let setupActions = createInitialBuffers

            preConditions editor bufs =
                assertNotCurrentBuffer (nthBufferRef 1 bufs) editor

            testActions bufs =
                let (BufferRef bref) = nthBufferRef 1 bufs
                in ev $ ":buffer " ++ show bref ++ "<CR>"

            assertions editor bufs = do
                assertContentOfCurrentBuffer c "Buffer two" editor
                assertCurrentBuffer (nthBufferRef 1 bufs) editor

        runTest setupActions preConditions testActions assertions c

    , testCase ":buffer # switches to the previous buffer" $ do
        let setupActions = createInitialBuffers

            -- The two most-recently-used buffers head the stack.
            preConditions editor bufs =
                assertEqual "Unexpected buffer stack"
                    [nthBufferRef 2 bufs, nthBufferRef 1 bufs]
                    (take 2 . NE.toList $ bufferStack editor)

            testActions _ =
                ev $ ":buffer #<CR>"

            assertions editor bufs = do
                assertEqual "Unexpected buffer stack"
                    [nthBufferRef 1 bufs, nthBufferRef 2 bufs]
                    (take 2 . NE.toList $ bufferStack editor)

        runTest setupActions preConditions testActions assertions c

    , testCase ":buffer % is a no-op" $ do
        let setupActions = createInitialBuffers

            preConditions editor bufs =
                assertCurrentBuffer (nthBufferRef 2 bufs) editor

            testActions _ =
                ev $ ":buffer %<CR>"

            assertions editor bufs = do
                assertContentOfCurrentBuffer c "Buffer three" editor
                assertCurrentBuffer (nthBufferRef 2 bufs) editor

        runTest setupActions preConditions testActions assertions c

    , testCase ":buffer is a no-op" $ do
        let setupActions = createInitialBuffers

            preConditions editor bufs =
                assertCurrentBuffer (nthBufferRef 2 bufs) editor

            testActions _ =
                ev $ ":buffer<CR>"

            assertions editor bufs = do
                assertContentOfCurrentBuffer c "Buffer three" editor
                assertCurrentBuffer (nthBufferRef 2 bufs) editor

        runTest setupActions preConditions testActions assertions c

    , testCase "A modified buffer is not abandoned" $ do
        let setupActions = createInitialBuffers

            preConditions editor bufs =
                assertNotCurrentBuffer (nthBufferRef 1 bufs) editor

            testActions bufs = do
                withCurrentBuffer $ insertN "The buffer is altered"
                ev $ ":buffer " ++ nthBufferName 1 bufs ++ "<CR>"

            -- Still on the modified buffer: the switch was refused.
            assertions editor bufs = do
                assertNotCurrentBuffer (nthBufferRef 1 bufs) editor

        runTest setupActions preConditions testActions assertions c

    , testCase "A modified buffer can be abandoned with a bang" $ do
        let setupActions = createInitialBuffers

            preConditions editor bufs =
                assertNotCurrentBuffer (nthBufferRef 1 bufs) editor

            testActions bufs = do
                withCurrentBuffer $ insertN "The buffer is altered"
                ev $ ":buffer! " ++ nthBufferName 1 bufs ++ "<CR>"

            assertions editor bufs = do
                assertCurrentBuffer (nthBufferRef 1 bufs) editor

        runTest setupActions preConditions testActions assertions c

    , testCase ":Nbuffer switches to the numbered buffer" $ do
        let setupActions = createInitialBuffers

            preConditions editor bufs =
                assertNotCurrentBuffer (nthBufferRef 1 bufs) editor

            testActions bufs =
                -- return ()
                let (BufferRef bref) = nthBufferRef 1 bufs
                in ev $ ":" ++ show bref ++ "buffer<CR>"
                -- in ev $ ":buffer " ++ show bref ++ "<CR>"

            assertions editor bufs = do
                -- assertContentOfCurrentBuffer c "Buffer two" editor
                assertCurrentBuffer (nthBufferRef 1 bufs) editor

        runTest setupActions preConditions testActions assertions c

    -- , testCase "A named buffer can be shown in a split window" $ do
    -- , testCase "A numbered buffer can be shown in a split window" $ do
    ]
| noughtmare/yi | yi-keymap-vim/tests/Vim/EditorManipulations/BufferExCommand.hs | gpl-2.0 | 6,349 | 0 | 19 | 2,254 | 1,289 | 630 | 659 | 108 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.IAM.AttachGroupPolicy
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Attaches the specified managed policy to the specified group.
--
-- You use this API to attach a managed policy to a group. To embed an inline
-- policy in a group, use 'PutGroupPolicy'.
--
-- For more information about policies, refer to <http://docs.aws.amazon.com/IAM/latest/UserGuide/policies-managed-vs-inline.html Managed Policies and InlinePolicies> in the /Using IAM/ guide.
--
-- <http://docs.aws.amazon.com/IAM/latest/APIReference/API_AttachGroupPolicy.html>
module Network.AWS.IAM.AttachGroupPolicy
(
-- * Request
AttachGroupPolicy
-- ** Request constructor
, attachGroupPolicy
-- ** Request lenses
, agpGroupName
, agpPolicyArn
-- * Response
, AttachGroupPolicyResponse
-- ** Response constructor
, attachGroupPolicyResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.IAM.Types
import qualified GHC.Exts
-- | Request payload for the IAM @AttachGroupPolicy@ call.  Construct
-- with 'attachGroupPolicy'; read/update fields via the 'agpGroupName'
-- and 'agpPolicyArn' lenses.
data AttachGroupPolicy = AttachGroupPolicy
    { _agpGroupName :: Text   -- serialised as the "GroupName" query parameter
    , _agpPolicyArn :: Text   -- serialised as the "PolicyArn" query parameter
    } deriving (Eq, Ord, Read, Show)
-- | 'AttachGroupPolicy' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'agpGroupName' @::@ 'Text'
--
-- * 'agpPolicyArn' @::@ 'Text'
--
attachGroupPolicy :: Text -- ^ 'agpGroupName'
                  -> Text -- ^ 'agpPolicyArn'
                  -> AttachGroupPolicy
attachGroupPolicy groupName policyArn =
    AttachGroupPolicy
        { _agpGroupName = groupName
        , _agpPolicyArn = policyArn
        }
-- | The name (friendly name, not ARN) of the group to attach the policy to.
agpGroupName :: Lens' AttachGroupPolicy Text
agpGroupName = lens _agpGroupName (\s a -> s { _agpGroupName = a })

-- | The Amazon Resource Name (ARN) of the policy to attach.
agpPolicyArn :: Lens' AttachGroupPolicy Text
agpPolicyArn = lens _agpPolicyArn (\s a -> s { _agpPolicyArn = a })
-- | Response of the @AttachGroupPolicy@ call; the service returns no
-- payload, so this carries no fields.
data AttachGroupPolicyResponse = AttachGroupPolicyResponse
    deriving (Eq, Ord, Read, Show, Generic)

-- | 'AttachGroupPolicyResponse' constructor.
attachGroupPolicyResponse :: AttachGroupPolicyResponse
attachGroupPolicyResponse = AttachGroupPolicyResponse
-- The request is a query-style POST to the service root.
instance ToPath AttachGroupPolicy where
    toPath = const "/"

instance ToQuery AttachGroupPolicy where
    toQuery AttachGroupPolicy{..} = mconcat
        [ "GroupName" =? _agpGroupName
        , "PolicyArn" =? _agpPolicyArn
        ]

instance ToHeaders AttachGroupPolicy

instance AWSRequest AttachGroupPolicy where
    type Sv AttachGroupPolicy = IAM
    type Rs AttachGroupPolicy = AttachGroupPolicyResponse

    request  = post "AttachGroupPolicy"
    -- No response body to parse; succeed with the empty response value.
    response = nullResponse AttachGroupPolicyResponse
| romanb/amazonka | amazonka-iam/gen/Network/AWS/IAM/AttachGroupPolicy.hs | mpl-2.0 | 3,572 | 0 | 9 | 760 | 397 | 244 | 153 | 52 | 1 |
-- Module : Network.AWS.CognitoIdentity
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Amazon Cognito is a web service that facilitates the delivery of scoped,
-- temporary credentials to mobile devices or other untrusted environments.
-- Amazon Cognito uniquely identifies a device or user and supplies the user
-- with a consistent identity throughout the lifetime of an application. Amazon
-- Cognito lets users authenticate with third-party identity providers
-- (Facebook, Google, or Login with Amazon). As a developer, you decide which
-- identity providers to trust. You can also choose to support unauthenticated
-- access from your application. Your users are provided with Cognito tokens
-- that uniquely identify their device and any information provided about
-- third-party logins.
module Network.AWS.CognitoIdentity
( module Network.AWS.CognitoIdentity.CreateIdentityPool
, module Network.AWS.CognitoIdentity.DeleteIdentityPool
, module Network.AWS.CognitoIdentity.DescribeIdentity
, module Network.AWS.CognitoIdentity.DescribeIdentityPool
, module Network.AWS.CognitoIdentity.GetCredentialsForIdentity
, module Network.AWS.CognitoIdentity.GetId
, module Network.AWS.CognitoIdentity.GetIdentityPoolRoles
, module Network.AWS.CognitoIdentity.GetOpenIdToken
, module Network.AWS.CognitoIdentity.GetOpenIdTokenForDeveloperIdentity
, module Network.AWS.CognitoIdentity.ListIdentities
, module Network.AWS.CognitoIdentity.ListIdentityPools
, module Network.AWS.CognitoIdentity.LookupDeveloperIdentity
, module Network.AWS.CognitoIdentity.MergeDeveloperIdentities
, module Network.AWS.CognitoIdentity.SetIdentityPoolRoles
, module Network.AWS.CognitoIdentity.Types
, module Network.AWS.CognitoIdentity.UnlinkDeveloperIdentity
, module Network.AWS.CognitoIdentity.UnlinkIdentity
, module Network.AWS.CognitoIdentity.UpdateIdentityPool
) where
import Network.AWS.CognitoIdentity.CreateIdentityPool
import Network.AWS.CognitoIdentity.DeleteIdentityPool
import Network.AWS.CognitoIdentity.DescribeIdentity
import Network.AWS.CognitoIdentity.DescribeIdentityPool
import Network.AWS.CognitoIdentity.GetCredentialsForIdentity
import Network.AWS.CognitoIdentity.GetId
import Network.AWS.CognitoIdentity.GetIdentityPoolRoles
import Network.AWS.CognitoIdentity.GetOpenIdToken
import Network.AWS.CognitoIdentity.GetOpenIdTokenForDeveloperIdentity
import Network.AWS.CognitoIdentity.ListIdentities
import Network.AWS.CognitoIdentity.ListIdentityPools
import Network.AWS.CognitoIdentity.LookupDeveloperIdentity
import Network.AWS.CognitoIdentity.MergeDeveloperIdentities
import Network.AWS.CognitoIdentity.SetIdentityPoolRoles
import Network.AWS.CognitoIdentity.Types
import Network.AWS.CognitoIdentity.UnlinkDeveloperIdentity
import Network.AWS.CognitoIdentity.UnlinkIdentity
import Network.AWS.CognitoIdentity.UpdateIdentityPool
| kim/amazonka | amazonka-cognito-identity/gen/Network/AWS/CognitoIdentity.hs | mpl-2.0 | 3,397 | 0 | 5 | 417 | 300 | 225 | 75 | 37 | 0 |
--------------------------------------------------------------------------------
--
-- Module : Time
-- Copyright : (c) 2009 Trevor L. McDonell
-- License : BSD
--
-- Simple timing benchmarks
--
--------------------------------------------------------------------------------
module Time where
import System.CPUTime
import Control.Monad
-- Timing
--
-- | A CPU-time snapshot, in picoseconds (the unit reported by 'getCPUTime').
data Time = Time { cpu_time :: Integer }

-- | A conversion from picoseconds to some coarser unit.
type TimeUnit = Integer -> Integer

picosecond, millisecond, second :: TimeUnit
picosecond  = id
millisecond = (`div` 1000000000)
second      = (`div` 1000000000000)
-- | Snapshot the current CPU time (picoseconds, per 'getCPUTime').
getTime :: IO Time
getTime = Time `fmap` getCPUTime

-- | Project a 'Time' into the given unit.
timeIn :: TimeUnit -> Time -> Integer
timeIn u (Time t) = u t

-- | @elapsedTime t1 t2@ is the duration from @t1@ to @t2@.
elapsedTime :: Time -> Time -> Time
elapsedTime (Time t1) (Time t2) = Time (t2 - t1)
-- Simple benchmarking
--
{-# NOINLINE benchmark #-}
-- | Run an action @n@ times, run a finaliser, and measure the total elapsed
-- CPU time. Returns the elapsed time together with the first run's result.
--
-- NOTE(review): the pattern @(r:_)@ makes this partial — calling with
-- @n < 1@ raises a pattern-match failure. Callers must pass @n >= 1@.
benchmark
    :: Int    -- Number of times to repeat test
    -> IO a   -- Test to run
    -> IO b   -- Finaliser to before measuring elapsed time
    -> IO (Time,a)
benchmark n testee finaliser = do
    t1    <- getTime
    (r:_) <- replicateM n testee
    _     <- finaliser
    t2    <- getTime
    return (elapsedTime t1 t2, r)
| mwu-tow/cuda | examples/common/src/Time.hs | bsd-3-clause | 1,180 | 0 | 9 | 268 | 298 | 166 | 132 | 27 | 1 |
module Parse.Helpers where
import Prelude hiding (until)
import Control.Applicative ((<$>),(<*>),(<*))
import Control.Monad (guard, join)
import Control.Monad.State (State)
import Data.Char (isUpper)
import qualified Data.Map as Map
import qualified Language.GLSL.Parser as GLP
import qualified Language.GLSL.Syntax as GLS
import Text.Parsec hiding (newline, spaces, State)
import Text.Parsec.Indent (indented, runIndent)
import qualified Text.Parsec.Token as T
import qualified AST.Declaration as Decl
import qualified AST.Expression.General as E
import qualified AST.Expression.Source as Source
import qualified AST.Helpers as Help
import qualified AST.Literal as L
import qualified AST.Variable as Variable
import qualified Reporting.Annotation as A
import qualified Reporting.Error.Syntax as Syntax
import qualified Reporting.Region as R
-- | Keywords that may not be used as identifiers.
reserveds :: [String]
reserveds =
    [ "if", "then", "else"
    , "case", "of"
    , "let", "in"
    , "type"
    , "module", "where"
    , "import", "as", "hiding", "exposing"
    , "port", "export", "foreign"
    , "perform"
    , "deriving"
    ]
-- ERROR HELP
expecting = flip (<?>)
-- SETUP

-- | Operator table: precedence level and associativity, keyed by symbol.
type OpTable = Map.Map String (Int, Decl.Assoc)

-- | State carried by the indentation-sensitive parser (reference position).
type SourceM = State SourcePos

-- | The parser type used throughout: Parsec over 'String' with an 'OpTable'
-- user state, layered on the indentation state monad.
type IParser a = ParsecT String OpTable SourceM a

-- | Parse with an empty operator table and no source name.
iParse :: IParser a -> String -> Either ParseError a
iParse parser source =
    iParseWithTable "" Map.empty parser source

-- | Parse with an explicit source name and operator table.
iParseWithTable :: SourceName -> OpTable -> IParser a -> String -> Either ParseError a
iParseWithTable sourceName table aParser input =
    runIndent sourceName $ runParserT aParser table sourceName input
-- VARIABLES

-- | Any identifier: starts with a letter or underscore.
var :: IParser String
var =
    makeVar (letter <|> char '_') <?> "a name"

-- | Identifier starting with a lower-case letter.
lowVar :: IParser String
lowVar =
    makeVar lower <?> "a lower case name"

-- | Identifier starting with an upper-case letter.
capVar :: IParser String
capVar =
    makeVar upper <?> "an upper case name"

-- | A possibly module-qualified lower-case name, e.g. @List.map@.
qualifiedVar :: IParser String
qualifiedVar =
  do  vars <- many ((++) <$> capVar <*> string ".")
      (++) (concat vars) <$> lowVar

-- | Record field label (same lexical shape as a lower-case variable).
rLabel :: IParser String
rLabel = lowVar

-- | Characters allowed after the first character of an identifier.
innerVarChar :: IParser Char
innerVarChar =
    alphaNum <|> char '_' <|> char '\'' <?> "more letters in this name"

-- | Build an identifier parser from a parser for its first character,
-- rejecting reserved words.
makeVar :: IParser Char -> IParser String
makeVar firstChar =
  do  variable <- (:) <$> firstChar <*> many innerVarChar
      if variable `elem` reserveds
        then fail (Syntax.keyword variable)
        else return variable

-- | Parse exactly the given reserved word, not followed by further
-- identifier characters (so @let@ does not match inside @letter@).
reserved :: String -> IParser String
reserved word =
  expecting ("reserved word `" ++ word ++ "`") $
    do  string word
        notFollowedBy innerVarChar
        return word
-- INFIX OPERATORS

-- | Any infix operator: a backticked qualified name or a symbol operator.
anyOp :: IParser String
anyOp =
  betwixt '`' '`' qualifiedVar
    <|> symOp
    <?> "an infix operator like (+)"

-- | A symbolic operator, excluding reserved symbol sequences.
symOp :: IParser String
symOp =
  do  op <- many1 (satisfy Help.isSymbol)
      guard (op `notElem` [ "=", "..", "->", "--", "|", "\8594", ":" ])
      case op of
        -- A lone dot followed by a lower-case letter is record access,
        -- not an operator.
        "." -> notFollowedBy lower >> return op
        _   -> return op
-- COMMON SYMBOLS

equals :: IParser String
equals =
  string "=" <?> "="

-- | Accepts both ASCII @->@ and the Unicode arrow U+2192.
rightArrow :: IParser String
rightArrow =
  string "->" <|> string "\8594" <?> "->"

leftArrow :: IParser String
leftArrow =
  string "<-" <?> "<- for a record update"

hasType :: IParser String
hasType =
  string ":" <?> "the \"has type\" symbol ':'"
-- | Run @p@, committing to it when @check@ succeeds on a lookahead:
-- once committed, errors inside @p@ are reported instead of backtracked.
-- Otherwise @p@ is attempted with backtracking ('try').
commitIf :: ParsecT s u m a -> ParsecT s u m b -> ParsecT s u m b
commitIf check p =
    commit <|> try p
  where
    commit =
      try (lookAhead check) >> p
-- SEPARATORS

-- | One or more @parser@ results separated by @sep@, where each separator
-- may be padded by whitespace.
spaceySepBy1 :: IParser b -> IParser a -> IParser [a]
spaceySepBy1 sep parser =
  do  value <- parser
      (value:) <$> spaceyPrefixBy sep parser

-- | Zero or more (whitespace, sep, parser) groups; only commits to a group
-- once the separator itself has been seen.
spaceyPrefixBy :: IParser sep -> IParser a -> IParser [a]
spaceyPrefixBy sep parser =
  many (commitIf (whitespace >> sep) (padded sep >> parser))

comma :: IParser Char
comma =
  char ',' <?> "a comma ','"

commaSep1 :: IParser a -> IParser [a]
commaSep1 =
  spaceySepBy1 comma

commaSep :: IParser a -> IParser [a]
commaSep =
  option [] . commaSep1

semiSep1 :: IParser a -> IParser [a]
semiSep1 =
  spaceySepBy1 (char ';' <?> "a semicolon ';'")

pipeSep1 :: IParser a -> IParser [a]
pipeSep1 =
  spaceySepBy1 (char '|' <?> "a vertical bar '|'")

consSep1 :: IParser a -> IParser [a]
consSep1 =
  spaceySepBy1 (string "::" <?> "a cons operator '::'")

-- | One or more @p@ separated by dots, as in qualified names.
dotSep1 :: IParser a -> IParser [a]
dotSep1 p =
  (:) <$> p <*> many (try (char '.') >> p)

-- | One or more @p@ separated by (indentation-aware) whitespace.
spaceSep1 :: IParser a -> IParser [a]
spaceSep1 p =
  (:) <$> p <*> spacePrefix p

spacePrefix p =
  constrainedSpacePrefix p (\_ -> return ())

-- | Like 'spacePrefix', but runs @constraint@ on the consumed whitespace
-- (used e.g. to require indentation). An opening bracket right after the
-- space commits to the next item.
constrainedSpacePrefix parser constraint =
    many $ choice
      [ try (spacing >> lookAhead (oneOf "[({")) >> parser
      , try (spacing >> parser)
      ]
  where
    spacing = do
      n <- whitespace
      constraint n <?> Syntax.whitespace
      indented
-- SURROUNDED BY
-- | Run two actions in sequence, keeping only the first result
-- (a monadic analogue of '<*').
followedBy :: Monad m => m a -> m b -> m a
followedBy a b =
  do  x <- a
      b
      return x
-- | Parse @c@ between two delimiter characters.
betwixt :: Char -> Char -> IParser a -> IParser a
betwixt a b c =
  do  char a
      out <- c
      char b <?> "a closing '" ++ [b] ++ "'"
      return out

-- | Parse @p@ (with surrounding whitespace) between two delimiters,
-- using @name@ in the error message for the closing delimiter.
surround :: Char -> Char -> String -> IParser a -> IParser a
surround a z name p = do
  char a
  v <- padded p
  char z <?> unwords ["a closing", name, show z]
  return v

-- NOTE(review): the names below are swapped relative to common usage:
-- 'braces' parses square brackets and 'brackets' parses curly braces.
-- Renaming would break callers, so they are documented instead.
braces :: IParser a -> IParser a
braces =
  surround '[' ']' "brace"

parens :: IParser a -> IParser a
parens =
  surround '(' ')' "paren"

brackets :: IParser a -> IParser a
brackets =
  surround '{' '}' "bracket"
-- HELPERS FOR EXPRESSIONS

-- | Current parser position as a region position.
getMyPosition :: IParser R.Position
getMyPosition =
  R.fromSourcePos <$> getPosition

-- | Run a parser and wrap its result with the source region it covered.
addLocation :: IParser a -> IParser (A.Located a)
addLocation expr =
  do  (start, e, end) <- located expr
      return (A.at start end e)

-- | Run a parser, also returning its start and end positions.
located :: IParser a -> IParser (R.Position, a, R.Position)
located parser =
  do  start <- getMyPosition
      value <- parser
      end <- getMyPosition
      return (start, value, end)

-- | After parsing an expression, keep consuming @.field@ accesses.
-- A dotted chain rooted at an upper-case name is re-interpreted as a
-- qualified variable rather than record access.
accessible :: IParser Source.Expr -> IParser Source.Expr
accessible exprParser =
  do  start <- getMyPosition
      annotatedRootExpr@(A.A _ rootExpr) <- exprParser
      access <- optionMaybe (try dot <?> "a field access like .name")
      case access of
        Nothing ->
          return annotatedRootExpr
        Just _ ->
          accessible $
            do  v <- var
                end <- getMyPosition
                return . A.at start end $
                  case rootExpr of
                    E.Var (Variable.Raw name@(c:_))
                      | isUpper c ->
                          E.rawVar (name ++ '.' : v)
                    _ ->
                      E.Access annotatedRootExpr v

-- | A single dot that is not part of a @..@ range.
dot :: IParser ()
dot =
  do  char '.'
      notFollowedBy (char '.')
-- WHITESPACE

-- | Run a parser with optional whitespace before and after it.
padded :: IParser a -> IParser a
padded p =
  do  whitespace
      out <- p
      whitespace
      return out

-- | One or more spaces or block comments on the current line.
spaces :: IParser String
spaces =
  let space = string " " <|> multiComment <?> Syntax.whitespace
  in
      concat <$> many1 space

-- | Mandatory whitespace, possibly spanning newlines.
forcedWS :: IParser String
forcedWS =
    choice
      [ (++) <$> spaces <*> (concat <$> many nl_space)
      , concat <$> many1 nl_space
      ]
  where
    nl_space =
      try ((++) <$> (concat <$> many1 newline) <*> spaces)

-- Just eats whitespace until the next meaningful character.
dumbWhitespace :: IParser String
dumbWhitespace =
  concat <$> many (spaces <|> newline)

-- | Optional whitespace.
whitespace :: IParser String
whitespace =
  option "" forcedWS

-- | Whitespace up to the start of a fresh declaration line.
freshLine :: IParser [[String]]
freshLine =
    try (many1 newline >> many space_nl) <|> try (many1 space_nl) <?> Syntax.freshLine
  where
    space_nl = try $ spaces >> many1 newline
-- | A newline or a line comment (which consumes through its newline).
newline :: IParser String
newline =
  simpleNewline <|> lineComment <?> Syntax.newline

-- | LF or CRLF.
simpleNewline :: IParser String
simpleNewline =
  try (string "\r\n") <|> string "\n"

-- | A @--@ comment running to the end of the line (or end of input).
lineComment :: IParser String
lineComment =
  do  try (string "--")
      comment <- anyUntil $ simpleNewline <|> (eof >> return "\n")
      return ("--" ++ comment)

-- | A @{-|@ documentation comment; returns its trimmed contents.
docComment :: IParser String
docComment =
  do  try (string "{-|")
      contents <- closeComment
      -- Drop the closing "-}" and trim surrounding whitespace.
      let reversed =
            dropWhile (`elem` " \n\r") . drop 2 $ reverse contents
      return $ dropWhile (==' ') (reverse reversed)

-- | A @{- ... -}@ block comment (but not a doc comment), delimiters included.
multiComment :: IParser String
multiComment =
  (++) <$> try (string "{-" <* notFollowedBy (string "|")) <*> closeComment

-- | Consume up to and including the matching @-}@, handling nesting.
closeComment :: IParser String
closeComment =
  anyUntil $
    choice $
      [ try (string "-}") <?> "the end of a comment -}"
      , concat <$> sequence [ try (string "{-"), closeComment, closeComment ]
      ]
-- ODD COMBINATORS

-- | Fail after making the input appear consumed, so the failure cannot be
-- backtracked over by '<|>'. The pushed \'x\' is a deliberate hack: it is
-- discarded by 'anyToken' (marking input as consumed) before 'fail'
-- reports @msg@.
failure msg = do
  inp <- getInput
  setInput ('x':inp)
  anyToken
  fail msg
-- | Repeat @p@ until @end@ succeeds; return @end@'s result.
until :: IParser a -> IParser b -> IParser b
until p end =
    go
  where
    go = end <|> (p >> go)

-- | Collect characters until @end@ succeeds; append @end@'s result.
anyUntil :: IParser String -> IParser String
anyUntil end =
    go
  where
    go =
      end <|> (:) <$> anyChar <*> go

-- | Scan forward for @end@, skipping over characters, strings, and comments
-- so their contents are not mistaken for @end@. 'Nothing' at end of input.
ignoreUntil :: IParser a -> IParser (Maybe a)
ignoreUntil end =
    go
  where
    ignore p =
      const () <$> p

    -- Atoms whose interior must be skipped as a unit.
    filler =
      choice
        [ try (ignore chr) <|> ignore str
        , ignore multiComment
        , ignore docComment
        , ignore anyChar
        ]

    go =
      choice
        [ Just <$> end
        , filler `until` choice [ const Nothing <$> eof, newline >> go ]
        ]

-- | Fold every occurrence of @thing@ into an accumulator, scanning the
-- whole input.
onFreshLines :: (a -> b -> b) -> b -> IParser a -> IParser b
onFreshLines insert init thing =
    go init
  where
    go values =
      do  optionValue <- ignoreUntil thing
          case optionValue of
            Nothing -> return values
            Just v -> go (insert v values)

-- | Run a parser and also return the exact source text it consumed, by
-- rewinding the parser state and re-reading up to the end position.
withSource :: IParser a -> IParser (String, a)
withSource p =
  do  start <- getParserState
      result <- p
      endPos <- getPosition
      setParserState start
      raw <- anyUntilPos endPos
      return (raw, result)

-- | Consume characters until the given source position is reached.
anyUntilPos :: SourcePos -> IParser String
anyUntilPos pos =
  do  currentPos <- getPosition
      if currentPos == pos
        then return []
        else (:) <$> anyChar <*> anyUntilPos pos
-- BASIC LANGUAGE LITERALS

-- | A @[glsl| ... |]@ shader literal: returns the raw source and its
-- inferred input/output types, or fails on invalid GLSL.
shader :: IParser (String, L.GLShaderTipe)
shader =
  do  try (string "[glsl|")
      rawSrc <- closeShader id
      case glSource rawSrc of
        Left err -> parserFail . show $ err
        Right tipe -> return (rawSrc, tipe)
-- | Consume characters up to the closing @|]@, building the shader source
-- with a difference-list style accumulator.
closeShader :: (String -> a) -> IParser a
closeShader builder =
  choice
    [ do  try (string "|]")
          return (builder "")
    , do  c <- anyChar
          closeShader (builder . (c:))
    ]
-- | Parse GLSL source and collect its attribute/uniform/varying
-- declarations into a 'L.GLShaderTipe'.
glSource :: String -> Either ParseError L.GLShaderTipe
glSource src =
    case GLP.parse src of
      Left e -> Left e
      Right (GLS.TranslationUnit decls) ->
        map extractGLinputs decls
          |> join
          |> foldr addGLinput emptyDecls
          |> Right
  where
    (|>) = flip ($)

    emptyDecls = L.GLShaderTipe Map.empty Map.empty Map.empty

    -- Insert one (qualifier, type, name) triple into the matching map.
    addGLinput (qual,tipe,name) glDecls =
      case qual of
        GLS.Attribute ->
          glDecls { L.attribute = Map.insert name tipe $ L.attribute glDecls }
        GLS.Uniform ->
          glDecls { L.uniform = Map.insert name tipe $ L.uniform glDecls }
        GLS.Varying ->
          glDecls { L.varying = Map.insert name tipe $ L.varying glDecls }
        _ -> error "Should never happen due to below filter"

    -- Keep only attribute/uniform/varying declarations of supported types.
    extractGLinputs decl =
      case decl of
        GLS.Declaration
          (GLS.InitDeclaration
             (GLS.TypeDeclarator
                (GLS.FullType
                   (Just (GLS.TypeQualSto qual))
                   (GLS.TypeSpec _prec (GLS.TypeSpecNoPrecision tipe _mexpr1))))
             [GLS.InitDecl name _mexpr2 _mexpr3]
          ) ->
            case elem qual [GLS.Attribute, GLS.Varying, GLS.Uniform] of
              False -> []
              True ->
                case tipe of
                  GLS.Int -> return (qual, L.Int,name)
                  GLS.Float -> return (qual, L.Float,name)
                  GLS.Vec2 -> return (qual, L.V2,name)
                  GLS.Vec3 -> return (qual, L.V3,name)
                  GLS.Vec4 -> return (qual, L.V4,name)
                  GLS.Mat4 -> return (qual, L.M4,name)
                  GLS.Sampler2D -> return (qual, L.Texture,name)
                  _ -> []
        _ -> []
-- | A string literal: single-line (@"..."@) or multiline (@"""..."""@).
-- The raw contents are normalised and re-lexed through the Parsec token
-- lexer so escape sequences are interpreted consistently.
str :: IParser String
str =
    expecting "a string" $
    do  s <- choice [ multiStr, singleStr ]
        processAs T.stringLiteral . sandwich '\"' $ concat s
  where
    rawString quote insides =
      quote >> manyTill insides quote

    multiStr  = rawString (try (string "\"\"\"")) multilineStringChar
    singleStr = rawString (char '"') stringChar

-- | One character inside a normal string literal.
stringChar :: IParser String
stringChar = choice [ newlineChar, escaped '\"', (:[]) <$> satisfy (/= '\"') ]

-- | One character inside a multiline string; bare quotes are escaped so
-- the result can be re-lexed as a normal string literal.
multilineStringChar :: IParser String
multilineStringChar =
    do  noEnd
        choice [ newlineChar, escaped '\"', expandQuote <$> anyChar ]
  where
    noEnd = notFollowedBy (string "\"\"\"")
    expandQuote c = if c == '\"' then "\\\"" else [c]

-- | Translate literal newlines into their escape sequences.
newlineChar :: IParser String
newlineChar =
  choice [ char '\n' >> return "\\n"
         , char '\r' >> return "\\r" ]
-- | Wrap a string in a pair of identical delimiter characters.
sandwich :: Char -> String -> String
sandwich delim s =
  [delim] ++ s ++ [delim]
-- | A backslash escape of a backslash or the given delimiter; returns the
-- two-character escape sequence.
escaped :: Char -> IParser String
escaped delim =
  try $ do
    char '\\'
    c <- char '\\' <|> char delim
    return ['\\', c]

-- | A character literal, re-lexed through the Parsec token lexer so
-- escape sequences are interpreted.
chr :: IParser Char
chr =
    betwixt '\'' '\'' character <?> "a character"
  where
    nonQuote = satisfy (/='\'')

    character =
      do  c <- choice
                [ escaped '\''
                , (:) <$> char '\\' <*> many1 nonQuote
                , (:[]) <$> nonQuote
                ]
          processAs T.charLiteral $ sandwich '\'' c
-- | Re-lex a string with one of the token-lexer parsers (e.g.
-- 'T.stringLiteral'); lexer failures are surfaced with 'fail'.
processAs :: (T.GenTokenParser String u SourceM -> IParser a) -> String -> IParser a
processAs processor s =
    calloutParser s (processor lexer)
  where
    calloutParser :: String -> IParser a -> IParser a
    calloutParser inp p =
      either (fail . show) return (iParse p inp)

lexer :: T.GenTokenParser String u SourceM
lexer = T.makeTokenParser elmDef

-- I don't know how many of these are necessary for charLiteral/stringLiteral
elmDef :: T.GenLanguageDef String u SourceM
elmDef =
  T.LanguageDef
    { T.commentStart    = "{-"
    , T.commentEnd      = "-}"
    , T.commentLine     = "--"
    , T.nestedComments  = True
    , T.identStart      = undefined
    , T.identLetter     = undefined
    , T.opStart         = undefined
    , T.opLetter        = undefined
    , T.reservedNames   = reserveds
    , T.reservedOpNames = [":", "->", "<-", "|"]
    , T.caseSensitive   = True
    }
| JoeyEremondi/elm-summer-opt | src/Parse/Helpers.hs | bsd-3-clause | 14,468 | 81 | 22 | 4,159 | 4,668 | 2,400 | 2,268 | 426 | 14 |
{-# LANGUAGE ScopedTypeVariables #-}
module PackageTests.Freeze.Check
( tests
) where
import PackageTests.PackageTester
import Test.Tasty
import Test.Tasty.HUnit
import qualified Control.Exception.Extensible as E
import Data.List (intercalate, isInfixOf)
import System.Directory (doesFileExist, removeFile)
import System.FilePath ((</>))
import System.IO.Error (isDoesNotExistError)
-- | Directory containing the Freeze test package.
dir :: FilePath
dir = packageTestsDirectory </> "Freeze"
-- | Test cases for @cabal freeze@. Each case starts from a clean state
-- (no cabal.config), runs @cabal freeze@ with some flags, and inspects
-- the generated cabal.config.
tests :: TestsPaths -> [TestTree]
tests paths =
    [ testCase "runs without error" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir []
          assertFreezeSucceeded result

    , testCase "freezes direct dependencies" $ do
          c <- freezeWith []
          assertBool ("should have frozen base\n" ++ c) $ frozen "base" c

    , testCase "freezes transitory dependencies" $ do
          c <- freezeWith []
          assertBool ("should have frozen ghc-prim\n" ++ c) $ frozen "ghc-prim" c

    , testCase "does not freeze packages which are not dependend upon" $ do
          -- XXX Test this against a package installed in the sandbox but
          -- not depended upon.
          c <- freezeWith []
          assertBool ("should not have frozen exceptions\n" ++ c) $ not $
              frozen "exceptions" c

    , testCase "does not include a constraint for the package being frozen" $ do
          c <- freezeWith []
          assertBool ("should not have frozen self\n" ++ c) $ not $ frozen "my" c

    , testCase "--dry-run does not modify the cabal.config file" $ do
          removeCabalConfig
          result <- cabal_freeze paths dir ["--dry-run"]
          assertFreezeSucceeded result
          c <- doesFileExist $ dir </> "cabal.config"
          assertBool "cabal.config file should not have been created" (not c)

    , testCase "--enable-tests freezes test dependencies" $ do
          c <- freezeWith ["--enable-tests"]
          assertBool ("should have frozen test-framework\n" ++ c) $
              frozen "test-framework" c

    , testCase "--disable-tests does not freeze test dependencies" $ do
          c <- freezeWith ["--disable-tests"]
          assertBool ("should not have frozen test-framework\n" ++ c) $ not $
              frozen "test-framework" c

      -- Fixed: this case previously passed --disable-benchmarks and
      -- asserted criterion was absent, duplicating the case below and
      -- never actually exercising --enable-benchmarks.
    , testCase "--enable-benchmarks freezes benchmark dependencies" $ do
          c <- freezeWith ["--enable-benchmarks"]
          assertBool ("should have frozen criterion\n" ++ c) $
              frozen "criterion" c

    , testCase "--disable-benchmarks does not freeze benchmark dependencies" $ do
          c <- freezeWith ["--disable-benchmarks"]
          assertBool ("should not have frozen criterion\n" ++ c) $ not $
              frozen "criterion" c
    ]
  where
    -- Run @cabal freeze@ with the given flags on a clean config and
    -- return the generated cabal.config contents.
    freezeWith flags = do
        removeCabalConfig
        result <- cabal_freeze paths dir flags
        assertFreezeSucceeded result
        readCabalConfig

    -- Whether the config pins a version of the given package.
    frozen pkg c =
        (" " ++ pkg ++ " ==") `isInfixOf` intercalate " " (lines c)
-- | Delete the generated cabal.config, treating "file does not exist"
-- as success and re-throwing any other IO error.
removeCabalConfig :: IO ()
removeCabalConfig =
    removeFile (dir </> "cabal.config") `E.catch` onIOError
  where
    onIOError :: IOError -> IO ()
    onIOError e
      | isDoesNotExistError e = return ()
      | otherwise             = E.throw e
-- | Read the generated cabal.config; fails if @cabal freeze@ did not
-- produce one.
readCabalConfig :: IO String
readCabalConfig = do
    readFile $ dir </> "cabal.config"
| corngood/cabal | cabal-install/tests/PackageTests/Freeze/Check.hs | bsd-3-clause | 4,311 | 0 | 14 | 1,223 | 972 | 480 | 492 | 91 | 2 |
module KMeansHelper where
import Prelude hiding (zipWith)
import Data.List (sort, span, minimumBy)
import Data.Function (on)
import Data.Ord (comparing)
import Language.Haskell.Liquid.Prelude (liquidAssert, liquidError)
-- | Fixed-Length Lists
{-@ type List a N = {v : [a] | (len v) = N} @-}
-- | N Dimensional Points
{-@ type Point N = List Double N @-}
{-@ type NonEmptyList a = {v : [a] | (len v) > 0} @-}
-- | Clustering
{-@ type Clustering a = [(NonEmptyList a)] @-}
------------------------------------------------------------------
-- | Grouping By a Predicate -------------------------------------
------------------------------------------------------------------
{-@ groupBy :: (a -> a -> Bool) -> [a] -> (Clustering a) @-}
-- | Split a list into runs of elements equivalent (under @eq@) to the
-- first element of each run; every resulting group is non-empty.
groupBy _  []     = []
groupBy eq (x:xs) = (x : same) : groupBy eq rest
  where
    (same, rest) = span (eq x) xs
------------------------------------------------------------------
-- | Partitioning By a Size --------------------------------------
------------------------------------------------------------------
{-@ type PosInt = {v: Int | v > 0 } @-}
{-@ partition :: size:PosInt -> [a] -> (Clustering a) @-}
-- | Break a list into consecutive chunks of the given size; the final
-- chunk may be shorter. A positive size is required for termination
-- (enforced by the LiquidHaskell spec above).
partition _    []       = []
partition size ys@(_:_) = chunk : partition size rest
  where
    (chunk, rest) = splitAt size ys
-----------------------------------------------------------------------
-- | Safe Zipping -----------------------------------------------------
-----------------------------------------------------------------------
{-@ zipWith :: (a -> b -> c) -> xs:[a] -> (List b (len xs)) -> (List c (len xs)) @-}
-- | Zip two lists of equal length with @f@. Unequal lengths are a logic
-- error and abort via 'liquidError'; the LiquidHaskell spec above proves
-- those clauses unreachable for well-typed callers.
zipWith f (a:as) (b:bs) = f a b : zipWith f as bs
zipWith _ [] [] = []
-- Other cases only for exposition
zipWith _ (_:_) [] = liquidError "Dead Code"
zipWith _ [] (_:_) = liquidError "Dead Code"
-----------------------------------------------------------------------
-- | "Matrix" Transposition -------------------------------------------
-----------------------------------------------------------------------
{-@ type Matrix a Rows Cols = (List (List a Cols) Rows) @-}
{-@ transpose :: c:Int -> r:PosInt -> Matrix a r c -> Matrix a c r @-}
-- | Transpose an r-by-c matrix given as a list of @r@ rows of length @c@.
-- Preconditions come from the LiquidHaskell spec (not checked at runtime):
-- @r > 0@ and every row has exactly @c@ elements; the 'liquidError'
-- clauses are unreachable for well-typed callers.
transpose :: Int -> Int -> [[a]] -> [[a]]
transpose 0 _ _ = []
transpose c r ((x:xs) : xss) = (x : map head xss) : transpose (c-1) r (xs : map tail xss)
-- Or, with comprehensions
-- transpose c r ((x:xs):xss) = (x : [ xs' | (x':_) <- xss ]) : transpose (c-1) r (xs : [xs' | (_ : xs') <- xss])
-- Not needed, just for exposition
transpose c r ([] : _) = liquidError "dead code"
transpose c r [] = liquidError "dead code"
| mightymoose/liquidhaskell | include/KMeansHelper.hs | bsd-3-clause | 2,694 | 0 | 9 | 566 | 490 | 273 | 217 | 22 | 1 |
module GTL.Data.MarkovDecisionProcess where
import GTL.Data.Dynamic (DynamicXA)
import GTL.Data.Utility (UtilityXA, Discount)
-- | A Markov decision process over states @x@ and actions @a@:
-- transition dynamics, a stage utility, and a discount factor.
data MDP x a = MDP { dynamic  :: DynamicXA x a  -- ^ Transition dynamics.
                   , utility  :: UtilityXA x a  -- ^ Stage utility.
                   , discount :: Discount       -- ^ Discount factor.
                   }
| dudebout/game-theoretic-learning | GTL/Data/MarkovDecisionProcess.hs | isc | 266 | 0 | 9 | 75 | 71 | 44 | 27 | 6 | 0 |
module Y2015.D03Spec (spec) where
import Y2015
import Test.Hspec
-- | Tests for the Advent of Code 2015, day 3 solutions.
spec :: Spec
spec = parallel $ do
    describe "Day 3" $ do
        describe "santaRun" $ do
            it "should deliver to 2 houses" $
                santaRun ">" `shouldBe` 2
            it "should deliver to 4 houses in a square" $
                santaRun "^>v<" `shouldBe` 4
            it "should deliver many presents to 2 houses" $
                santaRun "^v^v^v^v^v" `shouldBe` 2
        -- NOTE(review): this group exercises 'roboRun' but is labelled
        -- "roboSolve"; renaming only changes test output text.
        describe "roboSolve" $ do
            it "should deliver to 3 houses" $
                roboRun "^v" `shouldBe` 3
            it "should deliver to 3 houses and return to origin" $
                roboRun "^>v<" `shouldBe` 3
            it "should deliver 11 presents" $
                roboRun "^v^v^v^v^v" `shouldBe` 11
| tylerjl/adventofcode | test/Y2015/D03Spec.hs | mit | 794 | 0 | 16 | 295 | 177 | 86 | 91 | 20 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Data.Xlsx.Parser where
import Prelude hiding (sequence)
import Control.Applicative
import Control.Monad.IO.Class
import Control.Monad (join)
import Data.Char (ord)
import Data.List
import Data.Maybe
import Data.Ord
import Data.Function (on)
import qualified Data.IntMap as M
import qualified Data.IntSet as S
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Read as T
import qualified Data.ByteString.Lazy as L
--import Control.Monad.Trans.Resource (MonadThrow)
import Data.Conduit
import Data.XML.Types
import qualified Data.Conduit.List as CL
import qualified Text.XML.Stream.Parse as Xml
import qualified Codec.Archive.Zip as Zip
import System.FilePath
-- | An opened workbook: the underlying zip archive plus the preloaded
-- shared-strings table (index -> text).
data Xlsx
  = Xlsx
    { archive       :: Zip.Archive
    , sharedStrings :: M.IntMap Text
    }

-- | Column selection for extraction.
data Columns
  = AllColumns
  | Columns [String]

-- | A single worksheet cell.
data Cell = Cell
    { cellIx :: (Text, Int)  -- ^ (column letters, row number), e.g. ("A", 1).
    , style  :: Maybe Int    -- ^ Style index, if any.
    , value  :: Maybe Text   -- ^ Cell text, shared strings already resolved.
    }
    deriving Show

-- | A row keyed by its header-cell text.
type MapRow = Map.Map Text Text
-- | Open an .xlsx file: read the zip archive and preload sharedStrings.
xlsx :: FilePath -> IO Xlsx
xlsx fname = do
  ar <- Zip.toArchive <$> L.readFile fname
  ss <- runResourceT $ getSharedStrings ar
  return $ Xlsx ar ss
-- | Get data from specified worksheet, restricted to the given columns
-- (column letters, e.g. "A"), grouped into rows.
sheet :: MonadThrow m => Xlsx -> Int -> [Text] -> Source m [Cell]
sheet x sheetId cols
  = getSheetCells x sheetId
  $= filterColumns (S.fromList $ map col2int cols)
  $= groupRows

-- | Get all rows from specified worksheet, as header-keyed maps.
sheetRows :: MonadThrow m => Xlsx -> Int -> Source m MapRow
sheetRows x sheetId
  = getSheetCells x sheetId
  $= groupRows
  $= reverseRows
  $= mkMapRows
-- | Make 'Conduit' from 'mkMapRowsSink'.
mkMapRows :: Monad m => Conduit [Cell] m MapRow
mkMapRows = sequence mkMapRowsSink =$= CL.concatMap id

-- | Make 'MapRow's from lists of 'Cell's: the first row is treated as the
-- header and supplies the keys for all following rows.
mkMapRowsSink :: Monad m => Sink [Cell] m [MapRow]
mkMapRowsSink = do
    header <- fromMaybe [] <$> CL.head
    rows <- CL.consume
    return $ map (mkMapRow header) rows
  where
    mkMapRow header row = Map.fromList $ zipCells header row

    -- Align header cells with row cells by column, padding with "" on
    -- whichever side is missing a cell for that column.
    zipCells :: [Cell] -> [Cell] -> [(Text, Text)]
    zipCells [] _ = []
    zipCells header [] = map (\h -> (txt h, "")) header
    zipCells header@(h:hs) row@(r:rs) =
      case comparing (fst . cellIx) h r of
        LT -> (txt h , "" ) : zipCells hs row
        EQ -> (txt h , txt r) : zipCells hs rs
        GT -> ("" , txt r) : zipCells header rs

    txt = fromMaybe "" . value
-- Reverse each row's cells. Presumably upstream accumulates cells in
-- reverse order — TODO confirm against 'groupRows' output ordering.
reverseRows = CL.map reverse

-- Group the incoming cell stream into rows by row number.
groupRows = CL.groupBy ((==) `on` (snd.cellIx))

-- Keep only cells whose column index is in the given set.
filterColumns cs = CL.filter ((`S.member` cs) . col2int . fst . cellIx)
col2int = T.foldl' (\n c -> n*26 + ord c - ord 'A' + 1) 0
-- | Stream every cell of the given worksheet. @sheetId@ is a 0-based
-- index into the alphabetically sorted xl/worksheets entries.
getSheetCells
  :: MonadThrow m => Xlsx -> Int -> Source m Cell
getSheetCells (Xlsx{archive=ar,sharedStrings=ss}) sheetId
  | sheetId < 0 || sheetId >= length sheets
    = error "parseSheet: Invalid sheetId"
  | otherwise
    = case xmlSource ar (sheets !! sheetId) of
        -- The entry came from filesInArchive, so lookup cannot fail.
        Nothing -> error "An impossible happened"
        Just xml -> xml $= mkXmlCond (getCell ss)
  where
    sheets = sort
           $ filter ((== "xl/worksheets") . takeDirectory)
           $ Zip.filesInArchive ar
-- | Parse single cell from xml stream, resolving shared-string and
-- inline values. The cell reference (e.g. "A1") is split into column
-- letters and a numeric row.
--
-- NOTE(review): the @t@ attribute is only matched for "inlineStr", "s",
-- or absent; other values (e.g. "str", "b") would hit a pattern-match
-- failure — confirm expected inputs.
getCell
  :: MonadThrow m => M.IntMap Text -> Sink Event m (Maybe Cell)
getCell ss = Xml.tagName (n"c") cAttrs cParser
  where
    cAttrs = do
      cellIx <- Xml.requireAttr "r"
      style <- Xml.optionalAttr "s"
      sharing <- Xml.optionalAttr "t"
      Xml.ignoreAttrs
      return (cellIx,style,sharing)

    cParser a@(ix,style,sharing) = do
      val <- case sharing of
        -- Inline string: the text lives in <is><t>...</t></is>.
        Just "inlineStr" -> tagSeq ["is", "t"]
        -- Shared string: <v> holds an index into the shared-strings table.
        Just "s" -> tagSeq ["v"]
                    >>= return . join . fmap ((`M.lookup` ss).int)
        Nothing -> tagSeq ["v"]
      return $ Cell (mkCellIx ix) (int <$> style) val

    -- Split e.g. "AB12" into ("AB", 12).
    mkCellIx ix = let (c,r) = T.span (>'9') ix
                  in (c,int r)
-- | Add the SpreadsheetML namespace to an element name.
n x = Name
  { nameLocalName = x
  , nameNamespace = Just "http://schemas.openxmlformats.org/spreadsheetml/2006/main"
  , namePrefix = Nothing}
-- | Get text from several nested tags, e.g. @["is","t"]@ reads the text
-- inside @\<is>\<t>...\</t>\</is>@.
--
-- NOTE(review): partial — the empty-list case is not handled; all
-- current callers pass a non-empty list.
tagSeq :: MonadThrow m => [Text] -> Sink Event m (Maybe Text)
tagSeq (x:xs)
  = Xml.tagNoAttr (n x)
  $ foldr (\x -> Xml.force "" . Xml.tagNoAttr (n x)) Xml.content xs
-- | Get xml event stream from the specified file inside the zip archive;
-- 'Nothing' when the entry does not exist.
xmlSource
  :: MonadThrow m => Zip.Archive -> FilePath -> Maybe (Source m Event)
xmlSource ar fname
  = Xml.parseLBS Xml.def
  . Zip.fromEntry
  <$> Zip.findEntryByPath fname ar
-- Get shared strings (if there are some) into an IntMap keyed by their
-- position in xl/sharedStrings.xml (the index cells refer to).
getSharedStrings
  :: (MonadThrow m, Functor m)
  => Zip.Archive -> m (M.IntMap Text)
getSharedStrings x
  = case xmlSource x "xl/sharedStrings.xml" of
      Nothing -> return M.empty
      Just xml -> (M.fromAscList . zip [0..]) <$> getText xml

-- | Fetch all text content from an xml event stream.
getText xml = xml $= mkXmlCond Xml.contentMaybe $$ CL.consume
---------------------------------------------------------------------
-- | Read a decimal 'Int' prefix from the text; calls 'error' on
-- unparsable input (deliberately partial, matching original behaviour).
int :: Text -> Int
int t =
  case T.decimal t of
    Left err     -> error err
    Right (i, _) -> i
-- | Create conduit from xml sink.
-- Resulting conduit filters nodes that `f` can consume and skips everything
-- else.
--
-- FIXME: Some benchmarking required: maybe it's not very efficient to `peek`
-- each element twice. It's possible to swap call to `f` and `CL.peek`.
mkXmlCond f = sequenceSink () $ const
  $ CL.peek >>= maybe                              -- try get current event from the stream
      (return Stop)                                -- stop if stream is empty
      (\_ -> f >>= maybe                           -- try consume current event
               (CL.drop 1 >> return (Emit () []))  -- skip it if can't process
               (return . Emit () . (:[])))         -- return result otherwise
| f-me/xlsx-parser | src/Data/Xlsx/Parser.hs | mit | 5,867 | 0 | 17 | 1,356 | 1,868 | 990 | 878 | 136 | 5 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.