code
stringlengths 5
1.03M
| repo_name
stringlengths 5
90
| path
stringlengths 4
158
| license
stringclasses 15
values | size
int64 5
1.03M
| n_ast_errors
int64 0
53.9k
| ast_max_depth
int64 2
4.17k
| n_whitespaces
int64 0
365k
| n_ast_nodes
int64 3
317k
| n_ast_terminals
int64 1
171k
| n_ast_nonterminals
int64 1
146k
| loc
int64 -1
37.3k
| cycloplexity
int64 -1
1.31k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-
This module contains the splitting part of the HaCoTeB project.
-}
module HaCoTeB.Splitter where
{-
Split one text file into sections. Each section is separated by an empty line.
-}
-- | Split the contents of one text file into sections, where sections are
-- separated by empty lines.  A trailing separator yields a trailing empty
-- section, and empty input yields a single empty section.
split :: String -> [[String]]
split = go . lines
  where
    go ls = case break null ls of
      (section, [])       -> [section]
      (section, _ : rest) -> section : go rest
|
mihaimaruseac/HaCoTeB
|
src/HaCoTeB/Splitter.hs
|
bsd-3-clause
| 361
| 0
| 13
| 92
| 99
| 55
| 44
| 5
| 2
|
{-# LANGUAGE OverloadedStrings #-}
module Views.MemberList (memberListT) where
import Data.Text.Lazy (Text)
import Data.Monoid
import Text.Blaze.Html5
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Views.Common
-- | Render the member-management page for a group: a lead paragraph, an
-- editable member table with an "add" row, hidden status alerts, and the
-- page's javascript hooks.
memberListT :: [Text] -> Text -> Html
memberListT members groupName =
  pageT ("Members of the group '" <> groupName <> "'") $ do
    -- expose the group name to the page's javascript
    H.script ! A.type_ "text/javascript" $ do
      "var groupName = '"
      toHtml groupName
      "';"
    H.p ! A.class_ "lead text-center" $ do
      "Add, edit or delete members of "
      H.b $ toHtml groupName
    H.div ! A.class_ "text-center" ! A.style "margin: 10px auto;" $
      H.table ! A.id "edittable" ! A.class_ "table table-condensed" $ do
        H.thead $
          H.tr $ do
            H.th ! A.width "70%" ! A.class_ "text-center" $ "Member"
            H.th ! A.width "30%" $ mempty
        H.tbody $ mapM_ renderMember members
        H.tfoot $
          H.tr $ do
            H.td $ inp "member" "%@luxola.com"
            H.td $
              H.button ! A.class_ "btn btn-success btn-xs add-btn" $ "Add a member"
    -- status alerts, toggled from memberlist.js
    H.div ! A.class_ "alert alert-success" ! A.id "updatesuccess" $
      "Successfully updated."
    H.div ! A.class_ "alert alert-error" ! A.id "updateerror" $
      "An error occured when trying to update the members in the DB."
    H.div ! A.class_ "alert alert-error" ! A.id "fieldempty" $
      "You left one of the fields empty."
    H.script ! A.type_ "text/javascript" ! A.src "/sproxy/static/js/memberlist.js" $
      mempty
  where
    -- one table row per member: inline-editable name plus a delete button
    renderMember m = tr $ do
      td ! A.class_ "edit member-edit" $ toHtml m
      td $ a ! A.class_ "delete-btn btn btn-danger btn-xs" $ "Delete"
|
alpmestan/spw
|
src/Views/MemberList.hs
|
bsd-3-clause
| 2,104
| 0
| 21
| 819
| 516
| 245
| 271
| 45
| 1
|
import Math.Topology.CubeCmplx.DirCubeCmplx
import Math.Topology.CubeCmplx.CornerReduce
import Control.DeepSeq
-- Entry point: force the full torus complex with `deepseq` before reducing
-- it, so construction is not interleaved with the reduction/printing.
-- NOTE(review): with default fixities this parses as
--   print ((torus3d `deepseq` uncurry cmplxReduce) torus3d)
-- i.e. torus3d is the value being forced -- confirm that is the intent.
main = print $ torus3d `deepseq` uncurry cmplxReduce $ torus3d
|
mmisamore/directed-cubical
|
Examples/torus3d.hs
|
bsd-3-clause
| 177
| 0
| 7
| 19
| 42
| 25
| 17
| 4
| 1
|
{-# LANGUAGE OverlappingInstances,
ParallelListComp,
TypeSynonymInstances,
FlexibleInstances,
FlexibleContexts #-}
--
-- Circuit compiler for the Faerieplay hardware-assisted secure
-- computation project at Dartmouth College.
--
-- Copyright (C) 2003-2007, Alexander Iliev <sasho@cs.dartmouth.edu> and
-- Sean W. Smith <sws@cs.dartmouth.edu>
--
-- All rights reserved.
--
-- This code is released under a BSD license.
-- Please see LICENSE.txt for the full license and disclaimers.
--
-- code to generate a circuit from an unrolled list of statements,
-- consisiting only of SAss, and SIfElse.
{-
How should non-input arrays be dealt with?
- need to establish a gate for the array
- add a series of initializations to zero the gates? not too
sensible, best to have that as part of the runtime. but, do add a
gate to initialize (to zero) the whole array
How is struct initialization dealt with?
- just have runtime rules about default values for uninitialized
gates?
- but, each gate needs an input, so will need to connect to the zero
literal (or whatever) anyway.
- so, it's actually the compiler which does the rules for this: when
a value is needed and it's unitialized, pull in from a zero
literal.
-}
module Faerieplay.CircGen (
Circuit,
Gate (..),
Op (..),
GateFlags (..),
genCircuit,
clip_circuit,
extractInputs,
showCctGraph,
CctShow(..),
testNextInt,
) where
import Array (array, (!))
import Monad (foldM, mplus, liftM, zipWithM)
import Maybe (fromJust, isJust, fromMaybe, catMaybes, isNothing)
import List
import Control.Monad.Trans (lift)
import Control.Exception (assert)
import Data.Graph.Inductive.Graph ((&))
import qualified Data.Graph.Inductive.Graph as Gr
import qualified Data.Graph.Inductive.Basic as GrBas
import qualified Data.Graph.Inductive.Graphviz as Graphviz
import qualified Data.Graph.Inductive.Tree as TreeGr
import qualified Data.Graph.Inductive.Query.DFS as GrDFS
import qualified Data.Graph.Inductive.Query.BFS as GrBFS
import qualified Text.PrettyPrint as PP
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Tree as Tree
-- import qualified Data.IntSet as IS
-- import qualified Data.IntMap as IM
import qualified Control.Monad.State as St
import IlievUtils.Misc as Lib
import IlievUtils.UDraw as UDraw
-- import Sasho.XDR
import qualified IlievUtils.TreeLib as TreeLib
import qualified Faerieplay.Container as Cont
import Faerieplay.Mapping as Mapping
-- import Faerieplay.Stack as Stack (Stack(..), maybeLookup)
import qualified IlievUtils.GraphLib as GrLib
import Faerieplay.Common (trace,LogPrio(..),logmsg,logProgress,
logDebug,logDump,RunFlag(..))
import Faerieplay.Intermediate as Im
-- should we generate SPrint gates for print() statements?
-- (compile-time default; 'genStm' additionally consults the run-time DoPrint
-- flag before generating Print gates)
cDOPRINT = False
-- should we store ELit gate locations, and only generate one gate per literal?
-- (currently off; duplicate literal gates are instead collapsed after
-- generation by 'collapse_lit_gates')
cSTORE_LITS = False
-- | Which run-time flags are we interested in?
cRELEVANT_RUN_FLAGS = [DoPrint]
-- gate flags
-- | Extra attributes carried by a gate, in addition to its operation.
data GateFlags = Output -- ^ an output gate
| Terminal -- ^ a gate which has to be reached, ie not trimmed out of
-- ^ the circuit. currently Output and Print gates
deriving (Eq,Ord
, Show, Read
)
-- the gate operations
-- | The operation performed by a gate.  Arithmetic/logic ops wrap the
-- intermediate-language operators; the remaining constructors cover array
-- access, inputs, conditional select, slicing, literals and printing.
data Op =
Bin Im.BinOp
| Un Im.UnOp
-- read from a dynamic array;
-- inputs: [enable, array, index];
-- if 'enable' is true, output is
-- 1: the new array value (or rather pointer, as the runtime will manage the actual
-- values), and
-- 2: of the array element type: either a basic type like Int etc, or a list of Slicer
-- gates which extract the basic components of the complex type in this array
-- if 'enable' is false, output is
-- 1: the new array value (probably same as the old one)
-- 2: NIL
| ReadDynArray
-- initialize an array
-- parameters:
-- 1) element size in bytes
-- 2) number of elems
| InitDynArray Int Integer
-- update array; inputs = [enable, array, index, val1, val2, ...]; output is
-- the updated array. The parameters are:
-- - a slice of where inputs (concatenated) should end up in the array element.
--
-- The input will consist of multiple gates in case of a complex
-- type, and the runtime can just concat the values to get a lump to
-- write to the (sliced) array location
--
-- if 'enable' is false, do not actually write the value.
| WriteDynArray Im.FieldLoc
-- an input gate
| Input
-- select one of two values based on a test; the input wires are to
-- be: [test, src_true, src_false]
| Select
-- a gate which takes only a part of its input, specified by a
-- zero-based offset and a length, in bytes;
-- used after a ReadDynArray, to collect the output of ReadDynArray
-- into separate gates
-- for internal use (in Runtime.hs), want to also keep a more high-level interpretation:
-- offset and length in GateVal's
| Slicer FieldLoc
| Lit Im.Lit -- a literal
| Print String -- ^ a void-type print gate: prints a value preceded by a
-- ^ fixed prompt
deriving (Eq, Show, Read)
--
-- QUESTION: do we include as part of Gate the numbers of the input
-- Gate's? That info should be available in the graph structure, BUT
-- the order of in-edges is important for non-commutative operators,
-- whereas in standard graphs the order of edges is immaterial. For
-- now I do include the input gate numbers
-- | One gate of the circuit.  The gate number doubles as the graph Node id
-- (see the NOTE below the type), and 'gate_inputs' duplicates the in-edges
-- so that input ORDER is preserved for non-commutative operators.
data Gate = Gate {gate_num :: Int, -- the gate number
gate_typ :: Typ, -- the output type of the gate
gate_op :: Op, -- gate operation (+,*, & etc)
gate_inputs :: [Int], -- numbers of the input gates, in the
-- right order. may do away with this
-- if the graph can mainatain
-- edge-order
gate_depth :: Int, -- depth in nested conditionals
gate_flags :: [GateFlags],
gate_doc :: GateDoc } -- some documentation, eg. a var name
deriving (Show, Read)
-- | Documentation attached to a gate: the source expressions it came from.
type GateDoc = [Exp]
--setFlags newfl g = g {flags=newfl}
--setGateType newt g = g {typ = newt}
-- NOTE: the gate number is the same as the Node value in the Graph
-- (which is also an Int)
-- | Pair a 'Gate' with its gate number, giving a labelled graph node.
-- (The gate number is the same as the Node value in the graph.)
gate2lnode :: Gate -> Gr.LNode Gate
gate2lnode g = (gate_num g, g)

-- Record-update helpers: lift a function onto one field of a 'Gate'.
gateProjNum f g = g { gate_num = f (gate_num g) }
gateProjSrcs f g = g { gate_inputs = f (gate_inputs g) }
gateProjDoc f g = g { gate_doc = f (gate_doc g) }
gateProjFlags f g = g { gate_flags = f (gate_flags g) }
-- needed to lookup a variable's current gate number
-- Structs and direct arrays occupy multiple gates, so need to actually store
-- a list of gate numbers
-- And now, need to place array references in a separate (non-stacked, shared across cond.
-- scopes) table. To this end:
-- - when adding a Var, its type needs to be supplied, so that we can find the component
-- EArr's.
-- - We keep the entire list in the normal (scoped) table, but each entry is annotated:
-- EArr entries have an annotation with the EArr, so can then look it up in the
-- ArrLocTable table.
-- Also need to keep the Var's type, as otherwise we cannot get it in
-- genCondExit.addSelect.
-- - The ArrLocTable maps from EArr (which is Exp) -> Node, and is used for all EArr with
-- a location.
-- | One location entry: the graph node holding the value, plus an optional
-- annotation (used to mark EArr entries; see comment above).
type LocTableVal = (Gr.Node, Maybe LocAnnot)
type LocTable = Map.Map Var ([LocTableVal], Typ)
data LocAnnot = Array Exp -- this is an EArr, as specified
deriving Show
type ArrLocTable = Map.Map Exp -- ^ an EArr expression
Gr.Node -- ^ its location.
-- type OutMonad a = St.State MyState a
-- | The circuit-generation monad: state threaded through generation.
type OutMonad = St.State MyState
-- nodes are labelled with a Gate, and the edges are for now unlabelled
type Circuit = TreeGr.Gr Gate ()
type CircuitCtx = Gr.Context Gate ()
-- the main function here
{-
genCircuit :: TypeTable -> -- the type table
[Stm] -> -- the Unroll'd [Stm]
[TypedName] -> -- parameters of main()
Circuit -- the resulting circuit
-}
-- | Generate the circuit for these unrolled statements, with the given input parameters.
-- Pipeline: run the stateful generation ('genCircuitM'), collapse duplicate
-- literal gates, trim gates not reverse-reachable from Terminal gates
-- ('clip_circuit'), renumber consecutively, then flatten to a topologically
-- sorted gate list with expanded types.  Returns (renumbered circuit, gates).
genCircuit rtFlags type_table stms args =
let startState = MyState { loctable = ([Map.empty], Map.empty),
cond_nodes = [], -- empty stack
counter = 0,
typetable = type_table,
flags = rtFlags `intersect` cRELEVANT_RUN_FLAGS
}
`logDebug`
("genCircuit rtFlags = " ++ show rtFlags)
-- the main circuit generation step.
(circ, st) = St.runState (genCircuitM stms args)
startState
bad_ctxs = checkCircuit circ
{-
_ = if not $ null bad_ctxs
then error ("Bad contexts after circuit generation:\n"
++ (concat $ intersperse "\n" $ map showCtx bad_ctxs)
)
else error "No bad contexts!"
circ' = Gr.delNodes (map Gr.node' bad_ctxs) circ
`trace` ("Bad contexts after circuit generation:\n"
++ (concat $ intersperse "\n" $ map showCtx bad_ctxs)
)
-}
unique_lits_cct = collapse_lit_gates circ
`logProgress`
("Initial circuit generation done; circ len="
<< Gr.noNodes circ
-- << showBadCtxs bad_ctxs
)
`logDump`
(let gr_pretty = showCctGraph circ
in "The generated graph:\n" ++ gr_pretty
)
-- `trace` ("The full circuit: " << show circ)
bad_ctxs2 = checkCircuit unique_lits_cct
clipped_circ = clip_circuit unique_lits_cct
`logProgress`
("collapse_lit_gates done; circuit size="
<< Gr.noNodes unique_lits_cct
)
{-
`trace` ("Number of Lit gates before trim: "
<< numLitGates unique_lits_cct
<< "; and without outgoing edges: "
<< (length $
GrBas.gsel
((isLit . Gr.lab') .&& ((== 0) . Gr.outdeg'))
(unique_lits_cct)
)
<< showBadCtxs bad_ctxs2
)
-}
renum_circ = renumber clipped_circ
`logProgress` ("clipped_circ done; Number of Lit gates after trim: "
<< numLitGates clipped_circ)
-- `trace` ("The clipped circuit " << show clipped_circ)
gate_list = flatten_circuit renum_circ
-- `trace` ("The renumbered circuit " << show renum_circ)
-- expand all Typ fields in the gates, as it helps with printing out the types
-- (using docTypMachine)
gate_list' = map (expandGateTyp type_table) gate_list
-- TODO: expand out the comments on some gates too, where the comments may be
-- shown to users, eg. for input and output gates.
in (renum_circ, gate_list')
`trace` ("The DFS forest: " -- << genDFSForest renum_circ
<< "And the flat circuit:\n" << map strShow gate_list')
where numLitGates = length . GrBas.gsel (isLit . Gr.lab')
isLit g = case gate_op g of (Lit _) -> True
_ -> False
-- | Check the circuit for consistency, return the list of bad contexts (hopefully empty)
-- Invariant checked per context: the in-edge sources match the gate's
-- recorded 'gate_inputs' (as multisets), and the node id equals 'gate_num'.
checkCircuit :: Circuit -> [CircuitCtx]
checkCircuit c = let bads = catMaybes . map checkInvariants . GrLib.contexts $ c
in if null bads
then bads
else bads {-`trace` (let gr_pretty = showCctGraph c
in "The bad graph:\n"
<< gr_pretty << "\n") -}
-- | Return Nothing if the 'Context' is fine, Just ctx if it has a problem.
checkInvariants ctx@(ins,n,gate,outs) = if (sort (map snd ins) /= sort (gate_inputs gate) ||
gate_num gate /= n)
then Just ctx
else Nothing
-- | Render a list of bad contexts for diagnostics; empty string if none.
showBadCtxs ctxs = if null ctxs
then ""
else "Bad circuit contexts:\n"
<< (concat $ intersperse "\n" $ map showCtxDbg ctxs)
-- | Debug-render one graph context: node id, in-neighbours, gate, out-neighbours.
showCtxDbg :: CircuitCtx -> String
showCtxDbg ctx@(ins,n,gate,outs) =
("Node " << n
<< "; ins=" << map snd ins
<< "; gate=" << gate
<< "; outs=" << map snd outs)
-- | Draw the DFS spanning forest of the circuit (node numbers only).
genDFSForest g = Tree.drawForest $ GrDFS.dffWith' strNode g
where strNode (_,n,_,_) = show n
-- keep only gates which are reverse-reachable from terminal gates
-- Implementation: reverse all edges, BFS from the Terminal-flagged gates,
-- and keep exactly the reached nodes in the original graph.
clip_circuit :: Circuit -> Circuit
clip_circuit c = let c_rev = {-# SCC "grev" #-} GrBas.grev c
out_nodes = map Gr.node' $
{-# SCC "gsel" #-} GrBas.gsel isTerminal c_rev
reach_nodes = {-# SCC "bfsn" #-} GrBFS.bfsn out_nodes c_rev
reach_gr = keepNodes reach_nodes c
in reach_gr
where isTerminal = elem Terminal . gate_flags . Gr.lab'
-- renumber the nodes so they are consecutive, in the same order as the original numbering
-- NOTE(review): 'array (head nodes, last nodes)' assumes a non-empty graph;
-- head/last would fail on an empty circuit -- confirm callers guarantee this.
renumber :: Circuit -> Circuit
renumber g = Gr.gmap doRenum g
-- the returned map (an array) will map from current numbering to consecutive
-- numbering
where renumMap = let nodes = sort $ Gr.nodes g
in array (head nodes, last nodes)
(zip nodes [0..])
`trace` ("Renum array range=" << head nodes
<< "-" << last nodes
<< "; num nodes=" << Gr.noNodes g)
-- `trace` ("renumber nodes: " << nodes)
-- renumber a whole context: in-edges, the node itself, the gate's own
-- number and recorded input numbers, and the out-edges
doRenum (ins,node,gate,outs) = ( map (projSnd renum) ins,
renum node,
gateProjNum renum $
gateProjSrcs (map renum) $ gate,
map (projSnd renum) outs
)
`trace`
("doRenum ins=" << map snd ins
<< "; outs=" << map snd outs
<< "; gate num=" << gate_num gate
<< "; gate srcs=" << gate_inputs gate
)
renum node = renumMap ! node
`trace` ("renumbering node " << node)
{-
renumIns g = g { gate_inputs = (let renum node = renumMap ! node
`trace`
("Renum ins on gate " << g
<< "; in node=" << node)
in map renum $ gate_inputs g
)
}
-}
-- | Flatten the circuit into a list of gates in topological order, with all
-- Input gates moved to the front.  The partition is stable, so it does not
-- disturb the topological order within either group.
flatten_circuit :: Circuit -> [Gate]
flatten_circuit c = inputs ++ rest
  where
    (inputs, rest) = List.partition isInput (GrDFS.topsort' c)
    isInput        = (== Input) . gate_op
-- | Collapse replicated Lit gates to one gate per literal value, and patch up the
-- circuit.
-- No need to remove the discarded Lit gates, they will be removed when the circuit is
-- trimmed.
-- Implementation: keep the lowest-numbered gate per literal value, then
-- redirect every in-edge and recorded gate input that referenced a duplicate.
collapse_lit_gates :: Circuit -> Circuit
collapse_lit_gates g = let (lit_map, replace_map) = build_maps g
toporder = GrDFS.topsort g
-- NOTE: doing the map in the right order is important. Otherwise
-- an edge may be created in the result graph to/from a vertex
-- which is not yet in there.
in GrLib.vmap_ordered (patch_ctx replace_map) toporder g
where -- Build two maps:
-- lit_map: at what gate is each Lit value? We use the first gate where the lit
-- value occurs
-- replace_map: map from current Lit gate numbers, to the new compacted lit gates.
-- note that using an IntMap for replace_map did not help performance but
-- worsened by 5% or so.
build_maps g = foldl add_gate_to_maps (Map.empty, Map.empty) $
map Gr.lab' $
-- make sure lowest numbered Contexts are first, so they are the
-- kept as the gates for each Lit value
sortBy (compareWith Gr.node') $
GrBas.gsel (isLit . gate_op . Gr.lab') g
add_gate_to_maps (lit_map, replace_map) g =
let (Lit l) = gate_op g
mb_last_entry = Mapping.lookup l lit_map
in case mb_last_entry of
Nothing -> -- new lit - add it to the lit_map
( Mapping.insert l (gate_num g) lit_map,
replace_map )
Just entry -> -- already have a gate $g$ for this lit,
-- add redirect from this gate number to
-- $g$
( lit_map,
Mapping.insert (gate_num g)
entry
replace_map )
-- update all the in-edges and gate inputs to point to the selected unique Lit
-- gates.
patch_ctx repl (ins,n,g,outs)
= let ins' = map (projSnd $ do_replace repl) ins
g_ins = map (do_replace repl) $ gate_inputs g
-- outs should not be affected anywhere
-- outs' = map (projSnd $ do_replace repl) outs
-- NOTE: we do not use outs here, and rely on
-- all edges to be added as in-edges of some
-- gate, which should be fine
ctx' = (ins', n, g {gate_inputs = g_ins}, [])
in ctx'
`trace`
("For Lit compaction on node " << n
<< " replacing " << map snd ins
<< " with " << map snd ins'
<< "; ctx'=" ++ showCtxDbg ctx')
-- replace a key with a value if key is in the map, otherwise just return the key.
do_replace map x = fromMaybe x (Mapping.lookup x map)
isLit (Lit _) = True
isLit _ = False
-- | Fully expand a gate's output type via the type table; this helps when
-- printing the gate types out later.
expandGateTyp :: TypeTable -> Gate -> Gate
expandGateTyp typetable g =
  g { gate_typ = Im.expandType' typetable [Im.DoAll] (gate_typ g) }
-- | Remove the nodes (and associated edges) not in 'keeps' from 'g'.
-- Profiling showed the O(n^2) list (\\) difference was a hot spot, so the
-- difference is computed via Sets instead.
keepNodes :: Gr.Graph gr => [Gr.Node] -> gr a b -> gr a b
keepNodes keeps g =
  let unwanted = Set.toList (Set.difference (Set.fromList (Gr.nodes g))
                                            (Set.fromList keeps))
  in  Gr.delNodes unwanted g
-- | The stateful computation that generates the whole circuit: build the
-- Input gates, push the root (unconditional) condition node, then fold every
-- statement into the growing circuit.
genCircuitM :: [Stm] -> [TypedName] -> OutMonad Circuit
genCircuitM stms args = do
  inputsCct <- genInputs args
  rootedCct <- initCondNodes inputsCct
  foldM genStm rootedCct stms
-- | Generate the initial graph for the inputs; it consists of the Input
-- gates only, with no edges.
genInputs :: [TypedName] -> OutMonad Circuit
genInputs names = do
  gateGroups <- mapM createInputGates names
  let lnodes = map gate2lnode (concat gateGroups)
  return (Gr.mkGraph lnodes [])
-- | Initialise the stack of condition nodes by adding a literal True gate
-- for the unconditional (top-level) area; returns the circuit extended with
-- that new condition node.
initCondNodes cct = do
  condNum <- nextInt
  depth   <- getDepth
  let rootCond = Gate condNum BoolT (Lit (LBool True)) [] depth [] []
  pushCondNode condNum
  return (mkCtx rootCond & cct)
-- create the input gates for this argument to main(), and add the vars
-- to the top-level LocTable
-- will make this return a [Gate], for when we deal with arrays etc
--
-- we'll only insert mappings into the LocTable at the top level, not
-- in recursive calls (then we'd be inserting struct field names as
-- actual vars)
-- also have some gymnastics to generate full annotations for the gates
-- | Create the Input gate(s) for one parameter of main().  Struct parameters
-- recurse per-field (producing one gate per primitive component); only
-- top-level names -- not recursive field visits -- get LocTable entries.
createInputGates :: TypedName -> OutMonad [Gate]
createInputGates (name, typ) =
do typ' <- Im.expandType [DoFields, DoTypeDefs] typ
createInputGates' Nothing Nothing (name, typ')
where
createInputGates' mb_parent_exp mb_field_num (name, typ) =
do let var = add_vflags [FormalParam] (VSimple name)
-- NOTE: the ExpT annotation here is important so we can later
-- print actual field names, and not just field numbers; see
-- Intermediate.docExp
this_exp = ExpT typ $ maybe (EVar var)
(\e -> EStruct
e
(fromJustMsg "createInputGates"
mb_field_num)
)
mb_parent_exp
case typ of
(StructT (fields,_))
-> do gatess <- zipWithM (createInputGates' (Just this_exp))
(map Just [0..])
fields
let gates = concat gatess
is = map gate_num gates
-- if we have a parent expression, must be a part of a parent
-- struct, so do not add gate locations.
if isNothing mb_parent_exp then setVarLocs (var,typ) is
else return ()
return gates
{-
(SimpleT tname)
-> let typ' = fromJust $ Map.lookup tname type_table
in createInputGates (name, typ')
-}
-- IntT, BoolT, ArrayT (for dynamic arrays):
_ -> do i <- nextInt
if isNothing mb_parent_exp
then setVarLocs (var,typ) [i]
`trace`
("inserting input var " << var << " into LocTable")
else return ()
gate <- mkGate i typ this_exp
return [gate]
-- build one Input gate with a fully expanded type and the doc expression
mkGate i typ doc_exp = do typ_full <- Im.expandType [DoAll] typ
return $ Gate i
typ_full
Input
[]
0
[]
[doc_exp]
-- this just adds an obvious dummy entry of the correct size (ie. number of graph nodes)
-- in the loc table for this lval.
-- we only need to do it for structs which are variables, and not
-- expressions (ie. parts of other complex types)
-- PRE: the lval does not have a top-level ExpT
-- NOTE(review): -12345678 is presumably a sentinel "uninitialised" node id,
-- overwritten as the individual fields are assigned -- confirm.
genComplexInit (lval, t) size =
case lval of
(EVar var) -> setVarLocs (var,t) (replicate size (-12345678))
`logDebug`
("genComplexInit setting dummy locs for var " << var)
_ -> return ()
`logDebug`
("genComplexInit on non-var lval " << lval)
-- get the gates corresponding to expression 'e', if necessary adding
-- new gates to the circuit, and add e_doc as the last annotation of
-- that gate
-- returns the new circuit, and the gates where 'e' is
-- also have the gates pass through an optional hook which may update them, eg. add
-- flags
-- From genExp': Left = freshly generated context(s) (doc/hook applied before
-- insertion); Right = existing node(s) (doc/hook applied via label update).
genExpWithDoc :: Circuit ->
Maybe (Gate -> Gate) -> -- ^ A hook to apply to a gate generated for the
-- expression
Exp -> -- ^ The expression to translate
Exp -> -- ^ The doc expression
OutMonad (Circuit, [Gr.Node])
genExpWithDoc c ghook e e_doc =
do (c', res) <- genExp' c e
let (c'', nodes) = case res of
(Left [ctx]) ->
-- we got Contexts back, so
-- add the doc to the gate, and apply the
-- hook
let ctx' = processCtx ctx
in (ctx' & c', [Gr.node' ctx'])
(Left ctxs) ->
-- FIXME: adding the same doc to all the gates
let ctxs' = map processCtx ctxs
in ( foldl (flip (&)) c' ctxs',
map Gr.node' ctxs' )
`trace` "genExpWithDoc on multiple contexts"
(Right nodes) ->
-- no new gate generated, but we will
-- update the gate doc and apply the hook
let c'' = foldl (updateLabel processGate)
c'
nodes
in (c'', nodes)
return (c'', nodes)
where addGateDoc exp = gateProjDoc $ push $ stripExpT exp
processGate g = addGateDoc e_doc $ maybeApply ghook g
processCtx = tup4_proj_3 processGate
-- update the label of a graph Context
updateCtxLab new_label = tup4_proj_3 (const new_label)
-- get the offset of an expression, relative to the simple variable
-- under which this expression is, eg. for a struct expression x.y.z we want the
-- offset of field 'y.z' under struct 'x'; also return what is the root
-- variable, 'x' in this example
-- the 'loc_extr' parameter is a function which specifies which struct
-- locations we use, primitive type (getStrTLocs) or byte (getStrTByteLocs)
getRootvarOffset :: (TypeTableMonad m) =>
( ([TypedName], [FieldLoc]) -- the params of a StructT
-> [(Int,Int)] ) ->
Exp ->
m (Var, Typ, Int)
getRootvarOffset loc_extr exp =
do let recurse = getRootvarOffset loc_extr
case exp of
(ExpT t (EVar v)) -> return (v, t, 0)
-- an EVar without its ExpT annotation violates a typechecker
-- invariant, hence the hard error below
(EVar v) -> error
("getRootvarOffset got an EVar " << exp
<< " without an ExpT annotation!")
(EStruct str_e idx) -> do (v, t, o) <- recurse str_e
fld_info <- getStrTParams str_e
let locs = loc_extr fld_info
preceding = sum $
map snd $
take idx locs
return (v, t, o + preceding)
(ExpT t e) -> recurse e
(EArr arr_e idx_e) -> recurse arr_e
e -> error $
"CircGen: getRootvarOffset: invalid lval "
<< e
-- get the field parameters of a StructT, from the expression carrying the struct
-- (only ExpT-annotated expressions are accepted; anything else is an error,
-- as is an ExpT whose type does not expand to a StructT)
getStrTParams (ExpT t _) = do (StructT field_params) <- Im.expandType [DoTypeDefs] t
return field_params
getStrTParams e = error $
"getStrTParams: unexpected expression "
<< e
-- | Generate circuit gates for one (unrolled) statement, threading the
-- growing circuit.  Handles the assignment forms (plain variable, struct
-- field, array element -- the latter two possibly via WriteDynArray gates),
-- conditionals (enable gates + 'genCondExit'), and SPrint.
genStm :: Circuit -> Stm -> OutMonad Circuit
genStm circ stm =
let bad_ctxs = checkCircuit circ
in
case stm `logDump` ("genStm of " << stm)
{- << ": "
<< showBadCtxs (checkCircuit circ)) -}
of
-- this is a message from the typechecker that a Struct or such
-- will be coming along just now.
-- this case should be before the "gen-stm-ass-var" case, or an EStructInit may end
-- up in genExp which doesn't know how to treat it.
(SAss (ExpT _ lval) (ExpT t (EStructInit size))) ->
do genComplexInit (lval, t) size
`logDebug`
("calling genComplexInit, with stm=" << stm)
return circ
-- assignment to a variable.
s@(SAss (ExpT lval_t lval@(EVar var)) exp) ->
{-# SCC "gen-stm-ass-var" #-}
do circ' <- checkOutputVars circ var Nothing
{- `trace` ("genStm " << s <<
"; var=" << var <<
"; loctable=" << var_table) -}
(c',nodes) <- genExpWithDoc circ'
(addOutputFlag var)
exp
lval
setVarLocs (var, lval_t) nodes
return c'
-- do away with lval type annotations for now
-- NOTE: the cases above do actually use the ExpT annotation, so cannot be below
-- this case.
(SAss (ExpT _ lval) val) -> genStm circ (SAss lval val)
s@(SAss lval@(EStruct str_e idx)
val) ->
{-# SCC "stm-ass-estruct" #-}
do type_table <- getTypeTable
(_,locs) <- getStrTParams str_e
let (off,len) = valloc $ locs !! idx
(rv,rv_t,lval_off) <- getRootvarOffset getStrTLocs lval
c2 <- checkOutputVars circ rv (Just (lval_off,len))
(c3, gates) <- genExpWithDoc c2
(addOutputFlag rv)
val
lval
arr_info <- extractEArr lval
case arr_info of
Nothing -> {-# SCC "no-lval-array" #-}
-- just update the lval location(s)
do spliceVarLocs (lval_off,len) (rv,rv_t)
(gates
`logDebug`
("stm-ass-estruct no-lval-array splice = "
<< show (lval_off,len) << "; rv = " << rv
<< "; stm = " << stm << "; gates=" << gates)
)
return c3
Just ((EArr arr_e idx_e), arr_off, locs)
-> -- need to generate a WriteDynArray gate!
-- eg: for x.y[i].z, we will:
-- - add a WriteDynArray gate for x.y, limited to .z
-- - update the gate location for x.y
{-# SCC "have-lval-array" #-}
do (c4, [arr_n]) <- genExp c3 arr_e
(c5, [idx_n]) <- genExp c4 idx_e
condNode <- getCondNode
depth <- getDepth
i <- nextInt
let prep_slice locs = (fst $ head locs,
sum $ map snd locs)
-- get the slice parameters from the field
-- location info given by extractEArr above
slice = Im.FieldLoc
{ byteloc = prep_slice $
map Im.byteloc locs,
valloc = prep_slice $
map Im.valloc locs }
(ExpT arr_t _) = arr_e
-- Adding the index expression as an
-- annotation, so we can tell later if it
-- was static or not.
ctx = mkCtx $
Gate i
arr_t
(WriteDynArray slice)
([condNode, arr_n, idx_n]
++ gates)
depth
[] [idx_e]
spliceVarLocs (arr_off,1) (rv,rv_t) [i]
`trace`
("WriteDynArray: rv = " << rv
<< "; arr_off=" << arr_off)
return $ ctx & c5
-- quite similar to the above. really need to structure this better
s@(SAss lval@(EArr arr_e idx_e) val) ->
{-# SCC "stm-ass-earr" #-}
do (rv,rv_t,off) <- getRootvarOffset getStrTLocs arr_e
-- generate gates for the rval
(c3, gates) <- genExpWithDoc circ
(addOutputFlag rv)
val
lval
-- gate for the array lval
(c4, [arr_n]) <- genExp c3 arr_e
-- gate for the index
(c5, [idx_n]) <- genExp c4 idx_e
depth <- getDepth
cond_node <- getCondNode
i <- nextInt
let (ExpT arr_t _) = arr_e
ctx = {-# SCC "mkCtx" #-}
(mkCtx $ Gate i
arr_t
-- NOTE: writing -1 here to mean "the end"
(WriteDynArray $ Im.FieldLoc (0,-1) (0,-1))
([cond_node, arr_n, idx_n] ++ gates)
depth
[] [])
spliceVarLocs (off,1) (rv,rv_t) [i]
return $ {-# SCC "ctx-&-c5" #-} (ctx & c5)
`logDump`
("SAss to EArr: \"" ++ show stm ++
"\n; inserting " ++ show ctx)
-- NOTE: after HoistStm.hs, all conditional tests are EVar
-- NOTE(review): the [testGate] pattern below assumes a conditional test
-- variable occupies exactly one gate -- confirm this invariant.
(SIfElse test@(ExpT _ (EVar testVar))
(locs1, stms1)
(locs2, stms2)) ->
{-# SCC "stm-ifelse" #-}
do [testGate] <- lookupVarLocs testVar >>==
fromJustMsg ("Conditional var " << testVar
<< " not found, within " << stm)
-- add in the enable gates
(circ', t_en_node, f_en_node) <- prepareCondNodes circ testGate
-- do the recursive generation for both branches, and
-- save the resulting var scopes
pushCondNode t_en_node
pushScope
circ1' <- foldM genStm circ' stms1
ifScope <- popScope
popCondNode
pushCondNode f_en_node
pushScope
circ2' <- foldM genStm circ1' stms2
elseScope <- popScope
popCondNode
-- grab the parent scope
parentScope <- getsLocs fst
-- generate the conditional exit gates
circ'' <- genCondExit testGate
circ2'
(parentScope, ifScope, elseScope)
(locs1, locs2)
return circ''
-- we will wire all the variables in x to go through this gate
-- a bit of a hassle as there may be one or more gates feeding into here (in case of
-- a struct). So, add slice gates
(SPrint prompt xs) -> do flags <- getFlags
if not $ elem DoPrint flags
then return circ
else
do i <- nextInt
depth <- getDepth
-- generate parameters for each expression separately.
(circs,
x_gates's,
ts's,
slice_is's,
locs's) <- scanM (doSPrintExp prompt . \(f1,_,_,_,_)->f1)
(circ,[],[],[],[])
xs
>>== tail >>== unzip5
-- now need to shift the offsets of the slicers,
-- as each was made assuming it starts at 0
-- this is quite awkward, as we need to dig inside
-- the FieldLoc's and change the GateVal and byte
-- offsets only
-- 'extrLen' is to get the length out of the
-- FieldLoc struct
-- 'proj' is to project the addition onto the
-- correct offset field of the FieldLoc
let shiftlocs extrLen proj locss
= let lens = map (extrLen . last) locss
shifts = runSumFrom0 lens
in zipWith (\shift locs ->
map (proj (+ shift))
locs)
shifts
locss
locs's2 = shiftlocs (snd . Im.valloc)
proj_FL_val_off
locs's
`trace`
("Stm SPrint: locs's = " << locs's)
locs's3 = shiftlocs (snd . Im.byteloc)
proj_FL_byte_off
locs's2
slicer_ctxs = [mkCtx $
Gate si t (Slicer loc) [i] depth [] []
| si <- concat slice_is's
| t <- concat ts's
| loc <- concat locs's3]
-- and the actual print context
-- NOTE: can't really give it a type, as it can return
-- several values, noone should access it directly
-- anyway, just through the slicers.
let ctx = mkCtx $ Gate i
VoidT
(Print prompt)
(concat x_gates's)
depth
[]
[]
return $ addCtxs (last circs) (ctx:slicer_ctxs)
{- `trace`
-- FIXME: add StreamShow instances for tuples...
("On SPrint, adding contexts " << (ctx:slicer_ctxs)
<< ", with current circuit " << show (last circs))
) -}
s -> error $ "CircGen: unrecognized statement: " ++ (show s)
where addOutputFlag var =
case genVarFlags var of
[] -> Nothing -- no flags needed for this var.
flags -> Just (gateProjFlags (\fs -> fs `union` flags))
`logDebug`
("addOutputFlag adding flags " << (show flags)
<< " to var " << var)
-- | Make a gates to be the two condition nodes for this conditional scope - the
-- true-branch enable node is an AND of
-- the current condition, and the next higher cond node; the false-branch enable is
-- (parent AND (NOT cond))
-- returns the circuit with the enable gates added in, as well as the two gate addresses.
prepareCondNodes cct this_cond
= do parentNode <- getCondNode
[nCondNode, tNode, fNode] <- replicateM 3 nextInt
d <- getDepth
let not_cond_ctx =
mkCtx $ Gate nCondNode
BoolT
(Un Im.Not)
[this_cond]
d
[] []
true_ctx = mkCtx $ Gate tNode
BoolT
(Bin Im.And)
[parentNode, this_cond]
d
[] []
false_ctx = mkCtx $ Gate fNode
BoolT
(Bin Im.And)
[parentNode, nCondNode]
d
[] []
return (addCtxs cct [not_cond_ctx, true_ctx, false_ctx],
tNode,
fNode)
-- what flags to attach to a gate for this 'var'.
-- if it is called "main" and is a function return variable, it needs an Output flag.
genVarFlags var = if (elem RetVar $ vflags var) && (Im.varName var == Im.cMAINNAME)
then [Output, Terminal]
else []
-- do most of the work for a single expression in an SPrint
doSPrintExp prompt circ e =
do (circ', x_ns) <- genExp circ e
slice_is <- replicateM (length x_ns) nextInt
let (t,var) = case (e `trace` "SPrint e = " << e) of
(ExpT t (EVar v)) -> (t,v)
(ExpT t _) -> error ("SPrint `" << prompt
<< "' got a non-var: "
<< e
<< " of type "
<< t)
t_full <- Im.expandType [DoAll] t
let tinfo@(ts, locs) = case t_full of
(StructT (tn's, locs))
-> (map snd tn's,
locs)
_
-> let blen = Im.typeLength Im.tblen
t_full
in ([t_full],
[Im.FieldLoc (0,blen)
(0,1)])
setVarLocs (var, t) slice_is
return (circ', x_ns, ts, slice_is, locs)
-- | Locate an array sub-expression within the given 'Exp', if any.
--
-- By example: for an expression @x.y[i].z.v@ the results are
--   * the array expression itself, @x.y[i]@,
--   * the offset of @.y@ under the root variable @x@,
--   * the 'Im.FieldLoc's of @.z.v@ under the element type of @x.y[i]@.
--
-- Returns 'Nothing' when the expression contains no array access.
extractEArr :: (TypeTableMonad m) => Exp -> m (Maybe (Exp, Int, [Im.FieldLoc]))
extractEArr e = runMaybeT (extractEArr' e)
-- Worker for 'extractEArr', in MaybeT so that the no-array case can
-- short-circuit to Nothing via 'fail'.
-- the typechecker actually inferred a more general type on its own...
extractEArr' :: (TypeTableMonad m) => Exp -> MaybeT m (Exp,
                                                       Int,
                                                       [Im.FieldLoc])
-- base case: a typed array access. Report the whole element's locations
-- and the array's offset under its root variable.
extractEArr' (ExpT elem_t exp@(EArr arr_e idx_e)) =
    do (locs,_) <- lift $ getTypLocs elem_t
       -- here we need the offset in words (int/bool)
       (rv,_,off) <- lift $ getRootvarOffset getStrTLocs arr_e
       return (exp, off, locs )
-- get a recursive answer and return just the slice of this field (idx)
extractEArr' e@(EStruct str_e idx) =
    do (arr_exp,arr_off,sublocs) <- extractEArr' str_e
       -- this struct's field locations
       (_,locs) <- lift $ getStrTParams str_e
       -- pick out the location (in words) for this field
       let (off,len) = Im.valloc $ locs !! idx
       return (arr_exp,
               arr_off,
               (take len $ drop off sublocs))
           `logDebug`
           ("extractEArr' (" << e << ")" <<
            "; sublocs=" << sublocs <<
            "; off=" << off <<
            "; len=" << len)
-- type annotations are transparent here: recurse through them.
extractEArr' (ExpT _ e) = extractEArr' e
-- if we hit a primitive expression, there's no array in the exp. This ends up returning
-- Nothing, not causing a runtime error.
extractEArr' e = fail ""
-- see if this var is an output var, and if so remove the Output flag
-- on its current gates
-- Called when the location of a var is about to be updated
--
-- optionally an offset and length to limit the flag removal to some
-- of the gates, in case only part of a complex output var is being
-- modified.
checkOutputVars :: Circuit -> Var -> Maybe (Int,Int) -> OutMonad Circuit
checkOutputVars c var mb_gate_loc
    -- only the function-return variable of main carries Output flags
    | strip_var var == VSimple cMAINNAME =
        -- remove the output flags there
        do mb_vgates <- lookupVarLocs var
           case mb_vgates of
             -- var has no recorded gates yet: nothing to strip
             Nothing -> return c
             Just vgates ->
                 -- take a slice of the gates if mb_gate_loc is not Nothing
                 do let vgates' = maybe vgates
                                        (\(off,len) -> take len $ drop off vgates)
                                        mb_gate_loc
                        -- and update the Gate flags on those gates
                        -- FIXME: for now we just strip the flags, which may be excessive
                        -- when more flags are introduced.
                        --
                        -- FIXME: we get non-existant node numbers passed (with number
                        -- -12345678), for non-initialized struct fields. The filter is a
                        -- HACK around this
                        vgates'' = filter (/= -12345678) vgates'
                        c' = rmOutputFlag c vgates''
                    return c'
                           `logDebug` ("checkOutputVars removed Output flag from var "
                                       << var << " at gates " << vgates''
                                       << "; gates now="
                                       << map (fromJust . Gr.lab c') vgates')
    | otherwise =
        -- not an output var: circuit passes through unchanged
        return c `logDebug` ("checkOutputVars non-matching var: " << var)
-- | Strip the 'Output' flag from the gates at the given nodes, where
-- present; nodes absent from the circuit are left alone by 'updateLabel'.
-- FIXME: removes both Output and Terminal flags for now.
rmOutputFlag :: Circuit -> [Gr.Node] -> Circuit
rmOutputFlag circ nodes = foldl stripNode circ nodes
    where stripNode = updateLabel $ gateProjFlags (\fs -> fs \\ [Output,Terminal])
-- update the label for a given node in a graph, if this node is present
-- NOTE: strict evaluation
-- had a big space leak here, when using a table of Lit's to reduce the number of Lit
-- gates during circuit generation. The connection between ELit handling, and this
-- function is unclear, but the profiler clearly pointed to a space leak here. Hence all
-- the strictness annotations, which did not help.
-- Implementation: 'Gr.match' extracts the node's context, 'tup4_proj_3'
-- rewrites the label (third component) with 'f', and '&' re-inserts the
-- context into the remainder graph. The SCC pragmas are for profiling.
updateLabel :: (Gr.DynGraph gr, Eq b) => (a -> a) -> gr a b -> Gr.Node -> gr a b
updateLabel f gr node = let (mctx,gr') = {-# SCC "#extract" #-}
                                         strictEval $
                                         Gr.match ({-# SCC "#node" #-} node)
                                                  ({-# SCC "#gr" #-} gr)
                            ctx = fromJustMsg ("updateLabel " << node)
                                              mctx
                            ctx' = (tup4_proj_3 (strictEval f) $! ctx)
                            g_out = ctx' & (strictEval gr')
                        in {-# SCC "re-insert" #-}
                           -- make sure that the modified context we re-inserted is the
                           -- same as the original, except the label
                           assert (let (mctx'',_) = Gr.match node g_out
                                       ctx'' = fromJustMsg ("updateLabel assert") mctx''
                                       (is,n,_,os) = ctx
                                       (is',n',_,os') = ctx''
                                   in (is,n,os) == (is',n',os')
                                  )
                                  g_out
{-
                        in case id $! mctx of
                            Nothing -> {-# SCC "ret-id" #-} gr
                            Just ctx -> {-# SCC "ret-new-gr" #-} (tup4_proj_3 f ctx) &
                                                                 (strictEval gr')
-}
-- generate the gates needed when exiting a conditional block---to
-- conditionally update free variables updated in this scope
--
-- NOTE: the vars in the locals VarSets (ifLocalss and elseLocalss)
-- are without scopes
genCondExit :: (Cont.Container c Var) =>
Gr.Node -- ^ Number of the gate with the condition test value
-> Circuit -- ^ Starting circuit
-> ([LocTable], -- ^ The parent variable scope
LocTable, -- ^ Var scope from the true-branch
LocTable) -- ^ Var scope from the false-branch
-> (c, -- ^ all the variables declared in the true branch
c) -- ^ all the variables declared in the false branch
-> OutMonad Circuit -- ^ The circuit with all the Select gates added in.
genCondExit testGate
circ
(parentScope, -- ^ The parent
ifScope,
elseScope)
(ifLocals, elseLocals) =
let vars = List.nub $
filter nonLocal $
map fst $ -- get just the variables.
concatMap Map.toList [ifScope, elseScope]
sources = map varSources vars
-- `trace` ("non-local scope vars: " << vars)
-- make sure to not give vars with empty source lists to addSelect (eg. a struct
-- with all array elements
select_args = [ (v, (gs_true, gs_false))
| (v, (gs_true, gs_false)) <- zip vars sources , not $ null gs_true
]
in foldM addSelect circ select_args
`logDump`
("genCondExit select_args = " << select_args)
where -- a var is non-local if was not declared in this scope,
-- *and* it appears in the parent scope (needed in the case of
-- generated vars)
-- FIXME: do generated vars ever have to be selected? seems not! they are
-- intrinsically very local in their usage, and so do not need to persist across
-- scopes
nonLocal var = (not $ any (Cont.member (stripScope var))
[ifLocals, elseLocals])
&& (isJust $ maybeLookup var parentScope)
&& (notTemp var)
notTemp (VTemp _) = False
notTemp _ = True
-- return a pair with the the gate numbers where this var
-- can be found, if the cond
-- is true, and if it's false. This depends on which branch
-- (or both) it was updated in
varSources var = let scopes@(scopes_true, scopes_false) =
(case map (Map.member var) [ifScope, elseScope] of
[True, True] -> ([ifScope], [elseScope])
[True, False] -> ([ifScope], parentScope)
[False, True] -> (parentScope, [elseScope]))
out @(gates_true, gates_false) = mapTuple2 (gates var)
scopes
in assert (length gates_true == length gates_false)
out
-- `trace` ("varSources for " << var << ": "
-- << out)
-- find the gates for a var, looking in the given scope stack
-- keep just LocTable entries with a Nothing annotation (ie. not arrays)
-- WARNING: using the annotation details here is not good, it should be exposed
-- just in 'getVarLocs' etc.
gates var scopes = let leaves = fst $ fromJustMsg "genCondExit::gates" $
maybeLookup var scopes
ns = [ n | (n, Nothing) <- leaves ]
in ns
`logDump`
("genCondExit.gates(" << var
<< ") has scopes=" << show scopes
<< "; ns -> " << ns << ";"
)
-- add Select gates for a free variable, and update its
-- wire locations, to the new Select gates
-- need multiple select gates if it's a struct, but in this
-- case we add Select only for the gates which were actually
-- updated in this scope
-- PRE: the gates_true' and gates_false' inputs must be the same lenght, and not
-- null; the type of the var is not an array type
-- this function is quite nasty!
addSelect c (var, in_gates@(gates_true', gates_false'))
| assert (length gates_true' == length gates_false' &&
not (null gates_true'))
True
= do let changed_gates = filter (\(x,y) -> x /= y) $ uncurry zip in_gates
typ <- lookupVarTyp var
ts <- assert (not (arrTyp typ))
mapM (getSelType c) changed_gates
-- get the right number of new int's
is <- replicateM (length ts) nextInt
let ctxs = zipWith3 (mkCtx' var) is ts changed_gates
new_gates = foo (uncurry zip in_gates) is
-- remove Output flag on var's current gates (which will feed into
-- Select gates) if flag is present.
let c2 = rmOutputFlag c $
-- concat all the elements of the list of pairs
foldr (\(a,b) l -> (a:b:l)) [] changed_gates
setScalarLocs (var,typ) new_gates
`logDump`
("addSelect, var= " << var
<< "; in_gates = " << in_gates
<< "; typ = " << typ
<< "; new_gates = " << new_gates
)
-- work all the new Contexts into circuit c
return $ addCtxs c2 ctxs
-- make the Select Context, including gate flags if needed.
mkCtx' var i t (true_gate,false_gate) =
let src_gates = [testGate, true_gate, false_gate]
depth = (length parentScope) - 1
flags = genVarFlags var
doc = EVar var -- annotate gate with the variable name
in mkCtx (Gate i t Select src_gates depth flags [doc])
arrTyp (ArrayT _ _) = True
arrTyp _ = False
-- take a list of pairs, and where a pair is equal, pass on that value, but
-- where they're not equal, use the next value from the second list.
-- ideally, the number of non-equal pairs should be the same as the lenght of
-- the replacement list
foo :: (Eq a) => [(a,a)] -> [a] -> [a]
foo ((x,y):xys) rs
| x == y = (x : foo xys rs)
| otherwise = case rs of (r:rs') -> (r : foo xys rs')
[] -> error "CircGen.foo ran out of replacement values!"
foo [] (r:rs) = error "CircGen.foo had replacement values left over!"
foo [] [] = []
-- add a list of contexts (ie. gates) to a circuit, return the new circuit
addCtxs circ ctxs = foldl (flip (&)) circ ctxs
-- figure out the type of a Select gate, based on the type of its two
-- inputs
getSelType gr (node1, node2)
= do let gates = map (fromJustMsg "CircGen::getSelType" . Gr.lab gr) [node1, node2]
-- expanding types into a canonical form
types <- mapM (Im.expandType [DoTypeDefs]) $ map gate_typ gates
case types
-- `trace` ("getSelType of gates " << gates)
of
[Im.BoolT , Im.BoolT ] -> return Im.BoolT
[Im.IntT i1_e , Im.IntT i2_e ] ->
do let [i1, i2] = map Im.evalStaticOrDie [i1_e, i2_e]
return $ Im.IntT $ Im.lint (max i1 i2)
[a1@(Im.ArrayT _ _), a2@(Im.ArrayT _ _)] -> return a1
`trace` ("getSelType got array types "
<< a1 << " and " << a2)
[t1 , t2 ] ->
error ("getSelType got unexpected inputs of type "
<< t1 << " and " << t2)
-- adds the needed gates to the circuit (if any), and returns at which
-- gate/node numbers the result is
-- Wrapper over 'genExp'': a Left (fresh contexts) answer is spliced into
-- the circuit here; a Right answer names gates already in the circuit.
genExp :: Circuit -> Exp -> OutMonad (Circuit, [Gr.Node])
genExp c e = do (c', res) <- genExp' c e
                case res of
                  -- extend the graph with the new contexts, and return it
                  (Left ctxs) -> return ( foldl (flip (&)) c' ctxs,
                                          map Gr.node' ctxs )
                  -- just pass on the graph and node
                  (Right gateNums) -> return (c',
                                              gateNums)
                                      `trace`
                                      ("genExp " << e << " returning nodes " << gateNums)
-- this one is a little nasty - it returns the expanded circuit, and:
-- Left: the Context for this expression, which can be processed and
-- added to the circuit by the caller, or
-- Right: the gate number where this
-- expression can be found (in case it was already in the circuit)
--
-- In both cases, need to be able to return a list of nodes, in the case of a struct
-- or array
--
-- also need to be able to return a list of newly generated Contexts,
-- for a ReadDynArray gate with all its following Slicer gates
genExp' :: Circuit -> Exp -> OutMonad (Circuit, (Either
[CircuitCtx]
[Gr.Node]))
genExp' c exp =
do depth <- getDepth
case exp
-- `trace` ("genExp' " << exp)
of
(BinOp op e1 e2) -> do let genOperand circ opExp =
do (c1, gates1) <- genExp circ opExp
let gate1 = case gates1 of
[g1] -> g1
_ -> error ("BinOp arg " << opExp <<
" got (/= 1) gates: " <<
gates1)
return (c1, gate1)
(c1, gate1) <- genOperand c e1
(c2, gate2) <- genOperand c1 e2
-- NOTE: the VoidT type annotation used here is replaced
-- when the ExpT enclosing this BinOp is handled.
i <- nextInt
let ctx = mkCtx $ Gate i VoidT (Bin op) [gate1, gate2] depth [] []
return (c2, Left [ctx])
(UnOp op e1) -> do (c1, [gate1]) <- genExp c e1
i <- nextInt
let ctx = mkCtx $ Gate i VoidT (Un op) [gate1] depth [] []
return (c1, Left [ctx])
(EVar var) -> do gates <- lookupVarLocs var >>==
fromJustMsg ("Variable not found, within exp " << exp)
return (c, Right gates)
`logDebug`
("Found gates " << gates << " for var " << var)
{-
`trace`
("genExp' EVar " << var << ", loctable=" << show var_table <<
" -> " << gates)
-}
(ELit l) -> {-# SCC "Exp-ELit" #-}
do i <- nextInt
let ctx = mkCtx (Gate i (IntT 32) (Lit l) [] depth [] [])
return (c, Left [ctx])
-- here we try to update the Typ annotation on the Gate
-- generated recursively
(ExpT typ e) -> do (c', res) <- genExp' c e
case res of
(Left [(c1, c2, gate, c3)]) ->
let ctx' = (c1, c2, gate { gate_typ = typ }, c3)
in return (c', Left [ctx'])
-- FIXME: can't deal with multiple contexts
-- for now
(Left ctxs) ->
return (c', res)
(Right node) ->
return (c', res)
(EStruct str_e idx) ->
do (c', gates) <- genExp c str_e
(_,locs) <- getStrTParams str_e
`logDebug`
("genExp' EStruct " << exp
<< " struct gates = " << gates)
let (off,len) = (Im.valloc $ locs !! idx)
`logDebug`
("genExp' EStruct " << exp << " locs = " << locs)
return (c', Right $ take len $ drop off gates)
(EArr arr_e idx_e) ->
do (c1, arr_ns) <- genExp c arr_e
{-
`trace` ("Circuit before array gate generated:"
<< c)
-}
let arr_n = case arr_ns of
[arr_n] -> arr_n
_ ->
error ("Array " << arr_e <<
" should get one wire but got: "
<< arr_ns
{- << "; circuit at error: "
++ showCct c1 -}
)
(c2, [idx_n]) <- genExp c1 idx_e
readarr_n <- nextInt
depth <- getDepth
cond_node <- getCondNode
-- get the array type and the element type, from the array
-- expression annotations.
-- NOTE: we do not attach the array type to the ReadDynArray
-- gate; trying to see what happens if we do.
--
-- Add the index expression as an annotation, hopefully in
-- an EStatic if it is static.
let (ExpT arr_t@(ArrayT elem_t _) _) = arr_e
readarr_ctx = mkCtx (Gate readarr_n
arr_t
ReadDynArray
[cond_node, arr_n, idx_n]
depth
[] [idx_e])
(e_locs,e_typs) <- getTypLocs elem_t
-- build the array pointer slicer gate. it will get the new
-- array pointer value in the first cARRAY_BLEN bytes of the
-- ReadDynArray output.
arrptr_n <- nextInt
let arr_ptr_ctx = mkCtx $
Gate arrptr_n
arr_t
(Slicer $ FieldLoc {
Im.byteloc = (0, cARRAY_BLEN),
Im.valloc = (0, 1)
})
[readarr_n]
depth
[] []
-- add cARRAY_BLEN to all the byte offsets, as the array pointer
-- will be output first by the ReadDynArray gate
let e_blocs' = map (projFst (+ cARRAY_BLEN)) $ map Im.byteloc e_locs
-- slicer gates
is <- replicateM (length e_blocs') nextInt
let slicer_ctxs = map mkCtx [Gate i
t
(Slicer $ FieldLoc {
Im.byteloc = (boff,blen),
Im.valloc = (off,1)
})
[readarr_n]
depth
[] [] | i <- is
| (boff,blen) <- e_blocs'
| off <- [1..]
| t <- e_typs]
-- update the gate location of the array pointer
(rv,rv_t,off) <- getRootvarOffset getStrTLocs arr_e
spliceVarLocs (off,1) (rv,rv_t) [arrptr_n]
-- we'll add the ReadDynArray and the slicer for the
-- array pointer into the cct here, and not return it
-- with the other slicer contexts, as others should only
-- look for the return vars at the slicers
let c_out = foldl (flip (&)) c2 [readarr_ctx, arr_ptr_ctx]
return (c_out, Left slicer_ctxs)
(EStatic e) -> genExp' c e
(EArrayInit name elem_size len) ->
do i <- nextInt
let ctx = (mkCtx $ Gate i
-- the type field here is a dummy
(Im.IntT $ Im.lint 12345678)
(InitDynArray (fromIntegral elem_size)
len)
[]
depth
[] [])
`trace`
("Adding InitDynArray of len " << len)
return (c, Left [ctx])
e -> error $ "CircGen: unknown expression " ++ (show e)
-- | Build an FGL graph 'Context' for a gate: unlabelled in-edges from the
-- gate's input nodes, the gate's own number as the node, the gate as the
-- label, and no outgoing edges.
mkCtx :: Gate -> CircuitCtx
mkCtx g = (in_edges, gate_num g, g, out_edges)
    where in_edges  = map uAdj (gate_inputs g)
          out_edges = []        -- no outgoing edges
-- | get all the scalar components within a Struct value of the given type.
-- Builds a tree by repeatedly expanding struct fields ('expandLevel') and
-- collects its leaves, i.e. the (type, accessor-expression) pairs of all
-- non-struct components. A non-struct input yields just itself.
getStructLeaves :: Typ -> Exp -> [(Typ,Exp)]
getStructLeaves typ e =
    let leaves = TreeLib.leaves $ TreeLib.iterate expandLevel (typ,e)
    in leaves
       `logDump`
       ("getStructLeaves (" << typ << ", " << e << ") -> " << leaves)
    -- children of a struct node: one 'EStruct' projection per field,
    -- paired with the field's type; anything else is a leaf (no children).
    where expandLevel (StructT (fields, _), e) =
              let member_es = map (EStruct e) [0..length fields - 1]
                  member_ts = map snd fields
              in zip member_ts member_es
          expandLevel _ = []
-- | Return a type's list of contained field locations and types.
-- A struct reports its (already computed) field locations and field types;
-- any other type is a single field occupying one word and 'tblen' bytes.
getTypLocs :: (TypeTableMonad m) => Typ -> m ([FieldLoc], [Typ])
getTypLocs typ =
    do expanded <- Im.expandType [DoTypeDefs] typ
       case expanded of
         (StructT (fields,locs)) -> return (locs, map snd fields)
         scalar                  -> return ( [Im.FieldLoc { Im.valloc  = (0, 1),
                                                            Im.byteloc = (0, tblen scalar) }]
                                           , [scalar] )
-- | Make an unlabelled FGL adjacency: pair the node with a unit edge label.
uAdj node = ((), node)
-- | Extract the formal parameters of @main()@ from a 'Prog'; these become
-- the circuit's input wires.
-- (Previously bound the return type and body to unused names, which
-- triggers -Wunused-matches; wildcards make the intent explicit.)
extractInputs prog =
    let (Func _ _ _ form_args _) = Im.getMain prog
    in form_args
--------------------
-- state and state access functions
--------------------
-- the state in these computations:
data MyState = MyState { loctable :: ([LocTable], -- ^ stack of tables for scalars.
ArrLocTable -- ^ A single table for arrays.
),
cond_nodes :: [Gr.Node], -- ^ the stack of the condition nodes in
-- the current stack of conditionals.
counter :: Int, -- ^ a counter to number the gates
-- sequentially
typetable :: TypeTable, -- ^ the type table is read-only, so
-- could be in a Reader, but easier to
-- stick it in here
flags :: [RunFlag] -- ^ The run-time configuration flags, read-only
}
-- Read a projection of one state component through an arbitrary function.
getsLocs f = St.gets (f . loctable)
getsTypeTable f = St.gets (f . typetable)
getFlags = St.gets flags
getInt = St.gets counter
-- modify the various parts of the state with some function
modifyLocs f = St.modify (\st -> st { loctable = f (loctable st) })
modifyInt f = St.modify (\st -> st { counter = f (counter st) })
modifyCondNodes f = St.modify (\st -> st { cond_nodes = f (cond_nodes st) })
-- OutMonad does carry a TypeTable around
instance TypeTableMonad OutMonad where
getTypeTable = getsTypeTable id
-- update the locations of 'var' by some function of (Maybe [Node])
-- the new value always goes into the top-most scope
-- we always use this, even if the var is certain not to be present (eg. during static
-- initialization), for greater uniformity.
-- an update function which just sets all the gates, already present or not, is
-- (const new_gates)
-- updateScalar :: (Maybe [Gr.Node] -> [Gr.Node]) -> Var -> OutMonad ()
updateScalar f var maps = let curr = maybeLookup var maps
new = f curr
maps' = modtop (Map.insert var new) maps
in maps'
-- common usages:
-- | Set all the locations for this var
setVarLocs = setVarLocsFull (map (const True))
-- | Set a sublist of the locations
spliceVarLocs splice (var,t) ls = setVarLocsFull (mapSplice (const True) (const False)
(splice
`logDebug`
("spliceVarLocs (" << var << "): splice=" << splice
<< "; ns=" << ls)
)
)
(var,t)
ls
-- | Set just the scalar locs, leave the arrays alone.
setScalarLocs = setVarLocsFull
(map (\a -> case a of
(Just (Array _)) -> False
Nothing -> True))
-- update or set the locations of a variable, with the given type. deals with sending
-- array components of the variable to the ArrLocTable.
-- the hook function specifies which entries should and should not be inserted, ie. should
-- remain as before. If any fewer than all the locations are provided, obviously an entry
-- for this var must exist already.
-- if inly updating a subset of the locations, the caller will only provide those new
-- locations; we can deal with that here.
setVarLocsFull :: ([Maybe LocAnnot] -> [Bool])
-> (Var,Typ)
-> [Gr.Node]
-> OutMonad ()
setVarLocsFull hook (var,typ) new_locs =
do t_full <- Im.expandType [Im.DoTypeDefs, Im.DoFields] typ
mb_locs <- lookupVarLocs var
let -- prepare for debug messages
funcid = "setVarLocsFull (" << var << ", " << typ << ")"
all_leaves = getStructLeaves t_full (EVar var)
-- the old locs if present, or a list of undefines. If a list of undefs, they
-- should all be replaced in the spliceInIf call below
locs = fromMaybe (repeat (-1 :: Gr.Node))
mb_locs
-- make a preliminary version of the entries, using the old locs if they were
-- present. The annotations generated here are the final ones.
locmap_entry = zipWith mk_entry all_leaves locs
-- which entries do we set now, and which do we keep?
entries_keep = (hook $ map snd locmap_entry)
new_entry = if ( (isNothing mb_locs &&
not (and entries_keep)
)
`logDump`
(funcid
<< "; new_locs = " << new_locs
<< "; entries_keep = " << entries_keep
<< "; locmap_entry = " << locmap_entry
<< "; all_leaves = " << all_leaves
)
)
||
( not (or entries_keep) )
then error (funcid << " is not setting all locs for " << var
<< ", but it does not have an entry already;\
\ or no entries kept to set")
else -- put in the new locs where specified
spliceInIf snd
-- put in the new location
(\((_, a), True) loc_new -> (loc_new, a))
-- keep the old location, which was put in
-- locmap_entry already
(\((loc, a), False) -> (loc, a))
new_locs
(zip locmap_entry entries_keep)
-- entries for the array loc table
arr_entries = [(e,n) | (n, Just (Array e)) <- new_entry]
-- and now update the actual tables in our state.
setVarEntry (var,typ) new_entry arr_entries
`logDump`
(funcid << " has arr_entries = " << arr_entries)
where mk_entry (Im.ArrayT _ _, e) n = (n, Just $ Array e)
mk_entry _ n = (n, Nothing)
-- update a variable entry
setVarEntry (var,typ) entries arr_entries =
modifyLocs (\(locs,arr_locs) ->
((updateScalar (const (entries,typ))
var
locs
`logDebug`
(let old_entry = case maybeLookup var locs of
Just (locs,_typ) -> strShow locs
Nothing -> "not present"
in
"setVarEntry (" << var << "): entries=" << entries
<< "; arr_entries=" << arr_entries << ";old entry=" << old_entry)
)
,
(-- insert all (>= 0) new entries into ArrLocTable
insertMany arr_entries arr_locs
)
)
)
lookupVarLocs :: Var -> OutMonad (Maybe
[Gr.Node]
)
lookupVarLocs = extractVarLocs (const True)
-- drop entries with an Array annotation.
lookupScalarLocs = extractVarLocs $ \a -> case a of Nothing -> True
Just (Array _) -> False
lookupVarTyp var = getsLocs $
snd .
fromJust .
maybeLookup var .
fst
-- apply a transformation and filter before returning the nodes.
-- elements where f returns Nothing are filtered out
extractVarLocs :: (Maybe LocAnnot -> Bool)
-> Var
-> OutMonad (Maybe
[Gr.Node]
)
extractVarLocs p var =
do mb_loc_infos <- getsLocs $
maybeLookup var .
fst
case mb_loc_infos of
Nothing -> return Nothing
Just (loc_infos,_typ)
-> do let kepts = [ l | l <- loc_infos,
p $ snd l
]
kepts' <- mapM patch_info kepts
return $ Just kepts'
where patch_info (n, Nothing) = return n
-- lookup in the array table
patch_info (_, Just (Array exp)) = getsLocs $
fromJustMsg ("getVarLocs array lookup for "
<< exp) .
Mapping.lookup exp .
snd
-- the current depth inside nested conditionals, 0-based
getDepth = do len <- getsLocs $ length . fst -- the number of var tables (one per scope)
return $ case len of 0 -> 0
_ -> len-1
--------------------
-- state utility functions
--------------------
-- | Advance the gate counter and return its new value.
nextInt :: OutMonad Int
nextInt = modifyInt (+1) >> getInt
-- | Enter a new (empty) scalar-variable scope.
pushScope = modifyLocs (projFst (push Map.empty))
-- | Leave the current scope, returning the LocTable that was on top.
popScope = do top <- getsLocs (peek . fst)
              modifyLocs (projFst pop)
              return top
-- | Push a condition node on entry to a conditional scope.
pushCondNode n = modifyCondNodes (push n)
-- | Pop it again on exit from an SIfElse.
popCondNode = modifyCondNodes pop
-- | The innermost enclosing condition node (top of the stack).
getCondNode = St.gets $ peek . cond_nodes
instance StreamShow LocAnnot where strShows = showsPrec 0
instance StreamShow (Maybe LocAnnot) where strShows = showsPrec 0
-- ---------------
-- output stuff
-- ---------------
-- StreamShow instances
instance StreamShow Gate where
strShows = shows --cctShowsGate " :: " " ** "
instance StreamShow Op where
strShows = cctShowsOp
--
-- a look-alike of the Show class to serialize the circuit for the C++ runtime, so we can
-- use the builtin Show and Read to serialize circuits within Haskell
--
class CctShow a where
cctShows :: a -> ShowS
cctShow :: a -> String
-- and the same mutually recursive definitions: one or both must be provided
cctShow x = (cctShows x) ""
cctShows x = ((cctShow x) ++)
-- line-oriented format:
--
-- gate number
-- flags, or "noflags"
-- gate (output) type
-- gate operation
-- sources, or "nosrc"
-- comment (the name of the output wire usually)
-- <blank line>
cctShowsGate sep delim
g@(Gate i typ op srcs depth flags docs)
= (delim ++) .
-- 1: gate number
rec' i . (sep ++) .
-- 2: gate flags
recurse flags . (sep ++) .
-- 3: gate result type
(recurse typ) . (sep ++) .
-- 4: gate operation
recurse op . (sep ++) .
-- 5: source gates
(if null srcs
then ("nosrc" ++)
else (foldr1 (\f1 f2 -> f1 . (" " ++) . f2)
(map rec' srcs))) . (sep ++) .
-- 6: gate depth
showsPrec 0 depth . (sep ++) .
-- 7: comment
(if null docs
then ("nocomm" ++)
-- using the last annotation for all gates except Input, where the
-- first one should be the input variable.
else (let doc = case op of Input -> last docs
ReadDynArray -> last docs
WriteDynArray _ -> last docs
_ -> head docs
in ((strShow $ stripVarExp doc) ++) .
(case doc of EStatic e -> (("static " << strShow e) ++)
_ -> id)
)
) .
-- else ((strShow $ map strip doc) ++)) .
(delim ++)
where recurse x = cctShows x -- recurse
rec' = showsPrec 0 -- and go into the Show class
-- get rid of variable annotations in an expression
stripVarExp = mapExp f
where f (EVar v) = (EVar (strip_var v))
f e = e
-- need a different rendition of Typ's for the runtime
instance CctShow Typ where
cctShow = PP.render . Im.docTypMachine
instance CctShow Gate where
cctShows = cctShowsGate "\n" "\n"
instance CctShow GateFlags where
cctShows f = case f of Output -> ("Output" ++)
Terminal -> ("Terminal" ++)
-- Render a flag list as space-separated words for the C++ runtime reader;
-- the empty list prints as the sentinel "noflags".
-- ('unwords' replaces the hand-rolled 'concat . intersperse " "'.)
instance CctShow [GateFlags] where
    cctShow [] = "noflags"
    cctShow flags = unwords (map cctShow flags)
-- NOTE: strShows is from the StreamShow class, for which we have many instance
-- definitions in Intermediate.hs
cctShowsOp o =
case o of
(Bin op) -> str "BinOp " . strShows op
(Un op) -> str "UnOp " . strShows op
Input -> str "Input"
Select -> str "Select"
-- convert a LBool to the matching LInt before displaying.
(Lit l) -> str "Lit " . case l of
Im.LInt i
-> showsPrec 0 i
Im.LBool b
-> showsPrec 0 $ fromEnum b
(InitDynArray elemsize
len) -> str "InitDynArray " . rec' elemsize . sp .
rec' len
ReadDynArray -> str "ReadDynArray"
WriteDynArray
Im.FieldLoc { Im.byteloc = (off,len) }
-> str "WriteDynArray " . rec' off . sp . rec' len
Slicer
(Im.FieldLoc { Im.byteloc = (off,len) })
-> str "Slicer " . rec' off . sp . rec' len
(Print prompt) -> str "Print " . str prompt
where -- recurse y = cctShows y
rec' y = showsPrec 0 y
str = (++)
sp = (" " ++)
instance CctShow Op where
cctShows = cctShowsOp
showCctGraph :: (Gr.DynGraph gr) => gr Gate b -> String
showCctGraph g =
let inNodes = map Gr.node' $ GrBas.gsel isInCtx g
terms = UDraw.makeTerm (const "node")
(\gate -> ( ("OBJECT", myShow gate):
getAttribs gate ) )
inNodes
g
`trace`
("Calling UDraw.makeTerm with inNodes=" ++ show inNodes)
in
(PP.render $ Lib.doc terms)
`trace` ("UDraw.makeTerm done")
where isInCtx (ins,_,_,_) = null ins
myShow g = show (gate_num g)
-- ++ " @ " ++ show (gate_depth g)
++ "\\n" ++ cctShow (gate_op g)
-- show the variable name of input gates, eg:
-- "Input -> var_name"
++ concat [(case gate_op g of
Input -> " -> " ++ strShow (stripVarExp $ last $
gate_doc g)
_ -> ""),
(if elem Output $ gate_flags g
then "\\nOutput: " ++ strShow (stripVarExp $ last $
gate_doc g)
else "")
]
getAttribs g = concat [(if elem Output $ gate_flags g
then [("COLOR", "light blue")
--, ("_GO", "rhombus")
]
else []
),
(case gate_op g of
Input -> [("COLOR", "light green")
--, ("_GO", "rhombus")
]
Select -> [("_GO", "rhombus")
]
_ -> [] -- normal gates
)
]
-------------------------------------
-- some tests
-------------------------------------
-- Smoke test for the state plumbing: run five 'nextInt's from a fresh
-- state (counter starts at 0, so the expected output is [1,2,3,4,5])
-- and return both the produced numbers and the final state.
testNextInt = let startState = MyState { loctable = ([Mapping.empty], Mapping.empty),
                                         cond_nodes = [],
                                         counter = 0,
                                         typetable = Mapping.empty,
                                         flags = []
                                       }
                  (out,st) = St.runState test_f startState
              in (out,st)
    where test_f = do is <- replicateM 5 nextInt
                      return (is)
|
ailiev/faerieplay-compiler
|
Faerieplay/CircGen.hs
|
bsd-3-clause
| 97,746
| 7
| 29
| 50,229
| 14,524
| 7,837
| 6,687
| 1,111
| 14
|
{-# LANGUAGE FlexibleContexts, LambdaCase, RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
import Graphics.UI.GLFW.Pal
import Graphics.GL.Pal
import Graphics.VR.Pal
import Control.Monad
import Control.Monad.State
import Control.Monad.Reader
import Control.Lens.Extra
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set
import System.Random
import Types
import CubeUniforms
import Physics.Bullet
-- | Application state: the player's camera pose plus all physics cubes,
-- keyed by their object ID.
data World = World
  { _wldPlayer :: !(Pose GLfloat) -- ^ camera/player pose
  , _wldCubes :: !(Map ObjectID Cube) -- ^ live cubes by object ID
  }
makeLenses ''World
-- | Initial world: player positioned back and above the origin, facing
-- along the default orientation; no cubes yet.
newWorld :: World
newWorld = World { _wldPlayer = Pose startPosition startOrientation
                 , _wldCubes = mempty
                 }
  where startPosition = V3 0 20 60
        startOrientation = axisAngle (V3 0 1 0) 0
-- | Model matrix for the ground plane: the default pose rotated -pi/2
-- about the X axis so the plane geometry lies horizontally.
planeM44 :: M44 GLfloat
planeM44 = transformationFromPose $ newPose
    & posOrientation .~ axisAngle (V3 1 0 0) (-pi/2)
-- | Bullet ghost-object demo: drops ~90 dynamic cubes onto a ground plane
-- and moves a large ghost (trigger) box through them; cubes overlapping
-- the ghost are drawn white. Clicking a cube recolours it randomly.
main :: IO ()
main = do
  let fov = 45
      ghostShapeSize = 10 :: V3 GLfloat
      ghostShapePose = newPose & posPosition .~ V3 0 5 0
  -- RecordWildCards: brings gpWindow/gpEvents etc. into scope from VRPal
  VRPal{..} <- initVRPal "Bullet" []
  -- shaders and geometry for the cube, ghost and ground-plane shapes
  shader <- createShaderProgram "test/shared/cube.vert" "test/shared/cube.frag"
  cubeGeo <- cubeGeometry (1 :: V3 GLfloat) (V3 1 1 1)
  cubeShape <- makeShape cubeGeo shader :: IO (Shape Uniforms)
  ghostGeo <- cubeGeometry ghostShapeSize (V3 1 1 1)
  ghostShape <- makeShape ghostGeo shader :: IO (Shape Uniforms)
  planeGeo <- planeGeometry 1000 (V3 0 0 1) (V3 0 1 0) 1
  planeShape <- makeShape planeGeo shader :: IO (Shape Uniforms)
  -- physics world: ground plane (ID 0) and the ghost box (ID 1)
  dynamicsWorld <- createDynamicsWorld mempty
  _ <- addGroundPlane dynamicsWorld (CollisionObjectID 0) 0
  ghostBox <- createBoxShape ghostShapeSize
  ghostObject <- addGhostObject dynamicsWorld (CollisionObjectID 1) ghostBox
    mempty { rbPosition = ghostShapePose ^. posPosition, rbRotation = ghostShapePose ^. posOrientation }
  glEnable GL_DEPTH_TEST
  glBlendFunc GL_SRC_ALPHA GL_ONE_MINUS_SRC_ALPHA
  glClearColor 0 0 0.1 1
  void . flip runStateT newWorld $ do
    -- spawn rigid-body cubes with IDs 10..100, all dropped from (0,20,0)
    boxShape <- createBoxShape (1 :: V3 GLfloat)
    forM_ [10..100] $ \i -> do
      rigidBody <- addRigidBody dynamicsWorld (CollisionObjectID i) boxShape mempty
        { rbPosition = V3 0 20 0
        , rbRotation = Quaternion 0.5 (V3 0 1 1)
        }
      wldCubes . at (fromIntegral i) ?= Cube
        { _cubBody = rigidBody
        , _cubColor = V4 1 0 1 1
        }
    -- per-frame loop
    whileWindow gpWindow $ do
      projMat <- getWindowProjection gpWindow fov 0.1 1000
      viewMat <- viewMatrixFromPose <$> use wldPlayer
      (x,y,w,h) <- getWindowViewport gpWindow
      glViewport x y w h
      -- oscillate the ghost box side to side (and spin it) over time
      ghostX <- (* 5) . sin <$> getNow
      let ghostShapePoseMoving = ghostShapePose
            & posPosition . _x .~ ghostX
            & posOrientation .~ axisAngle (V3 1 1 0) ghostX
      setCollisionObjectWorldTransform ghostObject
        (ghostShapePoseMoving ^. posPosition)
        (ghostShapePoseMoving ^. posOrientation)
      -- input: escape closes; clicking ray-picks a cube and recolours it
      processEvents gpEvents $ \e -> do
        closeOnEscape gpWindow e
        onMouseDown e $ \_ -> do
          playerPose <- use wldPlayer
          cursorRay <- cursorPosToWorldRay gpWindow projMat playerPose
          mBodyID <- mapM (getCollisionObjectID . rrCollisionObject)
            =<< rayTestClosest dynamicsWorld cursorRay
          forM_ mBodyID $ \bodyID -> do
            liftIO $ putStrLn $ "Clicked Object " ++ (show (unCollisionObjectID bodyID))
            let cubeID = fromIntegral (unCollisionObjectID bodyID)
            [r,g,b] <- liftIO (replicateM 3 randomIO)
            wldCubes . at cubeID . traverse . cubColor .= V4 r g b 1
      applyMouseLook gpWindow wldPlayer
      applyWASD gpWindow wldPlayer
      stepSimulation dynamicsWorld 90
      glClear (GL_COLOR_BUFFER_BIT .|. GL_DEPTH_BUFFER_BIT)
      -- which cubes are inside the ghost volume this frame
      overlapping <- getGhostObjectOverlapping ghostObject
      overlappingIDs <- Set.fromList <$> mapM getCollisionObjectID overlapping
      let viewProj = projMat !*! viewMat
      -- Begin cube batch: overlapping cubes are drawn white
      withShape cubeShape $ do
        Uniforms{..} <- asks sUniforms
        uniformV3 uCamera =<< use (wldPlayer . posPosition)
        cubes <- Map.toList <$> use wldCubes
        forM_ cubes $ \(cubeID, cube) -> do
          (position, orientation) <- getBodyState (cube ^. cubBody)
          let model = mkTransformation orientation position
              cubeCollisionID = CollisionObjectID cubeID
              finalColor = if Set.member cubeCollisionID overlappingIDs
                           then V4 1 1 1 1
                           else cube ^. cubColor
          uniformM44 uModelViewProjection (viewProj !*! model)
          uniformM44 uInverseModel (inv44 model)
          uniformM44 uModel model
          uniformV4 uDiffuse finalColor
          drawShape
      -- ground plane
      withShape planeShape $ do
        Uniforms{..} <- asks sUniforms
        uniformV3 uCamera =<< use (wldPlayer . posPosition)
        let model = planeM44
        uniformM44 uModelViewProjection (viewProj !*! model)
        uniformM44 uModel model
        uniformV4 uDiffuse (V4 0.1 0.0 0.5 1)
        drawShape
      -- ghost box rendered last, translucent (blending only for this draw)
      glEnable GL_BLEND
      withShape ghostShape $ do
        Uniforms{..} <- asks sUniforms
        uniformV3 uCamera =<< use (wldPlayer . posPosition)
        let model = transformationFromPose ghostShapePoseMoving
        uniformM44 uModelViewProjection (viewProj !*! model)
        uniformM44 uModel model
        uniformV4 uDiffuse (V4 0.5 0.0 0.5 0.5)
        drawShape
      glDisable GL_BLEND
      swapBuffers gpWindow
|
lukexi/bullet-mini
|
test/GhostObjects.hs
|
bsd-3-clause
| 6,319
| 10
| 30
| 2,348
| 1,630
| 776
| 854
| -1
| -1
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedLabels #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RebindableSyntax #-}
{-# OPTIONS_GHC -Wall #-}
-- | SVG path manipulation
module Data.Path.Parser
( -- * Parsing
-- $parsing
parsePath,
svgToPathData,
pathDataToSvg,
PathCommand (..),
Origin (..),
)
where
import Chart.Data
import Control.Applicative
import Control.Monad.State.Lazy
import qualified Data.Attoparsec.Text as A
import Data.Either
import Data.FormatN
import Data.Functor
import Data.Path
import Data.Scientific (toRealFloat)
import Data.Text (Text, pack)
import qualified Data.Text as Text
import GHC.Generics
import GHC.OverloadedLabels
import NumHask.Prelude
import Optics.Core hiding ((<|))
-- import qualified Data.List as List
-- $parsing
-- Every element of an svg path can be thought of as exactly two points in space, with instructions of how to draw a curve between them. From this point of view, one which this library adopts, a path chart is thus very similar to a line chart. There's just a lot more information about the style of this line to deal with.
--
-- References:
--
-- [SVG d](https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/d)
--
-- [SVG path](https://developer.mozilla.org/en-US/docs/Web/SVG/Tutorial/Paths)
-- | Parse a raw path string.
--
-- >>> let outerseg1 = "M-1.0,0.5 A0.5 0.5 0.0 1 1 0.0,-1.2320508075688774 1.0 1.0 0.0 0 0 -0.5,-0.3660254037844387 1.0 1.0 0.0 0 0 -1.0,0.5 Z"
-- >>> parsePath outerseg1
-- Right [MoveTo OriginAbsolute [Point -1.0 0.5],EllipticalArc OriginAbsolute [(0.5,0.5,0.0,True,True,Point 0.0 -1.2320508075688774),(1.0,1.0,0.0,False,False,Point -0.5 -0.3660254037844387),(1.0,1.0,0.0,False,False,Point -1.0 0.5)],EndPath]
--
-- https://developer.mozilla.org/en-US/docs/Web/SVG/Attribute/d
parsePath :: Text -> Either String [PathCommand]
parsePath = A.parseOnly pathParser
-- | Coordinate separator: optional whitespace around an optional single comma
-- (SVG allows both "1 2" and "1,2").
commaWsp :: A.Parser ()
commaWsp = A.skipSpace *> A.option () (A.string "," $> ()) <* A.skipSpace
-- | A single x,y coordinate pair.
point :: A.Parser (Point Double)
point = Point <$> num <* commaWsp <*> num
-- | One or more points (never empty: sepBy1).
points :: A.Parser [Point Double]
points = fromList <$> point `A.sepBy1` commaWsp
-- | Two points, e.g. a control point followed by an end point.
pointPair :: A.Parser (Point Double, Point Double)
pointPair = (,) <$> point <* commaWsp <*> point
-- | One or more point pairs (never empty: sepBy1).
pointPairs :: A.Parser [(Point Double, Point Double)]
pointPairs = fromList <$> pointPair `A.sepBy1` commaWsp
-- | A whole path: optional leading whitespace, then at least one command.
pathParser :: A.Parser [PathCommand]
pathParser = fromList <$> (A.skipSpace *> A.many1 command)
-- | A signed double, also accepting the SVG ".5" (no integer part) shorthand.
num :: A.Parser Double
num = realToFrac <$> (A.skipSpace *> plusMinus <* A.skipSpace)
  where
    doubleNumber :: A.Parser Double
    doubleNumber = toRealFloat <$> A.scientific <|> shorthand
    plusMinus =
      negate <$ A.string "-" <*> doubleNumber
        <|> A.string "+" *> doubleNumber
        <|> doubleNumber
    -- ".25" is re-parsed as "0.25"
    shorthand = process' <$> (A.string "." *> A.many1 A.digit)
    process' = fromRight 0 . A.parseOnly doubleNumber . pack . (++) "0."
-- | One or more numbers.
nums :: A.Parser [Double]
nums = num `A.sepBy1` commaWsp
-- | An SVG arc flag: '0' is False, any other digit is True.
flag :: A.Parser Bool
flag = fmap (/= '0') A.digit
-- | One path command: a letter selecting the instruction (upper case
-- absolute, lower case relative) followed by its argument list.
command :: A.Parser PathCommand
command =
  MoveTo OriginAbsolute <$ A.string "M" <*> points
    <|> MoveTo OriginRelative <$ A.string "m" <*> points
    <|> LineTo OriginAbsolute <$ A.string "L" <*> points
    <|> LineTo OriginRelative <$ A.string "l" <*> points
    <|> HorizontalTo OriginAbsolute <$ A.string "H" <*> nums
    <|> HorizontalTo OriginRelative <$ A.string "h" <*> nums
    <|> VerticalTo OriginAbsolute <$ A.string "V" <*> nums
    <|> VerticalTo OriginRelative <$ A.string "v" <*> nums
    <|> CurveTo OriginAbsolute <$ A.string "C" <*> fmap fromList (manyComma curveToArgs)
    <|> CurveTo OriginRelative <$ A.string "c" <*> fmap fromList (manyComma curveToArgs)
    <|> SmoothCurveTo OriginAbsolute <$ A.string "S" <*> pointPairs
    <|> SmoothCurveTo OriginRelative <$ A.string "s" <*> pointPairs
    <|> QuadraticBezier OriginAbsolute <$ A.string "Q" <*> pointPairs
    <|> QuadraticBezier OriginRelative <$ A.string "q" <*> pointPairs
    <|> SmoothQuadraticBezierCurveTo OriginAbsolute <$ A.string "T" <*> points
    <|> SmoothQuadraticBezierCurveTo OriginRelative <$ A.string "t" <*> points
    <|> EllipticalArc OriginAbsolute <$ A.string "A" <*> manyComma ellipticalArgs
    <|> EllipticalArc OriginRelative <$ A.string "a" <*> manyComma ellipticalArgs
    <|> EndPath <$ A.string "Z" <* commaWsp
    <|> EndPath <$ A.string "z" <* commaWsp
  where
    -- two control points and an end point
    curveToArgs =
      (,,) <$> (point <* commaWsp)
        <*> (point <* commaWsp)
        <*> point
    manyComma a = fromList <$> a `A.sepBy1` commaWsp
    numComma = num <* commaWsp
    flagComma = flag <* commaWsp
    -- rx, ry, x-rotation, large-arc flag, sweep flag, end point
    ellipticalArgs =
      (,,,,,) <$> numComma
        <*> numComma
        <*> numComma
        <*> flagComma
        <*> flagComma
        <*> point
-- | Path command definition (ripped from reanimate-svg).
data PathCommand
  = -- | M or m command
    MoveTo !Origin ![Point Double]
  | -- | Line to, L or l Svg path command.
    LineTo !Origin ![Point Double]
  | -- | Equivalent to the H or h svg path command.
    HorizontalTo !Origin ![Double]
  | -- | Equivalent to the V or v svg path command.
    VerticalTo !Origin ![Double]
  | -- | Cubic bezier, C or c command
    CurveTo !Origin ![(Point Double, Point Double, Point Double)]
  | -- | Smooth cubic bezier, equivalent to S or s command
    SmoothCurveTo !Origin ![(Point Double, Point Double)]
  | -- | Quadratic bezier, Q or q command
    QuadraticBezier !Origin ![(Point Double, Point Double)]
  | -- | Quadratic bezier, T or t command
    SmoothQuadraticBezierCurveTo !Origin ![Point Double]
  | -- | Elliptical arc, A or a command.
    EllipticalArc !Origin ![(Double, Double, Double, Bool, Bool, Point Double)]
  | -- | Close the path, Z or z svg path command.
    EndPath
  deriving (Eq, Show, Generic)
-- | Tell if a path command is absolute (in the current
-- user coordinate) or relative to the previous point.
data Origin
  = -- | Next point in absolute coordinate
    OriginAbsolute
  | -- | Next point relative to the previous
    OriginRelative
  deriving (Eq, Show, Generic)
-- | To fit in with the requirements of the library design, specifically the separation of what a chart is into XY data Points from representation of these points, path instructions need to be deconstructed into:
--
-- - define a single chart element as a line.
--
-- - split a single path element into the start and end points of the line, which become the 'Chart.Types.xys' of a 'Chart.Types.Chart', and the rest of the information, which is called 'PathInfo' and incorporated into the 'Chart.Types.Chart' 'Chart.Types.annotation'.
--
-- An arc path is variant to affine transformations of the 'Chart.Types.xys' points: angles are not preserved in the new reference frame.
data PathInfo a
  = StartI
  | LineI
  | CubicI (Point a) (Point a)
  | QuadI (Point a)
  | ArcI (ArcInfo a)
  deriving (Show, Eq, Generic)
-- | Mirror a point across the x-axis: chart coordinates are y-up,
-- SVG screen coordinates are y-down.
pointToSvgCoords :: Point Double -> Point Double
pointToSvgCoords (Point px py) = Point px (negate py)

-- | Apply the y-flip to every point stored in a 'PathData' element;
-- arc parameters ('ArcInfo') are left untouched.
svgCoords :: PathData Double -> PathData Double
svgCoords pd = case pd of
  StartP p -> StartP (flipY p)
  LineP p -> LineP (flipY p)
  QuadP c p -> QuadP (flipY c) (flipY p)
  CubicP c1 c2 p -> CubicP (flipY c1) (flipY c2) (flipY p)
  ArcP i p -> ArcP i (flipY p)
  where
    flipY = pointToSvgCoords
-- | Convert from a path info, start point, end point triple to a path text clause.
--
-- Note that morally,
--
-- > toPathsAbsolute . toInfos . parsePath == id
--
-- but the round trip destroys much information, including:
--
-- - path text spacing
--
-- - "Z", which is replaced by a LineI instruction from the end point back to the original start of the path.
--
-- - Sequences of the same instruction type are uncompressed
--
-- - As the name suggests, relative paths are translated to absolute ones.
--
-- - implicit L's in multiple M instructions are separated.
--
-- In converting between chart-svg and SVG there are two changes in reference:
--
-- - arc rotation is expressed as positive degrees for a clockwise rotation in SVG, and counter-clockwise in radians for chart-svg
--
-- - A positive y-direction is down for SVG and up for chart-svg
toPathAbsolute ::
  PathData Double ->
  -- | path text
  Text
toPathAbsolute (StartP p) = "M " <> pp p
toPathAbsolute (LineP p) = "L " <> pp p
toPathAbsolute (CubicP c1 c2 p) =
  "C "
    <> pp c1
    <> " "
    <> pp c2
    <> " "
    <> pp p
toPathAbsolute (QuadP control p) =
  "Q "
    <> pp control
    <> " "
    <> pp p
-- arc: rotation goes from counter-clockwise radians back to SVG's
-- clockwise degrees, hence the sign flip and the 180/pi factor
toPathAbsolute (ArcP (ArcInfo (Point x y) phi' l sw) x2) =
  "A "
    <> (pack . show) x
    <> " "
    <> (pack . show) y
    <> " "
    <> (pack . show) (-phi' * 180 / pi)
    <> " "
    <> bool "0" "1" l
    <> " "
    <> bool "0" "1" sw
    <> " "
    <> pp x2
-- | Render a point (including conversion to SVG Coordinates).
-- The y sign flip is skipped when y is exactly zero so the output reads
-- "0.0000" rather than "-0.0000".
pp :: Point Double -> Text
pp (Point x y) =
  formatOrShow (FixedStyle 4) Nothing x <> ","
    <> formatOrShow (FixedStyle 4) Nothing (bool (-y) y (y == zero))
-- | Running state while converting SVG path commands: the point the
-- next relative coordinate is measured from, the subpath start (the
-- target of Z), and the last control point (for S/T reflection).
data PathCursor = PathCursor
  { -- | previous position
    curPrevious :: Point Double,
    -- | start point (to close out the path)
    curStart :: Point Double,
    -- | last control point
    curControl :: Maybe (Point Double)
  }
  deriving (Eq, Show, Generic)
-- | Initial cursor: at the origin with no pending control point.
stateCur0 :: PathCursor
stateCur0 = PathCursor zero zero Nothing
-- | Convert an SVG d path text snippet to a [PathData Double]
svgToPathData :: Text -> [PathData Double]
svgToPathData txt =
  case parsePath txt of
    Left err -> error err
    Right cmds -> toPathDatas cmds

-- | Convert [PathData] to an SVG d path text.
pathDataToSvg :: [PathData Double] -> Text
pathDataToSvg = Text.intercalate " " . fmap toPathAbsolute

-- | Run the stateful converter over a command list, flatten the
-- per-command results, and flip into SVG screen coordinates.
toPathDatas :: [PathCommand] -> [PathData Double]
toPathDatas cmds =
  fmap svgCoords (mconcat (evalState (traverse toInfo cmds) stateCur0))
-- | Convert relative points to absolute points
relToAbs :: (Additive a) => a -> [a] -> [a]
relToAbs p xs = accsum (p : xs)
-- NOTE(review): 'accsum' of the seeded list keeps the seed @p@ as its
-- first element, so the result has one more point than the input —
-- verify against accsum's definition and the callers' expectations.
-- | M: record the subpath start and current point; extra points are
-- implicit line-tos.  Assumes a non-empty list (guaranteed by the
-- parser's sepBy1) — 'head'/'last' are partial otherwise.
moveTo :: [Point Double] -> State PathCursor [PathData Double]
moveTo xs = do
  put (PathCursor (last xs) (head xs) Nothing)
  pure (StartP (head xs) : (LineP <$> tail xs))
-- | L: line segments; clears any pending control point.
-- Assumes non-empty input.
lineTo :: [Point Double] -> State PathCursor [PathData Double]
lineTo xs = do
  modify ((#curPrevious .~ last xs) . (#curControl .~ Nothing))
  pure $ LineP <$> xs
-- | H: horizontal lines at the current y.
horTo :: [Double] -> State PathCursor [PathData Double]
horTo xs = do
  (PathCursor (Point _ y) _ _) <- get
  lineTo (fmap (`Point` y) xs)
-- | V: vertical lines at the current x.
verTo :: [Double] -> State PathCursor [PathData Double]
verTo ys = do
  (PathCursor (Point x _) _ _) <- get
  lineTo (fmap (Point x) ys)
-- | C: cubic segments; remembers the last c2 so a following S can
-- reflect it.  Assumes non-empty input ('last' is partial).
curveTo :: [(Point Double, Point Double, Point Double)] -> State PathCursor [PathData Double]
curveTo xs = do
  modify
    ( (#curPrevious .~ (\(_, _, p) -> p) (last xs))
        . (#curControl ?~ (\(_, c2, _) -> c2) (last xs))
    )
  pure $ (\(c1, c2, x2) -> CubicP c1 c2 x2) <$> xs
-- | Convert relative points to absolute points
relToAbs3 :: Additive a => a -> [(a, a, a)] -> [(a, a, a)]
-- Each relative cubic segment (c1, c2, end) is offset from the current
-- point, and the current point then advances to that segment's absolute
-- end point (SVG "c" semantics).  The previous implementation summed the
-- control-point offsets cumulatively and independently of the end
-- points, mis-placing the controls of every segment after the first;
-- single-segment lists are unaffected.
relToAbs3 _ [] = []
relToAbs3 p ((c1, c2, x2) : rest) = (p + c1, p + c2, end) : relToAbs3 end rest
  where
    end = p + x2
-- | The implicit first control point of a smooth (S/T) segment: the
-- previous control point reflected about the current position
-- (p - (c' - p) = 2p - c'); the current position itself when there is
-- no previous control point.
reflControlPoint :: State PathCursor (Point Double)
reflControlPoint = do
  (PathCursor p _ c) <- get
  case c of
    Nothing -> pure p
    Just c' -> pure (p - (c' - p))
-- | One S step: fill in the reflected first control, then record c2
-- and the new current point in the cursor.
smoothCurveToStep :: (Point Double, Point Double) -> State PathCursor (PathData Double)
smoothCurveToStep (c2, x2) = do
  c1 <- reflControlPoint
  modify ((#curControl ?~ c2) . (#curPrevious .~ x2))
  pure (CubicP c1 c2 x2)
-- | S: a chain of smooth cubic segments.
smoothCurveTo :: [(Point Double, Point Double)] -> State PathCursor [PathData Double]
smoothCurveTo xs =
  sequence (smoothCurveToStep <$> xs)
-- | Convert relative points to absolute points
relToAbs2 :: Additive a => a -> [(a, a)] -> [(a, a)]
-- Each relative (control, end) pair is offset from the current point,
-- which then advances to the absolute end point (SVG "s"/"q"
-- semantics).  The previous implementation cumulatively summed the
-- control offsets independently of the end points, mis-placing the
-- controls of every segment after the first; single-segment lists are
-- unaffected.
relToAbs2 _ [] = []
relToAbs2 p ((c, x2) : rest) = (p + c, end) : relToAbs2 end rest
  where
    end = p + x2
-- | Q: quadratic segments; the last control point is remembered so a
-- following T can reflect it.  Assumes non-empty input ('last' is partial).
quad :: [(Point Double, Point Double)] -> State PathCursor [PathData Double]
quad xs = do
  modify
    ( (#curPrevious .~ snd (last xs))
        . (#curControl ?~ fst (last xs))
    )
  pure $ uncurry QuadP <$> xs
-- | One T step: use the reflected control, which also becomes the
-- control recorded for the next smooth segment.
smoothQuadStep :: Point Double -> State PathCursor (PathData Double)
smoothQuadStep x2 = do
  c1 <- reflControlPoint
  modify ((#curControl ?~ c1) . (#curPrevious .~ x2))
  pure (QuadP c1 x2)
-- | T: a chain of smooth quadratic segments.
smoothQuad :: [Point Double] -> State PathCursor [PathData Double]
smoothQuad xs =
  sequence (smoothQuadStep <$> xs)
-- | A: arcs advance the current point and clear the pending control.
-- Assumes non-empty input ('last' is partial).
arcTo :: [(Double, Double, Double, Bool, Bool, Point Double)] -> State PathCursor [PathData Double]
arcTo xs = do
  modify ((#curPrevious .~ (\(_, _, _, _, _, p) -> p) (last xs)) . (#curControl .~ Nothing))
  pure $ fromPathEllipticalArc <$> xs
-- | Repack (rx, ry, rotation, large-arc, sweep, end) as an ArcP.
fromPathEllipticalArc :: (a, a, a, Bool, Bool, Point a) -> PathData a
fromPathEllipticalArc (x, y, r, l, s, p) = ArcP (ArcInfo (Point x y) r l s) p
-- | Convert relative points to absolute points
relToAbsArc :: Additive a => Point a -> [(a, a, a, Bool, Bool, Point a)] -> [(a, a, a, Bool, Bool, Point a)]
relToAbsArc p xs = xs'
  where
    -- only the end points are relative; radii, rotation and flags stay as-is
    ps = (\(_, _, _, _, _, pt) -> pt) <$> xs
    -- cumulative offsets from the current point give the absolute ends
    ps' = fmap (p +) (accsum ps)
    xs' = zipWith (\(x0, x1, x2, x3, x4, _) pt -> (x0, x1, x2, x3, x4, pt)) xs ps'
-- | Convert a path command fragment to PathData
--
-- flips the y-dimension of points.
toInfo :: PathCommand -> State PathCursor [PathData Double]
-- M/m: relative variants translate offsets against the cursor first
toInfo (MoveTo OriginAbsolute xs) = moveTo xs
toInfo (MoveTo OriginRelative xs) = do
  (PathCursor p _ _) <- get
  moveTo (relToAbs p xs)
-- Z: emit a line back to the subpath start recorded in the cursor
toInfo EndPath = do
  (PathCursor _ s _) <- get
  pure [LineP s]
toInfo (LineTo OriginAbsolute xs) = lineTo xs
toInfo (LineTo OriginRelative xs) = do
  (PathCursor p _ _) <- get
  lineTo (relToAbs p xs)
-- H/V: relative offsets accumulate on a single coordinate only
toInfo (HorizontalTo OriginAbsolute xs) = horTo xs
toInfo (HorizontalTo OriginRelative xs) = do
  (PathCursor (Point x _) _ _) <- get
  horTo (relToAbs x xs)
toInfo (VerticalTo OriginAbsolute xs) = verTo xs
toInfo (VerticalTo OriginRelative ys) = do
  (PathCursor (Point _ y) _ _) <- get
  verTo (relToAbs y ys)
toInfo (CurveTo OriginAbsolute xs) = curveTo xs
toInfo (CurveTo OriginRelative xs) = do
  (PathCursor p _ _) <- get
  curveTo (relToAbs3 p xs)
toInfo (SmoothCurveTo OriginAbsolute xs) = smoothCurveTo xs
toInfo (SmoothCurveTo OriginRelative xs) = do
  (PathCursor p _ _) <- get
  smoothCurveTo (relToAbs2 p xs)
toInfo (QuadraticBezier OriginAbsolute xs) = quad xs
toInfo (QuadraticBezier OriginRelative xs) = do
  (PathCursor p _ _) <- get
  quad (relToAbs2 p xs)
toInfo (SmoothQuadraticBezierCurveTo OriginAbsolute xs) = smoothQuad xs
toInfo (SmoothQuadraticBezierCurveTo OriginRelative xs) = do
  (PathCursor p _ _) <- get
  smoothQuad (relToAbs p xs)
toInfo (EllipticalArc OriginAbsolute xs) = arcTo xs
toInfo (EllipticalArc OriginRelative xs) = do
  (PathCursor p _ _) <- get
  arcTo (relToAbsArc p xs)
|
tonyday567/chart-svg
|
src/Data/Path/Parser.hs
|
bsd-3-clause
| 15,006
| 0
| 65
| 3,056
| 4,676
| 2,437
| 2,239
| 325
| 2
|
{-# LANGUAGE PatternSynonyms #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.EXT.RescaleNormal
-- Copyright : (c) Sven Panne 2019
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.EXT.RescaleNormal (
-- * Extension Support
glGetEXTRescaleNormal,
gl_EXT_rescale_normal,
-- * Enums
pattern GL_RESCALE_NORMAL_EXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
|
haskell-opengl/OpenGLRaw
|
src/Graphics/GL/EXT/RescaleNormal.hs
|
bsd-3-clause
| 652
| 0
| 5
| 91
| 47
| 36
| 11
| 7
| 0
|
import Data.Aeson
import qualified Data.ByteString.Lazy as L
import Data.Text as T
import Data.Monoid
import Data.Maybe
import Data.Foldable
import Data.Functor
import System.Environment
import System.Exit
import System.Cmd (rawSystem)
-- NOTE that the map keys are not included
-- | Flatten a JSON value into a list of command-line argument strings:
-- scalars render to one argument each, arrays and objects contribute
-- their elements/values in traversal order.
-- NOTE(review): Object traversal follows the underlying aeson map, so
-- key order need not match the input document — confirm acceptable.
jsonArgs :: Value -> [String]
jsonArgs x0 = appEndo (go x0) []
  where go (String s) = f (T.unpack s)
        -- renders via Scientific's Show instance (e.g. "1.0", "1.0e-2")
        go (Number n) = f (show n)
        go (Bool b) = f (if b then "true" else "false")
        go Null = f "null"
        go (Object m) = foldMap go m
        go (Array xs) = foldMap go xs
        -- difference-list cons keeps the whole walk linear
        f x = Endo (x:)
-- | Read JSON from stdin, flatten it into argument strings with
-- 'jsonArgs' and run the given command with those arguments appended to
-- any extra argv.  Exits with the child's exit code.  Prints a usage
-- message and exits non-zero when no command is given, instead of
-- crashing with a pattern-match failure.
main :: IO ()
main = do
  allArgs <- getArgs
  case allArgs of
    [] -> die "Usage: json-xargs CMD [ARG...]  (JSON read from stdin)"
    cmd : argv -> do
      args <- jsonArgs          -- main processing
                . fromMaybe err -- error handling
                . decode' <$> L.getContents -- reading the input
      exitWith =<< rawSystem cmd (argv ++ args)
  where err = error "Invalid JSON input"
|
np/json-tools
|
json-xargs.hs
|
bsd-3-clause
| 939
| 6
| 11
| 288
| 315
| 168
| 147
| 26
| 7
|
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
module Query where
import Control.Monad
import Database.Esqueleto
import qualified Database.Persist as P
--import Database.Persist.Class as PC
import Database.Persist.Sqlite (runSqlite)
import Database
-- import Chapter11.Gender
--getClientById :: (P.PersistQuery m, P.PersistMonadBackend m ~ P.PersistEntityBackend Client) => Int -> m (Maybe Client)
-- | Look up a client by its integer key.
getClientById n = get $ toSqlKey $ fromIntegral n
--getPurchaseClient :: Purchase -> m (Maybe Client)
-- | The client that made the given purchase.
getPurchaseClient p = get (purchaseClient p)
--getPurchaseClient' :: Int -> m (Maybe Client)
-- | The client behind the purchase with the given integer key;
-- Nothing when the purchase itself does not exist.
getPurchaseClient' pId = do p <- get $ toSqlKey ( fromIntegral pId)
                            case p of
                              Just p' -> get $ purchaseClient p'
                              Nothing -> return Nothing
-- getClientByInfo :: String -> String -> String -> String -> m (Maybe Client)
-- | Look up a client by full name, address and country name, resolving
-- the country to its key first; Nothing when either lookup misses.
getClientByInfo fName lName addr cnName = do
  mCountry <- getBy $ UniqueCountryName cnName
  case mCountry of
    Nothing -> return Nothing
    Just (Entity countryId _) -> do
      mClient <- getBy $ UniqueClient fName lName addr countryId
      return $ fmap (\(Entity _ client) -> client) mClient
-- getAdultsOfSpainAndGermany :: m [Entity Client]
-- | Clients aged at least 18 whose country is Spain or Germany.
-- NOTE(review): the irrefutable @Just@ patterns fail (via MonadFail)
-- when either country row is missing — confirm that is acceptable.
getAdultsOfSpainAndGermany = do
  Just (Entity spId _) <- getBy $ UniqueCountryName "Spain"
  Just (Entity geId _) <- getBy $ UniqueCountryName "Germany"
  P.selectList [ ClientCountry P.<-. [spId, geId], ClientAge P.>=. Just 18 ] []
-- countAdultsOfSpainAndGermany :: m Integer
-- | Count of the same client set as 'getAdultsOfSpainAndGermany'.
countAdultsOfSpainAndGermany = do
  Just (Entity spId _) <- getBy $ UniqueCountryName "Spain"
  Just (Entity geId _) <- getBy $ UniqueCountryName "Germany"
  P.count [ ClientCountry P.<-. [spId, geId], ClientAge P.>=. Just 18 ]
-- getAdultsOfSpainAndUS :: m [Entity Client]
-- | Clients of legal age in their country: 18+ in Spain OR 21+ in the
-- US, sorted oldest first.  The irrefutable @Just@ patterns fail when
-- a country row is missing.
getAdultsOfSpainAndUS = do
  Just (Entity spId _) <- getBy $ UniqueCountryName "Spain"
  Just (Entity usId _) <- getBy $ UniqueCountryName "United States of America"
  P.selectList ( [ ClientCountry P.==. spId, ClientAge P.>=. Just 18 ]
                 P.||. [ ClientCountry P.==. usId, ClientAge P.>=. Just 21 ] )
               [ P.Desc ClientAge ]
-- getProductsPage :: Int -> m [Entity Product]
-- | Page @n@ (1-based) of products, 10 per page, cheapest first.
getProductsPage n = P.selectList [ ] [ P.Asc ProductPrice, P.LimitTo 10, P.OffsetBy ((n-1)*10) ]
-- getCountriesWithBigBuyers :: m [Country]
-- | Countries of clients with more than 3 purchases.
-- NOTE(review): issues one count query per client (N+1); fine for small
-- data sets, consider a grouped esqueleto query otherwise.
getCountriesWithBigBuyers = do
  buyers <- P.selectKeysList [ ] [ ]
  -- pair every client key with its number of purchases
  buyersAndPurchases <- mapM (\b -> P.count [ PurchaseClient P.==. b ] >>= \c -> return (b,c)) buyers
  let buyersAndPurchases' = filter (\(_,c) -> c > 3) buyersAndPurchases
  -- resolve each remaining client to its country (patterns assume rows exist)
  mapM (\(b,_) -> do Just cl <- get b
                     Just cn <- get $ clientCountry cl
                     return cn)
       buyersAndPurchases'
-- getPeopleOver25 :: m [Entity Client]
-- | Clients over 25, ordered by last then first name (esqueleto DSL).
getPeopleOver25 =
  select $
  from $ \client -> do
    where_ (client ^. ClientAge >. just (val 25))
    orderBy [ asc (client ^. ClientLastName), asc (client ^. ClientFirstName) ]
    return client
-- getPeopleOver25FromSpainOrGermany :: m [Entity Client]
-- | Same age filter restricted to Spain/Germany, via an implicit cross
-- join narrowed by the country-id equality in the where clause.
getPeopleOver25FromSpainOrGermany =
  select $
  from $ \(client, country) -> do
    where_ ( client ^. ClientAge >. just (val 25)
             &&. country ^. CountryName `in_` valList [ "Spain", "Germany" ]
             &&. client ^. ClientCountry ==. country ^. CountryId )
    return client
-- getPeopleOver25FromSpainOrGermanyJoin :: m [Entity Client]
-- | As above but with an explicit inner join, and ordered by name.
getPeopleOver25FromSpainOrGermanyJoin =
  select $
  from $ \(client `InnerJoin` country) -> do
    on (client ^. ClientCountry ==. country ^. CountryId)
    where_ ( client ^. ClientAge >. just (val 25)
             &&. country ^. CountryName `in_` valList [ "Spain", "Germany" ])
    orderBy [ asc (client ^. ClientLastName), asc (client ^. ClientFirstName) ]
    return client
-- getMoneyByClient :: m [(Entity Client, Value (Maybe Double))]
-- | Total purchase amount per client via a left outer join; clients
-- without purchases yield a NULL (Nothing) sum.
getMoneyByClient =
  select $
  from $ \(client `LeftOuterJoin` purchase) -> do
    on (client ^. ClientId ==. purchase ^. PurchaseClient)
    groupBy (client ^. ClientId)
    let s = sum_ (purchase ^. PurchaseAmount)
    return (client, s)
|
nrolland/persistentBHStyle
|
src/Query.hs
|
bsd-3-clause
| 4,246
| 0
| 21
| 997
| 1,203
| 603
| 600
| 75
| 3
|
{-# LANGUAGE TemplateHaskell #-}
module TapeAllocation
(
TapeAccessSequence(..)
, Allocation
, findAllocation
) where
import Control.Lens
import Control.Monad.State.Strict
import Control.Monad.Writer.Strict
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import Data.Hashable (Hashable)
import Data.List ((\\))
import qualified Data.DList as D
import qualified Data.Foldable as F
import Control.Monad.Tardis
import Types (Size, Position(..))
-- | One observation in the order tape cells are accessed: an access of
-- variable @a@ needing @Size@ consecutive cells, or a loop bracket.
data TapeAccessSequence a = Access a Size | StartLoop | EndLoop
-- | Loop-free view: Start' marks an access, End' the final use of a variable.
data TapeAccessSequenceNoLoops a = Start' a Size | End' a
-- | Final mapping from variable to its allocated tape position.
type Allocation a = Map.HashMap a Position
-- | State threaded through 'annotateEnd': the current loop nesting
-- depth, the depth at which each variable first appeared, and the
-- variables whose End' must wait for an enclosing loop to exit.
data AnnotateEndState a = AnnotateEndState { _loopLevel :: Int
                                           , _firstOccurrences :: Map.HashMap a Int
                                           , _lingering :: Map.HashMap Int (Set.HashSet a)
                                           }
makeLenses ''AnnotateEndState
-- | Flatten loops out of an access sequence.  A variable first touched
-- inside a loop must stay live until the loop that introduced it exits,
-- so its End' marker is deferred ("lingering") until then.  The Tardis
-- backwards state carries the set of variables still accessed later in
-- the stream, letting 'tellEnd' emit End' only at a variable's final
-- occurrence.
annotateEnd :: (Eq a, Hashable a, F.Foldable t) => t (TapeAccessSequence a) -> [TapeAccessSequenceNoLoops a]
annotateEnd xs = D.toList . flip evalTardis initialTardis . execWriterT . flip evalStateT initialState $ go
  where
    initialTardis = (Set.empty, ())
    initialState = AnnotateEndState 0 Map.empty Map.empty
    go = F.for_ xs $ \el ->
      case el of
        StartLoop -> loopLevel += 1
        -- leaving a loop: flush every End' that was deferred to this level
        EndLoop -> do { loopLevel' <- loopLevel <-= 1
                      ; lingering' <- use lingering
                      ; lingering' ^! at loopLevel' . _Just . folded . act tellEnd
                      ; lingering %= Map.delete loopLevel'
                      }
        -- an access: always emit Start'; emit or defer the matching End'
        Access v size -> do { tell' $ D.singleton (Start' v size)
                            ; liftTardis $ modifyBackwards (Set.insert v)
                            ; loopLevel' <- use loopLevel
                            ; firstOccurrences %= Map.insertWith (const id) v loopLevel'
                            ; firstOccurrenceLevel <- uses firstOccurrences (^?! ix v)
                            ; if firstOccurrenceLevel == loopLevel'
                              then tellEnd v
                              else lingering %= (Map.insertWith mappend firstOccurrenceLevel $ Set.singleton v)
                            }
      where
        tell' = lift . tell
        liftTardis = lift . lift
        -- emit End' only if the (future-known) rest of the stream never
        -- touches the variable again
        tellEnd a = do isLastOccurrence <- liftTardis $ getsFuture (not . Set.member a)
                       tell' $ if isLastOccurrence then D.singleton (End' a) else mempty
-- | First-fit tape allocation.  Walk the annotated access sequence: on
-- a variable's first Start', grab the lowest run of @size@ consecutive
-- cells not occupied by any currently-live variable; on End', release
-- that variable's cells for reuse.
findAllocation :: (Hashable a, Eq a, F.Foldable t) => t (TapeAccessSequence a) -> Allocation a
findAllocation tapeAccessSequence = findAllocation' (F.toList $ annotateEnd tapeAccessSequence) Map.empty Map.empty
  where
    findAllocation' [] allocation _ = allocation
    findAllocation' (x:xs) allocation liveVars =
      case x of
        Start' v size -> case allocation ^. at v of
          Nothing -> let pos = findSeqOfLength size $ [0..] \\ (concat $ Map.elems liveVars)
                     in findAllocation' xs (Map.insert v (Position pos) allocation) (Map.insert v [pos..pos+size-1] liveVars)
          -- already allocated: subsequent Start' markers are ignored
          _ -> findAllocation' xs allocation liveVars
        End' v -> findAllocation' xs allocation (Map.delete v liveVars)
    -- Start of the first run of @size@ consecutive values in an
    -- ascending list.  NOTE(review): relies on the list being infinite
    -- and strictly ascending; the final catch-all is then unreachable.
    findSeqOfLength size xs = go xs Nothing size
      where
        go _ (Just r) 0 = r
        go (x1:x2:xs') result size' = if size == 1 || x1 + 1 == x2
                                      then case result of
                                             Nothing -> go (x2:xs') (Just x1) (size' - 1)
                                             just -> go (x2:xs') just (size' - 1)
                                      else go (x2:xs') Nothing size
        go _ _ _ = undefined -- should never happen on an infinite list
|
benma/bfc
|
src/TapeAllocation.hs
|
bsd-3-clause
| 3,815
| 0
| 21
| 1,317
| 1,111
| 582
| 529
| -1
| -1
|
module PolyPt2
( PolyPt2
, polyAreaPt2
, polyMidPt2
, polyAreaMidPt2
, polyNormPt2
, polyPt2Lines
, pt2InsideWindingPoly
, pt2InsideAlternatingPoly
, pt2PolyCrossingNumber, polyPt2Intersections
, polyPt2WindingIntersections, polyPt2AlternatingIntersections
, module LinePt2
, module TriPt2
) where
import LinePt2
import TriPt2
import Asteroids.Helpers
type PolyPt2 a = [Pt2 a]
-- | Fan-triangulate a polygon from its first vertex:
-- (v0,v1,v2), (v0,v2,v3), ...  Fewer than three points give no triangles.
polyToTriPt2 :: [Pt2 a] -> [TriPt2 a]
polyToTriPt2 (anchor : u : v : rest) =
  TriPt2 (anchor, u, v) : polyToTriPt2 (anchor : v : rest)
polyToTriPt2 _ = []
-- | NOTE: clockwise area is positive, counterclockwise is negative
polyAreaPt2 :: Fractional a => [Pt2 a] -> a
-- Signed sum of the triangle areas of the fan triangulation.
polyAreaPt2 = sum . fmap triAreaPt2 . polyToTriPt2
-- | NOTE: clockwise area is positive, counterclockwise is negative
polyAreaMidPt2 :: Fractional a => [Pt2 a] -> (a, Pt2 a)
polyAreaMidPt2 p = (totalArea, centroid)
  where
    tris = polyToTriPt2 p
    tps = fmap (triPart . triAreaMidPt2) tris
    -- weight each triangle midpoint by its signed area
    triPart (area,mid) = (area,mulPt2 mid area)
    totalArea = sum $ fmap fst tps
    -- NOTE(review): divides by the total signed area; degenerate
    -- polygons with zero area give an undefined centroid.
    centroid = divPt2 (sum $ fmap snd tps) totalArea
-- | Area-weighted centroid of the polygon, discarding the area itself.
polyMidPt2 :: Fractional a => [Pt2 a] -> Pt2 a
polyMidPt2 = snd . polyAreaMidPt2
-- | Normalizes the polygon to have area of sz.
polyNormPt2 :: (Floating a, Ord a) => a -> [Pt2 a] -> [Pt2 a]
polyNormPt2 sz pts = fmap norm pts'
  where
    (a, midP) = polyAreaMidPt2 pts
    -- uniform scale factor so the area magnitude becomes |sz|
    da = sqrt ( abs ( sz / a ) )
    -- recentre on the centroid, then scale
    norm p = mulPt2 ( p - midP ) da
    -- flip orientation when the area's sign disagrees with sz
    pts' = if signum a * signum sz < 0 then reverse pts else pts
-- | The edges of a polygon, wrapping from the last vertex back to the first.
polyPt2Lines :: [Pt2 a] -> [LinePt2 a]
polyPt2Lines [] = []
polyPt2Lines pts = fmap LinePt2 (wrappedPairs pts)
-- | Signed count of polygon edges crossed, summed over all edges.
pt2PolyCrossingNumber :: (Eq a, Ord a, Num a) => Pt2 a -> [Pt2 a] -> Int
pt2PolyCrossingNumber pt poly =
  sum [pt2LineRightCrossingNumber pt (LinePt2 edge) | edge <- wrappedPairs poly]
-- | Point-in-polygon by the (clockwise-positive) winding rule.
pt2InsideWindingPoly :: (Eq a, Ord a, Num a) => Pt2 a -> [Pt2 a] -> Bool
pt2InsideWindingPoly pt poly = pt2PolyCrossingNumber pt poly > 0
-- | Point-in-polygon by the even-odd (alternating) rule.
pt2InsideAlternatingPoly :: (Eq a, Ord a, Num a) => Pt2 a -> [Pt2 a] -> Bool
pt2InsideAlternatingPoly pt poly = odd (pt2PolyCrossingNumber pt poly)
-- | Overlapping polygon pairs under winding-rule containment.
polyPt2WindingIntersections :: (Eq a, Ord a, Num a) =>
    [PolyPt2 a] -> [(PolyPt2 a, PolyPt2 a)]
polyPt2WindingIntersections = polyPt2Intersections pt2InsideWindingPoly
-- | Overlapping polygon pairs under even-odd containment.
polyPt2AlternatingIntersections :: (Eq a, Ord a, Num a) =>
    [PolyPt2 a] -> [(PolyPt2 a, PolyPt2 a)]
polyPt2AlternatingIntersections = polyPt2Intersections pt2InsideAlternatingPoly
-- | All pairs (p1, p2) with p1 < p2 that overlap: either polygon
-- contains the other's first vertex, or some pair of edges crosses.
-- Quadratic in the number of polygons.
polyPt2Intersections :: (Eq a, Ord a, Num a) =>
    (Pt2 a -> [Pt2 a] -> Bool) -> [PolyPt2 a] -> [(PolyPt2 a, PolyPt2 a)]
polyPt2Intersections pinside ps = do
  p1 <- ps
  p2 <- ps
  [(p1, p2) | p1 < p2 && intersects' p1 p2]
  -- empty polygons never intersect ('head' below is therefore safe)
  where intersects' [] _ = False
        intersects' _ [] = False
        intersects' p1 p2 = pinside (head p1) p2
                            || pinside (head p2) p1
                            || sidesCross p1 p2
        sidesCross p1 p2 = any (anyCrossings $ polyPt2Lines p2) (polyPt2Lines p1)
        anyCrossings lineList line = any (linesCrossed line) lineList
|
trenttobler/hs-asteroids
|
src/PolyPt2.hs
|
bsd-3-clause
| 3,189
| 0
| 11
| 800
| 1,155
| 598
| 557
| 68
| 3
|
{-|
Interactive Event-Log Browser
TODO Handle CTRL-D
-}
module Urbit.King.EventBrowser (run) where
import Urbit.Prelude
import Data.Conduit
import Urbit.Arvo
import Urbit.Time
import Urbit.Vere.Pier.Types
import Control.Monad.Trans.Maybe (MaybeT(..))
import Urbit.Vere.Log (EventLog)
import qualified Data.Conduit.Combinators as C
import qualified Urbit.Vere.Log as Log
--------------------------------------------------------------------------------
-- | One log entry as displayed by the browser.
data Event = Event
  { num :: Word64  -- ^ event number
  , mug :: Mug     -- ^ state hash
  , wen :: Wen     -- ^ timestamp
  , ova :: Ev      -- ^ the event itself
  }
 deriving Show
-- | Keyboard commands accepted by the interactive loop.
data Input = Next | Prev | Quit | Trim | Effs | Init | Last
--------------------------------------------------------------------------------
-- | Interactive event-log browser loop.  Puts stdin into raw, unechoed
-- mode and dispatches one keystroke per action (see 'getInput').
run :: HasLogFunc e => EventLog -> RIO e ()
run log = do
  hSetBuffering stdin NoBuffering
  hSetEcho stdin False
  logInfo $ displayShow (Log.identity log)
  -- events numbered <= cycle are lifecycle (boot) events with no stored body
  let cycle = fromIntegral $ lifecycleLen $ Log.identity log
  las <- Log.lastEv log
  loop cycle las las
 where
  failRead cur =
    putStrLn ("ERROR: Failed to read event: " <> tshow cur)
  -- dispatch one keystroke for the event currently on screen
  input cyc las cur mFx = do
    getInput las cur >>= \case
      Next -> loop cyc las (succ cur)
      Prev -> loop cyc las (pred cur)
      Init -> loop cyc las 1
      Last -> loop cyc las las
      Quit -> pure ()
      Trim -> trim cyc las cur mFx
      Effs -> showEffects mFx >> input cyc las cur mFx
  -- delete the tail of the log; on success move to the new last event
  trim cyc las cur mFx = do
    deleteFrom log las cur >>= \case
      True -> loop cyc (pred cur) (pred cur)
      False -> input cyc las cur mFx
  -- clamp the cursor into [1, las]
  loop cyc las 0 = loop cyc las 1
  loop cyc las cur | cur > las = loop cyc las las
  -- lifecycle events have no readable body; show a placeholder
  loop cyc las cur | cyc >= cur = do
    putStrLn ""
    putStrLn " [EVENT]"
    putStrLn ""
    putStrLn " Lifecycle Nock"
    putStrLn ""
    input cyc las cur (Just [])
  loop cyc las cur = do
    mEv <- peekEvent log cur
    mFx <- peekEffect log cur
    case mEv of
      Nothing -> failRead cur
      Just ev -> showEvent ev >> showEffectsTeaser mFx
    input cyc las cur mFx
-- | Interactively confirm and delete events (#cur .. #las) from the
-- log.  Returns True when the user confirmed and the trim ran.
deleteFrom :: HasLogFunc e => EventLog -> Word64 -> Word64 -> RIO e Bool
deleteFrom log las cur = do
  sure <- areYouSure
  if sure then doDelete else abortDelete
  pure sure
 where
  abortDelete = do
    putStrLn "\n\n [ABORTED]\n"
    putStrLn " Aborted delete, no events pruned.\n"
  doDelete = do
    Log.trimEvents log cur
    putStrLn "\n\n [DELETED]\n"
    putStrLn " It's gone forever!\n"
  -- wording depends on whether just the last event or a range goes
  question =
    if las == cur
    then mconcat [ " This will permanently delete the last event (#"
                 , tshow las
                 , ")\n" ]
    else mconcat [ " This will permanently delete all events in (#"
                 , tshow cur
                 , " - #"
                 , tshow las
                 , ")\n" ]
  -- single-keystroke confirmation; anything but 'y' aborts
  areYouSure = do
    putStrLn "\n\n ARE YOU SURE????"
    putStrLn ""
    putStrLn question
    putStr "(y|n) "
    hFlush stdout
    getChar <&> \case
      'y' -> True
      _ -> False
-- | Show a "(cur/las)" prompt, read one keystroke and translate it to
-- an 'Input'; any unrecognised key prints the help text and retries.
getInput :: Word64 -> Word64 -> RIO e Input
getInput las cur = do
  putStr ("(" <> tshow cur <> "/" <> tshow las <> ") ")
  hFlush stdout
  getChar >>= \case
    'j' -> pure Next
    'k' -> pure Prev
    'q' -> pure Quit
    'f' -> pure Effs
    'x' -> pure Trim
    '0' -> pure Init
    'G' -> pure Last
    _ -> do putStrLn "\n"
            putStrLn help
            getInput las cur
 where
  help = unlines
    [ " [HELP]"
    , ""
    , " k View the previous event"
    , " j View the next event"
    , " 0 View the first event"
    , " G View the last event"
    , " q Quit"
    , " x Delete (only the last event)"
    , " ? Show this help"
    ]
-- | One-line summary of an event's effects, prompting the user to
-- press 'f' for the full listing.
showEffectsTeaser :: Maybe FX -> RIO e ()
showEffectsTeaser mFx = case mFx of
  Nothing -> putStrLn " [No collected effects]\n"
  Just [] -> putStrLn " [No effects for this event]\n"
  Just fx ->
    putStrLn (" [" <> tshow (length fx) <> " collected effects. Press 'f' to view]\n")
-- | Full dump of an event's effects, one paragraph per effect.
showEffects :: Maybe FX -> RIO e ()
showEffects Nothing = putStrLn " [No collected effects]\n"
showEffects (Just []) = putStrLn " [No effects for this event]\n"
showEffects (Just fx) = do
  putStrLn "\n"
  putStrLn " [EFFECTS]"
  for_ fx $ \ef -> do
    putStrLn ""
    showEffect ef
  putStrLn ""
-- | Pretty-print one effect; an unparseable effect shows its raw noun.
showEffect :: Lenient Ef -> RIO e ()
showEffect (GoodParse ef) =
  putStrLn $ unlines $ fmap (" " <>) $ lines $ pack $ ppShow ef
showEffect (FailParse n) =
  putStrLn $ unlines $ fmap (" " <>) $ lines $ pack $ ppShow n
-- | Pretty-print the event body under an "[EVENT]" banner.
showEvent :: Event -> RIO e ()
showEvent ev = do
  putStrLn "\n"
  putStrLn " [EVENT]"
  putStrLn ""
  putStrLn $ unlines $ fmap (" " <>) $ lines $ pack $ ppShow (ova ev)
-- | Read the effects recorded for one event id, if any; Nothing when
-- the stream is empty or yields a different id.
peekEffect :: HasLogFunc e => EventLog -> Word64 -> RIO e (Maybe FX)
peekEffect log eId = runMaybeT $ do
  (id, bs) <- MaybeT $ runConduit (Log.streamEffectsRows log eId .| C.head)
  guard (id == eId)
  io $ cueBSExn bs >>= fromNounExn
-- | Read and decode one event; the stored noun is a (mug, wen, ev)
-- triple whose last component is the Arvo event itself.
peekEvent :: HasLogFunc e => EventLog -> Word64 -> RIO e (Maybe Event)
peekEvent log eId = runMaybeT $ do
  octs <- MaybeT $ runConduit (Log.streamEvents log eId .| C.head)
  noun <- io $ cueBSExn octs
  (m,w,e) <- io $ fromNounExn noun
  ovum <- fromNounExn e
  pure (Event eId m w ovum)
|
jfranklin9000/urbit
|
pkg/hs/urbit-king/lib/Urbit/King/EventBrowser.hs
|
mit
| 5,652
| 0
| 15
| 1,936
| 1,748
| 840
| 908
| -1
| -1
|
{- |
Module : ./CASL/CCC/FreeTypes.hs
Description : consistency checking of free types
Copyright : (c) Mingyi Liu and Till Mossakowski and Uni Bremen 2004-2005
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : non-portable (imports Logic.Logic)
Consistency checking of free types
-}
module CASL.CCC.FreeTypes (checkFreeType) where
import CASL.AlphaConvert
import CASL.AS_Basic_CASL
import CASL.MapSentence
import CASL.Morphism
import CASL.Sign
import CASL.Simplify
import CASL.SimplifySen
import CASL.CCC.TermFormula
import CASL.CCC.TerminationProof (terminationProof)
import CASL.Overload (leqP)
import CASL.Quantification
import CASL.ToDoc
import CASL.Utils
import Common.AS_Annotation
import Common.Consistency (Conservativity (..))
import Common.DocUtils
import Common.Id
import Common.Result
import Common.Utils (number)
import qualified Common.Lib.MapSet as MapSet
import qualified Common.Lib.Rel as Rel
import Control.Monad
import Data.Either
import Data.Function
import Data.List
import Data.Maybe
import qualified Data.Set as Set
-- | check values of constructors (free types have only total ones)
--
-- Computes the set of inhabited sorts as a least fixed point: starting
-- from the given (old, known-inhabited) sorts, a sort becomes inhabited
-- once some total constructor targeting it has all argument sorts
-- already inhabited.
inhabited :: Set.Set SORT -> [OP_SYMB] -> Set.Set SORT
inhabited sorts cons = iterateInhabited sorts where
  -- (argument sorts, result sort) of every total constructor
  argsRes = foldr (\ os -> case os of
    Qual_op_name _ (Op_type Total args res _) _ -> ((args, res) :)
    _ -> id) [] cons
  -- iterate until no new sort is added
  iterateInhabited l =
    if changed then iterateInhabited newL else newL where
      (newL, changed) = foldr (\ (ags, rs) p@(l', _) ->
        if all (`Set.member` l') ags && not (Set.member rs l')
        then (Set.insert rs l', True) else p) (l, False) argsRes
-- | Keep only user-written axioms: an axiom is rejected exactly when its
-- name starts with @ga_@ followed by one of the generated-axiom markers
-- (@disjoint_@, @injective_@, @selector_@).
isGenAx :: Named (FORMULA f) -> Bool
isGenAx ax = maybe True userWritten (stripPrefix "ga_" (senAttr ax))
  where
    userWritten rname =
      not (any (`isPrefixOf` rname) ["disjoint_", "injective_", "selector_"])
-- | Sentences of the axioms that pass 'isGenAx' (labels stripped).
getFs :: [Named (FORMULA f)] -> [FORMULA f]
getFs axs = [ sentence ax | ax <- axs, isGenAx ax ]
-- | get the constraints from sort generation axioms
constraintOfAxiom :: [FORMULA f] -> [[Constraint]]
constraintOfAxiom fs = [ constrs | Sort_gen_ax constrs _ <- fs ]
-- | Collect the generated sorts and the constructor symbols recovered
-- from all sort-generation constraints in the given formulas.
recoverSortsAndConstructors :: [FORMULA f] -> (Set.Set SORT, Set.Set OP_SYMB)
recoverSortsAndConstructors fs = (collect srts, collect cons)
  where
    (srts, cons, _) =
      unzip3 (map recover_Sort_gen_ax (constraintOfAxiom fs))
    collect xs = Set.unions (map Set.fromList xs)
-- check that patterns do not overlap, if not, return proof obligation.
--
-- Pairs up the axioms within each leading-symbol group, builds an
-- overlap equation for every overlapping pair, and drops trivially
-- true obligations.
getOverlapQuery :: (FormExtension f, TermExtension f, Ord f) => Sign f e
  -> [[FORMULA f]] -> [FORMULA f]
getOverlapQuery sig = filter (not . is_True_atom)
  . mapMaybe (retrySubstForm sig) . concatMap pairs
-- | Reuse a substitution result when it carries no diagnostics;
-- otherwise alpha-convert the first formula, convert the second with
-- the resulting counter, and redo the substitution.
-- NOTE(review): the irrefutable @Just@ patterns assume 'getSubstForm'
-- succeeds on the converted formulas whenever the first attempt had
-- diagnostics -- confirm that invariant.
convert2Forms :: (TermExtension f, FormExtension f, Ord f) => Sign f e
  -> FORMULA f -> FORMULA f
  -> Result ((Subst f, [FORMULA f]), (Subst f, [FORMULA f]))
  -> (FORMULA f, FORMULA f, ((Subst f, [FORMULA f]), (Subst f, [FORMULA f])))
convert2Forms sig f1 f2 (Result ds m) =
  if null ds then let Just r = m in (f1, f2, r) else let
    (f3, c) = alphaConvert 1 id f1
    f4 = convertFormula c id f2
    Result _ (Just p) = getSubstForm sig f3 f4
    in (f3, f4, p)
-- | Build the overlap proof obligation for two axioms, if their
-- patterns unify.  Returns 'Nothing' when no substitution exists; when
-- the substitution carries diagnostics the formulas are alpha-converted
-- first (via 'convert2Forms') and the equation rebuilt.
retrySubstForm :: (FormExtension f, TermExtension f, Ord f) => Sign f e
  -> (FORMULA f, FORMULA f) -> Maybe (FORMULA f)
retrySubstForm sig (f1, f2) =
  let r@(Result ds m) = getSubstForm sig f1 f2
  in case m of
       Nothing -> Nothing
       Just s -> if null ds then Just $ mkOverlapEq sig s f1 f2
         else let (f3, f4, s2) = convert2Forms sig f1 f2 r
              in Just . stripQuant sig . convertFormula 1 id
                 $ mkOverlapEq sig s2 f3 f4
-- | universally close a formula over its free variables
quant :: TermExtension f => Sign f e -> FORMULA f -> FORMULA f
quant sig f = quantFreeVars sig f nullRange
-- | Construct the overlap obligation for two axioms: under both
-- (substituted) premises, their conclusions must agree (see
-- 'overlapQuery').  The result is simplified and universally closed.
mkOverlapEq :: (TermExtension f, GetRange f, Ord f) => Sign f e
  -> ((Subst f, [FORMULA f]), (Subst f, [FORMULA f]))
  -> FORMULA f -> FORMULA f -> FORMULA f
mkOverlapEq sig ((s1, fs1), (s2, fs2)) f1 f2 = quant sig . simplifyFormula id
  . mkImpl (conjunct $ map (replaceVarsF s1 id) fs2
            ++ map (replaceVarsF s2 id) fs1)
  . overlapQuery (replaceVarsF s1 id $ stripAllQuant f1)
  . replaceVarsF s2 id $ stripAllQuant f2
{-
check if leading symbols are new (not in the image of morphism),
if not, return it as proof obligation
-}
getDefsForOld :: GetRange f => Sign f e -> [FORMULA f]
  -> [FORMULA f]
getDefsForOld sig axioms = let
    oldOpMap = opMap sig
    oldPredMap = predMap sig
  -- keep exactly the axioms whose leading op/pred already exists in
  -- the source signature image
  in filter (\ f -> case leadingSym f of
       Just (Left (Qual_op_name ident ot _))
         | MapSet.member ident (toOpType ot) oldOpMap -> True
       Just (Right (Qual_pred_name ident pt _))
         | MapSet.member ident (toPredType pt) oldPredMap -> True
       _ -> False) axioms
-- | True exactly for sort-generation axioms marked as free.
isFreeSortGen :: FORMULA f -> Bool
isFreeSortGen (Sort_gen_ax _ True) = True
isFreeSortGen _ = False
-- | non-inhabited non-empty sorts
--
-- From the new generated sorts, drop those explicitly allowed to be
-- empty, then drop those provably inhabited by the constructors.
getNefsorts :: Set.Set SORT -> Set.Set SORT -> Set.Set SORT
  -> (Set.Set SORT, [OP_SYMB]) -> Set.Set SORT
getNefsorts oldSorts nSorts esorts (srts, cons) =
  Set.difference fsorts $ inhabited oldSorts cons where
  -- new generated sorts that must not be empty
  fsorts = Set.difference (Set.intersection nSorts srts) esorts
-- | Conservativity of the data part: 'Def' with no new sorts, 'Mono'
-- when every new sort is generated or a defined subsort, 'Cons' otherwise.
getDataStatus :: Set.Set SORT -> Set.Set SORT -> Set.Set SORT -> Conservativity
getDataStatus nSorts defSubs genSorts =
  if Set.null nSorts
  then Def
  else if Set.null (Set.difference nSorts (Set.union genSorts defSubs))
       then Mono
       else Cons
-- | Combine the data status with the axiom status ('Def' when there are
-- no remaining axioms, 'Cons' otherwise) by taking the minimum.
getConStatus :: Conservativity -> [FORMULA f] -> Conservativity
getConStatus dataStatus fs = min dataStatus axStatus
  where axStatus = if null fs then Def else Cons
-- | check whether it is the domain of a partial function
isDomain :: FORMULA f -> Bool
isDomain f = isJust (domainDef f)
-- | check whether it contains a definedness formula in correct form
--
-- After stripping the outer universal quantifiers the formula must be a
-- definedness atom, its negation, or a non-equivalence relation whose
-- left side is a definedness atom.
correctDef :: FORMULA f -> Bool
correctDef f = case stripAllQuant f of
  Relation (Definedness _ _) c _ _ | c /= Equivalence -> True
  Negation (Definedness _ _) _ -> True
  Definedness _ _ -> True
  _ -> False
-- | render a formula for diagnostics, simplified against the signature
showForm :: (TermExtension f, FormExtension f) => Sign f e -> FORMULA f
  -> String
showForm s = flip showDoc "" . simplifyCASLSen s
-- check the definitional form of the partial axioms
--
-- Classifies the axioms for partial operations and emits warnings for:
-- missing leading symbols, non-definitional definedness formulas,
-- multiple or non-complete domain axioms, missing definedness
-- conditions, and definedness conditions clashing with a domain axiom.
-- Returns 'Nothing' when no problem was found.
checkDefinitional :: (FormExtension f, TermExtension f)
  => Sign f e -> [FORMULA f] -> Maybe (Result (Conservativity, [FORMULA f]))
checkDefinitional tsig fs = let
       formatAxiom = showForm tsig
       -- split off axioms without a leading symbol
       (noLSyms, withLSyms) = partition (isNothing . fst . snd)
         $ map (\ a -> (a, leadingSymPos a)) fs
       -- axioms whose leading symbol is a partial operation
       partialLSyms = foldr (\ (a, (ma, _)) -> case ma of
         Just (Left (Application t@(Qual_op_name _ (Op_type k _ _ _) _) _ _))
           | k == Partial -> ((a, t) :)
         _ -> id) [] withLSyms
       (domainDefs, otherPartials) = partition (isDomain . fst) partialLSyms
       (withDefs, withOutDefs) = partition (containDef . fst) otherPartials
       (correctDefs, wrongDefs) = partition (correctDef . fst) withDefs
       -- group domain axioms by (overloaded-equal) operation symbol
       grDomainDefs = Rel.partList (on (sameOpSymbs tsig) snd) domainDefs
       (multDomainDefs, oneDomainDefs) = partition (\ l -> case l of
         [_] -> False
         _ -> True) grDomainDefs
       singleDomainDefs = map head oneDomainDefs
       -- a domain axiom is complete if all its pattern arguments are variables
       nonCompleteDomainDefs = filter (\ (da, _) -> case domainDef da of
         Just (ta, _) | all isVar $ arguOfTerm ta -> False
         _ -> True) singleDomainDefs
       domainObls = concatMap (\ (da, dt) -> map (\ (de, _) -> (da, de))
         $ filter (sameOpSymbs tsig dt . snd) correctDefs) singleDomainDefs
       nonEqDoms = filter (\ (da, de) ->
         case (domainDef da, stripAllQuant de) of
           (Just (ta, _), Relation (Definedness te _) c _ _)
             | c /= Equivalence && sameOpsApp tsig ta te ->
               case leadingTermPredication de of
                 Just (Left t) | eqPattern tsig te t -> False
                 _ -> True
           _ -> True) domainObls
       defOpSymbs = Set.fromList $ map (snd . head) grDomainDefs
         ++ map snd correctDefs
       wrongWithoutDefs = filter ((`Set.notMember` defOpSymbs) . snd)
         withOutDefs
       -- assemble all diagnostics
       ds = map (\ (a, (_, pos)) -> Diag
              Warning ("missing leading symbol in:\n " ++ formatAxiom a) pos)
              noLSyms
         ++ map (\ (a, t) -> Diag
              Warning ("definedness is not definitional:\n " ++ formatAxiom a)
              $ getRange t) wrongDefs
         ++ map (\ l@((_, t) : _) -> Diag Warning (unlines $
              ("multiple domain axioms for: " ++ showDoc t "")
              : map ((" " ++) . formatAxiom . fst) l) $ getRange t)
              multDomainDefs
         ++ map (\ (a, t) -> Diag
              Warning ("missing definedness condition for partial '"
                ++ showDoc t "' in:\n " ++ formatAxiom a)
              $ getRange t) wrongWithoutDefs
         ++ map (\ (da, _) -> Diag
              Warning ("non-complete domain axiom:\n " ++ formatAxiom da)
              $ getRange da) nonCompleteDomainDefs
         ++ map (\ (da, de) -> Diag
              Warning ("unexpected definedness condition:\n "
                ++ formatAxiom de
                ++ "\nin the presence of domain axiom:\n "
                ++ formatAxiom da) $ getRange de) nonEqDoms
       in if null ds then Nothing else Just $ Result ds Nothing
{-
call the symbols in the image of the signature morphism "new"
- each new sort must be a free type,
i.e. it must occur in a sort generation constraint that is marked as free
(Sort_gen_ax constrs True)
such that the sort is in srts,
where (srts,ops,_)=recover_Sort_gen_ax constrs
if not, output "don't know"
and there must be one term of that sort (inhabited)
if not, output "no"
- group the axioms according to their leading operation/predicate symbol,
i.e. the f resp. the p in
forall x_1:s_n .... x_n:s_n . f(t_1,...,t_m)=t
forall x_1:s_n .... x_n:s_n . phi => f(t_1,...,t_m)=t
Implication Application Strong_equation
forall x_1:s_n .... x_n:s_n . p(t_1,...,t_m)<=>phi
forall x_1:s_n .... x_n:s_n . phi1 => p(t_1,...,t_m)<=>phi
Implication Predication Equivalence
if there are axioms not being of this form, output "don't know"
-}
-- | Check the sort part of the extension: detects a trivial extension
-- (nothing added), new sorts that are not freely generated, generated
-- sorts that are not new, and non-inhabited sorts (-> 'Inconsistent').
-- Returns 'Nothing' when all sort checks pass.
checkSort :: Bool -> Set.Set SORT -> Set.Set SORT -> Set.Set SORT
  -> Set.Set SORT -> Set.Set SORT -> Sign f e -> Sign f e
  -> Maybe (Result (Conservativity, [FORMULA f]))
checkSort noSentence nSorts defSubsorts gSorts fSorts nefsorts sSig tSig
  | noSentence && Set.null nSorts =
      let cond = MapSet.null (diffOpMapSet (opMap tSig) $ opMap sSig)
            && MapSet.null (diffMapSet (predMap tSig) $ predMap sSig)
      in Just $ justHint (if cond then Def else Cons, [])
         $ (if cond then "neither symbols"
            else "neither sorts") ++ " nor sentences have been added"
  | not $ Set.null notFreeSorts =
      mkUnknown "some types are not freely generated" notFreeSorts
  | not $ Set.null nefsorts = mkWarn "some sorts are not inhabited"
      nefsorts $ Just (Inconsistent, [])
  | not $ Set.null genNotNew = mkUnknown "some defined sorts are not new"
      genNotNew
  | otherwise = Nothing
  where
    notFreeSorts = Set.intersection nSorts
      $ Set.difference gSorts fSorts
    genNotNew = Set.difference
      (Set.unions [defSubsorts, gSorts, fSorts]) nSorts
    mkWarn s i r = Just $ Result [mkDiag Warning s i] r
    mkUnknown s i = mkWarn s i Nothing
-- | Check the leading terms of the axioms: their arguments must consist
-- of variables and constructors only, and no variable may occur twice.
-- Returns warnings for violations, 'Nothing' when all checks pass.
checkLeadingTerms :: (FormExtension f, TermExtension f, Ord f)
  => Sign f e -> [FORMULA f] -> [OP_SYMB]
  -> Maybe (Result (Conservativity, [FORMULA f]))
checkLeadingTerms tsig fsn constructors = let
    ltp = mapMaybe leadingTermPredication fsn
    formatTerm = flip showDoc "" . simplifyCASLTerm tsig
    -- (position, description, argument terms) per leading application
    args = foldr (\ ei -> case ei of
      Left (Application os ts qs) ->
        ((qs, "term for " ++ show (opSymbName os), ts) :)
      Right (Predication ps ts qs) ->
        ((qs, "predicate " ++ show (predSymbName ps), ts) :)
      _ -> id) [] ltp
    ds = foldr (\ (qs, d, ts) l ->
      let vs = concatMap varOfTerm ts
          -- the occurrences beyond the first of each variable
          dupVs = vs \\ Set.toList (Set.fromList vs)
          nonCs = checkTerms tsig constructors ts
          td = " in leading " ++ d ++ ": "
      in map (\ v -> Diag Warning
           ("duplicate variable" ++ td ++ formatTerm v) qs) dupVs
         ++ map (\ t -> Diag Warning
              ("non-constructor" ++ td ++ formatTerm t)
              qs) nonCs
         ++ l) [] args
  in if null ds then Nothing else Just $ Result ds Nothing
-- check the sufficient completeness
--
-- For every axiom group reported by 'getNotComplete', produce a warning
-- naming the incompletely defined symbol and listing the defining
-- formulas plus any completeness conditions.  Returns 'Nothing' when
-- all definitions are complete.
checkIncomplete :: (FormExtension f, TermExtension f, Ord f)
  => Sign f e -> [FORMULA f] -> [OP_SYMB] -> [[FORMULA f]] -> [OP_SYMB]
  -> Maybe (Result (Conservativity, [FORMULA f]))
checkIncomplete sig obligations doms fsn cons =
  case getNotComplete sig doms fsn cons of
    [] -> Nothing
    incomplete -> let
        formatAxiom = showForm sig
      in Just $ Result
        (map (\ (Result ds mfs, fs@(hd : _)) -> let
           (lSym, pos) = leadingSymPos hd
           sname = case fmap extractLeadingSymb lSym of
             Just (Left opS) -> opSymbName opS
             Just (Right pS) -> predSymbName pS
             _ -> error "CASL.CCC.FreeTypes.<Symb_Name>"
           -- "is not" for definitive failures, "may not be" for
           -- merely suspected incompleteness (no hard diagnostics)
           in Diag Warning (intercalate "\n" $
             ("the definition of " ++ show sname
              ++ (if null ds then " may not be" else " is not") ++ " complete")
             : "the defining formula group is:"
             : map (\ (f, n) -> " " ++ shows n ". "
                    ++ formatAxiom f) (number fs)
             ++ map diagString ds
             ++ maybe []
                (map (\ (p, f) -> "possibly incomplete pattern for: " ++ p
                      ++ "\n with completeness condition: "
                      ++ formatAxiom f)) mfs
             ) pos)
           incomplete) $ Just (Cons, obligations)
-- | Alpha-convert each formula, threading the fresh-variable counter
-- through the list from right to left (matching the original foldr).
renameVars :: Int -> [FORMULA f] -> (Int, [FORMULA f])
renameVars c fs = case fs of
  [] -> (c, [])
  f : rest ->
    let (cRest, converted) = renameVars c rest
        (f', cNext) = alphaConvert cRest id f
    in (cNext, f' : converted)
-- | Run the termination proof on the (alpha-converted) defining
-- sentences.  Success keeps the computed status; failure or an
-- inconclusive proof downgrades to 'Cons' with a warning.
checkTerminal :: (FormExtension f, TermExtension f, Ord f)
  => Sign f e -> Conservativity -> [FORMULA f] -> [FORMULA f] -> [FORMULA f]
  -> IO (Result (Conservativity, [FORMULA f]))
checkTerminal sig conStatus obligations doms fsn =
  if null fsn then return $ justHint (conStatus, obligations)
    "no defining sentences"
  else do
    -- rename apart so domain axioms and sentences share no variables
    let (c, domains) = renameVars 1 doms
        fs_terminalProof = snd $ renameVars c fsn
    (proof, str) <- terminationProof sig fs_terminalProof domains
    return $ case proof of
      Just True -> justHint (conStatus, obligations) "termination succeeded"
      _ -> warning (Cons, obligations)
        (if isJust proof then "not terminating"
         else "cannot prove termination: " ++ str) nullRange
-- | Syntactic positivity check on a formula.  Atoms count as positive;
-- negation flips polarity; for non-equivalence relations the implication
-- ordering @c1 <= c2@ (False <= True) must hold, for equivalences both
-- sides must agree.
checkPos :: FORMULA f -> Bool
checkPos f = case f of
  Quantification _ _ f' _ -> checkPos f'
  Junction _ cs _ -> all checkPos cs
  Relation i1 c i2 _ -> let
      c1 = checkPos i1
      c2 = checkPos i2
    in if c == Equivalence then c1 == c2 else c1 <= c2
  Negation n _ -> not $ checkPos n
  Atom b _ -> b
  Predication {} -> True
  Membership {} -> True
  Definedness {} -> True
  Equation {} -> True
  Sort_gen_ax cs _ -> case cs of
    [c] -> case opSymbs c of
      [_] -> True
      {- just a single constructor creates a unique value
      even for multiple one-point components -}
      _ -> False
    _ -> False
  _ -> False
-- | Split a list by a partial projection: the first component collects
-- the projected values, the second the elements the projection rejects;
-- relative order is preserved in both.
partitionMaybe :: (a -> Maybe b) -> [a] -> ([b], [a])
partitionMaybe f =
  partitionEithers . map (\ a -> maybe (Right a) Left (f a))
{- -----------------------------------------------------------------------
function checkFreeType:
- check if leading symbols are new (not in the image of morphism),
if not, return them as obligations
- generated datatype is free
- if new sort is not etype or esort, it can not be empty.
- the leading terms consist of variables and constructors only,
if not, return Nothing
- split function leading_Symb into
leadingTermPredication
and
extractLeadingSymb
- collect all operation symbols from recover_Sort_gen_ax fconstrs
(= constructors)
- no variable occurs twice in a leading term, if not, return Nothing
- check that patterns do not overlap, if not, return obligations
This means:
in each group of the grouped axioms:
all patterns of leading terms/formulas are disjoint
this means: either leading symbol is a variable,
and there is just one axiom
otherwise, group axioms according to leading symbol
no symbol may be a variable
check recursively the arguments of
constructor in each group
- sufficient completeness
- termination proof
------------------------------------------------------------------------
free datatypes and recursive equations are consistent -}
-- | Main entry point: classify the new sentences, run the definitional,
-- sort, leading-term, completeness and termination checks in order, and
-- finally note whether the whole theory is positive.
checkFreeType :: (FormExtension f, TermExtension f, Ord f)
  => (Sign f e, [Named (FORMULA f)]) -> Morphism f e m
  -> [Named (FORMULA f)] -> IO (Result (Conservativity, [FORMULA f]))
checkFreeType (_, osens) m axs = do
  let sig = mtarget m
      sSig = imageOfMorphism m
      fs = getFs axs -- strip labels and generated sentences
      -- successive classification of the new sentences
      (exQuants, fs2) = partition isExQuanti fs
      (memShips, fs3) = partition isMembership fs2
      (sortGens1, axioms) = partition isSortGen fs3
      (freeSortGens, sortGens) = partition isFreeSortGen sortGens1
      (domains, axLessDoms) = partition isDomain axioms
      (genSorts1, cons1) =
        recoverSortsAndConstructors $ getFs osens
      (freeSorts, cons2) =
        recoverSortsAndConstructors freeSortGens
      (genSorts2, cons3) =
        recoverSortsAndConstructors sortGens
      -- old sorts/constructors are mapped along the morphism
      sortCons@(genSorts, cons) =
        (Set.unions
          [freeSorts, genSorts2, Set.map (mapSort $ sort_map m) genSorts1]
        , Set.toList $ Set.unions [cons2, cons3, Set.map (mapOpSymb m) cons1])
      oldSorts = sortSet sSig
      newSorts = Set.difference (sortSet sig) oldSorts
      emptySorts = emptySortSet sig
      nonInhabitedSorts = getNefsorts oldSorts newSorts emptySorts sortCons
      (subsortDefns, memShips2) = partitionMaybe isSubsortDef memShips
      defSubsorts = Set.fromList $ map (\ (s, _, _) -> s) subsortDefns
      dataStatus = getDataStatus newSorts defSubsorts genSorts
      defsForOld = getDefsForOld sSig axioms
      opsPredsAndExAxioms = defsForOld ++ exQuants ++ memShips2
      conStatus = getConStatus dataStatus opsPredsAndExAxioms
      memObl = infoSubsorts emptySorts subsortDefns
      axGroup = groupAxioms sig axLessDoms
      overLaps = getOverlapQuery sig axGroup
      obligations = opsPredsAndExAxioms ++ memObl ++ overLaps
      domSyms = lefts $ mapMaybe leadingSym domains
      -- the checks are tried in order; the first failing one wins
      ms =
        [ checkDefinitional sig axioms
        , checkSort (null axs) newSorts defSubsorts genSorts2 freeSorts
            nonInhabitedSorts sSig sig
        , checkLeadingTerms sig axioms cons
        , checkIncomplete sig obligations domSyms axGroup cons ]
  r <- case catMaybes ms of
    [] -> checkTerminal sig conStatus obligations domains axLessDoms
    a : _ -> return a
  return $ case r of
    Result ds Nothing ->
      case filter (not . checkPos . sentence) $ axs ++ osens of
        [] -> justHint (Cons, []) "theory is positive!"
        l -> let
            ps = map (\ f -> Diag Hint ("formula is not positive:\n "
                  ++ show (printTheoryFormula $ mapNamed (simplifyCASLSen sig) f))
                  $ getRange f) $ take 2 l
          in Result (ps ++ ds) Nothing
    _ -> r
{- | group the axioms according to their leading symbol,
output Nothing if there is some axiom in incorrect form -}
groupAxioms :: GetRange f => Sign f e -> [FORMULA f] -> [[FORMULA f]]
groupAxioms sig phis = map (map snd)
  -- partition by overloaded-equal operation or predicate symbol
  $ Rel.partList (\ (e1, _) (e2, _) -> case (e1, e2) of
      (Left o1, Left o2) -> sameOpSymbs sig o1 o2
      (Right (Qual_pred_name p1 t1 _), Right (Qual_pred_name p2 t2 _)) ->
        p1 == p2 && on (leqP sig) toPredType t1 t2
      _ -> False)
  -- axioms without a leading symbol are silently dropped here
  $ foldr (\ f -> case leadingSym f of
      Just ei -> ((ei, f) :)
      Nothing -> id) [] phis
-- | return the non-constructor terms of arguments of a leading term
--
-- Recurses through constructor applications; variables are accepted,
-- any other subterm is reported as offending.
checkTerms :: Sign f e -> [OP_SYMB] -> [TERM f] -> [TERM f]
checkTerms sig cons = concatMap checkT
  where checkT t = case unsortedTerm t of
          Qual_var {} -> []
          Application subop subts _ ->
            if isCons sig cons subop then concatMap checkT subts else [t]
          _ -> [t]
{- | check whether the operation symbol is a constructor
(or a related overloaded variant). -}
isCons :: Sign f e -> [OP_SYMB] -> OP_SYMB -> Bool
isCons sig cons os = or [ sameOpSymbs sig os c | c <- cons ]
-- | create all possible (ordered) pairs from a list
pairs :: [a] -> [(a, a)]
pairs xs = [ (x, y) | x : ys <- tails xs, y <- ys ]
-- | create the proof obligation for a pair of overlapped formulas
--
-- For operation definitions: if exactly one side is negated the other
-- result must be undefined; if both are negated nothing is required;
-- otherwise both results must be strongly equal.  For predicate
-- definitions the analogous conditions use the result axioms.
overlapQuery :: GetRange f => FORMULA f -> FORMULA f -> FORMULA f
overlapQuery a1 a2 =
  case leadingSym a1 of
    Just (Left _)
      | containNeg a1 && not (containNeg a2) ->
          mkImpl (conjunct [con1, con2])
            (mkNeg (Definedness resT2 nullRange))
      | containNeg a2 && not (containNeg a1) ->
          mkImpl (conjunct [con1, con2])
            (mkNeg (Definedness resT1 nullRange))
      | containNeg a1 && containNeg a2 -> trueForm
      | otherwise ->
          mkImpl (conjunct [con1, con2])
            (mkStEq resT1 resT2)
    Just (Right _)
      | containNeg a1 && not (containNeg a2) ->
          mkImpl (conjunct [con1, con2])
            (mkNeg resA2)
      | containNeg a2 && not (containNeg a1) ->
          mkImpl (conjunct [con1, con2])
            (mkNeg resA1)
      | containNeg a1 && containNeg a2 -> trueForm
      | otherwise ->
          mkImpl (conjunct [con1, con2])
            (conjunct [resA1, resA2])
    _ -> error "CASL.CCC.FreeTypes.<overlapQuery>"
  where [con1, con2] = map conditionAxiom [a1, a2]
        [resT1, resT2] = map resultTerm [a1, a2]
        [resA1, resA2] = map resultAxiom [a1, a2]
-- | Run the pattern-completeness check on every axiom group and keep
-- only the groups that produced diagnostics or open pattern conditions.
getNotComplete :: (Ord f, FormExtension f, TermExtension f)
  => Sign f e -> [OP_SYMB] -> [[FORMULA f]] -> [OP_SYMB]
  -> [(Result [(String, FORMULA f)], [FORMULA f])]
getNotComplete sig doms fsn constructors =
  let -- constructors indexed by result sort and by name
      consMap = foldr (\ (Qual_op_name o ot _) ->
        MapSet.insert (res_OP_TYPE ot) (o, ot)) MapSet.empty constructors
      consMap2 = foldr (\ (Qual_op_name o ot _) ->
        MapSet.insert o ot) MapSet.empty constructors
  in
  filter (\ (Result ds mfs, _) -> not (null ds)
          || maybe False (not . null) mfs)
  $ map (\ g ->
      (let l = map topIdOfAxiom g in
       case Set.toList . Set.fromList $ map fst l of
         [(p, i)] -> completePatterns sig doms consMap consMap2
           ([(showId p "", i)]
           , zip g $ map snd l)
         l2 -> fail $ "wrongly grouped leading terms "
           ++ show l2
      , g)) fsn
-- | Reversed stack of (constructor name, arity) describing the pattern
-- prefix matched so far; arity 0 marks a variable/wildcard position.
type LeadArgs = [(String, Int)]
-- | Render the next pattern argument from the stack: an empty stack
-- yields the placeholder (@p@ on the first call, @_@ afterwards);
-- arity 0 yields the stored name, otherwise the name applied to its
-- recursively rendered arguments.
getNextArg :: Bool -> String -> LeadArgs -> (Bool, String, LeadArgs)
getNextArg b p l = case l of
  [] -> (False, if b then p else "_", [])
  h : r -> case h of
    (i, c) -> if c == 0 then (b, i, r) else
      let (b1, sl, r2) = getNextN c b p r
      in (b1, i ++ "(" ++ intercalate ", " sl ++ ")", r2)
-- | Render the next @c@ pattern arguments, threading the first-use flag
-- and the remaining stack.
getNextN :: Int -> Bool -> String -> LeadArgs -> (Bool, [String], LeadArgs)
getNextN c b p l = if c <= 0 then (b, [], l) else
  let (b1, s, r) = getNextArg b p l
      (b2, sl, r2) = getNextN (c - 1) b1 p r
  in (b2, s : sl, r2)
-- | Render the whole pattern prefix; the stack is stored newest-first,
-- hence the reverse.
showLeadingArgs :: String -> LeadArgs -> String
showLeadingArgs p l = let (_, r, _) = getNextArg True p $ reverse l in r
-- | check whether the patterns of a function or predicate are complete
--
-- Walks the argument columns left to right: a column of variables is
-- skipped; a column with constructor applications is split per
-- constructor of the common result sort and checked recursively, with
-- variable rows expanded for every constructor ('checkConstructor').
-- When all columns are consumed, 'checkExhaustive' checks the
-- remaining conditions.
completePatterns :: (Ord f, FormExtension f, TermExtension f) => Sign f e
  -> [OP_SYMB] -> MapSet.MapSet SORT (OP_NAME, OP_TYPE)
  -> MapSet.MapSet OP_NAME OP_TYPE
  -> (LeadArgs, [(FORMULA f, [TERM f])])
  -> Result [(String, FORMULA f)]
completePatterns tsig doms consMap consMap2 (leadingArgs, pas)
  | all (null . snd) pas =
      let fs = checkExhaustive tsig doms $ map fst pas
      in return $ map (\ f -> (showLeadingArgs "" leadingArgs, f)) fs
  | any (null . snd) pas = fail "wrongly grouped leading terms"
  | otherwise = let hds = map (\ (f, hd : _) -> (f, hd)) pas in
      if all (isVar . snd) hds
      then let
          tls = map (\ (f, _ : tl) -> (f, tl)) pas
        in completePatterns tsig doms consMap consMap2
           (("_", 0) : leadingArgs, tls)
      else let
          consAppls@(_ : _) = mapMaybe (\ (f, t) -> case unsortedTerm t of
            Application (Qual_op_name o ot _) _ _ ->
              Just (f, o, Set.filter (sameOpTypes tsig ot)
                    $ MapSet.lookup o consMap2)
            _ -> Nothing) hds
          -- result sorts shared by all constructor applications
          consSrt = foldr1 Set.intersection
            $ map (\ (_, _, os) -> Set.map res_OP_TYPE os) consAppls
        in case filter (not . Set.null . (`MapSet.lookup` consMap))
               $ Set.toList consSrt of
          [] -> fail $
            "no common result type for constructors found: "
            ++ showDoc (map snd hds) ""
          cSrt : _ -> do
            let allCons = MapSet.lookup cSrt consMap
            when (Set.null allCons) . fail
              $ "no constructors for result type found: " ++ show cSrt
            let cons_group = map (\ (c, ct) -> (c, ct,
                  filter (\ (_, h : _) -> case unsortedTerm h of
                    Application (Qual_op_name o ot _) _ _ ->
                      c == o && sameOpTypes tsig ct ot
                    _ -> False) pas)) $ Set.toList allCons
                vars = filter (\ (_, h : _) -> isVar h) pas
            ffs <- mapM (\ f -> checkConstructor leadingArgs vars f
                    >>= completePatterns tsig doms consMap consMap2)
              cons_group
            return $ concat ffs
-- | Fresh variable terms c1, c2, ... of the given sorts.
mkVars :: [SORT] -> [TERM f]
mkVars srts =
  [ mkVarTerm (mkSimpleId ('c' : show i)) s
  | (i, s) <- zip [1 :: Int ..] srts ]
-- | Prepare the recursive completeness check for one constructor:
-- pushes the constructor onto the pattern prefix, expands variable rows
-- with fresh argument variables, and flattens the matching rows'
-- constructor arguments.  Warns when the constructor has no matching
-- row and no variable row covers it.
checkConstructor :: (Ord f, FormExtension f, TermExtension f)
  => LeadArgs -> [(FORMULA f, [TERM f])]
  -> (Id, OP_TYPE, [(FORMULA f, [TERM f])])
  -> Result (LeadArgs, [(FORMULA f, [TERM f])])
checkConstructor leadingArgs vars (c, ct, es) = do
  let args = args_OP_TYPE ct
      nL = (showId c "", length args) : leadingArgs
      varEqs = map (\ (f, _ : t) -> (f, mkVars args ++ t)) vars
      pat = showLeadingArgs
        (showId c "" ++ case args of
           [] -> ""
           l -> "(" ++ intercalate "," (map (const "_") l) ++ ")")
        leadingArgs
  case es of
    [] -> do
      when (null vars) $ justWarn ()
        $ "missing pattern for: " ++ pat
      return (nL, varEqs)
    _ ->
      return (nL, varEqs ++ map (\ (f, h : t) -> (f, arguOfTerm h ++ t)) es)
-- | get condition axiom without matching definedness condition
--
-- Splits the axiom into conditions and conclusion and drops those
-- definedness conditions whose leading term matches the conclusion's
-- leading term, provided that term's symbol has a domain axiom.
getCond :: (GetRange f, Eq f) => Sign f e -> [OP_SYMB] -> FORMULA f -> FORMULA f
getCond sig doms f =
  let (cs, e) = splitAxiom f
  in conjunct $ filter (\ c -> case leadingTermPredication e of
       l@(Just (Left (Application os _ _)))
         | any (sameOpSymbs sig os) doms
         -> case c of -- pattern term must be identical
              Definedness {} | leadingTermPredication c == l -> False
              _ -> True
       _ -> True) cs
-- | Check that the conditions of a group of equally-patterned axioms
-- are exhaustive: non-overlapping axioms contribute their condition as
-- an open obligation; overlapping pairs are merged into a disjunction
-- and checked recursively.
checkExhaustive :: (Ord f, FormExtension f, TermExtension f)
  => Sign f e -> [OP_SYMB] -> [FORMULA f] -> [FORMULA f]
checkExhaustive sig doms es = case es of
  f1 : rs ->
    let sfs = map (\ f -> (getSubstForm sig f1 f, f)) rs
        overlap = filter (isJust . maybeResult . fst) sfs
        simpAndQuant = quant sig . simplifyFormula id
    in case overlap of
         [] -> filter (not . is_True_atom)
             (map (simpAndQuant . getCond sig doms) [f1])
           ++ checkExhaustive sig doms rs
         (r, f2) : rrs -> let
             (f3, f4, ((s3, _), (s4, _))) = convert2Forms sig f1 f2 r
           in checkExhaustive sig doms
             $ (simpAndQuant
                . mkImpl (disjunct [ replaceVarsF s3 id $ getCond sig doms f3
                                   , replaceVarsF s4 id $ getCond sig doms f4 ])
                . replaceVarsF s3 id $ restAxiom f3) : map snd rrs
  [] -> []
|
spechub/Hets
|
CASL/CCC/FreeTypes.hs
|
gpl-2.0
| 29,070
| 0
| 32
| 8,621
| 9,780
| 5,021
| 4,759
| 537
| 14
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- {-# LANGUAGE TypeSynonymInstances #-}
module Main where
import Control.Applicative
import Control.Monad
import Foreign.C.Types
import Foreign.Marshal.Alloc
import Foreign.Ptr
import Foreign.Storable
import System.IO.Unsafe
--
import HROOT
-- | A 3D point of C doubles, laid out to match the ROOT branch
-- descriptor "x/D:y/D:z/D" (see the Storable instance below).
data Point = Point { px :: CDouble, py :: CDouble, pz :: CDouble }
-- | Marshal a 'Point' as three consecutive CDoubles (x, y, z).
instance Storable Point where
  sizeOf _ = 3 * sizeOf (undefined :: CDouble)
  alignment _ = alignment (undefined :: CDouble)
  -- read/write the three doubles by element offset into the same
  -- addresses the hand-computed byte offsets would hit
  peek ptr =
    let dbls = castPtr ptr :: Ptr CDouble
    in Point <$> peekElemOff dbls 0
             <*> peekElemOff dbls 1
             <*> peekElemOff dbls 2
  poke ptr (Point x y z) = do
    let dbls = castPtr ptr :: Ptr CDouble
    pokeElemOff dbls 0 x
    pokeElemOff dbls 1 y
    pokeElemOff dbls 2 z
main :: IO ()
main = do
  -- fixed seed so repeated runs produce the same tree
  tRandom <- newTRandom 65535
  let generator = gaus tRandom 0 2
  alloca $ \(ptrpnt :: Ptr Point) -> do
    tree <- newTTree "T" "an example" 99
    -- "x/D:y/D:z/D" must match the Storable layout of Point
    -- NOTE(review): 'br' is bound but never used -- presumably kept to
    -- document the branch handle; confirm it need not be freed.
    br <- branch1 tree "point" (castPtr ptrpnt) "x/D:y/D:z/D" 32000
    let go = do p <- Point <$> generator <*> generator <*> generator
                poke ptrpnt p
                fillTree tree
    replicateM_ 1000000 go
    saveAs tree "treetest.root" ""
    delete tree
|
wavewave/HROOT-generate
|
HROOT-generate/template/HROOT/example/treetest.hs
|
gpl-3.0
| 1,560
| 0
| 19
| 391
| 506
| 260
| 246
| 36
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Main where
import Data.String ( IsString(fromString) )
import Foreign.C.Types ( CDouble, CInt )
import Foreign.C.String ( CString, newCString )
import Foreign.Marshal.Alloc ( alloca )
import Foreign.Storable ( poke )
import System.IO.Unsafe ( unsafePerformIO )
--
import HROOT
-- | Orphan instance letting string literals be used as 'CString's.
-- NOTE(review): every literal allocates via 'newCString' inside
-- 'unsafePerformIO' and is never freed -- fine for this short example,
-- but a memory leak in long-running code.
instance IsString CString where
  fromString s = unsafePerformIO $ newCString s
main :: IO ()
main = do
  tcanvas <- newTCanvas ("Test"::CString) ("Test"::CString) 640 480
  -- 100x100 bins over [-5, 5] x [-5, 5]
  h2 <- newTH2F ("test"::CString) ("test"::CString) 100 (-5.0) 5.0 100 (-5.0) 5.0
  -- fixed seed; both axes drawn from the same Gaussian (mean 0, sigma 2)
  tRandom <- newTRandom 65535
  let generator = gaus tRandom 0 2
  let go n | n < 0 = return ()
           | otherwise = do
               histfill generator generator h2
               go (n-1)
  go 1000000
  draw h2 ("lego"::CString)
  saveAs tcanvas ("random2d.pdf"::CString) (""::CString)
  saveAs tcanvas ("random2d.jpg"::CString) (""::CString)
  saveAs tcanvas ("random2d.png"::CString) (""::CString)
  delete h2
  delete tcanvas
-- | Draw one sample from each distribution and record the pair in the
-- 2D histogram.
histfill :: IO CDouble -> IO CDouble-> TH2F -> IO ()
histfill distX distY hist = do
  xv <- distX
  yv <- distY
  _ <- fill2 hist xv yv
  return ()
|
wavewave/HROOT
|
examples/random2d.hs
|
gpl-3.0
| 1,264
| 0
| 15
| 285
| 452
| 232
| 220
| 36
| 1
|
-- Copyright (c) 2015 Eric McCorkle. All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
-- 1. Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
-- 2. Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
-- 3. Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -funbox-strict-fields -Wall -Werror #-}
-- | Canonical formatting for the Salt language. This module defines
-- functions for canonical pretty-printing of the Salt AST, as well
-- collections of 'Doc's representing Salt syntax fragments.
module Language.Salt.Format(
-- * Low-Level Formatting Functions
recordDoc,
tupleDoc,
listDoc,
mapDoc,
compoundApplyDoc,
blockDoc,
stmsDoc,
casesDoc,
nestLevel
) where
import Text.Format
-- | Standard indentation increment used by the layout combinators below.
nestLevel :: Int
nestLevel = 2
-- | Format a list of @(field, value)@ bindings representing a record
-- value.  There are two possible ways to do this:
--
-- > (field1 = value1, field2 = value2, ...)
--
-- > (field1 = value1,
-- >  field2 = value2,
-- >  ...)
recordDoc :: [(Doc, Doc)]
          -- ^ A list of @(field, value)@ bindings
          -> Doc
recordDoc binds =
  let
    -- bindings that may break after the '=' sign
    softlines = map (\(field, val) -> field <+> equals </>
                                      nest nestLevel val) binds
    -- bindings forced onto one line
    nosoftlines = map (\(field, val) -> field <+> equals <+> val) binds
    nobreaks = hsep (punctuate comma nosoftlines)
    alignbreaks = parens (align (vsep (punctuate comma softlines)))
    -- use the shared 'nestLevel' constant instead of the magic 2 the
    -- sibling layouts already abstract over
    nestbreaks = lparen <!> nest nestLevel (vsep (punctuate comma softlines)) <!>
                 rparen
  in case flatten nobreaks of
    Just nolines -> choose [parens nolines, alignbreaks, nestbreaks]
    Nothing -> choose [ alignbreaks, nestbreaks ]
-- | Format a list of 'Doc's representing a tuple value.  There are
-- two possible ways to do this:
--
-- > (value1, value2, ...)
--
-- > (value1,
-- >  value2,
-- >  ...)
tupleDoc :: [Doc]
         -- ^ The fields of the tuple.
         -> Doc
tupleDoc fields =
  let
    nobreaks = hsep (punctuate comma fields)
    alignbreaks = parens (align (vsep (punctuate comma fields)))
    -- use the shared 'nestLevel' constant instead of a magic 2
    nestbreaks = lparen <!> nest nestLevel (vsep (punctuate comma fields)) <!>
                 rparen
  in case flatten nobreaks of
    Just nolines -> choose [parens nolines, alignbreaks, nestbreaks]
    Nothing -> choose [ alignbreaks, nestbreaks ]
-- | Format a list of 'Doc's representing a list value.  There are
-- three possible ways to do this:
--
-- > [value1, value2, ...]
--
-- > preceding [value1,
-- >            value2,
-- >            ...]
--
-- > preceding [
-- >     value1,
-- >     value2,
-- >     ...
-- >   ]
listDoc :: [Doc]
        -- ^ The fields of the list.
        -> Doc
listDoc fields =
  let
    nobreaks = hsep (punctuate comma fields)
    alignbreaks = brackets (align (vsep (punctuate comma fields)))
    -- use the shared 'nestLevel' constant instead of a magic 2
    nestbreaks = lbrack <!> nest nestLevel (vsep (punctuate comma fields)) <!>
                 rbrack
  in case flatten nobreaks of
    Just nolines -> choose [brackets nolines, alignbreaks, nestbreaks]
    Nothing -> choose [ alignbreaks, nestbreaks ]
-- | Format a map as a list of key/value pairs.
mapDoc :: [(Doc, Doc)]
       -- ^ The @(key, value)@ pairs for the map.
       -> Doc
mapDoc entries = listDoc [ tupleDoc [key, val] | (key, val) <- entries ]
-- | Format a 'Doc' and a list of @(field, value)@ bindings
-- representing arguments.  There are three possible ways to do this:
--
-- > name (field1 = value1, field2 = value2, ...)
--
-- > name (field1 = value1,
-- >       field2 = value2,
-- >       ...)
--
-- > name (
-- >     field1 = value1,
-- >     field2 = value2
-- >     ...
-- >   )
compoundApplyDoc :: Doc
                 -- ^ The function or constructor.
                 -> [(Doc, Doc)]
                 -- ^ A list of @(field, value)@ bindings
                 -> Doc
compoundApplyDoc name binds = name <+> recordDoc binds
-- | Format a list of 'Doc's representing statements in a block.
-- There are two possible ways to do this:
--
-- > { value1; value2; ... }
--
-- > preceding {
-- >   value1;
-- >   value2;
-- >   ...
-- > }
blockDoc :: [Doc]
         -- ^ The content of the block
         -> Doc
blockDoc stms =
  let
    onOneLine = hsep (punctuate semi stms)
    onManyLines = vsep (punctuate semi stms)
    breakopts =
      [ nest nestLevel (lbrace <!> nest nestLevel onManyLines <!> rbrace) ]
  in case flatten onOneLine of
    Just nolines -> choose (lbrace <+> nolines <+> rbrace : breakopts)
    Nothing -> choose breakopts
-- | Format a list of 'Doc's representing statements.
-- There are two possible ways to do this:
--
-- > value1; value2; ...
--
-- > value1;
-- > value2;
-- > ...
stmsDoc :: [Doc]
        -- ^ The content of the block
        -> Doc
stmsDoc stms =
  let
    onOneLine = hsep (punctuate semi stms)
    onManyLines = vsep (punctuate semi stms)
  in case flatten onOneLine of
    Just nolines -> choose [ nolines, onManyLines ]
    Nothing -> onManyLines
-- | Format a 'Doc' and a list of bindings representing cases in a
-- pattern group. There are three possible ways to do this:
--
-- > preceeding case1 | case2 | ...
--
-- > preceeding case1
-- > | case2
-- > | ...
--
-- > preceeding
-- > case1
-- > | case2
-- > | ...
casesDoc :: [Doc]
         -- ^ The cases.
         -> Doc
casesDoc cases =
  let
    -- flat layout separates with " | "; the broken layouts put "| " at
    -- line starts, hence the different separator strings
    nobreaks = hsep (punctuate (string " | ") cases)
    breaks = vsep (punctuate (string "| ") cases)
    breakopts = [ alignOffset (-1) breaks,
                  nest nestLevel (hardline <> string " " <> breaks) ]
  in case flatten nobreaks of
    Just nolines -> choose (parens nolines : breakopts)
    Nothing -> choose breakopts
|
emc2/saltlang
|
src/salt/Language/Salt/Format.hs
|
bsd-3-clause
| 6,863
| 0
| 15
| 1,724
| 1,147
| 650
| 497
| 84
| 2
|
--{-# OPTIONS_GHC -fglasgow-exts -fallow-undecidable-instances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE UndecidableInstances #-}
-- | A monad transformer for Maybe
module Control.Monad.Maybe ( MaybeT
, runMaybeT
, module Control.Monad
, module Control.Monad.Trans
) where
import Control.Applicative (Alternative (..))
import Control.Monad (mplus, mzero)
import Control.Monad.Reader
import Control.Monad.State
import Control.Monad.Trans
import Control.Monad.Writer
-- | A monad transformer layering 'Maybe'-style failure over an inner
-- monad @m@: a computation either produces @Just a@ or fails with
-- @Nothing@, short-circuiting the rest of the pipeline.
newtype MaybeT m a = MaybeT { runMaybeT :: m (Maybe a) }

instance Functor f => Functor (MaybeT f) where
  -- Map under both the inner functor and the Maybe layer.
  fmap f x = MaybeT $ fmap f <$> runMaybeT x

instance Monad m => Applicative (MaybeT m) where
  -- Defined via the Monad instance below.
  pure = return
  (<*>) = ap

instance Monad m => Alternative (MaybeT m) where
  -- Alternative mirrors MonadPlus: empty fails, (<|>) tries the left
  -- computation first and falls back to the right on Nothing.
  (<|>) = mplus
  empty = mzero

instance Monad m => Monad (MaybeT m) where
  return = MaybeT . return . Just
  -- Bind short-circuits: a Nothing result skips the continuation.
  x >>= f = MaybeT $ runMaybeT x >>= maybe (return Nothing) (runMaybeT . f)
  -- NOTE(review): relies on the pre-MonadFail Monad class (fail as a
  -- Monad method); on GHC >= 8.8 this belongs in a MonadFail instance —
  -- confirm the targeted GHC/base version.
  fail _ = MaybeT $ return Nothing

instance Monad m => MonadPlus (MaybeT m) where
  mzero = MaybeT $ return Nothing
  -- Run the left computation; only run the right one if the left failed.
  mplus x y = MaybeT $ do
    mx <- runMaybeT x
    case mx of
      Nothing -> runMaybeT y
      Just _ -> return mx

instance MonadTrans MaybeT where
  -- Lift an inner action by wrapping its result in Just (never fails).
  lift = MaybeT . liftM Just

instance MonadIO m => MonadIO (MaybeT m) where
  liftIO = lift . liftIO

instance MonadState s m => MonadState s (MaybeT m) where
  -- Pass state operations straight through to the inner monad.
  get = lift get
  put = lift . put

instance MonadReader r m => MonadReader r (MaybeT m) where
  ask = lift ask
  -- local modifies the environment for the whole wrapped computation.
  local f = MaybeT . local f . runMaybeT

instance (Monoid w, MonadWriter w m) => MonadWriter w (MaybeT m) where
  tell = lift . tell
  -- listen collects the inner log; on failure the log is discarded
  -- along with the result (Nothing is returned without the log pair).
  listen m = MaybeT $ do
    (mv, w) <- listen (runMaybeT m)
    case mv of
      Nothing -> return Nothing
      Just v -> return $ Just (v, w)
  -- pass applies the returned log-transformer only on success.
  pass m = MaybeT $ do
    mvf <- runMaybeT m
    case mvf of
      Nothing -> return Nothing
      Just (v,f) -> pass $ return (Just v, f)
|
themattchan/tandoori
|
library/Control/Monad/Maybe.hs
|
bsd-3-clause
| 2,135
| 0
| 15
| 626
| 720
| 372
| 348
| 55
| 0
|
module Grammatik.CF.Nullable where
-- $Id$
import Grammatik.Type
import Autolib.Set
import Autolib.Util.Fix
import Control.Monad ( guard )
-- | All variables V with V ->> Eps, i.e. the nullable nonterminals.
-- Computed as a least fixed point: a left-hand side is nullable when
-- some rule for it has a right-hand side consisting entirely of
-- already-nullable symbols (the empty right-hand side trivially
-- qualifies).  Only rules whose left-hand side is a single symbol are
-- considered — the @[ lhs ]@ pattern silently filters the rest.
nullable :: Grammatik -> Set Char
nullable g = fix ( \ ns -> mkSet $ do
    ( [ lhs ] , rhs ) <- rules g
    guard $ and [ x `elementOf` ns | x <- rhs ]
    return lhs ) emptySet

-- | Does the grammar generate the empty word?  True iff the start
-- symbol is nullable.
creates_epsilon :: Grammatik -> Bool
creates_epsilon g = startsymbol g `elementOf` nullable g
|
florianpilz/autotool
|
src/Grammatik/CF/Nullable.hs
|
gpl-2.0
| 489
| 2
| 15
| 104
| 154
| 85
| 69
| 12
| 1
|
{-
Copyright 2015 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Foreign.ForeignPtr.Safe (module M) where
import "base" Foreign.ForeignPtr.Safe as M
|
Ye-Yong-Chi/codeworld
|
codeworld-base/src/Foreign/ForeignPtr/Safe.hs
|
apache-2.0
| 761
| 0
| 4
| 136
| 25
| 19
| 6
| 4
| 0
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# OPTIONS_HADDOCK hide #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.VertexAttributes
-- Copyright : (c) Sven Panne 2002-2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- This is a purely internal module for auxiliary vertex attributes.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.VertexAttributes (
TexCoord1(..), TexCoord2(..), TexCoord3(..), TexCoord4(..),
Normal3(..),
FogCoord1(..),
Color3(..), Color4(..),
Index1(..)
) where
import Control.Applicative
import Control.Monad
import Data.Foldable
import Data.Ix
import Data.Traversable
import Data.Typeable
import Foreign.Marshal.Array
import Foreign.Ptr
import Foreign.Storable
--------------------------------------------------------------------------------
-- | Texture coordinates with /t/=0, /r/=0, and /q/=1.
newtype TexCoord1 a = TexCoord1 a
   deriving (Eq, Ord, Ix, Bounded, Show, Read, Typeable)

instance Functor TexCoord1 where
   fmap f (TexCoord1 a) = TexCoord1 (f a)

instance Applicative TexCoord1 where
   pure = TexCoord1
   TexCoord1 f <*> TexCoord1 a = TexCoord1 (f a)

instance Foldable TexCoord1 where
   foldr f z (TexCoord1 a) = f a z
   foldl f z (TexCoord1 a) = f z a
   foldr1 _ (TexCoord1 a) = a
   foldl1 _ (TexCoord1 a) = a

instance Traversable TexCoord1 where
   traverse f (TexCoord1 a) = TexCoord1 <$> f a
   sequenceA (TexCoord1 a) = TexCoord1 <$> a
   mapM f (TexCoord1 a) = liftM TexCoord1 (f a)
   sequence (TexCoord1 a) = liftM TexCoord1 a

instance Storable a => Storable (TexCoord1 a) where
   -- Lazy patterns: size and alignment are derived from the component
   -- type alone, without forcing the value.
   sizeOf ~(TexCoord1 a) = sizeOf a
   alignment ~(TexCoord1 a) = alignment a
   peek = peekApplicativeTraversable
   poke = pokeFoldable
--------------------------------------------------------------------------------
-- | Texture coordinates with /r/=0 and /q/=1.
data TexCoord2 a = TexCoord2 !a !a
   deriving (Eq, Ord, Ix, Bounded, Show, Read, Typeable)

instance Functor TexCoord2 where
   fmap f (TexCoord2 a b) = TexCoord2 (f a) (f b)

instance Applicative TexCoord2 where
   pure v = TexCoord2 v v
   TexCoord2 f g <*> TexCoord2 a b = TexCoord2 (f a) (g b)

instance Foldable TexCoord2 where
   foldr f z (TexCoord2 a b) = f a (f b z)
   foldl f z (TexCoord2 a b) = f (f z a) b
   foldr1 f (TexCoord2 a b) = f a b
   foldl1 f (TexCoord2 a b) = f a b

instance Traversable TexCoord2 where
   traverse f (TexCoord2 a b) = TexCoord2 <$> f a <*> f b
   sequenceA (TexCoord2 a b) = TexCoord2 <$> a <*> b
   mapM f (TexCoord2 a b) = liftM2 TexCoord2 (f a) (f b)
   sequence (TexCoord2 a b) = liftM2 TexCoord2 a b

instance Storable a => Storable (TexCoord2 a) where
   -- Components are stored contiguously, so the size is twice the
   -- component size; alignment follows the component type.
   sizeOf ~(TexCoord2 a _) = 2 * sizeOf a
   alignment ~(TexCoord2 a _) = alignment a
   peek = peekApplicativeTraversable
   poke = pokeFoldable
--------------------------------------------------------------------------------
-- | Texture coordinates with /q/=1.
data TexCoord3 a = TexCoord3 !a !a !a
   deriving (Eq, Ord, Ix, Bounded, Show, Read, Typeable)

instance Functor TexCoord3 where
   fmap f (TexCoord3 a b c) = TexCoord3 (f a) (f b) (f c)

instance Applicative TexCoord3 where
   pure v = TexCoord3 v v v
   TexCoord3 f g h <*> TexCoord3 a b c = TexCoord3 (f a) (g b) (h c)

instance Foldable TexCoord3 where
   foldr f z (TexCoord3 a b c) = f a (f b (f c z))
   foldl f z (TexCoord3 a b c) = f (f (f z a) b) c
   foldr1 f (TexCoord3 a b c) = f a (f b c)
   foldl1 f (TexCoord3 a b c) = f (f a b) c

instance Traversable TexCoord3 where
   traverse f (TexCoord3 a b c) = TexCoord3 <$> f a <*> f b <*> f c
   sequenceA (TexCoord3 a b c) = TexCoord3 <$> a <*> b <*> c
   mapM f (TexCoord3 a b c) = liftM3 TexCoord3 (f a) (f b) (f c)
   sequence (TexCoord3 a b c) = liftM3 TexCoord3 a b c

instance Storable a => Storable (TexCoord3 a) where
   sizeOf ~(TexCoord3 a _ _) = 3 * sizeOf a
   alignment ~(TexCoord3 a _ _) = alignment a
   peek = peekApplicativeTraversable
   poke = pokeFoldable
--------------------------------------------------------------------------------
-- | Fully-fledged four-dimensional texture coordinates.
data TexCoord4 a = TexCoord4 !a !a !a !a
   deriving (Eq, Ord, Ix, Bounded, Show, Read, Typeable)

instance Functor TexCoord4 where
   fmap f (TexCoord4 a b c d) = TexCoord4 (f a) (f b) (f c) (f d)

instance Applicative TexCoord4 where
   pure v = TexCoord4 v v v v
   TexCoord4 f g h i <*> TexCoord4 a b c d = TexCoord4 (f a) (g b) (h c) (i d)

instance Foldable TexCoord4 where
   foldr f z (TexCoord4 a b c d) = f a (f b (f c (f d z)))
   foldl f z (TexCoord4 a b c d) = f (f (f (f z a) b) c) d
   foldr1 f (TexCoord4 a b c d) = f a (f b (f c d))
   foldl1 f (TexCoord4 a b c d) = f (f (f a b) c) d

instance Traversable TexCoord4 where
   traverse f (TexCoord4 a b c d) = TexCoord4 <$> f a <*> f b <*> f c <*> f d
   sequenceA (TexCoord4 a b c d) = TexCoord4 <$> a <*> b <*> c <*> d
   mapM f (TexCoord4 a b c d) = liftM4 TexCoord4 (f a) (f b) (f c) (f d)
   sequence (TexCoord4 a b c d) = liftM4 TexCoord4 a b c d

instance Storable a => Storable (TexCoord4 a) where
   sizeOf ~(TexCoord4 a _ _ _) = 4 * sizeOf a
   alignment ~(TexCoord4 a _ _ _) = alignment a
   peek = peekApplicativeTraversable
   poke = pokeFoldable
--------------------------------------------------------------------------------
-- | A three-dimensional normal.
data Normal3 a = Normal3 !a !a !a
   deriving (Eq, Ord, Ix, Bounded, Show, Read, Typeable)

instance Functor Normal3 where
   fmap f (Normal3 a b c) = Normal3 (f a) (f b) (f c)

instance Applicative Normal3 where
   pure v = Normal3 v v v
   Normal3 f g h <*> Normal3 a b c = Normal3 (f a) (g b) (h c)

instance Foldable Normal3 where
   foldr f z (Normal3 a b c) = f a (f b (f c z))
   foldl f z (Normal3 a b c) = f (f (f z a) b) c
   foldr1 f (Normal3 a b c) = f a (f b c)
   foldl1 f (Normal3 a b c) = f (f a b) c

instance Traversable Normal3 where
   traverse f (Normal3 a b c) = Normal3 <$> f a <*> f b <*> f c
   sequenceA (Normal3 a b c) = Normal3 <$> a <*> b <*> c
   mapM f (Normal3 a b c) = liftM3 Normal3 (f a) (f b) (f c)
   sequence (Normal3 a b c) = liftM3 Normal3 a b c

instance Storable a => Storable (Normal3 a) where
   sizeOf ~(Normal3 a _ _) = 3 * sizeOf a
   alignment ~(Normal3 a _ _) = alignment a
   peek = peekApplicativeTraversable
   poke = pokeFoldable
--------------------------------------------------------------------------------
-- | A fog coordinate.
newtype FogCoord1 a = FogCoord1 a
   deriving (Eq, Ord, Ix, Bounded, Show, Read, Typeable)

instance Functor FogCoord1 where
   fmap f (FogCoord1 a) = FogCoord1 (f a)

instance Applicative FogCoord1 where
   pure = FogCoord1
   FogCoord1 f <*> FogCoord1 a = FogCoord1 (f a)

instance Foldable FogCoord1 where
   foldr f z (FogCoord1 a) = f a z
   foldl f z (FogCoord1 a) = f z a
   foldr1 _ (FogCoord1 a) = a
   foldl1 _ (FogCoord1 a) = a

instance Traversable FogCoord1 where
   traverse f (FogCoord1 a) = FogCoord1 <$> f a
   sequenceA (FogCoord1 a) = FogCoord1 <$> a
   mapM f (FogCoord1 a) = liftM FogCoord1 (f a)
   sequence (FogCoord1 a) = liftM FogCoord1 a

instance Storable a => Storable (FogCoord1 a) where
   sizeOf ~(FogCoord1 a) = sizeOf a
   alignment ~(FogCoord1 a) = alignment a
   peek = peekApplicativeTraversable
   poke = pokeFoldable
--------------------------------------------------------------------------------
-- | An RGBA color with /A/=1.
data Color3 a = Color3 !a !a !a
   deriving (Eq, Ord, Ix, Bounded, Show, Read, Typeable)

instance Functor Color3 where
   fmap f (Color3 a b c) = Color3 (f a) (f b) (f c)

instance Applicative Color3 where
   pure v = Color3 v v v
   Color3 f g h <*> Color3 a b c = Color3 (f a) (g b) (h c)

instance Foldable Color3 where
   foldr f z (Color3 a b c) = f a (f b (f c z))
   foldl f z (Color3 a b c) = f (f (f z a) b) c
   foldr1 f (Color3 a b c) = f a (f b c)
   foldl1 f (Color3 a b c) = f (f a b) c

instance Traversable Color3 where
   traverse f (Color3 a b c) = Color3 <$> f a <*> f b <*> f c
   sequenceA (Color3 a b c) = Color3 <$> a <*> b <*> c
   mapM f (Color3 a b c) = liftM3 Color3 (f a) (f b) (f c)
   sequence (Color3 a b c) = liftM3 Color3 a b c

instance Storable a => Storable (Color3 a) where
   sizeOf ~(Color3 a _ _) = 3 * sizeOf a
   alignment ~(Color3 a _ _) = alignment a
   peek = peekApplicativeTraversable
   poke = pokeFoldable
--------------------------------------------------------------------------------
-- | A fully-fledged RGBA color.
data Color4 a = Color4 !a !a !a !a
   deriving (Eq, Ord, Ix, Bounded, Show, Read, Typeable)

instance Functor Color4 where
   fmap f (Color4 a b c d) = Color4 (f a) (f b) (f c) (f d)

instance Applicative Color4 where
   pure v = Color4 v v v v
   Color4 f g h i <*> Color4 a b c d = Color4 (f a) (g b) (h c) (i d)

instance Foldable Color4 where
   foldr f z (Color4 a b c d) = f a (f b (f c (f d z)))
   foldl f z (Color4 a b c d) = f (f (f (f z a) b) c) d
   foldr1 f (Color4 a b c d) = f a (f b (f c d))
   foldl1 f (Color4 a b c d) = f (f (f a b) c) d

instance Traversable Color4 where
   traverse f (Color4 a b c d) = Color4 <$> f a <*> f b <*> f c <*> f d
   sequenceA (Color4 a b c d) = Color4 <$> a <*> b <*> c <*> d
   mapM f (Color4 a b c d) = liftM4 Color4 (f a) (f b) (f c) (f d)
   sequence (Color4 a b c d) = liftM4 Color4 a b c d

instance Storable a => Storable (Color4 a) where
   sizeOf ~(Color4 a _ _ _) = 4 * sizeOf a
   alignment ~(Color4 a _ _ _) = alignment a
   peek = peekApplicativeTraversable
   poke = pokeFoldable
--------------------------------------------------------------------------------
-- | A color index.
newtype Index1 a = Index1 a
   deriving (Eq, Ord, Ix, Bounded, Show, Read, Typeable)

instance Functor Index1 where
   fmap f (Index1 a) = Index1 (f a)

instance Applicative Index1 where
   pure = Index1
   Index1 f <*> Index1 a = Index1 (f a)

instance Foldable Index1 where
   foldr f z (Index1 a) = f a z
   foldl f z (Index1 a) = f z a
   foldr1 _ (Index1 a) = a
   foldl1 _ (Index1 a) = a

instance Traversable Index1 where
   traverse f (Index1 a) = Index1 <$> f a
   sequenceA (Index1 a) = Index1 <$> a
   mapM f (Index1 a) = liftM Index1 (f a)
   sequence (Index1 a) = liftM Index1 a

instance Storable a => Storable (Index1 a) where
   sizeOf ~(Index1 a) = sizeOf a
   alignment ~(Index1 a) = alignment a
   peek = peekApplicativeTraversable
   poke = pokeFoldable
--------------------------------------------------------------------------------
-- | Read a @t a@ from memory by peeking each component at consecutive
-- element offsets from the given base pointer.
peekApplicativeTraversable :: (Applicative t, Traversable t, Storable a) => Ptr (t a) -> IO (t a)
peekApplicativeTraversable p = Data.Traversable.mapM peek (addresses p)

-- | The per-component address of each slot: 'pure' replicates the base
-- pointer into every slot, then 'mapAccumL' advances slot /i/ by /i/
-- elements.
addresses :: (Applicative t, Traversable t, Storable a) => Ptr (t a) -> t (Ptr a)
addresses p = snd (mapAccumL nextPtr 0 (pure (castPtr p)))

-- | Accumulator step: yield the pointer advanced by the current element
-- offset and bump the offset for the next slot.
nextPtr :: Storable a => Int -> Ptr a -> (Int, Ptr a)
nextPtr offset p = (offset + 1, advancePtr p offset)
--------------------------------------------------------------------------------
-- | Write every component of a foldable structure to memory, laying the
-- components out contiguously starting at the given pointer.
pokeFoldable :: (Foldable t, Storable a) => Ptr (t a) -> t a -> IO ()
pokeFoldable ptr xs = do
   _ <- foldlM pokeAndAdvance (castPtr ptr) xs
   return ()

-- | Write one value and return the address immediately past it.
pokeAndAdvance :: Storable a => Ptr a -> a -> IO (Ptr a)
pokeAndAdvance ptr value = do
   poke ptr value
   return (ptr `plusPtr` sizeOf value)
|
hesiod/OpenGL
|
src/Graphics/Rendering/OpenGL/GL/VertexAttributes.hs
|
bsd-3-clause
| 11,930
| 0
| 12
| 2,644
| 5,316
| 2,753
| 2,563
| 265
| 1
|
-- Two tiny enumerations with no derived instances; this file looks like
-- type-inference test input (path suggests a typechecker test suite).
data Foo = Foo1 | Foo2
data Bar = Bar1 | Bar2

-- | Exercises simple let-bindings and a tuple-pattern binding over the
-- locally defined types; result type is (Foo, Bar, Foo, Bar).
test = let v1 = Foo1
           v2 = Bar1
           (v3, v4) = (Foo2, Bar2)
       in (v1, v2, v3, v4)
|
bitemyapp/tandoori
|
input/var.hs
|
bsd-3-clause
| 161
| 0
| 9
| 71
| 72
| 42
| 30
| 6
| 1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="pl-PL">
  <title>Run Applications | ZAP Extensions</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Zawartość</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Indeks</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Szukaj</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Ulubione</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/invoke/src/main/javahelp/org/zaproxy/zap/extension/invoke/resources/help_pl_PL/helpset_pl_PL.hs
|
apache-2.0
| 985
| 81
| 41
| 159
| 407
| 213
| 194
| -1
| -1
|
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fr-FR">
  <title>Image Location and Privacy Scanner | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset>
|
thc202/zap-extensions
|
addOns/imagelocationscanner/src/main/javahelp/org/zaproxy/zap/extension/imagelocationscanner/resources/help_fr_FR/helpset_fr_FR.hs
|
apache-2.0
| 995
| 83
| 52
| 162
| 402
| 212
| 190
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
{-| Utility functions for several parsers
This module holds the definition for some utility functions for two
parsers. The parser for the @/proc/stat@ file and the parser for the
@/proc/diskstats@ file.
-}
{-
Copyright (C) 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Parsers where
import Control.Applicative ((*>))
import qualified Data.Attoparsec.Text as A
import Data.Attoparsec.Text (Parser)
import Data.Text (unpack)
-- * Utility functions

-- | Our own space-skipping function, because A.skipSpace also skips
-- newline characters. It skips ZERO or more spaces, so it does not
-- fail if there are no spaces.
skipSpaces :: Parser ()
skipSpaces = A.skipWhile A.isHorizontalSpace

-- | A parser recognizing a number preceded by spaces.
numberP :: Parser Int
numberP = skipSpaces *> A.decimal

-- | A parser recognizing a word preceded by spaces; it consumes input
-- up to (but not including) the next horizontal space.
stringP :: Parser String
stringP = skipSpaces *> fmap unpack (A.takeWhile $ not . A.isHorizontalSpace)
|
apyrgio/ganeti
|
src/Ganeti/Parsers.hs
|
bsd-2-clause
| 2,266
| 0
| 10
| 353
| 135
| 81
| 54
| 12
| 1
|
{-# LANGUAGE MultiParamTypeClasses, RankNTypes #-}
module T15438 where
class C a b

-- | Takes a rank-2 polymorphic function with a class constraint.  This
-- file sits under tests/typecheck/should_fail, so GHC is expected to
-- reject it — the code is intentionally not "fixable".
foo :: (forall a b. C a b => b -> b) -> Int
foo = error "urk"
|
sdiehl/ghc
|
testsuite/tests/typecheck/should_fail/T15438.hs
|
bsd-3-clause
| 149
| 0
| 9
| 34
| 53
| 29
| 24
| -1
| -1
|
{-# LANGUAGE RecordWildCards, ViewPatterns #-}
module Distribution.Server.Features.PreferredVersions.Backup
( restorePreferredVersions
, backupPreferredVersions
) where
import Distribution.Server.Framework.BackupRestore
import Distribution.Server.Framework.BackupDump
import Distribution.Server.Features.PreferredVersions.State
import Data.Version (Version(..))
import Distribution.Text (Text, display, simpleParse)
import Distribution.Package (PackageName)
import Distribution.Version (VersionRange)
import qualified Data.Map as Map
import Control.Applicative ((<$>))
import Text.CSV (CSV, Record)
import Control.Monad (guard)
{-------------------------------------------------------------------------------
Restore backup
-------------------------------------------------------------------------------}
-- | Entry point for restoring the preferred-versions state from backup,
-- starting from the initial (empty) state.
restorePreferredVersions :: RestoreBackup PreferredVersions
restorePreferredVersions =
  updatePreferredVersions (initialPreferredVersions True)

-- | Fold backup entries into the accumulated state one at a time; each
-- processed entry yields a new RestoreBackup closing over the new state.
updatePreferredVersions :: PreferredVersions -> RestoreBackup PreferredVersions
updatePreferredVersions st = RestoreBackup {
    restoreEntry = \entry -> updatePreferredVersions <$> importEntry st entry
  , restoreFinalize = return st
  }

-- | Dispatch a single backup entry by its path: per-package
-- preferred.csv and deprecated.csv files are parsed and merged into the
-- state; every other entry is ignored.
importEntry :: PreferredVersions -> BackupEntry -> Restore PreferredVersions
importEntry st (BackupByteString ["package", pkgstr, "preferred.csv"] bs) = do
  pkg <- parsePackageName pkgstr
  csv <- importCSV "preferred.csv" bs
  importPreferredCSV st pkg csv
importEntry st (BackupByteString ["package", pkgstr, "deprecated.csv"] bs) = do
  pkg <- parsePackageName pkgstr
  csv <- importCSV "deprecated.csv" bs
  importDeprecatedCSV st pkg csv
importEntry st _ = return st
-- | Parse a per-package preferred.csv into the state.  The view
-- patterns match a fixed layout: a version row, a "preferredRanges"
-- row, a "deprecatedVersions" row, then an optional "sumRange" row.
-- Any deviation falls through to the catch-all and fails the restore.
importPreferredCSV :: PreferredVersions
                   -> PackageName
                   -> CSV
                   -> Restore PreferredVersions
importPreferredCSV st pkg ( _version
                          : (match "preferredRanges" -> Just ranges)
                          : (match "deprecatedVersions" -> Just deprecated)
                          : (optionalSumRange -> Just sumRange)
                          ) = do
    let info = PreferredInfo { preferredRanges = ranges
                             , deprecatedVersions = deprecated
                             , sumRange = sumRange
                             }
    return st { preferredMap = Map.insert pkg info (preferredMap st) }
importPreferredCSV _ _ _ = fail "Failed to read preferred.csv"

-- | Parse a per-package deprecated.csv: exactly a version row followed
-- by a "deprecatedFor" row listing replacement packages.
importDeprecatedCSV :: PreferredVersions
                    -> PackageName
                    -> CSV
                    -> Restore PreferredVersions
importDeprecatedCSV st pkg [ _version
                           , match "deprecatedFor" -> Just deprecatedFor
                           ] =
    return st { deprecatedMap = Map.insert pkg deprecatedFor (deprecatedMap st) }
importDeprecatedCSV _ _ _ = fail "Failed to read deprecated.csv"
-- | Accept a CSV record of the form @header : fields@: when the leading
-- field equals the expected header, parse every remaining field;
-- otherwise (wrong header, unparsable field, or empty record) 'Nothing'.
match :: Text a => String -> Record -> Maybe [a]
match expected (actual : fields)
  | expected == actual = mapM simpleParse fields
match _ _ = Nothing
-- Outer Maybe is Nothing on a parsing error; the inner Maybe reflects
-- that the version range itself is optional in the backup format.
optionalSumRange :: CSV -> Maybe (Maybe VersionRange)
optionalSumRange records =
  case records of
    []                  -> Just Nothing
    [["sumRange", str]] -> Just <$> simpleParse str
    _                   -> Nothing
-- | Parse a package name, failing the restore with a descriptive
-- message when the string is not a valid name.
parsePackageName :: String -> Restore PackageName
parsePackageName str =
  case simpleParse str of
    Just name -> return name
    Nothing   -> fail $ "Could not parse package name '" ++ str ++ "'"
{-------------------------------------------------------------------------------
Create backup
-------------------------------------------------------------------------------}
-- | Serialize the whole state: one entry per package with preference
-- info, followed by one entry per package with deprecation info.
backupPreferredVersions :: PreferredVersions -> [BackupEntry]
backupPreferredVersions (PreferredVersions prefs deprs _) =
     map backupPreferredInfo (Map.toList prefs)
  ++ map backupDeprecated (Map.toList deprs)
-- | Render one package's preference info as its preferred.csv backup
-- entry; the "sumRange" row is emitted only when a summary range exists.
backupPreferredInfo :: (PackageName, PreferredInfo) -> BackupEntry
backupPreferredInfo (name, info) =
  csvToBackup (pkgPath name "preferred.csv") $
    [ [display versionCSV]
    , "preferredRanges" : map display (preferredRanges info)
    , "deprecatedVersions" : map display (deprecatedVersions info)
    ] ++ maybe [] (\range -> [["sumRange", display range]]) (sumRange info)
  where
    -- Format version of the CSV file itself.
    versionCSV = Version [0,1] ["unstable"]
-- | Render one package's deprecation list as its deprecated.csv backup
-- entry.
backupDeprecated :: (PackageName, [PackageName]) -> BackupEntry
backupDeprecated (name, replacements) =
  csvToBackup (pkgPath name "deprecated.csv")
    [ [display versionCSV]
    , "deprecatedFor" : map display replacements
    ]
  where
    -- Format version of the CSV file itself.
    versionCSV = Version [0,1] ["unstable"]
-- | Backup path for a per-package file: @["package", <name>, <file>]@.
pkgPath :: PackageName -> String -> [String]
pkgPath pkg file = ["package", display pkg, file]
|
ocharles/hackage-server
|
Distribution/Server/Features/PreferredVersions/Backup.hs
|
bsd-3-clause
| 4,865
| 0
| 12
| 1,034
| 1,125
| 595
| 530
| 87
| 2
|
module Maps where
-- LiquidHaskell test: reading the same key from the empty map under two
-- provably-equal keys yields equal values.
{-@ prop0 :: x:_ -> y:{_ | y == x} -> TT @-}
prop0 x y = (a == b)
  where
    a = get x emp
    b = get y emp

-- A later write to a *different* key does not disturb x's binding.
{-@ prop1 :: x:_ -> y:{_ | y /= x} -> TT @-}
prop1 x y = (z == 10)
  where
    m1 = put x 10 emp
    m2 = put y 20 m1
    z  = get x m2

-- A later write to the *same* key shadows the earlier binding.
{-@ prop2 :: x:_ -> y:{_ | y == x} -> TT @-}
prop2 x y = (z == 20)
  where
    m1 = put x 10 emp
    m2 = put y 20 m1
    z  = get x m2

-----------------------------------------------------------------------

-- Axiomatized map: the Haskell implementations are 'undefined' because
-- only the refinement-level McCarthy select/store semantics matter here.
data Map k v = M
{-@ embed Map as Map_t @-}
{-@ measure Map_select :: Map k v -> k -> v @-}
{-@ measure Map_store :: Map k v -> k -> v -> Map k v @-}

emp :: Map Int Int
emp = undefined

{-@ get :: k:k -> m:Map k v -> {v:v | v = Map_select m k} @-}
get :: k -> Map k v -> v
get = undefined

{-@ put :: k:k -> v:v -> m:Map k v -> {n:Map k v | n = Map_store m k v} @-}
put :: k -> v -> Map k v -> Map k v
put = undefined
|
ssaavedra/liquidhaskell
|
tests/pos/maps.hs
|
bsd-3-clause
| 941
| 0
| 8
| 319
| 237
| 128
| 109
| 19
| 1
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module T10598_fail1 where
class Z f where
  z :: f a b

-- NOTE(review): each declaration below deliberately pairs a type with an
-- unsuitable deriving strategy (newtype-deriving on a data type, stock
-- deriving of Num, anyclass deriving of a multi-parameter class). The
-- path marks this as a should_fail compiler test — leave as-is.
data A = A Int deriving newtype Show
newtype B = B Int deriving stock Num
data C a b = C Int deriving anyclass Z
|
ezyang/ghc
|
testsuite/tests/deriving/should_fail/T10598_fail1.hs
|
bsd-3-clause
| 298
| 0
| 7
| 70
| 69
| 41
| 28
| 9
| 0
|
-- Exercise G
-- | A date as (day, month, year); the month is 1-based.
type Date = (Int, Int, Int)

-- Month names, indexed 0-11 (hence the @m - 1@ lookup in 'showDate').
months = ["January", "February", "March", "April", "May", "June", "July", "August",
          "September", "October", "November", "December"]
-- | English ordinal suffix for a day of the month: 1st, 2nd, 3rd, 4th,
-- ..., 11th, 12th, 13th (all teens take "th"), 21st, 22nd, 23rd, 31st.
--
-- The previous version mislabelled 11 and 21 as "st" and 12 and 22 as
-- "nd", never produced "rd", and gave 1, 2 and 3 a "th".
suffix :: Int -> String
suffix n
  | n `mod` 100 `elem` [11, 12, 13] = "th"  -- teens override the last digit
  | n `mod` 10 == 1                 = "st"
  | n `mod` 10 == 2                 = "nd"
  | n `mod` 10 == 3                 = "rd"
  | otherwise                       = "th"
-- | Render a 'Date', e.g. @(3, 12, 1976)@ becomes @"3rd December, 1976"@.
showDate :: Date -> String
showDate (day, month, year) =
  concat [show day, suffix day, " ", months !! (month - 1), ", ", show year]
-- Exercise H
-- | A card identification number: eight digit characters, optionally
-- followed by the digit-sum checksum appended by 'addSum'.
type CIN = String

-- | The numeric value of a single digit character.
getDigit :: Char -> Int
getDigit = read . (: [])

-- | Append the digit sum of the given CIN as its checksum.
addSum :: CIN -> CIN
addSum cin = cin ++ show (digitSum cin)
  where
    digitSum :: CIN -> Int
    digitSum = sum . map getDigit

-- | A CIN is valid when it equals its first eight characters with their
-- checksum re-appended.
valid :: CIN -> Bool
valid cin = addSum front == cin
  where front = take 8 cin
|
dirkz/Thinking_Functionally_With_Haskell
|
2/Chapter2.hs
|
isc
| 694
| 0
| 12
| 182
| 316
| 169
| 147
| 20
| 1
|
{-# LANGUAGE OverloadedStrings #-}
module Shed.Types where
import Data.Aeson.Types (FromJSON (..), ToJSON (..), Value (..),
typeMismatch)
import Data.ByteString (ByteString)
import qualified Data.Map as M
import Data.Text (Text)
import qualified Data.Text as T
-- | A SHA-1 blob reference, stored as its textual rendering
-- (presumably hex-encoded — TODO confirm against the blob store).
newtype SHA1 = SHA1 { unSHA1 :: Text }

instance Show SHA1 where
  -- Shown as the bare digest text, without a constructor wrapper.
  show (SHA1 s) = T.unpack s

-- | A named key paired with the blob reference it points at.
data Key = Key { keyId :: Text, keyBlobRef :: SHA1 } deriving Show

instance FromJSON SHA1 where
  -- A SHA1 is serialized as a plain JSON string.
  parseJSON (String s) = return (SHA1 s)
  parseJSON invalid = typeMismatch "SHA1" invalid

instance ToJSON SHA1 where
  toJSON (SHA1 sha) = String sha

-- | A permanode: a blob reference plus its mutable attributes and
-- optional cached thumbnail/preview data.
data Permanode = Permanode { sha1 :: SHA1
                           , attributes :: M.Map Text Text
                           , thumbnail :: Maybe ByteString
                           , preview :: Maybe Text
                           } deriving Show
|
dbp/shed
|
src/Shed/Types.hs
|
isc
| 962
| 0
| 10
| 342
| 257
| 149
| 108
| 22
| 0
|
-- | For a string of exactly two characters ending in a blank, drop the
-- final character; anything else maps to the literal "sdf".
test :: String -> String
test s
  | [_, ' '] <- s = init s
  | otherwise     = "sdf"
-- | Drop the first three elements of the list and join the rest with
-- spaces; lists with fewer than three elements map to "all".
test2 :: [String] -> String
test2 ws =
  case ws of
    _ : _ : _ : rest -> unwords rest
    _                -> "all"
|
mauriciofierrom/cis194-homework
|
homework02/test.hs
|
mit
| 161
| 0
| 11
| 84
| 76
| 38
| 38
| 6
| 2
|
----------------------------------------------------------------
--
-- | aartifact
-- http://www.aartifact.org/
--
-- @src\/ValidationComp.hs@
--
-- Representation of partially or completely computed result
-- of a validation attempt.
--
----------------------------------------------------------------
--
module ValidationComp where
import ExpConst
import Exp
-- | The outcome of a validation attempt: a definite result (wrapping a
-- constant, typically a boolean 'B'), no information, or a suspended
-- computation that yields a further 'Verification' when forced.
data Verification =
    Verifiable Const
  | Unknown
  | Potential (() -> Verification)

----------------------------------------------------------------
-- | Functions for values of type Verification Bool.

-- Three-valued conjunction.  Definite False dominates; Unknown
-- conjoined with anything non-False stays Unknown; two suspensions are
-- combined into one suspension forced on demand.
-- NOTE(review): a 'Verifiable' wrapping a non-'B' constant is not
-- matched by the inner cases and falls through to the catch-alls —
-- presumably intentional "treat as Unknown"; confirm.
(&&&) v1 v2 = case v1 of
  Verifiable (B True) -> case v2 of
    Verifiable (B True) -> Verifiable (B True)
    Verifiable (B False) -> Verifiable (B False)
    _ -> v2
  Verifiable (B False) -> Verifiable (B False)
  Unknown -> case v2 of
    Verifiable (B False) -> Verifiable (B False)
    _ -> Unknown
  Potential vf -> case v2 of
    Verifiable (B True) -> v1
    Potential vf' -> Potential (\() -> vf () &&& vf' ())
    -- False or Unknown on the right decides/poisons the result directly.
    falOrUnv -> falOrUnv
  _ -> Unknown
-- Disjunction with 'Unknown' acting as the identity (see 'orV', which
-- folds with Unknown as the start value): Unknown ||| x = x.
-- Definite True dominates; suspensions are re-suspended.
(|||) v1 v2 = case v1 of
  Verifiable (B True) -> v1
  Verifiable (B False) -> case v2 of
    Verifiable (B False) -> Verifiable (B False)
    Verifiable (B True) -> v2
    Potential vf -> Potential $ \() -> v1 ||| vf ()
    -- Unknown on the right leaves the definite False in place.
    Unknown -> v1
  Potential vf -> case v2 of
    Verifiable (B True) -> v2
    Verifiable (B False) -> Potential $ \() -> vf () ||| v2
    Potential vf' -> Potential (\() -> vf () ||| vf' ())
    Unknown -> v1
    _ -> Unknown
  Unknown -> v2
  _ -> Unknown

-- Strict disjunction: unlike (|||), any 'Unknown' operand poisons the
-- whole result to Unknown (see 'orV'', which starts the fold from a
-- definite False instead).
-- NOTE(review): the inner match under a definite-False left operand has
-- no catch-all, so a non-'B' 'Verifiable' on the right is unhandled
-- there — confirm 'Const' values other than 'B' cannot occur here.
(|/|) v1 v2 = case v1 of
  Verifiable (B True) -> v1
  Verifiable (B False) -> case v2 of
    Verifiable (B b) -> v2
    Potential vf -> Potential $ \() -> v1 |/| vf ()
    Unknown -> Unknown
  Potential vf -> case v2 of
    Verifiable (B True) -> v2
    Verifiable (B False) -> Potential $ \() -> vf () |/| v2
    Potential vf' -> Potential (\() -> vf () |/| vf' ())
    Unknown -> Unknown
  Unknown -> Unknown
  _ -> Unknown

-- Negation of a definite boolean; anything else collapses to Unknown
-- (in particular, a suspension is NOT forced or negated).
notV r = case r of
  Verifiable (B b) -> Verifiable (B $ not b)
  _ -> Unknown

-- Disjunction over a list with Unknown as the unit.
orV :: [Verification] -> Verification
orV = foldl (|||) Unknown

-- Strict disjunction over a list with definite False as the unit.
orV' :: [Verification] -> Verification
orV' = foldl (|/|) (Verifiable (B False))

-- Conjunction over a list with definite True as the unit.
andV :: [Verification] -> Verification
andV = foldl (&&&) (Verifiable (B True))

-- Embed a Bool: True becomes verifiable, False becomes merely Unknown
-- (a failed check is "not proven", not "proven false").
boolToV b = if b then Verifiable (B True) else Unknown

-- Is this a definite True (without forcing suspensions)?
isVTrue v = case v of Verifiable (B True)->True;_->False

-- Like 'isVTrue' but keeps suspensions suspended: forcing the result
-- later re-applies the check to whatever the suspension yields.
isVTrue' v = case v of
  Verifiable (B True) -> v
  Potential vf -> Potential $ \() -> isVTrue' $ vf ()
  _ -> Unknown
--eof
|
aartifact/aartifact-verifier
|
src/ValidationComp.hs
|
mit
| 2,498
| 0
| 16
| 571
| 1,007
| 506
| 501
| 64
| 12
|
{-# LANGUAGE CPP #-}
module GHCJS.DOM.CanvasRenderingContext (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.CanvasRenderingContext
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.CanvasRenderingContext
#else
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/CanvasRenderingContext.hs
|
mit
| 379
| 0
| 5
| 33
| 33
| 26
| 7
| 4
| 0
|
{-# LANGUAGE OverloadedStrings #-}
module Y2021.M02.D22.Exercise where
{--
OKAY!
We now have a graph-store of wines, wineries, reviews, prices, scores, ...
We good?
We GOOD!
So, there are several directions we can go from here.
One Direction, ...
... eheh, ...
One Direction is to track which Boy Bands buy what kinds of wines and start
to analyze why teen girls went crazy over that band for, oh, about a year
before they fell into utter obscurity.
But that's not the direction we're going today.
Another direction is to do some natural language processing on the reviews
and start to build models of wines, preferences, what-have-you, to build a
recommendation system, pairing wines with people and foods.
Not a direction we're taking today, either.
Another direction is to take that wikidata and see how we can find 'soft
aliases' to the wineries in our graph store to match wikidata wineries
and, consequently, marry their locations to the wineries in our graph-store.
Let's do that, with an eye toward building a geodesic model of wines and
wineries, ... for funsies.
We did, before collect wineries by country. Perhaps that can help to narrow
down our search of the wikidata wineries-by-country set.
So, the approach here is we're going to use various matching techniques,
including artificial artificial intelligence (a term made popular by google),
to winnow our wikidata list of wineries into our wineries on the graph-store.
What is the distinguishing characteristic of the matched wineries (other than
that they have matched)? The matched wineries have lat/longs in the graph-
store.
So! Finding what matched on the graph-side is simple. How do we know that a
wikidata winery has been matched?
Hm.
Back to aliasing.
We've aliased matched countries before, using the relation ALIAS_OF. Let's
continue in that vein for wineries that don't have an exact match, therefore
masking out already-matched wineries on the wikidata-side.
Using this approach, we now can apply different kinds of matchers, piecemeal,
and recover the most-recent state of matching the next day after leaving off
the previous approach.
Cool beans!
Okay, step zero. We need to collect all wineries from both sources and
eliminate already matched ones. Since the previous match was exact, the
elimination is reductive. But, also, we may as well also eliminate any aliased
wineries, even though, on this first day, there are none, just so that this
exercise works every time we take on a new matching technique.
So, today's Haskell exercise will be marshalling our data-sets. We'll look at
various matchers going forward.
--}
import Control.Arrow (second)
import Data.Aeson
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Text as T
import Data.Aeson.WikiDatum
import Graph.Query
import Graph.JSON.Cypher
import qualified Graph.JSON.Cypher.Read.Rows as RR
import Y2021.M01.D21.Solution (Idx)
import Y2021.M01.D22.Solution -- for wikidata wineries
import Y2021.M01.D26.Solution (ByCountry, WineriesByCountry, Country)
import Y2021.M01.D29.Solution -- For Namei type-class
{--
import Y2021.M01.D21.Solution -- remember 'Sweet Cheeks' winery? lol ;)
but it's fun looking over that 'old' code; seeing how much I have rolled
into the 1HaskellADay standard library set.
... also, Wineries in the graph-store are now more complex.
--}
import Y2021.M01.D25.Solution -- country-alias resolver
{--
First up: let's grab our country aliases NUPE! SEE BELOW!
>>> graphEndpoint >>= countryAliases
fromList [(WC "German Democratic Republic",Neo "Germany"),
(WC "United Kingdom",Neo "England"),
(WC "United States of America",Neo "US")]
Next, let's grab out wikidata:
>>> readWineries (wineriesDir ++ wineriesJSON)
...
>>> let wikiw = it
>>> head (Map.toList wikiw)
("21 Cellars",Winery {winery = WD {qid = "http://www.wikidata.org/entity/Q4630984",
name = "21 Cellars"},
country = WD {qid = "http://www.wikidata.org/entity/Q30",
name = "United States of America"},
location = point({ latitude: 47.2675, longitude: -122.471 })})
Now, let's normalize the wiki-countries to graph-countries
--}
-- | Rewrite a winery's wikidata country-name to its graph-store name using
--   the alias map (e.g. \"United States of America\" -> \"US\"), leaving
--   unaliased countries untouched.  Exercise stub: left 'undefined' for the
--   solver to implement.
normalizeWikiCountry :: Map WikiCountry Neo4jCountry -> Winery -> Winery
normalizeWikiCountry countryMap winery = undefined
-- Wait.
-- We have to redo our countryAliases to return the WikiDatum for countries
-- and their aliases so we can do a proper substitution here.
-- No, because aliased countries' QNames aren't in the graph-store. So we'll
-- use the QNames in wiki, because they do match. ... nvrmnd.
{--
>>> let normWikiw = Map.map (normalizeWikiCountry ca) wikiw
>>> head (Map.toList normWikiw)
("21 Cellars",Winery {winery = WD {qid = "http://www.wikidata.org/entity/Q4630984",
name = "21 Cellars"},
country = WD {qid = "http://www.wikidata.org/entity/Q30",
name = "US"},
location = point({ latitude: 47.2675, longitude: -122.471 })})
BOOM!
Let's group these by country ... or ... ByCountry. Eheh ;)
>>> let wwbc = wikiWineriesByCountry normWikiw
>>> second (take 3 . Set.toList) (head (Map.toList wwbc))
("Argentina",[Winery {winery = WD {qid = "http://www.wikidata.org/entity/Q2829326",
name = "Al Este"},
country = WD {qid = "http://www.wikidata.org/entity/Q414",
name = "Argentina"},
location = point({ latitude: -38.8, longitude: -62.68333333 })},
Winery {winery = WD {qid = "http://www.wikidata.org/entity/Q5731065",
name = "Bodega B\243rbore"},
country = WD {qid = "http://www.wikidata.org/entity/Q414",
name = "Argentina"},
location = point({ latitude: -31.54805556, longitude: -68.32722222 })}])
Turns out there's only 2 wineries listed in Argentina with lats/longs in
wikidata.
Now let's grab the wineries from the graph database, which may optionally
have a QName and a lat/long, which we'll have to parse (kinda) (not if I can
help it), also, we wish to associate the id in the graph database with this
object, as well as its country.
--}
-- | Cypher query fetching every winery in the graph-store with its node-id,
--   name, optional QName, country (name and QName), and optional location.
wineriesQuery :: Cypher
wineriesQuery = T.unwords (map T.pack clauses)
   where
      clauses = ["MATCH (c:Country)<--()<--(w:Winery)",
                 "RETURN id(w) as id, w.name as winery_name,",
                 "w.qid as winery_qid, c.name as country_name,",
                 "c.qid as country_qid, w.location as location"]
-- what we get back from that is:
-- | A graph-store datum that may or may not carry a wikidata QName:
--   nodes loaded from wikidata have one, hand-entered nodes may not.
data MbWikiDatum = MWD (Maybe Qname) Name -- not sure if we have a Qname
   deriving (Eq, Ord, Show)
-- | A winery as stored in neo4j: its node index, its (maybe-QNamed) name,
--   its (maybe-QNamed) country, and an optional lat/long.
data NeoWinery = NeoWinery Idx MbWikiDatum MbWikiDatum (Maybe LongLat)
   deriving (Eq, Ord, Show)
-- | Convert one result-row of 'wineriesQuery' into a (country-name, winery)
--   pair, so results can be grouped 'ByCountry'; 'Nothing' on a row that
--   does not parse.  Exercise stub.
row2NeoWinery :: [Value] -> Maybe (Country, NeoWinery)
row2NeoWinery row = undefined
-- The reason for row2NeoWinery is because we wish to build a
-- ByCountry NeoWinery map so it returns Maybe (country-name, winery))
-- and with that, we can do this:
-- | Run 'wineriesQuery' against the graph endpoint and fold the rows
--   (via 'row2NeoWinery') into a by-country map of wineries.
--   Exercise stub.
graphWineries :: Endpoint -> IO (ByCountry NeoWinery)
graphWineries url = undefined
{--
>>> graphEndpoint >>= graphWineries
...
>>> let gws = it
>>> second (take 2 . Set.toList) (head $ Map.toList gws)
("Argentina",[NeoWinery 480 (MWD Nothing "Kirkland Signature")
(MWD (Just "http://www.wikidata.org/entity/Q414") "Argentina")
Nothing,
NeoWinery 486 (MWD Nothing "Felix Lavaque")
(MWD (Just "http://www.wikidata.org/entity/Q414") "Argentina")
Nothing])
The thing is, we don't actually need maps, ... yet. We need maps once we've
eliminated already-matched wineries. So, let's convert our maps to sets.
--}
-- | Flatten a map-of-sets into the union of its value-sets, discarding the
--   keys (cf. @Set.unions . Map.elems@).  Exercise stub.
map2set :: Ord a => Map k (Set a) -> Set a
map2set = undefined
{--
>>> let gwss = map2set gws
>>> let wwbcs = map2set wwbc
>>> (Set.size gwss, Set.size wwbcs)
(16961,605)
That about jibes with my recollection.
NOW, let's eliminate matches from both sets. Which means we have to find the
matches (by exact-match on their names).
--}
-- | The names that appear (spelled identically) in BOTH the graph-store
--   winery set and the wikidata winery set.  Exercise stub.
exactMatches :: Set NeoWinery -> Set Winery -> Set Name
exactMatches = undefined
-- Remove those exact matches from both the neo-wineries and wiki-wineries
-- | Drop from a set every element whose 'namei' is in the duplicates set;
--   works for any named thing, so it serves both winery representations.
--   Exercise stub.
removeExactMatches :: Namei a => Set Name -> Set a -> Set a
removeExactMatches duplicates = undefined
-- removeExactMatches is a Set.removeBy-function
{--
>>> let smgwss = removeExactMatches em gwss
>>> Set.size smgwss
16836
>>> let smwwbcs = removeExactMatches em wwbcs
>>> Set.size smwwbcs
481
Okay. Now let's remove any aliased wineries, ... that we will eventually be
having.
--}
-- | Cypher query pairing each winery-alias node with the winery it aliases.
aliasedWineriesQuery :: Cypher
aliasedWineriesQuery =
   T.pack ("MATCH (aw:AliasedWinery)--(w:Winery) " ++ "RETURN aw.name, w.name")
-- Grab these aliases:
-- | A winery name as it appears on the wikidata side.
data WikiWinery = WW (Wiki Name)
   deriving (Eq, Ord, Show)
instance Namei WikiWinery where
   namei (WW (Wiki n)) = n
-- | A winery name as it appears in the neo4j graph-store.
data Neo4jWinery = NW (Neo4j Name)
   deriving (Eq, Ord, Show)
instance Namei Neo4jWinery where
   namei (NW (Neo n)) = n
-- | The alias relation: wikidata winery-name -> graph-store winery-name.
type AliasedWineries = Map WikiWinery Neo4jWinery
-- | Fetch the already-established winery aliases ('aliasedWineriesQuery')
--   from the graph-store; empty on a fresh store.  Exercise stub.
aliasedWineries :: Endpoint -> IO AliasedWineries
aliasedWineries url = undefined
{--
>>> graphEndpoint >>= aliasedWineries
fromList []
>>> let mappedWineries = it
... as we currently have no aliased wineries, this result makes sense.
Now we delete the aliases from the wiki winery set and the wineries aliased
from the neo4j graph-store set as done before.
And let's bring it all together.
--}
-- | The whole pipeline: load wikidata wineries from file, load graph-store
--   wineries from the endpoint, then remove exact matches and aliases from
--   both sides, leaving the still-unmatched work-in-progress sets.
--   Exercise stub.
wineriesWIP :: FilePath -> Endpoint -> IO (Set Winery, Set NeoWinery)
wineriesWIP wikiFile url = undefined
{--
>>> graphEndpoint >>= wineriesWIP (wineriesDir ++ wineriesJSON)
fromList [...]
>>> let (wikis, neos) = it
>>> (Set.size wikis, Set.size neos)
(481,16836)
There you go! A set-up of wineries to winnow down using name-matching.
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2021/M02/D22/Exercise.hs
|
mit
| 10,130
| 0
| 10
| 2,244
| 661
| 378
| 283
| 56
| 1
|
-- | Shortest paths computed algebraically: path weights live in the
--   tropical semiring, and each weight is paired with a label (a regular
--   expression or a word of edges) witnessing how that weight is achieved.
module ShortestPath
  ( module KleeneAlgebra
  , ShortestPath
  , extract
  , annotate
  , labels
  , distances
  , distances'
  , shortestPathsRegExps
  , shortestPaths
  ) where
import Graph
import KleeneAlgebra
import Language
import RegExp
import TropicalSemiring
-- | A path weight (tropical distance @a@) paired with a label @b@
--   describing the path(s) achieving that weight.
data ShortestPath a b =
  ShortestPath (TropicalSemiring a) b
-- | Render as @label[weight]@.
instance (Show a, Show b) => Show (ShortestPath a b) where
  show (ShortestPath weight label) = concat [show label, "[", show weight, "]"]
-- | Map over the label only; the weight is left untouched.
instance Functor (ShortestPath a)
  where
    fmap f (ShortestPath a x) =
      ShortestPath a (f x)
instance (Ord a, Num a, Semiring b) => Semiring (ShortestPath a b)
  where
    zero =
      ShortestPath zero zero
    -- Choice keeps the strictly better (smaller) weight; on a tie the two
    -- labels are combined.  In the tropical semiring @c = a <+> b = min a b@,
    -- so @c < b@ means @a@ alone is minimal and @c < a@ means @b@ alone is.
    ShortestPath a x <+> ShortestPath b y
      | c < b = ShortestPath a x
      | c < a = ShortestPath b y
      | otherwise = ShortestPath c (x <+> y)
      where
        c = a <+> b
    one =
      ShortestPath one one
    -- Sequencing: tropical (<.>) adds the weights; labels are concatenated.
    ShortestPath a x <.> ShortestPath b y =
      ShortestPath (a <.> b) (x <.> y)
-- | Iteration: only a weight-'one' (distance-0) loop can be taken any
--   number of times, so its label is starred; a costlier loop is best
--   taken zero times, leaving the empty path (weight 'one', label 'one').
instance (Ord a, Num a, StarSemiring b) => StarSemiring (ShortestPath a b)
  where
    star (ShortestPath x b)
      | x == one = ShortestPath one (star b)
      | otherwise = ShortestPath one one
instance (Ord a, Num a, KleeneAlgebra b) => KleeneAlgebra (ShortestPath a b)
-- | Discard the weight, keeping only the label.
extract :: ShortestPath a b -> b
extract sp = case sp of
  ShortestPath _ label -> label
-- | Lift a labeled graph into a matrix of weight-plus-label pairs: each
--   cell's weight comes from the edge label (absent edge = 'zero', i.e.
--   unreachable) and its annotation from applying @f@ to the edge.
annotate :: (Ix i, Bounded i, Ord a, Num a, Semiring b)
         => ((Edge i) -> b)
         -> LabeledGraph i a
         -> Matrix i (ShortestPath a b)
annotate f m =
  go <$> m <*> labelWithEdge (unlabel m)
    where
      go v e =
        ShortestPath (maybe zero TropicalSemiring v) (maybe zero f e)
-- | Lift optional edge weights into the tropical semiring
--   ('Nothing' — no edge — becomes 'zero').
labels :: (Functor f, Num a, Ord a)
       => f (Maybe a)
       -> f (TropicalSemiring a)
labels weights = fmap intoTropical weights
  where
    intoTropical = maybe zero TropicalSemiring
-- | All-pairs shortest distances: lift the weights into the tropical
--   semiring (as 'labels' does) and take the asteration of the result.
distances :: (Functor f, Num a, Ord a, StarSemiring (f (TropicalSemiring a)))
          => f (Maybe a)
          -> f (TropicalSemiring a)
distances weights = star (labels weights)
-- | Alternative distance computation: star the regular-expression matrix of
--   the graph first, then evaluate each regexp in the tropical semiring.
distances' :: (Num a, Ord a, Bounded i, Ix i)
           => LabeledGraph i a
           -> Matrix i (TropicalSemiring a)
distances' =
  fmap (eval TropicalSemiring) . star . regExpMap
-- | For every pair of vertices: the shortest-path weight together with a
--   regular expression over edges describing the optimal path(s).
shortestPathsRegExps :: (Num a, Ord a, Bounded i, Ix i)
                     => LabeledGraph i a
                     -> Matrix i (ShortestPath a (RegExp (Edge i)))
shortestPathsRegExps graph = star (annotate regExp graph)
-- | Shortest paths as explicit edge lists: annotate each edge with its
--   one-letter language, star, then pick a witness word from each label.
shortestPaths :: (Num a, Ord a, Bounded i, Ix i)
              => LabeledGraph i a
              -> LabeledGraph i [Edge i]
shortestPaths graph = fmap (someWord . extract) (star (annotate letter graph))
|
mietek/experiment-floyd-warshall
|
src/ShortestPath.hs
|
mit
| 2,632
| 0
| 13
| 806
| 1,034
| 519
| 515
| 75
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE DeriveGeneric #-}
module Nauva.Product.Nauva.Book.App
( bookApp
, catalogPages
) where
import Data.Text
import Data.Monoid
import qualified Data.Aeson as A
import Nauva.App
import Nauva.Catalog
import Nauva.Catalog.TH
-- | Local component state: the number of times the button has been clicked.
data State = State
    { numberOfClicks :: Int
    }
-- | The only action this component emits.
data Action
    = Clicked
-- Empty TH splice: ends the declaration group so the instance below can
-- refer to the types above at splice time.
$( return [] )
-- Any incoming event value decodes to 'Clicked'.
instance Value Action where
    parseValue _ = pure Clicked
-- | Fresh state: zero clicks recorded.
initialState :: State
initialState = State { numberOfClicks = 0 }
-- | Each 'Clicked' action bumps the click counter by one.
updateState :: Action -> State -> State
updateState Clicked st = st { numberOfClicks = numberOfClicks st + 1 }
-- | Render the counter: a button emitting 'Clicked' and a label with the
--   current click count.
renderCounter :: State -> Element
renderCounter State{..} = div_
    [ button_ [onClick_ onClickHandler] [str_ "Click Me!"]
    , span_ [str_ ("Clicked " <> pack (show numberOfClicks) <> " times" :: Text)]
    ]
  where
    -- Client-side handler (njs quasiquote): stops event propagation and
    -- sends the 'Clicked' action back to the component.
    onClickHandler :: FE MouseEvent Action
    onClickHandler = [njs| ev => {
        ev.stopPropagation()
        return $Clicked()
    }|]
-- | Wire state, update and render into a Nauva component.  It has no props,
--   no lifecycle behaviour, and serialises its snapshot as an empty object
--   (so restoring keeps the in-memory state unchanged).
counterComponent :: Component () () State Action
counterComponent = createComponent $ \componentId -> Component
    { componentId = componentId
    , componentDisplayName = "Counter"
    , initialComponentState = \_ -> pure (initialState, [], [])
    , componentEventListeners = const []
    , componentHooks = emptyHooks
    , processLifecycleEvent = \() _ s -> (s, [])
    , receiveProps = \_ s -> pure (s, [], [])
    , update = \a _ s -> (updateState a s, [])
    , renderComponent = \_ -> renderCounter
    , componentSnapshot = \_ -> A.object []
    , restoreComponent = \_ s -> Right (s, [])
    }
-- | The book application: a catalog shell titled \"Nauva\" over the page
--   tree defined in 'catalogPages'.
bookApp :: App
bookApp = App
    { rootElement = catalog . CatalogProps "Nauva" catalogPages
    }
-- | The catalog's page tree: one leaf per book chapter, each compiled in at
--   build time from a markdown file via the 'catalogPageFromFile' TH splice.
catalogPages :: [Page]
catalogPages =
    [ PLeaf Leaf
        { leafHref = "/"
        , leafTitle = "Introduction"
        , leafElement = $(catalogPageFromFile
            "../../../../../../../../docs/book/introduction.md")
        }
    , PLeaf Leaf
        { leafHref = "/getting-started"
        , leafTitle = "Getting started"
        , leafElement = $(catalogPageFromFile
            "../../../../../../../../docs/book/getting-started.md")
        }
    , PLeaf Leaf
        { leafHref = "/markup"
        , leafTitle = "Markup"
        , leafElement = $(catalogPageFromFile
            "../../../../../../../../docs/book/markup.md")
        }
    , PLeaf Leaf
        { leafHref = "/styles"
        , leafTitle = "Styles"
        , leafElement = $(catalogPageFromFile
            "../../../../../../../../docs/book/styles.md")
        }
    , PLeaf Leaf
        { leafHref = "/thunks"
        , leafTitle = "Thunks"
        , leafElement = $(catalogPageFromFile
            "../../../../../../../../docs/book/thunks.md")
        }
    , PLeaf Leaf
        { leafHref = "/components"
        , leafTitle = "Components"
        , leafElement = $(catalogPageFromFile
            "../../../../../../../../docs/book/components.md")
        }
    ]
|
wereHamster/nauva
|
product/nauva/shared/src/Nauva/Product/Nauva/Book/App.hs
|
mit
| 3,106
| 0
| 15
| 835
| 734
| 422
| 312
| 81
| 1
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Application
( getApplicationDev
, appMain
, develMain
, makeFoundation
, makeLogWare
-- * for DevelMain
, getApplicationRepl
, shutdownApp
-- * for GHCI
, handler
, db
) where
import Control.Lens (set)
import Control.Monad.Logger (liftLoc, runLoggingT)
import Database.Persist.Postgresql (createPostgresqlPool, pgConnStr,
pgPoolSize, runSqlPool)
import Import
import Language.Haskell.TH.Syntax (qLocation)
import Network.Wai (Middleware)
import Network.Wai.Handler.Warp (Settings, defaultSettings,
defaultShouldDisplayException,
runSettings, setHost,
setOnException, setPort, getPort)
import Network.Wai.Middleware.RequestLogger (Destination (Logger),
IPAddrSource (..),
OutputFormat (..), destination,
mkRequestLogger, outputFormat)
import System.Log.FastLogger (defaultBufSize, newStdoutLoggerSet,
toLogStr)
import LoadEnv (loadEnv)
-- Import all relevant handler modules here.
-- Don't forget to add new modules to your cabal file!
import Handler.Common
import Handler.Home
import Handler.Command
import Handler.Output
import qualified Network.AWS as AWS
-- This line actually creates our YesodDispatch instance. It is the second half
-- of the call to mkYesodData which occurs in Foundation.hs. Please see the
-- comments there for more details.
-- (TH splice: generates route dispatch for every resource in resourcesApp.)
mkYesodDispatch "App" resourcesApp
-- | This function allocates resources (such as a database connection pool),
-- performs initialization and returns a foundation datatype value. This is also
-- the place to put your migrate statements to have automatic database
-- migrations handled by Yesod.
makeFoundation :: AppSettings -> IO App
makeFoundation appSettings = do
    -- In debug mode, dump the effective settings at startup.
    when (appDebug appSettings) $ do
        putStrLn "=== App Settings ==="
        print appSettings
        putStrLn "=== App Settings ==="
    -- Some basic initializations: HTTP connection manager, logger, and static
    -- subsite.
    appAWSEnv <- newAWSEnv $ appDebug appSettings
    appHttpManager <- newManager
    appLogger <- newStdoutLoggerSet defaultBufSize >>= makeYesodLogger
    appStatic <-
        (if appMutableStatic appSettings then staticDevel else static)
        (appStaticDir appSettings)
    -- We need a log function to create a connection pool. We need a connection
    -- pool to create our foundation. And we need our foundation to get a
    -- logging function. To get out of this loop, we initially create a
    -- temporary foundation without a real connection pool, get a log function
    -- from there, and then create the real foundation.
    -- (App {..} uses RecordWildCards to capture the fields bound above.)
    let mkFoundation appConnPool = App {..}
        tempFoundation = mkFoundation $ error "connPool forced in tempFoundation"
        logFunc = messageLoggerSource tempFoundation appLogger
    -- Create the database connection pool
    pool <- flip runLoggingT logFunc $ createPostgresqlPool
        (pgConnStr $ appDatabaseConf appSettings)
        (pgPoolSize $ appDatabaseConf appSettings)
    -- Perform database migration using our application's logging settings.
    runLoggingT (runSqlPool (runMigration migrateAll) pool) logFunc
    -- Return the foundation
    return $ mkFoundation pool
  where
    -- AWS environment whose log verbosity follows the app's debug flag.
    newAWSEnv debug = do
        logger <- AWS.newLogger (if debug then AWS.Debug else AWS.Error) stdout
        set AWS.envLogger logger <$> AWS.newEnv AWS.NorthVirginia AWS.Discover
-- | Convert the foundation into a runnable WAI 'Application', wrapping it
--   with the request logger and the default (non-logging) middleware stack.
makeApplication :: App -> IO Application
makeApplication foundation = do
    requestLogger <- makeLogWare foundation
    plainApp <- toWaiAppPlain foundation
    return (requestLogger (defaultMiddlewaresNoLogging plainApp))
-- | Build the request-logging middleware: verbose multi-line output in
--   debug mode, Apache-style access logs otherwise (honouring the
--   trust-the-forwarded-IP-header setting), writing to the app's logger.
makeLogWare :: App -> IO Middleware
makeLogWare foundation =
    mkRequestLogger def
        { outputFormat =
            if appDebug $ appSettings foundation
                then Detailed True
                else Apache
                        (if appIpFromHeader $ appSettings foundation
                            then FromFallback
                            else FromSocket)
        , destination = Logger $ loggerSet $ appLogger foundation
        }
-- | Warp settings for the given foundation value: port and host come from
--   the app settings, and displayable Warp exceptions are routed into the
--   app's logger (tagged with this splice's source location).
warpSettings :: App -> Settings
warpSettings foundation =
      setPort (appPort $ appSettings foundation)
    $ setHost (appHost $ appSettings foundation)
    $ setOnException (\_req e ->
        when (defaultShouldDisplayException e) $ messageLoggerSource
            foundation
            (appLogger foundation)
            $(qLocation >>= liftLoc)
            "yesod"
            LevelError
            (toLogStr $ "Exception from Warp: " ++ show e))
      defaultSettings
-- | For @yesod devel@: build the Warp settings and the WAI application
--   from freshly-loaded settings.
getApplicationDev :: IO (Settings, Application)
getApplicationDev = do
    loadedSettings <- getAppSettings
    foundation <- makeFoundation loadedSettings
    devSettings <- getDevSettings (warpSettings foundation)
    application <- makeApplication foundation
    return (devSettings, application)
-- | Load the @.env@ file first, then read settings from
--   @config/settings.yml@, letting environment variables override.
getAppSettings :: IO AppSettings
getAppSettings = loadEnv >> loadAppSettings [configSettingsYml] [] useEnv
-- | Entry point used by @yesod devel@ (the recompile-and-reload loop).
develMain :: IO ()
develMain = develMainHelper getApplicationDev
-- | Production entry point: load settings, build the foundation and the
--   WAI application, then serve it with Warp.
appMain :: IO ()
appMain = do
    loadedSettings <- getAppSettings
    foundation <- makeFoundation loadedSettings
    application <- makeApplication foundation
    runSettings (warpSettings foundation) application
--------------------------------------------------------------
-- Functions for DevelMain.hs (a way to run the app from GHCi)
--------------------------------------------------------------
-- | Everything DevelMain.hs needs to run the site from GHCi:
--   the dev port, the foundation, and the WAI application.
getApplicationRepl :: IO (Int, App, Application)
getApplicationRepl = do
    loadedSettings <- getAppSettings
    foundation <- makeFoundation loadedSettings
    devSettings <- getDevSettings (warpSettings foundation)
    application <- makeApplication foundation
    return (getPort devSettings, foundation, application)
-- | Teardown hook for the GHCi workflow; nothing to release at present.
shutdownApp :: App -> IO ()
shutdownApp _ = pure ()
---------------------------------------------
-- Functions for use in development with GHCi
---------------------------------------------
-- | Run a single 'Handler' action from GHCi, outside any request.
--   Note: builds a fresh foundation (and DB pool) on every call —
--   convenient, not fast.
handler :: Handler a -> IO a
handler h = getAppSettings >>= makeFoundation >>= flip unsafeHandler h
-- | Run a database action from GHCi, via a throwaway 'handler'.
db :: ReaderT SqlBackend (HandlerT App IO) a -> IO a
db = handler . runDB
|
mrb/tee-io
|
src/Application.hs
|
mit
| 7,165
| 0
| 13
| 1,890
| 1,189
| 627
| 562
| -1
| -1
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-|
Module : Data.Makefile
Copyright : (c) 2016 Nicolas Mattia
License : MIT
Maintainer : Nicolas Mattia <nicolas@nmattia.com>
Stability : experimental
This module defines the different types used when working with a Makefile.
@
# File: Makefile
hello = world
foo: bar
baz
@
@
Makefile {
entries =
[ Assignment RecursiveAssign "hello" "world"
, Rule (Target "foo") [Dependency "bar"] [Command "baz"]
]
}
@
-}
module Data.Makefile where
import Data.String (IsString)
import qualified Data.Text as T
-- | A parsed Makefile: simply its entries, in file order.
data Makefile = Makefile { entries :: [Entry] } deriving (Show, Read, Eq)
-- | A makefile entry, either a rule @(target: dep1 dep1; commands)@ or a
-- variable assignment (@hello = world@ or @hello := world@)
data Entry = Rule Target [Dependency] [Command]
           | Assignment AssignmentType T.Text T.Text
           | OtherLine T.Text
             -- ^ Catch all value for comments, empty lines and lines that failed
             -- to parse.
  deriving (Show, Read, Eq)
-- | The flavour of a variable assignment, keyed by its operator.
data AssignmentType
  = RecursiveAssign
  -- ^ foo = bar
  | SimpleAssign
  -- ^ foo := bar
  | SimplePosixAssign
  -- ^ foo ::= bar
  | ConditionalAssign
  -- ^ foo ?= bar
  | ShellAssign
  -- ^ foo != bar
  | AppendAssign
  -- ^ foo += bar
  deriving (Show, Read, Eq, Enum, Bounded)
-- | Makefile target (@foo@ in the example above)
newtype Target = Target T.Text deriving (Show, Read, Eq, IsString)
-- | Target dependency (@bar@ in the example above)
newtype Dependency = Dependency T.Text deriving (Show, Read, Eq, IsString)
-- | Command (@baz@ in the example above)
newtype Command = Command T.Text deriving (Show, Read, Eq, IsString)
|
nmattia/mask
|
src/Data/Makefile.hs
|
mit
| 1,807
| 0
| 9
| 443
| 259
| 155
| 104
| 20
| 0
|
module GRPDictionaryGenerator
( mkDictionary
) where
import System.Process (readProcessWithExitCode)
import System.Directory (removeFile, renameFile)
import Debug.Trace
import System.Random
import Data.Char (isSpace)
import Data.List
import Data.List.Split
import Language.Haskell.Exts.Parser
import Language.Haskell.Exts.Extension
import Language.Haskell.Exts.Syntax
import Language.Haskell.Exts.SrcLoc
import Control.Monad
import qualified Data.ByteString.Char8 as BS
import System.IO (openFile, hClose, IOMode(WriteMode), Handle)
import GhciEval
-- | Sketch of a structured model of the browsed module (currently unused by
--   'mkDictionary', which records raw @(name, type)@ strings instead).
data ProgramEnvironment = ProgramEnvironment [Instance] [Definition] deriving (Show, Read)
data Instance = Instance String deriving (Show, Read)
-- | A browsed binding: a function (name, class constraint, argument and
--   result type) or a constant of some type.
data Definition = Function String Constraint TypeVariable TypeVariable | Constant String TypeVariable deriving (Show, Read)
data TypeVariable = TypeVariable String | Type String deriving (Show, Read)
data Constraint = Constraint TypeClass TypeVariable deriving (Show, Read)
data TypeClass = TypeClass String deriving (Show, Read)
--acquiring instance info can be done through use of :info Type
-- | Standalone entry point: regenerate the dictionary.
main = mkDictionary
-- | Regenerate Dictionary.hs: @:browse@ GRPSeed.hs in a fresh ghci, look up
--   the type of every distinct word of the output via 'eval', and write the
--   resulting @(word, type)@ pairs as a Haskell list.  Writes to a @~@
--   temp file and renames over the old dictionary only on success.
--   NOTE(review): 'nub' here is O(n^2) in the number of browsed words.
mkDictionary :: IO ()
mkDictionary = do
  -- bang = use ":browse!", star = browse the whole module scope.
  let (bang,star) = (True, True)
  (code, out, err) <- readProcessWithExitCode "ghci" ["GRPSeed.hs"] ((if bang then ":browse! " else ":browse ") ++ (if star then "*GRPSeed" else "GRPSeed"))
  print code
  unless (null err) $ putStrLn ("errors ahead: " ++ err)
  let wordsToLookUp = nub $ words out
  print wordsToLookUp
  handle <- openFile "Dictionary.hs~" WriteMode
  BS.hPut handle prefix
  addDeclarations True handle wordsToLookUp
  BS.hPut handle postfix
  hClose handle
  removeFile "Dictionary.hs"
  renameFile "Dictionary.hs~" "Dictionary.hs"
  --parseResult <- parseBrowseOutput out
  --writeFile "Dictionary.hs" (unlines $ map show $ concatMap extractDecls parseResult)
-- | Write a ready-made declaration list straight to Dictionary.hs.
--   NOTE(review): unlike 'mkDictionary', this writes only the joined pairs —
--   no 'prefix'/'postfix' wrapper — so the output is not a complete module.
createFile :: [(String, String)] -> IO ()
createFile decl = BS.writeFile "Dictionary.hs" (createModule decl)
--TODO: Filter the ones that have IO in the result
-- | Look up each word's type via 'eval' and append the @(word, type)@ pairs
--   to the open handle.  The Bool means \"no entry has been written yet\":
--   entries after the first are prefixed with a comma/newline so the
--   generated list stays syntactically valid.  Words whose lookup yields
--   the empty string are skipped and do not flip the flag.
addDeclarations :: Bool -> Handle -> [String] -> IO ()
addDeclarations isFirstInList handle [] = return ()
addDeclarations True handle (word:rest) = do
  result <- eval word
  unless (result == "") (BS.hPut handle ((BS.pack . show) (word, result)))
  addDeclarations (result == "") handle rest
addDeclarations False handle (word:rest) = do
  result <- eval word
  unless (result == "") (BS.hPut handle ((BS.pack ",\n  ") `BS.append` (BS.pack . show) (word, result)))
  addDeclarations False handle rest
-- | Join shown @(name, type)@ pairs into one comma-separated list body.
createModule :: [(String, String)] -> BS.ByteString
createModule decls = BS.intercalate (BS.pack ",\n  ") $ map (BS.pack . show) decls
-- Module header emitted before the first dictionary entry.
prefix = BS.pack "module Dictionary (\ndeclarations\n) where\n\n--Generated automatically using GRPDictionaryGenerator:mkDictionary - regenerate if out of date.\n\ndeclarations :: [(String, String)]\ndeclarations =\n  [\n  "
-- Closing bracket emitted after the last dictionary entry.
postfix = BS.pack "\n  ]\n"
-- | Declarations of a successfully parsed module; anything else (including
--   parse failures) contributes nothing.
extractDecls :: ParseResult (Module a) -> [Decl a]
extractDecls result = case result of
  ParseOk (Module _ _ _ _ decls) -> decls
  _ -> []
-- | Parse ghci @:browse@ output declaration-by-declaration: strip the CLI
--   prompt lines, group continuation (indented) lines with their head line,
--   then run haskell-src-exts on each group.  Dumps the raw groups to
--   rawDict.hs for debugging and returns only the successful parses.
parseBrowseOutput :: String -> IO [ParseResult (Module SrcSpanInfo)]
parseBrowseOutput str = do
  writeFile "rawDict.hs" $ unlines (map show failedP ++ map show succP)
  putStrLn ("Failed: " ++ show (length failedP) ++ ", succeeded: " ++ show (length succP))
  return $ map snd succP --parseModuleWithMode mode modStr
  where
    modStr = drop 10 $ unlines $ drop 4 $ take (length (lines str) - 1) $lines str -- remove the CLI prompt and all the other prefix/suffix info
    mode = defaultParseMode{baseLanguage = Haskell2010, extensions = extensions defaultParseMode ++ [EnableExtension ScopedTypeVariables]}
    -- A line starting with whitespace continues the previous declaration.
    groupedLns :: [[String]]
    groupedLns = drop 1 $ foldr (\line (grp:grps) -> if isSpace $ head line then (line:grp):grps else []:(line:grp):grps) [[]] $lines modStr
    rawAndParseData = map (\lnGrp -> (concat lnGrp, parseModuleWithMode mode $ unlines lnGrp)) groupedLns
    failedP = filter (\(grp, parse) -> case parse of ParseFailed _ _ -> True; otherwise -> False) rawAndParseData
    succP = filter (\(grp, parse) -> case parse of ParseFailed _ _ -> False; otherwise -> True) rawAndParseData
{-
parseBrowseOutput :: String -> ProgramEnvironment
parseBrowseOutput output =
let
filteredLns = drop 4 $ lines output
preprocLns = init $ drop 10 (head filteredLns) : tail filteredLns
groupedLns = foldr (\line (grp:grps) -> if isSpace $ head line then (line:grp):grps else []:(line:grp):grps) [[]] preprocLns
in (trace $ show groupedLns) ProgramEnvironment [] []-}
-- | Parse a browsed signature @name :: [constraint =>] from -> to@ into a
--   'Function'.  NOTE(review): the two list-pattern bindings are partial —
--   a string without exactly one \"::\" (or a multi-parameter constraint)
--   makes them crash; callers must feed well-formed single-arrow signatures.
parseFunction :: String -> Definition
parseFunction str =
  let
    [name, val] = splitOn "::" str
    [constraint, conversion] = if "=>" `isInfixOf` val then splitOn "=>" val else ["", val]
    (from,to) = splitOnArrow [] [] conversion
  in
    Function name (parseConstraint constraint) (parseTypeVariable from) $ parseTypeVariable to
-- | Split a type string at the first top-level @->@ (one not enclosed in
--   parentheses), returning (text before, text after).  @prefix@ holds the
--   characters consumed so far; the second argument is a stack of
--   currently-open parentheses.  An unmatched @)@ is treated as an
--   ordinary character.
--
--   Fix: the original had no clause for exhausted input, so a string with
--   no top-level arrow (e.g. a constant's type) died with a pattern-match
--   failure.  We now return the whole input as the prefix with an empty
--   remainder, making the function total.
splitOnArrow :: String -> [Char] -> String -> (String, String)
splitOnArrow prefix [] ('-':'>':xs) = (prefix,xs)
splitOnArrow prefix x ('(':xs) = splitOnArrow (prefix ++ ['(']) ('(':x) xs
splitOnArrow prefix ('(':x) (')':xs) = splitOnArrow (prefix ++ [')']) x xs
splitOnArrow prefix x (c:xs) = splitOnArrow (prefix ++ [c]) x xs
splitOnArrow prefix _ [] = (prefix, [])
-- | Parse a single-parameter class constraint such as @\"Num a\"@.
--   NOTE(review): the list pattern is partial — input with other than
--   exactly two words crashes; multi-constraint contexts are unsupported.
parseConstraint :: String -> Constraint
parseConstraint str =
  let
    [tclass, var] = words str
  in Constraint (TypeClass tclass) $ Type var
-- | Wrap a raw type string; no structural parsing is attempted yet.
parseTypeVariable :: String -> TypeVariable
parseTypeVariable = TypeVariable
|
vektordev/GP
|
src/GRPDictionaryGenerator.hs
|
gpl-2.0
| 5,408
| 0
| 15
| 861
| 1,721
| 905
| 816
| 89
| 4
|
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Keymap.Vim.Ex.Commands.BufferDelete
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
module Yi.Keymap.Vim.Ex.Commands.BufferDelete (parse) where
import Control.Applicative
import Control.Monad
import Data.Text ()
import qualified Text.ParserCombinators.Parsec as P
import Yi.Editor
import Yi.Keymap
import Yi.Keymap.Vim.Common
import qualified Yi.Keymap.Vim.Ex.Commands.Common as Common
import Yi.Keymap.Vim.Ex.Types
-- | Parse the ex commands @:bdelete@, @:bdel@ and @:bd@, all of which close
--   the current buffer and its window.  The alternatives are tried longest
--   first (each inside 'P.try') so \"bdelete\" is not half-consumed as a
--   shorter prefix.
parse :: EventString -> Maybe ExCommand
parse = Common.parse $ do
    void $ P.try ( P.string "bdelete") <|> P.try ( P.string "bdel") <|> P.try (P.string "bd")
    return $ Common.pureExCommand {
        cmdShow = "bdelete"
      , cmdAction = EditorA closeBufferAndWindowE
      }
|
atsukotakahashi/wi
|
src/library/Yi/Keymap/Vim/Ex/Commands/BufferDelete.hs
|
gpl-2.0
| 951
| 0
| 14
| 217
| 190
| 115
| 75
| 18
| 1
|
module SongMaker.Read.Sheet (isSheetLine) where
import SongMaker.Common
import Data.List
-- | A line belongs to a sheet block when it starts with a pipe character.
isSheetLine :: Line -> Bool
isSheetLine line = "|" `isPrefixOf` line
|
firefrorefiddle/songmaker
|
src/SongMaker/Read/Sheet.hs
|
gpl-2.0
| 152
| 0
| 5
| 19
| 42
| 27
| 15
| 5
| 1
|
----------------------------------------------------------------------------
-- |
-- Module : Text.XML.Schema.Validator
-- Copyright : (c) Simon Foster 2004
-- License : GPL version 2 (see COPYING)
--
-- Maintainer : aca01sdf@shef.ac.uk
-- Stability : experimental
-- Portability : non-portable (ghc >= 6 only)
--
-- A Validator for XML Schema.
--
-- @This file is part of HAIFA.@
--
-- @HAIFA is free software; you can redistribute it and\/or modify it under the terms of the
-- GNU General Public License as published by the Free Software Foundation; either version 2
-- of the License, or (at your option) any later version.@
--
-- @HAIFA is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
-- even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.@
--
-- @You should have received a copy of the GNU General Public License along with HAIFA; if not,
-- write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA@
----------------------------------------------------------------------------
module Text.XML.Schema.Validator where
import Text.XML.Schema.Structure
import Utils
import Data.List
import Control.Monad
import Control.Monad.Error
import Maybe
import Text.XML.Schema.BasicTypes
import Text.Regex
import Data.FiniteMap
import Governor
-- | The base type, which will validate any String: named
--   \"anySimpleType\" and carrying an entirely unconstrained restriction.
anySimpleType =
    Simp (S_Sel []) Nothing (return $ newNCName "anySimpleType") (Just (S_Restr (R_SimpRestr Nothing Nothing Nothing [] [] Nothing)))
-- | Validate a lexical value @s@ against a simple type @t@, with @m@ mapping
--   qualified names to known simple types.  Restriction types delegate to
--   'validateSimpRestricted'; list types validate each whitespace-separated
--   item against the item type; union types succeed if any member accepts.
simpleTypeValidate :: FiniteMap (String, String) SimpleType -> String -> SimpleType -> Either String ()
simpleTypeValidate m s t = do
    x <- (maybe2Either "FIXME: Do we fail on no restriction or extension in SimpleType?" $ se_cont t)
    case x of
        S_Restr r -> validateSimpRestricted m s r
        S_List _ lt st -> let l = (delimit s ' ') in
                              listType lt >>= valList l >> (st ?> (valList l . fromJust, return ()))
        S_Union _ ut st -> let tl = (map fromJust $ filter isJust $ map lookupType ut) ++ st in
                               valUnion tl s
    where
        lookupType = lookupFM m . qn2pair
        -- An absent item type defaults to 'anySimpleType'.
        listType lt =
            lt ?> (maybe2Either "Type not found in given namespaces" . lookupType . fromJust, return anySimpleType)
        valList [] _ = return ()
        valList (h:t) st = simpleTypeValidate m h st >> valList t st
        valUnion [] _ = fail "Value does not validate against any types in the union type"
        -- If we catch an error, we need to see if it validates against the next item in the Union. Otherwise
        -- we have a valid value.
        valUnion (h:t) st = catchError (simpleTypeValidate m st h) (\e -> valUnion t st)
-- | Validate a restricted type, first validate it by it's base type by calling simpleTypeValidate and
-- then check the list of restrictions on it.  The base check is skipped for
-- an absent base or for anySimpleType itself; enumeration checking is
-- currently disabled (see the commented-out @checkEnums@ below).
validateSimpRestricted :: FiniteMap (String, String) SimpleType -> String -> Restriction -> Either String ()
validateSimpRestricted m s r = (validBase) >> (validSimp) >> (validRestr)
    where
        validBase = ((isNothing base) || ((snd basePair)=="anySimpleType")) ? (return (), baseType >>= simpleTypeValidate m s)
            where
                base = ra_base r
                basePair = qn2pair $ fromJust base
                baseType = maybe2Either "Type not found in given namespaces" $ lookupFM m basePair
        validSimp = (re_simpleType r) ?> (simpleTypeValidate m s . fromJust, return ())
        validRestr = {-checkEnums ens >>-} checkRestr res
            where
                -- Split the facets into enumerations and everything else.
                ens = filter (\x->case x of Enumeration _ _->True;_->False) (re_restrParam r)
                res = filter (\x->case x of Enumeration _ _->False;_->True) (re_restrParam r)
                checkEnums [] = return (Enumeration Nothing "")
                checkEnums e = maybe2Either "Does not match any of the enumeration options" $ find (\(Enumeration _ x)->x==s) e
                -- Check every remaining facet; NOTE(review): range facets
                -- compare the value lexically (String Ord), not numerically.
                checkRestr [] = return ()
                checkRestr (h:t) = do
                    checkRestr t
                    case h of
                        MinExclusive _ x _ -> rng s x (>)
                        MinInclusive _ x _ -> rng s x (>=)
                        MaxExclusive _ x _ -> rng s x (<)
                        MaxInclusive _ x _ -> rng s x (<=)
                        TotalDigits _ x _ ->
                            ((length $ delete '.' s)<=x) ? (return (), fail "Too many digits")
                        FractionDigits _ x _ ->
                            (('.' `elem` s) && ((length $ tail $ snd $ span (/='.') s)>=x)) ? (fail "Too many fractional digits", return ())
                        Length _ x _ -> len s x (==)
                        MinLength _ x _ -> len s x (>=)
                        MaxLength _ x _ -> len s x (<=)
                        WhiteSpace _ _ _ -> return () -- FIXME : Implement
                        Pattern _ p ->
                            (isNothing $ matchRegex (mkRegex $ formReg p) s) ? (fail "Does not match pattern", return ())
                        -- FIXME : The Pattern matcher is untested and I dunno if it provides the full set
                        -- of escape characters and shorthands.
                    where
                        rng s x f = (s `f` x) ? (return (), fail "Value out of range")
                        len s x f = ((length s) `f` x) ? (return (), fail "Incorrect length")
                        -- Nasty hack; Text.Regex won't accept the regular expressions supplied in XSD if
                        -- the contain escaped minus signs, thus this de-escapes them. Which is right and
                        -- which is wrong, I have no idea, but this works. If you have a better solution
                        -- please implement it!
                        formReg p = replace ("^"++p++"$") ("\\-", "-")
|
twopoint718/haifa
|
src/Text/XML/Schema/Validator.hs
|
gpl-2.0
| 6,104
| 0
| 22
| 1,995
| 1,399
| 730
| 669
| 62
| 15
|
-- | Evaluator for the toy language defined in "Parse": values are unary
-- encodings built from chains of 'Ptr', with 'Concat' deferring appends.
module Eval(Val(..),eval,evalCall,numToVal,valToNum)
where
import Parse(Def(..),Expr(..),Fn,Pattern(..))
-- | Runtime value: 'Nil' encodes zero/empty, each 'Ptr' adds one unit,
-- and 'Concat' lazily appends two values without flattening them.
data Val = Ptr Val | Nil | Concat Val Val
-- | Encode an 'Integer' as a 'Ptr' chain; non-positive inputs give 'Nil'.
numToVal :: Integer -> Val
numToVal x | x > 0 = Ptr (numToVal (x - 1)) | otherwise = Nil
-- | Count the total number of 'Ptr' constructors, flattening 'Concat's
-- with a strict-style accumulator.
valToNum :: Val -> Integer
valToNum x = toNum 0 x
  where
    toNum n Nil = n
    toNum n (Ptr x) = toNum (n+1) x
    toNum n (Concat v1 v2) = toNum (toNum n v1) v2
-- | True iff the value contains no 'Ptr' at all, i.e. encodes zero/empty.
vnil :: Val -> Bool
vnil Nil = True
vnil (Concat v1 v2) = vnil v1 && vnil v2
vnil _ = False
-- | Drop @n@ 'Ptr' constructors from a value, looking through 'Concat'
-- nodes; 'Nothing' when the value holds fewer than @n@ units.
vtail :: Val -> Int -> Maybe Val
vtail v 0 = Just v
vtail (Ptr v) n = vtail v (n-1)
vtail (Concat Nil v) n = vtail v n
vtail (Concat (Ptr Nil) v) n = vtail v (n-1)
vtail (Concat (Ptr v1) v2) n = vtail (Concat v1 v2) (n-1)
-- Reassociate a left-nested 'Concat' so the equations above can make
-- progress.  Without this case the match was non-exhaustive: e.g.
-- @vtail (Concat (Concat Nil (Ptr Nil)) Nil) 1@ hit a pattern-match
-- failure at runtime.  Reassociation preserves the flattened sequence,
-- so observable behaviour on previously-working inputs is unchanged.
vtail (Concat (Concat v1 v2) v3) n = vtail (Concat v1 (Concat v2 v3)) n
vtail Nil _ = Nothing
-- | Evaluate an expression under a list of positional bindings
-- (bound variables are indices into the list, resolved with '!!').
eval :: [Val] -> Expr -> Val
eval bindings (ExprLiteral n) = numToVal (fromIntegral n)
eval bindings (ExprBound i) = bindings !! i
eval bindings (ExprConcat e1 e2) = Concat (eval bindings e1) (eval bindings e2)
eval bindings (ExprFuncall fn exprs) = evalCall fn (map (eval bindings) exprs)
-- | Try each definition of a function in order and evaluate the body of
-- the first whose patterns match all arguments; 'error' when none does
-- (deliberately partial: a call with no matching clause is a program bug).
evalCall :: Fn -> [Val] -> Val
evalCall [] args = error "evalCall failed: No matching definition"
evalCall (Def patterns expr:defs) args =
  maybe (evalCall defs args) (flip eval expr) (matchPats patterns args [])
  where
    -- Bindings are accumulated in reverse and flipped once every
    -- pattern has matched; any length mismatch fails the clause.
    matchPats [] [] bindings = Just (reverse bindings)
    matchPats [] _ _ = Nothing
    matchPats _ [] _ = Nothing
    matchPats (PatternBound n:pats) (val:vals) bindings =
      maybe Nothing (matchPats pats vals . (:bindings)) (vtail val n)
    matchPats (PatternLiteral n:pats) (val:vals) bindings =
      maybe Nothing
            (\ v -> if vnil v then matchPats pats vals bindings else Nothing)
            (vtail val n)
    matchPats (PatternIgnore n:pats) (val:vals) bindings =
      maybe Nothing (const (matchPats pats vals bindings)) (vtail val n)
|
qpliu/esolang
|
:_/hs/eval.hs
|
gpl-3.0
| 1,828
| 0
| 11
| 415
| 909
| 464
| 445
| 41
| 7
|
{-# LANGUAGE OverloadedStrings #-}
import Control.Arrow
import Control.Exception
import Control.Monad
import Control.Monad.Reader
import qualified Data.ByteString.Char8 as B
import Data.Char
import Data.List
import HIRC.Parser
import Network
import Network.IRC.Base
import Network.IRC.Commands
import Network.IRC.Parser
import System.Exit
import System.IO
import Text.Printf
-- | For testing we pester everyone on Teleragno's channel :)
-- (translated from the original French comment)
server = "irc.teleragno.fr" :: B.ByteString
port = 6667
chan = "#bistro" :: B.ByteString
nickname = "haskell-bot" :: B.ByteString
-- | The 'Net' monad, a wrapper over IO, carrying the bot's immutable state.
type Net = ReaderT Bot IO
-- | Immutable bot state: just the handle of the server connection.
data Bot = Bot { socket :: Handle }
-- | Entry point: connect to the IRC server and run the listen loop;
-- 'bracket' guarantees the socket is closed however the loop exits.
-- (translated from the original French comment)
main :: IO ()
main = bracket connect disconnect loop
  where
    disconnect = hClose . socket
    loop = runReaderT run
-- | Connect to the server and return the initial bot state
connect :: IO Bot
connect = notify $ do
    -- Unbuffered so outgoing commands hit the wire immediately.
    handle <- connectTo (B.unpack server) (PortNumber (fromIntegral port))
    hSetBuffering handle NoBuffering
    return (Bot handle)
  where notify = bracket_ pre post
        -- Progress message printed around the connection attempt.
        pre = printf "Connecting to %s ... " (B.unpack server) >> hFlush stdout
        post = putStrLn "done."
-- | We're in the Net monad now, so we've connected successfully
-- Join a channel, and start processing commands
run :: Net ()
run = do
    -- IRC registration: NICK, then USER, then join the channel.
    write $ nick nickname
    write $ user nickname (B.pack "0") (B.pack "*") (B.pack "Haskell IRC Bot")
    write $ joinChan chan
    handle <- asks socket
    listen handle
-- | Server listen loop: read lines forever, echo them to stdout, and
-- answer every line that parses to an IRC command.
-- (translated from the original French comment)
listen :: Handle -> Net ()
listen handle = forever $ do
    command <- stripCR `fmap` io (hGetLine handle)
    io (putStrLn command)
    let message = processIrcCommand command
    maybe (return ()) write message
  where
    -- hGetLine keeps the '\r' of the IRC "\r\n" terminator.  The original
    -- 'init' also dropped one character, but crashed on an empty line and
    -- ate a real character whenever the server sent a bare "\n".
    stripCR = dropWhileEnd (== '\r')
-- | Interpret one raw IRC line from the server: answer PING, forward
-- channel messages to the user-command handler, ignore everything else.
-- (translated from the original French comment)
processIrcCommand :: String -> Maybe Message
processIrcCommand x
  | "PING :" `isPrefixOf` x = Just $ pong server
  | ("PRIVMSG " ++ B.unpack chan) `isInfixOf` x = processUserCommand (clean x)
  | otherwise = Nothing
  where
    -- Strip the "<prefix> PRIVMSG <chan> :" framing, keeping the text.
    -- 'drop 1' is total, unlike the original 'tail': a malformed line
    -- without a second ':' now yields "" instead of crashing the bot.
    clean = drop 1 . dropWhile ( /= ':') . drop 1
-- | Interpret a command typed by a user in the channel.
-- (translated from the original French comment)
processUserCommand :: String -> Maybe Message
processUserCommand x
  | x == "!quit" = Just $ quit . Just . B.pack $ "Exiting"
  | "!id " `isPrefixOf` x = Just $ privmsg chan (B.pack (drop 4 x))
  | "coin" `isInfixOf` map toLower x = Just $ privmsg chan (B.pack "PAN !")
  | otherwise = Nothing
-- | Encode a message, send it to the server and echo it on stdout.
write :: Message -> Net()
write message = do
    handle <- asks socket
    let string = B.unpack . encode $ message
    io $ hPutStrLn handle string
    io $ putStrLn string
-- Convenience.
io :: IO a -> Net a
io = liftIO
|
Taeradan/hirc-bot
|
src/Main.hs
|
gpl-3.0
| 3,204
| 0
| 13
| 963
| 824
| 419
| 405
| 67
| 1
|
-- | Benchmark property T46 for the Zeno theorem prover.  All boolean,
-- arithmetic, list and set operations are redefined locally over a unary
-- 'Nat' type; only 'Bool' is taken from the Prelude.
module PropT46 where
import Prelude(Bool(..))
import Zeno
-- Definitions
True && x = x
_ && _ = False
False || x = x
_ || _ = True
not True = False
not False = True
-- Nats
data Nat = S Nat | Z
(+) :: Nat -> Nat -> Nat
Z + y = y
(S x) + y = S (x + y)
(*) :: Nat -> Nat -> Nat
Z * _ = Z
(S x) * y = y + (x * y)
(==),(/=) :: Nat -> Nat -> Bool
Z == Z = True
Z == _ = False
S _ == Z = False
S x == S y = x == y
x /= y = not (x == y)
(<=) :: Nat -> Nat -> Bool
Z <= _ = True
_ <= Z = False
S x <= S y = x <= y
one, zero :: Nat
zero = Z
one = S Z
double :: Nat -> Nat
double Z = Z
double (S x) = S (S (double x))
even :: Nat -> Bool
even Z = True
even (S Z) = False
even (S (S x)) = even x
half :: Nat -> Nat
half Z = Z
half (S Z) = Z
half (S (S x)) = S (half x)
-- 'mult', 'qfac', 'qexp' are accumulator variants of (*), 'fac', 'exp'.
mult :: Nat -> Nat -> Nat -> Nat
mult Z _ acc = acc
mult (S x) y acc = mult x y (y + acc)
fac :: Nat -> Nat
fac Z = S Z
fac (S x) = S x * fac x
qfac :: Nat -> Nat -> Nat
qfac Z acc = acc
qfac (S x) acc = qfac x (S x * acc)
exp :: Nat -> Nat -> Nat
exp _ Z = S Z
exp x (S n) = x * exp x n
qexp :: Nat -> Nat -> Nat -> Nat
qexp x Z acc = acc
qexp x (S n) acc = qexp x n (x * acc)
-- Lists
length :: [a] -> Nat
length [] = Z
length (_:xs) = S (length xs)
(++) :: [a] -> [a] -> [a]
[] ++ ys = ys
(x:xs) ++ ys = x : (xs ++ ys)
drop :: Nat -> [a] -> [a]
drop Z xs = xs
drop _ [] = []
drop (S x) (_:xs) = drop x xs
-- 'qrev' and 'qrevflat' are accumulator variants of 'rev' and 'revflat'.
rev :: [a] -> [a]
rev [] = []
rev (x:xs) = rev xs ++ [x]
qrev :: [a] -> [a] -> [a]
qrev [] acc = acc
qrev (x:xs) acc = qrev xs (x:acc)
revflat :: [[a]] -> [a]
revflat [] = []
revflat ([]:xss) = revflat xss
revflat ((x:xs):xss) = revflat (xs:xss) ++ [x]
qrevflat :: [[a]] -> [a] -> [a]
qrevflat [] acc = acc
qrevflat ([]:xss) acc = qrevflat xss acc
qrevflat ((x:xs):xss) acc = qrevflat (xs:xss) (x:acc)
rotate :: Nat -> [a] -> [a]
rotate Z xs = xs
rotate _ [] = []
rotate (S n) (x:xs) = rotate n (xs ++ [x])
elem :: Nat -> [Nat] -> Bool
elem _ [] = False
elem n (x:xs) = n == x || elem n xs
-- NOTE(review): 'x `elem` xs' below looks like it should be 'x `elem` ys';
-- kept verbatim because these benchmark definitions may be deliberate —
-- confirm against the upstream Zeno/HipSpec test suite before changing.
subset :: [Nat] -> [Nat] -> Bool
subset [] ys = True
subset (x:xs) ys = x `elem` xs && subset xs ys
intersect,union :: [Nat] -> [Nat] -> [Nat]
(x:xs) `intersect` ys | x `elem` ys = x:(xs `intersect` ys)
                      | otherwise = xs `intersect` ys
[] `intersect` ys = []
union (x:xs) ys | x `elem` ys = union xs ys
                | otherwise = x:(union xs ys)
union [] ys = ys
-- Insertion sort over 'Nat' lists.
isort :: [Nat] -> [Nat]
isort [] = []
isort (x:xs) = insert x (isort xs)
insert :: Nat -> [Nat] -> [Nat]
insert n [] = [n]
insert n (x:xs) =
  case n <= x of
    True -> n : x : xs
    False -> x : (insert n xs)
count :: Nat -> [Nat] -> Nat
count n (x:xs) | n == x = S (count n xs)
               | otherwise = count n xs
count n [] = Z
sorted :: [Nat] -> Bool
sorted (x:y:xs) = x <= y && sorted (y:xs)
sorted _ = True
-- Theorem
-- | Given x == y, x is an element of @insert y z@.
prop_T46 :: Nat -> Nat -> [Nat] -> Prop
prop_T46 x y z = given (x :=: y)
                 $ proveBool (x `elem` insert y z)
|
danr/hipspec
|
testsuite/prod/zeno_version/PropT46.hs
|
gpl-3.0
| 3,016
| 0
| 10
| 939
| 2,013
| 1,048
| 965
| 115
| 2
|
{-
This file is part of HNH.
HNH is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
HNH is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with HNH. If not, see <http://www.gnu.org/licenses/>.
Copyright 2010 Francisco Ferreira
-}
-- | Helpers for reading and writing the type annotations stored in the
-- AST nodes defined in "Syntax".
module TypeUtils
    (
     addType -- TODO eliminate unused functions
    , addPatType
    , litToExp
    , assembleInfixOperator
    , getType
    , getPatType
    , resultingType
    , getAltType
    , getAltPatTypes
    , getDataTypes
    , DataType
    , getConstType
    , getConstTypeParams
    , isVarDecl
    , getTupleType
    , constToFun
    )
    where
import Syntax
import Data.List(find)
-- returns the type resulting from the application of a FunType,
-- or UnknownType if the type is not appropriate
resultingType :: Type -> Type
resultingType (FunType _ t) = t
resultingType _ = UnknownType
-- | Type annotation carried by an expression node (last field of every
-- constructor).
getType :: Exp -> Type
getType (VarExp _ t) = t
getType (ConExp _ _ t) = t
getType (LitExp _ t) = t
getType (InfixOpExp _ t) = t
getType (FExp _ _ t) = t
getType (MinusExp _ t) = t
getType (MinusFloatExp _ t) = t
getType (LambdaExp _ _ t) = t
getType (LetExp _ _ t) = t
getType (IfExp _ _ _ t) = t
getType (CaseExp _ _ t) = t
getType (ParensExp _ t) = t
getType (TupleExp _ t) = t
getType (ListExp _ t) = t
getType (IdVarExp _ t) = t
getType (IdConExp _ _ t) = t
getType (Prim _ _ t) = t
getType (IdPrim _ _ t) = t
-- | Type annotation carried by a pattern node.
getPatType :: Pattern -> Type
getPatType (VarPat _ t) = t
getPatType (ConPat _ _ t) = t
getPatType (TuplePat _ t) = t
getPatType (WildcardPat t) = t
getPatType (IdVarPat _ t) = t
getPatType (IdConPat _ _ _ t) = t
getPatType (IdTuplePat _ t) = t
-- | Type of a case alternative's right-hand side.
getAltType :: Alternative -> Type
getAltType (Alternative _ e) = getType e
-- | Types of a case alternative's patterns.
getAltPatTypes :: Alternative -> [Type]
getAltPatTypes (Alternative ps _) = map getPatType ps
-- addType adds type information to an expression
-- (replaces the annotation; all other fields pass through unchanged)
addType :: Exp -> Type -> Exp
addType (VarExp n _) t = VarExp n t
addType (ConExp n params _) t = ConExp n params t
addType (LitExp v _) t = LitExp v t
addType (InfixOpExp e _) t = InfixOpExp e t
addType (FExp e e' _) t = FExp e e' t
addType (MinusExp e _) t = MinusExp e t
addType (MinusFloatExp e _) t = MinusFloatExp e t
addType (LambdaExp p e _) t = LambdaExp p e t
addType (LetExp d e _) t = LetExp d e t
addType (IfExp e1 e2 e3 _) t = IfExp e1 e2 e3 t
addType (CaseExp e a _) t = CaseExp e a t
addType (ParensExp e _) t = ParensExp e t
addType (TupleExp e _) t = TupleExp e t
addType (ListExp e _) t = ListExp e t
addType (IdVarExp i _) t = IdVarExp i t
addType (IdConExp i params _) t = IdConExp i params t
addType (Prim n params _) t = Prim n params t
addType (IdPrim n params _) t = IdPrim n params t
-- | Replace the type annotation of a pattern.
addPatType :: Pattern -> Type -> Pattern
addPatType (VarPat n _ ) t = (VarPat n t)
addPatType (ConPat n ns _) t = (ConPat n ns t)
addPatType (TuplePat ns _) t = (TuplePat ns t)
addPatType (WildcardPat _) t = (WildcardPat t)
addPatType (IdVarPat i _) t = (IdVarPat i t)
addPatType (IdConPat n ids ts _) t = (IdConPat n ids ts t)
addPatType (IdTuplePat ids _) t = (IdTuplePat ids t)
-- litToExp creates an Expression from a literal (with the right type)
litToExp :: LiteralValue -> Exp
litToExp val@(LiteralInt _) = LitExp val (PrimType "Int")
litToExp val@(LiteralFloat _) = LitExp val (PrimType "Float")
litToExp val@(LiteralString _) = LitExp val (PrimType "String")
litToExp val@(LiteralChar _) = LitExp val (PrimType "Char")
-- assembleInfixOperator builds an infix operator structure
-- (for fixity adaptation later); operand expressions that are already
-- operator trees are merged in, leaves are wrapped in LeafExp
assembleInfixOperator :: Exp -> Operator -> Exp -> Exp
assembleInfixOperator (InfixOpExp opEx1 _) op (InfixOpExp opEx2 _) =
  InfixOpExp
  (Op op opEx1 opEx2)
  UnknownType
assembleInfixOperator (InfixOpExp opEx _) op e = InfixOpExp
                                                 (Op op opEx (LeafExp e))
                                                 UnknownType
assembleInfixOperator e op (InfixOpExp opEx _) = InfixOpExp
                                                 (Op op (LeafExp e) opEx)
                                                 UnknownType
assembleInfixOperator e1 op e2 = InfixOpExp
                                 (Op op (LeafExp e1) (LeafExp e2))
                                 UnknownType
-- Constructors to Functions
-- | Replace every data declaration by one pattern binding per
-- constructor, so each constructor can be used as an ordinary
-- (possibly curried) function; non-data declarations are dropped.
constToFun :: [Declaration] -> [Declaration]
constToFun decls = concatMap procDecl decls
  where
    procDecl (DataDcl t cons) = map (procCons t) cons
    procDecl d = []
    -- A nullary constructor becomes a plain binding to itself.
    procCons t (ConDcl n []) = (PatBindDcl (VarPat n t) (ConExp n [] t))
    -- An n-ary constructor becomes a lambda over n fresh parameters.
    procCons t (ConDcl n ts) = (PatBindDcl
                                (VarPat n (toFun (ts++[t])))
                                (LambdaExp
                                 pats
                                 (ConExp n params t)
                                 UnknownType))
      where
        pats = map (\n -> (VarPat n UnknownType)) params
        params = map (\c->[c]) (take (length ts) varNames)
        toFun (t:[]) = t
        toFun (t:ts) = FunType t (toFun ts)
    -- BUG FIX: the alphabet previously read "abcdefghijkalmnopqrstuvwxyz"
    -- (stray 'a' after 'k'), so any constructor with 12 or more fields
    -- got two lambda parameters both named "a".
    varNames = "abcdefghijklmnopqrstuvwxyz"
-- | A data declaration: its type together with its constructors.
type DataType = (Type, [Constructor])
-- | All data declarations of a program as (type, constructors) pairs.
getDataTypes :: Program -> [DataType]
getDataTypes (Program decls) = map getDataT (filter isDataT decls)
  where
    isDataT (DataDcl _ _) = True
    isDataT _ = False
    getDataT (DataDcl t cs) = (t,cs)
-- | Type that declares the constructor with the given name, if any.
getConstType :: [DataType] -> Name -> Maybe Type
getConstType dts n =
  find isData dts >>= return . fst
  where
    isData (_, cons) = case find isCon cons of Just _ -> True
                                               Nothing -> False
    isCon (ConDcl n' _) = n == n'
-- | Field types of the constructor with the given name, if declared.
getConstTypeParams :: [DataType] -> Name -> Maybe [Type]
getConstTypeParams dts n =
  find isCon cons >>= return . getConType
  where
    cons = concat . snd . unzip $ dts
    isCon (ConDcl n' _) = n == n'
    getConType (ConDcl _ ts) = ts
-- | True for pattern bindings (variable declarations).
isVarDecl :: Declaration -> Bool
isVarDecl (PatBindDcl _ _) = True
isVarDecl _ = False
-- | Project component @n@ of a tuple type; 'UnknownType' for non-tuple
-- types or an out-of-range index.  The original unguarded @ts!!n@ could
-- crash on a bad index; this version is total.
getTupleType :: Type -> Int -> Type
getTupleType (TupleType ts) n
  | n >= 0 && n < length ts = ts !! n
getTupleType _ _ = UnknownType
|
fferreira/hnh
|
TypeUtils.hs
|
gpl-3.0
| 6,527
| 0
| 15
| 1,888
| 2,260
| 1,149
| 1,111
| 140
| 4
|
-- | HUnit tests for the grid conversion and combat logic in ConvertGrid.
module TestMain where
import Test.HUnit
import ConvertGrid --(toGrid, findEnemy, applyPlayerDamage, handlePlayerAttack, killDeadEnemies)
import Model
-- Fixtures: two enemies on distinct grid cells and one player.
enemyA :: Enemy
enemyA = Enemy (0,0) 5 5 1
enemyB :: Enemy
enemyB = Enemy (1,1) 5 5 2
player :: Player
player = Player (2,2) 5 5 1 0
-- Game state holding both fixture enemies and the fixture player.
testGs :: GameState
testGs = GameState True [] [] [] [enemyA, enemyB] player 0
-- Pixel coordinates map to grid cells (boundary at 31/32 pixels).
testToGrid :: Test
testToGrid = TestList
  [ "Basic Grid test #1" ~: toGrid (31, 31) ~=? (0,0)
  , "Basic Grid test #2" ~: toGrid (32, 32) ~=? (1,1)
  ]
-- Index lookup of an enemy by grid position; -1 when nothing is there.
testFindEnemy :: Test
testFindEnemy = TestList
  [ "Find second enemy" ~: findEnemy (1,1) [enemyA, enemyB] ~=? 1
  , "Find first enemy" ~: findEnemy (0,0) [enemyA, enemyB] ~=? 0
  , "Find no enemy" ~: findEnemy (2,2) [enemyA, enemyB] ~=? (-1)
  ]
-- Attacks reduce enemy health both directly and through the game state.
testPlayerAttack :: Test
testPlayerAttack = TestList
  [ "Attack chosen enemy" ~: applyPlayerDamage player enemyA ~=? Enemy (0,0) 4 5 1
  , "Attack enemy in array" ~: enemies (handlePlayerAttack (1,1) testGs) ~=?
      [enemyA, enemyB { eHealth = 4}]
  ]
-- Killing both fixture enemies grants 3 experience — presumably their
-- last constructor fields (1 + 2); confirm against Model.
testPlayerExpGain :: Test
testPlayerExpGain = TestList
  [ "Exp on kill two" ~: 3 ~=? playerExp
  ]
  where playerExp = pExp ( gPlayer gs')
        gs' = killDeadEnemies (testGs { enemies = [
                  enemyA {eHealth = 0},
                  enemyB {eHealth = -1}
                ]})
-- | Run the whole suite.
main :: IO Counts
main = runTestTT $ TestList
  [ testToGrid
  , testFindEnemy
  , testPlayerAttack
  , testPlayerExpGain
  ]
|
Spacejoker/DDclone
|
src/Test.hs
|
gpl-3.0
| 1,445
| 0
| 14
| 336
| 498
| 281
| 217
| 39
| 1
|
-- | Tasty tests for the youtube shortcode service.
module Tests.Hakyll.Shortcode.Service.YouTube (
  service_test_youtube
) where
import Hakyll.Shortcode
import Test.Tasty
import Test.Tasty.HUnit
-- | All youtube shortcode tests, grouped for the top-level suite.
service_test_youtube :: TestTree
service_test_youtube = testGroup "youtube"
  [ youtube_no_params
  ]
-- | Must provide either @id@ or both @list@ and @list-type@.
-- Expanding a bare @[youtube]@ tag therefore reports an error in place.
youtube_no_params :: TestTree
youtube_no_params = testCase "youtube: no parameters" $
  expandShortcodes [YouTube] "<p>[youtube]</p>"
    @?= "(Error: either the 'id' or the 'list' and 'list-type' parameter must be set.)"
|
nbloomf/hakyll-shortcode
|
test/Tests/Hakyll/Shortcode/Service/YouTube.hs
|
gpl-3.0
| 571
| 0
| 12
| 86
| 99
| 55
| 44
| 13
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.StorageGateway.UpdateVTLDeviceType
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | This operation updates the type of medium changer in a gateway-VTL. When you
-- activate a gateway-VTL, you select a medium changer type for the gateway-VTL.
-- This operation enables you to select a different type of medium changer after
-- a gateway-VTL is activated.
--
-- <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_UpdateVTLDeviceType.html>
module Network.AWS.StorageGateway.UpdateVTLDeviceType
    (
    -- * Request
      UpdateVTLDeviceType
    -- ** Request constructor
    , updateVTLDeviceType
    -- ** Request lenses
    , uvtldtDeviceType
    , uvtldtVTLDeviceARN
    -- * Response
    , UpdateVTLDeviceTypeResponse
    -- ** Response constructor
    , updateVTLDeviceTypeResponse
    -- ** Response lenses
    , uvtldtrVTLDeviceARN
    ) where
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.StorageGateway.Types
import qualified GHC.Exts
-- | Request payload: the device ARN to change and the new device type.
data UpdateVTLDeviceType = UpdateVTLDeviceType
    { _uvtldtDeviceType :: Text
    , _uvtldtVTLDeviceARN :: Text
    } deriving (Eq, Ord, Read, Show)
-- | 'UpdateVTLDeviceType' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'uvtldtDeviceType' @::@ 'Text'
--
-- * 'uvtldtVTLDeviceARN' @::@ 'Text'
--
updateVTLDeviceType :: Text -- ^ 'uvtldtVTLDeviceARN'
                    -> Text -- ^ 'uvtldtDeviceType'
                    -> UpdateVTLDeviceType
updateVTLDeviceType p1 p2 = UpdateVTLDeviceType
    { _uvtldtVTLDeviceARN = p1
    , _uvtldtDeviceType = p2
    }
-- | The type of medium changer you want to select.
--
-- /Valid Values/: "STK-L700", "AWS-Gateway-VTL"
uvtldtDeviceType :: Lens' UpdateVTLDeviceType Text
uvtldtDeviceType = lens _uvtldtDeviceType (\s a -> s { _uvtldtDeviceType = a })
-- | The Amazon Resource Name (ARN) of the medium changer you want to select.
uvtldtVTLDeviceARN :: Lens' UpdateVTLDeviceType Text
uvtldtVTLDeviceARN =
    lens _uvtldtVTLDeviceARN (\s a -> s { _uvtldtVTLDeviceARN = a })
-- | Response payload: the ARN of the selected medium changer, when present.
newtype UpdateVTLDeviceTypeResponse = UpdateVTLDeviceTypeResponse
    { _uvtldtrVTLDeviceARN :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)
-- | 'UpdateVTLDeviceTypeResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'uvtldtrVTLDeviceARN' @::@ 'Maybe' 'Text'
--
updateVTLDeviceTypeResponse :: UpdateVTLDeviceTypeResponse
updateVTLDeviceTypeResponse = UpdateVTLDeviceTypeResponse
    { _uvtldtrVTLDeviceARN = Nothing
    }
-- | The Amazon Resource Name (ARN) of the medium changer you have selected.
uvtldtrVTLDeviceARN :: Lens' UpdateVTLDeviceTypeResponse (Maybe Text)
uvtldtrVTLDeviceARN =
    lens _uvtldtrVTLDeviceARN (\s a -> s { _uvtldtrVTLDeviceARN = a })
-- The request targets the service root; all parameters travel in the
-- JSON body, so path and query are trivial.
instance ToPath UpdateVTLDeviceType where
    toPath = const "/"
instance ToQuery UpdateVTLDeviceType where
    toQuery = const mempty
instance ToHeaders UpdateVTLDeviceType
instance ToJSON UpdateVTLDeviceType where
    toJSON UpdateVTLDeviceType{..} = object
        [ "VTLDeviceARN" .= _uvtldtVTLDeviceARN
        , "DeviceType" .= _uvtldtDeviceType
        ]
instance AWSRequest UpdateVTLDeviceType where
    type Sv UpdateVTLDeviceType = StorageGateway
    type Rs UpdateVTLDeviceType = UpdateVTLDeviceTypeResponse
    request = post "UpdateVTLDeviceType"
    response = jsonResponse
instance FromJSON UpdateVTLDeviceTypeResponse where
    parseJSON = withObject "UpdateVTLDeviceTypeResponse" $ \o -> UpdateVTLDeviceTypeResponse
        <$> o .:? "VTLDeviceARN"
|
dysinger/amazonka
|
amazonka-storagegateway/gen/Network/AWS/StorageGateway/UpdateVTLDeviceType.hs
|
mpl-2.0
| 4,517
| 0
| 9
| 915
| 515
| 313
| 202
| 64
| 1
|
-- This Source Code Form is subject to the terms of the Mozilla Public
-- License, v. 2.0. If a copy of the MPL was not distributed with this
-- file, You can obtain one at http://mozilla.org/MPL/2.0/.
{-# LANGUAGE DataKinds #-}
-- | Type-level description of the Kafka JoinGroup request/response pair.
module Network.Kafka.Protocol.JoinGroup
    ( JoinGroupRequest
    , JoinGroupRequestFields
    , JoinGroupResponse
    , JoinGroupResponseFields
    , FAssignedPartitions
    , FPartitionAssignmentStrategy
    , FSessionTimeout
    , assignedPartitions
    , partitionAssignmentStrategy
    , sessionTimeout
    )
where
import Data.Proxy
import Data.Word
import Network.Kafka.Protocol.Instances ()
import Network.Kafka.Protocol.Primitive
import Network.Kafka.Protocol.Universe
-- Field descriptors: type-level (wire name, value type) pairs.
type FPartitionAssignmentStrategy = '("partition_assignment_strategy", ShortString)
type FSessionTimeout = '("session_timeout" , Word32)
-- | Value-level proxy for 'FPartitionAssignmentStrategy'.
partitionAssignmentStrategy :: Proxy FPartitionAssignmentStrategy
partitionAssignmentStrategy = Proxy
-- | Value-level proxy for 'FSessionTimeout'.
sessionTimeout :: Proxy FSessionTimeout
sessionTimeout = Proxy
type JoinGroupRequestFields
    = '[ FConsumerGroup
       , FSessionTimeout
       , FTopics
       , FConsumerId
       , FPartitionAssignmentStrategy
       ]
-- | Request descriptor built with API key 11, version 0 (see 'Req').
type JoinGroupRequest = Req 11 0 JoinGroupRequestFields
type FAssignedPartitions = '("assigned_partitions", TopicKeyed Partition)
-- | Value-level proxy for 'FAssignedPartitions'.
assignedPartitions :: Proxy FAssignedPartitions
assignedPartitions = Proxy
type JoinGroupResponseFields
    = '[ FErrorCode
       , FGeneration
       , FConsumerId
       , FAssignedPartitions
       ]
type JoinGroupResponse = Resp JoinGroupResponseFields
|
kim/kafka-protocol
|
src/Network/Kafka/Protocol/JoinGroup.hs
|
mpl-2.0
| 1,602
| 0
| 7
| 310
| 227
| 143
| 84
| 39
| 1
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Search.CSE.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns metadata about the search performed, metadata about the engine
-- used for the search, and the search results.
--
-- /See:/ <https://developers.google.com/custom-search/v1/introduction Custom Search API Reference> for @search.cse.list@.
module Network.Google.Resource.Search.CSE.List
(
-- * REST Resource
CSEListResource
-- * Creating a Request
, cSEList
, CSEList
-- * Request Lenses
, cselImgDominantColor
, cselXgafv
, cselUploadProtocol
, cselSiteSearchFilter
, cselC2coff
, cselOrTerms
, cselAccessToken
, cselStart
, cselRights
, cselUploadType
, cselExcludeTerms
, cselNum
, cselFileType
, cselSearchType
, cselLr
, cselQ
, cselGooglehost
, cselRelatedSite
, cselHl
, cselSort
, cselSiteSearch
, cselFilter
, cselDateRestrict
, cselLinkSite
, cselLowRange
, cselImgType
, cselGl
, cselCx
, cselImgColorType
, cselImgSize
, cselExactTerms
, cselCr
, cselSafe
, cselHq
, cselCallback
, cselHighRange
) where
import Network.Google.CustomSearch.Types
import Network.Google.Prelude
-- | A resource alias for @search.cse.list@ method which the
-- 'CSEList' request conforms to.
type CSEListResource =
"customsearch" :>
"v1" :>
QueryParam "imgDominantColor" CSEListImgDominantColor
:>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "siteSearchFilter" CSEListSiteSearchFilter
:>
QueryParam "c2coff" Text :>
QueryParam "orTerms" Text :>
QueryParam "access_token" Text :>
QueryParam "start" (Textual Word32) :>
QueryParam "rights" Text :>
QueryParam "uploadType" Text :>
QueryParam "excludeTerms" Text :>
QueryParam "num" (Textual Int32) :>
QueryParam "fileType" Text :>
QueryParam "searchType" CSEListSearchType :>
QueryParam "lr" Text :>
QueryParam "q" Text :>
QueryParam "googlehost" Text :>
QueryParam "relatedSite" Text :>
QueryParam "hl" Text :>
QueryParam "sort" Text :>
QueryParam "siteSearch" Text :>
QueryParam "filter" Text :>
QueryParam "dateRestrict"
Text
:>
QueryParam "linkSite"
Text
:>
QueryParam "lowRange"
Text
:>
QueryParam "imgType"
CSEListImgType
:>
QueryParam "gl"
Text
:>
QueryParam "cx"
Text
:>
QueryParam
"imgColorType"
CSEListImgColorType
:>
QueryParam
"imgSize"
CSEListImgSize
:>
QueryParam
"exactTerms"
Text
:>
QueryParam
"cr"
Text
:>
QueryParam
"safe"
CSEListSafe
:>
QueryParam
"hq"
Text
:>
QueryParam
"callback"
Text
:>
QueryParam
"highRange"
Text
:>
QueryParam
"alt"
AltJSON
:>
Get
'[JSON]
Search
-- | Returns metadata about the search performed, metadata about the engine
-- used for the search, and the search results.
--
-- /See:/ 'cSEList' smart constructor.
data CSEList =
CSEList'
{ _cselImgDominantColor :: !(Maybe CSEListImgDominantColor)
, _cselXgafv :: !(Maybe Xgafv)
, _cselUploadProtocol :: !(Maybe Text)
, _cselSiteSearchFilter :: !(Maybe CSEListSiteSearchFilter)
, _cselC2coff :: !(Maybe Text)
, _cselOrTerms :: !(Maybe Text)
, _cselAccessToken :: !(Maybe Text)
, _cselStart :: !(Maybe (Textual Word32))
, _cselRights :: !(Maybe Text)
, _cselUploadType :: !(Maybe Text)
, _cselExcludeTerms :: !(Maybe Text)
, _cselNum :: !(Maybe (Textual Int32))
, _cselFileType :: !(Maybe Text)
, _cselSearchType :: !(Maybe CSEListSearchType)
, _cselLr :: !(Maybe Text)
, _cselQ :: !(Maybe Text)
, _cselGooglehost :: !(Maybe Text)
, _cselRelatedSite :: !(Maybe Text)
, _cselHl :: !(Maybe Text)
, _cselSort :: !(Maybe Text)
, _cselSiteSearch :: !(Maybe Text)
, _cselFilter :: !(Maybe Text)
, _cselDateRestrict :: !(Maybe Text)
, _cselLinkSite :: !(Maybe Text)
, _cselLowRange :: !(Maybe Text)
, _cselImgType :: !(Maybe CSEListImgType)
, _cselGl :: !(Maybe Text)
, _cselCx :: !(Maybe Text)
, _cselImgColorType :: !(Maybe CSEListImgColorType)
, _cselImgSize :: !(Maybe CSEListImgSize)
, _cselExactTerms :: !(Maybe Text)
, _cselCr :: !(Maybe Text)
, _cselSafe :: !(Maybe CSEListSafe)
, _cselHq :: !(Maybe Text)
, _cselCallback :: !(Maybe Text)
, _cselHighRange :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'CSEList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'cselImgDominantColor'
--
-- * 'cselXgafv'
--
-- * 'cselUploadProtocol'
--
-- * 'cselSiteSearchFilter'
--
-- * 'cselC2coff'
--
-- * 'cselOrTerms'
--
-- * 'cselAccessToken'
--
-- * 'cselStart'
--
-- * 'cselRights'
--
-- * 'cselUploadType'
--
-- * 'cselExcludeTerms'
--
-- * 'cselNum'
--
-- * 'cselFileType'
--
-- * 'cselSearchType'
--
-- * 'cselLr'
--
-- * 'cselQ'
--
-- * 'cselGooglehost'
--
-- * 'cselRelatedSite'
--
-- * 'cselHl'
--
-- * 'cselSort'
--
-- * 'cselSiteSearch'
--
-- * 'cselFilter'
--
-- * 'cselDateRestrict'
--
-- * 'cselLinkSite'
--
-- * 'cselLowRange'
--
-- * 'cselImgType'
--
-- * 'cselGl'
--
-- * 'cselCx'
--
-- * 'cselImgColorType'
--
-- * 'cselImgSize'
--
-- * 'cselExactTerms'
--
-- * 'cselCr'
--
-- * 'cselSafe'
--
-- * 'cselHq'
--
-- * 'cselCallback'
--
-- * 'cselHighRange'
cSEList
:: CSEList
cSEList =
CSEList'
{ _cselImgDominantColor = Nothing
, _cselXgafv = Nothing
, _cselUploadProtocol = Nothing
, _cselSiteSearchFilter = Nothing
, _cselC2coff = Nothing
, _cselOrTerms = Nothing
, _cselAccessToken = Nothing
, _cselStart = Nothing
, _cselRights = Nothing
, _cselUploadType = Nothing
, _cselExcludeTerms = Nothing
, _cselNum = Nothing
, _cselFileType = Nothing
, _cselSearchType = Nothing
, _cselLr = Nothing
, _cselQ = Nothing
, _cselGooglehost = Nothing
, _cselRelatedSite = Nothing
, _cselHl = Nothing
, _cselSort = Nothing
, _cselSiteSearch = Nothing
, _cselFilter = Nothing
, _cselDateRestrict = Nothing
, _cselLinkSite = Nothing
, _cselLowRange = Nothing
, _cselImgType = Nothing
, _cselGl = Nothing
, _cselCx = Nothing
, _cselImgColorType = Nothing
, _cselImgSize = Nothing
, _cselExactTerms = Nothing
, _cselCr = Nothing
, _cselSafe = Nothing
, _cselHq = Nothing
, _cselCallback = Nothing
, _cselHighRange = Nothing
}
-- | Returns images of a specific dominant color. Acceptable values are: *
-- \`\"black\"\` * \`\"blue\"\` * \`\"brown\"\` * \`\"gray\"\` *
-- \`\"green\"\` * \`\"orange\"\` * \`\"pink\"\` * \`\"purple\"\` *
-- \`\"red\"\` * \`\"teal\"\` * \`\"white\"\` * \`\"yellow\"\`
cselImgDominantColor :: Lens' CSEList (Maybe CSEListImgDominantColor)
cselImgDominantColor
= lens _cselImgDominantColor
(\ s a -> s{_cselImgDominantColor = a})
-- | V1 error format.
cselXgafv :: Lens' CSEList (Maybe Xgafv)
cselXgafv
= lens _cselXgafv (\ s a -> s{_cselXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
cselUploadProtocol :: Lens' CSEList (Maybe Text)
cselUploadProtocol
= lens _cselUploadProtocol
(\ s a -> s{_cselUploadProtocol = a})
-- | Controls whether to include or exclude results from the site named in
-- the \`siteSearch\` parameter. Acceptable values are: * \`\"e\"\`:
-- exclude * \`\"i\"\`: include
cselSiteSearchFilter :: Lens' CSEList (Maybe CSEListSiteSearchFilter)
cselSiteSearchFilter
= lens _cselSiteSearchFilter
(\ s a -> s{_cselSiteSearchFilter = a})
-- | Enables or disables [Simplified and Traditional Chinese
-- Search](https:\/\/developers.google.com\/custom-search\/docs\/xml_results#chineseSearch).
-- The default value for this parameter is 0 (zero), meaning that the
-- feature is enabled. Supported values are: * \`1\`: Disabled * \`0\`:
-- Enabled (default)
cselC2coff :: Lens' CSEList (Maybe Text)
cselC2coff
= lens _cselC2coff (\ s a -> s{_cselC2coff = a})
-- | Provides additional search terms to check for in a document, where each
-- document in the search results must contain at least one of the
-- additional search terms.
cselOrTerms :: Lens' CSEList (Maybe Text)
cselOrTerms
= lens _cselOrTerms (\ s a -> s{_cselOrTerms = a})
-- | OAuth access token.
cselAccessToken :: Lens' CSEList (Maybe Text)
cselAccessToken
= lens _cselAccessToken
(\ s a -> s{_cselAccessToken = a})
-- | The index of the first result to return. The default number of results
-- per page is 10, so \`&start=11\` would start at the top of the second
-- page of results. **Note**: The JSON API will never return more than 100
-- results, even if more than 100 documents match the query, so setting the
-- sum of \`start + num\` to a number greater than 100 will produce an
-- error. Also note that the maximum value for \`num\` is 10.
cselStart :: Lens' CSEList (Maybe Word32)
cselStart
= lens _cselStart (\ s a -> s{_cselStart = a}) .
mapping _Coerce
-- | Filters based on licensing. Supported values include:
-- \`cc_publicdomain\`, \`cc_attribute\`, \`cc_sharealike\`,
-- \`cc_noncommercial\`, \`cc_nonderived\` and combinations of these. See
-- [typical
-- combinations](https:\/\/wiki.creativecommons.org\/wiki\/CC_Search_integration).
cselRights :: Lens' CSEList (Maybe Text)
cselRights
= lens _cselRights (\ s a -> s{_cselRights = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
cselUploadType :: Lens' CSEList (Maybe Text)
cselUploadType
= lens _cselUploadType
(\ s a -> s{_cselUploadType = a})
-- | Identifies a word or phrase that should not appear in any documents in
-- the search results.
cselExcludeTerms :: Lens' CSEList (Maybe Text)
cselExcludeTerms
= lens _cselExcludeTerms
(\ s a -> s{_cselExcludeTerms = a})
-- | Number of search results to return. * Valid values are integers between
-- 1 and 10, inclusive.
cselNum :: Lens' CSEList (Maybe Int32)
cselNum
= lens _cselNum (\ s a -> s{_cselNum = a}) .
mapping _Coerce
-- | Restricts results to files of a specified extension. A list of file
-- types indexable by Google can be found in Search Console [Help
-- Center](https:\/\/support.google.com\/webmasters\/answer\/35287).
cselFileType :: Lens' CSEList (Maybe Text)
cselFileType
= lens _cselFileType (\ s a -> s{_cselFileType = a})
-- | Specifies the search type: \`image\`. If unspecified, results are
-- limited to webpages. Acceptable values are: * \`\"image\"\`: custom
-- image search.
cselSearchType :: Lens' CSEList (Maybe CSEListSearchType)
cselSearchType
= lens _cselSearchType
(\ s a -> s{_cselSearchType = a})
-- | Restricts the search to documents written in a particular language
-- (e.g., \`lr=lang_ja\`). Acceptable values are: * \`\"lang_ar\"\`: Arabic
-- * \`\"lang_bg\"\`: Bulgarian * \`\"lang_ca\"\`: Catalan *
-- \`\"lang_cs\"\`: Czech * \`\"lang_da\"\`: Danish * \`\"lang_de\"\`:
-- German * \`\"lang_el\"\`: Greek * \`\"lang_en\"\`: English *
-- \`\"lang_es\"\`: Spanish * \`\"lang_et\"\`: Estonian * \`\"lang_fi\"\`:
-- Finnish * \`\"lang_fr\"\`: French * \`\"lang_hr\"\`: Croatian *
-- \`\"lang_hu\"\`: Hungarian * \`\"lang_id\"\`: Indonesian *
-- \`\"lang_is\"\`: Icelandic * \`\"lang_it\"\`: Italian * \`\"lang_iw\"\`:
-- Hebrew * \`\"lang_ja\"\`: Japanese * \`\"lang_ko\"\`: Korean *
-- \`\"lang_lt\"\`: Lithuanian * \`\"lang_lv\"\`: Latvian *
-- \`\"lang_nl\"\`: Dutch * \`\"lang_no\"\`: Norwegian * \`\"lang_pl\"\`:
-- Polish * \`\"lang_pt\"\`: Portuguese * \`\"lang_ro\"\`: Romanian *
-- \`\"lang_ru\"\`: Russian * \`\"lang_sk\"\`: Slovak * \`\"lang_sl\"\`:
-- Slovenian * \`\"lang_sr\"\`: Serbian * \`\"lang_sv\"\`: Swedish *
-- \`\"lang_tr\"\`: Turkish * \`\"lang_zh-CN\"\`: Chinese (Simplified) *
-- \`\"lang_zh-TW\"\`: Chinese (Traditional)
cselLr :: Lens' CSEList (Maybe Text)
cselLr = lens _cselLr (\ s a -> s{_cselLr = a})
-- | Query
cselQ :: Lens' CSEList (Maybe Text)
cselQ = lens _cselQ (\ s a -> s{_cselQ = a})
-- | **Deprecated**. Use the \`gl\` parameter for a similar effect. The local
-- Google domain (for example, google.com, google.de, or google.fr) to use
-- to perform the search.
cselGooglehost :: Lens' CSEList (Maybe Text)
cselGooglehost
= lens _cselGooglehost
(\ s a -> s{_cselGooglehost = a})
-- | Specifies that all search results should be pages that are related to
-- the specified URL.
cselRelatedSite :: Lens' CSEList (Maybe Text)
cselRelatedSite
= lens _cselRelatedSite
(\ s a -> s{_cselRelatedSite = a})
-- | Sets the user interface language. * Explicitly setting this parameter
-- improves the performance and the quality of your search results. * See
-- the [Interface
-- Languages](https:\/\/developers.google.com\/custom-search\/docs\/xml_results#wsInterfaceLanguages)
-- section of [Internationalizing Queries and Results
-- Presentation](https:\/\/developers.google.com\/custom-search\/docs\/xml_results#wsInternationalizing)
-- for more information, and (Supported Interface
-- Languages)[https:\/\/developers.google.com\/custom-search\/docs\/xml_results_appendices#interfaceLanguages]
-- for a list of supported languages.
cselHl :: Lens' CSEList (Maybe Text)
cselHl = lens _cselHl (\ s a -> s{_cselHl = a})
-- | The sort expression to apply to the results. The sort parameter
-- specifies that the results be sorted according to the specified
-- expression i.e. sort by date. [Example:
-- sort=date](https:\/\/developers.google.com\/custom-search\/docs\/structured_search#sort-by-attribute).
cselSort :: Lens' CSEList (Maybe Text)
cselSort = lens _cselSort (\ s a -> s{_cselSort = a})
-- | Specifies a given site which should always be included or excluded from
-- results (see \`siteSearchFilter\` parameter, below).
cselSiteSearch :: Lens' CSEList (Maybe Text)
cselSiteSearch
= lens _cselSiteSearch
(\ s a -> s{_cselSiteSearch = a})
-- | Controls turning on or off the duplicate content filter. * See
-- [Automatic
-- Filtering](https:\/\/developers.google.com\/custom-search\/docs\/xml_results#automaticFiltering)
-- for more information about Google\'s search results filters. Note that
-- host crowding filtering applies only to multi-site searches. * By
-- default, Google applies filtering to all search results to improve the
-- quality of those results. Acceptable values are: * \`0\`: Turns off
-- duplicate content filter. * \`1\`: Turns on duplicate content filter.
cselFilter :: Lens' CSEList (Maybe Text)
cselFilter
= lens _cselFilter (\ s a -> s{_cselFilter = a})
-- | Restricts results to URLs based on date. Supported values include: *
-- \`d[number]\`: requests results from the specified number of past days.
-- * \`w[number]\`: requests results from the specified number of past
-- weeks. * \`m[number]\`: requests results from the specified number of
-- past months. * \`y[number]\`: requests results from the specified number
-- of past years.
cselDateRestrict :: Lens' CSEList (Maybe Text)
cselDateRestrict
= lens _cselDateRestrict
(\ s a -> s{_cselDateRestrict = a})
-- | Specifies that all search results should contain a link to a particular
-- URL.
cselLinkSite :: Lens' CSEList (Maybe Text)
cselLinkSite
= lens _cselLinkSite (\ s a -> s{_cselLinkSite = a})
-- | Specifies the starting value for a search range. Use \`lowRange\` and
-- \`highRange\` to append an inclusive search range of
-- \`lowRange...highRange\` to the query.
cselLowRange :: Lens' CSEList (Maybe Text)
cselLowRange
= lens _cselLowRange (\ s a -> s{_cselLowRange = a})
-- | Returns images of a type. Acceptable values are: * \`\"clipart\"\` *
-- \`\"face\"\` * \`\"lineart\"\` * \`\"stock\"\` * \`\"photo\"\` *
-- \`\"animated\"\`
cselImgType :: Lens' CSEList (Maybe CSEListImgType)
cselImgType
= lens _cselImgType (\ s a -> s{_cselImgType = a})
-- | Geolocation of end user. * The \`gl\` parameter value is a two-letter
-- country code. The \`gl\` parameter boosts search results whose country
-- of origin matches the parameter value. See the [Country
-- Codes](https:\/\/developers.google.com\/custom-search\/docs\/xml_results_appendices#countryCodes)
-- page for a list of valid values. * Specifying a \`gl\` parameter value
-- should lead to more relevant results. This is particularly true for
-- international customers and, even more specifically, for customers in
-- English- speaking countries other than the United States.
cselGl :: Lens' CSEList (Maybe Text)
cselGl = lens _cselGl (\ s a -> s{_cselGl = a})
-- | The Programmable Search Engine ID to use for this request.
cselCx :: Lens' CSEList (Maybe Text)
cselCx = lens _cselCx (\ s a -> s{_cselCx = a})
-- | Returns black and white, grayscale, transparent, or color images.
-- Acceptable values are: * \`\"color\"\` * \`\"gray\"\` * \`\"mono\"\`:
-- black and white * \`\"trans\"\`: transparent background
cselImgColorType :: Lens' CSEList (Maybe CSEListImgColorType)
cselImgColorType
= lens _cselImgColorType
(\ s a -> s{_cselImgColorType = a})
-- | Returns images of a specified size. Acceptable values are: *
-- \`\"huge\"\` * \`\"icon\"\` * \`\"large\"\` * \`\"medium\"\` *
-- \`\"small\"\` * \`\"xlarge\"\` * \`\"xxlarge\"\`
cselImgSize :: Lens' CSEList (Maybe CSEListImgSize)
cselImgSize
= lens _cselImgSize (\ s a -> s{_cselImgSize = a})
-- | Identifies a phrase that all documents in the search results must
-- contain.
cselExactTerms :: Lens' CSEList (Maybe Text)
cselExactTerms
= lens _cselExactTerms
(\ s a -> s{_cselExactTerms = a})
-- | Restricts search results to documents originating in a particular
-- country. You may use [Boolean
-- operators](https:\/\/developers.google.com\/custom-search\/docs\/xml_results_appendices#booleanOperators)
-- in the cr parameter\'s value. Google Search determines the country of a
-- document by analyzing: * the top-level domain (TLD) of the document\'s
-- URL * the geographic location of the Web server\'s IP address See the
-- [Country Parameter
-- Values](https:\/\/developers.google.com\/custom-search\/docs\/xml_results_appendices#countryCollections)
-- page for a list of valid values for this parameter.
cselCr :: Lens' CSEList (Maybe Text)
cselCr = lens _cselCr (\ s a -> s{_cselCr = a})
-- | Search safety level. Acceptable values are: * \`\"active\"\`: Enables
-- SafeSearch filtering. * \`\"off\"\`: Disables SafeSearch filtering.
-- (default)
cselSafe :: Lens' CSEList (Maybe CSEListSafe)
cselSafe = lens _cselSafe (\ s a -> s{_cselSafe = a})
-- | Appends the specified query terms to the query, as if they were combined
-- with a logical AND operator.
cselHq :: Lens' CSEList (Maybe Text)
cselHq = lens _cselHq (\ s a -> s{_cselHq = a})
-- | JSONP
cselCallback :: Lens' CSEList (Maybe Text)
cselCallback
= lens _cselCallback (\ s a -> s{_cselCallback = a})
-- | Specifies the ending value for a search range. * Use \`lowRange\` and
-- \`highRange\` to append an inclusive search range of
-- \`lowRange...highRange\` to the query.
cselHighRange :: Lens' CSEList (Maybe Text)
cselHighRange
= lens _cselHighRange
(\ s a -> s{_cselHighRange = a})
-- NOTE(review): generated-style boilerplate (this module lives under a
-- gen\/ directory); prefer regenerating over hand-editing.  The positional
-- order of the arguments to 'go' must mirror the 'CSEListResource' servant
-- type exactly -- confirm against the resource definition before
-- reordering anything here.
instance GoogleRequest CSEList where
        type Rs CSEList = Search
        -- No OAuth scopes are required for this request.
        type Scopes CSEList = '[]
        requestClient CSEList'{..}
          = go _cselImgDominantColor _cselXgafv
              _cselUploadProtocol
              _cselSiteSearchFilter
              _cselC2coff
              _cselOrTerms
              _cselAccessToken
              _cselStart
              _cselRights
              _cselUploadType
              _cselExcludeTerms
              _cselNum
              _cselFileType
              _cselSearchType
              _cselLr
              _cselQ
              _cselGooglehost
              _cselRelatedSite
              _cselHl
              _cselSort
              _cselSiteSearch
              _cselFilter
              _cselDateRestrict
              _cselLinkSite
              _cselLowRange
              _cselImgType
              _cselGl
              _cselCx
              _cselImgColorType
              _cselImgSize
              _cselExactTerms
              _cselCr
              _cselSafe
              _cselHq
              _cselCallback
              _cselHighRange
              -- Responses are always requested as JSON.
              (Just AltJSON)
              customSearchService
          where go
                  = buildClient (Proxy :: Proxy CSEListResource) mempty
|
brendanhay/gogol
|
gogol-customsearch/gen/Network/Google/Resource/Search/CSE/List.hs
|
mpl-2.0
| 25,636
| 0
| 46
| 8,800
| 3,240
| 1,877
| 1,363
| 448
| 1
|
module Expr where
import Data.List (intercalate)
-- | Lexical tokens of the surface language: keywords, identifiers,
-- grouping symbols and punctuation.
data Token = FUN              -- ^ keyword @fun@
           | LET              -- ^ keyword @let@
           | IN               -- ^ keyword @in@
           | FORALL           -- ^ keyword @forall@
           | IDENT String     -- ^ identifier, carrying its name
           | LPAREN           -- ^ @(@
           | RPAREN           -- ^ @)@
           | LBRACKET         -- ^ @[@
           | RBRACKET         -- ^ @]@
           | EQUALS           -- ^ @=@
           | ARROW            -- ^ @->@
           | COMMA            -- ^ @,@
           deriving (Eq,Show)
-- | Variable names are plain strings.
type Name = String
-- | Expressions: a lambda calculus with multi-argument functions,
-- multi-argument application and @let@.
data Expr = Var Name              -- ^ variable reference
          | Call Expr [Expr]      -- ^ application of a function to arguments
          | Fun [Name] Expr       -- ^ multi-parameter lambda abstraction
          | Let Name Expr Expr    -- ^ @let x = e1 in e2@
          deriving (Eq,Show)
-- | Unique identifier.  NOTE(review): not referenced in this module --
-- presumably used by the inference engine; confirm before removing.
type Id = Int
-- | Let-nesting level.  NOTE(review): also unreferenced here; likely for
-- level-based generalisation elsewhere -- confirm.
type Level = Int
-- | Types: constants, variables, applications, multi-argument arrows and
-- @forall@-quantified schemes (concrete syntax shown by the 'Show'
-- instance below).
data Ty = TConst Name             -- ^ type constant
        | TVar Name               -- ^ type variable
        | TApp Ty [Ty]            -- ^ type application, printed @t[a, b]@
        | TArrow [Ty] Ty          -- ^ function type: argument types and result
        | TScheme [Name] Ty       -- ^ printed @forall[vars] body@
        deriving (Eq,Ord)
-- | Render a type, parenthesising arrow types so they nest unambiguously
-- when they appear as an argument of another arrow (e.g. @(a -> b) -> c@).
-- Fix: the binding previously had no top-level type signature, which
-- triggers @-Wmissing-signatures@ and hides the intended monomorphic type.
show' :: Ty -> String
show' t@(TArrow _ _) = "(" ++ show t ++ ")"
show' t = show t
-- | Pretty-printing matching the concrete syntax: applications as
-- @t[a, b]@, arrows as @a -> b -> r@ (argument positions parenthesised via
-- 'show''), schemes as @forall[vars] body@.
instance Show Ty where
    show (TConst n) = n
    show (TVar n) = n
    show (TApp t ts) = show t ++ "[" ++ intercalate ", " (map show ts) ++ "]"
    show (TArrow ts t) = intercalate " -> " (map show' ts) ++ " -> " ++ show t
    show (TScheme ns t) = "forall[" ++ unwords ns ++ "] " ++ show t
|
scturtle/InferW
|
Expr.hs
|
unlicense
| 1,042
| 0
| 10
| 441
| 384
| 208
| 176
| 37
| 1
|
-- | OEIS A249642, defined via the shared palindrome-counting helper applied
-- to 9.  NOTE(review): see "Helpers.PalindromeCounter" for what the 9
-- parameterises (likely a base or length bound) -- confirm there.
module Palindromes.A249642 (a249642) where
import Helpers.PalindromeCounter (countPalindromes)
-- | 0-indexed lookup into the sequence.
a249642 :: Int -> Integer
a249642 = (!!) a249642_list
-- | The whole lazy sequence, shared as a CAF so repeated lookups are memoised.
a249642_list :: [Integer]
a249642_list = countPalindromes 9
|
peterokagey/haskellOEIS
|
src/Palindromes/A249642.hs
|
apache-2.0
| 211
| 0
| 5
| 25
| 56
| 33
| 23
| 6
| 1
|
module BrownPLT.TypedJS.Prelude
( module Data.Generics
, module Control.Monad
, ErrorT
, MonadError
, module Data.List
, module Data.Maybe
, module Data.Tree
, SourcePos
, initialPos
, setSourceName
, sourceName
, sourceLine
, sourceColumn
, (!)
, Map
, Set
, Foldable
, Traversable
, printf
-- common functions
, noPos
, everythingBut
, catastrophe
, Node
, trace
, accumError
) where
import Data.Tree
import Data.List
import Data.Generics
import Data.Maybe
import Control.Monad
import Text.ParserCombinators.Parsec.Pos
import Data.Map (Map,(!))
import Data.Set (Set)
import Data.Foldable (Foldable)
import Data.Traversable (Traversable)
import Text.PrettyPrint.HughesPJ
import Text.Printf
import Data.Graph.Inductive.PatriciaTree (Gr)
import Data.Graph.Inductive (Node, Graph)
import System.IO.Unsafe
import Control.Monad.Error
-- | Debug helper: print @s@ to stdout (via 'unsafePerformIO') before
-- returning @r@.  NOTE(review): because it relies on 'unsafePerformIO' and
-- 'seq', the message only appears when the result is forced and may be
-- reordered or elided by the optimiser -- acceptable for debugging only.
trace :: String -> a -> a
trace s r = (unsafePerformIO $ putStrLn s) `seq` r
-- | Placeholder position for when no real source location is available;
-- the string "unknown position" becomes the source /name/ shown in messages.
noPos :: SourcePos
noPos = initialPos "unknown position"
-- |Similar to 'everything'. 'everythingBut' descends into 'term' only if
-- the generic predicate is 'True'. If the predicate is 'False',
-- the query is still applied to 'term' -- only the recursion into the
-- term's children is skipped.
everythingBut :: (r -> r -> r) -- ^combines results
              -> GenericQ Bool -- ^generic predicate that determines whether
                               -- to descend into a value
              -> GenericQ r -- ^generic query
              -> GenericQ r
everythingBut combine canDescend query term = case canDescend term of
  False -> query term -- does not descend
  True -> foldl' combine (query term)
                 (gmapQ (everythingBut combine canDescend query) term)
-- | Abort via 'fail' with an \"impossible happened\" message, tagged with
-- the source location at which the impossibility was detected.
catastrophe :: Monad m
            => SourcePos
            -> String
            -> m a
catastrophe loc msg =
  fail ("CATASTROPHIC FAILURE: " ++ msg ++ " (at " ++ show loc ++ ")")
-- | Run the given action, prefixing any error it throws with @msg@
-- (newline-separated), so nested calls accumulate a trail of context.
accumError :: MonadError String m
           => String
           -> m a
           -> m a
accumError msg action =
  action `catchError` (\inner -> throwError (msg ++ "\n" ++ inner))
|
brownplt/strobe-old
|
src/BrownPLT/TypedJS/Prelude.hs
|
bsd-2-clause
| 2,046
| 0
| 12
| 512
| 506
| 291
| 215
| -1
| -1
|
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Edward Kmett and Dan Doel 2014
-- License : BSD3
-- Maintainer: Dan Doel <dan.doel@gmail.com>
-- Stability : experimental
-- Portability: non-portable
--------------------------------------------------------------------
module Ermine.Core.Optimizer
( optimize
, rewriteCore
, rewriteCoreDown
) where
import Bound
import Bound.Var
import Bound.Scope
import Control.Applicative
import Control.Lens
import Control.Monad.Writer
import Data.List (genericLength, genericSplitAt, genericIndex)
import Data.Traversable (sequenceA)
import Data.Word
import Ermine.Syntax
import Ermine.Syntax.Convention
import Ermine.Syntax.Core
import Ermine.Syntax.Scope
import Ermine.Unification.Sharing
-- | Optimize core expressions by alternating between the different
-- optimization passes several times (at most 10 full passes; see
-- 'optimize''), running in a pure 'Sharing' context.
optimize :: Core Convention c -> Core Convention c
optimize c = runIdentity . runSharing c $ optimize' 10 c
-- | Run the rewrite suite up to @n@ more times, stopping early as soon as
-- a complete pass makes no change (tracked via the 'Any' writer that each
-- rewrite 'tell's into).
optimize' :: (Applicative m, MonadWriter Any m)
          => Int -> Core Convention c -> m (Core Convention c)
optimize' 0 c = return c
optimize' n c = do (c', Any b) <- listen $ suite c
                   if b then optimize' (n-1) c' else return c
 where
 -- One pass: collapse nested lambdas, β-reduce work-safe redexes,
 -- specialise cases on known constructors, then η-reduce dictionary lambdas.
 suite = rewriteCoreDown lamlam
     >=> rewriteCore betaVar
     >=> rewriteCoreDown specCase
     >=> rewriteCore etaDict
-- | Apply @opt@ over an entire 'Core' term /top-down/: the rewrite is tried
-- on each node first, and we then descend into the children of the result.
-- 'sharing' wraps every step so that observable sharing is preserved and
-- the 'Any' writer records whether anything actually changed.
rewriteCoreDown :: forall m c cc. (Applicative m, MonadWriter Any m)
                => (forall d. Core cc d -> m (Core cc d)) -> Core cc c -> m (Core cc c)
rewriteCoreDown opt = go
 where
 go :: forall e. Core cc e -> m (Core cc e)
 go c = sharing c (opt c) >>= \ xs -> case xs of
   l@(Lam cc e) -> sharing l $ Lam cc <$> goS e
   d@(Data cc n g l) -> sharing d $ Data cc n g <$> traverse go l
   d@(Prim cc r g l) -> sharing d $ Prim cc r g <$> traverse go l
   a@(App cc f x) -> sharing a $ App cc <$> go f <*> go x
   l@(Let d b) -> sharing l $ Let <$> sharing d (traverse goS d) <*> goS b
   s@(Case e b d) -> sharing s $ Case <$> go e <*> sharing b ((traverse.matchBody) goS b) <*> sharing d (traverse goS d)
   d@(Dict su sl) -> sharing d $ Dict <$> sharing su (traverse go su) <*> sharing sl (traverse goS sl)
   x@HardCore{} -> return x
   x@Var{} -> return x
 -- Rewrite under a binder, re-using the scope machinery.
 goS :: forall b e. Scope b (Core cc) e -> m (Scope b (Core cc) e)
 goS s = sharing s . inScope go $ s
-- | Apply @opt@ over an entire 'Core' term /bottom-up/: children are
-- rewritten first, then @opt@ runs on each rebuilt node.  As in
-- 'rewriteCoreDown', 'sharing' preserves observable sharing and the 'Any'
-- writer records whether a rewrite fired.
rewriteCore :: forall m c cc. (Applicative m, MonadWriter Any m)
            => (forall d. Core cc d -> m (Core cc d)) -> Core cc c -> m (Core cc c)
rewriteCore opt = go
 where
 go :: forall e. Core cc e -> m (Core cc e)
 go c = sharing c $ opt =<< case c of
   l@(Lam cc e) -> sharing l $ Lam cc <$> goS e
   d@(Data cc n g l) -> sharing d $ Data cc n g <$> traverse go l
   d@(Prim cc r g l) -> sharing d $ Prim cc r g <$> traverse go l
   a@(App cc f x) -> sharing a $ App cc <$> go f <*> go x
   l@(Let d b) -> sharing l $ Let <$> sharing d (traverse goS d) <*> goS b
   s@(Case e b d) -> sharing s $ Case <$> go e <*> sharing b ((traverse.matchBody) goS b) <*> sharing d (traverse goS d)
   d@(Dict su sl) -> sharing d $ Dict <$> sharing su (traverse go su) <*> sharing sl (traverse goS sl)
   x@HardCore{} -> return x
   x@Var{} -> return x
 -- Rewrite under a binder, re-using the scope machinery.
 goS :: forall b e. Scope b (Core cc) e -> m (Scope b (Core cc) e)
 goS s = sharing s . inScope go $ s
-- | Turns @\{x..} -> \{y..} -> ...@ into @\{x.. y..} -> ...@ for all lambda variants
lamlam :: forall c cc m. (Functor m, MonadWriter Any m) => Core cc c -> m (Core cc c)
lamlam (Lam cc0 e0) = slurp False cc0 (fromScope e0)
 where
 -- Accumulate binder conventions @m@ while peeling nested lambdas; the Bool
 -- records whether we actually merged anything (reported via 'tell').
 slurp :: forall e. Bool -> [cc] -> Core cc (Var Word64 e) -> m (Core cc e)
 -- @j@ offsets the inner lambda's bound-variable indices past the binders
 -- already accumulated, so the merged lambda's indices stay consistent.
 slurp _ m (Lam n b) | j <- fromIntegral $ length m = slurp True (m ++ n) (instantiate (\i -> pure $ B $ j + i) b)
 slurp b m c = Lam m (toScope c) <$ tell (Any b)
lamlam c = return c
-- | 'Lam D' is strict, so η-reduction for it is sound. η-reduce.
--
-- Todo: generalize this to larger lambdas and also extend it to cover other strict lambdas like U and N
etaDict :: forall c m. (Functor m, MonadWriter Any m) => Core Convention c -> m (Core Convention c)
etaDict c@(Lam [D] (Scope (App D f (Var (B 0))))) = case sequenceA f of
  -- 'sequenceA' pulls the Var layer outside: a 'B' means the lambda-bound
  -- variable occurs inside @f@ as well, so this is not a true η-redex.
  B _ -> return c
  -- @f@ never mentions the bound variable: reduce to @f@ itself ('join'
  -- collapses the now-redundant scope layer) and record that we rewrote.
  F g -> join g <$ tell (Any True)
etaDict c = return c
-- | β-reduces redexes like @(\x.. -> e) v..@ where every @v@ is an atom
-- that is work-safe to duplicate -- per the prisms below: a variable,
-- slot, superclass reference or literal.  Handles both partial and
-- saturated\/over- application of multi-argument lambdas.
betaVar :: forall c m. (Applicative m, MonadWriter Any m) => Core Convention c -> m (Core Convention c)
betaVar = collapse []
 where
 -- Walk down a spine of C-convention applications, stacking argument atoms.
 collapse stk (App C f x)
   | has _Var x || has _Slot x || has _Super x || has _Lit x = collapse (x:stk) f
 collapse stk (Lam cc body@(Scope body'))
   -- Partial application: substitute the available arguments and keep a
   -- lambda over the remaining parameters.
   | len < n = do
     tell (Any True)
     let replace i
           | i < len = F $ stk `genericIndex` i
           | otherwise = B $ i - len
     return $ Lam (drop (fromIntegral len) cc) $ Scope $ fmap (unvar replace F) body'
   -- Saturated (or over-applied): instantiate the body and keep collapsing
   -- with whatever arguments remain on the stack.
   | (args, stk') <- genericSplitAt n stk = do
     tell (Any True)
     collapse stk' $ instantiate (genericIndex args) body
   where
   len = genericLength stk
   n = genericLength cc
 -- Anything else: rebuild the applications we deconstructed on the way down.
 collapse stk c = return $ apps c stk
-- | Specializes a case expression to a known constructor.
--
-- TODO: switch to
--
--   (\\ a b c -> let x = Foo a b c in body) arg1 .. argN
--
-- and rely on beta reduction and other checks for work-safe application reduction
--
-- This has the benefit that it works for all calling conventions
specCase :: forall m c. (Applicative m, MonadWriter Any m) => Core Convention c -> m (Core Convention c)
specCase c@(Case dat@(Data cc tg g as) bs d)
  -- TODO: Use a LamHash around this for all the unboxed arguments, and AppHash each to an argument.
  | any (/=C) cc = pure c -- until we change this to use Lam as described above
  -- A branch matches the scrutinee's tag: let-bind the constructor fields
  -- (and a rebuilt scrutinee) and select that branch's body.
  | Just (Match _cc _g body) <- bs ^. at tg =
    Let ((Scope . Data cc tg g $ pure . B <$> [1..fromIntegral arity]) : map lift as)
        (mapBound fromIntegral body)
      <$ tell (Any True)
  -- No matching branch: fall through to the default, binding the scrutinee.
  | Just e <- d = Let [lift dat] (mapBound (const 0) e) <$ tell (Any True)
  | otherwise = error "PANIC: non-exhaustive case with no default"
  where arity = length cc
specCase c = pure c
|
ekmett/ermine
|
src/Ermine/Core/Optimizer.hs
|
bsd-2-clause
| 6,364
| 0
| 18
| 1,581
| 2,583
| 1,287
| 1,296
| 105
| 9
|
--
-- Copyright (c) 2013, Carl Joachim Svenn
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- 1. Redistributions of source code must retain the above copyright notice, this
-- list of conditions and the following disclaimer.
-- 2. Redistributions in binary form must reproduce the above copyright notice,
-- this list of conditions and the following disclaimer in the documentation
-- and/or other materials provided with the distribution.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-- ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-- WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-- DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-- FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-- (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-- LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-- SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--
module Main
(
main,
) where
import MyPrelude
#ifdef GRID_PLATFORM_IOS
import Main.IOS
#endif
#ifdef GRID_PLATFORM_GLFW
import Main.GLFW
#endif
#ifdef DEBUG
import OpenGL
import OpenGL.Helpers
import Foreign
#endif
-- | Program entry point.  In DEBUG builds, first sanity-check the byte
-- sizes of the GL scalar types the code depends on, then hand over to the
-- platform-specific @main'@ (supplied by Main.IOS or Main.GLFW via CPP).
main :: IO ()
main = do
#ifdef DEBUG
    -- we assume the following bitsizes in our code.
    -- otherwise, the program will probably fail...
    assert (sizeOf (undefined :: GLubyte) == 1)    $ "sizeof GLubyte == 1"
    assert (sizeOf (undefined :: GLbyte) == 1)     $ "sizeof GLbyte == 1"
    assert (sizeOf (undefined :: GLushort) == 2)   $ "sizeof GLushort == 2"
    assert (sizeOf (undefined :: GLshort) == 2)    $ "sizeof GLshort == 2"
    assert (sizeOf (undefined :: GLfloat) == 4)    $ "sizeof GLfloat == 4"
#endif
    -- platform main
    main'
|
karamellpelle/MEnv
|
source/Main.hs
|
bsd-2-clause
| 2,243
| 0
| 12
| 495
| 217
| 131
| 86
| 7
| 1
|
{-| An implementation of the mutex from
<http://zookeeper.apache.org/doc/trunk/recipes.html#sc_recipes_Locks>.
The mutex node will be automatically created and removed as necessary, and
must not be used for any other purposes. The parent node of the mutex node
must already exist.
-}
module Zookeeper.Mutex ( withMutex ) where
import Control.Concurrent.MVar ( newEmptyMVar, putMVar, takeMVar )
import Control.Exception as E ( bracket, catch, catches, Handler(..) )
import Control.Monad ( liftM )
import Control.Monad.Fix ( fix )
import Data.List ( sort )
import System.FilePath.Posix ( (</>), takeDirectory, takeFileName )
import qualified Zookeeper.Core as C
-- | Acquire the mutex rooted at @path@, blocking until it is held.
-- Returns the full path of the ephemeral sequential node created for this
-- claimant; that path must later be passed to 'releaseMutex'.  This follows
-- the ZooKeeper lock recipe: the lowest-numbered child owns the lock, and
-- each waiter watches only the child immediately preceding it (avoiding a
-- thundering herd).
acquireMutex :: C.Handle -> String -> IO String
acquireMutex handle path = do
  -- Create the parent node to store the child nodes in. If the parent node already exists then
  -- this will cause a NodeExists exception; we catch (and ignore) this exception.
  (C.create handle path "" [] C.openAclUnsafe >> return ()) `E.catch` (\C.NodeExists -> return ())
  -- XXX There is a race here where a release call removes this directory. Should we instead just
  -- not even attempt to do the automatic removal?
  -- Now create our child node in the directory.
  let child = path </> "lock-"
  c <- takeFileName `liftM` C.create handle child "" [C.Ephemeral, C.Sequence] C.openAclUnsafe
  fix $ \f -> do
    cs <- sort `liftM` C.getChildren handle path Nothing
    if c == head cs -- Our child node is at the head; we're free to go.
      then return (path </> c)
      else do -- Else wait on the child node that is lexicographically before us.
        let c' = last $ takeWhile (< c) cs
        m <- newEmptyMVar
        e' <- C.exists handle (path </> c') $ Just $ \_ _ _ _ -> putMVar m ()
        -- If the predecessor vanished before we set the watch, re-check
        -- immediately; otherwise block until the watch fires.
        -- NOTE(review): after the watch fires we return without re-listing
        -- the children; the canonical recipe loops and re-checks, since the
        -- watched predecessor disappearing does not always mean we are now
        -- the lowest node -- confirm this is safe for this deployment.
        if e' == Nothing then f else takeMVar m >> return (path </> c)
-- | Release a mutex previously acquired with 'acquireMutex'.  @child@ is
-- the node path that the acquire call returned.
releaseMutex :: C.Handle -> String -> IO ()
releaseMutex handle child = do
  -- Delete the child node.
  C.delete handle child Nothing
  -- Try deleting the parent node. If there are still extant child nodes then this will cause
  -- a NotEmpty exception; we catch (and ignore) this exception. We also catch the case where
  -- another there is another aquire/release that removes the parent node ahead of us.
  C.delete handle (takeDirectory child) Nothing `E.catches`
    [E.Handler (\C.NotEmpty -> return ()), E.Handler (\C.NoNode -> return ())]
-- | Perform an action under mutual exclusion.  'E.bracket' guarantees the
-- lock node is removed even if the action throws.
withMutex :: C.Handle -- ^ The ZooKeeper handle.
          -> String -- ^ The mutex node.
          -> IO () -- ^ The action to perform under mutual exclusion.
          -> IO ()
withMutex handle path act =
  E.bracket (acquireMutex handle path) (releaseMutex handle) (const act)
|
jnb/zookeeper
|
src/Zookeeper/Mutex.hs
|
bsd-2-clause
| 2,778
| 0
| 19
| 632
| 619
| 334
| 285
| 33
| 3
|
module Main where
import qualified Options as O
import System.Console.CmdArgs
import System.Environment
import System.IO
import Data.Maybe
import Control.Monad
import Misc.Misc
import System.Exit
import qualified System.Random as R
import qualified Math.Misc.Nat as Nat
import qualified Math.VectorSpaces.Euclidean as Euc
import qualified Math.Simplicial.LandmarkSelection as LS
import qualified Math.Simplicial.PreScale as PS
import qualified Math.Simplicial.NeighborhoodGraph as NG
import qualified Math.Simplicial.VietorisRipsExpansion as VRE
import qualified Math.Simplicial.FilteredComplex as FC
import qualified Math.PersistentHomology.PersistentHomology as PH
import qualified Math.PersistentHomology.BarcodeCollection as BCC
-- | Parse one whitespace-separated line of numbers into a Euclidean point.
-- I suspect this is horribly inefficient.
-- NOTE(review): uses 'read', which is partial -- a malformed line crashes
-- the program; consider 'readMaybe' with an explicit error message.
stringToEuclidean :: String -> Euc.Euclidean
stringToEuclidean = Euc.fromList . (map read) . words
-- | Handle to read the point cloud from: stdin when requested, otherwise
-- the file named by the input option (assumed present when stdin is off).
inHandle :: O.Options -> IO Handle
inHandle opts
  | O.useStdin opts = return stdin
  | otherwise = openFile (fromJust (O.input opts)) ReadMode
-- | Handle to write results to: stdout when requested, otherwise the file
-- named by the output option (assumed present when stdout is off).
outHandle :: O.Options -> IO Handle
outHandle opts
  | O.useStdout opts = return stdout
  | otherwise = openFile (fromJust (O.output opts)) WriteMode
-- | Reject inconsistent option combinations, printing a message and
-- exiting with status 1 at the first violation found.
validate :: O.Options -> IO ()
validate opts = do
  when (isNothing (O.maxScale opts) && isNothing (O.autoscale opts) && not (O.useWitness opts)) $ do
    putStrLn "When not using the witness complex, you must specify a maximum scale for the 1-skeleton using the \"-s\" option."
    exitWith (ExitFailure 1)
  when (isJust (O.landmarks opts) && isJust (O.landmarkProbability opts)) $ do
    putStrLn "Options -l and -p are incompatible."
    exitWith (ExitFailure 1)
  when (isJust (O.topDimension opts) && fromJust (O.topDimension opts) < 1) $ do
    putStrLn "Argument to -d option must be at least 1."
    exitWith (ExitFailure 1)
-- | A deterministic generator when a seed option was given, otherwise the
-- global standard generator.
randomGenerator :: O.Options -> IO R.StdGen
randomGenerator opts =
  maybe R.getStdGen (return . R.mkStdGen) (O.randomSeed opts)
-- | Entry point: read a Euclidean point cloud (stdin or file), build
-- either a lazy witness complex or the full Vietoris-Rips complex up to the
-- requested scale and dimension, compute rational persistent homology and
-- write the resulting barcode collection in the selected output format.
main :: IO ()
main = getArgs >>= \args ->
       (
        if null args
          then withArgs ["--help"] O.getOpts
          else O.getOpts
       ) >>= \opts ->
       validate opts >>
       randomGenerator opts >>= \rGen ->
       inHandle opts >>= hGetContents >>= \input ->
       let
          cloud = map stringToEuclidean (lines input)
          -- Laziness matters: landmarkSelection is only demanded in the
          -- witness branches below.  NOTE(review): the final 'undefined' is
          -- reachable only if -w is set with a complex type that is neither
          -- witness variant; confirm Options rules that out.
          landmarkSelection = if O.complexType opts == O.MaxMinWitness
                                 then LS.maxmin (fromJust (O.landmarks opts)) cloud
                                 else if O.complexType opts == O.RandomWitness
                                         then fst $ LS.uniform rGen (fromJust (O.landmarkProbability opts)) cloud
                                         else undefined
          maxScale = if isJust (O.autoscale opts)
                        then PS.ConditionFactor $ fromJust (O.autoscale opts)
                        else PS.Absolute $ fromJust (O.maxScale opts)
          -- 1-skeleton: lazy witness graph or exact neighborhood graph.
          g = if O.useWitness opts
                 then NG.lazyWitness landmarkSelection 2 maxScale
                 else NG.exact cloud maxScale
          actualMaxScale = NG.scale g
          topDimension = if isJust (O.topDimension opts)
                            then fromJust (O.topDimension opts)
                            else 3
          flag = VRE.inductive topDimension g
          divisions = if isJust (O.divisions opts)
                         then fromJust (O.divisions opts)
                         else 1000
          filt = VRE.filtration flag (linspace 0 actualMaxScale divisions)
          hom = PH.rationalPersistentHomology filt
          bc = PH.toBarcode hom (Nat.fromInt (topDimension - 1))
          bc' = BCC.sort $ BCC.timeIndex (FC.filtrationTimes filt) bc
          quiet = O.quiet opts
       in
          -- Progress reporting, suppressed by --quiet.
          unless quiet
          (
            putStrLn "Beginning calculations." >>
            when (O.useWitness opts)
            (
              putStrLn "Using witness complex." >>
              when (O.complexType opts == O.MaxMinWitness)
              (
                putStrLn "Max-min witness selection..."
              ) >>
              when (O.complexType opts == O.RandomWitness)
              (
                putStrLn "Random witness selection..."
              ) >>
              putStrLn ("Chose " ++ show (LS.numLandmarks landmarkSelection) ++ " landmarks and " ++ show (LS.numWitnesses landmarkSelection) ++ " witnesses.")
            ) >>
            unless (O.useWitness opts)
            (
              putStrLn "Using full Vietoris-Rips complex."
            ) >>
            putStrLn ("Using maximum scale " ++ show actualMaxScale ++ ".") >>
            putStrLn ("Total number of simplices in complex: " ++ show (FC.count filt) ++ ".")
          ) >>
          outHandle opts >>= \outH ->
          (
            case O.outputStyle opts of
              O.Standard -> hPrint outH bc'
              O.Javaplex -> hPutStrLn outH (BCC.javaPlexShow bc')
              O.Compact -> hPutStrLn outH ("MaxScale " ++ show actualMaxScale) >>
                           hPutStrLn outH (BCC.compactShow bc')
          ) >>
          hClose outH
|
michiexile/hplex
|
pershom/src/Main.hs
|
bsd-3-clause
| 5,358
| 0
| 35
| 1,796
| 1,368
| 707
| 661
| 116
| 10
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE TypeFamilies #-}
{- |
Type safe combination of expressions that represent scalars or signals.
-}
module EFA.Symbolic.Mixed where
import qualified EFA.Equation.Arithmetic as Arith
import EFA.Equation.Arithmetic
(Sum, (~+), (~-),
Product, (~*), (~/),
ZeroTestable, allZeros, coincidingZeros,
Constant, zero,
Integrate, integrate)
import qualified EFA.Report.Format as Format
import EFA.Report.FormatValue (FormatValue, formatValue)
import EFA.Utility (Pointed, point)
{- |
A signal-valued expression: a term over the signal symbol type.
The scalar parameter is needed for the Integrate instance.
We may also need it for future extensions.
-}
newtype Signal term scalar signal = Signal {getSignal :: term signal}
-- | Lift a unary operation on the underlying term into 'Signal'.
liftSignal ::
   (term signal -> term signal) ->
   Signal term scalar signal ->
   Signal term scalar signal
liftSignal f (Signal x) = Signal $ f x
-- | Lift a binary operation on the underlying terms into 'Signal'.
liftSignal2 ::
   (term signal -> term signal -> term signal) ->
   Signal term scalar signal ->
   Signal term scalar signal ->
   Signal term scalar signal
liftSignal2 f (Signal x) (Signal y) = Signal $ f x y
-- All 'Signal' instances below delegate pointwise to the wrapped term
-- (arithmetic via the lift helpers above).
instance
   (Eq (term signal)) =>
      Eq (Signal term scalar signal) where
   (Signal x) == (Signal y) = x==y
instance
   (Ord (term signal)) =>
      Ord (Signal term scalar signal) where
   compare (Signal x) (Signal y) = compare x y
instance
   (Sum (term signal)) =>
      Sum (Signal term scalar signal) where
   (~+) = liftSignal2 (~+)
   (~-) = liftSignal2 (~-)
   negate = liftSignal Arith.negate
instance
   (Product (term signal)) =>
      Product (Signal term scalar signal) where
   (~*) = liftSignal2 (~*)
   (~/) = liftSignal2 (~/)
   recip = liftSignal Arith.recip
   constOne = liftSignal Arith.constOne
instance
   (Constant (term signal)) =>
      Constant (Signal term scalar signal) where
   zero = Signal zero
   fromInteger = Signal . Arith.fromInteger
   fromRational = Signal . Arith.fromRational
instance
   (ZeroTestable (term signal)) =>
      ZeroTestable (Signal term scalar signal) where
   allZeros (Signal x) = allZeros x
   coincidingZeros (Signal x) (Signal y) = coincidingZeros x y
instance
   (FormatValue (term signal)) =>
      FormatValue (Signal term scalar signal) where
   formatValue (Signal term) = formatValue term
-- | Atoms occurring in scalar terms: either a plain scalar variable, or
-- the integral of a signal expression.
data
   ScalarAtom term scalar signal =
        ScalarVariable scalar
      | Integral (Signal term scalar signal)
instance
   (Eq scalar, Eq (term signal)) =>
      Eq (ScalarAtom term scalar signal) where
   (ScalarVariable x) == (ScalarVariable y) = x==y
   (Integral x) == (Integral y) = x==y
   _ == _ = False
-- Ordering is by constructor first (every 'ScalarVariable' sorts before
-- every 'Integral'), then by payload within each constructor.
instance
   (Ord scalar, Ord (term signal)) =>
      Ord (ScalarAtom term scalar signal) where
   compare (ScalarVariable x) (ScalarVariable y) = compare x y
   compare (Integral x) (Integral y) = compare x y
   compare (ScalarVariable _) (Integral _) = LT
   compare (Integral _) (ScalarVariable _) = GT
-- Variables format as themselves; integrals are wrapped in the formatter's
-- integral notation.
instance
   (FormatValue scalar, FormatValue (term signal)) =>
      FormatValue (ScalarAtom term scalar signal) where
   formatValue (ScalarVariable var) = formatValue var
   formatValue (Integral signal) =
      Format.integral $ formatValue signal
-- | A scalar-valued expression: a term over 'ScalarAtom's, so a scalar may
-- embed integrals of signal expressions.
newtype
   Scalar term scalar signal =
      Scalar {getScalar :: term (ScalarAtom term scalar signal)}
-- | Lift a unary operation on the underlying atom term into 'Scalar'.
liftScalar ::
   (term (ScalarAtom term scalar signal) ->
    term (ScalarAtom term scalar signal)) ->
   Scalar term scalar signal ->
   Scalar term scalar signal
liftScalar f (Scalar x) = Scalar $ f x
-- | Lift a binary operation on the underlying atom terms into 'Scalar'.
liftScalar2 ::
   (term (ScalarAtom term scalar signal) ->
    term (ScalarAtom term scalar signal) ->
    term (ScalarAtom term scalar signal)) ->
   Scalar term scalar signal ->
   Scalar term scalar signal ->
   Scalar term scalar signal
liftScalar2 f (Scalar x) (Scalar y) = Scalar $ f x y
-- All 'Scalar' instances below delegate pointwise to the wrapped atom term
-- (arithmetic via the lift helpers above).
instance
   (Eq (term (ScalarAtom term scalar signal))) =>
      Eq (Scalar term scalar signal) where
   (Scalar x) == (Scalar y) = x==y
instance
   (Sum (term (ScalarAtom term scalar signal))) =>
      Sum (Scalar term scalar signal) where
   (~+) = liftScalar2 (~+)
   (~-) = liftScalar2 (~-)
   negate = liftScalar Arith.negate
instance
   (Product (term (ScalarAtom term scalar signal))) =>
      Product (Scalar term scalar signal) where
   (~*) = liftScalar2 (~*)
   (~/) = liftScalar2 (~/)
   recip = liftScalar Arith.recip
   constOne = liftScalar Arith.constOne
instance
   (Constant (term (ScalarAtom term scalar signal))) =>
      Constant (Scalar term scalar signal) where
   zero = Scalar zero
   fromInteger = Scalar . Arith.fromInteger
   fromRational = Scalar . Arith.fromRational
instance
   (ZeroTestable (term (ScalarAtom term scalar signal))) =>
      ZeroTestable (Scalar term scalar signal) where
   allZeros (Scalar x) = allZeros x
   coincidingZeros (Scalar x) (Scalar y) = coincidingZeros x y
instance
   (FormatValue (term (ScalarAtom term scalar signal))) =>
      FormatValue (Scalar term scalar signal) where
   formatValue (Scalar term) = formatValue term
-- | Integrating a signal yields a scalar term consisting of a single
-- 'Integral' atom (this is why 'Signal' carries the scalar parameter).
instance (Pointed term) => Integrate (Signal term scalar signal) where
   type Scalar (Signal term scalar signal) = Scalar term scalar signal
   integrate = Scalar . point . Integral
-- | Rewrite the term carried by a 'Signal', possibly changing the
-- signal type parameter.
mapSignal ::
   (term signal0 -> term signal1) ->
   Signal term scalar signal0 ->
   Signal term scalar signal1
mapSignal f (Signal sig) = Signal (f sig)
-- | Translate a 'Scalar' expression between variable vocabularies.
--
-- @mp@ lifts an atom translation through the abstract @term@ functor
-- (it is supplied by the caller because @term@ has no Functor
-- constraint here); @f@ renames scalar variables and @g@ rewrites the
-- signal terms found under 'Integral' atoms.
mapScalar ::
   ((ScalarAtom term scalar0 signal0 ->
     ScalarAtom term scalar1 signal1) ->
    (term (ScalarAtom term scalar0 signal0) ->
     term (ScalarAtom term scalar1 signal1))) ->
   (scalar0 -> scalar1) ->
   (term signal0 -> term signal1) ->
   Scalar term scalar0 signal0 ->
   Scalar term scalar1 signal1
mapScalar mp f g (Scalar scalar) =
   Scalar $ flip mp scalar $ \scalarAtom ->
      case scalarAtom of
         ScalarVariable symbol -> ScalarVariable $ f symbol
         Integral (Signal signal) -> Integral $ Signal $ g signal
|
energyflowanalysis/efa-2.1
|
src/EFA/Symbolic/Mixed.hs
|
bsd-3-clause
| 5,900
| 0
| 12
| 1,328
| 2,102
| 1,091
| 1,011
| 156
| 2
|
module Main where
import LOGL.Application
import Foreign.Ptr
import Graphics.UI.GLFW as GLFW
import Graphics.Rendering.OpenGL.GL as GL
import Graphics.GLUtil
import System.FilePath
import Graphics.Rendering.OpenGL.GL.Shaders.ProgramObjects
import Linear.Matrix
import Linear.V3
import Linear.Quaternion
-- | Interleaved vertex data for a textured quad: each of the four
-- corners carries 3 position floats, 3 colour floats and 2 texture
-- coordinates (a stride of 8 floats per vertex).
vertices :: [GLfloat]
vertices = [
    --Positions        Colors           Texture Coords
     0.5,  0.5, 0.0,   1.0, 0.0, 0.0,   1.0, 1.0, -- Top Right
     0.5, -0.5, 0.0,   0.0, 1.0, 0.0,   1.0, 0.0, -- Bottom Right
    -0.5, -0.5, 0.0,   0.0, 0.0, 1.0,   0.0, 0.0, -- Bottom Left
    -0.5,  0.5, 0.0,   1.0, 1.0, 0.0,   0.0, 1.0 ] -- Top Left

-- | Element indices describing the quad as two triangles.
indices :: [GLuint]
indices = [ -- Note that we start from 0!
    0, 1, 3, -- First Triangle
    1, 2, 3] -- Second Triangle
-- | Entry point: creates a window, compiles the transformation shader
-- pair, uploads quad geometry and two textures, then runs the render
-- loop, rotating and translating the quad each frame.
main :: IO ()
main = do
    GLFW.init
    w <- createAppWindow 800 600 "LearnOpenGL"
    shader <- simpleShaderProgram ("data" </> "1_Getting-started" </> "5_Transformations" </> "transformations.vs")
                                  ("data" </> "1_Getting-started" </> "5_Transformations" </> "transformations.frag")
    (vao, vbo, ebo) <- createVAO
    -- load and create texture
    t0 <- createTexture ("data" </> "1_Getting-started" </> "4_Textures" </> "Textures" </> "container.jpg")
    t1 <- createTexture ("data" </> "1_Getting-started" </> "4_Textures" </> "Textures-combined" </> "awesomeface3.png")
    --polygonMode $= (Line, Line)
    runAppLoop w $ do
        pollEvents
        clearColor $= Color4 0.2 0.3 0.3 1.0
        clear [ColorBuffer]
        -- Draw our first triangle
        currentProgram $= Just (program shader)
        -- bind each texture to its unit and point the sampler uniforms at them
        activeTexture $= TextureUnit 0
        textureBinding Texture2D $= Just t0
        setUniform shader "ourTexture1" (TextureUnit 0)
        activeTexture $= TextureUnit 1
        textureBinding Texture2D $= Just t1
        setUniform shader "ourTexture2" (TextureUnit 1)
        Just t <- getTime
        -- rotate around the Z axis at ~0.8727 rad/s, then translate to
        -- the bottom-right quadrant
        let angle = 0.87266462599 * t
            rot = axisAngle (V3 (0.0 :: GLfloat) 0.0 1.0) (realToFrac angle)
            mat = mkTransformation rot (V3 0.5 (-0.5) (0.0 :: GLfloat))
        setUniform shader "transform" mat
        withVAO vao $ drawElements Triangles 6 UnsignedInt nullPtr
        swap w
    -- release GL objects before shutting GLFW down
    deleteObjectName vao
    deleteObjectName vbo
    deleteObjectName ebo
    terminate
-- | Build the vertex array object for the quad: uploads 'vertices'
-- into a VBO and 'indices' into an EBO, then declares three attributes
-- over the 8-float stride: position (location 0), colour (location 1)
-- and texture coordinates (location 2).
createVAO :: IO (VertexArrayObject, BufferObject, BufferObject)
createVAO = do
    vao <- genObjectName
    bindVertexArrayObject $= Just vao
    vbo <- makeBuffer ArrayBuffer vertices
    ebo <- makeBuffer ElementArrayBuffer indices
    -- strides/offsets are in bytes: 8 floats per vertex * 4 bytes each
    vertexAttribPointer (AttribLocation 0) $= (ToFloat, VertexArrayDescriptor 3 Float (8*4) offset0)
    vertexAttribArray (AttribLocation 0) $= Enabled
    vertexAttribPointer (AttribLocation 1) $= (ToFloat, VertexArrayDescriptor 3 Float (8*4) (offsetPtr (3*4)))
    vertexAttribArray (AttribLocation 1) $= Enabled
    vertexAttribPointer (AttribLocation 2) $= (ToFloat, VertexArrayDescriptor 2 Float (8*4) (offsetPtr (6*4)))
    vertexAttribArray (AttribLocation 2) $= Enabled
    -- unbind so later GL state changes cannot disturb this VAO
    bindVertexArrayObject $= Nothing
    return (vao, vbo, ebo)
|
atwupack/LearnOpenGL
|
app/1_Getting-started/5_Transformations/Transformations.hs
|
bsd-3-clause
| 3,104
| 0
| 17
| 715
| 921
| 478
| 443
| 66
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE StandaloneDeriving #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Distributed.Process.Platform.Async.AsyncChan
-- Copyright : (c) Tim Watson 2012
-- License : BSD3 (see the file LICENSE)
--
-- Maintainer : Tim Watson <watson.timothy@gmail.com>
-- Stability : experimental
-- Portability : non-portable (requires concurrency)
--
-- This module provides a set of operations for spawning Process operations
-- and waiting for their results. It is a thin layer over the basic
-- concurrency operations provided by "Control.Distributed.Process".
-- The main feature it provides is a pre-canned set of APIs for waiting on the
-- result of one or more asynchronously running (and potentially distributed)
-- processes.
--
-- The async handles returned by this module cannot be used by processes other
-- than the caller of 'async', and are not 'Serializable'. Specifically, calls
-- that block until an async worker completes (i.e., all variants of 'wait')
-- will /never return/ if called from a different process. For example:
--
-- > h <- newEmptyMVar
-- > outer <- spawnLocal $ async runMyAsyncTask >>= liftIO $ putMVar h
-- > hAsync <- liftIO $ takeMVar h
-- > say "the next expression will never complete, because hAsync belongs to 'outer'"
-- > wait hAsync
--
-- As with "Control.Distributed.Process.Platform.Async.AsyncSTM", workers can be
-- started on a local or remote node.
--
-- See "Control.Distributed.Process.Platform.Async".
-----------------------------------------------------------------------------
module Control.Distributed.Process.Platform.Async.AsyncChan
( -- * Exported types
AsyncRef
, AsyncTask(..)
, AsyncChan(worker)
, AsyncResult(..)
, Async(asyncWorker)
-- * Spawning asynchronous operations
, async
, asyncLinked
, newAsync
-- * Cancelling asynchronous operations
, cancel
, cancelWait
, cancelWith
, cancelKill
-- * Querying for results
, poll
, check
, wait
, waitAny
, waitAnyCancel
-- * Waiting with timeouts
, waitAnyTimeout
, waitTimeout
, waitCancelTimeout
, waitCheckTimeout
) where
import Control.Distributed.Process
import Control.Distributed.Process.Platform.Async.Types
import Control.Distributed.Process.Platform.Time
import Control.Distributed.Process.Platform.Internal.Types
import Control.Distributed.Process.Serializable
import Data.Maybe
( fromMaybe
)
-- | Private typed channel used to deliver the task's 'AsyncResult'
-- from the worker back to the originating process.
type InternalChannel a = (SendPort (AsyncResult a), ReceivePort (AsyncResult a))
--------------------------------------------------------------------------------
-- Cloud Haskell Typed Channel Async API --
--------------------------------------------------------------------------------
-- | A handle for an asynchronous action spawned by 'async'.
-- Asynchronous actions are run in a separate process, and
-- operations are provided for waiting for asynchronous actions to
-- complete and obtaining their results (see e.g. 'wait').
--
-- Handles of this type cannot cross remote boundaries. Furthermore, handles
-- of this type /must not/ be passed to functions in this module by processes
-- other than the caller of 'async' - that is, this module provides asynchronous
-- actions whose results are accessible *only* by the initiating process. This
-- limitation is imposed because of the use of typed channels, for which the
-- @ReceivePort@ component is effectively /thread local/.
--
-- See 'async'
data AsyncChan a = AsyncChan {
    worker    :: AsyncRef            -- ^ the process executing the task
  , insulator :: AsyncRef            -- ^ proxy that monitors the worker
                                     --   and relays its outcome
  , channel   :: (InternalChannel a) -- ^ private channel carrying the result
  }
-- | Create a new 'AsyncChan' and wrap it in an 'Async' record.
--
-- Used by "Control.Distributed.Process.Platform.Async".
newAsync :: (Serializable a)
         => (AsyncTask a -> Process (AsyncChan a))
         -> AsyncTask a -> Process (Async a)
newAsync maker task = do
  chanAsync <- maker task
  -- package the channel-specific operations behind the generic record
  return Async
    { hPoll        = poll chanAsync
    , hWait        = wait chanAsync
    , hWaitTimeout = \delay -> waitTimeout delay chanAsync
    , hCancel      = cancel chanAsync
    , asyncWorker  = worker chanAsync
    }
-- | Spawns an asynchronous action in a new process.
-- We ensure that if the caller's process exits, that the worker is killed.
-- Because an @AsyncChan@ can only be used by the initial caller's process, if
-- that process dies then the result (if any) is discarded. If a process other
-- than the initial caller attempts to obtain the result of an asynchronous
-- action, the behaviour is undefined. It is /highly likely/ that such a
-- process will block indefinitely, quite possible that such behaviour could lead
-- to deadlock and almost certain that resource starvation will occur. /Do Not/
-- share the handles returned by this function across multiple processes.
--
-- If you need to spawn an asynchronous operation whose handle can be shared by
-- multiple processes then use the 'AsyncSTM' module instead.
--
-- There is currently a contract for async workers, that they should
-- exit normally (i.e., they should not call the @exit@ or @kill@ with their own
-- 'ProcessId' nor use the @terminate@ primitive to cease functioning), otherwise
-- the 'AsyncResult' will end up being @AsyncFailed DiedException@ instead of
-- containing the desired result.
--
async :: (Serializable a) => AsyncTask a -> Process (AsyncChan a)
async = asyncDo True  -- True: monitor the caller so the worker dies with it

-- | For *AsyncChan*, 'async' already ensures an @AsyncChan@ is
-- never left running unintentionally. This function is provided for compatibility
-- with other /async/ implementations that may offer different semantics for
-- @async@ with regards linking.
--
-- @asyncLinked = async@
--
asyncLinked :: (Serializable a) => AsyncTask a -> Process (AsyncChan a)
asyncLinked = async
-- Worker-spawning core shared by 'async' and 'asyncLinked'; remote
-- tasks are reduced to local ones by wrapping the 'call'.
asyncDo :: (Serializable a) => Bool -> AsyncTask a -> Process (AsyncChan a)
asyncDo shouldLink (AsyncRemoteTask d n c) =
  asyncDo shouldLink (AsyncTask { asyncTask = call d n c })
asyncDo shouldLink (AsyncTask proc) = do
  (workerRef, insulatorRef, chan) <- spawnWorkers proc shouldLink
  return AsyncChan { worker    = workerRef
                   , insulator = insulatorRef
                   , channel   = chan
                   }
-- private API

-- | Spawn the worker process together with an \"insulator\" proxy that
-- monitors it, relays its outcome onto the private reply channel and
-- reacts to cancellation requests. Returns the worker pid, the
-- insulator pid and the result channel.
spawnWorkers :: (Serializable a)
             => Process a  -- ^ the asynchronous computation to run
             -> Bool       -- ^ when 'True', also monitor the caller and
                           --   kill the worker if the caller dies
             -> Process (AsyncRef, AsyncRef, InternalChannel a)
spawnWorkers task shouldLink = do
  root <- getSelfPid
  chan <- newChan
  -- listener/response proxy
  insulatorPid <- spawnLocal $ do
    workerPid <- spawnLocal $ do
      () <- expect  -- wait for the go-ahead before running the task
      r <- task
      sendChan (fst chan) (AsyncDone r)

    send root workerPid  -- let the parent process know the worker pid
    wref <- monitor workerPid
    rref <- case shouldLink of
              True  -> monitor root >>= return . Just
              False -> return Nothing
    -- BUGFIX: the root-monitor cleanup used to be wrapped in 'return',
    -- which built the 'unmonitor' action without ever running it,
    -- leaking the monitor on the root process. Run it directly.
    finally (pollUntilExit workerPid chan)
            (unmonitor wref >>
               maybe (return ()) unmonitor rref)

  workerPid <- expect
  send workerPid ()  -- release the worker now that monitoring is set up
  return (workerPid, insulatorPid, chan)
  where
    -- blocking receive until we see an input message
    pollUntilExit :: (Serializable a)
                  => ProcessId
                  -> (SendPort (AsyncResult a), ReceivePort (AsyncResult a))
                  -> Process ()
    pollUntilExit wpid (replyTo, _) = do
      r <- receiveWait [
          match (\(ProcessMonitorNotification _ pid' r) ->
                return (Right (pid', r)))
        , match (\c@(CancelWait) -> kill wpid "cancel" >> return (Left c))
        ]
      case r of
          Left CancelWait -> sendChan replyTo AsyncCancelled
          Right (fpid, d)
            | fpid == wpid -> case d of
                                DiedNormal -> return ()
                                _          -> sendChan replyTo (AsyncFailed d)
            | otherwise -> kill wpid "linkFailed"
-- | Check whether an 'AsyncChan' has completed yet.
--
-- See "Control.Distributed.Process.Platform.Async".
poll :: (Serializable a) => AsyncChan a -> Process (AsyncResult a)
poll hAsync = do
  -- a zero timeout makes this a non-blocking probe of the result channel
  answer <- receiveChanTimeout 0 (snd (channel hAsync))
  return (fromMaybe AsyncPending answer)
-- | Like 'poll' but returns 'Nothing' if @(poll hAsync) == AsyncPending@.
--
-- See "Control.Distributed.Process.Platform.Async".
check :: (Serializable a) => AsyncChan a -> Process (Maybe (AsyncResult a))
check hAsync = do
  status <- poll hAsync
  case status of
    AsyncPending -> return Nothing
    result       -> return (Just result)
-- | Wait for an asynchronous operation to complete or timeout.
--
-- See "Control.Distributed.Process.Platform.Async".
waitCheckTimeout :: (Serializable a) =>
                    TimeInterval -> AsyncChan a -> Process (AsyncResult a)
waitCheckTimeout t hAsync =
  -- a timeout collapses to 'AsyncPending', mirroring 'poll'
  fmap (fromMaybe AsyncPending) (waitTimeout t hAsync)
-- | Wait for an asynchronous action to complete, and return its
-- value. The outcome of the action is encoded as an 'AsyncResult'.
--
-- See "Control.Distributed.Process.Platform.Async".
wait :: (Serializable a) => AsyncChan a -> Process (AsyncResult a)
wait asyncHandle = receiveChan (snd (channel asyncHandle))
-- | Wait for an asynchronous operation to complete or timeout.
--
-- See "Control.Distributed.Process.Platform.Async".
waitTimeout :: (Serializable a) =>
               TimeInterval -> AsyncChan a -> Process (Maybe (AsyncResult a))
waitTimeout delay hAsync =
  receiveChanTimeout (asTimeout delay) (snd (channel hAsync))
-- | Wait for an asynchronous operation to complete or timeout. If it times out,
-- then 'cancelWait' the async handle instead.
--
waitCancelTimeout :: (Serializable a)
                  => TimeInterval
                  -> AsyncChan a
                  -> Process (AsyncResult a)
waitCancelTimeout delay hAsync =
  -- on timeout, fall back to cancelling and reporting the outcome
  waitTimeout delay hAsync >>= maybe (cancelWait hAsync) return
-- | Wait for any of the supplied @AsyncChans@s to complete. If multiple
-- 'Async's complete, then the value returned corresponds to the first
-- completed 'Async' in the list. Only /unread/ 'Async's are of value here,
-- because 'AsyncChan' does not hold on to its result after it has been read!
--
-- This function is analogous to the @mergePortsBiased@ primitive.
--
-- See "Control.Distributed.Process.mergePortsBiased".
waitAny :: (Serializable a)
        => [AsyncChan a]
        -> Process (AsyncResult a)
waitAny asyncs =
  -- merge every result port and take whichever message arrives first
  mergePortsBiased (map (snd . channel) asyncs) >>= receiveChan
-- | Like 'waitAny', but also cancels the other asynchronous
-- operations as soon as one has completed.
--
waitAnyCancel :: (Serializable a)
              => [AsyncChan a] -> Process (AsyncResult a)
waitAnyCancel asyncs =
  finally (waitAny asyncs) (mapM_ cancel asyncs)
-- | Like 'waitAny' but times out after the specified delay.
waitAnyTimeout :: (Serializable a)
               => TimeInterval
               -> [AsyncChan a]
               -> Process (Maybe (AsyncResult a))
waitAnyTimeout delay asyncs = do
  merged <- mergePortsBiased (map (snd . channel) asyncs)
  receiveChanTimeout (asTimeout delay) merged
-- | Cancel an asynchronous operation. Cancellation is asynchronous in nature.
--
-- See "Control.Distributed.Process.Platform.Async".
cancel :: AsyncChan a -> Process ()
cancel hAsync = send (insulator hAsync) CancelWait
-- | Cancel an asynchronous operation and wait for the cancellation to complete.
--
-- See "Control.Distributed.Process.Platform.Async".
cancelWait :: (Serializable a) => AsyncChan a -> Process (AsyncResult a)
cancelWait hAsync = do
  cancel hAsync
  wait hAsync
-- | Cancel an asynchronous operation immediately.
--
-- See "Control.Distributed.Process.Platform.Async".
cancelWith :: (Serializable b) => b -> AsyncChan a -> Process ()
cancelWith reason hAsync = exit (worker hAsync) reason
-- | Like 'cancelWith' but sends a @kill@ instruction instead of an exit.
--
-- See "Control.Distributed.Process.Platform.Async".
cancelKill :: String -> AsyncChan a -> Process ()
cancelKill reason hAsync = kill (worker hAsync) reason
|
haskell-distributed/distributed-process-platform
|
src/Control/Distributed/Process/Platform/Async/AsyncChan.hs
|
bsd-3-clause
| 12,236
| 0
| 19
| 2,656
| 2,065
| 1,113
| 952
| 159
| 4
|
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-}
module Transformations.Optimising.DeadFunctionEliminationSpec where
import Test.Hspec
import Test.Hspec.PipelineExample
import Pipeline.Pipeline hiding (pipeline)
import Grin.TH
-- | Convenience entry point for running this module's tests standalone.
runTests :: IO ()
runTests = hspec spec

-- | Each case runs the 'DeadFunctionElimination' transformation alone
-- and compares the resulting program against the expected one.
spec :: Spec
spec = do

  describe "Dead Function Elimination" $ do

    let deadFunctionEliminationPipeline =
          [ T DeadFunctionElimination
          ]

    -- f is called and mutates the heap, so it must be kept
    it "app_side_effect_1" $ do
      let before = [prog|
          grinMain =
            p0 <- store (CInt 0)
            y0 <- f p0
            y1 <- fetch p0
            pure y1

          f p =
            update p (CInt 1)
            pure 0
        |]
      let after = [prog|
          grinMain =
            p0 <- store (CInt 0)
            y0 <- f p0
            y1 <- fetch p0
            pure y1

          f p =
            update p (CInt 1)
            pure 0
        |]
      pipelineSrc before after deadFunctionEliminationPipeline

    -- f and g only reach each other, never grinMain: both are dead
    it "mutually_recursive" $ do
      let before = [prog|
          grinMain = pure 0
          f x = g x
          g y = f y
        |]
      let after = [prog|
          grinMain = pure 0
        |]
      pipelineSrc before after deadFunctionEliminationPipeline

    -- the unused node result is replaced by a typed #undefined value
    it "replace_node" $ do
      let before = [prog|
          grinMain =
            n0 <- f 0
            pure 0
          f x =
            p <- store (CInt 5)
            pure (CNode p)
        |]
      let after = [prog|
          grinMain =
            n0 <- pure (#undefined :: {CNode[#ptr]})
            pure 0
        |]
      pipelineSrc before after deadFunctionEliminationPipeline

    -- same as replace_node, but for a simple-typed result
    it "replace_simple_type" $ do
      let before = [prog|
          grinMain =
            y0 <- f 0
            pure 0
          f x = pure x
        |]
      let after = [prog|
          grinMain =
            y0 <- pure (#undefined :: T_Int64)
            pure 0
        |]
      pipelineSrc before after deadFunctionEliminationPipeline

    -- an entirely unreferenced function is simply removed
    it "simple" $ do
      let before = [prog|
          grinMain = pure 0
          f x = pure x
        |]
      let after = [prog|
          grinMain = pure 0
        |]
      pipelineSrc before after deadFunctionEliminationPipeline

    -- a called function with a primitive side effect must survive
    it "true_side_effect_min" $ do
      let before = [prog|
          grinMain =
            result_main <- Main.main1 $
            pure ()

          Main.main1 =
            _prim_int_print $ 1
        |]
      let after = [prog|
          grinMain =
            result_main <- Main.main1 $
            pure ()

          Main.main1 =
            _prim_int_print $ 1
        |]
      pipelineSrc before after deadFunctionEliminationPipeline
|
andorp/grin
|
grin/test/Transformations/Optimising/DeadFunctionEliminationSpec.hs
|
bsd-3-clause
| 2,796
| 0
| 15
| 1,260
| 364
| 193
| 171
| 37
| 1
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.GHC
-- Copyright : Isaac Jones 2003-2007
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This is a fairly large module. It contains most of the GHC-specific code for
-- configuring, building and installing packages. It also exports a function
-- for finding out what packages are already installed. Configuring involves
-- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions
-- this version of ghc supports and returning a 'Compiler' value.
--
-- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out
-- what packages are installed.
--
-- Building is somewhat complex as there is quite a bit of information to take
-- into account. We have to build libs and programs, possibly for profiling and
-- shared libs. We have to support building libraries that will be usable by
-- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files
-- using ghc. Linking, especially for @split-objs@ is remarkably complex,
-- partly because there tend to be 1,000's of @.o@ files and this can often be
-- more than we can pass to the @ld@ or @ar@ programs in one go.
--
-- Installing for libs and exes involves finding the right files and copying
-- them to the right places. One of the more tricky things about this module is
-- remembering the layout of files in the build directory (which is not
-- explicitly documented) and thus what search dirs are used for various kinds
-- of files.
module Distribution.Simple.GHC (
getGhcInfo,
configure,
getInstalledPackages,
getInstalledPackagesMonitorFiles,
getPackageDBContents,
buildLib, buildFLib, buildExe,
replLib, replFLib, replExe,
startInterpreter,
installLib, installFLib, installExe,
libAbiHash,
hcPkgInfo,
registerPackage,
componentGhcOptions,
componentCcGhcOptions,
getLibDir,
isDynamic,
getGlobalPackageDB,
pkgRoot,
-- * Constructing GHC environment files
Internal.GhcEnvironmentFileEntry(..),
Internal.simpleGhcEnvironmentFile,
Internal.writeGhcEnvironmentFile,
-- * Version-specific implementation quirks
getImplInfo,
GhcImplInfo(..)
) where
import Prelude ()
import Distribution.Compat.Prelude
import qualified Distribution.Simple.GHC.IPI642 as IPI642
import qualified Distribution.Simple.GHC.Internal as Internal
import Distribution.Simple.GHC.ImplInfo
import Distribution.PackageDescription.Utils (cabalBug)
import Distribution.PackageDescription as PD
import Distribution.InstalledPackageInfo (InstalledPackageInfo)
import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo
import Distribution.Simple.PackageIndex (InstalledPackageIndex)
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Distribution.Simple.LocalBuildInfo
import Distribution.Types.ComponentLocalBuildInfo
import qualified Distribution.Simple.Hpc as Hpc
import Distribution.Simple.BuildPaths
import Distribution.Simple.Utils
import Distribution.Package
import qualified Distribution.ModuleName as ModuleName
import Distribution.ModuleName (ModuleName)
import Distribution.Simple.Program
import Distribution.Simple.Program.Builtin (runghcProgram)
import qualified Distribution.Simple.Program.HcPkg as HcPkg
import qualified Distribution.Simple.Program.Ar as Ar
import qualified Distribution.Simple.Program.Ld as Ld
import qualified Distribution.Simple.Program.Strip as Strip
import Distribution.Simple.Program.GHC
import Distribution.Simple.Setup
import qualified Distribution.Simple.Setup as Cabal
import Distribution.Simple.Compiler hiding (Flag)
import Distribution.Version
import Distribution.System
import Distribution.Verbosity
import Distribution.Text
import Distribution.Types.ForeignLib
import Distribution.Types.ForeignLibType
import Distribution.Types.ForeignLibOption
import Distribution.Types.UnqualComponentName
import Distribution.Utils.NubList
import Language.Haskell.Extension
import Control.Monad (msum)
import Data.Char (isLower)
import qualified Data.Map as Map
import System.Directory
( doesFileExist, getAppUserDataDirectory, createDirectoryIfMissing
, canonicalizePath, removeFile, renameFile )
import System.FilePath ( (</>), (<.>), takeExtension
, takeDirectory, replaceExtension
,isRelative )
import qualified System.Info
#ifndef mingw32_HOST_OS
import System.Posix (createSymbolicLink)
#endif /* mingw32_HOST_OS */
-- -----------------------------------------------------------------------------
-- Configuring
-- | Locate and probe @ghc@ and @ghc-pkg@, returning a configured
-- 'Compiler' (version, supported languages/extensions, properties),
-- the target platform and an updated 'ProgramDb' with the related
-- tools (hsc2hs, haddock, hpc, runghc) registered.
configure :: Verbosity -> Maybe FilePath -> Maybe FilePath
          -> ProgramDb
          -> IO (Compiler, Maybe Platform, ProgramDb)
configure verbosity hcPath hcPkgPath conf0 = do

  (ghcProg, ghcVersion, progdb1) <-
    requireProgramVersion verbosity ghcProgram
      (orLaterVersion (mkVersion [6,11]))
      (userMaybeSpecifyPath "ghc" hcPath conf0)
  let implInfo = ghcVersionImplInfo ghcVersion

  -- This is slightly tricky, we have to configure ghc first, then we use the
  -- location of ghc to help find ghc-pkg in the case that the user did not
  -- specify the location of ghc-pkg directly:
  (ghcPkgProg, ghcPkgVersion, progdb2) <-
    requireProgramVersion verbosity ghcPkgProgram {
      programFindLocation = guessGhcPkgFromGhcPath ghcProg
    }
    anyVersion (userMaybeSpecifyPath "ghc-pkg" hcPkgPath progdb1)

  -- a mismatched ghc/ghc-pkg pair produces corrupt package registrations
  when (ghcVersion /= ghcPkgVersion) $ die' verbosity $
       "Version mismatch between ghc and ghc-pkg: "
    ++ programPath ghcProg ++ " is version " ++ display ghcVersion ++ " "
    ++ programPath ghcPkgProg ++ " is version " ++ display ghcPkgVersion

  -- Likewise we try to find the matching hsc2hs and haddock programs.
  let hsc2hsProgram' = hsc2hsProgram {
        programFindLocation = guessHsc2hsFromGhcPath ghcProg
      }
      haddockProgram' = haddockProgram {
        programFindLocation = guessHaddockFromGhcPath ghcProg
      }
      hpcProgram' = hpcProgram {
        programFindLocation = guessHpcFromGhcPath ghcProg
      }
      runghcProgram' = runghcProgram {
        programFindLocation = guessRunghcFromGhcPath ghcProg
      }
      progdb3 = addKnownProgram haddockProgram' $
                addKnownProgram hsc2hsProgram' $
                addKnownProgram hpcProgram' $
                addKnownProgram runghcProgram' progdb2

  languages <- Internal.getLanguages verbosity implInfo ghcProg
  extensions0 <- Internal.getExtensions verbosity implInfo ghcProg

  ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcProg
  let ghcInfoMap = Map.fromList ghcInfo
      extensions = -- workaround https://ghc.haskell.org/ticket/11214
                   filterExt JavaScriptFFI $
                   -- see 'filterExtTH' comment below
                   filterExtTH $ extensions0

      -- starting with GHC 8.0, `TemplateHaskell` will be omitted from
      -- `--supported-extensions` when it's not available.
      -- for older GHCs we can use the "Have interpreter" property to
      -- filter out `TemplateHaskell`
      filterExtTH | ghcVersion < mkVersion [8]
                  , Just "NO" <- Map.lookup "Have interpreter" ghcInfoMap
                  = filterExt TemplateHaskell
                  | otherwise = id

      filterExt ext = filter ((/= EnableExtension ext) . fst)

  let comp = Compiler {
        compilerId = CompilerId GHC ghcVersion,
        compilerAbiTag = NoAbiTag,
        compilerCompat = [],
        compilerLanguages = languages,
        compilerExtensions = extensions,
        compilerProperties = ghcInfoMap
      }
      compPlatform = Internal.targetPlatform ghcInfo
      -- configure gcc and ld
      progdb4 = Internal.configureToolchain implInfo ghcProg ghcInfoMap progdb3
  return (comp, compPlatform, progdb4)
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find
-- the corresponding tool; e.g. if the tool is ghc-pkg, we try looking
-- for a versioned or unversioned ghc-pkg in the same dir, that is:
--
-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg(.exe)
--
guessToolFromGhcPath :: Program -> ConfiguredProgram
                     -> Verbosity -> ProgramSearchPath
                     -> IO (Maybe (FilePath, [FilePath]))
guessToolFromGhcPath tool ghcProg verbosity searchpath
  = do let toolname   = programName tool
           given_path = programPath ghcProg
           given_dir  = takeDirectory given_path
       -- the ghc on the PATH may be a symlink; also search next to
       -- the resolved binary
       real_path <- canonicalizePath given_path
       let real_dir = takeDirectory real_path
           versionSuffix path = takeVersionSuffix (dropExeExtension path)
           given_suf = versionSuffix given_path
           real_suf  = versionSuffix real_path
           guessNormal       dir     = dir </> toolname <.> exeExtension
           guessGhcVersioned dir suf = dir </> (toolname ++ "-ghc" ++ suf)
                                           <.> exeExtension
           guessVersioned    dir suf = dir </> (toolname ++ suf)
                                           <.> exeExtension
           -- most specific name first: tool-ghc-X.Y, tool-X.Y, tool
           mkGuesses dir suf | null suf  = [guessNormal dir]
                             | otherwise = [guessGhcVersioned dir suf,
                                            guessVersioned dir suf,
                                            guessNormal dir]
           guesses = mkGuesses given_dir given_suf ++
                     if real_path == given_path
                       then []
                       else mkGuesses real_dir real_suf
       info verbosity $ "looking for tool " ++ toolname
                     ++ " near compiler in " ++ given_dir
       debug verbosity $ "candidate locations: " ++ show guesses
       exists <- traverse doesFileExist guesses
       case [ file | (file, True) <- zip guesses exists ] of
         -- If we can't find it near ghc, fall back to the usual
         -- method.
         []     -> programFindLocation tool verbosity searchpath
         (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp
                      -- report the paths we probed before the hit, for
                      -- change monitoring
                      let lookedAt = map fst
                                   . takeWhile (\(_file, exist) -> not exist)
                                   $ zip guesses exists
                      return (Just (fp, lookedAt))
  where takeVersionSuffix :: FilePath -> String
        takeVersionSuffix = takeWhileEndLE isSuffixChar

        isSuffixChar :: Char -> Bool
        isSuffixChar c = isDigit c || c == '.' || c == '-'
-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding ghc-pkg, we try looking for both a versioned and unversioned
-- ghc-pkg in the same dir, that is:
--
-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg-6.6.1(.exe)
-- > /usr/local/bin/ghc-pkg(.exe)
--
guessGhcPkgFromGhcPath :: ConfiguredProgram
                       -> Verbosity -> ProgramSearchPath
                       -> IO (Maybe (FilePath, [FilePath]))
guessGhcPkgFromGhcPath = guessToolFromGhcPath ghcPkgProgram

-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding hsc2hs, we try looking for both a versioned and unversioned
-- hsc2hs in the same dir, that is:
--
-- > /usr/local/bin/hsc2hs-ghc-6.6.1(.exe)
-- > /usr/local/bin/hsc2hs-6.6.1(.exe)
-- > /usr/local/bin/hsc2hs(.exe)
--
guessHsc2hsFromGhcPath :: ConfiguredProgram
                       -> Verbosity -> ProgramSearchPath
                       -> IO (Maybe (FilePath, [FilePath]))
guessHsc2hsFromGhcPath = guessToolFromGhcPath hsc2hsProgram

-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a
-- corresponding haddock, we try looking for both a versioned and unversioned
-- haddock in the same dir, that is:
--
-- > /usr/local/bin/haddock-ghc-6.6.1(.exe)
-- > /usr/local/bin/haddock-6.6.1(.exe)
-- > /usr/local/bin/haddock(.exe)
--
guessHaddockFromGhcPath :: ConfiguredProgram
                        -> Verbosity -> ProgramSearchPath
                        -> IO (Maybe (FilePath, [FilePath]))
guessHaddockFromGhcPath = guessToolFromGhcPath haddockProgram

-- | Like 'guessGhcPkgFromGhcPath', but for the @hpc@ tool.
guessHpcFromGhcPath :: ConfiguredProgram
                    -> Verbosity -> ProgramSearchPath
                    -> IO (Maybe (FilePath, [FilePath]))
guessHpcFromGhcPath = guessToolFromGhcPath hpcProgram

-- | Like 'guessGhcPkgFromGhcPath', but for the @runghc@ tool.
guessRunghcFromGhcPath :: ConfiguredProgram
                       -> Verbosity -> ProgramSearchPath
                       -> IO (Maybe (FilePath, [FilePath]))
guessRunghcFromGhcPath = guessToolFromGhcPath runghcProgram
-- | Query the configured @ghc@ for its key/value compiler properties
-- (as reported by @ghc --info@).
getGhcInfo :: Verbosity -> ConfiguredProgram -> IO [(String, String)]
getGhcInfo verbosity ghcProg = Internal.getGhcInfo verbosity implInfo ghcProg
  where
    -- partial match: a configured ghc always carries a version
    Just version = programVersion ghcProg
    implInfo = ghcVersionImplInfo version
-- | Given a single package DB, return all installed packages.
getPackageDBContents :: Verbosity -> PackageDB -> ProgramDb
                     -> IO InstalledPackageIndex
getPackageDBContents verbosity packagedb progdb =
  getInstalledPackages' verbosity [packagedb] progdb
    >>= \dbPkgs -> toPackageIndex verbosity dbPkgs progdb
-- | Given a package DB stack, return all installed packages.
getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack
                     -> ProgramDb
                     -> IO InstalledPackageIndex
getInstalledPackages verbosity comp packagedbs progdb = do
  checkPackageDbEnvVar verbosity
  checkPackageDbStack verbosity comp packagedbs
  pkgss <- getInstalledPackages' verbosity packagedbs progdb
  index <- toPackageIndex verbosity pkgss progdb
  return $! hackRtsPackage index

  where
    -- scrub the rts package's broken mingw include dir (see
    -- 'removeMingwIncludeDir') before exposing the index
    hackRtsPackage index =
      case PackageIndex.lookupPackageName index (mkPackageName "rts") of
        [(_,[rts])]
           -> PackageIndex.insert (removeMingwIncludeDir rts) index
        _  -> index -- No (or multiple) ghc rts package is registered!!
                    -- Feh, whatever, the ghc test suite does some crazy stuff.
-- | Given a list of @(PackageDB, InstalledPackageInfo)@ pairs, produce a
-- @PackageIndex@. Helper function used by 'getPackageDBContents' and
-- 'getInstalledPackages'.
toPackageIndex :: Verbosity
               -> [(PackageDB, [InstalledPackageInfo])]
               -> ProgramDb
               -> IO InstalledPackageIndex
toPackageIndex verbosity pkgss progdb = do
  -- On Windows, various fields have $topdir/foo rather than full
  -- paths. We need to substitute the right value in so that when
  -- we, for example, call gcc, we have proper paths to give it.
  topDir <- getLibDir' verbosity ghcProg
  let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs)
                | (_, pkgs) <- pkgss ]
  return $! mconcat indices

  where
    -- partial match: ghc is guaranteed to be in the ProgramDb here
    Just ghcProg = lookupProgram ghcProgram progdb
-- | Ask ghc for its library directory (@ghc --print-libdir@), locating
-- ghc through the build info's program database.
getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath
getLibDir verbosity lbi =
    dropWhileEndLE isSpace `fmap`
     getDbProgramOutput verbosity ghcProgram
     (withPrograms lbi) ["--print-libdir"]

-- | Like 'getLibDir', but for an already-configured ghc.
getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath
getLibDir' verbosity ghcProg =
    dropWhileEndLE isSpace `fmap`
     getProgramOutput verbosity ghcProg ["--print-libdir"]

-- | Return the 'FilePath' to the global GHC package database.
getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath
getGlobalPackageDB verbosity ghcProg =
    dropWhileEndLE isSpace `fmap`
     getProgramOutput verbosity ghcProg ["--print-global-package-db"]
-- | Return the 'FilePath' to the per-user GHC package database.
getUserPackageDB :: Verbosity -> ConfiguredProgram -> Platform -> NoCallStackIO FilePath
getUserPackageDB _verbosity ghcProg platform = do
    -- It's rather annoying that we have to reconstruct this, because ghc
    -- hides this information from us otherwise. But for certain use cases
    -- like change monitoring it really can't remain hidden.
    appdir <- getAppUserDataDirectory "ghc"
    return (appdir </> platformAndVersion </> packageConfFileName)
  where
    platformAndVersion = Internal.ghcPlatformAndVersionString
                           platform ghcVersion
    -- the per-user db changed from a single file to a directory in 6.12
    packageConfFileName
      | ghcVersion >= mkVersion [6,12] = "package.conf.d"
      | otherwise                      = "package.conf"
    -- partial match: a configured ghc always carries a version
    Just ghcVersion = programVersion ghcProg

-- | Fail if GHC_PACKAGE_PATH is set: it would silently override the
-- package db stack we construct.
checkPackageDbEnvVar :: Verbosity -> IO ()
checkPackageDbEnvVar verbosity =
    Internal.checkPackageDbEnvVar verbosity "GHC" "GHC_PACKAGE_PATH"

-- | Validate the package db stack, dispatching on whether this ghc
-- still uses the old @-package-conf@ style flags (pre 7.6).
checkPackageDbStack :: Verbosity -> Compiler -> PackageDBStack -> IO ()
checkPackageDbStack verbosity comp =
    if flagPackageConf implInfo
      then checkPackageDbStackPre76 verbosity
      else checkPackageDbStackPost76 verbosity
  where implInfo = ghcVersionImplInfo (compilerVersion comp)
-- | GHC >= 7.6: the global package db is optional, but if present it
-- must come first and may appear only once.
checkPackageDbStackPost76 :: Verbosity -> PackageDBStack -> IO ()
checkPackageDbStackPost76 _ (GlobalPackageDB:rest)
  | GlobalPackageDB `notElem` rest = return ()
checkPackageDbStackPost76 verbosity rest
  | GlobalPackageDB `elem` rest =
      die' verbosity $ "If the global package db is specified, it must be "
        ++ "specified first and cannot be specified multiple times"
checkPackageDbStackPost76 _ _ = return ()
-- | GHC < 7.6: the global package db is always used, so it must be
-- listed first and appear exactly once.
checkPackageDbStackPre76 :: Verbosity -> PackageDBStack -> IO ()
checkPackageDbStackPre76 _ (GlobalPackageDB:rest)
  | GlobalPackageDB `notElem` rest = return ()
checkPackageDbStackPre76 verbosity rest
  | GlobalPackageDB `notElem` rest =
      -- note: a space was missing between "7.6," and "see" in the
      -- concatenated message; fixed here
      die' verbosity $ "With current ghc versions the global package db is always used "
        ++ "and must be listed first. This ghc limitation is lifted in GHC 7.6, "
        ++ "see http://hackage.haskell.org/trac/ghc/ticket/5977"
checkPackageDbStackPre76 verbosity _ =
  die' verbosity $ "If the global package db is specified, it must be "
    ++ "specified first and cannot be specified multiple times"
-- GHC < 6.10 stored "$topdir/include/mingw" in the rts package's
-- installDirs. This breaks when you want to use a different gcc,
-- so we need to filter it out.
removeMingwIncludeDir :: InstalledPackageInfo -> InstalledPackageInfo
removeMingwIncludeDir pkg =
    pkg { InstalledPackageInfo.includeDirs = keep }
  where
    keep = filter (not . ("mingw" `isSuffixOf`))
                  (InstalledPackageInfo.includeDirs pkg)
-- | Get the packages from specific PackageDBs, not cumulative.
--
-- For GHC >= 6.9 each database is queried individually via @ghc-pkg dump@.
-- For older GHCs we fall back to locating the package files from the
-- @ghc-pkg list@ output and parsing their contents with 'Read'.
getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramDb
                     -> IO [(PackageDB, [InstalledPackageInfo])]
getInstalledPackages' verbosity packagedbs progdb
  | ghcVersion >= mkVersion [6,9] =
  sequenceA
    [ do pkgs <- HcPkg.dump (hcPkgInfo progdb) verbosity packagedb
         return (packagedb, pkgs)
    | packagedb <- packagedbs ]
  where
    -- NOTE(review): partial patterns; callers must have configured ghc
    -- (with a known version) in the program db before calling this.
    Just ghcProg = lookupProgram ghcProgram progdb
    Just ghcVersion = programVersion ghcProg
-- Fallback for GHC < 6.9: parse the locations printed by @ghc-pkg list@
-- (lines ending in ':' name the package files) and read each db file.
getInstalledPackages' verbosity packagedbs progdb = do
    str <- getDbProgramOutput verbosity ghcPkgProgram progdb ["list"]
    let pkgFiles = [ init line | line <- lines str, last line == ':' ]
        -- Map a requested db to the corresponding file from the listing;
        -- 'Nothing' means the user db does not exist (which is fine).
        dbFile packagedb = case (packagedb, pkgFiles) of
          (GlobalPackageDB, global:_)      -> return $ Just global
          (UserPackageDB,  _global:user:_) -> return $ Just user
          (UserPackageDB,  _global:_)      -> return $ Nothing
          (SpecificPackageDB specific, _)  -> return $ Just specific
          _ -> die' verbosity "cannot read ghc-pkg package listing"
    pkgFiles' <- traverse dbFile packagedbs
    sequenceA [ withFileContents file $ \content -> do
                  pkgs <- readPackages file content
                  return (db, pkgs)
              | (db , Just file) <- zip packagedbs pkgFiles' ]
  where
    -- Depending on the version of ghc we use a different type's Read
    -- instance to parse the package file and then convert.
    -- It's a bit yuck. But that's what we get for using Read/Show.
    readPackages
      | ghcVersion >= mkVersion [6,4,2]
      = \file content -> case reads content of
                           [(pkgs, _)] -> return (map IPI642.toCurrent pkgs)
                           _           -> failToRead file
      -- We dropped support for 6.4.2 and earlier.
      | otherwise
      = \file _ -> failToRead file
    -- NOTE(review): partial patterns, as in the clause above.
    Just ghcProg = lookupProgram ghcProgram progdb
    Just ghcVersion = programVersion ghcProg
    failToRead file = die' verbosity $ "cannot read ghc package database " ++ file
-- | Compute, for each requested package db, the single file whose
-- modification indicates that the db's contents may have changed.
getInstalledPackagesMonitorFiles :: Verbosity -> Platform
                                 -> ProgramDb
                                 -> [PackageDB]
                                 -> IO [FilePath]
getInstalledPackagesMonitorFiles verbosity platform progdb packageDBs =
    traverse getPackageDBPath packageDBs
  where
    getPackageDBPath :: PackageDB -> IO FilePath
    getPackageDBPath db = case db of
      GlobalPackageDB ->
        getGlobalPackageDB verbosity ghcProg >>= selectMonitorFile
      UserPackageDB ->
        getUserPackageDB verbosity ghcProg platform >>= selectMonitorFile
      SpecificPackageDB path ->
        selectMonitorFile path

    -- GHC has old style file dbs, and new style directory dbs.
    -- For directory-style dbs we only need to monitor the cache file,
    -- not the whole directory: the ghc program itself only reads the
    -- cache file, so it's safe to watch just that one file.
    selectMonitorFile path = do
      isFileStyle <- doesFileExist path
      return $ if isFileStyle then path else path </> "package.cache"

    Just ghcProg = lookupProgram ghcProgram progdb
-- -----------------------------------------------------------------------------
-- Building a library

-- | Build ('buildLib') or start a REPL for ('replLib') a library
-- component.  Both are thin wrappers over 'buildOrReplLib'; only the
-- Bool "for repl" flag differs.
buildLib, replLib :: Verbosity -> Cabal.Flag (Maybe Int)
                  -> PackageDescription -> LocalBuildInfo
                  -> Library -> ComponentLocalBuildInfo -> IO ()
buildLib = buildOrReplLib False
replLib = buildOrReplLib True
-- | Build a library component with GHC, or (when the first argument is
-- True) load it into a @ghci@ session instead of compiling.
--
-- Compiles the vanilla, profiling, shared and GHCi flavours requested by
-- the 'LocalBuildInfo' flags, builds any C sources, then links the
-- resulting object files into the corresponding library archives.
buildOrReplLib :: Bool -> Verbosity -> Cabal.Flag (Maybe Int)
               -> PackageDescription -> LocalBuildInfo
               -> Library -> ComponentLocalBuildInfo -> IO ()
buildOrReplLib forRepl verbosity numJobs pkg_descr lbi lib clbi = do
  let uid = componentUnitId clbi
      libTargetDir = componentBuildDir lbi clbi
      -- Helpers that gate an action on the corresponding library flavour
      -- being wanted (the force* variants override the lbi flags, e.g.
      -- when TemplateHaskell requires an extra flavour).
      whenVanillaLib forceVanilla =
        when (forceVanilla || withVanillaLib lbi)
      whenProfLib = when (withProfLib lbi)
      whenSharedLib forceShared =
        when (forceShared || withSharedLib lbi)
      whenGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi)
      ifReplLib = when forRepl
      comp = compiler lbi
      ghcVersion = compilerVersion comp
      implInfo = getImplInfo comp
      platform@(Platform _hostArch hostOS) = hostPlatform lbi
      -- Indefinite (Backpack) components have no code to compile or link.
      has_code = not (componentIsIndefinite clbi)
  (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
  let runGhcProg = runGHC verbosity ghcProg comp platform
  -- Possibly adjust the -threaded flag in the build info; see
  -- 'hackThreadedFlag' for the details of this workaround.
  libBi <- hackThreadedFlag verbosity
             comp (withProfLib lbi) (libBuildInfo lib)
  let isGhcDynamic = isDynamic comp
      dynamicTooSupported = supportsDynamicToo comp
      doingTH = EnableExtension TemplateHaskell `elem` allExtensions libBi
      forceVanillaLib = doingTH && not isGhcDynamic
      forceSharedLib = doingTH && isGhcDynamic
      -- TH always needs default libs, even when building for profiling
  -- Determine if program coverage should be enabled and if so, what
  -- '-hpcdir' should be.
  let isCoverageEnabled = libCoverage lbi
      -- TODO: Historically HPC files have been put into a directory which
      -- has the package name. I'm going to avoid changing this for
      -- now, but it would probably be better for this to be the
      -- component ID instead...
      pkg_name = display (PD.package pkg_descr)
      distPref = fromFlag $ configDistPref $ configFlags lbi
      hpcdir way
        | forRepl = mempty -- HPC is not supported in ghci
        | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way pkg_name
        | otherwise = mempty
  createDirectoryIfMissingVerbose verbosity True libTargetDir
  -- TODO: do we need to put hs-boot files into place for mutually recursive
  -- modules?
  -- Per-flavour GHC option sets: all are 'vanillaOpts' with flavour-
  -- specific suffixes/modes layered on via 'mappend'.
  let cObjs = map (`replaceExtension` objExtension) (cSources libBi)
      baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir
      vanillaOpts = baseOpts `mappend` mempty {
                      ghcOptMode = toFlag GhcModeMake,
                      ghcOptNumJobs = numJobs,
                      ghcOptInputModules = toNubListR $ allLibModules lib clbi,
                      ghcOptHPCDir = hpcdir Hpc.Vanilla
                    }
      profOpts = vanillaOpts `mappend` mempty {
                   ghcOptProfilingMode = toFlag True,
                   ghcOptProfilingAuto = Internal.profDetailLevelFlag True
                                           (withProfLibDetail lbi),
                   ghcOptHiSuffix = toFlag "p_hi",
                   ghcOptObjSuffix = toFlag "p_o",
                   ghcOptExtra = toNubListR $ hcProfOptions GHC libBi,
                   ghcOptHPCDir = hpcdir Hpc.Prof
                 }
      sharedOpts = vanillaOpts `mappend` mempty {
                     ghcOptDynLinkMode = toFlag GhcDynamicOnly,
                     ghcOptFPic = toFlag True,
                     ghcOptHiSuffix = toFlag "dyn_hi",
                     ghcOptObjSuffix = toFlag "dyn_o",
                     ghcOptExtra = toNubListR $ hcSharedOptions GHC libBi,
                     ghcOptHPCDir = hpcdir Hpc.Dyn
                   }
      linkerOpts = mempty {
                     ghcOptLinkOptions = toNubListR $ PD.ldOptions libBi,
                     ghcOptLinkLibs = toNubListR $ extraLibs libBi,
                     ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi,
                     ghcOptLinkFrameworks = toNubListR $
                                            PD.frameworks libBi,
                     ghcOptLinkFrameworkDirs = toNubListR $
                                               PD.extraFrameworkDirs libBi,
                     ghcOptInputFiles = toNubListR
                                        [libTargetDir </> x | x <- cObjs]
                   }
      replOpts = vanillaOpts {
                   ghcOptExtra = overNubListR
                                 Internal.filterGhciFlags $
                                 ghcOptExtra vanillaOpts,
                   ghcOptNumJobs = mempty
                 }
                 `mappend` linkerOpts
                 `mappend` mempty {
                   ghcOptMode = toFlag GhcModeInteractive,
                   ghcOptOptimisation = toFlag GhcNoOptimisation
                 }
      vanillaSharedOpts = vanillaOpts `mappend` mempty {
                            ghcOptDynLinkMode = toFlag GhcStaticAndDynamic,
                            ghcOptDynHiSuffix = toFlag "dyn_hi",
                            ghcOptDynObjSuffix = toFlag "dyn_o",
                            ghcOptHPCDir = hpcdir Hpc.Dyn
                          }
  -- Compile the Haskell modules, using a single -dynamic-too invocation
  -- for the vanilla+shared pair when the compiler supports it.
  unless (forRepl || null (allLibModules lib clbi)) $
    do let vanilla = whenVanillaLib forceVanillaLib (runGhcProg vanillaOpts)
           shared = whenSharedLib forceSharedLib (runGhcProg sharedOpts)
           useDynToo = dynamicTooSupported &&
                       (forceVanillaLib || withVanillaLib lbi) &&
                       (forceSharedLib || withSharedLib lbi) &&
                       null (hcSharedOptions GHC libBi)
       if not has_code
         then vanilla
         else
          if useDynToo
            then do
                runGhcProg vanillaSharedOpts
                case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of
                  (Cabal.Flag dynDir, Cabal.Flag vanillaDir) ->
                      -- When the vanilla and shared library builds are done
                      -- in one pass, only one set of HPC module interfaces
                      -- are generated. This set should suffice for both
                      -- static and dynamically linked executables. We copy
                      -- the modules interfaces so they are available under
                      -- both ways.
                      copyDirectoryRecursive verbosity dynDir vanillaDir
                  _ -> return ()
            else if isGhcDynamic
              then do shared; vanilla
              else do vanilla; shared
  when has_code $ whenProfLib (runGhcProg profOpts)
  -- build any C sources
  unless (not has_code || null (cSources libBi)) $ do
    info verbosity "Building C Sources..."
    sequence_
      [ do let baseCcOpts = Internal.componentCcGhcOptions verbosity implInfo
                            lbi libBi clbi libTargetDir filename
               vanillaCcOpts = if isGhcDynamic
                               -- Dynamic GHC requires C sources to be built
                               -- with -fPIC for REPL to work. See #2207.
                               then baseCcOpts { ghcOptFPic = toFlag True }
                               else baseCcOpts
               profCcOpts = vanillaCcOpts `mappend` mempty {
                              ghcOptProfilingMode = toFlag True,
                              ghcOptObjSuffix = toFlag "p_o"
                            }
               sharedCcOpts = vanillaCcOpts `mappend` mempty {
                                ghcOptFPic = toFlag True,
                                ghcOptDynLinkMode = toFlag GhcDynamicOnly,
                                ghcOptObjSuffix = toFlag "dyn_o"
                              }
               odir = fromFlag (ghcOptObjDir vanillaCcOpts)
           createDirectoryIfMissingVerbose verbosity True odir
           let runGhcProgIfNeeded ccOpts = do
                 needsRecomp <- checkNeedsRecompilation filename ccOpts
                 when needsRecomp $ runGhcProg ccOpts
           runGhcProgIfNeeded vanillaCcOpts
           unless forRepl $
             whenSharedLib forceSharedLib (runGhcProgIfNeeded sharedCcOpts)
           unless forRepl $ whenProfLib (runGhcProgIfNeeded profCcOpts)
      | filename <- cSources libBi]
  -- TODO: problem here is we need the .c files built first, so we can load them
  -- with ghci, but .c files can depend on .h files generated by ghc by ffi
  -- exports.
  when has_code . ifReplLib $ do
    when (null (allLibModules lib clbi)) $ warn verbosity "No exposed modules"
    ifReplLib (runGhcProg replOpts)
  -- link:
  when has_code . unless forRepl $ do
    info verbosity "Linking..."
    let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension))
                    (cSources libBi)
        cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension))
                      (cSources libBi)
        compiler_id = compilerId (compiler lbi)
        vanillaLibFilePath = libTargetDir </> mkLibName uid
        profileLibFilePath = libTargetDir </> mkProfLibName uid
        sharedLibFilePath = libTargetDir </> mkSharedLibName compiler_id uid
        ghciLibFilePath = libTargetDir </> Internal.mkGHCiLibName uid
        libInstallPath = libdir $ absoluteComponentInstallDirs pkg_descr lbi uid NoCopyDest
        sharedLibInstallPath = libInstallPath </> mkSharedLibName compiler_id uid
    -- Collect _stub.o files only for ghc < 7.2, per the guards below.
    stubObjs <- catMaybes <$> sequenceA
      [ findFileWithExtension [objExtension] [libTargetDir]
          (ModuleName.toFilePath x ++"_stub")
      | ghcVersion < mkVersion [7,2] -- ghc-7.2+ does not make _stub.o files
      , x <- allLibModules lib clbi ]
    stubProfObjs <- catMaybes <$> sequenceA
      [ findFileWithExtension ["p_" ++ objExtension] [libTargetDir]
          (ModuleName.toFilePath x ++"_stub")
      | ghcVersion < mkVersion [7,2] -- ghc-7.2+ does not make _stub.o files
      , x <- allLibModules lib clbi ]
    stubSharedObjs <- catMaybes <$> sequenceA
      [ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir]
          (ModuleName.toFilePath x ++"_stub")
      | ghcVersion < mkVersion [7,2] -- ghc-7.2+ does not make _stub.o files
      , x <- allLibModules lib clbi ]
    hObjs <- Internal.getHaskellObjects implInfo lib lbi clbi
                  libTargetDir objExtension True
    hProfObjs <-
      if withProfLib lbi
      then Internal.getHaskellObjects implInfo lib lbi clbi
                libTargetDir ("p_" ++ objExtension) True
      else return []
    hSharedObjs <-
      if withSharedLib lbi
      then Internal.getHaskellObjects implInfo lib lbi clbi
                libTargetDir ("dyn_" ++ objExtension) False
      else return []
    unless (null hObjs && null cObjs && null stubObjs) $ do
      rpaths <- getRPaths lbi clbi
      let staticObjectFiles =
              hObjs
           ++ map (libTargetDir </>) cObjs
           ++ stubObjs
          profObjectFiles =
              hProfObjs
           ++ map (libTargetDir </>) cProfObjs
           ++ stubProfObjs
          ghciObjFiles =
              hObjs
           ++ map (libTargetDir </>) cObjs
           ++ stubObjs
          dynamicObjectFiles =
              hSharedObjs
           ++ map (libTargetDir </>) cSharedObjs
           ++ stubSharedObjs
          -- After the relocation lib is created we invoke ghc -shared
          -- with the dependencies spelled out as -package arguments
          -- and ghc invokes the linker with the proper library paths
          ghcSharedLinkArgs =
              mempty {
                ghcOptShared = toFlag True,
                ghcOptDynLinkMode = toFlag GhcDynamicOnly,
                ghcOptInputFiles = toNubListR dynamicObjectFiles,
                ghcOptOutputFile = toFlag sharedLibFilePath,
                ghcOptExtra = toNubListR $
                              hcSharedOptions GHC libBi,
                -- For dynamic libs, Mac OS/X needs to know the install location
                -- at build time. This only applies to GHC < 7.8 - see the
                -- discussion in #1660.
                ghcOptDylibName = if hostOS == OSX
                                  && ghcVersion < mkVersion [7,8]
                                  then toFlag sharedLibInstallPath
                                  else mempty,
                ghcOptHideAllPackages = toFlag True,
                ghcOptNoAutoLinkPackages = toFlag True,
                ghcOptPackageDBs = withPackageDB lbi,
                ghcOptThisUnitId = case clbi of
                  LibComponentLocalBuildInfo { componentCompatPackageKey = pk }
                    -> toFlag pk
                  _ -> mempty,
                ghcOptThisComponentId = case clbi of
                  LibComponentLocalBuildInfo { componentInstantiatedWith = insts } ->
                    if null insts
                    then mempty
                    else toFlag (componentComponentId clbi)
                  _ -> mempty,
                ghcOptInstantiatedWith = case clbi of
                  LibComponentLocalBuildInfo { componentInstantiatedWith = insts }
                    -> insts
                  _ -> [],
                ghcOptPackages = toNubListR $
                                 Internal.mkGhcOptPackages clbi ,
                ghcOptLinkLibs = toNubListR $ extraLibs libBi,
                ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi,
                ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi,
                ghcOptLinkFrameworkDirs =
                  toNubListR $ PD.extraFrameworkDirs libBi,
                ghcOptRPaths = rpaths
              }
      info verbosity (show (ghcOptPackages ghcSharedLinkArgs))
      whenVanillaLib False $
        Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles
      whenProfLib $
        Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles
      whenGHCiLib $ do
        (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi)
        Ld.combineObjectFiles verbosity ldProg
          ghciLibFilePath ghciObjFiles
      whenSharedLib False $
        runGhcProg ghcSharedLinkArgs
-- | Start a REPL without loading any source files.
startInterpreter :: Verbosity -> ProgramDb -> Compiler -> Platform
                 -> PackageDBStack -> IO ()
startInterpreter verbosity progdb comp platform packageDBs = do
  checkPackageDbStack verbosity comp packageDBs
  (ghcProg, _) <- requireProgram verbosity ghcProgram progdb
  let interactiveOpts = mempty { ghcOptMode = toFlag GhcModeInteractive
                               , ghcOptPackageDBs = packageDBs
                               }
  runGHC verbosity ghcProg comp platform interactiveOpts
-- -----------------------------------------------------------------------------
-- Building an executable or foreign library

-- | Build ('buildFLib') or start a REPL for ('replFLib') a foreign
-- library.  Both delegate to the generic 'gbuild' driver; only the
-- 'GBuildMode' wrapper differs.
buildFLib, replFLib
  :: Verbosity -> Cabal.Flag (Maybe Int)
  -> PackageDescription -> LocalBuildInfo
  -> ForeignLib -> ComponentLocalBuildInfo -> IO ()
buildFLib v njobs pkg lbi = gbuild v njobs pkg lbi . GBuildFLib
replFLib v njobs pkg lbi = gbuild v njobs pkg lbi . GReplFLib
-- | Build ('buildExe') or start a REPL for ('replExe') an executable
-- with GHC.  Both delegate to the generic 'gbuild' driver; only the
-- 'GBuildMode' wrapper differs.
buildExe, replExe
  :: Verbosity -> Cabal.Flag (Maybe Int)
  -> PackageDescription -> LocalBuildInfo
  -> Executable -> ComponentLocalBuildInfo -> IO ()
buildExe v njobs pkg lbi = gbuild v njobs pkg lbi . GBuildExe
replExe v njobs pkg lbi = gbuild v njobs pkg lbi . GReplExe
-- | Building an executable, starting the REPL, and building foreign
-- libraries are all very similar and implemented in 'gbuild'. The
-- 'GBuildMode' distinguishes between the various kinds of operation.
data GBuildMode =
    GBuildExe Executable    -- ^ Compile and link an executable
  | GReplExe Executable     -- ^ Load an executable into ghci
  | GBuildFLib ForeignLib   -- ^ Compile and link a foreign library
  | GReplFLib ForeignLib    -- ^ Load a foreign library into ghci
-- | The 'BuildInfo' of the component being built.
gbuildInfo :: GBuildMode -> BuildInfo
gbuildInfo mode = case mode of
  GBuildExe exe   -> buildInfo exe
  GReplExe exe    -> buildInfo exe
  GBuildFLib flib -> foreignLibBuildInfo flib
  GReplFLib flib  -> foreignLibBuildInfo flib
-- | The (unqualified) component name, as a plain 'String'.
gbuildName :: GBuildMode -> String
gbuildName mode = unUnqualComponentName $ case mode of
  GBuildExe exe   -> exeName exe
  GReplExe exe    -> exeName exe
  GBuildFLib flib -> foreignLibName flib
  GReplFLib flib  -> foreignLibName flib
-- | File name of the build product (including any platform-specific
-- extension); only foreign libraries need the 'LocalBuildInfo'.
gbuildTargetName :: LocalBuildInfo -> GBuildMode -> String
gbuildTargetName lbi mode = case mode of
  GBuildExe exe   -> exeTargetName exe
  GReplExe exe    -> exeTargetName exe
  GBuildFLib flib -> flibTargetName lbi flib
  GReplFLib flib  -> flibTargetName lbi flib
-- | Target file name for an executable (with the platform's executable
-- extension, e.g. @.exe@ on Windows, appended via 'withExt').
exeTargetName :: Executable -> String
exeTargetName exe = withExt (unUnqualComponentName (exeName exe)) exeExtension
-- | Target name for a foreign library (the actual file name)
--
-- We do not use mkLibName and co here because the naming for foreign libraries
-- is slightly different (we don't use "_p" or compiler version suffices, and we
-- don't want the "lib" prefix on Windows).
--
-- TODO: We do use `dllExtension` and co here, but really that's wrong: they
-- use the OS used to build cabal to determine which extension to use, rather
-- than the target OS (but this is wrong elsewhere in Cabal as well).
flibTargetName :: LocalBuildInfo -> ForeignLib -> String
flibTargetName lbi flib =
    case (os, foreignLibType flib) of
      (Windows, ForeignLibNativeShared) -> nm <.> "dll"
      (Windows, ForeignLibNativeStatic) -> nm <.> "lib"
      -- On Linux a versioned extension (e.g. @.so.3.2.1@) is used; see
      -- 'versionedExt' below.
      (Linux, ForeignLibNativeShared) -> "lib" ++ nm <.> versionedExt
      (_other, ForeignLibNativeShared) -> "lib" ++ nm <.> dllExtension
      (_other, ForeignLibNativeStatic) -> "lib" ++ nm <.> staticLibExtension
      (_any, ForeignLibTypeUnknown) -> cabalBug "unknown foreign lib type"
  where
    nm :: String
    nm = unUnqualComponentName $ foreignLibName flib

    os :: OS
    os = let (Platform _ os') = hostPlatform lbi
         in os'

    -- If a foreign lib foo has lib-version-info 5:1:2 or
    -- lib-version-linux 3.2.1, it should be built as libfoo.so.3.2.1
    -- Libtool's version-info data is translated into library versions in a
    -- nontrivial way: so refer to libtool documentation.
    versionedExt :: String
    versionedExt =
      let nums = foreignLibVersion flib os
      in foldl (<.>) "so" (map show nums)
-- | Name for the library when building.
--
-- If the `lib-version-info` field or the `lib-version-linux` field of
-- a foreign library target is set, we need to incorporate that
-- version into the SONAME field.
--
-- If a foreign library foo has lib-version-info 5:1:2, it should be
-- built as libfoo.so.3.2.1. We want it to get soname libfoo.so.3.
-- However, GHC does not allow overriding soname by setting linker
-- options, as it sets a soname of its own (namely the output
-- filename), after the user-supplied linker options. Hence, we have
-- to compile the library with the soname as its filename. We rename
-- the compiled binary afterwards.
--
-- This method allows to adjust the name of the library at build time
-- such that the correct soname can be set.
flibBuildName :: LocalBuildInfo -> ForeignLib -> String
flibBuildName lbi flib
  -- On linux, if a foreign-library has version data, the first digit is used
  -- to produce the SONAME.
  | (os, foreignLibType flib) ==
    (Linux, ForeignLibNativeShared)
  = let nums = foreignLibVersion flib os
    in "lib" ++ nm <.> foldl (<.>) "so" (map show (take 1 nums))
  -- All other platforms: build under the final target name directly.
  | otherwise = flibTargetName lbi flib
  where
    os :: OS
    os = let (Platform _ os') = hostPlatform lbi
         in os'

    nm :: String
    nm = unUnqualComponentName $ foreignLibName flib
-- | Is this an interactive (ghci) invocation rather than a batch build?
gbuildIsRepl :: GBuildMode -> Bool
gbuildIsRepl mode = case mode of
  GBuildExe _  -> False
  GReplExe _   -> True
  GBuildFLib _ -> False
  GReplFLib _  -> True
-- | Whether the component must be built/linked dynamically.
--
-- Executables follow the @--enable-executable-dynamic@ setting; shared
-- foreign libraries are dynamic unless marked standalone.
gbuildNeedDynamic :: LocalBuildInfo -> GBuildMode -> Bool
gbuildNeedDynamic lbi bm =
    case bm of
      GBuildExe _ -> withDynExe lbi
      GReplExe _ -> withDynExe lbi
      GBuildFLib flib -> withDynFLib flib
      GReplFLib flib -> withDynFLib flib
  where
    withDynFLib flib =
      case foreignLibType flib of
        ForeignLibNativeShared ->
          ForeignLibStandalone `notElem` foreignLibOptions flib
        ForeignLibNativeStatic ->
          False
        ForeignLibTypeUnknown ->
          cabalBug "unknown foreign lib type"
-- | Windows module definition (.def) files for the component.
-- Only foreign libraries carry these; executables have none.
gbuildModDefFiles :: GBuildMode -> [FilePath]
gbuildModDefFiles mode = case mode of
  GBuildExe _     -> []
  GReplExe _      -> []
  GBuildFLib flib -> foreignLibModDefFile flib
  GReplFLib flib  -> foreignLibModDefFile flib
-- | "Main" module name when overridden by @ghc-options: -main-is ...@
-- or 'Nothing' if no @-main-is@ flag could be found.
--
-- In case of 'Nothing', 'Distribution.ModuleName.main' can be assumed.
exeMainModuleName :: Executable -> Maybe ModuleName
exeMainModuleName Executable{buildInfo = bnfo} =
-- GHC honors the last occurence of a module name updated via -main-is
--
-- Moreover, -main-is when parsed left-to-right can update either
-- the "Main" module name, or the "main" function name, or both,
-- see also 'decodeMainIsArg'.
msum $ reverse $ map decodeMainIsArg $ findIsMainArgs ghcopts
where
ghcopts = hcOptions GHC bnfo
findIsMainArgs [] = []
findIsMainArgs ("-main-is":arg:rest) = arg : findIsMainArgs rest
findIsMainArgs (_:rest) = findIsMainArgs rest
-- | Decode argument to '-main-is'
--
-- Returns 'Nothing' if argument set only the function name.
--
-- This code has been stolen/refactored from GHC's DynFlags.setMainIs
-- function. The logic here is deliberately imperfect as it is
-- intended to be bug-compatible with GHC's parser. See discussion in
-- https://github.com/haskell/cabal/pull/4539#discussion_r118981753.
--
-- Cases (splitting at the last '.', via 'splitLongestPrefix'):
--   "Foo.Bar.baz" -> module "Foo.Bar" (function part is lowercase)
--   "Foo.Bar"     -> module "Foo.Bar" (starts uppercase)
--   "baz"         -> Nothing (only the function name was set)
decodeMainIsArg :: String -> Maybe ModuleName
decodeMainIsArg arg
  | not (null main_fn) && isLower (head main_fn)
     -- The arg looked like "Foo.Bar.baz"
  = Just (ModuleName.fromString main_mod)
  | isUpper (head arg)  -- The arg looked like "Foo" or "Foo.Bar"
  = Just (ModuleName.fromString arg)
  | otherwise           -- The arg looked like "baz"
  = Nothing
  where
    (main_mod, main_fn) = splitLongestPrefix arg (== '.')
-- | Split a string at the last character satisfying the predicate.
--
-- Returns the (longest possible) prefix before that character and the
-- suffix after it; the matching character itself is dropped.  If no
-- character matches, the whole input is the prefix and the suffix is
-- empty.  Used to split @-main-is@ arguments at the last @.@.
splitLongestPrefix :: String -> (Char -> Bool) -> (String,String)
splitLongestPrefix str p =
  case break p (reverse str) of
    (_, [])            -> (str, [])
    -- The head of revRest is the char satisfying 'p'; drop it.
    (revSuf, _:revPre) -> (reverse revPre, reverse revSuf)
-- | Return C sources, GHC input files and GHC input modules
--
-- For executables this locates the main module's source file (and works
-- around old packages that wrongly list the Main module under
-- @other-modules@); foreign libraries have no main file.
gbuildSources :: Verbosity
              -> Version      -- ^ specVersion
              -> FilePath     -- ^ temporary build directory
              -> GBuildMode
              -> IO ([FilePath], [FilePath], [ModuleName])
gbuildSources verbosity specVer tmpDir bm =
    case bm of
      GBuildExe exe -> exeSources exe
      GReplExe exe -> exeSources exe
      GBuildFLib flib -> return $ flibSources flib
      GReplFLib flib -> return $ flibSources flib
  where
    exeSources :: Executable -> IO ([FilePath], [FilePath], [ModuleName])
    exeSources exe@Executable{buildInfo = bnfo, modulePath = modPath} = do
      main <- findFile (tmpDir : hsSourceDirs bnfo) modPath
      let mainModName = fromMaybe ModuleName.main $ exeMainModuleName exe
          otherModNames = exeModules exe
      if isHaskell main
        then
          if specVer < mkVersion [2] && (mainModName `elem` otherModNames)
            then do
              -- The cabal manual clearly states that `other-modules` is
              -- intended for non-main modules. However, there's at least one
              -- important package on Hackage (happy-1.19.5) which
              -- violates this. We workaround this here so that we don't
              -- invoke GHC with e.g. 'ghc --make Main src/Main.hs' which
              -- would result in GHC complaining about duplicate Main
              -- modules.
              --
              -- Finally, we only enable this workaround for
              -- specVersion < 2, as 'cabal-version:>=2.0' cabal files
              -- have no excuse anymore to keep doing it wrong... ;-)
              warn verbosity $ "Enabling workaround for Main module '"
                            ++ display mainModName
                            ++ "' listed in 'other-modules' illegally!"
              return (cSources bnfo, [main],
                      filter (/= mainModName) (exeModules exe))
            else return (cSources bnfo, [main], exeModules exe)
        -- A non-Haskell main file (e.g. C) is treated as a C source.
        else return (main : cSources bnfo, [], exeModules exe)

    flibSources :: ForeignLib -> ([FilePath], [FilePath], [ModuleName])
    flibSources flib@ForeignLib{foreignLibBuildInfo = bnfo} =
      (cSources bnfo, [], foreignLibModules flib)

    -- Identify Haskell sources by file extension.
    isHaskell :: FilePath -> Bool
    isHaskell fp = elem (takeExtension fp) [".hs", ".lhs"]
-- | Generic build function. See comment for 'GBuildMode'.
--
-- Drives the whole build for an executable or foreign library: computes
-- the per-flavour GHC option sets, compiles the Haskell modules (twice
-- when TemplateHaskell forces a vanilla pass), builds any C sources and
-- finally links (or, in REPL modes, launches ghci instead).
gbuild :: Verbosity -> Cabal.Flag (Maybe Int)
       -> PackageDescription -> LocalBuildInfo
       -> GBuildMode -> ComponentLocalBuildInfo -> IO ()
gbuild verbosity numJobs pkg_descr lbi bm clbi = do
  (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
  let comp = compiler lbi
      platform = hostPlatform lbi
      implInfo = getImplInfo comp
      runGhcProg = runGHC verbosity ghcProg comp platform
  -- Possibly adjust the -threaded flag; see 'hackThreadedFlag'.
  bnfo <- hackThreadedFlag verbosity
            comp (withProfExe lbi) (gbuildInfo bm)
  -- the name that GHC really uses (e.g., with .exe on Windows for executables)
  let targetName = gbuildTargetName lbi bm
  let targetDir = buildDir lbi </> (gbuildName bm)
  let tmpDir = targetDir </> (gbuildName bm ++ "-tmp")
  createDirectoryIfMissingVerbose verbosity True targetDir
  createDirectoryIfMissingVerbose verbosity True tmpDir
  -- TODO: do we need to put hs-boot files into place for mutually recursive
  -- modules? FIX: what about exeName.hi-boot?
  -- Determine if program coverage should be enabled and if so, what
  -- '-hpcdir' should be.
  let isCoverageEnabled = exeCoverage lbi
      distPref = fromFlag $ configDistPref $ configFlags lbi
      hpcdir way
        | gbuildIsRepl bm = mempty -- HPC is not supported in ghci
        | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way (gbuildName bm)
        | otherwise = mempty
  rpaths <- getRPaths lbi clbi
  (cSrcs, inputFiles, inputModules) <- gbuildSources verbosity
                                         (specVersion pkg_descr) tmpDir bm
  let isGhcDynamic = isDynamic comp
      dynamicTooSupported = supportsDynamicToo comp
      cObjs = map (`replaceExtension` objExtension) cSrcs
      needDynamic = gbuildNeedDynamic lbi bm
      needProfiling = withProfExe lbi
      -- build executables
      baseOpts = (componentGhcOptions verbosity lbi bnfo clbi tmpDir)
                 `mappend` mempty {
                   ghcOptMode = toFlag GhcModeMake,
                   ghcOptInputFiles = toNubListR inputFiles,
                   ghcOptInputModules = toNubListR inputModules
                 }
      staticOpts = baseOpts `mappend` mempty {
                     ghcOptDynLinkMode = toFlag GhcStaticOnly,
                     ghcOptHPCDir = hpcdir Hpc.Vanilla
                   }
      profOpts = baseOpts `mappend` mempty {
                   ghcOptProfilingMode = toFlag True,
                   ghcOptProfilingAuto = Internal.profDetailLevelFlag False
                                           (withProfExeDetail lbi),
                   ghcOptHiSuffix = toFlag "p_hi",
                   ghcOptObjSuffix = toFlag "p_o",
                   ghcOptExtra = toNubListR
                                 (hcProfOptions GHC bnfo),
                   ghcOptHPCDir = hpcdir Hpc.Prof
                 }
      dynOpts = baseOpts `mappend` mempty {
                  ghcOptDynLinkMode = toFlag GhcDynamicOnly,
                  -- TODO: Does it hurt to set -fPIC for executables?
                  ghcOptFPic = toFlag True,
                  ghcOptHiSuffix = toFlag "dyn_hi",
                  ghcOptObjSuffix = toFlag "dyn_o",
                  ghcOptExtra = toNubListR $
                                hcSharedOptions GHC bnfo,
                  ghcOptHPCDir = hpcdir Hpc.Dyn
                }
      dynTooOpts = staticOpts `mappend` mempty {
                     ghcOptDynLinkMode = toFlag GhcStaticAndDynamic,
                     ghcOptDynHiSuffix = toFlag "dyn_hi",
                     ghcOptDynObjSuffix = toFlag "dyn_o",
                     ghcOptHPCDir = hpcdir Hpc.Dyn
                   }
      linkerOpts = mempty {
                     ghcOptLinkOptions = toNubListR $ PD.ldOptions bnfo,
                     ghcOptLinkLibs = toNubListR $ extraLibs bnfo,
                     ghcOptLinkLibPath = toNubListR $ extraLibDirs bnfo,
                     ghcOptLinkFrameworks = toNubListR $
                                            PD.frameworks bnfo,
                     ghcOptLinkFrameworkDirs = toNubListR $
                                               PD.extraFrameworkDirs bnfo,
                     ghcOptInputFiles = toNubListR
                                        [tmpDir </> x | x <- cObjs]
                   }
      dynLinkerOpts = mempty {
                        ghcOptRPaths = rpaths
                      }
      replOpts = baseOpts {
                   ghcOptExtra = overNubListR
                                 Internal.filterGhciFlags
                                 (ghcOptExtra baseOpts)
                 }
                 -- For a normal compile we do separate invocations of ghc for
                 -- compiling as for linking. But for repl we have to do just
                 -- the one invocation, so that one has to include all the
                 -- linker stuff too, like -l flags and any .o files from C
                 -- files etc.
                 `mappend` linkerOpts
                 `mappend` mempty {
                   ghcOptMode = toFlag GhcModeInteractive,
                   ghcOptOptimisation = toFlag GhcNoOptimisation
                 }
      commonOpts | needProfiling = profOpts
                 | needDynamic = dynOpts
                 | otherwise = staticOpts
      compileOpts | useDynToo = dynTooOpts
                  | otherwise = commonOpts
      withStaticExe = not needProfiling && not needDynamic
      -- For building exe's that use TH with -prof or -dynamic we actually have
      -- to build twice, once without -prof/-dynamic and then again with
      -- -prof/-dynamic. This is because the code that TH needs to run at
      -- compile time needs to be the vanilla ABI so it can be loaded up and run
      -- by the compiler.
      -- With dynamic-by-default GHC the TH object files loaded at compile-time
      -- need to be .dyn_o instead of .o.
      doingTH = EnableExtension TemplateHaskell `elem` allExtensions bnfo
      -- Should we use -dynamic-too instead of compiling twice?
      useDynToo = dynamicTooSupported && isGhcDynamic
                  && doingTH && withStaticExe
                  && null (hcSharedOptions GHC bnfo)
      compileTHOpts | isGhcDynamic = dynOpts
                    | otherwise = staticOpts
      compileForTH
        | gbuildIsRepl bm = False
        | useDynToo = False
        | isGhcDynamic = doingTH && (needProfiling || withStaticExe)
        | otherwise = doingTH && (needProfiling || needDynamic)
  -- Build static/dynamic object files for TH, if needed.
  when compileForTH $
    runGhcProg compileTHOpts { ghcOptNoLink = toFlag True
                             , ghcOptNumJobs = numJobs }
  -- Compile the Haskell sources (linking happens separately below).
  unless (gbuildIsRepl bm) $
    runGhcProg compileOpts { ghcOptNoLink = toFlag True
                           , ghcOptNumJobs = numJobs }
  -- build any C sources
  unless (null cSrcs) $ do
    info verbosity "Building C Sources..."
    sequence_
      [ do let baseCcOpts = Internal.componentCcGhcOptions verbosity implInfo
                            lbi bnfo clbi tmpDir filename
               vanillaCcOpts = if isGhcDynamic
                               -- Dynamic GHC requires C sources to be built
                               -- with -fPIC for REPL to work. See #2207.
                               then baseCcOpts { ghcOptFPic = toFlag True }
                               else baseCcOpts
               profCcOpts = vanillaCcOpts `mappend` mempty {
                              ghcOptProfilingMode = toFlag True
                            }
               sharedCcOpts = vanillaCcOpts `mappend` mempty {
                                ghcOptFPic = toFlag True,
                                ghcOptDynLinkMode = toFlag GhcDynamicOnly
                              }
               opts | needProfiling = profCcOpts
                    | needDynamic = sharedCcOpts
                    | otherwise = vanillaCcOpts
               odir = fromFlag (ghcOptObjDir opts)
           createDirectoryIfMissingVerbose verbosity True odir
           needsRecomp <- checkNeedsRecompilation filename opts
           when needsRecomp $
             runGhcProg opts
      | filename <- cSrcs ]
  -- TODO: problem here is we need the .c files built first, so we can load them
  -- with ghci, but .c files can depend on .h files generated by ghc by ffi
  -- exports.
  case bm of
    GReplExe _ -> runGhcProg replOpts
    GReplFLib _ -> runGhcProg replOpts
    GBuildExe _ -> do
      let linkOpts = commonOpts
                     `mappend` linkerOpts
                     `mappend` mempty {
                       ghcOptLinkNoHsMain = toFlag (null inputFiles)
                     }
                     `mappend` (if withDynExe lbi then dynLinkerOpts else mempty)
      info verbosity "Linking..."
      -- Work around old GHCs not relinking in this
      -- situation, see #3294
      let target = targetDir </> targetName
      when (compilerVersion comp < mkVersion [7,7]) $ do
        e <- doesFileExist target
        when e (removeFile target)
      runGhcProg linkOpts { ghcOptOutputFile = toFlag target }
    GBuildFLib flib -> do
      let rtsInfo = extractRtsInfo lbi
          linkOpts = case foreignLibType flib of
            ForeignLibNativeShared ->
              commonOpts
              `mappend` linkerOpts
              `mappend` dynLinkerOpts
              `mappend` mempty {
                ghcOptLinkNoHsMain = toFlag True,
                ghcOptShared = toFlag True,
                ghcOptLinkLibs = toNubListR [
                  if needDynamic
                  then rtsDynamicLib rtsInfo
                  else rtsStaticLib rtsInfo
                ],
                ghcOptLinkLibPath = toNubListR $ rtsLibPaths rtsInfo,
                ghcOptFPic = toFlag True,
                ghcOptLinkModDefFiles = toNubListR $ gbuildModDefFiles bm
              }
              -- See Note [RPATH]
              `mappend` ifNeedsRPathWorkaround lbi mempty {
                ghcOptLinkOptions = toNubListR ["-Wl,--no-as-needed"]
              , ghcOptLinkLibs = toNubListR ["ffi"]
              }
            ForeignLibNativeStatic ->
              -- this should be caught by buildFLib
              -- (and if we do implement this, we probably don't even want to call
              -- ghc here, but rather Ar.createArLibArchive or something)
              cabalBug "static libraries not yet implemented"
            ForeignLibTypeUnknown ->
              cabalBug "unknown foreign lib type"
      -- We build under a (potentially) different filename to set a
      -- soname on supported platforms. See also the note for
      -- @flibBuildName@.
      info verbosity "Linking..."
      let buildName = flibBuildName lbi flib
      runGhcProg linkOpts { ghcOptOutputFile = toFlag (targetDir </> buildName) }
      renameFile (targetDir </> buildName) (targetDir </> targetName)
{-
Note [RPATH]
~~~~~~~~~~~~
Suppose that the dynamic library depends on `base`, but not (directly) on
`integer-gmp` (which, however, is a dependency of `base`). We will link the
library as
gcc ... -lHSbase-4.7.0.2-ghc7.8.4 -lHSinteger-gmp-0.5.1.0-ghc7.8.4 ...
However, on systems (like Ubuntu) where the linker gets called with `-as-needed`
by default, the linker will notice that `integer-gmp` isn't actually a direct
dependency and hence omit the link.
Then when we attempt to link a C program against this dynamic library, the
_static_ linker will attempt to verify that all symbols can be resolved. The
dynamic library itself does not require any symbols from `integer-gmp`, but
`base` does. In order to verify that the symbols used by `base` can be
resolved, the static linker needs to be able to _find_ integer-gmp.
Finding the `base` dependency is simple, because the dynamic elf header
(`readelf -d`) for the library that we have created looks something like
(NEEDED) Shared library: [libHSbase-4.7.0.2-ghc7.8.4.so]
(RPATH) Library rpath: [/path/to/base-4.7.0.2:...]
However, when it comes to resolving the dependency on `integer-gmp`, it needs
to look at the dynamic header for `base`. On modern ghc (7.8 and higher) this
looks something like
(NEEDED) Shared library: [libHSinteger-gmp-0.5.1.0-ghc7.8.4.so]
(RPATH) Library rpath: [$ORIGIN/../integer-gmp-0.5.1.0:...]
This specifies the location of `integer-gmp` _in terms of_ the location of base
(using the `$ORIGIN`) variable. But here's the crux: when the static linker
attempts to verify that all symbols can be resolved, [**IT DOES NOT RESOLVE
`$ORIGIN`**](http://stackoverflow.com/questions/6323603/ld-using-rpath-origin-inside-a-shared-library-recursive).
As a consequence, it will not be able to resolve the symbols and report the
missing symbols as errors, _even though the dynamic linker **would** be able to
resolve these symbols_. We can tell the static linker not to report these
errors by using `--unresolved-symbols=ignore-all` and all will be fine when we
run the program ([indeed, this is what the gold linker
does](https://sourceware.org/ml/binutils/2013-05/msg00038.html)), but it makes
the resulting library more difficult to use.
Instead what we can do is make sure that the generated dynamic library has
explicit top-level dependencies on these libraries. This means that the static
linker knows where to find them, and when we have transitive dependencies on
the same libraries the linker will only load them once, so we avoid needing to
look at the `RPATH` of our dependencies. We can do this by passing
`--no-as-needed` to the linker, so that it doesn't omit any libraries.
Note that on older ghc (7.6 and before) the Haskell libraries don't have an
RPATH set at all, which makes it even more important that we make these
top-level dependencies.
Finally, we have to explicitly link against `libffi` for the same reason. For
newer ghc this _happens_ to be unnecessary on many systems because `libffi` is
a library which is not specific to GHC, and when the static linker verifies
that all symbols can be resolved it will find the `libffi` that is globally
installed (completely independent from ghc). Of course, this may well be the
_wrong_ version of `libffi`, but it's quite possible that symbol resolution
happens to work. This is of course the wrong approach, which is why we link
explicitly against `libffi` so that we will find the _right_ version of
`libffi`.
-}
-- | Apply the RPATH workaround only where it is required.
--
-- See Note [RPATH] for why Linux links need @--no-as-needed@ and an
-- explicit @libffi@ dependency.
ifNeedsRPathWorkaround :: Monoid a => LocalBuildInfo -> a -> a
ifNeedsRPathWorkaround lbi a
  | Platform _ Linux <- hostPlatform lbi = a
  | otherwise                            = mempty
-- | Names and search paths of the GHC runtime-system (RTS) libraries, as
-- needed when linking a foreign library against the RTS.
data RtsInfo = RtsInfo {
      rtsDynamicLib :: FilePath   -- ^ dynamic RTS library name (no extension)
    , rtsStaticLib :: FilePath    -- ^ static RTS library name
    , rtsLibPaths :: [FilePath]   -- ^ directories to search for the RTS libraries
    }
-- | Extract (and compute) information about the RTS library
--
-- TODO: This hardcodes the name as @HSrts-ghc<version>@. I don't know if we can
-- find this information somewhere. We can lookup the 'hsLibraries' field of
-- 'InstalledPackageInfo' but it will tell us @["HSrts", "Cffi"]@, which
-- doesn't really help.
extractRtsInfo :: LocalBuildInfo -> RtsInfo
extractRtsInfo lbi =
    -- NOTE(review): this pattern is partial -- it also hits 'error' when the
    -- rts package exposes a number of libraries other than exactly one.
    case PackageIndex.lookupPackageName (installedPkgs lbi) (mkPackageName "rts") of
      [(_, [rts])] -> aux rts
      _otherwise -> error "No (or multiple) ghc rts package is registered"
  where
    aux :: InstalledPackageInfo -> RtsInfo
    aux rts = RtsInfo {
        rtsDynamicLib = "HSrts-ghc" ++ display ghcVersion
      , rtsStaticLib = "HSrts"
      , rtsLibPaths = InstalledPackageInfo.libraryDirs rts
      }
    -- Version of the GHC we are compiling with, used in the dynamic lib name.
    ghcVersion :: Version
    ghcVersion = compilerVersion (compiler lbi)
-- | Whether the given source file must be recompiled: true when the source
-- is more recent than the object file previously produced for it, or when
-- no such object file exists yet.
checkNeedsRecompilation :: FilePath -> GhcOptions -> NoCallStackIO Bool
checkNeedsRecompilation filename opts =
  filename `moreRecentFile` getObjectFileName filename opts
-- | Compute the path of the object file that the given source file compiles
-- to, based on the configured object directory and object-file suffix
-- (defaulting to @o@).
getObjectFileName :: FilePath -> GhcOptions -> FilePath
getObjectFileName filename opts =
  fromFlag (ghcOptObjDir opts)
    </> replaceExtension filename (fromFlagOrDefault "o" (ghcOptObjSuffix opts))
-- | Calculate the RPATHs for the component we are building.
--
-- Calculates relative RPATHs when 'relocatable' is set.
getRPaths :: LocalBuildInfo
          -> ComponentLocalBuildInfo -- ^ Component we are building
          -> NoCallStackIO (NubListR FilePath)
getRPaths lbi clbi | supportRPaths hostOS = do
    libraryPaths <- depLibraryPaths False (relocatable lbi) lbi clbi
    -- Use the dynamic linker's "directory of this binary" token so relative
    -- RPATHs resolve regardless of the current working directory.
    let hostPref = case hostOS of
                     OSX -> "@loader_path"
                     _ -> "$ORIGIN"
        relPath p = if isRelative p then hostPref </> p else p
        rpaths = toNubListR (map relPath libraryPaths)
    return rpaths
  where
    (Platform _ hostOS) = hostPlatform lbi

    -- The list of RPath-supported operating systems below reflects the
    -- platforms on which Cabal's RPATH handling is tested. It does _NOT_
    -- reflect whether the OS supports RPATH.

    -- E.g. when this comment was written, the *BSD operating systems were
    -- untested with regards to Cabal RPATH handling, and were hence set to
    -- 'False', while those operating systems themselves do support RPATH.
    supportRPaths Linux = True
    supportRPaths Windows = False
    supportRPaths OSX = True
    supportRPaths FreeBSD = False
    supportRPaths OpenBSD = False
    supportRPaths NetBSD = False
    supportRPaths DragonFly = False
    supportRPaths Solaris = False
    supportRPaths AIX = False
    supportRPaths HPUX = False
    supportRPaths IRIX = False
    supportRPaths HaLVM = False
    supportRPaths IOS = False
    supportRPaths Android = False
    supportRPaths Ghcjs = False
    supportRPaths Hurd = False
    supportRPaths (OtherOS _) = False
    -- Do _not_ add a default case so that we get a warning here when a new OS
    -- is added.

-- No RPATHs on platforms where the handling is untested/unsupported.
getRPaths _ _ = return mempty
-- | Drop the @-threaded@ GHC flag when profiling with ghc-6.8 or older,
-- where the combination is not supported; warn the user when doing so.
hackThreadedFlag :: Verbosity -> Compiler -> Bool -> BuildInfo -> IO BuildInfo
hackThreadedFlag verbosity comp prof bi
  | needsFilter = do
      warn verbosity $ "The ghc flag '-threaded' is not compatible with "
                    ++ "profiling in ghc-6.8 and older. It will be disabled."
      return bi { options = dropThreaded (options bi) }
  | otherwise = return bi
  where
    needsFilter = prof && compilerVersion comp < mkVersion [6, 10]
                       && "-threaded" `elem` hcOptions GHC bi
    -- Only the GHC option set is filtered; other compilers' options are
    -- passed through untouched.
    dropThreaded hcoptss =
      [ (hc, if hc == GHC then filter (/= "-threaded") opts else opts)
      | (hc, opts) <- hcoptss ]
-- | Extracts a String representing a hash of the ABI of a built
-- library. It can fail if the library has not yet been built.
--
libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo
           -> Library -> ComponentLocalBuildInfo -> IO String
libAbiHash verbosity _pkg_descr lbi lib clbi = do
  libBi <- hackThreadedFlag verbosity
             (compiler lbi) (withProfLib lbi) (libBuildInfo lib)
  let
      comp = compiler lbi
      platform = hostPlatform lbi
      -- Base options: run GHC in --abi-hash mode over the exposed modules.
      vanillaArgs0 =
        (componentGhcOptions verbosity lbi libBi clbi (componentBuildDir lbi clbi))
        `mappend` mempty {
          ghcOptMode = toFlag GhcModeAbiHash,
          ghcOptInputModules = toNubListR $ exposedModules lib
        }
      vanillaArgs =
          -- Package DBs unnecessary, and break ghc-cabal. See #3633
          -- BUT, put at least the global database so that 7.4 doesn't
          -- break.
          vanillaArgs0 { ghcOptPackageDBs = [GlobalPackageDB]
                       , ghcOptPackages = mempty }
      -- Variants matching the way the library was built, so that the
      -- interface files (.dyn_hi / .p_hi) referenced actually exist.
      sharedArgs = vanillaArgs `mappend` mempty {
                     ghcOptDynLinkMode = toFlag GhcDynamicOnly,
                     ghcOptFPic = toFlag True,
                     ghcOptHiSuffix = toFlag "dyn_hi",
                     ghcOptObjSuffix = toFlag "dyn_o",
                     ghcOptExtra = toNubListR $ hcSharedOptions GHC libBi
                   }
      profArgs = vanillaArgs `mappend` mempty {
                   ghcOptProfilingMode = toFlag True,
                   ghcOptProfilingAuto = Internal.profDetailLevelFlag True
                                           (withProfLibDetail lbi),
                   ghcOptHiSuffix = toFlag "p_hi",
                   ghcOptObjSuffix = toFlag "p_o",
                   ghcOptExtra = toNubListR $ hcProfOptions GHC libBi
                 }
      -- Pick the first enabled library way; fails only if no way is enabled.
      ghcArgs
        | withVanillaLib lbi = vanillaArgs
        | withSharedLib lbi = sharedArgs
        | withProfLib lbi = profArgs
        | otherwise = error "libAbiHash: Can't find an enabled library way"
  (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
  hash <- getProgramInvocationOutput verbosity
            (ghcInvocation ghcProg comp platform ghcArgs)
  -- GHC's output may carry trailing whitespace/newline; keep the first word.
  return (takeWhile (not . isSpace) hash)
-- | Assemble the GHC options for building a component, delegating to the
-- implementation-generic helper with this compiler's implementation info.
componentGhcOptions :: Verbosity -> LocalBuildInfo
                    -> BuildInfo -> ComponentLocalBuildInfo -> FilePath
                    -> GhcOptions
componentGhcOptions verbosity lbi =
  Internal.componentGhcOptions verbosity (getImplInfo (compiler lbi)) lbi
-- | Assemble the GHC options for compiling a component's C sources,
-- delegating to the implementation-generic helper.
componentCcGhcOptions :: Verbosity -> LocalBuildInfo
                      -> BuildInfo -> ComponentLocalBuildInfo
                      -> FilePath -> FilePath
                      -> GhcOptions
componentCcGhcOptions verbosity lbi =
  Internal.componentCcGhcOptions verbosity (getImplInfo (compiler lbi)) lbi
-- -----------------------------------------------------------------------------
-- Installing
-- |Install executables for GHC.
installExe :: Verbosity
           -> LocalBuildInfo
           -> FilePath -- ^Where to copy the files to
           -> FilePath -- ^Build location
           -> (FilePath, FilePath) -- ^Executable (prefix,suffix)
           -> PackageDescription
           -> Executable
           -> IO ()
installExe verbosity lbi binDir buildPref
           (progprefix, progsuffix) _pkg exe = do
  createDirectoryIfMissingVerbose verbosity True binDir
  copyAndMaybeStrip (binDir </> installedName)
  where
    name = unUnqualComponentName (exeName exe)
    -- The installed name carries the user-configured prefix/suffix.
    installedName = progprefix ++ name ++ progsuffix
    builtPath = buildPref </> name </> exeTargetName exe
    -- Copy the built binary into place and, when requested, strip it.
    copyAndMaybeStrip dest = do
      installExecutableFile verbosity builtPath (dest <.> exeExtension)
      when (stripExes lbi) $
        Strip.stripExe verbosity (hostPlatform lbi) (withPrograms lbi)
                       (dest <.> exeExtension)
-- |Install foreign library for GHC.
installFLib :: Verbosity
            -> LocalBuildInfo
            -> FilePath  -- ^install location
            -> FilePath  -- ^Build location
            -> PackageDescription
            -> ForeignLib
            -> IO ()
installFLib verbosity lbi targetDir builtDir _pkg flib =
    install (foreignLibIsShared flib)
            builtDir
            targetDir
            (flibTargetName lbi flib)
  where
    -- Copy the built library into place; shared libraries are installed
    -- with the executable bit set.
    install isShared srcDir dstDir name = do
      let src = srcDir </> name
          dst = dstDir </> name
      createDirectoryIfMissingVerbose verbosity True targetDir
      -- TODO: Should we strip? (stripLibs lbi)
      if isShared
        then installExecutableFile verbosity src dst
        else installOrdinaryFile verbosity src dst
      -- Now install appropriate symlinks if library is versioned
      let (Platform _ os) = hostPlatform lbi
      when (not (null (foreignLibVersion flib os))) $ do
        when (os /= Linux) $ die' verbosity
          -- It should be impossible to get here.
          "Can't install foreign-library symlink on non-Linux OS"
#ifndef mingw32_HOST_OS
        -- 'createSymbolicLink file1 file2' creates a symbolic link
        -- named 'file2' which points to the file 'file1'.
        -- Note that we do want a symlink to 'name' rather than
        -- 'dst', because the symlink will be relative to the
        -- directory it's created in.
        -- Finally, we first create the symlinks in a temporary
        -- directory and then rename to simulate 'ln --force'.
        withTempDirectory verbosity dstDir nm $ \tmpDir -> do
            let link1 = flibBuildName lbi flib
                link2 = "lib" ++ nm <.> "so"
            createSymbolicLink name (tmpDir </> link1)
            renameFile (tmpDir </> link1) (dstDir </> link1)
            createSymbolicLink name (tmpDir </> link2)
            renameFile (tmpDir </> link2) (dstDir </> link2)
      where
        nm :: String
        nm = unUnqualComponentName $ foreignLibName flib
#endif /* mingw32_HOST_OS */
-- |Install for ghc, .hi, .a and, if --with-ghci given, .o
installLib :: Verbosity
           -> LocalBuildInfo
           -> FilePath  -- ^install location
           -> FilePath  -- ^install location for dynamic libraries
           -> FilePath  -- ^Build location
           -> PackageDescription
           -> Library
           -> ComponentLocalBuildInfo
           -> IO ()
installLib verbosity lbi targetDir dynlibTargetDir _builtDir _pkg lib clbi = do
  -- copy .hi files over:
  whenVanilla $ copyModuleFiles "hi"
  whenProf $ copyModuleFiles "p_hi"
  whenShared $ copyModuleFiles "dyn_hi"
  -- copy the built library files over:
  whenHasCode $ do
    whenVanilla $ installOrdinary builtDir targetDir vanillaLibName
    whenProf $ installOrdinary builtDir targetDir profileLibName
    whenGHCi $ installOrdinary builtDir targetDir ghciLibName
    whenShared $ installShared builtDir dynlibTargetDir sharedLibName
  where
    builtDir = componentBuildDir lbi clbi

    -- Copy one library file; shared libraries keep the executable bit and,
    -- when configured, are stripped after installation.
    install isShared srcDir dstDir name = do
      let src = srcDir </> name
          dst = dstDir </> name
      createDirectoryIfMissingVerbose verbosity True dstDir
      if isShared
        then installExecutableFile verbosity src dst
        else installOrdinaryFile verbosity src dst
      when (stripLibs lbi) $ Strip.stripLib verbosity
                               (hostPlatform lbi) (withPrograms lbi) dst

    installOrdinary = install False
    installShared = install True

    -- Copy all interface files with the given suffix for the library modules.
    copyModuleFiles ext =
      findModuleFiles [builtDir] [ext] (allLibModules lib clbi)
        >>= installOrdinaryFiles verbosity targetDir

    compiler_id = compilerId (compiler lbi)
    uid = componentUnitId clbi
    vanillaLibName = mkLibName uid
    profileLibName = mkProfLibName uid
    ghciLibName = Internal.mkGHCiLibName uid
    sharedLibName = (mkSharedLibName compiler_id) uid

    -- NOTE(review): the 'not' applies to the whole conjunction: hasLib is
    -- True when the library has modules OR C sources.
    hasLib = not $ null (allLibModules lib clbi)
                && null (cSources (libBuildInfo lib))
    -- Indefinite (Backpack) components have no compiled code to install.
    has_code = not (componentIsIndefinite clbi)
    whenHasCode = when has_code
    whenVanilla = when (hasLib && withVanillaLib lbi)
    whenProf = when (hasLib && withProfLib lbi && has_code)
    whenGHCi = when (hasLib && withGHCiLib lbi && has_code)
    whenShared = when (hasLib && withSharedLib lbi && has_code)
-- -----------------------------------------------------------------------------
-- Registering
-- | Describe the capabilities of the ghc-pkg accompanying this GHC, keyed
-- off the detected ghc-pkg version.
hcPkgInfo :: ProgramDb -> HcPkg.HcPkgInfo
hcPkgInfo progdb = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcPkgProg
                                   , HcPkg.noPkgDbStack = v < [6,9]
                                   , HcPkg.noVerboseFlag = v < [6,11]
                                   , HcPkg.flagPackageConf = v < [7,5]
                                   , HcPkg.supportsDirDbs = v >= [6,8]
                                   , HcPkg.requiresDirDbs = v >= [7,10]
                                   , HcPkg.nativeMultiInstance = v >= [7,10]
                                   , HcPkg.recacheMultiInstance = v >= [6,12]
                                   , HcPkg.suppressFilesCheck = v >= [6,6]
                                   }
  where
    v = versionNumbers ver
    -- NOTE(review): these irrefutable Just patterns crash if ghc-pkg is
    -- missing from the ProgramDb or has no detected version.
    Just ghcPkgProg = lookupProgram ghcPkgProgram progdb
    Just ver = programVersion ghcPkgProg
-- | Register a package into the given package databases via ghc-pkg.
registerPackage
  :: Verbosity
  -> ProgramDb
  -> PackageDBStack
  -> InstalledPackageInfo
  -> HcPkg.RegisterOptions
  -> IO ()
registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions =
  HcPkg.register
    (hcPkgInfo progdb)
    verbosity
    packageDbs
    installedPkgInfo
    registerOptions
-- | Resolve the root directory of a package database, creating the user
-- database's root directory when necessary.
pkgRoot :: Verbosity -> LocalBuildInfo -> PackageDB -> IO FilePath
pkgRoot verbosity lbi = pkgRoot'
  where
    -- NOTE(review): irrefutable Just pattern crashes if ghc is missing
    -- from the ProgramDb.
    pkgRoot' GlobalPackageDB =
      let Just ghcProg = lookupProgram ghcProgram (withPrograms lbi)
      in fmap takeDirectory (getGlobalPackageDB verbosity ghcProg)
    pkgRoot' UserPackageDB = do
      appDir <- getAppUserDataDirectory "ghc"
      let ver = compilerVersion (compiler lbi)
          subdir = System.Info.arch ++ '-':System.Info.os
                     ++ '-':display ver
          rootDir = appDir </> subdir
      -- We must create the root directory for the user package database if it
      -- does not yet exist. Otherwise '${pkgroot}' will resolve to a
      -- non-existent directory at the time of 'ghc-pkg register', and
      -- registration will fail.
      createDirectoryIfMissing True rootDir
      return rootDir
    pkgRoot' (SpecificPackageDB fp) = return (takeDirectory fp)
-- -----------------------------------------------------------------------------
-- Utils
-- | Whether this GHC is itself dynamically linked.
isDynamic :: Compiler -> Bool
isDynamic comp = Internal.ghcLookupProperty "GHC Dynamic" comp
-- | Whether this GHC can build vanilla and dynamic objects in a single
-- pass (the @-dynamic-too@ feature).
supportsDynamicToo :: Compiler -> Bool
supportsDynamicToo comp = Internal.ghcLookupProperty "Support dynamic-too" comp
-- | Append an extension to a path, unless the path already carries exactly
-- that extension.
withExt :: FilePath -> String -> FilePath
withExt fp ext
  | takeExtension fp == '.' : ext = fp
  | otherwise                     = fp <.> ext
|
mydaum/cabal
|
Cabal/Distribution/Simple/GHC.hs
|
bsd-3-clause
| 78,398
| 19
| 25
| 23,305
| 13,621
| 7,088
| 6,533
| 1,154
| 19
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Facebook.Object.Marketing.AdVideo where
import Facebook.Records hiding (get)
import qualified Facebook.Records as Rec
import Facebook.Types hiding (Id)
import Facebook.Pager
import Facebook.Monad
import Facebook.Graph
import Facebook.Base (FacebookException(..))
import Data.Time.Format
import Data.Aeson hiding (Error)
import Data.Aeson.Types hiding (Error)
import Control.Applicative
import Data.Text (Text)
import Data.Text.Read (decimal)
import Data.Scientific (toBoundedInteger)
import qualified Data.Text.Encoding as TE
import GHC.Generics (Generic)
import qualified Data.Map.Strict as Map
import Data.Vector (Vector)
import qualified Data.Vector as V
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import qualified Data.ByteString.Builder as BSB
import qualified Data.ByteString.Lazy as BSL
import qualified Control.Monad.Trans.Resource as R
import Control.Monad.Trans.Control (MonadBaseControl)
#if MIN_VERSION_time(1,5,0)
import System.Locale hiding (defaultTimeLocale, rfc822DateFormat)
import Data.Time.Clock
#else
import System.Locale
import Data.Time.Clock hiding (defaultTimeLocale, rfc822DateFormat)
#endif
import Facebook.Object.Marketing.Types
import Control.Monad.IO.Class
import qualified Data.Text as T
import System.IO
import System.Directory (removeFile)
import Control.Concurrent (threadDelay)
import Data.Char (toLower)
-- Upload phase of a resumable video upload ("start" / "transfer" / "finish").
data UploadPhaseADT = Start | Transfer | Finish deriving (Show, Generic)
instance FromJSON UploadPhaseADT
instance ToJSON UploadPhaseADT
-- Field marker and value wrapper used by the record machinery in
-- Facebook.Records for the upload_phase request parameter.
data UploadPhase = UploadPhase
newtype UploadPhase_ = UploadPhase_ UploadPhaseADT deriving (Show, Generic)
instance Field UploadPhase where
  type FieldValue UploadPhase = UploadPhase_
  fieldName _ = "upload_phase"
  fieldLabel = UploadPhase
-- | Unwrap an 'UploadPhase_'.
unUploadPhase_ :: UploadPhase_ -> UploadPhaseADT
unUploadPhase_ (UploadPhase_ x) = x
instance FromJSON UploadPhase_
instance ToJSON UploadPhase_
-- Wire encoding: the Graph API expects lowercase phase names.
instance ToBS UploadPhaseADT where
  toBS Start = toBS ("start" :: String)
  toBS Transfer = toBS ("transfer" :: String)
  toBS Finish = toBS ("finish" :: String)
instance ToBS UploadPhase_ where
  toBS (UploadPhase_ a) = toBS a
-- Accessor for the upload_phase field of a record.
uploadPhase r = r `Rec.get` UploadPhase
-- Field marker and value wrapper for the file_size request parameter.
data Filesize = Filesize
newtype Filesize_ = Filesize_ Int deriving (Show, Generic)
instance Field Filesize where
  type FieldValue Filesize = Filesize_
  fieldName _ = "file_size"
  fieldLabel = Filesize
-- | Unwrap a 'Filesize_'.
unFilesize_ :: Filesize_ -> Int
unFilesize_ (Filesize_ x) = x
instance FromJSON Filesize_
instance ToJSON Filesize_
instance ToBS Filesize_ where
  toBS (Filesize_ a) = toBS a
-- | Response to the "start" phase of a resumable video upload.
--
-- The Graph API returns the numeric fields as JSON strings, so parsing
-- decodes them from their string representation.
data UploadStartResp = UploadStartResp {
    uploadSessionId :: Int   -- ^ session id for the subsequent phases
  , videoId :: Integer       -- ^ id of the video being created
  , startOffset :: Int       -- ^ first byte to upload next
  , endOffset :: Int         -- ^ one past the last byte to upload next
  } deriving Show

instance FromJSON UploadStartResp where
  parseJSON (Object v) =
      UploadStartResp
        <$> numField "upload_session_id"
        <*> numField "video_id"
        <*> numField "start_offset"
        <*> numField "end_offset"
    where
      -- Decode the string-encoded numbers totally, instead of the partial
      -- 'read', so malformed payloads become parse errors rather than
      -- runtime exceptions.
      numField key = do
        s <- v .: key
        case reads s of
          [(n, "")] -> pure n
          _ -> fail ("UploadStartResp: non-numeric value " ++ show s
                       ++ " for key " ++ show key)
  -- Reject non-object payloads instead of crashing on a missing pattern.
  parseJSON invalid = typeMismatch "UploadStartResp" invalid
-- Field marker and value wrapper for the upload_session_id parameter.
data UploadSessId = UploadSessId
newtype UploadSessId_ = UploadSessId_ Int deriving (Show, Generic)
instance Field UploadSessId where
  type FieldValue UploadSessId = UploadSessId_
  fieldName _ = "upload_session_id"
  fieldLabel = UploadSessId
-- | Unwrap an 'UploadSessId_'.
unUploadSessId_ :: UploadSessId_ -> Int
unUploadSessId_ (UploadSessId_ x) = x
instance FromJSON UploadSessId_
instance ToJSON UploadSessId_
instance ToBS UploadSessId_ where
  toBS (UploadSessId_ a) = toBS a
-- Field marker and value wrapper for the start_offset parameter.
data StartOffset = StartOffset
newtype StartOffset_ = StartOffset_ Int deriving (Show, Generic)
instance Field StartOffset where
  type FieldValue StartOffset = StartOffset_
  fieldName _ = "start_offset"
  fieldLabel = StartOffset
-- | Unwrap a 'StartOffset_'.
unStartOffset_ :: StartOffset_ -> Int
unStartOffset_ (StartOffset_ x) = x
instance FromJSON StartOffset_
instance ToJSON StartOffset_
instance ToBS StartOffset_ where
  toBS (StartOffset_ a) = toBS a
-- Field marker and value wrapper for the video_file_chunk parameter; the
-- value is the path of a file holding the chunk bytes.
data VideoChunk = VideoChunk
newtype VideoChunk_ = VideoChunk_ FilePath deriving (Show, Generic)
instance Field VideoChunk where
  type FieldValue VideoChunk = VideoChunk_
  fieldName _ = "video_file_chunk"
  fieldLabel = VideoChunk
-- | Unwrap a 'VideoChunk_'.
unVideoChunk_ :: VideoChunk_ -> FilePath
unVideoChunk_ (VideoChunk_ x) = x
instance ToBS VideoChunk_ where
  toBS (VideoChunk_ a) = toBS a
-- | Response to a "transfer" phase request: the next byte range the server
-- wants. The upload is complete when start equals end.
data UploadTransferResp = UploadTransferResp {
    start :: Int
  , end :: Int
  } deriving Show

instance FromJSON UploadTransferResp where
  parseJSON (Object v) =
      UploadTransferResp <$> numField "start_offset" <*> numField "end_offset"
    where
      -- Total decoding of the string-encoded offsets, replacing the
      -- partial 'read' with a proper parse failure on bad input.
      numField key = do
        s <- v .: key
        case reads s of
          [(n, "")] -> pure n
          _ -> fail ("UploadTransferResp: non-numeric value " ++ show s
                       ++ " for key " ++ show key)
  -- Reject non-object payloads instead of crashing on a missing pattern.
  parseJSON invalid = typeMismatch "UploadTransferResp" invalid
type VideoId = Integer

-- | Upload a video file via the resumable (chunked) upload API, returning
-- the id of the newly created video on success.
uploadVideo :: (R.MonadResource m, MonadBaseControl IO m) =>
     Id_                -- ^ Ad Account Id
  -> FilePath           -- ^ Path of the video file to upload
  -> T.Text             -- ^ Video title
  -> UserAccessToken
  -> FacebookT Auth m (Either FacebookException VideoId)
uploadVideo (Id_ acctId) path videoTitle mtoken = do
  contents <- liftIO $ BS.readFile path
  -- Three phases: start (open session), transfer (chunks), finish (commit).
  startRes <- sendVideoStart acctId mtoken contents
  case startRes of
    Left err -> return (Left err)
    Right resp -> do
      chunkRes <- sendVideoChunks acctId mtoken contents (startResp2ChunkResp resp)
      case chunkRes of
        Left err -> return (Left err)
        Right _ -> do
          finishRes <- sendVideoFinish acctId mtoken videoTitle resp
          return $ either Left (\_ -> Right (videoId resp)) finishRes

-- Turn a start-phase response into the (session, offsets) pair consumed by
-- the transfer loop.
startResp2ChunkResp (UploadStartResp sess _ start end) =
  (sess, UploadTransferResp start end)
-- | Run the "finish" phase of a resumable upload: commit the session and
-- set the video title.
sendVideoFinish :: (R.MonadResource m, MonadBaseControl IO m) =>
     T.Text
  -> UserAccessToken
  -> T.Text
  -> UploadStartResp
  -> FacebookT Auth m (Either FacebookException Success)
sendVideoFinish acctId tok title (UploadStartResp sessionId _ _ _) =
    postFormVideo ("/v2.7/" <> acctId <> "/advideos") form tok
  where
    form = toForm $ (UploadPhase, UploadPhase_ Finish) :*:
                    (UploadSessId, UploadSessId_ sessionId) :*:
                    (Title, Title_ title) :*: Nil
-- | Run the "transfer" phase: repeatedly post byte ranges of the video
-- until the server reports an empty range (start == end).
sendVideoChunks :: (R.MonadResource m, MonadBaseControl IO m) =>
     T.Text
  -> UserAccessToken
  -> BS.ByteString
  -> (Int, UploadTransferResp)
  -> FacebookT Auth m (Either FacebookException Bool)
sendVideoChunks id tok bs (sess, transResp) = do
  -- A single temp file is reused for every chunk; the handle is closed
  -- immediately because the chunk is sent by file path, not by handle.
  (tmpFp, hdl) <- liftIO $ openBinaryTempFile "/tmp" "foo.bar"
  liftIO $ hClose hdl
  ret <- go transResp tmpFp
  -- NOTE(review): the temp file is not removed if postFormVideo throws;
  -- consider a bracket here.
  liftIO $ removeFile tmpFp
  return ret
  where
    -- Each response carries the next (start, end) range to send; recurse
    -- until the server reports the range is empty.
    go (UploadTransferResp start end) fp
      | start == end = return $ Right True
      | otherwise = do
          let chunk = BS.take (end-start) $ BS.drop start bs
          liftIO $ BS.writeFile fp chunk
          let r = toForm $ (UploadPhase, UploadPhase_ Transfer) :*:
                           (UploadSessId, UploadSessId_ sess) :*:
                           (StartOffset, StartOffset_ start) :*:
                           (VideoChunk, VideoChunk_ fp) :*: Nil
          ret <- postFormVideo ("/v2.7/" <> id <> "/advideos") r tok
          case ret of
            Right resp -> go resp fp
            Left x -> return $ Left x
-- | Run the "start" phase: announce the total file size and open an
-- upload session.
sendVideoStart :: (R.MonadResource m, MonadBaseControl IO m) =>
     T.Text -- ^ Ad Account Id
  -> UserAccessToken -- ^ Optional user access token.
  -> BS.ByteString
  -> FacebookT Auth m (Either FacebookException UploadStartResp)
sendVideoStart acctId mtoken payload =
    postFormVideo ("/v2.7/" <> acctId <> "/advideos") form mtoken
  where
    form = toForm $ (UploadPhase, UploadPhase_ Start) :*:
                    (Filesize, Filesize_ $ BS.length payload) :*:
                    Nil
-- | A video thumbnail as returned by the @/thumbnails@ edge.
data Thumbnail = Thumbnail {
  height :: Int
  , id :: T.Text            -- shadows 'Prelude.id' in this module
  , is_preferred :: Bool    -- whether Facebook marks this thumbnail preferred
  , scale :: Int
  , uri :: T.Text
  , width :: Int
  } deriving (Show, Generic)
instance FromJSON Thumbnail
-- | Fetch the preferred thumbnail of a video, falling back to the first
-- thumbnail when none is marked preferred.
--
-- Errors out (as before) if the API returns an empty thumbnail list, but
-- now with a descriptive message.
getPrefThumbnail:: (R.MonadResource m, MonadBaseControl IO m) =>
  VideoId --
  -> UserAccessToken -- ^ Optional user access token.
  -> FacebookT Auth m Thumbnail
getPrefThumbnail vId tok = do
  Pager thumbs _ _ <- getThumbnails vId tok
  -- Pattern-match instead of the previous partial 'head' plus the
  -- O(n) 'length filtered >= 1' test.
  case filter is_preferred thumbs of
    (preferred:_) -> return preferred
    [] -> case thumbs of
            (t:_) -> return t
            [] -> error "getPrefThumbnail: video has no thumbnails"
-- | Fetch the first page of thumbnails for the given video.
getThumbnails:: (R.MonadResource m, MonadBaseControl IO m) =>
  VideoId --
  -> UserAccessToken -- ^ Optional user access token.
  -> FacebookT Auth m (Pager Thumbnail)
getThumbnails vId tok =
  getObject ("/v2.7/" <> T.pack (show vId) <> "/thumbnails") [] (Just tok)
data Video = Video { -- more fields if needed: https://developers.facebook.com/docs/graph-api/reference/video
  status :: VideoStatus
  } deriving (Show, Generic)
instance FromJSON Video
-- | Processing state of an uploaded video.
data VideoStatusADT = Ready | Processing | Error deriving (Show, Generic, Eq)
instance ToJSON VideoStatusADT
instance FromJSON VideoStatusADT where
  -- The API sends lowercase tags ("ready", ...); lowercase the
  -- constructor names to match.
  parseJSON = genericParseJSON defaultOptions { constructorTagModifier = map toLower }
data VideoStatus = VideoStatus {
  video_status :: VideoStatusADT
  } deriving (Show, Generic)
instance FromJSON VideoStatus
-- | Query the processing status of an uploaded video.
--
-- NOTE(review): this prints the status to stdout on every call, which
-- looks like leftover debug output.
isVideoReady :: (R.MonadResource m, MonadBaseControl IO m) =>
  VideoId --
  -> UserAccessToken -- ^ Optional user access token.
  -> FacebookT Auth m VideoStatusADT
isVideoReady vId tok = do
  vid <- getObject ("/v2.7/" <> (T.pack $ show vId)) [("fields", "status")] (Just tok)
  liftIO $ print $ status vid
  return $ video_status (status vid)
-- | Poll every 15 seconds until the video leaves the Processing state;
-- returns True on Ready and False on Error.
--
-- NOTE(review): as the FIXME says there is no timeout, so a video stuck
-- in Processing loops forever.
waitForVideo :: (R.MonadResource m, MonadBaseControl IO m) =>
  VideoId --
  -> UserAccessToken -- ^ Optional user access token.
  -> FacebookT Auth m Bool
waitForVideo vId tok = do -- FIXME: Add timeout
  st <- isVideoReady vId tok
  case st of
    Error -> return False
    Ready -> return True
    Processing -> do
      liftIO $ print "Waiting for 15 sec"
      liftIO $ threadDelay $ 15 * 1000000
      waitForVideo vId tok
|
BeautifulDestinations/fb
|
src/Facebook/Object/Marketing/AdVideo.hs
|
bsd-3-clause
| 9,820
| 0
| 21
| 1,872
| 2,862
| 1,501
| 1,361
| -1
| -1
|
{-# LANGUAGE OverloadedStrings #-}
module Azure.DocDB.Auth (
DocDBSignature(..),
SigningParams(..),
MSDate(..),
signRequestInfo,
signingPayload,
) where
import qualified Crypto.Hash.Algorithms as CH
import qualified Crypto.MAC.HMAC as HM
import qualified Data.ByteArray as BA
import qualified Data.ByteString as B
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as B8
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time.Clock (UTCTime)
import Data.Time.Format (formatTime, defaultTimeLocale)
import Network.HTTP.Types (StdMethod(..), renderQuery, queryTextToQuery)
import Web.HttpApiData (ToHttpApiData(..))
-- | Parameters needed to generate a signing header
data SigningParams = SigningParams {
  spMethod :: StdMethod,       -- ^ HTTP verb of the request
  spResourceType :: T.Text,    -- ^ DocumentDB resource type segment
  spPath :: T.Text,            -- ^ resource path being addressed
  spWhen :: MSDate             -- ^ request timestamp
  } deriving (Eq, Ord)
-- | A UTC timestamp rendered in the HTTP date format Azure expects.
newtype MSDate = MSDate UTCTime deriving (Eq, Ord)
-- | Computed signature data, sufficient to generate an authorization header
data DocDBSignature = DocDBSignature
  { dbSigType :: T.Text   -- ^ token type, e.g. @master@
  , dbSigVer :: T.Text    -- ^ token version, e.g. @1.0@
  , dbSigSig :: T.Text    -- ^ base64-encoded HMAC signature
  } deriving (Eq, Ord)
instance ToHttpApiData DocDBSignature where
  -- Rendered as a query string without a leading '?', e.g.:
  -- type=master&ver=1.0&sig=5mDuQBYA0kb70WDJoTUzSBMTG3owkC0/cEN4fqa18/s=
  toHeader sig =
    renderQuery False $ queryTextToQuery
      [ ("type", Just (dbSigType sig))
      , ("ver", Just (dbSigVer sig))
      , ("sig", Just (dbSigSig sig))
      ]
  toUrlPiece = T.decodeUtf8 . toHeader

-- | Shown exactly as the header rendering.
instance Show DocDBSignature where
  show = B8.unpack . toHeader
-- | Compute an HMAC-SHA256 of the text under the given key, base64 encoded.
computeHMAC256 :: B.ByteString -> T.Text -> T.Text
computeHMAC256 key text =
    T.decodeUtf8 . B64.encode . BA.convert . HM.hmacGetDigest $ hmacedValue
  where
    -- 'BA.convert' produces the ByteString from the digest directly,
    -- avoiding the previous O(n) round-trip through a [Word8] list
    -- (B.pack . BA.unpack).
    hmacedValue :: HM.HMAC CH.SHA256
    hmacedValue = HM.hmac key (T.encodeUtf8 text)
-- | Build a signature from the parameters and the signing key.
signRequestInfo :: B.ByteString -> SigningParams -> DocDBSignature
signRequestInfo key params =
  DocDBSignature
    { dbSigType = "master"
    , dbSigVer = "1.0"
    , dbSigSig = computeHMAC256 key (signingPayload params)
    }
-- | Format a date in the HTTP-date style,
-- e.g. Tue, 01 Nov 1994 08:12:31 GMT
-- (the original comment said "RFC 7321", presumably a typo for RFC 7231).
instance Show MSDate where
  show (MSDate d) = formatTime defaultTimeLocale "%a, %0d %b %Y %H:%M:%S GMT" d
instance ToHttpApiData MSDate where
  toHeader = B8.pack . show
  toQueryParam = T.pack . show
-- | Get the text which would need to be signed.
--
-- Layout: lowercased verb, lowercased resource type, the path (case kept),
-- and the lowercased timestamp, each followed by a newline, plus a final
-- trailing newline for the empty last part.
signingPayload :: SigningParams -> T.Text
signingPayload (SigningParams method resourceType path when) =
    mconcat (concatMap withNewline pieces)
  where
    pieces = [lcMethod, lcResourceType, path, lcWhen, ""]
    withNewline piece = [piece, "\n"]
    lcMethod = T.toLower (T.pack (show method))
    lcWhen = T.toLower (T.pack (show when))
    lcResourceType = T.toLower resourceType
|
jnonce/azure-docdb
|
lib/Azure/DocDB/Auth.hs
|
bsd-3-clause
| 2,977
| 0
| 13
| 580
| 750
| 438
| 312
| 64
| 1
|
{-# LANGUAGE OverloadedStrings, MultiParamTypeClasses, FlexibleInstances #-}
module Data.Iota.Stateful.Tests.Text
where
import Blaze.ByteString.Builder
import Blaze.ByteString.Builder.Internal.Buffer
import Data.Attoparsec.Text
import Data.Iota.Stateful.Text
import Data.Functor.Identity
import Control.Monad.Identity
import Control.Applicative
import Data.Monoid
import Control.Arrow
import Data.Text
import qualified Data.Text.IO as T
-- States of a toy C tokenizer used to exercise the stateful Iota parser:
-- plain data, a pending '/' (with the buffered text), block comments
-- (with a pending '*'), and line comments.
data CParserTest = CData
                 | CForwardSlash Builder
                 | CBlockComment
                 | CBlockCommentAsterisk
                 | CLineComment
  deriving (Show)
-- This instance counts the number of characters emitted.
instance IotaS CParserTest Int where
  initStateS = (CData, 0)
  -- example of special behavior:
  -- once exactly ten characters have been counted, a "/*ten*/" marker is
  -- written (and counted as 3 -- presumably by design of the test; verify).
  parseIotaS CData 10 =
        anyChar .>= writeTextI "/*ten*/" (CData, (+3))
    <|> endI Terminal |>> writeTextI "/*ten*/" CData
  parseIotaS CData _ =
        char '/' .>> bufferI CForwardSlash
    <|> anyChar .>= emitI (CData, (+1))
    <|> endI Terminal |>> ignoreI CData
  -- A '/' was buffered: decide between comment start and plain data.
  parseIotaS (CForwardSlash buffer) _ =
        char '*' .>> ignoreI CBlockComment
    <|> char '/' .>> ignoreI CLineComment
    <|> otherwiseI |>= prependI buffer (CData, (+1))
  parseIotaS CBlockComment _ =
        char '*' .>> ignoreI CBlockCommentAsterisk
    <|> anyChar .>> ignoreI CBlockComment
    <|> otherwiseI |>> ignoreI CData
  parseIotaS CBlockCommentAsterisk _ =
        char '/' .>> ignoreI CData
    <|> anyChar .>> ignoreI CBlockComment
    <|> otherwiseI |>> ignoreI CData
  -- Line comments end at '\n' or "\r\n", which are emitted.
  parseIotaS CLineComment _ =
        string "\r\n" +>> emitI CData
    <|> char '\n' .>> emitI CData
    <|> anyChar .>> ignoreI CLineComment
    <|> otherwiseI |>> ignoreI CData
-- States of a toy Haskell tokenizer; comment states remember the state to
-- return to, and HQuotedC embeds a running C parser for [C|...] quotes.
data HaskellParserTest = HData
                       | HBlockComment HaskellParserTest
                       | HLineComment HaskellParserTest
                       | HQuotedC (IotaResultS CParserTest Int)
  deriving (Show)
-- The state log ([Text]) records which construct is being read, most
-- recent first.
instance IotaS HaskellParserTest [Text] where
  initStateS = (HData, ["Data"])
  parseIotaS HData _ =
        string "--" +>= ignoreI (HLineComment HData, ("Line Comment" :))
    <|> string "{-" +>= ignoreI (HBlockComment HData, ("Block Comment" :))
    <|> string "[C|" +>= emitI (HQuotedC initIotaS, ("Quoted C" :))
    <|> anyChar .>> emitI HData
    <|> endI Terminal |>= ignoreI (HData, ("End" :))
  parseIotaS (HBlockComment prior) _ =
        string "-}" +>= ignoreI (prior, ("Data" :))
    <|> anyChar .>> ignoreI (HBlockComment prior)
    <|> endI Reparse |>> ignoreI prior
  parseIotaS (HLineComment prior) _ =
        string "\r\n" +>= emitI (prior, ("Data" :))
    <|> char '\n' .>= emitI (prior, ("Data" :))
    <|> anyChar .>> ignoreI (HLineComment prior)
    <|> endI Reparse |>> ignoreI prior
  -- Inside a [C|...] quote the embedded C parser is fed character by
  -- character; ']' closes it, with "\\]" as the escape for a literal ']'.
  parseIotaS p@(HQuotedC cparser) _ =
        string "\\]" +>> substI "]" (feedInnerS cparser HQuotedC)
    <|> string "{-" +>= ignoreI (HBlockComment p, ("Block Comment" :))
    <|> string "--" +>= ignoreI (HLineComment p, ("Line Comment" :))
    <|> char ']' .>= closeInnerS cparser (HData, ("Data" :))
    <|> anyChar .>> feedInnerS cparser HQuotedC
    <|> endI Reparse |>> substI "]" (closeInnerS cparser HData)
-- | Complete parser state: lexer state paired with the state-tag trace.
type HaskellParser = (HaskellParserTest, [Text])
-- Read one line from stdin and print its parse result/trace.
main = do
  x <- T.getLine
  print $ iotaS (initStateS :: HaskellParser) x
|
AaronFriel/Iota
|
Data/Iota/Stateful/Tests/Text.hs
|
bsd-3-clause
| 3,445
| 0
| 19
| 891
| 979
| 512
| 467
| 78
| 1
|
module Signal.Wavelet.ReferenceITest where
import Data.Array.Repa as R
import Data.Vector.Generic as V
import qualified Signal.Wavelet.C1 as C1
import qualified Signal.Wavelet.Eval1 as E1
import qualified Signal.Wavelet.Eval2 as E2
import qualified Signal.Wavelet.List1 as L1
import qualified Signal.Wavelet.List2 as L2
import qualified Signal.Wavelet.Repa1 as R1
import qualified Signal.Wavelet.Repa2 as R2
import qualified Signal.Wavelet.Repa3 as R3
import qualified Signal.Wavelet.Vector1 as V1
import Signal.Wavelet.Eval.Common as EC
import Signal.Wavelet.List.Common as LC
import Signal.Wavelet.Repa.Common (forceS)
import Test.ArbitraryInstances
import Test.Utils ((=~))
-- Note [Verifying equivalence of all lattice implementations]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- List.Common implementation of one lattice layer is assumed to be a reference
-- one. Each implementation verifies that it is identical to another one. In
-- case of Repa-based algorithms only sequential implementations are
-- tested. This assumes that instances of Repa's Load type class are implemented
-- correctly. The following dependencies are used:
--
-- List.Common -> Eval.Common
-- List.Common -> Repa1 -> Repa2
-- List.Common -> C1 -> Vector1 -> Repa3
--
-- Read "List.Common -> C1" as "List.Commonb implementation serves as a
-- reference to C1 implementation".
-- Note [Lattice modifier]
-- ~~~~~~~~~~~~~~~~~~~~~~~
--
-- Implementations operating in situ (C1, Vector1 and Repa3) need one additional
-- parameter compared to other implementations. This parameter takes value of 0
-- or 1 and denotes the shift of base operations:
--
-- 0 - no shift
-- 1 - one base operation wraps around the first and last elements of a signal
--
-- C1 implementation can only be compared to List implementation when there is
-- no shift. Vector1, C1 and Repa3 can be compared if they are passed identical
-- shift value.
-- | One lattice layer computed by the parallel Eval implementation agrees
-- with the sequential list reference.  The input is doubled so the signal
-- always has even length.
propLatticeEvalLikeList :: Double -> [Double] -> Bool
propLatticeEvalLikeList angle signal =
    LC.latticeSeq weights doubled =~ EC.latticePar weights doubled
    where weights = (sin angle, cos angle)
          doubled = signal Prelude.++ signal
-- | One lattice layer: Repa1 (forced sequentially) matches the list
-- reference implementation.
propLatticeRepa1LikeList :: DwtInputRepa -> Bool
propLatticeRepa1LikeList (DwtInputRepa (ls, sig)) =
    LC.latticeSeq (s, c) (R.toList sig) =~ (R.toList . forceS $
                               R1.lattice (s, c) sig)
    where (s, c) = (sin d, cos d)
          d = ls R.! (Z :. 0)
-- | One lattice layer: Repa2 matches Repa1.
propLatticeRepa2LikeRepa1 :: DwtInputRepa -> Bool
propLatticeRepa2LikeRepa1 (DwtInputRepa (ls, sig)) =
    forceS (R1.lattice (s, c) sig) =~ forceS (R2.lattice (s, c) sig)
    where (s, c) = (sin d, cos d)
          d = ls R.! (Z :. 0)
-- | One lattice layer: C1 with modifier 0 (no wrap-around) matches the
-- list reference.
propLatticeC1LikeList :: DwtInputC -> Bool
propLatticeC1LikeList (DwtInputC (ls, sig)) =
    LC.latticeSeq (s, c) (V.toList sig) =~ (V.toList $ C1.lattice 0 (s, c) sig)
    where (s, c) = (sin d, cos d)
          d = ls V.! 0
-- See: Note [Lattice modifier]
-- | Vector1 matches C1 for either modifier value (l is clamped to 0/1).
propLatticeVector1LikeC1 :: Int -> DwtInputC -> Bool
propLatticeVector1LikeC1 a (DwtInputC (ls, sig)) =
    (V.convert $ C1.lattice l (s, c) sig) =~ V1.lattice l (s, c) (V.convert sig)
    where (s, c) = (sin d, cos d)
          d = ls V.! 0
          l = abs a `rem` 2
-- | Repa3 matches Vector1 for either modifier value.
propLatticeRepa3LikeVector1 :: Int -> DwtInputRepa -> Bool
propLatticeRepa3LikeVector1 a (DwtInputRepa (ls, sig)) =
    V1.lattice l (s, c) vSig =~ (toUnboxed . forceS . R3.lattice l (s, c) $ sig)
    where vSig = toUnboxed $ sig
          (s, c) = (sin d, cos d)
          d = ls R.! (Z :. 0)
          l = abs a `rem` 2
-- Note [Verifying equivalence of all DWT/IDWT implementations]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- List1 implementation of DWT and IDWT is assumed to be a reference one. Each
-- implementation verifies that it is identical to another one. In case of
-- Repa-based algorithms only sequential implementations are tested. This
-- assumes that instances of Repa's Load type class are implemented
-- correctly. The following dependencies are used:
--
-- List1 -> Eval1 -> List2 -> Eval2
-- List1 -> Repa1 -> Repa2
-- List1 -> C1 -> Vector1 -> Repa3
--
-- Read "List1 -> C1" as "List1 implementation serves as a reference to C1
-- implementation".
-- Note: [Shifting input/output signal]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Implementations operating without shifts and signal extending (C1, Vector1
-- and Repa3):
--
-- a) return shifted signal in case of DWT
-- b) require that the input signal is shifted in case of IDWT
--
-- This means that signals need to be shifted accordingly in order to compare
-- them with different implementation. Test dependencies are designed in such a
-- way that only one test requires shifting of signals (List1 -> C1).
-- Full DWT/IDWT equivalence chain (see the note above): each pair below
-- checks one implementation against its reference.
propDWTEval1LikeList1 :: DwtInputList -> Bool
propDWTEval1LikeList1 (DwtInputList (ls, sig)) =
    L1.dwt ls sig =~ E1.dwt ls sig
propIDWTEval1LikeList1 :: DwtInputList -> Bool
propIDWTEval1LikeList1 (DwtInputList (ls, sig)) =
    L1.idwt ls sig =~ E1.idwt ls sig
propDWTList2LikeEval1 :: DwtInputList -> Bool
propDWTList2LikeEval1 (DwtInputList (ls, sig)) =
    E1.dwt ls sig =~ L2.dwt ls sig
propIDWTList2LikeEval1 :: DwtInputList -> Bool
propIDWTList2LikeEval1 (DwtInputList (ls, sig)) =
    E1.idwt ls sig =~ L2.idwt ls sig
propDWTEval2LikeList2 :: DwtInputList -> Bool
propDWTEval2LikeList2 (DwtInputList (ls, sig)) =
    L2.dwt ls sig =~ E2.dwt ls sig
propIDWTEval2LikeList2 :: DwtInputList -> Bool
propIDWTEval2LikeList2 (DwtInputList (ls, sig)) =
    L2.idwt ls sig =~ E2.idwt ls sig
propDWTRepa1LikeList1 :: DwtInputRepa -> Bool
propDWTRepa1LikeList1 (DwtInputRepa (ls, sig)) =
    L1.dwt (R.toList ls) (R.toList sig) =~ (R.toList $ R1.dwtS ls sig)
propIDWTRepa1LikeList1 :: DwtInputRepa -> Bool
propIDWTRepa1LikeList1 (DwtInputRepa (ls, sig)) =
    L1.idwt (R.toList ls) (R.toList sig) =~ (R.toList $ R1.idwtS ls sig)
propDWTRepa2LikeRepa1 :: DwtInputRepa -> Bool
propDWTRepa2LikeRepa1 (DwtInputRepa (ls, sig)) =
    R1.dwtS ls sig =~ R2.dwtS ls sig
propIDWTRepa2LikeRepa1 :: DwtInputRepa -> Bool
propIDWTRepa2LikeRepa1 (DwtInputRepa (ls, sig)) =
    R1.idwtS ls sig =~ R2.idwtS ls sig
-- See Note: [Shifting input/output signal]
-- | C1's DWT output is rotated relative to the list reference, so the C
-- result is shifted left by (number of layers - 1) before comparing.
propDWTC1LikeList1 :: DwtInputC -> Bool
propDWTC1LikeList1 (DwtInputC (ls, sig)) =
    listDwt =~ cDwt
  where
    listDwt = L1.dwt (V.toList ls) (V.toList sig)
    cDwt = LC.cslN (V.length ls - 1) $ V.toList (C1.dwt ls sig)
-- | For IDWT the C implementation expects a pre-shifted input instead.
propIDWTC1LikeList1 :: DwtInputC -> Bool
propIDWTC1LikeList1 (DwtInputC (ls, sig)) =
    listIdwt =~ cIdwt
  where
    listIdwt = L1.idwt (V.toList ls) (V.toList sig)
    cIdwt = V.toList . C1.idwt ls . shiftedSig ls $ sig
    shiftedSig xs ys = V.fromList (LC.csrN (V.length xs - 1) (V.toList ys))
propDWTVector1LikeC1 :: DwtInputVector -> Bool
propDWTVector1LikeC1 (DwtInputVector (ls, sig)) =
    C1.dwt (V.convert ls) (V.convert sig) =~ (V.convert $ V1.dwt ls sig)
propIDWTVector1LikeC1 :: DwtInputVector -> Bool
propIDWTVector1LikeC1 (DwtInputVector (ls, sig)) =
    C1.idwt (V.convert ls) (V.convert sig) =~ (V.convert $ V1.idwt ls sig)
propDWTRepa3LikeVector1 :: DwtInputRepa -> Bool
propDWTRepa3LikeVector1 (DwtInputRepa (ls, sig)) =
    V1.dwt (toUnboxed ls) (toUnboxed sig) =~ toUnboxed (R3.dwtS ls sig)
propIDWTRepa3LikeVector1 :: DwtInputRepa -> Bool
propIDWTRepa3LikeVector1 (DwtInputRepa (ls, sig)) =
    V1.idwt (toUnboxed ls) (toUnboxed sig) =~ toUnboxed (R3.idwtS ls sig)
|
jstolarek/lattice-structure-hs
|
tests/Signal/Wavelet/ReferenceITest.hs
|
bsd-3-clause
| 7,609
| 0
| 13
| 1,578
| 1,974
| 1,093
| 881
| 104
| 1
|
{-# LANGUAGE TemplateHaskell #-}
module Data.API.TH
( applicativeE
, optionalInstanceD
, funSigD
, simpleD
, simpleSigD
) where
import Data.API.Tools.Combinators
import Control.Applicative
import Control.Monad
import Language.Haskell.TH
-- | Build an idiomatic (Applicative-style) expression:
--
-- > applicativeE k []            ==> k
-- > applicativeE k [e1,...,en]   ==> k <$> e1 <*> e2 ... <*> en
applicativeE :: ExpQ -> [ExpQ] -> ExpQ
applicativeE kE argEs =
    case argEs of
      []      -> kE
      e0 : es -> foldl starE (fmapE kE e0) es
  where
    -- @fmapE f x@ builds the expression @f <$> x@.
    fmapE f x = varE '(<$>) `appE` f `appE` x
    -- @starE f x@ builds the expression @f <*> x@.
    starE f x = varE '(<*>) `appE` f `appE` x
-- | Add an instance declaration for a class, if such an instance does
-- not already exist
-- When the instance already exists, nothing is generated and (depending
-- on 'warnOnOmittedInstance') a warning is reported instead.
-- NOTE(review): 'InstanceD' is applied here with an empty context and no
-- overlap argument (old template-haskell API); newer TH versions add an
-- overlap parameter — confirm against the supported GHC range.
optionalInstanceD :: ToolSettings -> Name -> [TypeQ] -> [DecQ] -> Q [Dec]
optionalInstanceD stgs c tqs dqs = do
    ts <- sequence tqs
    ds <- sequence dqs
    exists <- isInstance c ts
    if exists then do when (warnOnOmittedInstance stgs) $ reportWarning $ msg ts
                      return []
              else return [InstanceD [] (foldl AppT (ConT c) ts) ds]
  where
    msg ts = "instance " ++ pprint c ++ " " ++ pprint ts ++ " already exists, so it was not generated"
-- | Emit a pair of TH declarations: a type signature followed by the
-- function binding it describes.
funSigD :: Name -> TypeQ -> [ClauseQ] -> Q [Dec]
funSigD n ty cls = do
    sig <- sigD n ty
    fun <- funD n cls
    return [sig, fun]
-- | Construct a simple TH definition
-- (a single clause @n = e@, with no type signature).
simpleD :: Name -> ExpQ -> Q Dec
simpleD n e = funD n [clause [] (normalB e) []]
-- | Construct a simple TH definition with a type signature
-- (the pair @n :: t@ and @n = e@, via 'funSigD').
simpleSigD :: Name -> TypeQ -> ExpQ -> Q [Dec]
simpleSigD n t e = funSigD n t [clause [] (normalB e) []]
|
adinapoli/api-tools
|
src/Data/API/TH.hs
|
bsd-3-clause
| 1,821
| 0
| 14
| 520
| 597
| 313
| 284
| 35
| 3
|
-----------------------------------------------------------------------------
-- |
-- Module : Codec.Archive.Tar.Read
-- Copyright : (c) 2007 Bjorn Bringert,
-- 2008 Andrea Vezzosi,
-- 2008-2009 Duncan Coutts,
-- 2011 Max Bolingbroke
-- License : BSD3
--
-- Maintainer : duncan@community.haskell.org
-- Portability : portable
--
-----------------------------------------------------------------------------
module Codec.Archive.Tar.Read (read, FormatError(..)) where
import Codec.Archive.Tar.Types
import Data.Char (ord)
import Data.Int (Int64)
import Numeric (readOct)
import Control.Exception (Exception)
import Control.Applicative (Applicative(..))
import Control.Monad (liftM, ap)
import Data.Typeable (Typeable)
import qualified Data.ByteString.Lazy as BS
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
import Data.ByteString.Lazy (ByteString)
import Prelude hiding (read)
-- | Errors that can be encountered when parsing a Tar archive.
data FormatError
  = TruncatedArchive         -- ^ fewer than 512 bytes where a header was expected
  | ShortTrailer             -- ^ end-of-archive marker shorter than 1024 bytes
  | BadTrailer               -- ^ end-of-archive marker contains non-zero bytes
  | TrailingJunk             -- ^ non-zero data after the end-of-archive marker
  | ChecksumIncorrect        -- ^ header checksum does not match its contents
  | NotTarFormat             -- ^ neither checksum nor format recognisable
  | UnrecognisedTarFormat    -- ^ magic field is none of V7\/ustar\/GNU
  | HeaderBadNumericEncoding -- ^ a numeric header field failed to parse
  deriving (Typeable)
instance Show FormatError where
  show TruncatedArchive         = "truncated tar archive"
  show ShortTrailer             = "short tar trailer"
  show BadTrailer               = "bad tar trailer"
  show TrailingJunk             = "tar file has trailing junk"
  show ChecksumIncorrect        = "tar checksum error"
  show NotTarFormat             = "data is not in tar format"
  show UnrecognisedTarFormat    = "tar entry not in a recognised format"
  show HeaderBadNumericEncoding = "tar header is malformed (bad numeric encoding)"
instance Exception FormatError
-- | Convert a data stream in the tar file format into an internal data
-- structure. Decoding errors are reported by the 'Fail' constructor of the
-- 'Entries' type.
--
-- * The conversion is done lazily.
--
read :: ByteString -> Entries FormatError
read = unfoldEntries getEntry  -- see 'getEntry' for the per-entry parser
-- | Parse one entry from the head of @bs@: @Right Nothing@ at the
-- end-of-archive marker, @Right (Just (entry, rest))@ on success, or a
-- 'FormatError'.
getEntry :: ByteString -> Either FormatError (Maybe (Entry, ByteString))
getEntry bs
  | BS.length header < 512 = Left TruncatedArchive
  -- Tar files end with at least two blocks of all '0'. Checking this serves
  -- two purposes. It checks the format but also forces the tail of the data
  -- which is necessary to close the file if it came from a lazily read file.
  | BS.head bs == 0 = case BS.splitAt 1024 bs of
      (end, trailing)
        | BS.length end /= 1024 -> Left ShortTrailer
        | not (BS.all (== 0) end) -> Left BadTrailer
        | not (BS.all (== 0) trailing) -> Left TrailingJunk
        | otherwise -> Right Nothing
  | otherwise = partial $ do
  -- Accept the header when the checksum matches; if both checksum and
  -- format parsed but the checksum is wrong report that, otherwise the
  -- data is not tar at all.
  case (chksum_, format_) of
    (Ok chksum, _ ) | correctChecksum header chksum -> return ()
    (Ok _, Ok _) -> Error ChecksumIncorrect
    _ -> Error NotTarFormat
  -- These fields are partial, have to check them
  format <- format_; mode <- mode_;
  uid <- uid_; gid <- gid_;
  size <- size_; mtime <- mtime_;
  devmajor <- devmajor_; devminor <- devminor_;
  -- Entry data follows the 512-byte header, padded to a 512-byte boundary.
  let content = BS.take size (BS.drop 512 bs)
      padding = (512 - size) `mod` 512
      bs' = BS.drop (512 + size + padding) bs
      entry = Entry {
        entryTarPath = TarPath name prefix,
        entryContent = case typecode of
               '\0' -> NormalFile content size
               '0' -> NormalFile content size
               '1' -> HardLink (LinkTarget linkname)
               '2' -> SymbolicLink (LinkTarget linkname)
               '3' -> CharacterDevice devmajor devminor
               '4' -> BlockDevice devmajor devminor
               '5' -> Directory
               '6' -> NamedPipe
               '7' -> NormalFile content size
               _ -> OtherEntryType typecode content size,
        entryPermissions = mode,
        entryOwnership = Ownership uname gname uid gid,
        entryTime = mtime,
        entryFormat = format
    }
  return (Just (entry, bs'))
  where
   -- Fixed-offset header fields (ustar layout).
   header = BS.take 512 bs
   name = getString 0 100 header
   mode_ = getOct 100 8 header
   uid_ = getOct 108 8 header
   gid_ = getOct 116 8 header
   size_ = getOct 124 12 header
   mtime_ = getOct 136 12 header
   chksum_ = getOct 148 8 header
   typecode = getByte 156 header
   linkname = getString 157 100 header
   magic = getChars 257 8 header
   uname = getString 265 32 header
   gname = getString 297 32 header
   devmajor_ = getOct 329 8 header
   devminor_ = getOct 337 8 header
   prefix = getString 345 155 header
-- trailing = getBytes 500 12 header
   -- The magic field distinguishes the three supported tar dialects.
   format_ = case magic of
    "\0\0\0\0\0\0\0\0" -> return V7Format
    "ustar\NUL00" -> return UstarFormat
    "ustar \NUL" -> return GnuFormat
    _ -> Error UnrecognisedTarFormat
-- | Check a tar header block against its stored checksum.  The checksum is
-- the byte-wise sum of the 512-byte header (each byte as an 8-bit unsigned
-- value), computed as if the 8 checksum bytes (offsets 148..155) were
-- blank (space) characters.
correctChecksum :: ByteString -> Int -> Bool
correctChecksum header stored = stored == computed
  where
    -- header with the checksum field blanked out with spaces
    blanked = BS.concat [ BS.take 148 header
                        , BS.Char8.replicate 8 ' '
                        , BS.drop 156 header ]
    computed = BS.Char8.foldl' (\acc ch -> acc + ord ch) 0 blanked
-- * TAR format primitive input
-- | Read a numeric header field: octal digits with optional leading
-- spaces and trailing NUL\/space padding, or the star base-256 binary
-- extension signalled by a leading 0x80\/0xFF byte (see below).
getOct :: Integral a => Int64 -> Int64 -> ByteString -> Partial FormatError a
getOct off len = parseOct
              . BS.Char8.unpack
              . BS.Char8.takeWhile (\c -> c /= '\NUL' && c /= ' ')
              . BS.Char8.dropWhile (== ' ')
              . getBytes off len
  where
    parseOct "" = return 0
    -- As a star extension, octal fields can hold a base-256 value if the high
    -- bit of the initial character is set. The initial character can be:
    --   0x80 ==> trailing characters hold a positive base-256 value
    --   0xFF ==> trailing characters hold a negative base-256 value
    --
    -- In both cases, there won't be a trailing NUL/space.
    --
    -- GNU tar seems to contain a half-implementation of code that deals with
    -- extra bits in the first character, but I don't think it works and the
    -- docs I can find on star seem to suggest that these will always be 0,
    -- which is what I will assume.
    parseOct ('\128':xs) = return (readBytes xs)
    parseOct ('\255':xs) = return (negate (readBytes xs))
    parseOct s  = case readOct s of
      [(x,[])] -> return x
      _        -> Error HeaderBadNumericEncoding
    readBytes = go 0
      where go acc []     = acc
            go acc (x:xs) = go (acc * 256 + fromIntegral (ord x)) xs
-- | Slice @len@ bytes starting at byte offset @off@.
getBytes :: Int64 -> Int64 -> ByteString -> ByteString
getBytes off len bs = BS.take len (BS.drop off bs)
-- | The single byte at offset @off@, as a 'Char'.
getByte :: Int64 -> ByteString -> Char
getByte off bs = bs `BS.Char8.index` off
-- | A fixed-width field as a 'String', NULs included.
getChars :: Int64 -> Int64 -> ByteString -> String
getChars off len bs = BS.Char8.unpack (getBytes off len bs)
-- | A NUL-terminated field as a 'String' (everything before the first NUL).
getString :: Int64 -> Int64 -> ByteString -> String
getString off len bs = takeWhile (/= '\0') (getChars off len bs)
-- | Minimal success-or-error functor\/monad used while decoding header
-- fields; converted to 'Either' at the end via 'partial'.
data Partial e a = Error e | Ok a
instance Functor (Partial e) where
    fmap = liftM
instance Applicative (Partial e) where
    -- 'pure' is defined directly rather than as 'return': with modern
    -- base (where 'return' defaults to 'pure') the old 'pure = return'
    -- definition would make the two mutually dependent.  Defining it
    -- canonically is safe on every base version.
    pure  = Ok
    (<*>) = ap
-- | Collapse to 'Either': 'Error' becomes 'Left', 'Ok' becomes 'Right'.
partial :: Partial e a -> Either e a
partial (Error msg) = Left msg
partial (Ok x)      = Right x
instance Monad (Partial e) where
    return = Ok
    Error m >>= _ = Error m
    Ok x    >>= k = k x
    fail = error "fail @(Partial e)"
|
Kludgy/tar-fork
|
Codec/Archive/Tar/Read.hs
|
bsd-3-clause
| 7,815
| 0
| 19
| 2,342
| 1,857
| 974
| 883
| 138
| 15
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Temperature.KO.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Temperature.KO.Corpus
import Duckling.Testing.Asserts
-- | Korean temperature tests: run the corpus from
-- "Duckling.Temperature.KO.Corpus" against the Temperature dimension.
tests :: TestTree
tests = testGroup "KO Tests"
  [ makeCorpusTest [Seal Temperature] corpus
  ]
|
facebookincubator/duckling
|
tests/Duckling/Temperature/KO/Tests.hs
|
bsd-3-clause
| 516
| 0
| 9
| 78
| 79
| 50
| 29
| 11
| 1
|
{-# LANGUAGE DataKinds, Arrows, ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
-- #define DEBUG
module Main where
#ifdef DEBUG
import Debug.Trace (trace)
#define maybeTrace trace
#define maybeTraceIO putStrLn
#else
#define maybeTrace (\ (_ :: String) t -> t)
#define maybeTraceIO (\ (_ :: String) -> return ())
#endif
import MixedTypesNumPrelude
-- import Text.Printf
import System.Environment (getArgs)
import System.IO.Unsafe (unsafePerformIO)
import Control.Arrow
import AERN2.Utils.Arrows
import Data.Complex
import AERN2.MP
import AERN2.QA.Protocol
import AERN2.QA.NetLog
import AERN2.QA.Strategy.Cached
import AERN2.QA.Strategy.Parallel
import AERN2.Real
import AERN2.MPBallWithGlobalPrec
import BenchTasks.Logistic as Logistic
import BenchTasks.Fourier as Fourier
import CIDR
-- | Entry point.  Command line: @IMPL BENCH PARAM1 PARAM2 ...@.
-- Prints the benchmark description, then the result line; forcing the
-- result line is what actually runs the benchmark.
main :: IO ()
main = do
    (impl : name : params) <- getArgs
    let (taskLine, resultLine) = bench impl name params
    putStrLn taskLine
    putStrLn resultLine
-- | Dispatch a named benchmark for a named implementation, returning the
-- human-readable task description and the result line.
-- NOTE(review): local names "benchDecription"/"resultDecription" are
-- misspelled but used consistently; left unchanged here.  The parameters
-- are parsed with 'read' and so fail with an exception on non-integers.
bench :: String -> String -> [String] -> (String, String)
bench implName benchName benchParamsS =
    (benchDecription, implName ++ ": " ++ resultDecription)
    where
    benchParams :: [Integer]
    benchParams = map read benchParamsS
    -- Task description: depends only on the benchmark name and parameters.
    benchDecription =
        case benchName of
            "logistic" -> logisticAux benchParams
            "fft" -> fftAux benchParams
            "dft" -> dftAux benchParams
            "mysqrt" -> mysqrtAux benchParams
            _ ->
                error $ "unknown benchmark: " ++ benchName
        where
        logisticAux [n,_acS, _acG] = Logistic.taskDescription n
        logisticAux _ = error "logistic requires 3 integer parameters \"n\", \"acS\" and \"acG\""
        fftAux [k,acS, acG] = taskFFTDescription k acS acG
        fftAux _ = error "fft requires 3 integer parameters \"k\", \"acS\" and \"acG\""
        dftAux [k,acS, acG] = taskDFTDescription k acS acG
        dftAux _ = error "dft requires 3 integer parameters \"k\", \"acS\" and \"acG\""
        mysqrtAux [_acS, _acG] = "mysqrt 2"
        mysqrtAux _ = error "mysqrt requires 2 integer parameters \"acS\" and \"acG\""
    -- Result: selects the implementation; unforced until printed.
    resultDecription =
        case (benchName, benchParams) of
            ("logistic", [n,acS, acG]) ->
                case implName of
                    "MP" -> show (logistic_MP n)
                    -- "CR_AC_plain" -> show (logistic_CR_AC_plain n)
                    "CR_cachedUnsafe" -> show (logistic_CR_cachedUnsafe n (bitsSG acS acG))
                    "CR_cachedArrow" -> show (logistic_CR_cachedArrow n (bitsSG acS acG))
                    _ -> error $ "unknown implementation: " ++ implName
            ("fft", [k,acS, acG]) -> fourier True k acS acG
            ("dft", [k,acS, acG]) -> fourier False k acS acG
            ("mysqrt", [acS, acG]) ->
                case implName of
                    "MP" -> show (mysqrt (mpBallP (prec acG) 2))
                    "CR" -> show (mysqrt (real 2) ? (bitsSG acS acG))
                    _ -> error $ "unsupported implementation: " ++ implName
            _ -> error ""
        where
        fourier isFFT k acS acG =
            case implName of
                "Double" -> showL (fft_FP isFFT k)
                "MP" -> showL (case fft_MP isFFT k (bitsSG acS acG) of Just rs -> rs; _ -> error "no result")
                "MP_parArrow" -> showL (case fft_MP_parArrow isFFT k (bitsSG acS acG) of Just rs -> rs; _ -> error "no result")
                "CR_cachedUnsafe" -> showL (fft_CR_cachedUnsafe isFFT k (bitsSG acS acG))
                "CR_cachedArrow" -> showL (fft_CR_cachedArrow isFFT k (bitsSG acS acG))
                "CR_parArrow" -> showL (fft_CR_parArrow isFFT k (bitsSG acS acG))
                _ -> error $ "unknown implementation: " ++ implName
            where
            -- n = 2^k
            -- acHalf = ac `div` 2
            showL xs = "\n" ++ (unlines $ map show xs)
-- | Logistic map via the cached (unsafe) CauchyReal evaluation.
logistic_CR_cachedUnsafe :: Integer -> AccuracySG -> MPBall
logistic_CR_cachedUnsafe n acSG =
    (taskLogistic n $ real Logistic.x0) ? acSG
-- | Logistic map via the cached QA arrow; each iterate is registered as a
-- named network node ("x_i") so the query log can be inspected (the log is
-- printed only when the DEBUG CPP flag is on, via 'maybeTrace').
logistic_CR_cachedArrow :: Integer -> AccuracySG -> MPBall
logistic_CR_cachedArrow n acSG =
    maybeTrace (formatQALog 0 netlog) $
    result
    where
    (netlog, result) =
      executeQACachedA $
        proc () ->
          do
          x0R <- (-:-) -< realA Logistic.x0
          (Just x) <-taskLogisticWithHookA n hookA -< x0R
          realWithAccuracyA Nothing -< (x, acSG)
    hookA i =
      proc r ->
        do
        rNext <- (-:-)-< (rename r)
        returnA -< Just rNext
      where
      rename = realRename (\_ -> "x_" ++ show i)
-- | Logistic map in plain ball arithmetic, retried at increasing precision
-- until 50 bits of accuracy are reached ('Nothing' when a step loses them).
logistic_MP :: Integer -> Maybe MPBall
logistic_MP n =
    snd $ last $ iterateUntilAccurate (bits (50 :: Integer)) $ withP
    where
    withP p =
        (taskLogisticWithHook n (const checkAccuracy)) x0
        where
        x0 = mpBallP p Logistic.x0
    checkAccuracy :: MPBall -> Maybe MPBall
    checkAccuracy ball
        | getAccuracy ball < (bits 50) = Nothing
        | otherwise = Just ball
-- | FFT/DFT over CauchyReal, queried per-component at the end.
fft_CR_cachedUnsafe :: Bool -> Integer -> AccuracySG -> [Complex MPBall]
fft_CR_cachedUnsafe isFFT k acSG =
  map approx $ task
  where
  task
    | isFFT = taskFFT (\r -> r :+ (real 0)) k
    | otherwise = taskDFT (\r -> r :+ (real 0)) k
  approx :: Complex CauchyReal -> Complex MPBall
  approx (a :+ i) = (a ? acSG) :+ (i ? acSG)
-- | FFT via the cached QA arrow.
-- NOTE(review): the @otherwise@ branch of @task@ is commented out, so
-- calling this with @isFFT = False@ (the "dft" benchmark) fails with an
-- incomplete-pattern error at runtime — confirm whether that is intended.
fft_CR_cachedArrow :: Bool -> Integer -> AccuracySG -> [Complex MPBall]
fft_CR_cachedArrow isFFT k acSG =
  -- seq (unsafePerformIO $ writeNetLogJSON netlog) $
  maybeTrace (formatQALog 0 netlog) $
  results
  where
  approxA =
    proc (aR :+ iR) ->
      do
      a <- (-?-) -< (aR, acSG)
      i <- (-?-) -< (iR, acSG)
      returnA -< a :+ i
  (netlog, results) =
    executeQACachedA $
      proc () ->
        do
        resultRs <- task -< ()
        mapA approxA -< resultRs :: [Complex (CauchyRealA QACachedA)]
  task
    | isFFT = taskFFTA k
    -- | otherwise = taskDFTA k
-- | FFT via the parallel QA arrow: issue all accuracy queries as promises,
-- then fulfil them.  Same caveat as above for @isFFT = False@.
fft_CR_parArrow :: Bool -> Integer -> AccuracySG -> [Complex MPBall]
fft_CR_parArrow isFFT k acSG =
  unsafePerformIO $
    do
    -- (netlog, results) <-
    --   executeQAParAwithLog $
    executeQAParA $
      proc () ->
        do
        resultRs <- task -< ()
        promises <- mapA getPromiseComplexA -< resultRs :: [Complex (CauchyRealA QAParA)]
        mapA fulfilPromiseComplex -< promises
    -- writeNetLogJSON netlog
    -- return results
  where
  getPromiseComplexA =
    proc (aR :+ iR) ->
      do
      aProm <- (-?..-) -< (aR, acSG)
      iProm <- (-?..-) -< (iR, acSG)
      returnA -< aProm :+ iProm
  fulfilPromiseComplex =
    proc (aProm :+ iProm) ->
      do
      a <- qaFulfilPromiseA -< aProm
      i <- qaFulfilPromiseA -< iProm
      returnA -< a :+ i
  task
    | isFFT = taskFFTA k
    -- | otherwise = taskDFTA k
-- | FFT/DFT in ball arithmetic: retry at increasing precision until the
-- per-component hook confirms the strict accuracy target @acS@.
fft_MP :: Bool -> Integer -> AccuracySG -> Maybe [Complex MPBall]
fft_MP isFFT k _acSG@(AccuracySG acS _) =
  snd $ last $ iterateUntilAccurate acS $ withP
  where
  withP p =
    task
    where
    task
      | isFFT = taskFFTWithHook (\ r -> r * c1) checkCAccuracy k
      | otherwise = taskDFTWithHook (\ r -> r * c1) checkCAccuracy k
    c1 = (mpBallP p 1 :+ mpBallP p 0)
    checkCAccuracy (a :+ i) =
      do
      a2 <- checkAccuracy a
      i2 <- checkAccuracy i
      return $ setPrecision p (a2 :+ i2)
-- | Ball-arithmetic FFT evaluated through the parallel QA arrow.
-- NOTE(review): as with the CR arrow variants, the @otherwise@ branch of
-- @task@ is commented out, so @isFFT = False@ fails at runtime.
fft_MP_parArrow :: Bool -> Integer -> AccuracySG -> Maybe [Complex MPBall]
fft_MP_parArrow isFFT k _acSG@(AccuracySG acS _) =
  snd $ last $ iterateUntilAccurate acS $ withP
  where
  withP p =
    Just $ unsafePerformIO $
      do
      -- (netlog, results) <-
      --   executeQAParAwithLog $
      results <-
        executeQAParA $
          proc () ->
            do
            resultRs <- task -< ()
            promises <- mapA getPromiseComplexA -< resultRs :: [Complex (MPBallWithGlobalPrecA QAParA)]
            mapA fulfilPromiseComplex -< promises
      -- writeNetLogJSON netlog
      -- putStrLn $ printf "p = %s: accuracy = %s" (show p) (show $ getAccuracy results)
      return results
      where
      getPromiseComplexA =
        proc (aR :+ iR) ->
          do
          aProm <- (-?..-) -< (aR, p)
          iProm <- (-?..-) -< (iR, p)
          returnA -< aProm :+ iProm
      fulfilPromiseComplex =
        proc (aProm :+ iProm) ->
          do
          a <- qaFulfilPromiseA -< aProm
          i <- qaFulfilPromiseA -< iProm
          returnA -< a :+ i
      task
        | isFFT = taskFFTA k
        -- | otherwise = taskDFTA k
      -- checkCAccuracy (a :+ i) =
      --   do
      --   a2 <- checkAccuracy a
      --   i2 <- checkAccuracy i
      --   return $ setPrecision p (a2 :+ i2)
-- | Plain floating-point reference (no accuracy guarantees).
fft_FP :: Bool -> Integer -> [Complex Double]
fft_FP True k = taskFFT (\r -> (double r :+ double 0)) k
fft_FP False k = taskDFT (\r -> (double r :+ double 0)) k
|
michalkonecny/aern2
|
aern2-real/attic/bench/BenchMain.hs
|
bsd-3-clause
| 8,818
| 10
| 19
| 2,665
| 2,580
| 1,320
| 1,260
| 198
| 26
|
module LOGL.Objects
(
simpleCube, cubeWithTexture, cubeWithNormals, cubeWithNormalsAndTexture, cubeMesh
)
where
import Graphics.Rendering.OpenGL.GL as GL hiding (normal, position, Vertex)
import LOGL.Mesh
import Linear.V2
import Linear.V3
-- | Build a renderable unit-cube 'Mesh' from the interleaved
-- position\/normal\/texcoord table below (36 vertices, trivial indices).
cubeMesh :: [Texture] -> IO Mesh
cubeMesh = createMesh vertices indices
    where
        indices = [0..35]
        vertices = nextVertex cubeWithNormalsAndTexture
-- Convert the flat 8-floats-per-vertex table into 'Vertex' records.
-- NOTE(review): the pattern is partial — a list whose length is not a
-- multiple of 8 causes a pattern-match failure at runtime.
nextVertex :: [GLfloat] -> [Vertex]
nextVertex [] = []
nextVertex (x:y:z:nx:ny:nz:tx:ty:rest) = v : nextVertex rest
    where
        v = Vertex { position = V3 x y z, normal = V3 nx ny nz, texCoords = V2 tx ty}
cubeWithNormalsAndTexture :: [GLfloat]
cubeWithNormalsAndTexture = [
-0.5, -0.5, -0.5, 0.0, 0.0, -1.0, 0.0, 0.0,
0.5, -0.5, -0.5, 0.0, 0.0, -1.0, 1.0, 0.0,
0.5, 0.5, -0.5, 0.0, 0.0, -1.0, 1.0, 1.0,
0.5, 0.5, -0.5, 0.0, 0.0, -1.0, 1.0, 1.0,
-0.5, 0.5, -0.5, 0.0, 0.0, -1.0, 0.0, 1.0,
-0.5, -0.5, -0.5, 0.0, 0.0, -1.0, 0.0, 0.0,
-0.5, -0.5, 0.5, 0.0, 0.0, 1.0, 0.0, 0.0,
0.5, -0.5, 0.5, 0.0, 0.0, 1.0, 1.0, 0.0,
0.5, 0.5, 0.5, 0.0, 0.0, 1.0, 1.0, 1.0,
0.5, 0.5, 0.5, 0.0, 0.0, 1.0, 1.0, 1.0,
-0.5, 0.5, 0.5, 0.0, 0.0, 1.0, 0.0, 1.0,
-0.5, -0.5, 0.5, 0.0, 0.0, 1.0, 0.0, 0.0,
-0.5, 0.5, 0.5, -1.0, 0.0, 0.0, 1.0, 0.0,
-0.5, 0.5, -0.5, -1.0, 0.0, 0.0, 1.0, 1.0,
-0.5, -0.5, -0.5, -1.0, 0.0, 0.0, 0.0, 1.0,
-0.5, -0.5, -0.5, -1.0, 0.0, 0.0, 0.0, 1.0,
-0.5, -0.5, 0.5, -1.0, 0.0, 0.0, 0.0, 0.0,
-0.5, 0.5, 0.5, -1.0, 0.0, 0.0, 1.0, 0.0,
0.5, 0.5, 0.5, 1.0, 0.0, 0.0, 1.0, 0.0,
0.5, 0.5, -0.5, 1.0, 0.0, 0.0, 1.0, 1.0,
0.5, -0.5, -0.5, 1.0, 0.0, 0.0, 0.0, 1.0,
0.5, -0.5, -0.5, 1.0, 0.0, 0.0, 0.0, 1.0,
0.5, -0.5, 0.5, 1.0, 0.0, 0.0, 0.0, 0.0,
0.5, 0.5, 0.5, 1.0, 0.0, 0.0, 1.0, 0.0,
-0.5, -0.5, -0.5, 0.0, -1.0, 0.0, 0.0, 1.0,
0.5, -0.5, -0.5, 0.0, -1.0, 0.0, 1.0, 1.0,
0.5, -0.5, 0.5, 0.0, -1.0, 0.0, 1.0, 0.0,
0.5, -0.5, 0.5, 0.0, -1.0, 0.0, 1.0, 0.0,
-0.5, -0.5, 0.5, 0.0, -1.0, 0.0, 0.0, 0.0,
-0.5, -0.5, -0.5, 0.0, -1.0, 0.0, 0.0, 1.0,
-0.5, 0.5, -0.5, 0.0, 1.0, 0.0, 0.0, 1.0,
0.5, 0.5, -0.5, 0.0, 1.0, 0.0, 1.0, 1.0,
0.5, 0.5, 0.5, 0.0, 1.0, 0.0, 1.0, 0.0,
0.5, 0.5, 0.5, 0.0, 1.0, 0.0, 1.0, 0.0,
-0.5, 0.5, 0.5, 0.0, 1.0, 0.0, 0.0, 0.0,
-0.5, 0.5, -0.5, 0.0, 1.0, 0.0, 0.0, 1.0]
cubeWithTexture :: [GLfloat]
cubeWithTexture = [
-0.5, -0.5, -0.5, 0.0, 0.0,
0.5, -0.5, -0.5, 1.0, 0.0,
0.5, 0.5, -0.5, 1.0, 1.0,
0.5, 0.5, -0.5, 1.0, 1.0,
-0.5, 0.5, -0.5, 0.0, 1.0,
-0.5, -0.5, -0.5, 0.0, 0.0,
-0.5, -0.5, 0.5, 0.0, 0.0,
0.5, -0.5, 0.5, 1.0, 0.0,
0.5, 0.5, 0.5, 1.0, 1.0,
0.5, 0.5, 0.5, 1.0, 1.0,
-0.5, 0.5, 0.5, 0.0, 1.0,
-0.5, -0.5, 0.5, 0.0, 0.0,
-0.5, 0.5, 0.5, 1.0, 0.0,
-0.5, 0.5, -0.5, 1.0, 1.0,
-0.5, -0.5, -0.5, 0.0, 1.0,
-0.5, -0.5, -0.5, 0.0, 1.0,
-0.5, -0.5, 0.5, 0.0, 0.0,
-0.5, 0.5, 0.5, 1.0, 0.0,
0.5, 0.5, 0.5, 1.0, 0.0,
0.5, 0.5, -0.5, 1.0, 1.0,
0.5, -0.5, -0.5, 0.0, 1.0,
0.5, -0.5, -0.5, 0.0, 1.0,
0.5, -0.5, 0.5, 0.0, 0.0,
0.5, 0.5, 0.5, 1.0, 0.0,
-0.5, -0.5, -0.5, 0.0, 1.0,
0.5, -0.5, -0.5, 1.0, 1.0,
0.5, -0.5, 0.5, 1.0, 0.0,
0.5, -0.5, 0.5, 1.0, 0.0,
-0.5, -0.5, 0.5, 0.0, 0.0,
-0.5, -0.5, -0.5, 0.0, 1.0,
-0.5, 0.5, -0.5, 0.0, 1.0,
0.5, 0.5, -0.5, 1.0, 1.0,
0.5, 0.5, 0.5, 1.0, 0.0,
0.5, 0.5, 0.5, 1.0, 0.0,
-0.5, 0.5, 0.5, 0.0, 0.0,
-0.5, 0.5, -0.5, 0.0, 1.0]
cubeWithNormals :: [GLfloat]
cubeWithNormals = [
-0.5, -0.5, -0.5, 0.0, 0.0, -1.0,
0.5, -0.5, -0.5, 0.0, 0.0, -1.0,
0.5, 0.5, -0.5, 0.0, 0.0, -1.0,
0.5, 0.5, -0.5, 0.0, 0.0, -1.0,
-0.5, 0.5, -0.5, 0.0, 0.0, -1.0,
-0.5, -0.5, -0.5, 0.0, 0.0, -1.0,
-0.5, -0.5, 0.5, 0.0, 0.0, 1.0,
0.5, -0.5, 0.5, 0.0, 0.0, 1.0,
0.5, 0.5, 0.5, 0.0, 0.0, 1.0,
0.5, 0.5, 0.5, 0.0, 0.0, 1.0,
-0.5, 0.5, 0.5, 0.0, 0.0, 1.0,
-0.5, -0.5, 0.5, 0.0, 0.0, 1.0,
-0.5, 0.5, 0.5, -1.0, 0.0, 0.0,
-0.5, 0.5, -0.5, -1.0, 0.0, 0.0,
-0.5, -0.5, -0.5, -1.0, 0.0, 0.0,
-0.5, -0.5, -0.5, -1.0, 0.0, 0.0,
-0.5, -0.5, 0.5, -1.0, 0.0, 0.0,
-0.5, 0.5, 0.5, -1.0, 0.0, 0.0,
0.5, 0.5, 0.5, 1.0, 0.0, 0.0,
0.5, 0.5, -0.5, 1.0, 0.0, 0.0,
0.5, -0.5, -0.5, 1.0, 0.0, 0.0,
0.5, -0.5, -0.5, 1.0, 0.0, 0.0,
0.5, -0.5, 0.5, 1.0, 0.0, 0.0,
0.5, 0.5, 0.5, 1.0, 0.0, 0.0,
-0.5, -0.5, -0.5, 0.0, -1.0, 0.0,
0.5, -0.5, -0.5, 0.0, -1.0, 0.0,
0.5, -0.5, 0.5, 0.0, -1.0, 0.0,
0.5, -0.5, 0.5, 0.0, -1.0, 0.0,
-0.5, -0.5, 0.5, 0.0, -1.0, 0.0,
-0.5, -0.5, -0.5, 0.0, -1.0, 0.0,
-0.5, 0.5, -0.5, 0.0, 1.0, 0.0,
0.5, 0.5, -0.5, 0.0, 1.0, 0.0,
0.5, 0.5, 0.5, 0.0, 1.0, 0.0,
0.5, 0.5, 0.5, 0.0, 1.0, 0.0,
-0.5, 0.5, 0.5, 0.0, 1.0, 0.0,
-0.5, 0.5, -0.5, 0.0, 1.0, 0.0]
simpleCube :: [GLfloat]
simpleCube = [
-0.5, -0.5, -0.5,
0.5, -0.5, -0.5,
0.5, 0.5, -0.5,
0.5, 0.5, -0.5,
-0.5, 0.5, -0.5,
-0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, 0.5,
0.5, 0.5, 0.5,
-0.5, 0.5, 0.5,
-0.5, -0.5, 0.5,
-0.5, 0.5, 0.5,
-0.5, 0.5, -0.5,
-0.5, -0.5, -0.5,
-0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, 0.5, 0.5,
0.5, 0.5, 0.5,
0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
0.5, -0.5, -0.5,
0.5, -0.5, 0.5,
0.5, 0.5, 0.5,
-0.5, -0.5, -0.5,
0.5, -0.5, -0.5,
0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
-0.5, -0.5, 0.5,
-0.5, -0.5, -0.5,
-0.5, 0.5, -0.5,
0.5, 0.5, -0.5,
0.5, 0.5, 0.5,
0.5, 0.5, 0.5,
-0.5, 0.5, 0.5,
-0.5, 0.5, -0.5]
|
atwupack/LearnOpenGL
|
src/LOGL/Objects.hs
|
bsd-3-clause
| 6,033
| 0
| 14
| 1,929
| 3,159
| 1,996
| 1,163
| 167
| 1
|
module FileStat
( FileStat
, createFileStat
, getFSPath
, getFSChanges
, getFSAdditions
, getFSDeletions
) where
-- | Line-change statistics for a single file (path plus added and deleted
-- line counts).
data FileStat = FileStat { path :: FilePath
                         , additions :: Int
                         , deletions :: Int
                         } deriving (Show)

-- | Build a 'FileStat' from textual addition\/deletion counts and a path.
-- NOTE(review): the counts are parsed with 'read', so non-numeric input
-- raises an exception — confirm callers only pass digit strings.
createFileStat :: String -> String -> String -> FileStat
createFileStat adds dels file =
    FileStat { path = file
             , additions = read adds
             , deletions = read dels
             }

-- | The file's path.
getFSPath :: FileStat -> String
getFSPath = path

-- | Additions and deletions as a pair.
getFSChanges :: FileStat -> (Int, Int)
getFSChanges stat = (additions stat, deletions stat)

-- | Number of added lines.
getFSAdditions :: FileStat -> Int
getFSAdditions = additions

-- | Number of deleted lines.
getFSDeletions :: FileStat -> Int
getFSDeletions = deletions
|
LFDM/hstats
|
src/lib/FileStat.hs
|
bsd-3-clause
| 697
| 0
| 8
| 178
| 196
| 114
| 82
| 21
| 1
|
module UnbalancedTree where
import Bit
-- | A size-annotated binary tree: 'Leaf' counts as size 1, and each
-- 'Branch' caches the combined size of its two subtrees, so size equals
-- the number of leaves.
data UnbalancedTree = Leaf | Branch Int UnbalancedTree UnbalancedTree
    deriving Show
-- O(1) lookup of the cached size.
sizeOf Leaf = 1
sizeOf (Branch size _ _) = size
-- Perfect tree of the given depth; it has 2^depth leaves.
newSubTree 0 = Leaf
newSubTree depth =
    let t = newSubTree (depth - 1)
    in Branch (2 * sizeOf t) t t
-- | Build a tree from the bit expansion of a natural number (via
-- 'natToBits'): a True bit grafts a perfect subtree on the left, a False
-- bit on the right, recursing on the remaining bits on the other side.
unbalancedSubTree = unbalancedSubTree' . natToBits
unbalancedSubTree' [] = Leaf
unbalancedSubTree' (True:bs) =
    let l = newSubTree (1 + length bs)
        r = unbalancedSubTree' bs
    in Branch (sizeOf l + sizeOf r) l r
unbalancedSubTree' (False:bs) =
    let l = unbalancedSubTree' bs
        r = newSubTree (length bs)
    in Branch (sizeOf l + sizeOf r) l r
-- | Walk a bit path down the tree (False = left, True = right), counting
-- the leaves that precede the reached position.  On reaching a 'Leaf' the
-- given @subTree@ is grafted in and the unconsumed bits are returned; if
-- the bits run out inside a 'Branch' the result carries no updated tree.
-- Every branch on a successful path has its cached size bumped by one.
bitsToIndex subTree Leaf bs =
    (0, Just (subTree, bs))
bitsToIndex _subTree (Branch _ _ _) [] =
    (1, Nothing)
bitsToIndex subTree (Branch size left right) (False:bs) =
    case bitsToIndex subTree left bs of
        (index, Just (left', bs')) -> (index, Just (Branch (size+1) left' right, bs'))
        (index, Nothing) -> (index + 1, Nothing)
bitsToIndex subTree (Branch size left right) (True:bs) =
    case bitsToIndex subTree right bs of
        (index, Just (right', bs')) -> (index + sizeOf left, Just (Branch (size+1) left right', bs'))
        (index, Nothing) -> (index + sizeOf left, Nothing)
-- | Inverse walk of 'bitsToIndex': turn a leaf index back into the bit
-- path reaching it, grafting @subTree@ at the visited leaf and bumping
-- the size of every branch on the path.
indexToBits subTree Leaf _index = ([], subTree)
indexToBits subTree (Branch size left right) index =
    if index >= sizeOf left
    then let (bs, right') = indexToBits subTree right (index - sizeOf left)
         in(True : bs, Branch (size+1) left right')
    else let (bs, left') = indexToBits subTree left index
         in(False : bs, Branch (size+1) left' right)
--does not change tree
-- | Bit path of the position numbered @index@ (1-based) within the tree;
-- errors when the index exceeds the tree size.
internalIndexToBits _ 1 = []
internalIndexToBits (Branch _ left right) index =
    if index > sizeOf left
    then True : internalIndexToBits right (index - sizeOf left)
    else False : internalIndexToBits left (index - 1)
internalIndexToBits Leaf _index = error "index is not internal to tree, it is too large."
---------------------------------------------
-- second argument should initially be 1
-- returns either
-- (x, Just bs) x in [0, max)
-- (y, Nothing) y in [1, max)
-- Accumulates bits (via 'doubleIf') until the value reaches @max@, then
-- yields the excess and the remaining bits; 'Nothing' means the input
-- ended mid-word.
prefixCodeToInt 1 bs = (0, Just bs)
prefixCodeToInt max bs = pCTI max 1 bs where
  pCTI max n (b:bs) =
    let n' = doubleIf n b
    in if n' >= max
      then (n' - max, Just bs)
      else pCTI max n' bs
  pCTI _ n [] = (n, Nothing)
-----------------------------------------------------------
-- second argument should initially be 0
-- returns either
-- (x, Just bs) x in [0, (base - 1) * max]
-- (y, Nothing) y in [0, max)
nonbinaryPrefixCodeToInt _base 0 _ bs = (0, Just bs)
nonbinaryPrefixCodeToInt base max n (x:xs) =
let n' = base * n + x
in if n' >= max
then (n' - max, Just xs)
else nonbinaryPrefixCodeToInt base max n' xs
nonbinaryPrefixCodeToInt _base _max n [] = (n, Nothing)
-----------------------------------------------------------
codeToIndices incr max bs =
case prefixCodeToInt max bs of
(index, Just bs') -> index : codeToIndices incr (max + incr) bs'
(index, Nothing) -> [index]
indicesToCode _incr _max [] = []
indicesToCode _incr _max (n:[]) = [natToBits n]
indicesToCode incr max (n:ns) = natToBits (n + max) : indicesToCode incr (max + incr) ns
------------------------------------------------------
-- | Turn a bit stream into leaf indices: repeatedly walk the
-- adaptively-grown tree with 'bitsToIndex', grafting @subTree@ at
-- each leaf reached, until the bits are exhausted.
computeIndices subTree tree bs =
  case bitsToIndex subTree tree bs of
    (index, Just (tree', bs')) -> index : computeIndices subTree tree' bs'
    (index, Nothing) -> [index]

-- | Inverse of 'computeIndices': replay the indices against the same
-- adaptively-grown tree to recover the original bit paths.
translateIndices _subTree _tree [] = []
translateIndices _subTree tree (n:[]) = [internalIndexToBits tree n]
translateIndices subTree tree (n:ns) =
  let (bits, tree') = indexToBits subTree tree n
  in bits : translateIndices subTree tree' ns
-----------------------------------------------------------------------------------------------
-- Top-level coder.  Indices are prefix-coded with a bound growing by
-- (sizeOf subTree - 1) per symbol, matching the tree growth; encode
-- and decode appear to be mutual inverses (TODO confirm by test).
encode subTree = concat . indicesToCode (sizeOf subTree - 1) 1 . computeIndices subTree Leaf
decode subTree = concat . translateIndices subTree Leaf . codeToIndices (sizeOf subTree - 1) 1

-- | Convenience wrappers using a perfect subtree of the given depth.
depthEncode depth = encode (newSubTree depth)
depthDecode depth = decode (newSubTree depth)
{-
natEncode max = codeToIndices 0 max .
encode (unbalancedSubTree max) .
concatMap (natToBits . (+) max)
natDecode max = codeToIndices 0 max .
decode (unbalancedSubTree max) .
concatMap (natToBits . (+) max)
-}
|
cullina/Extractor
|
src/UnbalancedTree.hs
|
bsd-3-clause
| 4,580
| 0
| 13
| 1,140
| 1,500
| 776
| 724
| 80
| 3
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
module React.Flux.Mui.Card.CardTitle where
import Protolude
import Data.Aeson
import Data.Aeson.Casing
import Data.String (String)
import React.Flux
import React.Flux.Mui.Util
-- | Props accepted by the Material-UI @CardTitle@ component.  Every
-- field is optional; 'Nothing' means "let the component use its
-- default".
data CardTitle = CardTitle
  { cardTitleActAsExpander :: !(Maybe Bool)
  , cardTitleExpandable :: !(Maybe Bool)
  , cardTitleShowExpandableButton :: !(Maybe Bool)
  , cardTitleSubtitleColor :: !(Maybe Text)
  , cardTitleTitleColor :: !(Maybe Text)
  } deriving (Generic, Show)

-- Serialised with the 9-character "cardTitle" prefix dropped and the
-- remainder camel-cased, e.g. cardTitleTitleColor -> "titleColor".
instance ToJSON CardTitle where
  toJSON = genericToJSON $ aesonDrop (length ("CardTitle" :: String)) camelCase

-- | All-defaults value: no prop is set explicitly.
defCardTitle :: CardTitle
defCardTitle =
  CardTitle
  { cardTitleActAsExpander = Nothing
  , cardTitleExpandable = Nothing
  , cardTitleShowExpandableButton = Nothing
  , cardTitleSubtitleColor = Nothing
  , cardTitleTitleColor = Nothing
  }

-- | Render a @CardTitle@ element: the encoded props from the record
-- are merged with any extra properties or event handlers, and the
-- third argument supplies the children.
cardTitle_ ::
     CardTitle
  -> [PropertyOrHandler handler]
  -> ReactElementM handler ()
  -> ReactElementM handler ()
cardTitle_ args props =
  foreign_ "CardTitle" (fromMaybe [] (toProps args) ++ props)
|
pbogdan/react-flux-mui
|
react-flux-mui/src/React/Flux/Mui/Card/CardTitle.hs
|
bsd-3-clause
| 1,089
| 0
| 11
| 172
| 280
| 158
| 122
| 43
| 1
|
{-# LANGUAGE RecordWildCards #-}
module Pos.Core.Common.Script
( Script (..)
, ScriptVersion
) where
import Universum
import Data.Aeson (FromJSON (..), ToJSON (toJSON), object, withObject,
(.:), (.=))
import Data.SafeCopy (base, deriveSafeCopySimple)
import Formatting (bprint, int, (%))
import qualified Formatting.Buildable as Buildable
import Serokell.Util.Base64 (JsonByteString (..))
import qualified Pos.Binary.Class as Bi
-- | Version of script
type ScriptVersion = Word16

-- | A script for inclusion into a transaction.
data Script = Script
    { scrVersion :: ScriptVersion -- ^ Version
    , scrScript :: ByteString -- ^ Serialized script
    } deriving (Eq, Show, Generic, Typeable)

instance NFData Script
instance Hashable Script

-- Renders only the version, e.g. @<script v0>@; the payload is
-- treated as opaque bytes.
instance Buildable Script where
    build Script{..} = bprint ("<script v"%int%">") scrVersion

-- JSON object with a "version" field and the script bytes wrapped in
-- 'JsonByteString' (base64, per Serokell.Util.Base64).
instance ToJSON Script where
    toJSON Script{..} = object [
        "version" .= scrVersion,
        "script" .= JsonByteString scrScript ]

instance FromJSON Script where
    parseJSON = withObject "Script" $ \obj -> do
        scrVersion <- obj .: "version"
        scrScript <- getJsonByteString <$> obj .: "script"
        pure $ Script {..}

-- Binary instance generated field-by-field via Template Haskell.
Bi.deriveSimpleBi ''Script [
    Bi.Cons 'Script [
        Bi.Field [| scrVersion :: ScriptVersion |],
        Bi.Field [| scrScript :: ByteString |]
    ]]

deriveSafeCopySimple 0 'base ''Script
|
input-output-hk/pos-haskell-prototype
|
core/src/Pos/Core/Common/Script.hs
|
mit
| 1,493
| 0
| 12
| 378
| 387
| 224
| 163
| -1
| -1
|
{-# LANGUAGE CPP #-}
module WeiXin.PublicPlatform
( module WeiXin.PublicPlatform.Error
, module WeiXin.PublicPlatform.Types
, module WeiXin.PublicPlatform.Class
, module WeiXin.PublicPlatform.WS
#if defined(VERSION_acid_state)
, module WeiXin.PublicPlatform.Acid
#endif
, module WeiXin.PublicPlatform.Security
, module WeiXin.PublicPlatform.Media
, module WeiXin.PublicPlatform.Material
, module WeiXin.PublicPlatform.AutoReplyRules
, module WeiXin.PublicPlatform.Message
, module WeiXin.PublicPlatform.Message.Template
, module WeiXin.PublicPlatform.InMsgHandler
, module WeiXin.PublicPlatform.Menu
, module WeiXin.PublicPlatform.CS
, module WeiXin.PublicPlatform.QRCode
, module WeiXin.PublicPlatform.EndUser
, module WeiXin.PublicPlatform.EndUser.Tag
, module WeiXin.PublicPlatform.Propagate
, module WeiXin.PublicPlatform.Yesod.Utils
, module WeiXin.PublicPlatform.Yesod.Site
, module WeiXin.PublicPlatform.Yesod.Types
, module WeiXin.PublicPlatform.Yesod.Model
, module WeiXin.PublicPlatform.Yesod.Site.Function
, module WeiXin.PublicPlatform.Yesod.Site.Data
, module WeiXin.PublicPlatform.BgWork
-- , module WeiXin.PublicPlatform.Utils
, module WeiXin.PublicPlatform.Misc
, module WeiXin.PublicPlatform.Conversation
, module WeiXin.PublicPlatform.Conversation.Yesod
, module WeiXin.PublicPlatform.Conversation.Message
-- , module WeiXin.PublicPlatform.Conversation.TextParser
, module WeiXin.PublicPlatform.Center
, module WeiXin.PublicPlatform.OAuth
, module WeiXin.PublicPlatform.JS
, module WeiXin.PublicPlatform.Pay
, module WeiXin.PublicPlatform.ThirdParty
#if defined(CLOUD_HASKELL)
, module WeiXin.PublicPlatform.CloudHaskell
#endif
) where
import WeiXin.PublicPlatform.Error
import WeiXin.PublicPlatform.Types
import WeiXin.PublicPlatform.Class
import WeiXin.PublicPlatform.WS
#if defined(VERSION_acid_state)
import WeiXin.PublicPlatform.Acid
#endif
import WeiXin.PublicPlatform.Security
import WeiXin.PublicPlatform.Media
import WeiXin.PublicPlatform.Material
import WeiXin.PublicPlatform.AutoReplyRules
import WeiXin.PublicPlatform.Message
import WeiXin.PublicPlatform.Message.Template
import WeiXin.PublicPlatform.InMsgHandler
import WeiXin.PublicPlatform.Menu
import WeiXin.PublicPlatform.CS
import WeiXin.PublicPlatform.QRCode
import WeiXin.PublicPlatform.EndUser
import WeiXin.PublicPlatform.EndUser.Tag
import WeiXin.PublicPlatform.Propagate
import WeiXin.PublicPlatform.Yesod.Utils
import WeiXin.PublicPlatform.Yesod.Site
import WeiXin.PublicPlatform.Yesod.Types
import WeiXin.PublicPlatform.Yesod.Model
import WeiXin.PublicPlatform.Yesod.Site.Function
import WeiXin.PublicPlatform.Yesod.Site.Data
import WeiXin.PublicPlatform.BgWork
-- import WeiXin.PublicPlatform.Utils
import WeiXin.PublicPlatform.Misc
import WeiXin.PublicPlatform.Conversation
import WeiXin.PublicPlatform.Conversation.Yesod
import WeiXin.PublicPlatform.Conversation.Message
-- import WeiXin.PublicPlatform.Conversation.TextParser
import WeiXin.PublicPlatform.Center
import WeiXin.PublicPlatform.OAuth
import WeiXin.PublicPlatform.JS
import WeiXin.PublicPlatform.Pay
import WeiXin.PublicPlatform.ThirdParty
#if defined(CLOUD_HASKELL)
import WeiXin.PublicPlatform.CloudHaskell
#endif
|
yoo-e/weixin-mp-sdk
|
WeiXin/PublicPlatform.hs
|
mit
| 3,330
| 0
| 5
| 363
| 500
| 357
| 143
| 68
| 0
|
-- | Read numbers from a file with a just a number on each line, find the
-- minimum of those numbers. The file contains different kinds of numbers:
--
-- * Decimals
--
-- * Hexadecimals
--
-- * Floating point numbers
--
-- * Floating point numbers in scientific notation
--
-- The different benchmarks will only take into account the values they can
-- parse.
--
-- Tested in this benchmark:
--
-- * Lexing/parsing of different numerical types
--
module Benchmarks.ReadNumbers
( benchmark
) where
import Criterion (Benchmark, bgroup, bench, whnf)
import Data.List (foldl')
import Numeric (readDec, readFloat, readHex)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.ByteString.Lex.Fractional as B
import qualified Data.Text as T
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.IO as TL
import qualified Data.Text.Lazy.Read as TL
import qualified Data.Text.Read as T
-- | Build the "ReadNumbers" benchmark group for the given input file.
-- The file is read once per representation (String, strict/lazy Text,
-- strict/lazy ByteString); each benchmark parses every line and folds
-- the successfully parsed values with 'min'.
benchmark :: FilePath -> IO Benchmark
benchmark fp = do
    -- Read all files into lines: string, text, lazy text, bytestring, lazy
    -- bytestring
    s <- lines `fmap` readFile fp
    t <- T.lines `fmap` T.readFile fp
    tl <- TL.lines `fmap` TL.readFile fp
    b <- B.lines `fmap` B.readFile fp
    bl <- BL.lines `fmap` BL.readFile fp
    return $ bgroup "ReadNumbers"
        [ bench "DecimalString" $ whnf (int . string readDec) s
        , bench "HexadecimalString" $ whnf (int . string readHex) s
        , bench "DoubleString" $ whnf (double . string readFloat) s
        , bench "DecimalText" $ whnf (int . text (T.signed T.decimal)) t
        , bench "HexadecimalText" $ whnf (int . text (T.signed T.hexadecimal)) t
        , bench "DoubleText" $ whnf (double . text T.double) t
        , bench "RationalText" $ whnf (double . text T.rational) t
        , bench "DecimalLazyText" $
            whnf (int . text (TL.signed TL.decimal)) tl
        , bench "HexadecimalLazyText" $
            whnf (int . text (TL.signed TL.hexadecimal)) tl
        , bench "DoubleLazyText" $
            whnf (double . text TL.double) tl
        , bench "RationalLazyText" $
            whnf (double . text TL.rational) tl
        , bench "DecimalByteString" $ whnf (int . byteString B.readInt) b
        , bench "DoubleByteString" $ whnf (double . byteString B.readDecimal) b
        , bench "DecimalLazyByteString" $
            whnf (int . byteString BL.readInt) bl
        ]
  where
    -- Used for fixing types
    int :: Int -> Int
    int = id
    double :: Double -> Double
    double = id
    -- Fold a ReadS-style parser over the lines, keeping the minimum
    -- parsed value; unparsable lines are skipped.  The 1000000 seed
    -- gives all three folds the same starting point.
    string :: (Ord a, Num a) => (t -> [(a, t)]) -> [t] -> a
    string reader = foldl' go 1000000
      where
        go z t = case reader t of [(n, _)] -> min n z
                                  _ -> z
    -- Same, for Either-returning readers (Data.Text.Read style).
    text :: (Ord a, Num a) => (t -> Either String (a,t)) -> [t] -> a
    text reader = foldl' go 1000000
      where
        go z t = case reader t of Left _ -> z
                                  Right (n, _) -> min n z
    -- Same, for Maybe-returning readers (bytestring-lexing style).
    byteString :: (Ord a, Num a) => (t -> Maybe (a,t)) -> [t] -> a
    byteString reader = foldl' go 1000000
      where
        go z t = case reader t of Nothing -> z
                                  Just (n, _) -> min n z
|
text-utf8/text
|
benchmarks/haskell/Benchmarks/ReadNumbers.hs
|
bsd-2-clause
| 3,232
| 0
| 17
| 892
| 1,039
| 561
| 478
| 57
| 2
|
{-# LANGUAGE CPP, DeriveDataTypeable #-}
{-# OPTIONS -Wall #-}
-- | An 'Annotation' that selects whether the data should be
-- stored globally on memory or to be calculated.
module Language.Paraiso.Annotation.Allocation (
Allocation(..), AllocationChoice(..)
) where
import Data.Dynamic
import Language.Paraiso.Prelude
import Prelude (Eq, Show)
-- | Storage strategy chosen for one entity.
data Allocation
  = Existing -- ^ This entity is already allocated as a static variable.
  | Manifest -- ^ Allocate additional memory for this entity.
  | Delayed  -- ^ Do not allocate, re-compute it whenever if needed.
  deriving (Eq, Show, Typeable)

-- | The list of 'Allocation's permissible for an entity.
data AllocationChoice = AllocationChoice [Allocation]
  deriving (Eq, Show, Typeable)
|
nushio3/Paraiso
|
Language/Paraiso/Annotation/Allocation.hs
|
bsd-3-clause
| 693
| 0
| 7
| 115
| 108
| 69
| 39
| 14
| 0
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Encoding.Failure
-- Copyright : (c) The University of Glasgow, 2008-2011
-- License : see libraries/base/LICENSE
--
-- Maintainer : libraries@haskell.org
-- Stability : internal
-- Portability : non-portable
--
-- Types for specifying how text encoding/decoding fails
--
-----------------------------------------------------------------------------
module GHC.IO.Encoding.Failure (
CodingFailureMode(..), codingFailureModeSuffix,
isSurrogate,
recoverDecode, recoverEncode
) where
import GHC.IO
import GHC.IO.Buffer
import GHC.IO.Exception
import GHC.Base
import GHC.Char
import GHC.Word
import GHC.Show
import GHC.Num
import GHC.Real ( fromIntegral )
--import System.Posix.Internals
-- | The 'CodingFailureMode' is used to construct 'TextEncoding's, and
-- specifies how they handle illegal sequences.
data CodingFailureMode
= ErrorOnCodingFailure
-- ^ Throw an error when an illegal sequence is encountered
| IgnoreCodingFailure
-- ^ Attempt to ignore and recover if an illegal sequence is
-- encountered
| TransliterateCodingFailure
-- ^ Replace with the closest visual match upon an illegal
-- sequence
| RoundtripFailure
-- ^ Use the private-use escape mechanism to attempt to allow
-- illegal sequences to be roundtripped.
deriving (Show)
-- This will only work properly for those encodings which are
-- strict supersets of ASCII in the sense that valid ASCII data
-- is also valid in that encoding. This is not true for
-- e.g. UTF-16, because ASCII characters must be padded to two
-- bytes to retain their meaning.
-- Note [Roundtripping]
-- ~~~~~~~~~~~~~~~~~~~~
--
-- Roundtripping is based on the ideas of PEP383.
--
-- We used to use the range of private-use characters from 0xEF80 to
-- 0xEFFF designated for "encoding hacks" by the ConScript Unicode Registry
-- to encode these characters.
--
-- However, people didn't like this because it means we don't get
-- guaranteed roundtripping for byte sequences that look like a UTF-8
-- encoded codepoint 0xEFxx.
--
-- So now like PEP383 we use lone surrogate codepoints 0xDCxx to escape
-- undecodable bytes, even though that may confuse Unicode processing
-- software written in Haskell. This guarantees roundtripping because
-- unicode input that includes lone surrogate codepoints is invalid by
-- definition.
--
-- When we used private-use characters there was a technical problem when it
-- came to encoding back to bytes using iconv. The iconv code will not fail when
-- it tries to encode a private-use character (as it would if trying to encode
-- a surrogate), which means that we wouldn't get a chance to replace it
-- with the byte we originally escaped.
--
-- To work around this, when filling the buffer to be encoded (in
-- writeBlocks/withEncodedCString/newEncodedCString), we replaced the
-- private-use characters with lone surrogates again! Likewise, when
-- reading from a buffer (unpack/unpack_nl/peekEncodedCString) we had
-- to do the inverse process.
--
-- The user of String would never see these lone surrogates, but it
-- ensured that iconv will throw an error when encountering them. We
-- used lone surrogates in the range 0xDC00 to 0xDCFF for this purpose.
-- | The iconv-style suffix appended to an encoding name for each
-- failure mode (e.g. @\"UTF-8//IGNORE\"@).  'ErrorOnCodingFailure' is
-- the default behaviour and therefore has no suffix.
codingFailureModeSuffix :: CodingFailureMode -> String
codingFailureModeSuffix cfm = case cfm of
    ErrorOnCodingFailure       -> ""
    IgnoreCodingFailure        -> "//IGNORE"
    TransliterateCodingFailure -> "//TRANSLIT"
    RoundtripFailure           -> "//ROUNDTRIP"
-- | In transliterate mode, we use this character when decoding
-- unknown bytes.
--
-- This is the defined Unicode replacement character:
-- <http://www.fileformat.info/info/unicode/char/0fffd/index.htm>
unrepresentableChar :: Char
unrepresentableChar = '\xFFFD'
-- It is extraordinarily important that this series of
-- predicates/transformers gets inlined, because they tend to be used
-- in inner loops related to text encoding. In particular,
-- surrogatifyRoundtripCharacter must be inlined (see #5536)
-- | Some characters are actually "surrogate" codepoints defined for
-- use in UTF-16. We need to signal an invalid character if we detect
-- them when encoding a sequence of 'Char's into 'Word8's because they
-- won't give valid Unicode.
--
-- We may also need to signal an invalid character if we detect them
-- when encoding a sequence of 'Char's into 'Word8's because the
-- 'RoundtripFailure' mode creates these to round-trip bytes through
-- our internal UTF-16 encoding.
{-# INLINE isSurrogate #-}
isSurrogate :: Char -> Bool
isSurrogate c = (0xD800 <= x && x <= 0xDBFF)
             || (0xDC00 <= x && x <= 0xDFFF)
  where x = ord c

-- Bytes (in Buffer Word8) --> lone surrogates (in Buffer CharBufElem)
--
-- Escape an undecodable byte as a lone surrogate: bytes >= 128 map
-- into 0xDC80..0xDCFF (PEP 383 style), while plain ASCII bytes pass
-- through unchanged.
{-# INLINE escapeToRoundtripCharacterSurrogate #-}
escapeToRoundtripCharacterSurrogate :: Word8 -> Char
escapeToRoundtripCharacterSurrogate b
  | b < 128   = chr (fromIntegral b)
      -- Disallow 'smuggling' of ASCII bytes. For roundtripping to
      -- work, this assumes encoding is ASCII-superset.
  | otherwise = chr (0xDC00 + fromIntegral b)

-- Lone surrogates (in Buffer CharBufElem) --> bytes (in Buffer Word8)
--
-- Inverse of 'escapeToRoundtripCharacterSurrogate': recover the
-- escaped byte from a lone surrogate in 0xDC80..0xDCFF, or 'Nothing'
-- for any other character.
{-# INLINE unescapeRoundtripCharacterSurrogate #-}
unescapeRoundtripCharacterSurrogate :: Char -> Maybe Word8
unescapeRoundtripCharacterSurrogate c
    | 0xDC80 <= x && x < 0xDD00 = Just (fromIntegral x) -- Discard high byte
    | otherwise                 = Nothing
  where x = ord c
-- | Recover from a decoding failure according to the failure mode.
-- Called when the decoder stopped at an illegal byte at @bufL@ of the
-- input: exactly one input byte is consumed (@bufL=ir+1@) and at most
-- one character is written to the output.
recoverDecode :: CodingFailureMode -> Buffer Word8 -> Buffer Char
              -> IO (Buffer Word8, Buffer Char)
recoverDecode cfm input@Buffer{  bufRaw=iraw, bufL=ir, bufR=_  }
                  output@Buffer{ bufRaw=oraw, bufL=_,  bufR=ow } = do
 --puts $ "recoverDecode " ++ show ir
 case cfm of
  ErrorOnCodingFailure       -> ioe_decodingError
  -- Skip the offending byte, emit nothing.
  IgnoreCodingFailure        -> return (input { bufL=ir+1 }, output)
  -- Emit U+FFFD in place of the offending byte.
  TransliterateCodingFailure -> do
      ow' <- writeCharBuf oraw ow unrepresentableChar
      return (input { bufL=ir+1 }, output { bufR=ow' })
  -- Emit a lone-surrogate escape so the byte can be round-tripped.
  RoundtripFailure           -> do
      b <- readWord8Buf iraw ir
      ow' <- writeCharBuf oraw ow (escapeToRoundtripCharacterSurrogate b)
      return (input { bufL=ir+1 }, output { bufR=ow' })
-- | Recover from an encoding failure according to the failure mode.
-- Called when the encoder stopped at an unencodable character at
-- @bufL@ of the input.  'ErrorOnCodingFailure', and 'RoundtripFailure'
-- applied to a character that is not one of our surrogate escapes,
-- fall through to the catch-all and raise.
recoverEncode :: CodingFailureMode -> Buffer Char -> Buffer Word8
              -> IO (Buffer Char, Buffer Word8)
recoverEncode cfm input@Buffer{  bufRaw=iraw, bufL=ir, bufR=_  }
                  output@Buffer{ bufRaw=oraw, bufL=_,  bufR=ow } = do
  (c,ir') <- readCharBuf iraw ir
  --puts $ "recoverEncode " ++ show ir ++ " " ++ show ir'
  case cfm of
    IgnoreCodingFailure        -> return (input { bufL=ir' }, output)
    TransliterateCodingFailure -> do
        if c == '?'
         then return (input { bufL=ir' }, output)
         else do
          -- XXX: evil hack! To implement transliteration, we just
          -- poke an ASCII ? into the input buffer and tell the caller
          -- to try and decode again. This is *probably* safe given
          -- current uses of TextEncoding.
          --
          -- The "if" test above ensures we skip if the encoding fails
          -- to deal with the ?, though this should never happen in
          -- practice as all encodings are in fact capable of
          -- representing all ASCII characters.
          _ir' <- writeCharBuf iraw ir '?'
          return (input, output)

          -- This implementation does not work because e.g. UTF-16
          -- requires 2 bytes to encode a simple ASCII value
          --writeWord8Buf oraw ow unrepresentableByte
          --return (input { bufL=ir' }, output { bufR=ow+1 })
    -- Unescape our lone-surrogate byte escape back to the raw byte.
    RoundtripFailure | Just x <- unescapeRoundtripCharacterSurrogate c -> do
        writeWord8Buf oraw ow x
        return (input { bufL=ir' }, output { bufR=ow+1 })
    _                          -> ioe_encodingError

ioe_decodingError :: IO a
ioe_decodingError = ioException
    (IOError Nothing InvalidArgument "recoverDecode"
        "invalid byte sequence" Nothing Nothing)

ioe_encodingError :: IO a
ioe_encodingError = ioException
    (IOError Nothing InvalidArgument "recoverEncode"
        "invalid character" Nothing Nothing)
|
rahulmutt/ghcvm
|
libraries/base/GHC/IO/Encoding/Failure.hs
|
bsd-3-clause
| 8,360
| 0
| 17
| 1,756
| 1,049
| 603
| 446
| 83
| 5
|
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS -Wall #-}
module Sample (
helloWorld
) where
import Claris as C
import Data.Dynamic
import Util
-- | 'TypeRep' for 'Int'; used below as the return type of @main@.
tInt :: TypeRep
tInt = typeOf (undefined :: Int)

-- | A Claris program that prints \"Hello, world!\": it includes
-- @<iostream>@ and defines a @main@ function whose body streams the
-- message to @std::cout@ and returns 0.
helloWorld :: C.Program
helloWorld =
  C.Program {
    progName = "hello",
    topLevel =
      [PragmaDecl $ PragmaInclude "iostream" False Chevron,
       FuncDef $ Function "main" [] tInt [] body]
  }
  where
    body =
      [StmtExpr coutExpr,
       StmtReturn $ Imm $ toDyn (0::Int) ]
    cout = VarExpr $ Var unknownType "std::cout"
    endl = VarExpr $ Var unknownType "std::endl"
    message = Imm $ toDyn ("Hello, world!"::Text)
    -- Local operator building the C++ "<<" stream expression.
    infixl 1 <<
    (<<) = Op2Infix "<<"
    coutExpr = cout << message << endl
|
nushio3/Paraiso
|
attic/Protoclaris/Sample.hs
|
bsd-3-clause
| 740
| 0
| 10
| 201
| 214
| 121
| 93
| 25
| 1
|
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/CaseInsensitive/Unsafe.hs" #-}
{-# LANGUAGE CPP, NoImplicitPrelude #-}
{-# LANGUAGE Unsafe #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.CaseInsensitive.Unsafe
-- Copyright : (c) 2011-2013 Bas van Dijk
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Bas van Dijk <v.dijk.bas@gmail.com>
--
-- Provides an unsafe way to create a case insensitive string-like value.
--
-----------------------------------------------------------------------------
module Data.CaseInsensitive.Unsafe ( unsafeMk ) where
import Data.CaseInsensitive.Internal ( unsafeMk )
|
phischu/fragnix
|
tests/packages/scotty/Data.CaseInsensitive.Unsafe.hs
|
bsd-3-clause
| 743
| 0
| 5
| 153
| 37
| 29
| 8
| 6
| 0
|
---Factorial-------------------
-- | Factorial by direct recursion.  Like the original, this diverges
-- for negative input.
myFactorial :: Int -> Int
myFactorial n
  | n == 0    = 1
  | otherwise = n * myFactorial (n - 1)
---------Add 2 Vectors-----------
-- | Component-wise sum of two 2-D vectors.
addVectors :: (Double, Double)->(Double, Double)->(Double,Double)
addVectors (ax, ay) (bx, by) = (ax + bx, ay + by)
--------Third element in tuple---
-- | Project the third component of a 3-tuple.
third :: (a,b,c)->c
third (_,_,c) = c
-------As Pattern----------------
-- | Demonstrates an as-pattern.
-- NOTE(review): partial — pattern-match failure on strings shorter
-- than two characters.
asPattern :: String -> String
asPattern all@(x:y:ys) = all++" shows "++[x,y]++ys
------Initials-------------------
-- | First letter of each name, dot-separated.
-- NOTE(review): partial — the where-bindings crash on empty names.
initials :: String->String->String
initials fname lname = [f] ++ "." ++ [l] ++ "."
  where (f:_) = fname
        (l:_) = lname
------Square List----------------
-- | Square every element of the list.
--
-- The original definition only matched the empty list (and, oddly,
-- returned @[4]@ for it via a hard-coded @[2]@ comprehension), so it
-- crashed with a pattern-match failure on every non-empty input.
-- This version is total and squares the actual elements.
squareList :: [Int] -> [Int]
squareList xs = [mysquare x | x <- xs]
  where mysquare x = x*x
-------Describe List-------------
-- | Classify a list as empty, singleton, or longer.
describeList :: [a]->String
describeList ls = "The List is " ++ case ls of [] -> "empty"
                                               [x] -> "singleton"
                                               xs -> "long list"
------Replicate------------------
-- | Re-implementation of 'replicate'; returns [] for counts <= 0.
myReplicate :: Int->a->[a]
myReplicate a b |a<=0 = []
                |otherwise = (b:myReplicate (a-1) b)
------Take-----------------------
-- | Re-implementation of 'take'; returns [] for counts <= 0.
myTake :: Int -> [a] -> [a]
myTake _ [] = []
myTake n (x:xs)
  |n<=0 = []
  |otherwise = x:(myTake (n-1) xs)
-------Compare with 100----------
-- | Partial application of 'compare'.  Note the argument order:
-- this is @compare 100 x@, so the result is LT when x > 100.
compareWith100 :: Int->Ordering
compareWith100 = compare 100
-------HOF apply twice-----------
-- | Apply a function twice: @applyTwice f = f . f@.
applyTwice :: (a->a)->a->a
applyTwice f x = f (f x)
-------Zip with hof--------------
-- | Re-implementation of 'zipWith': combine the two lists
-- element-wise, stopping at the shorter one.
--
-- Fixes a bug in the original, which recursed with @zipWith' f xs xs@
-- (the first list's tail twice) instead of @zipWith' f xs ys@, so the
-- second list's tail was never consumed.  That bug is also why the
-- type signature had to be commented out (it forced the two element
-- types to unify); with the fix the signature type-checks again.
zipWith' :: (a->b->c)->[a]->[b]->[c]
zipWith' _ [] _ = []
zipWith' _ _ [] = []
zipWith' f (x:xs) (y:ys) = f x y : zipWith' f xs ys
-------Flip hof------------------
-- | Re-implementation of 'flip': swap the argument order.
flip' :: (a->b->c)->(b->a->c)
flip' f = g
  where g x y = f y x
------Filter---------------------
-- | Re-implementation of 'filter' with explicit recursion and guards.
filter' :: (a->Bool)->[a]->[a]
filter' _ [] = []
filter' f (x:xs)
  |f x = x:filter' f xs
  |otherwise = filter' f xs
------Lambda---------------------
-- | Count the inner lists longer than 15 elements.
-- NOTE(review): the lambda parameter @l@ shadows the outer @l@;
-- harmless here but easy to misread.
numChain :: Num a => [[a]]->Int
numChain l = length (filter (\l -> length l > 15) l)
-------Foldr -> map--------------
-- | 'map' written as a right fold.
map' :: (a->b)->[a]->[b]
map' f l = foldr (\x acc -> f x:acc) [] l
-------Foldl -> map--------------
-- | 'map' written as a left fold.
-- NOTE(review): O(n^2) because of the repeated (++); kept as the
-- exercise intends, but foldr is the right fold for building a list.
map'' :: (a->b)->[a]->[b]
map'' f = foldl (\acc x -> acc ++ [f x]) []
------Foldr ->`elem`-------------
-- | 'elem' written as a right fold.
-- NOTE(review): the helper could be just @x == e || acc@.
elem' :: (Eq a) => a->[a]->Bool
elem' e l = foldr f False l
  where f x acc = (if x == e then True else acc)
------Foldr -> reverse-----------
-- | 'reverse' written as a right fold.
-- NOTE(review): O(n^2) due to (++); pedagogical only.
reverse' :: [a] -> [a]
reverse' l = foldr (\x acc -> acc ++ [x]) [] l
-----Foldr ->Filter--------------
-- | 'filter' written as a right fold.
-- NOTE(review): @f x == True@ is redundant — just @f x@ (hlint).
filter'' :: (a->Bool)->[a]->[a]
filter'' f l = foldr (\x acc -> (if f x == True then x:acc else acc)) [] l
|
sushantmahajan/programs
|
haskell/practise.hs
|
cc0-1.0
| 2,474
| 6
| 11
| 411
| 1,195
| 643
| 552
| 56
| 3
|
-- C->Haskell Compiler: interface to C processing routines
--
-- Author : Manuel M. T. Chakravarty
-- Created: 12 August 99
--
-- Version $Revision: 1.3 $ from $Date: 2005/06/22 16:01:20 $
--
-- Copyright (c) 1999 Manuel M. T. Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- This modules provides access to the C processing routines for the rest of
-- the compiler.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
--- TODO ----------------------------------------------------------------------
--
--
module C (-- interface to KL for all non-KL modules
--
-- stuff from `Common' (reexported)
--
Pos(posOf),
--
-- structure tree
--
module CAST,
--
-- attributed structure tree with operations (reexported from
-- `CAttrs')
--
AttrC, getCHeader,
CObj(..), CTag(..), CDef(..), lookupDefObjC, lookupDefTagC,
getDefOfIdentC,
--
-- support for C structure tree traversals
--
module CTrav,
--
-- support for pretty printing C abstract trees
--
module CPretty,
--
loadAttrC, -- locally defined
--
-- misc. reexported stuff
--
Ident, Attrs, Attr(..),
--
-- misc. own stuff
--
csuffix, hsuffix, isuffix)
where
import Position (Position(..), Pos(posOf))
import Idents (Ident, lexemeToIdent)
import Attributes (Attrs, Attr(..))
import C2HSState (CST, IOMode(..),
readCST, transCST, runCST, nop,
readFileCIO, writeFileCIO, openFileCIO, hCloseCIO,
fatal, errorsPresent, showErrors,
Traces(..), putTraceStr)
import CAST
import CParser (parseC)
import CPretty
import CAttrs (AttrC, attrC, getCHeader,
CObj(..), CTag(..), CDef(..), lookupDefObjC, lookupDefTagC,
getDefOfIdentC)
import CNames (nameAnalysis)
import CTrav
-- suffix for files containing C (EXPORTED)
--
csuffix, hsuffix, isuffix :: String
csuffix = ".c"
hsuffix = ".h"
isuffix = ".i"
-- given a file name (with suffix), parse that file as a C header and do the
-- static analysis (collect defined names) (EXPORTED)
--
-- * currently, lexical and syntactical errors are reported immediately and
-- abort the program; others are reported as part of the fatal error message;
-- warnings are returned together with the read unit
--
loadAttrC :: String -> CST s (AttrC, String)
loadAttrC fname = do
  -- read file
  --
  traceInfoRead fname
  contents <- readFileCIO fname

  -- parse
  --
  traceInfoParse
  rawHeader <- parseC contents (Position fname 1 1)
  let header = attrC rawHeader

  -- name analysis
  --
  traceInfoNA
  headerWithAttrs <- nameAnalysis header

  -- check for errors and finalize: errors abort with a fatal message,
  -- otherwise accumulated warnings are returned with the header
  --
  errs <- errorsPresent
  if errs
    then do
      traceInfoErr
      errmsgs <- showErrors
      fatal ("C header contains \
             \errors:\n\n" ++ errmsgs)       -- fatal error
    else do
      traceInfoOK
      warnmsgs <- showErrors
      return (headerWithAttrs, warnmsgs)
  where
    -- phase-trace helpers (emitted only when tracePhasesSW is on).
    -- NOTE(review): traceInfoRead's parameter shadows the outer
    -- `fname`; callers always pass the outer one, so behaviour is
    -- unchanged, but the shadowing is easy to misread.
    traceInfoRead fname = putTraceStr tracePhasesSW
                            ("Attempting to read file `"
                             ++ fname ++ "'...\n")
    traceInfoParse      = putTraceStr tracePhasesSW
                            ("...parsing `"
                             ++ fname ++ "'...\n")
    traceInfoNA         = putTraceStr tracePhasesSW
                            ("...name analysis of `"
                             ++ fname ++ "'...\n")
    traceInfoErr        = putTraceStr tracePhasesSW
                            ("...error(s) detected in `"
                             ++ fname ++ "'.\n")
    traceInfoOK         = putTraceStr tracePhasesSW
                            ("...successfully loaded `"
                             ++ fname ++ "'.\n")
|
phischu/gtk2hs
|
tools/c2hs/c/C.hs
|
lgpl-3.0
| 4,260
| 48
| 12
| 1,094
| 671
| 417
| 254
| 68
| 2
|
{-# LANGUAGE GADTs, PolyKinds, RankNTypes #-}
module T11963 where
-- this module should be rejected without TypeInType
import Data.Proxy
-- see code in RnTypes.extract_hs_tv_bndrs which checks for these bad cases
-- bndr_kvs vs body_tvs
data Typ k t where
Typ :: (forall (a :: k -> *). a t -> a t) -> Typ k t
-- bndr_kvs vs acc_tvs
foo :: (forall (t :: k). Proxy t) -> Proxy k
foo _ = undefined
-- locals vs body_kvs
bar :: forall k. forall (t :: k). Proxy t
bar = undefined
-- body_kvs vs acc_tvs
quux :: (forall t. Proxy (t :: k)) -> Proxy k
quux _ = undefined
-- body_tvs vs acc_kvs
blargh :: (forall a. a -> Proxy k) -> Proxy (t :: k)
blargh _ = undefined
|
ezyang/ghc
|
testsuite/tests/typecheck/should_fail/T11963.hs
|
bsd-3-clause
| 684
| 0
| 11
| 156
| 209
| 120
| 89
| 13
| 1
|
module TiProp where
import PropSyntax
import TI
--import TiBaseStruct
import FreeNamesProp
import DefinedNamesProp
import NameMapsProp
import TiPropStruct(tcPD,checkPredicateRec)
import TiBaseStruct(tcE,tcD,checkTypeSynRec,checkClassRec)
import TiPropInstances
import PrettyPrint
--import MUtils(( # ))
-- Declarations: dispatch base-structure declarations to tcD and
-- property declarations to tcPD (recprop splits the two layers of the
-- PropSyntax recursion — defined in the imported modules).
instance (TypeId i,ValueId i,PrintableOp i,Fresh i,HasSrcLoc i,TypedId PId i)
      => TypeCheckDecl i (HsDeclI i) [HsDeclI i] where
  tcDecl bs = recprop (tcD bs) (tcPD bs)

-- Recursion checks: type synonyms, classes and predicates must all be
-- non-recursive.
instance Eq i => CheckRecursion i (HsDeclI i) where
  checkRecursion ds = do checkTypeSynRec ds
                         checkClassRec ds
                         checkPredicateRec ds

-- Expressions, assertions and predicates just unwrap their newtype
-- and delegate.
instance (TypeId i,ValueId i,PrintableOp i,Fresh i,HasSrcLoc i,TypedId PId i)
      => TypeCheck i (HsExpI i) (Typed i (HsExpI i)) where
  tc (Exp e) = tcE e

instance (TypeId i,ValueId i,PrintableOp i,Fresh i,HasSrcLoc i,TypedId PId i)
      => TypeCheck i (AssertionI i) (Typed i (AssertionI i)) where
  tc (PA e) = tc e

instance (TypeId i,ValueId i,PrintableOp i,Fresh i,HasSrcLoc i,TypedId PId i)
      => TypeCheck i (PredicateI i) (Typed i (PredicateI i)) where
  tc (PP e) = tc e

-- Kind checking: property declarations are accepted without any
-- checking (see the original "hmm" — review).
instance ({-ValueId i,-}TypeVar i) => KindCheck i (HsDeclI i) () where
  kc = recprop kc kcPD
    where kcPD _ = return () -- hmm
|
forste/haReFork
|
tools/property/TI/TiProp.hs
|
bsd-3-clause
| 1,248
| 2
| 9
| 231
| 519
| 268
| 251
| -1
| -1
|
import "hint" HLint.HLint
|
ivanperez-keera/hcwiid
|
tests/HLint.hs
|
gpl-2.0
| 27
| 0
| 4
| 4
| 7
| 4
| 3
| -1
| -1
|
module UtilSpec
( main
, spec
) where
import Pwn.Internal
import qualified System.Directory as SD
import System.Environment
import Test.Hspec
main :: IO ()
main = hspec spec

-- | Checks that 'getTemporaryDirectory' honours $XDG_RUNTIME_DIR when
-- it is set, and falls back to System.Directory's temp dir otherwise.
-- NOTE(review): this test mutates the process environment (setEnv /
-- unsetEnv), so it is order-sensitive with respect to other tests.
spec :: Spec
spec = do
  describe "Util.getTemporaryDirectory" $ do
    it "use $XDG_RUNTIME_DIR if available" $ do
      setEnv "XDG_RUNTIME_DIR" "/path/to/tmpdir"
      tempdir1 <- getTemporaryDirectory
      tempdir1 `shouldBe` "/path/to/tmpdir"
      -- with the variable unset, behaviour must match System.Directory
      unsetEnv "XDG_RUNTIME_DIR"
      tempdir2 <- getTemporaryDirectory
      tempdir3 <- SD.getTemporaryDirectory
      tempdir2 `shouldBe` tempdir3
|
Tosainu/pwn.hs
|
test/UtilSpec.hs
|
mit
| 616
| 0
| 14
| 154
| 138
| 71
| 67
| 20
| 1
|
import Infix
import RPN
import Tokenize
-- | Evaluate an arithmetic expression given as a string: tokenize the
-- input, translate the token stream to reverse Polish notation, then
-- reduce the RPN form to a value (stages come from the Tokenize,
-- Infix and RPN modules imported above).
evaluate :: String -> Float
evaluate = evaluateRpn . translateToRpn . tokenizeExpr
|
DanielBrookRoberge/learning-calculator
|
haskell/evaluate.hs
|
mit
| 123
| 0
| 6
| 18
| 32
| 18
| 14
| 5
| 1
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.InternalSettings
(js_setTouchEventEmulationEnabled, setTouchEventEmulationEnabled,
js_setStandardFontFamily, setStandardFontFamily,
js_setSerifFontFamily, setSerifFontFamily,
js_setSansSerifFontFamily, setSansSerifFontFamily,
js_setFixedFontFamily, setFixedFontFamily, js_setCursiveFontFamily,
setCursiveFontFamily, js_setFantasyFontFamily,
setFantasyFontFamily, js_setPictographFontFamily,
setPictographFontFamily, js_setFontFallbackPrefersPictographs,
setFontFallbackPrefersPictographs, js_setTextAutosizingEnabled,
setTextAutosizingEnabled, js_setTextAutosizingWindowSizeOverride,
setTextAutosizingWindowSizeOverride,
js_setTextAutosizingFontScaleFactor,
setTextAutosizingFontScaleFactor, js_setCSSShapesEnabled,
setCSSShapesEnabled, js_setCanStartMedia, setCanStartMedia,
js_setShouldDisplayTrackKind, setShouldDisplayTrackKind,
js_shouldDisplayTrackKind, shouldDisplayTrackKind,
js_setDefaultVideoPosterURL, setDefaultVideoPosterURL,
js_setTimeWithoutMouseMovementBeforeHidingControls,
setTimeWithoutMouseMovementBeforeHidingControls,
js_setMediaTypeOverride, setMediaTypeOverride,
js_setPluginReplacementEnabled, setPluginReplacementEnabled,
js_setEditingBehavior, setEditingBehavior,
js_setShouldConvertPositionStyleOnCopy,
setShouldConvertPositionStyleOnCopy,
js_setLangAttributeAwareFormControlUIEnabled,
setLangAttributeAwareFormControlUIEnabled,
js_setStorageBlockingPolicy, setStorageBlockingPolicy,
js_setImagesEnabled, setImagesEnabled,
js_setUseLegacyBackgroundSizeShorthandBehavior,
setUseLegacyBackgroundSizeShorthandBehavior,
js_setAutoscrollForDragAndDropEnabled,
setAutoscrollForDragAndDropEnabled,
js_setBackgroundShouldExtendBeyondPage,
setBackgroundShouldExtendBeyondPage,
js_setScrollingTreeIncludesFrames, setScrollingTreeIncludesFrames,
js_setMinimumTimerInterval, setMinimumTimerInterval,
InternalSettings, castToInternalSettings, gTypeInternalSettings)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSRef(..), JSString, castRef)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSRef(..), FromJSRef(..))
import GHCJS.Marshal.Pure (PToJSRef(..), PFromJSRef(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.Enums
-- NOTE(review): this module is machine-generated from WebIDL; every
-- binding below follows the same foreign-import + liftIO-wrapper
-- pattern.  Prefer regenerating over hand-editing.
foreign import javascript unsafe
        "$1[\"setTouchEventEmulationEnabled\"]($2)"
        js_setTouchEventEmulationEnabled ::
        JSRef InternalSettings -> Bool -> IO ()

-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setTouchEventEmulationEnabled Mozilla InternalSettings.setTouchEventEmulationEnabled documentation>
setTouchEventEmulationEnabled ::
                              (MonadIO m) => InternalSettings -> Bool -> m ()
setTouchEventEmulationEnabled self enabled
  = liftIO
      (js_setTouchEventEmulationEnabled (unInternalSettings self)
         enabled)
foreign import javascript unsafe
"$1[\"setStandardFontFamily\"]($2,\n$3)" js_setStandardFontFamily
:: JSRef InternalSettings -> JSString -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setStandardFontFamily Mozilla InternalSettings.setStandardFontFamily documentation>
setStandardFontFamily ::
(MonadIO m, ToJSString family', ToJSString script) =>
InternalSettings -> family' -> script -> m ()
setStandardFontFamily self family' script
= liftIO
(js_setStandardFontFamily (unInternalSettings self)
(toJSString family')
(toJSString script))
foreign import javascript unsafe
"$1[\"setSerifFontFamily\"]($2, $3)" js_setSerifFontFamily ::
JSRef InternalSettings -> JSString -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setSerifFontFamily Mozilla InternalSettings.setSerifFontFamily documentation>
setSerifFontFamily ::
(MonadIO m, ToJSString family', ToJSString script) =>
InternalSettings -> family' -> script -> m ()
setSerifFontFamily self family' script
= liftIO
(js_setSerifFontFamily (unInternalSettings self)
(toJSString family')
(toJSString script))
foreign import javascript unsafe
"$1[\"setSansSerifFontFamily\"]($2,\n$3)" js_setSansSerifFontFamily
:: JSRef InternalSettings -> JSString -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setSansSerifFontFamily Mozilla InternalSettings.setSansSerifFontFamily documentation>
setSansSerifFontFamily ::
(MonadIO m, ToJSString family', ToJSString script) =>
InternalSettings -> family' -> script -> m ()
setSansSerifFontFamily self family' script
= liftIO
(js_setSansSerifFontFamily (unInternalSettings self)
(toJSString family')
(toJSString script))
foreign import javascript unsafe
"$1[\"setFixedFontFamily\"]($2, $3)" js_setFixedFontFamily ::
JSRef InternalSettings -> JSString -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setFixedFontFamily Mozilla InternalSettings.setFixedFontFamily documentation>
setFixedFontFamily ::
(MonadIO m, ToJSString family', ToJSString script) =>
InternalSettings -> family' -> script -> m ()
setFixedFontFamily self family' script
= liftIO
(js_setFixedFontFamily (unInternalSettings self)
(toJSString family')
(toJSString script))
foreign import javascript unsafe
"$1[\"setCursiveFontFamily\"]($2,\n$3)" js_setCursiveFontFamily ::
JSRef InternalSettings -> JSString -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setCursiveFontFamily Mozilla InternalSettings.setCursiveFontFamily documentation>
setCursiveFontFamily ::
(MonadIO m, ToJSString family', ToJSString script) =>
InternalSettings -> family' -> script -> m ()
setCursiveFontFamily self family' script
= liftIO
(js_setCursiveFontFamily (unInternalSettings self)
(toJSString family')
(toJSString script))
foreign import javascript unsafe
"$1[\"setFantasyFontFamily\"]($2,\n$3)" js_setFantasyFontFamily ::
JSRef InternalSettings -> JSString -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setFantasyFontFamily Mozilla InternalSettings.setFantasyFontFamily documentation>
setFantasyFontFamily ::
(MonadIO m, ToJSString family', ToJSString script) =>
InternalSettings -> family' -> script -> m ()
setFantasyFontFamily self family' script
= liftIO
(js_setFantasyFontFamily (unInternalSettings self)
(toJSString family')
(toJSString script))
foreign import javascript unsafe
"$1[\"setPictographFontFamily\"]($2,\n$3)"
js_setPictographFontFamily ::
JSRef InternalSettings -> JSString -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setPictographFontFamily Mozilla InternalSettings.setPictographFontFamily documentation>
setPictographFontFamily ::
(MonadIO m, ToJSString family', ToJSString script) =>
InternalSettings -> family' -> script -> m ()
setPictographFontFamily self family' script
= liftIO
(js_setPictographFontFamily (unInternalSettings self)
(toJSString family')
(toJSString script))
foreign import javascript unsafe
"$1[\"setFontFallbackPrefersPictographs\"]($2)"
js_setFontFallbackPrefersPictographs ::
JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setFontFallbackPrefersPictographs Mozilla InternalSettings.setFontFallbackPrefersPictographs documentation>
setFontFallbackPrefersPictographs ::
(MonadIO m) => InternalSettings -> Bool -> m ()
setFontFallbackPrefersPictographs self preferPictographs
= liftIO
(js_setFontFallbackPrefersPictographs (unInternalSettings self)
preferPictographs)
foreign import javascript unsafe
"$1[\"setTextAutosizingEnabled\"]($2)" js_setTextAutosizingEnabled
:: JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setTextAutosizingEnabled Mozilla InternalSettings.setTextAutosizingEnabled documentation>
setTextAutosizingEnabled ::
(MonadIO m) => InternalSettings -> Bool -> m ()
setTextAutosizingEnabled self enabled
= liftIO
(js_setTextAutosizingEnabled (unInternalSettings self) enabled)
foreign import javascript unsafe
"$1[\"setTextAutosizingWindowSizeOverride\"]($2,\n$3)"
js_setTextAutosizingWindowSizeOverride ::
JSRef InternalSettings -> Int -> Int -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setTextAutosizingWindowSizeOverride Mozilla InternalSettings.setTextAutosizingWindowSizeOverride documentation>
setTextAutosizingWindowSizeOverride ::
(MonadIO m) => InternalSettings -> Int -> Int -> m ()
setTextAutosizingWindowSizeOverride self width height
= liftIO
(js_setTextAutosizingWindowSizeOverride (unInternalSettings self)
width
height)
foreign import javascript unsafe
"$1[\"setTextAutosizingFontScaleFactor\"]($2)"
js_setTextAutosizingFontScaleFactor ::
JSRef InternalSettings -> Float -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setTextAutosizingFontScaleFactor Mozilla InternalSettings.setTextAutosizingFontScaleFactor documentation>
setTextAutosizingFontScaleFactor ::
(MonadIO m) => InternalSettings -> Float -> m ()
setTextAutosizingFontScaleFactor self fontScaleFactor
= liftIO
(js_setTextAutosizingFontScaleFactor (unInternalSettings self)
fontScaleFactor)
foreign import javascript unsafe "$1[\"setCSSShapesEnabled\"]($2)"
js_setCSSShapesEnabled :: JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setCSSShapesEnabled Mozilla InternalSettings.setCSSShapesEnabled documentation>
setCSSShapesEnabled ::
(MonadIO m) => InternalSettings -> Bool -> m ()
setCSSShapesEnabled self enabled
= liftIO (js_setCSSShapesEnabled (unInternalSettings self) enabled)
foreign import javascript unsafe "$1[\"setCanStartMedia\"]($2)"
js_setCanStartMedia :: JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setCanStartMedia Mozilla InternalSettings.setCanStartMedia documentation>
setCanStartMedia :: (MonadIO m) => InternalSettings -> Bool -> m ()
setCanStartMedia self =
  liftIO . js_setCanStartMedia (unInternalSettings self)
foreign import javascript unsafe
"$1[\"setShouldDisplayTrackKind\"]($2,\n$3)"
js_setShouldDisplayTrackKind ::
JSRef InternalSettings -> JSString -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setShouldDisplayTrackKind Mozilla InternalSettings.setShouldDisplayTrackKind documentation>
setShouldDisplayTrackKind ::
(MonadIO m, ToJSString kind) =>
InternalSettings -> kind -> Bool -> m ()
setShouldDisplayTrackKind self kind enabled
= liftIO
(js_setShouldDisplayTrackKind (unInternalSettings self)
(toJSString kind)
enabled)
foreign import javascript unsafe
"($1[\"shouldDisplayTrackKind\"]($2) ? 1 : 0)"
js_shouldDisplayTrackKind ::
JSRef InternalSettings -> JSString -> IO Bool
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.shouldDisplayTrackKind Mozilla InternalSettings.shouldDisplayTrackKind documentation>
shouldDisplayTrackKind ::
                       (MonadIO m, ToJSString trackKind) =>
                         InternalSettings -> trackKind -> m Bool
shouldDisplayTrackKind self trackKind =
  liftIO
    (js_shouldDisplayTrackKind (unInternalSettings self)
       (toJSString trackKind))
foreign import javascript unsafe
"$1[\"setDefaultVideoPosterURL\"]($2)" js_setDefaultVideoPosterURL
:: JSRef InternalSettings -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setDefaultVideoPosterURL Mozilla InternalSettings.setDefaultVideoPosterURL documentation>
setDefaultVideoPosterURL ::
(MonadIO m, ToJSString poster) =>
InternalSettings -> poster -> m ()
setDefaultVideoPosterURL self poster
= liftIO
(js_setDefaultVideoPosterURL (unInternalSettings self)
(toJSString poster))
foreign import javascript unsafe
"$1[\"setTimeWithoutMouseMovementBeforeHidingControls\"]($2)"
js_setTimeWithoutMouseMovementBeforeHidingControls ::
JSRef InternalSettings -> Double -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setTimeWithoutMouseMovementBeforeHidingControls Mozilla InternalSettings.setTimeWithoutMouseMovementBeforeHidingControls documentation>
setTimeWithoutMouseMovementBeforeHidingControls ::
(MonadIO m) => InternalSettings -> Double -> m ()
setTimeWithoutMouseMovementBeforeHidingControls self time
= liftIO
(js_setTimeWithoutMouseMovementBeforeHidingControls
(unInternalSettings self)
time)
foreign import javascript unsafe "$1[\"setMediaTypeOverride\"]($2)"
js_setMediaTypeOverride ::
JSRef InternalSettings -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setMediaTypeOverride Mozilla InternalSettings.setMediaTypeOverride documentation>
setMediaTypeOverride ::
(MonadIO m, ToJSString mediaTypeOverride) =>
InternalSettings -> mediaTypeOverride -> m ()
setMediaTypeOverride self mediaTypeOverride
= liftIO
(js_setMediaTypeOverride (unInternalSettings self)
(toJSString mediaTypeOverride))
foreign import javascript unsafe
"$1[\"setPluginReplacementEnabled\"]($2)"
js_setPluginReplacementEnabled ::
JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setPluginReplacementEnabled Mozilla InternalSettings.setPluginReplacementEnabled documentation>
setPluginReplacementEnabled ::
(MonadIO m) => InternalSettings -> Bool -> m ()
setPluginReplacementEnabled self enabled
= liftIO
(js_setPluginReplacementEnabled (unInternalSettings self) enabled)
foreign import javascript unsafe "$1[\"setEditingBehavior\"]($2)"
js_setEditingBehavior ::
JSRef InternalSettings -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setEditingBehavior Mozilla InternalSettings.setEditingBehavior documentation>
setEditingBehavior ::
(MonadIO m, ToJSString behavior) =>
InternalSettings -> behavior -> m ()
setEditingBehavior self behavior
= liftIO
(js_setEditingBehavior (unInternalSettings self)
(toJSString behavior))
foreign import javascript unsafe
"$1[\"setShouldConvertPositionStyleOnCopy\"]($2)"
js_setShouldConvertPositionStyleOnCopy ::
JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setShouldConvertPositionStyleOnCopy Mozilla InternalSettings.setShouldConvertPositionStyleOnCopy documentation>
setShouldConvertPositionStyleOnCopy ::
(MonadIO m) => InternalSettings -> Bool -> m ()
setShouldConvertPositionStyleOnCopy self convert
= liftIO
(js_setShouldConvertPositionStyleOnCopy (unInternalSettings self)
convert)
foreign import javascript unsafe
"$1[\"setLangAttributeAwareFormControlUIEnabled\"]($2)"
js_setLangAttributeAwareFormControlUIEnabled ::
JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setLangAttributeAwareFormControlUIEnabled Mozilla InternalSettings.setLangAttributeAwareFormControlUIEnabled documentation>
setLangAttributeAwareFormControlUIEnabled ::
(MonadIO m) => InternalSettings -> Bool -> m ()
setLangAttributeAwareFormControlUIEnabled self enabled
= liftIO
(js_setLangAttributeAwareFormControlUIEnabled
(unInternalSettings self)
enabled)
foreign import javascript unsafe
"$1[\"setStorageBlockingPolicy\"]($2)" js_setStorageBlockingPolicy
:: JSRef InternalSettings -> JSString -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setStorageBlockingPolicy Mozilla InternalSettings.setStorageBlockingPolicy documentation>
setStorageBlockingPolicy ::
(MonadIO m, ToJSString policy) =>
InternalSettings -> policy -> m ()
setStorageBlockingPolicy self policy
= liftIO
(js_setStorageBlockingPolicy (unInternalSettings self)
(toJSString policy))
foreign import javascript unsafe "$1[\"setImagesEnabled\"]($2)"
js_setImagesEnabled :: JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setImagesEnabled Mozilla InternalSettings.setImagesEnabled documentation>
setImagesEnabled :: (MonadIO m) => InternalSettings -> Bool -> m ()
setImagesEnabled self enabled
= liftIO (js_setImagesEnabled (unInternalSettings self) enabled)
foreign import javascript unsafe
"$1[\"setUseLegacyBackgroundSizeShorthandBehavior\"]($2)"
js_setUseLegacyBackgroundSizeShorthandBehavior ::
JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setUseLegacyBackgroundSizeShorthandBehavior Mozilla InternalSettings.setUseLegacyBackgroundSizeShorthandBehavior documentation>
setUseLegacyBackgroundSizeShorthandBehavior ::
(MonadIO m) => InternalSettings -> Bool -> m ()
setUseLegacyBackgroundSizeShorthandBehavior self enabled
= liftIO
(js_setUseLegacyBackgroundSizeShorthandBehavior
(unInternalSettings self)
enabled)
foreign import javascript unsafe
"$1[\"setAutoscrollForDragAndDropEnabled\"]($2)"
js_setAutoscrollForDragAndDropEnabled ::
JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setAutoscrollForDragAndDropEnabled Mozilla InternalSettings.setAutoscrollForDragAndDropEnabled documentation>
setAutoscrollForDragAndDropEnabled ::
(MonadIO m) => InternalSettings -> Bool -> m ()
setAutoscrollForDragAndDropEnabled self enabled
= liftIO
(js_setAutoscrollForDragAndDropEnabled (unInternalSettings self)
enabled)
foreign import javascript unsafe
"$1[\"setBackgroundShouldExtendBeyondPage\"]($2)"
js_setBackgroundShouldExtendBeyondPage ::
JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setBackgroundShouldExtendBeyondPage Mozilla InternalSettings.setBackgroundShouldExtendBeyondPage documentation>
setBackgroundShouldExtendBeyondPage ::
(MonadIO m) => InternalSettings -> Bool -> m ()
setBackgroundShouldExtendBeyondPage self hasExtendedBackground
= liftIO
(js_setBackgroundShouldExtendBeyondPage (unInternalSettings self)
hasExtendedBackground)
foreign import javascript unsafe
"$1[\"setScrollingTreeIncludesFrames\"]($2)"
js_setScrollingTreeIncludesFrames ::
JSRef InternalSettings -> Bool -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setScrollingTreeIncludesFrames Mozilla InternalSettings.setScrollingTreeIncludesFrames documentation>
setScrollingTreeIncludesFrames ::
(MonadIO m) => InternalSettings -> Bool -> m ()
setScrollingTreeIncludesFrames self enabled
= liftIO
(js_setScrollingTreeIncludesFrames (unInternalSettings self)
enabled)
foreign import javascript unsafe
"$1[\"setMinimumTimerInterval\"]($2)" js_setMinimumTimerInterval ::
JSRef InternalSettings -> Double -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/InternalSettings.setMinimumTimerInterval Mozilla InternalSettings.setMinimumTimerInterval documentation>
setMinimumTimerInterval ::
(MonadIO m) => InternalSettings -> Double -> m ()
setMinimumTimerInterval self intervalInSeconds
= liftIO
(js_setMinimumTimerInterval (unInternalSettings self)
intervalInSeconds)
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/JSFFI/Generated/InternalSettings.hs
|
mit
| 21,896
| 258
| 9
| 4,117
| 3,357
| 1,771
| 1,586
| 335
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
module Hate.UI.Controls.Button where
import Hate.UI.Types
import Hate.UI.Controls.Label
import Hate.UI.Util
import Hate.Graphics
import Hate.Math
import Control.Monad.State (state)
-- In order to keep things simple, button cannot nest arbitrary controls
-- | A clickable UI control: position, size, a text label, and the state
-- transition (s -> s) to run when the button is clicked.
data Button s = Button Vec2 Vec2 (Label s) (s -> s)
-- | A button renders as its label plus a surrounding box, both shifted
-- to the button's position.  A click inside the bounds yields the
-- button's action (and an enlarged button, as a hover-style demo);
-- a miss yields 'id' and leaves the button unchanged.
instance Element s (Button s) where
    drawElement ub s (Button p sz lab _) =
        translate p <$> (drawElement ub s lab ++ box (Vec2 0 0) sz)
    click mp b@(Button pos sz _ action)
        | between (pos, pos + sz) mp = (action, enlargeButton b)
        | otherwise                  = (id, b)
-- TEMP: this is an example of how an effect such as OnHover could be implemented
-- Widens the button by 2 units; it runs on every successful click, so
-- the growth accumulates (demo behaviour, not meant for production).
enlargeButton (Button p (Vec2 sx sy) lab act) = Button p (Vec2 (sx + 2) sy) lab act
-- | Construct a button with a constant caption.
button :: forall s. Vec2 -> Vec2 -> String -> (s -> s) -> Button s
button pos sz str = buttonBnd pos sz (PlainValue str)

-- | Construct a button whose caption comes from a state binding.
buttonBnd :: forall s. Vec2 -> Vec2 -> Binding s String -> (s -> s) -> Button s
buttonBnd pos sz bnd = Button pos sz (Label (Vec2 1 1) bnd)
|
bananu7/Hate-UI
|
src/Hate/UI/Controls/Button.hs
|
mit
| 1,275
| 0
| 11
| 279
| 439
| 237
| 202
| 23
| 1
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apitgateway-method-integration-integrationresponse.html
module Stratosphere.ResourceProperties.ApiGatewayMethodIntegrationResponse where
import Stratosphere.ResourceImports
-- | Full data type definition for ApiGatewayMethodIntegrationResponse. See
-- 'apiGatewayMethodIntegrationResponse' for a more convenient constructor.
-- All properties except StatusCode are optional; absent ones are
-- omitted from the generated CloudFormation JSON.
data ApiGatewayMethodIntegrationResponse =
  ApiGatewayMethodIntegrationResponse
  { _apiGatewayMethodIntegrationResponseContentHandling :: Maybe (Val Text) -- ^ payload conversion behaviour (optional)
  , _apiGatewayMethodIntegrationResponseResponseParameters :: Maybe Object -- ^ response parameter mappings (optional)
  , _apiGatewayMethodIntegrationResponseResponseTemplates :: Maybe Object -- ^ mapping templates keyed by content type (optional)
  , _apiGatewayMethodIntegrationResponseSelectionPattern :: Maybe (Val Text) -- ^ regex selecting this integration response (optional)
  , _apiGatewayMethodIntegrationResponseStatusCode :: Val Text -- ^ method response status code (required)
  } deriving (Show, Eq)
-- Serialise to the CloudFormation JSON shape: optional fields are
-- dropped via 'catMaybes' rather than emitted as null; StatusCode is
-- always present.
instance ToJSON ApiGatewayMethodIntegrationResponse where
  toJSON ApiGatewayMethodIntegrationResponse{..} =
    object $
    catMaybes
    [ fmap (("ContentHandling",) . toJSON) _apiGatewayMethodIntegrationResponseContentHandling
    , fmap (("ResponseParameters",) . toJSON) _apiGatewayMethodIntegrationResponseResponseParameters
    , fmap (("ResponseTemplates",) . toJSON) _apiGatewayMethodIntegrationResponseResponseTemplates
    , fmap (("SelectionPattern",) . toJSON) _apiGatewayMethodIntegrationResponseSelectionPattern
    , (Just . ("StatusCode",) . toJSON) _apiGatewayMethodIntegrationResponseStatusCode
    ]
-- | Constructor for 'ApiGatewayMethodIntegrationResponse' containing required
-- fields as arguments; every optional property starts out unset.
apiGatewayMethodIntegrationResponse
  :: Val Text -- ^ 'agmirStatusCode'
  -> ApiGatewayMethodIntegrationResponse
apiGatewayMethodIntegrationResponse statusCode =
  ApiGatewayMethodIntegrationResponse
  { _apiGatewayMethodIntegrationResponseStatusCode = statusCode
  , _apiGatewayMethodIntegrationResponseContentHandling = Nothing
  , _apiGatewayMethodIntegrationResponseResponseParameters = Nothing
  , _apiGatewayMethodIntegrationResponseResponseTemplates = Nothing
  , _apiGatewayMethodIntegrationResponseSelectionPattern = Nothing
  }
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apitgateway-method-integration-integrationresponse.html#cfn-apigateway-method-integrationresponse-contenthandling
-- Lens for the optional ContentHandling property.
agmirContentHandling :: Lens' ApiGatewayMethodIntegrationResponse (Maybe (Val Text))
agmirContentHandling = lens _apiGatewayMethodIntegrationResponseContentHandling (\s a -> s { _apiGatewayMethodIntegrationResponseContentHandling = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apitgateway-method-integration-integrationresponse.html#cfn-apigateway-method-integration-integrationresponse-responseparameters
-- Lens for the optional ResponseParameters map.
agmirResponseParameters :: Lens' ApiGatewayMethodIntegrationResponse (Maybe Object)
agmirResponseParameters = lens _apiGatewayMethodIntegrationResponseResponseParameters (\s a -> s { _apiGatewayMethodIntegrationResponseResponseParameters = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apitgateway-method-integration-integrationresponse.html#cfn-apigateway-method-integration-integrationresponse-responsetemplates
-- Lens for the optional ResponseTemplates map.
agmirResponseTemplates :: Lens' ApiGatewayMethodIntegrationResponse (Maybe Object)
agmirResponseTemplates = lens _apiGatewayMethodIntegrationResponseResponseTemplates (\s a -> s { _apiGatewayMethodIntegrationResponseResponseTemplates = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apitgateway-method-integration-integrationresponse.html#cfn-apigateway-method-integration-integrationresponse-selectionpattern
-- Lens for the optional SelectionPattern regex.
agmirSelectionPattern :: Lens' ApiGatewayMethodIntegrationResponse (Maybe (Val Text))
agmirSelectionPattern = lens _apiGatewayMethodIntegrationResponseSelectionPattern (\s a -> s { _apiGatewayMethodIntegrationResponseSelectionPattern = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apitgateway-method-integration-integrationresponse.html#cfn-apigateway-method-integration-integrationresponse-statuscode
-- Lens for the required StatusCode property.
agmirStatusCode :: Lens' ApiGatewayMethodIntegrationResponse (Val Text)
agmirStatusCode = lens _apiGatewayMethodIntegrationResponseStatusCode (\s a -> s { _apiGatewayMethodIntegrationResponseStatusCode = a })
|
frontrowed/stratosphere
|
library-gen/Stratosphere/ResourceProperties/ApiGatewayMethodIntegrationResponse.hs
|
mit
| 4,487
| 0
| 13
| 352
| 514
| 292
| 222
| 43
| 1
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeSynonymInstances #-}
module IHaskell.Display.Widgets.Button (
-- * The Button Widget
Button,
-- * Create a new button
mkButton) where
-- To keep `cabal repl` happy when running from the ihaskell repo
import Prelude
import Control.Monad (when, join)
import Data.Aeson
import Data.HashMap.Strict as HM
import Data.IORef (newIORef)
import Data.Text (Text)
import Data.Vinyl (Rec(..), (<+>))
import IHaskell.Display
import IHaskell.Eval.Widgets
import IHaskell.IPython.Message.UUID as U
import IHaskell.Display.Widgets.Types
import IHaskell.Display.Widgets.Common
-- | A 'Button' represents a Button from IPython.html.widgets.
type Button = IPythonWidget ButtonType
-- | Create a new button widget with default properties and announce it
-- to the IPython frontend.
mkButton :: IO Button
mkButton = do
  -- A fresh UUID identifies this widget's comm channel.
  wid <- U.random
  let domProps = defaultDOMWidget "ButtonView"
      btnProps = (Description =:: "")
                 :& (Tooltip =:: "")
                 :& (Disabled =:: False)
                 :& (Icon =:: "")
                 :& (ButtonStyle =:: DefaultButton)
                 :& (ClickHandler =:: return ())
                 :& RNil
      initialState = WidgetState (domProps <+> btnProps)
  stateRef <- newIORef initialState
  let widget = IPythonWidget wid stateRef
  -- Open a comm for this widget, registering it in the kernel state.
  widgetSendOpen widget $ toJSON initialState
  return widget
-- Displaying a button asks the frontend to render its view; nothing is
-- rendered inline, hence the empty 'Display'.
instance IHaskellDisplay Button where
  display widget = widgetSendView widget >> return (Display [])
instance IHaskellWidget Button where
  getCommUUID = uuid
  -- Handle a comm message from the frontend and fire the click handler
  -- when it reports a click event.
  -- NOTE(review): the two pattern binds below are partial; a message
  -- lacking the expected {"content": {"event": ...}} shape raises a
  -- pattern-match failure when forced — confirm the frontend can only
  -- send well-formed events.
  comm widget (Object dict1) _ = do
    let key1 = "content" :: Text
        key2 = "event" :: Text
        Just (Object dict2) = HM.lookup key1 dict1
        Just (String event) = HM.lookup key2 dict2
    when (event == "click") $ triggerClick widget
|
beni55/IHaskell
|
ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Button.hs
|
mit
| 2,024
| 0
| 17
| 550
| 456
| 249
| 207
| 48
| 1
|
{-# LANGUAGE FlexibleContexts #-}
module IAFinance.Environment.Context(
Has(..),
EnvContext,
Context,
insert,
select,
remove
) where
-----------------------------------------------------------------------------
-- import
import qualified Data.Map.Strict as Map
import Data.Text (Text)
import Data.Time.Clock.POSIX (getPOSIXTime)
import Control.Monad.Reader (MonadReader, MonadIO, ask, liftIO)
import Control.Concurrent.Async.Lifted.Safe ()
import Control.Concurrent (MVar, newMVar)
import Control.Concurrent.STM (atomically, modifyTVar', readTVarIO)
import qualified IAFinance.Environment.Config as Cfg
import IAFinance.Server.Data.State (State(..))
import IAFinance.Environment.Config (ServerConfig(..), SessionConfig(..))
import IAFinance.Environment.Internal (
Has(..),
EnvContext,
Context,
EnvConfig,
Config(..))
-----------------------------------------------------------------------------
-- Context
-- | Register a new session under the given key.  Returns 'Nothing' when
-- the table is still at capacity after 'release' has evicted entries;
-- otherwise stores a fresh 'Idle' state stamped with the current time.
insert :: (MonadReader a m, Has EnvConfig a, Has EnvContext a, MonadIO m)
       => Text -> m (Maybe (MVar State))
insert key = do
    hasRoom <- release
    if hasRoom
        then do
            slot <- io (newMVar Idle)
            now  <- io timestamp
            modify (Map.insert key (now, slot))
            pure (Just slot)
        else pure Nothing
-- | Look up a session's state and refresh its last-access timestamp so
-- that 'release' does not evict it.  Returns 'Nothing' for unknown keys.
select :: (MonadReader env m, Has EnvContext env, MonadIO m)
       => Text -> m (Maybe (MVar State))
select k = do
    t <- io timestamp
    -- Touch the entry: keep the state, replace the timestamp.
    modify $ Map.adjust (\(_, s) -> (t, s)) k
    -- 'maybe Nothing (Just . snd)' is just 'fmap snd'.
    fmap snd . Map.lookup k <$> get
-- | Drop a session from the table (a no-op when the key is absent).
-- 'Map.update (const Nothing)' is exactly 'Map.delete'.
remove :: (MonadReader env m, Has EnvContext env, MonadIO m)
       => Text -> m ()
remove k = modify $ Map.delete k
-----------------------------------------------------------------------------
-- helper
-- | Evict expired sessions, then report whether there is room for one
-- more entry (current size below the configured capacity).
-- NOTE(review): the filter keeps entries whose timestamp is below
-- now + timeout.  Stored timestamps are never in the future, so this
-- appears to keep *every* entry; expiry was probably meant to compare
-- against (now - timeout) — confirm the intended semantics.
release :: (MonadReader a m, Has EnvConfig a, Has EnvContext a, MonadIO m)
       => m Bool
release = do
    -- '<$>' chained over the function functor: session . base applied
    -- to the fetched config.
    c <- session <$> base <$> Cfg.get
    t <- (+) (toInteger $ timeout c) <$> (io timestamp)
    modify . Map.filter $ (< t) . fst
    let s = capacity c
    -- ((< s) <$> Map.size) is (< s) . Map.size, fmapped over 'get'.
    (< s) <$> Map.size <$> get
-- | Apply a pure transformation to the shared session table
-- (strictly, via 'modifyTVar'').
modify :: (MonadReader env m, Has EnvContext env, MonadIO m)
       => (Context -> Context) -> m ()
modify f = do
    env <- ask
    io (atomically (modifyTVar' (member env) f))

-- | Read a snapshot of the shared session table.
get :: (MonadReader env m, Has EnvContext env, MonadIO m)
    => m Context
get = ask >>= io . readTVarIO . member

-- | Shorthand for 'liftIO'.
io :: (MonadIO m) => IO a -> m a
io = liftIO

-- | Milliseconds since the UNIX epoch.
timestamp :: IO Integer
timestamp = fmap (round . (* 1000)) getPOSIXTime
{-
[Is the whole Map copied when a new binding is inserted?]
(https://stackoverflow.com/a/10003037)
-}
|
wiryls/HomeworkCollectionOfMYLS
|
2018.ProjectV/src/IAFinance/Environment/Context.hs
|
mit
| 2,564
| 0
| 12
| 534
| 889
| 487
| 402
| 60
| 2
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE TypeSynonymInstances #-}
-- This module is public domain as far as I'm concerned
module NLP.GenI.ErrorIO where
import Control.Monad.Trans.Error
import Data.Text (Text)
import qualified Data.Text as T
-- | IO computations that can fail with a 'Text' error message.
type ErrorIO = ErrorT Text IO

-- Orphan instance (hence -fno-warn-orphans above): lets 'Text' serve as
-- the error type of 'ErrorT' by packing plain 'String' messages.
instance Error Text where
    strMsg = T.pack
-- | Promote a pure 'Either' into 'ErrorT': a 'Left' becomes a thrown
-- error, a 'Right' a plain result.
liftEither :: (Error e, Monad m) => Either e a -> ErrorT e m a
liftEither = either throwError return
|
kowey/GenI
|
src/NLP/GenI/ErrorIO.hs
|
gpl-2.0
| 515
| 0
| 7
| 134
| 131
| 73
| 58
| 12
| 1
|
{-# LANGUAGE
RecordWildCards
, BangPatterns
#-}
module Texture.IsoSphere
( IsoSphere (subLevel, vertices, faces, centers)
, isoSphereZero
, isoSphere
, refineIsoSphere
, scaleIsoSphere
, angularDistance
, nearestPoint
, genIsoSphereSO3Grid
, getOptSubDivisionLevel
, renderIsoSphereFaces
, renderQuaternions
) where
import qualified Data.List as L
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as UM
import qualified Data.Vector.Mutable as VM
import Data.Function (on)
import Control.Monad.ST (runST, ST)
import Data.STRef
import Linear.Vect
import Hammer.VTK
import Texture.Orientation
import Texture.Symmetry
-- | IsoSphere is a geodesic grid on the sphere based on subdivision of
-- an icosahedron. It has a uniform and homogeneous distribution of points.
data IsoSphere
  = IsoSphere
  { subLevel :: !Int -- ^ number of subdivision steps applied so far
  , vertices :: U.Vector Vec3D -- ^ all vertices at the current level
  , faces    :: V.Vector (U.Vector (Int, Int, Int)) -- ^ triangle index triples, one inner vector per level
  , centers  :: V.Vector (U.Vector Vec3D) -- ^ normalized face centers, one inner vector per level
  } deriving (Show)
-- | Creates an IsoSphere with the given subdivision level (the absolute
-- value of the argument is used).
isoSphere :: Int -> IsoSphere
isoSphere n = go (abs n) isoSphereZero
  where
    go 0 s = s
    go k s = go (k - 1) (refineIsoSphere s)
-- | Creates an IsoSphere with no subdivision (icosahedron): the 12
-- vertices are cyclic permutations of (0, ±1, ±t) with t the golden
-- ratio, normalized onto the unit sphere, and the 20 faces are listed
-- by vertex index.
isoSphereZero :: IsoSphere
isoSphereZero = let
  -- golden ratio
  t = 0.5 * (1 + sqrt 5)
  vs = U.map normalize $ U.fromList
    [ Vec3 (-1) t 0, Vec3 1 t 0, Vec3 (-1) (-t) 0, Vec3 1 (-t) 0
    , Vec3 0 (-1) t, Vec3 0 1 t, Vec3 0 (-1) (-t), Vec3 0 1 (-t)
    , Vec3 t 0 (-1), Vec3 t 0 1, Vec3 (-t) 0 (-1), Vec3 (-t) 0 1 ]
  fs = U.fromList
    -- 5 faces around point 0
    [ (0, 11, 5), (0, 5, 1), (0, 1, 7), (0, 7, 10), (0, 10, 11)
    -- 5 adjacent faces
    , (1, 5, 9), (5, 11, 4), (11, 10, 2), (10, 7, 6), (7, 1, 8)
    -- 5 faces around point 3
    , (3, 9, 4), (3, 4, 2), (3, 2, 6), (3, 6, 8), (3, 8, 9)
    -- 5 adjacent faces
    , (4, 9, 5), (2, 4, 11), (6, 2, 10), (8, 6, 7), (9, 8, 1) ]
  in IsoSphere
    { subLevel = 0
    , vertices = vs
    , faces = V.singleton fs
    , centers = V.singleton (U.map (getFaceCenter vs) fs)
    }
-- | Normalized centroid of a triangular face given by three indices
-- into the vertex vector.
getFaceCenter :: (DotProd a v, LinearMap a v, Norm a v, UM.Unbox (v a)) => U.Vector (v a) -> (Int, Int, Int) -> v a
getFaceCenter vs (i1, i2, i3) =
    normalize $ (vA &+ vC &+ vB) &* (1/3)
  where
    vA = vs U.! i1
    vB = vs U.! i2
    vC = vs U.! i3
-- | Refine IsoSphere by subdividing each triangular face in four new triangles.
-- Only the most recent face level ('V.last faces') is subdivided; each
-- edge midpoint is shared between the two faces on that edge via the
-- edge table, so no duplicate vertices are created.
refineIsoSphere :: IsoSphere -> IsoSphere
refineIsoSphere ms@IsoSphere {..} = let
  fs = if V.null faces then U.empty else V.last faces
  vsize = U.length vertices
  fsize = U.length fs
  -- Each face gives rise to 4 new faces
  newfsize = 4 * fsize
  -- Subdivide one face: look up (or allocate) the three edge-midpoint
  -- indices, then write the center triangle and the three corner
  -- triangles into consecutive slots of the new face vector.
  addF mf poll fid (v1, v2, v3) = do
    let foffset = 4 * fid
    v12 <- addAddGetNewPos poll (v1, v2)
    v23 <- addAddGetNewPos poll (v2, v3)
    v31 <- addAddGetNewPos poll (v3, v1)
    UM.write mf foffset (v12, v23, v31)
    UM.write mf (foffset + 1) (v31, v1, v12)
    UM.write mf (foffset + 2) (v12, v2, v23)
    UM.write mf (foffset + 3) (v23, v3, v31)
  in runST $ do
    -- new list of faces
    mf <- UM.replicate newfsize (-1,-1,-1)
    -- initialize table of edges
    me <- VM.replicate vsize U.empty
    -- initialize counter to the next position after 'vertices'
    mk <- newSTRef vsize
    -- run subdivision (index only)
    U.zipWithM_ (addF mf (mk, me)) (U.enumFromN 0 fsize) fs
    -- retrieve all mutable data
    ff <- U.unsafeFreeze mf
    ef <- V.unsafeFreeze me
    kf <- readSTRef mk
    -- calculate subdivision points
    ps <- fillVertices ef kf vertices
    -- new list of centers
    let cs = U.map (getFaceCenter ps) ff
    return $ ms
      { subLevel = subLevel + 1
      , faces = V.snoc faces ff
      , vertices = ps
      , centers = V.snoc centers cs
      }
-- | Calculate a new point for each existing edge and add it to its
-- correspondent position in a new vertex list.  The first 'n' slots are
-- copies of the original points; the remaining slots (up to 'vmax') are
-- edge midpoints, written at the indices assigned by 'addAddGetNewPos'.
fillVertices :: V.Vector (U.Vector (Int, Int)) -> Int -> U.Vector Vec3D -> ST s (U.Vector Vec3D)
fillVertices edges vmax points = do
  mv <- UM.new vmax
  let
    func i = do
      -- outer Vector of edges and points must have the same size
      -- copy original point
      UM.write mv i (points U.! i)
      let es = edges V.! i
      -- add new points (one per edge)
      U.mapM_ (\(j, vid) -> UM.write mv vid (getV i j)) es
  mapM_ func [0..n - 1]
  U.unsafeFreeze mv
  where
    n = U.length points
    -- subdivision rule for new points (one per edge): normalized
    -- midpoint of the two edge endpoints.
    getV ia ib = let
      va = points U.! ia
      vb = points U.! ib
      in normalize $ 0.5 *& (va &+ vb)
-- | Verify if an edge (Int, Int) between two vertices was already assigned then retrieves
-- its correspondent position otherwise register the edge to the next available position.
--
-- The edge is normalized to (min, max) so both traversal directions of the same
-- edge map to one entry; 'kref' holds the next free slot in the new vertex list.
addAddGetNewPos :: (UM.Unbox a, Num a)=> (STRef s a, VM.MVector s (U.Vector (Int, a)))
                -> (Int, Int) -> ST s a
addAddGetNewPos (kref, vs) (i, j) = do
  -- the edges are stored in Vector (Vector (Int, a)) with the lowest value of edge in the
  -- outer Vector and the higher value in the first element of the inner Vector. The second
  -- element of the inner Vector stores the position assigned to this edge. This position
  -- will be used to store the new point (correspondent to each edge) in the new list of
  -- points.
  xs <- VM.read vs a
  case U.find ((==b) . fst) xs of
    Just o -> return (snd o) -- edge already exist, return its correspondent position
    _ -> do
      -- get current free position
      !k <- readSTRef kref
      -- assigned edge to the current free position
      VM.write vs a (U.cons (b, k) xs)
      -- set counter to the next free position
      writeSTRef kref (k+1)
      return k
  where
    -- canonical edge orientation: smaller index keys the outer vector
    a = min i j
    b = max i j
-- | Uniformly scale every vertex of the sphere by the given factor.
scaleIsoSphere :: Double -> IsoSphere -> IsoSphere
scaleIsoSphere factor sphere =
  sphere { vertices = U.map (factor *&) (vertices sphere) }
-- | Angular distance (Radians) between neighboring points at a given subdivision level.
-- Each refinement halves the spacing, starting from 72 degrees at level 0.
angularDistance :: Int -> Double
angularDistance level = base / fromIntegral ((2 :: Int) ^ abs level)
  where
    base = pi * (72 / 180)
-- | Build a grid of orientations (quaternions) with roughly the requested
-- angular step: concentric rotation-angle layers, each layer populated with the
-- IsoSphere vertex directions as rotation axes. The identity quaternion
-- ('mempty') is prepended. Layers beyond the symmetry's minimum fundamental-
-- zone distance are filtered by 'isInRodriFZ' -- NOTE(review): closer layers
-- are assumed to lie entirely inside the fundamental zone; confirm against
-- 'getMinDistFZPlanes' semantics.
genIsoSphereSO3Grid :: (Angle a)=> Symm -> a -> U.Vector Quaternion
genIsoSphereSO3Grid symm a = vs
  where
    -- subdivision level and the actual step it realizes
    (n, w) = getOptSubDivisionLevel a
    step = fromAngle (w :: Rad)
    -- number of rotation-angle layers in (0, pi]
    nstep = floor (pi / step)
    iso = isoSphere n
    minw = getMinDistFZPlanes symm
    vs = U.cons mempty (U.concatMap getLayer (U.enumFromStepN step step nstep))
    -- all axes of one layer converted to quaternions with rotation angle t
    toQ t = U.map (\v -> toQuaternion (mkAxisPair v (Rad t)))
    getLayer t
      | t <= minw = toQ t (vertices iso)
      | otherwise = U.filter (isInRodriFZ symm) (toQ t (vertices iso))
-- | Find the minimum subdivision level whose angular spacing does not exceed
-- the requested step; also return the spacing actually achieved.
getOptSubDivisionLevel :: (Angle a0, Angle a1)=> a0 -> (Int, a1)
getOptSubDivisionLevel target = search 0
  where
    limit = fromAngle target
    search !lvl
      | spacing > limit = search (lvl + 1)
      | otherwise       = (lvl, toAngle spacing)
      where spacing = angularDistance lvl
-- | Fast query to the nearest point. N-aray tree search with expected time complexity
-- O(l) where l is the subdivision level.
--
-- Exploits the 4-way face hierarchy: the children of face @fid@ at level l sit
-- at positions @4*fid .. 4*fid+3@ of level l+1 (see 'refineIsoSphere').
-- Returns the index, the coordinates and the angular distance of the closest
-- vertex.
nearestPoint :: IsoSphere -> Vec3D -> (Int, Vec3D, Double)
nearestPoint IsoSphere{..} q = getClosest 0 fid0
  where
    lmax = V.length faces - 1
    -- starting face: the one with the closest center at the coarsest level
    fid0 = U.minIndex $ U.map getD (centers V.! 0)
    qn = normalize q
    -- angular distance (great-circle) between unit vectors
    getD = acos . (qn &.)
    -- find the face position the closest child
    getNextChild !l !fid = let
      cs = centers V.! (l+1)
      fo = 4 * fid
      fids = U.enumFromStepN fo 1 4
      i = U.minIndex $ U.map (getD . (cs U.!)) fids
      in fids U.! i
    -- recursively find the closest face, descending one level per step
    getClosest !l !fid
      | l >= lmax = getClosestOnFace face
      | otherwise = getClosest (l+1) next
      where
        fs = faces V.! l
        face = fs U.! fid
        next = getNextChild l fid
    -- find the closest vertex on the closest face
    getClosestOnFace (ia, ib, ic) = let
      xs = map (\i -> let v = vertices U.! i in (i, v, getD v)) [ia, ib, ic]
      in L.minimumBy (compare `on` (\(_, _, d) -> d)) xs
-- | Render IsoSphere faces as a VTK unstructured grid, using the finest
-- subdivision level (or no cells at all when the sphere has no faces).
renderIsoSphereFaces :: IsoSphere -> [VTKAttrPointValue Vec3D] -> VTK Vec3D
renderIsoSphereFaces IsoSphere{..} pointAttrs =
  mkUGVTK "IsoSphere" vertices finest pointAttrs []
  where
    finest
      | V.null faces = U.empty
      | otherwise    = V.last faces
-- | Render quaternion points as a VTK point cloud (one cell per point),
-- mapped to axis-angle 3D vectors via 'quaternionToVec3'.
renderQuaternions :: U.Vector Quaternion -> [VTKAttrPointValue Vec3D] -> VTK Vec3D
renderQuaternions qs pointAttrs =
  mkUGVTK "IsoSpace" points indices pointAttrs []
  where
    points  = U.map quaternionToVec3 qs
    indices = U.generate (U.length qs) id
-- | Axis-angle encoding of a quaternion as a single 3D vector: the rotation
-- axis scaled by the rotation angle (omega = 2 * acos of the scalar part).
quaternionToVec3 :: Quaternion -> Vec3D
quaternionToVec3 q = angle *& normalize axis
  where
    (w, axis) = splitQuaternion q
    angle     = 2 * acos w
|
lostbean/sledge
|
src/Texture/IsoSphere.hs
|
gpl-3.0
| 8,851
| 0
| 18
| 2,323
| 3,073
| 1,659
| 1,414
| 177
| 2
|
module Language.Lambda.Codegen (genProg) where
import Language.Lambda.AST
import Language.Haskell.Syntax
-- | Compile a lambda program into a complete Haskell @Main@ module that
-- prints the program's value.
genProg :: Program -> HsModule
genProg program = mkMain (genExp 0 program)
-- | Wrap an expression in a @Main@ module whose only declaration is
-- @main = putStrLn . show $ \<expr\>@.
mkMain :: HsExp -> HsModule
mkMain payload =
  HsModule emptySrcLoc (Module "Main") Nothing [] [mainDecl]
  where
    mainDecl :: HsDecl
    mainDecl = HsFunBind [HsMatch emptySrcLoc (HsIdent "main") [] mainRhs []]

    -- a plain (unqualified) variable reference
    ident :: String -> HsExp
    ident = HsVar . UnQual . HsIdent

    -- a plain (unqualified) infix operator
    op :: String -> HsQOp
    op = HsQVarOp . UnQual . HsSymbol

    -- putStrLn . show $ payload
    mainRhs :: HsRhs
    mainRhs = HsUnGuardedRhs $
      HsInfixApp (HsInfixApp (ident "putStrLn") (op ".") (ident "show"))
                 (op "$")
                 payload
-- | Translate one annotated expression into Haskell abstract syntax.
-- The depth is used for generating variable patterns in lambdas.
genExp :: Int -> Annotated a -> HsExp
genExp depth node = case _expression node of
  LambdaExp body      -> HsParen (genLambda depth body)
  AppExp fn arg       -> HsApp (genExp depth fn) (genExp depth arg)
  VarExp (Var v)      -> HsVar (UnQual (genVar v))
  TupleExp components -> HsTuple (genExp depth <$> components)
  StringLiteral str   -> HsLit (HsString str)
  BoolLiteral b       -> HsCon (UnQual (HsIdent (show b)))
  IntLiteral n        -> HsLit (HsInt (toInteger n))
-- | Generate a one-argument lambda. The bound variable is named after the
-- current nesting depth; the body is generated one level deeper.
genLambda :: Int -> Annotated a -> HsExp
genLambda depth body =
  HsLambda emptySrcLoc [HsPVar (genVar depth)] (genExp (depth + 1) body)
-- | Name for the variable bound at a given lambda depth: @v0@, @v1@, ...
genVar :: Int -> HsName
genVar n = HsIdent ("v" ++ show n)
-- | Dummy source location for generated syntax nodes.
emptySrcLoc :: SrcLoc
emptySrcLoc = SrcLoc { srcFilename = "", srcLine = 0, srcColumn = 0 }
|
justinmanley/lambda
|
src/Language/Lambda/Codegen.hs
|
gpl-3.0
| 1,558
| 0
| 11
| 377
| 526
| 267
| 259
| 33
| 7
|
module Main
(
main
) where
import Test.Tasty
import Test.Tasty.Golden
import Functions
-- | Test-suite entry point: hand the test tree to tasty's default runner.
main :: IO ()
main = defaultMain tests
-- | Currently an empty group; the golden-file tests are commented out below.
tests :: TestTree
tests = testGroup "Empty" []
--tests = testGroup "Golden File Tests" [part1]
--part1 = test "Part 1" "..\data\output1_1.txt"
--let
--goldenVsString "Part 1"
--type Part1Input = (Int, Int, Int, [[Char]], Char)
--runPart1 :: FilePath -> String
--runPart1 file = let input = do
-- contents <- readFile file;
-- return read contents;
-- in trymove input
|
cable729/plc2
|
tests/tests.hs
|
gpl-3.0
| 515
| 2
| 6
| 99
| 70
| 44
| 26
| 10
| 1
|
module Tests.SharedTestData where
import Data.Maybe
import Wordify.Rules.Tile
import Wordify.Rules.Pos
import Wordify.Rules.Square
import Wordify.Rules.Pos.Internal
import qualified Data.Map as M
import Wordify.Rules.LetterBag
-- Shared board fixtures: the word "HELLO" laid out horizontally from (5,7)
-- and "TELLY" vertically from (7,5), plus stray ("rogue") tiles placed
-- around each word.
--
-- NOTE(review): 'iterate' yields an infinite coordinate stream; 'mapMaybe
-- posAt' keeps only positions 'posAt' accepts, and consumers rely on lazy
-- 'zip' truncation against the five-square lists. Forcing a full position
-- list would diverge if 'posAt' keeps succeeding -- confirm board bounds.
horizontalPositions = mapMaybe posAt $ iterate (\(x,y) -> (x + 1, y)) (5,7)
horizontalSquares = [Normal $ Just (Letter 'H' 4), Normal $ Just (Letter 'E' 1), DoubleLetter $ Just (Letter 'L' 1), Normal $ Just (Letter 'L' 1), DoubleLetter $ Just (Letter 'O' 1)]

-- Tiles adjacent to, but not part of, the horizontal word.
rogueLeft = (Pos 3 7 "C7", DoubleLetter $ Just (Letter 'X' 2))
rogueRight = (Pos 11 7 "K7", Normal $ Just (Letter 'Z' 2))

horizontals = zip horizontalPositions horizontalSquares

verticalPositions = mapMaybe posAt $ iterate (\(x,y) -> (x, y + 1)) (7,5)
verticalSquares = [Normal $ Just (Letter 'T' 1), Normal $ Just (Letter 'E' 1), DoubleLetter $ Just (Letter 'L' 1), Normal $ Just (Letter 'L' 1), DoubleLetter $ Just (Letter 'Y' 4)]

-- Tiles adjacent to, but not part of, the vertical word.
rogueAbove = (Pos 7 3 "G3", DoubleLetter $ Just (Letter 'X' 2))
rogueBelow = (Pos 7 11 "G11", Normal $ Just (Letter 'Z' 2))

verticals = zip verticalPositions verticalSquares
-- | True exactly when the value is a 'Right'.
isValid :: Either a b -> Bool
isValid = either (const False) (const True)
|
Happy0/haskellscrabble
|
test/Tests/SharedTestData.hs
|
gpl-3.0
| 1,264
| 0
| 10
| 272
| 530
| 284
| 246
| 21
| 1
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.IAM.AttachUserPolicy
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Attaches the specified managed policy to the specified user.
--
-- You use this API to attach a managed policy to a user. To embed an
-- inline policy in a user, use PutUserPolicy.
--
-- For more information about policies, refer to
-- <http://docs.aws.amazon.com/IAM/latest/UserGuide/policies-managed-vs-inline.html Managed Policies and Inline Policies>
-- in the /IAM User Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/IAM/latest/APIReference/API_AttachUserPolicy.html AWS API Reference> for AttachUserPolicy.
module Network.AWS.IAM.AttachUserPolicy
(
-- * Creating a Request
attachUserPolicy
, AttachUserPolicy
-- * Request Lenses
, aupUserName
, aupPolicyARN
-- * Destructuring the Response
, attachUserPolicyResponse
, AttachUserPolicyResponse
) where
import Network.AWS.IAM.Types
import Network.AWS.IAM.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'attachUserPolicy' smart constructor.
data AttachUserPolicy = AttachUserPolicy'
    { _aupUserName  :: !Text -- ^ friendly name (not ARN) of the target user
    , _aupPolicyARN :: !Text -- ^ ARN of the managed policy to attach
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'AttachUserPolicy' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'aupUserName'
--
-- * 'aupPolicyARN'
attachUserPolicy
    :: Text -- ^ 'aupUserName'
    -> Text -- ^ 'aupPolicyARN'
    -> AttachUserPolicy
attachUserPolicy userName policyArn =
    AttachUserPolicy'
    { _aupUserName = userName
    , _aupPolicyARN = policyArn
    }
-- | The name (friendly name, not ARN) of the user to attach the policy to.
aupUserName :: Lens' AttachUserPolicy Text
aupUserName = lens _aupUserName setter
  where setter rec new = rec { _aupUserName = new }
-- | Undocumented member (serialized as the @PolicyArn@ request parameter).
aupPolicyARN :: Lens' AttachUserPolicy Text
aupPolicyARN = lens _aupPolicyARN setter
  where setter rec new = rec { _aupPolicyARN = new }
instance AWSRequest AttachUserPolicy where
        type Rs AttachUserPolicy = AttachUserPolicyResponse
        -- sent as a form-encoded query POST against the IAM service
        request = postQuery iAM
        -- success carries no payload; accept a null body and return the unit response
        response = receiveNull AttachUserPolicyResponse'
-- | This request adds no extra HTTP headers.
instance ToHeaders AttachUserPolicy where
        toHeaders _ = mempty
-- | All IAM query-protocol requests go to the service root path.
instance ToPath AttachUserPolicy where
        toPath _ = "/"
-- Serializes the request as AWS Query-protocol parameters; @Action@ and
-- @Version@ identify the API call, the rest carry the payload.
instance ToQuery AttachUserPolicy where
        toQuery AttachUserPolicy'{..}
          = mconcat
              ["Action" =: ("AttachUserPolicy" :: ByteString),
               "Version" =: ("2010-05-08" :: ByteString),
               "UserName" =: _aupUserName,
               "PolicyArn" =: _aupPolicyARN]
-- | /See:/ 'attachUserPolicyResponse' smart constructor.
--
-- Empty response type: a successful AttachUserPolicy call returns no payload.
data AttachUserPolicyResponse =
    AttachUserPolicyResponse'
    deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'AttachUserPolicyResponse' with the minimum fields required to make a request.
--
-- The response has no fields, so this is simply the nullary constructor.
attachUserPolicyResponse
    :: AttachUserPolicyResponse
attachUserPolicyResponse = AttachUserPolicyResponse'
|
olorin/amazonka
|
amazonka-iam/gen/Network/AWS/IAM/AttachUserPolicy.hs
|
mpl-2.0
| 3,691
| 0
| 9
| 728
| 445
| 272
| 173
| 62
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.