code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-|
/NOTE/: This module is preliminary and may change at a future date.
This module is intended to help converting a list of tags into a
tree of tags.
-}
module Text.HTML.TagSoup.Tree
(
TagTree(..), tagTree,
flattenTree, transformTree, universeTree
) where
import Text.HTML.TagSoup.Type
import Control.Arrow
-- | One node of an HTML tag tree: a 'TagBranch' is a matched open\/close
--   tag pair carrying its name, attributes and child trees, while a
--   'TagLeaf' wraps any other tag (text, comments, unmatched tags, ...).
data TagTree str = TagBranch str [Attribute str] [TagTree str]
                 | TagLeaf (Tag str)
                   deriving (Eq,Ord,Show)
-- | Map a function over every string in the tree: the branch name, both
--   halves of each attribute pair, and the tags stored in leaves.
instance Functor TagTree where
    fmap f (TagBranch name attrs children) =
        TagBranch (f name) [(f k, f v) | (k, v) <- attrs] (map (fmap f) children)
    fmap f (TagLeaf tag) = TagLeaf (fmap f tag)
-- | Convert a list of tags into a tree. This version is not lazy at
-- all, that is saved for version 2.
--
-- Unmatched open tags become leaves; a close tag with no matching open
-- is also kept as a leaf by the driver loop @g@.
tagTree :: Eq str => [Tag str] -> [TagTree str]
tagTree = g
    where
    -- Driver: consume trees until f stops at a stray close tag, emit
    -- that close tag as a leaf, then continue with the rest.
    g :: Eq str => [Tag str] -> [TagTree str]
    g [] = []
    g xs = a ++ map TagLeaf (take 1 b) ++ g (drop 1 b)
        where (a,b) = f xs
    -- the second tuple is either null or starts with a close
    f :: Eq str => [Tag str] -> ([TagTree str],[Tag str])
    f (TagOpen name atts:rest) =
        case f rest of
            -- no close tag anywhere: the open tag degrades to a leaf
            (inner,[]) -> (TagLeaf (TagOpen name atts):inner, [])
            (inner,TagClose x:xs)
                -- matching close: build a branch and keep parsing after it
                | x == name -> let (a,b) = f xs in (TagBranch name atts inner:a, b)
                -- close for an outer tag: this open tag degrades to a leaf
                | otherwise -> (TagLeaf (TagOpen name atts):inner, TagClose x:xs)
            _ -> error "TagSoup.Tree.tagTree: safe as - forall x . isTagClose (snd (f x))"
    f (TagClose x:xs) = ([], TagClose x:xs)
    f (x:xs) = (TagLeaf x:a,b)
        where (a,b) = f xs
    f [] = ([], [])
-- | Flatten a forest of tag trees back into a flat tag list.  Each
--   'TagBranch' contributes an open tag, its flattened children and a
--   matching close tag; each 'TagLeaf' contributes just its tag.
flattenTree :: [TagTree str] -> [Tag str]
flattenTree = concatMap toTags
    where
        toTags (TagLeaf t) = [t]
        toTags (TagBranch name atts inner) =
            TagOpen name atts : (flattenTree inner ++ [TagClose name])
-- | This operation is based on the Uniplate @universe@ function. Given a
-- list of trees, it returns those trees, and all the children trees at
-- any level. For example:
--
-- > universeTree
-- > [TagBranch "a" [("href","url")] [TagBranch "b" [] [TagLeaf (TagText "text")]]]
-- > == [TagBranch "a" [("href","url")] [TagBranch "b" [] [TagLeaf (TagText "text")]]
-- > ,TagBranch "b" [] [TagLeaf (TagText "text")]]
--
-- This operation is particularly useful for queries. To collect all @\"a\"@
-- tags in a tree, simply do:
--
-- > [x | x@(TagBranch "a" _ _) <- universeTree tree]
universeTree :: [TagTree str] -> [TagTree str]
universeTree = concatMap go
    where
        -- a branch yields itself followed by everything underneath it
        go branch@(TagBranch _ _ children) = branch : universeTree children
        go leaf = [leaf]
-- | This operation is based on the Uniplate @transform@ function. Given a
-- list of trees, it applies the function to every tree in a bottom-up
-- manner. This operation is useful for manipulating a tree - for example
-- to make all tag names upper case:
--
-- > upperCase = transformTree f
-- > where f (TagBranch name atts inner) = [TagBranch (map toUpper name) atts inner]
-- > f x = x
transformTree :: (TagTree str -> [TagTree str]) -> [TagTree str] -> [TagTree str]
transformTree act = concatMap apply
    where
        -- children are rewritten first, then the rebuilt branch itself
        apply (TagBranch name atts inner) =
            act (TagBranch name atts (transformTree act inner))
        apply leaf = act leaf
| silkapp/tagsoup | Text/HTML/TagSoup/Tree.hs | bsd-3-clause | 3,300 | 0 | 15 | 929 | 927 | 488 | 439 | 43 | 7 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Physics.Falling.RigidBody.OrderedRigidBody
(
OrderedRigidBody
, rigidBody
, orderRigidBody
, mapOnBody
)
where
import Physics.Falling.Math.Transform
import Physics.Falling.Shape.VolumetricShape
import Physics.Falling.Shape.TransformableShape
import Physics.Falling.RigidBody.RigidBody
import Physics.Falling.Integrator.Integrable
import qualified Physics.Falling.Identification.Identifiable as I
import Physics.Falling.Identification.IndexGenerator
import qualified Physics.Falling.Identification.SignedIndexGenerator as IG
-- | A 'RigidBody' paired with an 'Ord'-comparable identifier so that
--   bodies can be stored in ordered containers.  Eq\/Ord instances below
--   compare only the identifier, never the body itself.
data (Ord identifierType
      , TransformSystem transformType linearVelocityType angularVelocityType
      , TransformableShape dynamicCollisionVolumeType transformType transformedDynamicCollisionVolumeType
      , TransformableShape staticCollisionVolumeType transformType transformedStaticCollisionVolumeType
      , VolumetricShape dynamicCollisionVolumeType
                        inertiaTensorType
                        inverseInertiaTensorType
                        angularVelocityType
                        transformType
      ) =>
      OrderedRigidBody identifierType -- FIXME: put this parameter at the end
                       transformType
                       linearVelocityType
                       angularVelocityType
                       inertiaTensorType
                       inverseInertiaTensorType
                       dynamicCollisionVolumeType
                       staticCollisionVolumeType
                       transformedDynamicCollisionVolumeType
                       transformedStaticCollisionVolumeType
      = OrderedRigidBody {
        identifier :: identifierType
        , rigidBody  :: RigidBody transformType
                                  linearVelocityType
                                  angularVelocityType
                                  inertiaTensorType
                                  inverseInertiaTensorType
                                  dynamicCollisionVolumeType
                                  staticCollisionVolumeType
                                  transformedDynamicCollisionVolumeType
                                  transformedStaticCollisionVolumeType
      }
      deriving(Show)
-- | The class method 'I.identifier' is simply the record accessor.
instance (Ord idt
          , TransformSystem t lv av
          , VolumetricShape dvt i ii av t
          , TransformableShape dvt t dvt'
          , TransformableShape svt t svt')
          => I.Identifiable (OrderedRigidBody idt t lv av i ii dvt svt dvt' svt') idt where
  identifier = identifier
-- | Integration is delegated to the wrapped body; the identifier is kept.
instance (Ord idt
          , TransformSystem t lv av
          , VolumetricShape dvt i ii av t
          , TransformableShape dvt t dvt'
          , TransformableShape svt t svt')
          => Integrable (OrderedRigidBody idt t lv av i ii dvt svt dvt' svt') where
  integrateVelocity dt = mapOnBody $ integrateVelocity dt
  integratePosition dt = mapOnBody $ integratePosition dt
-- | Index generation is delegated to the wrapped body; recycling goes
--   straight to the signed generator.
instance (Ord idt
          , TransformSystem t lv av
          , VolumetricShape dvt i ii av t
          , TransformableShape dvt t dvt'
          , TransformableShape svt t svt')
          => IndexGenerator IG.SignedIndexGenerator (OrderedRigidBody idt t lv av i ii dvt svt dvt' svt') where
  generate b = generate $ rigidBody b
  recycle _  = IG.recycle
-- | Equality compares identifiers only, never the body state.
instance (Ord idt
          , TransformSystem t lv av
          , VolumetricShape dvt i ii av t
          , TransformableShape dvt t dvt'
          , TransformableShape svt t svt')
          => Eq (OrderedRigidBody idt t lv av i ii dvt svt dvt' svt') where
  a == b = identifier a == identifier b
-- | Ordering compares identifiers only, consistent with the Eq instance.
instance (Ord idt
          , TransformSystem t lv av
          , VolumetricShape dvt i ii av t
          , TransformableShape dvt t dvt'
          , TransformableShape svt t svt')
          => Ord (OrderedRigidBody idt t lv av i ii dvt svt dvt' svt') where
  a <= b = identifier a <= identifier b
-- | Attach an ordering identifier to a rigid body so that it can live in
--   ordered containers (comparisons use the identifier only).
orderRigidBody :: (Ord idt
                   , TransformSystem t lv av
                   , VolumetricShape dvt i ii av t
                   , TransformableShape dvt t dvt'
                   , TransformableShape svt t svt') =>
                  idt -> RigidBody t lv av i ii dvt svt dvt' svt' -> OrderedRigidBody idt t lv av i ii dvt svt dvt' svt'
orderRigidBody ident body =
  OrderedRigidBody { identifier = ident
                   , rigidBody  = body }
-- | Apply a body-to-body transformation to the wrapped rigid body,
--   leaving the identifier untouched.
mapOnBody :: (Ord idt
              , TransformSystem t lv av
              , VolumetricShape dvt i ii av t
              , TransformableShape dvt t dvt'
              , TransformableShape svt t svt') =>
             (RigidBody t lv av i ii dvt svt dvt' svt' -> RigidBody t lv av i ii dvt svt dvt' svt') ->
             OrderedRigidBody idt t lv av i ii dvt svt dvt' svt' ->
             OrderedRigidBody idt t lv av i ii dvt svt dvt' svt'
mapOnBody transform orderedBody =
  orderedBody { rigidBody = transform (rigidBody orderedBody) }
| sebcrozet/falling | Physics/Falling/RigidBody/OrderedRigidBody.hs | bsd-3-clause | 5,124 | 0 | 9 | 1,894 | 1,040 | 549 | 491 | 102 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
module Modules.Output.Plot where
import Modules.Input.Setting (varRestPower, varLocalPower)
import qualified EFA.Application.Utility as AppUt
import qualified Modules.Input.Setting as ModSet
import qualified Modules.Input.System as System
import qualified EFA.Application.Optimisation.Base as Base
import qualified EFA.Application.Plot as AppPlot
import qualified EFA.Application.Optimisation.Sweep as Sweep
import qualified EFA.Application.Type as Type
import qualified EFA.Application.Optimisation.Params as Params
import qualified EFA.Application.Optimisation.Balance as Balance
import qualified EFA.Signal.Signal as Sig
import qualified EFA.Signal.Plot as Plot
import qualified EFA.Signal.Record as Record
import qualified EFA.Signal.Vector as Vector
import qualified EFA.Flow.SequenceState.Index as Idx
import qualified EFA.Flow.Sequence.Algorithm as SeqAlgo
import qualified EFA.Flow.Draw as Draw
import qualified EFA.Flow.Topology.Index as TopoIdx
import qualified EFA.Flow.State.Quantity as StateQty
import qualified EFA.Flow.State.Index as StateIdx
import qualified EFA.Graph as Graph
import qualified EFA.Graph.Topology.Node as Node
import EFA.Equation.Result (Result(Determined))
import qualified EFA.Equation.Arithmetic as Arith
import qualified EFA.Report.FormatValue as FormatValue
import EFA.Signal.Plot (label)
import EFA.Utility.Async (concurrentlyMany_)
import EFA.Utility.List (vhead, vlast)
import EFA.Utility.Filename (filename, Filename)
import EFA.Utility.Show (showEdge, showNode)
import qualified Graphics.Gnuplot.Terminal.Default as DefaultTerm
import qualified Graphics.Gnuplot.Terminal.PNG as PNG
import qualified Graphics.Gnuplot.Terminal.SVG as SVG
import qualified Graphics.Gnuplot.Terminal.PostScript as PS
import qualified Graphics.Gnuplot.Terminal as Terminal
import qualified Graphics.Gnuplot.Frame.OptionSet as Opts
import qualified Graphics.Gnuplot.Graph.ThreeDimensional as Graph3D
import qualified Graphics.Gnuplot.LineSpecification as LineSpec
import qualified Graphics.Gnuplot.ColorSpecification as ColorSpec
import qualified Graphics.Gnuplot.Value.Tuple as Tuple
import qualified Graphics.Gnuplot.Value.Atom as Atom
import Data.GraphViz.Attributes.Colors.X11 (X11Color(DarkSeaGreen2, Lavender))
import qualified Data.Map as Map; import Data.Map (Map)
import Data.GraphViz.Types.Canonical (DotGraph)
import Data.Text.Lazy (Text)
import qualified Data.Vector.Unboxed as UV
import Data.Vector (Vector)
--import Data.Tuple.HT (fst3, snd3, thd3)
import qualified Data.Maybe as Maybe
import Data.Monoid ((<>), mconcat)
import Control.Applicative (liftA2)
import System.Directory (createDirectoryIfMissing)
import System.FilePath.Posix ((</>), (<.>))
import Text.Printf (printf)
import Debug.Trace(trace)
-- | Shared gnuplot frame options for the 3D surface plots: axis labels
--   from the two requirement nodes, \"NaN\" treated as missing data,
--   green palette and depth-ordered drawing.  The commented-out lines
--   are optional range\/heatmap settings kept for manual experiments.
frameOpts ::
  (Atom.C a, Fractional a, Tuple.C a) =>
  -- Opts.T (Graph3D.T a a a) -> Opts.T (Graph3D.T a a a)) ->
  Opts.T (Graph3D.T a a a) ->
  Opts.T (Graph3D.T a a a)
frameOpts =
  -- Plot.heatmap .
  -- Plot.xyzrange3d (1.9, 3) (0.1, 1.1) (0.16, 0.31) .
  -- Plot.cbrange (0.2, 1) .
  Plot.xyzlabelnode
    (Just System.LocalRest)
    (Just System.Rest)
    Nothing
  . Plot.missing "NaN"
  . Plot.paletteGH
  . Plot.depthorder
{-plotMaps ::
(Filename state, Show state,
Terminal.C term,
Plot.Surface
(Sig.PSignal2 Vector vec a)
(Sig.PSignal2 Vector vec a)
tcZ,
Plot.Value tcZ ~ Double) =>
(FilePath -> IO term) ->
(a -> tcZ) ->
String ->
Map state a ->
IO ()-}
-- | Render one 3D surface per map entry, concurrently.  The plot title
--   and output file name are derived from the shared @title@ plus the
--   map key (typically a state).
plotMaps ::
  (Show k, Filename k, Terminal.C term,
   Plot.Surface
     (Sig.PSignal2 Vector UV.Vector Double)
     (Sig.PSignal2 Vector UV.Vector Double)
     tcZ,
   Plot.Value tcZ ~ Double) =>
  (String -> IO term) -> (a -> tcZ) -> [Char] -> Map k a -> IO ()
plotMaps terminal func title =
  concurrentlyMany_ . Map.elems . Map.mapWithKey f
  where f state mat = do
          let str = filename (title, state)
          t <- terminal str
          AppPlot.surfaceWithOpts
            (title ++ ", " ++ show state)
            t
            id
            id -- (Graph3D.typ "lines")
            frameOpts varLocalPower varRestPower
            (func mat)
-- | Like 'plotMaps' but with the plot key (legend) switched off — used
--   for sweep surfaces where one frame holds many overlaid surfaces.
plotSweeps ::
  (Filename state, Show state,
   Terminal.C term,
   Plot.Surface
     (Sig.PSignal2 Vector Vector Double)
     (Sig.PSignal2 Vector Vector Double)
     tcZ,Plot.Surface
     (Sig.PSignal2 Vector UV.Vector Double)
     (Sig.PSignal2 Vector UV.Vector Double)
     tcZ,
   Plot.Value tcZ ~ Double) =>
  (FilePath -> IO term) ->
  (a -> tcZ) ->
  String ->
  Map state a ->
  IO ()
plotSweeps terminal func title =
  concurrentlyMany_ . Map.elems . Map.mapWithKey f
  where f state mat = do
          let str = filename (title, state)
          t <- terminal str
          AppPlot.surfaceWithOpts
            (title ++ ", " ++ show state)
            t
            id
            id -- (Graph3D.typ "lines")
            (Opts.key False . frameOpts) varLocalPower varRestPower
            (func mat)
-- | Plot a two-level map: the outer key becomes the title prefix, the
--   inner maps are rendered via 'plotMaps' with 'Nothing' shown as NaN.
plotMapOfMaps ::
  (Show a, Terminal.C term, Show state, Filename state, a ~ Double) =>
  (FilePath -> IO term) ->
  Map a (Map state (Sig.PSignal2 Vector Vector (Maybe (Result a)))) ->
  IO ()
plotMapOfMaps terminal =
  concurrentlyMany_
  . Map.elems
  . Map.mapWithKey (plotMaps terminal (Sig.map AppUt.nothing2Nan) . show)
-- | Draw one state-flow graph per requirement point that has an optimal
--   solution; entries with 'Nothing' are skipped.  The graph title shows
--   the requirement values, objective value and eta.
plotGraphMaps ::
  (FormatValue.FormatValue a, Show a, Filename [a], Node.C node) =>
  (String -> DotGraph Text -> IO ()) ->
  String ->
  Map [a] (Maybe (a, a, Int, Type.EnvResult node a)) ->
  IO ()
plotGraphMaps terminal title =
  sequence_ . Map.elems . Map.mapWithKey
    (\reqs -> maybe (return ())
      (\(objVal, eta, _, graph) -> do
        let str = filename title </> filename reqs
        terminal str
          $ Draw.bgcolour Lavender
          $ Draw.title (title ++ "\\lreqs " ++ show reqs
                        ++ "\\lObjective Value " ++ show objVal
                        ++ "\\leta " ++ show eta
                        ++ "\\l")
          $ Draw.stateFlowGraph Draw.optionsDefault graph))
-- | Draw the per-state optimal-solution graphs, one batch per state;
--   the shown state becomes the title of each batch.
plotGraphMapOfMaps ::
  (FormatValue.FormatValue a, Show a, Filename [a], Node.C node) =>
  (String -> DotGraph Text -> IO ()) ->
  Type.OptimalSolutionPerState node a ->
  IO ()
plotGraphMapOfMaps terminal =
  sequence_ . Map.elems . Map.mapWithKey (plotGraphMaps terminal . show)
-- | Convenience wrapper: draw the optimal solution graphs of an entire
--   signal-based optimisation result.
optimalObjectivePerState ::
  (Show a, FormatValue.FormatValue a, Filename [a], Node.C node) =>
  (String -> DotGraph Text -> IO ()) ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
optimalObjectivePerState terminal =
  plotGraphMapOfMaps terminal . Type.optimalSolutionPerState
-- | For every directed edge of the system topology, extract the two
--   power positions of that edge from the record and plot them in their
--   own window\/file, all plots running concurrently.
perEdge ::
  (Vector.Walker l, Vector.Storage l a, Vector.FromList l,
   Arith.Constant a,
   Tuple.C a, Atom.C a,
   Terminal.C term,
   Node.C node, Show node, Filename node) =>
  (FilePath -> IO term) ->
  Params.System node a ->
  Record.PowerRecord node l a ->
  IO ()
perEdge terminal sysParams rec =
  let recs = map f $ Graph.edges $ Params.systemTopology sysParams
      -- both flow directions of one edge in a single sub-record
      f (Graph.DirEdge fr to) =
        Record.extract [TopoIdx.ppos fr to, TopoIdx.ppos to fr] rec
      g r = do
        let ti = "Simulation Per Edge"
            str = filename ti </> (filename $ Map.keys $ Record.recordSignalMap r)
        t <- terminal str
        AppPlot.record ti t showEdge id r
  in concurrentlyMany_ $ map g recs
-- | Plot the simulation signal record edge-by-edge via 'perEdge'.
simulationSignalsPerEdge ::
  (Show node, Node.C node, Filename node,
   Arith.Constant a, Tuple.C a, Atom.C a,
   Terminal.C term,
   Vector.Walker simVec,
   Vector.Storage simVec a,
   Vector.FromList simVec) =>
  (FilePath -> IO term) ->
  Params.System node a ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
simulationSignalsPerEdge terminal sysParams =
  perEdge terminal sysParams . Type.signals . Type.simulation
-- | Plot a whole power record in a single frame titled @ti@; the title
--   also names the output file.
record ::
  (Ord node, Node.C node, Terminal.C term,
   Vector.Walker l, Vector.Storage l a,
   Vector.FromList l, Arith.Constant a,
   Tuple.C a, Atom.C a) =>
  (FilePath -> IO term) ->
  String -> Record.PowerRecord node l a -> IO ()
record terminal ti rec = do
  t <- terminal $ filename ti
  AppPlot.record ti t showEdge id rec
-- | Plot all signals of the simulation result in one frame.
simulationSignals ::
  (Show node, Ord node, Node.C node,
   Terminal.C term,
   Arith.Constant a,
   Tuple.C a, Atom.C a,
   Vector.Walker simVec,
   Vector.Storage simVec a,
   Vector.FromList simVec) =>
  (FilePath -> IO term) ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
simulationSignals terminal opt = do
  let str = "Simulation Signals"
  t <- terminal $ filename str
  AppPlot.record str t showEdge id
    $ Type.signals
    $ Type.simulation opt
{-
givenSignals ::
(Show node, Ord node, Node.C node,
Terminal.C term,
Arith.Constant a,
Tuple.C a, Atom.C a,
Vector.Walker intVec,
Vector.Storage intVec a,
Vector.FromList intVec) =>
(FilePath -> IO term) ->
Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
IO ()
givenSignals terminal opt = do
let str = "Given Signals"
t <- terminal $ filename str
AppPlot.record str t showEdge id
$ Type.reqsAndDofsSignals
$ Type.interpolation opt
-}
-- | Re-export of 'AppUt.to2DMatrix': arrange a map keyed by requirement
--   coordinate lists into a 2D signal matrix for surface plotting.
to2DMatrix :: (Ord b) => Map [b] a -> Sig.PSignal2 Vector Vector a
to2DMatrix = AppUt.to2DMatrix
-- | Collapse a 'Maybe' to its payload, substituting NaN for 'Nothing'
--   so that missing grid points show up as gaps in gnuplot.
m2n :: (Arith.Constant a) => Maybe a -> a
m2n = Maybe.fromMaybe AppUt.nan
-- | Plot a single surface with the shared 'frameOpts' and an empty
--   legend entry.  The commented-out line keeps the old \"lines\" style.
defaultPlot ::
  (Terminal.C term, a ~ Double) =>
  IO term -> String -> Sig.PSignal2 Vector Vector a -> IO ()
defaultPlot terminal title xs = do
  t <- terminal
  AppPlot.surfaceWithOpts
    -- title t (LineSpec.title "") (Graph3D.typ "lines") frameOpts varLocalPower varRestPower xs
    title t (LineSpec.title "") id frameOpts varLocalPower varRestPower xs
{-
withFuncToMatrix ::
(Ord b, Arith.Constant b, a~b) =>
-- (Type.OptimalSolution node a -> a) ->
((b, b, Idx.State, Int, Type.EnvResult node b) -> b) ->
Type.SignalBasedOptimisation node sweep sweepVec a intVec b simVec c efaVec d ->
Sig.PSignal2 Vector Vector b
withFuncToMatrix func =
to2DMatrix
. Map.map (maybe AppUt.nan func)
. Type.optimalSolution
plotMax ::
(Terminal.C term, b ~ a, a ~ Double) =>
IO term ->
String ->
((b, b, Idx.State, Int, Type.EnvResult node b) -> b) ->
Type.SignalBasedOptimisation node sweep sweepVec a intVec b simVec c efaVec d ->
IO ()
plotMax term title func =
defaultPlot term title
. withFuncToMatrix func
-- TODO: g Nothing = Arith.zero is dangerous -- better solution ?
maxPos ::
(Ord node, Show node, Filename node, Node.C node,Arith.Constant b,a ~ b,b ~ Double,
Terminal.C term) =>
TopoIdx.Position node ->
(FilePath -> IO term) ->
Type.SignalBasedOptimisation node sweep sweepVec a intVec b simVec c efaVec d ->
IO ()
maxPos pos@(TopoIdx.Position f t) terminal =
plotMax (terminal $ filename ("maxPos", pos))
("Maximal Value for: " ++ showEdge pos)
(\(_, _, st, _, env) -> g $ StateQty.lookup (StateIdx.power st f t) env)
where g (Just (Determined x)) = x
g Nothing = Arith.zero -- show Power of inactive Edges
g (Just (Undetermined)) = AppUt.nan
maxEta ::
(Terminal.C term, a ~ b,b ~ Double) =>
(FilePath -> IO term) ->
Type.SignalBasedOptimisation node sweep sweepVec a intVec b simVec c efaVec d ->
IO ()
maxEta term =
plotMax (term "maxEta") "Maximal Eta of All States"
AppUt.snd5
maxObj ::
(Terminal.C term, a ~ b,b ~ Double) =>
(FilePath -> IO term) ->
Type.SignalBasedOptimisation node sweep sweepVec a intVec b simVec c efaVec d ->
IO ()
maxObj term =
plotMax (term "maxObj") "Maximal Objective of All States" AppUt.fst5
bestStateCurve ::
(Ord b, Arith.Constant a, Num a, a ~ b) =>
Type.SignalBasedOptimisation node sweep sweepVec a intVec b simVec c efaVec d ->
Sig.PSignal2 Vector Vector a
bestStateCurve =
withFuncToMatrix ((\(Idx.State state) -> fromIntegral state) . AppUt.thd5)
-}
-- | Plot, per requirement point, the state number of each state's
--   optimal solution as overlaid surfaces (via 'plotOptimal').
stateRange2 ::
  (Ord a, Terminal.C term, b ~ a) =>
  ([Char] -> IO term) ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
stateRange2 term opt = do
  t <- term "StateRanges"
  plotOptimal t (\(Idx.State st) _ -> fromIntegral st) "stateRange2" opt
-- | Older variant of 'stateRange2' that takes an already-constructed
--   terminal: one labelled surface per state, z value = state number,
--   NaN where a state has no optimal solution at that grid point.
stateRange ::
  (Terminal.C term, Ord a) =>
  term ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d
  -> IO ()
stateRange terminal =
  AppPlot.surfaceWithOpts "StateRanges"
    terminal
    id
    id -- (Graph3D.typ "lines")
    frameOpts varLocalPower varRestPower
  . Map.elems
  . Map.mapWithKey (\state@(Idx.State st) -> label (show state) . to2DMatrix . fmap (m2n . fmap (f st)))
  . Type.optimalSolutionPerState
  where f st _ = fromIntegral st
{-
maxState ::
(Terminal.C term, a ~ b, b ~ Double) =>
(FilePath -> IO term) ->
Type.SignalBasedOptimisation node sweep sweepVec a intVec b simVec c efaVec d ->
IO ()
maxState term =
defaultPlot (term "maxState") "Best State of All States"
. bestStateCurve
maxStateContour ::
(Terminal.C term, Ord a, a ~ b, a ~ Double, b ~ Double) =>
(FilePath -> IO term) ->
Type.SignalBasedOptimisation node sweep sweepVec a intVec b simVec c efaVec d ->
IO ()
maxStateContour terminal opt = do
term <- terminal "maxStateContour"
AppPlot.surfaceWithOpts
"Best State of All States"
term id id (Plot.contour . frameOpts) varLocalPower varRestPower
$ bestStateCurve opt
-}
-- | Generic per-state maximum plot: @func@ selects a scalar per
--   requirement point from the optimal solutions, which is then plotted
--   as one surface per state via 'plotMaps'.
maxPerState ::
  (Terminal.C term,a ~ Double) =>
  (FilePath -> IO term) ->
  String ->
  (Type.OptimalSolutionPerState node a ->
   Map Idx.State (Map [Double] Double)) ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
maxPerState terminal title func =
  plotMaps terminal id title
  . Map.map to2DMatrix
  . func
  . Type.optimalSolutionPerState
-- | Specialisations of 'maxPerState' for the objective value, the eta of
--   the chosen point and the chosen sweep index, respectively.
maxObjPerState, maxEtaPerState,maxIndexPerState ::
  (Terminal.C term, a ~ Double) =>
  (FilePath -> IO term) ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
maxObjPerState terminal =
  maxPerState terminal "Maximal Objective Per State" AppUt.getMaxObj
maxEtaPerState terminal =
  maxPerState terminal "Chosen Eta Per State" AppUt.getMaxEta
maxIndexPerState terminal =
  maxPerState terminal "Chosen Index Per State" AppUt.getMaxIndex
-- | Plot the per-state expected (average) solution values, with missing
--   grid points rendered as NaN.
expectedEtaPerState ::
  (Terminal.C term, a ~ Double) =>
  (FilePath -> IO term) ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
expectedEtaPerState terminal =
  plotSweeps terminal id "Expected Value Per State"
  . Map.map (to2DMatrix . Map.map m2n)
  . Type.averageSolutionPerState
-- | Plot, per state, the difference between the maximal eta and the
--   expected value at each requirement point (intersection of the two
--   maps; points present in only one map are dropped).
expectedEtaDifferencePerState ::
  (Terminal.C term, a ~ Double) =>
  (FilePath -> IO term) ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
expectedEtaDifferencePerState terminal opt =
  plotSweeps terminal id "Difference Between Maximal Eta and Expected Value Per State"
    $ Map.map to2DMatrix mat
  where ev = Map.map (Map.map m2n) $
               Type.averageSolutionPerState opt
        eta = AppUt.getMaxEta
                $ Type.optimalSolutionPerState opt
        mat = Map.intersectionWith (Map.intersectionWith (Arith.~-)) eta ev
-- | Plot, per state, the value at a given flow position of the optimal
--   solutions (via 'maxPerState').
maxPosPerState ::
  (Show (qty node), Ord node,
   Show part, StateQty.Lookup (Idx.InPart part qty),
   Terminal.C term, a ~ Double) =>
  (FilePath -> IO term) ->
  Idx.InPart part qty node ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
maxPosPerState terminal pos =
  maxPerState
    terminal
    ("Maximal Position " ++ show pos ++ " per state")
    (AppUt.getMaxPos pos)
-- | Split a matrix of lists (one list entry per sweep point) into @len@
--   matrices, the @i@-th holding element @i@ of every list.  Partial if
--   any list is shorter than @len@.
matrix2ListOfMatrices ::
  Int -> Sig.PSignal2 Vector Vector [a] -> [Sig.PSignal2 Vector Vector a]
matrix2ListOfMatrices len matrix =
  [ Sig.map (!! idx) matrix | idx <- [0 .. len-1] ]
-- | Arrange sweep results into a 2D matrix; undetermined results are
--   replaced by a whole sweep of NaNs of length @len@.
sweepResultTo2DMatrix ::
  (Ord b, Sweep.SweepClass sweep vec a, Arith.Constant a) =>
  Int ->
  Map [b] (Result (sweep vec a)) -> Sig.PSignal2 Vector Vector (sweep vec a)
sweepResultTo2DMatrix len = Sig.map f . to2DMatrix
  where f (Determined x) = x
        f _ = Sweep.fromRational len AppUt.nan
-- | Plot the system eta of every sweep point, per state: the sweep stack
--   is unpacked into one surface per sweep index.
sweepStackPerStateEta ::
  (Show (vec Double),a ~ Double,
   Node.C node,
   Arith.Product (sweep vec a),
   Sweep.SweepVector vec a,
   Sweep.SweepClass sweep vec a,
   Terminal.C term) =>
  (FilePath -> IO term) ->
  Params.Optimisation node f sweep vec a ->
  Type.Sweep node sweep vec a ->
  IO ()
sweepStackPerStateEta terminal params =
  let len = Params.sweepLength params
  in plotSweeps terminal id "Per State Sweep -- Eta"
     . Map.map (matrix2ListOfMatrices len
                . Sig.map Sweep.toList
                . sweepResultTo2DMatrix len)
     . Map.map (Map.map Type.etaSys)
-- | Plot the storage power of a given node for every sweep point, per
--   state.  Errors out if the node has no storage power entry.
sweepStackPerStateStoragePower ::
  (Show (vec Double),a ~ Double,Show node,
   Node.C node,
   Arith.Product (sweep vec a),
   Sweep.SweepVector vec a,
   Sweep.SweepClass sweep vec a,
   Terminal.C term) =>
  (FilePath -> IO term) ->
  Params.Optimisation node f sweep vec a ->
  node ->
  Type.Sweep node sweep vec a ->
  IO ()
sweepStackPerStateStoragePower terminal params node =
  let len = Params.sweepLength params
      f m = Map.lookup node m
      g (Just (Just x)) = x
      g _ = error ("Error in sweepStackPerStateStoragePower - no StoragePower found for node: " ++ show node)
  in plotSweeps terminal id "Per State Sweep -- StoragePower"
     . Map.map (matrix2ListOfMatrices len
                . Sig.map Sweep.toList
                . sweepResultTo2DMatrix len)
     . Map.map (Map.map (g . f . Type.storagePowerMap))
-- | Plot the objective (including balance forcing) of every sweep point,
--   per state, as computed by 'Base.optStackPerState'.
sweepStackPerStateOpt ::
  (Show (vec Double),a ~ Double,vec ~ UV.Vector,Show node,Sweep.SweepClass sweep UV.Vector (Double, Double),
   Node.C node,
   Arith.Product (sweep vec a),
   Sweep.SweepVector vec a,
   Sweep.SweepClass sweep vec a,
   Terminal.C term) =>
  (FilePath -> IO term) ->
  Params.Optimisation node f sweep vec a ->
  Balance.Forcing node a ->
  Type.Sweep node sweep vec a ->
  IO ()
sweepStackPerStateOpt terminal params balanceForcing =
  let len = Params.sweepLength params
  in plotSweeps terminal id "Per State Sweep -- Opt"
     . Map.map (matrix2ListOfMatrices len
                . Sig.map Sweep.toList
                . sweepResultTo2DMatrix len)
     . (Base.optStackPerState params balanceForcing)
-- | Plot the validity condition of every sweep point, per state: a valid
--   point is drawn at its own sweep index, an invalid one as NaN (0/0).
sweepStackPerStateCondition ::
  (Show (vec Double),a ~ Double,Show node,sweep ~ Sweep.Sweep,vec ~ UV.Vector,
   Node.C node,
   Arith.Product (sweep vec a),
   Sweep.SweepVector vec a,
   Sweep.SweepClass sweep vec a,
   Terminal.C term) =>
  (FilePath -> IO term) ->
  Params.Optimisation node f sweep vec a ->
  Type.Sweep node sweep vec a ->
  IO ()
sweepStackPerStateCondition terminal params =
  let len = Params.sweepLength params
      f (Determined (Sweep.Sweep vec)) = Determined $ Sweep.Sweep $ UV.imap g vec
      f _ = error "Error in sweepStackPerStateCondition - undetermined Condition"
      -- valid -> plot at the sweep index, invalid -> NaN (gap in plot)
      g idx True = fromIntegral idx
      g _ False = 0/0
  in plotSweeps terminal id "Per State Sweep -- Validity"
     . Map.map (matrix2ListOfMatrices len
                . Sig.map Sweep.toList
                . sweepResultTo2DMatrix len)
     . Map.map (Map.map (f . Type.condVec))
{-
sweepStackPerStatePowerPos ::
(Show (vec Double),a ~ Double,Show node,sweep ~ Sweep.Sweep,vec ~ UV.Vector,
Node.C node,
Arith.Product (sweep vec a),
Sweep.SweepVector vec a,
Sweep.SweepClass sweep vec a,
Terminal.C term) =>
(FilePath -> IO term) ->
Params.Optimisation node f sweep vec a ->
TopoIdx.Position node ->
Type.Sweep node sweep vec a ->
IO ()
sweepStackPerStatePowerPos terminal params pos@(TopoIdx.Position f t) =
let len = Params.sweepLength params
g (Just (Just x)) = x
g _ = error ("Error in sweepStackPerStateStoragePower - no Power found for Position: " ++ show pos)
in plotSweeps terminal id ("Per State Sweep -- PowerPosition: " ++ show pos)
. Map.map (matrix2ListOfMatrices len
. Sig.map Sweep.toList
. sweepResultTo2DMatrix len)
. Map.mapWithKey (\st x -> (Map.map (g . StateQty.lookup (StateIdx.power st f t) . Type.envResult) x))
-}
{-
g $ StateQty.lookup (StateIdx.power st f t) env)
where g (Just (Determined x)) = x
g _ = AppUt.nan
-}
-- | Plot one labelled surface per state where the z value is computed by
--   @f@ from that state's optimal solution; missing points become NaN.
plotOptimal ::
  (Terminal.C term, Ord b, a~b) =>
  term ->
  (Idx.State -> (b, b, Int,Type.EnvResult node b) -> Double) ->
  String -> Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d -> IO ()
plotOptimal terminal f title =
  AppPlot.surfaceWithOpts title
    terminal
    id
    id -- (Graph3D.typ "lines")
    frameOpts varLocalPower varRestPower
  . Map.elems
  . Map.mapWithKey (\state -> label (show state) . to2DMatrix . fmap (m2n . fmap (f state)))
  . Type.optimalSolutionPerState
-- | Per-state surfaces of the maximal objective value and the maximal
--   eta, respectively (first\/second component of the solution tuple).
optimalObjs, optimalEtas ::
  (Terminal.C term, a ~ Double,b~Double) =>
  (FilePath -> IO term) ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
optimalObjs terminal opt = do
  t <- terminal "optimalObjs"
  plotOptimal t (const AppUt.fst4) "Maximal Objective Function Surfaces" opt
optimalEtas terminal opt = do
  t <- terminal "optimalEtas"
  plotOptimal t (const AppUt.snd4) "Maximal Eta Surfaces" opt
-- | Per-state surfaces of the power at a given topology position in the
--   optimal solution; positions without a determined value become NaN.
optimalPos ::
  (Node.C node, Filename node, Terminal.C term, a ~ Double, b ~ Double) =>
  TopoIdx.Position node ->
  (FilePath -> IO term) ->
  Type.SignalBasedOptimisation node sweep vec a intVec b simVec c efaVec d ->
  IO ()
optimalPos pos@(TopoIdx.Position f t) terminal opt = do
  term <- terminal $ filename ("optimalPos", pos)
  let str = "Optimal " ++ showEdge pos
  plotOptimal term (\st -> (g . StateQty.lookup (StateIdx.power st f t) . AppUt.frth4)) str opt
  where g (Just (Determined x)) = x
        g _ = AppUt.nan
-- | Given the grid lines @xs@ and @ys@ of the requirement grid, return
--   the corner points of the tile containing @(x, y)@, ordered as a
--   closed polygon (first corner repeated last) for plotting.
--   NOTE(review): partial — 'findInterval' fails via 'vlast'\/'vhead'
--   when the coordinate lies outside the span of the grid lines.
findTile :: (Ord t, Show t) => [t] -> [t] -> t -> t -> [(t, t)]
findTile xs ys x y =
  let (xa, xb) = findInterval x xs
      (ya, yb) = findInterval y ys
      --findInterval :: (Ord a) => a -> [a] -> (a, a)
      -- last grid line <= z and first grid line > z
      -- (leftover Debug.Trace call removed; it spammed stderr per tile)
      findInterval z zs = (vlast "findTile" as, vhead "findTile" bs)
        where (as, bs) = span (<=z) zs
      -- reorder the four corners into a closed traversal of the tile
      sort [(x0, y0), (x1, y1), (x2, y2), (x3, y3)] =
        [(x0, y0), (x2, y2), (x3, y3), (x1, y1), (x0, y0)]
      sort _ = error "findTile: sort failed"
  in sort $ liftA2 (,) [xa, xb] [ya, yb]
-- | Plot the requirement tiles for every adjacent pair of signals in the
--   power record.  Total: records with fewer than two signals simply
--   produce no plots.  (The previous @zip (init xs) (tail xs)@ pairing
--   crashed with \"init: empty list\" on a record without signals;
--   @zip xs (drop 1 xs)@ yields the same adjacent pairs but is total.)
reqsRec ::
  (Show (v Double), Vector.Walker v, Vector.Storage v Double,
   Vector.FromList v, Terminal.C term) =>
  (FilePath -> IO term) ->
  Record.PowerRecord node v Double ->
  IO ()
reqsRec terminal (Record.Record _ pMap) = mapM_ f $ zip xs (drop 1 xs)
  where xs = Map.elems pMap
        f (x,y) = requirements terminal x y
-- | Plot the local\/rest requirement trajectory (red line with points)
--   together with, for every sample, the grid tile of 'ModSet.local' x
--   'ModSet.rest' that contains it (green tiles from 'findTile').
requirements ::
  (Terminal.C term, Show (v Double),
   Vector.Walker v,
   Vector.Storage v Double,
   Vector.FromList v) =>
  (FilePath -> IO term) ->
  Sig.PSignal v Double ->
  Sig.PSignal v Double ->
  IO ()
requirements terminal plocal prest = do
  let rs = Sig.toList prest
      ls = Sig.toList plocal
      -- per sample: the x and y coordinates of its containing tile
      ts :: [([Double], [Double])]
      ts = map unzip $ zipWith (findTile ModSet.local ModSet.rest ) rs ls
      sigStyle _ =
        LineSpec.pointSize 1 $
        LineSpec.pointType 7 $
        LineSpec.lineWidth 2 $
        LineSpec.lineColor ColorSpec.red $
        LineSpec.deflt
      tileStyle _ =
        LineSpec.pointSize 5 $
        LineSpec.pointType 5 $
        LineSpec.lineWidth 5 $
        LineSpec.lineColor ColorSpec.springGreen $
        LineSpec.deflt
      -- one tile outline as an xy plot
      f (xs, ys) =
        let
          xsSig, ysSig :: Sig.PSignal Vector Double
          xsSig = Sig.fromList xs
          ysSig = Sig.fromList ys
        in Plot.xy tileStyle xsSig [AppPlot.label "" ysSig]
  t <- terminal "requirements"
  Plot.run t
    ( Opts.yLabel (showNode System.Rest) $
      Opts.xLabel (showNode System.LocalRest) $
      Plot.xyFrameAttr "Requirements" prest plocal)
    ( (mconcat $ map f ts)
      <> Plot.xy sigStyle [prest] [AppPlot.label "" plocal])
-- | Draw the three flow graphs obtained from the simulation: the raw
--   sequence flow graph, its accumulated version, and the state flow
--   graph (the identity @g@ keeps the graph values unchanged).
simulationGraphs ::
  (FormatValue.FormatValue b, UV.Unbox b,
   Vector.Storage efaVec d,
   Vector.FromList efaVec,
   Vector.Walker efaVec,
   Arith.ZeroTestable d,
   Arith.Constant d,
   FormatValue.FormatValue d,
   Node.C node) =>
  (String -> DotGraph Text -> IO ()) ->
  Type.SignalBasedOptimisation node sweep sweepVec a intVec b simVec c efaVec d ->
  IO ()
simulationGraphs terminal (Type.SignalBasedOptimisation _ _ _ _ efa _) = do
  let g = id
  terminal "simulationGraphsSequence"
    $ Draw.bgcolour DarkSeaGreen2
    $ Draw.title "Sequence Flow Graph from Simulation"
    $ Draw.seqFlowGraph Draw.optionsDefault (Type.sequenceFlowGraph efa)
  terminal "simulationGraphsSequenceAccumulated"
    $ Draw.bgcolour DarkSeaGreen2
    $ Draw.title "Accumulated Sequence Flow Graph from Simulation"
    $ Draw.seqFlowGraph Draw.optionsDefault
    $ SeqAlgo.accumulate (Type.sequenceFlowGraph efa)
  terminal "simulationGraphsState"
    $ Draw.bgcolour Lavender
    $ Draw.title "State Flow Graph from Simulation"
    $ Draw.stateFlowGraph Draw.optionsDefault
    $ StateQty.mapGraph g g (Type.stateFlowGraph efa)
-- | Render a dot graph into @tmp/<time>/<dir>/<n>.<suffix>@, creating
--   the directory first.  @n@ is zero-padded to six digits.
dot ::
  (FilePath -> DotGraph Text -> IO ()) ->
  String -> String -> Int -> FilePath -> DotGraph Text -> IO ()
dot render suffix time n dir g = do
  let outDir  = "tmp" </> filename time </> dir
      outFile = outDir </> printf "%6.6d" n <.> suffix
  createDirectoryIfMissing True outDir
  render outFile g
-- | Dot renderers: interactive X11 window (ignores the path arguments),
--   and file output in PNG, SVG and EPS format via 'dot'.
dotXTerm :: b -> DotGraph Text -> IO ()
dotXTerm = const Draw.xterm
dotPNG :: String -> Int -> FilePath -> DotGraph Text -> IO ()
dotPNG = dot Draw.png "png"
dotSVG :: String -> Int -> FilePath -> DotGraph Text -> IO ()
dotSVG = dot Draw.svg "svg"
dotPS :: String -> Int -> FilePath -> DotGraph Text -> IO ()
dotPS = dot Draw.eps "eps"
-- | Build a gnuplot terminal writing to @tmp/<time>/<dir>/<n>.<suffix>@,
--   creating the directory first.  @n@ is zero-padded to six digits.
gp :: (FilePath -> term) -> String -> String -> Int -> FilePath -> IO term
gp mkTerminal suffix time n dir = do
  createDirectoryIfMissing True outDir
  return (mkTerminal outFile)
  where outDir  = "tmp" </> filename time </> dir
        outFile = outDir </> printf "%6.6d" n <.> suffix
-- | Gnuplot terminals: interactive default terminal (ignores its
--   argument), and file terminals in PNG, SVG and PS format via 'gp'.
gpXTerm :: b -> IO (DefaultTerm.T)
gpXTerm = const $ return DefaultTerm.cons
gpPNG :: String -> Int -> FilePath -> IO PNG.T
gpPNG = gp PNG.cons "png"
gpSVG :: String -> Int -> FilePath -> IO SVG.T
gpSVG = gp SVG.cons "svg"
gpPS :: String -> Int -> FilePath -> IO PS.T
gpPS = gp PS.cons "ps"
{-
class PNG a where
png :: UTCTime -> Int -> FilePath -> a
type T a = DotGraph Text -> IO ()
instance PNG (DotGraph Text -> IO ()) where
png = dotPNG
instance PNG (IO PNG.T) where
png = gp PNG.cons
-}
-- TODO: is a flat (linear) sweepIndex the right approach here?
-- | Display one slice of a swept state flow graph in an X11 window:
-- every swept value in the graph is projected onto the element at
-- @sweepIndex@ of its sweep vector before drawing.
drawSweepStateFlowGraph ::
  (Node.C node, FormatValue.FormatValue b,
   Sweep.SweepVector vec b, Sweep.SweepClass sweep vec b) =>
  String ->
  Int ->
  StateQty.Graph node (Result (sweep vec b)) (Result (sweep vec b)) ->
  IO ()
drawSweepStateFlowGraph title sweepIndex sfgSweep =
  Draw.xterm $ Draw.title title $ Draw.stateFlowGraph Draw.optionsDefault
             $ StateQty.mapGraph g g sfgSweep
  -- drop sweepIndex + vhead picks element #sweepIndex; vhead fails loudly
  -- (tagged "simulationGraphs") if the index is out of range.
  where g = fmap (vhead "simulationGraphs" . drop sweepIndex . Sweep.toList)
-- | Look up one state / requirement position inside a sweep stack and
-- display the selected slice via 'drawSweepStateFlowGraph'.
-- Calls 'error' (with a descriptive message) when either the state or
-- the requirement position is missing from the map.
drawSweepStackStateFlowGraph ::
  (Ord [a], Node.C node,Show a,FormatValue.FormatValue b,
   Sweep.SweepVector vec b,
   Sweep.SweepClass sweep vec b) =>
  Idx.State ->
  [a] ->
  Int ->
  Map Idx.State (Map [a] (Type.SweepPerReq node sweep vec b)) ->
  IO ()
drawSweepStackStateFlowGraph state reqsPos sweepIndex sweep =
  drawSweepStateFlowGraph ("StateFlowGraph from SweepStack - State : " ++ show state ++
      " - Requirement Position :" ++ show reqsPos ++ "- Sweep Index: " ++ show sweepIndex) sweepIndex sfgSweep
  -- Two nested lookups: first the state, then the requirement position.
  where sfgSweep = Type.envResult $
          Maybe.maybe (error $ "drawSweepStackStateFlowGraph - Position not found: "++ show reqsPos)
          id $ Map.lookup reqsPos $
          maybe (error $ "drawSweepStackStateFlowGraph - State not found: " ++ show state)
          id $ Map.lookup state sweep
| energyflowanalysis/efa-2.1 | examples/advanced/energy/src/Modules/Output/Plot.hs | bsd-3-clause | 28,043 | 0 | 24 | 6,242 | 8,040 | 4,118 | 3,922 | 587 | 3 |
-- | Killer move heuristics
--
-- https://chessprogramming.wikispaces.com/Killer+Heuristic. This is called
-- from the search iteration loop with updates, where a fail high node would
-- cause the update. The store is queried before the iteration and moves are
-- rearranged accordingly.
module Chess.Killer
( Killer
-- * Constructor
, mkKiller
-- * Data manipulation
, insert
, clearLevel
-- * Query
, heuristics
) where
------------------------------------------------------------------------------
import Data.Function (on)
import Data.List (findIndex, insertBy)
import Data.Vector ((!), (//))
import qualified Data.Vector as V
import Chess.Move
import Data.List.Extras
------------------------------------------------------------------------------
-- The maximum depth we can handle; inserts beyond this depth are ignored.
maxKillerSize :: Int
maxKillerSize = 30
------------------------------------------------------------------------------
-- The maximum entries per level (per search depth)
maxLevelEntries :: Int
maxLevelEntries = 2
------------------------------------------------------------------------------
-- | at a given depth entries are sorted by hit count in reverse order
-- (ascending, so the best move sits at the END of the list — see
-- 'heuristics', which relies on this when folding with toFront).
newtype Killer = Killer (V.Vector [ (Int, Move) ])
------------------------------------------------------------------------------
-- | Creates a killer store: one empty entry list per search depth.
mkKiller :: Killer
mkKiller = Killer $ V.replicate maxKillerSize []
------------------------------------------------------------------------------
-- | inserts a Move to the store. If the move is already present at this
-- depth its hit count is bumped and it is re-inserted keeping the list
-- sorted ascending by count; otherwise it enters with count 1, evicting
-- the weakest (head) entry when the level is full. Depths >= the table
-- size are silently ignored.
insert
  :: Int    -- ^ distance from start depth ( or in other words real depth )
  -> Move   -- ^ move to insert
  -> Killer
  -> Killer
insert d m k@(Killer v)
  | d >= maxKillerSize = k
  | otherwise          =
    let e   = v ! d
        mix = findIndex ((== m) . snd) e
        nv  = case mix of
          -- e contains m therefore suf /= []
          Just ix -> let (pref, suf) = splitAt ix e
                         (oCnt, _)   = head suf
                     in pref
                        -- insertBy keeps the ascending-by-count order
                        ++ insertBy (compare `on` fst) (oCnt + 1, m) (tail suf)
          Nothing -> (1, m) : if length e >= maxLevelEntries
                              then tail e
                              else e
    in Killer $ v // [ (d, nv) ]
------------------------------------------------------------------------------
-- | Drop every killer entry stored at the given depth. Depths outside
-- the table are left untouched.
clearLevel :: Int -> Killer -> Killer
clearLevel d killer@(Killer levels) =
  if d < maxKillerSize
    then Killer (levels // [ (d, []) ])
    else killer
------------------------------------------------------------------------------
-- | the new move list with the heuristics applied: killer moves stored
-- for this depth are moved to the front of the candidate list. Depths
-- outside the table fall back to the unmodified list.
heuristics
  :: Int                 -- ^ distance from start depth ( or in other words real depth )
  -> [ PseudoLegalMove ] -- ^ previous move list
  -> Killer
  -> [ PseudoLegalMove ]
heuristics d ms (Killer v) =
  let m   = map (mkPseudo . snd) $ v ! d
      -- the reverse order of the entries nicely matches toFront:
      -- the last (highest-count) killer ends up first in ms'
      ms' = foldl (flip toFront) ms m
  in if d >= maxKillerSize then ms else ms'
| phaul/chess | Chess/Killer.hs | bsd-3-clause | 3,118 | 0 | 18 | 799 | 586 | 334 | 252 | -1 | -1 |
module Main where
import Prelude hiding (putStr, getContents)
import Data.Aeson (Value)
import Data.Aeson.Encode.Pretty (encodePretty)
import Data.ByteString (getContents)
import Data.ByteString.Lazy (putStr)
import System.Environment (getArgs)
import System.Exit
import Data.Yaml (decodeFileEither, decodeEither')
-- | Print usage information and terminate with a failing exit code.
helpMessage :: IO ()
helpMessage = do
  putStrLn "yaml2json FILE\n use - as FILE to indicate stdin"
  exitFailure
-- | Report a decode result: on success, pretty-print the JSON document
-- and exit successfully; on failure, print the error and exit with a
-- failing code. Never returns normally (hence the free result type @b@).
showJSON :: Show a => Either a Value -> IO b
showJSON = either failure success
  where
    failure err = print err >> exitFailure
    success res = putStr (encodePretty (res :: Value)) >> exitSuccess
-- | Entry point: dispatch on the command-line arguments.
main :: IO ()
main = getArgs >>= run
  where
    -- strict getContents reads all of stdin at once, so decode errors
    -- surface immediately rather than lazily
    run ["-"] = getContents >>= showJSON . decodeEither'
    run [f]   = decodeFileEither f >>= showJSON
    run _     = helpMessage
| Greif-IT/hs-yesod-code-fragment-generator | hs-yaml2json/src/Main.hs | bsd-3-clause | 885 | 0 | 12 | 184 | 264 | 143 | 121 | 23 | 3 |
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ImpredicativeTypes #-}
module Main where
import Control.Monad.Error
import Control.Monad.State
import Control.Monad.Writer
import Language.JavaScript.Parser
import System.Environment
import System.Process
import System.Exit
import System.IO
import System.FilePath
import System.Directory
import System.Info
import Network.Mime
import Text.Printf
import Data.Either
import Data.Generics
import Data.Char
import Data.List
import Data.Data
import Data.Typeable
import Data.Maybe
import Data.String
import qualified Data.Text as T
import qualified Data.ByteString.Char8 as BS
import qualified Development.Cake3 as C3
import Development.Cake3(runMake,makevar,cmd,rule,extvar,File(..),phony,depend)
import qualified Development.Cake3.Rules.UrWeb as C3
import Development.Cake3.Rules.UrWeb (Config(..), urdeps,defaultConfig)
import Options.Applicative
import Paths_urembed
-- | Shorthand for 'liftIO'.
io :: (MonadIO m) => IO a -> m a
io = liftIO
-- | Write a line to the given handle from any MonadIO.
hio :: (MonadIO m) => Handle -> String -> m ()
hio h = io . hPutStrLn h
-- | Write a line to stderr ('err') or stdout ('out').
err,out :: (MonadIO m) => String -> m ()
err = hio stderr
out = hio stdout
-- | Split a string around the first occurrence of an infix marker:
-- @span2 "__" "foo__bar" == Just ("foo","bar")@.
-- Returns 'Nothing' when the marker does not occur (including on the
-- empty string).
span2 :: String -> String -> Maybe (String,String)
span2 inf = go []
  where
    -- The accumulator is kept reversed so each step is O(1); the previous
    -- version appended with @acc ++ [c]@, which made the scan quadratic.
    go _ [] = Nothing
    go acc s@(c:cs)
      | inf `isPrefixOf` s = Just (reverse acc, drop (length inf) s)
      | otherwise          = go (c:acc) cs
-- | A top-level JavaScript function together with its Ur/Web rendering.
data JSFunc = JSFunc {
    urdecl :: String    -- ^ the Ur/Web @val@ declaration for the .urs file
  , urname :: String    -- ^ the Ur/Web-side name of the function
  , jsname :: String    -- ^ the original JavaScript name
  } deriving(Show)
-- | A top-level JavaScript variable rendered as an Ur/Web type declaration.
data JSType = JSType {
    urtdecl :: String   -- ^ the Ur/Web @type@ declaration line
  } deriving(Show)
-- | Parse the JavaScript file, extract top-level functions, convert their
-- signatures into Ur/Web format, return them as the list of strings.
-- Functions/variables whose identifiers do not encode a type (see
-- 'extractEmbeddedType') are skipped with a warning on stderr rather
-- than failing the whole file.
parse_js :: FilePath -> IO (Either String ([JSType],[JSFunc]))
parse_js file = do
  s <- readFile file
  runErrorT $ do
    c <- either fail return (parse s file)
    -- Functions: the first identifier is the function name (no fallback),
    -- the remaining ones are arguments (fallback allowed).
    f <- concat <$> (forM (findTopLevelFunctions c) $ \f@(fn:_) -> (do
          ts <- mapM extractEmbeddedType (f`zip`(False:repeat True))
          let urdecl_ = urs_line ts
          let urname_ = (fst (head ts))
          let jsname_ = fn
          return [JSFunc urdecl_ urname_ jsname_]
          ) `catchError` (\(e::String) -> do
            err $ printf "ignoring function %s, reason:\n\t%s" fn e
            return []))
    -- Variables become Ur/Web type declarations.
    t <- concat <$> (forM (findTopLevelVars c) $ \vn -> (do
          (n,t) <- extractEmbeddedType (vn,False)
          return [JSType $ printf "type %s" t]
          )`catchError` (\(e::String) -> do
            err $ printf "ignoring variable %s, reason:\n\t%s" vn e
            return []))
    return (t,f)
  where
    -- Render one "val name : a -> b -> ... " line. A result type with a
    -- "pure_" prefix is emitted as-is, otherwise wrapped in 'transaction'.
    urs_line :: [(String,String)] -> String
    urs_line [] = error "wrong function signature"
    urs_line ((n,nt):args) = printf "val %s : %s" n (fmtargs args) where
      fmtargs :: [(String,String)] -> String
      fmtargs ((an,at):as) = printf "%s -> %s" at (fmtargs as)
      fmtargs [] = let pf = stripPrefix "pure_" nt in
                   case pf of
                     Just p -> p
                     Nothing -> printf "transaction %s" nt
-- | Split an identifier of the form @name__type@ or @name_as_type@ into
-- its (name, type) components. When no marker is present and the
-- fallback flag is set, the identifier doubles as its own type name;
-- otherwise the extraction fails in the ambient monad.
extractEmbeddedType :: (Monad m) => (String,Bool) -> m (String,String)
extractEmbeddedType ([],_) = error "BUG: empty identifier"
extractEmbeddedType (name,fallback) = check (msum [span2 "__" name , span2 "_as_" name]) where
  check (Just (n,t)) = return (n,t)
  -- Guards rewritten from `fallback == True` / `fallback == False`:
  -- `otherwise` also makes the guard set visibly exhaustive.
  check Nothing
    | fallback  = return (name,name)
    | otherwise = fail $ printf "Can't extract the type from the identifier '%s'" name
-- | Collect, for every function node in the AST, the list of identifiers:
-- function name first, then its parameter names.
findTopLevelFunctions :: JSNode -> [[String]]
findTopLevelFunctions top = map decls $ listify is_func top where
  is_func n@(JSFunction a b c d e f) = True
  is_func _ = False
  decls (JSFunction a b c d e f) = (identifiers b) ++ (identifiers d)
-- | Collect the names of variable declarations without initialisers.
-- NOTE(review): despite the name, listify searches the whole tree, not
-- only the top level — confirm nested vars are intended to be included.
findTopLevelVars :: JSNode -> [String]
findTopLevelVars top = map decls $ listify is_var top where
  is_var n@(JSVarDecl a []) = True
  is_var _ = False
  decls (JSVarDecl a _) = (head $ identifiers a);
-- | All identifier names occurring anywhere inside a node.
identifiers x = map name $ listify ids x where
  ids i@(JSIdentifier s) = True
  ids _ = False
  name (JSIdentifier n) = n
-- | Parsed command-line arguments.
data Args = A
  { tgtdir :: FilePath     -- ^ the .urp project file to generate (-o)
  , version :: Bool        -- ^ print version information and exit
  , files :: [FilePath]    -- ^ files to embed
  }
-- | optparse-applicative parser for 'Args'.
pargs :: Parser Args
pargs = A
  <$> strOption
      (  long "output"
      <> short 'o'
      <> metavar "FILE.urp"
      <> help "Name of the Ur/Web project being generated"
      <> value "")
  <*> flag False True ( long "version" <> help "Show version information" )
  <*> arguments str ( metavar "FILE" <> help "File to embed" )
  where
    -- NOTE(review): osdefgcc is never referenced — apparently dead code
    -- left over from a removed --gcc option; candidate for deletion.
    osdefgcc | isInfixOf "linux" os = "/usr/bin/gcc"
             | isInfixOf "windows" os = "c:\\cygwin\\usr\\bin\\gcc"
             | otherwise = "/usr/local/bin/gcc"
-- | Replace ALL extensions of a path with @x@ (e.g. @"a.tar.gz" .= "c" ==> "a.c"@).
replaceExtensions f x = addExtension (dropExtensions f) x
f .= x = replaceExtensions f x
-- | Guess a MIME type from the file name; JavaScript is normalised to
-- the legacy @text/javascript@ expected by Ur/Web.
guessMime inf = fixup $ BS.unpack (defaultMimeLookup (fromString inf)) where
  fixup "application/javascript" = "text/javascript"
  fixup m = m
-- readBinaryFile name = BS.openBinaryFile name ReadMode >>= BS.hGetContents
-- | Entry point: load the bundled help text and hand the parsed
-- command line to 'main_'.
main :: IO ()
main = do
  h <- (getDataFileName >=> readFile) "Help.txt"
  main_ =<< execParser (
    info (helper <*> pargs)
      (  fullDesc
      <> progDesc h
      <> header "UrEmebed is the Ur/Web module generator" ))
-- | @--version@ mode: print the banner and stop.
-- NOTE(review): the version string is hard-coded even though
-- Paths_urembed is imported; consider deriving it from the Cabal version.
main_ (A tgturp True ins) = do
  hPutStrLn stderr "urembed version 0.5.0.0"
-- | Generator mode: for every input file emit three Ur/Web sub-projects
-- (Module_c: C FFI exposing the bytes as a blob/string; Module_js: JS FFI
-- for functions found in .js inputs; Module: the Ur/Web wrapper), then a
-- combined Static project and a Makefile fragment.
main_ (A tgturp False ins) = do
  -- Sanity-check the command line before generating anything.
  let tgtdir = takeDirectory tgturp
  when (null tgtdir) $ do
    fail "An output directory should be specified, use -o"
  when (null ins) $ do
    fail "At least one file should be specified, see --help"
  exists <- doesDirectoryExist tgtdir
  when (not exists) $ do
    fail "Output is not a directory"
  let indest n = tgtdir </> n
  let write n wr = writeFile (indest n) $ execWriter $ wr
  -- Per-input generation.
  forM_ ins $ \inf -> do
    hPutStrLn stderr (printf "Processing %s" inf)
    let modname = (mkname inf)
    let modname_c = modname ++ "_c"
    let blobname = modname ++ "_c_blob"
    let modname_js = modname ++ "_js"
    let mime = guessMime inf
    -- Module_c.urp
    let binfunc = printf "uw_%s_binary" modname_c
    let textfunc = printf "uw_%s_text" modname_c
    write (replaceExtension modname_c ".urs") $ do
      line $ "val binary : unit -> transaction blob"
      line $ "val text : unit -> transaction string"
    content <- liftIO $ BS.readFile inf
    let csrc = replaceExtension modname_c ".c"
    write csrc $ do
      line $ "// Thanks, http://stupefydeveloper.blogspot.ru/2008/08/cc-embed-binary-data-into-elf.html"
      line $ "#include <urweb.h>"
      line $ "#include <stdio.h>"
      -- let start = printf "_binary___%s_start" blobname
      -- let size = printf "_binary___%s_size" blobname
      line $ printf "#define BLOBSZ %d" (BS.length content)
      line $ "static char blob[BLOBSZ];"
      line $ "uw_Basis_blob " ++ binfunc ++ " (uw_context ctx, uw_unit unit)"
      line $ "{"
      line $ "  uw_Basis_blob uwblob;"
      line $ "  uwblob.data = &blob[0];"
      line $ "  uwblob.size = BLOBSZ;"
      line $ "  return uwblob;"
      line $ "}"
      line $ ""
      line $ "uw_Basis_string " ++ textfunc ++ " (uw_context ctx, uw_unit unit) {"
      line $ "  char* data = &blob[0];"
      line $ "  size_t size = sizeof(blob);"
      line $ "  char * c = uw_malloc(ctx, size+1);"
      line $ "  char * write = c;"
      line $ "  int i;"
      line $ "  for (i = 0; i < size; i++) {"
      line $ "   *write = data[i];"
      line $ "   if (*write == '\\0')"
      line $ "   *write = '\\n';"
      line $ "   *write++;"
      line $ "  }"
      line $ "  *write=0;"
      line $ "  return c;"
      line $ "  }"
      line $ ""
    -- Append the embedded bytes as a C array initialiser.
    let append n wr = BS.appendFile (indest n) $ execWriter $ wr
    append csrc $ do
      let line s = tell ((BS.pack s)`mappend`(BS.pack "\n"))
      line $ ""
      line $ "static char blob[BLOBSZ] = {"
      let buf = reverse $ BS.foldl (\a c -> (BS.pack (printf "0x%02X ," c)) : a) [] content
      tell (BS.concat buf)
      line $ "};"
      line $ ""
    let header = (replaceExtension modname_c ".h")
    write header $ do
      line $ "#include <urweb.h>"
      line $ "uw_Basis_blob " ++ binfunc ++ " (uw_context ctx, uw_unit unit);"
      line $ "uw_Basis_string " ++ textfunc ++ " (uw_context ctx, uw_unit unit);"
    let binobj = replaceExtension modname_c ".o"
    -- let dataobj = replaceExtension modname_c ".data.o"
    write (replaceExtension modname_c ".urp") $ do
      line $ "ffi " ++ modname_c
      line $ "include " ++ header
      line $ "link " ++ binobj
      -- line $ "link " ++ dataobj
    -- Copy the file to the target dir and run linker from there. Thus the names
    -- it places will be correct (see start,size in _c)
    -- copyFile inf (indest blobname)
    -- Module_js.urp
    (jstypes,jsdecls) <- if ((takeExtension inf) == ".js") then do
        e <- parse_js inf
        case e of
          Left e -> do
            err $ printf "Error while parsing %s" (takeFileName inf)
            fail e
          Right decls -> do
            -- err (show decls)
            return decls
      else
        return ([],[])
    write (replaceExtension modname_js ".urs") $ do
      forM_ jstypes $ \decl -> line (urtdecl decl)
      forM_ jsdecls $ \decl -> line (urdecl decl)
    write (replaceExtension modname_js ".urp") $ do
      line $ "ffi " ++ modname_js
      forM_ jsdecls $ \decl -> do
        line $ printf "jsFunc %s.%s = %s" modname_js (urname decl) (jsname decl)
        line $ printf "benignEffectful %s.%s" modname_js (urname decl)
    -- Module.urp
    write (replaceExtension modname ".urs") $ do
      line $ "val binary : unit -> transaction blob"
      line $ "val text : unit -> transaction string"
      line $ "val blobpage : unit -> transaction page"
      line $ "val geturl : url"
      forM_ jstypes $ \decl -> line (urtdecl decl)
      forM_ jsdecls $ \d -> line (urdecl d)
    write (replaceExtension modname ".ur") $ do
      line $ "val binary = " ++ modname_c ++ ".binary"
      line $ "val text = " ++ modname_c ++ ".text"
      forM_ jsdecls $ \d ->
        line $ printf "val %s = %s.%s" (urname d) modname_js (urname d)
      line $ printf "fun blobpage {} = b <- binary () ; returnBlob b (blessMime \"%s\")" mime
      line $ "val geturl = url(blobpage {})"
    write (replaceExtension modname ".urp") $ do
      line $ "library " ++ modname_c
      line $ "library " ++ modname_js
      line $ printf "safeGet %s/blobpage" modname
      line $ printf "safeGet %s/blob" modname
      line $ ""
      line $ modname
  -- Static.urp
  let tgt_in = replaceExtensions tgturp ".urp.in"
  writeFile tgt_in $ execWriter $ do
    forM_ ins $ \inf -> do
      line $ printf "library %s" (mkname inf)
    line []
    line (takeBaseName tgturp)
  let datatype = execWriter $ do
        tell "datatype content = "
        tell (mkname (head ins))
        forM_ (tail ins) (\f -> tell $ printf " | %s" (mkname f))
  writeFile (replaceExtensions tgt_in "urs") $ execWriter $ do
    line datatype
    line "val binary : content -> transaction blob"
    line "val text : content -> transaction string"
    line "val blobpage : content -> transaction page"
    line "val urls : list url"
  writeFile (replaceExtensions tgt_in "ur") $ execWriter $ do
    line datatype
    line $ "fun binary c = case c of"
    line $ printf "      %s => %s.binary ()" (mkname (head ins)) (mkname (head ins))
    forM_ (tail ins) (\f -> line $
      printf "    | %s => %s.binary ()" (mkname f) (mkname f))
    line $ "fun blobpage c = case c of"
    line $ printf "      %s => %s.blobpage ()" (mkname (head ins)) (mkname (head ins))
    forM_ (tail ins) (\f -> line $
      printf "    | %s => %s.blobpage ()" (mkname f) (mkname f))
    line $ "fun text c = case c of"
    line $ printf "      %s => %s.text ()" (mkname (head ins)) (mkname (head ins))
    forM_ (tail ins) (\f -> line $
      printf "    | %s => %s.text ()" (mkname f) (mkname f))
    line $ "val urls ="
    forM_ ins (\f -> line $
      printf "    %s.geturl :: " (mkname f))
    line $ "    []"
  -- Build the Makefile
  setCurrentDirectory tgtdir
  writeFile ((takeBaseName tgturp) .= ".mk") =<< (mdo
    let file x = C3.file' tgtdir tgtdir x
    let cc = extvar "CC"
    let ld = extvar "LD"
    let incl = extvar "UR_INCLUDE_DIR"
    let tgt_in = file (takeBaseName tgturp .= ".urp.in")
    let tgt = file (takeBaseName tgturp .= ".urp")
    urp_in <- C3.ruleM tgt_in $ do
      flip urdeps tgt_in (
        defaultConfig {
          urObjRule = \f -> rule f $ do
            case isInfixOf "data" (C3.takeExtensions f) of
              True -> do
                let src = C3.fromFilePath . (++"_blob") . dropExtensions . C3.toFilePath $ f
                C3.shell [cmd| $(ld) -r -b binary -o $f $(src :: File) |]
              False -> do
                let src = C3.fromFilePath . flip replaceExtensions "c" . C3.toFilePath $ f
                C3.shell [cmd| $(cc) -c -I $incl -o $f $(src :: File) |]
          })
    urp <- C3.ruleM tgt $ do
      C3.shell [cmd|cp $(urp_in) $(urp) |]
      C3.shell [cmd|echo $urp|]
    runMake $ do
      C3.place (phony "urp" (depend urp))
    )
  hPutStrLn stderr "Done"
  where
    line s = tell (s++"\n")
    -- Run an external process; on failure dump its stdout/stderr and abort.
    process = process' Nothing
    process' wd args = do
      (_,hout,herr,ph) <- runInteractiveProcess (head args) (tail args) wd Nothing
      code <- waitForProcess ph
      when (code /= ExitSuccess) $ do
        hGetContents hout >>= hPutStrLn stderr
        hGetContents herr >>= hPutStrLn stderr
        fail $ printf "process %s failed to complete with %s" (show args) (show code)
      return ()
    -- Derive a valid Ur/Web module name from a file name: punctuation
    -- becomes '_', a leading digit gets an 'f' prefix, first char upper-cased.
    mkname f = upper1 . notnum . map under . takeFileName $ f where
      under c | c`elem`"_-. /" = '_'
              | otherwise = c
      upper1 [] = []
      upper1 (x:xs) = (toUpper x) : xs
      notnum n@(x:xs) | isDigit x = "f" ++ n
                      | otherwise = n
| grwlf/urembed | src/Urembed.hs | bsd-3-clause | 14,171 | 0 | 35 | 4,080 | 4,553 | 2,217 | 2,336 | 325 | 9 |
module LawsTests where
import Common
-- | The batch monad under test: requests of Integer, logging to a Writer.
type TB a = BatchT (Req Integer) (Writer [Integer]) a
type V = Integer
type F = Fun' Integer Integer
type MF = Fun' Integer (TB Integer)
-- TODO: dunno if these Arbitrary instances are any good.
instance Arbitrary (TB Integer) where
  arbitrary = oneof
    [ pure <$> arbitrary
    , liftA2 (>>) (lift . tell . pure <$> arbitrary) arbitrary
    , liftA2 (\f r -> f <$> request r) arbitrary arbitrary
    , liftA2 (<*>) (fmap apply' <$> (arbitrary :: Gen (TB (Fun' Integer Integer)))) arbitrary
    ]
instance Arbitrary (TB F) where
  arbitrary = oneof
    [ pure <$> arbitrary
    , liftA2 (>>) (arbitrary :: Gen (TB Integer)) arbitrary
    ]
-- | Computations are shown (and compared) via their run result.
instance Show a => Show (TB a) where
  show = show . run
-- | Run a batch computation, answering every request via 'handleReq'.
run = runWriter . runBatchT handleReq id
handleReq :: [Req a] -> Writer [Integer] [a]
handleReq = pure . map unReq
-- A property meaning 'a and b have the same effect and return value'
a =~= b = run a === run b
infix 0 =~=
-- Functor laws
prop_functorId (b :: TB V) =
  fmap id b =~= b
prop_functorComp :: TB V -> F -> F -> Property
prop_functorComp b (apply' -> f) (apply' -> g) =
  fmap (f . g) b =~= fmap f (fmap g b)
-- Applicative laws
prop_applicativeId (b :: TB V) =
  pure id <*> b =~= b
prop_applicativeComp :: TB F -> TB F -> TB V -> Property
prop_applicativeComp (fmap apply' -> u) (fmap apply' -> v) w =
  pure (.) <*> u <*> v <*> w =~= u <*> (v <*> w)
prop_applicativeHomomorphism :: F -> V -> Property
prop_applicativeHomomorphism (apply' -> f) x =
  pure f <*> pure x =~= pure (f x)
-- Monad laws
prop_monadRightId (b :: TB V) =
  b >>= return =~= b
prop_monadLeftId (v :: V) (apply' -> k) =
  return v >>= k =~= k v
prop_monadAssoc :: TB V -> MF -> MF -> Property
prop_monadAssoc b (apply' -> f) (apply' -> g) =
  (b >>= f) >>= g =~= b >>= (\x -> f x >>= g)
-- TH marker: everything above is visible to $quickCheckAll below.
return []
runTests = $quickCheckAll
| zyla/monad-batch | test/LawsTests.hs | bsd-3-clause | 1,919 | 0 | 16 | 484 | 799 | 413 | 386 | -1 | -1 |
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies, UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Feature.ConcurrentSpec where
import Control.Monad (void)
import Control.Monad.Base
import Control.Monad.Trans.Control
import Control.Concurrent.Async (mapConcurrently)
import Test.Hspec hiding (pendingWith)
import Test.Hspec.Wai.Internal
import Test.Hspec.Wai
import Test.Hspec.Wai.JSON
import Network.Wai.Test (Session)
import Network.Wai (Application)
import Protolude hiding (get)
-- | Regression test: hitting the API concurrently must not surface a
-- "transaction in progress" error; a missing relation should come back
-- as a clean 404 on every parallel request.
spec :: SpecWith Application
spec =
  describe "Queryiny in parallel" $
    it "should not raise 'transaction in progress' error" $
      raceTest 10 $
        get "/fakefake"
          `shouldRespondWith` [json|
            { "hint": null,
              "details":null,
              "code":"42P01",
              "message":"relation \"test.fakefake\" does not exist"
            } |]
          { matchStatus  = 404
          , matchHeaders = []
          }
-- | Run the given expectation the stated number of times concurrently,
-- discarding the individual results; used to provoke races.
raceTest :: Int -> WaiExpectation -> WaiExpectation
raceTest times = liftBaseDiscard spawnAll
  where
    spawnAll expectation = void (mapConcurrently (\_ -> expectation) [1 .. times])
-- | Orphan instance threading base-control through WaiSession so that
-- 'liftBaseDiscard' (and thus concurrent combinators) can be used in tests.
instance MonadBaseControl IO WaiSession where
  type StM WaiSession a = StM Session a
  liftBaseWith f = WaiSession $
    liftBaseWith $ \runInBase ->
      f $ \k -> runInBase (unWaiSession k)
  restoreM = WaiSession . restoreM
  {-# INLINE liftBaseWith #-}
  {-# INLINE restoreM #-}
-- | Orphan MonadBase instance; WaiSession is MonadIO underneath.
instance MonadBase IO WaiSession where
  liftBase = liftIO
| Skyfold/postgrest | test/Feature/ConcurrentSpec.hs | mit | 1,480 | 0 | 12 | 334 | 311 | 178 | 133 | -1 | -1 |
{-# LANGUAGE TypeApplications, DeriveGeneric #-}
{-# LANGUAGE DataKinds, ExistentialQuantification #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE StandaloneDeriving #-}
module Database.Persist.TH.SharedPrimaryKeyImportedSpec where
import TemplateTestImports
import Data.Proxy
import Test.Hspec
import Database.Persist
import Database.Persist.Sql
import Database.Persist.Sql.Util
import Database.Persist.TH
import Language.Haskell.TH
import Control.Monad.IO.Class
import Database.Persist.TH.SharedPrimaryKeySpec (User, UserId)
mkPersistWith sqlSettings $(discoverEntities) [persistLowerCase|
ProfileX
Id UserId
email String
|]
-- This test is very similar to the one in SharedPrimaryKeyTest, but it is
-- able to use 'UserId' directly, since the type is imported from another
-- module.
-- | Checks that an entity whose primary key is declared as another
-- entity's key (imported from a different module) shares that key's
-- SQL representation and foreign-references it.
spec :: Spec
spec = describe "Shared Primary Keys Imported" $ do
    describe "PersistFieldSql" $ do
        it "should match underlying key" $ do
            sqlType (Proxy @UserId)
                `shouldBe`
                    sqlType (Proxy @ProfileXId)
    describe "getEntityId FieldDef" $ do
        it "should match underlying primary key" $ do
            let
                -- SQL type of an entity's id column; composite keys have
                -- no single column and get a sentinel value instead.
                getSqlType :: PersistEntity a => Proxy a -> SqlType
                getSqlType p =
                    case getEntityId (entityDef p) of
                        EntityIdField fd ->
                            fieldSqlType fd
                        _ ->
                            SqlOther "Composite Key"
            getSqlType (Proxy @User)
                `shouldBe`
                    getSqlType (Proxy @ProfileX)
    describe "foreign reference should work" $ do
        it "should have a foreign reference" $ do
            pendingWith "issue #1289"
            let
                Just fd =
                    getEntityIdField (entityDef (Proxy @ProfileX))
            fieldReference fd
                `shouldBe`
                    ForeignRef (EntityNameHS "User")
| yesodweb/persistent | persistent/test/Database/Persist/TH/SharedPrimaryKeyImportedSpec.hs | mit | 2,264 | 0 | 22 | 659 | 367 | 191 | 176 | 53 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
#if __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Unsafe #-}
#endif
{-# OPTIONS_HADDOCK not-home #-}
-- |
-- Module : Data.ByteString.Lazy.Internal
-- Copyright : (c) Don Stewart 2006-2008
-- (c) Duncan Coutts 2006-2011
-- License : BSD-style
-- Maintainer : dons00@gmail.com, duncan@community.haskell.org
-- Stability : unstable
-- Portability : non-portable
--
-- A module containing semi-public 'ByteString' internals. This exposes
-- the 'ByteString' representation and low level construction functions.
-- Modules which extend the 'ByteString' system will need to use this module
-- while ideally most users will be able to make do with the public interface
-- modules.
--
module Data.ByteString.Lazy.Internal (
-- * The lazy @ByteString@ type and representation
ByteString(..), -- instances: Eq, Ord, Show, Read, Data, Typeable
chunk,
foldrChunks,
foldlChunks,
-- * Data type invariant and abstraction function
invariant,
checkInvariant,
-- * Chunk allocation sizes
defaultChunkSize,
smallChunkSize,
chunkOverhead,
-- * Conversion with lists: packing and unpacking
packBytes, packChars,
unpackBytes, unpackChars,
) where
import Prelude hiding (concat)
import qualified Data.ByteString.Internal as S
import qualified Data.ByteString as S (length, take, drop)
import Data.Word (Word8)
import Foreign.Storable (Storable(sizeOf))
#if !(MIN_VERSION_base(4,11,0)) && MIN_VERSION_base(4,9,0)
import Data.Semigroup (Semigroup((<>)))
#endif
#if !(MIN_VERSION_base(4,8,0))
import Data.Monoid (Monoid(..))
#endif
import Control.DeepSeq (NFData, rnf)
import Data.String (IsString(..))
import Data.Typeable (Typeable)
import Data.Data (Data(..), mkNoRepType)
-- | A space-efficient representation of a 'Word8' vector, supporting many
-- efficient operations.
--
-- A lazy 'ByteString' contains 8-bit bytes, or by using the operations
-- from "Data.ByteString.Lazy.Char8" it can be interpreted as containing
-- 8-bit characters.
--
-- Invariant: every 'Chunk' holds a non-empty strict chunk (see 'invariant').
data ByteString = Empty | Chunk {-# UNPACK #-} !S.ByteString ByteString
    deriving (Typeable)
-- See 'invariant' function later in this module for internal invariants.
-- Eq/Ord are chunk-boundary-agnostic: see 'eq' and 'cmp' below.
instance Eq ByteString where
    (==)    = eq
instance Ord ByteString where
    compare = cmp
#if MIN_VERSION_base(4,9,0)
instance Semigroup ByteString where
    (<>)    = append
#endif
instance Monoid ByteString where
    mempty  = Empty
#if MIN_VERSION_base(4,9,0)
    mappend = (<>)
#else
    mappend = append
#endif
    mconcat = concat
instance NFData ByteString where
    rnf Empty       = ()
    -- strict chunks are already fully evaluated; only the spine matters
    rnf (Chunk _ b) = rnf b
instance Show ByteString where
    showsPrec p ps r = showsPrec p (unpackChars ps) r
instance Read ByteString where
    readsPrec p str = [ (packChars x, y) | (x, y) <- readsPrec p str ]
instance IsString ByteString where
    fromString = packChars
-- Opaque Data instance: no real constructor representation is exposed.
instance Data ByteString where
  gfoldl f z txt = z packBytes `f` unpackBytes txt
  toConstr _     = error "Data.ByteString.Lazy.ByteString.toConstr"
  gunfold _ _    = error "Data.ByteString.Lazy.ByteString.gunfold"
  dataTypeOf _   = mkNoRepType "Data.ByteString.Lazy.ByteString"
------------------------------------------------------------------------
-- Packing and unpacking from lists
-- | Pack a list of bytes into a lazy ByteString using geometrically
-- growing chunk sizes: start at 32 bytes and double up to
-- 'smallChunkSize', so short inputs stay compact while long inputs
-- settle on full-size chunks.
packBytes :: [Word8] -> ByteString
packBytes = fill 32
  where
    fill sz ws =
      let (front, leftover) = S.packUptoLenBytes sz ws
      in case leftover of
           [] -> chunk front Empty
           _  -> Chunk front (fill (min (sz * 2) smallChunkSize) leftover)
-- | Pack a list of characters (truncated to 8 bits) into a lazy
-- ByteString, with the same geometric chunk-growth strategy as
-- 'packBytes'.
packChars :: [Char] -> ByteString
packChars = fill 32
  where
    fill sz cs =
      let (front, leftover) = S.packUptoLenChars sz cs
      in case leftover of
           [] -> chunk front Empty
           _  -> Chunk front (fill (min (sz * 2) smallChunkSize) leftover)
-- | Unpack into a list of bytes, lazily, chunk by chunk.
unpackBytes :: ByteString -> [Word8]
unpackBytes Empty        = []
unpackBytes (Chunk c cs) = S.unpackAppendBytesLazy c (unpackBytes cs)
-- | Unpack into a list of characters, lazily, chunk by chunk.
unpackChars :: ByteString -> [Char]
unpackChars Empty        = []
unpackChars (Chunk c cs) = S.unpackAppendCharsLazy c (unpackChars cs)
------------------------------------------------------------------------
-- | The data type invariant:
-- a lazy ByteString is either 'Empty' or a sequence of 'Chunk's whose
-- strict payloads are all non-empty.
-- All functions must preserve this, and the QC properties must check this.
invariant :: ByteString -> Bool
invariant bs = case bs of
  Empty                   -> True
  Chunk (S.PS _ _ len) cs -> len > 0 && invariant cs
-- | In a form that checks the invariant lazily: each chunk is validated
-- only when the spine is forced that far; a violation raises 'error'.
checkInvariant :: ByteString -> ByteString
checkInvariant Empty = Empty
checkInvariant (Chunk c@(S.PS _ _ len) cs)
    | len > 0   = Chunk c (checkInvariant cs)
    | otherwise = error $ "Data.ByteString.Lazy: invariant violation:"
               ++ show (Chunk c cs)
------------------------------------------------------------------------
-- | Smart constructor for 'Chunk'. Guarantees the data type invariant
-- by silently dropping an empty strict chunk.
chunk :: S.ByteString -> ByteString -> ByteString
chunk c@(S.PS _ _ len) cs | len == 0  = cs
                          | otherwise = Chunk c cs
{-# INLINE chunk #-}
-- | Consume the chunks of a lazy ByteString with a natural right fold.
foldrChunks :: (S.ByteString -> a -> a) -> a -> ByteString -> a
foldrChunks f z = go
  where go Empty        = z
        go (Chunk c cs) = f c (go cs)
{-# INLINE foldrChunks #-}
-- | Consume the chunks of a lazy ByteString with a strict, tail-recursive,
-- accumulating left fold.
foldlChunks :: (a -> S.ByteString -> a) -> a -> ByteString -> a
foldlChunks f z = go z
  -- the `seq False` guard is an idiom that forces the accumulator on
  -- every step without changing which equation matches
  where go a _ | a `seq` False = undefined
        go a Empty        = a
        go a (Chunk c cs) = go (f a c) cs
{-# INLINE foldlChunks #-}
------------------------------------------------------------------------
-- The representation uses lists of packed chunks. When we have to convert from
-- a lazy list to the chunked representation, then by default we use this
-- chunk size. Some functions give you more control over the chunk size.
--
-- Measurements here:
-- http://www.cse.unsw.edu.au/~dons/tmp/chunksize_v_cache.png
--
-- indicate that a value around 0.5 to 1 x your L2 cache is best.
-- The following value assumes people have something greater than 128k,
-- and need to share the cache with other programs.
-- | The chunk size used for I\/O. Currently set to 32k, less the memory management overhead
defaultChunkSize :: Int
defaultChunkSize = 32 * k - chunkOverhead
   where k = 1024
-- | The recommended chunk size. Currently set to 4k, less the memory management overhead
smallChunkSize :: Int
smallChunkSize = 4 * k - chunkOverhead
   where k = 1024
-- | The memory management overhead. Currently this is tuned for GHC only.
-- (Two machine words: the GHC heap-object header plus allocator slop.)
chunkOverhead :: Int
chunkOverhead = 2 * sizeOf (undefined :: Int)
------------------------------------------------------------------------
-- Implementations for Eq, Ord and Monoid instances
-- | Structural equality that ignores how the bytes are split into
-- chunks: the shorter leading chunk is compared against a prefix of the
-- longer one, and the remainder is re-chunked on the fly.
eq :: ByteString -> ByteString -> Bool
eq Empty Empty = True
eq Empty _     = False
eq _     Empty = False
eq (Chunk a as) (Chunk b bs) =
  case compare (S.length a) (S.length b) of
    LT -> a == S.take (S.length a) b && eq as (Chunk (S.drop (S.length a) b) bs)
    EQ -> a == b                     && eq as bs
    GT -> S.take (S.length b) a == b && eq (Chunk (S.drop (S.length b) a) as) bs
-- | Lexicographic comparison, likewise independent of chunk boundaries.
cmp :: ByteString -> ByteString -> Ordering
cmp Empty Empty = EQ
cmp Empty _     = LT
cmp _     Empty = GT
cmp (Chunk a as) (Chunk b bs) =
  case compare (S.length a) (S.length b) of
    LT -> case compare a (S.take (S.length a) b) of
            EQ     -> cmp as (Chunk (S.drop (S.length a) b) bs)
            result -> result
    EQ -> case compare a b of
            EQ     -> cmp as bs
            result -> result
    GT -> case compare (S.take (S.length b) a) b of
            EQ     -> cmp (Chunk (S.drop (S.length b) a) as) bs
            result -> result
-- | O(n\/c) append: rebuild the first spine in front of the second.
append :: ByteString -> ByteString -> ByteString
append xs ys = foldrChunks Chunk ys xs
-- | Concatenate a list of lazy ByteStrings by splicing their chunk
-- spines together; no chunk data is copied.
concat :: [ByteString] -> ByteString
concat = walk
  where
    walk []         = Empty
    walk (cs : css) = splice cs css
    splice Empty        css = walk css
    splice (Chunk c cs) css = Chunk c (splice cs css)
| CloudI/CloudI | src/api/haskell/external/bytestring-0.10.10.0/Data/ByteString/Lazy/Internal.hs | mit | 8,281 | 0 | 18 | 1,905 | 1,997 | 1,076 | 921 | 130 | 6 |
-- Copyright (c) 2011-14, Nicola Bonelli
-- All rights reserved.
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions are met:
--
-- * Redistributions of source code must retain the above copyright notice,
-- this list of conditions and the following disclaimer.
-- * Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
-- * Neither the name of University of Pisa nor the names of its contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-- IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-- ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
-- LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-- CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-- SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-- CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-- ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-- POSSIBILITY OF SUCH DAMAGE.
--
--
{-# LANGUAGE ImpredicativeTypes #-}
module Network.PFq.Experimental
(
-- * Experimental Functions
-- | This set of experimental functions may be subject to changes in future releases
dummy ,
dummy_vector ,
dummy_string ,
dummy_strings,
crc16 ,
class' ,
deliver ,
par3,
par4,
par5,
par6,
par7,
par8,
steer_gtp_usr,
gtp,
gtp_cp,
gtp_up,
is_gtp,
is_gtp_cp,
is_gtp_up
) where
import Network.PFq.Lang
import Foreign.C.Types
-- Experimental in-kernel computations
-- | Specify the class mask for the given packet.
class' :: CInt -> NetFunction
class' n = MFunction "class" n () () () () () () ()
-- | Deliver the packet to the group sockets selected by the given class mask.
-- NOTE(review): semantics inferred from the kernel-side \"deliver\" computation
-- name -- confirm against the PFQ kernel module.
deliver :: CInt -> NetFunction
deliver n = MFunction "deliver" n () () () () () () ()
-- | Kernel computation taking a single integer argument.
-- NOTE(review): presumably a test\/debug helper (name suggests so) -- confirm.
dummy :: CInt -> NetFunction
dummy n = MFunction "dummy" n () () () () () () ()
-- | Like 'dummy', but takes a vector of integers as argument.
dummy_vector :: [CInt] -> NetFunction
dummy_vector xs = MFunction "dummy_vector" xs () () () () () () ()
-- | Like 'dummy', but takes a string as argument.
dummy_string :: String -> NetFunction
dummy_string xs = MFunction "dummy_string" xs () () () () () () ()
-- | Like 'dummy', but takes a list of strings as argument.
dummy_strings :: [String] -> NetFunction
dummy_strings xs = MFunction "dummy_strings" xs () () () () () () ()
-- | Kernel computation named \"crc16\"; takes no arguments.
crc16 :: NetFunction
crc16 = MFunction "crc16" () () () () () () () ()
-- | Function that returns the parallel of 3 monadic NetFunctions.
par3 :: NetFunction -> NetFunction -> NetFunction -> NetFunction
par3 a b c = MFunction "par3" a b c () () () () ()
-- | Function that returns the parallel of 4 monadic NetFunctions.
par4 :: NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction
par4 a b c d = MFunction "par4" a b c d () () () ()
-- | Function that returns the parallel of 5 monadic NetFunctions.
par5 :: NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction
par5 a b c d e = MFunction "par5" a b c d e () () ()
-- | Function that returns the parallel of 6 monadic NetFunctions.
par6 :: NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction
par6 a b c d e f = MFunction "par6" a b c d e f () ()
-- | Function that returns the parallel of 7 monadic NetFunctions.
par7 :: NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction
par7 a b c d e f g = MFunction "par7" a b c d e f g ()
-- | Function that returns the parallel of 8 monadic NetFunctions.
par8 :: NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction -> NetFunction
par8 a b c d e f g h = MFunction "par8" a b c d e f g h
-- | Dispatch the packet across the sockets
-- with a randomized algorithm that maintains the integrity of
-- per-user flows on top of GTP tunnel protocol (Control-Plane packets
-- are broadcasted to all sockets).
--
-- > (steer_gtp_usr "192.168.0.0" 16)
steer_gtp_usr :: IPv4 -> CInt -> NetFunction
-- The inline ':: NetFunction' annotation was redundant given the explicit
-- top-level signature above, so it has been dropped.
steer_gtp_usr net prefix = MFunction "steer_gtp_usr" net prefix () () () () () ()
-- Explicit top-level type signatures replace the previous inline
-- ':: NetFunction' / ':: NetPredicate' expression annotations (idiomatic,
-- behavior-identical).

-- | Evaluate to /Pass SkBuff/ in case of GTP packet, /Drop/ it otherwise.
gtp :: NetFunction
gtp = MFunction "gtp" () () () () () () () ()

-- | Evaluate to /Pass SkBuff/ in case of GTP Control-Plane packet, /Drop/ it otherwise.
gtp_cp :: NetFunction
gtp_cp = MFunction "gtp_cp" () () () () () () () ()

-- | Evaluate to /Pass SkBuff/ in case of GTP User-Plane packet, /Drop/ it otherwise.
gtp_up :: NetFunction
gtp_up = MFunction "gtp_up" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is a GTP packet.
is_gtp :: NetPredicate
is_gtp = Predicate "is_gtp" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is a GTP Control-Plane packet.
is_gtp_cp :: NetPredicate
is_gtp_cp = Predicate "is_gtp_cp" () () () () () () () ()

-- | Evaluate to /True/ if the SkBuff is a GTP User-Plane packet.
is_gtp_up :: NetPredicate
is_gtp_up = Predicate "is_gtp_up" () () () () () () () ()
| Mr-Click/PFQ | user/Haskell/Network/PFq/Experimental.hs | gpl-2.0 | 5,291 | 0 | 12 | 1,209 | 1,201 | 642 | 559 | 59 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.EC2.DescribeAvailabilityZones
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Describes one or more of the Availability Zones that are available to
-- you. The results include zones only for the region you\'re currently
-- using. If there is an event impacting an Availability Zone, you can use
-- this request to view the state and any provided message for that
-- Availability Zone.
--
-- For more information, see
-- <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html Regions and Availability Zones>
-- in the /Amazon Elastic Compute Cloud User Guide/.
--
-- /See:/ <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-DescribeAvailabilityZones.html AWS API Reference> for DescribeAvailabilityZones.
module Network.AWS.EC2.DescribeAvailabilityZones
(
-- * Creating a Request
describeAvailabilityZones
, DescribeAvailabilityZones
-- * Request Lenses
, dazZoneNames
, dazFilters
, dazDryRun
-- * Destructuring the Response
, describeAvailabilityZonesResponse
, DescribeAvailabilityZonesResponse
-- * Response Lenses
, dazrsAvailabilityZones
, dazrsResponseStatus
) where
import Network.AWS.EC2.Types
import Network.AWS.EC2.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | /See:/ 'describeAvailabilityZones' smart constructor.
data DescribeAvailabilityZones = DescribeAvailabilityZones'
    { _dazZoneNames :: !(Maybe [Text])   -- ^ zone names restricting the query, if any
    , _dazFilters :: !(Maybe [Filter])   -- ^ server-side filters to apply, if any
    , _dazDryRun :: !(Maybe Bool)        -- ^ permission check only; no real call
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeAvailabilityZones' with the minimum fields required to make a request.
--
-- All optional fields start out as 'Nothing'.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dazZoneNames'
--
-- * 'dazFilters'
--
-- * 'dazDryRun'
describeAvailabilityZones
    :: DescribeAvailabilityZones
describeAvailabilityZones =
    DescribeAvailabilityZones'
    { _dazZoneNames = Nothing
    , _dazFilters = Nothing
    , _dazDryRun = Nothing
    }
-- | The names of one or more Availability Zones.
--
-- NOTE(review): the '_Default . _Coerce' pipeline presumably maps a missing
-- ('Nothing') list to @[]@ -- confirm against "Network.AWS.Prelude".
dazZoneNames :: Lens' DescribeAvailabilityZones [Text]
dazZoneNames = lens _dazZoneNames (\ s a -> s{_dazZoneNames = a}) . _Default . _Coerce;
-- | One or more filters.
--
-- - 'message' - Information about the Availability Zone.
--
-- - 'region-name' - The name of the region for the Availability Zone
--   (for example, 'us-east-1').
--
-- - 'state' - The state of the Availability Zone ('available' |
--   'impaired' | 'unavailable').
--
-- - 'zone-name' - The name of the Availability Zone (for example,
--   'us-east-1a').
--
dazFilters :: Lens' DescribeAvailabilityZones [Filter]
dazFilters = lens _dazFilters (\ s a -> s{_dazFilters = a}) . _Default . _Coerce;
-- | Checks whether you have the required permissions for the action, without
-- actually making the request, and provides an error response. If you have
-- the required permissions, the error response is 'DryRunOperation'.
-- Otherwise, it is 'UnauthorizedOperation'.
dazDryRun :: Lens' DescribeAvailabilityZones (Maybe Bool)
dazDryRun = lens _dazDryRun (\ s a -> s{_dazDryRun = a});
-- | Sent as an EC2 POST query; the XML reply is decoded into
-- 'DescribeAvailabilityZonesResponse' (zone list plus HTTP status).
instance AWSRequest DescribeAvailabilityZones where
        type Rs DescribeAvailabilityZones =
             DescribeAvailabilityZonesResponse
        request = postQuery eC2
        response
          = receiveXML
              (\ s h x ->
                 DescribeAvailabilityZonesResponse' <$>
                   (x .@? "availabilityZoneInfo" .!@ mempty >>=
                      may (parseXMLList "item"))
                     <*> (pure (fromEnum s)))
-- | No additional HTTP headers are sent.
instance ToHeaders DescribeAvailabilityZones where
        toHeaders = const mempty
-- | Always posted to the service root path.
instance ToPath DescribeAvailabilityZones where
        toPath = const "/"
-- | Serialises the request fields into the EC2 query-string wire format.
instance ToQuery DescribeAvailabilityZones where
        toQuery DescribeAvailabilityZones'{..}
          = mconcat
              ["Action" =:
                 ("DescribeAvailabilityZones" :: ByteString),
               "Version" =: ("2015-04-15" :: ByteString),
               toQuery (toQueryList "ZoneName" <$> _dazZoneNames),
               toQuery (toQueryList "Filter" <$> _dazFilters),
               "DryRun" =: _dazDryRun]
-- | /See:/ 'describeAvailabilityZonesResponse' smart constructor.
data DescribeAvailabilityZonesResponse = DescribeAvailabilityZonesResponse'
    { _dazrsAvailabilityZones :: !(Maybe [AvailabilityZone])   -- ^ zones returned by the service
    , _dazrsResponseStatus :: !Int                             -- ^ response status code
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'DescribeAvailabilityZonesResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'dazrsAvailabilityZones'
--
-- * 'dazrsResponseStatus'
describeAvailabilityZonesResponse
    :: Int -- ^ 'dazrsResponseStatus'
    -> DescribeAvailabilityZonesResponse
describeAvailabilityZonesResponse pResponseStatus_ =
    DescribeAvailabilityZonesResponse'
    { _dazrsAvailabilityZones = Nothing
    , _dazrsResponseStatus = pResponseStatus_
    }
-- | Information about one or more Availability Zones.
dazrsAvailabilityZones :: Lens' DescribeAvailabilityZonesResponse [AvailabilityZone]
dazrsAvailabilityZones = lens _dazrsAvailabilityZones (\ s a -> s{_dazrsAvailabilityZones = a}) . _Default . _Coerce;
-- | The response status code.
dazrsResponseStatus :: Lens' DescribeAvailabilityZonesResponse Int
dazrsResponseStatus = lens _dazrsResponseStatus (\ s a -> s{_dazrsResponseStatus = a});
| fmapfmapfmap/amazonka | amazonka-ec2/gen/Network/AWS/EC2/DescribeAvailabilityZones.hs | mpl-2.0 | 6,197 | 0 | 15 | 1,223 | 787 | 473 | 314 | 91 | 1 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__
{-# LANGUAGE MagicHash, BangPatterns, DeriveDataTypeable, StandaloneDeriving #-}
#endif
#if !defined(TESTING) && __GLASGOW_HASKELL__ >= 703
{-# LANGUAGE Trustworthy #-}
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Data.IntSet.Base
-- Copyright : (c) Daan Leijen 2002
-- (c) Joachim Breitner 2011
-- License : BSD-style
-- Maintainer : libraries@haskell.org
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of integer sets.
--
-- These modules are intended to be imported qualified, to avoid name
-- clashes with Prelude functions, e.g.
--
-- > import Data.IntSet (IntSet)
-- > import qualified Data.IntSet as IntSet
--
-- The implementation is based on /big-endian patricia trees/. This data
-- structure performs especially well on binary operations like 'union'
-- and 'intersection'. However, my benchmarks show that it is also
-- (much) faster on insertions and deletions when compared to a generic
-- size-balanced set implementation (see "Data.Set").
--
-- * Chris Okasaki and Andy Gill, \"/Fast Mergeable Integer Maps/\",
-- Workshop on ML, September 1998, pages 77-86,
-- <http://citeseer.ist.psu.edu/okasaki98fast.html>
--
-- * D.R. Morrison, \"/PATRICIA -- Practical Algorithm To Retrieve
-- Information Coded In Alphanumeric/\", Journal of the ACM, 15(4),
-- October 1968, pages 514-534.
--
-- Additionally, this implementation places bitmaps in the leaves of the tree.
-- Their size is the natural size of a machine word (32 or 64 bits) and greatly
-- reduce memory footprint and execution times for dense sets, e.g. sets where
-- it is likely that many values lie close to each other. The asymptotics are
-- not affected by this optimization.
--
-- Many operations have a worst-case complexity of /O(min(n,W))/.
-- This means that the operation can become linear in the number of
-- elements with a maximum of /W/ -- the number of bits in an 'Int'
-- (32 or 64).
-----------------------------------------------------------------------------
-- [Note: INLINE bit fiddling]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- It is essential that the bit fiddling functions like mask, zero, branchMask
-- etc are inlined. If they do not, the memory allocation skyrockets. The GHC
-- usually gets it right, but it is disastrous if it does not. Therefore we
-- explicitly mark these functions INLINE.
-- [Note: Local 'go' functions and capturing]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Care must be taken when using 'go' function which captures an argument.
-- Sometimes (for example when the argument is passed to a data constructor,
-- as in insert), GHC heap-allocates more than necessary. Therefore C-- code
-- must be checked for increased allocation when creating and modifying such
-- functions.
-- [Note: Order of constructors]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- The order of constructors of IntSet matters when considering performance.
-- Currently in GHC 7.0, when type has 3 constructors, they are matched from
-- the first to the last -- the best performance is achieved when the
-- constructors are ordered by frequency.
-- On GHC 7.0, reordering constructors from Nil | Tip | Bin to Bin | Tip | Nil
-- improves the benchmark by circa 10%.
module Data.IntSet.Base (
-- * Set type
IntSet(..), Key -- instance Eq,Show
-- * Operators
, (\\)
-- * Query
, null
, size
, member
, notMember
, lookupLT
, lookupGT
, lookupLE
, lookupGE
, isSubsetOf
, isProperSubsetOf
-- * Construction
, empty
, singleton
, insert
, delete
-- * Combine
, union
, unions
, difference
, intersection
-- * Filter
, filter
, partition
, split
, splitMember
, splitRoot
-- * Map
, map
-- * Folds
, foldr
, foldl
-- ** Strict folds
, foldr'
, foldl'
-- ** Legacy folds
, fold
-- * Min\/Max
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, maxView
, minView
-- * Conversion
-- ** List
, elems
, toList
, fromList
-- ** Ordered list
, toAscList
, toDescList
, fromAscList
, fromDistinctAscList
-- * Debugging
, showTree
, showTreeWith
-- * Internals
, match
, suffixBitMask
, prefixBitMask
, bitmapOf
) where
-- We want to be able to compile without cabal. Nevertheless
-- #if defined(MIN_VERSION_base) && MIN_VERSION_base(4,5,0)
-- does not work, because if MIN_VERSION_base is undefined,
-- the last condition is syntactically wrong.
#define MIN_VERSION_base_4_5_0 0
#ifdef MIN_VERSION_base
#if MIN_VERSION_base(4,5,0)
#undef MIN_VERSION_base_4_5_0
#define MIN_VERSION_base_4_5_0 1
#endif
#endif
#define MIN_VERSION_base_4_7_0 0
#ifdef MIN_VERSION_base
#if MIN_VERSION_base(4,7,0)
#undef MIN_VERSION_base_4_7_0
#define MIN_VERSION_base_4_7_0 1
#endif
#endif
import Control.DeepSeq (NFData)
import Data.Bits
import qualified Data.List as List
import Data.Maybe (fromMaybe)
import Data.Monoid (Monoid(..))
import Data.Typeable
import Data.Word (Word)
import Prelude hiding (filter, foldr, foldl, null, map)
import Data.BitUtil
import Data.StrictPair
#if __GLASGOW_HASKELL__
import Data.Data (Data(..), Constr, mkConstr, constrIndex, Fixity(Prefix), DataType, mkDataType)
import Text.Read
#endif
#if __GLASGOW_HASKELL__
import GHC.Exts (Int(..), build)
import GHC.Prim (indexInt8OffAddr#)
#endif
-- On GHC, include MachDeps.h to get WORD_SIZE_IN_BITS macro.
#if defined(__GLASGOW_HASKELL__)
# include "MachDeps.h"
#endif
-- Use macros to define strictness of functions.
-- STRICT_x_OF_y denotes an y-ary function strict in the x-th parameter.
-- We do not use BangPatterns, because they are not in any standard and we
-- want the compilers to be compiled by as many compilers as possible.
#define STRICT_1_OF_2(fn) fn arg _ | arg `seq` False = undefined
#define STRICT_2_OF_2(fn) fn _ arg | arg `seq` False = undefined
#define STRICT_1_OF_3(fn) fn arg _ _ | arg `seq` False = undefined
#define STRICT_2_OF_3(fn) fn _ arg _ | arg `seq` False = undefined
infixl 9 \\{-This comment teaches CPP correct behaviour -}
-- A "Nat" is a natural machine word (an unsigned Int)
type Nat = Word

-- Reinterpret a signed 'Int' as an unsigned machine word (bit pattern kept;
-- 'fromIntegral' wraps modulo 2^wordSize).
natFromInt :: Int -> Nat
natFromInt = fromIntegral
{-# INLINE natFromInt #-}

-- Reinterpret an unsigned machine word as a signed 'Int' (bit pattern kept).
intFromNat :: Nat -> Int
intFromNat = fromIntegral
{-# INLINE intFromNat #-}
{--------------------------------------------------------------------
  Operators
--------------------------------------------------------------------}
-- | /O(n+m)/. Infix synonym for 'difference'.
(\\) :: IntSet -> IntSet -> IntSet
(\\) = difference
{--------------------------------------------------------------------
  Types
--------------------------------------------------------------------}
-- | A set of integers.
-- See Note: Order of constructors
data IntSet = Bin {-# UNPACK #-} !Prefix {-# UNPACK #-} !Mask !IntSet !IntSet
-- Invariant: Nil is never found as a child of Bin.
-- Invariant: The Mask is a power of 2. It is the largest bit position at which
-- two elements of the set differ.
-- Invariant: Prefix is the common high-order bits that all elements share to
-- the left of the Mask bit.
-- Invariant: In Bin prefix mask left right, left consists of the elements that
-- don't have the mask bit set; right is all the elements that do.
            | Tip {-# UNPACK #-} !Prefix {-# UNPACK #-} !BitMap
-- Invariant: The Prefix is zero for all but the last 5 (on 32 bit arches) or 6
-- bits (on 64 bit arches). The values of the map represented by a tip
-- are the prefix plus the indices of the set bits in the bit map.
            | Nil
-- A number stored in a set is stored as
-- * Prefix (all but last 5-6 bits) and
-- * BitMap (last 5-6 bits stored as a bitmask)
-- Last 5-6 bits are called a Suffix.
type Prefix = Int
type Mask = Int
type BitMap = Word
type Key = Int
-- | The 'Monoid' is union; 'mempty' is the empty set.
instance Monoid IntSet where
    mempty = empty
    mappend = union
    mconcat = unions
#if __GLASGOW_HASKELL__
{--------------------------------------------------------------------
A Data instance
--------------------------------------------------------------------}
-- This instance preserves data abstraction at the cost of inefficiency.
-- We provide limited reflection services for the sake of data abstraction.
-- Reflects the set as if it were built by a single 'fromList' call, so
-- generic traversals cannot break the internal invariants.
instance Data IntSet where
  gfoldl f z is = z fromList `f` (toList is)
  toConstr _ = fromListConstr
  gunfold k z c = case constrIndex c of
    1 -> k (z fromList)
    _ -> error "gunfold"
  dataTypeOf _ = intSetDataType
-- The single pseudo-constructor exposed to "Data.Data" clients.
fromListConstr :: Constr
fromListConstr = mkConstr intSetDataType "fromList" [] Prefix
intSetDataType :: DataType
intSetDataType = mkDataType "Data.IntSet.Base.IntSet" [fromListConstr]
#endif
{--------------------------------------------------------------------
  Query
--------------------------------------------------------------------}
-- | /O(1)/. Is the set empty?
null :: IntSet -> Bool
null Nil = True
null _ = False
{-# INLINE null #-}
-- | /O(n)/. Cardinality of the set.
size :: IntSet -> Int
size t
  = case t of
      -- Leaves count the set bits of their bitmap via 'bitcount'.
      Bin _ _ l r -> size l + size r
      Tip _ bm -> bitcount 0 bm
      Nil -> 0
-- | /O(min(n,W))/. Is the value a member of the set?
-- See Note: Local 'go' functions and capturing.
member :: Key -> IntSet -> Bool
member x = x `seq` go
  where
    -- 'go' captures x; guards descend by the branching bit.
    go (Bin p m l r)
      | nomatch x p m = False
      | zero x m = go l
      | otherwise = go r
    -- At a leaf: prefixes must agree and x's bit must be set in the bitmap.
    go (Tip y bm) = prefixOf x == y && bitmapOf x .&. bm /= 0
    go Nil = False
-- | /O(min(n,W))/. Is the element not in the set?
notMember :: Key -> IntSet -> Bool
notMember k s = not (member k s)
-- | /O(log n)/. Find largest element smaller than the given one.
--
-- > lookupLT 3 (fromList [3, 5]) == Nothing
-- > lookupLT 5 (fromList [3, 5]) == Just 3
-- See Note: Local 'go' functions and capturing.
lookupLT :: Key -> IntSet -> Maybe Key
lookupLT x t = x `seq` case t of
    -- A negative mask at the root means negatives live in r, non-negatives in l.
    Bin _ m l r | m < 0 -> if x >= 0 then go r l else go Nil r
    _ -> go Nil t
  where
    -- 'def' is the subtree holding the best (largest) candidate found so far.
    go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMax def else unsafeFindMax r
                         | zero x m = go def l
                         | otherwise = go l r
    go def (Tip kx bm) | prefixOf x > kx = Just $ kx + highestBitSet bm
                       | prefixOf x == kx && maskLT /= 0 = Just $ kx + highestBitSet maskLT
                       | otherwise = unsafeFindMax def
        -- maskLT keeps only the bits strictly below x's bit.
        where maskLT = (bitmapOf x - 1) .&. bm
    go def Nil = unsafeFindMax def
-- | /O(log n)/. Find smallest element greater than the given one.
--
-- > lookupGT 4 (fromList [3, 5]) == Just 5
-- > lookupGT 5 (fromList [3, 5]) == Nothing
-- See Note: Local 'go' functions and capturing.
lookupGT :: Key -> IntSet -> Maybe Key
lookupGT x t = x `seq` case t of
    -- Negative root mask: negatives in r, non-negatives in l.
    Bin _ m l r | m < 0 -> if x >= 0 then go Nil l else go l r
    _ -> go Nil t
  where
    -- 'def' is the subtree holding the best (smallest) candidate found so far.
    go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMin l else unsafeFindMin def
                         | zero x m = go r l
                         | otherwise = go def r
    go def (Tip kx bm) | prefixOf x < kx = Just $ kx + lowestBitSet bm
                       | prefixOf x == kx && maskGT /= 0 = Just $ kx + lowestBitSet maskGT
                       | otherwise = unsafeFindMin def
        -- maskGT keeps only the bits strictly above x's bit.
        where maskGT = (- ((bitmapOf x) `shiftLL` 1)) .&. bm
    go def Nil = unsafeFindMin def
-- | /O(log n)/. Find largest element smaller or equal to the given one.
--
-- > lookupLE 2 (fromList [3, 5]) == Nothing
-- > lookupLE 4 (fromList [3, 5]) == Just 3
-- > lookupLE 5 (fromList [3, 5]) == Just 5
-- See Note: Local 'go' functions and capturing.
lookupLE :: Key -> IntSet -> Maybe Key
lookupLE x t = x `seq` case t of
    -- Negative root mask: negatives in r, non-negatives in l.
    Bin _ m l r | m < 0 -> if x >= 0 then go r l else go Nil r
    _ -> go Nil t
  where
    -- 'def' is the subtree holding the best (largest) candidate found so far.
    go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMax def else unsafeFindMax r
                         | zero x m = go def l
                         | otherwise = go l r
    go def (Tip kx bm) | prefixOf x > kx = Just $ kx + highestBitSet bm
                       | prefixOf x == kx && maskLE /= 0 = Just $ kx + highestBitSet maskLE
                       | otherwise = unsafeFindMax def
        -- maskLE keeps x's bit and everything below it.
        where maskLE = (((bitmapOf x) `shiftLL` 1) - 1) .&. bm
    go def Nil = unsafeFindMax def
-- | /O(log n)/. Find smallest element greater or equal to the given one.
--
-- > lookupGE 3 (fromList [3, 5]) == Just 3
-- > lookupGE 4 (fromList [3, 5]) == Just 5
-- > lookupGE 6 (fromList [3, 5]) == Nothing
-- See Note: Local 'go' functions and capturing.
lookupGE :: Key -> IntSet -> Maybe Key
lookupGE x t = x `seq` case t of
    -- Negative root mask: negatives in r, non-negatives in l.
    Bin _ m l r | m < 0 -> if x >= 0 then go Nil l else go l r
    _ -> go Nil t
  where
    -- 'def' is the subtree holding the best (smallest) candidate found so far.
    go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMin l else unsafeFindMin def
                         | zero x m = go r l
                         | otherwise = go def r
    go def (Tip kx bm) | prefixOf x < kx = Just $ kx + lowestBitSet bm
                       | prefixOf x == kx && maskGE /= 0 = Just $ kx + lowestBitSet maskGE
                       | otherwise = unsafeFindMin def
        -- maskGE keeps x's bit and everything above it.
        where maskGE = (- (bitmapOf x)) .&. bm
    go def Nil = unsafeFindMin def
-- Helper function for lookupGE and lookupGT: the smallest element of a
-- subtree. It assumes that if a Bin node is given, it has m > 0.
unsafeFindMin :: IntSet -> Maybe Key
unsafeFindMin t =
  case t of
    Nil         -> Nothing
    Tip kx bm   -> Just (kx + lowestBitSet bm)
    Bin _ _ l _ -> unsafeFindMin l

-- Helper function for lookupLE and lookupLT: the largest element of a
-- subtree. It assumes that if a Bin node is given, it has m > 0.
unsafeFindMax :: IntSet -> Maybe Key
unsafeFindMax t =
  case t of
    Nil         -> Nothing
    Tip kx bm   -> Just (kx + highestBitSet bm)
    Bin _ _ _ r -> unsafeFindMax r
{--------------------------------------------------------------------
  Construction
--------------------------------------------------------------------}
-- | /O(1)/. The empty set.
empty :: IntSet
empty
  = Nil
{-# INLINE empty #-}
-- | /O(1)/. A set of one element.
singleton :: Key -> IntSet
singleton x
  = Tip (prefixOf x) (bitmapOf x)
{-# INLINE singleton #-}
{--------------------------------------------------------------------
  Insert
--------------------------------------------------------------------}
-- | /O(min(n,W))/. Add a value to the set. There is no left- or right bias for
-- IntSets.
insert :: Key -> IntSet -> IntSet
insert x = x `seq` insertBM (prefixOf x) (bitmapOf x)
-- Helper function for insert and union.
-- Inserts every value represented by (prefix, bitmap) into the set.
insertBM :: Prefix -> BitMap -> IntSet -> IntSet
insertBM kx bm t = kx `seq` bm `seq`
  case t of
    Bin p m l r
      -- Prefix disagrees: create a new branching node via 'link'.
      | nomatch kx p m -> link kx (Tip kx bm) p t
      | zero kx m -> Bin p m (insertBM kx bm l) r
      | otherwise -> Bin p m l (insertBM kx bm r)
    Tip kx' bm'
      -- Same prefix: just OR the bitmaps together.
      | kx' == kx -> Tip kx' (bm .|. bm')
      | otherwise -> link kx (Tip kx bm) kx' t
    Nil -> Tip kx bm
-- | /O(min(n,W))/. Delete a value in the set. Returns the
-- original set when the value was not present.
delete :: Key -> IntSet -> IntSet
delete x = x `seq` deleteBM (prefixOf x) (bitmapOf x)
-- Deletes all values mentioned in the BitMap from the set.
-- Helper function for delete and difference.
-- Note: uses the smart constructors 'bin'/'tip' so emptied subtrees collapse.
deleteBM :: Prefix -> BitMap -> IntSet -> IntSet
deleteBM kx bm t = kx `seq` bm `seq`
  case t of
    Bin p m l r
      | nomatch kx p m -> t
      | zero kx m -> bin p m (deleteBM kx bm l) r
      | otherwise -> bin p m l (deleteBM kx bm r)
    Tip kx' bm'
      -- Clear exactly the bits mentioned in bm.
      | kx' == kx -> tip kx (bm' .&. complement bm)
      | otherwise -> t
    Nil -> Nil
{--------------------------------------------------------------------
  Union
--------------------------------------------------------------------}
-- | The union of a list of sets: a strict left fold of 'union' over 'empty'.
unions :: [IntSet] -> IntSet
unions = foldlStrict union empty
-- | /O(n+m)/. The union of two sets.
union :: IntSet -> IntSet -> IntSet
union t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  -- 'shorter' compares masks: the tree with the shorter prefix spans more keys.
  | shorter m1 m2 = union1
  | shorter m2 m1 = union2
  | p1 == p2 = Bin p1 m1 (union l1 l2) (union r1 r2)
  | otherwise = link p1 t1 p2 t2
  where
    -- t1 spans t2: push t2 down the matching side of t1 (or link if disjoint).
    union1 | nomatch p2 p1 m1 = link p1 t1 p2 t2
           | zero p2 m1 = Bin p1 m1 (union l1 t2) r1
           | otherwise = Bin p1 m1 l1 (union r1 t2)
    -- Symmetric case: t2 spans t1.
    union2 | nomatch p1 p2 m2 = link p1 t1 p2 t2
           | zero p1 m2 = Bin p2 m2 (union t1 l2) r2
           | otherwise = Bin p2 m2 l2 (union t1 r2)
union t@(Bin _ _ _ _) (Tip kx bm) = insertBM kx bm t
union t@(Bin _ _ _ _) Nil = t
union (Tip kx bm) t = insertBM kx bm t
union Nil t = t
{--------------------------------------------------------------------
  Difference
--------------------------------------------------------------------}
-- | /O(n+m)/. Difference between two sets.
difference :: IntSet -> IntSet -> IntSet
difference t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  | shorter m1 m2 = difference1
  | shorter m2 m1 = difference2
  | p1 == p2 = bin p1 m1 (difference l1 l2) (difference r1 r2)
  | otherwise = t1
  where
    -- t1 spans t2: only one side of t1 can overlap t2.
    difference1 | nomatch p2 p1 m1 = t1
                | zero p2 m1 = bin p1 m1 (difference l1 t2) r1
                | otherwise = bin p1 m1 l1 (difference r1 t2)
    -- t2 spans t1: only one side of t2 can overlap t1.
    difference2 | nomatch p1 p2 m2 = t1
                | zero p1 m2 = difference t1 l2
                | otherwise = difference t1 r2
difference t@(Bin _ _ _ _) (Tip kx bm) = deleteBM kx bm t
difference t@(Bin _ _ _ _) Nil = t
-- Tip minus tree: locate the one leaf of t2 that could share t1's prefix.
difference t1@(Tip kx bm) t2 = differenceTip t2
  where differenceTip (Bin p2 m2 l2 r2) | nomatch kx p2 m2 = t1
                                        | zero kx m2 = differenceTip l2
                                        | otherwise = differenceTip r2
        differenceTip (Tip kx2 bm2) | kx == kx2 = tip kx (bm .&. complement bm2)
                                    | otherwise = t1
        differenceTip Nil = t1
difference Nil _ = Nil
{--------------------------------------------------------------------
  Intersection
--------------------------------------------------------------------}
-- | /O(n+m)/. The intersection of two sets.
intersection :: IntSet -> IntSet -> IntSet
intersection t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  | shorter m1 m2 = intersection1
  | shorter m2 m1 = intersection2
  | p1 == p2 = bin p1 m1 (intersection l1 l2) (intersection r1 r2)
  | otherwise = Nil
  where
    -- t1 spans t2: only one side of t1 can intersect t2.
    intersection1 | nomatch p2 p1 m1 = Nil
                  | zero p2 m1 = intersection l1 t2
                  | otherwise = intersection r1 t2
    -- Symmetric case: t2 spans t1.
    intersection2 | nomatch p1 p2 m2 = Nil
                  | zero p1 m2 = intersection t1 l2
                  | otherwise = intersection t1 r2
-- Tree vs Tip: walk down to the (unique) leaf that can share the Tip's prefix.
intersection t1@(Bin _ _ _ _) (Tip kx2 bm2) = intersectBM t1
  where intersectBM (Bin p1 m1 l1 r1) | nomatch kx2 p1 m1 = Nil
                                      | zero kx2 m1 = intersectBM l1
                                      | otherwise = intersectBM r1
        intersectBM (Tip kx1 bm1) | kx1 == kx2 = tip kx1 (bm1 .&. bm2)
                                  | otherwise = Nil
        intersectBM Nil = Nil
intersection (Bin _ _ _ _) Nil = Nil
intersection (Tip kx1 bm1) t2 = intersectBM t2
  where intersectBM (Bin p2 m2 l2 r2) | nomatch kx1 p2 m2 = Nil
                                      | zero kx1 m2 = intersectBM l2
                                      | otherwise = intersectBM r2
        intersectBM (Tip kx2 bm2) | kx1 == kx2 = tip kx1 (bm1 .&. bm2)
                                  | otherwise = Nil
        intersectBM Nil = Nil
intersection Nil _ = Nil
{--------------------------------------------------------------------
  Subset
--------------------------------------------------------------------}
-- | /O(n+m)/. Is this a proper subset? (ie. a subset but not equal).
isProperSubsetOf :: IntSet -> IntSet -> Bool
isProperSubsetOf s1 s2 = subsetCmp s1 s2 == LT
-- Compares two sets for the subset relation:
--   EQ -> equal, LT -> proper subset, GT -> not a subset.
subsetCmp :: IntSet -> IntSet -> Ordering
subsetCmp t1@(Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
  -- t1 spans more keys than t2, so t1 cannot fit inside t2.
  | shorter m1 m2 = GT
  | shorter m2 m1 = case subsetCmpLt of
                      GT -> GT
                      _ -> LT
  | p1 == p2 = subsetCmpEq
  | otherwise = GT -- disjoint
  where
    subsetCmpLt | nomatch p1 p2 m2 = GT
                | zero p1 m2 = subsetCmp t1 l2
                | otherwise = subsetCmp t1 r2
    -- Same prefix and mask: the relation is EQ only if both children are EQ.
    subsetCmpEq = case (subsetCmp l1 l2, subsetCmp r1 r2) of
                    (GT,_ ) -> GT
                    (_ ,GT) -> GT
                    (EQ,EQ) -> EQ
                    _ -> LT
subsetCmp (Bin _ _ _ _) _ = GT
subsetCmp (Tip kx1 bm1) (Tip kx2 bm2)
  | kx1 /= kx2 = GT -- disjoint
  | bm1 == bm2 = EQ
  | bm1 .&. complement bm2 == 0 = LT
  | otherwise = GT
subsetCmp t1@(Tip kx _) (Bin p m l r)
  | nomatch kx p m = GT
  | zero kx m = case subsetCmp t1 l of GT -> GT ; _ -> LT
  | otherwise = case subsetCmp t1 r of GT -> GT ; _ -> LT
subsetCmp (Tip _ _) Nil = GT -- disjoint
subsetCmp Nil Nil = EQ
subsetCmp Nil _ = LT
-- | /O(n+m)/. Is this a subset?
-- @(s1 `isSubsetOf` s2)@ tells whether @s1@ is a subset of @s2@.
isSubsetOf :: IntSet -> IntSet -> Bool
isSubsetOf t1@(Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
  -- t1 spans more keys than t2: cannot be contained.
  | shorter m1 m2 = False
  | shorter m2 m1 = match p1 p2 m2 && (if zero p1 m2 then isSubsetOf t1 l2
                                       else isSubsetOf t1 r2)
  | otherwise = (p1==p2) && isSubsetOf l1 l2 && isSubsetOf r1 r2
isSubsetOf (Bin _ _ _ _) _ = False
-- Leaf vs leaf: same prefix, and every bit of bm1 must appear in bm2.
isSubsetOf (Tip kx1 bm1) (Tip kx2 bm2) = kx1 == kx2 && bm1 .&. complement bm2 == 0
isSubsetOf t1@(Tip kx _) (Bin p m l r)
  | nomatch kx p m = False
  | zero kx m = isSubsetOf t1 l
  | otherwise = isSubsetOf t1 r
isSubsetOf (Tip _ _) Nil = False
isSubsetOf Nil _ = True
{--------------------------------------------------------------------
  Filter
--------------------------------------------------------------------}
-- | /O(n)/. Filter all elements that satisfy some predicate.
filter :: (Key -> Bool) -> IntSet -> IntSet
filter predicate t
  = case t of
      Bin p m l r
        -> bin p m (filter predicate l) (filter predicate r)
      Tip kx bm
        -- Rebuild the bitmap bit by bit, keeping only satisfying suffixes.
        -> tip kx (foldl'Bits 0 (bitPred kx) 0 bm)
      Nil -> Nil
  where bitPred kx bm bi | predicate (kx + bi) = bm .|. bitmapOfSuffix bi
                         | otherwise = bm
        {-# INLINE bitPred #-}
-- | /O(n)/. Partition the set according to some predicate:
-- the first set holds the elements satisfying it, the second the rest.
partition :: (Key -> Bool) -> IntSet -> (IntSet,IntSet)
partition predicate0 t0 = toPair $ go predicate0 t0
  where
    -- Internally uses the strict pair ':*:' to avoid building lazy tuples.
    go predicate t
      = case t of
          Bin p m l r
            -> let (l1 :*: l2) = go predicate l
                   (r1 :*: r2) = go predicate r
               in bin p m l1 r1 :*: bin p m l2 r2
          Tip kx bm
            -- bm1 holds the satisfying bits; xor leaves the complement.
            -> let bm1 = foldl'Bits 0 (bitPred kx) 0 bm
               in tip kx bm1 :*: tip kx (bm `xor` bm1)
          Nil -> (Nil :*: Nil)
      where bitPred kx bm bi | predicate (kx + bi) = bm .|. bitmapOfSuffix bi
                             | otherwise = bm
            {-# INLINE bitPred #-}
-- | /O(min(n,W))/. The expression (@'split' x set@) is a pair @(set1,set2)@
-- where @set1@ comprises the elements of @set@ less than @x@ and @set2@
-- comprises the elements of @set@ greater than @x@ (@x@ itself is dropped).
--
-- > split 3 (fromList [1..5]) == (fromList [1,2], fromList [4,5])
split :: Key -> IntSet -> (IntSet,IntSet)
split x t =
  case t of
      Bin _ m l r
          | m < 0 -> if x >= 0 -- handle negative numbers.
                     then case go x l of (lt :*: gt) -> let lt' = union lt r
                                                        in lt' `seq` (lt', gt)
                     else case go x r of (lt :*: gt) -> let gt' = union gt l
                                                        in gt' `seq` (lt, gt')
      _ -> case go x t of
          (lt :*: gt) -> (lt, gt)
  where
    go !x' t'@(Bin p m l r)
        | match x' p m = if zero x' m
                         then case go x' l of
                             (lt :*: gt) -> lt :*: union gt r
                         else case go x' r of
                             (lt :*: gt) -> union lt l :*: gt
        -- Prefix mismatch: the whole subtree is entirely below or above x'.
        | otherwise = if x' < p then (Nil :*: t')
                      else (t' :*: Nil)
    go x' t'@(Tip kx' bm)
        | kx' > x' = (Nil :*: t')
          -- equivalent to kx' > prefixOf x'
        | kx' < prefixOf x' = (t' :*: Nil)
        -- Same prefix: split the bitmap around x's bit (excluding it).
        | otherwise = tip kx' (bm .&. lowerBitmap) :*: tip kx' (bm .&. higherBitmap)
            where lowerBitmap = bitmapOf x' - 1
                  higherBitmap = complement (lowerBitmap + bitmapOf x')
    go _ Nil = (Nil :*: Nil)
-- | /O(min(n,W))/. Performs a 'split' but also returns whether the pivot
-- element was found in the original set.
splitMember :: Key -> IntSet -> (IntSet,Bool,IntSet)
splitMember x t =
  case t of
      -- Negative root mask: negatives live in r, non-negatives in l.
      Bin _ m l r | m < 0 -> if x >= 0
                             then case go x l of
                                 (lt, fnd, gt) -> let lt' = union lt r
                                                  in lt' `seq` (lt', fnd, gt)
                             else case go x r of
                                 (lt, fnd, gt) -> let gt' = union gt l
                                                  in gt' `seq` (lt, fnd, gt')
      _ -> go x t
  where
    go x' t'@(Bin p m l r)
        | match x' p m = if zero x' m
                         then case go x' l of
                             (lt, fnd, gt) -> (lt, fnd, union gt r)
                         else case go x' r of
                             (lt, fnd, gt) -> (union lt l, fnd, gt)
        | otherwise = if x' < p then (Nil, False, t') else (t', False, Nil)
    go x' t'@(Tip kx' bm)
        | kx' > x' = (Nil, False, t')
          -- equivalent to kx' > prefixOf x'
        | kx' < prefixOf x' = (t', False, Nil)
        -- Same prefix: split the bitmap around x's bit and test membership.
        | otherwise = let lt = tip kx' (bm .&. lowerBitmap)
                          found = (bm .&. bitmapOfx') /= 0
                          gt = tip kx' (bm .&. higherBitmap)
                      in lt `seq` found `seq` gt `seq` (lt, found, gt)
            where bitmapOfx' = bitmapOf x'
                  lowerBitmap = bitmapOfx' - 1
                  higherBitmap = complement (lowerBitmap + bitmapOfx')
    go _ Nil = (Nil, False, Nil)
{----------------------------------------------------------------------
Min/Max
----------------------------------------------------------------------}
-- | /O(min(n,W))/. Retrieves the maximal key of the set, and the set
-- stripped of that element, or 'Nothing' if passed an empty set.
maxView :: IntSet -> Maybe (Key, IntSet)
maxView t =
  case t of Nil -> Nothing
            -- Negative root mask: the maximum lives among the
            -- non-negative keys in 'l', so descend left and rebuild the
            -- root with the smart constructor 'bin'.
            Bin p m l r | m < 0 -> case go l of (result, l') -> Just (result, bin p m l' r)
            _ -> Just (go t)
  where
    -- Walk to the rightmost tip and clear its highest set bit.
    go (Bin p m l r) = case go r of (result, r') -> (result, bin p m l r')
    go (Tip kx bm) = case highestBitSet bm of bi -> (kx + bi, tip kx (bm .&. complement (bitmapOfSuffix bi)))
    go Nil = error "maxView Nil"

-- | /O(min(n,W))/. Retrieves the minimal key of the set, and the set
-- stripped of that element, or 'Nothing' if passed an empty set.
minView :: IntSet -> Maybe (Key, IntSet)
minView t =
  case t of Nil -> Nothing
            -- Negative root mask: the minimum lives among the negative
            -- keys in 'r', so descend right and rebuild with 'bin'.
            Bin p m l r | m < 0 -> case go r of (result, r') -> Just (result, bin p m l r')
            _ -> Just (go t)
  where
    -- Walk to the leftmost tip and clear its lowest set bit.
    go (Bin p m l r) = case go l of (result, l') -> (result, bin p m l' r)
    go (Tip kx bm) = case lowestBitSet bm of bi -> (kx + bi, tip kx (bm .&. complement (bitmapOfSuffix bi)))
    go Nil = error "minView Nil"
-- | /O(min(n,W))/. Delete and find the minimal element.
--
-- > deleteFindMin set = (findMin set, deleteMin set)
deleteFindMin :: IntSet -> (Key, IntSet)
deleteFindMin s =
    fromMaybe (error "deleteFindMin: empty set has no minimal element") (minView s)

-- | /O(min(n,W))/. Delete and find the maximal element.
--
-- > deleteFindMax set = (findMax set, deleteMax set)
deleteFindMax :: IntSet -> (Key, IntSet)
deleteFindMax s =
    fromMaybe (error "deleteFindMax: empty set has no maximal element") (maxView s)

-- | /O(min(n,W))/. The minimal element of the set.
findMin :: IntSet -> Key
findMin t0 =
    case t0 of
      Nil       -> error "findMin: empty set has no minimal element"
      Tip kx bm -> kx + lowestBitSet bm
      Bin _ m l r
        | m < 0     -> leftmost r   -- negative keys (the smallest) live in 'r'
        | otherwise -> leftmost l
  where
    -- Descend to the leftmost tip of a sign-homogeneous subtree.
    leftmost (Tip kx bm)    = kx + lowestBitSet bm
    leftmost (Bin _ _ l' _) = leftmost l'
    leftmost Nil            = error "findMin Nil"

-- | /O(min(n,W))/. The maximal element of a set.
findMax :: IntSet -> Key
findMax t0 =
    case t0 of
      Nil       -> error "findMax: empty set has no maximal element"
      Tip kx bm -> kx + highestBitSet bm
      Bin _ m l r
        | m < 0     -> rightmost l  -- non-negative keys (the largest) live in 'l'
        | otherwise -> rightmost r
  where
    -- Descend to the rightmost tip of a sign-homogeneous subtree.
    rightmost (Tip kx bm)    = kx + highestBitSet bm
    rightmost (Bin _ _ _ r') = rightmost r'
    rightmost Nil            = error "findMax Nil"

-- | /O(min(n,W))/. Delete the minimal element. Returns an empty set if the set is empty.
--
-- Note that this is a change of behaviour for consistency with 'Data.Set.Set' –
-- versions prior to 0.5 threw an error if the 'IntSet' was already empty.
deleteMin :: IntSet -> IntSet
deleteMin s = maybe Nil snd (minView s)

-- | /O(min(n,W))/. Delete the maximal element. Returns an empty set if the set is empty.
--
-- Note that this is a change of behaviour for consistency with 'Data.Set.Set' –
-- versions prior to 0.5 threw an error if the 'IntSet' was already empty.
deleteMax :: IntSet -> IntSet
deleteMax s = maybe Nil snd (maxView s)
{----------------------------------------------------------------------
Map
----------------------------------------------------------------------}
-- | /O(n*min(n,W))/.
-- @'map' f s@ is the set obtained by applying @f@ to each element of @s@.
--
-- It's worth noting that the size of the result may be smaller if,
-- for some @(x,y)@, @x \/= y && f x == f y@
map :: (Key -> Key) -> IntSet -> IntSet
map f s = fromList (List.map f (toList s))
{--------------------------------------------------------------------
Fold
--------------------------------------------------------------------}
-- | /O(n)/. Fold the elements in the set using the given right-associative
-- binary operator. This function is an equivalent of 'foldr' and is present
-- for compatibility only.
--
-- /Please note that fold will be deprecated in the future and removed./
fold :: (Key -> b -> b) -> b -> IntSet -> b
fold = foldr
{-# INLINE fold #-}

-- | /O(n)/. Fold the elements in the set using the given right-associative
-- binary operator, such that @'foldr' f z == 'Prelude.foldr' f z . 'toAscList'@.
--
-- For example,
--
-- > toAscList set = foldr (:) [] set
foldr :: (Key -> b -> b) -> b -> IntSet -> b
foldr f z = \t ->      -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z l) r -- put negative numbers before
                        | otherwise -> go (go z r) l
            _ -> go z t
  where
    -- Fold one sign-homogeneous subtree, rightmost tip first.
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldrBits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' r) l
{-# INLINE foldr #-}

-- | /O(n)/. A strict version of 'foldr'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldr' :: (Key -> b -> b) -> b -> IntSet -> b
foldr' f z = \t ->      -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z l) r -- put negative numbers before
                        | otherwise -> go (go z r) l
            _ -> go z t
  where
    -- CPP macro making 'go' strict in its first (accumulator) argument.
    STRICT_1_OF_2(go)
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldr'Bits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' r) l
{-# INLINE foldr' #-}

-- | /O(n)/. Fold the elements in the set using the given left-associative
-- binary operator, such that @'foldl' f z == 'Prelude.foldl' f z . 'toAscList'@.
--
-- For example,
--
-- > toDescList set = foldl (flip (:)) [] set
foldl :: (a -> Key -> a) -> a -> IntSet -> a
foldl f z = \t ->      -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z r) l -- put negative numbers before
                        | otherwise -> go (go z l) r
            _ -> go z t
  where
    STRICT_1_OF_2(go)
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldlBits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' l) r
{-# INLINE foldl #-}

-- | /O(n)/. A strict version of 'foldl'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldl' :: (a -> Key -> a) -> a -> IntSet -> a
foldl' f z = \t ->      -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z r) l -- put negative numbers before
                        | otherwise -> go (go z l) r
            _ -> go z t
  where
    STRICT_1_OF_2(go)
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldl'Bits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' l) r
{-# INLINE foldl' #-}
{--------------------------------------------------------------------
List variations
--------------------------------------------------------------------}
-- | /O(n)/. An alias of 'toAscList'. The elements of a set in ascending order.
-- Subject to list fusion.
elems :: IntSet -> [Key]
elems
  = toAscList

{--------------------------------------------------------------------
  Lists
--------------------------------------------------------------------}
-- | /O(n)/. Convert the set to a list of elements. Subject to list fusion.
toList :: IntSet -> [Key]
toList
  = toAscList

-- | /O(n)/. Convert the set to an ascending list of elements. Subject to list
-- fusion.
-- Kept as a simple partial application of 'foldr' so the RULES below
-- can rewrite calls to it into build/fold form.
toAscList :: IntSet -> [Key]
toAscList = foldr (:) []

-- | /O(n)/. Convert the set to a descending list of elements. Subject to list
-- fusion.
toDescList :: IntSet -> [Key]
toDescList = foldl (flip (:)) []

-- List fusion for the list generating functions.
#if __GLASGOW_HASKELL__
-- The foldrFB and foldlFB are foldr and foldl equivalents, used for list fusion.
-- They are important to convert unfused to{Asc,Desc}List back, see mapFB in prelude.
foldrFB :: (Key -> b -> b) -> b -> IntSet -> b
foldrFB = foldr
{-# INLINE[0] foldrFB #-}
foldlFB :: (a -> Key -> a) -> a -> IntSet -> a
foldlFB = foldl
{-# INLINE[0] foldlFB #-}

-- Inline elems and toList, so that we need to fuse only toAscList.
{-# INLINE elems #-}
{-# INLINE toList #-}

-- The fusion is enabled up to phase 2 included. If it does not succeed,
-- convert in phase 1 the expanded to{Asc,Desc}List calls back to
-- to{Asc,Desc}List. In phase 0, we inline fold{lr}FB (which were used in
-- a list fusion, otherwise it would go away in phase 1), and let compiler do
-- whatever it wants with to{Asc,Desc}List -- it was forbidden to inline it
-- before phase 0, otherwise the fusion rules would not fire at all.
{-# NOINLINE[0] toAscList #-}
{-# NOINLINE[0] toDescList #-}
{-# RULES "IntSet.toAscList" [~1] forall s . toAscList s = build (\c n -> foldrFB c n s) #-}
{-# RULES "IntSet.toAscListBack" [1] foldrFB (:) [] = toAscList #-}
{-# RULES "IntSet.toDescList" [~1] forall s . toDescList s = build (\c n -> foldlFB (\xs x -> c x xs) n s) #-}
{-# RULES "IntSet.toDescListBack" [1] foldlFB (\xs x -> x : xs) [] = toDescList #-}
#endif
-- | /O(n*min(n,W))/. Create a set from a list of integers.
-- Elements are inserted one by one with a strict left fold so no thunk
-- chain of partially-built sets accumulates.
fromList :: [Key] -> IntSet
fromList = foldlStrict (\acc x -> insert x acc) empty
-- | /O(n)/. Build a set from an ascending list of elements.
-- /The precondition (input list is ascending) is not checked./
fromAscList :: [Key] -> IntSet
fromAscList [] = Nil
fromAscList (x0 : xs0) = fromDistinctAscList (combineEq x0 xs0)
  where
    -- Collapse runs of equal elements so the distinct-list builder
    -- receives a strictly ascending list.
    combineEq x' [] = [x']
    combineEq x' (x:xs)
      | x==x'     = combineEq x' xs
      | otherwise = x' : combineEq x xs

-- | /O(n)/. Build a set from an ascending list of distinct elements.
-- /The precondition (input list is strictly ascending) is not checked./
fromDistinctAscList :: [Key] -> IntSet
fromDistinctAscList []         = Nil
fromDistinctAscList (z0 : zs0) = work (prefixOf z0) (bitmapOf z0) zs0 Nada
  where
    -- 'work' accumulates all values that go into one tip, before passing this Tip
    -- to 'reduce'
    work kx bm []     stk = finish kx (Tip kx bm) stk
    work kx bm (z:zs) stk | kx == prefixOf z = work kx (bm .|. bitmapOf z) zs stk
    work kx bm (z:zs) stk = reduce z zs (branchMask z kx) kx (Tip kx bm) stk

    -- 'reduce' merges the finished tip into the stack of pending
    -- subtrees, collapsing stack entries whose branch mask is longer
    -- (more specific) than the incoming one.
    reduce z zs _ px tx Nada = work (prefixOf z) (bitmapOf z) zs (Push px tx Nada)
    reduce z zs m px tx stk@(Push py ty stk') =
        let mxy = branchMask px py
            pxy = mask px mxy
        in if shorter m mxy
             then reduce z zs m pxy (Bin pxy mxy ty tx) stk'
             else work (prefixOf z) (bitmapOf z) zs (Push px tx stk)

    -- 'finish' links all remaining stack entries into the final tree.
    finish _ t Nada = t
    finish px tx (Push py ty stk) = finish p (link py ty px tx) stk
        where m = branchMask px py
              p = mask px m

-- Explicit stack of already-built subtrees used by 'fromDistinctAscList'.
-- All three fields are strict; the prefix is additionally unpacked.
data Stack = Push {-# UNPACK #-} !Prefix !IntSet !Stack | Nada
{--------------------------------------------------------------------
Eq
--------------------------------------------------------------------}
instance Eq IntSet where
  (==) = equal
  (/=) = nequal

-- | Structural equality. Equal sets always have identical tree shape,
-- so a plain structural comparison suffices.
equal :: IntSet -> IntSet -> Bool
equal t1 t2 = case (t1, t2) of
  (Bin p1 m1 l1 r1, Bin p2 m2 l2 r2)
      -> m1 == m2 && p1 == p2 && equal l1 l2 && equal r1 r2
  (Tip kx1 bm1, Tip kx2 bm2)
      -> kx1 == kx2 && bm1 == bm2
  (Nil, Nil) -> True
  _          -> False

-- | Negation of 'equal', with each comparison dualised so the
-- disjunction can succeed (short-circuit) as early as possible.
nequal :: IntSet -> IntSet -> Bool
nequal t1 t2 = case (t1, t2) of
  (Bin p1 m1 l1 r1, Bin p2 m2 l2 r2)
      -> m1 /= m2 || p1 /= p2 || nequal l1 l2 || nequal r1 r2
  (Tip kx1 bm1, Tip kx2 bm2)
      -> kx1 /= kx2 || bm1 /= bm2
  (Nil, Nil) -> False
  _          -> True

{--------------------------------------------------------------------
  Ord
--------------------------------------------------------------------}
instance Ord IntSet where
  compare s1 s2 = toAscList s1 `compare` toAscList s2
  -- tentative implementation. See if more efficient exists.

{--------------------------------------------------------------------
  Show
--------------------------------------------------------------------}
instance Show IntSet where
  showsPrec p xs =
    showParen (p > 10) (showString "fromList " . shows (toList xs))
{--------------------------------------------------------------------
Read
--------------------------------------------------------------------}
instance Read IntSet where
#ifdef __GLASGOW_HASKELL__
  -- GHC: ReadPrec-based parser so "fromList [..]" round-trips with 'show'.
  readPrec = parens $ prec 10 $ do
    Ident "fromList" <- lexP
    xs <- readPrec
    return (fromList xs)

  readListPrec = readListPrecDefault
#else
  -- Portable fallback via plain ReadS.
  readsPrec p = readParen (p > 10) $ \ r -> do
    ("fromList",s) <- lex r
    (xs,t) <- reads s
    return (fromList xs,t)
#endif

{--------------------------------------------------------------------
  Typeable
--------------------------------------------------------------------}
#include "Typeable.h"
INSTANCE_TYPEABLE0(IntSet,intSetTc,"IntSet")

{--------------------------------------------------------------------
  NFData
--------------------------------------------------------------------}
-- The IntSet constructors consist only of strict fields of Ints and
-- IntSets, thus the default NFData instance which evaluates to whnf
-- should suffice
instance NFData IntSet
{--------------------------------------------------------------------
Debugging
--------------------------------------------------------------------}
-- | /O(n)/. Show the tree that implements the set. The tree is shown
-- in a compressed, hanging format.
showTree :: IntSet -> String
showTree s
  = showTreeWith True False s

{- | /O(n)/. The expression (@'showTreeWith' hang wide map@) shows
 the tree that implements the set. If @hang@ is
 'True', a /hanging/ tree is shown otherwise a rotated tree is shown. If
 @wide@ is 'True', an extra wide version is shown.
-}
showTreeWith :: Bool -> Bool -> IntSet -> String
showTreeWith hang wide t
  | hang      = (showsTreeHang wide [] t) ""
  | otherwise = (showsTree wide [] [] t) ""

-- Rotated rendering: right subtree first, then the node, then the left
-- subtree; 'lbars'/'rbars' carry the bar prefixes for the two directions.
showsTree :: Bool -> [String] -> [String] -> IntSet -> ShowS
showsTree wide lbars rbars t
  = case t of
      Bin p m l r
          -> showsTree wide (withBar rbars) (withEmpty rbars) r .
             showWide wide rbars .
             showsBars lbars . showString (showBin p m) . showString "\n" .
             showWide wide lbars .
             showsTree wide (withEmpty lbars) (withBar lbars) l
      Tip kx bm
          -> showsBars lbars . showString " " . shows kx . showString " + " .
             showsBitMap bm . showString "\n"
      Nil -> showsBars lbars . showString "|\n"

-- Hanging rendering: the node first, then left and right subtrees below.
showsTreeHang :: Bool -> [String] -> IntSet -> ShowS
showsTreeHang wide bars t
  = case t of
      Bin p m l r
          -> showsBars bars . showString (showBin p m) . showString "\n" .
             showWide wide bars .
             showsTreeHang wide (withBar bars) l .
             showWide wide bars .
             showsTreeHang wide (withEmpty bars) r
      Tip kx bm
          -> showsBars bars . showString " " . shows kx . showString " + " .
             showsBitMap bm . showString "\n"
      Nil -> showsBars bars . showString "|\n"

-- Rendering of an internal node; prefix and mask are deliberately elided.
showBin :: Prefix -> Mask -> String
showBin _ _
  = "*" -- ++ show (p,m)

-- In wide mode, emit a spacer line carrying the current bar prefix.
showWide :: Bool -> [String] -> String -> String
showWide wide bars
  | wide      = showString (concat (reverse bars)) . showString "|\n"
  | otherwise = id

-- Emit the accumulated bar prefix (minus the innermost entry) followed
-- by the node connector.
showsBars :: [String] -> ShowS
showsBars bars
  = case bars of
      [] -> id
      _  -> showString (concat (reverse (tail bars))) . showString node

showsBitMap :: Word -> ShowS
showsBitMap = showString . showBitMap

-- Render a tip's bitmap as the list of its set bit positions.
showBitMap :: Word -> String
showBitMap w = show $ foldrBits 0 (:) [] w

-- Connector drawn in front of every node.
node :: String
node = "+--"

-- Push a bar / a blank onto the prefix stack for the next level down.
withBar, withEmpty :: [String] -> [String]
withBar bars = "| ":bars
withEmpty bars = " ":bars
{--------------------------------------------------------------------
Helpers
--------------------------------------------------------------------}
{--------------------------------------------------------------------
Link
--------------------------------------------------------------------}
-- | Join two trees whose prefixes @p1@ and @p2@ are known to disagree:
-- create a 'Bin' whose mask is the highest differing bit, placing the
-- tree with a zero bit at that position on the left.
link :: Prefix -> IntSet -> Prefix -> IntSet -> IntSet
link p1 t1 p2 t2 =
    if zero p1 m then Bin p m t1 t2 else Bin p m t2 t1
  where
    m = branchMask p1 p2
    p = mask p1 m
{-# INLINE link #-}

{--------------------------------------------------------------------
  Smart constructor 'bin': never builds a 'Bin' with an empty child.
--------------------------------------------------------------------}
bin :: Prefix -> Mask -> IntSet -> IntSet -> IntSet
bin p m l r = case (l, r) of
    (_, Nil) -> l
    (Nil, _) -> r
    _        -> Bin p m l r
{-# INLINE bin #-}

{--------------------------------------------------------------------
  Smart constructor 'tip': never builds a 'Tip' with an empty bitmap.
--------------------------------------------------------------------}
tip :: Prefix -> BitMap -> IntSet
tip kx bm = if bm == 0 then Nil else Tip kx bm
{-# INLINE tip #-}
{----------------------------------------------------------------------
  Functions that generate Prefix and BitMap of a Key or a Suffix.
----------------------------------------------------------------------}

-- Mask selecting the low-order bits of a key that index into a tip's
-- bitmap; numerically the word size minus one (e.g. 63 on 64 bit).
suffixBitMask :: Int
#if MIN_VERSION_base_4_7_0
suffixBitMask = finiteBitSize (undefined::Word) - 1
#else
suffixBitMask = bitSize (undefined::Word) - 1
#endif
{-# INLINE suffixBitMask #-}

-- Complementary mask selecting the high-order (prefix) bits of a key.
prefixBitMask :: Int
prefixBitMask = complement suffixBitMask
{-# INLINE prefixBitMask #-}

-- High-order bits of a key: identifies the tip the key belongs to.
prefixOf :: Int -> Prefix
prefixOf x = x .&. prefixBitMask
{-# INLINE prefixOf #-}

-- Low-order bits of a key: its bit position inside the tip's bitmap.
suffixOf :: Int -> Int
suffixOf x = x .&. suffixBitMask
{-# INLINE suffixOf #-}

-- Bitmap with exactly the bit for the given suffix set.
bitmapOfSuffix :: Int -> BitMap
bitmapOfSuffix s = 1 `shiftLL` s
{-# INLINE bitmapOfSuffix #-}

-- Bitmap with exactly the bit for the given key set.
bitmapOf :: Int -> BitMap
bitmapOf x = bitmapOfSuffix (suffixOf x)
{-# INLINE bitmapOf #-}
{--------------------------------------------------------------------
  Endian independent bit twiddling
--------------------------------------------------------------------}
-- True when key i has a zero bit at the (single-bit) mask position m;
-- such keys belong in the left subtree of the corresponding 'Bin'.
zero :: Int -> Mask -> Bool
zero i m
  = (natFromInt i) .&. (natFromInt m) == 0
{-# INLINE zero #-}

-- Test whether key i falls under prefix p, where mask m marks the
-- branching bit just below the prefix.
nomatch,match :: Int -> Prefix -> Mask -> Bool
nomatch i p m
  = (mask i m) /= p
{-# INLINE nomatch #-}
match i p m
  = (mask i m) == p
{-# INLINE match #-}

-- Suppose a is largest such that 2^a divides 2*m.
-- Then mask i m is i with the low a bits zeroed out.
mask :: Int -> Mask -> Prefix
mask i m
  = maskW (natFromInt i) (natFromInt m)
{-# INLINE mask #-}

{--------------------------------------------------------------------
  Big endian operations
--------------------------------------------------------------------}
-- Clear the mask bit and everything below it in i (unsigned arithmetic).
maskW :: Nat -> Nat -> Prefix
maskW i m
  = intFromNat (i .&. (complement (m-1) `xor` m))
{-# INLINE maskW #-}

-- A numerically larger (as unsigned) mask bit means a shorter, i.e.
-- less specific, prefix.
shorter :: Mask -> Mask -> Bool
shorter m1 m2
  = (natFromInt m1) > (natFromInt m2)
{-# INLINE shorter #-}

-- Single-bit mask at the highest bit position where the two prefixes
-- disagree.
branchMask :: Prefix -> Prefix -> Mask
branchMask p1 p2
  = intFromNat (highestBitMask (natFromInt p1 `xor` natFromInt p2))
{-# INLINE branchMask #-}
{----------------------------------------------------------------------
  To get best performance, we provide fast implementations of
  lowestBitSet, highestBitSet and fold[lr][l]Bits for GHC.
  If the intel bsf and bsr instructions ever become GHC primops,
  this code should be reimplemented using these.

  Performance of this code is crucial for folds, toList, filter, partition.

  The signatures of methods in question are placed after this comment.
----------------------------------------------------------------------}
lowestBitSet :: Nat -> Int
highestBitSet :: Nat -> Int
-- In the folds below, the first Int argument is the tip's prefix, which
-- is added to each bit index before being passed to the operator.
foldlBits :: Int -> (a -> Int -> a) -> a -> Nat -> a
foldl'Bits :: Int -> (a -> Int -> a) -> a -> Nat -> a
foldrBits :: Int -> (Int -> a -> a) -> a -> Nat -> a
foldr'Bits :: Int -> (Int -> a -> a) -> a -> Nat -> a

-- The definitions follow, selected by CPP: a branch-free GHC version
-- and a portable fallback.
{-# INLINE lowestBitSet #-}
{-# INLINE highestBitSet #-}
{-# INLINE foldlBits #-}
{-# INLINE foldl'Bits #-}
{-# INLINE foldrBits #-}
{-# INLINE foldr'Bits #-}
#if defined(__GLASGOW_HASKELL__) && (WORD_SIZE_IN_BITS==32 || WORD_SIZE_IN_BITS==64)
{----------------------------------------------------------------------
  For lowestBitSet we use wordsize-dependant implementation based on
  multiplication and DeBrujn indeces, which was proposed by Edward Kmett
  <http://haskell.org/pipermail/libraries/2011-September/016749.html>

  The core of this implementation is fast indexOfTheOnlyBit,
  which is given a Nat with exactly one bit set, and returns
  its index.

  Lot of effort was put in these implementations, please benchmark carefully
  before changing this code.
----------------------------------------------------------------------}

-- Given a word with exactly one bit set, return that bit's index via a
-- De Bruijn multiply-and-lookup: branch-free and loop-free.
indexOfTheOnlyBit :: Nat -> Int
{-# INLINE indexOfTheOnlyBit #-}
indexOfTheOnlyBit bitmask =
  I# (lsbArray `indexInt8OffAddr#` unboxInt (intFromNat ((bitmask * magic) `shiftRL` offset)))
  where unboxInt (I# i) = i
#if WORD_SIZE_IN_BITS==32
        magic = 0x077CB531
        offset = 27
        !lsbArray = "\0\1\28\2\29\14\24\3\30\22\20\15\25\17\4\8\31\27\13\23\21\19\16\7\26\12\18\6\11\5\10\9"#
#else
        magic = 0x07EDD5E59A4E28C2
        offset = 58
        !lsbArray = "\63\0\58\1\59\47\53\2\60\39\48\27\54\33\42\3\61\51\37\40\49\18\28\20\55\30\34\11\43\14\22\4\62\57\46\52\38\26\32\41\50\36\17\19\29\10\13\21\56\45\25\31\35\16\9\12\44\24\15\8\23\7\6\5"#
#endif
-- The lsbArray gets inlined to every call site of indexOfTheOnlyBit.
-- That cannot be easily avoided, as GHC forbids top-level Addr# literal.
-- One could go around that by supplying getLsbArray :: () -> Addr# marked
-- as NOINLINE. But the code size of calling it and processing the result
-- is 48B on 32-bit and 56B on 64-bit architectures -- so the 32B and 64B array
-- is actually improvement on 32-bit and only a 8B size increase on 64-bit.

-- Isolate the lowest set bit of x (x .&. negate x, unsigned).
lowestBitMask :: Nat -> Nat
lowestBitMask x = x .&. negate x
{-# INLINE lowestBitMask #-}

-- Reverse the order of bits in the Nat.
revNat :: Nat -> Nat
#if WORD_SIZE_IN_BITS==32
revNat x1 = case ((x1 `shiftRL` 1) .&. 0x55555555) .|. ((x1 .&. 0x55555555) `shiftLL` 1) of
              x2 -> case ((x2 `shiftRL` 2) .&. 0x33333333) .|. ((x2 .&. 0x33333333) `shiftLL` 2) of
                 x3 -> case ((x3 `shiftRL` 4) .&. 0x0F0F0F0F) .|. ((x3 .&. 0x0F0F0F0F) `shiftLL` 4) of
                   x4 -> case ((x4 `shiftRL` 8) .&. 0x00FF00FF) .|. ((x4 .&. 0x00FF00FF) `shiftLL` 8) of
                     x5 -> ( x5 `shiftRL` 16 ) .|. ( x5 `shiftLL` 16);
#else
revNat x1 = case ((x1 `shiftRL` 1) .&. 0x5555555555555555) .|. ((x1 .&. 0x5555555555555555) `shiftLL` 1) of
              x2 -> case ((x2 `shiftRL` 2) .&. 0x3333333333333333) .|. ((x2 .&. 0x3333333333333333) `shiftLL` 2) of
                 x3 -> case ((x3 `shiftRL` 4) .&. 0x0F0F0F0F0F0F0F0F) .|. ((x3 .&. 0x0F0F0F0F0F0F0F0F) `shiftLL` 4) of
                   x4 -> case ((x4 `shiftRL` 8) .&. 0x00FF00FF00FF00FF) .|. ((x4 .&. 0x00FF00FF00FF00FF) `shiftLL` 8) of
                     x5 -> case ((x5 `shiftRL` 16) .&. 0x0000FFFF0000FFFF) .|. ((x5 .&. 0x0000FFFF0000FFFF) `shiftLL` 16) of
                       x6 -> ( x6 `shiftRL` 32 ) .|. ( x6 `shiftLL` 32);
#endif

lowestBitSet x = indexOfTheOnlyBit (lowestBitMask x)

highestBitSet x = indexOfTheOnlyBit (highestBitMask x)

-- Iterate set bits from lowest to highest, peeling one bit per step.
foldlBits prefix f z bitmap = go bitmap z
  where go bm acc | bm == 0 = acc
                  | otherwise = case lowestBitMask bm of
                      bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
                        bi -> bi `seq` go (bm `xor` bitmask) ((f acc) $! (prefix+bi))

foldl'Bits prefix f z bitmap = go bitmap z
  where STRICT_2_OF_2(go)
        go bm acc | bm == 0 = acc
                  | otherwise = case lowestBitMask bm of
                      bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
                        bi -> bi `seq` go (bm `xor` bitmask) ((f acc) $! (prefix+bi))

-- The right folds walk a bit-reversed copy of the bitmap, so peeling
-- the lowest bit visits the original bits highest-first.
foldrBits prefix f z bitmap = go (revNat bitmap) z
  where go bm acc | bm == 0 = acc
                  | otherwise = case lowestBitMask bm of
                      bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
                        bi -> bi `seq` go (bm `xor` bitmask) ((f $! (prefix+(WORD_SIZE_IN_BITS-1)-bi)) acc)

foldr'Bits prefix f z bitmap = go (revNat bitmap) z
  where STRICT_2_OF_2(go)
        go bm acc | bm == 0 = acc
                  | otherwise = case lowestBitMask bm of
                      bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
                        bi -> bi `seq` go (bm `xor` bitmask) ((f $! (prefix+(WORD_SIZE_IN_BITS-1)-bi)) acc)
#else
{----------------------------------------------------------------------
  In general case we use logarithmic implementation of
  lowestBitSet and highestBitSet, which works up to bit sizes of 64.

  Folds are linear scans.
----------------------------------------------------------------------}
-- Binary search for the lowest set bit: repeatedly discard the low
-- half while it contains no set bit, accumulating the shift count.
lowestBitSet n0 =
    let (n1,b1) = if n0 .&. 0xFFFFFFFF /= 0 then (n0,0) else (n0 `shiftRL` 32, 32)
        (n2,b2) = if n1 .&. 0xFFFF /= 0 then (n1,b1) else (n1 `shiftRL` 16, 16+b1)
        (n3,b3) = if n2 .&. 0xFF /= 0 then (n2,b2) else (n2 `shiftRL` 8, 8+b2)
        (n4,b4) = if n3 .&. 0xF /= 0 then (n3,b3) else (n3 `shiftRL` 4, 4+b3)
        (n5,b5) = if n4 .&. 0x3 /= 0 then (n4,b4) else (n4 `shiftRL` 2, 2+b4)
        b6 = if n5 .&. 0x1 /= 0 then b5 else 1+b5
    in b6

-- Mirror image of 'lowestBitSet', narrowing from the high half down.
highestBitSet n0 =
    let (n1,b1) = if n0 .&. 0xFFFFFFFF00000000 /= 0 then (n0 `shiftRL` 32, 32) else (n0,0)
        (n2,b2) = if n1 .&. 0xFFFF0000 /= 0 then (n1 `shiftRL` 16, 16+b1) else (n1,b1)
        (n3,b3) = if n2 .&. 0xFF00 /= 0 then (n2 `shiftRL` 8, 8+b2) else (n2,b2)
        (n4,b4) = if n3 .&. 0xF0 /= 0 then (n3 `shiftRL` 4, 4+b3) else (n3,b3)
        (n5,b5) = if n4 .&. 0xC /= 0 then (n4 `shiftRL` 2, 2+b4) else (n4,b4)
        b6 = if n5 .&. 0x2 /= 0 then 1+b5 else b5
    in b6

-- Linear scan upwards from the lowest set bit, testing bit 0 each step.
foldlBits prefix f z bm = let lb = lowestBitSet bm
                          in go (prefix+lb) z (bm `shiftRL` lb)
  where STRICT_1_OF_3(go)
        go _ acc 0 = acc
        go bi acc n | n `testBit` 0 = go (bi + 1) (f acc bi) (n `shiftRL` 1)
                    | otherwise = go (bi + 1) acc (n `shiftRL` 1)

foldl'Bits prefix f z bm = let lb = lowestBitSet bm
                           in go (prefix+lb) z (bm `shiftRL` lb)
  where STRICT_1_OF_3(go)
        STRICT_2_OF_3(go)
        go _ acc 0 = acc
        go bi acc n | n `testBit` 0 = go (bi + 1) (f acc bi) (n `shiftRL` 1)
                    | otherwise = go (bi + 1) acc (n `shiftRL` 1)

-- Right folds recurse before applying f, so elements are combined
-- highest bit first.
foldrBits prefix f z bm = let lb = lowestBitSet bm
                          in go (prefix+lb) (bm `shiftRL` lb)
  where STRICT_1_OF_2(go)
        go _ 0 = z
        go bi n | n `testBit` 0 = f bi (go (bi + 1) (n `shiftRL` 1))
                | otherwise = go (bi + 1) (n `shiftRL` 1)

foldr'Bits prefix f z bm = let lb = lowestBitSet bm
                           in go (prefix+lb) (bm `shiftRL` lb)
  where STRICT_1_OF_2(go)
        go _ 0 = z
        go bi n | n `testBit` 0 = f bi $! go (bi + 1) (n `shiftRL` 1)
                | otherwise = go (bi + 1) (n `shiftRL` 1)
#endif
{----------------------------------------------------------------------
  [bitcount] as posted by David F. Place to haskell-cafe on April 11, 2006,
  based on the code on
  http://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetKernighan,
  where the following source is given:
    Published in 1988, the C Programming Language 2nd Ed. (by Brian W.
    Kernighan and Dennis M. Ritchie) mentions this in exercise 2-9. On April
    19, 2006 Don Knuth pointed out to me that this method "was first published
    by Peter Wegner in CACM 3 (1960), 322. (Also discovered independently by
    Derrick Lehmer and published in 1964 in a book edited by Beckenbach.)"
----------------------------------------------------------------------}
-- Population count of x added to the running accumulator a.
bitcount :: Int -> Word -> Int
#if MIN_VERSION_base_4_5_0
-- Modern base: delegate to the popCount primitive.
bitcount a x = a + popCount x
#else
-- Kernighan's loop: every iteration clears the lowest set bit.
bitcount a0 x0 = go a0 x0
  where go a 0 = a
        go a x = go (a + 1) (x .&. (x-1))
#endif
{-# INLINE bitcount #-}
{--------------------------------------------------------------------
Utilities
--------------------------------------------------------------------}
-- | Strict left fold over a list: the accumulator is forced to WHNF
-- before every recursive step, so no thunk chain builds up.
foldlStrict :: (a -> b -> a) -> a -> [b] -> a
foldlStrict f = loop
  where
    loop acc []       = acc
    loop acc (y : ys) = let acc' = f acc y
                        in acc' `seq` loop acc' ys
{-# INLINE foldlStrict #-}
-- | /O(1)/. Decompose a set into pieces based on the structure of the underlying
-- tree. This function is useful for consuming a set in parallel.
--
-- No guarantee is made as to the sizes of the pieces; an internal, but
-- deterministic process determines this. However, it is guaranteed that the
-- pieces returned will be in ascending order (all elements in the first submap
-- less than all elements in the second, and so on).
--
-- Examples:
--
-- > splitRoot (fromList [1..120]) == [fromList [1..63],fromList [64..120]]
-- > splitRoot empty == []
--
-- Note that the current implementation does not return more than two subsets,
-- but you should not depend on this behaviour because it can change in the
-- future without notice. Also, the current version does not continue
-- splitting all the way to individual singleton sets -- it stops at some
-- point.
splitRoot :: IntSet -> [IntSet]
splitRoot orig =
  case orig of
    Nil -> []
    -- NOTE: we don't currently split below Tip, but we could.
    x@(Tip _ _) -> [x]
    -- A negative root mask means 'r' holds the negative keys, which
    -- sort first, so it must be emitted before 'l'.
    Bin _ m l r | m < 0 -> [r, l]
                | otherwise -> [l, r]
{-# INLINE splitRoot #-}
| hackern/ghc-7.8.3 | libraries/containers/Data/IntSet/Base.hs | bsd-3-clause | 56,394 | 0 | 25 | 15,253 | 12,740 | 6,574 | 6,166 | -1 | -1 |
-- | This file exports the most commonly used modules within HLearn-classifiers. Most likely this is the only file you will have to import.
module HLearn.Models.Classifiers
( module HLearn.Models.Classifiers.Common
, module HLearn.Models.Classifiers.NearestNeighbor
, module HLearn.Models.Classifiers.Perceptron
, module HLearn.Evaluation.CrossValidation
)
where
import HLearn.Models.Classifiers.Common
import HLearn.Models.Classifiers.NearestNeighbor
import HLearn.Models.Classifiers.Perceptron
import HLearn.Evaluation.CrossValidation
| iamkingmaker/HLearn | src/HLearn/Models/Classifiers.hs | bsd-3-clause | 564 | 0 | 5 | 75 | 68 | 49 | 19 | 9 | 0 |
{-# LANGUAGE OverloadedStrings, ExtendedDefaultRules #-}
module Queries where
import Database.MongoDB
import qualified Data.String.Utils as S
import Data.List.Split
import Types
import ActionRunner
-- | Extract the string payload of a BSON 'String' value.
-- The original definition was non-exhaustive and died with an opaque
-- pattern-match failure on any other constructor; keep it a hard error
-- (callers expect a String here) but with a diagnosable message.
getTitle (String s) = unpack s
getTitle _ = error "Queries.getTitle: expected a BSON String value"
-- | Fetch every page in wiki namespace @ns@ and, for each page, the
-- entries of namespace @elemNs@ listed in its @sectionName@ section.
-- Any MongoDB failure (the 'Left' case) aborts via 'error'.
queryListed :: String -> UString -> String -> Pipe -> IO [(String,[String])]
queryListed ns sectionName elemNs pipe = do
    -- Project only the fields needed below; "_id" is suppressed explicitly.
    maybeDocs <- run pipe $ find (select ["ns" =: ns] "page")
                              {project = ["title" =: 1, "sections" =: 1, "_id" =: 0]}
                            >>= rest
    return $ case maybeDocs of
        Right docs -> map (\doc -> (getTitle $ valueAt "title" $ doc,
                                    getX sectionName elemNs $ valueAt "sections" $ doc)) docs
        Left e -> error $ show e
-- Pre-configured listings over the "101implementation" namespace:
-- implemented features, used languages and used technologies.
queryCoverage = queryListed "101implementation" "Features" "101feature"
queryLangUsage = queryListed "101implementation" "Languages" "Language"
queryTechUsage = queryListed "101implementation" "Technologies" "Technology"
-- | From a BSON @sections@ value, select the sections tagged
-- @sectionName@ and extract from the first one the wiki-linked entries
-- of namespace @elemNs@ (e.g. @*[[Language:Haskell]]@ yields @"Haskell"@).
-- Returns @[]@ when nothing matches.
--
-- Fixes: the original @saveHead@ (clearly a misspelt /safeHead/) only
-- matched lists of zero or one element, so a page with two sections of
-- the same tag crashed with a pattern-match failure; non-'Array'
-- sections and non-'String' content crashed the same way. All three
-- cases now degrade to @[]@.
getX :: UString -> String -> Value -> [String]
getX sectionName elemNs (Array secs) = safeHead $ map extractX $ filter isFSec secs
  where
    -- Total head: first element, or [] when no section matched.
    safeHead []      = []
    safeHead (x : _) = x
    -- A section document matches when its "tag" field equals the name;
    -- non-documents never match, so extractX only ever sees 'Doc's.
    isFSec (Doc c) = valueAt "tag" c == String sectionName
    isFSec _ = False
    -- Pull "elemNs:Name" wiki links out of the section's textual content.
    extractX (Doc c) = case (valueAt "content" c) of
        String s -> map (S.replace (elemNs ++ ":") "") $
                    filter (S.startswith (elemNs ++ ":")) $
                    map (S.replace "*[[" "") $
                    S.split "]]" $
                    S.replace "* " "*" $
                    head $ (splitOn "|") $   -- splitOn never yields [], so head is safe
                    S.replace "\n" "" $
                    unpack s
        -- Non-textual content carries no links to extract.
        _ -> []
getX _ _ _ = []  -- "sections" was not an Array: nothing to extract
-- Page-title listings for the four wiki namespaces of interest.
queryFeatures = queryNamespace "101feature"
queryImpls = queryNamespace "101implementation"
queryTechs = queryNamespace "Technology"
queryLangs = queryNamespace "Language"

-- | Titles of all pages in the given namespace.
-- Any MongoDB failure (the 'Left' case) aborts via 'error'.
queryNamespace :: String -> Pipe -> IO [String]
queryNamespace ns pipe = do
    maybeDocs <- run pipe $ find (select ["ns" =: ns] "page")
                              {project = ["title" =: 1, "_id" =: 0]}
                            >>= rest
    return $ case maybeDocs of
        Right docs -> map (getTitle.(valueAt "title")) docs
        Left e -> error $ show e
Left e -> error $ show e | hendrysuwanda/101dev | tools/mongoRDF/Queries.hs | gpl-3.0 | 2,440 | 1 | 22 | 905 | 714 | 360 | 354 | 47 | 3 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
-}
{-# LANGUAGE DeriveDataTypeable #-}
-- |
-- #name_types#
-- GHC uses several kinds of name internally:
--
-- * 'OccName.OccName' represents names as strings with just a little more information:
-- the \"namespace\" that the name came from, e.g. the namespace of value, type constructors or
-- data constructors
--
-- * 'RdrName.RdrName': see "RdrName#name_types"
--
-- * 'Name.Name': see "Name#name_types"
--
-- * 'Id.Id': see "Id#name_types"
--
-- * 'Var.Var': see "Var#name_types"
module OccName (
-- * The 'NameSpace' type
NameSpace, -- Abstract
nameSpacesRelated,
-- ** Construction
-- $real_vs_source_data_constructors
tcName, clsName, tcClsName, dataName, varName,
tvName, srcDataName,
-- ** Pretty Printing
pprNameSpace, pprNonVarNameSpace, pprNameSpaceBrief,
-- * The 'OccName' type
OccName, -- Abstract, instance of Outputable
pprOccName,
-- ** Construction
mkOccName, mkOccNameFS,
mkVarOcc, mkVarOccFS,
mkDataOcc, mkDataOccFS,
mkTyVarOcc, mkTyVarOccFS,
mkTcOcc, mkTcOccFS,
mkClsOcc, mkClsOccFS,
mkDFunOcc,
setOccNameSpace,
demoteOccName,
HasOccName(..),
-- ** Derived 'OccName's
isDerivedOccName,
mkDataConWrapperOcc, mkWorkerOcc,
mkMatcherOcc, mkBuilderOcc,
mkDefaultMethodOcc,
mkGenDefMethodOcc,
mkDerivedTyConOcc, mkNewTyCoOcc, mkClassOpAuxOcc,
mkCon2TagOcc, mkTag2ConOcc, mkMaxTagOcc,
mkClassDataConOcc, mkDictOcc, mkIPOcc,
mkSpecOcc, mkForeignExportOcc, mkRepEqOcc, mkGenOcc1, mkGenOcc2,
mkGenD, mkGenR, mkGen1R, mkGenRCo, mkGenC, mkGenS,
mkDataTOcc, mkDataCOcc, mkDataConWorkerOcc,
mkSuperDictSelOcc, mkSuperDictAuxOcc,
mkLocalOcc, mkMethodOcc, mkInstTyTcOcc,
mkInstTyCoOcc, mkEqPredCoOcc,
mkVectOcc, mkVectTyConOcc, mkVectDataConOcc, mkVectIsoOcc,
mkPDataTyConOcc, mkPDataDataConOcc,
mkPDatasTyConOcc, mkPDatasDataConOcc,
mkPReprTyConOcc,
mkPADFunOcc,
-- ** Deconstruction
occNameFS, occNameString, occNameSpace,
isVarOcc, isTvOcc, isTcOcc, isDataOcc, isDataSymOcc, isSymOcc, isValOcc,
parenSymOcc, startsWithUnderscore,
isTcClsNameSpace, isTvNameSpace, isDataConNameSpace, isVarNameSpace, isValNameSpace,
-- * The 'OccEnv' type
OccEnv, emptyOccEnv, unitOccEnv, extendOccEnv, mapOccEnv,
lookupOccEnv, mkOccEnv, mkOccEnv_C, extendOccEnvList, elemOccEnv,
occEnvElts, foldOccEnv, plusOccEnv, plusOccEnv_C, extendOccEnv_C,
extendOccEnv_Acc, filterOccEnv, delListFromOccEnv, delFromOccEnv,
alterOccEnv, pprOccEnv,
-- * The 'OccSet' type
OccSet, emptyOccSet, unitOccSet, mkOccSet, extendOccSet,
extendOccSetList,
unionOccSets, unionManyOccSets, minusOccSet, elemOccSet, occSetElts,
foldOccSet, isEmptyOccSet, intersectOccSet, intersectsOccSet,
-- * Tidying up
TidyOccEnv, emptyTidyOccEnv, tidyOccName, initTidyOccEnv,
-- FsEnv
FastStringEnv, emptyFsEnv, lookupFsEnv, extendFsEnv, mkFsEnv
) where
import Util
import Unique
import DynFlags
import UniqFM
import UniqSet
import FastString
import Outputable
import Lexeme
import Binary
import Data.Char
import Data.Data
{-
************************************************************************
* *
FastStringEnv
* *
************************************************************************
FastStringEnv can't be in FastString because the env depends on UniqFM
-}
-- | A map keyed by 'FastString' (via the string's unique key).
-- Lives here rather than in FastString because it depends on UniqFM.
type FastStringEnv a = UniqFM a -- Keyed by FastString
emptyFsEnv :: FastStringEnv a
lookupFsEnv :: FastStringEnv a -> FastString -> Maybe a
extendFsEnv :: FastStringEnv a -> FastString -> a -> FastStringEnv a
mkFsEnv :: [(FastString,a)] -> FastStringEnv a
-- All four are thin wrappers over the corresponding UniqFM operations.
emptyFsEnv = emptyUFM
lookupFsEnv = lookupUFM
extendFsEnv = addToUFM
mkFsEnv = listToUFM
{-
************************************************************************
* *
\subsection{Name space}
* *
************************************************************************
-}
-- | The name spaces of a Haskell program.  Two occurrences of the same
-- string are different names unless they live in the same name space.
data NameSpace = VarName        -- Variables, including "real" data constructors
               | DataName       -- "Source" data constructors
               | TvName         -- Type variables
               | TcClsName      -- Type constructors and classes; Haskell has them
                                -- in the same name space for now.
               deriving( Eq, Ord )
{-! derive: Binary !-}
-- Note [Data Constructors]
-- see also: Note [Data Constructor Naming] in DataCon.hs
--
-- $real_vs_source_data_constructors
-- There are two forms of data constructor:
--
--      [Source data constructors] The data constructors mentioned in Haskell source code
--
--      [Real data constructors] The data constructors of the representation type, which may not be the same as the source type
--
-- For example:
--
-- > data T = T !(Int, Int)
--
-- The source datacon has type @(Int, Int) -> T@
-- The real   datacon has type @Int -> Int -> T@
--
-- GHC chooses a representation based on the strictness etc.
tcName, clsName, tcClsName :: NameSpace
dataName, srcDataName :: NameSpace
tvName, varName :: NameSpace
-- Though type constructors and classes are in the same name space now,
-- the NameSpace type is abstract, so we can easily separate them later
tcName    = TcClsName -- Type constructors
clsName   = TcClsName -- Classes
tcClsName = TcClsName -- Not sure which!
dataName    = DataName
srcDataName = DataName -- Haskell-source data constructors should be
                       -- in the Data name space
tvName  = TvName
varName = VarName
-- | Is this the name space of \"source\" data constructors?
isDataConNameSpace :: NameSpace -> Bool
isDataConNameSpace ns = case ns of
  DataName -> True
  _        -> False

-- | Is this the (shared) name space of type constructors and classes?
isTcClsNameSpace :: NameSpace -> Bool
isTcClsNameSpace ns = case ns of
  TcClsName -> True
  _         -> False

-- | Is this the name space of type variables?
isTvNameSpace :: NameSpace -> Bool
isTvNameSpace ns = case ns of
  TvName -> True
  _      -> False

-- | Term or type variables, but not constructors.
isVarNameSpace :: NameSpace -> Bool
isVarNameSpace ns = case ns of
  TvName  -> True
  VarName -> True
  _       -> False

-- | Value-level names: variables and data constructors.
isValNameSpace :: NameSpace -> Bool
isValNameSpace ns = case ns of
  DataName -> True
  VarName  -> True
  _        -> False
-- | English description of a name space, for messages.
pprNameSpace :: NameSpace -> SDoc
pprNameSpace DataName = ptext (sLit "data constructor")
pprNameSpace VarName = ptext (sLit "variable")
pprNameSpace TvName = ptext (sLit "type variable")
pprNameSpace TcClsName = ptext (sLit "type constructor or class")
-- | Like 'pprNameSpace', but prints nothing for 'VarName'.
pprNonVarNameSpace :: NameSpace -> SDoc
pprNonVarNameSpace VarName = empty
pprNonVarNameSpace ns = pprNameSpace ns
-- | Short tag for a name space; used by 'pprOccName' in debug style.
pprNameSpaceBrief :: NameSpace -> SDoc
pprNameSpaceBrief DataName = char 'd'
pprNameSpaceBrief VarName = char 'v'
pprNameSpaceBrief TvName = ptext (sLit "tv")
pprNameSpaceBrief TcClsName = ptext (sLit "tc")
-- | Lower the 'NameSpace' if possible ('TcClsName' demotes to
-- 'DataName'; nothing else demotes).  We can not know in advance,
-- since a TvName can appear in an HsTyVar.
-- See Note [Demotion] in RnEnv
demoteNameSpace :: NameSpace -> Maybe NameSpace
demoteNameSpace ns = case ns of
  TcClsName -> Just DataName
  _         -> Nothing
{-
************************************************************************
* *
\subsection[Name-pieces-datatypes]{The @OccName@ datatypes}
* *
************************************************************************
-}
-- | An occurrence name: a string ('FastString') together with the
-- 'NameSpace' it lives in.  Both fields are strict.
data OccName = OccName
    { occNameSpace  :: !NameSpace
    , occNameFS     :: !FastString
    }
    deriving Typeable
instance Eq OccName where
    -- String comparison first: strings differ in the common case
    (OccName sp1 s1) == (OccName sp2 s2) = s1 == s2 && sp1 == sp2
instance Ord OccName where
    -- Compares lexicographically, *not* by Unique of the string
    compare (OccName sp1 s1) (OccName sp2 s2)
        = (s1 `compare` s2) `thenCmp` (sp1 `compare` sp2)
instance Data OccName where
  -- don't traverse?
  toConstr _   = abstractConstr "OccName"
  gunfold _ _  = error "gunfold"
  dataTypeOf _ = mkNoRepType "OccName"
instance HasOccName OccName where
  occName = id
{-
************************************************************************
*                                                                      *
\subsection{Printing}
*                                                                      *
************************************************************************
-}
instance Outputable OccName where
    ppr = pprOccName
instance OutputableBndr OccName where
    pprBndr _ = ppr
    pprInfixOcc n = pprInfixVar (isSymOcc n) (ppr n)
    pprPrefixOcc n = pprPrefixVar (isSymOcc n) (ppr n)
-- | Print an 'OccName': z-encoded in code style; otherwise the plain
-- string, with the name-space tag appended in debug style.
pprOccName :: OccName -> SDoc
pprOccName (OccName sp occ)
  = getPprStyle $ \ sty ->
    if codeStyle sty
    then ztext (zEncodeFS occ)
    else pp_occ <> pp_debug sty
  where
    pp_debug sty | debugStyle sty = braces (pprNameSpaceBrief sp)
                 | otherwise      = empty
    -- With -dsuppress-uniques, drop a trailing "[...x" TH-unique suffix
    pp_occ = sdocWithDynFlags $ \dflags ->
             if gopt Opt_SuppressUniques dflags
             then text (strip_th_unique (unpackFS occ))
             else ftext occ
    -- See Note [Suppressing uniques in OccNames]
    strip_th_unique ('[' : c : _) | isAlphaNum c = []
    strip_th_unique (c : cs) = c : strip_th_unique cs
    strip_th_unique [] = []
{-
Note [Suppressing uniques in OccNames]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This is a hack to de-wobblify the OccNames that contain uniques from
Template Haskell that have been turned into a string in the OccName.
See Note [Unique OccNames from Template Haskell] in Convert.hs
************************************************************************
* *
\subsection{Construction}
* *
************************************************************************
-}
-- | Build an 'OccName' in the given name space from a 'String'.
mkOccName :: NameSpace -> String -> OccName
mkOccName sp = OccName sp . mkFastString

-- | Build an 'OccName' in the given name space from a 'FastString'.
mkOccNameFS :: NameSpace -> FastString -> OccName
mkOccNameFS = OccName

-- Per-namespace shorthands for the two constructors above.
mkVarOcc :: String -> OccName
mkVarOcc = mkOccName varName

mkVarOccFS :: FastString -> OccName
mkVarOccFS = mkOccNameFS varName

mkDataOcc :: String -> OccName
mkDataOcc = mkOccName dataName

mkDataOccFS :: FastString -> OccName
mkDataOccFS = mkOccNameFS dataName

mkTyVarOcc :: String -> OccName
mkTyVarOcc = mkOccName tvName

mkTyVarOccFS :: FastString -> OccName
mkTyVarOccFS = mkOccNameFS tvName

mkTcOcc :: String -> OccName
mkTcOcc = mkOccName tcName

mkTcOccFS :: FastString -> OccName
mkTcOccFS = mkOccNameFS tcName

mkClsOcc :: String -> OccName
mkClsOcc = mkOccName clsName

mkClsOccFS :: FastString -> OccName
mkClsOccFS = mkOccNameFS clsName
-- | Lower the name space of an 'OccName' via 'demoteNameSpace',
-- keeping the string; Nothing when the space cannot be demoted.
-- see Note [Demotion]
demoteOccName :: OccName -> Maybe OccName
demoteOccName (OccName space name) =
  fmap (\space' -> OccName space' name) (demoteNameSpace space)
-- Name spaces are related if there is a chance to mean the one when one writes
-- the other, i.e. variables <-> data constructors and type variables <-> type constructors
nameSpacesRelated :: NameSpace -> NameSpace -> Bool
nameSpacesRelated ns1 ns2 = ns1 == ns2 || otherNameSpace ns1 == ns2
-- | The "confusable" counterpart of each name space (an involution).
otherNameSpace :: NameSpace -> NameSpace
otherNameSpace VarName = DataName
otherNameSpace DataName = VarName
otherNameSpace TvName = TcClsName
otherNameSpace TcClsName = TvName
{- | Other names in the compiler add additional information to an OccName.
This class provides a consistent way to access the underlying OccName. -}
class HasOccName name where
  occName :: name -> OccName
{-
************************************************************************
*                                                                      *
                Environments
*                                                                      *
************************************************************************
OccEnvs are used mainly for the envts in ModIfaces.
Note [The Unique of an OccName]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
They are efficient, because FastStrings have unique Int# keys.  We assume
this key is less than 2^24, and indeed FastStrings are allocated keys
sequentially starting at 0.
So we can make a Unique using
        mkUnique ns key  :: Unique
where 'ns' is a Char representing the name space.  This in turn makes it
easy to build an OccEnv.
-}
instance Uniquable OccName where
  -- See Note [The Unique of an OccName]
  getUnique (OccName VarName fs) = mkVarOccUnique fs
  getUnique (OccName DataName fs) = mkDataOccUnique fs
  getUnique (OccName TvName fs) = mkTvOccUnique fs
  getUnique (OccName TcClsName fs) = mkTcOccUnique fs
-- | A map keyed by 'OccName', implemented as a 'UniqFM' over the
-- OccName's unique (see Note [The Unique of an OccName] above).
newtype OccEnv a = A (UniqFM a)
emptyOccEnv :: OccEnv a
unitOccEnv :: OccName -> a -> OccEnv a
extendOccEnv :: OccEnv a -> OccName -> a -> OccEnv a
extendOccEnvList :: OccEnv a -> [(OccName, a)] -> OccEnv a
lookupOccEnv :: OccEnv a -> OccName -> Maybe a
mkOccEnv :: [(OccName,a)] -> OccEnv a
mkOccEnv_C :: (a -> a -> a) -> [(OccName,a)] -> OccEnv a
elemOccEnv :: OccName -> OccEnv a -> Bool
foldOccEnv :: (a -> b -> b) -> b -> OccEnv a -> b
occEnvElts :: OccEnv a -> [a]
extendOccEnv_C :: (a->a->a) -> OccEnv a -> OccName -> a -> OccEnv a
extendOccEnv_Acc :: (a->b->b) -> (a->b) -> OccEnv b -> OccName -> a -> OccEnv b
plusOccEnv :: OccEnv a -> OccEnv a -> OccEnv a
plusOccEnv_C :: (a->a->a) -> OccEnv a -> OccEnv a -> OccEnv a
mapOccEnv :: (a->b) -> OccEnv a -> OccEnv b
delFromOccEnv :: OccEnv a -> OccName -> OccEnv a
delListFromOccEnv :: OccEnv a -> [OccName] -> OccEnv a
filterOccEnv :: (elt -> Bool) -> OccEnv elt -> OccEnv elt
alterOccEnv :: (Maybe elt -> Maybe elt) -> OccEnv elt -> OccName -> OccEnv elt
-- All thin wrappers over the corresponding UniqFM operations.
emptyOccEnv = A emptyUFM
unitOccEnv x y = A $ unitUFM x y
extendOccEnv (A x) y z = A $ addToUFM x y z
extendOccEnvList (A x) l = A $ addListToUFM x l
lookupOccEnv (A x) y = lookupUFM x y
mkOccEnv l = A $ listToUFM l
elemOccEnv x (A y) = elemUFM x y
foldOccEnv a b (A c) = foldUFM a b c
occEnvElts (A x) = eltsUFM x
plusOccEnv (A x) (A y) = A $ plusUFM x y
plusOccEnv_C f (A x) (A y) = A $ plusUFM_C f x y
extendOccEnv_C f (A x) y z = A $ addToUFM_C f x y z
extendOccEnv_Acc f g (A x) y z = A $ addToUFM_Acc f g x y z
mapOccEnv f (A x) = A $ mapUFM f x
mkOccEnv_C comb l = A $ addListToUFM_C comb emptyUFM l
delFromOccEnv (A x) y = A $ delFromUFM x y
delListFromOccEnv (A x) y = A $ delListFromUFM x y
filterOccEnv x (A y) = A $ filterUFM x y
alterOccEnv fn (A y) k = A $ alterUFM fn y k
instance Outputable a => Outputable (OccEnv a) where
    ppr x = pprOccEnv ppr x
pprOccEnv :: (a -> SDoc) -> OccEnv a -> SDoc
pprOccEnv ppr_elt (A env) = pprUniqFM ppr_elt env
-- | A set of 'OccName's, likewise keyed by the OccName's unique.
type OccSet = UniqSet OccName
emptyOccSet :: OccSet
unitOccSet :: OccName -> OccSet
mkOccSet :: [OccName] -> OccSet
extendOccSet :: OccSet -> OccName -> OccSet
extendOccSetList :: OccSet -> [OccName] -> OccSet
unionOccSets :: OccSet -> OccSet -> OccSet
unionManyOccSets :: [OccSet] -> OccSet
minusOccSet :: OccSet -> OccSet -> OccSet
elemOccSet :: OccName -> OccSet -> Bool
occSetElts :: OccSet -> [OccName]
foldOccSet :: (OccName -> b -> b) -> b -> OccSet -> b
isEmptyOccSet :: OccSet -> Bool
intersectOccSet :: OccSet -> OccSet -> OccSet
intersectsOccSet :: OccSet -> OccSet -> Bool
-- Thin wrappers over the corresponding UniqSet operations.
emptyOccSet = emptyUniqSet
unitOccSet = unitUniqSet
mkOccSet = mkUniqSet
extendOccSet = addOneToUniqSet
extendOccSetList = addListToUniqSet
unionOccSets = unionUniqSets
unionManyOccSets = unionManyUniqSets
minusOccSet = minusUniqSet
elemOccSet = elementOfUniqSet
occSetElts = uniqSetToList
foldOccSet = foldUniqSet
isEmptyOccSet = isEmptyUniqSet
intersectOccSet = intersectUniqSets
intersectsOccSet s1 s2 = not (isEmptyOccSet (s1 `intersectOccSet` s2))
{-
************************************************************************
* *
\subsection{Predicates and taking them apart}
* *
************************************************************************
-}
-- | The underlying string of an 'OccName'.
occNameString :: OccName -> String
occNameString (OccName _ s) = unpackFS s
-- | Replace the name space, keeping the string.
setOccNameSpace :: NameSpace -> OccName -> OccName
setOccNameSpace sp (OccName _ occ) = OccName sp occ
isVarOcc, isTvOcc, isTcOcc, isDataOcc :: OccName -> Bool
isVarOcc (OccName VarName _) = True
isVarOcc _ = False
isTvOcc (OccName TvName _) = True
isTvOcc _ = False
isTcOcc (OccName TcClsName _) = True
isTcOcc _ = False
-- | /Value/ 'OccNames's are those that are either in
-- the variable or data constructor namespaces
isValOcc :: OccName -> Bool
isValOcc (OccName VarName _) = True
isValOcc (OccName DataName _) = True
isValOcc _ = False
isDataOcc (OccName DataName _) = True
isDataOcc _ = False
-- | Test if the 'OccName' is a data constructor that starts with
-- a symbol (e.g. @:@, or @[]@)
isDataSymOcc :: OccName -> Bool
isDataSymOcc (OccName DataName s) = isLexConSym s
isDataSymOcc _ = False
-- Pretty inefficient!
-- | Test if the 'OccName' is that for any operator (whether
-- it is a data constructor or variable or whatever)
isSymOcc :: OccName -> Bool
isSymOcc (OccName DataName s) = isLexConSym s
isSymOcc (OccName TcClsName s) = isLexSym s
isSymOcc (OccName VarName s) = isLexSym s
isSymOcc (OccName TvName s) = isLexSym s
-- Pretty inefficient!
-- Pretty inefficient!
parenSymOcc :: OccName -> SDoc -> SDoc
-- ^ Wrap parens around an operator
parenSymOcc occ doc = if isSymOcc occ then parens doc else doc

startsWithUnderscore :: OccName -> Bool
-- ^ Haskell 98 encourages compilers to suppress warnings about unsed
-- names in a pattern if they start with @_@: this implements that test
startsWithUnderscore = isUnderscored . occNameString
  where
    isUnderscored ('_' : _) = True
    isUnderscored _         = False
{-
************************************************************************
* *
\subsection{Making system names}
* *
************************************************************************
Here's our convention for splitting up the interface file name space:
d... dictionary identifiers
(local variables, so no name-clash worries)
All of these other OccNames contain a mixture of alphabetic
and symbolic characters, and hence cannot possibly clash with
a user-written type or function name
$f... Dict-fun identifiers (from inst decls)
$dmop Default method for 'op'
$pnC n'th superclass selector for class C
$wf Worker for functtoin 'f'
$sf.. Specialised version of f
T:C Tycon for dictionary for class C
D:C Data constructor for dictionary for class C
NTCo:T Coercion connecting newtype T with its representation type
TFCo:R Coercion connecting a data family to its respresentation type R
In encoded form these appear as Zdfxxx etc
:... keywords (export:, letrec: etc.)
--- I THINK THIS IS WRONG!
This knowledge is encoded in the following functions.
@mk_deriv@ generates an @OccName@ from the prefix and a string.
NB: The string must already be encoded!
-}
-- | Glue a system prefix onto an (already-encoded) string to make a
-- derived 'OccName'.  See the prefix conventions listed above.
mk_deriv :: NameSpace
         -> String -- Distinguishes one sort of derived name from another
         -> String
         -> OccName
mk_deriv occ_sp sys_prefix str = mkOccName occ_sp (sys_prefix ++ str)
-- | True for names built by 'mk_deriv': a @$@ or @:@ followed by an
-- alphanumeric character.
isDerivedOccName :: OccName -> Bool
isDerivedOccName occ =
   case occNameString occ of
     '$':c:_ | isAlphaNum c -> True
     ':':c:_ | isAlphaNum c -> True
     _other                 -> False
mkDataConWrapperOcc, mkWorkerOcc,
        mkMatcherOcc, mkBuilderOcc,
        mkDefaultMethodOcc,
        mkGenDefMethodOcc, mkDerivedTyConOcc, mkClassDataConOcc, mkDictOcc,
        mkIPOcc, mkSpecOcc, mkForeignExportOcc, mkRepEqOcc, mkGenOcc1, mkGenOcc2,
        mkGenD, mkGenR, mkGen1R, mkGenRCo,
        mkDataTOcc, mkDataCOcc, mkDataConWorkerOcc, mkNewTyCoOcc,
        mkInstTyCoOcc, mkEqPredCoOcc, mkClassOpAuxOcc,
        mkCon2TagOcc, mkTag2ConOcc, mkMaxTagOcc
   :: OccName -> OccName
-- These derived variables have a prefix that no Haskell value could have
mkDataConWrapperOcc = mk_simple_deriv varName  "$W"
mkWorkerOcc         = mk_simple_deriv varName  "$w"
mkMatcherOcc        = mk_simple_deriv varName  "$m"
mkBuilderOcc        = mk_simple_deriv varName  "$b"
mkDefaultMethodOcc  = mk_simple_deriv varName  "$dm"
mkGenDefMethodOcc   = mk_simple_deriv varName  "$gdm"
mkClassOpAuxOcc     = mk_simple_deriv varName  "$c"
mkDerivedTyConOcc   = mk_simple_deriv tcName  ":" -- The : prefix makes sure it classifies as a tycon/datacon
mkClassDataConOcc   = mk_simple_deriv dataName "D:" -- We go straight to the "real" data con
                                                    -- for datacons from classes
mkDictOcc           = mk_simple_deriv varName  "$d"
mkIPOcc             = mk_simple_deriv varName  "$i"
mkSpecOcc           = mk_simple_deriv varName  "$s"
mkForeignExportOcc  = mk_simple_deriv varName  "$f"
mkRepEqOcc          = mk_simple_deriv tvName   "$r" -- In RULES involving Coercible
mkNewTyCoOcc        = mk_simple_deriv tcName   "NTCo:" -- Coercion for newtypes
mkInstTyCoOcc       = mk_simple_deriv tcName   "TFCo:" -- Coercion for type functions
mkEqPredCoOcc       = mk_simple_deriv tcName   "$co"
-- used in derived instances
mkCon2TagOcc        = mk_simple_deriv varName  "$con2tag_"
mkTag2ConOcc        = mk_simple_deriv varName  "$tag2con_"
mkMaxTagOcc         = mk_simple_deriv varName  "$maxtag_"
-- Generic derivable classes (old)
mkGenOcc1           = mk_simple_deriv varName  "$gfrom"
mkGenOcc2           = mk_simple_deriv varName  "$gto"
-- Generic deriving mechanism (new)
mkGenD              = mk_simple_deriv tcName   "D1"
mkGenC :: OccName -> Int -> OccName
mkGenC occ m = mk_deriv tcName ("C1_" ++ show m) (occNameString occ)
mkGenS :: OccName -> Int -> Int -> OccName
mkGenS occ m n = mk_deriv tcName ("S1_" ++ show m ++ "_" ++ show n)
                   (occNameString occ)
mkGenR   = mk_simple_deriv tcName "Rep_"
mkGen1R  = mk_simple_deriv tcName "Rep1_"
mkGenRCo = mk_simple_deriv tcName "CoRep_"
-- data T = MkT ... deriving( Data ) needs definitions for
--      $tT   :: Data.Generics.Basics.DataType
--      $cMkT :: Data.Generics.Basics.Constr
mkDataTOcc = mk_simple_deriv varName  "$t"
mkDataCOcc = mk_simple_deriv varName  "$c"
-- Vectorisation
mkVectOcc, mkVectTyConOcc, mkVectDataConOcc, mkVectIsoOcc,
 mkPADFunOcc, mkPReprTyConOcc,
 mkPDataTyConOcc, mkPDataDataConOcc,
 mkPDatasTyConOcc, mkPDatasDataConOcc
  :: Maybe String -> OccName -> OccName
mkVectOcc          = mk_simple_deriv_with varName  "$v"
mkVectTyConOcc     = mk_simple_deriv_with tcName   "V:"
mkVectDataConOcc   = mk_simple_deriv_with dataName "VD:"
mkVectIsoOcc       = mk_simple_deriv_with varName  "$vi"
mkPADFunOcc        = mk_simple_deriv_with varName  "$pa"
mkPReprTyConOcc    = mk_simple_deriv_with tcName   "VR:"
mkPDataTyConOcc    = mk_simple_deriv_with tcName   "VP:"
mkPDatasTyConOcc   = mk_simple_deriv_with tcName   "VPs:"
mkPDataDataConOcc  = mk_simple_deriv_with dataName "VPD:"
mkPDatasDataConOcc = mk_simple_deriv_with dataName "VPDs:"
-- | Derive from an existing OccName's string; no extra payload.
mk_simple_deriv :: NameSpace -> String -> OccName -> OccName
mk_simple_deriv sp px occ = mk_deriv sp px (occNameString occ)
-- | Like 'mk_simple_deriv', with an optional module-ish tag spliced
-- between the prefix and the name.
mk_simple_deriv_with :: NameSpace -> String -> Maybe String -> OccName -> OccName
mk_simple_deriv_with sp px Nothing     occ = mk_deriv sp px (occNameString occ)
mk_simple_deriv_with sp px (Just with) occ = mk_deriv sp (px ++ with ++ "_") (occNameString occ)
-- Data constructor workers are made by setting the name space
-- of the data constructor OccName (which should be a DataName)
-- to VarName
mkDataConWorkerOcc datacon_occ = setOccNameSpace varName datacon_occ
mkSuperDictAuxOcc :: Int -> OccName -> OccName
mkSuperDictAuxOcc index cls_tc_occ
  = mk_deriv varName "$cp" (show index ++ occNameString cls_tc_occ)
mkSuperDictSelOcc :: Int        -- ^ Index of superclass, e.g. 3
                  -> OccName    -- ^ Class, e.g. @Ord@
                  -> OccName    -- ^ Derived 'Occname', e.g. @$p3Ord@
mkSuperDictSelOcc index cls_tc_occ
  = mk_deriv varName "$p" (show index ++ occNameString cls_tc_occ)
mkLocalOcc :: Unique            -- ^ Unique to combine with the 'OccName'
           -> OccName           -- ^ Local name, e.g. @sat@
           -> OccName           -- ^ Nice unique version, e.g. @$L23sat@
mkLocalOcc uniq occ
   = mk_deriv varName ("$L" ++ show uniq) (occNameString occ)
        -- The Unique might print with characters
        -- that need encoding (e.g. 'z'!)
-- | Derive a name for the representation type constructor of a
-- @data@\/@newtype@ instance.
mkInstTyTcOcc :: String -- ^ Family name, e.g. @Map@
              -> OccSet -- ^ avoid these Occs
              -> OccName -- ^ @R:Map@
mkInstTyTcOcc str set =
  chooseUniqueOcc tcName ('R' : ':' : str) set
mkDFunOcc :: String -- ^ Typically the class and type glommed together e.g. @OrdMaybe@.
                    -- Only used in debug mode, for extra clarity
          -> Bool   -- ^ Is this a hs-boot instance DFun?
          -> OccSet -- ^ avoid these Occs
          -> OccName -- ^ E.g. @$f3OrdMaybe@
-- In hs-boot files we make dict funs like $fx7ClsTy, which get bound to the real
-- thing when we compile the mother module. Reason: we don't know exactly
-- what the mother module will call it.
mkDFunOcc info_str is_boot set
  -- Use the abstract synonym 'varName' rather than the bare 'VarName'
  -- constructor, consistently with the rest of this module (the
  -- NameSpace type is meant to stay abstract); same value either way.
  = chooseUniqueOcc varName (prefix ++ info_str) set
  where
    prefix | is_boot   = "$fx"
           | otherwise = "$f"
{-
Sometimes we need to pick an OccName that has not already been used,
given a set of in-use OccNames.
-}
-- | Pick an 'OccName' based on @str@ that is not in the given set,
-- appending 0, 1, 2, ... until a free name is found.
chooseUniqueOcc :: NameSpace -> String -> OccSet -> OccName
chooseUniqueOcc ns str set = go (0 :: Int) (mkOccName ns str)
  where
    go n occ
      | occ `elemOccSet` set = go (n + 1) (mkOccName ns (str ++ show n))
      | otherwise            = occ
{-
We used to add a '$m' to indicate a method, but that gives rise to bad
error messages from the type checker when we print the function name or pattern
of an instance-decl binding. Why? Because the binding is zapped
to use the method name in place of the selector name.
(See TcClassDcl.tcMethodBind)
The way it is now, -ddump-xx output may look confusing, but
you can always say -dppr-debug to get the uniques.
However, we *do* have to zap the first character to be lower case,
because overloaded constructors (blarg) generate methods too.
And convert to VarName space
e.g. a call to constructor MkFoo where
data (Ord a) => Foo a = MkFoo a
If this is necessary, we do it by prefixing '$m'. These
guys never show up in error messages. What a hack.
-}
-- | Prefix @$m@ and move to the variable name space, unless the name
-- is already a plain variable (see the commentary above).
mkMethodOcc :: OccName -> OccName
mkMethodOcc occ = case occ of
  OccName VarName _ -> occ
  _                 -> mk_simple_deriv varName "$m" occ
{-
************************************************************************
* *
\subsection{Tidying them up}
* *
************************************************************************
Before we print chunks of code we like to rename it so that
we don't have to print lots of silly uniques in it. But we mustn't
accidentally introduce name clashes! So the idea is that we leave the
OccName alone unless it accidentally clashes with one that is already
in scope; if so, we tack on '1' at the end and try again, then '2', and
so on till we find a unique one.
There's a wrinkle for operators. Consider '>>='. We can't use '>>=1'
because that isn't a single lexeme. So we encode it to 'lle' and *then*
tack on the '1', if necessary.
Note [TidyOccEnv]
~~~~~~~~~~~~~~~~~
type TidyOccEnv = UniqFM Int
* Domain = The OccName's FastString. These FastStrings are "taken";
make sure that we don't re-use
* Int, n = A plausible starting point for new guesses
There is no guarantee that "FSn" is available;
you must look that up in the TidyOccEnv. But
it's a good place to start looking.
* When looking for a renaming for "foo2" we strip off the "2" and start
with "foo". Otherwise if we tidy twice we get silly names like foo23.
-}
type TidyOccEnv = UniqFM Int    -- The in-scope OccNames
  -- See Note [TidyOccEnv]
emptyTidyOccEnv :: TidyOccEnv
emptyTidyOccEnv = emptyUFM
initTidyOccEnv :: [OccName] -> TidyOccEnv -- Initialise with names to avoid!
initTidyOccEnv = foldl add emptyUFM
  where
    add env (OccName _ fs) = addToUFM env fs 1
-- | Rename an 'OccName' so it does not clash with any name already in
-- the environment, returning the extended environment and the (possibly
-- suffixed) name.
tidyOccName :: TidyOccEnv -> OccName -> (TidyOccEnv, OccName)
tidyOccName env occ@(OccName occ_sp fs)
  = case lookupUFM env fs of
        Just n  -> find n       -- Clash: search for a fresh "baseN"
        Nothing -> (addToUFM env fs 1, occ)
  where
    base :: String  -- Drop trailing digits (see Note [TidyOccEnv])
    base = dropWhileEndLE isDigit (unpackFS fs)
    find n
      = case lookupUFM env new_fs of
          Just n' -> find (n1 `max` n')
                     -- The max ensures that n increases, avoiding loops
          Nothing -> (addToUFM (addToUFM env fs n1) new_fs n1,
                      OccName occ_sp new_fs)
                     -- We update only the beginning and end of the
                     -- chain that find explores; it's a little harder to
                     -- update the middle and there's no real need.
        where
          n1 = n+1
          new_fs = mkFastString (base ++ show n)
{-
************************************************************************
* *
Binary instance
Here rather than BinIface because OccName is abstract
* *
************************************************************************
-}
instance Binary NameSpace where
    -- One byte per constructor; the tags below must stay in sync
    -- between put_ and get.
    put_ bh VarName   = putByte bh 0
    put_ bh DataName  = putByte bh 1
    put_ bh TvName    = putByte bh 2
    put_ bh TcClsName = putByte bh 3
    get bh = do
        tag <- getByte bh
        case tag of
          0 -> return VarName
          1 -> return DataName
          2 -> return TvName
          _ -> return TcClsName
instance Binary OccName where
    -- Serialise the name space tag, then the string.
    put_ bh (OccName sp fs) = put_ bh sp >> put_ bh fs
    get bh = do
        sp <- get bh
        fs <- get bh
        return (OccName sp fs)
| christiaanb/ghc | compiler/basicTypes/OccName.hs | bsd-3-clause | 31,827 | 0 | 14 | 8,573 | 5,623 | 3,012 | 2,611 | 462 | 5 |
{-# OPTIONS -cpp #-}
module Main where
import Control.Concurrent (forkIO, threadDelay)
import Control.Concurrent.MVar (putMVar, takeMVar, newEmptyMVar)
import Control.Monad
import Control.Exception
import Data.Maybe (isNothing)
import System.Environment (getArgs)
import System.Exit
import System.IO (hPutStrLn, stderr)
#if !defined(mingw32_HOST_OS)
import System.Posix hiding (killProcess)
import System.IO.Error hiding (try,catch)
#endif
#if defined(mingw32_HOST_OS)
import System.Process
import WinCBindings
import Foreign
import System.Win32.DebugApi
import System.Win32.Types
#endif
main :: IO ()
-- Usage: timeout <seconds> <command>
main = do
  args <- getArgs
  case args of
    [secs, cmd]
      | [(secs', "")] <- reads secs -> run secs' cmd
      | otherwise ->
          die ("Can't parse " ++ show secs ++ " as a number of seconds")
    _ -> die ("Bad arguments " ++ show args)
-- | Report a usage error on stderr and exit with status 1.
die :: String -> IO ()
die msg = do
  hPutStrLn stderr ("timeout: " ++ msg)
  exitWith (ExitFailure 1)

-- | Message printed when the watched process exceeds its time budget.
timeoutMsg :: String
timeoutMsg = "Timeout happened...killing process..."
run :: Int -> String -> IO ()
#if !defined(mingw32_HOST_OS)
-- POSIX implementation: start the command in its own session and race
-- its exit status against a timeout thread (and SIGINT).
-- m  : first event wins -- Nothing = timeout/SIGINT, Just = child status
-- mp : hands the forked child's pid (or fork exception) to the main thread
run secs cmd = do
        m <- newEmptyMVar
        mp <- newEmptyMVar
        installHandler sigINT (Catch (putMVar m Nothing)) Nothing
        forkIO $ do threadDelay (secs * 1000000)
                    putMVar m Nothing
        forkIO $ do ei <- try $ do pid <- systemSession cmd
                                   return pid
                    putMVar mp ei
                    case ei of
                       Left _ -> return ()
                       Right pid -> do
                           -- Block until the child changes state
                           r <- getProcessStatus True False pid
                           putMVar m r
        ei_pid_ph <- takeMVar mp
        case ei_pid_ph of
            Left e -> do hPutStrLn stderr
                                 ("Timeout:\n" ++ show (e :: IOException))
                         exitWith (ExitFailure 98)
            Right pid -> do
                r <- takeMVar m
                case r of
                  Nothing -> do
                        -- Timed out (or SIGINT): kill the child's group
                        hPutStrLn stderr timeoutMsg
                        killProcess pid
                        exitWith (ExitFailure 99)
                  Just (Exited r) -> exitWith r
                  Just (Terminated s) -> raiseSignal s
                  Just _ -> exitWith (ExitFailure 1)
-- Fork a child that becomes its own session leader (createSession) and
-- then execs "/bin/sh -c cmd"; the new process group lets killProcess
-- signal the whole job later.
systemSession cmd =
 forkProcess $ do
   createSession
   executeFile "/bin/sh" False ["-c", cmd] Nothing
   -- need to use exec() directly here, rather than something like
   -- System.Process.system, because we are in a forked child and some
   -- pthread libraries get all upset if you start doing certain
   -- things in a forked child of a pthread process, such as forking
   -- more threads.
-- | Send SIGTERM to the process group, then poll up to 10 times to
-- confirm the process has really gone, escalating to SIGKILL on each
-- retry.  Signal failures are ignored (the group may already be dead).
killProcess pid = do
  ignoreIOExceptions (signalProcessGroup sigTERM pid)
  checkReallyDead (10 :: Int)
  where
    -- Rewritten without the Haskell-98 n+k pattern (removed in
    -- Haskell 2010); behaviour is unchanged: 10, 9, ..., 1, then give up.
    checkReallyDead 0 = hPutStrLn stderr "checkReallyDead: Giving up"
    checkReallyDead n =
      do threadDelay (3*100000) -- 3/10 sec
         m <- tryJust (guard . isDoesNotExistError) $
              getProcessStatus False False pid
         case m of
           Right Nothing -> return ()
           Left _ -> return ()
           _ -> do
             ignoreIOExceptions (signalProcessGroup sigKILL pid)
             checkReallyDead (n - 1)
-- | Run an 'IO' action, swallowing any 'IOException' it raises.
ignoreIOExceptions :: IO () -> IO ()
ignoreIOExceptions io = io `catch` handler
  where
    handler :: IOException -> IO ()
    handler _ = return ()
#else
-- Windows implementation: run the command under "sh -c" inside a Win32
-- Job object, so terminating the job kills the whole process tree.
run secs cmd =
    let escape '\\' = "\\\\"
        escape '"' = "\\\""
        escape c = [c]
        cmd' = "sh -c \"" ++ concatMap escape cmd ++ "\"" in
    alloca $ \p_startupinfo ->
    alloca $ \p_pi ->
    withTString cmd' $ \cmd'' ->
    do job <- createJobObjectW nullPtr nullPtr
       let creationflags = 0
       b <- createProcessW nullPtr cmd'' nullPtr nullPtr True
                           creationflags
                           nullPtr nullPtr p_startupinfo p_pi
       unless b $ errorWin "createProcessW"
       pi <- peek p_pi
       assignProcessToJobObject job (piProcess pi)
       resumeThread (piThread pi)
       -- The program is now running
       let handle = piProcess pi
       let millisecs = secs * 1000
       -- Wait for exit or timeout, whichever comes first
       rc <- waitForSingleObject handle (fromIntegral millisecs)
       if rc == cWAIT_TIMEOUT
           then do hPutStrLn stderr timeoutMsg
                   terminateJobObject job 99
                   exitWith (ExitFailure 99)
           else alloca $ \p_exitCode ->
                do r <- getExitCodeProcess handle p_exitCode
                   if r then do ec <- peek p_exitCode
                                let ec' = if ec == 0
                                          then ExitSuccess
                                          else ExitFailure $ fromIntegral ec
                                exitWith ec'
                        else errorWin "getExitCodeProcess"
#endif
| jwiegley/ghc-release | timeout/timeout.hs | gpl-3.0 | 4,854 | 1 | 19 | 1,745 | 932 | 459 | 473 | -1 | -1 |
module Options.Misc where
import Types
-- | Miscellaneous GHC flag descriptions, consumed by the user's guide
-- generator.  Strings here are reStructuredText fragments.
miscOptions :: [Flag]
miscOptions =
  [ flag { flagName = "-jN"
         , flagDescription =
           "When compiling with :ghc-flag:`-make`, compile ⟨N⟩ modules in parallel."
         , flagType = DynamicFlag
         }
  , flag { flagName = "-fno-hi-version-check"
         , flagDescription = "Don't complain about ``.hi`` file mismatches"
         , flagType = DynamicFlag
         }
  , flag { flagName = "-fhistory-size"
         , flagDescription = "Set simplification history size"
         , flagType = DynamicFlag
         }
  , flag { flagName = "-fno-ghci-history"
         , flagDescription =
           "Do not use the load/store the GHCi command history from/to "++
           "``ghci_history``."
         , flagType = DynamicFlag
         }
  , flag { flagName = "-fno-ghci-sandbox"
         , flagDescription =
           "Turn off the GHCi sandbox. Means computations are run in "++
           "the main thread, rather than a forked thread."
         , flagType = DynamicFlag
         }
  , flag { flagName = "-freverse-errors"
         , flagDescription =
           "Display errors in GHC/GHCi sorted by reverse order of "++
           "source code line numbers."
         , flagType = DynamicFlag
         , flagReverse = "-fno-reverse-errors"
         }
  ]
| tjakway/ghcjvm | utils/mkUserGuidePart/Options/Misc.hs | bsd-3-clause | 1,327 | 0 | 8 | 427 | 178 | 114 | 64 | 30 | 1 |
module ListComp1 where
{- map2 xs = map length [ x | x <- xs ] -}
-- NOTE(review): HaRe "generative fold" test fixture.  The unfolded form
-- below is deliberately NOT equivalent to the commented-out original
-- for non-empty input: after consing the first result it re-maps over
-- the whole list, e.g. map2 [[1,2],[3]] == [2,2,1], not [2,1].
map2 xs = go (length, [x | x <- xs])
  where
    go (f, [])     = []
    go (f, y : ys) = f y : map f [x | x <- xs]
module HAD.Y2014.M03.D26.Solution
( Board
, board
, getList
, Direction (..)
, viewFrom
) where
import Data.List (groupBy, transpose)
import Control.Applicative ((<*>))
import Control.Monad (liftM, replicateM)
import Test.QuickCheck
-- Preamble
-- $setup
-- >>> import Control.Applicative ((<$>), (<*>))
-- >>> import Data.List (sort)
-- >>> :{
-- let checkReverse d1 d2 =
-- (==) <$>
-- sort . map sort . getList . viewFrom d1 <*>
-- sort . map (sort . reverse) . getList . viewFrom d2
-- :}
-- | A board stored row by row as a list of lists.
newtype Board a = Board {getList :: [[a]]}
  deriving (Eq, Show)
-- | Compass direction from which a 'Board' may be viewed.
data Direction = North | South | East | West
  deriving (Eq, Read, Show)
-- Exercise
-- | viewFrom given a direction, produce an involution such that the
-- inner lists elements are ordered as if they were seen from that direction.
--
--
-- Examples:
--
-- Defaut view is from West
-- prop> xs == viewFrom West xs
--
-- The function is an involution
-- prop> \(d,xxs) -> (==) <*> (viewFrom d . viewFrom d) $ (xxs :: Board Int)
--
-- Ordering properties from opposite side views (for inner lists elements
-- prop> checkReverse West East (xxs :: Board Int)
-- prop> checkReverse East West (xxs :: Board Int)
-- prop> checkReverse North South (xxs :: Board Int)
-- prop> checkReverse South North (xxs :: Board Int)
--
-- | Reorder a 'Board' so the inner lists read as seen from the given
-- 'Direction'.  'West' is the identity view; the function is an
-- involution for every direction.
viewFrom :: Direction -> Board a -> Board a
viewFrom dir (Board rows) = Board (reorient dir rows)
  where
    reorient West  = id
    reorient East  = reverse . map reverse
    reorient North = transpose
    reorient South = reverse . map reverse . transpose
-- Constructor
-- | board Yesterday's squareOf: build an n-by-n square board from the
-- given cells, padding with the filler value when they run out.
board :: Int -> a -> [a] -> [[a]]
board n filler xs = take n (map (take n) rows)
  where
    rows   = iterate (drop n) padded
    padded = xs ++ repeat filler
-- Arbitrary instances
instance Arbitrary a => Arbitrary (Board a) where
  -- The same generated Int feeds both replicateM (row count) and
  -- vector (row length), so generated boards are square.
  arbitrary = liftM Board (arbitrary >>= replicateM <*> vector)
instance Arbitrary Direction where
  arbitrary = elements [North, South , East , West]
| ajerneck/1HAD | exercises/HAD/Y2014/M03/D26/Solution.hs | mit | 1,954 | 0 | 11 | 416 | 407 | 238 | 169 | 27 | 4 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude
, BangPatterns
, NondecreasingIndentation
, MagicHash
#-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Encoding.UTF8
-- Copyright : (c) The University of Glasgow, 2009
-- License : see libraries/base/LICENSE
--
-- Maintainer : libraries@haskell.org
-- Stability : internal
-- Portability : non-portable
--
-- UTF-8 Codec for the IO library
--
-- Portions Copyright : (c) Tom Harper 2008-2009,
-- (c) Bryan O'Sullivan 2009,
-- (c) Duncan Coutts 2009
--
-----------------------------------------------------------------------------
module GHC.IO.Encoding.UTF8 (
utf8, mkUTF8,
utf8_bom, mkUTF8_bom
) where
import GHC.Base
import GHC.Real
import GHC.Num
import GHC.IORef
-- import GHC.IO
import GHC.IO.Buffer
import GHC.IO.Encoding.Failure
import GHC.IO.Encoding.Types
import GHC.Word
import Data.Bits
-- | The UTF-8 encoding, raising an error on malformed input.
utf8 :: TextEncoding
utf8 = mkUTF8 ErrorOnCodingFailure
-- | /Since: 4.4.0.0/
--
-- Build a UTF-8 'TextEncoding' with the given policy for handling
-- invalid input.
mkUTF8 :: CodingFailureMode -> TextEncoding
mkUTF8 cfm = TextEncoding { textEncodingName = "UTF-8",
                            mkTextDecoder = utf8_DF cfm,
                            mkTextEncoder = utf8_EF cfm }
-- | Build a stateless UTF-8 decoder honouring the given failure mode.
utf8_DF :: CodingFailureMode -> IO (TextDecoder ())
utf8_DF cfm =
  return (BufferCodec {
             encode = utf8_decode,
             recover = recoverDecode cfm,
             close = return (),
             getState = return (),
             setState = const $ return ()
          })
-- | Build a stateless UTF-8 encoder honouring the given failure mode.
utf8_EF :: CodingFailureMode -> IO (TextEncoder ())
utf8_EF cfm =
  return (BufferCodec {
             encode = utf8_encode,
             recover = recoverEncode cfm,
             close = return (),
             getState = return (),
             setState = const $ return ()
          })
-- | UTF-8 with byte-order mark, raising an error on malformed input.
utf8_bom :: TextEncoding
utf8_bom = mkUTF8_bom ErrorOnCodingFailure
-- | Build a UTF-8-with-BOM 'TextEncoding': a BOM is written before the
-- first character on output, and a leading BOM is skipped on input.
mkUTF8_bom :: CodingFailureMode -> TextEncoding
mkUTF8_bom cfm = TextEncoding { textEncodingName = "UTF-8BOM",
                                mkTextDecoder = utf8_bom_DF cfm,
                                mkTextEncoder = utf8_bom_EF cfm }
-- | Decoder for UTF-8 with BOM; the 'Bool' state is 'True' while we are
-- still at the start of the stream (where a BOM may be skipped).
utf8_bom_DF :: CodingFailureMode -> IO (TextDecoder Bool)
utf8_bom_DF cfm = do
  ref <- newIORef True
  return (BufferCodec {
             encode = utf8_bom_decode ref,
             recover = recoverDecode cfm,
             close = return (),
             getState = readIORef ref,
             setState = writeIORef ref
          })
-- | Encoder for UTF-8 with BOM; the 'Bool' state is 'True' until the
-- BOM has been written.
utf8_bom_EF :: CodingFailureMode -> IO (TextEncoder Bool)
utf8_bom_EF cfm = do
  ref <- newIORef True
  return (BufferCodec {
             encode = utf8_bom_encode ref,
             recover = recoverEncode cfm,
             close = return (),
             getState = readIORef ref,
             setState = writeIORef ref
          })
-- | Decode UTF-8, transparently skipping one leading BOM (EF BB BF).
-- The flag in @ref@ is 'True' only before anything has been decoded;
-- once the BOM question is settled it is cleared and decoding proceeds
-- via 'utf8_decode'.  With fewer than three bytes available we report
-- 'InputUnderflow' and wait for more input before deciding.
utf8_bom_decode :: IORef Bool -> DecodeBuffer
utf8_bom_decode ref
  input@Buffer{ bufRaw=iraw, bufL=ir, bufR=iw, bufSize=_ }
  output
 = do
   first <- readIORef ref
   if not first
      then utf8_decode input output
      else do
       let no_bom = do writeIORef ref False; utf8_decode input output
       if iw - ir < 1 then return (InputUnderflow,input,output) else do
       c0 <- readWord8Buf iraw ir
       if (c0 /= bom0) then no_bom else do
       if iw - ir < 2 then return (InputUnderflow,input,output) else do
       c1 <- readWord8Buf iraw (ir+1)
       if (c1 /= bom1) then no_bom else do
       if iw - ir < 3 then return (InputUnderflow,input,output) else do
       c2 <- readWord8Buf iraw (ir+2)
       if (c2 /= bom2) then no_bom else do
       -- found a BOM, ignore it and carry on
       writeIORef ref False
       utf8_decode input{ bufL = ir + 3 } output
-- | Encode to UTF-8, emitting the three BOM bytes before the first
-- character.  Requires three free output bytes for the BOM; otherwise
-- reports 'OutputUnderflow' so the caller retries with a drained buffer.
utf8_bom_encode :: IORef Bool -> EncodeBuffer
utf8_bom_encode ref input
  output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow, bufSize=os }
 = do
  b <- readIORef ref
  if not b then utf8_encode input output
           else if os - ow < 3
                  then return (OutputUnderflow,input,output)
                  else do
                    writeIORef ref False
                    writeWord8Buf oraw ow bom0
                    writeWord8Buf oraw (ow+1) bom1
                    writeWord8Buf oraw (ow+2) bom2
                    utf8_encode input output{ bufR = ow+3 }
-- | The UTF-8 encoding of the byte-order mark U+FEFF: EF BB BF.
bom0, bom1, bom2 :: Word8
bom0 = 0xef
bom1 = 0xbb
bom2 = 0xbf
-- | Decode a buffer of UTF-8 bytes into characters.  Stops with
-- 'InvalidSequence' at the first malformed sequence, 'InputUnderflow'
-- when the input is exhausted (possibly mid-sequence) and
-- 'OutputUnderflow' when the output buffer is full.  Sequences that are
-- incomplete but already provably invalid are rejected eagerly (#3341).
utf8_decode :: DecodeBuffer
utf8_decode
  input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
  output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
 = let
       loop !ir !ow
         | ow >= os = done OutputUnderflow ir ow
         | ir >= iw = done InputUnderflow ir ow
         | otherwise = do
              c0 <- readWord8Buf iraw ir
              case c0 of
                _ | c0 <= 0x7f -> do
                       ow' <- writeCharBuf oraw ow (unsafeChr (fromIntegral c0))
                       loop (ir+1) ow'
                  | c0 >= 0xc0 && c0 <= 0xc1 -> invalid -- Overlong forms
                  | c0 >= 0xc2 && c0 <= 0xdf ->
                       if iw - ir < 2 then done InputUnderflow ir ow else do
                       c1 <- readWord8Buf iraw (ir+1)
                       if (c1 < 0x80 || c1 >= 0xc0) then invalid else do
                       ow' <- writeCharBuf oraw ow (chr2 c0 c1)
                       loop (ir+2) ow'
                  | c0 >= 0xe0 && c0 <= 0xef ->
                       case iw - ir of
                         1 -> done InputUnderflow ir ow
                         2 -> do -- check for an error even when we don't have
                                 -- the full sequence yet (#3341)
                               c1 <- readWord8Buf iraw (ir+1)
                               if not (validate3 c0 c1 0x80)
                                 then invalid else done InputUnderflow ir ow
                         _ -> do
                               c1 <- readWord8Buf iraw (ir+1)
                               c2 <- readWord8Buf iraw (ir+2)
                               if not (validate3 c0 c1 c2) then invalid else do
                               ow' <- writeCharBuf oraw ow (chr3 c0 c1 c2)
                               loop (ir+3) ow'
                  | c0 >= 0xf0 ->
                       case iw - ir of
                         1 -> done InputUnderflow ir ow
                         2 -> do -- check for an error even when we don't have
                                 -- the full sequence yet (#3341)
                               c1 <- readWord8Buf iraw (ir+1)
                               if not (validate4 c0 c1 0x80 0x80)
                                 then invalid else done InputUnderflow ir ow
                         3 -> do
                               c1 <- readWord8Buf iraw (ir+1)
                               c2 <- readWord8Buf iraw (ir+2)
                               if not (validate4 c0 c1 c2 0x80)
                                 then invalid else done InputUnderflow ir ow
                         _ -> do
                               c1 <- readWord8Buf iraw (ir+1)
                               c2 <- readWord8Buf iraw (ir+2)
                               c3 <- readWord8Buf iraw (ir+3)
                               if not (validate4 c0 c1 c2 c3) then invalid else do
                               ow' <- writeCharBuf oraw ow (chr4 c0 c1 c2 c3)
                               loop (ir+4) ow'
                  | otherwise ->
                       invalid
         where
           invalid = done InvalidSequence ir ow
       -- lambda-lifted, to avoid thunks being built in the inner-loop:
       done why !ir !ow = return (why,
                                  if ir == iw then input{ bufL=0, bufR=0 }
                                              else input{ bufL=ir },
                                  output{ bufR=ow })
   in
   loop ir0 ow0
-- | Encode a buffer of characters into UTF-8 bytes.  Emits one to four
-- bytes per character depending on the code point, reports
-- 'OutputUnderflow' when the remaining output space cannot hold the next
-- sequence, and 'InvalidSequence' for lone surrogates.
utf8_encode :: EncodeBuffer
utf8_encode
  input@Buffer{ bufRaw=iraw, bufL=ir0, bufR=iw, bufSize=_ }
  output@Buffer{ bufRaw=oraw, bufL=_, bufR=ow0, bufSize=os }
 = let
      done why !ir !ow = return (why,
                                 if ir == iw then input{ bufL=0, bufR=0 }
                                             else input{ bufL=ir },
                                 output{ bufR=ow })
      loop !ir !ow
        | ow >= os = done OutputUnderflow ir ow
        | ir >= iw = done InputUnderflow ir ow
        | otherwise = do
            (c,ir') <- readCharBuf iraw ir
            case ord c of
              x | x <= 0x7F -> do
                    writeWord8Buf oraw ow (fromIntegral x)
                    loop ir' (ow+1)
                | x <= 0x07FF ->
                    if os - ow < 2 then done OutputUnderflow ir ow else do
                    let (c1,c2) = ord2 c
                    writeWord8Buf oraw ow c1
                    writeWord8Buf oraw (ow+1) c2
                    loop ir' (ow+2)
                | x <= 0xFFFF -> if isSurrogate c then done InvalidSequence ir ow else do
                    if os - ow < 3 then done OutputUnderflow ir ow else do
                    let (c1,c2,c3) = ord3 c
                    writeWord8Buf oraw ow c1
                    writeWord8Buf oraw (ow+1) c2
                    writeWord8Buf oraw (ow+2) c3
                    loop ir' (ow+3)
                | otherwise -> do
                    if os - ow < 4 then done OutputUnderflow ir ow else do
                    let (c1,c2,c3,c4) = ord4 c
                    writeWord8Buf oraw ow c1
                    writeWord8Buf oraw (ow+1) c2
                    writeWord8Buf oraw (ow+2) c3
                    writeWord8Buf oraw (ow+3) c4
                    loop ir' (ow+4)
   in
   loop ir0 ow0
-- -----------------------------------------------------------------------------
-- UTF-8 primitives, lifted from Data.Text.Fusion.Utf8
-- | Split a code point in U+0080..U+07FF into its two UTF-8 bytes.
ord2 :: Char -> (Word8,Word8)
ord2 c = assert (cp >= 0x80 && cp <= 0x07ff) (b1, b2)
  where
    cp = ord c
    b1 = fromIntegral (0xC0 + (cp `shiftR` 6))  -- lead byte: 110xxxxx
    b2 = fromIntegral (0x80 + (cp .&. 0x3F))    -- continuation: 10xxxxxx
-- | Split a code point in U+0800..U+FFFF into its three UTF-8 bytes.
ord3 :: Char -> (Word8,Word8,Word8)
ord3 c = assert (cp >= 0x0800 && cp <= 0xffff) (b1, b2, b3)
  where
    cp = ord c
    b1 = fromIntegral (0xE0 + (cp `shiftR` 12))           -- 1110xxxx
    b2 = fromIntegral (0x80 + ((cp `shiftR` 6) .&. 0x3F)) -- 10xxxxxx
    b3 = fromIntegral (0x80 + (cp .&. 0x3F))              -- 10xxxxxx
-- | Split a code point at or above U+10000 into its four UTF-8 bytes.
ord4 :: Char -> (Word8,Word8,Word8,Word8)
ord4 c = assert (cp >= 0x10000) (b1, b2, b3, b4)
  where
    cp = ord c
    b1 = fromIntegral (0xF0 + (cp `shiftR` 18))            -- 11110xxx
    b2 = fromIntegral (0x80 + ((cp `shiftR` 12) .&. 0x3F)) -- 10xxxxxx
    b3 = fromIntegral (0x80 + ((cp `shiftR` 6) .&. 0x3F))  -- 10xxxxxx
    b4 = fromIntegral (0x80 + (cp .&. 0x3F))               -- 10xxxxxx
-- | Reassemble a two-byte UTF-8 sequence into a 'Char' using unboxed
-- primitive arithmetic (inverse of 'ord2'; bytes are assumed valid).
chr2 :: Word8 -> Word8 -> Char
chr2 (W8# x1#) (W8# x2#) = C# (chr# (z1# +# z2#))
  where
    !y1# = word2Int# x1#
    !y2# = word2Int# x2#
    !z1# = uncheckedIShiftL# (y1# -# 0xC0#) 6#
    !z2# = y2# -# 0x80#
{-# INLINE chr2 #-}
-- | Three-byte analogue of 'chr2' (inverse of 'ord3').
chr3 :: Word8 -> Word8 -> Word8 -> Char
chr3 (W8# x1#) (W8# x2#) (W8# x3#) = C# (chr# (z1# +# z2# +# z3#))
  where
    !y1# = word2Int# x1#
    !y2# = word2Int# x2#
    !y3# = word2Int# x3#
    !z1# = uncheckedIShiftL# (y1# -# 0xE0#) 12#
    !z2# = uncheckedIShiftL# (y2# -# 0x80#) 6#
    !z3# = y3# -# 0x80#
{-# INLINE chr3 #-}
-- | Four-byte analogue of 'chr2' (inverse of 'ord4').
chr4 :: Word8 -> Word8 -> Word8 -> Word8 -> Char
chr4 (W8# x1#) (W8# x2#) (W8# x3#) (W8# x4#) =
    C# (chr# (z1# +# z2# +# z3# +# z4#))
  where
    !y1# = word2Int# x1#
    !y2# = word2Int# x2#
    !y3# = word2Int# x3#
    !y4# = word2Int# x4#
    !z1# = uncheckedIShiftL# (y1# -# 0xF0#) 18#
    !z2# = uncheckedIShiftL# (y2# -# 0x80#) 12#
    !z3# = uncheckedIShiftL# (y3# -# 0x80#) 6#
    !z4# = y4# -# 0x80#
{-# INLINE chr4 #-}
-- | Is the byte within the inclusive range [lower, upper]?
between :: Word8 -- ^ byte to check
        -> Word8 -- ^ lower bound
        -> Word8 -- ^ upper bound
        -> Bool
between x lo hi = lo <= x && x <= hi
{-# INLINE between #-}
-- | Is @x1 x2 x3@ a well-formed three-byte UTF-8 sequence?  The allowed
-- continuation ranges depend on the lead byte (per the well-formed byte
-- ranges of Unicode Table 3-7: no overlong forms, no surrogates).
validate3 :: Word8 -> Word8 -> Word8 -> Bool
{-# INLINE validate3 #-}
validate3 x1 x2 x3
  | x1 == 0xE0               = inRange 0xA0 0xBF x2 && contOk
  | 0xE1 <= x1 && x1 <= 0xEC = inRange 0x80 0xBF x2 && contOk
  | x1 == 0xED               = inRange 0x80 0x9F x2 && contOk
  | 0xEE <= x1 && x1 <= 0xEF = inRange 0x80 0xBF x2 && contOk
  | otherwise                = False
  where
    contOk = inRange 0x80 0xBF x3
    inRange lo hi b = lo <= b && b <= hi
-- | Is @x1 x2 x3 x4@ a well-formed four-byte UTF-8 sequence?  The second
-- byte's range depends on the lead byte so that only U+10000..U+10FFFF
-- is accepted (per Unicode Table 3-7).
validate4 :: Word8 -> Word8 -> Word8 -> Word8 -> Bool
{-# INLINE validate4 #-}
validate4 x1 x2 x3 x4
  | x1 == 0xF0               = inRange 0x90 0xBF x2 && contOk
  | 0xF1 <= x1 && x1 <= 0xF3 = inRange 0x80 0xBF x2 && contOk
  | x1 == 0xF4               = inRange 0x80 0x8F x2 && contOk
  | otherwise                = False
  where
    contOk = inRange 0x80 0xBF x3 && inRange 0x80 0xBF x4
    inRange lo hi b = lo <= b && b <= hi
| frantisekfarka/ghc-dsi | libraries/base/GHC/IO/Encoding/UTF8.hs | bsd-3-clause | 13,355 | 0 | 31 | 5,417 | 3,984 | 2,045 | 1,939 | 291 | 14 |
{-# OPTIONS -fplugin Simple.Plugin #-}
module T12567a where
| ezyang/ghc | testsuite/tests/plugins/T12567a.hs | bsd-3-clause | 62 | 0 | 2 | 10 | 5 | 4 | 1 | 2 | 0 |
-- Class used as a type, recursively
module ShouldFail where
class XML a where toXML :: a -> XML | siddhanathan/ghc | testsuite/tests/typecheck/should_fail/tcfail134.hs | bsd-3-clause | 98 | 0 | 7 | 20 | 23 | 13 | 10 | 2 | 0 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Network.AWS.Wolf.Ctx
( runTop
, runConfCtx
, preConfCtx
, runAmazonCtx
, runAmazonStoreCtx
, runAmazonWorkCtx
, runAmazonDecisionCtx
) where
import Control.Concurrent
import Control.Exception.Lifted
import Control.Monad.Trans.AWS
import Data.Aeson
import Network.AWS.SWF
import Network.AWS.Wolf.Prelude
import Network.AWS.Wolf.Types
import Network.HTTP.Types
-- | Catcher for exceptions, traces and rethrows.
--
-- Bottom-of-stack handler: logs the rendered exception via the
-- structured tracer, then rethrows it unchanged.
botSomeExceptionCatch :: MonadCtx c m => SomeException -> m a
botSomeExceptionCatch ex = do
  traceError "exception" [ "error" .= displayException ex ]
  throwIO ex
-- | Catch TransportError's.
--
-- 'TransportError's are rethrown without tracing (expected network
-- noise); every other 'Error' is traced first.  In all cases the
-- exception is rethrown.
botErrorCatch :: MonadCtx c m => Error -> m a
botErrorCatch ex = do
  case ex of
    TransportError _ ->
      pure ()
    _ ->
      traceError "exception" [ "error" .= displayException ex ]
  throwIO ex
-- | Catcher for exceptions, emits stats and rethrows.
--
-- Top-level handler: traces the exception, bumps the @wolf.exception@
-- counter tagged with the rendered message, then rethrows.
topSomeExceptionCatch :: MonadStatsCtx c m => SomeException -> m a
topSomeExceptionCatch ex = do
  traceError "exception" [ "error" .= displayException ex ]
  statsIncrement "wolf.exception" [ "reason" =. textFromString (displayException ex) ]
  throwIO ex
-- | Run stats ctx.
--
-- Entry point: runs the action inside a fresh stats context, reporting
-- any escaping exception through 'topSomeExceptionCatch'.
runTop :: MonadCtx c m => TransT StatsCtx m a -> m a
runTop action = runStatsCtx $ catch action topSomeExceptionCatch
-- | Run bottom TransT.
--
-- Runs a transformer with the given context, routing 'Error's through
-- 'botErrorCatch' and all other exceptions through
-- 'botSomeExceptionCatch'.
runTrans :: (MonadControl m, HasCtx c) => c -> TransT c m a -> m a
runTrans c action = runTransT c $ catches action [ Handler botErrorCatch, Handler botSomeExceptionCatch ]
-- | Run configuration context.
--
-- Extends the trace preamble with the SWF domain and the S3 bucket and
-- prefix taken from the configuration, then runs the action in a
-- 'ConfCtx'.
runConfCtx :: MonadStatsCtx c m => Conf -> TransT ConfCtx m a -> m a
runConfCtx conf action = do
  let preamble =
        [ "domain" .= (conf ^. cDomain)
        , "bucket" .= (conf ^. cBucket)
        , "prefix" .= (conf ^. cPrefix)
        ]
  c <- view statsCtx <&> cPreamble <>~ preamble
  runTrans (ConfCtx c conf) action
-- | Update configuration context's preamble.
--
-- Appends the given key/value pairs to the trace preamble and continues
-- in the extended context.
preConfCtx :: MonadConf c m => Pairs -> TransT ConfCtx m a -> m a
preConfCtx preamble action = do
  c <- view confCtx <&> cPreamble <>~ preamble
  runTrans c action
-- | Run amazon context.
--
-- Creates an AWS environment with credentials discovered from the usual
-- sources and runs the action in an 'AmazonCtx'.  The CPP guard papers
-- over amazonka 1.4.5 dropping the region argument from 'newEnv'.
runAmazonCtx :: MonadCtx c m => TransT AmazonCtx m a -> m a
runAmazonCtx action = do
  c <- view ctx
#if MIN_VERSION_amazonka(1,4,5)
  e <- newEnv Discover
#else
  e <- newEnv Oregon Discover
#endif
  runTrans (AmazonCtx c e) action
-- | Run amazon store context.
--
-- Tags the trace preamble with the workflow @uid@ and derives this run's
-- storage prefix by appending the uid to the configured prefix.
runAmazonStoreCtx :: MonadConf c m => Text -> TransT AmazonStoreCtx m a -> m a
runAmazonStoreCtx uid action = do
  let preamble = [ "uid" .= uid ]
  c <- view confCtx <&> cPreamble <>~ preamble
  p <- (-/- uid) . view cPrefix <$> view ccConf
  runTrans (AmazonStoreCtx c p) action
-- | Throttle throttle exceptions.
--
-- Records the throttling event, sleeps five seconds, then retries the
-- action; a further throttle response loops back here via 'throttler'.
throttled :: MonadStatsCtx c m => m a -> m a
throttled action = do
  traceError "throttled" mempty
  statsIncrement "wolf.throttled" mempty
  liftIO $ threadDelay $ 5 * 1000000
  catch action $ throttler action
-- | Amazon throttle handler.
--
-- Retries the action (via 'throttled') when AWS answered HTTP 400 with
-- code @Throttling@; any other error is rethrown unchanged.
throttler :: MonadStatsCtx c m => m a -> Error -> m a
throttler action err@(ServiceError se)
  | isThrottling = throttled action
  | otherwise    = throwIO err
  where
    isThrottling =
      se ^. serviceStatus == badRequest400 && se ^. serviceCode == "Throttling"
throttler _ err = throwIO err
-- | Run amazon work context.
--
-- Tags the trace preamble with the activity @queue@ name and retries the
-- action on AWS throttling responses.
runAmazonWorkCtx :: MonadConf c m => Text -> TransT AmazonWorkCtx m a -> m a
runAmazonWorkCtx queue action = do
  let preamble = [ "queue" .= queue ]
  c <- view confCtx <&> cPreamble <>~ preamble
  runTrans (AmazonWorkCtx c queue) (catch action $ throttler action)
-- | Swallow bad request exceptions.
--
-- Records the event and retries the action; a further bad-request
-- response loops back here via 'swallower'.  Unlike 'throttled' there
-- is no delay between retries.
swallowed :: MonadStatsCtx c m => m a -> m a
swallowed action = do
  traceError "swallowed" mempty
  statsIncrement "wolf.swallowed" mempty
  catch action $ swallower action
-- | Amazon swallow handler.
--
-- Retries the action (via 'swallowed') when AWS answered HTTP 400 with
-- code @Bad Request@; any other error is rethrown unchanged.
swallower :: MonadStatsCtx c m => m a -> Error -> m a
swallower action err@(ServiceError se)
  | isBadRequest = swallowed action
  | otherwise    = throwIO err
  where
    isBadRequest =
      se ^. serviceStatus == badRequest400 && se ^. serviceCode == "Bad Request"
swallower _ err = throwIO err
-- | Run amazon decision context.
--
-- Tags the trace preamble with the plan's task name and swallows AWS
-- bad-request responses while running the decider.
runAmazonDecisionCtx :: MonadConf c m => Plan -> [HistoryEvent] -> TransT AmazonDecisionCtx m a -> m a
runAmazonDecisionCtx p hes action = do
  let preamble = [ "name" .= (p ^. pStart . tName) ]
  c <- view confCtx <&> cPreamble <>~ preamble
  runTrans (AmazonDecisionCtx c p hes) (catch action $ swallower action)
| swift-nav/wolf | src/Network/AWS/Wolf/Ctx.hs | mit | 4,520 | 0 | 16 | 958 | 1,396 | 687 | 709 | -1 | -1 |
{-# htermination threadToIOResult :: IO a -> IOResult #-}
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/full_haskell/Prelude_threadToIOResult_1.hs | mit | 58 | 0 | 2 | 9 | 3 | 2 | 1 | 1 | 0 |
module Tree (
Tree(Leaf,Branch),
getBottom,
depth,
getVal,
getSubTree,
getAllSubTrees,
getValAt,
findMax,
chooser,
maxmin,
alphaBeta,
applyAtEnds,
applyNTimes
) where
import Data.Monoid
import Test.QuickCheck
{-
 Provides the data structures and functions meant to deal with trees.
 Tasks:
  - Extending Tree typeclasses, to Traversable
-}
-- | A rose tree representing all possible choices from an event: values
-- live only at the leaves; a 'Branch' holds the alternatives.
data Tree a = Leaf a | Branch [Tree a]
  deriving Show
instance Arbitrary a => Arbitrary (Tree a) where
  arbitrary = sized sizedArbitrary
-- | Size-bounded generator: draws a value and half-sized subtrees, then
-- flips a coin between producing a 'Branch' (chk == 1) and a 'Leaf'.
sizedArbitrary :: Arbitrary a => Int -> Gen(Tree a)
sizedArbitrary m = do
  ls <- vectorOf (m `div` 2) (sizedArbitrary (m `div` 2))
  chk <- choose (1,2) :: Gen Integer
  val <- arbitrary
  return (cusMap val ls chk)
    where
      cusMap val ls chk = if chk == 1 then Branch ls else Leaf val
-- | Mapping preserves the tree shape, applying @f@ at every leaf.
instance Functor Tree where
  fmap f (Branch y) = Branch (map (fmap f) y)
  fmap f (Leaf x) = Leaf (f x)
-- | Folds visit the leaves left-to-right.
instance Foldable Tree where
  foldMap f (Leaf a) = f a
  foldMap f (Branch br) = foldMap (foldMap f) br
-- | The immediate children of a node; a leaf has none.
getAllSubTrees :: Tree a -> [Tree a]
getAllSubTrees t =
  case t of
    Branch subs -> subs
    Leaf _      -> []
-- | Follow a path of child indices down the tree.  Returns 'Nothing'
-- when the path descends past a leaf or uses an index that is out of
-- range for the branch it addresses.
getSubTree :: Tree a -> [Int] -> Maybe (Tree a)
getSubTree tr [] = Just tr
getSubTree (Leaf _) _ = Nothing
getSubTree (Branch subTr) (x:xs)
  -- Reject negative indices and indices past the last child.  The
  -- previous bound (x > length subTr) admitted x == length subTr,
  -- which made (subTr !! x) crash.
  | x < 0 || x >= length subTr = Nothing
  | otherwise                  = getSubTree (subTr !! x) xs
-- | The value stored at the root, if the root is a leaf.
getVal :: Tree a -> Maybe a
getVal (Branch _) = Nothing
getVal (Leaf v)   = Just v
-- | Follow a path with 'getSubTree' and read the value at its end.
getValAt :: Tree a -> [Int] -> Maybe a
getValAt tr path = getSubTree tr path >>= getVal
-- | Index of the first occurrence of the greatest element, or 'Nothing'
-- for an empty list.
findMax :: Ord a => [a] -> Maybe Int
findMax []       = Nothing
findMax (x0:xs0) = Just (go 0 x0 1 xs0)
  where
    -- carry the best index and value seen so far plus the next index
    go best _    _ []     = best
    go best bval j (v:vs)
      | v > bval  = go j v (j + 1) vs
      | otherwise = go best bval (j + 1) vs
-- | True iff the 'Maybe' holds exactly the given value.
(<==>) :: Eq a => Maybe a -> a -> Bool
m <==> v = m == Just v
-- | The element at the index reported by 'findMax' is the list maximum.
prop_findMax :: Ord a => [a] -> Bool
prop_findMax [] = True
prop_findMax ls = fmap (ls !!) (findMax ls) <==> maximum ls
-- | Index of the child that maximises the scoring function;
-- 'Nothing' for a leaf.
chooser :: Ord b => (Tree a -> b) -> Tree a -> Maybe Int
chooser score (Branch subs) = findMax (map score subs)
chooser _ _ = Nothing
-- | Alpha-beta pruned minimax value of a tree.  The 'Bool' selects the
-- player at this node: 'True' maximises, 'False' minimises; @a@ and @b@
-- are the current lower and upper bounds.
alphaBeta :: Ord a => a -> a -> Bool -> Tree a -> a
alphaBeta _ _ _ (Leaf a) = a
alphaBeta a b True (Branch ls) = maxLeq a b ls
alphaBeta a b False (Branch ls) = minLeq a b ls
-- | Fold the children of a minimising node, tightening the upper bound
-- @b@ with each child's value and stopping once @a >= b@ (cut-off).
--
-- Fix: the recursive calls previously went to 'maxLeq', which applied
-- maximising-node semantics (bound @a@, empty-list result @a@) to the
-- remaining children of a minimising node; the siblings of a minimising
-- node's child must keep being folded by 'minLeq'.
minLeq :: Ord a => a -> a -> [Tree a] -> a
minLeq _ b [] = b
minLeq a b _ | a >= b = b
minLeq a b (x:xs)
  | v < b     = minLeq a v xs
  | otherwise = minLeq a b xs
  where
    -- the child itself belongs to the maximising player
    v = alphaBeta a b True x
-- | Fold the children of a maximising node, raising the lower bound @a@
-- with each child's value; returns @a@ once it exceeds @b@ (cut-off).
-- NOTE(review): the cut-off here is @a > b@ while the minimising fold
-- cuts at @a >= b@ -- confirm whether that asymmetry is intended.
maxLeq :: Ord a => a -> a -> [Tree a] -> a
maxLeq a _ [] = a
maxLeq a b _ | a > b = a
maxLeq a b (x:xs) | val x >= a = maxLeq (val x) b xs
  where
    val = alphaBeta a b False
maxLeq a b (x:xs) | val x < a = maxLeq a b xs
  where
    val = alphaBeta a b False
-- Naive minimax implementation
-- | Alternates the two combining functions level by level: @f@ combines
-- this node's children, @g@ the next level's, and so on.
maxa :: ([a] -> a,[a] -> a) -> Tree a -> a
maxa _ (Leaf a) = a
maxa (f,g) (Branch y) = f (map (maxa (g,f)) y)
-- Gives value of branch of greatest value
-- | Plain (unpruned) minimax value, maximising at the root.
maxmin :: Ord a => Tree a -> a
maxmin (Leaf a) = a
maxmin t = maxa (maximum,minimum) t
-- | Grow the tree at its leaves: each leaf value is expanded into a
-- branch whose children are leaves built from @f@'s results.
applyAtEnds :: (a -> [b]) -> Tree a -> Tree b
applyAtEnds f (Leaf v)      = Branch [Leaf y | y <- f v]
applyAtEnds f (Branch subs) = Branch [applyAtEnds f t | t <- subs]
-- | Growing each leaf into a two-leaf branch adds exactly one level.
prop_applyAtEnds :: Tree a -> Bool
prop_applyAtEnds tr = (depth tr + 1) == depth (applyAtEnds (replicate 2) tr)
-- | Growing @i@ times adds exactly @i@ levels.
prop_applyNTimes :: (Integer,Tree a) -> Bool
prop_applyNTimes (i,tr) = (depth tr + i) == depth (applyNTimes [replicate 2] tr i)
-- | Repeatedly grow the tree's leaves, cycling through the list of leaf
-- generating functions (the head is applied, then rotated to the back).
--
-- Robustness fixes: a non-positive count now returns the tree unchanged
-- (negative counts previously never terminated), and an empty function
-- list is a no-op (previously a pattern-match failure for n > 0).
applyNTimes :: [a -> [a]] -> Tree a -> Integer -> Tree a
applyNTimes _ tr n | n <= 0 = tr
applyNTimes [] tr _ = tr
applyNTimes (f:fs) tr n = applyNTimes (fs ++ [f]) (applyAtEnds f tr) (n - 1)
-- | Is the root an internal node?
isBranch :: Tree a -> Bool
isBranch t = case t of
  Branch _ -> True
  Leaf _   -> False
-- | Number of levels in the tree; a lone leaf or an empty branch counts
-- as depth 1.
depth :: Tree a -> Integer
depth (Leaf _) = 1
depth (Branch ts)
  | null ts   = 1
  | otherwise = 1 + maximum [depth t | t <- ts]
-- | All leaf values, collected left-to-right.
getBottom :: Tree a -> [a]
getBottom (Leaf v)    = [v]
getBottom (Branch ts) = ts >>= getBottom
| WawerOS/Chessotron2000 | src/Tree.hs | mit | 4,421 | 0 | 12 | 1,106 | 2,155 | 1,081 | 1,074 | 109 | 4 |
module ParHackageSearch where
import Search
import System.Directory
import System.FilePath.Posix
import Text.Regex.Posix
import Control.Parallel.Strategies
import qualified Data.ByteString.Char8 as B
import Data.Char
import System.Mem
import Control.DeepSeq
import Control.Monad
-- | Root directory of the unpacked hackage package sources to scan.
packages = "/home/stephen/Projects/ParRegexSearch/hackage/package/"
-- | Read the matching source files of every package directory under
-- @fp@.  The result is fully forced before returning, and a GC is
-- triggered so intermediate structures can be reclaimed eagerly.
readAllPackages :: FilePath -> IO [(FilePath,[(FilePath, B.ByteString)])]
readAllPackages fp = do
  dirs <- listDirectory fp
  let fullPaths = map (\pn -> fp ++ "/" ++ pn) dirs
  onlyDirs <- filterM doesDirectoryExist fullPaths
  res <- mapM readDir onlyDirs
  let fr = force res
  performGC
  return $ fr
-- | For every package directory under @fp@, list the paths of its
-- matching (.hs) source files without reading their contents.
getAllFileNames :: FilePath -> IO [(FilePath, [FilePath])]
getAllFileNames fp = do
  dirs <- listDirectory fp
  let fullPaths = map (\pn -> fp ++ "/" ++ pn) dirs
  onlyDirs <- filterM doesDirectoryExist fullPaths
  mapM f onlyDirs
    where
      -- pair each directory with the file names it contains
      f dir = do
        files <- getFileNamesByPred dir searchPred
        return (dir, files)
-- | Keep only Haskell source files (extension @.hs@).
searchPred fp = return (takeExtension fp == ".hs")
-- | Read a single package directory: pair its path with the
-- (path, contents) of each source file accepted by 'searchPred'.
readDir fp = do
  contents <- getDirContentsByPred fp searchPred
  return (fp, contents)
| SAdams601/ParRegexSearch | src/ParHackageSearch.hs | mit | 1,151 | 0 | 14 | 205 | 378 | 193 | 185 | 35 | 1 |
module Euler.Problem013Test (suite) where
import Test.Tasty (testGroup, TestTree)
import Test.Tasty.HUnit
import Euler.Problem013
-- | Test group for Euler Problem 13.
suite :: TestTree
suite = testGroup "Problem013"
  [ testCase "count of addends" testAddends
  ]
-- | The puzzle input consists of exactly 100 addends.
testAddends :: Assertion
testAddends = 100 @=? length addends
| whittle/euler | test/Euler/Problem013Test.hs | mit | 305 | 0 | 7 | 55 | 75 | 43 | 32 | 9 | 1 |
module TestImport
( withApp
, runApp
, runDB
, runDBWithApp
, wipeDB
, authenticateAs
, times
, module X
) where
import Application (makeFoundation, withEnv)
import ClassyPrelude as X
import Database.Persist as X hiding (get, delete, deleteBy)
import Database.Persist.Sql (SqlPersistM, SqlBackend, runSqlPersistMPool, rawExecute, rawSql, unSingle, connEscapeName)
import Foundation as X
import Model as X
import Test.Hspec as X hiding
( expectationFailure
, shouldBe
, shouldSatisfy
, shouldContain
, shouldMatchList
, shouldReturn
)
import Test.Hspec.Extension as X
import Test.Hspec.Expectations.Lifted as X
import Yesod.Default.Config2 (ignoreEnv, loadYamlSettings)
import Yesod.Test.Extension as X
import Settings as X
import Factories as X
-- | Run a database action inside a 'YesodExample', using the test
-- application's connection pool.
runDB :: SqlPersistM a -> YesodExample App a
runDB query = do
  app <- getTestYesod
  liftIO $ runDBWithApp app query
-- | Run a database action against the given application's pool.
runDBWithApp :: App -> SqlPersistM a -> IO a
runDBWithApp app query = runSqlPersistMPool query (appConnPool app)
-- | Spec hook: build a fresh foundation and wipe the database before
-- every spec item.
withApp :: SpecWith App -> Spec
withApp = before $ runApp $ \app -> do
  wipeDB app
  return app
-- | Build the test foundation from @config/test-settings.yml@ layered
-- over @config/settings.yml@ (environment variables ignored) and hand
-- it to the continuation.
runApp :: (App -> IO a) -> IO a
runApp f = do
  settings <- loadYamlSettings
    ["config/test-settings.yml", "config/settings.yml"]
    []
    ignoreEnv
  f =<< withEnv (makeFoundation settings)
-- This function will truncate all of the tables in your database.
-- 'withApp' calls it before each test, creating a clean environment for each
-- spec to run in.
wipeDB :: App -> IO ()
wipeDB app = do
  runDBWithApp app $ do
    tables <- getTables
    sqlBackend <- ask
    -- truncate all tables in one statement, quoting each name with the
    -- backend's own escaping rules
    let escapedTables = map (connEscapeName sqlBackend . DBName) tables
        query = "TRUNCATE TABLE " ++ (intercalate ", " escapedTables)
    rawExecute query []
-- | Names of all tables in the @public@ schema (PostgreSQL-specific
-- query against @information_schema@).
getTables :: MonadIO m => ReaderT SqlBackend m [Text]
getTables = do
  tables <- rawSql "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';" []
  return $ map unSingle tables
-- | Log the given user in through the dummy authentication plugin by
-- POSTing their identifier to @/auth/page/dummy@.
authenticateAs :: Entity User -> YesodExample App ()
authenticateAs (Entity _ u) = do
  testRoot <- fmap (appRoot . appSettings) getTestYesod
  let url = testRoot ++ "/auth/page/dummy"
  request $ do
    setMethod "POST"
    addPostParam "ident" $ userIdent u
    setUrl url
-- | Run the action once for each of @1..n@, collecting the results in
-- order.
times :: Monad m => Int -> (Int -> m a) -> m [a]
times n act = mapM act [1 .. n]
| thoughtbot/carnival | test/TestImport.hs | mit | 2,468 | 0 | 16 | 618 | 680 | 362 | 318 | 67 | 1 |
-- | Common types used by Text.Tokenify
module Text.Tokenify.Types where
import Text.Tokenify.Response (Response)
import Text.Tokenify.Regex (Regex)
-- | A series of 'Token's which will match in sequential order
type Tokenizer s a = [Token s a]
-- | Pairs what is matched (a 'Regex') with how to respond to the match
type Token s a = (Regex s, Response s a)
-- | A token position in a file, as a pair of 'Int's
type Pos = (Int, Int)
| AKST/tokenify | src/Text/Tokenify/Types.hs | mit | 420 | 0 | 6 | 80 | 83 | 54 | 29 | 6 | 0 |
{-# LANGUAGE Trustworthy #-}
-- |
-- Module : Spell.Edit
-- Description : Calculation of Minimum Edit Distance (MED).
-- Copyright : (c) Stefan Haller, 2014
--
-- License : MIT
-- Maintainer : s6171690@mail.zih.tu-dresden.de
-- Stability : experimental
-- Portability : portable
--
-- This module allows to calculate the
-- <http://en.wikipedia.org/wiki/Minimum_Edit_Distance Minimum Edit Distance>
-- using <http://en.wikipedia.org/wiki/Trie prefix trees>.
module Spell.Edit where
import Control.Monad (guard)
import Data.Char (toLower)
import Data.ListLike.Instances ()
import Data.Maybe (listToMaybe, maybeToList)
import qualified Data.PQueue.Prio.Min as PMin
import Data.Text (Text)
import qualified Data.Text as T
import Data.Trie (Trie, branches, end, expandPaths, populate, update, value)
import Data.Tuple (swap)
import Data.Vector.Unboxed (Vector, Unbox)
import qualified Data.Vector.Unboxed as V
-- | Aggregation of penalty functions.
--
-- All these functions receive both characters as arguments. The first argument
-- for the insertion and deletion is optional (because insertions and deletions
-- at the beginning of the word do not have a reference character).
--
-- Note that the function calculate the penalties for an edit operation where
-- the expected character is modified to match the character of the input.
--
-- (This behavior is the same as in the reference literature: Mark D. Kernighan,
-- Kenneth W. Church und William A. Gale. „A spelling correction program based
-- on a noisy channel model“)
data Penalties a p = Penalties
  { penaltyInsertion    :: Maybe a -> a -> p
    -- ^ Cost of inserting the second character after the optional
    -- preceding character.
  , penaltyDeletion     :: Maybe a -> a -> p
    -- ^ Cost of deleting the second character after the optional
    -- preceding character.
  , penaltySubstitution :: a -> a -> p
    -- ^ Cost of substituting the first character by the second.
  , penaltyReversal     :: a -> a -> p
    -- ^ Cost of transposing the two adjacent characters.
  }
-- | Penalty functions where every edit operation costs @1@, except that
-- substituting a character by itself costs @0@ and substituting it by a
-- character differing only in case costs @0.5@.
defaultPenalties :: Penalties Char Double
defaultPenalties = Penalties
  { penaltyInsertion    = const (const 1)
  , penaltyDeletion     = const (const 1)
  , penaltyReversal     = const (const 1)
  , penaltySubstitution = substitutionCost
  }
  where
    substitutionCost a b
      | a == b                 = 0.0
      | toLower a == toLower b = 0.5
      | otherwise              = 1.0
-- | Calculates the column vectors for the nodes in the 'Trie'. Meant
-- to be used with 'populate'.
--
-- The @[Char]@ argument appears to be the path of characters leading to
-- the node and the @[Vector p]@ the columns already computed along that
-- path, most recent first (TODO confirm against 'populate').
calculateEdit :: (Num p, Ord p, Unbox p)
              => Penalties Char p -> Text -> [Char] -> [Vector p] -> Vector p
calculateEdit p r = f
  where
    -- Builds the column vector.
    --
    -- The first column vector (pattern match on empty path) contains the
    -- numbers [0, 1, 2, ...] in the paper. In general this is: [0,
    -- penaltyInsert_1, penaltyInsert_1 + penaltyInsert_2, ...].
    f [] [] = V.scanl (+) 0 . V.map (penaltyInsertion p Nothing) . V.fromList $ T.unpack r
    -- For all other column vectors we construct a vector of the right size and
    -- use a function produce all values.
    --
    -- constructN will build a vector of size n and apply our function with a
    -- vector of growing size until all elements are set. So on our first call
    -- the vector has size 0, then size 1, etc.
    f ks vs = V.constructN (T.length r + 1) (f' ks vs)
    -- This function will produce the value for the next row of our column
    -- vector. We start with the lowest cell. See the ks and vs as a stack where
    -- the ks represents the current path in our Trie and the vs all the values
    -- on the path.
    --
    -- v' is the growing vector we are going to fill.
    f' (k:ks) (v:vs) v'
      | V.null v' = v V.! i + penaltyDeletion p (listToMaybe ks) k
      | otherwise = minimum choices
      where
        i = V.length v'
        choices = [ v V.! i + penaltyDeletion p (listToMaybe ks) k
                  , v' V.! (i-1) + penaltyInsertion p (Just k) (r `T.index` (i-1))
                  , v V.! (i-1) + penaltySubstitution p k (r `T.index` (i-1))
                  ] ++ maybeToList reversal
        -- This is like (T.!), but returns Nothing if out of bounds.
        (!?) :: Text -> Int -> Maybe Char
        (!?) t n
          | n < 0 = Nothing
          | otherwise = fmap fst . T.uncons $ T.drop n t
        -- Checks for reversal. Returns the costs for the reversal or Nothing if
        -- no reversal is possible.
        reversal = do
          let s = [Just k, listToMaybe ks]
              t = [r !? (i-2), r !? (i-1)]
          [s1, s2] <- sequence s -- guard against Nothings
          guard (s == t) -- check that it’s really a reversal
          v2 <- listToMaybe vs -- pop the second element from vector stack
          return $ v2 V.! (i-2) + penaltyReversal p s2 s1
    f' _ _ _ = error "This function should never get called."
-- | Shrinks the column vectors of the 'Trie' nodes.
--
-- The calculation of the best edit does not need the whole vectors.  It
-- only needs:
--
--   * The last (top-most) element of the vector, because this represents
--     the edit distance of the current node.
--
--   * The minimum element of the vector, because we later use this value as
--     heuristic.  Each edit path must pass this column vector and the scores
--     can only get higher.  If we have an edit with a value smaller than
--     this minimum, we can completely ignore this node.
--
-- Due to reversals the result might get wrong if we only consider the minimum
-- of the current vector.  Reversals allow jumps and the minimum element of the
-- vector might decrease.  To fix this we take the minimum of the current and the
-- previous vector (the reversal costs only skip one column vector).
shrinkMatrices :: (Ord p, Unbox p) => Trie Char (Vector p) -> Trie Char (p, p)
shrinkMatrices = update f
  where
    -- NOTE(review): the empty-stack case is assumed unreachable from
    -- 'update' -- confirm before relying on it.
    f [] _ _ = undefined
    f [v1] _ _ = (V.last v1, V.minimum v1)
    f (v1:v2:_) _ _ = (V.last v1, min (V.minimum v1) (V.minimum v2))
-- | Lazily returns all suggestions sorted in order of increasing edit distance.
--
-- To calculate only the best suggestion use @head . searchBestEdits@. To
-- get the best 10 suggestions you can use @take 10 . searchBestEdit@.
--
-- The optional first argument is a cut-off: nodes whose priority exceeds
-- this bound abort the search.
--
-- The internal implementation uses some kind of greedy algorithm (similiar
-- to Dijkstra or A*, but to simplified for a tree structure):
--
-- There are two priority queues:
--
--   * The queue for end nodes (“finished queue”). The priority measure is
--     the minimum edit distance (= the top-most element of the column
--     vector).
--
--   * The queue for discovered nodes (“working queue”). The priority measure is the
--     minimum value of the colum vector.
--
-- After initializing the working queue with the given 'Trie' node, the
-- algorithm works as follows:
--
--   * Take the first element of working queue.
--
--   * If the value is greater than the minimum value in finished queue, the
--     next finished node is optimal. All paths must pass through exactly
--     one cell of each column vector of the path. The current working node
--     is on the best undiscoverd path. If the value of the next finished
--     node is lower, it must be optimal. So we return the next finished
--     node as the next result.
--
--   * Expand children of current working node and insert them into the
--     working queue.
--
--   * If the current working node is an end node, insert it into the
--     finished queue.
--
--   * In any case, the current working node is dropped from the working
--     queue.
--
--   * Repeat everything until the working queue is empty.
searchBestEdits :: (Num p, Ord p) => Maybe p -> Trie Char (Text, (p, p)) -> [(Text, p)]
searchBestEdits c trie = map swap $ processQueue (finished, queue)
  where
    finished = PMin.empty
    queue = PMin.singleton 0 trie
    -- Checks if value is greater than the Just value of cut off.
    checkCutOff x = maybe False (x >) c
    -- This function iterates over working queue till empty.
    processQueue (f, q)
      | PMin.null q = PMin.toAscList f -- working queue empty
      | checkCutOff q'' = [] -- cut off detected
      | not (PMin.null f) && f'' < q'' = h : processQueue (f', q) -- element is optimal
      | otherwise = processQueue $ processNext (f, q) -- not sure enough, recursion
      where
        Just (h@(f'', _), f') = PMin.minViewWithKey f
        (q'', _) = PMin.findMin q
    -- This function processes the head element of the working queue. In every
    -- case the current head is expanded and the children are inserted into the
    -- working queue.
    processNext (f, q)
      | end t = (f', q'') -- reinsert element in finished queue
      | otherwise = (f, q'') -- keep finished queue,
      where
        Just (t, q') = PMin.minView q
        (path, (last', _)) = value t
        f' = PMin.insert last' path f
        q'' = q' `PMin.union` PMin.fromList (map (processBranches .snd) $ branches t)
    -- Calculates values for all the branches (children).
    processBranches t = let (_, (_, min')) = value t in (min', t)
-- | Like 'searchBestEdits' but only returns the resulting 'Text's.
searchBestEdits' :: (Num p, Ord p) => Maybe p -> Trie Char (Text, (p, p)) -> [Text]
searchBestEdits' c t = fst <$> searchBestEdits c t
-- | One-shot function for determining the best suggestions.
--
-- Pipeline: populate the trie skeleton with edit-distance columns,
-- shrink each column to the (distance, minimum) pair, attach the full
-- word to each node, then search lazily.
--
-- This function works lazily, for more information see 'searchBestEdits'.
bestEdits :: (Num p, Ord p, Unbox p)
          => Penalties Char p -- ^ The penalties used for calculating the MED.
          -> Maybe p -- ^ Nodes where the minimum of the column
                     -- vector is greater (or equal) than this
                     -- 'Just' value are cutted. 'Nothing'
                     -- prevents cutting.
          -> Text -- ^ The reference word.
          -> Trie Char () -- ^ The 'Trie' 'Data.Trie.skeleton'.
          -> [(Text, p)]
bestEdits p c r = searchBestEdits c
                . expandPaths
                . shrinkMatrices
                . populate (calculateEdit p r)
-- | Like 'bestEdits' but only returns the resulting 'Text's.
bestEdits' :: (Num p, Ord p, Unbox p)
           => Penalties Char p -> Maybe p -> Text -> Trie Char () -> [Text]
bestEdits' p c r t = fst <$> bestEdits p c r t
| fgrsnau/spell | lib/Spell/Edit.hs | mit | 10,370 | 1 | 17 | 2,871 | 1,922 | 1,069 | 853 | 93 | 3 |
{-# LANGUAGE CPP #-}
-- | Compatibility shim: re-exports 'Monoid', '(<>)', '(<$>)' and 'pure'
-- uniformly across @base@ versions, backfilling whatever the bundled
-- Prelude does not yet provide.
module Common
  ( Monoid
  , (<>)
  , (<$>)
  , pure
  ) where
-- Before base-4.8 (GHC 7.10), 'pure' and '(<$>)' are not exported by the
-- Prelude, so import them explicitly for re-export.
#if !MIN_VERSION_base(4, 8, 0)
import Control.Applicative (pure)
import Data.Functor ((<$>))
#endif
-- base >= 4.8 exports 'Monoid' from the Prelude; base 4.5..4.7 provides
-- '(<>)' from "Data.Monoid"; older versions only offer 'mappend'.
#if MIN_VERSION_base(4, 8, 0)
import Data.Monoid ((<>))
#elif MIN_VERSION_base(4, 5, 0)
import Data.Monoid (Monoid, (<>))
#else
import Data.Monoid (Monoid, mappend)
#endif
#if !MIN_VERSION_base(4, 5, 0)
-- Backfill of the append operator for base < 4.5.
(<>) :: Monoid m => m -> m -> m
(<>) = mappend
#endif
| Rufflewind/wigner-symbols | src/Common.hs | mit | 444 | 0 | 7 | 79 | 93 | 63 | 30 | 11 | 1 |
{-# htermination (enumFromThenOrdering :: Ordering -> Ordering -> (List Ordering)) #-}
import qualified Prelude
-- Self-contained re-definitions of Prelude types for the termination checker.
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
-- Sign/magnitude integers built from Peano naturals.
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
data Ordering = LT | EQ | GT ;
-- | Infinite list of repeated applications of the function to the seed.
iterate :: (a -> a) -> a -> (List a);
iterate f x = let next = f x in Cons x (iterate f next);
-- | Subtraction of naturals, producing a (possibly negative) integer.
primMinusNat :: Nat -> Nat -> MyInt;
primMinusNat Zero Zero = Pos Zero;
primMinusNat Zero (Succ y) = Neg (Succ y);
primMinusNat (Succ x) Zero = Pos (Succ x);
primMinusNat (Succ x) (Succ y) = primMinusNat x y;
-- | Addition of Peano naturals.
primPlusNat :: Nat -> Nat -> Nat;
primPlusNat Zero Zero = Zero;
primPlusNat Zero (Succ y) = Succ y;
primPlusNat (Succ x) Zero = Succ x;
primPlusNat (Succ x) (Succ y) = Succ (Succ (primPlusNat x y));
-- | Integer subtraction over the sign/magnitude representation.
primMinusInt :: MyInt -> MyInt -> MyInt;
primMinusInt (Pos x) (Neg y) = Pos (primPlusNat x y);
primMinusInt (Neg x) (Pos y) = Neg (primPlusNat x y);
primMinusInt (Neg x) (Neg y) = primMinusNat y x;
primMinusInt (Pos x) (Pos y) = primMinusNat x y;
-- | Subtraction operator ('-') for 'MyInt'.
msMyInt :: MyInt -> MyInt -> MyInt
msMyInt = primMinusInt;
-- | Integer addition over the sign/magnitude representation.
primPlusInt :: MyInt -> MyInt -> MyInt;
primPlusInt (Pos x) (Neg y) = primMinusNat x y;
primPlusInt (Neg x) (Pos y) = primMinusNat y x;
primPlusInt (Neg x) (Neg y) = Neg (primPlusNat x y);
primPlusInt (Pos x) (Pos y) = Pos (primPlusNat x y);
-- | Addition operator ('+') for 'MyInt'.
psMyInt :: MyInt -> MyInt -> MyInt
psMyInt = primPlusInt;
-- | The arithmetic sequence n, m, m+(m-n), ... (infinite).
numericEnumFromThen n m = iterate (psMyInt (msMyInt m n)) n;
-- | Total ordering on Peano naturals.
primCmpNat :: Nat -> Nat -> Ordering;
primCmpNat Zero Zero = EQ;
primCmpNat Zero (Succ y) = LT;
primCmpNat (Succ x) Zero = GT;
primCmpNat (Succ x) (Succ y) = primCmpNat x y;
-- | Total ordering on 'MyInt'; note that +0 and -0 compare equal.
primCmpInt :: MyInt -> MyInt -> Ordering;
primCmpInt (Pos Zero) (Pos Zero) = EQ;
primCmpInt (Pos Zero) (Neg Zero) = EQ;
primCmpInt (Neg Zero) (Pos Zero) = EQ;
primCmpInt (Neg Zero) (Neg Zero) = EQ;
primCmpInt (Pos x) (Pos y) = primCmpNat x y;
primCmpInt (Pos x) (Neg y) = GT;
primCmpInt (Neg x) (Pos y) = LT;
primCmpInt (Neg x) (Neg y) = primCmpNat y x;
-- | 'compare' for 'MyInt'.
compareMyInt :: MyInt -> MyInt -> Ordering
compareMyInt = primCmpInt;
-- | Structural equality on 'Ordering' ('==').
esEsOrdering :: Ordering -> Ordering -> MyBool
esEsOrdering LT LT = MyTrue;
esEsOrdering LT EQ = MyFalse;
esEsOrdering LT GT = MyFalse;
esEsOrdering EQ LT = MyFalse;
esEsOrdering EQ EQ = MyTrue;
esEsOrdering EQ GT = MyFalse;
esEsOrdering GT LT = MyFalse;
esEsOrdering GT EQ = MyFalse;
esEsOrdering GT GT = MyTrue;
-- | Boolean negation.
not :: MyBool -> MyBool;
not MyTrue = MyFalse;
not MyFalse = MyTrue;
-- | Inequality on 'Ordering' ('/=').
fsEsOrdering :: Ordering -> Ordering -> MyBool
fsEsOrdering x y = not (esEsOrdering x y);
-- | @x >= y@: true unless 'compareMyInt' yields 'LT'.
gtEsMyInt :: MyInt -> MyInt -> MyBool
gtEsMyInt x y = fsEsOrdering (compareMyInt x y) LT;
flip :: (c -> b -> a) -> b -> c -> a;
flip f x y = f y x;
-- | @x <= y@: true unless 'compareMyInt' yields 'GT'.
ltEsMyInt :: MyInt -> MyInt -> MyBool
ltEsMyInt x y = fsEsOrdering (compareMyInt x y) GT;
-- Descending sequence: keep elements that are >= the upper bound xy.
numericEnumFromThenToP0 xy xz yu MyTrue = flip gtEsMyInt xy;
otherwise :: MyBool;
otherwise = MyTrue;
-- Ascending sequence (second element > first): keep elements <= the bound.
numericEnumFromThenToP1 xy xz yu MyTrue = flip ltEsMyInt xy;
numericEnumFromThenToP1 xy xz yu MyFalse = numericEnumFromThenToP0 xy xz yu otherwise;
numericEnumFromThenToP2 xy xz yu = numericEnumFromThenToP1 xy xz yu (gtEsMyInt xz yu);
-- | Predicate used by 'numericEnumFromThenTo' to cut the infinite sequence.
numericEnumFromThenToP xy xz yu = numericEnumFromThenToP2 xy xz yu;
-- Helper chain for 'takeWhile', one equation per decision point, as emitted
-- by the benchmark generator.
takeWhile0 p x xs MyTrue = Nil;
takeWhile1 p x xs MyTrue = Cons x (takeWhile p xs);
takeWhile1 p x xs MyFalse = takeWhile0 p x xs otherwise;
takeWhile2 p (Cons x xs) = takeWhile1 p x xs (p x);
takeWhile3 p Nil = Nil;
takeWhile3 xw xx = takeWhile2 xw xx;
-- | Longest prefix whose elements all satisfy the predicate.
takeWhile :: (a -> MyBool) -> (List a) -> (List a);
takeWhile p Nil = takeWhile3 p Nil;
takeWhile p (Cons x xs) = takeWhile2 p (Cons x xs);
-- | Bounded arithmetic sequence from n, step n'-n, up to (or down to) m.
numericEnumFromThenTo n n' m = takeWhile (numericEnumFromThenToP m n' n) (numericEnumFromThen n n');
enumFromThenToMyInt :: MyInt -> MyInt -> MyInt -> (List MyInt)
enumFromThenToMyInt = numericEnumFromThenTo;
-- | Enum index of each 'Ordering' constructor (LT=0, EQ=1, GT=2).
fromEnumOrdering :: Ordering -> MyInt
fromEnumOrdering LT = Pos Zero;
fromEnumOrdering EQ = Pos (Succ Zero);
fromEnumOrdering GT = Pos (Succ (Succ Zero));
-- | Apply a function to every element of the list.
map :: (b -> a) -> (List b) -> (List a);
map g (Cons y ys) = Cons (g y) (map g ys);
map g Nil = Nil;
-- | Structural equality on Peano naturals.
primEqNat :: Nat -> Nat -> MyBool;
primEqNat Zero Zero = MyTrue;
primEqNat Zero (Succ y) = MyFalse;
primEqNat (Succ x) Zero = MyFalse;
primEqNat (Succ x) (Succ y) = primEqNat x y;
-- | Equality on 'MyInt'; +0 and -0 are considered equal, and any
-- remaining mixed-sign combination falls through to the catch-all.
primEqInt :: MyInt -> MyInt -> MyBool;
primEqInt (Pos (Succ x)) (Pos (Succ y)) = primEqNat x y;
primEqInt (Neg (Succ x)) (Neg (Succ y)) = primEqNat x y;
primEqInt (Pos Zero) (Neg Zero) = MyTrue;
primEqInt (Neg Zero) (Pos Zero) = MyTrue;
primEqInt (Neg Zero) (Neg Zero) = MyTrue;
primEqInt (Pos Zero) (Pos Zero) = MyTrue;
primEqInt vv vw = MyFalse;
-- | '(==)' for 'MyInt'.
esEsMyInt :: MyInt -> MyInt -> MyBool
esEsMyInt = primEqInt;
-- Decision chain for 'toEnumOrdering' as emitted by the generator.
-- NOTE(review): several equations below (e.g. the second 'toEnum3',
-- 'toEnum5' and 'toEnumOrdering' equations) are unreachable because the
-- preceding equation matches any argument; kept verbatim since this file
-- is a generated termination benchmark.
toEnum0 MyTrue vx = GT;
toEnum1 vx = toEnum0 (esEsMyInt vx (Pos (Succ (Succ Zero)))) vx;
toEnum2 MyTrue vy = EQ;
toEnum2 vz wu = toEnum1 wu;
toEnum3 vy = toEnum2 (esEsMyInt vy (Pos (Succ Zero))) vy;
toEnum3 wv = toEnum1 wv;
toEnum4 MyTrue ww = LT;
toEnum4 wx wy = toEnum3 wy;
toEnum5 ww = toEnum4 (esEsMyInt ww (Pos Zero)) ww;
toEnum5 wz = toEnum3 wz;
-- | Inverse of 'fromEnumOrdering' (0 -> LT, 1 -> EQ, 2 -> GT).
toEnumOrdering :: MyInt -> Ordering
toEnumOrdering ww = toEnum5 ww;
toEnumOrdering vy = toEnum3 vy;
toEnumOrdering vx = toEnum1 vx;
-- | Bounded enumeration of 'Ordering' values via the 'MyInt' encoding.
enumFromThenToOrdering :: Ordering -> Ordering -> Ordering -> (List Ordering)
enumFromThenToOrdering x y z = map toEnumOrdering (enumFromThenToMyInt (fromEnumOrdering x) (fromEnumOrdering y) (fromEnumOrdering z));
-- | enumFromThen for 'Ordering': upper bound is fixed at the maximum, 'GT'.
enumFromThenOrdering :: Ordering -> Ordering -> (List Ordering)
enumFromThenOrdering x y = enumFromThenToOrdering x y GT;
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/enumFromThen_7.hs | mit | 5,579 | 0 | 13 | 1,133 | 2,489 | 1,299 | 1,190 | 129 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGAnimatedNumberList
(js_getBaseVal, getBaseVal, js_getAnimVal, getAnimVal,
SVGAnimatedNumberList, castToSVGAnimatedNumberList,
gTypeSVGAnimatedNumberList)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
-- Raw JavaScript getter for the @baseVal@ property.
foreign import javascript unsafe "$1[\"baseVal\"]" js_getBaseVal ::
        SVGAnimatedNumberList -> IO (Nullable SVGNumberList)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGAnimatedNumberList.baseVal Mozilla SVGAnimatedNumberList.baseVal documentation>
getBaseVal ::
           (MonadIO m) => SVGAnimatedNumberList -> m (Maybe SVGNumberList)
getBaseVal self
  = liftIO (nullableToMaybe <$> (js_getBaseVal (self)))
-- Raw JavaScript getter for the @animVal@ property.
foreign import javascript unsafe "$1[\"animVal\"]" js_getAnimVal ::
        SVGAnimatedNumberList -> IO (Nullable SVGNumberList)
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGAnimatedNumberList.animVal Mozilla SVGAnimatedNumberList.animVal documentation>
getAnimVal ::
           (MonadIO m) => SVGAnimatedNumberList -> m (Maybe SVGNumberList)
getAnimVal self
  = liftIO (nullableToMaybe <$> (js_getAnimVal (self)))
-- | Types and algorithms for Markov logic networks. The module has quite a
-- few 'fromStrings' methods that take strings and parse them into data
-- structure to make it easier to play with Markov logic in the repl.
--
-- For Markov logic, data is often represented with a Set (Predicate a, Bool).
-- This is prefered to Map since it simplifies queries such as
-- "P(Cancer(Bob) | !Cancer(Bob))", where a map would not allow these two
-- different predicate -> value mappings.
module Faun.MarkovLogic
( MLN(..)
, tell
, allPredicates
, allGroundings
, allWGroundings
, fromStrings
, groundNetwork
, factors
, ask
, marginal
, joint
, conditional
, constructNetwork
) where
import qualified Data.Text as T
import qualified Data.Map as Map
import Data.Map (Map)
import qualified Data.Set as Set
import Data.Set (Set)
import Data.List (partition)
import Control.Applicative ((<|>))
import qualified Faun.FOL as FOL
import Faun.FOL (FOL)
import qualified Faun.Formula as F
import Faun.Predicate
import Faun.Term
import Faun.Symbols
import Faun.Network
import Faun.ShowTxt
import qualified Faun.FormulaSet as FS
import Faun.Parser.Probability
import Faun.Parser.FOL
-- | A Markov logic network is a set of first-order logical formulas associated
-- with a weight.
data MLN = MLN { network :: Map FOL Double }
instance Show MLN where
  show = T.unpack . fmtMLN
instance ShowTxt MLN where
  showTxt = fmtMLN
-- | Prints a Markov logic network, one weighted formula per line.
fmtMLN :: MLN -> T.Text
fmtMLN (MLN m) =
  Map.foldrWithKey
    (\k v acc -> T.concat [fmtWFormula symbolic k v, "\n", acc]) "" m
-- | Renders a single weighted formula, e.g. @formula, 1.5.@.
fmtWFormula :: Symbols -> FOL -> Double -> T.Text
fmtWFormula sym fm weight =
  T.concat [F.prettyPrintFm sym fm, ", ", weightTxt, "."]
  where weightTxt = T.pack (show weight)
-- | Adds a formula to the markov logic network using the parser. If the parser
-- fails, the function returns the MLN unmodified. Inserting a formula that is
-- already present overwrites its weight.
tell :: String -> MLN -> MLN
tell s mln@(MLN m) = case parseWFOL s of
  Left _ -> mln
  Right (f, w) -> MLN $ Map.insert f w m
-- | Gathers all the predicates of a markov logic network in a set.
allPredicates :: MLN -> Set Predicate
allPredicates (MLN m) =
  -- Use foldrWithKey: Map.foldWithKey is deprecated (removed in
  -- containers >= 0.6) and every other fold in this module already
  -- uses the foldr variants. Behavior is identical.
  Map.foldrWithKey (\k _ acc -> Set.union (F.atoms k) acc) Set.empty m
-- | Get all groundings from a Markov logic network.
allGroundings :: [Term] -> MLN -> Set FOL
allGroundings ts (MLN m) = FS.allGroundings ts $ Map.keysSet m
-- | Get all groundings from a Markov logic network, keeping the weights
-- assigned to the original formula in the Markov logic network. Every
-- grounding of a formula inherits that formula's weight.
allWGroundings :: [Term] -> MLN -> MLN
allWGroundings ts (MLN m) =
  MLN $ Map.foldrWithKey
    (\k v a -> Set.foldr' (\k' a' -> Map.insert k' v a') a (FOL.groundings ts k))
    Map.empty
    m
-- | Builds a ground network for Markov logic: maps every ground predicate
-- to its Markov blanket (the set of its neighbours).
groundNetwork :: [Term] -> MLN -> UNetwork Predicate
groundNetwork ts (MLN m) = Set.foldr' (\p acc -> Map.insert p (mb p) acc) Map.empty ps
  where
    -- All groundings from all formulas in the knowledge base:
    gs = Set.foldr' (\g acc -> Set.union (FOL.groundings ts g) acc) Set.empty (Map.keysSet m)
    -- All the predicates
    ps = FS.allPredicates gs
    -- The Markov blanket of predicate 'p', that is: all its neighbours.
    mb p = Set.delete p $ FS.allPredicates $ Set.filter (FOL.hasPred p) gs
-- | Returns all the factors in the MLN. Instead of mapping sets of predicates
-- to weights, this function maps them to the formula (the MLN provides the
-- weight).
factors :: [Term] -> MLN -> Map (Set Predicate) FOL
factors ts (MLN m) = fs
  where
    -- All groundings mapped to their original formula
    gs = Set.foldr' (\k a -> Set.foldr' (`Map.insert` k) a (FOL.groundings ts k)) Map.empty (Map.keysSet m)
    -- Separate the formula in sets of predicates:
    fs = Map.foldrWithKey (\k v a -> Map.insert (F.atoms k) v a) Map.empty gs
-- | All possible assignments to the predicates in the network.
allAss ::
  [Term] ->
  MLN ->
  [Map Predicate Bool]
allAss ts mln = FOL.allAss $ allGroundings ts mln
-- | Helper function to facilitate answering conditional & joint probability
-- queries from the console. See 'Sphinx.Parser.parseCondQuery' and
-- 'Sphinx.Parser.parseJointQuery' to understand what kind of strings can
-- be parsed. The conditional parser is tried first, then the joint parser.
ask
  :: MLN -- ^ A Markov logic network.
  -> [T.Text] -- ^ A list of constants to ground the Markov logic network.
  -> String -- ^ A query to be parsed by 'Sphinx.Parser.parseCondQuery' or 'Sphinx.Parser.parseJointQuery'.
  -> Maybe Double -- ^ Either a double in [0.0, 1.0] or Nothing if the parsers fail.
ask mln terms query = pq <|> pj
  where
    ts = map Constant terms
    pq = case parseCondQuery query of
      Left _ -> Nothing; Right (q, c) -> Just $ conditional mln ts q c
    pj = case parseJointQuery query of
      Left _ -> Nothing; Right q -> Just $ joint mln ts q
-- | Splits the assignments into those that are consistent with the query
-- and those that are not (helper for the direct inference methods below).
partitionAss
  :: Set (Predicate, Bool) -- ^ The (predicate, truth value) pairs to match.
  -> [Map Predicate Bool] -- ^ All possible assignments.
  -> ([Map Predicate Bool], [Map Predicate Bool]) -- ^ (matching, non-matching) assignments.
partitionAss query = partition valid
  where
    -- Check if an assignment fits the query:
    valid ass = Set.foldr' (\(k, v) acc -> acc && case Map.lookup k ass of Just b -> v == b; _ -> False) True query
-- | Direct method of computing joint probabilities for Markov logic (does not
-- scale!): enumerates every assignment and normalizes the exponentiated
-- weights of the satisfied ground formulas.
joint
  :: MLN -- ^ The Markov logic network.
  -> [Term] -- ^ List of constants to ground the Markov logic network.
  -> Set (Predicate, Bool) -- ^ The set of query assignments (predicate, truth value).
  -> Double -- ^ A probability in [0.0, 1.0]
joint mln ts query = vq / z
  where
    vq = sum $ map evalNet toEval
    vo = sum $ map evalNet others
    z = vq + vo
    -- All possible assignments
    allass = allAss ts fs
    -- Assignments to evaluate:
    (toEval, others) = partitionAss query allass
    -- The formula (the factors) to evaluate
    fs = allWGroundings ts mln
    -- Value of the network for a given assignment.
    evalNet ass' = exp $ Map.foldrWithKey (\f w a -> val f w ass' + a) 0.0 (network fs)
    -- Values of a factor
    val f w ass' = let v = FOL.eval ass' f in
      if v == FOL.top then w
      else if v == FOL.bot then 0.0
      else error ("Eval failed for " ++ show v ++ " given " ++ show ass')
-- | Direct method of computing marginal probabilities for Markov logic (does
-- not scale!). Defined as a single-assignment 'joint' query.
marginal
  :: MLN -- ^ The Markov logic network.
  -> [Term] -- ^ List of constants to ground the Markov logic network.
  -> Predicate -- ^ The predicate to query.
  -> Bool -- ^ Truth value of the predicate.
  -> Double -- ^ A probability in [0.0, 1.0]
marginal mln ts p b = joint mln ts $ Set.fromList [(p, b)]
-- | Direct method of computing conditional probabilities for Markov logic (does
-- not scale!): P(query | cond) = P(query, cond) / P(cond), computed by
-- enumerating all assignments.
conditional
  :: MLN -- ^ The Markov logic network.
  -> [Term] -- ^ List of constants to ground the Markov logic network.
  -> Set (Predicate, Bool) -- ^ The set of assignments for the query.
  -> Set (Predicate, Bool) -- ^ Conditions.
  -> Double -- ^ A probability in [0.0, 1.0]
conditional mln ts query cond = vnum / vden
  where
    vnum = sum $ map evalNet numerator
    vden = sum $ map evalNet denom
    -- All possible assignments
    allass = allAss ts fs
    -- Assignments to evaluate:
    (numerator, _) = partitionAss (Set.union query cond) allass
    (denom, _ ) = partitionAss cond allass
    -- The formula (the factors) to evaluate
    fs = allWGroundings ts mln
    -- Value of the network for a given assignment.
    evalNet ass' = exp $ Map.foldrWithKey (\f w a -> val f w ass' + a) 0.0 (network fs)
    -- Values of a factor
    val f w ass' = let v = FOL.eval ass' f in
      if v == FOL.top then w
      else if v == FOL.bot then 0.0
      else error ("Eval failed for " ++ show v ++ " given " ++ show ass')
-- | Algorithm to construct a network for Markov logic network inference,
-- expanding from the query predicates and stopping at evidence predicates.
--
-- Reference:
-- P Domingos and D Lowd, Markov Logic: An Interface Layer for Artificial
-- Intelligence, 2009, Morgan & Claypool. p. 26.
constructNetwork :: Set Predicate -> [Predicate] -> [Term] -> MLN -> UNetwork Predicate
constructNetwork query evidence ts (MLN m) = Set.foldr' (\p acc -> Map.insert p (mb p) acc) Map.empty ps
  where
    -- All groundings from all formulas in the knowledge base:
    gs = Set.foldr' (\g acc -> Set.union (FOL.groundings ts g) acc) Set.empty (Map.keysSet m)
    -- Predicates in the network
    ps = step query query
    -- The Markov blanket of predicate 'p', that is: all its neighbours.
    mb p = Set.delete p $ FS.allPredicates $ Set.filter (FOL.hasPred p) gs
    -- One step of the algorithm
    step f g
      | Set.null f = g
      | Set.findMin f `elem` evidence = step (Set.deleteMin f) g
      | otherwise =
          let mbq = mb $ Set.findMin f in
          step
            (Set.union (Set.deleteMin f) (Set.intersection mbq g))
            (Set.union g mbq)
-- | Builds a weighted knowledge base from a list of strings. If
-- 'Sphinx.Parser.parseWFOL' fails to parse a formula, it is ignored.
fromStrings
  :: [String] -- ^ A set of string, each of which is a first-order logic formula with a weight. Is being parsed by 'Sphinx.Parser.parseWFOL'.
  -> MLN -- ^ A Markov logic network.
fromStrings strs = MLN (foldr insertParsed Map.empty strs)
  where
    -- Silently skip strings that fail to parse.
    insertParsed str acc = case parseWFOL str of
      Left _ -> acc
      Right (f, w) -> Map.insert f w acc
| PhDP/Sphinx-AI | Faun/MarkovLogic.hs | mit | 9,465 | 0 | 16 | 2,098 | 2,387 | 1,282 | 1,105 | 154 | 3 |
{-# LANGUAGE BangPatterns, DataKinds, DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module Hadoop.Protos.HdfsProtos.ReplicaStateProto (ReplicaStateProto(..)) where
import Prelude ((+), (/), (.))
import qualified Prelude as Prelude'
import qualified Data.Typeable as Prelude'
import qualified Data.Data as Prelude'
import qualified Text.ProtocolBuffers.Header as P'
-- | Replica states mirrored from the @hadoop.hdfs.ReplicaStateProto@
-- protobuf enum (generated by hprotoc).
data ReplicaStateProto = FINALIZED
                       | RBW
                       | RWR
                       | RUR
                       | TEMPORARY
                       deriving (Prelude'.Read, Prelude'.Show, Prelude'.Eq, Prelude'.Ord, Prelude'.Typeable, Prelude'.Data)
instance P'.Mergeable ReplicaStateProto
instance Prelude'.Bounded ReplicaStateProto where
  minBound = FINALIZED
  maxBound = TEMPORARY
-- Protobuf default is the first enum value.
instance P'.Default ReplicaStateProto where
  defaultValue = FINALIZED
-- | Decode a wire tag into a 'ReplicaStateProto'; unknown tags map to
-- 'Prelude'.Nothing'.
toMaybe'Enum :: Prelude'.Int -> P'.Maybe ReplicaStateProto
toMaybe'Enum tag = Prelude'.lookup tag table
  where table = [(0, FINALIZED), (1, RBW), (2, RWR), (3, RUR), (4, TEMPORARY)]
-- Generated Enum instance; 'toEnum' errors on tags outside 0..4, and the
-- catch-all 'succ'/'pred' equations error at the range boundaries.
instance Prelude'.Enum ReplicaStateProto where
  fromEnum FINALIZED = 0
  fromEnum RBW = 1
  fromEnum RWR = 2
  fromEnum RUR = 3
  fromEnum TEMPORARY = 4
  toEnum
   = P'.fromMaybe (Prelude'.error "hprotoc generated code: toEnum failure for type Hadoop.Protos.HdfsProtos.ReplicaStateProto") .
      toMaybe'Enum
  succ FINALIZED = RBW
  succ RBW = RWR
  succ RWR = RUR
  succ RUR = TEMPORARY
  succ _ = Prelude'.error "hprotoc generated code: succ failure for type Hadoop.Protos.HdfsProtos.ReplicaStateProto"
  pred RBW = FINALIZED
  pred RWR = RBW
  pred RUR = RWR
  pred TEMPORARY = RUR
  pred _ = Prelude'.error "hprotoc generated code: pred failure for type Hadoop.Protos.HdfsProtos.ReplicaStateProto"
-- Wire (de)serialization delegates to the Int tag; 14 is the protobuf
-- field type code for enums.
instance P'.Wire ReplicaStateProto where
  wireSize ft' enum = P'.wireSize ft' (Prelude'.fromEnum enum)
  wirePut ft' enum = P'.wirePut ft' (Prelude'.fromEnum enum)
  wireGet 14 = P'.wireGetEnum toMaybe'Enum
  wireGet ft' = P'.wireGetErr ft'
  wireGetPacked 14 = P'.wireGetPackedEnum toMaybe'Enum
  wireGetPacked ft' = P'.wireGetErr ft'
instance P'.GPB ReplicaStateProto
instance P'.MessageAPI msg' (msg' -> ReplicaStateProto) ReplicaStateProto where
  getVal m' f' = f' m'
-- Reflection metadata used by the protocol-buffers text format machinery.
instance P'.ReflectEnum ReplicaStateProto where
  reflectEnum = [(0, "FINALIZED", FINALIZED), (1, "RBW", RBW), (2, "RWR", RWR), (3, "RUR", RUR), (4, "TEMPORARY", TEMPORARY)]
  reflectEnumInfo _
   = P'.EnumInfo (P'.makePNF (P'.pack ".hadoop.hdfs.ReplicaStateProto") ["Hadoop", "Protos"] ["HdfsProtos"] "ReplicaStateProto")
      ["Hadoop", "Protos", "HdfsProtos", "ReplicaStateProto.hs"]
      [(0, "FINALIZED"), (1, "RBW"), (2, "RWR"), (3, "RUR"), (4, "TEMPORARY")]
instance P'.TextType ReplicaStateProto where
  tellT = P'.tellShow
  getT = P'.getRead
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE TupleSections #-}
-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-servicecatalog-acceptedportfolioshare.html
module Stratosphere.Resources.ServiceCatalogAcceptedPortfolioShare where
import Stratosphere.ResourceImports
-- | Full data type definition for ServiceCatalogAcceptedPortfolioShare. See
-- 'serviceCatalogAcceptedPortfolioShare' for a more convenient constructor.
data ServiceCatalogAcceptedPortfolioShare =
  ServiceCatalogAcceptedPortfolioShare
  { _serviceCatalogAcceptedPortfolioShareAcceptLanguage :: Maybe (Val Text) -- ^ Optional language code for the operation.
  , _serviceCatalogAcceptedPortfolioSharePortfolioId :: Val Text -- ^ Required portfolio identifier.
  } deriving (Show, Eq)
-- Serializes to CloudFormation JSON; the optional AcceptLanguage key is
-- dropped when unset.
instance ToResourceProperties ServiceCatalogAcceptedPortfolioShare where
  toResourceProperties ServiceCatalogAcceptedPortfolioShare{..} =
    ResourceProperties
    { resourcePropertiesType = "AWS::ServiceCatalog::AcceptedPortfolioShare"
    , resourcePropertiesProperties =
        hashMapFromList $ catMaybes
        [ fmap (("AcceptLanguage",) . toJSON) _serviceCatalogAcceptedPortfolioShareAcceptLanguage
        , (Just . ("PortfolioId",) . toJSON) _serviceCatalogAcceptedPortfolioSharePortfolioId
        ]
    }
-- | Constructor for 'ServiceCatalogAcceptedPortfolioShare' containing
-- required fields as arguments; the optional accept-language is left unset.
serviceCatalogAcceptedPortfolioShare
  :: Val Text -- ^ 'scapsPortfolioId'
  -> ServiceCatalogAcceptedPortfolioShare
serviceCatalogAcceptedPortfolioShare pid =
  ServiceCatalogAcceptedPortfolioShare
  { _serviceCatalogAcceptedPortfolioSharePortfolioId = pid
  , _serviceCatalogAcceptedPortfolioShareAcceptLanguage = Nothing
  }
-- | Lens for the optional accept-language property.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-servicecatalog-acceptedportfolioshare.html#cfn-servicecatalog-acceptedportfolioshare-acceptlanguage
scapsAcceptLanguage :: Lens' ServiceCatalogAcceptedPortfolioShare (Maybe (Val Text))
scapsAcceptLanguage = lens _serviceCatalogAcceptedPortfolioShareAcceptLanguage (\s a -> s { _serviceCatalogAcceptedPortfolioShareAcceptLanguage = a })
-- | Lens for the required portfolio identifier.
-- http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-servicecatalog-acceptedportfolioshare.html#cfn-servicecatalog-acceptedportfolioshare-portfolioid
scapsPortfolioId :: Lens' ServiceCatalogAcceptedPortfolioShare (Val Text)
scapsPortfolioId = lens _serviceCatalogAcceptedPortfolioSharePortfolioId (\s a -> s { _serviceCatalogAcceptedPortfolioSharePortfolioId = a })
| frontrowed/stratosphere | library-gen/Stratosphere/Resources/ServiceCatalogAcceptedPortfolioShare.hs | mit | 2,553 | 0 | 15 | 249 | 279 | 160 | 119 | 30 | 1 |
{- |
Module : $Header$
Copyright : (c) Felix Gabriel Mance
License : GPLv2 or higher, see LICENSE.txt
Maintainer : f.mance@jacobs-university.de
Stability : provisional
Portability : portable
Keywords used for XML conversions
-}
module OWL2.XMLKeywords where
-- Keywords for IRIs, prefixes, entities, literals and data ranges.
ontologyIRIK :: String
ontologyIRIK = "ontologyIRI"
iriK :: String
iriK = "IRI"
abbreviatedIRIK :: String
abbreviatedIRIK = "abbreviatedIRI"
nodeIDK :: String
nodeIDK = "nodeID"
prefixK :: String
prefixK = "Prefix"
importK :: String
importK = "Import"
classK :: String
classK = "Class"
datatypeK :: String
datatypeK = "Datatype"
namedIndividualK :: String
namedIndividualK = "NamedIndividual"
objectPropertyK :: String
objectPropertyK = "ObjectProperty"
dataPropertyK :: String
dataPropertyK = "DataProperty"
annotationPropertyK :: String
annotationPropertyK = "AnnotationProperty"
anonymousIndividualK :: String
anonymousIndividualK = "AnonymousIndividual"
facetRestrictionK :: String
facetRestrictionK = "FacetRestriction"
literalK :: String
literalK = "Literal"
declarationK :: String
declarationK = "Declaration"
annotationK :: String
annotationK = "Annotation"
objectInverseOfK :: String
objectInverseOfK = "ObjectInverseOf"
datatypeRestrictionK :: String
datatypeRestrictionK = "DatatypeRestriction"
dataComplementOfK :: String
dataComplementOfK = "DataComplementOf"
dataOneOfK :: String
dataOneOfK = "DataOneOf"
dataIntersectionOfK :: String
dataIntersectionOfK = "DataIntersectionOf"
dataUnionOfK :: String
dataUnionOfK = "DataUnionOf"
-- Keywords for class expressions (object and data restrictions).
objectIntersectionOfK :: String
objectIntersectionOfK = "ObjectIntersectionOf"
objectUnionOfK :: String
objectUnionOfK = "ObjectUnionOf"
objectComplementOfK :: String
objectComplementOfK = "ObjectComplementOf"
objectOneOfK :: String
objectOneOfK = "ObjectOneOf"
objectSomeValuesFromK :: String
objectSomeValuesFromK = "ObjectSomeValuesFrom"
objectAllValuesFromK :: String
objectAllValuesFromK = "ObjectAllValuesFrom"
objectHasValueK :: String
objectHasValueK = "ObjectHasValue"
objectHasSelfK :: String
objectHasSelfK = "ObjectHasSelf"
objectMinCardinalityK :: String
objectMinCardinalityK = "ObjectMinCardinality"
objectMaxCardinalityK :: String
objectMaxCardinalityK = "ObjectMaxCardinality"
objectExactCardinalityK :: String
objectExactCardinalityK = "ObjectExactCardinality"
dataSomeValuesFromK :: String
dataSomeValuesFromK = "DataSomeValuesFrom"
dataAllValuesFromK :: String
dataAllValuesFromK = "DataAllValuesFrom"
dataHasValueK :: String
dataHasValueK = "DataHasValue"
dataMinCardinalityK :: String
dataMinCardinalityK = "DataMinCardinality"
dataMaxCardinalityK :: String
dataMaxCardinalityK = "DataMaxCardinality"
dataExactCardinalityK :: String
dataExactCardinalityK = "DataExactCardinality"
-- Keywords for axioms (class, property, individual and annotation axioms).
subClassOfK :: String
subClassOfK = "SubClassOf"
equivalentClassesK :: String
equivalentClassesK = "EquivalentClasses"
disjointClassesK :: String
disjointClassesK = "DisjointClasses"
disjointUnionK :: String
disjointUnionK = "DisjointUnion"
datatypeDefinitionK :: String
datatypeDefinitionK = "DatatypeDefinition"
hasKeyK :: String
hasKeyK = "HasKey"
subObjectPropertyOfK :: String
subObjectPropertyOfK = "SubObjectPropertyOf"
objectPropertyChainK :: String
objectPropertyChainK = "ObjectPropertyChain"
equivalentObjectPropertiesK :: String
equivalentObjectPropertiesK = "EquivalentObjectProperties"
disjointObjectPropertiesK :: String
disjointObjectPropertiesK = "DisjointObjectProperties"
objectPropertyDomainK :: String
objectPropertyDomainK = "ObjectPropertyDomain"
objectPropertyRangeK :: String
objectPropertyRangeK = "ObjectPropertyRange"
inverseObjectPropertiesK :: String
inverseObjectPropertiesK = "InverseObjectProperties"
functionalObjectPropertyK :: String
functionalObjectPropertyK = "FunctionalObjectProperty"
inverseFunctionalObjectPropertyK :: String
inverseFunctionalObjectPropertyK = "InverseFunctionalObjectProperty"
reflexiveObjectPropertyK :: String
reflexiveObjectPropertyK = "ReflexiveObjectProperty"
irreflexiveObjectPropertyK :: String
irreflexiveObjectPropertyK = "IrreflexiveObjectProperty"
symmetricObjectPropertyK :: String
symmetricObjectPropertyK = "SymmetricObjectProperty"
asymmetricObjectPropertyK :: String
asymmetricObjectPropertyK = "AsymmetricObjectProperty"
antisymmetricObjectPropertyK :: String
antisymmetricObjectPropertyK = "AntisymmetricObjectProperty"
transitiveObjectPropertyK :: String
transitiveObjectPropertyK = "TransitiveObjectProperty"
subDataPropertyOfK :: String
subDataPropertyOfK = "SubDataPropertyOf"
equivalentDataPropertiesK :: String
equivalentDataPropertiesK = "EquivalentDataProperties"
disjointDataPropertiesK :: String
disjointDataPropertiesK = "DisjointDataProperties"
dataPropertyDomainK :: String
dataPropertyDomainK = "DataPropertyDomain"
dataPropertyRangeK :: String
dataPropertyRangeK = "DataPropertyRange"
functionalDataPropertyK :: String
functionalDataPropertyK = "FunctionalDataProperty"
dataPropertyAssertionK :: String
dataPropertyAssertionK = "DataPropertyAssertion"
negativeDataPropertyAssertionK :: String
negativeDataPropertyAssertionK = "NegativeDataPropertyAssertion"
objectPropertyAssertionK :: String
objectPropertyAssertionK = "ObjectPropertyAssertion"
negativeObjectPropertyAssertionK :: String
negativeObjectPropertyAssertionK = "NegativeObjectPropertyAssertion"
sameIndividualK :: String
sameIndividualK = "SameIndividual"
differentIndividualsK :: String
differentIndividualsK = "DifferentIndividuals"
classAssertionK :: String
classAssertionK = "ClassAssertion"
annotationAssertionK :: String
annotationAssertionK = "AnnotationAssertion"
subAnnotationPropertyOfK :: String
subAnnotationPropertyOfK = "SubAnnotationPropertyOf"
annotationPropertyDomainK :: String
annotationPropertyDomainK = "AnnotationPropertyDomain"
annotationPropertyRangeK :: String
annotationPropertyRangeK = "AnnotationPropertyRange"
-- Keyword groupings used by the XML parser to dispatch on element names.
entityList :: [String]
entityList = [classK, datatypeK, namedIndividualK,
    objectPropertyK, dataPropertyK, annotationPropertyK]
annotationValueList :: [String]
annotationValueList = [literalK, iriK, "AbbreviatedIRI", anonymousIndividualK]
annotationSubjectList :: [String]
annotationSubjectList = [iriK, "AbbreviatedIRI", anonymousIndividualK]
individualList :: [String]
individualList = [namedIndividualK, anonymousIndividualK]
objectPropList :: [String]
objectPropList = [objectPropertyK, objectInverseOfK]
dataPropList :: [String]
dataPropList = [dataPropertyK]
dataRangeList :: [String]
dataRangeList = [datatypeK, datatypeRestrictionK, dataComplementOfK,
    dataOneOfK, dataIntersectionOfK, dataUnionOfK]
classExpressionList :: [String]
classExpressionList = [classK, objectIntersectionOfK, objectUnionOfK,
    objectComplementOfK, objectOneOfK, objectSomeValuesFromK,
    objectAllValuesFromK, objectHasValueK, objectHasSelfK,
    objectMinCardinalityK, objectMaxCardinalityK, objectExactCardinalityK,
    dataSomeValuesFromK, dataAllValuesFromK, dataHasValueK,
    dataMinCardinalityK, dataMaxCardinalityK, dataExactCardinalityK]
| nevrenato/Hets_Fork | OWL2/XMLKeywords.hs | gpl-2.0 | 7,056 | 0 | 5 | 730 | 1,017 | 621 | 396 | 180 | 1 |
module Main where
import Data.Char (toUpper)
import Data.List (delete)
import System.Directory (removeFile,
                         renameFile)
import System.Environment
import System.IO (hPutStr,
                  hClose,
                  openTempFile)
import Text.Read (readMaybe)
-- | Print the contents of the todo file to stdout.
view :: FilePath -> IO ()
view path = readFile path >>= putStr
-- *Todo> view "./resources/todo.org"
-- * I know *exactly* what you mean.
-- * Let me tell you why you're here.
-- * You're here because you know something.
-- * What you know you can't explain, but you feel it.
-- * You've felt it your entire life, that there's something wrong with the world.
-- * You don't know what it is, but it's there, like a splinter in your mind, driving you mad.
-- * It is this feeling that has brought you to me.
-- * Do you know what I'm talking about?
-- | Upper-case the first character of a string; empty strings pass through.
capitalize :: String -> String
capitalize s = case s of
  [] -> []
  c : cs -> toUpper c : cs
-- *File> capitalize "this is a todo"
-- "This is a todo"
-- | Render a todo entry as an org-mode bullet line (with trailing newline).
orgify :: String -> String
orgify item = concat ["* ", item, "\n"]
-- *File> orgify "this is a todo bullet"
-- "* this is a todo bullet\n"
-- | Append one or more todo lines to the file, each capitalized and
-- rendered as an org-mode bullet.
add :: FilePath -> String -> IO ()
add path todo = appendFile path formatted
  where formatted = unlines (map (orgify . capitalize) (lines todo))
-- | Interactively delete one entry from the todo file: show the list,
-- prompt for an index, rewrite the file and show the result.
deleteTodoTask :: FilePath -> IO ()
deleteTodoTask file = do
  putStrLn "Todo list:"
  view file
  htodos <- loadTodos file
  putStr ("Destroy todo list? (0," ++ range htodos ++ ")? ")
  num <- getLine
  -- Guard against non-numeric or out-of-range input instead of crashing
  -- on the partial 'read'.
  case readMaybe num of
    Just n | n >= 0 && n < length htodos -> do
      del file n
      putStrLn "Updated todo list:"
      view file
    _ -> putStrLn ("Invalid index: " ++ num)
-- | Read the todo file and pair each line with its 0-based index.
loadTodos :: FilePath -> IO [(Int, String)]
loadTodos file = fmap todos (readFile file)
-- | Rewrite @file@ with entry @n@ removed. Writes to a temp file first so
-- the original is only replaced once the new contents are fully written.
del :: FilePath -> Int -> IO ()
del file n = do
  htodos <- loadTodos file
  -- Bounds check: the original indexed with (!!) and crashed on an
  -- out-of-range index; leave the file untouched instead.
  if n < 0 || n >= length htodos
    then putStrLn ("No todo with index " ++ show n)
    else do
      (tempPath, tempHandle) <- openTempFile "." "temp"
      hPutStr tempHandle (newTodos htodos n)
      hClose tempHandle
      removeFile file
      renameFile tempPath file
-- | Number the lines of a todo file starting at 0.
todos :: String -> [(Int, String)]
todos contents = zip indices (lines contents)
  where indices = [0 ..] :: [Int]
-- | Render the highest valid todo index (length - 1) as a string.
range :: [(Int, String)] -> String
range entries = show (length entries - 1)
-- | Render the todo list back to file contents with entry @n@ removed.
newTodos :: [(Int, String)] -> Int -> String
newTodos entries n = unlines (map snd remaining)
  where remaining = delete (entries !! n) entries
-- | CLI action: @del FILE INDEX@. Rejects missing or non-numeric arguments
-- instead of crashing on an incomplete pattern match or a partial 'read'.
todoDelete :: [String] -> IO ()
todoDelete (file:index:_) =
  case readMaybe index of
    Just n -> del file n
    Nothing -> putStrLn ("del: index must be a number, got " ++ index)
todoDelete _ = putStrLn "usage: del FILE INDEX"
-- | CLI action: @see FILE@.
todoSee :: [String] -> IO ()
todoSee (file:_) = view file
todoSee [] = putStrLn "usage: see FILE"
-- | CLI action: @add FILE WORDS...@.
todoAdd :: [String] -> IO ()
todoAdd (file:todo) = add file (unwords todo)
todoAdd [] = putStrLn "usage: add FILE TODO..."
-- | Command table mapping CLI verbs to their handlers.
dispatch :: [(String, [String] -> IO ())]
dispatch = [("del", todoDelete),
            ("see", todoSee),
            ("add", todoAdd)]
-- | Entry point: dispatch on the first CLI argument. The original used
-- irrefutable patterns that crashed on no arguments or an unknown command;
-- print a usage message instead.
main :: IO ()
main = do
  args <- getArgs
  case args of
    (command:rest) -> case lookup command dispatch of
      Just action -> action rest
      Nothing -> putStrLn ("unknown command: " ++ command)
    [] -> putStrLn "usage: todo (add|see|del) ARGS..."
| ardumont/haskell-lab | src/io/todo.hs | gpl-2.0 | 3,083 | 0 | 13 | 1,036 | 924 | 480 | 444 | 65 | 1 |
{-# LANGUAGE TemplateHaskell #-}
module HistoryGraph.Types where
import Control.Lens
import Data.Map (Map)
import qualified Data.Map as Map
-- | Index of a node within a 'History' graph.
type NodeId = Int
-- | Human-readable error description.
-- NOTE(review): a structured error sum type would carry more information.
type HistoryError = String
-- | A parameter value as stored on a node; the variants mirror
-- 'ParamLabel' (UI metadata) and 'ParamEval' (evaluated form).
data Param =
     Checkbox Bool
   | Text String
   | ZNum Integer
   | QNum Rational
   | RNum Double
   | Options Integer
   | Parent NodeId
type Params = [Param]
-- | UI label/metadata for each kind of 'Param'; 'OptionsLabel' also
-- carries the list of choice names.
data ParamLabel =
     CheckboxLabel String
   | TextLabel String
   | ZNumLabel String
   | QNumLabel String
   | RNumLabel String
   | OptionsLabel String [String]
   | ParentLabel String
type ParamLabels = [ParamLabel]
-- | A 'Param' after evaluation: a 'Parent' reference is replaced by the
-- parent node's computed value of type @a@.
data ParamEval a =
     CheckboxEval Bool
   | TextEval String
   | ZNumEval Integer
   | QNumEval Rational
   | RNumEval Double
   | OptionsEval Integer
   | ParentEval a
type ParamEvals a = [ParamEval a]
-- | One node of the history graph: which registered computation it runs,
-- its parameter sets, and its position in the graph.
data Node a = Node
    { _computationKey :: RegistryKey
    , _savedParams :: [Params] -- TODO: removing frequents
    , _currentParams :: Integer -- index into the above
    , _cachedValue :: Maybe a -- memoized result, if already computed
    , _ancestors :: [NodeId] -- used for cycle detection
    , _childNodes :: [NodeId]
    }
-- | The whole graph: all nodes keyed by id, the focused node (if any),
-- a fresh-id counter, and the computation registry.
data History a = History
    { _nodes :: Map NodeId (Node a)
    , _currentNodeId :: Maybe NodeId
    , _nextUnusedNodeId :: NodeId
    , _registry :: Registry a
    }
-- | Name under which a computation is registered.
type RegistryKey = String
-- | One registered computation: its key, the evaluation function, the
-- parameter labels for the UI, an optional shortcut key, and display text.
data Entry a = Entry
    { _key :: RegistryKey
    , _function :: (ParamEvals a -> Either HistoryError a)
    , _labels :: ParamLabels
    , _button :: Maybe Char
    , _name :: String
    , _description :: String
    }
type Registry a = [Entry a]
makeLenses ''Entry
makeLenses ''Node
makeLenses ''History | Super-Fluid/history-graph | HistoryGraph/Types.hs | gpl-3.0 | 1,603 | 0 | 11 | 410 | 415 | 249 | 166 | 58 | 0 |
{-# LANGUAGE FlexibleInstances #-}
module SecretHandshake where
import Data.Digits (digitsRev)
import Text.Read (readMaybe)
-- | Types that can be decoded into a secret-handshake action sequence.
class Handshakeable a where
  handshake :: a -> [String]
-- | A 'String' is treated as a binary literal: it must parse as a decimal
-- number whose digits are all 0 or 1, otherwise no actions are produced.
instance Handshakeable String where
  handshake xs =
    if all (\d -> d==1 || d==0) ds
    then toHandshake ds
    else []
    where
      ds = strToIs xs
-- | An 'Int' is decoded from its base-2 digits (least significant first).
instance Handshakeable Int where
  handshake = toHandshake . digitsRev 2
-- | Decode a little-endian bit list into handshake actions.
-- Bit 1 = wink, bit 2 = double blink, bit 4 = close your eyes,
-- bit 8 = jump, bit 16 = reverse the order of the preceding actions.
-- Each equation consumes one set bit by zeroing it and recursing, so the
-- clause order here is load-bearing — do not reorder.
toHandshake :: [Int] -> [String]
toHandshake [] = []
toHandshake [0] = []
toHandshake (a:b:c:d:1:ds) = reverse $ toHandshake (a:b:c:d:0:ds)
toHandshake (1:ds) = "wink" : toHandshake (0:ds)
toHandshake (_:1:ds) = "double blink" : toHandshake (0:0:ds)
toHandshake (_:_:1:ds) = "close your eyes" : toHandshake (0:0:0:ds)
toHandshake (_:_:_:1:ds) = "jump" : toHandshake (0:0:0:0:ds)
toHandshake _ = []
-- | Parse a decimal string into its digits, least significant first.
-- Unparseable input yields the empty list.
strToIs :: String -> [Int]
strToIs = maybe [] (digitsRev 10) . readMaybe
| ciderpunx/exercismo | src/SecretHandshake.hs | gpl-3.0 | 1,028 | 0 | 12 | 269 | 461 | 240 | 221 | 29 | 2 |
{-|
Module : Catan.Board
Description : Board Types for Catan
Copyright : (c) Dylan Mann, David Cao 2017
License : GPL-3
Maintainer : mannd@seas.upenn.edu
Stability : experimental
Portability : POSIX
Contains the primitive board types and wraps unsafe operations to provide a total
mapping from CornerLocations and Tile Locations, and so no other code has to
touch the Board and invariants are maintained. Also contains some setup methods.
-}
{-# OPTIONS_HADDOCK not-home, show-extensions #-}
{-# OPTIONS -fwarn-tabs -fwarn-incomplete-patterns -Wall #-}
module Board(Terrain(..),
Token(..),
tokenOrder,
Tile(..),
Tiles,
TileLocation,
tileToAxial,
axialToTile,
getTile,
desert,
tileIndices,
makeTileLocation,
Resource(..),
Reward,
rewardTiles,
rewardLocs,
Harbor(..),
Neighbors(..),
Corner,
Corners,
CornerLocation,
cornerToAxial,
adjacentCorners,
getCorner,
makeCornerLocation,
cornerIndices,
Board(..),
setupBoard,
defaultBuildingLocations,
defaultRoadLocations)
where
import qualified Data.Map as Map
import Data.Map(Map)
import Data.Maybe(fromJust, mapMaybe)
import System.Random.Shuffle(shuffleM)
-- | Resources that are held by players and used as currency
data Resource = Brick | Lumber | Ore | Grain | Wool
        deriving (Enum, Read, Show, Eq, Ord)
-- | Tokens that represent when tile payouts happen
-- (dice values 2-12, excluding 7 which moves the robber).
data Token = Two | Three | Four | Five | Six
           | Eight | Nine | Ten | Eleven | Twelve
        deriving (Enum, Read, Show, Eq)
-- | default token order, starting from top left and circling clockwise inwards
-- (18 tokens: one per non-desert tile).
tokenOrder :: [Token]
tokenOrder = [Five, Two, Six, Three, Eight, Ten, Nine, Twelve, Eleven, Four,
              Eight, Ten, Nine, Four, Five, Six, Three, Eleven]
-- | indices of all tiles, starting from top left and going inwards clockwise:
-- twelve in the outer ring (y = 2), six in the inner ring (y = 1), one centre.
tileIndices :: [TileLocation]
tileIndices = map TileLocation $
    ([8..11] ++ [0..7]) `zip` repeat 2 ++
    ([4, 5] ++ [0..3]) `zip` repeat 1 ++
    [(0, 0)]
-- | default tile order, starting from top left and circling clockwise inwards
-- (18 paying terrains; the desert is appended separately in 'allTiles').
terrainOrder :: [Terrain]
terrainOrder = [Mountains, Pasture, Forest, Hills, Mountains, Pasture, Pasture,
                Fields, Hills, Forest, Fields, Fields, Hills, Pasture, Forest,
                Fields, Mountains, Forest]
-- | represents the types of paying tiles
data Terrain = Hills -- produce brick
             | Forest -- produce lumber
             | Mountains -- produce ore
             | Fields -- produce grain
             | Pasture -- produces Wool
        deriving (Enum, Read, Show, Eq)
-- | Corner on a board, it has the neighboring tiles and whether it is adacent
-- to any harbors
type Corner = Reward
type Reward = (Neighbors, Maybe Harbor)
-- | Protected type can only be instantiated by makeCornerLocation outside the
-- module. The Bool distinguishes the two corner orientations of a hex grid.
data CornerLocation = CornerLocation (Int, Int, Bool)
    deriving(Ord, Show, Read, Eq)
-- | neighboring tiles of the corner (a corner touches 1-3 tiles depending
-- on whether it sits on the board's edge)
data Neighbors = OneTile TileLocation
               | TwoTiles TileLocation TileLocation
               | ThreeTiles TileLocation TileLocation TileLocation
        deriving (Read, Show, Eq)
-- | what type of harbor that corner has access to
data Harbor = GenericHarbor
            | SpecialHarbor Resource
        deriving (Read, Show, Eq)
-- | Safe constructor for corner locations: accepts only coordinates that
-- appear in 'cornerIndices', rejecting everything else with 'Nothing'.
makeCornerLocation :: Int -> Int -> Bool -> Maybe CornerLocation
makeCornerLocation x y t
    | loc `elem` cornerIndices = Just loc
    | otherwise                = Nothing
  where
    loc = CornerLocation (x, y, t)
-- | Protected type can only be instantiated by makeTileLocation outside the
-- module. The pair is (index within ring, ring number).
data TileLocation = TileLocation (Int, Int)
    deriving(Ord, Read, Show, Eq)
-- | Tile is represented either by a paying tile or the desert. Paying tiles
-- yield resources to neighboring players when the dice roll the token
data Tile = Paying Terrain Token
          | Desert
        deriving (Eq, Show, Read)
-- | Safe constructor for tile locations. The board has one centre tile
-- (ring 0), an inner ring of six (y == 1, x in [0..5]) and an outer ring
-- of twelve (y == 2, x in [0..11]); everything else yields 'Nothing'.
makeTileLocation :: Int -> Int -> Maybe TileLocation
makeTileLocation x y
    | x < 0 || y < 0   = Nothing
    | y == 0 && x == 0 = Just (TileLocation (0, 0))
    | y == 1 && x < 6  = Just (TileLocation (x, 1))
    | y == 2 && x < 12 = Just (TileLocation (x, 2))
    | otherwise        = Nothing
-- | Lookup tables for the board's corners and tiles.
type Corners = Map CornerLocation Corner
type Tiles = Map TileLocation Tile
-- | All 54 valid corner coordinates (axial coordinate plus orientation
-- flag); this list defines validity for 'makeCornerLocation'.
cornerIndices :: [CornerLocation]
cornerIndices = map CornerLocation [(1,-1,False),(0,0,True),(0,-1,False),(-1,1,True),(0,0,False),(0,1,True),(2,-1,False),(1,0,True),(2,-2,False),(1,-1,True),(1,-2,False),(0,-1,True),(0,-2,False),(-1,0,True),(-1,-1,False),(-2,1,True),(-1,0,False),(-2,2,True),(-1,1,False),(-1,2,True),(0,1,False),(0,2,True),(1,0,False),(1,1,True),(3,-1,False),(2,0,True),(3,-2,False),(2,-1,True),(3,-3,False),(2,-2,True),(2,-3,False),(1,-2,True),(1,-3,False),(0,-2,True),(0,-3,False),(-1,-1,True),(-1,-2,False),(-2,0,True),(-2,-1,False),(-3,1,True),(-2,0,False),(-3,2,True),(-2,1,False),(-3,3,True),(-2,2,False),(-2,3,True),(-1,2,False),(-1,3,True),(0,2,False),(0,3,True),(1,1,False),(1,2,True),(2,0,False),(2,1,True)]
-- | Tiles adjacent to a corner: candidate axial coordinates depend on the
-- corner's orientation flag; off-board candidates are filtered out by
-- 'axialToTile' via 'mapMaybe'.
getNeighbors :: CornerLocation -> Neighbors
getNeighbors (CornerLocation (x, y, True)) = mkNeighbors $ mapMaybe axialToTile [(x, y), (x, y-1), (x+1, y-1)]
getNeighbors (CornerLocation (x, y, False)) = mkNeighbors $ mapMaybe axialToTile [(x, y), (x, y+1), (x-1, y+1)]
-- | Pack 1-3 tile locations into 'Neighbors'. The empty/longer cases are
-- unreachable because every corner touches at least one board tile.
mkNeighbors :: [TileLocation] -> Neighbors
mkNeighbors [tl] = OneTile tl
mkNeighbors [tl, tl2] = TwoTiles tl tl2
mkNeighbors [tl, tl2, tl3] = ThreeTiles tl tl2 tl3
mkNeighbors _ = error "shouldnt happen"
-- | Neighbor sets for every corner, in 'cornerIndices' order.
neighbors :: [Neighbors]
neighbors = map getNeighbors cornerIndices
-- | All 54 corners with randomized harbors; the first 24 (inner) corners
-- have no harbor, the remaining 30 coastal corners get the harbor layout.
allCorners :: IO [Corner]
allCorners = do h <- harbors
                return $ zip neighbors $ replicate 24 Nothing ++ makeHarbors h
-- | Lay nine harbors out over the 30 coastal corners; each harbor spans
-- two adjacent corners, with the first harbor wrapping around the ends.
makeHarbors :: [Harbor] -> [Maybe Harbor]
makeHarbors [] = []
makeHarbors (h:tl) = [Just h] ++ makeHarborsAux tl ++ [Just h]
  where
    makeHarborsAux (h1:h2:h3:h4:h5:h6:h7:h8:_) =
      Nothing: Just h1: Just h1: Nothing: Nothing: Just h2: Just h2:
      Nothing: Just h3: Just h3: Nothing: Just h4: Just h4: Nothing:
      Nothing: Just h5: Just h5: Nothing: Just h6: Just h6: Nothing:
      Just h7: Just h7: Nothing: Nothing: Just h8: Just h8: [Nothing]
    -- these are just to make the compiler happy
    makeHarborsAux _ = []
-- | The nine harbors (five resource-specific, four generic) in random order.
harbors :: IO [Harbor]
harbors = shuffleM $ map SpecialHarbor [Wool, Lumber, Brick, Ore, Grain] ++
                     replicate 4 GenericHarbor
-- | The full game board: every tile and every corner.
data Board = Board {tiles :: Tiles, corners :: Corners}
    deriving(Read, Show, Eq)
-- | The 1-3 tile locations a reward (corner) touches.
rewardLocs :: Reward -> [TileLocation]
rewardLocs (OneTile t, _) = [t]
rewardLocs (TwoTiles t1 t2, _) = [t1, t2]
rewardLocs (ThreeTiles t1 t2 t3, _) = [t1, t2, t3]
-- | The tiles a reward touches, resolved against the board.
rewardTiles :: Board -> Reward -> [Tile]
rewardTiles b r = map (getTile b) (rewardLocs r)
-- | All 19 tiles (18 paying + desert), with terrains/tokens shuffled.
allTiles :: IO [Tile]
allTiles = do terrs <- shuffleM terrainOrder
              toks <- shuffleM tokenOrder
              shuffleM $ zipWith Paying terrs toks ++ [Desert]
-- | Build a fresh randomized board.
setupBoard :: IO Board
setupBoard = do allCs <- allCorners
                allTs <- allTiles
                let ts = Map.fromList (zip tileIndices allTs)
                    cs = Map.fromList (zip cornerIndices allCs)
                return $ Board ts cs
-- | Total lookup of a corner: 'CornerLocation' can only be built via
-- 'makeCornerLocation', so the 'fromJust' cannot fail for well-formed keys.
getCorner :: Board -> CornerLocation -> Corner
getCorner (Board _ cs) c = fromJust $ Map.lookup c cs
-- | The up-to-three corners one road-step away; candidates that fall off
-- the board are filtered by 'makeCornerLocation'.
adjacentCorners :: CornerLocation -> [CornerLocation]
adjacentCorners (CornerLocation (x, y, True)) =
    mapMaybe mk [(x, y-1, False), (x+1, y-1, False), (x+1, y-2, False)]
  where mk (a, b, c) = makeCornerLocation a b c
adjacentCorners (CornerLocation (x, y, False)) =
    mapMaybe mk [(x, y+1, True), (x-1, y+1, True), (x-1, y+2, True)]
  where mk (a, b, c) = makeCornerLocation a b c
-- | Total lookup of a tile; same invariant argument as 'getCorner'.
getTile :: Board -> TileLocation -> Tile
getTile (Board ts _) l = fromJust $ Map.lookup l ts
-- | Locate the single desert tile. Every board built by 'setupBoard'
-- contains exactly one desert, so the search is total in practice.
desert :: Board -> TileLocation
desert b =
    case [loc | (loc, Desert) <- Map.toList (tiles b)] of
        (loc:_) -> loc
        []      -> error "desert definitely exists"
-- | Fixed corner layout for the beginner (default) settlement setup.
defaultBuildingLocations :: [CornerLocation]
defaultBuildingLocations = map CornerLocation
    [(1,1,True),(0,2,True),(-1,2,True),(-2,2,True),(-2,1,True),(-1,0,True),(0,-1,True),(2,-2,False)]
-- | Fixed road layout paired with 'defaultBuildingLocations'.
defaultRoadLocations :: [(CornerLocation, CornerLocation)]
defaultRoadLocations = map mkLoc [((2,-1,False),(1,1,True)),((1,0,False),(0,2,True)),((0,1,False),(-1,2,True)),((-1,1,False),(-2,2,True)),((-1,0,False),(-2,1,True)),((-1,-1,False),(-1,0,True)),((0,-1,True),(1,-2,False)),((1,-1,True),(2,-2,False))]
    where mkLoc (x1, x2) = (CornerLocation x1, CornerLocation x2)
-- | Convert axial hex coordinates to a board tile location; axial
-- coordinates outside the 19-tile board map to 'Nothing'.
--
-- Fixed: the original fell through with @l -> l@ for any axial pair not in
-- the table, feeding the *axial* coordinates straight into
-- 'makeTileLocation'. For off-board axials that are numerically valid
-- (index, ring) pairs — e.g. (2,1), reachable from corner neighbor
-- candidates — that produced a bogus @Just@ pointing at the wrong tile.
-- Unknown coordinates now return 'Nothing', which is what the 'mapMaybe'
-- call sites in 'getNeighbors' rely on.
axialToTile :: (Int, Int) -> Maybe TileLocation
axialToTile loc = case loc of
    (0,0)   -> mk (0,0)
    (1,0)   -> mk (0,1)
    (1,-1)  -> mk (1,1)
    (0,-1)  -> mk (2,1)
    (-1,0)  -> mk (3,1)
    (-1,1)  -> mk (4,1)
    (0, 1)  -> mk (5,1)
    (2, 0)  -> mk (0,2)
    (2,-1)  -> mk (1,2)
    (2,-2)  -> mk (2,2)
    (1,-2)  -> mk (3,2)
    (0,-2)  -> mk (4,2)
    (-1,-1) -> mk (5,2)
    (-2,0)  -> mk (6,2)
    (-2,1)  -> mk (7,2)
    (-2,2)  -> mk (8,2)
    (-1,2)  -> mk (9,2)
    (0,2)   -> mk (10,2)
    (1,1)   -> mk (11,2)
    _       -> Nothing
  where
    mk = uncurry makeTileLocation
-- | Inverse of 'axialToTile' for valid tile locations: map a
-- (ring-index, ring) pair back to axial hex coordinates. Total because
-- 'TileLocation' can only be constructed through 'makeTileLocation'.
tileToAxial :: TileLocation -> (Int, Int)
tileToAxial (TileLocation l) = case l of
    (0,0) -> (0,0)
    (0,1) -> (1,0)
    (1,1) -> (1,-1)
    (2,1) -> (0,-1)
    (3,1) -> (-1,0)
    (4,1) -> (-1,1)
    (5,1) -> (0, 1)
    (0,2) -> (2, 0)
    (1,2) -> (2,-1)
    (2,2) -> (2,-2)
    (3,2) -> (1,-2)
    (4,2) -> (0,-2)
    (5,2) -> (-1,-1)
    (6,2) -> (-2,0)
    (7,2) -> (-2,1)
    (8,2) -> (-2,2)
    (9,2) -> (-1,2)
    (10,2) -> (0,2)
    (11,2) -> (1,1)
    _ -> error "No other tile location can exist"
-- | Expose the raw (x, y, orientation) triple of a corner.
cornerToAxial :: CornerLocation -> (Int, Int, Bool)
cornerToAxial (CornerLocation c) = c
| dylanmann/CurriersOfCatan | src/Board.hs | gpl-3.0 | 10,379 | 0 | 34 | 2,705 | 4,316 | 2,531 | 1,785 | 199 | 20 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Container.Projects.Zones.Clusters.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Deletes the cluster, including the Kubernetes endpoint and all worker
-- nodes. Firewalls and routes that were configured during cluster creation
-- are also deleted. Other Google Compute Engine resources that might be in
-- use by the cluster (e.g. load balancer resources) will not be deleted if
-- they weren\'t present at the initial create time.
--
-- /See:/ <https://cloud.google.com/container-engine/ Google Container Engine API Reference> for @container.projects.zones.clusters.delete@.
module Network.Google.Resource.Container.Projects.Zones.Clusters.Delete
(
-- * REST Resource
ProjectsZonesClustersDeleteResource
-- * Creating a Request
, projectsZonesClustersDelete
, ProjectsZonesClustersDelete
-- * Request Lenses
, pzcdXgafv
, pzcdUploadProtocol
, pzcdPp
, pzcdAccessToken
, pzcdUploadType
, pzcdZone
, pzcdBearerToken
, pzcdClusterId
, pzcdProjectId
, pzcdCallback
) where
import Network.Google.Container.Types
import Network.Google.Prelude
-- | A resource alias for @container.projects.zones.clusters.delete@ method which the
-- 'ProjectsZonesClustersDelete' request conforms to.
type ProjectsZonesClustersDeleteResource =
"v1" :>
"projects" :>
Capture "projectId" Text :>
"zones" :>
Capture "zone" Text :>
"clusters" :>
Capture "clusterId" Text :>
QueryParam "$.xgafv" Text :>
QueryParam "upload_protocol" Text :>
QueryParam "pp" Bool :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "bearer_token" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Delete '[JSON] Operation
-- | Deletes the cluster, including the Kubernetes endpoint and all worker
-- nodes. Firewalls and routes that were configured during cluster creation
-- are also deleted. Other Google Compute Engine resources that might be in
-- use by the cluster (e.g. load balancer resources) will not be deleted if
-- they weren\'t present at the initial create time.
--
-- /See:/ 'projectsZonesClustersDelete' smart constructor.
data ProjectsZonesClustersDelete = ProjectsZonesClustersDelete'
{ _pzcdXgafv :: !(Maybe Text)
, _pzcdUploadProtocol :: !(Maybe Text)
, _pzcdPp :: !Bool
, _pzcdAccessToken :: !(Maybe Text)
, _pzcdUploadType :: !(Maybe Text)
, _pzcdZone :: !Text
, _pzcdBearerToken :: !(Maybe Text)
, _pzcdClusterId :: !Text
, _pzcdProjectId :: !Text
, _pzcdCallback :: !(Maybe Text)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProjectsZonesClustersDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'pzcdXgafv'
--
-- * 'pzcdUploadProtocol'
--
-- * 'pzcdPp'
--
-- * 'pzcdAccessToken'
--
-- * 'pzcdUploadType'
--
-- * 'pzcdZone'
--
-- * 'pzcdBearerToken'
--
-- * 'pzcdClusterId'
--
-- * 'pzcdProjectId'
--
-- * 'pzcdCallback'
-- NOTE(review): this module is machine-generated (see the
-- \"auto-generated\" stability note in the file header); prefer
-- regenerating over hand-editing. All optional query parameters default
-- to 'Nothing' except pretty-printing, which defaults to enabled.
projectsZonesClustersDelete
    :: Text -- ^ 'pzcdZone'
    -> Text -- ^ 'pzcdClusterId'
    -> Text -- ^ 'pzcdProjectId'
    -> ProjectsZonesClustersDelete
projectsZonesClustersDelete pPzcdZone_ pPzcdClusterId_ pPzcdProjectId_ =
    ProjectsZonesClustersDelete'
    { _pzcdXgafv = Nothing
    , _pzcdUploadProtocol = Nothing
    , _pzcdPp = True
    , _pzcdAccessToken = Nothing
    , _pzcdUploadType = Nothing
    , _pzcdZone = pPzcdZone_
    , _pzcdBearerToken = Nothing
    , _pzcdClusterId = pPzcdClusterId_
    , _pzcdProjectId = pPzcdProjectId_
    , _pzcdCallback = Nothing
    }
-- | V1 error format.
pzcdXgafv :: Lens' ProjectsZonesClustersDelete (Maybe Text)
pzcdXgafv
= lens _pzcdXgafv (\ s a -> s{_pzcdXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pzcdUploadProtocol :: Lens' ProjectsZonesClustersDelete (Maybe Text)
pzcdUploadProtocol
= lens _pzcdUploadProtocol
(\ s a -> s{_pzcdUploadProtocol = a})
-- | Pretty-print response.
pzcdPp :: Lens' ProjectsZonesClustersDelete Bool
pzcdPp = lens _pzcdPp (\ s a -> s{_pzcdPp = a})
-- | OAuth access token.
pzcdAccessToken :: Lens' ProjectsZonesClustersDelete (Maybe Text)
pzcdAccessToken
= lens _pzcdAccessToken
(\ s a -> s{_pzcdAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pzcdUploadType :: Lens' ProjectsZonesClustersDelete (Maybe Text)
pzcdUploadType
= lens _pzcdUploadType
(\ s a -> s{_pzcdUploadType = a})
-- | The name of the Google Compute Engine
-- [zone](\/compute\/docs\/zones#available) in which the cluster resides.
pzcdZone :: Lens' ProjectsZonesClustersDelete Text
pzcdZone = lens _pzcdZone (\ s a -> s{_pzcdZone = a})
-- | OAuth bearer token.
pzcdBearerToken :: Lens' ProjectsZonesClustersDelete (Maybe Text)
pzcdBearerToken
= lens _pzcdBearerToken
(\ s a -> s{_pzcdBearerToken = a})
-- | The name of the cluster to delete.
pzcdClusterId :: Lens' ProjectsZonesClustersDelete Text
pzcdClusterId
= lens _pzcdClusterId
(\ s a -> s{_pzcdClusterId = a})
-- | The Google Developers Console [project ID or project
-- number](https:\/\/support.google.com\/cloud\/answer\/6158840).
pzcdProjectId :: Lens' ProjectsZonesClustersDelete Text
pzcdProjectId
= lens _pzcdProjectId
(\ s a -> s{_pzcdProjectId = a})
-- | JSONP
pzcdCallback :: Lens' ProjectsZonesClustersDelete (Maybe Text)
pzcdCallback
= lens _pzcdCallback (\ s a -> s{_pzcdCallback = a})
instance GoogleRequest ProjectsZonesClustersDelete
where
type Rs ProjectsZonesClustersDelete = Operation
type Scopes ProjectsZonesClustersDelete =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient ProjectsZonesClustersDelete'{..}
= go _pzcdProjectId _pzcdZone _pzcdClusterId
_pzcdXgafv
_pzcdUploadProtocol
(Just _pzcdPp)
_pzcdAccessToken
_pzcdUploadType
_pzcdBearerToken
_pzcdCallback
(Just AltJSON)
containerService
where go
= buildClient
(Proxy :: Proxy ProjectsZonesClustersDeleteResource)
mempty
| rueshyna/gogol | gogol-container/gen/Network/Google/Resource/Container/Projects/Zones/Clusters/Delete.hs | mpl-2.0 | 7,303 | 0 | 22 | 1,770 | 1,022 | 596 | 426 | 148 | 1 |
module Crypto.OPVault.Types.Common
( module Crypto.OPVault.Types.Common
, module Common
) where
import Control.Applicative as Common ((<$>), Applicative(..))
import Control.Monad as Common (mzero, join, void)
import Control.Monad.IO.Class as Common (MonadIO(..))
import Control.Monad.Trans.Class as Common (MonadTrans(..))
import Data.Aeson as Common (FromJSON(..), (.:), (.:?), Value(..), Object)
import Data.ByteString.Char8 as Common (ByteString, pack, unpack)
import Data.Foldable as Common (toList)
import Data.HashMap.Strict as Common (HashMap)
import Data.String as Common (IsString(..))
import Data.Text as Common (Text)
| bitemyapp/opvault | src/Crypto/OPVault/Types/Common.hs | mpl-2.0 | 730 | 0 | 6 | 170 | 205 | 144 | 61 | 13 | 0 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Dataproc.Projects.Regions.WorkflowTemplates.Get
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the latest workflow template.Can retrieve previously
-- instantiated template by specifying optional version parameter.
--
-- /See:/ <https://cloud.google.com/dataproc/ Cloud Dataproc API Reference> for @dataproc.projects.regions.workflowTemplates.get@.
module Network.Google.Resource.Dataproc.Projects.Regions.WorkflowTemplates.Get
(
-- * REST Resource
ProjectsRegionsWorkflowTemplatesGetResource
-- * Creating a Request
, projectsRegionsWorkflowTemplatesGet
, ProjectsRegionsWorkflowTemplatesGet
-- * Request Lenses
, prwtgXgafv
, prwtgUploadProtocol
, prwtgAccessToken
, prwtgUploadType
, prwtgName
, prwtgVersion
, prwtgCallback
) where
import Network.Google.Dataproc.Types
import Network.Google.Prelude
-- | A resource alias for @dataproc.projects.regions.workflowTemplates.get@ method which the
-- 'ProjectsRegionsWorkflowTemplatesGet' request conforms to.
type ProjectsRegionsWorkflowTemplatesGetResource =
"v1" :>
Capture "name" Text :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "version" (Textual Int32) :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON] WorkflowTemplate
-- | Retrieves the latest workflow template.Can retrieve previously
-- instantiated template by specifying optional version parameter.
--
-- /See:/ 'projectsRegionsWorkflowTemplatesGet' smart constructor.
data ProjectsRegionsWorkflowTemplatesGet =
ProjectsRegionsWorkflowTemplatesGet'
{ _prwtgXgafv :: !(Maybe Xgafv)
, _prwtgUploadProtocol :: !(Maybe Text)
, _prwtgAccessToken :: !(Maybe Text)
, _prwtgUploadType :: !(Maybe Text)
, _prwtgName :: !Text
, _prwtgVersion :: !(Maybe (Textual Int32))
, _prwtgCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsRegionsWorkflowTemplatesGet' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'prwtgXgafv'
--
-- * 'prwtgUploadProtocol'
--
-- * 'prwtgAccessToken'
--
-- * 'prwtgUploadType'
--
-- * 'prwtgName'
--
-- * 'prwtgVersion'
--
-- * 'prwtgCallback'
-- NOTE(review): machine-generated (see the \"auto-generated\" stability
-- note in the file header); prefer regenerating over hand-editing. All
-- optional parameters default to 'Nothing'.
projectsRegionsWorkflowTemplatesGet
    :: Text -- ^ 'prwtgName'
    -> ProjectsRegionsWorkflowTemplatesGet
projectsRegionsWorkflowTemplatesGet pPrwtgName_ =
  ProjectsRegionsWorkflowTemplatesGet'
    { _prwtgXgafv = Nothing
    , _prwtgUploadProtocol = Nothing
    , _prwtgAccessToken = Nothing
    , _prwtgUploadType = Nothing
    , _prwtgName = pPrwtgName_
    , _prwtgVersion = Nothing
    , _prwtgCallback = Nothing
    }
-- | V1 error format.
prwtgXgafv :: Lens' ProjectsRegionsWorkflowTemplatesGet (Maybe Xgafv)
prwtgXgafv
= lens _prwtgXgafv (\ s a -> s{_prwtgXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
prwtgUploadProtocol :: Lens' ProjectsRegionsWorkflowTemplatesGet (Maybe Text)
prwtgUploadProtocol
= lens _prwtgUploadProtocol
(\ s a -> s{_prwtgUploadProtocol = a})
-- | OAuth access token.
prwtgAccessToken :: Lens' ProjectsRegionsWorkflowTemplatesGet (Maybe Text)
prwtgAccessToken
= lens _prwtgAccessToken
(\ s a -> s{_prwtgAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
prwtgUploadType :: Lens' ProjectsRegionsWorkflowTemplatesGet (Maybe Text)
prwtgUploadType
= lens _prwtgUploadType
(\ s a -> s{_prwtgUploadType = a})
-- | Required. The resource name of the workflow template, as described in
-- https:\/\/cloud.google.com\/apis\/design\/resource_names. For
-- projects.regions.workflowTemplates.get, the resource name of the
-- template has the following format:
-- projects\/{project_id}\/regions\/{region}\/workflowTemplates\/{template_id}
-- For projects.locations.workflowTemplates.get, the resource name of the
-- template has the following format:
-- projects\/{project_id}\/locations\/{location}\/workflowTemplates\/{template_id}
prwtgName :: Lens' ProjectsRegionsWorkflowTemplatesGet Text
prwtgName
= lens _prwtgName (\ s a -> s{_prwtgName = a})
-- | Optional. The version of workflow template to retrieve. Only previously
-- instantiated versions can be retrieved.If unspecified, retrieves the
-- current version.
prwtgVersion :: Lens' ProjectsRegionsWorkflowTemplatesGet (Maybe Int32)
prwtgVersion
= lens _prwtgVersion (\ s a -> s{_prwtgVersion = a})
. mapping _Coerce
-- | JSONP
prwtgCallback :: Lens' ProjectsRegionsWorkflowTemplatesGet (Maybe Text)
prwtgCallback
= lens _prwtgCallback
(\ s a -> s{_prwtgCallback = a})
instance GoogleRequest
ProjectsRegionsWorkflowTemplatesGet
where
type Rs ProjectsRegionsWorkflowTemplatesGet =
WorkflowTemplate
type Scopes ProjectsRegionsWorkflowTemplatesGet =
'["https://www.googleapis.com/auth/cloud-platform"]
requestClient
ProjectsRegionsWorkflowTemplatesGet'{..}
= go _prwtgName _prwtgXgafv _prwtgUploadProtocol
_prwtgAccessToken
_prwtgUploadType
_prwtgVersion
_prwtgCallback
(Just AltJSON)
dataprocService
where go
= buildClient
(Proxy ::
Proxy ProjectsRegionsWorkflowTemplatesGetResource)
mempty
| brendanhay/gogol | gogol-dataproc/gen/Network/Google/Resource/Dataproc/Projects/Regions/WorkflowTemplates/Get.hs | mpl-2.0 | 6,353 | 0 | 16 | 1,304 | 806 | 472 | 334 | 119 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Content.Accounttax.Update
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Updates the tax settings of the account. This method can only be called
-- for accounts to which the managing account has access: either the
-- managing account itself or sub-accounts if the managing account is a
-- multi-client account.
--
-- /See:/ <https://developers.google.com/shopping-content Content API for Shopping Reference> for @content.accounttax.update@.
module Network.Google.Resource.Content.Accounttax.Update
(
-- * REST Resource
AccounttaxUpdateResource
-- * Creating a Request
, accounttaxUpdate
, AccounttaxUpdate
-- * Request Lenses
, auuMerchantId
, auuPayload
, auuAccountId
, auuDryRun
) where
import Network.Google.Prelude
import Network.Google.ShoppingContent.Types
-- | A resource alias for @content.accounttax.update@ method which the
-- 'AccounttaxUpdate' request conforms to.
type AccounttaxUpdateResource =
"content" :>
"v2" :>
Capture "merchantId" (Textual Word64) :>
"accounttax" :>
Capture "accountId" (Textual Word64) :>
QueryParam "dryRun" Bool :>
QueryParam "alt" AltJSON :>
ReqBody '[JSON] AccountTax :> Put '[JSON] AccountTax
-- | Updates the tax settings of the account. This method can only be called
-- for accounts to which the managing account has access: either the
-- managing account itself or sub-accounts if the managing account is a
-- multi-client account.
--
-- /See:/ 'accounttaxUpdate' smart constructor.
data AccounttaxUpdate = AccounttaxUpdate'
{ _auuMerchantId :: !(Textual Word64)
, _auuPayload :: !AccountTax
, _auuAccountId :: !(Textual Word64)
, _auuDryRun :: !(Maybe Bool)
} deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'AccounttaxUpdate' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'auuMerchantId'
--
-- * 'auuPayload'
--
-- * 'auuAccountId'
--
-- * 'auuDryRun'
-- NOTE(review): machine-generated (see the \"auto-generated\" stability
-- note in the file header); prefer regenerating over hand-editing. The
-- numeric ids are wrapped via @_Coerce@ into their 'Textual' form.
accounttaxUpdate
    :: Word64 -- ^ 'auuMerchantId'
    -> AccountTax -- ^ 'auuPayload'
    -> Word64 -- ^ 'auuAccountId'
    -> AccounttaxUpdate
accounttaxUpdate pAuuMerchantId_ pAuuPayload_ pAuuAccountId_ =
    AccounttaxUpdate'
    { _auuMerchantId = _Coerce # pAuuMerchantId_
    , _auuPayload = pAuuPayload_
    , _auuAccountId = _Coerce # pAuuAccountId_
    , _auuDryRun = Nothing
    }
-- | The ID of the managing account.
auuMerchantId :: Lens' AccounttaxUpdate Word64
auuMerchantId
= lens _auuMerchantId
(\ s a -> s{_auuMerchantId = a})
. _Coerce
-- | Multipart request metadata.
auuPayload :: Lens' AccounttaxUpdate AccountTax
auuPayload
= lens _auuPayload (\ s a -> s{_auuPayload = a})
-- | The ID of the account for which to get\/update account tax settings.
auuAccountId :: Lens' AccounttaxUpdate Word64
auuAccountId
= lens _auuAccountId (\ s a -> s{_auuAccountId = a})
. _Coerce
-- | Flag to run the request in dry-run mode.
auuDryRun :: Lens' AccounttaxUpdate (Maybe Bool)
auuDryRun
= lens _auuDryRun (\ s a -> s{_auuDryRun = a})
instance GoogleRequest AccounttaxUpdate where
type Rs AccounttaxUpdate = AccountTax
type Scopes AccounttaxUpdate =
'["https://www.googleapis.com/auth/content"]
requestClient AccounttaxUpdate'{..}
= go _auuMerchantId _auuAccountId _auuDryRun
(Just AltJSON)
_auuPayload
shoppingContentService
where go
= buildClient
(Proxy :: Proxy AccounttaxUpdateResource)
mempty
| rueshyna/gogol | gogol-shopping-content/gen/Network/Google/Resource/Content/Accounttax/Update.hs | mpl-2.0 | 4,407 | 0 | 15 | 1,014 | 583 | 344 | 239 | 85 | 1 |
{-# LANGUAGE UnboxedTuples #-}
module Math.Topology.KnotTh.Tabulation.LinkDiagrams
( nextGeneration
) where
import Control.Monad (guard, when)
import Data.Bits (shiftL)
import Data.Function (on)
import Data.List (nubBy)
import Data.STRef (newSTRef, readSTRef, writeSTRef)
import qualified Data.Vector.Mutable as MV
import qualified Data.Vector.Unboxed as UV
import qualified Data.Vector.Unboxed.Mutable as UMV
import Math.Topology.KnotTh.Algebra.Dihedral.D4
import Math.Topology.KnotTh.Tangle
p0 :: (Crossing a) => a -> Dart Link a -> ((Int, UV.Vector Int), Link a)
p0 cross ab =
let link = dartOwner ab
ba = opposite ab
cd = nextCCW ab
dc = opposite cd
n = 1 + numberOfVertices link
res = implode
( numberOfFreeLoops link
, let opp' x | x == ab = (n, 0)
| x == ba = (n, 1)
| x == dc = (n, 2)
| x == cd = (n, 3)
| otherwise = endPair' x
in map (\ v -> (map opp' $ outcomingDarts v, vertexContent v)) (allVertices link)
++ [(map beginPair' [ab, ba, dc, cd], cross)]
)
rc = let v = nthVertex res n
in min (rootCode' (nthOutcomingDart v 3) ccw)
(rootCode' (nthOutcomingDart v 0) cw)
in ((2, rc), res)
p1 :: (Crossing a) => a -> Dart Link a -> ((Int, UV.Vector Int), Link a)
p1 cross ab =
let link = dartOwner ab
ba = opposite ab
ac = nextCCW ab
ca = opposite ac
bd = nextCW ba
db = opposite bd
n = 1 + numberOfVertices link
res = implode
( numberOfFreeLoops link
, let opp' x | x == ac = (n, 0)
| x == bd = (n, 1)
| x == db = (n, 2)
| x == ca = (n, 3)
| otherwise = endPair' x
in map (\ v -> (map opp' $ outcomingDarts v, vertexContent v)) (allVertices link)
++ [(map beginPair' [ac, bd, db, ca], cross)]
)
rc = let v = nthVertex res n
in min (rootCode' (nthOutcomingDart v 0) ccw)
(rootCode' (nthOutcomingDart v 1) cw)
in ((3, rc), res)
-- | Enumerate all canonical child diagrams obtained by adding one crossing
-- to @link@, deduplicated by root code. A child is kept only when the root
-- code produced by its construction is minimal over all darts/directions
-- (canonical-form pruning), which avoids emitting isomorphic duplicates.
-- NOTE(review): 'nubBy' is O(n^2) in the number of children — presumably
-- fine for the generation sizes involved; confirm before scaling up.
nextGeneration :: (Crossing a) => [a] -> Link a -> [Link a]
nextGeneration cross link =
    map snd $ nubBy ((==) `on` fst) $ do
        (rc, child) <- do
            c <- cross
            d <- allDarts link
            p0 c d : [p1 c d | opposite (nextCCW d) /= nextCW (opposite d)]
        let rc' = minimum [rootCode r dir | r <- allDarts child, dir <- bothDirections]
        guard $ rc <= rc'
        return (rc, child)
-- | Root code of a diagram rooted at a dart: the 'Int' component ranks the
-- local configuration (2 and 3 mirror the codes produced by 'p0' and 'p1')
-- so codes from different construction moves compare correctly; class 4
-- needs no vector because it can never be minimal.
rootCode :: (Crossing a) => Dart Link a -> RotationDirection -> (Int, UV.Vector Int)
rootCode ab dir | ac == opposite bd = (2, rootCode' ab dir)
                | nextDir dir (opposite ac) == opposite bd = (3, rootCode' ab dir)
                | otherwise = (4, UV.empty)
    where
        ba = opposite ab
        ac = nextDir dir ab
        bd = nextDir (mirrorIt dir) ba
-- | BFS-based canonical code of the link rooted at @root@, traversing
-- incident darts in rotation direction @dir@.  If the crossing type admits
-- global transformations, the minimum code over all of them is taken.
rootCode' :: (Crossing a) => Dart Link a -> RotationDirection -> UV.Vector Int
rootCode' root dir =
    case globalTransformations link of
        Nothing -> codeWithGlobal d4I
        Just globals -> minimum $ map codeWithGlobal globals
    where
        link = dartOwner root
        n = numberOfVertices link
        codeWithGlobal global = UV.create $ do
            -- x: BFS numbering per vertex (0 = unvisited); the root's end
            -- vertex is seeded with number 1
            x <- UMV.replicate (n + 1) 0
            UMV.unsafeWrite x (vertexIndex $ endVertex root) 1
            -- q: queue of entry darts, one per discovered vertex
            q <- MV.new n
            MV.unsafeWrite q 0 (opposite root)
            -- next free BFS number
            free <- newSTRef 2
            let {-# INLINE look #-}
                -- fold step over incoming darts: pack the neighbour's BFS
                -- number into the accumulator (7 bits per neighbour),
                -- discovering and enqueueing unvisited vertices on the fly
                look !d !s = do
                    let u = beginVertex d
                    ux <- UMV.unsafeRead x (vertexIndex u)
                    if ux > 0
                        then return $! ux + (s `shiftL` 7)
                        else do
                            nf <- readSTRef free
                            writeSTRef free $! nf + 1
                            UMV.unsafeWrite x (vertexIndex u) nf
                            MV.unsafeWrite q (nf - 1) d
                            return $! nf + (s `shiftL` 7)
            -- rc: two code words per vertex, filled in BFS order
            rc <- UMV.replicate (2 * n) 0
            let {-# INLINE bfs #-}
                bfs !h = when (h < n) $ do
                    d <- MV.unsafeRead q h
                    nb <- foldMIncomingDartsFrom d dir look 0
                    case crossingCodeWithGlobal global dir d of
                        (# be, le #) -> do
                            UMV.unsafeWrite rc (2 * h) be
                            UMV.unsafeWrite rc (2 * h + 1) $! le + nb `shiftL` 3
                            bfs $! h + 1
            bfs 0
            return rc
| mishun/tangles | src/Math/Topology/KnotTh/Tabulation/LinkDiagrams.hs | lgpl-3.0 | 4,873 | 0 | 27 | 2,062 | 1,760 | 891 | 869 | -1 | -1 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
module Region where
import Control.Lens (makeLenses)
import GHC.Generics
import Data.Aeson
import qualified Grid as G
import Coordinate
import Link
-- | A rectangular region of the world: a grid of linked entities together
-- with its position, extent, mass and velocity.
data Region v = MkRegion
    { _regionCoordinate :: WorldCoordinate      -- ^ Position in world coordinates
    , _regionDimension  :: (Int, Int)           -- ^ Extent; presumably (width, height) — confirm at use sites
    , _regionGrid       :: G.Grid Int (Link v)  -- ^ Region contents, indexed by Int coordinates
    , _regionMass       :: Int                  -- ^ Total mass of the region
    , _regionVelocity   :: (Int, Int)           -- ^ Per-axis velocity components
    } deriving (Show, Generic)
makeLenses ''Region
-- | Decode a 'Region' from its JSON object form (keys "coordinate",
-- "dimension", "grid", "mass" and "velocity" — the inverse of 'ToJSON').
instance Linkable v => FromJSON (Region v) where
    -- Applicative style; field order matches the MkRegion constructor.
    parseJSON (Object o) =
        MkRegion
            <$> o .: "coordinate"
            <*> o .: "dimension"
            <*> o .: "grid"
            <*> o .: "mass"
            <*> o .: "velocity"
    -- Fail inside the Parser monad instead of calling 'error': callers of
    -- decode/eitherDecode now get a proper parse failure instead of a crash.
    parseJSON _ = fail "Unable to parse Region json"
-- | Encode a 'Region' as a JSON object; the keys match the parser above.
instance ToJSON (Region v) where
    toJSON region = object
        [ "coordinate" .= _regionCoordinate region
        , "grid"       .= _regionGrid region
        , "mass"       .= _regionMass region
        , "velocity"   .= _regionVelocity region
        , "dimension"  .= _regionDimension region
        ]
-- | The zero region: located at the origin with an empty grid and no
-- extent, mass or motion.
defaultRegion :: Region v
defaultRegion = MkRegion
    { _regionCoordinate = coordinate 0 0
    , _regionDimension = (0, 0)
    , _regionGrid = G.empty
    , _regionMass = 0
    , _regionVelocity = (0, 0)
    }
| nitrix/lspace | legacy/Region.hs | unlicense | 1,636 | 0 | 11 | 520 | 403 | 224 | 179 | 47 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-| Implementation of the caching interfaces for the compute data structures.
-}
module Spark.Core.Internal.CachingUntyped(
cachingType,
uncache,
autocacheGen
) where
import Control.Monad.Except
import Spark.Core.Internal.Caching
import Spark.Core.Internal.DatasetStructures
import Spark.Core.Internal.DatasetStd
import Spark.Core.Internal.DatasetFunctions
import Spark.Core.Internal.OpStructures
import Spark.Core.Internal.PathsUntyped()
import Spark.Core.Internal.DAGStructures
import Spark.Core.StructuresInternal
{-| Uncaches the dataset.
This function instructs Spark to unmark the dataset as cached. The disk and the
memory used by Spark in the future.
Unlike Spark, Karps is stricter with the uncaching operation:
- the argument of cache must be a cached dataset
- once a dataset is uncached, its cached version cannot be used again (i.e. it
must be recomputed).
Karps performs escape analysis and will refuse to run programs with caching
issues.
-}
uncache :: ComputeNode loc a -> ComputeNode loc a
uncache node =
  -- Build a fresh unpersist node with the same locality and type as the
  -- input, and attach the input as its only parent.
  let unpersistNode = emptyNodeStandard (nodeLocality node) (nodeType node) opnameUnpersist
  in unpersistNode `parents` [untyped node]
-- This still uses UntypedNode instead of OperatorNode
-- because it relies on the parents too.
-- | Classify a node for the caching escape analysis: whether data flows
-- 'Through' it, 'Stop's at it, or it is itself a cache/uncache/autocache
-- operation (recognised by operator name).
cachingType :: UntypedNode -> CacheTry NodeCachingType
cachingType n = case nodeOp n of
  NodeLocalOp _ -> pure Stop
  -- NodeAggregatorReduction _ -> pure Stop
  NodeAggregatorLocalReduction _ -> pure Stop
  NodeOpaqueAggregator _ -> pure Stop
  NodeLocalLit _ _ -> pure Stop
  NodeStructuredTransform _ -> pure Through
  NodeLocalStructuredTransform _ -> pure Stop
  NodeDistributedLit _ _ -> pure Through
  -- The three guarded NodeDistributedOp alternatives below must stay
  -- before the catch-all NodeDistributedOp case.
  NodeDistributedOp so | soName so == opnameCache ->
    pure $ CacheOp (vertexToId n)
  NodeDistributedOp so | soName so == opnameUnpersist ->
    case nodeParents n of
      -- a valid uncache node has exactly one parent: the cached node
      [n'] -> pure $ UncacheOp (vertexToId n) (vertexToId n')
      _ -> throwError "Node is not valid uncache node"
  NodeDistributedOp so | soName so == opnameAutocache ->
    pure $ AutocacheOp (vertexToId n)
  NodeDistributedOp _ -> pure Through -- Nothing special for the other operations
  NodeBroadcastJoin -> pure Through
  NodeGroupedReduction _ -> pure Stop
  NodeReduction _ -> pure Stop
  NodePointer _ -> pure Stop -- It is supposed to be an observable
-- | Generator of the nodes derived during autocaching: given a vertex, it
-- builds the matching identity node and uncache node, reusing the derived
-- node's own id as the new vertex id.
autocacheGen :: AutocacheGen UntypedNode
autocacheGen = AutocacheGen {
  deriveUncache = deriveUncache',
  deriveIdentity = deriveIdentity'
} where
  -- TODO: use path-based identification in the future
  -- f :: String -> VertexId -> VertexId
  -- f s (VertexId bs) = VertexId . C8.pack . (++s) . C8.unpack $ bs
  deriveIdentity' (Vertex _ un) =
    let x = identity un
        vid' = VertexId . unNodeId . nodeId $ x -- f "_identity" vid
    in Vertex vid' x
  deriveUncache' (Vertex _ un) =
    let x = uncache un
        vid' = VertexId . unNodeId . nodeId $ x -- f "_uncache" vid
    in Vertex vid' x
| tjhunter/karps | haskell/src/Spark/Core/Internal/CachingUntyped.hs | apache-2.0 | 3,018 | 0 | 15 | 567 | 606 | 308 | 298 | 53 | 16 |
module Common where
-- | Display name of a game object.
type Name = String
-- | Objects that expose a human-readable name.
class NamedObject a where
    getName :: a -> String
| tr00per/adventure | src/main/haskell/Common.hs | bsd-2-clause | 94 | 0 | 7 | 22 | 28 | 16 | 12 | 4 | 0 |
module Main where
import qualified SignExtService_Client as Client
import SignExt_Types
import SignExtService
import Thrift
import Thrift.Protocol.Binary
import Thrift.Server
import Thrift.Transport
import Thrift.Transport.Handle
import Control.Exception
import Data.Either
import Data.Int
import Data.List
import Data.Maybe
import System.Environment
import Data.Time
import Data.Text.Lazy
import Data.Vector
import Network
import System.Exit
import System.Random
import Text.Printf
import Control.Concurrent
import Numeric (showHex, showIntAtBase)
-- Package size: number of 64-bit words dumped per frame.
package_size = 3
-- Buffer size (bytes) for the framed stream.
buffer_size = 4096 * 512
-- Buffer alignment (bytes) for posix_memalign.
buffer_alignment = 4096
-- Thread sleep time (microseconds, passed to threadDelay).
-- NOTE(review): the name has a typo ("ime" for "time"); it is kept
-- because stream_read below refers to it by this name.
thread_sleep_ime = 1
-- Size of int in bits.  NOTE(review): unused in this chunk of the file.
size_of_int = 32
-- | Extract the payload of a successful (Right) Thrift result.
-- The original was partial; calling it on a 'Left' now aborts with a
-- descriptive message instead of an anonymous pattern-match failure.
getRight :: Either left right -> right
getRight (Right x) = x
getRight (Left _) = error "getRight: Thrift call failed (Left result)"
-- stream read function
-- Polls the framed stream until at least one frame is available, sleeping
-- thread_sleep_ime microseconds between attempts.  Exceptions are printed;
-- 'getRight' then aborts when the failed result is forced.  Returns the
-- (non-zero) count reported by max_framed_stream_read.
stream_read x to_cpu y frame_address fsz_address = do
    e <- try (Client.max_framed_stream_read x to_cpu y frame_address fsz_address) :: IO (Either SomeException Int64)
    case e of
        Left ex -> putStrLn $ "Caught exception: " Data.List.++ show ex
        Right ex -> return ()
    let ret = getRight e
    if (fromIntegral ret) == 0
        then do
            -- nothing available yet: back off briefly and poll again
            threadDelay thread_sleep_ime
            (stream_read x to_cpu y frame_address fsz_address)
        else return (fromIntegral ret)
-- process data function
-- Reads one frame's size and start address from the server, dumps
-- package_size words via process_words, discards the frame on the DFE
-- side, and loops (reading the next frame) until process_words reports
-- the all-zero terminator package.
process_data x fsz_address frame_address to_cpu numRx = do
    fsz_array <- Client.receive_data_int64_t x fsz_address (fromIntegral 1)
    let fsz = Data.Vector.head fsz_array
    putStrLn ("CPU: Got output frame " Data.List.++ (show numRx) Data.List.++ " - " Data.List.++(show fsz) Data.List.++ " bytes")
    frame_array <- Client.receive_data_int64_t x frame_address (fromIntegral 1)
    let frame = Data.Vector.head frame_array
    word_array <- Client.receive_data_int64_t x frame package_size
    returnVal <- process_words word_array package_size 0 numRx
    discardVal <- Client.max_framed_stream_discard x to_cpu (fromIntegral 1);
    if (returnVal) == 0
        then return 0
        else do
            stream_read x to_cpu (fromIntegral 1) frame_address fsz_address
            process_data x fsz_address frame_address to_cpu (numRx + 1)
-- process words function
-- Prints every word of the package in hex.  Returns 0 when the package is
-- the all-zero terminator (indices 0..2 all zero), 1 otherwise.
-- NOTE(review): the second clause's 'package_size' argument shadows the
-- top-level constant of the same name (here it is a countdown).
process_words word_array 0 i numRx = do
    if (word_array ! 0 ) == 0 && (word_array ! 1 ) == 0 && (word_array ! 2 ) == 0
        then return 0
        else return 1
process_words word_array package_size i numRx = do
    let word = (word_array ! i)
    putStr ("FRAME[" Data.List.++ (show numRx) Data.List.++ "] WORD[" Data.List.++ (show i) Data.List.++ "]: ")
    putStrLn $ printf "0x%08x" word
    process_words word_array (package_size - 1) (i + 1) numRx
-- | Run a Thrift call, printing any exception (message prefixed with
-- @label@) exactly the way the original inline try/case blocks did, and
-- hand back the Either so callers keep the original lazy 'getRight'
-- semantics (the program carries on after a failure, crashing only if a
-- failed result is later forced).
reportCall :: String -> IO a -> IO (Either SomeException a)
reportCall label action = do
    e <- try action
    case e of
        Left ex -> putStrLn $ "Caught exception" Data.List.++ label Data.List.++ ": " Data.List.++ show ex
        Right _ -> return ()
    return e

-- | Drive the SignExt DFE over Thrift: allocate and configure the network
-- addresses, load and run the maxfile, bind a UDP socket, stream framed
-- data back to the CPU, then release every resource.  The duration of each
-- major phase is printed.  The repeated try/case/print boilerplate of the
-- original has been factored into 'reportCall' (behaviour unchanged).
main = do
    startTime <- getCurrentTime
    startDFETime <- getCurrentTime -- kept from the original; currently unused
    args <- getArgs
    case (Data.List.length args) of
        2 -> return ()
        otherwise -> do
            putStrLn ("Usage: dfe_ip remote_ip")
            exitWith $ ExitFailure (-1)
    let dfe_ip = (args !! 0)
    let remote_ip = (args !! 1)
    -- Make socket
    transport <- hOpen ("localhost", PortNumber 9090)
    -- Wrap in a protocol
    let protocol = BinaryProtocol transport
    -- Create a client to use the protocol encoder
    let client = (protocol, protocol)
    stopTime <- getCurrentTime
    putStrLn ("Creating a client and opening connection:\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
    -- Allocate DFE ip address
    e <- reportCall "" (Client.malloc_int64_t client (fromIntegral 5))
    let dfe_ip_address = getRight e
    e <- reportCall " inet_aton" (Client.inet_aton client (pack dfe_ip) dfe_ip_address)
    let dfe_ip_address_aligned = getRight e
    -- Allocate Remote ip address
    e <- reportCall "" (Client.malloc_int64_t client (fromIntegral 5))
    let remote_ip_address = getRight e
    Client.inet_aton client (pack remote_ip) remote_ip_address
    -- Allocate Netmask address
    startTime <- getCurrentTime
    e <- reportCall "" (Client.malloc_int64_t client (fromIntegral 5))
    let netmask_address = getRight e
    Client.inet_aton client (pack "255.255.255.0") netmask_address
    -- Initialize maxfile
    startTime <- getCurrentTime
    e <- reportCall "" (Client.signExt_init client)
    let maxfile = getRight e
    stopTime <- getCurrentTime
    putStrLn ("Initializing maxfile:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
    -- Load DFE
    startTime <- getCurrentTime
    e <- reportCall "" (Client.max_load client maxfile (pack "*"))
    let engine = getRight e
    stopTime <- getCurrentTime
    putStrLn ("Loading DFE:\t\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
    -- Set Enum
    let enumKey = Max_config_key_bool_t_struct (Just MAX_CONFIG_PRINTF_TO_STDOUT)
    _ <- reportCall "" (Client.max_config_set_bool client (enumKey) (1))
    -- Set actions
    e <- reportCall "" (Client.max_actions_init client maxfile (pack "default"))
    let actions = getRight e
    -- Run actions
    _ <- reportCall "" (Client.max_run client engine actions)
    -- Free actions
    _ <- reportCall "" (Client.max_actions_free client actions)
    -- Allocate buffer address
    startTime <- getCurrentTime
    e <- reportCall "" (Client.malloc_int64_t client (fromIntegral 1))
    let buffer_address = getRight e
    e <- reportCall "" (Client.posix_memalign client buffer_address buffer_alignment buffer_size)
    let buffer_address_aligned = getRight e
    ---- Read buffer
    buffer_array <- Client.receive_data_int64_t client (fromIntegral buffer_address) (fromIntegral 1)
    let buffer = Data.Vector.head buffer_array
    -- Framed stream setup
    to_cpu <- Client.max_framed_stream_setup client engine (pack "toCPU") buffer buffer_size (fromIntegral (-1))
    -- Max_net_connection
    let enumconn = Max_net_connection_t_struct (Just MAX_NET_CONNECTION_QSFP_TOP_10G_PORT1)
    --- Ip config
    _ <- reportCall "" (Client.max_ip_config client engine enumconn (fromIntegral dfe_ip_address) netmask_address)
    -- Udp create socket
    e <- reportCall "" (Client.max_udp_create_socket client engine (pack "udpTopPort1"))
    let dfe_socket = getRight e
    -- Socket bind
    let port = 2000
    _ <- reportCall "" (Client.max_udp_bind client dfe_socket port)
    -- Udp connect
    _ <- reportCall "" (Client.max_udp_connect client dfe_socket remote_ip_address (fromIntegral 0))
    putStrLn ("Listening on: " Data.List.++ dfe_ip Data.List.++ " port " Data.List.++ (show port))
    putStrLn ("Waiting for kernel response...")
    -- Allocate memory for frame address
    e <- reportCall "" (Client.malloc_int64_t client (fromIntegral 1))
    let frame_address = getRight e
    -- Allocate memory for fsz address
    e <- reportCall "" (Client.malloc_int64_t client (fromIntegral 1))
    let fsz_address = getRight e
    -- Main loop, wait for packages
    let numMessageRx = 0
    framed_return <- stream_read client to_cpu 1 frame_address fsz_address
    val <- process_data client fsz_address frame_address to_cpu numMessageRx
    -- Close sockets
    _ <- reportCall "" (Client.max_udp_close client dfe_socket)
    _ <- reportCall "" (Client.max_framed_stream_release client to_cpu)
    -- Unload DFE
    startTime <- getCurrentTime
    _ <- reportCall "" (Client.max_unload client engine)
    stopTime <- getCurrentTime
    putStrLn ("Unloading DFE:\t\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
    _ <- reportCall "" (Client.max_file_free client maxfile)
    -- Free allocated memory for streams on server
    startTime <- getCurrentTime
    _ <- reportCall "" (Client.free client (fromIntegral dfe_ip_address))
    _ <- reportCall "" (Client.free client remote_ip_address)
    _ <- reportCall "" (Client.free client netmask_address)
    _ <- reportCall "" (Client.free client buffer_address)
    _ <- reportCall "" (Client.free client (fromIntegral buffer_address_aligned))
    stopTime <- getCurrentTime
    putStrLn ("Freeing allocated memory for streams on server:\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
    -- Free allocated maxfile data
    startTime <- getCurrentTime
    _ <- reportCall "" (Client.signExt_free client)
    -- Close!
    startTime <- getCurrentTime
    tClose transport
    stopTime <- getCurrentTime
    putStrLn ("Closing connection:\t\t\t\t" Data.List.++ (show (diffUTCTime stopTime startTime)))
| maxeler/maxskins | examples/SignExt/client/hs/Dynamic/SignExt.hs | bsd-2-clause | 12,888 | 254 | 14 | 3,487 | 3,670 | 1,883 | 1,787 | 234 | 30 |
{-| Module describing a node.
All updates are functional (copy-based) and return a new node with
updated value.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.HTools.Node
( Node(..)
, List
, pCpuEff
-- * Constructor
, create
-- ** Finalization after data loading
, buildPeers
, setIdx
, setAlias
, setOffline
, setXmem
, setFmem
, setPri
, setSec
, setMaster
, setNodeTags
, setMdsk
, setMcpu
, setPolicy
, setCpuSpeed
, setMigrationTags
, setRecvMigrationTags
-- * Tag maps
, addTags
, delTags
, rejectAddTags
-- * Diagnostic commands
, getPolicyHealth
-- * Instance (re)location
, removePri
, removeSec
, addPri
, addPriEx
, addSec
, addSecEx
, checkMigration
-- * Stats
, availDisk
, availMem
, availCpu
, iMem
, iDsk
, conflictingPrimaries
-- * Generate OpCodes
, genPowerOnOpCodes
, genPowerOffOpCodes
, genAddTagsOpCode
-- * Formatting
, defaultFields
, showHeader
, showField
, list
-- * Misc stuff
, AssocList
, AllocElement
, noSecondary
, computeGroups
, mkNodeGraph
, mkRebootNodeGraph
, haveExclStorage
) where
import Control.Monad (liftM, liftM2)
import Control.Applicative ((<$>), (<*>))
import qualified Data.Foldable as Foldable
import Data.Function (on)
import qualified Data.Graph as Graph
import qualified Data.IntMap as IntMap
import Data.List hiding (group)
import qualified Data.Map as Map
import Data.Ord (comparing)
import qualified Data.Set as Set
import Text.Printf (printf)
import qualified Ganeti.Constants as C
import qualified Ganeti.OpCodes as OpCodes
import Ganeti.Types (OobCommand(..), TagKind(..), mkNonEmpty)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.PeerMap as P
import Ganeti.BasicTypes
import qualified Ganeti.HTools.Types as T
-- * Type declarations
-- | The tag map type: for every (exclusion) tag, the number of primary
-- instances on the node carrying it (maintained by 'addTags'/'delTags').
type TagMap = Map.Map String Int
-- | The node type.
data Node = Node
  { name :: String -- ^ The node name
  , alias :: String -- ^ The shortened name (for display purposes)
  , tMem :: Double -- ^ Total memory (MiB)
  , nMem :: Int -- ^ Node memory (MiB)
  , fMem :: Int -- ^ Free memory (MiB)
  , xMem :: Int -- ^ Unaccounted memory (MiB)
  , tDsk :: Double -- ^ Total disk space (MiB)
  , fDsk :: Int -- ^ Free disk space (MiB)
  , tCpu :: Double -- ^ Total CPU count
  , tCpuSpeed :: Double -- ^ Relative CPU speed
  , nCpu :: Int -- ^ VCPUs used by the node OS
  , uCpu :: Int -- ^ Used VCPU count
  , tSpindles :: Int -- ^ Node spindles (spindle_count node parameter,
                     -- or actual spindles, see note below)
  , fSpindles :: Int -- ^ Free spindles (see note below)
  , pList :: [T.Idx] -- ^ List of primary instance indices
  , sList :: [T.Idx] -- ^ List of secondary instance indices
  , idx :: T.Ndx -- ^ Internal index for book-keeping
  , peers :: P.PeerMap -- ^ Pnode to instance mapping
  , failN1 :: Bool -- ^ Whether the node has failed n1
  , rMem :: Int -- ^ Maximum memory needed for failover by
                -- primaries of this node
  , pMem :: Double -- ^ Percent of free memory
  , pDsk :: Double -- ^ Percent of free disk
  , pRem :: Double -- ^ Percent of reserved memory
  , pCpu :: Double -- ^ Ratio of virtual to physical CPUs
  , mDsk :: Double -- ^ Minimum free disk ratio
  , loDsk :: Int -- ^ Autocomputed from mDsk low disk
                 -- threshold
  , hiCpu :: Int -- ^ Autocomputed from mCpu high cpu
                 -- threshold
  , hiSpindles :: Double -- ^ Limit auto-computed from policy spindle_ratio
                         -- and the node spindle count (see note below)
  , instSpindles :: Double -- ^ Spindles used by instances (see note below)
  , offline :: Bool -- ^ Whether the node should not be used for
                    -- allocations and skipped from score
                    -- computations
  , isMaster :: Bool -- ^ Whether the node is the master node
  , nTags :: [String] -- ^ The node tags for this node
  , utilPool :: T.DynUtil -- ^ Total utilisation capacity
  , utilLoad :: T.DynUtil -- ^ Sum of instance utilisation
  , pTags :: TagMap -- ^ Primary instance exclusion tags and their count
  , group :: T.Gdx -- ^ The node's group (index)
  , iPolicy :: T.IPolicy -- ^ The instance policy (of the node's group)
  , exclStorage :: Bool -- ^ Effective value of exclusive_storage
  , migTags :: Set.Set String -- ^ migration-relevant tags
  , rmigTags :: Set.Set String -- ^ migration tags able to receive
  } deriving (Show, Eq)
{- A note on how we handle spindles
With exclusive storage spindles is a resource, so we track the number of
spindles still available (fSpindles). This is the only reliable way, as some
spindles could be used outside of Ganeti. When exclusive storage is off,
spindles are a way to represent disk I/O pressure, and hence we track the amount
used by the instances. We compare it against 'hiSpindles', computed from the
instance policy, to avoid policy violations. In both cases we store the total
spindles in 'tSpindles'.
-}
-- | Make 'Node' usable as a generic cluster element (named, indexed
-- container entry); 'allNames' exposes both the full name and the alias
-- so lookups work with either.
instance T.Element Node where
  nameOf = name
  idxOf = idx
  setAlias = setAlias
  setIdx = setIdx
  allNames n = [name n, alias n]
-- | Derived parameter: ratio of virtual to physical CPUs, weighted
-- by the relative CPU speed.
pCpuEff :: Node -> Double
pCpuEff n = pCpu n / tCpuSpeed n
-- | A simple name for the int, node association list.
type AssocList = [(T.Ndx, Node)]
-- | A simple name for a node map.
type List = Container.Container Node
-- | A simple name for an allocation element (here just for logistic
-- reasons).  Presumably (updated node list, placed instance, nodes used,
-- score) — confirm at the allocation call sites.
type AllocElement = (List, Instance.Instance, [Node], T.Score)
-- | Constant node index for a non-moveable instance.
noSecondary :: T.Ndx
noSecondary = -1
-- * Helper functions
-- | Add a tag to a tagmap (incrementing its reference count).
addTag :: TagMap -> String -> TagMap
addTag t s = Map.insertWith (+) s 1 t
-- | Add multiple tags.
addTags :: TagMap -> [String] -> TagMap
addTags = foldl' addTag
-- | Adjust or delete a tag from a tagmap: the count is decremented, and
-- the entry is dropped entirely once it would reach zero.
delTag :: TagMap -> String -> TagMap
delTag t s = Map.update (\v -> if v > 1
                                 then Just (v-1)
                                 else Nothing)
             s t
-- | Remove multiple tags.
delTags :: TagMap -> [String] -> TagMap
delTags = foldl' delTag
-- | Check if we can add a list of tags to a tagmap; True means at least
-- one of the tags is already present, i.e. adding them would conflict.
rejectAddTags :: TagMap -> [String] -> Bool
rejectAddTags t = any (`Map.member` t)
-- | Check how many primary instances have conflicting tags. The
-- algorithm to compute this is to sum the count of all tags, then
-- subtract the size of the tag map (since each tag has at least one,
-- non-conflicting instance); this is equivalent to summing the
-- values in the tag map minus one.  The result is 0 when no exclusion
-- tag is shared by two primaries.
conflictingPrimaries :: Node -> Int
conflictingPrimaries (Node { pTags = t }) = Foldable.sum t - Map.size t
-- | Add @delta@ to @base@ only when the flag is set; otherwise return
-- @base@ unchanged.
incIf :: (Num a) => Bool -> a -> a -> a
incIf cond base delta
  | cond      = base + delta
  | otherwise = base
-- | Subtract @delta@ from @base@ only when the flag is set; otherwise
-- return @base@ unchanged.
decIf :: (Num a) => Bool -> a -> a -> a
decIf cond base delta
  | cond      = base - delta
  | otherwise = base
-- | Is exclusive storage enabled on at least one node of the list?
haveExclStorage :: List -> Bool
haveExclStorage = any exclStorage . Container.elems
-- * Initialization functions
-- | Create a new node.
--
-- The index and the peers map are empty, and will need to be updated
-- later via the 'setIdx' and 'buildPeers' functions.  The derived
-- percentages and thresholds are initialised from the given totals and
-- the default instance policy.
create :: String -> Double -> Int -> Int
       -> Double -> Int -> Double -> Int -> Bool
       -> Int -> Int -> T.Gdx -> Bool
       -> Node
create name_init mem_t_init mem_n_init mem_f_init
       dsk_t_init dsk_f_init cpu_t_init cpu_n_init offline_init
       spindles_t_init spindles_f_init group_init excl_stor =
  Node { name = name_init
       , alias = name_init
       , tMem = mem_t_init
       , nMem = mem_n_init
       , fMem = mem_f_init
       , tDsk = dsk_t_init
       , fDsk = dsk_f_init
       , tCpu = cpu_t_init
       , tCpuSpeed = 1
       , nCpu = cpu_n_init
       , uCpu = cpu_n_init
       , tSpindles = spindles_t_init
       , fSpindles = spindles_f_init
       , pList = []
       , sList = []
       , failN1 = True
       , idx = -1
       , peers = P.empty
       , rMem = 0
       , pMem = fromIntegral mem_f_init / mem_t_init
       -- with exclusive storage the free-disk ratio is spindle-based
       , pDsk = if excl_stor
                  then computePDsk spindles_f_init $ fromIntegral spindles_t_init
                  else computePDsk dsk_f_init dsk_t_init
       , pRem = 0
       , pCpu = fromIntegral cpu_n_init / cpu_t_init
       , offline = offline_init
       , isMaster = False
       , nTags = []
       , xMem = 0
       , mDsk = T.defReservedDiskRatio
       , loDsk = mDskToloDsk T.defReservedDiskRatio dsk_t_init
       , hiCpu = mCpuTohiCpu (T.iPolicyVcpuRatio T.defIPolicy) cpu_t_init
       , hiSpindles = computeHiSpindles (T.iPolicySpindleRatio T.defIPolicy)
                      spindles_t_init
       , instSpindles = 0
       , utilPool = T.baseUtil
       , utilLoad = T.zeroUtil
       , pTags = Map.empty
       , group = group_init
       , iPolicy = T.defIPolicy
       , exclStorage = excl_stor
       , migTags = Set.empty
       , rmigTags = Set.empty
       }
-- | Conversion formula from mDsk\/tDsk to loDsk: the (floored) amount of
-- disk corresponding to the given reserved-disk ratio.
mDskToloDsk :: Double -> Double -> Int
mDskToloDsk mval tdsk = floor (mval * tdsk)
-- | Conversion formula from mCpu\/tCpu to hiCpu: the (floored) virtual CPU
-- count corresponding to the given vcpu ratio.
mCpuTohiCpu :: Double -> Double -> Int
mCpuTohiCpu mval tcpu = floor (mval * tcpu)
-- | Conversion formula from spindles and spindle ratio to hiSpindles.
computeHiSpindles :: Double -> Int -> Double
computeHiSpindles spindle_ratio spindles = spindle_ratio * fromIntegral spindles
-- | Changes the index.
--
-- This is used only during the building of the data structures.
setIdx :: Node -> T.Ndx -> Node
setIdx t i = t {idx = i}
-- | Changes the alias.
--
-- This is used only during the building of the data structures.
setAlias :: Node -> String -> Node
setAlias t s = t { alias = s }
-- | Sets the offline attribute.
setOffline :: Node -> Bool -> Node
setOffline t val = t { offline = val }
-- | Sets the master attribute.
setMaster :: Node -> Bool -> Node
setMaster t val = t { isMaster = val }
-- | Sets the node tags attribute.
setNodeTags :: Node -> [String] -> Node
setNodeTags t val = t { nTags = val }
-- | Set migration tags.
setMigrationTags :: Node -> Set.Set String -> Node
setMigrationTags t val = t { migTags = val }
-- | Set the migration tags a node is able to receive.
setRecvMigrationTags :: Node -> Set.Set String -> Node
setRecvMigrationTags t val = t { rmigTags = val }
-- | Sets the unaccounted memory.
setXmem :: Node -> Int -> Node
setXmem t val = t { xMem = val }
-- | Sets the max disk usage ratio, recomputing the derived 'loDsk'
-- threshold from the node's total disk.
setMdsk :: Node -> Double -> Node
setMdsk t val = t { mDsk = val, loDsk = mDskToloDsk val (tDsk t) }
-- | Sets the max cpu usage ratio. This will update the node's
-- ipolicy, losing sharing (but it should be a seldomly done operation).
setMcpu :: Node -> Double -> Node
setMcpu t val =
  let new_ipol = (iPolicy t) { T.iPolicyVcpuRatio = val }
  in t { hiCpu = mCpuTohiCpu val (tCpu t), iPolicy = new_ipol }
-- | Sets the policy, recomputing the thresholds derived from it
-- ('hiCpu' and 'hiSpindles').
setPolicy :: T.IPolicy -> Node -> Node
setPolicy pol node =
  node { iPolicy = pol
       , hiCpu = mCpuTohiCpu (T.iPolicyVcpuRatio pol) (tCpu node)
       , hiSpindles = computeHiSpindles (T.iPolicySpindleRatio pol)
                      (tSpindles node)
       }
-- | Computes the maximum reserved memory for peers from a peer map.
computeMaxRes :: P.PeerMap -> P.Elem
computeMaxRes = P.maxElem
-- | Builds the peer map for a given node: every secondary instance
-- charges its memory against its primary node; the derived reserved
-- memory fields ('rMem', 'pRem') and the N+1 failure flag are refreshed.
buildPeers :: Node -> Instance.List -> Node
buildPeers t il =
  let mdata = map
              (\i_idx -> let inst = Container.find i_idx il
                             -- only instances keeping memory on the
                             -- secondary count towards the reservation
                             mem = if Instance.usesSecMem inst
                                     then Instance.mem inst
                                     else 0
                         in (Instance.pNode inst, mem))
              (sList t)
      pmap = P.accumArray (+) mdata
      new_rmem = computeMaxRes pmap
      new_failN1 = fMem t <= new_rmem
      new_prem = fromIntegral new_rmem / tMem t
  in t {peers=pmap, failN1 = new_failN1, rMem = new_rmem, pRem = new_prem}
-- | Calculate the new spindle usage.  With exclusive storage, spindles are
-- tracked as free spindles instead (see the note above), so usage is 0.
calcSpindleUse ::
  Bool -- Action: True = adding instance, False = removing it
  -> Node -> Instance.Instance -> Double
calcSpindleUse _ (Node {exclStorage = True}) _ = 0.0
calcSpindleUse act n@(Node {exclStorage = False}) i =
  f (Instance.usesLocalStorage i) (instSpindles n)
    (fromIntegral $ Instance.spindleUse i)
  where
    f :: Bool -> Double -> Double -> Double -- avoid monomorphism restriction
    f = if act then incIf else decIf
-- | Calculate the new number of free spindles.  Only meaningful with
-- exclusive storage (otherwise spindles are tracked as usage, see above).
calcNewFreeSpindles ::
  Bool -- Action: True = adding instance, False = removing
  -> Node -> Instance.Instance -> Int
calcNewFreeSpindles _ (Node {exclStorage = False}) _ = 0
calcNewFreeSpindles act n@(Node {exclStorage = True}) i =
  case Instance.getTotalSpindles i of
    Nothing -> if act
                 then -1 -- Force a spindle error, so the instance don't go here
                 else fSpindles n -- No change, as we aren't sure
    Just s -> (if act then (-) else (+)) (fSpindles n) s
-- | Assigns an instance to a node as primary and update the used VCPU
-- count, utilisation data and tags map.  VCPUs are only charged when the
-- instance is online (via 'Instance.applyIfOnline').
setPri :: Node -> Instance.Instance -> Node
setPri t inst = t { pList = Instance.idx inst:pList t
                  , uCpu = new_count
                  , pCpu = fromIntegral new_count / tCpu t
                  , utilLoad = utilLoad t `T.addUtil` Instance.util inst
                  , pTags = addTags (pTags t) (Instance.exclTags inst)
                  , instSpindles = calcSpindleUse True t inst
                  }
  where new_count = Instance.applyIfOnline inst (+ Instance.vcpus inst)
                    (uCpu t )
-- | Assigns an instance to a node as secondary and updates disk
-- utilisation (only the disk weight of the utilisation is affected).
setSec :: Node -> Instance.Instance -> Node
setSec t inst = t { sList = Instance.idx inst:sList t
                  , utilLoad = old_load { T.dskWeight = T.dskWeight old_load +
                                          T.dskWeight (Instance.util inst) }
                  , instSpindles = calcSpindleUse True t inst
                  }
  where old_load = utilLoad t
-- | Computes the new 'pDsk' value, handling nodes without local disk
-- storage (we consider all their disk unused, i.e. the ratio is 1).
computePDsk :: Int -> Double -> Double
computePDsk free total
  | total == 0 = 1
  | otherwise  = fromIntegral free / total
-- | Computes the new 'pDsk' value, handling the exclusive storage state:
-- with exclusive storage the ratio is spindle-based, otherwise disk-based.
computeNewPDsk :: Node -> Int -> Int -> Double
computeNewPDsk node new_free_sp new_free_dsk =
  if exclStorage node
    then computePDsk new_free_sp . fromIntegral $ tSpindles node
    else computePDsk new_free_dsk $ tDsk node
-- * Diagnostic functions
-- | For a node diagnose whether it conforms with all policies. The type
-- is chosen to represent that of a no-op node operation.  Checks the
-- spindle limit and the vcpu-ratio limit of the node's instance policy.
getPolicyHealth :: Node -> T.OpResult ()
getPolicyHealth n =
  case () of
    _ | instSpindles n > hiSpindles n -> Bad T.FailDisk
      | pCpu n > T.iPolicyVcpuRatio (iPolicy n) -> Bad T.FailCPU
      | otherwise -> Ok ()
-- * Update functions
-- | Set the (relative) CPU speed.
setCpuSpeed :: Node -> Double -> Node
setCpuSpeed n f = n { tCpuSpeed = f }
-- | Sets the free memory, refreshing the derived N+1 status and the
-- free-memory percentage.
setFmem :: Node -> Int -> Node
setFmem t new_mem =
  let new_n1 = new_mem < rMem t
      new_mp = fromIntegral new_mem / tMem t
  in t { fMem = new_mem, failN1 = new_n1, pMem = new_mp }
-- | Removes a primary instance.
-- Gives back memory/disk/CPU/spindles to the node and recomputes every
-- derived field (percentages, N+1 flag, dynamic load, exclusion tags).
removePri :: Node -> Instance.Instance -> Node
removePri t inst =
  let iname = Instance.idx inst
      i_online = Instance.notOffline inst
      uses_disk = Instance.usesLocalStorage inst
      new_plist = delete iname (pList t)
      -- memory/CPU only come back if the instance was online; disk only
      -- if it actually used local storage
      new_mem = incIf i_online (fMem t) (Instance.mem inst)
      new_dsk = incIf uses_disk (fDsk t) (Instance.dsk inst)
      new_free_sp = calcNewFreeSpindles False t inst
      new_inst_sp = calcSpindleUse False t inst
      new_mp = fromIntegral new_mem / tMem t
      new_dp = computeNewPDsk t new_free_sp new_dsk
      new_failn1 = new_mem <= rMem t
      new_ucpu = decIf i_online (uCpu t) (Instance.vcpus inst)
      new_rcpu = fromIntegral new_ucpu / tCpu t
      new_load = utilLoad t `T.subUtil` Instance.util inst
  in t { pList = new_plist, fMem = new_mem, fDsk = new_dsk
       , failN1 = new_failn1, pMem = new_mp, pDsk = new_dp
       , uCpu = new_ucpu, pCpu = new_rcpu, utilLoad = new_load
       , pTags = delTags (pTags t) (Instance.exclTags inst)
       , instSpindles = new_inst_sp, fSpindles = new_free_sp
       }
-- | Removes a secondary instance.
-- Beyond giving back disk/spindles, this maintains the per-peer memory
-- map and may lower the reserved memory ('rMem') if this peer was the
-- one defining the maximum.
removeSec :: Node -> Instance.Instance -> Node
removeSec t inst =
  let iname = Instance.idx inst
      uses_disk = Instance.usesLocalStorage inst
      cur_dsk = fDsk t
      pnode = Instance.pNode inst
      new_slist = delete iname (sList t)
      new_dsk = incIf uses_disk cur_dsk (Instance.dsk inst)
      new_free_sp = calcNewFreeSpindles False t inst
      new_inst_sp = calcSpindleUse False t inst
      old_peers = peers t
      old_peem = P.find pnode old_peers
      new_peem = decIf (Instance.usesSecMem inst) old_peem (Instance.mem inst)
      -- drop the peer entirely once it reserves no memory on this node
      new_peers = if new_peem > 0
                    then P.add pnode new_peem old_peers
                    else P.remove pnode old_peers
      old_rmem = rMem t
      -- only recompute the (expensive) maximum if this peer defined it
      new_rmem = if old_peem < old_rmem
                   then old_rmem
                   else computeMaxRes new_peers
      new_prem = fromIntegral new_rmem / tMem t
      new_failn1 = fMem t <= new_rmem
      new_dp = computeNewPDsk t new_free_sp new_dsk
      old_load = utilLoad t
      new_load = old_load { T.dskWeight = T.dskWeight old_load -
                            T.dskWeight (Instance.util inst) }
  in t { sList = new_slist, fDsk = new_dsk, peers = new_peers
       , failN1 = new_failn1, rMem = new_rmem, pDsk = new_dp
       , pRem = new_prem, utilLoad = new_load
       , instSpindles = new_inst_sp, fSpindles = new_free_sp
       }
-- | Adds a primary instance (basic version).
addPri :: Node -> Instance.Instance -> T.OpResult Node
addPri = addPriEx False

-- | Adds a primary instance (extended version).
-- Hard checks (running out of memory/disk/spindles, tag conflicts) are
-- always enforced; soft checks (policy thresholds, N+1) are skipped when
-- the caller forces the addition.
addPriEx :: Bool               -- ^ Whether to override the N+1 and
                               -- other /soft/ checks, useful if we
                               -- come from a worse status
                               -- (e.g. offline)
         -> Node               -- ^ The target node
         -> Instance.Instance  -- ^ The instance to add
         -> T.OpResult Node    -- ^ The result of the operation,
                               -- either the new version of the node
                               -- or a failure mode
addPriEx force t inst =
  let iname = Instance.idx inst
      i_online = Instance.notOffline inst
      uses_disk = Instance.usesLocalStorage inst
      cur_dsk = fDsk t
      new_mem = decIf i_online (fMem t) (Instance.mem inst)
      new_dsk = decIf uses_disk cur_dsk (Instance.dsk inst)
      new_free_sp = calcNewFreeSpindles True t inst
      new_inst_sp = calcSpindleUse True t inst
      new_failn1 = new_mem <= rMem t
      new_ucpu = incIf i_online (uCpu t) (Instance.vcpus inst)
      new_pcpu = fromIntegral new_ucpu / tCpu t
      new_dp = computeNewPDsk t new_free_sp new_dsk
      -- a negative policy ratio means "no vCPU limit"
      l_cpu = T.iPolicyVcpuRatio $ iPolicy t
      new_load = utilLoad t `T.addUtil` Instance.util inst
      inst_tags = Instance.exclTags inst
      old_tags = pTags t
      strict = not force
  in case () of
       _ | new_mem <= 0 -> Bad T.FailMem
         | uses_disk && new_dsk <= 0 -> Bad T.FailDisk
         | uses_disk && new_dsk < loDsk t && strict -> Bad T.FailDisk
         | uses_disk && exclStorage t && new_free_sp < 0 -> Bad T.FailSpindles
         | uses_disk && new_inst_sp > hiSpindles t && strict -> Bad T.FailDisk
         -- only reject on N+1 if the node was previously N+1-safe
         | new_failn1 && not (failN1 t) && strict -> Bad T.FailMem
         | l_cpu >= 0 && l_cpu < new_pcpu && strict -> Bad T.FailCPU
         | rejectAddTags old_tags inst_tags -> Bad T.FailTags
         | otherwise ->
           let new_plist = iname:pList t
               new_mp = fromIntegral new_mem / tMem t
               r = t { pList = new_plist, fMem = new_mem, fDsk = new_dsk
                     , failN1 = new_failn1, pMem = new_mp, pDsk = new_dp
                     , uCpu = new_ucpu, pCpu = new_pcpu
                     , utilLoad = new_load
                     , pTags = addTags old_tags inst_tags
                     , instSpindles = new_inst_sp
                     , fSpindles = new_free_sp
                     }
           in Ok r
-- | Adds a secondary instance (basic version).
addSec :: Node -> Instance.Instance -> T.Ndx -> T.OpResult Node
addSec = addSecEx False

-- | Adds a secondary instance (extended version).
-- The extra 'T.Ndx' is the primary node of the instance, used to update
-- the peer memory map. As with 'addPriEx', @force@ disables the soft
-- checks only.
addSecEx :: Bool -> Node -> Instance.Instance -> T.Ndx -> T.OpResult Node
addSecEx force t inst pdx =
  let iname = Instance.idx inst
      old_peers = peers t
      old_mem = fMem t
      new_dsk = fDsk t - Instance.dsk inst
      new_free_sp = calcNewFreeSpindles True t inst
      new_inst_sp = calcSpindleUse True t inst
      -- memory is reserved here only for DRBD-like templates
      secondary_needed_mem = if Instance.usesSecMem inst
                               then Instance.mem inst
                               else 0
      new_peem = P.find pdx old_peers + secondary_needed_mem
      new_peers = P.add pdx new_peem old_peers
      new_rmem = max (rMem t) new_peem
      new_prem = fromIntegral new_rmem / tMem t
      new_failn1 = old_mem <= new_rmem
      new_dp = computeNewPDsk t new_free_sp new_dsk
      old_load = utilLoad t
      new_load = old_load { T.dskWeight = T.dskWeight old_load +
                            T.dskWeight (Instance.util inst) }
      strict = not force
  in case () of
       _ | not (Instance.hasSecondary inst) -> Bad T.FailDisk
         | new_dsk <= 0 -> Bad T.FailDisk
         | new_dsk < loDsk t && strict -> Bad T.FailDisk
         | exclStorage t && new_free_sp < 0 -> Bad T.FailSpindles
         | new_inst_sp > hiSpindles t && strict -> Bad T.FailDisk
         | secondary_needed_mem >= old_mem && strict -> Bad T.FailMem
         | new_failn1 && not (failN1 t) && strict -> Bad T.FailMem
         | otherwise ->
           let new_slist = iname:sList t
               r = t { sList = new_slist, fDsk = new_dsk
                     , peers = new_peers, failN1 = new_failn1
                     , rMem = new_rmem, pDsk = new_dp
                     , pRem = new_prem, utilLoad = new_load
                     , instSpindles = new_inst_sp
                     , fSpindles = new_free_sp
                     }
           in Ok r
-- | Predicate on whether migration is supported between two nodes.
-- Succeeds exactly when every migration tag of the source is accepted
-- (via the received-migration tag set) by the target.
checkMigration :: Node -> Node -> T.OpResult ()
checkMigration nsrc ntarget
  | migTags nsrc `Set.isSubsetOf` rmigTags ntarget = Ok ()
  | otherwise                                      = Bad T.FailMig
-- * Stats functions

-- | Computes the amount of available disk on a given node: free space
-- above the reserved low-disk watermark, clamped at zero.
availDisk :: Node -> Int
availDisk t = max 0 (fDsk t - loDsk t)

-- | Computes the amount of used disk on a given node.
iDsk :: Node -> Int
iDsk t = truncate (tDsk t) - fDsk t

-- | Computes the amount of available memory on a given node: free memory
-- above the N+1 reservation, clamped at zero.
availMem :: Node -> Int
availMem t = max 0 (fMem t - rMem t)

-- | Computes the amount of available virtual CPUs on a given node:
-- headroom below the high-CPU limit, clamped at zero.
availCpu :: Node -> Int
availCpu t = max 0 (hiCpu t - uCpu t)

-- | The memory used by instances on a given node.
iMem :: Node -> Int
iMem t = truncate (tMem t) - nMem t - xMem t - fMem t
-- * Node graph functions

-- These functions do the transformations needed so that nodes can be
-- represented as a graph connected by the instances that are replicated
-- on them.

-- * Making of a Graph from a node/instance list

-- | Transform an instance into a list of edges on the node graph.
-- Both directions are emitted, so the resulting graph is symmetric.
instanceToEdges :: Instance.Instance -> [Graph.Edge]
instanceToEdges i
  | Instance.hasSecondary i = [(pnode,snode), (snode,pnode)]
  | otherwise = []
  where pnode = Instance.pNode i
        snode = Instance.sNode i

-- | Transform the list of instances into list of destination edges
instancesToEdges :: Instance.List -> [Graph.Edge]
instancesToEdges = concatMap instanceToEdges . Container.elems

-- | Transform the list of nodes into vertices bounds.
-- Returns Nothing if the list is empty.
nodesToBounds :: List -> Maybe Graph.Bounds
nodesToBounds nl = liftM2 (,) nmin nmax
    where nmin = fmap (fst . fst) (IntMap.minViewWithKey nl)
          nmax = fmap (fst . fst) (IntMap.maxViewWithKey nl)

-- | The clique of the primary nodes of the instances with a given secondary.
-- Return the full graph of those nodes that are primary node of at least one
-- instance that has the given node as secondary.
-- Note: the cartesian product includes self-loops (p, p).
nodeToSharedSecondaryEdge :: Instance.List -> Node -> [Graph.Edge]
nodeToSharedSecondaryEdge il n = (,) <$> primaries <*> primaries
  where primaries = map (Instance.pNode . flip Container.find il) $ sList n
-- | Predicate of an edge having both vertices in a set of nodes.
filterValid :: List -> [Graph.Edge] -> [Graph.Edge]
filterValid nl = filter $ \(x,y) -> IntMap.member x nl && IntMap.member y nl

-- | Transform a Node + Instance list into a NodeGraph type.
-- Returns Nothing if the node list is empty.
mkNodeGraph :: List -> Instance.List -> Maybe Graph.Graph
mkNodeGraph nl il =
  liftM (`Graph.buildG` (filterValid nl . instancesToEdges $ il))
  (nodesToBounds nl)

-- | Transform a Nodes + Instances into a NodeGraph with all reboot exclusions.
-- This includes edges between nodes that are the primary nodes of instances
-- that have the same secondary node. Nodes not in the node list will not be
-- part of the graph, but they are still considered for the edges arising from
-- two instances having the same secondary node.
-- Return Nothing if the node list is empty.
mkRebootNodeGraph :: List -> List -> Instance.List -> Maybe Graph.Graph
mkRebootNodeGraph allnodes nl il =
  liftM (`Graph.buildG` filterValid nl edges) (nodesToBounds nl)
  where
    -- plain replication edges plus shared-secondary cliques
    edges = instancesToEdges il `union`
            (Container.elems allnodes >>= nodeToSharedSecondaryEdge il)
-- * Display functions

-- | Return a field for a given node.
-- Unknown field names map to 'T.unknownField' rather than failing.
showField :: Node   -- ^ Node which we're querying
          -> String -- ^ Field name
          -> String -- ^ Field value as string
showField t field =
  case field of
    "idx" -> printf "%4d" $ idx t
    "name" -> alias t
    "fqdn" -> name t
    "status" -> case () of
                  _ | offline t -> "-"
                    | failN1 t -> "*"
                    | otherwise -> " "
    "tmem" -> printf "%5.0f" $ tMem t
    "nmem" -> printf "%5d" $ nMem t
    "xmem" -> printf "%5d" $ xMem t
    "fmem" -> printf "%5d" $ fMem t
    "imem" -> printf "%5d" $ iMem t
    "rmem" -> printf "%5d" $ rMem t
    "amem" -> printf "%5d" $ fMem t - rMem t
    -- disk values are stored in MiB; display as GiB
    "tdsk" -> printf "%5.0f" $ tDsk t / 1024
    "fdsk" -> printf "%5d" $ fDsk t `div` 1024
    "tcpu" -> printf "%4.0f" $ tCpu t
    "ucpu" -> printf "%4d" $ uCpu t
    "pcnt" -> printf "%3d" $ length (pList t)
    "scnt" -> printf "%3d" $ length (sList t)
    "plist" -> show $ pList t
    "slist" -> show $ sList t
    "pfmem" -> printf "%6.4f" $ pMem t
    "pfdsk" -> printf "%6.4f" $ pDsk t
    "rcpu" -> printf "%5.2f" $ pCpu t
    "cload" -> printf "%5.3f" uC
    "mload" -> printf "%5.3f" uM
    "dload" -> printf "%5.3f" uD
    "nload" -> printf "%5.3f" uN
    "ptags" -> intercalate "," . map (uncurry (printf "%s=%d")) .
               Map.toList $ pTags t
    "peermap" -> show $ peers t
    "spindle_count" -> show $ tSpindles t
    "hi_spindles" -> show $ hiSpindles t
    "inst_spindles" -> show $ instSpindles t
    _ -> T.unknownField
  where
    -- destructure the dynamic utilisation once for the *load fields
    T.DynUtil { T.cpuWeight = uC, T.memWeight = uM,
                T.dskWeight = uD, T.netWeight = uN } = utilLoad t
-- | Returns the header and numeric property of a field.
-- The 'Bool' is True for right-aligned numeric columns.
showHeader :: String -> (String, Bool)
showHeader field =
  case field of
    "idx" -> ("Index", True)
    "name" -> ("Name", False)
    "fqdn" -> ("Name", False)
    "status" -> ("F", False)
    "tmem" -> ("t_mem", True)
    "nmem" -> ("n_mem", True)
    "xmem" -> ("x_mem", True)
    "fmem" -> ("f_mem", True)
    "imem" -> ("i_mem", True)
    "rmem" -> ("r_mem", True)
    "amem" -> ("a_mem", True)
    "tdsk" -> ("t_dsk", True)
    "fdsk" -> ("f_dsk", True)
    "tcpu" -> ("pcpu", True)
    "ucpu" -> ("vcpu", True)
    "pcnt" -> ("pcnt", True)
    "scnt" -> ("scnt", True)
    "plist" -> ("primaries", True)
    "slist" -> ("secondaries", True)
    "pfmem" -> ("p_fmem", True)
    "pfdsk" -> ("p_fdsk", True)
    "rcpu" -> ("r_cpu", True)
    "cload" -> ("lCpu", True)
    "mload" -> ("lMem", True)
    "dload" -> ("lDsk", True)
    "nload" -> ("lNet", True)
    "ptags" -> ("PrimaryTags", False)
    "peermap" -> ("PeerMap", False)
    "spindle_count" -> ("NodeSpindles", True)
    "hi_spindles" -> ("MaxSpindles", True)
    "inst_spindles" -> ("InstSpindles", True)
    -- TODO: add node fields (group.uuid, group)
    _ -> (T.unknownField, False)

-- | String converter for the node list functionality.
-- Renders each requested field of the node via 'showField'.
list :: [String] -> Node -> [String]
list fields t = map (showField t) fields
-- | Generate OpCode for setting a node's offline status.
-- Fails in the given monad if the node name is empty.
genOpSetOffline :: (Monad m) => Node -> Bool -> m OpCodes.OpCode
genOpSetOffline node offlineStatus = do
  nodeName <- mkNonEmpty (name node)
  return OpCodes.OpNodeSetParams
           { OpCodes.opNodeName = nodeName
           , OpCodes.opNodeUuid = Nothing
           , OpCodes.opForce = False
           , OpCodes.opHvState = Nothing
           , OpCodes.opDiskState = Nothing
           , OpCodes.opMasterCandidate = Nothing
           , OpCodes.opOffline = Just offlineStatus
           , OpCodes.opDrained = Nothing
           , OpCodes.opAutoPromote = False
           , OpCodes.opMasterCapable = Nothing
           , OpCodes.opVmCapable = Nothing
           , OpCodes.opSecondaryIp = Nothing
           , OpCodes.opgenericNdParams = Nothing
           , OpCodes.opPowered = Nothing
           }

-- | Generate OpCode for applying a OobCommand to the given nodes.
-- Uses the cluster-wide default timeout and power delay constants.
genOobCommand :: (Monad m) => [Node] -> OobCommand -> m OpCodes.OpCode
genOobCommand nodes command = do
  names <- mapM (mkNonEmpty . name) nodes
  return OpCodes.OpOobCommand
    { OpCodes.opNodeNames = names
    , OpCodes.opNodeUuids = Nothing
    , OpCodes.opOobCommand = command
    , OpCodes.opOobTimeout = C.oobTimeout
    , OpCodes.opIgnoreStatus = False
    , OpCodes.opPowerDelay = C.oobPowerDelay
    }

-- | Generate OpCode for powering on a list of nodes.
-- First marks the nodes online, then issues the power-on OOB command.
genPowerOnOpCodes :: (Monad m) => [Node] -> m [OpCodes.OpCode]
genPowerOnOpCodes nodes = do
  opSetParams <- mapM (`genOpSetOffline` False) nodes
  oobCommand <- genOobCommand nodes OobPowerOn
  return $ opSetParams ++ [oobCommand]

-- | Generate OpCodes for powering off a list of nodes.
-- First marks the nodes offline, then issues the power-off OOB command.
genPowerOffOpCodes :: (Monad m) => [Node] -> m [OpCodes.OpCode]
genPowerOffOpCodes nodes = do
  opSetParams <- mapM (`genOpSetOffline` True) nodes
  oobCommand <- genOobCommand nodes OobPowerOff
  return $ opSetParams ++ [oobCommand]

-- | Generate OpCodes for adding tags to a node.
genAddTagsOpCode :: Node -> [String] -> OpCodes.OpCode
genAddTagsOpCode node tags = OpCodes.OpTagsSet
                               { OpCodes.opKind = TagKindNode
                               , OpCodes.opTagsList = tags
                               , OpCodes.opTagsGetName = Just $ name node
                               }
-- | Constant holding the fields we're displaying by default.
-- Field names must be keys understood by 'showField'/'showHeader'.
defaultFields :: [String]
defaultFields =
  [ "status", "name", "tmem", "nmem", "imem", "xmem", "fmem"
  , "rmem", "tdsk", "fdsk", "tcpu", "ucpu", "pcnt", "scnt"
  , "pfmem", "pfdsk", "rcpu"
  , "cload", "mload", "dload", "nload" ]
{-# ANN computeGroups "HLint: ignore Use alternative" #-}
-- | Split a list of nodes into a list of (node group UUID, list of
-- associated nodes).
computeGroups :: [Node] -> [(T.Gdx, [Node])]
computeGroups nodes = map (\nl -> (group (head nl), nl)) clustered
  where
    -- sorting makes equal-group nodes adjacent, so groupBy clusters them;
    -- groupBy only yields non-empty lists, hence 'head' is safe (if you
    -- remove groupBy, also remove the use of head)
    ordered   = sortBy (comparing group) nodes
    clustered = groupBy ((==) `on` group) ordered
| ganeti-github-testing/ganeti-test-1 | src/Ganeti/HTools/Node.hs | bsd-2-clause | 34,243 | 0 | 18 | 9,307 | 7,934 | 4,348 | 3,586 | 632 | 32 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- |
-- Module : Data.Packer.Internal
-- License : BSD-style
-- Maintainer : Vincent Hanquez <vincent@snarc.org>
-- Stability : experimental
-- Portability : unknown
--
-- Internal of packer which is a simple state monad that hold
-- a memory pointer and a size of the memory pointed.
--
module Data.Packer.Internal
( Packing(..)
, Hole
, Unpacking(..)
, MemView(..)
-- * exceptions
, OutOfBoundUnpacking(..)
, OutOfBoundPacking(..)
, HoleInPacking(..)
, IsolationNotFullyConsumed(..)
-- * unpack methods
, unpackUnsafeActRef
, unpackCheckActRef
, unpackUnsafeAct
, unpackCheckAct
, unpackIsolate
, unpackLookahead
, unpackSetPosition
, unpackGetPosition
, unpackGetNbRemaining
-- * pack methods
, packCheckAct
, packHole
, packGetPosition
, fillHole
) where
import Foreign.Ptr
import Data.Data
import Data.Word
import Control.Exception (Exception, throwIO, try, SomeException)
import Control.Monad.IO.Class
import Control.Applicative (Alternative(..), Applicative(..), (<$>), (<*>))
import Control.Concurrent.MVar
import Control.Monad (when)
import Data.ByteArray (MemView(..))
import qualified Data.ByteArray as B
-- | Advance a memory view by @n@ bytes: the base pointer moves forward
-- and the remaining length shrinks by the same amount.
memViewPlus :: MemView -> Int -> MemView
memViewPlus (MemView base len) n = MemView (plusPtr base n) (len - n)
-- | Packing monad.
-- The first argument is a read-only environment: the start of the buffer
-- and an MVar counting currently-open holes. The 'MemView' (current
-- position and bytes left) is threaded state-style through each step.
newtype Packing a = Packing { runPacking_ :: (Ptr Word8, MVar Int) -> MemView -> IO (a, MemView) }

instance Monad Packing where
    return = returnPacking
    (>>=) = bindPacking

instance MonadIO Packing where
    liftIO f = Packing $ \_ st -> f >>= \a -> return (a,st)

instance Functor Packing where
    fmap = fmapPacking

instance Applicative Packing where
    pure = returnPacking
    (<*>) = apPacking

-- | Sequence two packing actions, threading the memory view between them.
bindPacking :: Packing a -> (a -> Packing b) -> Packing b
bindPacking m1 m2 = Packing $ \cst st -> do
    (a, st2) <- runPacking_ m1 cst st
    runPacking_ (m2 a) cst st2
{-# INLINE bindPacking #-}

-- | Map a pure function over the result of a packing action.
fmapPacking :: (a -> b) -> Packing a -> Packing b
fmapPacking f m = Packing $ \cst st -> runPacking_ m cst st >>= \(a, st2) -> return (f a, st2)
{-# INLINE fmapPacking #-}

-- | Lift a pure value; the memory view is returned unchanged.
returnPacking :: a -> Packing a
returnPacking a = Packing $ \_ st -> return (a,st)
{-# INLINE [0] returnPacking #-}

-- | Applicative apply, defined via bind.
apPacking :: Packing (a -> b) -> Packing a -> Packing b
apPacking fm m = fm >>= \p -> m >>= \r2 -> return (p r2)
{-# INLINE [0] apPacking #-}
-- | Unpacking monad.
-- The first 'MemView' is the immutable initial block (used to compute
-- absolute positions); the second is the current view, threaded as state.
newtype Unpacking a = Unpacking { runUnpacking_ :: MemView -> MemView -> IO (a, MemView) }

instance Monad Unpacking where
    return = returnUnpacking
    (>>=) = bindUnpacking

instance MonadIO Unpacking where
    liftIO f = Unpacking $ \_ st -> f >>= \a -> return (a,st)

instance Functor Unpacking where
    fmap = fmapUnpacking

instance Applicative Unpacking where
    pure = returnUnpacking
    (<*>) = apUnpacking

-- Backtracking alternative: if the left action throws any exception, the
-- right one is run from the *original* state.
instance Alternative Unpacking where
    empty = error "Data.Packer (Alternative): empty"
    f <|> g = Unpacking $ \cst st ->
        tryRunUnpacking f cst st >>= either (const $ runUnpacking_ g cst st) return

-- | Run an unpacking action, capturing any exception as a Left result.
tryRunUnpacking :: Unpacking a -> MemView -> MemView -> IO (Either SomeException (a,MemView))
tryRunUnpacking f cst st = try $ runUnpacking_ f cst st

-- | Sequence two unpacking actions, threading the memory view.
bindUnpacking :: Unpacking a -> (a -> Unpacking b) -> Unpacking b
bindUnpacking m1 m2 = Unpacking $ \cst st -> do
    (a, st2) <- runUnpacking_ m1 cst st
    runUnpacking_ (m2 a) cst st2
{-# INLINE bindUnpacking #-}

-- | Map a pure function over the result of an unpacking action.
fmapUnpacking :: (a -> b) -> Unpacking a -> Unpacking b
fmapUnpacking f m = Unpacking $ \cst st -> runUnpacking_ m cst st >>= \(a, st2) -> return (f a, st2)
{-# INLINE fmapUnpacking #-}

-- | Lift a pure value; the memory view is returned unchanged.
returnUnpacking :: a -> Unpacking a
returnUnpacking a = Unpacking $ \_ st -> return (a,st)
{-# INLINE [0] returnUnpacking #-}

-- | Applicative apply, defined via bind.
apUnpacking :: Unpacking (a -> b) -> Unpacking a -> Unpacking b
apUnpacking fm m = fm >>= \p -> m >>= \r2 -> return (p r2)
{-# INLINE [0] apUnpacking #-}
-- | Exception when trying to put bytes out of the memory bounds.
data OutOfBoundPacking = OutOfBoundPacking Int -- position relative to the end
                                           Int -- number of bytes requested
    deriving (Show,Eq,Data,Typeable)

-- | Exception when trying to finalize the packing monad that still have
-- holes open (the 'Int' is the number of unfilled holes).
data HoleInPacking = HoleInPacking Int
    deriving (Show,Eq,Data,Typeable)

-- | Exception when trying to get bytes out of the memory bounds.
data OutOfBoundUnpacking = OutOfBoundUnpacking Int -- position
                                               Int -- number of bytes requested
    deriving (Show,Eq,Data,Typeable)

-- | Exception when isolate doesn't consume all the bytes passed in the sub unpacker
data IsolationNotFullyConsumed = IsolationNotFullyConsumed Int -- number of bytes isolated
                                                           Int -- number of bytes not consumed
    deriving (Show,Eq,Data,Typeable)

instance Exception OutOfBoundPacking
instance Exception HoleInPacking
instance Exception OutOfBoundUnpacking
instance Exception IsolationNotFullyConsumed
-- | run an action to transform a number of bytes into a 'a'
-- and increment the pointer by number of bytes.
--
-- WARNING: no bounds check is performed; the caller must guarantee that
-- @n@ bytes are actually available.
unpackUnsafeActRef :: Int -- ^ number of bytes
                   -> (Ptr Word8 -> IO a)
                   -> Unpacking a
unpackUnsafeActRef n act = Unpacking $ \_ memView@(MemView ptr sz) -> do
    r <- act ptr
    return (r, memViewPlus memView n)

-- | similar 'unpackUnsafeActRef' but does boundary checking.
-- Throws 'OutOfBoundUnpacking' (with the absolute position) if fewer
-- than @n@ bytes remain.
unpackCheckActRef :: Int
                  -> (Ptr Word8 -> IO a)
                  -> Unpacking a
unpackCheckActRef n act = Unpacking $ \(MemView iniPtr _) memView@(MemView ptr sz) -> do
    when (sz < n) (throwIO $ OutOfBoundUnpacking (ptr `minusPtr` iniPtr) n)
    r <- act ptr
    return (r, memViewPlus memView n)
{-# INLINE [0] unpackCheckActRef #-}

-- | Isolate a number of bytes to run an unpacking operation.
--
-- If the unpacking doesn't consume all the bytes, an exception is raised.
unpackIsolate :: Int
              -> Unpacking a
              -> Unpacking a
unpackIsolate n sub = Unpacking $ \iniBlock@(MemView iniPtr _) memView@(MemView ptr sz) -> do
    when (sz < n) (throwIO $ OutOfBoundUnpacking (ptr `minusPtr` iniPtr) n)
    -- run the sub-parser on a view restricted to exactly n bytes
    (r, MemView newPtr subLeft) <- (runUnpacking_ sub) iniBlock (MemView ptr n)
    when (subLeft > 0) $ (throwIO $ IsolationNotFullyConsumed n subLeft)
    -- resume after the isolated region, with the outer remaining size
    return (r, MemView newPtr (sz - n))
-- | Similar to 'unpackUnsafeActRef' except that it discards the
-- reference argument.
unpackUnsafeAct :: Int -> (Ptr Word8 -> IO a) -> Unpacking a
unpackUnsafeAct = unpackUnsafeActRef

-- | Similar to 'unpackCheckActRef' except that it discards the
-- reference argument.
unpackCheckAct :: Int -> (Ptr Word8 -> IO a) -> Unpacking a
unpackCheckAct = unpackCheckActRef
{-# INLINE [0] unpackCheckAct #-}

-- | Set the new position from the beginning in the memory block.
-- This is useful to skip bytes or when using absolute offsets from a header or some such.
-- Throws 'OutOfBoundUnpacking' when the position is negative or past the
-- end of the block; @pos == size@ (an empty view) is allowed.
unpackSetPosition :: Int -> Unpacking ()
unpackSetPosition pos = Unpacking $ \(iniBlock@(MemView _ sz)) _ -> do
    when (pos < 0 || pos > sz) (throwIO $ OutOfBoundUnpacking pos 0)
    return ((), memViewPlus iniBlock pos)

-- | Get the position in the memory block.
unpackGetPosition :: Unpacking Int
unpackGetPosition = Unpacking $
    \(MemView iniPtr _) st@(MemView ptr _) -> return (ptr `minusPtr` iniPtr, st)

-- | Return the number of remaining bytes
unpackGetNbRemaining :: Unpacking Int
unpackGetNbRemaining = Unpacking $ \_ st@(MemView _ sz) -> return (sz,st)

-- | Allow to look into the memory.
-- This is inherently unsafe
unpackLookahead :: (Ptr Word8 -> Int -> IO a) -- ^ callback with current position and byte left
                -> Unpacking a
unpackLookahead f = Unpacking $
    \_ st@(MemView ptr sz) -> f ptr sz >>= \a -> return (a, st)
-- | run a pack action on the internal packed buffer.
-- Throws 'OutOfBoundPacking' if fewer than @n@ bytes of room remain.
packCheckAct :: Int -> (Ptr Word8 -> IO a) -> Packing a
packCheckAct n act = Packing $ \_ (MemView ptr sz) -> do
    when (sz < n) (throwIO $ OutOfBoundPacking sz n)
    r <- act ptr
    return (r, MemView (ptr `plusPtr` n) (sz - n))
{-# INLINE [0] packCheckAct #-}

-- | modify holes
-- Applies @f@ strictly to the open-holes counter kept in the environment.
modifyHoles :: (Int -> Int) -> Packing ()
modifyHoles f = Packing $ \(_, holesMVar) mem -> modifyMVar_ holesMVar (\v -> return $! f v) >> return ((), mem)

-- | Get the position in the memory block.
packGetPosition :: Packing Int
packGetPosition = Packing $ \(iniPtr, _) mem@(MemView ptr _) -> return (ptr `minusPtr` iniPtr, mem)
-- | A Hole represent something that need to be filled
-- later, for example a CRC, a prefixed size, etc.
--
-- They need to be filled before the end of the package,
-- otherwise an exception will be raised.
-- Internally it is just the write-back closure for the reserved bytes.
newtype Hole a = Hole (a -> IO ())

-- | Put a Hole of a specific size for filling later.
-- Reserves @n@ bytes now (bounds-checked) and bumps the open-holes count.
packHole :: Int -> (Ptr Word8 -> a -> IO ()) -> Packing (Hole a)
packHole n f = do
    r <- packCheckAct n (\ptr -> return $ Hole (\w -> f ptr w))
    modifyHoles (1 +)
    return r

-- | Fill a hole with a value
--
-- TODO: user can use one hole many times leading to wrong counting.
fillHole :: Hole a -> a -> Packing ()
fillHole (Hole closure) a = modifyHoles (\i -> i - 1) >> liftIO (closure a)
| NicolasDP/hs-packer | Data/Packer/Internal.hs | bsd-2-clause | 9,232 | 0 | 15 | 2,044 | 2,637 | 1,426 | 1,211 | 165 | 1 |
module Chap6 where
import Data.List
-- Exercises: Eq Instances
-- Hand-written Eq instances for a series of increasingly-polymorphic types.

-- 1. A single-field wrapper: equality of the wrapped Integers.
data TisAnInteger =
  TisAn Integer
instance Eq TisAnInteger where
  (==) (TisAn integer)
       (TisAn integer') =
    integer == integer'

-- 2. Component-wise equality on both fields.
data TwoIntegers =
  Two Integer Integer
instance Eq TwoIntegers where
  (==) (Two integer1 integer2)
       (Two integer1' integer2') =
    integer1 == integer1' && integer2 == integer2'

-- 3. A sum type: values of different constructors are never equal.
data StringOrInt =
    TisAnInt Int
  | TisAnString String
  deriving Show
instance Eq StringOrInt where
  (==) (TisAnInt int) (TisAnInt int') = int == int'
  (==) (TisAnString string) (TisAnString string') = string == string'
  (==) _ _ = False

-- 4.
data Pair a =
  Pair a a
  deriving Show
-- In this example I assume that Pair x y == Pair y x
-- (i.e. order-insensitive equality, unlike a derived instance).
instance Eq a => Eq (Pair a) where
  (==) (Pair x y)
       (Pair x' y') =
    (x == x' && y == y') ||
    (x == y' && y == x')

-- 5. Requires Eq on both type parameters.
data Tuple a b =
  Tuple a b
  deriving Show
instance (Eq a, Eq b) => Eq (Tuple a b) where
  (==) (Tuple x y)
       (Tuple x' y') =
    (x == x' && y == y')

-- 6. Same-constructor comparison only.
data Which a =
    ThisOne a
  | ThatOne a
  deriving Show
instance Eq a => Eq (Which a) where
  (==) (ThisOne x) (ThisOne x') = x == x'
  (==) (ThatOne y) (ThatOne y') = y == y'
  (==) _ _ = False

-- 7. Two independently-typed branches.
data EitherOr a b =
    Hello a
  | Goodbye b
instance (Eq a, Eq b) => Eq (EitherOr a b) where
  (==) (Hello x) (Hello x') = x == x'
  (==) (Goodbye y) (Goodbye y') = y == y'
  (==) _ _ = False
-- Exercises: Will They Work?
willWork1 = max (length [1, 2, 3]) (length [8, 9, 10, 11, 12])
willWork2 = compare (3 * 4) (3 * 5)
-- compare needs both arguments at the same type, so this fails:
--willNotWork3 = compare "Julie" True
willWork4 = (5 + 3) > (3 + 6)

-- Chapter Exercies
-- Multiple choise
-- 1. c
-- 2. b
-- 3. a
-- 4. a
-- 5. a

-- Does it typecheck
-- 1.
-- Added deriving Show so that printPerson works
--data Person = Person Bool
data Person = Person Bool deriving Show

printPerson :: Person -> IO()
printPerson person = putStrLn (show person)

-- 2., 3.
-- Added deriving Eq so that settleDown works
--data Mood = Blah
--          | Woot deriving Show
data Mood = Blah
          | Woot deriving (Show, Eq)

-- My extra task, I want Woot to be superior
-- (a minimal complete Ord via (<=); all three pattern cases are covered)
instance Ord Mood where
  (<=) _ Woot = True
  (<=) Blah Blah = True
  (<=) Woot Blah = False

settleDown :: Mood -> Mood
settleDown x = if x == Woot
                 then Blah
                 else x
-- 4.
-- is fine
type Subject = String
type Verb = String
type Object = String

data Sentence =
  Sentence Subject Verb Object
  deriving (Eq, Show)

-- s1 is a partial application (type: Object -> Sentence), so it
-- typechecks but cannot be shown as-is.
s1 = Sentence "dogs" "drool"
s2 = Sentence "Julie" "loves" "dogs"

-- Given a datatype declaration, what can we do?
data Rocks =
  Rocks String deriving (Eq, Show)
data Yeah =
  Yeah Bool deriving (Eq, Show)
data Papu =
  Papu Rocks Yeah
  deriving (Eq, Show)

-- 1.
-- not working, as e.g. Rocks String /= String
-- phew = Papu "chaes" True
phew = Papu (Rocks "chaes") (Yeah True)
-- 2.
truth = Papu (Rocks "chomskydoz")
             (Yeah True)
-- 3.
equalityForall :: Papu -> Papu -> Bool
equalityForall p p' = p == p'
-- 4.
-- Ord not derived/implemented, not working
--comparePapus :: Papu -> Papu -> Bool
--comparePapus p p' = p > p'
-- Match the types
-- 1.
i1 :: Num a => a
i1 = 1
-- Not working: a fully polymorphic 'a' cannot be the type of a numeric
-- literal, which requires at least a Num constraint.
--i1Alt :: a
--i1Alt = 1

-- 2., 3.
f2 :: Float
f2 = 1.0
-- Num not working (1.0 needs fromRational), but we can use Fractional
--f2Alt :: Num a => a
f2Alt :: Fractional a => a
f2Alt = 1.0

-- 4.
-- :info RealFrac
-- class (Real a, Fractional a) => RealFrac a where
f4Alt :: RealFrac a => a
f4Alt = 1.0

-- 5.
freud5 :: a -> a
freud5 x = x
-- Works but it is not needed:
-- • Redundant constraint: Ord a
-- • In the type signature for:
--       freud5Alt :: Ord a => a -> a
freud5Alt :: Ord a => a -> a
freud5Alt x = x

-- 6. A signature may also be more concrete than the inferred one.
freud6Alt :: Int -> Int
freud6Alt x = x

-- 7.
myX = 1 :: Int

sigmund7 :: Int -> Int
sigmund7 x = myX
-- not working, as myX :: Int pins the result type
--sigmund7Alt :: a -> a
--sigmund7Alt x = myX

-- 8.
-- not working, as myX :: Int (a polymorphic Num result cannot be Int)
-- sigmund8Alt :: Num a => a -> a
-- sigmund8Alt x = myX

-- 9.
jung :: Ord a => [a] -> a
jung xs = head (sort xs)
-- works, as Int implements Ord
jungAlt :: [Int] -> Int
jungAlt xs = head (sort xs)

-- 10.
young :: [Char] -> Char
young xs = head (sort xs)
youngAlt :: Ord a => [a] -> a
youngAlt xs = head (sort xs)

-- 11.
mySort :: [Char] -> [Char]
mySort = sort

signifier :: [Char] -> Char
signifier xs = head (mySort xs)
-- not working, as mySort :: [Char] -> [Char] is monomorphic
--signifierAlt :: Ord a => [a] -> a
--signifierAlt xs = head (mySort xs)
-- Type-Kwon-Do Two: Electric Typealoo
-- The exercise asks for any implementation matching the signature;
-- the bodies below are the canonical type-driven answers.

-- 1. Apply the function, then compare the result for equality.
chk :: Eq b => (a -> b) -> a -> b -> Bool
chk f a b = f a == b

-- 2. Lift the Integer into the result's Num type with fromInteger,
-- then combine it with the function's result.
arith :: Num b => (a -> b) -> Integer -> a -> b
arith f n a = f a + fromInteger n
| tkasu/haskellbook-adventure | app/Chap6.hs | bsd-3-clause | 4,768 | 0 | 10 | 1,240 | 1,512 | 850 | 662 | 121 | 2 |
-- | Internal module of the imessage package.
-- Currently a placeholder: the export list is intentionally empty.
module Imessage.Internal
    (
    ) where
| mitchty/imessage | src/Imessage/Internal.hs | bsd-3-clause | 43 | 0 | 3 | 13 | 9 | 6 | 3 | 2 | 0 |
-- | Executable entry point for lambda-pad.
module Main where

import Game.LambdaPad

-- | Run the gamepad mapper with the library's default configuration.
main :: IO ()
main = lambdaPad defaultLambdaPadConfig
| zearen-wover/lambda-pad | src/exe/lambda-pad.hs | bsd-3-clause | 96 | 0 | 6 | 15 | 27 | 15 | 12 | 4 | 1 |
{-# LANGUAGE NoImplicitPrelude #-}
module Horbits.OrbitSample where
import Control.Lens hiding (elements, (*~))
import Test.QuickCheck
import Horbits.Body
import Horbits.Dimensional.Prelude
import Horbits.Orbit
-- | An orbit together with the classical orbital elements expected for it.
data OrbitSample = OrbitSample { desc :: String                -- ^ human-readable description
                               , orbit :: Orbit                -- ^ the orbit under test
                               , sma :: Length Double          -- ^ expected semi-major axis
                               , e :: Dimensionless Double     -- ^ expected eccentricity
                               , raan :: Dimensionless Double  -- ^ expected right ascension of ascending node
                               , incl :: Dimensionless Double  -- ^ expected inclination
                               , arg :: Dimensionless Double   -- ^ expected argument of periapsis
                               , ap :: Length Double           -- ^ expected apoapsis altitude
                               , pe :: Length Double           -- ^ expected periapsis altitude
                               } deriving (Show)
-- | Reference orbits around Kerbin paired with the classical elements we
-- expect to recover from the state vectors. All samples share a 700 km
-- semi-major axis; apoapsis/periapsis are given in metres of altitude.
sampleOrbits :: [OrbitSample]
sampleOrbits =
    [ sample "Circular equatorial 100km"
        (vectorOrbit Kerbin (v3 _0 _0 (sqrt hSq0)) (v3 _0 _0 _0) _0)
        _0 _0 _0 _0 100000 100000
    , sample "Circular equatorial retrograde 100km"
        (vectorOrbit Kerbin (v3 _0 _0 (negate $ sqrt hSq0)) (v3 _0 _0 _0) _0)
        _0 _0 (180 *~ degree) _0 100000 100000
    , sample "Elliptical (e = 0.2) equatorial with arg.pe = 0"
        (vectorOrbit Kerbin (v3 _0 _0 (sqrt $ 0.96 *. hSq0)) (v3 (0.2 *~ one) _0 _0) _0)
        (0.2 *~ one) _0 _0 _0 240000 (-40000)
    , sample "Circular 45 deg. incl, raan = 0"
        (vectorOrbit Kerbin (v3 _0 (negate . sqrt $ 0.5 *. hSq0) (sqrt $ 0.5 *. hSq0))
                     (v3 _0 _0 _0) _0)
        _0 _0 (45 *~ degree) _0 100000 100000
    , sample "Circular 45 deg. incl, raan = 45 deg."
        (vectorOrbit Kerbin
                     (v3 (sqrt $ 0.25 *. hSq0) (negate . sqrt $ 0.25 *. hSq0) (sqrt $ 0.5 *. hSq0))
                     (v3 _0 _0 _0) _0)
        _0 (45 *~ degree) (45 *~ degree) _0 100000 100000
    , sample "Elliptical (e = 0.2) 45 deg. incl, raan = arg. pe = 0"
        (vectorOrbit Kerbin (v3 _0 (negate . sqrt $ 0.48 *. hSq0) (sqrt $ 0.48 *. hSq0))
                     (v3 (0.2 *~ one) _0 _0) _0)
        (0.2 *~ one) _0 (45 *~ degree) _0 240000 (-40000)
    , sample "Elliptical (e = 0.2) 45 deg. incl, raan = 0, arg. pe = 90 deg."
        (vectorOrbit Kerbin (v3 _0 (negate . sqrt $ 0.48 *. hSq0) (sqrt $ 0.48 *. hSq0))
                     (v3 _0 (0.2 *. sqrt (0.5 *~ one)) (0.2 *. sqrt (0.5 *~ one))) _0)
        (0.2 *~ one) _0 (45 *~ degree) (90 *~ degree) 240000 (-40000)
    , sample "Elliptical (e = 0.2) 45 deg. incl, raan = 45 deg., arg.pe = 0"
        (vectorOrbit Kerbin
                     (v3 (sqrt $ 0.24 *. hSq0) (negate . sqrt $ 0.24 *. hSq0) (sqrt $ 0.48 *. hSq0))
                     (v3 (0.2 *. sqrt (0.5 *~ one)) (0.2 *. sqrt (0.5 *~ one)) _0) _0)
        (0.2 *~ one) (45 *~ degree) (45 *~ degree) _0 240000 (-40000)
    ]
  where
    -- shared smart constructor: fixed 700 km semi-major axis, apoapsis
    -- and periapsis altitudes supplied as plain metre values
    sample d o ecc rn inc ag apm pem =
        OrbitSample d o (700000 *~ meter) ecc rn inc ag
                    (apm *~ meter) (pem *~ meter)
    -- squared specific angular momentum of a circular orbit at 700 km
    hSq0 = 700000 *~ meter * kerbin ^. bodyGravitationalParam
    kerbin = getBody Kerbin
-- | QuickCheck generator picking uniformly among the fixed samples.
genSampleOrbits :: Gen OrbitSample
genSampleOrbits = elements sampleOrbits
| chwthewke/horbits | testsuite/Horbits/OrbitSample.hs | bsd-3-clause | 5,244 | 0 | 15 | 3,027 | 1,163 | 627 | 536 | 101 | 1 |
module Examples.Test.Directory(main) where
import Development.Shake
import Development.Shake.FilePath
import Examples.Util
import System.Directory(createDirectory)
import Data.List
import Control.Monad
import System.Directory(getCurrentDirectory, setCurrentDirectory)
-- Use escape characters so special characters can appear in filenames:
-- _o encodes '*', _l encodes '/', __ encodes <space>.

-- | Decode the escape sequences back into the original characters.
-- An underscore followed by an unknown character is kept verbatim.
readEsc :: String -> String
readEsc [] = []
readEsc ('_':c:rest)
    | Just decoded <- lookup c [('o', '*'), ('l', '/'), ('_', ' ')]
    = decoded : readEsc rest
readEsc (c:rest) = c : readEsc rest

-- | Encode the special characters with their escape sequences.
showEsc :: String -> String
showEsc = concatMap esc
  where
    esc c = case c of
        '*' -> "_o"
        '/' -> "_l"
        ' ' -> "__"
        _   -> [c]
-- Build rules: each target's extension selects what directory query is
-- performed, and the queried path/patterns are escape-decoded from the
-- target's base name.
main = shaken test $ \args obj -> do
    want $ map obj args
    -- <dir>.contents: raw directory listing
    obj "*.contents" *> \out ->
        writeFileLines out =<< getDirectoryContents (obj $ readEsc $ dropExtension $ unobj out)
    -- <dir>.dirs: subdirectories only
    obj "*.dirs" *> \out ->
        writeFileLines out =<< getDirectoryDirs (obj $ readEsc $ dropExtension $ unobj out)
    -- "<dir> <pat>...".files: pattern-matched files; a leading space
    -- means "search from the project object root"
    obj "*.files" *> \out -> do
        let pats = readEsc $ dropExtension $ unobj out
        let (x:xs) = ["" | " " `isPrefixOf` pats] ++ words pats
        writeFileLines out =<< getDirectoryFiles (obj x) xs
    -- "<path>...".exist: one "FD" flag pair per path (file/dir existence)
    obj "*.exist" *> \out -> do
        let xs = map obj $ words $ readEsc $ dropExtension $ unobj out
        fs <- mapM doesFileExist xs
        ds <- mapM doesDirectoryExist xs
        let bool x = if x then "1" else "0"
        writeFileLines out $ zipWith (\a b -> bool a ++ bool b) fs ds
    -- dots: check "." and "" behave identically for the directory queries
    obj "dots" *> \out -> do
        cwd <- liftIO getCurrentDirectory
        liftIO $ setCurrentDirectory $ obj ""
        b1 <- liftM2 (==) (getDirectoryContents ".") (getDirectoryContents "")
        b2 <- liftM2 (==) (getDirectoryDirs ".") (getDirectoryDirs "")
        b3 <- liftM2 (==) (getDirectoryFiles "." ["*.txt"]) (getDirectoryFiles "" ["*.txt"])
        b4 <- liftM2 (==) (getDirectoryFiles "." ["C.txt/*.txt"]) (getDirectoryFiles "" ["C.txt/*.txt"])
        b5 <- liftM2 (==) (getDirectoryFiles "." ["//*.txt"]) (getDirectoryFiles "" ["//*.txt"])
        liftIO $ setCurrentDirectory cwd
        writeFileLines out $ map show [b1,b2,b3,b4,b5]
-- | Drive the rules above: build escaped target names and check the
-- produced listings, before and after creating some files/directories.
test build obj = do
    let demand x ys = let f = showEsc x in do build [f]; assertContents (obj f) $ unlines $ words ys
    build ["clean"]
    -- nothing exists yet, so all listings are empty
    demand " *.txt.files" ""
    demand " //*.txt.files" ""
    demand ".dirs" ""
    demand "A.txt B.txt C.txt.exist" "00 00 00"
    writeFile (obj "A.txt") ""
    writeFile (obj "B.txt") ""
    createDirectory (obj "C.txt")
    writeFile (obj "C.txt/D.txt") ""
    writeFile (obj "C.txt/E.xtx") ""
    -- listings now reflect the files/directories just created
    demand " *.txt.files" "A.txt B.txt"
    demand ".dirs" "C.txt"
    demand "A.txt B.txt C.txt.exist" "10 10 01"
    demand " //*.txt.files" "A.txt B.txt C.txt/D.txt"
    demand "C.txt *.txt.files" "D.txt"
    demand " *.txt //*.xtx.files" "A.txt B.txt C.txt/E.xtx"
    demand " C.txt/*.files" "C.txt/D.txt C.txt/E.xtx"
    build ["dots","--no-lint"]
    assertContents (obj "dots") $ unlines $ words "True True True True True"
| nh2/shake | Examples/Test/Directory.hs | bsd-3-clause | 2,949 | 0 | 20 | 707 | 1,076 | 514 | 562 | 65 | 4 |
{-# LANGUAGE ParallelListComp, ScopedTypeVariables, BangPatterns, Rank2Types, TupleSections #-}
module Tools.TimePlot.Plots (
initGen
) where
import qualified Control.Monad.Trans.State.Strict as St
import Control.Arrow
import Control.Applicative
import Data.List (foldl', sort)
import Data.Maybe
import qualified Data.Map as M
import qualified Data.Map.Strict as MS
import qualified Data.Set as Set
import qualified Data.ByteString.Char8 as S
import Data.Time
import Graphics.Rendering.Chart
import Graphics.Rendering.Chart.Event
import Data.Colour
import Data.Colour.Names
import Tools.TimePlot.Types
import qualified Tools.TimePlot.Incremental as I
-- | A plot generator: given a track name and the global time range,
-- produce an incremental summary folding input events into plot data.
type PlotGen = String -> LocalTime -> LocalTime -> I.StreamSummary (LocalTime, InEvent) PlotData

-- | Dispatch a chart kind to the generator implementing it.
initGen :: ChartKind LocalTime -> PlotGen
initGen (KindACount bs) = genActivity (\sns n -> n) bs
initGen (KindCount bs) = genActivity (\sns n -> n*toSeconds bs (undefined::LocalTime)) bs
initGen (KindAPercent bs b) = genActivity (\sns n -> 100*n/b) bs
initGen (KindAFreq bs) = genActivity (\sns n -> if n == 0 then 0 else (n / sum (M.elems sns))) bs
initGen (KindFreq bs k) = genAtoms atoms2freqs bs k
  where atoms2freqs as m = let s = sum [c | (a,c) <- M.toList m]
                           in if s==0 then [0] else 0:[fromIntegral (M.findWithDefault 0 a m)/fromIntegral s | a <- as]
initGen (KindHistogram bs k) = genAtoms atoms2hist bs k
  where atoms2hist as m = 0:[fromIntegral (M.findWithDefault 0 a m) | a <- as]
initGen KindEvent = genEvent
initGen (KindQuantile bs vs) = genQuantile bs vs
initGen (KindBinFreq bs vs) = genBinFreqs bs vs
initGen (KindBinHist bs vs) = genBinHist bs vs
initGen KindLines = genLines
initGen (KindDots alpha) = genDots alpha
initGen (KindSum bs ss) = genSum bs ss
initGen (KindCumSum bs ss) = genCumSum bs ss
initGen (KindDuration sk dropSubtrack) = genDuration sk dropSubtrack
-- The remaining kinds are resolved away before plotting; reaching them
-- here is a programming error, hence the loud messages.
initGen (KindWithin _ _) = \name -> error $
    "KindWithin should not be plotted (this is a bug): track " ++ show name
initGen KindNone = \name -> error $
    "KindNone should not be plotted (this is a bug): track " ++ show name
initGen KindUnspecified = \name -> error $
    "Kind not specified for track " ++ show name ++ " (have you misspelled -dk or any of -k arguments?)"
-- Auxiliary functions for two common plot varieties
-- | Assemble a stacked-bars plot.  A transparent filler series with an
-- empty title is prepended, pairing up with the leading 0 each caller
-- prepends to its per-bin value lists.
plotTrackBars :: [(LocalTime,[Double])] -> [String] -> String -> [Colour Double] -> PlotData
plotTrackBars vals titles name cs = PlotBarsData
    { plotName   = name
    , barsStyle  = BarsStacked
    , barsValues = vals
    , barsStyles = [ (solidFillStyle c, Nothing) | (c, _) <- zip fills allTitles ]
    , barsTitles = allTitles
    }
  where
    fills     = transparent : map opaque cs
    allTitles = "" : titles
-- | Assemble a line plot with one line per subtrack; line colours are
-- taken from the module-wide 'colors' palette.
plotLines :: String -> [(S.ByteString, [(LocalTime,Double)])] -> PlotData
plotLines name series = PlotLinesData
    { plotName    = name
    , linesData   = map snd series
    , linesStyles = [ solidLine 1 c | (_, c) <- zip series (map opaque colors) ]
    , linesTitles = map (S.unpack . fst) series
    }
-------------------------------------------------------------
-- Plot generators
-------------------------------------------------------------
-- Wrappers for I.filterMap
-- Keep only value events, retaining the subtrack name.
values (t,InValue s v) = Just (t,s,v)
values _ = Nothing
-- Keep only value events, discarding the subtrack name.
valuesDropTrack (t, InValue s v) = Just (t,v)
valuesDropTrack _ = Nothing
-- Keep only atom events, discarding the subtrack name.
atomsDropTrack (t, InAtom s a) = Just (t,a)
atomsDropTrack _ = Nothing
-- Keep only edge (rise/fall/pulse/set) events, retaining the subtrack name.
edges (t,InEdge s e) = Just (t,s,e)
edges _ = Nothing
------------------- Lines ----------------------
-- | One line per subtrack, plotting every (time, value) sample.
genLines :: PlotGen
genLines name t0 t1 = I.filterMap values $ (data2plot . groupByTrack) <$> I.collect
  where
    -- NOTE(review): this duplicates 'plotLines' field-for-field.
    data2plot vss = PlotLinesData {
        plotName = name,
        linesData = [vs | (_,vs) <- vss],
        linesStyles = [solidLine 1 color | _ <- vss | color <- map opaque colors],
        linesTitles = [S.unpack subtrack | (subtrack, _) <- vss]
      }
------------------- Dots ----------------------
-- | Scatter plot of values per subtrack; @alpha@ sets dot opacity
-- (1 means fully opaque).
genDots :: Double -> PlotGen
genDots alpha name t0 t1 = I.filterMap values $ (data2plot . groupByTrack) <$> I.collect
  where
    data2plot vss = PlotDotsData {
        plotName = name,
        dotsData = [vs | (_,vs) <- vss],
        dotsTitles = [S.unpack subtrack | (subtrack, _) <- vss],
        dotsColors = if alpha == 1 then map opaque colors else map (`withOpacity` alpha) colors
      }
------------------- Binned graphs ----------------------
-- | Group a time-ordered stream into consecutive bins of width
-- @binSize@ starting at @t0@.
summaryByFixedTimeBins t0 binSize = I.byTimeBins (iterate (add binSize) t0)
-- Arguments of f will be: value bin boundaries, values in the current time bin
-- | Shared driver for 'genBinHist'/'genBinFreqs': bin values into fixed
-- time bins, convert each bin's values via @f@ (given the value-bin
-- boundaries), and render stacked bars labelled with the value ranges.
genByBins :: ([Double] -> [Double] -> [Double]) -> NominalDiffTime -> [Double] -> PlotGen
genByBins f timeBinSize valueBinBounds name t0 t1 = I.filterMap valuesDropTrack $
    summaryByFixedTimeBins t0 timeBinSize $
    I.mapInput (\(t,xs) -> (t, 0:f valueBinBounds xs)) $
    (\tfs -> plotTrackBars tfs binTitles name colors) <$>
    I.collect
  where
    -- "<a", "a..b", ..., ">z" labels for the value bins.
    binTitles = [low]++[showDt v1++".."++showDt v2
                       | v1 <- valueBinBounds
                       | v2 <- tail valueBinBounds]++
                [high]
      where
        low = "<"++showDt (head valueBinBounds)
        high = ">"++showDt (last valueBinBounds)
-- | Render a duration in seconds with an auto-chosen unit, e.g.
-- "0.1s" but "90.0ms" (thresholds pick ns/us/ms/s).
showDt x
  | x < 0.0000001 = show (x*1000000000) ++ "ns"
  | x < 0.0001    = show (x*1000000) ++ "us"
  | x < 0.1       = show (x*1000) ++ "ms"
  | otherwise     = show x ++ "s"
genBinHist :: NominalDiffTime -> [Double] -> PlotGen
genBinFreqs :: NominalDiffTime -> [Double] -> PlotGen
-- Histogram = absolute count per value bin; freqs = the same counts
-- normalized by the total number of values in the time bin.
(genBinHist,genBinFreqs) = (genByBins values2binHist, genByBins values2binFreqs)
  where
    values2binHist bins = values2binHist' bins . sort
    -- counts for (-inf,b1), [b1,b2), ..., [bn,+inf) over a sorted list
    values2binHist' [] xs = [fromIntegral (length xs)]
    values2binHist' (a:as) xs = fromIntegral (length xs0) : values2binHist' as xs'
      where (xs0,xs') = span (<a) xs
    values2binFreqs bins xs = map toFreq $ values2binHist bins xs
      where
        n = length xs
        toFreq k = if n==0 then 0 else (k/fromIntegral n)
-- | Stacked bars of inter-quantile bands per time bin: each band's
-- height is the difference between two consecutive requested quantiles.
genQuantile :: NominalDiffTime -> [Double] -> PlotGen
genQuantile binSize qs name t0 t1 = I.filterMap valuesDropTrack $
    summaryByFixedTimeBins t0 binSize $
    I.mapInput (second (diffs . getQuantiles qs)) $
    fmap (\tqs -> plotTrackBars tqs quantileTitles name colors) $
    I.collect
  where
    quantileTitles = [show p1++".."++show p2++"%" | p1 <- percents | p2 <- tail percents]
    percents = map (floor . (*100.0)) $ [0.0] ++ qs ++ [1.0]
    -- stacked bars need increments, not absolute levels
    diffs xs = zipWith (-) xs (0:xs)
-- | Pick the minimum, the requested quantiles and the maximum of a
-- list.  Probabilities need not be sorted; the result is ascending and
-- may repeat a value when several requested indices coincide.
getQuantiles :: (Ord a) => [Double] -> [a] -> [a]
getQuantiles probs = pick . sort
  where
    sortedProbs = sort probs
    pick [] = []
    pick ys = select (0 : positions ++ [n-1]) 0 ys
      where
        n = length ys
        positions = map (floor . (*(fromIntegral n-1))) sortedProbs
    -- Single pass over the sorted values, emitting each value whose
    -- index appears in the (ascending) request list.
    select _ _ [] = []
    select [] _ _ = []
    select [k] pos (y:ys)
      | k < pos   = []
      | k == pos  = [y]
      | otherwise = select [k] (pos+1) ys
    select (k:k':ks) pos (y:ys)
      | k < pos   = select (k':ks) pos (y:ys)
      | k > pos   = select (k:k':ks) (pos+1) ys
      | k == k'   = y : select (k':ks) pos (y:ys)
      | otherwise = y : select (k':ks) (pos+1) ys
-- | Bars per time bin computed from atom counts: @f@ turns the list of
-- all distinct atoms plus a bin's atom counts into the bar heights.
genAtoms :: ([S.ByteString] -> M.Map S.ByteString Int -> [Double]) ->
            NominalDiffTime -> PlotBarsStyle -> PlotGen
genAtoms f binSize k name t0 t1 = I.filterMap atomsDropTrack (h <$> unique (\(t,atom) -> atom) <*> fInBins)
  where
    fInBins :: I.StreamSummary (LocalTime, S.ByteString) [(LocalTime, M.Map S.ByteString Int)]
    fInBins = summaryByFixedTimeBins t0 binSize $ I.mapInput (second counts) I.collect
    -- occurrence count per atom within one bin (strict in the counter)
    counts = foldl' insert M.empty
      where
        insert m a = case M.lookup a m of
          Nothing -> M.insert a 1 m
          Just !n -> M.insert a (n+1) m
    h :: [S.ByteString] -> [(LocalTime, M.Map S.ByteString Int)] -> PlotData
    h as tfs = (plotTrackBars (map (second (f as)) tfs) (map show as) name colors) { barsStyle = k }
-- | Summarize a stream into the list of distinct keys, in ascending
-- key order (courtesy of 'M.keys').
unique :: (Ord a) => (x -> a) -> I.StreamSummary x [a]
unique f = I.stateful M.empty (\a -> M.insert (f a) ()) M.keys
-- | Distinct subtrack names of a (time, subtrack, payload) stream.
uniqueSubtracks :: I.StreamSummary (LocalTime,S.ByteString,a) [S.ByteString]
uniqueSubtracks = unique (\(t,s,a) -> s)
-- | One line per subtrack of per-time-bin value sums; with 'SumStacked'
-- each line is drawn on top of the running total of the lines below it.
genSum :: NominalDiffTime -> SumSubtrackStyle -> PlotGen
genSum binSize ss name t0 t1 = I.filterMap values (h <$> uniqueSubtracks <*> sumsInBins t0 binSize)
  where
    h :: [S.ByteString] -> [(LocalTime, M.Map S.ByteString Double)] -> PlotData
    h tracks binSums = plotLines name rows
      where
        rowsT' = case ss of
          SumOverlayed -> map (second M.toList) binSums
          SumStacked -> map (second stack) binSums
        -- running totals across subtracks, missing tracks counting as 0
        stack :: M.Map S.ByteString Double -> [(S.ByteString, Double)]
        stack ss = zip tracks (scanl1 (+) (map (\x -> M.findWithDefault 0 x ss) tracks))
        rows :: [(S.ByteString, [(LocalTime, Double)])]
        rows = M.toList $ fmap sort $ M.fromListWith (++) $
          [(track, [(t,sum)]) | (t, m) <- rowsT', (track, sum) <- m]
-- | Per-bin value sums keyed by subtrack.
sumsInBins :: LocalTime -> NominalDiffTime -> I.StreamSummary (LocalTime,S.ByteString,Double) [(LocalTime, M.Map S.ByteString Double)]
sumsInBins t0 bs = I.mapInput (\(t,s,v) -> (t,(s,v))) $
    summaryByFixedTimeBins t0 bs $
    I.mapInput (second (fromListWith' (+))) $
    I.collect
-- | Cumulative sums over time, one line per subtrack; with 'SumStacked'
-- the per-bin sums are stacked across subtracks before accumulating.
genCumSum :: NominalDiffTime -> SumSubtrackStyle -> PlotGen
genCumSum bs ss name t0 t1 = I.filterMap values (accumulate <$> uniqueSubtracks <*> sumsInBins t0 bs)
  where
    accumulate :: [S.ByteString] -> [(LocalTime, M.Map S.ByteString Double)] -> PlotData
    accumulate tracks tss = plotLines name [(track, [(t, ss M.! track) | (t,ss) <- cumsums]) | track <- tracks]
      where
        cumsums = scanl' f (t0, M.fromList $ zip tracks (repeat 0)) (map normalize tss)
        -- give every track an entry so the M.! lookups are total
        normalize (t,binSums) = (t, M.fromList [ (track, M.findWithDefault 0 track binSums) | track <- tracks ])
        f (_,bases) (t,binSums) = (t,) $ M.fromList $ zip tracks $ zipWith (+) trackBases $ case ss of
            SumOverlayed -> trackSums
            SumStacked -> trackAccSums
          where
            trackSums = [ binSums M.! track | track <- tracks ]
            trackBases = [ bases M.! track | track <- tracks ]
            trackAccSums = scanl1' (+) trackSums
-- | Strict version of 'scanl1'.  Unlike the previous definition, this
-- matches 'Data.List.scanl1' by returning @[]@ for an empty input
-- instead of crashing on a missing pattern.
scanl1' :: (a -> a -> a) -> [a] -> [a]
scanl1' f (x:xs) = scanl' f x xs
scanl1' _ [] = []
-- | Strict left scan: like 'scanl', but forces each accumulator as it
-- is produced, preventing thunk build-up over long inputs.
scanl' :: (b -> a -> b) -> b -> [a] -> [b]
scanl' f !x0 [] = [x0]
scanl' f !x0 (x:xs) = x0:scanl' f (f x0 x) xs
-- | Stacked bars of per-bin "activity" per subtrack (time-weighted open
-- level plus pulse counts, as computed by 'edges2binsSummary'); @f@
-- post-processes each value given all subtrack values of that bin.
genActivity :: (M.Map S.ByteString Double -> Double -> Double) -> NominalDiffTime -> PlotGen
genActivity f bs name t0 t1 = I.filterMap edges (h <$> uniqueSubtracks <*> binAreas)
  where
    binAreas :: I.StreamSummary (LocalTime,S.ByteString,Edge) [(LocalTime, M.Map S.ByteString Double)]
    binAreas = fmap (map (\((t1,t2),m) -> (t1,m))) $ edges2binsSummary bs t0 t1
    h tracks binAreas = (plotTrackBars barsData (map S.unpack tracks) name colors) { barsStyle = BarsStacked }
      where
        barsData = [(t, 0:map (f m . flip (M.findWithDefault 0) m) tracks) | (t,m) <- binAreas]
-- | Fold a time-ordered edge stream into fixed-width time bins.  Each
-- emitted bin maps every subtrack seen so far to its activity value:
-- (pulses + time-weighted open level) / bin length in seconds.
edges2binsSummary :: (Ord t,HasDelta t,Show t) =>
    Delta t -> t -> t ->
    I.StreamSummary (t,S.ByteString,Edge) [((t,t), M.Map S.ByteString Double)]
edges2binsSummary binSize tMin tMax = I.stateful (M.empty, iterate (add binSize) tMin, []) step flush
  where
    -- State: (m, ts, r) where:
    --  * m = subtrack => state of current bin:
    --    (area, starting time, level = rise-fall, num pulse events)
    --  * ts = infinite list of time bin boundaries
    --  * r = reversed list of results per bins
    modState s t (!m, ts,r) f = (m', ts, r)
      where
        m' = MS.insertWith (\new !old -> f old) s (f (0,t,0,0)) m
    -- Close the current bin: record its per-subtrack values and reset
    -- every accumulator, carrying the open level over to the next bin.
    flushBin st@(m,t1:t2:ts,!r) = (m', t2:ts, r')
      where
        states = M.toList m
        binSizeSec = deltaToSeconds t2 t1
        binValue (area,start,nopen,npulse) =
          (fromIntegral npulse + area + deltaToSeconds t2 start*nopen) / binSizeSec
        !r' = ((t1,t2), M.fromList [(s, binValue bin) | (s, bin) <- states]) : r
        !m' = fmap (\(_,_,nopen,_) -> (0,t2,nopen,0)) m
    -- Flush complete bins until the event falls into the current one.
    step ev@(t, s, e) st@(m, t1:t2:ts, r)
      | t < t1 = error $ "Times are not in ascending order, first violating is " ++ show t
      | t >= t2 = step ev (flushBin st)
      | True = step'' ev st
    step'' ev@(t,s,e) st@(m, t1:t2:ts, r) = if (t < t1 || t >= t2) then error "Outside bin" else step' ev st
    -- SetTo only changes status, which bins do not track.
    step' (t, s, SetTo _) st = st
    step' (t, s, Pulse _) st = modState s t st $
      \(!area, !start, !nopen, !npulse) -> (area, t, nopen, npulse+1)
    -- Rise/Fall: accumulate the area since 'start', then adjust level.
    step' (t, s, Rise) st = modState s t st $
      \(!area, !start, !nopen, !npulse) -> (area+deltaToSeconds t start*nopen, t, nopen+1, npulse)
    step' (t, s, Fall) st = modState s t st $
      \(!area, !start, !nopen, !npulse) -> (area+deltaToSeconds t start*nopen, t, nopen-1, npulse)
    -- Emit the remaining (possibly empty) bins up to tMax.
    flush st@(m, t1:t2:ts, r)
      | t2 <= tMax = flush (flushBin st)
      | True = reverse r
-- | Adapts a summary over @b@ into a summary over @a@, for any result.
type StreamTransformer a b = forall r . I.StreamSummary b r -> I.StreamSummary a r
-- | Pair up rise/fall (and set/pulse) edges into events per subtrack.
-- Nested rises are counted; an event closes when the level returns to
-- zero.  Events still open at end of input are closed at @t1@ with an
-- "unfinished" right end.
edges2eventsSummary :: forall t . (Ord t) =>
    t -> t -> StreamTransformer (t,S.ByteString,Edge) (S.ByteString, Event t Status)
edges2eventsSummary t0 t1 s = I.stateful (M.empty,s) step flush
  where
    -- State: (m, sum) where
    --  * m = subtrack => (event start, level = rise-fall, status)
    --  * sum = summary of accumulated events so far
    tellSummary e (ts,!sum) = (ts,I.insert sum e)
    getTrack s (!ts,sum) = M.findWithDefault (t0, 0, emptyStatus) s ts
    putTrack s t (!ts,sum) = (M.insert s t ts, sum)
    killTrack s (!ts,sum) = (M.delete s ts, sum)
    -- Branch on whether the subtrack currently has an open event.
    trackCase s whenZero withNonzero st
      | numActive == 0 = whenZero
      | True = withNonzero t0 numActive status
      where (t0, numActive, status) = getTrack s st
    emptyStatus = Status "" ""
    step (t,s,Pulse st) state = tellSummary (s, PulseEvent t st) state
    -- SetTo closes the current event (if any) and opens one with the
    -- new status.
    step (t,s,SetTo st) state = trackCase s
        (putTrack s (t, 1, st) state)
        (\t0 !n st0 -> putTrack s (t,n,st) $ tellSummary (s, LongEvent (t0,True) (t,True) st0) state)
        state
    step (t,s,Rise) state = trackCase s
        (putTrack s (t, 1, emptyStatus) state)
        (\t0 !n st -> putTrack s (t, n+1, st) state)
        state
    step (t,s,Fall) state
      | numActive == 1 = killTrack s $ tellSummary (s, LongEvent (t0,True) (t,True) st) state
      | True = putTrack s (t0, max 0 (numActive-1), st) state
      where
        (t0, numActive, st) = getTrack s state
    -- Close any events still open at the end of the input range.
    flush (ts,sum) = I.finalize $ foldl' addEvent sum $ M.toList ts
      where
        addEvent sum (s,(t0,_,st)) = I.insert sum (s, LongEvent (t0,True) (t1,False) st)
-- | Turn rise/fall edges into duration values: each completed long
-- event yields an 'InValue' carrying its length in seconds, stamped at
-- its end time.  With @commonTrack@ set, all durations land on that
-- single track instead of their original subtracks.
edges2durationsSummary :: forall t . (Ord t, HasDelta t) =>
    t -> t -> Maybe String -> StreamTransformer (t,S.ByteString,Edge) (t,InEvent)
edges2durationsSummary t0 t1 commonTrack = edges2eventsSummary t0 t1 . I.filterMap genDurations
  where
    -- only events with both ends observed contribute a duration
    genDurations (track, e) = case e of
      LongEvent (t1,True) (t2,True) _ -> Just (t2, InValue (case commonTrack of
                                                              Nothing -> track
                                                              _ -> commonTrackBS)
                                                           (deltaToSeconds t2 t1))
      _ -> Nothing
    commonTrackBS = S.pack (fromJust commonTrack)
-- | Event-bar plot: renders pulse and long events (subtrack names are
-- dropped from the event data here).
genEvent :: PlotGen
genEvent name t0 t1 = I.filterMap edges $
    fmap (\evs -> PlotEventData { plotName = name, eventData = map snd evs }) $
    edges2eventsSummary t0 t1 I.collect
-- TODO Multiple tracks
-- | Plot the durations of long events using the given inner chart
-- kind; @dropSubtrack@ collapses all subtracks onto the track name.
genDuration :: ChartKind LocalTime -> Bool -> PlotGen
genDuration sk dropSubtrack name t0 t1 = I.filterMap edges $
    edges2durationsSummary t0 t1 (if dropSubtrack then Just name else Nothing) (initGen sk name t0 t1)
-- | Build a map from key/value pairs, combining duplicates
-- left-to-right with @f oldValue newValue@ and forcing the previously
-- accumulated value at each step (note the argument order differs from
-- 'M.fromListWith').
fromListWith' f = foldl' step M.empty
  where
    step acc (key, val) = case M.lookup key acc of
      Nothing   -> M.insert key val acc
      Just !old -> M.insert key (f old val) acc
-- | Infinite colour palette used for subtrack lines, dots and bars.
colors = cycle [green,blue,red,brown,yellow,orange,grey,purple,violet,lightblue]
-- | Partition (time, subtrack, value) triples into per-subtrack
-- series, each sorted by time, ordered by subtrack key.
groupByTrack trips = M.toList (M.map sort grouped)
  where
    grouped = M.fromListWith (++) [ (trk, [(t, v)]) | (t, trk, v) <- trips ]
| jkff/timeplot | Tools/TimePlot/Plots.hs | bsd-3-clause | 16,910 | 5 | 18 | 4,879 | 6,566 | 3,502 | 3,064 | 268 | 5 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS -fno-warn-orphans #-}
module System.Torrent.FileMap
( FileMap
-- * Construction
, Mode (..)
, def
, mmapFiles
, unmapFiles
-- * Query
, System.Torrent.FileMap.size
-- * Modification
, readBytes
, writeBytes
, unsafeReadBytes
-- * Unsafe conversions
, fromLazyByteString
, toLazyByteString
) where
import Control.Applicative
import Control.Monad as L
import Data.ByteString as BS
import Data.ByteString.Internal as BS
import Data.ByteString.Lazy as BL
import Data.ByteString.Lazy.Internal as BL
import Data.Default
import Data.Vector as V -- TODO use unboxed vector
import Foreign
import System.IO.MMap
import Data.Torrent
-- | One mmapped region: its absolute offset within the overall payload
-- plus the mapped bytes themselves.
data FileEntry = FileEntry
  { filePosition :: {-# UNPACK #-} !FileOffset
  , fileBytes :: {-# UNPACK #-} !BS.ByteString
  } deriving (Show, Eq)
-- | Entries in ascending 'filePosition' order, covering the payload.
type FileMap = Vector FileEntry
-- | Default mapping mode is 'ReadWriteEx'.
instance Default Mode where
  def = ReadWriteEx
-- | Memory-map every file of the layout in the given mode; sizes come
-- from the layout, and a mismatch with the actual mapped size fails.
mmapFiles :: Mode -> FileLayout FileSize -> IO FileMap
mmapFiles mode layout = V.fromList <$> L.mapM mkEntry (accumPositions layout)
  where
    mkEntry (path, (pos, expectedSize)) = do
      let esize = fromIntegral expectedSize -- FIXME does this safe?
      (fptr, moff, msize) <- mmapFileForeignPtr path mode $ Just (0, esize)
      if msize /= esize
        then error "mmapFiles" -- TODO unmap mapped files on exception
        else return $ FileEntry pos (PS fptr moff msize)
-- | Finalize every entry's foreign pointer, unmapping the regions.
-- Any bytestrings still sharing those buffers become invalid.
unmapFiles :: FileMap -> IO ()
unmapFiles = V.mapM_ unmapEntry
  where
    unmapEntry (FileEntry _ (PS fptr _ _)) = finalizeForeignPtr fptr
-- | Reinterpret the chunks of a lazy bytestring as map entries at
-- consecutive offsets.  The bytes are shared, not copied.
fromLazyByteString :: BL.ByteString -> FileMap
fromLazyByteString lbs = V.unfoldr f (0, lbs)
  where
    f (_, Empty ) = Nothing
    f (pos, Chunk x xs) = Just (FileEntry pos x, ((pos + chunkSize), xs))
      where chunkSize = fromIntegral $ BS.length x
-- | /O(n)/.  Concatenate all entries' bytes into a lazy bytestring,
-- sharing the underlying (possibly mmapped) buffers.
toLazyByteString :: FileMap -> BL.ByteString
toLazyByteString = V.foldr f Empty
  where
    f FileEntry {..} bs = Chunk fileBytes bs
-- | /O(1)/.  Total byte count covered by the map, i.e. the end offset
-- of the last entry; 0 for an empty map.
size :: FileMap -> FileOffset
size m
  | V.null m = 0
  | FileEntry {..} <- V.unsafeLast m
  = filePosition + fromIntegral (BS.length fileBytes)
-- | Binary search for the index of the entry containing the given
-- absolute offset; 'Nothing' if the offset falls outside every entry.
bsearch :: FileOffset -> FileMap -> Maybe Int
bsearch x m
  | V.null m = Nothing
  | otherwise = branch (V.length m `div` 2)
  where
    branch c @ ((m !) -> FileEntry {..})
      | x < filePosition = bsearch x (V.take c m)
      | x >= filePosition + fileSize = do
          -- found in the right half; re-offset the index
          ix <- bsearch x (V.drop (succ c) m)
          return $ succ c + ix
      | otherwise = Just c
      where
        fileSize = fromIntegral (BS.length fileBytes)
-- | /O(log n)/.  Drop entries entirely before the given offset; also
-- returns how far into the first remaining entry the offset points.
drop :: FileOffset -> FileMap -> (FileSize, FileMap)
drop off m
  | Just ix <- bsearch off m
  , FileEntry {..} <- m ! ix = (off - filePosition, V.drop ix m)
  | otherwise = (0 , V.empty)
-- | /O(log n)/.  Keep the entries covering the first @len@ bytes; also
-- returns by how many bytes the kept entries overshoot @len@.
take :: FileSize -> FileMap -> (FileMap, FileSize)
take len m
  | len >= s = (m , 0)
  | Just ix <- bsearch (pred len) m = let m' = V.take (succ ix) m
                                      in (m', System.Torrent.FileMap.size m' - len)
  | otherwise = (V.empty , 0)
  where
    s = System.Torrent.FileMap.size m
-- | /O(log n + m)/. Do not use this function with 'unmapFiles': the
-- result aliases the mapped buffers instead of copying them.
unsafeReadBytes :: FileOffset -> FileSize -> FileMap -> BL.ByteString
unsafeReadBytes off s m
  | (l , m') <- System.Torrent.FileMap.drop off m
  , (m'', _ ) <- System.Torrent.FileMap.take (off + s) m'
  = BL.take (fromIntegral s) $ BL.drop (fromIntegral l) $ toLazyByteString m''
-- | Read a copied slice of the mapping, safe to keep after unmapping.
-- The copy is forced chunk-by-chunk before returning so no lazy
-- reference into the mapped region survives.
readBytes :: FileOffset -> FileSize -> FileMap -> IO BL.ByteString
readBytes off s m = do
  let bs_copy = BL.copy $ unsafeReadBytes off s m
  forceLBS bs_copy
  return bs_copy
  where
    forceLBS Empty = return ()
    forceLBS (Chunk _ x) = forceLBS x
-- | Copy bytes from the source into the destination chunk lists,
-- advancing through both in lockstep until either side is exhausted.
-- The "destination" chunks are written through their foreign pointers,
-- so this mutates mapped memory in place.
bscpy :: BL.ByteString -> BL.ByteString -> IO ()
-- skip empty chunks on either side
bscpy (PS _ _ 0 `Chunk` dest_rest) src = bscpy dest_rest src
bscpy dest (PS _ _ 0 `Chunk` src_rest) = bscpy dest src_rest
bscpy (PS dest_fptr dest_off dest_size `Chunk` dest_rest)
      (PS src_fptr src_off src_size `Chunk` src_rest)
  = do let csize = min dest_size src_size
       withForeignPtr dest_fptr $ \dest_ptr ->
         withForeignPtr src_fptr $ \src_ptr ->
           memcpy (dest_ptr `advancePtr` dest_off)
                  (src_ptr `advancePtr` src_off)
                  (fromIntegral csize) -- TODO memmove?
       -- continue with the unconsumed remainder of the larger chunk
       bscpy (PS dest_fptr (dest_off + csize) (dest_size - csize) `Chunk` dest_rest)
             (PS src_fptr (src_off + csize) (src_size - csize) `Chunk` src_rest)
bscpy _ _ = return ()
-- | Copy the lazy bytestring into the mapping starting at the given
-- offset; input extending past the mapped region is truncated.
writeBytes :: FileOffset -> BL.ByteString -> FileMap -> IO ()
writeBytes off lbs m = bscpy dest src
  where
    src = BL.take (fromIntegral (BL.length dest)) lbs
    dest = unsafeReadBytes off (fromIntegral (BL.length lbs)) m
{-# language ScopedTypeVariables #-}
{-# language TypeFamilies #-}
module Data.FVar (
FVar,
module Data.FVar,
) where
import Data.FVar.Core (FVar, nyi)
import qualified Data.FVar.Core as Core
-- | Runs the given action in a transaction on an FVar-store.
-- The location of the FVar-store is given by the filepath.
-- Transactions can be nested.
-- This is a simple wrapper around 'Data.FVar.Core.withTransaction'.
-- It uses the ThreadId to manage transactions. So one transaction is
-- bound to one thread.
--
-- Currently a stub ('nyi' = not yet implemented).
withTransaction :: FilePath -> IO a -> IO a
withTransaction = nyi
-- | Create a new FVar at the given path holding the given value.  (Stub.)
newFVar :: FilePath -> a -> IO (FVar a)
newFVar = nyi
-- | Open an existing FVar stored at the given path.  (Stub.)
openFVar :: FilePath -> IO (FVar a)
openFVar = nyi
-- | Read the current value of an FVar.  (Stub.)
readFVar :: FVar a -> IO a
readFVar = nyi
-- | Stores a new value, destroying the previous one.  (Stub.)
writeFVar :: FVar a -> a -> IO ()
writeFVar = nyi
-- * convenience
-- | Modify the value in an FVar with an action that also produces a
-- result.  Stubbed out with 'nyi' for consistency with every other
-- operation in this module (previously raised a bare @error "NYI"@).
modifyFVar :: FVar a -> (a -> IO (a, b)) -> IO b
modifyFVar = nyi
-- | Create a new FVar inside the given directory.  (Stub; both
-- arguments are currently ignored.)
newFVarInDirectory :: FilePath -> a -> IO (FVar a)
newFVarInDirectory directory value = nyi
| soenkehahn/fvar | Data/FVar.hs | bsd-3-clause | 1,044 | 0 | 10 | 204 | 251 | 139 | 112 | 21 | 1 |
{-# LANGUAGE OverloadedLists #-}
module Core.Free where
import qualified Data.Set as S
import Data.Set ((\\))
import Data.Monoid ((<>), mconcat)
import Core.Core
-- | Sets of variable names.
type StringSet = S.Set String

-- | All variables a pattern brings into scope (its bound variables).
bv :: Pat -> StringSet
bv pat = case pat of
  Wildcard           -> S.empty
  PLit{}             -> S.empty
  Capture x          -> S.singleton x
  Bound x p          -> S.insert x (bv p)
  PList ps           -> mconcat (map bv ps)
  Constructor _ _ ps -> mconcat (map bv ps)
  Cons a b           -> bv a <> bv b
-- | Free variables of an expression: every 'Var' occurrence minus the
-- names bound by the enclosing patterns.
fv :: Expression -> StringSet
fv expr = case expr of
  Var x       -> S.singleton x
  Lambda p e  -> fv e \\ bv p
  If c t e    -> fv c <> fv t <> fv e
  List es     -> mconcat (map fv es)
  Case arms e ->
    let armFv (CaseArm p body) = fv body \\ bv p
    in mconcat (map armFv arms) <> fv e
  Let p e b   -> (fv b \\ bv p) <> fv e
  BinOp o a b -> fv o <> fv a <> fv b
  Literal{}   -> S.empty
  Unbox{}     -> S.empty
  Apply f a   -> fv f <> fv a
| demhydraz/waffle | src/Core/Free.hs | bsd-3-clause | 827 | 0 | 9 | 235 | 509 | 255 | 254 | 27 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Afftrack.API.Brand.AdminManager
( getAdmins
, getAdminStatus
)where
import GHC.Generics
import Data.Aeson
import Control.Applicative
import Network.HTTP.Client
import qualified Data.ByteString.Char8 as BS
import Data.Text
import Afftrack.API.Common
import Afftrack.API.Types
--------------------------------------------------------------------------------
-- | API call description for listing admin managers.  All parameters
-- are optional ('False' marks a parameter as not required) and default
-- to the empty string.
getAdmins =
  Call "admin_managers"
       "getAdmins"
       "GET"
       [ Param "limit" False ""
       , Param "orderby" False ""
       , Param "page" False ""
       , Param "sort" False ""
       , Param "status" False ""
       ]
-- | API call description for querying admin status; takes no parameters.
getAdminStatus =
  Call "admin_managers"
       "getAdminStatus"
       "GET"
       [
       ]
| kelecorix/api-afftrack | src/Afftrack/API/Brand/AdminManager.hs | bsd-3-clause | 772 | 0 | 7 | 199 | 145 | 84 | 61 | 26 | 1 |
module Sound
( MusicName (..)
, Command (..)
, SoundChan
, getSoundThread
, module Control.Concurrent.Chan
) where
import Control.Applicative
import Control.Concurrent
import Control.Concurrent.Chan
import Data.Array
import Graphics.UI.SDL.Mixer
-- | All loaded audio resources: the title music plus an indexed array
-- of sound-effect chunks.
data MusicData = MD
  { title :: Music
  , se :: Array Int Chunk
  }
-- | Music tracks that can be requested (currently only the title theme).
data MusicName = Title
  deriving (Eq, Show)
-- | Messages understood by the sound thread.
data Command = Quit | StopMusic | Music MusicName | Select | Shoot
  deriving (Eq, Show)
-- | Channel used to send 'Command's to the sound thread.
type SoundChan = Chan Command
-- | Load all audio assets from the res/ directory; the SE array is
-- indexed from 0 in list order (0 = select, 1 = fire).
loadMD :: IO MusicData
loadMD = MD <$> loadMUS "res/title.ogg"
            <*> loadSE [ "res/select.wav"
                       , "res/fire.wav"
                       ]
  where
    loadSE :: [FilePath] -> IO (Array Int Chunk)
    loadSE f = fmap (listArray (0, length f - 1)) (mapM loadWAV f)
-- | Release the title music.  NOTE(review): the SE 'Chunk's are not
-- freed here -- confirm whether the SDL binding finalizes them elsewhere.
freeMD :: MusicData -> IO ()
freeMD (MD t _) = do
  freeMusic t
-- | Load the audio assets and fork the sound loop, returning the
-- channel used to control it.
getSoundThread :: IO SoundChan
getSoundThread = do
  chan <- newChan
  md <- loadMD
  _ <- forkIO $ soundThread chan md
  return chan
-- | Sound loop: block on the channel, execute one command, then
-- recurse until 'Quit' arrives, at which point the assets are released.
soundThread :: SoundChan -> MusicData -> IO ()
soundThread ch md = do
  command <- readChan ch
  case command of
    Quit -> return ()
    StopMusic -> do
      p <- playingMusic
      -- only fade out if something is actually playing
      if p then fadeOutMusic 1000 else return ()
    Music musicName -> case musicName of
      Title -> playMusic (title md) (-1)   -- (-1) = loop forever
    Select -> playSE 0
    Shoot -> playSE 1
  if command /= Quit
    then soundThread ch md
    else do
      freeMD md
  where
    -- play sound effect seNum on the mixer channel of the same number
    playSE seNum = do
      playChannel seNum ((se md) ! seNum) 0
      return ()
| c000/PaperPuppet | src/Sound.hs | bsd-3-clause | 1,514 | 0 | 15 | 415 | 537 | 276 | 261 | 53 | 7 |
{-# LANGUAGE ForeignFunctionInterface, CPP #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.ARB.TessellationShader
-- Copyright : (c) Sven Panne 2013
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- All raw functions and tokens from the ARB_tessellation_shader extension, see
-- <http://www.opengl.org/registry/specs/ARB/tessellation_shader.txt>.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.ARB.TessellationShader (
-- * Functions
glPatchParameteri,
glPatchParameterfv,
-- * Tokens
gl_PATCHES,
gl_PATCH_VERTICES,
gl_PATCH_DEFAULT_INNER_LEVEL,
gl_PATCH_DEFAULT_OUTER_LEVEL,
gl_TESS_CONTROL_OUTPUT_VERTICES,
gl_TESS_GEN_MODE,
gl_TESS_GEN_SPACING,
gl_TESS_GEN_VERTEX_ORDER,
gl_TESS_GEN_POINT_MODE,
gl_TRIANGLES,
gl_ISOLINES,
gl_QUADS,
gl_EQUAL,
gl_FRACTIONAL_ODD,
gl_FRACTIONAL_EVEN,
gl_CCW,
gl_CW,
gl_MAX_PATCH_VERTICES,
gl_MAX_TESS_GEN_LEVEL,
gl_MAX_TESS_CONTROL_UNIFORM_COMPONENTS,
gl_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS,
gl_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS,
gl_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS,
gl_MAX_TESS_CONTROL_OUTPUT_COMPONENTS,
gl_MAX_TESS_PATCH_COMPONENTS,
gl_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS,
gl_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS,
gl_MAX_TESS_CONTROL_UNIFORM_BLOCKS,
gl_MAX_TESS_EVALUATION_UNIFORM_BLOCKS,
gl_MAX_TESS_CONTROL_INPUT_COMPONENTS,
gl_MAX_TESS_EVALUATION_INPUT_COMPONENTS,
gl_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS,
gl_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS,
gl_UNIFORM_BLOCK_REFERENCED_BY_TESS_CONTROL_SHADER,
gl_UNIFORM_BLOCK_REFERENCED_BY_TESS_EVALUATION_SHADER,
gl_TESS_EVALUATION_SHADER,
gl_TESS_CONTROL_SHADER
) where
import Foreign.C.Types
import Foreign.Ptr
import Graphics.Rendering.OpenGL.Raw.ARB.Compatibility.Tokens
import Graphics.Rendering.OpenGL.Raw.Core31.Tokens
import Graphics.Rendering.OpenGL.Raw.Core31.Types
import Graphics.Rendering.OpenGL.Raw.Extensions
#include "HsOpenGLRaw.h"
-- | The extension's name as it appears in the OpenGL registry.
-- Fixed: the previous value "GL_ARB_ARB_tessellation_shader" duplicated
-- the "ARB_" prefix, so extension-presence checks could never match.
extensionNameString :: String
extensionNameString = "GL_ARB_tessellation_shader"
-- CPP macro (from HsOpenGLRaw.h): declares the dynamic entry point, the
-- function pointer, and the Haskell wrapper for each extension function.
EXTENSION_ENTRY(dyn_glPatchParameteri,ptr_glPatchParameteri,"glPatchParameteri",glPatchParameteri,GLenum -> GLint -> IO ())
EXTENSION_ENTRY(dyn_glPatchParameterfv,ptr_glPatchParameterfv,"glPatchParameterfv",glPatchParameterfv,GLenum -> Ptr GLfloat -> IO ())
-- Token values below are taken verbatim from the ARB_tessellation_shader
-- registry specification.
gl_PATCHES :: GLenum
gl_PATCHES = 0x000E
gl_PATCH_VERTICES :: GLenum
gl_PATCH_VERTICES = 0x8E72
gl_PATCH_DEFAULT_INNER_LEVEL :: GLenum
gl_PATCH_DEFAULT_INNER_LEVEL = 0x8E73
gl_PATCH_DEFAULT_OUTER_LEVEL :: GLenum
gl_PATCH_DEFAULT_OUTER_LEVEL = 0x8E74
gl_TESS_CONTROL_OUTPUT_VERTICES :: GLenum
gl_TESS_CONTROL_OUTPUT_VERTICES = 0x8E75
gl_TESS_GEN_MODE :: GLenum
gl_TESS_GEN_MODE = 0x8E76
gl_TESS_GEN_SPACING :: GLenum
gl_TESS_GEN_SPACING = 0x8E77
gl_TESS_GEN_VERTEX_ORDER :: GLenum
gl_TESS_GEN_VERTEX_ORDER = 0x8E78
gl_TESS_GEN_POINT_MODE :: GLenum
gl_TESS_GEN_POINT_MODE = 0x8E79
gl_ISOLINES :: GLenum
gl_ISOLINES = 0x8E7A
gl_FRACTIONAL_ODD :: GLenum
gl_FRACTIONAL_ODD = 0x8E7B
gl_FRACTIONAL_EVEN :: GLenum
gl_FRACTIONAL_EVEN = 0x8E7C
gl_MAX_PATCH_VERTICES :: GLenum
gl_MAX_PATCH_VERTICES = 0x8E7D
gl_MAX_TESS_GEN_LEVEL :: GLenum
gl_MAX_TESS_GEN_LEVEL = 0x8E7E
gl_MAX_TESS_CONTROL_UNIFORM_COMPONENTS :: GLenum
gl_MAX_TESS_CONTROL_UNIFORM_COMPONENTS = 0x8E7F
gl_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS :: GLenum
gl_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS = 0x8E80
gl_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS :: GLenum
gl_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS = 0x8E81
gl_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS :: GLenum
gl_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS = 0x8E82
gl_MAX_TESS_CONTROL_OUTPUT_COMPONENTS :: GLenum
gl_MAX_TESS_CONTROL_OUTPUT_COMPONENTS = 0x8E83
gl_MAX_TESS_PATCH_COMPONENTS :: GLenum
gl_MAX_TESS_PATCH_COMPONENTS = 0x8E84
gl_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS :: GLenum
gl_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS = 0x8E85
gl_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS :: GLenum
gl_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS = 0x8E86
gl_MAX_TESS_CONTROL_UNIFORM_BLOCKS :: GLenum
gl_MAX_TESS_CONTROL_UNIFORM_BLOCKS = 0x8E89
gl_MAX_TESS_EVALUATION_UNIFORM_BLOCKS :: GLenum
gl_MAX_TESS_EVALUATION_UNIFORM_BLOCKS = 0x8E8A
gl_MAX_TESS_CONTROL_INPUT_COMPONENTS :: GLenum
gl_MAX_TESS_CONTROL_INPUT_COMPONENTS = 0x886C
gl_MAX_TESS_EVALUATION_INPUT_COMPONENTS :: GLenum
gl_MAX_TESS_EVALUATION_INPUT_COMPONENTS = 0x886D
gl_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS :: GLenum
gl_MAX_COMBINED_TESS_CONTROL_UNIFORM_COMPONENTS = 0x8E1E
gl_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS :: GLenum
gl_MAX_COMBINED_TESS_EVALUATION_UNIFORM_COMPONENTS = 0x8E1F
gl_UNIFORM_BLOCK_REFERENCED_BY_TESS_CONTROL_SHADER :: GLenum
gl_UNIFORM_BLOCK_REFERENCED_BY_TESS_CONTROL_SHADER = 0x84F0
gl_UNIFORM_BLOCK_REFERENCED_BY_TESS_EVALUATION_SHADER :: GLenum
gl_UNIFORM_BLOCK_REFERENCED_BY_TESS_EVALUATION_SHADER = 0x84F1
gl_TESS_EVALUATION_SHADER :: GLenum
gl_TESS_EVALUATION_SHADER = 0x8E87
gl_TESS_CONTROL_SHADER :: GLenum
gl_TESS_CONTROL_SHADER = 0x8E88
| mfpi/OpenGLRaw | src/Graphics/Rendering/OpenGL/Raw/ARB/TessellationShader.hs | bsd-3-clause | 5,263 | 0 | 11 | 510 | 595 | 375 | 220 | -1 | -1 |
module ADP.Tests.PaperExample where
import ADP.Multi.All
import ADP.Multi.Rewriting.All
import ADP.Tests.ABABExample(MyChar(..),myString,toString)
-- | Evaluation algebra for the paper-example grammar: one function per
-- production (fz*, fk*, fl*, fp, fb) plus the objective function h.
type Paper_Algebra alphabet answer = (
  answer -> answer -> answer -> answer, -- fz1
  answer -> answer -> answer, -- fz2
  answer -> answer -> answer -> answer, -- fz3
  EPS -> answer, -- fe
  answer -> answer -> answer, -- fk1
  answer -> answer, -- fk2
  answer -> answer -> answer, -- fl1
  answer -> answer, -- fl2
  ([alphabet], [alphabet]) -> answer, -- fp
  [alphabet] -> answer, -- fb
  [answer] -> [answer] -- h
  )
-- | Parse-tree terms, one constructor per grammar production; produced
-- by the 'enum' algebra.
data Term = FZ1 Term Term Term
          | FZ2 Term Term
          | FZ3 Term Term Term
          | FE
          | FK1 Term Term
          | FK2 Term
          | FL1 Term Term
          | FL2 Term
          | FP (String, String)
          | FB String
          deriving (Eq, Show)
-- | Enumeration algebra: builds the 'Term' parse tree, converting
-- terminal symbols to plain strings.
enum :: Paper_Algebra MyChar Term
enum = (FZ1,FZ2,FZ3,\_->FE,FK1,FK2,FL1,FL2,
        \(p1,p2) -> FP (toString p1,toString p2),
        \b -> FB (toString b),
        id)
-- | Base-pair maximisation algebra: every parsed pair scores 1, every
-- unpaired base 0, scores of sub-structures are summed, and the
-- objective function keeps only the maximum (or nothing, if there are
-- no candidates).
bpmax :: Paper_Algebra MyChar Int
bpmax = (fz1,fz2,fz3,fe,fk1,fk2,fl1,fl2,fp,fb,h) where
   fz1 kScore lScore zScore = kScore + lScore + zScore
   fz2 _ zScore = zScore
   fz3 pScore zScore1 zScore2 = pScore + zScore1 + zScore2
   fe _ = 0
   fk1 pScore kScore = pScore + kScore
   fk2 pScore = pScore
   fl1 pScore lScore = pScore + lScore
   fl2 pScore = pScore
   fp _ = 1
   fb _ = 0
   h [] = []
   h scores = [maximum scores]
-- | Dot-bracket algebra: renders each candidate structure in dot-bracket
-- notation. The list patterns below are intentionally partial: they
-- encode the dimension of each nonterminal (two string components for
-- the 2-dimensional symbols k, l, p; exactly one for 1-dimensional z/b).
dotbracket :: Paper_Algebra MyChar [String]
dotbracket = (fz1,fz2,fz3,fe,fk1,fk2,fl1,fl2,fp,fb,h) where
   fz1 [k1,k2] [l1,l2] [z] = [k1 ++ l1 ++ k2 ++ l2 ++ z]
   fz2 [b] [z] = [b ++ z]
   fz3 [p1,p2] [z1] [z2] = [p1 ++ z1 ++ p2 ++ z2]
   fe _ = [""]
   fk1 _ [k1,k2] = [k1 ++ "(",")" ++ k2]
   fk2 _ = ["(",")"]
   fl1 _ [l1,l2] = [l1 ++ "[","]" ++ l2]
   fl2 _ = ["[","]"]
   fp _ = ["(",")"]
   fb _ = ["."]
   h = id
-- see ABABExample.hs
-- | LaTeX "forest" rendering of derivations: emits one bracketed forest
-- node per production, with terminals pinned to their input index via
-- the @leaf position@ option. (Same idea as in ABABExample.hs.)
texforestnew :: Paper_Algebra MyChar String
texforestnew = (fz1,fz2,fz3,fe,fk1,fk2,fl1,fl2,fp,fb,h) where
   -- render one terminal leaf at its input index
   term c idx = "[" ++ c ++ ", leaf position=" ++ show idx ++ "] "
   fz1 k l z = "[Z " ++ k ++ l ++ z ++ " ] "
   fz2 b z = "[Z " ++ b ++ z ++ "] "
   fz3 p z1 z2 = "[Z " ++ p ++ z1 ++ z2 ++ "] "
   -- a small hack: epsilon has no index of its own, so place it half a
   -- position before index i
   fe (EPS i) = "[Z [$\\epsilon$, leaf position=" ++ show ((fromIntegral i)-0.5) ++ " ] ] "
   fk1 p k = "[K " ++ p ++ k ++ "]"
   fk2 p = "[K " ++ p ++ "]"
   fl1 p l = "[L " ++ p ++ l ++ "]"
   fl2 p = "[L " ++ p ++ "]"
   fp ([MyChar b1 i1],[MyChar b2 i2]) = "[P " ++ term [b1] i1 ++ term [b2] i2 ++ "] "
   fb [MyChar b i] = "[B " ++ term [b] i ++ "] "
   h = id
-- | The grammar itself, parameterised over the evaluation algebra.
-- Nonterminals: z (the axiom, 1-dimensional), k and l (2-dimensional,
-- emitting "("/")" resp. "["/"]" in the dot-bracket view), p
-- (2-dimensional base pair) and b (a single unpaired base).
grammar :: Paper_Algebra MyChar answer -> String -> [answer]
grammar algebra inp =
  let
    (fz1,fz2,fz3,fe,fk1,fk2,fl1,fl2,fp,fb,h) = algebra
    -- rewriting functions: fix how the parsed components interleave
    rz1, rz3 :: Dim1
    rz1 [k1,k2,l1,l2,z] = [k1,l1,k2,l2,z]
    rz3 [p1,p2,z1,z2] = [p1,z1,p2,z2]
    -- axiom: crossing k/l halves, an unpaired base, a nested pair, or empty
    z = tabulated1 $
        yieldSize1 (0,Nothing) $
        fz1 <<< k ~~~ l ~~~ z >>> rz1 |||
        fz2 <<< b ~~~ z >>> id1 |||
        fz3 <<< p ~~~ z ~~~ z >>> rz3 |||
        fe <<< EPS 0 >>> id1
        ... h
    -- two-dimensional rewriting for stacking a pair onto k (also reused by l)
    rk1 :: Dim2
    rk1 [p1,p2,k1,k2] = ([k1,p1],[p2,k2])
    k = tabulated2 $
        yieldSize2 (1,Nothing) (1,Nothing) $
        fk1 <<< p ~~~ k >>> rk1 |||
        fk2 <<< p >>> id2
    l = tabulated2 $
        yieldSize2 (1,Nothing) (1,Nothing) $
        fl1 <<< p ~~~ l >>> rk1 |||
        fl2 <<< p >>> id2
    -- p parses the six allowed base pairs (a-u, c-g, g-u and symmetric)
    p = tabulated2 $
        fp <<< (myString "a", myString "u") >>> id2 |||
        fp <<< (myString "u", myString "a") >>> id2 |||
        fp <<< (myString "c", myString "g") >>> id2 |||
        fp <<< (myString "g", myString "c") >>> id2 |||
        fp <<< (myString "g", myString "u") >>> id2 |||
        fp <<< (myString "u", myString "g") >>> id2
    -- b parses a single unpaired base
    b = tabulated1 $
        fb <<< myString "a" >>> id1 |||
        fb <<< myString "u" >>> id1 |||
        fb <<< myString "c" >>> id1 |||
        fb <<< myString "g" >>> id1
    inpa = mk (myString inp)
    tabulated1 = table1 inpa
    tabulated2 = table2 inpa
  in axiom inpa z
| adp-multi/adp-multi | tests/ADP/Tests/PaperExample.hs | bsd-3-clause | 4,184 | 0 | 29 | 1,521 | 1,860 | 1,028 | 832 | 113 | 2 |
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances, FlexibleContexts #-}
{-# LANGUAGE EmptyDataDecls, ScopedTypeVariables, KindSignatures #-}
-- | Haskell with only one typeclass
--
-- <http://okmij.org/ftp/Haskell/Haskell1/Class1.hs>
--
-- <http://okmij.org/ftp/Haskell/types.html#Haskell1>
--
-- How to make ad hoc overloading less ad hoc while defining no
-- type classes.
-- For clarity, we call as Haskell1 the language Haskell98
-- with no typeclass declarations but with a single, pre-defined typeclass C
-- (which has two parameters related by a functional dependency).
-- The programmers may not declare any typeclasses; but they
-- may add instances to C and use them. We show on a series of examples that
-- despite the lack of typeclass declarations, Haskell1 can express all
-- the typeclass code of Haskell98 plus multi-parameter type classes
-- and even some (most useful?) functional dependencies.
--
-- Haskell1 is not a new language and requires no new compilers;
-- rather, it is a subset of the current Haskell. The `removal' of typeclass
-- declarations is merely the matter of discipline.
--
--
--
module Data.Class1 where
-- | The one and only type class present in Haskell1. The label type @l@
-- names an overloaded operation (and the types it is resolved at); the
-- functional dependency makes the label determine the operation's type @t@.
class C l t | l -> t where
    ac :: l -> t

-- | Short synonym for 'undefined'. Defined as literal 'undefined' (rather
-- than the self-referential @__ = __@, which diverges with an opaque
-- \<\<loop\>\> if ever forced) so that accidental evaluation yields a clear
-- error; it is only ever used as a type proxy and never evaluated.
__ = undefined
-- ----------------------------------------------------------------------
-- | Example 1: Building overloaded numeric functions, the analogue of Num.
-- The following defines overloaded numeric functions `a la carte'. We
-- shall see how to bundle such methods into what Haskell98 calls `classes'
--
-- Auxiliary (empty) label types naming each overloaded numeric operation.
data Add a -- auxiliary labels
data Mul a
data FromInteger a
instance C (Add Int) (Int->Int->Int) where
    ac _ x y = x + y
-- | We can now define the generic addition. We use the operation +$
-- to avoid the confusion with Prelude.(+)
infixl 6 +$
-- | In H98, the overloaded addition was a method. In Haskell1, it is an
-- ordinary (bounded polymorphic) function.
-- The proxy argument (__ :: Add a) selects the instance; ScopedTypeVariables
-- makes the @a@ in the body refer to the @a@ of the signature.
(+$) :: forall a. C (Add a) (a->a->a) => a -> a -> a
(+$) = ac (__:: Add a)
ta1 = (1::Int) +$ 2 +$ 3
-- 6
-- | Let's define the addition for floats
instance C (Add Float) (Float->Float->Float) where
    ac _ x y = x + y
-- | We now illustrate overloading over datatypes other than basic ones.
-- We define dual numbers (see Wikipedia)
data Dual a = Dual a a deriving Show
-- | We define the addition of Duals inductively, with the addition over
-- base types as the base case.
-- We could have eliminated the mentioning (a->a->a) and replaced with some
-- type t. But then we would need the undecidable instance extension...
instance C (Add a) (a->a->a) => C (Add (Dual a)) (Dual a->Dual a->Dual a) where
    ac _ (Dual x1 y1) (Dual x2 y2) = Dual (x1 +$ x2) (y1 +$ y2)
-- | The following test uses the previously defined +$ operation, which
-- now accounts for duals automatically.
-- As in Haskell98, our overloaded functions are extensible.
ta2 = let x = Dual (1::Int) 2 in x +$ x
-- Dual 2 4
-- | Likewise define the overloaded multiplication
infixl 7 *$
instance C (Mul Int) (Int->Int->Int) where
    ac _ x y = x * y
instance C (Mul Float) (Float->Float->Float) where
    ac _ x y = x * y
-- Product rule for dual numbers: (x1 + y1 e)(x2 + y2 e) = x1 x2 + (x1 y2 + y1 x2) e
instance (C (Add a) (a->a->a), C (Mul a) (a->a->a))
    => C (Mul (Dual a)) (Dual a->Dual a->Dual a) where
    ac _ (Dual x1 y1) (Dual x2 y2) = Dual (x1 *$ x2) (x1 *$ y2 +$ y1 *$ x2)
-- | Here is a different, perhaps simpler, way of defining signatures of
-- overloaded functions. The constraint C is inferred and no longer has
-- to be mentioned explicitly
mul_sig :: a -> a -> a; mul_sig = undefined
mul_as :: a -> Mul a; mul_as = undefined
-- The first clause is dead code (guard is False): it exists only to unify
-- the type of (*$) with mul_sig, so no explicit signature is needed.
x *$ y | False = mul_sig x y
x *$ y = ac (mul_as x) x y
-- | fromInteger conversion
-- This numeric operation is different from the previous in that
-- the overloading is resolved on the result type only. The function
-- `read' is another example of such a `producer'
instance C (FromInteger Int) (Integer->Int) where
    ac _ = fromInteger
instance C (FromInteger Float) (Integer->Float) where
    ac _ = fromInteger
instance (C (FromInteger a) (Integer->a))
    => C (FromInteger (Dual a)) (Integer->Dual a) where
    -- an integer literal has a zero dual component
    ac _ x = Dual (frmInteger x) (frmInteger 0)
-- | and the corresponding overloaded function (which in Haskell98 was a method)
-- Again, we chose a slightly different name to avoid the confusion with
-- the Prelude
frmInteger :: forall a. C (FromInteger a) (Integer->a) => Integer -> a
frmInteger = ac (__::FromInteger a)
-- | We can define generic function at will, using already defined overloaded
-- functions. For example,
genf x = x *$ x *$ (frmInteger 2)
tm1 = genf (Dual (1::Float) 2) +$ (frmInteger 3)
-- Dual 5.0 8.0
-- For completeness, we implement the quintessential Haskell98 function, Show.
data SHOW a
instance C (SHOW Int) (Int->String) where
    ac _ = show
instance C (SHOW Float) (Float->String) where
    ac _ = show
instance (C (SHOW a) (a->String))
    => C (SHOW (Dual a)) (Dual a -> String) where
    ac _ (Dual x y) = "(|" ++ shw x ++ "," ++ shw y ++ "|)"
-- | Overloaded show, resolved through the SHOW label.
shw :: forall a. C (SHOW a) (a->String) => a->String
shw = ac (__::SHOW a)
ts1 = shw tm1
-- "(|5.0,8.0|)"
-- | Finally, we demonstrate overloading of non-functional values, such as
-- minBound and maxBound. These are not `methods' in the classical sense:
-- the overloading is resolved purely on the result type.
--
data MinBound a
instance C (MinBound Int) Int where
    ac _ = minBound
instance C (MinBound Bool) Bool where
    ac _ = False
mnBound :: forall a. C (MinBound a) a => a
mnBound = ac (__::MinBound a)
tmb = mnBound::Int
-- -2147483648
-- ----------------------------------------------------------------------
-- Constructor classes and Monads
-- | We are defining a super-set of monads, so called `restricted monads'.
-- Restricted monads include all ordinary monads; in addition, we can
-- define a SET monad. See
-- <http://okmij.org/ftp/Haskell/types.html#restricted-datatypes>
--
-- Labels for the (restricted) monad operations. The element types are part
-- of the label, which is what allows `restricted monads' such as a SET monad.
data RET (m :: * -> *) a
data BIND (m :: * -> *) a b
-- | Overloaded return
ret :: forall m a. C (RET m a) (a->m a) => a -> m a
ret = ac (__::RET m a)
-- | Overloaded bind
bind :: forall m a b. C (BIND m a b) (m a->(a -> m b)->m b) =>
        (m a->(a -> m b)->m b)
bind = ac (__::BIND m a b)
-- | Define two sample monads
--
instance C (RET Maybe a) (a -> Maybe a) where
    ac _ = Just
instance C (BIND Maybe a b) (Maybe a -> (a->Maybe b) -> Maybe b) where
    ac _ Nothing f = Nothing
    ac _ (Just x) f = f x
instance C (RET (Either e) a) (a -> Either e a) where
    ac _ = Right
instance C (BIND (Either e) a b)
           (Either e a -> (a->Either e b) -> Either e b) where
    ac _ (Right x) f = f x
    ac _ (Left x) f = Left x
-- | An example of using monads and other overloaded functions
tmo = (tmo' True, tmo' False)
  where
    tmo' x = let t = if x then Nothing else ret (1::Int)
                 v = t `bind` (\x -> ret (x +$ (frmInteger 1)))
             in shw v
-- ("Nothing","Just 2")
instance C (SHOW a) (a->String) => C (SHOW (Maybe a)) (Maybe a->String) where
    ac _ Nothing = "Nothing"
    ac _ (Just x) = "Just " ++ shw x
| suhailshergill/liboleg | Data/Class1.hs | bsd-3-clause | 7,105 | 7 | 16 | 1,497 | 1,993 | 1,081 | 912 | -1 | -1 |
module Distribution.Client.Dependency.Modular.Validate where
-- Validation of the tree.
--
-- The task here is to make sure all constraints hold. After validation, any
-- assignment returned by exploration of the tree should be a complete valid
-- assignment, i.e., actually constitute a solution.
import Control.Applicative
import Control.Monad.Reader hiding (sequence)
import Data.List as L
import Data.Map as M
import Data.Traversable
import Prelude hiding (sequence)
import Distribution.Client.Dependency.Modular.Assignment
import Distribution.Client.Dependency.Modular.Dependency
import Distribution.Client.Dependency.Modular.Flag
import Distribution.Client.Dependency.Modular.Index
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.Dependency.Modular.PSQ as P
import Distribution.Client.Dependency.Modular.Tree
-- In practice, most constraints are implication constraints (IF we have made
-- a number of choices, THEN we also have to ensure that). We call constraints
-- that for which the precondiditions are fulfilled ACTIVE. We maintain a set
-- of currently active constraints that we pass down the node.
--
-- We aim at detecting inconsistent states as early as possible.
--
-- Whenever we make a choice, there are two things that need to happen:
--
-- (1) We must check that the choice is consistent with the currently
-- active constraints.
--
-- (2) The choice increases the set of active constraints. For the new
-- active constraints, we must check that they are consistent with
-- the current state.
--
-- We can actually merge (1) and (2) by saying the the current choice is
-- a new active constraint, fixing the choice.
--
-- If a test fails, we have detected an inconsistent state. We can
-- disable the current subtree and do not have to traverse it any further.
--
-- We need a good way to represent the current state, i.e., the current
-- set of active constraints. Since the main situation where we have to
-- search in it is (1), it seems best to store the state by package: for
-- every package, we store which versions are still allowed. If for any
-- package, we have inconsistent active constraints, we can also stop.
-- This is a particular way to read task (2):
--
-- (2, weak) We only check if the new constraints are consistent with
-- the choices we've already made, and add them to the active set.
--
-- (2, strong) We check if the new constraints are consistent with the
-- choices we've already made, and the constraints we already have.
--
-- It currently seems as if we're implementing the weak variant. However,
-- when used together with 'preferEasyGoalChoices', we will find an
-- inconsistent state in the very next step.
--
-- What do we do about flags?
--
-- Like for packages, we store the flag choices we have already made.
-- Now, regarding (1), we only have to test whether we've decided the
-- current flag before. Regarding (2), the interesting bit is in discovering
-- the new active constraints. To this end, we look up the constraints for
-- the package the flag belongs to, and traverse its flagged dependencies.
-- Wherever we find the flag in question, we start recording dependencies
-- underneath as new active dependencies. If we encounter other flags, we
-- check if we've chosen them already and either proceed or stop.
-- | The state needed during validation.
data ValidateState = VS {
  index :: Index,                      -- ^ available packages/instances and their deps
  saved :: Map QPN (FlaggedDeps QPN),  -- saved, scoped, dependencies
  pa :: PreAssignment                  -- ^ choices (and constraints) made so far
}

-- | Validation monad: read-only access to the current 'ValidateState'.
type Validate = Reader ValidateState
-- | Walk the search tree, checking every choice against the constraints
-- activated by the choices made so far; inconsistent choices become
-- 'Fail' nodes.
validate :: Tree (QGoalReasons, Scope) -> Validate (Tree QGoalReasons)
validate = cata go
  where
    go :: TreeF (QGoalReasons, Scope) (Validate (Tree QGoalReasons)) -> Validate (Tree QGoalReasons)

    go (PChoiceF qpn (gr, sc) ts) = PChoice qpn gr <$> sequence (P.mapWithKey (goP qpn gr sc) ts)
    go (FChoiceF qfn (gr, _sc) b ts) =
      do
        -- Flag choices may occur repeatedly (because they can introduce new constraints
        -- in various places). However, subsequent choices must be consistent. We thereby
        -- collapse repeated flag choice nodes.
        PA _ pfa <- asks pa -- obtain current flag-preassignment
        case M.lookup qfn pfa of
          Just rb -> -- flag has already been assigned; collapse choice to the correct branch
                     case P.lookup rb ts of
                       Just t -> goF qfn gr rb t
                       Nothing -> return $ Fail (toConflictSet (Goal (F qfn) gr)) (MalformedFlagChoice qfn)
          Nothing -> -- flag choice is new, follow both branches
                     FChoice qfn gr b <$> sequence (P.mapWithKey (goF qfn gr) ts)

    -- We don't need to do anything for goal choices or failure nodes.
    go (GoalChoiceF ts) = GoalChoice <$> sequence ts
    go (DoneF rdm ) = pure (Done rdm)
    go (FailF c fr ) = pure (Fail c fr)

    -- What to do for package nodes ...
    goP :: QPN -> QGoalReasons -> Scope -> I -> Validate (Tree QGoalReasons) -> Validate (Tree QGoalReasons)
    goP qpn@(Q _pp pn) gr sc i r = do
      PA ppa pfa <- asks pa -- obtain current preassignment
      idx <- asks index -- obtain the index
      svd <- asks saved -- obtain saved dependencies
      let (PInfo deps _ _) = idx ! pn ! i -- obtain dependencies introduced by the choice
      let qdeps = L.map (fmap (qualify sc)) deps -- qualify the deps in the current scope
      -- the new active constraints are given by the instance we have chosen,
      -- plus the dependency information we have for that instance
      let goal = Goal (P qpn) gr
      let newactives = Dep qpn (Fixed i goal) : L.map (resetGoal goal) (extractDeps pfa qdeps)
      -- We now try to extend the partial assignment with the new active constraints.
      let mnppa = extend (P qpn) ppa newactives
      -- In case we continue, we save the scoped dependencies
      let nsvd = M.insert qpn qdeps svd
      case mnppa of
        Left (c, d) -> -- We have an inconsistency. We can stop.
                       return (Fail c (Conflicting d))
        Right nppa -> -- We have an updated partial assignment for the recursive validation.
                      local (\ s -> s { pa = PA nppa pfa, saved = nsvd }) r

    -- What to do for flag nodes ...
    goF :: QFN -> QGoalReasons -> Bool -> Validate (Tree QGoalReasons) -> Validate (Tree QGoalReasons)
    goF qfn@(FN (PI qpn _i) _f) gr b r = do
      PA ppa pfa <- asks pa -- obtain current preassignment
      svd <- asks saved -- obtain saved dependencies
      -- Note that there should be saved dependencies for the package in question,
      -- because while building, we do not choose flags before we see the packages
      -- that define them.
      let qdeps = svd ! qpn
      -- We take the *saved* dependencies, because these have been qualified in the
      -- correct scope.
      --
      -- Extend the flag assignment
      let npfa = M.insert qfn b pfa
      -- We now try to get the new active dependencies we might learn about because
      -- we have chosen a new flag.
      let newactives = extractNewFlagDeps qfn gr b npfa qdeps
      -- As in the package case, we try to extend the partial assignment.
      case extend (F qfn) ppa newactives of
        Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found
        Right nppa -> local (\ s -> s { pa = PA nppa npfa }) r
-- | Extract every concrete dependency that is certain under the given
-- flag assignment: simple dependencies are kept as-is, flagged
-- dependencies are followed into the branch selected by the assignment,
-- and dropped entirely while the flag is still undecided.
extractDeps :: FAssignment -> FlaggedDeps QPN -> [Dep QPN]
extractDeps fa = concatMap go
  where
    go (Simple sd) = [sd]
    go (Flagged qfn _ td fd) =
      case M.lookup qfn fa of
        Nothing    -> []
        Just True  -> extractDeps fa td
        Just False -> extractDeps fa fd
-- | We try to find new dependencies that become available due to the given
-- flag choice. We therefore look for the flag in question, and then call
-- 'extractDeps' for everything underneath.
extractNewFlagDeps :: QFN -> QGoalReasons -> Bool -> FAssignment -> FlaggedDeps QPN -> [Dep QPN]
extractNewFlagDeps qfn gr b fa = go
  where
    -- runs in the list monad; mzero means "no new dependencies here"
    go deps = do
      d <- deps
      case d of
        Simple _ -> mzero
        Flagged qfn' _ td fd
          -- found the flag we just chose: everything in the selected
          -- branch becomes active, attributed to this flag's goal
          | qfn == qfn' -> L.map (resetGoal (Goal (F qfn) gr)) $
                           if b then extractDeps fa td else extractDeps fa fd
          -- a different flag: only descend if it has been decided already
          | otherwise -> case M.lookup qfn' fa of
                           Nothing -> mzero
                           Just True -> go td
                           Just False -> go fd
-- | Interface: validate a tree starting from an empty assignment and no
-- saved dependencies.
validateTree :: Index -> Tree (QGoalReasons, Scope) -> Tree QGoalReasons
validateTree idx t =
    runReader (validate t) initialState
  where
    initialState = VS idx M.empty (PA M.empty M.empty)
| IreneKnapp/Faction | faction/Distribution/Client/Dependency/Modular/Validate.hs | bsd-3-clause | 8,995 | 0 | 22 | 2,339 | 1,606 | 848 | 758 | 86 | 9 |
module Main where
-- filter files to get only finished games. reads dense files, outputs sparse ones.
import System.Environment
import System.IO
import Data.List (intercalate)
import System.FilePath
import Text.Printf
import Data.Maybe
import Board
-- | Run 'workOnFile' over every file named on the command line.
main :: IO ()
main = getArgs >>= mapM_ workOnFile
-- | Convert one dense-representation game file: keep only the rows whose
-- game is finished, re-encoded in the sparse representation. The output
-- file is created in the current working directory ('takeFileName' strips
-- the input's directory part); if no rows survive the filter, no output
-- file is written at all.
workOnFile :: FilePath -> IO ()
workOnFile fname = do
  -- e.g. "dir/games.txt" -> "games.txt.won-sparse.txt"
  let newName = takeFileName fname <.> "won-sparse" <.> takeExtension fname
  putStrLn (printf "Converting file: %s to %s" fname newName)
  input <- lines `fmap` readFile fname
  let output = mapMaybe translateRow input
  if null output
    then putStrLn "Skipping empty output file."
    else writeFile newName (unlines output)
-- | Parse one dense-representation row (a comma-separated list of ints);
-- if the board it encodes is a finished game, return the row re-encoded
-- in the sparse representation, otherwise Nothing.
translateRow rawRow
    | isFinished brd = Just (reprToRow (boardToSparse brd))
    | otherwise      = Nothing
  where
    brd = denseReprToBoard values
    values = read ("[" ++ rawRow ++ "]") :: [Int]
| Tener/deeplearning-thesis | src/get-finished-games.hs | bsd-3-clause | 903 | 0 | 13 | 186 | 254 | 130 | 124 | 23 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
-------------------------------------------------------------------------------
-- |
-- Module : Control.Lens.Zoom
-- Copyright : (C) 2012 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : Rank2Types
--
-------------------------------------------------------------------------------
module Control.Lens.Zoom
( Magnify(..)
, Zoom(..)
) where
import Control.Lens.Getter
import Control.Lens.Internal
import Control.Lens.Internal.Composition
import Control.Lens.Type
import Control.Monad
import Control.Monad.Reader.Class as Reader
import Control.Monad.State as State
import Control.Monad.Trans.State.Lazy as Lazy
import Control.Monad.Trans.State.Strict as Strict
import Control.Monad.Trans.Writer.Lazy as Lazy
import Control.Monad.Trans.Writer.Strict as Strict
import Control.Monad.Trans.RWS.Lazy as Lazy
import Control.Monad.Trans.RWS.Strict as Strict
import Control.Monad.Trans.Reader
import Control.Monad.Trans.Error
import Control.Monad.Trans.List
import Control.Monad.Trans.Identity
import Control.Monad.Trans.Maybe
import Data.Monoid
-- $setup
-- >>> import Control.Lens
-- >>> import Control.Monad.State
-- >>> import Data.Map as Map
-- >>> import Debug.SimpleReflect.Expr as Expr
-- >>> import Debug.SimpleReflect.Vars as Vars
-- >>> let f :: Expr -> Expr; f = Vars.f
-- >>> let g :: Expr -> Expr; g = Vars.g
-- >>> let h :: Expr -> Expr -> Expr; h = Vars.h
-- Chosen so that they have lower fixity than ('%='), and to match ('<~')
infixr 2 `zoom`, `magnify`

-- | This class allows us to use 'zoom' in, changing the State supplied by
-- many different monad transformers, potentially quite deep in a monad transformer stack.
--
-- The @k@ parameter is the internal functor used to thread results through
-- the supplied 'LensLike''; it is determined by the monad @m@ (see the
-- instances below).
class (MonadState s m, MonadState t n) => Zoom m n k s t | m -> s k, n -> t k, m t -> n, n s -> m where
  -- | Run a monadic action in a larger state than it was defined in,
  -- using a 'Lens'' or 'Control.Lens.Traversal.Traversal''.
  --
  -- This is commonly used to lift actions in a simpler state monad into a
  -- state monad with a larger state type.
  --
  -- When applied to a 'Control.Lens.Traversal.Traversal'' over
  -- multiple values, the actions for each target are executed sequentially
  -- and the results are aggregated.
  --
  -- This can be used to edit pretty much any monad transformer stack with a state in it!
  --
  -- >>> flip State.evalState (a,b) $ zoom _1 $ use id
  -- a
  --
  -- >>> flip State.execState (a,b) $ zoom _1 $ id .= c
  -- (c,b)
  --
  -- >>> flip State.execState [(a,b),(c,d)] $ zoom traverse $ _2 %= f
  -- [(a,f b),(c,f d)]
  --
  -- >>> flip State.runState [(a,b),(c,d)] $ zoom traverse $ _2 <%= f
  -- (f b <> f d <> mempty,[(a,f b),(c,f d)])
  --
  -- >>> flip State.evalState (a,b) $ zoom both (use id)
  -- a <> b
  --
  -- @
  -- 'zoom' :: 'Monad' m => 'Lens'' s t -> 'StateT' t m a -> 'StateT' s m a
  -- 'zoom' :: ('Monad' m, 'Monoid' c) => 'Control.Lens.Traversal.Traversal'' s t -> 'StateT' t m c -> 'StateT' s m c
  -- 'zoom' :: 'Monad' m => 'Lens'' s t -> 'RWST' r w t m c -> 'RWST' r w s m c
  -- 'zoom' :: ('Monad' m, 'Monoid' c) => 'Control.Lens.Traversal.Traversal'' s t -> 'RWST' r w t m c -> 'RWST' r w s m c
  -- 'zoom' :: 'Monad' m => 'Lens'' s t -> 'ErrorT' e ('RWST' r w t m c) -> 'ErrorT' e ('RWST' r w s m c)
  -- 'zoom' :: ('Monad' m, 'Monoid' c) => 'Control.Lens.Traversal.Traversal'' s t -> 'ErrorT' e ('RWST' r w t m c) -> 'ErrorT' e ('RWST' r w s m c)
  -- ...
  -- @
  zoom :: LensLike' (k c) t s -> m c -> n c
-- Base cases: zoom directly into the state of a (strict or lazy) StateT.
instance Monad z => Zoom (Strict.StateT s z) (Strict.StateT t z) (Focusing z) s t where
  zoom l (Strict.StateT m) = Strict.StateT $ unfocusing #. l (Focusing #. m)
  {-# INLINE zoom #-}
instance Monad z => Zoom (Lazy.StateT s z) (Lazy.StateT t z) (Focusing z) s t where
  zoom l (Lazy.StateT m) = Lazy.StateT $ unfocusing #. l (Focusing #. m)
  {-# INLINE zoom #-}
-- Inductive cases: lift zooming through other transformers; each picks a
-- wrapper functor that carries the transformer's extra effects along.
instance Zoom m n k s t => Zoom (ReaderT e m) (ReaderT e n) k s t where
  zoom l (ReaderT m) = ReaderT (zoom l . m)
  {-# INLINE zoom #-}
instance Zoom m n k s t => Zoom (IdentityT m) (IdentityT n) k s t where
  zoom l (IdentityT m) = IdentityT (zoom l m)
  {-# INLINE zoom #-}
instance (Monoid w, Monad z) => Zoom (Strict.RWST r w s z) (Strict.RWST r w t z) (FocusingWith w z) s t where
  zoom l (Strict.RWST m) = Strict.RWST $ \r -> unfocusingWith #. l (FocusingWith #. m r)
  {-# INLINE zoom #-}
instance (Monoid w, Monad z) => Zoom (Lazy.RWST r w s z) (Lazy.RWST r w t z) (FocusingWith w z) s t where
  zoom l (Lazy.RWST m) = Lazy.RWST $ \r -> unfocusingWith #. l (FocusingWith #. m r)
  {-# INLINE zoom #-}
instance (Monoid w, Zoom m n k s t) => Zoom (Strict.WriterT w m) (Strict.WriterT w n) (FocusingPlus w k) s t where
  zoom l = Strict.WriterT . zoom (\afb -> unfocusingPlus #. l (FocusingPlus #. afb)) . Strict.runWriterT
  {-# INLINE zoom #-}
instance (Monoid w, Zoom m n k s t) => Zoom (Lazy.WriterT w m) (Lazy.WriterT w n) (FocusingPlus w k) s t where
  zoom l = Lazy.WriterT . zoom (\afb -> unfocusingPlus #. l (FocusingPlus #. afb)) . Lazy.runWriterT
  {-# INLINE zoom #-}
instance Zoom m n k s t => Zoom (ListT m) (ListT n) (FocusingOn [] k) s t where
  zoom l = ListT . zoom (\afb -> unfocusingOn . l (FocusingOn . afb)) . runListT
  {-# INLINE zoom #-}
-- Maybe/Error: wrap results in May/Err so failure commutes with zooming.
instance Zoom m n k s t => Zoom (MaybeT m) (MaybeT n) (FocusingMay k) s t where
  zoom l = MaybeT . liftM getMay . zoom (\afb -> unfocusingMay #. l (FocusingMay #. afb)) . liftM May . runMaybeT
  {-# INLINE zoom #-}
instance (Error e, Zoom m n k s t) => Zoom (ErrorT e m) (ErrorT e n) (FocusingErr e k) s t where
  zoom l = ErrorT . liftM getErr . zoom (\afb -> unfocusingErr #. l (FocusingErr #. afb)) . liftM Err . runErrorT
  {-# INLINE zoom #-}
-- TODO: instance Zoom m m k a a => Zoom (ContT r m) (ContT r m) k a a where

-- | This class allows us to use 'magnify' part of the environment, changing the environment supplied by
-- many different monad transformers. Unlike 'zoom' this can change the environment of a deeply nested monad transformer.
--
-- Also, unlike 'zoom', this can be used with any valid 'Getter', but cannot be used with a 'Traversal' or 'Fold'.
class (MonadReader b m, MonadReader a n) => Magnify m n k b a | m -> b, n -> a, m a -> n, n b -> m where
  -- | Run a monadic action in a larger environment than it was defined in, using a 'Getter'.
  --
  -- This acts like 'Control.Monad.Reader.Class.local', but can in many cases change the type of the environment as well.
  --
  -- This is commonly used to lift actions in a simpler Reader monad into a monad with a larger environment type.
  --
  -- This can be used to edit pretty much any monad transformer stack with an environment in it:
  --
  -- @
  -- 'magnify' :: 'Getter' s a -> (a -> r) -> s -> r
  -- 'magnify' :: 'Monoid' c => 'Fold' s a -> (a -> r) -> s -> r
  -- 'magnify' :: 'Monoid' w => 'Getter' s t -> 'RWST' s w st c -> 'RWST' t w st c
  -- 'magnify' :: ('Monoid' w, 'Monoid' c) => 'Fold' s t -> 'RWST' s w st c -> 'RWST' t w st c
  -- ...
  -- @
  magnify :: ((b -> k c b) -> a -> k c a) -> m c -> n c
-- Base case: magnify the environment of a ReaderT.
instance Monad m => Magnify (ReaderT b m) (ReaderT a m) (Effect m) b a where
  magnify l (ReaderT m) = ReaderT $ getEffect #. l (Effect #. m)
  {-# INLINE magnify #-}
-- | @'magnify' = 'views'@
instance Magnify ((->) b) ((->) a) Accessor b a where
  magnify = views
  {-# INLINE magnify #-}
-- RWST: only the reader component is magnified; writer/state pass through.
instance (Monad m, Monoid w) => Magnify (Strict.RWST b w s m) (Strict.RWST a w s m) (EffectRWS w s m) b a where
  magnify l (Strict.RWST m) = Strict.RWST $ getEffectRWS #. l (EffectRWS #. m)
  {-# INLINE magnify #-}
instance (Monad m, Monoid w) => Magnify (Lazy.RWST b w s m) (Lazy.RWST a w s m) (EffectRWS w s m) b a where
  magnify l (Lazy.RWST m) = Lazy.RWST $ getEffectRWS #. l (EffectRWS #. m)
  {-# INLINE magnify #-}
-- Inductive case: lift through IdentityT.
instance Magnify m n k b a => Magnify (IdentityT m) (IdentityT n) k b a where
  magnify l (IdentityT m) = IdentityT (magnify l m)
  {-# INLINE magnify #-}
| np/lens | src/Control/Lens/Zoom.hs | bsd-3-clause | 8,289 | 0 | 15 | 1,795 | 1,972 | 1,093 | 879 | 80 | 0 |
module Network.Slack
(
Slack(..),
runSlack,
module Network.Slack.User,
module Network.Slack.Channel,
module Network.Slack.Message
)
where
import Network.Slack.Types
import Network.Slack.User
import Network.Slack.Channel
import Network.Slack.Message
import Control.Monad.State (evalStateT, modify)
import Control.Monad.Trans.Either (runEitherT)
-- |Given an API token and a Slack command, runs the internal
-- initialisation ('slackInit') followed by the command, in the IO monad.
runSlack :: Token -> Slack a -> IO (Either SlackError a)
runSlack tok command =
  evalStateT (runEitherT (runSlackInternal (slackInit >> command))) (slackAuth tok)
-- |Constructs an initial internal state from the given API token, with an
-- empty user list (presumably the '_users' cache; it is filled in later
-- by 'slackInit').
slackAuth :: Token -> SlackState
slackAuth tok = SlackState tok []
-- |Internal setup. Currently it just fetches the list of users so that
-- user ids can be associated with names later on.
slackInit :: Slack ()
slackInit = do
  currentUsers <- request' "users.list" :: Slack [User]
  -- cache the fetched users in the internal state
  modify (\state -> state { _users = currentUsers })
| glutamate/slack | Network/Slack.hs | mit | 1,116 | 0 | 11 | 243 | 239 | 136 | 103 | 23 | 1 |
{-# LANGUAGE TupleSections #-}
module WordCount (wordCount) where
import Data.Map (Map, fromListWith)
import Data.Char (toLower, isAlphaNum)
-- | Count the words of the input, case-insensitively.
--
-- A word is a maximal run of alphanumeric characters, possibly with
-- embedded apostrophes that are immediately followed by another
-- alphanumeric character (so "don't" is a single word); every other
-- character separates words.
wordCount :: String -> Map String Int
wordCount = tally . tokens
  where
    tally  = fromListWith (+) . map (\w -> (map toLower w, 1))
    tokens = filter (not . null) . splitWords

-- | Break a string into its alphanumeric words (apostrophe rule as
-- described on 'wordCount'). Not exported; 'wordCount' is the public API.
splitWords :: String -> [String]
splitWords [] = []
splitWords (c : rest)
  | isAlphaNum c = inWord [c] rest
  | otherwise    = splitWords rest
  where
    inWord :: String -> String -> [String]
    inWord acc [] = [acc]
    inWord acc (a : b : more)
      | a == '\'' && isAlphaNum b = inWord (acc ++ [a, b]) more
    inWord acc (a : more)
      | isAlphaNum a = inWord (acc ++ [a]) more
      | otherwise    = acc : splitWords more
| Bugfry/exercises | exercism/haskell/word-count/src/WordCount.hs | mit | 677 | 0 | 11 | 182 | 323 | 168 | 155 | 20 | 3 |
--
--
--
------------------
-- Exercise 10.16.
------------------
--
--
--
module E'10'16 where
import Test.QuickCheck ( quickCheck )
mystery xs
= foldr ( ++ ) [] ( map sing xs )
where
sing x = [ x ]
-- First "map sing xs" is applied to the input list and transforms every
-- item into a singleton list, holding the item. Then all these single-item-lists
-- are concatenated. The result is the list, "mystery" is applied to:
--
-- mystery xs = xs
{- GHCi>
mystery []
mystery [ 0 ]
mystery [ 0 , 1 ]
-}
-- []
-- [ 0 ]
-- [ 0 , 1 ]
prop_mystery :: [Integer] -> Bool
prop_mystery list
= mystery list == list
-- GHCi> quickCheck prop_mystery
-- ---------------
-- 1. Proposition:
-- ---------------
--
-- mystery xs = xs
--
--
-- Proof By Structural Induction:
-- ------------------------------
--
--
-- 1. Induction Beginning (1. I.B.):
-- ---------------------------------
--
--
-- (Base case 1.) :<=> xs := []
--
-- => (left) := mystery xs
-- | (Base case 1.)
-- = mystery []
-- | mystery
-- = foldr ( ++ ) [] ( map sing [] )
-- | map
-- = foldr ( ++ ) [] []
-- | foldr
-- = []
--
--
-- (right) := xs
-- | (Base case 1.)
-- = []
--
--
-- => (left) = (right)
--
-- ✔
--
--
-- 1. Induction Hypothesis (1. I.H.):
-- ----------------------------------
--
-- For an arbitrary, but fixed list "xs", the statement ...
--
-- mystery xs = xs
-- <=> foldr ( ++ ) [] ( map sing xs ) = xs
--
-- ... holds.
--
--
-- 1. Induction Step (1. I.S.):
-- ----------------------------
--
--
-- (left) := mystery ( x : xs )
-- | mystery
-- = foldr ( ++ ) [] ( map sing ( x : xs ) )
-- | map
-- = foldr ( ++ ) [] ( sing x : map sing xs )
-- | sing
-- = foldr ( ++ ) [] ( [ x ] : map sing xs )
-- | foldr
-- = [ x ] ++ foldr ( ++ ) [] ( map sing xs )
-- | mystery
-- = [ x ] ++ mystery xs
-- | (1. I.H.)
-- = [ x ] ++ xs
--
-- = ( x : [] ) ++ xs
-- | ++
-- = x : ( [] ++ xs )
-- | ++
-- = x : xs
--
--
-- (right) := x : xs
--
--
-- => (left) = (right)
--
-- ■ (1. Proposition)
| pascal-knodel/haskell-craft | _/links/E'10'16.hs | mit | 3,004 | 0 | 7 | 1,503 | 180 | 143 | 37 | 8 | 1 |
module AliasVarBinds where
type T a = [a]
| robinp/haskell-indexer | haskell-indexer-backend-ghc/testdata/typelink/AliasVarBinds.hs | apache-2.0 | 43 | 0 | 5 | 9 | 14 | 10 | 4 | 2 | 0 |
{-# LANGUAGE ViewPatterns #-}
{-| Implementation of the Ganeti configuration database.
-}
{-
Copyright (C) 2011, 2012 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Ganeti.Config
( LinkIpMap
, NdParamObject(..)
, loadConfig
, saveConfig
, getNodeInstances
, getNodeRole
, getNodeNdParams
, getDefaultNicLink
, getDefaultHypervisor
, getInstancesIpByLink
, getMasterNodes
, getMasterCandidates
, getMasterOrCandidates
, getMasterNetworkParameters
, getOnlineNodes
, getNode
, getInstance
, getDisk
, getFilterRule
, getGroup
, getGroupNdParams
, getGroupIpolicy
, getGroupDiskParams
, getGroupNodes
, getGroupInstances
, getGroupOfNode
, getInstPrimaryNode
, getInstMinorsForNode
, getInstAllNodes
, getInstDisks
, getInstDisksFromObj
, getDrbdMinorsForDisk
, getDrbdMinorsForInstance
, getFilledInstHvParams
, getFilledInstBeParams
, getFilledInstOsParams
, getNetwork
, MAC
, getAllMACs
, getAllDrbdSecrets
, NodeLVsMap
, getInstanceLVsByNode
, getAllLVs
, buildLinkIpInstnameMap
, instNodes
) where
import Control.Applicative
import Control.Arrow ((&&&))
import Control.Monad
import Control.Monad.State
import qualified Data.Foldable as F
import Data.List (foldl', nub)
import Data.Maybe (fromMaybe)
import Data.Monoid
import qualified Data.Map as M
import qualified Data.Set as S
import qualified Text.JSON as J
import System.IO
import Ganeti.BasicTypes
import qualified Ganeti.Constants as C
import Ganeti.Errors
import Ganeti.JSON
import Ganeti.Objects
import Ganeti.Types
import qualified Ganeti.Utils.MultiMap as MM
-- | Type alias for the link and ip map: NIC link -> (IP -> instance name),
-- as built by 'buildLinkIpInstnameMap'.
type LinkIpMap = M.Map String (M.Map String String)

-- * Operations on the whole configuration

-- | Reads the config file as a plain string.
--
-- NOTE(review): uses lazy 'readFile', so read errors may surface later than
-- expected and the handle stays open until the contents are forced — confirm
-- whether strict reading is wanted here.
readConfig :: FilePath -> IO (Result String)
readConfig = runResultT . liftIO . readFile

-- | Parses the configuration file contents (JSON) into 'ConfigData'.
parseConfig :: String -> Result ConfigData
parseConfig = fromJResult "parsing configuration" . J.decodeStrict

-- | Encodes the configuration back to its on-disk JSON representation.
encodeConfig :: ConfigData -> String
encodeConfig = J.encodeStrict

-- | Wrapper over 'readConfig' and 'parseConfig': load and parse in one step,
-- propagating either failure through 'Result'.
loadConfig :: FilePath -> IO (Result ConfigData)
loadConfig = fmap (>>= parseConfig) . readConfig

-- | Wrapper over 'hPutStr' and 'encodeConfig': serialise the configuration
-- to an already-open handle.
saveConfig :: Handle -> ConfigData -> IO ()
saveConfig fh = hPutStr fh . encodeConfig
-- * Query functions
-- | Fail with a 'ParameterError' carrying the given message when the value
-- is 'Nothing'; otherwise apply the given transformation to the wrapped
-- value.
withMissingParam :: String -> (a -> ErrorResult b) -> Maybe a -> ErrorResult b
withMissingParam msg f mval = maybe (Bad (ParameterError msg)) f mval

-- | Computes the set of node UUIDs covered by a disk: the two attached
-- nodes for DRBD8 disks, the empty set for everything else.
computeDiskNodes :: Disk -> S.Set String
computeDiskNodes dsk
  | Just (LIDDrbd8 nodeA nodeB _ _ _ _) <- diskLogicalId dsk =
      S.fromList [nodeA, nodeB]
  | otherwise = S.empty
-- | Computes all disk-related nodes of an instance. For non-DRBD,
-- this will be empty, for DRBD it will contain both the primary and
-- the secondaries.
instDiskNodes :: ConfigData -> Instance -> S.Set String
instDiskNodes cfg inst =
  case getInstDisksFromObj cfg inst of
    Ok disks -> S.unions $ map computeDiskNodes disks
    -- a failed disk lookup is treated as "no disk nodes" rather than
    -- propagated as an error
    Bad _ -> S.empty

-- | Computes all nodes of an instance: the disk nodes plus, when known,
-- the primary node.
instNodes :: ConfigData -> Instance -> S.Set String
instNodes cfg inst = maybe id S.insert (instPrimaryNode inst)
                     $ instDiskNodes cfg inst

-- | Computes the secondary nodes of an instance. Since this is valid
-- only for DRBD, we call directly 'instDiskNodes', skipping over the
-- extra primary insert; the primary is explicitly removed from the set.
instSecondaryNodes :: ConfigData -> Instance -> S.Set String
instSecondaryNodes cfg inst =
  maybe id S.delete (instPrimaryNode inst) $ instDiskNodes cfg inst
-- | Get instances of a given node, as a (primary, secondary) pair.
-- The node is specified through its UUID.
getNodeInstances :: ConfigData -> String -> ([Instance], [Instance])
getNodeInstances cfg nname =
    let all_inst = M.elems . fromContainer . configInstances $ cfg
        pri_inst = filter ((== Just nname) . instPrimaryNode) all_inst
        sec_inst = filter ((nname `S.member`) . instSecondaryNodes cfg) all_inst
    in (pri_inst, sec_inst)

-- | Computes the role of a node.  Guard order defines precedence: master
-- beats candidate, candidate beats drained, drained beats offline.
getNodeRole :: ConfigData -> Node -> NodeRole
getNodeRole cfg node
  | nodeUuid node == clusterMasterNode (configCluster cfg) = NRMaster
  | nodeMasterCandidate node = NRCandidate
  | nodeDrained node = NRDrained
  | nodeOffline node = NROffline
  | otherwise = NRRegular
-- | Get the list of the master nodes (usually exactly one).
getMasterNodes :: ConfigData -> [Node]
getMasterNodes cfg =
  [ node | node <- F.toList (configNodes cfg)
         , getNodeRole cfg node == NRMaster ]

-- | Get the list of master candidates, /not including/ the master itself.
getMasterCandidates :: ConfigData -> [Node]
getMasterCandidates cfg =
  [ node | node <- F.toList (configNodes cfg)
         , getNodeRole cfg node == NRCandidate ]

-- | Get the list of master candidates, /including/ the master.
getMasterOrCandidates :: ConfigData -> [Node]
getMasterOrCandidates cfg =
  [ node | node <- F.toList (configNodes cfg)
         , getNodeRole cfg node `elem` [NRMaster, NRCandidate] ]

-- | Get the network parameters for the master IP address.
getMasterNetworkParameters :: ConfigData -> MasterNetworkParameters
getMasterNetworkParameters cfg = MasterNetworkParameters
  { masterNetworkParametersUuid     = clusterMasterNode cluster
  , masterNetworkParametersIp       = clusterMasterIp cluster
  , masterNetworkParametersNetmask  = clusterMasterNetmask cluster
  , masterNetworkParametersNetdev   = clusterMasterNetdev cluster
  , masterNetworkParametersIpFamily = clusterPrimaryIpFamily cluster
  }
  where
    cluster = configCluster cfg

-- | Get the list of online (i.e. not offline-flagged) nodes.
getOnlineNodes :: ConfigData -> [Node]
getOnlineNodes cfg =
  [ node | node <- F.toList (configNodes cfg), not (nodeOffline node) ]
-- | Returns the default cluster link.
--
-- NOTE(review): '(M.! C.ppDefault)' is partial — this throws if the cluster
-- NIC params lack a default group; presumably the configuration always
-- contains one, but confirm before relying on it.
getDefaultNicLink :: ConfigData -> String
getDefaultNicLink =
  nicpLink . (M.! C.ppDefault) . fromContainer .
  clusterNicparams . configCluster

-- | Returns the default cluster hypervisor (the first enabled one).
getDefaultHypervisor :: ConfigData -> Hypervisor
getDefaultHypervisor cfg =
  case clusterEnabledHypervisors $ configCluster cfg of
    -- FIXME: this case shouldn't happen (configuration broken), but
    -- for now we handle it here because we're not authoritative for
    -- the config
    [] -> XenPvm
    x:_ -> x

-- | Returns the IPs of the instances attached to a given link; unknown
-- links simply yield an empty list.
getInstancesIpByLink :: LinkIpMap -> String -> [String]
getInstancesIpByLink linkipmap link =
  M.keys $ M.findWithDefault M.empty link linkipmap
-- | Generic lookup function that converts from a possibly abbreviated
-- name to a full name and then fetches the corresponding item.
--
-- The first argument is the object kind (used only in error messages),
-- the second the possibly-abbreviated name, the third the map of all
-- items keyed by full name.  Exact and unambiguous partial matches
-- succeed; ambiguous or failed matches produce an 'OpPrereqError'.
getItem :: String -> String -> M.Map String a -> ErrorResult a
getItem kind name allitems = do
  let lresult = lookupName (M.keys allitems) name
      err msg = Bad $ OpPrereqError (kind ++ " name " ++ name ++ " " ++ msg)
                        ECodeNoEnt
  fullname <- case lrMatchPriority lresult of
                -- both exact and unambiguous partial matches resolve to
                -- the matched full name
                PartialMatch -> Ok $ lrContent lresult
                ExactMatch -> Ok $ lrContent lresult
                MultipleMatch -> err "has multiple matches"
                FailMatch -> err "not found"
  -- this final lookup should never fail, since 'fullname' came from the
  -- map's own keys; guard against it anyway
  maybe (err "not found after successful match?!") Ok $
    M.lookup fullname allitems
-- | Looks up a node by name or uuid: first a direct (uuid-keyed) lookup,
-- then, on failure, a second lookup on a name-keyed copy of the map.
getNode :: ConfigData -> String -> ErrorResult Node
getNode cfg name =
  let nodes = fromContainer (configNodes cfg)
  in case getItem "Node" name nodes of
       -- if not found by uuid, we need to look it up by name
       Ok node -> Ok node
       -- '(M.!) nodes' is safe here: 'mapKeys' only applies it to the
       -- map's own keys
       Bad _ -> let by_name = M.mapKeys
                                (nodeName . (M.!) nodes) nodes
                in getItem "Node" name by_name

-- | Looks up an instance by name or uuid.  Instances without a name map
-- to the empty-string key, which is deleted so they cannot be matched.
getInstance :: ConfigData -> String -> ErrorResult Instance
getInstance cfg name =
  let instances = fromContainer (configInstances cfg)
  in case getItem "Instance" name instances of
       -- if not found by uuid, we need to look it up by name
       Ok inst -> Ok inst
       Bad _ -> let by_name =
                      M.delete ""
                      . M.mapKeys (fromMaybe "" . instName . (M.!) instances)
                      $ instances
                in getItem "Instance" name by_name

-- | Looks up a disk by uuid (disks have no separate name key).
getDisk :: ConfigData -> String -> ErrorResult Disk
getDisk cfg name =
  let disks = fromContainer (configDisks cfg)
  in getItem "Disk" name disks

-- | Looks up a filter by uuid.
getFilterRule :: ConfigData -> String -> ErrorResult FilterRule
getFilterRule cfg name =
  let filters = fromContainer (configFilters cfg)
  in getItem "Filter" name filters

-- | Looks up a node group by name or uuid, same two-step strategy as
-- 'getNode'.
getGroup :: ConfigData -> String -> ErrorResult NodeGroup
getGroup cfg name =
  let groups = fromContainer (configNodegroups cfg)
  in case getItem "NodeGroup" name groups of
       -- if not found by uuid, we need to look it up by name, slow
       Ok grp -> Ok grp
       Bad _ -> let by_name = M.mapKeys
                                (groupName . (M.!) groups) groups
                in getItem "NodeGroup" name by_name
-- | Computes a node group's node params, filling group-level values over
-- the cluster defaults.
getGroupNdParams :: ConfigData -> NodeGroup -> FilledNDParams
getGroupNdParams cfg ng =
  fillParams (clusterNdparams $ configCluster cfg) (groupNdparams ng)

-- | Computes a node group's ipolicy, filled against cluster defaults.
getGroupIpolicy :: ConfigData -> NodeGroup -> FilledIPolicy
getGroupIpolicy cfg ng =
  fillParams (clusterIpolicy $ configCluster cfg) (groupIpolicy ng)

-- | Computes a group\'s (merged) disk params: group values layered over
-- the cluster-level dictionary.
getGroupDiskParams :: ConfigData -> NodeGroup -> GroupDiskParams
getGroupDiskParams cfg ng =
  GenericContainer $
    fillDict (fromContainer . clusterDiskparams $ configCluster cfg)
             (fromContainer $ groupDiskparams ng) []

-- | Get nodes of a given node group (group given by UUID).
getGroupNodes :: ConfigData -> String -> [Node]
getGroupNodes cfg gname =
  let all_nodes = M.elems . fromContainer . configNodes $ cfg in
  filter ((==gname) . nodeGroup) all_nodes

-- | Get (primary, secondary) instances of a given node group, by
-- aggregating 'getNodeInstances' over all of the group's nodes.
getGroupInstances :: ConfigData -> String -> ([Instance], [Instance])
getGroupInstances cfg gname =
  let gnodes = map nodeUuid (getGroupNodes cfg gname)
      ginsts = map (getNodeInstances cfg) gnodes in
  (concatMap fst ginsts, concatMap snd ginsts)

-- | Retrieves the instance hypervisor params, missing values filled with
-- cluster defaults.  Layering order (lowest to highest priority):
-- cluster hypervisor defaults, then OS-specific hypervisor defaults,
-- then the instance's own values; 'globals' keys are excluded by
-- 'fillDict'.
getFilledInstHvParams :: [String] -> ConfigData -> Instance -> HvParams
getFilledInstHvParams globals cfg inst =
  -- First get the defaults of the parent
  let maybeHvName = instHypervisor inst
      hvParamMap = fromContainer . clusterHvparams $ configCluster cfg
      parentHvParams =
        maybe M.empty fromContainer (maybeHvName >>= flip M.lookup hvParamMap)
      -- Then the os defaults for the given hypervisor
      maybeOsName = instOs inst
      osParamMap = fromContainer . clusterOsHvp $ configCluster cfg
      osHvParamMap =
        maybe M.empty (maybe M.empty fromContainer . flip M.lookup osParamMap)
              maybeOsName
      osHvParams =
        maybe M.empty (maybe M.empty fromContainer . flip M.lookup osHvParamMap)
              maybeHvName
      -- Then the child
      childHvParams = fromContainer . instHvparams $ inst
      -- Helper function
      fillFn con val = fillDict con val globals
  in GenericContainer $ fillFn (fillFn parentHvParams osHvParams) childHvParams

-- | Retrieves the instance backend params, missing values filled with cluster
-- defaults.  Fails if the cluster has no default beparams group.
getFilledInstBeParams :: ConfigData -> Instance -> ErrorResult FilledBeParams
getFilledInstBeParams cfg inst = do
  let beParamMap = fromContainer . clusterBeparams . configCluster $ cfg
  parentParams <- getItem "FilledBeParams" C.ppDefault beParamMap
  return $ fillParams parentParams (instBeparams inst)

-- | Retrieves the instance os params, missing values filled with cluster
-- defaults. This does NOT include private and secret parameters.
-- The OS name is truncated at the first @+@ (variant separator) before
-- the cluster-level lookup.
getFilledInstOsParams :: ConfigData -> Instance -> OsParams
getFilledInstOsParams cfg inst =
  let maybeOsLookupName = liftM (takeWhile (/= '+')) (instOs inst)
      osParamMap = fromContainer . clusterOsparams $ configCluster cfg
      childOsParams = instOsparams inst
  in case withMissingParam "Instance without OS"
            (flip (getItem "OsParams") osParamMap)
            maybeOsLookupName of
       Ok parentOsParams -> GenericContainer $
                              fillDict (fromContainer parentOsParams)
                                       (fromContainer childOsParams) []
       -- no cluster-level params found: the child's own params win
       Bad _ -> childOsParams

-- | Looks up an instance's primary node, failing if the instance is
-- unknown or has no primary node set.
getInstPrimaryNode :: ConfigData -> String -> ErrorResult Node
getInstPrimaryNode cfg name =
  getInstance cfg name
  >>= withMissingParam "Instance without primary node" return . instPrimaryNode
  >>= getNode cfg
-- | Retrieves all nodes hosting a DRBD disk, recursing into the disk's
-- children; failed node lookups are silently dropped via 'justOk'.
getDrbdDiskNodes :: ConfigData -> Disk -> [Node]
getDrbdDiskNodes cfg disk =
  let retrieved = case diskLogicalId disk of
                    Just (LIDDrbd8 nodeA nodeB _ _ _ _) ->
                      justOk [getNode cfg nodeA, getNode cfg nodeB]
                    _ -> []
  in retrieved ++ concatMap (getDrbdDiskNodes cfg) (diskChildren disk)

-- | Retrieves all the nodes of the instance.
--
-- As instances not using DRBD can be sent as a parameter as well,
-- the primary node has to be appended to the results.
getInstAllNodes :: ConfigData -> String -> ErrorResult [Node]
getInstAllNodes cfg name = do
  inst_disks <- getInstDisks cfg name
  let disk_nodes = concatMap (getDrbdDiskNodes cfg) inst_disks
  pNode <- getInstPrimaryNode cfg name
  -- 'nub' removes the primary when it also appears among the disk nodes
  return . nub $ pNode:disk_nodes

-- | Get disks for a given instance.
-- The instance is specified by name or uuid.
getInstDisks :: ConfigData -> String -> ErrorResult [Disk]
getInstDisks cfg iname =
  getInstance cfg iname >>= mapM (getDisk cfg) . instDisks

-- | Get disks for a given instance object (lookup goes via its UUID).
getInstDisksFromObj :: ConfigData -> Instance -> ErrorResult [Disk]
getInstDisksFromObj cfg =
  getInstDisks cfg . instUuid

-- | Collects a value for all DRBD disks in a disk tree, combining the
-- per-disk results monoidally; non-DRBD disks contribute only through
-- their children.
collectFromDrbdDisks
  :: (Monoid a)
  => (String -> String -> Int -> Int -> Int -> DRBDSecret -> a)
  -- ^ NodeA, NodeB, Port, MinorA, MinorB, Secret
  -> Disk -> a
collectFromDrbdDisks f = col
  where
    -- view pattern: decompose the disk into (logical id, children) and
    -- match the DRBD8 case
    col (diskLogicalId &&& diskChildren ->
          (Just (LIDDrbd8 nA nB port mA mB secret), ch)) =
      f nA nB port mA mB secret <> F.foldMap col ch
    col d = F.foldMap col (diskChildren d)

-- | Returns the DRBD secrets of a given 'Disk'
getDrbdSecretsForDisk :: Disk -> [DRBDSecret]
getDrbdSecretsForDisk = collectFromDrbdDisks (\_ _ _ _ _ secret -> [secret])

-- | Returns the DRBD minors of a given 'Disk', paired with the UUID of
-- the node each minor belongs to.
getDrbdMinorsForDisk :: Disk -> [(Int, String)]
getDrbdMinorsForDisk =
  collectFromDrbdDisks (\nA nB _ mnA mnB _ -> [(mnA, nA), (mnB, nB)])

-- | Filters DRBD minors for a given node: each entry pairs this node's
-- minor with the UUID of the peer node.
getDrbdMinorsForNode :: String -> Disk -> [(Int, String)]
getDrbdMinorsForNode node disk =
  let child_minors = concatMap (getDrbdMinorsForNode node) (diskChildren disk)
      this_minors =
        case diskLogicalId disk of
          Just (LIDDrbd8 nodeA nodeB _ minorA minorB _)
            | nodeA == node -> [(minorA, nodeB)]
            | nodeB == node -> [(minorB, nodeA)]
          _ -> []
  in this_minors ++ child_minors

-- | Returns the DRBD minors of a given instance, over all its disks.
getDrbdMinorsForInstance :: ConfigData -> Instance
                         -> ErrorResult [(Int, String)]
getDrbdMinorsForInstance cfg =
  liftM (concatMap getDrbdMinorsForDisk) . getInstDisksFromObj cfg

-- | String for primary role.
rolePrimary :: String
rolePrimary = "primary"

-- | String for secondary role.
roleSecondary :: String
roleSecondary = "secondary"

-- | Gets the list of DRBD minors for an instance that are related to
-- a given node.  Result tuples are
-- (node, minor, instance name, "disk/<idx>", role, peer node).
getInstMinorsForNode :: ConfigData
                     -> String -- ^ The UUID of a node.
                     -> Instance
                     -> [(String, Int, String, String, String, String)]
getInstMinorsForNode cfg node inst =
  let role = if Just node == instPrimaryNode inst
               then rolePrimary
               else roleSecondary
      -- nameless instances are reported with an empty name
      iname = fromMaybe "" $ instName inst
      inst_disks = case getInstDisksFromObj cfg inst of
                     Ok disks -> disks
                     Bad _ -> []
  -- FIXME: the disk/ build there is hack-ish; unify this in a
  -- separate place, or reuse the iv_name (but that is deprecated on
  -- the Python side)
  in concatMap (\(idx, dsk) ->
                  [(node, minor, iname, "disk/" ++ show idx, role, peer)
                     | (minor, peer) <- getDrbdMinorsForNode node dsk]) .
     zip [(0::Int)..] $ inst_disks
-- | Builds link -> ip -> instname map.
-- For instances without a name, we insert the uuid instead.
--
-- TODO: improve this by splitting it into multiple independent functions:
--
-- * abstract the \"fetch instance with filled params\" functionality
--
-- * abstsract the [instance] -> [(nic, instance_name)] part
--
-- * etc.
buildLinkIpInstnameMap :: ConfigData -> LinkIpMap
buildLinkIpInstnameMap cfg =
  let cluster = configCluster cfg
      instances = M.elems . fromContainer . configInstances $ cfg
      -- NOTE(review): '(M.!) ... C.ppDefault' is partial; presumably a
      -- default NIC-params group always exists — confirm (same assumption
      -- as in 'getDefaultNicLink')
      defparams = (M.!) (fromContainer $ clusterNicparams cluster) C.ppDefault
      nics = concatMap (\i -> [(fromMaybe (instUuid i) $ instName i, nic)
                                | nic <- instNics i])
             instances
  -- strict left fold over all (instance name, nic) pairs; NICs without
  -- an IP are skipped
  in foldl' (\accum (iname, nic) ->
               let pparams = nicNicparams nic
                   fparams = fillParams defparams pparams
                   link = nicpLink fparams
               in case nicIp nic of
                    Nothing -> accum
                    Just ip -> let oldipmap = M.findWithDefault M.empty
                                                link accum
                                   newipmap = M.insert ip iname oldipmap
                               in M.insert link newipmap accum
            ) M.empty nics

-- | Returns a node's group, with optional failure if we can't find it
-- (configuration corrupt).
getGroupOfNode :: ConfigData -> Node -> Maybe NodeGroup
getGroupOfNode cfg node =
  M.lookup (nodeGroup node) (fromContainer . configNodegroups $ cfg)

-- | Returns a node's ndparams, filled over the node's group parameters;
-- 'Nothing' if the node's group cannot be found.
getNodeNdParams :: ConfigData -> Node -> Maybe FilledNDParams
getNodeNdParams cfg node = do
  group <- getGroupOfNode cfg node
  let gparams = getGroupNdParams cfg group
  return $ fillParams gparams (nodeNdparams node)

-- * Network

-- | Looks up a network. If looking up by uuid fails, we look up
-- by name.
getNetwork :: ConfigData -> String -> ErrorResult Network
getNetwork cfg name =
  let networks = fromContainer (configNetworks cfg)
  in case getItem "Network" name networks of
       Ok net -> Ok net
       Bad _ -> let by_name = M.mapKeys
                                (fromNonEmpty . networkName . (M.!) networks)
                                networks
                in getItem "Network" name by_name

-- ** MACs

-- | A MAC address, kept as its plain string representation.
type MAC = String

-- | Returns all MAC addresses used in the cluster.
getAllMACs :: ConfigData -> [MAC]
getAllMACs = F.foldMap (map nicMac . instNics) . configInstances

-- ** DRBD secrets

-- | Returns all DRBD secrets of all configured disks.
getAllDrbdSecrets :: ConfigData -> [DRBDSecret]
getAllDrbdSecrets = F.foldMap getDrbdSecretsForDisk . configDisks
-- ** LVs

-- | A map from node UUIDs to the logical volumes hosted on them.
--
-- FIXME: After adding designated types for UUIDs,
-- use them to replace 'String' here.
type NodeLVsMap = MM.MultiMap String LogicalVolume

-- | Computes, per node, the logical volumes of an instance's disks.
-- Fails if the instance has no primary node.
getInstanceLVsByNode :: ConfigData -> Instance -> ErrorResult NodeLVsMap
getInstanceLVsByNode cd inst =
  withMissingParam "Instance without Primary Node"
    (\i -> return $ MM.fromList . lvsByNode i)
    (instPrimaryNode inst)
  <*> getInstDisksFromObj cd inst
  where
    lvsByNode :: String -> [Disk] -> [(String, LogicalVolume)]
    lvsByNode node = concatMap (lvsByNode1 node)
    -- for DRBD disks the children live on the two DRBD nodes, for plain
    -- LVs on the current node; anything else just recurses
    lvsByNode1 :: String -> Disk -> [(String, LogicalVolume)]
    lvsByNode1 _ (diskLogicalId &&& diskChildren
                   -> (Just (LIDDrbd8 nA nB _ _ _ _), ch)) =
      lvsByNode nA ch ++ lvsByNode nB ch
    lvsByNode1 node (diskLogicalId -> (Just (LIDPlain lv))) =
      [(node, lv)]
    lvsByNode1 node (diskChildren -> ch) = lvsByNode node ch

-- | Collects all logical volumes of all instances into one set.
getAllLVs :: ConfigData -> ErrorResult (S.Set LogicalVolume)
getAllLVs cd = mconcat <$> mapM (liftM MM.values . getInstanceLVsByNode cd)
                                (F.toList $ configInstances cd)

-- * ND params

-- | Type class denoting objects which have node parameters.
class NdParamObject a where
  getNdParamsOf :: ConfigData -> a -> Maybe FilledNDParams

instance NdParamObject Node where
  getNdParamsOf = getNodeNdParams

instance NdParamObject NodeGroup where
  getNdParamsOf cfg = Just . getGroupNdParams cfg

instance NdParamObject Cluster where
  getNdParamsOf _ = Just . clusterNdparams
| bitemyapp/ganeti | src/Ganeti/Config.hs | bsd-2-clause | 22,311 | 0 | 20 | 5,104 | 4,897 | 2,551 | 2,346 | 381 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Sensor_msgs.FluidPressure where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.HeaderSupport
import qualified Ros.Std_msgs.Header as Header
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
-- | The @sensor_msgs/FluidPressure@ ROS message: a stamped absolute
-- pressure reading with its variance.
--
-- NOTE(review): this module appears to be generated by the roshask message
-- tooling — confirm before hand-editing.
data FluidPressure = FluidPressure { _header :: Header.Header
                                   , _fluid_pressure :: P.Double
                                   , _variance :: P.Double
                                   } deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)

-- Generates the 'header', 'fluid_pressure' and 'variance' lenses.
$(makeLenses ''FluidPressure)

-- Field order in (de)serialization matches the message definition:
-- header, fluid_pressure, variance.
instance RosBinary FluidPressure where
  put obj' = put (_header obj') *> put (_fluid_pressure obj') *> put (_variance obj')
  get = FluidPressure <$> get <*> get <*> get
  putMsg = putStampedMsg

-- Header accessors delegate to the embedded 'Header.Header' via lenses.
instance HasHeader FluidPressure where
  getSequence = view (header . Header.seq)
  getFrame = view (header . Header.frame_id)
  getStamp = view (header . Header.stamp)
  setSequence = set (header . Header.seq)

instance MsgInfo FluidPressure where
  sourceMD5 _ = "804dc5cea1c5306d6a2eb80b9833befe"
  msgTypeName _ = "sensor_msgs/FluidPressure"

instance D.Default FluidPressure
| acowley/roshask | msgs/Sensor_msgs/Ros/Sensor_msgs/FluidPressure.hs | bsd-3-clause | 1,489 | 1 | 10 | 299 | 380 | 221 | 159 | 35 | 0 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
-- | Security specific migration
module Distribution.Server.Features.Security.Migration (
migratePkgTarball_v1_to_v2
, migrateCandidatePkgTarball_v1_to_v2
) where
import Distribution.Server.Prelude
-- stdlib
import Control.DeepSeq
import Control.Exception
import Data.Map (Map)
import System.IO
import System.IO.Error
import qualified Data.Map as Map
import qualified Data.Vector as Vec
-- Cabal
import Distribution.Package (PackageId)
-- hackage
import Distribution.Server.Features.Core.State
import Distribution.Server.Features.PackageCandidates.State
import Distribution.Server.Features.PackageCandidates.Types
import Distribution.Server.Features.Security.Layout
import Distribution.Server.Framework hiding (Length)
import Distribution.Server.Framework.BlobStorage
import Distribution.Server.Packages.Types
import Distribution.Server.Util.ReadDigest
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
{-------------------------------------------------------------------------------
Migration of core data structures
-------------------------------------------------------------------------------}
-- | Migrate from BlobId to BlobInfo in PkgTarball
--
-- This is part of the security feature because this computes the additional
-- information (SHA hashes) that we need for the TUF target files.
migratePkgTarball_v1_to_v2 :: ServerEnv
                           -> StateComponent AcidState PackagesState
                           -> IO ()
migratePkgTarball_v1_to_v2 env@ServerEnv{ serverVerbosity = verbosity }
                           packagesState
                         = do
    precomputedHashes <- readPrecomputedHashes env
    PackagesState{packageIndex} <- queryState packagesState GetPackagesState
    -- split the work into ~10 partitions so progress can be logged
    let allPackages = PackageIndex.allPackages packageIndex
        partitionSz = PackageIndex.numPackageVersions packageIndex `div` 10
        partitioned = partition partitionSz allPackages
    stats <- forM (zip [1..] partitioned) $ \(i, pkgs) ->
      logTiming verbosity (partitionLogMsg i (length partitioned)) $
        migratePkgs env updatePackage precomputedHashes pkgs
    loginfo verbosity $ prettyMigrationStats (mconcat stats)
  where
    -- persist a migrated package back into the acid-state store
    updatePackage :: PackageId -> PkgInfo -> IO ()
    updatePackage pkgId pkgInfo = updateState packagesState
                                $ UpdatePackageInfo pkgId pkgInfo
    partitionLogMsg :: Int -> Int -> String
    partitionLogMsg i n = "Computing blob info "
                       ++ "(" ++ show i ++ "/" ++ show n ++ ")"
-- | Similar migration for candidates; mirrors 'migratePkgTarball_v1_to_v2'
-- but operates on the candidate package index.
migrateCandidatePkgTarball_v1_to_v2 :: ServerEnv
                                    -> StateComponent AcidState CandidatePackages
                                    -> IO ()
migrateCandidatePkgTarball_v1_to_v2 env@ServerEnv{ serverVerbosity = verbosity }
                                    candidatesState
                                  = do
    precomputedHashes <- readPrecomputedHashes env
    CandidatePackages{candidateList} <- queryState candidatesState GetCandidatePackages
    -- split the work into ~10 partitions so progress can be logged
    let allCandidates = PackageIndex.allPackages candidateList
        partitionSz = PackageIndex.numPackageVersions candidateList `div` 10
        partitioned = partition partitionSz allCandidates
    stats <- forM (zip [1..] partitioned) $ \(i, candidates) -> do
      let pkgs = map candPkgInfo candidates
      logTiming verbosity (partitionLogMsg i (length partitioned)) $
        migratePkgs env updatePackage precomputedHashes pkgs
    loginfo verbosity $ prettyMigrationStats (mconcat stats)
  where
    updatePackage :: PackageId -> PkgInfo -> IO ()
    updatePackage pkgId pkgInfo = do
      _didUpdate <- updateState candidatesState $
                      UpdateCandidatePkgInfo pkgId pkgInfo
      return ()
    partitionLogMsg :: Int -> Int -> String
    partitionLogMsg i n = "Computing candidates blob info "
                       ++ "(" ++ show i ++ "/" ++ show n ++ ")"
-- | Migrate a batch of packages: recompute (or look up) blob info for every
-- tarball revision and write back only the packages that actually changed.
migratePkgs :: ServerEnv
            -> (PackageId -> PkgInfo -> IO ())
            -> Precomputed
            -> [PkgInfo]
            -> IO MigrationStats
migratePkgs ServerEnv{ serverBlobStore = store } updatePackage precomputed =
    liftM mconcat . mapM migratePkg
  where
    migratePkg :: PkgInfo -> IO MigrationStats
    migratePkg pkg = do
        tarballs' <- forM tarballs $ \(tarball, uploadInfo) -> do
          tarball' <- migrateTarball tarball
          return $ (, uploadInfo) <$> tarball'
        -- Avoid updating the state of all packages already migrated
        case sequence tarballs' of
          AlreadyMigrated _ ->
            return mempty
          Migrated stats tarballs'' -> do
            let pkg' = pkg { pkgTarballRevisions = Vec.fromList tarballs'' }
            updatePackage (pkgInfoId pkg) pkg'
            return stats
      where
        tarballs = Vec.toList (pkgTarballRevisions pkg)
    -- v2 tarballs already carry blob info; v1 tarballs get theirs either
    -- from the precomputed map or by hashing the blob on disk
    migrateTarball :: PkgTarball -> IO (Migrated PkgTarball)
    migrateTarball pkgTarball@PkgTarball{} =
      return $ AlreadyMigrated pkgTarball
    migrateTarball (PkgTarball_v2_v1 PkgTarball_v1{..}) =
      case Map.lookup (blobMd5 v1_pkgTarballGz) precomputed of
        Just (strSHA256, len) -> do
          -- We assume all SHA hashes in the precomputed list parse OK
          -- NOTE(review): this 'Right' pattern is partial; a malformed
          -- precomputed hash would crash the migration — confirm the
          -- hash file is trusted input
          let Right sha256 = readDigest strSHA256
              stats = MigrationStats 1 0
              infoGz = BlobInfo {
                  blobInfoId = v1_pkgTarballGz
                , blobInfoLength = len
                , blobInfoHashSHA256 = sha256
                }
          return $ Migrated stats PkgTarball {
              pkgTarballGz = infoGz
            , pkgTarballNoGz = v1_pkgTarballNoGz
            }
        Nothing -> do
          -- not precomputed: hash the blob from the store (expensive path)
          infoGz <- blobInfoFromId store v1_pkgTarballGz
          let stats = MigrationStats 0 1
          return $ Migrated stats PkgTarball {
              pkgTarballGz = infoGz
            , pkgTarballNoGz = v1_pkgTarballNoGz
            }
{-------------------------------------------------------------------------------
Precomputed hashes
-------------------------------------------------------------------------------}
-- | Hex-encoded MD5 digest.
type MD5 = String
-- | Hex-encoded SHA-256 digest.
type SHA256 = String
-- | Blob length in bytes.
type Length = Int
-- | Precomputed hash table: MD5 of a blob -> (its SHA-256, its length).
type Precomputed = Map MD5 (SHA256, Length)
-- | Read precomputed hashes (if any)
--
-- The result is guaranteed to be in normal form.  Lines that do not have
-- the expected @\<md5\> \<sha256\> \<length\>@ shape are skipped instead of
-- aborting the migration.
readPrecomputedHashes :: ServerEnv -> IO Precomputed
readPrecomputedHashes env@ServerEnv{ serverVerbosity = verbosity } = do
    precomputed <- handle emptyOnError $
      withFile (onDiskPrecomputedHashes env) ReadMode $ \h -> do
        hashes <- Map.fromList . parseEntries . lines <$> hGetContents h
        -- force the whole map before the handle is closed (lazy hGetContents)
        evaluate $ rnf hashes
        return hashes
    loginfo verbosity $ "Found " ++ show (Map.size precomputed)
                     ++ " precomputed hashes"
    return precomputed
  where
    -- a missing file simply means "no precomputed hashes"
    emptyOnError :: IOException -> IO Precomputed
    emptyOnError err = if isDoesNotExistError err then return Map.empty
                                                  else throwIO err

    -- keep only the well-formed entries; the previous version used an
    -- irrefutable list pattern plus a partial 'read', so a single
    -- malformed line crashed the whole migration (and the resulting
    -- pattern-match error was not an 'IOException', escaping the handler)
    parseEntries :: [String] -> [(MD5, (SHA256, Length))]
    parseEntries ls = [ entry | Just entry <- map parseEntry ls ]

    parseEntry :: String -> Maybe (MD5, (SHA256, Length))
    parseEntry line = case words line of
      [md5, sha256, len] | [(l, "")] <- reads len -> Just (md5, (sha256, l))
      _                                           -> Nothing
{-------------------------------------------------------------------------------
Migration infrastructure
-------------------------------------------------------------------------------}
-- | Statistics collected while migrating tarballs.  The counters are
-- strict: they are accumulated with 'mconcat' over many packages, and
-- lazy fields would build up thunks.
data MigrationStats = MigrationStats {
    -- | Number of hashes we looked up in the precomputed map
    migrationStatsPrecomputed :: !Int
    -- | Number of hashes we had to compute
  , migrationStatsComputed :: !Int
  }

-- | Render migration statistics as a one-line human-readable summary.
prettyMigrationStats :: MigrationStats -> String
prettyMigrationStats (MigrationStats precomp computed) = unwords [
    show precomp
  , "hashes were precomputed, computed"
  , show computed
  ]

instance Monoid MigrationStats where
  mempty = MigrationStats 0 0
  mappend = (<>)

instance Semigroup MigrationStats where
  (MigrationStats a b) <> (MigrationStats a' b') =
    MigrationStats (a + a') (b + b')

-- | Result of a migration step: either freshly migrated (carrying the
-- stats of the work done) or already in the new format.
data Migrated a = Migrated MigrationStats a | AlreadyMigrated a

-- explicit instance (instead of DeriveFunctor) keeps the mapping obvious
instance Functor Migrated where
  fmap f (Migrated stats a)  = Migrated stats (f a)
  fmap f (AlreadyMigrated a) = AlreadyMigrated (f a)

-- 'pure' is the canonical definition post-AMP; the previous code defined
-- 'pure = return' and 'return = AlreadyMigrated' the other way around.
instance Applicative Migrated where
  pure = AlreadyMigrated
  AlreadyMigrated f <*> x = fmap f x
  Migrated stats f  <*> x =
    case fmap f x of
      AlreadyMigrated b -> Migrated stats b
      Migrated stats' b -> Migrated (stats <> stats') b

-- binding accumulates stats left-to-right
instance Monad Migrated where
  return = pure
  AlreadyMigrated a >>= f = f a
  Migrated stats a  >>= f =
    case f a of
      AlreadyMigrated b -> Migrated stats b
      Migrated stats' b -> Migrated (stats <> stats') b
{-------------------------------------------------------------------------------
Additional auxiliary
-------------------------------------------------------------------------------}
-- | Partition list into chunks of the given size.
--
-- > partition 2 [1..5] = [[1,2],[3,4],[5]]
--
-- If the partition size is zero or negative, returns a single partition.
-- (The previous version looped forever for negative sizes, since
-- 'splitAt' with a negative count makes no progress.)
partition :: Int -> [a] -> [[a]]
partition sz xs
  | sz <= 0   = [xs]
  | otherwise = go xs
  where
    go [] = []
    go ys = let (chunk, rest) = splitAt sz ys
            in chunk : go rest
| edsko/hackage-server | Distribution/Server/Features/Security/Migration.hs | bsd-3-clause | 9,212 | 0 | 20 | 2,319 | 1,951 | 1,010 | 941 | 167 | 4 |
import Data.Char
import System.Environment
import System.Process
import System.Exit
import System.Posix.Directory
import Graphics.Qt
import Graphics.Qt.Events.Tests ()
import Utils
import Test.QuickCheck
import Test.QuickCheck.Property
-- Entry point of the crash-search fuzzer.  NOTE(review): no type
-- signature; 'main' is inferred as IO ().
main = do
  -- deliberate partial match: abort with a pattern-match failure unless
  -- the user explicitly passes the destructive acknowledgement flag
  ["--please-do-random-things-to-my-userdata"] <- getArgs
  -- run from the parent directory — presumably where the in-place game
  -- build and its data live; confirm against the build layout
  changeWorkingDirectory ".."
  quickCheck doesntCrash
-- | QuickCheck property: launching the game with an arbitrary key
-- sequence (always terminated by 'CloseWindowKey') must not crash it.
doesntCrash :: [Key] -> Property
doesntCrash keys =
    morallyDubiousIOProperty $ do
        putStrLn ""
        -- log the generated key sequence so failures are reproducible
        print keys
        startNikki (keys +: CloseWindowKey)
-- | Render the keys as @-i keyname@ command line options (lower-cased
-- 'show' output), joined with spaces.
mkInitialEventsOptions :: [Key] -> String
mkInitialEventsOptions keys =
    unwords [ "-i " ++ map toLower (show key) | key <- keys ]
-- | Starts the game with the given initial events, returning 'False' in
-- case of a crash (i.e. a non-zero exit code).
startNikki :: [Key] -> IO Bool
startNikki events = do
    ec <- system ("./dist/build/core/core --run-in-place " ++ mkInitialEventsOptions events)
    -- any non-success exit code counts as a crash; this replaces a
    -- boolean-blind case expression with an unused 'ExitFailure' binding
    return (ec == ExitSuccess)
| changlinli/nikki | src/testsuite/searchCrashes.hs | lgpl-3.0 | 950 | 0 | 11 | 187 | 244 | 127 | 117 | 28 | 2 |
{-# LANGUAGE CPP, LambdaCase, BangPatterns, MagicHash, TupleSections, ScopedTypeVariables, DeriveDataTypeable #-}
#ifdef ghcjs_HOST_OS
{-# LANGUAGE JavaScriptFFI #-}
#endif
{- |
Evaluate Template Haskell splices on node.js
-}
module GHCJS.Prim.TH.Eval (
#ifdef ghcjs_HOST_OS
runTHServer
#endif
) where
#ifdef ghcjs_HOST_OS
import GHCJS.Prim.TH.Serialized
import GHCJS.Prim.TH.Types
import Control.Applicative
import qualified Control.Exception as E
import Control.Monad
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import qualified Data.ByteString.Internal as BI
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Unsafe as BU
import Data.Data
import Data.Dynamic
import Data.Int
import Data.IORef
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid ((<>))
import Data.Typeable
import Data.Typeable.Internal
import Data.Word
import Foreign.C
import Foreign.Ptr
import GHC.Prim
import GHC.Desugar
import qualified Language.Haskell.TH as TH
import qualified Language.Haskell.TH.Syntax as TH
import System.IO
import Unsafe.Coerce
-- | State threaded through all splices of one module.
data QState = QState { qsMap :: Map TypeRep Dynamic -- ^ persistent data between splices in a module
                     , qsFinalizers :: [TH.Q ()] -- ^ registered finalizers (in reverse order)
                     , qsLocation :: Maybe TH.Loc -- ^ location for current splice, if any
                     }

-- opaque Show: the map holds Dynamics, which have no useful rendering
instance Show QState where show _ = "<QState>"

-- | Empty state used before the first splice of a module runs.
initQState :: QState
initQState = QState M.empty [] Nothing
-- | Run all registered module finalizers, in registration order as stored
-- (the list is kept in reverse registration order).  Finalizers added by a
-- finalizer are picked up too, since the state is re-read after each run.
runModFinalizers :: GHCJSQ ()
runModFinalizers = go =<< getState
  where
    go s | (f:ff) <- qsFinalizers s =
      -- pop the finalizer before running it, then loop on the new state
      putState (s { qsFinalizers = ff}) >> TH.runQ f >> getState >>= go
    go _ = return ()
-- | The quasi-quotation monad: a state-threading function over 'QState'
-- running in IO.
data GHCJSQ a = GHCJSQ { runGHCJSQ :: QState -> IO (a, QState) }

-- | Exception carrying the state at failure time, an optional error code
-- and a message; thrown by 'fail' and caught by 'qRecover'.
data GHCJSQException = GHCJSQException QState (Maybe Int) String
  deriving (Show, Typeable)

instance E.Exception GHCJSQException

instance Functor GHCJSQ where
  fmap f (GHCJSQ s) = GHCJSQ $ fmap (\(x,s') -> (f x,s')) . s

instance Applicative GHCJSQ where
  -- sequences the state left-to-right, like the Monad instance
  f <*> a = GHCJSQ $ \s ->
    do (f',s') <- runGHCJSQ f s
       (a',s'') <- runGHCJSQ a s'
       return (f' a', s'')
  pure x = GHCJSQ (\s -> return (x,s))

instance Monad GHCJSQ where
  m >>= f = GHCJSQ $ \s ->
    do (m', s') <- runGHCJSQ m s
       (a, s'') <- runGHCJSQ (f m') s'
       return (a, s'')
  return = pure
  -- NOTE(review): 'fail' in Monad is pre-MonadFail GHC style; it throws a
  -- 'GHCJSQException' carrying the current state so 'qRecover' can catch it
  fail err = GHCJSQ $ \s -> E.throw (GHCJSQException s Nothing err)
-- | Read the current interpreter state.
getState :: GHCJSQ QState
getState = GHCJSQ (\s -> pure (s, s))

-- | Replace the interpreter state.
putState :: QState -> GHCJSQ ()
putState s' = GHCJSQ (\_ -> pure ((), s'))

-- | Fallback location used when no splice location is known.
noLoc :: TH.Loc
noLoc = TH.Loc "<no file>" "<no package>" "<no module>" (0,0) (0,0)
-- | The Quasi methods are implemented by sending a request to the host
-- compiler ('sendRequestQ' / 'sendRequest') and pattern-matching the
-- expected response constructor; only the purely local operations
-- (location, IO lifting, finalizers, the Q-state map) are handled here.
instance TH.Quasi GHCJSQ where
  qNewName str = do
    NewName' name <- sendRequestQ (NewName str)
    return name
  qReport isError msg = do
    Report' <- sendRequestQ (Report isError msg)
    return ()
  -- recovery brackets the action between StartRecover/EndRecover
  -- messages; the Bool tells the host whether the handler was used
  qRecover (GHCJSQ h) (GHCJSQ a) = GHCJSQ $ \s -> do
    let r :: Bool -> IO ()
        r b = do EndRecover' <- sendRequest (EndRecover b)
                 return ()
    StartRecover' <- sendRequest StartRecover
    (a s >>= \s' -> r False >> return s') `E.catch`
      \(GHCJSQException s' _ _) -> r True >> h s
  qLookupName isType occ = do
    LookupName' name <- sendRequestQ (LookupName isType occ)
    return name
  qReify name = do
    Reify' info <- sendRequestQ (Reify name)
    return info
  qReifyInstances name tys = do
    ReifyInstances' decls <- sendRequestQ (ReifyInstances name tys)
    return decls
  qReifyRoles name = do
    ReifyRoles' roles <- sendRequestQ (ReifyRoles name)
    return roles
  qReifyAnnotations lookup = do
    ReifyAnnotations' payloads <- sendRequestQ (ReifyAnnotations lookup)
    return (convertAnnPayloads payloads)
  qReifyModule m = do
    ReifyModule' mi <- sendRequestQ (ReifyModule m)
    return mi
  -- falls back to 'noLoc' when no splice location was recorded
  qLocation = fromMaybe noLoc . qsLocation <$> getState
  qRunIO m = GHCJSQ $ \s -> fmap (,s) m
  qAddDependentFile file = do
    AddDependentFile' <- sendRequestQ (AddDependentFile file)
    return ()
  qAddTopDecls decls = do
    AddTopDecls' <- sendRequestQ (AddTopDecls decls)
    return ()
  -- finalizers are only queued here; 'runModFinalizers' runs them later
  qAddModFinalizer fin = GHCJSQ $ \s ->
    return ((), s { qsFinalizers = fin : qsFinalizers s })
  -- the Q-state map is keyed by TypeRep, one slot per stored type
  qGetQ = GHCJSQ $ \s ->
    let lookup :: forall a. Typeable a => Map TypeRep Dynamic -> Maybe a
        lookup m = fromDynamic =<< M.lookup (typeOf (undefined::a)) m
    in return (lookup (qsMap s), s)
  qPutQ k = GHCJSQ $ \s ->
    return ((), s { qsMap = M.insert (typeOf k) (toDyn k) (qsMap s) })
-- | Serialize an annotation value: a 16-byte type fingerprint (two
-- big-endian Word64s) followed by the Data-generic encoding of the value.
makeAnnPayload :: forall a. Data a => a -> ByteString
makeAnnPayload x =
#if __GLASGOW_HASKELL__ >= 709
  -- TypeRep gained a field in GHC 7.10, hence the CPP on the pattern.
  let TypeRep (Fingerprint w1 w2) _ _ _ = typeOf (undefined :: a)
#else
  let TypeRep (Fingerprint w1 w2) _ _ = typeOf (undefined :: a)
#endif
      fp = runPut (putWord64be w1 >> putWord64be w2)
  in BL.toStrict $ fp <> BL.pack (serializeWithData x)
-- | Decode only the payloads whose leading fingerprint matches type @a@;
-- payloads for other types are silently dropped.
convertAnnPayloads :: forall a. Data a => [ByteString] -> [a]
convertAnnPayloads bs = catMaybes (map convert bs)
  where
#if __GLASGOW_HASKELL__ >= 709
    TypeRep (Fingerprint w1 w2) _ _ _ = typeOf (undefined :: a)
#else
    TypeRep (Fingerprint w1 w2) _ _ = typeOf (undefined :: a)
#endif
    -- Read the 16-byte fingerprint prefix written by 'makeAnnPayload'.
    getFp b = runGet ((,) <$> getWord64be <*> getWord64be) $ BL.fromStrict (B.take 16 b)
    convert b | (bw1,bw2) <- getFp b, bw1 == w1, bw2 == w2 =
                  Just (deserializeWithData . B.unpack . B.drop 16 $ b)
              | otherwise = Nothing
-- | the Template Haskell server: loop handling RunTH requests until the
-- compiler sends FinishTH, then run module finalizers and acknowledge.
-- Exceptions are reported back to the compiler rather than crashing.
runTHServer :: IO ()
runTHServer = do
  -- msgs <- newIORef []
  void (runGHCJSQ server initQState) `E.catches`
    [ E.Handler $ \(GHCJSQException _ mn msg) ->
        void . sendRequest $ maybe (QFail msg) QCompilerException mn
    , E.Handler $ \(E.SomeException e) ->
        void (sendRequest $ QUserException (show e))
    ]
  where
    server = TH.qRunIO awaitMessage >>= \case
      RunTH t code loc -> do
        -- Load the compiled splice, run it, then wait for the next request.
        a <- TH.qRunIO (loadCode code)
        runTH t a loc
        server
      FinishTH -> do
        runModFinalizers
        TH.qRunIO $ sendResult FinishTH'
      _ -> error "runTHServer: unexpected message type"
{-# NOINLINE runTH #-}
-- | Run a compiled splice of the given result type and send the serialized
-- result back to the compiler. The splice location is installed in the
-- state for the duration of the run and cleared afterwards. @obj@ is
-- forced ('seq') before the state is touched.
runTH :: THResultType -> Any -> Maybe TH.Loc -> GHCJSQ ()
runTH rt obj = \mb_loc -> obj `seq` do
  s0 <- getState
  putState $ s0 { qsLocation = mb_loc }
  -- The unsafeCoerce is justified by @rt@, which records the real type.
  res <- case rt of
    THExp -> runTHCode (unsafeCoerce obj :: TH.Q TH.Exp)
    THPat -> runTHCode (unsafeCoerce obj :: TH.Q TH.Pat)
    THType -> runTHCode (unsafeCoerce obj :: TH.Q TH.Type)
    THDec -> runTHCode (unsafeCoerce obj :: TH.Q [TH.Dec])
    THAnnWrapper -> case unsafeCoerce obj of
      AnnotationWrapper x -> return (makeAnnPayload x)
  s1 <- getState
  TH.qRunIO (sendResult $ RunTH' res)
  putState $ s1 { qsLocation = Nothing }
{-# NOINLINE runTHCode #-}
-- | Run a TH computation and binary-serialize its result.
runTHCode :: Binary a => TH.Q a -> GHCJSQ ByteString
runTHCode c = BL.toStrict . runPut . put <$> TH.runQ c
{-# NOINLINE loadCode #-}
-- | Turn serialized splice code into a heap object via the JS runtime.
loadCode :: ByteString -> IO Any
loadCode bs = do
  p <- fromBs bs
  unsafeCoerce <$> js_loadCode p (B.length bs)
-- | Block until the compiler sends the next message and decode it.
awaitMessage :: IO Message
awaitMessage = do
  buf <- js_awaitMessage
  bytes <- toBs buf
  return (runGet get (BL.fromStrict bytes))
-- | send result back
sendResult :: Message -> IO ()
sendResult msg = do
  let payload = BL.toStrict (runPut (put msg))
  buf <- fromBs payload
  js_sendMessage buf (B.length payload)
-- | send a request and wait for the response
sendRequest :: Message -> IO Message
sendRequest msg = do
  let payload = BL.toStrict (runPut (put msg))
  buf <- fromBs payload
  reply <- js_sendRequest buf (B.length payload)
  bytes <- toBs reply
  return (runGet get (BL.fromStrict bytes))
-- | send a request and wait for the response
-- a CompilerException' response is converted to a GHCJSQException which
-- can be handled with recover.
sendRequestQ :: Message -> GHCJSQ Message
sendRequestQ msg = do
  response <- TH.qRunIO (sendRequest msg)
  case response of
    QCompilerException' n err ->
      GHCJSQ $ \s -> E.throw (GHCJSQException s (Just n) err)
    other -> return other
-- FFI bridge to the JS-side TH transport. Pointers are passed as the
-- GHCJS two-component (array, offset) pair, hence $1_1/$1_2.
foreign import javascript interruptible "h$TH.sendRequest($1_1,$1_2,$2,$c);"
  js_sendRequest :: Ptr Word8 -> Int -> IO (Ptr Word8)
foreign import javascript interruptible "h$TH.sendMessage($1_1,$1_2,$2,0,$c);"
  js_sendMessage :: Ptr Word8 -> Int -> IO ()
foreign import javascript interruptible "h$TH.awaitMessage(0,$c);"
  js_awaitMessage :: IO (Ptr Word8)
foreign import javascript unsafe "h$TH.bufSize($1_1, $1_2)"
  js_bufSize :: Ptr Word8 -> IO Int
-- | actually returns the heap object to be evaluated
foreign import javascript unsafe "h$TH.loadCode($1_1,$1_2,$2)"
  js_loadCode :: Ptr Word8 -> Int -> IO Double
-- | only safe in JS
fromBs :: ByteString -> IO (Ptr Word8)
fromBs bs = BU.unsafeUseAsCString bs (return . castPtr)
-- | build a ByteString that uses the whole buffer, only works in JS
toBs :: Ptr Word8 -> IO ByteString
toBs p = do
  l <- js_bufSize p
  BU.unsafePackCStringLen (castPtr p, l)
#endif
| beni55/ghcjs | lib/ghcjs-prim/GHCJS/Prim/TH/Eval.hs | mit | 9,402 | 15 | 18 | 2,415 | 3,048 | 1,547 | 1,501 | 2 | 0 |
{-# LANGUAGE PartialTypeSignatures, NamedWildCards #-}
module ExpressionSigNamed where
-- | Boolean negation. The named wildcard @_a@ is deliberate: this test
-- exercises PartialTypeSignatures/NamedWildCards, letting GHC solve
-- @_a ~ Bool@ from the equations.
bar :: _a -> _a
bar True = (False :: _a)
bar False = (True :: _a)
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/partial-sigs/should_compile/ExpressionSigNamed.hs | bsd-3-clause | 155 | 0 | 5 | 27 | 42 | 25 | 17 | 5 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
import Diagrams.Prelude
import Diagrams.Backend.SVG.CmdLine
-- Unit hexagon filled light green.
h = hexagon 1 # fc lightgreen
-- Mark the local origin with a small dot (scaled down to 0.04).
sOrigin = showOrigin' (with & oScale .~ 0.04)
-- Three hexagons: one right-aligned, two snugged against the bottom-left
-- and top-left so their edges touch rather than their bounding boxes.
diagram :: Diagram B
diagram = h # alignR # sOrigin
          -- why not snugBL ?
          <> h # snugB # snugL # sOrigin
          <> h # snugT # snugL # sOrigin
main = mainWith $ frame 0.1 diagram
| jeffreyrosenbluth/NYC-meetup | meetup/Snug.hs | mit | 394 | 0 | 14 | 107 | 116 | 61 | 55 | 10 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Main (main) where
import Codec.Xlsx
-- import Control.Lens
import qualified Data.ByteString.Lazy as L
-- import qualified Data.Map as M
import System.Environment (getArgs)
import System.FilePath.Posix ((</>))
import System.Directory
import System.IO (stderr, hPutStrLn, hFlush, stdout)
import System.Exit
import Data.List.Extra (trim)
import YBIO
import Applicant
-- | Entry point for the YB application downloader.
--
-- Usage: @ybapp [EXCEL_FILE [OUT_DIR]]@. Arguments not supplied on the
-- command line are prompted for interactively (in that order). The output
-- directory is created when missing; a missing input file aborts with an
-- error on stderr and a non-zero exit code.
main :: IO ()
main = do
  args <- getArgs
  homeDir <- getHomeDirectory
  -- Resolve inputs in the original order: excel file (and its existence
  -- check) first, then the output directory.
  excelFile <- case args of
    (x:_) -> return x
    _     -> promptExcelFile
  excelFileExists <- doesFileExist excelFile
  outDir <- case args of
    (_:y:_) -> return y
    _       -> promptOutDir (homeDir </> "Desktop/ybapp_downloads")
  outDirExists <- doesDirectoryExist outDir
  if outDirExists
    then return ()
    else ybCreateDirectory outDir
  if excelFileExists
    then do
      bs <- L.readFile excelFile
      runyb outDir $ toXlsx bs
    else do
      hPutStrLn stderr $ "File does not exist: " ++ excelFile
      hFlush stderr
      exitFailure

-- | Ask interactively for the .xlsx workbook to process.
promptExcelFile :: IO FilePath
promptExcelFile = do
  putStrLn ""
  putStrLn ""
  putStrLn " ---- YB Application downloader ----"
  putStrLn ""
  putStrLn "* Note: you can hit Control-C at any time to stop or exit this program."
  putStrLn ""
  putStrLn ""
  putStrLn "Please drag an excel (.xlsx) file to process onto the"
  putStrLn "terminal screen and press RETURN"
  putStrLn ""
  putStr "> "
  hFlush stdout
  l <- getLine
  return (trim l)

-- | Ask interactively for the download directory; an empty response
-- selects the supplied default.
promptOutDir :: FilePath -> IO FilePath
promptOutDir defaultDir = do
  putStrLn ""
  putStrLn "Please drag a folder to download files into onto the"
  putStrLn "terminal screen and press RETURN"
  putStrLn $ "[Or just hit RETURN to use: " ++ defaultDir ++ "]: "
  putStrLn ""
  putStr "> "
  hFlush stdout
  l <- getLine
  case l of
    [] -> return defaultDir
    x  -> return (trim x)
-- | Extract the applicant sheets from the workbook and download each one
-- into the given top-level directory.
runyb :: FilePath -> Xlsx -> IO ()
runyb topdir xlsx = mapM_ (downloadSheet topdir) (processXlsx xlsx)
| mjhoy/ybapp | Main.hs | mit | 2,285 | 0 | 20 | 698 | 588 | 273 | 315 | 68 | 6 |
{-# LANGUAGE PolyKinds #-}
-- ------------------------------------------------------
{- multiple type-classes to deal with multiple kind-orders
class Typeable (a :: * ) where
typeOf :: a -> TypeRep
class Typeable1 (a :: * -> *) where
typeOf1 :: forall b. a b -> TypeRep
-}
-- Monomorphic-kind class: the parameter 'a' has kind *.
class T0 a where
  f0 :: a -> String
instance T0 Int where
  f0 _ = "T0 Int"
instance T0 Char where
  f0 _ = "T0 Char"
-- f0 (10::Int)
-- "T0 Int"
-- f0 'x'
-- "T0 Char"
-- Note: the fully applied Maybe a is again of kind *.
instance T0 (Maybe a) where
  f0 _ = "T0 Maybe a"
-- Higher-kinded class: the parameter has kind * -> *.
class T1 m where -- m :: * -> *
  f1 :: Show a => m a -> String
instance T1 Maybe where
  f1 _ = "T1 Maybe"
-- f1 (Just 10)
-- "T1 Maybe"
-- ------------------------------------------------------
-- Polymorphic kinds allow us to unify the type-classes T0 and T1 into one
-- With PolyKinds, a single class covers every kind arity.
class T a where -- (a::k)
  f :: Proxy a -> String
{-
With the DataKinds language extension, k is polymorphic by default
i.e. k can take several forms (of kind signatures)
 -- *
 -- * -> *
 -- * -> * -> *
k is a polymorphic placeholder for many possible kind arities.
-}
-- Proxy is a phantom-type
data Proxy a = Proxy deriving Show -- (a::k)
{-
'a' is of polymorphic kind e.g.
 (Proxy Int) -- Proxy :: * -> *
 (Proxy Maybe) -- Proxy :: (* -> *) -> *
-}
-- Proxy is used to generalise the kind of the first argument of f
-- f :: T a => Proxy a -> String
{-
By enabling Polykinds the kind signatures generalize:
-- before and after PolyKinds
f :: * -> Constraint
f :: forall k. k -> Constraint
-- before and after PolyKinds
Proxy :: * -> *
Proxy :: forall k. k -> *
-}
-- We can verify that the type parameter has polymorphic kind:
instance T Int where -- Int :: *
  f _ = "T Int"
instance T Maybe where -- Maybe :: * -> *
  f _ = "T Maybe"
-- f (Proxy :: Proxy Maybe) -- "T Maybe"
-- f (Proxy :: Proxy Int) -- "T Int"
| uroboros/haskell_design_patterns | chapter7/3_kind_polymorphism.hs | mit | 1,907 | 0 | 9 | 511 | 206 | 115 | 91 | 20 | 0 |
--kMeans.hs
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-}
module Chapter6.Vector where
import Data.Default
-- | Points of a metric space usable for k-means: a pairwise distance and
-- a centroid of a (non-empty) collection.
class (Default v, Ord v) => Vector v where
  distance :: v -> v -> Double
  centroid :: [v] -> v
-- | 2D points with Euclidean distance; the centroid is the component-wise
-- mean. NOTE(review): an empty list yields (NaN, NaN) via 0/0, as before.
instance Vector (Double, Double) where
  distance (x1,y1) (x2,y2) =
    let dx = x2 - x1
        dy = y2 - y1
    in sqrt (dx*dx + dy*dy)
  centroid lst =
    let sums = foldr (\(a,b) (c,d) -> (a+c, b+d)) (0.0, 0.0) lst
        n = fromIntegral (length lst)
    in (fst sums / n, snd sums / n)
-- | Entities that can be projected into a 'Vector' for clustering.
class Vector v => Vectorizable e v where
  toVector :: e -> v
-- 2D points vectorize to themselves.
instance Vectorizable (Double, Double) (Double, Double) where
  toVector = id
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.SVGException
(toString, toString_, pattern SVG_WRONG_TYPE_ERR,
pattern SVG_INVALID_VALUE_ERR, pattern SVG_MATRIX_NOT_INVERTABLE,
getCode, getName, getMessage, SVGException(..), gTypeSVGException)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGException.toString Mozilla SVGException.toString documentation>
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGException.toString Mozilla SVGException.toString documentation>
toString ::
         (MonadDOM m, FromJSString result) => SVGException -> m result
toString self
  = liftDOM ((self ^. jsf "toString" ()) >>= fromJSValUnchecked)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGException.toString Mozilla SVGException.toString documentation>
-- Variant that discards the result.
toString_ :: (MonadDOM m) => SVGException -> m ()
toString_ self = liftDOM (void (self ^. jsf "toString" ()))

-- Error-code constants mirroring the SVG 1.1 SVGException interface.
pattern SVG_WRONG_TYPE_ERR = 0
pattern SVG_INVALID_VALUE_ERR = 1
pattern SVG_MATRIX_NOT_INVERTABLE = 2

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGException.code Mozilla SVGException.code documentation>
getCode :: (MonadDOM m) => SVGException -> m Word
getCode self
  = liftDOM (round <$> ((self ^. js "code") >>= valToNumber))

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGException.name Mozilla SVGException.name documentation>
getName ::
        (MonadDOM m, FromJSString result) => SVGException -> m result
getName self = liftDOM ((self ^. js "name") >>= fromJSValUnchecked)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGException.message Mozilla SVGException.message documentation>
getMessage ::
           (MonadDOM m, FromJSString result) => SVGException -> m result
getMessage self
  = liftDOM ((self ^. js "message") >>= fromJSValUnchecked)
| ghcjs/jsaddle-dom | src/JSDOM/Generated/SVGException.hs | mit | 2,583 | 0 | 12 | 332 | 630 | 374 | 256 | 39 | 1 |
module HRUtil ( get_cconf, connect, Fanout(..), declare_fanout, Direct(..)
, declare_ctrl_client, declare_ctrl_server
, bkey_client, bkey_server) where
import Text.ParserCombinators.Parsec
import qualified Data.Map as M
import Network.AMQP
import Debug.Trace
-- parse the config file
-- | Parse config file contents into a key/value map. A parse error is
-- traced and yields an empty map. Rows with fewer than two fields are
-- ignored; fields beyond the second are discarded.
get_cconf :: String -> M.Map String String
get_cconf s =
  case parse cconf "get_cconf" s of
    Left e -> trace ("Error: " ++ show e) $ M.empty
    Right lst -> foldl addm M.empty lst
  -- Pattern match instead of the previous length/(!!) combination:
  -- total by construction, no partial indexing.
  where addm m (k:v:_) = M.insert k v m
        addm m _ = m
-- Grammar: a file is rows of '='-separated fields, one row per line.
-- [[String]]
cconf = many row
-- [String]
row = do
  result <- record
  eol
  return result
record = field `sepBy` char '='
-- A field is a bare string, optionally padded with spaces/tabs.
field = do
  optional whitespace
  result <- bare_string
  optional whitespace
  return result
eol = string "\n"
whitespace = many space_char
space_char = char ' ' <|> char '\t'
bare_string = many bare_char
bare_char = noneOf "\n= "
---------------------------------
-- Exchange and binding-key names shared by client and server.
xcg_chat = "room101" -- for fanout exchange
xcg_ctrl = "ctrl" -- for direct exchange
bkey_client = "for_client"
bkey_server = "for_server"
-- | Everything needed to talk through the fanout (chat) exchange.
data Fanout = Fanout { fconn :: Connection
                     , fchan :: Channel
                     , fqopts :: QueueOpts
                     , fxopts :: ExchangeOpts
                     }
-- | Open an AMQP connection from a parsed config map. Fails when any of
-- hostname/vhost/user/password is missing.
connect :: M.Map String String -> IO Connection
connect m =
  case credentials of
    Nothing -> fail "Error: hostname/vhost/user/password?"
    Just (h, v, u, p) -> openConnection h v u p
  where
    -- All four keys must be present; Maybe short-circuits otherwise.
    credentials = do
      hostname <- M.lookup "hostname" m
      vhost <- M.lookup "vhost" m
      user <- M.lookup "user" m
      password <- M.lookup "password" m
      return (hostname, vhost, user, password)
-- | Declare an exclusive, server-named queue and bind it to the fanout
-- chat exchange. The returned 'Fanout' records the generated queue name.
declare_fanout :: Connection -> Channel -> IO Fanout
declare_fanout conn chan =
  let qo = newQueue { queueExclusive = True }
      xo = newExchange { exchangeName = xcg_chat
                       , exchangeType = "fanout" }
  in declareQueue chan qo >>= \(qname, _, _) ->
     declareExchange chan xo >>
     bindQueue chan qname xcg_chat "" >> -- fanout ignores binding key
     return (Fanout conn chan (qo { queueName = qname }) xo)
-- | Everything needed to talk through the direct (control) exchange,
-- including the binding key that routes messages to this side.
data Direct = Direct { dconn :: Connection
                     , dchan :: Channel
                     , dqopts :: QueueOpts
                     , dxopts :: ExchangeOpts
                     , bkey :: String
                     }
-- | Declare an exclusive queue bound to the direct control exchange with
-- the given binding key.
-- NOTE(review): the parameter 'bkey' shadows the record field above.
declare_ctrl :: String -> Connection -> Channel -> IO Direct
declare_ctrl bkey conn chan =
  let qo = newQueue { queueExclusive = True }
      xo = newExchange { exchangeName = xcg_ctrl
                       , exchangeType = "direct" }
  in declareQueue chan qo >>= \(qname, _, _) ->
     declareExchange chan xo >>
     bindQueue chan qname xcg_ctrl bkey >>
     return (Direct chan chan (qo { queueName = qname }) xo bkey)
| cfchou/hrchat | HRUtil.hs | mit | 3,253 | 0 | 16 | 1,021 | 908 | 489 | 419 | 74 | 2 |
module Tests where
import Test.HUnit
-- define test cases.
-- NOTE(review): this asserts (1,2) == (2,3) and therefore always fails;
-- looks like a placeholder -- confirm the intended expected/actual pair.
test1 = TestCase (assertEqual "for (foo 3)," (1,2) (2,3))
{--
test2 = TestCase (do (x,y) <- partA 3
                     assertEqual "for the first result of partA," 5 x
                     b <- partB y
                     assertBool ("(partB " ++ show y ++ ") failed") b)
--}
-- name test cases. group them.
tests = TestList [ TestLabel "test1" test1
                 -- , TestLabel "test2" test2
                 ]
| marklar/Statistics.SGT | test/Tests.hs | mit | 483 | 0 | 8 | 172 | 59 | 35 | 24 | 4 | 1 |
{-# LANGUAGE FlexibleContexts #-}
module Hasgel.Rendering (
Camera(..), defaultCamera, viewForward, viewBack, viewRight, viewLeft,
viewUp, viewDown,
renderCameraOrientation, renderPlayer, renderShots, renderInvaders,
renderString, axisRenderer, renderGamma, defaultGamma
) where
import Control.Lens ((.~), (^.))
import Control.Monad (forM_, when)
import Control.Monad.Base (MonadBase (..))
import Control.Monad.Reader (MonadReader, asks)
import Control.Monad.State (MonadState(..), gets)
import Control.Monad.Trans (MonadTrans(..))
import Control.Monad.Trans.Control (MonadBaseControl (..))
import Data.Char (ord)
import Data.Int (Int32)
import Data.Maybe (fromMaybe)
import Graphics.GL.Core45
import Linear ((!*!))
import qualified Linear as L
import Hasgel.Args (Args (..))
import Hasgel.Simulation
import Hasgel.Game (GameState(..), Player(..), Invader(..))
import Hasgel.Transform
import Hasgel.Drawable
import qualified Hasgel.GL as GL
import Hasgel.Resources (HasResources(..), Resources(..))
import qualified Hasgel.Resources as Res
-- | A camera: its world-space transform plus a projection matrix.
data Camera = Camera
  { cameraTransform :: Transform
  , cameraProjection :: L.M44 Float
  } deriving (Show)
-- | Gamma used by the final gamma-correction pass (standard 2.2).
defaultGamma :: Float
defaultGamma = 2.2
-- Shader program descriptors: (source file, stage) pairs handed to the
-- resource loader ('Res.loadProgram').
mainProgramDesc :: Res.ProgramDesc
mainProgramDesc = [("shaders/basic.vert", GL.VertexShader),
                   ("shaders/basic.frag", GL.FragmentShader)]
-- Fullscreen gamma-correction pass.
gammaProgramDesc :: Res.ProgramDesc
gammaProgramDesc = [("shaders/pass.vert", GL.VertexShader),
                    ("shaders/gamma.frag", GL.FragmentShader)]
-- Per-vertex (Gouraud) lit shading.
gouraudProgramDesc :: Res.ProgramDesc
gouraudProgramDesc = [("shaders/gouraud.vert", GL.VertexShader),
                      ("shaders/gouraud.frag", GL.FragmentShader)]
-- Gouraud pipeline with an extra geometry stage that explodes triangles.
explodeProgramDesc :: Res.ProgramDesc
explodeProgramDesc = ("shaders/explode.geom", GL.GeometryShader) :
                     gouraudProgramDesc
-- Billboarded textured sprites (points expanded in the geometry stage).
spriteProgramDesc :: Res.ProgramDesc
spriteProgramDesc = [("shaders/billboard.vert", GL.VertexShader),
                     ("shaders/billboard.geom", GL.GeometryShader),
                     ("shaders/billboard.frag", GL.FragmentShader)]
-- Billboards sampled from the font atlas.
textProgramDesc :: Res.ProgramDesc
textProgramDesc = [("shaders/billboard.vert", GL.VertexShader),
                   ("shaders/billboard.geom", GL.GeometryShader),
                   ("shaders/text.frag", GL.FragmentShader)]
-- Debug visualization of vertex normals.
normalsProgramDesc :: Res.ProgramDesc
normalsProgramDesc = [("shaders/basic.vert", GL.VertexShader),
                      ("shaders/normals.geom", GL.GeometryShader),
                      ("shaders/color.frag", GL.FragmentShader)]
-- Debug visualization of coordinate axes.
axisProgramDesc :: Res.ProgramDesc
axisProgramDesc = [("shaders/axis.vert", GL.VertexShader),
                   ("shaders/axis.geom", GL.GeometryShader),
                   ("shaders/color.frag", GL.FragmentShader)]
-- | Perspective projection for the given viewport size: 60 degree
-- vertical FOV, near plane 0.1, far plane 100.
persp :: L.V2 Float -> L.M44 Float
persp (L.V2 width height) = persp''
  where fovy = deg2Rad 60
        ar = width / height
        n = 0.1
        f = 100
        -- Workaround for left out multiplication by 2
        persp' = L.perspective fovy ar n f
        w = persp' ^. L._z.L._w
        persp'' = L._z.L._w .~ (2*w) $ persp'
-- | Fixed orthographic projection spanning [-2, 2] on every axis.
ortho :: L.M44 Float
ortho = L.ortho (-2) 2 (-2) 2 (-2) 2
-- | Camera placed at (0, 10, 20) using the perspective projection.
defaultCamera :: L.V2 Float -> Camera
defaultCamera dim = Camera {
  cameraTransform = defaultTransform { transformPosition = L.V3 0 10 20 },
  cameraProjection = persp dim }
-- | Return the view rotation. This is the inverse of camera rotation.
viewRotation :: Camera -> L.Quaternion Float
viewRotation = L.conjugate . transformRotation . cameraTransform
-- | Return the forward vector of the view orientation.
-- This is the back vector of the camera orientation.
viewForward :: Camera -> L.V3 Float
viewForward = transformBack . cameraTransform
-- | Return the back vector of the view orientation.
-- This is the forward vector of the camera orientation.
viewBack :: Camera -> L.V3 Float
viewBack = transformForward . cameraTransform
-- NOTE(review): forward/back are mirrored between view and camera above,
-- but left/right and up/down below are taken straight from the camera
-- transform -- confirm this asymmetry is intended.
viewLeft :: Camera -> L.V3 Float
viewLeft = transformLeft . cameraTransform
viewRight :: Camera -> L.V3 Float
viewRight = transformRight . cameraTransform
viewDown :: Camera -> L.V3 Float
viewDown = transformDown . cameraTransform
viewUp :: Camera -> L.V3 Float
viewUp = transformUp . cameraTransform
-- | Return the view matrix for the given camera: the inverse of the
-- camera transform, i.e. negated translation followed by the conjugate
-- rotation.
cameraView :: Camera -> L.M44 Float
cameraView camera = L.m33_to_m44 rotM !*! transM
  where
    transM = L.translation .~ (-transformPosition (cameraTransform camera)) $ L.identity
    rotM = L.fromQuaternion (viewRotation camera)
-- | Return the view projection matrix for the given camera.
cameraViewProjection :: Camera -> L.M44 Float
cameraViewProjection camera = cameraProjection camera !*! cameraView camera
-- | Draw one billboarded laser sprite per active shot.
renderShots :: (HasResources s, HasSimulation s GameState, MonadState s m,
                MonadBaseControl IO m) => Camera -> m ()
renderShots camera = do
  shots <- gets $ gShots . simState . getSimulation
  forM_ shots $ \transform -> do
    spriteProgram <- Res.loadProgram spriteProgramDesc
    Just point <- Res.getDrawable "point"
    texture <- gets $ resLaserTex . getResources
    liftBase $ do
      glActiveTexture GL_TEXTURE0
      glBindTexture GL_TEXTURE_2D $ GL.object texture
      let mv = cameraView camera !*! transform2M44 transform
      GL.useProgram spriteProgram $ do
        GL.uniformByName "mv" mv
        GL.uniformByName "proj" $ cameraProjection camera
        -- Sprite size is taken from the transform's x scale.
        GL.uniformByName "size" $ transformScale transform ^. L._x
      draw point
-- | Draw live invaders normally; exploded ones go through the explode
-- shader, driven by the time elapsed since their explosion started.
renderInvaders :: (HasResources s, HasSimulation s GameState,
                   MonadBaseControl IO m, MonadState s m, MonadReader Args m) =>
                  Camera -> m ()
renderInvaders camera = do
  sim <- gets getSimulation
  let invaders = gInvaders . simState $ sim
      exploding = gExploded . simState $ sim
  mapM_ (shipRenderer camera . iTransform) invaders
  forM_ exploding $ \invader -> do
    let time = timeCurrent . simTime $ sim
        dt = fromMaybe 0 $ (time -) <$> iExplodeTime invader
        transform = iTransform invader
    explodingShipRenderer camera transform dt
-- | Draw the player ship; once it has an explode time, switch to the
-- explode shader.
renderPlayer :: (HasResources s, HasSimulation s GameState,
                 MonadBaseControl IO m, MonadState s m, MonadReader Args m) =>
                Camera -> m ()
renderPlayer camera = do
  sim <- gets getSimulation
  let player = gPlayer $ simState sim
      transform = pTransform player
  case pExplodeTime player of
    Nothing -> shipRenderer camera transform
    Just explodeTime -> do
      let time = timeCurrent . simTime $ sim
      explodingShipRenderer camera transform (time - explodeTime)
-- | Draw a ship with Gouraud shading; when the --normals flag is set,
-- overlay a debug visualization of its vertex normals.
shipRenderer :: (HasResources s, MonadBaseControl IO m, MonadState s m,
                 MonadReader Args m) => Camera -> Transform -> m ()
shipRenderer camera transform = do
  mainProg <- Res.loadProgram gouraudProgramDesc
  renderShip mainProg camera transform
  normalsProg <- Res.loadProgram normalsProgramDesc
  Just ship <- Res.getDrawable "player-ship"
  let mvp = cameraViewProjection camera !*! transform2M44 transform
  renderNormals <- asks argsNormals
  liftBase . when renderNormals $ do
    GL.useProgram normalsProg $
      GL.uniformByName "mvp" mvp
    draw ship
-- | Draw a ship through the explode shader; the explosion factor grows
-- linearly with the time since the explosion started.
explodingShipRenderer :: (HasResources s, MonadBaseControl IO m, MonadState s m) =>
                         Camera -> Transform -> Milliseconds -> m ()
explodingShipRenderer camera transform explodeTime = do
  prog <- Res.loadProgram explodeProgramDesc
  let exFactor :: Float
      exFactor = 8 * millis2Sec explodeTime
  liftBase . GL.useProgram prog $
    GL.uniformByName "explode_factor" exFactor
  renderShip prog camera transform
-- | Bind the ship texture, upload the transform/lighting uniforms and
-- draw the ship mesh with the given program.
renderShip :: (HasResources s, MonadBase IO m, MonadState s m) =>
              GL.Program -> Camera -> Transform -> m ()
renderShip program camera transform = do
  Just ship <- Res.getDrawable "player-ship"
  tex <- gets $ resTex . getResources
  let mvp = cameraViewProjection camera !*! transform2M44 transform
      -- Normal transform assumes uniform scaling.
      normalModel = transform2M44 transform ^. L._m33
      mv = cameraView camera !*! transform2M44 transform
  liftBase $ do
    glActiveTexture GL_TEXTURE0
    glBindTexture GL_TEXTURE_2D $ GL.object tex
    GL.useProgram program $ do
      GL.uniformByName "mvp" mvp
      GL.uniformByName "normal_model" normalModel
      GL.uniformByName "mv" mv
      GL.uniformByName "mat.spec" (L.V3 0.8 0.8 0.8 :: L.V3 Float)
      GL.uniformByName "mat.shine" (25 :: Float)
    draw ship
-- | Draw a small axis gizmo showing the camera's view rotation under the
-- fixed orthographic projection.
renderCameraOrientation :: (HasResources s, MonadBaseControl IO m,
                            MonadState s m) => Camera -> m ()
renderCameraOrientation camera = renderAxis 1 mvp
  where
    mvp = ortho !*! L.m33_to_m44 (L.fromQuaternion (viewRotation camera))
-- | Draw coordinate axes at the player's position/orientation.
axisRenderer :: (HasResources s, HasSimulation s GameState,
                 MonadBaseControl IO m, MonadState s m) =>
                Camera -> m ()
axisRenderer camera = do
  playerTrans <- gets $ pTransform . gPlayer . simState . getSimulation
  let model = transform2M44 playerTrans
      mvp = cameraViewProjection camera !*! model
  renderAxis 2 mvp
-- | Draw a scaled axis cross; the geometry shader expands a single point
-- into the axis lines.
renderAxis :: (HasResources s, MonadBaseControl IO m, MonadState s m) =>
              Float -> L.M44 Float -> m ()
renderAxis scale mvp = do
  axisProgram <- Res.loadProgram axisProgramDesc
  Just point <- Res.getDrawable "point"
  liftBase $ do
    GL.useProgram axisProgram $ do
      GL.uniformByName "scale" scale
      GL.uniformByName "mvp" mvp
    draw point
-- | Post processing effect. Renders a fullscreen quad from given texture and
-- applies gamma correction.
renderGamma :: (HasResources s, MonadBaseControl IO m, MonadState s m) =>
               Float -> GL.Texture -> m ()
renderGamma gamma texture = do
  prog <- Res.loadProgram gammaProgramDesc
  Just plane <- Res.getDrawable "plane"
  liftBase $ do
    glActiveTexture GL_TEXTURE0
    glBindTexture GL_TEXTURE_2D $ GL.object texture
    -- The plane mesh lies flat; rotate it 90 degrees to face the screen.
    let model = rotateLocal defaultTransform $ L.V3 90 0 0
    GL.useProgram prog $ do
      GL.uniformByName "mvp" $ transform2M44 model
      GL.uniformByName "gamma" gamma
    draw plane
-- | Construct an orthographic projection that corresponds with
-- screen resolution: x in [0, width], y in [0, height], z in [-1, 1].
screenOrtho :: Float -> Float -> L.M44 Float
screenOrtho w h = L.ortho 0 w 0 h (-1) 1
-- | Render a string at screen position (x, y) as a row of billboarded
-- glyph quads sampled from the 16x16-cell font atlas; successive glyphs
-- advance half a cell width apart.
renderString :: (HasResources s, MonadState s m, MonadBaseControl IO m) =>
                L.V2 Float -> Float -> Float -> String -> m ()
renderString (L.V2 scrW scrH) x y text = do
  prog <- Res.loadProgram textProgramDesc
  Just plane <- Res.getDrawable "point"
  texture <- gets $ resFontTex . getResources
  liftBase $ do
    glActiveTexture GL_TEXTURE0
    glBindTexture GL_TEXTURE_2D $ GL.object texture
    let proj = screenOrtho scrW scrH
        cellWidth = 16 :: Int32
        cellHeight = 16 :: Int32
        charPos = charPosForFont 256 cellWidth cellHeight
    GL.useProgram prog $ do
      GL.uniformByName "proj" proj
      GL.uniformByName "size" (0.5 * fromIntegral cellWidth :: Float)
      GL.uniformByName "cell" $ L.V2 cellWidth cellHeight
      forM_ (zip [0..] text) $ \(i, c) -> do
        GL.uniformByName "char_pos" $ charPos c
        let currX = x + 0.5 * i * fromIntegral cellWidth
            model = translate defaultTransform $ L.V3 currX y 0
        GL.uniformByName "mv" $ transform2M44 model
        lift $ draw plane
-- | Map a character to its top-left pixel position in the font atlas.
-- The atlas is laid out row-major starting at the space character (32).
charPosForFont :: Int32 -> Int32 -> Int32 -> Char -> L.V2 Int32
charPosForFont width cellWidth cellHeight char
    -- 'ord' is never negative, so the previous '< 0' guard was dead code;
    -- characters below space (0..31) have no glyph and would otherwise
    -- produce a negative cell index, so reject them explicitly.
  | ord char < 32 || ord char > 255 = error "Only ASCII supported."
  | otherwise = L.V2 (x * cellWidth) (y * cellHeight)
  where charsPerLine = width `div` cellWidth
        charNum = fromIntegral $ ord char - 32
        x = charNum `mod` charsPerLine
        y = charNum `div` charsPerLine
| Th30n/hasgel | src/Hasgel/Rendering.hs | mit | 11,804 | 3 | 22 | 2,602 | 3,423 | 1,740 | 1,683 | -1 | -1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE Strict #-}
module WordEmbedding.HasText.Internal.Strict.HasText
( unsafeWindowRange
, initMW
, unsafeFreezeMW
, norm2
, scale
, sigmoid
, addUU
, dotUU
, unitVector
, cosSim
) where
import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as VU
import qualified Data.Vector.Unboxed.Mutable as VUM
import qualified System.Random.MWC as RM
import WordEmbedding.HasText.Internal.Type
-- The function that return a range of the dynamic window.
-- Convenience wrapper extracting window size and RNG from args/params.
unsafeWindowRange :: HasTextArgs -> LParams -> V.Vector T.Text
                  -> Int -- ^ The central index of a window. Note that no boundary checks.
                  -> IO (V.Vector T.Text)
unsafeWindowRange args lp line targetIdx =
  unsafeWindowRangePrim negs rand line targetIdx
  where
    negs = fromIntegral . _negatives $ args
    rand = _rand lp
-- The function that return a range of the dynamic window.
-- A window half-width is drawn uniformly from [0, negatives].
-- NOTE(review): the final ifilter KEEPS the target and every element
-- OUTSIDE the (winFrom, winTo) window and DROPS the in-window neighbours,
-- which is the opposite of what the name suggests -- confirm intended.
unsafeWindowRangePrim :: Int -> RM.GenIO -> V.Vector T.Text
                      -> Int -- ^ The central index of a window. Note that no boundary checks.
                      -> IO (V.Vector T.Text)
unsafeWindowRangePrim negatives rand line targetIdx = do
  winRange <- RM.uniformR (0, negatives) rand
  let winFrom = if targetIdx - winRange > 0 then targetIdx - winRange else 0
      winTo = if V.length line > targetIdx + winRange then targetIdx + winRange else V.length line - 1
      inWindowAndNotTarget i _ = winFrom < i && i < winTo && i /= targetIdx
  return $ V.ifilter (\i e -> not $ inWindowAndNotTarget i e) line
-- | Allocate mutable weights of the given dimension: the input layer is
-- initialized uniformly in [-1/dim, 1/dim], the output layer with zeros.
initMW :: RM.GenIO -> Int -> IO MWeights
initMW rnd dm = do
  randoms <- VUM.replicateM dm $ RM.uniformR range rnd
  zeros <- VUM.new dm
  return MWeights{_mwI = randoms, _mwO = zeros}
  where
    range :: (Double, Double)
    range = (-1 / fromIntegral dm, 1 / fromIntegral dm)
-- | Freeze mutable weights into immutable ones without copying.
-- Unsafe in the usual sense: the mutable vectors must not be used after.
unsafeFreezeMW :: MWeights -> IO Weights
unsafeFreezeMW MWeights{_mwI = mi, _mwO = mo} = do
  frozenI <- VU.unsafeFreeze mi
  frozenO <- VU.unsafeFreeze mo
  return Weights{_wI = frozenI, _wO = frozenO}
-- | Euclidean (L2) norm. Total: 'VU.sum' yields 0 on an empty vector,
-- where the previous 'VU.foldl1' crashed.
norm2 :: VU.Vector Double -> Double
norm2 v = sqrt . VU.sum . VU.map (**2) $ v
-- | Multiply every component by a scalar.
scale :: Double -> VU.Vector Double -> VU.Vector Double
scale coeff v = VU.map (coeff *) v
-- | Component-wise addition (zipWith truncates to the shorter input).
addUU :: VU.Vector Double -> VU.Vector Double -> VU.Vector Double
addUU = VU.zipWith (+)
-- | Dot product. Total: 0 for empty inputs, unlike the foldl1 version.
dotUU :: VU.Vector Double -> VU.Vector Double -> Double
dotUU v1 v2 = VU.sum $ VU.zipWith (*) v1 v2
-- | Scale a vector to unit length.
-- NOTE(review): a zero vector still yields non-finite components
-- (division by zero), unchanged from before.
unitVector :: VU.Vector Double -> VU.Vector Double
unitVector v = scale (1 / norm2 v) v
-- | Cosine similarity of two vectors.
cosSim :: VU.Vector Double -> VU.Vector Double -> Double
cosSim nume deno = dotUU (unitVector nume) (unitVector deno)
-- | Logistic sigmoid.
sigmoid :: Double -> Double
sigmoid lx = 1.0 / (1.0 + exp (negate lx))
| Nnwww/hastext | src/WordEmbedding/HasText/Internal/Strict/HasText.hs | mit | 2,826 | 0 | 14 | 703 | 904 | 475 | 429 | 61 | 3 |
module Glucose.Lexer (tokens, tokenise, tokenize) where
import Control.Lens
import Control.Monad.RWS
import Data.Char
import Data.Foldable
import Data.Maybe
import Data.Text (Text, pack, unpack)
import Glucose.Error hiding (unexpected)
import qualified Glucose.Error as Error
import Glucose.Lexer.Char
import Glucose.Lexer.Location
import Glucose.Lexer.NumericLiteral
import Glucose.Lexer.SyntacticToken
import Glucose.Parser.Source
import Glucose.Token
-- | A lexeme under construction. Identifier and operator variants
-- accumulate their characters in reverse for O(1) prepending.
data PartialLexeme
  = StartOfLine
  | Indentation
  | Gap
  | Lambda
  | PartialIdentifier String -- reversed
  | PartialOperator String -- reversed
  | NumericLiteral NumericLiteral
-- | Lexer state: the lexeme being built, where it began, the current
-- position, and whether we are inside a definition.
data Lexer = Lexer { partial :: PartialLexeme, lexemeStart :: Location, pos :: Location, inDefinition :: Bool }
-- Hand-written lenses over the two fields updated during lexing.
_pos :: Lens Lexer Lexer Location Location
_pos = lens pos (\lexer pos' -> lexer {pos = pos'})
_partial :: Lens Lexer Lexer PartialLexeme PartialLexeme
_partial = lens partial (\lexer partial' -> lexer {partial = partial'})
-- | Lexing monad: accumulates completed lexemes over mutable lexer state.
type Lex m a = RWST () [Lexeme] Lexer m a
-- | Lex the input and return just the tokens, discarding location data.
tokens :: Text -> Either CompileError [Token]
tokens = (map token . snd <$>) . tokenize
-- | Lex the input into source-annotated tokens plus the end location.
tokenise :: Error m => Text -> m (Location, [FromSource Token])
tokenise input = (_2 %~ mapMaybe fromLexeme) <$> _tokenize input
-- | Lex the input into syntactic tokens. Each lexeme is paired with its
-- predecessor (a dummy zero-width lexeme for the first) so that
-- 'syntacticToken' can make layout-sensitive decisions.
tokenize :: Text -> Either CompileError (Location, [SyntacticToken])
tokenize input = (_2 %~ go) <$> _tokenize input where
  go as = mapMaybe (uncurry $ syntacticToken input) $ zip (Lexeme Nothing beginning 0 : as) as
-- | Shared driver: feed every character of the input through 'consume'.
_tokenize :: Error m => Text -> m (Location, [Lexeme])
_tokenize = runLexer . traverse_ consume . unpack
-- | Run a lexing action from the initial state, flushing the final lexeme
-- at end of input, and return the end position alongside the lexeme log.
runLexer :: Error m => Lex m () -> m (Location, [Lexeme])
runLexer l = (_1 %~ pos) <$> execRWST (l *> completeLexeme Nothing) () (Lexer StartOfLine beginning beginning False)
-- | Consume one character: update the lexeme state, then advance the
-- position past the character.
consume :: Error m => Char -> Lex m ()
consume c = consumeChar c *> (_pos %= updateLocation c)
-- | Try to extend the current partial lexeme with the character; if it
-- cannot be extended, complete the lexeme and start a new one.
consumeChar :: Error m => Char -> Lex m ()
consumeChar c = maybe (completeLexeme $ Just c) (_partial .=) =<< maybeAppend c =<< gets partial
-- | Extend a partial lexeme with one character, returning Nothing when the
-- character cannot continue the current lexeme.
maybeAppend :: Error m => Char -> PartialLexeme -> Lex m (Maybe PartialLexeme)
maybeAppend c StartOfLine | isNewline c = pure $ Just StartOfLine
maybeAppend c StartOfLine | isSpace c = pure $ Just Indentation
maybeAppend c Indentation | isNewline c = pure $ Just StartOfLine
maybeAppend c Indentation | isSpace c = pure $ Just Indentation
maybeAppend c Gap | isNewline c = pure $ Just StartOfLine
maybeAppend c Gap | isSpace c = pure $ Just Gap
-- Identifier/operator characters accumulate in reverse (prepend).
maybeAppend c (PartialIdentifier cs) | isIdentifier c = pure . Just $ PartialIdentifier (c:cs)
maybeAppend c (PartialOperator cs) | isOperator c = pure . Just $ PartialOperator (c:cs)
-- A numeric literal either extends, ends, or is malformed (an identifier
-- character immediately after digits is an error).
maybeAppend c (NumericLiteral lit) = do
  lit' <- toLex $ extendNumericLiteral c lit
  maybe nextLexemeOrError (pure . Just . NumericLiteral) lit'
  where nextLexemeOrError = Nothing <$ when (isIdentifier c) (unexpectedChar c "in numeric literal")
maybeAppend _ _ = pure Nothing
-- | Classify the first character of a fresh lexeme.
startingWith :: Error m => Char -> Lex m PartialLexeme
startingWith '\\' = pure Lambda
startingWith c | isNewline c = pure StartOfLine
startingWith c | isSpace c || isControl c = pure Gap
startingWith c | isDigit c = pure $ NumericLiteral $ numericLiteral (digitToInt c)
startingWith c | isIdentifier c = pure $ PartialIdentifier [c]
startingWith c | isOperator c = pure $ PartialOperator [c]
startingWith c = unexpectedChar c "in input"
-- | Finish the current partial lexeme, emitting its token (if it produces
-- one), then restart lexing at the given next character (Nothing at end
-- of input).
completeLexeme :: Error m => Maybe Char -> Lex m ()
completeLexeme nextChar = gets partial >>= \case
  StartOfLine -> unless (isNothing nextChar) $ implicitEndOfDefinition *> tellLexeme nextChar Nothing
  Indentation -> do
    -- Indentation is only legal once a definition has been started.
    indentedDefinition <- ((isJust nextChar &&) . not) <$> gets inDefinition
    when indentedDefinition $ unexpected "indentation" "before first definition"
    tellLexeme nextChar Nothing
  Gap -> tellLexeme nextChar Nothing
  Lambda -> tellLexeme nextChar $ Just BeginLambda
  -- Identifiers accumulate reversed: "epyt" is the keyword "type".
  PartialIdentifier "epyt" -> tellLexeme nextChar $ Just $ Keyword Type
  PartialIdentifier s -> tellLexeme nextChar $ Just $ Identifier $ pack $ reverse s
  PartialOperator "=" -> tellLexeme nextChar $ Just $ Operator Assign
  PartialOperator "|" -> tellLexeme nextChar $ Just $ Operator Bar
  -- ">-" is the reversed spelling of the arrow "->".
  PartialOperator ">-" -> tellLexeme nextChar $ Just $ Operator Arrow
  PartialOperator cs -> tellLexeme nextChar $ Just $ Operator $ CustomOperator (pack $ reverse cs)
  NumericLiteral lit -> do
    (token, lastChar) <- toLex $ completeNumericLiteral lit
    case lastChar of
      Nothing -> tellLexeme nextChar (Just token)
      -- The literal ended one character early (e.g. a trailing suffix
      -- character): rewind, emit the token, then re-consume that character.
      Just lc -> do
        _pos %= rewind
        case nextChar of
          Nothing -> unexpectedChar lc "following numeric literal"
          Just nc -> do
            tellLexeme lastChar (Just token)
            _pos %= updateLocation lc
            consumeChar nc
-- | Run a lexing computation whose errors carry no location, attaching the
-- current position to any error it raises.
toLex :: Error m => RWST () [Lexeme] Lexer (Either ErrorDetails) a -> Lex m a
toLex m = gets pos >>= \loc -> mapRWST (locateError loc) m
-- | Emit a zero-width EndOfDefinition lexeme, unless still at the very
-- beginning of the input (no definition has been started yet).
implicitEndOfDefinition :: Monad m => Lex m ()
implicitEndOfDefinition = do
  start <- gets lexemeStart
  when (start /= beginning) $ tell $ pure $ Lexeme (Just EndOfDefinition) start 0
-- | Record the completed lexeme, then reset state for the next lexeme
-- starting at the given character.
tellLexeme :: Error m => Maybe Char -> Maybe Token -> Lex m ()
tellLexeme nextChar token = do
  Lexer { lexemeStart, pos } <- get
  tell . pure $ Lexeme token lexemeStart (codePointsBetween lexemeStart pos)
  -- NOTE(review): at end of input (Nothing) the new partial lexeme is left
  -- as 'undefined'; presumably it is never inspected afterwards -- confirm.
  partial' <- maybe (pure undefined) startingWith nextChar
  put $ Lexer partial' pos pos True
-- * Error messages
-- | Raise an "unexpected X in Y" error at the current position.
unexpected :: Error m => String -> String -> Lex m a
unexpected u s = gets pos >>= \loc -> Error.unexpected loc u s
-- | Raise an "unexpected character" error at the current position.
unexpectedChar :: Error m => Char -> String -> Lex m a
unexpectedChar c = unexpected (show c)
| sardonicpresence/glucose | src/Glucose/Lexer.hs | mit | 5,640 | 0 | 23 | 1,099 | 2,068 | 1,007 | 1,061 | -1 | -1 |
-- | An empty type with no (non-bottom) values: it plays the role of
-- logical falsehood. (Requires empty data declarations.)
data False
-- | Negation encoded as "implies False".
type Not a = a -> False
-- | A negatively-recursive type: a Liar wraps a refutation of itself.
-- Recursion in negative position is what lets 'absurd' typecheck.
data Liar = Liar (Not Liar)
-- | Every Liar refutes itself: apply the wrapped refutation to the value.
honest :: Not Liar
honest l@(Liar p) = p l
-- | A "proof" of False (Curry's-paradox style); evaluating it diverges.
absurd :: False
absurd = honest $ Liar honest
| ankitku/awotap | haskellAbsurd.hs | mit | 156 | 0 | 8 | 38 | 75 | 40 | 35 | -1 | -1 |
module Kriek.Compiler (compile, compileFile) where
import Kriek.Reader (program)
import Text.Megaparsec (parse, parseErrorPretty)
import Data.Maybe
-- | Compile Kriek source text to its printed AST form.
--
-- Accepts an optional file path of the file being compiled, used in
-- parse-error messages; defaults to "(unknown)". A parse failure aborts
-- via 'error' with a pretty-printed message.
--
-- TODO: Don't use strings
compile :: Maybe FilePath -> String -> String
compile path source = either (error . parseErrorPretty) show parsed
  where
    sourceName = fromMaybe "(unknown)" path
    parsed = parse program sourceName source
-- | Compile the file at `src` and store the compiler output in `out`.
compileFile :: FilePath -> FilePath -> IO ()
compileFile src out = readFile src >>= writeFile out . compile (Just src)
| kalouantonis/kriek | src/hs/Kriek/Compiler.hs | mit | 664 | 0 | 10 | 149 | 182 | 93 | 89 | 13 | 2 |
-- | A cellular automaton witch simulates logic circuits
module WireWorld (Wire, main, wireWorldRule) where
import CellularAutomata2D
import GUI
-- | A wire can be empty (no wire), a conductor (no current), or an electron head or tail.
data Wire = Empty | Conductor | ElectronHead | ElectronTail deriving (Show, Eq, Enum, Bounded)
-- | Run the interactive automaton on an initially empty 50x50 space;
-- the final space returned by the GUI loop is discarded.
main :: IO ()
main = runCellularAutomata2D wireWorldRule (initSpaceWithCells (50, 50) Empty []) >> return ()
-- | An empty wire stays an empty wire forever.
-- An electron head becomes an electron tail (the electron moves on).
-- An electron tail becomes a conductor (the electron has passed).
-- A conductor becomes an electron head if there are one or two electron
-- heads in its neighborhood; otherwise it stays a conductor.
-- Wire world uses a Moore neighborhood.
wireWorldRule :: Rule Wire
wireWorldRule = Rule moorIndexDeltas (\self friends -> return $ case self of
    Empty -> Empty
    ElectronHead -> ElectronTail
    ElectronTail -> Conductor
    Conductor
        | length (filter (== ElectronHead) friends) `elem` [1,2] ->
            ElectronHead
        | otherwise -> Conductor)
-- | Rendering and editing support: each state has a fixed colour, and
-- clicking a cell cycles through the states in Enum order.
instance Cell Wire where
    getColor Empty = grey
    getColor Conductor = yellow
    getColor ElectronHead = blue
    getColor ElectronTail = red
    getSuccState = cycleEnum
| orion-42/cellular-automata-2d | WireWorld.hs | mit | 1,305 | 0 | 19 | 273 | 258 | 142 | 116 | 21 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Functors where
import Text.HTML.Scalpel
import Text.Regex (mkRegexWithOpts, matchRegex)
import Data.Text (strip)
-- | A toy container pairing a bare value of type @a@ with a value of the
-- same type wrapped in a context @f@.
data Funny f a = Funny a (f a) deriving (Show)

-- | Example value: 5 alongside Just 3.
funnyThing :: Funny Maybe Int
funnyThing = Funny 5 $ Just 3
-- import Control.Applicative ((<|>))
-- Regex matching a line that is exactly one letter A-E followed by
-- newline(s). NOTE(review): the two Bool flags to mkRegexWithOpts are
-- presumably (multiline, caseSensitive) = (True, False) -- confirm
-- against the Text.Regex documentation. Currently unused in this module.
gateRegex = mkRegexWithOpts "^(A|B|C|D|E)\n+$" True False
-- | Scrape the PDX departures page and print each table cell.
--
-- Fixes two defects in the original: the irrefutable @(Just is)@ pattern
-- crashed with a pattern-match failure when the scrape returned Nothing,
-- and the unused @strippedIs@ binding applied Data.Text.strip to
-- [String], which does not typecheck.
main :: IO ()
main = do
  scraped <- allItems
  case scraped of
    Nothing -> putStrLn "Scraping failed: no items returned."
    Just items -> mapM_ print items
-- | Scrape today's departures from the Port of Portland (PDX) flights page,
-- returning the text of every table cell. NOTE(review): scrapeURL
-- presumably yields Nothing when the fetch or the scraper fails -- confirm
-- against the scalpel documentation.
allItems :: IO (Maybe [String])
allItems = scrapeURL "https://www2.portofportland.com/PDX/Flights?Day=Today&TimeFrom=12%3A00+AM&TimeTo=11%3A59+PM&Type=D&FlightNum=&CityFromTo=&Airline=" items
  where
    -- Select every <td> inside a <tr> and read each one's text.
    items :: Scraper String [String]
    items = chroots (("tr" :: String) // ("td" :: String)) item
    item :: Scraper String String
    item = text ("td" :: String)
| harrisi/on-being-better | list-expansion/Haskell/Learning/Functors.hs | cc0-1.0 | 921 | 0 | 12 | 168 | 272 | 146 | 126 | -1 | -1 |
import Nightwatch.Types
import Nightwatch.Telegram
import Nightwatch.Webapp
import Nightwatch.Websocket
import Control.Concurrent.Chan
-- | Wire up the shared channels and start each Nightwatch service:
-- the Telegram bot, the aria2 daemon, its websocket client, and the webapp.
main = do
  -- Channel carrying requests towards aria2.
  aria2Chan <- newChan
  -- Channel carrying outgoing Telegram messages.
  tgOutChan <- newChan
  startTelegramBot aria2Chan tgOutChan
  startAria2
  startAria2WebsocketClient aria2Chan tgOutChan
  -- NOTE(review): startWebapp runs last; presumably it blocks the main
  -- thread while the others run in the background -- confirm.
  startWebapp
| vacationlabs/nightwatch | haskell/Main.hs | gpl-2.0 | 307 | 0 | 7 | 41 | 68 | 33 | 35 | 12 | 1 |
{-# LANGUAGE DeriveDataTypeable, FlexibleInstances, MultiParamTypeClasses #-}
module Robots3.Config
( Config -- abstract
, c_hull, cluster_of
, showing_hull, show_hull
, breit
, make, make_with_hull, geschichte
, with_targets
, move, remove, addZug
, look, robots, inhalt
, positions, goals
, valid
, bounds, area
)
where
-- $Id$
import Robots3.Data
import Robots3.Exact
import Autolib.FiniteMap
import Autolib.Set
import Autolib.ToDoc
import Autolib.Hash
import Autolib.Reader
import Autolib.Size
import Autolib.Set
import Autolib.FiniteMap
import Autolib.Xml
import Autolib.Reporter
import Data.List (partition)
import Control.Monad ( guard )
import Data.Maybe ( isJust, maybeToList, fromMaybe )
import Data.Typeable
import Data.Int
-- | A snapshot of the puzzle state: robot placement, targets, cached
-- geometry, and the move history that produced it.
data Config = Config
    { c_hash :: Int32                      -- ^ cached hash of 'inhalt'
    , inhalt :: FiniteMap String Robot     -- ^ robots, keyed by name
    , targets :: Set Position              -- ^ goal positions
    , breit :: Int                         -- ^ board extension (width bound)
    , geschichte :: [ Zug ]                -- ^ moves so far, newest first
    , c_hull :: Set Position               -- ^ cached exact hull of robot positions
    , c_clusters :: FiniteMap Position Int -- ^ cluster index per position
    , show_hull :: Bool                    -- ^ whether to render the hull
    }
    deriving ( Typeable )

-- | Replace the target set of a configuration.
with_targets c ts = c { targets = mkSet ts }

-- | The cluster index of a position, if it lies in a cached cluster.
cluster_of k p = lookupFM ( c_clusters k ) p
-- | Build a configuration from a list of robots and target positions.
-- The width bound 'breit' is the largest extension of any robot position.
-- An empty robot list now yields a width of 0 instead of crashing in
-- 'maximum'; for non-empty input the behaviour is unchanged.
make :: [ Robot ] -> [ Position ] -> Config
make rs ts =
    let i = listToFM $ do
          r <- rs
          return ( name r, r )
    in hulled $ Config
       { c_hash = hash i
       , inhalt = i
       , targets = mkSet ts
       , breit = case rs of
             [] -> 0
             _ -> maximum $ do
                 r <- rs
                 return $ extension $ position r
       , geschichte = []
       , show_hull = False
       }
-- | Like 'make', but with hull rendering switched on.
make_with_hull :: [ Robot ] -> [ Position ] -> Config
make_with_hull rs ts = showing_hull ( make rs ts )

-- | Recompute the cached hull and cluster map from scratch, based on the
-- configuration's current robot positions.
hulled k = k { c_hull = exact_hull_points ps, c_clusters = clusterMap }
  where
    ps = mkSet $ map position $ robots k
    clusterMap = listToFM $ do
        ( i, cl ) <- zip [ 0 .. ] $ clusters ps
        p <- setToList cl
        return ( p, i )
-- | Enable hull rendering for a configuration.
showing_hull k = k { show_hull = True }

-- | Bounding box ((min x, min y), (max x, max y)) of all robot positions.
-- Partial: crashes when the configuration contains no robots.
bounds k = ( ( minimum xs, minimum ys ), ( maximum xs, maximum ys ) )
  where
    ps = map position ( robots k )
    xs = map x ps
    ys = map y ps

-- | Number of grid cells covered by the bounding box (inclusive).
area k = width * height
  where
    ((a,b),(c,d)) = bounds k
    width = c - a + 1
    height = d - b + 1
-- | fort damit (into outer space): drop the named robot and refresh the
-- cached hash and hull.
remove :: String -> Config -> Config
remove n k = hulled $ k { inhalt = rest, c_hash = hash rest }
  where rest = delFromFM (inhalt k) n

-- | auf neue position: place the named robot at a new position and
-- refresh the cached hash and hull. Errors when the name is unknown.
move :: (String, Position) -> Config -> Config
move (n, p) k = hulled $ k { inhalt = updated, c_hash = hash updated }
  where
    contents = inhalt k
    moved = case lookupFM contents n of
        Just r -> r { position = p }
        Nothing -> error "Robots3.Move.move"
    updated = addToFM contents n moved

-- | Prepend a move to the history.
addZug :: Zug -> Config -> Config
addZug z k = k { geschichte = z : geschichte k }
-- | Render a configuration as the "make <robots> <goals>" expression that
-- rebuilds it.
instance ToDoc Config where
    toDoc k = text "make" <+> toDoc ( robots k ) <+> toDoc ( goals k )
-- | Parse the same "make <robots> <goals>" form that 'toDoc' prints.
instance Reader Config where
    atomic_readerPrec p = do
        guard $ p < 9
        my_reserved "make"
        arg1 <- reader
        arg2 <- reader
        return $ make arg1 arg2
-- | Serialise a configuration as its robots and goals.
instance Container Config ([ Robot ],[Position]) where
    label _ = "Config"
    pack k = (robots k, goals k)
    unpack (rs,ts) = make rs ts
-- | Uses the cached hash, which 'make', 'move' and 'remove' keep current.
instance Hash Config where hash = c_hash
-- | Compare positions only (original comment: nur Positionen vergleichen);
-- the cached hash is included as a cheap first discriminator.
essence :: Config -> ( Int32, Set Position )
essence k =
    let rs = robots k
    in (hash k, mkSet $ map position rs)
instance Ord Config where
    compare k l = compare (essence k) (essence l)
instance Eq Config where
    (==) k l = (==) (essence k) (essence l)
-- | Size is measured as the bounding-box area.
instance Size Config where
    size = fromIntegral . area
-----------------------------------------------------------------
-- | Look up a robot by name.
look :: Config -> String -> Maybe Robot
look c n = lookupFM (inhalt c) n
-- | All robots in the configuration.
robots :: Config -> [ Robot ]
robots c = eltsFM (inhalt c)
-- | The positions currently occupied by robots.
positions :: Config -> [ Position ]
positions = map position . robots
-- | The target positions, as a list.
goals :: Config -> [ Position ]
goals k = setToList $ targets k
-- | Check that a configuration is well-formed: all robots must occupy
-- distinct positions, and at least one goal must be present. Reports its
-- findings (in German, matching the surrounding project) via 'Reporter'.
valid :: Config -> Reporter ()
valid k = do
    let positionMap = addListToFM_C (++) emptyFM $ do
          r <- robots k
          return ( position r, [ name r ] )
        collisions = do
          ( p, rs ) <- fmToList positionMap
          guard $ length rs > 1
          return ( p , rs )
    inform $ text "Stehen alle Roboter auf verschiedenen Positionen?"
    if null collisions
        then inform $ text "Ja."
        else reject $ text "Nein, diese nicht:" <+> toDoc collisions
    assert ( not $ null $ goals k )
        $ text "Ist wenigstens ein Ziel angegeben?"
| Erdwolf/autotool-bonn | src/Robots3/Config.hs | gpl-2.0 | 4,430 | 25 | 17 | 1,234 | 1,666 | 874 | 792 | 136 | 2 |
{-# OPTIONS_GHC -Wall -fwarn-tabs -Werror #-}
-------------------------------------------------------------------------------
-- |
-- Module : Logic.Controls
-- Copyright : Copyright (c) 2014 Michael R. Shannon
-- License : GPLv2 or Later
-- Maintainer : mrshannon.aerospace@gmail.com
-- Stability : unstable
-- Portability : portable
--
-- Controls module.
-------------------------------------------------------------------------------
module Logic.Controls
( zoom
, rotate
, switchView
, handleSwitchView
, switchShading
, handleSwitchShading
, handleMove
, handleAnimate
) where
import qualified App.Types as A
import qualified Input.Keyboard.Types as K
import qualified View as V
import qualified Settings.Types as S
import qualified Graphics.Types as G
import Logic.Controls.Zoom
import Logic.Controls.Rotate
import Logic.Controls.Move
import Logic.Controls.Animate
-- Switch view modes: toggles Orthographic <-> ThirdPerson.
-- A FirstPerson view is returned unchanged.
switchView :: A.App -> A.App
switchView app@(A.App { A.view = view@(V.Orthographic {}) }) =
    app { A.view = V.toThirdPerson view }
switchView app@(A.App { A.view = view@(V.ThirdPerson {}) }) =
    app { A.view = V.toOrthographic view }
switchView app@(A.App { A.view = (V.FirstPerson {}) }) = app
-- Handle the c key: toggle the view mode iff the key is pressed.
handleSwitchView :: A.App -> A.App
handleSwitchView app
    | cKeyPressed = switchView app
    | otherwise = app
  where
    cKeyPressed = (K.cKey . K.keys . A.keyboard) app == K.KeyPressed
-- Switch shading modes, cycling Wire -> Flat -> Smooth -> Wire.
switchShading :: A.App -> A.App
switchShading app@(A.App { A.settings = settings@(S.Settings {}) }) =
    let newShading = case S.shading settings of
            G.Wire -> G.Flat
            G.Flat -> G.Smooth
            G.Smooth -> G.Wire
    in app { A.settings = settings { S.shading = newShading } }
-- Handle the v key: cycle the shading mode iff the key is pressed.
handleSwitchShading :: A.App -> A.App
handleSwitchShading app
    | vKeyPressed = switchShading app
    | otherwise = app
  where
    vKeyPressed = (K.vKey . K.keys . A.keyboard) app == K.KeyPressed
| mrshannon/trees | src/Logic/Controls.hs | gpl-2.0 | 1,944 | 0 | 14 | 392 | 508 | 297 | 211 | 42 | 3 |
--
-- Copyright (c) 2013-2019 Nicola Bonelli <nicola@pfq.io>
--
-- This program is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
--
module CGrep.Types where
import Data.ByteString.Char8 as C ( ByteString )
-- | Byte offset into the scanned text.
type Offset = Int
-- | Unboxed pair of offsets. NOTE(review): requires the UnboxedTuples
-- extension, presumably enabled via the build configuration -- confirm.
type Offset2d = (# Int, Int #)
-- | Offset of (the start of) a line.
type OffsetLine = Int
-- | Raw scanned text as a strict ByteString.
type Text8 = C.ByteString
-- | A single line of text as a strict ByteString.
type Line8 = C.ByteString
| awgn/cgrep | src/CGrep/Types.hs | gpl-2.0 | 997 | 0 | 5 | 190 | 75 | 55 | 20 | -1 | -1 |
-- | Functions for generating league tables (including form tables and mini-leagues).
module Anorak.Tables (buildRecord,
formTable,
goalDiff,
LeagueRecord(..),
leaguePositions,
leagueTable,
miniLeagueTable,
played,
points,
pointsPerGame) where
import Anorak.Results
import Data.ByteString.Char8(ByteString)
import qualified Data.ByteString.Char8 as BS(unpack)
import Data.List(foldl', sort)
import Data.Map(Map, (!))
import qualified Data.Map as Map(adjust, adjustWithKey, elems, empty, filter, filterWithKey, findMax, findWithDefault, fromList, keysSet, map, mapAccum, mapWithKey, partitionWithKey, toAscList)
import Data.Maybe(fromMaybe)
import Data.Ord(comparing)
import Data.Set(Set)
import qualified Data.Set as Set(member, toAscList, union)
import Data.Time.Calendar(Day)
import Util.List(keep)
-- | A LeagueRecord contains data about the league performance of a single
-- team: total wins, draws, defeats, goals scored, goals conceded and any
-- points adjustment. Derived figures (played, points, goal difference)
-- are computed by the functions below. Field order is significant: the
-- constructor is used positionally elsewhere in this module.
data LeagueRecord = LeagueRecord
    { team :: !Team        -- ^ The team that this record relates to.
    , won :: !Int          -- ^ The number of matches won by this team.
    , drawn :: !Int        -- ^ The number of matches drawn by this team.
    , lost :: !Int         -- ^ The number of matches lost by this team.
    , for :: !Int          -- ^ The total number of goals scored by this team.
    , against :: !Int      -- ^ The total number of goals conceded by this team.
    , adjustment :: !Int   -- ^ A points adjustment (positive or negative, usually zero) applied to this team's total.
    }
-- A LeagueRecord renders as a single line containing both the stored
-- fields and the derived figures (played, goal difference, points).
instance Show LeagueRecord where
    show record = BS.unpack (team record) ++ concat
        [ " P", show (played record)
        , " W", show (won record)
        , " D", show (drawn record)
        , " L", show (lost record)
        , " F", show (for record)
        , " A", show (against record)
        , " GD", show (goalDiff record)
        , " Pts", show (points record)
        ]
-- Two records are "equal" for league purposes when every tie-breaking
-- statistic (points, goal difference, goals scored, wins) coincides.
instance Eq LeagueRecord where
    record1 == record2 = and
        [ points record1 == points record2
        , goalDiff record1 == goalDiff record2
        , for record1 == for record2
        , won record1 == won record2
        ]
-- Records order by descending points, then descending goal difference,
-- then descending goals scored, then descending wins, so that 'sort'
-- places the best record first.
instance Ord LeagueRecord where
    compare record1 record2
        | pts1 /= pts2 = compare pts2 pts1
        | gd1 /= gd2 = compare gd2 gd1
        | f1 /= f2 = compare f2 f1
        | otherwise = compare (won record2) (won record1)
        where pts1 = points record1
              pts2 = points record2
              gd1 = goalDiff record1
              gd2 = goalDiff record2
              f1 = for record1
              f2 = for record2
-- | Total number of matches played (the sum of wins, draws and defeats).
played :: LeagueRecord -> Int
played r = won r + drawn r + lost r

-- | Total points: 3 per win, 1 per draw, plus/minus any adjustment.
points :: LeagueRecord -> Int
points r = 3 * won r + drawn r + adjustment r

-- | Goal difference: total goals scored minus total goals conceded.
goalDiff :: LeagueRecord -> Int
goalDiff r = for r - against r

-- | Average number of league points earned per game.
-- NaN when no games have been played (0/0 in Double arithmetic).
pointsPerGame :: LeagueRecord -> Double
pointsPerGame r = fromIntegral (points r) / fromIntegral (played r)
-- | Build a LeagueRecord for the specified team from all of the supplied
-- results in which that team was involved.
buildRecord :: Team -> [Result] -> LeagueRecord
buildRecord t results = foldl' (addResultToRecord t) blank results
  where blank = LeagueRecord t 0 0 0 0 0 0

-- | Fold one match result into a team's league record. Results that do
-- not involve the team are ignored.
addResultToRecord :: Team -> LeagueRecord -> Result -> LeagueRecord
addResultToRecord t record result
    | t == homeTeam result = addScoreToRecord record (homeScore result) (awayScore result)
    | t == awayTeam result = addScoreToRecord record (awayScore result) (homeScore result)
    | otherwise = record

-- | Update a record with a single match's goals for and against,
-- crediting a win, draw or defeat as appropriate.
addScoreToRecord :: LeagueRecord -> Int -> Int -> LeagueRecord
addScoreToRecord (LeagueRecord t w d l f a adj) goalsScored goalsConceded =
    case compare goalsScored goalsConceded of
        GT -> LeagueRecord t (w + 1) d l f' a' adj
        EQ -> LeagueRecord t w (d + 1) l f' a' adj
        LT -> LeagueRecord t w d (l + 1) f' a' adj
  where
    f' = f + goalsScored
    a' = a + goalsConceded
-- | Produces a standard league table with teams ordered in descending
-- order of points. Takes a map of teams to results, a map of points
-- adjustments, and a split point: the number of games after which the
-- league divides into top and bottom halves (as in the SPL); 0 means no
-- split.
leagueTable :: Map Team [Result] -> Map Team Int -> Int -> [LeagueRecord]
leagueTable teamResults adjustments 0 =
    sort . map (adjust adjustments) . Map.elems $ Map.mapWithKey buildRecord teamResults
leagueTable teamResults adjustments split =
    updateSection topHalf remainingFixtures ++ updateSection bottomHalf remainingFixtures
  where
    -- Table based on the pre-split fixtures only.
    preSplit = leagueTable (Map.map (take split) teamResults) adjustments 0
    -- Divide the league into two halves at the split.
    (topHalf, bottomHalf) = splitAt (length preSplit `div` 2) preSplit
    -- Post-split fixtures, applied to each half before recombining.
    remainingFixtures = Map.map (drop split) teamResults

-- | Update the specified table by applying remaining results for member teams.
updateSection :: [LeagueRecord] -> Map Team [Result] -> [LeagueRecord]
updateSection table results = sort $ map (updateRecord results) table

-- | Update the specified league record by applying all of the remaining
-- results for that team.
updateRecord :: Map Team [Result] -> LeagueRecord -> LeagueRecord
updateRecord results record = foldl' (addResultToRecord t) record (results ! t)
  where t = team record
-- | Produces a form table: league records computed over only each team's
-- most recent n results, paired with those results, ordered in
-- descending order of points.
formTable :: Map Team [Result] -> Int -> [(LeagueRecord, [TeamResult])]
formTable teamResults n = map (attachForm formResults) $ leagueTable formResults Map.empty 0
  where formResults = Map.map (keep n) teamResults

-- | Pair a record with the (team-relative) results that produced it.
attachForm :: Map Team [Result] -> LeagueRecord -> (LeagueRecord, [TeamResult])
attachForm results record = (record, map (convertResult t) ownResults)
  where
    t = team record
    ownResults = Map.findWithDefault [] t results

-- | Looks up the points adjustment for a team (if any) and applies it to
-- their league record.
adjust :: Map Team Int -> LeagueRecord -> LeagueRecord
adjust adjustments (LeagueRecord t w d l f a adj) =
    LeagueRecord t w d l f a (adj + Map.findWithDefault 0 t adjustments)

-- | Generate a league table that includes only results between the
-- specified teams; aliases for those teams count as members too.
miniLeagueTable :: Set Team -> Map Team [Result] -> Map ByteString Team -> [LeagueRecord]
miniLeagueTable teams results aliases = leagueTable filteredResults Map.empty 0
  where
    teamNames = Set.union teams . Map.keysSet $ Map.filter (`Set.member` teams) aliases
    filteredResults = Map.map (filter $ bothTeamsInSet teamNames) $ Map.filterWithKey (\k _ -> Set.member k teamNames) results

-- | True when both sides of a result belong to the given team set.
bothTeamsInSet :: Set Team -> Result -> Bool
bothTeamsInSet teams result = Set.member (homeTeam result) teams && Set.member (awayTeam result) teams
-- | Calculate the league position of every team at the end of every match day.
-- sp is the split point (matches per team before the league splits; 0 for
-- leagues that never split).
leaguePositions :: Set Team -> Map Day [Result] -> Map Team Int -> Int -> Map Team [(Day, Int)]
leaguePositions teams results adj sp = foldr addPosition emptyPosMap positions
    where teamList = Set.toAscList teams
          halfTeamCount = length teamList `div` 2
          numMatchesBeforeSplit = sp * halfTeamCount
          (resultsBeforeSplit, resultsAfterSplit) = splitResults numMatchesBeforeSplit results
          -- Calculate the league table (as a sorted list of LeagueRecords) for each date that games were played.
          tablesByDay = tablesByDate [LeagueRecord t 0 0 0 0 0 (Map.findWithDefault 0 t adj) | t <- teamList] resultsBeforeSplit
          -- Split the table into top and bottom half (for leagues that do this, such as the SPL)
          -- NOTE(review): Map.findMax is partial; presumably there is always
          -- at least one match day of results -- confirm with callers.
          (topHalf, bottomHalf) = splitAt halfTeamCount . snd $ Map.findMax tablesByDay
          -- Convert the various tables to a list of triples (team, date, position).
          beforeSplitPositions = tablesToPositions 1 tablesByDay
          topPositions = tablesToPositions 1 $ tablesByDate topHalf resultsAfterSplit
          bottomPositions = tablesToPositions (halfTeamCount + 1) $ tablesByDate bottomHalf resultsAfterSplit
          positions = beforeSplitPositions ++ topPositions ++ bottomPositions
          -- An initial map, with one entry for each team, into which the above triples are folded.
          emptyPosMap = Map.fromList [(t, []) | t <- teamList]
-- | Calculate the league table (as a sorted list of LeagueRecords) for each date that games were played.
-- The first argument is the initial state of the league (usually blank records, but for post-split tables it
-- will be the table at the split date).
tablesByDate :: [LeagueRecord] -> Map Day [Result] -> Map Day [LeagueRecord]
tablesByDate records results = Map.map (sort.Map.elems) recordsByDate
    where initialTable = Map.fromList [(team record, record) | record <- records]
          recordsByDate = snd $ Map.mapAccum tableAccum initialTable results
-- Convert table for dates to a list of triples (team, date, position). First argument is the highest position
-- to assign (usually set to one, but will be something else when calculating positions for the bottom half
-- post-split in the SPL and similar leagues).
tablesToPositions :: Int -> Map Day [LeagueRecord] -> [(Team, Day, Int)]
tablesToPositions startPos = concat . Map.elems . Map.mapWithKey (\matchDay records -> zip3 (map team records) (repeat matchDay) [startPos..])
-- Accumulator for Map.mapAccum: fold one day's results into the running
-- table, returning the updated table both as state and as output.
tableAccum :: Map Team LeagueRecord -> [Result] -> (Map Team LeagueRecord, Map Team LeagueRecord)
tableAccum table results = (table', table')
    where table' = foldl' addResultToTable table results
-- Apply one result to both participating teams' records in the table.
addResultToTable :: Map Team LeagueRecord -> Result -> Map Team LeagueRecord
addResultToTable table result = Map.adjustWithKey update hTeam $ Map.adjustWithKey update aTeam table
    where update t record = addResultToRecord t record result
          hTeam = homeTeam result
          aTeam = awayTeam result
-- Prepend one (day, position) entry onto a team's position history.
addPosition :: (Team, Day, Int) -> Map Team [(Day, Int)] -> Map Team [(Day, Int)]
addPosition (t, d, p) = Map.adjust ((d, p):) t
-- | Given a number of matches that must be played before the league splits, divide the results into pre- and post-split fixtures.
--   If there is no split, there will be no post-split fixtures.
splitResults :: Int -> Map Day [Result] -> (Map Day [Result], Map Day [Result])
splitResults 0 results = (results, Map.empty)
splitResults numMatches results = Map.partitionWithKey (\d _ -> d <= splitDate) results
    -- If the split count is never reached, everything counts as pre-split.
    -- NOTE(review): Map.findMax is partial on an empty map -- presumably
    -- this branch is only reached with a non-empty result set; confirm.
    where splitDate = fromMaybe (fst $ Map.findMax results) . findSplitDate numMatches $ Map.toAscList results
-- | Given a number of matches that must be played before the league
-- splits, determine the date of the last pre-split fixture. Returns
-- Nothing when the fixture list is exhausted before that many matches
-- have been played.
findSplitDate :: Int -> [(Day, [Result])] -> Maybe Day
findSplitDate _ [] = Nothing
findSplitDate remaining ((day, dayResults) : laterDays)
    | length dayResults >= remaining = Just day
    | otherwise = findSplitDate (remaining - length dayResults) laterDays
| dwdyer/anorak | src/haskell/Anorak/Tables.hs | gpl-3.0 | 13,494 | 1 | 24 | 4,152 | 2,882 | 1,505 | 1,377 | 149 | 1 |
module H.Prelude.Monad
( Functor(..), Applicative(..), Alternative(..)
, Monad(..), MonadPlus(..), Const(..), ZipList(..)
, (<$>), (<**>), liftA, liftA2, liftA3, optional
, (=<<), (>=>), (<=<), forever, void
, mfilter, filterM, zipWithM, zipWithM_, foldM, foldM_, onFstF, onSndF
, replicateM, replicateM_, guard, unless
, when, whenM, whenJust, whenJustM, sequenceWhileJust
, minimumByM, eitherAlt
) where
import Control.Applicative
( Applicative(..), Alternative(..), Const(..), ZipList(..)
, (<$>), (<**>), liftA, liftA2, liftA3, optional
)
import Control.Monad
( Functor(..), Monad(..), MonadPlus(..)
, (=<<), (>=>), (<=<), forever, void
, mfilter, filterM, zipWithM, zipWithM_, foldM, foldM_
, replicateM, replicateM_, guard, when, unless
)
import H.Prelude.Core
-- | If the input is Just, run the monadic action on the wrapped value;
-- for Nothing do nothing ('return ()').
whenJust :: (Monad m) => Maybe a -> (a -> m ()) -> m ()
whenJust Nothing _ = return ()
whenJust (Just x) f = f x
-- | Apply an effectful function to the first component of a pair,
-- carrying the second component through unchanged.
onFstF :: (Functor f) => (a -> f c) -> (a, b) -> f (c, b)
onFstF f (a, b) = fmap (\c -> (c, b)) (f a)

-- | Apply an effectful function to the second component of a pair,
-- carrying the first component through unchanged.
onSndF :: (Functor f) => (b -> f c) -> (a, b) -> f (a, c)
onSndF f (a, b) = fmap (\c -> (a, c)) (f b)
-- | Find the minimum element of a list using a monadic comparison action.
-- The earliest of several equal minima wins; the list must be non-empty.
minimumByM :: (Monad m) => (a -> a -> m Ordering) -> [a] -> m a
minimumByM _ [] = error "minimumByM: Empty list"
minimumByM cmp (x : xs) = foldM pick x xs
  where
    pick best candidate = do
      ordering <- cmp candidate best
      return $ case ordering of
        LT -> candidate
        _ -> best
-- | Like <|>, but the operands may have different value types, with Either providing
-- a union of those two types in the result. Left-biased in the same way
-- as <|>: successes from the first operand are tagged 'Left'.
eitherAlt :: (Alternative f) => f a -> f b -> f (Either a b)
eitherAlt la ra = (Left <$> la) <|> (Right <$> ra)
-- Same fixity as <|>, so the two chain together naturally.
infixl 3 `eitherAlt`
-- | Sequence a list of actions that return Maybes, collecting the Just
-- values and stopping (without running the rest) at the first Nothing.
sequenceWhileJust :: (Monad m) => [m (Maybe a)] -> m [a]
sequenceWhileJust [] = return []
sequenceWhileJust (action : rest) = do
  result <- action
  case result of
    Nothing -> return []
    Just x -> (x :) <$> sequenceWhileJust rest
-- | Like when, but the condition is itself a monadic action.
whenM :: (Monad m) => m Bool -> m () -> m ()
whenM condition action = do
  shouldRun <- condition
  when shouldRun action

-- | Run the handler on the result of an action when it yields Just;
-- do nothing for Nothing.
whenJustM :: (Monad m) => m (Maybe a) -> (a -> m ()) -> m ()
whenJustM action handler = do
  result <- action
  case result of
    Nothing -> return ()
    Just x -> handler x
| ktvoelker/hydrogen | src/H/Prelude/Monad.hs | gpl-3.0 | 2,316 | 0 | 12 | 526 | 1,010 | 576 | 434 | -1 | -1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.