code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE ExistentialQuantification #-}
module Synthesizer.MIDI.CausalIO.Process (
Events,
slice,
controllerLinear,
controllerExponential,
pitchBend,
channelPressure,
bendWheelPressure,
constant,
Instrument,
Bank,
GateChunk,
noteEvents,
embedPrograms,
applyInstrument,
applyModulatedInstrument,
flattenControlSchedule,
applyModulation,
arrangeStorable,
sequenceCore,
sequenceModulated,
sequenceModulatedMultiProgram,
sequenceModulatedMultiProgramVelocityPitch,
sequenceStorable,
-- auxiliary function
initWith,
mapMaybe,
) where
import qualified Synthesizer.CausalIO.Gate as Gate
import qualified Synthesizer.CausalIO.Process as PIO
import qualified Synthesizer.MIDI.Value.BendModulation as BM
import qualified Synthesizer.MIDI.Value.BendWheelPressure as BWP
import qualified Synthesizer.MIDI.Value as MV
import qualified Synthesizer.MIDI.EventList as MIDIEv
import Synthesizer.MIDI.EventList (StrictTime, )
import qualified Synthesizer.PiecewiseConstant.Signal as PC
import qualified Synthesizer.Storable.Cut as CutSt
import qualified Synthesizer.Generic.Cut as CutG
import qualified Synthesizer.Zip as Zip
import qualified Sound.MIDI.Message.Class.Check as Check
import qualified Sound.MIDI.Message.Channel.Voice as VoiceMsg
import Control.DeepSeq (rnf, )
import qualified Data.EventList.Relative.TimeBody as EventList
import qualified Data.EventList.Relative.BodyTime as EventListBT
import qualified Data.EventList.Relative.TimeTime as EventListTT
import qualified Data.EventList.Relative.TimeMixed as EventListTM
import qualified Data.EventList.Relative.MixedTime as EventListMT
import qualified Data.EventList.Absolute.TimeBody as AbsEventList
import qualified Numeric.NonNegative.Wrapper as NonNegW
import qualified Numeric.NonNegative.Class as NonNeg
import qualified Algebra.Transcendental as Trans
import qualified Algebra.RealRing as RealRing
import qualified Algebra.Field as Field
import qualified Algebra.Additive as Additive
import qualified Algebra.ToInteger as ToInteger
import qualified Data.StorableVector as SV
import qualified Data.StorableVector.ST.Strict as SVST
import Foreign.Storable (Storable, )
import qualified Control.Monad.Trans.Writer as MW
import qualified Control.Monad.Trans.State as MS
import qualified Control.Monad.Trans.Class as MT
import Control.Monad.IO.Class (liftIO, )
import qualified Data.Traversable as Trav
import Data.Traversable (Traversable, )
import Data.Foldable (traverse_, )
import Control.Arrow (Arrow, arr, (^<<), (<<^), )
import Control.Category ((.), )
import qualified Data.Map as Map
import qualified Data.List.HT as ListHT
import qualified Data.Maybe as Maybe
import Data.Monoid (Monoid, mempty, mappend, )
import Data.Maybe (maybeToList, )
import Data.Tuple.HT (mapFst, mapPair, )
import NumericPrelude.Numeric
import NumericPrelude.Base hiding ((.), sequence, )
import Prelude ()
-- | An event list pairing strict times with bundles of simultaneous events.
type Events event = EventListTT.T StrictTime [event]

{- |
Fold a stream of events into a piecewise constant control signal.
Every event is mapped through @f@ and becomes the new current value;
@initial@ is held until the first event arrives.
The output lists the value that is in effect over each time span.
-}
initWith ::
   (y -> c) ->
   c ->
   PIO.T
      (Events y)
      (EventListBT.T PC.ShortStrictTime c)
initWith f initial =
   PIO.traverse initial $
   \evs0 -> do
      -- the value carried over from the previous chunk
      y0 <- MS.get
      -- if several events coincide, only the last one survives
      -- (traverse_ overwrites the state for each event in the bundle)
      fmap (PC.subdivideLongStrict . EventListMT.consBody y0) $
         Trav.traverse (\ys -> traverse_ (MS.put . f) ys >> MS.get) evs0
{- |
Select events of interest with @select@ and turn them into
a piecewise constant signal via @f@,
holding @initial@ until the first matching event.
-}
slice ::
   (Check.C event) =>
   (event -> Maybe Int) ->
   (Int -> y) -> y ->
   PIO.T
      (Events event)
      (EventListBT.T PC.ShortStrictTime y)
slice select f initial =
   initWith f initial . mapMaybe select
-- | Filter and map the events inside every bundle of the structure,
-- dropping those for which the function yields 'Nothing'.
mapMaybe ::
   (Arrow arrow, Functor f) =>
   (a -> Maybe b) ->
   arrow (f [a]) (f [b])
mapMaybe = arr . fmap . Maybe.mapMaybe
-- | Remove all 'Nothing' entries from every event bundle.
catMaybes ::
   (Arrow arrow, Functor f) =>
   arrow (f [Maybe a]) (f [a])
catMaybes =
   -- 'Maybe.mapMaybe id' keeps exactly the 'Just' payloads,
   -- which is the definition of 'Maybe.catMaybes'
   arr (fmap (Maybe.mapMaybe id))
-- | Run a stateful action on every single event,
-- threading the state through all bundles of the structure.
traverse ::
   (Traversable f) =>
   s -> (a -> MS.State s b) ->
   PIO.T (f [a]) (f [b])
traverse initial =
   PIO.traverse initial . Trav.traverse . Trav.traverse
-- | Piecewise constant signal from a MIDI controller,
-- mapped linearly onto the interval @bnd@.
controllerLinear ::
   (Check.C event, Field.C y) =>
   MIDIEv.Channel ->
   MIDIEv.Controller ->
   (y,y) -> y ->
   PIO.T
      (Events event)
      (EventListBT.T PC.ShortStrictTime y)
controllerLinear chan ctrl bnd initial =
   slice (Check.controller chan ctrl)
      (MV.controllerLinear bnd) initial

-- | Piecewise constant signal from a MIDI controller,
-- mapped exponentially onto the interval @bnd@
-- (useful for volume and frequency-like parameters).
controllerExponential ::
   (Check.C event, Trans.C y) =>
   MIDIEv.Channel ->
   MIDIEv.Controller ->
   (y,y) -> y ->
   PIO.T
      (Events event)
      (EventListBT.T PC.ShortStrictTime y)
controllerExponential chan ctrl bnd initial =
   slice (Check.controller chan ctrl)
      (MV.controllerExponential bnd) initial

-- | Pitch-bend signal around @center@ with maximal deviation
-- factor @range@.  The initial value is @center@ itself.
pitchBend ::
   (Check.C event, Trans.C y) =>
   MIDIEv.Channel ->
   y -> y ->
   PIO.T
      (Events event)
      (EventListBT.T PC.ShortStrictTime y)
pitchBend chan range center =
   slice (Check.pitchBend chan)
      (MV.pitchBend range center) center

-- | Channel (after-touch) pressure mapped linearly
-- onto the range @(zero, maxVal)@.
channelPressure ::
   (Check.C event, Trans.C y) =>
   MIDIEv.Channel ->
   y -> y ->
   PIO.T
      (Events event)
      (EventListBT.T PC.ShortStrictTime y)
channelPressure chan maxVal initial =
   slice (Check.channelPressure chan)
      (MV.controllerLinear (zero,maxVal)) initial
-- | Combine pitch-bend, modulation wheel and channel pressure
-- of one channel into a single 'BM.T' control signal.
bendWheelPressure ::
   (Check.C event, RealRing.C y, Trans.C y) =>
   MIDIEv.Channel ->
   Int -> y -> y ->
   PIO.T
      (Events event)
      (EventListBT.T PC.ShortStrictTime (BM.T y))
bendWheelPressure chan
      pitchRange wheelDepth pressDepth =
   let toBM = BM.fromBendWheelPressure pitchRange wheelDepth pressDepth
   in  -- start from the default bend/wheel/pressure state
       initWith toBM (toBM BWP.deflt)
       .
       catMaybes
       .
       -- accumulate partial BWP updates into a complete state per event
       traverse BWP.deflt (BWP.check chan)
-- might be moved to synthesizer-core
-- | Emit a constant control value for the duration of the input chunk,
-- ignoring the events themselves.
constant ::
   (Arrow arrow) =>
   y -> arrow (Events event) (EventListBT.T PC.ShortStrictTime y)
constant y = arr $
   EventListBT.singleton y .
   NonNegW.fromNumberMsg "MIDI.CausalIO.constant" .
   fromIntegral .
   EventListTT.duration

-- | Generalization of 'constant' to any measurable input;
-- currently unused (kept with a leading underscore to silence warnings).
_constant ::
   (Arrow arrow, CutG.Read input) =>
   y -> arrow input (EventListBT.T PC.ShortStrictTime y)
_constant y = arr $
   EventListBT.singleton y .
   NonNegW.fromNumberMsg "MIDI.CausalIO.constant" .
   CutG.length
-- | Keep only the note and program-change events of one channel,
-- discarding everything else.
noteEvents ::
   (Check.C event, Arrow arrow) =>
   MIDIEv.Channel ->
   arrow
      (Events event)
      (Events (Either MIDIEv.Program (MIDIEv.NoteBoundary Bool)))
noteEvents chan =
   mapMaybe $ MIDIEv.checkNoteEvent chan

-- | Attach the current program to every note boundary,
-- starting with @initPgm@ until the first program change arrives.
embedPrograms ::
   MIDIEv.Program ->
   PIO.T
      (Events (Either MIDIEv.Program (MIDIEv.NoteBoundary Bool)))
      (Events (MIDIEv.NoteBoundary (Maybe MIDIEv.Program)))
embedPrograms initPgm =
   catMaybes .
   traverse initPgm MIDIEv.embedProgramState
-- | A gate signal chunk carrying the NoteOff velocity when the note ends.
type GateChunk = Gate.Chunk MIDIEv.Velocity
-- | An instrument parameterized by velocity and frequency.
type Instrument y chunk = y -> y -> PIO.T GateChunk chunk
-- | A program-indexed collection of instruments.
type Bank y chunk = MIDIEv.Program -> Instrument y chunk

{-
for distinction of notes with the same pitch
We must use Integer instead of Int, in order to avoid an overflow
that would invalidate the check for unmatched NoteOffs
that is based on comparison of the NoteIds.
We cannot re-use NoteIds easily,
since the events at one time point are handled out of order.
-}
newtype NoteId = NoteId Integer
   deriving (Show, Eq, Ord)

-- | The successor note identifier.
succNoteId :: NoteId -> NoteId
succNoteId (NoteId n) = NoteId (n+1)

-- | Enumerate all identifiers from @start@ (inclusive)
-- up to @afterEnd@ (exclusive).
flattenNoteIdRange :: (NoteId,NoteId) -> [NoteId]
flattenNoteIdRange (start,afterEnd) =
   takeWhile (<afterEnd) $ iterate succNoteId start
-- | The stream of NoteOff events that controls when running notes stop.
newtype NoteOffList =
   NoteOffList {
      unwrapNoteOffList :: Events (NoteBoundary NoteId)
   }

instance CutG.Read NoteOffList where
   -- empty means: no events and zero duration
   null (NoteOffList evs) =
      EventListTT.isPause evs && EventListTT.duration evs == 0
   -- the length of the signal is its total duration
   length = fromIntegral . EventListTT.duration . unwrapNoteOffList

instance CutG.NormalForm NoteOffList where
   -- forcing the head only evaluates the leading time value
   evaluateHead =
      EventListMT.switchTimeL (\t _ -> rnf (NonNegW.toNumber t)) .
      unwrapNoteOffList

instance Monoid NoteOffList where
   mempty = NoteOffList (EventListTT.pause mempty)
   mappend (NoteOffList xs) (NoteOffList ys) =
      NoteOffList (mappend xs ys)
{- |
The function defined here are based on the interpretation
of event lists as piecewise constant signals.
They do not fit to the interpretation of atomic events.
Because e.g. it makes no sense to split an atomic event into two instances by splitAt,
and it is also not clear, whether dropping the first chunk
shall leave a chunk of length zero
or remove that chunk completely.
-}
instance CutG.Transform NoteOffList where
   -- take/drop/splitAt operate on the time axis of the event list;
   -- the conversions assert non-negative lengths
   take n (NoteOffList xs) =
      NoteOffList $
      EventListTT.takeTime
         (NonNegW.fromNumberMsg "NoteOffList.take" $ fromIntegral n) xs
   drop n (NoteOffList xs) =
      NoteOffList $
      EventListTT.dropTime
         (NonNegW.fromNumberMsg "NoteOffList.drop" $ fromIntegral n) xs
   splitAt n (NoteOffList xs) =
      mapPair (NoteOffList, NoteOffList) $
      EventListTT.splitAtTime
         (NonNegW.fromNumberMsg "NoteOffList.splitAtTime" $ fromIntegral n) xs
   -- cf. ChunkySize.dropMarginRem
   dropMarginRem =
      CutG.dropMarginRemChunky
         (fmap fromIntegral . EventListTT.getTimes . unwrapNoteOffList)
   reverse (NoteOffList xs) =
      NoteOffList . EventListTT.reverse $ xs
{- |
Search for the first event satisfying the predicate.
Returns the prefix of the list up to (and excluding) that event,
and the event itself if one was found.
Events that coincide with the found one but come later are dropped.
-}
findEvent ::
   (a -> Bool) ->
   Events a -> (Events a, Maybe a)
findEvent p =
   EventListTT.foldr
      (\t -> mapFst (EventListMT.consTime t))
      (\evs rest ->
         case ListHT.break p evs of
            (prefix, suffix) ->
               mapFst (EventListMT.consBody prefix) $
               case suffix of
                  -- no match in this bundle: continue with the remainder
                  [] -> rest
                  -- match found: stop with an empty tail
                  ev:_ -> (EventListTT.pause mempty, Just ev))
      (EventListBT.empty, Nothing)
-- | Build the gate chunk for one note:
-- the note sustains until a NoteOff with matching pitch and note id
-- appears in the NoteOff stream.
gateFromNoteOffs ::
   (MIDIEv.Pitch, NoteId) ->
   NoteOffList ->
   GateChunk
gateFromNoteOffs pitchNoteId (NoteOffList noteOffs) =
   let dur = EventListTT.duration noteOffs
       (sustain, mEnd) =
          findEvent
             (\bnd ->
                case bnd of
                   -- AllNotesOff -> True
                   NoteBoundary endPitch _ noteId ->
                      pitchNoteId == (endPitch, noteId))
             noteOffs
   in  Gate.chunk dur $
       flip fmap mEnd $ \end ->
          -- note ends within this chunk:
          -- report release time and release velocity
          (EventListTT.duration sustain,
           case end of
              NoteBoundary _ endVel _ -> endVel
              {-
              AllNotesOff -> VoiceMsg.normalVelocity -} )

-- | A note boundary annotated with extra data @a@
-- (e.g. a note identifier or the active program).
data NoteBoundary a =
     NoteBoundary VoiceMsg.Pitch VoiceMsg.Velocity a
--   | AllSoundOff
   deriving (Eq, Show)
{- |
We count NoteIds per pitch,
such that the pair (pitch,noteId) identifies a note.
We treat nested notes in a first-in-first-out order (FIFO).
E.g.

> On, On, On, Off, Off, Off

is interpreted as

> On 0, On 1, On 2, Off 0, Off 1, Off 2

NoteOffs without previous NoteOns are thrown away.
-}
assignNoteIds ::
   (Traversable f) =>
   PIO.T
      (f [MIDIEv.NoteBoundary (Maybe MIDIEv.Program)])
      (f [NoteBoundary (NoteId, Maybe MIDIEv.Program)])
assignNoteIds =
   fmap concat
   ^<<
   -- state: per pitch, the pair (next NoteOff id, next NoteOn id)
   traverse Map.empty (\bnd ->
      case bnd of
         MIDIEv.AllNotesOff -> do
            -- flush: synthesize a NoteOff for every running note
            notes <- MS.get
            MS.put Map.empty
            return $
               concatMap (\(pitch, range) ->
                  map
                     (\noteId ->
                        NoteBoundary pitch VoiceMsg.normalVelocity
                           (noteId, Nothing))
                     (flattenNoteIdRange range)) $
               Map.toList notes
         MIDIEv.NoteBoundary pitch vel mpgm ->
            fmap (fmap (\noteId ->
               NoteBoundary pitch vel (noteId,mpgm))) $
            case mpgm of
               -- NoteOff: only valid if a matching NoteOn is still running
               Nothing -> do
                  mNoteId <- MS.gets (Map.lookup pitch)
                  case mNoteId of
                     Nothing -> return []
                     Just (nextNoteOffId, nextNoteOnId) ->
                        if nextNoteOffId >= nextNoteOnId
                          -- unmatched NoteOff: drop it
                          then return []
                          else do
                             MS.modify (Map.insert pitch (succNoteId nextNoteOffId, nextNoteOnId))
                             return [nextNoteOffId]
               -- NoteOn: allocate the next id for this pitch
               Just _ -> do
                  mNoteId <- MS.gets (Map.lookup pitch)
                  let (nextNoteOffId, nextNoteOnId) =
                         case mNoteId of
                            Nothing -> (NoteId 0, NoteId 0)
                            Just ids -> ids
                  MS.modify (Map.insert pitch (nextNoteOffId, succNoteId nextNoteOnId))
                  return [nextNoteOnId])
{-# INLINE velFreqBank #-}
-- | Adapt a bank that expects amplitude and frequency values
-- to one that accepts raw MIDI velocity and pitch.
velFreqBank ::
   (Trans.C y) =>
   (MIDIEv.Program -> y -> y -> process) ->
   (MIDIEv.Program -> MIDIEv.Velocity -> MIDIEv.Pitch -> process)
velFreqBank bank pgm vel pitch =
   let amplitude = MV.velocity vel
       frequency = MV.frequencyFromPitch pitch
   in  bank pgm amplitude frequency
-- | Split the annotated note stream into the NoteOff events
-- (which later stop running notes)
-- and, for every NoteOn, an instrument process
-- driven by the gate constructed via @makeGate@.
applyInstrumentCore ::
   (Arrow arrow) =>
   ((MIDIEv.Pitch, NoteId) -> noteOffListCtrl -> gateCtrl) ->
   (MIDIEv.Program -> MIDIEv.Velocity -> MIDIEv.Pitch ->
    PIO.T gateCtrl chunk) ->
   arrow
      (Events (NoteBoundary (NoteId, Maybe MIDIEv.Program)))
      (Zip.T
         NoteOffList
         (Events (PIO.T noteOffListCtrl chunk)))
applyInstrumentCore makeGate bank = arr $
   uncurry Zip.Cons .
   mapFst NoteOffList .
   EventListTT.unzip .
   fmap (ListHT.unzipEithers . fmap (\ev ->
      case ev of
--       MIDIEv.AllNotesOff -> Left MIDIEv.AllNotesOff
         NoteBoundary pitch vel (noteId, mpgm) ->
            case mpgm of
               -- no program: this is a NoteOff, route it to the left
               Nothing -> Left $ NoteBoundary pitch vel noteId
               -- NoteOn: instantiate the instrument,
               -- precomposed with its gate generator
               Just pgm ->
                  Right $
                  bank pgm vel pitch
                  <<^
                  makeGate (pitch, noteId)))
-- | Instantiate an instrument from the bank for every note;
-- each instrument is controlled solely by its gate signal,
-- derived from the NoteOff stream.
applyInstrument ::
   (Arrow arrow) =>
   (MIDIEv.Program -> MIDIEv.Velocity -> MIDIEv.Pitch ->
    PIO.T GateChunk chunk) ->
   arrow
      (Events (NoteBoundary (NoteId, Maybe MIDIEv.Program)))
      (Zip.T
         NoteOffList
         (Events (PIO.T NoteOffList chunk)))
applyInstrument = applyInstrumentCore gateFromNoteOffs
-- | A bank of instruments that additionally consume a control signal
-- zipped with their gate.
type ModulatedBank y ctrl chunk =
   MIDIEv.Program -> y -> y ->
   PIO.T (Zip.T GateChunk ctrl) chunk

-- | Like 'applyInstrument', but every instrument also receives
-- the global control signal @ctrl@ alongside its gate.
applyModulatedInstrument ::
   (Arrow arrow, CutG.Read ctrl) =>
   (MIDIEv.Program -> MIDIEv.Velocity -> MIDIEv.Pitch ->
    PIO.T (Zip.T GateChunk ctrl) chunk) ->
   arrow
      (Zip.T
         (Events (NoteBoundary (NoteId, Maybe MIDIEv.Program)))
         ctrl)
      (Zip.T
         (Zip.T NoteOffList ctrl)
         (Events (PIO.T (Zip.T NoteOffList ctrl) chunk)))
applyModulatedInstrument bank =
   -- re-associate: pair the control signal with the NoteOffs
   -- so both can be fed to every running instrument
   (\(Zip.Cons (Zip.Cons noteOffs events) ctrl) ->
      Zip.Cons (Zip.Cons noteOffs ctrl) events)
   ^<<
   Zip.arrowFirst
      (applyInstrumentCore
         (Zip.arrowFirst . gateFromNoteOffs) bank)
{- |
Turn an event list with bundles of elements
into an event list with single events.

ToDo: Move to event-list package?
-}
flatten ::
   (NonNeg.C time) =>
   a ->
   EventListTT.T time [a] ->
   EventListTT.T time a
flatten empty =
   EventListTT.foldr
      EventListMT.consTime
      (\bt xs ->
         uncurry EventListMT.consBody $
         case bt of
            -- an empty bundle is replaced by the supplied dummy element
            [] -> (empty, xs)
            -- further elements of a bundle get zero time distance
            b:bs -> (b, foldr (\c rest -> EventListTT.cons NonNeg.zero c rest) xs bs))
      EventListBT.empty

-- | Flatten bundles of scheduled processes;
-- gaps are filled with a process that emits 'mempty'.
flattenControlSchedule ::
   (Monoid chunk, Arrow arrow) =>
   arrow
      (Zip.T ctrl
         (EventListTT.T StrictTime [PIO.T ctrl chunk]))
      (Zip.T ctrl
         (EventListTT.T StrictTime (PIO.T ctrl chunk)))
flattenControlSchedule = arr $
   \(Zip.Cons ctrl evs) ->
--    Zip.consChecked "flattenControlSchedule" ctrl $
      Zip.Cons ctrl $
      flatten (arr (const mempty)) evs
-- | A running causal process with its hidden state:
-- the step function, the finalizer, and the current state value.
data CausalState a b =
   forall state.
   CausalState
      (a -> state -> IO (b, state))
      (state -> IO ())
      state

-- | Feed one chunk to a running process without termination detection.
_applyChunkSimple :: CausalState a b -> a -> IO (b, CausalState a b)
_applyChunkSimple (CausalState next delete state0) input = do
   (output, state1) <- next input state0
   return (output, CausalState next delete state1)

-- | Feed one chunk to a running process.
-- If the process emits less than it consumes it is considered finished:
-- its finalizer is run and 'Nothing' is returned instead of a new state.
applyChunk ::
   (CutG.Read a, CutG.Read b) =>
   CausalState a b -> a -> IO (b, Maybe (CausalState a b))
applyChunk (CausalState next delete state0) input = do
   (output, state1) <- next input state0
   cs <-
      if CutG.length output < CutG.length input
        then do
           delete state1
           return Nothing
        else return $ Just $ CausalState next delete state1
   return (output, cs)
-- could be moved to synthesizer-core
-- | Run all scheduled processes against the shared control signal.
-- Processes started in earlier chunks are kept in an accumulator
-- and fed the new control data;
-- newly scheduled processes are started at their respective offsets.
applyModulation ::
   (CutG.Transform ctrl, CutG.NormalForm ctrl,
    CutG.Read chunk,
    Monoid time, ToInteger.C time) =>
   PIO.T
      (Zip.T ctrl (EventListTT.T time (PIO.T ctrl chunk)))
      (EventListTT.T time chunk)
applyModulation = PIO.Cons
   (\(Zip.Cons ctrl evs) acc0 -> do
      -- advance all processes that are already running
      acc1 <- mapM (flip applyChunk ctrl) acc0
      let (accChunks, acc2) = unzip acc1
      (newChunks, newAcc) <-
         MW.runWriterT $
         flip MS.evalStateT ctrl $
         EventListTT.mapM
            -- state: the remaining control signal, trimmed as time advances
            (\time -> do
               ctrl_ <- MS.gets (CutG.drop (fromIntegral time))
               MS.put ctrl_
               -- force the head to avoid a space leak in the control signal
               return (case CutG.evaluateHead ctrl_ of () -> time))
            -- start every newly scheduled process on the remaining control
            (\(PIO.Cons next create delete) -> do
               state0 <- liftIO create
               (chunk, state1) <-
                  liftIO . applyChunk (CausalState next delete state0)
                     =<< MS.get
               -- surviving processes are collected via the writer
               MT.lift $ MW.tell $ maybeToList state1
               return chunk)
            evs
      return
         (EventListTM.prependBodyEnd
             (EventList.fromPairList $
              map ((,) mempty) accChunks)
             newChunks,
          Maybe.catMaybes acc2 ++ newAcc))
   (return [])
   -- on shutdown: finalize every process that is still running
   (mapM_ (\(CausalState _ close state) -> close state))
-- move synthesizer-core:CausalIO
-- | Mix all chunks of the event list into one storable vector,
-- adding each chunk at its scheduled offset.
arrangeStorable ::
   (Arrow arrow, Storable a, Additive.C a) =>
   arrow
      (EventListTT.T StrictTime (SV.Vector a))
      (SV.Vector a)
arrangeStorable =
   arr $ \evs ->
      SVST.runSTVector (do
         -- allocate a zeroed buffer covering the whole duration
         v <- SVST.new (fromIntegral $ EventListTT.duration evs) zero
         mapM_ (uncurry $ CutSt.addChunkToBuffer v) $
            AbsEventList.toPairList $
            AbsEventList.mapTime fromIntegral $
            EventList.toAbsoluteEventList 0 $
            -- drop the trailing time: only chunk start times matter
            EventListTM.switchTimeR const evs
         return v)
-- | Full pipeline for one MIDI channel:
-- turn the raw event stream into sound chunks,
-- using unmodulated instruments and starting at program 0.
sequenceCore ::
   (Check.C event, Monoid chunk, CutG.Read chunk, Trans.C y) =>
   MIDIEv.Channel ->
   Bank y chunk ->
   PIO.T (Events event) (EventListTT.T StrictTime chunk)
sequenceCore channel bank =
   applyModulation
   .
   flattenControlSchedule
   .
   applyInstrument (velFreqBank bank)
   .
   assignNoteIds
   .
   embedPrograms (VoiceMsg.toProgram 0)
   .
   noteEvents channel
-- | Like 'sequenceCore', but the instruments additionally receive
-- the control signal @ctrl@.
--
-- Starts at program 0; this is exactly
-- 'sequenceModulatedMultiProgram' with @'VoiceMsg.toProgram' 0@,
-- so the pipeline is defined only once.
sequenceModulated ::
   (Check.C event, Monoid chunk, CutG.Read chunk,
    CutG.Transform ctrl, CutG.NormalForm ctrl, Trans.C y) =>
   MIDIEv.Channel ->
   ModulatedBank y ctrl chunk ->
   PIO.T (Zip.T (Events event) ctrl) (EventListTT.T StrictTime chunk)
sequenceModulated channel =
   -- delegate instead of duplicating the whole processing chain
   sequenceModulatedMultiProgram channel (VoiceMsg.toProgram 0)
-- | Like 'sequenceModulated', but with a configurable initial program.
--
-- This is the velocity/pitch-generic pipeline
-- 'sequenceModulatedMultiProgramVelocityPitch'
-- specialized via 'velFreqBank',
-- which converts MIDI velocity and pitch to amplitude and frequency.
sequenceModulatedMultiProgram ::
   (Check.C event, Monoid chunk, CutG.Read chunk,
    CutG.Transform ctrl, CutG.NormalForm ctrl, Trans.C y) =>
   MIDIEv.Channel ->
   MIDIEv.Program ->
   ModulatedBank y ctrl chunk ->
   PIO.T (Zip.T (Events event) ctrl) (EventListTT.T StrictTime chunk)
sequenceModulatedMultiProgram channel initPgm bank =
   -- delegate instead of duplicating the whole processing chain
   sequenceModulatedMultiProgramVelocityPitch channel initPgm
      (velFreqBank bank)
-- | may replace the other functions
-- The most general sequencing pipeline:
-- the bank receives raw MIDI velocity and pitch
-- and may interpret them freely.
sequenceModulatedMultiProgramVelocityPitch ::
   (Check.C event, Monoid chunk, CutG.Read chunk,
    CutG.Transform ctrl, CutG.NormalForm ctrl) =>
   MIDIEv.Channel ->
   MIDIEv.Program ->
   (MIDIEv.Program -> MIDIEv.Velocity -> MIDIEv.Pitch ->
    PIO.T (Zip.T GateChunk ctrl) chunk) ->
   PIO.T (Zip.T (Events event) ctrl) (EventListTT.T StrictTime chunk)
sequenceModulatedMultiProgramVelocityPitch channel initPgm bank =
   applyModulation
   .
   flattenControlSchedule
   .
   applyModulatedInstrument bank
   .
   Zip.arrowFirst
      (assignNoteIds
       .
       embedPrograms initPgm
       .
       noteEvents channel)
-- | Sequence a channel and mix all note chunks
-- into a single storable vector.
sequenceStorable ::
   (Check.C event, Storable a, Additive.C a, Trans.C y) =>
   MIDIEv.Channel ->
   Bank y (SV.Vector a) ->
   PIO.T (Events event) (SV.Vector a)
sequenceStorable channel bank =
   arrangeStorable
   .
   sequenceCore channel bank
| mpickering/ghc-exactprint | tests/examples/ghc710/Process.hs | bsd-3-clause | 20,510 | 0 | 28 | 5,278 | 5,996 | 3,150 | 2,846 | 569 | 6 |
{-# LANGUAGE FlexibleContexts, OverloadedStrings, RecordWildCards, RankNTypes #-}
{-# OPTIONS_GHC -fno-warn-name-shadowing #-}
module HIndent.Styles.Gibiansky where
import Data.Foldable
import Control.Applicative ((<$>))
import Data.Maybe
import Data.List (unfoldr, isPrefixOf)
import Control.Monad.Trans.Maybe
import Data.Functor.Identity
import Control.Monad.State.Strict hiding (state, State, forM_, sequence_)
import Data.Typeable
import HIndent.Pretty
import HIndent.Types
import Language.Haskell.Exts.Annotated.Syntax
import Language.Haskell.Exts.SrcLoc
import Language.Haskell.Exts.Pretty (prettyPrint)
import Language.Haskell.Exts.Comments
import Prelude hiding (exp, all, mapM_, minimum, and, maximum, concatMap, or, any, sequence_)
-- | User state threaded through the printer:
-- whether single-line rendering is being forced,
-- and whether we are inside a @let@ binding group.
data State = State { gibianskyForceSingleLine :: Bool, gibianskyLetBind :: Bool }

-- | Read a projection of the user state.
userGets :: (State -> a) -> Printer State a
userGets f = gets (f . psUserState)

-- | Apply a function to the user state.
userModify :: (State -> State) -> Printer State ()
userModify f = modify (\s -> s { psUserState = f (psUserState s) })
-- | The printer style: name, metadata, initial user state,
-- the extenders that override default pretty-printing,
-- and the default configuration.
gibiansky :: Style
gibiansky = Style { styleName = "gibiansky"
                  , styleAuthor = "Andrew Gibiansky"
                  , styleDescription = "Andrew Gibiansky's style"
                  , styleInitialState = State { gibianskyForceSingleLine = False, gibianskyLetBind = False }
                  , styleExtenders = [ Extender imp
                                     , Extender modl
                                     , Extender context
                                     , Extender derivings
                                     , Extender typ
                                     , Extender exprs
                                     , Extender rhss
                                     , Extender guardedRhs
                                     , Extender decls
                                     , Extender stmts
                                     , Extender condecls
                                     , Extender alt
                                     , Extender moduleHead
                                     , Extender exportList
                                     , Extender fieldUpdate
                                     , Extender pragmas
                                     , Extender pat
                                     , Extender qualConDecl
                                     ]
                  , styleDefConfig = defaultConfig { configMaxColumns = 100
                                                  , configIndentSpaces = indentSpaces
                                                  , configClearEmptyLines = True
                                                  }
                  , styleCommentPreprocessor = commentPreprocessor
                  }
-- Field accessor for Comment: the comment text.
commentContent :: Comment -> String
commentContent (Comment _ _ content) = content

-- Field accessor for Comment: the source span.
commentSrcSpan :: Comment -> SrcSpan
commentSrcSpan (Comment _ srcSpan _) = srcSpan
-- | Preprocess comments before printing:
-- group consecutive single-line comments into blocks
-- and re-wrap each block to fit within the configured line length.
commentPreprocessor :: MonadState (PrintState s) m => [Comment] -> m [Comment]
commentPreprocessor cs = do
  config <- gets psConfig
  col <- getColumn
  return $ go (fromIntegral col) config cs
  where
    go currentColumn config = concatMap mergeGroup . groupComments Nothing []
      where
        -- Group comments into blocks.
        -- A comment block is the list of comments that are on consecutive lines,
        -- and do not have an empty comment in between them. Empty comments are those with only whitespace.
        -- Empty comments are in their own group.
        groupComments :: Maybe Int -> [Comment] -> [Comment] -> [[Comment]]
        groupComments nextLine accum (comment@(Comment multiline srcSpan str):comments)
          | separateCommentCondition = useAsSeparateCommentGroup
          | beginningOfUnprocessed str =
              -- an "unprocessed" run (e.g. @-markers) is kept verbatim,
              -- each comment becoming its own group
              let (unprocessedLines, postUnprocessed) = span unprocessed comments
                  (endingLine, remLines) = case postUnprocessed of
                    x:xs -> ([x], xs)
                    [] -> ([], [])
                  separateCommentGroups = comment : unprocessedLines ++ endingLine
              in currentGroupAsList ++ map (: []) separateCommentGroups ++ groupComments Nothing [] remLines
          -- comment continues the current block (consecutive line)
          | isNothing nextLine || Just (srcSpanStartLine srcSpan) == nextLine = groupComments nextLine' (comment:accum) comments
          -- comment starts a new block
          | otherwise = currentGroupAsList ++ groupComments (Just $ srcSpanStartLine srcSpan + 1) [comment] comments
          where
            -- NOTE(review): the exact whitespace in these literals matters;
            -- assumed to distinguish indented/quoted comments — verify against upstream
            separateCommentCondition = or [multiline, isWhitespace str, " " `isPrefixOf` str, " >" `isPrefixOf` str]
            useAsSeparateCommentGroup = currentGroupAsList ++ [comment] : groupComments nextLine' [] comments
            nextCommentStartLine = srcSpanStartLine $ commentSrcSpan $ head comments
            currentGroupAsList | null accum = []
                               | otherwise = [reverse accum]
            nextLine' =
              case nextLine of
                Just x -> Just (x + 1)
                Nothing -> Just nextCommentStartLine
        groupComments _ [] [] = []
        groupComments _ accum [] = [reverse accum]

        beginningOfUnprocessed :: String -> Bool
        beginningOfUnprocessed str = any (`isPrefixOf` str) ["@", " @", " @"]

        unprocessed :: Comment -> Bool
        unprocessed (Comment True _ _) = False
        unprocessed (Comment _ _ str) = not $ beginningOfUnprocessed str

        isWhitespace :: String -> Bool
        isWhitespace = all (\x -> x == ' ' || x == '\t')

        -- length of the "--" comment introducer
        commentLen :: Int
        commentLen = length ("--" :: String)

        -- Merge a group of comments into one comment.
        mergeGroup :: [Comment] -> [Comment]
        mergeGroup [] = error "Empty comment group"
        mergeGroup comments@[Comment True _ _] = comments
        mergeGroup comments =
          let
            firstSrcSpan = commentSrcSpan $ head comments
            firstLine = srcSpanStartLine firstSrcSpan
            firstCol = srcSpanStartColumn firstSrcSpan
            columnDelta = firstCol - currentColumn
            maxStartColumn = maximum (map (srcSpanStartColumn . commentSrcSpan) comments)
            -- room left on the line for the comment text itself
            lineLen = fromIntegral (configMaxColumns config) - maxStartColumn - commentLen + columnDelta
            content = breakCommentLines lineLen $ unlines (map commentContent comments)
            srcSpanLines = map (firstLine +) [0 .. length content - 1]
            srcSpans = map (\linum -> firstSrcSpan { srcSpanStartLine = linum, srcSpanEndLine = linum, srcSpanStartColumn = maxStartColumn }) srcSpanLines
          in zipWith (Comment False) srcSpans content
-- | Break a comment string into lines of a maximum character length.
-- Each line starts with a space, mirroring the traditional way of writing comments:
--
--   -- Hello
--   -- Note the space after the '-'
breakCommentLines :: Int -> String -> [String]
breakCommentLines maxLen str
  -- If there's no way to do this formatting, just give up
  | any ((maxLen <) . length) (words str) = [str]
  -- If we already have a line of the appropriate length, leave it alone. This allows us to format
  -- stuff ourselves in some cases.
  | length (lines str) == 1 && length str <= maxLen = [dropTrailingNewlines str]
  | otherwise = unfoldr unfolder (words str)
  where
    -- Generate successive lines, consuming the words iteratively.
    unfolder :: [String] -> Maybe (String, [String])
    unfolder [] = Nothing
    unfolder ws = Just $ go maxLen [] ws
      where
        go :: Int                -- Characters remaining on the line to be used
           -> [String]           -- Accumulator: The words used so far on this line
           -> [String]           -- Unused words
           -> (String, [String]) -- (Generated line, remaining words)
        go remainingLen taken remainingWords =
          case remainingWords of
            -- If no more words remain, we're done
            [] -> (generatedLine, [])
            word:remWords ->
              -- If the next word doesn't fit on this line, line break
              let nextRemaining = remainingLen - length word - 1
              in if nextRemaining < 0
                   then (generatedLine, remainingWords)
                   else go nextRemaining (word : taken) remWords
          where
            generatedLine = ' ' : unwords (reverse taken)

-- | Strip all newline characters from the end of a string.
dropTrailingNewlines :: String -> String
dropTrailingNewlines = reverse . dropWhile (== '\n') . reverse
-- | Number of spaces to indent by.
indentSpaces :: Integral a => a
indentSpaces = 2

-- | Printer to indent one level.
indentOnce :: Printer s ()
indentOnce = replicateM_ indentSpaces space

-- | How many exports to format in a single line.
-- If an export list has more than this, it will be formatted as multiple lines.
maxSingleLineExports :: Integral a => a
maxSingleLineExports = 4
-- | Try to print with the single-line printer;
-- if the result exceeds the maximum column, roll the printer state
-- back and use the multi-line printer instead.
-- When single-line mode is already forced, always use the single-line printer.
attemptSingleLine :: Printer State a -> Printer State a -> Printer State a
attemptSingleLine single multiple = do
  prevState <- get
  if gibianskyForceSingleLine $ psUserState prevState
    then single
    else do
      -- Try printing on one line.
      modifyState $ \st -> st { gibianskyForceSingleLine = True }
      result <- single
      modifyState $ \st -> st { gibianskyForceSingleLine = False }

      -- If it doesn't fit, reprint on multiple lines.
      col <- getColumn
      maxColumns <- configMaxColumns <$> gets psConfig
      if col > maxColumns
        then do
          -- full state rollback discards the single-line output
          put prevState
          multiple
        else return result
--------------------------------------------------------------------------------
-- Extenders

-- | The type of a pretty-printer extender for one AST node type.
type Extend f = f NodeInfo -> Printer State ()

-- | Format whole modules: pragmas, module head, imports and declarations,
-- separated by blank lines where appropriate.
modl :: Extend Module
modl (Module _ mayModHead pragmas imps decls) = do
  onSeparateLines pragmas
  unless (null pragmas) $
    unless (null imps && null decls && isNothing mayModHead) $
      newline >> newline
  forM_ mayModHead $ \modHead -> do
    pretty modHead
    unless (null imps && null decls) (newline >> newline)
  onSeparateLines imps
  unless (null imps || null decls) (newline >> newline)
  unless (null decls) $ do
    -- init/last are safe here: guarded by the null check above
    forM_ (init decls) $ \decl -> do
      pretty decl
      newline
      unless (skipFollowingNewline decl) newline
    pretty (last decls)
modl m = prettyNoExt m
-- | Declarations after which the separating blank line is omitted,
-- so that e.g. a type signature stays attached to its definition.
skipFollowingNewline :: Decl l -> Bool
skipFollowingNewline decl =
  case decl of
    TypeSig{}       -> True
    InlineSig{}     -> True
    AnnPragma{}     -> True
    MinimalPragma{} -> True
    _               -> False
-- | Format pragmas differently (language pragmas).
pragmas :: Extend ModulePragma
pragmas (LanguagePragma _ names) = do
  write "{-# LANGUAGE "
  inter (write ", ") $ map pretty names
  write " #-}"
pragmas (OptionsPragma _ mtool opt) = do
  write "{-# OPTIONS"
  forM_ mtool $ \tool -> do
    write "_"
    string $ prettyPrint tool
  -- NOTE(review): no explicit space before/after @opt@ here;
  -- presumably @opt@ carries its own surrounding whitespace — verify
  string opt
  write "#-}"
pragmas p = prettyNoExt p
-- | Format patterns: tuples, lists and record patterns get
-- the custom layout; everything else falls back to the default.
pat :: Extend Pat
pat (PTuple _ boxed pats) = writeTuple boxed pats
pat (PList _ pats) = singleLineList pats
pat (PRec _ name fields) = recUpdateExpr fields (pretty name) (map prettyCommentCallbacks fields)
pat p = prettyNoExt p
-- | Format import statements.
-- Unqualified imports are padded so module names line up
-- with qualified ones; long import lists are wrapped at the margin.
imp :: Extend ImportDecl
imp ImportDecl{..} = do
  write "import "
  write $ if importQualified
            then "qualified "
            else "          "
  pretty importModule

  forM_ importAs $ \name -> do
    write " as "
    pretty name

  forM_ importSpecs $ \(ImportSpecList _ importHiding specs) -> do
    space
    when importHiding $ write "hiding "
    depend (write "(") $ do
      case specs of
        [] -> return ()
        x:xs -> do
          pretty x
          forM_ xs $ \spec -> do
            write ","
            -- break the line if the next spec would overflow
            col <- getColumn
            len <- prettyColLength spec
            maxColumns <- configMaxColumns <$> gets psConfig
            if col + len > maxColumns
              then newline
              else space
            pretty spec
      write ")"
-- | Return the number of columns between the start and end of a printer.
-- Note that if it breaks lines, the line break is not counted; only column is used.
-- So you probably only want to use this for single-line printers.
prettyColLength :: (Integral a, Pretty ast) => ast NodeInfo -> Printer State a
prettyColLength x = fst <$> sandbox (do
  col <- getColumn
  pretty x
  col' <- getColumn
  return $ fromIntegral $ max (col' - col) 0)

-- | Format contexts with spaces and commas between class constraints.
context :: Extend Context
context (CxTuple _ asserts) =
  parens $ inter (comma >> space) $ map pretty asserts
context ctx = prettyNoExt ctx
-- | Format deriving clauses with spaces and commas between class constraints.
-- A single class is written without parentheses.
derivings :: Extend Deriving
derivings (Deriving _ instHeads) = do
  write "deriving "
  go instHeads
  where
    go insts
      | length insts == 1 = pretty $ head insts
      | otherwise = parens $ inter (comma >> space) $ map pretty insts
-- | Format function type declarations.
typ :: Extend Type
-- For contexts, check whether the context and all following function types
-- are on the same line. If they are, print them on the same line; otherwise
-- print the context and each argument to the function on separate lines.
typ (TyForall _ mforall (Just ctx) rest) = do
  forM_ mforall $ \forallVars -> do
    write "forall "
    spaced $ map pretty forallVars
    write ". "
  if all (sameLine ctx) $ collectTypes rest
    then do
      pretty ctx
      write " => "
      pretty rest
    else do
      col <- getColumn
      pretty ctx
      -- align the "=>" three columns left of the types
      column (col - 3) $ do
        newline
        write "=> "
        indented 3 $ pretty rest
typ (TyTuple _ boxed types) = writeTuple boxed types
typ ty@(TyFun _ from to) =
  -- If the function argument types are on the same line,
  -- put the entire function type on the same line.
  if all (sameLine from) $ collectTypes ty
    then do
      pretty from
      write " -> "
      pretty to
    else do
      -- If the function argument types are on different lines,
      -- write one argument type per line.
      col <- getColumn
      pretty from
      column (col - 3) $ do
        newline
        write "-> "
        indented 3 $ pretty to
typ t = prettyNoExt t
-- | Write a (possibly unboxed) tuple in a single line:
-- boxed as @(a, b)@, unboxed as @(#a, b#)@.
writeTuple :: Pretty ast => Boxed -> [ast NodeInfo] -> Printer State ()
writeTuple boxed vals = parens $ do
  boxed'
  inter (write ", ") $ map pretty vals
  boxed'
  where
    boxed' =
      case boxed of
        Boxed -> return ()
        Unboxed -> write "#"

-- | Whether two annotated AST nodes start on the same source line.
sameLine :: (Annotated ast, Annotated ast') => ast NodeInfo -> ast' NodeInfo -> Bool
sameLine x y = line x == line y
  where
    line :: Annotated ast => ast NodeInfo -> Int
    line = startLine . nodeInfoSpan . ann

-- | Flatten a chain of function arrows into the list of argument
-- types plus the final result type.
collectTypes :: Type l -> [Type l]
collectTypes (TyFun _ from to) = from : collectTypes to
collectTypes ty = [ty]
-- | Dispatch expression formatting to the specialized printers;
-- anything unhandled falls back to the default.
exprs :: Extend Exp
exprs exp@Let{} = letExpr exp
exprs exp@App{} = appExpr exp
exprs exp@Do{} = doExpr exp
exprs exp@List{} = listExpr exp
-- ($) and (<*>) applications get their own layouts,
-- all other infix applications share one printer
exprs exp@(InfixApp _ _ (QVarOp _ (UnQual _ (Symbol _ "$"))) _) = dollarExpr exp
exprs exp@(InfixApp _ _ (QVarOp _ (UnQual _ (Symbol _ "<*>"))) _) = applicativeExpr exp
exprs exp@InfixApp{} = opExpr exp
exprs exp@Lambda{} = lambdaExpr exp
exprs exp@Case{} = caseExpr exp
exprs exp@LCase{} = lambdaCaseExpr exp
exprs exp@If{} = ifExpr exp
exprs exp@MultiIf{} = multiIfExpr exp
exprs (RecUpdate _ exp updates) = recUpdateExpr updates (pretty exp) (map prettyCommentCallbacks updates)
exprs (RecConstr _ qname updates) = recUpdateExpr updates (pretty qname) (map prettyCommentCallbacks updates)
exprs (Tuple _ _ exps) = parens $ inter (write ", ") $ map pretty exps
exprs exp = prettyNoExt exp
-- | Format a multi-way if: one guard alternative per line,
-- each prefixed with @|@.
multiIfExpr :: Exp NodeInfo -> Printer State ()
multiIfExpr (MultiIf _ alts) =
  withCaseContext True $
    depend (write "if ") $
      onSeparateLines' (depend (write "|") . pretty) alts
multiIfExpr _ = error "Not a multi if"

-- | Format a let expression with the bindings hanging off @let @
-- and @in@ aligned with @let@.
letExpr :: Exp NodeInfo -> Printer State ()
letExpr (Let _ binds result) = do
  cols <- depend (write "let ") $ do
    col <- getColumn
    -- mark that we are inside a let, restore the old flag afterwards
    oldLetBind <- userGets gibianskyLetBind
    userModify (\s -> s { gibianskyLetBind = True })
    writeWhereBinds binds
    userModify (\s -> s { gibianskyLetBind = oldLetBind })
    -- align "in" with "let" (4 = length of "let ")
    return $ col - 4
  column cols $ do
    newline
    write "in "
    pretty result
letExpr _ = error "Not a let"
-- | Run a printer with the indentation fixed at the current column
-- (or the current indent level, whichever is larger),
-- flushing a pending end-of-line comment first.
keepingColumn :: Printer State () -> Printer State ()
keepingColumn printer = do
  eol <- gets psEolComment
  when eol newline
  col <- getColumn
  ind <- gets psIndentLevel
  column (max col ind) printer
-- | Format function application.
-- First try a single line; then function and argument on two lines;
-- if even that spans more than two lines, print the function and
-- every argument on its own line.
appExpr :: Exp NodeInfo -> Printer State ()
appExpr app@(App _ f x) = do
  prevState <- get
  prevLine <- getLineNum
  attemptSingleLine singleLine multiLine
  curLine <- getLineNum

  -- If the multiline version takes more than two lines,
  -- print everything with one argument per line.
  when (curLine - prevLine > 1) $ do
    -- Restore to before printing.
    put prevState
    allArgsSeparate <- not <$> canSingleLine (pretty f)
    if allArgsSeparate
      then separateArgs app
      else keepingColumn $ do
        pretty f
        newline
        indented indentSpaces $ pretty x
  where
    singleLine = spaced [pretty f, pretty x]
    multiLine = keepingColumn $ do
      pretty f
      newline
      indentOnce
      pretty x

    -- Speculatively run a printer and report whether it stayed
    -- on one line; the printer state is rolled back afterwards.
    canSingleLine :: Printer State a -> Printer State Bool
    canSingleLine printer = do
      st <- get
      prevLine <- getLineNum
      _ <- printer
      curLine <- getLineNum
      put st
      return $ prevLine == curLine

    -- Separate a function application into the function
    -- and all of its arguments. Arguments are returned in reverse order.
    collectArgs :: Exp NodeInfo -> (Exp NodeInfo, [Exp NodeInfo])
    collectArgs (App _ g y) =
      let (fun, args) = collectArgs g
      in (fun, y : args)
    collectArgs nonApp = (nonApp, [])

    -- Print the function followed by one argument per line.
    separateArgs :: Exp NodeInfo -> Printer State ()
    separateArgs expr =
      let (fun, args) = collectArgs expr
      in keepingColumn $ do
        pretty fun
        newline
        indented indentSpaces $ lined $ map pretty $ reverse args
appExpr _ = error "Not an app"
-- | Print a @do@ block: the keyword on its own line, then every
-- statement indented one level with user blank lines preserved.
doExpr :: Exp NodeInfo -> Printer State ()
doExpr (Do _ stmts) =
  write "do" >> newline >> indented indentSpaces (onSeparateLines stmts)
doExpr _ = error "Not a do"
-- | Print a list literal, preferring one line and falling back to one
-- element per line.
listExpr :: Exp NodeInfo -> Printer State ()
listExpr (List _ els) =
  let short = singleLineList els
      long  = multiLineList els
  in attemptSingleLine short long
listExpr _ = error "Not a list"
-- | Write a list literal on a single line: @[a, b, c]@.
singleLineList :: Pretty a => [a NodeInfo] -> Printer State ()
singleLineList exps =
  sequence_ [write "[", inter (write ", ") (map pretty exps), write "]"]
-- | Write a list literal with one element per line in leading-comma
-- style:
--
-- > [ a
-- > , b
-- > ]
multiLineList :: [Exp NodeInfo] -> Printer State ()
multiLineList [] = write "[]"
multiLineList (first:exps) = keepingColumn $ do
  write "[ "
  pretty first
  forM_ exps $ \el -> do
    newline
    write ", "
    pretty el
  newline
  write "]"
-- | Print an infix application (e.g. with @$@) whose right operand may
-- move to the next line.  A newline is forced when the operand is a
-- @case@, or when the user already placed it on a later line; the
-- continuation is indented one extra level past the current column.
dollarExpr :: Exp NodeInfo -> Printer State ()
dollarExpr (InfixApp _ left op right) = do
  pretty left
  space
  pretty op
  if needsNewline right
    then do
      newline
      col <- getColumn
      ind <- gets psIndentLevel
      column (max col ind + indentSpaces) $ pretty right
    else do
      space
      pretty right
  where
    -- A case expression always starts on its own line.
    needsNewline Case{} = True
    -- Otherwise honour the user's original line break, if any.
    needsNewline exp = lineDelta exp op > 0
dollarExpr _ = error "Not an application"
-- | Print an applicative chain, @f <$> a <*> b <*> ...@, either on one
-- line or with each @<*>@ argument on its own line.  Falls through to
-- the default printer when the expression is not such a chain.
applicativeExpr :: Exp NodeInfo -> Printer State ()
applicativeExpr exp@InfixApp{} =
  case applicativeArgs of
    Just (first:second:rest) ->
      attemptSingleLine (singleLine first second rest) (multiLine first second rest)
    _ -> prettyNoExt exp
  where
    -- Single-line rendering.  BUGFIX: the previous version always wrote
    -- a @<*>@ after the second argument, emitting a dangling " <*> "
    -- (plus a trailing space) for a bare @f <$> a@ where 'rest' is
    -- empty.  Now the operator is emitted only in front of each element
    -- of 'rest'; output for non-empty 'rest' is unchanged.
    singleLine :: Exp NodeInfo -> Exp NodeInfo -> [Exp NodeInfo] -> Printer State ()
    singleLine first second rest = do
      spaced [pretty first, write "<$>", pretty second]
      forM_ rest $ \val -> do
        write " <*> "
        pretty val
    -- Multi-line rendering: first applicand, then one operator+argument
    -- pair per line hanging under @<$>@.
    multiLine :: Exp NodeInfo -> Exp NodeInfo -> [Exp NodeInfo] -> Printer State ()
    multiLine first second rest = do
      pretty first
      depend space $ do
        write "<$> "
        pretty second
        forM_ rest $ \val -> do
          newline
          write "<*> "
          pretty val
    -- All expressions joined by <$>/<*>, or Nothing when the tree is not
    -- built exclusively from those operators.
    applicativeArgs :: Maybe [Exp NodeInfo]
    applicativeArgs = collectApplicativeExps exp
    collectApplicativeExps :: Exp NodeInfo -> Maybe [Exp NodeInfo]
    collectApplicativeExps (InfixApp _ left op right)
      | isFmap op = return [left, right]
      | isAp op = do
          start <- collectApplicativeExps left
          return $ start ++ [right]
      | otherwise = Nothing
    collectApplicativeExps _ = Nothing
    isFmap :: QOp NodeInfo -> Bool
    isFmap (QVarOp _ (UnQual _ (Symbol _ "<$>"))) = True
    isFmap _ = False
    isAp :: QOp NodeInfo -> Bool
    isAp (QVarOp _ (UnQual _ (Symbol _ "<*>"))) = True
    isAp _ = False
applicativeExpr _ = error "Not an application"
-- | Print an infix operator expression, either on one line, with
-- user-specified line breaks, or in operator-column layout (one operand
-- per line, operator trailing) when a long homogeneous chain overflows.
opExpr :: Exp NodeInfo -> Printer State ()
opExpr expr@(InfixApp _ left op right) = keepingColumn $ do
  let deltaLeft = lineDelta op left
      deltaRight = lineDelta right op
  -- If this starts out as a single line expression, try to keep it as a single line expression. Break
  -- it up over multiple lines if it doesn't fit using operator columns, but only when all the
  -- operators are the same.
  if deltaLeft == 0 && deltaRight == 0 && numOperatorUses op expr >= 2
    then attemptSingleLine opSingle opMulti
    else userSpecified deltaLeft deltaRight
  where
    -- Use user-specified spacing for the newlines in the operator
    userSpecified deltaLeft deltaRight = do
      pretty left
      if deltaLeft == 0
        then space
        else replicateM_ deltaLeft newline
      pretty op
      if deltaRight == 0
        then space
        else replicateM_ deltaRight newline
      pretty right
    -- Write the entire infix expression on one line.
    opSingle = sequence_ [pretty left, space, pretty op, space, pretty right]
    -- Use operator column layout.
    opMulti = do
      let opArguments = collectOpArguments op expr
      forM_ (init opArguments) $ \arg -> do
        pretty arg
        space
        pretty op
        newline
      pretty (last opArguments)
    -- Count the number of times an infix operator is used in a row.
    numOperatorUses op e = length (collectOpArguments op e) - 1
    -- Collect all arguments to an infix operator.
    collectOpArguments op expr'@(InfixApp _ left' op' right')
      | void op == void op' = collectOpArguments op left' ++ collectOpArguments op right'
      | otherwise = [expr']
    collectOpArguments _ expr' = [expr']
opExpr exp = prettyNoExt exp
-- | Print a lambda.  The body stays on the same line when it fits and
-- has no comment attached before it; otherwise it is indented on the
-- next line.
lambdaExpr :: Exp NodeInfo -> Printer State ()
lambdaExpr (Lambda _ pats exp) = do
  write "\\"
  spaced $ map pretty pats
  write " ->"
  if any isBefore $ nodeInfoComments $ ann exp
    then multi
    else attemptSingleLine (space >> pretty exp) multi
  where multi = do
          newline
          indented indentSpaces $ pretty exp
        isBefore com = comInfoLocation com == Just Before
lambdaExpr _ = error "Not a lambda"
-- | Print a @case ... of@ expression with the alternatives beneath it.
caseExpr :: Exp NodeInfo -> Printer State ()
caseExpr (Case _ exp alts) = do
  depend (write "case ") $ do
    pretty exp
    write " of"
  newline
  writeCaseAlts alts
caseExpr _ = error "Not a case"
-- | Print a @\\case@ expression followed by its alternatives.
lambdaCaseExpr :: Exp NodeInfo -> Printer State ()
lambdaCaseExpr (LCase _ alts) =
  write "\\case" >> newline >> writeCaseAlts alts
lambdaCaseExpr _ = error "Not a lambda case"
-- | Print @if@/@then@/@else@ with each branch on its own line, hanging
-- under the @if@ keyword.
ifExpr :: Exp NodeInfo -> Printer State ()
ifExpr (If _ cond thenExpr elseExpr) =
  depend (write "if") $ do
    space
    pretty cond
    newline
    write "then "
    pretty thenExpr
    newline
    write "else "
    pretty elseExpr
ifExpr _ = error "Not an if statement"
-- | Print case alternatives.  When every alternative is unguarded and
-- fits on a single line, the patterns are padded to equal width so the
-- arrows line up; otherwise each alternative prints normally.  Blank
-- lines the user wrote between alternatives are preserved.
writeCaseAlts :: [Alt NodeInfo] -> Printer State ()
writeCaseAlts alts = do
  allSingle <- and <$> mapM isSingle alts
  withCaseContext True $ indented indentSpaces $ do
    prettyPr <- if allSingle
                  then do
                    -- NOTE(review): 'maximum' of an empty list is never
                    -- forced here: for empty 'alts' the printer below
                    -- returns without using 'prettyPr' -- confirm.
                    maxPatLen <- maximum <$> mapM (patternLen . altPattern) alts
                    return $ prettyCase (Just maxPatLen)
                  else return $ prettyCase Nothing
    case alts of
      [] -> return ()
      first:rest -> do
        printComments Before first
        prettyPr first
        printComments After first
        -- Pair each alternative with its predecessor to honour spacing.
        forM_ (zip alts rest) $ \(prev, cur) -> do
          replicateM_ (max 1 $ lineDelta cur prev) newline
          printComments Before cur
          prettyPr cur
          printComments After cur
  where
    -- Does this alternative render on one line (and is unguarded)?
    isSingle :: Alt NodeInfo -> Printer State Bool
    isSingle alt' = fst <$> sandbox
      (do
         line <- gets psLine
         pretty alt'
         line' <- gets psLine
         return $ not (isGuarded (altRhs alt')) && line == line')
    altPattern :: Alt l -> Pat l
    altPattern (Alt _ p _ _) = p
    altRhs :: Alt l -> Rhs l
    altRhs (Alt _ _ r _) = r
    isGuarded :: Rhs l -> Bool
    isGuarded GuardedRhss{} = True
    isGuarded UnGuardedRhs{} = False
    -- Rendered width of a pattern, measured in a sandbox.
    patternLen :: Pat NodeInfo -> Printer State Int
    patternLen pat = fromIntegral <$> fst <$> sandbox
      (do
         col <- getColumn
         pretty pat
         col' <- getColumn
         return $ col' - col)
    -- Print one alternative, optionally padding its pattern to 'mpatlen'.
    prettyCase :: Maybe Int -> Alt NodeInfo -> Printer State ()
    prettyCase mpatlen (Alt _ p galts mbinds) = do
      -- Padded pattern
      case mpatlen of
        Just patlen -> do
          col <- getColumn
          pretty p
          col' <- getColumn
          replicateM_ (patlen - fromIntegral (col' - col)) space
        Nothing -> pretty p
      case galts of
        UnGuardedRhs{} -> pretty galts
        GuardedRhss{} -> do
          newline
          indented indentSpaces $ pretty galts
      -- Optional where clause!
      forM_ mbinds $ \binds -> do
        newline
        indented indentSpaces $ depend (write "where ") (pretty binds)
-- | Like 'pretty', but runs the supplied callback before and after the
-- node (interleaved with the node's own comments), letting callers
-- inject text such as a leading comma around commented AST nodes.
-- Extender dispatch mirrors the standard hindent pretty machinery.
prettyCommentCallbacks :: (Pretty ast,MonadState (PrintState s) m) => ast NodeInfo -> (ComInfoLocation -> m ()) -> m ()
prettyCommentCallbacks a f =
  do st <- get
     case st of
       PrintState{psExtenders = es,psUserState = s} ->
         do
           printComments Before a
           f Before
           depend
             (case listToMaybe (mapMaybe (makePrinter s) es) of
                Just (Printer m) ->
                  modify (\s' ->
                            fromMaybe s'
                                      (runIdentity (runMaybeT (execStateT m s'))))
                Nothing -> prettyNoExt a)
             (f After >> printComments After a)
  where makePrinter _ (Extender f) =
          case cast a of
            Just v -> Just (f v)
            Nothing -> Nothing
        makePrinter s (CatchAll f) = f s a
-- | Print a record construction/update: @expr { f1 = v1, ... }@ on one
-- line when possible, otherwise one field per line in leading-comma
-- style.  Each update is a printer parameterised by a comment callback
-- so the comma can be re-positioned around field comments.
recUpdateExpr :: Foldable f => [f NodeInfo] -> Printer State () -> [(ComInfoLocation -> Printer State ()) -> Printer State ()] -> Printer State ()
recUpdateExpr ast expWriter updates
  | null updates = do
      expWriter
      write "{}"
  | any hasComments ast = mult
  | otherwise = attemptSingleLine single mult
  where
    single = do
      expWriter
      write " { "
      inter (write ", ") updates'
      write " }"
    mult = do
      expWriter
      newline
      indented indentSpaces $ keepingColumn $ do
        write "{ "
        head updates'
        forM_ (tail updates) $ \update -> do
          newline
          update commaAfterComment
        newline
        write "}"
    -- The field printers with the comment callback disabled.
    updates' = map ($ const $ return ()) updates
-- | Comment callback that writes the leading comma after any comment
-- printed before a field (shared by multi-line record layouts).
commaAfterComment :: ComInfoLocation -> Printer State ()
commaAfterComment loc = case loc of
  Before -> write ", "
  After -> return ()
-- | Print right-hand sides.  An unguarded RHS breaks onto a new line
-- when the user did (or the body is a @let@/@case@); guarded RHSs print
-- one guard per line, with guard statements comma-separated.
rhss :: Extend Rhs
rhss (UnGuardedRhs rhsLoc exp) = do
  letBind <- userGets gibianskyLetBind
  let exp'
        | lineBreakAfterRhs rhsLoc exp =
            indented indentSpaces $ do
              newline
              pretty exp
        | letBind =
            depend space (pretty exp)
        | otherwise = space >> pretty exp
  -- In a let binding the body hangs under the separator.
  if letBind
    then depend (space >> rhsSeparator) exp'
    else space >> rhsSeparator >> exp'
rhss (GuardedRhss _ rs) =
  flip onSeparateLines' rs $ \a@(GuardedRhs rhsLoc stmts exp) -> do
    let manyStmts = length stmts > 1
        -- Separator and body; the separator moves to its own line when
        -- there are multiple guard statements.
        remainder = do
          if manyStmts then newline else space
          rhsSeparator
          if not manyStmts && lineBreakAfterRhs rhsLoc exp
            then newline >> indented indentSpaces (pretty exp)
            else space >> pretty exp
        writeStmts =
          case stmts of
            x:xs -> do
              pretty x
              forM_ xs $ \x -> write "," >> newline >> pretty x
            [] -> return ()
    printComments Before a
    if manyStmts
      then do
        depend (write "| ") writeStmts
        remainder
      else
        depend (write "| ") $ writeStmts >> remainder
    printComments After a
-- | Should the RHS body start on a new line?  Always for @let@ and
-- @case@ bodies, otherwise only when the user wrote the body on a later
-- line than the separator location.
lineBreakAfterRhs :: NodeInfo -> Exp NodeInfo -> Bool
lineBreakAfterRhs rhsLoc exp = onNextLine exp
  where
    -- Cannot use lineDelta because we need to look at rhs start line, not end line
    prevLine = srcSpanStartLine . srcInfoSpan . nodeInfoSpan $ rhsLoc
    curLine = astStartLine exp
    emptyLines = curLine - prevLine
    onNextLine Let{} = True
    onNextLine Case{} = True
    onNextLine _ = emptyLines > 0
-- | Print a single guarded RHS: comma-separated guard statements,
-- then the separator and the body via 'rhsRest'.
guardedRhs :: Extend GuardedRhs
guardedRhs (GuardedRhs _ stmts exp) = do
  indented 1 $ prefixedLined "," (map (\p -> space >> pretty p) stmts)
  space
  rhsRest exp
-- | Print the RHS separator (@=@ or @->@, depending on case context)
-- followed by a space and the body.
rhsRest :: Pretty ast => ast NodeInfo -> Printer State ()
rhsRest exp = rhsSeparator >> space >> pretty exp
-- | Statements: @let@ statements hang their bindings under @let @;
-- everything else uses the default printer.
stmts :: Extend Stmt
stmts (LetStmt _ binds) = depend (write "let ") (writeWhereBinds binds)
stmts stmt = prettyNoExt stmt
-- | Declarations: custom layout for data declarations, pattern and
-- function bindings, and class declarations; anything else falls back
-- to the default printer.
decls :: Extend Decl
decls (DataDecl _ dataOrNew Nothing declHead constructors mayDeriving) = do
  depend (pretty dataOrNew >> space) $ do
    pretty declHead
    case constructors of
      [] -> return ()
      [x] -> do
        write " ="
        pretty x
      (x:xs) ->
        -- Multiple constructors: one per line, separated by @|@.
        depend space $ do
          write "="
          pretty x
          forM_ xs $ \constructor -> do
            newline
            write "|"
            pretty constructor
    -- Deriving clause on its own indented line.
    forM_ mayDeriving $ \deriv -> do
      newline
      indented indentSpaces $ pretty deriv
decls (PatBind _ pat rhs mbinds) = funBody [pat] rhs mbinds
decls (FunBind _ matches) =
  flip onSeparateLines' matches $ \match -> do
    printComments Before match
    -- Infix definitions print the left operand and a (possibly
    -- backticked) operator name before the remaining patterns.
    (writeName, pat, rhs, mbinds) <- case match of
      Match _ name pat rhs mbinds -> return (pretty name, pat, rhs, mbinds)
      InfixMatch _ left name pat rhs mbinds -> do
        pretty left
        space
        let writeName = case name of
              Symbol _ name' -> string name'
              Ident _ name' -> do
                write "`"
                string name'
                write "`"
        return (writeName, pat, rhs, mbinds)
    writeName
    space
    funBody pat rhs mbinds
    printComments After match
decls (ClassDecl _ ctx dhead fundeps mayDecls) = do
  let decls = fromMaybe [] mayDecls
      noDecls = null decls
  -- Header
  depend (write "class ") $
    depend (maybeCtx ctx) $
    depend (pretty dhead >> space) $
    depend (unless (null fundeps) (write " | " >> commas (map pretty fundeps))) $
    unless noDecls (write "where")
  -- Class method declarations
  unless noDecls $ do
    newline
    indentSpaces <- getIndentSpaces
    indented indentSpaces (onSeparateLines decls)
decls decl = prettyNoExt decl
-- | Print a qualified constructor declaration: an explicit @forall@ for
-- existentially quantified type variables, an optional context, then
-- the constructor itself.
qualConDecl :: Extend QualConDecl
qualConDecl (QualConDecl _ tyvars ctx d) =
  depend (unless (null (fromMaybe [] tyvars))
                 (do write " forall "
                     spaced (map pretty (fromMaybe [] tyvars))
                     write ". "))
         (depend (maybeCtx' ctx)
                 (pretty d))
  where
    -- Print the context followed by @=>@ when present.
    maybeCtx' = maybe (return ())
                      (\p ->
                         pretty p >>
                         write " =>")
-- | Print the patterns, right-hand side and optional @where@ clause
-- shared by function and pattern bindings.
funBody :: [Pat NodeInfo] -> Rhs NodeInfo -> Maybe (Binds NodeInfo) -> Printer State ()
funBody pat rhs mbinds = do
  spaced $ map pretty pat
  withCaseContext False $
    case rhs of
      UnGuardedRhs{} -> pretty rhs
      GuardedRhss{} -> do
        newline
        indented indentSpaces $ pretty rhs
  -- Process the binding group, if it exists.
  forM_ mbinds $ \binds -> do
    newline
    -- Add an extra newline after do blocks.
    when (isDoBlock rhs) newline
    indented indentSpaces $ do
      write "where"
      newline
      indented indentSpaces $ writeWhereBinds binds
-- | Print a binding group, preserving user blank lines between bindings
-- and any comments attached to the group node itself.
writeWhereBinds :: Binds NodeInfo -> Printer State ()
writeWhereBinds ds@(BDecls _ binds) = do
  printComments Before ds
  onSeparateLines binds
  printComments After ds
writeWhereBinds binds = prettyNoExt binds
-- Print all the ASTs on separate lines, respecting user spacing.
onSeparateLines :: (Pretty ast, Annotated ast) => [ast NodeInfo] -> Printer State ()
onSeparateLines = onSeparateLines' pretty
-- | Like 'onSeparateLines' but with an explicit printer per element.
-- Source spans are repaired first (see 'fixSpans') so the blank-line
-- computation between consecutive nodes is reliable.
onSeparateLines' :: Annotated ast => (ast NodeInfo -> Printer State ()) -> [ast NodeInfo] -> Printer State ()
onSeparateLines' _ [] = return ()
onSeparateLines' pretty' vals = do
  let vals' = map (amap fixSpans) vals
      (first:rest) = vals'
  pretty' first
  -- Pair each node with its predecessor to reproduce blank lines.
  forM_ (zip vals' rest) $ \(prev, cur) -> do
    replicateM_ (max 1 $ lineDelta cur prev) newline
    pretty' cur
-- | Repair a node's source span: haskell-src-exts sometimes records a
-- bogus final srcInfoPoint (start column after end column on the same
-- line); when that point coincides with the node's end, shrink the span
-- back to the end of the preceding point.
fixSpans :: NodeInfo -> NodeInfo
fixSpans info =
  let infoSpan = nodeInfoSpan info
      srcSpan = srcInfoSpan infoSpan
      points = srcInfoPoints infoSpan
      -- NOTE: 'last'/'init' on 'points' are only forced when
      -- @length points > 1@ holds below, so they cannot crash here.
      lastPt = last points
      prevLastPt = last (init points)
      prevPtEnd = (srcSpanEndLine prevLastPt, srcSpanEndColumn prevLastPt)
      lastPtEndLoc = (srcSpanEndLine lastPt, srcSpanEndColumn lastPt)
      invalidLastPt = srcSpanStartLine lastPt == srcSpanEndLine lastPt &&
                      srcSpanStartColumn lastPt > srcSpanEndColumn lastPt
      infoEndLoc = (srcSpanEndLine srcSpan, srcSpanEndColumn srcSpan)
  in if length points > 1 && lastPtEndLoc == infoEndLoc && invalidLastPt
       then info { nodeInfoSpan = infoSpan { srcInfoSpan = setEnd srcSpan prevPtEnd } }
       else info
  where
    setEnd (SrcSpan fname startL startC _ _) (endL, endC) = SrcSpan fname startL startC endL endC
-- | First source line occupied by a node, counting comments placed
-- before it as part of the node.
astStartLine :: Annotated ast => ast NodeInfo -> Int
astStartLine decl =
  let info = ann decl
      comments = nodeInfoComments info
      befores = filter ((== Just Before) . comInfoLocation) comments
      commentStartLine (Comment _ sp _) = srcSpanStartLine sp
  in if null befores
       then startLine $ nodeInfoSpan info
       else minimum $ map (commentStartLine . comInfoComment) befores
-- | Whether an unguarded right-hand side is a @do@ block.
isDoBlock :: Rhs l -> Bool
isDoBlock rhs =
  case rhs of
    UnGuardedRhs _ Do{} -> True
    _ -> False
-- | Print constructor declarations.  Plain constructors go inline;
-- record constructors prefer a single line and fall back to the
-- multi-line leading-comma layout (forced when comments are attached).
condecls :: Extend ConDecl
condecls (ConDecl _ name bangty) =
  depend (space >> pretty name) $
  forM_ bangty $ \ty -> space >> pretty ty
condecls decl@(RecDecl _ name fields) = if hasComments decl
                                          then multiRec
                                          else attemptSingleLine singleRec multiRec
  where
    singleRec = space >> depend (pretty name >> space) recBody
    multiRec = do
      newline
      indented indentSpaces $ keepingColumn $ do
        pretty name
        newline
        indented indentSpaces recBody
    recBody = do
      write "{ "
      writeFields fields
      write "}"
    writeFields [] = return ()
    writeFields [x] = do
      pretty x
      -- No trailing space when an end-of-line comment is pending.
      eol <- gets psEolComment
      unless eol space
    writeFields (first:rest) = do
      singleLine <- gets (gibianskyForceSingleLine . psUserState)
      pretty first
      unless singleLine newline
      forM_ rest $ \field -> do
        prettyCommentCallbacks field commaAfterComment
        unless singleLine newline
        when singleLine space
condecls other = prettyNoExt other
-- | Whether any node of the AST fragment carries attached comments.
hasComments :: Foldable ast => ast NodeInfo -> Bool
hasComments = not . all (null . nodeInfoComments)
-- | Print one case alternative: pattern, right-hand side, and the
-- optional @where@ clause indented below it.
alt :: Extend Alt
alt (Alt _ p rhs mbinds) = do
  pretty p
  case rhs of
    UnGuardedRhs{} -> pretty rhs
    GuardedRhss{} -> indented indentSpaces $ pretty rhs
  forM_ mbinds $ \binds -> do
    newline
    indented indentSpaces $
      depend (write "where ") (pretty binds)
-- | Print a module header: optional deprecation warning, module name,
-- optional export list, then @ where@.
moduleHead :: Extend ModuleHead
moduleHead (ModuleHead _ name mwarn mexports) = do
  mapM_ pretty mwarn
  write "module "
  pretty name
  mapM_ (\exports -> space >> pretty exports) mexports
  write " where"
-- | Print an export list: a single parenthesised line for short lists,
-- otherwise one export per line (each followed by a comma — a trailing
-- comma before @)@ is valid Haskell), double-indented, preserving the
-- user's blank lines.
exportList :: Extend ExportSpecList
exportList (ExportSpecList _ exports) = do
  write "("
  if length exports <= maxSingleLineExports
    then do
      inter (write ", ") $ map pretty exports
      write ")"
    else indented indentSpaces' $ do
      -- First export
      let first:rest = exports
      newline
      pretty first
      write ","
      forM_ (zip rest exports) $ \(cur, prev) -> do
        replicateM_ (max 1 $ lineDelta cur prev) newline
        pretty cur
        write ","
      newline
      write ")"
  where
    indentSpaces' = 2 * indentSpaces
-- | Number of line breaks between the end of @prev@ and the start of
-- @cur@ (leading comments of @cur@ count as part of it).
lineDelta :: (Annotated ast1, Annotated ast2) => ast1 NodeInfo -> ast2 NodeInfo -> Int
lineDelta cur prev = astStartLine cur - prevEndLine
  where
    prevEndLine = srcSpanEndLine (srcInfoSpan (nodeInfoSpan (ann prev)))
-- | Print a record field assignment as @field = value@; other update
-- forms use the default printer.
fieldUpdate :: Extend FieldUpdate
fieldUpdate (FieldUpdate _ name val) =
  pretty name >> write " = " >> pretty val
fieldUpdate upd = prettyNoExt upd
| gittywithexcitement/hindent | src/HIndent/Styles/Gibiansky.hs | bsd-3-clause | 38,201 | 0 | 26 | 11,855 | 11,380 | 5,443 | 5,937 | 901 | 8 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RankNTypes #-}
-- | Get information on modules, identifiers, etc.
module GhciInfo (collectInfo,getModInfo,showppr) where
import ConLike
import Control.Exception
import Control.Monad
import qualified CoreUtils
import Data.Data
import qualified Data.Generics
import Data.List
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.Time
import DataCon
import Desugar
import DynFlags
import GHC
import GhcMonad
import GhciTypes
import HscTypes
import Intero.Compat
import Outputable
import Prelude hiding (mod)
import System.Directory
import TcHsSyn
import Var
#if __GLASGOW_HASKELL__ <= 802
import NameSet
#endif
#if MIN_VERSION_ghc(7,8,3)
#else
import Bag
#endif
-- | Collect type info data for the loaded modules.
collectInfo :: (GhcMonad m)
            => Map ModuleName ModInfo -> [ModuleName] -> m (Map ModuleName ModInfo)
collectInfo ms loaded =
  do df <- getSessionDynFlags
     -- Generate for all modules in interpreted mode.
     invalidated <-
       liftIO (if hscTarget df == HscInterpreted
                  then return loaded
                  else filterM cacheInvalid loaded)
     if null invalidated
        then return ms
        else do liftIO (putStrLn ("Collecting type info for " ++
                                  show (length invalidated) ++
                                  " module(s) ... "))
                -- Re-generate info per module; a failure for one module
                -- is logged and leaves its previous entry untouched.
                foldM (\m name ->
                         gcatch (do info <- getModInfo name
                                    return (M.insert name info m))
                                (\(e :: SomeException) ->
                                   do liftIO (putStrLn ("Error while getting type info from " ++
                                                        showppr df name ++
                                                        ": " ++ show e))
                                      return m))
                      ms
                      invalidated
  -- A cached entry is invalid when its interface/object file is missing
  -- or newer than the recorded timestamp.
  where cacheInvalid name =
          case M.lookup name ms of
            Nothing -> return True
            Just mi ->
              do let fp =
#if MIN_VERSION_ghc(8,0,4)
                       ml_hi_file (ms_location (modinfoSummary mi))
#else
                       ml_obj_file (ms_location (modinfoSummary mi))
#endif
                     last' = modinfoLastUpdate mi
                 exists <- doesFileExist fp
                 if exists
                    then do mod <- getModificationTime fp
                            return (mod > last')
                    else return True
-- | Get info about the module: summary, types, etc.
getModInfo :: (GhcMonad m) => ModuleName -> m ModInfo
getModInfo name =
  do m <- getModSummary name
     p <- parseModule m
     let location = getModuleLocation (parsedSource p)
     typechecked <- typecheckModuleSilent p
     -- NOTE(review): partial pattern — this crashes (lazily, when
     -- 'imports' is used) if the renamer output was not kept.
     -- Presumably always Just for modules type-checked here; confirm.
     let Just (_, imports, _, _) = renamedSource typechecked
     allTypes <- processAllTypeCheckedModule typechecked
     let i = tm_checked_module_info typechecked
     now <- liftIO getCurrentTime
     return (ModInfo m allTypes i now imports location)
-- | Type-check the module without logging messages.
-- Achieved by swapping the summary's 'log_action' for a no-op.
typecheckModuleSilent :: GhcMonad m => ParsedModule -> m TypecheckedModule
#if MIN_VERSION_ghc(8,0,1)
typecheckModuleSilent parsed = do
  typecheckModule
    parsed
    { GHC.pm_mod_summary =
        (GHC.pm_mod_summary parsed)
        { HscTypes.ms_hspp_opts =
            (HscTypes.ms_hspp_opts (GHC.pm_mod_summary parsed))
            {log_action = nullLogAction}
        }
    }
  where
    -- Discard all compiler output.
    nullLogAction _df _reason _sev _span _style _msgdoc = return ()
#else
typecheckModuleSilent parsed = do
  typecheckModule
    parsed
    { GHC.pm_mod_summary =
        (GHC.pm_mod_summary parsed)
        { HscTypes.ms_hspp_opts =
            (HscTypes.ms_hspp_opts (GHC.pm_mod_summary parsed))
            {log_action = nullLogAction}
        }
    }
  where
    -- Pre-8.0 'log_action' takes one fewer argument.
    nullLogAction _df _reason _sev _span _style = return ()
#endif
-- | Source span of the module name in the header, or 'noSrcSpan' when
-- the file has no explicit @module@ declaration.
getModuleLocation :: ParsedSource -> SrcSpan
getModuleLocation pSource =
  maybe noSrcSpan getLoc (hsmodName (unLoc pSource))
-- | Get ALL source spans in the module.
-- Collects binds, expressions and patterns with their types, and sorts
-- so that contained spans precede the spans that enclose them.
processAllTypeCheckedModule :: GhcMonad m
                            => TypecheckedModule -> m [SpanInfo]
processAllTypeCheckedModule tcm =
  do let tcs = tm_typechecked_source tcm
         bs = listifyAllSpans tcs :: [LHsBind StageReaderId]
         es = listifyAllSpans tcs :: [LHsExpr StageReaderId]
         ps = listifyAllSpans tcs :: [LPat StageReaderId]
     bts <- mapM (getTypeLHsBind tcm) bs
     ets <- mapM (getTypeLHsExpr tcm) es
     pts <- mapM (getTypeLPat tcm) ps
     return (mapMaybe toSpanInfo (sortBy cmp (concat bts ++ catMaybes (concat [ets,pts]))))
  where cmp (_,a,_) (_,b,_)
          | a `isSubspanOf` b = LT
          | b `isSubspanOf` a = GT
          | otherwise = EQ
-- | Identifier, span and type of a function binding, taken from the
-- bound identifier itself.  Other bind forms yield no entries.
getTypeLHsBind :: (GhcMonad m)
               => TypecheckedModule
               -> LHsBind StageReaderId
               -> m [(Maybe Id,SrcSpan,Type)]
#if MIN_VERSION_ghc(7,8,3)
getTypeLHsBind _ (L _spn FunBind{fun_id = pid,fun_matches = MG{}}) =
  return (return (Just (unLoc pid),getLoc pid,varType (unLoc pid)))
#else
getTypeLHsBind _ (L _spn FunBind{fun_id = pid,fun_matches = MG _ _ _typ}) =
  return (return (Just (unLoc pid),getLoc pid,varType (unLoc pid)))
#endif
#if MIN_VERSION_ghc(7,8,3)
#else
-- Pre-7.8.3: descend into AbsBinds wrappers.
getTypeLHsBind m (L _spn AbsBinds{abs_binds = binds}) =
  fmap concat
       (mapM (getTypeLHsBind m)
             (map snd (bagToList binds)))
#endif
getTypeLHsBind _ _ = return []
-- getTypeLHsBind _ x =
--   do df <- getSessionDynFlags
--      error ("getTypeLHsBind: unhandled case: " ++
--             showppr df x)
-- | Identifier (for bare variables), span and type of an expression,
-- obtained by desugaring it to Core; 'Nothing' when desugaring fails.
getTypeLHsExpr :: (GhcMonad m)
               => TypecheckedModule
               -> LHsExpr StageReaderId
               -> m (Maybe (Maybe Id,SrcSpan,Type))
getTypeLHsExpr _ e =
  do hs_env <- getSession
     (_,mbe) <- liftIO (deSugarExpr hs_env e)
     case mbe of
       Nothing -> return Nothing
       Just expr ->
         return (Just (case unwrapVar (unLoc e) of
#if __GLASGOW_HASKELL__ >= 806
                         HsVar _ (L _ i) -> Just i
#elif __GLASGOW_HASKELL__ >= 800
                         HsVar (L _ i) -> Just i
#else
                         HsVar i -> Just i
#endif
                         _ -> Nothing
                      ,getLoc e
                      ,CoreUtils.exprType expr))
  where
    -- Strip a coercion wrapper so a wrapped variable is still detected.
#if __GLASGOW_HASKELL__ >= 806
    unwrapVar (HsWrap _ _ var) = var
#else
    unwrapVar (HsWrap _ var) = var
#endif
    unwrapVar e' = e'
-- | Get id and type for patterns.
getTypeLPat :: (GhcMonad m)
            => TypecheckedModule -> LPat StageReaderId -> m (Maybe (Maybe Id,SrcSpan,Type))
getTypeLPat _ (L spn pat) =
  return (Just (getMaybeId pat,spn,getPatType pat))
  where
    -- For constructor patterns, report the data constructor's type.
    getPatType (ConPatOut (L _ (RealDataCon dc)) _ _ _ _ _ _) =
      dataConRepType dc
    getPatType pat' = hsPatType pat'
#if __GLASGOW_HASKELL__ >= 806
    getMaybeId (VarPat _ (L _ vid)) = Just vid
#elif __GLASGOW_HASKELL__ >= 800
    getMaybeId (VarPat (L _ vid)) = Just vid
#else
    getMaybeId (VarPat vid) = Just vid
#endif
    getMaybeId _ = Nothing
-- | Get ALL source spans in the source.
listifyAllSpans :: Typeable a
                => TypecheckedSource -> [Located a]
listifyAllSpans tcs =
  listifyStaged TypeChecker p tcs
  -- Keep only nodes with a real (non-generated) source span.
  where p (L spn _) = isGoodSrcSpan spn
-- | Collect every node of the requested type satisfying the predicate.
-- On GHC <= 8.2 this avoids stage-specific undefined holes in the AST.
listifyStaged :: Typeable r
              => Stage -> (r -> Bool) -> Data.Generics.GenericQ [r]
#if __GLASGOW_HASKELL__ <= 802
listifyStaged s p =
  everythingStaged
    s
    (++)
    []
    ([] `Data.Generics.mkQ`
     (\x -> [x | p x]))
#else
listifyStaged _ p = Data.Generics.listify p
#endif
------------------------------------------------------------------------------
-- The following was taken from 'ghc-syb-utils'
--
-- ghc-syb-utils:
-- https://github.com/nominolo/ghc-syb
-- | Ghc Ast types tend to have undefined holes, to be filled
-- by later compiler phases. We tag Asts with their source,
-- so that we can avoid such holes based on who generated the Asts.
data Stage
  = Parser      -- ^ Ast as produced by the parser
  | Renamer     -- ^ Ast after renaming
  | TypeChecker -- ^ Ast after type checking
  deriving (Eq,Ord,Show)
-- | Like 'everything', but avoid known potholes, based on the 'Stage' that
-- generated the Ast.
#if __GLASGOW_HASKELL__ <= 802
everythingStaged :: Stage -> (r -> r -> r) -> r -> Data.Generics.GenericQ r -> Data.Generics.GenericQ r
everythingStaged stage k z f x
  | (const False `Data.Generics.extQ` postTcType `Data.Generics.extQ` fixity `Data.Generics.extQ` nameSet) x = z
  | otherwise = foldl k (f x) (gmapQ (everythingStaged stage k z f) x)
  -- The predicates below mark node types that are undefined holes at
  -- the given stage and therefore must not be traversed.
  where nameSet = const (stage `elem` [Parser,TypeChecker]) :: NameSet -> Bool
#if __GLASGOW_HASKELL__ >= 709
        postTcType = const (stage<TypeChecker) :: PostTc Id Type -> Bool
#else
        postTcType = const (stage<TypeChecker) :: PostTcType -> Bool
#endif
        fixity = const (stage<Renamer) :: GHC.Fixity -> Bool
#endif
-- | Pretty print the types into a 'SpanInfo'.
-- Only real source spans yield a result; columns are converted from
-- GHC's 1-based convention to 0-based.
toSpanInfo :: (Maybe Id,SrcSpan,Type) -> Maybe SpanInfo
toSpanInfo (n,mspan,typ) =
  case mspan of
    RealSrcSpan spn ->
      Just (SpanInfo (srcSpanStartLine spn)
                     (srcSpanStartCol spn - 1)
                     (srcSpanEndLine spn)
                     (srcSpanEndCol spn - 1)
                     (Just typ)
                     n)
    _ -> Nothing
-- | Pretty print something to string.
showppr :: Outputable a
        => DynFlags -> a -> String
showppr dflags x = showSDocForUser dflags neverQualify (ppr x)
| chrisdone/intero | src/GhciInfo.hs | bsd-3-clause | 9,844 | 0 | 25 | 3,092 | 2,219 | 1,155 | 1,064 | 184 | 5 |
-- shall this also export Graph.Type, set plotType and so on?
module Graphics.Gnuplot.Plot.TwoDimensional (
T,
list,
function,
functions,
parameterFunction,
listFromFile,
pathFromFile,
linearScale,
functionToGraph,
) where
import qualified Graphics.Gnuplot.Private.Graph2DType as Type
import qualified Graphics.Gnuplot.Private.Graph2D as Graph
import qualified Graphics.Gnuplot.Private.Plot as Plot
import qualified Graphics.Gnuplot.Value.ColumnSet as Col
import qualified Graphics.Gnuplot.Value.Tuple as Tuple
import qualified Graphics.Gnuplot.Value.Atom as Atom
import Graphics.Gnuplot.Utility
(functionToGraph, linearScale, assembleCells, )
import qualified Data.List.Match as Match
import qualified Data.List.HT as ListHT
{- |
Plots can be assembled using 'mappend' or 'mconcat'.
You can alter attributes of embedded graphs using 'fmap'.
-}
type T x y = Plot.T (Graph.T x y)
-- * computed plots
{- |
> list Type.listLines (take 30 (let fibs = 0 : 1 : zipWith (+) fibs (tail fibs) in fibs))
> list Type.lines (take 30 (let fibs0 = 0 : fibs1; fibs1 = 1 : zipWith (+) fibs0 fibs1 in zip fibs0 fibs1))
-}
list ::
   (Atom.C x, Atom.C y, Tuple.C a) =>
   Type.T x y a -> [a] -> T x y
list typ ps =
   Plot.withUniqueFile
      (assembleCells (map Tuple.text ps))
      -- select all columns of the tuple, numbered from 1
      [Graph.deflt typ
         [1 .. case Type.tupleSize typ of Tuple.ColumnCount n -> n]]
{- |
> function Type.line (linearScale 1000 (-10,10)) sin
-}
function ::
   (Atom.C x, Atom.C y,
    Tuple.C a, Tuple.C b) =>
   Type.T x y (a,b) -> [a] -> (a -> b) -> T x y
function typ args = list typ . functionToGraph args
{- |
> functions Type.line (linearScale 1000 (-10,10)) [sin, cos]
-}
functions ::
   (Atom.C x, Atom.C y,
    Tuple.C a, Tuple.C b) =>
   Type.T x y (a,b) -> [a] -> [a -> b] -> T x y
functions typ args fs =
   -- Evaluate every function at every abscissa: one shared abscissa
   -- column group followed by one column group per function.
   let dat = map (\x -> (x, map ($ x) fs)) args
       -- 'typA'/'typB' only transport component types to
       -- 'Type.tupleSize'; their values are never evaluated.
       typA :: Type.T x y (a,b) -> Type.T x y a
       typA = undefined
       typB :: Type.T x y (a,b) -> Type.T x y b
       typB = undefined
       Tuple.ColumnCount na = Type.tupleSize (typA typ)
       Tuple.ColumnCount nb = Type.tupleSize (typB typ)
   in  Plot.withUniqueFile
          (assembleCells
             (map (\(a,b) -> Tuple.text a ++ concatMap Tuple.text b) dat))
          (Match.take fs $
           map (\ns -> Graph.deflt typ ([1..na] ++ ns)) $
           ListHT.sliceVertical nb [(na+1)..])
{- |
> parameterFunction Type.line (linearScale 1000 (0,2*pi)) (\t -> (sin (2*t), cos t))
-}
parameterFunction ::
   (Atom.C x, Atom.C y,
    Tuple.C a) =>
   Type.T x y a -> [t] -> (t -> a) -> T x y
parameterFunction typ params f = list typ (fmap f params)
-- * plot stored data
-- | Plot data stored in a file, selecting the given column set.
fromFile ::
   (Atom.C x, Atom.C y) =>
   Type.T x y a -> FilePath -> Col.T a -> T x y
fromFile typ filename (Col.Cons cs) =
   Plot.fromGraphs filename [Graph.deflt typ cs]
-- | Plot a single column of a data file.
listFromFile ::
   (Atom.C i, Atom.C y) =>
   Type.T i y y -> FilePath -> Int -> T i y
listFromFile typ filename = fromFile typ filename . Col.atom
-- | Plot a path taken from two columns (x and y) of a data file.
pathFromFile ::
   (Atom.C x, Atom.C y) =>
   Type.T x y (x,y) -> FilePath -> Int -> Int -> T x y
pathFromFile typ filename columnX columnY =
   let cols = Col.pair (Col.atom columnX) (Col.atom columnY)
   in  fromFile typ filename cols
| wavewave/gnuplot | src/Graphics/Gnuplot/Plot/TwoDimensional.hs | bsd-3-clause | 3,246 | 0 | 17 | 756 | 1,134 | 612 | 522 | 73 | 1 |
{-# LANGUAGE LambdaCase #-}
module GameEngine.Loader.Entity
( parseEntities
, EntityData(..)
) where
import Control.Monad (void)
import Data.Char
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as SB8
import Text.Megaparsec hiding (count)
import qualified Text.Megaparsec as L
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L -- (skipLineComment, skipBlockComment, symbol, lexeme, signed)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Vect
import Data.Void
type Parser a = Parsec Void String a
-- | Attributes parsed from one Quake 3 map entity block.  The first
-- eight fields always have a value (defaults in 'emptyEntityData'); the
-- 'Maybe' fields are present only when the map specifies them.
data EntityData
  = EntityData
  { classname :: String
  , spawnflags :: Int
  , origin :: Vec3
  , angles :: Vec3
  , notsingle :: Bool
  , notteam :: Bool
  , notfree :: Bool
  , notq3a :: Bool
    -- optional numeric attributes
  , speed :: Maybe Float
  , wait :: Maybe Float
  , random :: Maybe Float
  , gravity :: Maybe Float
  , roll :: Maybe Float
  , light :: Maybe Float
  , lip :: Maybe Float
  , height :: Maybe Float
  , phase :: Maybe Float
  , delay :: Maybe Float
  , color :: Maybe Vec3
  , count :: Maybe Int
  , damage :: Maybe Int
  , nobots :: Maybe Int
  , nohumans :: Maybe Int
  , health :: Maybe Int
  , noglobalsound :: Maybe Int
    -- optional string attributes
  , model :: Maybe String
  , model2 :: Maybe String
  , target :: Maybe String
  , targetname :: Maybe String
  , team :: Maybe String
  , gametype :: Maybe String
  , message :: Maybe String
  , noise :: Maybe String
  , music :: Maybe String
  , targetShaderName :: Maybe String
  , targetShaderNewName :: Maybe String
  }
  deriving Show
-- | Default entity: empty classname, zero vectors and flags, every
-- optional field unset.  Parsed attributes overwrite these defaults.
-- Adds the missing top-level type signature.
emptyEntityData :: EntityData
emptyEntityData = EntityData
  { classname = ""
  , spawnflags = 0
  , origin = zero
  , angles = zero
  , notsingle = False
  , notteam = False
  , notfree = False
  , notq3a = False
  , speed = Nothing
  , wait = Nothing
  , random = Nothing
  , gravity = Nothing
  , roll = Nothing
  , light = Nothing
  , lip = Nothing
  , height = Nothing
  , phase = Nothing
  , delay = Nothing
  , color = Nothing
  , count = Nothing
  , damage = Nothing
  , nobots = Nothing
  , nohumans = Nothing
  , health = Nothing
  , noglobalsound = Nothing
  , model = Nothing
  , model2 = Nothing
  , target = Nothing
  , targetname = Nothing
  , team = Nothing
  , gametype = Nothing
  , message = Nothing
  , noise = Nothing
  , music = Nothing
  , targetShaderName = Nothing
  , targetShaderNewName = Nothing
  }
-- quake 3 entity parser
-- | Parse all entities from a map's entity lump.  NOTE: the whole input
-- is lowercased before parsing, so every string value in the result is
-- lower-case.
parseEntities :: String -> String -> Either String [EntityData]
parseEntities fname src = case parse entities fname $ map toLower src of
  Left err -> Left (parseErrorPretty err)
  Right e -> Right e
-- | Zero or more entity blocks, allowing leading whitespace and
-- comments, anchored at end of input.
entities :: Parser [EntityData]
entities = between newlineConsumer eof (many entity)
-- | One @{ ... }@ entity block.  Every line is either a recognised
-- key/value pair or an ignored unknown attribute; the resulting record
-- updates are folded over 'emptyEntityData'.
entity :: Parser EntityData
entity = foldr ($) emptyEntityData <$> between (newlineSymbol "{") (newlineSymbol "}") (some $ choice [line value, line unknownAttribute])
-- | Parse one known @"key" "value"@ attribute and return the
-- corresponding record update; unrecognised keys yield 'id'.
value :: Parser (EntityData -> EntityData)
value = stringLiteral >>= \case
  "classname" -> (\v e -> e {classname = v}) <$> stringLiteral
  "model" -> (\v e -> e {model = Just v}) <$> stringLiteral
  "model2" -> (\v e -> e {model2 = Just v}) <$> stringLiteral
  "target" -> (\v e -> e {target = Just v}) <$> stringLiteral
  "targetname" -> (\v e -> e {targetname = Just v}) <$> stringLiteral
  "team" -> (\v e -> e {team = Just v}) <$> stringLiteral
  "targetshadername" -> (\v e -> e {targetShaderName = Just v}) <$> stringLiteral
  "targetshadernewname" -> (\v e -> e {targetShaderNewName = Just v}) <$> stringLiteral
  "spawnflags" -> (\v e -> e {spawnflags = v}) <$> quoted integerLiteral
  "origin" -> (\v e -> e {origin = v}) <$> quoted vector3
  "angles" -> (\v e -> e {angles = v}) <$> quoted vector3
  -- "angle" is shorthand for yaw only
  "angle" -> (\v e -> e {angles = Vec3 0 v 0}) <$> quoted floatLiteral
  "notsingle" -> (\v e -> e {notsingle = v /= 0}) <$> quoted integerLiteral
  "notteam" -> (\v e -> e {notteam = v /= 0}) <$> quoted integerLiteral
  "notfree" -> (\v e -> e {notfree = v /= 0}) <$> quoted integerLiteral
  "notq3a" -> (\v e -> e {notq3a = v /= 0}) <$> quoted integerLiteral
  "gametype" -> (\v e -> e {gametype = Just v}) <$> stringLiteral
  -- custom; varying defaults
  "message" -> (\v e -> e {message = Just v}) <$> stringLiteral
  "noise" -> (\v e -> e {noise = Just v}) <$> stringLiteral
  "music" -> (\v e -> e {music = Just v}) <$> stringLiteral
  "speed" -> (\v e -> e {speed = Just v}) <$> quoted floatLiteral
  "wait" -> (\v e -> e {wait = Just v}) <$> quoted floatLiteral
  "random" -> (\v e -> e {random = Just v}) <$> quoted floatLiteral
  "gravity" -> (\v e -> e {gravity = Just v}) <$> quoted floatLiteral
  "roll" -> (\v e -> e {roll = Just v}) <$> quoted floatLiteral
  "light" -> (\v e -> e {light = Just v}) <$> quoted floatLiteral
  "lip" -> (\v e -> e {lip = Just v}) <$> quoted floatLiteral
  "height" -> (\v e -> e {height = Just v}) <$> quoted floatLiteral
  "phase" -> (\v e -> e {phase = Just v}) <$> quoted floatLiteral
  "delay" -> (\v e -> e {delay = Just v}) <$> quoted floatLiteral
  "color" -> (\v e -> e {color = Just v}) <$> quoted vector3
  "count" -> (\v e -> e {count = Just v}) <$> quoted integerLiteral
  "dmg" -> (\v e -> e {damage = Just v}) <$> quoted integerLiteral
  "nobots" -> (\v e -> e {nobots = Just v}) <$> quoted integerLiteral
  "nohumans" -> (\v e -> e {nohumans = Just v}) <$> quoted integerLiteral
  "health" -> (\v e -> e {health = Just v}) <$> quoted integerLiteral
  "noglobalsound" -> (\v e -> e {noglobalsound = Just v}) <$> quoted integerLiteral
  _ -> return id
-- parser primitives
lineComment :: Parser ()
lineComment = L.skipLineComment "//"
blockComment :: Parser ()
blockComment = L.skipBlockComment "/*" "*/"
spaceConsumer :: Parser ()
spaceConsumer = L.space (void $ oneOf (" \t" :: String)) lineComment blockComment
newlineConsumer :: Parser ()
newlineConsumer = L.space (void spaceChar) lineComment blockComment
symbol :: String -> Parser String
symbol = L.symbol spaceConsumer -- do not consumes line breaks
newlineSymbol :: String -> Parser String
newlineSymbol = L.symbol newlineConsumer -- consumes line breaks
lexeme :: Parser a -> Parser a
lexeme = L.lexeme spaceConsumer
quoted :: Parser a -> Parser a
quoted = between (lexeme $ char '"') (lexeme $ char '"')
-- | A double-quoted string; note that no escape sequences are
-- recognised (a backslash is taken literally).
stringLiteral :: Parser String
stringLiteral = lexeme $ char '"' >> manyTill anyChar (char '"')
-- | A signed decimal integer.
integerLiteral :: Parser Int
integerLiteral = fromIntegral <$> L.signed spaceConsumer (lexeme L.decimal)
-- | A signed float: accepts "1.5", a leading-dot form ".5", or a plain
-- integer (widened to Float).
floatLiteral :: Parser Float
floatLiteral = realToFrac <$> L.signed spaceConsumer (lexeme float) where
  float = choice
    [ try L.float
    , try ((read . ("0."++)) <$ char '.' <*> some digitChar)
    , fromIntegral <$> L.decimal
    ]
-- | Three whitespace-separated float components.
vector3 :: Parser Vec3
vector3 = Vec3 <$> floatLiteral <*> floatLiteral <*> floatLiteral
-- | Run a parser, discard the remainder of the line, then consume the
-- line break(s) that follow.
line :: Parser a -> Parser a
line p = p <* skipTillEol <* newlineConsumer
-- | Consume (and discard) everything up to the next EOL or brace.
-- 'pos' and 'cmd' are only needed by the commented-out diagnostic below.
skipTillEol :: Parser ()
skipTillEol = do
  let n = lookAhead (choice [eol, string "{", string "}"])
  pos <- getPosition
  cmd <- manyTill anyChar n
  --unless (null cmd) $ tell ["LEFTOVER - " ++ sourcePosPretty pos ++ ": " ++ cmd]
  return ()
-- | Consume an unrecognised attribute line and yield the identity
-- update, so unknown keys are silently ignored.
-- 'pos' and 'args' only feed the commented-out diagnostic.
unknownAttribute :: Parser (a -> a)
unknownAttribute = do
  let n = lookAhead eol
  pos <- getPosition
  cmd <- some alphaNumChar
  args <- manyTill anyChar n
  --tell ["IGNORE - " ++ sourcePosPretty pos ++ ": " ++ cmd ++ args]
  return id
| csabahruska/quake3 | game-engine/GameEngine/Loader/Entity.hs | bsd-3-clause | 7,898 | 0 | 15 | 2,183 | 2,680 | 1,466 | 1,214 | 184 | 38 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-|
Module : Stack.Sig.GPG
Description : GPG Functions
Copyright : (c) 2015-2018, Stack contributors
License : BSD3
Maintainer : Tim Dysinger <tim@fpcomplete.com>
Stability : experimental
Portability : POSIX
-}
module Stack.Sig.GPG (gpgSign, gpgVerify) where
import Stack.Prelude
import qualified Data.ByteString.Char8 as C
import Data.List (find, isPrefixOf)
import qualified Data.Text as T
import Stack.Types.Sig
import System.Directory (findExecutable)
import System.Environment (lookupEnv)
import System.Exit (ExitCode(..))
import System.IO (hGetContents, hPutStrLn)
import System.Info (os)
import System.Process (ProcessHandle, runInteractiveProcess,
waitForProcess)
-- | Sign a file path with GPG, returning the @Signature@.
-- Runs @gpg --detach-sig --armor@ and collects the signature from stdout;
-- throws 'GPGSignException' on a non-zero exit code.
gpgSign
    :: HasLogFunc env
    => Path Abs File -> RIO env Signature
gpgSign path = do
    gpgWarnTTY
    (_hIn,hOut,hErr,process) <-
        gpg
            [ "--output"
            , "-"
            , "--use-agent"
            , "--detach-sig"
            , "--armor"
            , toFilePath path]
    -- NOTE(review): hGetContents is lazy I/O; if gpg fills the stderr
    -- pipe buffer before stdout is drained this could deadlock — confirm.
    (out,err,code) <-
        liftIO
            ((,,) <$>
             hGetContents hOut <*>
             hGetContents hErr <*>
             waitForProcess process)
    if code /= ExitSuccess
        then throwM (GPGSignException $ out <> "\n" <> err)
        else return (Signature $ C.pack out)
-- | Verify the @Signature@ of a file path returning the
-- @Fingerprint@.
-- The signature is piped to gpg on stdin; the fingerprint is scraped
-- from the "Primary key fingerprint:" line of gpg's stderr output.
gpgVerify
    :: (MonadIO m, MonadThrow m)
    => Signature -> Path Abs File -> m Fingerprint
gpgVerify (Signature signature) path = do
    (hIn,hOut,hErr,process) <-
        gpg ["--verify", "--with-fingerprint", "-", toFilePath path]
    -- NOTE(review): hIn is written but never closed, so gpg may block
    -- waiting for EOF on stdin — confirm this works with all gpg versions.
    (_in,out,err,code) <-
        liftIO
            ((,,,) <$>
             hPutStrLn hIn (C.unpack signature) <*>
             hGetContents hOut <*>
             hGetContents hErr <*>
             waitForProcess process)
    if code /= ExitSuccess
        then throwM (GPGVerifyException (out ++ "\n" ++ err))
        else maybe
                 (throwM
                      (GPGFingerprintException
                           ("unable to extract fingerprint from output\n: " <>
                            out)))
                 return
                 -- Drop the "Primary key fingerprint:" words, keep the hex.
                 (mkFingerprint . T.pack . concat . drop 3 <$>
                  find
                      ((==) ["Primary", "key", "fingerprint:"] . take 3)
                      (map words (lines err)))
-- | Try to execute `gpg2` but fallback to `gpg` (as a backup).
-- Throws 'GPGNotFoundException' when neither binary is on the PATH.
gpg
    :: (MonadIO m, MonadThrow m)
    => [String] -> m (Handle, Handle, Handle, ProcessHandle)
gpg args = do
    foundGpg2 <- liftIO (findExecutable "gpg2")
    case foundGpg2 of
        Just _ -> launch "gpg2"
        Nothing -> do
            foundGpg <- liftIO (findExecutable "gpg")
            case foundGpg of
                Just _ -> launch "gpg"
                Nothing -> throwM GPGNotFoundException
  where
    -- Start the chosen executable with the given args, piping all handles.
    launch exe = liftIO (runInteractiveProcess exe args Nothing Nothing)
-- | `man gpg-agent` shows that you need GPG_TTY environment variable set to
-- properly deal with interactions with gpg-agent. (Doesn't apply to Windows
-- though)
gpgWarnTTY :: HasLogFunc env => RIO env ()
gpgWarnTTY =
    -- On Windows 'os' is "mingw32", hence the prefix test.
    unless
        ("ming" `isPrefixOf` os)
        (do mTTY <- liftIO (lookupEnv "GPG_TTY")
            -- 'null' on a Maybe is True only for Nothing; a set-but-empty
            -- GPG_TTY therefore does NOT trigger the warning.
            when
                (null mTTY)
                (logWarn
                     "Environment variable GPG_TTY is not set (see `man gpg-agent`)"))
| anton-dessiatov/stack | src/Stack/Sig/GPG.hs | bsd-3-clause | 3,690 | 0 | 17 | 1,221 | 836 | 447 | 389 | 85 | 3 |
-- | Graph Coloring.
-- This is a generic graph coloring library, abstracted over the type of
-- the node keys, nodes and colors.
--
module GraphColor (
module GraphBase,
module GraphOps,
module GraphPpr,
colorGraph
)
where
import GraphBase
import GraphOps
import GraphPpr
import Unique
import UniqFM
import UniqSet
import Outputable
import Data.Maybe
import Data.List
-- | Try to color a graph with this set of colors.
-- Uses Chaitin's algorithm to color the graph.
-- The graph is scanned for nodes which are deemed 'trivially colorable'. These nodes
-- are pushed onto a stack and removed from the graph.
-- Once this process is complete the graph can be colored by removing nodes from
-- the stack (ie in reverse order) and assigning them colors different to their neighbors.
--
colorGraph
        :: ( Uniquable k, Uniquable cls, Uniquable color
        , Eq cls, Ord k
        , Outputable k, Outputable cls, Outputable color)
        => Bool -- ^ whether to do iterative coalescing
        -> Int -- ^ how many times we've tried to color this graph so far.
        -> UniqFM (UniqSet color) -- ^ map of (node class -> set of colors available for this class).
        -> Triv k cls color -- ^ fn to decide whether a node is trivially colorable.
        -> (Graph k cls color -> k) -- ^ fn to choose a node to potentially leave uncolored if nothing is trivially colorable.
        -> Graph k cls color -- ^ the graph to color.
        -> ( Graph k cls color -- the colored graph.
        , UniqSet k -- the set of nodes that we couldn't find a color for.
        , UniqFM k ) -- map of regs (r1 -> r2) that were coalesced
        -- r1 should be replaced by r2 in the source
colorGraph iterative spinCount colors triv spill graph0
 = let
        -- If we're not doing iterative coalescing then do an aggressive coalescing first time
        -- around and then conservative coalescing for subsequent passes.
        --
        -- Aggressive coalescing is a quick way to get rid of many reg-reg moves. However, if
        -- there is a lot of register pressure and we do it on every round then it can make the
        -- graph less colorable and prevent the algorithm from converging in a sensible number
        -- of cycles.
        --
        (graph_coalesced, kksCoalesce1)
         = if iterative
                then (graph0, [])
                else if spinCount == 0
                        then coalesceGraph True triv graph0
                        else coalesceGraph False triv graph0
        -- run the scanner to slurp out all the trivially colorable nodes
        -- (and do coalescing if iterative coalescing is enabled)
        (ksTriv, ksProblems, kksCoalesce2)
         = colorScan iterative triv spill graph_coalesced
        -- If iterative coalescing is enabled, the scanner will coalesce the graph as it does its business.
        -- We need to apply all the coalescences found by the scanner to the original
        -- graph before doing assignColors.
        --
        -- Because we've got the whole, non-pruned graph here we turn on aggressive coalescing
        -- to force all the (conservative) coalescences found during scanning.
        --
        (graph_scan_coalesced, _)
         = mapAccumL (coalesceNodes True triv) graph_coalesced kksCoalesce2
        -- color the trivially colorable nodes
        -- during scanning, keys of triv nodes were added to the front of the list as they were found
        -- this colors them in the reverse order, as required by the algorithm.
        (graph_triv, ksNoTriv)
         = assignColors colors graph_scan_coalesced ksTriv
        -- try and color the problem nodes
        -- problem nodes are the ones that were left uncolored because they weren't triv.
        -- there's a chance we can color them here anyway.
        (graph_prob, ksNoColor)
         = assignColors colors graph_triv ksProblems
        -- if the trivially colorable nodes didn't color then something is probably wrong
        -- with the provided triv function.
        --
   in if not $ null ksNoTriv
        then pprPanic "colorGraph: trivially colorable nodes didn't color!" -- empty
                        ( empty
                        $$ text "ksTriv = " <> ppr ksTriv
                        $$ text "ksNoTriv = " <> ppr ksNoTriv
                        $$ text "colors = " <> ppr colors
                        $$ empty
                        $$ dotGraph (\_ -> text "white") triv graph_triv)
        else ( graph_prob
                , mkUniqSet ksNoColor -- the nodes that didn't color (spills)
                , if iterative
                        then (listToUFM kksCoalesce2)
                        else (listToUFM kksCoalesce1))
-- | Scan through the conflict graph separating out trivially colorable and
-- potentially uncolorable (problem) nodes.
--
-- Checking whether a node is trivially colorable or not is a reasonably expensive operation,
-- so after a triv node is found and removed from the graph it's no good to return to the 'start'
-- of the graph and recheck a bunch of nodes that will probably still be non-trivially colorable.
--
-- To ward against this, during each pass through the graph we collect up a list of triv nodes
-- that were found, and only remove them once we've finished the pass. The more nodes we can delete
-- at once the more likely it is that nodes we've already checked will become trivially colorable
-- for the next pass.
--
-- TODO: add work lists so finding triv nodes is easier.
-- If we've just scanned the graph, and removed triv nodes, then the only
-- nodes that we need to rescan are the ones we've removed edges from.
colorScan
        :: ( Uniquable k, Uniquable cls, Uniquable color
        , Ord k, Eq cls
        , Outputable k, Outputable cls)
        => Bool -- ^ whether to do iterative coalescing
        -> Triv k cls color -- ^ fn to decide whether a node is trivially colorable
        -> (Graph k cls color -> k) -- ^ fn to choose a node to potentially leave uncolored if nothing is trivially colorable.
        -> Graph k cls color -- ^ the graph to scan
        -> ([k], [k], [(k, k)]) -- triv colorable nodes, problem nodes, pairs of nodes to coalesce
-- Just seeds the worker loop with empty accumulators.
colorScan iterative triv spill graph
        = colorScan_spin iterative triv spill graph [] [] []
-- Worker loop for 'colorScan'.  Accumulators: triv nodes found so far,
-- spill candidates so far, and node pairs to coalesce (in reverse order).
colorScan_spin
        :: ( Uniquable k, Uniquable cls, Uniquable color
        , Ord k, Eq cls
        , Outputable k, Outputable cls)
        => Bool
        -> Triv k cls color
        -> (Graph k cls color -> k)
        -> Graph k cls color
        -> [k]
        -> [k]
        -> [(k, k)]
        -> ([k], [k], [(k, k)])
colorScan_spin iterative triv spill graph
        ksTriv ksSpill kksCoalesce
        -- if the graph is empty then we're done
        | isNullUFM $ graphMap graph
        = (ksTriv, ksSpill, reverse kksCoalesce)
        -- Simplify:
        -- Look for trivially colorable nodes.
        -- If we can find some then remove them from the graph and go back for more.
        --
        | nsTrivFound@(_:_)
                <- scanGraph (\node -> triv (nodeClass node) (nodeConflicts node) (nodeExclusions node)
                                -- for iterative coalescing we only want non-move related
                                -- nodes here
                                && (not iterative || isEmptyUniqSet (nodeCoalesce node)))
                $ graph
        , ksTrivFound <- map nodeId nsTrivFound
          -- NOTE(review): partial 'Just g'' match; relies on every scanned
          -- node id still being present in the graph.
        , graph2 <- foldr (\k g -> let Just g' = delNode k g
                                   in g')
                        graph ksTrivFound
        = colorScan_spin iterative triv spill graph2
                (ksTrivFound ++ ksTriv)
                ksSpill
                kksCoalesce
        -- Coalesce:
        -- If we're doing iterative coalescing and no triv nodes are available
        -- then it's time for a coalescing pass.
        | iterative
        = case coalesceGraph False triv graph of
                -- we were able to coalesce something
                -- go back to Simplify and see if this frees up more nodes to be trivially colorable.
                (graph2, kksCoalesceFound @(_:_))
                 -> colorScan_spin iterative triv spill graph2
                        ksTriv ksSpill (reverse kksCoalesceFound ++ kksCoalesce)
                -- Freeze:
                -- nothing could be coalesced (or was triv),
                -- time to choose a node to freeze and give up on ever coalescing it.
                (graph2, [])
                 -> case freezeOneInGraph graph2 of
                        -- we were able to freeze something
                        -- hopefully this will free up something for Simplify
                        (graph3, True)
                         -> colorScan_spin iterative triv spill graph3
                                ksTriv ksSpill kksCoalesce
                        -- we couldn't find something to freeze either
                        -- time for a spill
                        (graph3, False)
                         -> colorScan_spill iterative triv spill graph3
                                ksTriv ksSpill kksCoalesce
        -- spill time
        | otherwise
        = colorScan_spill iterative triv spill graph
                ksTriv ksSpill kksCoalesce
-- Select:
-- we couldn't find any triv nodes or things to freeze or coalesce,
-- and the graph isn't empty yet.. We'll have to choose a spill
-- candidate and leave it uncolored.
--
colorScan_spill
        :: ( Uniquable k, Uniquable cls, Uniquable color
        , Ord k, Eq cls
        , Outputable k, Outputable cls)
        => Bool
        -> Triv k cls color
        -> (Graph k cls color -> k)
        -> Graph k cls color
        -> [k]
        -> [k]
        -> [(k, k)]
        -> ([k], [k], [(k, k)])
colorScan_spill iterative triv spill graph
        ksTriv ksSpill kksCoalesce
 = let kSpill = spill graph
       -- NOTE(review): partial match; assumes 'spill' always returns a
       -- node that is present in the graph.
       Just graph' = delNode kSpill graph
   in colorScan_spin iterative triv spill graph'
        ksTriv (kSpill : ksSpill) kksCoalesce
-- | Try to assign a color to all these nodes.
-- Nodes that could not be colored are collected and returned.
assignColors
        :: ( Uniquable k, Uniquable cls, Uniquable color
        , Outputable cls)
        => UniqFM (UniqSet color) -- ^ map of (node class -> set of colors available for this class).
        -> Graph k cls color -- ^ the graph
        -> [k] -- ^ nodes to assign a color to.
        -> ( Graph k cls color -- the colored graph
        , [k]) -- the nodes that didn't color.
assignColors colors graph ks
        = assignColors' colors graph [] ks
        -- 'prob' accumulates the nodes that failed to color.
        where assignColors' _ graph prob []
                = (graph, prob)
              assignColors' colors graph prob (k:ks)
                = case assignColor colors k graph of
                        -- couldn't color this node
                        Nothing -> assignColors' colors graph (k : prob) ks
                        -- this node colored ok, so do the rest
                        Just graph' -> assignColors' colors graph' prob ks
              -- Color one node, if 'selectColor' can find a color for it.
              assignColor colors u graph
                | Just c <- selectColor colors graph u
                = Just (setColor u c graph)
                | otherwise
                = Nothing
-- | Select a color for a certain node
-- taking into account preferences, neighbors and exclusions.
-- returns Nothing if no color can be assigned to this node.
--
selectColor
        :: ( Uniquable k, Uniquable cls, Uniquable color
        , Outputable cls)
        => UniqFM (UniqSet color) -- ^ map of (node class -> set of colors available for this class).
        -> Graph k cls color -- ^ the graph
        -> k -- ^ key of the node to select a color for.
        -> Maybe color
selectColor colors graph u
 = let -- lookup the node
       -- NOTE(review): partial match; assumes u is present in the graph.
       Just node = lookupNode graph u
       -- lookup the available colors for the class of this node.
       colors_avail
        = case lookupUFM colors (nodeClass node) of
                Nothing -> pprPanic "selectColor: no colors available for class " (ppr (nodeClass node))
                Just cs -> cs
       -- find colors we can't use because they're already being used
       -- by a node that conflicts with this one.
       -- NOTE(review): partial match; assumes every conflict id resolves.
       Just nsConflicts
        = sequence
        $ map (lookupNode graph)
        $ nonDetEltsUFM
        $ nodeConflicts node
        -- See Note [Unique Determinism and code generation]
       colors_conflict = mkUniqSet
        $ catMaybes
        $ map nodeColor nsConflicts
       -- the prefs of our neighbors
       colors_neighbor_prefs
        = mkUniqSet
        $ concat $ map nodePreference nsConflicts
       -- colors that are still valid for us
       colors_ok_ex = minusUniqSet colors_avail (nodeExclusions node)
       colors_ok = minusUniqSet colors_ok_ex colors_conflict
       -- the colors that we prefer, and are still ok
       colors_ok_pref = intersectUniqSets
                        (mkUniqSet $ nodePreference node) colors_ok
       -- the colors that we could choose while being nice to our neighbors
       colors_ok_nice = minusUniqSet
                        colors_ok colors_neighbor_prefs
       -- the best of all possible worlds..
       colors_ok_pref_nice
        = intersectUniqSets
                colors_ok_nice colors_ok_pref
       -- make the decision
       chooseColor
        -- everyone is happy, yay!
        | not $ isEmptyUniqSet colors_ok_pref_nice
        , c : _ <- filter (\x -> elementOfUniqSet x colors_ok_pref_nice)
                        (nodePreference node)
        = Just c
        -- we've got one of our preferences
        | not $ isEmptyUniqSet colors_ok_pref
        , c : _ <- filter (\x -> elementOfUniqSet x colors_ok_pref)
                        (nodePreference node)
        = Just c
        -- it wasn't a preference, but it was still ok
        | not $ isEmptyUniqSet colors_ok
        , c : _ <- nonDetEltsUFM colors_ok
        -- See Note [Unique Determinism and code generation]
        = Just c
        -- no colors were available for us this time.
        -- looks like we're going around the loop again..
        | otherwise
        = Nothing
   in chooseColor
| olsner/ghc | compiler/utils/GraphColor.hs | bsd-3-clause | 15,333 | 68 | 18 | 5,908 | 2,128 | 1,164 | 964 | 199 | 5 |
{-# LANGUAGE MagicHash #-}
module T14626 where
import GHC.Prim
-- A single strict Bool field; the strictness annotation is the point
-- of this regression test.
data T = MkT !Bool
-- Regression test for Trac #14626: dataToTag# on an already-evaluated
-- strict field.
f v = case v of
  MkT y -> dataToTag# y
-- This should /not/ produce an inner case on the y, thus:
-- f v = case v of
-- MkT y -> case y of z -> dataToTag# z
-- But it was! See Trac #14626 comment:4
| shlevy/ghc | testsuite/tests/codeGen/should_compile/T14626.hs | bsd-3-clause | 308 | 0 | 8 | 92 | 48 | 27 | 21 | 8 | 1 |
{-# LANGUAGE CPP #-}
-----------------------------------------------------------------------------
--
-- Stg to C--: code generation for constructors
--
-- This module provides the support code for StgCmm to deal with with
-- constructors on the RHSs of let(rec)s.
--
-- (c) The University of Glasgow 2004-2006
--
-----------------------------------------------------------------------------
module StgCmmCon (
cgTopRhsCon, buildDynCon, bindConArgs
) where
#include "HsVersions.h"
import StgSyn
import CoreSyn ( AltCon(..) )
import StgCmmMonad
import StgCmmEnv
import StgCmmHeap
import StgCmmLayout
import StgCmmUtils
import StgCmmClosure
import StgCmmProf ( curCCS )
import CmmExpr
import CLabel
import MkGraph
import SMRep
import CostCentre
import Module
import DataCon
import DynFlags
import FastString
import Id
import Literal
import PrelInfo
import Outputable
import Platform
import Util
import Control.Monad
import Data.Char
---------------------------------------------------------------
-- Top-level constructors
---------------------------------------------------------------
-- | Generate a static closure for a top-level constructor application.
-- All args must be literals (statically known), so the closure can be
-- emitted as static data under 'closure_label'.
cgTopRhsCon :: DynFlags
            -> Id -- Name of thing bound to this RHS
            -> DataCon -- Id
            -> [StgArg] -- Args
            -> (CgIdInfo, FCode ())
cgTopRhsCon dflags id con args =
    let id_info = litIdInfo dflags id (mkConLFInfo con) (CmmLabel closure_label)
    in (id_info, gen_code)
  where
   name = idName id
   caffy = idCafInfo id -- any stgArgHasCafRefs args
   closure_label = mkClosureLabel name caffy
   gen_code =
     do { this_mod <- getModuleName
        ; when (platformOS (targetPlatform dflags) == OSMinGW32) $
              -- Windows DLLs have a problem with static cross-DLL refs.
              ASSERT( not (isDllConApp dflags this_mod con args) ) return ()
        ; ASSERT( args `lengthIs` dataConRepRepArity con ) return ()
        -- LAY IT OUT
        ; let
            (tot_wds, -- #ptr_wds + #nonptr_wds
             ptr_wds, -- #ptr_wds
             nv_args_w_offsets) = mkVirtConstrOffsets dflags (addArgReps args)
            nonptr_wds = tot_wds - ptr_wds
            -- we're not really going to emit an info table, so having
            -- to make a CmmInfoTable is a bit overkill, but mkStaticClosureFields
            -- needs to poke around inside it.
            info_tbl = mkDataConInfoTable dflags con True ptr_wds nonptr_wds
            get_lit (arg, _offset) = do { CmmLit lit <- getArgAmode arg
                                        ; return lit }
        ; payload <- mapM get_lit nv_args_w_offsets
            -- NB1: nv_args_w_offsets is sorted into ptrs then non-ptrs
            -- NB2: all the amodes should be Lits!
        ; let closure_rep = mkStaticClosureFields
                     dflags
                     info_tbl
                     dontCareCCS -- Because it's static data
                     caffy -- Has CAF refs
                     payload
        -- BUILD THE OBJECT
        ; emitDataLits closure_label closure_rep
        ; return () }
---------------------------------------------------------------
-- Lay out and allocate non-top-level constructors
---------------------------------------------------------------
-- | Allocate a dynamic constructor closure; fetches DynFlags and the
-- target platform, then defers to 'buildDynCon''.
buildDynCon :: Id -- Name of the thing to which this constr will
                  -- be bound
            -> Bool -- is it genuinely bound to that name, or just for profiling?
            -> CostCentreStack -- Where to grab cost centre from;
                               -- current CCS if currentOrSubsumedCCS
            -> DataCon -- The data constructor
            -> [StgArg] -- Its args
            -> FCode (CgIdInfo, FCode CmmAGraph)
               -- Return details about how to find it and initialization code
buildDynCon binder actually_bound cc con args
    = do dflags <- getDynFlags
         buildDynCon' dflags (targetPlatform dflags) binder actually_bound cc con args
buildDynCon' :: DynFlags
             -> Platform
             -> Id -> Bool
             -> CostCentreStack
             -> DataCon
             -> [StgArg]
             -> FCode (CgIdInfo, FCode CmmAGraph)
{- We used to pass a boolean indicating whether all the
args were of size zero, so we could use a static
constructor; but I concluded that it just isn't worth it.
Now that I/O uses unboxed tuples there just aren't any constructors
with all size-zero args.
The reason for having a separate argument, rather than looking at
the addr modes of the args is that we may be in a "knot", and
premature looking at the args will cause the compiler to black-hole!
-}
-------- buildDynCon': Nullary constructors --------------
-- First we deal with the case of zero-arity constructors. They
-- will probably be unfolded, so we don't expect to see this case much,
-- if at all, but it does no harm, and sets the scene for characters.
--
-- In the case of zero-arity constructors, or, more accurately, those
-- which have exclusively size-zero (VoidRep) args, we generate no code
-- at all.
buildDynCon' dflags _ binder _ _cc con []
  = return (litIdInfo dflags binder (mkConLFInfo con)
                (CmmLabel (mkClosureLabel (dataConName con) (idCafInfo binder))),
            return mkNop)
-------- buildDynCon': Charlike and Intlike constructors -----------
{- The following three paragraphs about @Char@-like and @Int@-like
closures are obsolete, but I don't understand the details well enough
to properly word them, sorry. I've changed the treatment of @Char@s to
be analogous to @Int@s: only a subset is preallocated, because @Char@
has now 31 bits. Only literals are handled here. -- Qrczak
Now for @Char@-like closures. We generate an assignment of the
address of the closure to a temporary. It would be possible simply to
generate no code, and record the addressing mode in the environment,
but we'd have to be careful if the argument wasn't a constant --- so
for simplicity we just always assign to a temporary.
Last special case: @Int@-like closures. We only special-case the
situation in which the argument is a literal in the range
@mIN_INTLIKE@..@mAX_INTLIKE@. NB: for @Char@-like closures we can
work with any old argument, but for @Int@-like ones the argument has
to be a literal. Reason: @Char@ like closures have an argument type
which is guaranteed in range.
Because of this, we can safely return an addressing mode.
We don't support this optimisation when compiling into Windows DLLs yet
because they don't support cross package data references well.
-}
-- Int literal in the preallocated range: point into the static
-- stg_INTLIKE array instead of allocating.
buildDynCon' dflags platform binder _ _cc con [arg]
  | maybeIntLikeCon con
  , platformOS platform /= OSMinGW32 || not (gopt Opt_PIC dflags)
  , StgLitArg (MachInt val) <- arg
  , val <= fromIntegral (mAX_INTLIKE dflags) -- Comparisons at type Integer!
  , val >= fromIntegral (mIN_INTLIKE dflags) -- ...ditto...
  = do { let intlike_lbl = mkCmmClosureLabel rtsPackageKey (fsLit "stg_INTLIKE")
             val_int = fromIntegral val :: Int
             offsetW = (val_int - mIN_INTLIKE dflags) * (fixedHdrSizeW dflags + 1)
             -- INTLIKE closures consist of a header and one word payload
             intlike_amode = cmmLabelOffW dflags intlike_lbl offsetW
       ; return ( litIdInfo dflags binder (mkConLFInfo con) intlike_amode
                , return mkNop) }
-- Char literal in the preallocated range: point into stg_CHARLIKE.
buildDynCon' dflags platform binder _ _cc con [arg]
  | maybeCharLikeCon con
  , platformOS platform /= OSMinGW32 || not (gopt Opt_PIC dflags)
  , StgLitArg (MachChar val) <- arg
  , let val_int = ord val :: Int
  , val_int <= mAX_CHARLIKE dflags
  , val_int >= mIN_CHARLIKE dflags
  = do { let charlike_lbl = mkCmmClosureLabel rtsPackageKey (fsLit "stg_CHARLIKE")
             offsetW = (val_int - mIN_CHARLIKE dflags) * (fixedHdrSizeW dflags + 1)
             -- CHARLIKE closures consist of a header and one word payload
             charlike_amode = cmmLabelOffW dflags charlike_lbl offsetW
       ; return ( litIdInfo dflags binder (mkConLFInfo con) charlike_amode
                , return mkNop) }
-------- buildDynCon': the general case -----------
-- Heap-allocate the closure and emit initialisation code.
buildDynCon' dflags _ binder actually_bound ccs con args
  = do { (id_info, reg) <- rhsIdInfo binder lf_info
       ; return (id_info, gen_code reg)
       }
 where
  lf_info = mkConLFInfo con
  gen_code reg
    = do { let (tot_wds, ptr_wds, args_w_offsets)
                  = mkVirtConstrOffsets dflags (addArgReps args)
               -- No void args in args_w_offsets
               nonptr_wds = tot_wds - ptr_wds
               info_tbl = mkDataConInfoTable dflags con False
                               ptr_wds nonptr_wds
         ; let ticky_name | actually_bound = Just binder
                          | otherwise = Nothing
         ; hp_plus_n <- allocDynClosure ticky_name info_tbl lf_info
                                          use_cc blame_cc args_w_offsets
         ; return (mkRhsInit dflags reg lf_info hp_plus_n) }
    where
      use_cc -- cost-centre to stick in the object
        | isCurrentCCS ccs = curCCS
        | otherwise = panic "buildDynCon: non-current CCS not implemented"
      blame_cc = use_cc -- cost-centre on which to blame the alloc (same)
---------------------------------------------------------------
-- Binding constructor arguments
---------------------------------------------------------------
bindConArgs :: AltCon -> LocalReg -> [Id] -> FCode [LocalReg]
-- bindConArgs is called from cgAlt of a case
-- (bindConArgs con args) augments the environment with bindings for the
-- binders args, assuming that we have just returned from a 'case' which
-- found a con
bindConArgs (DataAlt con) base args
  = ASSERT(not (isUnboxedTupleCon con))
    do dflags <- getDynFlags
       let (_, _, args_w_offsets) = mkVirtConstrOffsets dflags (addIdReps args)
           tag = tagForCon dflags con
           -- The binding below forces the masking out of the tag bits
           -- when accessing the constructor field.
           bind_arg :: (NonVoid Id, VirtualHpOffset) -> FCode LocalReg
           bind_arg (arg, offset)
             = do emit $ mkTaggedObjectLoad dflags (idToReg dflags arg) base offset tag
                  bindArgToReg arg
       mapM bind_arg args_w_offsets
-- Non-DataAlt alternatives bind nothing.
bindConArgs _other_con _base args
  = ASSERT( null args ) return []
| urbanslug/ghc | compiler/codeGen/StgCmmCon.hs | bsd-3-clause | 10,521 | 0 | 16 | 2,882 | 1,606 | 846 | 760 | -1 | -1 |
{-# LANGUAGE TemplateHaskell #-}
module TH where
import Language.Haskell.TH
-- | A spliceable declaration group defining @f x = x@ (the identity).
decl :: Q [Dec]
decl = [d| f x = x|]
| DavidAlphaFox/ghc | utils/haddock/html-test/src/TH.hs | bsd-3-clause | 116 | 0 | 6 | 23 | 31 | 21 | 10 | 5 | 1 |
module Macro.MacroReplace
(
splitMacroDef,
toText,
findMacro
) where
import Macro.MacroParser(MacroNode(..),toMacro)
import Data.List(nub)
-- | (macro definitions, macro body nodes)
type MacroT = ([MacroNode],[MacroNode])
-- macro definitions, macros
-- | One pass of the split: 'MacroDef' and 'List' nodes go into the
-- first accumulator, everything else (in original order) into the second.
splitMacroDefStep :: [MacroNode] -> ([MacroNode],[MacroNode]) -> ([MacroNode],[MacroNode])
-- macro definitions, macros
splitMacroDefStep [] (x,y) = (x,reverse y)
splitMacroDefStep (x@(MacroDef _ _):xs) (as,bs) = splitMacroDefStep xs (x:as,bs)
splitMacroDefStep (x@(List _ _):xs) (as,bs) = splitMacroDefStep xs (x:as,bs)
splitMacroDefStep (x:xs) (as,bs) = splitMacroDefStep xs (as,x:bs)
-- | Split nodes into (definitions, body), expanding any 'Flag' nodes
-- via 'replaceFlag'; duplicate definitions are removed with 'nub'.
splitMacroDef :: [MacroNode] -> ([MacroNode],[MacroNode])
-- macro definitions, macros
splitMacroDef xs = (nub as,bs)
  where
    (as,bs)=replaceFlag $ splitMacroDefStep xs ([],[])
-- | Recursively expand 'Flag' nodes: the selected branch is itself
-- split into definitions and body, which are merged back into the
-- accumulators.
replaceFlag :: MacroT -> MacroT
replaceFlag (as,Flag f c:bs) =
  (xs++as,ys++bs)
  where
    (ps,qs) = splitMacroDef $ fromFlag f c
    (xs,ys) = replaceFlag (ps++as,qs++bs)
replaceFlag (as,x:bs) =
  (xs,x:ys)
  where
    (xs,ys) = replaceFlag (as,bs)
replaceFlag (as,[]) = (as,[])
-- | Flatten the body to plain 'Text' nodes, resolving 'Macro'
-- references against the definition list, expanding 'Flag' branches
-- and 'Lister' loops as they are encountered.
toText :: MacroT -> [MacroNode]
toText (_,[]) = []
toText (as,Text b:bs) = Text b : toText(as,bs)
toText (as,Macro x:bs)= toText (as,toMacro (findMacro as x)++bs)
toText (as,Flag n c:bs) = let t = fromFlag (findMacro as n) c
                          in toText (as,t++bs)
toText (as,Lister x:bs) = toText (as,listerMake as (m,m,False)++bs)
  where
    m = toMacro x
-- Any other node shape at this stage is a programming error.
toText (_,_) = error "macro,line 38,MacroReplace"
-- | Select the branch whose tag matches @n@; a trailing @Macro "break"@
-- stops fall-through, otherwise evaluation continues with the next
-- branch.  The "@otherwise" tag acts as a catch-all.
-- NOTE(review): 'last ma' and 'head s' are partial — a matching branch
-- with an empty body, or one that falls through past the final branch,
-- would crash.  Confirm callers always end with "@otherwise"/break.
fromFlag :: String -> [(String,String)] -> [MacroNode]
fromFlag _ [] = []
fromFlag n ((m,t):s)
  | n == m = let ma = toMacro t
                 end = last ma
                 (x,_) = head s
             in
               case end of
                 Macro "break" -> init ma
                 _ -> ma ++ fromFlag x s
  | m == "@otherwise" = fromFlag "@otherwise" $ (m,t):s
  | otherwise = fromFlag n s
-- | Look up the body text of a macro definition by name, skipping
-- 'List' entries.  Running off the end of the definitions (or hitting
-- an unexpected node) is a hard error.
findMacro :: [MacroNode] -> String -> String
findMacro nodes name = go nodes
  where
    go (MacroDef defName body : rest)
        | defName == name = body
        | otherwise       = go rest
    go (List _ _ : rest)  = go rest
    go other = error $ "error line 37,MacroReplace "++show other++"__"++ show name++" <END>"
-- | Expand a 'Lister' template: each round, every 'Macro' that names a
-- 'List' pops that list's head; the whole template repeats until some
-- referenced list runs out (the Bool flag records exhaustion).
listerMake :: [MacroNode] -> ([MacroNode],[MacroNode],Bool) -> [MacroNode]
listerMake as (Macro b:bs,cs,x)
  | isList as b = let (as',rt,isNull) = findList as b in toMacro rt ++ listerMake as' (bs,cs,x||isNull)
  | otherwise = Macro b:listerMake as (bs,cs,x)
  where
    isList [] _ = False
    isList (d:ds) c =case d of
      List x _-> x==c ||isList ds c
      _ -> isList ds c
    findList [] _ = error "error line 56,MacroReplace"
    -- Pop the head of the matching list; laziness means 'tail ms' is
    -- never forced when 'bool'' (empty list) short-circuits (||).
    findList (List n ms:ds) c
      | n == c = (List n (if bool then [] else tail ms):ds,if bool' then "" else head ms,bool)
      | otherwise = let (as,rt,isNum) = findList ds c in (List n ms:as,rt,isNum)
      where
        bool' = null ms
        bool = (||) bool' $ null $ tail ms
    findList (d:ds) c = let (ds',rt,x) = findList ds c in (d:ds',rt,x)
listerMake as (Text b:bs,cs,x) =
  Text b:listerMake as (bs,cs,x)
-- Template exhausted: stop if a list ran dry, otherwise go again.
listerMake as ([],bs,x) = if x then [] else listerMake as (bs,bs,x)
listerMake _ _ = error "macro,line 63,MacroReplace"
| Qinka/GiveYouAHead | lib/Macro/MacroReplace.hs | mit | 3,622 | 0 | 13 | 1,211 | 1,630 | 868 | 762 | 70 | 8 |
module Y2016.M10.D17.Exercise where
import Data.Map (Map)
import Data.Set (Set)
import Data.Time
-- below import available from 1HaskellADay git repository
import Control.Scan.CSV
{--
In observing the top 5s of the stock markets, I've started to see some patterns
in securities over time that may be interesting.
Do you see the same things?
Today's Haskell exercise. There is a file of the top5s securities of the stock
markets included here at this directory as top5s.csv (it's not truly csv) or
at the URL:
https://raw.githubusercontent.com/geophf/1HaskellADay/master/exercises/HAD/Y2016/M10/D17/top5s.csv
Today, we're interested in looking at the leaders and the losers in the Price
category over a period of days.
Read in the file, extract the stocks for the prices, losers and leaders, and
answer the below
--}
-- | A ticker symbol, e.g. "AAPL".
type Stock = String
-- | The day's top-5 price movers, split into gainers and decliners.
data Prices = Price { leaders, losers :: Set Stock }
   deriving Show
-- so, read in the price line, remembering the date for it (a few lines up)
-- and then output a map of days -> prices
type Hist = Map Day Prices
-- | Parse the top5s file into a per-day price history. (exercise stub)
readPrices :: FilePath -> IO Hist
readPrices file = undefined
-- Now.
-- what is the distribution of stocks and their appearances on the top5s prices?
type NStocks = Int
type NDays = Int
-- | How many distinct stocks appear, over how many days. (exercise stub)
dist :: Hist -> (NStocks, NDays)
dist priceHistory = undefined
-- What stocks show up on the leaders one day and the losers the next day?
-- | Stocks that lead one day and lose the next trading day. (exercise stub)
leadThenLose :: Hist -> [Stock]
leadThenLose priceHistory = undefined
-- Of course, you need a 'next trading day' defined, because Saturdays, Sundays
-- and holidays (some of them) are not traded, therefore not recorded
-- | The next recorded day after 'today' in the history. (exercise stub)
nextTradingDay :: Hist -> Day -> Day
nextTradingDay hist today = undefined
-- Opposite question: which stocks show up on the losers bracket then the winners?
loseThenLead :: Hist -> [Stock]
loseThenLead hist = undefined
-- what is the frequency of appearance of these flippin' stocks?
-- | How many times the given stock appears in the history. (exercise stub)
showings :: Hist -> Stock -> Int
showings hist stock = undefined
-- that is to say, does a leadThenLose show up frequently on the top5s list?
-- How about a loseThenLead stock?
| geophf/1HaskellADay | exercises/HAD/Y2016/M10/D17/Exercise.hs | mit | 2,086 | 0 | 9 | 372 | 233 | 139 | 94 | 23 | 1 |
module Problem4 where
-- | Project Euler #4: print the largest palindromic product of two
-- three-digit numbers.  'y' only ranges up to 'x' so each unordered
-- pair is considered once.
main :: IO ()
main = print largest
  where
    largest = maximum
        [ p
        | x <- [999,998..1]
        , y <- [x,x-1..1]
        , let p = x * y
        , palidrome (digitsRev p)
        ]
-- | Decimal digits, most significant first; 0 yields the empty list.
digits :: Integral n => n -> [n]
digits n = reverse (digitsRev n)

-- | Decimal digits, least significant first; 0 yields the empty list.
-- Does not terminate for negative inputs (e.g. (-1) `div` 10 == -1).
digitsRev :: Integral n => n -> [n]
digitsRev n
    | n == 0    = []
    | otherwise = n `rem` 10 : digitsRev (n `div` 10)
-- | True when the list reads the same forwards and backwards.
-- (The misspelled name is kept — callers in this module use it.)
palidrome :: (Eq a) => [a] -> Bool
palidrome xs = xs == reverse xs
| DevJac/haskell-project-euler | src/Problem4.hs | mit | 434 | 0 | 10 | 149 | 209 | 113 | 96 | 12 | 1 |
module Text.Noise.Compiler.Error
( FunctionError(..)
, CompileError(..)
) where
import qualified Data.List as List
import qualified Text.Noise.Parser.AST as AST
import qualified Text.Noise.Error as Error
import Text.Noise.SourceRange (HasSourceRange, rangeInSource)
-- | Errors arising while evaluating a single function call; these are
-- wrapped in 'FunctionCallError' so the call site can be reported.
data FunctionError = MissingArgumentError String
                   | ArgumentTypeError String
                   | BlockStatementTypeError
                   | TooManyArgumentsError
                   | CompileError CompileError
                   | RedundantKeywordArgError String
                   deriving (Show, Eq)
-- | Top-level compilation errors; each constructor carries enough AST
-- to locate the offending source span (see the 'HasSourceRange' instance).
data CompileError = FunctionCallError AST.QualifiedIdentifier FunctionError
                  | UndefinedFunctionError AST.QualifiedIdentifier
                  | ExpressionStatementTypeError AST.Expression
                  | PositionalArgumentError AST.Argument
                  | DuplicatedArgumentPrototypeError AST.ArgumentPrototype
                  | DuplicatedKeywordArgumentError AST.Argument
                  | OperandTypeError AST.Expression
                  deriving (Show, Eq)
instance HasSourceRange CompileError where
  rangeInSource err = case err of
    DuplicatedArgumentPrototypeError arg -> rangeInSource arg
    ExpressionStatementTypeError fnCall -> rangeInSource fnCall
    DuplicatedKeywordArgumentError arg -> rangeInSource arg
    UndefinedFunctionError identifier -> rangeInSource identifier
    PositionalArgumentError arg -> rangeInSource arg
    -- A nested CompileError reports the inner error's own range...
    FunctionCallError _ (CompileError err') -> rangeInSource err'
    -- ...any other function error points at the call itself.
    FunctionCallError fnCall _ -> rangeInSource fnCall
    OperandTypeError expression -> rangeInSource expression
instance Error.Error CompileError where
  -- Render a human-readable, one-sentence message for each error.
  message err =
    let showDotSyntax (AST.QualifiedIdentifier path _) = List.intercalate "." path
        showArgPrototypeName (AST.RequiredArgumentPrototype name _) = name
        showArgName (AST.KeywordArgument keyword _ _) = keyword
        showArgName _ = "" -- non-keyword arguments have no name to show
    in case err of
      UndefinedFunctionError identifier ->
        "Undefined function \"" ++ showDotSyntax identifier ++ "\"."
      ExpressionStatementTypeError _ ->
        "Top-level expression is not an element."
      PositionalArgumentError _ ->
        "Positional argument follows a keyword argument."
      DuplicatedArgumentPrototypeError arg ->
        "Duplicate argument \"" ++ showArgPrototypeName arg ++ "\" in function definition."
      DuplicatedKeywordArgumentError arg ->
        "Duplicate keyword argument \"" ++ showArgName arg ++ "\" in function call."
      OperandTypeError _ ->
        "Operand is not a number."
      FunctionCallError identifier fnCallErr -> case fnCallErr of
        RedundantKeywordArgError keyword ->
          "Keyword argument \"" ++ keyword ++ "\" duplicates a positional argument."
        MissingArgumentError keyword ->
          "Function \"" ++ fnName ++ "\" requires argument \"" ++ keyword ++ "\"."
        ArgumentTypeError keyword ->
          "Argument \"" ++ keyword ++ "\" to function \"" ++ fnName ++ "\" has incorrect type."
        BlockStatementTypeError ->
          "Statement in block of function \"" ++ fnName ++ "\" has incorrect type."
        TooManyArgumentsError ->
          "Too many arguments to function \"" ++ fnName ++ "\"."
        -- Nested errors delegate to their own message.
        CompileError err' ->
          Error.message err'
        where fnName = showDotSyntax identifier
| brow/noise | src/Text/Noise/Compiler/Error.hs | mit | 3,372 | 0 | 17 | 843 | 618 | 314 | 304 | 65 | 0 |
{-# LANGUAGE RankNTypes, FlexibleContexts #-}
-- |This module contains functions that simplify (and hide) closure handling
-- inside of the LuaM monad
module Turnip.Eval.Closure
(getClosure
,closureLookup
,closureLookupEllipsis
,closurePush
,assignmentTarget
)
where
import Turnip.Eval.Types
import Turnip.Eval.Util
import qualified Turnip.AST as AST
import Control.Lens
import qualified Data.Map as Map (lookup)
import Control.Monad.Trans
import Control.Monad.RWS
import Control.Monad.Except
-- |Get the raw closure value (the stack of closure levels carried in the
-- reader part of the LuaM monad).
getClosure :: LuaM Closure
getClosure = LuaMT . lift $ ask
-- |Index the current closure. Falls back to the global table when the
-- name is not bound at any closure level (see 'closureLookupFrom').
closureLookup :: Value -> LuaM Value
closureLookup v = getClosure >>= closureLookupFrom v
-- |Varargs (@...@) visible from the current closure, innermost level first.
closureLookupEllipsis :: LuaM (Maybe [Value])
closureLookupEllipsis = getClosure >>= closureLookupEllipsisFrom
-- |Look a value up in a given closure stack; the list is ordered from the
-- innermost level outwards, so the closest binding wins.
closureLookupFrom :: Value -> Closure -> LuaM Value
-- descend recursively with lookups, picking the closest name first
closureLookupFrom v (top:cls) = do
    topCls <- (^. mapData) <$> getTableData (closureTableRef top)
    case Map.lookup v topCls of
        Just val -> return val
        Nothing -> closureLookupFrom v cls
-- if closure lookup fails, try global lookup
closureLookupFrom v _ = do
    _Gr <- getGlobalTableRef
    getTableField _Gr v
-- | Scan the closure stack, innermost level first, for varargs; yields
-- 'Nothing' when no enclosing level carries any.
closureLookupEllipsisFrom :: Closure -> LuaM (Maybe [Value])
closureLookupEllipsisFrom [] = return Nothing
closureLookupEllipsisFrom (level : outer) =
    maybe (closureLookupEllipsisFrom outer) (return . Just) (closureVarargs level)
-- |Executes the code block one level deeper: the given closure level is
-- pushed onto the reader-held closure stack for the duration of the action.
closurePush :: forall m a. Monad m => ClosureLevel -> LuaMT m a -> LuaMT m a
closurePush t (LuaMT a) = LuaMT $ mapExceptT (withRWST (\cls s -> (t:cls, s))) a
-- this is a simple helper that picks either top level closure or global table
-- |Table an assignment to @name@ should write into: the innermost closure
-- level that already binds the name, or the global table otherwise.
assignmentTarget :: AST.Name -> LuaM TableRef
-- before choosing local closure for assignment, we should first check
-- whether the value doesn't exist in the closure
-- this is essentially the core of lexical scoping, I suppose
assignmentTarget name = do
    cls <- getClosure
    assignmentTargetHelper cls name
-- |Walk the closure stack outwards looking for an existing binding of
-- @name@; an empty stack means the global table is the target.
assignmentTargetHelper :: Closure -> AST.Name -> LuaM TableRef
assignmentTargetHelper [] _ = getGlobalTableRef
assignmentTargetHelper (headCls:restCls) name = do
    t <- (^. mapData) <$> getTableData (closureTableRef headCls)
    case Map.lookup (Str name) t of
        -- if the name appears in the closure, we assign to this one
        (Just _) -> return $ closureTableRef headCls
        -- otherwise we try going down the stack
        Nothing -> assignmentTargetHelper restCls name
| bananu7/Turnip | src/Turnip/Eval/Closure.hs | mit | 2,660 | 0 | 12 | 502 | 616 | 323 | 293 | 49 | 2 |
module Game where
import qualified Control.Monad as Monad
import qualified Helpers
import System.Random
import qualified Table
-- | A two-player, simultaneous-move game described by its dynamics.
data Game state action = Game{
    isTermState :: state -> Bool,                       -- ^ is the state terminal?
    reward :: state -> (Double, Double),                -- ^ payoff for (player1, player2)
    nextState :: state -> (action, action) -> state,    -- ^ transition on a joint move
    getPossibleActions :: state -> ([action],[action]), -- ^ legal moves per player
    startState :: state,
    trainer :: state -> (action, action)
}
-- | Which seat a participant occupies.
data Player = Player1 | Player2 deriving (Show)
-- | A game paired with the seat taken by the human player.
data InteractiveGame state action = InteractiveGame{
    game :: Game state action,
    human :: Player
}
--playAgainst :: InteractiveGame -> IO String
-- playAgainst g q = do
-- putStrLn $ "You are player2 "
-- gen <- newStdGen
-- playGame1 gen g startState q
-- -- Player2 -> playGame2 game g
-- putStrLn "Thanks for playing!"
-- | Play one game interactively: the agent picks its move from table @q@,
-- the human types the index of theirs, until a terminal state is reached.
--
-- NOTE(review): @read playerMove@ and @p2 !! r@ are partial — a non-numeric
-- or out-of-range entry crashes the program; consider readMaybe + bounds
-- checking.
playGameInteractive :: (Show action, Show state, Ord action, Ord state) => StdGen -> Game state action -> state -> Table.Table state action -> IO ()
playGameInteractive gen g s q =
  if not ( isTermState g s ) then
    do
      putStrLn $ "Current state " ++ show s
      let (p1, p2) = getPossibleActions g s
          (myAct, gn') = Helpers.getAction s p1 q gen
      putStrLn $ "My move: " ++ show myAct
      putStrLn $ "Please choose the index (0.."++ show(length p2 - 1) ++") of your chosen action among " ++ show p2
      playerMove <- getLine
      let r = read playerMove :: Int
          act2 = p2 !! r
      playGameInteractive gn' g (nextState g s (myAct, act2)) q
  else
    putStrLn $ getOutcome (reward g s)
-- | Render the result message for a pair of final rewards
-- (player 1's total first, player 2's second).
getOutcome :: (Double, Double) -> String
getOutcome (score1, score2)
  | score1 > score2 = "Player 1 wins!"
  | score1 < score2 = "Player 2 wins!"
  | otherwise       = "Tie!"
| arnabgho/RLearnHaskell | src/Game.hs | mit | 1,717 | 0 | 14 | 439 | 510 | 281 | 229 | 34 | 3 |
module P034Spec where
import qualified P034 as P
import Test.Hspec
main :: IO ()
main = hspec spec

-- | Spec for Project Euler #34: the sum of all numbers equal to the sum
-- of the factorials of their digits.
spec :: Spec
spec = -- do
  -- NOTE: the brute-force variant is commented out because it is slow
  -- describe "solveBasic" $
  --   it "各桁の階乗の和が自分自身と等しくなる数の和" $
  --     P.solveBasic `shouldBe` 40730
  describe "solve" $
    it "各桁の階乗の和が自分自身と等しくなる数の和" $
      P.solve `shouldBe` 40730
| yyotti/euler_haskell | test/P034Spec.hs | mit | 448 | 0 | 8 | 80 | 70 | 42 | 28 | 10 | 1 |
module TigerTranslate
(
Level
, Access
, Frag
, Gexp
, newLevel
, outerMost
, allocInFrame
, eqStr
, notEqStr
, strLessThan
, strLessThanOrEq
, eqCmp
, notEqCmp
, lessThan
, lessThanOrEq
, arithmetic
, intExp
, stringExp
, constructEseq
, letExpression
, assign
, createRecord
, createArray
, field
, subscript
, simpleVar
, ifThen
, ifThenElse
, whileLoop
, forLoop
, TigerTranslate.break
, callFunction
, createProcFrag
, createMainFrag
, reset
, getResult
, nilGexp
) where
import TigerITree
import TigerSemTr
import qualified TigerTemp as Tmp
import qualified TigerFrame as Frame
import qualified TigerAbsyn as Absyn
import qualified TigerRegisters as Reg
import Control.Monad.State
import Data.Bits
import Data.IORef
import Prelude hiding (EQ, LT, GT)
-- | The three flavours of translated expression (Appel-style):
-- 'Ex' a value-producing expression, 'Nx' a statement with no value, and
-- 'Cx' a conditional given as a function of its true and false labels.
data Gexp = Ex Exp
          | Nx Stm
          | Cx (Tmp.Label -> Tmp.Label -> Stm)
               -- True label False label
-- | The outermost (library/global) level; it owns no frame.
outerMost :: Level
outerMost = TOP
-- | Translation of Tiger @nil@: the constant 0, flagged as a non-pointer.
nilGexp :: Gexp
nilGexp = Ex $ CONST 0 False
-- | Create a nesting level under @parent@ for a function with the given
-- formals. One extra slot is allocated for the static link (the first
-- offset returned by the frame); the remaining offsets are paired with the
-- formals, all rebased by the parameter base offset.
newLevel :: Level -> [a] -> SemTr (Level, [(a, Access)])
newLevel parent formals =
  do let numformals = length formals
     (frame, (slOffset:offsets)) <- liftIO $ Frame.newFrame (numformals+1)
     lvlUniq <- genUniq
     let lvl = LEVEL { levelFrame=frame
                     , staticLinkOffset=slOffset + Reg.parambaseoffset
                     , levelParent=parent
                     , levelUniq=lvlUniq }
     let offsets' = map (\off -> (lvl, off+Reg.parambaseoffset)) offsets
     let formalsAndOffsets = zip formals offsets'
     return (lvl, formalsAndOffsets)
-- | Allocate a local slot (pointer-tracked when @isptr@) in the frame of
-- the given level; allocating at TOP is a compiler bug.
allocInFrame :: Bool -> Level -> IO Access
allocInFrame isptr lvl@(LEVEL { levelFrame=lvlframe }) =
  do offset <- Frame.allocLocalInFrame isptr lvlframe
     return (lvl, offset + Reg.localbaseoffset)
allocInFrame _ TOP = error "Compiler error: cannot alloc local in TOP Level frame"
-- | Right-fold a non-empty statement list into nested SEQ nodes.
-- Calling it on an empty list is a compiler bug, hence the error.
seqcon :: [Stm] -> Stm
seqcon [] = error "Compiler error: Impossible usage of seqcon"
seqcon [s] = s
seqcon (s:rest) = SEQ(s, seqcon rest)
-- | Coerce any 'Gexp' to a value expression. A conditional is lowered to
-- "r := 1; branch; F: r := 0; T:" so r holds 1 on the true path and 0 on
-- the false path; a statement is sequenced before a dummy CONST 0.
unEx :: Gexp -> SemTr (Exp)
unEx (Ex e) = return e
unEx (Cx genstm) = do let isrptr = False
                      r <- newTemp isrptr
                      t <- newLabel
                      f <- newLabel
                      return $ ESEQ(seqcon
                                    [ MOVE(TEMP r isrptr, CONST 1 False)
                                    , genstm t f
                                    , LABEL f
                                    , MOVE(TEMP r isrptr, CONST 0 False)
                                    , LABEL t
                                    ], TEMP r isrptr)
unEx (Nx s) = return $ ESEQ(s, CONST 0 False)
-- | Coerce a 'Gexp' to a branch generator. Constant 1/0 become
-- unconditional jumps; a general expression branches on /= 0. 'Nx' has no
-- truth value, so it is a compiler bug here.
unCx :: Gexp -> SemTr (Tmp.Label -> Tmp.Label -> Stm)
unCx (Ex (CONST 1 _)) = return $ (\t -> \_ -> JUMP(NAME(t), [t]))
unCx (Ex (CONST 0 _)) = return $ (\_ -> \f -> JUMP(NAME(f), [f]))
unCx (Ex e) = return $ (\t -> \f -> CJUMP(TEST(NE, e, CONST 0 False), t, f))
unCx (Cx genstm) = return genstm
unCx (Nx _) = error "Compiler error: Impossible usage of unCx"
-- | Coerce a 'Gexp' to a statement, discarding any value. A 'Cx' is first
-- materialised through 'unEx' and then wrapped.
unNx :: Gexp -> SemTr (Stm)
unNx (Ex e) = return $ EXP e
unNx (Nx stm) = return stm
unNx c = do e <- unEx c
            unNx $ Ex e
-- Cx-constructed expression comparing two strings for equality
-- | String equality via the runtime routine @stringEqual@.
eqStr :: Gexp -> Gexp -> SemTr Gexp
eqStr str1 str2 =
  do str1' <- unEx str1
     str2' <- unEx str2
     funclabel <- namedLabel "stringEqual"
     retlab <- newRetLabel
     return $ Ex $ CALL (NAME funclabel, [str1', str2']) False retlab
-- | Negation of 'eqStr': branch to the true label when the runtime
-- comparison returned 0. (The Ex pattern is safe: 'eqStr' always builds Ex.)
notEqStr :: Gexp -> Gexp -> SemTr Gexp
notEqStr str1 str2 =
  do (Ex eqstr) <- eqStr str1 str2
     return $ Cx $ \t ->
                   \f -> CJUMP(TEST (EQ, eqstr, CONST 0 False), t, f)
-- | String ordering via the runtime routine @stringLessThan@.
strLessThan :: Gexp -> Gexp -> SemTr Gexp
strLessThan str1 str2 =
  do str1' <- unEx str1
     str2' <- unEx str2
     funclabel <- namedLabel "stringLessThan"
     retlab <- newRetLabel
     return $ Ex $ CALL (NAME funclabel, [str1', str2']) False retlab
-- | s1 <= s2, composed as: if equal then true, otherwise fall through to
-- the less-than test (lt == 0 means not-less, hence jump to false).
strLessThanOrEq :: Gexp -> Gexp -> SemTr Gexp
strLessThanOrEq str1 str2 =
  do lab <- newLabel
     Ex eq <- eqStr str1 str2
     Ex lt <- strLessThan str1 str2
     return $ Cx $ \t ->
                   \f -> seqcon [ CJUMP (TEST (EQ, eq, CONST 0 False), lab, t)
                                , LABEL lab
                                , CJUMP (TEST (EQ, lt, CONST 0 False), f, t)]
-- Comparing non-string values
-- | Equality of two non-string values, as a conditional.
eqCmp :: Gexp -> Gexp -> SemTr Gexp
eqCmp g1 g2 =
  do g1' <- unEx g1
     g2' <- unEx g2
     return $ Cx $ \t ->
                   \f -> CJUMP(TEST (EQ, g1', g2'), t, f)
-- | Inequality of two non-string values, as a conditional.
notEqCmp :: Gexp -> Gexp -> SemTr Gexp
notEqCmp g1 g2 =
  do g1' <- unEx g1
     g2' <- unEx g2
     return $ Cx $ \t ->
                   \f -> CJUMP(TEST (NE, g1', g2'), t, f)
-- | Strict less-than on non-string values.
lessThan :: Gexp -> Gexp -> SemTr Gexp
lessThan g1 g2 =
  do g1' <- unEx g1
     g2' <- unEx g2
     return $ Cx $ \t ->
                   \f -> CJUMP(TEST (LT, g1', g2'), t, f)
-- | Less-than-or-equal on non-string values.
lessThanOrEq :: Gexp -> Gexp -> SemTr Gexp
lessThanOrEq g1 g2 =
  do g1' <- unEx g1
     g2' <- unEx g2
     return $ Cx $ \t ->
                   \f -> CJUMP(TEST (LE, g1', g2'), t, f)
-- Arithmetic
-- | Translate a binary arithmetic/logical operator. The result is flagged
-- as a pointer when exactly one operand is one (pointer +/- int stays a
-- pointer), hence the 'xor' of the operands' pointer flags.
arithmetic :: Absyn.Oper -> Gexp -> Gexp -> SemTr Gexp
arithmetic op g1 g2 =
  do g1' <- unEx g1
     g2' <- unEx g2
     return $ Ex $ BINOP (transop op, g1', g2') (isExpPtr g1' `xor` isExpPtr g2')
  where transop Absyn.PlusOp = PLUS
        transop Absyn.MinusOp = MINUS
        transop Absyn.TimesOp = MUL
        transop Absyn.DivideOp = DIV
        transop Absyn.AndOp = AND
        transop Absyn.OrOp = OR
        -- comparison operators are handled elsewhere (eqCmp, lessThan, ...)
        transop o = error $ "Compiler error : " ++ show o ++ "not implemented yet"
-- Literal
-- | An integer literal: a plain non-pointer constant.
intExp :: Int -> SemTr Gexp
intExp val = return $ Ex $ CONST val False
-- | A string literal: emit a DATA fragment and refer to it by label.
stringExp :: String -> SemTr Gexp
stringExp str =
  do lab <- newLabel
     let frag = Frame.DATA lab str
     createDataFrag frag
     return $ Ex $ NAME lab
-- Helper sequence function
-- | Run @stm@ for effect, then evaluate @e@ for its value.
constructEseq :: Gexp -> Gexp -> SemTr Gexp
constructEseq stm e =
  do stm' <- unNx stm
     exp' <- unEx e
     return $ Ex $ ESEQ (stm', exp')
-- | A @let@: execute the declarations in order, then the body; with no
-- declarations the body passes through untouched.
letExpression :: [Gexp] -> Gexp -> SemTr Gexp
letExpression [] body = return body
letExpression decs body =
  do decs' <- mapM unNx decs
     body' <- unEx body
     return $ Ex $ ESEQ (seqcon decs', body')
-- Assignment
-- | Store the value of @assgnval@ into the l-value @var@; no result.
assign :: Gexp -> Gexp -> SemTr Gexp
assign var assgnval =
  do var' <- unEx var
     assgnval' <- unEx assgnval
     return $ Nx $ MOVE (var', assgnval')
-- Record and Array creation
-- | Build a record: emit a GC descriptor ('P'/'N' per field), call the
-- runtime @allocRecord@, then initialise each field.
--
-- NOTE(review): fields are stored at 1-based offsets (idx * 4) while the
-- allocation size is 4 * #fields — whether @allocRecord@ reserves a header
-- slot cannot be confirmed from this module.
createRecord :: [(Gexp,Bool)] -> SemTr Gexp
createRecord fieldvarsAndIsPtrs =
  do let (fieldvars, isptrs) = unzip fieldvarsAndIsPtrs
     let bool2descriptor True = 'P'
         bool2descriptor False = 'N'
     let descriptors = map bool2descriptor isptrs
     descriptorlab <- newLabel
     let descriptorFrag = Frame.GCDESCREC descriptorlab descriptors
     createDescFrag descriptorFrag
     let isaptr = True
     address <- newTemp isaptr
     allocfunlab <- namedLabel "allocRecord"
     retlab <- newRetLabel
     let alloc = MOVE(TEMP address isaptr, CALL (NAME allocfunlab, [CONST (4 * length fieldvars) False, NAME descriptorlab]) True retlab)
     let idxs = [1..length fieldvars]
     instrs <- mapM (uncurry $ initfield address isptrs) $ zip fieldvars idxs
     return $ Ex $ ESEQ(seqcon $ alloc:instrs, TEMP address isaptr)
  where initfield address isptrs fieldvar idx = do
          fieldvar' <- unEx fieldvar
          let isbaseptr = True
          let baseaddr = TEMP address isbaseptr
          -- BUG FIX: idx is 1-based while isptrs is a 0-based list, so the
          -- original (isptrs!!idx) read the NEXT field's flag and crashed
          -- with an out-of-range index on the last field.
          let addr = MEM (BINOP(PLUS, baseaddr, CONST (idx * 4) False) True, 4) (isptrs !! (idx - 1))
          return $ MOVE (addr, fieldvar')
-- | Build an array: emit a GC descriptor recording whether the elements
-- are pointers, then call the runtime @allocArray@ with size, initial
-- value and descriptor label.
createArray :: Gexp -> Gexp -> SemTr Gexp
createArray sizexp initexp =
  do sizexp' <- unEx sizexp
     initexp' <- unEx initexp
     allocarrfun <- namedLabel "allocArray"
     retlab <- newRetLabel
     desclab <- newLabel
     let descriptors = Frame.GCDESCARR desclab $ isExpPtr initexp'
     createDescFrag descriptors
     return $ Ex $ CALL (NAME allocarrfun, [sizexp', initexp', NAME desclab]) True retlab
-- Variable access
-- | Address of record field number @fieldnum@, computed by the runtime
-- routine @field@ (which can also perform the nil check); the MEM node
-- carries the field's pointer flag.
field :: Gexp -> Int -> Bool -> SemTr Gexp
field recordge fieldnum isptr =
  do fieldfunlab <- namedLabel "field"
     recordge' <- unEx recordge
     retlab <- newRetLabel
     return $ Ex $ MEM(CALL(NAME fieldfunlab, [recordge', CONST (4*fieldnum) False]) True retlab, 4) isptr
-- | Array element access via the runtime routine @subscript@ (which can
-- perform the bounds check).
subscript :: Gexp -> Gexp -> Bool -> SemTr Gexp
subscript arrge idxge isptr =
  do arrge' <- unEx arrge
     idxge' <- unEx idxge
     subscriptfunlab <- namedLabel "subscript"
     retlab <- newRetLabel
     return $ Ex $ MEM(CALL(NAME subscriptfunlab, [arrge', idxge']) True retlab, 4) isptr
-- | A simple variable: chase static links from the current frame pointer
-- (EBP) up to the level the variable was declared in, then index the frame.
simpleVar :: Access -> Level -> Bool -> SemTr Gexp
simpleVar (varlevel, offset) fromLevel isptr =
  return $ Ex $ accessFrameOff offset (frameAtLevel varlevel fromLevel $ TEMP (Tmp.Named Reg.EBP) False) isptr
-- | Expression for the frame pointer of @destlvl@, starting from
-- @startlvlptr@ (the frame pointer of @startlvl@) and dereferencing one
-- static link per intervening nesting level.
frameAtLevel :: Level -> Level -> Exp -> Exp
frameAtLevel destlvl startlvl startlvlptr =
  if destlvl == startlvl
  then startlvlptr
  else case startlvl of
         TOP -> error "Functions from TOP level should not access static links"
         LEVEL{staticLinkOffset=offset, levelParent=parent} -> frameAtLevel destlvl parent $ accessFrameOff offset startlvlptr False
-- | Word-sized memory access at @frameptr + offset@; the remaining Bool
-- argument (taken point-free via MEM) is the slot's pointer flag.
accessFrameOff :: Int -> Exp -> Bool -> Exp
accessFrameOff offset frameptr = MEM(BINOP (PLUS, frameptr, CONST offset False) True, 4)
-- Conditional and loops
-- | @if-then@ with no else branch: branch, run the body at the true
-- label, fall through to the false label either way. Produces no value.
ifThen :: Gexp -> Gexp -> SemTr Gexp
ifThen testge thenge =
  do testge' <- unCx testge
     thenge' <- unNx thenge
     t <- newLabel
     f <- newLabel
     return $ Nx $ seqcon [(testge' t f), LABEL t, thenge', LABEL f]
-- | @if-then-else@. When both branches are statements no result temp is
-- needed; otherwise both branches move their value into a fresh temp
-- (flagged as pointer per @ispointer@) and join at a common label.
ifThenElse :: Gexp -> Gexp -> Gexp -> Bool -> SemTr Gexp
ifThenElse testge (Nx thenstm) (Nx elsestm) _ =
  do testge' <- unCx testge
     t <- newLabel
     f <- newLabel
     j <- newLabel
     return $ Nx $ seqcon [ testge' t f
                          , LABEL t
                          , thenstm
                          , JUMP (NAME j, [j])
                          , LABEL f
                          , elsestm
                          , LABEL j]
ifThenElse testge thenge elsege ispointer =
  do testge' <- unCx testge
     t <- newLabel
     f <- newLabel
     j <- newLabel
     r <- newTemp ispointer
     thenge' <- unEx thenge
     elsege' <- unEx elsege
     return $ Ex $ ESEQ ( seqcon [
                            testge' t f
                          , LABEL t
                          , MOVE (TEMP r ispointer, thenge')
                          , JUMP (NAME j, [j])
                          , LABEL f
                          , MOVE (TEMP r ispointer, elsege')
                          , LABEL j
                          ]
                        , TEMP r ispointer)
-- | @while@ loop: test at the top, jump back after the body. @donelab@ is
-- supplied by the caller so that @break@ inside the body can target it.
whileLoop :: Gexp -> Gexp -> Tmp.Label -> SemTr Gexp
whileLoop testge bodyge donelab =
  do testge' <- unCx testge
     bodyge' <- unNx bodyge
     testlab <- newLabel
     bodylab <- newLabel
     return $ Nx $ seqcon [ LABEL testlab
                          , testge' bodylab donelab
                          , LABEL bodylab
                          , bodyge'
                          , JUMP (NAME testlab, [testlab])
                          , LABEL donelab ]
-- | @for@ loop. The limit is copied into a temp once; the post-body test
-- uses strict LT before incrementing, so the iterator is never bumped past
-- the limit (this avoids wrap-around when the limit is the maximum value).
forLoop :: Gexp -> Gexp -> Gexp -> Tmp.Label -> Gexp -> SemTr Gexp
forLoop loge hige bodyge donelab iteratorge =
  do loge' <- unEx loge
     hige' <- unEx hige
     bodyge' <- unNx bodyge
     iteratorge' <- unEx iteratorge
     limit <- newTemp False
     bodylab <- newLabel
     inclab <- newLabel
     return $ Nx $ seqcon [ MOVE(iteratorge', loge')
                          , MOVE(TEMP limit False, hige')
                          , CJUMP (TEST (LE, iteratorge', TEMP limit False), bodylab, donelab)
                          , LABEL bodylab
                          , bodyge'
                          , CJUMP (TEST(LT, iteratorge', TEMP limit False), inclab, donelab)
                          , LABEL inclab
                          , MOVE(iteratorge', BINOP(PLUS, iteratorge', CONST 1 False) False)
                          , JUMP(NAME bodylab, [bodylab])
                          , LABEL donelab
                          ]
-- | @break@: an unconditional jump to the enclosing loop's done label.
break :: Tmp.Label -> SemTr Gexp
break lab =
  return $ Nx $ JUMP(NAME lab, [lab])
-- | Record in the caller's frame the largest outgoing-argument count seen
-- so far (used later to size the frame's outgoing-argument area).
updateMaxArgs :: Level -> Int -> SemTr ()
updateMaxArgs callerLvl numArgs =
  case callerLvl of
    TOP -> error "Compiler error: updateMaxArgs called with TOP level."
    LEVEL lvlframe _ _ _ -> case lvlframe of
      Frame.Frame {Frame.frameMaxArgs=maxargref} -> do
        maxargs <- liftIO $ readIORef maxargref
        when (numArgs > maxargs) $ liftIO $ writeIORef maxargref numArgs
-- | Translate a call. TOP-level (runtime/library) callees receive the
-- arguments as-is; nested Tiger functions additionally receive a static
-- link — the frame pointer of the callee's parent level — as a hidden
-- first argument, which also bumps the caller's max-args count by one.
callFunction :: Tmp.Label -> Level -> Level -> [Gexp] -> Bool -> SemTr Gexp
callFunction funclab callerlvl calleelvl argsge isptr =
  do argsge' <- mapM unEx argsge
     if calleelvl == TOP
        then do updateMaxArgs callerlvl $ length argsge'
                retlab <- newRetLabel
                return $ Ex $ CALL (NAME funclab, argsge') isptr retlab
        else do updateMaxArgs callerlvl $ 1+length argsge'
                let calleeparent = levelParent calleelvl
                let staticlinkexp = frameAtLevel calleeparent callerlvl $ TEMP (Tmp.Named Reg.EBP) False
                retlab <- newRetLabel
                return $ Ex $ CALL (NAME funclab, staticlinkexp:argsge') isptr retlab
-- | If the body is a bare expression, move its value through a temp into
-- the return register (EAX); statement bodies pass through unchanged.
wrapFuncBody :: Stm -> Bool -> SemTr Stm
wrapFuncBody (EXP bodyexp) isptr =
  do temp <- newTemp isptr
     return $ seqcon [ MOVE (TEMP temp isptr, bodyexp)
                     , MOVE (TEMP (Tmp.Named Reg.EAX) isptr, TEMP temp isptr) ]
wrapFuncBody body _ = return body
-- | Package a translated function body as a PROC fragment (body wrapped so
-- its value, if any, lands in the return register) and prepend it to the
-- accumulated fragment list.
createProcFrag :: Tmp.Label -> Level -> Gexp -> Bool -> SemTr ()
createProcFrag proclab level bodyge returnsptr =
  do bodyge' <- unNx bodyge
     wrappedbody <- wrapFuncBody bodyge' returnsptr
     let procfrag = Frame.PROC { Frame.procName = proclab
                               , Frame.procBody = wrappedbody
                               , Frame.procFrame = levelFrame level }
     frags <- getFragList
     putFragList $ procfrag:frags
-- | The program entry point, emitted under the label @tigermain@.
createMainFrag :: Level -> Gexp -> SemTr ()
createMainFrag lvl bodyge =
  do mainlab <- namedLabel "tigermain"
     createProcFrag mainlab lvl bodyge False
-- | Prepend a string DATA fragment to the fragment list; any other
-- fragment kind here is a compiler bug.
createDataFrag :: Frag -> SemTr ()
createDataFrag f@(Frame.DATA _ _) = do
  frags <- getFragList
  putFragList $ f:frags
createDataFrag _ = error "Compiler error: createDataFrag called with non-DATA fragment."
-- | Prepend a GC descriptor fragment (record or array form) to the
-- fragment list; any other fragment kind here is a compiler bug.
createDescFrag :: Frag -> SemTr ()
createDescFrag f@(Frame.GCDESCREC _ _) = do
  frags <- getFragList
  putFragList $ f:frags
createDescFrag f@(Frame.GCDESCARR _ _) = do
  frags <- getFragList
  putFragList $ f:frags
createDescFrag _ = error "Compiler error: createDescFrag called with non-DESC fragment."
-- | Clear the accumulated fragment list.
reset :: SemTr ()
reset = putFragList []
-- | All fragments accumulated so far (most recently created first).
getResult :: SemTr [Frag]
getResult = getFragList
| hengchu/tiger-haskell | src/tigertranslate.hs | mit | 14,939 | 0 | 18 | 4,754 | 5,146 | 2,565 | 2,581 | 375 | 7 |
{-# OPTIONS_GHC -fno-warn-orphans #-}
module ModelOrphans where
import ClassyPrelude
import Control.Lens (view, re)
import Data.Proxy (Proxy(Proxy))
import Database.Persist
import Database.Persist.Sql
import Slack
import Web.HttpApiData
( FromHttpApiData(parseUrlPiece, parseHeader, parseQueryParam)
, ToHttpApiData(toUrlPiece, toHeader, toQueryParam) )
import Web.PathPieces (PathPiece(fromPathPiece, toPathPiece))
-- MessageSubtype is stored as its textual name, backed by the custom
-- "message_subtype" database type.
instance PersistField MessageSubtype where
  toPersistValue = toPersistValue . messageSubtypeToText
  fromPersistValue = fromPersistValue >=> messageSubtypeFromText
instance PersistFieldSql MessageSubtype where
  sqlType _ = SqlOther "message_subtype"
-- Slack timestamps (TS) round-trip through their Text form; parsing goes
-- through the validating 'ts' smart constructor in every direction.
instance PersistField TS where
  toPersistValue = toPersistValue . unTS
  fromPersistValue = fromPersistValue >=> ts
instance PersistFieldSql TS where
  sqlType _ = sqlType (Proxy :: Proxy Text)
instance PathPiece TS where
  fromPathPiece = either (const Nothing) Just . ts
  toPathPiece = unTS
instance FromHttpApiData TS where
  parseUrlPiece = ts
  parseHeader = ts . decodeUtf8
  parseQueryParam = ts
instance ToHttpApiData TS where
  toUrlPiece = unTS
  toHeader = encodeUtf8 . unTS
  toQueryParam = unTS
-- Slack IDs are persisted via the _ID iso; the remaining web instances
-- are newtype-derived from the underlying representation.
instance PersistField (ID a) where
  toPersistValue = toPersistValue . view _ID
  fromPersistValue = map (view $ re _ID) . fromPersistValue
instance PersistFieldSql (ID a) where
  sqlType _ = sqlType (Proxy :: Proxy Text)
deriving instance PathPiece (ID a)
deriving instance FromHttpApiData (ID a)
deriving instance ToHttpApiData (ID a)
| Dridus/alexandria | server/ModelOrphans.hs | mit | 1,534 | 0 | 10 | 232 | 404 | 225 | 179 | -1 | -1 |
module Main where
import qualified Data.Vector as Vec
import qualified Data.Set as Set
import System.Random
import Data.Char
import Data.List
import Data.Maybe
import System.Console.ANSI
import System.Exit
import Text.Read
-- | The eight neighbour directions of a grid cell, in the fixed order the
-- solver probes them (order matters: 'findWord' returns the first hit).
offsets :: [(Int, Int)]
offsets = [(1, 0), (1, -1), (0, -1), (-1, -1), (-1, 0), (-1, 1), (0, 1), (1, 1)]
-- | Full game state for one Boggle session.
data Board = Board {gameGrid :: Vec.Vector Char     -- ^ letters, row-major
                   ,foundWords :: Set.Set String    -- ^ words already claimed
                   ,allWords :: Set.Set String      -- ^ words discoverable on this grid
                   ,players :: Vec.Vector Player
                   ,wordFinder :: WordFinder
                   ,width :: Int
                   ,height :: Int}
             deriving (Eq, Show)
-- | The dictionary used to validate words.
newtype WordFinder = WordFinder {knownWords :: Set.Set String}
                     deriving(Eq, Show)
-- | A player's running score; ordered by points so Vec.maxIndex works.
newtype Player = Player {points :: Int}
                 deriving(Eq, Show)
instance Ord Player where
  (Player p1) <= (Player p2) = p1 <= p2
-- | Random square board of uppercase letters; regenerates until the grid
-- contains at least one dictionary word.
newBasicBoard :: Int -> Int -> IO Board
newBasicBoard size playerNum = do
  g <- newStdGen
  let grid = take (size * size) (randomRs ('A', 'Z') g)
  wf <- newWordFinder
  let board = findAllWords (Board (Vec.fromList grid) Set.empty Set.empty (Vec.replicate playerNum (Player 0)) wf size size)
  if Set.size (allWords board) == 0 then
    newBasicBoard size playerNum
  else
    return board
-- | Board built from a caller-supplied grid string.
-- NOTE(review): when the supplied grid contains no words this falls back
-- to a fresh *random* board rather than failing — presumably intentional
-- (retrying the same grid would loop forever), but worth confirming.
newWordBoard :: String -> Int -> Int -> IO Board
newWordBoard grid size playerNum = do
  g <- newStdGen
  wf <- newWordFinder
  let board = findAllWords (Board (Vec.fromList grid) Set.empty Set.empty (Vec.replicate playerNum (Player 0)) wf size size)
  if Set.size (allWords board) == 0 then
    newBasicBoard size playerNum
  else
    return board
-- | Load the dictionary from @word_list.txt@ (one word per whitespace-
-- separated token) into a set for fast membership/prefix queries.
newWordFinder :: IO WordFinder
newWordFinder = do
  file <- readFile "word_list.txt"
  return (WordFinder (Set.fromList (words file)))
-- | Populate 'allWords' with one candidate word per starting cell.
-- 'findWord' yields Nothing for cells from which no unfound dictionary
-- word can be traced; 'catMaybes' keeps the hits (this replaces the
-- original partial @map fromJust . filter (/=Nothing)@ pipeline).
findAllWords :: Board -> Board
findAllWords board = board {allWords = Set.fromList found}
  where
    found = catMaybes [ findWord (x, y) board
                      | x <- [0 .. width board - 1]
                      , y <- [0 .. height board - 1] ]
-- | Depth-first search from one cell for a dictionary word of length >= 4
-- that has not been claimed yet; cells may not be revisited on a path.
-- The local 'traverse' deliberately shadows Prelude.traverse.
findWord :: (Int, Int) -> Board -> Maybe String
findWord (sx, sy) board = foundString
  where
    (foundString, _) = traverse (sx, sy) "" Set.empty
    traverse :: (Int, Int) -> String -> Set.Set (Int, Int) -> (Maybe String, Set.Set (Int, Int))
    traverse (x, y) word traversed
      | x < 0 || x >= width board || y < 0 || y >= height board = (Nothing, traversed)
      | Set.member (x, y) traversed = (Nothing, traversed)
      -- prefix pruning: lookupGE finds the smallest word >= newWord; if even
      -- that is not an extension of newWord, no dictionary word starts here
      | not (isGoodWordPart (Set.lookupGE newWord (knownWords wf))) = (Nothing, traversed)
      | isAWord && isNotFound && len >= 4 = (Just newWord, newPath)
      | otherwise = checkNeighbor offsets
      where
        wf = wordFinder board
        c = gameGrid board Vec.! (x + y * width board)
        newWord = word ++ [toLower c]
        len = length newWord
        isAWord = Set.member newWord (knownWords wf)
        isNotFound = not (Set.member newWord (foundWords board))
        newPath = Set.insert (x, y) traversed
        isGoodWordPart (Just s) = newWord `isPrefixOf` s
        isGoodWordPart Nothing = False
        -- try each neighbour in turn; first successful extension wins
        checkNeighbor :: [(Int, Int)] -> (Maybe String, Set.Set (Int, Int))
        checkNeighbor [] = (Nothing, traversed)
        checkNeighbor ((ox, oy):offs) =
          case traverse (x + ox, y + oy) newWord newPath of
            (Just w, t) -> (Just w, t)
            (Nothing, t) -> checkNeighbor offs
-- | Boggle scoring: 4-6 letters score (length - 3) points, seven letters
-- score 5, eight score 11, nine or more score double the length, and
-- anything shorter than four letters scores nothing.
computePoints :: String -> Int
computePoints word
  | len < 4   = 0
  | len <= 6  = len - 3
  | len == 7  = 5
  | len == 8  = 11
  | otherwise = len * 2
  where
    len = length word
-- | Lower-case an entire string.
--
-- BUG FIX: the previous version lowered only the first character, so a
-- word typed in capitals (e.g. "TEST") could never match the all-lowercase
-- dictionary entries that 'findWord' produces.
toLowStr :: String -> String
toLowStr = map toLower
-- | Entry point: interactive multi-player Boggle on a random grid.
-- Flow: read grid size and player count, generate a board containing at
-- least one word, let each player enter words in turn, print the results.
main :: IO ()
main = do
  setTitle "I AM BOGGLE"
  setCursorPosition 0 0
  clearScreen
  putStrLn "########################"
  putStrLn "######I AM BOGGLE!######"
  putStrLn "########################\n"
  putStrLn "General commands:"
  putStrLn ":q -- quit the game"
  putStrLn ":end -- end player's turn\n"
  putStrLn "How big a grid do you want to create?"
  -- Prompt until the user enters a number >= lBound; ':q' aborts the game.
  let prompt :: String -> Int -> IO Int
      prompt msg lBound = do
        putStr msg
        val <- getLine
        case val of
          ":q" -> exitSuccess
          _ -> case readMaybe val of
                 Just v -> if v < lBound
                             then putStrLn "That's a tad too small" >> prompt msg lBound
                             else return v
                 _ -> putStrLn "Invalid number." >> prompt msg lBound
  gridSize <- prompt "Enter the size of the grid (at least 2): " 2
  playerNumber <- prompt "Enter player number (at least 1): " 1
  putStrLn "Loading a board with some words in it"
  board <- newBasicBoard gridSize playerNumber
  -- Render the letter grid, one space-separated row per line.
  -- (The local name 'lines' shadows Prelude.lines; rows are sliced with
  -- Vec.drop (i * h), which assumes the square grids newBasicBoard builds.)
  let printBoard :: Board -> IO ()
      printBoard brd = do
        let w = width brd
            h = height brd
            grid = gameGrid brd
            spacefy [] = [' ']
            spacefy (c:str) = ' ' : c : spacefy str
            lines = [spacefy (Vec.toList (Vec.take w s)) | i <- [0..(w - 1)], let s = Vec.drop (i * h) grid]
        putStrLn $ unlines lines
  -- One player's turn: read words until ':end' (next player) or ':q'.
  let gameLoop :: Board -> String -> Int -> IO Board
      gameLoop brd status currentPlayer = do
        setCursorPosition 0 0
        clearScreen
        printBoard brd
        let player = players brd Vec.! currentPlayer
        putStrLn ("Player " ++ show currentPlayer)
        putStrLn ("Points: " ++ show (points player))
        putStrLn status
        putStrLn "Enter a word you find on the grid (:end to end turn or :q to quit): "
        word <- getLine
        case word of
          ":end" -> if currentPlayer == (Vec.length (players brd) - 1)
                      then return brd
                      else gameLoop brd "# Round ended! Next player #" (currentPlayer + 1)
          ":q" -> putStrLn "Exiting" >> exitSuccess
          _ ->
            -- BUG FIX: this test was 'length word <= 4', which rejected
            -- valid four-letter words even though findWord and
            -- computePoints both accept words of length 4.
            if length word < 4
              then gameLoop brd "# The word must be at least 4 characters long in order to award points. #" currentPlayer
              else if Set.member (toLowStr word) (allWords brd)
                then if Set.member (toLowStr word) (foundWords brd)
                  then gameLoop brd "# This word has already been found and thus gives no points. #" currentPlayer
                  else do
                    let dp = computePoints word
                        plr = player {points = points player + dp}
                        brd' = brd { players = players brd Vec.// [(currentPlayer, plr)]
                                   , foundWords = Set.insert (toLowStr word) (foundWords brd)}
                    gameLoop brd' ("# Found \"" ++ word ++ "\"! Awarded " ++ show dp ++ " point(s). #") currentPlayer
                else gameLoop brd "# I don't see this word in the grid #" currentPlayer
  endBoard <- gameLoop board "" 0
  setCursorPosition 0 0
  clearScreen
  putStrLn "#########################"
  putStrLn "#########RESULTS#########"
  putStrLn "#########################\n"
  putStrLn "All words in this grid:"
  putStrLn (unlines (Set.toList (allWords endBoard)))
  putStrLn "\nPlayer points:"
  putStrLn $ concatMap (\ (i, Player pts) -> "Player " ++ show i ++ " -- " ++ show pts ++ " pts\n") (Vec.toList (Vec.indexed (players endBoard)))
  let winnerIndex = Vec.maxIndex (players endBoard)
      winnerPoints = points (players endBoard Vec.! winnerIndex)
  -- A draw only when everyone ties and there is more than one player.
  if Vec.all (\(Player pts) -> pts == winnerPoints) (players endBoard) && Vec.length (players endBoard) /= 1
    then putStrLn "Draw"
    else putStrLn ("Player " ++ show winnerIndex ++ " wins")
  putStrLn "Press any key to quit"
  _ <- getChar
  return ()
| nikoheikkila/boggle | Boggle.hs | mit | 8,070 | 0 | 27 | 2,656 | 2,662 | 1,333 | 1,329 | 162 | 12 |
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.ByteString.Lazy.Char8 as C
import Hakyll
import Hakyll.Web.Sass (sassCompiler)
import Text.Jasmine
import Text.Pandoc.Options (HTMLMathMethod (MathJax),
WriterOptions,
writerHTMLMathMethod, writerHtml5)
--------------------------------------------------------------------------------
-- | Hakyll build rules for the whole site: static assets, compressed
-- JS/CSS, standalone pages, tagged posts, generated listing pages and the
-- RSS/Atom feeds.
main :: IO ()
main = hakyllWith config $ do
    -- static assets are copied through unchanged
    match ("static/fonts/*" .||. "static/images/*") $ do
        route idRoute
        compile copyFileCompiler
    -- compress js
    match "static/js/*" $ do
        route idRoute
        compile compressJsCompiler
    -- Sass entry point, compiled and minified to index.css
    match "static/css/index.scss" $ do
        route $ setExtension "css"
        let compressCssItem = fmap compressCss
        compile (fmap compressCssItem sassCompiler)
    -- render pages
    match "pages/*.md" $ do
        route $ composeRoutes (gsubRoute "pages/" (const ""))
                              (setExtension "html")
        compile $ pandocCompilerWith defaultHakyllReaderOptions writerOptions
            >>= finish defaultContext
    tags <- buildTags "posts/*" (fromCapture "tags/*.html")
    -- posts: full body snapshot plus a teaser cut at the MORE marker
    match "posts/*" $ do
        route $ setExtension "html"
        compile $ do
            let drop = fmap (unlines . takeWhile (/= "<!-- MORE -->") . lines)
            compiled <- pandocCompilerWith defaultHakyllReaderOptions writerOptions
            saveSnapshot "content" compiled
            saveSnapshot "teaser" $ drop compiled
            loadAndApplyTemplate "templates/post.html" (postCtxWithTags tags) compiled
                >>= finish (postCtxWithTags tags)
    create ["about.html"] $ do
        route idRoute
        compile $ do
            let ctx = mconcat
                    [ constField "title" "About"
                    , defaultContext
                    ]
            makeItem ""
                >>= loadAndApplyTemplate "templates/about.html" ctx
                >>= finish ctx
    -- archive: all posts newest first, plus a tag cloud
    create ["archive.html"] $ do
        route idRoute
        compile $ do
            posts <- recentFirst =<< loadAll "posts/*"
            let archiveCtx = mconcat
                    [ listField "posts" postCtx (return posts)
                    , tagCloudField "tagcloud" 80 200 tags
                    , constField "title" "Archives"
                    , defaultContext
                    ]
            makeItem ""
                >>= loadAndApplyTemplate "templates/archive.html" archiveCtx
                >>= finish archiveCtx
    -- one listing page per tag
    tagsRules tags $ \tag pattern -> do
        let title = "Posts tagged \"" ++ tag ++ "\""
        route idRoute
        compile $ do
            posts <- recentFirst =<< loadAll pattern
            let tagCtx = mconcat
                    [ constField "title" title
                    , listField "posts" postCtx (return posts)
                    , defaultContext
                    ]
            makeItem ""
                >>= loadAndApplyTemplate "templates/tag.html" tagCtx
                >>= finish tagCtx
    create ["cv.html"] $ do
        route idRoute
        compile $ do
            let ctx = mconcat
                    [ constField "title" "Curriculum Vitae"
                    , defaultContext
                    ]
            makeItem ""
                >>= loadAndApplyTemplate "templates/cv.html" ctx
                >>= finish ctx
    -- front page: three most recent post teasers and the tag cloud.
    -- (fmap const ... 3 uses the function functor to ignore the item
    -- argument that 'field' supplies.)
    create ["index.html"] $ do
        route idRoute
        compile $ do
            let indexCtx = mconcat
                    [ field "posts" $ fmap const (recentPostList tags) 3
                    , tagCloudField "tagcloud" 80 200 tags
                    , defaultContext
                    ]
                ctx = mconcat
                    [ constField "title" "Home"
                    , defaultContext
                    ]
            makeItem ""
                >>= loadAndApplyTemplate "templates/index.html" indexCtx
                >>= finish ctx
    -- feeds: latest five posts, full content as the description
    create ["rss.xml"] $ do
        route idRoute
        compile $ do
            let rssCtx = mconcat
                    [ bodyField "description"
                    , defaultContext
                    ]
            posts <- fmap (take 5) . recentFirst
                =<< loadAllSnapshots "posts/*" "content"
            renderRss (feedConfiguration "All Posts") rssCtx posts
    create ["atom.xml"] $ do
        route idRoute
        compile $ do
            let atomCtx = mconcat
                    [ bodyField "description"
                    , defaultContext
                    ]
            posts <- fmap (take 5) . recentFirst
                =<< loadAllSnapshots "posts/*" "content"
            renderAtom (feedConfiguration "All Posts") atomCtx posts
    match "templates/*" $ compile templateBodyCompiler
--------------------------------------------------------------------------------
-- | Template context for a single post: publication and modification
-- dates, the post body, and the default fields.
postCtx :: Context String
postCtx = mconcat
    [ dateField "date" "%B %e, %Y"
    , modificationTimeField "mtime" "%B %e, %Y"
    , bodyField "content"
    , defaultContext
    ]
-- | Wrap an item in the site-wide default template and relativize URLs;
-- the final step of every page pipeline.
finish :: Context String -> Item String -> Compiler (Item String)
finish ctx item = tpl ctx item >>= relativizeUrls
  where
    tpl = loadAndApplyTemplate "templates/default.html"
-- | 'postCtx' extended with the post's tag links.
postCtxWithTags :: Tags -> Context String
postCtxWithTags tags = mconcat
    [ tagsField "tags" tags
    , postCtx
    ]
-- | Render the @n@ most recent post teasers with the teaser template.
recentPostList :: Tags -> Int -> Compiler String
recentPostList tags n = do
    posts <- loadAllSnapshots "posts/*" "teaser" >>= recentFirst
    itemTpl <- loadBody "templates/post-teaser.html"
    applyTemplateList itemTpl (postCtxWithTags tags) $ take n posts
-- | Minify a JavaScript resource with hjsmin ('Text.Jasmine.minify'),
-- going through lazy ByteString and back.
compressJsCompiler :: Compiler (Item String)
compressJsCompiler = do
    let minifyJS = C.unpack . minify . C.pack . itemBody
    s <- getResourceString
    return $ itemSetBody (minifyJS s) s
-- | Feed metadata shared by the RSS and Atom outputs; @title@ is appended
-- to the site-owner prefix.
feedConfiguration :: String -> FeedConfiguration
feedConfiguration title = FeedConfiguration
    { feedTitle = "Jared Rickert - " ++ title
    , feedDescription = "Personal blog of Jared Rickert"
    , feedAuthorName = "Jared Rickert"
    , feedAuthorEmail = "jaredrickert52@gmail.com"
    , feedRoot = "http://jlrickert.me/"
    }
-- Add options to the pandoc compiler
-- | Pandoc writer settings: render math with MathJax and emit HTML5.
writerOptions :: WriterOptions
writerOptions = defaultHakyllWriterOptions
    { writerHTMLMathMethod = MathJax "http://cdn.mathjax.org/mathjax/latest/MathJax.js"
    , writerHtml5 = True
    }
--------------------------------------------------------------------------------
-- | Hakyll configuration: deployment is delegated to the local script.
config :: Configuration
config = defaultConfiguration
    { deployCommand = "./deploy.sh"
    }
| jlrickert/jlrickert.github.io | site.hs | mit | 7,185 | 0 | 23 | 2,755 | 1,444 | 681 | 763 | 146 | 1 |
-----------------------------------------------------------------------------
-- |
-- Module : Data.Peano
-- Copyright : (c) Andrés Sicard-Ramírez 2009-2014
-- License : See the file LICENSE.
--
-- Maintainer : Andrés Sicard-Ramírez <asr@eafit.edu.co>
-- Stability : experimental
--
-- Peano numbers.
-----------------------------------------------------------------------------
{-# LANGUAGE UnicodeSyntax #-}
module Data.Peano
( int2nat
, PeanoNat(Z, S)
, nat2int
)
where
import Test.QuickCheck
( Arbitrary(arbitrary)
, arbitrarySizedNatural
)
-----------------------------------------------------------------------------
-- Auxiliary functions
-- | Apply a function to both components of a homogeneous pair.
mapTuple :: (a -> b) -> (a, a) -> (b, b)
mapTuple f (x, y) = (f x, f y)
-----------------------------------------------------------------------------
-- From http://byorgey.wordpress.com/2010/11/:
--
-- Note that the auto-derived Ord instance have exactly the right
-- behavior due to the fact that we happened to list the Z constructor
-- first.
-- | Peano natural numbers. The derived 'Ord' compares by magnitude
-- because 'Z' is listed first (see the note above).
data PeanoNat = Z | S PeanoNat
              deriving (Eq, Ord)
-- | Interpret a Peano numeral as an 'Integer'.
nat2integer ∷ PeanoNat → Integer
nat2integer = go 0
  where
    go acc Z     = acc
    go acc (S n) = go (acc + 1) n
-- | Interpret a Peano numeral as an 'Int'.
-- No overflow check: very large numerals wrap like ordinary 'Int'
-- addition.
nat2int ∷ PeanoNat → Int
nat2int Z = 0
nat2int (S n) = 1 + nat2int n
-- | Convert a non-negative 'Int' to a Peano numeral.
-- Calls 'error' on negative input.
int2nat ∷ Int → PeanoNat
-- Fixed the error message: it previously said "int2Nat", which does not
-- match the actual function name.
int2nat n | n < 0 = error "int2nat: negative argument"
int2nat 0 = Z
int2nat n = S $ int2nat (n - 1)
-- | Convert a non-negative 'Integer' to a Peano numeral.
-- Calls 'error' on negative input.
integer2nat ∷ Integer → PeanoNat
-- Fixed the error message: it previously said "integer2Nat", which does
-- not match the actual function name.
integer2nat n | n < 0 = error "integer2nat: negative argument"
integer2nat 0 = Z
integer2nat n = S $ integer2nat (n - 1)
-- Adapted from http://byorgey.wordpress.com/2010/11/.
-- | Truncated ("monus") arithmetic on naturals: subtraction bottoms out
-- at 'Z' instead of going negative, and 'negate' is the identity.
instance Num PeanoNat where
  Z   + n = n
  S m + n = S (m + n)

  Z   * _ = Z
  S m * n = n + m * n

  -- Truncated subtraction: @m - n = Z@ whenever @n >= m@.
  m   - Z   = m
  Z   - S _ = Z
  S m - S n = m - n

  abs n = n

  -- In the @Integral@ class, @div@ is defined via @divMod@ which uses
  -- @negate@. See
  -- https://downloads.haskell.org/~ghc/7.8.4/docs/html/libraries/base-4.7.0.2/src/GHC-Real.html#div
  negate n = n

  signum Z     = 0
  signum (S _) = 1

  fromInteger 0 = Z
  fromInteger n = if n < 0
                  then error "fromInteger: negative value"
                  else S (fromInteger (n - 1))
-- | Via 'Integer', so the rational is always integral.
instance Real PeanoNat where
  toRational = toRational . nat2integer
-- | Enumeration goes through 'Int'; 'toEnum' rejects negative values.
instance Enum PeanoNat where
  fromEnum = fromEnum . nat2int
  toEnum n
    | n > 0     = S (toEnum (n - 1))
    | n == 0    = Z
    | otherwise = error "toEnum: negative value"
-- | Division is delegated to 'Integer' and converted back.
instance Integral PeanoNat where
  quotRem m n = mapTuple integer2nat $ quotRem (nat2integer m) (nat2integer n)

  -- TODO (07 July 2014). Why is this definition necessary?
  -- (The default divMod would go through 'negate'; see the Num
  -- instance's note.)
  divMod m n = mapTuple integer2nat $ divMod (nat2integer m) (nat2integer n)

  toInteger = nat2integer
-- | Shown as the corresponding decimal 'Integer'.
instance Show PeanoNat where
  show = show . nat2integer
-- | Sized generation of naturals (via 'fromInteger' from the 'Num'
-- instance); no custom shrinking is defined.
instance Arbitrary PeanoNat where
  arbitrary = arbitrarySizedNatural
| asr/peano | src/Data/Peano.hs | mit | 2,918 | 0 | 11 | 670 | 744 | 391 | 353 | 61 | 1 |
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Control.Monad (forever, when)
import qualified Data.ByteString.Char8 as C
import Data.Semigroup ((<>))
import System.Environment
import System.Exit
import System.Random
import System.ZMQ4.Monadic
-- | Connect the socket to the named IPC endpoint of a peer broker and
-- log which endpoint we are connecting to.
connectPeer :: Socket z t -> String -> String -> ZMQ z ()
connectPeer sock name peer = do
    let endpoint = connectString peer name
    connect sock endpoint
    liftIO (putStrLn ("Connecting to peer: " ++ endpoint))
-- | IPC endpoint string for a broker: @ipc:\/\/\<peer\>-\<name\>.ipc@.
connectString :: String -> String -> String
connectString peer name = concat ["ipc://", peer, "-", name, ".ipc"]
-- | Broker entry point (zguide "peering1"): binds a PUB socket for our
-- own state, subscribes to every peer's state endpoint, then loops:
-- print incoming peer status, and when idle for a second broadcast a
-- random "free workers" count of our own.
main :: IO ()
main = do
    args <- getArgs
    when (length args < 2) $ do
        putStrLn "Usage: peering1 <me> <you> [<you> ...]"
        exitFailure
    -- Safe after the length check above: args has at least two entries.
    let self:peers = args
    putStrLn $ "Preparing broker at " ++ self
    runZMQ $ do
        -- Bind state backend to endpoint
        stateBack <- socket Pub
        bind stateBack (connectString self "state")

        -- Connect state frontend to peers
        stateFront <- socket Sub
        subscribe stateFront ""  -- empty prefix: receive everything
        mapM_ (connectPeer stateFront "state") peers

        -- Send status, collect status
        forever $ do
            let pollItem = Sock stateFront [In] (Just pollEvent)
                -- Print "<peer> <n> workers free" for each state message.
                pollEvent _ = do
                    peerName:available:_ <- receiveMulti stateFront
                    liftIO . C.putStrLn $
                        peerName <> " " <> available <> " workers free"
            pollEvents <- poll oneSec [pollItem]
            -- No events within the timeout: publish our own random state.
            when (pollEvents == [[]]) $ do
                r <- liftIO $ randomRIO (0, 9)
                sendMulti stateBack [C.pack self, C.pack (show (r :: Int))]
  where
    oneSec = 1000  -- poll timeout in milliseconds
| soscpd/bee | root/tests/zguide/examples/Haskell/peering1.hs | mit | 1,807 | 0 | 23 | 588 | 506 | 251 | 255 | 41 | 1 |
module Paradox.Flatten where
{-
Paradox -- Copyright (c) 2003-2007, Koen Claessen, Niklas Sorensson
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-}
import Form
import Name
import Paradox.Instantiate
import qualified Data.Set as S
import Data.Map( Map )
import qualified Data.Map as M
import Data.List ( (\\), groupBy, sort, minimumBy, partition )
-------------------------------------------------------------------------
-- macify
-- | Turn a clause set into (1) per-type domain-size information from
-- the constant "cliques" found, (2) flattened clauses, and (3)
-- quantified totality clauses for the function symbols.
macify :: [Clause] -> ([(Type,Int)], [Clause], [QClause])
macify cs = ([(t,length ts)|(t,ts) <- cliques], flattenedCs, functionCs)
 where
  -- Clauses forcing each clique constant to equal its assigned domain
  -- element and differ from all earlier elements, followed by the
  -- flattened-and-split definitional and core clauses.
  flattenedCs =
    [ c'
    | (c, i) <- assigns
    , let d i = Fun (elt i) []
    , c' <- [ Pos (c :=: d i) ]
          : [ [ Neg (c :=: d j) ]
            | j <- [1..i-1]
            ]
    ] ++
    splitAll (concatMap (flatten assignCons) (defCs ++ coreCs))

  (ds, defCs, coreCs) =
    definitions cs

  -- One "the function has a value" clause per function symbol that was
  -- not already assigned a concrete domain element.
  functionCs =
    [ Uniq (Bind y
        ( [ Pos (t :=: Var y) ]
        ))
    | f <- S.toList fs ++ ds
    , let n = arity f
          tps :-> tp = typing f
          xs = [ Var (var tp i) | (i,tp) <- [1..n] `zip` tps ]
          y = var tp 0
          t = Fun f xs
    , M.lookup t assignCons == Nothing
    ]

  -- Constant symbols grouped by their type.
  constants =
    map (\g@((t,_):_) -> (t,map snd g)) $
    groupBy (\x y -> fst x == fst y) $
    sort $
    [ (typ t,t)
    | c <- S.toList fs
    , arity c == 0
    , let t = Fun c []
    ]

  -- Unit clauses with at most two free variables; these carry the
  -- (in)equality information used for clique finding.
  units =
    [ lit
    | [lit] <- cs
    , S.size (free lit) <= 2
    ]

  cliques =
    [ (t,findClique cs units)
    | (t,cs) <- constants
    ]

  -- Assign domain element i to the i-th member of each clique.
  assigns =
    [ (t,i)
    | (_,ts) <- cliques
    , (t,i) <- ts `zip` [1..]
    ]

  assignCons =
    M.fromList
    [ (t, Fun (elt i) [])
    | (t,i) <- assigns
    ]

  syms = symbols cs

  -- All non-predicate symbols (result type other than bool).
  fs = S.filter (\f -> case typing f of
                         _ :-> tp -> tp /= bool
                         _ -> False) syms
-- | Given candidate constant terms and unit (in)equality facts, find a
-- large set of constants that are provably pairwise distinct.
findClique :: [Term] -> [Signed Atom] -> [Term]
findClique [] units = []
findClique ts units = S.toList (largestClique S.empty graph)
 where
  cons = S.fromList ts

  -- Ground instances of the positive/negative unit facts obtained by
  -- substituting candidate constants for their (<= 2) free variables.
  (posInfo, negInfo) = foldr gather (S.empty, S.empty) units
   where
    gather (Pos p) (pos, neg) = (pos `S.union` inst p, neg)
    gather (Neg p) (pos, neg) = (pos, neg `S.union` inst p)

    inst p = S.fromList [ norm (subst sub p) | sub <- subs (S.toList (free p)) ]
     where
      -- Orient equations so the smaller side comes first.
      norm (a :=: b) | b < a = b :=: a
      norm p = p

      subs [] = [ids]
      subs (x:xs) = [ (x |=> t) |+| sub | sub <- subs xs, t <- ts ]

  -- Edges between constants known to be different.
  edges =
    [ (a,b)
    | (a,b) <- pairs (S.toList cons)
    , a `notEqual` b
    ]
   where
    a `notEqual` b = a `eqNotEqual` b || a `repNotEqual` b

    a `eqNotEqual` b =
      (a :=: b) `S.member` negInfo

    -- a and b differ if some predicate instance is both asserted and
    -- denied once a is replaced by b.
    a `repNotEqual` b =
      S.size (rep posInfo `S.intersection` rep negInfo) > 0
     where
      rep xs = S.fromList [ p `prd` map (replace a b) ys
                          | Fun p ys :=: t <- S.toList xs
                          , t == truth
                          ]

      -- NOTE(review): no fallthrough for Var arguments when the guard
      -- fails; this relies on the instantiated atoms being ground.
      replace a b a' | a == a' = b
      replace a b (Fun f xs) = Fun f (map (replace a b) xs)

  -- Adjacency map of the distinctness graph; every candidate appears,
  -- possibly with no neighbours.
  graph =
    M.fromListWith S.union $
      concat [ [(a, S.singleton b), (b, S.singleton a)]
             | (a,b) <- edges
             ]
      ++ [ (a, S.empty)
         | a <- ts
         ]

  -- Branch-and-bound search for a largest clique: take the minimum
  -- vertex, compare "clique without it" to "clique through it", and
  -- prune when the current clique cannot be beaten.
  largestClique cl gr
    | M.null gr = cl
    | S.size cl >= 1 + S.size bs
      || S.size cl >= S.size cl' = largestClique cl gr'
    | otherwise = largestClique cl' gr'
   where
    ((a,bs),rest) = M.deleteFindMin gr
    gr' = M.map (a `S.delete`) rest
    cl' = a `S.insert` largestClique S.empty subgr
    subgr = M.fromList [ (x, xs `S.intersection` bs)
                       | x <- S.toList bs
                       , let xs = case M.lookup x gr' of
                                    Just a -> a
                                    Nothing -> error "clique: not in table"
                       ]

  -- All unordered pairs of a list.
  pairs [] = []
  pairs (x:xs) = [ (x,y) | y <- xs ] ++ pairs xs
-------------------------------------------------------------------------
-- definitions
-- | Introduce a fresh function symbol for every "deep" subterm so that
-- clauses become shallow. Returns the new symbols, their defining
-- clauses, and the rewritten input clauses.
definitions :: [Clause] -> ([Symbol], [Clause], [Clause])
definitions cs = ([ f | (_, Fun f _) <- list ], defCs, coreCs)
 where
  -- Non-boolean subterms worth naming, normalized so that
  -- alpha-equivalent terms share one definition.
  deepTerms =
    [ t'
    | t <- S.toList (subterms cs)
    , typ t /= bool
    , let (_,t') = normalize t
    , isOkTerm (t' `S.member` ts) t'
    ]
   where
    ts = topterms cs

  -- Pair each deep term with a fresh defining symbol applied to the
  -- term's free variables.
  list =
    [ (t, Fun (df % i ::: (map typ xs :-> typ t)) xs)
    | (t,i) <- deepTerms `zip` [1..]
    , let xs = map Var (S.toList (free t))
    ]

  defCs =
    [ [Pos (deft :=: Fun f (map replace xs))]
    | (Fun f xs, deft) <- list
    ]

  coreCs =
    [ map replaceLit c
    | c <- cs
    ]

  tab =
    M.fromList list

  -- Canonically rename the variables of a term (CPS style), returning
  -- the substitution together with the renamed term.
  normalize t =
    norm t ids [vr % i | i <- [1..]] (\t' sub _ -> (sub,t'))
   where
    norm (Var v) sub (w:ws) k = k (Var v) (((w ::: typing v) |=> Var v) |+| sub) ws
    norm (Fun f xs) sub ws k =
      norms xs sub ws $ \xs' sub' ws' ->
        k (Fun f xs') sub' ws'

    norms [] sub ws k = k [] sub ws
    norms (t:ts) sub ws k =
      norm t sub ws $ \t' sub' ws' ->
        norms ts sub' ws' $ \ts' sub'' ws'' ->
          k (t':ts') sub'' ws''

  -- A term deserves a definition when it is a function application with
  -- a non-variable argument and its variables are already "connected"
  -- through one of its arguments. (Two alternative conditions are kept
  -- commented out below.)
  isOkTerm top (Var _) = False
  isOkTerm top (Fun _ xs) = any (not . isVar) xs
                         -- && S.size (free xs) <= 1
                         && any (isAlreadyConnected (free xs)) xs
                         -- && (not top || all isGroundTerm xs)
   where
    isVar (Var _) = True
    isVar _ = False

    isAlreadyConnected vs (Var v) = vs `S.isSubsetOf` S.fromList [v]
    isAlreadyConnected vs (Fun _ xs) = vs `S.isSubsetOf` S.fromList [ v | Var v <- xs ]
                                    || any (isAlreadyConnected vs) xs

  -- Terms occurring directly under a predicate or at the top of an
  -- equation; these are already shallow enough.
  topterms [] = S.empty
  topterms (c:cs) = tops c `S.union` topterms cs
   where
    tops ls =
      S.fromList [ t
                 | l <- ls
                 , let a :=: b = the l
                 , t <- case a of
                          Fun p ts | b == truth -> ts
                          _ -> [a,b]
                 ]

  replaceLit (Pos a) = Pos (replaceAtom a)
  replaceLit (Neg a) = Neg (replaceAtom a)

  replaceAtom (a :=: b) = replace a :=: replace b

  -- Replace defined subterms (bottom-up on lookup failure) by their
  -- defining symbol, re-instantiated via the normalizing substitution.
  replace (Var v) = Var v
  replace t@(Fun f xs) =
    case M.lookup t' tab of
      Nothing -> Fun f (map replace xs)
      Just t'' -> subst sub t''
   where
    (sub,t') = normalize t
-------------------------------------------------------------------------
-- flatten
-- | Flatten one clause: give every non-boolean subterm its own
-- variable, add the corresponding definition literals, and run the
-- result through 'simplify'.
flatten :: (Map Term Term) -> Clause -> [Clause]
flatten assignCons ls = simplify (defs ++ flatLs)
 where
  -- (Removed the unused binding @fls = free ls@ — dead code that
  -- GHC -Wall would flag.)

  -- Infinite supply of fresh variable names.
  vs = [ v
       | i <- [1..]
       , let v = vr % i
       ]

  -- Map every non-boolean subterm to a variable; existing variables map
  -- to themselves.
  tab =
    M.fromList
    [ (t,v)
    | (t,fv) <- S.toList (subterms ls) `zip` vs
    , typ t /= bool
    , let v = case t of
                Var v -> v
                _ -> fv ::: V (typ t)
    ]

  -- The flat representative of a term: its assigned domain constant,
  -- the single element of a size-1 type, or its variable from 'tab'.
  var t =
    case M.lookup t assignCons of
      Just ci -> ci
      Nothing
        | tdomain (typ t) == Just 1 -> Fun (elt 1) []
        | otherwise -> Var (case M.lookup t tab of
                              Just t' -> t'
                              Nothing -> error ("flatten: not in var-table: " ++ show t))

  -- Definition literals @f(x1..xk) /= y@ for every named subterm.
  defs =
    [ Neg (Fun f (map var ts) :=: Var v)
    | (t@(Fun f ts), v) <- M.toList tab
    , M.lookup t assignCons == Nothing
    ]

  flatLs =
    [ flat `fmap` l
    | l <- ls
    ]

  -- Predicates keep their symbol over flat arguments; everything else
  -- becomes an equation between flat representatives.
  flat (Fun p ts :=: b) | b == truth =
    p `prd` map var ts
  flat (a :=: b) =
    var a :=: var b
-------------------------------------------------------------------------
-- simplify
-- rules:
-- P | -P | C ==> { } (truth)
-- X = X | C ==> { } (truth)
-- X != X | C ==> { C } (falsity)
-- X != Y | C[X,Y] ==> { C[X,X] } (subst)
-- f(X1..Xk) != Y | Z=Y | C[X,Z] ==> { f(X1..Xk) = Z | C[X,Z] } (subst)
-- | Exhaustively apply the rewrite rules listed above to a flattened
-- clause; returns @[]@ when the clause is trivially true, and at most
-- one simplified clause otherwise.
simplify :: Clause -> [Clause]
simplify = simp
 where
  -- The list monad threads "zero or one clause" through the passes.
  simp c0 =
    [ c4
    | c1 <- trivial c0
    , c2 <- identEq c1
    , c3 <- substVarEq c2
    , c4 <- substFunEq c3
    ]

  -- P | -P | C  ==>  truth
  trivial ls
    | any ((`S.member` S.fromList ls) . negat) ls = []
    | otherwise = [ls]

  -- X = X | C  ==>  truth
  identEq ls =
    case [ ()
         | Pos (s :=: t) <- ls
         , s == t
         ] of
      [] -> [ls]
      _ -> []

  -- X != Y | C  ==>  substitute Y for X; a clash between two distinct
  -- domain elements kills the whole clause.
  substVarEq ls = substVar [] ls
   where
    substVar ls' (Neg (Var v :=: Var w) : ls) =
      simp (subst (v |=> Var w) (ls' ++ ls))
    substVar ls' (Neg (Var v :=: t@(Fun c [])) : ls) | isElt c =
      simp (subst (v |=> t) (ls' ++ ls))
    substVar ls' (Neg ((Fun c1 []) :=: t@(Fun c2 [])) : ls) | isElt c1 && isElt c2 && c1 /= c2 =
      []
    substVar ls' (l:ls) =
      substVar (l:ls') ls
    substVar ls' [] =
      [ls']

  -- f(X1..Xk) != Y | Z=Y | C  ==>  f(X1..Xk) = Z | C
  substFunEq ls = substFun [] ls
   where
    substFun ls' (l@(Neg (t :=: Var v)) : ls)
      | not (v `S.member` free t) =
          substTerm v t [] (ls' ++ ls) (substFun (l:ls') ls)
    substFun ls' (l:ls) =
      substFun (l:ls') ls
    substFun ls' [] =
      [ls']

    -- Try to rewrite positive equations mentioning v; k is the
    -- continuation used when v occurs somewhere we cannot rewrite.
    substTerm v t ls' (Pos (t1 :=: t2) : ls) k
      | leftSubst = substTerm v t (Pos (t :=: t2):ls') ls k
      | rightSubst = substTerm v t (Pos (t :=: t1):ls') ls k
     where
      leftSubst = t1 == Var v && isSmall t2
      rightSubst = t2 == Var v && isSmall t1

      isSmall (Var _) = True
      isSmall (Fun c []) = isElt c
      isSmall _ = False

    substTerm v t ls' (l:ls) k
      | v `S.member` free l = k
      | otherwise = substTerm v t (l:ls') ls k
    substTerm v t ls' [] k =
      simp ls'
-------------------------------------------------------------------------
-- purify
-- | Iteratively remove clauses containing predicates that occur with
-- only one polarity ("pure" predicates), reporting the removed
-- predicate/polarity pairs. Iterates until a fixpoint.
purify :: [Clause] -> ([(Symbol,Bool)],[Clause])
purify cs
  | null ps = (ps,cs')
  | otherwise = (ps'++ps,cs'')
-- | otherwise = ([],cs)
 where
  (ps,cs') = pure M.empty cs
  (ps',cs'') = purify cs'

  -- One pass: tabulate the polarities each predicate occurs with, then
  -- drop every clause mentioning a single-polarity predicate.
  -- (This local 'pure' shadows the Prelude's.)
  pure tab [] =
    ( removePs
    , [ c
      | c <- cs
      , all (not . isRemoveP . the) c
      ]
    )
   where
    removePs =
      -- 'head' is safe here: the guard ensures sgns is a singleton.
      [ (p,head (S.toList sgns))
      | (p,sgns) <- M.toList tab
      , S.size sgns == 1
      ]

    setPs = S.fromList [ p | (p,_) <- removePs ]

    isRemoveP (Fun p _ :=: b) = b == truth && p `S.member` setPs
    isRemoveP _ = False

  pure tab (c:cs) = pure (M.unionWith S.union occurs tab) cs
   where
    posP = S.fromList [ p | Pos (Fun p xs :=: b) <- c, b == truth ]
    negP = S.fromList [ p | Neg (Fun p xs :=: b) <- c, b == truth ]

    -- Predicates occurring with only one polarity in this clause.
    occurs =
      M.fromList $
      [ (pn,S.singleton True) | pn <- S.toList $ posP `S.difference` negP ] ++
      [ (pn,S.singleton False) | pn <- S.toList $ negP `S.difference` posP ]
-------------------------------------------------------------------------
-- split
-- | Split clauses with many variables into several smaller clauses,
-- introducing fresh "connection" predicates, to reduce the number of
-- variables per clause. The counter i numbers the fresh symbols.
splitAll :: [Clause] -> [Clause]
splitAll cs = splitting 1 cs (\_ -> [])
 where
  splitting i [] k = k i
  splitting i (c:cs) k = hyper i connections ls (\i -> splitting i cs k)
   where
    ls = c
    n = S.size (free ls)
    lvs = map free ls

    -- For each variable, the set of variables it co-occurs with in some
    -- literal; kept only when strictly smaller than all others.
    connections =
      [ (v,ws)
      | (v,ws) <-
          M.toList $
          M.fromListWith S.union
          [ (v,S.fromList ws)
          | vs <- lvs
          , (v,ws) <- select (S.toList vs)
          ]
      , S.size ws < n-1
      ]

  -- Hyper-splitting: pick the "cheapest" variable, split the clause
  -- into the literals containing it and the rest, linked by a fresh
  -- predicate over the shared variables.
  hyper i [] ls k = break i ls k
  hyper i cons ls k = break (i+1) (Neg p : left)
                            (\i -> hyper i cons' (Pos p : right) k)
   where
    (v,_) = minimumBy siz cons
    rest = [ x | x@(w,_) <- cons, v /= w ]

    -- Ordering used to choose which variable to split on.
    (v1,s1) `siz` (v2,s2) =
      (S.size s1,tpsize v2,inter s2) `compare` (S.size s2,tpsize v1,inter s1)

    tpsize v = case tdomain (typ (Var v)) of
                 Nothing -> maxBound
                 Just d -> d

    inter s =
      sum [ S.size (s `S.intersection` vs) | (v,vs) <- cons, v `S.member` s ]
     where
      -- NOTE(review): xs below is unused (dead binding).
      xs = S.toList s

    -- Remaining connection candidates, restricted to the right part.
    cons' =
      [ (w,ws')
      | (w,ws) <- rest
      , w `S.member` freeRight
      , let ws' = (ws `S.union` extra) `S.intersection` freeRight
            extra | w `S.member` vs = vs
                  | otherwise = S.empty
      , S.size ws' < S.size freeRight-1
      ]

    vs = free left `S.intersection` freeRight

    p = (sp % i ::: (map typ xs :-> bool)) `prd` xs
    xs = map Var (S.toList vs)

    (left,right) = partition ((v `S.member`) . free) ls
    freeRight = free right

  -- break (shadows Prelude.break): try to split off a non-recursive
  -- definition t /= y whose uses fall into two variable-disjoint
  -- groups, duplicating the definition with a fresh variable for one
  -- group; otherwise emit the clause unchanged.
  break i ls k =
    case [ ls'
         -- only try when number of variables is at least 3
         | S.size (free ls) >= 3
         -- for all "non-recursive" definitions in ls
         , def@(Neg (t@(Fun _ _) :=: Var y)) <- ls
         , not (y `S.member` free t)
         -- gather literals which contain y
         , let (lsWithY, lsWithoutY) =
                 partition ((y `S.member`) . free) (ls \\ [def])
         -- there should be at least 2 such literals ...
         , lWithY:(lsWithY'@(_:_)) <- [lsWithY]
         -- now, partition the literals containing y
         -- into two separate groups, not containing
         -- any overlapping variables except the variables in def
         -- NOTE(review): connToY below is never used (dead binding).
         , let connToY = y `S.delete` free lsWithY
               ok = free def

               (lsLeft, lsRight) =
                 part (free lWithY) [lWithY] [] lsWithY'
                where
                 part vs left right [] = (left, right)
                 part vs left right (l:ls)
                   | S.size ((ws `S.intersection` vs) `S.difference` ok) == 0 =
                       part vs left (l:right) ls
                   | otherwise =
                       part (vs `S.union` ws) (l:left) right ls
                  where
                   ws = free l
         -- they should not all end up on one side
         , not (null lsRight)
         -- construct the new clause with the extra literal
         , let y' = prim "C" % i ::: typing y
               ls' = [ def
                     , Neg (t :=: Var y')
                     ]
                     ++ subst (y |=> Var y') lsLeft
                     ++ lsRight
                     ++ lsWithoutY
         ] of
      -- _ -> set ls : k i
      [] -> ls : k i
      (ls':_) -> splitting (i+1) [ls'] k
-- | All ways of picking one element from a list, each paired with the
-- remaining elements in their original order.
select :: [a] -> [(a,[a])]
select [] = []
select (y:ys) = (y, ys) : map (\(z, zs) -> (z, y:zs)) (select ys)
-------------------------------------------------------------------------
-- the end.
| msakai/folkung | Haskell/Paradox/Flatten.hs | mit | 15,873 | 0 | 25 | 6,160 | 6,202 | 3,246 | 2,956 | 343 | 12 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Ringo.Extractor.Internal where
import qualified Data.Map as Map
import qualified Data.Text as Text
import Prelude.Compat
import Control.Monad.Reader (Reader, asks)
import Data.Function (on)
import Data.Maybe (mapMaybe, fromMaybe, fromJust, catMaybes)
import Data.Monoid ((<>))
import Data.List (nub, nubBy, find)
import Data.Text (Text)
import Ringo.Types.Internal
import Ringo.Utils
-- | Look up a table by name.
findTable :: TableName -> [Table] -> Maybe Table
findTable tName = find (\tab -> tableName tab == tName)
-- | Look up a fact by name.
findFact :: TableName -> [Fact] -> Maybe Fact
findFact fName facts = find ((fName ==) . factName) facts
-- | Look up a column by name.
findColumn :: ColumnName -> [Column] -> Maybe Column
findColumn cName = find ((== cName) . columnName)
-- | Strip the dimension-name prefix (plus underscore) from a fact
-- column name; the name is returned unchanged when the prefix is
-- absent.
dimColumnName :: Text -> ColumnName -> ColumnName
dimColumnName dimName columnName =
  case Text.stripPrefix (dimName <> "_") columnName of
    Just stripped -> stripped
    Nothing       -> columnName
-- | For one dimension table, the (dimension column name, fact table
-- column name) pairs contributed by the fact's DimVal columns that
-- target it.
dimColumnMapping :: Text -> Fact -> TableName -> [(ColumnName, ColumnName)]
dimColumnMapping dimPrefix fact dimTableName =
  [ (dimColumnName factColTargetTable factColTargetColumn, factColTargetColumn)
  | FactColumn { factColType = DimVal {..}, ..} <- factColumns fact
  , dimPrefix <> factColTargetTable == dimTableName ]
-- | Column mappings (see 'dimColumnMapping') for every dimension table
-- referenced by the fact's DimVal columns, deduplicated.
dimColumnMappings :: Text -> Fact -> [(TableName, [(ColumnName, ColumnName)])]
dimColumnMappings dimPrefix fact =
  nub [ (dimTableName, dimColumnMapping dimPrefix fact dimTableName)
      | FactColumn { factColType = DimVal {..}, ..} <- factColumns fact
      , let dimTableName = dimPrefix <> factColTargetTable ]
-- | Column name of the form @\<col\>_\<timeunit\>_\<idcol\>@.
timeUnitColumnName :: Text -> ColumnName -> TimeUnit -> ColumnName
timeUnitColumnName dimIdColName colName timeUnit =
  Text.intercalate "_" [colName, timeUnitName timeUnit, dimIdColName]
-- | Name of the foreign-key id column in a fact table that references
-- the given dimension table.
--
-- For a pre-existing dimension table the name comes from the fact's
-- @DimId@ column targeting it; for a generated table it is derived from
-- the table name with the dimension prefix stripped.
factDimFKIdColumnName :: Text -> Text -> Fact -> Table -> [Table] -> ColumnName
factDimFKIdColumnName dimPrefix dimIdColName dimFact dimTable@Table { .. } tables =
  if dimTable `elem` tables
    then case [ factColTargetColumn
              | FactColumn {factColType = DimId {..}, ..} <- factColumns dimFact
              , factColTargetTable == tableName ] of
           (col:_) -> col
           -- Previously a bare 'head': fail with a diagnosable message
           -- instead of "Prelude.head: empty list".
           [] -> error $ "factDimFKIdColumnName: no DimId column targeting "
                         ++ Text.unpack tableName
    else fromMaybe tableName (Text.stripPrefix dimPrefix tableName) <> "_" <> dimIdColName
-- | Name of the extracted (rolled-up) fact table for a time unit:
-- @\<prefix\>\<fact\>\<infix\>\<timeunit\>@.
extractedFactTableName :: Text -> Text -> TableName -> TimeUnit -> TableName
extractedFactTableName factPrefix factInfix factName timeUnit =
  Text.concat [factPrefix, factName, factInfix, timeUnitName timeUnit]
-- | SQL type for a foreign-key column referencing an id column of the
-- given type: Postgres serial types map to their plain integer
-- counterparts; anything else is kept as-is.
idColTypeToFKIdColType :: Text -> Text
idColTypeToFKIdColType typ = case Text.toLower typ of
  "serial"      -> "integer"
  "smallserial" -> "smallint"
  "bigserial"   -> "bigint"
  _             -> typ
-- | All dimension tables referenced by a fact: existing tables named by
-- DimId columns plus tables generated from DimVal columns.
extractDimensionTables :: Fact -> Reader Config [Table]
extractDimensionTables fact = do
  tables   <- asks configTables
  settings <- asks configSettings
  return $ dimTablesFromIds tables fact ++ dimTablesFromVals settings tables fact
-- | Existing tables referenced by the fact's DimId columns; unknown
-- table names are silently dropped.
dimTablesFromIds :: [Table] -> Fact -> [Table]
dimTablesFromIds tables fact =
  catMaybes [ findTable factColTargetTable tables
            | FactColumn { factColType = DimId {..} } <- factColumns fact ]
-- | Build dimension tables from the fact's DimVal columns: group the
-- referenced fact-table columns by dimension name, then make one table
-- per dimension with an id column, a primary key, and a unique key over
-- the value columns. ('>>-' and '>>>' come from Ringo.Utils.)
dimTablesFromVals :: Settings -> [Table] -> Fact -> [Table]
dimTablesFromVals Settings {..} tables fact =
  fact
  >>- factColumns
  >>> mapMaybe (findDimValColumn . Just)
  >>> Map.fromListWith (flip (++))
  >>> Map.mapWithKey makeDimColumns
  >>> Map.toList
  >>> map (uncurry makeDimTable)
  where
    -- NOTE(review): 'fromJust' assumes the fact's table is always
    -- present in the config; confirm that invariant upstream.
    Table {..} = fromJust . findTable (factTableName fact) $ tables

    makeDimTable dim cols =
      Table { tableName = settingDimPrefix <> dim
            , tableColumns =
                Column settingDimTableIdColumnName settingDimTableIdColumnType NotNull : cols
            , tableConstraints = [ PrimaryKey settingDimTableIdColumnName
                                 , UniqueKey (map columnName cols)
                                 ]
            }

    -- Rename columns to drop the dimension prefix and force NOT NULL.
    makeDimColumns dim cols = [ col { columnName = dimColumnName dim (columnName col)
                                    , columnNullable = NotNull
                                    }
                              | col <- nub cols
                              ]

    -- The fact-table column a DimVal column refers to, keyed by its
    -- target dimension name.
    findDimValColumn :: Maybe FactColumn -> Maybe (TableName, [Column])
    findDimValColumn fcol = do
      FactColumn { factColType = DimVal {..}, .. } <- fcol
      column <- findColumn factColTargetColumn tableColumns
      return (factColTargetTable, [column])
-- | Dimension tables of a fact together with those of all its parent
-- facts (transitively), deduplicated by table and each paired with the
-- fact it came from.
extractAllDimensionTables :: Fact -> Reader Config [(Fact, Table)]
extractAllDimensionTables fact = do
  myDims <- map (fact,) <$> extractDimensionTables fact
  parentDims <- concat <$> mapM extract (factParentNames fact)
  return . nubBy ((==) `on` snd) $ myDims ++ parentDims
  where
    -- NOTE(review): 'fromJust' assumes every parent fact name resolves;
    -- confirm against config validation.
    extract fName = asks configFacts >>= extractAllDimensionTables . fromJust . findFact fName
| abhin4v/ringo | ringo-core/src/Ringo/Extractor/Internal.hs | mit | 4,896 | 0 | 14 | 1,116 | 1,369 | 725 | 644 | 94 | 4 |
-- ----------------------------------------------------------------------------
{- |
Module : Holumbus.Index.Common.BasicTypes
Copyright : Copyright (C) 2011 Sebastian M. Schlatt, Timo B. Huebel, Uwe Schmidt
License : MIT
Maintainer : Timo B. Huebel (tbh@holumbus.org)
Stability : experimental
Portability: none portable
Basic data types for index
-}
-- ----------------------------------------------------------------------------
module Holumbus.Index.Common.BasicTypes
where
-- ------------------------------------------------------------
-- | The URI describing the location of the original document.
type URI = String

-- | The title of a document.
type Title = String

-- | The (plain text) content of a document.
type Content = String

-- | The position of a word in the document.
type Position = Int

-- | The name of a context.
type Context = String

-- | A single word.
type Word = String
-- ------------------------------------------------------------
| ichistmeinname/holumbus | src/Holumbus/Index/Common/BasicTypes.hs | mit | 1,103 | 0 | 4 | 280 | 55 | 41 | 14 | 7 | 0 |
-- -------------------------------------------------------------------------------------
-- Author: Sourabh S Joshi (cbrghostrider); Copyright - All rights reserved.
-- For email, run on linux (perl v5.8.5):
-- perl -e 'print pack "H*","736f75726162682e732e6a6f73686940676d61696c2e636f6d0a"'
-- -------------------------------------------------------------------------------------
-- | Each input row is a pair @[x, y]@; the answer is the product of the
-- componentwise minima, i.e. @minimum xs * minimum ys@.
solveProblem :: [[Int]] -> Int
solveProblem = product . foldl1 (zipWith min)
-- | Read the input (first line is a count, each further line a pair of
-- ints), solve, and print the answer.
main :: IO ()
main = do
    contents <- getContents
    let rows = map (map read . words) . tail . lines $ contents
    putStrLn (show (solveProblem rows))
| cbrghostrider/Hacking | HackerRank/Mathematics/Geometry/rectangularGame.hs | mit | 701 | 0 | 16 | 137 | 164 | 89 | 75 | 8 | 1 |
module QFeldspar.ChangeMonad (Chg(..),chg,tilNotChg) where
import QFeldspar.MyPrelude
-- | A value tagged with a flag recording whether it was changed.
data Chg a = Chg Bool a

deriving instance Functor Chg
deriving instance Foldable Chg
deriving instance Traversable Chg
-- | Combining two computations ORs their change flags.
instance Applicative Chg where
  -- Define 'pure' directly instead of "pure = return": under the
  -- Applicative-Monad proposal 'return' defaults to 'pure', so the old
  -- spelling becomes circular if the Monad instance ever drops its
  -- explicit 'return'.
  pure = Chg False
  -- Pointwise definition; identical to the previous do-notation version
  -- (which desugared to Chg (bf || ba) (f a) via the Monad instance).
  Chg bf f <*> Chg ba a = Chg (bf || ba) (f a)
-- | Sequencing ORs the change flags; 'return' marks the value as
-- unchanged.
instance Monad Chg where
  return = Chg False
  (Chg b x) >>= f = let Chg b' x' = f x
                    in Chg (b || b') x'
-- | Wrap a value and mark it as changed.
chg :: a -> Chg a
chg x = Chg True x
-- | Repeatedly apply the step function until it reports no change,
-- returning the last argument for which no change was reported.
tilNotChg :: (a -> Chg a) -> a -> a
tilNotChg f x =
  let Chg changed x' = f x
  in if changed then tilNotChg f x' else x
| shayan-najd/QFeldspar | QFeldspar/ChangeMonad.hs | gpl-3.0 | 636 | 0 | 10 | 211 | 266 | 132 | 134 | -1 | -1 |
{- ============================================================================
| Copyright 2011 Matthew D. Steele <mdsteele@alum.mit.edu> |
| |
| This file is part of Fallback. |
| |
| Fallback is free software: you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation, either version 3 of the License, or (at your option) |
| any later version. |
| |
| Fallback is distributed in the hope that it will be useful, but WITHOUT |
| ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for |
| more details. |
| |
| You should have received a copy of the GNU General Public License along |
| with Fallback. If not, see <http://www.gnu.org/licenses/>. |
============================================================================ -}
-- | An implementation of total maps from keys to values, with constant-time
-- access and guarantees against divergence.
--
-- Since some function names clash with "Prelude" names, this module is usually
-- imported @qualified@, e.g.
--
-- > import qualified Fallback.Data.TotalMap as TM
module Fallback.Data.TotalMap
(-- * TotalMap type
TotalMap,
-- * Construction
make, makeA, makeM, fromList, unfold, mapWithKey,
-- * Operations
get, set, adjust, assocs)
where
import Control.Applicative ((<$>), (<*>), Applicative, pure)
import Control.Exception (assert)
import Control.Monad.ST (runST)
import Data.Array ((!), (//), Array, Ix, inRange, listArray, range)
import qualified Data.Array as Array (assocs)
import qualified Data.Foldable as Fold
import Data.STRef (newSTRef, readSTRef, writeSTRef)
import qualified Data.Traversable as Trav
import qualified Text.Read as Read
-------------------------------------------------------------------------------
-- | A 'TotalMap' is a key-value map whose key space is a finite set --
-- specifically, a type that is an instance of the 'Ix' and 'Bounded' classes.
-- The abstraction guarantees that a value will always be present in the
-- 'TotalMap' for every element of the key space, and that lookup is
-- constant-time.
--
-- An earlier implementation of this datatype used the 'Enum' typeclass in
-- place of 'Ix'; using 'Ix' has the advantage that tuples (of other valid key
-- types) can be used as the key type for a 'TotalMap'.
--
-- Note: 'TotalMap's are intended for use with key types that are fairly small
-- enumerations (or tuples of several small enumerations). Although the type
-- system would allow one to create a 'TotalMap' with, say, 'Char' or 'Int' as
-- the key type, doing so is probably a very bad idea (and will likely OOM your
-- program). If you need a total mapping over a large key space, consider
-- using "Fallback.Data.SparseMap" instead.
-- Representation: a boxed array covering the entire key range
-- (minBound, maxBound); totality is guaranteed by construction.
newtype TotalMap k a = TotalMap (Array k a)
  deriving (Eq, Ord)
-- | Reads the @fromList [...]@ form produced by 'Show'.
instance (Bounded k, Ix k, Read a) => Read (TotalMap k a) where
  readPrec = Read.parens $ Read.prec 10 $ do
    Read.Ident "fromList" <- Read.lexP
    xs <- Read.readPrec
    return (fromList xs)
-- | Shown as @fromList [...]@, parenthesized at application precedence.
instance (Bounded k, Ix k, Show a) => Show (TotalMap k a) where
  showsPrec p tm = showParen (p > appPrec) $
                   showString "fromList " .
                   showsPrec (appPrec + 1) (Fold.toList tm)
    where appPrec = 10
-- | Rebuilds via 'make', so mapped values are forced (keeping the
-- never-bottom guarantee of 'get').
instance (Bounded k, Ix k) => Functor (TotalMap k) where
  fmap fn tm = make $ \k -> fn (get k tm)
-- | 'pure' fills every key with the same value; '<*>' applies the
-- function map to the argument map pointwise.
instance (Bounded k, Ix k) => Applicative (TotalMap k) where
  pure = make . const
  tm1 <*> tm2 = make $ \k -> (get k tm1) (get k tm2)
-- | Folds over the values in 'Ix' key order, delegating to the
-- underlying array.
instance (Bounded k, Ix k) => Fold.Foldable (TotalMap k) where
  foldr fn start (TotalMap arr) = Fold.foldr fn start arr
  foldl fn start (TotalMap arr) = Fold.foldl fn start arr
-- | Traverses in key order; the rebuilt array is forced element-wise
-- (the @foldr seq@) to preserve the strictness guarantee.
instance (Bounded k, Ix k) => Trav.Traversable (TotalMap k) where
  traverse fn (TotalMap arr) = fromArray <$> Trav.traverse fn arr
    where fromArray a = TotalMap $ Fold.foldr seq a a
-- | Create a new 'TotalMap' whose entry at each key @k@ is @fn k@.
-- Strict in the results of @fn@, so 'get' can never hit bottom.
make :: (Bounded k, Ix k) => (k -> a) -> TotalMap k a
make fn = unsafeFromList (map fn (range (minBound, maxBound)))
-- | Variant of 'make' whose generator runs in an applicative functor;
-- effects are sequenced in the 'Ix' order of the keys.
makeA :: (Bounded k, Ix k, Applicative f) => (k -> f a)
      -> f (TotalMap k a)
makeA fn = unsafeFromList <$> Trav.traverse fn (range (minBound, maxBound))
-- | Variant of 'make' whose generator runs in a monad; effects are
-- sequenced in the 'Ix' order of the keys.
makeM :: (Bounded k, Ix k, Monad m) => (k -> m a) -> m (TotalMap k a)
makeM fn =
  mapM fn (range (minBound, maxBound)) >>= \values ->
    return (unsafeFromList values)
-- | Build a 'TotalMap' by associating list values with respective keys in 'Ix'
-- order. Extra list items are ignored if the list is too long; this function
-- diverges if the list is too short.
fromList :: (Bounded k, Ix k) => [a] -> TotalMap k a
fromList = unfold fn where
  fn _ (b : bs) = (b, bs)
  -- Deliberately partial: a too-short list is a programming error.
  fn _ [] = error "TotalMap.fromList: list is too short"
-- | Build a 'TotalMap' by unfolding from an initial seed value. The key
-- values will be visited in 'Ix' order.
unfold :: (Bounded k, Ix k) => (k -> s -> (a, s)) -> s -> TotalMap k a
unfold fn initState = runST $ do
  -- The seed is threaded through the per-key actions via a mutable ref
  -- inside 'runST', keeping the public interface pure.
  ref <- newSTRef initState
  makeM $ \a -> do
    s <- readSTRef ref
    let (b, s') = fn a s
    writeSTRef ref s'
    return b
-- | /O(n)/. Map a function over all entries, also giving it the key.
mapWithKey :: (Bounded k, Ix k) => (k -> a -> b) -> TotalMap k a
           -> TotalMap k b
mapWithKey fn tm = make applied
  where applied k = fn k (get k tm)
-- | /O(1)/. Get an item from a 'TotalMap'. This function will never diverge
-- (assuming that the key type is well-behaved and that the arguments do not
-- diverge).
get :: (Bounded k, Ix k) => k -> TotalMap k a -> a
get key (TotalMap arr) =
  -- The assert guards against ill-behaved Ix/Bounded instances.
  assert (inRange (minBound, maxBound) key) $ arr ! key
-- | Set an item in a 'TotalMap'. This function will never diverge (assuming
-- that the key type is well-behaved and that the arguments do not diverge).
set :: (Bounded k, Ix k) => k -> a -> TotalMap k a -> TotalMap k a
set key value (TotalMap arr) =
  assert (inRange (minBound, maxBound) key) $
  -- The value is forced before insertion, preserving the invariant
  -- that no stored element is bottom.
  value `seq` (TotalMap $ arr // [(key, value)])
-- | Update the value associated with the given key.
adjust :: (Bounded k, Ix k) => k -> (a -> a) -> TotalMap k a -> TotalMap k a
adjust key fn tmap = set key newValue tmap
  where newValue = fn (get key tmap)
-- | /O(n)/. Return the list of associations of a 'TotalMap' in key order.
assocs :: (Bounded k, Ix k) => TotalMap k a -> [(k, a)]
assocs (TotalMap arr) = Array.assocs arr
-------------------------------------------------------------------------------
-- Private:
-- | Build a 'TotalMap' from a list; for internal use only. Strict in the
-- elements of the list (the @foldr seq@), but /does not/ check that the
-- list is of the correct length.
unsafeFromList :: (Bounded k, Ix k) => [a] -> TotalMap k a
unsafeFromList list =
  TotalMap $ listArray (minBound, maxBound) $ foldr seq list list
-------------------------------------------------------------------------------
| mdsteele/fallback | src/Fallback/Data/TotalMap.hs | gpl-3.0 | 7,937 | 0 | 15 | 1,995 | 1,662 | 898 | 764 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
-- | This is a simple library to query the Linux UPower daemon (via
-- DBus) for battery information. Currently, it only retrieves
-- information for the first battery it finds.
module Battery (
-- * Types
BatteryContext,
BatteryInfo(..),
BatteryState(..),
BatteryTechnology(..),
BatteryType(..),
-- * Accessors
batteryContextNew,
getBatteryInfo
) where
import Data.Map ( Map )
import qualified Data.Map as M
import Data.Maybe ( fromMaybe )
import Data.Word
import Data.Int
import DBus
import DBus.Client
import Data.List ( find, isInfixOf )
import Data.Text ( Text )
import qualified Data.Text as T
import Safe ( atMay )
-- | An opaque wrapper around some internal library state
-- (the system DBus connection plus the object path of the chosen battery).
data BatteryContext = BC Client ObjectPath
-- | Kind of power source. Constructor order is significant: values are
-- decoded with 'toEnum' from the integer reported by UPower (see
-- 'getBatteryInfo'); assumed to match UPower's numeric codes — TODO confirm.
data BatteryType = BatteryTypeUnknown
                 | BatteryTypeLinePower
                 | BatteryTypeBatteryType
                 | BatteryTypeUps
                 | BatteryTypeMonitor
                 | BatteryTypeMouse
                 | BatteryTypeKeyboard
                 | BatteryTypePda
                 | BatteryTypePhone
                 deriving (Show, Ord, Eq, Enum)
-- | Charging state. Decoded via 'toEnum' like 'BatteryType'.
data BatteryState = BatteryStateUnknown
                  | BatteryStateCharging
                  | BatteryStateDischarging
                  | BatteryStateEmpty
                  | BatteryStateFullyCharged
                  | BatteryStatePendingCharge
                  | BatteryStatePendingDischarge
                  deriving (Show, Ord, Eq, Enum)
-- | Battery chemistry. Decoded via 'toEnum' like 'BatteryType'.
data BatteryTechnology = BatteryTechnologyUnknown
                       | BatteryTechnologyLithiumIon
                       | BatteryTechnologyLithiumPolymer
                       | BatteryTechnologyLithiumIronPhosphate
                       | BatteryTechnologyLeadAcid
                       | BatteryTechnologyNickelCadmium
                       | BatteryTechnologyNickelMetalHydride
                       deriving (Show, Ord, Eq, Enum)
-- | There are a few fields supported by UPower that aren't exposed
-- here.. could be easily. Fields are filled from the daemon's property
-- dict in 'getBatteryInfo'; absent properties fall back to defaults there.
data BatteryInfo = BatteryInfo { batteryNativePath :: Text
                               , batteryVendor :: Text
                               , batteryModel :: Text
                               , batterySerial :: Text
                               -- , batteryUpdateTime :: Time
                               , batteryType :: BatteryType
                               , batteryPowerSupply :: Bool
                               , batteryHasHistory :: Bool
                               , batteryHasStatistics :: Bool
                               , batteryOnline :: Bool
                               , batteryEnergy :: Double
                               , batteryEnergyEmpty :: Double
                               , batteryEnergyFull :: Double
                               , batteryEnergyFullDesign :: Double
                               , batteryEnergyRate :: Double
                               , batteryVoltage :: Double
                               , batteryTimeToEmpty :: Int64
                               , batteryTimeToFull :: Int64
                               , batteryPercentage :: Double
                               , batteryIsPresent :: Bool
                               , batteryState :: BatteryState
                               , batteryIsRechargable :: Bool
                               , batteryCapacity :: Double
                               , batteryTechnology :: BatteryTechnology
{- , batteryRecallNotice :: Bool
   , batteryRecallVendor :: Text
   , batteryRecallUr :: Text
-}
                               }
-- | Find the first power source that is a battery in the list. The
-- simple heuristic is a substring search on 'BAT' in the formatted
-- object path.
firstBattery :: [ObjectPath] -> Maybe ObjectPath
firstBattery paths = find isBattery paths
  where isBattery = isInfixOf "BAT" . formatObjectPath
-- | The name of the power daemon bus (well-known DBus bus name of UPower).
powerBusName :: BusName
powerBusName = "org.freedesktop.UPower"
-- | The base object path under which UPower exposes its devices.
powerBaseObjectPath :: ObjectPath
powerBaseObjectPath = "/org/freedesktop/UPower"
-- | A helper to read the variant contents of a dict with a default
-- value. The default is used when the key is missing or the variant
-- cannot be converted to the requested type.
readDict :: (IsVariant a) => Map Text Variant -> Text -> a -> a
readDict dict key dflt =
  fromMaybe dflt (M.lookup key dict >>= fromVariant)
-- | Read the variant contents of a dict which is of an unknown integral
-- type, widening whatever width the daemon happened to use into an 'Int'.
-- Falls back to the supplied default when the key is absent or not integral.
readDictIntegral :: Map Text Variant -> Text -> Int32 -> Int
readDictIntegral dict key dflt =
  fromMaybe (fromIntegral dflt) (M.lookup key dict >>= widen)
  where
    -- Dispatch on the runtime DBus type tag, then convert losslessly.
    widen v =
      case variantType v of
        TypeWord8  -> Just (fromIntegral (conv v :: Word8))
        TypeWord16 -> Just (fromIntegral (conv v :: Word16))
        TypeWord32 -> Just (fromIntegral (conv v :: Word32))
        TypeWord64 -> Just (fromIntegral (conv v :: Word64))
        TypeInt16  -> Just (fromIntegral (conv v :: Int16))
        TypeInt32  -> Just (fromIntegral (conv v :: Int32))
        TypeInt64  -> Just (fromIntegral (conv v :: Int64))
        _          -> Nothing
    conv :: (Num a, IsVariant a) => Variant -> a
    conv = fromMaybe (fromIntegral dflt) . fromVariant
-- | Query the UPower daemon about information on a specific battery.
-- If some fields are not actually present, they may have bogus values
-- here. Don't bet anything critical on it.
--
-- Returns 'Nothing' when the reply carries no body or the body is not a
-- property dict. NOTE(review): the 'toEnum' calls will 'error' if the
-- daemon ever reports a code outside the enum range — confirm acceptable.
getBatteryInfo :: BatteryContext -> IO (Maybe BatteryInfo)
getBatteryInfo (BC systemConn battPath) = do
  -- Grab all of the properties of the battery each call with one
  -- message (org.freedesktop.DBus.Properties.GetAll).
  reply <- call_ systemConn (methodCall battPath "org.freedesktop.DBus.Properties" "GetAll")
             { methodCallDestination = Just "org.freedesktop.UPower"
             , methodCallBody = [toVariant $ T.pack "org.freedesktop.UPower.Device"]
             }
  return $ do
    -- 'atMay' avoids a partial 'head' when the reply body is empty.
    body <- methodReturnBody reply `atMay` 0
    dict <- fromVariant body
    return BatteryInfo { batteryNativePath = readDict dict "NativePath" ""
                       , batteryVendor = readDict dict "Vendor" ""
                       , batteryModel = readDict dict "Model" ""
                       , batterySerial = readDict dict "Serial" ""
                       , batteryType = toEnum $ fromIntegral $ readDictIntegral dict "Type" 0
                       , batteryPowerSupply = readDict dict "PowerSupply" False
                       , batteryHasHistory = readDict dict "HasHistory" False
                       , batteryHasStatistics = readDict dict "HasStatistics" False
                       , batteryOnline = readDict dict "Online" False
                       , batteryEnergy = readDict dict "Energy" 0.0
                       , batteryEnergyEmpty = readDict dict "EnergyEmpty" 0.0
                       , batteryEnergyFull = readDict dict "EnergyFull" 0.0
                       , batteryEnergyFullDesign = readDict dict "EnergyFullDesign" 0.0
                       , batteryEnergyRate = readDict dict "EnergyRate" 0.0
                       , batteryVoltage = readDict dict "Voltage" 0.0
                       , batteryTimeToEmpty = readDict dict "TimeToEmpty" 0
                       , batteryTimeToFull = readDict dict "TimeToFull" 0
                       , batteryPercentage = readDict dict "Percentage" 0.0
                       , batteryIsPresent = readDict dict "IsPresent" False
                       , batteryState = toEnum $ readDictIntegral dict "State" 0
                       , batteryIsRechargable = readDict dict "IsRechargable" True
                       , batteryCapacity = readDict dict "Capacity" 0.0
                       , batteryTechnology =
                         toEnum $ fromIntegral $ readDictIntegral dict "Technology" 0
                       }
-- | Construct a battery context if possible. This could fail if the
-- UPower daemon is not running. The context can be used to get
-- actual battery state with 'getBatteryInfo'.
--
-- Returns 'Nothing' when the device list is empty, malformed, or
-- contains no path matching the \"BAT\" heuristic of 'firstBattery'.
batteryContextNew :: IO (Maybe BatteryContext)
batteryContextNew = do
  systemConn <- connectSystem
  -- First, get the list of devices. For now, we just get the stats
  -- for the first battery
  reply <- call_ systemConn (methodCall powerBaseObjectPath "org.freedesktop.UPower" "EnumerateDevices")
             { methodCallDestination = Just powerBusName
             }
  return $ do
    body <- methodReturnBody reply `atMay` 0
    powerDevices <- fromVariant body
    battPath <- firstBattery powerDevices
    return $ BC systemConn battPath
| Fizzixnerd/xmonad-config | Battery.hs | gpl-3.0 | 8,509 | 0 | 14 | 3,014 | 1,409 | 780 | 629 | 136 | 8 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AdExchangeBuyer2.Accounts.Products.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- List all products visible to the buyer (optionally filtered by the
-- specified PQL query).
--
-- /See:/ <https://developers.google.com/authorized-buyers/apis/reference/rest/ Ad Exchange Buyer API II Reference> for @adexchangebuyer2.accounts.products.list@.
module Network.Google.Resource.AdExchangeBuyer2.Accounts.Products.List
(
-- * REST Resource
AccountsProductsListResource
-- * Creating a Request
, accountsProductsList
, AccountsProductsList
-- * Request Lenses
, aplpXgafv
, aplpUploadProtocol
, aplpAccessToken
, aplpUploadType
, aplpAccountId
, aplpFilter
, aplpPageToken
, aplpPageSize
, aplpCallback
) where
import Network.Google.AdExchangeBuyer2.Types
import Network.Google.Prelude
-- | A resource alias for @adexchangebuyer2.accounts.products.list@ method which the
-- 'AccountsProductsList' request conforms to.
-- Each @QueryParam@ below corresponds positionally to an argument passed by
-- 'requestClient' in the 'GoogleRequest' instance; keep the orders in sync.
type AccountsProductsListResource =
     "v2beta1" :>
       "accounts" :>
         Capture "accountId" Text :>
           "products" :>
             QueryParam "$.xgafv" Xgafv :>
               QueryParam "upload_protocol" Text :>
                 QueryParam "access_token" Text :>
                   QueryParam "uploadType" Text :>
                     QueryParam "filter" Text :>
                       QueryParam "pageToken" Text :>
                         QueryParam "pageSize" (Textual Int32) :>
                           QueryParam "callback" Text :>
                             QueryParam "alt" AltJSON :>
                               Get '[JSON] ListProductsResponse
-- | List all products visible to the buyer (optionally filtered by the
-- specified PQL query).
--
-- /See:/ 'accountsProductsList' smart constructor.
-- Only '_aplpAccountId' is mandatory; every other field is optional and
-- rendered as a query parameter when present.
data AccountsProductsList =
  AccountsProductsList'
    { _aplpXgafv :: !(Maybe Xgafv)
    , _aplpUploadProtocol :: !(Maybe Text)
    , _aplpAccessToken :: !(Maybe Text)
    , _aplpUploadType :: !(Maybe Text)
    , _aplpAccountId :: !Text
    , _aplpFilter :: !(Maybe Text)
    , _aplpPageToken :: !(Maybe Text)
    , _aplpPageSize :: !(Maybe (Textual Int32))
    , _aplpCallback :: !(Maybe Text)
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'AccountsProductsList' with the minimum fields
-- required to make a request; all optional fields start as 'Nothing'.
--
-- Use one of the following lenses to modify other fields as desired:
-- 'aplpXgafv', 'aplpUploadProtocol', 'aplpAccessToken', 'aplpUploadType',
-- 'aplpAccountId', 'aplpFilter', 'aplpPageToken', 'aplpPageSize',
-- 'aplpCallback'.
accountsProductsList
    :: Text -- ^ 'aplpAccountId'
    -> AccountsProductsList
accountsProductsList accountId =
  AccountsProductsList'
    { _aplpXgafv = Nothing
    , _aplpUploadProtocol = Nothing
    , _aplpAccessToken = Nothing
    , _aplpUploadType = Nothing
    , _aplpAccountId = accountId
    , _aplpFilter = Nothing
    , _aplpPageToken = Nothing
    , _aplpPageSize = Nothing
    , _aplpCallback = Nothing
    }
-- | V1 error format.
aplpXgafv :: Lens' AccountsProductsList (Maybe Xgafv)
aplpXgafv = lens _aplpXgafv (\s v -> s { _aplpXgafv = v })

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
aplpUploadProtocol :: Lens' AccountsProductsList (Maybe Text)
aplpUploadProtocol = lens _aplpUploadProtocol (\s v -> s { _aplpUploadProtocol = v })

-- | OAuth access token.
aplpAccessToken :: Lens' AccountsProductsList (Maybe Text)
aplpAccessToken = lens _aplpAccessToken (\s v -> s { _aplpAccessToken = v })

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
aplpUploadType :: Lens' AccountsProductsList (Maybe Text)
aplpUploadType = lens _aplpUploadType (\s v -> s { _aplpUploadType = v })

-- | Account ID of the buyer.
aplpAccountId :: Lens' AccountsProductsList Text
aplpAccountId = lens _aplpAccountId (\s v -> s { _aplpAccountId = v })

-- | An optional PQL query used to query for products. See
-- https:\/\/developers.google.com\/ad-manager\/docs\/pqlreference for
-- documentation about PQL and examples. Nested repeated fields, such as
-- product.targetingCriterion.inclusions, cannot be filtered.
aplpFilter :: Lens' AccountsProductsList (Maybe Text)
aplpFilter = lens _aplpFilter (\s v -> s { _aplpFilter = v })

-- | The page token as returned from ListProductsResponse.
aplpPageToken :: Lens' AccountsProductsList (Maybe Text)
aplpPageToken = lens _aplpPageToken (\s v -> s { _aplpPageToken = v })

-- | Requested page size. The server may return fewer results than requested.
-- If unspecified, the server will pick an appropriate default.
-- ('mapping _Coerce' hides the 'Textual' wrapper from callers.)
aplpPageSize :: Lens' AccountsProductsList (Maybe Int32)
aplpPageSize = lens _aplpPageSize (\s v -> s { _aplpPageSize = v }) . mapping _Coerce

-- | JSONP
aplpCallback :: Lens' AccountsProductsList (Maybe Text)
aplpCallback = lens _aplpCallback (\s v -> s { _aplpCallback = v })
-- Machine-generated wiring: the positional arguments handed to @go@ must
-- match the parameter order of 'AccountsProductsListResource' exactly.
instance GoogleRequest AccountsProductsList where
        type Rs AccountsProductsList = ListProductsResponse
        type Scopes AccountsProductsList =
             '["https://www.googleapis.com/auth/adexchange.buyer"]
        requestClient AccountsProductsList'{..}
          = go _aplpAccountId _aplpXgafv _aplpUploadProtocol
              _aplpAccessToken
              _aplpUploadType
              _aplpFilter
              _aplpPageToken
              _aplpPageSize
              _aplpCallback
              (Just AltJSON)
              adExchangeBuyer2Service
          where go
                  = buildClient
                      (Proxy :: Proxy AccountsProductsListResource)
                      mempty
| brendanhay/gogol | gogol-adexchangebuyer2/gen/Network/Google/Resource/AdExchangeBuyer2/Accounts/Products/List.hs | mpl-2.0 | 6,409 | 0 | 20 | 1,491 | 968 | 560 | 408 | 138 | 1 |
-- brittany { lconfig_columnAlignMode: { tag: ColumnAlignModeDisabled }, lconfig_indentPolicy: IndentPolicyLeft }
-- NOTE: brittany formatter test fixture — the exact layout of the guards
-- below is the expected output under the config on the line above.
func x
  | a somewhat longer guard x = "and a somewhat longer expession that does not"
  | otherwise = "fit without putting the guards in new lines"
| lspitzner/brittany | data/Test419.hs | agpl-3.0 | 263 | 0 | 8 | 43 | 34 | 15 | 19 | 3 | 1 |
{-
Copyright (C) 2014 Albert Krewinkel <tarleb+metropolis@moltkeplatz.de>
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-}
module Metropolis.Worker.Tests ( tests ) where
import Test.Tasty
import Test.Tasty.HUnit
import Metropolis.Types
import Metropolis.Worker
-- | Top-level tree: currently only the shell-worker group.
tests :: TestTree
tests = testGroup "Worker Tests" groups
  where groups = [ shellWorkerTests ]
-- | Exercises 'runCode' for three languages: Shell output is compared
-- verbatim, the Haskell case ignores stderr (reset to \"\"), and an
-- unknown language must yield 'cannotRunLanguageResult'.
shellWorkerTests :: TestTree
shellWorkerTests = testGroup "Shell Worker"
  [ testCase "Basic Shell output" $
      let res = emptyResult{ resultOutput = "Hello"
                           , resultSource = "printf Hello"
                           }
      in fmap (== res) (runCode def{ parameterLanguage = Shell } "printf Hello")
         @? "wrong printf result"
  , testCase "Haskell interpreter" $
      let ref = emptyResult{ resultOutput = "42\n"
                           , resultSource = "print (42::Int)"
                           }
          res = runCode def{ parameterLanguage = Haskell } "print (42::Int)"
          -- stderr may contain interpreter noise; drop it before comparing.
          resetErr r = r{ resultError = "" }
      in fmap (== ref) (fmap resetErr res)
         @? "Haskell results differ"
  , testCase "Run unknown language" $
      let (lang, code) = (UnknownLanguage "unknown", "foo")
      in fmap (== cannotRunLanguageResult code)
              (runCode def{ parameterLanguage = lang } code)
         @? "wrong fallback"
  ]
| tarleb/rundoc | tests/Metropolis/Worker/Tests.hs | agpl-3.0 | 1,960 | 0 | 15 | 526 | 290 | 160 | 130 | 26 | 1 |
module Main where
import Control.Monad
import Data.Map ( Map )
import qualified Data.Map as Map
import Data.Time.Calendar
-- import Data.Time.Format
import Network.CGI hiding ( Html )
import Prelude hiding ( head )
import System.FilePath
import System.Random
-- import Text.Blaze.Html
-- import Text.Blaze.Html.Renderer.Pretty
-- import Text.Blaze.XHtml1.Strict hiding ( style, map )
-- import Text.Blaze.XHtml1.Strict.Attributes hiding ( title )
-- import qualified Text.Pandoc.UTF8 as UTF8
-- | Directory holding the quiz cards and the @manifest@ index file.
datadir :: FilePath
datadir = "/home/simons/src/quizmaster/htdocs"
-- | A card is identified by its (relative) file path.
type Ident = FilePath
-- | Per-card metadata as stored in the manifest ('Read'/'Show' format).
data MetaData = MetaData
  { hideUntil :: Day     -- ^ card is hidden until this day
  , related :: [Ident]   -- ^ identifiers of related cards
  }
  deriving (Show, Read)
-- | The manifest maps every card to its metadata.
type Manifest = Map Ident MetaData
-- | Parse one whitespace-split manifest line: a bare filename gets default
-- metadata (day 0, no relations); a filename followed by more words parses
-- the remainder with 'read'; an empty line is an error.
parseManifestEntry :: [String] -> (Ident, MetaData)
parseManifestEntry ws =
  case ws of
    [file]        -> (file, MetaData (toEnum 0) [])
    (file : rest) -> (file, read (unwords rest))
    []            -> error $ "parseManifestEntry: invalid line " ++ show ws
-- | Read and parse the manifest file into a 'Manifest' map.
loadManifest :: IO Manifest
loadManifest =
  fmap (Map.fromList . map (parseManifestEntry . words) . lines)
       (readFile (datadir </> "manifest"))
-- | Pick a uniformly random card identifier from the manifest.
-- Precondition: the manifest is non-empty ('Map.elemAt' is partial).
selectRandomCard :: Manifest -> IO Ident
selectRandomCard cards =
  fmap (\i -> fst (Map.elemAt i cards))
       (randomRIO (0, Map.size cards - 1))
-- | CGI entry point: pick a random card and redirect the client to it.
-- The commented-out block below is an earlier HTML debug page kept for
-- reference.
cgiMain :: CGI CGIResult
cgiMain = do
  scheme <- getVarWithDefault "REQUEST_SCHEME" "http"
  host <- getVarWithDefault "HTTP_HOST" "no.such.name" -- TODO: error handling
  cards <- liftIO loadManifest
  nextCard <- liftIO (selectRandomCard cards)
  -- nextCard is a path; assumes it starts with '/' — TODO confirm.
  let nextUrl = concat [ scheme, "://", host, nextCard ]
  redirect nextUrl
-- env <- getVars
-- args <- getInputs
-- setHeader "Content-Type" "text/html; charset=utf-8"
-- outputFPS $ UTF8.fromStringLazy $ renderHtml $ docTypeHtml $ do
--   head $ do meta ! httpEquiv "Content-Type" ! content "text/html; charset=UTF-8"
--             title "Quizmaster CGI Environment"
--   body $ do h1 "Known Cards:"
--             table ! cellpadding "2" $
--               forM_ (Map.toAscList cards) $ \(key,val) ->
--                 tr $ td (toHtml key) >> td (toHtml (show val))
--             h1 "CGI Arguments:"
--             table ! cellpadding "2" $
--               forM_ args $ \(key,val) ->
--                 tr $ td (toHtml key) >> td (toHtml val)
--             h1 "CGI Environment:"
--             table ! cellpadding "2" $
--               forM_ env $ \(key,val) ->
--                 tr $ td (toHtml key) >> td (toHtml val)
-- | Run the CGI action with the default error handler.
main :: IO ()
main = runCGI (handleErrors cgiMain)
| peti/quizmaster | src/quizmaster.hs | agpl-3.0 | 2,548 | 0 | 12 | 592 | 488 | 270 | 218 | 39 | 1 |
module Main where
import Test.QuickCheck
import qualified Data.ByteString as BS
import Data.Word
import Data.CBOR
import Data.CBOR.Util
import Data.Binary.CBOR
import Test.Framework
-- import Test.QuickCheck.Property
import Test.Framework.Providers.QuickCheck2
import Data.Binary.Get (runGet)
import Data.Binary.Put (runPut)
import Control.Applicative
import Data.Monoid
-- | Run the whole suite with test-framework's default runner.
main :: IO ()
main = defaultMain tests
-- main = defaultMainWithOpts tests runnerOptions
-- | Reduced-size runner configuration (used by the commented-out
-- 'defaultMainWithOpts' entry point above, not by 'main').
runnerOptions :: RunnerOptions' Maybe
runnerOptions = mempty {
    ropt_test_options = Just testOptions
  }
-- | Very small QuickCheck budget: 5 generated tests of minimal size/depth.
testOptions :: TestOptions' Maybe
testOptions = mempty {
    topt_maximum_generated_tests = Just 5
  , topt_maximum_unsuitable_generated_tests = Just 1
  , topt_maximum_test_size = Just 1
  , topt_maximum_test_depth = Just 1
  }
-- | The suite: one group with the encode/decode round-trip property.
tests :: [Test]
tests =
  [ testGroup "Group1"
      [ testProperty "round_trip" prop_roundtrip ]
  ]
-- | Serialising a 'CBOR' value and parsing it back must be the identity.
prop_roundtrip :: CBOR -> Bool
prop_roundtrip cbor =
  runGet getCBOR (runPut (putCBOR cbor)) == cbor
-- | Wrap an arbitrary underlying value in the half-float newtype.
instance Arbitrary HalfFloat where
  arbitrary = fmap HF arbitrary
--instance Arbitrary BS.ByteString where
-- arbitrary :: Gen BS.ByteString
-- arbitrary =
-- | Size-bounded generation; the QuickCheck size parameter limits the
-- nesting depth via 'sizedCBOR'.
instance Arbitrary CBOR where
  arbitrary = sized sizedCBOR
-- | Generate a 'CBOR' value with nesting depth bounded by @n@.
--
-- The previous version duplicated the thirteen leaf generators between the
-- @n == 0@ and recursive equations; they are shared here. Guarding with
-- @n <= 0@ (instead of matching literal 0) also prevents non-termination
-- for negative sizes, where @n \`div\` 2@ never reaches 0.
sizedCBOR :: Int -> Gen CBOR
sizedCBOR n = oneof gens
  where
    gens
      | n <= 0    = leaves
      | otherwise = leaves ++ branches
    -- Sub-generator for container elements: halve the remaining depth.
    half = sizedCBOR (n `div` 2)
    leaves =
      [ CBOR_UInt <$> choose (0, toInt (maxBound :: Word64))
      , (\x -> CBOR_SInt (-1 - x)) <$> choose (0, toInt (maxBound :: Word64))
      , CBOR_BS <$> arbitraryByteString
      , CBOR_TS <$> arbitraryTextString
      , CBOR_HalfFloat <$> arbitrary
      , CBOR_Float <$> arbitrary
      , CBOR_Double <$> arbitrary
      , return CBOR_NULL
      , return CBOR_Undefined
      , CBOR_Reserved <$> elements [28, 29, 30]
      , CBOR_Unassigned <$> elements [0 .. 19]
      , return CBOR_True
      , return CBOR_False
      , CBOR_Byte <$> arbitrary
      ]
    branches =
      [ CBOR_Array <$> listOf1 half
        -- Map keys are restricted to the types 'allowedKeyTypes' accepts.
      , CBOR_Map <$> listOf1 ((,) <$> half `suchThat` allowedKeyTypes <*> half)
      , CBOR_Tag <$> choose (0, toInt (maxBound :: Word64)) <*> half
      ]
-- | Which CBOR values are acceptable as map keys: all scalar forms, plus
-- tagged values whose payload is itself an allowed key. Containers
-- (arrays, maps) and anything else are rejected.
allowedKeyTypes :: CBOR -> Bool
allowedKeyTypes (CBOR_UInt _)       = True
allowedKeyTypes (CBOR_SInt _)       = True
allowedKeyTypes (CBOR_BS _)         = True
allowedKeyTypes (CBOR_TS _)         = True
allowedKeyTypes (CBOR_Tag _ inner)  = allowedKeyTypes inner
allowedKeyTypes (CBOR_HalfFloat _)  = True
allowedKeyTypes (CBOR_Float _)      = True
allowedKeyTypes (CBOR_Double _)     = True
allowedKeyTypes (CBOR_Reserved _)   = True
allowedKeyTypes (CBOR_Unassigned _) = True
allowedKeyTypes CBOR_True           = True
allowedKeyTypes CBOR_False          = True
allowedKeyTypes CBOR_NULL           = True
allowedKeyTypes (CBOR_Byte _)       = True
allowedKeyTypes _                   = False
-- | Non-empty byte string with arbitrary byte values.
arbitraryByteString :: Gen BS.ByteString
arbitraryByteString = fmap BS.pack (listOf1 arbitrary)
-- | Non-empty "text" string restricted to bytes 0x00-0x79, i.e. valid
-- single-byte UTF-8 code points.
arbitraryTextString :: Gen BS.ByteString
arbitraryTextString = fmap BS.pack (listOf1 (choose (0, 0x79)))
-- Technically it can be higher than this, but then it must be encoded as multiple bytes | orclev/CBOR | test/Suite.hs | lgpl-3.0 | 3,441 | 0 | 15 | 743 | 1,043 | 567 | 476 | 90 | 15 |
module Three_2 where
import Test.QuickCheck
import Test.QuickCheck.Checkers
import Test.QuickCheck.Classes
-- | A product of one @a@ and two @b@s.
data Three' a b = Three' a b b deriving (Eq, Show)

-- | 'fmap' touches both @b@ slots and leaves the @a@ slot alone.
instance Functor (Three' a) where
  fmap g (Three' a u v) = Three' a (g u) (g v)

-- | The @a@ slots combine monoidally; the functions apply componentwise.
instance (Monoid m) => Applicative (Three' m) where
  pure x = Three' mempty x x
  Three' m1 g h <*> Three' m2 u v = Three' (m1 `mappend` m2) (g u) (h v)
-- QuickCheck arbitrary: generate all three slots independently.
instance (Arbitrary a, Arbitrary b) => Arbitrary (Three' a b) where
  arbitrary = Three' <$> arbitrary <*> arbitrary <*> arbitrary
-- Required for checkers: structural equality is a sound EqProp here.
instance (Eq a, Eq b) => EqProp (Three' a b) where (=-=) = eq
-- Use checkers to verify the Applicative and Functor are valid.
-- The tuple type arguments only fix the instance; 'undefined' is never forced.
verifyThree'IsFunctor :: IO ()
verifyThree'IsFunctor = quickBatch $ functor (undefined :: Three' (Int,Int,Int) (Int,Int,Int))
verifyThree'IsApplicative :: IO ()
verifyThree'IsApplicative = quickBatch $ applicative (undefined :: Three' (String,String,String) (Int, Int, Int))
| dmp1ce/Haskell-Programming-Exercises | Chapter 17/ApplicativeInstances/src/Three_2.hs | unlicense | 1,020 | 0 | 9 | 192 | 405 | 216 | 189 | 20 | 1 |
module BadMonad where
import Test.QuickCheck
import Test.QuickCheck.Checkers
import Test.QuickCheck.Classes
-- | A value paired with an effect counter that accumulates additively.
data CountMe a =
  CountMe Integer a
  deriving (Eq, Show)

instance Functor CountMe where
  fmap g (CountMe n x) = CountMe n (g x)

-- | 'pure' contributes a zero count; '<*>' sums the two counts.
instance Applicative CountMe where
  pure = CountMe 0
  CountMe m g <*> CountMe n x = CountMe (m + n) (g x)

-- | '>>=' adds the count of the continuation's result to the current one.
instance Monad CountMe where
  return = pure
  CountMe m x >>= k =
    -- lazy pattern bind: the continuation is only forced on demand
    let CountMe n y = k x
    in CountMe (m + n) y
-- | Generate the counter and payload independently.
instance Arbitrary a => Arbitrary (CountMe a) where
  arbitrary = CountMe <$> arbitrary <*> arbitrary
-- | Structural equality is adequate for the checkers law batteries.
instance Eq a => EqProp (CountMe a) where
  (=-=) = eq
-- | Run the checkers law batteries (functor, applicative, monad) for
-- 'CountMe'. 'trigger' is only a type witness; its value is never forced.
-- (Added the missing top-level type signature.)
main :: IO ()
main = do
  let trigger = undefined :: CountMe (Int, String, Int)
  quickBatch $ functor trigger
  quickBatch $ applicative trigger
  quickBatch $ monad trigger
| aniketd/learn.haskell | haskellbook/monads/BadMonad.hs | unlicense | 795 | 0 | 11 | 173 | 318 | 159 | 159 | 26 | 1 |
module PodDownload where
import Data.Maybe
import Database.HDBC
import Network.HTTP
import Network.URI
import System.IO
import PodDB
import PodParser
import PodTypes
-- | Fetch a URL with a plain GET. Returns @Right body@ on a 2xx reply,
-- follows a 3xx redirect via the Location header, and reports anything
-- else (including connection errors) as @Left message@.
-- NOTE(review): 'fromJust' makes this 'error' on an unparsable URL, and
-- redirect chains are followed without a depth limit — confirm acceptable.
downloadURL :: String -> IO (Either String String)
downloadURL url = do
  resp <- simpleHTTP request
  case resp of
    Left x -> return $ Left ("Error connecting: " ++ show x)
    Right r -> case rspCode r of
      (2,_,_) -> return $ Right $ rspBody r
      -- 3xx: retry at the Location header, if present.
      (3,_,_) -> redirect r
      _ -> return $ Left $ show r
  where
    request = Request { rqURI = uri,
                        rqMethod = GET,
                        rqHeaders = [],
                        rqBody = ""}
    uri = fromJust $ parseURI url
    redirect r = case findHeader HdrLocation r of
      Nothing -> return $ Left $ show r
      Just redirectUrl -> downloadURL redirectUrl
{- | Update the podcast in the database: download its feed, parse it, and
insert every item as an episode, committing once at the end. Download
failures are only logged to stdout. -}
updatePodcastFromFeed :: IConnection conn => conn -> Podcast -> IO ()
updatePodcastFromFeed dbconn p = do
  resp <- downloadURL $ castURL p
  case resp of
    Left x -> putStrLn x
    Right doc -> updateDB doc
  where
    updateDB doc = do
      addEpisode dbconn `mapM_` episodes
      commit dbconn
      where
        episodes = map (item2episode p) (items feed)
        feed = parse doc $ castURL p
-- | Download one episode's media file. On success the payload is written
-- to a deterministic @pod.<cast>.<episode>.mp3@ file, the episode is
-- marked done in the database, and the filename is returned; on download
-- failure the error is printed and 'Nothing' is returned.
getEpisode :: IConnection conn => conn -> Episode -> IO (Maybe String)
getEpisode dbconn ep = do
    resp <- downloadURL $ epURL ep
    case resp of
      Left x -> do { putStrLn x; return Nothing }
      Right doc -> do
        -- withBinaryFile closes the handle even if hPutStr throws,
        -- unlike the previous openBinaryFile/hPutStr/hClose sequence.
        withBinaryFile filename WriteMode $ \file ->
          hPutStr file doc
        updateEpisode dbconn $ ep { epDone = True}
        commit dbconn
        return $ Just filename
  where
    filename = "pod." ++ (show . castId .epCast $ ep) ++ "." ++ show (epId ep)
               ++ ".mp3"
| lucamtudor/haskell-podcatcher | PodDownload.hs | apache-2.0 | 2,032 | 1 | 14 | 778 | 628 | 306 | 322 | 51 | 5 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ExplicitForAll #-}
-- |Contains the class definition of 'Iso', indicating isomorphism between two
-- types.
module Data.Types.Isomorphic (
Iso(..),
module Data.Types.Injective,
from,
) where
import qualified Numeric.Natural as N
import qualified Numeric.Peano as PN
import qualified Data.Text as TS
import qualified Data.Text.Lazy as TL
import Data.Types.Injective
-- |The class of isomorphic types, i.e. those which can be cast to each
-- other without loss of information. Type isomorphism is an equivalence
-- relation (reflexive, symmetric, transitive), but due to the limitations
-- of the type system, only reflexivity is implemented for all types.
-- Since there are no type inequality constraints, writing symmetry and
-- transitivity instances over all types would result in overlapping instances
-- with due to reflexivity.
--
-- The following must be ensured:
--
-- [@Isomorphism@]
-- @
-- from . to = id
-- @
--
-- Reflexivity, symmetry and transitivity are then "free":
--
-- @
-- instance Iso a a
-- @
--
-- @
-- instance (Iso a b, Iso b c) => Iso a c
-- @
--
-- Out of these, only the first one (reflexivity) is actually implemented,
-- since the other two would result in overlapping instances. We would be able
-- to avoid this with type inequality constrains (e.g. @a \/~ b@, @a \/~ c@,
-- @b \/~ c)@.
-- The class body is intentionally empty: isomorphism is exactly
-- injectivity in both directions, so 'to' (from 'Injective') already
-- provides both casts.
class (Injective a b, Injective b a) => Iso a b where
-- |Synonym for 'to'. The explicit @forall b a@ only swaps the order of
-- the type variables, so the "target" type comes first when applied
-- with visible type application.
from :: forall b a. (Iso a b) => b -> a
from = to
-- Reflexivity: every type is trivially isomorphic to itself.
instance Iso a a where
-- equivalence class of string types: String, strict Text and lazy Text
-- can all be converted to one another without loss.
instance Iso TS.Text String
instance Iso String TS.Text
instance Iso TL.Text String
instance Iso String TL.Text
instance Iso TS.Text TL.Text
instance Iso TL.Text TS.Text
-- Peano wholes and integers.
instance Iso PN.Whole Integer
instance Iso Integer PN.Whole
| ombocomp/type-iso | Data/Types/Isomorphic.hs | apache-2.0 | 1,890 | 0 | 7 | 347 | 266 | 165 | 101 | -1 | -1 |
module Data.GitParser.Parser
(parseCommits)
where
import Control.Monad.State
import Data.Char
import Data.List
import Data.Time.Format
import Data.Time.LocalTime
import System.Locale
import Data.GitParser.Types
-- | Project the unconsumed ('psRight') and consumed ('psLeft') line
-- buffers out of the parser state.
getRight, getLeft :: GitParser [String]
getRight = gets psRight
getLeft = gets psLeft
-- | Append a parsed block to the accumulated block list.
putBlock blk = modify addIt
  where addIt st = st { psBlocks = psBlocks st ++ [blk] }
-- | Peek at the next unconsumed line, or the empty string when none is left.
getNextLine :: GitParser String
getNextLine = do
  pending <- getRight
  case pending of
    []      -> return []
    (l : _) -> return l
-- | Consume one line: move the head of the right buffer onto the end of
-- the left buffer. A no-op when nothing is left to consume.
shift = do
  st <- get
  case psRight st of
    []       -> return ()
    (l : ls) -> put st { psLeft = psLeft st ++ [l], psRight = ls }
-- | Discard the consumed-line buffer.
clear = modify (\st -> st { psLeft = [] })
-- | Parse one commit chunk into 'psBlocks': dispatch on each line's prefix
-- (commit / Merge: / Author: / Date: / 4-space-indented message body) and
-- loop until the input is exhausted. Unrecognised lines are consumed and
-- dropped. The shift/clear ordering is load-bearing throughout.
parseCommit :: GitParser ()
parseCommit = do nl <- getRight
                 if null nl
                   then return ()
                   else do getNextLine >>= findNext
                           parseCommit
  where
    findNext x | "commit" `isPrefixOf` x = parseSHA
               | "Merge:" `isPrefixOf` x = parseMerge
               | "Author:" `isPrefixOf` x = parseAuthor
               | "Date:" `isPrefixOf` x = parseDate
               | "    " `isPrefixOf` x = parseMessage
    findNext x = shift >> clear
    -- "commit <sha>" -> the token after the first space.
    parseSHA = do r <- getNextLine
                  let sha = takeWhile (not . isSpace) . tail . dropWhile (/= ' ') $ r
                  putBlock (SHA sha)
                  shift >> clear
    -- Everything after the first ':' on the line.
    colonParse x = takeWhile (/= '\n') . tail . dropWhile (/= ':') $ x
    colonBlock con = do r <- getNextLine
                        putBlock (con $ strip . colonParse $ r)
                        shift >> clear
    -- Merge lines are consumed but not recorded.
    parseMerge = shift >> clear
    parseAuthor = colonBlock Author
    parseDate = colonBlock Date
    -- Accumulate indented lines in psLeft, then emit one Message block
    -- with the 4-space indent stripped.
    parseMessage = do r <- getNextLine
                      l <- getLeft
                      if "    " `isPrefixOf` r
                        then shift >> parseMessage
                        else putBlock (Message $ unlines . map (drop 4) $ l) >> clear
-- | Trim whitespace from both ends of a string.
strip = dropEnd . dropFront
  where
    dropFront = dropWhile isSpace
    dropEnd   = reverse . dropWhile isSpace . reverse
-- | Split a @git log@ dump into one chunk of text per commit; a new chunk
-- starts at every line beginning with \"commit\". Any text before the
-- first such line becomes its own leading chunk; empty chunks are dropped.
--
-- Rewritten to accumulate chunks in reverse (the old version appended to
-- the result list with @++@, which is quadratic) and to stop shadowing the
-- top-level name with its own where-helper.
splitCommits :: String -> [String]
splitCommits txt = filter (not . null) (go (lines txt) [] [])
  where
    -- go remaining currentChunk finishedChunksReversed
    go [] chunk acc = reverse (unlines chunk : acc)
    go (x:xs) chunk acc
      | "commit" `isPrefixOf` x = go xs [x] (unlines chunk : acc)
      | otherwise               = go xs (chunk ++ [x]) acc
-- | Fold a list of parsed blocks into a commit record, starting from
-- 'emptyCommit' and letting each block overwrite its field.
-- NOTE(review): lazy 'foldl'; fine for per-commit block lists, but
-- 'foldl'' would avoid thunk build-up — confirm if inputs can be large.
toCommit = foldl af emptyCommit
  where af c (SHA a) = c { getSHA = a }
        af c (Author a) = c { getAuthor = toAuthor a }
        af c (Date a) = c { getDate = toLocalTime a }
        af c (Message a) = c { getMessage = a }
-- | Parse git's default date format, e.g. "Mon Jan  2 15:04:05 2006 +0100".
toLocalTime = parseTime defaultTimeLocale "%a %b %e %H:%M:%S %Y %z"
-- | Split "Name <email>" into its parts.
-- NOTE(review): 'tail' is partial — an author string without '<' makes
-- the email field bottom; confirm git always emits the brackets.
toAuthor x = let name = strip . takeWhile (/= '<') $ x
                 emal = tail . takeWhile (/= '>') . dropWhile (/= '<') $ x
             in CommitAuthor name emal
-- | Top-level entry: split a @git log@ dump into per-commit chunks, run the
-- block parser over each, and assemble the resulting commit records.
parseCommits = map (toCommit . psBlocks . execState parseCommit . mkParserState) . splitCommits
| jamessanders/gitparser | src/Data/GitParser/Parser.hs | bsd-2-clause | 3,189 | 0 | 15 | 1,213 | 1,059 | 545 | 514 | 70 | 4 |
module Testsuite.Random where
-- standard modules
import Control.Monad
import Data.List
import qualified Data.Foldable as Fold
import qualified Data.Map as Map
import Data.Maybe
import Data.Random hiding (shuffle)
--import Data.Random.Distribution.Binomial
import qualified Data.Random.Extras as R
import Data.Ratio
import Data.RVar
import qualified Data.Set as Set
import System.Random
-- local modules
import Basics
import Interface.Sparq
import Helpful.General
import Helpful.Math
import Helpful.Random
import Debug.Trace
-- | Build a random scenario: assign an independently random atomic relation
-- from @domain@ to every @rank@-element subset of the @syze@ node names.
-- No consistency of any kind is enforced.
randomScenario :: (Calculus a)
               => Int
               -> [a]
               -> Int
               -> IO (Network [String] (ARel a))
randomScenario rank domain syze = do
    rels <- randomsOfIO domain
    let cons = fromJust $ consFromList $
            zip (kCombinations rank $ map show [1..syze])
                (map ARel rels)
    let net = eNetwork { nDesc = "Random_Network", nCons = cons }
    return net
-- | Build a random scenario that is algebraically closed, by backtracking:
-- fix two random constraints, then for each remaining tuple try the atomic
-- relations in random order, pruning any partial assignment whose algebraic
-- closure is already inconsistent. Calls 'error' when no such scenario
-- exists for the given size.
randomAClosureConsistentScenario :: (Calculus a)
                                 => Int
                                 -> [a]
                                 -> Int
                                 -> IO (Network [String] (ARel a))
randomAClosureConsistentScenario rank domain syze = do
    let t1:t2:tuples = kCombinations rank $ map show [1..syze]
    rel1 <- oneOfIO atomicDomain
    rel2 <- oneOfIO atomicDomain
    gen <- newStdGen
    let maybeCons = buildScenario gen [ (t2,rel2), (t1,rel1) ] tuples
    if isNothing maybeCons then
        error $ "I could not find an algebraically closed "
             ++ cName (head domain) ++ " scenario of size "
             ++ show syze
      else do
        let net = eNetwork { nDesc = "Random_Network"
                           , nCons = fromJust $ consFromList $
                                 fromJust maybeCons }
        return net
  where
    atomicDomain = map ARel domain
    -- A partial assignment is pruned when a-closure definitely refutes it.
    aClosureInconsistent cons = (Just False ==) $ ( \(x,_,_) -> x) $
        algebraicClosure $ makeNonAtomic $
        eNetwork{ nCons = fromJust $ consFromList cons }
    -- Depth-first search over tuples; 'listToMaybe . catMaybes' takes the
    -- first relation choice that leads to a complete consistent scenario.
    buildScenario gen cons tuples =
        if aClosureInconsistent cons then
            Nothing
        else if null tuples then
            Just cons
        else
            listToMaybe $ catMaybes scenarios
      where
        (rels, gen') = shuffle atomicDomain gen
        (tuple:tuples') = tuples
        scenarios = map
            (\ rel -> buildScenario gen'
                          ((tuple, rel):cons)
                          tuples'
            ) rels
-- | Random atomic network with an exact constraint density: of the
-- @choose syze rank@ possible tuples, exactly @density@ of them (which
-- must divide evenly, otherwise 'error') get a random atomic relation.
randomAtomicNetworkWithDensity :: (Calculus a)
                               => Int
                               -> [a]
                               -> Int
                               -> Ratio Int
                               -> IO (Network [String] (ARel a))
randomAtomicNetworkWithDensity rank domain syze density = do
    combis <- sampleRVar $ R.shuffle $ kCombinations rank $ map show [1..syze]
    rels <- randomsOfIO $ map ARel domain
    let denom = choose syze rank
    -- density must tile the total tuple count exactly; 'rest' records the
    -- remainder and triggers the error below.
    let (factor, rest) = divMod denom (denominator density)
    let numer = (numerator density) * factor
    let cons = fromJust $ Fold.foldrM (uncurry insertCon)
                                      Map.empty
                                      (take numer $ zip combis rels)
    if rest /= 0 then
        error $ "Cannot create a network of size " ++ show syze
             ++ " and density " ++ show density
      else
        return $ eNetwork { nDesc = "Random_Network", nCons = cons }
-- | Like 'randomAtomicNetworkWithDensity', but guarantees connectedness:
-- a spanning "skeleton" first links every node to a random tuple over the
-- previous nodes, then random "flesh" constraints are added until the
-- requested density is reached. Errors when the density does not divide
-- evenly or is too low to even cover the skeleton.
randomConnectedAtomicNetworkWithDensity :: (Calculus a)
                                        => Int
                                        -> [a]
                                        -> Int
                                        -> Ratio Int
                                        -> IO (Network [String] (ARel a))
randomConnectedAtomicNetworkWithDensity rank domain syze density = do
    let atomicDomain = map ARel domain
    let combis = [ kCombinations (rank - 1) $ map show [1..n]
                 | n <- [rank - 1..] ]
    skel <- foldM (\consAcc intNode -> do
            let node = show intNode
            combi <- oneOfIO $ combis!!(intNode - rank)
            rel <- oneOfIO atomicDomain
            -- The order "combi ++ [node]" is very important because the
            -- nodes of the networks are assumed to be sorted throughout
            -- the code. Fixme: this should be handled better by internal
            -- generating functions. We could need a function that
            -- generalises bcInsert and tcInsert.
            let newCon = [(combi ++ [node], rel)]
            return $ consAcc ++ newCon
        ) [] [rank..syze]
    let combisLeft =
            (kCombinations rank $ map show [1..syze]) \\ (fst $ unzip skel)
    fleshCombis <- sampleRVar $ R.shuffle combisLeft
    fleshRels <- randomsOfIO atomicDomain
    let denom = choose syze rank
    let (factor, rest) = divMod denom (denominator density)
    let numer = (numerator density) * factor
    -- The skeleton already contributes (syze - rank + 1) constraints.
    let flesh = take (numer - syze + rank - 1) $ zip fleshCombis fleshRels
    let cons = fromJust $ Fold.foldrM (uncurry insertCon)
                                      Map.empty
                                      (skel ++ flesh)
    if rest /= 0 || numer < (syze - rank + 1) then
        error $ "Cannot create a connected network of size " ++ show syze
             ++ " and density " ++ show density
      else
        return $ eNetwork { nDesc = "Random_Network", nCons = cons }
-- fixme: these need to be adjusted to the new functions above:
--randomAtomicNetworkAroundDensity :: (Calculus a)
-- => Int
-- -> [a]
-- -> Int
-- -> Ratio Int
-- -> IO (Network [String] a, Int)
--randomAtomicNetworkAroundDensity rank domain syze density = do
-- let numer' = fixNumeratorAtEdge density
-- let denom = denominator density
-- numer <- sampleBinomial numer' denom
-- net <- randomAtomicNetworkWithDensity rank domain syze (numer%denom)
-- return (net, numer)
--
--randomConnectedAtomicNetworkAroundDensity :: (Calculus a)
-- => Int
-- -> [a]
-- -> Int
-- -> Ratio Int
-- -> IO (Network [String] a, Int)
--randomConnectedAtomicNetworkAroundDensity rank domain syze density =
-- do
-- let numer' = fixNumeratorAtEdge density
-- let denom = denominator density
-- numer <- sampleBinomial numer' denom
-- net <- randomConnectedAtomicNetworkWithDensity rank domain syze (numer%denom)
-- return (net, numer)
--
--fixNumeratorAtEdge density
-- | numer == 0 = 0.5
-- | numer == denom = fromIntegral numer - 0.5
-- | otherwise = fromIntegral numer
-- where
-- numer = numerator density
-- denom = denominator density
--
--sampleBinomial numerator denomin = sampleRVar $
-- binomial (denomin :: Int) (numerator / fromIntegral denomin :: Float)
| spatial-reasoning/zeno | src/Testsuite/Random.hs | bsd-2-clause | 7,880 | 0 | 19 | 3,352 | 1,543 | 814 | 729 | 118 | 4 |
{-# OPTIONS -fglasgow-exts #-}
-----------------------------------------------------------------------------
{-| Module : QTabletEvent.hs
Copyright : (c) David Harley 2010
Project : qtHaskell
Version : 1.1.4
Modified : 2010-09-02 17:02:35
Warning : this file is machine generated - do not modify.
--}
-----------------------------------------------------------------------------
module Qtc.Enums.Gui.QTabletEvent (
TabletDevice, eNoDevice, ePuck, eStylus, eAirbrush, eFourDMouse, eXFreeEraser, eRotationStylus
, PointerType, eUnknownPointer, eEraser
)
where
import Qtc.Classes.Base
import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr)
import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int)
import Qtc.Enums.Base
import Qtc.Enums.Classes.Core
-- | Phantom carrier for TabletDevice enum values.
data CTabletDevice a = CTabletDevice a

-- | Qt's QTabletEvent::TabletDevice enumeration.
type TabletDevice = QEnum(CTabletDevice Int)

-- | Wrap a raw Int as a 'TabletDevice' enum value.
ieTabletDevice :: Int -> TabletDevice
ieTabletDevice = QEnum . CTabletDevice
-- Int <-> enum conversions for 'TabletDevice', plus helpers that lift
-- C-side integer results (single and list) into the enum type.
instance QEnumC (CTabletDevice Int) where
  qEnum_toInt (QEnum (CTabletDevice x)) = x
  qEnum_fromInt x = QEnum (CTabletDevice x)
  -- run the action and convert its integral result to the enum
  withQEnumResult x
    = do
        ti <- x
        return $ qEnum_fromInt $ fromIntegral ti
  -- run the action and convert each element of its result list
  withQEnumListResult x
    = do
        til <- x
        return $ map qEnum_fromInt til
-- Connect a Qt int-carrying signal to a Haskell slot taking a
-- 'TabletDevice'.  Machine-generated FFI plumbing: wraps the handler in
-- a C-callable function pointer plus a stable pointer, then registers
-- both with the C side.
instance Qcs (QObject c -> TabletDevice -> IO ()) where
  connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
    = do
        funptr <- wrapSlotHandler_int slotHandlerWrapper_int
        stptr <- newStablePtr (Wrap _handler)
        withObjectPtr _qsig_obj $ \cobj_sig ->
          withCWString _qsig_nam $ \cstr_sig ->
            withObjectPtr _qslt_obj $ \cobj_slt ->
              withCWString _qslt_nam $ \cstr_slt ->
                qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
        return ()
    where
      -- called from C with the raw int; a null QObject means the
      -- connection is being torn down, so free both pointers instead of
      -- invoking the handler
      slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
      slotHandlerWrapper_int funptr stptr qobjptr cint
        = do qobj <- qObjectFromPtr qobjptr
             let hint = fromCInt cint
             if (objectIsNull qobj)
               then do when (stptr/=ptrNull)
                         (freeStablePtr (castPtrToStablePtr stptr))
                       when (funptr/=ptrNull)
                         (freeHaskellFunPtr (castPtrToFunPtr funptr))
               else _handler qobj (qEnum_fromInt hint)
             return ()
-- Enum values mirroring Qt's QTabletEvent::TabletDevice.
-- (hlint: removed the redundant '$' before each literal.)

-- | QTabletEvent::NoDevice
eNoDevice :: TabletDevice
eNoDevice = ieTabletDevice 0

-- | QTabletEvent::Puck
ePuck :: TabletDevice
ePuck = ieTabletDevice 1

-- | QTabletEvent::Stylus
eStylus :: TabletDevice
eStylus = ieTabletDevice 2

-- | QTabletEvent::Airbrush
eAirbrush :: TabletDevice
eAirbrush = ieTabletDevice 3

-- | QTabletEvent::FourDMouse
eFourDMouse :: TabletDevice
eFourDMouse = ieTabletDevice 4

-- | QTabletEvent::XFreeEraser
eXFreeEraser :: TabletDevice
eXFreeEraser = ieTabletDevice 5

-- | QTabletEvent::RotationStylus
eRotationStylus :: TabletDevice
eRotationStylus = ieTabletDevice 6
-- | Phantom carrier for PointerType enum values.
data CPointerType a = CPointerType a

-- | Qt's QTabletEvent::PointerType enumeration.
type PointerType = QEnum(CPointerType Int)

-- | Wrap a raw Int as a 'PointerType' enum value.
iePointerType :: Int -> PointerType
iePointerType = QEnum . CPointerType
-- Int <-> enum conversions for 'PointerType', plus helpers that lift
-- C-side integer results (single and list) into the enum type.
instance QEnumC (CPointerType Int) where
  qEnum_toInt (QEnum (CPointerType x)) = x
  qEnum_fromInt x = QEnum (CPointerType x)
  -- run the action and convert its integral result to the enum
  withQEnumResult x
    = do
        ti <- x
        return $ qEnum_fromInt $ fromIntegral ti
  -- run the action and convert each element of its result list
  withQEnumListResult x
    = do
        til <- x
        return $ map qEnum_fromInt til
-- Connect a Qt int-carrying signal to a Haskell slot taking a
-- 'PointerType'.  Same machine-generated FFI plumbing as the
-- 'TabletDevice' instance above.
instance Qcs (QObject c -> PointerType -> IO ()) where
  connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler
    = do
        funptr <- wrapSlotHandler_int slotHandlerWrapper_int
        stptr <- newStablePtr (Wrap _handler)
        withObjectPtr _qsig_obj $ \cobj_sig ->
          withCWString _qsig_nam $ \cstr_sig ->
            withObjectPtr _qslt_obj $ \cobj_slt ->
              withCWString _qslt_nam $ \cstr_slt ->
                qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr)
        return ()
    where
      -- called from C with the raw int; a null QObject means the
      -- connection is being torn down, so free both pointers instead of
      -- invoking the handler
      slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO ()
      slotHandlerWrapper_int funptr stptr qobjptr cint
        = do qobj <- qObjectFromPtr qobjptr
             let hint = fromCInt cint
             if (objectIsNull qobj)
               then do when (stptr/=ptrNull)
                         (freeStablePtr (castPtrToStablePtr stptr))
                       when (funptr/=ptrNull)
                         (freeHaskellFunPtr (castPtrToFunPtr funptr))
               else _handler qobj (qEnum_fromInt hint)
             return ()
-- Enum values mirroring Qt's QTabletEvent::PointerType.
-- (hlint: removed the redundant '$' before each literal.)

-- | QTabletEvent::UnknownPointer
eUnknownPointer :: PointerType
eUnknownPointer = iePointerType 0

-- | QTabletEvent::Pen (exposed through the shared 'QePen' class).
instance QePen PointerType where
  ePen = iePointerType 1

-- | QTabletEvent::Cursor (exposed through the shared 'QeCursor' class).
instance QeCursor PointerType where
  eCursor = iePointerType 2

-- | QTabletEvent::Eraser
eEraser :: PointerType
eEraser = iePointerType 3
| uduki/hsQt | Qtc/Enums/Gui/QTabletEvent.hs | bsd-2-clause | 4,558 | 0 | 18 | 1,018 | 1,227 | 618 | 609 | 116 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module TeleHash.Switch
(
initial_switch
, startSwitchThread
) where
import Control.Applicative
import Control.Concurrent
import Control.Exception
import Control.Monad
import Control.Monad.Error
import Control.Monad.State
import Crypto.Random
import Data.Aeson
import Data.Aeson.Types
import Data.Bits
import Data.ByteString.Internal (w2c)
import Data.Char
import Data.IP
import Data.List
import Data.Maybe
import Data.Scientific
import Data.String.Utils
import Data.Typeable
import Network.BSD
import Prelude hiding (id, (.), head, either, catch)
import System.Environment
import System.IO
import System.Log.Handler.Simple
import System.Log.Logger
import System.Time
import TeleHash.Convert
import TeleHash.Crypto1a
import TeleHash.Packet
import TeleHash.Paths
import TeleHash.Utils
import qualified Crypto.Hash.SHA256 as SHA256
import qualified Data.Aeson as Aeson
import qualified Data.Binary as Binary
import qualified Data.ByteString as B
import qualified Data.ByteString.Base16 as B16
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as HM
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Data.Vector as V
import qualified Network.Socket as NS
import qualified Network.Socket.ByteString as SB
import qualified System.Random as R
-- ---------------------------------------------------------------------
-- localIP = "10.0.0.28"
-- LAN address this node binds/advertises; hard-coded for dev testing.
-- NOTE(review): machine-specific — presumably belongs in configuration.
localIP = "10.2.2.83"
-- ---------------------------------------------------------------------
--
-- Set up actions to run on start and end, and run the main loop
--
-- | Initialise the switch, run its main loop, and guarantee the UDP
-- socket is closed afterwards via 'bracket'.
runSwitch :: IO ((),Switch)
runSwitch = bracket initialize disconnect loop
  where
    -- release: close the listening socket
    -- (partial: 'fromJust' assumes initialize always sets swH)
    disconnect (_,_,ss) = NS.sClose (slSocket (fromJust $ swH ss))
    loop (ch1,ch2,st) = catch (runStateT (run ch1 ch2) st) (exc)
    -- NOTE(review): swallows every exception and returns an undefined
    -- Switch — callers must not inspect the state after a failure
    exc :: SomeException -> IO ((),Switch)
    exc _e = return ((),(assert False undefined))
-- ---------------------------------------------------------------------
-- | Initialise the switch and run its main loop on a forked thread,
-- returning the signal channel, the reply channel and the worker's
-- ThreadId.
startSwitchThread :: IO (Chan Signal,Chan Reply,ThreadId)
startSwitchThread = do
  (sigCh, replyCh, sw) <- initialize
  worker <- forkIO $ void $ runStateT (run sigCh replyCh) sw
  return (sigCh, replyCh, worker)
-- ---------------------------------------------------------------------
-- | The first point where we have entered the TeleHash Monad.
-- Spawns the periodic timer threads and the UDP listener, loads the
-- (hard-coded) identity and seeds, goes online, then loops forever
-- dispatching signals read from @ch1@, answering queries on @ch2@.
run :: Chan Signal -> Chan Reply -> TeleHash ()
run ch1 ch2 = do
  -- ch1 <- io (newChan)
  -- periodic housekeeping signals (handlers currently commented out below)
  _ <- io (forkIO (timer (1000 * onesec) SignalPingSeeds ch1))
  _ <- io (forkIO (timer (1000 * onesec) SignalScanLines ch1))
  _ <- io (forkIO (timer (3000 * onesec) SignalTapTap ch1))
  _ <- io (forkIO (timer (30 * onesec) SignalShowSwitch ch1))
  h <- gets swH
  _ <- io (forkIO (dolisten h ch1))
  -- load the id
  loadId testId
  logT $ "loading id done"
  -- crypt_deopenize_1a (Binary.decode $ BL.pack p1)
  -- error "done for now"
  -- load the seeds, hardcoded for now
  mapM_ addSeed initialSeeds
  logT $ "loading seeds done"
  logT $ "going online.."
  online nullCb
  logT $ "online done"
  -- -------------- this bit from ping.c -------------------------------
  {-
    // create/send a ping packet
    c = chan_new(s, bucket_get(seeds, 0), "seek", 0);
    p = chan_packet(c);
    packet_set_str(p,"seek",s->id->hexname);
    chan_send(c, p);
  -}
  sw <- get
  -- partial: 'head' / 'Just' fail if there are no seeds or the seed is
  -- not in swAll — acceptable for this hard-coded test setup
  let seed0 = head (swSeeds sw)
  let Just hcs = Map.lookup seed0 (swAll sw)
  let path = Path { {- pType = PathType "ipv4"
                    -- , pIp = Just "208.68.164.253"
                    , pIp = localIP
                    , pPort = 42424
                    , pHttp = ""
                    -}
                    pJson = PIPv4 (PathIPv4 localIP 42424)
                  , pLastIn = Nothing
                  , pLastOut = Nothing
                  , pRelay = Nothing
                  , pId = Nothing
                  , pPriority = Nothing
                  , pIsSeed = True
                  , pGone = False
                  }
  -- NOTE(review): 'msg' is only used by the commented-out injection
  -- code below, so it is currently dead (GHC unused-binding warning)
  let msg = Telex { tId = Nothing
                  , tType = Just "seek"
                  , tPath = Nothing
                  , tJson = HM.fromList
                              [("type",String "seek")
                              ,("c", Number 0)
                              ,("seek",String (Text.pack $ unHN $ hHashName hcs))
                              ]
                  , tCsid = Just "1a"
                  , tChanId = Nothing
                  , tTo = Just path
                  , tPacket = Nothing
                  }
  -- io $ threadDelay (5 * onesec)
  {-
  logT $ "AZ starting ping inject"
  -- AZ carry on here: use send, not raw
  packet <- telexToPacket msg
  -- let (hcs',lined) = crypt_lineize_1a hcs packet
  -- send path lined Nothing
  timeNow <- io getClockTime
  let js = "{\"c\":0,\"seek\":\"" ++ (unHN $ hHashName hcs) ++ "\"}"
      Just json@(Aeson.Object jsHashMap) = Aeson.decode (cbsTolbs $ BC.pack js) :: Maybe Aeson.Value
      msg' = RxTelex { rtId = 0
                     , rtSender = path
                     , rtAt = timeNow
                     , rtJs = jsHashMap
                     , rtPacket = Packet HeadEmpty (Body BC.empty)
                     , rtChanId = Nothing
                     }
  -- c <- raw hcs "seek" msg' nullRxCb
  -- logT $ "sending msg returned :" ++ show c
  -- xxxxxxxxxxxxxxxxxxxx
  -}
  -- -------------- ping.c end -----------------------------------------
  -- Process the async messages from the various sources above
  forever $ do
    s <- io (readChan ch1)
    -- NOTE(review): timeNow only feeds the commented-out handlers
    timeNow <- io getClockTime
    -- io (putStrLn $ "got signal: " ++ (show s) ++ " at " ++ (show timeNow))
    -- io (putStrLn $ "got signal:at " ++ (show timeNow))
    case s of
      -- SignalPingSeeds -> pingSeeds
      -- SignalScanLines -> scanlines timeNow
      -- SignalTapTap -> taptap timeNow
      SignalSyncPath hn -> do
        logT $ "SignalSyncPath for: " ++ show hn
        hnSync hn
      SignalMsgRx msg addr -> recvTelex msg addr
      -- External commands
      SignalShowSwitch -> do
        sw <- getSwitch
        logT $ "current switch:" ++ showSwitch sw
      SignalGetSwitch -> do
        sw <- getSwitch
        io (writeChan ch2 (ReplyGetSwitch sw))
      _ -> logT $ "run not processing signal:" ++ show s
    -- io (putStrLn $ "done signal:at " ++ (show timeNow))
-- ---------------------------------------------------------------------
-- | Return the current switch state from the TeleHash monad.
-- (hlint: the previous @switch <- get; return switch@ is just 'get'.)
getSwitch :: TeleHash Switch
getSwitch = get
-- ---------------------------------------------------------------------
-- | Create the UDP socket (bound to all interfaces, OS-chosen port),
-- the signal and reply channels, and an initial switch referencing them.
initialize :: IO (Chan Signal,Chan b,Switch)
initialize = do
  -- Look up the hostname and port. Either raises an exception
  -- or returns a nonempty list. First element in that list
  -- is supposed to be the best option.
  -- (serveraddr,ip,port) <- resolveToSeedIPP initialSeed
  -- let seedIPP = IPP (ip ++ ":" ++ port)
  -- Establish a socket for communication
  --sock <- socket (addrFamily serveraddr) Datagram defaultProtocol
  sock <- NS.socket NS.AF_INET NS.Datagram defaultProtocol
  -- We want to listen on all interfaces (0.0.0.0)
  bindAddr <- NS.inet_addr "0.0.0.0"
  -- port 0: let the OS pick a free port
  NS.bindSocket sock (NS.SockAddrInet 0 bindAddr)
  socketName <- NS.getSocketName sock
  warningM "Controller" ("server listening " ++ (show socketName))
  ch1 <- newChan
  ch2 <- newChan
  -- Save off the socket, and server address in a handle
  sw <- initial_switch
  return (ch1, ch2, sw {swH = Just (SocketHandle sock)
                       ,swChan = Just ch1})
-- ---------------------------------------------------------------------
-- | Forward every UDP datagram arriving on the socket into the signal
-- channel; does nothing when no socket handle is available.
dolisten :: Maybe SocketHandle -> Chan Signal -> IO ()
dolisten Nothing _ = return ()
dolisten (Just handle) sigChan = forever $ do
  (datagram, from) <- SB.recvFrom (slSocket handle) 1000
  writeChan sigChan (SignalMsgRx datagram from)
-- | One second in microseconds (the unit 'threadDelay' expects).
onesec :: Int
onesec = 10 ^ (6 :: Int)
-- | Forever: wait the given number of microseconds, then push the
-- signal value onto the channel.  Never returns.
timer :: Int -> a -> Chan a -> IO ()
timer delayUs sig chan = forever $ do
  threadDelay delayUs
  writeChan chan sig
-- | Fire the given signal once, after the given delay (microseconds),
-- on a freshly forked thread.
oneShotTimer :: Int -> Signal -> TeleHash ()
oneShotTimer timeoutVal signalValue = do
  mchannel <- gets swChan
  -- partial: pattern-match failure if the switch has no channel yet
  let Just channel = mchannel
  void $ io $ forkIO (threadDelay timeoutVal >> writeChan channel signalValue)
  return ()
-- =====================================================================
-- ---------------------------------------------------------------------
-- API
{-
int switch_init(switch_t s, packet_t keys)
{
char *csid = crypt_supported;
if(!keys) return 1;
while(*csid)
{
loadkey(*csid,s,keys);
csid++;
}
packet_free(keys);
if(!s->parts->json) return 1;
s->id = hn_getparts(s->index, s->parts);
if(!s->id) return 1;
return 0;
}
-- Result of running telehash-c ping
alanz@alanz-laptop:~/mysrc/github/telehash/telehash-c$ ./bin/ping
*** public key o0UL/D6qQ+dcSX7hCoyMjLDYeA6dNScZ+YY/fcX4fyCtsSO2u9L5Lg== ***
*** secret key iollyIcHaGeD/JpUNn/7ef1QAzE= ***
loaded hashname 7ecf6a5884d483fde2f6a027e33e6e1756efdb70925557c3e3f776b35329aef5
-}
-- | Hard-coded 1a identity used for testing (matches the telehash-c
-- ping output quoted above).  Partial: the @Just r@ pattern fails if
-- the embedded JSON ever stops decoding as an 'Id'.
testId :: Id
testId = r
  where
    v = "{\"1a\":\"o0UL/D6qQ+dcSX7hCoyMjLDYeA6dNScZ+YY/fcX4fyCtsSO2u9L5Lg==\",\"1a_secret\":\"iollyIcHaGeD/JpUNn/7ef1QAzE=\"}"
    Just r = decode v
-- | Debug helper: read the seed file and print its decoded JSON value.
testSeeds = do
  raw <- BL.readFile "../data/seeds.json"
  let parsed = decode raw :: Maybe Value
  putStrLn $ "seeds=" ++ show parsed
-- ---------------------------------------------------------------------
-- | Well-known path id for the relay transport (see swNetworks).
relayPid :: PathId
relayPid = PId 1

-- | Well-known path id for the IPv4/UDP transport (see swNetworks).
ipv4Pid :: PathId
ipv4Pid = PId 2
-- | Seeds contacted when going online; currently only the local dev
-- seed (alternative public seeds kept commented out below).
initialSeeds :: [SeedInfo]
-- initialSeeds = [seed195,seed253]
-- initialSeeds = [seed253]
initialSeeds = [seedLocal]
-- | Hard-coded seed on the local LAN (address taken from 'localIP').
seedLocal:: SeedInfo
seedLocal =
  SI
    { sId = "3036d9b6f9525660b71d58bacd80d0ef0f6e191f1622daefd461823c366eb4fc"
    , sAdmin = "alanz"
    , sPaths =
        [ Path { {-
                 pType = PathType "ipv4"
               , pIp = localIP
               , pPort = 42424
               , pHttp = ""
                 -}
                 pJson = PIPv4 (PathIPv4 localIP 42424)
               , pLastIn = Nothing
               , pLastOut = Nothing
               , pRelay = Nothing
               , pId = Nothing
               , pPriority = Nothing
               , pIsSeed = True
               , pGone = False
               }
        ]
    , sParts =
        [
          ("3a", "d4b78855e6cee2d005753ef4abe8bd05cc014efdd2bdb9c7994d34c712020a8e")
        , ("2a", "c216b2ccb1a832f0e893c847b0ef1f81d0a00f9fd0708b845299715226c87112")
        , ("1a", "7ce35806dc84943da12ea8d3a93bbcfdcf83e6b9")
        ]
    , sKeys =
        [ ("1a", "KaOZRKU3ouxNLGBHQV4TFAGrwM8pF8PncWC9XLcx+7H+fHebOTdcyg==")
        ]
    , sIsBridge = True
    }
-- | Public telehash seed at 208.126.199.195 (ipv6/http path variants
-- kept commented out below).
seed195:: SeedInfo
seed195 =
  SI
    { sId = "f50f423ce7f94fe98cdd09268c7e57001aed300b23020840a84a881c76739471"
    , sAdmin = "http://github.com/quartzjer"
    , sPaths =
        [ Path { {-
                 pType = PathType "ipv4"
               , pIp = Just "208.126.199.195"
               , pPort = 42424
               , pHttp = ""
                 -}
                 pJson = PIPv4 (PathIPv4 "208.126.199.195" 42424)
               , pLastIn = Nothing
               , pLastOut = Nothing
               , pRelay = Nothing
               , pId = Nothing
               , pPriority = Nothing
               , pIsSeed = True
               , pGone = False
               }
        {-
          }, {
            "type": "ipv6",
            "ip": "2001:470:c0a6:3::10",
            "port": 42424
          }, {
            "type": "http",
            "http": "http://208.126.199.195:42424"
          }],
        -}
        ]
    , sParts =
        [("2a", "8a5235d7cebb82d48a945e7c4b301efed40503d50ea1063464fe839b12278d93")
        ,("1a", "b3c9341ff5d11670c1e1c918ad51631b1251448a")
        ]
    , sKeys =
        [ ("2a", "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5mWOu3o0chHcpcxPYX43fD6DTWGkCj09QaWerHbTX1Gua5eW8VdPOM/Ki21WEY2xcBa55/s37hIRP1XZveFiWgIXft9g/L+1AsF56cO0ZGnHhrp5Wabrt+L5mVuWg2VcSAUQ/gdoSLmDRTdOc0ruzroIN4a4Wnfk6rwvFYq/LfTj2w5cBD3ziVts4XSicX9fnmWElrTKfGLWyC6W5ICbLZ0BmTt9CZLbdNmotuYqQkPXPJ0wccsWAuI8yjbmU2my+F+vakWbGFvMSCBlLlQLXMTnLbLtgnwgeujTHiJaB0Iycw5Q9FS0RiQ0QeFqUvmMX9BezKfayq2hHjcob58WbwIDAQAB")
        , ("1a", "idT0VmmEmSdDF1eMrajIVHP0XZ/8Udgeas1Zxy0va5tD/KP393Ri3w==")
        ]
    , sIsBridge = True
    }
-- | Public telehash seed at 208.68.164.253 (ipv6/http path variants
-- kept commented out below).
seed253 :: SeedInfo
seed253 =
  SI
    { sId = "89a4cbc6c27eb913c1bcaf06bac2d8b872c7cbef626b35b6d7eaf993590d37de"
    , sAdmin = "http://github.com/quartzjer"
    , sPaths =
        [ Path { {- pType = PathType "ipv4"
               , pIp = Just "208.68.164.253"
               , pPort = 42424
               , pHttp = ""
                 -}
                 pJson = PIPv4 (PathIPv4 "208.68.164.253" 42424)
               , pLastIn = Nothing
               , pLastOut = Nothing
               , pRelay = Nothing
               , pId = Nothing
               , pPriority = Nothing
               , pIsSeed = True
               , pGone = False
               }
        {-
        , Path { pType = PathType "ipv6"
               , pIp = Just "2605:da00:5222:5269:230:48ff:fe35:6572"
               , pPort = 42424
               , pHttp = ""
               , pLastIn = Nothing
               , pLastOut = Nothing
               , pRelay = Nothing
               , pId = Nothing
               , pPriority = Nothing
               , pIsSeed = True
               }
        , Path { pType = PathType "http"
               , pIp = Nothing
               , pPort = 42424
               , pHttp = "http://208.68.164.253:42424"
               , pLastIn = Nothing
               , pLastOut = Nothing
               , pRelay = Nothing
               , pId = Nothing
               , pPriority = Nothing
               , pIsSeed = True
               }
        -}
        ]
    , sParts =
        [("2a", "beb07e8864786e1d3d70b0f537e96fb719ca2bbb4a2a3791ca45e215e2f67c9a")
        ,("1a", "6c0da502755941a463454e9d478b16bbe4738e67")
        ]
    , sKeys =
        [ ("2a", "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvJlhpi2pZZRrnf+bmnnRRAQHfzMzDwOV+s+JzamyL0X9wwJK8m2DHCFcpJQSLFIzv3v+e102+pZlIWAU6vvO5s6J60C+9UwQoKj9L3cxUL/XmEBjAnbwfs+61HGSjf8yS8Uon/0oDXxssJyJjnrzAJT7K5G+Nqf5N5IJiEfhYkfa9wkhWR4fU1ZiC3PZZoMrGurGxWAwIs4No2LlBkZXUtAC31jISWODBAahGcaRjPgHFVaDxQ+gEQFse0Aa0mWA7KCRiSbBC89Brx837AREWFa7t14UAi01BLcPSkbAIbIv1SmM+E3k7HdN6rXTjz2h7Na5DOCS4z1LgujXW460WQIDAQAB")
        , ("1a", "hhzKwBDYADu6gQdDrP2AgQJmAE5iIbEqpPmLLZI9w72rLTCCqob9sw==")
        ]
    , sIsBridge = True
    }
-- ---------------------------------------------------------------------
-- | Build a fresh switch with default configuration: empty tables, a
-- newly seeded RNG, the 1a cipher set, and the built-in raw channel
-- handlers registered.  Network handle/channel are filled in later by
-- 'initialize'.
initial_switch :: IO Switch
initial_switch = do
  rng <- initRNG
  let
    sw = Switch
          { swH = Nothing
          , swChan = Nothing
          , swSender = doSendDgram
          , swSeeds = []
          , swLocals = Set.empty
          , swLines = Map.empty
          , swBridges = []
          , swBridgeLine = []
          , swBridging = False
          , swAll = Map.empty
          , swBuckets = Map.empty
          , swCapacity = []
          , swRels = Map.empty
          , swPaths = Map.empty
          , swBridgeCache = []
          , swHashname = Nothing
          , swId = Map.empty
          , swIdCrypto = Nothing
          , swCs = Map.empty
          , swKeys = Map.empty
          , swCSets = Map.fromList [("1a",cset_1a)]
          , swParts = []
          , swPub4 = Nothing
          , swPriority = Nothing
          , swLoad = loadId
          , swMake = keysgen
          -- configure defaults
          , swNat = False
          , swSeed = True
          , swLanToken = Nothing
          -- udp socket stuff
          , swPcounter = 1
          -- , swReceive = receive
          , swNetworks = Map.fromList [(PtRelay, (relayPid,relay))
                                      ,(PtIPv4, (ipv4Pid ,ipv4Send))]
          -- connect to the network, online(callback(err))
          , swIsOnline = False
          -- internal listening unreliable channels
          , swRaws = Map.fromList
                       [ ("peer", inPeer)
                       , ("connect",inConnect)
                       , ("seek", inSeek)
                       , ("path", inPath)
                       , ("bridge", inBridge)
                       , ("link", inLink)
                       ]
          , swSeeks = Map.empty
          , swWaits = []
          , swWaiting = Nothing
          -- crypto
          , swRNG = rng
          , swCountOnline = 0
          , swCountTx = 0
          , swCountRx = 0
          }
  {-
  put sw
  linkLoop -- should never return
  sw' <- get
  return sw'
  -}
  return sw
-- | Create a system RNG seeded from a fresh entropy pool.
initRNG :: IO SystemRNG
initRNG = cprgCreate <$> createEntropyPool
-- ---------------------------------------------------------------------
-- | Names (csids) of all cipher sets this switch supports.
crypt_supported :: TeleHash [String]
crypt_supported = gets (Map.keys . swCSets)
-- ---------------------------------------------------------------------
{-
self.load = function(id)
{
if(!id || !id.parts) return "bad keys";
self.parts = id.parts;
self.id = id;
var err = loadkeys(self);
if(err) return err;
if(Object.keys(self.cs).length == 0) return "missing cipher sets";
self.hashname = parts2hn(self.parts);
return false;
}
-}
-- | Install the given identity into the switch state: store the raw 1a
-- key material in swId, derive the part fingerprint, load the
-- cipher-set keys ('loadkeys'), then compute and store the hashname.
loadId :: Id -> TeleHash ()
loadId anId = do
  logT $ "loadId:" ++ show anId
  sw <- get
  -- put $ sw { swParts = [("1a",id1a anId),("1a_secret",id1a_secret anId)] }
  let (Hash hash1a) = mkHashFromB64 $ id1a anId
  put $ sw { swId = Map.fromList [("1a",id1a anId),("1a_secret",id1a_secret anId)]
           , swParts = [("1a",hash1a)]
           }
  loadkeys
  sw' <- get
  -- only a warning: the switch is left in a broken state if this fires
  when (Map.size (swCs sw') == 0) $ logT "missing cipher sets"
  let hashName = parts2hn (swParts sw')
  put $ sw' { swHashname = Just hashName }
-- These three are set in crypto 1a loadkey
{-
id.key = bs
id.public
id.private
-}
{-
csids <- crypt_supported
Just hn <- crypt_loadkey_1a (id1a anId) (Just $ id1a_secret anId)
logT $ "loadId:got hn=" ++ show (hcHashName hn)
put $ sw {swParts = hcParts hn, swHashname = Just (hcHashName hn)}
return ()
-}
-- | Older identity loader kept for reference: goes through
-- crypt_loadkey_1a directly.  Partial: the @Just hn <-@ bind fails if
-- key loading returns Nothing.
loadIdOld :: Id -> TeleHash ()
loadIdOld anId = do
  logT $ "loadId:" ++ show anId
  sw <- get
  -- NOTE(review): csids is computed but never used
  csids <- crypt_supported
  Just hn <- crypt_loadkey_1a (id1a anId) (Just $ id1a_secret anId)
  logT $ "loadId:got hn=" ++ show (hcHashName hn)
  put $ sw {swParts = hcParts hn, swHashname = Just (hcHashName hn)}
  return ()
-- ---------------------------------------------------------------------
-- | Rebuild swCs/swKeys from the raw key material in swId, using the
-- cipher set registered for each entry of swParts.
loadkeys :: TeleHash ()
loadkeys = do
  sw <- get
  put $ sw { swCs = Map.empty, swKeys = Map.empty }
  -- logT $ "loadkeys:swId=" ++ show (swId sw)
  let
    doOne (csid,_v) = do
      -- NOTE(review): this sw shadows the outer one; later references
      -- in this block see the state as of this 'get', so the swCs
      -- delete below is effectively discarded when the key loads
      sw <- get
      let cs' = Map.delete csid (swCs sw)
      put sw {swCs = cs'}
      case Map.lookup csid (swCSets sw) of
        Nothing -> logT $ csid ++ " not supported"
        Just cset -> do
          let mpub = Map.lookup csid (swId sw)
              mpriv = Map.lookup (csid ++ "_secret") (swId sw)
          case mpub of
            Nothing -> return ()
            Just pub -> do
              mhc <- (csLoadkey cset) pub mpriv
              case mhc of
                Just hc -> do
                  -- timeNow <- io getClockTime
                  -- randomHexVal <- randomHEX 16
                  let keys = Map.insert csid pub (swKeys sw)
                  -- h = mkHashContainer (hcHashName hc) timeNow randomHexVal
                  -- h' = h { hSelf = Just hc }
                  -- allHc = Map.insert (hcHashName hc) h' (swAll sw)
                  -- put $ sw {swKeys = keys, swAll = allHc }
                  put $ sw { swKeys = keys
                           , swIdCrypto = Just hc
                           , swCs = Map.insert csid hc (swCs sw)
                           }
                Nothing -> return ()
              return ()
          return ()
  mapM_ doOne (swParts sw)
  -- NOTE(review): sw' only feeds the commented-out log line
  sw' <- get
  -- logT $ "loadkeys: (swKeys,swCs)=" ++ show (swKeys sw',swCs sw')
  return ()
{-
function loadkeys(self)
{
self.cs = {};
self.keys = {};
var err = false;
Object.keys(self.parts).forEach(function(csid){
self.cs[csid] = {};
if(!self.CSets[csid]) err = csid+" not supported";
err = err||self.CSets[csid].loadkey(self.cs[csid], self.id[csid], self.id[csid+"_secret"]);
self.keys[csid] = self.id[csid];
});
return err;
}
-}
-- ---------------------------------------------------------------------
-- |process the incoming packet
-- was swReceive
-- Open packets (handshakes) are verified, establish or refresh a line,
-- and get an open sent back; line packets are looked up by line id,
-- decrypted via the line's cipher set, and dispatched to 'hnReceive'.
receive :: NetworkPacket -> Path -> ClockTime -> TeleHash ()
receive rxPacket path timeNow = do
  -- logT $ "receive: (rxTelex,remoteipp,timeNow):" ++ show (rxPacket,path,timeNow)
  if (networkPacketLen rxPacket) == 2
    then return () -- Empty packets are NAT pings
    else do
      counterVal <- incPCounter
      let packet = NetworkTelex
                    { ntSender = path
                    , ntId = counterVal
                    , ntAt = timeNow
                    , ntPacket = rxPacket
                    }
      -- debug(">>>>",Date(),msg.length, packet.head_length, packet.body_length,[path.type,path.ip,path.port,path.id].join(","));
      logT $ ">>>>" ++ show (timeNow, networkPacketLen rxPacket,(showPath path))
      case rxPacket of
        {-
        HeadJson _j -> do
          -- handle any LAN notifications
          logT $ "receive: not processing JSON"
          assert False undefined
          return ()
        -}
        -- NOTE(review): b and bs are unused here (deopenize works on
        -- the whole NetworkTelex)
        OpenPacket b bs -> do
          -- process open packet
          open <- deopenize packet
          case open of
            DeOpenizeVerifyFail -> do
              logT $ "receive.deopenize: couldn't decode open"
              return ()
            _ -> do
              -- logT $ "receive.deopenize verified ok " -- ++ show open
              -- partial: assumes the deopenized js is a JSON object
              let (Aeson.Object js) = doJs open
              if not (expectedKeysPresent (doJs open) ["line","to","from","at"])
                then do
                  logT $ "deopenize missing required js fields, have only:" ++ show (HM.keys js)
                  return ()
                else do
                  if not (isHEX (valToBs (js HM.! "line")) 32)
                    then do
                      logT $ "invalid line id enclosed" ++ show (js HM.! "line")
                      return ()
                    else do
                      sw <- get
                      if (valToString (js HM.! "to") /= unHN (gfromJust "deopenize" $ swHashname sw))
                        then do
                          logT $ "open for wrong hashname" ++ show (js HM.! "to")
                          return ()
                        else do
                          let (Aeson.Object jsparts) = js HM.! "from"
                              jsparts2 = map (\(k,v) -> (Text.unpack k,valToString v)) $ HM.toList jsparts
                          -- logT $ "deopenize:jsparts=" ++ show jsparts2
                          let mcsid = partsMatch (swParts sw) jsparts2
                          -- logT $ "deopenize:mcsid=" ++ show mcsid
                          if mcsid /= Just (doCsid open)
                            then do
                              logT $ "open with mismatch CSID" ++ show (mcsid,doCsid open)
                              return ()
                            else do
                              -- var from = self.whokey(open.js.from,open.key);
                              -- if (!from) return warn("invalid hashname", open.js.from);
                              let keyVal = BC.unpack $ B64.encode (doKey open)
                              mfrom <- whokey jsparts2 (Left keyVal)
                              case mfrom of
                                Nothing -> do
                                  logT $ "deopenize:invalid hashname=" ++ show jsparts2
                                  return ()
                                Just from -> do
                                  -- // make sure this open is legit
                                  -- if (typeof open.js.at != "number") return warn("invalid at", open.js.at);
                                  -- logT $ "deopenize:js.at=" ++ show (js HM.! "at")
                                  case (js HM.! "at") of
                                    (Data.Aeson.Number atVal) -> do
                                      -- duplicate open and there's newer line packets, ignore it
                                      -- if(from.openAt && open.js.at <= from.openAt && from.lineAt == from.openAt) return;
                                      -- debug-only pretty printers for the commented log lines below
                                      let showTod (TOD a b) = "(TOD" ++ show a ++ " " ++ show b ++ ")"
                                          showmTod Nothing = "Nothing"
                                          showmTod (Just tod) = "Just " ++ showTod tod
                                      -- convert the JS millisecond timestamp to a ClockTime
                                      let jsAt = TOD (round (atVal / 1000)) (((round atVal) `mod` 1000) * 10^9)
                                      -- logT $ "deopenize:jsAt=" ++ showTod jsAt
                                      -- logT $ "deopenize:hOpenAt=" ++ showmTod (hOpenAt from)
                                      -- logT $ "deopenize:hLineAt=" ++ showmTod (hLineAt from)
                                      if (isJust (hOpenAt from) && jsAt <= (fromJust (hOpenAt from))
                                          && (hLineAt from == hOpenAt from))
                                        then do
                                          logT $ "deopenize:duplicate open and newer line packets, ignoring"
                                          return ()
                                        else do
                                          -- open is legit!
                                          logT $ "inOpen verified" ++ show (hHashName from)
                                          -- add this path in
                                          path2 <- hnPathIn from path
                                          from' <- getHNsafe (hHashName from) "receive"
                                          -- if new line id, reset incoming channels
                                          if Just (valToBs (js HM.! "line")) /= (hLineIn from')
                                            then do
                                              logT $ "new line"
                                              putHN from'
                                              forM_ (Map.keys (hChans from')) $ \id1 -> do
                                                if channelSlot id1 == channelSlot (hChanOut from')
                                                  then return () -- our ids
                                                  else do
                                                    fm <- getHNsafe (hHashName from') "deopenize"
                                                    case Map.lookup id1 (hChans fm) of
                                                      Nothing -> return ()
                                                      Just c -> chanFail (hHashName from') c Nothing
                                                    delChan (hHashName from') id1
                                                    return ()
                                            else return ()
                                          -- update values
                                          from2 <- getHNsafe (hHashName from) "deopenize"
                                          let from3 = from2
                                                { hOpenAt = Just jsAt
                                                , hLineIn = Just (b16Tobs $ valToBs (js HM.! "line"))
                                                }
                                          putHN from3
                                          -- send an open back
                                          mpacket <- hnOpen from3
                                          case mpacket of
                                            Nothing -> do
                                              logT $ "deopenize: hnOpen returned Nothing"
                                              return ()
                                            Just msg -> do
                                              send path2 msg (Just $ hHashName from3)
                                          -- self.send(path,from.open(),from);
                                          -- line is open now!
                                          from4 <- getHNsafe (hHashName from) "deopenize"
                                          sw3 <- get
                                          let from5 = from4 { hCsid = Just (doCsid open) }
                                              mcset = Map.lookup (doCsid open) (swCSets sw3)
                                          case mcset of
                                            Nothing -> do
                                              logT $ "deopenize: cset lookup failed"
                                            Just cset -> do
                                              (csOpenLine cset) from5 open
                                              from8 <- getHNsafe (hHashName from) "deopenize.8"
                                              logT $ "line open " ++ show (hHashName from5,hLineOut from5,B16.encode $ gfromJust "deopenize" $ hLineIn from8)
                                              -- logT $ "deopenize:hEncKey from8=" ++ show (hEncKey from8,hDecKey from8)
                                              sw4 <- get
                                              put $ sw4 { swLines
                                                          = Map.insert (hLineOut from8) (hHashName from8) (swLines sw4)}
                                              -- resend the last sent packet again
                                              -- logT $ "deopenize:not doing last packet resend"
                                              case (hLastPacket from5) of
                                                Nothing -> return ()
                                                Just msg -> do
                                                  from6 <- getHNsafe (hHashName from5) "deopenize.9"
                                                  putHN $ from6 { hLastPacket = Nothing }
                                                  from7 <- getHNsafe (hHashName from5) "deopenize.10"
                                                  logT $ "deopenize:resending packet"
                                                  -- logT $ "deopenize:hEncKey from6=" ++ show (hEncKey from6)
                                                  -- logT $ "deopenize:hEncKey from7=" ++ show (hEncKey from7)
                                                  void $ hnSend from7 msg
                                              -- if it was a lan seed, add them
                                              -- NOTE(review): this sw4 shadows the earlier one and
                                              -- re-reads the state, which is why it must be re-got here
                                              sw4 <- get
                                              if hIsLocal from5 && Set.notMember (hHashName from5) (swLocals sw4)
                                                then put $ sw4 { swLocals = Set.insert (hHashName from5) (swLocals sw4)}
                                                else return ()
                                    _ -> do
                                      logT $ "deopenize:invalid is, need Number:" ++ show (js HM.! "at")
                                      return ()
                                  return ()
        LinePacket pbody -> do
          -- its a line
          logT $ "receive:got line msg"
          -- first 16 bytes of the body identify the line
          let lineID = BC.unpack $ B16.encode $ BC.take 16 pbody
          logT $ "receive:lineID=" ++ lineID
          sw <- get
          case Map.lookup lineID (swLines sw) of
            Nothing -> do
              -- a matching line is required to decode the packet
              -- NOTE(review): unimplemented (JS version falls back to
              -- bridge forwarding here, see reference below)
              assert False undefined
            Just lineHn -> do
              -- decrypt and process
              line <- getHNsafe lineHn "receive.line"
              case Map.lookup (gfromJust "receive.line" $ hCsid line) (swCSets sw) of
                Nothing -> do
                  logT $ "receive.line:couldn't load cset for:" ++ show (hCsid line)
                Just cset -> do
                  res <- (csDelineize cset) line packet
                  case res of
                    Left err -> do
                      logT $ "couldn't decrypt line:" ++ err
                      assert False undefined
                    Right pkt -> do
                      putHN $ line { hLineAt = hOpenAt line }
                      line2 <- getHNsafe (hHashName line) "receive.line"
                      hnReceive line2 pkt
                      return ()
  {-
  // or it's a line
  if(packet.head.length == 0)
  {
    var lineID = packet.body.slice(0,16).toString("hex");
    var line = packet.from = self.lines[lineID];
    // a matching line is required to decode the packet
    if(!line) {
      if(!self.bridgeLine[lineID]) return debug("unknown line received", lineID, JSON.stringify(packet.sender));
      debug("BRIDGE",JSON.stringify(self.bridgeLine[lineID]),lineID);
      var id = crypto.createHash("sha256").update(packet.body).digest("hex")
      if(self.bridgeCache[id]) return; // drop duplicates
      self.bridgeCache[id] = true;
      // flat out raw retransmit any bridge packets
      return self.send(self.bridgeLine[lineID],msg);
    }
    // decrypt and process
    var err;
    if((err = self.CSets[line.csid].delineize(line, packet))) return debug("couldn't decrypt line",err,packet.sender);
    line.lineAt = line.openAt;
    line.receive(packet);
    return;
  }
  -}
  return ()
{-
// self.receive, raw incoming udp data
function receive(msg, path)
{
var self = this;
var packet = pdecode(msg);
if(!packet) return warn("failed to decode a packet from", path, msg.toString());
if(packet.length == 2) return; // empty packets are NAT pings
packet.sender = path;
packet.id = self.pcounter++;
packet.at = Date.now();
debug(">>>>",Date(),msg.length, packet.head_length, packet.body_length,[path.type,path.ip,path.port,path.id].join(","));
// handle any LAN notifications
if(packet.js.type == "lan") return inLan(self, packet);
if(packet.js.type == "seed") return inLanSeed(self, packet);
// either it's an open
if(packet.head.length == 1)
{
var open = deopenize(self, packet);
if (!open || !open.verify) return warn("couldn't decode open",open);
if (!isHEX(open.js.line, 32)) return warn("invalid line id enclosed",open.js.line);
if(open.js.to !== self.hashname) return warn("open for wrong hashname",open.js.to);
var csid = partsMatch(self.parts,open.js.from);
if(csid != open.csid) return warn("open with mismatch CSID",csid,open.csid);
var from = self.whokey(open.js.from,open.key);
if (!from) return warn("invalid hashname", open.js.from);
// make sure this open is legit
if (typeof open.js.at != "number") return warn("invalid at", open.js.at);
// duplicate open and there's newer line packets, ignore it
if(from.openAt && open.js.at <= from.openAt && from.lineAt == from.openAt) return;
// open is legit!
debug("inOpen verified", from.hashname);
from.recvAt = Date.now();
// add this path in
path = from.pathIn(path);
// if new line id, reset incoming channels
if(open.js.line != from.lineIn)
{
debug("new line");
Object.keys(from.chans).forEach(function(id){
if(id % 2 == from.chanOut % 2) return; // our ids
if(from.chans[id]) from.chans[id].fail({js:{err:"reset"}});
delete from.chans[id];
});
}
// update values
var line = {};
from.openAt = open.js.at;
from.lineIn = open.js.line;
// send an open back
self.send(path,from.open(),from);
// line is open now!
from.csid = open.csid;
self.CSets[open.csid].openline(from, open);
debug("line open",from.hashname,from.lineOut,from.lineIn);
self.lines[from.lineOut] = from;
// resend the last sent packet again
if (from.lastPacket) {
var packet = from.lastPacket;
delete from.lastPacket;
from.send(packet)
}
// if it was a lan seed, add them
if(from.local && self.locals.indexOf(from) == -1) self.locals.push(from);
return;
}
// or it's a line
if(packet.head.length == 0)
{
var lineID = packet.body.slice(0,16).toString("hex");
var line = packet.from = self.lines[lineID];
// a matching line is required to decode the packet
if(!line) {
if(!self.bridgeLine[lineID]) return debug("unknown line received", lineID, JSON.stringify(packet.sender));
debug("BRIDGE",JSON.stringify(self.bridgeLine[lineID]),lineID);
var id = crypto.createHash("sha256").update(packet.body).digest("hex")
if(self.bridgeCache[id]) return; // drop duplicates
self.bridgeCache[id] = true;
// flat out raw retransmit any bridge packets
return self.send(self.bridgeLine[lineID],msg);
}
// decrypt and process
var err;
if((err = self.CSets[line.csid].delineize(line, packet))) return debug("couldn't decrypt line",err,packet.sender);
line.lineAt = line.openAt;
line.receive(packet);
return;
}
if(Object.keys(packet.js).length > 0) warn("dropping incoming packet of unknown type", packet.js, packet.sender);
}
-}
-- ---------------------------------------------------------------------
{-
self.deliver = function(type, callback){ self.networks[type] = callback};
-}
-- was swDeliver
-- | Register a network sender callback for a transport type (port of
-- @self.deliver@ above).
-- NOTE(review): unimplemented stub - always bottoms via
-- 'assert False undefined'.  The pure signature is a placeholder; the
-- real implementation will likely need to be in TeleHash.
deliver :: String -> () -> ()
deliver = (assert False undefined)
-- ---------------------------------------------------------------------
{-
self.networks["relay"] = function(path,msg){
if(path.relay.ended) return debug("dropping dead relay");
path.relay.send({body:msg});
};
-}
-- | Relay transport sender (port of @self.networks["relay"]@ above):
-- forward a line packet over an established relay channel, dropping it
-- if the relay has ended.
-- NOTE(review): unimplemented stub - always bottoms.
relay :: Path -> LinePacket -> Maybe HashName -> TeleHash ()
relay path msg _ = (assert False undefined)
-- ---------------------------------------------------------------------
{-
self.send = function(path, msg, to){
if(!msg) return debug("send called w/ no packet, dropping");
if(to) path = to.pathOut(path);
if(!path) return debug("send called w/ no valid network, dropping");
debug("<<<<",Date(),msg.length,[path.type,path.ip,path.port,path.id].join(","),to&&to.hashname);
// try to send it via a supported network
if(self.networks[path.type]) self.networks[path.type](path,msg,to);
// if the path has been active in or out recently, we're done
if(Date.now() - path.lastIn < defaults.nat_timeout || Date.now() - path.lastOut < (defaults.chan_timeout / 2)) return;
// no network support or unresponsive path, try a bridge
self.bridge(path,msg,to);
};
-}
-- | Do the send, where the Telex has a fully lineized packet in it
-- was swSend
send :: Path -> LinePacket -> Maybe HashName -> TeleHash ()
send mpath msg mto = do
  -- logT $ "send entered for path:" ++ showPath mpath
  timeNow <- io getClockTime
  sw <- get
  -- When a destination hashname is given, let it normalise/choose the
  -- outgoing path (hnPathOut may invalidate a dead relay path).
  path <- case mto of
    Just toHn -> do
      to <- getHNsafe toHn "send"
      mpath' <- hnPathOut to mpath
      -- NOTE(review): gfromJust crashes when hnPathOut returns Nothing
      -- (ended relay); the JS version just drops the packet here.
      return $ gfromJust "send" mpath'
    Nothing -> return mpath
  -- if(!path) return debug("send called w/ no valid network, dropping");
  -- debug("<<<<",Date(),msg.length,[path.type,path.ip,path.port,path.id].join(","),to&&to.hashname);
  logT $ "<<<<" ++ show (timeNow,BC.length $ unLP msg) ++ "," ++ showPath mpath
  -- try to send it via a supported network
  -- if(self.networks[path.type])
  --   self.networks[path.type](path,msg,to);
  logT $ "send:path=" ++ showPath path
  -- dispatch to the sender registered for this transport type
  case Map.lookup (pathType path) (swNetworks sw) of
    Nothing -> do
      logT $ "send: no path"
      return ()
    Just (_pid,sender) -> do
      sender path msg mto
      return ()
  -- TODO(port): update path activity stats and fall back to a bridge
  -- when the path is unresponsive (see the commented JS below).
  logT $ "send: must still update stats, and bridge if necessary"
{-
case mpid of
Nothing -> do
logT "send called w/ no valid network, dropping"
return ()
Just pid -> do
-- if the path has been active in or out recently (ideally by the
-- send process), we're done
timeNow <- io getClockTime
sw' <- get -- get a fresh one, send may have updated it
let Just p = Map.lookup pid (swPaths sw')
if (isTimeOut timeNow (pLastIn p) (natTimeout defaults)) ||
(isTimeOut timeNow (pLastOut p) (chanTimeout defaults))
then
-- no network support or unresponsive path, try a bridge
(swBridge sw') path msg mto
else
return ()
-}
{-
if(to) path = to.pathOut(path);
if(!path) return debug("send called w/ no valid network, dropping");
debug("<<<<",Date(),msg.length,[path.type,path.ip,path.port,path.id].join(","),to&&to.hashname);
// try to send it via a supported network
if(self.networks[path.type]) self.networks[path.type](path,msg,to);
// if the path has been active in or out recently, we're done
if(Date.now() - path.lastIn < defaults.nat_timeout || Date.now() - path.lastOut < (defaults.chan_timeout / 2)) return;
// no network support or unresponsive path, try a bridge
self.bridge(path,msg,to);
};
-}
-- ---------------------------------------------------------------------
-- was swPathSet
-- | Record one of our own local network paths (port of @self.pathSet@
-- below), which should also trigger link maintenance when the address
-- changes while online.
-- NOTE(review): unimplemented stub - always bottoms.
pathSet :: Path -> TeleHash ()
pathSet path = (assert False undefined)
{-
self.pathSet = function(path)
{
var updated = (self.paths[path.type] && JSON.stringify(self.paths[path.type]) == JSON.stringify(path));
self.paths[path.type] = path;
// if ip4 and local ip, set nat mode
if(path.type == "ipv4") self.nat = isLocalIP(path.ip);
// trigger pings if our address changed
if(self.isOnline && !updated)
{
debug("local network updated, checking links")
linkMaint(self);
}
}
-}
-- ---------------------------------------------------------------------
-- | this creates a hashname identity object (or returns existing)
-- If it creates a new one, this is inserted into the index
-- was swWhois
whois :: HashName -> TeleHash (Maybe HashContainer)
whois hn = do
  -- logT $ "whois entered for:" ++ show hn
  sw <- get
  -- never return ourselves
  if (fromMaybe (HN "") (swHashname sw)) == hn
    then do
      -- logT "whois called for self"
      return Nothing
    else do
      -- logT "whois not called for self"
      -- if we already have it, return it
      case Map.lookup hn (swAll sw) of
        Just hc -> do
          -- logT $ "whois got cached val" -- ++ show hc
          return (Just hc)
        Nothing -> do
          -- logT "whois not seen value"
          timeNow <- io getClockTime
          randomHexVal <- randomHEX 16
          -- logT $ "whois:randomHexVal=" ++ show randomHexVal
          -- NOTE(review): the fromJust calls below crash when
          -- swHashname is Nothing; the self-check above only compares
          -- against HN "" in that case - confirm swHashname is always
          -- set before whois is reachable.
          let hc = mkHashContainer hn timeNow randomHexVal
              hc' = hc {hBucket = dhash (fromJust $ swHashname sw) hn }
              -- channel id parity: the lexically-first hashname uses
              -- even outgoing ids (2), the other side odd (1),
              -- mirroring the JS sort() below
              chanOut = if (head $ sort [fromJust $ swHashname sw,hn]) == (fromJust $ swHashname sw)
                          then 2 else 1
              hc'' = hc' { hChanOut = chanOut }
              swAll' = Map.insert hn hc'' (swAll sw)
          put $ sw {swAll = swAll'}
          return $ Just hc''
{-
// this creates a hashname identity object (or returns existing)
function whois(hashname)
{
var self = this;
// validations
if(!hashname) { warn("whois called without a hashname", hashname, new Error().stack); return false; }
if(typeof hashname != "string") { warn("wrong type, should be string", typeof hashname,hashname); return false; }
hashname = hashname.split(",")[0]; // convenience if an address is passed in
if(!isHEX(hashname, 64)) { warn("whois called without a valid hashname", hashname); return false; }
// never return ourselves
if(hashname === self.hashname) { debug("whois called for self"); return false; }
var hn = self.all[hashname];
if(hn) return hn;
// make a new one
hn = self.all[hashname] = {hashname:hashname, chans:{}, self:self, paths:[], isAlive:0};
hn.at = Date.now();
hn.bucket = dhash(self.hashname, hashname);
if(!self.buckets[hn.bucket]) self.buckets[hn.bucket] = [];
// to create a new channels to this hashname
var sort = [self.hashname,hashname].sort();
hn.chanOut = (sort[0] == self.hashname) ? 2 : 1;
hn.start = channel;
hn.raw = raw;
// manage network information consistently, called on all validated incoming packets
hn.pathIn = function(path)
{
path = hn.pathGet(path);
// first time we've seen em
if(!path.lastIn && !path.lastOut)
{
debug("PATH INNEW",JSON.stringify(path.json),hn.paths.map(function(p){return JSON.stringify(p.json)}));
// for every new incoming path, trigger a sync (delayed so caller can continue/respond first)
setTimeout(hn.sync,1);
// update public ipv4 info
if(path.type == "ipv4" && !isLocalIP(path.ip))
{
hn.ip = path.ip;
hn.port = path.port;
}
// cull any invalid paths of the same type
hn.paths.forEach(function(other){
if(other == path) return;
if(other.type != path.type) return;
if(!pathValid(other)) hn.pathEnd(other);
});
// "local" custom paths, we must bridge for
if(path.type == "local") hn.bridging = true;
// track overall if we trust them as local
if(isLocalPath(path)) hn.isLocal = true;
}
path.lastIn = Date.now();
self.recvAt = Date.now();
// end any active relay
if(hn.to && hn.to.type == "relay" && path.type != "relay") hn.to.relay.fail();
// update default if better
if(!pathValid(hn.to) || pathValid(path)) hn.to = path;
hn.alive = pathValid(hn.to);
return path;
}
// handle all incoming line packets
hn.receive = function(packet)
{
// if((Math.floor(Math.random()*10) == 4)) return warn("testing dropping randomly!");
if(!packet.js || typeof packet.js.c != "number") return warn("dropping invalid channel packet",packet.js);
debug("LINEIN",JSON.stringify(packet.js));
hn.recvAt = Date.now();
// normalize/track sender network path
packet.sender = hn.pathIn(packet.sender);
// find any existing channel
var chan = hn.chans[packet.js.c];
if(chan === false) return; // drop packet for a closed channel
if(chan) return chan.receive(packet);
// start a channel if one doesn't exist, check either reliable or unreliable types
var listening = {};
    if(typeof packet.js.seq == "undefined") listening = self.raws;
if(packet.js.seq === 0) listening = self.rels;
if(!listening[packet.js.type])
{
// bounce error
if(!packet.js.end && !packet.js.err)
{
warn("bouncing unknown channel/type",packet.js);
var err = (packet.js.type) ? "unknown type" : "unknown channel"
hn.send({js:{err:err,c:packet.js.c}});
}
return;
}
// verify incoming new chan id
if(packet.js.c % 2 == hn.chanOut % 2) return warn("channel id incorrect",packet.js.c,hn.chanOut)
// make the correct kind of channel;
var kind = (listening == self.raws) ? "raw" : "start";
var chan = hn[kind](packet.js.type, {bare:true,id:packet.js.c}, listening[packet.js.type]);
chan.receive(packet);
}
// track who told us about this hn
hn.via = function(from, address)
{
if(typeof address != "string") return warn("invalid see address",address);
if(!hn.vias) hn.vias = {};
if(hn.vias[from.hashname]) return;
hn.vias[from.hashname] = address;
}
// just make a seek request conveniently
hn.seek = function(hashname, callback)
{
var bucket = dhash(hn.hashname, hashname);
var prefix = hashname.substr(0, Math.ceil((255-bucket)/4)+2);
hn.raw("seek", {timeout:defaults.seek_timeout, retry:3, js:{"seek":prefix}}, function(err, packet, chan){
callback(packet.js.err,Array.isArray(packet.js.see)?packet.js.see:[]);
});
}
// return our address to them
hn.address = function(to)
{
if(!to) return "";
var csid = partsMatch(hn.parts,to.parts);
if(!csid) return "";
if(!hn.ip) return [hn.hashname,csid].join(",");
return [hn.hashname,csid,hn.ip,hn.port].join(",");
}
// send a simple lossy peer request, don't care about answer
hn.peer = function(hashname, csid)
{
if(!csid || !self.parts[csid]) return;
var js = {"peer":hashname};
js.paths = hn.pathsOut();
hn.raw("peer",{js:js, body:getkey(self,csid)}, function(err, packet, chan){
if(err) return;
if(!packet.body) return warn("relay in w/ no body",packet.js,packet.from.hashname);
// create a network path that maps back to this channel
var path = {type:"relay",relay:chan,json:{type:"relay",relay:packet.from.hashname}};
if(packet.js.bridge) path = packet.sender; // sender is offering to bridge, use them!
self.receive(packet.body, path);
});
}
// return the current open packet
hn.open = function()
{
if(!hn.parts) return false; // can't open if no key
if(hn.opened) return hn.opened;
hn.opened = openize(self,hn);
return hn.opened;
}
// generate current paths array to them, for peer and paths requests
hn.pathsOut = function()
{
var paths = [];
if(self.paths.pub4) paths.push({type:"ipv4", ip:self.paths.pub4.ip, port:self.paths.pub4.port});
if(self.paths.pub6) paths.push({type:"ipv6", ip:self.paths.pub6.ip, port:self.paths.pub6.port});
if(self.paths.http)
{
if(self.paths.http.http) paths.push({type:"http", http:self.paths.http.http});
else if(self.paths.pub4) paths.push({type:"http", http:"http://"+self.paths.pub4.ip+":"+self.paths.http.port});
}
if(self.paths.webrtc) paths.push({type:"webrtc"});
if(hn.isLocal)
{
if(self.paths.lan4) paths.push({type:"ipv4", ip:self.paths.lan4.ip, port:self.paths.lan4.port});
if(self.paths.lan6) paths.push({type:"ipv6", ip:self.paths.lan6.ip, port:self.paths.lan6.port});
}
return paths;
}
return hn;
}
-}
-- ---------------------------------------------------------------------
-- |handle all incoming line packets, post decryption
-- The hn holds the from
hnReceive :: HashContainer -> RxTelex -> TeleHash ()
hnReceive hn rxTelex = do
  let packet = rtPacket rxTelex -- NOTE(review): currently unused
      jsHashMap = rtJs rxTelex
  -- every channel packet must carry a numeric "c" (channel id)
  case (HM.lookup "c" jsHashMap) of
    Nothing -> logT $ "dropping invalid channel packet, c missing"
    Just (Aeson.Number c) -> do
      logT $ "LINEIN " ++ showJson jsHashMap
      timeNow <- io getClockTime
      putHN hn { hRecvAt = Just timeNow }
      hn2a <- getHNsafe (hHashName hn) "hnReceive"
      -- normalize/track sender network path
      path <- hnPathIn hn2a (rtSender rxTelex)
      hn2 <- getHNsafe (hHashName hn) "hnReceive.2"
      -- find any existing channel
      case Map.lookup (CID (round c)) (hChans hn2) of
        Just chan -> do
          if (chDone chan)
            then return () -- drop packet for a closed channel
            else chanReceive hn2 chan (rxTelex { rtSender = path })
        Nothing -> do
          -- start a channel if one doesn't exist, check either reliable or unreliable types
          sw <- get
          -- "seq" absent => raw/unreliable listeners (swRaws);
          -- "seq" == 0  => reliable listeners (swRels);
          -- any other "seq" is unsupported (bottoms if ever matched,
          -- and the non-numeric case is not handled at all -
          -- NOTE(review): incomplete pattern here)
          let (listening,cKind)
                = case HM.lookup "seq" jsHashMap of
                    Nothing -> (swRaws sw,hnRaw)
                    Just (Aeson.Number n) -> if (round n) == 0
                      then ((swRels sw),start)
                      else (Map.empty,assert False undefined)
              mptype = HM.lookup "type" jsHashMap
              ptype = case mptype of
                        Nothing -> "*unknown*"
                        Just v -> valToString v
          case Map.lookup ptype listening of
            Nothing -> do
              -- bounce error
              if (not (HM.member "end" jsHashMap)) && (not ( HM.member "err" jsHashMap))
                then do
                  logT $ "bouncing unknown channel/type" ++ showJson jsHashMap
                  -- TODO(port): send the {err,c} bounce (see JS below)
                  assert False undefined
                else do
                  logT $ "hnReceive: discarding:" ++ showJson jsHashMap
            Just fn -> do
              -- verify incoming new chan id: its parity must differ
              -- from the ids we allocate ourselves
              if channelSlot (CID (round c)) == channelSlot (hChanOut hn2)
                then do
                  logT $ "channel id incorrect" ++ show (c,hChanOut hn2)
                  return ()
                else do
                  -- make the correct kind of channel
                  let cb = Map.findWithDefault (assert False undefined) ptype listening
                  -- var chan = hn[kind](packet.js.type, {bare:true,id:packet.js.c}, listening[packet.js.type]);
                  chan <- cKind hn2 ptype (rxTelexToTelex rxTelex) cb
                  putChan (hHashName hn2) chan
                  hn3 <- getHNsafe (hHashName hn2) "hnReceive"
                  chanReceive hn3 chan rxTelex
    Just _ -> logT $ "dropping invalid channel packet, c not numeric"
{-
// handle all incoming line packets
hn.receive = function(packet)
{
// if((Math.floor(Math.random()*10) == 4)) return warn("testing dropping randomly!");
if(!packet.js || typeof packet.js.c != "number") return warn("dropping invalid channel packet",packet.js);
debug("LINEIN",JSON.stringify(packet.js));
hn.recvAt = Date.now();
// normalize/track sender network path
packet.sender = hn.pathIn(packet.sender);
// find any existing channel
var chan = hn.chans[packet.js.c];
if(chan === false) return; // drop packet for a closed channel
if(chan) return chan.receive(packet);
// start a channel if one doesn't exist, check either reliable or unreliable types
var listening = {};
if(typeof packet.js.seq == "undefined") listening = self.raws;
if(packet.js.seq === 0) listening = self.rels;
if(!listening[packet.js.type])
{
// bounce error
if(!packet.js.end && !packet.js.err)
{
warn("bouncing unknown channel/type",packet.js);
var err = (packet.js.type) ? "unknown type" : "unknown channel"
hn.send({js:{err:err,c:packet.js.c}});
}
return;
}
// verify incoming new chan id
if(packet.js.c % 2 == hn.chanOut % 2) return warn("channel id incorrect",packet.js.c,hn.chanOut)
// make the correct kind of channel;
var kind = (listening == self.raws) ? "raw" : "start";
var chan = hn[kind](packet.js.type, {bare:true,id:packet.js.c}, listening[packet.js.type]);
chan.receive(packet);
}
-}
-- ---------------------------------------------------------------------
-- | Prepare a path for outgoing use (port of hn.pathOut below): ends a
-- relay path whose channel has finished, otherwise stamps lastOut and
-- promotes the path to the default 'hTo' when the current default is
-- invalid.
hnPathOut :: HashContainer -> Path -> TeleHash (Maybe Path)
hnPathOut hn path = do
  path' <- hnPathGet hn path
  -- a relay whose underlying channel has ended is unusable
  if (pathType path' == PtRelay)
     && isJust (pRelay path') && chEnded (fromJust (pRelay path'))
    then do
      hnPathEnd hn path'
      return Nothing
    else do
      timeNow <- io getClockTime
      let path'' = path' { pLastOut = Just timeNow }
          -- only replace the default when it is currently invalid and
          -- this path is valid (JS: !pathValid(hn.to) && pathValid(path))
          hn' = if (not $ pathValid timeNow (hTo hn)) &&
                   (pathValid timeNow (Just path''))
                  then hn { hTo = Just path''}
                  else hn
      putHN hn'
      return $ Just path''
{-
hn.pathOut = function(path)
{
if(!path) return false;
path = hn.pathGet(path);
if(path.type == "relay" && path.relay.ended) return hn.pathEnd(path);
path.lastOut = Date.now();
if(!pathValid(hn.to) && pathValid(path)) hn.to = path;
return path;
}
-}
-- ---------------------------------------------------------------------
-- | Remove a path from the hashname's path map (port of hn.pathEnd
-- below), clearing the default 'hTo' if it pointed at that path.
-- Seed paths are never removed.
hnPathEnd :: HashContainer -> Path -> TeleHash ()
hnPathEnd hn path = do
  if (pIsSeed path) -- never remove a seed path
    then return ()
    else do
      let hn2 = if hTo hn == Just path
                  then hn {hTo = Nothing}
                  else hn
          -- paths = filter (/=path) (Map.elems (hPaths hn))
          -- paths are keyed by their JSON form in the map
          paths = Map.delete (pJson path) (hPaths hn)
      putHN $ hn2 { hPaths = paths }
      logT $ "PATH END" ++ show path
{-
hn.pathEnd = function(path)
{
if(path.seed) return false; // never remove a seed-path
if(hn.to == path) hn.to = false;
path.gone = true;
var index = hn.paths.indexOf(path);
if(index >= 0) hn.paths.splice(index,1);
debug("PATH END",JSON.stringify(path.json));
return false;
}
-}
-- ---------------------------------------------------------------------
-- | Normalise and record an incoming path for this hashname (port of
-- hn.pathIn below).  On first sight of a path it schedules a delayed
-- sync, records public ipv4 info, and culls invalid same-type paths;
-- it then stamps activity times and possibly promotes the path to the
-- default 'hTo'.
hnPathIn :: HashContainer -> Path -> TeleHash Path
hnPathIn hn path = do
  path1 <- hnPathGet hn path
  timeNow <- io getClockTime
  -- first time we've seen em
  if (pLastIn path1 == Nothing && pLastOut path1 == Nothing)
    then do
      -- debug("PATH INNEW",JSON.stringify(path.json),hn.paths.map(function(p){return JSON.stringify(p.json)}));
      logT $ "PATH INNEW " ++ showPath path1
      -- for every new incoming path, trigger a sync (delayed so caller can continue/respond first)
      oneShotTimer (1 * onesec) (SignalSyncPath (hHashName hn))
      -- update public ipv4 info
      let hn1 = if (pathType path1 == PtIPv4 && not (isLocalIP (gfromJust "hnPathIn" (pathIp path1))))
                  then hn {hIp = pathIp path1, hPort = pathPort path1}
                  else hn
      putHN hn1
      -- cull any invalid paths of the same type
      forM_ (Map.elems $ hPaths hn1) $ \other -> do
        if ((other == path1) -- ++AZ++ TODO: check what we define as equality
            || (pathType other /= pathType path1))
          then return ()
          else do
            if not (pathValid timeNow (Just other))
              then do
                -- re-read: earlier iterations may have updated state
                hnNow <- getHNsafe (hHashName hn1) "hnPathIn"
                void $ hnPathEnd hnNow other
                return ()
              else return ()
      -- "local" custom paths we must bridge for
      if pathType path1 == PtLocal
        then do
          hnNow <- getHNsafe (hHashName hn1) "hnPathIn.1"
          putHN $ hnNow {hBridging = True}
        else return ()
      -- track overall if we trust them as local
      if isLocalPath path1
        then do
          hnNow <- getHNsafe (hHashName hn1) "hnPathIn.2"
          putHN $ hnNow {hIsLocal = True}
        else return ()
    else return ()
  -- re-read the container: the first-sight branch may have changed it
  hnNow <- getHNsafe (hHashName hn) "hnPathIn.3"
  -- end any active relay
  logT $ "hnPathIn: must still code 'end any active relay'"
  -- if(hn.to && hn.to.type == "relay" && path.type != "relay") hn.to.relay.fail();
  -- update default if better
  let hnNow2 = if (not (pathValid timeNow (hTo hnNow))
                   || pathValid timeNow (Just path1))
                 then hnNow {hTo = Just path1}
                 else hnNow
  logT $ "hnPathIn: hTo=" ++ show (hTo hnNow2)
  let path2 = path1 {pLastIn = Just timeNow }
  -- persist activity stamps and the (possibly promoted) default path;
  -- paths are keyed by their JSON form
  putHN hnNow2 { hIsAlive = pathValid timeNow (hTo hnNow2)
               , hRecvAt = Just timeNow
               , hPaths = Map.insert (pJson path2) path2 (hPaths hnNow2)
               }
  return path2
{-
hn.pathIn = function(path)
{
path = hn.pathGet(path);
// first time we've seen em
if(!path.lastIn && !path.lastOut)
{
debug("PATH INNEW",JSON.stringify(path.json),hn.paths.map(function(p){return JSON.stringify(p.json)}));
// for every new incoming path, trigger a sync (delayed so caller can continue/respond first)
setTimeout(hn.sync,1);
// update public ipv4 info
if(path.type == "ipv4" && !isLocalIP(path.ip))
{
hn.ip = path.ip;
hn.port = path.port;
}
// cull any invalid paths of the same type
hn.paths.forEach(function(other){
if(other == path) return;
if(other.type != path.type) return;
if(!pathValid(other)) hn.pathEnd(other);
});
// "local" custom paths, we must bridge for
if(path.type == "local") hn.bridging = true;
// track overall if we trust them as local
if(isLocalPath(path)) hn.isLocal = true;
}
path.lastIn = Date.now();
self.recvAt = Date.now();
// end any active relay
if(hn.to && hn.to.type == "relay" && path.type != "relay") hn.to.relay.fail();
// update default if better
if(!pathValid(hn.to) || pathValid(path)) hn.to = path;
hn.alive = pathValid(hn.to);
return path;
}
-}
-- ---------------------------------------------------------------------
-- | Try to send the packet, return True on success
-- try to send a packet to a hashname, doing whatever is possible/necessary
hnSend :: HashContainer -> Telex -> TeleHash Bool
hnSend hn packet = do
  -- First attempt: if we already have an open line, lineize and send
  -- on the directed path or the default best path.
  sent <- case hLineIn hn of
    Just lineIn -> do
      -- Already have an open line, send on it
      logT $ "line sending " ++ show (hHashName hn, B16.encode lineIn)
      -- TODO: dispatch this via CSets
      msg <- telexToPacket packet
      (hn',lined) <- crypt_lineize_1a hn msg
      putHN hn'
      -- directed packets are preferred, just dump and done
      -- logT $ "hnSend:tTo packet=" ++ show (tTo packet)
      case tTo packet of
        Just to -> do
          send to lined (Just $ hHashName hn')
          return True
        Nothing -> do
          -- send to the default best path
          logT $ "hnSend:hTo hn=" ++ show (hTo hn)
          case hTo hn of
            Just to -> do
              send to lined (Just $ hHashName hn')
              return True
            Nothing -> do return False -- need to fall through
    Nothing -> do
      logT $ "hnSend:hLineIn = Nothing"
      return False
  logT $ "hnSend:sent=" ++ show sent
  if sent
    then return True -- we're done
    else do
      -- we've fallen through, either no line, or no valid paths
      -- logT $ "alive failthrough" ++ show (hSendSeek hn, Map.keys (hVias hn))
      hn1a <- getHNsafe (hHashName hn) "hnSend.2"
      let hn' = hn1a { hIsAlive = False
                     , hLastPacket = Just packet -- will be resent if/when an open is received
                     }
      putHN hn'
      -- always send to all known paths, increase resiliency
      mp <- hnOpen hn'
      hn2 <- getHNsafe (hHashName hn) "hnSend"
      -- NOTE(review): ret is bound but never used below; the overall
      -- result of this branch comes from the seek/vias logic instead
      ret <- case mp of
        Nothing -> do
          logT $ "hnSend: hnOpen returned Nothing"
          return False
        Just lpacket -> do
          -- logT $ "hnSend: hnOpen returned packet" -- ++ show lpacket
          forM_ (Map.elems $ hPaths hn2) $ \path -> do
            send path lpacket (Just $ hHashName hn2)
          return True
      -- also try using any via information to create a new line
      let
        vias :: TeleHash Bool
        vias = do
          -- TODO(port): connect via intermediaries (see JS below);
          -- currently bottoms if ever reached
          assert False undefined
      -- if there's via information, just try that
      if (Map.size (hVias hn2) /= 0)
        then vias
        else do
          -- never too fast, worst case is to try to seek again
          timeNow <- io getClockTime
          -- dt is only forced when hSendSeek is a Just, so the
          -- fromJust is guarded by the || short-circuit below
          let dt = diffClockTimes timeNow (fromJust $ hSendSeek hn2)
              diff5sec = TimeDiff 0 0 0 0 0 5 0
          if (hSendSeek hn2 == Nothing
              || dt > diff5sec)
            then do
              putHN $ hn2 {hSendSeek = Just timeNow }
              let
                -- seek callback: if the packet is still pending,
                -- process any vias learned from the seek
                fn err = do
                  hn3 <- getHNsafe (hHashName hn2) "hnSend.2"
                  case (hLastPacket hn3) of
                    Nothing -> do
                      -- the packet was already sent elsewhere
                      return ()
                    Just _ -> do
                      -- process any new vias
                      vias
                      return ()
              seek (hHashName hn2) fn
              return False
            else do
              return False
  -- return False
{-
// try to send a packet to a hashname, doing whatever is possible/necessary
hn.send = function(packet){
// if there's a line, try sending it via a valid network path!
if(hn.lineIn)
{
debug("line sending",hn.hashname,hn.lineIn);
var lined = packet.msg || self.CSets[hn.csid].lineize(hn, packet);
hn.sentAt = Date.now();
// directed packets are preferred, just dump and done
if(packet.to) return self.send(packet.to, lined, hn);
// send to the default best path
if(hn.to) self.send(hn.to, lined, hn);
// if it was good, we're done, if not fall through
if(pathValid(hn.to)) return;
}
// we've fallen through, either no line, or no valid paths
debug("alive failthrough",hn.sendSeek,Object.keys(hn.vias||{}));
hn.alive = false;
hn.lastPacket = packet; // will be resent if/when an open is received
// always send to all known paths, increase resiliency
hn.paths.forEach(function(path){
self.send(path, hn.open(), hn);
});
  // also try using any via information to create a new line
function vias()
{
if(!hn.vias) return;
hn.sentOpen = false; // whenever we send a peer, we'll always need to resend any open regardless
// try to connect vias
var todo = hn.vias;
delete hn.vias; // never use more than once
Object.keys(todo).forEach(function(via){
var address = todo[via].split(",");
if(address.length <= 1) return;
if(address.length == 4 && address[2].split(".").length == 4 && parseInt(address[3]) > 0)
{
// NAT hole punching
var path = {type:"ipv4",ip:address[2],port:parseInt(address[3])};
self.send(path,pencode());
// if possibly behind the same NAT, set flag to allow/ask to relay a local path
if(self.nat && address[2] == (self.paths.pub4 && self.paths.pub4.ip)) hn.isLocal = true;
}
// send the peer request
self.whois(via).peer(hn.hashname, address[1]);
});
}
// if there's via information, just try that
if(hn.vias) return vias();
// never too fast, worst case is to try to seek again
if(!hn.sendSeek || (Date.now() - hn.sendSeek) > 5000)
{
hn.sendSeek = Date.now();
self.seek(hn, function(err){
if(!hn.lastPacket) return; // packet was already sent elsewise
vias(); // process any new vias
});
}
}
-}
-- ---------------------------------------------------------------------
-- |send a full network path sync
hnSync :: HashName -> TeleHash ()
hnSync hn = do
  hc <- getHNsafe hn "hnSync"
  logT $ "SYNCING:" ++ show hn ++ " " ++ (intercalate "," $ map showPath (Map.elems $ hPaths hc))
  -- compose all of our known paths we can send to them
  paths <- hnPathsOut hn
  -- logT $ "hnSync:paths=" ++ (intercalate "," $ map showPath paths)
  timeNow <- io getClockTime
  -- check all paths at once: fire a raw "path" channel request down
  -- each one and process the responses via inPath
  forM_ paths $ \path -> do
    logT $ "PATHLOOP" ++ show (Map.size (hPaths hc)) ++ "," ++ showPath path
    logT $ "hnSync: must process local/relay paths when implemented"
    -- if(["relay","local"].indexOf(path.type) == -1) js.path = path.json;
    -- our outgoing priority of this path
    -- js.priority = (path.type == "relay") ? 0 : 1;
    let js = HM.fromList [("priority",Number 1)] :: (HM.HashMap Text.Text Aeson.Value)
    let js2 = HM.insert "paths" (toJSON paths) js
    -- response callback for the raw channel
    let cb :: Bool -> RxTelex -> RawChannel -> TeleHash ()
        cb err packet chan = do
          logT $ "hnSync.cb " ++ intercalate "," [show err,showJson (rtJs packet),showChan chan]
          -- hnSync.cbTrue,{"end":true,"c":5,"priority":2,"path":{"ip":"10.2.2.83","port":49285,"type":"ipv4"}},(path,HN "3036d9b6f9525660b71d58bacd80d0ef0f6e191f1622daefd461823c366eb4fc",CID 5)
          -- when it actually errored and hasn't been active, invalidate it
          if err
            then do
              logT $ "hnSync:must check for lastIn activity since the send"
              -- assert False undefined
            else do
              -- handles any response .priority and .paths
              inPath True packet chan
    let msg = RxTelex { rtId = 0
                      , rtSender = path
                      , rtAt = timeNow
                      , rtJs = js2
                      , rtPacket = newPacket
                      , rtChanId = Nothing
                      }
    -- raw :: HashContainer -> String -> RxTelex -> RxCallBack -> TeleHash Channel
    hc2 <- getHNsafe hn "hnSync.2"
    hnRaw hc2 "path" (rxTelexToTelex msg) cb
{-
// send a full network path sync
hn.sync = function()
{
debug("SYNCING",hn.hashname,hn.paths.map(function(p){return JSON.stringify(p.json)}));
// compose all of our known paths we can send to them
var paths = hn.pathsOut();
// check all paths at once
hn.paths.forEach(function(path){
debug("PATHLOOP",hn.paths.length,JSON.stringify(path.json));
var js = {};
if(["relay","local"].indexOf(path.type) == -1) js.path = path.json;
// our outgoing priority of this path
js.priority = (path.type == "relay") ? 0 : 1;
if(paths.length > 0) js.paths = paths;
var lastIn = path.lastIn;
hn.raw("path",{js:js, timeout:3000, to:path}, function(err, packet){
// when it actually errored and hasn't been active, invalidate it
if(err && err !== true && path.lastIn == lastIn) path.lastIn = 0;
else inPath(true, packet); // handles any response .priority and .paths
});
});
}
-}
-- ---------------------------------------------------------------------
-- |return the current open packet if open, else generates a new one
hnOpen :: HashContainer -> TeleHash (Maybe LinePacket)
hnOpen hn = do
  -- can't open without the peer's key parts
  case hParts hn of
    Nothing -> return Nothing
    _ -> if isJust (hOpened hn)
           -- reuse the cached open packet when we have one
           then return $ hOpened hn
           else do
             op <- openize (hHashName hn)
             -- re-read: openize may have updated the stored container
             hn2 <- getHNsafe (hHashName hn) "hnOpen"
             putHN $ hn2 { hOpened = op}
             -- logT $ "hnOpen:hEcc=" ++ show (hEcc hn2)
             return op
{-
// return the current open packet
hn.open = function()
{
if(!hn.parts) return false; // can't open if no key
if(hn.opened) return hn.opened;
hn.opened = openize(self,hn);
return hn.opened;
}
-}
-- ---------------------------------------------------------------------
-- | Generate the paths we can advertise to this hashname, for peer and
-- path-sync requests (port of hn.pathsOut below): every known path,
-- except that local paths are only offered when we trust the peer as
-- local (hIsLocal).
--
-- NOTE(review): the previous body was @filter isLocalPath@ behind a
-- dead @if False@ guard, which advertised ONLY local paths - the
-- inverse of the JS reference.  This keeps the hashname's own path
-- map as the source (the JS iterates self.paths) - confirm against
-- callers.
hnPathsOut :: HashName -> TeleHash [Path]
hnPathsOut hn = do
  hc <- getHNsafe hn "hnPathsOut"
  -- a path is shareable unless it is local and the peer is not
  let shareable p = hIsLocal hc || not (isLocalPath p)
  return $ filter shareable (Map.elems $ hPaths hc)
{-
// generate current paths array to them, for peer and paths requests
hn.pathsOut = function()
{
var paths = [];
self.paths.forEach(function(path){
if(isLocalPath(path) && !hn.isLocal) return;
paths.push(path);
});
return paths;
}
-}
-- ---------------------------------------------------------------------
-- | Render a Telex's JSON map into its wire packet, storing the result
-- in tPacket.  An empty JSON map yields a bare packet with no head;
-- otherwise the JSON is encoded into the packet head.
telexToPacket :: Telex -> TeleHash Telex
telexToPacket telex =
  case HM.toList (tJson telex) of
    [] -> return $ telex { tPacket = Just newPacket }
    _kvs -> do
      let jsBytes = lbsTocbs $ encode (tJson telex)
      logT $ "telexToPacket: encoded js=" ++ BC.unpack jsBytes
      return $ telex { tPacket = Just (newPacket { paHead = HeadJson jsBytes }) }
-- ---------------------------------------------------------------------
{-
function whokey(parts, key, keys)
{
var self = this;
if(typeof parts != "object") return false;
var csid = partsMatch(self.parts,parts);
if(!csid) return false;
hn = self.whois(parts2hn(parts));
if(!hn) return false;
hn.parts = parts;
if(keys) key = keys[csid]; // convenience for addSeed
var err = loadkey(self,hn,csid,key);
if(err)
{
warn("whokey err",hn.hashname,err);
return false;
}
return hn;
}
-}
-- was swWhokey
-- | Resolve a hashname identity from its parts plus a public key
-- (either a raw key, or a csid-keyed map as used by addSeed), loading
-- the key for the best matching cipher-set id (port of whokey below).
-- NOTE(review): the JS treats a loadkey failure as overall failure;
-- here the container is still returned with hSelf = Nothing - confirm
-- callers handle that.
whokey :: Parts -> Either String (Map.Map String String) -> TeleHash (Maybe HashContainer)
whokey parts keyVal = do
  sw <- get
  let mcsid = partsMatch (swParts sw) parts
  -- logT $ "whokey:mcsid=" ++ show mcsid
  -- TODO: put this bit in the Maybe Monad
  r <- case mcsid of
    Nothing -> return Nothing
    Just csid -> do
      mhn <- whois (parts2hn parts)
      -- logT $ "whokey:whois returned:" ++ show mhn
      case mhn of
        Nothing -> return Nothing
        Just hn -> do
          -- load the key: either supplied directly, or looked up by
          -- csid in the supplied key map (convenience for addSeed)
          mhc <- case keyVal of
            Left k -> loadkey csid k
            Right keys ->
              case Map.lookup csid keys of
                Nothing -> return Nothing
                Just k -> loadkey csid k
          return (Just $ hn { hSelf = mhc, hCsid = Just csid })
  case r of
    Nothing -> do
      logT $ "whokey err:" ++ show (parts,keyVal)
      return Nothing
    Just hn -> do
      -- persist the parts on the container before returning it
      let hn' = hn {hParts = Just parts}
      putHN hn'
      return (Just hn')
-- ---------------------------------------------------------------------
{-
function partsMatch(parts1, parts2)
{
if(typeof parts1 != "object" || typeof parts2 != "object") return false;
var ids = Object.keys(parts1).sort();
var csid;
while(csid = ids.pop()) if(parts2[csid]) return csid;
return false;
}
-}
-- | Find the preferred cipher-set id common to two parts lists
-- (port of partsMatch above).  The JS pops ids off the END of the
-- sorted key list, so the HIGHEST matching csid wins; we sort
-- descending to mirror that (the previous version took the head of an
-- ascending sort, preferring the LOWEST csid).
partsMatch :: Parts -> Parts -> Maybe String
partsMatch parts1 parts2 = r
  where
    -- candidate ids from parts1, highest first
    ids = reverse $ sort $ map fst parts1
    -- ids available on the other side
    p2 = Set.fromList $ map fst parts2
    common = filter (\k -> Set.member k p2) ids
    -- total replacement for the previous partial `head`
    r = case common of
          [] -> Nothing
          (c:_) -> Just c
-- ---------------------------------------------------------------------
{-
self.start = function(hashname,type,arg,cb)
{
var hn = self.whois(hashname);
if(!hn) return cb("invalid hashname");
return hn.start(type,arg,cb);
}
-}
-- TODO: This is the switch level one, that just calls the hn one
-- was swStart
-- | Switch-level channel start; per the JS self.start above it should
-- resolve the hashname and delegate to the hashname-level start.
-- Unimplemented stub: crashes via 'assert False' if ever called.
start :: HashContainer -> String -> a -> RxCallBack -> TeleHash RawChannel
start hashname typ arg cb = (assert False undefined)
-- ---------------------------------------------------------------------
-- |Configures or checks bridging.  Passing (Just True) enables global
-- bridging; the result reports whether we bridge globally or, when a
-- hashname is supplied, whether that peer bridges for us.
isBridge :: Maybe Bool -> Maybe HashName -> TeleHash Bool
isBridge arg mhn = do
  -- enable global bridging when explicitly requested
  case arg of
    Just True -> do
      sw <- get
      put $ sw { swBridging = True }
    _ -> return ()
  -- re-read state so a just-enabled flag is observed
  bridging <- gets swBridging
  if bridging
    then return True
    else case mhn of
      Nothing -> return bridging
      Just hn -> do
        mhc <- whois hn
        return $ maybe False hBridging mhc
{-
// configures or checks
function isBridge(arg)
{
var self = this;
if(arg === true) self.bridging = true;
if(self.bridging) return true;
if(!arg) return self.bridging;
var check = (typeof arg == "string")?self.whois(arg):arg;
if(check && check.bridging) return true;
return false;
}
-}
-- ---------------------------------------------------------------------
{-
function addSeed(arg) {
var self = this;
if(!arg.parts) return warn("invalid args to addSeed",arg);
var seed = self.whokey(arg.parts,false,arg.keys);
if(!seed) return warn("invalid seed info",arg);
if(Array.isArray(arg.paths)) arg.paths.forEach(function(path){
path = seed.pathGet(path);
path.seed = true;
});
seed.isSeed = true;
self.seeds.push(seed);
}
-}
-- was swAddSeed
-- | Register a seed: resolve (and key-load) its hashname container via
-- 'whokey', touch each advertised path on the container, and append
-- the hashname to the switch seed list.  Invalid seed info is logged
-- and ignored.
addSeed :: SeedInfo -> TeleHash ()
addSeed arg = do
  logT $ "addSeed:args=" ++ show (sId arg)
  -- (removed an unused 'sw <- get' binding here; state is re-read
  -- below after the path updates)
  mseed <- whokey (sParts arg) (Right (Map.fromList (sKeys arg)))
  case mseed of
    Nothing -> do
      logT $ "invalid seed info:" ++ show arg
      return ()
    Just seed -> do
      -- ensure each advertised path exists on the container
      forM_ (sPaths arg) $ \path -> do
        void $ hnPathGet seed path
      sw' <- get
      put $ sw' { swSeeds = (swSeeds sw') ++ [(hHashName seed)] }
-- ---------------------------------------------------------------------
-- was swOnline
-- | Bring the switch online: mark state, generate a fresh LAN token,
-- and link to every configured seed.  If there are outstanding waits,
-- defer by stashing a continuation of this call in swWaiting.
online :: CallBack -> TeleHash ()
online callback = do
  sw <- get
  if swWaits sw /= []
    then put $ sw {swWaiting = Just (online callback)}
    else do
      setIsOnline True
      -- ping lan
      token <- randomHEX 16
      setLanToken $ token
      -- TODO: send the lan packet too
      logT $ "online: must still send the lan token message"
      -- (swSend sw) (PathType "lan") (pencode Telex BL.empty) Nothing
      -- error "call swSend"
      -- NOTE(review): seeds are read from the pre-setIsOnline snapshot
      -- 'sw'; fine as long as nothing above mutates swSeeds
      case (swSeeds sw) of
        [] -> do
          logT "no seeds"
          callback
        seeds -> do
          let
            -- safely callback only once or when all seeds return
            -- NOTE(review): unlike the JS done() below, this only
            -- logs; the once-only/alive-count logic is not ported yet
            done = do
              logT $ "online:done called"
          forM_ seeds $ \ seed -> do
            logT $ "online:processing:" ++ show seed
            let fn hn = do
                  logT $ "online:fn active for :" ++ show hn
                  hc <- getHNsafe hn "online"
                  if hIsAlive hc
                    then hnSync hn
                    else return ()
                  done
            -- NOTE(review): partial pattern -- whois returning Nothing
            -- raises a pattern-match failure here; TODO confirm seeds
            -- are always resolvable at this point
            Just hcSeed <- whois seed
            hnLink (hHashName hcSeed) (Just fn)
  {-
  function online(callback)
  {
    var self = this;
    if(self.waits.length > 0) return self.waiting = function(){self.online(callback)};
    self.isOnline = true;
    // ping lan
    self.lanToken = randomHEX(16);
    self.send({type:"lan"}, pencode({type:"lan",lan:self.lanToken,from:self.parts}));
    var dones = self.seeds.length;
    if(!dones) {
      warn("no seeds");
      return callback(null,0);
    }
    // safely callback only once or when all seeds return
    function done()
    {
      if(!dones) return; // already called back
      var alive = self.seeds.filter(function(seed){return seed.alive}).length;
      if(alive)
      {
        callback(null,alive);
        dones = 0;
        return;
      }
      dones--;
      // failed
      if(!dones) callback("offline",0);
    }
    self.seeds.forEach(function(seed){
      seed.link(function(){
        if(seed.alive) seed.sync();
        done();
      });
    });
  }
  -}
  return ()
-- ---------------------------------------------------------------------
-- | Set the swIsOnline flag
setIsOnline :: Bool -> TeleHash ()
setIsOnline flag = get >>= \st -> put (st { swIsOnline = flag })
-- | Set the swLanToken value
setLanToken :: String -> TeleHash ()
setLanToken tok = do
  st <- get
  put (st { swLanToken = Just tok })
-- ---------------------------------------------------------------------
{-
self.listen = function(type, callback){
if(typeof type != "string" || typeof callback != "function") return warn("invalid arguments to listen");
if(type.substr(0,1) !== "_") type = "_"+type;
self.rels[type] = callback;
};
-}
-- was swListen
-- | Per the JS self.listen above: should register a callback for a
-- reliable channel type.  Unimplemented stub.  NOTE(review): lives in
-- IO (not TeleHash) and takes () for the callback -- the signature
-- looks provisional; confirm before wiring up.
listen :: String -> () -> IO ()
listen typ callback = (assert False undefined)
-- ---------------------------------------------------------------------
-- | Network layer. Generate an `open` network packet for the given hashname
openize :: HashName -> TeleHash (Maybe LinePacket)
openize toHn = do
  to <- getHNsafe toHn "openize"
  sw <- get
  case hCsid to of
    Nothing -> do
      logT $ "can't open without key"
      return Nothing
    Just _ -> do
      timeNow <- io getClockTime
      -- stamp the line creation time only once so the generated line
      -- id/key stays stable across repeated opens
      let to2 = case hLineAt to of
                  Nothing -> to { hLineAt = Just timeNow }
                  Just _ -> to
      putHN to2
      let inner = OpenizeInner
                    { oiAt = gfromJust "openize" $ hLineAt to2
                    , oiTo = hHashName to2
                    , oiFrom = swParts sw
                    , oiLine = hLineOut to2
                    }
      fmap Just (crypt_openize_1a to2 inner)
{-
function openize(self, to)
{
if(!to.csid)
{
console.log("can't open w/ no key");
return undefined;
}
if(!to.lineOut) to.lineOut = randomHEX(16);
if(!to.lineAt) to.lineAt = Date.now();
var inner = {}
inner.at = to.lineAt; // always the same for the generated line id/key
inner.to = to.hashname;
inner.from = self.parts;
inner.line = to.lineOut;
return self.CSets[to.csid].openize(self, to, inner);
}
-}
-- ---------------------------------------------------------------------
-- | Decode an incoming @open@ packet by dispatching to the cipher set
-- named in its single-byte header.  Line packets and unknown cipher
-- set ids yield DeOpenizeVerifyFail.
deopenize :: NetworkTelex -> TeleHash DeOpenizeResult
deopenize open = do
  let p = ntPacket open
  logT $ "DEOPEN :" ++ show (networkPacketLen p)
  case p of
    LinePacket _ -> return DeOpenizeVerifyFail
    OpenPacket csHex _pbody -> do
      sw <- get
      -- the head byte is the csid, hex-encoded (e.g. 0x1a -> "1a")
      let csid = BC.unpack $ B16.encode (BC.pack [w2c csHex])
      case Map.lookup csid (swCSets sw) of
        Nothing -> return DeOpenizeVerifyFail
        -- hlint: was (ret <- csDeopenize cs p; return ret)
        Just cs -> (csDeopenize cs) p
{-
function deopenize(self, open)
{
// console.log("DEOPEN",open.body.length);
var ret;
var csid = open.head.charCodeAt().toString(16);
if(!self.CSets[csid]) return {err:"unknown CSID of "+csid};
try{ret = self.CSets[csid].deopenize(self, open);}catch(E){return {err:E};}
ret.csid = csid;
return ret;
}
-}
-- ---------------------------------------------------------------------
{-
function deopenize(self, open)
{
// console.log("DEOPEN",open.body.length);
var ret;
var csid = open.head.charCodeAt().toString(16);
if(!self.CSets[csid]) return {err:"unknown CSID of "+csid};
try{ret = self.CSets[csid].deopenize(self, open);}catch(E){return {err:E};}
ret.csid = csid;
return ret;
}
-}
-- ---------------------------------------------------------------------
-- |seek the dht for this hashname
-- | Start (or join) a DHT seek for the hashname.  Currently only the
-- setup is ported: the actual query loop (JS below) is stubbed with
-- 'assert False'.
seek :: HashName -> (String -> TeleHash () ) -> TeleHash ()
seek hname callback = do
  logT $ "seek:" ++ show hname
  -- Do we already have one?
  seeks <- gets swSeeks
  case Map.lookup hname seeks of
    Nothing -> do
      logT $ "seek: starting new seek for " ++ show hname
      mhn <- whois hname
      case mhn of
        Nothing -> callback "invalid hashname"
        -- NOTE(review): 'hn' is bound but never used in this branch
        Just hn -> do
          -- logT $ "seek:not implemented"
          -- SEND seek {"seek":"f50","type":"seek","c":10}
          -- load all seeds and sort to get top 3
          seeds <- getAllLiveSeedsFromDht
          -- closest-first by DHT distance to the target hashname
          let sortFunc a b = compare (dhash hname (hHashName a)) (dhash hname (hHashName b))
              seeds2 = sortBy sortFunc seeds
              seeds3 = take 3 seeds2
          dht <- gets swBuckets
          logT $ "seek:swBuckets=" ++ show dht
          logT $ "seek:seeds=" ++ show (map hHashName seeds)
          logT $ "seek:seeds3=" ++ show (map hHashName seeds3)
          if null seeds3
            then do
              -- no live seeds, nothing to be done
              return ()
            else do
              assert False undefined -- carry on here
    Just s -> do
      logT $ "seek: found existing seek " ++ show s
      assert False undefined
{-
// seek the dht for this hashname
function seek(hn, callback)
{
var self = this;
if(typeof hn == "string") hn = self.whois(hn);
if(!callback) callback = function(){};
if(!hn) return callback("invalid hashname");
var did = {};
var doing = {};
var queue = [];
var wise = {};
var closest = 255;
// load all seeds and sort to get the top 3
var seeds = []
Object.keys(self.buckets).forEach(function(bucket){
self.buckets[bucket].forEach(function(link){
if(link.hashname == hn) return; // ignore the one we're (re)seeking
if(link.seed && link.alive) seeds.push(link);
});
});
seeds.sort(function(a,b){ return dhash(hn.hashname,a.hashname) - dhash(hn.hashname,b.hashname) }).slice(0,3).forEach(function(seed){
wise[seed.hashname] = true;
queue.push(seed.hashname);
});
debug("seek starting with",queue,seeds.length);
// always process potentials in order
function sort()
{
queue = queue.sort(function(a,b){
return dhash(hn.hashname,a) - dhash(hn.hashname,b)
});
}
// track when we finish
function done(err)
{
// get all the hashnames we used/found and do final sort to return
Object.keys(did).forEach(function(k){ if(queue.indexOf(k) == -1) queue.push(k); });
Object.keys(doing).forEach(function(k){ if(queue.indexOf(k) == -1) queue.push(k); });
sort();
while(cb = hn.seeking.shift()) cb(err, queue.slice());
}
// track callback(s);
if(!hn.seeking) hn.seeking = [];
hn.seeking.push(callback);
if(hn.seeking.length > 1) return;
// main loop, multiples of these running at the same time
function loop(onetime){
if(!hn.seeking.length) return; // already returned
debug("SEEK LOOP",queue);
// if nothing left to do and nobody's doing anything, failed :(
if(Object.keys(doing).length == 0 && queue.length == 0) return done("failed to find the hashname");
// get the next one to ask
var mine = onetime||queue.shift();
if(!mine) return; // another loop() is still running
// if we found it, yay! :)
if(mine == hn.hashname) return done();
// skip dups
if(did[mine] || doing[mine]) return onetime||loop();
var distance = dhash(hn.hashname, mine);
if(distance > closest) return onetime||loop(); // don't "back up" further away
if(wise[mine]) closest = distance; // update distance if trusted
doing[mine] = true;
var to = self.whois(mine);
to.seek(hn.hashname, function(err, see){
see.forEach(function(item){
var sug = self.whois(item);
if(!sug) return;
// if this is the first entry and from a wise one, give them wisdom too
if(wise[to.hashname] && see.indexOf(item) == 0) wise[sug.hashname] = true;
sug.via(to, item);
queue.push(sug.hashname);
});
sort();
did[mine] = true;
delete doing[mine];
onetime||loop();
});
}
// start three of them
loop();loop();loop();
// also force query any locals
self.locals.forEach(function(local){loop(local.hashname)});
}
-}
-- ---------------------------------------------------------------------
{-
self.raw = function(type, callback){
if(typeof type != "string" || typeof callback != "function") return warn("invalid arguments to raw");
self.raws[type] = callback;
};
-}
-- raw :: String -> () -> IO ()
-- raw typ callback = (assert False undefined)
-- ---------------------------------------------------------------------
{-
-- from the c version
chan_t chan_new(switch_t s, hn_t to, char *type, uint32_t id)
{
chan_t c;
if(!s || !to || !type) return NULL;
// use new id if none given
if(!to->chanOut) chan_reset(s, to);
if(!id)
{
id = to->chanOut;
to->chanOut += 2;
}
DEBUG_PRINTF("channel new %d %s",id,type);
c = malloc(sizeof (struct chan_struct));
memset(c,0,sizeof (struct chan_struct));
c->type = strdup(type);
c->s = s;
c->to = to;
c->state = STARTING;
c->id = id;
util_hex((unsigned char*)&(s->uid),4,(unsigned char*)c->uid); // switch-wide unique id
s->uid++;
util_hex((unsigned char*)&(c->id),4,(unsigned char*)c->hexid);
if(!to->chans) to->chans = xht_new(17);
xht_set(to->chans,(char*)c->hexid,c);
xht_set(s->index,(char*)c->uid,c);
return c;
}
-}
-- ---------------------------------------------------------------------
{-
// create a reliable channel with a friendlier interface
function channel(type, arg, callback)
{
var hn = this;
var chan = {inq:[], outq:[], outSeq:0, inDone:-1, outConfirmed:-1, lastAck:-1, callback:callback};
chan.id = arg.id;
if(!chan.id)
{
chan.id = hn.chanOut;
hn.chanOut += 2;
}
hn.chans[chan.id] = chan;
chan.timeout = arg.timeout || defaults.chan_timeout;
// app originating if not bare, be friendly w/ the type, don't double-underscore if they did already
if(!arg.bare && type.substr(0,1) !== "_") type = "_"+type;
chan.type = type; // save for debug
if(chan.type.substr(0,1) != "_") chan.safe = true; // means don't _ escape the json
chan.hashname = hn.hashname; // for convenience
debug("new channel",hn.hashname,chan.type,chan.id);
// used by app to change how it interfaces with the channel
chan.wrap = function(wrap)
{
if(!channelWraps[wrap]) return false;
return channelWraps[wrap](chan);
}
// called to do eventual cleanup
chan.done = function(){
if(chan.ended) return; // prevent multiple calls
chan.ended = true;
debug("channel done",chan.id);
hn.chanDone(chan.id);
};
// used to internally fail a channel, timeout or connection failure
chan.fail = function(packet){
if(chan.errored) return; // prevent multiple calls
chan.errored = packet;
packet.from = hn;
chan.callback(packet.js.err, packet, chan, function(){});
chan.done();
}
// simple convenience wrapper to end the channel
chan.end = function(){
chan.send({end:true});
chan.done();
};
// errors are hard-send-end
chan.err = function(err){
if(chan.errored) return;
chan.errored = {js:{err:err,c:chan.id}};
hn.send(chan.errored);
chan.done();
};
// process packets at a raw level, handle all miss/ack tracking and ordering
chan.receive = function(packet)
{
// if it's an incoming error, bail hard/fast
if(packet.js.err) return chan.fail(packet);
// in errored state, only/always reply with the error and drop
if(chan.errored) return chan.send(chan.errored);
chan.lastIn = Date.now();
// process any valid newer incoming ack/miss
var ack = parseInt(packet.js.ack);
if(ack > chan.outSeq) return warn("bad ack, dropping entirely",chan.outSeq,ack);
var miss = Array.isArray(packet.js.miss) ? packet.js.miss : [];
if(miss.length > 100) {
warn("too many misses", miss.length, chan.id, packet.from.hashname);
miss = miss.slice(0,100);
}
if(miss.length > 0 || ack > chan.lastAck)
{
debug("miss processing",ack,chan.lastAck,miss,chan.outq.length);
chan.lastAck = ack;
// rebuild outq, only keeping newer packets, resending any misses
var outq = chan.outq;
chan.outq = [];
outq.forEach(function(pold){
// packet acknowleged!
if(pold.js.seq <= ack) {
if(pold.callback) pold.callback();
return;
}
chan.outq.push(pold);
if(miss.indexOf(pold.js.seq) == -1) return;
// resend misses but not too frequently
if(Date.now() - pold.resentAt < 1000) return;
pold.resentAt = Date.now();
chan.ack(pold);
});
}
// don't process packets w/o a seq, no batteries included
var seq = packet.js.seq;
if(!(seq >= 0)) return;
// auto trigger an ack in case none were sent
if(!chan.acker) chan.acker = setTimeout(function(){ delete chan.acker; chan.ack();}, defaults.chan_autoack);
// drop duplicate packets, always force an ack
if(seq <= chan.inDone || chan.inq[seq-(chan.inDone+1)]) return chan.forceAck = true;
// drop if too far ahead, must ack
if(seq-chan.inDone > defaults.chan_inbuf)
{
warn("chan too far behind, dropping", seq, chan.inDone, chan.id, packet.from.hashname);
return chan.forceAck = true;
}
// stash this seq and process any in sequence, adjust for yacht-based array indicies
chan.inq[seq-(chan.inDone+1)] = packet;
debug("INQ",Object.keys(chan.inq),chan.inDone,chan.handling);
chan.handler();
}
// wrapper to deliver packets in series
chan.handler = function()
{
if(chan.handling) return;
var packet = chan.inq[0];
// always force an ack when there's misses yet
if(!packet && chan.inq.length > 0) chan.forceAck = true;
if(!packet) return;
chan.handling = true;
if(!chan.safe) packet.js = packet.js._ || {}; // unescape all content json
chan.callback(packet.js.end, packet, chan, function(ack){
// catch whenever it was ended to start cleanup
if(packet.js.end) chan.endIn = true;
if(chan.endOut && chan.endIn) chan.done();
chan.inq.shift();
chan.inDone++;
chan.handling = false;
if(ack) chan.ack(); // auto-ack functionality
chan.handler();
});
}
// resend the last sent packet if it wasn't acked
chan.resend = function()
{
if(chan.ended) return;
if(!chan.outq.length) return;
var lastpacket = chan.outq[chan.outq.length-1];
// timeout force-end the channel
if(Date.now() - lastpacket.sentAt > chan.timeout)
{
chan.fail({js:{err:"timeout"}});
return;
}
debug("channel resending");
chan.ack(lastpacket);
setTimeout(chan.resend, defaults.chan_resend); // recurse until chan_timeout
}
// add/create ack/miss values and send
chan.ack = function(packet)
{
if(!packet) debug("ACK CHECK",chan.id,chan.outConfirmed,chan.inDone);
// these are just empty "ack" requests
if(!packet)
{
// drop if no reason to ack so calling .ack() harmless when already ack'd
if(!chan.forceAck && chan.outConfirmed == chan.inDone) return;
packet = {js:{}};
}
chan.forceAck = false;
// confirm only what's been processed
if(chan.inDone >= 0) chan.outConfirmed = packet.js.ack = chan.inDone;
// calculate misses, if any
delete packet.js.miss; // when resending packets, make sure no old info slips through
if(chan.inq.length > 0)
{
packet.js.miss = [];
for(var i = 0; i < chan.inq.length; i++)
{
if(!chan.inq[i]) packet.js.miss.push(chan.inDone+i+1);
}
}
// now validate and send the packet
packet.js.c = chan.id;
debug("SEND",chan.type,JSON.stringify(packet.js));
hn.send(packet);
// catch whenever it was ended to start cleanup
if(packet.js.end) chan.endOut = true;
if(chan.endOut && chan.endIn) chan.done();
}
// send content reliably
chan.send = function(arg)
{
if(chan.ended) return warn("can't send to an ended channel");
// create a new packet from the arg
if(!arg) arg = {};
var packet = {};
packet.js = chan.safe ? arg.js : {_:arg.js};
if(arg.type) packet.js.type = arg.type;
if(arg.end) packet.js.end = arg.end;
packet.body = arg.body;
packet.callback = arg.callback;
// do durable stuff
packet.js.seq = chan.outSeq++;
// reset/update tracking stats
packet.sentAt = Date.now();
chan.outq.push(packet);
// add optional ack/miss and send
chan.ack(packet);
// to auto-resend if it isn't acked
if(chan.resender) clearTimeout(chan.resender);
chan.resender = setTimeout(chan.resend, defaults.chan_resend);
return chan;
}
// send optional initial packet with type set
if(arg.js)
{
arg.type = type;
chan.send(arg);
}
return chan;
}
-}
-- ---------------------------------------------------------------------
-- | NAT hole-punch middleman (see JS inPeer below).  Unimplemented
-- stub: crashes via 'assert False' if ever invoked.
inPeer :: Bool -> RxTelex -> RawChannel -> TeleHash ()
inPeer = (assert False undefined)
{-
// be the middleman to help NAT hole punch
function inPeer(err, packet, chan)
{
if(err) return;
var self = packet.from.self;
if(chan.relay) return relay(self, chan, chan.relay, packet);
if(!isHEX(packet.js.peer, 64)) return;
var peer = self.whois(packet.js.peer);
if(!peer || !peer.lineIn) return; // these happen often as lines come/go, ignore dead peer requests
var js = {from:packet.from.parts};
// sanity on incoming paths array
if(!Array.isArray(packet.js.paths)) packet.js.paths = [];
// insert in incoming IP path
if(packet.sender.type.indexOf("ip") == 0) packet.js.paths.push(packet.sender.json);
// load/cleanse all paths
js.paths = [];
packet.js.paths.forEach(function(path){
if(typeof path.type != "string") return;
if(pathMatch(js.paths,path)) return; // duplicate
if(isLocalPath(path) && !peer.isLocal) return; // don't pass along local paths to public
js.paths.push(path);
});
// must bundle the senders key so the recipient can open them
chan.relay = peer.raw("connect",{js:js, body:packet.body},function(err, packet, chan2){
if(err) return;
relay(self, chan2, chan, packet);
});
}
-}
-- ---------------------------------------------------------------------
-- | Handle a connect request (someone trying to reach us; see JS
-- inConnect below).  Unimplemented stub.
inConnect :: Bool -> RxTelex -> RawChannel -> TeleHash ()
inConnect = (assert False undefined)
{-
// someone's trying to connect to us, send an open to them
function inConnect(err, packet, chan)
{
if(err || !packet.body) return;
var self = packet.from.self;
// if this channel is acting as a relay
if(chan.relay)
{
// create a virtual network path that maps back to this channel
var path = {type:"relay",relay:chan,json:{type:"relay",relay:packet.from.hashname}};
if(packet.js.bridge) path = packet.sender; // sender is offering to bridge, use them!
self.receive(packet.body, path);
return;
}
var to = chan.relay = self.whokey(packet.js.from,packet.body);
if(!chan.relay) return warn("invalid connect request from",packet.from.hashname,packet.js);
// try the suggested paths
if(Array.isArray(packet.js.paths)) packet.js.paths.forEach(function(path){
if(typeof path.type != "string") return debug("bad path",JSON.stringify(path));
self.send(path,to.open(),to);
});
// send back an open through the connect too
chan.send({body:to.open()});
}
-}
-- ---------------------------------------------------------------------
-- | Answer a seek with a "see" list of closer nodes (see JS inSeek
-- below).  Unimplemented stub.
inSeek :: Bool -> RxTelex -> RawChannel -> TeleHash ()
inSeek = (assert False undefined)
{-
// return a see to anyone closer
function inSeek(err, packet, chan)
{
if(err) return;
if(!isHEX(packet.js.seek)) return warn("invalid seek of ", packet.js.seek, "from:", packet.from.hashname);
var self = packet.from.self;
var seek = packet.js.seek;
var see = [];
var seen = {};
// see if we have any seeds to add
var bucket = dhash(self.hashname, packet.js.seek);
var links = self.buckets[bucket] ? self.buckets[bucket] : [];
// first, sort by age and add the most wise one
links.sort(function(a,b){ return a.age - b.age}).forEach(function(seed){
if(see.length) return;
if(!seed.seed) return;
see.push(seed.address(packet.from));
seen[seed.hashname] = true;
});
// sort by distance for more
links.sort(function(a,b){ return dhash(seek,a.hashname) - dhash(seek,b.hashname)}).forEach(function(link){
if(seen[link.hashname]) return;
if(link.seed || link.hashname.substr(seek.length) == seek)
{
see.push(link.address(packet.from));
seen[link.hashname] = true;
}
});
var answer = {end:true, see:see.filter(function(x){return x}).slice(0,8)};
chan.send({js:answer});
}
-}
-- ---------------------------------------------------------------------
-- update/respond to network state
-- | Handle an incoming @path@ channel packet: probe any alternate
-- paths the peer offers and, unless a path was already known, update
-- our public ip/port (if from a seed), record priority, and reply.
inPath :: Bool -> RxTelex -> RawChannel -> TeleHash ()
inPath err packet chan = do
  logT $ "inPath:" ++ show (err,chId chan) ++ showPath (rtSender packet)
  hn <- getHNsafe (chHashName chan) "inPath"
  -- check/try any alternate paths
  done <- case (HM.lookup "paths" (rtJs packet)) of
    Nothing -> return True
    Just p -> do
      let mp = (fromJSON p) :: Result [PathJson] -- :: Result [PathJson]
      case mp of
        Error err1 -> do
          logT $ "inPath: could not parse paths:" ++ err1
          return True -- do not continue with junk
        Success paths -> do
          -- logT $ "inPath:packet=" ++ show packet
          -- probe each advertised path: "seen" = already known,
          -- "new" = experimentally sent a path packet on it
          r <- forM paths $ \pathJson -> do
            let path = pathFromPathJson pathJson
            case pathMatch path (Map.elems $ hPaths hn) of
              Just p -> do
                -- logT $ "inPath:pathMatch returned:" ++ show p
                return "seen"
              Nothing -> do
                -- a new one, experimentally send it a path
                -- packet.from.raw("path",{js:{priority:1},to:path}, inPath);
                let msg = packet { rtJs = HM.fromList [("priority",toJSON (1::Int))]
                                 , rtSender = path
                                 }
                chanSendRaw (hHashName hn) chan (rxTelexToTelex msg)
                return "new"
          -- NOTE(review): 'done' becomes True when ANY offered path
          -- was already known, which skips the whole reply section
          -- below; the JS original does not short-circuit like this
          -- -- TODO confirm intended
          return $ any (=="seen") r
  logT $ "inPath:done=" ++ show done
  if done
    then return ()
    else do
      -- if path info from a seed, update our public ip/port
      if hIsSeed hn
        then do
          case HM.lookup "path" (rtJs packet) of
            Nothing -> return ()
            Just p -> do
              let mp = (fromJSON p) :: Result PathJson -- :: Result [PathJson]
              case mp of
                Error err -> do
                  logT $ "inPath: could not parse path:" ++ err
                  return ()
                Success pj@(PIPv4 (PathIPv4 ip _)) -> do
                  if not (isLocalIP ip)
                    then do
                      sw <- get
                      logT $ "updating public ipv4" ++ show (swPub4 sw,ip)
                      pathSet (pathFromPathJson pj)
                    else return ()
                Success _ -> return ()
        else return ()
      -- update any optional priority information
      -- NOTE(review): non-exhaustive match -- a "priority" value that
      -- is present but not a Number would crash here; TODO confirm
      case HM.lookup "priority" (rtJs packet) of
        Nothing -> return ()
        Just (Number p) -> do
          putHN $ hn { hPriority = Just (round p)}
      logT $ "inPath:must still adjust relative priorities. Once it clarifies."
      -- if(packet.from.to && packet.sender.priority > packet.from.to.priority) packet.from.to = packet.sender; // make the default!
      if err
        then return () -- bye bye bye!
        else do
          -- need to respond, prioritize everything above relay
          let priority = 2
          logT $ "inPath: must still prioritise over relay"
          -- var priority = (packet.sender.type == "relay") ? 0 : 2;
          hn2 <- getHNsafe (hHashName hn) "inPath"
          let rxPathJson = HM.lookupDefault (Object HM.empty) "path" (rtJs packet)
              msg1 = rxTelexToTelex packet
              msg2 = msg1 { tJson = HM.fromList [("end",toJSON True)
                                                ,("priority",Number priority)
                                                ,("path", rxPathJson)
                                                ] }
              msg3 = msg2 { tTo = Just (rtSender packet) }
          -- chan.send({js:{end:true, priority:priority, path:packet.sender.json}});
          -- logT $ "inPath:sending 2" ++ (show msg3)
          chanSendRaw (hHashName hn2) chan msg3
{-
// update/respond to network state
function inPath(err, packet, chan)
{
var self = packet.from.self;
// check/try any alternate paths
if(Array.isArray(packet.js.paths)) packet.js.paths.forEach(function(path){
if(typeof path.type != "string") return; // invalid
// don't send to ones we know about
if(pathMatch(path, packet.from.paths)) return;
// a new one, experimentally send it a path
packet.from.raw("path",{js:{priority:1},to:path}, inPath);
});
// if path info from a seed, update our public ip/port
if(packet.from.isSeed && typeof packet.js.path == "object" && packet.js.path.type == "ipv4" && !isLocalIP(packet.js.path.ip))
{
debug("updating public ipv4",JSON.stringify(self.paths.pub4),JSON.stringify(packet.js.path));
self.pathSet({type:"pub4", ip:packet.js.path.ip, port:parseInt(packet.js.path.port)})
}
// update any optional priority information
if(typeof packet.js.priority == "number"){
packet.sender.priority = packet.js.priority;
if(packet.from.to && packet.sender.priority > packet.from.to.priority) packet.from.to = packet.sender; // make the default!
}
if(err) return; // bye bye bye!
// need to respond, prioritize everything above relay
var priority = (packet.sender.type == "relay") ? 0 : 2;
// if bridging, and this path is from the bridge, flag it for lower priority
if(packet.from.bridge && pathMatch(packet.sender, self.whois(packet.from.bridge).paths)) priority = 1;
chan.send({js:{end:true, priority:priority, path:packet.sender.json}});
}
-}
-- ---------------------------------------------------------------------
-- | Handle a bridge request, if bridging is allowed (see JS inBridge
-- below).  Unimplemented stub.
inBridge :: Bool -> RxTelex -> RawChannel -> TeleHash ()
inBridge = (assert False undefined)
{-
// handle any bridge requests, if allowed
function inBridge(err, packet, chan)
{
if(err) return;
var self = packet.from.self;
// ensure valid request
if(!isHEX(packet.js.to,32) || !isHEX(packet.js.from,32) || typeof packet.js.path != "object") return warn("invalid bridge request",JSON.stringify(packet.js),packet.from.hashname);
// must be allowed either globally or per hashname
if(!self.bridging && !packet.from.bridging) return chan.send({js:{err:"not allowed"}});
// don't bridge for types we don't know
if(!self.networks[packet.js.path.type]) return chan.send({js:{err:"bad path"}});
// ignore fool line ids
if(self.lines[packet.js.to] || self.lines[packet.js.from]) return chan.send({js:{err:"bad line"}});
// set up the actual bridge paths
debug("BRIDGEUP",JSON.stringify(packet.js));
self.bridgeLine[packet.js.to] = packet.js.path;
self.bridgeLine[packet.js.from] = packet.sender;
chan.send({js:{end:true}});
}
-}
-- ---------------------------------------------------------------------
-- |Accept a DHT link
inLink :: Bool -> RxTelex -> RawChannel -> TeleHash ()
-- error case (first argument True): drop the packet silently
inLink True _packet _chan = return ()
inLink err packet chan = do
  logT $ "inLink: must do timeout"
  -- chan.timeout(defaults.nat_timeout*2); // two NAT windows to be safe
  let mlm = parseJs (rtJs packet) :: Maybe LinkMessage
  case mlm of
    Nothing -> do
      logT $ "inLink:couldn't parse LinkMessage:" ++ showJson (rtJs packet)
    Just lm -> do
      hn <- getHNsafe (chHashName chan) "inLink"
      logT $ "inLink: must capture from.age"
      -- add in this link
      putHN $ hn { hLinked = Just (chId chan)
                 , hIsSeed = lSeed lm
                 }
      storeHashInDht (chHashName chan) (hBucket hn)
      -- send a response if this is a new incoming
      if (chSentAt chan == Nothing)
        then hnLink (hHashName hn) Nothing
        else return ()
      -- look for any see and check to see if we should create a link
      logT $ "inLink: got see:" ++ (show (lSee lm))
      -- inLink: got see:["89a4cbc6c27eb913c1bcaf06bac2d8b872c7cbef626b35b6d7eaf993590d37de,1a"]
      -- each see entry is "hash,cs[,ip,port]"; take the hash part
      forM_ (lSee lm) $ \see -> do
        let fields = Text.splitOn "," (Text.pack see)
        logT $ "inLink: see in words:" ++ (show fields)
        case fields of
          (h:_) -> do
            -- NOTE(review): this inner 'hn' shadows the outer one
            -- bound from the channel's hashname above
            mhn <- whois(HN $ Text.unpack h)
            case mhn of
              Nothing -> do
                return ()
              Just hn -> do
                -- link to unlinked suggestions while the bucket has room
                if (hLinked hn) == Nothing
                  then do
                    bucketSize <- getBucketSize (hBucket hn)
                    if bucketSize < (linkK defaults)
                      then hnLink (hHashName hn) Nothing
                      else return ()
                  else return ()
          _ -> do
            logT $ "inLink:got junk see:" ++ show see
      logT $ "inLink: must still check for bridges"
      -- hand subsequent packets on this channel to maintenance
      -- ('hn' here is the OUTER binding, the channel's own hashname)
      putChan (hHashName hn) chan { chCallBack = inMaintenance }
{-
// accept a dht link
function inLink(err, packet, chan)
{
if(err) return;
var self = packet.from.self;
chan.timeout(defaults.nat_timeout*2); // two NAT windows to be safe
// add in this link
if(!packet.from.age) packet.from.age = Date.now();
packet.from.linked = chan;
packet.from.seed = packet.js.seed;
if(self.buckets[packet.from.bucket].indexOf(packet.from) == -1) self.buckets[packet.from.bucket].push(packet.from);
// send a response if this is a new incoming
if(!chan.sentAt) packet.from.link();
// look for any see and check to see if we should create a link
if(Array.isArray(packet.js.see)) packet.js.see.forEach(function(address){
if(!address) return; // garbage
var hn = self.whois(address);
if(!hn || hn.linked) return;
if(self.buckets[hn.bucket].length < defaults.link_k) hn.link();
});
// check for bridges
if(Array.isArray(packet.js.bridges)) packet.js.bridges.forEach(function(type){
if(!self.bridges[type]) self.bridges[type] = {};
self.bridges[type][packet.from.hashname] = Date.now();
});
// let mainteanance handle
chan.callback = inMaintenance;
}
-}
-- ---------------------------------------------------------------------
-- | Process incoming link messages for a linked channel
inMaintenance :: Bool -> RxTelex -> RawChannel -> TeleHash ()
inMaintenance err packet chan = do
  -- ignore if this isn't the main link
  from <- getHNsafe (chHashName chan) "inMaintenance.1"
  if (hLinked from) == Nothing
    then return ()
    else do
      if err
        then do
          -- NOTE(review): error path (unlink + bucket removal in the
          -- JS below) is not ported yet -- this crashes if reached
          assert False undefined
        else do
          let mlm = parseJs (rtJs packet) :: Maybe LinkMaintMessage
          case mlm of
            Nothing -> do
              logT $ "inMaintenance:couldn't parse LinkMessage:" ++ showJson (rtJs packet)
              -- inMaintenance:couldn't parse LinkMessage:{"seed":true,"c":1}
            Just lm -> do
              -- update seed status
              putHN $ from {hIsSeed = lmSeed lm}
              -- only send a response if we've not sent one in a while
              timeNow <- io getClockTime
              if (isTimeOut timeNow (chSentAt chan) (linkTimer defaults))
                then do
                  let
                    msg1 = rxTelexToTelex packet
                    msg2 = msg1 { tJson = HM.fromList [("seed",toJSON (hIsSeed from)) -- AZ: is this the right end?
                                                      ] }
                    msg3 = msg2 { tTo = Just (rtSender packet) }
                  logT $ "inMaintenance:sending link maintenance to" ++ show (hHashName from)
                  chanSendRaw (hHashName from) chan msg3
                else return ()
{-
function inMaintenance(err, packet, chan)
{
// ignore if this isn't the main link
if(!packet.from || !packet.from.linked || packet.from.linked != chan) return;
var self = packet.from.self;
if(err)
{
delete packet.from.linked;
var index = self.buckets[packet.from.bucket].indexOf(packet.from);
if(index > -1) self.buckets[packet.from.bucket].splice(index,1);
return;
}
// update seed status
packet.from.seed = packet.js.seed;
// only send a response if we've not sent one in a while
if((Date.now() - chan.sentAt) > Math.ceil(defaults.link_timer/2)) chan.send({js:{seed:self.seed}});
}
-}
-- ---------------------------------------------------------------------
{-
From https://github.com/telehash/telehash.org/blob/master/switch.md#seek
The response is a compact "see":[...] array of addresses that are
closest to the hash value (based on the DHT rules). The addresses are
a compound comma-delimited string containing the "hash,cs,ip,port"
(these are intentionally not JSON as the verbosity is not helpful
here), for example
"1700b2d3081151021b4338294c9cec4bf84a2c8bdf651ebaa976df8cff18075c,1a,123.45.67.89,10111".
The "cs" is the Cipher Set ID and is required. The ip and port parts
are optional and only act as hints for NAT hole punching.
-}
-- | Render the address of hashname @hn@ as seen by peer @to@, in the
-- compact \"hash,csid[,ip,port]\" form used in \"see\" lists (per the
-- switch.md spec quoted above). Returns the empty string when parts are
-- missing or no cipher set matches, mirroring the JS reference.
hnAddress :: HashName -> HashContainer -> TeleHash Aeson.Value
hnAddress hn to = do
  hc <- getHNsafe hn "hnAddress"
  if isJust (hParts hc) && isJust (hParts to)
    then do
      let mcsid = partsMatch (fromJust $ hParts hc) (fromJust $ hParts to)
      case mcsid of
        Nothing -> return (String "")
        Just csid -> do
          if isJust (hIp hc) && isJust (hPort hc)
            -- BUG FIX: unwrap the Maybe values before showing them; the
            -- previous code used @show (hIp hc)@ which rendered as
            -- "Just 1.2.3.4" and produced a malformed address string.
            then return (String $ Text.pack $ intercalate ","
                           [ (unHN $ hHashName hc)
                           , csid
                           , show (fromJust $ hIp hc)
                           , show (fromJust $ hPort hc)
                           ])
            else return (String $ Text.pack $ intercalate "," [(unHN $ hHashName hc),csid])
    else return (String "")
{-
// return our address to them
hn.address = function(to)
{
if(!to) return "";
var csid = partsMatch(hn.parts,to.parts);
if(!csid) return "";
if(!hn.ip) return [hn.hashname,csid].join(",");
return [hn.hashname,csid,hn.ip,hn.port].join(",");
}
-}
-- ---------------------------------------------------------------------
-- | DHT action: request to be stored on their side.
-- Builds a \"link\" message carrying our seed flag, a \"see\" list of up
-- to 8 nearby seed addresses (padded with other known hashnames), and —
-- when we act as a bridge — the advertised bridge transport types. Sends
-- it on the existing link channel if one is open, otherwise opens a new
-- raw \"link\" channel.
hnLink :: HashName -> Maybe HnCallBack -> TeleHash ()
hnLink hn mcb = do
  sw <- get
  hc <- getHNsafe hn "hnLink"
  -- Default to a no-op callback when none was supplied.
  let callback = case mcb of
        Just cb -> cb
        Nothing -> nullHnCb
  -- TODO:
  -- Set the JS 'see' value to
  --   sort the buckets by age
  --   pull out the seed values
  --   for each seed get the address associated with this hn
  --   take the first 8 -- (0,8)
  buckets <- getBucketContents (hBucket hc)
  let ageComp a b = compare (hAge a) (hAge b)
      buckets2 = sortBy ageComp buckets
      seeds = filter hIsSeed buckets2
  -- NOTE(review): 'see1' is computed but never used below — dead code or
  -- a leftover debug aid; 'seeVal' is recomputed from 'see' instead.
  see1 <- mapM (hnAddress hn) seeds
  logT $ "hnLink:see1=" ++ show see1
  -- add some distant ones if none or too few
  allBucketsLists <- mapM getBucketContents (Map.keys (swBuckets sw))
  let -- allBuckets = Set.fromList $ concat $ Map.elems (swBuckets sw)
      allBuckets = concat allBucketsLists
      allBucketsSet = Set.fromList (map hHashName allBuckets)
      buckets3 = Set.fromList $ take 8 $ map hHashName seeds
      allOtherBucketsHn = Set.toList (allBucketsSet Set.\\ buckets3)
  aobm <- mapM getHN allOtherBucketsHn
  let allOtherBuckets = sortBy ageComp $ catMaybes aobm
  let see = take 8 (seeds ++ allOtherBuckets)
  logT $ "hnLink:see=" ++ show (map hHashName see)
  -- NOTE(review): filtering 'see' for seeds again drops the "distant"
  -- non-seed padding added above; the JS reference pads see up to 8 with
  -- seed addresses only — confirm which behavior is wanted.
  seeVal <- mapM (hnAddress hn) $ filter hIsSeed see
  -- TODO: sort out relay/bridge
  isBr <- isBridge Nothing (Just hn)
  let toBridge :: PathType -> [Aeson.Value]
      toBridge (PtRelay) = ["relay"]
      toBridge (PtLocal) = ["local"]
      toBridge _ = []
      -- NOTE(review): the JS reference advertises every network type
      -- EXCEPT "local" and "relay"; this keeps ONLY those two. Confirm
      -- the intended polarity of this filter.
      bridges = concatMap toBridge $ nub
                  $ filter (\pt -> pt == PtRelay || pt == PtLocal)
                  $ Map.keys (swNetworks sw)
      brVals
        = if isBr
            then [("bridges",Array $ V.fromList bridges)]
            else []
  let js = HM.fromList $ ([("seed", toJSON (hIsSeed hc))
                          ,("see",Aeson.Array $ V.fromList seeVal)]
                          ++ brVals
                         )
  let msg = emptyTelex
              {
                tJson = js
              }
  case hLinked hc of
    Just linkCid -> do
      -- A link channel is already open: reuse it.
      -- linkHc <- getHNsafe linkHn "hnLink.2"
      -- hnSend linkHc msg
      mchan <- getChan hn linkCid
      chanSendRaw hn (gfromJust "hnLink" mchan) msg
      callback hn
    Nothing -> do
      -- No link yet: open a raw "link" channel whose callback feeds
      -- replies into 'inLink'.
      let rawCb err packet chan = do
            logT $ "hnLink:rawCb called"
            inLink err packet chan
            -- TODO: return packet.js.err in the callback
            callback hn
      logT $ "hnLink: must set retries"
      c <- hnRaw hc "link" msg rawCb
      logT $ "link: raw returned c=" ++ showChan c
      return ()
{-
// request a new link to them
hn.link = function(callback)
{
if(!callback) callback = function(){}
var js = {seed:self.seed};
js.see = self.buckets[hn.bucket].sort(function(a,b){ return a.age - b.age }).filter(function(a){ return a.seed }).map(function(seed){ return seed.address(hn) }).slice(0,8);
// add some distant ones if none
if(js.see.length < 8) Object.keys(self.buckets).forEach(function(bucket){
if(js.see.length >= 8) return;
self.buckets[bucket].sort(function(a,b){ return a.age - b.age }).forEach(function(seed){
if(js.see.length >= 8 || !seed.seed || js.see.indexOf(seed.address(hn)) != -1) return;
js.see.push(seed.address(hn));
});
});
if(self.isBridge(hn)) js.bridges = Object.keys(self.networks).filter(function(type){return (["local","relay"].indexOf(type) >= 0)?false:true});
if(hn.linked)
{
hn.linked.send({js:js});
return callback();
}
hn.raw("link", {retry:3, js:js}, function(err, packet, chan){
inLink(err, packet, chan);
callback(packet.js.err);
});
}
-}
-- ---------------------------------------------------------------------
-- was swBridge
-- | Try to relay a line packet toward @to@ via a known bridge host.
-- Not yet implemented; the intended logic (existing-bridge lookup, bridge
-- host selection, "bridge" channel setup) is in the JS reference below.
bridge :: Path -> LinePacket -> Maybe HashContainer -> TeleHash ()
bridge = (assert False undefined)
{-
// try finding a bridge
function bridge(path, msg, to)
{
var self = this;
var packet = pdecode(msg);
if(packet.head.length) return; // only bridge line packets
if(!to) return; // require to for line info
// check for existing bridge
var existing = pathMatch(path,to.bridges);
if(existing)
{
if(existing.bridged) return self.send(existing.bridged,msg); // leave off to to prevent loops
existing.bridgeq = msg; // queue most recent packet;
return;
}
if(!self.bridges[path.type]) return;
debug("bridging",JSON.stringify(path.json),to.hashname);
// TODO, better selection of a bridge?
var via;
Object.keys(self.bridges[path.type]).forEach(function(id){
if(id == to.hashname) return; // lolz
var hn = self.whois(id);
if(hn.alive) via = hn;
});
if(!via) return debug("couldn't find a bridge host");
// stash this so that any more bridge's don't spam
if(!to.bridges) to.bridges = [];
path.bridgeq = msg;
to.bridges.push(path);
// create the bridge
via.raw("bridge", {js:{to:to.lineIn,from:to.lineOut,path:path}}, function(end, packet){
// TODO we can try another one if failed?
if(end !== true) return debug("failed to create bridge",end,via.hashname);
// create our mapping!
path.bridged = packet.sender;
self.send(packet.sender,path.bridgeq);
delete path.bridgeq;
});
}
-}
-- ---------------------------------------------------------------------
-- |Encode the packet into a bytestring
-- |This should return the ByteString, ready for encryption
-- Not yet implemented; the wire format (JS reference below) is a 2-byte
-- big-endian head length, the JSON head, then the raw body.
-- NOTE(review): the type @Telex -> Body -> Telex@ looks suspect for an
-- encoder that is documented to return a ByteString — confirm intended
-- signature before implementing.
pencode :: Telex -> Body -> Telex
pencode = (assert False undefined)
{-
// encode a packet
function pencode(js, body)
{
var head = (typeof js == "number") ? new Buffer(String.fromCharCode(js)) : new Buffer(js?JSON.stringify(js):"", "utf8");
if(typeof body == "string") body = new Buffer(body, "binary");
body = body || new Buffer(0);
var len = new Buffer(2);
len.writeInt16BE(head.length, 0);
return Buffer.concat([len, head, body]);
}
-}
-- ---------------------------------------------------------------------
-- | Decode a raw packet into its JSON head and binary body.
-- Not yet implemented; per the JS reference below the format is a 2-byte
-- big-endian head length, a JSON head of that length, and the remainder
-- as body.
pdecode :: Packet -> (Telex,Body)
pdecode = (assert False undefined)
{-
// packet decoding
function pdecode(packet)
{
if(!packet) return (assert False undefined);
var buf = (typeof packet == "string") ? new Buffer(packet, "binary") : packet;
// read and validate the json length
var len = buf.readUInt16BE(0);
if(len > (buf.length - 2)) return undefined;
var head = buf.slice(2, len+2);
var body = buf.slice(len + 2);
// parse out the json
var js = {};
if(len > 1)
{
try {
js = JSON.parse(head.toString("utf8"));
} catch(E) {
console.log("couldn't parse JS",head.toString("hex"),E,packet.sender);
return undefined;
}
}
return {js:js, length:buf.length, head:head.toString("binary"), body:body};
}
-}
-- ---------------------------------------------------------------------
-- | Find the first already-known path describing the same transport
-- endpoint as @path1@. Two paths match when their types agree and the
-- type-specific identifying fields agree: relay id for relay paths,
-- ip+port for ipv4/ipv6, url for http, id for local; webrtc paths always
-- match. Returns Nothing when no candidate matches.
pathMatch :: Path -> [Path] -> Maybe Path
pathMatch path1 paths =
  case catMaybes (map (sameEndpoint path1) candidates) of
    (found:_) -> Just found
    []        -> Nothing
  where
    -- only paths of the same transport type can possibly match
    candidates = filter (\p -> pathType path1 == pathType p) paths
    sameEndpoint :: Path -> Path -> Maybe Path
    sameEndpoint p1 p2
      | pathType p1 == PtRelay
          && pRelay p1 == pRelay p2 = Just p2
      | (pathType p1 == PtIPv4 || pathType p1 == PtIPv6)
          && pathIp p1 == pathIp p2
          && pathPort p1 == pathPort p2 = Just p2
      | pathType p1 == PtHttp
          && pathHttp p1 == pathHttp p2 = Just p2
      | pathType p1 == PtLocal
          && pId p1 == pId p2 = Just p2
      | pathType p1 == PtWebRtc = Just p2 -- webrtc always matches
      | otherwise = Nothing
{-
function pathMatch(path1, paths)
{
var match;
if(!Array.isArray(paths)) return match;
paths.forEach(function(path2){
if(path2.type != path1.type) return;
switch(path1.type)
{
case "relay":
if(path1.relay == path2.relay) match = path2;
case "ipv4":
case "ipv6":
if(path1.ip == path2.ip && path1.port == path2.port) match = path2;
break;
case "http":
if(path1.http == path2.http) match = path2;
break;
case "local":
if(path1.id == path2.id) match = path2;
break;
case "webrtc":
match = path2; // always matches
break;
}
});
return match;
}
-}
-- ---------------------------------------------------------------------
{-
// validate if a network path is acceptable to stop at
function pathValid(path)
{
if(!path || path.gone) return false;
if(path.type == "relay" && !path.relay.ended) return true; // active relays are always valid
if(!path.lastIn) return false; // all else must receive to be valid
if(Date.now() - path.lastIn < defaults.nat_timeout) return true; // received anything recently is good
return false;
}
-}
-- ---------------------------------------------------------------------
{-
function partsMatch(parts1, parts2)
{
if(typeof parts1 != "object" || typeof parts2 != "object") return false;
var ids = Object.keys(parts1).sort();
var csid;
while(csid = ids.pop()) if(parts2[csid]) return csid;
return false;
}
-}
-- ---------------------------------------------------------------------
-- | Is this network path on a local (non-routable) network?
-- Bluetooth is always local; ipv4/ipv6 paths are local when their IP is
-- in a loopback/private/link-local range; everything else (http, relay,
-- webrtc, ...) is treated as non-local, mirroring the JS reference.
isLocalPath :: Path -> Bool
isLocalPath path
  | pathType path == PtBlueTooth = True
  | (pathType path == PtIPv4) ||
    (pathType path == PtIPv6) =
      -- BUG FIX: avoid the partial 'fromJust' — an ip-typed path with no
      -- recorded address previously crashed here; treat it as non-local.
      maybe False isLocalIP (pathIp path)
  -- http?
  | otherwise = False
{-
function isLocalPath(path)
{
if(!path || !path.type) return false;
if(path.type == "bluetooth") return true;
if(["ipv4","ipv6"].indexOf(path.type) >= 0) return isLocalIP(path.ip);
// http?
return false;
}
-}
-- ---------------------------------------------------------------------
isLocalIP :: IP -> Bool
{-
// return if an IP is local or public
function isLocalIP(ip)
{
// ipv6 ones
if(ip.indexOf(":") >= 0)
{
if(ip.indexOf("::") == 0) return true; // localhost
if(ip.indexOf("fc00") == 0) return true;
if(ip.indexOf("fe80") == 0) return true;
return false;
}
var parts = ip.split(".");
if(parts[0] == "0") return true;
if(parts[0] == "127") return true; // localhost
if(parts[0] == "10") return true;
if(parts[0] == "192" && parts[1] == "168") return true;
if(parts[0] == "172" && parts[1] >= 16 && parts[1] <= 31) return true;
if(parts[0] == "169" && parts[1] == "254") return true; // link local
return false;
}
-}
-- An IPv4 address is local when it falls in any loopback/private/
-- link-local range (JS reference above).
isLocalIP ip@(IPv4 _) = any (isMatchedTo (ipv4 ip)) localRanges
  where
    localRanges =
      [ makeAddrRange ((read "0.0.0.0")::IPv4) 8      -- "this network" (parts[0]=="0" in JS)
      , makeAddrRange ((read "127.0.0.0")::IPv4) 8    -- loopback
      , makeAddrRange ((read "10.0.0.0")::IPv4) 8
      , makeAddrRange ((read "192.168.0.0")::IPv4) 16
      -- BUG FIX: 172.16.0.0-172.31.255.255 is a /12; the previous /9
      -- mask wrongly classified 172.0.0.0-172.127.255.255 as local.
      , makeAddrRange ((read "172.16.0.0")::IPv4) 12
      , makeAddrRange ((read "169.254.0.0")::IPv4) 16 -- link local
      ]
-- BUG FIX: previously no clause matched non-IPv4 addresses, so an IPv6
-- address crashed with a pattern-match failure at runtime.
-- TODO(review): the JS reference treats ::, fc00:: and fe80:: IPv6
-- prefixes as local; add those ranges here. For now fall back to False.
isLocalIP _ = False
-- ---------------------------------------------------------------------
{-
function getkey(id, csid)
{
return id.cs && id.cs[csid] && id.cs[csid].key;
}
-}
-- ---------------------------------------------------------------------
-- | Construct a fresh 'HashContainer' for a newly-seen hashname, with
-- every field at its "unknown / not yet seen" default. @timeNow@ records
-- when we first learned of the peer; @randomHexVal@ seeds our outgoing
-- line id ('hLineOut').
mkHashContainer :: HashName -> ClockTime -> String -> HashContainer
mkHashContainer hn timeNow randomHexVal =
  H { hHashName = hn
    , hChans = Map.empty
    , hSelf = Nothing
    , hPaths = Map.empty
    , hIsAlive = False
    , hIsPublic = False
    , hAt = timeNow
    , hBucket = -1         -- not yet placed in a DHT bucket
    , hAge = Nothing
    , hChanOut = 0         -- next outgoing channel id; bumped by hnRaw
    , hIsSeed = False
    , hTo = Nothing
    , hLineIn = Nothing
    , hLineAt = Nothing
    , hSendSeek = Nothing
    , hVias = Map.empty
    , hLastPacket = Nothing
    , hParts = Nothing
    , hOpened = Nothing
    , hOpenAt = Nothing
    , hRecvAt = Nothing
    , hCsid = Nothing
    , hPriority = Nothing
    , hIp = Nothing
    , hPort = Nothing
    , hBridging = False
    , hIsLocal = False
    , hLinked = Nothing    -- channel id of the link channel, once linked
    , hLineOut = randomHexVal
    , hLineIV = 0
    , hEncKey = Nothing
    , hDecKey = Nothing
    , hEcc = Nothing
    }
-- ---------------------------------------------------------------------
{-
function loadkey(self, id, csid, key)
{
id.csid = csid;
return self.CSets[csid].loadkey(id, key);
}
-}
-- | Look up the cipher set registered under @csid@ and use it to load
-- @key@. Returns Nothing (after logging) when that cipher set is not
-- supported by this switch.
loadkey :: String -> String -> TeleHash (Maybe HashCrypto)
loadkey csid key = do
  csets <- gets swCSets
  case Map.lookup csid csets of
    Just cset -> (csLoadkey cset) key Nothing
    Nothing -> do
      logT $ "missing CSet for " ++ csid
      return Nothing
-- ---------------------------------------------------------------------
{-
function keysgen(cbDone,cbStep)
{
var self = this;
var ret = {parts:{}};
var todo = Object.keys(self.CSets);
if(todo.length == 0) return cbDone("no sets supported");
function pop(err)
{
if(err) return cbDone(err);
var csid = todo.pop();
if(!csid){
self.load(ret);
return cbDone(null, ret);
}
self.CSets[csid].genkey(ret,pop,cbStep);
}
pop();
}
-}
-- | Generate a key pair for every supported cipher set, reporting
-- progress via @cbStep@ and completion via @cbDone@. Not yet
-- implemented; the intended flow is in the JS reference above.
-- NOTE(review): the '()' callback types are placeholders — they need to
-- become real callback types before this can be implemented.
keysgen :: () -> () -> IO ()
keysgen cbDone cbStep = (assert False undefined)
-- ---------------------------------------------------------------------
-- | Draw @len@ random bytes from the switch's RNG and return them
-- hex-encoded (so the result has @2*len@ characters, matching the JS
-- reference's @randomBytes(len).toString(\"hex\")@). The advanced RNG
-- state is stored back into the switch state.
randomHEX :: Int -> TeleHash String
randomHEX len = do
  sw <- get
  let (raw, rng') = cprgGenerate len (swRNG sw)
  put (sw { swRNG = rng' })
  return (BC.unpack (B16.encode raw))
{-
// return random bytes, in hex
function randomHEX(len)
{
return crypto.randomBytes(len).toString("hex");
}
-}
-- ---------------------------------------------------------------------
-- | Parse a telehash URI. Not yet implemented; the JS reference below
-- works around node's 63-char DNS label limit by temporarily shortening
-- the 64-char hashname before parsing, then restoring it.
-- NOTE(review): returning String is likely a placeholder — a structured
-- URI type would better match the JS urllib.parse result.
uriparse :: String -> String
uriparse = (assert False undefined)
{-
var urllib = require("url");
function uriparse(uri)
{
// node's uri parser enforces dns max 63 chars per label, grr!
if(typeof uri !== "string") uri = "";
var hashname = uri.match(/[0-9A-Fa-f]{64}/);
if(!hashname) return urllib.parse(uri);
var full = hashname[0];
var part = full.substr(0,32);
var u = urllib.parse(uri.replace(full,part));
if(u.hostname != part) return urllib.parse(uri); // hashname was not the hostname
Object.keys(u).forEach(function(k){
if(typeof u[k] != "string") return;
u[k] = u[k].replace(part,full);
});
return u;
}
-}
-- ---------------------------------------------------------------------
-- | Check whether a string is a valid hashname (64 hex characters).
-- Not yet implemented. NOTE(review): the JS reference returns a boolean
-- (@isHEX(hex, 64)@); the @String -> String@ type here looks wrong —
-- confirm whether this should be @String -> Bool@.
isHashName :: String -> String
isHashName = (assert False undefined)
{-
self.isHashname = function(hex){return isHEX(hex, 64)};
-}
-- ---------------------------------------------------------------------
{-
/* CHANNELS API
hn.channel(type, arg, callback)
- used by app to create a reliable channel of given type
- arg contains .js and .body for the first packet
- callback(err, arg, chan, cbDone)
- called when any packet is received (or error/fail)
- given the response .js .body in arg
- cbDone when arg is processed
- chan.send() to send packets
- chan.wrap(bulk|stream) to modify interface, replaces this callback handler
- chan.bulk(str, cbDone) / onBulk(cbDone(err, str))
- chan.read/write
hn.raw(type, arg, callback)
- arg contains .js and .body to create an unreliable channel
- callback(err, arg, chan)
- called on any packet or error
- given the response .js .body in arg
- chan.send() to send packets
self.channel(type, callback)
- used to listen for incoming reliable channel starts
- callback(err, arg, chan, cbDone)
- called for any answer or subsequent packets
- chan.wrap() to modify
self.raw(type, callback)
- used to listen for incoming unreliable channel starts
- callback(err, arg, chan)
- called for any incoming packets
*/
-}
-- | Install the \"bulk\" (and eventually \"stream\") convenience
-- wrappers on a reliable channel. Not yet implemented; the JS reference
-- below shows the intended behavior (accumulate incoming bodies, chunked
-- outgoing sends).
channelWraps :: IO ()
channelWraps = (assert False undefined)
{-
// these are called once a reliable channel is started both ways to add custom functions for the app
var channelWraps = {
"bulk":function(chan){
// handle any incoming bulk flow
var bulkIn = "";
chan.callback = function(end, packet, chan, cb)
{
cb();
if(packet.body) bulkIn += packet.body;
if(!chan.onBulk) return;
if(end) chan.onBulk(end!==true?end:false, bulkIn);
}
// handle (optional) outgoing bulk flow
chan.bulk = function(data, callback)
{
// break data into chunks and send out, no backpressure yet
while(data)
{
var chunk = data.substr(0,1000);
data = data.substr(1000);
var packet = {body:chunk};
if(!data) packet.callback = callback; // last packet gets confirmed
chan.send(packet);
}
chan.end();
}
}
}
-}
-- ---------------------------------------------------------------------
-- | Increment (True) or decrement (False) the switch's wait counter,
-- firing the waiting callback once it drains — per the JS reference
-- below. Not yet implemented.
wait :: Bool -> IO ()
wait = (assert False undefined)
{-
self.wait = function(bool){
if(bool) return self.waits.push(true);
self.waits.pop();
if(self.waiting && self.waits.length == 0) self.waiting();
}
-}
-- ---------------------------------------------------------------------
-- | Background loop doing the periodic link maintenance work: clears
-- the bridge cache, reaps dead hashnames, pings every maintained link,
-- then sleeps for the configured link timer and recurses forever.
linkLoop :: TeleHash ()
linkLoop = do
  sw <- get
  put sw {swBridgeCache = []} -- reset cache for any bridging
  hnReap
  linkMaint -- ping all of them
  -- linkTimer is in milliseconds; threadDelay wants microseconds
  io $ threadDelay $ milliToMicro (linkTimer defaults)
  linkLoop
-- | Convert a duration in milliseconds to microseconds.
milliToMicro :: Num a => a -> a
milliToMicro = (1000 *)
-- ---------------------------------------------------------------------
-- | Reap defunct hashnames. Currently a deliberate no-op; the intended
-- reaping rules (drop idle, never-responding peers) are in the JS
-- reference below.
hnReap :: TeleHash ()
hnReap = return ()
{-
// delete any defunct hashnames!
function hnReap(self)
{
var hn;
function del(why)
{
if(hn.lineOut) delete self.lines[hn.lineOut];
delete self.all[hn.hashname];
debug("reaping ", hn.hashname, why);
}
Object.keys(self.all).forEach(function(h){
hn = self.all[h];
debug("reap check",hn.hashname,Date.now()-hn.sentAt,Date.now()-hn.recvAt,Object.keys(hn.chans).length);
if(hn.isSeed) return;
if(Object.keys(hn.chans).length > 0) return; // let channels clean themselves up
if(Date.now() - hn.at < hn.timeout()) return; // always leave n00bs around for a while
if(!hn.sentAt) return del("never sent anything, gc");
if(!hn.recvAt) return del("sent open, never received");
if(Date.now() - hn.sentAt > hn.timeout()) return del("we stopped sending to them");
if(Date.now() - hn.recvAt > hn.timeout()) return del("they stopped responding to us");
});
}
-}
-- ---------------------------------------------------------------------
{-
// every link that needs to be maintained, ping them
function linkMaint(self)
{
// process every bucket
Object.keys(self.buckets).forEach(function(bucket){
// sort by age and send maintenance to only k links
var sorted = self.buckets[bucket].sort(function(a,b){ return a.age - b.age });
if(sorted.length) debug("link maintenance on bucket",bucket,sorted.length);
sorted.slice(0,defaults.link_k).forEach(function(hn){
if(!hn.linked || !hn.alive) return;
if((Date.now() - hn.linked.sentAt) < Math.ceil(defaults.link_timer/2)) return; // we sent to them recently
hn.linked.send({js:{seed:self.seed}});
});
});
}
-}
-- ---------------------------------------------------------------------
{-
validPathTypes :: Set.Set PathType
validPathTypes
= Set.fromList
$ map (\pt -> PathType pt) ["ipv4","ipv6","http","relay","webrtc","local"]
-}
-- | Look up (or record) a network path for a hashname. When an
-- equivalent path is already stored it is returned; otherwise the new
-- path is stored with minimum priority and the hashname's public
-- visibility is updated.
hnPathGet :: HashContainer -> Path -> TeleHash Path
hnPathGet hc path = do
  {-
  if Set.notMember (pType path) validPathTypes
    then do
      logT $ "unknown path type:" ++ show (pType path)
      return path
    else do
  -}
  case pathMatch path (Map.elems $ hPaths hc) of
    -- BUG FIX: return the stored match (it carries priority/last-seen
    -- state) as the JS reference does; the previous code returned the
    -- incoming 'path', discarding that state.
    Just p -> return p
    Nothing -> do
      logT $ "adding new path:" ++ show (Map.size $ hPaths hc) ++ "," ++ showPath path
      -- always default to minimum priority
      let path' = path { pPriority = Just (if pathType path == PtRelay then (-1) else 0)}
      -- BUG FIX: a hashname becomes public once ANY non-local path is
      -- seen — the JS reference only ever sets isPublic to true, so do
      -- not clobber an earlier True with False here.
      putHN $ hc { hPaths = Map.insert (pJson path') path' (hPaths hc)
                 , hIsPublic = hIsPublic hc || not (isLocalPath path')
                 }
      return path'
{-
hn.pathGet = function(path)
{
if(["ipv4","ipv6","http","relay","webrtc","local"].indexOf(path.type) == -1)
{
warn("unknown path type", JSON.stringify(path));
return path;
}
var match = pathMatch(path, hn.paths);
if(match) return match;
// preserve original
if(!path.json) path.json = JSON.parse(JSON.stringify(path));
debug("adding new path",hn.paths.length,JSON.stringify(path.json));
info(hn.hashname,path.type,JSON.stringify(path.json));
hn.paths.push(path);
// always default to minimum priority
if(typeof path.priority != "number") path.priority = (path.type=="relay")?-1:0;
// track overall if they have a public IP network
if(!isLocalPath(path)) hn.isPublic = true;
return path;
}
-}
-- ---------------------------------------------------------------------
-- | Every link that needs to be maintained, ping them.
-- Walks every DHT bucket, takes the k oldest entries, and re-sends a
-- link-maintenance message on each linked, alive peer's link channel —
-- unless we already sent one within half the link timer.
-- NOTE(review): the actual send is still @assert False undefined@ (the
-- commented-out chanSendRaw/seedMsg calls); this will crash if a link
-- actually becomes due. 'seedMsg' must be implemented first.
linkMaint :: TeleHash ()
linkMaint = do
  sw <- get
  -- process every bucket
  forM (Map.keys $ swBuckets sw) $ \hashDistance -> do
    bucket <- getBucketContents hashDistance
    -- sort the bucket contents on age
    let sorted = sortBy sf bucket
        sf a b = compare (hAge a) (hAge b)
    when (not $ null sorted) $ logT $ "link maintenance on bucket " ++ show (bucket,length sorted)
    -- only ping the k oldest links per bucket
    forM (take (linkK defaults) sorted) $ \hn -> do
      if (hLinked hn == Nothing) || (not $ hIsAlive hn)
        then return ()
        else do
          timeNow <- io getClockTime
          mchan <- getChan (hHashName hn) (gfromJust "linkMaint.1" $ hLinked hn)
          let chan = gfromJust "linkMaint.2" mchan
          if isTimeOut timeNow (chSentAt chan) ((linkTimer defaults) `div` 2)
            then return () -- we sent to them recently
            -- else send (fromJust $ hLinked hn) (seedMsg (swSeed sw)) Nothing
            -- else chanSendRaw (hHashName hn) chan (seedMsg (swSeed sw))
            else assert False undefined
  return ()
-- ---------------------------------------------------------------------
-- | Build the link-maintenance packet carrying our seed flag
-- (JS: @{js:{seed:self.seed}}@). Not yet implemented; needed by
-- 'linkMaint'.
seedMsg :: Bool -> LinePacket
seedMsg = (assert False undefined)
-- ---------------------------------------------------------------------
-- TODO: consider memoising this result, will be used a LOT
-- | XOR distance between two hashnames (hex strings): 252 is furthest,
-- decreasing by 4 per leading equal nibble; the first differing nibble
-- adds 0..3 according to its highest differing bit. Returns -1 for
-- empty input (the JS reference's error value).
--
-- BUG FIX: the previous implementation scanned @reverse diffs@, letting
-- the LAST nibble decide the distance, and returned -1 (the error
-- value) for identical hashnames. The JS reference (dhash/hex2nib)
-- scans from the most significant nibble; this now does the same.
-- Assumes both strings are hex digits ('digitToInt' is partial
-- otherwise — hashnames are always hex).
dhash :: HashName -> HashName -> HashDistance
dhash (HN h1) (HN h2)
  | null h1 || null h2 = -1
  | otherwise = go 252 h1 h2
  where
    sbtab = [-1,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3]
    -- exhausted either string: everything compared so far was equal
    go acc [] _ = acc
    go acc _ [] = acc
    go acc (a:as) (b:bs) =
      case xor (digitToInt a) (digitToInt b) of
        0 -> go (acc - 4) as bs        -- equal nibble, step closer
        d -> acc + (sbtab !! d)        -- first difference decides
{-
// XOR distance between two hex strings, high is furthest bit, 0 is closest bit, -1 is error
function dhash(h1, h2) {
// convert to nibbles, easier to understand
var n1 = hex2nib(h1);
var n2 = hex2nib(h2);
if(!n1.length || !n2.length) return -1;
// compare nibbles
var sbtab = [-1,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3];
var ret = 252;
for (var i = 0; i < n1.length; i++) {
if(!n2[i]) return ret;
var diff = n1[i] ^ n2[i];
if (diff) return ret + sbtab[diff];
ret -= 4;
}
return ret;
}
-- ---------------------------------------------------------------------
// convert hex string to nibble array
function hex2nib(hex)
{
var ret = [];
for (var i = 0; i < hex.length / 2; i ++) {
var bite = parseInt(hex.substr(i * 2, 2), 16);
if (isNaN(bite)) return [];
ret[ret.length] = bite >> 4;
ret[ret.length] = bite & 0xf;
}
return ret;
}
-}
-- ---------------------------------------------------------------------
-- | Send the body of the packet in the telex. It is already encrypted.
-- Resolves the path's ip/port (both must be present — 'gfromJust' fails
-- loudly otherwise) and hands the packet to the switch's configured
-- sender function. The hashname argument is unused here.
ipv4Send :: Path -> LinePacket -> Maybe HashName -> TeleHash ()
ipv4Send path msg _mhn = do
  let host = show (gfromJust "ipv4Send.1" (pathIp path))
      port = show (gfromJust "ipv4Send.2" (pathPort path))
  dest <- io (addrFromHostPort host port)
  sendFn <- gets swSender
  sendFn msg dest
  return ()
-- ---------------------------------------------------------------------
{-
sendTelex :: Telex -> TeleHash ()
sendTelex msg = do
timeNow <- io getClockTime
res <- prepareTelex msg timeNow
case (res) of
Nothing -> return ()
Just (line,msgJson) -> do
-- console.log(["SEND[", telex._to, "]\t", msg].join(""));
logT ( "SEND[:" ++ (show $ teleTo msg) ++ "]\t" ++ (msgJson))
switch <- get
put switch {swCountTx = (swCountTx switch) + 1 }
addr <- io (addrFromHostPort (lineHost line) (linePort line))
--Just socketh <- gets swH
--io (sendDgram socketh msgJson addr)
sender <- gets swSender
sender msgJson addr
updateTelehashLine(line)
-}
-- ---------------------------------------------------------------------
-- | A no-op datagram sender: logs that it was invoked and discards the
-- packet. Used when no real socket sender is configured.
doNullSendDgram :: LinePacket -> NS.SockAddr -> TeleHash ()
doNullSendDgram _msg _dest = logT ("doNullSendDgram")
-- ---------------------------------------------------------------------
-- | Send a line packet's raw bytes to the given address over the
-- switch's UDP socket. The socket handle must be present in the switch
-- state (the pattern match fails otherwise, as before).
doSendDgram :: LinePacket -> NS.SockAddr -> TeleHash ()
doSendDgram (LP payload) dest = do
  Just sock <- gets swH
  io (sendDgram sock payload dest)
-- ---------------------------------------------------------------------
-- | Send a ByteString datagram, looping until every byte has been
-- handed to the socket ('sendTo' may perform a partial send).
sendDgram :: SocketHandle -> BC.ByteString -> NS.SockAddr -> IO ()
sendDgram socketh payload dest = loop payload
  where
    loop :: BC.ByteString -> IO ()
    loop remaining
      | BC.null remaining = return ()
      | otherwise = do
          n <- SB.sendTo (slSocket socketh) remaining dest
          loop (BC.drop n remaining)
{-
sendDgram :: SocketHandle -> String -> NS.SockAddr -> IO ()
sendDgram socketh msgJson addr =
sendstr msgJson
where
-- Send until everything is done
sendstr :: String -> IO ()
sendstr [] = return ()
sendstr omsg = do sent <- NS.sendTo (slSocket socketh) omsg addr
sendstr (genericDrop sent omsg)
-}
-- ---------------------------------------------------------------------
-- | Resolve a hostname/port pair to an address record plus the numeric
-- host string and service name. 'getAddrInfo' either raises or returns
-- a non-empty list, whose first element is the best candidate.
resolve :: String -> String -> IO (NS.AddrInfo,String,String)
resolve hostname port = do
  (best:_) <- NS.getAddrInfo Nothing (Just hostname) (Just port)
  (Just numericHost, Just serviceName) <-
    NS.getNameInfo [NS.NI_NUMERICHOST] True True (NS.addrAddress best)
  return (best, numericHost, serviceName)
-- ---------------------------------------------------------------------
-- | Resolve a hostname/port pair down to just the socket address.
addrFromHostPort :: String -> String -> IO NS.SockAddr
addrFromHostPort hostname port = do
  (addrInfo, _host, _svc) <- resolve hostname port
  return (NS.addrAddress addrInfo)
-- ---------------------------------------------------------------------
-- | Dispatch incoming raw messages.
-- Decodes a raw UDP datagram and hands it to 'receive' together with a
-- Path describing where it came from; undecodable packets are logged
-- and dropped. Every datagram (parseable or not) bumps the rx counter.
recvTelex :: BC.ByteString -> NS.SockAddr -> TeleHash ()
recvTelex msg rinfo = do
  -- logT ( ("recvTelex:" ++ (show (msg))))
  -- logT $ "recvTelex:rinfo=" ++ show rinfo
  switch' <- get
  put switch' { swCountRx = (swCountRx switch') + 1 }
  -- switch <- get
  -- seedsIndex <- gets swSeedsIndex
  -- NOTE(review): partial pattern — fails if getNameInfo cannot produce
  -- a numeric host/port for this address family.
  (Just hostIP,Just port) <- io (NS.getNameInfo [NS.NI_NUMERICHOST] True True rinfo)
  let
    remoteipp = IPP (hostIP ++ ":" ++ port)
  timeNow <- io getClockTime
  --console.log(["RECV from ", remoteipp, ": ", JSON.stringify(telex)].join(""));
  logT ("RECV from " ++ (show remoteipp) ++ ":" -- ++ (show $ B16.encode msg)
       ++ " at " ++ (show timeNow))
  let
    maybeRxTelex = fromNetworkPacket (LP msg)
  -- logT $ "recvTelex:maybeRxTelex:" ++ show maybeRxTelex
  let
    -- the network path the packet arrived on; 'read' on hostIP/port is
    -- safe only because getNameInfo returned numeric forms above
    path = Path
            {
              pJson = PIPv4 (PathIPv4 (read hostIP) (read port))
            , pRelay = Nothing
            , pId = Nothing
            , pLastIn = Nothing
            , pLastOut = Nothing
            , pPriority = Nothing
            , pIsSeed = False
            , pGone = False
            }
  case maybeRxTelex of
    Just rxTelex -> receive rxTelex path timeNow
    Nothing -> do
      logT $ "could not parse packet, discarding:" ++ (show $ B16.encode msg)
      return ()
-- ---------------------------------------------------------------------
-- | True when the string decodes fully as hex AND has exactly the given
-- length — measured in characters (nibbles), not decoded bytes, to
-- match the JS reference's @isHEX(hex, 64)@ usage.
isHEX :: BC.ByteString -> Int -> Bool
isHEX str expectedLen = fullyHex && rightLength
  where
    (_decoded, leftover) = B16.decode str
    fullyHex    = BC.length leftover == 0
    rightLength = BC.length str == expectedLen
-- ---------------------------------------------------------------------
-- | Check that a JSON value is an object containing every one of the
-- given keys.
-- BUG FIX: previously only the Object constructor was handled, so any
-- other JSON value (Array, String, Number, Bool, Null) crashed with a
-- pattern-match failure at runtime; such values now report False.
expectedKeysPresent :: Aeson.Value -> [String] -> Bool
expectedKeysPresent (Aeson.Object hm) keys = all present keys
  where
    present k = HM.member (Text.pack k) hm
expectedKeysPresent _ _ = False
-- ---------------------------------------------------------------------
-- raw channels
{-
/* CHANNELS API
hn.channel(type, arg, callback)
- used by app to create a reliable channel of given type
- arg contains .js and .body for the first packet
- callback(err, arg, chan, cbDone)
- called when any packet is received (or error/fail)
- given the response .js .body in arg
- cbDone when arg is processed
- chan.send() to send packets
- chan.wrap(bulk|stream) to modify interface, replaces this callback handler
- chan.bulk(str, cbDone) / onBulk(cbDone(err, str))
- chan.read/write
hn.raw(type, arg, callback)
- arg contains .js and .body to create an unreliable channel
- callback(err, arg, chan)
- called on any packet or error
- given the response .js .body in arg
- chan.send() to send packets
self.channel(type, callback)
- used to listen for incoming reliable channel starts
- callback(err, arg, chan, cbDone)
- called for any answer or subsequent packets
- chan.wrap() to modify
self.raw(type, callback)
- used to listen for incoming unreliable channel starts
- callback(err, arg, chan)
- called for any incoming packets
*/
-}
-- ---------------------------------------------------------------------
-- | Create an unreliable (raw) channel to a hashname.
-- was swRaw
-- Allocates a channel id (reusing one from @arg@ if present, otherwise
-- taking the hashname's next outgoing id and bumping it by 2), registers
-- the channel, and — when @arg@ carries JSON — sends an initial packet
-- with its \"type\" field set. Timeout and retry handling from the JS
-- reference are still TODO.
hnRaw :: HashContainer -> String -> Telex -> RxCallBack -> TeleHash RawChannel
hnRaw hn typ arg callback = do
  -- NOTE(review): 'sw' is bound but never used below.
  sw <- get
  (hn',chanId) <- case tChanId arg of
    Just i -> return (hn,i)
    -- outgoing ids advance by 2 so initiator/recipient ids never collide
    Nothing -> return (hn { hChanOut = (hChanOut hn) + 2},hChanOut hn)
  let
    chan = Chan { chType = typ
                , chCallBack = callback
                , chId = chanId
                , chHashName = hHashName hn
                , chLast = Nothing
                , chSentAt = Nothing
                , chRxAt = Nothing
                , chEnded = False
                , chDone = False
                }
  putHN hn'
  putChan (hHashName hn) chan
  hn2 <- getHNsafe (hHashName hn) "raw"
  logT "raw:must implement timeout"
  -- debug("new unreliable channel",hn.hashname,chan.type,chan.id);
  logT $ "new unreliable channel " ++ show (hHashName hn,chType chan,chId chan)
  if not (HM.null (tJson arg))
    then do
      -- let msg = rxTelexToTelex arg
      -- stamp the channel type on the first packet, per the JS reference
      let msg = arg { tJson = HM.insert "type" (toJSON typ) (tJson arg)}
      chanSendRaw (hHashName hn2) chan msg
    else return ()
  -- WIP: carry on here with the send
  {-
  // send optional initial packet with type set
  if(arg.js)
  {
    arg.js.type = type;
    chan.send(arg);
    // retry if asked to, TODO use timeout for better time
    if(arg.retry)
    {
      var at = 1000;
      function retry(){
        if(chan.ended || chan.recvAt) return; // means we're gone or received a packet
        chan.send(arg);
        if(at < 4000) at *= 2;
        arg.retry--;
        if(arg.retry) setTimeout(retry, at);
      };
      setTimeout(retry, at);
    }
  }
  -}
  logT "raw not implemented"
  return chan
{-
// create an unreliable channel
function raw(type, arg, callback)
{
var hn = this;
var chan = {type:type, callback:callback};
chan.id = arg.id;
if(!chan.id)
{
chan.id = hn.chanOut;
hn.chanOut += 2;
}
hn.chans[chan.id] = chan;
// raw channels always timeout/expire after the last sent/received packet
if(!arg.timeout) arg.timeout = defaults.chan_timeout;
function timer()
{
if(chan.timer) clearTimeout(chan.timer);
chan.timer = setTimeout(function(){
chan.fail({js:{err:"timeout"}});
}, arg.timeout);
}
chan.timeout = function(timeout)
{
arg.timeout = timeout;
timer();
}
chan.hashname = hn.hashname; // for convenience
debug("new unreliable channel",hn.hashname,chan.type,chan.id);
// send optional initial packet with type set
if(arg.js)
{
arg.js.type = type;
chan.send(arg);
// retry if asked to, TODO use timeout for better time
if(arg.retry)
{
var at = 1000;
function retry(){
if(chan.ended || chan.recvAt) return; // means we're gone or received a packet
chan.send(arg);
if(at < 4000) at *= 2;
arg.retry--;
if(arg.retry) setTimeout(retry, at);
};
setTimeout(retry, at);
}
}
return chan;
}
-}
-- ---------------------------------------------------------------------
-- | Process packets at a raw level, very little to do.
-- Drops packets addressed to channels no longer registered; otherwise
-- fails the channel on err/end packets, records the sender and receive
-- time, and invokes the channel's callback.
chanReceive :: HashContainer -> RawChannel -> RxTelex -> TeleHash ()
chanReceive hn chan packet = do
  logT $ "chanReceive on " ++ show (hHashName hn,chId chan)
  case (Map.lookup (chId chan) (hChans hn)) of
    Nothing -> do
      logT $ "dropping receive packet to dead channel" ++ show (chId chan,rtJs packet)
      return ()
    Just _ -> do
      -- if err'd or ended, delete ourselves
      let errOrFail = (HM.member "err" (rtJs packet)) || (HM.member "end" (rtJs packet))
      if errOrFail
        then do
          chanFail (hHashName hn) chan Nothing
        else return ()
      -- cache last received network
      timeNow <- io getClockTime
      let chan' = chan { chLast = Just (rtSender packet)
                       , chRxAt = Just timeNow
                       }
      putChan (hHashName hn) chan'
      logT $ "chanReceive calling callback for " ++ showChan chan'
      (chCallBack chan') errOrFail packet chan'
      -- TODO: reset the channel timeout timer here, per the JS reference
      logT $ "chanReceive:must do timer()"
{-
// process packets at a raw level, very little to do
chan.receive = function(packet)
{
if(!hn.chans[chan.id]) return debug("dropping receive packet to dead channel",chan.id,packet.js)
// if err'd or ended, delete ourselves
if(packet.js.err || packet.js.end) chan.fail();
chan.last = packet.sender; // cache last received network
chan.recvAt = Date.now();
chan.callback(packet.js.err||packet.js.end, packet, chan);
timer();
}
-}
-- ---------------------------------------------------------------------
-- | Minimal wrapper to send a packet over a raw channel: stamps the
-- packet with the channel id, records the send time, picks a
-- destination (falling back to the last path we heard from, if still
-- valid), sends it, and tears the channel down on @err@\/@end@.
chanSendRaw :: HashName -> RawChannel -> Telex -> TeleHash ()
chanSendRaw hname chan packet = do
  -- logT $ "chanSendRaw entered for " ++ show (chId chan)
  hn <- getHNsafe hname "chanSendRaw"
  case Map.lookup (chId chan) (hChans hn) of
    Nothing -> do
      logT $ "dropping send packet to dead channel " ++ show (chId chan,packet)
    Just ch -> do
      -- logT $ "chanSendRaw got chan to:" ++ show (chHashName ch)
      -- Stamp the outgoing JSON with this channel's id ("c").
      let newJson = HM.insert "c" (Number $ fromIntegral $ unChannelId (chId chan)) (tJson packet)
          packet2 = packet {tJson = newJson}
      let js = showJson newJson
      logT $ "SEND " ++ show (chType chan) ++ "," ++ js
      timeNow <- io getClockTime
      let ch' = ch { chSentAt = Just timeNow }
      putChan (hHashName hn) ch'
      -- With no explicit destination, always send back to the last
      -- received path for this channel, provided it is still valid.
      let packet' =
            case tTo packet2 of
              Just _ -> packet2
              Nothing ->
                if pathValid timeNow (chLast ch')
                  then packet2 { tTo = chLast ch' }
                  else packet2
      -- Fix: the send result was previously bound to an unused name
      -- (sentChan), triggering an unused-binding warning; discard it
      -- explicitly instead.
      _ <- hnSend hn packet'
      -- If we just sent an err/end, delete ourselves.
      if HM.member "err" (tJson packet') || HM.member "end" (tJson packet')
        then chanFail (hHashName hn) ch' Nothing
        else return ()
      chanTimer ch'
{-
// minimal wrapper to send raw packets
chan.send = function(packet)
{
if(!hn.chans[chan.id]) return debug("dropping send packet to dead channel",chan.id,packet.js);
if(!packet.js) packet.js = {};
packet.js.c = chan.id;
debug("SEND",chan.type,JSON.stringify(packet.js));
chan.sentAt = Date.now();
if(!packet.to && pathValid(chan.last)) packet.to = chan.last; // always send back to the last received for this channel
hn.send(packet);
// if err'd or ended, delete ourselves
if(packet.js.err || packet.js.end) chan.fail();
timer();
}
-}
-- ---------------------------------------------------------------------
-- | Tear down a raw channel, optionally delivering a final packet to
-- its callback.  A no-op when the channel has already ended (prevents
-- multiple teardowns).
chanFail :: HashName -> RawChannel -> Maybe RxTelex -> TeleHash ()
chanFail hn chan mpacket
  | chEnded chan = return ()
  | otherwise = do
      hnChanDone hn (chId chan)
      case mpacket of
        Nothing -> return ()
        Just pkt -> do
          logT $ "chanFail calling callback for " ++ showChan chan
          (chCallBack chan) True pkt chan
{-
chan.fail = function(packet){
if(chan.ended) return; // prevent multiple calls
hn.chanDone(chan.id);
chan.ended = true;
if(packet)
{
packet.from = hn;
chan.callback(packet.js.err, packet, chan, function(){});
}
}
-}
-- | Retire a channel id on the given hashname.  NOTE(review): the JS
-- reference sets @hn.chans[id] = false@ while this deletes the entry
-- outright via 'delChan' -- presumably equivalent here; confirm.
hnChanDone :: HashName -> ChannelId -> TeleHash ()
hnChanDone = delChan
{-
hn.chanDone = function(id)
{
hn.chans[id] = false;
}
-}
-- | Placeholder for the JS @timer()@ expiry logic: raw channels are
-- supposed to time out after the last sent\/received packet, but this
-- is not implemented yet.
chanTimer :: RawChannel -> TeleHash ()
chanTimer _chan = logT "chanTimer unimplemented"
| alanz/htelehash | src/Network/TeleHash/Old/Switch.hs | bsd-3-clause | 149,149 | 0 | 60 | 41,554 | 19,291 | 9,713 | 9,578 | 1,468 | 22 |
module Database.Concelo.Prelude
( null
, foldM
, forM_
, mapM_
, length
, (++)
, (+)
, (-)
, (*)
, (/)
, ($)
, (>)
, (<)
, (>=)
, (<=)
, (/=)
, (&&)
, (||)
, (!!)
, (.)
, not
, error
, undefined
, otherwise
, flip
, fromIntegral
, toEnum
, fst
, snd
, id
, const
, reverse
, concat
, take
, repeat
, last
, filter
, floor
, zipWith
, Double()
, Bool(True, False)
, String
, Int()
, Integer()
, IO()
, Show(show)
, Read
, read
, Eq((==))
, Ord(compare)
, Ordering(EQ)
, Maybe(Just, Nothing)
, fromMaybe
, isJust
, fromJust
, isNothing
, maybe
, Either(Left, Right)
, Monad
, (>>=)
, (>>)
, return
, liftM2
, liftM3
, when
, mapM
, Foldable(foldr)
, toList
, foldl'
, Functor(fmap)
, Traversable(traverse)
, pure
, (<$>)
, (<*>)
, (<|>)
, inits ) where
import Prelude (($), (.), error, undefined, String, Bool(True, False),
flip, Int(), (+), (-), Show(show), Read, read, otherwise,
fromIntegral, (*), (/), (>), (<), (>=), (<=), (/=), toEnum,
fst, snd, id, (&&), (||), not, Eq((==)), Ord(compare),
Integer(), const, (++), reverse, concat, Double(), (!!),
take, repeat, last, filter, Ordering(EQ), IO(), zipWith,
floor)
import Data.Maybe (Maybe(Just, Nothing), fromMaybe, isJust, fromJust,
isNothing, maybe)
import Data.Either (Either(Left, Right))
import Data.Foldable (Foldable(foldr), toList, foldl')
import Control.Monad (Monad, (>>=), (>>), return, liftM2, liftM3, when, mapM)
import Data.Functor (Functor(fmap), (<$>))
import Control.Applicative ((<|>), (<*>), pure)
import Data.Traversable (Traversable(traverse))
import Data.List (inits)
import qualified Control.Monad as M
-- | Generalised emptiness test for any 'Foldable'.  Built on 'foldr'
-- so it stops at the first element and works on infinite structures.
null :: Foldable t => t a -> Bool
null = foldr stop True
  where
    stop _ _ = False
-- | Monadic left fold over any 'Foldable', delegating to
-- 'M.foldM' after converting to a list.
foldM :: (Foldable t, Monad m) => (b -> a -> m b) -> b -> t a -> m b
foldM step z = M.foldM step z . toList
-- | Run a monadic action over every element of a 'Foldable',
-- discarding the results ('M.forM_' generalised via 'toList').
forM_ :: (Foldable t, Monad m) => t a -> (a -> m b) -> m ()
forM_ xs body = M.forM_ (toList xs) body
-- | Flipped-argument sibling of 'forM_': apply the action to each
-- element, discarding results.
mapM_ :: (Foldable t, Monad m) => (a -> m b) -> t a -> m ()
mapM_ body = M.mapM_ body . toList
-- | Number of elements in any 'Foldable', counted with a strict left
-- fold so no thunk chain builds up.
length :: Foldable t => t a -> Int
length = foldl' tick 0
  where
    tick n _ = n + 1
| Concelo/concelo | src/Database/Concelo/Prelude.hs | bsd-3-clause | 2,287 | 4 | 10 | 641 | 990 | 649 | 341 | 132 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE RecordWildCards #-}
module Codegen where
import Data.Word
import Data.List
import Data.Function
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (listToMaybe)
import Control.Monad.State
import Control.Monad.Trans.Maybe (MaybeT(..))
import Control.Applicative
import LLVM.General.AST
import LLVM.General.AST.Global
import qualified LLVM.General.AST as AST
import qualified LLVM.General.AST.Global as G
import qualified LLVM.General.AST.Constant as C
import qualified LLVM.General.AST.Attribute as A
import qualified LLVM.General.AST.CallingConvention as CC
import qualified LLVM.General.AST.FloatingPointPredicate as FP
import qualified LLVM.General.AST.IntegerPredicate as IP
import LLVM.General.AST.Type (ptr)
import Syntax (SymName)
-------------------------------------------------------------------------------
-- Module Level
-------------------------------------------------------------------------------
-- | Module-building monad: a thin newtype over 'State' on the LLVM
-- 'AST.Module' being assembled.
newtype LLVM a = LLVM { unLLVM :: State AST.Module a }
  deriving (Functor, Applicative, Monad, MonadState AST.Module)
-- | Run an 'LLVM' action against an initial module, returning the
-- final module.
runLLVM :: AST.Module -> LLVM a -> AST.Module
runLLVM = flip (execState . unLLVM)
-- | A fresh module whose only content is its name.
emptyModule :: String -> AST.Module
emptyModule label = defaultModule { moduleName = label }
-- | Append a top-level definition, de-duplicating with 'nub' so the
-- same definition is never emitted twice.  NOTE(review): 'nub' makes
-- repeated calls quadratic; acceptable for small modules.
addDefn :: Definition -> LLVM ()
addDefn d = do
  defs <- gets moduleDefinitions
  modify $ \s -> s { moduleDefinitions = nub $ defs ++ [d] }
-- | Emit a global variable of type 'uint', zero-initialised.
defineGlobalVar :: String -> LLVM ()
defineGlobalVar varName = addDefn $
  GlobalDefinition $ globalVariableDefaults {
    name = Name varName
  , G.type' = uint
  , initializer = Just $ C.Int uintSize 0
  }
-- | Emit a (non-variadic) function definition with the given return
-- type, name, typed arguments and basic blocks.
defineFunc :: Type -> String -> [(Type, Name)] -> [BasicBlock] -> LLVM ()
defineFunc retty label argtys body = addDefn $
  GlobalDefinition $ functionDefaults {
    name = Name label
  , parameters = ([Parameter ty nm [] | (ty, nm) <- argtys], False)
  , returnType = retty
  , basicBlocks = body
  }
-- | Remove a previously emitted function definition by name, if it
-- exists; otherwise leave the module untouched.
delFunc :: String -> LLVM ()
delFunc fname = do
  mFuncDef <- gets $ getFuncDefinition fname . moduleDefinitions
  maybe (return ()) del mFuncDef
  where
    -- Drop the located definition from the module's definition list.
    -- NOTE(review): the lambda binder 'mod' shadows Prelude.mod here.
    del funcDef =
      modify $ \mod ->
        mod { AST.moduleDefinitions = delete funcDef $ moduleDefinitions mod }
-- | Find the global function definition with the given name, if any.
getFuncDefinition :: SymName -> [AST.Definition] -> Maybe AST.Definition
getFuncDefinition searchedName modDefs =
  listToMaybe . filter filt $ modDefs
  where
    -- Matches only global *function* definitions by exact name.
    filt
      (AST.GlobalDefinition
         (AST.Function { G.name = AST.Name funcName, .. }))
      | funcName == searchedName = True
    filt _ = False
-- | Emit a named type definition (LLVM @%name = type ...@).
defineType :: String -> Type -> LLVM ()
defineType name ty = addDefn . TypeDefinition (Name name) . Just $ ty
-- | Declare an external function (no basic blocks), e.g. a libc or
-- runtime symbol resolved at link time.
external :: Type -> String -> [(Type, Name)] -> LLVM ()
external retty label argtys = addDefn $
  GlobalDefinition $ functionDefaults {
    name = Name label
  , parameters = ([Parameter ty nm [] | (ty, nm) <- argtys], False)
  , returnType = retty
  , basicBlocks = []
  }
---------------------------------------------------------------------------------
-- Types
-------------------------------------------------------------------------------
-- | The canonical machine-word type used throughout codegen: a 64-bit
-- integer.
uint :: Type
uint = IntegerType uintSize
-- | Bit width of 'uint'; kept polymorphic so it fits both 'Word32'
-- and integer-literal contexts.
uintSize :: Num a => a
uintSize = 64
-- | IEEE-754 double-precision floating point type.
double :: Type
double = FloatingPointType 64 IEEE
-- | Pointer to an 8-bit integer (C's @char *@).
i8ptr :: Type
i8ptr = ptr $ IntegerType 8
-- | Size of 'uint' in bytes.
uintSizeBytes :: Integral a => a
uintSizeBytes = uintSize `div` 8
-------------------------------------------------------------------------------
-- Names
-------------------------------------------------------------------------------
-- | Name supply: how many times each base name has been handed out.
type Names = Map.Map String Int
-- | Disambiguate a requested name against the supply: the first use of
-- a name is returned verbatim, later uses get the current counter
-- appended; the updated supply is returned alongside.
uniqueName :: String -> Names -> (String, Names)
uniqueName nm ns = maybe firstUse reuse (Map.lookup nm ns)
  where
    firstUse = (nm, Map.insert nm 1 ns)
    reuse ix = (nm ++ show ix, Map.insert nm (ix + 1) ns)
-------------------------------------------------------------------------------
-- Codegen State
-------------------------------------------------------------------------------
-- | Maps source-level names to the operands currently holding them.
type SymbolTable = Map SymName Operand
-- | State threaded through code generation of a single function.
data CodegenState
  = CodegenState {
    currentBlock :: Name                    -- Name of the active block to append to
  , blocks       :: Map.Map Name BlockState -- Blocks for function
  , symtab       :: SymbolTable             -- Function scope symbol table
  , blockCount   :: Int                     -- Count of basic blocks
  , count        :: Word                    -- Count of unnamed instructions
  , names        :: Names                   -- Name Supply
  , extraFuncs   :: [LLVM ()]               -- LLVM computations of lambdas
  , funcName     :: SymName                 -- 'CodegenState's function name
  , globalVars   :: [SymName]               -- Global variable names visible here
  } {-deriving Show-}
-- | One basic block under construction.
data BlockState
  = BlockState {
    idx   :: Int                      -- Block index (creation order)
  , stack :: [Named Instruction]      -- Stack of instructions
  , term  :: Maybe (Named Terminator) -- Block terminator, set once finished
  } deriving Show
-------------------------------------------------------------------------------
-- Codegen Operations
-------------------------------------------------------------------------------
-- | Function-level code-generation monad over 'CodegenState'.
newtype Codegen a = Codegen { runCodegen :: State CodegenState a }
  deriving (Functor, Applicative, Monad, MonadState CodegenState )
-- | Order blocks by creation index so they are emitted in source order.
sortBlocks :: [(Name, BlockState)] -> [(Name, BlockState)]
sortBlocks = sortBy (compare `on` (idx . snd))
-- | Finalise all blocks of a finished 'CodegenState' into LLVM basic
-- blocks, sorted by index.
createBlocks :: CodegenState -> [BasicBlock]
createBlocks m = map makeBlock $ sortBlocks $ Map.toList (blocks m)
-- | Append the first block's instructions onto the second, keeping the
-- second block's name and terminator (the first's are discarded).
mergeBlocks :: BasicBlock -> BasicBlock -> BasicBlock
mergeBlocks (BasicBlock _ srcInstrs _) (BasicBlock name targetInstrs term) =
  BasicBlock name (targetInstrs ++ srcInstrs) term
-- | Convert one finished block; a missing terminator indicates a
-- codegen bug, hence the hard 'error'.
makeBlock :: (Name, BlockState) -> BasicBlock
makeBlock (l, BlockState _ s t) = BasicBlock l s (maketerm t)
  where
    maketerm (Just x) = x
    maketerm Nothing = error $ "Block has no terminator: " ++ show l
-- | Name of the first basic block of every generated function.
entryBlockName :: String
entryBlockName = "entry"
-- | A block with the given creation index and no instructions yet.
emptyBlock :: Int -> BlockState
emptyBlock i = BlockState i [] Nothing
-- | Initial codegen state for a function.  NOTE(review): the fields
-- are supplied positionally, so this must stay in sync with the field
-- order of 'CodegenState'.
emptyCodegen :: SymName -> CodegenState
emptyCodegen fname =
  CodegenState
    (Name entryBlockName) Map.empty Map.empty 1 0 Map.empty [] fname []
-- | Run a 'Codegen' action from a clean state seeded with the known
-- global variable names, returning the final state.
execCodegen :: SymName -> [SymName] -> Codegen a -> CodegenState
execCodegen fname globalVars computation =
  execState (runCodegen computation) $
    (emptyCodegen fname) { globalVars = globalVars }
-- | Bump and return the unnamed-instruction counter (1-based).
fresh :: Codegen Word
fresh = do
  i <- gets count
  modify $ \s -> s { count = 1 + i }
  return $ i + 1
-- | Append an instruction to the current block, naming its result with
-- a fresh unnamed register, and return that result as an operand.
-- NOTE(review): the result is built with 'local' and is therefore
-- always typed as 'uint'.
instr :: Instruction -> Codegen Operand
instr ins = do
  n <- fresh
  let ref = UnName n
  blk <- current
  let i = stack blk
  -- Appending preserves source order (quadratic for long blocks).
  modifyBlock (blk { stack = i ++ [ref := ins] } )
  return $ local ref
-- | Set (or overwrite) the current block's terminator.
terminator :: Named Terminator -> Codegen (Named Terminator)
terminator trm = do
  blk <- current
  modifyBlock (blk { term = Just trm })
  return trm
-------------------------------------------------------------------------------
-- Block Stack
-------------------------------------------------------------------------------
-- | Name of the block currently being appended to.
entry :: Codegen Name
entry = gets currentBlock
-- | Create and register a new empty block under a unique variant of
-- @bname@.  Does not switch to it; see 'setBlock'.
addBlock :: String -> Codegen Name
addBlock bname = do
  bls <- gets blocks
  ix <- gets blockCount
  nms <- gets names
  let new = emptyBlock ix
      (qname, supply) = uniqueName bname nms
  modify $ \s -> s { blocks = Map.insert (Name qname) new bls
                   , blockCount = ix + 1
                   , names = supply
                   }
  return (Name qname)
-- | Make @bname@ the current block and return it.
setBlock :: Name -> Codegen Name
setBlock bname = do
  modify $ \s -> s { currentBlock = bname }
  return bname
-- | Read the current block's name.
getBlock :: Codegen Name
getBlock = gets currentBlock
-- | Replace the state of the current block.
modifyBlock :: BlockState -> Codegen ()
modifyBlock new = do
  active <- gets currentBlock
  modify $ \s -> s { blocks = Map.insert active new (blocks s) }
-- | Fetch the current block's state; failure indicates a codegen bug.
current :: Codegen BlockState
current = do
  c <- gets currentBlock
  blks <- gets blocks
  case Map.lookup c blks of
    Just x -> return x
    Nothing -> error $ "No such block: " ++ show c
-------------------------------------------------------------------------------
-- Symbol Table
-------------------------------------------------------------------------------
-- | Bind @var@ to an operand in the function-scope symbol table,
-- replacing any previous binding.
assign :: String -> Operand -> Codegen ()
assign var x =
  modify $ \s -> s { symtab = Map.insert var x (symtab s) }
-- | Look up the operand currently bound to @var@, if any.
getvar :: String -> Codegen (Maybe Operand)
getvar var = Map.lookup var <$> gets symtab
-------------------------------------------------------------------------------
-- References
-- | Reference to a local virtual register, always typed as 'uint'.
local :: Name -> Operand
local = LocalReference uint
-- | Constant reference to a global symbol, typed as 'uint'.
global :: Name -> C.Constant
global = C.GlobalReference uint
-- | Operand referencing an external symbol, typed as 'uint'.
extern :: Name -> Operand
extern = ConstantOperand . C.GlobalReference uint
-- Arithmetic and Constants
-- | Integer addition (no nsw/nuw overflow flags).
iadd :: Operand -> Operand -> Codegen Operand
iadd a b = instr $ Add False False a b []
-- | Integer subtraction.
isub :: Operand -> Operand -> Codegen Operand
isub a b = instr $ Sub False False a b []
-- | Integer multiplication.
imul :: Operand -> Operand -> Codegen Operand
imul a b = instr $ Mul False False a b []
-- | Signed integer division (not marked exact).
idiv :: Operand -> Operand -> Codegen Operand
idiv a b = instr $ SDiv False a b []
-- | Integer comparison with the given predicate.
icmp :: IP.IntegerPredicate -> Operand -> Operand -> Codegen Operand
icmp cond a b = instr $ ICmp cond a b []
-- | Floating-point addition (no fast-math flags).
fadd :: Operand -> Operand -> Codegen Operand
fadd a b = instr $ FAdd NoFastMathFlags a b []
-- | Floating-point subtraction.
fsub :: Operand -> Operand -> Codegen Operand
fsub a b = instr $ FSub NoFastMathFlags a b []
-- | Floating-point multiplication.
fmul :: Operand -> Operand -> Codegen Operand
fmul a b = instr $ FMul NoFastMathFlags a b []
-- | Floating-point division.
fdiv :: Operand -> Operand -> Codegen Operand
fdiv a b = instr $ FDiv NoFastMathFlags a b []
-- | Floating-point comparison with the given predicate.
fcmp :: FP.FloatingPointPredicate -> Operand -> Operand -> Codegen Operand
fcmp cond a b = instr $ FCmp cond a b []
-- | Constant operand referring to a function symbol, carrying its full
-- (non-variadic) function type as needed for calls.
funcOpr :: Type -> Name -> [Type] -> Operand
funcOpr retTy name tys =
  constOpr $
    C.GlobalReference
      (FunctionType retTy tys False)
      name
-- | Reference a named (aliased) type.
namedType :: String -> Type
namedType = AST.NamedTypeReference . AST.Name
-- | Constant machine word ('uintSize' bits).
constUint :: Integral i => i -> Operand
constUint = constOpr . C.Int uintSize . fromIntegral
-- | Constant integer of an explicit bit width.
constUintSize :: Integral i => Word32 -> i -> Operand
constUintSize size = constOpr . C.Int size . fromIntegral
-- | Wrap a constant as an operand.
constOpr :: C.Constant -> Operand
constOpr = ConstantOperand
-- | Unsigned integer to floating point conversion.
uitofp :: Type -> Operand -> Codegen Operand
uitofp ty a = instr $ UIToFP a ty []
-- | Integer to pointer cast.
inttoptr :: Operand -> Type -> Codegen Operand
inttoptr a ty = instr $ IntToPtr a ty []
-- | Pointer to integer cast.
ptrtoint :: Operand -> Type -> Codegen Operand
ptrtoint a ty = instr $ PtrToInt a ty []
-- | Zero-extension to a wider integer type.
zext :: Type -> Operand -> Codegen Operand
zext ty a = instr $ ZExt a ty []
-- | Logical shift left.
shl :: Operand -> Operand -> Codegen Operand
shl a shiftSize = instr $ Shl False False a shiftSize []
-- | Logical shift right.
shr :: Operand -> Operand -> Codegen Operand
shr a shiftSize = instr $ LShr False a shiftSize []
-- | Bitwise or.  NOTE(review): shadows Prelude's 'Prelude.or'.
or :: Operand -> Operand -> Codegen Operand
or a b = instr $ Or a b []
-- | Attach empty parameter-attribute lists to call arguments.
toArgs :: [Operand] -> [(Operand, [A.ParameterAttribute])]
toArgs = map (\x -> (x, []))
-- Effects
-- | Call a function operand with the given arguments (C calling
-- convention, not a tail call, no attributes).
call :: Operand -> [Operand] -> Codegen Operand
call fn args = instr $ Call False CC.C [] (Right fn) (toArgs args) [] []
-- | Reinterpreting cast between same-size types.
bitcast :: Operand -> Type -> Codegen Operand
bitcast opr ty = instr $ BitCast opr ty []
-- | Stack-allocate a single value of @ty@ (default alignment).
alloca :: Type -> Codegen Operand
alloca ty = instr $ Alloca ty Nothing 0 []
-- | Store through a pointer.  NOTE(review): the arguments are passed
-- to 'Store' as (address, value) in that order -- confirm against the
-- llvm-general AST field order when upgrading the library.
store :: Operand -> Operand -> Codegen Operand
store ptr val = instr $ Store False ptr val Nothing 0 []
-- | Load through a pointer.
load :: Operand -> Codegen Operand
load ptr = instr $ Load False ptr Nothing 0 []
-- | GEP with an implicit leading 0 index (typical struct-field access).
getelementptr :: Integral i => Operand -> i -> Codegen Operand
getelementptr address ix = getelementptrRaw address [0, ix]
-- | In-bounds GEP with explicit indices, each rendered as a 32-bit
-- constant.
getelementptrRaw :: Integral i => Operand -> [i] -> Codegen Operand
getelementptrRaw address ixs =
  instr $ GetElementPtr True address (map (constUintSize 32) ixs) []
-- Control Flow
-- | Unconditional branch to the named block.
br :: Name -> Codegen (Named Terminator)
br val = terminator $ Do $ Br val []
-- | Conditional branch: true target first, then false target.
cbr :: Operand -> Name -> Name -> Codegen (Named Terminator)
cbr cond tr fl = terminator $ Do $ CondBr cond tr fl []
-- | SSA phi node over (value, predecessor-block) pairs.
phi :: Type -> [(Operand, Name)] -> Codegen Operand
phi ty incoming = instr $ Phi ty incoming []
-- | Return the given value from the current function.
ret :: Operand -> Codegen (Named Terminator)
ret val = terminator $ Do $ Ret (Just val) []
| talw/crisp-compiler | src/Codegen.hs | bsd-3-clause | 11,969 | 0 | 15 | 2,411 | 3,897 | 2,041 | 1,856 | 263 | 2 |
{-# LANGUAGE QuasiQuotes #-}
module Usage (progUsage) where
import System.Environment (getArgs)
import System.Console.Docopt
-- | Command-line interface for the @huntex@ executable, parsed at
-- compile time by the docopt quasiquoter.  The usage text below is the
-- single source of truth for option parsing, so its exact wording
-- (option and argument names) must match what callers look up.
progUsage :: Docopt
progUsage = [docopt|
Huntex.

Usage:
  huntex FILE [ -a ]
  huntex --help | -h
  huntex --version

Options:
  -h, --help       Show this screen.
  -a, --ast        Dump AST
  --version        Show version.

Arguments
  FILE             File to be 'huntex'ed
|]
| vzaccaria/huntex | app/Usage.hs | bsd-3-clause | 418 | 0 | 5 | 113 | 40 | 27 | 13 | 6 | 1 |
module Sklite.Drivers
( generateDrivers
)
where
import Control.Applicative ((<$>), (<*>), pure)
import Control.Monad (forM_)
import Data.Maybe (catMaybes)
import System.FilePath ((</>))
import Text.PrettyPrint
import Numeric (showHex)
import Sklite.Types
import qualified Sklite.Paths as Paths
import Sklite.Layout.Validation
-- This is going to be gross for now. In the future, we might
-- consider the 'language-c' package for generating C code.
-- | Generate one C driver source\/header pair per cell in the layout
-- and write them all into @destDir@.
generateDrivers :: FilePath -> ExplodedLayout -> IO ()
generateDrivers destDir (ExplodedLayout (ValidatedLayout layout)) = do
  let f cell = buildDriverSource cell
                 (getRegions layout (cellSharedMemoryRegions cell))
                 (chans cell)
      -- Channels in which this cell is either endpoint.
      chans cell = [ c | c <- layoutChannels layout
                       , cellName cell `elem` [chanFrom c, chanTo c]
                   ]
      cfiles = concat $ f <$> layoutCells layout
  writeSourceFiles destDir cfiles
-- | Write each (filename, contents) pair into @destDir@, appending a
-- trailing newline to every file.
writeSourceFiles :: FilePath -> [(FilePath, String)] -> IO ()
writeSourceFiles destDir = mapM_ emit
  where
    emit (name, body) = writeFile (destDir </> name) (body ++ "\n")
-- | Pair every shared-memory access with each region of the layout
-- whose name it references, preserving access order (and region order
-- within one access).
getRegions :: Layout
           -> [SharedMemoryAccess]
           -> [(SharedMemoryAccess, SharedMemoryRegion)]
getRegions layout accesses =
  [ (a, r)
  | a <- accesses
  , r <- sharedMemoryRegions layout
  , accessRegionName a == regionName r
  ]
-- | Render the .c and .h driver files for one cell as
-- (filename, contents) pairs.
buildDriverSource :: Cell
                  -> [(SharedMemoryAccess, SharedMemoryRegion)]
                  -> [Channel]
                  -> [(FilePath, String)]
buildDriverSource cell regions chans =
  [ (Paths.driverCfilename cell, render $ driver_c cell regions chans)
  , (Paths.driverHfilename cell, render $ driver_h cell regions chans)
  ]
-- | Identifier stem shared by all generated C symbols of one access.
regionBasename :: SharedMemoryAccess -> String
regionBasename = ("region_" ++) . accessAlias
-- | Name of the generated C function returning the mapped base address.
regionBaseAddr :: SharedMemoryAccess -> String
regionBaseAddr a = regionBasename a ++ "_base"
-- | Name of the generated C global holding the mmap'ed pointer.
regionBasePtr :: SharedMemoryAccess -> String
regionBasePtr a = regionBasename a ++ "_base_ptr"
-- | True when the cell may read from the region (RO or RW access).
readable :: SharedMemoryAccess -> Bool
readable = (`elem` [MemReadOnly, MemReadWrite]) . accessType
-- | True when the cell may write to the region (WO or RW access).
writable :: SharedMemoryAccess -> Bool
writable = (`elem` [MemWriteOnly, MemReadWrite]) . accessType
-- | Render the generated header for one cell: shared-memory prototypes
-- and macros for each region plus extern channel declarations.
driver_h :: Cell
         -> [(SharedMemoryAccess, SharedMemoryRegion)]
         -> [Channel]
         -> Doc
driver_h cell regions chans =
  vcat [ text "#ifndef __DRIVER_H__"
       , text "#define __DRIVER_H__"
       , text "#include <sys/types.h>"
       , text "#include <stddef.h>"
       , text "#include <string.h>"
       , text "#include <mem.h>"
       , text "#include <channels.h>"
       , vcat $ prototypes <$> regions
       , vcat $ chanVar <$> chans
       , text "int cell_main(int argc, char **argv);"
       , text "#endif"
       ]
  where
    -- Extern declaration; the channel is a read end iff this cell is
    -- its destination.
    chanVar ch = text "extern" <+>
                 (if chanTo ch == cellName cell then text "read" else text "write") <>
                 text "_channel_p" <+> (text $ chanName ch) <> text ";"
    -- Base-address/size/mem prototypes are always emitted; the
    -- read/write memcpy macros only for access modes the cell holds.
    prototypes (access, region) =
      let required =
            [ "void * " ++ regionBaseAddr access ++ "(void);"
            , "#define " ++ regionBasename access ++ "_size() "
              ++ (show $ regionSize region)
            , "struct mem " ++ regionBasename access ++ "_mem();"
            ]
          optional = [ if writable access
                         then Just $ "#define " ++ regionBasename access ++ "_write(src, sz, offset) { memcpy("
                                     ++ regionBaseAddr access ++ "() + offset, src, sz); }"
                         else Nothing
                     , if readable access
                         then Just $ "#define " ++ regionBasename access ++ "_read(dst, sz, offset) { memcpy(dst, "
                                     ++ regionBaseAddr access ++ "() + offset, sz); }"
                         else Nothing
                     ]
      in vcat $ text <$> (required ++ catMaybes optional)
-- | Render the generated C driver for one cell: system includes,
-- channel and region globals, the shmem\/channel setup functions, an
-- optional SIGSEGV\/SIGBUS handler (raw-binary cells only) and @main@.
driver_c :: Cell
         -> [(SharedMemoryAccess, SharedMemoryRegion)]
         -> [Channel]
         -> Doc
driver_c cell regions chans =
  vcat [ text "#include <sys/types.h>"
       , text "#include <sys/stat.h>"
       , text "#include <sys/mman.h>"
       , text "#include <pwd.h>"
       , text "#include <stdio.h>"
       , text "#include <errno.h>"
       , text "#include <fcntl.h>"
         -- NOTE(review): <fcntl.h> is included twice in this template.
       , text "#include <fcntl.h>"
       , text "#include <string.h>"
       , text "#include <unistd.h>"
       , text "#include <stdlib.h>"
       , text "#include <grp.h>"
       , text "#include <mem.h>"
       , text "#include <channels.h>"
       , text "#include <signal.h>"
       , text $ "#include \"" ++ (Paths.driverHfilename cell) ++ "\""
       , vcat $ chanVar <$> chans
       , vcat $ baseAddrVar <$> regions
       , vcat $ baseAddrFunc <$> regions
       , vcat $ memFunc <$> regions
       , vcat [ text "void setup_shmem() {"
              , nest 4 $ setupShmemBody regions
              , text "}"
              ]
       , vcat [ text "void setup_channels() {"
              , nest 4 $ setupChannelsBody cell chans
              , text "}"
              ]
         -- Raw-binary cells get a crash handler so faults in the
         -- jumped-to code are reported instead of dying silently.
       , if cellRunMethod cell == CellMain
           then text ""
           else vcat [ text "static void segv_handler(int signal, siginfo_t *si, void *arg) {"
                     , text "  switch (signal) {"
                     , text "  case SIGBUS:"
                     , text "    fprintf(stderr, \"Bus error at %p\\n\", si->si_addr);"
                     , text "    break;"
                     , text "  case SIGSEGV:"
                     , text "    fprintf(stderr, \"Segmentation fault at %p\\n\", si->si_addr);"
                     , text "    break;"
                     , text "  default:"
                     , text "    fprintf(stderr, \"Unexpected signal: %d\\n\", signal);"
                     , text "    break;"
                     , text "  }"
                     , text "  exit(EXIT_FAILURE);"
                     , text "}"
                     , text ""
                     , text "static void setup_segv_handler(void) {"
                     , text ""
                     , text "  int res;"
                     , text "  struct sigaction sa ="
                     , text "    { .sa_sigaction = segv_handler"
                     , text "    , .sa_flags = SA_SIGINFO"
                     , text "    };"
                     , text "  sigemptyset(&sa.sa_mask);"
                     , text ""
                     , text "  res = sigaction(SIGSEGV, &sa, NULL);"
                     , text "  res = sigaction(SIGBUS, &sa, NULL);"
                     , text "  if (res == -1) {"
                     , text "    perror(\"sigaction\");"
                     , text "    exit(EXIT_FAILURE);"
                     , text "  }"
                     , text "}"
                     ]
       , vcat [ text "int main(int argc, char **argv) {"
              , nest 4 $ mainBody cell
              , text "}"
              ]
       ]
  where
    -- Channel globals start out NULL; setup_channels() fills them in.
    chanVar ch = (if chanTo ch == cellName cell then text "read" else text "write") <>
                 text "_channel_p" <+> (text $ chanName ch) <+> equals <+> text "NULL;"
    -- Accessor returning a struct mem (base + size) for one region.
    memFunc (access, _) =
      vcat [ text $ "struct mem " ++ regionBasename access ++ "_mem() {"
           , nest 4 $ vcat [ text "struct mem m;"
                           , text $ "m.base = " ++ regionBaseAddr access ++ "();"
                           , text $ "m.size = " ++ regionBasename access ++ "_size();"
                           , text "return m;"
                           ]
           , text "}"
           ]
    -- Global pointer that will hold the mmap'ed base address.
    baseAddrVar (access, _) =
      text $ concat [ "void * "
                    , regionBasePtr access
                    , " = NULL;"
                    ]
    -- Accessor function wrapping the base-pointer global.
    baseAddrFunc (access, _) =
      vcat [ text $ concat [ "void * "
                           , regionBaseAddr access
                           , "() {"
                           ]
           , nest 4 $ text $ concat [ "return "
                                    , regionBasePtr access
                                    , ";"
                                    ]
           , text "}"
           ]
-- | Body of the generated @main@: set up shared memory and channels,
-- resolve the configured user, drop privileges, then transfer control
-- to the cell (via @cell_main@ or by mapping and jumping into a raw
-- binary at a fixed address).
mainBody :: Cell -> Doc
mainBody cell =
  vcat $ [ text "struct passwd *user_entry = NULL;"
         , text $ "const char *username = " ++ (show $ cellUser cell) ++ ";"
         , text "setup_shmem();"
         , text "setup_channels();"
           -- Reset errno because getpwnam will not do it.
         , text "errno = 0;"
         , text "user_entry = getpwnam(username);"
         , text "if (user_entry == NULL) {"
         , nest 4 $ vcat [ text "printf(\"Error getting user information for '%s': %s\", username, strerror(errno));"
                         , text "return 1;"
                         ]
         , text "}"
           -- Privilege drop: uid, gid, and clear supplementary groups.
         , text "setuid(user_entry->pw_uid);"
         , text "setgid(user_entry->pw_gid);"
         , text "setgroups(0, NULL);"
         ] ++ runProgram
  where
    -- How control is transferred into the cell's code.
    runProgram = case cellRunMethod cell of
                   CellMain -> runCellMain
                   RawBinary addr p sz -> runFromAddr addr p sz
    runCellMain = [ text "return cell_main(argc, argv);"
                  ]
    -- Map a RWX region at the fixed entry address, copy the binary in,
    -- install the fault handler and jump to it.
    runFromAddr addr p sz =
      [ text $ "int prog = open(" ++ show p ++ ", O_RDWR);"
      , text $ "struct stat s;"
      , text "if (0 != fstat(prog, &s)) {"
      , nest 4 $ vcat [ text "printf(\"Error in fstat(): %s\\n\", strerror(errno));"
                      , text "return 1;"
                      ]
      , text "}"
      , text $ "void *entry = (void *) " ++ show addr ++ ";"
      , text $ "void *result = mmap(entry, " ++ show sz ++ ", "
               ++ "PROT_READ|PROT_WRITE|PROT_EXEC, MAP_ANONYMOUS|MAP_PRIVATE, -1, 0);"
      , text $ "if (result != entry) {"
      , nest 4 $ vcat [ text "printf(\"Error mapping entry point address: %s\\n\", strerror(errno));"
                      , text "printf(\"Got entry = %p, result = %p\\n\", entry, result);"
                      , text "return 1;"
                      ]
      , text "}"
      , text "if (s.st_size != read(prog, entry, s.st_size)) { exit(1); }"
      , text $ "if (mprotect(entry, " ++ show sz ++ ", PROT_READ|PROT_WRITE|PROT_EXEC)) {"
      , nest 4 $ vcat [ text "printf(\"Error calling mprotect: %s\\n\", strerror(errno));"
                      , text "return 1;"
                      ]
      , text "}"
      , text "setup_segv_handler();"
      , text "asm(\"jmpq *%0\"::\"r\"(entry):\"%rax\");"
      , text "return 0;"
      ]
-- | Emit the body of @setup_channels()@: for every channel this cell
-- participates in, construct the read or write end from its three
-- backing memory regions and abort the process if creation fails.
setupChannelsBody :: Cell
                  -> [Channel]
                  -> Doc
setupChannelsBody cell chans =
  let -- This cell is the writer iff it is the channel's source.
      chanFunc ch = if cellName cell == chanFrom ch
                      then text "write_channel"
                      else text "read_channel"
      -- The slots/reader/writer regions backing one channel.
      args ch = [ text "region_" <> (text $ slotsBufName ch) <> text "_mem()"
                , text "region_" <> (text $ readerBufName ch) <> text "_mem()"
                , text "region_" <> (text $ writerBufName ch) <> text "_mem()"
                ]
      f ch = let allArgs = args ch ++ [ integer $ chanMsgSize ch
                                      , integer $ chanMsgSlots ch
                                      ] ++
                           -- Only the writer end takes the overwrite flag.
                           if cellName cell == chanFrom ch
                             then [int $ if chanOverwrite ch then 1 else 0]
                             else []
             in [ (text $ chanName ch) <+> equals <+> (chanFunc ch) <>
                  (parens $ hcat $ punctuate comma allArgs) <>
                  semi
                , text "if" <+> (parens $ ((text $ chanName ch) <+> text "== NULL")) <+> lbrace
                , nest 4 $ vcat [ text "printf(\"Error setting up channel\\n\");"
                                , text "exit(1);"
                                ]
                , rbrace
                ]
  in vcat $ concat $ f <$> chans
-- | Emit the body of @setup_shmem()@: one open+mmap stanza per region,
-- numbered so each stanza gets its own file-descriptor variable.
setupShmemBody :: [(SharedMemoryAccess, SharedMemoryRegion)]
               -> Doc
setupShmemBody regions =
  vcat (zipWith setupShmemBody_ [0..] regions)
-- | Emit one open+mmap stanza for a region access.  @pos@ numbers the
-- generated file-descriptor variable so stanzas can coexist in one
-- function body.
setupShmemBody_ :: Int -> (SharedMemoryAccess, SharedMemoryRegion) -> Doc
setupShmemBody_ pos (access, region) =
  let (prot, openFlags) = case accessType access of
        MemReadOnly -> ("PROT_READ", "O_RDONLY")
        -- NOTE: for write-only cases we still
        -- open the file read-write, because
        -- mmap() will refuse to create
        -- MAP_SHARED mappings if the file is
        -- not opened this way. We still rely
        -- on static types to prevent the user
        -- from actually writing to the memory
        -- as long as they use the included
        -- APIs.
        MemWriteOnly -> ("PROT_WRITE", "O_RDWR")
        MemReadWrite -> ("PROT_READ|PROT_WRITE", "O_RDWR")
      fdVar = "fd" ++ show pos
      -- Optional fixed mapping address, rendered in hex for mmap.
      mapAddr = case accessMapAddress access of
        Just a -> "(void *) 0x" ++ showHex a ""
        Nothing -> "NULL"
  in vcat [ text $ "int " ++ fdVar ++ " = open("
            ++ (show $ Paths.regionFilename region)
            ++ ", " ++ openFlags ++ ");"
          , text $ "if (" ++ fdVar ++ " == -1) {"
          , nest 4 $ vcat [ text $ "printf(\"Error opening shared memory file %s: %s\\n\", "
                            ++ (show $ Paths.regionFilename region)
                            ++ ", strerror(errno));"
                          , text "exit(1);"
                          ]
          , text "}"
          , text $ regionBasePtr access ++ " = "
            ++ "mmap(" ++ mapAddr ++ ", " ++ regionBasename access ++ "_size(), "
            ++ prot ++ ", MAP_SHARED, " ++ fdVar ++ ", 0);"
            -- A fixed address that did not take effect is treated as a
            -- hard failure, same as MAP_FAILED.
          , text $ "if (" ++ regionBasePtr access ++ " == MAP_FAILED || (" ++ mapAddr ++
            " != NULL && " ++ regionBasePtr access ++ " != " ++ mapAddr ++ ")) {"
          , nest 4 $ vcat [ text "printf(\"Error mapping shared memory: %s\\n\", strerror(errno));"
                          , text "exit(1);"
                          ]
          , text "}"
          ]
| GaloisInc/sk-dev-platform | user/sklite/src/Sklite/Drivers.hs | bsd-3-clause | 15,628 | 0 | 20 | 6,969 | 3,077 | 1,583 | 1,494 | 269 | 4 |
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ConstraintKinds #-}
module SupersessionSpec (spec) where
import Test.Hspec
import Control.Monad.Identity
import Supersession.Internal.Mealy
-- | End-to-end check: a login, a chat message and a logout driven
-- through the Mealy machine produce the expected responses in order.
spec :: Spec
spec = describe "it" $ do
  it "should work" $ do
    let test = ["Login \"jkarni\"", "Msg \"hi\"", "Logout"]
    runMealy LoggedOutSing test `shouldBe` Identity [ "()"
                                                    , "\"jkarni says: hi\""
                                                    , "\"now logged out!\""
                                                    ]
----Logged in ----------------------------------------------------------------
-- | Messages accepted while a user is logged in.
data LoggedIn = Msg String | Logout
  deriving (Eq, Show, Read)
-- | Transition function for the logged-in state: 'Logout' returns to
-- 'LoggedOutSing'; 'Msg' echoes the text tagged with the user name and
-- stays logged in (the user name lives in the singleton).
loggedIn :: (NeededCtx orig final, Monad m)
         => EgSing LoggedIn String
         -> LoggedIn
         -> m (String, Mealy m orig final)
loggedIn _ Logout = return ("now logged out!", next LoggedOutSing)
loggedIn (LoggedInSing usr) (Msg m)
  = return (usr ++ " says: " ++ m, next $ LoggedInSing usr)
instance (NeededCtx orig final) => MealyStep EgSing LoggedIn String orig final where
  move = loggedIn
-----Logged out --------------------------------------------------------------
-- | Messages accepted while logged out.
data LoggedOut = Login Username | Error
  deriving (Eq, Show, Read)
-- | Transition function for the logged-out state: 'Login' moves to the
-- logged-in state carrying the user name; 'Error' stays logged out.
-- Both produce the unit output.
loggedOut :: (NeededCtx orig final, Monad m)
          => LoggedOut -> m ((), Mealy m orig final)
loggedOut (Login usr) = return ((), next $ LoggedInSing usr)
loggedOut Error = return ((), next LoggedOutSing)
instance ( NeededCtx orig final)
  => MealyStep EgSing LoggedOut () orig final where
  move _ = loggedOut
-----General ------------------------------------------------------------------
type Username = String
-- | State singleton indexing each machine state by its input and
-- output types; the logged-in constructor also carries state (the
-- user name).
data EgSing a b where
  --                          input     output
  --                           vvv       vvv
  LoggedOutSing ::            EgSing LoggedOut ()
  LoggedInSing :: Username -> EgSing LoggedIn String
  -- ^ The singleton may be used to keep state
-- | All constraints the transition functions need.  Only necessary
-- because we choose to leave @orig@ and @final@ polymorphic.
type NeededCtx orig final = ( Parse orig LoggedOut, Parse orig LoggedIn
                            , Render () final, Render String final
                            )
| turingjmp/supersession | test/SupersessionSpec.hs | bsd-3-clause | 2,290 | 0 | 14 | 648 | 528 | 286 | 242 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
-- | This module implements the two algorithms from RFC 3490. (<http://tools.ietf.org/html/rfc3490>)
module Text.IDNA (acePrefix, toASCII, toUnicode)
where
import Text.StringPrep
import Text.StringPrep.Profiles
import Control.Monad
import qualified Data.Text as Text
import Data.Text (Text)
import qualified Data.Text.Punycode as Puny
import Data.Text.Encoding as E
-- | The ASCII Compatible Encoding prefix (currently \'@xn--@\').
acePrefix :: Text
acePrefix = "xn--" -- ACE prefix per RFC 3490 section 5; never change.
-- | Implements the ToASCII algorithm.
toASCII :: Bool -- ^ Whether to allow unassigned code points (in RFC: AllowUnassigned).
        -> Bool -- ^ Whether to disallow certain ASCII characters (in RFC: UseSTD3ASCIIRules).
        -> Text -- ^ The text to transform.
        -> Maybe Text
toASCII allowUnassigned useSTD3ASCIIRules t = do
  -- Step 2: nameprep, only required for labels that are not pure ASCII.
  step2 <- if Text.any (>'\x7f') t
             then runStringPrep (namePrepProfile allowUnassigned) t
             else return t
  -- Step 3: under STD3 rules, reject non-LDH ASCII and leading/trailing
  -- hyphens.  NOTE(review): 'isLDHascii' returns True for characters
  -- that are NOT letter/digit/hyphen, despite its name; also
  -- 'Text.head'/'Text.last' are partial -- an empty label here would
  -- raise.  TODO confirm empty input cannot reach this point.
  step3 <- if (useSTD3ASCIIRules && (Text.any isLDHascii step2 || Text.head step2 == '-' || Text.last step2 == '-'))
             then Nothing
             else return step2
  -- Steps 4-7: punycode-encode non-ASCII labels and prepend the ACE
  -- prefix; a label already carrying the prefix is rejected.
  step7 <- if (Text.any (>'\x7f') step2)
             then if acePrefix `Text.isPrefixOf` step3
                    then Nothing
                    else case return (Puny.encode step3) of -- TODO: this can fail?
                           -- NOTE(review): 'return' here is Either's
                           -- 'Right', so this Left branch is
                           -- unreachable as written.
                           Left _ -> Nothing
                           Right t -> return $ acePrefix `Text.append` E.decodeUtf8 t
             else return step3
  -- Step 8: DNS labels are limited to 63 octets.
  if Text.length step7 <= 63
    then return step7
    else Nothing
-- | True when the character falls in the ASCII range but outside the
-- LDH (letter\/digit\/hyphen) set -- i.e. it is a *disallowed* host
-- name character under STD3 rules.  (The name reads backwards; it is
-- kept for compatibility with existing callers.)  Characters above
-- @\\x7f@ yield False.
isLDHascii :: Char -> Bool
isLDHascii c = any inBand bands
  where
    inBand (lo, hi) = lo <= c && c <= hi
    -- The gaps between these bands are exactly '-', digits, and the
    -- upper/lowercase ASCII letters.
    bands = [ ('\x0',  '\x2c')
            , ('\x2e', '\x2f')
            , ('\x3a', '\x40')
            , ('\x5b', '\x60')
            , ('\x7b', '\x7f')
            ]
-- | Implements the ToUnicode algorithm.  ToUnicode never fails: any
-- step that cannot proceed aborts ('Left') with the best text produced
-- so far, which 'mergeEither' then yields.
toUnicode :: Bool -- ^ Whether to allow unassigned code points (in RFC: AllowUnassigned).
          -> Bool -- ^ Whether to disallow certain ASCII characters (in RFC: UseSTD3ASCIIRules).
          -> Text -- ^ The text to transform.
          -> Text
toUnicode allowUnassigned useSTD3ASCIIRules t = mergeEither $ do
  -- Step 2: nameprep non-ASCII input; on failure keep the original.
  step2 <- if Text.any (>'\x7f') t
             then case runStringPrep (namePrepProfile allowUnassigned) t of
                    Nothing -> Left t
                    Just t' -> return t'
             else return t
  -- Step 3: only ACE-prefixed labels are candidates for decoding.
  step3 <- if not $ acePrefix `Text.isPrefixOf` step2
             then Left step2
             else return step2
  -- Step 4: strip the prefix before decoding.
  let step4 = Text.drop (Text.length acePrefix) step3
  -- Step 5: punycode-decode; on failure fall back to the prefixed form.
  step5 <- case Puny.decode $ E.encodeUtf8 step4 of
             Left _ -> Left step3
             Right s -> return s
  -- Steps 6-8: verify the round trip re-encodes to the original label;
  -- otherwise return the undecoded input.  (The 't' bound below
  -- shadows the input parameter.)
  case toASCII allowUnassigned useSTD3ASCIIRules step5 of
    Nothing -> return step3
    Just t -> if t == step3
                then return step5
                else return step3
-- | Collapse an 'Either' whose branches carry the same type.
mergeEither :: Either a a -> a
mergeEither = either id id
-- | Sample non-ASCII labels for exercising the encode/decode round
-- trip manually (e.g. in GHCi).
tests :: [Text]
tests = ["Bücher","tūdaliņ"]
| Porges/idna-hs | Text/IDNA.hs | bsd-3-clause | 2,590 | 58 | 23 | 513 | 797 | 415 | 382 | 65 | 7 |
{-# LANGUAGE ScopedTypeVariables #-}
-- -----------------------------------------------------------------------------
-- |
-- Module : PixelParty.Texture2D
-- Copyright : (c) Andreas-Christoph Bernstein 2011
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : andreas.bernstein@googlemail.com
-- Stability : unstable
-- Portability : not portable
--
-- Load, use and save textures.
--
--------------------------------------------------------------------------------
module PixelParty.Texture2D
-- ( loadTexture
-- , enableTexture
-- , screenshot
-- ) where
where
{-
import Control.Applicative ((<$>), pure)
import Data.Array.Unboxed
import Foreign.Storable (Storable, sizeOf)
import Graphics.GL.Core41 as GL
import Graphics.GL.Types as GL
import Foreign.Marshal.Array (withArray, allocaArray, peekArray, withArrayLen)
import Foreign (nullPtr, plusPtr, sizeOf, castPtr, Ptr, withMany)
import Data.Array.MArray (thaw)
import Data.Array.Storable (withStorableArray)
import Data.Bitmap.IO
import Codec.Image.STB (loadImage)
import qualified Graphics.Imlib as I
type GLTextureUnit = GL.GLenum
type GLTextureObject = GL.GLuint
loadTexture :: FilePath -> GLTextureUnit -> IO (GLTextureObject, GLTextureUnit)
loadTexture path u = do
e <- loadImage path
case e of
Left err -> error $ "loadTexture: " ++ err
Right bm -> do
GL.glActiveTexture u
t <- fmap head $ allocaArray 1 (\buf -> GL.glGenTextures 1 buf >> peekArray 1 buf)
GL.glBindTexture GL.gl_TEXTURE_2D t
GL.glTexParameteri GL.gl_TEXTURE_2D GL.gl_TEXTURE_MIN_FILTER
(fromIntegral GL.gl_LINEAR)
GL.glTexParameteri GL.gl_TEXTURE_2D GL.gl_TEXTURE_MAG_FILTER
(fromIntegral GL.gl_LINEAR)
withBitmap bm $ \(width,height) nchn pad ptr -> do
let ty = marshalPixelComponent bm
(pf,pif) = formatPlusInternalFormat bm
(w,h) = (fromIntegral width, fromIntegral height)
alignment = fromIntegral (bitmapRowAlignment bm)
GL.glPixelStorei GL.gl_UNPACK_ALIGNMENT alignment
GL.glTexImage2D GL.gl_TEXTURE_2D 0 pif w h 0 pf ty ptr
return (t,u)
enableTexture :: (GLTextureObject, GLTextureUnit) -> IO ()
enableTexture (t,u) = do
GL.glActiveTexture u
GL.glEnable GL.gl_TEXTURE_2D
GL.glBindTexture GL.gl_TEXTURE_2D t
-- -----------------------------------------------------------------------------
marshalPixelComponent :: forall t.PixelComponent t => Bitmap t -> GL.GLenum
marshalPixelComponent _ = case pixelComponentType (undefined::t) of
PctWord8 -> GL.gl_UNSIGNED_BYTE
PctWord16 -> GL.gl_UNSIGNED_SHORT
PctWord32 -> GL.gl_UNSIGNED_INT
PctFloat -> GL.gl_FLOAT
formatPlusInternalFormat :: forall t. PixelComponent t => Bitmap t -> (GL.GLenum, GL.GLint)
formatPlusInternalFormat bm =
case pixelComponentType (undefined::t) of
PctWord8 -> case bitmapNChannels bm of
1 -> (GL.gl_ALPHA, fromIntegral GL.gl_ALPHA8)
2 -> (GL.gl_LUMINANCE_ALPHA, fromIntegral GL.gl_LUMINANCE8_ALPHA8)
3 -> (GL.gl_RGB, fromIntegral GL.gl_RGB8)
4 -> (GL.gl_RGBA, fromIntegral GL.gl_RGBA8)
_ -> case bitmapNChannels bm of
1 -> (GL.gl_ALPHA, fromIntegral GL.gl_ALPHA)
2 -> (GL.gl_LUMINANCE_ALPHA, fromIntegral GL.gl_LUMINANCE_ALPHA)
3 -> (GL.gl_RGB, fromIntegral GL.gl_RGB)
4 -> (GL.gl_RGBA, fromIntegral GL.gl_RGBA)
readPixels :: Int -> Int -> IO I.ImlibImage
readPixels w h =
let f = GL.gl_BGRA
t = GL.gl_UNSIGNED_BYTE
size = (w*h*4)
in allocaArray (w*h*4) $ \buf -> do
GL.glPixelStorei GL.gl_PACK_ALIGNMENT 1
GL.glReadPixels 0 0 (fromIntegral w) (fromIntegral h) f t buf
I.createImageUsingData w h buf
getTexImage2d :: Int -> Int -> IO I.ImlibImage
getTexImage2d w h =
let f = GL.gl_BGRA
t = GL.gl_UNSIGNED_BYTE
size = (w*h*4)
in allocaArray (w*h*4) $ \buf -> do
GL.glPixelStorei GL.gl_PACK_ALIGNMENT 1
GL.glGetTexImage GL.gl_TEXTURE_2D 0 f t buf
I.createImageUsingData w h buf
screenshot :: FilePath -> Int -> Int -> IO ()
screenshot file w h = readPixels w h >>= I.contextSetImage >> I.saveImage file
-}
| bernstein/pixelparty | src/PixelParty/Texture2D.hs | bsd-3-clause | 4,242 | 0 | 3 | 889 | 25 | 23 | 2 | 2 | 0 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module Juno.Consensus.Api
( apiReceiver
) where
import Control.Concurrent (takeMVar, putMVar, modifyMVar, modifyMVar_)
import Control.Lens
import Control.Monad
import Control.Monad.RWS
import qualified Data.ByteString.Char8 as SB8
import qualified Data.Map as Map
import qualified Data.Set as Set
import Text.Read (readMaybe)

import Juno.Types
import Juno.Util.Util
import Juno.Runtime.Timer
import Juno.Runtime.Sender (sendRPC)
-- TODO do we need all this? can we just enqueueEvent directly?
-- get commands with getEntry and put them on the event queue to be sent
-- THREAD: CLIENT COMMAND

-- | Endless loop that drains queued client commands, stamps them with
-- fresh request ids, records them as 'CmdAccepted' in the shared status
-- map, and forwards them to the leader as a 'CommandBatch'.
apiReceiver :: MonadIO m => Raft m ()
apiReceiver = do
  nid <- view (cfg.nodeId)
  forever $ do
    cmdMap <- view (rs.cmdStatusMap)
    (rid@(RequestId _), cmdEntries) <- dequeueCommand
    -- support for special REPL command "> batch test:5000", runs hardcoded batch job
    cmds' <- case cmdEntries of
      (CommandEntry cmd):[] | SB8.take 11 cmd == "batch test:" -> do
          let missiles = replicate (batchSize cmd) $ hardcodedTransfers nid cmdMap
          liftIO $ sequence missiles
      _ -> liftIO $ sequence $ fmap ((nextRid nid) cmdMap) cmdEntries
    -- set current requestId in Raft to the value associated with this request.
    rid' <- setNextRequestId' rid
    liftIO (modifyMVar_ cmdMap (\(CommandMap n m) -> return $ CommandMap n (Map.insert rid CmdAccepted m)))
    -- hack set the head to the org rid
    let cmds'' = case cmds' of
          ((Command entry nid' _ NewMsg):rest) -> (Command entry nid' rid' NewMsg):rest
          _ -> []
    -- TODO: have the client really sign this and map the client digest to this.
    -- for now, node 1003 has keys registered as client and protocol node.
    clientSendCommandBatch' $ CommandBatch cmds'' NewMsg
  where
    -- Size of a hardcoded batch, parsed from "batch test:<n>";
    -- defaults to 500 when the number is missing or malformed.
    batchSize :: (Num c, Read c) => SB8.ByteString -> c
    batchSize cmd = maybe 500 id . readMaybe $ drop 11 $ SB8.unpack cmd
    -- Allocate the next request id and attach it to a fresh Command.
    nextRid :: NodeID -> CommandMVarMap -> CommandEntry -> IO Command
    nextRid nid cmdMap entry = do
      rid <- (setNextCmdRequestId' cmdMap)
      return (Command entry nid rid NewMsg)
    -- One canned transfer command, used only by the "batch test" path.
    hardcodedTransfers :: NodeID -> CommandMVarMap-> IO Command
    hardcodedTransfers nid cmdMap = nextRid nid cmdMap transferCmdEntry
    transferCmdEntry :: CommandEntry
    transferCmdEntry = (CommandEntry "transfer(Acct1->Acct2, 1 % 1)")
-- move to utils, this is the only CommandStatus that should inc the requestId
-- NB: this only works when we have a single client, but punting on solving this for now is a good idea.
-- TODO add Mac or node ID to the requestID for now.

-- | Atomically claim the next request id: bump the counter and record
-- the claimed id as 'CmdSubmitted' in the map.
--
-- Uses 'modifyMVar' instead of the previous takeMVar/putMVar pair so
-- that an (asynchronous) exception between the two steps cannot leave
-- the MVar permanently empty and deadlock every other user of it.
setNextCmdRequestId' :: CommandMVarMap -> IO RequestId
setNextCmdRequestId' cmdMapMvar =
  modifyMVar cmdMapMvar $ \(CommandMap nextId m) ->
    return ( CommandMap (nextId + 1) (Map.insert nextId CmdSubmitted m)
           , nextId )
-- This should be broken now? This node might not be the leader.
-- Store the given request id in the Raft state and hand it back.
setNextRequestId' :: Monad m => RequestId -> Raft m RequestId
setNextRequestId' rid = do
  currentRequestId .= rid
  return rid
-- | Always send CommandBatches, a single Command is a batch of size 1.
-- Sends to the leader, knows the leader because running in Raft
-- THREAD: CLIENT MAIN. updates state
clientSendCommandBatch' :: Monad m => CommandBatch -> Raft m ()
clientSendCommandBatch' cmdb@CommandBatch{..} = do
  mlid <- use currentLeader
  case mlid of
    Just lid -> do
      sendRPC lid $ CMDB' cmdb
      prcount <- fmap Map.size (use pendingRequests)
      -- if this will be our only pending request, start the timer
      -- otherwise, it should already be running
      -- NOTE(review): 'last' is partial -- an empty _cmdbBatch crashes
      -- here; confirm callers never send an empty batch.
      let lastCmd = last _cmdbBatch
      when (prcount == 0) resetHeartbeatTimer
      pendingRequests %= Map.insert (_cmdRequestId lastCmd) lastCmd -- TODO should we update CommandMap here?
    Nothing -> do
      -- No known leader yet: guess the first node, then retry.
      setLeaderToFirst' -- TODO: do we need this anymore? The raft protocol should be taking care of this as well.
      clientSendCommandBatch' cmdb
-- THREAD: CLIENT MAIN. updates state
-- Fallback when the client does not yet know the leader: pick the
-- first configured node; the real leader id arrives with the next
-- command response.
setLeaderToFirst' :: Monad m => Raft m ()
setLeaderToFirst' = do
  nodes <- view (cfg.otherNodes)
  if Set.null nodes
    then error "the client has no nodes to send requests to"
    else setCurrentLeader (Just (Set.findMin nodes))
| buckie/juno | src/Juno/Consensus/Api.hs | bsd-3-clause | 4,527 | 0 | 23 | 1,002 | 992 | 499 | 493 | 70 | 3 |
module Data.TTask.Pretty.Status
( ppProjectSprintLog
) where
import Control.Applicative
import Control.Lens
import Data.List
import Data.Time
import Data.TTask.Types
import Data.TTask.Analysis
import Data.TTask.Pretty.Contents
-- | Pretty-print the daily log of sprint @i@ inside project @pj@;
-- 'Nothing' when no sprint has that id.
ppProjectSprintLog :: Id -> Project -> Maybe String
ppProjectSprintLog i pj = fmap ppDailySprintLog (pj ^? sprint i)
-- | Render one sprint: a detailed header, the per-day status groups,
-- then point totals per final status (tasks only).
ppDailySprintLog :: Sprint -> String
ppDailySprintLog s =
  let
    sx = s^.lastStatuses
    -- A record counts when its latest status satisfies @f@ and the
    -- logged item is a task.
    cond f r = (r^.getLogStatus.f) && (r^.getLogContents.isTask)
    summary f = show (summaryPointBy (cond f) sx)
  in concat
    [ ppSprintHeaderDetail s
    , "\n\n"
    , intercalate "\n" . map ppDailyStatuses . dailyGroup $ s^.statuses
    , "\n\n"
    , "Wait         : " ++ summary isWait ++ "pt\n"
    , "Running      : " ++ summary isRunning ++ "pt\n"
    , "Finished     : " ++ summary isFinished ++ "pt\n"
    , "Not Achieved : " ++ summary isNotAchieved ++ "pt\n"
    , "Rejected     : " ++ summary isRejected ++ "pt"
    ]
----
-- | One day's worth of log entries: a summary line (date and total
-- finished points) followed by each status record, indented.
ppDailyStatuses :: DailyStatuses -> String
ppDailyStatuses d = concat
  [ show (dayStDay d), " : Total Finished point = ", show (dayStPoint d), "\n"
  , intercalate "\n" . map (("  "++) . ppStatusLog) $ dayStStatuses d
  ]
-- | Dispatch on the logged item's kind (project / sprint / story /
-- task) and format it with 'fmtStatusRec'.  A project is shown with a
-- fixed id of 0.
ppStatusLog :: StatusLogRec -> String
ppStatusLog s = case s^.getLogContents of
  TTaskProject v ->
    fmtStatusRec "PROJECT" 0 (v^.point) s (_projectName v)
  TTaskSprint v ->
    fmtStatusRec "SPRINT" (v^.sprintId) (v^.point) s (_sprintDescription v)
  TTaskStory v ->
    fmtStatusRec "STORY" (v^.storyId) (v^.point) s (_storyDescription v)
  TTaskTask v ->
    fmtStatusRec "TASK" (v^.taskId) (v^.point) s (_taskDescription v)
-- | Format one status record as
-- @[LABEL Npt STATUS at TIME] id : description@.
fmtStatusRec
  :: String -> Id -> Point -> StatusLogRec -> String -> String
fmtStatusRec label i p r desc =
  "[" ++ label ++ " " ++ show p ++ "pt " ++ statusPart ++ "] "
    ++ show i ++ " : " ++ desc
  where
    -- Status name plus the time-of-day it was recorded.
    statusPart :: String
    statusPart =
      ppStatusRecord (r^.getLogStatus)
        ++ " at " ++ show (localTimeOfDay (r^.getLogStatus.getStatusTime))
| tokiwoousaka/ttask | src/Data/TTask/Pretty/Status.hs | bsd-3-clause | 2,009 | 0 | 13 | 459 | 676 | 354 | 322 | -1 | -1 |
module Y21.D13 where
import qualified Data.Map.Strict as M
import qualified Data.Set as S
import Imports
import Util
-- | A dot position as (row, column) -- note the y-major order.
type YX = (Int, Int)
-- | One fold instruction: crease along a vertical (AlongX) or
-- horizontal (AlongY) line at the given coordinate.
data Fold = AlongX Int | AlongY Int deriving Show
-- 6,10
-- 0,14
--
-- fold along y=7
-- fold along x=5
--
-- | Parse the puzzle input: dot coordinates, a blank line, then the
-- fold instructions (see the sample above).
dotsAndFolds :: Parser ([YX], [Fold])
dotsAndFolds =
    (,) <$> (xy `endBy` eol) <* eol
        <*> (fold `endBy` eol)
  where
    -- "x,y" in the input is stored flipped as (y,x).
    xy :: Parser YX
    xy = flip (,) <$> decimal <* char ',' <*> decimal
    fold :: Parser Fold
    fold = do
        string "fold along "
        let fx = char 'x' $> AlongX
            fy = char 'y' $> AlongY
        (fx <|> fy) <*> (char '=' *> decimal)
-- | Apply a single fold: points beyond the crease line are mirrored
-- onto the near side; everything else is left in place.
foldOnce :: [YX] -> Fold -> [YX]
foldOnce dots f = map reflect dots
  where
    reflect yx@(y, x) = case f of
      AlongX cx | x > cx -> (y, 2 * cx - x)
      AlongY cy | y > cy -> (2 * cy - y, x)
      _                  -> yx
-- ## ## # # ## #### # # # # # #
-- # # # # # # # # # # # # # #
-- # # #### # # # #### ## # #
-- # # # # #### # # # # # # #
-- # # # # # # # # # # # # # # #
-- ## ## # # # # #### # # # # ##
--
-- | Render the dots as a grid of \'#\' over spaces, one output line
-- per row.  Partial on an empty list ('maximum'), as before.
showDots :: [YX] -> String
showDots yxs = unlines rows
  where
    grid = S.fromList yxs
    maxY = maximum (map fst yxs)
    maxX = maximum (map snd yxs)
    rows = [ [ if S.member (y, x) grid then '#' else ' '
             | x <- [0 .. maxX] ]
           | y <- [0 .. maxY] ]
-- | Part 1: the number of distinct dots after applying only the first
-- fold.  NOTE: 'head' assumes at least one fold in the input, and
-- 'nub' is quadratic -- both fine for puzzle-sized data.
solve1 :: String -> Int
solve1 =
      length
    . nub
    . (\(dots, folds) -> foldl' foldOnce dots [head folds])
    . parseOrDie dotsAndFolds
solve2 :: String -> String
solve2 =
fmap ((pat2char M.!) . (concat . take 4))
. divvy 4 5
. transpose
. lines
. showDots
. uncurry (foldl' foldOnce)
. parseOrDie dotsAndFolds
where
pat2char :: Map String Char
pat2char =
M.fromList $ zip
(fmap concat $ divvy 4 5 $ transpose $ drop 1 rawChars)
(fmap head $ divvy 1 5 $ head rawChars)
rawChars :: [String]
rawChars =
[ "A B C E F G H J K L P R U Z "
, " ## ### ## #### #### ## # # ## # # # ### ### # # ####"
, "# # # # # # # # # # # # # # # # # # # # # # #"
, "# # ### # ### ### # #### # ## # # # # # # # # "
, "#### # # # # # # ## # # # # # # ### ### # # # "
, "# # # # # # # # # # # # # # # # # # # # # # # "
, "# # ### ## #### # ### # # ## # # #### # # # ## ####"
]
| oshyshko/adventofcode | src/Y21/D13.hs | bsd-3-clause | 2,652 | 0 | 16 | 1,107 | 761 | 416 | 345 | -1 | -1 |
{-# LANGUAGE RebindableSyntax #-}
-- Copyright : (C) 2009 Corey O'Connor
-- License : BSD-style (see the file LICENSE)
import Bind.Marshal.Prelude
import Bind.Marshal.Verify
import Bind.Marshal.Action
import Bind.Marshal.DataModel.Base
import Bind.Marshal.DesAction.Base
import Bind.Marshal.DesAction.Dynamic
import Bind.Marshal.DesAction.Static
import Bind.Marshal.DesAction.Verify
import Bind.Marshal.StaticProperties
import Bind.Marshal.StdLib.Des
import Bind.Marshal.StdLib.Dynamic.FixedBuffer
import Bind.Marshal.Verify.Dynamic
import Control.DeepSeq
import "monads-tf" Control.Monad.Trans
import Data.IORef
import Foreign.Storable
import Foreign.Marshal.Alloc
import Foreign.Ptr
import System.IO
-- | Static deserialisation action: consume one 'Word32' (4 bytes)
-- and discard it.  (RebindableSyntax: this do-block desugars to the
-- parameterized-monad bind.)
t_static = do
    _ :: Word32 <- des
    return ()
-- | Dynamic action that deserialises @i@ 'Word32's, one per
-- 'dyn_action' step.
t_0 i = do
    replicateM i $ dyn_action $ do
        _ :: Word32 <- des
        return ()
-- | Property: 't_0' can deserialise an arbitrary (QuickCheck-chosen,
-- at most 1KB) number of units from the given scratch buffer.
validate_t_0 :: BytePtr -> Test TestResult
validate_t_0 buffer_0 =
    verify "deserializes an arbitrary number of units" $ \ (Max1KBInt i) -> liftIOResult $ do
        validate_t_0_inner buffer_0 i
-- NOINLINE keeps this out of the property lambda so it is compiled
-- once, not re-specialised per call site.
{-# NOINLINE validate_t_0_inner #-}
validate_t_0_inner :: BytePtr -> Int -> IO PropertyResult
validate_t_0_inner buffer_0 i = do
    -- Region sized exactly for i Word32 values (4 bytes each).
    let b = castPtr buffer_0
        des_buf = BufferRegion b (i * 4)
    des_buf_provider <- fixed_buffer des_buf
    vs <- des_from_buffer_delegate_ (dyn_action $! t_0 i) des_buf_provider
    -- deepseq forces the result before declaring success.
    deepseq vs $! returnM succeeded :: IO PropertyResult
-- | Five dynamic steps of one Word32 each (expected: 5 bufferings of
-- size 4).
t_1 = do
    forM_ [0..4] $ \(i :: Int) -> dyn_action $ do
        _ :: Word32 <- des
        return ()
    return ()
-- | Five dynamic steps of two Word32s each (expected: 5 bufferings of
-- size 8).
t_2 = do
    forM_ [0..4] $ \(i :: Int) -> dyn_action $ do
        _ :: Word32 <- des
        _ :: Word32 <- des
        return ()
    return ()
-- | A static Word32 followed by five dynamic steps.
t_3 = do
    _ :: Word32 <- des
    forM_ [0..4] $ \(i :: Int) -> dyn_action $ do
        _ :: Word32 <- des
        return ()
    return ()
-- | Five dynamic steps followed by a static Word32.
t_4 = do
    forM_ [0..4] $ \(i :: Int) -> dyn_action $ do
        _ :: Word32 <- des
        return ()
    _ :: Word32 <- des
    return ()
-- | One dynamic step then one static Word32 (expected: 2 bufferings of
-- size 4).
t_5 = do
    dyn_action $ do
        _ :: Word32 <- des
        return ()
    _ :: Word32 <- des
    return ()
-- Prints i dots; used only to exercise the test driver itself.
{-# NOINLINE replicate_print #-}
replicate_print i = do
    replicateM i $ do
        putStr "."
    returnM () :: IO ()
-- | Test driver: allocates one 4MB scratch buffer, runs the static
-- checks, then each dynamic deserialisation scenario (t_1..t_5 and the
-- t_0 property) against it, and finally frees the buffer.
main = run_test $ do
    buffer_0 <- liftIO $ mallocBytes (4 * 1024 * 1024) :: Test ( Ptr Word8 )
    verify1 "-" $ \ (Max1KBInt i) -> liftIOResult $ do
        replicate_print i
        returnM succeeded :: IO PropertyResult
    -- the first two tests assure the parameterized monad Bind typeing is correct.
    verify1 "t_static can deserialize from a memory block of 4 bytes" $ marshalled_byte_count t_static == 4
    verify1 "Deserializes t_static" $ liftIOResult $ do
        let b = castPtr buffer_0
        let des_buf = BufferRegion b 4
        ((), b_end') <- apply_des_to_fixed_buffer t_static des_buf
        returnM succeeded :: IO PropertyResult
    -- Now try some dynamic deserializations
    log_out "validated there are 5 bufferings of size 4" :: Test ()
    verify1 "t_1" $ liftIOResult $ do
        let b = castPtr buffer_0
            des_buf = BufferRegion b 20
        des_buf_provider <- logging_buffer_delegate =<< fixed_buffer des_buf
        () <- des_from_buffer_delegate_ (dyn_action $! t_1) des_buf_provider
        dump_request_log des_buf_provider
        -- verify_logged_requests des_buf_provider $ replicate 5 4
        returnM succeeded :: IO PropertyResult
    log_out "validated there are 5 bufferings of size 8" :: Test ()
    verify1 "t_2" $ liftIOResult $ do
        let b = castPtr buffer_0
            des_buf = BufferRegion b 40
        des_buf_provider <- logging_buffer_delegate =<< fixed_buffer des_buf
        () <- des_from_buffer_delegate_ (dyn_action $! t_2) des_buf_provider
        dump_request_log des_buf_provider
        -- verify_logged_requests des_buf_provider $ replicate 5 8
        returnM succeeded :: IO PropertyResult
    log_out "validated there is 1 buffering of size 4 followed by 5 of size 4" :: Test ()
    verify1 "t_3" $ liftIOResult $ do
        let b = castPtr buffer_0
            des_buf = BufferRegion b 24
        des_buf_provider <- logging_buffer_delegate =<< fixed_buffer des_buf
        () <- des_from_buffer_delegate_ (dyn_action $! t_3) des_buf_provider
        dump_request_log des_buf_provider
        -- verify_logged_requests des_buf_provider $ replicate (1 + 5) 4
        returnM succeeded :: IO PropertyResult
    log_out "validated there is 5 bufferings of size 4 followed by 1 of size 4" :: Test ()
    verify1 "t_4" $ liftIOResult $ do
        let b = castPtr buffer_0
            des_buf = BufferRegion b 24
        des_buf_provider <- logging_buffer_delegate =<< fixed_buffer des_buf
        () <- des_from_buffer_delegate_ (dyn_action $! t_4) des_buf_provider
        dump_request_log des_buf_provider
        -- verify_logged_requests des_buf_provider $ replicate (5 + 1) 4
        returnM succeeded :: IO PropertyResult
    log_out "validated there is 2 bufferings of size 4" :: Test ()
    verify1 "t_5" $ liftIOResult $ do
        let b = castPtr buffer_0
            des_buf = BufferRegion b 8
        des_buf_provider <- logging_buffer_delegate =<< fixed_buffer des_buf
        () <- des_from_buffer_delegate_ (dyn_action $! t_5) des_buf_provider
        dump_request_log des_buf_provider
        -- verify_logged_requests des_buf_provider $ replicate 2 4
        returnM succeeded :: IO PropertyResult
    validate_t_0 buffer_0
    liftIO $ free buffer_0 :: Test()
    returnM () :: Test ()
| coreyoconnor/bind-marshal | test/verify_desaction_dynamic_fixed_buffer.hs | bsd-3-clause | 5,533 | 0 | 14 | 1,397 | 1,461 | 692 | 769 | -1 | -1 |
{-# LANGUAGE ScopedTypeVariables,
TypeFamilies,
EmptyDataDecls,
FlexibleInstances #-}
{- Joel Svensson 2013 -}
module Obsidian.Mutable ( Mutable(Mutable)
, Shared
, Global
, newS
, forceTo
, writeTo
, pullFrom
, atomicInc
-- , mutlen -- hack
, namedMutable
, undefinedMutable
) where
import Obsidian.Exp
import Obsidian.Types
import Obsidian.Globs
import Obsidian.Program
import Obsidian.Memory
import Obsidian.Names
import Obsidian.Array
import Obsidian.Atomic
import Data.Word
{-
Todo: Think about Global vs Shared.
Todo: Add creation of mutable global arrays.
Todo: Make mutable interface (atomic ops) very low-level
TODO: Rethink. Have two sepparate types of mutable arrays.
Also Skip the Type family magic if possible.
Make both kinds of Mutable arrays an instance of Array
-}
---------------------------------------------------------------------------
-- Mutable arrays
---------------------------------------------------------------------------
--
-- Global mutable arrays can only be passed as inputs to a function.
-- Shared mutable arrays may be created using newS
--
-- Phantom tags: where the array memory lives.
data Shared
data Global
-- A mutable array has an attached location.
-- Either it resides in Global or in Shared memory.
-- @s@ is the length type, @a@ the element type.
data Mutable mloc s a = Mutable s (Names a)
-- Shared arrays have a compile-time length; global ones a runtime one.
type MShared a = Mutable Shared Word32 a
type MGlobal a = Mutable Global EWord32 a
-- | Shared-memory mutables carry their (static) length directly.
instance ArrayLength (Mutable Shared) where
  len (Mutable n _) = n
-- | Wrap an already-existing named array (single name @s@, length @v@).
namedMutable s v = Mutable v (Single s)
-- | A mutable of length @v@ with no names yet; forcing the names field
-- is an error, so this is only a placeholder.
undefinedMutable v = Mutable v undefined
---------------------------------------------------------------------------
-- Create Mutable Shared memory arrays
-- # allocates shared memory
---------------------------------------------------------------------------
-- | Allocate a fresh shared-memory array sized to the push array and
-- write the push array into it (no barrier; see 'forceTo').
newS :: Storable a => SPush Block a -> Program Block (Mutable Shared Word32 a)
newS arr = do
  (snames :: Names a) <- names "arr"
  allocateArray snames n
  let mut = Mutable n snames
  writeTo mut arr
  return $ mut -- Mutable n snames
  where
    n = len arr
---------------------------------------------------------------------------
-- forceTo & writeTo
---------------------------------------------------------------------------
-- Much Hacking here
-- | Write a push array into a shared mutable, element by element, with
-- no synchronisation afterwards.
--
-- NOTE(review): the guard accepts when the *target* length @n@ is at
-- most the push length @m@, yet the push writes @m@ elements -- this
-- looks inverted (expected @m <= n@).  'newS' only ever calls it with
-- @n == m@, so the suspicion cannot be confirmed from here.
writeTo :: Storable a
           => Mutable Shared Word32 a
           -> Push Block Word32 a
           -> Program Block ()
writeTo (Mutable n snames) p
  | n <= m = p <: assignArray snames
  | otherwise = error "WriteTo: Incompatible sizes"
  where
    m = len p
-- Add forceTo with offsets (why? just thought it might be useful)
-- | 'writeTo' followed by a barrier, so the written data is visible to
-- the whole block afterwards.
forceTo :: Storable a
           => Mutable Shared Word32 a
           -> Push Block Word32 a
           -> Program Block ()
forceTo m arr = writeTo m arr >> Sync
---------------------------------------------------------------------------
-- pullFrom
---------------------------------------------------------------------------
-- | View a shared mutable as a pull array (no copy).
-- NOTE(review): the export list advertises @pullFrom@ (the imported
-- primitive) while this wrapper is named 'toPull' and is not exported.
toPull :: Storable a => Mutable Shared Word32 a -> SPull a
toPull (Mutable n snames) = pullFrom snames n
---------------------------------------------------------------------------
-- Atomics
---------------------------------------------------------------------------
-- | Increment atomically.  Applied to every name of the mutable; the
-- value returned by the underlying atomicOp is discarded.
atomicInc :: forall mloc a s t . AtomicInc a
             => EWord32
             -> Mutable mloc s (Exp a)
             -> TProgram ()
atomicInc ix (Mutable n noms) = mapNamesM_ f noms
  where
    f nom = atomicOp nom ix (AtomicInc :: Atomic a) >> return ()
-- | Add @v@ atomically at index @ix@.
atomicAdd :: forall mloc a s. AtomicAdd a
             => EWord32
             -> Exp a
             -> Mutable mloc s (Exp a)
             -> TProgram ()
atomicAdd ix v (Mutable n noms) = mapNamesM_ f noms
  where
    f nom = atomicOp nom ix (AtomicAdd v) >> return ()
-- | Subtract @v@ atomically at index @ix@.
atomicSub :: forall mloc a s. AtomicSub a
             => EWord32
             -> Exp a
             -> Mutable mloc s (Exp a)
             -> TProgram ()
atomicSub ix v (Mutable n noms) = mapNamesM_ f noms
  where
    f nom = atomicOp nom ix (AtomicSub v) >> return ()
-- | Exchange atomically.
-- NOTE(review): unlike the others this pattern-matches 'Single' only,
-- so it is partial for multi-name mutables -- confirm intended.
atomicExch :: forall mloc a s. AtomicExch a
             => EWord32
             -> Exp a
             -> Mutable mloc s (Exp a)
             -> TProgram ()
atomicExch ix v (Mutable n (Single nom)) = f nom
  where
    f nom = atomicOp nom ix (AtomicExch v)
{-
---------------------------------------------------------------------------
atomicExch()
int atomicExch(int* address, int val);
unsigned int atomicExch(unsigned int* address,
unsigned int val);
unsigned long long int atomicExch(unsigned long long int* address,
unsigned long long int val);
float atomicExch(float* address, float val);
---------------------------------------------------------------------------
atomicMin()
int atomicMin(int* address, int val);
unsigned int atomicMin(unsigned int* address,
unsigned int val);
unsigned long long int atomicMin(unsigned long long int* address,
unsigned long long int val);
---------------------------------------------------------------------------
atomicMax()
int atomicMax(int* address, int val);
unsigned int atomicMax(unsigned int* address,
unsigned int val);
unsigned long long int atomicMax(unsigned long long int* address,
unsigned long long int val);
---------------------------------------------------------------------------
atomicInc()
unsigned int atomicInc(unsigned int* address,
unsigned int val);
---------------------------------------------------------------------------
atomicDec()
unsigned int atomicDec(unsigned int* address,
unsigned int val);
---------------------------------------------------------------------------
atomicCAS()
int atomicCAS(int* address, int compare, int val);
unsigned int atomicCAS(unsigned int* address,
unsigned int compare,
unsigned int val);
unsigned long long int atomicCAS(unsigned long long int* address,
unsigned long long int compare,
unsigned long long int val);
---------------------------------------------------------------------------
atomicAnd()
int atomicAnd(int* address, int val);
unsigned int atomicAnd(unsigned int* address,
unsigned int val);
unsigned long long int atomicAnd(unsigned long long int* address,
unsigned long long int val);
---------------------------------------------------------------------------
atomicOr()
int atomicOr(int* address, int val);
unsigned int atomicOr(unsigned int* address,
unsigned int val);
unsigned long long int atomicOr(unsigned long long int* address,
unsigned long long int val);
---------------------------------------------------------------------------
atomicXor()
int atomicXor(int* address, int val);
unsigned int atomicXor(unsigned int* address,
unsigned int val);
unsigned long long int atomicXor(unsigned long long int* address,
unsigned long long int val);
-}
| svenssonjoel/ObsidianGFX | Obsidian/Mutable.hs | bsd-3-clause | 7,655 | 0 | 12 | 2,141 | 983 | 506 | 477 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.DNS.Types where
import qualified Control.Monad.State as S
import Data.Binary
import Data.Binary.Get ()
import Data.Binary.Put
import qualified Data.ByteString.Lazy as BL
import Data.Char (isSpace)
import Data.IntMap (IntMap)
import qualified Data.IntMap as IM
import Data.List.Split (splitOn)
import qualified Data.Map as M
import Data.Maybe (mapMaybe)
-- | A domain name in wire form, kept as a lazy ByteString.
type Domain = BL.ByteString
-- | DNS resource-record types; codes outside the named set are kept
-- verbatim in 'UNKNOWN'.
data TYPE = A | AAAA | NS | TXT | MX | CNAME | SOA | PTR | SRV
          | UNKNOWN Int deriving (Eq, Show, Read)
-- | A name-compression pointer: message offset plus the domain it
-- refers to.
data Pointer = Pointer { pOffset :: Int, pRecord :: Domain }
             deriving (Show, Eq)
-- | Maps record types to their numeric wire codes (the values used in
-- DNS messages).  Note the round trip is lossy for 'UNKNOWN' carrying
-- a known code, e.g. @toEnum (fromEnum (UNKNOWN 1)) == A@.
instance Enum TYPE where
    fromEnum A = 1
    fromEnum NS = 2
    fromEnum CNAME = 5
    fromEnum SOA = 6
    fromEnum PTR = 12
    fromEnum MX = 15
    fromEnum TXT = 16
    fromEnum AAAA = 28
    fromEnum SRV = 33
    fromEnum (UNKNOWN x) = x
    toEnum 1 = A
    toEnum 2 = NS
    toEnum 5 = CNAME
    toEnum 6 = SOA
    toEnum 12 = PTR
    toEnum 15 = MX
    toEnum 16 = TXT
    toEnum 28 = AAAA
    toEnum 33 = SRV
    toEnum x = UNKNOWN x
-- | DNS header opcodes; unhandled codes are preserved in 'Reserved'.
data OpCode = Query
            | IQuery
            | ServerStatus
            | Reserved Int
            deriving (Show, Eq)
-- | Wire codes 0-2; everything else round-trips through 'Reserved'.
instance Enum OpCode where
    fromEnum Query = 0
    fromEnum IQuery = 1
    fromEnum ServerStatus = 2
    fromEnum (Reserved r) = r
    toEnum 0 = Query
    toEnum 1 = IQuery
    toEnum 2 = ServerStatus
    toEnum r = Reserved r
-- | An IPv4 address as its octets.  NOTE(review): neither the length
-- (4) nor the octet range (0-255) is enforced by this alias.
type IPV4 = [Int]
-- | Parse a whole string as an 'Int'.
--
-- Fixed: the previous version accepted any trailing garbage
-- (@tryParse "3x" == Just 3@), which let 'makeIPV4' silently accept
-- malformed octets.  Now only (possibly whitespace-padded) numerals
-- parse; anything else yields 'Nothing'.
tryParse :: String -> Maybe Int
tryParse s = case reads s of
  [(i, rest)] | all isSpace rest -> Just i
  _                              -> Nothing
-- | Split a dotted-quad string on \'.\' and keep the octets that parse.
-- NOTE(review): unparseable octets are silently dropped by 'mapMaybe'
-- and the result length is not checked, so e.g. @"1.2.x.4"@ yields
-- @[1,2,4]@ -- callers must validate separately if they care.
makeIPV4 :: String -> IPV4
makeIPV4 = mapMaybe tryParse . splitOn "."
-- | Response codes 0-5, in wire order (the derived 'Enum' supplies the
-- numeric values).
data RCODE = NoErr | FormatErr | ServFail | NameErr | NotImpl | Refused deriving (Eq, Show, Enum)
-- | Decoded RDATA payloads for the record types this module handles.
data RData = RDA IPV4
           | RDNS Domain
           | RDCName Domain
           | RDPtr Domain
           deriving (Show, Eq)
-- | The flag bits of a DNS header (response bit, opcode, AA/TC/RD/RA,
-- and the response code).
data DNSFlags = DNSFlags {
    flResp :: !Bool
  , flOpcode :: !OpCode
  , flAa :: !Bool
  , flTc :: !Bool
  , flRd :: !Bool
  , flRa :: !Bool
  , flRcode :: !RCODE
  } deriving (Show, Eq)
-- | A DNS message header: id, flags, and the four section counts.
data DNSHeader = DNSHeader {
    dnsID :: !Word16
  , flags :: !DNSFlags
  , qdcount :: !Int
  , ancount :: !Int
  , nscount :: !Int
  , arcount :: !Int
  } deriving (Show, Eq)
-- | One entry of the question section.
data DNSQuestion = DNSQuestion {
    qname :: !Domain
  , qtype :: !TYPE
  , qclass :: !Int
  } deriving (Show, Eq)
-- | One resource record (answer/authority/additional sections).
data DNSResource = DNSResource {
    rname :: !Domain
  , rtype :: !TYPE
  , rclass :: !Int
  , rttl :: !Int
  , rdlength :: !Int
  , rdata :: !RData
  } deriving (Show, Eq)
-- | A complete DNS message: header plus its four sections.
data DNSPacket = DNSPacket {
    header :: !DNSHeader
  , questions :: ![DNSQuestion]
  , answers :: ![DNSResource]
  , authority :: ![DNSResource]
  , additional :: ![DNSResource]
  } deriving (Show, Eq)
-- States for serialisation

-- | Writer state for name compression: domains already emitted and
-- their offsets, plus the current write position.
data WState = WState {
    wsMap :: M.Map Domain Int
  , wsPosition :: Int
  } deriving (Eq, Show)
-- | Reader state: the inverse map (offset -> domain) plus the current
-- read position, used to resolve compression pointers.
data RState = RState {
    rsMap :: IntMap Domain
  , rsPosition :: Int
  } deriving (Eq, Show)
-- | Initial (empty) reader state.
rstate :: RState
rstate = RState IM.empty 0
-- | Initial (empty) writer state.
wstate :: WState
wstate = WState M.empty 0
-- StateT's for the above
type SGet a = S.StateT RState Get a
type SPut = S.StateT WState PutM ()
| compters/LearnDNS | Network/DNS/Types.hs | bsd-3-clause | 3,304 | 0 | 10 | 1,047 | 1,071 | 601 | 470 | 170 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Language.Cynide.Parser where
import Control.Monad ( liftM, liftM2 )
import Control.Monad.Identity ( Identity )
import Data.Either ( partitionEithers)
import Data.Maybe ( fromMaybe )
import Text.Parsec
import Text.Parsec.Expr hiding (Operator)
import qualified Text.Parsec.Expr as E
import Text.Parsec.Token hiding (GenTokenParser(..))
import qualified Text.Parsec.Token as T
import Language.Cynide.Syntax
-- --------------------
-- | A parser over stream @s@ with no user state.
type P s a = ParsecT s () Identity a
-- | An expression-table operator specialised to our parser monad.
type Operator s a = E.Operator s () Identity a
-- | Operator precedence table (outer list = precedence levels).
type OpTable s a = [[Operator s a]]
-- --------------------
cynide :: Stream s Identity Char => T.GenTokenParser s () Identity
cynide = makeTokenParser cynideDef
cynideDef :: Stream s Identity Char => GenLanguageDef s () Identity
cynideDef
= LanguageDef
{ commentStart = "/*"
, commentEnd = "*/"
, commentLine = "//"
, nestedComments = True
, identStart = letter <|> char '_'
, identLetter = alphaNum <|> oneOf "_$"
, opStart = oneOf "+-!~&|^*/%><=?:"
, opLetter = oneOf "+-!~&|^*/%><=?:"
, reservedNames = cynideKeywords
, reservedOpNames = cynideOps
, caseSensitive = True
}
-- | Run a parser and skip trailing white space.
-- The type signature now precedes the binding, consistent with every
-- other token-parser wrapper in this module (the original had them in
-- the opposite order, which GHC accepts but is confusing to read).
lexeme :: (Stream s Identity Char) => P s a -> P s a
lexeme = T.lexeme cynide
-- The following are thin wrappers around the shared token parser
-- 'cynide'; each handles trailing white space for its token class.
whiteSpace :: Stream s Identity Char => P s ()
whiteSpace = T.whiteSpace cynide
identifier :: Stream s Identity Char => P s String
identifier = T.identifier cynide
reserved :: Stream s Identity Char => String -> P s ()
reserved = T.reserved cynide
reservedOp :: Stream s Identity Char => String -> P s ()
reservedOp = T.reservedOp cynide
-- | Like 'T.symbol' but discards the matched string.
symbol :: Stream s Identity Char => String -> P s ()
symbol x = T.symbol cynide x >> return ()
stringLiteral :: Stream s Identity Char => P s String
stringLiteral = T.stringLiteral cynide
-- integer :: P s Integer
-- integer = T.integer cynide
parens, brackets, braces :: Stream s Identity Char => P s a -> P s a
parens = T.parens cynide
brackets = T.brackets cynide
braces = T.braces cynide
-- Punctuation parsers, all discarding their result.
comma, semi, colon, dot :: Stream s Identity Char => P s ()
comma = T.comma cynide >> return ()
semi = T.semi cynide >> return ()
colon = T.colon cynide >> return ()
dot = T.dot cynide >> return ()
commaSep, commaSep1 :: Stream s Identity Char => P s a -> P s [a]
commaSep = T.commaSep cynide
commaSep1 = T.commaSep1 cynide
-- http://www.hdlworks.com/hdl_corner/cynide_ref/index.html
-- | Reserved words.  Judging by the parsers below: m = module,
-- p = parameter, i = input, o = output, c = connect, io = inout,
-- r = reg, w = wire, n = instance.
cynideKeywords :: [String]
cynideKeywords
  = [ "m", "p", "i", "o", "c", "io", "r", "w", "n"]
-- | All reserved operators, used as @reservedOpNames@ so 'reservedOp'
-- rejects a proper prefix of a longer operator.
--
-- Fixed: the list previously contained the typo @\"!===\"@, but the
-- operator actually parsed by 'opTable' is @\"!==\"@ (case inequality);
-- duplicated entries have also been removed (the list has set
-- semantics for the token parser, so this is behaviour-neutral).
cynideOps :: [String]
cynideOps
  = [ "+", "-", "!", "~", "&", "~&", "|", "~|", "^", "~^", "^~"
    , "*", "/", "%", ">", ">=", "<", "<="
    , "&&", "||", "==", "!=", "===", "!=="
    , "<<", ">>", "<<<", ">>>"
    , "?", ":", "->", "**" ]
-- --------------------
-- | Parse a complete source file: leading white space, any number of
-- module descriptions, then end of input.
cynideFile :: Stream s Identity Char => P s Cynide
cynideFile =
  do whiteSpace
     modules <- many module_description
     eof
     return (Cynide modules)
-- | Parse one module: keyword @m@, a name, and a braced item list.
module_description :: Stream s Identity Char => P s Module
module_description =
  reserved "m" >> liftM2 Module ident (braces (many module_item))
-- | Parse one item of a module body; tries each declaration form in
-- turn.
module_item :: Stream s Identity Char => P s Item
module_item
  = liftM ParamDeclItem parameter_declaration <|>
    liftM InputDeclItem input_declaration <|>
    liftM OutputDeclItem output_declaration <|>
    liftM InOutDeclItem inout_declaration <|>
    liftM ConnectDeclItem connect_declaration <|>
    liftM InstDeclItem instance_declaration
    <?> "module item"
-- | Parse a parameter declaration: keyword @p@ followed by a non-empty
-- comma-separated assignment list.
parameter_declaration :: Stream s Identity Char => P s ParamDecl
parameter_declaration
  = do reserved "p"
       param_assigns <- (commaSep1 parameter_assignment)
                        <?> "parameter list"
       return (ParamDecl param_assigns)
    <?> "parameter declaration"
-- | Parse an input declaration: keyword @i@, optional port type, a
-- name, and two optional constant expressions (presumably length and
-- width — TODO confirm against the AST consumers).
input_declaration :: Stream s Identity Char => P s InputDecl
input_declaration
  = do reserved "i"
       pt <- optionMaybe portType
       xs <- ident
       l <- optionMaybe const_expr
       w <- optionMaybe const_expr
       return (InputDecl pt xs l w)
    <?> "input declaration"
-- | Same shape as 'input_declaration', introduced by keyword @o@.
output_declaration :: Stream s Identity Char => P s OutputDecl
output_declaration
  = do reserved "o"
       pt <- optionMaybe portType
       xs <- ident
       l <- optionMaybe const_expr
       w <- optionMaybe const_expr
       return (OutputDecl pt xs l w)
    <?> "output declaration"
-- | Same shape as 'input_declaration', introduced by keyword @io@.
inout_declaration :: Stream s Identity Char => P s InOutDecl
inout_declaration
  = do reserved "io"
       pt <- optionMaybe portType
       xs <- ident
       l <- optionMaybe const_expr
       w <- optionMaybe const_expr
       return (InOutDecl pt xs l w)
    <?> "inout declaration"
-- | Parse a connect declaration: keyword @c@, a source node, and zero
-- or more destination nodes.
connect_declaration :: Stream s Identity Char => P s ConnectDecl
connect_declaration
  = do reserved "c"
       src <- connect_node
       drs <- many connect_node
       return (ConnectDecl src drs)
    <?> "connect declaration"
-- | Parse a node reference with up to two optional bracketed ranges.
connect_node :: Stream s Identity Char => P s ConnectNode
connect_node
  = do node <- ident
       r1 <- optionMaybe range
       r2 <- optionMaybe range
       return (ConnectNode node r1 r2)
    <?> "connect node"
-- | Parse a pair of connected nodes inside an instantiation.
interconnect_declaration :: Stream s Identity Char => P s InterConDel
interconnect_declaration = liftM2 InterConDel connect_node connect_node
-- | Parse an instance declaration: keyword @n@, the instance name, the
-- referenced module name, and an optional parenthesised connection
-- list.
instance_declaration :: Stream s Identity Char => P s InstDecl
instance_declaration = do
  reserved "n"
  instName <- ident
  refName <- ident
  conns <- optionMaybe (parens (many interconnect_declaration))
  return (InstDecl instName refName conns)
-- -----------------------------------------------------------------------------
-- expressions
-- | Constant expressions are parsed exactly like ordinary expressions;
-- the distinction is only documentary.
const_expr :: Stream s Identity Char => P s Expression
const_expr = expression
             <?> "constant expression"
-- | Parse an expression, handling the right-associative ternary
-- conditional @e1 ? e2 : e3@ on top of the operator grammar.
expression :: Stream s Identity Char => P s Expression
expression
  = do e1 <- expression'
       choice [ do symbol "?"
                   e2 <- expression
                   symbol ":"
                   e3 <- expression
                   return (ExprCond e1 e2 e3)
              , return e1
              ]
-- | Operator-grammar expression (everything below the ternary).
expression' :: Stream s Identity Char => P s Expression
expression'
  = buildExpressionParser opTable factor <?> "expression"
  where
    -- Atomic operands of the operator grammar.
    factor
      = choice [ parens expression
               , ident >>= expr_ident
               , expr_number
               , expr_string
               , expr_concat
               ]
        <?> "factor"
-- parse an expression that starts with an identifier
-- | Given an already-parsed identifier, decide between a function
-- call, an indexed/sliced access, or a plain variable reference.
expr_ident :: Stream s Identity Char => Ident -> P s Expression
expr_ident x
  = liftM (ExprFunCall x) (parens (commaSep expression)) <|>
    (brackets $
     do e <- expression
        -- for ExprSlice, 'e' is actually a constant expression,
        -- but const_expr = expression, so it does not matter.
        choice [ colon >> liftM (ExprSlice x e) const_expr
               , symbol "+:" >> liftM (ExprSlicePlus x e) const_expr
               , symbol "-:" >> liftM (ExprSliceMinus x e) const_expr
               , return (ExprIndex x e)
               ]) <|>
    return (ExprVar x)
-- | Precedence table for 'buildExpressionParser'; the first group
-- binds tightest.  All binary operators are left-associative (see
-- 'binaryOp').
--
-- NOTE(review): several symbols (@&@, @|@, @^@, @^~@, @~^@, @~|@)
-- appear both as prefix reduction operators and as binary operators in
-- the same precedence level; this relies on how
-- 'buildExpressionParser' resolves that overlap — verify with tests.
opTable :: Stream s Identity Char => OpTable s Expression
opTable
  = [ [ unaryOp "+" UPlus
      , unaryOp "-" UMinus
      , unaryOp "!" UBang
      , unaryOp "~" UTilde
      ]
    , [ binaryOp "*" Times
      , binaryOp "/" Divide
      , binaryOp "%" Modulo
      , binaryOp "**" Pow
      ]
    , [ binaryOp "+" Plus
      , binaryOp "-" Minus
      ]
    -- TODO <<< and >>> operators
    , [ binaryOp "<<" ShiftLeft
      , binaryOp ">>" ShiftRight
      ]
    , [ binaryOp "<" LessThan
      , binaryOp "<=" LessEqual
      , binaryOp ">" GreaterThan
      , binaryOp ">=" GreaterEqual
      ]
    , [ binaryOp "==" Equals
      , binaryOp "!=" NotEquals
      , binaryOp "===" CEquals
      , binaryOp "!==" CNotEquals
      ]
    , [ unaryOp "&" UAnd
      , unaryOp "~&" UNand
      , binaryOp "&" And
      ]
    , [ unaryOp "^" UXor
      , unaryOp "^~" UXnor
      , unaryOp "~^" UXnor
      , binaryOp "^" Xor
      , binaryOp "^~" Xnor
      , binaryOp "~^" Xnor
      ]
    , [ unaryOp "|" UOr
      , unaryOp "~|" UNor
      , binaryOp "|" Or
      , binaryOp "~|" Nor
      ]
    , [ binaryOp "&&" LAnd ]
    , [ binaryOp "||" LOr ]
    ]
-- | Table entry for a prefix operator: parse the symbol, yield the
-- AST constructor.
unaryOp :: Stream s Identity Char => String -> UnaryOp -> Operator s Expression
unaryOp sym con = Prefix (liftM (const (ExprUnary con)) (reservedOp sym))
-- | Table entry for a left-associative infix operator.
binaryOp :: Stream s Identity Char => String -> BinaryOp -> Operator s Expression
binaryOp sym con =
  Infix (liftM (const (ExprBinary con)) (reservedOp sym)) AssocLeft
-- | Numeric literal expression.
expr_number :: Stream s Identity Char => P s Expression
expr_number
  = liftM ExprNum number
{- syntax for numbers:
     [ sign ] [ size ] [ 'base ] value            // integer
     [ sign ] value[.value] [ sign ] baseExponent // real
   where an integer value is allowed to have some subset of
   "0123456789abcdefABCDEFxXzZ?_", depending on the base,
   and a real value contains only decimal characters: "0123456789".
-}
-- | String literal expression.
expr_string :: Stream s Identity Char => P s Expression
expr_string
  = liftM ExprString stringLiteral
-- | Concatenation @{e1, e2, ...}@ or replication @{e {e1, e2, ...}}@.
-- After the opening brace and first expression, a comma means plain
-- concatenation; an inner braced list means replication, with the
-- first expression as the repeat count.
expr_concat :: Stream s Identity Char => P s Expression
expr_concat
  = do symbol "{"
       e <- expression
       choice [ do comma
                   es <- commaSep expression
                   symbol "}"
                   return (ExprConcat (e:es))
              , do es <- braces (commaSep expression)
                   symbol "}"
                   return (ExprMultiConcat e es)
              ]
-- | Parse a numeric literal: an optional sign, then either a based
-- integer (e.g. @'hFF@) or a decimal run that may turn out to be a
-- sized based integer, a plain integer, or the integral part of a
-- real.
number :: Stream s Identity Char => P s Number
number
  = do { s <- optionMaybe sign
       ; whiteSpace
       ; base_integral s Nothing <|>
         do n <- decimal_number
            whiteSpace
            -- n could be the size of an integral, the integral value itself,
            -- or the integral part of a real.
            base_integral s (Just n) <|> real_number s n
       }
  where
    -- A based integer: base marker then digits valid for that base.
    base_integral maybe_sign maybe_size
      = do b <- base
           whiteSpace
           x <- digits b
           whiteSpace
           return (IntNum maybe_sign maybe_size (Just b) x)
    -- given the optional sign and the integral part, parse the remainder of a
    -- real number, or yield an integer.
    real_number maybe_sign int_value
      = choice [ do maybe_fractional <- optionMaybe (dot >> decimal_number)
                    whiteSpace
                    maybe_exponent <- optionMaybe $
                      do _ <- oneOf "eE"
                         s <- optionMaybe sign
                         e <- decimal_number
                         return (s, e)
                    case (maybe_fractional, maybe_exponent) of
                      (Nothing, Nothing)
                        -> return $ IntNum maybe_sign Nothing Nothing int_value
                      _ -> return $ RealNum maybe_sign int_value
                                      maybe_fractional maybe_exponent
               ]
-- | A run of decimal digits (underscores allowed).
decimal_number :: Stream s Identity Char => P s String
decimal_number = digits DecBase
-- | Digits valid in the given base; x/X, z/Z, ? and _ are accepted in
-- the based forms.
digits :: Stream s Identity Char => Base -> P s String
digits BinBase
  = many1 (oneOf "01xXzZ?_") <?> "binary digit"
digits OctBase
  = many1 (oneOf "01234567xXzZ?_") <?> "octal digit"
digits HexBase
  = many1 (oneOf "0123456789abcdefABCDEFxXzZ?_") <?> "hexadecimal digit"
digits DecBase
  = many1 (oneOf "0123456789_") <?> "decimal digit"
-- | Numeric sign.
sign :: Stream s Identity Char => P s Sign
sign = (symbol "+" >> return Pos) <|>
       (symbol "-" >> return Neg)
-- | Base marker: a tick followed by b/o/d/h (either case).
base :: Stream s Identity Char => P s Base
base = do { _ <- char '\''
          ; (oneOf "bB" >> return BinBase) <|>
            (oneOf "oO" >> return OctBase) <|>
            (oneOf "dD" >> return DecBase) <|>
            (oneOf "hH" >> return HexBase)
          } <?> "base"
-- -----------------------------------------------------------------------------
-- miscellaneous
-- | An identifier wrapped in the AST's 'Ident' constructor.
ident :: Stream s Identity Char => P s Ident
ident = liftM Ident identifier
-- | Parse a port direction: @i@ -> 'Input', @o@ -> 'Output',
-- @io@ -> 'InOut'.
--
-- Fixed: the 'InOut' alternative previously re-parsed @\"i\"@, which
-- could never succeed — the first alternative already matches a lone
-- @i@, and @reserved \"i\"@ fails on the input @io@ (the trailing
-- letter violates the not-followed-by check).  It now parses the
-- @io@ keyword, matching 'inout_declaration'.
portDir :: Stream s Identity Char => P s PortDir
portDir = (reserved "i" >> return (PortDir Input)) <|>
          (reserved "o" >> return (PortDir Output)) <|>
          (reserved "io" >> return (PortDir InOut)) <?> "port direction"
-- | Parse a port type: @w@ -> wire, @r@ -> reg.
portType :: Stream s Identity Char => P s PortType
portType = choice
  [ reserved "w" >> return (PortType Wire)
  , reserved "r" >> return (PortType Reg) ]
-- | Parse a parameter assignment: a name followed by a constant
-- expression.
parameter_assignment :: Stream s Identity Char => P s ParamAssign
parameter_assignment = liftM2 ParamAssign ident const_expr
-- | Parse a bracketed range @[e]@ or @[e1:e2]@.
range :: Stream s Identity Char => P s Range
range
  = brackets $ do e1 <- const_expr
                  e2 <- optionMaybe halfRange
                  return (Range e1 e2)
-- | The optional second half of a range: a colon followed by a
-- constant expression.  (Simplified from the redundant
-- @do { colon; e <- const_expr; return e }@.)
halfRange :: Stream s Identity Char => P s ConstExpr
halfRange = colon >> const_expr
-- -----------------------------------------------------------------------------
| githubkleon/ConvenientHDL | src/Language/Cynide/Parser.hs | bsd-3-clause | 13,044 | 0 | 16 | 3,813 | 3,776 | 1,876 | 1,900 | 314 | 2 |
{-# LANGUAGE GADTs, KindSignatures, RankNTypes, StandaloneDeriving, FlexibleInstances, GeneralizedNewtypeDeriving, FlexibleContexts #-}
module Packet where
import Control.Monad
import Command
import Procedure
-- | A packet: a batch of 'Command's optionally followed by one
-- 'Procedure' whose result (if present) has type @a@.
data Packet a = Packet [Command] (Maybe (Procedure a))
  deriving Show
| ku-fpg/remote-monad-examples | classic-examples/Packet/Packet.hs | bsd-3-clause | 298 | 0 | 10 | 50 | 46 | 28 | 18 | 7 | 0 |
{-# LANGUAGE OverloadedStrings #-}
module Cda.Parser where
import qualified Data.Text as T
import Text.XML.Expat.Tree
-- import Text.XML.Expat.Format
import Text.XML.Expat.Proc
import System.Environment
import System.Exit
import System.IO
import qualified Data.ByteString.Lazy as L
import Data.Maybe ( fromJust, maybe )
import Control.Monad
import Data.Monoid
-- | 'Nothing' for an empty 'T.Text', 'Just' the text otherwise.
-- Uses 'T.null' (O(1)) instead of comparing 'T.length' with 0, which
-- walks the whole text.
strM::T.Text -> Maybe T.Text
strM s = if T.null s then Nothing else Just s
---------------------------
-- | Text content of the named child element, or 'Nothing' when the
-- child is absent or its content is empty.
tagTextM:: UNode T.Text -> T.Text -> Maybe T.Text
tagTextM node stag = strM . textContent =<< findChild stag node
----------------------------
-- | Value of the named attribute on the named child element, if both
-- exist.
attrTextM:: UNode T.Text -> T.Text -> T.Text -> Maybe T.Text
attrTextM node stag sattr =
  findChild stag node >>= \child -> getAttribute child sattr
----------------------------
-- | Title of a clinical document: the non-empty @title@ element if
-- present, else the @displayName@ attribute of @code@, else a fixed
-- default.
--
-- Rewritten without the misleading @do@ block (this is pure code) and
-- without the partial 'fromJust': the chain is now eliminated totally,
-- with the constant default as fallback.
viewTitle:: UNode T.Text -> T.Text
viewTitle node = maybe defTitle id (tagTitle `mplus` codeName)
  where
    defTitle = "Clinical document" :: T.Text
    tagTitle = tagTextM node "title"
    codeName = attrTextM node "code" "displayName"
----------------------------
-- | The section components of a CDA document, i.e. all @component@
-- elements found via component/structuredBody/component (the Kleisli
-- chain reads right to left).
getCsection:: UNode T.Text -> [UNode T.Text]
getCsection = findElements "component" <=< findChild "structuredBody" <=< findChild "component"
map set
renderCda:: UNode T.Text -> T.Text
renderCda =
| SLikhachev/pureCdaViewer | snaplets/cda/src/Cda/Parser.hs | bsd-3-clause | 1,280 | 1 | 13 | 194 | 390 | 209 | 181 | -1 | -1 |
module PFDS.Sec8.Ex1 where
-- RedBlackTree with delete (batched rebuilding)
-- | Node colour of the red-black tree.
data Color = R | B deriving (Show, Eq)
-- | Vitality tag for batched deletion: 'L' = live, 'D' = logically
-- deleted (the node stays in the structure until a rebuild).
data Vital = L | D deriving (Show, Eq)
-- | Red-black tree whose nodes carry a vitality tag.
data Tree e = E | T Color (Tree e) e (Tree e) Vital deriving (Show, Eq)
-- | Tree plus bookkeeping counters used to decide when to rebuild.
data RBTree e = RBTree (Tree e) Livings Deads deriving (Show, Eq)
type Livings = Int
type Deads = Int
-- | The empty tree, with both counters at zero.
empty :: RBTree e
empty = RBTree E 0 0
-- | Standard BST membership test.  The vitality tag is ignored, so an
-- element that was deleted (marked 'D') but not yet rebuilt away is
-- still reported as a member.
member :: Ord e => e -> RBTree e -> Bool
member x (RBTree t _ _) = go t
  where
    go E = False
    go (T _ l y r _)
      | x < y     = go l
      | x > y     = go r
      | otherwise = True
-- | Okasaki's red-black balance: rewrite any black node with a
-- red-red chain beneath it into a red node with two black children.
-- The vitality tags are threaded through the rotation unchanged.
balance :: Color -> Tree e -> e -> Tree e -> Vital -> Tree e
balance B (T R (T R a x b u) y c v) z d w = T R (T B a x b u) y (T B c z d v) w
balance B (T R a x (T R b y c u) v) z d w = T R (T B a x b u) y (T B c z d v) w
balance B a x (T R (T R b y c u) z d v) w = T R (T B a x b u) y (T B c z d v) w
balance B a x (T R b y (T R c z d u) v) w = T R (T B a x b u) y (T B c z d v) w
balance color e w f v = T color e w f v
-- | Insert; the root is recoloured black afterwards.
-- NOTE(review): the livings counter is incremented even when the
-- element is already present (the @otherwise@ branch of @ins@ returns
-- the subtree unchanged), and inserting a value that exists but is
-- marked 'D' does not revive it — confirm whether that is intended.
insert :: Ord e => e -> RBTree e -> RBTree e
insert x (RBTree s l d) = RBTree (T B a' y' b' v') (l + 1) d
  where
    T _ a' y' b' v' = ins s
    ins E = T R E x E L
    ins s'@(T color a y b v)
      | x < y = balance color (ins a) y b v
      | x > y = balance color a y (ins b) v
      | otherwise = s'
-- | Batched deletion: the matching node is only marked 'D'; when the
-- dead count grows large relative to the livings counter
-- (@l < (d + 1) * 2@), the whole tree is rebuilt from its live
-- elements.
-- NOTE(review): on a match, @del@ recurses into both subtrees even
-- though a BST can hold the key only on the search path — verify this
-- is deliberate (it is harmless for correctness but costs time).
delete :: Ord e => e -> RBTree e -> RBTree e
delete x (RBTree s l d) = if l < (d + 1) * 2 then rebuild t else t
  where
    t = RBTree (del s) l (d + 1)
    del E = E
    del s'@(T color a y b v)
      | x < y = T color (del a) y b v
      | x > y = T color a y (del b) v
      | otherwise = T color (del a) x (del b) D
-- | Rebuild the tree from its live elements only; the dead counter is
-- reset to zero and the livings counter to the true element count.
rebuild :: Ord e => RBTree e -> RBTree e
rebuild (RBTree t _ _) = RBTree (fromOrdList livings) (length livings) 0
  where
    livings = toOrdList t
-- | In-order list of the live elements; nodes tagged 'D' contribute
-- nothing themselves but their subtrees are still visited.
toOrdList :: Ord e => Tree e -> [e]
toOrdList tree = walk tree []
  where
    walk E acc = acc
    walk (T _ l x r L) acc = walk l (x : walk r acc)
    walk (T _ l _ r D) acc = walk l (walk r acc)
-- from Sec3.Ex9
-- from Sec3.Ex9
-- | Build a valid red-black tree from a sorted list in one pass:
-- a complete black tree of depth @d'@ carries the first
-- @2^(d'+1) - 1@ elements; the @r'@ surplus elements become red
-- leaves on the bottom level, filled left to right.
fromOrdList :: Ord e => [e] -> Tree e
fromOrdList xs' = (\(x,_,_) -> x) $ go d' r' xs'
  where
    n = length xs'
    -- Smallest depth whose complete tree holds at least n nodes.
    d' = until (\x -> 2^(x+1)-1 >= n) succ 0
    r' = n - 2^d' + 1
    -- go depth surplus xs = (tree, remaining xs, remaining surplus).
    go :: Ord e => Int -> Int -> [e] -> (Tree e, [e], Int)
    go _ r [] = (E, [], r)
    go d r xs0@(x:xs)
      | d == 1, r == 0 = (T B E x E L, xs, r)
      | d == 0, r > 0 = (T R E x E L, xs, r-1)
      | d == 0 = (E, xs0, r)
      | otherwise = (T B t1 x1 t2 L, xs2, r2)
      where
        (t1, x1:xs1, r1) = go (d-1) r xs0
        (t2, xs2, r2) = go (d-1) r1 xs1
-- We build a balanced binary tree whose shortest and longest paths
-- differ in depth by at most one.
-- Fix the shortest path length d: for a tree of n nodes, d is the
-- depth of the shortest path of the balanced tree.
-- Conceptually we build a complete binary tree of depth d and are left
-- with r surplus nodes.
-- The complete tree of depth d consists of black nodes only; the
-- surplus nodes are attached as red children, filling leftmost
-- positions first.
| matonix/pfds | src/PFDS/Sec8/Ex1.hs | bsd-3-clause | 3,046 | 0 | 14 | 907 | 1,738 | 877 | 861 | 61 | 3 |
module Karamaan.Opaleye.Predicates where
import Karamaan.Opaleye.Wire (Wire, unWire)
import Karamaan.Opaleye.QueryArr (QueryArr, restrictWith)
import Karamaan.Opaleye.Values (sqlStringOfDay)
import Karamaan.Opaleye.Operators2 (eq, constantDay)
import Database.HaskellDB.Query (ShowConstant, showConstant)
import Database.HaskellDB.PrimQuery (PrimExpr(AttrExpr, UnExpr, ConstExpr,
BinExpr),
Literal(BoolLit),
BinOp(OpOr, OpEq, OpNotEq),
UnOp(OpIsNull), Literal(OtherLit))
import Data.Time.Calendar (Day)
import Control.Arrow (arr, Arrow, first, (<<<))
-- The combinators are to be in Operators2 are to be preferred to the ones here.
-- Predicates contains code from an earlier time when I didn't understand the
-- more convenient ways to work with Arrows.
-- We now have an alpha implementation of 'ExprArr' which essentially
-- does all the PrimExpr plumbing with a safer API. In the future
-- it's preferable to use 'ExprArr' to any of the combinators here
-- (except restrict which is still needed).
-- | Restrict a query to rows where the wire differs from the given
-- constant.
notEqualC :: ShowConstant a => a -> QueryArr (Wire a) ()
notEqualC x = restrictWith (\w -> wireIsNot w (showConstant x))
-- TODO: replace this with something like equalsDay?
-- | Restrict a query to rows where the wire equals the given constant.
equalsC :: ShowConstant a => a -> QueryArr (Wire a) ()
equalsC x = restrictWith (\w -> wireIs w (showConstant x))
-- | SQL literal for a 'Day', rendered via 'sqlStringOfDay'.
literalDay :: Day -> Literal
literalDay = OtherLit . sqlStringOfDay
-- ^^ I guess this should really be a DateLit, but I can't
-- work out how to use HaskellDB's CalendarTime
-- TODO: should we get rid of this as it is somewhat redundant?
-- | Restrict a query to rows where the wire equals the given day:
-- the day is injected as a constant, compared with 'eq', and the
-- resulting boolean wire fed to 'restrict'.
equalsDay :: Day -> QueryArr (Wire Day) ()
equalsDay day = restrict
                <<< eq
                <<< first (constantDay day)
                <<< arr (\d -> ((), d))
-- | SQL @IS NULL@ test on a wire.
null' :: Wire a -> PrimExpr
null' w = UnExpr OpIsNull (AttrExpr (unWire w))
-- | True when the wire equals any of the given literals; for an empty
-- list the result is the constant false.
wireIsOneOf :: Wire a -> [Literal] -> PrimExpr
wireIsOneOf w ls = foldr (BinExpr OpOr) false [wireIs w l | l <- ls]
  where
    false :: PrimExpr
    false = ConstExpr (BoolLit False)
-- | Compare a wire against a literal with the given binary operator.
wireOp :: BinOp -> Wire a -> Literal -> PrimExpr
wireOp op w l = BinExpr op (AttrExpr (unWire w)) (ConstExpr l)
-- | Equality of a wire and a literal.
wireIs :: Wire a -> Literal -> PrimExpr
wireIs = wireOp OpEq
-- | Inequality of a wire and a literal.
wireIsNot :: Wire a -> Literal -> PrimExpr
wireIsNot = wireOp OpNotEq
-- | Use a (boolean) wire directly as a predicate expression.
wireTrue :: Wire a -> PrimExpr
wireTrue = AttrExpr . unWire
-- | Keep only the rows whose input boolean wire is true.
restrict :: QueryArr (Wire Bool) ()
restrict = restrictWith wireTrue
| dbp/karamaan-opaleye | Karamaan/Opaleye/Predicates.hs | bsd-3-clause | 2,535 | 0 | 10 | 624 | 603 | 338 | 265 | 41 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{- |
Module : $Header$
Description : Generic Prover GUI.
Copyright : (c) Klaus Luettich, Rainer Grabbe, Uni Bremen 2006
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : needs POSIX
Generic GUI for automatic theorem provers. Based upon former SPASS Prover GUI.
-}
module GUI.HTkGenericATP (genericATPgui) where
import Logic.Prover
import qualified Common.AS_Annotation as AS_Anno
import qualified Data.Map as Map
import Common.Utils (getEnvSave, readMaybe)
import Common.Result
import Data.List
import Data.Maybe
import qualified Control.Exception as Exception
import qualified Control.Concurrent as Conc
import HTk.Toolkit.SpinButton
import HTk.Toolkit.Separator
import HTk.Devices.XSelection
import HTk.Widgets.Space
import GUI.Utils
import GUI.HTkUtils hiding (createTextSaveDisplay, createTextDisplay)
import Interfaces.GenericATPState
import Proofs.BatchProcessing
{- |
  Set the time limit of a Config.  Non-positive values clear the limit
  so that a default is used instead.
-}
setTimeLimit :: Int -> GenericConfig proofTree -> GenericConfig proofTree
setTimeLimit n c
  | n > 0     = c { timeLimit = Just n }
  | otherwise = c { timeLimit = Nothing }
{- |
  Replace the extra prover options of a Config.
-}
setExtraOpts :: [String] -> GenericConfig proofTree -> GenericConfig proofTree
setExtraOpts opts c = c { extraOpts = opts }
-- ** Constants
-- ** Defining the view
{- |
  Colors used by the GUI to indicate the status of a goal.
-}
data ProofStatusColour
  -- | Proved
  = Green
  -- | Proved, but theory is inconsistent
  | Brown
  -- | Disproved
  | Red
  -- | Open
  | Black
  -- | Running
  | Blue
   deriving (Bounded, Enum, Show)
-- The following constants pair each colour with the label text shown
-- in the status line; 'show' of the colour is used as the Tk colour
-- name.
{- |
  Generates a ('ProofStatusColour', 'String') tuple representing a Proved proof
  status.
-}
statusProved :: (ProofStatusColour, String)
statusProved = (Green, "Proved")
{- |
  Generates a ('ProofStatusColour', 'String') tuple representing a Proved
  (but inconsistent) proof status.
-}
statusProvedButInconsistent :: (ProofStatusColour, String)
statusProvedButInconsistent = (Brown, "Proved/Inconsistent")
{- |
  Generates a ('ProofStatusColour', 'String') tuple representing a Disproved
  proof status.
-}
statusDisproved :: (ProofStatusColour, String)
statusDisproved = (Red, "Disproved")
{- |
  Generates a ('ProofStatusColour', 'String') tuple representing an Open proof
  status.
-}
statusOpen :: (ProofStatusColour, String)
statusOpen = (Black, "Open")
{- |
  Generates a ('ProofStatusColour', 'String') tuple representing an Open proof
  status in case the time limit has been exceeded.
-}
statusOpenTExceeded :: (ProofStatusColour, String)
statusOpenTExceeded = (Black, "Open (Time is up!)")
{- |
  Generates a ('ProofStatusColour', 'String') tuple representing a Running proof
  status.
-}
statusRunning :: (ProofStatusColour, String)
statusRunning = (Blue, "Running")
{- |
  Maps a 'ProofStatus' to the ('ProofStatusColour', 'String') pair the
  GUI displays for it.  An open goal is shown differently when the
  configured time limit was exceeded.
-}
toGuiStatus :: GenericConfig proofTree -- ^ current prover configuration
            -> ProofStatus a -- ^ status to convert
            -> (ProofStatusColour, String)
toGuiStatus cf st = case goalStatus st of
  Proved consistent
    | consistent -> statusProved
    | otherwise -> statusProvedButInconsistent
  Disproved -> statusDisproved
  _ | timeLimitExceeded cf -> statusOpenTExceeded
    | otherwise -> statusOpen
-- | stores widgets of an options frame and the frame itself
data OpFrame = OpFrame { ofFrame :: Frame
                       , ofTimeSpinner :: SpinButton
                       , ofTimeEntry :: Entry Int
                       , ofOptionsEntry :: Entry String
                       }
{- |
  Generates a list of 'GUI.HTkUtils.LBGoalView' representations of all goals
  from a 'GenericATPState.GenericState'.
-}
goalsView
  :: GenericState sentence proofTree pst -- ^ current global prover state
  -> [LBGoalView] -- ^ resulting ['LBGoalView'] list
-- For each goal name: look up its config; a missing config or missing
-- proof status renders as the "open" indicator.
goalsView s = map ((\ g ->
                     let cfg = Map.lookup g (configsMap s)
                         statind = maybe LBIndicatorOpen
                                     (indicatorFromProofStatus . proofStatus)
                                     cfg
                      in
                       LBGoalView {statIndicator = statind,
                                   goalDescription = g})
                   . AS_Anno.senAttr) $ goalsList s
-- * GUI Implementation
-- ** Utility Functions
{- |
  Retrieves the value of the time limit 'Entry'. Ignores invalid input.
-}
getValueSafe
  :: Int -- ^ default time limt
  -> Entry Int -- ^ time limit 'Entry'
  -> IO Int -- ^ user-requested time limit or default for errors
-- Any exception thrown while reading the entry (e.g. unparsable text)
-- is logged and the default returned; note this deliberately catches
-- 'Exception.SomeException'.
getValueSafe defaultTimeLimit timeEntry =
  Exception.catch (getValue timeEntry :: IO Int)
                  $ \ e -> do
                      putStrLn $ "Warning: Error " ++ show (e :: Exception.SomeException)
                                 ++ " was ignored"
                      return defaultTimeLimit
{- |
  Reads the given environment variable; when it exists and parses as an
  'Int' that value is returned, otherwise the fallback
  'batchTimeLimit'.
-}
getBatchTimeLimit :: String -- ^ ENV-variable containing batch time limit
                  -> IO Int
getBatchTimeLimit envVar = getEnvSave batchTimeLimit envVar readMaybe
{- |
  Text displayed by the batch mode window: progress so far and a
  worst-case estimate of the remaining running time.
-}
batchInfoText :: Int -- ^ batch time limit
              -> Int -- ^ total number of goals
              -> Int -- ^ number of goals that have been processed
              -> String
batchInfoText tl gTotal gDone = concat
    [ "Batch mode running.\n"
    , show gDone, "/", show gTotal, " goals processed.\n"
    , "At most ", show hours, "h ", show mins, "m ", show secs
    , "s remaining." ]
  where
    -- Worst case: every remaining goal uses the full time limit.
    remainingSecs = (gTotal - gDone) * tl
    (remMins, secs) = divMod remainingSecs 60
    (hours, mins) = divMod remMins 60
-- ** Callbacks
{- |
  Updates the display of the status of the current goal.
-}
updateDisplay
  :: GenericState sentence proofTree pst
  -- ^ current global prover state
  -> Bool -- ^ set to 'True' if you want the 'ListBox' to be updated
  -> ListBox String
  -- ^ 'ListBox' displaying the status of all goals (see 'goalsView')
  -> Label
  {- ^ 'Label' displaying the status of the currently selected goal
  (see 'toGuiStatus') -}
  -> Entry Int -- ^ 'Entry' containing the time limit of the current goal
  -> Entry String -- ^ 'Entry' containing the extra options
  -> ListBox String
  -- ^ 'ListBox' displaying all axioms used to prove a goal (if any)
  -> IO ()
updateDisplay st updateLb goalsLb statusLabel timeEntry optionsEntry axiomsLb =
  {- the code in comments only works with an updated uni version that
  will be installed when switching to ghc-6.6.1 -}
  do
    -- Refresh the goal list while preserving the scroll position.
    when updateLb (do
       (offScreen, _) <- view Vertical goalsLb
       populateGoalsListBox goalsLb (goalsView st)
       moveto Vertical goalsLb offScreen
       )
    -- When a goal is selected, mirror its config and proof status into
    -- the status label, time limit entry, options entry and axiom list;
    -- otherwise do nothing.
    maybe (return ())
          (\ go ->
               let mprfst = Map.lookup go (configsMap st)
                   cf = Map.findWithDefault
                        (error "GUI.GenericATP.updateDisplay")
                        go (configsMap st)
                   t' = fromMaybe guiDefaultTimeLimit (timeLimit cf)
                   opts' = unwords (extraOpts cf)
                   (color, label) = maybe statusOpen
                                    (toGuiStatus cf . proofStatus)
                                    mprfst
                   usedAxs = maybe [] (usedAxioms . proofStatus) mprfst
               in do
                statusLabel # text label
                statusLabel # foreground (show color)
                timeEntry # value t'
                optionsEntry # value opts'
                axiomsLb # value (usedAxs :: [String])
                return ())
          (currentGoal st)
-- | Builds the options frame (time limit spinner/entry plus the
-- optional extra-options entry) inside the given container and returns
-- its widgets bundled in an 'OpFrame'.
newOptionsFrame
  :: Container par
  => par -- ^ the parent container
  -> (Entry Int -> Spin -> IO a)
  -- ^ Function called by pressing one spin button
  -> Bool -- ^ extra options input line
  -> IO OpFrame
newOptionsFrame con updateFn isExtraOps = do
  right <- newFrame con []
  -- contents of newOptionsFrame
  l1 <- newLabel right [text "Options:"]
  pack l1 [Anchor NorthWest]
  opFrame <- newFrame right []
  pack opFrame [Expand On, Fill X, Anchor North]
  spacer <- newLabel opFrame [text "   "]
  pack spacer [Side AtLeft]
  opFrame2 <- newVBox opFrame []
  pack opFrame2 [Expand On, Fill X, Anchor NorthWest]
  timeLimitFrame <- newFrame opFrame2 []
  pack timeLimitFrame [Expand On, Fill X, Anchor West]
  l2 <- newLabel timeLimitFrame [text "TimeLimit"]
  pack l2 [Side AtLeft]
  -- extra HBox for time limit display
  timeLimitLine <- newHBox timeLimitFrame []
  pack timeLimitLine [Expand On, Side AtRight, Anchor East]
  timeEntry <- newEntry timeLimitLine [width 18,
                                       value guiDefaultTimeLimit]
  pack (timeEntry :: Entry Int) []
  timeSpinner <- newSpinButton timeLimitLine (updateFn timeEntry) []
  pack timeSpinner []
  -- The extra-options widgets are always created but only packed
  -- (made visible) when isExtraOps is set.
  l3 <- newLabel opFrame2 [text "Extra Options:"]
  when isExtraOps $
       pack l3 [Anchor West]
  optionsEntry <- newEntry opFrame2 [width 37]
  when isExtraOps $
       pack (optionsEntry :: Entry String) [Fill X, PadX (cm 0.1)]
  return OpFrame
    { ofFrame = right
    , ofTimeSpinner = timeSpinner
    , ofTimeEntry = timeEntry
    , ofOptionsEntry = optionsEntry }
-- ** Main GUI
{- |
Invokes the prover GUI. Users may start the batch prover run on all goals,
or use a detailed GUI for proving each goal manually.
-}
genericATPgui
:: (Ord proofTree, Ord sentence)
=> ATPFunctions sign sentence mor proofTree pst
-- ^ prover specific functions
-> Bool -- ^ prover supports extra options
-> String -- ^ prover name
-> String -- ^ theory name
-> Theory sign sentence proofTree
-- ^ theory with signature and sentences
-> [FreeDefMorphism sentence mor] -- ^ freeness constraints
-> proofTree -- ^ initial empty proofTree
-> IO [ProofStatus proofTree] -- ^ proof status for each goal
genericATPgui atpFun isExtraOptions prName thName th freedefs pt = do
-- create initial backing data structure
let initState = initialGenericState prName
(initialProverState atpFun)
(atpTransSenName atpFun) th freedefs pt
stateMVar <- Conc.newMVar initState
batchTLimit <- getBatchTimeLimit $ batchTimeEnv atpFun
-- main window
main <- createToplevel [text $ thName ++ " - " ++ prName ++ " Prover"]
pack main [Expand On, Fill Both]
-- VBox for the whole window
b <- newVBox main []
pack b [Expand On, Fill Both]
-- HBox for the upper part (goals on the left, options/results on the right)
b2 <- newHBox b []
pack b2 [Expand On, Fill Both]
-- left frame (goals)
left <- newFrame b2 []
pack left [Expand On, Fill Both]
b3 <- newVBox left []
pack b3 [Expand On, Fill Both]
l0 <- newLabel b3 [text "Goals:"]
pack l0 [Anchor NorthWest]
lbFrame <- newFrame b3 []
pack lbFrame [Expand On, Fill Both]
lb <- newListBox lbFrame [bg "white", exportSelection False,
selectMode Single, height 15] :: IO (ListBox String)
pack lb [Expand On, Side AtLeft, Fill Both]
sb <- newScrollBar lbFrame []
pack sb [Expand On, Side AtRight, Fill Y, Anchor West]
lb # scrollbar Vertical sb
-- right frame (options/results)
OpFrame { ofFrame = right
, ofTimeSpinner = timeSpinner
, ofTimeEntry = timeEntry
, ofOptionsEntry = optionsEntry}
<- newOptionsFrame b2
(\ timeEntry sp -> synchronize main
(do
st <- Conc.readMVar stateMVar
maybe noGoalSelected
(\ goal ->
do
curEntTL <- getValueSafe guiDefaultTimeLimit timeEntry
s <- Conc.takeMVar stateMVar
let sEnt = s {configsMap =
adjustOrSetConfig
(setTimeLimit curEntTL)
prName goal pt (configsMap s)}
cfg = getConfig prName goal pt (configsMap sEnt)
t = timeLimit cfg
t' = case sp of
Up -> maybe (guiDefaultTimeLimit + 10)
(+ 10)
t
_ -> maybe (guiDefaultTimeLimit - 10)
(subtract 10)
t
s' = sEnt {configsMap =
adjustOrSetConfig
(setTimeLimit t')
prName goal pt (configsMap sEnt)}
Conc.putMVar stateMVar s'
timeEntry # value
(fromMaybe guiDefaultTimeLimit
$ timeLimit $ getConfig prName goal pt
$ configsMap s')
done)
(currentGoal st)))
isExtraOptions
pack right [Expand On, Fill Both, Anchor NorthWest]
-- buttons for options
buttonsHb1 <- newHBox right []
pack buttonsHb1 [Anchor NorthEast]
saveProbButton <- newButton buttonsHb1 [text $ "Save "
++ removeFirstDot (problemOutput $ fileExtensions atpFun)
++ " File"]
pack saveProbButton [Side AtLeft]
proveButton <- newButton buttonsHb1 [text "Prove"]
pack proveButton [Side AtRight]
-- result frame
resultFrame <- newFrame right []
pack resultFrame [Expand On, Fill Both]
l4 <- newLabel resultFrame [text ("Results:" ++ replicate 70 ' ')]
pack l4 [Anchor NorthWest]
spacer <- newLabel resultFrame [text " "]
pack spacer [Side AtLeft]
resultContentBox <- newHBox resultFrame []
pack resultContentBox [Expand On, Anchor West, Fill Both]
-- labels on the left side
rc1 <- newVBox resultContentBox []
pack rc1 [Expand Off, Anchor North]
l5 <- newLabel rc1 [text "Status"]
pack l5 [Anchor West]
l6 <- newLabel rc1 [text "Used Axioms"]
pack l6 [Anchor West]
-- contents on the right side
rc2 <- newVBox resultContentBox []
pack rc2 [Expand On, Fill Both, Anchor North]
statusLabel <- newLabel rc2 [text " -- "]
pack statusLabel [Anchor West]
axiomsFrame <- newFrame rc2 []
pack axiomsFrame [Expand On, Anchor West, Fill Both]
axiomsLb <- newListBox axiomsFrame [value ([] :: [String]),
bg "white", exportSelection False,
selectMode Browse,
height 6, width 19] :: IO (ListBox String)
pack axiomsLb [Side AtLeft, Expand On, Fill Both]
axiomsSb <- newScrollBar axiomsFrame []
pack axiomsSb [Side AtRight, Fill Y, Anchor West]
axiomsLb # scrollbar Vertical axiomsSb
detailsButton <- newButton resultFrame [text "Show Details"]
pack detailsButton [Anchor NorthEast]
-- separator
sp1 <- newSpace b (cm 0.15) []
pack sp1 [Expand Off, Fill X, Side AtBottom]
newHSeparator b
sp2 <- newSpace b (cm 0.15) []
pack sp2 [Expand Off, Fill X, Side AtBottom]
-- batch mode frame
batch <- newFrame b []
pack batch [Expand Off, Fill X]
batchTitle <- newLabel batch [text $ prName ++ " Batch Mode:"]
pack batchTitle [Side AtTop]
batchIFrame <- newFrame batch []
pack batchIFrame [Expand On, Fill X]
batchRight <- newVBox batchIFrame []
pack batchRight [Expand On, Fill X, Side AtRight]
batchBtnBox <- newHBox batchRight []
pack batchBtnBox [Expand On, Fill X, Side AtRight]
stopBatchButton <- newButton batchBtnBox [text "Stop"]
pack stopBatchButton []
runBatchButton <- newButton batchBtnBox [text "Run"]
pack runBatchButton []
batchSpacer <- newSpace batchRight (pp 150) [orient Horizontal]
pack batchSpacer [Side AtRight]
batchStatusLabel <- newLabel batchRight [text "\n\n"]
pack batchStatusLabel []
OpFrame { ofFrame = batchLeft
, ofTimeSpinner = batchTimeSpinner
, ofTimeEntry = batchTimeEntry
, ofOptionsEntry = batchOptionsEntry}
<- newOptionsFrame batchIFrame
(\ tEntry sp -> synchronize main
(do
curEntTL <- getValueSafe batchTLimit tEntry
let t' = case sp of
Up -> curEntTL + 10
_ -> max batchTLimit (curEntTL - 10)
tEntry # value t'
done))
isExtraOptions
pack batchLeft [Expand On, Fill X, Anchor NorthWest, Side AtLeft]
batchGoal <- newHBox batch []
pack batchGoal [Expand On, Fill X, Anchor West, Side AtBottom]
batchCurrentGoalTitle <- newLabel batchGoal [text "Current goal:"]
pack batchCurrentGoalTitle []
batchCurrentGoalLabel <- newLabel batchGoal [text "--"]
pack batchCurrentGoalLabel []
saveProblem_batch <- createTkVariable False
saveProblem_batch_checkBox <-
newCheckButton batchLeft
[variable saveProblem_batch,
text $ "Save "
++ removeFirstDot (problemOutput $ fileExtensions atpFun)]
enableSaveCheckBox <- getEnvSave False "HETS_ENABLE_BATCH_SAVE" readMaybe
when enableSaveCheckBox $
pack saveProblem_batch_checkBox [Expand Off, Fill None, Side AtBottom]
batchTimeEntry # value batchTLimit
-- separator 2
sp1_2 <- newSpace b (cm 0.15) []
pack sp1_2 [Expand Off, Fill X, Side AtBottom]
newHSeparator b
sp2_2 <- newSpace b (cm 0.15) []
pack sp2_2 [Expand Off, Fill X, Side AtBottom]
-- global options frame
globalOptsFr <- newFrame b []
pack globalOptsFr [Expand Off, Fill Both]
gOptsTitle <- newLabel globalOptsFr [text "Global Options:"]
pack gOptsTitle [Side AtTop]
inclProvedThsTK <- createTkVariable True
inclProvedThsCheckButton <-
newCheckButton globalOptsFr
[variable inclProvedThsTK,
text ("include preceding proven therorems"
++ " in next proof attempt")]
pack inclProvedThsCheckButton [Side AtLeft]
-- separator 3
sp1_3 <- newSpace b (cm 0.15) []
pack sp1_3 [Expand Off, Fill X, Side AtBottom]
newHSeparator b
sp2_3 <- newSpace b (cm 0.15) []
pack sp2_3 [Expand Off, Fill X, Side AtBottom]
-- bottom frame (help/save/exit buttons)
bottom <- newHBox b []
pack bottom [Expand Off, Fill Both]
helpButton <- newButton bottom [text "Help"]
pack helpButton [PadX (cm 0.3), IPadX (cm 0.1)] -- wider "Help" button
saveButton <- newButton bottom [text "Save Prover Configuration"]
pack saveButton [PadX (cm 0.3)]
exitButton <- newButton bottom [text "Exit Prover"]
pack exitButton [PadX (cm 0.3)]
populateGoalsListBox lb (goalsView initState)
putWinOnTop main
-- MVars for thread-safe communication
mVar_batchId <- Conc.newEmptyMVar :: IO (Conc.MVar Conc.ThreadId)
windowDestroyedMVar <- Conc.newEmptyMVar :: IO (Conc.MVar ())
-- events
(selectGoal, _) <- bindSimple lb (ButtonPress (Just 1))
doProve <- clicked proveButton
saveProb <- clicked saveProbButton
showDetails <- clicked detailsButton
runBatch <- clicked runBatchButton
stopBatch <- clicked stopBatchButton
help <- clicked helpButton
saveConfiguration <- clicked saveButton
exit <- clicked exitButton
(closeWindow, _) <- bindSimple main Destroy
let goalSpecificWids = [EnW timeEntry, EnW timeSpinner, EnW optionsEntry] ++
map EnW [proveButton, detailsButton, saveProbButton]
wids = EnW lb : goalSpecificWids ++
[EnW batchTimeEntry, EnW batchTimeSpinner,
EnW saveProblem_batch_checkBox,
EnW batchOptionsEntry, EnW inclProvedThsCheckButton] ++
map EnW [helpButton, saveButton, exitButton, runBatchButton]
disableWids goalSpecificWids
disable stopBatchButton
-- event handlers
_ <- spawnEvent $ forever
$ selectGoal >>> do
s <- Conc.takeMVar stateMVar
let oldGoal = currentGoal s
curEntTL <- getValueSafe guiDefaultTimeLimit timeEntry :: IO Int
let s' = maybe s
(\ og -> s
{configsMap =
adjustOrSetConfig (setTimeLimit curEntTL)
prName og pt
(configsMap s)})
oldGoal
sel <- getSelection lb :: IO (Maybe [Int])
let s'' = maybe s' (\ sg -> s' {currentGoal =
Just $ AS_Anno.senAttr
(goalsList s' !! head sg)})
sel
Conc.putMVar stateMVar s''
batchModeRunning <- isBatchModeRunning mVar_batchId
when (isJust sel && not batchModeRunning)
(enableWids goalSpecificWids)
when (isJust sel) $ enableWids [EnW detailsButton, EnW saveProbButton]
updateDisplay s'' False lb statusLabel timeEntry optionsEntry axiomsLb
done
+> saveProb >>> do
rs <- Conc.readMVar stateMVar
curEntTL <- getValueSafe guiDefaultTimeLimit
timeEntry :: IO Int
inclProvedThs <- readTkVariable inclProvedThsTK
maybe (return ())
(\ goal -> do
let (nGoal, lp') =
prepareLP (proverState rs)
rs goal inclProvedThs
s = rs {configsMap = adjustOrSetConfig
(setTimeLimit curEntTL)
prName goal pt
(configsMap rs)}
extraOptions <- getValue optionsEntry :: IO String
let s' = s {configsMap = adjustOrSetConfig
(setExtraOpts (words extraOptions))
prName goal pt
(configsMap s)}
prob <- goalOutput atpFun lp' nGoal $
createProverOptions atpFun
(getConfig prName goal pt $ configsMap s')
createTextSaveDisplay
(prName ++ " Problem for Goal " ++ goal)
(thName ++ '_' : goal
++ problemOutput (fileExtensions atpFun))
prob)
$ currentGoal rs
done
+> doProve >>> do
rs <- Conc.readMVar stateMVar
case currentGoal rs of
Nothing -> noGoalSelected >> done
Just goal -> do
curEntTL <- getValueSafe guiDefaultTimeLimit
timeEntry :: IO Int
inclProvedThs <- readTkVariable inclProvedThsTK
let s = rs {configsMap = adjustOrSetConfig
(setTimeLimit curEntTL)
prName goal pt
(configsMap rs)}
(nGoal, lp') = prepareLP (proverState rs)
rs goal inclProvedThs
extraOptions <- getValue optionsEntry :: IO String
let s' = s {configsMap = adjustOrSetConfig
(setExtraOpts (words extraOptions))
prName goal pt
(configsMap s)}
statusLabel # text (snd statusRunning)
statusLabel # foreground (show $ fst statusRunning)
disableWids wids
(retval, cfg) <- runProver atpFun lp'
(getConfig prName goal pt $ configsMap s') False
thName nGoal
-- check if window was closed
wDestroyed <- windowDestroyed windowDestroyedMVar
if wDestroyed
then done
else do
case retval of
ATPError m -> errorDialog "Error" m
_ -> return ()
let s'' = s' {
configsMap =
adjustOrSetConfig
(\ c -> c {timeLimitExceeded = isTimeLimitExceeded
retval,
proofStatus = (proofStatus cfg)
{usedTime = timeUsed cfg},
resultOutput = resultOutput cfg,
timeUsed = timeUsed cfg})
prName goal pt (configsMap s')}
Conc.modifyMVar_ stateMVar (return . const s'')
-- check again if window was closed
wDestroyed2 <- windowDestroyed windowDestroyedMVar
if wDestroyed2
then done
else do
enable lb
enable axiomsLb
updateDisplay s'' True lb statusLabel timeEntry
optionsEntry axiomsLb
enableWids wids
done
+> showDetails >>> do
Conc.yield
s <- Conc.readMVar stateMVar
case currentGoal s of
Nothing -> noGoalSelected >> done
Just goal -> do
let result = Map.lookup goal (configsMap s)
output = maybe ["This goal hasn't been run through " ++
"the prover yet."]
resultOutput
result
detailsText = concatMap ('\n' :) output
createTextSaveDisplay (prName ++ " Output for Goal " ++ goal)
(goal ++ proverOutput (fileExtensions atpFun))
(seq (length detailsText) detailsText)
done
+> runBatch >>> do
s <- Conc.readMVar stateMVar
-- get options for this batch run
curEntTL <- getValueSafe batchTLimit batchTimeEntry :: IO Int
let tLimit = if curEntTL > 0 then curEntTL else batchTLimit
batchTimeEntry # value tLimit
extOpts <- getValue batchOptionsEntry :: IO String
let extOpts' = words extOpts
openGoalsMap = filterOpenGoals $ configsMap s
numGoals = Map.size openGoalsMap
firstGoalName = fromMaybe "--" $
find (`Map.member` openGoalsMap) $
map AS_Anno.senAttr (goalsList s)
if numGoals > 0
then do
let afterEachProofAttempt gPSF nSen nextSen cfg@(retval, _) = do
{- this function is called after the prover returns from a
proof attempt (... -> IO Bool) -}
cont <- goalProcessed stateMVar tLimit extOpts'
numGoals prName gPSF nSen False cfg
Conc.tryTakeMVar mVar_batchId >>=
maybe (return False) (\ tId -> do
stored <- Conc.tryPutMVar
mVar_batchId
tId
if not stored
then fail $ "GenericATP: Thread " ++
"run check failed"
else do
wDestroyed <- windowDestroyed windowDestroyedMVar
if wDestroyed
then return False
else do
batchStatusLabel #
text (if cont
then batchInfoText tLimit numGoals gPSF
else "Batch mode finished\n\n")
setCurrentGoalLabel batchCurrentGoalLabel
(if cont
then maybe "--" AS_Anno.senAttr nextSen
else "--")
st <- Conc.readMVar stateMVar
updateDisplay st True lb statusLabel timeEntry
optionsEntry axiomsLb
case retval of
ATPError m -> errorDialog "Error" m
_ -> return ()
batchModeRunning <-
isBatchModeRunning mVar_batchId
let cont' = cont && batchModeRunning
unless cont $ do
disable stopBatchButton
enableWids wids
enableWidsUponSelection lb goalSpecificWids
cleanupThread mVar_batchId
return cont')
-- END of afterEachProofAttempt
batchStatusLabel # text (batchInfoText tLimit numGoals 0)
setCurrentGoalLabel batchCurrentGoalLabel firstGoalName
disableWids wids
enable stopBatchButton
enableWidsUponSelection lb [EnW detailsButton, EnW saveProbButton]
enable lb
inclProvedThs <- readTkVariable inclProvedThsTK
saveProblem_F <- readTkVariable saveProblem_batch
batchProverId <- Conc.forkIO $
genericProveBatch False tLimit extOpts' inclProvedThs
saveProblem_F
afterEachProofAttempt
(atpInsertSentence atpFun) (runProver atpFun)
prName thName s Nothing
>> return ()
stored <- Conc.tryPutMVar mVar_batchId batchProverId
if stored
then done
else fail "GenericATP: MVar for batchProverId already taken!!"
else {- numGoals < 1 -} do
batchStatusLabel # text "No further open goals\n\n"
batchCurrentGoalLabel # text "--"
done
+> stopBatch >>> do
cleanupThread mVar_batchId
wDestroyed <- windowDestroyed windowDestroyedMVar
if wDestroyed
then done
else do
disable stopBatchButton
enableWids wids
enableWidsUponSelection lb goalSpecificWids
batchStatusLabel # text "Batch mode stopped\n\n"
batchCurrentGoalLabel # text "--"
st <- Conc.readMVar stateMVar
updateDisplay st True lb statusLabel timeEntry
optionsEntry axiomsLb
done
+> help >>> do
createTextDisplay (prName ++ " Help")
(proverHelpText atpFun)
done
+> saveConfiguration >>> do
s <- Conc.readMVar stateMVar
let cfgText = show $ printCfgText $ configsMap s
createTextSaveDisplay
(prName ++ " Configuration for Theory " ++ thName)
(thName ++ theoryConfiguration (fileExtensions atpFun))
cfgText
done
sync $ exit >>> destroy main
+> closeWindow >>> do
Conc.putMVar windowDestroyedMVar ()
cleanupThread mVar_batchId
destroy main
s <- Conc.takeMVar stateMVar
let Result _ proofstats = revertRenamingOfLabels s $
map ((\ g -> let
res = Map.lookup g (configsMap s)
g' = Map.findWithDefault
(error ("Lookup of name failed: (1) "
++ "should not happen \""
++ g ++ "\""))
g (namesMap s)
in maybe (openProofStatus g' prName $ currentProofTree s)
proofStatus res) . AS_Anno.senAttr)
$ goalsList s
-- diags should not be plainly shown by putStrLn here
maybe (fail "reverse translation of names failed") return proofstats
where
cleanupThread mVar_TId =
Conc.tryTakeMVar mVar_TId >>= maybe (return ()) Conc.killThread
windowDestroyed sMVar =
Conc.yield >>
Conc.tryTakeMVar sMVar >>=
maybe (return False) (\ un -> Conc.putMVar sMVar un >> return True)
isBatchModeRunning tIdMVar =
Conc.tryTakeMVar tIdMVar >>=
maybe (return False) (\ tId -> Conc.putMVar tIdMVar tId >> return True)
noGoalSelected = errorDialog "Error" "Please select a goal first."
prepareLP prS s goal inclProvedThs =
let (beforeThis, afterThis) =
splitAt (fromMaybe (error "GUI.GenericATP: goal shoud be found")
$ findIndex ((== goal) . AS_Anno.senAttr)
$ goalsList s) $ goalsList s
proved = filter (checkGoal (configsMap s) . AS_Anno.senAttr)
beforeThis
in if inclProvedThs
then (head afterThis,
foldl (\ lp provedGoal ->
atpInsertSentence atpFun
lp provedGoal {AS_Anno.isAxiom = True})
prS
(reverse proved))
else (fromMaybe (error ("GUI.GenericATP.prepareLP: Goal "
++ goal ++ " not found!!"))
(find ((== goal) . AS_Anno.senAttr) (goalsList s)),
prS)
setCurrentGoalLabel batchLabel s = batchLabel # text (take 65 s)
removeFirstDot [] = error "GenericATP: no extension given"
removeFirstDot e@(h : ext) =
case h of
'.' -> ext
_ -> e
| keithodulaigh/Hets | GUI/HTkGenericATP.hs | gpl-2.0 | 34,733 | 0 | 41 | 13,151 | 7,850 | 3,777 | 4,073 | 661 | 20 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.StorageGateway.RetrieveTapeRecoveryPoint
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Retrieves the recovery point for the specified virtual tape.
--
-- A recovery point is a point in time view of a virtual tape at which all the
-- data on the tape is consistent. If your gateway crashes, virtual tapes that
-- have recovery points can be recovered to a new gateway.
--
-- The virtual tape can be retrieved to only one gateway. The retrieved tape is
-- read-only. The virtual tape can be retrieved to only a gateway-VTL. There is
-- no charge for retrieving recovery points.
--
-- <http://docs.aws.amazon.com/storagegateway/latest/APIReference/API_RetrieveTapeRecoveryPoint.html>
module Network.AWS.StorageGateway.RetrieveTapeRecoveryPoint
(
-- * Request
RetrieveTapeRecoveryPoint
-- ** Request constructor
, retrieveTapeRecoveryPoint
-- ** Request lenses
, rtrpGatewayARN
, rtrpTapeARN
-- * Response
, RetrieveTapeRecoveryPointResponse
-- ** Response constructor
, retrieveTapeRecoveryPointResponse
-- ** Response lenses
, rtrprTapeARN
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.JSON
import Network.AWS.StorageGateway.Types
import qualified GHC.Exts
-- | Request payload for the RetrieveTapeRecoveryPoint operation.
-- Both fields are required by the service and are exposed through the
-- 'rtrpGatewayARN' and 'rtrpTapeARN' lenses below.
data RetrieveTapeRecoveryPoint = RetrieveTapeRecoveryPoint
    { _rtrpGatewayARN :: Text
    , _rtrpTapeARN    :: Text
    } deriving (Eq, Ord, Read, Show)
-- | Smart constructor for 'RetrieveTapeRecoveryPoint'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rtrpGatewayARN' @::@ 'Text'
--
-- * 'rtrpTapeARN' @::@ 'Text'
--
retrieveTapeRecoveryPoint :: Text -- ^ 'rtrpTapeARN'
                          -> Text -- ^ 'rtrpGatewayARN'
                          -> RetrieveTapeRecoveryPoint
retrieveTapeRecoveryPoint tapeArn gatewayArn = RetrieveTapeRecoveryPoint
    { _rtrpGatewayARN = gatewayArn
    , _rtrpTapeARN    = tapeArn
    }
-- | Lens onto the gateway ARN ('_rtrpGatewayARN') of the request.
-- NOTE(review): presumably the ARN of the gateway-VTL the tape belongs to;
-- confirm wording against the AWS API reference.
rtrpGatewayARN :: Lens' RetrieveTapeRecoveryPoint Text
rtrpGatewayARN = lens _rtrpGatewayARN (\s a -> s { _rtrpGatewayARN = a })
-- | The Amazon Resource Name (ARN) of the virtual tape for which you want to
-- retrieve the recovery point.
rtrpTapeARN :: Lens' RetrieveTapeRecoveryPoint Text
rtrpTapeARN = lens get set
  where
    get     = _rtrpTapeARN
    set s a = s { _rtrpTapeARN = a }
-- | Response payload: wraps the tape ARN echoed back by the service.
-- The field is optional ('Maybe'), hence the 'Monoid' instance derived
-- via the newtype.
newtype RetrieveTapeRecoveryPointResponse = RetrieveTapeRecoveryPointResponse
    { _rtrprTapeARN :: Maybe Text
    } deriving (Eq, Ord, Read, Show, Monoid)
-- | Construct an empty 'RetrieveTapeRecoveryPointResponse'.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rtrprTapeARN' @::@ 'Maybe' 'Text'
--
retrieveTapeRecoveryPointResponse :: RetrieveTapeRecoveryPointResponse
retrieveTapeRecoveryPointResponse = RetrieveTapeRecoveryPointResponse Nothing
-- | The Amazon Resource Name (ARN) of the virtual tape for which the recovery
-- point was retrieved.
rtrprTapeARN :: Lens' RetrieveTapeRecoveryPointResponse (Maybe Text)
rtrprTapeARN = lens get set
  where
    get     = _rtrprTapeARN
    set s a = s { _rtrprTapeARN = a }
-- The request is always POSTed to the service root; there is no query string.
instance ToPath RetrieveTapeRecoveryPoint where
    toPath _ = "/"

instance ToQuery RetrieveTapeRecoveryPoint where
    toQuery _ = mempty
-- No extra headers beyond the defaults supplied by the service definition.
instance ToHeaders RetrieveTapeRecoveryPoint

-- Serialise the request body; key names follow the AWS wire format.
instance ToJSON RetrieveTapeRecoveryPoint where
    toJSON RetrieveTapeRecoveryPoint{..} = object
        [ "TapeARN"    .= _rtrpTapeARN
        , "GatewayARN" .= _rtrpGatewayARN
        ]

instance AWSRequest RetrieveTapeRecoveryPoint where
    type Sv RetrieveTapeRecoveryPoint = StorageGateway
    type Rs RetrieveTapeRecoveryPoint = RetrieveTapeRecoveryPointResponse

    request  = post "RetrieveTapeRecoveryPoint"
    response = jsonResponse

-- "TapeARN" may be absent in the response, hence the optional '.:?'.
instance FromJSON RetrieveTapeRecoveryPointResponse where
    parseJSON = withObject "RetrieveTapeRecoveryPointResponse" $ \o -> RetrieveTapeRecoveryPointResponse
        <$> o .:? "TapeARN"
| romanb/amazonka | amazonka-storagegateway/gen/Network/AWS/StorageGateway/RetrieveTapeRecoveryPoint.hs | mpl-2.0 | 4,796 | 0 | 9 | 964 | 530 | 324 | 206 | 63 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Network.MPD.Applicative.DatabaseSpec (main, spec) where
import TestUtil
import Unparse
import Network.MPD.Applicative.Database
import Network.MPD.Commands.Query
import Network.MPD.Commands.Types
-- | Run this module's spec standalone with hspec's default runner.
main :: IO ()
main = hspec spec
-- | Protocol-level specs for the database commands.  Each case pins the
-- exact wire command the client must send (first element of the `with`
-- pair) and feeds back a canned server response (second element), then
-- asserts the parsed result.  The literal strings are part of the contract
-- with the MPD protocol and must not be edited casually.
spec :: Spec
spec = do
    describe "count" $ do
        it "returns a count of entries matching a query" $ do
            count (Title =? "Foo")
                `with` [("count Title \"Foo\""
                       , Right "songs: 0\nplaytime: 0\nOK")]
                `shouldBe` Right (Count 0 0)

    describe "find" $ do
        it "returns songs exactly matching a query" $ do
            let obj  = defaultSong "Bar.ogg"
                resp = unparse obj ++ "OK"
            find (Title =? "Foo")
                `with` [("find Title \"Foo\"", Right resp)]
                `shouldBe` Right [obj]

    describe "findadd" $ do
        it "adds songs matching query to current playlist" $ do
            findAdd (Title =? "Foo")
                `with` [("findadd Title \"Foo\"", Right "OK")]
                `shouldBe` Right ()

    describe "list" $ do
        it "lists all tags of the specified type" $ do
            list Title mempty
                `with` [("list Title"
                        , Right "Title: Foo\nTitle: Bar\nOK"
                       )]
                `shouldBe` Right ["Foo", "Bar"]

        it "can list albums by an artist" $ do
            list Album (Artist =? "Muzz")
                `with` [("list Album Artist \"Muzz\""
                        , Right "Album: Foo\nOK")]
                `shouldBe` Right ["Foo"]

        -- The doubly-escaped string below is the quoted filter expression
        -- as it appears on the wire.
        it "can filter tags by arbitrary queries" $ do
            list Title (qNot $ Artist =? "Foo" <> Album =? "Bar")
                `with` [("list Title \"(!((Artist == \\\"Foo\\\") AND (Album == \\\"Bar\\\")))\""
                        , Right "Title: Foo\nOK")]
                `shouldBe` Right ["Foo"]

    describe "listAll" $ do
        it "recursively lists songs in a database directory" $ do
            listAll ""
                `with` [("listall \"\""
                        , Right "directory: FooBand\n\
                                \directory: FooBand/album1\n\
                                \file: FooBand/album1/01 - songA.ogg\n\
                                \file: FooBand/album1/02 - songB.ogg\nOK")]
                `shouldBe` Right ["FooBand/album1/01 - songA.ogg"
                                 ,"FooBand/album1/02 - songB.ogg"]

    describe "lsInfo" $ do
        it "returns a non-recursive listing of a database directory" $ do
            let song = defaultSong "Bar.ogg"
                resp = "directory: Foo\n" ++ unparse song
                       ++ "\nplaylist: Quux\nOK"
            lsInfo ""
                `with` [("lsinfo \"\""
                        , Right resp)]
                `shouldBe` Right [ LsDirectory "Foo"
                                 , LsSong song
                                 , LsPlaylist "Quux"]

    describe "listAllInfo" $ do
        it "is a recursive 'lsInfo'" $ do
            listAllInfo ""
                `with` [("listallinfo \"\""
                        , Right "directory: Foo\ndirectory: Bar\nOK"
                       )]
                `shouldBe` Right [LsDirectory "Foo", LsDirectory "Bar"]

    describe "search" $ do
        it "returns songs matching a case-insensitive query" $ do
            let obj  = defaultSong "Bar.ogg"
                resp = unparse obj ++ "OK"
            search (Title =? "Foo")
                `with` [("search Title \"Foo\"", Right resp)]
                `shouldBe` Right [obj]

    describe "searchAdd" $ do
        it "sends a searchadd request" $ do
            searchAdd (Title =? "Foo")
                `with` [("searchadd Title \"Foo\"", Right "OK")]
                `shouldBe` Right ()

    describe "searchAddPl" $ do
        it "sends a searchaddpl request" $ do
            searchAddPl "Foo" (Title =? "Bar")
                `with` [("searchaddpl \"Foo\" Title \"Bar\"", Right "OK")]
                `shouldBe` Right ()

    describe "update" $ do
        it "updates the entire collection by default" $ do
            update Nothing
                `with` [("update", Right "updating_db: 23\nOK")]
                `shouldBe` Right 23

        it "can update a specific path" $ do
            update (Just "foo")
                `with` [("update \"foo\"", Right "updating_db: 23\nOK")]
                `shouldBe` Right 23

    describe "rescan" $ do
        it "returns entire collection by default" $ do
            rescan Nothing
                `with` [("rescan", Right "updating_db: 23\nOK")]
                `shouldBe` Right 23

        it "can rescan a specific path" $ do
            rescan (Just "foo")
                `with` [("rescan \"foo\"", Right "updating_db: 23\nOK")]
                `shouldBe` Right 23
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<!-- JavaHelp helpset descriptor for the AdvFuzzer add-on (fr-FR locale).
     Declares the TOC, index, full-text search, and favorites views.
     NOTE(review): xml:lang is fr-FR but the view labels below are English;
     confirm whether they are intentionally left untranslated. -->
<helpset version="2.0" xml:lang="fr-FR">
  <title>AdvFuzzer Add-On</title>
  <!-- Map file resolving help IDs to HTML pages; "top" is the landing page. -->
  <maps>
    <homeID>top</homeID>
    <mapref location="map.jhm"/>
  </maps>
  <view>
    <name>TOC</name>
    <label>Contents</label>
    <type>org.zaproxy.zap.extension.help.ZapTocView</type>
    <data>toc.xml</data>
  </view>
  <view>
    <name>Index</name>
    <label>Index</label>
    <type>javax.help.IndexView</type>
    <data>index.xml</data>
  </view>
  <view>
    <name>Search</name>
    <label>Search</label>
    <type>javax.help.SearchView</type>
    <!-- JavaHelpSearch is the directory holding the pre-built search index. -->
    <data engine="com.sun.java.help.search.DefaultSearchEngine">
    JavaHelpSearch
    </data>
  </view>
  <view>
    <name>Favorites</name>
    <label>Favorites</label>
    <type>javax.help.FavoritesView</type>
  </view>
</helpset>
{-# OPTIONS -fno-warn-missing-signatures #-}
-- | Clean out unneeded spill\/reload instrs
--
-- * Handling of join points
--
-- B1: B2:
-- ... ...
-- RELOAD SLOT(0), %r1 RELOAD SLOT(0), %r1
-- ... A ... ... B ...
-- jump B3 jump B3
--
-- B3: ... C ...
-- RELOAD SLOT(0), %r1
-- ...
--
-- the plan:
-- So long as %r1 hasn't been written to in A, B or C then we don't need the
-- reload in B3.
--
-- What we really care about here is that on the entry to B3, %r1 will always
-- have the same value that is in SLOT(0) (ie, %r1 is _valid_)
--
-- This also works if the reloads in B1\/B2 were spills instead, because
-- spilling %r1 to a slot makes that slot have the same value as %r1.
--
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning supression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
module RegAlloc.Graph.SpillClean (
cleanSpills
)
where
import RegAlloc.Liveness
import Instruction
import Reg
import BlockId
import OldCmm
import UniqSet
import UniqFM
import Unique
import State
import Outputable
import Platform
import Data.List
import Data.Maybe
import Data.Map (Map)
import Data.Set (Set)
import qualified Data.Map as Map
import qualified Data.Set as Set
--
type Slot = Int
-- | Clean out unneeded spill\/reloads from this top level thing.
--   Runs 'cleanSpin' repeatedly (from pass 0) inside the 'CleanM' state
--   monad, starting from a fresh 'initCleanS'.
cleanSpills
        :: Instruction instr
        => Platform -> LiveCmmDecl statics instr -> LiveCmmDecl statics instr

cleanSpills platform code
        = evalState (cleanSpin platform 0 code) initCleanS
-- | Do one pass of cleaning, then recurse until a fixed point is reached.
--   Each pass does a forward sweep (erasing redundant reloads) followed by a
--   backward sweep (erasing dead spills), and records how much it cleaned.
cleanSpin
        :: Instruction instr
        => Platform
        -> Int                          -- ^ pass counter, for the (disabled) trace below
        -> LiveCmmDecl statics instr
        -> CleanM (LiveCmmDecl statics instr)

{-
cleanSpin _ spinCount code
 = do   jumpValid       <- gets sJumpValid
        pprTrace "cleanSpin"
                (  int spinCount
                $$ text "--- code"
                $$ ppr code
                $$ text "--- joins"
                $$ ppr jumpValid)
         $ cleanSpin' spinCount code
-}

cleanSpin platform spinCount code
 = do
        -- init count of cleaned spills\/reloads
        modify $ \s -> s
                { sCleanedSpillsAcc     = 0
                , sCleanedReloadsAcc    = 0
                , sReloadedBy           = emptyUFM }

        code_forward    <- mapBlockTopM (cleanBlockForward platform) code
        code_backward   <- cleanTopBackward code_forward

        -- During the cleaning of each block we collected information about what regs
        -- were valid across each jump. Based on this, work out whether it will be
        -- safe to erase reloads after join points for the next pass.
        collateJoinPoints

        -- remember how many spills\/reloads we cleaned in this pass
        spills          <- gets sCleanedSpillsAcc
        reloads         <- gets sCleanedReloadsAcc
        modify $ \s -> s
                { sCleanedCount = (spills, reloads) : sCleanedCount s }

        -- if nothing was cleaned in this pass or the last one
        -- then we're done and it's time to bail out
        cleanedCount    <- gets sCleanedCount
        if take 2 cleanedCount == [(0, 0), (0, 0)]
           then return code
           -- otherwise go around again
           else cleanSpin platform (spinCount + 1) code_backward
-- | Clean one basic block with the forward pass.
--   Looks up the register\/slot associations known to hold on entry to this
--   block (collected from inward jumps by a previous pass); if the block has
--   never been seen as a jump target, nothing is assumed valid on entry.
cleanBlockForward
        :: Instruction instr
        => Platform
        -> LiveBasicBlock instr
        -> CleanM (LiveBasicBlock instr)

cleanBlockForward platform (BasicBlock blockId instrs)
 = do
        -- see if we have a valid association for the entry to this block
        jumpValid       <- gets sJumpValid

        -- fromMaybe instead of an explicit case: the original pattern
        -- (Just assoc -> assoc) shadowed the let-binding being defined,
        -- which trips -Wname-shadowing.
        let assoc       = fromMaybe emptyAssoc (lookupUFM jumpValid blockId)

        instrs_reload   <- cleanForward platform blockId assoc [] instrs
        return  $ BasicBlock blockId instrs_reload
-- | Clean out unneeded reload instructions.
--      Walking forwards across the code
--      On a reload, if we know a reg already has the same value as a slot
--      then we don't need to do the reload.
--
--   The guard cascade below is order-sensitive: the two-instruction
--   spill\/reload-join case must be tried before the general cases.
cleanForward
        :: Instruction instr
        => Platform
        -> BlockId                  -- ^ the block that we're currently in
        -> Assoc Store              -- ^ two store locations are associated if they have the same value
        -> [LiveInstr instr]        -- ^ acc
        -> [LiveInstr instr]        -- ^ instrs to clean (in backwards order)
        -> CleanM [LiveInstr instr] -- ^ cleaned instrs (in forward order)

cleanForward _ _ _ acc []
        = return acc

-- write out live range joins via spill slots to just a spill and a reg-reg move
-- hopefully the spill will be also be cleaned in the next pass
--
cleanForward platform blockId assoc acc (li1 : li2 : instrs)
        | LiveInstr (SPILL  reg1  slot1) _      <- li1
        , LiveInstr (RELOAD slot2 reg2)  _      <- li2
        , slot1 == slot2
        = do
                modify $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }
                cleanForward platform blockId assoc acc
                        (li1 : LiveInstr (mkRegRegMoveInstr platform reg1 reg2) Nothing : instrs)

cleanForward platform blockId assoc acc (li@(LiveInstr i1 _) : instrs)
        | Just (r1, r2) <- takeRegRegMoveInstr i1
        = if r1 == r2
                -- erase any left over nop reg reg moves while we're here
                -- this will also catch any nop moves that the "write out live range joins" case above
                -- happens to add
                then cleanForward platform blockId assoc acc instrs

                -- if r1 has the same value as some slots and we copy r1 to r2,
                -- then r2 is now associated with those slots instead
                else do let assoc'      = addAssoc (SReg r1) (SReg r2)
                                        $ delAssoc (SReg r2)
                                        $ assoc

                        cleanForward platform blockId assoc' (li : acc) instrs

cleanForward platform blockId assoc acc (li : instrs)

        -- update association due to the spill
        | LiveInstr (SPILL reg slot) _ <- li
        = let   assoc'  = addAssoc (SReg reg)  (SSlot slot)
                        $ delAssoc (SSlot slot)
                        $ assoc
          in    cleanForward platform blockId assoc' (li : acc) instrs

        -- clean a reload instr
        | LiveInstr (RELOAD{}) _ <- li
        = do    (assoc', mli)   <- cleanReload platform blockId assoc li
                case mli of
                        Nothing  -> cleanForward platform blockId assoc' acc instrs
                        Just li' -> cleanForward platform blockId assoc' (li' : acc) instrs

        -- remember the association over a jump
        | LiveInstr instr _     <- li
        , targets               <- jumpDestsOfInstr instr
        , not $ null targets
        = do    mapM_ (accJumpValid assoc) targets
                cleanForward platform blockId assoc (li : acc) instrs

        -- writing to a reg changes its value.
        -- NOTE(review): 'nub' is O(n^2), but 'written' is presumably the small
        -- def-list of a single instruction -- confirm before worrying about it.
        | LiveInstr instr _     <- li
        , RU _ written          <- regUsageOfInstr instr
        = let assoc'    = foldr delAssoc assoc (map SReg $ nub written)
          in  cleanForward platform blockId assoc' (li : acc) instrs
-- | Try and rewrite a reload instruction to something more pleasing.
--   Returns the updated association and 'Nothing' if the reload could be
--   erased entirely, or 'Just' a (possibly rewritten) instruction to keep.
--   Only accepts RELOAD instructions; anything else is a caller bug.
cleanReload
        :: Instruction instr
        => Platform
        -> BlockId
        -> Assoc Store
        -> LiveInstr instr
        -> CleanM (Assoc Store, Maybe (LiveInstr instr))

cleanReload platform blockId assoc li@(LiveInstr (RELOAD slot reg) _)

        -- if the reg we're reloading already has the same value as the slot
        -- then we can erase the instruction outright
        | elemAssoc (SSlot slot) (SReg reg) assoc
        = do    modify  $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }
                return  (assoc, Nothing)

        -- if we can find another reg with the same value as this slot then
        -- do a move instead of a reload.
        | Just reg2     <- findRegOfSlot assoc slot
        = do    modify $ \s -> s { sCleanedReloadsAcc = sCleanedReloadsAcc s + 1 }

                let assoc'      = addAssoc (SReg reg) (SReg reg2)
                                $ delAssoc (SReg reg)
                                $ assoc

                return  (assoc', Just $ LiveInstr (mkRegRegMoveInstr platform reg2 reg) Nothing)

        -- gotta keep this instr
        | otherwise
        = do    -- update the association
                let assoc'      = addAssoc (SReg reg) (SSlot slot)      -- doing the reload makes reg and slot the same value
                                $ delAssoc (SReg reg)                   -- reg value changes on reload
                                $ assoc

                -- remember that this block reloads from this slot
                accBlockReloadsSlot blockId slot

                return  (assoc', Just li)

cleanReload _ _ _ _
        = panic "RegSpillClean.cleanReload: unhandled instr"
-- | Clean out unneeded spill instructions.
--
--   If there were no reloads from a slot between a spill and the last one
--   then the slot was never read and we don't need the spill.
--
--      SPILL   r0 -> s1
--      RELOAD  s1 -> r2
--      SPILL   r3 -> s1        <--- don't need this spill
--      SPILL   r4 -> s1
--      RELOAD  s1 -> r5
--
--   Maintain a set of
--      "slots which were spilled to but not reloaded from yet"
--
--   Walking backwards across the code:
--    a) On a reload from a slot, remove it from the set.
--
--    a) On a spill from a slot
--       If the slot is in set then we can erase the spill,
--          because it won't be reloaded from until after the next spill.
--
--       otherwise
--          keep the spill and add the slot to the set
--
--   TODO: This is mostly inter-block
--         we should really be updating the noReloads set as we cross jumps also.
--
--   TODO: generate noReloads from liveSlotsOnEntry
--
cleanTopBackward
        :: Instruction instr
        => LiveCmmDecl statics instr
        -> CleanM (LiveCmmDecl statics instr)

cleanTopBackward cmm
 = case cmm of
        -- data sections carry no instructions, nothing to clean
        CmmData{}
         -> return cmm

        -- NOTE(review): this case is only covered when the guard matches;
        -- presumably LiveInfo always has this shape here -- confirm.
        CmmProc info label sccs
         | LiveInfo _ _ _ liveSlotsOnEntry <- info
         -> do  sccs'   <- mapM (mapSCCM (cleanBlockBackward liveSlotsOnEntry)) sccs
                return  $ CmmProc info label sccs'
-- | Run the backward spill-cleaning pass over a single basic block.
cleanBlockBackward
        :: Instruction instr
        => Map BlockId (Set Int)
        -> LiveBasicBlock instr
        -> CleanM (LiveBasicBlock instr)

cleanBlockBackward liveSlotsOnEntry (BasicBlock blockId instrs)
 = do   cleaned <- cleanBackward liveSlotsOnEntry emptyUniqSet [] instrs
        return (BasicBlock blockId cleaned)
-- | Fetch the slot-usage map from the monad state, then delegate the real
--   work to 'cleanBackward''.
cleanBackward
        :: Instruction instr
        => Map BlockId (Set Int)    -- ^ Slots live on entry to each block
        -> UniqSet Int              -- ^ slots that have been spilled, but not reloaded from
        -> [LiveInstr instr]        -- ^ acc
        -> [LiveInstr instr]        -- ^ instrs to clean (in forwards order)
        -> CleanM [LiveInstr instr] -- ^ cleaned instrs (in backwards order)

cleanBackward liveSlotsOnEntry noReloads acc lis
 = do   slotUsers <- gets sReloadedBy
        cleanBackward' liveSlotsOnEntry slotUsers noReloads acc lis
-- Worker for 'cleanBackward'.  No type signature on purpose: the module sets
-- -fno-warn-missing-signatures, and the type is determined by the wrapper.
cleanBackward' _ _ _ acc []
        = return acc

cleanBackward' liveSlotsOnEntry reloadedBy noReloads acc (li : instrs)

        -- if nothing ever reloads from this slot then we don't need the spill
        | LiveInstr (SPILL _ slot) _    <- li
        , Nothing                       <- lookupUFM reloadedBy (SSlot slot)
        = do    modify $ \s -> s { sCleanedSpillsAcc = sCleanedSpillsAcc s + 1 }
                cleanBackward liveSlotsOnEntry noReloads acc instrs

        | LiveInstr (SPILL _ slot) _    <- li
        = if elementOfUniqSet slot noReloads

           -- we can erase this spill because the slot won't be read until after the next one
           then do
                modify $ \s -> s { sCleanedSpillsAcc = sCleanedSpillsAcc s + 1 }
                cleanBackward liveSlotsOnEntry noReloads acc instrs

           else do
                -- this slot is being spilled to, but we haven't seen any reloads yet.
                let noReloads'  = addOneToUniqSet noReloads slot
                cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs

        -- if we reload from a slot then it's no longer unused
        | LiveInstr (RELOAD slot _) _   <- li
        , noReloads'                    <- delOneFromUniqSet noReloads slot
        = cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs

        -- If a slot is live in a jump target then assume it's reloaded there.
        -- TODO: A real dataflow analysis would do a better job here.
        --       If the target block _ever_ used the slot then we assume it always does,
        --       but if those reloads are cleaned the slot liveness map doesn't get updated.
        | LiveInstr instr _     <- li
        , targets               <- jumpDestsOfInstr instr
        = do
                let slotsReloadedByTargets
                        = Set.unions
                        $ catMaybes
                        $ map (flip Map.lookup liveSlotsOnEntry)
                        $ targets

                let noReloads'
                        = foldl' delOneFromUniqSet noReloads
                        $ Set.toList slotsReloadedByTargets

                cleanBackward liveSlotsOnEntry noReloads' (li : acc) instrs

        -- some other instruction
        | otherwise
        = cleanBackward liveSlotsOnEntry noReloads (li : acc) instrs
-- collateJoinPoints:
--
-- | combine the associations from all the inward control flow edges.
--   Moves the accumulated per-jump associations into 'sJumpValid',
--   intersecting the candidates for each block, and resets the accumulator.
collateJoinPoints :: CleanM ()
collateJoinPoints
 = modify $ \s -> s
        { sJumpValid    = mapUFM intersects (sJumpValidAcc s)
        , sJumpValidAcc = emptyUFM }

-- | Intersect a list of association tables; an empty list yields the
--   empty association.
intersects :: [Assoc Store] -> Assoc Store
intersects []     = emptyAssoc
intersects assocs = foldl1' intersectAssoc assocs
-- | See if we have a reg with the same value as this slot in the association table.
findRegOfSlot :: Assoc Store -> Int -> Maybe Reg
findRegOfSlot assoc slot
        -- take the transitive closure of stores associated with the slot,
        -- then pick the first real-register member, if any
        | close <- closeAssoc (SSlot slot) assoc
        , Just (SReg reg) <- find isStoreReg $ uniqSetToList close
        = Just reg

        | otherwise
        = Nothing
---------------
-- | Monad the spill cleaner runs in: plain state over 'CleanS'.
type CleanM = State CleanS

-- | State carried across the forward and backward cleaning passes.
data CleanS
        = CleanS
        { -- regs which are valid at the start of each block.
          sJumpValid           :: UniqFM (Assoc Store)

          -- collecting up what regs were valid across each jump.
          --    in the next pass we can collate these and write the results
          --    to sJumpValid.
        , sJumpValidAcc        :: UniqFM [Assoc Store]

          -- map of (slot -> blocks which reload from this slot)
          --    used to decide if whether slot spilled to will ever be
          --    reloaded from on this path.
        , sReloadedBy          :: UniqFM [BlockId]

          -- spills\/reloads cleaned each pass (latest at front)
        , sCleanedCount        :: [(Int, Int)]

          -- spills\/reloads that have been cleaned in this pass so far.
        , sCleanedSpillsAcc    :: Int
        , sCleanedReloadsAcc   :: Int }
-- | Initial cleaner state: nothing known valid, nothing reloaded,
--   no cleaning statistics yet.
initCleanS :: CleanS
initCleanS
        = CleanS
        { sJumpValid         = emptyUFM
        , sJumpValidAcc      = emptyUFM

        , sReloadedBy        = emptyUFM

        , sCleanedCount      = []

        , sCleanedSpillsAcc  = 0
        , sCleanedReloadsAcc = 0 }
-- | Remember the associations before a jump
accJumpValid :: Assoc Store -> BlockId -> CleanM ()
accJumpValid assocs target
 = modify $ \s -> s {
        sJumpValidAcc = addToUFM_C (++)
                                (sJumpValidAcc s)
                                target
                                [assocs] }

-- | Record that this block reloads from this slot; consulted later to
--   decide whether a spill to the slot is ever read back.
accBlockReloadsSlot :: BlockId -> Slot -> CleanM ()
accBlockReloadsSlot blockId slot
 = modify $ \s -> s {
        sReloadedBy = addToUFM_C (++)
                                (sReloadedBy s)
                                (SSlot slot)
                                [blockId] }
--------------
-- A store location can be a stack slot or a register
--
data Store
        = SSlot Int     -- ^ a spill slot, identified by slot number
        | SReg  Reg     -- ^ a (real) register
-- | Check whether this store location is a register (as opposed to a slot).
isStoreReg :: Store -> Bool
isStoreReg (SReg _)  = True
isStoreReg (SSlot _) = False
-- spill cleaning is only done once all virtuals have been allocated to realRegs
--
instance Uniquable Store where
    getUnique (SReg r)
        | RegReal (RealRegSingle i) <- r
        = mkRegSingleUnique i

        -- combine both members of the pair into one unique
        | RegReal (RealRegPair r1 r2) <- r
        = mkRegPairUnique (r1 * 65535 + r2)

        | otherwise
        = error "RegSpillClean.getUnique: found virtual reg during spill clean, only real regs expected."

    getUnique (SSlot i) = mkRegSubUnique i  -- [SLPJ] I hope "SubUnique" is ok
-- | Pretty-print a store location for debug output.
instance Outputable Store where
        ppr (SSlot i) = text "slot" <> int i
        ppr (SReg  r) = ppr r
--------------
-- Association graphs.
--      In the spill cleaner, two store locations are associated if they are known
--      to hold the same value.
--      Represented as an adjacency map from a store to its neighbour set.
type Assoc a = UniqFM (UniqSet a)

-- | an empty association
emptyAssoc :: Assoc a
emptyAssoc = emptyUFM
-- | Record that these two things hold the same value by inserting an
--   undirected edge: each endpoint gains the other as a neighbour.
addAssoc :: Uniquable a
         => a -> a -> Assoc a -> Assoc a

addAssoc a b m
 = addToUFM_C unionUniqSets
        (addToUFM_C unionUniqSets m a (unitUniqSet b))
        b (unitUniqSet a)
-- | delete all associations to a node
delAssoc :: (Outputable a, Uniquable a)
         => a -> Assoc a -> Assoc a
delAssoc a m
        | Just aSet <- lookupUFM m a
        , m1        <- delFromUFM m a
        -- also delete the reverse edge from each former neighbour back to 'a'.
        -- NOTE: the lambda's 'm' shadows the outer 'm'; only 'm1' flows in here.
        = foldUniqSet (\x m -> delAssoc1 x a m) m1 aSet

        | otherwise = m
-- | Delete a single association edge (a -> b); the reverse edge, if any,
--   is left untouched.
delAssoc1 :: Uniquable a
        => a -> a -> Assoc a -> Assoc a

delAssoc1 a b m
 = case lookupUFM m a of
        Just aSet -> addToUFM m a (delOneFromUniqSet aSet b)
        Nothing   -> m
-- | Check if these two things are associated, i.e. whether @b@ lies in the
--   reflexive-transitive closure of the association starting from @a@.
elemAssoc :: (Outputable a, Uniquable a)
          => a -> a -> Assoc a -> Bool

elemAssoc a b
        = elementOfUniqSet b . closeAssoc a
-- | find the refl. trans. closure of the association from this point
closeAssoc :: (Outputable a, Uniquable a)
        => a -> Assoc a -> UniqSet a

closeAssoc a assoc
 = closeAssoc' assoc emptyUniqSet (unitUniqSet a)
 where
        -- worklist algorithm: 'visited' holds the closure so far,
        -- 'toVisit' the frontier still to be expanded.
        closeAssoc' assoc visited toVisit
         = case uniqSetToList toVisit of

                -- nothing else to visit, we're done
                [] -> visited

                (x:_)
                 -- we've already seen this node
                 | elementOfUniqSet x visited
                 -> closeAssoc' assoc visited (delOneFromUniqSet toVisit x)

                 -- haven't seen this node before,
                 --     remember to visit all its neighbors
                 | otherwise
                 -> let neighbors
                         = case lookupUFM assoc x of
                                Nothing  -> emptyUniqSet
                                Just set -> set

                    in closeAssoc' assoc
                        (addOneToUniqSet visited x)
                        (unionUniqSets toVisit neighbors)
-- | Intersect two association tables, keeping only the edges that are
--   present in both.
intersectAssoc
        :: Uniquable a
        => Assoc a -> Assoc a -> Assoc a

intersectAssoc = intersectUFM_C intersectUniqSets
| mcmaniac/ghc | compiler/nativeGen/RegAlloc/Graph/SpillClean.hs | bsd-3-clause | 16,868 | 433 | 18 | 3,811 | 4,078 | 2,110 | 1,968 | 312 | 3 |
{-# LANGUAGE PatternGuards #-}
module Main where
import System.IO
import GHC
import MonadUtils
import Outputable
import Bag (filterBag,isEmptyBag)
import System.Directory (removeFile)
import System.Environment( getArgs )
-- | Regression test (T6145): compile a tiny generated module via the GHC
--   API and check that a record-construction pattern in the typechecked
--   source carries a good (real) source span.
main::IO()
main = do
    -- the module under test is written out on the fly
    let c="module Test where\ndata DataT=MkData {name :: String}\n"
    writeFile "Test.hs" c
    -- the testsuite passes the GHC libdir as the sole program argument
    [libdir] <- getArgs
    ok<- runGhc (Just libdir) $ do
        dflags <- getSessionDynFlags
        setSessionDynFlags dflags
        let mn =mkModuleName "Test"
        addTarget Target { targetId = TargetModule mn, targetAllowObjCode = True, targetContents = Nothing }
        load LoadAllTargets
        modSum <- getModSummary mn
        p <- parseModule modSum
        t <- typecheckModule p
        d <- desugarModule t
        l <- loadModule d
        let ts=typecheckedSource l
        -- liftIO (putStr (showSDocDebug (ppr ts)))
        -- succeed iff some binding satisfies 'isDataCon'
        let fs=filterBag isDataCon ts
        return $ not $ isEmptyBag fs
    removeFile "Test.hs"
    print ok
    where
      -- recurse through AbsBinds into the bindings they contain
      isDataCon (L _ (AbsBinds { abs_binds = bs }))
        = not (isEmptyBag (filterBag isDataCon bs))
      -- a FunBind whose first match pattern is a ConPatOut:
      -- succeed iff the pattern constructor's span is a good one
      isDataCon (L l (f@FunBind {}))
        | (MG (L _ (m:_)) _ _ _) <- fun_matches f,
          (L _ (c@ConPatOut{}):_)<-hsLMatchPats m,
          (L l _)<-pat_con c
        = isGoodSrcSpan l -- Check that the source location is a good one
      isDataCon _
        = False
| elieux/ghc | testsuite/tests/ghc-api/T6145.hs | bsd-3-clause | 1,641 | 0 | 16 | 643 | 452 | 220 | 232 | 39 | 3 |
-- GSoC 2015 - Haskell bindings for OpenCog.
{-# LANGUAGE GADTs #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE AutoDeriveTypeable #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE StandaloneDeriving #-}
-- | This Module defines the main data types for Haskell bindings.
module OpenCog.AtomSpace.Types (
TruthVal (..)
, AtomName (..)
, AtomType (..)
, Atom(..)
) where
-- | Atom name type.
type AtomName = String

-- | Atom type label.
type AtomType = String

-- Main general atom representation: a typed link over child atoms, or a
-- typed, named node; both carry a truth value.
data Atom = Link AtomType [Atom] TruthVal
          | Node AtomType AtomName TruthVal
    deriving (Eq,Show,Read)
--data Value = FloatValue AtomType [Double] | LinkValue AtomType [Value]

-- | 'TruthVal' represent the different types of TruthValues.
data TruthVal = SimpleTV { tvMean       :: Double
                         , tvConfidence :: Double
                         }
              | CountTV  { tvMean       :: Double
                         , tvCount      :: Double
                         , tvConfidence :: Double
                         }
              | IndefTV  { tvMean       :: Double
                         , tvL          :: Double
                         , tvU          :: Double
                         , tvConfLevel  :: Double
                         , tvDiff       :: Double
                         }
              | FuzzyTV  { tvMean       :: Double
                         , tvConfidence :: Double
                         }
              | ProbTV   { tvMean       :: Double
                         , tvCount      :: Double
                         , tvConfidence :: Double
                         }
    deriving (Show,Read,Eq)
| inflector/atomspace | opencog/haskell/OpenCog/AtomSpace/Types.hs | agpl-3.0 | 1,886 | 0 | 8 | 841 | 238 | 156 | 82 | 35 | 0 |
{-|
Module : Idris.Providers
Description : Idris' 'Type Provider' implementation.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE PatternGuards, DeriveFunctor #-}
module Idris.Providers (
providerTy
, getProvided
, Provided(..)
) where
import Idris.Core.TT
import Idris.Core.Evaluate
import Idris.AbsSyntax
import Idris.AbsSyntaxTree
import Idris.Error
-- | Wrap a type provider in the type of type providers,
--   i.e. build @Providers.Provider tm@ applied at the given location.
providerTy :: FC -> PTerm -> PTerm
providerTy fc tm
    = PApp fc (PRef fc [] $ sNS (sUN "Provider" ) ["Providers", "Prelude"]) [PExp 0 [] (sMN 0 "pvarg") tm]
-- Fully-qualified names that 'getProvided' matches against in the
-- evaluated provider result.
ioret :: Name
ioret = sUN "prim_io_return"

ermod :: Name
ermod = sNS (sUN "Error") ["Providers", "Prelude"]

prmod :: Name
prmod = sNS (sUN "Provide") ["Providers", "Prelude"]
-- | A successfully provided value; provider errors are raised in the
--   'Idris' monad instead of being represented here.
data Provided a = Provide a
  deriving (Show, Eq, Functor)
-- | Handle an error, if the type provider returned an error. Otherwise return the provided term.
getProvided :: FC -> TT Name -> Idris (Provided (TT Name))
getProvided fc tm
    -- result has the shape @prim_io_return tp (Error msg)@: raise it
    | (P _ pioret _, [tp, result]) <- unApply tm
    , (P _ nm _, [_, err]) <- unApply result
    , pioret == ioret && nm == ermod
    = case err of
        Constant (Str msg) -> ierror . At fc . ProviderError $ msg
        _ -> ifail "Internal error in type provider, non-normalised error"
    -- result has the shape @prim_io_return tp (Provide res)@: hand back res
    | (P _ pioret _, [tp, result]) <- unApply tm
    , (P _ nm _, [_, res]) <- unApply result
    , pioret == ioret && nm == prmod
    = return . Provide $ res
    | otherwise = ifail $ "Internal type provider error: result was not " ++
                          "IO (Provider a), or perhaps missing normalisation." ++
                          "Term: " ++ take 1000 (show tm)
| tpsinnem/Idris-dev | src/Idris/Providers.hs | bsd-3-clause | 1,881 | 0 | 12 | 590 | 514 | 271 | 243 | 35 | 2 |
module Opaleye.Internal.PGTypes where
import Opaleye.Internal.Column (Column(Column))
import qualified Opaleye.Internal.HaskellDB.PrimQuery as HPQ
import qualified Data.Text as SText
import qualified Data.Text.Encoding as STextEncoding
import qualified Data.Text.Lazy as LText
import qualified Data.Text.Lazy.Encoding as LTextEncoding
import qualified Data.ByteString as SByteString
import qualified Data.ByteString.Lazy as LByteString
import qualified Data.Time as Time
import qualified Data.Time.Locale.Compat as Locale
-- | Format a time value with the given format string and cast the
--   resulting SQL literal to the named PostgreSQL type.
unsafePgFormatTime :: Time.FormatTime t => HPQ.Name -> String -> t -> Column c
unsafePgFormatTime typeName formatString t =
  castToType typeName (Time.formatTime Locale.defaultTimeLocale formatString t)
-- | Lift a primitive literal into a column expression.
literalColumn :: HPQ.Literal -> Column a
literalColumn lit = Column (HPQ.ConstExpr lit)
-- | Build a column from a raw SQL string, cast to the named type.
castToType :: HPQ.Name -> String -> Column c
castToType typeName raw =
  Column (HPQ.CastExpr typeName (HPQ.ConstExpr (HPQ.OtherLit raw)))
-- | Decode a strict UTF-8 ByteString to a String.
strictDecodeUtf8 :: SByteString.ByteString -> String
strictDecodeUtf8 bs = SText.unpack (STextEncoding.decodeUtf8 bs)
-- | Decode a lazy UTF-8 ByteString to a String.
lazyDecodeUtf8 :: LByteString.ByteString -> String
lazyDecodeUtf8 bs = LText.unpack (LTextEncoding.decodeUtf8 bs)
| alanz/haskell-opaleye | src/Opaleye/Internal/PGTypes.hs | bsd-3-clause | 1,194 | 0 | 9 | 151 | 287 | 171 | 116 | 23 | 1 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="es-ES">
<title>Port Scan | ZAP Extension</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Índice</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Buscar</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/zest/src/main/javahelp/org/zaproxy/zap/extension/zest/resources/help_es_ES/helpset_es_ES.hs | apache-2.0 | 973 | 92 | 29 | 160 | 402 | 213 | 189 | -1 | -1 |
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module T5886a where
import Language.Haskell.TH hiding (Type)
import Data.Kind (Type)
-- | Class with an associated type; the splice built by 'bang' writes an
--   instance of it.
class C α where
  type AT α ∷ Type

-- | Template Haskell action producing
--   @instance C Int where type AT Int = Int@.
bang ∷ DecsQ
bang = return [InstanceD Nothing [] (AppT (ConT ''C) (ConT ''Int))
               [TySynInstD (TySynEqn Nothing (AppT (ConT ''AT) (ConT ''Int)) (ConT ''Int))]]
| sdiehl/ghc | testsuite/tests/th/T5886a.hs | bsd-3-clause | 398 | 0 | 16 | 77 | 144 | 79 | 65 | -1 | -1 |
-- BANNERSTART
-- - Copyright 2006-2008, Galois, Inc.
-- - This software is distributed under a standard, three-clause BSD license.
-- - Please see the file LICENSE, distributed with this software, for specific
-- - terms and conditions.
-- Author: Adam Wick <awick@galois.com>
-- BANNEREND
-- |Debugging support for the HALVM.
--
-- This module currently only exports one routine, which allows you to
-- write out information on the Xen emergency console. It appears that
-- this console is only lightly buffered (it may be newline buffered,
-- but we haven't experienced a emergency console flushing problem in
-- practice), and thus it's helpful in debugging.
--
-- The default Xen kernel does not support the emergency console in a
-- very useful way. It largely makes these writes into NOOPS. To see
-- output, you need to make Xen using the "verbose=y" flag for both the
-- build *and* the install. Thus, build with "make verbose=y world" and
-- then install with "make verbose=y install".
module Hypervisor.Debug where
import Foreign.C.String
import Foreign.Ptr
-- |Write on the Xen emergency console.
writeDebugConsole :: String -> IO ()
writeDebugConsole str =
  -- NOTE(review): the C side is passed the Haskell character count, while
  -- withCString marshals via the locale encoding; the two can differ for
  -- non-ASCII text — assumed ASCII-only debug output, TODO confirm.
  withCString str $ \ cstr -> econsoleMsg (length str) cstr

-- C runtime entry point: write the given number of bytes from the buffer.
foreign import ccall unsafe "runtime_reqs.h runtime_write"
  econsoleMsg :: Int -> Ptr a -> IO ()
| thumphries/HaLVM | src/HALVMCore/Hypervisor/Debug.hs | bsd-3-clause | 1,333 | 0 | 9 | 224 | 113 | 69 | 44 | 8 | 1 |
module M2 where
-- | Simple binary tree: empty, or a value with left and right subtrees.
data BTree a
    = Empty | T a (BTree a) (BTree a) deriving Show
-- | Build a tree by inserting the list elements one at a time;
--   the monadic wrapping mirrors 'insert'.
buildtree :: (Monad m, Ord a) => [a] -> m (BTree a)
buildtree [] = return Empty
buildtree (x : xs)
    = do rest <- buildtree xs
         insert x rest
-- | Insert a value into the tree (monadic).
--
-- NOTE(review): this is generated refactoring-test output. The pattern
-- @T val Empty Empty@ shadows the argument @val@, so @val == val@ is
-- trivially 'True' and single-node trees always collapse to 'Empty';
-- every other shape but @T _ (T _ Empty Empty) Empty@ falls through to
-- the catch-all that returns the tree unchanged.
insert
    :: (Monad m, Ord a) => a -> (BTree a) -> m (BTree a)
insert val v2
    = do case v2 of
             T val Empty Empty
                 | val == val -> return Empty
                 | otherwise ->
                     return (T val Empty (T val Empty Empty))
             T val (T val2 Empty Empty) Empty -> return Empty
             _ -> return v2
main :: IO ()
main
    -- NOTE(review): binding a tuple-and-as-pattern against 'buildtree'
    -- (which yields a 'BTree', not a pair) looks type-incorrect; this file
    -- is generated refactoring-test output — confirm before reuse.
    = do (a, n@(T val Empty Empty)) <- buildtree
                                           [3, 1, 2]
         putStrLn $ (show n)
| SAdams601/HaRe | old/testing/unfoldAsPatterns/M2AST.hs | bsd-3-clause | 810 | 0 | 15 | 354 | 350 | 174 | 176 | 24 | 3 |
module Syntax where
import Data.List
------------------------------------------------------------------
-- Abstract syntax -----------------------------------------------
------------------------------------------------------------------
-- info for all primops; the totality of the info in primops.txt(.pp)
data Info
   = Info [Option] [Entry]   -- defaults, primops
   deriving Show
-- info for one primop (or section heading) from primops.txt(.pp)
data Entry
    = PrimOpSpec { cons :: String,       -- PrimOp name
                   name :: String,       -- name in prog text
                   ty :: Ty,             -- type
                   cat :: Category,      -- category
                   desc :: String,       -- description
                   opts :: [Option] }    -- default overrides
    | PrimVecOpSpec { cons :: String,    -- PrimOp name
                      name :: String,    -- name in prog text
                      prefix :: String,  -- prefix for generated names
                      veclen :: Int,     -- vector length
                      elemrep :: String, -- vector ElemRep
                      ty :: Ty,          -- type
                      cat :: Category,   -- category
                      desc :: String,    -- description
                      opts :: [Option] } -- default overrides
    | PseudoOpSpec { name :: String,     -- name in prog text
                     ty :: Ty,           -- type
                     desc :: String,     -- description
                     opts :: [Option] }  -- default overrides
    | PrimTypeSpec { ty :: Ty,           -- type
                     desc :: String,     -- description
                     opts :: [Option] }  -- default overrides
    | PrimVecTypeSpec { ty :: Ty,        -- type
                        prefix :: String,  -- prefix for generated names
                        veclen :: Int,     -- vector length
                        elemrep :: String, -- vector ElemRep
                        desc :: String,    -- description
                        opts :: [Option] } -- default overrides
    | Section { title :: String,         -- section title
                desc :: String }         -- description
    deriving Show
-- | Is this entry a (scalar) primop specification?
--   Uses record-pattern syntax like 'is_primtype' so the match stays
--   correct if fields are ever added to 'PrimOpSpec' (the old positional
--   wildcard pattern would silently stop compiling/matching).
is_primop :: Entry -> Bool
is_primop (PrimOpSpec {}) = True
is_primop _ = False
-- | Is this entry a primitive type specification?
is_primtype :: Entry -> Bool
is_primtype e = case e of
                  PrimTypeSpec {} -> True
                  _               -> False
-- a binding of property to value
data Option
   = OptionFalse  String                -- name = False
   | OptionTrue   String                -- name = True
   | OptionString String String         -- name = { ... unparsed stuff ... }
   | OptionInteger String Int           -- name = <int>
   | OptionVector [(String,String,Int)] -- name = [(,...),...]
   | OptionFixity (Maybe Fixity)        -- fixity = infix{,l,r} <int> | Nothing
   deriving Show
-- categorises primops by argument/result shape (checked by 'sane_ty')
data Category
   = Dyadic | Monadic | Compare | GenPrimOp
   deriving Show
-- types, as parsed from primops.txt(.pp)
data Ty
   = TyF    Ty Ty           -- function arrow
   | TyC    Ty Ty           -- We only allow one constraint, keeps the grammar simpler
   | TyApp  TyCon [Ty]
   | TyVar  TyVar
   | TyUTup [Ty]            -- unboxed tuples; just a TyCon really,
                            -- but convenient like this
   deriving (Eq,Show)
-- type variables are plain names
type TyVar = String

-- type constructors; the SCALAR/VECTOR/VECTUPLE placeholders are expanded
-- per vector width when generating code
data TyCon = TyCon String
           | SCALAR
           | VECTOR
           | VECTUPLE
           | VecTyCon String String
  deriving (Eq, Ord)
-- | Render a type constructor the way it appears in primops.txt.
instance Show TyCon where
    show (TyCon tc)      = tc
    show SCALAR          = "SCALAR"
    show VECTOR          = "VECTOR"
    show VECTUPLE        = "VECTUPLE"
    show (VecTyCon tc _) = tc
-- Follow definitions of Fixity and FixityDirection in GHC

-- The SourceText exists so that it matches the SourceText field in
-- BasicTypes.Fixity
data Fixity = Fixity SourceText Int FixityDirection
  deriving (Eq, Show)

data FixityDirection = InfixN | InfixL | InfixR
  deriving (Eq, Show)

data SourceText = SourceText String
                | NoSourceText
  deriving (Eq,Show)
------------------------------------------------------------------
-- Sanity checking -----------------------------------------------
------------------------------------------------------------------
{- Do some simple sanity checks:
* all the default field names are unique
* for each PrimOpSpec, all override field names are unique
* for each PrimOpSpec, all overriden field names
have a corresponding default value
* that primop types correspond in certain ways to the
Category: eg if Comparison, the type must be of the form
T -> T -> Bool.
Dies with "error" if there's a problem, else returns ().
-}
-- | Force every unit in the list (left to right), then return @x@.
myseqAll :: [()] -> a -> a
myseqAll us x = foldr seq x us
-- | Check that the default attribute names are unique, then sanity-check
--   every primop entry; dies with 'error' on failure, else returns ().
sanityTop :: Info -> ()
sanityTop (Info defs entries)
 = let opt_names = map get_attrib_name defs
       primops   = filter is_primop entries
   in
   if length opt_names /= length (nub opt_names)
   then error ("non-unique default attribute names: " ++ show opt_names ++ "\n")
   else myseqAll (map (sanityPrimOp opt_names) primops) ()
-- | Check one primop entry: its override names must be unique and each must
--   have a default, and its type must fit its declared category.
sanityPrimOp :: [String] -> Entry -> ()
sanityPrimOp def_names p
 = let p_names = map get_attrib_name (opts p)
       p_names_ok
          = length p_names == length (nub p_names)
            && all (`elem` def_names) p_names
       ty_ok = sane_ty (cat p) (ty p)
   in
   if not p_names_ok
   then error ("attribute names are non-unique or have no default in\n" ++
               "info for primop " ++ cons p ++ "\n")
   else
   if not ty_ok
   then error ("type of primop " ++ cons p ++ " doesn't make sense w.r.t" ++
               " category " ++ show (cat p) ++ "\n")
   else ()
-- | Does the type shape fit the category?
--   Compare: T -> T -> Int#;  Monadic: T -> T;  Dyadic: T -> T -> T;
--   GenPrimOp: anything goes.
sane_ty :: Category -> Ty -> Bool
sane_ty Compare (TyF t1 (TyF t2 td))
   | t1 == t2 && td == TyApp (TyCon "Int#") []  = True
sane_ty Monadic (TyF t1 td)
   | t1 == td  = True
sane_ty Dyadic (TyF t1 (TyF t2 td))
   | t1 == td && t2 == td  = True
sane_ty GenPrimOp _
   = True
sane_ty _ _
   = False
-- | The property name an option binds.
get_attrib_name :: Option -> String
get_attrib_name opt
 = case opt of
        OptionFalse   nm   -> nm
        OptionTrue    nm   -> nm
        OptionString  nm _ -> nm
        OptionInteger nm _ -> nm
        OptionVector  _    -> "vector"
        OptionFixity  _    -> "fixity"
-- | Find the option binding the given property name, if any.
--   Uses 'Data.List.find' (imported wholesale at the top of this module)
--   instead of the former hand-rolled recursion; same first-match result.
lookup_attrib :: String -> [Option] -> Maybe Option
lookup_attrib nm = find (\a -> get_attrib_name a == nm)
-- | Does this entry carry a \"vector\" attribute?
is_vector :: Entry -> Bool
is_vector i
 = case lookup_attrib "vector" (opts i) of
        Just _  -> True
        Nothing -> False
| ezyang/ghc | utils/genprimopcode/Syntax.hs | bsd-3-clause | 6,661 | 0 | 16 | 2,202 | 1,428 | 801 | 627 | 131 | 3 |
import System.Environment
import System.IO
-- | This program is driven via @ghc -e foo@; 'main' must never run.
main :: IO ()
main = error "main got called"

-- | Entry point invoked by the test driver: print a line, echo the
--   program arguments, flush, then exit deliberately via 'error'.
foo :: IO ()
foo = do putStrLn "This is foo"
         getArgs >>= print
         hFlush stdout
         error "foo"
| siddhanathan/ghc | testsuite/tests/ghc-e/should_run/ghc-e005.hs | bsd-3-clause | 208 | 0 | 7 | 63 | 69 | 32 | 37 | 9 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Data.Function (on)
import Data.List (foldl', maximumBy, minimumBy, sortBy)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Ord (comparing)
-- | Column index within a transmitted message.
type Position = Int

-- | Number of times a character was seen.
type Occurence = Int

-- | Per-column character counts.
type Frequency =
  Map Char Occurence

-- | Counts for every column of the received messages.
type Received =
  Map Position Frequency
-- | Count one character observed at one column: merge a singleton
--   frequency table into the accumulated counts.
addOccurence :: Received -> (Position, Char) -> Received
addOccurence acc (pos, c) =
  M.insertWith (M.unionWith (+)) pos (M.singleton c 1) acc
-- | No observations yet.
empty :: Received
empty = M.empty
-- | Fold one message into the counts, pairing each character with its
--   column index.
processLine :: Received -> String -> Received
processLine acc line =
  foldl' addOccurence acc (zip [0..] line)
-- | Fold every received message into a single table of counts.
processLines :: [String] -> Received
processLines =
  foldl' processLine empty
-- | The character with the highest count in a column.
--   Replaces the former sort-then-reverse-then-head pipeline with a single
--   O(n) 'maximumBy' pass.  Ties resolve to the same element as before:
--   'maximumBy' keeps the last maximal entry in key order, which is what
--   @head . reverse . sortBy@ over a stable sort produced.
--   Still partial on an empty column, like the original.
mostOccuringChar :: Frequency -> Char
mostOccuringChar =
  fst . maximumBy (comparing snd) . M.toList
-- | The character with the lowest count in a column.
--   Single O(n) 'minimumBy' pass instead of sorting; 'minimumBy' keeps the
--   first minimal entry in key order, matching the former stable
--   @head . sortBy@.  Still partial on an empty column, like the original.
leastOccuringChar :: Frequency -> Char
leastOccuringChar =
  fst . minimumBy (comparing snd) . M.toList
-- | Read the message column by column; the most frequent character wins.
decodeMessage :: Received -> String
decodeMessage received =
  [ mostOccuringChar freq | freq <- M.elems received ]
-- | Part-two variant: the least frequent character wins per column.
decodeMessage' :: Received -> String
decodeMessage' received =
  [ leastOccuringChar freq | freq <- M.elems received ]
-- | Puzzle input, one received message per line.
input :: IO [String]
input = lines <$> readFile "input.txt"
-- | Solve both parts of the puzzle over the file's messages.
main :: IO ()
main = do
  received <- processLines <$> input
  putStr "Part1: "
  print $ decodeMessage received
  putStr "Part2: "
  print $ decodeMessage' received
  putStrLn "all done"
| CarstenKoenig/AdventOfCode2016 | Day6/Main.hs | mit | 1,486 | 0 | 12 | 312 | 499 | 266 | 233 | 50 | 2 |
{-# LANGUAGE OverloadedStrings #-}
module LLVM.Examples.Identity (identity) where
import qualified LLVM.General.AST as AST
import LLVM.Gen
import Control.Monad
import Control.Lens ((.=))
import LLVM.General.AST.Type (double)
-- | LLVM module named \"identity\" defining @id : double -> double@
--   that returns its argument unchanged.
identity :: AST.Module
identity = mapLLVM . runLLVM defaultModule $ do
  moduleName .= "identity"
  defn double "id" [(double, "a", 8)] $ do
    a <- getvar' "a"
    ret a
| AKST/scheme.llvm | src/LLVM/Examples/Identity.hs | mit | 406 | 0 | 12 | 72 | 128 | 73 | 55 | 13 | 1 |
{-# OPTIONS_GHC -F -pgmF htfpp #-}
{-# LANGUAGE StandaloneDeriving #-}
module Control.Equation.Types.Test where
import Control.Equation.Types
import Test.Framework
import Test.HUnit
-- Not useful in general case
instance Eq value => Eq (Term world value) where
    -- NOTE(review): only constants ever compare equal; anything involving
    -- a 'Variable' is 'False', so this Eq is not reflexive for variables.
    -- Test-only convenience.
    (Constant x) == (Constant y) = x == y
    _ == _ = False

deriving instance Eq value => Eq (Expression world value)
deriving instance Eq value => Eq (Equation world value)

-- Show for test diagnostics; a variable's second field is not showable
-- and is rendered as the placeholder "v".
instance Show value => Show (Term world value) where
    show (Constant c) = "Constant " ++ show c
    show (Variable c _) = "Variable " ++ show c ++ " v"

deriving instance Show value => Show (Expression world value)
deriving instance Show value => Show (Equation world value)
-- | Numeric literals, (+) and (=:=) must build already-simplified
--   expressions and equations.
test_operators = do
    assertEqual
        (1 :: Expression Int Int)
        (Expression [ Constant 1
                    ])
    assertEqual
        (1 + 2 :: Expression Int Int)
        (Expression [ Constant 3
                    ])
    -- an equation is stored as (lhs - rhs) reduced to zero
    assertEqual
        (1 + 2 =:= 3 :: Equation Int Int)
        (Equation $ Expression [ Constant 0
                               ])
-- | 'simplify' leaves an already-minimal expression untouched.
test_simplify = do
    assertEqual
        (Expression [Constant 1])
        (simplify $ Expression [Constant 1])
| koterpillar/equations | testsuite/Control/Equation/Types/Test.hs | mit | 1,208 | 0 | 12 | 347 | 406 | 202 | 204 | 30 | 1 |
-- | Data types and functions for rendering generated documentation from
-- PureScript code, in a variety of formats.
module Language.PureScript.Docs (
module Docs
) where
import Language.PureScript.Docs.Types as Docs
import Language.PureScript.Docs.RenderedCode.Types as Docs
import Language.PureScript.Docs.RenderedCode.Render as Docs
import Language.PureScript.Docs.Convert as Docs
import Language.PureScript.Docs.Render as Docs
import Language.PureScript.Docs.ParseAndDesugar as Docs
| michaelficarra/purescript | src/Language/PureScript/Docs.hs | mit | 493 | 0 | 4 | 54 | 71 | 55 | 16 | 8 | 0 |
module Network.BitFunctor.Consensus.Difficulty where
import Blockchain (block)
-- | Base target for the genesis block.
--   NOTE(review): 'maxHit', 'goalBlockTime' and 'systemBalance' are neither
--   defined in this module nor in its sole import list — presumably exported
--   alongside 'block'; confirm the imports.
initialBaseTarget :: Integer
initialBaseTarget = div maxHit (2*goalBlockTime*(fromIntegral systemBalance))

-- | Upper bound for the adaptive base target.
maxBaseTarget :: Integer
maxBaseTarget = initialBaseTarget * (fromIntegral systemBalance)
--correct this!
-- | Difficulty contribution of a base target: inversely proportional
--   to it (placeholder formula, per the note above).
difficultyFunction :: Integer -> Double
difficultyFunction target = 2.0e9 / fromIntegral target
-- | Total difficulty of a chain: per-block difficulties accumulated
--   left to right, as in the original fold.
cumulativeDifficulty :: BlockChain -> Double
cumulativeDifficulty = foldl addBlock 0
  where addBlock acc blk = acc + difficultyFunction (baseTarget blk)
-- | Difficulty recorded on the node's current best block.
cumulativeNodeDifficulty :: Node -> Double
cumulativeNodeDifficulty = totalDifficulty . bestBlock . localView
-- | Retarget: scale the current base target by the ratio of elapsed to goal
--   block time, clamped to [max(current\/2, 1), min(2*current, maxBaseTarget)].
--   NOTE(review): @time - timestamp ...@ subtracts two 'UTCTime' values,
--   which has no 'Num' instance in the standard time library — this module
--   appears to rely on an orphan instance or does not compile as-is; confirm.
nextBaseTarget :: LocalState -> UTCTime -> Integer
nextBaseTarget state time = min (max mint candidate) maxt
    where
        mint = max (currentt `div` 2) 1
        maxt = min (2 * currentt) maxBaseTarget
        currentt = baseTarget $ block state Head
        candidate = currentt * timespan `div` goalBlockTime
        timespan = toInteger (time - (timestamp $ block state Head))
| BitFunctor/bitfunctor | src/Network/BitFunctor/Consensus/Difficulty.hs | mit | 1,125 | 0 | 12 | 272 | 302 | 162 | 140 | 19 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-|
Module : Language.LSP.Test
Description : A functional testing framework for LSP servers.
Maintainer : luke_lau@icloud.com
Stability : experimental
Portability : non-portable
Provides the framework to start functionally testing
<https://github.com/Microsoft/language-server-protocol Language Server Protocol servers>.
You should import "Language.LSP.Types" alongside this.
-}
module Language.LSP.Test
(
-- * Sessions
Session
, runSession
, runSessionWithConfig
, runSessionWithHandles
-- ** Config
, SessionConfig(..)
, defaultConfig
, C.fullCaps
-- ** Exceptions
, module Language.LSP.Test.Exceptions
, withTimeout
-- * Sending
, request
, request_
, sendRequest
, sendNotification
, sendResponse
-- * Receiving
, module Language.LSP.Test.Parsing
-- * Utilities
-- | Quick helper functions for common tasks.
-- ** Initialization
, initializeResponse
-- ** Documents
, createDoc
, openDoc
, closeDoc
, changeDoc
, documentContents
, getDocumentEdit
, getDocUri
, getVersionedDoc
-- ** Symbols
, getDocumentSymbols
-- ** Diagnostics
, waitForDiagnostics
, waitForDiagnosticsSource
, noDiagnostics
, getCurrentDiagnostics
, getIncompleteProgressSessions
-- ** Commands
, executeCommand
-- ** Code Actions
, getCodeActions
, getAllCodeActions
, executeCodeAction
-- ** Completions
, getCompletions
-- ** References
, getReferences
-- ** Definitions
, getDeclarations
, getDefinitions
, getTypeDefinitions
, getImplementations
-- ** Renaming
, rename
-- ** Hover
, getHover
-- ** Highlights
, getHighlights
-- ** Formatting
, formatDoc
, formatRange
-- ** Edits
, applyEdit
-- ** Code lenses
, getCodeLenses
-- ** Call hierarchy
, prepareCallHierarchy
, incomingCalls
, outgoingCalls
-- ** SemanticTokens
, getSemanticTokens
-- ** Capabilities
, getRegisteredCapabilities
) where
import Control.Applicative.Combinators
import Control.Concurrent
import Control.Monad
import Control.Monad.IO.Class
import Control.Exception
import Control.Lens hiding ((.=), List, Empty)
import qualified Data.Map.Strict as Map
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Aeson
import Data.Default
import qualified Data.HashMap.Strict as HashMap
import Data.List
import Data.Maybe
import Language.LSP.Types
import Language.LSP.Types.Lens hiding
(id, capabilities, message, executeCommand, applyEdit, rename)
import qualified Language.LSP.Types.Lens as LSP
import qualified Language.LSP.Types.Capabilities as C
import Language.LSP.VFS
import Language.LSP.Test.Compat
import Language.LSP.Test.Decoding
import Language.LSP.Test.Exceptions
import Language.LSP.Test.Parsing
import Language.LSP.Test.Session
import Language.LSP.Test.Server
import System.Environment
import System.IO
import System.Directory
import System.FilePath
import System.Process (ProcessHandle)
import qualified System.FilePath.Glob as Glob
-- | Starts a new session.
--
-- > runSession "hie" fullCaps "path/to/root/dir" $ do
-- >   doc <- openDoc "Desktop/simple.hs" "haskell"
-- >   diags <- waitForDiagnostics
-- >   let pos = Position 12 5
-- >       params = TextDocumentPositionParams doc
-- >   hover <- request STextdocumentHover params
runSession :: String -- ^ The command to run the server.
           -> C.ClientCapabilities -- ^ The capabilities that the client should declare.
           -> FilePath -- ^ The filepath to the root directory for the session.
           -> Session a -- ^ The session to run.
           -> IO a
runSession = runSessionWithConfig def
-- | Starts a new session with a custom configuration.
runSessionWithConfig :: SessionConfig -- ^ Configuration options for the session.
                     -> String -- ^ The command to run the server.
                     -> C.ClientCapabilities -- ^ The capabilities that the client should declare.
                     -> FilePath -- ^ The filepath to the root directory for the session.
                     -> Session a -- ^ The session to run.
                     -> IO a
runSessionWithConfig config' serverExe caps rootDir session = do
  -- environment variables may override parts of the supplied config
  config <- envOverrideConfig config'
  withServer serverExe (logStdErr config) $ \serverIn serverOut serverProc ->
    runSessionWithHandles' (Just serverProc) serverIn serverOut config caps rootDir session
-- | Starts a new session, using the specified handles to communicate with the
-- server. You can use this to host the server within the same process.
-- An example with lsp might look like:
--
-- > (hinRead, hinWrite) <- createPipe
-- > (houtRead, houtWrite) <- createPipe
-- >
-- > forkIO $ void $ runServerWithHandles hinRead houtWrite serverDefinition
-- > runSessionWithHandles hinWrite houtRead defaultConfig fullCaps "." $ do
-- >   -- ...
runSessionWithHandles :: Handle -- ^ The input handle
                      -> Handle -- ^ The output handle
                      -> SessionConfig
                      -> C.ClientCapabilities -- ^ The capabilities that the client should declare.
                      -> FilePath -- ^ The filepath to the root directory for the session.
                      -> Session a -- ^ The session to run.
                      -> IO a
-- no external server process: the server lives in this process
runSessionWithHandles = runSessionWithHandles' Nothing
-- | Worker shared by the public runners: starts a session over the given
-- handles, wrapping the user 'Session' in the initialize/shutdown protocol.
runSessionWithHandles' :: Maybe ProcessHandle
                       -> Handle -- ^ The input handle
                       -> Handle -- ^ The output handle
                       -> SessionConfig
                       -> C.ClientCapabilities -- ^ The capabilities that the client should declare.
                       -> FilePath -- ^ The filepath to the root directory for the session.
                       -> Session a -- ^ The session to run.
                       -> IO a
runSessionWithHandles' serverProc serverIn serverOut config' caps rootDir session = do
  pid <- getCurrentProcessID
  absRootDir <- canonicalizePath rootDir
  config <- envOverrideConfig config'
  let initializeParams = InitializeParams Nothing
                                          -- Narrowing to Int32 here, but it's unlikely that a pid will
                                          -- be outside the range
                                          (Just $ fromIntegral pid)
                                          (Just lspTestClientInfo)
                                          (Just $ T.pack absRootDir)
                                          (Just $ filePathToUri absRootDir)
                                          (lspConfig config')
                                          caps
                                          (Just TraceOff)
                                          (List <$> initialWorkspaceFolders config)
  runSession' serverIn serverOut serverProc listenServer config caps rootDir exitServer $ do
    -- Wrap the session around initialize and shutdown calls
    initReqId <- sendRequest SInitialize initializeParams
    -- Because messages can be sent in between the request and response,
    -- collect them and then...
    (inBetween, initRspMsg) <- manyTill_ anyMessage (responseForId SInitialize initReqId)
    case initRspMsg ^. LSP.result of
      Left error -> liftIO $ putStrLn ("Error while initializing: " ++ show error)
      Right _ -> pure ()
    -- Stash the initialize response so 'initializeResponse' can replay it.
    initRspVar <- initRsp <$> ask
    liftIO $ putMVar initRspVar initRspMsg
    sendNotification SInitialized (Just InitializedParams)
    case lspConfig config of
      Just cfg -> sendNotification SWorkspaceDidChangeConfiguration (DidChangeConfigurationParams cfg)
      Nothing -> return ()
    -- ... relay them back to the user Session so they can match on them!
    -- As long as they are allowed.
    forM_ inBetween checkLegalBetweenMessage
    msgChan <- asks messageChan
    liftIO $ writeList2Chan msgChan (ServerMessage <$> inBetween)
    -- Run the actual test
    session
  where
  -- | Asks the server to shutdown and exit politely
  exitServer :: Session ()
  exitServer = request_ SShutdown Empty >> sendNotification SExit Empty
  -- | Listens to the server output until the shutdown ack,
  -- makes sure it matches the record and signals any semaphores
  listenServer :: Handle -> SessionContext -> IO ()
  listenServer serverOut context = do
    msgBytes <- getNextMessage serverOut
    msg <- modifyMVar (requestMap context) $ \reqMap ->
      pure $ decodeFromServerMsg reqMap msgBytes
    writeChan (messageChan context) (ServerMessage msg)
    case msg of
      -- Stop listening once the shutdown request has been acknowledged.
      (FromServerRsp SShutdown _) -> return ()
      _ -> listenServer serverOut context
  -- | Is this message allowed to be sent by the server between the initialize
  -- request and response?
  -- https://microsoft.github.io/language-server-protocol/specifications/specification-3-15/#initialize
  checkLegalBetweenMessage :: FromServerMessage -> Session ()
  checkLegalBetweenMessage (FromServerMess SWindowShowMessage _) = pure ()
  checkLegalBetweenMessage (FromServerMess SWindowLogMessage _) = pure ()
  checkLegalBetweenMessage (FromServerMess STelemetryEvent _) = pure ()
  checkLegalBetweenMessage (FromServerMess SWindowShowMessageRequest _) = pure ()
  checkLegalBetweenMessage msg = throw (IllegalInitSequenceMessage msg)
-- | Check environment variables to override the config
-- ("0" means False, any other value means True).
envOverrideConfig :: SessionConfig -> IO SessionConfig
envOverrideConfig cfg = do
  logMessages' <- fromMaybe (logMessages cfg) <$> checkEnv "LSP_TEST_LOG_MESSAGES"
  logStdErr' <- fromMaybe (logStdErr cfg) <$> checkEnv "LSP_TEST_LOG_STDERR"
  return $ cfg { logMessages = logMessages', logStdErr = logStdErr' }
  where checkEnv :: String -> IO (Maybe Bool)
        checkEnv s = fmap convertVal <$> lookupEnv s
        convertVal "0" = False
        convertVal _ = True
-- | The current text contents of a document, as held in the session's
-- virtual file system. The document must have been opened in this session.
documentContents :: TextDocumentIdentifier -> Session T.Text
documentContents doc = do
  virtualFiles <- vfsMap . vfs <$> get
  let nuri = toNormalizedUri (doc ^. uri)
  -- NOTE(review): Map.! errors if the doc was never opened — TODO confirm
  -- callers always open the document first.
  pure (virtualFileText (virtualFiles Map.! nuri))
-- | Parses an ApplyEditRequest, checks that it is for the passed document
-- and returns the new content
getDocumentEdit :: TextDocumentIdentifier -> Session T.Text
getDocumentEdit doc = do
req <- message SWorkspaceApplyEdit
unless (checkDocumentChanges req || checkChanges req) $
liftIO $ throw (IncorrectApplyEditRequest (show req))
documentContents doc
where
checkDocumentChanges req =
let changes = req ^. params . edit . documentChanges
maybeDocs = fmap (fmap documentChangeUri) changes
in case maybeDocs of
Just docs -> (doc ^. uri) `elem` docs
Nothing -> False
checkChanges req =
let mMap = req ^. params . edit . changes
in maybe False (HashMap.member (doc ^. uri)) mMap
-- | Sends a request to the server and waits for its response.
-- Will skip any messages in between the request and the response
-- @
-- rsp <- request STextDocumentDocumentSymbol params
-- @
-- Note: will skip any messages in between the request and the response.
request :: SClientMethod m -> MessageParams m -> Session (ResponseMessage m)
request m = sendRequest m >=> skipManyTill anyMessage . responseForId m
-- | The same as 'sendRequest', but discard the response.
request_ :: SClientMethod (m :: Method FromClient Request) -> MessageParams m -> Session ()
request_ p = void . request p
-- | Sends a request to the server. Unlike 'request', this doesn't wait for the response.
sendRequest
  :: SClientMethod m -- ^ The request method.
  -> MessageParams m -- ^ The request parameters.
  -> Session (LspId m) -- ^ The id of the request that was sent.
sendRequest method params = do
  -- Allocate the next request id from the session state.
  idn <- curReqId <$> get
  modify $ \c -> c { curReqId = idn+1 }
  let id = IdInt idn -- (shadows Prelude.id; kept as-is)
  let mess = RequestMessage "2.0" id method params
  -- Update the request map
  -- NOTE(review): 'fromJust' assumes the freshly allocated id is not already
  -- present in the map — confirm 'updateRequestMap' only yields Nothing on a
  -- duplicate id.
  reqMap <- requestMap <$> ask
  liftIO $ modifyMVar_ reqMap $
    \r -> return $ fromJust $ updateRequestMap r id method
  -- Send the bare request or its 'ReqMess' wrapping, depending on how
  -- 'splitClientMethod' classifies the method; the lazy ~() bind keeps both
  -- GADT branches at the unit type.
  ~() <- case splitClientMethod method of
    IsClientReq -> sendMessage mess
    IsClientEither -> sendMessage $ ReqMess mess
  return id
-- | Sends a notification to the server.
sendNotification :: SClientMethod (m :: Method FromClient Notification) -- ^ The notification method.
-> MessageParams m -- ^ The notification parameters.
-> Session ()
-- Open a virtual file if we send a did open text document notification
sendNotification STextDocumentDidOpen params = do
let n = NotificationMessage "2.0" STextDocumentDidOpen params
oldVFS <- vfs <$> get
let (newVFS,_) = openVFS oldVFS n
modify (\s -> s { vfs = newVFS })
sendMessage n
-- Close a virtual file if we send a close text document notification
sendNotification STextDocumentDidClose params = do
let n = NotificationMessage "2.0" STextDocumentDidClose params
oldVFS <- vfs <$> get
let (newVFS,_) = closeVFS oldVFS n
modify (\s -> s { vfs = newVFS })
sendMessage n
sendNotification STextDocumentDidChange params = do
let n = NotificationMessage "2.0" STextDocumentDidChange params
oldVFS <- vfs <$> get
let (newVFS,_) = changeFromClientVFS oldVFS n
modify (\s -> s { vfs = newVFS })
sendMessage n
sendNotification method params =
case splitClientMethod method of
IsClientNot -> sendMessage (NotificationMessage "2.0" method params)
IsClientEither -> sendMessage (NotMess $ NotificationMessage "2.0" method params)
-- | Sends a response to the server.
sendResponse :: ToJSON (ResponseResult m) => ResponseMessage m -> Session ()
sendResponse = sendMessage
-- | Returns the initialize response that was received from the server.
-- The initialize requests and responses are not included the session,
-- so if you need to test it use this.
initializeResponse :: Session (ResponseMessage Initialize)
initializeResponse = ask >>= (liftIO . readMVar) . initRsp
-- | /Creates/ a new text document. This is different from 'openDoc'
-- as it sends a workspace/didChangeWatchedFiles notification letting the server
-- know that a file was created within the workspace, __provided that the server
-- has registered for it__, and the file matches any patterns the server
-- registered for.
-- It /does not/ actually create a file on disk, but is useful for convincing
-- the server that one does exist.
--
-- @since 11.0.0.0
createDoc :: FilePath -- ^ The path to the document to open, __relative to the root directory__.
-> T.Text -- ^ The text document's language identifier, e.g. @"haskell"@.
-> T.Text -- ^ The content of the text document to create.
-> Session TextDocumentIdentifier -- ^ The identifier of the document just created.
createDoc file languageId contents = do
dynCaps <- curDynCaps <$> get
rootDir <- asks rootDir
caps <- asks sessionCapabilities
absFile <- liftIO $ canonicalizePath (rootDir </> file)
let pred :: SomeRegistration -> [Registration WorkspaceDidChangeWatchedFiles]
pred (SomeRegistration r@(Registration _ SWorkspaceDidChangeWatchedFiles _)) = [r]
pred _ = mempty
regs = concatMap pred $ Map.elems dynCaps
watchHits :: FileSystemWatcher -> Bool
watchHits (FileSystemWatcher pattern kind) =
-- If WatchKind is excluded, defaults to all true as per spec
fileMatches (T.unpack pattern) && createHits (fromMaybe (WatchKind True True True) kind)
fileMatches pattern = Glob.match (Glob.compile pattern) relOrAbs
-- If the pattern is absolute then match against the absolute fp
where relOrAbs
| isAbsolute pattern = absFile
| otherwise = file
createHits (WatchKind create _ _) = create
regHits :: Registration WorkspaceDidChangeWatchedFiles -> Bool
regHits reg = foldl' (\acc w -> acc || watchHits w) False (reg ^. registerOptions . watchers)
clientCapsSupports =
caps ^? workspace . _Just . didChangeWatchedFiles . _Just . dynamicRegistration . _Just
== Just True
shouldSend = clientCapsSupports && foldl' (\acc r -> acc || regHits r) False regs
when shouldSend $
sendNotification SWorkspaceDidChangeWatchedFiles $ DidChangeWatchedFilesParams $
List [ FileEvent (filePathToUri (rootDir </> file)) FcCreated ]
openDoc' file languageId contents
-- | Opens a text document that /exists on disk/, and sends a
-- textDocument/didOpen notification to the server.
openDoc :: FilePath -> T.Text -> Session TextDocumentIdentifier
openDoc file languageId = do
  root <- asks rootDir
  fileContents <- liftIO (T.readFile (root </> file))
  openDoc' file languageId fileContents
-- | This is a variant of `openDoc` that takes the file content as an argument.
-- Use this if the file exists /outside/ of the current workspace.
openDoc' :: FilePath -> T.Text -> T.Text -> Session TextDocumentIdentifier
openDoc' file languageId contents = do
  context <- ask
  let fp = rootDir context </> file
      uri = filePathToUri fp
      -- Freshly opened documents start at version 0.
      item = TextDocumentItem uri languageId 0 contents
  sendNotification STextDocumentDidOpen (DidOpenTextDocumentParams item)
  pure $ TextDocumentIdentifier uri
-- | Closes a text document and sends a textDocument/didClose notification to the server.
closeDoc :: TextDocumentIdentifier -> Session ()
closeDoc docId = do
  let params = DidCloseTextDocumentParams (TextDocumentIdentifier (docId ^. uri))
  sendNotification STextDocumentDidClose params
-- | Changes a text document and sends a textDocument/didChange notification to the server.
changeDoc :: TextDocumentIdentifier -> [TextDocumentContentChangeEvent] -> Session ()
changeDoc docId changes = do
  verDoc <- getVersionedDoc docId
  -- Bump the advertised version; 'non 0' treats a missing version as 0.
  let params = DidChangeTextDocumentParams (verDoc & version . non 0 +~ 1) (List changes)
  sendNotification STextDocumentDidChange params
-- | Gets the Uri for the file corrected to the session directory.
getDocUri :: FilePath -> Session Uri
getDocUri file = do
context <- ask
let fp = rootDir context </> file
return $ filePathToUri fp
-- | Waits for diagnostics to be published and returns them.
waitForDiagnostics :: Session [Diagnostic]
waitForDiagnostics = do
diagsNot <- skipManyTill anyMessage (message STextDocumentPublishDiagnostics)
let (List diags) = diagsNot ^. params . LSP.diagnostics
return diags
-- | The same as 'waitForDiagnostics', but will only match a specific
-- 'Language.LSP.Types._source'.
waitForDiagnosticsSource :: String -> Session [Diagnostic]
waitForDiagnosticsSource src = do
diags <- waitForDiagnostics
let res = filter matches diags
if null res
then waitForDiagnosticsSource src
else return res
where
matches :: Diagnostic -> Bool
matches d = d ^. source == Just (T.pack src)
-- | Expects a 'PublishDiagnosticsNotification' and throws an
-- 'UnexpectedDiagnostics' exception if there are any diagnostics
-- returned.
noDiagnostics :: Session ()
noDiagnostics = do
diagsNot <- message STextDocumentPublishDiagnostics
when (diagsNot ^. params . LSP.diagnostics /= List []) $ liftIO $ throw UnexpectedDiagnostics
-- | Returns the symbols in a document.
getDocumentSymbols :: TextDocumentIdentifier -> Session (Either [DocumentSymbol] [SymbolInformation])
getDocumentSymbols doc = do
ResponseMessage _ rspLid res <- request STextDocumentDocumentSymbol (DocumentSymbolParams Nothing Nothing doc)
case res of
Right (InL (List xs)) -> return (Left xs)
Right (InR (List xs)) -> return (Right xs)
Left err -> throw (UnexpectedResponseError (SomeLspId $ fromJust rspLid) err)
-- | Returns the code actions in the specified range.
--
-- Throws 'UnexpectedResponseError' if the server answers with an error.
getCodeActions :: TextDocumentIdentifier -> Range -> Session [Command |? CodeAction]
getCodeActions doc range = do
  ctx <- getCodeActionContextInRange doc range
  rsp <- request STextDocumentCodeAction (CodeActionParams Nothing Nothing doc range ctx)
  case rsp ^. result of
    Right (List xs) -> return xs
    -- Renamed from 'error': don't shadow Prelude.error, and match the
    -- naming used in 'getResponseResult'.
    Left err -> throw (UnexpectedResponseError (SomeLspId $ fromJust $ rsp ^. LSP.id) err)
-- | Returns all the code actions in a document by
-- querying the code actions at each of the current
-- diagnostics' positions.
getAllCodeActions :: TextDocumentIdentifier -> Session [Command |? CodeAction]
getAllCodeActions doc = do
ctx <- getCodeActionContext doc
foldM (go ctx) [] =<< getCurrentDiagnostics doc
where
go :: CodeActionContext -> [Command |? CodeAction] -> Diagnostic -> Session [Command |? CodeAction]
go ctx acc diag = do
ResponseMessage _ rspLid res <- request STextDocumentCodeAction (CodeActionParams Nothing Nothing doc (diag ^. range) ctx)
case res of
Left e -> throw (UnexpectedResponseError (SomeLspId $ fromJust rspLid) e)
Right (List cmdOrCAs) -> pure (acc ++ cmdOrCAs)
-- | Builds a 'CodeActionContext' from the current diagnostics whose range
-- overlaps the given range.
getCodeActionContextInRange :: TextDocumentIdentifier -> Range -> Session CodeActionContext
getCodeActionContextInRange doc caRange = do
  curDiags <- getCurrentDiagnostics doc
  let diags = [ d | d@Diagnostic{_range=range} <- curDiags
                  , overlappingRange caRange range
              ]
  return $ CodeActionContext (List diags) Nothing
  where
    -- Two ranges overlap iff one contains an endpoint of the other.
    -- The previous version only tested the endpoints of 'caRange', missing
    -- diagnostics whose range lies strictly inside it; we additionally test
    -- the diagnostic's start point against 'caRange'.
    overlappingRange :: Range -> Range -> Bool
    overlappingRange (Range s e) range@(Range ds _) =
         positionInRange s range
      || positionInRange e range
      || positionInRange ds (Range s e)

    -- Inclusive point-in-range test. The third and fourth clauses now guard
    -- with pl < el / pl > sl: previously a single-line range (sl == el)
    -- wrongly matched positions on that line before its start or after its
    -- end via those clauses.
    positionInRange :: Position -> Range -> Bool
    positionInRange (Position pl po) (Range (Position sl so) (Position el eo)) =
         pl > sl && pl < el
      || pl == sl && pl == el && po >= so && po <= eo
      || pl == sl && pl < el && po >= so
      || pl == el && pl > sl && po <= eo
getCodeActionContext :: TextDocumentIdentifier -> Session CodeActionContext
getCodeActionContext doc = do
curDiags <- getCurrentDiagnostics doc
return $ CodeActionContext (List curDiags) Nothing
-- | Returns the current diagnostics that have been sent to the client.
-- Note that this does not wait for more to come in.
getCurrentDiagnostics :: TextDocumentIdentifier -> Session [Diagnostic]
getCurrentDiagnostics doc = fromMaybe [] . Map.lookup (toNormalizedUri $ doc ^. uri) . curDiagnostics <$> get
-- | Returns the tokens of all progress sessions that have started but not yet ended.
getIncompleteProgressSessions :: Session (Set.Set ProgressToken)
getIncompleteProgressSessions = curProgressSessions <$> get
-- | Executes a command.
executeCommand :: Command -> Session ()
executeCommand cmd = do
let args = decode $ encode $ fromJust $ cmd ^. arguments
execParams = ExecuteCommandParams Nothing (cmd ^. command) args
void $ sendRequest SWorkspaceExecuteCommand execParams
-- | Executes a code action.
-- Matching with the specification, if a code action
-- contains both an edit and a command, the edit will
-- be applied first.
executeCodeAction :: CodeAction -> Session ()
executeCodeAction action = do
maybe (return ()) handleEdit $ action ^. edit
maybe (return ()) executeCommand $ action ^. command
where handleEdit :: WorkspaceEdit -> Session ()
handleEdit e =
-- Its ok to pass in dummy parameters here as they aren't used
let req = RequestMessage "" (IdInt 0) SWorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing e)
in updateState (FromServerMess SWorkspaceApplyEdit req)
-- | Adds the current version to the document, as tracked by the session.
-- Documents unknown to the virtual file system get a 'Nothing' version.
getVersionedDoc :: TextDocumentIdentifier -> Session VersionedTextDocumentIdentifier
getVersionedDoc (TextDocumentIdentifier uri) = do
  fileMap <- vfsMap . vfs <$> get
  -- fmap over the Maybe replaces the explicit case analysis.
  let mVersion = virtualFileVersion <$> (fileMap Map.!? toNormalizedUri uri)
  pure (VersionedTextDocumentIdentifier uri mVersion)
-- | Applys an edit to the document and returns the updated document version.
applyEdit :: TextDocumentIdentifier -> TextEdit -> Session VersionedTextDocumentIdentifier
applyEdit doc edit = do
verDoc <- getVersionedDoc doc
caps <- asks sessionCapabilities
let supportsDocChanges = fromMaybe False $ do
let mWorkspace = caps ^. LSP.workspace
C.WorkspaceClientCapabilities _ mEdit _ _ _ _ _ _ _ <- mWorkspace
C.WorkspaceEditClientCapabilities mDocChanges _ _ _ _ <- mEdit
mDocChanges
let wEdit = if supportsDocChanges
then
let docEdit = TextDocumentEdit verDoc (List [InL edit])
in WorkspaceEdit Nothing (Just (List [InL docEdit])) Nothing
else
let changes = HashMap.singleton (doc ^. uri) (List [edit])
in WorkspaceEdit (Just changes) Nothing Nothing
let req = RequestMessage "" (IdInt 0) SWorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing wEdit)
updateState (FromServerMess SWorkspaceApplyEdit req)
-- version may have changed
getVersionedDoc doc
-- | Returns the completions for the position in the document.
getCompletions :: TextDocumentIdentifier -> Position -> Session [CompletionItem]
getCompletions doc pos = do
rsp <- request STextDocumentCompletion (CompletionParams doc pos Nothing Nothing Nothing)
case getResponseResult rsp of
InL (List items) -> return items
InR (CompletionList _ (List items)) -> return items
-- | Returns the references for the position in the document.
getReferences :: TextDocumentIdentifier -- ^ The document to lookup in.
-> Position -- ^ The position to lookup.
-> Bool -- ^ Whether to include declarations as references.
-> Session (List Location) -- ^ The locations of the references.
getReferences doc pos inclDecl =
let ctx = ReferenceContext inclDecl
params = ReferenceParams doc pos Nothing Nothing ctx
in getResponseResult <$> request STextDocumentReferences params
-- | Returns the declaration(s) for the term at the specified position.
getDeclarations :: TextDocumentIdentifier -- ^ The document the term is in.
                -> Position -- ^ The position the term is at.
                -> Session ([Location] |? [LocationLink])
getDeclarations = getDeclarationyRequest STextDocumentDeclaration DeclarationParams
-- | Returns the definition(s) for the term at the specified position.
getDefinitions :: TextDocumentIdentifier -- ^ The document the term is in.
               -> Position -- ^ The position the term is at.
               -> Session ([Location] |? [LocationLink])
getDefinitions = getDeclarationyRequest STextDocumentDefinition DefinitionParams
-- | Returns the type definition(s) for the term at the specified position.
getTypeDefinitions :: TextDocumentIdentifier -- ^ The document the term is in.
                   -> Position -- ^ The position the term is at.
                   -> Session ([Location] |? [LocationLink])
getTypeDefinitions = getDeclarationyRequest STextDocumentTypeDefinition TypeDefinitionParams
-- | Returns the implementation(s) for the term at the specified position.
getImplementations :: TextDocumentIdentifier -- ^ The document the term is in.
                   -> Position -- ^ The position the term is at.
                   -> Session ([Location] |? [LocationLink])
getImplementations = getDeclarationyRequest STextDocumentImplementation ImplementationParams
getDeclarationyRequest :: (ResponseResult m ~ (Location |? (List Location |? List LocationLink)))
=> SClientMethod m
-> (TextDocumentIdentifier
-> Position
-> Maybe ProgressToken
-> Maybe ProgressToken
-> MessageParams m)
-> TextDocumentIdentifier
-> Position
-> Session ([Location] |? [LocationLink])
getDeclarationyRequest method paramCons doc pos = do
let params = paramCons doc pos Nothing Nothing
rsp <- request method params
case getResponseResult rsp of
InL loc -> pure (InL [loc])
InR (InL (List locs)) -> pure (InL locs)
InR (InR (List locLinks)) -> pure (InR locLinks)
-- | Renames the term at the specified position.
rename :: TextDocumentIdentifier -> Position -> String -> Session ()
rename doc pos newName = do
let params = RenameParams doc pos Nothing (T.pack newName)
rsp <- request STextDocumentRename params
let wEdit = getResponseResult rsp
req = RequestMessage "" (IdInt 0) SWorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing wEdit)
updateState (FromServerMess SWorkspaceApplyEdit req)
-- | Returns the hover information at the specified position.
getHover :: TextDocumentIdentifier -> Position -> Session (Maybe Hover)
getHover doc pos =
  let params = HoverParams doc pos Nothing
  in getResponseResult <$> request STextDocumentHover params
-- | Returns the highlighted occurrences of the term at the specified position.
getHighlights :: TextDocumentIdentifier -> Position -> Session (List DocumentHighlight)
getHighlights doc pos =
  let params = DocumentHighlightParams doc pos Nothing Nothing
  in getResponseResult <$> request STextDocumentDocumentHighlight params
-- | Checks the response for errors and throws an exception if needed.
-- Returns the result if successful.
getResponseResult :: ResponseMessage m -> ResponseResult m
getResponseResult rsp =
case rsp ^. result of
Right x -> x
Left err -> throw $ UnexpectedResponseError (SomeLspId $ fromJust $ rsp ^. LSP.id) err
-- | Applies formatting to the specified document.
formatDoc :: TextDocumentIdentifier -> FormattingOptions -> Session ()
formatDoc doc opts = do
let params = DocumentFormattingParams Nothing doc opts
edits <- getResponseResult <$> request STextDocumentFormatting params
applyTextEdits doc edits
-- | Applies formatting to the specified range in a document.
formatRange :: TextDocumentIdentifier -> FormattingOptions -> Range -> Session ()
formatRange doc opts range = do
let params = DocumentRangeFormattingParams Nothing doc range opts
edits <- getResponseResult <$> request STextDocumentRangeFormatting params
applyTextEdits doc edits
applyTextEdits :: TextDocumentIdentifier -> List TextEdit -> Session ()
applyTextEdits doc edits =
let wEdit = WorkspaceEdit (Just (HashMap.singleton (doc ^. uri) edits)) Nothing Nothing
-- Send a dummy message to updateState so it can do bookkeeping
req = RequestMessage "" (IdInt 0) SWorkspaceApplyEdit (ApplyWorkspaceEditParams Nothing wEdit)
in updateState (FromServerMess SWorkspaceApplyEdit req)
-- | Returns the code lenses for the specified document.
getCodeLenses :: TextDocumentIdentifier -> Session [CodeLens]
getCodeLenses tId = do
rsp <- request STextDocumentCodeLens (CodeLensParams Nothing Nothing tId)
case getResponseResult rsp of
List res -> pure res
-- | Pass a param and return the response from `prepareCallHierarchy`
prepareCallHierarchy :: CallHierarchyPrepareParams -> Session [CallHierarchyItem]
prepareCallHierarchy = resolveRequestWithListResp STextDocumentPrepareCallHierarchy
incomingCalls :: CallHierarchyIncomingCallsParams -> Session [CallHierarchyIncomingCall]
incomingCalls = resolveRequestWithListResp SCallHierarchyIncomingCalls
outgoingCalls :: CallHierarchyOutgoingCallsParams -> Session [CallHierarchyOutgoingCall]
outgoingCalls = resolveRequestWithListResp SCallHierarchyOutgoingCalls
-- | Send a request and receive a response with list.
resolveRequestWithListResp :: (ResponseResult m ~ Maybe (List a))
=> SClientMethod m -> MessageParams m -> Session [a]
resolveRequestWithListResp method params = do
rsp <- request method params
case getResponseResult rsp of
Nothing -> pure []
Just (List x) -> pure x
-- | Requests the semantic tokens for the whole document and returns the
-- server's (possibly absent) result.
getSemanticTokens :: TextDocumentIdentifier -> Session (Maybe SemanticTokens)
getSemanticTokens doc = do
  let params = SemanticTokensParams Nothing Nothing doc
  rsp <- request STextDocumentSemanticTokensFull params
  pure $ getResponseResult rsp
-- | Returns a list of capabilities that the server has requested to /dynamically/
-- register during the 'Session'.
--
-- @since 0.11.0.0
getRegisteredCapabilities :: Session [SomeRegistration]
getRegisteredCapabilities = Map.elems . curDynCaps <$> get
| wz1000/haskell-lsp | lsp-test/src/Language/LSP/Test.hs | mit | 32,407 | 0 | 25 | 6,969 | 6,736 | 3,401 | 3,335 | -1 | -1 |
module CFDI.Types.RelationshipType where
import CFDI.Chainable
import CFDI.Types.Type
import Data.Text (pack)
-- | CFDI relationship between a receipt and its related documents.
-- Constructor order follows the SAT catalogue codes \"01\"..\"09\"
-- (see the 'Type' instance in this module).
data RelationshipType
  = CreditNote             -- ^ code \"01\"
  | DebitNote              -- ^ code \"02\"
  | ReturnedGoods          -- ^ code \"03\"
  | Substitution           -- ^ code \"04\"
  | PrevInvoicedTransfer   -- ^ code \"05\"
  | PrevTransferedInvoice  -- ^ code \"06\"
  | AdvanceApplication     -- ^ code \"07\"
  | PartialPaymentInvoice  -- ^ code \"08\"
  | DeferredPaymentInvoice -- ^ code \"09\"
  deriving (Bounded, Enum, Eq)
instance Chainable RelationshipType where
chain = pack . render
instance Show RelationshipType where
show CreditNote = "Nota de crédito de los documentos relacionados"
show DebitNote = "Nota de débito de los documentos relacionados"
show ReturnedGoods = "Devolución de mercancía sobre facturas o \
\traslados previos"
show Substitution = "Sustitución de los CFDI previos"
show PrevInvoicedTransfer = "Traslados de mercancias facturados previamente"
show PrevTransferedInvoice = "Factura generada por los traslados previos"
show AdvanceApplication = "CFDI por aplicación de anticipo"
show PartialPaymentInvoice = "Factura generada por pagos en parcialidades"
show DeferredPaymentInvoice = "Factura generada por pagos diferidos"
-- Catalogue codes "01".."09" map onto the constructors in declaration order.
instance Type RelationshipType where
  parseExpr expr = maybe (Left NotInCatalog) Right (lookup expr catalogue)
    where
      catalogue =
        [ ("01", CreditNote)
        , ("02", DebitNote)
        , ("03", ReturnedGoods)
        , ("04", Substitution)
        , ("05", PrevInvoicedTransfer)
        , ("06", PrevTransferedInvoice)
        , ("07", AdvanceApplication)
        , ("08", PartialPaymentInvoice)
        , ("09", DeferredPaymentInvoice)
        ]

  render relType = case relType of
    CreditNote             -> "01"
    DebitNote              -> "02"
    ReturnedGoods          -> "03"
    Substitution           -> "04"
    PrevInvoicedTransfer   -> "05"
    PrevTransferedInvoice  -> "06"
    AdvanceApplication     -> "07"
    PartialPaymentInvoice  -> "08"
    DeferredPaymentInvoice -> "09"
| yusent/cfdis | src/CFDI/Types/RelationshipType.hs | mit | 2,008 | 0 | 6 | 448 | 361 | 185 | 176 | 47 | 0 |
module Sudoku where
import Test.QuickCheck
import Test.Hspec
import Data.Array hiding (bounds)
import Data.Char (isDigit)
import Data.Maybe
import Data.List
import qualified Data.Set as S
import Control.Monad
import Data.List.Split
data Cell = Unknown [Int]
| Known Int
deriving (Eq,Ord)
instance Show Cell where
show (Known n) = show n
show (Unknown _) = "_"
knownValue :: Cell -> Maybe Int
knownValue (Known n) = Just n
knownValue _ = Nothing
type RawGrid = Array (Int,Int) Cell
data Grid = Grid RawGrid deriving (Eq,Ord)
instance Show Grid where
show = display
cells :: Grid -> [((Int,Int),Cell)]
cells (Grid g) = assocs g
cellStates :: Grid -> [Cell]
cellStates g = map snd (cells g)
bounds :: ((Int,Int),(Int,Int))
bounds = ((0,0),(8,8))
buildRawGrid :: String -> RawGrid
buildRawGrid s = listArray bounds (map toCell s)
buildGridFromRawGrid :: RawGrid -> Maybe Grid
buildGridFromRawGrid r
| isValid g = Just (Grid g)
| otherwise = Nothing
where
g = applyConstraints r
buildGrid :: String -> Maybe Grid
buildGrid = buildGridFromRawGrid . buildRawGrid
-- | Render a grid as nine newline-terminated rows of nine characters.
display :: Grid -> String
display g = unlines rows
  where
    flat = concatMap show (cellStates g)
    rows = chunksOf 9 flat

-- | Interpret one puzzle character: a digit is a given value, anything
-- else (e.g. '_') is a fully unconstrained cell.
toCell :: Char -> Cell
toCell ch =
  if isDigit ch
    then Known (read [ch])
    else Unknown [1 .. 9]
-- | The cells of row @r@, in column order.
row :: RawGrid -> Int -> [Cell]
row g r = [e | ((x, _), e) <- assocs g, x == r]

-- | The cells of column @c@, in row order.
col :: RawGrid -> Int -> [Cell]
col g c = [e | ((_, y), e) <- assocs g, y == c]

-- | The cells of the 3x3 box containing the given position.
subgrid :: RawGrid -> (Int,Int) -> [Cell]
subgrid g pos = [e | entry@(_, e) <- assocs g, isInSubGrid pos entry]

-- | True when both positions fall in the same 3x3 box.
isInSubGrid :: (Int,Int) -> ((Int,Int),a) -> Bool
isInSubGrid (r,c) ((x,y),_) =
  (r `div` 3, c `div` 3) == (x `div` 3, y `div` 3)
-- | Recompute the cell at position @p@ by removing every value already
-- fixed in its row, column and 3x3 box.
eliminatePossibilities :: RawGrid -> (Int,Int) -> Cell
eliminatePossibilities g p = eliminatePossibilities' (g ! p) (known $ surroundingCells g p)
-- | Remove the taken values from an unknown cell's candidates. A single
-- survivor promotes the cell to 'Known'; no survivors leaves @Unknown []@,
-- which 'isValid' later treats as a contradiction.
eliminatePossibilities' :: Cell -> [Int] -> Cell
eliminatePossibilities' (Known x) _ = Known x
eliminatePossibilities' (Unknown ys) xs
  | length rest == 1 = Known (head rest)
  | otherwise = Unknown rest
  where
    rest = ys \\ xs
-- | Apply elimination to every cell, iterating to a fixpoint.
applyConstraints :: RawGrid -> RawGrid
applyConstraints g
  | next == g = g
  | otherwise = applyConstraints next
  where
    next = array bounds (map (\(x,e) -> (x,eliminatePossibilities g x)) (assocs g))
known :: [Cell] -> [Int]
known = mapMaybe knownValue
-- | A wrapped grid is solved when its underlying array is.
isSolvedGrid :: Grid -> Bool
isSolvedGrid (Grid r) = isSolved r

-- | A raw grid is solved once every cell holds a definite value.
isSolved :: RawGrid -> Bool
isSolved g = all isKnown (elems g)
  where
    isKnown cell = case cell of
      Known _ -> True
      _       -> False
-- | A grid is consistent when every unknown cell still has at least one
-- candidate and no row, column or 3x3 box contains a duplicate known digit.
isValid :: RawGrid -> Bool
isValid raw = (isSolved raw || all choiceRemains (map snd (assocs raw))) &&
              all uniq vals
  where
    choiceRemains (Known _) = True
    choiceRemains (Unknown xs) = (not . null) xs
    uniq xs = length xs == length (nub xs)
    vals = map (known . row raw) [0..8] ++
           map (known . col raw) [0..8] ++
           -- Check all nine boxes: the previous version inspected only the
           -- three boxes on the main diagonal ((0,0),(3,3),(6,6)), so
           -- duplicates in the other six boxes went undetected.
           map (known . subgrid raw) [ (r,c) | r <- [0,3,6], c <- [0,3,6] ]
-- | All cells constraining position @p@: the cell itself first, then its
-- row, column and box with one occurrence of the cell removed from each.
-- Note (\\) removes only the first matching element, so equal peer cells
-- elsewhere in the unit are preserved.
surroundingCells :: RawGrid -> (Int,Int) -> [Cell]
surroundingCells g p@(r,c) = d : ((row g r \\ [d]) ++
                                  (col g c \\ [d]) ++
                                  (subgrid g p \\ [d]))
  where
    d = g ! p
-- | Concrete alternatives for a cell: a known cell admits only itself,
-- an unknown cell admits each remaining candidate digit.
choices :: Cell -> [Cell]
choices c@(Known _) = [c] -- wildcard instead of the unused binding 'n'
choices (Unknown ns) = map Known ns
-- | A grid together with the (lazily built) grids reachable from it.
data GridNode = GridNode Grid [GridNode] deriving (Show,Eq)
-- | Depth-first search from a node for a grid satisfying the predicate.
search :: GridNode -> (Grid -> Bool) -> Maybe Grid
search = search' S.empty
-- | DFS worker. @seen@ holds the grids on the path from the root to here;
-- NOTE(review): siblings each receive the same @seen'@, so this prunes
-- cycles along a single path rather than sharing visited grids across the
-- whole traversal — confirm that is intended before changing it.
search' :: S.Set Grid -> GridNode -> (Grid -> Bool) -> Maybe Grid
search' seen (GridNode x rest) f
  | x `S.member` seen = Nothing
  | f x = Just x
  | null rest = Nothing
  | otherwise = listToMaybe $ mapMaybe (\z -> search' seen' z f) rest
  where
    seen' = S.insert x seen
buildGraph :: Grid -> GridNode
buildGraph g@(Grid r)
| isSolvedGrid g = GridNode g []
| otherwise = GridNode g (map buildGraph validChildren)
where
nextGrids = mapMaybe buildGridFromRawGrid (possibleNextSteps r)
validChildren = delete g $ nextGrids
possibleNextSteps :: RawGrid -> [RawGrid]
possibleNextSteps g = concatMap (uncurry updateGrid) (assocs g)
where
updateGrid p c = map (\c' -> g // [(p,c')]) (choices c)
-- | Parse a puzzle string and search its move graph for a solved grid.
-- Yields 'Nothing' when the input is inconsistent or no solution exists.
solve :: String -> Maybe Grid
solve s = do
  grid <- buildGrid s
  search (buildGraph grid) isSolvedGrid
veryEasy :: String
veryEasy = "6185___2_" ++
"_5__17_63" ++
"__326__95" ++
"8_1_5_74_" ++
"5__6_13_9" ++
"_9782_5__" ++
"286_45___" ++
"1____26_4" ++
"_4_1_6258"
veryEasySolution :: String
veryEasySolution = "618593427\n" ++
"952417863\n" ++
"473268195\n" ++
"861359742\n" ++
"524671389\n" ++
"397824516\n" ++
"286745931\n" ++
"135982674\n" ++
"749136258\n"
-- Hardest problem
-- http://www.telegraph.co.uk/science/science-news/9359579/Worlds-hardest-sudoku-can-you-crack-it.html
veryHard :: String
veryHard = "8________" ++
"__36_____" ++
"_7__9_2__" ++
"_5___7___" ++
"____457__" ++
"___1___3_" ++
"__1____68" ++
"__85___1_" ++
"_9____4__"
veryHardSolution :: String
veryHardSolution = "812753649\n" ++
"943682175\n" ++
"675491283\n" ++
"154237896\n" ++
"369845721\n" ++
"287169534\n" ++
"521974368\n" ++
"438526917\n" ++
"796318452\n"
invalidGrid :: String
invalidGrid = "6185___2_" ++
"_5__17_63" ++
"__326__95" ++
"8_1_5_74_" ++
"5__6913_9" ++ -- the middle 9 is invalid
"_9782_5__" ++
"286_45___" ++
"1____26_4" ++
"_4_1_6258"
main = hspec $ do
describe "Sudoku" $ do
it "row 0" $ do
known (row (buildRawGrid veryEasy) 0) `shouldBe` [6,1,8,5,2]
it "col 1" $ do
known (col (buildRawGrid veryEasy) 1) `shouldBe` [1,5,9,8,4]
it "subgrid 7,7" $ do
known (subgrid (buildRawGrid veryEasy) (7,7)) `shouldBe` [6,4,2,5,8]
it "subgrid 1,1" $ do
known (subgrid (buildRawGrid invalidGrid) (1,1)) `shouldBe` [6,1,8,5,3]
it "eliminates possibilities" $ do
eliminatePossibilities (buildRawGrid veryEasy) (0,8) `shouldBe` (Known 7)
it "a grid is solved if all of the cells are known" $ do
isSolved (buildRawGrid veryEasy) `shouldBe` False
it "solve simple examples" $ do
maybe "" display (solve veryEasy) `shouldBe` veryEasySolution
it "solve an example that requires back-tracking" $ do
maybe "" display (solve veryHard) `shouldBe` veryHardSolution
it "will guess a constrained cell" $ do
choices (Unknown [1..9]) `shouldBe` map Known [1..9]
it "has a predicate to determine invalid state" $ do
buildGrid invalidGrid `shouldBe` Nothing
it "builds a graph for an easy solution" $ do
liftM buildGraph (buildGrid veryEasy) `shouldBe` liftM buildGraph (buildGrid veryEasy)
| fffej/codekatas | sudoku/Sudoku.hs | mit | 7,323 | 0 | 19 | 2,133 | 2,678 | 1,399 | 1,279 | -1 | -1 |
module Q003 where
{- Name: Largest Prime Factor
Purpose: The primes factors of 13195 are 5, 7, 13 and 29.
What is the largest prime factor of the number 600851475143?
Answer: 6857
Author: Alex Adusei
-}
num = 600851475143
-- | Largest prime factor of @n@ (for @n >= 2@), computed by trial
-- division that strips out each prime factor as it is found.
--
-- The previous implementation searched downward from @intRoot n@ for a
-- prime divisor, which was wrong in two ways: for a prime @n@ no
-- divisor >= 2 exists below @sqrt n@ and it fell through to 1, and for
-- inputs such as 14 (whose largest prime factor 7 exceeds @sqrt 14@)
-- it returned a smaller factor.  Dividing factors out fixes both cases
-- while keeping the same @Integer -> Integer@ interface.
start :: Integer -> Integer
start n = go n 2
  where
    -- Remove factor f from m completely before moving on; once
    -- f * f > m, the remaining m is itself the largest prime factor.
    go m f
      | f * f > m    = m
      | mod m f == 0 = go (div m f) f
      | otherwise    = go m (f + 1)
-- | Largest factor of @n@ that is both prime and @<= f@, searching
-- downward from @f@.  Note: this yields the largest prime factor of
-- @n@ only when that factor is within the bound; prime factors above
-- @f@ are not found.
--
-- The added @f <= 1@ guard makes the search total and correct for
-- prime @n@: previously the recursion reached @f == 1@ and, because
-- @isPrime 1@ evaluated to 'True', incorrectly returned 1.  When no
-- prime divisor >= 2 exists up to the bound, @n@ is returned instead.
largestPrimeFactor :: Integer -> Integer -> Integer
largestPrimeFactor n f
  | f <= 1                      = n
  | (mod n f == 0) && isPrime f = f
  | otherwise                   = largestPrimeFactor n (f-1)
-- | Trial-division primality test, checking divisors up to
-- @intRoot n@.
--
-- The explicit @n >= 2@ guard fixes the previous behaviour where 0 and
-- 1 — which have no candidate divisors in @[2..intRoot n]@, making the
-- list trivially empty — were reported as prime.
isPrime :: Integer -> Bool
isPrime n = n >= 2 && null [fact | fact <- [2..(intRoot n)], mod n fact == 0]
-- | Integer square root: the largest @r@ with @r * r <= n@ (0 for
-- negative input).
--
-- 'sqrt' on 'Double' is only used to obtain a starting guess; the
-- result is then corrected with exact 'Integer' arithmetic, because
-- converting a large 'Integer' to 'Double' loses precision and a bare
-- @floor . sqrt . fromIntegral@ can be off by one in either direction.
intRoot :: Integer -> Integer
intRoot n
  | n < 0     = 0
  | otherwise = adjust (floor (sqrt (fromIntegral n :: Double)))
  where
    adjust r
      | r > 0 && r * r > n     = adjust (r - 1)
      | (r + 1) * (r + 1) <= n = adjust (r + 1)
      | otherwise              = r
module Main where
import Language.Jass.Parser.GrammarTest
import Language.Jass.Semantic.CheckTest
import Language.Jass.Codegen.GeneratorTest
import Language.Jass.Codegen.MultiFileTest
import Test.Tasty
--import Test.Tasty.QuickCheck as QC
--import Test.Tasty.HUnit
-- | Run the full suite with tasty's default console runner.
main :: IO()
main = defaultMain tests
-- | The complete test tree, grouped by compiler phase: parsing,
-- semantic checking, code generation and multi-file integration.
tests :: TestTree
tests = testGroup "Tests"
  [ testGroup "Parsing" [syntaxTests, simpleParsing, commonParsing]
  , testGroup "Semantic checks" [commonSemanticTest]
  , testGroup "Code generation" [simpleCodegenTest]
  , testGroup "Integration tests" [multifileTests]
  ]
module Format (format) where
import Data.List
import FactorTable
import Types
-- | Render a 'FactorTable' as a braced, comma-separated list of
-- @key: [factors]@ entries, e.g. @{12: [2, 3], 15: [3, 5]}@.
format :: FactorTable -> String
format table =
    "{" ++ intercalate ", " entries ++ "}"
  where
    entries = map formatPair (toList table)
-- | Render one table entry, e.g. @formatPair (12, [2, 3]) == "12: [2, 3]"@.
formatPair :: (Int, [Int]) -> String
formatPair (num, nums) =
    concat [show num, ": [", intercalate ", " (map show nums), "]"]
| mg50avant/factorizer | src/Format.hs | mit | 432 | 0 | 12 | 156 | 136 | 72 | 64 | 12 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Modes
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- Definitions for the bulk of modes shipped with Yi.
module Yi.Modes (TokenBasedMode, fundamentalMode,
cMode, objectiveCMode, cppMode, cabalMode,
srmcMode, ocamlMode, ottMode, gnuMakeMode,
perlMode, pythonMode, javaMode, jsonMode, anyExtension,
extensionOrContentsMatch, linearSyntaxMode,
svnCommitMode, hookModes, applyModeHooks,
lookupMode, whitespaceMode,
gitCommitMode, rubyMode, styleMode
) where
import Control.Applicative
import Control.Lens
import Data.List (isPrefixOf)
import Data.Maybe
import Data.Text ()
import System.FilePath
import "regex-tdfa" Text.Regex.TDFA ((=~))
import Yi.Buffer
import qualified Yi.IncrementalParse as IncrParser
import Yi.Keymap
import Yi.Lexer.Alex
import qualified Yi.Lexer.C as C
import qualified Yi.Lexer.Cabal as Cabal
import qualified Yi.Lexer.Cplusplus as Cplusplus
import qualified Yi.Lexer.GNUMake as GNUMake
import qualified Yi.Lexer.GitCommit as GitCommit
import qualified Yi.Lexer.JSON as JSON
import qualified Yi.Lexer.Java as Java
import qualified Yi.Lexer.OCaml as OCaml
import qualified Yi.Lexer.ObjectiveC as ObjectiveC
import qualified Yi.Lexer.Ott as Ott
import qualified Yi.Lexer.Perl as Perl
import qualified Yi.Lexer.Python as Python
import qualified Yi.Lexer.Ruby as Ruby
import qualified Yi.Lexer.SVNCommit as SVNCommit
import qualified Yi.Lexer.Srmc as Srmc
import qualified Yi.Lexer.Whitespace as Whitespace
import Yi.MiniBuffer
import qualified Yi.Rope as R
import Yi.Style
import Yi.Syntax hiding (mkHighlighter)
import Yi.Syntax.Driver (mkHighlighter)
import Yi.Syntax.OnlineTree (manyToks, Tree)
import Yi.Syntax.Tree
import Yi.Search (makeSimpleSearch)
type TokenBasedMode tok = Mode (Tree (Tok tok))
type StyleBasedMode = TokenBasedMode StyleName
-- | The catch-all mode: applies to every buffer, auto-indents,
-- fills paragraphs for prettify, and implements go-to-declaration as
-- a plain text search for the first occurrence of the word at point.
fundamentalMode :: Mode syntax
fundamentalMode = emptyMode
  { modeName = "fundamental"
  , modeApplies = modeAlwaysApplies
  , modeIndent = const autoIndentB
  , modePrettify = const fillParagraph
  , modeGotoDeclaration = do
      currentPoint <- pointB
      currentWord <- readCurrentWordB
      currentWordBeginningPoint <- regionStart <$> regionOfB unitWord
      -- Search from the top of the buffer for the first occurrence.
      _ <- gotoLn 0
      word <- return $ makeSimpleSearch currentWord
      searchResults <- regexB Forward word
      case searchResults of
          (declarationRegion : _) -> do
              searchPoint <- return $ regionStart declarationRegion
              -- If the first hit is the word we started on, stay put.
              if currentWordBeginningPoint /= searchPoint
              then moveTo searchPoint
              else moveTo currentPoint
          [] -> moveTo currentPoint
  }
-- | Creates a 'TokenBasedMode' from a 'Lexer' and a function that
-- turns tokens into 'StyleName'.
--
-- Built on 'fundamentalMode', overriding only the highlighter (an
-- incremental scanner over the token stream) and the stroke function.
linearSyntaxMode' :: Show (l s)
                  => Lexer l s (Tok t) i
                  -> (t -> StyleName)
                  -> TokenBasedMode t
linearSyntaxMode' scanToken tts = fundamentalMode
  & modeHLA .~ ExtHL (mkHighlighter $ IncrParser.scanner manyToks . lexer)
  & modeGetStrokesA .~ tokenBasedStrokes tokenToStroke
  where
    -- Map each token's span to a coloured stroke.
    tokenToStroke = fmap tts . tokToSpan
    lexer = lexScanner scanToken
-- | Specialised version of 'linearSyntaxMode'' for the common case,
-- wrapping up into a 'Lexer' with 'commonLexer'.
linearSyntaxMode :: Show s => s -- ^ Starting state
                 -> TokenLexer AlexState s (Tok t) AlexInput
                 -> (t -> StyleName)
                 -> TokenBasedMode t
linearSyntaxMode initSt scanToken =
  linearSyntaxMode' (commonLexer scanToken initSt)

-- | Build a token-based mode directly from a 'StyleLexer' bundle
-- (its lexer plus its token-to-style mapping).
styleMode :: Show (l s) => StyleLexer l s t i
          -> TokenBasedMode t
styleMode l = linearSyntaxMode' (l ^. styleLexer) (l ^. tokenToStyle)
-- * Concrete modes
--
-- Each mode below wraps 'styleMode' applied to the relevant lexer and
-- overrides the mode name and the file-matching predicate (plus, where
-- noted, comment toggling or indent settings).
cMode :: StyleBasedMode
cMode = styleMode C.lexer
  & modeNameA .~ "c"
  & modeAppliesA .~ anyExtension [ "c", "h" ]
objectiveCMode :: StyleBasedMode
objectiveCMode = styleMode ObjectiveC.lexer
  & modeNameA .~ "objective-c"
  & modeAppliesA .~ anyExtension [ "m", "mm" ]
cppMode :: StyleBasedMode
cppMode = styleMode Cplusplus.lexer
  & modeAppliesA .~ anyExtension [ "cxx", "cpp", "hxx" ]
  & modeNameA .~ "c++"
-- Cabal files get "--" line-comment toggling.
cabalMode :: StyleBasedMode
cabalMode = styleMode Cabal.lexer
  & modeNameA .~ "cabal"
  & modeAppliesA .~ anyExtension [ "cabal" ]
  & modeToggleCommentSelectionA .~ Just (toggleCommentB "--")
srmcMode :: StyleBasedMode
srmcMode = styleMode Srmc.lexer
  & modeNameA .~ "srmc"
  & modeAppliesA .~ anyExtension [ "pepa", "srmc" ] -- pepa is a subset of srmc
-- Matches git's commit-message buffer: a file literally named
-- COMMIT_EDITMSG inside a ".git" directory.
gitCommitMode :: TokenBasedMode GitCommit.Token
gitCommitMode = styleMode GitCommit.lexer
  & modeNameA .~ "git-commit"
  & modeAppliesA .~ isCommit
  where
    isCommit p _ = case (takeFileName p, takeFileName $ takeDirectory p) of
      ("COMMIT_EDITMSG", ".git") -> True
      _ -> False
-- Matches svn's temporary commit-message files ("svn-commit*.tmp").
svnCommitMode :: StyleBasedMode
svnCommitMode = styleMode SVNCommit.lexer
  & modeNameA .~ "svn-commit"
  & modeAppliesA .~ isCommit
  where
    isCommit p _ = "svn-commit" `isPrefixOf` p && extensionMatches ["tmp"] p
ocamlMode :: TokenBasedMode OCaml.Token
ocamlMode = styleMode OCaml.lexer
  & modeNameA .~ "ocaml"
  & modeAppliesA .~ anyExtension [ "ml", "mli", "mly" , "mll", "ml4", "mlp4" ]
perlMode :: StyleBasedMode
perlMode = styleMode Perl.lexer
  & modeNameA .~ "perl"
  & modeAppliesA .~ anyExtension [ "t", "pl", "pm" ]
rubyMode :: StyleBasedMode
rubyMode = styleMode Ruby.lexer
  & modeNameA .~ "ruby"
  & modeAppliesA .~ anyExtension [ "rb", "ru" ]
-- Python: "#" comment toggling and 4-space, tab-expanding indentation.
pythonMode :: StyleBasedMode
pythonMode = base
  & modeNameA .~ "python"
  & modeAppliesA .~ anyExtension [ "py" ]
  & modeToggleCommentSelectionA .~ Just (toggleCommentB "#")
  & modeIndentSettingsA %~ (\x -> x { expandTabs = True, tabSize = 4 })
  where
    base = styleMode Python.lexer
javaMode :: StyleBasedMode
javaMode = styleMode Java.lexer
  & modeNameA .~ "java"
  & modeAppliesA .~ anyExtension [ "java" ]
jsonMode :: StyleBasedMode
jsonMode = styleMode JSON.lexer
  & modeNameA .~ "json"
  & modeAppliesA .~ anyExtension [ "json" ]
-- Makefiles require literal tabs, hence expandTabs = False.
gnuMakeMode :: StyleBasedMode
gnuMakeMode = styleMode GNUMake.lexer
  & modeNameA .~ "Makefile"
  & modeAppliesA .~ isMakefile
  & modeIndentSettingsA %~ (\x -> x { expandTabs = False, shiftWidth = 8 })
  where
    isMakefile :: FilePath -> a -> Bool
    isMakefile path _contents = matches $ takeFileName path
      where matches "Makefile" = True
            matches "makefile" = True
            matches "GNUmakefile" = True
            matches filename = extensionMatches [ "mk" ] filename
ottMode :: StyleBasedMode
ottMode = styleMode Ott.lexer
  & modeNameA .~ "ott"
  & modeAppliesA .~ anyExtension [ "ott" ]
-- The Whitespace language is tab-significant, so indenting inserts '\t'.
whitespaceMode :: StyleBasedMode
whitespaceMode = styleMode Whitespace.lexer
  & modeNameA .~ "whitespace"
  & modeAppliesA .~ anyExtension [ "ws" ]
  & modeIndentA .~ (\_ _ -> insertB '\t')
-- | Determines if the file's extension is one of the extensions in the
-- list.  Extensions are given without their leading dot.
extensionMatches :: [String]
                 -> FilePath
                 -> Bool
extensionMatches extensions fileName = any matches extensions
  where matches ext = takeExtension fileName == '.' : ext
-- | When applied to an extensions list, creates a 'Mode.modeApplies'
-- function that decides on the file path alone.
anyExtension :: [String] -- ^ List of extensions
             -> FilePath -- ^ Path to compare against
             -> a -- ^ File contents. Currently unused but see
                  -- 'extensionOrContentsMatch'.
             -> Bool
anyExtension extensions fileName = const (extensionMatches extensions fileName)
-- | When applied to an extensions list and regular expression pattern, creates
-- a 'Mode.modeApplies' function.
--
-- The regex (TDFA) match against the buffer contents is only attempted
-- when the extension check fails, thanks to (||) short-circuiting.
extensionOrContentsMatch :: [String] -> String -> FilePath -> R.YiString -> Bool
extensionOrContentsMatch extensions pattern fileName contents
  = extensionMatches extensions fileName || contentsMatch
  where contentsMatch = R.toString contents =~ pattern :: Bool
-- | Adds a hook to all matching hooks in a list.  The hook @h@ is
-- sequenced after each matching mode's existing on-load action.
hookModes :: (AnyMode -> Bool) -> BufferM () -> [AnyMode] -> [AnyMode]
hookModes p h = map $ \am@(AnyMode m) ->
  if p am then AnyMode (m & modeOnLoadA %~ (>> h)) else am
-- | Apply a list of mode hooks to a list of AnyModes.  Every hook
-- whose predicate accepts a mode is folded onto that mode's on-load
-- action; modes matched by no hook are returned unchanged.
applyModeHooks :: [(AnyMode -> Bool, BufferM ())] -> [AnyMode] -> [AnyMode]
applyModeHooks hs ms = flip map ms $ \am -> case filter (($ am) . fst) hs of
  [] -> am
  ls -> onMode (modeOnLoadA %~ \x -> foldr ((>>) . snd) x ls) am
-- | Check whether a mode of the same name is already in modeTable and
-- returns the original mode, if it isn't the case.
lookupMode :: AnyMode -> YiM AnyMode
lookupMode am@(AnyMode m) = fromMaybe am <$> anyModeByNameM (modeName m)
| atsukotakahashi/wi | src/library/Yi/Modes.hs | gpl-2.0 | 9,187 | 0 | 16 | 2,147 | 2,197 | 1,218 | 979 | 191 | 4 |
module Cashlog.Data.Completion
( articleCompletion
, categoryCompletion
, shopCompletion
, voucherCompletion
, mapArticleCompletionToKey
, mapArticleKeyToCompletion
, mapCategoryCompletionToKey
, mapCategoryKeyToCompletion
, mapShopCompletionToKey
, mapShopKeyToCompletion
, mapVoucherCompletionToKey
, mapVoucherKeyToCompletion
, isArticleKey
, isCategoryKey
, isShopKey
, isVoucherKey
) where
import qualified Database.HDBC as DB
import qualified System.Console.Haskeline.Completion as HKLC
import Data.List
import Cashlog.Data.Connection
import Cashlog.Data.Utility
-- | True when @key@ exists as a primary key in @table@.
--
-- NOTE(review): the table name is spliced into the SQL string; this is
-- only safe because the callers below pass fixed literals — never pass
-- user input as @table@.  The key itself is bound as a parameter.
isPrimaryKeyValid :: DataHandle
                  -> String
                  -> Int
                  -> IO Bool
isPrimaryKeyValid handle table key = do
  result <- DB.quickQuery' handle
            ( "SELECT id \
              \FROM " ++ table ++ " \
              \WHERE id = ?" )
            [DB.toSql key]
  return $ not $ null result
-- Per-table specialisations over the fixed table names.
isArticleKey handle = isPrimaryKeyValid handle "article"
isCategoryKey handle = isPrimaryKeyValid handle "category"
isShopKey handle = isPrimaryKeyValid handle "shop"
isVoucherKey handle = isPrimaryKeyValid handle "voucher"
-- | Names of articles whose name starts with @word@ (prefix
-- completion).
--
-- The prefix is passed as a bind parameter instead of being spliced
-- into the SQL text, which previously allowed SQL injection through
-- @word@ (a single quote would break or alter the statement).  LIKE
-- wildcards (@%@, @_@) occurring inside @word@ are still interpreted
-- by the database.
completeArticle :: DataHandle
                -> String
                -> IO [String]
completeArticle handle word = do
  result <- DB.quickQuery' handle
            "SELECT name \
            \FROM article \
            \WHERE name \
            \LIKE ?"
            [DB.toSql (word ++ "%")]
  return $ map (\(n:_) -> DB.fromSql n) result
-- | Resolve a completed article name back to its primary key
-- ('Nothing' when no article has that exact name).
mapArticleCompletionToKey :: DataHandle
                          -> String
                          -> IO (Maybe Int)
mapArticleCompletionToKey handle comp = do
  result <- DB.quickQuery' handle
            "SELECT id FROM article WHERE name = ?"
            params
  return $ justTopLeft result
  where params = [ DB.toSql comp ]
-- | Look up the completion string (article name) for a primary key.
--
-- Fixes a copy/paste bug: the query previously read
-- @SELECT id FROM article WHERE name = ?@ — the inverse mapping — so
-- the 'Int' key was compared against the @name@ column and the @id@
-- column was returned.  The corrected query mirrors
-- 'mapCategoryKeyToCompletion'.
mapArticleKeyToCompletion :: DataHandle
                          -> Int
                          -> IO (Maybe String)
mapArticleKeyToCompletion handle key = do
  result <- DB.quickQuery' handle
            "SELECT name FROM article WHERE id = ?"
            params
  return $ justTopLeft result
  where params = [ DB.toSql key ]
-- | Names of categories whose name starts with @word@ (prefix
-- completion).
--
-- The prefix is passed as a bind parameter instead of being spliced
-- into the SQL text, which previously allowed SQL injection through
-- @word@.  LIKE wildcards inside @word@ are still interpreted.
completeCategory :: DataHandle
                 -> String
                 -> IO [String]
completeCategory handle word = do
  result <- DB.quickQuery' handle
            "SELECT name \
            \FROM category \
            \WHERE name \
            \LIKE ?"
            [DB.toSql (word ++ "%")]
  return $ map (\(n:_) -> DB.fromSql n) result
-- | Resolve a completed category name back to its primary key.
mapCategoryCompletionToKey :: DataHandle
                           -> String
                           -> IO (Maybe Int)
mapCategoryCompletionToKey handle comp = do
  result <- DB.quickQuery' handle
            "SELECT id \
            \FROM category \
            \WHERE name = ?"
            params
  return $ justTopLeft result
  where params = [ DB.toSql comp ]
-- | Inverse of 'mapCategoryCompletionToKey': the category name for a
-- primary key, or 'Nothing' if the key does not exist.
mapCategoryKeyToCompletion :: DataHandle
                           -> Int
                           -> IO (Maybe String)
mapCategoryKeyToCompletion handle key = do
  result <- DB.quickQuery' handle
            "SELECT name \
            \FROM category \
            \WHERE id = ?"
            params
  return $ justTopLeft result
  where params = [ DB.toSql key ]
-- | Shop completions: the input encodes a (name, city) prefix pair
-- (via 'unwrapPair'); both columns are prefix-matched and re-encoded
-- with 'wrapPair'.
--
-- Both prefixes are passed as bind parameters instead of being spliced
-- into the SQL text, which previously allowed SQL injection.  LIKE
-- wildcards inside the prefixes are still interpreted.
completeShop :: DataHandle
             -> String
             -> IO [(String)]
completeShop handle nc = do
  let (n,c) = unwrapPair nc
  result <- DB.quickQuery' handle
            "SELECT name, city \
            \FROM shop \
            \WHERE name LIKE ? \
            \AND city LIKE ?"
            [DB.toSql (n ++ "%"), DB.toSql (c ++ "%")]
  return $ map (\(name:city:_) -> wrapPair (DB.fromSql name) (DB.fromSql city)) result
-- | Resolve a completed (name, city) shop pair back to its primary
-- key.
mapShopCompletionToKey :: DataHandle
                       -> String
                       -> IO (Maybe Int)
mapShopCompletionToKey handle comp = do
  let (n,c) = unwrapPair comp
      (n',c') = (DB.toSql n, DB.toSql c)
  result <- DB.quickQuery' handle
            "SELECT id \
            \FROM shop \
            \WHERE name = ? \
            \AND city = ?"
            [n', c']
  return $ justTopLeft result
-- | Inverse of 'mapShopCompletionToKey': returns the (name, city)
-- pair (encoded with 'wrapPair') for a key, or 'Nothing' when the key
-- is unknown.
mapShopKeyToCompletion :: DataHandle
                       -> Int
                       -> IO (Maybe String)
mapShopKeyToCompletion handle key = do
  result <- DB.quickQuery' handle
            "SELECT name, city \
            \FROM shop \
            \WHERE id = ?"
            params
  case result of
    ((name:city:[]):[]) -> return $ Just
                                  $ wrapPair (DB.fromSql name)
                                             (DB.fromSql city)
    -- NOTE(review): "otherwise" here is a fresh pattern variable
    -- (a wildcard shadowing Prelude.otherwise), not a guard.
    otherwise -> return Nothing
  where params = [ DB.toSql key ]
-- | Voucher completions: the input encodes a (timestamp, shop name)
-- prefix pair (via 'unwrapPair'); the timestamp is matched against the
-- formatted @strftime@ rendering of the voucher's timestamp.
--
-- Both prefixes are passed as bind parameters instead of being spliced
-- into the SQL text, which previously allowed SQL injection.  LIKE
-- wildcards inside the prefixes are still interpreted.
completeVoucher :: DataHandle
                -> String
                -> IO [(String)]
completeVoucher handle ts = do
  let (t,n) = unwrapPair ts
  result <- DB.quickQuery' handle
            "SELECT strftime('%d-%m-%Y-%H:%M:%S',v.timestamp), s.name \
            \FROM voucher v, shop s \
            \WHERE v.shop_id = s.id \
            \AND strftime('%d-%m-%Y-%H:%M:%S',v.timestamp) LIKE ? \
            \AND s.name LIKE ?"
            [DB.toSql (t ++ "%"), DB.toSql (n ++ "%")]
  return $ map (\(time:name:_) -> wrapPair (DB.fromSql time) (DB.fromSql name)) result
-- | Resolve a completed (timestamp, shop name) voucher pair back to
-- its primary key.
mapVoucherCompletionToKey :: DataHandle
                          -> String
                          -> IO (Maybe Int)
mapVoucherCompletionToKey handle comp = do
  let (t,n) = unwrapPair comp
      (t',n') = (DB.toSql t, DB.toSql n)
  result <- DB.quickQuery' handle
            "SELECT v.id \
            \FROM voucher v, shop s \
            \WHERE v.shop_id = s.id \
            \AND strftime('%d-%m-%Y-%H:%M:%S',v.timestamp) = ? \
            \AND s.name = ?"
            [t', n']
  return $ justTopLeft result
-- | Look up the (timestamp, shop name) completion pair for a voucher
-- primary key.
--
-- Adds the previously missing fall-through case: when the key does not
-- exist (or the row has an unexpected shape) the function now returns
-- 'Nothing' instead of crashing with a non-exhaustive-pattern error.
-- Compare 'mapShopKeyToCompletion', which already had the fallback.
mapVoucherKeyToCompletion :: DataHandle
                          -> Int
                          -> IO (Maybe String)
mapVoucherKeyToCompletion handle key = do
  result <- DB.quickQuery' handle
            "SELECT v.timestamp, s.name \
            \FROM voucher v, shop s \
            \WHERE v.shop_id = s.id \
            \AND v.id = ?"
            params
  case result of
    ((timestamp:name:[]):[]) -> return $ Just
                                       $ wrapPair (DB.fromSql timestamp)
                                                  (DB.fromSql name)
    _ -> return Nothing
  where params = [ DB.toSql key ]
-- | Longest common prefix of a list of strings (@\"\"@ for the empty
-- list).
--
-- Rewritten as a fold of pairwise prefixes.  The old version tested
-- every prefix of the first string against the whole list (via
-- 'inits' inside a lazy 'foldl'), which was quadratic in the prefix
-- length; this version makes a single left-to-right pass per string.
commonPrefix :: [String]
             -> String
commonPrefix [] = []
commonPrefix xss = foldr1 pairwise xss
  where
    -- Common prefix of two strings: zip and keep while characters agree.
    pairwise a b = map fst (takeWhile (uncurry (==)) (zip a b))
-- | Lift a prefix-completion query into Haskeline 'HKLC.Completion's.
--
-- When all candidates share a common suffix beyond @word@, that shared
-- part is appended to the replacement text so a single Tab extends the
-- input as far as possible.
simpleWordCompletion :: (String -> IO [String])
                     -> String
                     -> IO [HKLC.Completion]
simpleWordCompletion fcomp word = do
    compList <- fcomp word
    case compList of
      [] -> return []
      _ -> do let compList' = map (drop $ length word) compList
                  prefix = commonPrefix compList'
              return $ map (makeCompletion prefix) compList
  where makeCompletion [] comp = HKLC.Completion word comp False
        makeCompletion p comp = HKLC.Completion (word ++ p) comp False
-- Completion sources for each entity, built over the complete* queries.
articleCompletion handle = simpleWordCompletion (completeArticle handle)
categoryCompletion handle = simpleWordCompletion (completeCategory handle)
shopCompletion handle = simpleWordCompletion (completeShop handle)
voucherCompletion handle = simpleWordCompletion (completeVoucher handle)
| pads-fhs/Cashlog | src/Cashlog/Data/Completion.hs | gpl-2.0 | 8,709 | 10 | 18 | 3,824 | 1,910 | 962 | 948 | 183 | 3 |
import Graphics.UI.Gtk hiding (Settings)
import Graphics.Rendering.Cairo
import Text.Printf
-- Target hue range for the colour ramp (input is remapped into this).
hueLimits = (0.00, 1.00)
-- Target value (brightness) range; decreasing, so higher input -> darker.
valLimits = (1.00, 0.25)
-- Source interval: log2 of the rank, for ranks roughly 1..5001.
limitsFrom = (0.00, 12.3)
-- | Format a 'Double' with exactly two digits after the decimal point.
f02 :: Double -> String
f02 x = printf "%.2f" x
-- | Linearly remap a point from the interval @limitsFrom@ onto the
-- interval @limitsTo@, clamping the input into @limitsFrom@ first.
ptAlong (to0, to1) (from0, from1) pointFrom =
  to0 + (to1 - to0) * ratio
  where
    clamped = from0 `max` pointFrom `min` from1
    ratio = (clamped - from0) / (from1 - from0)
-- | Expose handler for the colour-line canvas: reads the slider value,
-- maps log2(rank) onto hue and value, and renders via Cairo.
drawCanvas canvas adj2 _evt = do
  rankD <- adjustmentGetValue adj2
  let sat = 0.40
      colorPoint = logBase 2.0 rankD
      hue = ptAlong hueLimits limitsFrom colorPoint
      val = ptAlong valLimits limitsFrom colorPoint
  dw <- widgetGetDrawWindow canvas
  renderWithDrawable dw (drawColorLine hue sat val)
  -- True: the expose event is fully handled.
  return True
-- | Render @text@ at (x, y) in black using an 8pt Sans Pango layout.
paintText x y text = do
  fntDscr <- liftIO (fontDescriptionFromString "Sans 8")
  layout <- createLayout (text)
  liftIO (layoutSetFontDescription layout (Just fntDscr))
  moveTo x y
  setSourceRGB 0 0 0
  showLayout layout
-- | Draw a filled colour bar plus a small swatch with a black border,
-- labelled with the HSV components.
-- NOTE(review): hsvToRgb is not among the visible imports — presumably
-- defined elsewhere in this project; confirm its source.
drawColorLine hue sat val = do
  let (r,g,b) = hsvToRgb (hue,sat,val)
      text = "hue="++(f02 hue)++" sat="++(f02 sat)++" val="++(f02 val)
  setSourceRGB r g b
  rectangle 40 10 300 3
  fill
  rectangle 10 10 20 20
  fill
  setSourceRGB 0 0 0
  rectangle 10 10 20 20
  stroke
  paintText 40 17 text
-- | Expose handler for the second canvas: one swatch per power of two
-- from 2^0 up to 2^12, stacked vertically.
drawCanvas2 canvas _evt = do
  dw <- widgetGetDrawWindow canvas
  mapM
    (\(y,twoToY) -> renderWithDrawable dw (drawBox y twoToY))
    [(y, 2.0**y) | y <- [0.00..12.00]]
  return True
-- | Draw one labelled swatch for rank @rankD@ at row @y@, using the
-- same log2-based hue/value mapping as 'drawCanvas'.
drawBox y rankD = do
  let colorPoint = logBase 2.0 rankD
      sat = 0.40
      hue = ptAlong hueLimits limitsFrom colorPoint
      val = ptAlong valLimits limitsFrom colorPoint
      (r,g,b) = hsvToRgb (hue,sat,val)
  setSourceRGB r g b
  rectangle 10 (10+y*24) 20 20
  fill
  setSourceRGB 0 0 0
  rectangle 10 (10+y*24) 20 20
  stroke
  paintText 40 (13+y*24) (show (round (rankD)))
-- | Build the GTK window: a colour-line canvas, a rank slider (1..5001)
-- that redraws the canvas on change, and a second canvas showing the
-- full power-of-two palette.
main = do
  initGUI
  window <- windowNew
  onDestroy window mainQuit
  vbox <- vBoxNew False 0
  set window [
    containerChild := vbox ]
  canvas <- drawingAreaNew
  widgetSetSizeRequest canvas 380 35
  boxPackStart vbox canvas PackNatural 0
  -- adjustment: value 1, range [1, 5001), step 1, page 50, page size 1
  adj2 <- adjustmentNew 1 1 5001 1 50 1
  hsc2 <- hScaleNew adj2
  scaleSetDigits hsc2 0
  boxPackStart vbox hsc2 PackNatural 0
  onExpose canvas (
    drawCanvas canvas adj2)
  canvas2 <- drawingAreaNew
  widgetSetSizeRequest canvas2 380 330
  boxPackStart vbox canvas2 PackGrow 0
  onExpose canvas2 ( drawCanvas2 canvas2 )
  -- Slider movement invalidates the first canvas, triggering a redraw.
  onValueChanged adj2 ( widgetQueueDraw canvas )
  widgetShowAll window
  mainGUI
| jsavatgy/hatupist | code/resultColorTheme.hs | gpl-2.0 | 2,559 | 0 | 16 | 570 | 1,000 | 481 | 519 | 85 | 1 |
module DailyProgrammer.TestISBN where
import Test.HUnit
import DailyProgrammer.ISBN
-- HUnit cases for the ISBN-10 kata: character values, format checks,
-- the weighted checksum, and overall validity (valid iff well-formed
-- and checksum divisible by 11).
testCharValue1 = 10 @=? charValue 'X'
testCharValue2 = 0 @=? charValue '-'
testCharValue3 = 3 @=? charValue '3'
testFormat1 = True @=? isValidFormat "0-7475-3269-9"
testFormat2 = False @=? isValidFormat "0-7475-3269-x"
testFormat3 = True @=? isValidFormat "0-7475-3269-X"
testFormat4 = False @=? isValidFormat "0-74753-269-X"
testFormat5 = False @=? isValidFormat "0-74X5-3269-9"
testChecksum = 242 @=? checksum "0-7475-3269-9"
testIsValid1 = True @=? isValid "0-7475-3269-9"
testIsValid2 = False @=? isValid "0-7475-3269-X"
testIsValid3 = False @=? isValid "0-74753-269-9"
| d-strickland/dailyprogrammer | haskell/test/DailyProgrammer/TestISBN.hs | gpl-3.0 | 669 | 0 | 6 | 90 | 160 | 82 | 78 | 15 | 1 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Jet.Variables where
import Control.Applicative
import Control.Monad.Trans.Reader
import Data.ByteString.Char8 (ByteString, pack)
import qualified Data.ByteString.Lazy.Char8 as C
import Data.Double.Conversion.ByteString (toFixed)
import Data.Map (Map, fromList, maxView)
import Data.Maybe (fromMaybe)
import HEP.Data.LHEF
import Jet.Selection (finalObjs)
import Object.Particles (ParObjs (..))
type JetLevelResult = Map C.ByteString ByteString
-- | Compute the jet-level analysis variables for one LHEF event and
-- render them as a name -> formatted-ByteString map.  Counts (nl, nb,
-- ntau, nj) are shown as integers; kinematic quantities are fixed to
-- 2 (or 3) decimal places.  Sentinel -1 marks undefined values (no
-- isolated lepton / no jets).
calcVar :: Reader EventEntry JetLevelResult
calcVar = do
  fobj <- finalObjs
  let alljet = (++) <$> jet <*> bjet $ fobj
      (nl, nb, ntau) = (,,) <$>
                       length . isoLep <*> length . bjet <*> length . taujet $
                       fobj
      -- nj counts only jets with pT above 30 (units per the pt helper).
      nj = length . filter (\p -> pt p > 30) $! alljet
      met = pt (missingPt fobj)
      hT = hTinc fobj
      meff = met + hT
      mT = fromMaybe (-1) (transMassLep fobj)
      pTj1 = if null alljet then -1 else maximum (map pt alljet)
      mbl_by_theta = mBL fobj
      er_by_theta = eRatioBL fobj
  return $ fromList [ ("nl", (pack . show) nl)
                    , ("nb", (pack . show) nb)
                    , ("ntau", (pack . show) ntau)
                    , ("nj", (pack . show) nj)
                    , ("met", toFixed 2 met)
                    , ("HT", toFixed 2 hT)
                    , ("meff", toFixed 2 meff)
                    , ("mT", toFixed 2 mT)
                    , ("pTj1", toFixed 2 pTj1)
                    , ("m_bl_theta", toFixed 2 mbl_by_theta)
                    , ("er_by_theta", toFixed 3 er_by_theta)
                    ]
-- | Inclusive HT: scalar sum of jet/b-jet pT above 30 plus isolated
-- lepton pT above 20.
hTinc :: ParObjs -> Double
hTinc ParObjs { .. } =
    (sum . filter (>30) . map pt) (jet ++ bjet) +
    (sum . filter (>20) . map pt) isoLep
-- | Transverse mass of the leading isolated lepton with the missing
-- momentum vector; 'Nothing' when there is no isolated lepton.
transMassLep :: ParObjs -> Maybe Double
transMassLep ParObjs { .. }
    | null isoLep = Nothing
    | otherwise = Just $ transverseMass [head isoLep] (setXYM kx ky 0)
  where kx = let (x, _, _, _, _) = pup missingPt in x
        ky = let (_, y, _, _, _) = pup missingPt in y
-- | Invariant mass of the (b-jet, lepton) pair with the largest
-- cos(theta) separation; -1 when no such pair exists.
mBL :: ParObjs -> Double
mBL ParObjs { .. } =
    case (maxView . fromList) [(cosTheta b l, invariantMass [b,l]) |
                                   b <- bjet, l <- isoLep] of
      Just (mbl, _) -> mbl
      Nothing -> -1
-- | Lepton energy fraction eL / (eB + eL) for the maximum-cos(theta)
-- (b, l) pair among pairs with invariant mass below 165; -1 when no
-- pair qualifies.
eRatioBL :: ParObjs -> Double
eRatioBL ParObjs { .. } =
    case (maxView . fromList) [(cosTheta b l,
                                let (eB, eL) = (energyOf b, energyOf l)
                                in eL / (eB + eL))
                              | b <- bjet, l <- isoLep, invariantMass [b,l] < 165] of
      Just (er, _) -> er
      _ -> -1
| cbpark/GluinoStopPolarization | lib/Jet/Variables.hs | gpl-3.0 | 2,967 | 0 | 17 | 1,198 | 1,000 | 543 | 457 | 65 | 2 |
-- |This module implements algorithms for HTTP user-agents described in
-- section 5 of RFC 6265, \"HTTP State Management Mechanism\"
-- (<http://www.rfc-editor.org/rfc/rfc6265.txt>).
--
-- The algorithms in the RFC make frequent use of the current time. For
-- flexibility, this module does not retrieve the current time from the
-- underlying system; instead, operations which may need the current time
-- take it as a parameter.
module Web.CookieJar (
-- * Types
Jar()
, emptyJar
, Endpoint(..)
, Cookie(..)
, SetCookie(..)
, emptySetCookie
-- * Algorithms
, parseSetCookie
, receive
, receiveHeaders
, send
, sendHeaders
, endSession
) where
import qualified Data.ByteString as BS
import Control.Monad
import Data.List
import Data.Maybe
import Data.Time
import qualified Network.DNS.Public as P
import Web.CookieJar.Types
import Web.CookieJar.Parser
import Web.CookieJar.Parser.Util
-- | Domain-matching per RFC 6265 section 5.1.3: either the strings are
-- identical, or the domain is a proper suffix of the host preceded by
-- a dot and the host is a host name (not an IP address).
domainMatches :: CI Bytes -> CI Bytes -> Bool
domainMatches bs ds
  | bs == ds
  = True
  | otherwise
  = ds' `BS.isSuffixOf` bs'
    && BS.pack [period] `BS.isSuffixOf` BS.take (BS.length bs' - BS.length ds') bs'
    && isHostName bs'
  where
    bs' = foldedCase bs
    ds' = foldedCase ds
-- TODO should be False when the string is an IP address
-- TODO is there any way to do a positive test instead?
isHostName :: Bytes -> Bool
isHostName _ = True
-- | Default cookie path per RFC 6265 section 5.1.4: up to (and
-- including) the last slash of the request path, or "/" when the path
-- is empty or does not start with 0x2F ('/').
defaultPath :: Endpoint -> Bytes
defaultPath Endpoint{..} = case BS.uncons epPath of
  Just (0x2F, bs) ->
    case listToMaybe $ BS.findIndices (== slash) bs of
      Nothing -> root
      Just pos -> BS.take (pos + 1) epPath
  _ -> root
  where
    root = BS.pack [slash]
-- | Path-matching per RFC 6265 section 5.1.4: exact match, or the
-- cookie path is a prefix that either ends in "/" or is followed by a
-- "/" in the request path.
pathMatches :: Bytes -> Bytes -> Bool
pathMatches rp cp
  | rp == cp = True
  | pre && root `BS.isSuffixOf` cp = True
  | pre && root `BS.isPrefixOf` BS.drop (BS.length cp) rp = True
  | otherwise = False
  where
    pre = cp `BS.isPrefixOf` rp
    root = BS.pack [slash]
-- |End the current session, as described in section 5.3 of the RFC, on page 24.
-- Non-persistent (session) cookies are discarded.
endSession :: Jar -> Jar
endSession = modifyCookies $ filter cPersist
-- Drop every cookie whose expiry time is known and at or before @now@.
expire :: Time -> Jar -> Jar
expire now = modifyCookies . filter $ not . (== Just True) . fmap (<= now) . cExpires
-- |Receive a set-cookie request, possibly updating the user-agent state
--
-- Implements the storage algorithm of RFC 6265 section 5.3: the new
-- cookie replaces any existing cookie with the same (name, domain,
-- path), inheriting its creation time; requests rejected by the
-- domain/public-suffix/http-only checks leave the jar unchanged
-- (except for expiry housekeeping).
receive
  :: Time -- ^The current time
  -> Endpoint -- ^The source of the request
  -> SetCookie -- ^The request
  -> Jar -- ^The user-agent state
  -> Jar
receive now ep@Endpoint{..} SetCookie{..} jar =
  expire now
    $ if abort then jar else Jar (jarRules jar)
    $ Cookie
      { cName = scName
      , cValue = scValue
      , cCreation = maybe now id $ fmap cCreation same
      , cAccess = now
      , cExpires = exp
      , cPersist = maybe False (const True) exp
      , cDomain = domain
      , cHostOnly = dMat == Nothing
      , cPath = path
      , cSecure = scSecure
      , cHttpOnly = scHttpOnly
      } : cs
  where
    -- Cookies that collide on (name, domain, path) are replaced.
    (sames, cs) =
      partition (\Cookie{..} -> (cName, cDomain, cPath) == (scName, domain, path))
      $ jarCookies jar
    same = listToMaybe $ take 1 sames
    {--
     - Non-positive max-age values are supposed to result in an expiration
     - date set to the "earliest representable time" (section 5.2.2).
     - However, UTCTime does not have an earliest representable time, as it
     - allows negative days, which are stored using infinite-precision
     - integers.
     -
     - This isn't a huge problem, because the effect of setting a cookie to
     - expire at the earliest representable time is that it expires
     - immediately, which is an effect we can achieve by setting the
     - expiration time to any time equal to or earlier than now.
     --}
    -- Max-Age takes precedence over Expires (section 5.2.2).
    exp =
      if fmap (< 0) scMaxAge == Just True
      then Just now
      else fmap (flip addUTCTime now . fromIntegral) scMaxAge `mplus` scExpires
    -- A domain attribute that is exactly a public suffix is only
    -- honoured when it equals the request host (section 5.3 step 5).
    public =
      let d = fmap original scDomain >>= P.makeDomain
      in isJust d && fmap (P.publicSuffix $ jarRules jar) d == d
    exactDomain = scDomain == Just epDomain
    scDomain' = if public && exactDomain then Nothing else scDomain
    dMat = fmap (epDomain `domainMatches`) scDomain'
    abort =
      dMat == Just False
      || scHttpOnly && not epHttp
      || fmap cHttpOnly same == Just True && not epHttp
      || public && not exactDomain
    domain = maybe epDomain id scDomain'
    path = maybe (defaultPath ep) id scPath
-- Core of 'send' without the expiry pass: select the cookies matching
-- the endpoint, stamp their last-access time, and rebuild the jar.
sendNoExpire :: Time -> Endpoint -> Jar -> ([Cookie], Jar)
sendNoExpire now ep jar =
  (sortBy headerOrder send', Jar (jarRules jar) $ send' ++ noSend)
  where
    (send, noSend) = partition (shouldSend ep) $ jarCookies jar
    send' = map (\c -> c { cAccess = now }) send
-- Header ordering per RFC 6265 section 5.4.2: longer paths first,
-- ties broken by earlier creation time.
headerOrder :: Cookie -> Cookie -> Ordering
headerOrder a b = let f = BS.length . cPath in case compare (f b) (f a) of
  EQ -> compare (cCreation a) (cCreation b)
  o -> o
-- Whether a cookie applies to a request endpoint: host match (exact
-- for host-only cookies, domain-match otherwise), path match, and
-- secure/http-only channel requirements.
shouldSend :: Endpoint -> Cookie -> Bool
shouldSend Endpoint{..} Cookie{..} =
  hostOk
  && epPath `pathMatches` cPath
  && (not cSecure || epSecure)
  && (not cHttpOnly || epHttp)
  where
    hostOk =
      cHostOnly && epDomain == cDomain
      || not cHostOnly && epDomain `domainMatches` cDomain
-- |Return the cookies that should be sent as part of a request
--
-- The order of the cookies is specified by section 5.4.2 of the RFC.
--
-- Note that this function returns an updated state because the last-access
-- times of the cookies being sent must be updated (see section 5.4.3 of
-- the RFC). It may be safe to ignore these state changes in certain
-- circumstances, as the only purpose of the last-access field (according
-- to RFC 6265) is to determine the order in which cookies are evicted when
-- space limits are exceeded. Space limits are an optional feature of the
-- RFC and are not yet implemented by this module.
--
-- Expired cookies are purged before the selection is made.
send
  :: Time -- ^The current time
  -> Endpoint -- ^The destination of the request
  -> Jar -- ^The user-agent state
  -> ([Cookie], Jar)
send now ep = sendNoExpire now ep . expire now
-- |Receive any set-cookie requests present in a list of HTTP response
-- headers, possibly updating the user-agent state
--
-- Headers whose name is not \"Set-Cookie\" are ignored, as are
-- Set-Cookie values that fail to parse.  Uses 'mapMaybe' (from
-- "Data.Maybe", already imported) instead of the previous
-- @catMaybes . map@ pair, folding the parse and filter into one pass.
receiveHeaders
  :: Time -- ^The current time
  -> Endpoint -- ^The source of the request
  -> ResponseHeaders -- ^The HTTP response headers
  -> Jar -- ^The user-agent state
  -> Jar
receiveHeaders time host =
  flip (foldr $ receive time host)
  . mapMaybe (parseSetCookie . snd)
  . filter ((== "Set-Cookie") . fst)
-- Render one cookie as the "name=value" form used in a Cookie header.
makeHeaderValue :: Cookie -> Bytes
makeHeaderValue Cookie{..} = cName `BS.append` BS.cons equals cValue
-- |Return the cookie headers that should be sent as part of an HTTP request
--
-- See "send" for some important notes which also apply to this function.
--
-- All matching cookies are folded into a single Cookie header, joined
-- by \"; \"; no header is produced when nothing matches.
sendHeaders
  :: Time -- ^The current time
  -> Endpoint -- ^The destination of the request
  -> Jar -- ^The user-agent state
  -> (RequestHeaders, Jar)
sendHeaders now ep jar = (map ("Cookie",) bs, jar')
  where
    bs = case map makeHeaderValue cs of
      [] -> []
      (b : bs) -> [BS.concat $ b : concatMap ((sep :) . (: [])) bs]
    sep = BS.pack [semicolon, space]
    (cs, jar') = send now ep jar
| ktvoelker/cookie-jar | src/Web/CookieJar.hs | gpl-3.0 | 7,138 | 0 | 16 | 1,710 | 1,762 | 959 | 803 | -1 | -1 |
module Util where
import Control.Monad.Catch
import Control.Monad.IO.Class
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.List.NonEmpty (NonEmpty)
import Data.Typeable
import Options.Applicative
import System.IO
import System.Random
--------------------------------------------------------------------------------
-- Message parser
data ParseException = ParseException [ByteString]
deriving (Show, Typeable)
instance Exception ParseException
type ClientId = ByteString
data Message
= MsgStdin ClientId ByteString
| MsgStdout ByteString
| MsgStderr ByteString
-- | Parse a framed message; inverse of 'serializeMessage'.  Frames
-- that match neither STDOUT/STDERR nor STDIN throw 'ParseException'.
parseMessage :: MonadThrow m => [ByteString] -> m Message
parseMessage ["STDOUT", msg] = return (MsgStdout msg)
parseMessage ["STDERR", msg] = return (MsgStderr msg)
parseMessage msg = parseMessageStdin msg
-- | Parse the three-frame STDIN shape; anything else is an error.
parseMessageStdin :: MonadThrow m => [ByteString] -> m Message
parseMessageStdin ["STDIN", client_id, msg] = return (MsgStdin client_id msg)
parseMessageStdin msgs = throwM (ParseException msgs)
-- | Serialize a message to its frame list (non-empty by construction).
serializeMessage :: Message -> NonEmpty ByteString
serializeMessage (MsgStdin client_id msg) = ["STDIN", client_id, msg]
serializeMessage (MsgStdout msg) = ["STDOUT", msg]
serializeMessage (MsgStderr msg) = ["STDERR", msg]
-- | Print a message locally: stdin lines are labelled with the sender,
-- stdout/stderr payloads are forwarded verbatim to this process's
-- stdout/stderr.
displayMessage :: MonadIO m => Message -> m ()
displayMessage (MsgStdin client_id msg) = liftIO . BS.putStrLn $ "[" <> client_id <> "]: " <> msg
displayMessage (MsgStdout msg) = liftIO $ BS.putStr msg
displayMessage (MsgStderr msg) = liftIO $ BS.hPutStr stderr msg
--------------------------------------------------------------------------------
-- Misc
-- | Make random 4 character ID (lowercase letters).
--
-- Uses 'newStdGen', which splits and updates the global generator, so
-- successive calls produce different IDs.  The previous implementation
-- read 'getStdGen', which returns the current global generator without
-- advancing it, so every call within a process returned the exact same
-- four characters.
mkRandomId :: MonadIO m => m ByteString
mkRandomId = BS.pack . take 4 . randomRs ('a','z') <$> liftIO newStdGen
| mitchellwrosen/coop | src/Util.hs | gpl-3.0 | 1,846 | 0 | 10 | 318 | 509 | 274 | 235 | 35 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.CivicInfo.Elections.VoterInfoQuery
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Looks up information relevant to a voter based on the voter\'s
-- registered address.
--
-- /See:/ <https://developers.google.com/civic-information Google Civic Information API Reference> for @civicinfo.elections.voterInfoQuery@.
module Network.Google.Resource.CivicInfo.Elections.VoterInfoQuery
(
-- * REST Resource
ElectionsVoterInfoQueryResource
-- * Creating a Request
, electionsVoterInfoQuery
, ElectionsVoterInfoQuery
-- * Request Lenses
, eviqReturnAllAvailableData
, eviqElectionId
, eviqAddress
, eviqPayload
, eviqOfficialOnly
) where
import Network.Google.CivicInfo.Types
import Network.Google.Prelude
-- | A resource alias for @civicinfo.elections.voterInfoQuery@ method which the
-- 'ElectionsVoterInfoQuery' request conforms to.
--
-- Shape: @GET \/civicinfo\/v2\/voterinfo@ with the query parameters listed
-- below and a JSON 'VoterInfoRequest' body, yielding a JSON
-- 'VoterInfoResponse'.
type ElectionsVoterInfoQueryResource =
     "civicinfo" :>
       "v2" :>
         "voterinfo" :>
           QueryParam "address" Text :>
             QueryParam "returnAllAvailableData" Bool :>
               QueryParam "electionId" (Textual Int64) :>
                 QueryParam "officialOnly" Bool :>
                   QueryParam "alt" AltJSON :>
                     ReqBody '[JSON] VoterInfoRequest :>
                       Get '[JSON] VoterInfoResponse
-- | Looks up information relevant to a voter based on the voter\'s
-- registered address.
--
-- Fields mirror the query parameters of 'ElectionsVoterInfoQueryResource';
-- each is exposed through the correspondingly named lens below.
--
-- /See:/ 'electionsVoterInfoQuery' smart constructor.
data ElectionsVoterInfoQuery = ElectionsVoterInfoQuery'
    { _eviqReturnAllAvailableData :: !Bool
    , _eviqElectionId :: !(Textual Int64)
    , _eviqAddress :: !Text
    , _eviqPayload :: !VoterInfoRequest
    , _eviqOfficialOnly :: !Bool
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Build an 'ElectionsVoterInfoQuery' from the two mandatory fields.
--
-- The remaining fields start at their defaults ('False' for both boolean
-- switches, @0@ for the election id) and may be adjusted afterwards via
-- the lenses:
--
-- * 'eviqReturnAllAvailableData'
--
-- * 'eviqElectionId'
--
-- * 'eviqAddress'
--
-- * 'eviqPayload'
--
-- * 'eviqOfficialOnly'
electionsVoterInfoQuery
    :: Text -- ^ 'eviqAddress'
    -> VoterInfoRequest -- ^ 'eviqPayload'
    -> ElectionsVoterInfoQuery
electionsVoterInfoQuery addr payload =
    ElectionsVoterInfoQuery'
    { _eviqAddress = addr
    , _eviqPayload = payload
    , _eviqReturnAllAvailableData = False
    , _eviqElectionId = 0
    , _eviqOfficialOnly = False
    }
-- | If set to true, the query will return the success code and include any
-- partial information when it is unable to determine a matching address or
-- unable to determine the election for electionId=0 queries.
eviqReturnAllAvailableData :: Lens' ElectionsVoterInfoQuery Bool
eviqReturnAllAvailableData =
    lens _eviqReturnAllAvailableData
         (\record val -> record {_eviqReturnAllAvailableData = val})
-- | The unique ID of the election to look up. A list of election IDs can be
-- obtained at
-- https:\/\/www.googleapis.com\/civicinfo\/{version}\/elections
eviqElectionId :: Lens' ElectionsVoterInfoQuery Int64
eviqElectionId =
    lens _eviqElectionId (\record val -> record {_eviqElectionId = val})
      . _Coerce
-- | The registered address of the voter to look up.
eviqAddress :: Lens' ElectionsVoterInfoQuery Text
eviqAddress =
    lens _eviqAddress (\record val -> record {_eviqAddress = val})
-- | Multipart request metadata.
eviqPayload :: Lens' ElectionsVoterInfoQuery VoterInfoRequest
eviqPayload =
    lens _eviqPayload (\record val -> record {_eviqPayload = val})
-- | If set to true, only data from official state sources will be returned.
eviqOfficialOnly :: Lens' ElectionsVoterInfoQuery Bool
eviqOfficialOnly =
    lens _eviqOfficialOnly
         (\record val -> record {_eviqOfficialOnly = val})
instance GoogleRequest ElectionsVoterInfoQuery where
        type Rs ElectionsVoterInfoQuery = VoterInfoResponse
        -- No OAuth scopes are required for this method.
        type Scopes ElectionsVoterInfoQuery = '[]
        -- NOTE: the argument order below must match the parameter order of
        -- 'ElectionsVoterInfoQueryResource' exactly.
        requestClient ElectionsVoterInfoQuery'{..}
          = go (Just _eviqAddress)
              (Just _eviqReturnAllAvailableData)
              (Just _eviqElectionId)
              (Just _eviqOfficialOnly)
              (Just AltJSON)
              _eviqPayload
              civicInfoService
          where go
                  = buildClient
                      (Proxy :: Proxy ElectionsVoterInfoQueryResource)
                      mempty
| rueshyna/gogol | gogol-civicinfo/gen/Network/Google/Resource/CivicInfo/Elections/VoterInfoQuery.hs | mpl-2.0 | 5,119 | 0 | 16 | 1,173 | 631 | 372 | 259 | 97 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
-- Module : Network.AWS.ElasticFileSystem.DeleteMountTarget
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Deletes the specified mount target.
--
-- This operation forcibly breaks any mounts of the file system via the
-- mount target being deleted, which might disrupt instances or
-- applications using those mounts. To avoid applications getting cut off
-- abruptly, you might consider unmounting any mounts of the mount target,
-- if feasible. The operation also deletes the associated network
-- interface. Uncommitted writes may be lost, but breaking a mount target
-- using this operation does not corrupt the file system itself. The file
-- system you created remains. You can mount an EC2 instance in your VPC
-- using another mount target.
--
-- This operation requires permission for the following action on the file
-- system:
--
-- - @elasticfilesystem:DeleteMountTarget@
--
-- The @DeleteMountTarget@ call returns while the mount target state is
-- still \"deleting\". You can check the mount target deletion by calling
-- the DescribeMountTargets API, which returns a list of mount target
-- descriptions for the given file system.
--
-- The operation also requires permission for the following Amazon EC2
-- action on the mount target\'s network interface:
--
-- - @ec2:DeleteNetworkInterface@
--
-- <http://docs.aws.amazon.com/directoryservice/latest/devguide/API_DeleteMountTarget.html>
module Network.AWS.ElasticFileSystem.DeleteMountTarget
(
-- * Request
DeleteMountTarget
-- ** Request constructor
, deleteMountTarget
-- ** Request lenses
, dmtMountTargetId
-- * Response
, DeleteMountTargetResponse
-- ** Response constructor
, deleteMountTargetResponse
) where
import Network.AWS.ElasticFileSystem.Types
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | Request to delete a single mount target, identified by its ID.
--
-- /See:/ 'deleteMountTarget' smart constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'dmtMountTargetId'
newtype DeleteMountTarget = DeleteMountTarget'
    { _dmtMountTargetId :: Text
    } deriving (Eq,Read,Show)
-- | Construct a 'DeleteMountTarget' request for the given mount target ID.
deleteMountTarget :: Text -> DeleteMountTarget
deleteMountTarget mountTargetId =
    DeleteMountTarget' {_dmtMountTargetId = mountTargetId}
-- | String. The ID of the mount target to delete.
dmtMountTargetId :: Lens' DeleteMountTarget Text
dmtMountTargetId =
    lens _dmtMountTargetId (\record val -> record {_dmtMountTargetId = val})
instance AWSRequest DeleteMountTarget where
        type Sv DeleteMountTarget = ElasticFileSystem
        type Rs DeleteMountTarget = DeleteMountTargetResponse
        -- HTTP DELETE against the path built by the 'ToPath' instance.
        request = delete
        -- The service returns no body on success, so the (empty) response
        -- constructor is produced directly.
        response = receiveNull DeleteMountTargetResponse'
-- | No extra headers are needed for this request.
instance ToHeaders DeleteMountTarget where
        toHeaders _ = mempty

-- | The mount target ID is interpolated into the request path.
instance ToPath DeleteMountTarget where
        toPath DeleteMountTarget'{..} =
            mconcat ["/2015-02-01/mount-targets/", toText _dmtMountTargetId]

-- | No query string parameters are used.
instance ToQuery DeleteMountTarget where
        toQuery _ = mempty
-- | Empty response: a successful delete returns no payload.
--
-- /See:/ 'deleteMountTargetResponse' smart constructor.
data DeleteMountTargetResponse =
    DeleteMountTargetResponse'
    deriving (Eq,Read,Show)

-- | 'DeleteMountTargetResponse' smart constructor.
deleteMountTargetResponse :: DeleteMountTargetResponse
deleteMountTargetResponse = DeleteMountTargetResponse'
| fmapfmapfmap/amazonka | amazonka-efs/gen/Network/AWS/ElasticFileSystem/DeleteMountTarget.hs | mpl-2.0 | 3,999 | 0 | 9 | 767 | 335 | 215 | 120 | 42 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.MapsEngine.Tables.Patch
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Mutate a table asset.
--
-- /See:/ <https://developers.google.com/maps-engine/ Google Maps Engine API Reference> for @mapsengine.tables.patch@.
module Network.Google.Resource.MapsEngine.Tables.Patch
(
-- * REST Resource
TablesPatchResource
-- * Creating a Request
, tablesPatch
, TablesPatch
-- * Request Lenses
, tpPayload
, tpId
) where
import Network.Google.MapsEngine.Types
import Network.Google.Prelude
-- | A resource alias for @mapsengine.tables.patch@ method which the
-- 'TablesPatch' request conforms to.
--
-- Shape: @PATCH \/mapsengine\/v1\/tables\/{id}@ with a JSON 'Table' body;
-- a successful patch returns no payload.
type TablesPatchResource =
     "mapsengine" :>
       "v1" :>
         "tables" :>
           Capture "id" Text :>
             QueryParam "alt" AltJSON :>
               ReqBody '[JSON] Table :> Patch '[JSON] ()
-- | Mutate a table asset.
--
-- '_tpPayload' carries the partial 'Table' to apply; '_tpId' identifies the
-- table being patched.
--
-- /See:/ 'tablesPatch' smart constructor.
data TablesPatch = TablesPatch'
    { _tpPayload :: !Table
    , _tpId :: !Text
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Build a 'TablesPatch' request from its two (mandatory) fields.
--
-- Both fields may also be adjusted afterwards with the lenses:
--
-- * 'tpPayload'
--
-- * 'tpId'
tablesPatch
    :: Table -- ^ 'tpPayload'
    -> Text -- ^ 'tpId'
    -> TablesPatch
tablesPatch body tableId =
    TablesPatch'
    { _tpId = tableId
    , _tpPayload = body
    }
-- | Multipart request metadata.
tpPayload :: Lens' TablesPatch Table
tpPayload =
    lens _tpPayload (\record val -> record {_tpPayload = val})

-- | The ID of the table.
tpId :: Lens' TablesPatch Text
tpId =
    lens _tpId (\record val -> record {_tpId = val})
instance GoogleRequest TablesPatch where
        -- A successful patch has no response body.
        type Rs TablesPatch = ()
        type Scopes TablesPatch =
             '["https://www.googleapis.com/auth/mapsengine"]
        -- Argument order must match 'TablesPatchResource': the captured id,
        -- the alt parameter, then the request body.
        requestClient TablesPatch'{..}
          = go _tpId (Just AltJSON) _tpPayload
              mapsEngineService
          where go
                  = buildClient (Proxy :: Proxy TablesPatchResource)
                      mempty
| rueshyna/gogol | gogol-maps-engine/gen/Network/Google/Resource/MapsEngine/Tables/Patch.hs | mpl-2.0 | 2,800 | 0 | 13 | 684 | 386 | 232 | 154 | 59 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Test.HUnit
import qualified Cortex.Miranda.CommitRaw as C
import Cortex.Miranda.CommitList
-----
-- Test if insertion order matters.
test0 :: Test
test0 = TestCase $ do
    let a = C.Commit ("a", C.Set "1", "", "2012.03.18 20:32:32:910425188000")
        b = C.Commit ("b", C.Set "2", "", "2012.03.18 20:32:33:910425188000")
        c = C.Commit ("c", C.Set "3", "", "2012.03.18 20:32:34:910425188000")
        add x y = fst $ insert x y
        cl1 = add a $ add b $ add c empty
        cl2 = add a $ add c $ add b empty
        cl3 = add c $ add b $ add a empty
        cl4 = add b $ add c $ add a empty
        -- Hashes of a, b and c; each must be a member of every list,
        -- regardless of insertion order.
        hashes =
            [ "2fba5c6f2ca481b06b69336926c51bfbd57e27a9"
            , "3a67d20754c9ca37b0da711cfdaf0b54c75f59ff"
            , "6f5c2c772412e41d75f5fa64ceb81bdbf11462b0"
            ]
    assertBool "cl1 lost commits" $ length (toList cl1) == 3
    assertBool "cl2 lost commits" $ length (toList cl2) == 3
    assertBool "cl3 lost commits" $ length (toList cl3) == 3
    assertBool "cl4 lost commits" $ length (toList cl4) == 3
    assertBool "hash mismatch" $
        and [member h cl | h <- hashes, cl <- [cl1, cl2, cl3, cl4]]
-----

-----
-- Test if converting to list works.
test1 :: Test
test1 = TestCase $ do
    let a = C.Commit ("a", C.Set "1", "", "2012.03.18 20:32:32:910425188000")
        b = C.Commit ("b", C.Set "2", "", "2012.03.18 20:32:33:910425188000")
        c = C.Commit ("c", C.Set "3", "", "2012.03.18 20:32:34:910425188000")
        add x y = fst $ insert x y
        cl = add a (add b (add c empty))
    -- Newest commit first: c was inserted last in time-stamp order.
    assertBool "" $ toList cl == [c, b, a]
-----

-----
-- Test insertion of equal commits.
test2 :: Test
test2 = TestCase $ do
    let a = C.Commit ("a", C.Set "1", "", "2012.03.18 20:32:32:910425188000")
        add x y = fst $ insert x y
        cl = add a (add a (add a empty))
        -- Re-inserting a commit that is already present must report
        -- no newly added commits.
        r = snd (insert a cl)
    assertBool "" $ null r
    assertBool "" $ length (toList cl) == 1
-----

-----
-- All CommitList tests, run by 'main'.
tests :: Test
tests = TestList
    [ test0
    , test1
    , test2
    ]

main :: IO Counts
main = runTestTT tests
| maarons/Cortex | Miranda/test/CommitList_Test.hs | agpl-3.0 | 2,743 | 0 | 14 | 612 | 981 | 480 | 501 | 55 | 1 |
-- -*-haskell-*-
-- GIMP Toolkit (GTK) CustomStore TreeModel
--
-- Author : Duncan Coutts, Axel Simon
--
-- Created: 11 Feburary 2006
--
-- Copyright (C) 2005 Duncan Coutts, Axel Simon
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- |
-- Maintainer : gtk2hs-users@lists.sourceforge.net
-- Stability : provisional
-- Portability : portable (depends on GHC)
--
-- Standard model to store hierarchical data.
--
module Graphics.UI.Gtk.ModelView.TreeStore (
-- * Types
TreeStore,
-- * Constructors
treeStoreNew,
treeStoreNewDND,
-- * Implementation of Interfaces
treeStoreDefaultDragSourceIface,
treeStoreDefaultDragDestIface,
-- * Methods
treeStoreGetValue,
treeStoreGetTree,
treeStoreLookup,
treeStoreSetValue,
-- treeStoreSetTree,
treeStoreInsert,
treeStoreInsertTree,
treeStoreRemove,
treeStoreClear,
) where
import Data.Bits
import Data.Word (Word)
import Data.Maybe ( fromMaybe, isJust )
import Data.Tree
import Control.Monad ( liftM, when )
import Control.Exception (assert)
import Data.IORef
import Graphics.UI.Gtk.ModelView.Types
import Graphics.UI.Gtk.Types (GObjectClass(..), TreeModelClass)
import Graphics.UI.Gtk.ModelView.CustomStore
import Graphics.UI.Gtk.ModelView.TreeModel
import Graphics.UI.Gtk.ModelView.TreeDrag
import Control.Monad.Trans ( liftIO )
--------------------------------------------
-- internal model data types
--
-- | A store for hierarchical data.
--
-- Wraps a 'CustomStore' whose private state is an 'IORef' holding the
-- current 'Store' (depth profile plus traversal cache).
newtype TreeStore a = TreeStore (CustomStore (IORef (Store a)) a)

instance TypedTreeModelClass TreeStore
instance TreeModelClass (TreeStore a)
instance GObjectClass (TreeStore a) where
  toGObject (TreeStore tm) = toGObject tm
  unsafeCastGObject = TreeStore . unsafeCastGObject
-- | Maximum number of nodes on each level.
--
-- * These numbers determine how many bits in a 'TreeIter' are devoted to
--   each level. Hence, these numbers reflect log2 of the maximum number
--   of nodes at a level, rounded up.
--
type Depth = [Int]

-- Internal model state: the per-level bit widths and the traversal cache
-- holding the actual forest.
data Store a = Store {
  depth :: Depth,
  content :: Cache a
  }
-- | Create a new list store.
--
-- * The given rose tree determines the initial content and may be the empty
--   list. Each 'Tree' in the forest corresponds to one top-level node.
--
-- Convenience wrapper around 'treeStoreNewDND' with the default
-- drag-and-drop interfaces installed.
treeStoreNew :: Forest a -> IO (TreeStore a)
treeStoreNew forest = treeStoreNewDND forest
                        (Just treeStoreDefaultDragSourceIface)
                        (Just treeStoreDefaultDragDestIface)
-- | Create a new list store.
--
-- * In addition to 'treeStoreNew', this function takes an two interfaces
--   to implement user-defined drag-and-drop functionality.
--
treeStoreNewDND :: Forest a -- ^ the inital tree stored in this model
  -> Maybe (DragSourceIface TreeStore a) -- ^ an optional interface for drags
  -> Maybe (DragDestIface TreeStore a) -- ^ an optional interface to handle drops
  -> IO (TreeStore a)
treeStoreNewDND forest mDSource mDDest = do
  storeRef <- newIORef Store {
      depth = calcForestDepth forest,
      content = storeToCache forest
    }
  -- withStore runs a pure query against the current store;
  -- withStoreUpdateCache additionally writes back the updated cache that
  -- every traversal returns.
  let withStore f = readIORef storeRef >>= return . f
      withStoreUpdateCache f = do
        store <- readIORef storeRef
        let (result, cache') = f store
        writeIORef storeRef store { content = cache' }
        return result

  customStoreNew storeRef TreeStore TreeModelIface {
    treeModelIfaceGetFlags = return [],

    treeModelIfaceGetIter = \path -> withStore $
      \Store { depth = d } -> fromPath d path,

    treeModelIfaceGetPath = \iter -> withStore $
      \Store { depth = d } -> toPath d iter,

    treeModelIfaceGetRow = \iter -> withStoreUpdateCache $
      \Store { depth = d, content = cache } ->
      case checkSuccess d iter cache of
        (True, cache'@((_, (Node { rootLabel = val }:_)):_)) ->
          (val, cache')
        _ -> error "TreeStore.getRow: iter does not refer to a valid entry",

    treeModelIfaceIterNext = \iter -> withStoreUpdateCache $
      \Store { depth = d, content = cache } -> iterNext d iter cache,

    treeModelIfaceIterChildren = \mIter -> withStoreUpdateCache $
      \Store { depth = d, content = cache } ->
      let iter = fromMaybe invalidIter mIter
       in iterNthChild d 0 iter cache,

    treeModelIfaceIterHasChild = \iter -> withStoreUpdateCache $
      \Store { depth = d, content = cache } ->
      let (mIter, cache') = iterNthChild d 0 iter cache
       in (isJust mIter, cache'),

    treeModelIfaceIterNChildren = \mIter -> withStoreUpdateCache $
      \Store { depth = d, content = cache } ->
      let iter = fromMaybe invalidIter mIter
       in iterNChildren d iter cache,

    treeModelIfaceIterNthChild = \mIter idx -> withStoreUpdateCache $
      \Store { depth = d, content = cache } ->
      let iter = fromMaybe invalidIter mIter
       in iterNthChild d idx iter cache,

    treeModelIfaceIterParent = \iter -> withStore $
      \Store { depth = d } -> iterParent d iter,

    treeModelIfaceRefNode = \_ -> return (),
    treeModelIfaceUnrefNode = \_ -> return ()
   } mDSource mDDest
-- | Default drag functions for
-- 'Graphics.UI.Gtk.ModelView.TreeStore'. These functions allow the rows of
-- the model to serve as drag source. Any row is allowed to be dragged and the
-- data set in the 'SelectionDataM' object is set with 'treeSetRowDragData',
-- i.e. it contains the model and the 'TreePath' to the row.
treeStoreDefaultDragSourceIface :: DragSourceIface TreeStore row
treeStoreDefaultDragSourceIface = DragSourceIface {
    treeDragSourceRowDraggable = \_ _-> return True,
    treeDragSourceDragDataGet = treeSetRowDragData,
    -- Deleting the source row after a successful move; the (_:_) pattern
    -- excludes the (invalid) empty path.
    treeDragSourceDragDataDelete = \model dest@(_:_) -> do
            liftIO $ treeStoreRemove model dest
            return True

  }
-- | Default drop functions for 'Graphics.UI.Gtk.ModelView.TreeStore'. These
-- functions accept a row and insert the row into the new location if it is
-- dragged into a tree view
-- that uses the same model.
treeStoreDefaultDragDestIface :: DragDestIface TreeStore row
treeStoreDefaultDragDestIface = DragDestIface {
    -- A drop is possible only when the dragged row comes from this very
    -- model.
    treeDragDestRowDropPossible = \model dest -> do
      mModelPath <- treeGetRowDragData
      case mModelPath of
        Nothing -> return False
        Just (model', source) -> return (toTreeModel model==toTreeModel model'),
    -- On drop: copy the subtree at the source path to the destination.
    treeDragDestDragDataReceived = \model dest@(_:_) -> do
      mModelPath <- treeGetRowDragData
      case mModelPath of
        Nothing -> return False
        Just (model', source@(_:_)) ->
          if toTreeModel model/=toTreeModel model' then return False
          else liftIO $ do
            row <- treeStoreGetTree model source
            treeStoreInsertTree model (init dest) (last dest) row
            return True
  }
--------------------------------------------
-- low level bit-twiddling utility functions
--
-- TODO: figure out how these things work when Word is 64 bits
-- | Number of bits needed to represent the given value:
-- @0@ for @0@, and @n@ for any value below @2^n@.
bitsNeeded :: Word -> Int
bitsNeeded = count 0
  where count acc 0 = acc
        count acc v = count (acc + 1) (v `shiftR` 1)
-- Extract @count@ bits starting at bit @off@ from the three payload words of
-- a 'TreeIter'.  The per-word offsets (off, off-32, off-64) assume 32-bit
-- words; a negative shift amount makes the corresponding word contribute
-- nothing for in-range slices.
getBitSlice :: TreeIter -> Int -> Int -> Word
getBitSlice (TreeIter _ a b c) off count =
      getBitSliceWord a off count
  .|. getBitSliceWord b (off-32) count
  .|. getBitSliceWord c (off-64) count
  where getBitSliceWord :: Word -> Int -> Int -> Word
        getBitSliceWord word off count =
          word `shiftR` off .&. (1 `shiftL` count - 1)
-- Store @value@ in the @count@-bit slice starting at bit @off@ of the
-- iterator's payload, leaving all other bits untouched.  The 'assert'
-- guards against values that do not fit into the slice.
setBitSlice :: TreeIter -> Int -> Int -> Word -> TreeIter
setBitSlice (TreeIter stamp a b c) off count value =
  assert (value < 1 `shiftL` count) $
  TreeIter stamp
           (setBitSliceWord a off count value)
           (setBitSliceWord b (off-32) count value)
           (setBitSliceWord c (off-64) count value)
  where setBitSliceWord :: Word -> Int -> Int -> Word -> Word
        setBitSliceWord word off count value =
          let mask = (1 `shiftL` count - 1) `shiftL` off
           in (word .&. complement mask) .|. (value `shiftL` off)
-- Compare the first @pos@ payload bits of two iterators (ignoring the
-- stamp).  The three guards pick out which of the 32-bit payload words the
-- boundary falls into.
iterPrefixEqual :: TreeIter -> TreeIter -> Int -> Bool
iterPrefixEqual (TreeIter _ a1 b1 c1) (TreeIter _ a2 b2 c2) pos
  | pos>64 = let mask = 1 `shiftL` (pos-64) - 1 in
             a1==a2 && b1==b2 && (c1 .&. mask) == (c2 .&. mask)
  | pos>32 = let mask = 1 `shiftL` (pos-32) - 1 in
             a1==a2 && (b1 .&. mask) == (b2 .&. mask)
  | otherwise = let mask = 1 `shiftL` pos - 1 in
                (a1 .&. mask) == (a2 .&. mask)
-- | The invalid tree iterator.
--
invalidIter :: TreeIter
invalidIter = TreeIter 0 0 0 0

-- Debugging helper: render the three payload words of an iterator as bit
-- strings (least significant bit first).
showIterBits (TreeIter _ a b c) = [showBits a, showBits b, showBits c]
-- | Render a value as a string of @'0'@\/@'1'@ characters, least
-- significant bit first.
showBits :: Bits a => a -> String
showBits x = map render [0 .. bitSize x - 1]
  where render i | testBit x i = '1'
                 | otherwise   = '0'
-- | Calculate the maximum number of nodes on a per-level basis.
--
-- Folds over the forest collecting, per level, the largest child count,
-- then converts each count to the number of bits needed to address it.
-- 'takeWhile' trims the infinite list of zeros that seeds the fold.
calcForestDepth :: Forest a -> Depth
calcForestDepth f = map bitsNeeded $
                    takeWhile (/=0) $
                    foldr calcTreeDepth (repeat 0) f
  where
  calcTreeDepth Node { subForest = f } (d:ds) =
      (d+1): zipWith max ds (foldr calcTreeDepth (repeat 0) f)
-- | Convert an iterator into a path.
--
-- Indices are stored one-biased in the iterator (zero marks "no entry at
-- this level"), hence the @idx-1@ below.
toPath :: Depth -> TreeIter -> TreePath
toPath d iter = gP 0 d
  where
  gP pos [] = []
  gP pos (d:ds) = let idx = getBitSlice iter pos d in
                  if idx==0 then [] else fromIntegral (idx-1) : gP (pos+d) ds
-- | Try to convert a path into a 'TreeIter'.
--
-- Fails with 'Nothing' when the path is deeper than the current depth
-- profile or an index does not fit into its level's bit field.
fromPath :: Depth -> TreePath -> Maybe TreeIter
fromPath = fP 0 invalidIter
  where
  fP pos ti _ [] = Just ti -- the remaining bits are zero anyway
  fP pos ti [] _ = Nothing
  fP pos ti (d:ds) (p:ps) = let idx = fromIntegral (p+1) :: Word in
    if idx >= bit d then Nothing else
    fP (pos+d) (setBitSlice ti pos d idx) ds ps
-- | The 'Cache' type synonym is only used internally. It represents
-- the stack during a (fictional) lookup operation.
-- The topmost frame is the node
-- for which this lookup was started and the innermost frame (the last
-- element of the list) contains the root of the tree.
--
type Cache a = [(TreeIter, Forest a)]
-- | Create a traversal structure that allows a pre-order traversal in linear
-- time.
--
-- * The returned structure points at the root of the first level which doesn't
--   really exist, but serves to indicate that it is before the very first
--   node.
--
storeToCache :: Forest a -> Cache a
storeToCache [] = []
storeToCache forest = [(invalidIter, [Node root forest])]
  where
  -- The fake root's label must never be demanded; the error message had a
  -- typo ("non-exitent") which is fixed here.
  root = error "TreeStore.storeToCache: accessed non-existent root of tree"
-- | Extract the store from the cache data structure.
--
-- The innermost cache frame always holds the single fake root node wrapped
-- around the real forest (see 'storeToCache'); the partial pattern below
-- relies on that invariant.
cacheToStore :: Cache a -> Forest a
cacheToStore [] = []
cacheToStore cache = case last cache of (_, [Node _ forest]) -> forest
-- | Advance the traversal structure to the given 'TreeIter'.
--
-- Works like moving a cursor in a pre-order traversal: frames are popped
-- from the cache while backing out of levels and pushed while descending.
-- The cache is left pointing at (or as close as possible to) @goal@.
advanceCache :: Depth -> TreeIter -> Cache a -> Cache a
advanceCache depth goal [] = []
advanceCache depth goal cache@((rootIter,_):_) =
  moveToSameLevel 0 depth
  where
  moveToSameLevel pos [] = cache
  moveToSameLevel pos (d:ds) =
    let
      goalIdx = getBitSlice goal pos d
      curIdx = getBitSlice rootIter pos d
      isNonZero pos d (ti,_) = getBitSlice ti pos d/=0
    in
    if goalIdx==curIdx then moveToSameLevel (pos+d) ds else
    if goalIdx==0 then dropWhile (isNonZero pos d) cache else
    if curIdx==0 then moveToChild pos (d:ds) cache else
    if goalIdx<curIdx then
      moveToChild pos (d:ds) (dropWhile (isNonZero pos d) cache)
    else let
      -- advance the current iterator to coincide with the goal iterator
      -- at this level
      moveWithinLevel pos d ((ti,forest):parents) = let
          diff = fromIntegral (goalIdx-curIdx)
          (dropped, remain) = splitAt diff forest
          advance = length dropped
          ti' = setBitSlice ti pos d (curIdx+fromIntegral advance)
        in
        if advance==diff then moveToChild (pos+d) ds ((ti',remain):parents)
        else (ti',remain):parents -- node not found
      in moveWithinLevel pos d $ case ds of
        [] -> cache
        (d':_) -> dropWhile (isNonZero (pos+d) d') cache

  -- Descend into the topmost forest to find the goal iterator. The position
  -- and the remainding depths specify the index in the cache that is zero.
  -- All indices in front of pos coincide with that of the goal iterator.
  moveToChild :: Int -> Depth -> Cache a -> Cache a
  moveToChild pos [] cache = cache -- we can't set more than the leaf
  moveToChild pos (d:ds) cache@((ti,forest):parents)
    | getBitSlice goal pos d == 0 = cache
    | otherwise = case forest of
        [] -> cache -- impossible request
        Node { subForest = children }:_ ->
          let
            childIdx :: Int
            childIdx = fromIntegral (getBitSlice goal pos d)-1
            (dropped, remain) = splitAt childIdx children
            advanced = length dropped
            ti' = setBitSlice ti pos d (fromIntegral advanced+1)
          in if advanced<childIdx then ((ti',remain):cache) else
             moveToChild (pos+d) ds ((ti',remain):cache)
-- | Advance to the given iterator and return whether this was successful.
--
-- The comparison deliberately ignores the stamp field of 'TreeIter'.
checkSuccess :: Depth -> TreeIter -> Cache a -> (Bool, Cache a)
checkSuccess depth iter cache = case advanceCache depth iter cache of
    cache'@((cur,sibs):_) -> (cmp cur iter && not (null sibs), cache')
    [] -> (False, [])
  where
  -- Fixed: the last conjunct used to read @c2==c2@ (trivially 'True'),
  -- so iterators differing only in the third payload word compared equal.
  -- (An unused, partial where-binding duplicating 'advanceCache' was also
  -- removed.)
  cmp (TreeIter _ a1 b1 c1) (TreeIter _ a2 b2 c2) =
      a1==a2 && b1==b2 && c1==c2
-- | Get the leaf index of this iterator.
--
-- * Due to the way we construct the 'TreeIter's, we can check which the last
--   level of an iterator is: The bit sequence of level n is zero if n is
--   greater or equal to the level that the iterator refers to. The returned
--   triple is (pos, leaf, zero) such that pos..pos+leaf denotes the leaf
--   index and pos+leaf..pos+leaf+zero denotes the bit field that is zero.
--
getTreeIterLeaf :: Depth -> TreeIter -> (Int, Int, Int)
getTreeIterLeaf ds ti = gTIL 0 0 ds
  where
  gTIL pos dCur (dNext:ds)
    | getBitSlice ti (pos+dCur) dNext==0 = (pos,dCur,dNext)
    | otherwise = gTIL (pos+dCur) dNext ds
  -- Depth list exhausted: the leaf is the deepest level.
  gTIL pos d [] = (pos, d, 0)
-- | Move an iterator forwards on the same level.
--
-- Returns 'Nothing' when the next index would overflow the level's bit
-- field or when no sibling exists there.
iterNext :: Depth -> TreeIter -> Cache a -> (Maybe TreeIter, Cache a)
iterNext depth iter cache = let
    (pos,leaf,child) = getTreeIterLeaf depth iter
    curIdx = getBitSlice iter pos leaf
    nextIdx = curIdx+1
    nextIter = setBitSlice iter pos leaf nextIdx
  in
  if nextIdx==bit leaf then (Nothing, cache) else
  case checkSuccess depth nextIter cache of
    (True, cache) -> (Just nextIter, cache)
    (False, cache) -> (Nothing, cache)
-- | Move down to the child of the given iterator.
--
-- The child index is stored one-biased (zero means "no entry"), hence the
-- @+1@ when building the candidate iterator.
iterNthChild :: Depth -> Int -> TreeIter -> Cache a ->
                (Maybe TreeIter, Cache a)
iterNthChild depth childIdx_ iter cache = let
    (pos,leaf,child) = getTreeIterLeaf depth iter
    childIdx = fromIntegral childIdx_+1 :: Word
    nextIter = setBitSlice iter (pos+leaf) child childIdx
  in
  if childIdx>=bit child then (Nothing, cache) else
  case checkSuccess depth nextIter cache of
    (True, cache) -> (Just nextIter, cache)
    (False, cache) -> (Nothing, cache)
-- | Return the number of children of the node the iterator points to.
-- (The previous comment, \"Descend to the first child\", was wrong: this
-- function only counts children, it does not move the iterator.)
--
iterNChildren :: Depth -> TreeIter -> Cache a -> (Int, Cache a)
iterNChildren depth iter cache = case checkSuccess depth iter cache of
  (True, cache@((_,Node { subForest = forest}:_):_)) -> (length forest, cache)
  (_, cache) -> (0, cache)
-- | Ascend to parent.
--
-- Returns 'Nothing' for top-level nodes (nothing above them) and for the
-- invalid iterator (leaf index already zero).
iterParent :: Depth -> TreeIter -> Maybe TreeIter
iterParent depth iter = let
    (pos,leaf,child) = getTreeIterLeaf depth iter
  in if pos==0 then Nothing else
     if getBitSlice iter pos leaf==0 then Nothing else
     Just (setBitSlice iter pos leaf 0)
-- | Insert nodes into the store.
--
-- * The given list of nodes is inserted into given parent at @pos@.
--   If the parent existed, the function returns @Just path@ where @path@
--   is the position of the newly inserted elements. If @pos@ is negative
--   or greater or equal to the number of children of the node at @path@,
--   the new nodes are appended to the list.
--
treeStoreInsertForest ::
    TreeStore a -- ^ the store
 -> TreePath -- ^ @path@ - the position of the parent
 -> Int -- ^ @pos@ - the index of the new tree
 -> Forest a -- ^ the list of trees to be inserted
 -> IO ()
treeStoreInsertForest (TreeStore model) path pos nodes = do
  customStoreInvalidateIters model
  (idx, toggle) <- atomicModifyIORef (customStoreGetPrivate model) $
    \store@Store { depth = d, content = cache } ->
    case insertIntoForest (cacheToStore cache) nodes path pos of
      Nothing -> error ("treeStoreInsertForest: path does not exist " ++ show path)
      Just (newForest, idx, toggle) ->
       -- The depth profile may grow, so it is recomputed from scratch.
       let depth = calcForestDepth newForest
        in (Store { depth = depth,
                    content = storeToCache newForest },
            (idx, toggle))
  Store { depth = depth } <- readIORef (customStoreGetPrivate model)
  let rpath = reverse path
  stamp <- customStoreGetStamp model
  -- Emit a row-inserted signal for every node of every inserted subtree.
  sequence_ [ let p' = reverse p
                  Just iter = fromPath depth p'
               in treeModelRowInserted model p' (treeIterSetStamp iter stamp)
            | (i, node) <- zip [idx..] nodes
            , p <- paths (i : rpath) node ]
  let Just iter = fromPath depth path
  -- If the parent was childless before, tell the view its state changed.
  when toggle $ treeModelRowHasChildToggled model path
                (treeIterSetStamp iter stamp)

  where paths :: TreePath -> Tree a -> [TreePath]
        paths path Node { subForest = ts } =
          path : concat [ paths (n:path) t | (n, t) <- zip [0..] ts ]
-- | Insert a node into the store.
--
-- Thin wrapper around 'treeStoreInsertForest' for a single subtree.
treeStoreInsertTree ::
    TreeStore a -- ^ the store
 -> TreePath -- ^ @path@ - the position of the parent
 -> Int -- ^ @pos@ - the index of the new tree
 -> Tree a -- ^ the value to be inserted
 -> IO ()
treeStoreInsertTree store path pos node =
  treeStoreInsertForest store path pos [node]
-- | Insert a single node into the store.
--
-- * This function inserts a single node without children into the tree.
--   Its arguments are similar to those of 'treeStoreInsert'.
--
treeStoreInsert ::
    TreeStore a -- ^ the store
 -> TreePath -- ^ @path@ - the position of the parent
 -> Int -- ^ @pos@ - the index of the new tree
 -> a -- ^ the value to be inserted
 -> IO ()
treeStoreInsert store path pos node =
  treeStoreInsertForest store path pos [Node node []]
-- | Insert nodes into a forest.
--
-- * If the parent was found, returns the new tree, the child number
--   and a flag denoting if these new nodes were the first children
--   of the parent.
--
insertIntoForest :: Forest a -> Forest a -> TreePath -> Int ->
                    Maybe (Forest a, Int, Bool)
insertIntoForest forest nodes [] pos
  | pos < 0   = Just (forest ++ nodes, length forest, null forest)
  | otherwise = Just (before ++ nodes ++ after, length before, null forest)
  where (before, after) = splitAt pos forest
insertIntoForest forest nodes (p:ps) pos =
  case splitAt p forest of
    (_, []) -> Nothing
    (before, Node { rootLabel = v, subForest = children } : after) -> do
      -- Recurse into the selected child and splice the updated subtree
      -- back between its former siblings.
      (children', idx, toggle) <- insertIntoForest children nodes ps pos
      return (before ++ Node { rootLabel = v, subForest = children' } : after,
              idx, toggle)
-- | Remove a node from the store.
--
-- * The node denoted by the path is removed, along with all its children.
--   The function returns @True@ if the given node was found.
--
treeStoreRemove :: TreeStore a -> TreePath -> IO Bool
--TODO: eliminate this special case without segfaulting!
treeStoreRemove (TreeStore model) [] = return False
treeStoreRemove (TreeStore model) path = do
  customStoreInvalidateIters model
  (found, toggle) <- atomicModifyIORef (customStoreGetPrivate model) $
    \store@Store { depth = d, content = cache } ->
    if null cache then (store, (False, False)) else
    case deleteFromForest (cacheToStore cache) path of
      Nothing -> (store, (False, False))
      Just (newForest, toggle) ->
        (Store { depth = d, -- this might be a space leak
                 content = storeToCache newForest }, (True, toggle))
  when found $ do
    -- If the parent lost its last child, notify the view before emitting
    -- the row-deleted signal.
    when (toggle && not (null path)) $ do
      Store { depth = depth } <- readIORef (customStoreGetPrivate model)
      let parent = init path
          Just iter = fromPath depth parent
      treeModelRowHasChildToggled model parent iter
    treeModelRowDeleted model path
  return found
-- | Remove all rows from the store, emitting a row-deleted signal for each
-- top-level node (from the last index down to @0@).
treeStoreClear :: TreeStore a -> IO ()
treeStoreClear (TreeStore model) = do
  customStoreInvalidateIters model
  Store { content = cache } <- readIORef (customStoreGetPrivate model)
  let forest = cacheToStore cache
  writeIORef (customStoreGetPrivate model) Store {
      depth = calcForestDepth [],
      content = storeToCache []
    }
  let loop (-1) = return ()
      loop n = treeModelRowDeleted model [n] >> loop (n-1)
  loop (length forest - 1)
-- | Remove a node from a rose tree.
--
-- * Returns the new forest if the node was found.  The returned flag is
--   @True@ if deleting the node left the parent without any children.
--
deleteFromForest :: Forest a -> TreePath -> Maybe (Forest a, Bool)
-- An empty path denotes the (invisible) root: the whole forest goes away.
deleteFromForest _ [] = Just ([], False)
deleteFromForest forest (p:ps) =
  case splitAt p forest of
    -- Index p is past the end of this level: node not found.
    (_, []) -> Nothing
    (before, Node { rootLabel = lbl, subForest = children } : after)
      -- Last path component: drop this node; report whether the level
      -- is now empty.
      | null ps -> Just (before ++ after, null before && null after)
      -- Otherwise descend into the child's subforest and stitch the
      -- result back in, propagating the toggle flag unchanged.
      | otherwise ->
          fmap (\(children', toggled) ->
                  (before ++ Node { rootLabel = lbl
                                  , subForest = children' } : after
                  , toggled))
               (deleteFromForest children ps)
-- | Set a node in the store.
--
-- * Overwrites the value at the given path, discarding the "found" result
--   of the underlying 'treeStoreChangeM'.
treeStoreSetValue :: TreeStore a -> TreePath -> a -> IO ()
treeStoreSetValue store path value = do
  _ <- treeStoreChangeM store path (const (return value))
  return ()
-- | Change a node in the store.
--
-- * Returns @True@ if the node was found. For a monadic version, see
--   'treeStoreChangeM'.
--
treeStoreChange :: TreeStore a -> TreePath -> (a -> a) -> IO Bool
treeStoreChange store path f =
  -- Lift the pure update into the monadic variant.
  treeStoreChangeM store path (\x -> return (f x))
-- | Change a node in the store.
--
-- * Returns @True@ if the node was found. For a purely functional version, see
--   'treeStoreChange'.
--
treeStoreChangeM :: TreeStore a -> TreePath -> (a -> IO a) -> IO Bool
treeStoreChangeM (TreeStore model) path act = do
  customStoreInvalidateIters model
  store@Store { depth = d, content = cache } <-
      readIORef (customStoreGetPrivate model)
  -- NOTE: the pattern below shadows 'd' and 'cache' with the fields of the
  -- new store; the shadowed values happen to be the same 'd' used to build it.
  (store'@Store { depth = d, content = cache }, found) <- do
    mRes <- changeForest (cacheToStore cache) act path
    return $ case mRes of
      Nothing -> (store, False)
      Just newForest -> (Store { depth = d,
                                 content = storeToCache newForest }, True)
  writeIORef (customStoreGetPrivate model) store'
  -- Lazy pattern: 'iter' is only forced when 'found' holds below, so an
  -- invalid path never makes this partial match blow up.
  let Just iter = fromPath d path
  stamp <- customStoreGetStamp model
  -- Tell the view that the row changed, using an iterator carrying the
  -- current store stamp.
  when found $ treeModelRowChanged model path (treeIterSetStamp iter stamp)
  return found
-- | Change a node in the forest.
--
-- * Returns @True@ if the given node was found.
--
changeForest :: Forest a -> (a -> IO a) -> TreePath -> IO (Maybe (Forest a))
-- The empty path denotes the invisible root, which carries no value.
changeForest _ act [] = return Nothing
changeForest forest act (p:ps) =
  case splitAt p forest of
    -- Index p lies beyond this level: node not found.
    (_, []) -> return Nothing
    (before, Node { rootLabel = lbl, subForest = children } : after)
      -- Last path component: run the action on this node's value.
      | null ps -> do
          lbl' <- act lbl
          return (Just (before ++ Node { rootLabel = lbl'
                                       , subForest = children } : after))
      -- Otherwise recurse into the subforest and splice the result back.
      | otherwise -> do
          mChildren <- changeForest children act ps
          return $ fmap (\children' ->
                          before ++ Node { rootLabel = lbl
                                         , subForest = children' } : after)
                        mChildren
-- | Extract one node from the current model. Fails if the given
-- 'TreePath' refers to a non-existent node.
--
treeStoreGetValue :: TreeStore a -> TreePath -> IO a
treeStoreGetValue model path = do
  -- Fetch the whole subtree and keep only its root value.
  tree <- treeStoreGetTree model path
  return (rootLabel tree)
-- | Extract a subtree from the current model. Fails if the given
-- 'TreePath' refers to a non-existent node.
--
treeStoreGetTree :: TreeStore a -> TreePath -> IO (Tree a)
treeStoreGetTree (TreeStore model) path = do
  store@Store { depth = d, content = cache } <-
      readIORef (customStoreGetPrivate model)
  case fromPath d path of
    (Just iter) -> do
      -- Walk the cache towards the iterator; 'res' says whether the walk
      -- succeeded exactly, and 'cache'' is the refreshed cache.
      let (res, cache') = checkSuccess d iter cache
      -- Store the updated cache back so later lookups start closer.
      writeIORef (customStoreGetPrivate model) store { content = cache' }
      case cache' of
        -- On success the target node sits at the head of the head cache
        -- entry — presumably a checkSuccess invariant; TODO confirm.
        ((_,node:_):_) | res -> return node
        _ -> fail ("treeStoreGetTree: path does not exist " ++ show path)
    -- The path could not even be encoded as an iterator for this depth.
    _ -> fail ("treeStoreGetTree: path does not exist " ++ show path)
-- | Extract a subtree from the current model. Like 'treeStoreGetTree'
-- but returns @Nothing@ if the path refers to a non-existant node.
--
treeStoreLookup :: TreeStore a -> TreePath -> IO (Maybe (Tree a))
treeStoreLookup (TreeStore model) path = do
  store@Store { depth = d, content = cache } <-
      readIORef (customStoreGetPrivate model)
  case fromPath d path of
    (Just iter) -> do
      -- Same cache walk as 'treeStoreGetTree', but failures are mapped to
      -- Nothing instead of calling 'fail'.
      let (res, cache') = checkSuccess d iter cache
      -- Persist the refreshed cache for subsequent lookups.
      writeIORef (customStoreGetPrivate model) store { content = cache' }
      case cache' of
        ((_,node:_):_) | res -> return (Just node)
        _ -> return Nothing
    -- Path not encodable as an iterator at this depth: not present.
    _ -> return Nothing
| thiagoarrais/gtk2hs | gtk/Graphics/UI/Gtk/ModelView/TreeStore.hs | lgpl-2.1 | 25,419 | 71 | 26 | 5,792 | 7,100 | 3,794 | 3,306 | 428 | 11 |
-- Copyright 2020-2021 Google LLC
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module SIntLiterals where
import Data.SInt (SInt)
import TestUtils (theSInt, asFin, showTyped)
-- Easy: can we use a literal given that we know it matches the type?
-- (Presumably the plugin/Num instance accepts the bare literal 24 at type
-- @SInt 24@ — TODO confirm against the package documentation.)
x0 :: SInt 24
x0 = 24
-- Can we infer the Nat index from the literal?
-- 'theSInt' is given only the literal; the Nat index must be inferred and
-- is then rendered by 'showTyped'.
x1 :: String
x1 = showTyped (theSInt 24)
-- Can we use it to pin down the Nat index of a Fin?
-- The literal 24 fixes the bound of the Fin produced by 'asFin' —
-- presumably 10 must then check against that bound; TODO confirm.
x2 :: String
x2 = showTyped (10 `asFin` 24)
| google/hs-dependent-literals | dependent-literals-plugin/tests/SIntLiterals.hs | apache-2.0 | 972 | 0 | 7 | 180 | 98 | 64 | 34 | -1 | -1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.