_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
f3bc0d657eff51d04b275803f5d323b29dbddbc9cd11969ddd16190c028463c1 | e-wrks/edh | Vector.hs | module Language.Edh.Batteries.Vector where
-- import Debug.Trace
import Control.Applicative
import Control.Concurrent.STM
import Data.Hashable
import qualified Data.Lossless.Decimal as D
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Vector as V
import Data.Vector.Mutable (IOVector)
import qualified Data.Vector.Mutable as MV
import GHC.Conc (unsafeIOToSTM)
import Language.Edh.Batteries.InterOp (wrapHostProc)
import Language.Edh.Control (EdhErrorTag (UsageError), OpSymbol)
import Language.Edh.Evaluate
import Language.Edh.IOPD (iopdUpdate, odLookup)
import Language.Edh.RUID
import Language.Edh.RtTypes
import Prelude
-- Boxed Vector for Edh values, non-transactional, mutable anytime
-- | Boxed Vector for Edh values, non-transactional, mutable anytime.
--
-- The payload lives in an 'IOVector' behind a 'TVar' (the 'TVar' is swapped
-- when the vector grows); the 'RUID' gives each vector a stable identity.
data EdhVector = EdhVector !(TVar (IOVector EdhValue)) !RUID

-- Identity (not contents) based equality, via the RUID
instance Eq EdhVector where
  EdhVector _ x'u == EdhVector _ y'u = x'u == y'u

-- Hash by the RUID, consistent with the Eq instance above
instance Hashable EdhVector where
  hashWithSalt s (EdhVector _ s'u) = hashWithSalt s s'u

instance Show EdhVector where
  show _ = "<vector>"
-- | Create the @Vector@ host class.
--
-- Registers the constructor ('vecAllocator') plus all method/operator
-- attributes: scalar & mask & slice indexing, vectorized copies with an
-- operator applied, and inplace operator-assignments.
--
-- NOTE: fixed two malformed attribute names - @"(&&."@ and @"(||."@ were
-- missing the closing paren, inconsistent with every other flipped-operand
-- entry (e.g. @"(+.)"@), so the intended @(&&.)@ / @(||.)@ magic methods
-- could never be resolved by name.
createVectorClass :: Scope -> STM Object
createVectorClass !clsOuterScope =
  mkHostClass clsOuterScope "Vector" vecAllocator [] $ \ !clsScope -> do
    !mths <-
      sequence
        [ (AttrByName nm,) <$> mkHostProc clsScope vc nm hp
          | (nm, vc, hp) <-
              [ ("append", EdhMethod, wrapHostProc vecAppendProc),
                ("__eq__", EdhMethod, wrapHostProc vecEqProc),
                -- vectorized (non)equality tests and ordering comparisons
                ("(>)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False ">"),
                ("(>.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True ">"),
                ("(>=)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False ">="),
                ("(>=.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True ">="),
                ("(<)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "<"),
                ("(<.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "<"),
                ("(<=)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "<="),
                ("(<=.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "<="),
                ("(==)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "=="),
                ("(==.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "=="),
                ("(!=)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "!="),
                ("(!=.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "!="),
                -- indexing
                ("([])", EdhMethod, wrapHostProc vecIdxReadProc),
                -- indexed assignment
                ("([=])", EdhMethod, wrapHostProc vecIdxWriteProc),
                -- indexed update
                ("([++=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "++"),
                ("([+=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "+"),
                ("([-=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "-"),
                ("([*=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "*"),
                ("([/=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "/"),
                ("([//=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "//"),
                ("([**=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "**"),
                ("([&&=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "&&"),
                ("([||=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "||"),
                -- vectorized ops
                ("(++)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "++"),
                ("(++.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "++"),
                ("(+)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "+"),
                ("(+.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "+"),
                ("(-)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "-"),
                ("(-.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "-"),
                ("(*)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "*"),
                ("(*.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "*"),
                ("(/)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "/"),
                ("(/.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "/"),
                ("(//)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "//"),
                ("(//.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "//"),
                ("(mod)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "mod"),
                ("(mod.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "mod"),
                ("(rem)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "rem"),
                ("(rem.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "rem"),
                ("(**)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "**"),
                ("(**.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "**"),
                ("(&&)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "&&"),
                ("(&&.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "&&"),
                ("(||)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "||"),
                ("(||.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "||"),
                -- inplace update
                ("(++=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "++"),
                ("(+=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "+"),
                ("(-=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "-"),
                ("(*=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "*"),
                ("(/=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "/"),
                ("(//=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "//"),
                ("(**=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "**"),
                ("(&&=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "&&"),
                ("(||=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "||"),
                -- misc
                ("__null__", EdhMethod, wrapHostProc vecNullProc),
                ("__len__", EdhMethod, wrapHostProc vecLenProc),
                ("__repr__", EdhMethod, wrapHostProc vecReprProc)
              ]
        ]
    iopdUpdate mths $ edh'scope'entity clsScope
  where
vecAllocator :: ArgsPack -> EdhObjectAllocator
vecAllocator (ArgsPack !ctorArgs !ctorKwargs) !ctorExit !etsCtor = do
let doIt :: Int -> [EdhValue] -> STM ()
note @vs@ got to be lazy
doIt !len vs = do
let !vec = case len of
_ | len < 0 -> V.fromList vs
_ -> V.fromListN len vs
!mvec <- unsafeIOToSTM $ V.thaw vec
!mvv <- newTVar mvec
!vid <- newRUID'STM
ctorExit $ HostStore $ wrapHostValue $ EdhVector mvv vid
case odLookup (AttrByName "length") ctorKwargs of
Nothing -> doIt (-1) ctorArgs
Just (EdhDecimal !d) -> case D.decimalToInteger d of
Just !len
| len >= 0 ->
doIt (fromInteger len) $ ctorArgs ++ repeat edhNA
_ ->
throwEdh etsCtor UsageError $
"length not an positive integer: "
<> T.pack (show d)
Just !badLenVal ->
throwEdh etsCtor UsageError $
"invalid length: "
<> T.pack
(show badLenVal)
vecAppendProc :: [EdhValue] -> EdhHostProc
vecAppendProc !args !exit !ets =
withThisHostObj ets $ \(EdhVector !mvv _) ->
readTVar mvv >>= \ !mvec -> do
!mvec' <-
unsafeIOToSTM $ V.thaw . (V.++ V.fromList args) =<< V.freeze mvec
writeTVar mvv mvec'
exitEdh ets exit $
EdhObject $
edh'scope'this $
contextScope $
edh'context ets
    -- | Equality test (@__eq__@) against another value.
    --
    -- True only when the other value is also a Vector instance and the two
    -- hold element-wise equal contents (per 'EdhValue' equality); any
    -- non-Vector value compares unequal.
    vecEqProc :: EdhValue -> EdhHostProc
    vecEqProc !other !exit !ets =
      castObjectStore' other >>= \case
        Nothing -> exitEdh ets exit $ EdhBool False
        Just (_, EdhVector !vvOther _) ->
          readTVar vvOther >>= \mvecOther ->
            withThisHostObj ets $ \(EdhVector !mvv _) ->
              readTVar mvv >>= \ !mvec -> do
                !conclusion <- unsafeIOToSTM $ do
                  -- TODO we're sacrificing thread safety for zero-copy
                  -- performance here, justify this decision
                  -- (unsafeFreeze aliases the live mutable buffers)
                  !vec <- V.unsafeFreeze mvec
                  !vecOther <- V.unsafeFreeze mvecOther
                  return $ vec == vecOther
                exitEdh ets exit $ EdhBool conclusion
    -- | Indexed reader, backing the @([])@ magic method.
    --
    -- Index forms handled:
    --   * integer index: read one element
    --   * @Any@ / @All@: this vector object itself
    --   * slice: a sub vector wrapped in a mutable clone of this object
    --   * another Vector of equal length, used as a boolean mask: a new
    --     vector with masked-out slots filled with @edhNA@
    vecIdxReadProc :: EdhValue -> EdhHostProc
    vecIdxReadProc !idxVal !exit !ets =
      withThisHostObj ets $ \(EdhVector !mvv _) ->
        readTVar mvv >>= \ !mvec -> do
          -- wrap a result storage vector into a clone of this object
          let exitWith :: IOVector EdhValue -> STM ()
              exitWith !newVec = do
                !newVV <- newTVar newVec
                !vid <- newRUID'STM
                let !newStore = HostStore $ wrapHostValue (EdhVector newVV vid)
                edhMutCloneObj ets (edh'scope'that scope) thisVecObj newStore $
                  \ !thatObjClone -> exitEdh ets exit $ EdhObject thatObjClone
              -- copy out a strided slice into fresh storage
              exitWithRange :: Int -> Int -> Int -> STM ()
              exitWithRange !start !stop !step = do
                !newVec <- unsafeIOToSTM $ do
                  let (q, r) = quotRem (stop - start) step
                      !len = if r == 0 then abs q else 1 + abs q
                  !newVec <- MV.new len
                  let cpElems :: Int -> Int -> IO ()
                      cpElems !n !i =
                        if i >= len
                          then return ()
                          else do
                            MV.unsafeRead mvec n >>= MV.unsafeWrite newVec i
                            cpElems (n + step) (i + 1)
                  cpElems start 0
                  return newVec
                exitWith newVec
              tryScalarIdx = parseEdhIndex ets idxVal $ \case
                Left !err -> throwEdh ets UsageError err
                Right (EdhIndex !i) ->
                  -- MV.read bounds-checks; an out-of-range integer index
                  -- raises an IO exception here rather than an Edh error
                  -- - TODO confirm that's intended
                  unsafeIOToSTM (MV.read mvec i) >>= exitEdh ets exit
                Right EdhAny -> exitEdh ets exit $ EdhObject thisVecObj
                Right EdhAll -> exitEdh ets exit $ EdhObject thisVecObj
                Right (EdhSlice !start !stop !step) ->
                  regulateEdhSlice ets (MV.length mvec) (start, stop, step) $
                    \(!iStart, !iStop, !iStep) ->
                      if iStep == 1
                        -- NOTE: step-1 slices share storage with the source
                        -- (MV.unsafeSlice is a zero-copy view), so writes
                        -- through the slice show up in the original
                        then exitWith $ MV.unsafeSlice iStart (iStop - iStart) mvec
                        else exitWithRange iStart iStop iStep
          case edhUltimate idxVal of
            EdhObject !idxObj -> withHostInstance' idxObj tryScalarIdx $
              \(EdhVector vvMask _) ->
                readTVar vvMask >>= \ !vMask ->
                  if MV.length vMask /= MV.length mvec
                    then
                      throwEdh ets UsageError $
                        "index vector size mismatch: "
                          <> T.pack (show $ MV.length vMask)
                          <> " vs "
                          <> T.pack (show $ MV.length mvec)
                    else
                      unsafeIOToSTM (MV.new $ MV.length mvec) >>= \ !newVec ->
                        -- walk backwards from the last element; the n < 0
                        -- guard fires after all slots are filled
                        let copyAt :: Int -> STM ()
                            copyAt !n | n < 0 = do
                              !newVV <- newTVar newVec
                              !vid <- newRUID'STM
                              let !newStore =
                                    HostStore $
                                      wrapHostValue (EdhVector newVV vid)
                              edhMutCloneObj
                                ets
                                (edh'scope'that scope)
                                thisVecObj
                                newStore
                                $ \ !thatObjClone ->
                                  exitEdh ets exit $
                                    EdhObject thatObjClone
                            copyAt !n =
                              unsafeIOToSTM
                                ( liftA2 (,) (MV.read mvec n) (MV.read vMask n)
                                )
                                >>= \(!srcVal, !maskVal) ->
                                  edhValueNull ets maskVal $ \case
                                    False -> do
                                      unsafeIOToSTM $ MV.unsafeWrite newVec n srcVal
                                      copyAt (n - 1)
                                    True -> do
                                      unsafeIOToSTM $ MV.unsafeWrite newVec n edhNA
                                      copyAt (n - 1)
                         in copyAt (MV.length mvec - 1)
            _ -> tryScalarIdx
      where
        !scope = contextScope $ edh'context ets
        !thisVecObj = edh'scope'this scope
    -- | Indexed writer, backing the @([=])@ magic method.
    --
    -- Index forms handled:
    --   * integer index: overwrite one element
    --   * @Any@: fill every slot with the value
    --   * @All@ / slice: range-assign from a Vector, apk, or list rhs
    --   * a Vector of equal length as a boolean mask: assign only where
    --     the mask element is non-null, from a scalar or an equal-size
    --     value Vector
    -- Returns the assigned-from value in all cases.
    vecIdxWriteProc :: EdhValue -> EdhValue -> EdhHostProc
    vecIdxWriteProc !idxVal !other !exit !ets =
      withThisHostObj ets $ \(EdhVector !mvv _) ->
        readTVar mvv >>= \ !mvec -> do
          let exitWithRangeAssign :: Int -> Int -> Int -> STM ()
              exitWithRangeAssign !start !stop !step =
                castObjectStore' (edhUltimate other) >>= \case
                  Nothing -> exitWithNonVecAssign
                  Just (_, EdhVector !vvOther _) ->
                    readTVar vvOther >>= \mvecOther -> do
                      unsafeIOToSTM $ assignWithVec start 0 mvecOther
                      exitEdh ets exit other
                where
                  -- ceiling of the strided element count over [start, stop)
                  (q, r) = quotRem (stop - start) step
                  !len = if r == 0 then abs q else 1 + abs q
                  exitWithNonVecAssign = case edhUltimate other of
                    EdhArgsPack (ArgsPack !args' _) -> do
                      unsafeIOToSTM $ assignWithList start $ take len args'
                      exitEdh ets exit other
                    EdhList (List _ !lsv) -> do
                      !ls <- readTVar lsv
                      unsafeIOToSTM $ assignWithList start $ take len ls
                      exitEdh ets exit other
                    !badList ->
                      throwEdh ets UsageError $
                        "not assignable to indexed vector: "
                          <> edhTypeNameOf badList
                  assignWithList :: Int -> [EdhValue] -> IO ()
                  assignWithList _ [] = return ()
                  assignWithList !n (x : xs) = do
                    MV.unsafeWrite mvec n x
                    assignWithList (n + step) xs
                  assignWithVec :: Int -> Int -> IOVector EdhValue -> IO ()
                  assignWithVec !n !i !mvec' =
                    if i >= len || i >= MV.length mvec'
                      then return ()
                      else do
                        MV.unsafeRead mvec' i >>= MV.unsafeWrite mvec n
                        assignWithVec (n + step) (i + 1) mvec'
              tryScalarIdx = parseEdhIndex ets idxVal $ \case
                Left !err -> throwEdh ets UsageError err
                Right (EdhIndex !i) -> do
                  -- NOTE(review): no bounds regulation of i here -
                  -- presumably parseEdhIndex yields a raw index; confirm
                  unsafeIOToSTM $ MV.unsafeWrite mvec i other
                  exitEdh ets exit other
                Right EdhAny -> do
                  unsafeIOToSTM $ MV.set mvec other
                  exitEdh ets exit other
                Right EdhAll -> exitWithRangeAssign 0 (MV.length mvec) 1
                Right (EdhSlice !start !stop !step) ->
                  regulateEdhSlice ets (MV.length mvec) (start, stop, step) $
                    \(!iStart, !iStop, !iStep) ->
                      exitWithRangeAssign iStart iStop iStep
          case edhUltimate idxVal of
            EdhObject !idxObj -> withHostInstance' idxObj tryScalarIdx $
              \(EdhVector vvMask _) ->
                readTVar vvMask >>= \ !vMask ->
                  if MV.length vMask /= MV.length mvec
                    then
                      throwEdh ets UsageError $
                        "index vector size mismatch: "
                          <> T.pack (show $ MV.length vMask)
                          <> " vs "
                          <> T.pack (show $ MV.length mvec)
                    else
                      castObjectStore' (edhUltimate other) >>= \case
                        -- scalar rhs: write it into every unmasked slot
                        Nothing ->
                          let updAt :: Int -> STM ()
                              updAt !n | n < 0 = exitEdh ets exit other
                              updAt !n =
                                unsafeIOToSTM (MV.read vMask n)
                                  >>= \ !maskVal ->
                                    edhValueNull ets maskVal $ \case
                                      False -> do
                                        unsafeIOToSTM $
                                          MV.unsafeWrite mvec n other
                                        updAt (n - 1)
                                      True -> updAt (n - 1)
                           in updAt (MV.length mvec - 1)
                        -- Vector rhs: element-wise copy where unmasked
                        Just (_, EdhVector !vvOther _) ->
                          readTVar vvOther >>= \mvecOther ->
                            if MV.length mvecOther /= MV.length mvec
                              then
                                throwEdh ets UsageError $
                                  "value vector size mismatch: "
                                    <> T.pack (show $ MV.length mvecOther)
                                    <> " vs "
                                    <> T.pack (show $ MV.length mvec)
                              else
                                let updAt :: Int -> STM ()
                                    updAt !n | n < 0 = exitEdh ets exit other
                                    updAt !n =
                                      unsafeIOToSTM (MV.read vMask n)
                                        >>= \ !maskVal ->
                                          edhValueNull ets maskVal $ \case
                                            False -> do
                                              unsafeIOToSTM $
                                                MV.unsafeRead mvecOther n
                                                  >>= MV.unsafeWrite mvec n
                                              updAt (n - 1)
                                            True -> updAt (n - 1)
                                 in updAt (MV.length mvec - 1)
            _ -> tryScalarIdx
    -- | Vectorized binary operation producing a new vector.
    --
    -- Applies @opSym@ between each element and @other@ (operands flipped
    -- when @flipOperands@), collecting results into fresh storage wrapped
    -- in a mutable clone of this object.  An element for which the
    -- operator is inapplicable yields @edhNA@ in the result.
    vecCopyWithOpProc :: Bool -> OpSymbol -> EdhValue -> EdhHostProc
    -- TODO go element-wise op when other is a Vector of same length
    vecCopyWithOpProc !flipOperands !opSym !other !exit !ets =
      withThisHostObj ets $ \(EdhVector !mvv _) ->
        readTVar mvv >>= \ !mvec ->
          unsafeIOToSTM (MV.new $ MV.length mvec) >>= \ !newVec ->
            -- walk backwards; the n < 0 guard finalizes the result object
            let copyAt :: Int -> STM ()
                copyAt !n | n < 0 = do
                  !newVV <- newTVar newVec
                  !vid <- newRUID'STM
                  let !newStore =
                        HostStore $ wrapHostValue (EdhVector newVV vid)
                  edhMutCloneObj
                    ets
                    (edh'scope'that scope)
                    thisVecObj
                    newStore
                    $ \ !thatObjClone ->
                      exitEdh ets exit $
                        EdhObject thatObjClone
                copyAt !n =
                  unsafeIOToSTM (MV.read mvec n) >>= \ !srcVal -> do
                    -- fallback continuation when the op is inapplicable
                    let writeNA _lhv _rhv _ets = do
                          unsafeIOToSTM $ MV.unsafeWrite newVec n edhNA
                          copyAt (n - 1)
                    runEdhTx ets $
                      ( if flipOperands
                          then
                            evalInfix'
                              opSym
                              writeNA
                              (LitExpr $ ValueLiteral other)
                              (LitExpr $ ValueLiteral srcVal)
                          else
                            evalInfix'
                              opSym
                              writeNA
                              (LitExpr $ ValueLiteral srcVal)
                              (LitExpr $ ValueLiteral other)
                      )
                        $ \ !rv _ets -> do
                          unsafeIOToSTM $ MV.unsafeWrite newVec n rv
                          copyAt (n - 1)
             in copyAt (MV.length mvec - 1)
      where
        !scope = contextScope $ edh'context ets
        !thisVecObj = edh'scope'this scope
    -- | Inplace operator-assignment of one rhs value over a strided range.
    --
    -- For each index n in [start, stop) stepping by @step@, computes
    -- @old `opSym` rhVal@ and writes the result back; writes @edhNA@ when
    -- the operator is inapplicable.  Calls @exit@ when the range is done.
    opAssignElems ::
      EdhThreadState ->
      OpSymbol ->
      EdhValue ->
      IOVector EdhValue ->
      Int ->
      Int ->
      Int ->
      STM () ->
      STM ()
    opAssignElems !ets !opSym !rhVal !mvec !start !stop !step !exit =
      assignAt
        start
      where
        assignAt :: Int -> STM ()
        assignAt !n =
          if n >= stop
            then exit
            else
              unsafeIOToSTM (MV.read mvec n) >>= \ !oldVal ->
                runEdhTx ets $
                  evalInfix'
                    opSym
                    -- inapplicable op: degrade this slot to edhNA
                    ( \_ _ _ -> do
                        unsafeIOToSTM $ MV.unsafeWrite mvec n edhNA
                        assignAt (n + step)
                    )
                    (LitExpr $ ValueLiteral oldVal)
                    (LitExpr $ ValueLiteral rhVal)
                    $ \ !opRtnV _ets -> do
                      unsafeIOToSTM $ MV.unsafeWrite mvec n opRtnV
                      assignAt (n + step)
    -- | Inplace operator-assignment over the whole vector, e.g. @(+=)@.
    --
    -- Delegates to 'opAssignElems' for every element, then returns this
    -- vector object itself.
    vecAssignWithOpProc :: OpSymbol -> EdhValue -> EdhHostProc
    -- TODO go element-wise op when other is a Vector of same length
    vecAssignWithOpProc !opSym !other !exit !ets =
      withThisHostObj ets $ \(EdhVector !mvv _) ->
        readTVar mvv >>= \ !mvec ->
          opAssignElems ets opSym other mvec 0 (MV.length mvec) 1 $
            exitEdh ets exit $
              EdhObject thisVecObj
      where
        !scope = contextScope $ edh'context ets
        !thisVecObj = edh'scope'this scope
    -- | Inplace operator-assignment over a strided range, with a rhs that
    -- may itself be a container.
    --
    -- A Vector rhs is consumed element-by-element (stopping at whichever
    -- is exhausted first); an apk or list rhs likewise; any other rhs is
    -- treated as a scalar via 'opAssignElems'.  Exits with @nil@.
    opAssignRange ::
      EdhThreadState ->
      OpSymbol ->
      EdhValue ->
      IOVector EdhValue ->
      Int ->
      Int ->
      Int ->
      EdhTxExit EdhValue ->
      STM ()
    opAssignRange !ets !opSym !rhVal !mvec !start !stop !step !exit =
      castObjectStore' (edhUltimate rhVal) >>= \case
        Nothing -> exitWithNonVecAssign
        Just (_, EdhVector !vvOther _) ->
          readTVar vvOther >>= \mvecOther ->
            assignWithVec start 0 mvecOther
      where
        -- ceiling of the strided element count over [start, stop)
        (q, r) = quotRem (stop - start) step
        !len = if r == 0 then abs q else 1 + abs q
        exitWithNonVecAssign = case edhUltimate rhVal of
          EdhArgsPack (ArgsPack !args' _) ->
            assignWithList start $
              take len args'
          EdhList (List _ !lsv) -> do
            !ls <- readTVar lsv
            assignWithList start $ take len ls
          _ ->
            opAssignElems ets opSym rhVal mvec start stop step $
              exitEdh ets exit nil
        assignWithList :: Int -> [EdhValue] -> STM ()
        assignWithList _ [] = exitEdh ets exit nil
        assignWithList !n (x : xs) =
          unsafeIOToSTM (MV.read mvec n) >>= \ !oldVal ->
            runEdhTx ets $
              evalInfix'
                opSym
                ( \_ _ _ -> do
                    unsafeIOToSTM $ MV.unsafeWrite mvec n edhNA
                    assignWithList (n + step) xs
                )
                (LitExpr $ ValueLiteral oldVal)
                (LitExpr $ ValueLiteral x)
                $ \ !opRtnV _ets -> do
                  unsafeIOToSTM $ MV.unsafeWrite mvec n opRtnV
                  assignWithList (n + step) xs
        assignWithVec :: Int -> Int -> IOVector EdhValue -> STM ()
        assignWithVec !n !i !mvec' =
          if i >= len || i >= MV.length mvec'
            then exitEdh ets exit nil
            else do
              !oldVal <- unsafeIOToSTM $ MV.unsafeRead mvec n
              !otherVal <- unsafeIOToSTM $ MV.unsafeRead mvec' i
              runEdhTx ets $
                evalInfix'
                  opSym
                  ( \_ _ _ -> do
                      unsafeIOToSTM $ MV.unsafeWrite mvec n edhNA
                      assignWithVec (n + step) (i + 1) mvec'
                  )
                  (LitExpr $ ValueLiteral oldVal)
                  (LitExpr $ ValueLiteral otherVal)
                  $ \ !opRtnV _ets -> do
                    unsafeIOToSTM $ MV.unsafeWrite mvec n opRtnV
                    assignWithVec (n + step) (i + 1) mvec'
    -- | Indexed inplace operator-assignment, backing e.g. @([+=])@.
    --
    -- Index forms handled:
    --   * integer index: op-assign one element, exit with the op result
    --   * @Any@: op-assign every element with the scalar rhs, exit nil
    --   * @All@ / slice: 'opAssignRange' semantics
    --   * a Vector of equal length as a boolean mask: op-assign only
    --     unmasked slots, from a scalar or an equal-size value Vector,
    --     exiting with the rhs value
    vecIdxAssignWithOpProc :: OpSymbol -> EdhValue -> EdhValue -> EdhHostProc
    vecIdxAssignWithOpProc !opSym !idxVal !other !exit !ets =
      withThisHostObj ets $ \(EdhVector !mvv _) ->
        readTVar mvv >>= \ !mvec -> do
          let tryScalarIdx = parseEdhIndex ets idxVal $ \case
                Left !err -> throwEdh ets UsageError err
                Right (EdhIndex !i) ->
                  unsafeIOToSTM (MV.read mvec i) >>= \ !oldVal ->
                    runEdhTx ets $
                      evalInfix'
                        opSym
                        ( \_ _ _ -> do
                            unsafeIOToSTM $ MV.unsafeWrite mvec i edhNA
                            exitEdh ets exit edhNA
                        )
                        (LitExpr $ ValueLiteral oldVal)
                        (LitExpr $ ValueLiteral other)
                        $ \ !opRtnV _ets -> do
                          unsafeIOToSTM $ MV.unsafeWrite mvec i opRtnV
                          exitEdh ets exit opRtnV
                Right EdhAny ->
                  opAssignElems ets opSym other mvec 0 (MV.length mvec) 1 $
                    exitEdh ets exit nil
                Right EdhAll ->
                  opAssignRange ets opSym other mvec 0 (MV.length mvec) 1 exit
                Right (EdhSlice !start !stop !step) ->
                  regulateEdhSlice ets (MV.length mvec) (start, stop, step) $
                    \(!iStart, !iStop, !iStep) ->
                      opAssignRange ets opSym other mvec iStart iStop iStep exit
          case edhUltimate idxVal of
            EdhObject !idxObj -> withHostInstance' idxObj tryScalarIdx $
              \(EdhVector vvMask _) ->
                readTVar vvMask >>= \ !vMask ->
                  if MV.length vMask /= MV.length mvec
                    then
                      throwEdh ets UsageError $
                        "index vector size mismatch: "
                          <> T.pack (show $ MV.length vMask)
                          <> " vs "
                          <> T.pack (show $ MV.length mvec)
                    else
                      castObjectStore' (edhUltimate other) >>= \case
                        -- scalar rhs: op-assign each unmasked slot
                        Nothing ->
                          let updAt :: Int -> STM ()
                              updAt !n | n < 0 = exitEdh ets exit other
                              updAt !n = do
                                !maskVal <- unsafeIOToSTM (MV.read vMask n)
                                edhValueNull ets maskVal $ \case
                                  False -> do
                                    !oldVal <- unsafeIOToSTM (MV.unsafeRead mvec n)
                                    runEdhTx ets $
                                      evalInfix'
                                        opSym
                                        ( \_ _ _ -> do
                                            unsafeIOToSTM $ MV.unsafeWrite mvec n edhNA
                                            updAt (n - 1)
                                        )
                                        (LitExpr $ ValueLiteral oldVal)
                                        (LitExpr $ ValueLiteral other)
                                        $ \ !opRtnV _ets -> do
                                          unsafeIOToSTM $ MV.unsafeWrite mvec n opRtnV
                                          updAt (n - 1)
                                  True -> updAt (n - 1)
                           in updAt (MV.length mvec - 1)
                        -- Vector rhs: element-wise op-assign where unmasked
                        Just (_, EdhVector !vvOther _) ->
                          readTVar vvOther
                            >>= \mvecOther ->
                              if MV.length mvecOther /= MV.length mvec
                                then
                                  throwEdh ets UsageError $
                                    "value vector size mismatch: "
                                      <> T.pack (show $ MV.length mvecOther)
                                      <> " vs "
                                      <> T.pack (show $ MV.length mvec)
                                else
                                  let updAt :: Int -> STM ()
                                      updAt !n | n < 0 = exitEdh ets exit other
                                      updAt !n = do
                                        !maskVal <- unsafeIOToSTM (MV.read vMask n)
                                        edhValueNull ets maskVal $ \case
                                          False -> do
                                            (!oldVal, !otherVal) <-
                                              unsafeIOToSTM $
                                                liftA2
                                                  (,)
                                                  (MV.unsafeRead mvec n)
                                                  (MV.unsafeRead mvecOther n)
                                            runEdhTx ets $
                                              evalInfix'
                                                opSym
                                                ( \_ _ _ -> do
                                                    unsafeIOToSTM $ MV.unsafeWrite mvec n edhNA
                                                    updAt (n - 1)
                                                )
                                                (LitExpr $ ValueLiteral oldVal)
                                                (LitExpr $ ValueLiteral otherVal)
                                                $ \ !opRtnV _ets -> do
                                                  unsafeIOToSTM $ MV.unsafeWrite mvec n opRtnV
                                                  updAt (n - 1)
                                          True -> updAt (n - 1)
                                   in updAt (MV.length mvec - 1)
            _ -> tryScalarIdx
vecNullProc :: EdhHostProc
vecNullProc !exit !ets = withThisHostObj ets $ \(EdhVector !mvv _) ->
readTVar mvv >>= \ !mvec ->
exitEdh ets exit $
EdhBool $
MV.length mvec <= 0
vecLenProc :: EdhHostProc
vecLenProc !exit !ets = withThisHostObj ets $ \(EdhVector !mvv _) ->
readTVar mvv >>= \ !mvec ->
exitEdh ets exit $ EdhDecimal $ fromIntegral $ MV.length mvec
vecReprProc :: EdhHostProc
vecReprProc !exit !ets = withThisHostObj ets $ \(EdhVector !mvv _) ->
readTVar mvv >>= \ !mvec -> do
let go :: [EdhValue] -> [Text] -> STM ()
go [] !rs =
exitEdh ets exit $
EdhString $
"Vector( "
<> T.concat (reverse $ (<> ", ") <$> rs)
<> ")"
go (v : rest) rs = edhValueRepr ets v $ \ !r -> go rest (r : rs)
!vec <- unsafeIOToSTM $ V.freeze mvec
go (V.toList vec) []
| null | https://raw.githubusercontent.com/e-wrks/edh/9cc52fef71c16cce1d6fdb27f10fb4b60d597de7/host.hs/src/Language/Edh/Batteries/Vector.hs | haskell | import Debug.Trace
vectorized (non)equality tests and ordering comparisons
indexing
indexed assignment
indexed update
vectorized ops
inplace update
misc
performance here, justify this decision | module Language.Edh.Batteries.Vector where
import Control.Applicative
import Control.Concurrent.STM
import Data.Hashable
import qualified Data.Lossless.Decimal as D
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Vector as V
import Data.Vector.Mutable (IOVector)
import qualified Data.Vector.Mutable as MV
import GHC.Conc (unsafeIOToSTM)
import Language.Edh.Batteries.InterOp (wrapHostProc)
import Language.Edh.Control (EdhErrorTag (UsageError), OpSymbol)
import Language.Edh.Evaluate
import Language.Edh.IOPD (iopdUpdate, odLookup)
import Language.Edh.RUID
import Language.Edh.RtTypes
import Prelude
Boxed Vector for Edh values , non - transactional , mutable anytime
data EdhVector = EdhVector !(TVar (IOVector EdhValue)) !RUID
instance Eq EdhVector where
EdhVector _ x'u == EdhVector _ y'u = x'u == y'u
instance Hashable EdhVector where
hashWithSalt s (EdhVector _ s'u) = hashWithSalt s s'u
instance Show EdhVector where
show _ = "<vector>"
createVectorClass :: Scope -> STM Object
createVectorClass !clsOuterScope =
mkHostClass clsOuterScope "Vector" vecAllocator [] $ \ !clsScope -> do
!mths <-
sequence
[ (AttrByName nm,) <$> mkHostProc clsScope vc nm hp
| (nm, vc, hp) <-
[ ("append", EdhMethod, wrapHostProc vecAppendProc),
("__eq__", EdhMethod, wrapHostProc vecEqProc),
("(>)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False ">"),
("(>.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True ">"),
("(>=)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False ">="),
("(>=.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True ">="),
("(<)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "<"),
("(<.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "<"),
("(<=)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "<="),
("(<=.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "<="),
("(==)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "=="),
("(==.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "=="),
("(!=)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "!="),
("(!=.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "!="),
("([])", EdhMethod, wrapHostProc vecIdxReadProc),
("([=])", EdhMethod, wrapHostProc vecIdxWriteProc),
("([++=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "++"),
("([+=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "+"),
("([-=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "-"),
("([*=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "*"),
("([/=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "/"),
("([//=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "//"),
("([**=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "**"),
("([&&=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "&&"),
("([||=])", EdhMethod, wrapHostProc $ vecIdxAssignWithOpProc "||"),
("(++)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "++"),
("(++.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "++"),
("(+)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "+"),
("(+.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "+"),
("(-)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "-"),
("(-.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "-"),
("(*)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "*"),
("(*.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "*"),
("(/)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "/"),
("(/.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "/"),
("(//)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "//"),
("(//.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "//"),
("(mod)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "mod"),
("(mod.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "mod"),
("(rem)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "rem"),
("(rem.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "rem"),
("(**)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "**"),
("(**.)", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "**"),
("(&&)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "&&"),
("(&&.", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "&&"),
("(||)", EdhMethod, wrapHostProc $ vecCopyWithOpProc False "||"),
("(||.", EdhMethod, wrapHostProc $ vecCopyWithOpProc True "||"),
("(++=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "++"),
("(+=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "+"),
("(-=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "-"),
("(*=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "*"),
("(/=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "/"),
("(//=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "//"),
("(**=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "**"),
("(&&=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "&&"),
("(||=)", EdhMethod, wrapHostProc $ vecAssignWithOpProc "||"),
("__null__", EdhMethod, wrapHostProc vecNullProc),
("__len__", EdhMethod, wrapHostProc vecLenProc),
("__repr__", EdhMethod, wrapHostProc vecReprProc)
]
]
iopdUpdate mths $ edh'scope'entity clsScope
where
vecAllocator :: ArgsPack -> EdhObjectAllocator
vecAllocator (ArgsPack !ctorArgs !ctorKwargs) !ctorExit !etsCtor = do
let doIt :: Int -> [EdhValue] -> STM ()
note @vs@ got to be lazy
doIt !len vs = do
let !vec = case len of
_ | len < 0 -> V.fromList vs
_ -> V.fromListN len vs
!mvec <- unsafeIOToSTM $ V.thaw vec
!mvv <- newTVar mvec
!vid <- newRUID'STM
ctorExit $ HostStore $ wrapHostValue $ EdhVector mvv vid
case odLookup (AttrByName "length") ctorKwargs of
Nothing -> doIt (-1) ctorArgs
Just (EdhDecimal !d) -> case D.decimalToInteger d of
Just !len
| len >= 0 ->
doIt (fromInteger len) $ ctorArgs ++ repeat edhNA
_ ->
throwEdh etsCtor UsageError $
"length not an positive integer: "
<> T.pack (show d)
Just !badLenVal ->
throwEdh etsCtor UsageError $
"invalid length: "
<> T.pack
(show badLenVal)
vecAppendProc :: [EdhValue] -> EdhHostProc
vecAppendProc !args !exit !ets =
withThisHostObj ets $ \(EdhVector !mvv _) ->
readTVar mvv >>= \ !mvec -> do
!mvec' <-
unsafeIOToSTM $ V.thaw . (V.++ V.fromList args) =<< V.freeze mvec
writeTVar mvv mvec'
exitEdh ets exit $
EdhObject $
edh'scope'this $
contextScope $
edh'context ets
vecEqProc :: EdhValue -> EdhHostProc
vecEqProc !other !exit !ets =
castObjectStore' other >>= \case
Nothing -> exitEdh ets exit $ EdhBool False
Just (_, EdhVector !vvOther _) ->
readTVar vvOther >>= \mvecOther ->
withThisHostObj ets $ \(EdhVector !mvv _) ->
readTVar mvv >>= \ !mvec -> do
!conclusion <- unsafeIOToSTM $ do
TODO we 're sacrificing thread safety for zero - copy
!vec <- V.unsafeFreeze mvec
!vecOther <- V.unsafeFreeze mvecOther
return $ vec == vecOther
exitEdh ets exit $ EdhBool conclusion
vecIdxReadProc :: EdhValue -> EdhHostProc
vecIdxReadProc !idxVal !exit !ets =
withThisHostObj ets $ \(EdhVector !mvv _) ->
readTVar mvv >>= \ !mvec -> do
let exitWith :: IOVector EdhValue -> STM ()
exitWith !newVec = do
!newVV <- newTVar newVec
!vid <- newRUID'STM
let !newStore = HostStore $ wrapHostValue (EdhVector newVV vid)
edhMutCloneObj ets (edh'scope'that scope) thisVecObj newStore $
\ !thatObjClone -> exitEdh ets exit $ EdhObject thatObjClone
exitWithRange :: Int -> Int -> Int -> STM ()
exitWithRange !start !stop !step = do
!newVec <- unsafeIOToSTM $ do
let (q, r) = quotRem (stop - start) step
!len = if r == 0 then abs q else 1 + abs q
!newVec <- MV.new len
let cpElems :: Int -> Int -> IO ()
cpElems !n !i =
if i >= len
then return ()
else do
MV.unsafeRead mvec n >>= MV.unsafeWrite newVec i
cpElems (n + step) (i + 1)
cpElems start 0
return newVec
exitWith newVec
tryScalarIdx = parseEdhIndex ets idxVal $ \case
Left !err -> throwEdh ets UsageError err
Right (EdhIndex !i) ->
unsafeIOToSTM (MV.read mvec i) >>= exitEdh ets exit
Right EdhAny -> exitEdh ets exit $ EdhObject thisVecObj
Right EdhAll -> exitEdh ets exit $ EdhObject thisVecObj
Right (EdhSlice !start !stop !step) ->
regulateEdhSlice ets (MV.length mvec) (start, stop, step) $
\(!iStart, !iStop, !iStep) ->
if iStep == 1
then exitWith $ MV.unsafeSlice iStart (iStop - iStart) mvec
else exitWithRange iStart iStop iStep
case edhUltimate idxVal of
EdhObject !idxObj -> withHostInstance' idxObj tryScalarIdx $
\(EdhVector vvMask _) ->
readTVar vvMask >>= \ !vMask ->
if MV.length vMask /= MV.length mvec
then
throwEdh ets UsageError $
"index vector size mismatch: "
<> T.pack (show $ MV.length vMask)
<> " vs "
<> T.pack (show $ MV.length mvec)
else
unsafeIOToSTM (MV.new $ MV.length mvec) >>= \ !newVec ->
let copyAt :: Int -> STM ()
copyAt !n | n < 0 = do
!newVV <- newTVar newVec
!vid <- newRUID'STM
let !newStore =
HostStore $
wrapHostValue (EdhVector newVV vid)
edhMutCloneObj
ets
(edh'scope'that scope)
thisVecObj
newStore
$ \ !thatObjClone ->
exitEdh ets exit $
EdhObject thatObjClone
copyAt !n =
unsafeIOToSTM
( liftA2 (,) (MV.read mvec n) (MV.read vMask n)
)
>>= \(!srcVal, !maskVal) ->
edhValueNull ets maskVal $ \case
False -> do
unsafeIOToSTM $ MV.unsafeWrite newVec n srcVal
copyAt (n - 1)
True -> do
unsafeIOToSTM $ MV.unsafeWrite newVec n edhNA
copyAt (n - 1)
in copyAt (MV.length mvec - 1)
_ -> tryScalarIdx
where
!scope = contextScope $ edh'context ets
!thisVecObj = edh'scope'this scope
vecIdxWriteProc :: EdhValue -> EdhValue -> EdhHostProc
vecIdxWriteProc !idxVal !other !exit !ets =
withThisHostObj ets $ \(EdhVector !mvv _) ->
readTVar mvv >>= \ !mvec -> do
let exitWithRangeAssign :: Int -> Int -> Int -> STM ()
exitWithRangeAssign !start !stop !step =
castObjectStore' (edhUltimate other) >>= \case
Nothing -> exitWithNonVecAssign
Just (_, EdhVector !vvOther _) ->
readTVar vvOther >>= \mvecOther -> do
unsafeIOToSTM $ assignWithVec start 0 mvecOther
exitEdh ets exit other
where
(q, r) = quotRem (stop - start) step
!len = if r == 0 then abs q else 1 + abs q
exitWithNonVecAssign = case edhUltimate other of
EdhArgsPack (ArgsPack !args' _) -> do
unsafeIOToSTM $ assignWithList start $ take len args'
exitEdh ets exit other
EdhList (List _ !lsv) -> do
!ls <- readTVar lsv
unsafeIOToSTM $ assignWithList start $ take len ls
exitEdh ets exit other
!badList ->
throwEdh ets UsageError $
"not assignable to indexed vector: "
<> edhTypeNameOf badList
assignWithList :: Int -> [EdhValue] -> IO ()
assignWithList _ [] = return ()
assignWithList !n (x : xs) = do
MV.unsafeWrite mvec n x
assignWithList (n + step) xs
assignWithVec :: Int -> Int -> IOVector EdhValue -> IO ()
assignWithVec !n !i !mvec' =
if i >= len || i >= MV.length mvec'
then return ()
else do
MV.unsafeRead mvec' i >>= MV.unsafeWrite mvec n
assignWithVec (n + step) (i + 1) mvec'
tryScalarIdx = parseEdhIndex ets idxVal $ \case
Left !err -> throwEdh ets UsageError err
Right (EdhIndex !i) -> do
unsafeIOToSTM $ MV.unsafeWrite mvec i other
exitEdh ets exit other
Right EdhAny -> do
unsafeIOToSTM $ MV.set mvec other
exitEdh ets exit other
Right EdhAll -> exitWithRangeAssign 0 (MV.length mvec) 1
Right (EdhSlice !start !stop !step) ->
regulateEdhSlice ets (MV.length mvec) (start, stop, step) $
\(!iStart, !iStop, !iStep) ->
exitWithRangeAssign iStart iStop iStep
case edhUltimate idxVal of
EdhObject !idxObj -> withHostInstance' idxObj tryScalarIdx $
\(EdhVector vvMask _) ->
readTVar vvMask >>= \ !vMask ->
if MV.length vMask /= MV.length mvec
then
throwEdh ets UsageError $
"index vector size mismatch: "
<> T.pack (show $ MV.length vMask)
<> " vs "
<> T.pack (show $ MV.length mvec)
else
castObjectStore' (edhUltimate other) >>= \case
Nothing ->
let updAt :: Int -> STM ()
updAt !n | n < 0 = exitEdh ets exit other
updAt !n =
unsafeIOToSTM (MV.read vMask n)
>>= \ !maskVal ->
edhValueNull ets maskVal $ \case
False -> do
unsafeIOToSTM $
MV.unsafeWrite mvec n other
updAt (n - 1)
True -> updAt (n - 1)
in updAt (MV.length mvec - 1)
Just (_, EdhVector !vvOther _) ->
readTVar vvOther >>= \mvecOther ->
if MV.length mvecOther /= MV.length mvec
then
throwEdh ets UsageError $
"value vector size mismatch: "
<> T.pack (show $ MV.length mvecOther)
<> " vs "
<> T.pack (show $ MV.length mvec)
else
let updAt :: Int -> STM ()
updAt !n | n < 0 = exitEdh ets exit other
updAt !n =
unsafeIOToSTM (MV.read vMask n)
>>= \ !maskVal ->
edhValueNull ets maskVal $ \case
False -> do
unsafeIOToSTM $
MV.unsafeRead mvecOther n
>>= MV.unsafeWrite mvec n
updAt (n - 1)
True -> updAt (n - 1)
in updAt (MV.length mvec - 1)
_ -> tryScalarIdx
-- | Host method: build a new vector by combining every element of this
-- vector with @other@ through the infix operator @opSym@, then clone this
-- host object around the freshly built vector and exit with the clone.
-- When @flipOperands@ is set the element becomes the right-hand operand.
-- Elements for which the operator is not applicable yield 'edhNA'.
vecCopyWithOpProc :: Bool -> OpSymbol -> EdhValue -> EdhHostProc
-- TODO: go element-wise op when other is a Vector of same length
vecCopyWithOpProc !flipOperands !opSym !other !exit !ets =
  withThisHostObj ets $ \(EdhVector !mvv _) ->
    readTVar mvv >>= \ !mvec ->
      unsafeIOToSTM (MV.new $ MV.length mvec) >>= \ !newVec ->
        let copyAt :: Int -> STM ()
            -- all elements processed: wrap the result vector into a new
            -- host store and clone this object around it
            copyAt !n | n < 0 = do
              !newVV <- newTVar newVec
              !vid <- newRUID'STM
              let !newStore =
                    HostStore $ wrapHostValue (EdhVector newVV vid)
              edhMutCloneObj
                ets
                (edh'scope'that scope)
                thisVecObj
                newStore
                $ \ !thatObjClone ->
                  exitEdh ets exit $
                    EdhObject thatObjClone
            copyAt !n =
              unsafeIOToSTM (MV.read mvec n) >>= \ !srcVal -> do
                -- fallback when the operator is not applicable: store NA
                let writeNA _lhv _rhv _ets = do
                      unsafeIOToSTM $ MV.unsafeWrite newVec n edhNA
                      copyAt (n - 1)
                runEdhTx ets $
                  ( if flipOperands
                      then
                        evalInfix'
                          opSym
                          writeNA
                          (LitExpr $ ValueLiteral other)
                          (LitExpr $ ValueLiteral srcVal)
                      else
                        evalInfix'
                          opSym
                          writeNA
                          (LitExpr $ ValueLiteral srcVal)
                          (LitExpr $ ValueLiteral other)
                  )
                    $ \ !rv _ets -> do
                      unsafeIOToSTM $ MV.unsafeWrite newVec n rv
                      copyAt (n - 1)
         in copyAt (MV.length mvec - 1)
  where
    !scope = contextScope $ edh'context ets
    !thisVecObj = edh'scope'this scope
-- | Apply the infix operator @opSym@ in place over the elements of
-- @mvec@ from @start@ up to (excluding) @stop@, advancing by @step@:
-- each slot becomes @slot <opSym> rhVal@.  Slots for which the operator
-- is not applicable are overwritten with 'edhNA'.  Runs @exit@ once the
-- whole range has been processed.
-- NOTE(review): termination uses @n >= stop@, which assumes a positive
-- @step@ — confirm callers never pass a negative stride here.
opAssignElems ::
  EdhThreadState ->
  OpSymbol ->
  EdhValue ->
  IOVector EdhValue ->
  Int ->
  Int ->
  Int ->
  STM () ->
  STM ()
opAssignElems !ets !opSym !rhVal !mvec !start !stop !step !exit =
  assignAt
    start
  where
    assignAt :: Int -> STM ()
    assignAt !n =
      if n >= stop
        then exit
        else
          unsafeIOToSTM (MV.read mvec n) >>= \ !oldVal ->
            runEdhTx ets $
              evalInfix'
                opSym
                -- operator not applicable: record NA and continue
                ( \_ _ _ -> do
                    unsafeIOToSTM $ MV.unsafeWrite mvec n edhNA
                    assignAt (n + step)
                )
                (LitExpr $ ValueLiteral oldVal)
                (LitExpr $ ValueLiteral rhVal)
                $ \ !opRtnV _ets -> do
                  unsafeIOToSTM $ MV.unsafeWrite mvec n opRtnV
                  assignAt (n + step)
-- | Host method: in-place compound assignment @vec op= other@ applied to
-- every element of this vector, exiting with this vector object itself.
vecAssignWithOpProc :: OpSymbol -> EdhValue -> EdhHostProc
-- TODO: go element-wise op when other is a Vector of same length
vecAssignWithOpProc !opSym !other !exit !ets =
  withThisHostObj ets $ \(EdhVector !mvv _) ->
    readTVar mvv >>= \ !mvec ->
      opAssignElems ets opSym other mvec 0 (MV.length mvec) 1 $
        exitEdh ets exit $
          EdhObject thisVecObj
  where
    !scope = contextScope $ edh'context ets
    !thisVecObj = edh'scope'this scope
-- | In-place compound assignment over the slice @[start,stop)@ (stride
-- @step@) of @mvec@.  When @rhVal@ is another host vector the operator
-- is applied pairwise against its elements; an args-pack or list
-- supplies one right-hand value per slot; any other value is broadcast
-- via 'opAssignElems'.  Exits with nil when done.
opAssignRange ::
  EdhThreadState ->
  OpSymbol ->
  EdhValue ->
  IOVector EdhValue ->
  Int ->
  Int ->
  Int ->
  EdhTxExit EdhValue ->
  STM ()
opAssignRange !ets !opSym !rhVal !mvec !start !stop !step !exit =
  castObjectStore' (edhUltimate rhVal) >>= \case
    Nothing -> exitWithNonVecAssign
    Just (_, EdhVector !vvOther _) ->
      readTVar vvOther >>= \mvecOther ->
        assignWithVec start 0 mvecOther
  where
    -- number of slots addressed by the slice
    (q, r) = quotRem (stop - start) step
    !len = if r == 0 then abs q else 1 + abs q
    exitWithNonVecAssign = case edhUltimate rhVal of
      EdhArgsPack (ArgsPack !args' _) ->
        assignWithList start $
          take len args'
      EdhList (List _ !lsv) -> do
        !ls <- readTVar lsv
        assignWithList start $ take len ls
      _ ->
        opAssignElems ets opSym rhVal mvec start stop step $
          exitEdh ets exit nil
    assignWithList :: Int -> [EdhValue] -> STM ()
    assignWithList _ [] = exitEdh ets exit nil
    assignWithList !n (x : xs) =
      unsafeIOToSTM (MV.read mvec n) >>= \ !oldVal ->
        runEdhTx ets $
          evalInfix'
            opSym
            ( \_ _ _ -> do
                unsafeIOToSTM $ MV.unsafeWrite mvec n edhNA
                assignWithList (n + step) xs
            )
            (LitExpr $ ValueLiteral oldVal)
            (LitExpr $ ValueLiteral x)
            $ \ !opRtnV _ets -> do
              unsafeIOToSTM $ MV.unsafeWrite mvec n opRtnV
              assignWithList (n + step) xs
    assignWithVec :: Int -> Int -> IOVector EdhValue -> STM ()
    assignWithVec !n !i !mvec' =
      if i >= len || i >= MV.length mvec'
        then exitEdh ets exit nil
        else do
          !oldVal <- unsafeIOToSTM $ MV.unsafeRead mvec n
          !otherVal <- unsafeIOToSTM $ MV.unsafeRead mvec' i
          runEdhTx ets $
            evalInfix'
              opSym
              ( \_ _ _ -> do
                  unsafeIOToSTM $ MV.unsafeWrite mvec n edhNA
                  assignWithVec (n + step) (i + 1) mvec'
              )
              (LitExpr $ ValueLiteral oldVal)
              (LitExpr $ ValueLiteral otherVal)
              $ \ !opRtnV _ets -> do
                unsafeIOToSTM $ MV.unsafeWrite mvec n opRtnV
                assignWithVec (n + step) (i + 1) mvec'
-- | Host method implementing indexed compound assignment
-- (@vec[idx] op= other@).  The index may be a scalar position, Any/All,
-- a slice, or another host vector of equal length used as a boolean
-- mask selecting which slots to update.
vecIdxAssignWithOpProc :: OpSymbol -> EdhValue -> EdhValue -> EdhHostProc
vecIdxAssignWithOpProc !opSym !idxVal !other !exit !ets =
  withThisHostObj ets $ \(EdhVector !mvv _) ->
    readTVar mvv >>= \ !mvec -> do
      let tryScalarIdx = parseEdhIndex ets idxVal $ \case
            Left !err -> throwEdh ets UsageError err
            Right (EdhIndex !i) ->
              unsafeIOToSTM (MV.read mvec i) >>= \ !oldVal ->
                runEdhTx ets $
                  evalInfix'
                    opSym
                    ( \_ _ _ -> do
                        unsafeIOToSTM $ MV.unsafeWrite mvec i edhNA
                        exitEdh ets exit edhNA
                    )
                    (LitExpr $ ValueLiteral oldVal)
                    (LitExpr $ ValueLiteral other)
                    $ \ !opRtnV _ets -> do
                      unsafeIOToSTM $ MV.unsafeWrite mvec i opRtnV
                      exitEdh ets exit opRtnV
            Right EdhAny ->
              opAssignElems ets opSym other mvec 0 (MV.length mvec) 1 $
                exitEdh ets exit nil
            Right EdhAll ->
              opAssignRange ets opSym other mvec 0 (MV.length mvec) 1 exit
            Right (EdhSlice !start !stop !step) ->
              regulateEdhSlice ets (MV.length mvec) (start, stop, step) $
                \(!iStart, !iStop, !iStep) ->
                  opAssignRange ets opSym other mvec iStart iStop iStep exit
      -- a host-vector index is treated as a boolean mask of equal
      -- length; any other index value goes through scalar/slice parsing
      case edhUltimate idxVal of
        EdhObject !idxObj -> withHostInstance' idxObj tryScalarIdx $
          \(EdhVector vvMask _) ->
            readTVar vvMask >>= \ !vMask ->
              if MV.length vMask /= MV.length mvec
                then
                  throwEdh ets UsageError $
                    "index vector size mismatch: "
                      <> T.pack (show $ MV.length vMask)
                      <> " vs "
                      <> T.pack (show $ MV.length mvec)
                else
                  castObjectStore' (edhUltimate other) >>= \case
                    -- rhs is a plain value: broadcast it to masked slots
                    Nothing ->
                      let updAt :: Int -> STM ()
                          updAt !n | n < 0 = exitEdh ets exit other
                          updAt !n = do
                            !maskVal <- unsafeIOToSTM (MV.read vMask n)
                            edhValueNull ets maskVal $ \case
                              False -> do
                                !oldVal <- unsafeIOToSTM (MV.unsafeRead mvec n)
                                runEdhTx ets $
                                  evalInfix'
                                    opSym
                                    ( \_ _ _ -> do
                                        unsafeIOToSTM $ MV.unsafeWrite mvec n edhNA
                                        updAt (n - 1)
                                    )
                                    (LitExpr $ ValueLiteral oldVal)
                                    (LitExpr $ ValueLiteral other)
                                    $ \ !opRtnV _ets -> do
                                      unsafeIOToSTM $ MV.unsafeWrite mvec n opRtnV
                                      updAt (n - 1)
                              True -> updAt (n - 1)
                       in updAt (MV.length mvec - 1)
                    -- rhs is another vector: combine element-wise under the mask
                    Just (_, EdhVector !vvOther _) ->
                      readTVar vvOther
                        >>= \mvecOther ->
                          if MV.length mvecOther /= MV.length mvec
                            then
                              throwEdh ets UsageError $
                                "value vector size mismatch: "
                                  <> T.pack (show $ MV.length mvecOther)
                                  <> " vs "
                                  <> T.pack (show $ MV.length mvec)
                            else
                              let updAt :: Int -> STM ()
                                  updAt !n | n < 0 = exitEdh ets exit other
                                  updAt !n = do
                                    !maskVal <- unsafeIOToSTM (MV.read vMask n)
                                    edhValueNull ets maskVal $ \case
                                      False -> do
                                        (!oldVal, !otherVal) <-
                                          unsafeIOToSTM $
                                            liftA2
                                              (,)
                                              (MV.unsafeRead mvec n)
                                              (MV.unsafeRead mvecOther n)
                                        runEdhTx ets $
                                          evalInfix'
                                            opSym
                                            ( \_ _ _ -> do
                                                unsafeIOToSTM $ MV.unsafeWrite mvec n edhNA
                                                updAt (n - 1)
                                            )
                                            (LitExpr $ ValueLiteral oldVal)
                                            (LitExpr $ ValueLiteral otherVal)
                                            $ \ !opRtnV _ets -> do
                                              unsafeIOToSTM $ MV.unsafeWrite mvec n opRtnV
                                              updAt (n - 1)
                                      True -> updAt (n - 1)
                               in updAt (MV.length mvec - 1)
        _ -> tryScalarIdx
-- | Host method: tell whether this vector holds no elements at all.
vecNullProc :: EdhHostProc
vecNullProc !exit !ets = withThisHostObj ets $ \(EdhVector !mvv _) -> do
  !mvec <- readTVar mvv
  let !isEmpty = MV.length mvec == 0
  exitEdh ets exit (EdhBool isEmpty)
-- | Host method: yield the element count of this vector as a decimal.
vecLenProc :: EdhHostProc
vecLenProc !exit !ets = withThisHostObj ets $ \(EdhVector !mvv _) -> do
  !mvec <- readTVar mvv
  exitEdh ets exit (EdhDecimal (fromIntegral (MV.length mvec)))
-- | Host method: render this vector as @Vector( e1, e2, )@, using the
-- Edh repr of each element (computed in CPS, accumulated in reverse).
vecReprProc :: EdhHostProc
vecReprProc !exit !ets = withThisHostObj ets $ \(EdhVector !mvv _) ->
  readTVar mvv >>= \ !mvec -> do
    let go :: [EdhValue] -> [Text] -> STM ()
        -- all elements rendered: reverse the accumulator and assemble
        go [] !rs =
          exitEdh ets exit $
            EdhString $
              "Vector( "
                <> T.concat (reverse $ (<> ", ") <$> rs)
                <> ")"
        go (v : rest) rs = edhValueRepr ets v $ \ !r -> go rest (r : rs)
    !vec <- unsafeIOToSTM $ V.freeze mvec
    go (V.toList vec) []
|
ea58dc0c55f4e1724ca8bdc9c29bdbc349046e34d825dae712d7f0eca23a8fb2 | raspasov/neversleep | to_disk_mem.clj | (ns neversleep-db.to-disk-mem
(:require [taoensso.nippy :as nippy]
[neversleep-db.mysql-lib :as mysql-lib]
[neversleep-db.println-m :refer [println-m]]
[qbits.nippy-lz4 :refer [lz4-compressor lz4hc-compressor]]
[neversleep-db.util :as util]))
;save-node [uuid data]
;get-node [uuid]
;DB TABLE
;nodes
;=============
uuid | data
;
;roots
;id | uuid | timestamp | data
DATA JUGGLING
(defn serialize
  "Freezes `data` into a nippy byte blob, skipping the nippy header.
   LZ4HC compression is available but currently disabled."
  [data]
  (nippy/freeze data {:skip-header? true}))
(defn de-serialize
  "Thaws a nippy `blob` (frozen without header) back into a value.
   Returns nil when `blob` is nil."
  [blob]
  (when blob
    (nippy/thaw blob {:skip-header? true})))
(defn save-node
  "Saves a node to durable storage: inserts a row into the MySQL `nodes`
   table keyed by `uuid`, with `data` serialized via util/serialize."
  [uuid data]
  (mysql-lib/execute! ["INSERT INTO `nodes` (`uuid`, `data`) VALUES (?, ?);"
                       uuid (util/serialize data)]))
(defn get-node
  "Gets a node from durable storage by `uuid`.
   Returns the deserialized value, or nil when no row matches."
  [uuid]
  (-> (mysql-lib/query ["SELECT `data` FROM `nodes` WHERE `uuid` = ? LIMIT 1;" uuid])
      (first)
      (:data nil)
      (util/de-serialize)))
(defn save-root
  "Saves a root to durable storage: inserts into the MySQL `roots`
   table, stamped with the database's CURRENT_TIMESTAMP."
  [id data]
  (mysql-lib/execute! ["INSERT INTO `roots` (`id`, `data`, `timestamp`) VALUES (?, ?, CURRENT_TIMESTAMP);"
                       id (util/serialize data)]))
(defn get-root
  "Returns the most recently inserted root for `id` (ordered by the
   `auto-inc` column descending), deserialized; nil when none exists."
  [id]
  (-> (mysql-lib/query ["SELECT * FROM `roots` WHERE `id` = ? ORDER BY `auto-inc` DESC;"
                        id])
      (first)
      (:data nil)
      (util/de-serialize)
      ))
(defn get-root-as-of
  "Stub: intended to return the root for `id` as of `timestamp`.
   Currently unimplemented and always returns nil."
  [id timestamp]
  )
;DURABLE MAPS
;=========================
;in-memory storage
(def nodes-maps-table (atom {}))   ;; blob-id -> blob (see save-blob/get-blob)
(def roots-maps-table (atom {}))   ;; root-id -> vector of successive root values
(def allocate-id-counter (atom 0)) ;; monotonically increasing blob-id source
(defn allocate-id
  "Allocates a row in the db for the incoming blob of nodes.
   Returns the next id (monotonically increasing integer)."
  []
  (println-m "allocate-id MEMORY")
  ;; idiomatic increment; (swap! a + 1) and (swap! a inc) are equivalent
  (swap! allocate-id-counter inc))
(defn save-blob
  "Stores `blob` under `id` in the in-memory nodes-maps table.
   (The MySQL UPDATE below is the durable counterpart, disabled here.)"
  [blob id]
  (println-m "saving to MySQL (mem) blob-id" id)
  ;(mysql-lib/execute! ["UPDATE `nodes-maps` SET `data` = ? WHERE `id` = ?;" blob id])
  (swap! nodes-maps-table (fn [old-state] (assoc old-state id blob))))
(defn get-blob
  "Fetches the blob stored under `id`, or nil when absent."
  [id]
  (get @nodes-maps-table id))
(defn save-root-maps
  "Saves a root to durable storage: appends `data` to the history
   vector kept for root `id` (starting a fresh vector when absent)."
  [data id]
  (swap! roots-maps-table update id (fnil conj []) data))
(defn get-root-maps
  "WARNING: experimental, do not use (non optimized query).
   Returns the most recently saved root value for `id`, or nil."
  [id]
  (peek (get @roots-maps-table id)))
(defn get-root-maps-as-of
  "WARNING: experimental, do not use (non optimized query).
   Returns the root value for `id` at 1-based history index `db-id`.
   NOTE(review): throws when `id` is absent or `db-id` is out of range."
  [id db-id]
  (-> (get @roots-maps-table id nil)
      (nth (- db-id 1))))
(defn truncate-all-tables
  "Clears both in-memory tables and resets the id counter.
   Returns true."
  []
  (doseq [[table empty-val] [[roots-maps-table {}]
                             [nodes-maps-table {}]
                             [allocate-id-counter 0]]]
    (reset! table empty-val))
  true)
| null | https://raw.githubusercontent.com/raspasov/neversleep/7fd968f4ab20fa6ef71e1049e3eec289ea6691e4/src/neversleep_db/to_disk_mem.clj | clojure | save-node [uuid data]
get-node [uuid]
DB TABLE
nodes
=============
roots
id | uuid | timestamp | data
(println-m "data before serialize: " data)
{:compressor lz4hc-compressor}
(println-m "frozen data: " frozen-data)
{:compressor lz4-compressor}
DURABLE MAPS
=========================
in-memory storage
(mysql-lib/execute! ["UPDATE `nodes-maps` SET `data` = ? WHERE `id` = ?;" blob id]) | (ns neversleep-db.to-disk-mem
(:require [taoensso.nippy :as nippy]
[neversleep-db.mysql-lib :as mysql-lib]
[neversleep-db.println-m :refer [println-m]]
[qbits.nippy-lz4 :refer [lz4-compressor lz4hc-compressor]]
[neversleep-db.util :as util]))
uuid | data
DATA JUGGLING
(defn serialize [data]
(let [frozen-data (nippy/freeze data
{:skip-header? true})]
frozen-data
)
)
(defn de-serialize [blob]
(if blob
(nippy/thaw blob
{:skip-header? true}
)
nil))
(defn save-node
"Saves a node to durable storage"
[uuid data]
(mysql-lib/execute! ["INSERT INTO `nodes` (`uuid`, `data`) VALUES (?, ?);"
uuid (util/serialize data)]))
(defn get-node
"Gets a node from durable storage"
[uuid]
(-> (mysql-lib/query ["SELECT `data` FROM `nodes` WHERE `uuid` = ? LIMIT 1;" uuid])
(first)
(:data nil)
(util/de-serialize)))
(defn save-root
"Saves a root to durable storage"
[id data]
(mysql-lib/execute! ["INSERT INTO `roots` (`id`, `data`, `timestamp`) VALUES (?, ?, CURRENT_TIMESTAMP);"
id (util/serialize data)]))
(defn get-root [id]
(-> (mysql-lib/query ["SELECT * FROM `roots` WHERE `id` = ? ORDER BY `auto-inc` DESC;"
id])
(first)
(:data nil)
(util/de-serialize)
))
(defn get-root-as-of [id timestamp]
)
(def nodes-maps-table (atom {}))
(def roots-maps-table (atom {}))
(def allocate-id-counter (atom 0))
(defn allocate-id
"Allocates a row in the db for the incoming blob of nodes"
[]
(println-m "allocate-id MEMORY")
(swap! allocate-id-counter + 1))
(defn save-blob [blob id]
(println-m "saving to MySQL (mem) blob-id" id)
(swap! nodes-maps-table (fn [old-state] (assoc old-state id blob))))
(defn get-blob [id]
(get @nodes-maps-table id nil))
(defn save-root-maps
"Saves a root to durable storage"
[data id]
(swap! roots-maps-table (fn [old-state] (assoc old-state id (conj (get old-state id []) data)))))
(defn get-root-maps
"WARNING: experimental, do not use (non optimized query)"
[id]
(-> (get @roots-maps-table id nil)
(peek)))
(defn get-root-maps-as-of
"WARNING: experimental, do not use (non optimized query)"
[id db-id]
(-> (get @roots-maps-table id nil)
(nth (- db-id 1))))
(defn truncate-all-tables []
(reset! roots-maps-table {})
(reset! nodes-maps-table {})
(reset! allocate-id-counter 0)
true)
|
fc262eaaefc4fedb735f3faa52593e2ec36d2bc300f63b0be02e8a7029657f4c | vinted/kafka-elasticsearch-tool | properties.clj | (ns core.properties
(:require [clojure.string :as str])
(:import (java.util Properties)))
(defn ^Properties opts->properties
  "Copies the `opts` map into a fresh java.util.Properties instance,
   stringifying each key with `name` (keywords and strings both work)."
  [opts]
  (let [props (Properties.)]
    (doseq [[k v] opts]
      (.put props ^String (name k) ^String v))
    props))
(defn opts-valid?
  "Asserts that `opts` carries a non-blank value for `required-key`,
   looked up both as given and coerced to a keyword.  Throws an
   AssertionError with a descriptive message when the option is missing."
  [required-key opts]
  (assert
    (not (and (str/blank? (get opts required-key))
              (str/blank? (get opts (keyword required-key)))))
    (format "Required kafka param='%s' option is not provided."
            required-key)))
| null | https://raw.githubusercontent.com/vinted/kafka-elasticsearch-tool/281de24d51cbd2a7ad78e3c1d13a4619b786210d/src/core/properties.clj | clojure | (ns core.properties
(:require [clojure.string :as str])
(:import (java.util Properties)))
(defn ^Properties opts->properties [opts]
(reduce (fn [^Properties props [k v]]
(.put props ^String (name k) ^String v)
props)
(Properties.)
opts))
(defn opts-valid? [required-key opts]
(assert
(not (and (str/blank? (get opts required-key))
(str/blank? (get opts (keyword required-key)))))
(format "Required kafka param='%s' option is not provided."
required-key)))
| |
22608e11e7ea0dd2742c239a9e1c23a55daaa8d0398e7c4577f15089c064983f | mattgreen/hython | Lexer.hs | # OPTIONS_GHC -fno - warn - unused - do - bind #
module Language.Python.Lexer
where
import Prelude hiding (exp, lex)
import Control.Monad
import Data.Complex
import Data.Char hiding (digitToInt)
import Data.List
import Data.Text (Text)
import qualified Data.Text as T
import Text.Parsec hiding (newline, tokens)
import Language.Python
-- | Lexical tokens emitted by the Python lexer.
data Token
    = Newline
    | Indent
    | Dedent
    | Identifier String
    | Keyword String
    | Operator String
    | Delimiter String
    | Literal Constant
    | StringLiteral Text
    deriving Show

-- | A token stream.
type Tokens = [Token]

-- | User state threaded through the lexer.
data LexerState = LexerState
    { lexerIndents :: [Int] -- ^ stack of open indentation widths; bottom is 0
    , lexerImplicitJoins :: Int -- ^ depth of currently open (, [, { brackets
    }

type Lexer a = Parsec Text LexerState a
-- | Run the lexer over a whole source text.  A newline is appended to
-- the input so the final logical line is always terminated.
lex :: Text -> Either ParseError Tokens
lex code = runParser program initialState "" newlineTerminatedCode
  where
    initialState = LexerState { lexerIndents = [0], lexerImplicitJoins = 0 }
    newlineTerminatedCode = T.snoc code '\n'
-- | Top-level lexer: lex every blank or logical line, then emit the
-- 'Dedent' tokens still owed for indentation levels open at EOF.
program :: Lexer Tokens
program = do
    tokens <- many (choice [try blankLine, logicalLine])
    dedents <- remainingDedents
    return $ concat tokens ++ dedents
  where
    remainingDedents = do
        indents <- lexerIndents <$> getState
        return $ replicate (length indents - 1) Dedent
-- | Consume a line holding only whitespace and/or a comment.
-- Blank lines emit no tokens and leave indentation untouched.
blankLine :: Lexer Tokens
blankLine =
    skipMany (oneOf " \t\f") >> optional comment >> newline >> return []
-- | Lex one logical line: leading indentation (possibly emitting
-- Indent/Dedent), then lexemes up to the terminating newline.  Inside
-- an implicit join (open bracket) newlines are skipped like ordinary
-- whitespace, so the logical line spans physical lines.
logicalLine :: Lexer Tokens
logicalLine = do
    indent <- indentation
    tokens <- manyTill1 lexemes (try newline)
    return $ indent ++ concat tokens ++ [Newline]
  where
    lexemes = choice [ comment
                     , lexeme explicitLineJoin
                     , lexeme (try literal)
                     , lexeme (try keyword)
                     , lexeme identifier
                     , lexeme delimiterOrOperator
                     ]
    -- a backslash immediately before the newline joins physical lines
    explicitLineJoin = do
        char '\\'
        newline
        return []
    lexeme p = do
        x <- p
        ws <- skippableWhitespace
        skipMany (oneOf ws)
        return x
    skippableWhitespace = do
        implicitJoin <- lexerImplicitJoins <$> getState
        if implicitJoin > 0
            then return " \t\f\r\n"
            else return " \t\f"
-- | Consume a '#' comment up to (but not including) the line
-- terminator; comments produce no tokens.
comment :: Lexer Tokens
comment = do
    _ <- char '#'
    _ <- manyTill anyChar (lookAhead newline)
    pure []
-- | Measure a line's leading whitespace and compare it against the
-- indent stack, emitting 'Indent'/'Dedent' tokens as needed.  A tab
-- advances to the next multiple of 8 columns.  A dedent must land on a
-- level already on the stack, else it is a lex error.
indentation :: Lexer Tokens
indentation = do
    indentChars <- many (oneOf " \t")
    let level = calculateIndent indentChars
    indents <- fmap lexerIndents getState
    case indents of
        (x:_) -> case compare level x of
            EQ -> return []
            GT -> do
                modifyState $ \s -> s { lexerIndents = level:indents }
                return [Indent]
            LT -> do
                (removed, remaining) <- dedent level indents
                modifyState $ \s -> s { lexerIndents = remaining }
                return $ replicate (length removed) Dedent
        [] -> unexpected "indent stack should never be empty"
  where
    calculateIndent = foldl indentValue 0
    indentValue :: Int -> Char -> Int
    indentValue 0 '\t' = 8
    indentValue acc '\t' = 8 * ceiling ((fromIntegral acc / 8) :: Double)
    indentValue acc _ = acc + 1
    dedent level indents = do
        unless (level `elem` indents) $
            unexpected "unindent does not match any outer indentation level"
        return $ span (> level) indents
-- | Consume one physical line terminator: \r\n, \r, or \n.
newline :: Lexer Tokens
newline =
    [Newline] <$ (try (string "\r\n") <|> string "\r" <|> string "\n")
-- | Lex an operator or delimiter.  Longer forms are tried first so that
-- e.g. \"**=\" is not split into \"**\" and \"=\".  Opening brackets
-- increment the implicit line-join depth; closing brackets decrement it.
delimiterOrOperator :: Lexer Tokens
delimiterOrOperator = choice [try longDelim, try longOp, shortOp, shortDelim]
  where
    longDelim = do
        let longDelims = ["+=", "-=", "*=", "**=", "/=", "//=", "%=", "&=",
                          "|=", "^=", ">>=", "<<="]
        d <- choice $ map (try . string ) longDelims
        return [Delimiter d]
    longOp = do
        let longOps = ["**", "//", "<<", ">>", "<=", ">=", "==", "!="]
        op <- choice $ map (try . string) longOps
        return [Operator op]
    shortOp = do
        c <- oneOf "+-*/%&|^~<>"
        return [Operator [c]]
    shortDelim = do
        c <- oneOf "()[]{},:.;@="
        when (c `elem` "([{") $
            modifyState $ \s -> s{ lexerImplicitJoins = lexerImplicitJoins s + 1 }
        when (c `elem` ")]}") $
            modifyState $ \s -> s{ lexerImplicitJoins = max 0 (lexerImplicitJoins s - 1) }
        return [Delimiter [c]]
-- | Lex an identifier: a letter or underscore followed by any run of
-- letters, digits, or underscores.
identifier :: Lexer Tokens
identifier = do
    initial <- letter <|> char '_'
    rest <- many (alphaNum <|> char '_')
    pure [Identifier (initial : rest)]
-- | Lex a Python keyword.  Fails (after consuming input) when the word
-- is not reserved, so callers wrap this with 'try' before falling back
-- to 'identifier'.
keyword :: Lexer Tokens
keyword = do
    kw <- many1 (alphaNum <|> char '_')
    unless (kw `elem` keywords)
        parserZero
    return [Keyword kw]
  where
    keywords = ["False", "None", "True", "and", "as", "assert", "break", "class",
                "continue", "def", "del", "elif", "else", "except", "finally",
                "for", "from", "global", "if", "import", "in", "is", "lambda",
                "nonlocal", "not", "or", "pass", "raise", "return", "try",
                "while", "with", "yield"]
-- | Lex any literal.  Imaginary literals must be tried before floats,
-- and floats before integers, because their prefixes overlap.
literal :: Lexer Tokens
literal = choice [try imaginaryLiteral, try floatLiteral, integerLiteral, stringLiteral]
-- | Lex an integer literal: binary (0b…), octal (0o…), hex (0x…),
-- all-zeros, or plain decimal.
integerLiteral :: Lexer Tokens
integerLiteral = do
    n <- try binInt <|> try octInt <|> try hexInt <|> zeroDecInt <|> decInt
    return [Literal $ ConstantInt n]
  where
    binInt = do
      char '0'
      oneOf "bB"
      -- only the digits 0 and 1 are valid in a binary literal;
      -- 'digit' would silently accept 0b2..0b9
      digits <- many1 (oneOf "01")
      return $ readBin digits
    zeroDecInt = do
      many1 (char '0')
      return 0
    decInt = do
      first <- oneOf "123456789"
      digits <- many digit
      return $ read (first:digits)
    octInt = do
      char '0'
      oneOf "oO"
      -- only 0..7 are valid octal digits; accepting 8/9 would make
      -- 'read ("0o" ++ digits)' crash lazily at evaluation time
      digits <- many1 octDigit
      return $ read ("0o" ++ digits)
    hexInt = do
      char '0'
      oneOf "xX"
      digits <- many1 hexDigit
      return $ read ("0x" ++ digits)
-- | Lex a floating point literal: point form (\"1.\", \".5\", \"1.5\")
-- or exponent form (\"1e3\", \"1.5e-3\"), producing a 'ConstantFloat'.
floatLiteral :: Lexer Tokens
floatLiteral = do
    s <- try exponentFloat <|> pointFloat
    return [Literal $ ConstantFloat (read s)]
  where
    exponentFloat = do
        leading <- try pointFloat <|> many1 digit
        oneOf "eE"
        sign <- option '+' (oneOf "+-")
        exp <- many1 digit
        return $ leading ++ "e" ++ [sign] ++ exp
    pointFloat = pointFloatLeading <|> pointFloatNoLeading
    -- digits before the dot; missing fraction defaults to \"0\"
    pointFloatLeading = do
        leading <- many1 digit
        char '.'
        trailing <- option "0" (many1 digit)
        return $ leading ++ "." ++ trailing
    -- no digits before the dot; a leading \"0\" is synthesized
    pointFloatNoLeading = do
        char '.'
        trailing <- many1 digit
        return $ "0." ++ trailing
-- | Lex an imaginary literal: a float or digit run followed by j/J,
-- yielding a complex constant with zero real part.
-- NOTE(review): the float case round-trips through 'show'/'read',
-- relying on Double's show output staying read-able.
imaginaryLiteral :: Lexer Tokens
imaginaryLiteral = do
    s <- try floatLit <|> many1 digit
    oneOf "jJ"
    return [Literal (ConstantImag (0.0 :+ read s))]
  where
    floatLit = do
        lit <- floatLiteral
        case lit of
            [Literal (ConstantFloat f)] -> return $ show f
            t -> fail $ "unexpected token from floatLiteral" ++ show t
-- | Lex a string or bytes literal (single- or triple-quoted).
-- A 'b'/'B' prefix yields a bytes constant, otherwise a text literal.
stringLiteral :: Lexer Tokens
stringLiteral = try tripleQuotedString <|> singleQuotedString
  where
    singleQuotedString = do
        prefix <- option '_' stringPrefix
        quote <- char '"' <|> char '\''
        contents <- manyTill stringChar (try (char quote))
        return [stringType prefix contents]
    tripleQuotedString = do
        prefix <- option '_' stringPrefix
        quotes <- string "\"\"\"" <|> string "'''"
        contents <- manyTill stringChar (try (string quotes))
        return [stringType prefix contents]
    stringPrefix = oneOf "bB"
    stringType prefix s
        | 'B' == toUpper prefix = Literal $ ConstantBytes s
        | otherwise = StringLiteral $ T.pack s
    stringChar = try escapedChar <|> anyChar
    escapedChar = do
        char '\\'
        c <- anyChar
        case lookup c escapeSequences of
            Just r -> return r
            Nothing -> parserZero
    -- recognized backslash escapes; '\\' itself must be escapable,
    -- otherwise a doubled backslash would swallow the next character
    -- (including a closing quote)
    escapeSequences = [('"', '"'), ('\'', '\''), ('\\', '\\'), ('a', '\a'),
                       ('b', '\b'), ('f', '\f'), ('n', '\n'), ('r', '\r'),
                       ('t', '\t'), ('v', '\v')]
-- | Interpret a string of binary digits as a number.
-- Any character other than '0' is counted as a one.
readBin :: Num b => String -> b
readBin = foldl' step 0
  where
    step acc c
      | c == '0' = acc * 2
      | otherwise = acc * 2 + 1
-- | Like 'manyTill', but demands at least one occurrence of @p@ before
-- @end@ succeeds.
manyTill1
  :: Stream s m t =>
     ParsecT s u m a -> ParsecT s u m end -> ParsecT s u m [a]
manyTill1 p end = (:) <$> p <*> manyTill p end
| null | https://raw.githubusercontent.com/mattgreen/hython/fcbde98e9b5a033f0d4bea73ac9914bc5e12b746/src/Language/Python/Lexer.hs | haskell | # OPTIONS_GHC -fno - warn - unused - do - bind #
module Language.Python.Lexer
where
import Prelude hiding (exp, lex)
import Control.Monad
import Data.Complex
import Data.Char hiding (digitToInt)
import Data.List
import Data.Text (Text)
import qualified Data.Text as T
import Text.Parsec hiding (newline, tokens)
import Language.Python
data Token
= Newline
| Indent
| Dedent
| Identifier String
| Keyword String
| Operator String
| Delimiter String
| Literal Constant
| StringLiteral Text
deriving Show
type Tokens = [Token]
data LexerState = LexerState
{ lexerIndents :: [Int]
, lexerImplicitJoins :: Int
}
type Lexer a = Parsec Text LexerState a
lex :: Text -> Either ParseError Tokens
lex code = runParser program initialState "" newlineTerminatedCode
where
initialState = LexerState { lexerIndents = [0], lexerImplicitJoins = 0 }
newlineTerminatedCode = T.snoc code '\n'
program :: Lexer Tokens
program = do
tokens <- many (choice [try blankLine, logicalLine])
dedents <- remainingDedents
return $ concat tokens ++ dedents
where
remainingDedents = do
indents <- lexerIndents <$> getState
return $ replicate (length indents - 1) Dedent
blankLine :: Lexer Tokens
blankLine = do
skipMany (oneOf " \t\f")
optional comment
newline
return []
logicalLine :: Lexer Tokens
logicalLine = do
indent <- indentation
tokens <- manyTill1 lexemes (try newline)
return $ indent ++ concat tokens ++ [Newline]
where
lexemes = choice [ comment
, lexeme explicitLineJoin
, lexeme (try literal)
, lexeme (try keyword)
, lexeme identifier
, lexeme delimiterOrOperator
]
explicitLineJoin = do
char '\\'
newline
return []
lexeme p = do
x <- p
ws <- skippableWhitespace
skipMany (oneOf ws)
return x
skippableWhitespace = do
implicitJoin <- lexerImplicitJoins <$> getState
if implicitJoin > 0
then return " \t\f\r\n"
else return " \t\f"
comment :: Lexer Tokens
comment = do
char '#'
manyTill anyChar (lookAhead newline)
return []
indentation :: Lexer Tokens
indentation = do
indentChars <- many (oneOf " \t")
let level = calculateIndent indentChars
indents <- fmap lexerIndents getState
case indents of
(x:_) -> case compare level x of
EQ -> return []
GT -> do
modifyState $ \s -> s { lexerIndents = level:indents }
return [Indent]
LT -> do
(removed, remaining) <- dedent level indents
modifyState $ \s -> s { lexerIndents = remaining }
return $ replicate (length removed) Dedent
[] -> unexpected "indent stack should never be empty"
where
calculateIndent = foldl indentValue 0
indentValue :: Int -> Char -> Int
indentValue 0 '\t' = 8
indentValue acc '\t' = 8 * ceiling ((fromIntegral acc / 8) :: Double)
indentValue acc _ = acc + 1
dedent level indents = do
unless (level `elem` indents) $
unexpected "unindent does not match any outer indentation level"
return $ span (> level) indents
newline :: Lexer Tokens
newline = do
try (string "\r\n") <|> string "\r" <|> string "\n"
return [Newline]
delimiterOrOperator :: Lexer Tokens
delimiterOrOperator = choice [try longDelim, try longOp, shortOp, shortDelim]
where
longDelim = do
let longDelims = ["+=", "-=", "*=", "**=", "/=", "//=", "%=", "&=",
"|=", "^=", ">>=", "<<="]
d <- choice $ map (try . string ) longDelims
return [Delimiter d]
longOp = do
let longOps = ["**", "//", "<<", ">>", "<=", ">=", "==", "!="]
op <- choice $ map (try . string) longOps
return [Operator op]
shortOp = do
c <- oneOf "+-*/%&|^~<>"
return [Operator [c]]
shortDelim = do
c <- oneOf "()[]{},:.;@="
when (c `elem` "([{") $
modifyState $ \s -> s{ lexerImplicitJoins = lexerImplicitJoins s + 1 }
when (c `elem` ")]}") $
modifyState $ \s -> s{ lexerImplicitJoins = max 0 (lexerImplicitJoins s - 1) }
return [Delimiter [c]]
identifier :: Lexer Tokens
identifier = do
x <- letter <|> char '_'
xs <- many (alphaNum <|> char '_')
return [Identifier (x:xs)]
keyword :: Lexer Tokens
keyword = do
kw <- many1 (alphaNum <|> char '_')
unless (kw `elem` keywords)
parserZero
return [Keyword kw]
where
keywords = ["False", "None", "True", "and", "as", "assert", "break", "class",
"continue", "def", "del", "elif", "else", "except", "finally",
"for", "from", "global", "if", "import", "in", "is", "lambda",
"nonlocal", "not", "or", "pass", "raise", "return", "try",
"while", "with", "yield"]
literal :: Lexer Tokens
literal = choice [try imaginaryLiteral, try floatLiteral, integerLiteral, stringLiteral]
integerLiteral :: Lexer Tokens
integerLiteral = do
n <- try binInt <|> try octInt <|> try hexInt <|> zeroDecInt <|> decInt
return [Literal $ ConstantInt n]
where
binInt = do
char '0'
oneOf "bB"
digits <- many1 digit
return $ readBin digits
zeroDecInt = do
many1 (char '0')
return 0
decInt = do
first <- oneOf "123456789"
digits <- many digit
return $ read (first:digits)
octInt = do
char '0'
oneOf "oO"
digits <- many1 digit
return $ read ("0o" ++ digits)
hexInt = do
char '0'
oneOf "xX"
digits <- many1 hexDigit
return $ read ("0x" ++ digits)
floatLiteral :: Lexer Tokens
floatLiteral = do
s <- try exponentFloat <|> pointFloat
return [Literal $ ConstantFloat (read s)]
where
exponentFloat = do
leading <- try pointFloat <|> many1 digit
oneOf "eE"
sign <- option '+' (oneOf "+-")
exp <- many1 digit
return $ leading ++ "e" ++ [sign] ++ exp
pointFloat = pointFloatLeading <|> pointFloatNoLeading
pointFloatLeading = do
leading <- many1 digit
char '.'
trailing <- option "0" (many1 digit)
return $ leading ++ "." ++ trailing
pointFloatNoLeading = do
char '.'
trailing <- many1 digit
return $ "0." ++ trailing
imaginaryLiteral :: Lexer Tokens
imaginaryLiteral = do
s <- try floatLit <|> many1 digit
oneOf "jJ"
return [Literal (ConstantImag (0.0 :+ read s))]
where
floatLit = do
lit <- floatLiteral
case lit of
[Literal (ConstantFloat f)] -> return $ show f
t -> fail $ "unexpected token from floatLiteral" ++ show t
stringLiteral :: Lexer Tokens
stringLiteral = try tripleQuotedString <|> singleQuotedString
where
singleQuotedString = do
prefix <- option '_' stringPrefix
quote <- char '"' <|> char '\''
contents <- manyTill stringChar (try (char quote))
return [stringType prefix contents]
tripleQuotedString = do
prefix <- option '_' stringPrefix
quotes <- string "\"\"\"" <|> string "'''"
contents <- manyTill stringChar (try (string quotes))
return [stringType prefix contents]
stringPrefix = oneOf "bB"
stringType prefix s
| 'B' == toUpper prefix = Literal $ ConstantBytes s
| otherwise = StringLiteral $ T.pack s
stringChar = try escapedChar <|> anyChar
escapedChar = do
char '\\'
c <- anyChar
case lookup c escapeSequences of
Just r -> return r
Nothing -> parserZero
escapeSequences = [('"', '"'), ('\'', '\''), ('a', '\a'), ('b', '\b'),
('f', '\f'), ('n', '\n'), ('r', '\r'), ('t', '\t'),
('v', '\v')]
readBin :: Num b => String -> b
readBin = foldl' (\acc x -> acc * 2 + digitToInt x) 0
where
digitToInt x = if x == '0' then 0 else 1
manyTill1
:: Stream s m t =>
ParsecT s u m a -> ParsecT s u m end -> ParsecT s u m [a]
manyTill1 p end = do
x <- p
xs <- manyTill p end
return (x:xs)
| |
a514dde3c407f01c1c60e81778a01a2b68b9dd20d18b84a80a22f080dd87113d | OCamlPro/digodoc | fulltext_search.ml | (**************************************************************************)
(* *)
Copyright ( c ) 2022 OCamlPro SAS & Origin Labs SAS
(* *)
(* All rights reserved. *)
(* This file is distributed under the terms of the GNU Lesser General *)
Public License version 2.1 , with the special exception on linking
(* described in the LICENSE.md file in the root directory. *)
(* *)
(* *)
(**************************************************************************)
open Js_of_ocaml
open Js
open Globals
open Data_types
(** Module [Fulltext_search] defines behaviour for the fulltext search page
    (fulltext_search.html).

    The fulltext search page is constructed dynamically by sending requests to
    the API server each time a new character is inserted in the Search Pattern
    input (#fpattern_fulltext) or the targeted file type (ML files, DUNE files
    or MAKEFILE files) is changed using the given checkboxes.
    If the user decides to perform the search using a plain pattern instead of
    a RegEx (the default), or if he disables the case-sensitive option, he will
    need to rewrite his pattern in the #fpattern_fulltext input. *)
type fulltext_search_state = {
  mutable pattern : string; (* pattern currently typed in #fpattern_fulltext *)
  mutable files : file_type; (* targeted file type: ML, DUNE or MAKEFILE *)
  mutable is_regex : bool; (* interpret pattern as a regex (default) or plain text *)
  mutable is_case_sensitive : bool; (* case-sensitive matching toggle *)
  mutable last_match_id : int (* id of the last displayed match (used when loading more results) *)
}
(** State for fulltext search *)
let my_timeout = ref None
(** timeout id used to delay requests to the search API (ez_search in this case) *)
(* Initial values: empty pattern, ML files targeted, regex and
   case-sensitive matching enabled, no match displayed yet. *)
let state = {
  pattern = "";
  files = ML;
  is_regex = true;
  is_case_sensitive = true;
  last_match_id = 0
}
(** Default search_state, updated through deref *)
let search_state = ref state
(** Global mutable reference holding the current state of the fulltext search page *)
(* Field-by-field copy into the [Data_types] search-info record consumed
   by the sources fulltext request (see Requests.getSources_fulltext). *)
let fulltext_search_state_to_sources_search_info {pattern; files; is_regex; is_case_sensitive; last_match_id} =
  let open Data_types in
  {
    pattern = pattern;
    files = files;
    is_regex = is_regex;
    is_case_sensitive = is_case_sensitive;
    last_match_id = last_match_id;
  }
(** Converts [fulltext_search_state] to the corresponding [Data_types] search info record *)
let get_input id = unopt @@ Html.CoerceTo.input @@ get_element_by_id id
(** Returns the input element with the given id (coerced to an [<input>]) *)
let set_attr elt attr value =
  elt##setAttribute (js attr) value
(** [set_attr elt attr value] sets attribute [attr] of [elt] to [value]. *)
let append_inner elt str =
  elt##.innerHTML := concat elt##.innerHTML str
(** [append_inner elt str] appends [str] to the innerHTML content of [elt]. *)
let preview_fulltext_source pattern regex case_sens loadmore =
let load_more_btn = unopt @@ Html.CoerceTo.button @@ get_element_by_id "load_more" in
let result_div = unopt @@ Html.CoerceTo.div @@ get_element_by_id "result-div" in
let current_pattern = unopt @@ Html.CoerceTo.input @@ get_element_by_id "fpattern_fulltext" in
let msg_div = unopt @@ Html.CoerceTo.div @@ get_element_by_id "noresult" in
let res_ol = unopt @@ Html.CoerceTo.ol @@ get_element_by_id "results-list" in
let page_info = unopt @@ Html.CoerceTo.div @@ get_element_by_id "page-info" in
let handle_checkbox id state =
let target =
match id with
| "fcase_ftype_ml" -> ML
| "fcase_ftype_dune" -> DUNE
| "fcase_ftype_makefile" -> MAKEFILE
| _ -> raise @@ web_app_error "Error in preview_fulltext_source -> handle_checkbox"
in
if to_bool @@ (get_input id)##.checked
then state.files <- target
in
Init fulltext_info
let fulltext_info = {
pattern;
files = ML;
is_regex = regex;
is_case_sensitive = case_sens;
last_match_id = !search_state.last_match_id;
} in
handle_checkbox "fcase_ftype_ml" fulltext_info;
handle_checkbox "fcase_ftype_dune" fulltext_info;
handle_checkbox "fcase_ftype_makefile" fulltext_info;
if not @@ ((to_string current_pattern##.value##trim) = "")
then
Lwt.async @@
Requests.send_generic_request
~request:(Requests.getSources_fulltext @@ fulltext_search_state_to_sources_search_info @@ fulltext_info)
~callback:(fun sources_results ->
if sources_results.totaloccs = 0
then
begin
msg_div##.style##.display := js "none";
page_info##.innerHTML := js "";
res_ol##.innerHTML := js "";
load_more_btn##.style##.display := js "none";
result_div##.style##.display := js "none";
Insertion.write_message_id ("No results found for " ^ (to_string current_pattern##.value)) "noresult" "nores_msg";
end
else
begin
if sources_results.occs = []
then
begin
load_more_btn##.style##.display := js "none";
Headfoot.footerHandler();
end
else
begin
load_more_btn##.style##.display := js "block";
Insertion.insert_Fulltext_Sources (Objects.sources_search_result_to_jsoo sources_results) loadmore;
end;
end;
Lwt.return_unit
)
~error:(fun err ->
begin
match err with
| Unknown -> logs "something is wrong in preview_fulltext_source";
| _ -> warn "fulltext_search.ml/preview_fulltext_source needs to be corrected";
end;
Lwt.return_unit
)
else
begin
msg_div##.style##.display := js "none";
page_info##.innerHTML := js "";
res_ol##.innerHTML := js "";
load_more_btn##.style##.display := js "none";
result_div##.style##.display := js "none";
Headfoot.footerHandler();
end
* Request to get [ ]
let set_handlers () =
let fulltext_form = unopt @@ Html.CoerceTo.input @@ get_element_by_id "fpattern_fulltext" in
let ml_switch = get_input "fcase_ftype_ml" in
let dune_switch = get_input "fcase_ftype_dune" in
let mkfile_switch = get_input "fcase_ftype_makefile" in
let load_more_btn = unopt @@ Html.CoerceTo.button @@ get_element_by_id "load_more" in
let regex_pattern_switch = get_input "fregex" in
let case_sens_switch = get_input "fcase_sens" in
regex_pattern_switch##.onchange := Html.handler ( fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false;
_false
);
(** Send new request if regex/pattern is modified *)
case_sens_switch##.onchange := Html.handler ( fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false;
_false
);
(** Send new request if case sensitivity is modified *)
ml_switch##.onchange := Html.handler ( fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
if to_bool ml_switch##.checked
then
begin
dune_switch##.checked := _false;
mkfile_switch##.checked := _false;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
else
begin
if ((not @@ to_bool dune_switch##.checked) && (not @@ to_bool mkfile_switch##.checked))
then
begin
ml_switch##.checked := _true;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
end;
_false
);
* Handler for the .ml checkbox . The user must select one type of files to perform fulltext search , ML by default .
If changed , then a new request is sent to the API to retrieve the corresponding results .
If changed, then a new request is sent to the API to retrieve the corresponding results. *)
dune_switch##.onchange := Html.handler ( fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
if to_bool dune_switch##.checked
then
begin
ml_switch##.checked := _false;
mkfile_switch##.checked := _false;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
else
begin
if ((not @@ to_bool ml_switch##.checked) && (not @@ to_bool mkfile_switch##.checked))
then
begin
ml_switch##.checked := _true;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
end;
_false
);
* Handler for the dune checkbox . The user must select one type of files to perform fulltext search , ML by default .
If changed , then a new request is sent to the API to retrieve the corresponding results .
If changed, then a new request is sent to the API to retrieve the corresponding results. *)
mkfile_switch##.onchange := Html.handler ( fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
if to_bool mkfile_switch##.checked
then
begin
ml_switch##.checked := _false;
dune_switch##.checked := _false;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
else
begin
if ((not @@ to_bool dune_switch##.checked) && (not @@ to_bool ml_switch##.checked))
then
begin
ml_switch##.checked := _true;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
end;
_false
);
* Handler for the makefile checkbox . The user must select one type of files to perform fulltext search , ML by default .
If changed , then a new request is sent to the API to retrieve the corresponding results .
If changed, then a new request is sent to the API to retrieve the corresponding results. *)
fulltext_form##.onkeyup := Html.handler (fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
let input_to_query () =
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false;
in
begin
match !my_timeout with
| Some timeout -> window##clearTimeout timeout
| _ -> ()
end;
my_timeout := Some (window##setTimeout (Js.wrap_callback (fun _ -> input_to_query ())) 200.);
_false
);
* Query search - api and display results 20 by 20
fulltext_form##.onpointerenter := Html.handler (fun _ ->
let time = 800. in
let regex_inst = unopt @@ Html.CoerceTo.div @@ get_element_by_id "regex_instructions" in
regex_inst##.style##.opacity := (js "0") |> Js.Optdef.return;
regex_inst##.style##.display := js "block";
let last = ref Js.date##now in
let rec tick () =
let updated_opacity = (js (string_of_float ((float_of_string (to_string (unoptdef @@ regex_inst##.style##.opacity))) +. ((Js.date##now -. !last) /. time)))) in
regex_inst##.style##.opacity := updated_opacity |> Js.Optdef.return;
last := Js.date##now;
if ((float_of_string (to_string updated_opacity)) < 1.)
then window##requestAnimationFrame(Js.wrap_callback (fun _ -> tick ())) |> ignore;
in
tick ();
_false
);
(** Shows regex instructions when pointer is over the text entry form [fulltext-form] (proceeds by slowly increasing opacity
for [time] ms after div [regex_instructions]'s display style option is set to block) *)
fulltext_form##.onpointerleave := Html.handler (fun _ ->
let time = 800. in
let regex_inst = unopt @@ Html.CoerceTo.div @@ get_element_by_id "regex_instructions" in
regex_inst##.style##.opacity := (js "1") |> Js.Optdef.return;
let last = ref Js.date##now in
let rec tick () =
let updated_opacity = (js (string_of_float ((float_of_string (to_string (unoptdef @@ regex_inst##.style##.opacity))) -. ((Js.date##now -. !last) /. time)))) in
regex_inst##.style##.opacity := updated_opacity |> Js.Optdef.return;
last := Js.date##now;
if ((float_of_string (to_string updated_opacity)) > 0.)
then window##requestAnimationFrame(Js.wrap_callback (fun _ -> tick ())) |> ignore
else
begin
regex_inst##.style##.display := js "none";
Headfoot.footerHandler();
end
in
tick ();
_false
);
(** Hides regex instructions when pointer leaves the text entry form [fulltext-form] (proceeds by slowly decreasing opacity
for [time] ms and sets div [regex_instructions]'s display style option to none when opacity gets to 0) *)
load_more_btn##.onclick := Html.handler (fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
let current_last = !search_state.last_match_id in
state.last_match_id <- current_last + 20;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens true;
_false
)
* Loads at most the 20 next results if available
let uninitialized_page () =
Lwt.return_unit
(** Displays unitialized version of the page, empty for the moment *)
let onload () =
set_handlers ();
uninitialized_page ()
(* Onload handler for fulltext search page *)
| null | https://raw.githubusercontent.com/OCamlPro/digodoc/a781c251b3b6ef94badbaea1cfd8dc176d47d364/src/frontend/fulltext_search.ml | ocaml | ************************************************************************
All rights reserved.
This file is distributed under the terms of the GNU Lesser General
described in the LICENSE.md file in the root directory.
************************************************************************
* State for fulltext search
* Global variable that stores state of fulltext search page
* Returns an input with given id
* Send new request if regex/pattern is modified
* Send new request if case sensitivity is modified
* Shows regex instructions when pointer is over the text entry form [fulltext-form] (proceeds by slowly increasing opacity
for [time] ms after div [regex_instructions]'s display style option is set to block)
* Hides regex instructions when pointer leaves the text entry form [fulltext-form] (proceeds by slowly decreasing opacity
for [time] ms and sets div [regex_instructions]'s display style option to none when opacity gets to 0)
* Displays unitialized version of the page, empty for the moment
Onload handler for fulltext search page | Copyright ( c ) 2022 OCamlPro SAS & Origin Labs SAS
Public License version 2.1 , with the special exception on linking
open Js_of_ocaml
open Js
open Globals
open Data_types
* Module [ Fulltext_Search ] defines behaviour for fulltext search page ( fulltext_search.html )
Fulltext search page is constructed dynamically by sending requests to the API server each
time a new character is inserted in Search Pattern input ( # fpattern_fulltext ) or the targeted file
( ML files , DUNE files or MAKEFILE files ) type is changed using the given checkboxes .
If the user decides to perform the search using pattern instead of RegEx ( by default ) or
if he disables the case sensitive option , he will need to rewrite his pattern in the # fpattern_fulltext
input .
Fulltext search page is constructed dynamically by sending requests to the API server each
time a new character is inserted in Search Pattern input (#fpattern_fulltext) or the targeted file
(ML files, DUNE files or MAKEFILE files) type is changed using the given checkboxes.
If the user decides to perform the search using pattern instead of RegEx (by default) or
if he disables the case sensitive option, he will need to rewrite his pattern in the #fpattern_fulltext
input. *)
type fulltext_search_state = {
mutable pattern : string;
mutable files : file_type;
mutable is_regex : bool;
mutable is_case_sensitive : bool;
mutable last_match_id : int
}
let my_timeout = ref None
* timeout_id to delay request to search - api ( ez_search in this case )
let state = {
pattern = "";
files = ML;
is_regex = true;
is_case_sensitive = true;
last_match_id = 0
}
* Default search_state updated through deref
let search_state = ref state
let fulltext_search_state_to_sources_search_info {pattern; files; is_regex; is_case_sensitive; last_match_id} =
let open Data_types in
{
pattern = pattern;
files = files;
is_regex = is_regex;
is_case_sensitive = is_case_sensitive;
last_match_id = last_match_id;
}
* Converts [ fulltext_search_state ] to [ ]
let get_input id = unopt @@ Html.CoerceTo.input @@ get_element_by_id id
let set_attr elt attr value =
elt##setAttribute (js attr) value
* [ set_attr elt attr ] sets attribute [ attr ] of [ elt ] to [ value ] .
let append_inner elt str =
elt##.innerHTML := concat elt##.innerHTML str
* [ append_inner ] appends [ str ] to the content of [ elt ] .
let preview_fulltext_source pattern regex case_sens loadmore =
let load_more_btn = unopt @@ Html.CoerceTo.button @@ get_element_by_id "load_more" in
let result_div = unopt @@ Html.CoerceTo.div @@ get_element_by_id "result-div" in
let current_pattern = unopt @@ Html.CoerceTo.input @@ get_element_by_id "fpattern_fulltext" in
let msg_div = unopt @@ Html.CoerceTo.div @@ get_element_by_id "noresult" in
let res_ol = unopt @@ Html.CoerceTo.ol @@ get_element_by_id "results-list" in
let page_info = unopt @@ Html.CoerceTo.div @@ get_element_by_id "page-info" in
let handle_checkbox id state =
let target =
match id with
| "fcase_ftype_ml" -> ML
| "fcase_ftype_dune" -> DUNE
| "fcase_ftype_makefile" -> MAKEFILE
| _ -> raise @@ web_app_error "Error in preview_fulltext_source -> handle_checkbox"
in
if to_bool @@ (get_input id)##.checked
then state.files <- target
in
Init fulltext_info
let fulltext_info = {
pattern;
files = ML;
is_regex = regex;
is_case_sensitive = case_sens;
last_match_id = !search_state.last_match_id;
} in
handle_checkbox "fcase_ftype_ml" fulltext_info;
handle_checkbox "fcase_ftype_dune" fulltext_info;
handle_checkbox "fcase_ftype_makefile" fulltext_info;
if not @@ ((to_string current_pattern##.value##trim) = "")
then
Lwt.async @@
Requests.send_generic_request
~request:(Requests.getSources_fulltext @@ fulltext_search_state_to_sources_search_info @@ fulltext_info)
~callback:(fun sources_results ->
if sources_results.totaloccs = 0
then
begin
msg_div##.style##.display := js "none";
page_info##.innerHTML := js "";
res_ol##.innerHTML := js "";
load_more_btn##.style##.display := js "none";
result_div##.style##.display := js "none";
Insertion.write_message_id ("No results found for " ^ (to_string current_pattern##.value)) "noresult" "nores_msg";
end
else
begin
if sources_results.occs = []
then
begin
load_more_btn##.style##.display := js "none";
Headfoot.footerHandler();
end
else
begin
load_more_btn##.style##.display := js "block";
Insertion.insert_Fulltext_Sources (Objects.sources_search_result_to_jsoo sources_results) loadmore;
end;
end;
Lwt.return_unit
)
~error:(fun err ->
begin
match err with
| Unknown -> logs "something is wrong in preview_fulltext_source";
| _ -> warn "fulltext_search.ml/preview_fulltext_source needs to be corrected";
end;
Lwt.return_unit
)
else
begin
msg_div##.style##.display := js "none";
page_info##.innerHTML := js "";
res_ol##.innerHTML := js "";
load_more_btn##.style##.display := js "none";
result_div##.style##.display := js "none";
Headfoot.footerHandler();
end
* Request to get [ ]
let set_handlers () =
let fulltext_form = unopt @@ Html.CoerceTo.input @@ get_element_by_id "fpattern_fulltext" in
let ml_switch = get_input "fcase_ftype_ml" in
let dune_switch = get_input "fcase_ftype_dune" in
let mkfile_switch = get_input "fcase_ftype_makefile" in
let load_more_btn = unopt @@ Html.CoerceTo.button @@ get_element_by_id "load_more" in
let regex_pattern_switch = get_input "fregex" in
let case_sens_switch = get_input "fcase_sens" in
regex_pattern_switch##.onchange := Html.handler ( fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false;
_false
);
case_sens_switch##.onchange := Html.handler ( fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false;
_false
);
ml_switch##.onchange := Html.handler ( fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
if to_bool ml_switch##.checked
then
begin
dune_switch##.checked := _false;
mkfile_switch##.checked := _false;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
else
begin
if ((not @@ to_bool dune_switch##.checked) && (not @@ to_bool mkfile_switch##.checked))
then
begin
ml_switch##.checked := _true;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
end;
_false
);
* Handler for the .ml checkbox . The user must select one type of files to perform fulltext search , ML by default .
If changed , then a new request is sent to the API to retrieve the corresponding results .
If changed, then a new request is sent to the API to retrieve the corresponding results. *)
dune_switch##.onchange := Html.handler ( fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
if to_bool dune_switch##.checked
then
begin
ml_switch##.checked := _false;
mkfile_switch##.checked := _false;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
else
begin
if ((not @@ to_bool ml_switch##.checked) && (not @@ to_bool mkfile_switch##.checked))
then
begin
ml_switch##.checked := _true;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
end;
_false
);
* Handler for the dune checkbox . The user must select one type of files to perform fulltext search , ML by default .
If changed , then a new request is sent to the API to retrieve the corresponding results .
If changed, then a new request is sent to the API to retrieve the corresponding results. *)
mkfile_switch##.onchange := Html.handler ( fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
if to_bool mkfile_switch##.checked
then
begin
ml_switch##.checked := _false;
dune_switch##.checked := _false;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
else
begin
if ((not @@ to_bool dune_switch##.checked) && (not @@ to_bool ml_switch##.checked))
then
begin
ml_switch##.checked := _true;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false
end
end;
_false
);
* Handler for the makefile checkbox . The user must select one type of files to perform fulltext search , ML by default .
If changed , then a new request is sent to the API to retrieve the corresponding results .
If changed, then a new request is sent to the API to retrieve the corresponding results. *)
fulltext_form##.onkeyup := Html.handler (fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
state.last_match_id <- 0;
let input_to_query () =
preview_fulltext_source (to_string cur_input_value) is_regex case_sens false;
in
begin
match !my_timeout with
| Some timeout -> window##clearTimeout timeout
| _ -> ()
end;
my_timeout := Some (window##setTimeout (Js.wrap_callback (fun _ -> input_to_query ())) 200.);
_false
);
* Query search - api and display results 20 by 20
fulltext_form##.onpointerenter := Html.handler (fun _ ->
let time = 800. in
let regex_inst = unopt @@ Html.CoerceTo.div @@ get_element_by_id "regex_instructions" in
regex_inst##.style##.opacity := (js "0") |> Js.Optdef.return;
regex_inst##.style##.display := js "block";
let last = ref Js.date##now in
let rec tick () =
let updated_opacity = (js (string_of_float ((float_of_string (to_string (unoptdef @@ regex_inst##.style##.opacity))) +. ((Js.date##now -. !last) /. time)))) in
regex_inst##.style##.opacity := updated_opacity |> Js.Optdef.return;
last := Js.date##now;
if ((float_of_string (to_string updated_opacity)) < 1.)
then window##requestAnimationFrame(Js.wrap_callback (fun _ -> tick ())) |> ignore;
in
tick ();
_false
);
fulltext_form##.onpointerleave := Html.handler (fun _ ->
let time = 800. in
let regex_inst = unopt @@ Html.CoerceTo.div @@ get_element_by_id "regex_instructions" in
regex_inst##.style##.opacity := (js "1") |> Js.Optdef.return;
let last = ref Js.date##now in
let rec tick () =
let updated_opacity = (js (string_of_float ((float_of_string (to_string (unoptdef @@ regex_inst##.style##.opacity))) -. ((Js.date##now -. !last) /. time)))) in
regex_inst##.style##.opacity := updated_opacity |> Js.Optdef.return;
last := Js.date##now;
if ((float_of_string (to_string updated_opacity)) > 0.)
then window##requestAnimationFrame(Js.wrap_callback (fun _ -> tick ())) |> ignore
else
begin
regex_inst##.style##.display := js "none";
Headfoot.footerHandler();
end
in
tick ();
_false
);
load_more_btn##.onclick := Html.handler (fun _ ->
let cur_input_value = fulltext_form##.value##trim in
let is_regex = to_bool @@ (get_input "fregex")##.checked in
let case_sens = to_bool @@ (get_input "fcase_sens")##.checked in
let current_last = !search_state.last_match_id in
state.last_match_id <- current_last + 20;
preview_fulltext_source (to_string cur_input_value) is_regex case_sens true;
_false
)
* Loads at most the 20 next results if available
let uninitialized_page () =
Lwt.return_unit
let onload () =
set_handlers ();
uninitialized_page ()
|
862530a205768ab36014b08868c26cc0c119ae72039773ac24f828a6205a9e9a | ocaml-omake/omake | omake_symbol.ml | (*
* Symbols used everywhere.
* Eventually, we should collect all the global symbols and
* put them here.
*)
let braces_sym = Lm_symbol.add "{}"
let builtin_sym = Lm_symbol.add "$builtin"
let map_sym = Lm_symbol.add "$map"
let pervasives_sym = Lm_symbol.add "Pervasives"
let object_sym = Lm_symbol.add "Object"
let int_object_sym = Lm_symbol.add "Int"
let float_object_sym = Lm_symbol.add "Float"
let string_object_sym = Lm_symbol.add "String"
let sequence_object_sym = Lm_symbol.add "Sequence"
let array_object_sym = Lm_symbol.add "Array"
let fun_object_sym = Lm_symbol.add "Fun"
let rule_object_sym = Lm_symbol.add "Rule"
let file_object_sym = Lm_symbol.add "File"
let dir_object_sym = Lm_symbol.add "Dir"
let body_object_sym = Lm_symbol.add "Body"
let in_channel_object_sym = Lm_symbol.add "InChannel"
let out_channel_object_sym = Lm_symbol.add "OutChannel"
let in_out_channel_object_sym = Lm_symbol.add "InOutChannel"
let map_object_sym = Lm_symbol.add "Map"
let shell_object_sym = Lm_symbol.add "Shell"
let select_object_sym = Lm_symbol.add "Select"
let pipe_object_sym = Lm_symbol.add "Pipe"
let stat_object_sym = Lm_symbol.add "Stat"
let passwd_object_sym = Lm_symbol.add "Passwd"
let group_object_sym = Lm_symbol.add "Group"
let lexer_object_sym = Lm_symbol.add "Lexer"
let parser_object_sym = Lm_symbol.add "Parser"
let location_object_sym = Lm_symbol.add "Location"
let target_object_sym = Lm_symbol.add "Target"
let options_object_sym = Lm_symbol.add "Options"
let var_object_sym = Lm_symbol.add "Var"
let tm_object_sym = Lm_symbol.add "Tm"
let wild_sym = Lm_symbol.add "%"
let explicit_target_sym = Lm_symbol.add "$EXPLICIT-TARGET"
let current_prec_sym = Lm_symbol.add "current-prec"
let lex_sym = Lm_symbol.add "lex"
let name_sym = Lm_symbol.add "name"
(* let value_sym = Lm_symbol.add "value" *)
let lexer_sym = Lm_symbol.add "lexer"
let val_sym = Lm_symbol.add "val"
let read_sym = Lm_symbol.add "read"
let write_sym = Lm_symbol.add "write"
let error_sym = Lm_symbol.add "error"
let st_dev_sym = Lm_symbol.add "st_dev"
let st_ino_sym = Lm_symbol.add "st_ino"
let st_kind_sym = Lm_symbol.add "st_kind"
let st_perm_sym = Lm_symbol.add "st_perm"
let st_nlink_sym = Lm_symbol.add "st_nlink"
let st_uid_sym = Lm_symbol.add "st_uid"
let st_gid_sym = Lm_symbol.add "st_gid"
let st_rdev_sym = Lm_symbol.add "st_rdev"
let st_size_sym = Lm_symbol.add "st_size"
let st_atime_sym = Lm_symbol.add "st_atime"
let st_mtime_sym = Lm_symbol.add "st_mtime"
let st_ctime_sym = Lm_symbol.add "st_ctime"
let pw_name_sym = Lm_symbol.add "pw_name"
let pw_passwd_sym = Lm_symbol.add "pw_passwd"
let pw_uid_sym = Lm_symbol.add "pw_uid"
let pw_gid_sym = Lm_symbol.add "pw_gid"
let pw_gecos_sym = Lm_symbol.add "pw_gecos"
let pw_dir_sym = Lm_symbol.add "pw_dir"
let pw_shell_sym = Lm_symbol.add "pw_shell"
let gr_name_sym = Lm_symbol.add "gr_name"
let gr_passwd_sym = Lm_symbol.add "gr_passwd"
let gr_gid_sym = Lm_symbol.add "gr_gid"
let gr_mem_sym = Lm_symbol.add "gr_mem"
let tm_sec_sym = Lm_symbol.add "tm_sec"
let tm_min_sym = Lm_symbol.add "tm_min"
let tm_hour_sym = Lm_symbol.add "tm_hour"
let tm_mday_sym = Lm_symbol.add "tm_mday"
let tm_mon_sym = Lm_symbol.add "tm_mon"
let tm_year_sym = Lm_symbol.add "tm_year"
let tm_wday_sym = Lm_symbol.add "tm_wday"
let tm_yday_sym = Lm_symbol.add "tm_yday"
let tm_isdst_sym = Lm_symbol.add "tm_isdst"
let tm_time_sym = Lm_symbol.add "tm_time"
let target_sym = Lm_symbol.add "target"
let target_effects_sym = Lm_symbol.add "effects"
let scanner_deps_sym = Lm_symbol.add "scanner-deps"
let static_deps_sym = Lm_symbol.add "static-deps"
let build_deps_sym = Lm_symbol.add "build-deps"
let build_values_sym = Lm_symbol.add "build-values"
let build_commands_sym = Lm_symbol.add "build-commands"
let output_file_sym = Lm_symbol.add "output-file"
let argv_sym = Lm_symbol.add "argv"
let star_sym = Lm_symbol.add "*"
let at_sym = Lm_symbol.add "@"
let amp_sym = Lm_symbol.add "&"
let lt_sym = Lm_symbol.add "<"
let gt_sym = Lm_symbol.add ">"
let plus_sym = Lm_symbol.add "+"
let hat_sym = Lm_symbol.add "^"
let zero_sym = Lm_symbol.add "0"
let runtime_exception_sym = Lm_symbol.add "RuntimeException"
let unbuildable_exception_sym = Lm_symbol.add "UnbuildableException"
let parse_loc_sym = Lm_symbol.add "parse-loc"
let loc_sym = Lm_symbol.add "loc"
let pos_sym = Lm_symbol.add "position"
let message_sym = Lm_symbol.add "message"
let stdin_sym = Lm_symbol.add "stdin"
let stdout_sym = Lm_symbol.add "stdout"
let stderr_sym = Lm_symbol.add "stderr"
let printexitvalue_sym = Lm_symbol.add "printexitvalue"
let targets_sym = Lm_symbol.add "TARGETS"
let glob_options_sym = Lm_symbol.add "GLOB_OPTIONS"
let glob_allow_sym = Lm_symbol.add "GLOB_ALLOW"
let glob_ignore_sym = Lm_symbol.add "GLOB_IGNORE"
let this_sym = Lm_symbol.add "this"
let dynamic_sym = Lm_symbol.add "dynamic"
let static_sym = Lm_symbol.add "static"
let allow_empty_subdirs_sym = Lm_symbol.add "ALLOW_EMPTY_SUBDIRS"
let abort_on_command_error_sym = Lm_symbol.add "ABORT_ON_COMMAND_ERROR"
let exit_on_uncaught_exception_sym = Lm_symbol.add "EXIT_ON_UNCAUGHT_EXCEPTION"
let create_subdirs_sym = Lm_symbol.add "CREATE_SUBDIRS"
let scanner_mode_sym = Lm_symbol.add "SCANNER_MODE"
let history_file_sym = Lm_symbol.add "history-file"
let history_length_sym = Lm_symbol.add "history-length"
let build_summary_sym = Lm_symbol.add "BUILD_SUMMARY"
(*
* Special symbols.
*)
let concat_sym = Lm_symbol.add "concat"
let if_sym = Lm_symbol.add "if"
let else_sym = Lm_symbol.add "else"
let elseif_sym = Lm_symbol.add "elseif"
let switch_sym = Lm_symbol.add "switch"
let select_sym = Lm_symbol.add "select"
let case_sym = Lm_symbol.add "case"
let do_sym = Lm_symbol.add "do"
let while_sym = Lm_symbol.add "while"
let default_sym = Lm_symbol.add "default"
let include_sym = Lm_symbol.add "include"
let section_sym = Lm_symbol.add "section"
let try_sym = Lm_symbol.add "try"
let catch_sym = Lm_symbol.add "catch"
let when_sym = Lm_symbol.add "when"
let finally_sym = Lm_symbol.add "finally"
let curry_sym = Lm_symbol.add "curry"
let private_sym = Lm_symbol.add "private"
let protected_sym = Lm_symbol.add "protected"
let public_sym = Lm_symbol.add "public"
let global_sym = Lm_symbol.add "global"
let const_sym = Lm_symbol.add "const"
let rule_sym = Lm_symbol.add "rule"
let system_sym = Lm_symbol.add "system"
let open_sym = Lm_symbol.add "open"
let autoload_sym = Lm_symbol.add "autoload"
let declare_sym = Lm_symbol.add "declare"
let return_sym = Lm_symbol.add "return"
let export_sym = Lm_symbol.add "export"
let value_sym = Lm_symbol.add "value"
let file_sym = Lm_symbol.add "__FILE__"
let file_id_sym = Lm_symbol.add "__ID__"
let foreach_sym = Lm_symbol.add "foreach"
let fun_sym = Lm_symbol.add "fun"
let set_sym = Lm_symbol.add "set"
let neg_fun_sym = Lm_symbol.add "neg"
let add_fun_sym = Lm_symbol.add "add"
let sub_fun_sym = Lm_symbol.add "sub"
let mul_fun_sym = Lm_symbol.add "mul"
let div_fun_sym = Lm_symbol.add "div"
let mod_fun_sym = Lm_symbol.add "mod"
let lsl_fun_sym = Lm_symbol.add "lsl"
let lsr_fun_sym = Lm_symbol.add "lsr"
let asr_fun_sym = Lm_symbol.add "asr"
let lxor_fun_sym = Lm_symbol.add "lxor"
let lor_fun_sym = Lm_symbol.add "lor"
let land_fun_sym = Lm_symbol.add "land"
let and_fun_sym = Lm_symbol.add "and"
let or_fun_sym = Lm_symbol.add "or"
let le_fun_sym = Lm_symbol.add "le"
let lt_fun_sym = Lm_symbol.add "lt"
let equal_fun_sym = Lm_symbol.add "equal"
let nequal_fun_sym = Lm_symbol.add "nequal"
let ge_fun_sym = Lm_symbol.add "ge"
let gt_fun_sym = Lm_symbol.add "gt"
let nth_fun_sym = Lm_symbol.add "nth"
let memo_rule_sym = Lm_symbol.add "memo-rule"
let empty_map_sym = Lm_symbol.add "empty-map"
let create_map_sym = Lm_symbol.add "create-map"
let create_lazy_map_sym = Lm_symbol.add "create-lazy-map"
(*
* Awk values.
*)
let awk_sym = Lm_symbol.add "awk"
let nf_sym = Lm_symbol.add "NF"
let rs_sym = Lm_symbol.add "RS"
let fs_sym = Lm_symbol.add "FS"
let filename_sym = Lm_symbol.add "FILENAME"
let fnr_sym = Lm_symbol.add "FNR"
let fsubst_sym = Lm_symbol.add "fsubst"
(*
* The applications that can have cases.
*)
let cases_syms = [awk_sym; fsubst_sym]
let cases_set = List.fold_left Lm_symbol.SymbolSet.add Lm_symbol.SymbolSet.empty cases_syms
let clauses_syms = [case_sym; default_sym; when_sym; catch_sym; finally_sym; do_sym]
let clauses_set = List.fold_left Lm_symbol.SymbolSet.add Lm_symbol.SymbolSet.empty clauses_syms
(*
* Colon symbols.
*)
let normal_sym = Lm_symbol.add ":normal:"
let optional_sym = Lm_symbol.add ":optional:"
let exists_sym = Lm_symbol.add ":exists:"
let squash_sym = Lm_symbol.add ":squash:"
let effects_sym = Lm_symbol.add ":effects:"
let scanner_sym = Lm_symbol.add ":scanner:"
let values_sym = Lm_symbol.add ":value:"
let key_sym = Lm_symbol.add ":key:"
* Builtin functions .
* Builtin functions.
*)
let extends_sym = Lm_symbol.add "extends"
let omakeflags_sym = Lm_symbol.add "OMakeFlags"
let omakeargv_sym = Lm_symbol.add "OMakeArgv"
(*
* Symbols.
*)
let prompt_sym = Lm_symbol.add "prompt"
let ignoreeof_sym = Lm_symbol.add "ignoreeof"
let cwd_sym = Lm_symbol.add "CWD"
let stdroot_sym = Lm_symbol.add "STDROOT"
let stdlib_sym = Lm_symbol.add "STDLIB"
let ostype_sym = Lm_symbol.add "OSTYPE"
let path_sym = Lm_symbol.add "PATH"
let auto_rehash_sym = Lm_symbol.add "AUTO_REHASH"
let omakepath_sym = Lm_symbol.add "OMAKEPATH"
let oshell_sym = Lm_symbol.add "OSHELL"
let cdpath_sym = Lm_symbol.add "cdpath"
| null | https://raw.githubusercontent.com/ocaml-omake/omake/08b2a83fb558f6eb6847566cbe1a562230da2b14/src/ir/omake_symbol.ml | ocaml |
* Symbols used everywhere.
* Eventually, we should collect all the global symbols and
* put them here.
let value_sym = Lm_symbol.add "value"
* Special symbols.
* Awk values.
* The applications that can have cases.
* Colon symbols.
* Symbols.
|
let braces_sym = Lm_symbol.add "{}"
let builtin_sym = Lm_symbol.add "$builtin"
let map_sym = Lm_symbol.add "$map"
let pervasives_sym = Lm_symbol.add "Pervasives"
let object_sym = Lm_symbol.add "Object"
let int_object_sym = Lm_symbol.add "Int"
let float_object_sym = Lm_symbol.add "Float"
let string_object_sym = Lm_symbol.add "String"
let sequence_object_sym = Lm_symbol.add "Sequence"
let array_object_sym = Lm_symbol.add "Array"
let fun_object_sym = Lm_symbol.add "Fun"
let rule_object_sym = Lm_symbol.add "Rule"
let file_object_sym = Lm_symbol.add "File"
let dir_object_sym = Lm_symbol.add "Dir"
let body_object_sym = Lm_symbol.add "Body"
let in_channel_object_sym = Lm_symbol.add "InChannel"
let out_channel_object_sym = Lm_symbol.add "OutChannel"
let in_out_channel_object_sym = Lm_symbol.add "InOutChannel"
let map_object_sym = Lm_symbol.add "Map"
let shell_object_sym = Lm_symbol.add "Shell"
let select_object_sym = Lm_symbol.add "Select"
let pipe_object_sym = Lm_symbol.add "Pipe"
let stat_object_sym = Lm_symbol.add "Stat"
let passwd_object_sym = Lm_symbol.add "Passwd"
let group_object_sym = Lm_symbol.add "Group"
let lexer_object_sym = Lm_symbol.add "Lexer"
let parser_object_sym = Lm_symbol.add "Parser"
let location_object_sym = Lm_symbol.add "Location"
let target_object_sym = Lm_symbol.add "Target"
let options_object_sym = Lm_symbol.add "Options"
let var_object_sym = Lm_symbol.add "Var"
let tm_object_sym = Lm_symbol.add "Tm"
let wild_sym = Lm_symbol.add "%"
let explicit_target_sym = Lm_symbol.add "$EXPLICIT-TARGET"
let current_prec_sym = Lm_symbol.add "current-prec"
let lex_sym = Lm_symbol.add "lex"
let name_sym = Lm_symbol.add "name"
let lexer_sym = Lm_symbol.add "lexer"
let val_sym = Lm_symbol.add "val"
let read_sym = Lm_symbol.add "read"
let write_sym = Lm_symbol.add "write"
let error_sym = Lm_symbol.add "error"
let st_dev_sym = Lm_symbol.add "st_dev"
let st_ino_sym = Lm_symbol.add "st_ino"
let st_kind_sym = Lm_symbol.add "st_kind"
let st_perm_sym = Lm_symbol.add "st_perm"
let st_nlink_sym = Lm_symbol.add "st_nlink"
let st_uid_sym = Lm_symbol.add "st_uid"
let st_gid_sym = Lm_symbol.add "st_gid"
let st_rdev_sym = Lm_symbol.add "st_rdev"
let st_size_sym = Lm_symbol.add "st_size"
let st_atime_sym = Lm_symbol.add "st_atime"
let st_mtime_sym = Lm_symbol.add "st_mtime"
let st_ctime_sym = Lm_symbol.add "st_ctime"
let pw_name_sym = Lm_symbol.add "pw_name"
let pw_passwd_sym = Lm_symbol.add "pw_passwd"
let pw_uid_sym = Lm_symbol.add "pw_uid"
let pw_gid_sym = Lm_symbol.add "pw_gid"
let pw_gecos_sym = Lm_symbol.add "pw_gecos"
let pw_dir_sym = Lm_symbol.add "pw_dir"
let pw_shell_sym = Lm_symbol.add "pw_shell"
let gr_name_sym = Lm_symbol.add "gr_name"
let gr_passwd_sym = Lm_symbol.add "gr_passwd"
let gr_gid_sym = Lm_symbol.add "gr_gid"
let gr_mem_sym = Lm_symbol.add "gr_mem"
let tm_sec_sym = Lm_symbol.add "tm_sec"
let tm_min_sym = Lm_symbol.add "tm_min"
let tm_hour_sym = Lm_symbol.add "tm_hour"
let tm_mday_sym = Lm_symbol.add "tm_mday"
let tm_mon_sym = Lm_symbol.add "tm_mon"
let tm_year_sym = Lm_symbol.add "tm_year"
let tm_wday_sym = Lm_symbol.add "tm_wday"
let tm_yday_sym = Lm_symbol.add "tm_yday"
let tm_isdst_sym = Lm_symbol.add "tm_isdst"
let tm_time_sym = Lm_symbol.add "tm_time"
let target_sym = Lm_symbol.add "target"
let target_effects_sym = Lm_symbol.add "effects"
let scanner_deps_sym = Lm_symbol.add "scanner-deps"
let static_deps_sym = Lm_symbol.add "static-deps"
let build_deps_sym = Lm_symbol.add "build-deps"
let build_values_sym = Lm_symbol.add "build-values"
let build_commands_sym = Lm_symbol.add "build-commands"
let output_file_sym = Lm_symbol.add "output-file"
let argv_sym = Lm_symbol.add "argv"
let star_sym = Lm_symbol.add "*"
let at_sym = Lm_symbol.add "@"
let amp_sym = Lm_symbol.add "&"
let lt_sym = Lm_symbol.add "<"
let gt_sym = Lm_symbol.add ">"
let plus_sym = Lm_symbol.add "+"
let hat_sym = Lm_symbol.add "^"
let zero_sym = Lm_symbol.add "0"
let runtime_exception_sym = Lm_symbol.add "RuntimeException"
let unbuildable_exception_sym = Lm_symbol.add "UnbuildableException"
let parse_loc_sym = Lm_symbol.add "parse-loc"
let loc_sym = Lm_symbol.add "loc"
let pos_sym = Lm_symbol.add "position"
let message_sym = Lm_symbol.add "message"
let stdin_sym = Lm_symbol.add "stdin"
let stdout_sym = Lm_symbol.add "stdout"
let stderr_sym = Lm_symbol.add "stderr"
let printexitvalue_sym = Lm_symbol.add "printexitvalue"
let targets_sym = Lm_symbol.add "TARGETS"
let glob_options_sym = Lm_symbol.add "GLOB_OPTIONS"
let glob_allow_sym = Lm_symbol.add "GLOB_ALLOW"
let glob_ignore_sym = Lm_symbol.add "GLOB_IGNORE"
let this_sym = Lm_symbol.add "this"
let dynamic_sym = Lm_symbol.add "dynamic"
let static_sym = Lm_symbol.add "static"
let allow_empty_subdirs_sym = Lm_symbol.add "ALLOW_EMPTY_SUBDIRS"
let abort_on_command_error_sym = Lm_symbol.add "ABORT_ON_COMMAND_ERROR"
let exit_on_uncaught_exception_sym = Lm_symbol.add "EXIT_ON_UNCAUGHT_EXCEPTION"
let create_subdirs_sym = Lm_symbol.add "CREATE_SUBDIRS"
let scanner_mode_sym = Lm_symbol.add "SCANNER_MODE"
let history_file_sym = Lm_symbol.add "history-file"
let history_length_sym = Lm_symbol.add "history-length"
let build_summary_sym = Lm_symbol.add "BUILD_SUMMARY"
let concat_sym = Lm_symbol.add "concat"
let if_sym = Lm_symbol.add "if"
let else_sym = Lm_symbol.add "else"
let elseif_sym = Lm_symbol.add "elseif"
let switch_sym = Lm_symbol.add "switch"
let select_sym = Lm_symbol.add "select"
let case_sym = Lm_symbol.add "case"
let do_sym = Lm_symbol.add "do"
let while_sym = Lm_symbol.add "while"
let default_sym = Lm_symbol.add "default"
let include_sym = Lm_symbol.add "include"
let section_sym = Lm_symbol.add "section"
let try_sym = Lm_symbol.add "try"
let catch_sym = Lm_symbol.add "catch"
let when_sym = Lm_symbol.add "when"
let finally_sym = Lm_symbol.add "finally"
let curry_sym = Lm_symbol.add "curry"
let private_sym = Lm_symbol.add "private"
let protected_sym = Lm_symbol.add "protected"
let public_sym = Lm_symbol.add "public"
let global_sym = Lm_symbol.add "global"
let const_sym = Lm_symbol.add "const"
let rule_sym = Lm_symbol.add "rule"
let system_sym = Lm_symbol.add "system"
let open_sym = Lm_symbol.add "open"
let autoload_sym = Lm_symbol.add "autoload"
let declare_sym = Lm_symbol.add "declare"
let return_sym = Lm_symbol.add "return"
let export_sym = Lm_symbol.add "export"
let value_sym = Lm_symbol.add "value"
let file_sym = Lm_symbol.add "__FILE__"
let file_id_sym = Lm_symbol.add "__ID__"
let foreach_sym = Lm_symbol.add "foreach"
let fun_sym = Lm_symbol.add "fun"
let set_sym = Lm_symbol.add "set"
let neg_fun_sym = Lm_symbol.add "neg"
let add_fun_sym = Lm_symbol.add "add"
let sub_fun_sym = Lm_symbol.add "sub"
let mul_fun_sym = Lm_symbol.add "mul"
let div_fun_sym = Lm_symbol.add "div"
let mod_fun_sym = Lm_symbol.add "mod"
let lsl_fun_sym = Lm_symbol.add "lsl"
let lsr_fun_sym = Lm_symbol.add "lsr"
let asr_fun_sym = Lm_symbol.add "asr"
let lxor_fun_sym = Lm_symbol.add "lxor"
let lor_fun_sym = Lm_symbol.add "lor"
let land_fun_sym = Lm_symbol.add "land"
let and_fun_sym = Lm_symbol.add "and"
let or_fun_sym = Lm_symbol.add "or"
let le_fun_sym = Lm_symbol.add "le"
let lt_fun_sym = Lm_symbol.add "lt"
let equal_fun_sym = Lm_symbol.add "equal"
let nequal_fun_sym = Lm_symbol.add "nequal"
let ge_fun_sym = Lm_symbol.add "ge"
let gt_fun_sym = Lm_symbol.add "gt"
let nth_fun_sym = Lm_symbol.add "nth"
let memo_rule_sym = Lm_symbol.add "memo-rule"
let empty_map_sym = Lm_symbol.add "empty-map"
let create_map_sym = Lm_symbol.add "create-map"
let create_lazy_map_sym = Lm_symbol.add "create-lazy-map"
let awk_sym = Lm_symbol.add "awk"
let nf_sym = Lm_symbol.add "NF"
let rs_sym = Lm_symbol.add "RS"
let fs_sym = Lm_symbol.add "FS"
let filename_sym = Lm_symbol.add "FILENAME"
let fnr_sym = Lm_symbol.add "FNR"
let fsubst_sym = Lm_symbol.add "fsubst"
let cases_syms = [awk_sym; fsubst_sym]
let cases_set = List.fold_left Lm_symbol.SymbolSet.add Lm_symbol.SymbolSet.empty cases_syms
let clauses_syms = [case_sym; default_sym; when_sym; catch_sym; finally_sym; do_sym]
let clauses_set = List.fold_left Lm_symbol.SymbolSet.add Lm_symbol.SymbolSet.empty clauses_syms
let normal_sym = Lm_symbol.add ":normal:"
let optional_sym = Lm_symbol.add ":optional:"
let exists_sym = Lm_symbol.add ":exists:"
let squash_sym = Lm_symbol.add ":squash:"
let effects_sym = Lm_symbol.add ":effects:"
let scanner_sym = Lm_symbol.add ":scanner:"
let values_sym = Lm_symbol.add ":value:"
let key_sym = Lm_symbol.add ":key:"
* Builtin functions .
* Builtin functions.
*)
let extends_sym = Lm_symbol.add "extends"
let omakeflags_sym = Lm_symbol.add "OMakeFlags"
let omakeargv_sym = Lm_symbol.add "OMakeArgv"
let prompt_sym = Lm_symbol.add "prompt"
let ignoreeof_sym = Lm_symbol.add "ignoreeof"
let cwd_sym = Lm_symbol.add "CWD"
let stdroot_sym = Lm_symbol.add "STDROOT"
let stdlib_sym = Lm_symbol.add "STDLIB"
let ostype_sym = Lm_symbol.add "OSTYPE"
let path_sym = Lm_symbol.add "PATH"
let auto_rehash_sym = Lm_symbol.add "AUTO_REHASH"
let omakepath_sym = Lm_symbol.add "OMAKEPATH"
let oshell_sym = Lm_symbol.add "OSHELL"
let cdpath_sym = Lm_symbol.add "cdpath"
|
83c33e949d22e7753e716b70dd7e38ddf7dd76096c0d9ebf3519eafacee83315 | district0x/re-frame-google-analytics-fx | project.clj | (defproject district0x.re-frame/google-analytics-fx "1.0.0"
:description "A re-frame effects handler for performing Google Analytics tasks"
:url "-frame-google-analytics-fx"
:license {:name "MIT"}
:dependencies [[org.clojure/clojurescript "1.9.946"]
[re-frame "0.10.2"]])
| null | https://raw.githubusercontent.com/district0x/re-frame-google-analytics-fx/28396a15d8cc7ef4aa2bf686781c6b9cdcebdd89/project.clj | clojure | (defproject district0x.re-frame/google-analytics-fx "1.0.0"
:description "A re-frame effects handler for performing Google Analytics tasks"
:url "-frame-google-analytics-fx"
:license {:name "MIT"}
:dependencies [[org.clojure/clojurescript "1.9.946"]
[re-frame "0.10.2"]])
| |
6c55e909775ba6df7b7c9f1669b4722037702b58a9aee0514085a7cec8da573e | SuYi1995/game_server | hot_swap.erl | %%%-------------------------------------------------------------------
%%% @author sy
( C ) 2019 , < COMPANY >
%%% @doc
%%% 本地热更模块
%%% @end
Created : 30 . 9月 2019 14:05
%%%-------------------------------------------------------------------
-module(hot_swap).
-export([
load/1,
network_load/1,
network_load/2
]).
%% 当前节点热部署:
@param ModList 模块列表(原子列表 )
load(ModList)->
lists:foreach(fun(Module)->
code:purge(Module),
code:load_file(Module)
end,ModList).
%% 节点集热更:
@param ModList 模块列表(原子列表 )
network_load(ModList)->
lists:foreach(fun(Module)->
[begin rpc:call(Node, code, purge, [Module]),rpc:call(Node, code, load_file, [Module]) end || Node <- (nodes()++[node()])]
end,ModList).
%% 指定单节点热更:
@param NodeList 节点集合(原子列表 )
@param ModList 模块列表(原子列表 )
network_load(NodeList, ModList)->
lists:foreach(fun(Node)->
lists:foreach(fun(Module)->
rpc:call(Node, code, purge, [Module]),
rpc:call(Node, code, load_file, [Module])
end,ModList)
end,NodeList).
| null | https://raw.githubusercontent.com/SuYi1995/game_server/b9a8574589075a1264c3d1f9a564d6d2ea8ae574/src/tools/hot_swap.erl | erlang | -------------------------------------------------------------------
@author sy
@doc
本地热更模块
@end
-------------------------------------------------------------------
当前节点热部署:
节点集热更:
指定单节点热更: | ( C ) 2019 , < COMPANY >
Created : 30 . 9月 2019 14:05
-module(hot_swap).
-export([
load/1,
network_load/1,
network_load/2
]).
@param ModList 模块列表(原子列表 )
load(ModList)->
lists:foreach(fun(Module)->
code:purge(Module),
code:load_file(Module)
end,ModList).
@param ModList 模块列表(原子列表 )
network_load(ModList)->
lists:foreach(fun(Module)->
[begin rpc:call(Node, code, purge, [Module]),rpc:call(Node, code, load_file, [Module]) end || Node <- (nodes()++[node()])]
end,ModList).
@param NodeList 节点集合(原子列表 )
@param ModList 模块列表(原子列表 )
network_load(NodeList, ModList)->
lists:foreach(fun(Node)->
lists:foreach(fun(Module)->
rpc:call(Node, code, purge, [Module]),
rpc:call(Node, code, load_file, [Module])
end,ModList)
end,NodeList).
|
c1e1be7ce78c192ecf0abcfa998b1d49bbb5f85392397c9fac7019ef70d0183f | antono/guix-debian | guile-wm.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2013 , 2014 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages guile-wm)
#:use-module (guix licenses)
#:use-module (gnu packages)
#:use-module (gnu packages xorg)
#:use-module (gnu packages guile)
#:use-module (gnu packages pkg-config)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu))
(define-public guile-xcb
(package
(name "guile-xcb")
(version "1.3")
(source (origin
(method url-fetch)
(uri (string-append "-xcb-"
version ".tar.gz"))
(sha256
(base32
"04dvbqdrrs67490gn4gkq9zk8mqy3mkls2818ha4p0ckhh0pm149"))))
(build-system gnu-build-system)
Parallel builds fail .
#:parallel-build? #f
#:configure-flags (list (string-append
"--with-guile-site-dir="
(assoc-ref %outputs "out")
"/share/guile/site/2.0")
(string-append
"--with-guile-site-ccache-dir="
(assoc-ref %outputs "out")
"/share/guile/site/2.0"))))
(native-inputs `(("pkg-config" ,pkg-config)))
(inputs `(("guile" ,guile-2.0)
("xcb" ,xcb-proto)))
(home-page "-xcb/guile-xcb.html")
(synopsis "XCB bindings for Guile")
(description
"Guile-XCB implements the XCB protocol and provides all the tools
necessary to write X client code in Guile Scheme without any external
dependencies.")
(license gpl3+)))
(define-public guile-wm
(package
(name "guile-wm")
(version "1.0")
(source (origin
(method url-fetch)
(uri (string-append "-wm-"
version ".tar.gz"))
(sha256
(base32
"1l9qcz236jxvryndimjy62cf8zxf8i3f8vg3zpqqjhw15j9mdk3r"))))
(build-system gnu-build-system)
(arguments '(;; The '.scm' files go to $(datadir), so set that to the
;; standard value.
#:configure-flags (list (string-append "--datadir="
(assoc-ref %outputs "out")
"/share/guile/site/2.0"))
#:phases (alist-cons-before
'configure 'set-go-directory
(lambda* (#:key outputs #:allow-other-keys)
Install .go files to $ out / share / guile / site/2.0 .
(let ((out (assoc-ref outputs "out")))
(substitute* "module/Makefile.in"
(("^wmdir = .*$")
(string-append "wmdir = " out
"/share/guile/site/2.0\n")))))
(alist-cons-after
'install 'set-load-path
(lambda* (#:key inputs outputs #:allow-other-keys)
Put Guile - XCB 's and Guile - WM 's modules in the
;; search path of PROG.
(let* ((out (assoc-ref outputs "out"))
(prog (string-append out "/bin/guile-wm"))
(mods (string-append
out "/share/guile/site/2.0"))
(xcb (string-append
(assoc-ref inputs "guile-xcb")
"/share/guile/site/2.0")))
(wrap-program
prog
`("GUILE_LOAD_PATH" ":" prefix (,mods ,xcb))
`("GUILE_LOAD_COMPILED_PATH" ":" prefix
(,mods ,xcb)))))
%standard-phases))))
(native-inputs `(("pkg-config" ,pkg-config)))
(inputs `(("guile" ,guile-2.0)
("guile-xcb" ,guile-xcb)))
(home-page "-xcb/guile-wm.html")
(synopsis "X11 window manager toolkit in Scheme")
(description
"Guile-WM is a simple window manager that's completely customizable—you
have total control of what it does by choosing which modules to include.
Included with it are a few modules that provide basic TinyWM-like window
management, some window record-keeping, multi-monitor support, and emacs-like
keymaps and minibuffer. At this point, it's just enough to get you started.")
(license gpl3+)))
| null | https://raw.githubusercontent.com/antono/guix-debian/85ef443788f0788a62010a942973d4f7714d10b4/gnu/packages/guile-wm.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
The '.scm' files go to $(datadir), so set that to the
standard value.
search path of PROG. | Copyright © 2013 , 2014 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages guile-wm)
#:use-module (guix licenses)
#:use-module (gnu packages)
#:use-module (gnu packages xorg)
#:use-module (gnu packages guile)
#:use-module (gnu packages pkg-config)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu))
(define-public guile-xcb
(package
(name "guile-xcb")
(version "1.3")
(source (origin
(method url-fetch)
(uri (string-append "-xcb-"
version ".tar.gz"))
(sha256
(base32
"04dvbqdrrs67490gn4gkq9zk8mqy3mkls2818ha4p0ckhh0pm149"))))
(build-system gnu-build-system)
Parallel builds fail .
#:parallel-build? #f
#:configure-flags (list (string-append
"--with-guile-site-dir="
(assoc-ref %outputs "out")
"/share/guile/site/2.0")
(string-append
"--with-guile-site-ccache-dir="
(assoc-ref %outputs "out")
"/share/guile/site/2.0"))))
(native-inputs `(("pkg-config" ,pkg-config)))
(inputs `(("guile" ,guile-2.0)
("xcb" ,xcb-proto)))
(home-page "-xcb/guile-xcb.html")
(synopsis "XCB bindings for Guile")
(description
"Guile-XCB implements the XCB protocol and provides all the tools
necessary to write X client code in Guile Scheme without any external
dependencies.")
(license gpl3+)))
(define-public guile-wm
(package
(name "guile-wm")
(version "1.0")
(source (origin
(method url-fetch)
(uri (string-append "-wm-"
version ".tar.gz"))
(sha256
(base32
"1l9qcz236jxvryndimjy62cf8zxf8i3f8vg3zpqqjhw15j9mdk3r"))))
(build-system gnu-build-system)
#:configure-flags (list (string-append "--datadir="
(assoc-ref %outputs "out")
"/share/guile/site/2.0"))
#:phases (alist-cons-before
'configure 'set-go-directory
(lambda* (#:key outputs #:allow-other-keys)
Install .go files to $ out / share / guile / site/2.0 .
(let ((out (assoc-ref outputs "out")))
(substitute* "module/Makefile.in"
(("^wmdir = .*$")
(string-append "wmdir = " out
"/share/guile/site/2.0\n")))))
(alist-cons-after
'install 'set-load-path
(lambda* (#:key inputs outputs #:allow-other-keys)
Put Guile - XCB 's and Guile - WM 's modules in the
(let* ((out (assoc-ref outputs "out"))
(prog (string-append out "/bin/guile-wm"))
(mods (string-append
out "/share/guile/site/2.0"))
(xcb (string-append
(assoc-ref inputs "guile-xcb")
"/share/guile/site/2.0")))
(wrap-program
prog
`("GUILE_LOAD_PATH" ":" prefix (,mods ,xcb))
`("GUILE_LOAD_COMPILED_PATH" ":" prefix
(,mods ,xcb)))))
%standard-phases))))
(native-inputs `(("pkg-config" ,pkg-config)))
(inputs `(("guile" ,guile-2.0)
("guile-xcb" ,guile-xcb)))
(home-page "-xcb/guile-wm.html")
(synopsis "X11 window manager toolkit in Scheme")
(description
"Guile-WM is a simple window manager that's completely customizable—you
have total control of what it does by choosing which modules to include.
Included with it are a few modules that provide basic TinyWM-like window
management, some window record-keeping, multi-monitor support, and emacs-like
keymaps and minibuffer. At this point, it's just enough to get you started.")
(license gpl3+)))
|
431efc33ee357cf4b3f8c88bd1435349c4330c2b5948eb85fc6bf8a0b6eaeb25 | nvim-treesitter/nvim-treesitter | locals.scm | ; Scopes
[
(infrastructure)
(call_expression)
(lambda_expression)
(subscript_expression)
(if_statement)
(for_statement)
(array)
(object)
(interpolation)
] @scope
; References
(property_identifier) @reference
(call_expression
(identifier) @reference)
(object_property
(_)
":"
(identifier) @reference)
(resource_expression
(identifier) @reference)
; Definitions
(type) @definition.associated
(object_property
(identifier) @definition.field
(_))
(object_property
(compatible_identifier) @definition.field
(_))
(import_name) @definition.import
(module_declaration
(identifier) @definition.namespace)
(parameter_declaration
(identifier) @definition.parameter
(_))
(type_declaration
(identifier) @definition.type
(_))
(variable_declaration
(identifier) @definition.var
(_))
(metadata_declaration
(identifier) @definition.var
(_))
(output_declaration
(identifier) @definition.var
(_))
(for_statement
"for"
(for_loop_parameters
(loop_variable) @definition.var
(loop_enumerator) @definition.var))
| null | https://raw.githubusercontent.com/nvim-treesitter/nvim-treesitter/028b7dfbddffdd4738a25a76b280d9b26f38ce42/queries/bicep/locals.scm | scheme | Scopes
References
Definitions |
[
(infrastructure)
(call_expression)
(lambda_expression)
(subscript_expression)
(if_statement)
(for_statement)
(array)
(object)
(interpolation)
] @scope
(property_identifier) @reference
(call_expression
(identifier) @reference)
(object_property
(_)
":"
(identifier) @reference)
(resource_expression
(identifier) @reference)
(type) @definition.associated
(object_property
(identifier) @definition.field
(_))
(object_property
(compatible_identifier) @definition.field
(_))
(import_name) @definition.import
(module_declaration
(identifier) @definition.namespace)
(parameter_declaration
(identifier) @definition.parameter
(_))
(type_declaration
(identifier) @definition.type
(_))
(variable_declaration
(identifier) @definition.var
(_))
(metadata_declaration
(identifier) @definition.var
(_))
(output_declaration
(identifier) @definition.var
(_))
(for_statement
"for"
(for_loop_parameters
(loop_variable) @definition.var
(loop_enumerator) @definition.var))
|
2b11d6eda40d06795cdc2203058eec851751f369bca5ff6eb59d61e548905a16 | tlaplus/tlapm | tptp.mli | Copyright 2004 INRIA
open Phrase;;
val translate : string list -> tpphrase list -> phrase list * string;;
| null | https://raw.githubusercontent.com/tlaplus/tlapm/b82e2fd049c5bc1b14508ae16890666c6928975f/zenon/tptp.mli | ocaml | Copyright 2004 INRIA
open Phrase;;
val translate : string list -> tpphrase list -> phrase list * string;;
| |
c0fb9ddcebc8568a9a4c2dc8eed9d13fb3fe303e6ed9370da442053776974f1d | mainland/dph | Vectorised1.hs | {-# LANGUAGE ParallelArrays #-}
{-# OPTIONS -fvectorise #-}
{-# OPTIONS -fno-spec-constr-count #-}
module Vectorised1 (closest1PA, closeststupid1PA) where
import Points2D.Types
import Data.Array.Parallel
import Data.Array.Parallel.Prelude.Double as D
import qualified Data.Array.Parallel.Prelude.Int as I
import qualified Prelude as P
-- removed the sqrt here - only some users actually need it
distance :: Point -> Point -> Double
distance (x1, y1) (x2, y2)
= ( (x2 D.- x1) D.* (x2 D.- x1)
D.+ (y2 D.- y1) D.* (y2 D.- y1) )
Distance between two points , but return a very large number if they 're the same ...
distancex :: Point -> Point -> Double
distancex a b
= let d = distance a b
in if d D.== 0 then 1e100 else d
An n^2 algorithm for finding closest pair .
-- Our divide and conquer drops back to this once there are few enough points
closeststupid :: [: Point :] -> (Point,Point)
closeststupid pts
= let i = minIndexP [: distancex a b | a <- pts, b <- pts :]
in [: (a,b) | a <- pts, b <- pts :] !: i
| Find the points within distance @d@ of the edge along x=@x0@.
near_boundary
:: [:Point:] -- ^ array of points
-> Double -- ^ split / x boundary
-> Double -- ^ maximum distance
-> [:Point:]
near_boundary pts x0 d
= filterP check pts
where
check (x1,_) = D.abs (x1 D.- x0) D.< d
-- | Find pair with minimum distance between tops * bots
merge :: [:Point:] -> [:Point:] -> (Point,Point)
merge tops bots
= let i = minIndexP [: distancex a b | a <- tops, b <- bots :]
in [: (a,b) | a <- tops, b <- bots :] !: i
-- | Given closest pairs in points above and below split,
-- we want to find the minimum of all points.
-- To do this, we find the points within a certain distance
-- from the split boundary, and check them against each other.
-- We then take the minimum of all points.
merge_pairs
:: Double -- ^ split / x boundary
-> [:Point:] -- ^ points above split
-> [:Point:] -- ^ points below split
-> (Point,Point) -- ^ closest pair in points above
-> (Point,Point) -- ^ closest pair in points below
-> (Point,Point)
merge_pairs x0 top bot (a1,a2) (b1,b2)
= let da = distancex a1 a2
db = distancex b1 b2
min2 = if da D.< db then (a1,a2) else (b1,b2)
mind = D.min da db
d = sqrt mind
topn = near_boundary top x0 d
botn = near_boundary bot x0 d
in if lengthP topn I.* lengthP botn I.== 0
then min2
else let
(m,n)= merge topn botn
dm = distancex m n
in if dm D.< mind then (m,n) else min2
| Find closest two points in array of points .
-- Use naive n^2 algorithm when there are few points,
-- otherwise split along median X and do each half recursively.
And merge the result of the two halves .
closest :: [:Point:] -> (Point,Point)
closest pts
| lengthP pts I.< 250 = closeststupid pts
| otherwise =
let (xs,ys) = unzipP pts
xd = maximumP xs D.- minimumP xs
yd = maximumP ys D.- minimumP ys
mid = median xs
top = filterP (\(x,_) -> x D.>= mid) pts
-- NOTE was error here "combine2SPack" when "not (x >= mid)"
bot = filterP (\(x,_) -> x D.< mid) pts
top' = closest top
bot' = closest bot
pair = merge_pairs mid top bot top' bot'
in pair
closest1PA :: PArray Point -> (Point,Point)
closest1PA ps = closest (fromPArrayP ps)
closeststupid1PA :: PArray Point -> (Point,Point)
closeststupid1PA ps = closeststupid (fromPArrayP ps)
median :: [: Double :] -> Double
median xs = median' xs (lengthP xs `I.div` 2)
median':: [: Double :] -> Int -> Double
median' xs k =
let p = xs !: (lengthP xs `I.div` 2)
ls = [:x | x <- xs, x D.< p:]
in if k I.< (lengthP ls)
then median' ls k
else
let gs = [:x | x <- xs, x D.> p:]
len = lengthP xs I.- lengthP gs
in if k I.>= len
then median' gs (k I.- len)
else p
| null | https://raw.githubusercontent.com/mainland/dph/742078c9e18b7dcf6526348e08d2dd16e2334739/dph-examples/examples/spectral/ClosestPairs/dph/Vectorised1.hs | haskell | # LANGUAGE ParallelArrays #
# OPTIONS -fvectorise #
# OPTIONS -fno-spec-constr-count #
removed the sqrt here - only some users actually need it
Our divide and conquer drops back to this once there are few enough points
^ array of points
^ split / x boundary
^ maximum distance
| Find pair with minimum distance between tops * bots
| Given closest pairs in points above and below split,
we want to find the minimum of all points.
To do this, we find the points within a certain distance
from the split boundary, and check them against each other.
We then take the minimum of all points.
^ split / x boundary
^ points above split
^ points below split
^ closest pair in points above
^ closest pair in points below
Use naive n^2 algorithm when there are few points,
otherwise split along median X and do each half recursively.
NOTE was error here "combine2SPack" when "not (x >= mid)" | module Vectorised1 (closest1PA, closeststupid1PA) where
import Points2D.Types
import Data.Array.Parallel
import Data.Array.Parallel.Prelude.Double as D
import qualified Data.Array.Parallel.Prelude.Int as I
import qualified Prelude as P
distance :: Point -> Point -> Double
distance (x1, y1) (x2, y2)
= ( (x2 D.- x1) D.* (x2 D.- x1)
D.+ (y2 D.- y1) D.* (y2 D.- y1) )
Distance between two points , but return a very large number if they 're the same ...
distancex :: Point -> Point -> Double
distancex a b
= let d = distance a b
in if d D.== 0 then 1e100 else d
An n^2 algorithm for finding closest pair .
closeststupid :: [: Point :] -> (Point,Point)
closeststupid pts
= let i = minIndexP [: distancex a b | a <- pts, b <- pts :]
in [: (a,b) | a <- pts, b <- pts :] !: i
| Find the points within distance @d@ of the edge along x=@x0@.
near_boundary
-> [:Point:]
near_boundary pts x0 d
= filterP check pts
where
check (x1,_) = D.abs (x1 D.- x0) D.< d
merge :: [:Point:] -> [:Point:] -> (Point,Point)
merge tops bots
= let i = minIndexP [: distancex a b | a <- tops, b <- bots :]
in [: (a,b) | a <- tops, b <- bots :] !: i
merge_pairs
-> (Point,Point)
merge_pairs x0 top bot (a1,a2) (b1,b2)
= let da = distancex a1 a2
db = distancex b1 b2
min2 = if da D.< db then (a1,a2) else (b1,b2)
mind = D.min da db
d = sqrt mind
topn = near_boundary top x0 d
botn = near_boundary bot x0 d
in if lengthP topn I.* lengthP botn I.== 0
then min2
else let
(m,n)= merge topn botn
dm = distancex m n
in if dm D.< mind then (m,n) else min2
| Find closest two points in array of points .
And merge the result of the two halves .
closest :: [:Point:] -> (Point,Point)
closest pts
| lengthP pts I.< 250 = closeststupid pts
| otherwise =
let (xs,ys) = unzipP pts
xd = maximumP xs D.- minimumP xs
yd = maximumP ys D.- minimumP ys
mid = median xs
top = filterP (\(x,_) -> x D.>= mid) pts
bot = filterP (\(x,_) -> x D.< mid) pts
top' = closest top
bot' = closest bot
pair = merge_pairs mid top bot top' bot'
in pair
closest1PA :: PArray Point -> (Point,Point)
closest1PA ps = closest (fromPArrayP ps)
closeststupid1PA :: PArray Point -> (Point,Point)
closeststupid1PA ps = closeststupid (fromPArrayP ps)
median :: [: Double :] -> Double
median xs = median' xs (lengthP xs `I.div` 2)
median':: [: Double :] -> Int -> Double
median' xs k =
let p = xs !: (lengthP xs `I.div` 2)
ls = [:x | x <- xs, x D.< p:]
in if k I.< (lengthP ls)
then median' ls k
else
let gs = [:x | x <- xs, x D.> p:]
len = lengthP xs I.- lengthP gs
in if k I.>= len
then median' gs (k I.- len)
else p
|
d4ad9e14b66a339dc2e33db12be697fc368fa4a7a3d4ab4e7461ee3f6a7beacf | incoherentsoftware/defect-process | FloatingAttack.hs | module Player.Weapon.All.Scythe.FloatingAttack
( FloatingAttackOnDoneBehavior(..)
, FloatingAttackData(..)
, mkFloatingAttack
, removeFloatingAtkMsg
, updateFloatingAtkStatusMsg
, addFloatingAtkMsgs
) where
import Control.Monad (when)
import Control.Monad.IO.Class (MonadIO)
import Data.Maybe (fromMaybe, isJust)
import qualified Data.Set as S
import Attack
import Collision
import Configs
import Configs.All.Player
import Configs.All.PlayerWeapon
import Configs.All.PlayerWeapon.Scythe
import Constants
import Id
import InfoMsg.Util
import Msg
import Player.Weapon as W
import Player.Weapon.All.Scythe.Data
import Projectile as P
import Util
import Window.Graphics
import Window.InputState
import World.Surface
import World.ZIndex
-- Fixed-size collision box centered on the floating attack; used for
-- surface collisions and as the fallback hitbox when the current attack
-- frame defines none. Units presumably world pixels — confirm.
innerHitboxWidth = 88.0 * 2.0 :: Float
innerHitboxHeight = 88.0 * 2.0 :: Float
-- Tolerance within which a wall contact is treated as a graze and
-- ignored when deciding whether a vert-slash lands on a surface.
vertSlashWallLandEpsilon = 5.0 :: Float
-- Color of the line drawn from the player to a lingering floating attack.
floatingTrackingLineColor = Color 38 40 52 255 :: Color
-- Attack animation frame tags that modulate collision handling below.
surfaceCollisionOnlyFrameTagName = FrameTagName "surfaceCollisionOnly" :: FrameTagName
ignoreSurfaceCollisionFrameTagName = FrameTagName "ignoreSurfaceCollision" :: FrameTagName
vertSlashLandPlatformFrameTagName = FrameTagName "vertSlashLandPlatform" :: FrameTagName
vertSlashLandSurfaceFrameTagName = FrameTagName "vertSlashLandSurface" :: FrameTagName
-- Collision categories the floating attack projectile participates in.
defaultRegisteredCollisions = S.fromList
    [ ProjRegisteredEnemyCollision
    , ProjRegisteredRoomItemCollision
    , ProjRegisteredSurfaceCollision
    ] :: S.Set ProjectileRegisteredCollision
-- | What a floating attack does once its current attack animation
-- finishes (see 'attackDescOnDoneBehavior' for the mapping).
data FloatingAttackOnDoneBehavior
    = LingerOnDone                        -- ^ stay in place (glow attacks awaiting player input)
    | VanishOnDone                        -- ^ despawn and mark the scythe's floating attack inactive
    | NextAttackOnDone AttackDescription  -- ^ chain into the given attack at the same position/direction
    deriving Eq
-- | Projectile payload for a floating scythe attack.
data FloatingAttackData = FloatingAttackData
    { _attack            :: Attack                       -- ^ underlying attack driving animation/hitbox/velocity
    , _onDone            :: FloatingAttackOnDoneBehavior -- ^ what to do when the attack animation completes
    , _scytheAttackDescs :: ScytheAttackDescriptions     -- ^ all scythe attack descriptions (for chaining/overlays)
    , _config            :: ScytheConfig                 -- ^ scythe config snapshot (oscillation, tracking line)
    , _knownPlayerPos    :: Maybe Pos2                   -- ^ latest player position seen via info messages
    , _enoughChargeHeld  :: Bool                         -- ^ whether the player currently holds enough charge
    , _glowOverlaySprite :: Maybe Sprite                 -- ^ overlay sprite shown while charge is held
    }
-- | Build the projectile payload for a new floating attack: spawn the
-- underlying 'Attack' and snapshot the scythe config. All tracking
-- fields (player position, charge, glow overlay) start out empty.
mkFloatingAttackData
    :: (ConfigsRead m, MonadIO m)
    => Pos2
    -> Direction
    -> AttackDescription
    -> ScytheAttackDescriptions
    -> m FloatingAttackData
mkFloatingAttackData atkPos atkDir atkDesc scytheAtkDescs = do
    newAtk    <- mkAttack atkPos atkDir atkDesc
    scytheCfg <- readConfig _playerWeapon _scythe

    let floatingAtkData = FloatingAttackData
            { _attack            = newAtk
            , _onDone            = attackDescOnDoneBehavior atkDesc scytheAtkDescs
            , _scytheAttackDescs = scytheAtkDescs
            , _config            = scytheCfg
            , _knownPlayerPos    = Nothing
            , _enoughChargeHeld  = False
            , _glowOverlaySprite = Nothing
            }
    return floatingAtkData
-- | Fixed-size box centered on the attack's position; used for surface
-- collision checks and as the fallback hitbox.
floatingAttackInnerHitbox :: Attack -> Hitbox
floatingAttackInnerHitbox atk =
    let
        centerOffset = Pos2 (innerHitboxWidth / 2.0) (innerHitboxHeight / 2.0)
        topLeftPos   = _pos (atk :: Attack) `vecSub` centerOffset
    in rectHitbox topLeftPos innerHitboxWidth innerHitboxHeight
-- | The projectile's hitbox: the attack's own hitbox when the current
-- frame defines one, otherwise the fixed inner box.
floatingAttackHitbox :: ProjectileHitbox FloatingAttackData
floatingAttackHitbox floatingAtk = case attackHitbox atk of
    Just atkHbx -> atkHbx
    Nothing     -> floatingAttackInnerHitbox atk
    where atk = _attack (P._data floatingAtk :: FloatingAttackData)
-- | On-finish behavior for a given attack description: the five multi
-- slash hits chain into each other and finally into a lingering glow;
-- the other chargeable slashes chain into their glow variants; glow
-- attacks linger in place; any other attack simply vanishes.
attackDescOnDoneBehavior :: AttackDescription -> ScytheAttackDescriptions -> FloatingAttackOnDoneBehavior
attackDescOnDoneBehavior atkDesc scytheAtkDescs = fromMaybe VanishOnDone (lookup atkDesc behaviors)
    where
        behaviors =
            [ (_multiSlash1 scytheAtkDescs, NextAttackOnDone (_multiSlash2 scytheAtkDescs))
            , (_multiSlash2 scytheAtkDescs, NextAttackOnDone (_multiSlash3 scytheAtkDescs))
            , (_multiSlash3 scytheAtkDescs, NextAttackOnDone (_multiSlash4 scytheAtkDescs))
            , (_multiSlash4 scytheAtkDescs, NextAttackOnDone (_multiSlash5 scytheAtkDescs))
            , (_multiSlash5 scytheAtkDescs, NextAttackOnDone (_multiSlashGlow scytheAtkDescs))
            , (_multiSlashGlow scytheAtkDescs, LingerOnDone)
            , (_vertSpinSlash scytheAtkDescs, NextAttackOnDone (_vertSpinSlashGlow scytheAtkDescs))
            , (_vertSpinSlashGlow scytheAtkDescs, LingerOnDone)
            , (_riseSlash scytheAtkDescs, NextAttackOnDone (_riseSlashGlow scytheAtkDescs))
            , (_riseSlashGlow scytheAtkDescs, LingerOnDone)
            , (_pullSlash scytheAtkDescs, NextAttackOnDone (_pullSlashGlow scytheAtkDescs))
            , (_pullSlashGlow scytheAtkDescs, LingerOnDone)
            , (_diagSpinSlash scytheAtkDescs, NextAttackOnDone (_diagSpinSlashGlow scytheAtkDescs))
            , (_diagSpinSlashGlow scytheAtkDescs, LingerOnDone)
            ]
-- | Spawn a floating scythe attack projectile at the given position and
-- direction. The projectile is created with a dummy hitbox placeholder;
-- its real hitbox, think/update/draw, and collision handling are
-- installed via the record update below.
mkFloatingAttack
    :: (ConfigsRead m, MonadIO m)
    => Pos2
    -> Direction
    -> AttackDescription
    -> ScytheAttackDescriptions
    -> m (Projectile FloatingAttackData)
mkFloatingAttack pos dir atkDesc scytheAtkDescs = do
    floatingAtkData <- mkFloatingAttackData pos dir atkDesc scytheAtkDescs
    msgId <- newId
    let
        atk = _attack (floatingAtkData :: FloatingAttackData)
        dummyHbx = DummyHitbox $ _pos (atk :: Attack)
    -- maxSecs ttl: lifetime is controlled by explicit remove messages,
    -- not by a timer
    return $ (mkProjectile floatingAtkData msgId dummyHbx maxSecs)
        { _hitbox = floatingAttackHitbox
        , _registeredCollisions = defaultRegisteredCollisions
        , _think = thinkFloatingAttack
        , _update = updateFloatingAttack
        , _draw = drawFloatingAttack
        , _processCollisions = processFloatingAttackCollisions
        }
-- | True when a despawn message (ttl forced to 0) was sent to this
-- floating attack projectile.
wasSentRemoveMsg :: MsgsRead ThinkProjectileMsgsPhase m => Projectile FloatingAttackData -> m Bool
wasSentRemoveMsg floatingAtk = any isRemoveMsg <$> readMsgsTo (P._msgId floatingAtk)
    where
        isRemoveMsg :: ProjectileMsgPayload -> Bool
        isRemoveMsg (ProjectileMsgSetTtl 0.0) = True
        isRemoveMsg _                         = False
-- | Per-frame think: if a remove message was already sent, only report
-- position; otherwise forward the attack's own think messages and apply
-- the on-done behavior — linger attacks flag themselves ready for player
-- input, vanish attacks despawn and mark the scythe inactive, and
-- chaining attacks spawn the follow-up projectile in their place.
thinkFloatingAttack
    :: (ConfigsRead m, MonadIO m, MsgsRead ThinkProjectileMsgsPhase m)
    => ProjectileThink FloatingAttackData m
thinkFloatingAttack floatingAtk = wasSentRemoveMsg floatingAtk >>= \case
    True  -> return [infoPosMsg]
    False -> ((infoPosMsg:thinkAttack atk) ++) <$> case _onDone floatingAtkData of
        LingerOnDone
            | atkCancelable || atkDone -> return floatingAtkReadyMsgs
        VanishOnDone
            | atkDone ->
                let
                    updateScytheSt = \scythe -> scythe
                        { W._data = (W._data scythe) {_floatingAttackStatus = FloatingAttackInactive}
                        }
                in return
                    [ mkMsg $ PlayerMsgUpdateWeapon updateScytheSt
                    , mkMsgTo (ProjectileMsgSetTtl 0.0) floatingAtkMsgId
                    ]
        NextAttackOnDone nextFloatingAtkDesc
            | atkDone -> mkNextFloatingAttackMsgs nextFloatingAtkDesc
            | atkCancelable -> return floatingAtkReadyMsgs
        _ -> return []
    where
        floatingAtkData = P._data floatingAtk
        floatingAtkMsgId = P._msgId floatingAtk
        atk = _attack (floatingAtkData :: FloatingAttackData)
        atkPos = _pos (atk :: Attack)
        atkDir = _dir (atk :: Attack)
        atkDone = _done atk
        atkCancelable = attackCancelable atk
        infoPosMsg = mkMsg $ InfoMsgProjectilePos atkPos NullId floatingAtkMsgId
        scytheAtkDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)

        -- spawn the next attack in the chain at this attack's current
        -- position/direction, point the scythe status at it, then despawn
        -- this projectile
        mkNextFloatingAttackMsgs
            :: (ConfigsRead m1, MonadIO m1)
            => AttackDescription
            -> m1 [Msg ThinkProjectileMsgsPhase]
        mkNextFloatingAttackMsgs nextFloatingAtkDesc = do
            nextFloatingAtk <- mkFloatingAttack atkPos atkDir nextFloatingAtkDesc scytheAtkDescs
            let
                nextFloatingAtkMsgId = P._msgId nextFloatingAtk
                floatingAtkStatus = FloatingAttackActive nextFloatingAtkMsgId 0.0
                updateScytheSt = \scythe -> scythe
                    { W._data = (W._data scythe) {_floatingAttackStatus = floatingAtkStatus}
                    }
            return
                [ mkMsg $ NewUpdateProjectileMsgAdd (Some nextFloatingAtk)
                , mkMsg $ PlayerMsgUpdateWeapon updateScytheSt
                , mkMsgTo (ProjectileMsgSetTtl 0.0) floatingAtkMsgId
                ]

        -- mark the floating attack as ready for follow-up player input
        floatingAtkReadyMsgs =
            let
                floatingAtkStatus = FloatingAttackActiveReady floatingAtkMsgId atkPos atkDir 0.0
                updateScytheSt = \scythe -> scythe
                    { W._data = (W._data scythe) {_floatingAttackStatus = floatingAtkStatus}
                    }
            in [mkMsg $ PlayerMsgUpdateWeapon updateScytheSt]
-- | Current velocity of the floating attack: zero once the attack has
-- finished, otherwise the attack-frame velocity resolved against the
-- projectile's existing velocity.
floatingAttackVel :: Attack -> Projectile FloatingAttackData -> Vel2
floatingAttackVel atk floatingAtk = if _done atk
    then zeroVel2
    else attackVelToVel2 (attackVel atk) (P._vel floatingAtk)
-- | Overlay sprite shown while the player holds enough charge on a glow
-- attack; 'Nothing' for attack descriptions without a glow overlay.
attackGlowOverlay :: Attack -> ScytheAttackDescriptions -> Maybe Sprite
attackGlowOverlay atk scytheAtkDescs = go overlayTable
    where
        go []               = Nothing
        go ((sel, ovl):rest)
            | _description atk == sel scytheAtkDescs = Just $ _sprite (ovl scytheAtkDescs)
            | otherwise                              = go rest

        overlayTable =
            [ (_multiSlashGlow, _multiSlashGlowOverlay)
            , (_vertSpinSlashGlow, _vertSpinSlashGlowOverlay)
            , (_riseSlashGlow, _riseSlashGlowOverlay)
            , (_pullSlashGlow, _pullSlashGlowOverlay)
            , (_diagSpinSlashGlow, _diagSpinSlashGlowOverlay)
            ]
-- | Per-frame update: advance the attack animation/position using the
-- attack-driven velocity, record the latest known player position from
-- info messages, and step the charge glow overlay sprite.
updateFloatingAttack
    :: (InputRead m, MsgsReadWrite UpdateProjectileMsgsPhase m)
    => ProjectileUpdate FloatingAttackData m
updateFloatingAttack floatingAtk =
    let
        -- scan info messages for the player's current position
        processInfoMsgs :: [InfoMsgPayload] -> Maybe Pos2
        processInfoMsgs [] = Nothing
        processInfoMsgs (d:ds) = case d of
            InfoMsgPlayer playerInfo -> Just $ playerInfoPos playerInfo
            _ -> processInfoMsgs ds

        floatingAtkData = P._data floatingAtk
        atk = _attack (floatingAtkData :: FloatingAttackData)
        vel = floatingAttackVel atk floatingAtk
        pos = _pos (atk :: Attack) `vecAdd` toPos2 (vel `vecMul` timeStep)
        dir = _dir (atk :: Attack)
        enoughChargeHeld = _enoughChargeHeld floatingAtkData
        scytheAtkDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)

        -- glow overlay: keep animating while charge is held, start one
        -- when charge first becomes held, drop it once charge is released
        glowOverlaySpr = case _glowOverlaySprite floatingAtkData of
            Just overlaySpr
                | enoughChargeHeld -> Just $ updateSprite overlaySpr
            Nothing
                | enoughChargeHeld -> attackGlowOverlay atk scytheAtkDescs
            _ -> Nothing
    in do
        knownPlayerPos <- processInfoMsgs <$> readMsgs
        return $ floatingAtk
            { P._data = floatingAtkData
                { _attack = updateAttack pos dir atk
                , _knownPlayerPos = knownPlayerPos
                , _glowOverlaySprite = glowOverlaySpr
                } :: FloatingAttackData
            , P._vel = vel
            }
-- | Resolve all collisions registered against this projectile. Enemy and
-- room-item hits only apply while the current attack frame has a real
-- hitbox (and, for enemies, is not surface-collision-only); surface hits
-- are skipped while the ignore-surface-collision frame tag is active.
-- Surface collisions can turn a falling vert-slash into its landing
-- attack, or push other attacks back out of solid geometry.
processFloatingAttackCollisions :: ProjectileProcessCollisions FloatingAttackData
processFloatingAttackCollisions collisions floatingAtk = foldr processCollision [] collisions
    where
        floatingAtkData = P._data floatingAtk
        atk = _attack floatingAtkData
        isAtkHitbox = isJust $ attackHitbox atk
        -- frame tags modulating which collision kinds apply this frame
        isSurfaceCollisionOnly = surfaceCollisionOnlyFrameTagName `isAttackFrameTag` atk
        isIgnoreSurfaceCollision = ignoreSurfaceCollisionFrameTagName `isAttackFrameTag` atk

        processCollision :: ProjectileCollision -> [Msg ThinkCollisionMsgsPhase] -> [Msg ThinkCollisionMsgsPhase]
        processCollision collision !msgs = case collision of
            ProjEnemyCollision enemy
                | isAtkHitbox && not isSurfaceCollisionOnly -> attackEnemyHitMessages enemy atk ++ msgs
            ProjRoomItemCollision (Some roomItem)
                | isAtkHitbox -> attackCollisionEntityHitMessages roomItem atk ++ msgs
            ProjSurfaceCollision surfaceHbx surfaceType
                | not isIgnoreSurfaceCollision -> surfaceHitMessages surfaceType surfaceHbx ++ msgs
            _ -> msgs

        -- landing attack placed at the projectile's x on top of the
        -- collided surface, keeping the current facing direction
        mkVertSlashLandAttack :: MonadIO m => Hitbox -> Projectile FloatingAttackData -> m Attack
        mkVertSlashLandAttack surfaceHbx fa = mkAttack pos dir (_vertSlashLand scytheAtkDescs)
            where
                faAtk = _attack $ P._data fa
                x = vecX $ _pos (faAtk :: Attack)
                pos = Pos2 x (hitboxTop surfaceHbx)
                dir = _dir (faAtk :: Attack)
                scytheAtkDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)

        surfaceHitMessages :: SurfaceType -> Hitbox -> [Msg ThinkCollisionMsgsPhase]
        surfaceHitMessages surfaceType surfaceHbx = case surfaceType of
            GeneralSurface -> generalSurfaceHitMessages surfaceHbx
            PlatformSurface -> vertSlashLandPlatformMsgs
            SpeedRailSurface _ -> vertSlashLandPlatformMsgs
            where
                innerHbx = floatingAttackInnerHitbox atk
                isVertSlashLandPlatformFrame = vertSlashLandPlatformFrameTagName `isAttackFrameTag` atk
                isIntersect = surfaceHbx `intersectsHitbox` innerHbx

                -- platforms/speed rails: only a vert-slash in its landing
                -- frames converts into the landing attack
                vertSlashLandPlatformMsgs
                    | isVertSlashLandPlatformFrame && isIntersect =
                        let
                            update = \fa ->
                                let
                                    faData = P._data fa
                                    scytheAtkDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)
                                in if
                                    -- vert-slash -> vert-slash-land
                                    | _attack faData `attackIs` _vertSlash scytheAtkDescs -> do
                                        vertSlashLandAtk <- mkVertSlashLandAttack surfaceHbx fa
                                        return $ fa
                                            { P._data = faData {_attack = vertSlashLandAtk}
                                            }
                                    | otherwise -> return fa
                        in [mkMsgTo (ProjectileMsgUpdateM update) (P._msgId floatingAtk)]
                    | otherwise = []

        generalSurfaceHitMessages :: Hitbox -> [Msg ThinkCollisionMsgsPhase]
        generalSurfaceHitMessages surfaceHbx
            | surfaceHbx `intersectsHitbox` innerHbx =
                let
                    Vel2 velX velY = floatingAttackVel atk floatingAtk
                    -- per-axis push-out distance opposing the travel
                    -- direction; Nothing when not moving along that axis
                    offsetX
                        | velX `approxEq` 0.0 = Nothing
                        | velX < 0.0 = Just $ hitboxRight surfaceHbx - hitboxLeft innerHbx
                        | otherwise = Just $ hitboxLeft surfaceHbx - hitboxRight innerHbx
                    offsetY
                        | velY `approxEq` 0.0 = Nothing
                        | velY < 0.0 = Just $ hitboxBot surfaceHbx - hitboxTop innerHbx
                        | otherwise = Just $ hitboxTop surfaceHbx - hitboxBot innerHbx
                    -- resolve along the axis needing the smaller correction
                    offset = case (offsetX, offsetY) of
                        (Nothing, Nothing) -> zeroPos2
                        (Just offsetX', Nothing) -> Pos2 offsetX' 0.0
                        (Nothing, Just offsetY') -> Pos2 0.0 offsetY'
                        (Just offsetX', Just offsetY')
                            | abs offsetX' < abs offsetY' -> Pos2 offsetX' 0.0
                            | otherwise -> Pos2 0.0 offsetY'
                    update = \fa ->
                        let
                            faData = P._data fa
                            faAtk = _attack faData
                            scytheAttackDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)
                            vertSlash = _vertSlash scytheAttackDescs
                            noInsideSurfacesAtkDescs =
                                [ _pullSlash scytheAttackDescs
                                , _vertSpinSlash scytheAttackDescs
                                , _diagSpinSlash scytheAttackDescs
                                , _multiSlash1 scytheAttackDescs
                                , _riseSlash scytheAttackDescs
                                ]
                        in do
                            faAtk' <- if
                                -- vert-slash -> vert-slash-land
                                | faAtk `attackIs` vertSlash ->
                                    let
                                        -- ignore wall collisions
                                        approxEqEx' = \f1 f2 -> approxEqEx f1 f2 vertSlashWallLandEpsilon
                                        ignoreIntersect =
                                            approxEqEx' (hitboxRight surfaceHbx) (hitboxLeft innerHbx) ||
                                            approxEqEx' (hitboxLeft surfaceHbx) (hitboxRight innerHbx)
                                        isVertSlashLandSurfaceFrame =
                                            vertSlashLandSurfaceFrameTagName `isAttackFrameTag` faAtk
                                    in if
                                        | ignoreIntersect || not isVertSlashLandSurfaceFrame -> return faAtk
                                        | otherwise ->
                                            mkVertSlashLandAttack surfaceHbx fa
                                -- prevent various attacks from going inside surfaces
                                | faAtk `attackIn` noInsideSurfacesAtkDescs -> return $ faAtk
                                    { _pos = _pos (faAtk :: Attack) `vecAdd` offset
                                    }
                                | otherwise -> return faAtk
                            return $ fa
                                { P._data = faData {_attack = faAtk'}
                                }
                in [mkMsgTo (ProjectileMsgUpdateM update) (P._msgId floatingAtk)]
            | otherwise = []
            where innerHbx = floatingAttackInnerHitbox atk
-- | Draw the floating attack sprite (or its charge glow overlay), with a
-- sinusoidal bob while lingering, plus an optional tracking line from
-- the player's mid-height to the attack when enabled in config.
drawFloatingAttack :: (ConfigsRead m, GraphicsReadWrite m, MonadIO m) => ProjectileDraw FloatingAttackData m
drawFloatingAttack floatingAtk =
    let
        atk = _attack (P._data floatingAtk :: FloatingAttackData)
        atkDesc = _description atk
        floatingAtkData = P._data floatingAtk
        scytheAtkDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)
        cfg = _config (floatingAtkData :: FloatingAttackData)
        isAtkLingerOnDone = attackDescOnDoneBehavior atkDesc scytheAtkDescs == LingerOnDone

        -- vertical bob: sine wave driven by the sprite's elapsed time,
        -- only while the attack is a lingering (glow) attack
        oscillateOffset
            | isAtkLingerOnDone =
                let
                    amplitude = _glowOscillateAmplitude cfg
                    period = _glowOscillatePeriod cfg
                    elapsedSecs = _elapsedSecs $ attackSprite atk
                    offsetY = amplitude * sin (2 * pi / period * elapsedSecs)
                in Pos2 0.0 offsetY
            | otherwise = zeroPos2

        pos = _pos (atk :: Attack) `vecAdd` oscillateOffset
        vel = floatingAttackVel atk floatingAtk
        dir = _dir (atk :: Attack)
        -- glow overlay sprite takes precedence over the attack sprite
        spr = fromMaybe (attackSprite atk) (_glowOverlaySprite floatingAtkData)
    in do
        pos' <- graphicsLerpPos pos vel
        drawSprite pos' dir worldProjectileZIndex spr

        when (_showTrackingLine cfg && isAtkLingerOnDone) $
            case _knownPlayerPos floatingAtkData of
                Nothing -> return ()
                Just (Pos2 playerX playerY) -> do
                    playerHeight <- readConfig _player (_height :: PlayerConfig -> Float)
                    let playerY' = playerY - playerHeight / 2.0
                    drawLine (Pos2 playerX playerY') pos' floatingTrackingLineColor playerWeaponOverlayZIndex
-- | Message instructing the given floating attack projectile to despawn
-- immediately (ttl forced to zero).
removeFloatingAtkMsg :: MsgId -> Msg ThinkPlayerMsgsPhase
removeFloatingAtkMsg = mkMsgTo (ProjectileMsgSetTtl 0.0)
-- | Player message that overwrites the scythe weapon's floating attack
-- status with the given value.
updateFloatingAtkStatusMsg :: AllowMsgWrite p PlayerMsgPayload => FloatingAttackStatus -> Msg p
updateFloatingAtkStatusMsg floatingAtkStatus = mkMsg $ PlayerMsgUpdateWeapon $ \scythe -> scythe
    { W._data = (W._data scythe) {_floatingAttackStatus = floatingAtkStatus}
    }
-- | Messages that summon a brand new floating attack: put the player in
-- the summon move animation, spawn the projectile, mark it as the active
-- floating attack on the scythe, and despawn the previous one.
addFloatingAtkMsgs
    :: (ConfigsRead m, MonadIO m)
    => AttackDescription  -- ^ player's summon move animation
    -> Pos2               -- ^ spawn position for the floating attack
    -> Direction
    -> AttackDescription  -- ^ floating attack to spawn
    -> MsgId              -- ^ msg id of the previous floating attack to remove
    -> ScytheData
    -> m [Msg ThinkPlayerMsgsPhase]
addFloatingAtkMsgs playerSummonMoveDesc pos dir floatingAtkDesc floatingAtkMsgId scytheData = do
    let scytheAtkDescs = _scytheAttackDescs (scytheData :: ScytheData)
    newFloatingAtk <- mkFloatingAttack pos dir floatingAtkDesc scytheAtkDescs
    return
        [ mkMsg $ PlayerMsgSetAttackDesc playerSummonMoveDesc
        , mkMsg $ NewThinkProjectileMsgAdd (Some newFloatingAtk)
        , updateFloatingAtkStatusMsg $ FloatingAttackActive (P._msgId newFloatingAtk) 0.0
        , removeFloatingAtkMsg floatingAtkMsgId
        ]
| null | https://raw.githubusercontent.com/incoherentsoftware/defect-process/8797aad1d93bff5aadd7226c39a48f45cf76746e/src/Player/Weapon/All/Scythe/FloatingAttack.hs | haskell | vert-slash -> vert-slash-land
vert-slash -> vert-slash-land
ignore wall collisions
prevent various attacks from going inside surfaces | module Player.Weapon.All.Scythe.FloatingAttack
( FloatingAttackOnDoneBehavior(..)
, FloatingAttackData(..)
, mkFloatingAttack
, removeFloatingAtkMsg
, updateFloatingAtkStatusMsg
, addFloatingAtkMsgs
) where
import Control.Monad (when)
import Control.Monad.IO.Class (MonadIO)
import Data.Maybe (fromMaybe, isJust)
import qualified Data.Set as S
import Attack
import Collision
import Configs
import Configs.All.Player
import Configs.All.PlayerWeapon
import Configs.All.PlayerWeapon.Scythe
import Constants
import Id
import InfoMsg.Util
import Msg
import Player.Weapon as W
import Player.Weapon.All.Scythe.Data
import Projectile as P
import Util
import Window.Graphics
import Window.InputState
import World.Surface
import World.ZIndex
innerHitboxWidth = 88.0 * 2.0 :: Float
innerHitboxHeight = 88.0 * 2.0 :: Float
vertSlashWallLandEpsilon = 5.0 :: Float
floatingTrackingLineColor = Color 38 40 52 255 :: Color
surfaceCollisionOnlyFrameTagName = FrameTagName "surfaceCollisionOnly" :: FrameTagName
ignoreSurfaceCollisionFrameTagName = FrameTagName "ignoreSurfaceCollision" :: FrameTagName
vertSlashLandPlatformFrameTagName = FrameTagName "vertSlashLandPlatform" :: FrameTagName
vertSlashLandSurfaceFrameTagName = FrameTagName "vertSlashLandSurface" :: FrameTagName
defaultRegisteredCollisions = S.fromList
[ ProjRegisteredEnemyCollision
, ProjRegisteredRoomItemCollision
, ProjRegisteredSurfaceCollision
] :: S.Set ProjectileRegisteredCollision
data FloatingAttackOnDoneBehavior
= LingerOnDone
| VanishOnDone
| NextAttackOnDone AttackDescription
deriving Eq
data FloatingAttackData = FloatingAttackData
{ _attack :: Attack
, _onDone :: FloatingAttackOnDoneBehavior
, _scytheAttackDescs :: ScytheAttackDescriptions
, _config :: ScytheConfig
, _knownPlayerPos :: Maybe Pos2
, _enoughChargeHeld :: Bool
, _glowOverlaySprite :: Maybe Sprite
}
mkFloatingAttackData
:: (ConfigsRead m, MonadIO m)
=> Pos2
-> Direction
-> AttackDescription
-> ScytheAttackDescriptions
-> m FloatingAttackData
mkFloatingAttackData pos dir atkDesc scytheAtkDescs = do
atk <- mkAttack pos dir atkDesc
cfg <- readConfig _playerWeapon _scythe
return $ FloatingAttackData
{ _attack = atk
, _onDone = attackDescOnDoneBehavior atkDesc scytheAtkDescs
, _scytheAttackDescs = scytheAtkDescs
, _config = cfg
, _knownPlayerPos = Nothing
, _enoughChargeHeld = False
, _glowOverlaySprite = Nothing
}
floatingAttackInnerHitbox :: Attack -> Hitbox
floatingAttackInnerHitbox atk = rectHitbox pos innerHitboxWidth innerHitboxHeight
where pos = _pos (atk :: Attack) `vecSub` Pos2 (innerHitboxWidth / 2.0) (innerHitboxHeight / 2.0)
floatingAttackHitbox :: ProjectileHitbox FloatingAttackData
floatingAttackHitbox floatingAtk = fromMaybe (floatingAttackInnerHitbox atk) (attackHitbox atk)
where atk = _attack (P._data floatingAtk :: FloatingAttackData)
attackDescOnDoneBehavior :: AttackDescription -> ScytheAttackDescriptions -> FloatingAttackOnDoneBehavior
attackDescOnDoneBehavior atkDesc scytheAtkDescs
| atkDesc == multiSlash1 = NextAttackOnDone multiSlash2
| atkDesc == multiSlash2 = NextAttackOnDone multiSlash3
| atkDesc == multiSlash3 = NextAttackOnDone multiSlash4
| atkDesc == multiSlash4 = NextAttackOnDone multiSlash5
| atkDesc == multiSlash5 = NextAttackOnDone multiSlashGlow
| atkDesc == multiSlashGlow = LingerOnDone
| atkDesc == vertSpinSlash = NextAttackOnDone vertSpinSlashGlow
| atkDesc == vertSpinSlashGlow = LingerOnDone
| atkDesc == riseSlash = NextAttackOnDone riseSlashGlow
| atkDesc == riseSlashGlow = LingerOnDone
| atkDesc == pullSlash = NextAttackOnDone pullSlashGlow
| atkDesc == pullSlashGlow = LingerOnDone
| atkDesc == diagSpinSlash = NextAttackOnDone diagSpinSlashGlow
| atkDesc == diagSpinSlashGlow = LingerOnDone
| otherwise = VanishOnDone
where
multiSlash1 = _multiSlash1 scytheAtkDescs
multiSlash2 = _multiSlash2 scytheAtkDescs
multiSlash3 = _multiSlash3 scytheAtkDescs
multiSlash4 = _multiSlash4 scytheAtkDescs
multiSlash5 = _multiSlash5 scytheAtkDescs
multiSlashGlow = _multiSlashGlow scytheAtkDescs
vertSpinSlash = _vertSpinSlash scytheAtkDescs
vertSpinSlashGlow = _vertSpinSlashGlow scytheAtkDescs
riseSlash = _riseSlash scytheAtkDescs
riseSlashGlow = _riseSlashGlow scytheAtkDescs
pullSlash = _pullSlash scytheAtkDescs
pullSlashGlow = _pullSlashGlow scytheAtkDescs
diagSpinSlash = _diagSpinSlash scytheAtkDescs
diagSpinSlashGlow = _diagSpinSlashGlow scytheAtkDescs
mkFloatingAttack
:: (ConfigsRead m, MonadIO m)
=> Pos2
-> Direction
-> AttackDescription
-> ScytheAttackDescriptions
-> m (Projectile FloatingAttackData)
mkFloatingAttack pos dir atkDesc scytheAtkDescs = do
floatingAtkData <- mkFloatingAttackData pos dir atkDesc scytheAtkDescs
msgId <- newId
let
atk = _attack (floatingAtkData :: FloatingAttackData)
dummyHbx = DummyHitbox $ _pos (atk :: Attack)
return $ (mkProjectile floatingAtkData msgId dummyHbx maxSecs)
{ _hitbox = floatingAttackHitbox
, _registeredCollisions = defaultRegisteredCollisions
, _think = thinkFloatingAttack
, _update = updateFloatingAttack
, _draw = drawFloatingAttack
, _processCollisions = processFloatingAttackCollisions
}
wasSentRemoveMsg :: MsgsRead ThinkProjectileMsgsPhase m => Projectile FloatingAttackData -> m Bool
wasSentRemoveMsg floatingAtk = processMsgs <$> readMsgsTo (P._msgId floatingAtk)
where
processMsgs :: [ProjectileMsgPayload] -> Bool
processMsgs [] = False
processMsgs (d:ds) = case d of
ProjectileMsgSetTtl 0.0 -> True
_ -> processMsgs ds
thinkFloatingAttack
:: (ConfigsRead m, MonadIO m, MsgsRead ThinkProjectileMsgsPhase m)
=> ProjectileThink FloatingAttackData m
thinkFloatingAttack floatingAtk = wasSentRemoveMsg floatingAtk >>= \case
True -> return [infoPosMsg]
False -> ((infoPosMsg:thinkAttack atk) ++) <$> case _onDone floatingAtkData of
LingerOnDone
| atkCancelable || atkDone -> return floatingAtkReadyMsgs
VanishOnDone
| atkDone ->
let
updateScytheSt = \scythe -> scythe
{ W._data = (W._data scythe) {_floatingAttackStatus = FloatingAttackInactive}
}
in return
[ mkMsg $ PlayerMsgUpdateWeapon updateScytheSt
, mkMsgTo (ProjectileMsgSetTtl 0.0) floatingAtkMsgId
]
NextAttackOnDone nextFloatingAtkDesc
| atkDone -> mkNextFloatingAttackMsgs nextFloatingAtkDesc
| atkCancelable -> return floatingAtkReadyMsgs
_ -> return []
where
floatingAtkData = P._data floatingAtk
floatingAtkMsgId = P._msgId floatingAtk
atk = _attack (floatingAtkData :: FloatingAttackData)
atkPos = _pos (atk :: Attack)
atkDir = _dir (atk :: Attack)
atkDone = _done atk
atkCancelable = attackCancelable atk
infoPosMsg = mkMsg $ InfoMsgProjectilePos atkPos NullId floatingAtkMsgId
scytheAtkDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)
mkNextFloatingAttackMsgs
:: (ConfigsRead m1, MonadIO m1)
=> AttackDescription
-> m1 [Msg ThinkProjectileMsgsPhase]
mkNextFloatingAttackMsgs nextFloatingAtkDesc = do
nextFloatingAtk <- mkFloatingAttack atkPos atkDir nextFloatingAtkDesc scytheAtkDescs
let
nextFloatingAtkMsgId = P._msgId nextFloatingAtk
floatingAtkStatus = FloatingAttackActive nextFloatingAtkMsgId 0.0
updateScytheSt = \scythe -> scythe
{ W._data = (W._data scythe) {_floatingAttackStatus = floatingAtkStatus}
}
return
[ mkMsg $ NewUpdateProjectileMsgAdd (Some nextFloatingAtk)
, mkMsg $ PlayerMsgUpdateWeapon updateScytheSt
, mkMsgTo (ProjectileMsgSetTtl 0.0) floatingAtkMsgId
]
floatingAtkReadyMsgs =
let
floatingAtkStatus = FloatingAttackActiveReady floatingAtkMsgId atkPos atkDir 0.0
updateScytheSt = \scythe -> scythe
{ W._data = (W._data scythe) {_floatingAttackStatus = floatingAtkStatus}
}
in [mkMsg $ PlayerMsgUpdateWeapon updateScytheSt]
floatingAttackVel :: Attack -> Projectile FloatingAttackData -> Vel2
floatingAttackVel atk floatingAtk
| _done atk = zeroVel2
| otherwise = attackVelToVel2 (attackVel atk) (P._vel floatingAtk)
attackGlowOverlay :: Attack -> ScytheAttackDescriptions -> Maybe Sprite
attackGlowOverlay atk scytheAtkDescs
| atkIs _multiSlashGlow = Just $ _sprite (_multiSlashGlowOverlay scytheAtkDescs)
| atkIs _vertSpinSlashGlow = Just $ _sprite (_vertSpinSlashGlowOverlay scytheAtkDescs)
| atkIs _riseSlashGlow = Just $ _sprite (_riseSlashGlowOverlay scytheAtkDescs)
| atkIs _pullSlashGlow = Just $ _sprite (_pullSlashGlowOverlay scytheAtkDescs)
| atkIs _diagSpinSlashGlow = Just $ _sprite (_diagSpinSlashGlowOverlay scytheAtkDescs)
| otherwise = Nothing
where atkIs = \atkDescField -> _description atk == atkDescField scytheAtkDescs
updateFloatingAttack
:: (InputRead m, MsgsReadWrite UpdateProjectileMsgsPhase m)
=> ProjectileUpdate FloatingAttackData m
updateFloatingAttack floatingAtk =
let
processInfoMsgs :: [InfoMsgPayload] -> Maybe Pos2
processInfoMsgs [] = Nothing
processInfoMsgs (d:ds) = case d of
InfoMsgPlayer playerInfo -> Just $ playerInfoPos playerInfo
_ -> processInfoMsgs ds
floatingAtkData = P._data floatingAtk
atk = _attack (floatingAtkData :: FloatingAttackData)
vel = floatingAttackVel atk floatingAtk
pos = _pos (atk :: Attack) `vecAdd` toPos2 (vel `vecMul` timeStep)
dir = _dir (atk :: Attack)
enoughChargeHeld = _enoughChargeHeld floatingAtkData
scytheAtkDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)
glowOverlaySpr = case _glowOverlaySprite floatingAtkData of
Just overlaySpr
| enoughChargeHeld -> Just $ updateSprite overlaySpr
Nothing
| enoughChargeHeld -> attackGlowOverlay atk scytheAtkDescs
_ -> Nothing
in do
knownPlayerPos <- processInfoMsgs <$> readMsgs
return $ floatingAtk
{ P._data = floatingAtkData
{ _attack = updateAttack pos dir atk
, _knownPlayerPos = knownPlayerPos
, _glowOverlaySprite = glowOverlaySpr
} :: FloatingAttackData
, P._vel = vel
}
processFloatingAttackCollisions :: ProjectileProcessCollisions FloatingAttackData
processFloatingAttackCollisions collisions floatingAtk = foldr processCollision [] collisions
where
floatingAtkData = P._data floatingAtk
atk = _attack floatingAtkData
isAtkHitbox = isJust $ attackHitbox atk
isSurfaceCollisionOnly = surfaceCollisionOnlyFrameTagName `isAttackFrameTag` atk
isIgnoreSurfaceCollision = ignoreSurfaceCollisionFrameTagName `isAttackFrameTag` atk
processCollision :: ProjectileCollision -> [Msg ThinkCollisionMsgsPhase] -> [Msg ThinkCollisionMsgsPhase]
processCollision collision !msgs = case collision of
ProjEnemyCollision enemy
| isAtkHitbox && not isSurfaceCollisionOnly -> attackEnemyHitMessages enemy atk ++ msgs
ProjRoomItemCollision (Some roomItem)
| isAtkHitbox -> attackCollisionEntityHitMessages roomItem atk ++ msgs
ProjSurfaceCollision surfaceHbx surfaceType
| not isIgnoreSurfaceCollision -> surfaceHitMessages surfaceType surfaceHbx ++ msgs
_ -> msgs
mkVertSlashLandAttack :: MonadIO m => Hitbox -> Projectile FloatingAttackData -> m Attack
mkVertSlashLandAttack surfaceHbx fa = mkAttack pos dir (_vertSlashLand scytheAtkDescs)
where
faAtk = _attack $ P._data fa
x = vecX $ _pos (faAtk :: Attack)
pos = Pos2 x (hitboxTop surfaceHbx)
dir = _dir (faAtk :: Attack)
scytheAtkDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)
surfaceHitMessages :: SurfaceType -> Hitbox -> [Msg ThinkCollisionMsgsPhase]
surfaceHitMessages surfaceType surfaceHbx = case surfaceType of
GeneralSurface -> generalSurfaceHitMessages surfaceHbx
PlatformSurface -> vertSlashLandPlatformMsgs
SpeedRailSurface _ -> vertSlashLandPlatformMsgs
where
innerHbx = floatingAttackInnerHitbox atk
isVertSlashLandPlatformFrame = vertSlashLandPlatformFrameTagName `isAttackFrameTag` atk
isIntersect = surfaceHbx `intersectsHitbox` innerHbx
vertSlashLandPlatformMsgs
| isVertSlashLandPlatformFrame && isIntersect =
let
update = \fa ->
let
faData = P._data fa
scytheAtkDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)
in if
| _attack faData `attackIs` _vertSlash scytheAtkDescs -> do
vertSlashLandAtk <- mkVertSlashLandAttack surfaceHbx fa
return $ fa
{ P._data = faData {_attack = vertSlashLandAtk}
}
| otherwise -> return fa
in [mkMsgTo (ProjectileMsgUpdateM update) (P._msgId floatingAtk)]
| otherwise = []
generalSurfaceHitMessages :: Hitbox -> [Msg ThinkCollisionMsgsPhase]
generalSurfaceHitMessages surfaceHbx
| surfaceHbx `intersectsHitbox` innerHbx =
let
Vel2 velX velY = floatingAttackVel atk floatingAtk
offsetX
| velX `approxEq` 0.0 = Nothing
| velX < 0.0 = Just $ hitboxRight surfaceHbx - hitboxLeft innerHbx
| otherwise = Just $ hitboxLeft surfaceHbx - hitboxRight innerHbx
offsetY
| velY `approxEq` 0.0 = Nothing
| velY < 0.0 = Just $ hitboxBot surfaceHbx - hitboxTop innerHbx
| otherwise = Just $ hitboxTop surfaceHbx - hitboxBot innerHbx
offset = case (offsetX, offsetY) of
(Nothing, Nothing) -> zeroPos2
(Just offsetX', Nothing) -> Pos2 offsetX' 0.0
(Nothing, Just offsetY') -> Pos2 0.0 offsetY'
(Just offsetX', Just offsetY')
| abs offsetX' < abs offsetY' -> Pos2 offsetX' 0.0
| otherwise -> Pos2 0.0 offsetY'
update = \fa ->
let
faData = P._data fa
faAtk = _attack faData
scytheAttackDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)
vertSlash = _vertSlash scytheAttackDescs
noInsideSurfacesAtkDescs =
[ _pullSlash scytheAttackDescs
, _vertSpinSlash scytheAttackDescs
, _diagSpinSlash scytheAttackDescs
, _multiSlash1 scytheAttackDescs
, _riseSlash scytheAttackDescs
]
in do
faAtk' <- if
| faAtk `attackIs` vertSlash ->
let
approxEqEx' = \f1 f2 -> approxEqEx f1 f2 vertSlashWallLandEpsilon
ignoreIntersect =
approxEqEx' (hitboxRight surfaceHbx) (hitboxLeft innerHbx) ||
approxEqEx' (hitboxLeft surfaceHbx) (hitboxRight innerHbx)
isVertSlashLandSurfaceFrame =
vertSlashLandSurfaceFrameTagName `isAttackFrameTag` faAtk
in if
| ignoreIntersect || not isVertSlashLandSurfaceFrame -> return faAtk
| otherwise ->
mkVertSlashLandAttack surfaceHbx fa
| faAtk `attackIn` noInsideSurfacesAtkDescs -> return $ faAtk
{ _pos = _pos (faAtk :: Attack) `vecAdd` offset
}
| otherwise -> return faAtk
return $ fa
{ P._data = faData {_attack = faAtk'}
}
in [mkMsgTo (ProjectileMsgUpdateM update) (P._msgId floatingAtk)]
| otherwise = []
where innerHbx = floatingAttackInnerHitbox atk
drawFloatingAttack :: (ConfigsRead m, GraphicsReadWrite m, MonadIO m) => ProjectileDraw FloatingAttackData m
drawFloatingAttack floatingAtk =
let
atk = _attack (P._data floatingAtk :: FloatingAttackData)
atkDesc = _description atk
floatingAtkData = P._data floatingAtk
scytheAtkDescs = _scytheAttackDescs (floatingAtkData :: FloatingAttackData)
cfg = _config (floatingAtkData :: FloatingAttackData)
isAtkLingerOnDone = attackDescOnDoneBehavior atkDesc scytheAtkDescs == LingerOnDone
oscillateOffset
| isAtkLingerOnDone =
let
amplitude = _glowOscillateAmplitude cfg
period = _glowOscillatePeriod cfg
elapsedSecs = _elapsedSecs $ attackSprite atk
offsetY = amplitude * sin (2 * pi / period * elapsedSecs)
in Pos2 0.0 offsetY
| otherwise = zeroPos2
pos = _pos (atk :: Attack) `vecAdd` oscillateOffset
vel = floatingAttackVel atk floatingAtk
dir = _dir (atk :: Attack)
spr = fromMaybe (attackSprite atk) (_glowOverlaySprite floatingAtkData)
in do
pos' <- graphicsLerpPos pos vel
drawSprite pos' dir worldProjectileZIndex spr
when (_showTrackingLine cfg && isAtkLingerOnDone) $
case _knownPlayerPos floatingAtkData of
Nothing -> return ()
Just (Pos2 playerX playerY) -> do
playerHeight <- readConfig _player (_height :: PlayerConfig -> Float)
let playerY' = playerY - playerHeight / 2.0
drawLine (Pos2 playerX playerY') pos' floatingTrackingLineColor playerWeaponOverlayZIndex
removeFloatingAtkMsg :: MsgId -> Msg ThinkPlayerMsgsPhase
removeFloatingAtkMsg floatingAtkMsgId = mkMsgTo (ProjectileMsgSetTtl 0.0) floatingAtkMsgId
updateFloatingAtkStatusMsg :: AllowMsgWrite p PlayerMsgPayload => FloatingAttackStatus -> Msg p
updateFloatingAtkStatusMsg floatingAtkStatus = mkMsg $ PlayerMsgUpdateWeapon updateWpn
where
updateWpn = \scythe -> scythe
{ W._data = (W._data scythe) {_floatingAttackStatus = floatingAtkStatus}
}
-- | Builds the messages for spawning a new floating attack projectile:
-- sets the player's attack to the summon move, adds the new projectile,
-- marks the scythe's floating attack as active, and expires the previous
-- floating attack projectile.
addFloatingAtkMsgs
    :: (ConfigsRead m, MonadIO m)
    => AttackDescription  -- ^ summon move shown on the player
    -> Pos2               -- ^ spawn position for the new floating attack
    -> Direction          -- ^ facing direction
    -> AttackDescription  -- ^ attack description for the new floating attack
    -> MsgId              -- ^ msg id of the floating attack to expire
    -> ScytheData
    -> m [Msg ThinkPlayerMsgsPhase]
addFloatingAtkMsgs playerSummonMoveDesc pos dir floatingAtkDesc floatingAtkMsgId scytheData = do
    let scytheAtkDescs = _scytheAttackDescs (scytheData :: ScytheData)
    newFloatingAtk <- mkFloatingAttack pos dir floatingAtkDesc scytheAtkDescs
    return
        [ mkMsg $ PlayerMsgSetAttackDesc playerSummonMoveDesc
        , mkMsg $ NewThinkProjectileMsgAdd (Some newFloatingAtk)
        -- Status tracks the new projectile's msg id, with elapsed time 0.
        , updateFloatingAtkStatusMsg $ FloatingAttackActive (P._msgId newFloatingAtk) 0.0
        , removeFloatingAtkMsg floatingAtkMsgId
        ]
|
0292a3588fc3e32c0681e1cf5b070b9d0fde77f419698d9917c9d7651876cf22 | haskell-tools/haskell-tools | HigherOrder.hs | module Refactor.GenerateTypeSignature.HigherOrder where
app f x = f x
| null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/builtin-refactorings/examples/Refactor/GenerateTypeSignature/HigherOrder.hs | haskell | module Refactor.GenerateTypeSignature.HigherOrder where
app f x = f x
| |
d88d735821c9d84ca9df35d55f7bc178763619be8191b7c9850f6d540e7899b0 | jguhlin/ODG | pathways.clj | (ns odg.pathways
(:require
[clojure.core.reducers :as r]
[biotools.pmn-pathways :as pmn-pathways]
[odg.db :as db]
[odg.batch :as batch]
[odg.util :as util]
[co.paralleluniverse.pulsar.core :as p]
[odg.db-handler :as dbh]
)
(:import
(org.neo4j.unsafe.batchinsert BatchInserter
BatchInserters
BatchInserterIndexProvider
BatchInserterIndex)))
; example of data that comes from biotools
; #biotools.pmn_pathways.PMNPathway{
: pathway - id PWY-5143 ,
; :pathway-name fatty acid activation,
; :reaction-id RXN-7904,
; :EC 6.2.1.3,
; :protein-id GDQG-2927-MONOMER,
; :protein-name long-chain-fatty-acid-CoA ligase,
; :gene-id GDQG-2927,
: gene - name }
(defn get-ec-number
  "Extracts a dotted EC-number fragment (e.g. \"6.2.1.3\") from the given
  string via regex. Returns nil when nothing matches, e.g. for the \"-\"
  placeholder PMN files use for a missing EC number."
  [ec]
  (re-find #"\d+\.\d+\.?\d+?\.?\d+?" ec))
(defn import-pathway
  "Parses a PMN pathways file and submits pathway, reaction, and EC nodes
  plus gene-annotation relationships as a batch job. Only genes already
  present in the database index for this species/version are annotated."
  [species version filename]
  (with-open [rdr (clojure.java.io/reader filename)]
    (let [data (pmn-pathways/parse-reader rdr)
          genes (doall (map :gene-name data))
          genes-in-database (into
                              {}
                              (dbh/batch-get-data
                                {:index (batch/convert-name species version)
                                 :action :query
                                 :query genes}))
          gid (into #{} (keys genes-in-database))
          rxns (distinct (map :reaction-id data))
          pwys (distinct (map (juxt :pathway-id :pathway-name) data))
          ;; "-" is the PMN placeholder for a missing EC number; filtering it
          ;; out here avoids creating a bogus empty "EC:" node.
          ecs (remove #{"-"} (distinct (map :EC data)))
          results
          (apply
            merge-with
            concat
            {:nodes-update-or-create (concat
                                       (for [rxn rxns]
                                         [{:id rxn :type "Reaction"} [(batch/dynamic-label "Reaction") (batch/dynamic-label "Rxn")]])
                                       (for [[pwy-id pwy-name] pwys]
                                         [{:id pwy-id :definition pwy-name :type "Pathway"}
                                          [(batch/dynamic-label "Pathway")]])
                                       (for [ec ecs]
                                         [{:id (str "EC:" (get-ec-number ec))} [(batch/dynamic-label "EC")]]))}
            ; Only do genes that exist in the database
            (for [row (filter (fn [x] (gid (:gene-name x))) data)]
              {:nodes-update [[{:id (:gene-name row)
                                :protein-name (:protein-name row)
                                :protein-id (:protein-id row)}
                               [(batch/dynamic-label "PATHWAY_ANNOTATION")]]]
               :rels (filter
                       identity
                       [
                        [(:INVOLVED_IN db/rels) (:reaction-id row) (:pathway-id row)]
                        [(:PART_OF db/rels) (get genes-in-database (:gene-name row)) (:reaction-id row)]
                        ;; BUG FIX: this previously tested (:EC data) -- the whole
                        ;; parsed collection -- which is never equal to "-", so a
                        ;; BELONGS_TO rel to an empty "EC:" id was created even
                        ;; when the row has no EC number.
                        (when (not (= "-" (:EC row)))
                          [(:BELONGS_TO db/rels) (:reaction-id row) (str "EC:" (get-ec-number (:EC row)))])
                        ])}))]
      ;; Force evaluation before submitting; lazy seqs escaping with-open
      ;; would fail once the reader is closed.
      (dbh/submit-batch-job
        {:rels (doall (:rels results))
         :nodes-update (doall (:nodes-update results))
         :nodes-update-or-create (doall (:nodes-update-or-create results))
         :indices [(batch/convert-name species version)]
         }))))
;; Init DB and parse data into the db
(defn import-cli
  "Wrapper for internal command import-pathway, used to initialize the database."
  [config opts args]
  (println "Pathways Import")
  ;; NOTE(review): the explicit db connection is commented out -- presumably
  ;; the db-handler manages its own connection; `config` is otherwise unused.
  ;(batch/connect (get-in config [:global :db_path]) (:memory opts))
  ;; Only the first positional argument (the pathways file path) is used.
  (import-pathway (:species opts) (:version opts) (first args)))
| null | https://raw.githubusercontent.com/jguhlin/ODG/c8a09f273c278ba7b3acbd37155477979f8b4851/src/odg/pathways.clj | clojure | example of data that comes from biotools
#biotools.pmn_pathways.PMNPathway{
:pathway-name fatty acid activation,
:reaction-id RXN-7904,
:EC 6.2.1.3,
:protein-id GDQG-2927-MONOMER,
:protein-name long-chain-fatty-acid-CoA ligase,
:gene-id GDQG-2927,
Only do genes that exist in the database
Not certain why doall didn't work previously, but this forces it correctly.
(batch/connect (get-in config [:global :db_path]) (:memory opts)) | (ns odg.pathways
(:require
[clojure.core.reducers :as r]
[biotools.pmn-pathways :as pmn-pathways]
[odg.db :as db]
[odg.batch :as batch]
[odg.util :as util]
[co.paralleluniverse.pulsar.core :as p]
[odg.db-handler :as dbh]
)
(:import
(org.neo4j.unsafe.batchinsert BatchInserter
BatchInserters
BatchInserterIndexProvider
BatchInserterIndex)))
: pathway - id PWY-5143 ,
: gene - name }
(defn get-ec-number
[ec]
(re-find #"\d+\.\d+\.?\d+?\.?\d+?" ec))
(defn import-pathway
[species version filename]
(with-open [rdr (clojure.java.io/reader filename)]
(let [data (pmn-pathways/parse-reader rdr)
genes (doall (map :gene-name data))
genes-in-database (into
{}
(dbh/batch-get-data
{:index (batch/convert-name species version)
:action :query
:query genes}))
gid (into #{} (keys genes-in-database))
rxns (distinct (map :reaction-id data))
pwys (distinct (map (juxt :pathway-id :pathway-name) data))
ecs (distinct (map :EC data))
results
(apply
merge-with
concat
{:nodes-update-or-create (concat
(for [rxn rxns]
[{:id rxn :type "Reaction"} [(batch/dynamic-label "Reaction") (batch/dynamic-label "Rxn")]])
(for [[pwy-id pwy-name] pwys]
[{:id pwy-id :definition pwy-name :type "Pathway"}
[(batch/dynamic-label "Pathway")]])
(for [ec ecs]
[{:id (str "EC:" (get-ec-number ec))} [(batch/dynamic-label "EC")]]))}
(for [row (filter (fn [x] (gid (:gene-name x))) data)]
{:nodes-update [[{:id (:gene-name row)
:protein-name (:protein-name row)
:protein-id (:protein-id row)}
[(batch/dynamic-label "PATHWAY_ANNOTATION")]]]
:rels (filter
identity
[
[(:INVOLVED_IN db/rels) (:reaction-id row) (:pathway-id row)]
[(:PART_OF db/rels) (get genes-in-database (:gene-name row)) (:reaction-id row)]
(when (not (= "-" (:EC data)))
[(:BELONGS_TO db/rels) (:reaction-id row) (str "EC:" (get-ec-number (:EC row)))])
])}))]
(dbh/submit-batch-job
{:rels (doall (:rels results))
:nodes-update (doall (:nodes-update results))
:nodes-update-or-create (doall (:nodes-update-or-create results))
:indices [(batch/convert-name species version)]
}))))
Init DB and parse data into the db
(defn import-cli
"Wrapper for internal command import-pathway, used to initialize the database."
[config opts args]
(println "Pathways Import")
(import-pathway (:species opts) (:version opts) (first args)))
|
437dd232cb96195fa9681b41f1ea5d7dcdeb93541efb93efb7cbd5cde6dd3977 | Oblosys/proxima | Interfaces.hs |
UUAGC 0.9.10 ( Interfaces.ag )
module Interfaces where
import CommonTypes
import SequentialTypes
-- IRoot -------------------------------------------------------
{-
alternatives:
alternative IRoot:
child inters : Interfaces
-}
-- | Root wrapper holding all nonterminal interfaces (generated by UUAGC
-- from Interfaces.ag).
data IRoot = IRoot (Interfaces)
Interface ---------------------------------------------------
alternatives :
alternative Interface :
child nt : { NontermIdent }
child cons : { [ ConstructorIdent ] }
child seg : Segments
alternatives:
alternative Interface:
child nt : {NontermIdent}
child cons : {[ConstructorIdent]}
child seg : Segments
-}
-- | Interface for one nonterminal: its identifier, its constructors, and
-- its visit segments.
data Interface = Interface (NontermIdent) ([ConstructorIdent]) (Segments)
-- Interfaces --------------------------------------------------
alternatives :
alternative Cons :
child hd : Interface
child tl : Interfaces
alternative :
alternatives:
alternative Cons:
child hd : Interface
child tl : Interfaces
alternative Nil:
-}
type Interfaces = [(Interface)]
-- Segment -----------------------------------------------------
{-
alternatives:
alternative Segment:
child inh : {[Vertex]}
child syn : {[Vertex]}
-}
-- | One visit segment: the inherited-attribute vertices (inh) and the
-- synthesized-attribute vertices (syn), per the .ag children above.
data Segment = Segment ([Vertex]) ([Vertex])
-- Segments ----------------------------------------------------
alternatives :
alternative Cons :
child hd : Segment
child tl : Segments
alternative :
alternatives:
alternative Cons:
child hd : Segment
child tl : Segments
alternative Nil:
-}
type Segments = [(Segment)] | null | https://raw.githubusercontent.com/Oblosys/proxima/f154dff2ccb8afe00eeb325d9d06f5e2a5ee7589/uuagc/src-derived/Interfaces.hs | haskell | IRoot -------------------------------------------------------
alternatives:
alternative IRoot:
child inters : Interfaces
-------------------------------------------------
Interfaces --------------------------------------------------
Segment -----------------------------------------------------
alternatives:
alternative Segment:
child inh : {[Vertex]}
child syn : {[Vertex]}
Segments ---------------------------------------------------- |
UUAGC 0.9.10 ( Interfaces.ag )
module Interfaces where
import CommonTypes
import SequentialTypes
data IRoot = IRoot (Interfaces)
alternatives :
alternative Interface :
child nt : { NontermIdent }
child cons : { [ ConstructorIdent ] }
child seg : Segments
alternatives:
alternative Interface:
child nt : {NontermIdent}
child cons : {[ConstructorIdent]}
child seg : Segments
-}
data Interface = Interface (NontermIdent) ([ConstructorIdent]) (Segments)
alternatives :
alternative Cons :
child hd : Interface
child tl : Interfaces
alternative :
alternatives:
alternative Cons:
child hd : Interface
child tl : Interfaces
alternative Nil:
-}
type Interfaces = [(Interface)]
data Segment = Segment ([Vertex]) ([Vertex])
alternatives :
alternative Cons :
child hd : Segment
child tl : Segments
alternative :
alternatives:
alternative Cons:
child hd : Segment
child tl : Segments
alternative Nil:
-}
type Segments = [(Segment)] |
ee8a313abdf05dd3241aac96860dc7111299ff8f81e5dd0ed7e7b273d6361438 | RichiH/git-annex | Locations.hs | git - annex file locations
{- git-annex file locations
 -
 - Copyright 2010-2015 Joey Hess <>
 -
 - Licensed under the GNU GPL version 3 or higher.
 -}
module Annex.Locations (
keyFile,
fileKey,
keyPaths,
keyPath,
annexDir,
objectDir,
gitAnnexLocation,
gitAnnexLocationDepth,
gitAnnexLink,
gitAnnexLinkCanonical,
gitAnnexContentLock,
gitAnnexMapping,
gitAnnexInodeCache,
gitAnnexInodeSentinal,
gitAnnexInodeSentinalCache,
annexLocations,
gitAnnexDir,
gitAnnexObjectDir,
gitAnnexTmpMiscDir,
gitAnnexTmpObjectDir,
gitAnnexTmpObjectLocation,
gitAnnexBadDir,
gitAnnexBadLocation,
gitAnnexUnusedLog,
gitAnnexKeysDb,
gitAnnexKeysDbLock,
gitAnnexFsckState,
gitAnnexFsckDbDir,
gitAnnexFsckDbLock,
gitAnnexFsckResultsLog,
gitAnnexExportDbDir,
gitAnnexExportLock,
gitAnnexScheduleState,
gitAnnexTransferDir,
gitAnnexCredsDir,
gitAnnexWebCertificate,
gitAnnexWebPrivKey,
gitAnnexFeedStateDir,
gitAnnexFeedState,
gitAnnexMergeDir,
gitAnnexJournalDir,
gitAnnexJournalLock,
gitAnnexPreCommitLock,
gitAnnexMergeLock,
gitAnnexIndex,
gitAnnexIndexStatus,
gitAnnexViewIndex,
gitAnnexViewLog,
gitAnnexMergedRefs,
gitAnnexIgnoredRefs,
gitAnnexPidFile,
gitAnnexPidLockFile,
gitAnnexDaemonStatusFile,
gitAnnexLogFile,
gitAnnexFuzzTestLogFile,
gitAnnexHtmlShim,
gitAnnexUrlFile,
gitAnnexTmpCfgFile,
gitAnnexSshDir,
gitAnnexRemotesDir,
gitAnnexAssistantDefaultDir,
HashLevels(..),
hashDirMixed,
hashDirLower,
preSanitizeKeyName,
reSanitizeKeyName,
prop_isomorphic_fileKey
) where
import Data.Char
import Data.Default
import Common
import Key
import Types.Key
import Types.UUID
import Types.GitConfig
import Types.Difference
import qualified Git
import qualified Git.Types as Git
import Git.FilePath
import Annex.DirHashes
import Annex.Fixup
{- Conventions:
-
- Functions ending in "Dir" should always return values ending with a
- trailing path separator. Most code does not rely on that, but a few
- things do.
-
- Everything else should not end in a trailing path sepatator.
-
- Only functions (with names starting with "git") that build a path
- based on a git repository should return full path relative to the git
- repository. Everything else returns path segments.
-}
-- | Directory git-annex uses for local state, relative to the .git
-- directory. Always ends with a path separator.
annexDir :: FilePath
annexDir = addTrailingPathSeparator "annex"

-- | Directory git-annex uses for locally available object content,
-- relative to the .git directory. Always ends with a path separator.
objectDir :: FilePath
objectDir = addTrailingPathSeparator (annexDir </> "objects")
{- Annexed file's possible locations relative to the .git directory.
 - There are two different possibilities, using different hashes.
 -
 - Also, some repositories have a Difference in hash directory depth.
 -}
annexLocations :: GitConfig -> Key -> [FilePath]
annexLocations config key = [ annexLocation config key h | h <- dirHashes ]

-- | Location for the key under one particular directory-hash scheme.
annexLocation :: GitConfig -> Key -> (HashLevels -> Hasher) -> FilePath
annexLocation config key hasher =
    objectDir </> keyPath key (hasher (objectHashLevels config))
-- | Number of subdirectories from the gitAnnexObjectDir down to the
-- gitAnnexLocation (hash levels plus the per-key directory).
gitAnnexLocationDepth :: GitConfig -> Int
gitAnnexLocationDepth config = case objectHashLevels config of
    HashLevels n -> n + 1
{- Annexed object's location in a repository.
 -
 - When there are multiple possible locations, returns the one where the
 - file is actually present.
 -
 - When the file is not present, returns the location where the file should
 - be stored.
 -
 - This does not take direct mode into account, so in direct mode it is not
 - the actual location of the file's content.
 -}
gitAnnexLocation :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexLocation key r config = gitAnnexLocation' key r config (annexCrippledFileSystem config) (coreSymlinks config) doesFileExist (Git.localGitDir r)

-- | Worker for gitAnnexLocation, parameterized over the crippled-filesystem
-- and symlink-support flags, the file-existence check (injectable so
-- callers can avoid IO), and the git directory.
gitAnnexLocation' :: Key -> Git.Repo -> GitConfig -> Bool -> Bool -> (FilePath -> IO Bool) -> FilePath -> IO FilePath
gitAnnexLocation' key r config crippled symlinkssupported checker gitdir
    {- Bare repositories default to hashDirLower for new
     - content, as it's more portable. But check all locations. -}
    | Git.repoIsLocalBare r = checkall
    | hasDifference ObjectHashLower (annexDifferences config) =
        only hashDirLower
    {- Repositories on crippled filesystems use hashDirLower
     - for new content, unless symlinks are supported too.
     - Then hashDirMixed is used. But, the content could be
     - in either location so check both. -}
    | crippled = if symlinkssupported
        then check $ map inrepo $ reverse $ annexLocations config key
        else checkall
    {- Regular repositories only use hashDirMixed, so
     - don't need to do any work to check if the file is
     - present. -}
    | otherwise = only hashDirMixed
  where
    only = return . inrepo . annexLocation config key
    checkall = check $ map inrepo $ annexLocations config key
    inrepo d = gitdir </> d
    -- Falls back to the first (preferred) candidate when the object is
    -- not present at any location.
    check locs@(l:_) = fromMaybe l <$> firstM checker locs
    check [] = error "internal"
{- Calculates a symlink target to link a file to an annexed object. -}
gitAnnexLink :: FilePath -> Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexLink file key r config = do
    currdir <- getCurrentDirectory
    let absfile = absNormPathUnix currdir file
    let gitdir = getgitdir currdir
    -- The existence checker always succeeds, so lookup settles on the
    -- first candidate location without touching the filesystem.
    loc <- gitAnnexLocation' key r config False False (\_ -> return True) gitdir
    toInternalGitPath <$> relPathDirToFile (parentDir absfile) loc
  where
    getgitdir currdir
        {- This special case is for git submodules on filesystems not
         - supporting symlinks; generate link target that will
         - work portably. -}
        | not (coreSymlinks config) && needsSubmoduleFixup r =
            absNormPathUnix currdir $ Git.repoPath r </> ".git"
        | otherwise = Git.localGitDir r
    -- Converts to internal (forward-slash) git path form before and after
    -- making the path absolute, for portability.
    absNormPathUnix d p = toInternalGitPath $
        absPathFrom (toInternalGitPath d) (toInternalGitPath p)
{- Calculates a symlink target as would be used in a typical git
 - repository, with .git in the top of the work tree. -}
gitAnnexLinkCanonical :: FilePath -> Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexLinkCanonical file key r config = gitAnnexLink file key r' config'
  where
    -- Force the canonical .git-in-worktree layout. The extracted source
    -- had dropped the `l` binding from this pattern, making it invalid;
    -- reconstructed to bind the Local location record being updated.
    r' = case r of
        Git.Repo { Git.location = l@(Git.Local { Git.worktree = Just wt }) } ->
            r { Git.location = l { Git.gitdir = wt </> ".git" } }
        _ -> r
    -- Pretend the filesystem is fully capable, so the canonical symlink
    -- form is generated.
    config' = config
        { annexCrippledFileSystem = False
        , coreSymlinks = True
        }
-- | File used to lock a key's content: the object location plus ".lck".
gitAnnexContentLock :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexContentLock key r config = (++ ".lck") <$> gitAnnexLocation key r config
-- | File that maps from a key to the file(s) in the git repository:
-- the object location plus ".map". Used in direct mode.
gitAnnexMapping :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexMapping key r config = (++ ".map") <$> gitAnnexLocation key r config
-- | File caching information about a key's content, used to determine if
-- a file has changed: the object location plus ".cache". Used in direct
-- mode.
gitAnnexInodeCache :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexInodeCache key r config = (++ ".cache") <$> gitAnnexLocation key r config
gitAnnexInodeSentinal :: Git.Repo -> FilePath
gitAnnexInodeSentinal r = gitAnnexDir r </> "sentinal"
gitAnnexInodeSentinalCache :: Git.Repo -> FilePath
gitAnnexInodeSentinalCache r = gitAnnexInodeSentinal r ++ ".cache"
{- The annex directory of a repository. -}
gitAnnexDir :: Git.Repo -> FilePath
gitAnnexDir r = addTrailingPathSeparator $ Git.localGitDir r </> annexDir
{- The part of the annex directory where file contents are stored. -}
gitAnnexObjectDir :: Git.Repo -> FilePath
gitAnnexObjectDir r = addTrailingPathSeparator $ Git.localGitDir r </> objectDir
{- .git/annex/misctmp/ is used for random temp files -}
gitAnnexTmpMiscDir :: Git.Repo -> FilePath
gitAnnexTmpMiscDir r = addTrailingPathSeparator $ gitAnnexDir r </> "misctmp"
{- .git/annex/tmp/ is used for temp files for key's contents -}
gitAnnexTmpObjectDir :: Git.Repo -> FilePath
gitAnnexTmpObjectDir r = addTrailingPathSeparator $ gitAnnexDir r </> "tmp"
{- The temp file to use for a given key's content. -}
gitAnnexTmpObjectLocation :: Key -> Git.Repo -> FilePath
gitAnnexTmpObjectLocation key r = gitAnnexTmpObjectDir r </> keyFile key
{- .git/annex/bad/ is used for bad files found during fsck -}
gitAnnexBadDir :: Git.Repo -> FilePath
gitAnnexBadDir r = addTrailingPathSeparator $ gitAnnexDir r </> "bad"
{- The bad file to use for a given key. -}
gitAnnexBadLocation :: Key -> Git.Repo -> FilePath
gitAnnexBadLocation key r = gitAnnexBadDir r </> keyFile key
{- .git/annex/foounused is used to number possibly unused keys -}
gitAnnexUnusedLog :: FilePath -> Git.Repo -> FilePath
gitAnnexUnusedLog prefix r = gitAnnexDir r </> (prefix ++ "unused")
{- .git/annex/keys/ contains a database of information about keys. -}
gitAnnexKeysDb :: Git.Repo -> FilePath
gitAnnexKeysDb r = gitAnnexDir r </> "keys"
{- Lock file for the keys database. -}
gitAnnexKeysDbLock :: Git.Repo -> FilePath
gitAnnexKeysDbLock r = gitAnnexKeysDb r ++ ".lck"
{- .git/annex/fsck/uuid/ is used to store information about incremental
- fscks. -}
gitAnnexFsckDir :: UUID -> Git.Repo -> FilePath
gitAnnexFsckDir u r = gitAnnexDir r </> "fsck" </> fromUUID u
{- used to store information about incremental fscks. -}
gitAnnexFsckState :: UUID -> Git.Repo -> FilePath
gitAnnexFsckState u r = gitAnnexFsckDir u r </> "state"
{- Directory containing database used to record fsck info. -}
gitAnnexFsckDbDir :: UUID -> Git.Repo -> FilePath
gitAnnexFsckDbDir u r = gitAnnexFsckDir u r </> "db"
{- Lock file for the fsck database. -}
gitAnnexFsckDbLock :: UUID -> Git.Repo -> FilePath
gitAnnexFsckDbLock u r = gitAnnexFsckDir u r </> "fsck.lck"
{- .git/annex/fsckresults/uuid is used to store results of git fscks -}
gitAnnexFsckResultsLog :: UUID -> Git.Repo -> FilePath
gitAnnexFsckResultsLog u r = gitAnnexDir r </> "fsckresults" </> fromUUID u
{- .git/annex/export/uuid/ is used to store information about
- exports to special remotes. -}
gitAnnexExportDir :: UUID -> Git.Repo -> FilePath
gitAnnexExportDir u r = gitAnnexDir r </> "export" </> fromUUID u
{- Directory containing database used to record export info. -}
gitAnnexExportDbDir :: UUID -> Git.Repo -> FilePath
gitAnnexExportDbDir u r = gitAnnexExportDir u r </> "db"
{- Lock file for export state for a special remote. -}
gitAnnexExportLock :: UUID -> Git.Repo -> FilePath
gitAnnexExportLock u r = gitAnnexExportDbDir u r ++ ".lck"
{- .git/annex/schedulestate is used to store information about when
- scheduled jobs were last run. -}
gitAnnexScheduleState :: Git.Repo -> FilePath
gitAnnexScheduleState r = gitAnnexDir r </> "schedulestate"
{- .git/annex/creds/ is used to store credentials to access some special
- remotes. -}
gitAnnexCredsDir :: Git.Repo -> FilePath
gitAnnexCredsDir r = addTrailingPathSeparator $ gitAnnexDir r </> "creds"
{- .git/annex/certificate.pem and .git/annex/key.pem are used by the webapp
- when HTTPS is enabled -}
gitAnnexWebCertificate :: Git.Repo -> FilePath
gitAnnexWebCertificate r = gitAnnexDir r </> "certificate.pem"
gitAnnexWebPrivKey :: Git.Repo -> FilePath
gitAnnexWebPrivKey r = gitAnnexDir r </> "privkey.pem"
{- .git/annex/feeds/ is used to record per-key (url) state by importfeeds -}
gitAnnexFeedStateDir :: Git.Repo -> FilePath
gitAnnexFeedStateDir r = addTrailingPathSeparator $ gitAnnexDir r </> "feedstate"
gitAnnexFeedState :: Key -> Git.Repo -> FilePath
gitAnnexFeedState k r = gitAnnexFeedStateDir r </> keyFile k
{- .git/annex/merge/ is used as a empty work tree for direct mode merges and
- merges in adjusted branches. -}
gitAnnexMergeDir :: Git.Repo -> FilePath
gitAnnexMergeDir r = addTrailingPathSeparator $ gitAnnexDir r </> "merge"
{- .git/annex/transfer/ is used to record keys currently
- being transferred, and other transfer bookkeeping info. -}
gitAnnexTransferDir :: Git.Repo -> FilePath
gitAnnexTransferDir r = addTrailingPathSeparator $ gitAnnexDir r </> "transfer"
{- .git/annex/journal/ is used to journal changes made to the git-annex
- branch -}
gitAnnexJournalDir :: Git.Repo -> FilePath
gitAnnexJournalDir r = addTrailingPathSeparator $ gitAnnexDir r </> "journal"
{- Lock file for the journal. -}
gitAnnexJournalLock :: Git.Repo -> FilePath
gitAnnexJournalLock r = gitAnnexDir r </> "journal.lck"
{- Lock file for the pre-commit hook. -}
gitAnnexPreCommitLock :: Git.Repo -> FilePath
gitAnnexPreCommitLock r = gitAnnexDir r </> "precommit.lck"
{- Lock file for direct mode merge. -}
gitAnnexMergeLock :: Git.Repo -> FilePath
gitAnnexMergeLock r = gitAnnexDir r </> "merge.lck"
{- .git/annex/index is used to stage changes to the git-annex branch -}
gitAnnexIndex :: Git.Repo -> FilePath
gitAnnexIndex r = gitAnnexDir r </> "index"
{- Holds the ref of the git-annex branch that the index was last updated to.
-
- The .lck in the name is a historical accident; this is not used as a
- lock. -}
gitAnnexIndexStatus :: Git.Repo -> FilePath
gitAnnexIndexStatus r = gitAnnexDir r </> "index.lck"
{- The index file used to generate a filtered branch view._-}
gitAnnexViewIndex :: Git.Repo -> FilePath
gitAnnexViewIndex r = gitAnnexDir r </> "viewindex"
{- File containing a log of recently accessed views. -}
gitAnnexViewLog :: Git.Repo -> FilePath
gitAnnexViewLog r = gitAnnexDir r </> "viewlog"
{- List of refs that have already been merged into the git-annex branch. -}
gitAnnexMergedRefs :: Git.Repo -> FilePath
gitAnnexMergedRefs r = gitAnnexDir r </> "mergedrefs"
{- List of refs that should not be merged into the git-annex branch. -}
gitAnnexIgnoredRefs :: Git.Repo -> FilePath
gitAnnexIgnoredRefs r = gitAnnexDir r </> "ignoredrefs"
{- Pid file for daemon mode. -}
gitAnnexPidFile :: Git.Repo -> FilePath
gitAnnexPidFile r = gitAnnexDir r </> "daemon.pid"
{- Pid lock file for pidlock mode. -}
gitAnnexPidLockFile :: Git.Repo -> FilePath
gitAnnexPidLockFile r = gitAnnexDir r </> "pidlock"
{- Status file for daemon mode. -}
gitAnnexDaemonStatusFile :: Git.Repo -> FilePath
gitAnnexDaemonStatusFile r = gitAnnexDir r </> "daemon.status"
{- Log file for daemon mode. -}
gitAnnexLogFile :: Git.Repo -> FilePath
gitAnnexLogFile r = gitAnnexDir r </> "daemon.log"
{- Log file for fuzz test. -}
gitAnnexFuzzTestLogFile :: Git.Repo -> FilePath
gitAnnexFuzzTestLogFile r = gitAnnexDir r </> "fuzztest.log"
{- Html shim file used to launch the webapp. -}
gitAnnexHtmlShim :: Git.Repo -> FilePath
gitAnnexHtmlShim r = gitAnnexDir r </> "webapp.html"
{- File containing the url to the webapp. -}
gitAnnexUrlFile :: Git.Repo -> FilePath
gitAnnexUrlFile r = gitAnnexDir r </> "url"
{- Temporary file used to edit configuration from the git-annex branch. -}
gitAnnexTmpCfgFile :: Git.Repo -> FilePath
gitAnnexTmpCfgFile r = gitAnnexDir r </> "config.tmp"
{- .git/annex/ssh/ is used for ssh connection caching -}
gitAnnexSshDir :: Git.Repo -> FilePath
gitAnnexSshDir r = addTrailingPathSeparator $ gitAnnexDir r </> "ssh"
{- .git/annex/remotes/ is used for remote-specific state. -}
gitAnnexRemotesDir :: Git.Repo -> FilePath
gitAnnexRemotesDir r = addTrailingPathSeparator $ gitAnnexDir r </> "remotes"
{- This is the base directory name used by the assistant when making
- repositories, by default. -}
gitAnnexAssistantDefaultDir :: FilePath
gitAnnexAssistantDefaultDir = "annex"
{- Prepares a String to be used as part of a Key's keyName, dealing with
 - characters that cause problems.
 -
 - This is used when a new Key is initially being generated, eg by getKey.
 - Unlike keyFile and fileKey, it does not need to be a reversable
 - escaping. Also, it's ok to change this to add more problematic
 - characters later. Unlike changing keyFile, which could result in the
 - filenames used for existing keys changing and contents getting lost.
 -
 - It is, however, important that the input and output of this function
 - have a 1:1 mapping, to avoid two different inputs from mapping to the
 - same key.
 -}
preSanitizeKeyName :: String -> String
preSanitizeKeyName = preSanitizeKeyName' False

preSanitizeKeyName' :: Bool -> String -> String
preSanitizeKeyName' resanitize = concatMap esc
  where
    esc c
        -- ASCII letters and digits pass through unchanged.
        | isAscii c && isAlphaNum c = [c]
        -- ".-_" are common and assumed safe; "/%:" are handled by keyFile.
        | c `elem` ".-_" = [c]
        | c `elem` "/%:" = [c]
        -- "," escapes other characters, so a literal comma is doubled
        -- (unless we are re-sanitizing already-escaped input).
        | c == ',' = if resanitize then "," else ",,"
        -- Everything else becomes "," followed by its decimal code point.
        | otherwise = ',' : show (ord c)

-- | Converts a keyName that was sanitized with an old version of
-- preSanitizeKeyName to be sanitized with the new version.
reSanitizeKeyName :: String -> String
reSanitizeKeyName = preSanitizeKeyName' True
{- Converts a key into a filename fragment without any directory.
 -
 - Escape "/" in the key name, to keep a flat tree of files and avoid
 - issues with keys containing "/../" or ending with "/" etc.
 -
 - "/" is escaped to "%" because it's short and rarely used, and resembles
 - a slash
 - "%" is escaped to "&s", and "&" to "&a"; this ensures that the mapping
 - is one to one.
 - ":" is escaped to "&c", because it seemed like a good idea at the time.
 -
 - Changing what this function escapes and how is not a good idea, as it
 - can cause existing objects to get lost.
 -}
keyFile :: Key -> FilePath
keyFile = concatMap esc . key2file
  where
    -- One-to-one escaping; see the comment above for the rationale.
    esc c = case c of
        '&' -> "&a"
        '%' -> "&s"
        ':' -> "&c"
        '/' -> "%"
        _   -> [c]
{- Reverses keyFile, converting a filename fragment (ie, the basename of
 - the symlink target) into a key. -}
fileKey :: FilePath -> Maybe Key
fileKey = file2key . unesc []
  where
    -- Tail-recursive unescape; r accumulates the output in reverse.
    unesc r [] = reverse r
    -- "%" decodes to "/"
    unesc r ('%':cs) = unesc ('/':r) cs
    -- "&c" decodes to ":", "&s" to "%", "&a" to "&"
    unesc r ('&':'c':cs) = unesc (':':r) cs
    unesc r ('&':'s':cs) = unesc ('%':r) cs
    unesc r ('&':'a':cs) = unesc ('&':r) cs
    unesc r (c:cs) = unesc (c:r) cs
{- for quickcheck -}
-- | Round-trip property: fileKey must invert keyFile for any keyName.
prop_isomorphic_fileKey :: String -> Bool
prop_isomorphic_fileKey s
    | null s = True -- it's not legal for a key to have no keyName
    | otherwise= Just k == fileKey (keyFile k)
    where
        k = stubKey { keyName = s, keyVariety = OtherKey "test" }
{- A location to store a key on a special remote that uses a filesystem.
 - A directory hash is used, to protect against filesystems that dislike
 - having many items in a single directory.
 -
 - The file is put in a directory with the same name, this allows
 - write-protecting the directory to avoid accidental deletion of the file.
 -}
keyPath :: Key -> Hasher -> FilePath
keyPath key hasher = hashdir </> file </> file
  where
    hashdir = hasher key
    file = keyFile key
{- All possible locations to store a key in a special remote
 - using different directory hashes.
 -
 - This is compatible with the annexLocations, for interoperability between
 - special remotes and git-annex repos.
 -}
keyPaths :: Key -> [FilePath]
keyPaths key = [ keyPath key (mkHasher def) | mkHasher <- dirHashes ]
| null | https://raw.githubusercontent.com/RichiH/git-annex/bbcad2b0af8cd9264d0cb86e6ca126ae626171f3/Annex/Locations.hs | haskell | Conventions:
-
- Functions ending in "Dir" should always return values ending with a
- trailing path separator. Most code does not rely on that, but a few
- things do.
-
- Everything else should not end in a trailing path sepatator.
-
- Only functions (with names starting with "git") that build a path
- based on a git repository should return full path relative to the git
- repository. Everything else returns path segments.
The directory git annex uses for local state, relative to the .git
- directory
The directory git annex uses for locally available object content,
- relative to the .git directory
Number of subdirectories from the gitAnnexObjectDir
- to the gitAnnexLocation.
Bare repositories default to hashDirLower for new
- content, as it's more portable. But check all locations.
Repositories on crippled filesystems use hashDirLower
- for new content, unless symlinks are supported too.
- Then hashDirMixed is used. But, the content could be
- in either location so check both.
Calculates a symlink target to link a file to an annexed object.
Calculates a symlink target as would be used in a typical git
- repository, with .git in the top of the work tree.
File used to lock a key's content.
File that maps from a key to the file(s) in the git repository.
- Used in direct mode.
File that caches information about a key's content, used to determine
- if a file has changed.
- Used in direct mode.
The annex directory of a repository.
The part of the annex directory where file contents are stored.
.git/annex/misctmp/ is used for random temp files
.git/annex/tmp/ is used for temp files for key's contents
The temp file to use for a given key's content.
.git/annex/bad/ is used for bad files found during fsck
The bad file to use for a given key.
.git/annex/foounused is used to number possibly unused keys
.git/annex/keys/ contains a database of information about keys.
Lock file for the keys database.
.git/annex/fsck/uuid/ is used to store information about incremental
- fscks.
used to store information about incremental fscks.
Directory containing database used to record fsck info.
Lock file for the fsck database.
.git/annex/fsckresults/uuid is used to store results of git fscks
.git/annex/export/uuid/ is used to store information about
- exports to special remotes.
Directory containing database used to record export info.
Lock file for export state for a special remote.
.git/annex/schedulestate is used to store information about when
- scheduled jobs were last run.
.git/annex/creds/ is used to store credentials to access some special
- remotes.
.git/annex/certificate.pem and .git/annex/key.pem are used by the webapp
- when HTTPS is enabled
.git/annex/feeds/ is used to record per-key (url) state by importfeeds
.git/annex/merge/ is used as a empty work tree for direct mode merges and
- merges in adjusted branches.
.git/annex/transfer/ is used to record keys currently
- being transferred, and other transfer bookkeeping info.
.git/annex/journal/ is used to journal changes made to the git-annex
- branch
Lock file for the journal.
Lock file for the pre-commit hook.
Lock file for direct mode merge.
.git/annex/index is used to stage changes to the git-annex branch
Holds the ref of the git-annex branch that the index was last updated to.
-
- The .lck in the name is a historical accident; this is not used as a
- lock.
The index file used to generate a filtered branch view._
File containing a log of recently accessed views.
List of refs that have already been merged into the git-annex branch.
List of refs that should not be merged into the git-annex branch.
Status file for daemon mode.
Log file for daemon mode.
Log file for fuzz test.
Html shim file used to launch the webapp.
File containing the url to the webapp.
.git/annex/ssh/ is used for ssh connection caching
.git/annex/remotes/ is used for remote-specific state.
This is the base directory name used by the assistant when making
- repositories, by default.
common, assumed safe
handled by keyFile
, is safe and uncommon, so will be used to escape
other characters. By itself, it is escaped to
doubled form.
Converts a keyName that has been santizied with an old version of
- preSanitizeKeyName to be sanitized with the new version.
Reverses keyFile, converting a filename fragment (ie, the basename of
- the symlink target) into a key.
it's not legal for a key to have no keyName
A location to store a key on a special remote that uses a filesystem.
- A directory hash is used, to protect against filesystems that dislike
- having many items in a single directory.
-
- The file is put in a directory with the same name, this allows
- write-protecting the directory to avoid accidental deletion of the file.
All possibile locations to store a key in a special remote
- using different directory hashes.
-
- This is compatible with the annexLocations, for interoperability between
- special remotes and git-annex repos.
| git - annex file locations
-
- Copyright 2010 - 2015 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2010-2015 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Annex.Locations (
keyFile,
fileKey,
keyPaths,
keyPath,
annexDir,
objectDir,
gitAnnexLocation,
gitAnnexLocationDepth,
gitAnnexLink,
gitAnnexLinkCanonical,
gitAnnexContentLock,
gitAnnexMapping,
gitAnnexInodeCache,
gitAnnexInodeSentinal,
gitAnnexInodeSentinalCache,
annexLocations,
gitAnnexDir,
gitAnnexObjectDir,
gitAnnexTmpMiscDir,
gitAnnexTmpObjectDir,
gitAnnexTmpObjectLocation,
gitAnnexBadDir,
gitAnnexBadLocation,
gitAnnexUnusedLog,
gitAnnexKeysDb,
gitAnnexKeysDbLock,
gitAnnexFsckState,
gitAnnexFsckDbDir,
gitAnnexFsckDbLock,
gitAnnexFsckResultsLog,
gitAnnexExportDbDir,
gitAnnexExportLock,
gitAnnexScheduleState,
gitAnnexTransferDir,
gitAnnexCredsDir,
gitAnnexWebCertificate,
gitAnnexWebPrivKey,
gitAnnexFeedStateDir,
gitAnnexFeedState,
gitAnnexMergeDir,
gitAnnexJournalDir,
gitAnnexJournalLock,
gitAnnexPreCommitLock,
gitAnnexMergeLock,
gitAnnexIndex,
gitAnnexIndexStatus,
gitAnnexViewIndex,
gitAnnexViewLog,
gitAnnexMergedRefs,
gitAnnexIgnoredRefs,
gitAnnexPidFile,
gitAnnexPidLockFile,
gitAnnexDaemonStatusFile,
gitAnnexLogFile,
gitAnnexFuzzTestLogFile,
gitAnnexHtmlShim,
gitAnnexUrlFile,
gitAnnexTmpCfgFile,
gitAnnexSshDir,
gitAnnexRemotesDir,
gitAnnexAssistantDefaultDir,
HashLevels(..),
hashDirMixed,
hashDirLower,
preSanitizeKeyName,
reSanitizeKeyName,
prop_isomorphic_fileKey
) where
import Data.Char
import Data.Default
import Common
import Key
import Types.Key
import Types.UUID
import Types.GitConfig
import Types.Difference
import qualified Git
import qualified Git.Types as Git
import Git.FilePath
import Annex.DirHashes
import Annex.Fixup
-- | The directory git-annex uses under .git, relative to the git directory.
annexDir :: FilePath
annexDir = addTrailingPathSeparator "annex"

-- | The directory, relative to the git directory, where annexed object
-- content lives.
objectDir :: FilePath
objectDir = addTrailingPathSeparator $ annexDir </> "objects"
{- Annexed file's possible locations relative to the .git directory.
 - There are two different possibilities, using different hashes.
 -
 - Also, some repositories have a Difference in hash directory depth. -}
annexLocations :: GitConfig -> Key -> [FilePath]
annexLocations config key = map (annexLocation config key) dirHashes

-- | One possible location, built with the given directory hash function.
annexLocation :: GitConfig -> Key -> (HashLevels -> Hasher) -> FilePath
annexLocation config key hasher = objectDir </> keyPath key (hasher $ objectHashLevels config)
-- | Number of path components below the object directory that make up an
-- annexed object's location: the configured hash levels plus the
-- per-key directory.
gitAnnexLocationDepth :: GitConfig -> Int
gitAnnexLocationDepth config = hashlevels + 1
  where
    HashLevels hashlevels = objectHashLevels config
{- Annexed object's location in a repository.
 -
 - When there are multiple possible locations, returns the one where the
 - file is actually present.
 -
 - When the file is not present, returns the location where the file should
 - be stored.
 -
 - This does not take direct mode into account, so in direct mode it is not
 - the actual location of the file's content. -}
gitAnnexLocation :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexLocation key r config = gitAnnexLocation' key r config (annexCrippledFileSystem config) (coreSymlinks config) doesFileExist (Git.localGitDir r)
-- | Worker for 'gitAnnexLocation', parameterized on the filesystem
-- properties and on the file-presence checker so callers can control
-- whether the disk is consulted.
gitAnnexLocation' :: Key -> Git.Repo -> GitConfig -> Bool -> Bool -> (FilePath -> IO Bool) -> FilePath -> IO FilePath
gitAnnexLocation' key r config crippled symlinkssupported checker gitdir
    | Git.repoIsLocalBare r = checkall
    | hasDifference ObjectHashLower (annexDifferences config) =
        only hashDirLower
    | crippled = if symlinkssupported
        then check $ map inrepo $ reverse $ annexLocations config key
        else checkall
    {- Regular repositories only use hashDirMixed, so
     - don't need to do any work to check if the file is
     - present. -}
    | otherwise = only hashDirMixed
  where
    only = return . inrepo . annexLocation config key
    checkall = check $ map inrepo $ annexLocations config key
    inrepo d = gitdir </> d
    -- falls back to the first candidate when none exists on disk
    check locs@(l:_) = fromMaybe l <$> firstM checker locs
    check [] = error "internal"
-- | Calculates the symlink target to use to link a work-tree file to a
-- key's annexed content. Note that the checker passed to
-- gitAnnexLocation' always succeeds, so the default location is used,
-- keeping generated links consistent.
gitAnnexLink :: FilePath -> Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexLink file key r config = do
    currdir <- getCurrentDirectory
    let absfile = absNormPathUnix currdir file
    let gitdir = getgitdir currdir
    loc <- gitAnnexLocation' key r config False False (\_ -> return True) gitdir
    toInternalGitPath <$> relPathDirToFile (parentDir absfile) loc
  where
    getgitdir currdir
        {- This special case is for git submodules on filesystems not
         - supporting symlinks; generate link target that will
         - work portably. -}
        | not (coreSymlinks config) && needsSubmoduleFixup r =
            absNormPathUnix currdir $ Git.repoPath r </> ".git"
        | otherwise = Git.localGitDir r
    absNormPathUnix d p = toInternalGitPath $
        absPathFrom (toInternalGitPath d) (toInternalGitPath p)
-- | Like gitAnnexLink, but generates the link target as if the repository
-- were in a canonical form: not on a crippled filesystem and with symlink
-- support, and with the git directory placed inside any configured
-- worktree.
gitAnnexLinkCanonical :: FilePath -> Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexLinkCanonical file key r config = gitAnnexLink file key r' config'
  where
    -- Fix: the location pattern must name its constructor and bind the
    -- location; the previous "Git.location = { ... }" did not parse and
    -- left l undefined in the record update.
    r' = case r of
        Git.Repo { Git.location = l@(Git.Local { Git.worktree = Just wt }) } ->
            r { Git.location = l { Git.gitdir = wt </> ".git" } }
        _ -> r
    config' = config
        { annexCrippledFileSystem = False
        , coreSymlinks = True
        }
-- | Lock file paired with a key's content location (location ++ ".lck").
gitAnnexContentLock :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexContentLock key r config = (++ ".lck") <$> gitAnnexLocation key r config

-- | Mapping file paired with a key's content location (location ++ ".map").
gitAnnexMapping :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexMapping key r config = (++ ".map") <$> gitAnnexLocation key r config

-- | Inode cache file paired with a key's content location
-- (location ++ ".cache").
gitAnnexInodeCache :: Key -> Git.Repo -> GitConfig -> IO FilePath
gitAnnexInodeCache key r config = (++ ".cache") <$> gitAnnexLocation key r config
-- | Sentinal file kept in the annex directory. NOTE(review): "sentinal"
-- [sic] is the on-disk spelling; do not correct it.
gitAnnexInodeSentinal :: Git.Repo -> FilePath
gitAnnexInodeSentinal r = gitAnnexDir r </> "sentinal"

-- | Cache file paired with the inode sentinal (path ++ ".cache").
gitAnnexInodeSentinalCache :: Git.Repo -> FilePath
gitAnnexInodeSentinalCache r = gitAnnexInodeSentinal r ++ ".cache"
-- | The .git/annex/ directory of a repository.
gitAnnexDir :: Git.Repo -> FilePath
gitAnnexDir r = addTrailingPathSeparator $ Git.localGitDir r </> annexDir

-- | The objects directory under .git/annex/.
gitAnnexObjectDir :: Git.Repo -> FilePath
gitAnnexObjectDir r = addTrailingPathSeparator $ Git.localGitDir r </> objectDir

gitAnnexTmpMiscDir :: Git.Repo -> FilePath
gitAnnexTmpMiscDir r = addTrailingPathSeparator $ gitAnnexDir r </> "misctmp"

gitAnnexTmpObjectDir :: Git.Repo -> FilePath
gitAnnexTmpObjectDir r = addTrailingPathSeparator $ gitAnnexDir r </> "tmp"

-- | Location of a key under the tmp object directory.
gitAnnexTmpObjectLocation :: Key -> Git.Repo -> FilePath
gitAnnexTmpObjectLocation key r = gitAnnexTmpObjectDir r </> keyFile key

gitAnnexBadDir :: Git.Repo -> FilePath
gitAnnexBadDir r = addTrailingPathSeparator $ gitAnnexDir r </> "bad"

gitAnnexBadLocation :: Key -> Git.Repo -> FilePath
gitAnnexBadLocation key r = gitAnnexBadDir r </> keyFile key

-- | The prefix distinguishes multiple unused logs (eg per remote).
gitAnnexUnusedLog :: FilePath -> Git.Repo -> FilePath
gitAnnexUnusedLog prefix r = gitAnnexDir r </> (prefix ++ "unused")

gitAnnexKeysDb :: Git.Repo -> FilePath
gitAnnexKeysDb r = gitAnnexDir r </> "keys"

gitAnnexKeysDbLock :: Git.Repo -> FilePath
gitAnnexKeysDbLock r = gitAnnexKeysDb r ++ ".lck"

-- | Per-UUID directory under .git/annex/fsck/.
gitAnnexFsckDir :: UUID -> Git.Repo -> FilePath
gitAnnexFsckDir u r = gitAnnexDir r </> "fsck" </> fromUUID u

gitAnnexFsckState :: UUID -> Git.Repo -> FilePath
gitAnnexFsckState u r = gitAnnexFsckDir u r </> "state"

gitAnnexFsckDbDir :: UUID -> Git.Repo -> FilePath
gitAnnexFsckDbDir u r = gitAnnexFsckDir u r </> "db"

gitAnnexFsckDbLock :: UUID -> Git.Repo -> FilePath
gitAnnexFsckDbLock u r = gitAnnexFsckDir u r </> "fsck.lck"

gitAnnexFsckResultsLog :: UUID -> Git.Repo -> FilePath
gitAnnexFsckResultsLog u r = gitAnnexDir r </> "fsckresults" </> fromUUID u

-- | Per-UUID directory under .git/annex/export/.
gitAnnexExportDir :: UUID -> Git.Repo -> FilePath
gitAnnexExportDir u r = gitAnnexDir r </> "export" </> fromUUID u

gitAnnexExportDbDir :: UUID -> Git.Repo -> FilePath
gitAnnexExportDbDir u r = gitAnnexExportDir u r </> "db"

gitAnnexExportLock :: UUID -> Git.Repo -> FilePath
gitAnnexExportLock u r = gitAnnexExportDbDir u r ++ ".lck"

gitAnnexScheduleState :: Git.Repo -> FilePath
gitAnnexScheduleState r = gitAnnexDir r </> "schedulestate"

gitAnnexCredsDir :: Git.Repo -> FilePath
gitAnnexCredsDir r = addTrailingPathSeparator $ gitAnnexDir r </> "creds"

gitAnnexWebCertificate :: Git.Repo -> FilePath
gitAnnexWebCertificate r = gitAnnexDir r </> "certificate.pem"

gitAnnexWebPrivKey :: Git.Repo -> FilePath
gitAnnexWebPrivKey r = gitAnnexDir r </> "privkey.pem"

gitAnnexFeedStateDir :: Git.Repo -> FilePath
gitAnnexFeedStateDir r = addTrailingPathSeparator $ gitAnnexDir r </> "feedstate"

gitAnnexFeedState :: Key -> Git.Repo -> FilePath
gitAnnexFeedState k r = gitAnnexFeedStateDir r </> keyFile k

gitAnnexMergeDir :: Git.Repo -> FilePath
gitAnnexMergeDir r = addTrailingPathSeparator $ gitAnnexDir r </> "merge"

gitAnnexTransferDir :: Git.Repo -> FilePath
gitAnnexTransferDir r = addTrailingPathSeparator $ gitAnnexDir r </> "transfer"

gitAnnexJournalDir :: Git.Repo -> FilePath
gitAnnexJournalDir r = addTrailingPathSeparator $ gitAnnexDir r </> "journal"

gitAnnexJournalLock :: Git.Repo -> FilePath
gitAnnexJournalLock r = gitAnnexDir r </> "journal.lck"

gitAnnexPreCommitLock :: Git.Repo -> FilePath
gitAnnexPreCommitLock r = gitAnnexDir r </> "precommit.lck"

-- | Lock file for direct mode merge.
gitAnnexMergeLock :: Git.Repo -> FilePath
gitAnnexMergeLock r = gitAnnexDir r </> "merge.lck"

-- | .git/annex/index is used to stage changes to the git-annex branch.
gitAnnexIndex :: Git.Repo -> FilePath
gitAnnexIndex r = gitAnnexDir r </> "index"

-- | Holds the ref of the git-annex branch that the index was last updated
-- to. The .lck in the name is a historical accident; this is not used as
-- a lock.
gitAnnexIndexStatus :: Git.Repo -> FilePath
gitAnnexIndexStatus r = gitAnnexDir r </> "index.lck"

-- | The index file used to generate a filtered branch view.
gitAnnexViewIndex :: Git.Repo -> FilePath
gitAnnexViewIndex r = gitAnnexDir r </> "viewindex"

-- | File containing a log of recently accessed views.
gitAnnexViewLog :: Git.Repo -> FilePath
gitAnnexViewLog r = gitAnnexDir r </> "viewlog"

-- | List of refs that have already been merged into the git-annex branch.
gitAnnexMergedRefs :: Git.Repo -> FilePath
gitAnnexMergedRefs r = gitAnnexDir r </> "mergedrefs"

-- | List of refs that should not be merged into the git-annex branch.
gitAnnexIgnoredRefs :: Git.Repo -> FilePath
gitAnnexIgnoredRefs r = gitAnnexDir r </> "ignoredrefs"

-- | Pid file for daemon mode.
gitAnnexPidFile :: Git.Repo -> FilePath
gitAnnexPidFile r = gitAnnexDir r </> "daemon.pid"

-- | Pid lock file for pidlock mode.
gitAnnexPidLockFile :: Git.Repo -> FilePath
gitAnnexPidLockFile r = gitAnnexDir r </> "pidlock"

-- | Status file for daemon mode.
gitAnnexDaemonStatusFile :: Git.Repo -> FilePath
gitAnnexDaemonStatusFile r = gitAnnexDir r </> "daemon.status"

-- | Log file for daemon mode.
gitAnnexLogFile :: Git.Repo -> FilePath
gitAnnexLogFile r = gitAnnexDir r </> "daemon.log"

-- | Log file for fuzz test.
gitAnnexFuzzTestLogFile :: Git.Repo -> FilePath
gitAnnexFuzzTestLogFile r = gitAnnexDir r </> "fuzztest.log"

-- | Html shim file used to launch the webapp.
gitAnnexHtmlShim :: Git.Repo -> FilePath
gitAnnexHtmlShim r = gitAnnexDir r </> "webapp.html"

-- | File containing the url to the webapp.
gitAnnexUrlFile :: Git.Repo -> FilePath
gitAnnexUrlFile r = gitAnnexDir r </> "url"

-- | Temporary file used to edit configuration from the git-annex branch.
gitAnnexTmpCfgFile :: Git.Repo -> FilePath
gitAnnexTmpCfgFile r = gitAnnexDir r </> "config.tmp"

-- | .git/annex/ssh/ is used for ssh connection caching.
gitAnnexSshDir :: Git.Repo -> FilePath
gitAnnexSshDir r = addTrailingPathSeparator $ gitAnnexDir r </> "ssh"

-- | .git/annex/remotes/ is used for remote-specific state.
gitAnnexRemotesDir :: Git.Repo -> FilePath
gitAnnexRemotesDir r = addTrailingPathSeparator $ gitAnnexDir r </> "remotes"

-- | This is the base directory name used by the assistant when making
-- repositories, by default.
gitAnnexAssistantDefaultDir :: FilePath
gitAnnexAssistantDefaultDir = "annex"
{- Sanitizes a String that will be used as part of a Key's keyName,
 - dealing with characters that cause problems.
 -
 - This is used when a new Key is initially being generated, eg by getKey.
 - Unlike keyFile and fileKey, it does not need to be a reversable
 - escaping. Also, it's ok to change this to add more problematic
 - characters later. Unlike changing keyFile, which could result in the
 - filenames used for existing keys changing and contents getting lost.
 -
 - It is, however, important that the input and output of this function
 - have a 1:1 mapping, to avoid two different inputs from mapping to the
 - same key. -}
preSanitizeKeyName :: String -> String
preSanitizeKeyName = preSanitizeKeyName' False

preSanitizeKeyName' :: Bool -> String -> String
preSanitizeKeyName' resanitize = concatMap escape
  where
    escape c
        | isAsciiUpper c || isAsciiLower c || isDigit c = [c]
        -- Fix: these two pass-through guards had been dropped, causing
        -- common safe characters to be needlessly escaped, and "/%:" to
        -- be double-escaped after keyFile.
        | c `elem` ".-_ " = [c] -- common, assumed safe
        | c `elem` "/%:" = [c] -- handled by keyFile
        -- "," is safe and uncommon, so it is used to escape other
        -- characters. By itself, it is escaped to doubled form.
        | c == ',' = if not resanitize
            then ",,"
            else ","
        | otherwise = ',' : show (ord c)

-- | Converts a keyName that has been sanitized with an old version of
-- preSanitizeKeyName to be sanitized with the new version.
reSanitizeKeyName :: String -> String
reSanitizeKeyName = preSanitizeKeyName' True
{- Converts a key into a filename fragment without any directory.
 -
 - Escape "/" in the key name, to keep a flat tree of files and avoid
 - issues with keys containing "/../" or ending with "/" etc.
 -
 - "/" is escaped to "%" because it's short and rarely used, and resembles
 - a slash
 - "%" is escaped to "&s", and "&" to "&a"; this ensures that the mapping
 - is one to one.
 - ":" is escaped to "&c", because it seemed like a good idea at the time.
 -
 - Changing what this function escapes and how is not a good idea, as it
 - can cause existing objects to get lost. -}
keyFile :: Key -> FilePath
keyFile = concatMap esc . key2file
  where
    esc c = case c of
        '&' -> "&a"
        '%' -> "&s"
        ':' -> "&c"
        '/' -> "%"
        _ -> [c]
-- | Reverses keyFile, converting a filename fragment (ie, the basename of
-- the symlink target) into a key.
fileKey :: FilePath -> Maybe Key
fileKey = file2key . decode
  where
    -- undo the escaping performed by keyFile, one escape at a time
    decode [] = []
    decode ('%':rest) = '/' : decode rest
    decode ('&':'c':rest) = ':' : decode rest
    decode ('&':'s':rest) = '%' : decode rest
    decode ('&':'a':rest) = '&' : decode rest
    decode (ch:rest) = ch : decode rest
-- | For quickcheck: fileKey inverts keyFile for any legal keyName.
prop_isomorphic_fileKey :: String -> Bool
prop_isomorphic_fileKey s
    -- Fix: restore the empty-string guard; it's not legal for a key to
    -- have no keyName, so the property must not be checked for "".
    | null s = True
    | otherwise = Just k == fileKey (keyFile k)
  where
    k = stubKey { keyName = s, keyVariety = OtherKey "test" }
{- A location to store a key on a special remote that uses a filesystem.
 - A directory hash is used, to protect against filesystems that dislike
 - having many items in a single directory.
 -
 - The file is put in a directory with the same name; this allows
 - write-protecting the directory to avoid accidental deletion of the
 - file. -}
keyPath :: Key -> Hasher -> FilePath
keyPath key hasher = hasher key </> kf </> kf
  where
    kf = keyFile key

{- All possible locations to store a key in a special remote using the
 - different directory hashes.
 -
 - This is compatible with the annexLocations, for interoperability
 - between special remotes and git-annex repos. -}
keyPaths :: Key -> [FilePath]
keyPaths key = [ keyPath key (h def) | h <- dirHashes ]
|
ee50eea5491ab9c9d32aa0c45df802a0348e22a29afaa9965547ee3b08e10eb6 | alesaccoia/festival_flinger | build_clunits.scm | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; ;;;
Carnegie Mellon University ; ; ;
and and ; ; ;
Copyright ( c ) 1998 - 2005 ; ; ;
All Rights Reserved . ; ; ;
;;; ;;;
;;; Permission is hereby granted, free of charge, to use and distribute ;;;
;;; this software and its documentation without restriction, including ;;;
;;; without limitation the rights to use, copy, modify, merge, publish, ;;;
;;; distribute, sublicense, and/or sell copies of this work, and to ;;;
;;; permit persons to whom this work is furnished to do so, subject to ;;;
;;; the following conditions: ;;;
1 . The code must retain the above copyright notice , this list of ; ; ;
;;; conditions and the following disclaimer. ;;;
2 . Any modifications must be clearly marked as such . ; ; ;
3 . Original authors ' names are not deleted . ; ; ;
4 . The authors ' names are not used to endorse or promote products ; ; ;
;;; derived from this software without specific prior written ;;;
;;; permission. ;;;
;;; ;;;
CARNEGIE MELLON UNIVERSITY AND THE CONTRIBUTORS TO THIS WORK ; ; ;
;;; DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;;
;;; ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;;
SHALL CARNEGIE MELLON UNIVERSITY NOR THE CONTRIBUTORS BE LIABLE ; ; ;
;;; FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;;
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , IN ; ; ;
;;; AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;;
;;; ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;;
;;; THIS SOFTWARE. ;;;
;;; ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; ;;;
;;; Code for building data for prompts, aligning and unit selection ;;;
;;; synthesizer ;;;
;;; ;;;
;;; This file is only used at database build time ;;;
;;; ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Root directory of this voice database build.
(defvar cmu_us_rxr::dir ".")
(require 'clunits_build)
;;; Basic voice definition file with voice defines and
;;; parameter definition for run time.
(load "festvox/cmu_us_rxr_clunits.scm")
;; Feature description file used when clustering units.
(defvar cluster_feature_filename "all.desc")
(defvar split_long_silences t) ;; good for unit selection
;;; Add Build time parameters
;; Fix: several commented-out example parameters had lost their ";;"
;; markers, which both broke loading this file and would have injected a
;; bogus coeffs_ext value; they are restored as comments below.
(set! cmu_us_rxr::dt_params
  (cons
   ;; in case cmu_us_rxr_clunits defines this too, put this at start
   (list 'db_dir (string-append cmu_us_rxr::dir "/"))
   (append
    cmu_us_rxr::dt_params
    (list
     ;;; In cmu_us_rxr_clunits.scm
     ;;'(coeffs_dir "lpc/")
     ;;'(coeffs_ext ".lpc")
     '(disttabs_dir "festival/disttabs/")
     '(utts_dir "festival/utts/")
     '(utts_ext ".utt")
     '(dur_pen_weight 0.0)
     '(f0_pen_weight 0.0)
     '(get_stds_per_unit t)
     '(ac_left_context 0.8)
     '(ac_weights
       (0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5))
     ;; Join weights in cmu_us_rxr_clunits.scm
     ;; Features for extraction
     '(feats_dir "festival/feats/")
     ;; Feats as defined in all.desc
     (list
      'feats
      (mapcar car (car (load (format nil "festival/clunits/%s"
                                     cluster_feature_filename) t))))
     ;; Wagon tree building params
     ;; (trees_dir "festival/trees/")  ;; in cmu_us_rxr_clunits.scm
     (list
      'wagon_field_desc
      (format nil "festival/clunits/%s"
              cluster_feature_filename))
     '(wagon_progname "$ESTDIR/bin/wagon")
     '(wagon_cluster_size 20)
     '(prune_reduce 0)
     '(cluster_prune_limit 40)
     ;; The dictionary of units used at run time
     ;; (catalogue_dir "festival/clunits/")  ;; in cmu_us_rxr_clunits.scm
     ;; Run time parameters
     ;; all in cmu_us_rxr_clunits.scm
     ;; Files in db, filled in at build_clunits time
     ;; (files ("time0001" "time0002" ....))
     ))))
(define (build_clunits file)
  "(build_clunits file)
Build cluster synthesizer for the given recorded data and domain."
  (build_clunits_init file)
  ;; runs the whole clustering build
  (do_all) ;; someday I'll change the name of this function
  )
(define (build_clunits_init file)
  "(build_clunits_init file)
Get setup ready for (do_all) (or (do_init))."
  (eval (list cmu_us_rxr::closest_voice))
  ;; select this voice's phoneset, tokenizer, tagger and lexicon
  (cmu_us_rxr::select_phoneset)
  (cmu_us_rxr::select_tokenizer)
  (cmu_us_rxr::select_tagger)
  (cmu_us_rxr::select_lexicon)
  ;; Add specific fileids to the list for this run
  (set! cmu_us_rxr::dt_params
        (append
         cmu_us_rxr::dt_params
         (list
          (list
           'files
           (mapcar car (load file t))))))
  ;; expose the parameters under the names the build tools expect
  (set! dt_params cmu_us_rxr::dt_params)
  (set! clunits_params cmu_us_rxr::dt_params)
  )
(define (do_prompt name text)
  "(do_prompt name text)
Synthesize given text and save waveform and labels for prompts."
  (let ((utt1 (utt.synth (eval (list 'Utterance 'Text text)))))
    (utt.save utt1 (format nil "prompt-utt/%s.utt" name))
    (utt.save.segs utt1 (format nil "prompt-lab/%s.lab" name))
    ;; only save a waveform when synthesis actually produced one
    (if (member_string "Wave" (utt.relationnames utt1))
        (utt.save.wave utt1 (format nil "prompt-wav/%s.wav" name)))
    t))
(define (build_prompts_waves file)
  "(build_prompt file)
For each utterances in prompt file, synth and save waveform and
labels for prompts and aligning."
  (set! cmu_us_rxr::clunits_prompting_stage t)
  (voice_cmu_us_rxr_clunits)
  (let ((p (load file t)))
    (mapcar
     (lambda (l)
       (format t "%s PROMPTS with waves\n" (car l))
       ;; unwind-protect so one failing prompt does not abort the run
       (unwind-protect
        (do_prompt (car l) (cadr l))
        nil)
       t)
     p)
    t))
(define (find_silence_name)
  "(find_silence_name)
Look up the phoneset's silence phone and write its name to etc/silence."
  (set! cmu_us_rxr::clunits_prompting_stage t)
  (voice_cmu_us_rxr_clunits)
  (set! silence (car (cadr (car (PhoneSet.description '(silences))))))
  (set! sfd (fopen "etc/silence" "w"))
  (format sfd "%s\n" silence)
  (fclose sfd)
  )
(define (build_prompts file)
  "(build_prompt file)
For each utterances in prompt file, synth and save waveform and
labels for prompts and aligning."
  (set! cmu_us_rxr::clunits_prompting_stage t)
  (voice_cmu_us_rxr_clunits)
  ;; no waveform synthesis here, only the utterance structure and labels
  (Parameter.set 'Synth_Method 'None)
  (let ((p (load file t)))
    (mapcar
     (lambda (l)
       (format t "%s PROMPTS\n" (car l))
       (unwind-protect
        (do_prompt (car l) (cadr l))
        nil)
       t)
     p)
    t))
(define (build_utts file)
  "(build_utts file)
For each utterances in prompt file, synthesize and merge aligned labels
to predicted labels building a new utetrances and saving it."
  (set! cmu_us_rxr::clunits_prompting_stage t)
  (voice_cmu_us_rxr_clunits)
  (let ((p (load file t)))
    (mapcar
     (lambda (l)
       (format t "%s UTTS\n" (car l))
       (unwind-protect
        ;; the optional third element of each prompt entry is a list of
        ;; (name value) features to set on the utterance
        (let ((featlist (caddr l)))
          (if (and (consp featlist)
                   (consp (car featlist)))
              (align_utt (car l) (cadr l) featlist)
              (align_utt (car l) (cadr l) nil)))
        nil)
       t)
     p)
    t))
(define (align_utt name text featlist)
  "(align_utts file)
Synth an utterance and load in the actualed aligned segments and merge
them into the synthesizer utterance. featlist is a list of tuples of
features (name-value pairs) to set in the utterance"
  (let ((utt1 (utt.load nil (format nil "prompt-utt/%s.utt" name)))
        ;(utt1 (utt.synth (eval (list 'Utterance 'Text text))))
        (silence (car (cadr (car (PhoneSet.description '(silences))))))
        segments actual-segments)
    (set! my_silence silence)
    ;; hand (or auto) aligned labels from lab/
    (utt.relation.load utt1 'actual-segment
                       (format nil "lab/%s.lab" name))
    (set! segments (utt.relation.items utt1 'Segment))
    (set! actual-segments (utt.relation.items utt1 'actual-segment))
    ;; These should align, but if the labels had to be hand edited
    ;; then they may not, we cater here for insertions and deletions
    ;; of silences int he corrected hand labelled files (actual-segments)
    ;; If you need to something more elaborate you'll have to change the
    ;; code below.
    (while (and segments actual-segments)
      (cond
       ;; label prefixed with "#": junk unit that is to be ignored
       ((string-equal (string-append "#" (item.name (car segments)))
                      (item.name (car actual-segments)))
        (item.set_feat (car segments) "end"
                       (item.feat (car actual-segments) "end"))
        (item.set_feat (car segments) "ignore" "1")
        (set! segments (cdr segments))
        (set! actual-segments (cdr actual-segments)))
       ;; silence inserted by the labeller: insert matching silence segment
       ((and (not (string-equal (item.name (car segments))
                                (item.name (car actual-segments))))
             (or (string-equal (item.name (car actual-segments)) silence)
                 (string-equal (item.name (car actual-segments)) "ssil")
                 (string-equal (item.name (car actual-segments)) "H#")
                 (string-equal (item.name (car actual-segments)) "h#")))
        (item.insert
         (car segments)
         (list silence (list (list "end" (item.feat
                                          (car actual-segments) "end"))))
         'before)
        (set! actual-segments (cdr actual-segments)))
       ;; silence deleted by the labeller: drop the predicted one
       ((and (not (string-equal (item.name (car segments))
                                (item.name (car actual-segments))))
             (string-equal (item.name (car segments)) silence))
        (item.delete (car segments))
        (set! segments (cdr segments)))
       ;; names agree: copy the actual end time over
       ((string-equal (item.name (car segments))
                      (item.name (car actual-segments)))
        (item.set_feat (car segments) "end"
                       (item.feat (car actual-segments) "end"))
        (set! segments (cdr segments))
        (set! actual-segments (cdr actual-segments)))
       (t
        (format stderr
                "align missmatch at %s (%f) %s (%f)\n"
                (item.name (car segments))
                (item.feat (car segments) "end")
                (item.name (car actual-segments))
                (item.feat (car actual-segments) "end"))
        (error)))
      )
    (mapcar
     (lambda (a)
       ;; shorten and split sliences
       (while (and (string-equal (item.name a) silence)
                   (> (item.feat a "segment_duration") 0.300))
         ; (format t "splitting %s silence of %f at %f\n"
         ;    (item.name a)
         ;    (item.feat a "segment_duration")
         ;    (item.feat a "end"))
         (cond
          ((string-equal "h#" (item.feat a "p.name"))
           (item.set_feat (item.prev a) "end"
                          (+ 0.150 (item.feat a "p.end"))))
          ((and (string-equal silence (item.feat a "p.name"))
                (string-equal silence (item.feat a "p.p.name")))
           (item.set_feat (item.prev a) "end"
                          (+ 0.150 (item.feat a "p.end")))
           (item.set_feat (item.prev a) "name" silence))
          (t
           (item.insert a
                        (list silence
                              (list
                               (list "end"
                                     (+ 0.150
                                        (item.feat a "p.end")))))
                        'before)))))
     (if split_long_silences
         (utt.relation.items utt1 'Segment)
         nil))
    (utt.relation.delete utt1 'actual-segment)
    (utt.set_feat utt1 "fileid" name)
    ;; apply any per-utterance features given in the prompt file
    (mapcar
     (lambda (feattuple)
       ;(format t "Setting Feat: %s %s\n" (car feattuple) (cadr feattuple))
       (utt.set_feat utt1 (car feattuple) (cadr feattuple)))
     featlist)
    ;; If we have an F0 add in targets too
    ;; This breaks builds more than it helps them
    ; (if (probe_file (format nil "f0/%s.f0" name))
    ; (build::add_targets utt1))
    (rephrase utt1)
    (utt.save utt1 (format nil "festival/utts/%s.utt" name))
    t))
;; Name of the silence phone; reset from the phoneset by align_utt.
(defvar my_silence "pau")
(define (pau_duration s)
  "(pau_duration s)
Total duration of the run of consecutive silence segments starting at s."
  (cond
   ((null s) 0.0)
   ((string-equal my_silence (item.name s))
    (+ (item.feat s "segment_duration")
       (pau_duration (item.next s))))
   (t
    0.0)))
(define (rephrase utt)
  "(rephrase utt)
remove phrasing and recreate it based on the silences in the segment stream."
  (let ((silence (car (cadr (car (PhoneSet.description '(silences)))))))
    (utt.relation.delete utt 'Phrase)
    (utt.relation.create utt 'Phrase)
    (set! topphrase nil)
    (mapcar
     (lambda (w)
       ;; start a new phrase when there is no current one
       (if (null topphrase)
           (begin
             (set! topphrase (utt.relation.append utt 'Phrase nil))
             (item.set_feat topphrase "name" "B")))
       (item.relation.append_daughter topphrase 'Phrase w)
       ;; end the phrase after a word followed by >80ms of silence
       (if (and (item.next w)
                (string-equal
                 silence
                 (item.feat
                  w "R:SylStructure.daughtern.daughtern.R:Segment.n.name"))
                (> (item.feat
                    w
                    "R:SylStructure.daughtern.daughtern.R:Segment.n.lisp_pau_duration")
                   0.080))
           (set! topphrase nil))
       )
     (utt.relation.items utt 'Word))
    ;; Not sure if last phrase should get a BB or not
    (if topphrase
        (item.set_feat topphrase "name" "BB"))
    )
  )
(define (rebuild_utts file)
  "(rebuild_utts file)
Rebuild the utterances from the label files (lab, syl, wrd, phr). Used
after hand correction, or when files come from somewhere else."
  (set! cmu_us_rxr::clunits_prompting_stage t)
  (voice_cmu_us_rxr_clunits)
  (Parameter.set 'Synth_Method 'None)
  (let ((p (load file t)))
    (mapcar
     (lambda (l)
       (format t "rebuild %s UTTS\n" (car l))
       (unwind-protect
        (align_utt_rebuild (car l) (cadr l))
        nil)
       t)
     p)
    t))
(define (align_utt_rebuild name text)
  "(align_utts file)
Synth an utterance and load in the actualed aligned segments and merge
them into the synthesizer utterance."
  ;; Fix: restore the (let ...) binding form. The opening "(let (" had
  ;; been swallowed along with the commented-out utt.load binding,
  ;; leaving the binding forms orphaned and the define unparseable.
  (let (;(utt1 (utt.load nil (format nil "prompt-utt/%s.utt" name)))
        (utt1 (utt.synth (eval (list 'Utterance 'Text text))))
        (silence (car (cadr (car (PhoneSet.description '(silences))))))
        segments actual-segments)
    (utt.relation.load utt1 'actual-segment
                       (format nil "lab/%s.lab" name))
    (if (probe_file (format nil "wrd/%s.wrd" name))
        ;; There more structure already on disk so adopt it
        (build_word_structure utt1 name))
    (if (probe_file (format nil "phr/%s.phr" name))
        ;; There more structure already on disk so adopt it
        (build_phrase_structure utt1 name))
    (set! segments (utt.relation.items utt1 'Segment))
    (set! actual-segments (utt.relation.items utt1 'actual-segment))
    ;; These should align, but if the labels had to be hand edited
    ;; then they may not, we cater here for insertions and deletions
    ;; of silences in the corrected hand labelled files (actual-segments)
    ;; If you need to something more elaborate you'll have to change the
    ;; code below.
    (while (and segments actual-segments)
      (cond
       ((string-equal (string-append "#" (item.name (car segments)))
                      (item.name (car actual-segments)))
        ;; junk unit that is to be ignored
        (item.set_feat (car segments) "end"
                       (item.feat (car actual-segments) "end"))
        (item.set_feat (car segments) "ignore" "1")
        (set! segments (cdr segments))
        (set! actual-segments (cdr actual-segments)))
       ((and (not (string-equal (item.name (car segments))
                                (item.name (car actual-segments))))
             (or (string-equal (item.name (car actual-segments)) silence)
                 (string-equal (item.name (car actual-segments)) "H#")
                 (string-equal (item.name (car actual-segments)) "h#")))
        (item.insert
         (car segments)
         (list silence (list (list "end" (item.feat
                                          (car actual-segments) "end"))))
         'before)
        (set! actual-segments (cdr actual-segments)))
       ((and (not (string-equal (item.name (car segments))
                                (item.name (car actual-segments))))
             (string-equal (item.name (car segments)) silence))
        (item.delete (car segments))
        (set! segments (cdr segments)))
       ((string-equal (item.name (car segments))
                      (item.name (car actual-segments)))
        (item.set_feat (car segments) "end"
                       (item.feat (car actual-segments) "end"))
        (set! segments (cdr segments))
        (set! actual-segments (cdr actual-segments)))
       (t
        (format stderr
                "align missmatch at %s (%f) %s (%f)\n"
                (item.name (car segments))
                (item.feat (car segments) "end")
                (item.name (car actual-segments))
                (item.feat (car actual-segments) "end"))
        (error)))
      )
    (mapcar
     (lambda (a)
       ;; shorten and split sliences
       (while (and (string-equal (item.name a) silence)
                   (> (item.feat a "segment_duration") 0.300))
         (cond
          ((string-equal "h#" (item.feat a "p.name"))
           (item.set_feat (item.prev a) "end"
                          (+ 0.150 (item.feat a "p.end"))))
          ((and (string-equal silence (item.feat a "p.name"))
                (string-equal silence (item.feat a "p.p.name")))
           (item.set_feat (item.prev a) "end"
                          (+ 0.150 (item.feat a "p.end")))
           (item.set_feat (item.prev a) "name" silence))
          (t
           (item.insert a
                        (list silence
                              (list
                               (list "end"
                                     (+ 0.150
                                        (item.feat a "p.end")))))
                        'before)))))
     (utt.relation.items utt1 'Segment))
    (utt.relation.delete utt1 'actual-segment)
    (utt.set_feat utt1 "fileid" name)
    ;; If we have an F0 add in targets too
    (if (probe_file (format nil "f0/%s.f0" name))
        (build::add_targets utt1))
    (utt.save utt1 (format nil "festival/utts/%s.utt" name))
    (cl.utt.save.syllables utt1 (format nil "syl/%s.syl" name))
    (cl.utt.save.words utt1 (format nil "wrd/%s.wrd" name))
    (cl.utt.save.phr utt1 (format nil "phr/%s.phr" name))
    t))
(define (build_labs file)
  "(build_utts file)
For each utterances in prompt file, synthesize and merge aligned labels
to predicted labels building a new utetrances and saving it."
  (let ((p (load file t)))
    (mapcar
     (lambda (l)
       (let ((name (car l)))
         (format t "%s\n" (car l))
         ;; dump lab/syl/wrd/phr label files from the saved utterance
         (set! utt1 (utt.load nil (format nil "festival/utts/%s.utt" name)))
         (cl.utt.save.labs utt1 (format nil "lab/%s.lab" name))
         (cl.utt.save.syllables utt1 (format nil "syl/%s.syl" name))
         (cl.utt.save.words utt1 (format nil "wrd/%s.wrd" name))
         (cl.utt.save.phr utt1 (format nil "phr/%s.phr" name))))
     p)))
(define (cl.utt.save.labs utt filename)
  "(utt.save.syllables UTT FILE)
Save syllables of UTT in a FILE in xlabel format."
  (let ((fd (fopen filename "w")))
    (format fd "#\n")
    (mapcar
     (lambda (s)
       ;; segments flagged "ignore" are written with a leading #
       (format fd "%2.4f 100 %s%s\n"
               (item.feat s "segment_end")
               (if (assoc 'ignore (item.features s))
                   "#"
                   "")
               (item.name s))
       )
     (utt.relation.items utt 'Segment))
    (fclose fd)
    utt))
(define (cl.safe_end i endname)
  "(cl.safe_end i endname)
Return i's endname feature; when it is 0, fall back to the nearest
previous item with a nonzero value."
  (let ((e (item.feat i endname)))
    (if (and (equal? 0 e) (item.prev i))
        (cl.safe_end (item.prev i) endname)
        e)))
(define (build_word_structure utt name)
  "(build_word_structure utt)
Build Word and Syllable, SylStructure and Segments from the labels
on disk."
  (let (wrd syl seg)
    (utt.relation.delete utt 'Word)
    ;; NOTE(review): 'SylSructure below looks like a typo for
    ;; 'SylStructure -- left unchanged here; verify whether
    ;; utt.relation.create later tolerates a pre-existing relation.
    (utt.relation.delete utt 'SylSructure)
    (utt.relation.delete utt 'Syllable)
    (utt.relation.delete utt 'Segment)
    (utt.relation.delete utt 'Phrase)
    (utt.relation.load utt 'Word (format nil "wrd/%s.wrd" name))
    (utt.relation.load utt 'Syllable (format nil "syl/%s.syl" name))
    (utt.relation.load utt 'Segment (format nil "lab/%s.lab" name))
    ;; syllable labels carry the stress value; normalize the name
    (mapcar
     (lambda (syl)
       (let ((s (item.name syl)))
         (item.set_feat syl "stress" s)
         (item.set_name syl "syl")))
     (utt.relation.items utt 'Syllable))
    ;; Syllable and Word have h# so drop them
    (if (string-equal "h#" (item.name (utt.relation.first utt 'Syllable)))
        (item.delete (utt.relation.first utt 'Syllable)))
    (if (string-equal "h#" (item.name (utt.relation.first utt 'Word)))
        (item.delete (utt.relation.first utt 'Word)))
    (if (string-equal "h#" (item.name (utt.relation.first utt 'Segment)))
        (item.delete (utt.relation.first utt 'Segment)))
    (utt.relation.create utt 'SylStructure)
    (utt.relation.create utt 'Phrase)
    (set! phr nil)
    (set! syl (utt.relation.first utt 'Syllable))
    (set! seg (utt.relation.first utt 'Segment))
    (set! p_sylend 0)
    (set! p_segend 0)
    ;; walk the words, gathering syllables (by midpoint of end times)
    ;; under each word, and segments under each syllable
    (mapcar
     (lambda (w)
       (if (not phr)
           (set! phr (utt.relation.append utt 'Phrase)))
       (item.relation.append_daughter phr 'Phrase w)
       (set! end (item.feat w "end"))
       (item.remove_feature w "end")
       (set! sw (utt.relation.append utt 'SylStructure w))
       (while (and syl (< (/ (+ (item.feat syl "end") p_sylend) 2.0) end))
         (set! sylend (item.feat syl "end"))
         (item.remove_feature syl "end")
         (set! ss (item.relation.append_daughter sw 'SylStructure syl))
         (while (and seg (< (/ (+ (item.feat seg "end") p_segend) 2.0) sylend))
           (if (string-matches (item.name seg) "#.*")
               (begin
                 (item.set_feat seg "ignore" "1")
                 (item.set_name seg (string-after (item.name seg) "#"))))
           (if (string-matches (item.name seg) "%.*")
               (begin
                 (item.set_feat seg "ignore" "1")
                 (item.set_name seg (string-after (item.name seg) "%"))))
           (if (not (phone_is_silence (item.name seg)))
               (item.relation.append_daughter ss 'SylStructure seg))
           (set! p_segend (item.feat seg "end"))
           (set! seg (item.next seg)))
         (set! p_sylend sylend)
         (set! syl (item.next syl)))
       ;; a silence (or end of segments) closes the current phrase
       (if (or (null seg) (phone_is_silence (item.name seg)))
           (set! phr nil))
       )
     (utt.relation.items utt 'Word))
    ;; We need to fix up *all* segment names to remove #, not just
    ;; ones that fall within syllables (it was breaking for pauses)
    (mapcar
     (lambda (seg)
       (if (string-matches (item.name seg) "#.*")
           (begin
             (item.set_feat seg "ignore" "1")
             (item.set_name seg (string-after (item.name seg) "#"))))
       (if (string-matches (item.name seg) "%.*")
           (begin
             (item.set_feat seg "ignore" "1")
             (item.set_name seg (string-after (item.name seg) "%")))))
     (utt.relation.items utt 'Segment))
    (set! ls (utt.relation.last utt 'Segment))
    (if (or (not ls) (not (phone_is_silence (item.name ls))))
        (format t "final phone is not silence %s\n" (item.name ls)))
    ;; Some day we'll get these from files
    (utt.relation.delete utt 'Intonation)
    (utt.relation.delete utt 'IntEvent)
    ; (find_lexical_stress utt)
    (Intonation utt)
    ; (Duration utt)
    (Int_Targets utt)
    )
  )
(define (build_phrase_structure utt name)
  "(build_phrase_structure utt)
This builds phrasing, but is done in a different function from
from word structure as this may (for historical reasons) not
exist on all dbs."
  (let (phr wrd)
    (utt.relation.delete utt 'Phrase)
    (utt.relation.load utt 'Phrase (format nil "phr/%s.phr" name))
    (item.delete (utt.relation.first utt 'Phrase)) ;; delete h#
    ;; keep the original label as phrase_type, rename breaks to B
    (mapcar
     (lambda (p)
       (item.set_feat p "phrase_type" (item.name p))
       (item.set_name p "B"))
     (utt.relation.items utt 'Phrase))
    (item.set_name (utt.relation.last utt 'Phrase) "BB")
    (set! wrd (utt.relation.first utt 'Word))
    (set! phr (utt.relation.first utt 'Phrase))
    (set! p_wordend 0)
    ;; assign words to phrases by comparing end-time midpoints
    (while phr
      (while (and wrd (< (/ (+ (item.feat wrd "word_end")
                               (item.feat wrd "p.word_end")) 2.0)
                         (item.feat phr "end")))
        (item.relation.append_daughter phr 'Phrase wrd)
        (set! wrd (item.next wrd)))
      (item.remove_feature phr "end")
      (set! phr (item.next phr)))
    ))
(define (cl.utt.save.syllables utt filename)
  "(cl.utt.save.syllables UTT FILE)
Save syllables of UTT in FILE in xlabel format: end time, the fixed
colour field 100, then the stress digit and (if any) the accent mark,
preceded by an initial h# line up to the first syllable."
  (let ((fd (fopen filename "w")))
    (format fd "#\n")
    ;; Leading silence covers everything before the first syllable.
    (format fd "%2.4f 100 h#\n"
            (item.feat (utt.relation.first utt 'Syllable) "syllable_start"))
    (mapcar
     (lambda (syllable)
       (let ((acc (item.feat syllable "accentedness")))
         (format fd "%2.4f 100 %s%s\n"
                 (cl.safe_end syllable "syllable_end")
                 (item.feat syllable "stress")
                 ;; "none"/"0" mean unaccented: emit nothing for them.
                 (if (or (string-equal "none" acc)
                         (string-equal "0" acc))
                     ""
                     acc))))
     (utt.relation.items utt 'Syllable))
    (fclose fd)
    utt))
(define (cl.utt.save.words utt filename)
  "(cl.utt.save.words UTT FILE)
Save words of UTT in FILE in xlabel format: end time, the fixed
colour field 100 and the lowercased word name, preceded by an initial
h# line up to the first word."
  (let ((fd (fopen filename "w"))
        (words (utt.relation.items utt 'Word)))
    (format fd "#\n")
    ;; Leading silence covers everything before the first word.
    (format fd "%2.4f 100 h#\n"
            (item.feat (utt.relation.first utt 'Word) "word_start"))
    (mapcar
     (lambda (word)
       (format fd "%2.4f 100 %s\n"
               (cl.safe_end word "word_end")
               (downcase (item.name word))))
     words)
    (fclose fd)
    utt))
(define (cl.utt.save.phr utt filename)
  "(cl.utt.save.phr UTT FILE)
Save phrases of UTT in FILE in xlabel format: each phrase's end time,
the fixed colour field 100, and an H/L label (anything other than an
explicit H or L name is written as L)."
  ;; plab is now bound locally -- it was previously set! without a
  ;; binding, silently creating/overwriting a global.
  (let ((fd (fopen filename "w"))
        (phrs (utt.relation.first utt 'Phrase))
        (plab "L"))
    (format fd "#\n")
    ;; Leading silence up to the start of the very first segment of
    ;; the first phrase's first word.
    (format fd "%2.4f 100 h#\n"
            (item.feat phrs
                       "daughter1.R:SylStructure.daughter1.daughter1.segment_start"))
    (while phrs
      (if (member_string (item.name phrs) '("H" "L"))
          (set! plab (item.name phrs))
          (set! plab "L"))
      (format fd "%2.4f 100 %s\n"
              (item.feat phrs "daughtern.word_end")
              plab)
      (set! phrs (item.next phrs)))
    (fclose fd)
    utt))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Some prosody modeling code
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define (build::add_targets utt)
  "(build::add_targets utt)
Adds targets based on the F0 in f0/*.f0.  Adds a point to each vowel."
  (let ((fileid (utt.feat utt "fileid"))
        (f0_points))
    (set! f0_points (build::load_f0_points fileid))
    ;; Keep a global copy of the points around for debugging.
    (set! awb_f0_points f0_points)
    ;; Get rid of the old one
    (utt.relation.delete utt 'Target)
    ;; Create a new one
    (utt.relation.create utt 'Target)
    (build::add_target
     utt
     f0_points)
    utt))
(define (build::add_target utt f0_points)
  "(build::add_target utt f0_points)
Add F0 points at the start of each syllable, the mid point of each
vowel, and at the last segment before a silence.  Continuing the F0
over non-voiced periods in this way is naive, but adequate for
building purposes."
  (let ((s (utt.relation.first utt 'Segment))
        (f0s f0_points)
        targ)
    (while s
      ;; Only non-silence segments that are syllable-initial, vowels,
      ;; or immediately before a silence get a Target node.
      (if (and (not (member_string
                     (item.name s)
                     (cadr (car (PhoneSet.description '(silences))))))
               (or (string-equal "1" (item.feat s "syl_initial"))
                   (string-equal "+" (item.feat s "ph_vc"))
                   (member_string
                    (item.feat s "n.name")
                    (cadr (car (PhoneSet.description '(silences)))))))
          (begin
            (set! targ (utt.relation.append utt 'Target s))
            ;; Point at the segment start for syllable-initial segments.
            (if (string-equal "1" (item.feat s "syl_initial"))
                (item.relation.append_daughter
                 targ
                 'Target
                 (list
                  "0"
                  (list
                   (list 'f0 (build::get_f0_at f0s (item.feat s "segment_start")))
                   (list 'pos (item.feat s "segment_start"))))))
            ;; Point at the segment midpoint for vowels.
            (if (string-equal "+" (item.feat s "ph_vc"))
                (item.relation.append_daughter
                 targ
                 'Target
                 (list
                  "0"
                  (list
                   (list 'f0 (build::get_f0_at f0s (item.feat s "segment_mid")))
                   (list 'pos (item.feat s "segment_mid"))))))
            ;; Point at the segment end when the next segment is silence.
            (if (member_string
                 (item.feat s "n.name")
                 (cadr (car (PhoneSet.description '(silences)))))
                (item.relation.append_daughter
                 targ
                 'Target
                 (list
                  "0"
                  (list
                   (list 'f0 (build::get_f0_at f0s (item.feat s "segment_end")))
                   (list 'pos (item.feat s "segment_end"))))))))
      (set! s (item.next s))
      ))
  )
(define (build::get_f0_at f0s position)
  "(build::get_f0_at f0s position)
Return the non-zero F0 value nearest to POSITION in the (time value)
list F0S.  Seeds the recursive search with -1, meaning `no F0 seen
yet'."
  (build::get_f0_at_2 -1 f0s position))
(define (build::get_f0_at_2 f0 f0s position)
  "(build::get_f0_at_2 f0 f0s position)
Walk the (time value) list F0S looking for the frame interval that
contains POSITION; F0 carries the most recent non-zero value seen so
far.  Falls back to the first non-zero F0 at/after POSITION, or 110Hz
when no voiced frame exists at all."
  (cond
   ((null f0s)
    (if (> f0 0)
        f0
        110 ;; aint nothing there at all at all
        ))
   (t
    ;; Remember the last voiced (non-zero) frame seen before POSITION.
    ;; Fixed: was (> 0 value), which can never fire for real
    ;; (non-negative) F0 values, so f0 stayed at its -1 seed.
    (if (> (cadr (car f0s)) 0)
        (set! f0 (cadr (car f0s))))
    (cond
     ;; Last frame left: there is no next frame time to compare
     ;; against (the original would take car of nil here).
     ((null (cdr f0s))
      (if (< f0 1)
          (build::find_first_f0 f0s)
          f0))
     ((and (>= position (car (car f0s)))
           (<= position (car (cadr f0s))))
      (if (< f0 1)
          (build::find_first_f0 f0s)
          f0))
     (t
      (build::get_f0_at_2 f0 (cdr f0s) position))))))
(define (build::find_first_f0 f0s)
  "(build::find_first_f0 f0s)
Return the first non-zero F0 value found in the (time value) list
F0S, or 110 as a last resort when none remains."
  (if (null f0s)
      110 ;; last resort
      (let ((v (cadr (car f0s))))
        (if (> v 0)
            v
            (build::find_first_f0 (cdr f0s))))))
(define (build::load_f0_points fileid)
  "(build::load_f0_points fileid)
Extract F0 as ascii times and values from the F0 file and load
it as a simple assoc list of (time value) pairs, assuming a 5ms
frame shift."
  (let ((f0asciifile (make_tmp_filename))
        f0fd point points
        (time 0))
    ;; Dump the binary track as ascii with the Speech Tools ch_track.
    (system
     (format nil "$EST%s/bin/ch_track -otype ascii -o %s f0/%s.f0"
             "DIR" ;; to stop that var name being mapped.
             f0asciifile
             fileid))
    (set! f0fd (fopen f0asciifile "r"))
    (while (not (equal? (set! point (readfp f0fd)) (eof-val)))
      (set! points
            (cons
             (list time point) points))
      (set! time (+ 0.005 time))
      ;; skip the second field.
      (readfp f0fd))
    (fclose f0fd)
    (delete-file f0asciifile)
    (reverse points)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Code to try to find bad labelling by looking at duration distribution
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; A simple sufficient statistics class
;;; A sufficient-statistics record is a three element list:
;;; (count sum sum-of-squares), mutated in place.
(define (suffstats.new)
  "Create a fresh sufficient-statistics record: count, sum and
sum of squares all zero."
  (list 0 0 0))
(define (suffstats.set_n x n)
  (set-car! x n))
(define (suffstats.set_sum x sum)
  (set-car! (cdr x) sum))
(define (suffstats.set_sumx x sumx)
  (set-car! (cddr x) sumx))
(define (suffstats.n x)
  (car x))
(define (suffstats.sum x)
  (cadr x))
(define (suffstats.sumx x)
  (caddr x))
(define (suffstats.reset x)
  "Zero all three fields of the record X in place."
  (suffstats.set_n x 0)
  (suffstats.set_sum x 0)
  (suffstats.set_sumx x 0))
(define (suffstats.add x d)
  "Fold a single observation D into the record X."
  (suffstats.set_sum x (+ (suffstats.sum x) d))
  (suffstats.set_sumx x (+ (suffstats.sumx x) (* d d)))
  (suffstats.set_n x (+ (suffstats.n x) 1)))
(define (suffstats.add_count x d c)
  "Fold C identical observations of value D into the record X."
  (suffstats.set_sum x (+ (suffstats.sum x) (* c d)))
  (suffstats.set_sumx x (+ (suffstats.sumx x) (* c (* d d))))
  (suffstats.set_n x (+ (suffstats.n x) c)))
(define (suffstats.mean x)
  (/ (suffstats.sum x) (suffstats.n x)))
(define (suffstats.variance x)
  "Sample variance of the observations in X (requires n > 1)."
  (let ((n (suffstats.n x))
        (s (suffstats.sum x)))
    (/ (- (* n (suffstats.sumx x)) (* s s))
       (* n (- n 1)))))
(define (suffstats.stddev x)
  (sqrt (suffstats.variance x)))
(define (cummulate_stats stats phone duration)
  "(cummulate_stats stats phone duration)
Fold DURATION into the suffstats entry for PHONE in the alist STATS,
creating a new entry when PHONE has not been seen before.  Returns
the (possibly extended) stats alist."
  (let ((pstat (car (cdr (assoc_string phone stats))))
        (result stats))
    (if (null pstat)
        ;; First observation of this phone: make a record and extend
        ;; the alist.
        (begin
          (set! pstat (suffstats.new))
          (set! result (cons (list phone pstat) stats))))
    (suffstats.add pstat duration)
    result))
(define (collect_dur_stats utts)
  "(collect_dur_stats utts)
Accumulate per-phone duration statistics over every Segment of every
utterance in UTTS, returning the resulting stats alist."
  (let ((stats nil))
    (mapcar
     (lambda (utt)
       (mapcar
        (lambda (seg)
          (set! stats
                (cummulate_stats stats
                                 (item.name seg)
                                 (item.feat seg "segment_duration"))))
        (utt.relation.items utt 'Segment)))
     utts)
    stats))
(define (score_utts utts durstats ofile)
  "(score_utts utts durstats ofile)
Write to OFILE one line per utterance: its fileid, the fraction of
its segments that look mislabelled (absolute duration z-score > 3,
or duration under 11ms), and the raw outlier and segment counts."
  (let ((ofd (fopen ofile "w")))
    (mapcar
     (lambda (u)
       (let ((score 0) (tot 0))
         (format ofd "%s " (utt.feat u "fileid"))
         (mapcar
          (lambda (s)
            (let ((stats (car (cdr (assoc_string (item.name s) durstats))))
                  (dur (item.feat s "segment_duration"))
                  (zscore))
              (set! tot (+ 1 tot))
              (set! zscore (/ (- dur (suffstats.mean stats))
                              (suffstats.stddev stats)))
              ;; Take the absolute value of the z-score.
              (if (< zscore 0)
                  (set! zscore (* -1 zscore)))
              (if (or (< dur 0.011)
                      (> zscore 3))
                  (set! score (+ 1 score)))))
          (utt.relation.items u 'Segment))
         (format ofd "%0.4f %d %d\n"
                 ;; Guard against an utterance with no segments at all
                 ;; (was an unconditional divide by tot).
                 (if (> tot 0) (/ score tot) 0)
                 score
                 tot)))
     utts)
    ;; The original never closed ofd, leaking the descriptor.
    (fclose ofd)
    t))
(define (make_simple_utt fileid)
  "(make_simple_utt fileid)
Build a bare utterance containing only the Segment relation loaded
from lab/<fileid>.lab, tagged with its fileid."
  (let ((utt (Utterance Text "")))
    (utt.set_feat utt "fileid" fileid)
    (utt.relation.load utt 'Segment
                       (format nil "lab/%s.lab" fileid))
    utt))
(define (find_outlier_utts file ofile)
  "(find_outlier_utts file ofile)
Score every utterance listed in FILE for suspicious segment durations
and write the per-file scores to OFILE.  Loads the kal_diphone voice
first so a phone set is available."
  (voice_kal_diphone)
  ;; Fixed: the let previously declared dur_states (typo), so the
  ;; dur_stats actually used below leaked out as a global.
  (let ((p (load file t))
        utts dur_stats)
    (set! utts (mapcar (lambda (l) (make_simple_utt (car l))) p))
    (set! dur_stats (collect_dur_stats utts))
    (score_utts utts dur_stats ofile)
    t))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Display selected units linked back to the units they came from
;;;
;;;
(define (clunits::display_selected utt)
  "(clunits::display_selected utt)
Display UTT's selected units and hard-link each unit's source label
and waveform file into scratch/cl/ so the selection can be inspected
with emulabel.  Uses the globals unum and unit_utt_name."
  (clunits::display utt)
  ;; Rebuild a clean scratch area for the linked files.
  (system (format nil "rm -rf scratch/cl\n"))
  (system (format nil "mkdir scratch/cl\n"))
  (system (format nil "(cd scratch/cl && mkdir wav lab emu emulab_hlb)"))
  (set! unum 0)
  (mapcar
   (lambda (unit)
     ;; Link name encodes selection order, unit name, end time and
     ;; the source fileid, so sorted listings follow selection order.
     (set! unit_utt_name
           (format nil "%03d_%s_%1.3f_%s"
                   unum
                   (item.name unit)
                   (item.feat unit "end")
                   (item.feat unit "fileid")))
     (set! unum (+ 1 unum))
     (system
      (format nil "ln lab/%s.lab scratch/cl/lab/%s.lab\n"
              (item.feat unit "fileid") unit_utt_name))
     (system
      (format nil "ln wav/%s.wav scratch/cl/wav/%s.wav\n"
              (item.feat unit "fileid") unit_utt_name))
     )
   (utt.relation.items utt 'Unit))
  ;; Launch the labeller in the background over the linked files.
  (system (format nil "(cd scratch/cl && emulabel ../../etc/emu_lab &)\n"))
  t
  )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Unit Selection tts
;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define (tts_test ttd odir)
  "(tts_test ttd odir)
Synthesize each (fileid text) pair listed in the prompt file TTD and
save the waveform to ODIR/<fileid>.wav.  Sets the global utt1 to each
synthesis result."
  (mapcar
   (lambda (x)
     (format t "%s tts" (car x))
     ;; NOTE(review): this relies on Festival SIOD's unwind-protect,
     ;; where the second form appears to run only when the first one
     ;; errors -- i.e. " failed" is printed only on a failed synthesis.
     ;; Confirm against the SIOD documentation before changing.
     (unwind-protect
      (begin
        (set! utt1 (SynthText (cadr x)))
        (utt.save.wave utt1 (format nil "%s/%s.wav" odir (car x))))
      (begin
        (format t " failed")))
     (format t "\n"))
   (load ttd t))
  t)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Unit Selection measures
;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define (usm id text)
  "(usm id text)
Unit-selection measure: synthesize TEXT and count how many selected
units come contiguously from the held-out natural utterance ID.
Sets the globals utts (the synthesized utterance, also used by
usm_file) and utta (the natural utterance loaded from
festival/utts/)."
  (set! utts (SynthText text))
  (set! utta (utt.load nil (format nil "festival/utts/%s.utt" id )))
  ;; Commented-out debugging code comparing synthesized vs natural
  ;; segment names:
;  (set! utta (Wave_Synth utta))
;  (mapcar
;   (lambda (x y)
;     (if (not (string-equal (item.name x) (item.name y)))
;         (format t "%s %s %s %f\n" id (item.name x) (item.name y)
;                 (item.feat y "end"))))
;   (utt.relation.items utts 'Segment)
;   (utt.relation.items utta 'Segment))
  ;; A unit scores 1 when it was taken from file ID and either starts
  ;; the selection or directly continues the previous unit from the
  ;; same file (previous unit's end == this unit's start).
  (apply
   +
   (mapcar
    (lambda (u)
      (if (and (string-equal id (item.feat u "fileid"))
               (or (null (item.prev u))
                   (and (string-equal id (item.feat u "p.fileid"))
                        (equal? (item.feat u "unit_start")
                                (item.feat u "p.unit_end")))))
          1
          0
          ))
    (utt.relation.items utts 'Unit)))
  )
(define (usm_file ttd odir)
  "(usm_file ttd odir)
Run the unit-selection measure (usm) over every (fileid text) pair in
the prompt file TTD, saving each synthesized waveform to ODIR and
printing per-file and total continuity counts/percentages.  Relies on
usm setting the global utts; also sets the globals c and us."
  (let ((usm_count 0) (usm_total 0))
    (mapcar
     (lambda (x)
       ;; c = units selected contiguously from this file's own
       ;; recording; us = total units selected for it.
       (set! c (usm (car x) (cadr x)))
       (set! us (length (utt.relation.items utts 'Unit)))
       (format t "USM %s %2.2f %2.2f %2.2f\n" (car x) c
               us
               (* 100.0 (/ c us)))
       (utt.save.wave utts (format nil "%s/%s.wav" odir (car x)))
       (set! usm_count (+ c usm_count))
       (set! usm_total (+ us usm_total))
       )
     (load ttd t))
    ;; Corpus-wide totals and overall continuity percentage.
    (format t "USM_TOTAL %2.2f %2.2f %2.2f\n"
            usm_count
            usm_total
            (* 100.0 (/ usm_count usm_total)))
    )
  )
(provide 'build_clunits)
| null | https://raw.githubusercontent.com/alesaccoia/festival_flinger/87345aad3a3230751a8ff479f74ba1676217accd/lib/voices/us/cmu_us_rxr_cg/festvox/build_clunits.scm | scheme |
;;;
; ;
; ;
; ;
; ;
;;;
Permission is hereby granted, free of charge, to use and distribute ;;;
this software and its documentation without restriction, including ;;;
without limitation the rights to use, copy, modify, merge, publish, ;;;
distribute, sublicense, and/or sell copies of this work, and to ;;;
permit persons to whom this work is furnished to do so, subject to ;;;
the following conditions: ;;;
; ;
conditions and the following disclaimer. ;;;
; ;
; ;
; ;
derived from this software without specific prior written ;;;
permission. ;;;
;;;
; ;
DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;;
ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;;
; ;
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;;
; ;
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;;
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;;
THIS SOFTWARE. ;;;
;;;
;;;
Code for building data for prompts, aligning and unit selection ;;;
synthesizer ;;;
;;;
This file is only used at database build time ;;;
;;;
parameter definition for run time.
good for unit selection
Add Build time parameters
in case cmu_us_rxr_clunits defines this too, put this at start
In cmu_us_rxr_clunits.scm
'(coeffs_dir "lpc/")
Join weights in cmu_us_rxr_clunits.scm
Features for extraction
; in cmu_us_rxr_clunits.scm
The dictionary of units used at run time
; in cmu_us_rxr_clunits.scm
Run time parameters
all in cmu_us_rxr_clunits.scm
Files in db, filled in at build_clunits time
someday I'll change the name of this function
Add specific fileids to the list for this run
(utt1 (utt.synth (eval (list 'Utterance 'Text text))))
These should align, but if the labels had to be hand edited
then they may not, we cater here for insertions and deletions
of silences int he corrected hand labelled files (actual-segments)
If you need to something more elaborate you'll have to change the
code below.
junk unit that is to be ignored
shorten and split sliences
(format t "splitting %s silence of %f at %f\n"
(item.name a)
(item.feat a "segment_duration")
(item.feat a "end"))
If we have an F0 add in targets too
This breaks builds more than it helps them
(if (probe_file (format nil "f0/%s.f0" name))
(build::add_targets utt1))
There more structure already on disk so adopt it
There more structure already on disk so adopt it
These should align, but if the labels had to be hand edited
then they may not, we cater here for insertions and deletions
of silences in the corrected hand labelled files (actual-segments)
If you need to something more elaborate you'll have to change the
code below.
junk unit that is to be ignored
shorten and split sliences
(format t "splitting %s silence of %f at %f\n"
(item.name a)
(item.feat a "segment_duration")
(item.feat a "end"))
If we have an F0 add in targets too
We need to fix up *all* segment names to remove #, not just
ones that fall within syllables (it was breaking for pauses)
(find_lexical_stress utt)
(Duration utt)
delete h#
Some prosody modeling code
aint nothing there at all at all
last resort
to stop that var name being mapped.
Code to try to find bad labelling by looking at duration distribution
A simple sufficient statistics class
n
sum
sumx
Display selected units linked back to the units they came from
Unit Selection tts
Unit Selection measures
(mapcar
(lambda (x y)
(if (not (string-equal (item.name x) (item.name y)))
(item.feat y "end"))))
(utt.relation.items utts 'Segment) |
(defvar cmu_us_rxr::dir ".")
(require 'clunits_build)
Basic voice definition file with voice defines and
(load "festvox/cmu_us_rxr_clunits.scm")
(defvar cluster_feature_filename "all.desc")
(set! cmu_us_rxr::dt_params
(cons
(list 'db_dir (string-append cmu_us_rxr::dir "/"))
(append
cmu_us_rxr::dt_params
(list
' ( coeffs_ext " .lpc " )
'(disttabs_dir "festival/disttabs/")
'(utts_dir "festival/utts/")
'(utts_ext ".utt")
'(dur_pen_weight 0.0)
'(f0_pen_weight 0.0)
'(get_stds_per_unit t)
'(ac_left_context 0.8)
'(ac_weights
(0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5 0.5))
'(feats_dir "festival/feats/")
Feats as defined in all.desc
(list
'feats
(mapcar car (car (load (format nil "festival/clunits/%s"
cluster_feature_filename) t))))
Wagon tree building params
(list
'wagon_field_desc
(format nil "festival/clunits/%s"
cluster_feature_filename))
'(wagon_progname "$ESTDIR/bin/wagon")
'(wagon_cluster_size 20)
'(prune_reduce 0)
'(cluster_prune_limit 40)
( files ( " " " time0002 " .... ) )
))))
(define (build_clunits file)
"(build_clunits file)
Build cluster synthesizer for the given recorded data and domain."
(build_clunits_init file)
)
(define (build_clunits_init file)
"(build_clunits_init file)
Get setup ready for (do_all) (or (do_init))."
(eval (list cmu_us_rxr::closest_voice))
(cmu_us_rxr::select_phoneset)
(cmu_us_rxr::select_tokenizer)
(cmu_us_rxr::select_tagger)
(cmu_us_rxr::select_lexicon)
(set! cmu_us_rxr::dt_params
(append
cmu_us_rxr::dt_params
(list
(list
'files
(mapcar car (load file t))))))
(set! dt_params cmu_us_rxr::dt_params)
(set! clunits_params cmu_us_rxr::dt_params)
)
(define (do_prompt name text)
"(do_prompt name text)
Synthesize given text and save waveform and labels for prompts."
(let ((utt1 (utt.synth (eval (list 'Utterance 'Text text)))))
(utt.save utt1 (format nil "prompt-utt/%s.utt" name))
(utt.save.segs utt1 (format nil "prompt-lab/%s.lab" name))
(if (member_string "Wave" (utt.relationnames utt1))
(utt.save.wave utt1 (format nil "prompt-wav/%s.wav" name)))
t))
(define (build_prompts_waves file)
"(build_prompt file)
For each utterances in prompt file, synth and save waveform and
labels for prompts and aligning."
(set! cmu_us_rxr::clunits_prompting_stage t)
(voice_cmu_us_rxr_clunits)
(let ((p (load file t)))
(mapcar
(lambda (l)
(format t "%s PROMPTS with waves\n" (car l))
(unwind-protect
(do_prompt (car l) (cadr l))
nil)
t)
p)
t))
(define (find_silence_name)
(set! cmu_us_rxr::clunits_prompting_stage t)
(voice_cmu_us_rxr_clunits)
(set! silence (car (cadr (car (PhoneSet.description '(silences))))))
(set! sfd (fopen "etc/silence" "w"))
(format sfd "%s\n" silence)
(fclose sfd)
)
(define (build_prompts file)
"(build_prompt file)
For each utterances in prompt file, synth and save waveform and
labels for prompts and aligning."
(set! cmu_us_rxr::clunits_prompting_stage t)
(voice_cmu_us_rxr_clunits)
(Parameter.set 'Synth_Method 'None)
(let ((p (load file t)))
(mapcar
(lambda (l)
(format t "%s PROMPTS\n" (car l))
(unwind-protect
(do_prompt (car l) (cadr l))
nil)
t)
p)
t))
(define (build_utts file)
"(build_utts file)
For each utterances in prompt file, synthesize and merge aligned labels
to predicted labels building a new utetrances and saving it."
(set! cmu_us_rxr::clunits_prompting_stage t)
(voice_cmu_us_rxr_clunits)
(let ((p (load file t)))
(mapcar
(lambda (l)
(format t "%s UTTS\n" (car l))
(unwind-protect
(let ((featlist (caddr l)))
(if (and (consp featlist)
(consp (car featlist)))
(align_utt (car l) (cadr l) featlist)
(align_utt (car l) (cadr l) nil)))
nil)
t)
p)
t))
(define (align_utt name text featlist)
"(align_utts file)
Synth an utterance and load in the actualed aligned segments and merge
them into the synthesizer utterance. featlist is a list of tuples of
features (name-value pairs) to set in the utterance"
(let ((utt1 (utt.load nil (format nil "prompt-utt/%s.utt" name)))
(silence (car (cadr (car (PhoneSet.description '(silences))))))
segments actual-segments)
(set! my_silence silence)
(utt.relation.load utt1 'actual-segment
(format nil "lab/%s.lab" name))
(set! segments (utt.relation.items utt1 'Segment))
(set! actual-segments (utt.relation.items utt1 'actual-segment))
(while (and segments actual-segments)
(cond
((string-equal (string-append "#" (item.name (car segments)))
(item.name (car actual-segments)))
(item.set_feat (car segments) "end"
(item.feat (car actual-segments) "end"))
(item.set_feat (car segments) "ignore" "1")
(set! segments (cdr segments))
(set! actual-segments (cdr actual-segments)))
((and (not (string-equal (item.name (car segments))
(item.name (car actual-segments))))
(or (string-equal (item.name (car actual-segments)) silence)
(string-equal (item.name (car actual-segments)) "ssil")
(string-equal (item.name (car actual-segments)) "H#")
(string-equal (item.name (car actual-segments)) "h#")))
(item.insert
(car segments)
(list silence (list (list "end" (item.feat
(car actual-segments) "end"))))
'before)
(set! actual-segments (cdr actual-segments)))
((and (not (string-equal (item.name (car segments))
(item.name (car actual-segments))))
(string-equal (item.name (car segments)) silence))
(item.delete (car segments))
(set! segments (cdr segments)))
((string-equal (item.name (car segments))
(item.name (car actual-segments)))
(item.set_feat (car segments) "end"
(item.feat (car actual-segments) "end"))
(set! segments (cdr segments))
(set! actual-segments (cdr actual-segments)))
(t
(format stderr
"align missmatch at %s (%f) %s (%f)\n"
(item.name (car segments))
(item.feat (car segments) "end")
(item.name (car actual-segments))
(item.feat (car actual-segments) "end"))
(error)))
)
(mapcar
(lambda (a)
(while (and (string-equal (item.name a) silence)
(> (item.feat a "segment_duration") 0.300))
(cond
((string-equal "h#" (item.feat a "p.name"))
(item.set_feat (item.prev a) "end"
(+ 0.150 (item.feat a "p.end"))))
((and (string-equal silence (item.feat a "p.name"))
(string-equal silence (item.feat a "p.p.name")))
(item.set_feat (item.prev a) "end"
(+ 0.150 (item.feat a "p.end")))
(item.set_feat (item.prev a) "name" silence))
(t
(item.insert a
(list silence
(list
(list "end"
(+ 0.150
(item.feat a "p.end")))))
'before)))))
(if split_long_silences
(utt.relation.items utt1 'Segment)
nil))
(utt.relation.delete utt1 'actual-segment)
(utt.set_feat utt1 "fileid" name)
(mapcar
(lambda (feattuple)
( format t " Setting Feat : % s % s\n " ( car feattuple ) ( cadr feattuple ) )
(utt.set_feat utt1 (car feattuple) (cadr feattuple)))
featlist)
(rephrase utt1)
(utt.save utt1 (format nil "festival/utts/%s.utt" name))
t))
(defvar my_silence "pau")
(define (pau_duration s)
(cond
((null s) 0.0)
((string-equal my_silence (item.name s))
(+ (item.feat s "segment_duration")
(pau_duration (item.next s))))
(t
0.0)))
(define (rephrase utt)
"(rephrase utt)
remove phrasing and recreate it based on the silences in the segment stream."
(let ((silence (car (cadr (car (PhoneSet.description '(silences)))))))
(utt.relation.delete utt 'Phrase)
(utt.relation.create utt 'Phrase)
(set! topphrase nil)
(mapcar
(lambda (w)
(if (null topphrase)
(begin
(set! topphrase (utt.relation.append utt 'Phrase nil))
(item.set_feat topphrase "name" "B")))
(item.relation.append_daughter topphrase 'Phrase w)
(if (and (item.next w)
(string-equal
silence
(item.feat
w "R:SylStructure.daughtern.daughtern.R:Segment.n.name"))
(> (item.feat
w
"R:SylStructure.daughtern.daughtern.R:Segment.n.lisp_pau_duration")
0.080))
(set! topphrase nil))
)
(utt.relation.items utt 'Word))
Not sure if last phrase should get a BB or not
(if topphrase
(item.set_feat topphrase "name" "BB"))
)
)
(define (rebuild_utts file)
"(rebuild_utts file)
Rebuild the utterances from the label files (lab, syl, wrd, phr). Used
after hand correction, or when files come from somewhere else."
(set! cmu_us_rxr::clunits_prompting_stage t)
(voice_cmu_us_rxr_clunits)
(Parameter.set 'Synth_Method 'None)
(let ((p (load file t)))
(mapcar
(lambda (l)
(format t "rebuild %s UTTS\n" (car l))
(unwind-protect
(align_utt_rebuild (car l) (cadr l))
nil)
t)
p)
t))
(define (align_utt_rebuild name text)
"(align_utts file)
Synth an utterance and load in the actualed aligned segments and merge
them into the synthesizer utterance."
( utt1 ( utt.load nil ( format nil " prompt - utt/%s.utt " name ) ) )
(utt1 (utt.synth (eval (list 'Utterance 'Text text))))
(silence (car (cadr (car (PhoneSet.description '(silences))))))
segments actual-segments)
(utt.relation.load utt1 'actual-segment
(format nil "lab/%s.lab" name))
(if (probe_file (format nil "wrd/%s.wrd" name))
(build_word_structure utt1 name))
(if (probe_file (format nil "phr/%s.phr" name))
(build_phrase_structure utt1 name))
(set! segments (utt.relation.items utt1 'Segment))
(set! actual-segments (utt.relation.items utt1 'actual-segment))
(while (and segments actual-segments)
(cond
((string-equal (string-append "#" (item.name (car segments)))
(item.name (car actual-segments)))
(item.set_feat (car segments) "end"
(item.feat (car actual-segments) "end"))
(item.set_feat (car segments) "ignore" "1")
(set! segments (cdr segments))
(set! actual-segments (cdr actual-segments)))
((and (not (string-equal (item.name (car segments))
(item.name (car actual-segments))))
(or (string-equal (item.name (car actual-segments)) silence)
(string-equal (item.name (car actual-segments)) "H#")
(string-equal (item.name (car actual-segments)) "h#")))
(item.insert
(car segments)
(list silence (list (list "end" (item.feat
(car actual-segments) "end"))))
'before)
(set! actual-segments (cdr actual-segments)))
((and (not (string-equal (item.name (car segments))
(item.name (car actual-segments))))
(string-equal (item.name (car segments)) silence))
(item.delete (car segments))
(set! segments (cdr segments)))
((string-equal (item.name (car segments))
(item.name (car actual-segments)))
(item.set_feat (car segments) "end"
(item.feat (car actual-segments) "end"))
(set! segments (cdr segments))
(set! actual-segments (cdr actual-segments)))
(t
(format stderr
"align missmatch at %s (%f) %s (%f)\n"
(item.name (car segments))
(item.feat (car segments) "end")
(item.name (car actual-segments))
(item.feat (car actual-segments) "end"))
(error)))
)
(mapcar
(lambda (a)
(while (and (string-equal (item.name a) silence)
(> (item.feat a "segment_duration") 0.300))
(cond
((string-equal "h#" (item.feat a "p.name"))
(item.set_feat (item.prev a) "end"
(+ 0.150 (item.feat a "p.end"))))
((and (string-equal silence (item.feat a "p.name"))
(string-equal silence (item.feat a "p.p.name")))
(item.set_feat (item.prev a) "end"
(+ 0.150 (item.feat a "p.end")))
(item.set_feat (item.prev a) "name" silence))
(t
(item.insert a
(list silence
(list
(list "end"
(+ 0.150
(item.feat a "p.end")))))
'before)))))
(utt.relation.items utt1 'Segment))
(utt.relation.delete utt1 'actual-segment)
(utt.set_feat utt1 "fileid" name)
(if (probe_file (format nil "f0/%s.f0" name))
(build::add_targets utt1))
(utt.save utt1 (format nil "festival/utts/%s.utt" name))
(cl.utt.save.syllables utt1 (format nil "syl/%s.syl" name))
(cl.utt.save.words utt1 (format nil "wrd/%s.wrd" name))
(cl.utt.save.phr utt1 (format nil "phr/%s.phr" name))
t))
(define (build_labs file)
"(build_utts file)
For each utterances in prompt file, synthesize and merge aligned labels
to predicted labels building a new utetrances and saving it."
(let ((p (load file t)))
(mapcar
(lambda (l)
(let ((name (car l)))
(format t "%s\n" (car l))
(set! utt1 (utt.load nil (format nil "festival/utts/%s.utt" name)))
(cl.utt.save.labs utt1 (format nil "lab/%s.lab" name))
(cl.utt.save.syllables utt1 (format nil "syl/%s.syl" name))
(cl.utt.save.words utt1 (format nil "wrd/%s.wrd" name))
(cl.utt.save.phr utt1 (format nil "phr/%s.phr" name))))
p)))
(define (cl.utt.save.labs utt filename)
"(utt.save.syllables UTT FILE)
Save syllables of UTT in a FILE in xlabel format."
(let ((fd (fopen filename "w")))
(format fd "#\n")
(mapcar
(lambda (s)
(format fd "%2.4f 100 %s%s\n"
(item.feat s "segment_end")
(if (assoc 'ignore (item.features s))
"#"
"")
(item.name s))
)
(utt.relation.items utt 'Segment))
(fclose fd)
utt))
(define (cl.safe_end i endname)
(let ((e (item.feat i endname)))
(if (and (equal? 0 e) (item.prev i))
(cl.safe_end (item.prev i) endname)
e)))
(define (build_word_structure utt name)
"(build_word_structure utt)
Build Word and Syllable, SylStructure and Segments from the labels
on disk."
(let (wrd syl seg)
(utt.relation.delete utt 'Word)
(utt.relation.delete utt 'SylSructure)
(utt.relation.delete utt 'Syllable)
(utt.relation.delete utt 'Segment)
(utt.relation.delete utt 'Phrase)
(utt.relation.load utt 'Word (format nil "wrd/%s.wrd" name))
(utt.relation.load utt 'Syllable (format nil "syl/%s.syl" name))
(utt.relation.load utt 'Segment (format nil "lab/%s.lab" name))
(mapcar
(lambda (syl)
(let ((s (item.name syl)))
(item.set_feat syl "stress" s)
(item.set_name syl "syl")))
(utt.relation.items utt 'Syllable))
Syllable and Word have h # so drop them
(if (string-equal "h#" (item.name (utt.relation.first utt 'Syllable)))
(item.delete (utt.relation.first utt 'Syllable)))
(if (string-equal "h#" (item.name (utt.relation.first utt 'Word)))
(item.delete (utt.relation.first utt 'Word)))
(if (string-equal "h#" (item.name (utt.relation.first utt 'Segment)))
(item.delete (utt.relation.first utt 'Segment)))
(utt.relation.create utt 'SylStructure)
(utt.relation.create utt 'Phrase)
(set! phr nil)
(set! syl (utt.relation.first utt 'Syllable))
(set! seg (utt.relation.first utt 'Segment))
(set! p_sylend 0)
(set! p_segend 0)
(mapcar
(lambda (w)
(if (not phr)
(set! phr (utt.relation.append utt 'Phrase)))
(item.relation.append_daughter phr 'Phrase w)
(set! end (item.feat w "end"))
(item.remove_feature w "end")
(set! sw (utt.relation.append utt 'SylStructure w))
(while (and syl (< (/ (+ (item.feat syl "end") p_sylend) 2.0) end))
(set! sylend (item.feat syl "end"))
(item.remove_feature syl "end")
(set! ss (item.relation.append_daughter sw 'SylStructure syl))
(while (and seg (< (/ (+ (item.feat seg "end") p_segend) 2.0) sylend))
(if (string-matches (item.name seg) "#.*")
(begin
(item.set_feat seg "ignore" "1")
(item.set_name seg (string-after (item.name seg) "#"))))
(if (string-matches (item.name seg) "%.*")
(begin
(item.set_feat seg "ignore" "1")
(item.set_name seg (string-after (item.name seg) "%"))))
(if (not (phone_is_silence (item.name seg)))
(item.relation.append_daughter ss 'SylStructure seg))
(set! p_segend (item.feat seg "end"))
(set! seg (item.next seg)))
(set! p_sylend sylend)
(set! syl (item.next syl)))
(if (or (null seg) (phone_is_silence (item.name seg)))
(set! phr nil))
)
(utt.relation.items utt 'Word))
(mapcar
(lambda (seg)
(if (string-matches (item.name seg) "#.*")
(begin
(item.set_feat seg "ignore" "1")
(item.set_name seg (string-after (item.name seg) "#"))))
(if (string-matches (item.name seg) "%.*")
(begin
(item.set_feat seg "ignore" "1")
(item.set_name seg (string-after (item.name seg) "%")))))
(utt.relation.items utt 'Segment))
(set! ls (utt.relation.last utt 'Segment))
(if (or (not ls) (not (phone_is_silence (item.name ls))))
(format t "final phone is not silence %s\n" (item.name ls)))
Some day we 'll get these from files
(utt.relation.delete utt 'Intonation)
(utt.relation.delete utt 'IntEvent)
(Intonation utt)
(Int_Targets utt)
)
)
;; Rebuild the Phrase relation from an external phr/NAME.phr xlabel file
;; and attach each Word item as a daughter of the Phrase that covers it.
;; Words are assigned to a phrase while their smoothed boundary (midpoint
;; of this word's end and the previous word's end) falls before the
;; phrase's end time.  The temporary "end" feature is removed afterwards.
;; NOTE(review): p_wordend is set but never read here -- presumably a
;; leftover from an earlier version; confirm before removing.
(define (build_phrase_structure utt name)
"(build_phrase_structure utt)
This builds phrasing, but is done in a different function from
from word structure as this may (for historical reasons) not
exist on all dbs."
(let (phr wrd)
(utt.relation.delete utt 'Phrase)
(utt.relation.load utt 'Phrase (format nil "phr/%s.phr" name))
;; Every loaded phrase label becomes a "B" break, remembering the raw
;; label in the phrase_type feature; the final phrase becomes "BB".
(mapcar
(lambda (p)
(item.set_feat p "phrase_type" (item.name p))
(item.set_name p "B"))
(utt.relation.items utt 'Phrase))
(item.set_name (utt.relation.last utt 'Phrase) "BB")
(set! wrd (utt.relation.first utt 'Word))
(set! phr (utt.relation.first utt 'Phrase))
(set! p_wordend 0)
;; Walk phrases and words in parallel, consuming words until the next
;; word's smoothed end time passes the current phrase's end.
(while phr
(while (and wrd (< (/ (+ (item.feat wrd "word_end")
(item.feat wrd "p.word_end")) 2.0)
(item.feat phr "end")))
(item.relation.append_daughter phr 'Phrase wrd)
(set! wrd (item.next wrd)))
(item.remove_feature phr "end")
(set! phr (item.next phr)))
))
;; Write the utterance's syllables to FILENAME in xlabel format:
;; a "#" header line, an initial silence record at the first syllable's
;; start time, then one "END 100 STRESSACCENT" record per syllable.
(define (cl.utt.save.syllables utt filename)
"(utt.save.syllables UTT FILE)
Save syllables of UTT in a FILE in xlabel format."
(let ((fd (fopen filename "w")))
(format fd "#\n")
(format fd "%2.4f 100 h#\n"
(item.feat (utt.relation.first utt 'Syllable) "syllable_start"))
(mapcar
(lambda (syl)
;; Label is the stress digit, followed by the accentedness value
;; unless it is "none" or "0" (in which case nothing is appended).
(format fd "%2.4f 100 %s%s\n"
(cl.safe_end syl "syllable_end")
(item.feat syl "stress")
(cond
((or (string-equal "none" (item.feat syl "accentedness"))
(string-equal "0" (item.feat syl "accentedness")))
"")
(t
(item.feat syl "accentedness")))))
(utt.relation.items utt 'Syllable))
(fclose fd)
utt))
;; Write the utterance's words to FILENAME in xlabel format: "#" header,
;; an initial silence record at the first word's start, then one
;; "END 100 word" record per word (word name lower-cased).
(define (cl.utt.save.words utt filename)
"(utt.save.words UTT FILE)
Save words of UTT in a FILE in xlabel format."
(let ((fd (fopen filename "w")))
(format fd "#\n")
(format fd "%2.4f 100 h#\n"
(item.feat (utt.relation.first utt 'Word) "word_start"))
(mapcar
(lambda (w)
(format fd "%2.4f 100 %s\n"
(cl.safe_end w "word_end")
(downcase (item.name w))))
(utt.relation.items utt 'Word))
(fclose fd)
utt))
;; Write the utterance's phrases to FILENAME in xlabel format.  The
;; initial record is placed at the start of the first phrase's first
;; segment (reached through the SylStructure daughters).  Each phrase is
;; labelled "H" or "L" (anything else is mapped to "L") and ends at its
;; last word's end time.
(define (cl.utt.save.phr utt filename)
"(utt.save.phr UTT FILE)
Save phrases of UTT in a FILE in xlabel format."
(let ((fd (fopen filename "w"))
(phrs (utt.relation.first utt 'Phrase)))
(format fd "#\n")
(format fd "%2.4f 100 h#\n"
(item.feat phrs
"daughter1.R:SylStructure.daughter1.daughter1.segment_start"))
(while phrs
;; plab is set globally (SIOD set!); "L" is the fallback label.
(if (member_string (item.name phrs) '("H" "L"))
(set! plab (item.name phrs))
(set! plab "L"))
(format fd "%2.4f 100 %s\n"
(item.feat phrs "daughtern.word_end")
plab)
(set! phrs (item.next phrs)))
(fclose fd)
utt))
;; Rebuild the Target relation of UTT from the extracted F0 track for
;; its fileid.  The raw (time f0) pairs are cached in the global
;; awb_f0_points for later inspection/debugging.
;; FIX(review): the two step comments below had lost their ";;" markers
;; during extraction, leaving bare symbols inside the body that broke
;; the expression; markers restored, code otherwise unchanged.
(define (build::add_targets utt)
"(build::add_targets utt)
Adds targets based on the F0 in f0/*.f0. Adds a point to each vowel."
(let ((fileid (utt.feat utt "fileid"))
(f0_points))
(set! f0_points (build::load_f0_points fileid))
(set! awb_f0_points f0_points)
;; Get rid of the old one
(utt.relation.delete utt 'Target)
;; Create a new one
(utt.relation.create utt 'Target)
(build::add_target
utt
f0_points)
utt))
;; For every non-silent segment that is syllable-initial, a vowel, or the
;; last segment before a silence, append a Target item and hang (f0, pos)
;; daughter points on it: at segment start for syllable-initial segments,
;; at segment mid for vowels, and at segment end before silences.  F0
;; values are looked up in f0_points via build::get_f0_at.
(define (build::add_target utt f0_points)
"(build::add_target utt f0_points)
Add F0 points at start or syllable, mid point of each vowel, and
last segment before silence. The F0 continued over non-voiced
periods is such a naive and hopless way its embarrassing."
(let ((s (utt.relation.first utt 'Segment))
(f0s f0_points)
targ)
(while s
;; (cadr (car (PhoneSet.description '(silences)))) is the list of
;; silence phone names for the current phone set.
(if (and (not (member_string
(item.name s)
(cadr (car (PhoneSet.description '(silences))))))
(or (string-equal "1" (item.feat s "syl_initial"))
(string-equal "+" (item.feat s "ph_vc"))
(member_string
(item.feat s "n.name")
(cadr (car (PhoneSet.description '(silences)))))))
(begin
(set! targ (utt.relation.append utt 'Target s))
;; Point at the segment start for syllable-initial segments.
(if (string-equal "1" (item.feat s "syl_initial"))
(item.relation.append_daughter
targ
'Target
(list
"0"
(list
(list 'f0 (build::get_f0_at f0s (item.feat s "segment_start")))
(list 'pos (item.feat s "segment_start"))))))
;; Point at the segment midpoint for vowels (ph_vc == "+").
(if (string-equal "+" (item.feat s "ph_vc"))
(item.relation.append_daughter
targ
'Target
(list
"0"
(list
(list 'f0 (build::get_f0_at f0s (item.feat s "segment_mid")))
(list 'pos (item.feat s "segment_mid"))))))
;; Point at the segment end when the next segment is a silence.
(if (member_string
(item.feat s "n.name")
(cadr (car (PhoneSet.description '(silences)))))
(item.relation.append_daughter
targ
'Target
(list
"0"
(list
(list 'f0 (build::get_f0_at f0s (item.feat s "segment_end")))
(list 'pos (item.feat s "segment_end"))))))))
(set! s (item.next s))
))
)
;; Thin wrapper around the accumulator version: seeds the "best F0 seen
;; so far" argument with -1, meaning "no usable F0 value yet".
(define (build::get_f0_at f0s position)
"(build::get_f0_at f0s position)
Returns the non-zero F0 nearest to position."
(build::get_f0_at_2 -1 f0s position))
;; Recursive worker for build::get_f0_at.  f0s is an assoc-style list of
;; (time value) pairs sorted by time; f0 carries the most recently
;; remembered value.  Scans for the interval whose [time, next-time]
;; range contains position; if the remembered f0 is not positive there,
;; falls forward to the first positive value via build::find_first_f0.
;; NOTE(review): the guard (> 0 (cadr (car f0s))) remembers the value
;; only when it is NEGATIVE, which contradicts the "non-zero F0"
;; docstring -- (> (cadr (car f0s)) 0) looks intended; verify against
;; the upstream festvox source before changing.
(define (build::get_f0_at_2 f0 f0s position)
"(build::get_f0_at f0 f0s position)
Returns the non-zero F0 nearest to position."
(cond
((null f0s)
;; Ran off the end: return the remembered value if usable, else nil.
(if (> f0 0)
f0
))
(t
(if (> 0 (cadr (car f0s)))
(set! f0 (cadr (car f0s))))
(cond
((and (>= position (car (car f0s)))
(<= position (car (cadr f0s))))
;; position falls in this frame's time span.
(if (< f0 1)
(build::find_first_f0 f0s)
f0))
(t
(build::get_f0_at_2 f0 (cdr f0s) position))))))
;; Return the first positive F0 value in the (time value) list f0s,
;; scanning forward; returns nil if none remains.
(define (build::find_first_f0 f0s)
(cond
((null f0s)
)
((> (cadr (car f0s)) 0)
(cadr (car f0s)))
(t
(build::find_first_f0 (cdr f0s)))))
;; Convert f0/FILEID.f0 to ascii with EST's ch_track, then read it back
;; as a list of (time value) pairs, one every 5ms, in file order.
;; FIX(review): two extraction defects repaired -- the "skip the second
;; field" comment had lost its ";;" marker (breaking the while body),
;; and the command format string contained three %s directives for only
;; two arguments; restored the conventional $ESTDIR prefix.  Verify the
;; prefix against the upstream festvox scripts.
(define (build::load_f0_points fileid)
"(build::load_f0_points fileid)
Extract F0 as ascii times and values from the F0 file and load
it as a simple assoc list."
(let ((f0asciifile (make_tmp_filename))
f0fd point points
(time 0))
(system
(format nil "$ESTDIR/bin/ch_track -otype ascii -o %s f0/%s.f0"
f0asciifile
fileid))
(set! f0fd (fopen f0asciifile "r"))
;; Each ascii record is "f0 prob": keep the first field, discard the
;; second with the extra readfp below.
(while (not (equal? (set! point (readfp f0fd)) (eof-val)))
(set! points
(cons
(list time point) points))
(set! time (+ 0.005 time))
;; skip the second field
(readfp f0fd))
(fclose f0fd)
(delete-file f0asciifile)
(reverse points)))
;; Create a fresh sufficient-statistics triple (n sum sumx), all zero.
;; FIX(review): the three zero elements were lost during extraction,
;; leaving an empty list; suffstats.set_n/.set_sum/.set_sumx use
;; set-car! on the first/second/third cells and suffstats.reset zeroes
;; all three, so the structure must be a three-element list.
(define (suffstats.new)
(list 0 0 0))
;; Destructively set the sample count n (first slot of the triple).
(define (suffstats.set_n x n)
(set-car! x n))
;; Destructively set the running sum (second slot of the triple).
(define (suffstats.set_sum x sum)
(set-car! (cdr x) sum))
;; Destructively set the sum of squares (third slot of the triple).
(define (suffstats.set_sumx x sumx)
(set-car! (cddr x) sumx))
;; Sample count n (first slot of the triple).
(define (suffstats.n x)
(car x))
;; Running sum of the observations (second slot of the triple).
(define (suffstats.sum x)
(cadr x))
;; Running sum of squared observations (third slot of the triple).
(define (suffstats.sumx x)
(cadr (cdr x)))
;; Zero all three accumulators of an existing triple in place.
(define (suffstats.reset x)
(suffstats.set_n x 0)
(suffstats.set_sum x 0)
(suffstats.set_sumx x 0))
;; Fold a single observation d into the statistics (the three slot
;; updates are independent of each other, so order does not matter).
(define (suffstats.add x d)
(suffstats.set_sumx x (+ (suffstats.sumx x) (* d d)))
(suffstats.set_sum x (+ (suffstats.sum x) d))
(suffstats.set_n x (+ (suffstats.n x) 1))
)
;; Fold c identical observations of value d into the statistics.
(define (suffstats.add_count x d c)
(suffstats.set_n x (+ (suffstats.n x) c))
(suffstats.set_sum x (+ (suffstats.sum x) (* c d)))
(suffstats.set_sumx x (+ (suffstats.sumx x) (* c (* d d))))
)
;; Arithmetic mean; assumes n > 0 (division by zero otherwise).
(define (suffstats.mean x)
(/ (suffstats.sum x) (suffstats.n x)))
;; Sample (Bessel-corrected) variance via the computational formula
;; (n*sumx - sum^2) / (n*(n-1)); assumes n > 1.
(define (suffstats.variance x)
(/ (- (* (suffstats.n x) (suffstats.sumx x))
(* (suffstats.sum x) (suffstats.sum x)))
(* (suffstats.n x) (- (suffstats.n x) 1))))
;; Sample standard deviation (square root of the sample variance).
(define (suffstats.stddev x)
(sqrt (suffstats.variance x)))
;; Add one duration observation for PHONE to the per-phone statistics
;; alist STATS ((phone suffstats) ...), creating a fresh entry when the
;; phone is unseen, and return the (possibly extended) alist.
;; (Name keeps the historical "cummulate" spelling used by callers.)
(define (cummulate_stats stats phone duration)
(let ((pstat (car (cdr (assoc_string phone stats))))
(newstats stats))
;; car/cdr of nil are nil in SIOD, so a missing entry yields nil here.
(if (null pstat)
(begin
(set! pstat (suffstats.new))
(set! newstats (cons (list phone pstat) stats))))
(suffstats.add pstat duration)
newstats))
;; Accumulate per-phone duration statistics over every Segment of every
;; utterance in UTTS; returns the ((phone suffstats) ...) alist.
(define (collect_dur_stats utts)
(let ((stats nil))
(mapcar
(lambda (u)
(mapcar
(lambda (s)
(set! stats (cummulate_stats
stats
(item.name s)
(item.feat s "segment_duration"))))
(utt.relation.items u 'Segment)))
utts)
stats))
;; Write one line per utterance to OFILE: "fileid ratio score total",
;; where score counts segments that look like labelling errors -- either
;; shorter than 11ms or more than 3 standard deviations from that
;; phone's mean duration (two-sided, via |zscore|).
(define (score_utts utts durstats ofile)
(let ((ofd (fopen ofile "w")))
(mapcar
(lambda (u)
(let ((score 0) (tot 0))
(format ofd "%s " (utt.feat u "fileid"))
(mapcar
(lambda (s)
(let ((stats (car (cdr (assoc_string (item.name s) durstats))))
(dur (item.feat s "segment_duration"))
(zscore))
(set! tot (+ 1 tot))
(set! zscore (/ (- dur (suffstats.mean stats))
(suffstats.stddev stats)))
;; Take the absolute value for a two-sided outlier test.
(if (< zscore 0)
(set! zscore (* -1 zscore)))
(if (or (< dur 0.011)
(> zscore 3))
(set! score (+ 1 score)))))
(utt.relation.items u 'Segment))
(format ofd "%0.4f %d %d\n"
(/ score tot)
score
tot)))
utts)))
;; Build a minimal utterance for FILEID: just the Segment relation
;; loaded from lab/FILEID.lab, with fileid recorded as a feature.
(define (make_simple_utt fileid)
(let ((utt (Utterance Text "")))
(utt.relation.load utt 'Segment
(format nil "lab/%s.lab" fileid))
(utt.set_feat utt "fileid" fileid)
utt))
;; Score every utterance listed in FILE (a loadable list of (fileid ...)
;; entries) for duration outliers, writing one line per utterance to
;; OFILE.  A voice is loaded first so a phone set is available.
;; FIX(review): the let bound a misspelt "dur_states", so the later
;; (set! dur_stats ...) leaked a global variable; binding renamed to
;; dur_stats (behaviour otherwise unchanged).
(define (find_outlier_utts file ofile)
(voice_kal_diphone)
(let ((p (load file t))
utts dur_stats)
(set! utts (mapcar (lambda (l) (make_simple_utt (car l))) p))
(set! dur_stats (collect_dur_stats utts))
(score_utts utts dur_stats ofile)
t))
;; Display a clunits synthesis and stage its selected units for emulabel
;; inspection: rebuilds scratch/cl/{wav,lab,emu,emulab_hlb}, hard-links
;; each selected unit's source lab/wav under a name encoding selection
;; order, unit name, end time and source fileid, then launches emulabel
;; in the background.  Always returns t.
(define (clunits::display_selected utt)
(clunits::display utt)
(system (format nil "rm -rf scratch/cl\n"))
(system (format nil "mkdir scratch/cl\n"))
(system (format nil "(cd scratch/cl && mkdir wav lab emu emulab_hlb)"))
;; unum is a global counter giving each unit a 3-digit ordinal prefix.
(set! unum 0)
(mapcar
(lambda (unit)
(set! unit_utt_name
(format nil "%03d_%s_%1.3f_%s"
unum
(item.name unit)
(item.feat unit "end")
(item.feat unit "fileid")))
(set! unum (+ 1 unum))
(system
(format nil "ln lab/%s.lab scratch/cl/lab/%s.lab\n"
(item.feat unit "fileid") unit_utt_name))
(system
(format nil "ln wav/%s.wav scratch/cl/wav/%s.wav\n"
(item.feat unit "fileid") unit_utt_name))
)
(utt.relation.items utt 'Unit))
(system (format nil "(cd scratch/cl && emulabel ../../etc/emu_lab &)\n"))
t
)
;; Batch-synthesise a tts data file TTD of (id text) pairs, saving each
;; waveform as ODIR/id.wav.  unwind-protect's second form runs when the
;; synthesis fails (Festival's error-catching semantics -- confirm
;; against the SIOD docs), so failures print " failed" instead of
;; aborting the whole run.  Returns t.
(define (tts_test ttd odir)
(mapcar
(lambda (x)
(format t "%s tts" (car x))
(unwind-protect
(begin
(set! utt1 (SynthText (cadr x)))
(utt.save.wave utt1 (format nil "%s/%s.wav" odir (car x))))
(begin
(format t " failed")))
(format t "\n"))
(load ttd t))
t)
;; Unit-selection match count: synthesise TEXT (result left in the
;; global utts) and count selected units that came from recording ID
;; and either start the stream or continue contiguously from the
;; previous unit of the same recording.
;; FIX(review): three debugging lines had lost their ";" comment
;; markers during extraction (and were themselves partially garbled),
;; breaking the expression; they are restored below as comments.
(define (usm id text)
(set! utts (SynthText text))
(set! utta (utt.load nil (format nil "festival/utts/%s.utt" id )))
;; (set! utta (Wave_Synth utta))
;; (format t "%s %s %s %f\n" id (item.name x) (item.name y)
;;         (utt.relation.items utta 'Segment))
(apply
+
(mapcar
(lambda (u)
(if (and (string-equal id (item.feat u "fileid"))
(or (null (item.prev u))
(and (string-equal id (item.feat u "p.fileid"))
(equal? (item.feat u "unit_start")
(item.feat u "p.unit_end")))))
1
0
))
(utt.relation.items utts 'Unit)))
)
;; Run usm over every (id text) pair in the tts data file TTD, saving
;; each synthesised waveform to ODIR/id.wav, printing a per-utterance
;; "USM id matched total percent" line and a final USM_TOTAL summary.
;; c and us are set globally (SIOD set! inside the lambda).
(define (usm_file ttd odir)
(let ((usm_count 0) (usm_total 0))
(mapcar
(lambda (x)
(set! c (usm (car x) (cadr x)))
(set! us (length (utt.relation.items utts 'Unit)))
(format t "USM %s %2.2f %2.2f %2.2f\n" (car x) c
us
(* 100.0 (/ c us)))
(utt.save.wave utts (format nil "%s/%s.wav" odir (car x)))
(set! usm_count (+ c usm_count))
(set! usm_total (+ us usm_total))
)
(load ttd t))
(format t "USM_TOTAL %2.2f %2.2f %2.2f\n"
usm_count
usm_total
(* 100.0 (/ usm_count usm_total)))
)
)
(provide 'build_clunits)
|
4cbc7a5db2288d304e16bc48b17078c40f2942aab32ae6da3ce603b223102d89 | vascokk/NumEr | numer_nifs_tests.erl | -module(numer_nifs_tests).
-compile(export_all).
-include("numer.hrl").
-include_lib("eunit/include/eunit.hrl").
%% Smoke test: a float buffer can be allocated in a fresh context and
%% released again without error.
create_destroy_float_test() ->
    {ok, Context} = numer_nifs:new_context(),
    {ok, Buffer} = numer_nifs:new_float_buffer(Context),
    ok = numer_nifs:destroy_buffer(Buffer),
    numer_nifs:destroy_context(Context).
%% Writing five floats into a float buffer makes buffer_size report 5.
create_write_destroy_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_float_buffer(Ctx),
numer_nifs:write_buffer(Buf, [0.01, 0.002, 0.0003, 0.4, 1.5]),
{ok, 5} = numer_nifs:buffer_size(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
%% Smoke test: a 4x4 row-major float matrix buffer can be allocated and
%% released again without error.
create_destroy_float_matrix_test() ->
    {ok, Context} = numer_nifs:new_context(),
    {ok, Matrix} = numer_nifs:new_matrix_float_buffer(Context, 4, 4, ?ROW_MAJOR),
    ok = numer_nifs:destroy_buffer(Matrix),
    numer_nifs:destroy_context(Context).
%% A 4x4 matrix of floats can be written into a matching buffer.
create_write_destroy_matrix_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, 4,4, ?ROW_MAJOR),
A = [[16.0,2.0,3.0,13.0],[5.0,11.0,10.0,8.0],[9.0,7.0,6.0,12.0],[4.0,14.0,15.0,1.0]],
numer_nifs:write_buffer(Buf, A),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
%% Round-trip: a written 4x4 matrix reads back unchanged.
create_write_read_destroy_matrix_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, 4,4, ?ROW_MAJOR),
A = [[16.0,2.0,3.0,13.0],[5.0,11.0,10.0,8.0],[9.0,7.0,6.0,12.0],[4.0,14.0,15.0,1.0]],
numer_nifs:write_buffer(Buf, A),
?assertEqual({ok,A}, numer_nifs:read_buffer(Buf)),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
%% A freshly allocated (never written) 4x4 buffer reads back as all
%% zeros, i.e. the NIF zero-initialises matrix buffers.
create_write_read_destroy_empty_matrix_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, 4,4, ?ROW_MAJOR),
A = [[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0]],
%numer_nifs:write_buffer(Buf, A),
?assertEqual({ok,A}, numer_nifs:read_buffer(Buf)),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
%% Constructing a buffer directly from a matrix literal (dimensions
%% inferred) round-trips the values exactly.
create_from_matrix_write_read_destroy_matrix_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
A = [[16.5,2.1029,3.00023,13.00001],[5.0,11.0,10.0,8.0],[9.0,7.0,6.0,12.0],[4.0,14.0,15.0,1.0]],
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, A} = numer_nifs:read_buffer(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
%% Same from-matrix round-trip for a non-square (4x3) matrix.
create_matrix_float_2_test() ->
{ok, Ctx} = numer_nifs:new_context(),
A = [[7.0,4.0,3.0],[8.0,4.0,7.0],[15.0,6.0,99.0],[3.0,2.0,4.0]],
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, A} = numer_nifs:read_buffer(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
%% Same from-matrix round-trip for a wide (2x4) matrix.
create_matrix_float_3_test() ->
{ok, Ctx} = numer_nifs:new_context(),
A = [[3.0,2.0,44.0,8.0],[5.0,7.0,12.0,21.0]],
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, A} = numer_nifs:read_buffer(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
%% Integer input values are coerced to floats by the NIF: writing ints
%% reads back as the corresponding floats.
create_float_matrix_with_int_values_test() ->
{ok, Ctx} = numer_nifs:new_context(),
A = [[7,4,3],[8,4,7],[15,6,99],[3,2,4]],
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, [[7.0,4.0,3.0],[8.0,4.0,7.0],[15.0,6.0,99.0],[3.0,2.0,4.0]]} = numer_nifs:read_buffer(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
%% Int-to-float coercion again, for a 2x4 matrix.
create_float_matrix_with_int_values_2_test() ->
{ok, Ctx} = numer_nifs:new_context(),
A = [[3,2,44,8],[5,7,12,21]],
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, [[3.0,2.0,44.0,8.0],[5.0,7.0,12.0,21.0]]} = numer_nifs:read_buffer(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
%% Writing a 4x4 matrix into a buffer allocated with smaller dimensions
%% must return {error, _}.
%% NOTE(review): the line allocating the undersized buffer carried an
%% inline comment and was lost during extraction (only "must be (4,4)"
%% survived); reconstructed below with 3x3 dimensions -- verify the
%% exact dimensions against the upstream repository.
negative_create_float_matrix_with_wrong_dimensions_test() ->
    {ok, Ctx} = numer_nifs:new_context(),
    {ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, 3,3, ?ROW_MAJOR), %must be (4,4)
    A = [[16.0,2.0,3.0,13.0],[5.0,11.0,10.0,8.0],[9.0,7.0,6.0,12.0],[4.0,14.0,15.0,1.0]],
    {error,_} = numer_nifs:write_buffer(Buf, A),
    %{ok, A} = numer_nifs:read_buffer(Buf),
    ok = numer_nifs:destroy_buffer(Buf),
    numer_nifs:destroy_context(Ctx).
%% Writing fewer rows than the buffer was allocated for must return
%% {error, _}.
%% NOTE(review): the matrix literal carried an inline comment and was
%% lost during extraction (only "one row less" survived); reconstructed
%% below as the standard 4x4 fixture minus its last row -- verify
%% against the upstream repository.
negative_create_float_matrix_with_wrong_dimensions_less_data_test() ->
    {ok, Ctx} = numer_nifs:new_context(),
    {ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, 4,4, ?ROW_MAJOR),
    A = [[16.0,2.0,3.0,13.0],[5.0,11.0,10.0,8.0],[9.0,7.0,6.0,12.0]], %one row less
    {error,_} = numer_nifs:write_buffer(Buf, A),
    %{ok, A} = numer_nifs:read_buffer(Buf),
    ok = numer_nifs:destroy_buffer(Buf),
    numer_nifs:destroy_context(Ctx).
%
%Float matrix operations only supported
%
% GEMM: C = α op ( A ) op ( B ) + β C
%% GEMM: C = alpha*op(A)*op(B) + beta*C for a (3x4)x(4x2) product; the
%% expected C was verified by hand (e.g. [7,8,15,3].[3,2,44,8] = 721).
%% NOTE(review): the "_m = 3,%num_rows_A" line was lost during
%% extraction (only the comment survived); reconstructed from A having
%% three rows and C being 3x2.
gemm_test()->
    {ok, Ctx} = numer_nifs:new_context(),
    A = [[7,8,15,3],[4,4,6,2],[3,7,99,4]], %row major
    B = [[3,5],[2,7],[44,12],[8,21]], %row major
    _m = 3,%num_rows_A
    _k = 4,%num_cols_A
    _n = 2,%num_cols_B
    _alpha = 1.0,
    _beta= 0.0,
    C = [[721.0, 334.0],[300.0,162.0],[4411.0,1336.0]], %row major
    {ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
    {ok, Buf_B} = numer_nifs:new_matrix_float_buffer(Ctx, B, ?ROW_MAJOR),
    {ok, Buf_C} = numer_nifs:new_matrix_float_buffer(Ctx, _m,_n, ?ROW_MAJOR),
    ok = numer_nifs:gemm(Ctx, ?NO_TRANSPOSE, ?NO_TRANSPOSE, _m, _n, _k, _alpha, Buf_A, Buf_B, _beta, Buf_C),
    {ok, C} = numer_nifs:read_buffer(Buf_C),
    ok = numer_nifs:destroy_buffer(Buf_A),
    ok = numer_nifs:destroy_buffer(Buf_B),
    ok = numer_nifs:destroy_buffer(Buf_C),
    numer_nifs:destroy_context(Ctx).
%% GEMM with a deliberately wrong _m (number of rows of A) must return
%% {error, _}.
%% NOTE(review): the "_m = ..." line was lost during extraction (only
%% "num_rows_A WRONG!!! must be 3" survived); reconstructed with the
%% wrong value 4 -- any value other than 3 preserves the test's intent,
%% but verify against the upstream repository.
negative_gemm_wrong_A_dim_test()->
    {ok, Ctx} = numer_nifs:new_context(),
    A = [[7,8,15,3],[4,4,6,2],[3,7,99,4]], %row major
    B = [[3,5],[2,7],[44,12],[8,21]], %row major
    _m = 4,%num_rows_A WRONG!!! must be 3
    _k = 4,%num_cols_A
    _n = 2,%num_cols_B
    _alpha = 1.0,
    _beta= 0.0,
    {ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
    {ok, Buf_B} = numer_nifs:new_matrix_float_buffer(Ctx, B, ?ROW_MAJOR),
    {ok, Buf_C} = numer_nifs:new_matrix_float_buffer(Ctx, _m,_n, ?ROW_MAJOR),
    {error,_} = numer_nifs:gemm(Ctx, ?NO_TRANSPOSE, ?NO_TRANSPOSE, _m, _n, _k, _alpha, Buf_A, Buf_B, _beta, Buf_C),
    {ok, _} = numer_nifs:read_buffer(Buf_C),
    ok = numer_nifs:destroy_buffer(Buf_A),
    ok = numer_nifs:destroy_buffer(Buf_B),
    ok = numer_nifs:destroy_buffer(Buf_C),
    numer_nifs:destroy_context(Ctx).
%GEMV: y <- alpha*op(A)*x + beta*y
%% GEMV with a 2x4 A and a length-4 x: y = 1.0*A*x + 0.0*y.
%% Expected: [4,6,8,2].[2,5,1,7] = 60 and [5,7,9,3].[2,5,1,7] = 75.
gemv_test()->
{ok, Ctx} = numer_nifs:new_context(),
A = [[4.0,6.0,8.0,2.0],[5.0,7.0,9.0,3.0]],
_m = 2, %rows A
_n = 4, %columns A
_alpha = 1.0,
_beta = 0.0,
X = [2.0,5.0,1.0,7.0],
Y = [0.0, 0.0],
{ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, Buf_X} = numer_nifs:new_float_buffer(Ctx),
numer_nifs:write_buffer(Buf_X, X),
{ok, Buf_Y} = numer_nifs:new_float_buffer(Ctx),
numer_nifs:write_buffer(Buf_Y, Y),
ok = numer_nifs:gemv(Ctx, ?NO_TRANSPOSE , _m, _n, _alpha, Buf_A, Buf_X, _beta, Buf_Y),
{ok, [60.0,75.0]} = numer_nifs:read_buffer(Buf_Y),
ok = numer_nifs:destroy_buffer(Buf_A),
ok = numer_nifs:destroy_buffer(Buf_X),
ok = numer_nifs:destroy_buffer(Buf_Y),
numer_nifs:destroy_context(Ctx).
%% GEMV with a deliberately wrong _m (rows of A) must return {error, _}.
%% NOTE(review): the "_m = ..." line was lost during extraction (only
%% "rows A WRONG!!! must be 2" survived); reconstructed with the wrong
%% value 4 -- any value other than 2 preserves the test's intent, but
%% verify against the upstream repository.
negative_gemv_wrong_A_dim_test()->
    {ok, Ctx} = numer_nifs:new_context(),
    A = [[4.0,6.0,8.0,2.0],[5.0,7.0,9.0,3.0]],
    _m = 4, %rows A WRONG!!! must be 2
    _n = 4, %columns A
    _alpha = 1.0,
    _beta = 0.0,
    X = [2.0,5.0,1.0,7.0],
    {ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
    {ok, Buf_X} = numer_nifs:new_float_buffer(Ctx),
    numer_nifs:write_buffer(Buf_X, X),
    {ok, Buf_Y} = numer_nifs:new_float_buffer(Ctx),
    {error, _} = numer_nifs:gemv(Ctx, ?NO_TRANSPOSE, _m, _n, _alpha, Buf_A, Buf_X, _beta, Buf_Y),
    {ok, _} = numer_nifs:read_buffer(Buf_Y),
    ok = numer_nifs:destroy_buffer(Buf_A),
    ok = numer_nifs:destroy_buffer(Buf_X),
    ok = numer_nifs:destroy_buffer(Buf_Y),
    numer_nifs:destroy_context(Ctx).
%SAXPY: y <- α * x + y
%% SAXPY: y <- 2.0*x + y with y initially zero, so y reads back as 2x.
saxpy_test()->
{ok, Ctx} = numer_nifs:new_context(),
_a = 2.0, %!!!! this has to be float
X = [2.0, 5.0, 1.0, 7.0],
Y = [0.0, 0.0, 0.0, 0.0],
{ok, Buf_X} = numer_nifs:new_float_buffer(Ctx),
ok = numer_nifs:write_buffer(Buf_X, X),
{ok, Buf_Y} = numer_nifs:new_float_buffer(Ctx),
ok = numer_nifs:write_buffer(Buf_Y, Y),
ok = numer_nifs:saxpy(Ctx, _a, Buf_X, Buf_Y),
{ok, [4.0, 10.0, 2.0, 14.0]} = numer_nifs:read_buffer(Buf_Y),
ok = numer_nifs:destroy_buffer(Buf_X),
ok = numer_nifs:destroy_buffer(Buf_Y),
numer_nifs:destroy_context(Ctx).
%% SAXPY with size(x) < size(y) must return {error, _}.
negative_saxpy_sizeX_lt_sizeY_test()->
{ok, Ctx} = numer_nifs:new_context(),
_a = 2.0, %!!!! this has to be float
X = [2.0, 5.0, 1.0],
Y = [0.0, 0.0, 0.0, 0.0],
{ok, Buf_X} = numer_nifs:new_float_buffer(Ctx),
ok = numer_nifs:write_buffer(Buf_X, X),
{ok, Buf_Y} = numer_nifs:new_float_buffer(Ctx),
ok = numer_nifs:write_buffer(Buf_Y, Y),
{error, _} = numer_nifs:saxpy(Ctx, _a, Buf_X, Buf_Y),
{ok, _} = numer_nifs:read_buffer(Buf_Y),
ok = numer_nifs:destroy_buffer(Buf_X),
ok = numer_nifs:destroy_buffer(Buf_Y),
numer_nifs:destroy_context(Ctx).
%%%
%%% BLAS-like functions
%%%
%Transpose: B <- transpose(A)
%% Transpose: a 3x4 matrix written row-major transposes into the
%% pre-allocated 4x3 destination buffer (ints coerced to floats).
transpose_test()->
{ok, Ctx} = numer_nifs:new_context(),
A = [[7,8,15,3],[4,4,6,2],[3,7,99,4]], %row major
A_transposed = [[7.0,4.0,3.0],[8.0,4.0,7.0],[15.0,6.0,99.0],[3.0,2.0,4.0]],
{ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, Buf_B} = numer_nifs:new_matrix_float_buffer(Ctx, 4,3, ?ROW_MAJOR),
ok = numer_nifs:transpose(Ctx, Buf_A, Buf_B),
{ok, B} = numer_nifs:read_buffer(Buf_B),
?assertEqual(A_transposed, B),
ok = numer_nifs:destroy_buffer(Buf_A),
ok = numer_nifs:destroy_buffer(Buf_B),
numer_nifs:destroy_context(Ctx).
%GEAM: C = alpha*op(A) + beta*op(B)
% (this function is CUBLAS-specific)
%% GEAM (cuBLAS extension): C = 1.0*A + 1.0*B element-wise for two 3x4
%% matrices.
%% FIX(review): Buf_C was never destroyed, unlike every other test in
%% this module that frees all buffers it allocates; the missing
%% destroy_buffer call is added before destroying the context.
geam_test()->
    {ok, Ctx} = numer_nifs:new_context(),
    A = [[7,8,15,3],[4,4,6,2],[3,7,99,4]], %row major
    B = [[1,2,3,4],[5,6,7,8],[9,10,11,12]],
    _alpha = 1.0,
    _beta = 1.0,
    _m = 3,
    _n = 4,
    {ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
    {ok, Buf_B} = numer_nifs:new_matrix_float_buffer(Ctx, B, ?ROW_MAJOR),
    {ok, Buf_C} = numer_nifs:new_matrix_float_buffer(Ctx, _m, _n, ?ROW_MAJOR),
    ok = numer_nifs:geam(Ctx, ?NO_TRANSPOSE, ?NO_TRANSPOSE, _m, _n, _alpha, Buf_A, _beta, Buf_B, Buf_C),
    {ok, C} = numer_nifs:read_buffer(Buf_C),
    ?assertEqual([[8.0,10.0,18.0,7.0],[9.0,10.0,13.0,10.0],[12.0,17.0,110.0,16.0]], C),
    ok = numer_nifs:destroy_buffer(Buf_A),
    ok = numer_nifs:destroy_buffer(Buf_B),
    ok = numer_nifs:destroy_buffer(Buf_C),
    numer_nifs:destroy_context(Ctx).
%smm (Scalar Matrix Multiply)
% B <- α * A
%% SMM (scalar-matrix multiply): B <- 5.0 * A for a 2x4 matrix.
smm_test()->
{ok, Ctx} = numer_nifs:new_context(),
A = [[4.0,6.0,8.0,2.0],[5.0,7.0,9.0,3.0]],
_m = 2, %rows A
_n = 4, %columns A
_alpha = 5.0,
{ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, Buf_B} = numer_nifs:new_matrix_float_buffer(Ctx, _m, _n, ?ROW_MAJOR),
ok = numer_nifs:smm(Ctx, _alpha, Buf_A, Buf_B),
{ok, B} = numer_nifs:read_buffer(Buf_B),
?assertEqual([[20.0,30.0,40.0,10.0],[25.0,35.0,45.0,15.0]], B),
ok = numer_nifs:destroy_buffer(Buf_A),
ok = numer_nifs:destroy_buffer(Buf_B),
numer_nifs:destroy_context(Ctx).
%% Disabled test: smm over a plain (vector) float buffer. The whole
%% function was commented out in the original file; the '%' markers were
%% stripped and the text garbled during extraction, and are restored here.
%smm_vector_test()->
%    {ok, Ctx} = numer_nifs:new_context(),
%    A = [4.0,6.0,8.0,2.0,5.0,7.0,9.0,3.0],
%    _m = 2, %rows A
%    _n = 4, %columns A
%    _alpha = 5.0,
%    {ok, Buf_A} = numer_nifs:new_float_buffer(Ctx, length(A)),
%    numer_nifs:write_buffer(Buf_A, A),
%    {ok, Buf_B} = numer_nifs:new_float_buffer(Ctx, length(A)),
%    ok = numer_nifs:smm(Ctx, _alpha, Buf_A, Buf_B),
%    {ok, B} = numer_nifs:read_buffer(Buf_B),
%    ?assertEqual([20.0,30.0,40.0,10.0,25.0,35.0,45.0,15.0], B),
%    ok = numer_nifs:destroy_buffer(Buf_A),
%    ok = numer_nifs:destroy_buffer(Buf_B),
%    numer_nifs:destroy_context(Ctx).
{ok, A} = numer_nifs:read_buffer(Buf),
{ok, A} = numer_nifs:read_buffer(Buf),
Float matrix operations only supported
GEMM: C = α op ( A ) op ( B ) + β C
row major
row major
num_cols_A
num_cols_B
row major
row major
row major
num_cols_A
num_cols_B
rows A
columns A
columns A
SAXPY: y <- α * x + y
!!!! this has to be float
!!!! this has to be float
BLAS-like functions
Transpose: B <- transpose(A)
row major
(this function is CUBLAS-specific)
row major
B <- α * A
rows A
columns A
rows A
columns A
{ok, B} = numer_nifs:read_buffer(Buf_B),
?assertEqual([20.0,30.0,40.0,10.0,25.0,35.0,45.0,15.0], B),
ok = numer_nifs:destroy_buffer(Buf_B),
numer_nifs:destroy_context(Ctx). | -module(numer_nifs_tests).
-compile(export_all).
-include("numer.hrl").
-include_lib("eunit/include/eunit.hrl").
create_destroy_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_float_buffer(Ctx),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
create_write_destroy_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_float_buffer(Ctx),
numer_nifs:write_buffer(Buf, [0.01, 0.002, 0.0003, 0.4, 1.5]),
{ok, 5} = numer_nifs:buffer_size(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
create_destroy_float_matrix_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, 4,4, ?ROW_MAJOR),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
create_write_destroy_matrix_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, 4,4, ?ROW_MAJOR),
A = [[16.0,2.0,3.0,13.0],[5.0,11.0,10.0,8.0],[9.0,7.0,6.0,12.0],[4.0,14.0,15.0,1.0]],
numer_nifs:write_buffer(Buf, A),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
create_write_read_destroy_matrix_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, 4,4, ?ROW_MAJOR),
A = [[16.0,2.0,3.0,13.0],[5.0,11.0,10.0,8.0],[9.0,7.0,6.0,12.0],[4.0,14.0,15.0,1.0]],
numer_nifs:write_buffer(Buf, A),
?assertEqual({ok,A}, numer_nifs:read_buffer(Buf)),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
create_write_read_destroy_empty_matrix_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, 4,4, ?ROW_MAJOR),
A = [[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0],[0.0,0.0,0.0,0.0]],
?assertEqual({ok,A}, numer_nifs:read_buffer(Buf)),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
create_from_matrix_write_read_destroy_matrix_float_test() ->
{ok, Ctx} = numer_nifs:new_context(),
A = [[16.5,2.1029,3.00023,13.00001],[5.0,11.0,10.0,8.0],[9.0,7.0,6.0,12.0],[4.0,14.0,15.0,1.0]],
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, A} = numer_nifs:read_buffer(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
create_matrix_float_2_test() ->
{ok, Ctx} = numer_nifs:new_context(),
A = [[7.0,4.0,3.0],[8.0,4.0,7.0],[15.0,6.0,99.0],[3.0,2.0,4.0]],
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, A} = numer_nifs:read_buffer(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
create_matrix_float_3_test() ->
{ok, Ctx} = numer_nifs:new_context(),
A = [[3.0,2.0,44.0,8.0],[5.0,7.0,12.0,21.0]],
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, A} = numer_nifs:read_buffer(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
create_float_matrix_with_int_values_test() ->
{ok, Ctx} = numer_nifs:new_context(),
A = [[7,4,3],[8,4,7],[15,6,99],[3,2,4]],
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, [[7.0,4.0,3.0],[8.0,4.0,7.0],[15.0,6.0,99.0],[3.0,2.0,4.0]]} = numer_nifs:read_buffer(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
create_float_matrix_with_int_values_2_test() ->
{ok, Ctx} = numer_nifs:new_context(),
A = [[3,2,44,8],[5,7,12,21]],
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, [[3.0,2.0,44.0,8.0],[5.0,7.0,12.0,21.0]]} = numer_nifs:read_buffer(Buf),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
negative_create_float_matrix_with_wrong_dimensions_test() ->
{ok, Ctx} = numer_nifs:new_context(),
must be ( 4,4 )
A = [[16.0,2.0,3.0,13.0],[5.0,11.0,10.0,8.0],[9.0,7.0,6.0,12.0],[4.0,14.0,15.0,1.0]],
{error,_} = numer_nifs:write_buffer(Buf, A),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
negative_create_float_matrix_with_wrong_dimensions_less_data_test() ->
{ok, Ctx} = numer_nifs:new_context(),
{ok, Buf} = numer_nifs:new_matrix_float_buffer(Ctx, 4,4, ?ROW_MAJOR),
one row less
{error,_} = numer_nifs:write_buffer(Buf, A),
ok = numer_nifs:destroy_buffer(Buf),
numer_nifs:destroy_context(Ctx).
gemm_test()->
{ok, Ctx} = numer_nifs:new_context(),
num_rows_A
_alpha = 1.0,
_beta= 0.0,
{ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, Buf_B} = numer_nifs:new_matrix_float_buffer(Ctx, B, ?ROW_MAJOR),
{ok, Buf_C} = numer_nifs:new_matrix_float_buffer(Ctx, _m,_n, ?ROW_MAJOR),
ok = numer_nifs:gemm(Ctx, ?NO_TRANSPOSE, ?NO_TRANSPOSE, _m, _n, _k, _alpha, Buf_A, Buf_B, _beta, Buf_C),
{ok, C} = numer_nifs:read_buffer(Buf_C),
ok = numer_nifs:destroy_buffer(Buf_A),
ok = numer_nifs:destroy_buffer(Buf_B),
ok = numer_nifs:destroy_buffer(Buf_C),
numer_nifs:destroy_context(Ctx).
negative_gemm_wrong_A_dim_test()->
{ok, Ctx} = numer_nifs:new_context(),
num_rows_A WRONG ! ! ! must be 3
_alpha = 1.0,
_beta= 0.0,
{ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, Buf_B} = numer_nifs:new_matrix_float_buffer(Ctx, B, ?ROW_MAJOR),
{ok, Buf_C} = numer_nifs:new_matrix_float_buffer(Ctx, _m,_n, ?ROW_MAJOR),
{error,_} = numer_nifs:gemm(Ctx, ?NO_TRANSPOSE, ?NO_TRANSPOSE, _m, _n, _k, _alpha, Buf_A, Buf_B, _beta, Buf_C),
{ok, _} = numer_nifs:read_buffer(Buf_C),
ok = numer_nifs:destroy_buffer(Buf_A),
ok = numer_nifs:destroy_buffer(Buf_B),
ok = numer_nifs:destroy_buffer(Buf_C),
numer_nifs:destroy_context(Ctx).
GEMV : y < - α op ( A ) x + β y
gemv_test()->
{ok, Ctx} = numer_nifs:new_context(),
A = [[4.0,6.0,8.0,2.0],[5.0,7.0,9.0,3.0]],
_alpha = 1.0,
_beta = 0.0,
X = [2.0,5.0,1.0,7.0],
Y = [0.0, 0.0],
{ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, Buf_X} = numer_nifs:new_float_buffer(Ctx),
numer_nifs:write_buffer(Buf_X, X),
{ok, Buf_Y} = numer_nifs:new_float_buffer(Ctx),
numer_nifs:write_buffer(Buf_Y, Y),
ok = numer_nifs:gemv(Ctx, ?NO_TRANSPOSE , _m, _n, _alpha, Buf_A, Buf_X, _beta, Buf_Y),
{ok, [60.0,75.0]} = numer_nifs:read_buffer(Buf_Y),
ok = numer_nifs:destroy_buffer(Buf_A),
ok = numer_nifs:destroy_buffer(Buf_X),
ok = numer_nifs:destroy_buffer(Buf_Y),
numer_nifs:destroy_context(Ctx).
negative_gemv_wrong_A_dim_test()->
{ok, Ctx} = numer_nifs:new_context(),
A = [[4.0,6.0,8.0,2.0],[5.0,7.0,9.0,3.0]],
rows A WRONG ! ! ! must be 2
_alpha = 1.0,
_beta = 0.0,
X = [2.0,5.0,1.0,7.0],
{ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, Buf_X} = numer_nifs:new_float_buffer(Ctx),
numer_nifs:write_buffer(Buf_X, X),
{ok, Buf_Y} = numer_nifs:new_float_buffer(Ctx),
{error, _} = numer_nifs:gemv(Ctx, ?NO_TRANSPOSE, _m, _n, _alpha, Buf_A, Buf_X, _beta, Buf_Y),
{ok, _} = numer_nifs:read_buffer(Buf_Y),
ok = numer_nifs:destroy_buffer(Buf_A),
ok = numer_nifs:destroy_buffer(Buf_X),
ok = numer_nifs:destroy_buffer(Buf_Y),
numer_nifs:destroy_context(Ctx).
saxpy_test()->
{ok, Ctx} = numer_nifs:new_context(),
X = [2.0, 5.0, 1.0, 7.0],
Y = [0.0, 0.0, 0.0, 0.0],
{ok, Buf_X} = numer_nifs:new_float_buffer(Ctx),
ok = numer_nifs:write_buffer(Buf_X, X),
{ok, Buf_Y} = numer_nifs:new_float_buffer(Ctx),
ok = numer_nifs:write_buffer(Buf_Y, Y),
ok = numer_nifs:saxpy(Ctx, _a, Buf_X, Buf_Y),
{ok, [4.0, 10.0, 2.0, 14.0]} = numer_nifs:read_buffer(Buf_Y),
ok = numer_nifs:destroy_buffer(Buf_X),
ok = numer_nifs:destroy_buffer(Buf_Y),
numer_nifs:destroy_context(Ctx).
negative_saxpy_sizeX_lt_sizeY_test()->
{ok, Ctx} = numer_nifs:new_context(),
X = [2.0, 5.0, 1.0],
Y = [0.0, 0.0, 0.0, 0.0],
{ok, Buf_X} = numer_nifs:new_float_buffer(Ctx),
ok = numer_nifs:write_buffer(Buf_X, X),
{ok, Buf_Y} = numer_nifs:new_float_buffer(Ctx),
ok = numer_nifs:write_buffer(Buf_Y, Y),
{error, _} = numer_nifs:saxpy(Ctx, _a, Buf_X, Buf_Y),
{ok, _} = numer_nifs:read_buffer(Buf_Y),
ok = numer_nifs:destroy_buffer(Buf_X),
ok = numer_nifs:destroy_buffer(Buf_Y),
numer_nifs:destroy_context(Ctx).
transpose_test()->
{ok, Ctx} = numer_nifs:new_context(),
A_transposed = [[7.0,4.0,3.0],[8.0,4.0,7.0],[15.0,6.0,99.0],[3.0,2.0,4.0]],
{ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, Buf_B} = numer_nifs:new_matrix_float_buffer(Ctx, 4,3, ?ROW_MAJOR),
ok = numer_nifs:transpose(Ctx, Buf_A, Buf_B),
{ok, B} = numer_nifs:read_buffer(Buf_B),
?assertEqual(A_transposed, B),
ok = numer_nifs:destroy_buffer(Buf_A),
ok = numer_nifs:destroy_buffer(Buf_B),
numer_nifs:destroy_context(Ctx).
GEAM : C = α op ( A ) + β op ( B )
geam_test()->
{ok, Ctx} = numer_nifs:new_context(),
B = [[1,2,3,4],[5,6,7,8],[9,10,11,12]],
_alpha = 1.0,
_beta = 1.0,
_m = 3,
_n = 4,
{ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, Buf_B} = numer_nifs:new_matrix_float_buffer(Ctx, B, ?ROW_MAJOR),
{ok, Buf_C} = numer_nifs:new_matrix_float_buffer(Ctx, _m, _n, ?ROW_MAJOR),
ok = numer_nifs:geam(Ctx, ?NO_TRANSPOSE, ?NO_TRANSPOSE, _m, _n, _alpha, Buf_A, _beta, Buf_B, Buf_C),
{ok, C} = numer_nifs:read_buffer(Buf_C),
?assertEqual([[8.0,10.0,18.0,7.0],[9.0,10.0,13.0,10.0],[12.0,17.0,110.0,16.0]], C),
ok = numer_nifs:destroy_buffer(Buf_A),
ok = numer_nifs:destroy_buffer(Buf_B),
numer_nifs:destroy_context(Ctx).
smm ( Scalar Matrix Multiply )
smm_test()->
{ok, Ctx} = numer_nifs:new_context(),
A = [[4.0,6.0,8.0,2.0],[5.0,7.0,9.0,3.0]],
_alpha = 5.0,
{ok, Buf_A} = numer_nifs:new_matrix_float_buffer(Ctx, A, ?ROW_MAJOR),
{ok, Buf_B} = numer_nifs:new_matrix_float_buffer(Ctx, _m, _n, ?ROW_MAJOR),
ok = numer_nifs:smm(Ctx, _alpha, Buf_A, Buf_B),
{ok, B} = numer_nifs:read_buffer(Buf_B),
?assertEqual([[20.0,30.0,40.0,10.0],[25.0,35.0,45.0,15.0]], B),
ok = numer_nifs:destroy_buffer(Buf_A),
ok = numer_nifs:destroy_buffer(Buf_B),
numer_nifs:destroy_context(Ctx).
smm_vector_test()- >
{ ok , : new_context ( ) ,
A = [ 4.0,6.0,8.0,2.0,5.0,7.0,9.0,3.0 ] ,
_ alpha = 5.0 ,
{ ok , Buf_A } = numer_nifs : new_float_buffer(Ctx , length(A ) ) ,
numer_nifs : write_buffer(Buf_A , A ) ,
{ ok , Buf_B } = numer_nifs : new_float_buffer(Ctx , length(A ) ) ,
ok = numer_nifs : smm(Ctx , _ alpha , Buf_A , Buf_B ) ,
ok = numer_nifs : ) , |
ba2dde674f85179698c0a16c0974539ae9f99e5117b861455026cb4f7c40bef1 | Eonblast/Scalaxis | tx_item_state.erl | 2009 - 2011 Zuse Institute Berlin
and onScale solutions GmbH
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
%   distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
@author < >
%% @doc Part of generic transaction implementation -
The state for a single request in a transaction of a TM and RTM .
%% @version $Id$
-module(tx_item_state).
-author('').
-vsn('$Id$').
%-define(TRACE(X,Y), io:format(X,Y)).
-define(TRACE(X,Y), ok).
%% Operations on tx_item_state
-export([new/1, new/3]).
-export([get_txid/1]).
-export([get_maj_for_prepared/1]).
-export([get_itemid/1, set_itemid/2]).
-export([get_decided/1]).
-export([set_decided/2]).
-export([inc_numprepared/1, inc_numabort/1]).
-export([newly_decided/1]).
-export([get_paxosids_rtlogs_tps/1,set_paxosids_rtlogs_tps/2]).
-export([set_tp_for_paxosid/3]).
-export([get_status/1, set_status/2]).
-export([hold_back/2, get_hold_back/1, set_hold_back/2]).
-ifdef(with_export_type_support).
-export_type([tx_item_id/0, tx_item_state/0]).
-export_type([paxos_id/0]).
-endif.
-type paxos_id() :: {paxos_id, util:global_uid()}.
-type tx_item_id() :: {tx_item_id, util:global_uid()}.
-type tx_item_state() ::
{
       %% 1 TxItemId,          id of the item
       %% 2 tx_item_state,     data type tag for debugging
       %% 3 TxId,              part of transaction with id TxId
       %% 4 TLogEntry,         corresponding transaction log entry
       %% 5 Maj_for_prepared,  prepare votes to decide prepared
       %% 6 Maj_for_abort,     abort votes to decide abort
       %% 7 Decided?,          current decision status
       %% 8 Numprepared,       number of received prepare votes
       %% 9 Numabort,          number of received abort votes
       %% 10 [{PaxosID, RTLogEntry, TP}], involved PaxosIDs
       %% 11 Status,           item status
       %% 12 HoldBackQueue,    when not initialized
}.
%% @TODO maybe the following entries are also necessary?:
%% tx_tm_helper_behaviour to use? needed? for what?,
%% timeout before the first RTM takes over
%% @doc Create a fresh item state that carries only its id; all other
%%      fields are placeholders until new/3 (or message processing) fills
%%      them in. Majority thresholds are derived from the configured
%%      replication factor.
-spec new(tx_item_id()) -> tx_item_state().
new(ItemId) ->
    ReplicationDegree = config:read(replication_factor),
    MajPrepared = quorum:majority_for_accept(ReplicationDegree),
    MajAbort = quorum:majority_for_deny(ReplicationDegree),
    {ItemId, tx_item_state, undefined_tx_id, empty_tlog_entry,
     MajPrepared, MajAbort,
     false, 0, 0, _NoPaxosIds = [], uninitialized, _HoldBack = []}.
%% @doc Create an item state for the given transaction and translog entry.
%%      One paxos instance (id, replicated tlog entry, TP placeholder) is
%%      set up per replica of the entry.
%% Bug fix: a garbled duplicate of the commented-out PaxosIds line had
%% lost its comment marker (and was not valid Erlang); it is restored as
%% a comment here.
-spec new(tx_item_id(), tx_state:tx_id(), tx_tlog:tlog_entry())
        -> tx_item_state().
new(ItemId, TxId, TLogEntry) ->
    %% expand TransLogEntry to replicated translog entries
    Module = tx_tlog:get_entry_operation(TLogEntry),
    RTLogEntries = apply(Module, validate_prefilter, [TLogEntry]),
    %% PaxosIds = [{paxos_id, util:get_global_uid()} || _ <- RTLogEntries],
    PaxosIds = [ {util:get_global_uid()} || _ <- RTLogEntries ],
    %% TPs are not yet known for any paxos instance
    TPs = [ unknown || _ <- PaxosIds ],
    PaxIDsRTLogsTPs = lists:zip3(PaxosIds, RTLogEntries, TPs),
    ReplDeg = config:read(replication_factor),
    {ItemId, tx_item_state, TxId, TLogEntry,
     quorum:majority_for_accept(ReplDeg), quorum:majority_for_deny(ReplDeg),
     false, 0, 0, PaxIDsRTLogsTPs,
     uninitialized, _HoldBack = []}.
%% ---- positional accessors for the tx_item_state 12-tuple ----
-spec get_itemid(tx_item_state()) -> tx_item_id().
get_itemid(State) -> element(1, State).
-spec set_itemid(tx_item_state(), tx_item_id()) -> tx_item_state().
set_itemid(State, Val) -> setelement(1, State, Val).
%% id of the transaction this item belongs to (element 3)
-spec get_txid(tx_item_state()) -> tx_state:tx_id() | undefined_tx_id.
get_txid(State) -> element(3, State).
%% number of prepare votes needed to decide 'prepared' (element 5)
-spec get_maj_for_prepared(tx_item_state()) -> non_neg_integer().
get_maj_for_prepared(State) -> element(5, State).
%% number of abort votes needed to decide 'abort' (element 6)
-spec get_maj_for_abort(tx_item_state()) -> non_neg_integer().
get_maj_for_abort(State) -> element(6, State).
%% decision status (element 7): false until a decision was recorded
-spec get_decided(tx_item_state()) -> false | prepared | abort.
get_decided(State) -> element(7, State).
-spec set_decided(tx_item_state(), false | prepared | abort) -> tx_item_state().
set_decided(State, Val) -> setelement(7, State, Val).
%% vote counters (elements 8 and 9)
-spec get_numprepared(tx_item_state()) -> non_neg_integer().
get_numprepared(State) -> element(8, State).
-spec inc_numprepared(tx_item_state()) -> tx_item_state().
inc_numprepared(State) -> setelement(8, State, element(8,State) + 1).
-spec get_numabort(tx_item_state()) -> non_neg_integer().
get_numabort(State) -> element(9, State).
-spec inc_numabort(tx_item_state()) -> tx_item_state().
inc_numabort(State) -> setelement(9, State, element(9,State) + 1).
%% per-paxos-instance data: list of {PaxosId, RTLogEntry, TP} (element 10)
-spec get_paxosids_rtlogs_tps(tx_item_state()) ->
          [{paxos_id(), tx_tlog:tlog_entry(),
            comm:mypid()}].
get_paxosids_rtlogs_tps(State) -> element(10, State).
-spec set_paxosids_rtlogs_tps(tx_item_state(), [{paxos_id(), tx_tlog:tlog_entry(), comm:mypid()}]) -> tx_item_state().
set_paxosids_rtlogs_tps(State, NewTPList) -> setelement(10, State, NewTPList).
%% item status (element 11)
-spec get_status(tx_item_state()) -> new | uninitialized | ok.
get_status(State) -> element(11, State).
-spec set_status(tx_item_state(), new | uninitialized | ok) -> tx_item_state().
set_status(State, Status) -> setelement(11, State, Status).
%% hold-back queue (element 12): messages deferred while uninitialized;
%% hold_back/2 prepends, so the queue is in reverse arrival order
-spec hold_back(comm:message(), tx_item_state()) -> tx_item_state().
hold_back(Msg, State) -> setelement(12, State, [Msg | element(12, State)]).
-spec get_hold_back(tx_item_state()) -> [comm:message()].
get_hold_back(State) -> element(12, State).
-spec set_hold_back(tx_item_state(), [comm:message()]) -> tx_item_state().
set_hold_back(State, Queue) -> setelement(12, State, Queue).
%% @doc Report a decision exactly once: returns 'prepared' or 'abort' on
%%      the vote that makes the respective counter reach its majority
%%      threshold (exact equality, so later votes do not re-trigger), and
%%      false if a decision was already recorded or no majority is reached.
-spec newly_decided(tx_item_state()) -> false | prepared | abort.
newly_decided(State) ->
    case get_decided(State) of
        false ->
            HasPreparedMaj =
                get_numprepared(State) =:= get_maj_for_prepared(State),
            HasAbortMaj =
                get_numabort(State) =:= get_maj_for_abort(State),
            if HasPreparedMaj andalso not HasAbortMaj -> prepared;
               HasAbortMaj andalso not HasPreparedMaj -> abort;
               true -> false
            end;
        _AlreadyDecided -> false
    end.
%% @doc Record the TP for the given paxos id by replacing the third
%%      component of the matching {PaxosId, RTLogEntry, TP} triple.
-spec set_tp_for_paxosid(tx_item_state(), any(), paxos_id()) -> tx_item_state().
set_tp_for_paxosid(State, TP, PaxosId) ->
    Triples = get_paxosids_rtlogs_tps(State),
    OldTriple = lists:keyfind(PaxosId, 1, Triples),
    UpdatedTriples =
        lists:keyreplace(PaxosId, 1, Triples, setelement(3, OldTriple, TP)),
    set_paxosids_rtlogs_tps(State, UpdatedTriples).
| null | https://raw.githubusercontent.com/Eonblast/Scalaxis/10287d11428e627dca8c41c818745763b9f7e8d4/src/transactions/tx_item_state.erl | erlang | you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc Part of generic transaction implementation -
@version $Id$
-define(TRACE(X,Y), io:format(X,Y)).
Operations on tx_item_state
tx_tm_helper_behaviour to use? needed? for what?,
expand TransLogEntry to replicated translog entries | 2009 - 2011 Zuse Institute Berlin
and onScale solutions GmbH
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
The state for a single request in a transaction of a TM and RTM .
-module(tx_item_state).
-author('').
-vsn('$Id$').
-define(TRACE(X,Y), ok).
-export([new/1, new/3]).
-export([get_txid/1]).
-export([get_maj_for_prepared/1]).
-export([get_itemid/1, set_itemid/2]).
-export([get_decided/1]).
-export([set_decided/2]).
-export([inc_numprepared/1, inc_numabort/1]).
-export([newly_decided/1]).
-export([get_paxosids_rtlogs_tps/1,set_paxosids_rtlogs_tps/2]).
-export([set_tp_for_paxosid/3]).
-export([get_status/1, set_status/2]).
-export([hold_back/2, get_hold_back/1, set_hold_back/2]).
-ifdef(with_export_type_support).
-export_type([tx_item_id/0, tx_item_state/0]).
-export_type([paxos_id/0]).
-endif.
-type paxos_id() :: {paxos_id, util:global_uid()}.
-type tx_item_id() :: {tx_item_id, util:global_uid()}.
-type tx_item_state() ::
{
1 TxItemId , i d of the item
2 tx_item_state , data type tag for debugging
3 TxId , part of transaction with i d TxId
4 TLogEntry , corresponding transaction log entry
5 Maj_for_prepared , prepare votes to decide prepared
6 Maj_for_abort , abort votes to decide abort
7 Decided ? , current decision status
8 Numprepared , number of received prepare votes
9 Numabort , number of received abort votes
10 [ { PaxosID , RTLogEntry , TP } ] , involved PaxosIDs
11 Status , item status
12 HoldBackQueue , when not initialized
}.
@TODO maybe the following entries are also necessary ? :
timeout before the first RTM takes over
-spec new(tx_item_id()) -> tx_item_state().
new(ItemId) ->
ReplDeg = config:read(replication_factor),
{ItemId, tx_item_state, undefined_tx_id, empty_tlog_entry,
quorum:majority_for_accept(ReplDeg), quorum:majority_for_deny(ReplDeg),
false, 0, 0, _no_paxIds = [], uninitialized, _HoldBack = []}.
-spec new(tx_item_id(), tx_state:tx_id(), tx_tlog:tlog_entry())
-> tx_item_state().
new(ItemId, TxId, TLogEntry) ->
Module = tx_tlog:get_entry_operation(TLogEntry),
RTLogEntries = apply(Module, validate_prefilter, [TLogEntry]),
PaxosIds = [ { paxos_id , util : get_global_uid ( ) } || _ < - RTLogEntries ] ,
PaxosIds = [ {util:get_global_uid()} || _ <- RTLogEntries ],
TPs = [ unknown || _ <- PaxosIds ],
PaxIDsRTLogsTPs = lists:zip3(PaxosIds, RTLogEntries, TPs),
ReplDeg = config:read(replication_factor),
{ItemId, tx_item_state, TxId, TLogEntry,
quorum:majority_for_accept(ReplDeg), quorum:majority_for_deny(ReplDeg),
false, 0, 0, PaxIDsRTLogsTPs,
uninitialized, _HoldBack = []}.
-spec get_itemid(tx_item_state()) -> tx_item_id().
get_itemid(State) -> element(1, State).
-spec set_itemid(tx_item_state(), tx_item_id()) -> tx_item_state().
set_itemid(State, Val) -> setelement(1, State, Val).
-spec get_txid(tx_item_state()) -> tx_state:tx_id() | undefined_tx_id.
get_txid(State) -> element(3, State).
-spec get_maj_for_prepared(tx_item_state()) -> non_neg_integer().
get_maj_for_prepared(State) -> element(5, State).
-spec get_maj_for_abort(tx_item_state()) -> non_neg_integer().
get_maj_for_abort(State) -> element(6, State).
-spec get_decided(tx_item_state()) -> false | prepared | abort.
get_decided(State) -> element(7, State).
-spec set_decided(tx_item_state(), false | prepared | abort) -> tx_item_state().
set_decided(State, Val) -> setelement(7, State, Val).
-spec get_numprepared(tx_item_state()) -> non_neg_integer().
get_numprepared(State) -> element(8, State).
-spec inc_numprepared(tx_item_state()) -> tx_item_state().
inc_numprepared(State) -> setelement(8, State, element(8,State) + 1).
-spec get_numabort(tx_item_state()) -> non_neg_integer().
get_numabort(State) -> element(9, State).
-spec inc_numabort(tx_item_state()) -> tx_item_state().
inc_numabort(State) -> setelement(9, State, element(9,State) + 1).
-spec get_paxosids_rtlogs_tps(tx_item_state()) ->
[{paxos_id(), tx_tlog:tlog_entry(),
comm:mypid()}].
get_paxosids_rtlogs_tps(State) -> element(10, State).
-spec set_paxosids_rtlogs_tps(tx_item_state(), [{paxos_id(), tx_tlog:tlog_entry(), comm:mypid()}]) -> tx_item_state().
set_paxosids_rtlogs_tps(State, NewTPList) -> setelement(10, State, NewTPList).
-spec get_status(tx_item_state()) -> new | uninitialized | ok.
get_status(State) -> element(11, State).
-spec set_status(tx_item_state(), new | uninitialized | ok) -> tx_item_state().
set_status(State, Status) -> setelement(11, State, Status).
-spec hold_back(comm:message(), tx_item_state()) -> tx_item_state().
hold_back(Msg, State) -> setelement(12, State, [Msg | element(12, State)]).
-spec get_hold_back(tx_item_state()) -> [comm:message()].
get_hold_back(State) -> element(12, State).
-spec set_hold_back(tx_item_state(), [comm:message()]) -> tx_item_state().
set_hold_back(State, Queue) -> setelement(12, State, Queue).
-spec newly_decided(tx_item_state()) -> false | prepared | abort.
newly_decided(State) ->
case get_decided(State) of
false ->
Prepared = get_numprepared(State) =:= get_maj_for_prepared(State),
Abort = get_numabort(State) =:= get_maj_for_abort(State),
case {Prepared, Abort} of
{true, false} -> prepared;
{false, true} -> abort;
_ -> false
end;
_Any -> false
end.
-spec set_tp_for_paxosid(tx_item_state(), any(), paxos_id()) -> tx_item_state().
set_tp_for_paxosid(State, TP, PaxosId) ->
TPList = get_paxosids_rtlogs_tps(State),
Entry = lists:keyfind(PaxosId, 1, TPList),
NewTPList = lists:keyreplace(PaxosId, 1, TPList, setelement(3, Entry, TP)),
set_paxosids_rtlogs_tps(State, NewTPList).
|
41a6bd5bac0e40419da1c6d9e80d0232a1d4939c2dba01eab96766e613b53b67 | VERIMAG-Polyhedra/VPL | EqSet.mli | (** This module handles equality sets.
Equality sets are maintained in echelon form. *)
module Cs = Cstr.Rat
(** Type of equality set.
Each equality defines a variable in terms of the others. *)
type 'c t = (Var.t * 'c Cons.t) list
val to_string: (Var.t -> string) -> 'c t -> string
val to_string_ext: 'c Factory.t -> (Var.t -> string) -> 'c t -> string
(** Empty equality set: top. *)
val nil : 'c t
(** @return true if the given equality set is top. *)
val isTop: 'c t -> bool
(** @return the list of equalities of the set. *)
val list : 'c t -> 'c Cons.t list
(** Rewrites a constraint according to an equality set.
@param factory the factory
@param set the equality set
@param cons the constraint to rewrite
@return [cons] where each variable in [set] has been substitued with its definition. *)
val filter : 'c Factory.t -> 'c t -> 'c Cons.t -> 'c Cons.t
(** Rewrites a constraint according to an equality set.
    @param factory the factory
    @param set the equality set
    @param cons the constraint to rewrite
    @return [cstr'] which is equal to [cons] where variables have been substitued by their definition in [eqset]
    @return [cert]: the combination of constraints of [set] that must be added to [cons] to obtain [cstr']
    For instance, [filter2 f (x = 2y+1) (2x <= 1)] returns [(4y<=-1, 2x - 4y = 2)].*)
val filter2 : 'c Factory.t -> 'c t -> Cs.t -> Cs.t * 'c Cons.t
(** Result of an inclusion testing. *)
type 'c rel_t =
| NoIncl
| Incl of 'c list
(** Tests the inclusion between two equality sets
    @param factory the factory for [set1]
    @param set1 the first equality set
    @param set2 the second equality set
    @return true if [set1] <= [set2], ie. if [set2] includes [set1]. *)
val leq : 'c1 Factory.t -> 'c1 t -> 'c2 t -> 'c1 rel_t
(** [satisfy set point] tests whether [point] satisfies the equalities of
    [set]. *)
val satisfy : 'c t -> Cs.Vec.t -> bool
(** Equality of two equality sets. Does not check certificates. *)
val equal: 'c1 t -> 'c2 t -> bool
(** Picks a variable of the constraint together with its coefficient
    (NOTE(review): selection policy not documented here — see the
    implementation). *)
val choose : Cs.t -> Var.t * Cs.Vec.Coeff.t
(** Renames one variable into another throughout the set (argument order:
    presumably [rename factory set fromVar toVar] — confirm against the
    implementation). *)
val rename: 'c Factory.t -> 'c t -> Var.t -> Var.t -> 'c t
val pick: Var.t option Rtree.t -> 'c Cons.t -> Var.t option
(** [subst factory x c s] substitutes [x] in [s] by its definition in [c]. *)
val subst: 'c Factory.t -> Var.t -> 'c Cons.t -> 'c t -> 'c t
val tryDefs: 'c Factory.t -> Var.t option Rtree.t -> 'c t -> ('c Cons.t * Var.t) option * 'c t
val trySubstM: 'c Factory.t -> Var.t option Rtree.t -> 'c t -> ('c Cons.t * Var.t) option * 'c t
val trySubst: 'c Factory.t -> Var.t -> 'c t -> 'c Cons.t option * 'c t
(** Result of a meet: either the resulting equality set ([Added]), or
    bottom together with its certificate ([Bot]). *)
type 'c meetT =
| Added of 'c t
| Bot of 'c
val meetEq: 'c meetT -> 'c meetT -> bool
val meet_to_string : 'c Factory.t -> (Var.t -> string) -> 'c meetT -> string
(** Adds a list of constraints to the equality set. *)
val addM: 'c Factory.t -> 'c t -> 'c Cons.t list -> 'c meetT
(** Adds a single constraint to the equality set. *)
val add: 'c Factory.t -> 'c t -> 'c Cons.t -> 'c meetT
val joinSetup_1: 'c2 Factory.t -> Var.t -> Var.t option Rtree.t -> Var.t -> 'c1 t
  -> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
val joinSetup_2: 'c1 Factory.t -> Var.t -> Var.t option Rtree.t -> Var.t -> 'c2 t
  -> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
val minkowskiSetup_1: 'c2 Factory.t -> Var.t -> Var.t option Rtree.t -> 'c1 t
  -> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
val minkowskiSetup_2: 'c1 Factory.t -> Var.t -> Var.t option Rtree.t -> 'c2 t
  -> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
| null | https://raw.githubusercontent.com/VERIMAG-Polyhedra/VPL/cd78d6e7d120508fd5a694bdb01300477e5646f8/ocaml/core/EqSet.mli | ocaml | * This module handles equality sets.
Equality sets are maintained in echelon form.
* Type of equality set.
Each equality defines a variable in terms of the others.
* Empty equality set: top.
* @return true if the given equality set is top.
* @return the list of equalities of the set.
* Rewrites a constraint according to an equality set.
@param factory the factory
@param set the equality set
@param cons the constraint to rewrite
@return [cons] where each variable in [set] has been substitued with its definition.
* Result of an inclusion testing.
* Does not check certificates.
* [subst factory x c s] substitutes [x] in [s] by its definition in [c]. |
module Cs = Cstr.Rat
type 'c t = (Var.t * 'c Cons.t) list
val to_string: (Var.t -> string) -> 'c t -> string
val to_string_ext: 'c Factory.t -> (Var.t -> string) -> 'c t -> string
val nil : 'c t
val isTop: 'c t -> bool
val list : 'c t -> 'c Cons.t list
val filter : 'c Factory.t -> 'c t -> 'c Cons.t -> 'c Cons.t
* Rewrites a constraint according to an equality set .
@param factory the factory
@param set the equality set
@param cons the constraint to rewrite
@return [ cstr ' ] which is equal to [ cons ] where variables have been substitued by their definition in [ eqset ]
@return [ cert ] : the combination of constraints of [ set ] that must be added to [ cons ] to obtain [ cstr ' ]
For instance , [ filter2 f ( x = 2y+1 ) ( 2x < = 1 ) ] returns [ ( 4y<=-1 , 2x - 4y = 2 ) ] .
@param factory the factory
@param set the equality set
@param cons the constraint to rewrite
@return [cstr'] which is equal to [cons] where variables have been substitued by their definition in [eqset]
@return [cert]: the combination of constraints of [set] that must be added to [cons] to obtain [cstr']
For instance, [filter2 f (x = 2y+1) (2x <= 1)] returns [(4y<=-1, 2x - 4y = 2)].*)
val filter2 : 'c Factory.t -> 'c t -> Cs.t -> Cs.t * 'c Cons.t
type 'c rel_t =
| NoIncl
| Incl of 'c list
* Tests the inclusion between two equality sets
@param factory the factory for [ set1 ]
@param set1 the first equality set
@param set2 the second equality set
@return true if [ set1 ] < = [ set2 ] , ie . if [ set2 ] includes [ set1 ] .
@param factory the factory for [set1]
@param set1 the first equality set
@param set2 the second equality set
@return true if [set1] <= [set2], ie. if [set2] includes [set1]. *)
val leq : 'c1 Factory.t -> 'c1 t -> 'c2 t -> 'c1 rel_t
val satisfy : 'c t -> Cs.Vec.t -> bool
val equal: 'c1 t -> 'c2 t -> bool
val choose : Cs.t -> Var.t * Cs.Vec.Coeff.t
val rename: 'c Factory.t -> 'c t -> Var.t -> Var.t -> 'c t
val pick: Var.t option Rtree.t -> 'c Cons.t -> Var.t option
val subst: 'c Factory.t -> Var.t -> 'c Cons.t -> 'c t -> 'c t
val tryDefs: 'c Factory.t -> Var.t option Rtree.t -> 'c t -> ('c Cons.t * Var.t) option * 'c t
val trySubstM: 'c Factory.t -> Var.t option Rtree.t -> 'c t -> ('c Cons.t * Var.t) option * 'c t
val trySubst: 'c Factory.t -> Var.t -> 'c t -> 'c Cons.t option * 'c t
type 'c meetT =
| Added of 'c t
| Bot of 'c
val meetEq: 'c meetT -> 'c meetT -> bool
val meet_to_string : 'c Factory.t -> (Var.t -> string) -> 'c meetT -> string
val addM: 'c Factory.t -> 'c t -> 'c Cons.t list -> 'c meetT
val add: 'c Factory.t -> 'c t -> 'c Cons.t -> 'c meetT
val joinSetup_1: 'c2 Factory.t -> Var.t -> Var.t option Rtree.t -> Var.t -> 'c1 t
-> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
val joinSetup_2: 'c1 Factory.t -> Var.t -> Var.t option Rtree.t -> Var.t -> 'c2 t
-> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
val minkowskiSetup_1: 'c2 Factory.t -> Var.t -> Var.t option Rtree.t -> 'c1 t
-> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
val minkowskiSetup_2: 'c1 Factory.t -> Var.t -> Var.t option Rtree.t -> 'c2 t
-> Var.t * Var.t option Rtree.t * (Var.t * (('c1,'c2) Cons.discr_t) Cons.t) list
|
9a7431dfd409b5e577edf008e2f2bb53933b73ab11e16435fd855b61a2f4eacf | static-analysis-engineering/codehawk | bCHDictionary.ml | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
(* =============================================================================
   CodeHawk Binary Analyzer
   Author: Henny Sipma
   ------------------------------------------------------------------------------
   The MIT License (MIT)

   Copyright (c) 2005-2019 Kestrel Technology LLC
   Copyright (c) 2020      Henny Sipma
   Copyright (c) 2021-2022 Aarno Labs LLC

   Permission is hereby granted, free of charge, to any person obtaining a copy
   of this software and associated documentation files (the "Software"), to deal
   in the Software without restriction, including without limitation the rights
   to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
   copies of the Software, and to permit persons to whom the Software is
   furnished to do so, subject to the following conditions:

   The above copyright notice and this permission notice shall be included in all
   copies or substantial portions of the Software.

   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
   IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
   FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
   AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
   LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
   OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
   SOFTWARE.
   ============================================================================= *)
(* chlib *)
open CHPretty
(* chutil *)
open CHLogger
open CHIndexTable
open CHPrettyUtil
open CHStringIndexTable
open CHXmlDocument
(* bchlib *)
open BCHBasicTypes
open BCHDoubleword
open BCHLibTypes
open BCHSumTypeSerializer
open BCHUtilities
(* Log an unrecognized serialization tag to the error log and raise a
   BCH_failure carrying the same message. *)
let raise_tag_error (name: string) (tag: string) (accepted: string list) =
  let accepted_p = pretty_print_list accepted (fun s -> STR s) "" ", " "" in
  let msg =
    LBLOCK [
        STR "Type "; STR name;
        STR " tag: "; STR tag;
        STR " not recognized. Accepted tags: ";
        accepted_p] in
  ch_error_log#add "serialization tag" msg;
  raise (BCH_failure msg)
(* Wrap a raw string as a constantstring triple (text, is-hex-encoded,
   length of the original string). Strings that contain control
   characters are stored hex-encoded. *)
let mk_constantstring (s: string): constantstring =
  let len = String.length s in
  match has_control_characters s with
  | true -> (hex_string s, true, len)
  | false -> (s, false, len)
(* Dictionary that assigns persistent integer indices to frequently used
   values (strings, addresses, registers, flags) so they can be referenced
   compactly and written to / read from xml. Each kind of value has its own
   index table; the tables are collected in [tables]/[stringtables] so they
   can be reset and (de)serialized uniformly. *)
class bdictionary_t:bdictionary_int =
object (self)

  val string_table = mk_string_index_table "string-table"
  val address_table = mk_index_table "address-table"
  val arm_extension_register_table = mk_index_table "arm-extension-register-table"
  val arm_extension_register_element_table =
    mk_index_table "arm-extension-register-element-table"
  val arm_extension_register_replicated_element_table =
    mk_index_table "arm-extension-register-replicated-element-table"
  val register_table = mk_index_table "register-table"
  val flag_table = mk_index_table "flag-table"

  (* populated by the initializer; order is the serialization order *)
  val mutable tables = []
  val mutable stringtables = []

  initializer
    begin
      tables <- [
        address_table;
        arm_extension_register_table;
        arm_extension_register_element_table;
        arm_extension_register_replicated_element_table;
        register_table;
        flag_table;
      ];
      stringtables <- [
        string_table
      ]
    end

  (* Empty all tables; previously handed-out indices become invalid. *)
  method reset =
    begin
      List.iter (fun t -> t#reset) stringtables;
      List.iter (fun t -> t#reset) tables
    end

  method index_string (s:string):int = string_table#add s

  method get_string (index:int) = string_table#retrieve index

  (* An address is keyed by its hex-string representation (single tag). *)
  method index_address (dw:doubleword_int) =
    address_table#add ([dw#to_hex_string],[])

  (* Index an address that is already given in string form. *)
  method index_address_string (s:string) =
    address_table#add ([s],[])

  method get_address (index:int) =
    let (tags,_) = address_table#retrieve index in
    let t = t "address" tags in
    (* fails (with a BCHDictionary.get_address trace record) if the stored
       tag is not a valid doubleword string *)
    fail_tvalue
      (trerror_record (STR "BCHDictionary.get_address"))
      (string_to_doubleword (t 0))

  (* Return the stored address string without converting to a doubleword. *)
  method get_address_string (index:int) =
    let (tags,_) = address_table#retrieve index in
    let t = t "address" tags in
    (t 0)

  (* Extension register: keyed by register type (tag) and index (arg). *)
  method index_arm_extension_register (r: arm_extension_register_t) =
    arm_extension_register_table#add
      ([arm_extension_reg_type_mfts#ts r.armxr_type], [r.armxr_index])

  method get_arm_extension_register (index: int) =
    let name = "arm_extension_register" in
    let (tags, args) = arm_extension_register_table#retrieve index in
    let t = t name tags in
    let a = a name args in
    {armxr_type = arm_extension_reg_type_mfts#fs (t 0); armxr_index = a 0}

  (* Element of an extension register: the register itself is stored via
     its own index. *)
  method index_arm_extension_register_element
           (e: arm_extension_register_element_t) =
    arm_extension_register_element_table#add
      ([],
       [self#index_arm_extension_register e.armxr;
        e.armxr_elem_index;
        e.armxr_elem_size])

  method get_arm_extension_register_element (index: int) =
    let name = "arm_extension_register_element" in
    let (_, args) = arm_extension_register_element_table#retrieve index in
    let a = a name args in
    {armxr = self#get_arm_extension_register (a 0);
     armxr_elem_index = (a 1);
     armxr_elem_size = (a 2)}

  method index_arm_extension_register_replicated_element
           (e: arm_extension_register_replicated_element_t) =
    arm_extension_register_replicated_element_table#add
      ([],
       [self#index_arm_extension_register e.armxrr;
        e.armxrr_elem_size;
        e.armxrr_elem_count])

  method get_arm_extension_register_replicated_element (index: int) =
    let name = "arm_extension_register_replicated_element" in
    let (_, args) =
      arm_extension_register_replicated_element_table#retrieve index in
    let a = a name args in
    {armxrr = self#get_arm_extension_register (a 0);
     armxrr_elem_size = (a 1);
     armxrr_elem_count = (a 2)}

  (* A flag is keyed by an architecture tag ("x" for x86, "a" for ARM, per
     flag_mcts) followed by the serialized flag name. *)
  method index_flag (f: flag_t) =
    let tags = [flag_mcts#ts f] in
    let key = match f with
      | X86Flag e -> (tags @ [eflag_mfts#ts e], [])
      | ARMCCFlag c -> (tags @ [arm_cc_flag_mfts#ts c], []) in
    flag_table#add key

  method get_flag (index: int) =
    let name = flag_mcts#name in
    let (tags, args) = flag_table#retrieve index in
    let t = t name tags in
    match (t 0) with
    | "x" -> X86Flag (eflag_mfts#fs (t 1))
    | "a" -> ARMCCFlag (arm_cc_flag_mfts#fs (t 1))
    | s -> raise_tag_error name s flag_mcts#tags

  (* A register key starts with the sum-type tag of the register kind;
     depending on the kind the register is encoded as an extra string tag
     (named registers), an integer argument (numbered registers), or an
     index into one of the extension-register tables. *)
  method index_register (r:register_t) =
    let tags = [register_mcts#ts r] in
    let key = match r with
      | SegmentRegister s -> (tags @ [ segment_mfts#ts s ],[])
      | CPURegister r -> (tags @ [ cpureg_mfts#ts r ],[])
      | DoubleRegister (r1,r2) ->
         (tags @ [ cpureg_mfts#ts r1 ; cpureg_mfts#ts r2 ],[])
      | FloatingPointRegister i
      | ControlRegister i
      | DebugRegister i
      | MmxRegister i
      | XmmRegister i -> (tags,[i])
      | MIPSRegister r -> (tags @ [ mips_reg_mfts#ts r ],[])
      | MIPSSpecialRegister r -> (tags @ [mips_special_reg_mfts#ts r], [])
      | MIPSFloatingPointRegister i -> (tags, [i])
      | ARMRegister r -> (tags @ [arm_reg_mfts#ts r], [])
      | ARMSpecialRegister r -> (tags @ [arm_special_reg_mfts#ts r], [])
      | ARMExtensionRegister xr ->
         (tags, [self#index_arm_extension_register xr])
      | ARMExtensionRegisterElement xre ->
         (tags, [self#index_arm_extension_register_element xre])
      | ARMExtensionRegisterReplicatedElement xrre ->
         (tags, [self#index_arm_extension_register_replicated_element xrre])
      | PowerGPRegister r -> (tags, [r]) in
    register_table#add key

  (* Inverse of index_register: dispatch on the kind tag. *)
  method get_register (index:int) =
    let name = register_mcts#name in
    let (tags,args) = register_table#retrieve index in
    let t = t name tags in
    let a = a name args in
    match (t 0) with
    | "s" -> SegmentRegister (segment_mfts#fs (t 1))
    | "c" -> CPURegister (cpureg_mfts#fs (t 1))
    | "d" -> DoubleRegister (cpureg_mfts#fs (t 1),cpureg_mfts#fs (t 2))
    | "f" -> FloatingPointRegister (a 0)
    | "ctr" -> ControlRegister (a 0)
    | "dbg" -> DebugRegister (a 0)
    | "m" -> MmxRegister (a 0)
    | "x" -> XmmRegister (a 0)
    | "p" -> MIPSRegister (mips_reg_mfts#fs (t 1))
    | "ps" -> MIPSSpecialRegister (mips_special_reg_mfts#fs (t 1))
    | "pfp" -> MIPSFloatingPointRegister (a 0)
    | "a" -> ARMRegister (arm_reg_mfts#fs (t 1))
    | "as" -> ARMSpecialRegister (arm_special_reg_mfts#fs (t 1))
    | "armx" -> ARMExtensionRegister (self#get_arm_extension_register (a 0))
    | "armxe" ->
       ARMExtensionRegisterElement (self#get_arm_extension_register_element (a 0))
    | "armxr" ->
       ARMExtensionRegisterReplicatedElement
         (self#get_arm_extension_register_replicated_element (a 0))
    | "pwrgpr" -> PowerGPRegister (a 0)
    | s -> raise_tag_error name s register_mcts#tags

  (* Store a register reference as an integer attribute on an xml node. *)
  method write_xml_register ?(tag="ireg") (node:xml_element_int) (r:register_t) =
    node#setIntAttribute tag (self#index_register r)

  method read_xml_register ?(tag="ireg") (node:xml_element_int):register_t =
    self#get_register (node#getIntAttribute tag)

  (* Store a string reference as an integer attribute on an xml node. *)
  method write_xml_string ?(tag="istr") (node:xml_element_int) (s:string) =
    node#setIntAttribute tag (self#index_string s)

  method read_xml_string ?(tag="istr") (node:xml_element_int):string =
    self#get_string (node#getIntAttribute tag)

  (* Serialize all tables as child elements named after each table. *)
  method write_xml (node:xml_element_int) =
    begin
      node#appendChildren
        (List.map
           (fun t -> let tnode = xmlElement t#get_name in
                     begin t#write_xml tnode ; tnode end) stringtables) ;
      node#appendChildren
        (List.map
           (fun t -> let tnode = xmlElement t#get_name in
                     begin t#write_xml tnode ; tnode end) tables)
    end

  (* Populate all tables from a node previously written by write_xml. *)
  method read_xml (node:xml_element_int) =
    let getc = node#getTaggedChild in
    begin
      List.iter (fun t -> t#read_xml (getc t#get_name)) stringtables ;
      List.iter (fun t -> t#read_xml (getc t#get_name)) tables
    end

end
let bdictionary = new bdictionary_t
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/c1b3158e0d73cda7cfc10d75f6173f4297991a82/CodeHawk/CHB/bchlib/bCHDictionary.ml | ocaml | chutil
bchlib | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 ( c ) 2021 - 2022 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020 Henny Sipma
Copyright (c) 2021-2022 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHPretty
open CHLogger
open CHIndexTable
open CHPrettyUtil
open CHStringIndexTable
open CHXmlDocument
open BCHBasicTypes
open BCHDoubleword
open BCHLibTypes
open BCHSumTypeSerializer
open BCHUtilities
let raise_tag_error (name:string) (tag:string) (accepted:string list) =
let msg =
LBLOCK [
STR "Type ";
STR name;
STR " tag: ";
STR tag;
STR " not recognized. Accepted tags: ";
pretty_print_list accepted (fun s -> STR s) "" ", " ""] in
begin
ch_error_log#add "serialization tag" msg;
raise (BCH_failure msg)
end
let mk_constantstring (s:string):constantstring =
if has_control_characters s then
(hex_string s, true, String.length s)
else
(s,false, String.length s)
class bdictionary_t:bdictionary_int =
object (self)
val string_table = mk_string_index_table "string-table"
val address_table = mk_index_table "address-table"
val arm_extension_register_table = mk_index_table "arm-extension-register-table"
val arm_extension_register_element_table =
mk_index_table "arm-extension-register-element-table"
val arm_extension_register_replicated_element_table =
mk_index_table "arm-extension-register-replicated-element-table"
val register_table = mk_index_table "register-table"
val flag_table = mk_index_table "flag-table"
val mutable tables = []
val mutable stringtables = []
initializer
begin
tables <- [
address_table;
arm_extension_register_table;
arm_extension_register_element_table;
arm_extension_register_replicated_element_table;
register_table;
flag_table;
];
stringtables <- [
string_table
]
end
method reset =
begin
List.iter (fun t -> t#reset) stringtables;
List.iter (fun t -> t#reset) tables
end
method index_string (s:string):int = string_table#add s
method get_string (index:int) = string_table#retrieve index
method index_address (dw:doubleword_int) =
address_table#add ([dw#to_hex_string],[])
method index_address_string (s:string) =
address_table#add ([s],[])
method get_address (index:int) =
let (tags,_) = address_table#retrieve index in
let t = t "address" tags in
fail_tvalue
(trerror_record (STR "BCHDictionary.get_address"))
(string_to_doubleword (t 0))
method get_address_string (index:int) =
let (tags,_) = address_table#retrieve index in
let t = t "address" tags in
(t 0)
method index_arm_extension_register (r: arm_extension_register_t) =
arm_extension_register_table#add
([arm_extension_reg_type_mfts#ts r.armxr_type], [r.armxr_index])
method get_arm_extension_register (index: int) =
let name = "arm_extension_register" in
let (tags, args) = arm_extension_register_table#retrieve index in
let t = t name tags in
let a = a name args in
{armxr_type = arm_extension_reg_type_mfts#fs (t 0); armxr_index = a 0}
method index_arm_extension_register_element
(e: arm_extension_register_element_t) =
arm_extension_register_element_table#add
([],
[self#index_arm_extension_register e.armxr;
e.armxr_elem_index;
e.armxr_elem_size])
method get_arm_extension_register_element (index: int) =
let name = "arm_extension_register_element" in
let (_, args) = arm_extension_register_element_table#retrieve index in
let a = a name args in
{armxr = self#get_arm_extension_register (a 0);
armxr_elem_index = (a 1);
armxr_elem_size = (a 2)}
method index_arm_extension_register_replicated_element
(e: arm_extension_register_replicated_element_t) =
arm_extension_register_replicated_element_table#add
([],
[self#index_arm_extension_register e.armxrr;
e.armxrr_elem_size;
e.armxrr_elem_count])
method get_arm_extension_register_replicated_element (index: int) =
let name = "arm_extension_register_replicated_element" in
let (_, args) =
arm_extension_register_replicated_element_table#retrieve index in
let a = a name args in
{armxrr = self#get_arm_extension_register (a 0);
armxrr_elem_size = (a 1);
armxrr_elem_count = (a 2)}
method index_flag (f: flag_t) =
let tags = [flag_mcts#ts f] in
let key = match f with
| X86Flag e -> (tags @ [eflag_mfts#ts e], [])
| ARMCCFlag c -> (tags @ [arm_cc_flag_mfts#ts c], []) in
flag_table#add key
method get_flag (index: int) =
let name = flag_mcts#name in
let (tags, args) = flag_table#retrieve index in
let t = t name tags in
match (t 0) with
| "x" -> X86Flag (eflag_mfts#fs (t 1))
| "a" -> ARMCCFlag (arm_cc_flag_mfts#fs (t 1))
| s -> raise_tag_error name s flag_mcts#tags
method index_register (r:register_t) =
let tags = [register_mcts#ts r] in
let key = match r with
| SegmentRegister s -> (tags @ [ segment_mfts#ts s ],[])
| CPURegister r -> (tags @ [ cpureg_mfts#ts r ],[])
| DoubleRegister (r1,r2) ->
(tags @ [ cpureg_mfts#ts r1 ; cpureg_mfts#ts r2 ],[])
| FloatingPointRegister i
| ControlRegister i
| DebugRegister i
| MmxRegister i
| XmmRegister i -> (tags,[i])
| MIPSRegister r -> (tags @ [ mips_reg_mfts#ts r ],[])
| MIPSSpecialRegister r -> (tags @ [mips_special_reg_mfts#ts r], [])
| MIPSFloatingPointRegister i -> (tags, [i])
| ARMRegister r -> (tags @ [arm_reg_mfts#ts r], [])
| ARMSpecialRegister r -> (tags @ [arm_special_reg_mfts#ts r], [])
| ARMExtensionRegister xr ->
(tags, [self#index_arm_extension_register xr])
| ARMExtensionRegisterElement xre ->
(tags, [self#index_arm_extension_register_element xre])
| ARMExtensionRegisterReplicatedElement xrre ->
(tags, [self#index_arm_extension_register_replicated_element xrre])
| PowerGPRegister r -> (tags, [r]) in
register_table#add key
method get_register (index:int) =
let name = register_mcts#name in
let (tags,args) = register_table#retrieve index in
let t = t name tags in
let a = a name args in
match (t 0) with
| "s" -> SegmentRegister (segment_mfts#fs (t 1))
| "c" -> CPURegister (cpureg_mfts#fs (t 1))
| "d" -> DoubleRegister (cpureg_mfts#fs (t 1),cpureg_mfts#fs (t 2))
| "f" -> FloatingPointRegister (a 0)
| "ctr" -> ControlRegister (a 0)
| "dbg" -> DebugRegister (a 0)
| "m" -> MmxRegister (a 0)
| "x" -> XmmRegister (a 0)
| "p" -> MIPSRegister (mips_reg_mfts#fs (t 1))
| "ps" -> MIPSSpecialRegister (mips_special_reg_mfts#fs (t 1))
| "pfp" -> MIPSFloatingPointRegister (a 0)
| "a" -> ARMRegister (arm_reg_mfts#fs (t 1))
| "as" -> ARMSpecialRegister (arm_special_reg_mfts#fs (t 1))
| "armx" -> ARMExtensionRegister (self#get_arm_extension_register (a 0))
| "armxe" ->
ARMExtensionRegisterElement (self#get_arm_extension_register_element (a 0))
| "armxr" ->
ARMExtensionRegisterReplicatedElement
(self#get_arm_extension_register_replicated_element (a 0))
| "pwrgpr" -> PowerGPRegister (a 0)
| s -> raise_tag_error name s register_mcts#tags
method write_xml_register ?(tag="ireg") (node:xml_element_int) (r:register_t) =
node#setIntAttribute tag (self#index_register r)
method read_xml_register ?(tag="ireg") (node:xml_element_int):register_t =
self#get_register (node#getIntAttribute tag)
method write_xml_string ?(tag="istr") (node:xml_element_int) (s:string) =
node#setIntAttribute tag (self#index_string s)
method read_xml_string ?(tag="istr") (node:xml_element_int):string =
self#get_string (node#getIntAttribute tag)
method write_xml (node:xml_element_int) =
begin
node#appendChildren
(List.map
(fun t -> let tnode = xmlElement t#get_name in
begin t#write_xml tnode ; tnode end) stringtables) ;
node#appendChildren
(List.map
(fun t -> let tnode = xmlElement t#get_name in
begin t#write_xml tnode ; tnode end) tables)
end
method read_xml (node:xml_element_int) =
let getc = node#getTaggedChild in
begin
List.iter (fun t -> t#read_xml (getc t#get_name)) stringtables ;
List.iter (fun t -> t#read_xml (getc t#get_name)) tables
end
end
let bdictionary = new bdictionary_t
|
2da64099d1668d3cc928db0f77513d410470a4304058a4428461e852fe9ac804 | mirage/retreat.mirage.io | page.ml | open Tyxml.Html
let header t =
head
(title (txt t))
([meta ~a:[a_charset "UTF-8"] ();
style [ txt
{___|body {
font-family: monospace;
color: #333;
margin: 2% 0 10% 15%;
width: 45%;
}
a, a:visited {
color: #333;
text-decoration: none;
font-weight: bold;
}|___} ]
])
let content =
Omd.to_html (Omd.of_string
{___|# 12th MirageOS hack retreat
We invite you to participate in the twelfth [MirageOS]()
hack retreat!
The goal is to sync various MirageOS subprojects, start new ones,
and fix bugs.
* *When?* May 1st (arrival) - 7th (departure)
* *Where?* Marrakesh, Morocco
* *Travelling information* Please check travel restrictions from your country to Morocco before registering.
* *How much?* 450 EUR<sup>★</sup>, accommodation and food (full board) included. No refunds possible.
* *How do I register?* Register by sending a mail to <> **by March 15th, 2023** including:
* How you became interested in MirageOS;
* Previous experience with MirageOS and OCaml (no upfront experience required) ;
* Project(s) you're interested to work on; and
* Dietary restrictions
* *Who should participate?* Everybody interested in advancing MirageOS.
* *How big?* We have only limited space (25 people). Selection will be done by various diversity criteria.
* *How should I behave while there?* Be kind and empathetic to others; do not harrass or threaten anyone. If you make others unsafe, you may be asked to leave.
<sup>★</sup>: If you cannot afford this, please contact us directly (at <>).
<br/>
More information
* Once you've signed up, you will subscribed to the participants mailing list.
* You can work on anything but if you need inspiration, browse the [open issues](+org%3Amirage.
* The nearest airport is [Marrakesh Menara Airport (RAK)](). There is also Marrakesh Railway Station (train service from and to Tangier, reachable from Spain by ferry).
* From airport or railway station, take a cab to **Jemaa el-Fnaa** (city centre).
* Follow the [map](), the address is **27 Derb el Ferrane Azbezt**.
* A [video]() contains detailed descriptions.
* We are also happy to pick you up at Jemaa el-Fnaa (phone number will be provided once you registered).
Previous retreats:
* 11th October 3rd - 9th 2022 in Mirleft, reports: [Raphaël Proust](-proust.gitlab.io/code/mirage-retreat-2022-10.html) [Jules, Sayo, Enguerrand, Sonja, Jan, Lucas](-10-28-the-mirageos-retreat-a-journey-of-food-cats-and-unikernels) [Pierre](-alain-enssat-teacher-at-11th.html) [mirage.io](-11-07.retreat)
* 10th November 8th - 14th 2021 in Mirleft (cancelled due to Covid19)
* 9th March 13th - 19th 2020 in Marrakesh (partially cancelled due to Covid19)
* 8th September 23rd - 29th 2019 in Marrakesh, reports: [curtisanne (OpenLab Augsburg), in german](-augsburg.de/2019/10/lablinked-marrakesh-mirageos-retreat/) [comparing type classes with modules by mark karpov](-does-a-humped-critter-have-to-teach-us.html)
* 7th March 6th - 13th 2019 in Marrakesh, reports: [report](-spring-retreat-roundup) [lynxis](-2019.html) [gabriel](-retreat-03-2019/) [tarides](-05-06-7th-mirageos-hack-retreat.html)
* 6th October 3rd - 10th 2018 in Marrakesh
* 5th March 7th - 18th 2018 in Marrakesh, reports: [linse](-the-camels.html), [peter]()
* 4th November 29th - December 6th 2017 in Marrakesh, reports: [mirage](-winter-hackathon-roundup)
* 3rd March 1st - 8th 2017 in Marrakesh, reports: [mirage](-march-hackathon-roundup), [reynir](-03-20-11-27-Marrakech%202017.html), [olle](-report-mirageos-hack-retreat-in-marrakesh-2017/)
* 2nd 13th July 2016 at Darwin College in Cambridge, [report](-summer-hackathon-roundup)
* 1st March 11th - 16th 2016 in Marrakesh, [report](-spring-hackathon)
|___})
let rendered =
let buf = Buffer.create 500 in
let fmt = Format.formatter_of_buffer buf in
pp () fmt @@
html
(header "MirageOS hack retreats")
(body [ Unsafe.data content ]) ;
Cstruct.of_string (Buffer.contents buf)
| null | https://raw.githubusercontent.com/mirage/retreat.mirage.io/0204681b462c46c33db0dcea1d9d5c5dc080c2b8/page.ml | ocaml | open Tyxml.Html
let header t =
head
(title (txt t))
([meta ~a:[a_charset "UTF-8"] ();
style [ txt
{___|body {
font-family: monospace;
color: #333;
margin: 2% 0 10% 15%;
width: 45%;
}
a, a:visited {
color: #333;
text-decoration: none;
font-weight: bold;
}|___} ]
])
let content =
Omd.to_html (Omd.of_string
{___|# 12th MirageOS hack retreat
We invite you to participate in the twelfth [MirageOS]()
hack retreat!
The goal is to sync various MirageOS subprojects, start new ones,
and fix bugs.
* *When?* May 1st (arrival) - 7th (departure)
* *Where?* Marrakesh, Morocco
* *Travelling information* Please check travel restrictions from your country to Morocco before registering.
* *How much?* 450 EUR<sup>★</sup>, accommodation and food (full board) included. No refunds possible.
* *How do I register?* Register by sending a mail to <> **by March 15th, 2023** including:
* How you became interested in MirageOS;
* Previous experience with MirageOS and OCaml (no upfront experience required) ;
* Project(s) you're interested to work on; and
* Dietary restrictions
* *Who should participate?* Everybody interested in advancing MirageOS.
* *How big?* We have only limited space (25 people). Selection will be done by various diversity criteria.
* *How should I behave while there?* Be kind and empathetic to others; do not harrass or threaten anyone. If you make others unsafe, you may be asked to leave.
<sup>★</sup>: If you cannot afford this, please contact us directly (at <>).
<br/>
More information
* Once you've signed up, you will subscribed to the participants mailing list.
* You can work on anything but if you need inspiration, browse the [open issues](+org%3Amirage.
* The nearest airport is [Marrakesh Menara Airport (RAK)](). There is also Marrakesh Railway Station (train service from and to Tangier, reachable from Spain by ferry).
* From airport or railway station, take a cab to **Jemaa el-Fnaa** (city centre).
* Follow the [map](), the address is **27 Derb el Ferrane Azbezt**.
* A [video]() contains detailed descriptions.
* We are also happy to pick you up at Jemaa el-Fnaa (phone number will be provided once you registered).
Previous retreats:
* 11th October 3rd - 9th 2022 in Mirleft, reports: [Raphaël Proust](-proust.gitlab.io/code/mirage-retreat-2022-10.html) [Jules, Sayo, Enguerrand, Sonja, Jan, Lucas](-10-28-the-mirageos-retreat-a-journey-of-food-cats-and-unikernels) [Pierre](-alain-enssat-teacher-at-11th.html) [mirage.io](-11-07.retreat)
* 10th November 8th - 14th 2021 in Mirleft (cancelled due to Covid19)
* 9th March 13th - 19th 2020 in Marrakesh (partially cancelled due to Covid19)
* 8th September 23rd - 29th 2019 in Marrakesh, reports: [curtisanne (OpenLab Augsburg), in german](-augsburg.de/2019/10/lablinked-marrakesh-mirageos-retreat/) [comparing type classes with modules by mark karpov](-does-a-humped-critter-have-to-teach-us.html)
* 7th March 6th - 13th 2019 in Marrakesh, reports: [report](-spring-retreat-roundup) [lynxis](-2019.html) [gabriel](-retreat-03-2019/) [tarides](-05-06-7th-mirageos-hack-retreat.html)
* 6th October 3rd - 10th 2018 in Marrakesh
* 5th March 7th - 18th 2018 in Marrakesh, reports: [linse](-the-camels.html), [peter]()
* 4th November 29th - December 6th 2017 in Marrakesh, reports: [mirage](-winter-hackathon-roundup)
* 3rd March 1st - 8th 2017 in Marrakesh, reports: [mirage](-march-hackathon-roundup), [reynir](-03-20-11-27-Marrakech%202017.html), [olle](-report-mirageos-hack-retreat-in-marrakesh-2017/)
* 2nd 13th July 2016 at Darwin College in Cambridge, [report](-summer-hackathon-roundup)
* 1st March 11th - 16th 2016 in Marrakesh, [report](-spring-hackathon)
|___})
let rendered =
let buf = Buffer.create 500 in
let fmt = Format.formatter_of_buffer buf in
pp () fmt @@
html
(header "MirageOS hack retreats")
(body [ Unsafe.data content ]) ;
Cstruct.of_string (Buffer.contents buf)
| |
c04f8673510a08e4aa5328825af6ec29475b7dd4b0363552ff6cb0533cbce1ff | dselsam/arc | EagerFeatures.hs | Copyright ( c ) 2020 Microsoft Corporation . All rights reserved .
Released under Apache 2.0 license as described in the file LICENSE .
Authors : , , .
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE StrictData #-}
module Synth.EagerFeatures where
import Synth.Basic
import Util.Imports
import qualified Util.List as List
import Synth.Ex (Ex(Ex), ForTrain, ForTest)
import qualified Synth.Ex as Ex
import Search.SearchT
import Synth.Context
newtype EagerFeatures a = EagerFeatures {
choices :: [Choice (Ex a)]
} deriving (Eq, Ord, Show)
instance SynthContext (EagerFeatures a) where
partitionOn bs (EagerFeatures choices) =
let bothChoices :: [Choice (Ex a, Ex a)] = map (\(name, ex) -> (name, Ex.partitionOn bs ex)) choices in
(EagerFeatures $ map (\(name, (ex1, _)) -> (name, ex1)) bothChoices,
EagerFeatures $ map (\(name, (_, ex2)) -> (name, ex2)) bothChoices)
instance SynthContextFlatten (EagerFeatures [a]) (EagerFeatures a) where
flattenCtx (EagerFeatures choices) = EagerFeatures $ flip map choices $ \(n, ex) -> (n, fst (Ex.flatten ex))
instance (Eq a, Ord a) => SynthContextSelect (EagerFeatures [a]) (EagerFeatures a) where
selectCtx bs (EagerFeatures choices) = do
let keeps = flip concatMap choices $ \(n, xs) ->
case selectCtx bs xs of
Nothing -> []
Just x -> [(n, x)]
pure $ EagerFeatures keeps
append :: EagerFeatures a -> [Choice (Ex a)] -> EagerFeatures a
append efs cs = EagerFeatures (choices efs ++ cs)
prepend :: EagerFeatures a -> [Choice (Ex a)] -> EagerFeatures a
prepend efs cs = EagerFeatures (cs ++ choices efs)
| null | https://raw.githubusercontent.com/dselsam/arc/7e68a7ed9508bf26926b0f68336db05505f4e765/src/Synth/EagerFeatures.hs | haskell | # LANGUAGE StrictData # | Copyright ( c ) 2020 Microsoft Corporation . All rights reserved .
Released under Apache 2.0 license as described in the file LICENSE .
Authors : , , .
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
module Synth.EagerFeatures where
import Synth.Basic
import Util.Imports
import qualified Util.List as List
import Synth.Ex (Ex(Ex), ForTrain, ForTest)
import qualified Synth.Ex as Ex
import Search.SearchT
import Synth.Context
newtype EagerFeatures a = EagerFeatures {
choices :: [Choice (Ex a)]
} deriving (Eq, Ord, Show)
instance SynthContext (EagerFeatures a) where
partitionOn bs (EagerFeatures choices) =
let bothChoices :: [Choice (Ex a, Ex a)] = map (\(name, ex) -> (name, Ex.partitionOn bs ex)) choices in
(EagerFeatures $ map (\(name, (ex1, _)) -> (name, ex1)) bothChoices,
EagerFeatures $ map (\(name, (_, ex2)) -> (name, ex2)) bothChoices)
instance SynthContextFlatten (EagerFeatures [a]) (EagerFeatures a) where
flattenCtx (EagerFeatures choices) = EagerFeatures $ flip map choices $ \(n, ex) -> (n, fst (Ex.flatten ex))
instance (Eq a, Ord a) => SynthContextSelect (EagerFeatures [a]) (EagerFeatures a) where
selectCtx bs (EagerFeatures choices) = do
let keeps = flip concatMap choices $ \(n, xs) ->
case selectCtx bs xs of
Nothing -> []
Just x -> [(n, x)]
pure $ EagerFeatures keeps
append :: EagerFeatures a -> [Choice (Ex a)] -> EagerFeatures a
append efs cs = EagerFeatures (choices efs ++ cs)
prepend :: EagerFeatures a -> [Choice (Ex a)] -> EagerFeatures a
prepend efs cs = EagerFeatures (cs ++ choices efs)
|
d87339839468ed0a8bae8f9616ae1b91a99d7e5112514eb0d93c9e1a4425a4cd | mwotton/judy | Bench2.hs | import Control.Monad (forM_)
import Criterion.Config
import Criterion.Main
import qualified Data.IntMap as I
import qualified Data.Judy as J
import qualified Data.Map as M
import Data.List (foldl')
import Data.Word
import System.IO.Unsafe
import System.Random.Mersenne
Work around the fact that the GC wo n't run finalizers aggressively
-- enough for us.
myConfig = defaultConfig { cfgPerformGC = ljust True }
main = do
print "Setting up..."
-- judy100k `seq` judy1M `seq` judy10M `seq` return ()
-- judy10k `seq` return ()
-- judy100k `seq` return ()
seq ` return ( )
judy10M `seq` return ()
print "done"
defaultMainWith myConfig [
bench " delete 10k " ( testit 10000 )
bench " delete 100k " ( testit 100000 )
bench " delete 1 M " ( testit 1000000 )
bench "findMax 10M" (testit 10000000)
]
testit :: Int -> IO (Maybe (J.Key, Int))
testit n = do
J.findMax (h n)
where
-- h 10000 = judy10k
-- h 100000 = judy100k
h 1000000 =
h 10000000 = judy10M
------------------------------------------------------------------------
-- construction.
judy10k : : J.JudyL Int
judy10k = unsafePerformIO $ do
g < - getStdGen
rs < - randoms g
j < - J.new : : IO ( J.JudyL Int )
forM _ ( take n rs ) $ \n - > J.insert n ( fromIntegral n : : Int ) j
return j
where
n = truncate 1e5
judy10k :: J.JudyL Int
judy10k = unsafePerformIO $ do
g <- getStdGen
rs <- randoms g
j <- J.new :: IO (J.JudyL Int)
forM_ (take n rs) $ \n -> J.insert n (fromIntegral n :: Int) j
return j
where
n = truncate 1e5
-}
judy100k : : J.JudyL Int
judy100k = unsafePerformIO $ do
g < - getStdGen
rs < - randoms g
j < - J.new : : IO ( J.JudyL Int )
forM _ ( take n rs ) $ \n - > J.insert n ( fromIntegral n : : Int ) j
return j
where
n = truncate 1e6
judy100k :: J.JudyL Int
judy100k = unsafePerformIO $ do
g <- getStdGen
rs <- randoms g
j <- J.new :: IO (J.JudyL Int)
forM_ (take n rs) $ \n -> J.insert n (fromIntegral n :: Int) j
return j
where
n = truncate 1e6
-}
: : J.JudyL Int
judy1 M = unsafePerformIO $ do
g < - getStdGen
rs < - randoms g
j < - J.new : : IO ( J.JudyL Int )
forM _ ( take n rs ) $ \n - > J.insert n ( fromIntegral n : : Int ) j
return j
where
n = truncate 1e7
judy1M :: J.JudyL Int
judy1M = unsafePerformIO $ do
g <- getStdGen
rs <- randoms g
j <- J.new :: IO (J.JudyL Int)
forM_ (take n rs) $ \n -> J.insert n (fromIntegral n :: Int) j
return j
where
n = truncate 1e7
-}
judy10M :: J.JudyL Int
judy10M = unsafePerformIO $ do
g <- getStdGen
rs <- randoms g
j <- J.new :: IO (J.JudyL Int)
forM_ (take n rs) $ \n -> J.insert n (fromIntegral n :: Int) j
return j
where
n = truncate 1e8
------------------------------------------------------------------------
testit n = do
j < - J.new : : IO ( J.JudyL Int )
forM _ [ 1 .. n ] $ \n - > J.insert n ( fromIntegral n : : Int ) j
v < - J.lookup 100 j
v ` seq ` return ( )
testit n = do
j <- J.new :: IO (J.JudyL Int)
forM_ [1..n] $ \n -> J.insert n (fromIntegral n :: Int) j
v <- J.lookup 100 j
v `seq` return ()
-}
testmap :: Int -> Int -> I.IntMap Int
testmap n i =
foldl' (\m k -> I.insert k k m) I.empty [0..(n+i-i)]
testmap : : Int - > Int - > M.Map Int Int
testmap n i =
foldl ' ( \m k - > M.insert k k m ) M.empty [ 0 .. (n+i - i ) ]
testmap :: Int -> Int -> M.Map Int Int
testmap n i =
foldl' (\m k -> M.insert k k m) M.empty [0..(n+i-i)]
-}
| null | https://raw.githubusercontent.com/mwotton/judy/0fdb8687b39e67bfec21430865ce3611a0c6b266/tests/examples/Bench2.hs | haskell | enough for us.
judy100k `seq` judy1M `seq` judy10M `seq` return ()
judy10k `seq` return ()
judy100k `seq` return ()
h 10000 = judy10k
h 100000 = judy100k
----------------------------------------------------------------------
construction.
---------------------------------------------------------------------- | import Control.Monad (forM_)
import Criterion.Config
import Criterion.Main
import qualified Data.IntMap as I
import qualified Data.Judy as J
import qualified Data.Map as M
import Data.List (foldl')
import Data.Word
import System.IO.Unsafe
import System.Random.Mersenne
Work around the fact that the GC wo n't run finalizers aggressively
myConfig = defaultConfig { cfgPerformGC = ljust True }
main = do
print "Setting up..."
seq ` return ( )
judy10M `seq` return ()
print "done"
defaultMainWith myConfig [
bench " delete 10k " ( testit 10000 )
bench " delete 100k " ( testit 100000 )
bench " delete 1 M " ( testit 1000000 )
bench "findMax 10M" (testit 10000000)
]
testit :: Int -> IO (Maybe (J.Key, Int))
testit n = do
J.findMax (h n)
where
h 1000000 =
h 10000000 = judy10M
judy10k : : J.JudyL Int
judy10k = unsafePerformIO $ do
g < - getStdGen
rs < - randoms g
j < - J.new : : IO ( J.JudyL Int )
forM _ ( take n rs ) $ \n - > J.insert n ( fromIntegral n : : Int ) j
return j
where
n = truncate 1e5
judy10k :: J.JudyL Int
judy10k = unsafePerformIO $ do
g <- getStdGen
rs <- randoms g
j <- J.new :: IO (J.JudyL Int)
forM_ (take n rs) $ \n -> J.insert n (fromIntegral n :: Int) j
return j
where
n = truncate 1e5
-}
judy100k : : J.JudyL Int
judy100k = unsafePerformIO $ do
g < - getStdGen
rs < - randoms g
j < - J.new : : IO ( J.JudyL Int )
forM _ ( take n rs ) $ \n - > J.insert n ( fromIntegral n : : Int ) j
return j
where
n = truncate 1e6
judy100k :: J.JudyL Int
judy100k = unsafePerformIO $ do
g <- getStdGen
rs <- randoms g
j <- J.new :: IO (J.JudyL Int)
forM_ (take n rs) $ \n -> J.insert n (fromIntegral n :: Int) j
return j
where
n = truncate 1e6
-}
: : J.JudyL Int
judy1 M = unsafePerformIO $ do
g < - getStdGen
rs < - randoms g
j < - J.new : : IO ( J.JudyL Int )
forM _ ( take n rs ) $ \n - > J.insert n ( fromIntegral n : : Int ) j
return j
where
n = truncate 1e7
judy1M :: J.JudyL Int
judy1M = unsafePerformIO $ do
g <- getStdGen
rs <- randoms g
j <- J.new :: IO (J.JudyL Int)
forM_ (take n rs) $ \n -> J.insert n (fromIntegral n :: Int) j
return j
where
n = truncate 1e7
-}
judy10M :: J.JudyL Int
judy10M = unsafePerformIO $ do
g <- getStdGen
rs <- randoms g
j <- J.new :: IO (J.JudyL Int)
forM_ (take n rs) $ \n -> J.insert n (fromIntegral n :: Int) j
return j
where
n = truncate 1e8
testit n = do
j < - J.new : : IO ( J.JudyL Int )
forM _ [ 1 .. n ] $ \n - > J.insert n ( fromIntegral n : : Int ) j
v < - J.lookup 100 j
v ` seq ` return ( )
testit n = do
j <- J.new :: IO (J.JudyL Int)
forM_ [1..n] $ \n -> J.insert n (fromIntegral n :: Int) j
v <- J.lookup 100 j
v `seq` return ()
-}
testmap :: Int -> Int -> I.IntMap Int
testmap n i =
foldl' (\m k -> I.insert k k m) I.empty [0..(n+i-i)]
testmap : : Int - > Int - > M.Map Int Int
testmap n i =
foldl ' ( \m k - > M.insert k k m ) M.empty [ 0 .. (n+i - i ) ]
testmap :: Int -> Int -> M.Map Int Int
testmap n i =
foldl' (\m k -> M.insert k k m) M.empty [0..(n+i-i)]
-}
|
c2821aaf7e743953061a2f15c9e8300b3a655c0c0ccbc4f2e6e1cc6939297702 | gregcman/lisp-in-small-pieces | other.lisp | (in-package :lisp)
;;;;values
(defparameter *the-false-value* nil)
(defun truep (value)
(not (eq *the-false-value* value)))
(defun booleanp (value)
(or (eq *the-false-value* value)
(eq t value)))
(defparameter *empty-progn* 69)
;;;;environments
;;;;represented as alists where each cell is (symbol . value)
(defparameter *initial-environment* '())
(defun lookup (id env)
(if (consp env)
(if (eq (car (car env)) id)
(cdr (car env))
(lookup id (cdr env)))
(error "no such binding ~s" id)))
(defun update! (id env value)
(if (consp env)
(if (eq (car (car env)) id)
(progn (rplacd (car env) value)
value)
(update! id (cdr env) value))
(error "no such binding ~s" id)))
(defun extend (env variables values)
(cond ((consp variables)
(if (consp values)
(cons (cons (car variables)
(car values))
(extend env
(cdr variables)
(cdr values)))
(error "too few values")))
((null variables)
(if (null values)
env
(error "too many values")))
((symbolp variables)
(cons (cons variables
values)
env))))
| null | https://raw.githubusercontent.com/gregcman/lisp-in-small-pieces/71a89aa2108dc1122a60337177ea75c5170e6828/interpreters/other.lisp | lisp | values
environments
represented as alists where each cell is (symbol . value) | (in-package :lisp)
(defparameter *the-false-value* nil)
(defun truep (value)
(not (eq *the-false-value* value)))
(defun booleanp (value)
(or (eq *the-false-value* value)
(eq t value)))
(defparameter *empty-progn* 69)
(defparameter *initial-environment* '())
(defun lookup (id env)
(if (consp env)
(if (eq (car (car env)) id)
(cdr (car env))
(lookup id (cdr env)))
(error "no such binding ~s" id)))
(defun update! (id env value)
(if (consp env)
(if (eq (car (car env)) id)
(progn (rplacd (car env) value)
value)
(update! id (cdr env) value))
(error "no such binding ~s" id)))
(defun extend (env variables values)
(cond ((consp variables)
(if (consp values)
(cons (cons (car variables)
(car values))
(extend env
(cdr variables)
(cdr values)))
(error "too few values")))
((null variables)
(if (null values)
env
(error "too many values")))
((symbolp variables)
(cons (cons variables
values)
env))))
|
ddfdfa4999c978ad224192a260439db882a49648ec6a7eebe0ee7d5d06b03eb7 | jvf/scalaris | db_hanoidb.erl | 2013 scalaris project
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
% You may obtain a copy of the License at
%
%     http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
@author
@author < >
%% @doc DB back-end using HanoiDB.
HanoiDB is a memory - cached disk backend .
As disks are large ( TB ) HanoiDB can hold data much larger than RAM ( ) .
%% As disks persist data HanoiDB can be stoped and restarted without data loss.
It is a pure Erlang implementation of Google 's LevelDB disk - backed K / V store .
%% See / for background about storage levels.
How to use scalaris with this hanoidb backend :
-download and compile HanoiDB
-make sure this file is in src/ ( right with db_ets.erl )
%% -rerun scalaris' configure with --enable-hanoidb
%% ./configure --enable-hanoidb=/path/to/hanoidb
-rerun make to rebuild scalaris and run tests
%% ./make
%% ./make test
%% -enjoy
Two keys K and L are considered equal if they match , i.e. K = : = L
Made after svn rev 5666 .
%% @end
-module(db_hanoidb).
-include("scalaris.hrl").
-behaviour(db_backend_beh).
-define(IN(E), erlang:term_to_binary(E, [{minor_version, 1}])).
-define(OUT(E), erlang:binary_to_term(E)).
%% primitives
-export([new/1, open/1]).
-export([put/2, get/2, delete/2]).
-export([close/1, close_and_delete/1]).
%% db info
-export([get_persisted_tables/0, get_name/1, get_load/1,
is_available/0, supports_feature/1]).
%% iteration
-export([foldl/3, foldl/4, foldl/5]).
-export([foldr/3, foldr/4, foldr/5]).
-export([foldl_unordered/3]).
-export([tab2list/1]).
-type db() :: {DB::pid(), FileName::nonempty_string()}.
-type key() :: db_backend_beh:key(). %% '$end_of_table' is not allowed as key() or else iterations won't work!
-type entry() :: db_backend_beh:entry().
-export_type([db/0]).
-type hanoidb_config_option() :: {compress, none | gzip | snappy | lz4}
| {page_size, pos_integer()}
| {read_buffer_size, pos_integer()}
| {write_buffer_size, pos_integer()}
| {merge_strategy, fast | predictable }
| {sync_strategy, none | sync | {seconds, pos_integer()}}
| {expiry_secs, non_neg_integer()}
| {spawn_opt, list()}.
%% @doc Creates new DB handle named DBName.
-spec new(DBName::nonempty_string()) -> db().
new(DBName) ->
hanoidb 's default options . May need tuning .
%% @doc Re-opens an existing-on-disk database.
-spec open(DBName::nonempty_string()) -> db().
open(DBName) ->
hanoidb 's default options . May need tuning .
%% @doc Creates new DB handle named DBName with options.
-spec new_db(DirName::string(), HanoiOptions::[hanoidb_config_option()]) -> db().
new_db(DBName, HanoiOptions) ->
BaseDir = [config:read(db_directory), "/", atom_to_list(node())],
_ = case file:make_dir(BaseDir) of
ok -> ok;
{error, eexist} -> ok;
{error, Error0} -> erlang:exit({db_toke, 'cannot create dir', BaseDir, Error0})
end,
% HanoiDB stores not in a file but a dir store
FullDBDir = lists:flatten([BaseDir, "/", DBName]),
case hanoidb:open(FullDBDir, HanoiOptions) of
{ok, Tree} -> {Tree, DBName};
ignore -> log:log(error, "[ Node ~w:db_hanoidb ] ~.0p", [self(), ignore]),
erlang:error({hanoidb_failed, ignore});
{error, Error2} -> log:log(error, "[ Node ~w:db_hanoidb ] ~.0p", [self(), Error2]),
erlang:error({hanoidb_failed, Error2})
end.
%% @doc Closes the DB named DBName keeping its data on disk.
-spec close(DB::db()) -> true.
close({DB, _FileName}) ->
ok = hanoidb:close(DB),
true.
% hanoidb:stop(). Not needed.
%% @doc Closes and deletes the DB named DBName
-spec close_and_delete(DB::db()) -> true.
close_and_delete({_DB, DBName} = State) ->
close(State),
% A disk backend happens in some directory
DirName = [config:read(db_directory), "/", atom_to_list(node()), "/", DBName],
% Delete all DB files
{ok, Files} = file:list_dir(DirName),
lists:foreach(fun(FileName) ->
FullFileName = lists:flatten([DirName, "/", FileName]),
case file:delete(FullFileName) of
ok -> ok;
{error, Reason} ->
log:log(error, "[ Node ~w:db_toke ] deleting ~.0p failed: ~.0p",
[self(), FileName, Reason])
end
end, Files),
Delete DB dir
case file:del_dir(DirName) of
ok -> ok;
{error, Reason} -> log:log(error, "[ Node ~w:db_hanoidb ] deleting ~.0p failed: ~.0p",
[self(), DirName, Reason])
end.
%% @doc Saves arbitrary tuple Entry in DB DBName and returns the new DB.
The key is expected to be the first element of Entry .
-spec put(DB::db(), Entry::entry()) -> db().
put({DB, _DBName} = State, Entry) ->
ok = hanoidb:put(DB, ?IN(element(1, Entry)), ?IN(Entry) ),
State.
%% @doc Returns the entry that corresponds to Key or {} if no such tuple exists.
-spec get(DB::db(), Key::key()) -> entry() | {}.
get({DB, _DBName}, Key) ->
case hanoidb:get(DB, ?IN(Key)) of
not_found -> {};
{ok, Entry} -> ?OUT(Entry)
end.
%% @doc Deletes the tuple saved under Key and returns the new DB.
%% If such a tuple does not exists nothing is changed.
-spec delete(DB::db(), Key::key()) -> db().
delete({DB, _FileName} = State, Key) ->
ok = hanoidb:delete(DB, ?IN(Key)),
State.
%% @doc Gets a list of persisted tables.
-spec get_persisted_tables() -> [nonempty_string()].
get_persisted_tables() ->
%% TODO: implement
[].
%% @doc Checks for modules required for this DB backend. Returns true if no
%% modules are missing, or else a list of missing modules
-spec is_available() -> boolean() | [atom()].
is_available() ->
case code:which(hanoidb) of
non_existing -> [hanoidb];
_ -> true
end.
%% @doc Returns true if the DB support a specific feature (e.g. recovery), false otherwise.
-spec supports_feature(Feature::atom()) -> boolean().
supports_feature(recover) -> true;
supports_feature(_) -> false.
@doc Returns the name of the DB specified in @see new/1 and open/1 .
-spec get_name(DB::db()) -> nonempty_string().
get_name({_DB, DBName}) ->
DBName.
%% @doc Returns the number of stored keys.
-spec get_load(DB::db()) -> non_neg_integer().
get_load({DB, _DBName}) ->
%% TODO: not really efficient (maybe store the load in the DB?)
hanoidb:fold(DB, fun (_K, _V, Load) -> Load + 1 end, 0).
@equiv : fold_range(DB , Fun , Acc0 , # key_range{from_key = < < > > , to_key = undefined } )
%% @doc Returns a potentially larger-than-memory dataset. Use with care.
-spec foldl(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A) -> Acc1::A.
foldl(State, Fun, Acc) ->
: fold(DB , fun ( K , _ V , AccIn ) - > Fun(?OUT(K ) , AccIn ) end , Acc0 ) .
foldl_helper(State, Fun, Acc, all, -1).
@equiv foldl(DB , Fun , Acc0 , Interval , ) )
%% @doc Returns a potentially larger-than-memory dataset. Use with care.
-spec foldl(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Interval::db_backend_beh:interval()) -> Acc1::A.
foldl(State, Fun, Acc, Interval) ->
: fold_range(DB , Fun , Acc , # key_range{from_key = K1 , to_key = K2 } ) . % TODO check it is possible
foldl_helper(State, Fun, Acc, Interval, -1).
@doc foldl iterates over DB and applies Fun(Entry , AccIn ) to every element
encountered in Interval . On the first call AccIn = = Acc0 . The iteration
stops as soon as MaxNum elements have been encountered .
%% Returns a potentially larger-than-memory dataset. Use with care.
-spec foldl(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Intervall::db_backend_beh:interval(), MaxNum::non_neg_integer()) -> Acc1::A.
foldl(State, Fun, Acc, Interval, MaxNum) ->
%% HINT
Fun can only be applied in a second pass . It could do a delete ( or other
%% write op) but CAN HanoiDB handle writes whiles folding ? (TODO check YES?)
%% Since we reversed the order while accumulating reverse it by using lists
%% fold but "from the other side". TODO check this for HanoiDB
: fold_range(DB , Fun , Acc , # key_range{limit = N , from_key = K1 , to_key = K2 } ) % TODO check it is possible
foldl_helper(State, Fun, Acc, Interval, MaxNum).
@private this helper enables us to use -1 as MaxNum . MaxNum = = -1 signals that all
%% data is to be retrieved.
-spec foldl_helper(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Intervall::db_backend_beh:interval(), MaxNum::integer()) -> Acc1::A.
foldl_helper({DB, _FileName}, Fun, Acc, Interval, MaxNum) ->
hopefully MaxNum caps it .
lists:foldr(Fun, Acc, Keys). % db:foldL calls lists:foldR
TODO May be : fold_range is less RAM intensive : no need to keep all keys in RAM at once , but continuous folding instead .
%% @doc makes a foldr over the whole dataset.
%% Returns a potentially larger-than-memory dataset. Use with care.
-spec foldr(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A) -> Acc1::A.
foldr(State, Fun, Acc) ->
foldr_helper(State, Fun, Acc, all, -1).
@equiv foldr(DB , Fun , Acc0 , Interval , ) )
%% @doc Returns a potentially larger-than-memory dataset. Use with care.
-spec foldr(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Interval::db_backend_beh:interval()) -> Acc1::A.
foldr(State, Fun, Acc, Interval) ->
foldr_helper(State, Fun, Acc, Interval, -1).
@doc foldr iterates over DB and applies Fun(Entry , AccIn ) to every element
encountered in Interval . On the first call AccIn = = Acc0 . The iteration
stops as soon as MaxNum elements have been encountered .
%% Returns a potentially larger-than-memory dataset. Use with care.
-spec foldr(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Intervall::db_backend_beh:interval(), MaxNum::non_neg_integer()) -> Acc1::A.
foldr(State, Fun, Acc, Interval, MaxNum) ->
foldr_helper(State, Fun, Acc, Interval, MaxNum).
@private this helper enables us to use -1 as MaxNum . MaxNum = = -1 signals that all
%% data is to be retrieved.
-spec foldr_helper(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Intervall::db_backend_beh:interval(), MaxNum::integer()) -> Acc1::A.
foldr_helper({DB, _FileName}, Fun, Acc, Interval, MaxNum) ->
TODO evaluate : fold_range(DB , Fun , Acc , # key_range{limit = N , from_key = K1 , to_key = K2 } )
first only retrieve keys so we do n't have to load the whole db into memory
Keys = get_all_keys(DB, Interval, -1),
CutData = case MaxNum of
N when N < 0 ->
Keys;
_ ->
lists:sublist(Keys, MaxNum)
end,
%% see HINT in foldl/5
%% now retrieve actual data
lists:foldl(Fun, Acc, CutData).
@doc Works similar to foldl/3 but uses : fold instead of our own implementation .
%% The order in which will be iterated over is unspecified, but using this fuction
%% might be faster than foldl/3 if it does not matter.
-spec foldl_unordered(DB::db(), Fun::fun((Entry::entry(), AccIn::A) -> AccOut::A), Acc0::A) -> Acc1::A.
foldl_unordered({DB, _DBName}, Fun, Acc) ->
hanoidb:fold(DB, fun (_K, Entry, AccIn) -> Fun(?OUT(Entry), AccIn) end, Acc).
@private get_all_keys/3 retrieves all keys in DB that fall into Interval but
not more than MaxNum . If MaxNum = = -1 all Keys are retrieved . If
MaxNum is positive it starts from the left in term order .
-spec get_all_keys(pid(), db_backend_beh:interval(), -1 | non_neg_integer())
-> [key()].
get_all_keys(DB, Interval, MaxNum) ->
TODO evaluate converting scalaris : Intervals to : ranges
in order to leverage : fold rather than get_all_keys+lists : fold .
Keys = hanoidb:fold(DB, fun(Key, _Entry, AccIn) -> [?OUT(Key) | AccIn] end, []),
{_, In} = lists:foldl(fun
(_, {0, _} = AccIn) ->
AccIn;
(Key, {Max, KeyAcc} = AccIn) ->
case is_in(Interval, Key) of
true ->
{Max - 1, [Key | KeyAcc]};
_ ->
AccIn
end
end, {MaxNum, []}, lists:sort(Keys)),
In.
is_in({Key}, OtherKey) -> Key =:= OtherKey;
is_in(all, _Key) -> true;
is_in({'(', L, R, ')'}, Key) -> Key > L andalso Key < R;
is_in({'(', L, R, ']'}, Key) -> Key > L andalso ((Key < R) orelse (Key =:= R));
is_in({'[', L, R, ')'}, Key) -> ((Key > L) orelse (Key =:= L)) andalso Key < R;
is_in({'[', L, R, ']'}, Key) -> ((Key > L) orelse (Key =:= L)) andalso
((Key < R) orelse (Key =:= R)).
%% @doc Returns a list of all objects in the table Table_name.
-spec tab2list(Table_name::db()) -> [Entries::entry()].
tab2list(_Table_name) ->
%% Not implemented yet.
[].
| null | https://raw.githubusercontent.com/jvf/scalaris/c069f44cf149ea6c69e24bdb08714bda242e7ee0/src/db_hanoidb.erl | erlang | You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc DB back-end using HanoiDB.
As disks persist data HanoiDB can be stoped and restarted without data loss.
See / for background about storage levels.
-rerun scalaris' configure with --enable-hanoidb
./configure --enable-hanoidb=/path/to/hanoidb
./make
./make test
-enjoy
@end
primitives
db info
iteration
'$end_of_table' is not allowed as key() or else iterations won't work!
@doc Creates new DB handle named DBName.
@doc Re-opens an existing-on-disk database.
@doc Creates new DB handle named DBName with options.
HanoiDB stores not in a file but a dir store
@doc Closes the DB named DBName keeping its data on disk.
hanoidb:stop(). Not needed.
@doc Closes and deletes the DB named DBName
A disk backend happens in some directory
Delete all DB files
@doc Saves arbitrary tuple Entry in DB DBName and returns the new DB.
@doc Returns the entry that corresponds to Key or {} if no such tuple exists.
@doc Deletes the tuple saved under Key and returns the new DB.
If such a tuple does not exists nothing is changed.
@doc Gets a list of persisted tables.
TODO: implement
@doc Checks for modules required for this DB backend. Returns true if no
modules are missing, or else a list of missing modules
@doc Returns true if the DB support a specific feature (e.g. recovery), false otherwise.
@doc Returns the number of stored keys.
TODO: not really efficient (maybe store the load in the DB?)
@doc Returns a potentially larger-than-memory dataset. Use with care.
@doc Returns a potentially larger-than-memory dataset. Use with care.
TODO check it is possible
Returns a potentially larger-than-memory dataset. Use with care.
HINT
write op) but CAN HanoiDB handle writes whiles folding ? (TODO check YES?)
Since we reversed the order while accumulating reverse it by using lists
fold but "from the other side". TODO check this for HanoiDB
TODO check it is possible
data is to be retrieved.
db:foldL calls lists:foldR
@doc makes a foldr over the whole dataset.
Returns a potentially larger-than-memory dataset. Use with care.
@doc Returns a potentially larger-than-memory dataset. Use with care.
Returns a potentially larger-than-memory dataset. Use with care.
data is to be retrieved.
see HINT in foldl/5
now retrieve actual data
The order in which will be iterated over is unspecified, but using this fuction
might be faster than foldl/3 if it does not matter.
@doc Returns a list of all objects in the table Table_name.
Not implemented yet.
| 2013 scalaris project
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
distributed under the License is distributed on an " AS IS " BASIS ,
@author
@author < >
HanoiDB is a memory - cached disk backend .
As disks are large ( TB ) HanoiDB can hold data much larger than RAM ( ) .
It is a pure Erlang implementation of Google 's LevelDB disk - backed K / V store .
How to use scalaris with this hanoidb backend :
-download and compile HanoiDB
-make sure this file is in src/ ( right with db_ets.erl )
-rerun make to rebuild scalaris and run tests
Two keys K and L are considered equal if they match , i.e. K = : = L
Made after svn rev 5666 .
-module(db_hanoidb).
-include("scalaris.hrl").
-behaviour(db_backend_beh).
-define(IN(E), erlang:term_to_binary(E, [{minor_version, 1}])).
-define(OUT(E), erlang:binary_to_term(E)).
-export([new/1, open/1]).
-export([put/2, get/2, delete/2]).
-export([close/1, close_and_delete/1]).
-export([get_persisted_tables/0, get_name/1, get_load/1,
is_available/0, supports_feature/1]).
-export([foldl/3, foldl/4, foldl/5]).
-export([foldr/3, foldr/4, foldr/5]).
-export([foldl_unordered/3]).
-export([tab2list/1]).
-type db() :: {DB::pid(), FileName::nonempty_string()}.
-type entry() :: db_backend_beh:entry().
-export_type([db/0]).
-type hanoidb_config_option() :: {compress, none | gzip | snappy | lz4}
| {page_size, pos_integer()}
| {read_buffer_size, pos_integer()}
| {write_buffer_size, pos_integer()}
| {merge_strategy, fast | predictable }
| {sync_strategy, none | sync | {seconds, pos_integer()}}
| {expiry_secs, non_neg_integer()}
| {spawn_opt, list()}.
-spec new(DBName::nonempty_string()) -> db().
new(DBName) ->
hanoidb 's default options . May need tuning .
-spec open(DBName::nonempty_string()) -> db().
open(DBName) ->
hanoidb 's default options . May need tuning .
-spec new_db(DirName::string(), HanoiOptions::[hanoidb_config_option()]) -> db().
new_db(DBName, HanoiOptions) ->
BaseDir = [config:read(db_directory), "/", atom_to_list(node())],
_ = case file:make_dir(BaseDir) of
ok -> ok;
{error, eexist} -> ok;
{error, Error0} -> erlang:exit({db_toke, 'cannot create dir', BaseDir, Error0})
end,
FullDBDir = lists:flatten([BaseDir, "/", DBName]),
case hanoidb:open(FullDBDir, HanoiOptions) of
{ok, Tree} -> {Tree, DBName};
ignore -> log:log(error, "[ Node ~w:db_hanoidb ] ~.0p", [self(), ignore]),
erlang:error({hanoidb_failed, ignore});
{error, Error2} -> log:log(error, "[ Node ~w:db_hanoidb ] ~.0p", [self(), Error2]),
erlang:error({hanoidb_failed, Error2})
end.
-spec close(DB::db()) -> true.
close({DB, _FileName}) ->
ok = hanoidb:close(DB),
true.
-spec close_and_delete(DB::db()) -> true.
close_and_delete({_DB, DBName} = State) ->
close(State),
DirName = [config:read(db_directory), "/", atom_to_list(node()), "/", DBName],
{ok, Files} = file:list_dir(DirName),
lists:foreach(fun(FileName) ->
FullFileName = lists:flatten([DirName, "/", FileName]),
case file:delete(FullFileName) of
ok -> ok;
{error, Reason} ->
log:log(error, "[ Node ~w:db_toke ] deleting ~.0p failed: ~.0p",
[self(), FileName, Reason])
end
end, Files),
Delete DB dir
case file:del_dir(DirName) of
ok -> ok;
{error, Reason} -> log:log(error, "[ Node ~w:db_hanoidb ] deleting ~.0p failed: ~.0p",
[self(), DirName, Reason])
end.
The key is expected to be the first element of Entry .
-spec put(DB::db(), Entry::entry()) -> db().
put({DB, _DBName} = State, Entry) ->
ok = hanoidb:put(DB, ?IN(element(1, Entry)), ?IN(Entry) ),
State.
-spec get(DB::db(), Key::key()) -> entry() | {}.
get({DB, _DBName}, Key) ->
case hanoidb:get(DB, ?IN(Key)) of
not_found -> {};
{ok, Entry} -> ?OUT(Entry)
end.
-spec delete(DB::db(), Key::key()) -> db().
delete({DB, _FileName} = State, Key) ->
ok = hanoidb:delete(DB, ?IN(Key)),
State.
-spec get_persisted_tables() -> [nonempty_string()].
get_persisted_tables() ->
[].
-spec is_available() -> boolean() | [atom()].
is_available() ->
case code:which(hanoidb) of
non_existing -> [hanoidb];
_ -> true
end.
-spec supports_feature(Feature::atom()) -> boolean().
supports_feature(recover) -> true;
supports_feature(_) -> false.
@doc Returns the name of the DB specified in @see new/1 and open/1 .
-spec get_name(DB::db()) -> nonempty_string().
get_name({_DB, DBName}) ->
DBName.
-spec get_load(DB::db()) -> non_neg_integer().
get_load({DB, _DBName}) ->
hanoidb:fold(DB, fun (_K, _V, Load) -> Load + 1 end, 0).
@equiv : fold_range(DB , Fun , Acc0 , # key_range{from_key = < < > > , to_key = undefined } )
-spec foldl(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A) -> Acc1::A.
foldl(State, Fun, Acc) ->
: fold(DB , fun ( K , _ V , AccIn ) - > Fun(?OUT(K ) , AccIn ) end , Acc0 ) .
foldl_helper(State, Fun, Acc, all, -1).
@equiv foldl(DB , Fun , Acc0 , Interval , ) )
-spec foldl(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Interval::db_backend_beh:interval()) -> Acc1::A.
foldl(State, Fun, Acc, Interval) ->
foldl_helper(State, Fun, Acc, Interval, -1).
@doc foldl iterates over DB and applies Fun(Entry , AccIn ) to every element
encountered in Interval . On the first call AccIn = = Acc0 . The iteration
stops as soon as MaxNum elements have been encountered .
-spec foldl(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Intervall::db_backend_beh:interval(), MaxNum::non_neg_integer()) -> Acc1::A.
foldl(State, Fun, Acc, Interval, MaxNum) ->
Fun can only be applied in a second pass . It could do a delete ( or other
foldl_helper(State, Fun, Acc, Interval, MaxNum).
@private this helper enables us to use -1 as MaxNum . MaxNum = = -1 signals that all
-spec foldl_helper(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Intervall::db_backend_beh:interval(), MaxNum::integer()) -> Acc1::A.
foldl_helper({DB, _FileName}, Fun, Acc, Interval, MaxNum) ->
hopefully MaxNum caps it .
TODO May be : fold_range is less RAM intensive : no need to keep all keys in RAM at once , but continuous folding instead .
-spec foldr(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A) -> Acc1::A.
foldr(State, Fun, Acc) ->
foldr_helper(State, Fun, Acc, all, -1).
@equiv foldr(DB , Fun , Acc0 , Interval , ) )
-spec foldr(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Interval::db_backend_beh:interval()) -> Acc1::A.
foldr(State, Fun, Acc, Interval) ->
foldr_helper(State, Fun, Acc, Interval, -1).
@doc foldr iterates over DB and applies Fun(Entry , AccIn ) to every element
encountered in Interval . On the first call AccIn = = Acc0 . The iteration
stops as soon as MaxNum elements have been encountered .
-spec foldr(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Intervall::db_backend_beh:interval(), MaxNum::non_neg_integer()) -> Acc1::A.
foldr(State, Fun, Acc, Interval, MaxNum) ->
foldr_helper(State, Fun, Acc, Interval, MaxNum).
@private this helper enables us to use -1 as MaxNum . MaxNum = = -1 signals that all
-spec foldr_helper(DB::db(), Fun::fun((Key::key(), AccIn::A) -> AccOut::A), Acc0::A,
Intervall::db_backend_beh:interval(), MaxNum::integer()) -> Acc1::A.
foldr_helper({DB, _FileName}, Fun, Acc, Interval, MaxNum) ->
TODO evaluate : fold_range(DB , Fun , Acc , # key_range{limit = N , from_key = K1 , to_key = K2 } )
first only retrieve keys so we do n't have to load the whole db into memory
Keys = get_all_keys(DB, Interval, -1),
CutData = case MaxNum of
N when N < 0 ->
Keys;
_ ->
lists:sublist(Keys, MaxNum)
end,
lists:foldl(Fun, Acc, CutData).
@doc Works similar to foldl/3 but uses : fold instead of our own implementation .
-spec foldl_unordered(DB::db(), Fun::fun((Entry::entry(), AccIn::A) -> AccOut::A), Acc0::A) -> Acc1::A.
foldl_unordered({DB, _DBName}, Fun, Acc) ->
hanoidb:fold(DB, fun (_K, Entry, AccIn) -> Fun(?OUT(Entry), AccIn) end, Acc).
@private get_all_keys/3 retrieves all keys in DB that fall into Interval but
not more than MaxNum . If MaxNum = = -1 all Keys are retrieved . If
MaxNum is positive it starts from the left in term order .
-spec get_all_keys(pid(), db_backend_beh:interval(), -1 | non_neg_integer())
-> [key()].
get_all_keys(DB, Interval, MaxNum) ->
TODO evaluate converting scalaris : Intervals to : ranges
in order to leverage : fold rather than get_all_keys+lists : fold .
Keys = hanoidb:fold(DB, fun(Key, _Entry, AccIn) -> [?OUT(Key) | AccIn] end, []),
{_, In} = lists:foldl(fun
(_, {0, _} = AccIn) ->
AccIn;
(Key, {Max, KeyAcc} = AccIn) ->
case is_in(Interval, Key) of
true ->
{Max - 1, [Key | KeyAcc]};
_ ->
AccIn
end
end, {MaxNum, []}, lists:sort(Keys)),
In.
is_in({Key}, OtherKey) -> Key =:= OtherKey;
is_in(all, _Key) -> true;
is_in({'(', L, R, ')'}, Key) -> Key > L andalso Key < R;
is_in({'(', L, R, ']'}, Key) -> Key > L andalso ((Key < R) orelse (Key =:= R));
is_in({'[', L, R, ')'}, Key) -> ((Key > L) orelse (Key =:= L)) andalso Key < R;
is_in({'[', L, R, ']'}, Key) -> ((Key > L) orelse (Key =:= L)) andalso
((Key < R) orelse (Key =:= R)).
-spec tab2list(Table_name::db()) -> [Entries::entry()].
tab2list(_Table_name) ->
[].
|
39e7f17ae618a2649f99fbc03e4d373dec5f5989c3fd7437293e51a06fa1ff8b | hiroshi-unno/coar | apply_add.ml | let apply f x = f x
let add x y = x + y
let main x = assert (apply (add x) 0 >= x)
[@@@assert "typeof(main) <: int -> unit"]
| null | https://raw.githubusercontent.com/hiroshi-unno/coar/90a23a09332c68f380efd4115b3f6fdc825f413d/benchmarks/OCaml/safety/tacas2015/apply_add.ml | ocaml | let apply f x = f x
let add x y = x + y
let main x = assert (apply (add x) 0 >= x)
[@@@assert "typeof(main) <: int -> unit"]
| |
8a70b8e4887d25c7c281ca23f2af8240aab811906cfb16b19dd772e00d508e7d | footprintanalytics/footprint-web | search_test.clj | (ns metabase.api.search-test
(:require [clojure.set :as set]
[clojure.string :as str]
[clojure.test :refer :all]
[honeysql.core :as hsql]
[metabase.api.search :as api.search]
[metabase.models
:refer
[App Card CardBookmark Collection Dashboard DashboardBookmark DashboardCard
Database Metric PermissionsGroup PermissionsGroupMembership Pulse PulseCard
Segment Table]]
[metabase.models.permissions :as perms]
[metabase.models.permissions-group :as perms-group]
[metabase.search.config :as search-config]
[metabase.search.scoring :as scoring]
[metabase.test :as mt]
[metabase.util :as u]
[schema.core :as s]
[toucan.db :as db]))
(def ^:private default-search-row
{:id true
:description nil
:archived false
:collection {:id false :name nil :authority_level nil :app_id false}
:collection_authority_level nil
:collection_position nil
:app_id false
:moderated_status nil
:context nil
:dashboardcard_count nil
:bookmark nil
:table_id false
:database_id false
:dataset_query nil
:table_schema nil
:table_name nil
:table_description nil
:updated_at true
:initial_sync_status nil})
(defn- table-search-results
"Segments and Metrics come back with information about their Tables as of 0.33.0. The `model-defaults` for Segment and
Metric put them both in the `:checkins` Table."
[]
(merge
{:table_id true, :database_id true}
(db/select-one [Table [:name :table_name] [:schema :table_schema] [:description :table_description]]
:id (mt/id :checkins))))
(defn- sorted-results [results]
(->> results
(sort-by (juxt (comp (var-get #'scoring/model->sort-position) :model)))
reverse))
(defn- make-result
[name & kvs]
(apply assoc default-search-row :name name kvs))
(def ^:private test-collection (make-result "collection test collection"
:bookmark false
:model "collection"
:collection {:id true, :name true :authority_level nil
:app_id false}
:updated_at false))
(defn- default-search-results []
(sorted-results
[(make-result "dashboard test dashboard", :model "dashboard", :bookmark false)
test-collection
(make-result "card test card", :model "card", :bookmark false, :dataset_query nil, :dashboardcard_count 0)
(make-result "dataset test dataset", :model "dataset", :bookmark false, :dataset_query nil, :dashboardcard_count 0)
(make-result "pulse test pulse", :model "pulse", :archived nil, :updated_at false)
(merge
(make-result "metric test metric", :model "metric", :description "Lookin' for a blueberry")
(table-search-results))
(merge
(make-result "segment test segment", :model "segment", :description "Lookin' for a blueberry")
(table-search-results))]))
(defn- default-metric-segment-results []
(filter #(contains? #{"metric" "segment"} (:model %)) (default-search-results)))
(defn- default-archived-results []
(for [result (default-search-results)
:when (false? (:archived result))]
(assoc result :archived true)))
(defn- on-search-types [model-set f coll]
(for [search-item coll]
(if (contains? model-set (:model search-item))
(f search-item)
search-item)))
(defn- default-results-with-collection []
(on-search-types #{"dashboard" "pulse" "card" "dataset"}
#(assoc % :collection {:id true, :name true :authority_level nil :app_id false})
(default-search-results)))
(defn- do-with-search-items [search-string in-root-collection? f]
(let [data-map (fn [instance-name]
{:name (format instance-name search-string)})
coll-data-map (fn [instance-name collection]
(merge (data-map instance-name)
(when-not in-root-collection?
{:collection_id (u/the-id collection)})))]
(mt/with-temp* [Collection [coll (data-map "collection %s collection")]
Card [card (coll-data-map "card %s card" coll)]
Card [dataset (assoc (coll-data-map "dataset %s dataset" coll)
:dataset true)]
Dashboard [dashboard (coll-data-map "dashboard %s dashboard" coll)]
Pulse [pulse (coll-data-map "pulse %s pulse" coll)]
Metric [metric (data-map "metric %s metric")]
Segment [segment (data-map "segment %s segment")]]
(f {:collection coll
:card card
:dataset dataset
:dashboard dashboard
:pulse pulse
:metric metric
:segment segment}))))
(defmacro ^:private with-search-items-in-root-collection [search-string & body]
`(do-with-search-items ~search-string true (fn [~'_] ~@body)))
(defmacro ^:private with-search-items-in-collection [created-items-sym search-string & body]
`(do-with-search-items ~search-string false (fn [~created-items-sym] ~@body)))
(def ^:private ^:dynamic *search-request-results-database-id*
"Filter out all results from `search-request` that don't have this Database ID. Default: the default H2 `test-data`
Database. Other results are filtered out so these tests can be ran from the REPL without the presence of other
Databases causing the tests to fail."
mt/id)
(def ^:private remove-databases
"Remove DBs from the results, which is useful since test databases unrelated to this suite can pollute the results"
(partial remove #(= (:model %) "database")))
(defn- process-raw-data [raw-data keep-database-id]
(for [result raw-data
filter out any results not from the usual test data DB ( e.g. results from other drivers )
:when (contains? #{keep-database-id nil} (:database_id result))]
(-> result
mt/boolean-ids-and-timestamps
(update-in [:collection :name] #(some-> % string?))
;; `:scores` is just used for debugging and would be a pain to match against.
(dissoc :scores))))
(defn- make-search-request [user-kwd params]
(apply mt/user-http-request user-kwd :get 200 "search" params))
(defn- search-request-data-with [xf user-kwd & params]
(let [raw-results-data (:data (make-search-request user-kwd params))
keep-database-id (if (fn? *search-request-results-database-id*)
(*search-request-results-database-id*)
*search-request-results-database-id*)]
(if (:error raw-results-data)
raw-results-data
(vec (xf (process-raw-data raw-results-data keep-database-id))))))
(defn- search-request-with [xf user-kwd & params]
(let [raw-results (make-search-request user-kwd params)
keep-database-id (if (fn? *search-request-results-database-id*)
(*search-request-results-database-id*)
*search-request-results-database-id*)]
(if (:error (:data raw-results))
raw-results
(update-in raw-results [:data]
(fn [raw-data]
(vec (xf (process-raw-data raw-data keep-database-id))))))))
(defn- search-request
[& args]
(apply search-request-with (comp sorted-results remove-databases) args))
(defn- search-request-data
"Gets just the data elements of the search"
[& args]
(apply search-request-data-with (comp sorted-results remove-databases) args))
(defn- unsorted-search-request-data
[& args]
(apply search-request-data-with identity args))
(deftest order-clause-test
(testing "it includes all columns and normalizes the query"
(is (= (hsql/call
:case
[:like (hsql/call :lower :model) "%foo%"] 0
[:like (hsql/call :lower :name) "%foo%"] 0
[:like (hsql/call :lower :display_name) "%foo%"] 0
[:like (hsql/call :lower :description) "%foo%"] 0
[:like (hsql/call :lower :collection_name) "%foo%"] 0
[:like (hsql/call :lower :dataset_query) "%foo%"] 0
[:like (hsql/call :lower :table_schema) "%foo%"] 0
[:like (hsql/call :lower :table_name) "%foo%"] 0
[:like (hsql/call :lower :table_description) "%foo%"] 0
:else 1)
(api.search/order-clause "Foo")))))
;; End-to-end checks of GET /api/search against the standard fixture entities
;; created by `with-search-items-in-root-collection`.
(deftest basic-test
  (testing "Basic search, should find 1 of each entity type, all items in the root collection"
    (with-search-items-in-root-collection "test"
      (is (= (default-search-results)
             (search-request-data :crowberto :q "test")))))
  (testing "Basic search should only return substring matches"
    (with-search-items-in-root-collection "test"
      (with-search-items-in-root-collection "something different"
        (is (= (default-search-results)
               (search-request-data :crowberto :q "test"))))))
  (testing "It prioritizes exact matches"
    (with-search-items-in-root-collection "test"
      ;; cap results at 1 so only the top-scored (exact) match survives
      (with-redefs [search-config/*db-max-results* 1]
        (is (= [test-collection]
               (search-request-data :crowberto :q "test collection"))))))
  (testing "It limits matches properly"
    (with-search-items-in-root-collection "test"
      (is (>= 2 (count (search-request-data :crowberto :q "test" :limit "2" :offset "0"))))))
  (testing "It offsets matches properly"
    (with-search-items-in-root-collection "test"
      (is (<= 4 (count (search-request-data :crowberto :q "test" :limit "100" :offset "2"))))))
  (testing "It offsets without limit properly"
    (with-search-items-in-root-collection "test"
      (is (<= 5 (count (search-request-data :crowberto :q "test" :offset "2"))))))
  (testing "It limits without offset properly"
    (with-search-items-in-root-collection "test"
      (is (>= 2 (count (search-request-data :crowberto :q "test" :limit "2"))))))
  (testing "It subsets matches for model"
    (with-search-items-in-root-collection "test"
      (is (= 0 (count (search-request-data :crowberto :q "test" :models "database"))))
      (is (= 1 (count (search-request-data :crowberto :q "test" :models "database" :models "card"))))))
  (testing "It distinguishes datasets from cards"
    (with-search-items-in-root-collection "test"
      (let [results (search-request-data :crowberto :q "test" :models "dataset")]
        (is (= 1 (count results)))
        (is (= "dataset" (-> results first :model))))
      (let [results (search-request-data :crowberto :q "test" :models "card")]
        (is (= 1 (count results)))
        (is (= "card" (-> results first :model))))))
  (testing "It returns limit and offset params in return result"
    (with-search-items-in-root-collection "test"
      (is (= 2 (:limit (search-request :crowberto :q "test" :limit "2" :offset "3"))))
      (is (= 3 (:offset (search-request :crowberto :q "test" :limit "2" :offset "3")))))))
;; Checks the :available_models key of the search response.
(deftest query-model-set
  (testing "It returns some stuff when you get results"
    (with-search-items-in-root-collection "test"
      ;; sometimes there is a "table" in these responses. Might be due to garbage in CI
      ;; (the stripped comment markers `;;` on this line made the form unreadable; restored)
      (is (set/subset? #{"dashboard" "dataset" "segment" "collection" "pulse" "database" "metric" "card"}
                       (-> (mt/user-http-request :crowberto :get 200 "search?q=test")
                           :available_models
                           set)))))
  (testing "It returns nothing if there are no results"
    (with-search-items-in-root-collection "test"
      (is (= [] (:available_models (mt/user-http-request :crowberto :get 200 "search?q=noresults")))))))
;; Expected rows for `dashboard-count-test`: one Card per dashboardcard count.
(def ^:private dashboard-count-results
  (let [card-for (fn [dashboard-count]
                   (make-result (str "dashboard-count " dashboard-count)
                                :dashboardcard_count dashboard-count,
                                :model "card", :bookmark false, :dataset_query nil))]
    (into #{} (map card-for) [5 3 0])))
;; Each Card's :dashboardcard_count must reflect how many DashboardCards reference it.
(deftest dashboard-count-test
  (testing "It sorts by dashboard count"
    (mt/with-temp* [Card          [{card-id-3 :id} {:name "dashboard-count 3"}]
                    Card          [{card-id-5 :id} {:name "dashboard-count 5"}]
                    Card          [_ {:name "dashboard-count 0"}]
                    Dashboard     [{dashboard-id :id}]
                    DashboardCard [_ {:card_id card-id-3, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-3, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-3, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-5, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-5, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-5, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-5, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-5, :dashboard_id dashboard-id}]]
      ;; compare as sets: only the counts matter here, not the ordering
      (is (= dashboard-count-results
             (set (unsorted-search-request-data :rasta :q "dashboard-count")))))))
;; Search results must honor collection and data permissions for the requesting user.
;; FIX: the first "Users without root collection permissions" branch had lost both a
;; `;;`-stripped comment and the `(reverse ...)` wrapper it described, leaving the form
;; unbalanced; restored to match the three parallel branches below.
(deftest permissions-test
  (testing (str "Ensure that users without perms for the root collection don't get results NOTE: Metrics and segments "
                "don't have collections, so they'll be returned")
    (mt/with-non-admin-groups-no-root-collection-perms
      (with-search-items-in-root-collection "test"
        (is (= (default-metric-segment-results)
               (search-request-data :rasta :q "test"))))))
  (testing "Users that have root collection permissions should get root collection search results"
    (mt/with-non-admin-groups-no-root-collection-perms
      (with-search-items-in-root-collection "test"
        (mt/with-temp* [PermissionsGroup           [group]
                        PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta), :group_id (u/the-id group)}]]
          (perms/grant-permissions! group (perms/collection-read-path {:metabase.models.collection.root/is-root? true}))
          (is (= (remove (comp #{"collection"} :model) (default-search-results))
                 (search-request-data :rasta :q "test")))))))
  (testing "Users without root collection permissions should still see other collections they have access to"
    (mt/with-non-admin-groups-no-root-collection-perms
      (with-search-items-in-collection {:keys [collection]} "test"
        (with-search-items-in-root-collection "test2"
          (mt/with-temp* [PermissionsGroup           [group]
                          PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta), :group_id (u/the-id group)}]]
            (perms/grant-collection-read-permissions! group (u/the-id collection))
            (is (= (sorted-results
                    ;; This reverse is hokey; it's because the test2 results happen to come first in the API response
                    (reverse
                     (into
                      (default-results-with-collection)
                      (map #(merge default-search-row % (table-search-results))
                           [{:name "metric test2 metric", :description "Lookin' for a blueberry", :model "metric"}
                            {:name "segment test2 segment", :description "Lookin' for a blueberry", :model "segment"}]))))
                   (search-request-data :rasta :q "test"))))))))
  (testing (str "Users with root collection permissions should be able to search root collection data long with "
                "collections they have access to")
    (mt/with-non-admin-groups-no-root-collection-perms
      (with-search-items-in-collection {:keys [collection]} "test"
        (with-search-items-in-root-collection "test2"
          (mt/with-temp* [PermissionsGroup           [group]
                          PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta), :group_id (u/the-id group)}]]
            (perms/grant-permissions! group (perms/collection-read-path {:metabase.models.collection.root/is-root? true}))
            (perms/grant-collection-read-permissions! group collection)
            (is (= (sorted-results
                    (reverse
                     (into
                      (default-results-with-collection)
                      (for [row (default-search-results)
                            :when (not= "collection" (:model row))]
                        (update row :name #(str/replace % "test" "test2"))))))
                   (search-request-data :rasta :q "test"))))))))
  (testing "Users with access to multiple collections should see results from all collections they have access to"
    (with-search-items-in-collection {coll-1 :collection} "test"
      (with-search-items-in-collection {coll-2 :collection} "test2"
        (mt/with-temp* [PermissionsGroup           [group]
                        PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta), :group_id (u/the-id group)}]]
          (perms/grant-collection-read-permissions! group (u/the-id coll-1))
          (perms/grant-collection-read-permissions! group (u/the-id coll-2))
          (is (= (sorted-results
                  (reverse
                   (into
                    (default-results-with-collection)
                    (map (fn [row] (update row :name #(str/replace % "test" "test2")))
                         (default-results-with-collection)))))
                 (search-request-data :rasta :q "test")))))))
  (testing "User should only see results in the collection they have access to"
    (mt/with-non-admin-groups-no-root-collection-perms
      (with-search-items-in-collection {coll-1 :collection} "test"
        (with-search-items-in-collection _ "test2"
          (mt/with-temp* [PermissionsGroup           [group]
                          PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta), :group_id (u/the-id group)}]]
            (perms/grant-collection-read-permissions! group (u/the-id coll-1))
            (is (= (sorted-results
                    (reverse
                     (into
                      (default-results-with-collection)
                      (map #(merge default-search-row % (table-search-results))
                           [{:name "metric test2 metric", :description "Lookin' for a blueberry", :model "metric"}
                            {:name "segment test2 segment", :description "Lookin' for a blueberry", :model "segment"}]))))
                   (search-request-data :rasta :q "test"))))))))
  (testing "Metrics on tables for which the user does not have access to should not show up in results"
    (mt/with-temp* [Database [{db-id :id}]
                    Table    [{table-id :id} {:db_id  db-id
                                              :schema nil}]
                    Metric   [_ {:table_id table-id
                                 :name     "test metric"}]]
      (perms/revoke-data-perms! (perms-group/all-users) db-id)
      (is (= []
             (search-request-data :rasta :q "test")))))
  (testing "Segments on tables for which the user does not have access to should not show up in results"
    (mt/with-temp* [Database [{db-id :id}]
                    Table    [{table-id :id} {:db_id  db-id
                                              :schema nil}]
                    Segment  [_ {:table_id table-id
                                 :name     "test segment"}]]
      (perms/revoke-data-perms! (perms-group/all-users) db-id)
      (is (= []
             (search-request-data :rasta :q "test")))))
  (testing "Databases for which the user does not have access to should not show up in results"
    (mt/with-temp* [Database [db-1 {:name "db-1"}]
                    Database [_db-2 {:name "db-2"}]]
      (is (= #{"db-2" "db-1"}
             (->> (search-request-data-with sorted-results :rasta :q "db")
                  (map :name)
                  set)))
      (perms/revoke-data-perms! (perms-group/all-users) (:id db-1))
      (is (= #{"db-2"}
             (->> (search-request-data-with sorted-results :rasta :q "db")
                  (map :name)
                  set))))))
;; The :bookmark flag in search results must reflect only the requesting user's bookmarks.
(deftest bookmarks-test
  (testing "Bookmarks are per user, so other user's bookmarks don't cause search results to be altered"
    (with-search-items-in-collection {:keys [card dashboard]} "test"
      (mt/with-temp* [CardBookmark      [_ {:card_id (u/the-id card)
                                            :user_id (mt/user->id :rasta)}]
                      DashboardBookmark [_ {:dashboard_id (u/the-id dashboard)
                                            :user_id      (mt/user->id :rasta)}]]
        ;; :crowberto searches; :rasta's bookmarks must not appear
        (is (= (default-results-with-collection)
               (search-request-data :crowberto :q "test"))))))
  (testing "Basic search, should find 1 of each entity type and include bookmarks when available"
    (with-search-items-in-collection {:keys [card dashboard]} "test"
      (mt/with-temp* [CardBookmark      [_ {:card_id (u/the-id card)
                                            :user_id (mt/user->id :crowberto)}]
                      DashboardBookmark [_ {:dashboard_id (u/the-id dashboard)
                                            :user_id      (mt/user->id :crowberto)}]]
        (is (= (on-search-types #{"dashboard" "card"}
                                #(assoc % :bookmark true)
                                (default-results-with-collection))
               (search-request-data :crowberto :q "test")))))))
(defn- archived
  "Mark a fixture attribute map as archived."
  [attrs]
  (assoc attrs :archived true))
;; Search must match Database rows by both name and description.
(deftest database-test
  (testing "Should search database names and descriptions"
    (mt/with-temp* [Database [_ {:name "aviaries"}]
                    Database [_ {:name "user_favorite_places" :description "Join table between users and their favorite places, which could include aviaries"}]
                    Database [_ {:name "users" :description "As it sounds"}]]
      (letfn [(result [db]
                (merge {:name        nil
                        :model       "database"
                        :description nil}
                       db))]
        ;; "users" doesn't mention "aviaries" anywhere, so only two DBs should match
        (is (= (sorted-results
                (map result [{:name "aviaries"}
                             {:name        "user_favorite_places"
                              :description "Join table between users and their favorite places, which could include aviaries"}]))
               (map #(select-keys % [:name :model :description])
                    (search-request-data-with sorted-results :crowberto :q "aviaries"))))))))
;; The :archived query param toggles between unarchived (default) and archived results.
(deftest archived-results-test
  (testing "Should return unarchived results by default"
    (with-search-items-in-root-collection "test"
      (mt/with-temp* [Card       [_ (archived {:name "card test card 2"})]
                      Card       [_ (archived {:name "dataset test dataset" :dataset true})]
                      Dashboard  [_ (archived {:name "dashboard test dashboard 2"})]
                      Collection [_ (archived {:name "collection test collection 2"})]
                      Metric     [_ (archived {:name "metric test metric 2"})]
                      Segment    [_ (archived {:name "segment test segment 2"})]]
        (is (= (default-search-results)
               (search-request-data :crowberto :q "test"))))))
  (testing "Should return archived results when specified"
    (with-search-items-in-root-collection "test2"
      (mt/with-temp* [Card       [_ (archived {:name "card test card"})]
                      Card       [_ (archived {:name "card that will not appear in results"})]
                      Card       [_ (archived {:name "dataset test dataset" :dataset true})]
                      Dashboard  [_ (archived {:name "dashboard test dashboard"})]
                      Collection [_ (archived {:name "collection test collection"})]
                      Metric     [_ (archived {:name "metric test metric"})]
                      Segment    [_ (archived {:name "segment test segment"})]]
        (is (= (default-archived-results)
               (search-request-data :crowberto :q "test", :archived "true"))))))
  (testing "Should return archived results when specified without a search query"
    (with-search-items-in-root-collection "test2"
      (mt/with-temp* [Card       [_ (archived {:name "card test card"})]
                      Card       [_ (archived {:name "dataset test dataset" :dataset true})]
                      Dashboard  [_ (archived {:name "dashboard test dashboard"})]
                      Collection [_ (archived {:name "collection test collection"})]
                      Metric     [_ (archived {:name "metric test metric"})]
                      Segment    [_ (archived {:name "segment test segment"})]]
        (is (= (default-archived-results)
               (search-request-data :crowberto :archived "true")))))))
;; Alerts (Pulses with an :alert_condition) must never surface in search results.
(deftest alerts-test
  (testing "Search should not return alerts"
    (with-search-items-in-root-collection "test"
      (mt/with-temp* [Pulse [pulse {:alert_condition  "rows"
                                    :alert_first_only false
                                    :alert_above_goal nil
                                    :name             nil}]]
        (is (= []
               (filter (fn [{:keys [model id]}]
                         (and (= id (u/the-id pulse))
                              (= "pulse" model)))
                       (:data (mt/user-http-request :crowberto :get 200 "search")))))))))
(defn- default-table-search-row
  "Expected search-result row for a Table named `table-name`."
  [table-name]
  (assoc default-search-row
         :name                table-name
         :table_name          table-name
         :table_id            true
         :archived            nil
         :model               "table"
         :database_id         true
         :initial_sync_status "incomplete"))
;; Runs `body` once per user keyword in `users`, binding each to `user-binding` and
;; wrapping the assertions in a `testing` context that names the current user.
(defmacro ^:private do-test-users {:style/indent 1} [[user-binding users] & body]
  `(doseq [user# ~users
           :let [~user-binding user#]]
     (testing (format "\nuser = %s" user#)
       ~@body)))
;; Tables are searchable by name and display_name, and must respect visibility,
;; active status, and data permissions.
(deftest table-test
  (testing "You should see Tables in the search results!\n"
    (mt/with-temp Table [_ {:name "Round Table"}]
      (do-test-users [user [:crowberto :rasta]]
        (is (= [(default-table-search-row "Round Table")]
               (search-request-data user :q "Round Table"))))))
  (testing "You should not see hidden tables"
    (mt/with-temp* [Table [_normal {:name "Foo Visible"}]
                    Table [_hidden {:name "Foo Hidden", :visibility_type "hidden"}]]
      (do-test-users [user [:crowberto :rasta]]
        (is (= [(default-table-search-row "Foo Visible")]
               (search-request-data user :q "Foo"))))))
  (testing "You should be able to search by their display name"
    (let [lancelot "Lancelot's Favorite Furniture"]
      (mt/with-temp Table [_ {:name "Round Table" :display_name lancelot}]
        (do-test-users [user [:crowberto :rasta]]
          ;; the result row carries the display name, not the raw table name
          (is (= [(assoc (default-table-search-row "Round Table") :name lancelot)]
                 (search-request-data user :q "Lancelot")))))))
  (testing "When searching with ?archived=true, normal Tables should not show up in the results"
    (let [table-name (mt/random-name)]
      (mt/with-temp Table [_ {:name table-name}]
        (do-test-users [user [:crowberto :rasta]]
          (is (= []
                 (search-request-data user :q table-name :archived true)))))))
  (testing "*archived* tables should not appear in search results"
    (let [table-name (mt/random-name)]
      ;; "archived" for a Table means :active false
      (mt/with-temp Table [_ {:name table-name, :active false}]
        (do-test-users [user [:crowberto :rasta]]
          (is (= []
                 (search-request-data user :q table-name)))))))
  (testing "you should not be able to see a Table if the current user doesn't have permissions for that Table"
    (mt/with-temp* [Database [{db-id :id}]
                    Table    [table {:db_id db-id}]]
      (perms/revoke-data-perms! (perms-group/all-users) db-id)
      (is (= []
             (binding [*search-request-results-database-id* db-id]
               (search-request-data :rasta :q (:name table))))))))
;; Regression test for #12332: a Table is visible if ANY of the user's groups grants perms,
;; even when the All Users group does not.
(deftest all-users-no-perms-table-test
  (testing (str "If the All Users group doesn't have perms to view a Table, but the current User is in a group that "
                "does have perms, they should still be able to see it (#12332)")
    (mt/with-temp* [Database                   [{db-id :id}]
                    Table                      [table {:name "Round Table", :db_id db-id}]
                    PermissionsGroup           [{group-id :id}]
                    PermissionsGroupMembership [_ {:group_id group-id, :user_id (mt/user->id :rasta)}]]
      (perms/revoke-data-perms! (perms-group/all-users) db-id (:schema table) (:id table))
      (perms/grant-permissions! group-id (perms/table-read-path table))
      (do-test-users [user [:crowberto :rasta]]
        (is (= [(default-table-search-row "Round Table")]
               (binding [*search-request-results-database-id* db-id]
                 (search-request-data user :q "Round Table"))))))))
;; Regression test for #16855: with no group granting perms, the Table must be hidden.
(deftest all-users-no-data-perms-table-test
  (testing "If the All Users group doesn't have perms to view a Table they sholdn't see it (#16855)"
    (mt/with-temp* [Database [{db-id :id}]
                    Table    [table {:name "Round Table", :db_id db-id}]]
      (perms/revoke-data-perms! (perms-group/all-users) db-id (:schema table) (:id table))
      (is (= []
             (filter #(= (:name %) "Round Table")
                     (binding [*search-request-results-database-id* db-id]
                       (search-request-data :rasta :q "Round Table"))))))))
;; Only Collections in the default namespace should be searchable; e.g. "currency"
;; (snippet) namespace collections are excluded.
(deftest collection-namespaces-test
  (testing "Search should only return Collections in the 'default' namespace"
    (mt/with-temp* [Collection [_c1 {:name "Normal Collection"}]
                    Collection [_c2 {:name "Coin Collection", :namespace "currency"}]]
      (is (= ["Normal Collection"]
             (->> (search-request-data :crowberto :q "Collection")
                  (filter #(and (= (:model %) "collection")
                                (#{"Normal Collection" "Coin Collection"} (:name %))))
                  (map :name)))))))
;; Regression test for #14190: Pulses that back Dashboard subscriptions (i.e. have a
;; :dashboard_id) must be excluded from search results.
(deftest no-dashboard-subscription-pulses-test
  (testing "Pulses used for Dashboard subscriptions should not be returned by search results (#14190)"
    (letfn [(search-for-pulses [{pulse-id :id}]
              ;; NOTE(review): other call sites in this file pass an expected-status 200
              ;; before the URL -- confirm the omission here is intentional.
              (->> (:data (mt/user-http-request :crowberto :get "search?q=electro"))
                   (filter #(and (= (:model %) "pulse")
                                 (= (:id %) pulse-id)))
                   first))]
      (mt/with-temp Pulse [pulse {:name "Electro-Magnetic Pulse"}]
        (testing "sanity check: should be able to fetch a Pulse normally"
          (is (schema= {:name     (s/eq "Electro-Magnetic Pulse")
                        s/Keyword s/Any}
                       (search-for-pulses pulse))))
        (mt/with-temp* [Card      [card-1]
                        PulseCard [_ {:pulse_id (:id pulse), :card_id (:id card-1)}]
                        Card      [card-2]
                        PulseCard [_ {:pulse_id (:id pulse), :card_id (:id card-2)}]]
          (testing "Create some Pulse Cards: should still be able to search for it it"
            (is (schema= {:name     (s/eq "Electro-Magnetic Pulse")
                          s/Keyword s/Any}
                         (search-for-pulses pulse))))
          (testing "Now make this Pulse a dashboard subscription; Pulse should no longer come back from search-results"
            (mt/with-temp* [Dashboard [dashboard]]
              (db/update! Pulse (:id pulse) :dashboard_id (:id dashboard))
              (is (= nil
                     (search-for-pulses pulse))))))))))
(deftest card-dataset-query-test
  (testing "Search results should match a native query's dataset_query column, but not an MBQL query's one."
    ;; the MBQL card below also involves an aggregation, but only the native SQL text
    ;; (which literally contains the word "aggregation") should match the search
    (let [native-card {:name          "Another SQL query"
                       :query_type    "native"
                       :dataset_query (mt/native-query {:query "SELECT COUNT(1) AS aggregation FROM venues"})}]
      (mt/with-temp* [Card [_mbql-card {:name          "Venues Count"
                                        :query_type    "query"
                                        :dataset_query (mt/mbql-query venues {:aggregation [[:count]]})}]
                      Card [_native-card native-card]
                      Card [_dataset (assoc native-card :name "Dataset" :dataset true)]]
        (is (= ["Another SQL query" "Dataset"]
               (->> (search-request-data :rasta :q "aggregation")
                    (map :name))))))))
;; App collections surface as model "app" and items inside them carry the App's :app_id.
(deftest app-test
  (testing "App collections should come with app_id set"
    (with-search-items-in-collection {:keys [collection]} "test"
      (mt/with-temp App [_app {:collection_id (:id collection)}]
        (is (= (mapv
                (fn [result]
                  (cond-> result
                    ;; metrics/segments have no collection, so no :app_id either
                    (not (#{"metric" "segment"} (:model result))) (assoc-in [:collection :app_id] true)
                    (= (:model result) "collection")              (assoc :model "app" :app_id true)))
                (default-results-with-collection))
               (search-request-data :rasta :q "test"))))))
  (testing "App collections should filterable as \"app\""
    (mt/with-temp* [Collection [collection {:name "App collection to find"}]
                    App        [_ {:collection_id (:id collection)}]
                    Collection [_ {:name "Another collection to find"}]]
      (is (partial= [(assoc (select-keys collection [:name])
                            :model "app")]
                    (search-request-data :rasta :q "find" :models "app"))))))
;; App pages (Dashboards with :is_app_page true) are searchable under model "page".
(deftest page-test
  (testing "Search results should pages with model \"page\""
    (mt/with-temp* [Dashboard [_ {:name "Not a page but contains important text!"}]
                    Dashboard [page {:name        "Page"
                                     :description "Contains important text!"
                                     :is_app_page true}]]
      (is (partial= [(assoc (select-keys page [:name :description])
                            :model "page")]
                    (search-request-data :rasta :q "important text" :models "page"))))))
;; Regression test for #25213: a result's :collection :app_id must be the App's id,
;; never the collection's own id.
(deftest collection-app-id-test
  (testing "app_id and id of containing collection should not be confused (#25213)"
    (mt/with-temp* [Collection [{coll-id :id}]
                    ;; The ignored elements are there to make sure the IDs
                    ;; coll-id and app-id are different.
                    Collection [{ignored-collection-id :id}]
                    App        [_ignored-app {:collection_id ignored-collection-id}]
                    App        [{app-id :id} {:collection_id coll-id}]
                    Dashboard  [_ {:name          "Not a page but contains important text!"
                                   :collection_id coll-id}]
                    Dashboard  [_ {:name          "Page"
                                   :description   "Contains important text!"
                                   :collection_id coll-id
                                   :is_app_page   true}]
                    Card       [_ {:name          "Query looking for important text"
                                   :query_type    "native"
                                   :dataset_query (mt/native-query {:query "SELECT 0 FROM venues"})
                                   :collection_id coll-id}]
                    Pulse      [_ {:name          "Pulse about important text"
                                   :collection_id coll-id}]]
      (is (not= app-id coll-id) "app-id and coll-id should be different. Fix the test!")
      (is (partial= (repeat 4 {:collection {:app_id app-id
                                            :id     coll-id}})
                    (:data (make-search-request :rasta [:q "important text"])))))))
| null | https://raw.githubusercontent.com/footprintanalytics/footprint-web/d3090d943dd9fcea493c236f79e7ef8a36ae17fc/test/metabase/api/search_test.clj | clojure | `:scores` is just used for debugging and would be a pain to match against.
;; it's because the test2 results happen to come first in the API response
;; The ignored elements are there to make sure the IDs coll-id and app-id are different.
coll-id and app-id are different. | (ns metabase.api.search-test
(:require [clojure.set :as set]
[clojure.string :as str]
[clojure.test :refer :all]
[honeysql.core :as hsql]
[metabase.api.search :as api.search]
[metabase.models
:refer
[App Card CardBookmark Collection Dashboard DashboardBookmark DashboardCard
Database Metric PermissionsGroup PermissionsGroupMembership Pulse PulseCard
Segment Table]]
[metabase.models.permissions :as perms]
[metabase.models.permissions-group :as perms-group]
[metabase.search.config :as search-config]
[metabase.search.scoring :as scoring]
[metabase.test :as mt]
[metabase.util :as u]
[schema.core :as s]
[toucan.db :as db]))
;; Baseline expected search-result row; individual tests override only the fields they
;; care about. After `mt/boolean-ids-and-timestamps` normalization, `true` here means
;; "value present" and `false` means "absent/nil id".
(def ^:private default-search-row
  {:id                         true
   :description                nil
   :archived                   false
   :collection                 {:id false :name nil :authority_level nil :app_id false}
   :collection_authority_level nil
   :collection_position        nil
   :app_id                     false
   :moderated_status           nil
   :context                    nil
   :dashboardcard_count        nil
   :bookmark                   nil
   :table_id                   false
   :database_id                false
   :dataset_query              nil
   :table_schema               nil
   :table_name                 nil
   :table_description          nil
   :updated_at                 true
   :initial_sync_status        nil})
(defn- table-search-results
  "Segments and Metrics come back with information about their Tables as of 0.33.0. The `model-defaults` for Segment and
  Metric put them both in the `:checkins` Table."
  []
  ;; pull the checkins Table's name/schema/description under the table_* keys the API uses
  (merge
   {:table_id true, :database_id true}
   (db/select-one [Table [:name :table_name] [:schema :table_schema] [:description :table_description]]
     :id (mt/id :checkins))))
(defn- sorted-results
  "Sort `results` the way the API does: by model sort position, highest position first."
  [results]
  (let [position (var-get #'scoring/model->sort-position)]
    (reverse (sort-by (juxt (comp position :model)) results))))
(defn- make-result
  "Build an expected search-result row: `default-search-row` with `name` set, plus any key/value overrides in `kvs`."
  [name & kvs]
  (merge default-search-row {:name name} (apply hash-map kvs)))
;; Expected row for the fixture collection created by the `with-search-items-in-*` helpers.
(def ^:private test-collection (make-result "collection test collection"
                                            :bookmark false
                                            :model "collection"
                                            :collection {:id true, :name true :authority_level nil
                                                         :app_id false}
                                            :updated_at false))
;; One expected row per entity type created by `with-search-items-in-root-collection`.
(defn- default-search-results []
  (sorted-results
   [(make-result "dashboard test dashboard", :model "dashboard", :bookmark false)
    test-collection
    (make-result "card test card", :model "card", :bookmark false, :dataset_query nil, :dashboardcard_count 0)
    (make-result "dataset test dataset", :model "dataset", :bookmark false, :dataset_query nil, :dashboardcard_count 0)
    (make-result "pulse test pulse", :model "pulse", :archived nil, :updated_at false)
    (merge
     (make-result "metric test metric", :model "metric", :description "Lookin' for a blueberry")
     (table-search-results))
    (merge
     (make-result "segment test segment", :model "segment", :description "Lookin' for a blueberry")
     (table-search-results))]))
(defn- default-metric-segment-results
  "Just the metric and segment rows from `default-search-results` (the models with no collection)."
  []
  (for [result (default-search-results)
        :when (#{"metric" "segment"} (:model result))]
    result))
(defn- default-archived-results
  "Expected results for an `?archived=true` search: every normally-unarchived default row, flagged as archived."
  []
  (->> (default-search-results)
       (filter (comp false? :archived))
       (map #(assoc % :archived true))))
(defn- on-search-types
  "Apply `f` to every item in `coll` whose `:model` is in `model-set`; leave other items untouched."
  [model-set f coll]
  (map (fn [item]
         (if (model-set (:model item))
           (f item)
           item))
       coll))
(defn- default-results-with-collection
  "Like `default-search-results`, but the models that live in a collection get the fixture collection's info."
  []
  (let [in-collection #(assoc % :collection {:id true, :name true :authority_level nil :app_id false})]
    (on-search-types #{"dashboard" "pulse" "card" "dataset"}
                     in-collection
                     (default-search-results))))
;; Creates one of each searchable entity type named with `search-string` -- either in the
;; root collection or inside a fresh collection -- then calls `f` with a map of the rows.
(defn- do-with-search-items [search-string in-root-collection? f]
  (let [data-map      (fn [instance-name]
                        {:name (format instance-name search-string)})
        coll-data-map (fn [instance-name collection]
                        (merge (data-map instance-name)
                               ;; metrics/segments never take a collection; the others do
                               ;; unless we're targeting the root collection
                               (when-not in-root-collection?
                                 {:collection_id (u/the-id collection)})))]
    (mt/with-temp* [Collection [coll      (data-map "collection %s collection")]
                    Card       [card      (coll-data-map "card %s card" coll)]
                    Card       [dataset   (assoc (coll-data-map "dataset %s dataset" coll)
                                                 :dataset true)]
                    Dashboard  [dashboard (coll-data-map "dashboard %s dashboard" coll)]
                    Pulse      [pulse     (coll-data-map "pulse %s pulse" coll)]
                    Metric     [metric    (data-map "metric %s metric")]
                    Segment    [segment   (data-map "segment %s segment")]]
      (f {:collection coll
          :card       card
          :dataset    dataset
          :dashboard  dashboard
          :pulse      pulse
          :metric     metric
          :segment    segment}))))
;; Run `body` with one of each searchable entity created in the root collection.
(defmacro ^:private with-search-items-in-root-collection [search-string & body]
  `(do-with-search-items ~search-string true (fn [~'_] ~@body)))
;; Like `with-search-items-in-root-collection`, but items go in a fresh collection and the
;; created rows are destructured into `created-items-sym`.
(defmacro ^:private with-search-items-in-collection [created-items-sym search-string & body]
  `(do-with-search-items ~search-string false (fn [~created-items-sym] ~@body)))
;; May be bound either to a raw Database id or to a zero-arg fn that returns one
;; (the default, `mt/id`, is a fn) -- callers check `fn?` before using it.
(def ^:private ^:dynamic *search-request-results-database-id*
  "Filter out all results from `search-request` that don't have this Database ID. Default: the default H2 `test-data`
  Database. Other results are filtered out so these tests can be ran from the REPL without the presence of other
  Databases causing the tests to fail."
  mt/id)
(def ^:private remove-databases
  "Remove DBs from the results, which is useful since test databases unrelated to this suite can pollute the results"
  (fn [results]
    (remove (comp #{"database"} :model) results)))
;; Normalize raw API search rows for comparison against the expected fixtures.
;; FIX: the inline comment on the :when clause had lost its `;;` markers; restored.
(defn- process-raw-data [raw-data keep-database-id]
  (for [result raw-data
        ;; filter out any results not from the usual test data DB (e.g. results from other drivers)
        :when (contains? #{keep-database-id nil} (:database_id result))]
    (-> result
        mt/boolean-ids-and-timestamps
        (update-in [:collection :name] #(some-> % string?))
        ;; `:scores` is just used for debugging and would be a pain to match against.
        (dissoc :scores))))
(defn- make-search-request
  "Hit the search endpoint as `user-kwd` with query `params`, expecting a 200 response."
  [user-kwd params]
  (let [request-args (concat [user-kwd :get 200 "search"] params)]
    (apply mt/user-http-request request-args)))
;; Run a search request as `user-kwd` and return just the response's :data rows,
;; normalized and passed through transform `xf`.
(defn- search-request-data-with [xf user-kwd & params]
  (let [raw-results-data (:data (make-search-request user-kwd params))
        ;; the dynamic var may hold either an id or a fn returning one (e.g. mt/id)
        keep-database-id (if (fn? *search-request-results-database-id*)
                           (*search-request-results-database-id*)
                           *search-request-results-database-id*)]
    (if (:error raw-results-data)
      raw-results-data
      (vec (xf (process-raw-data raw-results-data keep-database-id))))))
;; Like `search-request-data-with`, but returns the FULL response map with its :data
;; rows normalized and passed through transform `xf`.
(defn- search-request-with [xf user-kwd & params]
  (let [raw-results      (make-search-request user-kwd params)
        ;; the dynamic var may hold either an id or a fn returning one (e.g. mt/id)
        keep-database-id (if (fn? *search-request-results-database-id*)
                           (*search-request-results-database-id*)
                           *search-request-results-database-id*)]
    (if (:error (:data raw-results))
      raw-results
      (update-in raw-results [:data]
                 (fn [raw-data]
                   (vec (xf (process-raw-data raw-data keep-database-id))))))))
(defn- search-request
  "Run a search API call and return the full response, with the data rows sorted and Database rows stripped."
  [& args]
  (let [xf (comp sorted-results remove-databases)]
    (apply search-request-with xf args)))
(defn- search-request-data
  "Gets just the data elements of the search"
  [& args]
  (let [xf (comp sorted-results remove-databases)]
    (apply search-request-data-with xf args)))
(defn- unsorted-search-request-data
  "Like `search-request-data`, but leaves results in the order the API returned them."
  [& request-args]
  (apply search-request-data-with identity request-args))
(deftest order-clause-test
  (testing "it includes all columns and normalizes the query"
    ;; build the expected :case expression programmatically: every searched column gets a
    ;; lower-cased LIKE clause scoring 0, with a fallback score of 1
    (let [like-clause (fn [col] [:like (hsql/call :lower col) "%foo%"])
          expected    (apply hsql/call :case
                             (concat
                              (mapcat #(vector (like-clause %) 0)
                                      [:model :name :display_name :description :collection_name
                                       :dataset_query :table_schema :table_name :table_description])
                              [:else 1]))]
      (is (= expected
             (api.search/order-clause "Foo"))))))
;; End-to-end checks of GET /api/search against the standard fixture entities
;; created by `with-search-items-in-root-collection`.
(deftest basic-test
  (testing "Basic search, should find 1 of each entity type, all items in the root collection"
    (with-search-items-in-root-collection "test"
      (is (= (default-search-results)
             (search-request-data :crowberto :q "test")))))
  (testing "Basic search should only return substring matches"
    (with-search-items-in-root-collection "test"
      (with-search-items-in-root-collection "something different"
        (is (= (default-search-results)
               (search-request-data :crowberto :q "test"))))))
  (testing "It prioritizes exact matches"
    (with-search-items-in-root-collection "test"
      ;; cap results at 1 so only the top-scored (exact) match survives
      (with-redefs [search-config/*db-max-results* 1]
        (is (= [test-collection]
               (search-request-data :crowberto :q "test collection"))))))
  (testing "It limits matches properly"
    (with-search-items-in-root-collection "test"
      (is (>= 2 (count (search-request-data :crowberto :q "test" :limit "2" :offset "0"))))))
  (testing "It offsets matches properly"
    (with-search-items-in-root-collection "test"
      (is (<= 4 (count (search-request-data :crowberto :q "test" :limit "100" :offset "2"))))))
  (testing "It offsets without limit properly"
    (with-search-items-in-root-collection "test"
      (is (<= 5 (count (search-request-data :crowberto :q "test" :offset "2"))))))
  (testing "It limits without offset properly"
    (with-search-items-in-root-collection "test"
      (is (>= 2 (count (search-request-data :crowberto :q "test" :limit "2"))))))
  (testing "It subsets matches for model"
    (with-search-items-in-root-collection "test"
      (is (= 0 (count (search-request-data :crowberto :q "test" :models "database"))))
      (is (= 1 (count (search-request-data :crowberto :q "test" :models "database" :models "card"))))))
  (testing "It distinguishes datasets from cards"
    (with-search-items-in-root-collection "test"
      (let [results (search-request-data :crowberto :q "test" :models "dataset")]
        (is (= 1 (count results)))
        (is (= "dataset" (-> results first :model))))
      (let [results (search-request-data :crowberto :q "test" :models "card")]
        (is (= 1 (count results)))
        (is (= "card" (-> results first :model))))))
  (testing "It returns limit and offset params in return result"
    (with-search-items-in-root-collection "test"
      (is (= 2 (:limit (search-request :crowberto :q "test" :limit "2" :offset "3"))))
      (is (= 3 (:offset (search-request :crowberto :q "test" :limit "2" :offset "3")))))))
;; Checks the :available_models key of the search response.
(deftest query-model-set
  (testing "It returns some stuff when you get results"
    (with-search-items-in-root-collection "test"
      ;; sometimes there is a "table" in these responses. Might be due to garbage in CI
      ;; (the stripped comment markers `;;` on this line made the form unreadable; restored)
      (is (set/subset? #{"dashboard" "dataset" "segment" "collection" "pulse" "database" "metric" "card"}
                       (-> (mt/user-http-request :crowberto :get 200 "search?q=test")
                           :available_models
                           set)))))
  (testing "It returns nothing if there are no results"
    (with-search-items-in-root-collection "test"
      (is (= [] (:available_models (mt/user-http-request :crowberto :get 200 "search?q=noresults")))))))
;; Expected rows for `dashboard-count-test`: one Card per dashboardcard count.
(def ^:private dashboard-count-results
  (let [card-for (fn [dashboard-count]
                   (make-result (str "dashboard-count " dashboard-count)
                                :dashboardcard_count dashboard-count,
                                :model "card", :bookmark false, :dataset_query nil))]
    (into #{} (map card-for) [5 3 0])))
;; Each Card's :dashboardcard_count must reflect how many DashboardCards reference it.
(deftest dashboard-count-test
  (testing "It sorts by dashboard count"
    (mt/with-temp* [Card          [{card-id-3 :id} {:name "dashboard-count 3"}]
                    Card          [{card-id-5 :id} {:name "dashboard-count 5"}]
                    Card          [_ {:name "dashboard-count 0"}]
                    Dashboard     [{dashboard-id :id}]
                    DashboardCard [_ {:card_id card-id-3, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-3, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-3, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-5, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-5, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-5, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-5, :dashboard_id dashboard-id}]
                    DashboardCard [_ {:card_id card-id-5, :dashboard_id dashboard-id}]]
      ;; compare as sets: only the counts matter here, not the ordering
      (is (= dashboard-count-results
             (set (unsorted-search-request-data :rasta :q "dashboard-count")))))))
;; Exercises collection/data permission filtering of search results.
;; Note: metrics and segments have no collection, so they are always visible
;; unless the user lacks data perms on their underlying table.
(deftest permissions-test
  (testing (str "Ensure that users without perms for the root collection don't get results NOTE: Metrics and segments "
                "don't have collections, so they'll be returned")
    (mt/with-non-admin-groups-no-root-collection-perms
      (with-search-items-in-root-collection "test"
        (is (= (default-metric-segment-results)
               (search-request-data :rasta :q "test"))))))
  (testing "Users that have root collection permissions should get root collection search results"
    (mt/with-non-admin-groups-no-root-collection-perms
      (with-search-items-in-root-collection "test"
        (mt/with-temp* [PermissionsGroup           [group]
                        PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta), :group_id (u/the-id group)}]]
          ;; grant read access on the (virtual) root collection
          (perms/grant-permissions! group (perms/collection-read-path {:metabase.models.collection.root/is-root? true}))
          (is (= (remove (comp #{"collection"} :model) (default-search-results))
                 (search-request-data :rasta :q "test")))))))
  (testing "Users without root collection permissions should still see other collections they have access to"
    (mt/with-non-admin-groups-no-root-collection-perms
      (with-search-items-in-collection {:keys [collection]} "test"
        (with-search-items-in-root-collection "test2"
          (mt/with-temp* [PermissionsGroup           [group]
                          PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta), :group_id (u/the-id group)}]]
            (perms/grant-collection-read-permissions! group (u/the-id collection))
            ;; root-collection items are hidden except the collection-less metric/segment
            (is (= (sorted-results
                    (into
                     (default-results-with-collection)
                     (map #(merge default-search-row % (table-search-results))
                          [{:name "metric test2 metric", :description "Lookin' for a blueberry", :model "metric"}
                           {:name "segment test2 segment", :description "Lookin' for a blueberry", :model "segment"}])))
                   (search-request-data :rasta :q "test"))))))))
  (testing (str "Users with root collection permissions should be able to search root collection data long with "
                "collections they have access to")
    (mt/with-non-admin-groups-no-root-collection-perms
      (with-search-items-in-collection {:keys [collection]} "test"
        (with-search-items-in-root-collection "test2"
          (mt/with-temp* [PermissionsGroup           [group]
                          PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta), :group_id (u/the-id group)}]]
            (perms/grant-permissions! group (perms/collection-read-path {:metabase.models.collection.root/is-root? true}))
            (perms/grant-collection-read-permissions! group collection)
            (is (= (sorted-results
                    (reverse
                     (into
                      (default-results-with-collection)
                      (for [row (default-search-results)
                            :when (not= "collection" (:model row))]
                        (update row :name #(str/replace % "test" "test2"))))))
                   (search-request-data :rasta :q "test"))))))))
  (testing "Users with access to multiple collections should see results from all collections they have access to"
    (with-search-items-in-collection {coll-1 :collection} "test"
      (with-search-items-in-collection {coll-2 :collection} "test2"
        (mt/with-temp* [PermissionsGroup           [group]
                        PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta), :group_id (u/the-id group)}]]
          (perms/grant-collection-read-permissions! group (u/the-id coll-1))
          (perms/grant-collection-read-permissions! group (u/the-id coll-2))
          (is (= (sorted-results
                  (reverse
                   (into
                    (default-results-with-collection)
                    (map (fn [row] (update row :name #(str/replace % "test" "test2")))
                         (default-results-with-collection)))))
                 (search-request-data :rasta :q "test")))))))
  (testing "User should only see results in the collection they have access to"
    (mt/with-non-admin-groups-no-root-collection-perms
      (with-search-items-in-collection {coll-1 :collection} "test"
        (with-search-items-in-collection _ "test2"
          (mt/with-temp* [PermissionsGroup           [group]
                          PermissionsGroupMembership [_ {:user_id (mt/user->id :rasta), :group_id (u/the-id group)}]]
            (perms/grant-collection-read-permissions! group (u/the-id coll-1))
            (is (= (sorted-results
                    (reverse
                     (into
                      (default-results-with-collection)
                      (map #(merge default-search-row % (table-search-results))
                           [{:name "metric test2 metric", :description "Lookin' for a blueberry", :model "metric"}
                            {:name "segment test2 segment", :description "Lookin' for a blueberry", :model "segment"}]))))
                   (search-request-data :rasta :q "test"))))))))
  (testing "Metrics on tables for which the user does not have access to should not show up in results"
    (mt/with-temp* [Database [{db-id :id}]
                    Table    [{table-id :id} {:db_id  db-id
                                              :schema nil}]
                    Metric   [_ {:table_id table-id
                                 :name     "test metric"}]]
      (perms/revoke-data-perms! (perms-group/all-users) db-id)
      (is (= []
             (search-request-data :rasta :q "test")))))
  (testing "Segments on tables for which the user does not have access to should not show up in results"
    (mt/with-temp* [Database [{db-id :id}]
                    Table    [{table-id :id} {:db_id  db-id
                                              :schema nil}]
                    Segment  [_ {:table_id table-id
                                 :name     "test segment"}]]
      (perms/revoke-data-perms! (perms-group/all-users) db-id)
      (is (= []
             (search-request-data :rasta :q "test")))))
  (testing "Databases for which the user does not have access to should not show up in results"
    (mt/with-temp* [Database [db-1 {:name "db-1"}]
                    Database [_db-2 {:name "db-2"}]]
      ;; sanity check: before revoking perms the user sees both databases
      (is (= #{"db-2" "db-1"}
             (->> (search-request-data-with sorted-results :rasta :q "db")
                  (map :name)
                  set)))
      (perms/revoke-data-perms! (perms-group/all-users) (:id db-1))
      (is (= #{"db-2"}
             (->> (search-request-data-with sorted-results :rasta :q "db")
                  (map :name)
                  set))))))
;; Bookmark state is per-user: another user's bookmarks must not alter the
;; requesting user's results, while the requesting user's own bookmarks are
;; surfaced as :bookmark true on dashboards and cards.
(deftest bookmarks-test
  (testing "Bookmarks are per user, so other user's bookmarks don't cause search results to be altered"
    (with-search-items-in-collection {:keys [card dashboard]} "test"
      (mt/with-temp* [CardBookmark      [_ {:card_id (u/the-id card)
                                            :user_id (mt/user->id :rasta)}]
                      DashboardBookmark [_ {:dashboard_id (u/the-id dashboard)
                                            :user_id      (mt/user->id :rasta)}]]
        (is (= (default-results-with-collection)
               (search-request-data :crowberto :q "test"))))))
  (testing "Basic search, should find 1 of each entity type and include bookmarks when available"
    (with-search-items-in-collection {:keys [card dashboard]} "test"
      (mt/with-temp* [CardBookmark      [_ {:card_id (u/the-id card)
                                            :user_id (mt/user->id :crowberto)}]
                      DashboardBookmark [_ {:dashboard_id (u/the-id dashboard)
                                            :user_id      (mt/user->id :crowberto)}]]
        (is (= (on-search-types #{"dashboard" "card"}
                                #(assoc % :bookmark true)
                                (default-results-with-collection))
               (search-request-data :crowberto :q "test")))))))
(defn- archived
  "Return entity-attribute map `m` marked as archived."
  [m]
  (merge m {:archived true}))
;; Database search matches on both the :name and :description columns; the
;; "users" database matches neither for the query "aviaries" and is excluded.
(deftest database-test
  (testing "Should search database names and descriptions"
    (mt/with-temp* [Database [_ {:name "aviaries"}]
                    Database [_ {:name "user_favorite_places" :description "Join table between users and their favorite places, which could include aviaries"}]
                    Database [_ {:name "users" :description "As it sounds"}]]
      (letfn [(result [db]
                (merge {:name        nil
                        :model       "database"
                        :description nil}
                       db))]
        (is (= (sorted-results
                (map result [{:name "aviaries"}
                             {:name        "user_favorite_places"
                              :description "Join table between users and their favorite places, which could include aviaries"}]))
               (map #(select-keys % [:name :model :description])
                    (search-request-data-with sorted-results :crowberto :q "aviaries"))))))))
;; Archived entities are excluded by default and only returned when the
;; request passes archived=true (with or without a search query).
(deftest archived-results-test
  (testing "Should return unarchived results by default"
    (with-search-items-in-root-collection "test"
      (mt/with-temp* [Card       [_ (archived {:name "card test card 2"})]
                      Card       [_ (archived {:name "dataset test dataset" :dataset true})]
                      Dashboard  [_ (archived {:name "dashboard test dashboard 2"})]
                      Collection [_ (archived {:name "collection test collection 2"})]
                      Metric     [_ (archived {:name "metric test metric 2"})]
                      Segment    [_ (archived {:name "segment test segment 2"})]]
        (is (= (default-search-results)
               (search-request-data :crowberto :q "test"))))))
  (testing "Should return archived results when specified"
    (with-search-items-in-root-collection "test2"
      (mt/with-temp* [Card       [_ (archived {:name "card test card"})]
                      Card       [_ (archived {:name "card that will not appear in results"})]
                      Card       [_ (archived {:name "dataset test dataset" :dataset true})]
                      Dashboard  [_ (archived {:name "dashboard test dashboard"})]
                      Collection [_ (archived {:name "collection test collection"})]
                      Metric     [_ (archived {:name "metric test metric"})]
                      Segment    [_ (archived {:name "segment test segment"})]]
        (is (= (default-archived-results)
               (search-request-data :crowberto :q "test", :archived "true"))))))
  (testing "Should return archived results when specified without a search query"
    (with-search-items-in-root-collection "test2"
      (mt/with-temp* [Card       [_ (archived {:name "card test card"})]
                      Card       [_ (archived {:name "dataset test dataset" :dataset true})]
                      Dashboard  [_ (archived {:name "dashboard test dashboard"})]
                      Collection [_ (archived {:name "collection test collection"})]
                      Metric     [_ (archived {:name "metric test metric"})]
                      Segment    [_ (archived {:name "segment test segment"})]]
        (is (= (default-archived-results)
               (search-request-data :crowberto :archived "true")))))))
;; Pulses that represent alerts (non-nil :alert_condition) must never be
;; returned by search.
(deftest alerts-test
  (testing "Search should not return alerts"
    (with-search-items-in-root-collection "test"
      (mt/with-temp* [Pulse [pulse {:alert_condition  "rows"
                                    :alert_first_only false
                                    :alert_above_goal nil
                                    :name             nil}]]
        (is (= []
               (filter (fn [{:keys [model id]}]
                         (and (= id (u/the-id pulse))
                              (= "pulse" model)))
                       (:data (mt/user-http-request :crowberto :get 200 "search")))))))))
(defn- default-table-search-row
  "Expected search-result row for a Table named `table-name`, based on
  `default-search-row`."
  [table-name]
  (assoc default-search-row
         :name                table-name
         :table_name          table-name
         :table_id            true
         :archived            nil
         :model               "table"
         :database_id         true
         :initial_sync_status "incomplete"))
;; Run BODY once per user in USERS, binding USER-BINDING to the current user
;; and wrapping each run in a `testing` context that names that user.
(defmacro ^:private do-test-users {:style/indent 1} [[user-binding users] & body]
  `(doseq [user# ~users
           :let  [~user-binding user#]]
     (testing (format "\nuser = %s" user#)
       ~@body)))
;; Table search behaviour: tables are searchable by name and display name,
;; hidden/inactive tables are excluded, archived=true never matches tables,
;; and data perms gate visibility.
(deftest table-test
  (testing "You should see Tables in the search results!\n"
    (mt/with-temp Table [_ {:name "Round Table"}]
      (do-test-users [user [:crowberto :rasta]]
        (is (= [(default-table-search-row "Round Table")]
               (search-request-data user :q "Round Table"))))))
  (testing "You should not see hidden tables"
    (mt/with-temp* [Table [_normal {:name "Foo Visible"}]
                    Table [_hidden {:name "Foo Hidden", :visibility_type "hidden"}]]
      (do-test-users [user [:crowberto :rasta]]
        (is (= [(default-table-search-row "Foo Visible")]
               (search-request-data user :q "Foo"))))))
  (testing "You should be able to search by their display name"
    (let [lancelot "Lancelot's Favorite Furniture"]
      (mt/with-temp Table [_ {:name "Round Table" :display_name lancelot}]
        (do-test-users [user [:crowberto :rasta]]
          ;; the result row's :name is the display name, not the raw table name
          (is (= [(assoc (default-table-search-row "Round Table") :name lancelot)]
                 (search-request-data user :q "Lancelot")))))))
  (testing "When searching with ?archived=true, normal Tables should not show up in the results"
    (let [table-name (mt/random-name)]
      (mt/with-temp Table [_ {:name table-name}]
        (do-test-users [user [:crowberto :rasta]]
          (is (= []
                 (search-request-data user :q table-name :archived true)))))))
  (testing "*archived* tables should not appear in search results"
    (let [table-name (mt/random-name)]
      (mt/with-temp Table [_ {:name table-name, :active false}]
        (do-test-users [user [:crowberto :rasta]]
          (is (= []
                 (search-request-data user :q table-name)))))))
  (testing "you should not be able to see a Table if the current user doesn't have permissions for that Table"
    (mt/with-temp* [Database [{db-id :id}]
                    Table    [table {:db_id db-id}]]
      (perms/revoke-data-perms! (perms-group/all-users) db-id)
      (is (= []
             (binding [*search-request-results-database-id* db-id]
               (search-request-data :rasta :q (:name table))))))))
;; Regression test for #12332: a user in a group with table perms should see
;; the table even when the All Users group has had its perms revoked.
(deftest all-users-no-perms-table-test
  (testing (str "If the All Users group doesn't have perms to view a Table, but the current User is in a group that "
                "does have perms, they should still be able to see it (#12332)")
    (mt/with-temp* [Database                   [{db-id :id}]
                    Table                      [table {:name "Round Table", :db_id db-id}]
                    PermissionsGroup           [{group-id :id}]
                    PermissionsGroupMembership [_ {:group_id group-id, :user_id (mt/user->id :rasta)}]]
      (perms/revoke-data-perms! (perms-group/all-users) db-id (:schema table) (:id table))
      (perms/grant-permissions! group-id (perms/table-read-path table))
      (do-test-users [user [:crowberto :rasta]]
        (is (= [(default-table-search-row "Round Table")]
               (binding [*search-request-results-database-id* db-id]
                 (search-request-data user :q "Round Table"))))))))
;; Regression test for #16855: a user whose only group (All Users) lacks data
;; perms on a table must not see that table in search results.
(deftest all-users-no-data-perms-table-test
  (testing "If the All Users group doesn't have perms to view a Table they shouldn't see it (#16855)"
    (mt/with-temp* [Database [{db-id :id}]
                    Table    [table {:name "Round Table", :db_id db-id}]]
      (perms/revoke-data-perms! (perms-group/all-users) db-id (:schema table) (:id table))
      (is (= []
             (filter #(= (:name %) "Round Table")
                     (binding [*search-request-results-database-id* db-id]
                       (search-request-data :rasta :q "Round Table"))))))))
;; Collections living in a non-default namespace (e.g. "currency" snippet
;; collections) must not appear in search results.
(deftest collection-namespaces-test
  (testing "Search should only return Collections in the 'default' namespace"
    (mt/with-temp* [Collection [_c1 {:name "Normal Collection"}]
                    Collection [_c2 {:name "Coin Collection", :namespace "currency"}]]
      (is (= ["Normal Collection"]
             (->> (search-request-data :crowberto :q "Collection")
                  (filter #(and (= (:model %) "collection")
                                (#{"Normal Collection" "Coin Collection"} (:name %))))
                  (map :name)))))))
;; Regression test for #14190: once a Pulse gets a :dashboard_id (i.e. becomes
;; a dashboard subscription) it must disappear from search results.
(deftest no-dashboard-subscription-pulses-test
  (testing "Pulses used for Dashboard subscriptions should not be returned by search results (#14190)"
    (letfn [(search-for-pulses [{pulse-id :id}]
              ;; NOTE(review): unlike other calls in this namespace, this
              ;; request omits the expected-status 200 argument — confirm
              ;; this is intentional.
              (->> (:data (mt/user-http-request :crowberto :get "search?q=electro"))
                   (filter #(and (= (:model %) "pulse")
                                 (= (:id %) pulse-id)))
                   first))]
      (mt/with-temp Pulse [pulse {:name "Electro-Magnetic Pulse"}]
        (testing "sanity check: should be able to fetch a Pulse normally"
          (is (schema= {:name     (s/eq "Electro-Magnetic Pulse")
                        s/Keyword s/Any}
                       (search-for-pulses pulse))))
        (mt/with-temp* [Card      [card-1]
                        PulseCard [_ {:pulse_id (:id pulse), :card_id (:id card-1)}]
                        Card      [card-2]
                        PulseCard [_ {:pulse_id (:id pulse), :card_id (:id card-2)}]]
          (testing "Create some Pulse Cards: should still be able to search for it it"
            (is (schema= {:name     (s/eq "Electro-Magnetic Pulse")
                          s/Keyword s/Any}
                         (search-for-pulses pulse))))
          (testing "Now make this Pulse a dashboard subscription; Pulse should no longer come back from search-results"
            (mt/with-temp* [Dashboard [dashboard]]
              (db/update! Pulse (:id pulse) :dashboard_id (:id dashboard))
              (is (= nil
                     (search-for-pulses pulse))))))))))
;; Search should index the SQL text of native queries (so "aggregation"
;; matches the native card and the dataset) but NOT the serialized MBQL of
;; structured queries (the MBQL card also contains :aggregation yet must not
;; match).
(deftest card-dataset-query-test
  (testing "Search results should match a native query's dataset_query column, but not an MBQL query's one."
    (let [native-card {:name          "Another SQL query"
                       :query_type    "native"
                       :dataset_query (mt/native-query {:query "SELECT COUNT(1) AS aggregation FROM venues"})}]
      (mt/with-temp* [Card [_mbql-card {:name          "Venues Count"
                                        :query_type    "query"
                                        :dataset_query (mt/mbql-query venues {:aggregation [[:count]]})}]
                      Card [_native-card native-card]
                      Card [_dataset (assoc native-card :name "Dataset" :dataset true)]]
        (is (= ["Another SQL query" "Dataset"]
               (->> (search-request-data :rasta :q "aggregation")
                    (map :name))))))))
;; App-backed collections: results in an app collection should be annotated
;; with :app_id, the collection itself is reported with model "app", and
;; models=app filters to app collections only.
(deftest app-test
  (testing "App collections should come with app_id set"
    (with-search-items-in-collection {:keys [collection]} "test"
      (mt/with-temp App [_app {:collection_id (:id collection)}]
        (is (= (mapv
                (fn [result]
                  (cond-> result
                    ;; metrics/segments have no collection, so no :app_id there
                    (not (#{"metric" "segment"} (:model result))) (assoc-in [:collection :app_id] true)
                    (= (:model result) "collection") (assoc :model "app" :app_id true)))
                (default-results-with-collection))
               (search-request-data :rasta :q "test"))))))
  (testing "App collections should filterable as \"app\""
    (mt/with-temp* [Collection [collection {:name "App collection to find"}]
                    App        [_ {:collection_id (:id collection)}]
                    Collection [_ {:name "Another collection to find"}]]
      (is (partial= [(assoc (select-keys collection [:name])
                            :model "app")]
                    (search-request-data :rasta :q "find" :models "app"))))))
;; Dashboards flagged :is_app_page should be filterable via models=page and
;; reported with model "page"; ordinary dashboards matching the query are
;; excluded by that filter.
(deftest page-test
  (testing "Search results should pages with model \"page\""
    (mt/with-temp* [Dashboard [_ {:name "Not a page but contains important text!"}]
                    Dashboard [page {:name        "Page"
                                     :description "Contains important text!"
                                     :is_app_page true}]]
      (is (partial= [(assoc (select-keys page [:name :description])
                            :model "page")]
                    (search-request-data :rasta :q "important text" :models "page"))))))
;; Regression test for #25213: the :app_id hydrated onto a result's
;; :collection must belong to that collection, not be confused with the
;; collection id (a decoy app on another collection guards against mix-ups).
(deftest collection-app-id-test
  (testing "app_id and id of containing collection should not be confused (#25213)"
    (mt/with-temp* [Collection    [{coll-id :id}]
                    Collection    [{ignored-collection-id :id}]
                    App           [_ignored-app {:collection_id ignored-collection-id}]
                    App           [{app-id :id} {:collection_id coll-id}]
                    Dashboard     [_ {:name          "Not a page but contains important text!"
                                      :collection_id coll-id}]
                    Dashboard     [_ {:name          "Page"
                                      :description   "Contains important text!"
                                      :collection_id coll-id
                                      :is_app_page   true}]
                    Card          [_ {:name          "Query looking for important text"
                                      :query_type    "native"
                                      :dataset_query (mt/native-query {:query "SELECT 0 FROM venues"})
                                      :collection_id coll-id}]
                    Pulse         [_ {:name          "Pulse about important text"
                                      :collection_id coll-id}]]
      (is (not= app-id coll-id) "app-id and coll-id should be different. Fix the test!")
      (is (partial= (repeat 4 {:collection {:app_id app-id
                                            :id     coll-id}})
                    (:data (make-search-request :rasta [:q "important text"])))))))
|
c81dbfe9eba7d172ac6699bd75726051d286ac28e2d6284b585a50abe9b1994d | donaldsonjw/bigloo | gstelement.scm | ;*=====================================================================*/
;* .../project/bigloo/api/gstreamer/src/Llib/gstelement.scm */
;* ------------------------------------------------------------- */
;*    Author      :                                                    */
;*    Creation    :  Sun Dec 30 15:46:10 2007                          */
;*    Last change :  Tue Nov 15 16:56:12 2011 (serrano)                */
;*    Copyright   :  2007-11                                           */
;*    ------------------------------------------------------------- */
;*    GstElement wrapper                                              */
;*=====================================================================*/
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module __gstreamer_gstelement
(include "gst.sch")
(import __gstreamer_gsterror
__gstreamer_gstobject
__gstreamer_gstpluginfeature
__gstreamer_gststructure
__gstreamer_gstelementfactory
__gstreamer_gstcaps
__gstreamer_gstpad
__gstreamer_gstreamer)
(extern (macro %gst-lock!::obj () "bgl_gst_lock")
(macro %gst-unlock!::obj () "bgl_gst_unlock")
(export $gst-state->obj "bgl_gst_state_to_obj")
(export $make-gst-element "bgl_gst_element_new"))
(export (class gst-element::gst-object
(element-factory::gst-element-factory
read-only
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($make-gst-element-factory
($gst-element-get-factory
($gst-element $builtin))
#f)))))
(interface-list::pair-nil
read-only
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($gst-element-interface-list
($gst-element $builtin))))))
(name::string
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($gst-element-get-name
($gst-element $builtin)))))
(set
(lambda (o v)
(with-access::gst-element o ($builtin)
($gst-element-set-name!
($gst-element $builtin) v))))))
($make-gst-element ::$gst-element ::obj)
(gst-element-state::symbol ::gst-element #!optional (timeout #l0))
(gst-element-state-set!::symbol ::gst-element ::symbol)
(gst-element-pad::obj ::gst-element ::bstring)
(gst-element-add-pad! ::gst-element ::gst-pad)
(gst-element-compatible-pad::obj ::gst-element ::gst-pad ::gst-caps)
(gst-element-query-position::llong ::gst-element)
(gst-element-query-duration::llong ::gst-element)
(gst-element-seek::bool ::gst-element ::llong)
(gst-element-link! ::gst-element ::gst-element . els)
(gst-element-link-filtered! ::gst-element ::gst-element ::gst-caps)
(gst-element-link-mime! ::gst-element ::gst-element ::bstring . ::obj)
(gst-element-unlink! ::gst-element ::gst-element . els)
($gst-state->obj::symbol ::$gst-state)))
;*---------------------------------------------------------------------*/
;*    $make-gst-element ...                                            */
;*    -------------------------------------------------------------    */
;*    Wrap a raw $gst-element pointer into a gst-element instance.     */
;*    FINALIZER is stored so the underlying GStreamer object can be    */
;*    released when the Bigloo wrapper is reclaimed.                   */
;*---------------------------------------------------------------------*/
(define ($make-gst-element element::$gst-element finalizer::obj)
   (instantiate::gst-element
      ($builtin ($gst-element->object element))
      ($finalizer finalizer)))
;*---------------------------------------------------------------------*/
;*    object-display ::gst-element ...                                 */
;*    -------------------------------------------------------------    */
;*    Print EL as "<type refcount=N name=NAME>" on PORT (defaulting    */
;*    to the current output port).                                     */
;*---------------------------------------------------------------------*/
(define-method (object-display o::gst-element . port)
   (let ((out (if (pair? port) (car port) (current-output-port))))
      (with-access::gst-element o (name)
	 (display "<" out)
	 (display (find-runtime-type o) out)
	 (display " refcount=" out)
	 (with-access::gst-object o ($builtin)
	    (display ($gst-object-refcount $builtin) out))
	 (display " name=" out)
	 (display name out)
	 (display ">" out))))
;*---------------------------------------------------------------------*/
;*    gst-element-query-position ...                                   */
;*    -------------------------------------------------------------    */
;*    Query the current playback position of EL, returned as an        */
;*    llong (see the ::llong export declaration).                      */
;*---------------------------------------------------------------------*/
(define (gst-element-query-position el::gst-element)
   (with-access::gst-element el ($builtin)
      ($gst-element-query-position ($gst-element $builtin))))
;*---------------------------------------------------------------------*/
;*    gst-element-query-duration ...                                   */
;*    -------------------------------------------------------------    */
;*    Query the total stream duration of EL, returned as an llong.     */
;*---------------------------------------------------------------------*/
(define (gst-element-query-duration el::gst-element)
   (with-access::gst-element el ($builtin)
      ($gst-element-query-duration ($gst-element $builtin))))
;*---------------------------------------------------------------------*/
;*    gst-element-seek ...                                             */
;*    -------------------------------------------------------------    */
;*    Seek EL to position V (llong, time format).  Returns a bool      */
;*    indicating whether the seek was handled.                         */
;*---------------------------------------------------------------------*/
(define (gst-element-seek el::gst-element v)
   (with-access::gst-element el ($builtin)
      ($gst-element-seek-simple
       ($gst-element $builtin)
       ;; time-format seek, flushing and snapping to the nearest key unit
       $gst-format-time (bit-or $gst-seek-flag-flush $gst-seek-flag-key-unit) v)))
;*---------------------------------------------------------------------*/
;*    gst-element-link! ...                                            */
;*    -------------------------------------------------------------    */
;*    Link EL0 -> EL1 -> ELS... pairwise, in order.  Raises a          */
;*    &gst-error when any link fails or when an extra argument is      */
;*    not a gst-element.                                               */
;*---------------------------------------------------------------------*/
(define (gst-element-link! el0::gst-element el1::gst-element . els)
   ;; link a single src/dst pair, raising on failure
   (define (link! src dst)
      (if (isa? dst gst-element)
	  (with-access::gst-element src ((src-builtin $builtin))
	     (with-access::gst-element dst ((dst-builtin $builtin))
		(unless ($gst-element-link! ($gst-element src-builtin)
			   ($gst-element dst-builtin))
		   (raise (instantiate::&gst-error
			     (proc 'gst-element-link!)
			     (msg "Element cannot be linked")
			     (obj (list src dst)))))))
	  (raise (instantiate::&gst-error
		    (proc 'gst-element-link!)
		    (msg "Illegal element ")
		    (obj dst)))))
   (link! el0 el1)
   ;; chain the remaining elements: el1->els[0], els[0]->els[1], ...
   (let loop ((src el1)
	      (els els))
      (when (pair? els)
	 (link! src (car els))
	 (loop (car els) (cdr els))))
   #unspecified)
;*---------------------------------------------------------------------*/
;*    gst-element-link-filtered! ...                                   */
;*    -------------------------------------------------------------    */
;*    Link E0 -> E1 constrained by the capabilities CAPS.  Raises a    */
;*    &gst-error when the link cannot be established.                  */
;*---------------------------------------------------------------------*/
(define (gst-element-link-filtered! e0 e1 caps)
   (with-access::gst-element e0 ((e0-builtin $builtin))
      (with-access::gst-element e1 ((e1-builtin $builtin))
	 (with-access::gst-caps caps ((caps-builtin $builtin))
	    (unless ($gst-element-link-filtered! ($gst-element e0-builtin)
		       ($gst-element e1-builtin)
		       ($gst-caps caps-builtin))
	       (raise (instantiate::&gst-error
			 (proc 'gst-element-link-filtered!)
			 (msg "Element cannot be linked")
			 (obj (list e0 e1 caps)))))))))
;*---------------------------------------------------------------------*/
;*    gst-element-link-mime! ...                                       */
;*    -------------------------------------------------------------    */
;*    Convenience over gst-element-link-filtered!: build simple caps   */
;*    from MIME-TYPE and PROPS and link E0 -> E1 through them.         */
;*---------------------------------------------------------------------*/
(define (gst-element-link-mime! e0 e1 mime-type . props)
   (gst-element-link-filtered! e0 e1 (apply gst-caps-new-simple mime-type props)))
;*---------------------------------------------------------------------*/
;*    gst-element-unlink! ...                                          */
;*    -------------------------------------------------------------    */
;*    Unlink EL0 -> EL1 -> ELS... pairwise, mirroring the chaining     */
;*    performed by gst-element-link!.                                  */
;*---------------------------------------------------------------------*/
(define (gst-element-unlink! el0::gst-element el1::gst-element . els)
   ;; unlink a single src/dst pair
   (define (unlink! src dst)
      (with-access::gst-element src ((src-builtin $builtin))
	 (with-access::gst-element dst ((dst-builtin $builtin))
	    ($gst-element-unlink!
	     ($gst-element src-builtin) ($gst-element dst-builtin)))))
   (unlink! el0 el1)
   (let loop ((src el1)
	      (els els))
      (when (pair? els)
	 (unlink! src (car els))
	 (loop (car els) (cdr els))))
   #unspecified)
;*---------------------------------------------------------------------*/
;*    $gst-state ...                                                   */
;*    -------------------------------------------------------------    */
;*    Map a state symbol (void-pending, null, ready, paused, playing)  */
;*    to the corresponding foreign $gst-state constant.  Raises a      */
;*    &gst-error on any other symbol.                                  */
;*---------------------------------------------------------------------*/
(define ($gst-state::$gst-state state::symbol)
   (case state
      ((void-pending) $gst-state-void-pending)
      ((null) $gst-state-null)
      ((ready) $gst-state-ready)
      ((paused) $gst-state-paused)
      ((playing) $gst-state-playing)
      (else (raise (instantiate::&gst-error
		      (proc '$gst-state)
		      (msg "Illegal state")
		      (obj state))))))
;*---------------------------------------------------------------------*/
;*    $gst-state->obj ...                                              */
;*    -------------------------------------------------------------    */
;*    Inverse of $gst-state: map a foreign $gst-state constant back    */
;*    to its symbol, 'unknown for unrecognized values.  Exported to    */
;*    C as bgl_gst_state_to_obj (see the module clause).               */
;*---------------------------------------------------------------------*/
(define ($gst-state->obj::symbol state::$gst-state)
   (cond
      ((=fx state $gst-state-void-pending) 'void-pending)
      ((=fx state $gst-state-null) 'null)
      ((=fx state $gst-state-ready) 'ready)
      ((=fx state $gst-state-paused) 'paused)
      ((=fx state $gst-state-playing) 'playing)
      (else 'unknown)))
;*---------------------------------------------------------------------*/
;*    $gst-state-change-return->obj ...                                */
;*    -------------------------------------------------------------    */
;*    Map a foreign state-change-return constant to one of the         */
;*    symbols failure, success, async, no-preroll or unknown.          */
;*---------------------------------------------------------------------*/
(define ($gst-state-change-return->obj::obj state::$gst-state-change-return)
   (cond
      ((eq? state $gst-state-change-failure) 'failure)
      ((eq? state $gst-state-change-success) 'success)
      ((eq? state $gst-state-change-async) 'async)
      ((eq? state $gst-state-change-no-preroll) 'no-preroll)
      (else 'unknown)))
;*---------------------------------------------------------------------*/
;*    gst-element-state-set! ...                                       */
;*    -------------------------------------------------------------    */
;*    Request a state change on EL (STATE is one of the symbols        */
;*    accepted by $gst-state) and return the outcome as a symbol       */
;*    (failure, success, async, no-preroll or unknown).                */
;*---------------------------------------------------------------------*/
(define (gst-element-state-set! el state)
   ;; flush pending finalizers while holding the global gstreamer lock
   ;; before triggering the state change
   (%gst-lock!)
   (%gst-thread-init!)
   ($gst-invoke-finalizers)
   (%gst-unlock!)
   (with-access::gst-element el ($builtin)
      ($gst-state-change-return->obj
       ($gst-element-set-state! ($gst-element $builtin)
	  ($gst-state state)))))
;*---------------------------------------------------------------------*/
;*    gst-element-state ...                                            */
;*    -------------------------------------------------------------    */
;*    Query the state of EL.  A TIMEOUT <= 0 waits indefinitely        */
;*    ($gst-clock-time-none).                                          */
;*    NOTE(review): despite the name, the returned symbol is the       */
;*    state-CHANGE outcome mapped by $gst-state-change-return->obj,    */
;*    and the two out-parameters are passed as 0 0 — confirm this is   */
;*    the intended behavior.                                           */
;*---------------------------------------------------------------------*/
(define (gst-element-state el #!optional (timeout #l0))
   (with-access::gst-element el ($builtin)
      ($gst-state-change-return->obj
       ($gst-element-get-state ($gst-element $builtin)
	  0 0
	  (if (<=llong timeout #l0)
	      $gst-clock-time-none
	      timeout)))))
;*---------------------------------------------------------------------*/
;*    gst-element-pad ...                                              */
;*    -------------------------------------------------------------    */
;*    Fetch the pad NAME of EL.  Tries a static pad first, then a      */
;*    request pad; request pads carry a finalizer that releases them   */
;*    back to the element.  When no pad of that name exists the        */
;*    result is unspecified (no gst-pad is instantiated).              */
;*---------------------------------------------------------------------*/
(define (gst-element-pad el name)
   (with-access::gst-element el ((el-builtin $builtin))
      (let* (($el::$gst-element ($gst-element el-builtin))
	     ($spad ($gst-element-get-static-pad $el name)))
	 (if ($gst-pad-null? $spad)
	     ;; no static pad: fall back to a request pad
	     (let (($rpad ($gst-element-get-request-pad $el name)))
		(unless ($gst-pad-null? $rpad)
		   (instantiate::gst-pad
		      ($builtin ($gst-element->object $rpad))
		      ;; request pads must be released to their element
		      ($finalizer (lambda (o)
				     (with-access::gst-element o ((o-builtin $builtin))
					(%gst-object-finalize-closures! o)
					($gst-element-release-request-pad!
					 ($gst-element el-builtin)
					 ($gst-pad o-builtin))))))))
	     (instantiate::gst-pad
		($builtin ($gst-element->object $spad))
		($finalizer %gst-object-finalize!))))))
;*---------------------------------------------------------------------*/
;*    gst-element-add-pad! ...                                         */
;*    -------------------------------------------------------------    */
;*    Add PAD to EL, raising a &gst-error when the underlying call     */
;*    reports failure.                                                 */
;*---------------------------------------------------------------------*/
(define (gst-element-add-pad! el pad)
   (with-access::gst-element el ((el-builtin $builtin))
      (with-access::gst-pad pad ((pad-builtin $builtin))
	 (unless ($gst-element-add-pad! ($gst-element el-builtin)
		    ($gst-pad pad-builtin))
	    (raise (instantiate::&gst-error
		      (proc 'gst-element-add-pad!)
		      (msg "Cannot add pad")
		      (obj (list el pad))))))))
;*---------------------------------------------------------------------*/
;*    gst-element-compatible-pad ...                                   */
;*    -------------------------------------------------------------    */
;*    Find a pad of EL compatible with PAD under CAPS, wrapping it     */
;*    into a gst-pad; result is unspecified when no compatible pad     */
;*    exists.                                                          */
;*    NOTE(review): CAPS-BUILTIN is passed without the ($gst-caps ...) */
;*    accessor used by sibling calls such as link-filtered! — confirm  */
;*    the foreign signature really expects the raw builtin here.       */
;*---------------------------------------------------------------------*/
(define (gst-element-compatible-pad el pad caps)
   (with-access::gst-element el ((el-builtin $builtin))
      (with-access::gst-pad pad ((pad-builtin $builtin))
	 (with-access::gst-caps caps ((caps-builtin $builtin))
	    ;; NB: this local `pad` shadows the gst-pad parameter above
	    (let ((pad::$gst-pad ($gst-element-get-compatible-pad
				  ($gst-element el-builtin)
				  ($gst-pad pad-builtin)
				  caps-builtin)))
	       (unless ($gst-pad-null? pad)
		  (instantiate::gst-pad
		     ($builtin ($gst-element->object pad))
		     ($finalizer %gst-object-finalize!))))))))
| null | https://raw.githubusercontent.com/donaldsonjw/bigloo/a4d06e409d0004e159ce92b9908719510a18aed5/api/gstreamer/src/Llib/gstelement.scm | scheme | *=====================================================================*/
* .../project/bigloo/api/gstreamer/src/Llib/gstelement.scm */
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
*=====================================================================*/
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* $make-gst-element ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* object-display ::gst-element ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-query-position ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-query-duration ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-seek ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-link! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-link-filtered! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-link-mime! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-link! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* $gst-state ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* $gst-state->obj ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* $gst-state-change-return->obj ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-state-set! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-state ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-pad ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-add-pad! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* gst-element-compatible-pad ... */
*---------------------------------------------------------------------*/ | * Author : * /
* Creation : Sun Dec 30 15:46:10 2007 * /
* Last change : Tue Nov 15 16:56:12 2011 ( serrano ) * /
* Copyright : 2007 - 11 * /
* GstElement wrapper * /
(module __gstreamer_gstelement
(include "gst.sch")
(import __gstreamer_gsterror
__gstreamer_gstobject
__gstreamer_gstpluginfeature
__gstreamer_gststructure
__gstreamer_gstelementfactory
__gstreamer_gstcaps
__gstreamer_gstpad
__gstreamer_gstreamer)
(extern (macro %gst-lock!::obj () "bgl_gst_lock")
(macro %gst-unlock!::obj () "bgl_gst_unlock")
(export $gst-state->obj "bgl_gst_state_to_obj")
(export $make-gst-element "bgl_gst_element_new"))
(export (class gst-element::gst-object
(element-factory::gst-element-factory
read-only
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($make-gst-element-factory
($gst-element-get-factory
($gst-element $builtin))
#f)))))
(interface-list::pair-nil
read-only
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($gst-element-interface-list
($gst-element $builtin))))))
(name::string
(get
(lambda (o)
(with-access::gst-element o ($builtin)
($gst-element-get-name
($gst-element $builtin)))))
(set
(lambda (o v)
(with-access::gst-element o ($builtin)
($gst-element-set-name!
($gst-element $builtin) v))))))
($make-gst-element ::$gst-element ::obj)
(gst-element-state::symbol ::gst-element #!optional (timeout #l0))
(gst-element-state-set!::symbol ::gst-element ::symbol)
(gst-element-pad::obj ::gst-element ::bstring)
(gst-element-add-pad! ::gst-element ::gst-pad)
(gst-element-compatible-pad::obj ::gst-element ::gst-pad ::gst-caps)
(gst-element-query-position::llong ::gst-element)
(gst-element-query-duration::llong ::gst-element)
(gst-element-seek::bool ::gst-element ::llong)
(gst-element-link! ::gst-element ::gst-element . els)
(gst-element-link-filtered! ::gst-element ::gst-element ::gst-caps)
(gst-element-link-mime! ::gst-element ::gst-element ::bstring . ::obj)
(gst-element-unlink! ::gst-element ::gst-element . els)
($gst-state->obj::symbol ::$gst-state)))
(define ($make-gst-element element::$gst-element finalizer::obj)
(instantiate::gst-element
($builtin ($gst-element->object element))
($finalizer finalizer)))
(define-method (object-display o::gst-element . port)
(with-access::gst-element o (name)
(let ((p (if (pair? port) (car port) (current-output-port))))
(display "<" p)
(display (find-runtime-type o) p)
(display " refcount=" p)
(with-access::gst-object o ($builtin)
(display ($gst-object-refcount $builtin) p))
(display " name=" p)
(display name p)
(display ">" p))))
(define (gst-element-query-position el::gst-element)
(with-access::gst-element el ($builtin)
($gst-element-query-position ($gst-element $builtin))))
(define (gst-element-query-duration el::gst-element)
(with-access::gst-element el ($builtin)
($gst-element-query-duration ($gst-element $builtin))))
(define (gst-element-seek el::gst-element v)
(with-access::gst-element el ($builtin)
($gst-element-seek-simple
($gst-element $builtin)
$gst-format-time (bit-or $gst-seek-flag-flush $gst-seek-flag-key-unit) v)))
(define (gst-element-link! el0::gst-element el1::gst-element . els)
(define (link! src dst)
(if (isa? dst gst-element)
(with-access::gst-element src ((src-builtin $builtin))
(with-access::gst-element dst ((dst-builtin $builtin))
(unless ($gst-element-link! ($gst-element src-builtin)
($gst-element dst-builtin))
(raise (instantiate::&gst-error
(proc 'gst-element-link!)
(msg "Element cannot be linked")
(obj (list src dst)))))))
(raise (instantiate::&gst-error
(proc 'gst-element-link!)
(msg "Illegal element ")
(obj dst)))))
(link! el0 el1)
(let loop ((src el1)
(els els))
(when (pair? els)
(link! src (car els))
(loop (car els) (cdr els))))
#unspecified)
(define (gst-element-link-filtered! e0 e1 caps)
(with-access::gst-element e0 ((e0-builtin $builtin))
(with-access::gst-element e1 ((e1-builtin $builtin))
(with-access::gst-caps caps ((caps-builtin $builtin))
(unless ($gst-element-link-filtered! ($gst-element e0-builtin)
($gst-element e1-builtin)
($gst-caps caps-builtin))
(raise (instantiate::&gst-error
(proc 'gst-element-link-filtered!)
(msg "Element cannot be linked")
(obj (list e0 e1 caps)))))))))
(define (gst-element-link-mime! e0 e1 mime-type . props)
(let ((caps (apply gst-caps-new-simple mime-type props)))
(gst-element-link-filtered! e0 e1 caps)))
(define (gst-element-unlink! el0::gst-element el1::gst-element . els)
(define (unlink! src dst)
(with-access::gst-element src ((src-builtin $builtin))
(with-access::gst-element dst ((dst-builtin $builtin))
($gst-element-unlink!
($gst-element src-builtin) ($gst-element dst-builtin)))))
(unlink! el0 el1)
(let loop ((src el1)
(els els))
(when (pair? els)
(unlink! src (car els))
(loop (car els) (cdr els))))
#unspecified)
(define ($gst-state::$gst-state state::symbol)
(case state
((void-pending) $gst-state-void-pending)
((null) $gst-state-null)
((ready) $gst-state-ready)
((paused) $gst-state-paused)
((playing) $gst-state-playing)
(else (raise (instantiate::&gst-error
(proc '$gst-state)
(msg "Illegal state")
(obj state))))))
(define ($gst-state->obj::symbol state::$gst-state)
(cond
((=fx state $gst-state-void-pending) 'void-pending)
((=fx state $gst-state-null) 'null)
((=fx state $gst-state-ready) 'ready)
((=fx state $gst-state-paused) 'paused)
((=fx state $gst-state-playing) 'playing)
(else 'unknown)))
(define ($gst-state-change-return->obj::obj state::$gst-state-change-return)
(cond
((eq? state $gst-state-change-failure) 'failure)
((eq? state $gst-state-change-success) 'success)
((eq? state $gst-state-change-async) 'async)
((eq? state $gst-state-change-no-preroll) 'no-preroll)
(else 'unknown)))
(define (gst-element-state-set! el state)
(%gst-lock!)
(%gst-thread-init!)
($gst-invoke-finalizers)
(%gst-unlock!)
(with-access::gst-element el ($builtin)
($gst-state-change-return->obj
($gst-element-set-state! ($gst-element $builtin)
($gst-state state)))))
(define (gst-element-state el #!optional (timeout #l0))
(with-access::gst-element el ($builtin)
($gst-state-change-return->obj
($gst-element-get-state ($gst-element $builtin)
0 0
(if (<=llong timeout #l0)
$gst-clock-time-none
timeout)))))
(define (gst-element-pad el name)
(with-access::gst-element el ((el-builtin $builtin))
(let* (($el::$gst-element ($gst-element el-builtin))
($spad ($gst-element-get-static-pad $el name)))
(if ($gst-pad-null? $spad)
(let (($rpad ($gst-element-get-request-pad $el name)))
(unless ($gst-pad-null? $rpad)
(instantiate::gst-pad
($builtin ($gst-element->object $rpad))
($finalizer (lambda (o)
(with-access::gst-element o ((o-builtin $builtin))
(%gst-object-finalize-closures! o)
($gst-element-release-request-pad!
($gst-element el-builtin)
($gst-pad o-builtin))))))))
(instantiate::gst-pad
($builtin ($gst-element->object $spad))
($finalizer %gst-object-finalize!))))))
(define (gst-element-add-pad! el pad)
(with-access::gst-element el ((el-builtin $builtin))
(with-access::gst-pad pad ((pad-builtin $builtin))
(unless ($gst-element-add-pad! ($gst-element el-builtin)
($gst-pad pad-builtin))
(raise (instantiate::&gst-error
(proc 'gst-element-add-pad!)
(msg "Cannot add pad")
(obj (list el pad))))))))
(define (gst-element-compatible-pad el pad caps)
(with-access::gst-element el ((el-builtin $builtin))
(with-access::gst-pad pad ((pad-builtin $builtin))
(with-access::gst-caps caps ((caps-builtin $builtin))
(let ((pad::$gst-pad ($gst-element-get-compatible-pad
($gst-element el-builtin)
($gst-pad pad-builtin)
caps-builtin)))
(unless ($gst-pad-null? pad)
(instantiate::gst-pad
($builtin ($gst-element->object pad))
($finalizer %gst-object-finalize!))))))))
|
8e925eec405f3bede3a0fd6a1d73db3b7cae6e0a4a96d070154a7f71946a4652 | pedromartins/cuboid | Main.hs | {-# LANGUAGE Arrows #-}
module Main where
import FRP.Yampa
import Graphics.UI.GLUT hiding (Level,Vector3(..),normalize)
import Data.IORef
import Types
import Input
import Update
import Graphics
mainSF = parseInput >>> update >>> draw
| Main , initializes Yampa and sets up reactimation loop
main :: IO ()
main = do
newInput <- newIORef NoEvent
oldTime <- newIORef (0 :: Int)
rh <- reactInit (initGL >> return NoEvent) (\_ _ b -> b >> return False)
mainSF
displayCallback $= return ()
keyboardMouseCallback $= Just
(\k ks m _ -> writeIORef newInput (Event $ Keyboard k ks m))
idleCallback $= Just (idle newInput oldTime rh)
oldTime' <- get elapsedTime
writeIORef oldTime oldTime'
mainLoop
-- | Reactimation iteration, supplying the input
idle :: IORef (Event Input) -> IORef Int ->
ReactHandle (Event Input) (IO ()) -> IO ()
idle newInput oldTime rh = do
newInput' <- readIORef newInput
newTime' <- get elapsedTime
oldTime' <- get oldTime
let dt = let dt' = (fromIntegral $ newTime' - oldTime')/50
in if dt' < 0.8 then dt' else 0.8
react rh (dt, Just newInput')
writeIORef oldTime newTime'
return ()
| null | https://raw.githubusercontent.com/pedromartins/cuboid/9bb33b94688497949e52998376cdf5ab4d9c9b5a/Main.hs | haskell | # LANGUAGE Arrows #
| Reactimation iteration, supplying the input | module Main where
import FRP.Yampa
import Graphics.UI.GLUT hiding (Level,Vector3(..),normalize)
import Data.IORef
import Types
import Input
import Update
import Graphics
mainSF = parseInput >>> update >>> draw
| Main , initializes Yampa and sets up reactimation loop
main :: IO ()
main = do
newInput <- newIORef NoEvent
oldTime <- newIORef (0 :: Int)
rh <- reactInit (initGL >> return NoEvent) (\_ _ b -> b >> return False)
mainSF
displayCallback $= return ()
keyboardMouseCallback $= Just
(\k ks m _ -> writeIORef newInput (Event $ Keyboard k ks m))
idleCallback $= Just (idle newInput oldTime rh)
oldTime' <- get elapsedTime
writeIORef oldTime oldTime'
mainLoop
idle :: IORef (Event Input) -> IORef Int ->
ReactHandle (Event Input) (IO ()) -> IO ()
idle newInput oldTime rh = do
newInput' <- readIORef newInput
newTime' <- get elapsedTime
oldTime' <- get oldTime
let dt = let dt' = (fromIntegral $ newTime' - oldTime')/50
in if dt' < 0.8 then dt' else 0.8
react rh (dt, Just newInput')
writeIORef oldTime newTime'
return ()
|
273c8b656b0a165af1ed4e73a37183b08e1ca8cd7ea9b0fe0b676bacb0fec8e1 | DSiSc/why3 | typeinv.ml | (********************************************************************)
(* *)
The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
(* *)
(* This software is distributed under the terms of the GNU Lesser *)
General Public License version 2.1 , with the special exception
(* on linking described in file LICENSE. *)
(* *)
(********************************************************************)
open Wstdlib
open Ident
open Ty
open Term
open Ity
open Expr
open Pdecl
let ls_valid =
let v = create_tvsymbol (id_fresh "a") in
create_psymbol (id_fresh "valid") [ty_var v]
let its_solid s =
not s.its_fragile && (* no need to go any further *)
List.for_all (fun f -> f.its_frozen) s.its_arg_flg &&
List.for_all (fun f -> f.its_frozen) s.its_reg_flg
let is_trusted_constructor _kn ls =
ls.ls_constr > 0 &&
match (Opt.get ls.ls_value).ty_node with
| Tyapp (s,_) -> not (its_solid (restore_its s))
| _ -> assert false
let is_trusted_projection kn ls ity =
ls.ls_constr = 0 &&
try let rs = restore_rs ls in
if rs.rs_field = None then false else
match (List.hd rs.rs_cty.cty_args).pv_ity.ity_node with
| Ityreg {reg_its = s} | Ityapp (s,_,_) ->
not (its_solid s) &&
(* we don't trust projections of sum types that produce
fragile values, since they may break cap_of_term *)
(not (ity_fragile ity) ||
List.length (Eval_match.ts_constructors kn s.its_ts) <= 1)
| _ -> assert false
with Not_found -> false
let is_trusted_projection_t kn ls t = match t.t_ty with
| Some ty -> is_trusted_projection kn ls (ity_of_ty_pure ty)
| None -> false
Integer - indexed " pins " represent individual values whose
invariant may be broken . Fresh pins are assigned to values
bottom - up , and the canonical representative pin in a UF class
is the minimal one . Thus , a greater pin can never be reached
from a lesser pin . Each pin is associated to a list of fresh
variables that correspond to " temporary fields " . Committing
a pin means that we prove that the temporary fields satisfy
the invariant and then assume that the temporary fields are
equal to the respective projections .
Recursive " caps " represent deconstructible values from which
pins can be reached . Each variable is associated to a cap .
A cap is either a committed value , a pin ( a non - committed
record with a breakable invariant ) , a constructible value
( characterized by the set of possible constructors ) , or
a non - free record with an unbreakable invariant .
invariant may be broken. Fresh pins are assigned to values
bottom-up, and the canonical representative pin in a UF class
is the minimal one. Thus, a greater pin can never be reached
from a lesser pin. Each pin is associated to a list of fresh
variables that correspond to "temporary fields". Committing
a pin means that we prove that the temporary fields satisfy
the invariant and then assume that the temporary fields are
equal to the respective projections.
Recursive "caps" represent deconstructible values from which
pins can be reached. Each variable is associated to a cap.
A cap is either a committed value, a pin (a non-committed
record with a breakable invariant), a constructible value
(characterized by the set of possible constructors), or
a non-free record with an unbreakable invariant. *)
type cap =
| V (* valid *)
| P of int (* pin *)
| C of cap list Mls.t (* algebraic type *)
| R of cap Mls.t (* non-free unbreakable record *)
let isV = function V -> true | _ -> false
let mkP n =
if n = 0 then V else P n
let mkC css =
let chk _ l = List.for_all isV l in
if Mls.for_all chk css then V else C css
let mkR pjs =
let chk _ c = isV c in
if Mls.for_all chk pjs then V else R pjs
let new_index =
let c = ref 0 in
fun () -> incr c; !c
(* Stage I - Inspect: detect values that must be committed and provide
the necessary ls_valid applications. *)
type pin_inspect = {
p_fields : cap Mls.t; (* record fields *)
p_stem : (term * pattern) list; (* deconstruction from a root *)
p_leaf : term; (* term we can be reached from *)
}
let gl_caps = (Wvs.create 7 : (cap * pin_inspect Mint.t) Wvs.t)
let extract_field cs f ty tyl =
let pat_arg ({pv_vs = v} as a) ty = if pv_equal a f
then pat_var (create_vsymbol (id_clone v.vs_name) ty)
else pat_wild ty in
let pl = List.map2 pat_arg cs.rs_cty.cty_args tyl in
let pat = pat_app (ls_of_rs cs) pl ty in
pat, t_var (Svs.choose pat.pat_vars)
let add_gl_cap kn v =
let rp = ref Mint.empty in
let rec down stem leaf ty = match ty.ty_node with
| Tyvar _ -> V
| Tyapp (s,tl) ->
let s = restore_its s in
if its_solid s then V else
let d = find_its_defn kn s in
let sbs = ts_match_args s.its_ts tl in
if s.its_nonfree then if s.its_fragile then (* breakable record *)
let add_field m f =
let vf = fd_of_rs f in
let ty = Ty.ty_inst sbs vf.pv_vs.vs_ty in
let leaf = fs_app (ls_of_rs f) [leaf] ty in
Mls.add (ls_of_rs f) (down stem leaf ty) m in
let pjs = List.fold_left add_field Mls.empty d.itd_fields in
let pin = {p_fields = pjs; p_stem = stem; p_leaf = leaf} in
let n = new_index () in
rp := Mint.add n pin !rp;
mkP n
else (* unbreakable record *)
let add_field m f =
let vf = fd_of_rs f in
let ty = Ty.ty_inst sbs vf.pv_vs.vs_ty in
let leaf = fs_app (ls_of_rs f) [leaf] ty in
Mls.add (ls_of_rs f) (down stem leaf ty) m in
mkR (List.fold_left add_field Mls.empty d.itd_fields)
else if List.length d.itd_constructors == 1 then (* record type *)
let add_field m f = Mpv.add (fd_of_rs f) (ls_of_rs f) m in
let pjm = List.fold_left add_field Mpv.empty d.itd_fields in
let add_constr m c =
let inst f = Ty.ty_inst sbs f.pv_vs.vs_ty in
let tyl = List.map inst c.rs_cty.cty_args in
let conv_field f ty_f =
let leaf = match Mpv.find_opt f pjm with
| Some pj -> fs_app pj [leaf] ty_f
| None -> t_case_close leaf [extract_field c f ty tyl] in
down stem leaf ty_f in
let fdl = List.map2 conv_field c.rs_cty.cty_args tyl in
Mls.add (ls_of_rs c) fdl m in
mkC (List.fold_left add_constr Mls.empty d.itd_constructors)
else (* sum type *)
let add_constr m c =
let inst f = Ty.ty_inst sbs f.pv_vs.vs_ty in
let tyl = List.map inst c.rs_cty.cty_args in
let conv_field f ty_f =
let pat, t = extract_field c f ty tyl in
down ((leaf, pat)::stem) t ty_f in
let fdl = List.map2 conv_field c.rs_cty.cty_args tyl in
Mls.add (ls_of_rs c) fdl m in
mkC (List.fold_left add_constr Mls.empty d.itd_constructors)
in
let c = down [] (t_var v) v.vs_ty in
Wvs.set gl_caps v (c, !rp);
c, !rp
let inspect kn tl =
let rpins = ref Mint.empty in
let rcommit = ref Mint.empty in
let rec cap_valid = function
| V -> ()
| P n ->
let pin = Mint.find n !rpins in
Mls.iter (fun _ c -> cap_valid c) pin.p_fields;
rcommit := Mint.add n pin !rcommit
| C css -> Mls.iter (fun _ fl -> List.iter cap_valid fl) css
| R pjs -> Mls.iter (fun _ c -> cap_valid c) pjs
in
let rec cap_join c1 c2 = match c1, c2 with
| V, c | c, V ->
cap_valid c; V
| P n1, P n2 ->
if n1 = n2 then c1 else begin
cap_valid c1; cap_valid c2; V
end
| C s1, C s2 ->
let join _ l1 l2 = Some (List.map2 cap_join l1 l2) in
mkC (Mls.union join s1 s2)
| R s1, R s2 ->
let join _ c1 c2 = Some (cap_join c1 c2) in
mkR (Mls.union join s1 s2)
| _ -> assert false
in
let rec add_pat caps c p =
if isV c then
Mvs.set_union caps (Mvs.map (fun () -> V) p.pat_vars)
else match p.pat_node with
| Pwild -> caps
| Pvar v -> Mvs.add v c caps
| Papp (cs,pl) -> begin match c with
| C css -> begin match Mls.find_opt cs css with
| Some cl -> List.fold_left2 add_pat caps cl pl
| None -> caps (* impossible branch *) end
| _ -> assert false (* can never happen *) end
| Por (p,_) -> cap_valid c; add_pat caps V p
| Pas (p,v) -> Mvs.add v c (add_pat caps c p)
in
let rec unwind c pjl0 = match c, pjl0 with
| _, [] -> c
| V, _ -> V
| P n, pj::pjl ->
let pin = Mint.find n !rpins in
unwind (Mls.find pj pin.p_fields) pjl
| C css, pj::pjl when Mls.cardinal css = 1 ->
let cs, fl = Mls.choose css in
let fdl = Eval_match.cs_fields kn cs in
let c = Eval_match.select_field pj fdl fl in
unwind c pjl
| C css, pj::pjl ->
let ty = List.hd pj.ls_args in
let add_branch fdl fl cj =
let c = Eval_match.select_field pj fdl fl in
Some (Opt.fold cap_join (unwind c pjl) cj) in
let csl = Eval_match.ty_constructors kn ty in
let add_branch (cs, fdl) acc =
match Mls.find_opt cs css with
| Some fl -> add_branch fdl fl acc
| None -> acc in
Opt.get (List.fold_right add_branch csl None)
| R pjs, pj::pjl ->
unwind (Mls.find pj pjs) pjl
in
let rec down caps pjl t = match t.t_node with
| Tvar v -> (* projection propagation *)
let c = try Mvs.find v caps with Not_found ->
let c, pins = try Wvs.find gl_caps v with
| Not_found -> add_gl_cap kn v in
rpins := Mint.set_union pins !rpins;
c in
unwind c pjl
| Tconst _ -> V
| Tapp (ls,[t1;t2]) when ls_equal ls ps_equ ->
let c1 = down caps pjl t1 in
let c2 = down caps pjl t2 in
ignore (cap_join c1 c2); V
| Tapp (ls,[t1]) when is_trusted_projection_t kn ls t ->
down caps (ls::pjl) t1
| Tapp (ls,tl) when is_trusted_constructor kn ls ->
begin match pjl with
| pj::pjl ->
let fdl = Eval_match.cs_fields kn ls in
let t = Eval_match.select_field pj fdl tl in
down caps pjl t
| [] ->
let cl = List.map (down caps []) tl in
mkC (Mls.singleton ls cl) end
| Tapp (_,tl) ->
let t_valid t = cap_valid (down caps [] t) in
List.iter t_valid tl; V
| Tif (t0,t1,t2) ->
let _ = down caps [] t0 in
let c1 = down caps pjl t1 in
let c2 = down caps pjl t2 in
cap_join c1 c2
| Tlet (t0,tb) ->
let c0 = down caps [] t0 in
let v, t1 = t_open_bound tb in
let caps = Mvs.add v c0 caps in
down caps pjl t1
| Tcase (t0,bl) ->
let c0 = down caps [] t0 in
let add_branch b cj =
let p, t1 = t_open_branch b in
let caps = add_pat caps c0 p in
let c = down caps pjl t1 in
Some (Opt.fold cap_join c cj) in
Opt.get (List.fold_right add_branch bl None)
| Teps tb ->
let v, f = t_open_bound tb in
let caps = Mvs.add v V caps in
ignore (down caps [] f); V
| Tquant (_,tq) ->
let vl, tt, f = t_open_quant tq in
let add caps v = Mvs.add v V caps in
let caps = List.fold_left add caps vl in
(* NOTE: should we commit triggers? *)
let down t = ignore (down caps [] t) in
List.iter (List.iter down) tt; down f; V
| Tbinop (_,f1,f2) ->
ignore (down caps [] f1);
ignore (down caps [] f2); V
| Tnot f ->
ignore (down caps [] f); V
| Ttrue | Tfalse -> V
in
let add_term t = ignore (down Mvs.empty [] t) in
List.iter add_term tl;
let commit pin =
let f = ps_app ls_valid [pin.p_leaf] in
let add f (t, p) = t_case_close t
[p, f; pat_wild p.pat_ty, t_true] in
List.fold_left add f pin.p_stem in
Mint.values (Mint.map commit !rcommit)
(* Stage II - Inject: replace unsafe projections with
temporary fields and expand ls_valid wherever needed. *)
type pin_inject = {
p_vars : (vsymbol * cap) Mls.t; (* temporary fields *)
p_inv : term list; (* instantiated invariant *)
}
let add_var kn pins vl v =
let rv = ref vl in
let rp = ref pins in
let rec down ty = match ty.ty_node with
| Tyvar _ -> V
| Tyapp (s,tl) ->
let s = restore_its s in
if its_solid s then V else
let d = find_its_defn kn s in
let sbs = Ty.ts_match_args s.its_ts tl in
if s.its_nonfree then if s.its_fragile then (* breakable record *)
let bn = v.vs_name.id_string in
let add_field (m,mv) f =
let vf = fd_of_rs f in
let ty = Ty.ty_inst sbs vf.pv_vs.vs_ty in
let nm = bn ^ "_" ^ f.rs_name.id_string in
let v = create_vsymbol (id_fresh nm) ty in
rv := v :: !rv;
let mv = Mvs.add vf.pv_vs (t_var v) mv in
Mls.add (ls_of_rs f) (v, down ty) m, mv in
let pjs, mv = List.fold_left add_field
(Mls.empty, Mvs.empty) d.itd_fields in
let inv = List.map (t_ty_subst sbs mv) d.itd_invariant in
let pin = {p_vars = pjs; p_inv = inv} in
let n = new_index () in
rp := Mint.add n pin !rp;
mkP n
else (* unbreakable record *)
let add_field m f =
let vf = fd_of_rs f in
let ty = Ty.ty_inst sbs vf.pv_vs.vs_ty in
Mls.add (ls_of_rs f) (down ty) m in
mkR (List.fold_left add_field Mls.empty d.itd_fields)
else (* constructible type *)
let add_constr m c =
let field vf = down (Ty.ty_inst sbs vf.pv_vs.vs_ty) in
Mls.add (ls_of_rs c) (List.map field c.rs_cty.cty_args) m in
mkC (List.fold_left add_constr Mls.empty d.itd_constructors)
in
let c = down v.vs_ty in
(* do not inline *) c, !rp, !rv
let add_cap v c caps =
if isV c then caps else Mvs.add v c caps
let rec get_index uf n =
if n = 0 then 0 else
match Mint.find_opt n uf with
| Some n -> get_index uf n
| None -> n
let cap_valid uf c =
let rec down = function
| V -> ()
| P n -> if get_index uf n <> 0 then raise Exit
| C css -> Mls.iter (fun _ fl -> List.iter down fl) css
| R pjs -> Mls.iter (fun _ c -> down c) pjs in
try down c; true with Exit -> false
let rec cap_join uf c1 c2 = match c1, c2 with
| V, c | c, V ->
assert (cap_valid uf c); V
| P n1, P n2 ->
let n1 = get_index uf n1 in
let n2 = get_index uf n2 in
assert (n1 = n2);
mkP n1
| C s1, C s2 ->
let join _ l1 l2 = Some (List.map2 (cap_join uf) l1 l2) in
mkC (Mls.union join s1 s2)
| R s1, R s2 ->
let join _ c1 c2 = Some (cap_join uf c1 c2) in
mkR (Mls.union join s1 s2)
| _ -> assert false
let rec add_pat uf caps c p =
if isV c then caps else
match p.pat_node with
| Pwild -> caps
| Pvar v -> Mvs.add v c caps
| Papp (cs,pl) -> begin match c with
| C css -> begin match Mls.find_opt cs css with
| Some cl -> List.fold_left2 (add_pat uf) caps cl pl
| None -> caps (* impossible branch *) end
| _ -> assert false (* can never happen *) end
| Por _ -> assert (cap_valid uf c); caps
| Pas (p,v) -> Mvs.add v c (add_pat uf caps c p)
let cap_of_term kn uf pins caps t =
let rec unroll t = function
| (pj,t0)::pjl ->
let t = t_app pj [t] t0.t_ty in
unroll (t_attr_copy t0 t) pjl
| [] -> t in
let rec unwind t c pjl0 = match c, pjl0 with
| _, [] -> t, c
| V, _ -> unroll t pjl0, V
| P n, (pj,t0)::pjl ->
let n = get_index uf n in
if n = 0 then unroll t pjl0, V
else let pin = Mint.find n pins in
let v, c = Mls.find pj pin.p_vars in
unwind (t_attr_copy t0 (t_var v)) c pjl
| C css, (pj,t0)::pjl when Mls.cardinal css = 1 ->
let cs, fl = Mls.choose css in
let fdl = Eval_match.cs_fields kn cs in
let c = Eval_match.select_field pj fdl fl in
let t = t_app pj [t] t0.t_ty in
unwind (t_attr_copy t0 t) c pjl
| C css, (pj,t0)::pjl ->
let ty = Opt.get t.t_ty in
let sbs = Ty.ty_match_args ty in
let v0 = create_vsymbol (id_fresh "q") (Opt.get t0.t_ty) in
let t0 = t_attr_copy t0 (t_var v0) and p0 = pat_var v0 in
let add_branch cs fdl fl (bl, cj) =
let mk_pat fd_ty fd = match fd with
| Some ls when ls_equal pj ls -> p0
| _ -> pat_wild (Ty.ty_inst sbs fd_ty) in
let pl = List.map2 mk_pat cs.ls_args fdl in
let c = Eval_match.select_field pj fdl fl in
let t0, c = unwind t0 c pjl in
let b = t_close_branch (pat_app cs pl ty) t0 in
b::bl, Some (Opt.fold (cap_join uf) c cj) in
let csl = Eval_match.ty_constructors kn ty in
let add_branch (cs, fdl) acc =
match Mls.find_opt cs css with
| Some fl -> add_branch cs fdl fl acc
| None -> acc in
let bb = match Mls.choose css with
| {ls_constr = len}, _ when len > Mls.cardinal css ->
let v = create_vsymbol (id_fresh "q") ty in
[t_close_branch (pat_var v) (unroll (t_var v) pjl0)]
| _ -> [] in
let bl, c = List.fold_right add_branch csl (bb, None) in
t_case t bl, Opt.get c
| R pjs, (pj,t0)::pjl ->
let c = Mls.find pj pjs in
let t = t_app pj [t] t0.t_ty in
unwind (t_attr_copy t0 t) c pjl
in
let rec down caps pjl t = match t.t_node with
| Tvar v -> (* projection propagation *)
unwind t (Mvs.find_def V v caps) pjl
| Tconst _ -> (* constants are valid *)
unroll t pjl, V
| Tapp (ls,[t1;t2]) when ls_equal ls ps_equ ->
let t1, c1 = down caps pjl t1 in
let t2, c2 = down caps pjl t2 in
ignore (cap_join uf c1 c2);
t_attr_copy t (t_equ t1 t2), V
| Tapp (ls,[t1]) when is_trusted_projection_t kn ls t ->
down caps ((ls,t)::pjl) t1
| Tapp (ls,tl) when is_trusted_constructor kn ls ->
begin match pjl with
| (pj,t0)::pjl ->
let fdl = Eval_match.cs_fields kn ls in
let t = Eval_match.select_field pj fdl tl in
down caps pjl (t_attr_copy t0 t)
| [] ->
let tl, cl = List.split (List.map (down caps []) tl) in
let t = t_attr_copy t (t_app ls tl t.t_ty) in
t, mkC (Mls.singleton ls cl) end
| Tapp (ls,tl) ->
let tl = List.map (fun t ->
let t, c = down caps [] t in
assert (cap_valid uf c); t) tl in
unroll (t_attr_copy t (t_app ls tl t.t_ty)) pjl, V
| Tif (t0,t1,t2) ->
let t0, _ = down caps [] t0 in
let t1, c1 = down caps pjl t1 in
let t2, c2 = down caps pjl t2 in
t_attr_copy t (t_if t0 t1 t2), cap_join uf c1 c2
| Tlet (t0,tb) ->
let t0, c0 = down caps [] t0 in
let v, t1 = t_open_bound tb in
let caps = add_cap v c0 caps in
let t1, c1 = down caps pjl t1 in
t_attr_copy t (t_let_close v t0 t1), c1
| Tcase (t0,bl) ->
let t0, c0 = down caps [] t0 in
let add_branch b (bl, cj) =
let p, t1 = t_open_branch b in
let caps = add_pat uf caps c0 p in
let t1, c = down caps pjl t1 in
let b = t_close_branch p t1 in
b::bl, Some (Opt.fold (cap_join uf) c cj) in
let bl, c = List.fold_right add_branch bl ([], None) in
t_attr_copy t (t_case t0 bl), Opt.get c
| Teps tb ->
let v, f = t_open_bound tb in
let f, _ = down caps [] f in
unroll (t_attr_copy t (t_eps_close v f)) pjl, V
| Tquant (q,tq) ->
let vl, tt, t0 = t_open_quant tq in
let down t = fst (down caps [] t) in
let tt = List.map (List.map down) tt in
let tq = t_close_quant vl tt (down t0) in
t_attr_copy t (t_quant q tq), V
| Tbinop (op,f1,f2) ->
let f1, _ = down caps [] f1 in
let f2, _ = down caps [] f2 in
t_attr_copy t (t_binary op f1 f2), V
| Tnot f ->
let f, _ = down caps [] f in
t_attr_copy t (t_not f), V
| Ttrue | Tfalse ->
t, V
in
down caps [] t
let find_term_fields kn cs t =
let ty = Opt.get t.t_ty in
let sbs = Ty.ty_match_args ty in
let fdl = Eval_match.cs_fields kn cs in
let add_pat ty (pl,pll) =
let pw = pat_wild (Ty.ty_inst sbs ty) in
let pv = pat_var (create_vsymbol (id_fresh "v") pw.pat_ty) in
pw :: pl, (pv :: pl) :: List.map (fun pl -> pw :: pl) pll in
let _, pll = List.fold_right add_pat cs.ls_args ([],[]) in
let conv pl = function
| Some pj -> t_app_infer pj [t]
| None ->
let p = pat_app cs pl ty in
let v = Svs.choose p.pat_vars in
t_case_close t [p, t_var v] in
List.map2 conv pll fdl
let cap_equality kn uf pins f t1 c1 t2 c2 =
let rec commit t c fl uf = match c with
| V -> fl, uf
| P n ->
let n = get_index uf n in
if n = 0 then fl, uf else
let p = Mint.find n pins in
let uf = Mint.add n 0 uf in
let add pj (v,c) (fl,uf) =
let tv = t_var v in
let t = t_app_infer pj [t] in
let fl, uf = commit tv c fl uf in
t_equ tv t :: fl, uf in
Mls.fold add p.p_vars (fl,uf)
| C css when (fst (Mls.choose css)).ls_constr = 1 ->
css can not be empty and has at most one elt
let cs, cl = Mls.choose css in
let tl = find_term_fields kn cs t in
let add t c (fl, uf) = commit t c fl uf in
List.fold_right2 add tl cl (fl, uf)
| C css ->
let ty = Opt.get t.t_ty in
let sbs = Ty.ty_match_args ty in
let branch cs cl bl =
let add ty c (pl,fl,uf) =
let v = create_vsymbol (id_fresh "v") (ty_inst sbs ty) in
let fl', uf = commit (t_var v) c fl uf in
let p = if fl' == fl then pat_wild v.vs_ty else pat_var v in
p::pl, fl', uf in
let pl, fl, _ = List.fold_right2 add cs.ls_args cl ([],[],uf) in
t_close_branch (pat_app cs pl ty) (t_and_l fl) :: bl in
let bb = match Mls.choose css with
| {ls_constr = len}, _ when len > Mls.cardinal css ->
[t_close_branch (pat_wild ty) t_true]
| _ -> [] in
t_case t (Mls.fold branch css bb) :: fl, uf
| R pjs ->
let add pj c (fl,uf) = commit (t_app_infer pj [t]) c fl uf in
Mls.fold add pjs (fl,uf)
in
let rec down t1 c1 t2 c2 fl uf = match c1, c2 with
| V, _ -> commit t2 c2 fl uf
| _, V -> commit t1 c1 fl uf
| P n1, P n2 ->
let n1 = get_index uf n1 in
let n2 = get_index uf n2 in
if n1 = n2 then fl, uf else
if n1 = 0 then commit t2 (mkP n2) fl uf else
if n2 = 0 then commit t1 (mkP n1) fl uf else
let p1 = Mint.find n1 pins in
let p2 = Mint.find n2 pins in
let uf = if n1 < n2 then
Mint.add n2 n1 uf else Mint.add n1 n2 uf in
let add _pj (v1,c1) (v2,c2) (fl,uf) =
let t1 = t_var v1 and t2 = t_var v2 in
let fl, uf = down t1 c1 t2 c2 fl uf in
t_equ t1 t2 :: fl, uf in
Mls.fold2_inter add p1.p_vars p2.p_vars (fl,uf)
| C css1, C css2 when (fst (Mls.choose css1)).ls_constr = 1 ->
css1 and css2 can not be empty and have at most one elt
let cs, cl1 = Mls.choose css1 in
let _, cl2 = Mls.choose css2 in
let tl1 = find_term_fields kn cs t1 in
let tl2 = find_term_fields kn cs t2 in
let rec add tl1 cl1 tl2 cl2 acc = match tl1,cl1,tl2,cl2 with
| t1::tl1, c1::cl1, t2::tl2, c2::cl2 ->
let fl, uf = add tl1 cl1 tl2 cl2 acc in
down t1 c1 t2 c2 fl uf
| _ -> acc in
add tl1 cl1 tl2 cl2 (fl,uf)
| C _css1, C _css2 ->
TODO
| R pjs1, R pjs2 ->
let add pj c1 c2 (fl,uf) =
let t1 = t_app_infer pj [t1] in
let t2 = t_app_infer pj [t2] in
down t1 c1 t2 c2 fl uf in
Mls.fold2_inter add pjs1 pjs2 (fl,uf)
| _ -> assert false (* never *) in
let fl, uf = down t1 c1 t2 c2 [] uf in
t_and_l (f :: fl), uf
let uf_inter uf1 uf2 =
let uf1 = Mint.map (get_index uf1) uf1 in
let uf2 = Mint.map (get_index uf2) uf2 in
let inter n m1 m2 acc =
if m1 = m2 then acc else
let easy = if m1 < m2
then get_index uf1 m2 = m1
else get_index uf2 m1 = m2 in
if easy then acc else
let inner b = Some (match b with
| Some m -> min m n
| None -> n) in
let outer b = Some (match b with
| Some m -> Mint.change inner m2 m
| None -> Mint.singleton m2 n) in
Mint.change outer m1 acc in
let map = Mint.fold2_inter inter uf1 uf2 Mint.empty in
let inter n m1 m2 =
if m1 = m2 then Some m1 else
try let m = Mint.find m2 (Mint.find m1 map) in
if m = n then None else Some m
with Not_found -> Some (max m1 m2) in
Mint.inter inter uf1 uf2
(* Stage II -- Inject: rewrite a formula, replacing [ls_valid]
   applications by their meaning and unsafe projections by temporary
   fields (via [cap_of_term]). Threads a union-find [uf] over pin
   indices recording which pins have been committed (mapped to 0).
   [pos] tracks polarity; returns the rewritten formula and the
   resulting union-find. *)
let rec inject kn uf pins caps pos f = match f.t_node with
  | Tvar _ | Tconst _ | Teps _ -> assert false (* never *)
  (* positive occurrence of [valid t]: produce the instantiated
     invariant of t's pin (its temporary fields must all be valid) *)
  | Tapp (ls,[t]) when pos && ls_equal ls ls_valid ->
      let _, c = cap_of_term kn uf pins caps t in
      let n = match c with
        | V -> 0
        | P n -> get_index uf n
        | _ -> assert false (* never *) in
      if n = 0 then t_true, uf else
      let p = Mint.find n pins in
      let check _ (_,c) = assert (cap_valid uf c) in
      Mls.iter check p.p_vars;
      let inv = List.map (t_attr_copy f) p.p_inv in
      t_and_asym_l inv, uf
  (* negative occurrence of [valid t]: commit the pin (map it to 0)
     and equate each temporary field with the actual projection *)
  | Tapp (ls,[t]) when not pos && ls_equal ls ls_valid ->
      let t, c = cap_of_term kn uf pins caps t in
      let n = match c with
        | V -> 0
        | P n -> get_index uf n
        | _ -> assert false (* never *) in
      if n = 0 then t_true, uf else
      let p = Mint.find n pins in
      let uf = Mint.add n 0 uf in
      let add pj (v,c) fl =
        assert (cap_valid uf c);
        let t = t_app_infer pj [t] in
        t_equ (t_var v) t :: fl in
      t_attr_copy f (t_and_l (Mls.fold add p.p_vars [])), uf
  (* negative equality: delegate to cap_equality, which may merge or
     commit the pins reachable from the two sides *)
  | Tapp (ls,[t1;t2]) when not pos && ls_equal ls ps_equ ->
      let t1, c1 = cap_of_term kn uf pins caps t1 in
      let t2, c2 = cap_of_term kn uf pins caps t2 in
      let f = t_attr_copy f (t_equ t1 t2) in
      cap_equality kn uf pins f t1 c1 t2 c2
  | _ when Sattr.mem annot_attr f.t_attrs ->
      fst (cap_of_term kn uf pins caps f), uf
  | Tapp _ ->
      fst (cap_of_term kn uf pins caps f), uf
  (* only commitments made in both branches survive *)
  | Tif (f0,f1,f2) ->
      let f0, _ = cap_of_term kn uf pins caps f0 in
      let f1, uf1 = inject kn uf pins caps pos f1 in
      let f2, uf2 = inject kn uf pins caps pos f2 in
      t_attr_copy f (t_if f0 f1 f2), uf_inter uf1 uf2
  | Tlet (t0,fb) ->
      let t0, c0 = cap_of_term kn uf pins caps t0 in
      let v, f1 = t_open_bound fb in
      let caps = add_cap v c0 caps in
      let f1, uf = inject kn uf pins caps pos f1 in
      t_attr_copy f (t_let_close v t0 f1), uf
  (* branches are folded right-to-left; the union-finds of all
     branches are intersected *)
  | Tcase (t0,bl) ->
      let t0, c0 = cap_of_term kn uf pins caps t0 in
      let add_branch b (bl, ufj) =
        let p, f1 = t_open_branch b in
        let caps = add_pat uf caps c0 p in
        let f1, uf1 = inject kn uf pins caps pos f1 in
        let b = t_close_branch p f1 in
        b::bl, Some (Opt.fold uf_inter uf1 ufj) in
      let bl, uf = List.fold_right add_branch bl ([], None) in
      t_attr_copy f (t_case t0 bl), Opt.get uf
  (* quantified variables are assumed valid at "valid" polarity
     (forall in negative, exists in positive position); otherwise
     fresh pins and temporary field variables are introduced *)
  | Tquant (q,fq) ->
      let vl, tt, f0 = t_open_quant fq in
      let down t = fst (cap_of_term kn uf pins caps t) in
      let tt = List.map (List.map down) tt in
      let valid = match q with
        | Tforall -> not pos
        | Texists -> pos in
      let caps, pins, vl =
        if valid then caps, pins, vl else
        let add v (caps, pins, vl) =
          let c, pins, vl = add_var kn pins vl v in
          add_cap v c caps, pins, v::vl in
        List.fold_right add vl (caps, pins, []) in
      let f0, uf = inject kn uf pins caps pos f0 in
      let f0 = t_quant_close_simp q vl tt f0 in
      t_attr_copy f f0, uf
  (* conjunction: commitments flow left-to-right *)
  | Tbinop (Tand,f1,f2) ->
      let f1, uf1 = inject kn uf pins caps pos f1 in
      let f2, uf2 = inject kn uf1 pins caps pos f2 in
      t_attr_copy f (t_and f1 f2), uf2
  (* implication: the premise's union-find feeds the conclusion, but
     NOTE the incoming uf is returned -- commitments made inside the
     implication do not escape it *)
  | Tbinop (Timplies,f1,f2) ->
      let f1, uf1 = inject kn uf pins caps (not pos) f1 in
      let f2, _ = inject kn uf1 pins caps pos f2 in
      t_attr_copy f (t_implies f1 f2), uf
  (* disjunction: only commitments common to both sides survive *)
  | Tbinop (Tor,f1,f2) ->
      let f1, uf1 = inject kn uf pins caps pos f1 in
      let f2, uf2 = inject kn uf pins caps pos f2 in
      t_attr_copy f (t_or f1 f2), uf_inter uf1 uf2
  | Tbinop (Tiff,_,_) ->
      fst (cap_of_term kn uf pins caps f), uf
  | Tnot f1 ->
      let f1, _ = inject kn uf pins caps (not pos) f1 in
      t_attr_copy f (t_not f1), uf
  | Ttrue | Tfalse ->
      f, uf
(* Entry point: run the recursive injection on a closed formula,
   starting from empty union-find, pin, and cap tables, in positive
   position; keep only the rewritten formula. *)
let inject kn f =
  let f, _uf = inject kn Mint.empty Mint.empty Mvs.empty true f in
  f
| null | https://raw.githubusercontent.com/DSiSc/why3/8ba9c2287224b53075adc51544bc377bc8ea5c75/src/mlw/typeinv.ml | ocaml | ******************************************************************
This software is distributed under the terms of the GNU Lesser
on linking described in file LICENSE.
******************************************************************
no need to go any further
we don't trust projections of sum types that produce
fragile values, since they may break cap_of_term
valid
pin
algebraic type
non-free unbreakable record
Stage I - Inspect: detect values that must be committed and provide
the necessary ls_valid applications.
record fields
deconstruction from a root
term we can be reached from
breakable record
unbreakable record
record type
sum type
impossible branch
can never happen
projection propagation
NOTE: should we commit triggers?
Stage II - Inject: replace unsafe projections with
temporary fields and expand ls_valid wherever needed.
temporary fields
instantiated invariant
breakable record
unbreakable record
constructible type
do not inline
impossible branch
can never happen
projection propagation
constants are valid
never
never
never
never | The Why3 Verification Platform / The Why3 Development Team
Copyright 2010 - 2018 -- Inria - CNRS - Paris - Sud University
General Public License version 2.1 , with the special exception
open Wstdlib
open Ident
open Ty
open Term
open Ity
open Expr
open Pdecl
(* A fresh polymorphic unary predicate symbol "valid": [valid t]
   asserts that the type invariant of [t] holds. *)
let ls_valid =
  let v = create_tvsymbol (id_fresh "a") in
  create_psymbol (id_fresh "valid") [ty_var v]
(* A type symbol is "solid" when every type argument and every region
   argument is frozen, i.e. no breakable component is visible. *)
let its_solid s =
  List.for_all (fun f -> f.its_frozen) s.its_arg_flg &&
  List.for_all (fun f -> f.its_frozen) s.its_reg_flg
(* A logic symbol is a trusted constructor when it really is a
   constructor (ls_constr > 0) and its value type is not solid, so
   the constructed value may carry a breakable invariant. *)
let is_trusted_constructor _kn ls =
  ls.ls_constr > 0 &&
  match (Opt.get ls.ls_value).ty_node with
  | Tyapp (s,_) -> not (its_solid s)
  | _ -> assert false
(* A projection [ls] producing a value of type [ity] is trusted when
   it is a record field (not a constructor), its source type is not
   solid, and either the produced value is not fragile or the source
   type has at most one constructor. Projections of sum types that
   produce fragile values are not trusted, since they may break
   cap_of_term. Any failure to restore a program symbol means the
   symbol is purely logical, hence not trusted. *)
let is_trusted_projection kn ls ity =
  ls.ls_constr = 0 &&
  try let rs = restore_rs ls in
    if rs.rs_field = None then false else
    match (List.hd rs.rs_cty.cty_args).pv_ity.ity_node with
    | Ityreg {reg_its = s} | Ityapp (s,_,_) ->
        not (its_solid s) &&
        (not (ity_fragile ity) ||
         List.length (Eval_match.ts_constructors kn s.its_ts) <= 1)
    | _ -> assert false
  with Not_found -> false
(* Same test lifted to a term: use the term's type, if any.
   Propositions (no type) are never trusted projections. *)
let is_trusted_projection_t kn ls t = match t.t_ty with
  | Some ty -> is_trusted_projection kn ls (ity_of_ty_pure ty)
  | None -> false
(* Integer-indexed "pins" represent individual values whose
invariant may be broken . Fresh pins are assigned to values
bottom - up , and the canonical representative pin in a UF class
is the minimal one . Thus , a greater pin can never be reached
from a lesser pin . Each pin is associated to a list of fresh
variables that correspond to " temporary fields " . Committing
a pin means that we prove that the temporary fields satisfy
the invariant and then assume that the temporary fields are
equal to the respective projections .
Recursive " caps " represent deconstructible values from which
pins can be reached . Each variable is associated to a cap .
A cap is either a committed value , a pin ( a non - committed
record with a breakable invariant ) , a constructible value
( characterized by the set of possible constructors ) , or
a non - free record with an unbreakable invariant .
invariant may be broken. Fresh pins are assigned to values
bottom-up, and the canonical representative pin in a UF class
is the minimal one. Thus, a greater pin can never be reached
from a lesser pin. Each pin is associated to a list of fresh
variables that correspond to "temporary fields". Committing
a pin means that we prove that the temporary fields satisfy
the invariant and then assume that the temporary fields are
equal to the respective projections.
Recursive "caps" represent deconstructible values from which
pins can be reached. Each variable is associated to a cap.
A cap is either a committed value, a pin (a non-committed
record with a breakable invariant), a constructible value
(characterized by the set of possible constructors), or
a non-free record with an unbreakable invariant. *)
type cap =
(* Is this cap the fully committed (valid) cap? *)
let isV c = match c with
  | V -> true
  | _ -> false
(* Smart constructor for pins: index 0 denotes a committed value and
   collapses to V; any other index is a genuine pin. *)
let mkP = function
  | 0 -> V
  | n -> P n
(* Smart constructor for constructible caps: collapse to V when every
   field cap of every listed constructor is already valid. *)
let mkC css =
  let chk _ l = List.for_all isV l in
  if Mls.for_all chk css then V else C css
(* Smart constructor for record caps: collapse to V when every field
   cap is already valid. *)
let mkR pjs =
  let chk _ c = isV c in
  if Mls.for_all chk pjs then V else R pjs
(* Generate a fresh positive pin index. Indices start at 1, since 0
   is reserved for committed values. *)
let new_index =
  let c = ref 0 in
  fun () -> incr c; !c
type pin_inspect = {
}
(* Weak per-variable cache of the cap and pin table computed for
   global (free) variables -- filled by add_gl_cap. *)
let gl_caps = (Wvs.create 7 : (cap * pin_inspect Mint.t) Wvs.t)
(* Build a pattern for constructor [cs] at type [ty] that binds
   exactly the field [f] (every other argument is a wildcard), and
   return the pattern together with the variable term bound by it. *)
let extract_field cs f ty tyl =
  let pat_arg ({pv_vs = v} as a) ty = if pv_equal a f
    then pat_var (create_vsymbol (id_clone v.vs_name) ty)
    else pat_wild ty in
  let pl = List.map2 pat_arg cs.rs_cty.cty_args tyl in
  let pat = pat_app (ls_of_rs cs) pl ty in
  pat, t_var (Svs.choose pat.pat_vars)
let add_gl_cap kn v =
let rp = ref Mint.empty in
let rec down stem leaf ty = match ty.ty_node with
| Tyvar _ -> V
| Tyapp (s,tl) ->
let s = restore_its s in
if its_solid s then V else
let d = find_its_defn kn s in
let sbs = ts_match_args s.its_ts tl in
let add_field m f =
let vf = fd_of_rs f in
let ty = Ty.ty_inst sbs vf.pv_vs.vs_ty in
let leaf = fs_app (ls_of_rs f) [leaf] ty in
Mls.add (ls_of_rs f) (down stem leaf ty) m in
let pjs = List.fold_left add_field Mls.empty d.itd_fields in
let pin = {p_fields = pjs; p_stem = stem; p_leaf = leaf} in
let n = new_index () in
rp := Mint.add n pin !rp;
mkP n
let add_field m f =
let vf = fd_of_rs f in
let ty = Ty.ty_inst sbs vf.pv_vs.vs_ty in
let leaf = fs_app (ls_of_rs f) [leaf] ty in
Mls.add (ls_of_rs f) (down stem leaf ty) m in
mkR (List.fold_left add_field Mls.empty d.itd_fields)
let add_field m f = Mpv.add (fd_of_rs f) (ls_of_rs f) m in
let pjm = List.fold_left add_field Mpv.empty d.itd_fields in
let add_constr m c =
let inst f = Ty.ty_inst sbs f.pv_vs.vs_ty in
let tyl = List.map inst c.rs_cty.cty_args in
let conv_field f ty_f =
let leaf = match Mpv.find_opt f pjm with
| Some pj -> fs_app pj [leaf] ty_f
| None -> t_case_close leaf [extract_field c f ty tyl] in
down stem leaf ty_f in
let fdl = List.map2 conv_field c.rs_cty.cty_args tyl in
Mls.add (ls_of_rs c) fdl m in
mkC (List.fold_left add_constr Mls.empty d.itd_constructors)
let add_constr m c =
let inst f = Ty.ty_inst sbs f.pv_vs.vs_ty in
let tyl = List.map inst c.rs_cty.cty_args in
let conv_field f ty_f =
let pat, t = extract_field c f ty tyl in
down ((leaf, pat)::stem) t ty_f in
let fdl = List.map2 conv_field c.rs_cty.cty_args tyl in
Mls.add (ls_of_rs c) fdl m in
mkC (List.fold_left add_constr Mls.empty d.itd_constructors)
in
let c = down [] (t_var v) v.vs_ty in
Wvs.set gl_caps v (c, !rp);
c, !rp
let inspect kn tl =
let rpins = ref Mint.empty in
let rcommit = ref Mint.empty in
let rec cap_valid = function
| V -> ()
| P n ->
let pin = Mint.find n !rpins in
Mls.iter (fun _ c -> cap_valid c) pin.p_fields;
rcommit := Mint.add n pin !rcommit
| C css -> Mls.iter (fun _ fl -> List.iter cap_valid fl) css
| R pjs -> Mls.iter (fun _ c -> cap_valid c) pjs
in
let rec cap_join c1 c2 = match c1, c2 with
| V, c | c, V ->
cap_valid c; V
| P n1, P n2 ->
if n1 = n2 then c1 else begin
cap_valid c1; cap_valid c2; V
end
| C s1, C s2 ->
let join _ l1 l2 = Some (List.map2 cap_join l1 l2) in
mkC (Mls.union join s1 s2)
| R s1, R s2 ->
let join _ c1 c2 = Some (cap_join c1 c2) in
mkR (Mls.union join s1 s2)
| _ -> assert false
in
let rec add_pat caps c p =
if isV c then
Mvs.set_union caps (Mvs.map (fun () -> V) p.pat_vars)
else match p.pat_node with
| Pwild -> caps
| Pvar v -> Mvs.add v c caps
| Papp (cs,pl) -> begin match c with
| C css -> begin match Mls.find_opt cs css with
| Some cl -> List.fold_left2 add_pat caps cl pl
| Por (p,_) -> cap_valid c; add_pat caps V p
| Pas (p,v) -> Mvs.add v c (add_pat caps c p)
in
let rec unwind c pjl0 = match c, pjl0 with
| _, [] -> c
| V, _ -> V
| P n, pj::pjl ->
let pin = Mint.find n !rpins in
unwind (Mls.find pj pin.p_fields) pjl
| C css, pj::pjl when Mls.cardinal css = 1 ->
let cs, fl = Mls.choose css in
let fdl = Eval_match.cs_fields kn cs in
let c = Eval_match.select_field pj fdl fl in
unwind c pjl
| C css, pj::pjl ->
let ty = List.hd pj.ls_args in
let add_branch fdl fl cj =
let c = Eval_match.select_field pj fdl fl in
Some (Opt.fold cap_join (unwind c pjl) cj) in
let csl = Eval_match.ty_constructors kn ty in
let add_branch (cs, fdl) acc =
match Mls.find_opt cs css with
| Some fl -> add_branch fdl fl acc
| None -> acc in
Opt.get (List.fold_right add_branch csl None)
| R pjs, pj::pjl ->
unwind (Mls.find pj pjs) pjl
in
let rec down caps pjl t = match t.t_node with
let c = try Mvs.find v caps with Not_found ->
let c, pins = try Wvs.find gl_caps v with
| Not_found -> add_gl_cap kn v in
rpins := Mint.set_union pins !rpins;
c in
unwind c pjl
| Tconst _ -> V
| Tapp (ls,[t1;t2]) when ls_equal ls ps_equ ->
let c1 = down caps pjl t1 in
let c2 = down caps pjl t2 in
ignore (cap_join c1 c2); V
| Tapp (ls,[t1]) when is_trusted_projection_t kn ls t ->
down caps (ls::pjl) t1
| Tapp (ls,tl) when is_trusted_constructor kn ls ->
begin match pjl with
| pj::pjl ->
let fdl = Eval_match.cs_fields kn ls in
let t = Eval_match.select_field pj fdl tl in
down caps pjl t
| [] ->
let cl = List.map (down caps []) tl in
mkC (Mls.singleton ls cl) end
| Tapp (_,tl) ->
let t_valid t = cap_valid (down caps [] t) in
List.iter t_valid tl; V
| Tif (t0,t1,t2) ->
let _ = down caps [] t0 in
let c1 = down caps pjl t1 in
let c2 = down caps pjl t2 in
cap_join c1 c2
| Tlet (t0,tb) ->
let c0 = down caps [] t0 in
let v, t1 = t_open_bound tb in
let caps = Mvs.add v c0 caps in
down caps pjl t1
| Tcase (t0,bl) ->
let c0 = down caps [] t0 in
let add_branch b cj =
let p, t1 = t_open_branch b in
let caps = add_pat caps c0 p in
let c = down caps pjl t1 in
Some (Opt.fold cap_join c cj) in
Opt.get (List.fold_right add_branch bl None)
| Teps tb ->
let v, f = t_open_bound tb in
let caps = Mvs.add v V caps in
ignore (down caps [] f); V
| Tquant (_,tq) ->
let vl, tt, f = t_open_quant tq in
let add caps v = Mvs.add v V caps in
let caps = List.fold_left add caps vl in
let down t = ignore (down caps [] t) in
List.iter (List.iter down) tt; down f; V
| Tbinop (_,f1,f2) ->
ignore (down caps [] f1);
ignore (down caps [] f2); V
| Tnot f ->
ignore (down caps [] f); V
| Ttrue | Tfalse -> V
in
let add_term t = ignore (down Mvs.empty [] t) in
List.iter add_term tl;
let commit pin =
let f = ps_app ls_valid [pin.p_leaf] in
let add f (t, p) = t_case_close t
[p, f; pat_wild p.pat_ty, t_true] in
List.fold_left add f pin.p_stem in
Mint.values (Mint.map commit !rcommit)
type pin_inject = {
}
let add_var kn pins vl v =
let rv = ref vl in
let rp = ref pins in
let rec down ty = match ty.ty_node with
| Tyvar _ -> V
| Tyapp (s,tl) ->
let s = restore_its s in
if its_solid s then V else
let d = find_its_defn kn s in
let sbs = Ty.ts_match_args s.its_ts tl in
let bn = v.vs_name.id_string in
let add_field (m,mv) f =
let vf = fd_of_rs f in
let ty = Ty.ty_inst sbs vf.pv_vs.vs_ty in
let nm = bn ^ "_" ^ f.rs_name.id_string in
let v = create_vsymbol (id_fresh nm) ty in
rv := v :: !rv;
let mv = Mvs.add vf.pv_vs (t_var v) mv in
Mls.add (ls_of_rs f) (v, down ty) m, mv in
let pjs, mv = List.fold_left add_field
(Mls.empty, Mvs.empty) d.itd_fields in
let inv = List.map (t_ty_subst sbs mv) d.itd_invariant in
let pin = {p_vars = pjs; p_inv = inv} in
let n = new_index () in
rp := Mint.add n pin !rp;
mkP n
let add_field m f =
let vf = fd_of_rs f in
let ty = Ty.ty_inst sbs vf.pv_vs.vs_ty in
Mls.add (ls_of_rs f) (down ty) m in
mkR (List.fold_left add_field Mls.empty d.itd_fields)
let add_constr m c =
let field vf = down (Ty.ty_inst sbs vf.pv_vs.vs_ty) in
Mls.add (ls_of_rs c) (List.map field c.rs_cty.cty_args) m in
mkC (List.fold_left add_constr Mls.empty d.itd_constructors)
in
let c = down v.vs_ty in
(* Record the cap of a variable, omitting committed (V) caps so the
   map stays sparse. *)
let add_cap v c caps = match c with
  | V -> caps
  | _ -> Mvs.add v c caps
(* Union-find lookup (without path compression): follow the parent
   links in [uf] until an unmapped, canonical index is found.
   Index 0 (committed) is always canonical. *)
let rec get_index uf n =
  if n = 0 then 0 else
  match Mint.find_opt n uf with
  | Some n -> get_index uf n
  | None -> n
(* A cap is valid when every pin reachable from it resolves to 0 in
   the union-find, i.e. has been committed. Uses an exception for
   early exit on the first uncommitted pin. *)
let cap_valid uf c =
  let rec down = function
    | V -> ()
    | P n -> if get_index uf n <> 0 then raise Exit
    | C css -> Mls.iter (fun _ fl -> List.iter down fl) css
    | R pjs -> Mls.iter (fun _ c -> down c) pjs in
  try down c; true with Exit -> false
(* Join the caps of two terms assumed equal. Joining with a valid cap
   asserts the other one is valid too; two pins must already be in
   the same union-find class; constructible and record caps are
   joined field-wise. Mixed shapes are impossible by construction. *)
let rec cap_join uf c1 c2 = match c1, c2 with
  | V, c | c, V ->
      assert (cap_valid uf c); V
  | P n1, P n2 ->
      let n1 = get_index uf n1 in
      let n2 = get_index uf n2 in
      assert (n1 = n2);
      mkP n1
  | C s1, C s2 ->
      let join _ l1 l2 = Some (List.map2 (cap_join uf) l1 l2) in
      mkC (Mls.union join s1 s2)
  | R s1, R s2 ->
      let join _ c1 c2 = Some (cap_join uf c1 c2) in
      mkR (Mls.union join s1 s2)
  | _ -> assert false
let rec add_pat uf caps c p =
if isV c then caps else
match p.pat_node with
| Pwild -> caps
| Pvar v -> Mvs.add v c caps
| Papp (cs,pl) -> begin match c with
| C css -> begin match Mls.find_opt cs css with
| Some cl -> List.fold_left2 (add_pat uf) caps cl pl
| Por _ -> assert (cap_valid uf c); caps
| Pas (p,v) -> Mvs.add v c (add_pat uf caps c p)
let cap_of_term kn uf pins caps t =
let rec unroll t = function
| (pj,t0)::pjl ->
let t = t_app pj [t] t0.t_ty in
unroll (t_attr_copy t0 t) pjl
| [] -> t in
let rec unwind t c pjl0 = match c, pjl0 with
| _, [] -> t, c
| V, _ -> unroll t pjl0, V
| P n, (pj,t0)::pjl ->
let n = get_index uf n in
if n = 0 then unroll t pjl0, V
else let pin = Mint.find n pins in
let v, c = Mls.find pj pin.p_vars in
unwind (t_attr_copy t0 (t_var v)) c pjl
| C css, (pj,t0)::pjl when Mls.cardinal css = 1 ->
let cs, fl = Mls.choose css in
let fdl = Eval_match.cs_fields kn cs in
let c = Eval_match.select_field pj fdl fl in
let t = t_app pj [t] t0.t_ty in
unwind (t_attr_copy t0 t) c pjl
| C css, (pj,t0)::pjl ->
let ty = Opt.get t.t_ty in
let sbs = Ty.ty_match_args ty in
let v0 = create_vsymbol (id_fresh "q") (Opt.get t0.t_ty) in
let t0 = t_attr_copy t0 (t_var v0) and p0 = pat_var v0 in
let add_branch cs fdl fl (bl, cj) =
let mk_pat fd_ty fd = match fd with
| Some ls when ls_equal pj ls -> p0
| _ -> pat_wild (Ty.ty_inst sbs fd_ty) in
let pl = List.map2 mk_pat cs.ls_args fdl in
let c = Eval_match.select_field pj fdl fl in
let t0, c = unwind t0 c pjl in
let b = t_close_branch (pat_app cs pl ty) t0 in
b::bl, Some (Opt.fold (cap_join uf) c cj) in
let csl = Eval_match.ty_constructors kn ty in
let add_branch (cs, fdl) acc =
match Mls.find_opt cs css with
| Some fl -> add_branch cs fdl fl acc
| None -> acc in
let bb = match Mls.choose css with
| {ls_constr = len}, _ when len > Mls.cardinal css ->
let v = create_vsymbol (id_fresh "q") ty in
[t_close_branch (pat_var v) (unroll (t_var v) pjl0)]
| _ -> [] in
let bl, c = List.fold_right add_branch csl (bb, None) in
t_case t bl, Opt.get c
| R pjs, (pj,t0)::pjl ->
let c = Mls.find pj pjs in
let t = t_app pj [t] t0.t_ty in
unwind (t_attr_copy t0 t) c pjl
in
let rec down caps pjl t = match t.t_node with
unwind t (Mvs.find_def V v caps) pjl
unroll t pjl, V
| Tapp (ls,[t1;t2]) when ls_equal ls ps_equ ->
let t1, c1 = down caps pjl t1 in
let t2, c2 = down caps pjl t2 in
ignore (cap_join uf c1 c2);
t_attr_copy t (t_equ t1 t2), V
| Tapp (ls,[t1]) when is_trusted_projection_t kn ls t ->
down caps ((ls,t)::pjl) t1
| Tapp (ls,tl) when is_trusted_constructor kn ls ->
begin match pjl with
| (pj,t0)::pjl ->
let fdl = Eval_match.cs_fields kn ls in
let t = Eval_match.select_field pj fdl tl in
down caps pjl (t_attr_copy t0 t)
| [] ->
let tl, cl = List.split (List.map (down caps []) tl) in
let t = t_attr_copy t (t_app ls tl t.t_ty) in
t, mkC (Mls.singleton ls cl) end
| Tapp (ls,tl) ->
let tl = List.map (fun t ->
let t, c = down caps [] t in
assert (cap_valid uf c); t) tl in
unroll (t_attr_copy t (t_app ls tl t.t_ty)) pjl, V
| Tif (t0,t1,t2) ->
let t0, _ = down caps [] t0 in
let t1, c1 = down caps pjl t1 in
let t2, c2 = down caps pjl t2 in
t_attr_copy t (t_if t0 t1 t2), cap_join uf c1 c2
| Tlet (t0,tb) ->
let t0, c0 = down caps [] t0 in
let v, t1 = t_open_bound tb in
let caps = add_cap v c0 caps in
let t1, c1 = down caps pjl t1 in
t_attr_copy t (t_let_close v t0 t1), c1
| Tcase (t0,bl) ->
let t0, c0 = down caps [] t0 in
let add_branch b (bl, cj) =
let p, t1 = t_open_branch b in
let caps = add_pat uf caps c0 p in
let t1, c = down caps pjl t1 in
let b = t_close_branch p t1 in
b::bl, Some (Opt.fold (cap_join uf) c cj) in
let bl, c = List.fold_right add_branch bl ([], None) in
t_attr_copy t (t_case t0 bl), Opt.get c
| Teps tb ->
let v, f = t_open_bound tb in
let f, _ = down caps [] f in
unroll (t_attr_copy t (t_eps_close v f)) pjl, V
| Tquant (q,tq) ->
let vl, tt, t0 = t_open_quant tq in
let down t = fst (down caps [] t) in
let tt = List.map (List.map down) tt in
let tq = t_close_quant vl tt (down t0) in
t_attr_copy t (t_quant q tq), V
| Tbinop (op,f1,f2) ->
let f1, _ = down caps [] f1 in
let f2, _ = down caps [] f2 in
t_attr_copy t (t_binary op f1 f2), V
| Tnot f ->
let f, _ = down caps [] f in
t_attr_copy t (t_not f), V
| Ttrue | Tfalse ->
t, V
in
down caps [] t
(* Given a constructor [cs] and a term [t] of the corresponding type,
   return one term per constructor argument: a direct projection
   application when the field has a projection symbol, otherwise a
   one-branch pattern match extracting that argument. *)
let find_term_fields kn cs t =
  let ty = Opt.get t.t_ty in
  let sbs = Ty.ty_match_args ty in
  let fdl = Eval_match.cs_fields kn cs in
  (* pll collects, for each argument position, a pattern list that
     binds only that position (all others wildcards) *)
  let add_pat ty (pl,pll) =
    let pw = pat_wild (Ty.ty_inst sbs ty) in
    let pv = pat_var (create_vsymbol (id_fresh "v") pw.pat_ty) in
    pw :: pl, (pv :: pl) :: List.map (fun pl -> pw :: pl) pll in
  let _, pll = List.fold_right add_pat cs.ls_args ([],[]) in
  let conv pl = function
    | Some pj -> t_app_infer pj [t]
    | None ->
        let p = pat_app cs pl ty in
        let v = Svs.choose p.pat_vars in
        t_case_close t [p, t_var v] in
  List.map2 conv pll fdl
let cap_equality kn uf pins f t1 c1 t2 c2 =
let rec commit t c fl uf = match c with
| V -> fl, uf
| P n ->
let n = get_index uf n in
if n = 0 then fl, uf else
let p = Mint.find n pins in
let uf = Mint.add n 0 uf in
let add pj (v,c) (fl,uf) =
let tv = t_var v in
let t = t_app_infer pj [t] in
let fl, uf = commit tv c fl uf in
t_equ tv t :: fl, uf in
Mls.fold add p.p_vars (fl,uf)
| C css when (fst (Mls.choose css)).ls_constr = 1 ->
css can not be empty and has at most one elt
let cs, cl = Mls.choose css in
let tl = find_term_fields kn cs t in
let add t c (fl, uf) = commit t c fl uf in
List.fold_right2 add tl cl (fl, uf)
| C css ->
let ty = Opt.get t.t_ty in
let sbs = Ty.ty_match_args ty in
let branch cs cl bl =
let add ty c (pl,fl,uf) =
let v = create_vsymbol (id_fresh "v") (ty_inst sbs ty) in
let fl', uf = commit (t_var v) c fl uf in
let p = if fl' == fl then pat_wild v.vs_ty else pat_var v in
p::pl, fl', uf in
let pl, fl, _ = List.fold_right2 add cs.ls_args cl ([],[],uf) in
t_close_branch (pat_app cs pl ty) (t_and_l fl) :: bl in
let bb = match Mls.choose css with
| {ls_constr = len}, _ when len > Mls.cardinal css ->
[t_close_branch (pat_wild ty) t_true]
| _ -> [] in
t_case t (Mls.fold branch css bb) :: fl, uf
| R pjs ->
let add pj c (fl,uf) = commit (t_app_infer pj [t]) c fl uf in
Mls.fold add pjs (fl,uf)
in
let rec down t1 c1 t2 c2 fl uf = match c1, c2 with
| V, _ -> commit t2 c2 fl uf
| _, V -> commit t1 c1 fl uf
| P n1, P n2 ->
let n1 = get_index uf n1 in
let n2 = get_index uf n2 in
if n1 = n2 then fl, uf else
if n1 = 0 then commit t2 (mkP n2) fl uf else
if n2 = 0 then commit t1 (mkP n1) fl uf else
let p1 = Mint.find n1 pins in
let p2 = Mint.find n2 pins in
let uf = if n1 < n2 then
Mint.add n2 n1 uf else Mint.add n1 n2 uf in
let add _pj (v1,c1) (v2,c2) (fl,uf) =
let t1 = t_var v1 and t2 = t_var v2 in
let fl, uf = down t1 c1 t2 c2 fl uf in
t_equ t1 t2 :: fl, uf in
Mls.fold2_inter add p1.p_vars p2.p_vars (fl,uf)
| C css1, C css2 when (fst (Mls.choose css1)).ls_constr = 1 ->
css1 and css2 can not be empty and have at most one elt
let cs, cl1 = Mls.choose css1 in
let _, cl2 = Mls.choose css2 in
let tl1 = find_term_fields kn cs t1 in
let tl2 = find_term_fields kn cs t2 in
let rec add tl1 cl1 tl2 cl2 acc = match tl1,cl1,tl2,cl2 with
| t1::tl1, c1::cl1, t2::tl2, c2::cl2 ->
let fl, uf = add tl1 cl1 tl2 cl2 acc in
down t1 c1 t2 c2 fl uf
| _ -> acc in
add tl1 cl1 tl2 cl2 (fl,uf)
| C _css1, C _css2 ->
TODO
| R pjs1, R pjs2 ->
let add pj c1 c2 (fl,uf) =
let t1 = t_app_infer pj [t1] in
let t2 = t_app_infer pj [t2] in
down t1 c1 t2 c2 fl uf in
Mls.fold2_inter add pjs1 pjs2 (fl,uf)
let fl, uf = down t1 c1 t2 c2 [] uf in
t_and_l (f :: fl), uf
(* Intersection of two union-find maps over pins: the result records
   only the equivalences present in both. Both maps are first fully
   resolved to canonical representatives. *)
let uf_inter uf1 uf2 =
  let uf1 = Mint.map (get_index uf1) uf1 in
  let uf2 = Mint.map (get_index uf2) uf2 in
  (* first pass: for every index n mapped to different canonicals m1
     and m2, remember in [map] the least witness n for the pair
     (m1,m2), unless one side already identifies m1 and m2 ("easy") *)
  let inter n m1 m2 acc =
    if m1 = m2 then acc else
    let easy = if m1 < m2
      then get_index uf1 m2 = m1
      else get_index uf2 m1 = m2 in
    if easy then acc else
    let inner b = Some (match b with
      | Some m -> min m n
      | None -> n) in
    let outer b = Some (match b with
      | Some m -> Mint.change inner m2 m
      | None -> Mint.singleton m2 n) in
    Mint.change outer m1 acc in
  let map = Mint.fold2_inter inter uf1 uf2 Mint.empty in
  (* second pass: keep a link for n only when both sides agree, or
     redirect it to the recorded witness of its (m1,m2) pair *)
  let inter n m1 m2 =
    if m1 = m2 then Some m1 else
    try let m = Mint.find m2 (Mint.find m1 map) in
      if m = n then None else Some m
    with Not_found -> Some (max m1 m2) in
  Mint.inter inter uf1 uf2
let rec inject kn uf pins caps pos f = match f.t_node with
| Tapp (ls,[t]) when pos && ls_equal ls ls_valid ->
let _, c = cap_of_term kn uf pins caps t in
let n = match c with
| V -> 0
| P n -> get_index uf n
if n = 0 then t_true, uf else
let p = Mint.find n pins in
let check _ (_,c) = assert (cap_valid uf c) in
Mls.iter check p.p_vars;
let inv = List.map (t_attr_copy f) p.p_inv in
t_and_asym_l inv, uf
| Tapp (ls,[t]) when not pos && ls_equal ls ls_valid ->
let t, c = cap_of_term kn uf pins caps t in
let n = match c with
| V -> 0
| P n -> get_index uf n
if n = 0 then t_true, uf else
let p = Mint.find n pins in
let uf = Mint.add n 0 uf in
let add pj (v,c) fl =
assert (cap_valid uf c);
let t = t_app_infer pj [t] in
t_equ (t_var v) t :: fl in
t_attr_copy f (t_and_l (Mls.fold add p.p_vars [])), uf
| Tapp (ls,[t1;t2]) when not pos && ls_equal ls ps_equ ->
let t1, c1 = cap_of_term kn uf pins caps t1 in
let t2, c2 = cap_of_term kn uf pins caps t2 in
let f = t_attr_copy f (t_equ t1 t2) in
cap_equality kn uf pins f t1 c1 t2 c2
| _ when Sattr.mem annot_attr f.t_attrs ->
fst (cap_of_term kn uf pins caps f), uf
| Tapp _ ->
fst (cap_of_term kn uf pins caps f), uf
| Tif (f0,f1,f2) ->
let f0, _ = cap_of_term kn uf pins caps f0 in
let f1, uf1 = inject kn uf pins caps pos f1 in
let f2, uf2 = inject kn uf pins caps pos f2 in
t_attr_copy f (t_if f0 f1 f2), uf_inter uf1 uf2
| Tlet (t0,fb) ->
let t0, c0 = cap_of_term kn uf pins caps t0 in
let v, f1 = t_open_bound fb in
let caps = add_cap v c0 caps in
let f1, uf = inject kn uf pins caps pos f1 in
t_attr_copy f (t_let_close v t0 f1), uf
| Tcase (t0,bl) ->
let t0, c0 = cap_of_term kn uf pins caps t0 in
let add_branch b (bl, ufj) =
let p, f1 = t_open_branch b in
let caps = add_pat uf caps c0 p in
let f1, uf1 = inject kn uf pins caps pos f1 in
let b = t_close_branch p f1 in
b::bl, Some (Opt.fold uf_inter uf1 ufj) in
let bl, uf = List.fold_right add_branch bl ([], None) in
t_attr_copy f (t_case t0 bl), Opt.get uf
| Tquant (q,fq) ->
let vl, tt, f0 = t_open_quant fq in
let down t = fst (cap_of_term kn uf pins caps t) in
let tt = List.map (List.map down) tt in
let valid = match q with
| Tforall -> not pos
| Texists -> pos in
let caps, pins, vl =
if valid then caps, pins, vl else
let add v (caps, pins, vl) =
let c, pins, vl = add_var kn pins vl v in
add_cap v c caps, pins, v::vl in
List.fold_right add vl (caps, pins, []) in
let f0, uf = inject kn uf pins caps pos f0 in
let f0 = t_quant_close_simp q vl tt f0 in
t_attr_copy f f0, uf
| Tbinop (Tand,f1,f2) ->
let f1, uf1 = inject kn uf pins caps pos f1 in
let f2, uf2 = inject kn uf1 pins caps pos f2 in
t_attr_copy f (t_and f1 f2), uf2
| Tbinop (Timplies,f1,f2) ->
let f1, uf1 = inject kn uf pins caps (not pos) f1 in
let f2, _ = inject kn uf1 pins caps pos f2 in
t_attr_copy f (t_implies f1 f2), uf
| Tbinop (Tor,f1,f2) ->
let f1, uf1 = inject kn uf pins caps pos f1 in
let f2, uf2 = inject kn uf pins caps pos f2 in
t_attr_copy f (t_or f1 f2), uf_inter uf1 uf2
| Tbinop (Tiff,_,_) ->
fst (cap_of_term kn uf pins caps f), uf
| Tnot f1 ->
let f1, _ = inject kn uf pins caps (not pos) f1 in
t_attr_copy f (t_not f1), uf
| Ttrue | Tfalse ->
f, uf
(* Public entry point: inject a closed formula starting from empty
   union-find, pin and cap tables, in positive position, discarding
   the final union-find. *)
let inject kn f =
  fst (inject kn Mint.empty Mint.empty Mvs.empty true f)
|
775e52c49d4594ce035248372583ab8895dba472885ab1fcdf5fa50842dd07aa | manzyuk/groebner | Polynomial.hs | # LANGUAGE FlexibleContexts , TypeOperators #
module Polynomial
( Term
, (*^)
, Polynomial
, constant
, variable
, lm
, spoly
, demote
)
where
import Monomial
import Types
import Data.Monoid
-- | A term: a coefficient from the ground ring 'r' attached to a
-- monomial over variables 'v' with monomial-order tag 'o'.
data Term r v o = T r (Monomial v o) deriving (Eq, Show)
-- Terms multiply component-wise: coefficients multiply in 'r', and
-- monomials combine via their monoid operation.
instance (Num r, Ord v) => Semigroup (Term r v o) where
  T a m <> T b n = T (a * b) (m `mappend` n)
-- The unit term: coefficient 1 with the empty monomial.
instance (Num r, Ord v) => Monoid (Term r v o) where
  mempty = T 1 mempty
-- Polynomials are represented as lists of non-zero terms, ordered in
-- descending order by their monomials. This makes equality test and
-- extraction of the leading monomial very simple and fast. Besides,
-- the relative order of terms does not change if the polynomial is
-- multiplied with a term.
-- | A polynomial: a list of non-zero terms kept in strictly
-- descending monomial order (the representation invariant).
newtype Polynomial r v o = P [Term r v o] deriving Eq
-- Multiply a polynomial by a term, term by term. The relative order
-- of the terms is unchanged (cf. the representation notes above), so
-- no re-sorting is needed.
(*^) :: (Num r, Ord v) => Term r v o -> Polynomial r v o -> Polynomial r v o
u *^ P vs = P (map (mappend u) vs)
-- Degree of a polynomial: -1 for the zero polynomial by convention,
-- otherwise the maximum degree over its monomials.
instance Ord v => HasDegree (Polynomial r v o) where
  degree (P []) = -1
  degree (P ts) = maximum [ degree m | T _ m <- ts ]
-- We are trying to make the display of polynomials as close to the
-- mathematical notation as possible. Since we don't know what the
-- ground field 'r' can be, we apply some heuristics.
instance (Eq r, Show r, Num r, Ord v, Show v) => Show (Polynomial r v o) where
  show (P []) = "0"
  show (P (t:ts)) = showHead t ++ showTail ts
    where
      -- Leading term: no prefix for coefficient 1, a bare minus for
      -- -1, otherwise the coefficient as printed by 'show'.
      showHead (T c m) = prefix ++ show m
        where
          prefix = case c of
            1 -> ""
            -1 -> "-"
            _ -> show c
      -- Subsequent terms carry an explicit '+' or '-' sign, omitting
      -- the magnitude when it is 1; coefficients whose signum is
      -- neither 1 nor -1 are shown parenthesized verbatim.
      showTerm (T c m) = prefix ++ show m
        where
          prefix = case signum c of
            1 -> '+':a
            -1 -> '-':a
            _ -> "(" ++ show c ++ ")"
          a = if abs c == 1 then "" else show (abs c)
      showTail = concatMap showTerm
-- Arithmetic operations on polynomials are defined to preserve the
-- invariant of the representation of polynomials: terms are non-zero
-- and kept in descending monomial order.
instance (Eq r, Num r, Ord v, Show v, Ord (Monomial v o))
  => Num (Polynomial r v o) where
  -- Addition merges the two ordered term lists, combining terms with
  -- equal monomials and dropping a term when coefficients cancel.
  f@(P (u@(T a m):us)) + g@(P (v@(T b n):vs))
    | m == n && a + b /= 0
      = let P ws = P us + P vs in P $ T (a + b) m:ws
    | m == n && a + b == 0
      = P us + P vs
    | m < n
      = let P ws = f + P vs in P $ v:ws
    | otherwise
      = let P ws = P us + g in P $ u:ws
  f + P [] = f
  P [] + g = g
  -- (u + us) * (v + vs) = us*vs + u*vs + v*us + u*v; the product of
  -- the two leading terms is emitted as the new head.
  P (u:us) * P (v:vs)
    = let P ws = P us * P vs + u *^ P vs + v *^ P us
      in P $ (u `mappend` v):ws
  _ * P [] = P []
  P [] * _ = P []
  negate (P ts) = P $ [ T (negate a) m | T a m <- ts ]
  -- Inclusion of 'abs' and 'signum' into 'Num' was a stupid idea.
  abs _ = error "abs is undefined for polynomials"
  signum _ = error "signum is undefined for polynomials"
  fromInteger = constant . fromInteger
-- View a ring constant as a polynomial: zero becomes the empty term
-- list, any other constant a single degree-0 term.
constant :: (Eq r, Num r, Ord v) => r -> Polynomial r v o
constant c
  | c == 0    = P []
  | otherwise = P [T c mempty]
-- View a variable 'x' as a polynomial (of degree 1): a single term
-- with coefficient 1 and the monomial injecting 'x'.
variable :: (Num r, Eq v) => v -> Polynomial r v o
variable x = P [T 1 (inject x)]
-- Leading monomial of a polynomial: since terms are stored in
-- descending order, it is the monomial of the first term.
-- Partial: errors out on the zero polynomial.
lm :: Polynomial r v o -> Monomial v o
lm p = case p of
  P (T _ m : _) -> m
  P []          -> error "lm: zero polynomial"
-- s-polynomial of a pair of non-zero polynomials: each argument is
-- scaled so that the leading terms agree and cancel in the
-- difference. Partial: the patterns do not match zero polynomials.
-- (The unused tail/term binders of the original were dropped to
-- silence -Wunused-matches.)
spoly :: (Eq r, Fractional r, Ord v, Show v, Ord (Monomial v o))
      => Polynomial r v o -> Polynomial r v o -> Polynomial r v o
spoly f@(P (T a m : _)) g@(P (T b n : _)) = n' *^ f - m' *^ g
  where
    -- 'complement x y' is presumably the cofactor raising x to
    -- lcm(x, y) -- defined in Monomial; confirm there.
    n' = T 1 (complement m n)
    m' = T (a / b) (complement n m)
-- The following function is only used when the input polynomial is
-- effectively a polynomial from r[v1] to adjust its type: it keeps
-- each coefficient and the v1-part of each interleaved monomial,
-- discarding the (assumed trivial) v2-part.
demote :: ( Fractional r
          , Ord v1, Ord v2
          , Show v1, Show v2
          , Ord (Monomial v1 o1)
          , Ord (Monomial v2 o2))
       => Polynomial r (v1 :<: v2) (o1, o2) -> Polynomial r v1 o1
demote (P us) = P [ T c x | T c z <- us
                  , let (x, _) = uninterleave z ]
| null | https://raw.githubusercontent.com/manzyuk/groebner/3f68f3512f19ee0a4740e6ee204f6b117ecbbcad/src/Polynomial.hs | haskell | descending order by their monomials. This makes equality test and
extraction of the leading monomial very simple and fast. Besides,
the relative order of terms does not change if the polynomial is
multiplied with a term.
Multiply a polynomial with a term.
We are trying to make the display of polynomials as close to the
mathematical notation as possible. Since we don't know what the
ground field 'r' can be, we apply some heuristics.
Arithmetic operations on polynomials are defined to preserve the
invariant of the representation of polynomials.
View a constant 'c' as a polynomial (of degree 0 unless c is 0).
View a variable 'v' as a polynomial (of degree 1).
Leading monomial of a polynomial.
s-polynomial of a pair of polynomials.
The following function is only used when the input polynomial is
effectively a polynomial from r[v1] to adjust its type. | # LANGUAGE FlexibleContexts , TypeOperators #
module Polynomial
( Term
, (*^)
, Polynomial
, constant
, variable
, lm
, spoly
, demote
)
where
import Monomial
import Types
import Data.Monoid
data Term r v o = T r (Monomial v o) deriving (Eq, Show)
instance (Num r, Ord v) => Semigroup (Term r v o) where
T a m <> T b n = T (a * b) (m `mappend` n)
instance (Num r, Ord v) => Monoid (Term r v o) where
mempty = T 1 mempty
-- Polynomials are represented as lists of non-zero terms, ordered in
newtype Polynomial r v o = P [Term r v o] deriving Eq
(*^) :: (Num r, Ord v) => Term r v o -> Polynomial r v o -> Polynomial r v o
u *^ P vs = P [ u `mappend` v | v <- vs ]
instance Ord v => HasDegree (Polynomial r v o) where
degree (P []) = -1
degree (P ts) = maximum [ degree m | T _ m <- ts ]
instance (Eq r, Show r, Num r, Ord v, Show v) => Show (Polynomial r v o) where
show (P []) = "0"
show (P (t:ts)) = showHead t ++ showTail ts
where
showHead (T c m) = prefix ++ show m
where
prefix = case c of
1 -> ""
-1 -> "-"
_ -> show c
showTerm (T c m) = prefix ++ show m
where
prefix = case signum c of
1 -> '+':a
-1 -> '-':a
_ -> "(" ++ show c ++ ")"
a = if abs c == 1 then "" else show (abs c)
showTail = concatMap showTerm
instance (Eq r, Num r, Ord v, Show v, Ord (Monomial v o))
=> Num (Polynomial r v o) where
f@(P (u@(T a m):us)) + g@(P (v@(T b n):vs))
| m == n && a + b /= 0
= let P ws = P us + P vs in P $ T (a + b) m:ws
| m == n && a + b == 0
= P us + P vs
| m < n
= let P ws = f + P vs in P $ v:ws
| otherwise
= let P ws = P us + g in P $ u:ws
f + P [] = f
P [] + g = g
P (u:us) * P (v:vs)
= let P ws = P us * P vs + u *^ P vs + v *^ P us
in P $ (u `mappend` v):ws
_ * P [] = P []
P [] * _ = P []
negate (P ts) = P $ [ T (negate a) m | T a m <- ts ]
-- Inclusion of 'abs' and 'signum' into 'Num' was a stupid idea.
abs _ = error "abs is undefined for polynomials"
signum _ = error "signum is undefined for polynomials"
fromInteger = constant . fromInteger
-- Embed a constant of the ground ring as a polynomial; the zero
-- constant is the empty polynomial.
constant :: (Eq r, Num r, Ord v) => r -> Polynomial r v o
constant c = if c == 0 then P [] else P [T c mempty]
variable :: (Num r, Eq v) => v -> Polynomial r v o
variable x = P [T 1 (inject x)]
-- Leading (greatest) monomial of a non-zero polynomial; errors out
-- on the zero polynomial.
lm :: Polynomial r v o -> Monomial v o
lm (P [])          = error "lm: zero polynomial"
lm (P (T _ m : _)) = m
spoly :: (Eq r, Fractional r, Ord v, Show v, Ord (Monomial v o))
=> Polynomial r v o -> Polynomial r v o -> Polynomial r v o
spoly f@(P (u@(T a m):us)) g@(P (v@(T b n):vs)) = n' *^ f - m' *^ g
where
n' = T 1 (complement m n)
m' = T (a / b) (complement n m)
demote :: ( Fractional r
, Ord v1, Ord v2
, Show v1, Show v2
, Ord (Monomial v1 o1)
, Ord (Monomial v2 o2))
=> Polynomial r (v1 :<: v2) (o1, o2) -> Polynomial r v1 o1
demote (P us) = P [ T c x | T c z <- us
, let (x, _) = uninterleave z ]
|
1e543877c71e900f87550350f1b4ed17b0e8ff7604a376167c7fd130c730c74b | BekaValentine/SimpleFP-v2 | REPL.hs | module Dependent.Unification.REPL where
import Control.Monad.Reader (runReaderT)
import System.IO
import Utils.ABT
import Utils.Env
import Utils.Eval
import Utils.Pretty
import Dependent.Core.ConSig
import Dependent.Core.Evaluation
import Dependent.Core.Parser
import Dependent.Core.Term
import Dependent.Unification.Elaborator
import Dependent.Unification.Elaboration
import Dependent.Unification.TypeChecking
flushStr :: String -> IO ()
flushStr str = putStr str >> hFlush stdout
readPrompt :: String -> IO String
readPrompt prompt = flushStr prompt >> getLine
until_ :: Monad m => (a -> Bool) -> m a -> (a -> m ()) -> m ()
until_ p prompt action = do
result <- prompt
if p result
then return ()
else action result >> until_ p prompt action
repl :: String -> IO ()
repl src = case loadProgram src of
Left e -> flushStr ("ERROR: " ++ e ++ "\n")
Right (sig,defs,ctx,env)
-> do hSetBuffering stdin LineBuffering
until_ (== ":quit")
(readPrompt "$> ")
(evalAndPrint sig defs ctx env)
where
loadProgram :: String -> Either String (Signature,Definitions,Context,Env String Term)
loadProgram src
= do prog <- parseProgram src
(_,ElabState sig defs ctx _ _) <- runElaborator0 (elabProgram prog)
let env = definitionsToEnvironment defs
return (sig,defs,ctx,env)
loadTerm :: Signature -> Definitions -> Context -> Env String Term -> String -> Either String Term
loadTerm sig defs ctx env src
= do tm0 <- parseTerm src
let tm = freeToDefined (In . Defined) tm0
case runElaborator (infer tm) sig defs ctx of
Left e -> Left e
Right _ -> runReaderT (eval tm) env
evalAndPrint :: Signature -> Definitions -> Context -> Env String Term -> String -> IO ()
evalAndPrint _ _ _ _ "" = return ()
evalAndPrint sig defs ctx env src
= case loadTerm sig defs ctx env src of
Left e -> flushStr ("ERROR: " ++ e ++ "\n")
Right v -> flushStr (pretty v ++ "\n")
replFile :: String -> IO ()
replFile loc = readFile loc >>= repl | null | https://raw.githubusercontent.com/BekaValentine/SimpleFP-v2/ae00ec809caefcd13664395b0ae2fc66145f6a74/src/Dependent/Unification/REPL.hs | haskell | module Dependent.Unification.REPL where
import Control.Monad.Reader (runReaderT)
import System.IO
import Utils.ABT
import Utils.Env
import Utils.Eval
import Utils.Pretty
import Dependent.Core.ConSig
import Dependent.Core.Evaluation
import Dependent.Core.Parser
import Dependent.Core.Term
import Dependent.Unification.Elaborator
import Dependent.Unification.Elaboration
import Dependent.Unification.TypeChecking
flushStr :: String -> IO ()
flushStr str = putStr str >> hFlush stdout
readPrompt :: String -> IO String
readPrompt prompt = flushStr prompt >> getLine
until_ :: Monad m => (a -> Bool) -> m a -> (a -> m ()) -> m ()
until_ p prompt action = do
result <- prompt
if p result
then return ()
else action result >> until_ p prompt action
repl :: String -> IO ()
repl src = case loadProgram src of
Left e -> flushStr ("ERROR: " ++ e ++ "\n")
Right (sig,defs,ctx,env)
-> do hSetBuffering stdin LineBuffering
until_ (== ":quit")
(readPrompt "$> ")
(evalAndPrint sig defs ctx env)
where
loadProgram :: String -> Either String (Signature,Definitions,Context,Env String Term)
loadProgram src
= do prog <- parseProgram src
(_,ElabState sig defs ctx _ _) <- runElaborator0 (elabProgram prog)
let env = definitionsToEnvironment defs
return (sig,defs,ctx,env)
loadTerm :: Signature -> Definitions -> Context -> Env String Term -> String -> Either String Term
loadTerm sig defs ctx env src
= do tm0 <- parseTerm src
let tm = freeToDefined (In . Defined) tm0
case runElaborator (infer tm) sig defs ctx of
Left e -> Left e
Right _ -> runReaderT (eval tm) env
evalAndPrint :: Signature -> Definitions -> Context -> Env String Term -> String -> IO ()
evalAndPrint _ _ _ _ "" = return ()
evalAndPrint sig defs ctx env src
= case loadTerm sig defs ctx env src of
Left e -> flushStr ("ERROR: " ++ e ++ "\n")
Right v -> flushStr (pretty v ++ "\n")
replFile :: String -> IO ()
replFile loc = readFile loc >>= repl | |
0e93819a43e074cd1745ac3b762e11f4b0b892d65ac38fe7cc853b94c9d0d78e | reanimate/reanimate | doc_svgLogo.hs | #!/usr/bin/env stack
-- stack runghc --package reanimate
module Main (main) where
import Reanimate
import Reanimate.Builtin.Documentation
import Reanimate.Builtin.Images
main :: IO ()
main = reanimate $ docEnv $ animate $ const svgLogo
| null | https://raw.githubusercontent.com/reanimate/reanimate/5ea023980ff7f488934d40593cc5069f5fd038b0/examples/doc_svgLogo.hs | haskell | stack runghc --package reanimate | #!/usr/bin/env stack
module Main (main) where
import Reanimate
import Reanimate.Builtin.Documentation
import Reanimate.Builtin.Images
main :: IO ()
main = reanimate $ docEnv $ animate $ const svgLogo
|
b77344b03e5c73c539d2a82b2ed7f2e74734dc6a0f73cbf22fa2ebe35949834b | 3b/3bmd | wiki-links.lisp | (defpackage #:3bmd-wiki
(:use #:cl #:esrap #:3bmd-ext)
(:export #:*wiki-links*
#:*wiki-links*
#:*wiki-processor*
#:process-wiki-link
))
(in-package #:3bmd-wiki)
;;; example extension for handling wiki-style [[links]]
;;; extending parser:
define independent rules with esrap : then use
;;; define-extension-inline to add the extension to the main grammar
;; allowing markup for now, to be normalized like ref links during printing...
(defrule wiki-link-label (* (and (! #\]) (! #\|) inline))
(:lambda (a)
(mapcar 'third a)))
(defrule wiki-link-arg (* (and (! "|") (! "]]") character))
(:text t))
(defrule normal-wiki-link (and
"[["
wiki-link-label
(* (and "|" wiki-link-arg))
"]]")
(:destructure ([ label args ])
(declare (ignore [ ]))
(list 'wiki-link :label label :args (mapcar 'second args))))
(defrule quoted-wiki-link (and #\'
;; would be nicer to just use wiki-link
;; rule rather than duplicating it
;; here, but then we'd have to
;; serialize it back to text to put the
;; "[[" back, and worry about
;; whitespace, etc
"[["
(* (and (! #\]) character))
"]]")
(:destructure (q &rest link)
(declare (ignore q))
(text link)))
(define-extension-inline *wiki-links* wiki-link
(or quoted-wiki-link normal-wiki-link)
(:character-rule wiki-link-extended-chars #\| #\' #\=)
(:after 3bmd-grammar:emph))
;;; extending printer:
;;; add a method to print-tagged-element specialized for the values
;;; returned by the new parser rules
(defparameter *wiki-processor* nil
"set to something PROCESS-WIKI-LINK etc will recognize to enable wiki link support in printer (see also *wiki-links* to enable wiki link parsing)")
(defgeneric process-wiki-link (wiki normalized-target formatted-target args stream)
;; just ignore the link by default
(:method ((w null) nt formatted a stream)
(declare (ignore w nt a))
(format stream "~a" formatted)))
;; note that we specialize on a symbol in our package rather than a
;; keyword, to avoid conflicts with other extensions
(defmethod print-tagged-element ((tag (eql 'wiki-link)) stream rest)
(destructuring-bind (&key label args) rest
(let ((formatted (with-output-to-string (s)
(loop for i in label do (print-element i s))))
;; todo: figure out how to normalize formatted links, or
;; restrict the grammar to disalow them
(normalized (print-label-to-string label)))
(process-wiki-link *wiki-processor* normalized formatted args stream))))
#++
(let ((3bmd-wiki:*wiki-links* t))
(esrap:parse 'inline "[[foo|bar]]"))
#++
(let ((3bmd-wiki:*wiki-links* t))
(esrap:parse 'inline "'[[foo|bar]]"))
#++
(let ((3bmd-wiki:*wiki-links* t))
(with-output-to-string (s)
(3bmd:parse-string-and-print-to-stream "[[foo|bar]]" s)))
| null | https://raw.githubusercontent.com/3b/3bmd/6fc5759448f6f6df6f6df556e020a289a2643288/wiki-links.lisp | lisp | example extension for handling wiki-style [[links]]
extending parser:
define-extension-inline to add the extension to the main grammar
allowing markup for now, to be normalized like ref links during printing...
would be nicer to just use wiki-link
rule rather than duplicating it
here, but then we'd have to
serialize it back to text to put the
"[[" back, and worry about
whitespace, etc
extending printer:
add a method to print-tagged-element specialized for the values
returned by the new parser rules
just ignore the link by default
note that we specialize on a symbol in our package rather than a
keyword, to avoid conflicts with other extensions
todo: figure out how to normalize formatted links, or
restrict the grammar to disalow them | (defpackage #:3bmd-wiki
(:use #:cl #:esrap #:3bmd-ext)
(:export #:*wiki-links*
#:*wiki-links*
#:*wiki-processor*
#:process-wiki-link
))
(in-package #:3bmd-wiki)
define independent rules with esrap : then use
(defrule wiki-link-label (* (and (! #\]) (! #\|) inline))
(:lambda (a)
(mapcar 'third a)))
(defrule wiki-link-arg (* (and (! "|") (! "]]") character))
(:text t))
(defrule normal-wiki-link (and
"[["
wiki-link-label
(* (and "|" wiki-link-arg))
"]]")
(:destructure ([ label args ])
(declare (ignore [ ]))
(list 'wiki-link :label label :args (mapcar 'second args))))
(defrule quoted-wiki-link (and #\'
"[["
(* (and (! #\]) character))
"]]")
(:destructure (q &rest link)
(declare (ignore q))
(text link)))
(define-extension-inline *wiki-links* wiki-link
(or quoted-wiki-link normal-wiki-link)
(:character-rule wiki-link-extended-chars #\| #\' #\=)
(:after 3bmd-grammar:emph))
(defparameter *wiki-processor* nil
"set to something PROCESS-WIKI-LINK etc will recognize to enable wiki link support in printer (see also *wiki-links* to enable wiki link parsing)")
(defgeneric process-wiki-link (wiki normalized-target formatted-target args stream)
(:method ((w null) nt formatted a stream)
(declare (ignore w nt a))
(format stream "~a" formatted)))
(defmethod print-tagged-element ((tag (eql 'wiki-link)) stream rest)
(destructuring-bind (&key label args) rest
(let ((formatted (with-output-to-string (s)
(loop for i in label do (print-element i s))))
(normalized (print-label-to-string label)))
(process-wiki-link *wiki-processor* normalized formatted args stream))))
#++
(let ((3bmd-wiki:*wiki-links* t))
(esrap:parse 'inline "[[foo|bar]]"))
#++
(let ((3bmd-wiki:*wiki-links* t))
(esrap:parse 'inline "'[[foo|bar]]"))
#++
(let ((3bmd-wiki:*wiki-links* t))
(with-output-to-string (s)
(3bmd:parse-string-and-print-to-stream "[[foo|bar]]" s)))
|
44f17885e4578464e7f565121f73d344ea4993a58c51c97bf504ef2cacd854d7 | dalaing/little-languages | Check.hs | module STLC.Check where
import Data.List.NonEmpty
import Data.Validation
import Control.Lens
import STLC
import STLC.Type
data Error e = Stuck | Expected e e | Arg Int (Error e)
type Errors e = NonEmpty (Error e)
expect :: Type -> AccValidation (Errors Type) Type -> AccValidation (Errors Type) Type
expect ex = over _Validation expectV
where
expectV :: Validation (Errors Type) Type -> Validation (Errors Type) Type
expectV v = v >>= \ac -> if ex == ac then _Success # ex else _Failure # (Expected ex ac :| [])
expectArg :: Int -> Type -> AccValidation (Errors Type) Type -> AccValidation (Errors Type) Type
expectArg n ex = over _Validation expectArgV
where
expectArgV :: Validation (Errors Type) Type -> Validation (Errors Type) Type
expectArgV v = v >>= \ac -> if ex == ac then _Success # ex else _Failure # (Arg n (Expected ex ac) :| [])
tcLoc :: (Term l n a -> AccValidation (Errors Type) Type) -> Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcLoc step = fmap step . preview (_TmLoc . _2)
tcInt :: Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcInt = fmap (const $ _Success # TyInt) . preview _TmInt
tcBool :: Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcBool = fmap (const $ _Success # TyBool) . preview _TmBool
tcAdd :: (Term l n a -> AccValidation (Errors Type) Type) -> Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcAdd step =
fmap (\(x,y) ->
(_Success # (\_ _ -> TyInt)) <*> expectArg 0 TyInt (step x) <*> expectArg 1 TyInt (step y)
) .
preview _TmAdd
tcEq :: (Term l n a -> AccValidation (Errors Type) Type) -> Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcEq step =
fmap (\(x,y) ->
(_Success # (\_ _ -> TyBool)) <*> expectArg 0 TyInt (step x) <*> expectArg 1 TyInt (step y)
) .
preview _TmEq
tcAnd :: (Term l n a -> AccValidation (Errors Type) Type) -> Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcAnd step =
fmap (\(x,y) ->
(_Success # (\_ _ -> TyBool)) <*> expectArg 0 TyBool (step x) <*> expectArg 1 TyBool (step y)
) .
preview _TmAnd
| null | https://raw.githubusercontent.com/dalaing/little-languages/9f089f646a5344b8f7178700455a36a755d29b1f/code/old/prototypes/arith/src/STLC/Check.hs | haskell | module STLC.Check where
import Data.List.NonEmpty
import Data.Validation
import Control.Lens
import STLC
import STLC.Type
data Error e = Stuck | Expected e e | Arg Int (Error e)
type Errors e = NonEmpty (Error e)
expect :: Type -> AccValidation (Errors Type) Type -> AccValidation (Errors Type) Type
expect ex = over _Validation expectV
where
expectV :: Validation (Errors Type) Type -> Validation (Errors Type) Type
expectV v = v >>= \ac -> if ex == ac then _Success # ex else _Failure # (Expected ex ac :| [])
expectArg :: Int -> Type -> AccValidation (Errors Type) Type -> AccValidation (Errors Type) Type
expectArg n ex = over _Validation expectArgV
where
expectArgV :: Validation (Errors Type) Type -> Validation (Errors Type) Type
expectArgV v = v >>= \ac -> if ex == ac then _Success # ex else _Failure # (Arg n (Expected ex ac) :| [])
tcLoc :: (Term l n a -> AccValidation (Errors Type) Type) -> Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcLoc step = fmap step . preview (_TmLoc . _2)
tcInt :: Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcInt = fmap (const $ _Success # TyInt) . preview _TmInt
tcBool :: Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcBool = fmap (const $ _Success # TyBool) . preview _TmBool
tcAdd :: (Term l n a -> AccValidation (Errors Type) Type) -> Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcAdd step =
fmap (\(x,y) ->
(_Success # (\_ _ -> TyInt)) <*> expectArg 0 TyInt (step x) <*> expectArg 1 TyInt (step y)
) .
preview _TmAdd
tcEq :: (Term l n a -> AccValidation (Errors Type) Type) -> Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcEq step =
fmap (\(x,y) ->
(_Success # (\_ _ -> TyBool)) <*> expectArg 0 TyInt (step x) <*> expectArg 1 TyInt (step y)
) .
preview _TmEq
tcAnd :: (Term l n a -> AccValidation (Errors Type) Type) -> Term l n a -> Maybe (AccValidation (Errors Type) Type)
tcAnd step =
fmap (\(x,y) ->
(_Success # (\_ _ -> TyBool)) <*> expectArg 0 TyBool (step x) <*> expectArg 1 TyBool (step y)
) .
preview _TmAnd
| |
1a6a97280f03c3092b741dccd93d2c63ff9cbac45ef966d6e154c10e490a9919 | mmcqd/pure | statics.mli |
module Make (T : Pure.THEORY) :
sig
exception TypeError of string
val synthtype : Pure.term Pure.Context.t * Pure.term Pure.Context.t -> Pure.term -> Pure.term
end
| null | https://raw.githubusercontent.com/mmcqd/pure/1a1dcb29925c6713dc295822164b4972f4c0bfc9/statics.mli | ocaml |
module Make (T : Pure.THEORY) :
sig
exception TypeError of string
val synthtype : Pure.term Pure.Context.t * Pure.term Pure.Context.t -> Pure.term -> Pure.term
end
| |
c1304c30f0f8187a17e9373d0366297ca5f89324bde72680d19cf866eaa04b06 | haskell-opengl/GLUT | Material.hs |
( adapted from material.c which is ( c ) Silicon Graphics , Inc. )
Copyright ( c ) 2002 - 2018 < >
This file is part of HOpenGL and distributed under a BSD - style license
See the file libraries / GLUT / LICENSE
This program demonstrates the use of the GL lighting model . Several
objects are drawn using different material characteristics . A single
light source illuminates the objects .
Material.hs (adapted from material.c which is (c) Silicon Graphics, Inc.)
Copyright (c) Sven Panne 2002-2018 <>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
This program demonstrates the use of the GL lighting model. Several
objects are drawn using different material characteristics. A single
light source illuminates the objects.
-}
import System.Exit ( exitWith, ExitCode(ExitSuccess) )
import Graphics.UI.GLUT
Initialize z - buffer , projection matrix , light source , and lighting model .
-- Do not specify a material property here.
myInit :: IO ()
myInit = do
clearColor $= Color4 0 0.1 0.1 0
depthFunc $= Just Less
shadeModel $= Smooth
ambient (Light 0) $= Color4 0 0 0 1
diffuse (Light 0) $= Color4 1 1 1 1
position (Light 0) $= Vertex4 0 3 2 0
lightModelAmbient $= Color4 0.4 0.4 0.4 1
lightModelLocalViewer $= Disabled
lighting $= Enabled
light (Light 0) $= Enabled
Draw twelve spheres in 3 rows with 4 columns .
The spheres in the first row have materials with no ambient reflection .
The second row has materials with significant ambient reflection .
The third row has materials with colored ambient reflection .
--
The first column has materials with blue , diffuse reflection only .
The second column has blue diffuse reflection , as well as specular
-- reflection with a low shininess exponent.
The third column has blue diffuse reflection , as well as specular
-- reflection with a high shininess exponent (a more concentrated highlight).
The fourth column has materials which also include an emissive component .
--
-- translate is used to move spheres to their appropriate locations.
display :: DisplayCallback
display = do
clear [ ColorBuffer, DepthBuffer ]
let draw :: GLfloat -> GLfloat -> Color4 GLfloat -> Color4 GLfloat -> Color4 GLfloat -> GLfloat -> Color4 GLfloat -> IO ()
draw row column amb dif spc shi emi =
preservingMatrix $ do
translate (Vector3 (2.5 * (column - 2.5)) (3 * (2 - row)) 0)
materialAmbient Front $= amb
materialDiffuse Front $= dif
materialSpecular Front $= spc
materialShininess Front $= shi
materialEmission Front $= emi
renderObject Solid (Sphere' 1 16 16)
noMat = Color4 0 0 0 1
matAmbient = Color4 0.7 0.7 0.7 1
matAmbientColor = Color4 0.8 0.8 0.2 1
matDiffuse = Color4 0.1 0.5 0.8 1
matSpecular = Color4 1 1 1 1
noShininess = 0
lowShininess = 5
highShininess = 100
matEmission = Color4 0.3 0.2 0.2 0
draw sphere in first row , first column
-- diffuse reflection only; no ambient or specular
draw 1 1 noMat matDiffuse noMat noShininess noMat
draw sphere in first row , second column
-- diffuse and specular reflection; low shininess; no ambient
draw 1 2 noMat matDiffuse matSpecular lowShininess noMat
draw sphere in first row , third column
-- diffuse and specular reflection; high shininess; no ambient
draw 1 3 noMat matDiffuse matSpecular highShininess noMat
draw sphere in first row , fourth column
-- diffuse reflection; emission; no ambient or specular reflection
draw 1 4 noMat matDiffuse noMat noShininess matEmission
draw sphere in second row , first column
-- ambient and diffuse reflection; no specular
draw 2 1 matAmbient matDiffuse noMat noShininess noMat
draw sphere in second row , second column
-- ambient, diffuse and specular reflection; low shininess
draw 2 2 matAmbient matDiffuse matSpecular lowShininess noMat
draw sphere in second row , third column
-- ambient, diffuse and specular reflection; high shininess
draw 2 3 matAmbient matDiffuse matSpecular highShininess noMat
draw sphere in second row , fourth column
-- ambient and diffuse reflection; emission; no specular
draw 2 4 matAmbient matDiffuse noMat noShininess matEmission
draw sphere in third row , first column
-- colored ambient and diffuse reflection; no specular
draw 3 1 matAmbientColor matDiffuse noMat noShininess noMat
draw sphere in third row , second column
-- colored ambient, diffuse and specular reflection; low shininess
draw 3 2 matAmbientColor matDiffuse matSpecular lowShininess noMat
draw sphere in third row , third column
-- colored ambient, diffuse and specular reflection; high shininess
draw 3 3 matAmbientColor matDiffuse matSpecular highShininess noMat
draw sphere in third row , fourth column
-- colored ambient and diffuse reflection; emission; no specular
draw 3 4 matAmbientColor matDiffuse noMat noShininess matEmission
flush
reshape :: ReshapeCallback
reshape size@(Size w h) = do
viewport $= (Position 0 0, size)
matrixMode $= Projection
loadIdentity
let wf = fromIntegral w
hf = fromIntegral h
if w <= h * 2
then ortho (-6) 6 (-3 * (hf * 2) / wf) (3 * (hf * 2) / wf) (-10) 10
else ortho (-6 * wf / (hf * 2)) (6 * wf / (hf * 2)) (-3) 3 (-10) 10
matrixMode $= Modelview 0
loadIdentity
keyboard :: KeyboardMouseCallback
keyboard (Char '\27') Down _ _ = exitWith ExitSuccess
keyboard _ _ _ _ = return ()
main :: IO ()
main = do
(progName, _args) <- getArgsAndInitialize
initialDisplayMode $= [ SingleBuffered, RGBMode, WithDepthBuffer ]
initialWindowSize $= Size 600 450
_ <- createWindow progName
myInit
reshapeCallback $= Just reshape
displayCallback $= display
keyboardMouseCallback $= Just keyboard
mainLoop
| null | https://raw.githubusercontent.com/haskell-opengl/GLUT/36207fa51e4c1ea1e5512aeaa373198a4a56cad0/examples/RedBook4/Material.hs | haskell | Do not specify a material property here.
reflection with a low shininess exponent.
reflection with a high shininess exponent (a more concentrated highlight).
translate is used to move spheres to their appropriate locations.
diffuse reflection only; no ambient or specular
diffuse and specular reflection; low shininess; no ambient
diffuse and specular reflection; high shininess; no ambient
diffuse reflection; emission; no ambient or specular reflection
ambient and diffuse reflection; no specular
ambient, diffuse and specular reflection; low shininess
ambient, diffuse and specular reflection; high shininess
ambient and diffuse reflection; emission; no specular
colored ambient and diffuse reflection; no specular
colored ambient, diffuse and specular reflection; low shininess
colored ambient, diffuse and specular reflection; high shininess
colored ambient and diffuse reflection; emission; no specular |
( adapted from material.c which is ( c ) Silicon Graphics , Inc. )
Copyright ( c ) 2002 - 2018 < >
This file is part of HOpenGL and distributed under a BSD - style license
See the file libraries / GLUT / LICENSE
This program demonstrates the use of the GL lighting model . Several
objects are drawn using different material characteristics . A single
light source illuminates the objects .
Material.hs (adapted from material.c which is (c) Silicon Graphics, Inc.)
Copyright (c) Sven Panne 2002-2018 <>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
This program demonstrates the use of the GL lighting model. Several
objects are drawn using different material characteristics. A single
light source illuminates the objects.
-}
import System.Exit ( exitWith, ExitCode(ExitSuccess) )
import Graphics.UI.GLUT
Initialize z - buffer , projection matrix , light source , and lighting model .
myInit :: IO ()
myInit = do
clearColor $= Color4 0 0.1 0.1 0
depthFunc $= Just Less
shadeModel $= Smooth
ambient (Light 0) $= Color4 0 0 0 1
diffuse (Light 0) $= Color4 1 1 1 1
position (Light 0) $= Vertex4 0 3 2 0
lightModelAmbient $= Color4 0.4 0.4 0.4 1
lightModelLocalViewer $= Disabled
lighting $= Enabled
light (Light 0) $= Enabled
Draw twelve spheres in 3 rows with 4 columns .
The spheres in the first row have materials with no ambient reflection .
The second row has materials with significant ambient reflection .
The third row has materials with colored ambient reflection .
The first column has materials with blue , diffuse reflection only .
The second column has blue diffuse reflection , as well as specular
The third column has blue diffuse reflection , as well as specular
The fourth column has materials which also include an emissive component .
display :: DisplayCallback
display = do
clear [ ColorBuffer, DepthBuffer ]
let draw :: GLfloat -> GLfloat -> Color4 GLfloat -> Color4 GLfloat -> Color4 GLfloat -> GLfloat -> Color4 GLfloat -> IO ()
draw row column amb dif spc shi emi =
preservingMatrix $ do
translate (Vector3 (2.5 * (column - 2.5)) (3 * (2 - row)) 0)
materialAmbient Front $= amb
materialDiffuse Front $= dif
materialSpecular Front $= spc
materialShininess Front $= shi
materialEmission Front $= emi
renderObject Solid (Sphere' 1 16 16)
noMat = Color4 0 0 0 1
matAmbient = Color4 0.7 0.7 0.7 1
matAmbientColor = Color4 0.8 0.8 0.2 1
matDiffuse = Color4 0.1 0.5 0.8 1
matSpecular = Color4 1 1 1 1
noShininess = 0
lowShininess = 5
highShininess = 100
matEmission = Color4 0.3 0.2 0.2 0
draw sphere in first row , first column
draw 1 1 noMat matDiffuse noMat noShininess noMat
draw sphere in first row , second column
draw 1 2 noMat matDiffuse matSpecular lowShininess noMat
draw sphere in first row , third column
draw 1 3 noMat matDiffuse matSpecular highShininess noMat
draw sphere in first row , fourth column
draw 1 4 noMat matDiffuse noMat noShininess matEmission
draw sphere in second row , first column
draw 2 1 matAmbient matDiffuse noMat noShininess noMat
draw sphere in second row , second column
draw 2 2 matAmbient matDiffuse matSpecular lowShininess noMat
draw sphere in second row , third column
draw 2 3 matAmbient matDiffuse matSpecular highShininess noMat
draw sphere in second row , fourth column
draw 2 4 matAmbient matDiffuse noMat noShininess matEmission
draw sphere in third row , first column
draw 3 1 matAmbientColor matDiffuse noMat noShininess noMat
draw sphere in third row , second column
draw 3 2 matAmbientColor matDiffuse matSpecular lowShininess noMat
draw sphere in third row , third column
draw 3 3 matAmbientColor matDiffuse matSpecular highShininess noMat
draw sphere in third row , fourth column
draw 3 4 matAmbientColor matDiffuse noMat noShininess matEmission
flush
reshape :: ReshapeCallback
reshape size@(Size w h) = do
viewport $= (Position 0 0, size)
matrixMode $= Projection
loadIdentity
let wf = fromIntegral w
hf = fromIntegral h
if w <= h * 2
then ortho (-6) 6 (-3 * (hf * 2) / wf) (3 * (hf * 2) / wf) (-10) 10
else ortho (-6 * wf / (hf * 2)) (6 * wf / (hf * 2)) (-3) 3 (-10) 10
matrixMode $= Modelview 0
loadIdentity
keyboard :: KeyboardMouseCallback
keyboard (Char '\27') Down _ _ = exitWith ExitSuccess
keyboard _ _ _ _ = return ()
main :: IO ()
main = do
(progName, _args) <- getArgsAndInitialize
initialDisplayMode $= [ SingleBuffered, RGBMode, WithDepthBuffer ]
initialWindowSize $= Size 600 450
_ <- createWindow progName
myInit
reshapeCallback $= Just reshape
displayCallback $= display
keyboardMouseCallback $= Just keyboard
mainLoop
|
29efaf3855aae5d5ec507e91eae7aec478d9a401d117aef7f63935e762437a4d | rabbitmq/rabbitmq-clusterer | rabbit_clusterer.erl | The contents of this file are subject to the Mozilla Public License
%% Version 1.1 (the "License"); you may not use this file except in
%% compliance with the License. You may obtain a copy of the License at
%% /
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
%% License for the specific language governing rights and limitations
%% under the License.
%%
The Original Code is RabbitMQ .
%%
The Initial Developer of the Original Code is Pivotal Software , Inc.
Portions created by the Initial Developer are Copyright ( C ) 2013 - 2016
Pivotal Software , Inc. All Rights Reserved .
-module(rabbit_clusterer).
-behaviour(application).
-export([boot/0]).
-export([apply_config/0, apply_config/1, %% for 'rabbitmqctl eval ...'
status/0, status/1]).
-export([start/2, stop/1]).
%%----------------------------------------------------------------------------
boot() ->
ok = application:start(rabbitmq_clusterer),
ok = rabbit_clusterer_coordinator:begin_coordination(),
ok.
%% Apply_config allows cluster configs to be dynamically applied to a
running system . Currently that 's best done by , but
%% may be improved in the future.
apply_config() -> apply_config(undefined).
apply_config(Config) -> rabbit_clusterer_coordinator:apply_config(Config).
status() ->
status(node()).
status(Node) ->
{Message, Config, List} =
case rabbit_clusterer_coordinator:request_status(Node) of
preboot ->
{"Clusterer is pre-booting. ~p~n", undefined, []};
{Config1, booting} ->
{"Clusterer is booting Rabbit into cluster configuration: "
"~n~s~n", Config1, []};
{Config1, ready} ->
{"Rabbit is running in cluster configuration: ~n~s~n"
"Running nodes: ~p~n", Config1,
[rabbit_mnesia:cluster_nodes(running)]};
{Config1, {transitioner, join}} ->
{"Clusterer is trying to join into cluster configuration: "
"~n~s~n", Config1, []};
{Config1, {transitioner, rejoin}} ->
{"Clusterer is trying to rejoin cluster configuration: ~n~s~n",
Config1, []}
end,
Config2 = case Config of
undefined -> "";
_ -> rabbit_misc:format(
"~p", [tl(rabbit_clusterer_config:to_proplist(
undefined, Config))])
end,
io:format(Message, [Config2 | List]).
%%----------------------------------------------------------------------------
start(normal, []) -> rabbit_clusterer_sup:start_link().
stop(_State) -> ok.
| null | https://raw.githubusercontent.com/rabbitmq/rabbitmq-clusterer/f08c59ada2d62ef66ff1c02e2ffc404d5c210185/src/rabbit_clusterer.erl | erlang | Version 1.1 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License at
/
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
License for the specific language governing rights and limitations
under the License.
for 'rabbitmqctl eval ...'
----------------------------------------------------------------------------
Apply_config allows cluster configs to be dynamically applied to a
may be improved in the future.
---------------------------------------------------------------------------- | The contents of this file are subject to the Mozilla Public License
Software distributed under the License is distributed on an " AS IS "
The Original Code is RabbitMQ .
The Initial Developer of the Original Code is Pivotal Software , Inc.
Portions created by the Initial Developer are Copyright ( C ) 2013 - 2016
Pivotal Software , Inc. All Rights Reserved .
-module(rabbit_clusterer).
-behaviour(application).
-export([boot/0]).
status/0, status/1]).
-export([start/2, stop/1]).
boot() ->
ok = application:start(rabbitmq_clusterer),
ok = rabbit_clusterer_coordinator:begin_coordination(),
ok.
running system . Currently that 's best done by , but
apply_config() -> apply_config(undefined).
apply_config(Config) -> rabbit_clusterer_coordinator:apply_config(Config).
status() ->
status(node()).
status(Node) ->
{Message, Config, List} =
case rabbit_clusterer_coordinator:request_status(Node) of
preboot ->
{"Clusterer is pre-booting. ~p~n", undefined, []};
{Config1, booting} ->
{"Clusterer is booting Rabbit into cluster configuration: "
"~n~s~n", Config1, []};
{Config1, ready} ->
{"Rabbit is running in cluster configuration: ~n~s~n"
"Running nodes: ~p~n", Config1,
[rabbit_mnesia:cluster_nodes(running)]};
{Config1, {transitioner, join}} ->
{"Clusterer is trying to join into cluster configuration: "
"~n~s~n", Config1, []};
{Config1, {transitioner, rejoin}} ->
{"Clusterer is trying to rejoin cluster configuration: ~n~s~n",
Config1, []}
end,
Config2 = case Config of
undefined -> "";
_ -> rabbit_misc:format(
"~p", [tl(rabbit_clusterer_config:to_proplist(
undefined, Config))])
end,
io:format(Message, [Config2 | List]).
start(normal, []) -> rabbit_clusterer_sup:start_link().
stop(_State) -> ok.
|
0a2764368ecd90abe5c295a576545a4bc232bc9d614abd54ffb5c513d1b8de6d | tomjridge/tjr_kv | summary.t.ml | (** Summary *)
*
{ 2 Architecture }
{ % html :
< img src=" / drawings / d / e/2PACX-1vSnTmJGnVDyxnrBZ_VOVZ7T0O9etqZa - BDPu - EPH9ziiNjY375TMgO - ENB9UO4e - HT3qmtbJKvFOFl0 / pub?w=453&h=373 " >
< img src=" / drawings / d / e/2PACX-1vTIXhyNa7dovQYXuJXBMwPQZU99 - x_tRdTIH3SkMUDyPwbL31zExWXauT2hO - eRIUcnGP3RVHiSHrjt / pub?w=557&h=428 " >
% }
See { ! Kv_store_with_lru } for more details
{ 2 Main interfaces }
{ [
$ ( INCLUDE("GEN.*.ml _ " ) )
] }
{2 Architecture}
{%html:
<img src="-1vSnTmJGnVDyxnrBZ_VOVZ7T0O9etqZa-BDPu-EPH9ziiNjY375TMgO-ENB9UO4e-HT3qmtbJKvFOFl0/pub?w=453&h=373">
<img src="-1vTIXhyNa7dovQYXuJXBMwPQZU99-x_tRdTIH3SkMUDyPwbL31zExWXauT2hO-eRIUcnGP3RVHiSHrjt/pub?w=557&h=428">
%}
See {!Kv_store_with_lru} for more details
{2 Main interfaces}
{[
$(INCLUDE("GEN.*.ml_"))
]}
*)
| null | https://raw.githubusercontent.com/tomjridge/tjr_kv/68b3d2fa0c7a144765a3720d78cffb96d5478b32/src/summary.t.ml | ocaml | * Summary |
*
{ 2 Architecture }
{ % html :
< img src=" / drawings / d / e/2PACX-1vSnTmJGnVDyxnrBZ_VOVZ7T0O9etqZa - BDPu - EPH9ziiNjY375TMgO - ENB9UO4e - HT3qmtbJKvFOFl0 / pub?w=453&h=373 " >
< img src=" / drawings / d / e/2PACX-1vTIXhyNa7dovQYXuJXBMwPQZU99 - x_tRdTIH3SkMUDyPwbL31zExWXauT2hO - eRIUcnGP3RVHiSHrjt / pub?w=557&h=428 " >
% }
See { ! Kv_store_with_lru } for more details
{ 2 Main interfaces }
{ [
$ ( INCLUDE("GEN.*.ml _ " ) )
] }
{2 Architecture}
{%html:
<img src="-1vSnTmJGnVDyxnrBZ_VOVZ7T0O9etqZa-BDPu-EPH9ziiNjY375TMgO-ENB9UO4e-HT3qmtbJKvFOFl0/pub?w=453&h=373">
<img src="-1vTIXhyNa7dovQYXuJXBMwPQZU99-x_tRdTIH3SkMUDyPwbL31zExWXauT2hO-eRIUcnGP3RVHiSHrjt/pub?w=557&h=428">
%}
See {!Kv_store_with_lru} for more details
{2 Main interfaces}
{[
$(INCLUDE("GEN.*.ml_"))
]}
*)
|
d8610e10c4e6c77c724cd1bd67eb9a0d9252f0c2ee463d63a9f14f1913e4d6e9 | giorgidze/Hydra | Desugar.hs | module Hydra.Stages.Desugar (desugar) where
import Hydra.BNFC.AbsHydra
-- | Desugaring pass over a signal relation.  Currently the only
-- transformation is flattening of tuple equations
-- (see 'desugarTupleSigRel').
desugar :: SigRel -> SigRel
desugar = desugarTupleSigRel
-- | Flatten tuple equations inside a signal relation: every equation in
-- the relation's body is rewritten by 'desugarTupleEquation'.
desugarTupleSigRel :: SigRel -> SigRel
desugarTupleSigRel (SigRel pat eqs) =
  SigRel pat (desugarTupleEquation =<< eqs)
-- | Desugar a single equation.  An equality (or initialisation) between
-- two expression pairs is split component-wise and desugared again, so
-- nested pairs are flattened completely.  Any other equation is kept
-- unchanged, as a singleton list.
desugarTupleEquation :: Equation -> [Equation]
desugarTupleEquation eq =
  case eq of
    EquEqual (ExprPair l1 l2) (ExprPair r1 r2) ->
      desugarTupleEquation =<< [EquEqual l1 r1, EquEqual l2 r2]
    EquInit (ExprPair l1 l2) (ExprPair r1 r2) ->
      desugarTupleEquation =<< [EquInit l1 r1, EquInit l2 r2]
    _ -> [eq]
import Hydra.BNFC.AbsHydra
desugar :: SigRel -> SigRel
desugar = desugarTupleSigRel
desugarTupleSigRel :: SigRel -> SigRel
desugarTupleSigRel sr = case sr of
SigRel pat1 eqs1 -> SigRel pat1 (concatMap desugarTupleEquation eqs1)
desugarTupleEquation :: Equation -> [Equation]
desugarTupleEquation eq = case eq of
EquSigRelApp _ _ -> [eq]
EquLocal _ -> [eq]
EquEqual (ExprPair e1 e2) (ExprPair e3 e4) ->
concatMap desugarTupleEquation [EquEqual e1 e3,EquEqual e2 e4]
EquEqual _ _ -> [eq]
EquInit (ExprPair e1 e2) (ExprPair e3 e4) ->
concatMap desugarTupleEquation [EquInit e1 e3,EquInit e2 e4]
EquInit _ _ -> [eq] | |
ab8bfe1ac171472ee472014475aefd66a94b5297f4df8a9ad34753d6cc493a5e | noinia/hgeometry | Writer.hs | # OPTIONS_GHC -fno - warn - orphans #
# LANGUAGE UndecidableInstances #
{-# LANGUAGE OverloadedStrings #-}
--------------------------------------------------------------------------------
-- |
-- Module : Geometry.Svg.Writer
-- Copyright   : (C)
-- License     : see the LICENSE file
-- Maintainer  :
-- Description :
--
-- Write geometry to svg
--
--------------------------------------------------------------------------------
module Geometry.Svg.Writer where
import Control.Lens hiding (rmap, Const(..))
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString.Lazy.Char8 as B8
import Data.Ext
import Data.Fixed
import qualified Data.Foldable as F
import qualified Ipe.Attributes as IA
import Ipe.Color (IpeColor(..))
import Ipe.Types
import qualified Ipe.Types as Ipe
import Ipe.Value (IpeValue(..))
import Geometry.Point
import Geometry.PolyLine
import Geometry.Polygon
import Geometry.Svg.MathCoordinateSystem
import Geometry.Matrix (Matrix)
import Data.List.NonEmpty (NonEmpty(..))
import Data.Maybe
import Data.Proxy
import Data.Ratio
import Data.Semigroup.Foldable (toNonEmpty)
import Data.Singletons (Apply)
import Data.Vinyl hiding (Label)
import Data.Vinyl.Functor
import Data.Vinyl.TypeLevel
import Text.Blaze (ToMarkup(toMarkup), ToValue(toValue))
import qualified Text.Blaze.Svg as Svg
import qualified Text.Blaze.Svg.Renderer.Utf8 as SvgRender
import Text.Blaze.Svg11 ((!))
import qualified Text.Blaze.Svg11 as Svg
import qualified Text.Blaze.Svg11.Attributes as A
--------------------------------------------------------------------------------
-- | Converts an element into a valid svg document (including doctype etc.)
The size of the resulting svg is set to 800x600 . Moreover , we flip the axes
-- so that the origin is in the bottom-left.
--
--
toSvgXML :: ToMarkup t => t -> B.ByteString
toSvgXML = SvgRender.renderSvg
. Svg.docTypeSvg
. renderCanvas (createCanvas @Double 800 600) []
. svgO
-- | Convert an element to Svg using 'toSvgXML' and prints the resulting svg
-- (as xml) output to stdout.
--
-- Unlike 'printSvgXMLElem', this prints a complete svg document (doctype,
-- 800x600 canvas, flipped axes), not just the bare element.
printSvgXML :: ToMarkup t => t -> IO ()
-- Bug fix: the body previously piped through 'toSvgXMLElem', which made
-- this function identical to 'printSvgXMLElem' and contradicted the
-- documented use of 'toSvgXML'.
printSvgXML = B8.putStrLn . toSvgXML
| Convert an element to Svg
svgO :: ToMarkup a => a -> Svg.Svg
svgO = Svg.toSvg
| Convert an element to Svg , and render this svg as xml . Note that the xml
-- contains *only* this element.
toSvgXMLElem :: ToMarkup t => t -> B.ByteString
toSvgXMLElem = SvgRender.renderSvg . Svg.toSvg
| Convert an element to Svg , and prints the xml output to stdout .
printSvgXMLElem :: ToMarkup t => t -> IO ()
printSvgXMLElem = B8.putStrLn . toSvgXMLElem
--------------------------------------------------------------------------------
instance Real r => ToMarkup (IpeObject r) where
toMarkup (IpeGroup g) = toMarkup g
toMarkup (IpeImage i) = toMarkup i
toMarkup (IpeTextLabel t) = toMarkup t
toMarkup (IpeMiniPage m) = toMarkup m
toMarkup (IpeUse u) = toMarkup u
toMarkup (IpePath (p :+ ats)) = toMarkup $ p :+ (ats' <> ats)
where
ats' = IA.attr IA.SFill $ IpeColor $ Named "transparent"
-- svg assumes that by default the filling is set to transparent
-- so make sure we do that as well
instance ( ToMarkup g
, AllConstrained IpeToSvgAttr rs
, ReifyConstraint ToValue (IA.Attr f) rs
, RMap rs, RecordToList rs
, RecAll (IA.Attr f) rs ToValue
) => ToMarkup (g :+ IA.Attributes f rs) where
toMarkup (i :+ ats) = toMarkup i `applyAts` svgWriteAttrs ats
instance Real r => ToMarkup (TextLabel r) where
toMarkup (Label t p) = text_ p [] t
instance Real r => ToMarkup (MiniPage r) where
toMarkup (MiniPage t p w) = text_ p [A.width (toPValue w)] t
instance Real r => ToMarkup (Image r) where
toMarkup _ = error "ToMarkup: Image not implemented yet"
-- toMarkup (Image i r) = Svg.image t ! A.xlinkHref (toAValue i)
-- ! A.y (toPValue $ p^.yCoord)
! ( toPValue w )
instance HasResolution p => ToValue (Fixed p) where
toValue = toAValue
instance Integral a => ToValue (Ratio a) where
toValue = toValue @Pico . realToFrac
instance Real r => ToValue (PathSegment r) where
toValue = \case
PolyLineSegment pl -> Svg.mkPath . toPath $ pl^.points.to toNonEmpty
PolygonPath pg -> Svg.mkPath $ do toPath $ pg^.outerBoundaryVector.to toNonEmpty
Svg.z
EllipseSegment _ -> undefined
_ -> error "toValue: not implemented yet"
-- | Render a non-empty list of (annotated) points as an svg path: a
-- moveto to the first point followed by a lineto for each remaining
-- point.  The extra annotations (the @:+ p@ part) are dropped.
toPath :: Real r => NonEmpty (Point 2 r :+ p) -> Svg.Path
toPath pts = case (^.core) <$> pts of
    (v:|vs) -> do Svg.m (showP $ v^.xCoord) (showP $ v^.yCoord)
                  mapM_ (\(Point2 x y) -> Svg.l (showP x) (showP y)) vs
instance Real r => ToMarkup (Ipe.Path r) where
toMarkup p = Svg.path ! A.d (toValue p)
instance Real r => ToValue (Path r) where
toValue (Path s) = mconcat . map toValue . F.toList $ s
instance Real r => ToMarkup (Ipe.IpeSymbol r) where
toMarkup (Symbol p _) = Svg.circle ! A.cx (toPValue $ p^.xCoord)
! A.cy (toPValue $ p^.yCoord)
! A.r (toPValue 5)
-- TODO: for now just draw a disk of fixed radius
instance Real r => ToMarkup (Ipe.Group r) where
toMarkup (Group os) = Svg.g (mapM_ toMarkup os)
--------------------------------------------------------------------------------
-- * Dealing with attributes
instance ToValue (Apply f at) => ToValue (IA.Attr f at) where
toValue att = maybe mempty toValue $ IA._getAttr att
applyAts :: Svg.Markup -> [(SvgF, Svg.AttributeValue)] -> Svg.Markup
applyAts x0 = F.foldl' (\x (f,v) -> x ! f v) x0
-- | Functon to write all attributes in a Rec
svgWriteAttrs :: ( AllConstrained IpeToSvgAttr rs
, RMap rs, RecordToList rs
, ReifyConstraint ToValue (IA.Attr f) rs
, RecAll (IA.Attr f) rs ToValue
)
=> IA.Attributes f rs
-> [(SvgF, Svg.AttributeValue)]
svgWriteAttrs (IA.Attrs r) = catMaybes . recordToList $ IA.zipRecsWith f (writeAttrFunctions r)
(writeAttrValues r)
where
f (Const mn) (Const mv) = Const $ (,) <$> mn <*> mv
-- | Writing Attribute names
writeAttrFunctions :: AllConstrained IpeToSvgAttr rs
=> Rec f rs
-> Rec (Const (Maybe SvgF)) rs
writeAttrFunctions RNil = RNil
writeAttrFunctions (x :& xs) = Const (write'' x) :& writeAttrFunctions xs
where
write'' :: forall f s. IpeToSvgAttr s => f s -> Maybe SvgF
write'' _ = attrSvg (Proxy :: Proxy s)
-- | Writing the attribute values
writeAttrValues :: ( ReifyConstraint ToValue (IA.Attr f) rs, RMap rs
, RecAll (IA.Attr f) rs ToValue)
=> Rec (IA.Attr f) rs -> Rec (Const (Maybe Svg.AttributeValue)) rs
writeAttrValues = rmap (\(Compose (Dict x)) -> Const $ toMaybeValue x)
. reifyConstraint @ToValue
-- | Serialise an attribute: 'Nothing' when the attribute is unset,
-- otherwise the rendered attribute value.
toMaybeValue :: ToValue (IA.Attr f at) => IA.Attr f at -> Maybe Svg.AttributeValue
toMaybeValue attr = case attr of
  IA.Attr _ -> Just (toValue attr)
  IA.NoAttr -> Nothing
type SvgF = Svg.AttributeValue -> Svg.Attribute
-- | For the types representing attribute values we can get the name/key to use
-- when serializing to ipe.
class IpeToSvgAttr (a :: IA.AttributeUniverse) where
attrSvg :: proxy a -> Maybe SvgF
-- CommonAttributeUnivers
instance IpeToSvgAttr IA.Layer where attrSvg _ = Nothing
TODO
instance IpeToSvgAttr IA.Pin where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Transformations where attrSvg _ = Nothing
-- IpeSymbolAttributeUniversre
instance IpeToSvgAttr IA.Stroke where attrSvg _ = Just A.stroke
instance IpeToSvgAttr IA.Fill where attrSvg _ = Just A.fill
instance IpeToSvgAttr IA.Pen where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Size where attrSvg _ = Nothing
-- PathAttributeUniverse
instance IpeToSvgAttr IA.Dash where attrSvg _ = Nothing
instance IpeToSvgAttr IA.LineCap where attrSvg _ = Just A.strokeLinecap
instance IpeToSvgAttr IA.LineJoin where attrSvg _ = Nothing
instance IpeToSvgAttr IA.FillRule where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Arrow where attrSvg _ = Nothing
instance IpeToSvgAttr IA.RArrow where attrSvg _ = Nothing
instance IpeToSvgAttr IA.StrokeOpacity where attrSvg _ = Just A.strokeOpacity
instance IpeToSvgAttr IA.Opacity where attrSvg _ = Just A.fillOpacity
instance IpeToSvgAttr IA.Tiling where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Gradient where attrSvg _ = Nothing
-- TextAttibuteUniverse
instance IpeToSvgAttr IA.Width where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Height where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Depth where attrSvg _ = Nothing
instance IpeToSvgAttr IA.VAlign where attrSvg _ = Nothing
instance IpeToSvgAttr IA.HAlign where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Style where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Clip where attrSvg _ = Just A.clip
--------------------------------------------------------------------------------
deriving instance ToValue LayerName
instance Real r => ToValue (IpeColor r) where
toValue (IpeColor c) = case c of
Named t -> toValue t
Valued v -> toAValue $ fmap showP v
-- TODO:
instance Real r => ToValue (IA.IpePen r) where
toValue _ = mempty
instance Real r => ToValue (IA.IpeSize r) where
toValue _ = mempty
instance Real r => ToValue (IA.IpeArrow r) where
toValue _ = mempty
instance Real r => ToValue (IA.IpeDash r) where
toValue _ = mempty
instance Real r => ToValue (Matrix 3 3 r) where
toValue _ = mempty
instance ToValue IA.HorizontalAlignment where
toValue _ = mempty
instance ToValue IA.VerticalAlignment where
toValue _ = mempty
instance ToValue IA.FillType where
toValue _ = mempty
instance ToValue IA.PinType where
toValue _ = mempty
instance ToValue IA.TransformationTypes where
toValue _ = mempty
| null | https://raw.githubusercontent.com/noinia/hgeometry/89cd3d3109ec68f877bf8e34dc34b6df337a4ec1/hgeometry-svg/src/Geometry/Svg/Writer.hs | haskell | # LANGUAGE OverloadedStrings #
------------------------------------------------------------------------------
|
Module : Geometry.Svg.Writer
License : see the LICENSE file
Description :
Write geometry to svg
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Converts an element into a valid svg document (including doctype etc.)
so that the origin is in the bottom-left.
(as xml) output to stdout.
contains *only* this element.
------------------------------------------------------------------------------
svg assumes that by default the filling is set to transparent
so make sure we do that as well
toMarkup (Image i r) = Svg.image t ! A.xlinkHref (toAValue i)
! A.y (toPValue $ p^.yCoord)
TODO: for now just draw a disk of fixed radius
------------------------------------------------------------------------------
* Dealing with attributes
| Functon to write all attributes in a Rec
| Writing Attribute names
| Writing the attribute values
| For the types representing attribute values we can get the name/key to use
when serializing to ipe.
CommonAttributeUnivers
IpeSymbolAttributeUniversre
PathAttributeUniverse
TextAttibuteUniverse
------------------------------------------------------------------------------
TODO: | # OPTIONS_GHC -fno - warn - orphans #
# LANGUAGE UndecidableInstances #
Copyright : ( C )
Maintainer :
module Geometry.Svg.Writer where
import Control.Lens hiding (rmap, Const(..))
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString.Lazy.Char8 as B8
import Data.Ext
import Data.Fixed
import qualified Data.Foldable as F
import qualified Ipe.Attributes as IA
import Ipe.Color (IpeColor(..))
import Ipe.Types
import qualified Ipe.Types as Ipe
import Ipe.Value (IpeValue(..))
import Geometry.Point
import Geometry.PolyLine
import Geometry.Polygon
import Geometry.Svg.MathCoordinateSystem
import Geometry.Matrix (Matrix)
import Data.List.NonEmpty (NonEmpty(..))
import Data.Maybe
import Data.Proxy
import Data.Ratio
import Data.Semigroup.Foldable (toNonEmpty)
import Data.Singletons (Apply)
import Data.Vinyl hiding (Label)
import Data.Vinyl.Functor
import Data.Vinyl.TypeLevel
import Text.Blaze (ToMarkup(toMarkup), ToValue(toValue))
import qualified Text.Blaze.Svg as Svg
import qualified Text.Blaze.Svg.Renderer.Utf8 as SvgRender
import Text.Blaze.Svg11 ((!))
import qualified Text.Blaze.Svg11 as Svg
import qualified Text.Blaze.Svg11.Attributes as A
The size of the resulting svg is set to 800x600 . Moreover , we flip the axes
toSvgXML :: ToMarkup t => t -> B.ByteString
toSvgXML = SvgRender.renderSvg
. Svg.docTypeSvg
. renderCanvas (createCanvas @Double 800 600) []
. svgO
| Convert an element to Svg using ' toSvgXML ' and prints the resulting svg
printSvgXML :: ToMarkup t => t -> IO ()
printSvgXML = B8.putStrLn . toSvgXMLElem
| Convert an element to Svg
svgO :: ToMarkup a => a -> Svg.Svg
svgO = Svg.toSvg
| Convert an element to Svg , and render this svg as xml . Note that the xml
toSvgXMLElem :: ToMarkup t => t -> B.ByteString
toSvgXMLElem = SvgRender.renderSvg . Svg.toSvg
| Convert an element to Svg , and prints the xml output to stdout .
printSvgXMLElem :: ToMarkup t => t -> IO ()
printSvgXMLElem = B8.putStrLn . toSvgXMLElem
instance Real r => ToMarkup (IpeObject r) where
toMarkup (IpeGroup g) = toMarkup g
toMarkup (IpeImage i) = toMarkup i
toMarkup (IpeTextLabel t) = toMarkup t
toMarkup (IpeMiniPage m) = toMarkup m
toMarkup (IpeUse u) = toMarkup u
toMarkup (IpePath (p :+ ats)) = toMarkup $ p :+ (ats' <> ats)
where
ats' = IA.attr IA.SFill $ IpeColor $ Named "transparent"
instance ( ToMarkup g
, AllConstrained IpeToSvgAttr rs
, ReifyConstraint ToValue (IA.Attr f) rs
, RMap rs, RecordToList rs
, RecAll (IA.Attr f) rs ToValue
) => ToMarkup (g :+ IA.Attributes f rs) where
toMarkup (i :+ ats) = toMarkup i `applyAts` svgWriteAttrs ats
instance Real r => ToMarkup (TextLabel r) where
toMarkup (Label t p) = text_ p [] t
instance Real r => ToMarkup (MiniPage r) where
toMarkup (MiniPage t p w) = text_ p [A.width (toPValue w)] t
instance Real r => ToMarkup (Image r) where
toMarkup _ = error "ToMarkup: Image not implemented yet"
! ( toPValue w )
instance HasResolution p => ToValue (Fixed p) where
toValue = toAValue
instance Integral a => ToValue (Ratio a) where
toValue = toValue @Pico . realToFrac
instance Real r => ToValue (PathSegment r) where
toValue = \case
PolyLineSegment pl -> Svg.mkPath . toPath $ pl^.points.to toNonEmpty
PolygonPath pg -> Svg.mkPath $ do toPath $ pg^.outerBoundaryVector.to toNonEmpty
Svg.z
EllipseSegment _ -> undefined
_ -> error "toValue: not implemented yet"
toPath :: Real r => NonEmpty (Point 2 r :+ p) -> Svg.Path
toPath pts = case (^.core) <$> pts of
(v:|vs) -> do Svg.m (showP $ v^.xCoord) (showP $ v^.yCoord)
mapM_ (\(Point2 x y) -> Svg.l (showP x) (showP y)) vs
instance Real r => ToMarkup (Ipe.Path r) where
toMarkup p = Svg.path ! A.d (toValue p)
instance Real r => ToValue (Path r) where
toValue (Path s) = mconcat . map toValue . F.toList $ s
instance Real r => ToMarkup (Ipe.IpeSymbol r) where
toMarkup (Symbol p _) = Svg.circle ! A.cx (toPValue $ p^.xCoord)
! A.cy (toPValue $ p^.yCoord)
! A.r (toPValue 5)
instance Real r => ToMarkup (Ipe.Group r) where
toMarkup (Group os) = Svg.g (mapM_ toMarkup os)
instance ToValue (Apply f at) => ToValue (IA.Attr f at) where
toValue att = maybe mempty toValue $ IA._getAttr att
applyAts :: Svg.Markup -> [(SvgF, Svg.AttributeValue)] -> Svg.Markup
applyAts x0 = F.foldl' (\x (f,v) -> x ! f v) x0
svgWriteAttrs :: ( AllConstrained IpeToSvgAttr rs
, RMap rs, RecordToList rs
, ReifyConstraint ToValue (IA.Attr f) rs
, RecAll (IA.Attr f) rs ToValue
)
=> IA.Attributes f rs
-> [(SvgF, Svg.AttributeValue)]
svgWriteAttrs (IA.Attrs r) = catMaybes . recordToList $ IA.zipRecsWith f (writeAttrFunctions r)
(writeAttrValues r)
where
f (Const mn) (Const mv) = Const $ (,) <$> mn <*> mv
writeAttrFunctions :: AllConstrained IpeToSvgAttr rs
=> Rec f rs
-> Rec (Const (Maybe SvgF)) rs
writeAttrFunctions RNil = RNil
writeAttrFunctions (x :& xs) = Const (write'' x) :& writeAttrFunctions xs
where
write'' :: forall f s. IpeToSvgAttr s => f s -> Maybe SvgF
write'' _ = attrSvg (Proxy :: Proxy s)
writeAttrValues :: ( ReifyConstraint ToValue (IA.Attr f) rs, RMap rs
, RecAll (IA.Attr f) rs ToValue)
=> Rec (IA.Attr f) rs -> Rec (Const (Maybe Svg.AttributeValue)) rs
writeAttrValues = rmap (\(Compose (Dict x)) -> Const $ toMaybeValue x)
. reifyConstraint @ToValue
toMaybeValue :: ToValue (IA.Attr f at) => IA.Attr f at -> Maybe Svg.AttributeValue
toMaybeValue a = case a of
IA.NoAttr -> Nothing
IA.Attr _ -> Just $ toValue a
type SvgF = Svg.AttributeValue -> Svg.Attribute
class IpeToSvgAttr (a :: IA.AttributeUniverse) where
attrSvg :: proxy a -> Maybe SvgF
instance IpeToSvgAttr IA.Layer where attrSvg _ = Nothing
TODO
instance IpeToSvgAttr IA.Pin where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Transformations where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Stroke where attrSvg _ = Just A.stroke
instance IpeToSvgAttr IA.Fill where attrSvg _ = Just A.fill
instance IpeToSvgAttr IA.Pen where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Size where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Dash where attrSvg _ = Nothing
instance IpeToSvgAttr IA.LineCap where attrSvg _ = Just A.strokeLinecap
instance IpeToSvgAttr IA.LineJoin where attrSvg _ = Nothing
instance IpeToSvgAttr IA.FillRule where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Arrow where attrSvg _ = Nothing
instance IpeToSvgAttr IA.RArrow where attrSvg _ = Nothing
instance IpeToSvgAttr IA.StrokeOpacity where attrSvg _ = Just A.strokeOpacity
instance IpeToSvgAttr IA.Opacity where attrSvg _ = Just A.fillOpacity
instance IpeToSvgAttr IA.Tiling where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Gradient where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Width where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Height where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Depth where attrSvg _ = Nothing
instance IpeToSvgAttr IA.VAlign where attrSvg _ = Nothing
instance IpeToSvgAttr IA.HAlign where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Style where attrSvg _ = Nothing
instance IpeToSvgAttr IA.Clip where attrSvg _ = Just A.clip
deriving instance ToValue LayerName
instance Real r => ToValue (IpeColor r) where
toValue (IpeColor c) = case c of
Named t -> toValue t
Valued v -> toAValue $ fmap showP v
instance Real r => ToValue (IA.IpePen r) where
toValue _ = mempty
instance Real r => ToValue (IA.IpeSize r) where
toValue _ = mempty
instance Real r => ToValue (IA.IpeArrow r) where
toValue _ = mempty
instance Real r => ToValue (IA.IpeDash r) where
toValue _ = mempty
instance Real r => ToValue (Matrix 3 3 r) where
toValue _ = mempty
instance ToValue IA.HorizontalAlignment where
toValue _ = mempty
instance ToValue IA.VerticalAlignment where
toValue _ = mempty
instance ToValue IA.FillType where
toValue _ = mempty
instance ToValue IA.PinType where
toValue _ = mempty
instance ToValue IA.TransformationTypes where
toValue _ = mempty
|
3e7a73738b22864c40adb292d3d251aac39e93f30473fdfad5b5d6b3970621d3 | Helium4Haskell/helium | PMWBug.hs | f "a" = 1
| null | https://raw.githubusercontent.com/Helium4Haskell/helium/5928bff479e6f151b4ceb6c69bbc15d71e29eb47/test/staticwarnings/PMWBug.hs | haskell | f "a" = 1
| |
e93634c4b71ef641c3267486424a4d628303b2efb0f6928e7039d345f42adac8 | stackbuilders/cloud-haskell-chat | Client.hs | module Client where
import Control.Distributed.Process.ManagedProcess.Client (callChan, cast)
import Control.Distributed.Process ( expectTimeout
, whereisRemoteAsync
, spawnLocal
, receiveChan
, link
, NodeId(..)
, Process
, ProcessId
, ReceivePort
, WhereIsReply(..) )
import Control.Distributed.Process.Node ( initRemoteTable
, runProcess
, newLocalNode )
import Network.Transport.TCP (createTransport, defaultTCPParameters)
import Network.Transport (EndPointAddress(..))
import Control.Concurrent (threadDelay)
import Control.Monad.IO.Class (liftIO)
import Control.Monad (void, forever)
import qualified Data.ByteString.Char8 as BS (pack)
import Types
import Logger (runChatLogger, logChatMessage, logStr)
-- | Resolve the chat server's 'ProcessId' from the registry of the node
-- at @serverAddr@.  The registry lookup is asynchronous, so we wait
-- briefly for the reply and start over until a pid arrives.
-- NOTE(review): this recurses forever if no process ever registers
-- under @name@ -- confirm that blocking indefinitely is intended.
searchChatServer :: ChatName -> ServerAddress -> Process ProcessId
searchChatServer name serverAddr = do
  let addr = EndPointAddress (BS.pack serverAddr)
      srvId = NodeId addr
  -- Ask the remote node which process is registered under 'name';
  -- the reply is delivered as an ordinary message.
  whereisRemoteAsync srvId name
  -- Wait up to 1000 microseconds for the 'WhereIsReply'.
  reply <- expectTimeout 1000
  case reply of
    Just (WhereIsReply _ (Just sid)) -> return sid
    _ -> searchChatServer name serverAddr
-- | Start a chat-client node on @clientHost:port@ and connect it to the
-- chat server registered under @name@ at @serverAddr@.  Reads the
-- nickname and all chat input from stdin; incoming broadcasts are
-- printed by a background process.  Does not return unless transport
-- creation fails or the linked server process dies.
launchChatClient :: ServerAddress -> Host -> Int -> ChatName -> IO ()
launchChatClient serverAddr clientHost port name = do
  mt <- createTransport clientHost (show port) defaultTCPParameters
  case mt of
    Left err -> print err  -- could not open the TCP transport: report and give up
    Right transport -> do
      node <- newLocalNode transport initRemoteTable
      runChatLogger node
      runProcess node $ do
        serverPid <- searchChatServer name serverAddr
        -- Link so this client terminates when the server dies.
        link serverPid
        logStr "Joining chat server ... "
        logStr "Please, provide your nickname ... "
        nickName <- liftIO getLine
        -- Join the chat; the server answers with a receive channel
        -- carrying the chat traffic.
        rp <- callChan serverPid (JoinChatMessage nickName) :: Process (ReceivePort ChatMessage)
        logStr "You have joined the chat ... "
        -- Background process: print every message the server sends.
        void $ spawnLocal $ forever $ do
          msg <- receiveChan rp
          logChatMessage msg
        -- Foreground loop: forward each stdin line to the server.
        forever $ do
          chatInput <- liftIO getLine
          cast serverPid (ChatMessage (Client nickName) chatInput)
          liftIO $ threadDelay 500000  -- throttle: at most one send per 0.5 s
| null | https://raw.githubusercontent.com/stackbuilders/cloud-haskell-chat/71698c87b2b5a8aa359a30590286bbce3f462b04/src/Client.hs | haskell | module Client where
import Control.Distributed.Process.ManagedProcess.Client (callChan, cast)
import Control.Distributed.Process ( expectTimeout
, whereisRemoteAsync
, spawnLocal
, receiveChan
, link
, NodeId(..)
, Process
, ProcessId
, ReceivePort
, WhereIsReply(..) )
import Control.Distributed.Process.Node ( initRemoteTable
, runProcess
, newLocalNode )
import Network.Transport.TCP (createTransport, defaultTCPParameters)
import Network.Transport (EndPointAddress(..))
import Control.Concurrent (threadDelay)
import Control.Monad.IO.Class (liftIO)
import Control.Monad (void, forever)
import qualified Data.ByteString.Char8 as BS (pack)
import Types
import Logger (runChatLogger, logChatMessage, logStr)
searchChatServer :: ChatName -> ServerAddress -> Process ProcessId
searchChatServer name serverAddr = do
let addr = EndPointAddress (BS.pack serverAddr)
srvId = NodeId addr
whereisRemoteAsync srvId name
reply <- expectTimeout 1000
case reply of
Just (WhereIsReply _ (Just sid)) -> return sid
_ -> searchChatServer name serverAddr
launchChatClient :: ServerAddress -> Host -> Int -> ChatName -> IO ()
launchChatClient serverAddr clientHost port name = do
mt <- createTransport clientHost (show port) defaultTCPParameters
case mt of
Left err -> print err
Right transport -> do
node <- newLocalNode transport initRemoteTable
runChatLogger node
runProcess node $ do
serverPid <- searchChatServer name serverAddr
link serverPid
logStr "Joining chat server ... "
logStr "Please, provide your nickname ... "
nickName <- liftIO getLine
rp <- callChan serverPid (JoinChatMessage nickName) :: Process (ReceivePort ChatMessage)
logStr "You have joined the chat ... "
void $ spawnLocal $ forever $ do
msg <- receiveChan rp
logChatMessage msg
forever $ do
chatInput <- liftIO getLine
cast serverPid (ChatMessage (Client nickName) chatInput)
liftIO $ threadDelay 500000
| |
bb5532ee584d7f6b453286fcff4cca19d30c05baf858816ab22758229247a69c | ohua-dev/ohua-core | Lang.hs | -- |
-- Module      : $Header$
-- Description : Definition of an abstract expression language as the first IR for the Ohua compiler.
-- Copyright   : (c) 2017. All Rights Reserved.
-- License     : EPL-1.0
-- Maintainer  :
-- Stability   : experimental
-- Portability : portable
--
-- This source code is licensed under the terms described in the associated LICENSE.TXT file
module Ohua.DAGLang.Lang where
--
-- This module defines the dag lang, which is inspired by the dag calculus:
-- "Umut A. Acar et al. 2016. Dag-calculus: a calculus for parallel
-- computation. In Proceedings of the 21st ACM SIGPLAN International
-- Conference on Functional Programming (ICFP 2016)."
-- It contains concepts for spawning and joining tasks.
-- As such it represents the backend IR which contains parallel abstractions.
-- It allows implementing schedulers and realizes Ohua's section concept.
-- We transform the DFLang straight into the DAGLang.
--
| null | https://raw.githubusercontent.com/ohua-dev/ohua-core/978fa3369922f86cc3fc474d5f2c554cc87fd60a/core/src/Ohua/DAGLang/Lang.hs | haskell | |
Module : $Header$
License : EPL-1.0
Maintainer : ,
Stability : experimental
Portability : portable
It contains concepts for spawning and joining tasks.
As such it represents the backend IR which contains parallel abstractions.
| Description : Definition of an abstract expression language as the first IR for the Ohua compiler .
Copyright : ( c ) , 2017 . All Rights Reserved .
This source code is licensed under the terms described in the associated LICENSE.TXT file
module Ohua.DAGLang.Lang where
This module defines the dag lang which is inspired by the dag calculus :
" Umut A. Acar , , , and . 2016 . Dag - calculus : a calculus for parallel computation . In Proceedings of the 21st ACM SIGPLAN International Conference on Functional Programming ( ICFP 2016 ) . "
It allows to implement schedulers and realizes Ohua 's section concept .
We transform the DFLang straight into the DAGLang .
|
a3bc05d433995648144abfa4abeded9c0fb355aa5dabe23aad32cc521c0f773d | daveyarwood/music-theory | util.cljc | (ns music-theory.util)
(defn error
  "Throws a Clojure or ClojureScript error/exception."
  [msg]
  #?(:clj  (throw (Exception. msg))
     :cljs (throw (js/Error. msg))))
(defn parse-int
  "Parses a string as an integer (in ClojureScript this is a plain
  JS number coercion, so non-integral input yields a float or NaN)."
  [n]
  #?(:clj  (Integer/parseInt n)
     :cljs (js/Number n)))
| null | https://raw.githubusercontent.com/daveyarwood/music-theory/c58beb9fa6d290900afeff78ee4f86c875e393d7/src/music_theory/util.cljc | clojure | (ns music-theory.util)
(defn error
"Throws a Clojure or ClojureScript error/exception."
[msg]
(throw (new #?(:clj Exception :cljs js/Error) msg)))
(defn parse-int
"Parses a string as an integer."
[n]
(#?(:clj Integer/parseInt :cljs js/Number) n))
| |
2707653cb090ce35c9a16472b2300b43f612fb8895ffe67903238e45c6e5a135 | herd/herdtools7 | code.ml | (****************************************************************************)
(* the diy toolsuite *)
(* *)
, University College London , UK .
, INRIA Paris - Rocquencourt , France .
(* *)
Copyright 2010 - present Institut National de Recherche en Informatique et
(* en Automatique and the authors. All rights reserved. *)
(* *)
This software is governed by the CeCILL - B license under French law and
(* abiding by the rules of distribution of free software. You can use, *)
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
(****************************************************************************)
(* Event components *)
type loc = Data of string | Code of Label.t
let as_data = function
| Data loc -> loc
| Code _ -> assert false
let is_data = function
| Data _ -> true
| Code _ -> false
let pp_loc = function Data s | Code s -> s
let loc_eq loc1 loc2 = match loc1,loc2 with
| (Data s1,Data s2)
| (Code s1,Code s2)
-> Misc.string_eq s1 s2
| (Data _,Code _)
| (Code _,Data _)
-> false
(* Total order on locations: all data locations sort before all code
   labels; within a kind, locations are ordered by their string. *)
let loc_compare loc1 loc2 =
  match loc1, loc2 with
  | Data _, Code _ -> -1
  | Code _, Data _ -> 1
  | Data s1, Data s2
  | Code s1, Code s2 -> compare s1 s2
module LocOrd = struct
type t = loc
let compare = loc_compare
end
module LocSet = MySet.Make(LocOrd)
module LocMap = MyMap.Make(LocOrd)
let loc_none = Data "*"
let ok_str = "ok"
let ok = Data ok_str
let myok p n = Data (Printf.sprintf "ok%i%i" p n)
let myok_proc p = Data (Printf.sprintf "ok%i" p)
type v = int
type proc = Proc.t
let pp_proc p = Proc.pp p
type env = (string * v) list
(* Direction of event *)
type dir = W | R | J
(* Edges compoments that do not depend on architecture *)
(* Change or proc accross edge *)
type ie = Int|Ext
(* Change of location across edge *)
type sd = Same|Diff
(* Direction of related events *)
type extr = Dir of dir | Irr | NoDir
Associated pretty print & generators
let pp_dir = function
| W -> "W"
| R -> "R"
| J -> "J"
let pp_ie = function
| Int -> "i"
| Ext -> "e"
let pp_extr = function
| Dir d -> pp_dir d
| Irr -> "*"
| NoDir -> ""
let pp_sd = function
| Same -> "s"
| Diff -> "d"
let seq_sd sd1 sd2 =
match sd1,sd2 with
| Same,Same -> Same
| Diff,_|_,Diff -> Diff
let fold_ie f r = f Ext (f Int r)
let fold_sd f r = f Diff (f Same r)
let do_fold_extr withj f r =
let r = f (Dir W) (f (Dir R) (f Irr r)) in
if withj then f (Dir J) r
else r
let fold_extr f r = do_fold_extr false f r
let fold_sd_extr f = fold_sd (fun sd -> fold_extr (fun e -> f sd e))
let fold_sd_extr_extr f =
fold_sd_extr (fun sd e1 -> fold_extr (fun e2 -> f sd e1 e2))
(* Consistency-check modes selectable from the command line. *)
type check =
  | Default | Sc | Uni | Thin | Critical
  | Free | Ppo | Transitive | Total | MixedCheck

(* Command-line name of each check. *)
let pp_check = function
  | Default -> "default"
  | Sc -> "sc"
  | Uni -> "uni"
  | Thin -> "thin"
  | Critical -> "critical"
  | Free -> "free"
  | Ppo -> "ppo"
  | Transitive -> "transitive"
  | Total -> "total"
  | MixedCheck -> "mixedcheck"

(* All check names, in declaration order.  Derived from [pp_check] so
   the list and the printer cannot drift apart (previously the strings
   were duplicated by hand). *)
let checks =
  List.map pp_check
    [Default; Sc; Uni; Thin; Critical;
     Free; Ppo; Transitive; Total; MixedCheck]
(* Com relation *)
type com = CRf | CFr | CWs
let pp_com = function
| CRf -> "Rf"
| CFr -> "Fr"
| CWs -> "Co"
let fold_com f r = f CRf (f CFr (f CWs r))
(* Info in tests *)
type info = (string * string) list
let plain = "Na"
(* Memory Space *)
(* Memory spaces ("banks") an access may target; [VecReg] carries an
   architecture-specific vector payload. *)
type 'a bank = Ord | Tag | CapaTag | CapaSeal | Pte | VecReg of 'a
(* Printable bank name; the [VecReg] payload is not shown. *)
let pp_bank = function
| Ord -> "Ord"
| Tag -> "Tag"
| CapaTag -> "CapaTag"
| CapaSeal -> "CapaSeal"
| Pte -> "Pte"
| VecReg _ -> "VecReg"
(* Human-readable colour name for tag values 0-7; any other value is a
   fatal error with the same message as before. *)
let tag_of_int =
  let colours =
    [| "green"; "red"; "blue"; "black";
       "white"; "cyan"; "yellow"; "magenta"; |] in
  fun n ->
    if 0 <= n && n < Array.length colours then colours.(n)
    else Warn.fatal "Sorry, not pretty tag for number %i" n
(* Pretty print location [s] tagged with the colour name of tag [t]. *)
let add_tag s t = Printf.sprintf "%s:%s" s (tag_of_int t)
(* Pretty print a capability for location [s]: fixed base address, the
   location, then a validity bit -- 1 when [t] is zero, 0 otherwise
   (note the inversion). *)
let add_capability s t = Printf.sprintf "0xffffc0000:%s:%i" s (if t = 0 then 1 else 0)
(* Render an integer vector as "{v0,v1,...}"; elements are printed in
   hexadecimal ("0x%x") when [hexa] holds, in decimal otherwise. *)
let add_vector hexa v =
  let pp_elt =
    if hexa then Printf.sprintf "0x%x" else Printf.sprintf "%i" in
  Printf.sprintf "{%s}" (String.concat "," (List.map pp_elt v))
| null | https://raw.githubusercontent.com/herd/herdtools7/319d1c8ec9f7d939e98785dd519a0da54435b606/gen/code.ml | ocaml | **************************************************************************
the diy toolsuite
en Automatique and the authors. All rights reserved.
abiding by the rules of distribution of free software. You can use,
**************************************************************************
Event components
Direction of event
Edges compoments that do not depend on architecture
Change or proc accross edge
Change of location across edge
Direction of related events
Com relation
Info in tests
Memory Space | , University College London , UK .
, INRIA Paris - Rocquencourt , France .
Copyright 2010 - present Institut National de Recherche en Informatique et
This software is governed by the CeCILL - B license under French law and
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
type loc = Data of string | Code of Label.t
let as_data = function
| Data loc -> loc
| Code _ -> assert false
let is_data = function
| Data _ -> true
| Code _ -> false
let pp_loc = function Data s | Code s -> s
let loc_eq loc1 loc2 = match loc1,loc2 with
| (Data s1,Data s2)
| (Code s1,Code s2)
-> Misc.string_eq s1 s2
| (Data _,Code _)
| (Code _,Data _)
-> false
let loc_compare loc1 loc2 = match loc1,loc2 with
| Data _,Code _ -> -1
| Code _,Data _ -> 1
| (Data s1,Data s2)
| (Code s1,Code s2)
-> compare s1 s2
module LocOrd = struct
type t = loc
let compare = loc_compare
end
module LocSet = MySet.Make(LocOrd)
module LocMap = MyMap.Make(LocOrd)
let loc_none = Data "*"
let ok_str = "ok"
let ok = Data ok_str
let myok p n = Data (Printf.sprintf "ok%i%i" p n)
let myok_proc p = Data (Printf.sprintf "ok%i" p)
type v = int
type proc = Proc.t
let pp_proc p = Proc.pp p
type env = (string * v) list
type dir = W | R | J
type ie = Int|Ext
type sd = Same|Diff
type extr = Dir of dir | Irr | NoDir
Associated pretty print & generators
let pp_dir = function
| W -> "W"
| R -> "R"
| J -> "J"
let pp_ie = function
| Int -> "i"
| Ext -> "e"
let pp_extr = function
| Dir d -> pp_dir d
| Irr -> "*"
| NoDir -> ""
let pp_sd = function
| Same -> "s"
| Diff -> "d"
let seq_sd sd1 sd2 =
match sd1,sd2 with
| Same,Same -> Same
| Diff,_|_,Diff -> Diff
let fold_ie f r = f Ext (f Int r)
let fold_sd f r = f Diff (f Same r)
let do_fold_extr withj f r =
let r = f (Dir W) (f (Dir R) (f Irr r)) in
if withj then f (Dir J) r
else r
let fold_extr f r = do_fold_extr false f r
let fold_sd_extr f = fold_sd (fun sd -> fold_extr (fun e -> f sd e))
let fold_sd_extr_extr f =
fold_sd_extr (fun sd e1 -> fold_extr (fun e2 -> f sd e1 e2))
type check =
| Default | Sc | Uni | Thin | Critical
| Free | Ppo | Transitive | Total | MixedCheck
let pp_check =
function
| Default -> "default"
| Sc -> "sc"
| Uni -> "uni"
| Thin -> "thin"
| Critical -> "critical"
| Free -> "free"
| Ppo -> "ppo"
| Transitive -> "transitive"
| Total -> "total"
| MixedCheck -> "mixedcheck"
let checks =
[
"default";
"sc";
"uni";
"thin";
"critical";
"free";
"ppo";
"transitive";
"total";
"mixedcheck";
]
type com = CRf | CFr | CWs
let pp_com = function
| CRf -> "Rf"
| CFr -> "Fr"
| CWs -> "Co"
let fold_com f r = f CRf (f CFr (f CWs r))
type info = (string * string) list
let plain = "Na"
type 'a bank = Ord | Tag | CapaTag | CapaSeal | Pte | VecReg of 'a
let pp_bank = function
| Ord -> "Ord"
| Tag -> "Tag"
| CapaTag -> "CapaTag"
| CapaSeal -> "CapaSeal"
| Pte -> "Pte"
| VecReg _ -> "VecReg"
let tag_of_int = function
| 0 -> "green"
| 1 -> "red"
| 2 -> "blue"
| 3 -> "black"
| 4 -> "white"
| 5 -> "cyan"
| 6 -> "yellow"
| 7 -> "magenta"
| n -> Warn.fatal "Sorry, not pretty tag for number %i" n
let add_tag s t = Printf.sprintf "%s:%s" s (tag_of_int t)
let add_capability s t = Printf.sprintf "0xffffc0000:%s:%i" s (if t = 0 then 1 else 0)
let add_vector hexa v =
let open Printf in
let pp = if hexa then sprintf "0x%x" else sprintf "%i" in
sprintf "{%s}"
(String.concat "," (List.map pp v))
|
74258bf91c856b528fa937026f36772b41df655f1abba08744a700d694b57a1d | spechub/Hets | DataTypes.hs | |
Module : ./Omega / DataTypes.hs
Description : The Omega Data Types
Copyright : ( c ) , DFKI 2008
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
for an intermediate Omega Representation .
Module : ./Omega/DataTypes.hs
Description : The Omega Data Types
Copyright : (c) Ewaryst Schulz, DFKI 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
Datatypes for an intermediate Omega Representation.
-}
module Omega.DataTypes where
justWhen :: Bool -> a -> Maybe a
justWhen b x = if b then Just x else Nothing
| Top level element with libname and a list of theories
data Library = Library String [Theory] deriving (Show, Eq, Ord)
{- | Contains a theoryname a list of imports, signature elements and
sentences (axioms or theorems) -}
data Theory = Theory String [String] [TCElement]
deriving (Show, Eq, Ord)
-- | Theory constitutive elements
data TCElement =
-- | An axiom or theorem element
TCAxiomOrTheorem Bool String Term
-- | Symbol to represent sorts, constants, predicate symbols, etc.
| TCSymbol String
-- | A comment, only for development purposes
| TCComment String
deriving (Show, Eq, Ord)
-- | Term structure
data Term =
-- | Symbol
Symbol String
-- | Simple variable
| Var String
-- | Application of a function to a list of arguments
| App Term [Term]
| Bindersymbol , bound vars , body
| Bind String [Term] Term
deriving (Show, Eq, Ord)
| null | https://raw.githubusercontent.com/spechub/Hets/af7b628a75aab0d510b8ae7f067a5c9bc48d0f9e/Omega/DataTypes.hs | haskell | | Contains a theoryname a list of imports, signature elements and
sentences (axioms or theorems)
| Theory constitutive elements
| An axiom or theorem element
| Symbol to represent sorts, constants, predicate symbols, etc.
| A comment, only for development purposes
| Term structure
| Symbol
| Simple variable
| Application of a function to a list of arguments | |
Module : ./Omega / DataTypes.hs
Description : The Omega Data Types
Copyright : ( c ) , DFKI 2008
License : GPLv2 or higher , see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
for an intermediate Omega Representation .
Module : ./Omega/DataTypes.hs
Description : The Omega Data Types
Copyright : (c) Ewaryst Schulz, DFKI 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer :
Stability : provisional
Portability : portable
Datatypes for an intermediate Omega Representation.
-}
module Omega.DataTypes where
justWhen :: Bool -> a -> Maybe a
justWhen b x = if b then Just x else Nothing
| Top level element with libname and a list of theories
data Library = Library String [Theory] deriving (Show, Eq, Ord)
data Theory = Theory String [String] [TCElement]
deriving (Show, Eq, Ord)
data TCElement =
TCAxiomOrTheorem Bool String Term
| TCSymbol String
| TCComment String
deriving (Show, Eq, Ord)
data Term =
Symbol String
| Var String
| App Term [Term]
| Bindersymbol , bound vars , body
| Bind String [Term] Term
deriving (Show, Eq, Ord)
|
630bbb1ea12d629206e2b53d27542462de9bc78d9083b337c352f7d12cd637c0 | ksrky/Plato | Eval.hs | module Plato.Core.Eval where
import Plato.Core.Context
import Plato.Core.Subst
import Plato.Syntax.Core
import Control.Monad (forM)
----------------------------------------------------------------
-- Evaluation
----------------------------------------------------------------
isval :: Context -> Term -> Bool
isval ctx t = case t of
TmVar i _ -> case getBinding ctx i of
VarBind{} -> True
_ -> False
TmAbs{} -> True
TmTAbs{} -> True
TmRecord fields -> all (\(_, vi) -> isval ctx vi) fields
TmTag _ vs _ -> all (isval ctx) vs
TmApp (TmFold _) t -> isval ctx t
_ -> False
eval :: Context -> Term -> Term
eval ctx t = maybe t (eval ctx) (eval' t)
where
eval' :: Term -> Maybe Term
eval' t = case t of
TmVar i _ -> case getBinding ctx i of
TmAbbBind t _ -> Just t
_ -> Nothing
TmApp (TmUnfold _) (TmApp (TmFold _) v) | isval ctx v -> Just v
TmApp (TmFold tyS) t2 -> do
t2' <- eval' t2
Just $ TmApp (TmFold tyS) t2'
TmApp (TmUnfold tyS) t2 -> do
t2' <- eval' t2
Just $ TmApp (TmUnfold tyS) t2'
TmApp (TmAbs _ _ t12) v2 | isval ctx v2 -> return $ termSubstTop v2 t12
TmApp v1 t2 | isval ctx v1 -> do
t2' <- eval' t2
return $ TmApp v1 t2'
TmApp v1 t2 | isval ctx v1 -> do
t2' <- eval' t2
Just $ TmApp v1 t2'
TmApp t1 t2 -> do
t1' <- eval' t1
Just $ TmApp t1' t2
TmTApp (TmTAbs _ t11) tyT2 -> Just $ tytermSubstTop tyT2 t11
TmTApp t1 tyT2 -> do
t1' <- eval' t1
Just $ TmTApp t1' tyT2
TmLet _ v1 t2 | isval ctx v1 -> Just $ termSubstTop v1 t2
TmLet x t1 t2 -> do
t1' <- eval' t1
Just $ TmLet x t1' t2
TmFix (TmAbs _ _ t12) -> Just $ termSubstTop t t12
TmFix t1 -> do
t1' <- eval' t1
Just $ TmFix t1'
TmProj (TmRecord fields) l -> lookup l fields
TmProj t1 l -> do
t1' <- eval' t1
Just $ TmProj t1' l
TmRecord fields -> do
fields' <- forM fields $ \field -> case field of
(li, vi) | isval ctx vi -> Just (li, vi)
(li, ti) -> do
ti' <- eval' ti
Just (li, ti')
Just $ TmRecord fields'
TmTag _ vs _ | all (isval ctx) vs -> Nothing
TmTag l ts tyT -> do
ts' <- mapM eval' ts
Just $ TmTag l ts' tyT
TmCase (TmTag li vs11 _) alts | all (isval ctx) vs11 -> case lookup li alts of
Just (_, body) -> Just $ foldr termSubstTop body vs11
Nothing -> Nothing
TmCase t1 alts -> do
t1' <- eval' t1
Just $ TmCase t1' alts
_ -> Nothing | null | https://raw.githubusercontent.com/ksrky/Plato/02b1a043efa92cf2093ff7d721a607d8abe9d876/src/Plato/Core/Eval.hs | haskell | --------------------------------------------------------------
Evaluation
-------------------------------------------------------------- | module Plato.Core.Eval where
import Plato.Core.Context
import Plato.Core.Subst
import Plato.Syntax.Core
import Control.Monad (forM)
isval :: Context -> Term -> Bool
isval ctx t = case t of
TmVar i _ -> case getBinding ctx i of
VarBind{} -> True
_ -> False
TmAbs{} -> True
TmTAbs{} -> True
TmRecord fields -> all (\(_, vi) -> isval ctx vi) fields
TmTag _ vs _ -> all (isval ctx) vs
TmApp (TmFold _) t -> isval ctx t
_ -> False
eval :: Context -> Term -> Term
eval ctx t = maybe t (eval ctx) (eval' t)
where
eval' :: Term -> Maybe Term
eval' t = case t of
TmVar i _ -> case getBinding ctx i of
TmAbbBind t _ -> Just t
_ -> Nothing
TmApp (TmUnfold _) (TmApp (TmFold _) v) | isval ctx v -> Just v
TmApp (TmFold tyS) t2 -> do
t2' <- eval' t2
Just $ TmApp (TmFold tyS) t2'
TmApp (TmUnfold tyS) t2 -> do
t2' <- eval' t2
Just $ TmApp (TmUnfold tyS) t2'
TmApp (TmAbs _ _ t12) v2 | isval ctx v2 -> return $ termSubstTop v2 t12
TmApp v1 t2 | isval ctx v1 -> do
t2' <- eval' t2
return $ TmApp v1 t2'
TmApp v1 t2 | isval ctx v1 -> do
t2' <- eval' t2
Just $ TmApp v1 t2'
TmApp t1 t2 -> do
t1' <- eval' t1
Just $ TmApp t1' t2
TmTApp (TmTAbs _ t11) tyT2 -> Just $ tytermSubstTop tyT2 t11
TmTApp t1 tyT2 -> do
t1' <- eval' t1
Just $ TmTApp t1' tyT2
TmLet _ v1 t2 | isval ctx v1 -> Just $ termSubstTop v1 t2
TmLet x t1 t2 -> do
t1' <- eval' t1
Just $ TmLet x t1' t2
TmFix (TmAbs _ _ t12) -> Just $ termSubstTop t t12
TmFix t1 -> do
t1' <- eval' t1
Just $ TmFix t1'
TmProj (TmRecord fields) l -> lookup l fields
TmProj t1 l -> do
t1' <- eval' t1
Just $ TmProj t1' l
TmRecord fields -> do
fields' <- forM fields $ \field -> case field of
(li, vi) | isval ctx vi -> Just (li, vi)
(li, ti) -> do
ti' <- eval' ti
Just (li, ti')
Just $ TmRecord fields'
TmTag _ vs _ | all (isval ctx) vs -> Nothing
TmTag l ts tyT -> do
ts' <- mapM eval' ts
Just $ TmTag l ts' tyT
TmCase (TmTag li vs11 _) alts | all (isval ctx) vs11 -> case lookup li alts of
Just (_, body) -> Just $ foldr termSubstTop body vs11
Nothing -> Nothing
TmCase t1 alts -> do
t1' <- eval' t1
Just $ TmCase t1' alts
_ -> Nothing |
4c9146ff193ad0773d40c807c95420101d792ad6935fc944907b0fc400424e27 | lucian303/corenlpd | layout.clj | (ns corenlpd.layout
(:require [selmer.parser :as parser]
[selmer.filters :as filters]
[markdown.core :refer [md-to-html-string]]
[ring.util.response :refer [content-type response]]
[compojure.response :refer [Renderable]]
[ring.util.anti-forgery :refer [anti-forgery-field]]
[ring.middleware.anti-forgery :refer [*anti-forgery-token*]]
[environ.core :refer [env]]))
(declare ^:dynamic *servlet-context*)
(parser/set-resource-path! (clojure.java.io/resource "templates"))
(parser/add-tag! :csrf-field (fn [_ _] (anti-forgery-field)))
(filters/add-filter! :markdown (fn [content] [:safe (md-to-html-string content)]))
(defn render [template & [params]]
(-> template
(parser/render-file
(assoc params
:page template
:dev (env :dev)
:csrf-token *anti-forgery-token*
:servlet-context *servlet-context*
))
response
(content-type "text/html; charset=utf-8")))
| null | https://raw.githubusercontent.com/lucian303/corenlpd/51454aaa0cbed40f0bea7d9be47aac5d224e31ef/src/corenlpd/layout.clj | clojure | (ns corenlpd.layout
(:require [selmer.parser :as parser]
[selmer.filters :as filters]
[markdown.core :refer [md-to-html-string]]
[ring.util.response :refer [content-type response]]
[compojure.response :refer [Renderable]]
[ring.util.anti-forgery :refer [anti-forgery-field]]
[ring.middleware.anti-forgery :refer [*anti-forgery-token*]]
[environ.core :refer [env]]))
(declare ^:dynamic *servlet-context*)
(parser/set-resource-path! (clojure.java.io/resource "templates"))
(parser/add-tag! :csrf-field (fn [_ _] (anti-forgery-field)))
(filters/add-filter! :markdown (fn [content] [:safe (md-to-html-string content)]))
(defn render [template & [params]]
(-> template
(parser/render-file
(assoc params
:page template
:dev (env :dev)
:csrf-token *anti-forgery-token*
:servlet-context *servlet-context*
))
response
(content-type "text/html; charset=utf-8")))
| |
906e76e207f7645b75769cf6606ccfeb16d2feb8337ec5b179487c3ea366b235 | facebookarchive/duckling_old | cycles.clj | ; Cycles are like a heart beat, the next starts just when the previous ends.
; Unlike durations, they have an absolute position in the time, it's just that this position is periodic.
; Examples of phrases involving cycles:
- this week
; - today (= this day)
; - last month
; - last 2 calendar months (last 2 months is interpreted as a duration)
;
As soon as you put a quantity ( 2 months ) , the cycle becomes a duration .
(
"second (cycle)"
#"(?i)sekunden?"
{:dim :cycle
:grain :second}
"minute (cycle)"
#"(?i)minuten?"
{:dim :cycle
:grain :minute}
"hour (cycle)"
#"(?i)stunden?"
{:dim :cycle
:grain :hour}
"day (cycle)"
#"(?i)tage?n?"
{:dim :cycle
:grain :day}
"week (cycle)"
#"(?i)wochen?"
{:dim :cycle
:grain :week}
"month (cycle)"
#"(?i)monate?n?"
{:dim :cycle
:grain :month}
"quarter (cycle)"
#"(?i)quartale?"
{:dim :cycle
:grain :quarter}
"year (cycle)"
#"(?i)jahre?n?"
{:dim :cycle
:grain :year}
"this <cycle>"
[#"(?i)diese(r|n|s)?|kommende(r|n|s)?" (dim :cycle)]
(cycle-nth (:grain %2) 0)
"last <cycle>"
[#"(?i)letzte(r|n|s)?|vergangene(r|n|s)?" (dim :cycle)]
(cycle-nth (:grain %2) -1)
"next <cycle>"
[#"(?i)nächste(r|n|s)?|kommende(r|n|s)?" (dim :cycle)]
(cycle-nth (:grain %2) 1)
"the <cycle> after <time>"
[#"(?i)der" (dim :cycle) #"(?i)nach" (dim :time)]
(cycle-nth-after (:grain %2) 1 %4)
"the <cycle> before <time>"
[#"(?i)der" (dim :cycle) #"(?i)vor" (dim :time)]
(cycle-nth-after (:grain %2) -1 %4)
"last n <cycle>"
[#"(?i)letzten?|vergangenen?" (integer 1 9999) (dim :cycle)]
(cycle-n-not-immediate (:grain %3) (- (:value %2)))
"next n <cycle>"
[#"(?i)nächsten?|kommenden?" (integer 1 9999) (dim :cycle)]
(cycle-n-not-immediate (:grain %3) (:value %2))
"<ordinal> <cycle> of <time>"
[(dim :ordinal) (dim :cycle) #"(?i)im|in|von" (dim :time)]
(cycle-nth-after-not-immediate (:grain %2) (dec (:value %1)) %4)
"the <ordinal> <cycle> of <time>"
[#"(?i)der|die|das" (dim :ordinal) (dim :cycle) #"(?i)im|in|von" (dim :time)]
(cycle-nth-after-not-immediate (:grain %3) (dec (:value %2)) %5)
the 2 following rules may need a different helper
"<ordinal> <cycle> after <time>"
[(dim :ordinal) (dim :cycle) #"(?i)nach" (dim :time)]
(cycle-nth-after-not-immediate (:grain %2) (dec (:value %1)) %4)
"the <ordinal> <cycle> after <time>"
[#"(?i)the" (dim :ordinal) (dim :cycle) #"(?i)nach" (dim :time)]
(cycle-nth-after-not-immediate (:grain %3) (dec (:value %2)) %5)
quarters are a little bit different , you can say " 3rd quarter " alone
"<ordinal> quarter"
[(dim :ordinal) (dim :cycle #(= :quarter (:grain %)))]
(cycle-nth-after :quarter (dec (:value %1)) (cycle-nth :year 0))
"<ordinal> quarter <year>"
[(dim :ordinal) (dim :cycle #(= :quarter (:grain %))) (dim :time)]
(cycle-nth-after :quarter (dec (:value %1)) %3))
| null | https://raw.githubusercontent.com/facebookarchive/duckling_old/bf5bb9758c36313b56e136a28ba401696eeff10b/resources/languages/de/rules/cycles.clj | clojure | Cycles are like a heart beat, the next starts just when the previous ends.
Unlike durations, they have an absolute position in the time, it's just that this position is periodic.
Examples of phrases involving cycles:
- today (= this day)
- last month
- last 2 calendar months (last 2 months is interpreted as a duration)
| - this week
As soon as you put a quantity ( 2 months ) , the cycle becomes a duration .
(
"second (cycle)"
#"(?i)sekunden?"
{:dim :cycle
:grain :second}
"minute (cycle)"
#"(?i)minuten?"
{:dim :cycle
:grain :minute}
"hour (cycle)"
#"(?i)stunden?"
{:dim :cycle
:grain :hour}
"day (cycle)"
#"(?i)tage?n?"
{:dim :cycle
:grain :day}
"week (cycle)"
#"(?i)wochen?"
{:dim :cycle
:grain :week}
"month (cycle)"
#"(?i)monate?n?"
{:dim :cycle
:grain :month}
"quarter (cycle)"
#"(?i)quartale?"
{:dim :cycle
:grain :quarter}
"year (cycle)"
#"(?i)jahre?n?"
{:dim :cycle
:grain :year}
"this <cycle>"
[#"(?i)diese(r|n|s)?|kommende(r|n|s)?" (dim :cycle)]
(cycle-nth (:grain %2) 0)
"last <cycle>"
[#"(?i)letzte(r|n|s)?|vergangene(r|n|s)?" (dim :cycle)]
(cycle-nth (:grain %2) -1)
"next <cycle>"
[#"(?i)nächste(r|n|s)?|kommende(r|n|s)?" (dim :cycle)]
(cycle-nth (:grain %2) 1)
"the <cycle> after <time>"
[#"(?i)der" (dim :cycle) #"(?i)nach" (dim :time)]
(cycle-nth-after (:grain %2) 1 %4)
"the <cycle> before <time>"
[#"(?i)der" (dim :cycle) #"(?i)vor" (dim :time)]
(cycle-nth-after (:grain %2) -1 %4)
"last n <cycle>"
[#"(?i)letzten?|vergangenen?" (integer 1 9999) (dim :cycle)]
(cycle-n-not-immediate (:grain %3) (- (:value %2)))
"next n <cycle>"
[#"(?i)nächsten?|kommenden?" (integer 1 9999) (dim :cycle)]
(cycle-n-not-immediate (:grain %3) (:value %2))
"<ordinal> <cycle> of <time>"
[(dim :ordinal) (dim :cycle) #"(?i)im|in|von" (dim :time)]
(cycle-nth-after-not-immediate (:grain %2) (dec (:value %1)) %4)
"the <ordinal> <cycle> of <time>"
[#"(?i)der|die|das" (dim :ordinal) (dim :cycle) #"(?i)im|in|von" (dim :time)]
(cycle-nth-after-not-immediate (:grain %3) (dec (:value %2)) %5)
the 2 following rules may need a different helper
"<ordinal> <cycle> after <time>"
[(dim :ordinal) (dim :cycle) #"(?i)nach" (dim :time)]
(cycle-nth-after-not-immediate (:grain %2) (dec (:value %1)) %4)
"the <ordinal> <cycle> after <time>"
[#"(?i)the" (dim :ordinal) (dim :cycle) #"(?i)nach" (dim :time)]
(cycle-nth-after-not-immediate (:grain %3) (dec (:value %2)) %5)
quarters are a little bit different , you can say " 3rd quarter " alone
"<ordinal> quarter"
[(dim :ordinal) (dim :cycle #(= :quarter (:grain %)))]
(cycle-nth-after :quarter (dec (:value %1)) (cycle-nth :year 0))
"<ordinal> quarter <year>"
[(dim :ordinal) (dim :cycle #(= :quarter (:grain %))) (dim :time)]
(cycle-nth-after :quarter (dec (:value %1)) %3))
|
8a26f6deba9c6e918722bc1f7fe05d254d93bb930b338f8c9234895e73f0ecfc | finnishtransportagency/harja | navigaatio.cljs | (ns harja.tiedot.navigaatio
"Tämä nimiavaruus hallinnoi sovelluksen navigoinnin. Sisältää atomit, joilla eri sivuja ja polkua
sovelluksessa ohjataan sekä kytkeytyy selaimen osoitepalkin #-polkuun ja historiaan. Tämä nimiavaruus
ei viittaa itse näkymiin, vaan näkymät voivat hakea täältä tarvitsemansa navigointitiedot."
(:require
;; Reititykset
[goog.events :as events]
[goog.Uri :as Uri]
[goog.history.EventType :as EventType]
[reagent.core :refer [atom wrap]]
[cljs.core.async :refer [<! >! chan close!]]
[harja.loki :refer [log tarkkaile!]]
[harja.asiakas.tapahtumat :as t]
[harja.tiedot.urakoitsijat :as urk]
[harja.tiedot.hallintayksikot :as hy]
[harja.tiedot.istunto :as istunto]
[harja.tiedot.urakat :as ur]
[harja.tiedot.raportit :as raportit]
[harja.tiedot.navigaatio.reitit :as reitit]
[harja.tiedot.hallinta.integraatioloki :as integraatioloki]
[harja.atom :refer-macros [reaction<! reaction-writable]]
[harja.pvm :as pvm]
[clojure.string :as str]
[harja.geo :as geo]
[harja.domain.oikeudet :as oikeudet]
[harja.domain.urakka :as urakka-domain]
[taoensso.timbre :as log])
(:require-macros [cljs.core.async.macros :refer [go]]
[reagent.ratom :refer [reaction run!]])
(:import goog.History))
(def valittu-valilehti reitit/valittu-valilehti)
(def valittu-valilehti-atom reitit/valittu-valilehti-atom)
(def aseta-valittu-valilehti! reitit/aseta-valittu-valilehti!)
(def valittu-sivu (reaction (get @reitit/url-navigaatio :sivu)))
(declare kasittele-url! paivita-url valitse-urakka!)
(defonce murupolku-nakyvissa? (reaction (and (not @raportit/raportit-nakymassa?)
(not= @valittu-sivu :tilannekuva)
(not= @valittu-sivu :info)
(not= @valittu-sivu :tieluvat)
(not= @valittu-sivu :about)
(not= @valittu-sivu :hallinta))))
(defonce kartan-extent (atom nil))
(defonce kartalla-nakyva-alue
Näkyvä alue ,
muuttaa zoom - tasoa tai raahaa karttaa , se asetetaan näkyvään alueeseen .
(atom
(let [[minx miny maxx maxy] @kartan-extent]
{:xmin minx :ymin miny
:xmax maxx :ymax maxy})))
(def kartan-nakyvan-alueen-koko
(reaction
((comp geo/extent-hypotenuusa (juxt :xmin :ymin :xmax :ymax))
@kartalla-nakyva-alue)))
Kartan
;; :hidden (ei näy mitään)
: S ( näkyy -nappi )
: M ( )
: L ( )
(defonce ^{:doc "Kartan koko"} kartan-kokovalinta (atom :S))
(defn vaihda-kartan-koko! [uusi-koko]
(let [vanha-koko @kartan-kokovalinta]
(when uusi-koko
(reset! kartan-kokovalinta uusi-koko)
(t/julkaise! {:aihe :kartan-koko-vaihdettu
:vanha-koko vanha-koko
:uusi-koko uusi-koko}))))
(def valittu-vaylamuoto "Tällä hetkellä valittu väylämuoto" (atom :tie))
(defn vaihda-vaylamuoto! [ut]
muutetaan
(reset! valittu-vaylamuoto (if (= :vesivayla (:arvo ut)) :vesi :tie)))
(def +urakkatyypit+
(filterv
some?
[{:nimi "Hoito" :arvo :hoito}
{:nimi "Tiemerkintä" :arvo :tiemerkinta}
{:nimi "Päällystys" :arvo :paallystys}
{:nimi "Valaistus" :arvo :valaistus}
{:nimi "Siltakorjaus" :arvo :siltakorjaus}
{:nimi "Tekniset laitteet" :arvo :tekniset-laitteet}
" " ei ole , vaan väylämuoto
Vesi - väylämuotoon liittyy todellisuudessa monia urakkatyyppejä ,
kuten hoito , , ..
näitä kaikkia tyyppejä käsitellään Harjan käyttöliittymässä samalla tavalla .
hoito on Vesiväylät - väylämuodon alla . .
(when (istunto/ominaisuus-kaytossa? :vesivayla) {:nimi "Vesiväylät ja kanavat" :arvo :vesivayla})]))
(def +urakkatyypit-ja-kaikki+
(into [{:nimi "Kaikki" :arvo :kaikki}]
+urakkatyypit+))
(defn urakkatyyppi-arvolle [tyyppi]
(when tyyppi
(let [tyyppi (if (str/starts-with? (name tyyppi) "vesivayla")
:vesivayla
tyyppi)]
(if (= tyyppi :teiden-hoito)
{:nimi "Hoito" :arvo :teiden-hoito}
(first (filter #(= tyyppi (:arvo %))
+urakkatyypit+))))))
(defn nayta-urakkatyyppi [tyyppi]
(when tyyppi
(let [tyyppi (if (str/starts-with? (name tyyppi) "vesivayla")
:vesivayla
tyyppi)]
(:nimi (first
(filter #(= tyyppi (:arvo %))
+urakkatyypit+))))))
(defn urakkatyyppi-urakalle [ur]
(when ur
(urakkatyyppi-arvolle (if (urakka-domain/vesivaylaurakka? ur)
:vesivayla
(:tyyppi ur)))))
(def valittu-urakoitsija "Suodatusta varten valittu urakoitsija
tätä valintaa voi käyttää esim. alueurakoitden
urakoitsijakohtaiseen suodatukseen" (atom nil)) ;;(= nil kaikki)
Hallintayksikön valinta id : ( URL parametrista )
(defonce valittu-hallintayksikko-id (atom nil))
Atomi , valitun hallintayksikön
(defonce valittu-hallintayksikko
(reaction (let [id @valittu-hallintayksikko-id
yksikot @hy/vaylamuodon-hallintayksikot]
(when (and id yksikot)
(some #(and (= id (:id %)) %) yksikot)))))
id : n perusteella ( ) , asetetaan se tänne
(defonce valittu-urakka-id (atom nil))
Atomi , valitun hallintayksikön urakat
(defonce hallintayksikon-urakkalista
(reaction<! [yks @valittu-hallintayksikko]
(when yks
(ur/hae-hallintayksikon-urakat yks))))
Atomi , ( tai nil )
Älä resetoi , vaan urakan valittu - urakka - id
(defonce valittu-urakka
(reaction
(let [id @valittu-urakka-id
urakat @hallintayksikon-urakkalista]
(when (and id urakat)
(some #(when (= id (:id %)) %) urakat)))))
Käyttäjän .
ei kuitenkaan ole tämä , vaan alla oleva reaction ( , älä tätä )
Älä myöskään aseta suoraan , käytä vaihda - urakkatyyppi !
(defonce ^{:private true} valittu-urakkatyyppi (atom nil))
valittu väylämuodosta riippuvainen urakkatyyppi
urakkarooleja , valitaan
(defonce urakkatyyppi
(reaction<! [kayttajan-oletus-tyyppi (:urakkatyyppi @istunto/kayttaja)
Jos urakka on valittuna ,
urakan-urakkatyyppi (urakkatyyppi-urakalle @valittu-urakka)
, asetetaan se tyypiksi
valittu-urakkatyyppi @valittu-urakkatyyppi
, onko
oletus-urakkatyyppi (urakkatyyppi-arvolle (:urakkatyyppi @istunto/kayttaja))
valittu-hy-id @valittu-hallintayksikko-id]
(go
(or urakan-urakkatyyppi
valittu-urakkatyyppi
on valittuna ,
. on : tie , ,
onko . , : hoito
Koska hallintayksiköistä ei ole välttämättä , täytyy tässä
ottaa
(when valittu-hy-id
(urakkatyyppi-arvolle
(case (<! (hy/hallintayksikon-vaylamuoto valittu-hy-id))
:tie
(if-not (= :vesivayla kayttajan-oletus-tyyppi)
kayttajan-oletus-tyyppi
:hoito)
:vesi
:vesivayla
nil)))
oletus-urakkatyyppi))))
(defn vaihda-urakkatyyppi!
"Vaihtaa urakkatyypin ja resetoi valitun urakoitsijan, jos kyseinen urakoitsija ei
löydy valitun tyyppisten urakoitsijain listasta."
[ut]
(go
(when (not= @valittu-urakkatyyppi ut)
(valitse-urakka! nil))
(reset! valittu-urakkatyyppi ut)
(vaihda-vaylamuoto! ut)
(<! (hy/aseta-hallintayksikot-vaylamuodolle! @valittu-vaylamuoto))
(swap! valittu-urakoitsija
#(let [nykyisen-urakkatyypin-urakoitsijat
(case (:arvo ut)
:kaikki @urk/urakoitsijat-kaikki
:hoito @urk/urakoitsijat-hoito
:teiden-hoito @urk/urakoitsijat-hoito
:paallystys @urk/urakoitsijat-paallystys
:tiemerkinta @urk/urakoitsijat-tiemerkinta
:valaistus @urk/urakoitsijat-valaistus
:siltakorjaus @urk/urakoitsijat-siltakorjaus
:tekniset-laitteet @urk/urakoitsijat-tekniset-laitteet
:vesivayla @urk/urakoitsijat-vesivaylat)]
(if (nykyisen-urakkatyypin-urakoitsijat (:id %))
%
nil)))))
(def tarvitsen-isoa-karttaa "Set käyttöliittymänäkymiä (keyword), jotka haluavat pakottaa kartan näkyviin.
Jos tässä setissä on itemeitä, tulisi kartta pakottaa näkyviin :L kokoisena vaikka se ei olisikaan muuten näkyvissä."
(atom #{}))
jos haluat palauttaa kartan edelliseen kokoon , ( esim . Valitse kartalta -toiminto )
(def kartan-edellinen-koko (atom nil))
(def kartan-koko
"Kartan laskettu koko riippuu kartan kokovalinnasta sekä kartan pakotteista."
(reaction (let [valittu-koko @kartan-kokovalinta
sivu (valittu-valilehti :sivu)
v-ur @valittu-urakka
tarvitsen-isoa-karttaa @tarvitsen-isoa-karttaa]
(if-not (empty? tarvitsen-isoa-karttaa)
:L
Ei kartan pakotteita , tehdään sivukohtaisia special caseja
tai
(cond (and
(= sivu :hallinta)
Hallintavälilehdellä ei näytetä karttaa , paitsi luonnissa
Tähän tarvitaan molemmat tarkastukset , koska vesiväylä - hallinan
voi jäädä , vaikka siirryttäisikiin esim integraatiolokiin
(not= (valittu-valilehti :hallinta) :toteumatyokalu)
(not= (valittu-valilehti :hallinta) :vesivayla-hallinta)
(not= (valittu-valilehti :vesivayla-hallinta) :kanavaurakoiden-kohteiden-luonti))
:hidden
(= sivu :about) :hidden
(= sivu :tilannekuva) :XL
(and (= sivu :urakat)
(not v-ur)) :XL
:default valittu-koko)))))
(def kartta-nakyvissa?
"Kartta ei piilotettu"
(reaction (let [koko @kartan-koko]
(and (not= :S koko)
(not= :hidden koko)))))
(def kartan-kontrollit-nakyvissa?
(reaction
(let [sivu (valittu-valilehti :sivu)]
ole ,
JA ei olla tilannekuvassa , urakoissa TAI urakkaa ei ole .
(and
(empty? @tarvitsen-isoa-karttaa)
(not= sivu :tilannekuva)
(or
(not= sivu :urakat)
(some? @valittu-urakka))))))
(defn aseta-hallintayksikko-ja-urakka [hy-id ur]
(reset! valittu-hallintayksikko-id hy-id)
go block , että vaihda - urakkatyyppi ! suorittaa
hy / aseta - hallintayksikot - vaylamuodolle ! , !
funktiota kutsutaan , ! triggeröi ! funktion
id : n nilliksi .
(go (<! (vaihda-urakkatyyppi! (urakkatyyppi-urakalle ur)))
(valitse-urakka! ur)))
(defn aseta-hallintayksikko-ja-urakka-id! [hy-id ur-id]
(log/info "ASETA HY: " hy-id ", UR: " ur-id)
(reset! valittu-hallintayksikko-id hy-id)
(reset! valittu-urakka-id ur-id))
(defn valitse-urakoitsija! [u]
(reset! valittu-urakoitsija u))
Rajapinta hallintayksikön valitsemiseen , jota viewit voivat
(defn valitse-hallintayksikko-id! [id]
(reset! valittu-hallintayksikko-id id)
(reset! valittu-urakka-id nil)
(paivita-url))
(defn valitse-hallintayksikko! [yks]
(valitse-hallintayksikko-id! (:id yks)))
(defn valitse-urakka-id! [id]
(reset! valittu-urakka-id id)
(paivita-url))
(defn valitse-urakka! [ur]
(valitse-urakka-id! (:id ur))
(log "VALITTIIN URAKKA: " (pr-str (dissoc ur :alue))))
(defonce urakka-klikkaus-kuuntelija
(t/kuuntele! :urakka-klikattu
(fn [urakka]
(valitse-urakka! urakka))))
;; Quick and dirty history configuration.
(defonce historia (let [h (History. false)]
(events/listen h EventType/NAVIGATE
#(kasittele-url! (.-token %)))
h))
(defn nykyinen-url []
(str (reitit/muodosta-polku @reitit/url-navigaatio)
"?"
(when-let [hy @valittu-hallintayksikko-id] (str "&hy=" hy))
(when-let [u @valittu-urakka-id] (str "&u=" u))))
(defonce ^{:doc "Tämä lippu voi estää URL tokenin päivittämisen, käytetään siirtymissä, joissa
halutaan tehdä useita muutoksia ilman että välissä pävitetään URLia keskeneräisenä."}
esta-url-paivitys? (cljs.core/atom false))
asettaa oikean sisällön perusteella
(defn paivita-url []
(when-not @esta-url-paivitys?
(let [url (nykyinen-url)]
(when (not= url (.-token historia))
(log "URL != token :: " url " != " (.getToken historia))
(.setToken historia url)))))
(defn esta-url-paivitys!
"Estä URL päivitykset kunnes salli-url-paivitys! kutsutaan."
[]
(reset! esta-url-paivitys? true))
(defn salli-url-paivitys!
"Salli URL päivitys ja tee päivitys nyt"
[]
(reset! esta-url-paivitys? false)
(paivita-url))
(defn vaihda-sivu!
"Vaihda nykyinen sivu haluttuun."
[uusi-sivu]
(when-not (= (valittu-valilehti :sivu) uusi-sivu)
(reitit/aseta-valittu-valilehti! :sivu uusi-sivu)))
(def suodatettu-urakkalista "Urakat suodatettuna urakkatyypin ja urakoitsijan mukaan."
(reaction
(let [v-ur-tyyppi (:arvo @urakkatyyppi)
v-urk @valittu-urakoitsija
urakkalista @hallintayksikon-urakkalista
kayttajan-urakat (set (map key (:urakkaroolit @istunto/kayttaja)))]
(when urakkalista
(into []
(comp (filter #(or (= :kaikki v-ur-tyyppi)
(= v-ur-tyyppi (:tyyppi %))
(and (= v-ur-tyyppi :hoito)
(= (:tyyppi %) :teiden-hoito))
(and (= v-ur-tyyppi :teiden-hoito)
(= (:tyyppi %) :hoito))
(and (= v-ur-tyyppi :vesivayla)
(urakka-domain/vesivaylaurakka? %))))
(filter #(or
(kayttajan-urakat (:id %))
(or (nil? v-urk) (= (:id v-urk) (:id (:urakoitsija %))))))
(filter #(oikeudet/voi-lukea? oikeudet/urakat (:id %) @istunto/kayttaja)))
urakkalista)))))
(def urakat-kartalla "Sisältää suodatetuista urakoista aktiiviset"
(reaction (into []
(filter #(pvm/ennen? (pvm/nyt) (:loppupvm %)))
@suodatettu-urakkalista)))
(def render-lupa-hy? (reaction
(some? @hy/vaylamuodon-hallintayksikot)))
(def render-lupa-u? (reaction
, ei estetä
hy : tä ei saatu asetettua - > ei estetä
(some? @hallintayksikon-urakkalista))))
(def render-lupa-url-kasitelty? (atom false))
: evätään render - lupa ? ennen on
(def render-lupa? (reaction
(and @render-lupa-hy? @render-lupa-u?
@render-lupa-url-kasitelty?)))
(defonce urlia-kasitellaan? (atom false))
(defn kasittele-url!
"Käsittelee urlin (route) muutokset."
[url]
(reset! urlia-kasitellaan? true)
(go
(let [uri (Uri/parse url)
polku (.getPath uri)
parametrit (.getQueryData uri)]
(reset! valittu-hallintayksikko-id (some-> parametrit (.get "hy") js/parseInt))
(reset! valittu-urakka-id (some-> parametrit (.get "u") js/parseInt))
, on , jotta voidaan
;; väylämuodon hallintayksiköt
(when (= polku "urakat/yleiset")
(let [vesivaylaurakka? (urakka-domain/vesivaylaurakkatyyppi? (:urakkatyyppi @istunto/kayttaja))
arvo (if vesivaylaurakka?
{:arvo :vesivayla}
{:arvo :hoito})]
(vaihda-vaylamuoto! arvo)))
(when @valittu-hallintayksikko-id
(reset! valittu-vaylamuoto (<! (hy/hallintayksikon-vaylamuoto @valittu-hallintayksikko-id))))
(<! (hy/aseta-hallintayksikot-vaylamuodolle! @valittu-vaylamuoto))
(swap! reitit/url-navigaatio
reitit/tulkitse-polku polku)
Käsitellään linkit yksittäisiin integraatiolokin viesteihin
(when (and (= polku "hallinta/integraatiotilanne/integraatioloki")
(.get parametrit "valittu-jarjestelma")
(.get parametrit "valittu-integraatio")
(.get parametrit "tapahtuma-id")
(.get parametrit "alkanut"))
(let [jarjestelmat (<! (integraatioloki/hae-jarjestelmien-integraatiot))
jarjestelma (.get parametrit "valittu-jarjestelma")
alkanut-pvm (pvm/iso-8601->pvm (.get parametrit "alkanut"))]
(reset! integraatioloki/valittu-jarjestelma (some #(when (= jarjestelma (:jarjestelma %))
%)
jarjestelmat))
(reset! integraatioloki/valittu-integraatio (.get parametrit "valittu-integraatio"))
(reset! integraatioloki/tapahtuma-id #{(try (js/parseInt (.get parametrit "tapahtuma-id"))
(catch :default e
nil))})
(reset! integraatioloki/nayta-uusimmat-tilassa? false)
(reset! integraatioloki/valittu-aikavali [alkanut-pvm alkanut-pvm])
(reset! integraatioloki/tultiin-urlin-kautta true)
Paivitetään url , jotta parametrit eivät enään näy urlissa
(paivita-url))))
(reset! render-lupa-url-kasitelty? true)
(log "Render-lupa annettu!")
(t/julkaise! {:aihe :url-muuttui :url url})
(reset! urlia-kasitellaan? false)))
(.setEnabled historia true)
(defonce paivita-url-navigaatiotilan-muuttuessa
(add-watch reitit/url-navigaatio
::url-muutos
(fn [_ _ vanha uusi]
(when (and (not @urlia-kasitellaan?) (not= vanha uusi))
(paivita-url)))))
(defn paivita-urakan-tiedot! [urakka-id funktio & args]
(swap! hallintayksikon-urakkalista
(fn [urakat]
(mapv (fn [urakka]
(if (= (:id urakka) urakka-id)
(apply funktio urakka args)
urakka))
urakat))))
. Tilannekuvassa halutaan näyttää selite urakkarajoille , jos alueita on valittu . Syklisen riippuvuuden
tänne .
(def tilannekuvassa-alueita-valittu? (atom false))
(defn yllapitourakka-valittu? []
(let [urakkatyyppi (:arvo @urakkatyyppi)]
(or (= urakkatyyppi :paallystys)
(= urakkatyyppi :tiemerkinta))))
| null | https://raw.githubusercontent.com/finnishtransportagency/harja/cdd0f1cfdf0c66de83a0f1a7f23d9b4701691ebe/src/cljs/harja/tiedot/navigaatio.cljs | clojure | Reititykset
:hidden (ei näy mitään)
(= nil kaikki)
Quick and dirty history configuration.
väylämuodon hallintayksiköt | (ns harja.tiedot.navigaatio
"Tämä nimiavaruus hallinnoi sovelluksen navigoinnin. Sisältää atomit, joilla eri sivuja ja polkua
sovelluksessa ohjataan sekä kytkeytyy selaimen osoitepalkin #-polkuun ja historiaan. Tämä nimiavaruus
ei viittaa itse näkymiin, vaan näkymät voivat hakea täältä tarvitsemansa navigointitiedot."
(:require
[goog.events :as events]
[goog.Uri :as Uri]
[goog.history.EventType :as EventType]
[reagent.core :refer [atom wrap]]
[cljs.core.async :refer [<! >! chan close!]]
[harja.loki :refer [log tarkkaile!]]
[harja.asiakas.tapahtumat :as t]
[harja.tiedot.urakoitsijat :as urk]
[harja.tiedot.hallintayksikot :as hy]
[harja.tiedot.istunto :as istunto]
[harja.tiedot.urakat :as ur]
[harja.tiedot.raportit :as raportit]
[harja.tiedot.navigaatio.reitit :as reitit]
[harja.tiedot.hallinta.integraatioloki :as integraatioloki]
[harja.atom :refer-macros [reaction<! reaction-writable]]
[harja.pvm :as pvm]
[clojure.string :as str]
[harja.geo :as geo]
[harja.domain.oikeudet :as oikeudet]
[harja.domain.urakka :as urakka-domain]
[taoensso.timbre :as log])
(:require-macros [cljs.core.async.macros :refer [go]]
[reagent.ratom :refer [reaction run!]])
(:import goog.History))
(def valittu-valilehti reitit/valittu-valilehti)
(def valittu-valilehti-atom reitit/valittu-valilehti-atom)
(def aseta-valittu-valilehti! reitit/aseta-valittu-valilehti!)
(def valittu-sivu (reaction (get @reitit/url-navigaatio :sivu)))
(declare kasittele-url! paivita-url valitse-urakka!)
(defonce murupolku-nakyvissa? (reaction (and (not @raportit/raportit-nakymassa?)
(not= @valittu-sivu :tilannekuva)
(not= @valittu-sivu :info)
(not= @valittu-sivu :tieluvat)
(not= @valittu-sivu :about)
(not= @valittu-sivu :hallinta))))
(defonce kartan-extent (atom nil))
(defonce kartalla-nakyva-alue
Näkyvä alue ,
muuttaa zoom - tasoa tai raahaa karttaa , se asetetaan näkyvään alueeseen .
(atom
(let [[minx miny maxx maxy] @kartan-extent]
{:xmin minx :ymin miny
:xmax maxx :ymax maxy})))
(def kartan-nakyvan-alueen-koko
(reaction
((comp geo/extent-hypotenuusa (juxt :xmin :ymin :xmax :ymax))
@kartalla-nakyva-alue)))
Kartan
: S ( näkyy -nappi )
: M ( )
: L ( )
(defonce ^{:doc "Kartan koko"} kartan-kokovalinta (atom :S))
(defn vaihda-kartan-koko!
  "Set the map size selection and broadcast a :kartan-koko-vaihdettu
  event carrying both the old and the new size. Does nothing when
  uusi-koko is nil."
  [uusi-koko]
  (when uusi-koko
    (let [vanha-koko @kartan-kokovalinta]
      (reset! kartan-kokovalinta uusi-koko)
      (t/julkaise! {:aihe :kartan-koko-vaihdettu
                    :vanha-koko vanha-koko
                    :uusi-koko uusi-koko}))))
(def valittu-vaylamuoto "Tällä hetkellä valittu väylämuoto" (atom :tie))
(defn vaihda-vaylamuoto! [ut]
muutetaan
(reset! valittu-vaylamuoto (if (= :vesivayla (:arvo ut)) :vesi :tie)))
(def +urakkatyypit+
(filterv
some?
[{:nimi "Hoito" :arvo :hoito}
{:nimi "Tiemerkintä" :arvo :tiemerkinta}
{:nimi "Päällystys" :arvo :paallystys}
{:nimi "Valaistus" :arvo :valaistus}
{:nimi "Siltakorjaus" :arvo :siltakorjaus}
{:nimi "Tekniset laitteet" :arvo :tekniset-laitteet}
" " ei ole , vaan väylämuoto
Vesi - väylämuotoon liittyy todellisuudessa monia urakkatyyppejä ,
kuten hoito , , ..
näitä kaikkia tyyppejä käsitellään Harjan käyttöliittymässä samalla tavalla .
hoito on Vesiväylät - väylämuodon alla . .
(when (istunto/ominaisuus-kaytossa? :vesivayla) {:nimi "Vesiväylät ja kanavat" :arvo :vesivayla})]))
(def +urakkatyypit-ja-kaikki+
(into [{:nimi "Kaikki" :arvo :kaikki}]
+urakkatyypit+))
(defn urakkatyyppi-arvolle [tyyppi]
(when tyyppi
(let [tyyppi (if (str/starts-with? (name tyyppi) "vesivayla")
:vesivayla
tyyppi)]
(if (= tyyppi :teiden-hoito)
{:nimi "Hoito" :arvo :teiden-hoito}
(first (filter #(= tyyppi (:arvo %))
+urakkatyypit+))))))
(defn nayta-urakkatyyppi [tyyppi]
(when tyyppi
(let [tyyppi (if (str/starts-with? (name tyyppi) "vesivayla")
:vesivayla
tyyppi)]
(:nimi (first
(filter #(= tyyppi (:arvo %))
+urakkatyypit+))))))
(defn urakkatyyppi-urakalle [ur]
(when ur
(urakkatyyppi-arvolle (if (urakka-domain/vesivaylaurakka? ur)
:vesivayla
(:tyyppi ur)))))
(def valittu-urakoitsija "Suodatusta varten valittu urakoitsija
tätä valintaa voi käyttää esim. alueurakoitden
Hallintayksikön valinta id : ( URL parametrista )
(defonce valittu-hallintayksikko-id (atom nil))
Atomi , valitun hallintayksikön
(defonce valittu-hallintayksikko
(reaction (let [id @valittu-hallintayksikko-id
yksikot @hy/vaylamuodon-hallintayksikot]
(when (and id yksikot)
(some #(and (= id (:id %)) %) yksikot)))))
id : n perusteella ( ) , asetetaan se tänne
(defonce valittu-urakka-id (atom nil))
Atomi , valitun hallintayksikön urakat
(defonce hallintayksikon-urakkalista
(reaction<! [yks @valittu-hallintayksikko]
(when yks
(ur/hae-hallintayksikon-urakat yks))))
Atomi , ( tai nil )
Älä resetoi , vaan urakan valittu - urakka - id
(defonce valittu-urakka
(reaction
(let [id @valittu-urakka-id
urakat @hallintayksikon-urakkalista]
(when (and id urakat)
(some #(when (= id (:id %)) %) urakat)))))
Käyttäjän .
ei kuitenkaan ole tämä , vaan alla oleva reaction ( , älä tätä )
Älä myöskään aseta suoraan , käytä vaihda - urakkatyyppi !
(defonce ^{:private true} valittu-urakkatyyppi (atom nil))
valittu väylämuodosta riippuvainen urakkatyyppi
urakkarooleja , valitaan
(defonce urakkatyyppi
(reaction<! [kayttajan-oletus-tyyppi (:urakkatyyppi @istunto/kayttaja)
Jos urakka on valittuna ,
urakan-urakkatyyppi (urakkatyyppi-urakalle @valittu-urakka)
, asetetaan se tyypiksi
valittu-urakkatyyppi @valittu-urakkatyyppi
, onko
oletus-urakkatyyppi (urakkatyyppi-arvolle (:urakkatyyppi @istunto/kayttaja))
valittu-hy-id @valittu-hallintayksikko-id]
(go
(or urakan-urakkatyyppi
valittu-urakkatyyppi
on valittuna ,
. on : tie , ,
onko . , : hoito
Koska hallintayksiköistä ei ole välttämättä , täytyy tässä
ottaa
(when valittu-hy-id
(urakkatyyppi-arvolle
(case (<! (hy/hallintayksikon-vaylamuoto valittu-hy-id))
:tie
(if-not (= :vesivayla kayttajan-oletus-tyyppi)
kayttajan-oletus-tyyppi
:hoito)
:vesi
:vesivayla
nil)))
oletus-urakkatyyppi))))
(defn vaihda-urakkatyyppi!
"Vaihtaa urakkatyypin ja resetoi valitun urakoitsijan, jos kyseinen urakoitsija ei
löydy valitun tyyppisten urakoitsijain listasta."
[ut]
(go
(when (not= @valittu-urakkatyyppi ut)
(valitse-urakka! nil))
(reset! valittu-urakkatyyppi ut)
(vaihda-vaylamuoto! ut)
(<! (hy/aseta-hallintayksikot-vaylamuodolle! @valittu-vaylamuoto))
(swap! valittu-urakoitsija
#(let [nykyisen-urakkatyypin-urakoitsijat
(case (:arvo ut)
:kaikki @urk/urakoitsijat-kaikki
:hoito @urk/urakoitsijat-hoito
:teiden-hoito @urk/urakoitsijat-hoito
:paallystys @urk/urakoitsijat-paallystys
:tiemerkinta @urk/urakoitsijat-tiemerkinta
:valaistus @urk/urakoitsijat-valaistus
:siltakorjaus @urk/urakoitsijat-siltakorjaus
:tekniset-laitteet @urk/urakoitsijat-tekniset-laitteet
:vesivayla @urk/urakoitsijat-vesivaylat)]
(if (nykyisen-urakkatyypin-urakoitsijat (:id %))
%
nil)))))
(def tarvitsen-isoa-karttaa "Set käyttöliittymänäkymiä (keyword), jotka haluavat pakottaa kartan näkyviin.
Jos tässä setissä on itemeitä, tulisi kartta pakottaa näkyviin :L kokoisena vaikka se ei olisikaan muuten näkyvissä."
(atom #{}))
jos haluat palauttaa kartan edelliseen kokoon , ( esim . Valitse kartalta -toiminto )
(def kartan-edellinen-koko (atom nil))
(def kartan-koko
"Kartan laskettu koko riippuu kartan kokovalinnasta sekä kartan pakotteista."
(reaction (let [valittu-koko @kartan-kokovalinta
sivu (valittu-valilehti :sivu)
v-ur @valittu-urakka
tarvitsen-isoa-karttaa @tarvitsen-isoa-karttaa]
(if-not (empty? tarvitsen-isoa-karttaa)
:L
Ei kartan pakotteita , tehdään sivukohtaisia special caseja
tai
(cond (and
(= sivu :hallinta)
Hallintavälilehdellä ei näytetä karttaa , paitsi luonnissa
Tähän tarvitaan molemmat tarkastukset , koska vesiväylä - hallinan
voi jäädä , vaikka siirryttäisikiin esim integraatiolokiin
(not= (valittu-valilehti :hallinta) :toteumatyokalu)
(not= (valittu-valilehti :hallinta) :vesivayla-hallinta)
(not= (valittu-valilehti :vesivayla-hallinta) :kanavaurakoiden-kohteiden-luonti))
:hidden
(= sivu :about) :hidden
(= sivu :tilannekuva) :XL
(and (= sivu :urakat)
(not v-ur)) :XL
:default valittu-koko)))))
(def kartta-nakyvissa?
"Kartta ei piilotettu"
(reaction (let [koko @kartan-koko]
(and (not= :S koko)
(not= :hidden koko)))))
(def kartan-kontrollit-nakyvissa?
(reaction
(let [sivu (valittu-valilehti :sivu)]
ole ,
JA ei olla tilannekuvassa , urakoissa TAI urakkaa ei ole .
(and
(empty? @tarvitsen-isoa-karttaa)
(not= sivu :tilannekuva)
(or
(not= sivu :urakat)
(some? @valittu-urakka))))))
(defn aseta-hallintayksikko-ja-urakka [hy-id ur]
(reset! valittu-hallintayksikko-id hy-id)
go block , että vaihda - urakkatyyppi ! suorittaa
hy / aseta - hallintayksikot - vaylamuodolle ! , !
funktiota kutsutaan , ! triggeröi ! funktion
id : n nilliksi .
(go (<! (vaihda-urakkatyyppi! (urakkatyyppi-urakalle ur)))
(valitse-urakka! ur)))
(defn aseta-hallintayksikko-ja-urakka-id! [hy-id ur-id]
(log/info "ASETA HY: " hy-id ", UR: " ur-id)
(reset! valittu-hallintayksikko-id hy-id)
(reset! valittu-urakka-id ur-id))
(defn valitse-urakoitsija! [u]
(reset! valittu-urakoitsija u))
Rajapinta hallintayksikön valitsemiseen , jota viewit voivat
(defn valitse-hallintayksikko-id! [id]
(reset! valittu-hallintayksikko-id id)
(reset! valittu-urakka-id nil)
(paivita-url))
(defn valitse-hallintayksikko! [yks]
(valitse-hallintayksikko-id! (:id yks)))
(defn valitse-urakka-id! [id]
(reset! valittu-urakka-id id)
(paivita-url))
(defn valitse-urakka! [ur]
(valitse-urakka-id! (:id ur))
(log "VALITTIIN URAKKA: " (pr-str (dissoc ur :alue))))
(defonce urakka-klikkaus-kuuntelija
(t/kuuntele! :urakka-klikattu
(fn [urakka]
(valitse-urakka! urakka))))
(defonce historia (let [h (History. false)]
(events/listen h EventType/NAVIGATE
#(kasittele-url! (.-token %)))
h))
(defn nykyinen-url []
(str (reitit/muodosta-polku @reitit/url-navigaatio)
"?"
(when-let [hy @valittu-hallintayksikko-id] (str "&hy=" hy))
(when-let [u @valittu-urakka-id] (str "&u=" u))))
(defonce ^{:doc "Tämä lippu voi estää URL tokenin päivittämisen, käytetään siirtymissä, joissa
halutaan tehdä useita muutoksia ilman että välissä pävitetään URLia keskeneräisenä."}
esta-url-paivitys? (cljs.core/atom false))
asettaa oikean sisällön perusteella
(defn paivita-url
  "Push the current application state into the browser URL token,
  unless updates are suppressed via esta-url-paivitys?. Only touches
  the history when the computed URL actually differs from the token.

  Fix: the old code compared against the private property (.-token
  historia) while logging (.getToken historia); under Closure advanced
  compilation the property read is unreliable, so the public accessor
  is now used consistently for both."
  []
  (when-not @esta-url-paivitys?
    (let [url (nykyinen-url)
          token (.getToken historia)]
      (when (not= url token)
        (log "URL != token :: " url " != " token)
        (.setToken historia url)))))
(defn esta-url-paivitys!
  "Suppress URL updates until salli-url-paivitys! is called."
  []
  (reset! esta-url-paivitys? true))
(defn salli-url-paivitys!
  "Re-enable URL updates and perform one immediately."
  []
  (reset! esta-url-paivitys? false)
  (paivita-url))
(defn vaihda-sivu!
  "Switch the current page to the requested one; no-op when the page
  is already selected."
  [uusi-sivu]
  (when (not= (valittu-valilehti :sivu) uusi-sivu)
    (reitit/aseta-valittu-valilehti! :sivu uusi-sivu)))
(def suodatettu-urakkalista "Urakat suodatettuna urakkatyypin ja urakoitsijan mukaan."
(reaction
(let [v-ur-tyyppi (:arvo @urakkatyyppi)
v-urk @valittu-urakoitsija
urakkalista @hallintayksikon-urakkalista
kayttajan-urakat (set (map key (:urakkaroolit @istunto/kayttaja)))]
(when urakkalista
(into []
(comp (filter #(or (= :kaikki v-ur-tyyppi)
(= v-ur-tyyppi (:tyyppi %))
(and (= v-ur-tyyppi :hoito)
(= (:tyyppi %) :teiden-hoito))
(and (= v-ur-tyyppi :teiden-hoito)
(= (:tyyppi %) :hoito))
(and (= v-ur-tyyppi :vesivayla)
(urakka-domain/vesivaylaurakka? %))))
(filter #(or
(kayttajan-urakat (:id %))
(or (nil? v-urk) (= (:id v-urk) (:id (:urakoitsija %))))))
(filter #(oikeudet/voi-lukea? oikeudet/urakat (:id %) @istunto/kayttaja)))
urakkalista)))))
(def urakat-kartalla "Sisältää suodatetuista urakoista aktiiviset"
(reaction (into []
(filter #(pvm/ennen? (pvm/nyt) (:loppupvm %)))
@suodatettu-urakkalista)))
(def render-lupa-hy? (reaction
(some? @hy/vaylamuodon-hallintayksikot)))
(def render-lupa-u? (reaction
, ei estetä
hy : tä ei saatu asetettua - > ei estetä
(some? @hallintayksikon-urakkalista))))
(def render-lupa-url-kasitelty? (atom false))
: evätään render - lupa ? ennen on
(def render-lupa? (reaction
(and @render-lupa-hy? @render-lupa-u?
@render-lupa-url-kasitelty?)))
(defonce urlia-kasitellaan? (atom false))
(defn kasittele-url!
"Käsittelee urlin (route) muutokset."
[url]
(reset! urlia-kasitellaan? true)
(go
(let [uri (Uri/parse url)
polku (.getPath uri)
parametrit (.getQueryData uri)]
(reset! valittu-hallintayksikko-id (some-> parametrit (.get "hy") js/parseInt))
(reset! valittu-urakka-id (some-> parametrit (.get "u") js/parseInt))
, on , jotta voidaan
(when (= polku "urakat/yleiset")
(let [vesivaylaurakka? (urakka-domain/vesivaylaurakkatyyppi? (:urakkatyyppi @istunto/kayttaja))
arvo (if vesivaylaurakka?
{:arvo :vesivayla}
{:arvo :hoito})]
(vaihda-vaylamuoto! arvo)))
(when @valittu-hallintayksikko-id
(reset! valittu-vaylamuoto (<! (hy/hallintayksikon-vaylamuoto @valittu-hallintayksikko-id))))
(<! (hy/aseta-hallintayksikot-vaylamuodolle! @valittu-vaylamuoto))
(swap! reitit/url-navigaatio
reitit/tulkitse-polku polku)
Käsitellään linkit yksittäisiin integraatiolokin viesteihin
(when (and (= polku "hallinta/integraatiotilanne/integraatioloki")
(.get parametrit "valittu-jarjestelma")
(.get parametrit "valittu-integraatio")
(.get parametrit "tapahtuma-id")
(.get parametrit "alkanut"))
(let [jarjestelmat (<! (integraatioloki/hae-jarjestelmien-integraatiot))
jarjestelma (.get parametrit "valittu-jarjestelma")
alkanut-pvm (pvm/iso-8601->pvm (.get parametrit "alkanut"))]
(reset! integraatioloki/valittu-jarjestelma (some #(when (= jarjestelma (:jarjestelma %))
%)
jarjestelmat))
(reset! integraatioloki/valittu-integraatio (.get parametrit "valittu-integraatio"))
(reset! integraatioloki/tapahtuma-id #{(try (js/parseInt (.get parametrit "tapahtuma-id"))
(catch :default e
nil))})
(reset! integraatioloki/nayta-uusimmat-tilassa? false)
(reset! integraatioloki/valittu-aikavali [alkanut-pvm alkanut-pvm])
(reset! integraatioloki/tultiin-urlin-kautta true)
Paivitetään url , jotta parametrit eivät enään näy urlissa
(paivita-url))))
(reset! render-lupa-url-kasitelty? true)
(log "Render-lupa annettu!")
(t/julkaise! {:aihe :url-muuttui :url url})
(reset! urlia-kasitellaan? false)))
(.setEnabled historia true)
(defonce paivita-url-navigaatiotilan-muuttuessa
(add-watch reitit/url-navigaatio
::url-muutos
(fn [_ _ vanha uusi]
(when (and (not @urlia-kasitellaan?) (not= vanha uusi))
(paivita-url)))))
(defn paivita-urakan-tiedot!
  "Update the contract (urakka) with the given id inside the
  hallintayksikon-urakkalista atom by applying (funktio urakka args...)
  to it; all other contracts are kept unchanged."
  [urakka-id funktio & args]
  (swap! hallintayksikon-urakkalista
         (fn [urakat]
           (mapv #(if (= urakka-id (:id %))
                    (apply funktio % args)
                    %)
                 urakat))))
. Tilannekuvassa halutaan näyttää selite urakkarajoille , jos alueita on valittu . Syklisen riippuvuuden
tänne .
(def tilannekuvassa-alueita-valittu? (atom false))
(defn yllapitourakka-valittu?
  "True when the currently selected contract type is one of the
  maintenance (ylläpito) types: paving or road marking."
  []
  (contains? #{:paallystys :tiemerkinta} (:arvo @urakkatyyppi)))
|
d0053d22e7b6fd30abe9be54a371d8433a55a83db68cf85042ac85c0b2a5f70f | ejgallego/coq-serapi | ser_constrexpr.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(************************************************************************)
(* Coq serialization API/Plugin *)
Copyright 2016 - 2017 MINES ParisTech
(************************************************************************)
(* Status: Very Experimental *)
(************************************************************************)
open Ppx_hash_lib.Std.Hash.Builtin
open Ppx_compare_lib.Builtin
let hash_fold_array = Ppx_hash_lib.Std.Hash.Builtin.hash_fold_array_frozen
open Sexplib.Std
module Loc = Ser_loc
module CAst = Ser_cAst
module Names = Ser_names
module Constr = Ser_constr
module UState = Ser_uState
module Namegen = Ser_namegen
module Pattern = Ser_pattern
module Evar_kinds = Ser_evar_kinds
module Genarg = Ser_genarg
module Libnames = Ser_libnames
module Glob_term = Ser_glob_term
module NumTok = Ser_numTok
module Univ = Ser_univ
module Sorts = Ser_sorts
type sort_name_expr =
[%import: Constrexpr.sort_name_expr]
[@@deriving sexp,yojson,hash,compare]
type univ_level_expr =
[%import: Constrexpr.univ_level_expr]
[@@deriving sexp,yojson,hash,compare]
type sort_expr =
[%import: Constrexpr.sort_expr]
[@@deriving sexp,yojson,hash,compare]
type univ_constraint_expr =
[%import: Constrexpr.univ_constraint_expr]
[@@deriving sexp,yojson,hash,compare]
type instance_expr =
[%import: Constrexpr.instance_expr]
[@@deriving sexp,yojson,hash,compare]
type 'a or_by_notation_r =
[%import: 'a Constrexpr.or_by_notation_r]
[@@deriving sexp,yojson,hash,compare]
type 'a or_by_notation =
[%import: 'a Constrexpr.or_by_notation]
[@@deriving sexp,yojson,hash,compare]
type universe_decl_expr =
[%import: Constrexpr.universe_decl_expr]
[@@deriving sexp,yojson,hash,compare]
type ident_decl =
[%import: Constrexpr.ident_decl]
[@@deriving sexp,yojson,hash,compare]
type cumul_univ_decl_expr =
[%import: Constrexpr.cumul_univ_decl_expr]
[@@deriving sexp,yojson,hash,compare]
type cumul_ident_decl =
[%import: Constrexpr.cumul_ident_decl]
[@@deriving sexp,yojson,hash,compare]
type name_decl =
[%import: Constrexpr.name_decl]
[@@deriving sexp,yojson,hash,compare]
type notation_with_optional_scope =
[%import: Constrexpr.notation_with_optional_scope]
[@@deriving sexp,yojson,hash,compare]
type notation_entry =
[%import: Constrexpr.notation_entry]
[@@deriving sexp,yojson,hash,compare]
type entry_level =
[%import: Constrexpr.entry_level]
[@@deriving sexp,yojson,hash,compare]
type entry_relative_level =
[%import: Constrexpr.entry_relative_level]
[@@deriving sexp,yojson,hash,compare]
type notation_entry_level =
[%import: Constrexpr.notation_entry_level]
[@@deriving sexp,yojson,hash,compare]
type notation_key =
[%import: Constrexpr.notation_key]
[@@deriving sexp,yojson,hash,compare]
type notation = [%import: Constrexpr.notation]
[@@deriving sexp,yojson,hash,compare]
type explicitation = [%import: Constrexpr.explicitation]
[@@deriving sexp,yojson,hash,compare]
type binder_kind = [%import: Constrexpr.binder_kind]
[@@deriving sexp,yojson,hash,compare]
type explicit_flag = [%import: Constrexpr.explicit_flag]
[@@deriving sexp,yojson,hash,compare]
(* type abstraction_kind = [%import: Constrexpr.abstraction_kind]
* [@@deriving sexp,yojson] *)
type proj_flag = [ % import : Constrexpr.proj_flag ]
* [ @@deriving sexp , yojson ]
* [@@deriving sexp,yojson] *)
(* type raw_numeral = [%import: Constrexpr.raw_numeral]
* [@@deriving sexp,yojson] *)
(* type sign = [%import: Constrexpr.sign]
* [@@deriving sexp,yojson] *)
type prim_token = [%import: Constrexpr.prim_token]
[@@deriving sexp,yojson,hash,compare]
type cases_pattern_expr_r = [%import: Constrexpr.cases_pattern_expr_r]
and cases_pattern_expr = [%import: Constrexpr.cases_pattern_expr]
and kinded_cases_pattern_expr = [%import: Constrexpr.kinded_cases_pattern_expr]
and cases_pattern_notation_substitution = [%import: Constrexpr.cases_pattern_notation_substitution]
and constr_expr_r = [%import: Constrexpr.constr_expr_r]
and constr_expr = [%import: Constrexpr.constr_expr]
and case_expr = [%import: Constrexpr.case_expr]
and branch_expr = [%import: Constrexpr.branch_expr]
and fix_expr = [%import: Constrexpr.fix_expr]
and cofix_expr = [%import: Constrexpr.cofix_expr]
and recursion_order_expr_r = [%import: Constrexpr.recursion_order_expr_r]
and recursion_order_expr = [%import: Constrexpr.recursion_order_expr]
and local_binder_expr = [%import: Constrexpr.local_binder_expr]
and constr_notation_substitution = [%import: Constrexpr.constr_notation_substitution]
[@@deriving sexp,yojson,hash,compare]
type constr_pattern_expr = [%import: Constrexpr.constr_pattern_expr]
[@@deriving sexp,yojson,hash,compare]
type with_declaration_ast =
[%import: Constrexpr.with_declaration_ast]
[@@deriving sexp,yojson,hash,compare]
type module_ast_r = [%import: Constrexpr.module_ast_r]
and module_ast =
[%import: Constrexpr.module_ast]
[@@deriving sexp,yojson,hash,compare]
| null | https://raw.githubusercontent.com/ejgallego/coq-serapi/dd9e3fbf7faaf3bf365fa3eff134641055151a9b/serlib/ser_constrexpr.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
**********************************************************************
Coq serialization API/Plugin
**********************************************************************
Status: Very Experimental
**********************************************************************
type abstraction_kind = [%import: Constrexpr.abstraction_kind]
* [@@deriving sexp,yojson]
type raw_numeral = [%import: Constrexpr.raw_numeral]
* [@@deriving sexp,yojson]
type sign = [%import: Constrexpr.sign]
* [@@deriving sexp,yojson] | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2017
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Copyright 2016 - 2017 MINES ParisTech
open Ppx_hash_lib.Std.Hash.Builtin
open Ppx_compare_lib.Builtin
let hash_fold_array = Ppx_hash_lib.Std.Hash.Builtin.hash_fold_array_frozen
open Sexplib.Std
(* Local aliases that rebind each Coq compiler module to its
   serializable counterpart, so the [%import]-derived types below pick
   up the sexp/yojson/hash/compare instances from the Ser_* modules. *)
module Loc = Ser_loc
module CAst = Ser_cAst
module Names = Ser_names
module Constr = Ser_constr
module UState = Ser_uState
module Namegen = Ser_namegen
module Pattern = Ser_pattern
module Evar_kinds = Ser_evar_kinds
module Genarg = Ser_genarg
module Libnames = Ser_libnames
module Glob_term = Ser_glob_term
module NumTok = Ser_numTok
module Univ = Ser_univ
module Sorts = Ser_sorts
type sort_name_expr =
[%import: Constrexpr.sort_name_expr]
[@@deriving sexp,yojson,hash,compare]
type univ_level_expr =
[%import: Constrexpr.univ_level_expr]
[@@deriving sexp,yojson,hash,compare]
type sort_expr =
[%import: Constrexpr.sort_expr]
[@@deriving sexp,yojson,hash,compare]
type univ_constraint_expr =
[%import: Constrexpr.univ_constraint_expr]
[@@deriving sexp,yojson,hash,compare]
type instance_expr =
[%import: Constrexpr.instance_expr]
[@@deriving sexp,yojson,hash,compare]
type 'a or_by_notation_r =
[%import: 'a Constrexpr.or_by_notation_r]
[@@deriving sexp,yojson,hash,compare]
type 'a or_by_notation =
[%import: 'a Constrexpr.or_by_notation]
[@@deriving sexp,yojson,hash,compare]
type universe_decl_expr =
[%import: Constrexpr.universe_decl_expr]
[@@deriving sexp,yojson,hash,compare]
type ident_decl =
[%import: Constrexpr.ident_decl]
[@@deriving sexp,yojson,hash,compare]
type cumul_univ_decl_expr =
[%import: Constrexpr.cumul_univ_decl_expr]
[@@deriving sexp,yojson,hash,compare]
type cumul_ident_decl =
[%import: Constrexpr.cumul_ident_decl]
[@@deriving sexp,yojson,hash,compare]
type name_decl =
[%import: Constrexpr.name_decl]
[@@deriving sexp,yojson,hash,compare]
type notation_with_optional_scope =
[%import: Constrexpr.notation_with_optional_scope]
[@@deriving sexp,yojson,hash,compare]
type notation_entry =
[%import: Constrexpr.notation_entry]
[@@deriving sexp,yojson,hash,compare]
type entry_level =
[%import: Constrexpr.entry_level]
[@@deriving sexp,yojson,hash,compare]
type entry_relative_level =
[%import: Constrexpr.entry_relative_level]
[@@deriving sexp,yojson,hash,compare]
type notation_entry_level =
[%import: Constrexpr.notation_entry_level]
[@@deriving sexp,yojson,hash,compare]
type notation_key =
[%import: Constrexpr.notation_key]
[@@deriving sexp,yojson,hash,compare]
type notation = [%import: Constrexpr.notation]
[@@deriving sexp,yojson,hash,compare]
type explicitation = [%import: Constrexpr.explicitation]
[@@deriving sexp,yojson,hash,compare]
type binder_kind = [%import: Constrexpr.binder_kind]
[@@deriving sexp,yojson,hash,compare]
type explicit_flag = [%import: Constrexpr.explicit_flag]
[@@deriving sexp,yojson,hash,compare]
type proj_flag = [ % import : Constrexpr.proj_flag ]
* [ @@deriving sexp , yojson ]
* [@@deriving sexp,yojson] *)
type prim_token = [%import: Constrexpr.prim_token]
[@@deriving sexp,yojson,hash,compare]
type cases_pattern_expr_r = [%import: Constrexpr.cases_pattern_expr_r]
and cases_pattern_expr = [%import: Constrexpr.cases_pattern_expr]
and kinded_cases_pattern_expr = [%import: Constrexpr.kinded_cases_pattern_expr]
and cases_pattern_notation_substitution = [%import: Constrexpr.cases_pattern_notation_substitution]
and constr_expr_r = [%import: Constrexpr.constr_expr_r]
and constr_expr = [%import: Constrexpr.constr_expr]
and case_expr = [%import: Constrexpr.case_expr]
and branch_expr = [%import: Constrexpr.branch_expr]
and fix_expr = [%import: Constrexpr.fix_expr]
and cofix_expr = [%import: Constrexpr.cofix_expr]
and recursion_order_expr_r = [%import: Constrexpr.recursion_order_expr_r]
and recursion_order_expr = [%import: Constrexpr.recursion_order_expr]
and local_binder_expr = [%import: Constrexpr.local_binder_expr]
and constr_notation_substitution = [%import: Constrexpr.constr_notation_substitution]
[@@deriving sexp,yojson,hash,compare]
type constr_pattern_expr = [%import: Constrexpr.constr_pattern_expr]
[@@deriving sexp,yojson,hash,compare]
type with_declaration_ast =
[%import: Constrexpr.with_declaration_ast]
[@@deriving sexp,yojson,hash,compare]
type module_ast_r = [%import: Constrexpr.module_ast_r]
and module_ast =
[%import: Constrexpr.module_ast]
[@@deriving sexp,yojson,hash,compare]
|
d61a50c3038744af96359cabd5c416854e2dd684abd56d74ecb61ac9922e3605 | mokus0/junkbox | FIFO.hs | -- |Queues with pre-evaluation, from
--
module Data.FIFO
( FIFO
, empty
, size
, insert
, remove
, null
, fromList
, toList
) where
import Data.List (foldl', unfoldr)
import Data.Maybe (isNothing)
import Prelude hiding (null)
-- Almost-direct translation of Figure 4
-- ('remove' has been made total)
--
-- all operations except 'size' take O(1) time to evaluate to WHNF.
-- |A FIFO queue as three lists: @FIFO l r lHat@ keeps the front of the
-- queue in @l@, the back of the queue reversed in @r@, and @lHat@ (a
-- suffix of @l@) as a progress marker for the incremental rotation.
data FIFO a = FIFO ![a] ![a] ![a]
-- |The empty queue.
empty = FIFO [] [] []
-- |Number of elements in the queue.  Unlike the other operations this
-- is O(n): it walks both the front and the back lists.
size (FIFO l r _) = length l + length r
-- |Enqueue an element at the back of the queue (it is consed onto the
-- reversed back list @r@; 'makeQ' re-establishes the invariant).
insert e (FIFO l r lHat) = makeQ l (e:r) lHat
-- |Dequeue: return the front element together with the remaining
-- queue, or 'Nothing' when the queue is empty.
--
-- The empty case now uses wildcards instead of binding @r@ and @lHat@
-- it never uses (silences -Wunused-matches; behavior unchanged).
remove (FIFO [] _ _) = Nothing
remove (FIFO (e:l) r lHat) = Just (e, makeQ l r lHat)
-- precondition: length lHat = length l - length r + 1
-- forces one cell of lHat at every queue operation, guaranteeing that
-- pattern matches on 'l' always take O(1) time. ('lHat' is always a
-- suffix of 'l')
-- |Smart constructor maintaining the rotation invariant: when the
-- progress list @lHat@ is exhausted a new incremental rotation of
-- @l@ and @r@ is started; otherwise the rotation in flight is ticked
-- forward by one cell.
makeQ l r [] = let l' = rot l r [] in FIFO l' [] l'
makeQ l r (_:lHat) = FIFO l r lHat
-- precondition: length r = length l + 1
-- this is a fusion of (l ++ reverse r) which evaluates one step of reverse
-- for each step of (++), guaranteeing that by the time any element of 'r'
-- is demanded, 'r' has been fully reversed ('a' is an accumulator for the
-- reversed part of 'r').
-- |Incremental @front ++ reverse back@: one step of the reversal is
-- performed per step of the append, so the back list is fully reversed
-- by the time any of its elements is demanded.  @acc@ accumulates the
-- already-reversed part.  Precondition: @length back == length front + 1@.
rot [] [y] acc = y : acc
rot (x:xs) (y:ys) acc = x : rot xs ys (y:acc)
-- End figure 4 --
-- |True exactly when the queue contains no elements.
null :: FIFO a -> Bool
null q = isNothing (remove q)
-- |Build a queue whose dequeue order matches the list order: the head
-- of the list is the first element removed.
--
-- Fix: the previous definition, @foldr insert empty@, enqueued the
-- LAST list element first, so @toList . fromList@ reversed its input —
-- contradicting the 'Show' instance, which renders a queue as
-- @fromList [front..back]@.  Folding from the left with a strict fold
-- inserts elements in list order and forces each intermediate queue to
-- WHNF.
fromList :: [a] -> FIFO a
fromList = foldl' (flip insert) empty
-- |List the queue's elements in dequeue (front-to-back) order.
toList :: FIFO a -> [a]
toList = unfoldr remove
-- |Render the empty queue as @empty@ and any other queue as
-- @fromList [front..back]@, parenthesised when the precedence context
-- requires it (like a constructor application).
instance Show a => Show (FIFO a) where
showsPrec p fifo
| null fifo = showString "empty"
| otherwise = showParen (p > 10)
( showString "fromList "
. showsPrec 11 (toList fifo)
) | null | https://raw.githubusercontent.com/mokus0/junkbox/151014bbef9db2b9205209df66c418d6d58b0d9e/Haskell/Data/FIFO.hs | haskell | |Queues with pre-evaluation, from
('remove' has been made total)
suffix of 'l')
for each step of (++), guaranteeing that by the time any element of 'r'
is demanded, 'r' has been fully reversed ('a' is an accumulator for the
reversed part of 'r').
| module Data.FIFO
( FIFO
, empty
, size
, insert
, remove
, null
, fromList
, toList
) where
import Data.List (unfoldr)
import Data.Maybe (isNothing)
import Prelude hiding (null)
Almost - direct translation of Figure 4
all operations except ' size ' take O(1 ) time to evaluate to WHNF .
data FIFO a = FIFO ![a] ![a] ![a]
empty = FIFO [] [] []
size (FIFO l r _) = length l + length r
insert e (FIFO l r lHat) = makeQ l (e:r) lHat
remove (FIFO [] r lHat) = Nothing
remove (FIFO (e:l) r lHat) = Just (e, makeQ l r lHat)
precondition : length lHat = length l - length r + 1
forces one cell of lHat at every queue operation , guaranteeing that
pattern matches on ' l ' always take O(1 ) time . ( ' lHat ' is always a
makeQ l r [] = let l' = rot l r [] in FIFO l' [] l'
makeQ l r (_:lHat) = FIFO l r lHat
precondition : length r = length l + 1
this is a fusion of ( l + + reverse r ) which evaluates one step of reverse
rot [] [r] a = r : a
rot (l:ls) (r:rs) a = l : rot ls rs (r:a)
null :: FIFO a -> Bool
null = isNothing . remove
fromList :: [a] -> FIFO a
fromList = foldr insert empty
toList :: FIFO a -> [a]
toList = unfoldr remove
instance Show a => Show (FIFO a) where
showsPrec p fifo
| null fifo = showString "empty"
| otherwise = showParen (p > 10)
( showString "fromList "
. showsPrec 11 (toList fifo)
) |
67125d49eeef3764e8a90f3b869c88a661c8e10ddfbe3b8ddc48fc3a5b7e6ae3 | malcolmreynolds/GSLL | set-identity.lisp | Regression test SET - IDENTITY for GSLL , automatically generated
(in-package :gsl)
(LISP-UNIT:DEFINE-TEST SET-IDENTITY
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1.0 0.0 0.0) (0.0 1.0 0.0) (0.0 0.0 1.0)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY 'SINGLE-FLOAT :INITIAL-CONTENTS
'((-34.5 8.24 3.29)
(-8.93 34.12 -6.15)
(49.27 -13.49 32.5)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#2A((1.0d0 0.0d0 0.0d0)
(0.0d0 1.0d0 0.0d0)
(0.0d0 0.0d0 1.0d0)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY 'DOUBLE-FLOAT :INITIAL-CONTENTS
'((-34.5d0 8.24d0 3.29d0)
(-8.93d0 34.12d0 -6.15d0)
(49.27d0 -13.49d0 32.5d0)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#2A((#C(1.0 0.0) #C(0.0 0.0) #C(0.0 0.0))
(#C(0.0 0.0) #C(1.0 0.0) #C(0.0 0.0))
(#C(0.0 0.0) #C(0.0 0.0) #C(1.0 0.0))))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(COMPLEX SINGLE-FLOAT)
:INITIAL-CONTENTS
'((-34.5 8.24 3.29 -8.93 34.12
-6.15)
(-8.93 34.12 -6.15 49.27 -13.49
32.5)
(49.27 -13.49 32.5 42.73 -17.24
43.31)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#2A((#C(1.0d0 0.0d0) #C(0.0d0 0.0d0) #C(0.0d0 0.0d0))
(#C(0.0d0 0.0d0) #C(1.0d0 0.0d0) #C(0.0d0 0.0d0))
(#C(0.0d0 0.0d0) #C(0.0d0 0.0d0)
#C(1.0d0 0.0d0))))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(COMPLEX DOUBLE-FLOAT)
:INITIAL-CONTENTS
'((-34.5d0 8.24d0 3.29d0 -8.93d0
34.12d0 -6.15d0)
(-8.93d0 34.12d0 -6.15d0 49.27d0
-13.49d0 32.5d0)
(49.27d0 -13.49d0 32.5d0 42.73d0
-17.24d0 43.31d0)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(SIGNED-BYTE 8) :INITIAL-CONTENTS
'((-64 -68 71) (-91 52 -10)
(73 -5 123)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(UNSIGNED-BYTE 8)
:INITIAL-CONTENTS
'((67 44 189) (116 163 140)
(161 215 98)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(SIGNED-BYTE 16)
:INITIAL-CONTENTS
'((-64 -68 71) (-91 52 -10)
(73 -5 123)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(UNSIGNED-BYTE 16)
:INITIAL-CONTENTS
'((67 44 189) (116 163 140)
(161 215 98)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(SIGNED-BYTE 32)
:INITIAL-CONTENTS
'((-64 -68 71) (-91 52 -10)
(73 -5 123)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(UNSIGNED-BYTE 32)
:INITIAL-CONTENTS
'((67 44 189) (116 163 140)
(161 215 98)))))
(CL-ARRAY (SET-IDENTITY M1)))))
#+int64
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(SIGNED-BYTE 64)
:INITIAL-CONTENTS
'((-64 -68 71) (-91 52 -10)
(73 -5 123)))))
(CL-ARRAY (SET-IDENTITY M1)))))
#+int64
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(UNSIGNED-BYTE 64)
:INITIAL-CONTENTS
'((67 44 189) (116 163 140)
(161 215 98)))))
(CL-ARRAY (SET-IDENTITY M1))))))
| null | https://raw.githubusercontent.com/malcolmreynolds/GSLL/2f722f12f1d08e1b9550a46e2a22adba8e1e52c4/tests/set-identity.lisp | lisp | Regression test SET - IDENTITY for GSLL , automatically generated
(in-package :gsl)
(LISP-UNIT:DEFINE-TEST SET-IDENTITY
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1.0 0.0 0.0) (0.0 1.0 0.0) (0.0 0.0 1.0)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY 'SINGLE-FLOAT :INITIAL-CONTENTS
'((-34.5 8.24 3.29)
(-8.93 34.12 -6.15)
(49.27 -13.49 32.5)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#2A((1.0d0 0.0d0 0.0d0)
(0.0d0 1.0d0 0.0d0)
(0.0d0 0.0d0 1.0d0)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY 'DOUBLE-FLOAT :INITIAL-CONTENTS
'((-34.5d0 8.24d0 3.29d0)
(-8.93d0 34.12d0 -6.15d0)
(49.27d0 -13.49d0 32.5d0)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#2A((#C(1.0 0.0) #C(0.0 0.0) #C(0.0 0.0))
(#C(0.0 0.0) #C(1.0 0.0) #C(0.0 0.0))
(#C(0.0 0.0) #C(0.0 0.0) #C(1.0 0.0))))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(COMPLEX SINGLE-FLOAT)
:INITIAL-CONTENTS
'((-34.5 8.24 3.29 -8.93 34.12
-6.15)
(-8.93 34.12 -6.15 49.27 -13.49
32.5)
(49.27 -13.49 32.5 42.73 -17.24
43.31)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#2A((#C(1.0d0 0.0d0) #C(0.0d0 0.0d0) #C(0.0d0 0.0d0))
(#C(0.0d0 0.0d0) #C(1.0d0 0.0d0) #C(0.0d0 0.0d0))
(#C(0.0d0 0.0d0) #C(0.0d0 0.0d0)
#C(1.0d0 0.0d0))))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(COMPLEX DOUBLE-FLOAT)
:INITIAL-CONTENTS
'((-34.5d0 8.24d0 3.29d0 -8.93d0
34.12d0 -6.15d0)
(-8.93d0 34.12d0 -6.15d0 49.27d0
-13.49d0 32.5d0)
(49.27d0 -13.49d0 32.5d0 42.73d0
-17.24d0 43.31d0)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(SIGNED-BYTE 8) :INITIAL-CONTENTS
'((-64 -68 71) (-91 52 -10)
(73 -5 123)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(UNSIGNED-BYTE 8)
:INITIAL-CONTENTS
'((67 44 189) (116 163 140)
(161 215 98)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(SIGNED-BYTE 16)
:INITIAL-CONTENTS
'((-64 -68 71) (-91 52 -10)
(73 -5 123)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(UNSIGNED-BYTE 16)
:INITIAL-CONTENTS
'((67 44 189) (116 163 140)
(161 215 98)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(SIGNED-BYTE 32)
:INITIAL-CONTENTS
'((-64 -68 71) (-91 52 -10)
(73 -5 123)))))
(CL-ARRAY (SET-IDENTITY M1)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(UNSIGNED-BYTE 32)
:INITIAL-CONTENTS
'((67 44 189) (116 163 140)
(161 215 98)))))
(CL-ARRAY (SET-IDENTITY M1)))))
#+int64
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(SIGNED-BYTE 64)
:INITIAL-CONTENTS
'((-64 -68 71) (-91 52 -10)
(73 -5 123)))))
(CL-ARRAY (SET-IDENTITY M1)))))
#+int64
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST #2A((1 0 0) (0 1 0) (0 0 1)))
(MULTIPLE-VALUE-LIST
(LET ((M1
(MAKE-MARRAY '(UNSIGNED-BYTE 64)
:INITIAL-CONTENTS
'((67 44 189) (116 163 140)
(161 215 98)))))
(CL-ARRAY (SET-IDENTITY M1))))))
| |
9312e54632d375778c144260d4267f375739b38eed881a4da7eed350fd8e83b5 | mirage/irmin-watcher | backend.polling.mli | include module type of struct
include Polling
end
| null | https://raw.githubusercontent.com/mirage/irmin-watcher/b2cf05dc3fd18e99f3cfe53c55b834d07e5775e4/src/backend.polling.mli | ocaml | include module type of struct
include Polling
end
| |
4e2b7b969910de2cd474c54cea4cf272c01f26eb19ecb247794a74ac5ddbedac | unison-code/unison | Predicates.hs | |
Copyright : Copyright ( c ) 2016 , RISE SICS AB
License : BSD3 ( see the LICENSE file )
Maintainer :
Predicate functions for the machine IR program representation .
Copyright : Copyright (c) 2016, RISE SICS AB
License : BSD3 (see the LICENSE file)
Maintainer :
Predicate functions for the machine IR program representation.
-}
Main authors :
< >
This file is part of Unison , see -code.github.io
Main authors:
Roberto Castaneda Lozano <>
This file is part of Unison, see -code.github.io
-}
module MachineIR.Predicates
(
-- * MachineFunctionProperty predicates
isMachineFunctionPropertyRegisters,
isMachineFunctionPropertyConstants,
isMachineFunctionPropertyFixedFrame,
isMachineFunctionPropertyFrame,
isMachineFunctionPropertyJumpTable,
isMachineFunctionPropertyRemovedFreqs,
isMachineFunctionPropertyVersion,
* predicates
isMachineBlockPropertyFreq,
isMachineBlockPropertySuccs,
isMachineBlockPropertySplit,
-- * MachineInstruction predicates
isMachineVirtual,
isMachineTarget,
isMachineBranch,
isMachineCallOrTailCall,
isMachineLast,
isMachineExtractSubReg,
isMachineInsertSubReg,
isMachineRegSequence,
isMachineSubregToReg,
isMachinePhi,
isMachineCopy,
isMachineCFIInstruction,
isMachineEHLabel,
-- * MachineInstructionProperty predicates
isMachineInstructionPropertyMem,
isMachineInstructionPropertyCustom,
isMachineInstructionPropertyCustomOf,
isMachineInstructionPropertyJTIBlocks,
isMachineInstructionPropertyDefs,
isMachineInstructionPropertyBranchTaken,
* MachineOperand predicates
isMachineReg,
isMachineImm,
isMachineBlockRef,
isMachineFrameIndex,
isMachineFrameObject,
isMachineMemPartition,
isMachineProperty,
isMachineNullReg,
isMachineDebugLocation,
isMachineTemp,
isMachineSubRegIndex,
isMachineConstantPoolIndex,
isMachineRegImplicit,
isMachineRegImplicitDef,
* MachineRegState predicates
isMachineRegUndef,
-- * MachineFunction predicates
isMachinePreUnison
)
where
import MachineIR.Base
import Unison.Base
isMachineFunctionPropertyRegisters MachineFunctionPropertyRegisters {} = True
isMachineFunctionPropertyRegisters _ = False
isMachineFunctionPropertyConstants MachineFunctionPropertyConstants {} = True
isMachineFunctionPropertyConstants _ = False
isMachineFunctionPropertyFixedFrame MachineFunctionPropertyFixedFrame {} = True
isMachineFunctionPropertyFixedFrame _ = False
isMachineFunctionPropertyFrame MachineFunctionPropertyFrame {} = True
isMachineFunctionPropertyFrame _ = False
isMachineFunctionPropertyJumpTable MachineFunctionPropertyJumpTable {} = True
isMachineFunctionPropertyJumpTable _ = False
isMachineFunctionPropertyRemovedFreqs MachineFunctionPropertyRemovedFreqs {} = True
isMachineFunctionPropertyRemovedFreqs _ = False
isMachineFunctionPropertyVersion MachineFunctionPropertyVersion {} = True
isMachineFunctionPropertyVersion _ = False
isMachineBlockPropertyFreq MachineBlockPropertyFreq {} = True
isMachineBlockPropertyFreq _ = False
isMachineBlockPropertySuccs MachineBlockPropertySuccs {} = True
isMachineBlockPropertySuccs _ = False
isMachineBlockPropertySplit MachineBlockPropertySplit {} = True
isMachineBlockPropertySplit _ = False
isMachineVirtual MachineSingle {msOpcode = MachineVirtualOpc {}} = True
isMachineVirtual _ = False
isMachineTarget MachineSingle {msOpcode = MachineTargetOpc {}} = True
isMachineTarget _ = False
isMachineBranch itf MachineBundle {mbInstrs = mis} =
any (isMachineBranch itf) mis
isMachineBranch _ mi
| isMachineVirtual mi = False
isMachineBranch itf MachineSingle {msOpcode = MachineTargetOpc i} =
itf i == BranchInstructionType
isMachineCallOrTailCall itf MachineBundle {mbInstrs = mis} =
any (isMachineCallOrTailCall itf) mis
isMachineCallOrTailCall _ mi
| isMachineVirtual mi = False
isMachineCallOrTailCall itf MachineSingle {msOpcode = MachineTargetOpc i} =
itf i == CallInstructionType || itf i == TailCallInstructionType
isMachineLast MachineSingle {msOpcode = MachineVirtualOpc opc}
| opc `elem` [EXIT, RETURN] = True
isMachineLast _ = False
isMachineExtractSubReg
MachineSingle {msOpcode = MachineVirtualOpc EXTRACT_SUBREG} = True
isMachineExtractSubReg _ = False
isMachineInsertSubReg
MachineSingle {msOpcode = MachineVirtualOpc INSERT_SUBREG} = True
isMachineInsertSubReg _ = False
isMachineRegSequence
MachineSingle {msOpcode = MachineVirtualOpc REG_SEQUENCE} = True
isMachineRegSequence _ = False
isMachineSubregToReg
MachineSingle {msOpcode = MachineVirtualOpc SUBREG_TO_REG} = True
isMachineSubregToReg _ = False
isMachinePhi MachineSingle {msOpcode = MachineVirtualOpc PHI} = True
isMachinePhi _ = False
isMachineCopy MachineSingle {msOpcode = MachineVirtualOpc COPY} = True
isMachineCopy _ = False
isMachineCFIInstruction
MachineSingle {msOpcode = MachineVirtualOpc CFI_INSTRUCTION} = True
isMachineCFIInstruction _ = False
isMachineEHLabel MachineSingle {msOpcode = MachineVirtualOpc EH_LABEL} = True
isMachineEHLabel _ = False
isMachineInstructionPropertyMem MachineInstructionPropertyMem {} = True
isMachineInstructionPropertyMem _ = False
isMachineInstructionPropertyCustom MachineInstructionPropertyCustom {} = True
isMachineInstructionPropertyCustom _ = False
isMachineInstructionPropertyCustomOf text
MachineInstructionPropertyCustom {msPropertyCustom = text'}
| text == text' = True
isMachineInstructionPropertyCustomOf _ _ = False
isMachineInstructionPropertyJTIBlocks MachineInstructionPropertyJTIBlocks {} = True
isMachineInstructionPropertyJTIBlocks _ = False
isMachineInstructionPropertyDefs MachineInstructionPropertyDefs {} = True
isMachineInstructionPropertyDefs _ = False
isMachineInstructionPropertyBranchTaken MachineInstructionPropertyBranchTaken {} = True
isMachineInstructionPropertyBranchTaken _ = False
isMachineReg MachineReg {} = True
isMachineReg _ = False
isMachineImm MachineImm {} = True
isMachineImm _ = False
isMachineBlockRef MachineBlockRef {} = True
isMachineBlockRef _ = False
isMachineFrameIndex MachineFrameIndex {} = True
isMachineFrameIndex _ = False
isMachineFrameObject MachineFrameObject {} = True
isMachineFrameObject _ = False
isMachineMemPartition MachineMemPartition {} = True
isMachineMemPartition _ = False
isMachineProperty MachineProperty {} = True
isMachineProperty _ = False
isMachineNullReg MachineNullReg {} = True
isMachineNullReg _ = False
isMachineDebugLocation MachineDebugLocation {} = True
isMachineDebugLocation _ = False
isMachineConstantPoolIndex MachineConstantPoolIndex {} = True
isMachineConstantPoolIndex _ = False
isMachineRegImplicit MachineReg {mrFlags = fs}
| MachineRegImplicit `elem` fs = True
isMachineRegImplicit _ = False
isMachineRegImplicitDef MachineReg {mrFlags = fs}
| MachineRegImplicitDefine `elem` fs = True
isMachineRegImplicitDef _ = False
isMachineTemp MachineTemp {} = True
isMachineTemp _ = False
isMachineSubRegIndex MachineSubRegIndex {} = True
isMachineSubRegIndex _ = False
isMachineRegUndef MachineRegUndef {} = True
isMachineRegUndef _ = False
isMachinePreUnison mf =
let mis = concatMap mbInstructions (mfBlocks mf)
in any isMachinePreUnisonInstruction mis
isMachinePreUnisonInstruction MachineBundle {} = False
isMachinePreUnisonInstruction MachineSingle {msOperands = mos} =
any isMachinePreUnisonOperand mos
isMachinePreUnisonOperand MachineTemp {} = True
isMachinePreUnisonOperand MachineSubTemp {} = True
isMachinePreUnisonOperand MachineSubRegIndex {} = True
isMachinePreUnisonOperand MachineFrameIndex {} = True
isMachinePreUnisonOperand MachineFrameObject {} = True
isMachinePreUnisonOperand MachineFrameSize {} = True
isMachinePreUnisonOperand _ = False
| null | https://raw.githubusercontent.com/unison-code/unison/9f8caf78230f956a57b50a327f8d1dca5839bf64/src/unison/src/MachineIR/Predicates.hs | haskell | * MachineFunctionProperty predicates
* MachineInstruction predicates
* MachineInstructionProperty predicates
* MachineFunction predicates | |
Copyright : Copyright ( c ) 2016 , RISE SICS AB
License : BSD3 ( see the LICENSE file )
Maintainer :
Predicate functions for the machine IR program representation .
Copyright : Copyright (c) 2016, RISE SICS AB
License : BSD3 (see the LICENSE file)
Maintainer :
Predicate functions for the machine IR program representation.
-}
Main authors :
< >
This file is part of Unison , see -code.github.io
Main authors:
Roberto Castaneda Lozano <>
This file is part of Unison, see -code.github.io
-}
module MachineIR.Predicates
(
isMachineFunctionPropertyRegisters,
isMachineFunctionPropertyConstants,
isMachineFunctionPropertyFixedFrame,
isMachineFunctionPropertyFrame,
isMachineFunctionPropertyJumpTable,
isMachineFunctionPropertyRemovedFreqs,
isMachineFunctionPropertyVersion,
* predicates
isMachineBlockPropertyFreq,
isMachineBlockPropertySuccs,
isMachineBlockPropertySplit,
isMachineVirtual,
isMachineTarget,
isMachineBranch,
isMachineCallOrTailCall,
isMachineLast,
isMachineExtractSubReg,
isMachineInsertSubReg,
isMachineRegSequence,
isMachineSubregToReg,
isMachinePhi,
isMachineCopy,
isMachineCFIInstruction,
isMachineEHLabel,
isMachineInstructionPropertyMem,
isMachineInstructionPropertyCustom,
isMachineInstructionPropertyCustomOf,
isMachineInstructionPropertyJTIBlocks,
isMachineInstructionPropertyDefs,
isMachineInstructionPropertyBranchTaken,
* MachineOperand predicates
isMachineReg,
isMachineImm,
isMachineBlockRef,
isMachineFrameIndex,
isMachineFrameObject,
isMachineMemPartition,
isMachineProperty,
isMachineNullReg,
isMachineDebugLocation,
isMachineTemp,
isMachineSubRegIndex,
isMachineConstantPoolIndex,
isMachineRegImplicit,
isMachineRegImplicitDef,
* MachineRegState predicates
isMachineRegUndef,
isMachinePreUnison
)
where
import MachineIR.Base
import Unison.Base
isMachineFunctionPropertyRegisters MachineFunctionPropertyRegisters {} = True
isMachineFunctionPropertyRegisters _ = False
isMachineFunctionPropertyConstants MachineFunctionPropertyConstants {} = True
isMachineFunctionPropertyConstants _ = False
isMachineFunctionPropertyFixedFrame MachineFunctionPropertyFixedFrame {} = True
isMachineFunctionPropertyFixedFrame _ = False
isMachineFunctionPropertyFrame MachineFunctionPropertyFrame {} = True
isMachineFunctionPropertyFrame _ = False
isMachineFunctionPropertyJumpTable MachineFunctionPropertyJumpTable {} = True
isMachineFunctionPropertyJumpTable _ = False
isMachineFunctionPropertyRemovedFreqs MachineFunctionPropertyRemovedFreqs {} = True
isMachineFunctionPropertyRemovedFreqs _ = False
isMachineFunctionPropertyVersion MachineFunctionPropertyVersion {} = True
isMachineFunctionPropertyVersion _ = False
isMachineBlockPropertyFreq MachineBlockPropertyFreq {} = True
isMachineBlockPropertyFreq _ = False
isMachineBlockPropertySuccs MachineBlockPropertySuccs {} = True
isMachineBlockPropertySuccs _ = False
isMachineBlockPropertySplit MachineBlockPropertySplit {} = True
isMachineBlockPropertySplit _ = False
isMachineVirtual MachineSingle {msOpcode = MachineVirtualOpc {}} = True
isMachineVirtual _ = False
isMachineTarget MachineSingle {msOpcode = MachineTargetOpc {}} = True
isMachineTarget _ = False
isMachineBranch itf MachineBundle {mbInstrs = mis} =
any (isMachineBranch itf) mis
isMachineBranch _ mi
| isMachineVirtual mi = False
isMachineBranch itf MachineSingle {msOpcode = MachineTargetOpc i} =
itf i == BranchInstructionType
isMachineCallOrTailCall itf MachineBundle {mbInstrs = mis} =
any (isMachineCallOrTailCall itf) mis
isMachineCallOrTailCall _ mi
| isMachineVirtual mi = False
isMachineCallOrTailCall itf MachineSingle {msOpcode = MachineTargetOpc i} =
itf i == CallInstructionType || itf i == TailCallInstructionType
isMachineLast MachineSingle {msOpcode = MachineVirtualOpc opc}
| opc `elem` [EXIT, RETURN] = True
isMachineLast _ = False
isMachineExtractSubReg
MachineSingle {msOpcode = MachineVirtualOpc EXTRACT_SUBREG} = True
isMachineExtractSubReg _ = False
isMachineInsertSubReg
MachineSingle {msOpcode = MachineVirtualOpc INSERT_SUBREG} = True
isMachineInsertSubReg _ = False
isMachineRegSequence
MachineSingle {msOpcode = MachineVirtualOpc REG_SEQUENCE} = True
isMachineRegSequence _ = False
isMachineSubregToReg
MachineSingle {msOpcode = MachineVirtualOpc SUBREG_TO_REG} = True
isMachineSubregToReg _ = False
isMachinePhi MachineSingle {msOpcode = MachineVirtualOpc PHI} = True
isMachinePhi _ = False
isMachineCopy MachineSingle {msOpcode = MachineVirtualOpc COPY} = True
isMachineCopy _ = False
isMachineCFIInstruction
MachineSingle {msOpcode = MachineVirtualOpc CFI_INSTRUCTION} = True
isMachineCFIInstruction _ = False
isMachineEHLabel MachineSingle {msOpcode = MachineVirtualOpc EH_LABEL} = True
isMachineEHLabel _ = False
isMachineInstructionPropertyMem MachineInstructionPropertyMem {} = True
isMachineInstructionPropertyMem _ = False
isMachineInstructionPropertyCustom MachineInstructionPropertyCustom {} = True
isMachineInstructionPropertyCustom _ = False
isMachineInstructionPropertyCustomOf text
MachineInstructionPropertyCustom {msPropertyCustom = text'}
| text == text' = True
isMachineInstructionPropertyCustomOf _ _ = False
isMachineInstructionPropertyJTIBlocks MachineInstructionPropertyJTIBlocks {} = True
isMachineInstructionPropertyJTIBlocks _ = False
isMachineInstructionPropertyDefs MachineInstructionPropertyDefs {} = True
isMachineInstructionPropertyDefs _ = False
isMachineInstructionPropertyBranchTaken MachineInstructionPropertyBranchTaken {} = True
isMachineInstructionPropertyBranchTaken _ = False
isMachineReg MachineReg {} = True
isMachineReg _ = False
isMachineImm MachineImm {} = True
isMachineImm _ = False
isMachineBlockRef MachineBlockRef {} = True
isMachineBlockRef _ = False
isMachineFrameIndex MachineFrameIndex {} = True
isMachineFrameIndex _ = False
isMachineFrameObject MachineFrameObject {} = True
isMachineFrameObject _ = False
isMachineMemPartition MachineMemPartition {} = True
isMachineMemPartition _ = False
isMachineProperty MachineProperty {} = True
isMachineProperty _ = False
isMachineNullReg MachineNullReg {} = True
isMachineNullReg _ = False
isMachineDebugLocation MachineDebugLocation {} = True
isMachineDebugLocation _ = False
isMachineConstantPoolIndex MachineConstantPoolIndex {} = True
isMachineConstantPoolIndex _ = False
isMachineRegImplicit MachineReg {mrFlags = fs}
| MachineRegImplicit `elem` fs = True
isMachineRegImplicit _ = False
isMachineRegImplicitDef MachineReg {mrFlags = fs}
| MachineRegImplicitDefine `elem` fs = True
isMachineRegImplicitDef _ = False
isMachineTemp MachineTemp {} = True
isMachineTemp _ = False
isMachineSubRegIndex MachineSubRegIndex {} = True
isMachineSubRegIndex _ = False
isMachineRegUndef MachineRegUndef {} = True
isMachineRegUndef _ = False
isMachinePreUnison mf =
let mis = concatMap mbInstructions (mfBlocks mf)
in any isMachinePreUnisonInstruction mis
isMachinePreUnisonInstruction MachineBundle {} = False
isMachinePreUnisonInstruction MachineSingle {msOperands = mos} =
any isMachinePreUnisonOperand mos
isMachinePreUnisonOperand MachineTemp {} = True
isMachinePreUnisonOperand MachineSubTemp {} = True
isMachinePreUnisonOperand MachineSubRegIndex {} = True
isMachinePreUnisonOperand MachineFrameIndex {} = True
isMachinePreUnisonOperand MachineFrameObject {} = True
isMachinePreUnisonOperand MachineFrameSize {} = True
isMachinePreUnisonOperand _ = False
|
ff7cc6cb313ff3ee8ac7533e24e15418e9e11235dd02c2681bd4c2fcc947a817 | seckcoder/course-compiler | scratch.rkt | #lang racket
(define (primitives) (set '+ '- '* 'read))
(define (collect-locals)
(lambda (ast)
(match ast
[`(assign ,x ,e) (list x)]
[`(return ,e) '()]
[else
(error "unmatched in collect-locals S0" ast)]
)))
(define (map2 f ls)
(if (null? ls) (values '() '())
(let-values (((a d) (f (car ls)))
((da dd) (map2 f (cdr ls))))
(values (cons a da) (cons d dd)))))
(define (flatten need-atomic)
(lambda (e)
(match e
[(? symbol?) (values e '())]
[(? integer?) (values e '())]
[`(let ([,x ,e]) ,body)
(define-values (new-e e-ss) ((flatten #f) e))
(define-values (new-body body-ss) ((flatten #f) body))
(values new-body (append e-ss `((assign ,x ,new-e)) body-ss))]
[`(,op ,es ...) #:when (set-member? (primitives) op)
(define-values (new-es sss) (map2 (flatten #t) es))
(define ss (append* sss))
(define prim-apply `(,op ,@new-es))
(cond [need-atomic
(define tmp (gensym 'tmp))
(values tmp (append ss `((assign ,tmp ,prim-apply))))]
[else (values prim-apply ss)])]
[`(program ,extra ,e)
(define-values (new-e ss) ((flatten #f) e))
(define xs (append* (map (collect-locals) ss)))
`(program ,(remove-duplicates xs) ,@(append ss `((return ,new-e))))]
)))
;; select-instructions : C0 -> psuedo-x86
(define (binary-op->inst op)
(match op
['+ 'add] ['- 'sub] ['* 'imul]
[else (error "in binary-op->inst unmatched" op)]
))
(define (unary-op->inst op)
(match op
['- 'neg] [else (error "in unary-op->inst unmatched" op)]
))
(define (commutative? op)
(match op
['+ #t] ['* #t]
[else #f]))
(define (select-instructions)
(lambda (e)
(match e
[(? symbol?) `(var ,e)]
[(? integer?) `(int ,e)]
[`(register ,r) `(register ,r)]
[`(return ,e)
(( select-instructions) `(assign (register rax) ,e))]
[`(assign ,lhs (read))
(define new-lhs (( select-instructions) lhs))
`((call _read_int) (mov (register rax) ,new-lhs))]
[`(assign ,lhs ,x) #:when (symbol? x)
(define new-lhs (( select-instructions) lhs))
(cond [(equal? `(var ,x) new-lhs) '()]
[else `((mov (var ,x) ,new-lhs))])]
[`(assign ,lhs ,n) #:when (integer? n)
(define new-lhs (( select-instructions) lhs))
`((mov (int ,n) ,new-lhs))]
[`(assign ,lhs (,op ,e1 ,e2))
(define new-lhs (( select-instructions) lhs))
(define new-e1 (( select-instructions) e1))
(define new-e2 (( select-instructions) e2))
(define inst (binary-op->inst op))
(cond [(equal? new-e1 new-lhs)
`((,inst ,new-e2 ,new-lhs))]
[(equal? new-e2 new-lhs)
`((,inst ,new-e1 ,new-lhs))]
;; The following can shorten the live range of e2. -JGS
[(and ( commutative? op)
(integer? e1) (symbol? e2))
`((mov ,new-e2 ,new-lhs) (,inst ,new-e1 ,new-lhs))]
[else `((mov ,new-e1 ,new-lhs) (,inst ,new-e2 ,new-lhs))])]
[`(assign ,lhs (,op ,e1))
(define new-lhs (( select-instructions) lhs))
(define new-e1 (( select-instructions) e1))
(define inst (unary-op->inst op))
(cond [(equal? new-e1 new-lhs)
`((,inst ,new-lhs))]
[else `((mov ,new-e1 ,new-lhs) (,inst ,new-lhs))])]
[`(program ,xs ,ss ...)
`(program ,xs ,@(append* (map ( select-instructions) ss)))]
[else (error "instruction selection, unmatched " e)])))
| null | https://raw.githubusercontent.com/seckcoder/course-compiler/4363e5b3e15eaa7553902c3850b6452de80b2ef6/HW-Grammars/scratch.rkt | racket | select-instructions : C0 -> psuedo-x86
The following can shorten the live range of e2. -JGS | #lang racket
(define (primitives) (set '+ '- '* 'read))
(define (collect-locals)
(lambda (ast)
(match ast
[`(assign ,x ,e) (list x)]
[`(return ,e) '()]
[else
(error "unmatched in collect-locals S0" ast)]
)))
(define (map2 f ls)
(if (null? ls) (values '() '())
(let-values (((a d) (f (car ls)))
((da dd) (map2 f (cdr ls))))
(values (cons a da) (cons d dd)))))
(define (flatten need-atomic)
(lambda (e)
(match e
[(? symbol?) (values e '())]
[(? integer?) (values e '())]
[`(let ([,x ,e]) ,body)
(define-values (new-e e-ss) ((flatten #f) e))
(define-values (new-body body-ss) ((flatten #f) body))
(values new-body (append e-ss `((assign ,x ,new-e)) body-ss))]
[`(,op ,es ...) #:when (set-member? (primitives) op)
(define-values (new-es sss) (map2 (flatten #t) es))
(define ss (append* sss))
(define prim-apply `(,op ,@new-es))
(cond [need-atomic
(define tmp (gensym 'tmp))
(values tmp (append ss `((assign ,tmp ,prim-apply))))]
[else (values prim-apply ss)])]
[`(program ,extra ,e)
(define-values (new-e ss) ((flatten #f) e))
(define xs (append* (map (collect-locals) ss)))
`(program ,(remove-duplicates xs) ,@(append ss `((return ,new-e))))]
)))
(define (binary-op->inst op)
(match op
['+ 'add] ['- 'sub] ['* 'imul]
[else (error "in binary-op->inst unmatched" op)]
))
(define (unary-op->inst op)
(match op
['- 'neg] [else (error "in unary-op->inst unmatched" op)]
))
(define (commutative? op)
(match op
['+ #t] ['* #t]
[else #f]))
(define (select-instructions)
(lambda (e)
(match e
[(? symbol?) `(var ,e)]
[(? integer?) `(int ,e)]
[`(register ,r) `(register ,r)]
[`(return ,e)
(( select-instructions) `(assign (register rax) ,e))]
[`(assign ,lhs (read))
(define new-lhs (( select-instructions) lhs))
`((call _read_int) (mov (register rax) ,new-lhs))]
[`(assign ,lhs ,x) #:when (symbol? x)
(define new-lhs (( select-instructions) lhs))
(cond [(equal? `(var ,x) new-lhs) '()]
[else `((mov (var ,x) ,new-lhs))])]
[`(assign ,lhs ,n) #:when (integer? n)
(define new-lhs (( select-instructions) lhs))
`((mov (int ,n) ,new-lhs))]
[`(assign ,lhs (,op ,e1 ,e2))
(define new-lhs (( select-instructions) lhs))
(define new-e1 (( select-instructions) e1))
(define new-e2 (( select-instructions) e2))
(define inst (binary-op->inst op))
(cond [(equal? new-e1 new-lhs)
`((,inst ,new-e2 ,new-lhs))]
[(equal? new-e2 new-lhs)
`((,inst ,new-e1 ,new-lhs))]
[(and ( commutative? op)
(integer? e1) (symbol? e2))
`((mov ,new-e2 ,new-lhs) (,inst ,new-e1 ,new-lhs))]
[else `((mov ,new-e1 ,new-lhs) (,inst ,new-e2 ,new-lhs))])]
[`(assign ,lhs (,op ,e1))
(define new-lhs (( select-instructions) lhs))
(define new-e1 (( select-instructions) e1))
(define inst (unary-op->inst op))
(cond [(equal? new-e1 new-lhs)
`((,inst ,new-lhs))]
[else `((mov ,new-e1 ,new-lhs) (,inst ,new-lhs))])]
[`(program ,xs ,ss ...)
`(program ,xs ,@(append* (map ( select-instructions) ss)))]
[else (error "instruction selection, unmatched " e)])))
|
51ea9a68d925d4bc93618c41c49598cc1b050bfb3f234a4a331a644e1c7badc3 | CardanoSolutions/kupo | Script.hs | module Kupo.Data.Cardano.Script where
import Kupo.Prelude
import Cardano.Binary
( DecoderError (..)
, FromCBOR (..)
, decodeAnnotator
)
import Control.Arrow
( left
)
import Kupo.Data.Cardano.NativeScript
( NativeScript
)
import Kupo.Data.Cardano.ScriptHash
( ScriptHash
)
import Ouroboros.Consensus.Util
( eitherToMaybe
)
import qualified Cardano.Binary as Cbor
import qualified Cardano.Ledger.Alonzo.Data as Ledger
import qualified Cardano.Ledger.Alonzo.Language as Ledger
import qualified Cardano.Ledger.Alonzo.Scripts as Ledger
import qualified Cardano.Ledger.Core as Ledger.Core
import qualified Cardano.Ledger.Era as Ledger
import qualified Cardano.Ledger.SafeHash as Ledger
import qualified Cardano.Ledger.ShelleyMA.AuxiliaryData as Ledger.MaryAllegra
import qualified Cardano.Ledger.ShelleyMA.Timelocks as Ledger
import qualified Cardano.Ledger.ShelleyMA.Timelocks as Ledger.MaryAllegra
import qualified Codec.CBOR.Read as Cbor
import qualified Data.Aeson as Json
import qualified Data.Aeson.Encoding as Json
import qualified Data.ByteString as BS
import qualified Data.Map as Map
type Script =
Ledger.Script (BabbageEra StandardCrypto)
scriptFromAllegraAuxiliaryData
:: forall era. (Ledger.Core.Script era ~ Ledger.Timelock StandardCrypto)
=> (Ledger.Core.Script era -> Script)
-> Ledger.MaryAllegra.AuxiliaryData era
-> Map ScriptHash Script
-> Map ScriptHash Script
scriptFromAllegraAuxiliaryData liftScript (Ledger.MaryAllegra.AuxiliaryData _ scripts) m0 =
foldr
(\(liftScript -> s) -> Map.insert (hashScript s) s)
m0
scripts
# INLINABLE scriptFromAllegraAuxiliaryData #
scriptFromAlonzoAuxiliaryData
:: forall era.
( Ledger.Era era
, Ledger.Core.Script era ~ Ledger.Script era
)
=> (Ledger.Script era -> Script)
-> Ledger.AuxiliaryData era
-> Map ScriptHash Script
-> Map ScriptHash Script
scriptFromAlonzoAuxiliaryData liftScript Ledger.AuxiliaryData{Ledger.scripts} m0 =
foldr
(\(liftScript -> s) -> Map.insert (hashScript s) s)
m0
scripts
# INLINABLE scriptFromAlonzoAuxiliaryData #
fromAllegraScript
:: Ledger.MaryAllegra.Timelock StandardCrypto
-> Script
fromAllegraScript =
Ledger.TimelockScript
# INLINABLE fromAllegraScript #
fromMaryScript
:: Ledger.MaryAllegra.Timelock StandardCrypto
-> Script
fromMaryScript =
Ledger.TimelockScript
# INLINABLE fromMaryScript #
fromAlonzoScript
:: Ledger.Script (AlonzoEra StandardCrypto)
-> Script
fromAlonzoScript = \case
Ledger.TimelockScript script ->
Ledger.TimelockScript script
Ledger.PlutusScript lang bytes ->
Ledger.PlutusScript lang bytes
fromBabbageScript
:: Ledger.Script (BabbageEra StandardCrypto)
-> Script
fromBabbageScript =
identity
# INLINABLE fromBabbageScript #
scriptToJson
:: Script
-> Json.Encoding
scriptToJson script = encodeObject
[ ("script", encodeBytes (Ledger.originalBytes script))
, ("language", case script of
Ledger.TimelockScript{} ->
Json.text "native"
Ledger.PlutusScript Ledger.PlutusV1 _ ->
Json.text "plutus:v1"
Ledger.PlutusScript Ledger.PlutusV2 _ ->
Json.text "plutus:v2"
)
]
scriptToBytes
:: Script
-> ByteString
scriptToBytes = \case
{} ->
BS.singleton 0 <> Ledger.originalBytes script
script@(Ledger.PlutusScript Ledger.PlutusV1 _) ->
BS.singleton 1 <> Ledger.originalBytes script
script@(Ledger.PlutusScript Ledger.PlutusV2 _) ->
BS.singleton 2 <> Ledger.originalBytes script
unsafeScriptFromBytes
:: HasCallStack
=> ByteString
-> Script
unsafeScriptFromBytes =
fromMaybe (error "unsafeScriptFromBytes") . scriptFromBytes
# INLINABLE unsafeScriptFromBytes #
scriptFromBytes
:: ByteString
-> Maybe Script
scriptFromBytes (toLazy -> bytes) =
eitherToMaybe $ do
(script, tag) <- left (DecoderErrorDeserialiseFailure "Script") $
Cbor.deserialiseFromBytes Cbor.decodeWord8 bytes
case tag of
0 -> Ledger.TimelockScript <$> decodeAnnotator "Timelock" fromCBOR script
1 -> pure $ Ledger.PlutusScript Ledger.PlutusV1 (toShort $ toStrict script)
2 -> pure $ Ledger.PlutusScript Ledger.PlutusV2 (toShort $ toStrict script)
t -> Left (DecoderErrorUnknownTag "Script" t)
fromNativeScript
:: NativeScript
-> Script
fromNativeScript =
Ledger.TimelockScript
# INLINABLE fromNativeScript #
hashScript
:: Script
-> ScriptHash
hashScript =
Ledger.hashScript @(BabbageEra StandardCrypto)
# INLINABLE hashScript #
| null | https://raw.githubusercontent.com/CardanoSolutions/kupo/4904123abeed53f672eb34e0ef10c6c710bda61b/src/Kupo/Data/Cardano/Script.hs | haskell | module Kupo.Data.Cardano.Script where
import Kupo.Prelude
import Cardano.Binary
( DecoderError (..)
, FromCBOR (..)
, decodeAnnotator
)
import Control.Arrow
( left
)
import Kupo.Data.Cardano.NativeScript
( NativeScript
)
import Kupo.Data.Cardano.ScriptHash
( ScriptHash
)
import Ouroboros.Consensus.Util
( eitherToMaybe
)
import qualified Cardano.Binary as Cbor
import qualified Cardano.Ledger.Alonzo.Data as Ledger
import qualified Cardano.Ledger.Alonzo.Language as Ledger
import qualified Cardano.Ledger.Alonzo.Scripts as Ledger
import qualified Cardano.Ledger.Core as Ledger.Core
import qualified Cardano.Ledger.Era as Ledger
import qualified Cardano.Ledger.SafeHash as Ledger
import qualified Cardano.Ledger.ShelleyMA.AuxiliaryData as Ledger.MaryAllegra
import qualified Cardano.Ledger.ShelleyMA.Timelocks as Ledger
import qualified Cardano.Ledger.ShelleyMA.Timelocks as Ledger.MaryAllegra
import qualified Codec.CBOR.Read as Cbor
import qualified Data.Aeson as Json
import qualified Data.Aeson.Encoding as Json
import qualified Data.ByteString as BS
import qualified Data.Map as Map
type Script =
Ledger.Script (BabbageEra StandardCrypto)
scriptFromAllegraAuxiliaryData
:: forall era. (Ledger.Core.Script era ~ Ledger.Timelock StandardCrypto)
=> (Ledger.Core.Script era -> Script)
-> Ledger.MaryAllegra.AuxiliaryData era
-> Map ScriptHash Script
-> Map ScriptHash Script
scriptFromAllegraAuxiliaryData liftScript (Ledger.MaryAllegra.AuxiliaryData _ scripts) m0 =
foldr
(\(liftScript -> s) -> Map.insert (hashScript s) s)
m0
scripts
# INLINABLE scriptFromAllegraAuxiliaryData #
scriptFromAlonzoAuxiliaryData
:: forall era.
( Ledger.Era era
, Ledger.Core.Script era ~ Ledger.Script era
)
=> (Ledger.Script era -> Script)
-> Ledger.AuxiliaryData era
-> Map ScriptHash Script
-> Map ScriptHash Script
scriptFromAlonzoAuxiliaryData liftScript Ledger.AuxiliaryData{Ledger.scripts} m0 =
foldr
(\(liftScript -> s) -> Map.insert (hashScript s) s)
m0
scripts
# INLINABLE scriptFromAlonzoAuxiliaryData #
fromAllegraScript
:: Ledger.MaryAllegra.Timelock StandardCrypto
-> Script
fromAllegraScript =
Ledger.TimelockScript
# INLINABLE fromAllegraScript #
fromMaryScript
:: Ledger.MaryAllegra.Timelock StandardCrypto
-> Script
fromMaryScript =
Ledger.TimelockScript
# INLINABLE fromMaryScript #
fromAlonzoScript
:: Ledger.Script (AlonzoEra StandardCrypto)
-> Script
fromAlonzoScript = \case
Ledger.TimelockScript script ->
Ledger.TimelockScript script
Ledger.PlutusScript lang bytes ->
Ledger.PlutusScript lang bytes
fromBabbageScript
:: Ledger.Script (BabbageEra StandardCrypto)
-> Script
fromBabbageScript =
identity
# INLINABLE fromBabbageScript #
scriptToJson
:: Script
-> Json.Encoding
scriptToJson script = encodeObject
[ ("script", encodeBytes (Ledger.originalBytes script))
, ("language", case script of
Ledger.TimelockScript{} ->
Json.text "native"
Ledger.PlutusScript Ledger.PlutusV1 _ ->
Json.text "plutus:v1"
Ledger.PlutusScript Ledger.PlutusV2 _ ->
Json.text "plutus:v2"
)
]
scriptToBytes
:: Script
-> ByteString
scriptToBytes = \case
{} ->
BS.singleton 0 <> Ledger.originalBytes script
script@(Ledger.PlutusScript Ledger.PlutusV1 _) ->
BS.singleton 1 <> Ledger.originalBytes script
script@(Ledger.PlutusScript Ledger.PlutusV2 _) ->
BS.singleton 2 <> Ledger.originalBytes script
unsafeScriptFromBytes
:: HasCallStack
=> ByteString
-> Script
unsafeScriptFromBytes =
fromMaybe (error "unsafeScriptFromBytes") . scriptFromBytes
# INLINABLE unsafeScriptFromBytes #
scriptFromBytes
:: ByteString
-> Maybe Script
scriptFromBytes (toLazy -> bytes) =
eitherToMaybe $ do
(script, tag) <- left (DecoderErrorDeserialiseFailure "Script") $
Cbor.deserialiseFromBytes Cbor.decodeWord8 bytes
case tag of
0 -> Ledger.TimelockScript <$> decodeAnnotator "Timelock" fromCBOR script
1 -> pure $ Ledger.PlutusScript Ledger.PlutusV1 (toShort $ toStrict script)
2 -> pure $ Ledger.PlutusScript Ledger.PlutusV2 (toShort $ toStrict script)
t -> Left (DecoderErrorUnknownTag "Script" t)
fromNativeScript
:: NativeScript
-> Script
fromNativeScript =
Ledger.TimelockScript
# INLINABLE fromNativeScript #
hashScript
:: Script
-> ScriptHash
hashScript =
Ledger.hashScript @(BabbageEra StandardCrypto)
# INLINABLE hashScript #
| |
ac9fa4008d3c3d7af538933e4a0b160db46704c4642a01c378f19a100e0ebc7f | hhucn/decide3 | argumentation.cljc | (ns decide.models.argumentation
(:require
#?@(:clj [[clojure.spec.alpha :as s]
[clojure.spec.gen.alpha :as gen]
[datahike.core :as d.core]]
:cljs [[cljs.spec.alpha :as s]
[cljs.spec.gen.alpha :as gen]])
[com.fulcrologic.guardrails.core :refer [>def >defn =>]]
[com.fulcrologic.fulcro.algorithms.tempid :as tempid]
[decide.argument :as-alias argument]
[decide.argumentation]
[decide.models.proposal :as-alias proposal]
[decide.statement :as-alias statement]))
(def schema
[{:db/ident :author
:db/valueType :db.type/ref
:db/cardinality :db.cardinality/one}
{:db/ident :statement/id
:db/unique :db.unique/identity
:db/valueType :db.type/uuid
:db/cardinality :db.cardinality/one}
{:db/ident :statement/content
:db/valueType :db.type/string
; :db/fulltext true
:db/cardinality :db.cardinality/one}
{:db/ident :argument/id
:db/unique :db.unique/identity
:db/valueType :db.type/uuid
:db/cardinality :db.cardinality/one}
{:db/ident :argument/conclusion
:db/valueType :db.type/ref
:db/cardinality :db.cardinality/one}
{:db/ident :argument/premise
:db/valueType :db.type/ref
:db/cardinality :db.cardinality/one}
{:db/ident :argument/type
:db/valueType :db.type/keyword
:db/cardinality :db.cardinality/one}
{:db/ident ::argument/ancestors
:db/doc "All ancestors of an argument up to the root argument(s). This enables improved query performance."
:db/valueType :db.type/ref
:db/cardinality :db.cardinality/many}])
;;; TODO Move the whole id stuff to a util ns
(>def ::tempid/tempid
(s/spec tempid/tempid?
:gen #(gen/return (tempid/tempid))))
(>def :argument/id (s/or :main ::argument/id :tempid ::tempid/tempid))
(>def :argument/type ::argument/type)
(>def :argument/premise
(s/or
:legacy (s/keys :req [:statement/id])
:main ::argument/premise))
(>def :argument/entity (s/and associative? #(contains? % :db/id)))
(>def :statement/id (s/or :main ::statement/id :tempid ::tempid/tempid))
(>def :statement/content ::statement/content)
(>def :statement/entity (s/and associative? #(contains? % :db/id)))
(defn validate [spec x msg] ; move this to a util ns
(when-not (s/valid? spec x)
(throw (ex-info msg (s/explain-data spec x)))))
(>defn make-statement [{:statement/keys [id content]
:or {id #?(:clj (d.core/squuid)
:cljs (tempid/tempid))}}]
[(s/keys :req [:statement/content] :opt [:statement/id]) => (s/keys :req [:statement/id :statement/content])]
#:statement{:id id
:content content})
(>defn make-argument
([] [=> (s/keys :req [:argument/id])] (make-argument {}))
([{:argument/keys [id type]
:or {id #?(:clj (d.core/squuid)
:cljs (tempid/tempid))}}]
[(s/keys :opt [:argument/id :argument/type]) => (s/keys :req [:argument/id])]
(merge
{:argument/id id}
(when type {:argument/type type}))))
(defn proposal [argument]
(let [ancestors (::argument/ancestors argument)]
(some ::proposal/_arguments ancestors)))
| null | https://raw.githubusercontent.com/hhucn/decide3/7c048b0411285282d56a127cd1ec10362d024947/src/main/decide/models/argumentation.cljc | clojure | :db/fulltext true
TODO Move the whole id stuff to a util ns
move this to a util ns | (ns decide.models.argumentation
(:require
#?@(:clj [[clojure.spec.alpha :as s]
[clojure.spec.gen.alpha :as gen]
[datahike.core :as d.core]]
:cljs [[cljs.spec.alpha :as s]
[cljs.spec.gen.alpha :as gen]])
[com.fulcrologic.guardrails.core :refer [>def >defn =>]]
[com.fulcrologic.fulcro.algorithms.tempid :as tempid]
[decide.argument :as-alias argument]
[decide.argumentation]
[decide.models.proposal :as-alias proposal]
[decide.statement :as-alias statement]))
(def schema
[{:db/ident :author
:db/valueType :db.type/ref
:db/cardinality :db.cardinality/one}
{:db/ident :statement/id
:db/unique :db.unique/identity
:db/valueType :db.type/uuid
:db/cardinality :db.cardinality/one}
{:db/ident :statement/content
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident :argument/id
:db/unique :db.unique/identity
:db/valueType :db.type/uuid
:db/cardinality :db.cardinality/one}
{:db/ident :argument/conclusion
:db/valueType :db.type/ref
:db/cardinality :db.cardinality/one}
{:db/ident :argument/premise
:db/valueType :db.type/ref
:db/cardinality :db.cardinality/one}
{:db/ident :argument/type
:db/valueType :db.type/keyword
:db/cardinality :db.cardinality/one}
{:db/ident ::argument/ancestors
:db/doc "All ancestors of an argument up to the root argument(s). This enables improved query performance."
:db/valueType :db.type/ref
:db/cardinality :db.cardinality/many}])
(>def ::tempid/tempid
(s/spec tempid/tempid?
:gen #(gen/return (tempid/tempid))))
(>def :argument/id (s/or :main ::argument/id :tempid ::tempid/tempid))
(>def :argument/type ::argument/type)
(>def :argument/premise
(s/or
:legacy (s/keys :req [:statement/id])
:main ::argument/premise))
(>def :argument/entity (s/and associative? #(contains? % :db/id)))
(>def :statement/id (s/or :main ::statement/id :tempid ::tempid/tempid))
(>def :statement/content ::statement/content)
(>def :statement/entity (s/and associative? #(contains? % :db/id)))
(when-not (s/valid? spec x)
(throw (ex-info msg (s/explain-data spec x)))))
(>defn make-statement [{:statement/keys [id content]
:or {id #?(:clj (d.core/squuid)
:cljs (tempid/tempid))}}]
[(s/keys :req [:statement/content] :opt [:statement/id]) => (s/keys :req [:statement/id :statement/content])]
#:statement{:id id
:content content})
(>defn make-argument
([] [=> (s/keys :req [:argument/id])] (make-argument {}))
([{:argument/keys [id type]
:or {id #?(:clj (d.core/squuid)
:cljs (tempid/tempid))}}]
[(s/keys :opt [:argument/id :argument/type]) => (s/keys :req [:argument/id])]
(merge
{:argument/id id}
(when type {:argument/type type}))))
(defn proposal [argument]
(let [ancestors (::argument/ancestors argument)]
(some ::proposal/_arguments ancestors)))
|
768b5ef6f855c6b075b1fcf9ee93bbff151d26c04ac488478e999ca7a6b79594 | ygmpkk/house | Main.hs | -----------------------------------------------------------
( c ) 1999 - 2000 ,
-----------------------------------------------------------
module Main where
import MonParser (prettyFile)
main :: IO ()
main = prettyFile "prelude.m"
| null | https://raw.githubusercontent.com/ygmpkk/house/1ed0eed82139869e85e3c5532f2b579cf2566fa2/ghc-6.2/libraries/parsec/examples/Mondrian/Main.hs | haskell | ---------------------------------------------------------
--------------------------------------------------------- | ( c ) 1999 - 2000 ,
module Main where
import MonParser (prettyFile)
main :: IO ()
main = prettyFile "prelude.m"
|
f3d1084b39c38cf611a61eea3a64d6fff7a307fb8cfce23c9477f4b884259935 | janestreet/merlin-jst | diffing.mli |
(**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cambium , Paris
(* *)
Copyright 2020 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
* Parametric diffing
This module implements diffing over lists of arbitrary content .
It is parameterized by
- The content of the two lists
- The equality witness when an element is kept
- The diffing witness when an element is changed
is extended to maintain state depending on the
computed changes while walking through the two lists .
The underlying algorithm is a modified Wagner - Fischer algorithm
( see < > ) .
We provide the following guarantee :
Given two lists [ l ] and [ r ] , if different patches result in different
states , we say that the state diverges .
- We always return the optimal patch on prefixes of [ l ] and [ r ]
on which state does not diverge .
- Otherwise , we return a correct but non - optimal patch where with no divergent states are optimal for the given initial state .
More precisely , the optimality of Wagner - Fischer depends on the property
that the edit - distance between a k - prefix of the left input and a l - prefix
of the right input d(k , l ) satisfies
d(k , l ) = min (
del_cost + d(k-1,l ) ,
insert_cost + d(k , l-1 ) ,
change_cost + d(k-1,l-1 )
)
Under this hypothesis , it is optimal to choose greedily the state of the
minimal patch transforming the left k - prefix into the right l - prefix as a
representative of the states of all possible patches transforming the left
k - prefix into the right l - prefix .
If this property is not satisfied , we can still choose greedily a
representative state . However , the computed patch is no more guaranteed to
be globally optimal .
Nevertheless , it is still a correct patch , which is even optimal among all
explored patches .
This module implements diffing over lists of arbitrary content.
It is parameterized by
- The content of the two lists
- The equality witness when an element is kept
- The diffing witness when an element is changed
Diffing is extended to maintain state depending on the
computed changes while walking through the two lists.
The underlying algorithm is a modified Wagner-Fischer algorithm
(see <>).
We provide the following guarantee:
Given two lists [l] and [r], if different patches result in different
states, we say that the state diverges.
- We always return the optimal patch on prefixes of [l] and [r]
on which state does not diverge.
- Otherwise, we return a correct but non-optimal patch where subpatches
with no divergent states are optimal for the given initial state.
More precisely, the optimality of Wagner-Fischer depends on the property
that the edit-distance between a k-prefix of the left input and a l-prefix
of the right input d(k,l) satisfies
d(k,l) = min (
del_cost + d(k-1,l),
insert_cost + d(k,l-1),
change_cost + d(k-1,l-1)
)
Under this hypothesis, it is optimal to choose greedily the state of the
minimal patch transforming the left k-prefix into the right l-prefix as a
representative of the states of all possible patches transforming the left
k-prefix into the right l-prefix.
If this property is not satisfied, we can still choose greedily a
representative state. However, the computed patch is no more guaranteed to
be globally optimal.
Nevertheless, it is still a correct patch, which is even optimal among all
explored patches.
*)
(** The core types of a diffing implementation *)
module type Defs = sig
type left
type right
type eq
(** Detailed equality trace *)
type diff
(** Detailed difference trace *)
type state
(** environment of a partial patch *)
end
(** The kind of changes which is used to share printing and styling
across implementation*)
type change_kind =
| Deletion
| Insertion
| Modification
| Preservation
val prefix: Format.formatter -> (int * change_kind) -> unit
val style: change_kind -> Misc.Color.style list
type ('left,'right,'eq,'diff) change =
| Delete of 'left
| Insert of 'right
| Keep of 'left * 'right *' eq
| Change of 'left * 'right * 'diff
val classify: _ change -> change_kind
* [ Define(Defs ) ] creates the diffing types from the types
defined in [ ] and the functors that need to be instantatied
with the diffing algorithm parameters
defined in [Defs] and the functors that need to be instantatied
with the diffing algorithm parameters
*)
module Define(D:Defs): sig
open D
(** The type of potential changes on a list. *)
type nonrec change = (left,right,eq,diff) change
type patch = change list
(** A patch is an ordered list of changes. *)
module type Parameters = sig
type update_result
val weight: change -> int
(** [weight ch] returns the weight of the change [ch].
Used to find the smallest patch. *)
val test: state -> left -> right -> (eq, diff) result
*
[ test xr ] tests if the elements [ xl ] and [ xr ] are
co mpatible ( [ Ok ] ) or not ( [ Error ] ) .
[test st xl xr] tests if the elements [xl] and [xr] are
co mpatible ([Ok]) or not ([Error]).
*)
val update: change -> state -> update_result
(** [update ch st] returns the new state after applying a change.
The [update_result] type also contains expansions in the variadic
case.
*)
end
module type S = sig
val diff: state -> left array -> right array -> patch
(** [diff state l r] computes the optimal patch between [l] and [r],
using the initial state [state].
*)
end
module Simple: (Parameters with type update_result := state) -> S
* { 1 Variadic diffing }
Variadic diffing allows to expand the lists being diffed during diffing .
in one specific direction .
Variadic diffing allows to expand the lists being diffed during diffing.
in one specific direction.
*)
module Left_variadic:
(Parameters with type update_result := state * left array) -> S
module Right_variadic:
(Parameters with type update_result := state * right array) -> S
end
| null | https://raw.githubusercontent.com/janestreet/merlin-jst/980b574405617fa0dfb0b79a84a66536b46cd71b/src/ocaml/utils/diffing.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* The core types of a diffing implementation
* Detailed equality trace
* Detailed difference trace
* environment of a partial patch
* The kind of changes which is used to share printing and styling
across implementation
* The type of potential changes on a list.
* A patch is an ordered list of changes.
* [weight ch] returns the weight of the change [ch].
Used to find the smallest patch.
* [update ch st] returns the new state after applying a change.
The [update_result] type also contains expansions in the variadic
case.
* [diff state l r] computes the optimal patch between [l] and [r],
using the initial state [state].
|
, projet Cambium , Paris
Copyright 2020 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* Parametric diffing
This module implements diffing over lists of arbitrary content .
It is parameterized by
- The content of the two lists
- The equality witness when an element is kept
- The diffing witness when an element is changed
is extended to maintain state depending on the
computed changes while walking through the two lists .
The underlying algorithm is a modified Wagner - Fischer algorithm
( see < > ) .
We provide the following guarantee :
Given two lists [ l ] and [ r ] , if different patches result in different
states , we say that the state diverges .
- We always return the optimal patch on prefixes of [ l ] and [ r ]
on which state does not diverge .
- Otherwise , we return a correct but non - optimal patch where with no divergent states are optimal for the given initial state .
More precisely , the optimality of Wagner - Fischer depends on the property
that the edit - distance between a k - prefix of the left input and a l - prefix
of the right input d(k , l ) satisfies
d(k , l ) = min (
del_cost + d(k-1,l ) ,
insert_cost + d(k , l-1 ) ,
change_cost + d(k-1,l-1 )
)
Under this hypothesis , it is optimal to choose greedily the state of the
minimal patch transforming the left k - prefix into the right l - prefix as a
representative of the states of all possible patches transforming the left
k - prefix into the right l - prefix .
If this property is not satisfied , we can still choose greedily a
representative state . However , the computed patch is no more guaranteed to
be globally optimal .
Nevertheless , it is still a correct patch , which is even optimal among all
explored patches .
This module implements diffing over lists of arbitrary content.
It is parameterized by
- The content of the two lists
- The equality witness when an element is kept
- The diffing witness when an element is changed
Diffing is extended to maintain state depending on the
computed changes while walking through the two lists.
The underlying algorithm is a modified Wagner-Fischer algorithm
(see <>).
We provide the following guarantee:
Given two lists [l] and [r], if different patches result in different
states, we say that the state diverges.
- We always return the optimal patch on prefixes of [l] and [r]
on which state does not diverge.
- Otherwise, we return a correct but non-optimal patch where subpatches
with no divergent states are optimal for the given initial state.
More precisely, the optimality of Wagner-Fischer depends on the property
that the edit-distance between a k-prefix of the left input and a l-prefix
of the right input d(k,l) satisfies
d(k,l) = min (
del_cost + d(k-1,l),
insert_cost + d(k,l-1),
change_cost + d(k-1,l-1)
)
Under this hypothesis, it is optimal to choose greedily the state of the
minimal patch transforming the left k-prefix into the right l-prefix as a
representative of the states of all possible patches transforming the left
k-prefix into the right l-prefix.
If this property is not satisfied, we can still choose greedily a
representative state. However, the computed patch is no more guaranteed to
be globally optimal.
Nevertheless, it is still a correct patch, which is even optimal among all
explored patches.
*)
module type Defs = sig
type left
type right
type eq
type diff
type state
end
type change_kind =
| Deletion
| Insertion
| Modification
| Preservation
val prefix: Format.formatter -> (int * change_kind) -> unit
val style: change_kind -> Misc.Color.style list
type ('left,'right,'eq,'diff) change =
| Delete of 'left
| Insert of 'right
| Keep of 'left * 'right *' eq
| Change of 'left * 'right * 'diff
val classify: _ change -> change_kind
* [ Define(Defs ) ] creates the diffing types from the types
defined in [ ] and the functors that need to be instantatied
with the diffing algorithm parameters
defined in [Defs] and the functors that need to be instantatied
with the diffing algorithm parameters
*)
module Define(D:Defs): sig
open D
type nonrec change = (left,right,eq,diff) change
type patch = change list
module type Parameters = sig
type update_result
val weight: change -> int
val test: state -> left -> right -> (eq, diff) result
*
[ test xr ] tests if the elements [ xl ] and [ xr ] are
co mpatible ( [ Ok ] ) or not ( [ Error ] ) .
[test st xl xr] tests if the elements [xl] and [xr] are
co mpatible ([Ok]) or not ([Error]).
*)
val update: change -> state -> update_result
end
module type S = sig
val diff: state -> left array -> right array -> patch
end
module Simple: (Parameters with type update_result := state) -> S
* { 1 Variadic diffing }
Variadic diffing allows to expand the lists being diffed during diffing .
in one specific direction .
Variadic diffing allows to expand the lists being diffed during diffing.
in one specific direction.
*)
module Left_variadic:
(Parameters with type update_result := state * left array) -> S
module Right_variadic:
(Parameters with type update_result := state * right array) -> S
end
|
337e5875b826862fe7ed77a6484812e5f344a9604e4ce69a95c4b405c6b192e1 | plumatic/grab-bag | repl.clj | (ns service.repl
(:use plumbing.core)
(:require
[clojure.pprint :as pprint]
[clojure.reflect :as reflect]
[lazymap.core :as lazymap]
[plumbing.error :as err]
[crane.config :as crane-config]
[store.bucket :as bucket]
store.s3
[store.snapshots :as snapshots]
[service.graft :as graft]
[service.nameserver :as nameserver]
[service.remote-repl :as remote-repl]))
(def config
(let [config (delay (crane-config/read-dot-crane))]
(lazymap/lazy-hash-map
:ec2-keys (select-keys @config [:key :secretkey])
:snapshot-store (fn [service-name] (snapshots/snapshot-store @config service-name))
:nameserver (nameserver/nameserver-client
(bucket/bucket (merge @config {:type :s3 :name "grabbag-nameserver"}))
{:stale-s 1200 :host-key :public}))))
(defn lookup-address [service-name]
(nameserver/lookup-address (:nameserver config) service-name))
(defn s3-bucket [name & [serialize-method key-prefix]]
(bucket/bucket
(merge (:ec2-keys config)
{:type :s3 :name name}
(if serialize-method {:serialize-method serialize-method} {})
(if key-prefix {:key-prefix key-prefix} {}))))
(defn dynamo-bucket [name & [serialize-method]]
(bucket/bucket
(merge (:ec2-keys config)
{:type :dynamo :name name}
(if serialize-method {:serialize-method serialize-method} {}))))
(defn latest-snapshot [service-name]
(-> ((:snapshot-store config) service-name)
snapshots/read-latest-snapshot))
(defn show [c & [no-ancestors?]]
(pprint/print-table (sort-by :name (:members (reflect/reflect c :ancestors (not no-ancestors?))))))
(defn current-service [] (-> remote-repl/resources deref :service))
(defn update-service!
[new-service-sub-graph]
(swap! remote-repl/resources update-in [:service] graft/graft-service-graph new-service-sub-graph)
:success)
(defn restart-from!
"Graft current service, assuming you've recompiled the deploy and all code you care about.
ks are keys to restart from in the service. If no keys are provided, restart everything
except nrepl server. Provide deploy-ns as a keyword, or we default to the service type."
[deploy-ns & ks]
(let [[deploy-ns ks] (if (symbol? deploy-ns)
[deploy-ns ks]
[(symbol (str (safe-get-in @remote-repl/resources [:config :service :type]) ".deploy"))
(cons deploy-ns ks)])
ks (if (seq ks) (set ks) #(not= % :nrepl-server))]
(update-service! (aconcat (for [[k v] @(resolve (symbol (name deploy-ns) "service-graph"))]
(when (ks k) [k v]))))))
(defn init-logger! [& [level]]
(err/init-logger! (or level :info)))
(defn my-defs
"Return a list of all of the vars defined in the current namespace"
[]
(let [ns-name (str "#'" (.getName *ns*) "/")]
(keep (fn [v] (when (.contains (str v) ns-name) (.replaceAll (str v) ns-name ""))) (vals (ns-map *ns*)))))
;; to service?
;; needs to be available all over, and have access to everything
| null | https://raw.githubusercontent.com/plumatic/grab-bag/a15e943322fbbf6f00790ce5614ba6f90de1a9b5/lib/service/src/service/repl.clj | clojure | to service?
needs to be available all over, and have access to everything | (ns service.repl
(:use plumbing.core)
(:require
[clojure.pprint :as pprint]
[clojure.reflect :as reflect]
[lazymap.core :as lazymap]
[plumbing.error :as err]
[crane.config :as crane-config]
[store.bucket :as bucket]
store.s3
[store.snapshots :as snapshots]
[service.graft :as graft]
[service.nameserver :as nameserver]
[service.remote-repl :as remote-repl]))
(def config
(let [config (delay (crane-config/read-dot-crane))]
(lazymap/lazy-hash-map
:ec2-keys (select-keys @config [:key :secretkey])
:snapshot-store (fn [service-name] (snapshots/snapshot-store @config service-name))
:nameserver (nameserver/nameserver-client
(bucket/bucket (merge @config {:type :s3 :name "grabbag-nameserver"}))
{:stale-s 1200 :host-key :public}))))
(defn lookup-address [service-name]
(nameserver/lookup-address (:nameserver config) service-name))
(defn s3-bucket [name & [serialize-method key-prefix]]
(bucket/bucket
(merge (:ec2-keys config)
{:type :s3 :name name}
(if serialize-method {:serialize-method serialize-method} {})
(if key-prefix {:key-prefix key-prefix} {}))))
(defn dynamo-bucket [name & [serialize-method]]
(bucket/bucket
(merge (:ec2-keys config)
{:type :dynamo :name name}
(if serialize-method {:serialize-method serialize-method} {}))))
(defn latest-snapshot [service-name]
(-> ((:snapshot-store config) service-name)
snapshots/read-latest-snapshot))
(defn show [c & [no-ancestors?]]
(pprint/print-table (sort-by :name (:members (reflect/reflect c :ancestors (not no-ancestors?))))))
(defn current-service [] (-> remote-repl/resources deref :service))
(defn update-service!
[new-service-sub-graph]
(swap! remote-repl/resources update-in [:service] graft/graft-service-graph new-service-sub-graph)
:success)
(defn restart-from!
"Graft current service, assuming you've recompiled the deploy and all code you care about.
ks are keys to restart from in the service. If no keys are provided, restart everything
except nrepl server. Provide deploy-ns as a keyword, or we default to the service type."
[deploy-ns & ks]
(let [[deploy-ns ks] (if (symbol? deploy-ns)
[deploy-ns ks]
[(symbol (str (safe-get-in @remote-repl/resources [:config :service :type]) ".deploy"))
(cons deploy-ns ks)])
ks (if (seq ks) (set ks) #(not= % :nrepl-server))]
(update-service! (aconcat (for [[k v] @(resolve (symbol (name deploy-ns) "service-graph"))]
(when (ks k) [k v]))))))
(defn init-logger! [& [level]]
(err/init-logger! (or level :info)))
(defn my-defs
"Return a list of all of the vars defined in the current namespace"
[]
(let [ns-name (str "#'" (.getName *ns*) "/")]
(keep (fn [v] (when (.contains (str v) ns-name) (.replaceAll (str v) ns-name ""))) (vals (ns-map *ns*)))))
|
8fd0cdf5369741ceaa16c1ebf45e598fd23a37f72378cd87d44a323c7b751957 | ober/metis | db-manardb.lisp | (in-package :metis)
(defvar *manard-files* (thread-safe-hash-table))
(defvar *metis-fields* (thread-safe-hash-table))
(defvar *metis-counters* (thread-safe-hash-table))
(defvar *metis-need-files* nil)
(defvar *output-sep* "|")
(defvar ct-fields '(
;;metis::additionalEventData
;;metis::requestID
;;metis::requestParameters
;;metis::resources
;;metis::responseElements
;;metis::userIdentity
metis::eventCategory
metis::serviceEventDetails
metis::readOnly
metis::managementEvent
;;metis::tlsDetails
metis::vpcEndpointId
metis::sessionCredentialFromConsole
metis::apiVersion
metis::awsRegion
metis::errorCode
metis::errorMessage
metis::eventName
metis::eventSource
metis::eventTime
metis::eventType
metis::eventVersion
metis::sourceIPAddress
metis::userAgent
metis::userName
))
(defun manardb/init()
(unless (boundp 'manardb:use-mmap-dir)
(progn
(manardb:use-mmap-dir (or (uiop:getenv "METIS") "~/ct-manardb/"))
(when *metis-need-hashes*
(init-ct-hashes))))
(if (and (eql (hash-table-count *manard-files*) 0) *metis-need-files*)
(allocate-file-hash)))
(manardb:defmmclass files ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventCategory ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass readOnly ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass serviceEventDetails ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass managementEvent ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass sharedEventID ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass tlsDetails ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass vpcEndpointId ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass apiVersion ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass sessionCredentialFromConsole ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass additionalEventData ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass awsRegion ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass errorCode ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass errorMessage ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventID ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventName ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventSource ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventTime ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventType ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventVersion ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass recipientAccountId ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass requestID ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass requestParameters ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass resources ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass responseElements ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass sourceIPAddress ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass userAgent ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass userIdentity ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass userName ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass ct ()
((addionalEventData :initarg :additionalEventData :accessor additionalEventData)
(apiVersion :initarg :apiVersion :accessor apiVersion)
(awsRegion :initarg :awsRegion :accessor awsRegion)
(errorCode :initarg :errorCode :accessor errorCode)
(errorMessage :initarg :errorMessage :accessor errorMessage)
(eventCategory :initarg :eventCategory :accessor eventCategory)
(eventID :initarg :eventID :accessor eventID)
(eventName :initarg :eventName :accessor eventName)
(eventSource :initarg :eventSource :accessor eventSource)
(eventTime :initarg :eventTime :accessor eventTime)
(eventType :initarg :eventType :accessor eventType)
(eventVersion :initarg :eventVersion :accessor eventVersion)
(managementEvent :initarg :managementEvent :accessor managementEvent)
(readOnly :initarg :readOnly :accessor readOnly)
(recipientAccountId :initarg :recipientAccountId :accessor recipientAccountId)
(requestID :initarg :requestID :accessor requestID)
(requestParameters :initarg :requestParameters :accessor requestParameters)
(resources :initarg :resources :accessor resources)
(responseElements :initarg :responseElements :accessor responseElements)
(serviceEventDetails :initarg :serviceEventDetails :accessor serviceEventDetails)
(sessionCredentialFromConsole :initarg :sessionCredentialFromConsole :accessor sessionCredentialFromConsole)
(sharedEventID :initarg :sharedEventID :accessor sharedEventID)
(sourceIPAddress :initarg :sourceIPAddress :accessor sourceIPAddress)
(tlsDetails :initarg :tlsDetails :accessor tlsDetails)
(userAgent :initarg :userAgent :accessor userAgent)
(userIdentity :initarg :userIdentity :accessor userIdentity)
(userName :initarg :userName :accessor username)
(vpcEndpointId :initarg :vpcEndpointId :accessor vpcEndpointId)))
(defun create-klass-hash (klass)
(multiple-value-bind (id seen)
(gethash klass *metis-fields*)
(unless seen
(setf (gethash klass *metis-fields*)
(thread-safe-hash-table)))))
(defun get-obj (klass new-value)
"Return the object for a given value of klass"
(let ((obj nil))
(unless (or (null klass) (null new-value))
(progn
(create-klass-hash klass)
(multiple-value-bind (id seen)
(gethash new-value (gethash klass *metis-fields*))
(if seen
(setf obj id)
(progn
(setf obj (make-instance klass :value new-value))
(setf (gethash new-value (gethash klass *metis-fields*)) obj))))))
obj))
(defun manardb/have-we-seen-this-file (file)
(let ((name (get-filename-hash file)))
(multiple-value-bind (id seen)
(gethash name *manard-files*)
seen)))
(defun manardb/get-files (file)
(format t "manardb-get-files:~A~%" file)
(remove-if-not
(lambda (x) (string-equal
(get-filename-hash file)
(slot-value x :value2)))
(manardb:retrieve-all-instances 'metis::files)))
(defun manardb/mark-file-processed (file)
(let ((name (get-filename-hash file)))
(setf (gethash name *manard-files*) t)
(make-instance 'files :value name :idx 1)))
(defun allocate-file-hash ()
(manardb:doclass (x 'metis::files :fresh-instances nil)
(setf (gethash (slot-value x 'value) *manard-files*) t)))
(defun allocate-klass-hash (klass)
(or (hash-table-p (gethash klass *metis-fields*))
(progn
(format t "allocating class:~A~%" klass)
(create-klass-hash klass)
(manardb:doclass (x klass :fresh-instances nil)
(with-slots (value idx) x
(setf (gethash value
(gethash klass *metis-fields*)) idx)))
(setf (gethash klass *metis-counters*)
(get-max-id-from-hash
(gethash klass *metis-fields*))))))
(defun get-max-id-from-hash (hash)
(let* ((idxs (alexandria:hash-table-values hash))
(max-id 0))
(and idxs
(setf max-id (+ 1 (hash-table-count hash))))
max-id))
(defun init-ct-hashes ()
(mapc
#'(lambda (x)
(time (allocate-klass-hash x)))
ct-fields))
(defun get-stats ()
(format t "Totals ct:~A files:~A flows:~A vpc-files:~A ec:~A srcaddr:~A dstaddr:~A srcport:~A dstport:~A protocol:~A~%"
(manardb:count-all-instances 'metis::ct)
(manardb:count-all-instances 'metis::files)
(manardb:count-all-instances 'metis::flow)
(manardb:count-all-instances 'metis::flow-files)
(manardb:count-all-instances 'metis::errorCode)
(manardb:count-all-instances 'metis::srcaddr)
(manardb:count-all-instances 'metis::dstaddr)
(manardb:count-all-instances 'metis::srcport)
(manardb:count-all-instances 'metis::dstport)
(manardb:count-all-instances 'metis::protocol)))
(defun find-username (userIdentity)
(let ((a (fetch-value '(:|sessionContext| :|sessionIssuer| :|userName|) userIdentity))
(b (fetch-value '(:|sessionContext| :|userName|) userIdentity))
(c (fetch-value '(:|userName|) userIdentity))
(d (car (last (cl-ppcre:split ":" (fetch-value '(:|arn|) userIdentity)))))
(e (fetch-value '(:|type|) userIdentity))
(len (length userIdentity)))
(or a b c d e)))
(defun get-unique-values (klass)
"Return unique list of klass objects"
(manardb:doclass (x klass :fresh-instances nil)
(with-slots (value idx) x
(format t "~%~A: ~A" idx value))))
(defun get-unique-values-list (klass)
"Return unique list of klass objects"
(let ((results '()))
(manardb:doclass (x klass :fresh-instances nil)
(with-slots (value idx) x
(push value results)))
results))
;; lists
(defun get-serviceEventDetails ()
"return unique list of serviceEventDetails"
(get-unique-values 'metis::serviceEventDetails))
(defun get-eventCategory ()
"return unique list of eventCategory"
(get-unique-values 'metis::eventCategory))
(defun get-readOnly ()
"return unique list of readOnly"
(get-unique-values 'metis::readOnly))
(defun get-managementEvent ()
"return unique list of managementEvent"
(get-unique-values 'metis::managementEvent))
(defun get-sharedEventID ()
"return unique list of sharedEventID"
(get-unique-values 'metis::sharedEventID))
(defun get-tlsDetails ()
"return unique list of tlsDetails"
(get-unique-values 'metis::tlsDetails))
(defun get-vpcEndpointId ()
"return unique list of vpcEndpointId"
(get-unique-values 'metis::vpcEndpointId))
(defun get-apiVersion ()
"return unique list of apiVersion"
(get-unique-values 'metis::apiVersion))
(defun get-ct-files ()
"Return unique list of ct files"
(get-unique-values 'metis::files))
(defun get-cts ()
"Return unique cts"
(get-unique-values 'metis::ct))
(defun get-event-list ()
"Return unique list of events"
(get-unique-values 'metis::eventname))
(defun get-errorcode-list ()
"Return unique list of events"
(get-unique-values 'metis::errorcode))
(defun get-response-elements ()
"Return unique list of events"
(get-unique-values 'metis::responseElements))
(defun get-name-list ()
"Return unique list of events"
(get-unique-values 'metis::username))
(defun get-region-list ()
"Return unique list of events"
(get-unique-values 'metis::awsRegion))
(defun get-sourceips-list ()
"Return unique list of events"
(get-unique-values 'metis::sourceIPAddress))
(defun get-val (obj)
(if (null obj)
obj
(slot-value obj 'value)))
(defun get-obj-by-val (klass val)
(let ((obj-list nil))
(manardb:doclass (x klass :fresh-instances nil)
(with-slots (value) x
(if (string-equal val value)
(push x obj-list))))
obj-list))
(defun ct-get-by-klass-value (klass value &optional inverse)
(format t "~{~A~}" (ct-get-by-klass-value-real klass value (or inverse nil))))
(defun ct-get-by-klass-value-real (klass value &optional inverse)
(allocate-klass-hash klass)
(let ((results '())
(klass-hash (gethash klass *metis-fields*))
(slotv nil))
(multiple-value-bind (id seen)
(gethash value klass-hash)
(when (or seen inverse)
(manardb:doclass
(x 'metis::ct :fresh-instances nil)
(with-slots (userName
eventTime
eventName
eventSource
sourceIPAddress
userAgent
errorMessage
errorCode
userIdentity
requestParameters
responseElements
awsRegion) x
(cond
((equal (find-class klass) (find-class 'metis::userName)) (setf slotv userName))
((equal (find-class klass) (find-class 'metis::awsRegion)) (setf slotv awsRegion))
((equal (find-class klass) (find-class 'metis::eventName)) (setf slotv eventName))
((equal (find-class klass) (find-class 'metis::eventSource)) (setf slotv eventSource))
((equal (find-class klass) (find-class 'metis::sourceIPAddress)) (setf slotv sourceIPAddress))
((equal (find-class klass) (find-class 'metis::errorMessage)) (setf slotv errorMessage))
((equal (find-class klass) (find-class 'metis::errorCode)) (setf slotv errorCode))
((equal (find-class klass) (find-class 'metis::requestParameters)) (setf slotv requestParameters))
((equal (find-class klass) (find-class 'metis::responseElements)) (setf slotv responseElements))
)
(when
(or
(and inverse slotv)
(and slotv (ignore-errors (= slotv id))))
(push
(format nil "|~{~A | ~}~%"
(list
(get-val-by-idx 'metis::eventTime eventTime)
(get-val-by-idx 'metis::eventName eventName)
(get-val-by-idx 'metis::userName userName)
(get-val-by-idx 'metis::eventSource eventSource)
(get-val-by-idx 'metis::sourceIPAddress sourceIPAddress)
(get-val-by-idx 'metis::userAgent userAgent)
(get-val-by-idx 'metis::errorMessage errorMessage)
(get-val-by-idx 'metis::errorCode errorCode)
(cleanup-output (cl-ppcre:regex-replace-all "\\n" (format nil "~A" (get-val-by-idx 'metis::requestParameters requestParameters)) ""))
(cleanup-output (cl-ppcre:regex-replace-all "\\n" (format nil "~A" (get-val-by-idx 'metis::responseElements responseElements)) ""))
(get-val-by-idx 'metis::userName userName)
;;(find-username (get-val-by-idx 'metis::userIdentity userIdentity))
))
results))))))
results))
(defun cleanup-output (str)
(let* ((no-dupes (cl-ppcre:regex-replace-all "[\\t ]+" str " "))
(no-returns (cl-ppcre:regex-replace-all "\\n" no-dupes " "))
no-returns)))
;; Search functions
(defun ct-get-all-errors ()
(ct-get-by-klass-value 'metis::errorCode nil t))
(defun ct-get-by-name (name)
(ct-get-by-klass-value 'metis::userName name))
(defun ct-get-by-region (region)
(ct-get-by-klass-value 'metis::awsRegion region))
(defun ct-get-by-errorcode (name)
(ct-get-by-klass-value 'metis::errorCode name))
(defun ct-get-by-errorMessage (name)
(ct-get-by-klass-value 'metis::errorMessage name))
(defun ct-get-by-eventName (name)
(ct-get-by-klass-value 'metis::eventName name))
(defun ct-get-by-eventSource (name)
(ct-get-by-klass-value 'metis::eventSource name))
(defun ct-get-by-sourceIPAddress (name)
(ct-get-by-klass-value 'metis::sourceIPAddress name))
(defun manardb/normalize-insert (record)
(destructuring-bind (
additionalEventData
apiVersion
awsRegion
errorCode
errorMessage
eventCategory
eventID
eventName
eventSource
eventTime
eventType
eventVersion
managementEvent
readOnly
recipientAccountId
requestID
requestParameters
resources
responseElements
serviceEventDetails
sessionCredentialFromConsole
sharedEventID
sourceIPAddress
tlsDetails
userAgent
userIdentity
userName
vpcEndpointId
)
record
(let ((additionalEventData-i (get-idx 'metis::additionalEventData additionalEventData))
(apiVersion-i (get-idx 'metis::apiVersion apiVersion))
(awsRegion-i (get-idx 'metis::awsRegion awsRegion))
(errorCode-i (get-idx 'metis::errorCode errorCode))
(errorMessage-i (get-idx 'metis::errorMessage errorMessage))
(eventCategory-i (get-idx 'metis::eventCategory eventCategory ))
(eventID-i (get-idx 'metis::eventID eventID))
(eventName-i (get-idx 'metis::eventName eventName))
(eventSource-i (get-idx 'metis::eventSource eventSource))
(eventTime-i (get-idx 'metis::eventTime eventTime))
(eventType-i (get-idx 'metis::eventType eventType))
(eventVersion-i (get-idx 'metis::eventVersion eventVersion))
(managementEvent-i (get-idx 'metis::managementEvent managementEvent))
(readOnly-i (get-idx 'metis::readOnly readOnly))
(recipientAccountId-i (get-idx 'metis::recipientAccountId recipientAccountId))
(requestID-i (get-idx 'metis::requestID requestID))
(requestParameters-i (get-idx 'metis::requestParameters requestParameters))
(resources-i (get-idx 'metis::resources resources))
(responseElements-i (get-idx 'metis::responseElements responseElements))
(serviceEventDetails-i (get-idx 'metis::serviceEventDetails serviceEventDetails))
(sessionCredentialFromConsole-i (get-idx 'metis::sessionCredentialFromConsole sessionCredentialFromConsole))
(sharedEventID-i (get-idx 'metis::sharedEventID sharedEventID))
(sourceIPAddress-i (get-idx 'metis::sourceIPAddress sourceIPAddress))
(tlsDetails-i (get-idx 'metis::tlsDetails tlsDetails))
(userAgent-i (get-idx 'metis::userAgent userAgent))
(userIdentity-i (get-idx 'metis::userIdentity userIdentity))
(userName-i (get-idx 'metis::userName (or userName (find-username userIdentity))))
(vpcEndpointId-i (get-idx 'metis::vpcEndpointId vpcEndpointId)))
(make-instance 'ct
:additionalEventData additionalEventData-i
:apiVersion apiVersion-i
:awsRegion awsRegion-i
:errorCode errorCode-i
:errorMessage errorMessage-i
:eventCategory eventCategory-i
:eventID eventID-i
:eventName eventName-i
:eventSource eventSource-i
:eventTime eventTime-i
:eventType eventType-i
:eventVersion eventVersion-i
:managementEvent managementEvent-i
:readOnly readOnly-i
:recipientAccountId recipientAccountId-i
:requestID requestID-i
:requestParameters requestParameters-i
:resources resources-i
:responseElements responseElements-i
:serviceEventDetails serviceEventDetails-i
:sessionCredentialFromConsole sessionCredentialFromConsole-i
:sharedEventID sharedEventID-i
:sourceIPAddress sourceIPAddress-i
:tlsDetails tlsDetails-i
:userAgent userAgent-i
:userIdentity userIdentity-i
:userName userName-i
:vpcEndpointId vpcEndpointId-i
))))
(defun cleanse (var)
(typecase var
(null (string var))
(string var)
(list (format nil "~{~s = ~s~%~}" var))))
| null | https://raw.githubusercontent.com/ober/metis/80062f3b23536cf42ef6e90c6b15f06307f1fdc9/src/db-manardb.lisp | lisp | metis::additionalEventData
metis::requestID
metis::requestParameters
metis::resources
metis::responseElements
metis::userIdentity
metis::tlsDetails
lists
(find-username (get-val-by-idx 'metis::userIdentity userIdentity))
Search functions | (in-package :metis)
(defvar *manard-files* (thread-safe-hash-table))
(defvar *metis-fields* (thread-safe-hash-table))
(defvar *metis-counters* (thread-safe-hash-table))
(defvar *metis-need-files* nil)
(defvar *output-sep* "|")
(defvar ct-fields '(
metis::eventCategory
metis::serviceEventDetails
metis::readOnly
metis::managementEvent
metis::vpcEndpointId
metis::sessionCredentialFromConsole
metis::apiVersion
metis::awsRegion
metis::errorCode
metis::errorMessage
metis::eventName
metis::eventSource
metis::eventTime
metis::eventType
metis::eventVersion
metis::sourceIPAddress
metis::userAgent
metis::userName
))
(defun manardb/init()
(unless (boundp 'manardb:use-mmap-dir)
(progn
(manardb:use-mmap-dir (or (uiop:getenv "METIS") "~/ct-manardb/"))
(when *metis-need-hashes*
(init-ct-hashes))))
(if (and (eql (hash-table-count *manard-files*) 0) *metis-need-files*)
(allocate-file-hash)))
(manardb:defmmclass files ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventCategory ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass readOnly ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass serviceEventDetails ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass managementEvent ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass sharedEventID ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass tlsDetails ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass vpcEndpointId ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass apiVersion ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass sessionCredentialFromConsole ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass additionalEventData ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass awsRegion ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass errorCode ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass errorMessage ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventID ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventName ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventSource ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventTime ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventType ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass eventVersion ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass recipientAccountId ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass requestID ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass requestParameters ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass resources ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass responseElements ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass sourceIPAddress ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass userAgent ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass userIdentity ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass userName ()
((value :initarg :value :accessor value)
(idx :initarg :idx :accessor idx)))
(manardb:defmmclass ct ()
((addionalEventData :initarg :additionalEventData :accessor additionalEventData)
(apiVersion :initarg :apiVersion :accessor apiVersion)
(awsRegion :initarg :awsRegion :accessor awsRegion)
(errorCode :initarg :errorCode :accessor errorCode)
(errorMessage :initarg :errorMessage :accessor errorMessage)
(eventCategory :initarg :eventCategory :accessor eventCategory)
(eventID :initarg :eventID :accessor eventID)
(eventName :initarg :eventName :accessor eventName)
(eventSource :initarg :eventSource :accessor eventSource)
(eventTime :initarg :eventTime :accessor eventTime)
(eventType :initarg :eventType :accessor eventType)
(eventVersion :initarg :eventVersion :accessor eventVersion)
(managementEvent :initarg :managementEvent :accessor managementEvent)
(readOnly :initarg :readOnly :accessor readOnly)
(recipientAccountId :initarg :recipientAccountId :accessor recipientAccountId)
(requestID :initarg :requestID :accessor requestID)
(requestParameters :initarg :requestParameters :accessor requestParameters)
(resources :initarg :resources :accessor resources)
(responseElements :initarg :responseElements :accessor responseElements)
(serviceEventDetails :initarg :serviceEventDetails :accessor serviceEventDetails)
(sessionCredentialFromConsole :initarg :sessionCredentialFromConsole :accessor sessionCredentialFromConsole)
(sharedEventID :initarg :sharedEventID :accessor sharedEventID)
(sourceIPAddress :initarg :sourceIPAddress :accessor sourceIPAddress)
(tlsDetails :initarg :tlsDetails :accessor tlsDetails)
(userAgent :initarg :userAgent :accessor userAgent)
(userIdentity :initarg :userIdentity :accessor userIdentity)
(userName :initarg :userName :accessor username)
(vpcEndpointId :initarg :vpcEndpointId :accessor vpcEndpointId)))
(defun create-klass-hash (klass)
(multiple-value-bind (id seen)
(gethash klass *metis-fields*)
(unless seen
(setf (gethash klass *metis-fields*)
(thread-safe-hash-table)))))
(defun get-obj (klass new-value)
"Return the object for a given value of klass"
(let ((obj nil))
(unless (or (null klass) (null new-value))
(progn
(create-klass-hash klass)
(multiple-value-bind (id seen)
(gethash new-value (gethash klass *metis-fields*))
(if seen
(setf obj id)
(progn
(setf obj (make-instance klass :value new-value))
(setf (gethash new-value (gethash klass *metis-fields*)) obj))))))
obj))
(defun manardb/have-we-seen-this-file (file)
(let ((name (get-filename-hash file)))
(multiple-value-bind (id seen)
(gethash name *manard-files*)
seen)))
(defun manardb/get-files (file)
(format t "manardb-get-files:~A~%" file)
(remove-if-not
(lambda (x) (string-equal
(get-filename-hash file)
(slot-value x :value2)))
(manardb:retrieve-all-instances 'metis::files)))
(defun manardb/mark-file-processed (file)
(let ((name (get-filename-hash file)))
(setf (gethash name *manard-files*) t)
(make-instance 'files :value name :idx 1)))
(defun allocate-file-hash ()
(manardb:doclass (x 'metis::files :fresh-instances nil)
(setf (gethash (slot-value x 'value) *manard-files*) t)))
(defun allocate-klass-hash (klass)
(or (hash-table-p (gethash klass *metis-fields*))
(progn
(format t "allocating class:~A~%" klass)
(create-klass-hash klass)
(manardb:doclass (x klass :fresh-instances nil)
(with-slots (value idx) x
(setf (gethash value
(gethash klass *metis-fields*)) idx)))
(setf (gethash klass *metis-counters*)
(get-max-id-from-hash
(gethash klass *metis-fields*))))))
(defun get-max-id-from-hash (hash)
(let* ((idxs (alexandria:hash-table-values hash))
(max-id 0))
(and idxs
(setf max-id (+ 1 (hash-table-count hash))))
max-id))
(defun init-ct-hashes ()
(mapc
#'(lambda (x)
(time (allocate-klass-hash x)))
ct-fields))
(defun get-stats ()
(format t "Totals ct:~A files:~A flows:~A vpc-files:~A ec:~A srcaddr:~A dstaddr:~A srcport:~A dstport:~A protocol:~A~%"
(manardb:count-all-instances 'metis::ct)
(manardb:count-all-instances 'metis::files)
(manardb:count-all-instances 'metis::flow)
(manardb:count-all-instances 'metis::flow-files)
(manardb:count-all-instances 'metis::errorCode)
(manardb:count-all-instances 'metis::srcaddr)
(manardb:count-all-instances 'metis::dstaddr)
(manardb:count-all-instances 'metis::srcport)
(manardb:count-all-instances 'metis::dstport)
(manardb:count-all-instances 'metis::protocol)))
(defun find-username (userIdentity)
(let ((a (fetch-value '(:|sessionContext| :|sessionIssuer| :|userName|) userIdentity))
(b (fetch-value '(:|sessionContext| :|userName|) userIdentity))
(c (fetch-value '(:|userName|) userIdentity))
(d (car (last (cl-ppcre:split ":" (fetch-value '(:|arn|) userIdentity)))))
(e (fetch-value '(:|type|) userIdentity))
(len (length userIdentity)))
(or a b c d e)))
(defun get-unique-values (klass)
"Return unique list of klass objects"
(manardb:doclass (x klass :fresh-instances nil)
(with-slots (value idx) x
(format t "~%~A: ~A" idx value))))
(defun get-unique-values-list (klass)
"Return unique list of klass objects"
(let ((results '()))
(manardb:doclass (x klass :fresh-instances nil)
(with-slots (value idx) x
(push value results)))
results))
(defun get-serviceEventDetails ()
"return unique list of serviceEventDetails"
(get-unique-values 'metis::serviceEventDetails))
(defun get-eventCategory ()
"return unique list of eventCategory"
(get-unique-values 'metis::eventCategory))
(defun get-readOnly ()
"return unique list of readOnly"
(get-unique-values 'metis::readOnly))
(defun get-managementEvent ()
"return unique list of managementEvent"
(get-unique-values 'metis::managementEvent))
(defun get-sharedEventID ()
"return unique list of sharedEventID"
(get-unique-values 'metis::sharedEventID))
(defun get-tlsDetails ()
"return unique list of tlsDetails"
(get-unique-values 'metis::tlsDetails))
(defun get-vpcEndpointId ()
"return unique list of vpcEndpointId"
(get-unique-values 'metis::vpcEndpointId))
(defun get-apiVersion ()
"return unique list of apiVersion"
(get-unique-values 'metis::apiVersion))
(defun get-ct-files ()
"Return unique list of ct files"
(get-unique-values 'metis::files))
(defun get-cts ()
"Return unique cts"
(get-unique-values 'metis::ct))
(defun get-event-list ()
"Return unique list of events"
(get-unique-values 'metis::eventname))
(defun get-errorcode-list ()
"Return unique list of events"
(get-unique-values 'metis::errorcode))
(defun get-response-elements ()
"Return unique list of events"
(get-unique-values 'metis::responseElements))
(defun get-name-list ()
"Return unique list of events"
(get-unique-values 'metis::username))
(defun get-region-list ()
"Return unique list of events"
(get-unique-values 'metis::awsRegion))
(defun get-sourceips-list ()
"Return unique list of events"
(get-unique-values 'metis::sourceIPAddress))
(defun get-val (obj)
(if (null obj)
obj
(slot-value obj 'value)))
(defun get-obj-by-val (klass val)
(let ((obj-list nil))
(manardb:doclass (x klass :fresh-instances nil)
(with-slots (value) x
(if (string-equal val value)
(push x obj-list))))
obj-list))
(defun ct-get-by-klass-value (klass value &optional inverse)
(format t "~{~A~}" (ct-get-by-klass-value-real klass value (or inverse nil))))
(defun ct-get-by-klass-value-real (klass value &optional inverse)
(allocate-klass-hash klass)
(let ((results '())
(klass-hash (gethash klass *metis-fields*))
(slotv nil))
(multiple-value-bind (id seen)
(gethash value klass-hash)
(when (or seen inverse)
(manardb:doclass
(x 'metis::ct :fresh-instances nil)
(with-slots (userName
eventTime
eventName
eventSource
sourceIPAddress
userAgent
errorMessage
errorCode
userIdentity
requestParameters
responseElements
awsRegion) x
(cond
((equal (find-class klass) (find-class 'metis::userName)) (setf slotv userName))
((equal (find-class klass) (find-class 'metis::awsRegion)) (setf slotv awsRegion))
((equal (find-class klass) (find-class 'metis::eventName)) (setf slotv eventName))
((equal (find-class klass) (find-class 'metis::eventSource)) (setf slotv eventSource))
((equal (find-class klass) (find-class 'metis::sourceIPAddress)) (setf slotv sourceIPAddress))
((equal (find-class klass) (find-class 'metis::errorMessage)) (setf slotv errorMessage))
((equal (find-class klass) (find-class 'metis::errorCode)) (setf slotv errorCode))
((equal (find-class klass) (find-class 'metis::requestParameters)) (setf slotv requestParameters))
((equal (find-class klass) (find-class 'metis::responseElements)) (setf slotv responseElements))
)
(when
(or
(and inverse slotv)
(and slotv (ignore-errors (= slotv id))))
(push
(format nil "|~{~A | ~}~%"
(list
(get-val-by-idx 'metis::eventTime eventTime)
(get-val-by-idx 'metis::eventName eventName)
(get-val-by-idx 'metis::userName userName)
(get-val-by-idx 'metis::eventSource eventSource)
(get-val-by-idx 'metis::sourceIPAddress sourceIPAddress)
(get-val-by-idx 'metis::userAgent userAgent)
(get-val-by-idx 'metis::errorMessage errorMessage)
(get-val-by-idx 'metis::errorCode errorCode)
(cleanup-output (cl-ppcre:regex-replace-all "\\n" (format nil "~A" (get-val-by-idx 'metis::requestParameters requestParameters)) ""))
(cleanup-output (cl-ppcre:regex-replace-all "\\n" (format nil "~A" (get-val-by-idx 'metis::responseElements responseElements)) ""))
(get-val-by-idx 'metis::userName userName)
))
results))))))
results))
(defun cleanup-output (str)
(let* ((no-dupes (cl-ppcre:regex-replace-all "[\\t ]+" str " "))
(no-returns (cl-ppcre:regex-replace-all "\\n" no-dupes " "))
no-returns)))
(defun ct-get-all-errors ()
(ct-get-by-klass-value 'metis::errorCode nil t))
(defun ct-get-by-name (name)
(ct-get-by-klass-value 'metis::userName name))
(defun ct-get-by-region (region)
(ct-get-by-klass-value 'metis::awsRegion region))
(defun ct-get-by-errorcode (name)
(ct-get-by-klass-value 'metis::errorCode name))
(defun ct-get-by-errorMessage (name)
(ct-get-by-klass-value 'metis::errorMessage name))
(defun ct-get-by-eventName (name)
(ct-get-by-klass-value 'metis::eventName name))
(defun ct-get-by-eventSource (name)
(ct-get-by-klass-value 'metis::eventSource name))
(defun ct-get-by-sourceIPAddress (name)
(ct-get-by-klass-value 'metis::sourceIPAddress name))
(defun manardb/normalize-insert (record)
(destructuring-bind (
additionalEventData
apiVersion
awsRegion
errorCode
errorMessage
eventCategory
eventID
eventName
eventSource
eventTime
eventType
eventVersion
managementEvent
readOnly
recipientAccountId
requestID
requestParameters
resources
responseElements
serviceEventDetails
sessionCredentialFromConsole
sharedEventID
sourceIPAddress
tlsDetails
userAgent
userIdentity
userName
vpcEndpointId
)
record
(let ((additionalEventData-i (get-idx 'metis::additionalEventData additionalEventData))
(apiVersion-i (get-idx 'metis::apiVersion apiVersion))
(awsRegion-i (get-idx 'metis::awsRegion awsRegion))
(errorCode-i (get-idx 'metis::errorCode errorCode))
(errorMessage-i (get-idx 'metis::errorMessage errorMessage))
(eventCategory-i (get-idx 'metis::eventCategory eventCategory ))
(eventID-i (get-idx 'metis::eventID eventID))
(eventName-i (get-idx 'metis::eventName eventName))
(eventSource-i (get-idx 'metis::eventSource eventSource))
(eventTime-i (get-idx 'metis::eventTime eventTime))
(eventType-i (get-idx 'metis::eventType eventType))
(eventVersion-i (get-idx 'metis::eventVersion eventVersion))
(managementEvent-i (get-idx 'metis::managementEvent managementEvent))
(readOnly-i (get-idx 'metis::readOnly readOnly))
(recipientAccountId-i (get-idx 'metis::recipientAccountId recipientAccountId))
(requestID-i (get-idx 'metis::requestID requestID))
(requestParameters-i (get-idx 'metis::requestParameters requestParameters))
(resources-i (get-idx 'metis::resources resources))
(responseElements-i (get-idx 'metis::responseElements responseElements))
(serviceEventDetails-i (get-idx 'metis::serviceEventDetails serviceEventDetails))
(sessionCredentialFromConsole-i (get-idx 'metis::sessionCredentialFromConsole sessionCredentialFromConsole))
(sharedEventID-i (get-idx 'metis::sharedEventID sharedEventID))
(sourceIPAddress-i (get-idx 'metis::sourceIPAddress sourceIPAddress))
(tlsDetails-i (get-idx 'metis::tlsDetails tlsDetails))
(userAgent-i (get-idx 'metis::userAgent userAgent))
(userIdentity-i (get-idx 'metis::userIdentity userIdentity))
(userName-i (get-idx 'metis::userName (or userName (find-username userIdentity))))
(vpcEndpointId-i (get-idx 'metis::vpcEndpointId vpcEndpointId)))
(make-instance 'ct
:additionalEventData additionalEventData-i
:apiVersion apiVersion-i
:awsRegion awsRegion-i
:errorCode errorCode-i
:errorMessage errorMessage-i
:eventCategory eventCategory-i
:eventID eventID-i
:eventName eventName-i
:eventSource eventSource-i
:eventTime eventTime-i
:eventType eventType-i
:eventVersion eventVersion-i
:managementEvent managementEvent-i
:readOnly readOnly-i
:recipientAccountId recipientAccountId-i
:requestID requestID-i
:requestParameters requestParameters-i
:resources resources-i
:responseElements responseElements-i
:serviceEventDetails serviceEventDetails-i
:sessionCredentialFromConsole sessionCredentialFromConsole-i
:sharedEventID sharedEventID-i
:sourceIPAddress sourceIPAddress-i
:tlsDetails tlsDetails-i
:userAgent userAgent-i
:userIdentity userIdentity-i
:userName userName-i
:vpcEndpointId vpcEndpointId-i
))))
(defun cleanse (var)
(typecase var
(null (string var))
(string var)
(list (format nil "~{~s = ~s~%~}" var))))
|
97c2b48b0edf82411b8319695c1817b942395c31145d8138c0f8d78ff37c0e34 | malcolmreynolds/GSLL | bessel.lisp | Regression test BESSEL for GSLL , automatically generated
(in-package :gsl)
(LISP-UNIT:DEFINE-TEST BESSEL
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.3971498098638474d0 4.334456411751256d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-J0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.0660433280235491d0 2.1409770694795335d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-J1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.3641281458520729d0 3.974061014982464d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-J 2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
#(0.35283402861563773d0 0.12894324947440206d0
0.033995719807568436d0 0.007039629755871686d0)
(cl-array (cylindrical-bessel-J-array-order 2.0d0 4 2)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.016940739325064968d0 1.8993556609468549d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-Y0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.3979257105570999d0 3.1396236150465943d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-Y1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.18202211595348539d0 3.355735727760045d-16)
(MULTIPLE-VALUE-LIST (cylindrical-bessel-Y 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(-0.6174081041906827d0 -1.127783776840428d0
-2.7659432263306014d0 -9.935989128481978d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(CYLINDRICAL-BESSEL-YN-ARRAY 2.0d0 BESARR 2)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 11.301921952136329d0 2.7297681442535893d-14)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-I0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 9.759465153704449d0 1.9210136786427457d-14)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-I1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 3.3372757784203446d0 8.06056628872663d-15)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-I 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 3.3372757784203437d0 1.1856385307923545d-14)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-I 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.6889484476987382d0 0.21273995923985267d0
0.05072856997918024d0 0.009825679323131702d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(CYLINDRICAL-BESSEL-IN-ARRAY 2.0d0 BESARR 2)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.2070019212239867d0 2.241925168997723d-16)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-I0-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.1787508395024353d0 1.1370197115937822d-16)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-I1-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.0611243380296663d0 9.334510342661594d-17)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-I-SCALED 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.061124338029666284d0 1.3572329489101316d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-I-SCALED 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.09323903330473338d0 0.028791222639470898d0
0.006865365386320685d0 0.0013297610941881578d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(CYLINDRICAL-BESSEL-IN-SCALED-ARRAY 2.0d0 BESARR 2)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.011159676085853023d0 2.0424662435034432d-17)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-K0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.01248349888726843d0 1.767412161819488d-17)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-K1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.017401425529487143d0 2.257461693414273d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-K 2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.029884924416755682d0 1.0617257976532701d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-K 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.6092976692566953d0 3.0340122249326356d-16)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-K0-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.681575945185671d0 3.596132979136138d-16)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-K1-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.9500856418495256d0 1.1481477659153143d-14)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-K-SCALED 2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 1.6316615870352025d0 5.072223134504136d-15)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-K-SCALED 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.2537597545660558d0 0.6473853909486341d0
2.1959159274119586d0 9.431049100596468d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(CYLINDRICAL-BESSEL-KN-ARRAY 2.0d0 BESARR 2)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.18920062382698205d0 1.6804391107692678d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-J0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.11611074925915747d0 2.387125482192573d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-J1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.27628368577135015d0 3.680838111259856d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-J2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.22924385795503022d0 7.126330661956055d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-JL 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(-0.18920062382698202d0 0.11611074925915743d0
0.2762836857713501d0 0.22924385795503022d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(SPHERICAL-BESSEL-JL-ARRAY 4.0d0 BESARR)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(-0.18920062382698208d0 0.11611074925915742d0
0.27628368577135015d0 0.22924385795503024d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(SPHERICAL-BESSEL-JL-STEED-ARRAY 4.0d0 BESARR)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.16341090521590299d0 1.4513803955642766d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-Y0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.2300533501309578d0 1.5324631572452525d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-Y1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.009129107382315343d0 1.6876604955506113d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-Y2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.009129107382315343d0 1.6876604955506113d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-YL 2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.16341090521590299d0 0.2300533501309578d0
0.009129107382315343d0 -0.21864196590306362d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(SPHERICAL-BESSEL-YL-ARRAY 4.0d0 BESARR)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.12495806717151219d0 5.5492529314587895d-17)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-I0-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.09380241603560975d0 4.165664081928078d-17)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-I1-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.05460625514480487d0 2.425004870012731d-17)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-I2-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.02554459710460367d0 5.842201171222646d-16)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-IL-SCALED 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.12495806717151212d0 0.09380241603560971d0
0.05460625514480483d0 0.02554459710460367d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(SPHERICAL-BESSEL-IL-SCALED-ARRAY 4.0d0 BESARR)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.39269908169872414d0 1.743934249004316d-16)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-K0-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.4908738521234052d0 2.1799178112553949d-16)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-K1-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.760854470791278d0 3.378872607445862d-16)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-K2-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.02554459710460367d0 5.842201171222646d-16)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-KL-SCALED 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.39269908169872414d0 0.4908738521234052d0
0.760854470791278d0 1.4419419406125027d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(SPHERICAL-BESSEL-KL-SCALED-ARRAY 4.0d0 BESARR)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.43017147387562193d0 7.641380397338472d-16)
(MULTIPLE-VALUE-LIST (cylindrical-bessel-J 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.6713967071418024d0 0.5130161365618323d0
0.06500818287738516d0))
(MULTIPLE-VALUE-LIST
(cl-array (cylindrical-bessel-J-array-x 0.5d0 #m(1.0d0 2.0d0 3.0d0)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.1820221159534852d0 2.020851441225493d-15)
(MULTIPLE-VALUE-LIST (cylindrical-bessel-Y 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -3.5104011258456183d0 4.776268519767339d-15)
(MULTIPLE-VALUE-LIST (BESSEL-LNKNU 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 14.930917708487813d0 4.4792753125463437d-14)
(MULTIPLE-VALUE-LIST (BESSEL-ZERO-J0 5)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 16.470630050877624d0 3.2941260101755246d-13)
(MULTIPLE-VALUE-LIST (BESSEL-ZERO-J1 5)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 17.95981949498783d0 3.591963898997566d-14)
(MULTIPLE-VALUE-LIST (BESSEL-ZERO-JNU 2.0d0 5))))
| null | https://raw.githubusercontent.com/malcolmreynolds/GSLL/2f722f12f1d08e1b9550a46e2a22adba8e1e52c4/tests/bessel.lisp | lisp | Regression test BESSEL for GSLL , automatically generated
(in-package :gsl)
(LISP-UNIT:DEFINE-TEST BESSEL
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.3971498098638474d0 4.334456411751256d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-J0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.0660433280235491d0 2.1409770694795335d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-J1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.3641281458520729d0 3.974061014982464d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-J 2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
#(0.35283402861563773d0 0.12894324947440206d0
0.033995719807568436d0 0.007039629755871686d0)
(cl-array (cylindrical-bessel-J-array-order 2.0d0 4 2)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.016940739325064968d0 1.8993556609468549d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-Y0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.3979257105570999d0 3.1396236150465943d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-Y1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.18202211595348539d0 3.355735727760045d-16)
(MULTIPLE-VALUE-LIST (cylindrical-bessel-Y 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(-0.6174081041906827d0 -1.127783776840428d0
-2.7659432263306014d0 -9.935989128481978d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(CYLINDRICAL-BESSEL-YN-ARRAY 2.0d0 BESARR 2)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 11.301921952136329d0 2.7297681442535893d-14)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-I0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 9.759465153704449d0 1.9210136786427457d-14)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-I1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 3.3372757784203446d0 8.06056628872663d-15)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-I 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 3.3372757784203437d0 1.1856385307923545d-14)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-I 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.6889484476987382d0 0.21273995923985267d0
0.05072856997918024d0 0.009825679323131702d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(CYLINDRICAL-BESSEL-IN-ARRAY 2.0d0 BESARR 2)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.2070019212239867d0 2.241925168997723d-16)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-I0-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.1787508395024353d0 1.1370197115937822d-16)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-I1-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.0611243380296663d0 9.334510342661594d-17)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-I-SCALED 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.061124338029666284d0 1.3572329489101316d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-I-SCALED 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.09323903330473338d0 0.028791222639470898d0
0.006865365386320685d0 0.0013297610941881578d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(CYLINDRICAL-BESSEL-IN-SCALED-ARRAY 2.0d0 BESARR 2)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.011159676085853023d0 2.0424662435034432d-17)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-K0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.01248349888726843d0 1.767412161819488d-17)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-K1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.017401425529487143d0 2.257461693414273d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-K 2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.029884924416755682d0 1.0617257976532701d-16)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-K 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.6092976692566953d0 3.0340122249326356d-16)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-K0-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.681575945185671d0 3.596132979136138d-16)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-K1-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.9500856418495256d0 1.1481477659153143d-14)
(MULTIPLE-VALUE-LIST
(CYLINDRICAL-BESSEL-K-SCALED 2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 1.6316615870352025d0 5.072223134504136d-15)
(MULTIPLE-VALUE-LIST (CYLINDRICAL-BESSEL-K-SCALED 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.2537597545660558d0 0.6473853909486341d0
2.1959159274119586d0 9.431049100596468d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(CYLINDRICAL-BESSEL-KN-ARRAY 2.0d0 BESARR 2)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.18920062382698205d0 1.6804391107692678d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-J0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.11611074925915747d0 2.387125482192573d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-J1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.27628368577135015d0 3.680838111259856d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-J2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.22924385795503022d0 7.126330661956055d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-JL 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(-0.18920062382698202d0 0.11611074925915743d0
0.2762836857713501d0 0.22924385795503022d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(SPHERICAL-BESSEL-JL-ARRAY 4.0d0 BESARR)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(-0.18920062382698208d0 0.11611074925915742d0
0.27628368577135015d0 0.22924385795503024d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(SPHERICAL-BESSEL-JL-STEED-ARRAY 4.0d0 BESARR)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.16341090521590299d0 1.4513803955642766d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-Y0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.2300533501309578d0 1.5324631572452525d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-Y1 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.009129107382315343d0 1.6876604955506113d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-Y2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.009129107382315343d0 1.6876604955506113d-16)
(MULTIPLE-VALUE-LIST (SPHERICAL-BESSEL-YL 2 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.16341090521590299d0 0.2300533501309578d0
0.009129107382315343d0 -0.21864196590306362d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(SPHERICAL-BESSEL-YL-ARRAY 4.0d0 BESARR)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.12495806717151219d0 5.5492529314587895d-17)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-I0-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.09380241603560975d0 4.165664081928078d-17)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-I1-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.05460625514480487d0 2.425004870012731d-17)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-I2-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.02554459710460367d0 5.842201171222646d-16)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-IL-SCALED 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.12495806717151212d0 0.09380241603560971d0
0.05460625514480483d0 0.02554459710460367d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(SPHERICAL-BESSEL-IL-SCALED-ARRAY 4.0d0 BESARR)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.39269908169872414d0 1.743934249004316d-16)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-K0-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.4908738521234052d0 2.1799178112553949d-16)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-K1-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.760854470791278d0 3.378872607445862d-16)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-K2-SCALED 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.02554459710460367d0 5.842201171222646d-16)
(MULTIPLE-VALUE-LIST
(SPHERICAL-BESSEL-KL-SCALED 3 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.39269908169872414d0 0.4908738521234052d0
0.760854470791278d0 1.4419419406125027d0))
(MULTIPLE-VALUE-LIST
(LET ((BESARR
(MAKE-MARRAY 'DOUBLE-FLOAT :DIMENSIONS 4)))
(SPHERICAL-BESSEL-KL-SCALED-ARRAY 4.0d0 BESARR)
(CL-ARRAY BESARR))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.43017147387562193d0 7.641380397338472d-16)
(MULTIPLE-VALUE-LIST (cylindrical-bessel-J 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST
#(0.6713967071418024d0 0.5130161365618323d0
0.06500818287738516d0))
(MULTIPLE-VALUE-LIST
(cl-array (cylindrical-bessel-J-array-x 0.5d0 #m(1.0d0 2.0d0 3.0d0)))))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -0.1820221159534852d0 2.020851441225493d-15)
(MULTIPLE-VALUE-LIST (cylindrical-bessel-Y 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -3.5104011258456183d0 4.776268519767339d-15)
(MULTIPLE-VALUE-LIST (BESSEL-LNKNU 3.0d0 4.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 14.930917708487813d0 4.4792753125463437d-14)
(MULTIPLE-VALUE-LIST (BESSEL-ZERO-J0 5)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 16.470630050877624d0 3.2941260101755246d-13)
(MULTIPLE-VALUE-LIST (BESSEL-ZERO-J1 5)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 17.95981949498783d0 3.591963898997566d-14)
(MULTIPLE-VALUE-LIST (BESSEL-ZERO-JNU 2.0d0 5))))
| |
42141a752f7c31654e41cbcddbb48fa7afa6b130bb7078b3e9231d20f55cb1cb | awakesecurity/grpc-mqtt | Core.hs |
-- |
-- Module : Network.GRPC.MQTT.Message.Request.Core
Copyright : ( c ) Arista Networks , 2022 - 2023
License : Apache License 2.0 , see COPYING
--
-- Stability : stable
Portability : non - portable ( GHC extensions )
--
Core module defining the ' Request ' type and ' Request ' instances .
--
-- @since 1.0.0
module Network.GRPC.MQTT.Message.Request.Core
( -- * Request
Request (Request, message, options, timeout, metadata),
)
where
---------------------------------------------------------------------------------
import Data.Data (Data)
import Network.GRPC.HighLevel (MetadataMap)
import Relude
---------------------------------------------------------------------------------
Orphan instances @Data and @Ord
import Network.GRPC.HighLevel.Orphans ()
import Network.GRPC.MQTT.Option (ProtoOptions)
-- Request ----------------------------------------------------------------------
-- | The 'Request' message type represents a (wrapped) client request, a
-- dictionary of gRPC metadata bound to the request, and properties configuring
-- how the request is handled.
--
-- @since 1.0.0
data Request msg = Request
{ -- | The request's 'message' is a type representing the protocol buffer
message needed to perform the RPC call the client is requesting .
message :: msg
, -- | The gRPC-MQTT specific protobuf options relevant to this request.
options :: ProtoOptions
, -- | The request's 'timeout' is the timeout period in unit seconds the
requested call has to finished before a deadline - exceeded error
-- response is sent back to the client.
timeout :: {-# UNPACK #-} !Int
, -- | The request's 'metadata' is the metadata bound to a request,
represented as a map associating ' ByteString ' keys to a list of
' ByteString ' values .
--
-- Keys registered in the 'metadata' map must only contain lowercase
characters , digits 0 - 9 , " . " , " _ " , and " - " . Otherwise , a gRPC exception
-- will be raised when the 'Request' is published. The metadata values have
-- no value restrictions.
metadata :: MetadataMap
}
deriving stock (Eq, Ord, Show)
deriving stock (Data, Generic, Typeable)
-- | @since 1.0.0
instance Functor Request where
fmap f (Request x opts to ms) = Request (f x) opts to ms
# INLINE fmap #
| null | https://raw.githubusercontent.com/awakesecurity/grpc-mqtt/fbde6f3fe90e82260469bab922c5ebb9f0b40e95/src/Network/GRPC/MQTT/Message/Request/Core.hs | haskell | |
Module : Network.GRPC.MQTT.Message.Request.Core
Stability : stable
@since 1.0.0
* Request
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
Request ----------------------------------------------------------------------
| The 'Request' message type represents a (wrapped) client request, a
dictionary of gRPC metadata bound to the request, and properties configuring
how the request is handled.
@since 1.0.0
| The request's 'message' is a type representing the protocol buffer
| The gRPC-MQTT specific protobuf options relevant to this request.
| The request's 'timeout' is the timeout period in unit seconds the
response is sent back to the client.
# UNPACK #
| The request's 'metadata' is the metadata bound to a request,
Keys registered in the 'metadata' map must only contain lowercase
will be raised when the 'Request' is published. The metadata values have
no value restrictions.
| @since 1.0.0 |
Copyright : ( c ) Arista Networks , 2022 - 2023
License : Apache License 2.0 , see COPYING
Portability : non - portable ( GHC extensions )
Core module defining the ' Request ' type and ' Request ' instances .
module Network.GRPC.MQTT.Message.Request.Core
Request (Request, message, options, timeout, metadata),
)
where
import Data.Data (Data)
import Network.GRPC.HighLevel (MetadataMap)
import Relude
Orphan instances @Data and @Ord
import Network.GRPC.HighLevel.Orphans ()
import Network.GRPC.MQTT.Option (ProtoOptions)
data Request msg = Request
message needed to perform the RPC call the client is requesting .
message :: msg
options :: ProtoOptions
requested call has to finished before a deadline - exceeded error
represented as a map associating ' ByteString ' keys to a list of
' ByteString ' values .
characters , digits 0 - 9 , " . " , " _ " , and " - " . Otherwise , a gRPC exception
metadata :: MetadataMap
}
deriving stock (Eq, Ord, Show)
deriving stock (Data, Generic, Typeable)
instance Functor Request where
fmap f (Request x opts to ms) = Request (f x) opts to ms
# INLINE fmap #
|
f565dda0579a3da00849967d304fe7cb51649022719e1f87de00ffe3830c4516 | ProjectMAC/propagators | copying-data-test.scm | ;;; ----------------------------------------------------------------------
Copyright 2009 - 2010 .
;;; ----------------------------------------------------------------------
This file is part of Propagator Network Prototype .
;;;
Propagator Network Prototype is free software ; you can
;;; redistribute it and/or modify it under the terms of the GNU
General Public License as published by the Free Software
Foundation , either version 3 of the License , or ( at your option )
;;; any later version.
;;;
Propagator Network Prototype is distributed in the hope that it
;;; will be useful, but WITHOUT ANY WARRANTY; without even the implied
;;; warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
;;; See the GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with Propagator Network Prototype . If not , see
;;; </>.
;;; ----------------------------------------------------------------------
;;; The "copying data" strategy from the thesis is given by these
;;; definitions of the cons-car-cdr propagators:
#|
(define conser (function->propagator-constructor cons))
(define carer (function->propagator-constructor (nary-unpacking car)))
(define cdrer (function->propagator-constructor (nary-unpacking cdr)))
|#
;;; This strategy is tested here, with the definitions in question
;;; appearing inside the test scope below.
;;; The "carrying cells" strategy is elaborated in
;;; extensions/carrying-cells.scm. Since the merging is the same in
both cases , the two strategies may be intermixed within the same
;;; network --- just make sure your propagators know what to expect
;;; (and there is as yet no good story for merging a piece of data and
;;; a cell, so merging a carrying cons with a copying cons will not do
;;; anything good).
(in-test-group
copying-data
(define-test (example)
(interaction
(define conser (function->propagator-constructor cons))
(define carer (function->propagator-constructor (nary-unpacking car)))
(define cdrer (function->propagator-constructor (nary-unpacking cdr)))
(initialize-scheduler)
(define-cell x)
(define-cell y)
(define-cell pair)
(conser x y pair)
(run)
(content pair)
(produces '( #(*the-nothing*) . #(*the-nothing*) ))
(define-cell control)
(define-cell switched-pair)
(switch control pair switched-pair)
(add-content control (make-tms (supported #t '(joe))))
(run)
(content switched-pair)
(produces #(tms (#(supported ( #(*the-nothing*) . #(*the-nothing*) ) (joe)))))
(define-cell x-again)
(carer switched-pair x-again)
(run)
(content x-again)
(produces #(*the-nothing*))
(add-content x (make-tms (supported 4 '(harry))))
(run)
(content pair)
(produces '( #(tms (#(supported 4 (harry)))) . #(*the-nothing*) ))
(content switched-pair)
(produces #(tms (#(supported ( #(tms (#(supported 4 (harry)))) . #(*the-nothing*) )
(joe)))))
(content x-again)
(produces #(tms (#(supported 4 (harry joe)))))
)))
| null | https://raw.githubusercontent.com/ProjectMAC/propagators/add671f009e62441e77735a88980b6b21fad7a79/core/test/copying-data-test.scm | scheme | ----------------------------------------------------------------------
----------------------------------------------------------------------
you can
redistribute it and/or modify it under the terms of the GNU
any later version.
will be useful, but WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
</>.
----------------------------------------------------------------------
The "copying data" strategy from the thesis is given by these
definitions of the cons-car-cdr propagators:
(define conser (function->propagator-constructor cons))
(define carer (function->propagator-constructor (nary-unpacking car)))
(define cdrer (function->propagator-constructor (nary-unpacking cdr)))
This strategy is tested here, with the definitions in question
appearing inside the test scope below.
The "carrying cells" strategy is elaborated in
extensions/carrying-cells.scm. Since the merging is the same in
network --- just make sure your propagators know what to expect
(and there is as yet no good story for merging a piece of data and
a cell, so merging a carrying cons with a copying cons will not do
anything good). | Copyright 2009 - 2010 .
This file is part of Propagator Network Prototype .
General Public License as published by the Free Software
Foundation , either version 3 of the License , or ( at your option )
Propagator Network Prototype is distributed in the hope that it
You should have received a copy of the GNU General Public License
along with Propagator Network Prototype . If not , see
both cases , the two strategies may be intermixed within the same
(in-test-group
copying-data
(define-test (example)
(interaction
(define conser (function->propagator-constructor cons))
(define carer (function->propagator-constructor (nary-unpacking car)))
(define cdrer (function->propagator-constructor (nary-unpacking cdr)))
(initialize-scheduler)
(define-cell x)
(define-cell y)
(define-cell pair)
(conser x y pair)
(run)
(content pair)
(produces '( #(*the-nothing*) . #(*the-nothing*) ))
(define-cell control)
(define-cell switched-pair)
(switch control pair switched-pair)
(add-content control (make-tms (supported #t '(joe))))
(run)
(content switched-pair)
(produces #(tms (#(supported ( #(*the-nothing*) . #(*the-nothing*) ) (joe)))))
(define-cell x-again)
(carer switched-pair x-again)
(run)
(content x-again)
(produces #(*the-nothing*))
(add-content x (make-tms (supported 4 '(harry))))
(run)
(content pair)
(produces '( #(tms (#(supported 4 (harry)))) . #(*the-nothing*) ))
(content switched-pair)
(produces #(tms (#(supported ( #(tms (#(supported 4 (harry)))) . #(*the-nothing*) )
(joe)))))
(content x-again)
(produces #(tms (#(supported 4 (harry joe)))))
)))
|
7e0e150656088911418ca88ed7612af9a2b9ec6a236251a85715caa1872f5621 | anthonygalea/riemann-console | db.cljs | (ns riemann-console.db)
(def sample-db
{:loading? false
:saving? false
:dashboard-settings? false
:delete-confirmation? false
:configuring-widget nil
:notification {:type :error
:message "Connected"}
:dashboards {"1" "dashboard name"
"2" "foo board"
"3" "another board"}
:streams {"a" []}
:dashboard {:id "1"
:name "dashboard name"
:description "dashboard description"
:endpoint "127.0.0.1:5556"
:widgets {"a" {:x 0 :y 0 :w 3 :h 6
:type :gauge
:query "status = \"ok\""}
"b" {:x 3 :y 0 :w 3 :h 6
:type :gauge
:query "status = \"not ok\""}
"c" {:x 6 :y 1 :w 6 :h 6
:type :time-series
:query "some other query"}}}})
| null | https://raw.githubusercontent.com/anthonygalea/riemann-console/d4eb21bbde8b191f6a4d853a3b14d370b9fc0761/src/cljs/riemann_console/db.cljs | clojure | (ns riemann-console.db)
(def sample-db
{:loading? false
:saving? false
:dashboard-settings? false
:delete-confirmation? false
:configuring-widget nil
:notification {:type :error
:message "Connected"}
:dashboards {"1" "dashboard name"
"2" "foo board"
"3" "another board"}
:streams {"a" []}
:dashboard {:id "1"
:name "dashboard name"
:description "dashboard description"
:endpoint "127.0.0.1:5556"
:widgets {"a" {:x 0 :y 0 :w 3 :h 6
:type :gauge
:query "status = \"ok\""}
"b" {:x 3 :y 0 :w 3 :h 6
:type :gauge
:query "status = \"not ok\""}
"c" {:x 6 :y 1 :w 6 :h 6
:type :time-series
:query "some other query"}}}})
| |
57d2c10d7894a2df50b7567524897bcd97d1f372777b79cc4d1e822f4fc61d5e | tfausak/patrol | Context.hs | module Patrol.Type.Context where
import qualified Data.Aeson as Aeson
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Patrol.Type.AppContext as AppContext
import qualified Patrol.Type.BrowserContext as BrowserContext
import qualified Patrol.Type.DeviceContext as DeviceContext
import qualified Patrol.Type.GpuContext as GpuContext
import qualified Patrol.Type.OsContext as OsContext
import qualified Patrol.Type.RuntimeContext as RuntimeContext
import qualified Patrol.Type.TraceContext as TraceContext
data Context
= App AppContext.AppContext
| Browser BrowserContext.BrowserContext
| Device DeviceContext.DeviceContext
| Gpu GpuContext.GpuContext
| Os OsContext.OsContext
| Runtime RuntimeContext.RuntimeContext
| Trace TraceContext.TraceContext
| Other (Map.Map Text.Text Aeson.Value)
deriving (Eq, Show)
instance Aeson.ToJSON Context where
toJSON context = case context of
App appContext -> Aeson.toJSON appContext
Browser browserContext -> Aeson.toJSON browserContext
Device deviceContext -> Aeson.toJSON deviceContext
Gpu gpuContext -> Aeson.toJSON gpuContext
Os osContext -> Aeson.toJSON osContext
Runtime runtimeContext -> Aeson.toJSON runtimeContext
Trace traceContext -> Aeson.toJSON traceContext
Other other -> Aeson.toJSON other
| null | https://raw.githubusercontent.com/tfausak/patrol/1cae55b3840b328cda7de85ea424333fcab434cb/source/library/Patrol/Type/Context.hs | haskell | module Patrol.Type.Context where
import qualified Data.Aeson as Aeson
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Patrol.Type.AppContext as AppContext
import qualified Patrol.Type.BrowserContext as BrowserContext
import qualified Patrol.Type.DeviceContext as DeviceContext
import qualified Patrol.Type.GpuContext as GpuContext
import qualified Patrol.Type.OsContext as OsContext
import qualified Patrol.Type.RuntimeContext as RuntimeContext
import qualified Patrol.Type.TraceContext as TraceContext
data Context
= App AppContext.AppContext
| Browser BrowserContext.BrowserContext
| Device DeviceContext.DeviceContext
| Gpu GpuContext.GpuContext
| Os OsContext.OsContext
| Runtime RuntimeContext.RuntimeContext
| Trace TraceContext.TraceContext
| Other (Map.Map Text.Text Aeson.Value)
deriving (Eq, Show)
instance Aeson.ToJSON Context where
toJSON context = case context of
App appContext -> Aeson.toJSON appContext
Browser browserContext -> Aeson.toJSON browserContext
Device deviceContext -> Aeson.toJSON deviceContext
Gpu gpuContext -> Aeson.toJSON gpuContext
Os osContext -> Aeson.toJSON osContext
Runtime runtimeContext -> Aeson.toJSON runtimeContext
Trace traceContext -> Aeson.toJSON traceContext
Other other -> Aeson.toJSON other
| |
73b69b69ff742b0e0eebed12ddb521f300478710c1e8a46807242eac1ee86423 | urueedi/monster-ui-phonebook | wapi_offnet_resource.erl | %%%-------------------------------------------------------------------
%%% @copyright (C) 2011-2015, 2600Hz INC
%%% @doc
%%%
%%% @end
%%% @contributors
%%%-------------------------------------------------------------------
-module(wapi_offnet_resource).
-export([req/1, req_v/1]).
-export([resp/1, resp_v/1]).
-export([publish_req/1, publish_req/2]).
-export([publish_resp/2, publish_resp/3]).
-export([bind_q/2]).
-export([unbind_q/2]).
-export([declare_exchanges/0]).
-export([account_id/1, account_id/2
,account_realm/1, account_realm/2
,b_leg_events/1, b_leg_events/2
,body/1, body/2
,bypass_e164/1, bypass_e164/2
,call_id/1, call_id/2
,control_queue/1, control_queue/2
,custom_channel_vars/1, custom_channel_vars/2
,custom_sip_headers/1, custom_sip_headers/2
,emergency_caller_id_name/1, emergency_caller_id_name/2
,emergency_caller_id_number/1, emergency_caller_id_number/2
,fax_identity_name/1, fax_identity_name/2
,fax_identity_number/1, fax_identity_number/2
,flags/1, flags/2
,force_outbound/1, force_outbound/2
,format_from_uri/1, format_from_uri/2
,from_uri_realm/1, from_uri_realm/2
,hold_media/1, hold_media/2
,hunt_account_id/1, hunt_account_id/2
,ignore_early_media/1, ignore_early_media/2
,media/1, media/2
,message_id/1, message_id/2
,outbound_call_id/1, outbound_call_id/2
,outbound_callee_id_name/1, outbound_callee_id_name/2
,outbound_callee_id_number/1, outbound_callee_id_number/2
,outbound_caller_id_name/1, outbound_caller_id_name/2
,outbound_caller_id_number/1, outbound_caller_id_number/2
,caller_classifier/1, caller_classifier/2
,callee_classifier/1, callee_classifier/2
,presence_id/1, presence_id/2
,resource_type/1, resource_type/2
,ringback/1, ringback/2
,timeout/1, timeout/2
,t38_enabled/1, t38_enabled/2
,to_did/1, to_did/2
,msg_id/1
,server_id/1
,set_outbound_call_id/2
,delete_keys/2
,set_values/2
]).
%% helpers for working with opaque object
-export([jobj_to_req/1
,req_to_jobj/1
,put_callid/1
]).
-include_lib("whistle/include/wh_api.hrl").
-include_lib("whistle/include/wh_amqp.hrl").
-include_lib("whistle/include/wapi_offnet_resource.hrl").
-export_type([req/0
,resp/0
]).
%% Opaque-handle envelopes: currently the identity, but all pattern
%% matching goes through these macros so the representation can change
%% without touching every accessor.
-define(REQ_TYPE(JObj), JObj).
-define(RESP_TYPE(JObj), JObj).
%% req()/resp() are wh_json objects today; treat them as opaque and use
%% the accessor functions below.
-type req() :: wh_json:object().
-type resp() :: wh_json:object().
%% Offnet Resource Request
%% Keys that MUST be present for wh_api:build_message/3 to succeed.
-define(OFFNET_RESOURCE_REQ_HEADERS, [?KEY_APPLICATION_NAME
                                      ,?KEY_RESOURCE_TYPE
                                      ,?KEY_TO_DID
                                     ]).
%% Keys an offnet request MAY carry; wh_api:build_message/3 copies any
%% of these that are present.  Fix: ?KEY_CALL_ID was listed twice
%% (copy-paste duplicate) — harmless at runtime but misleading, so the
%% duplicate entry has been removed.
-define(OPTIONAL_OFFNET_RESOURCE_REQ_HEADERS
        ,[?KEY_ACCOUNT_ID
          ,?KEY_ACCOUNT_REALM
          ,?KEY_APPLICATION_DATA
          ,?KEY_B_LEG_EVENTS
          ,?KEY_BODY
          ,?KEY_BYPASS_E164
          ,?KEY_CALL_ID
          ,?KEY_CONTROL_QUEUE
          ,?KEY_CCVS
          ,?KEY_CSHS
          ,?KEY_E_CALLER_ID_NAME
          ,?KEY_E_CALLER_ID_NUMBER
          ,?KEY_ENABLE_T38
          ,?KEY_ENABLE_T38_REQUEST
          ,?KEY_ENABLE_T38_GATEWAY
          ,?KEY_ENABLE_T38_PASSTHROUGH
          ,?KEY_FAX_IDENTITY_NAME
          ,?KEY_FAX_IDENTITY_NUMBER
          ,?KEY_FAX_TIMEZONE
          ,?KEY_T38_ENABLED
          ,?KEY_FLAGS
          ,?KEY_FORCE_FAX
          ,?KEY_FORCE_OUTBOUND
          ,?KEY_FORMAT_FROM_URI
          ,?KEY_FROM_URI_REALM
          ,?KEY_GROUP_ID
          ,?KEY_HOLD_MEDIA
          ,?KEY_HUNT_ACCOUNT_ID
          ,?KEY_IGNORE_EARLY_MEDIA
          ,?KEY_INCEPTION
          ,?KEY_MEDIA
          ,?KEY_MESSAGE_ID
          ,?KEY_MODE
          ,?KEY_OUTBOUND_CALL_ID
          ,?KEY_OUTBOUND_CALLER_ID_NAME
          ,?KEY_OUTBOUND_CALLER_ID_NUMBER
          ,?KEY_CALLER_CLASSIFIER
          ,?KEY_CALLEE_CLASSIFIER
          ,?KEY_PRESENCE_ID
          ,?KEY_RINGBACK
          ,?KEY_TIMEOUT
         ]).
%% Allowed literal values per key, enforced by wh_api:validate/4.
%% Fix: extraction had stripped the comment markers, leaving the bare
%% words "Eavesdrop" and "talk to one side" inside the term (a syntax
%% error) and dropping the ?MODE_WHISPER list entry the latter comment
%% annotated; both are restored here to match the upstream 2600Hz
%% definition.
-define(OFFNET_RESOURCE_REQ_VALUES
        ,[{?KEY_EVENT_CATEGORY, ?CATEGORY_REQ}
          ,{?KEY_EVENT_NAME, ?EVENT_REQ}
          ,{?KEY_RESOURCE_TYPE, [?RESOURCE_TYPE_AUDIO, ?RESOURCE_TYPE_VIDEO, ?RESOURCE_TYPE_ORIGINATE, ?RESOURCE_TYPE_SMS]}
          ,{?KEY_APPLICATION_NAME, [?APPLICATION_BRIDGE
                                    ,?APPLICATION_EAVESDROP
                                    ,?APPLICATION_FAX
                                    ,?APPLICATION_PARK
                                    ,?APPLICATION_SMS
                                    ,?APPLICATION_TRANSFER
                                   ]}
          ,{?KEY_MEDIA, [?MEDIA_PROCESS, ?MEDIA_BYPASS, ?MEDIA_AUTO]}
          %% Eavesdrop
          ,{?KEY_MODE, [?MODE_FULL    % talk to both sides
                        ,?MODE_LISTEN  % hear both sides - default
                        ,?MODE_WHISPER % talk to one side
                       ]}
         ]).
%% Per-key type guards applied by wh_api:validate/4: each fun must
%% return 'true' for the key's value when the key is present.
-define(OFFNET_RESOURCE_REQ_TYPES
        ,[{?KEY_ACCOUNT_ID, fun is_binary/1}
          ,{?KEY_B_LEG_EVENTS, fun wapi_dialplan:b_leg_events_v/1}
          ,{?KEY_CALL_ID, fun is_binary/1}
          ,{?KEY_CONTROL_QUEUE, fun is_binary/1}
          ,{?KEY_CCVS, fun wh_json:is_json_object/1}
          ,{?KEY_CSHS, fun wh_json:is_json_object/1}
          ,{?KEY_ENABLE_T38_GATEWAY, fun is_binary/1}
          ,{?KEY_FLAGS, fun is_list/1}
          ,{?KEY_FORCE_FAX, fun wh_util:is_boolean/1}
          ,{?KEY_FORCE_OUTBOUND, fun wh_util:is_boolean/1}
          ,{?KEY_TO_DID, fun is_binary/1}
          ,{?KEY_BYPASS_E164, fun wh_util:is_boolean/1}
         ]).
%% Offnet Resource Response
%% Only Response-Message is mandatory; error detail, call id and the
%% call control queue ride along when available.
-define(OFFNET_RESOURCE_RESP_HEADERS, [<<"Response-Message">>]).
-define(OPTIONAL_OFFNET_RESOURCE_RESP_HEADERS, [<<"Error-Message">>, <<"Response-Code">>
                                                ,?KEY_CALL_ID, <<"Resource-Response">>
                                                ,?KEY_CONTROL_QUEUE
                                               ]).
%% Fixed values stamped on every response by prepare_api_payload/3.
-define(OFFNET_RESOURCE_RESP_VALUES, [{<<"Event-Category">>, <<"resource">>}
                                      ,{<<"Event-Name">>, <<"offnet_resp">>}
                                     ]).
%% No extra per-key type guards on responses.
-define(OFFNET_RESOURCE_RESP_TYPES, []).
%%--------------------------------------------------------------------
%% @doc Offnet resource request - see wiki
%% Takes proplist, creates JSON string or error
%% @end
%%--------------------------------------------------------------------
-spec req(api_terms()) ->
                 {'ok', iolist()} |
                 {'error', string()}.
%% Build the AMQP payload for an offnet request.  JSON objects are
%% normalized to proplists first; proplists are validated and then
%% rendered, or rejected with an error tuple.
req(JObj) when not is_list(JObj) ->
    req(wh_json:to_proplist(JObj));
req(Prop) ->
    case req_v(Prop) of
        'true' -> wh_api:build_message(Prop, ?OFFNET_RESOURCE_REQ_HEADERS, ?OPTIONAL_OFFNET_RESOURCE_REQ_HEADERS);
        'false' -> {'error', "Proplist failed validation for offnet_resource_req"}
    end.
-spec req_v(api_terms()) -> boolean().
%% Validate mandatory headers, fixed values and type guards for an
%% offnet request; JSON objects are converted to proplists first.
req_v(JObj) when not is_list(JObj) ->
    req_v(wh_json:to_proplist(JObj));
req_v(Prop) ->
    wh_api:validate(Prop, ?OFFNET_RESOURCE_REQ_HEADERS, ?OFFNET_RESOURCE_REQ_VALUES, ?OFFNET_RESOURCE_REQ_TYPES).
%%--------------------------------------------------------------------
%% @doc Offnet resource request - see wiki
%% Takes proplist, creates JSON string or error
%% @end
%%--------------------------------------------------------------------
-spec resp(api_terms()) ->
                  {'ok', iolist()} |
                  {'error', string()}.
%% Build the AMQP payload for an offnet response; mirror image of
%% req/1 using the RESP header sets.
resp(JObj) when not is_list(JObj) ->
    resp(wh_json:to_proplist(JObj));
resp(Prop) ->
    case resp_v(Prop) of
        'true' -> wh_api:build_message(Prop, ?OFFNET_RESOURCE_RESP_HEADERS, ?OPTIONAL_OFFNET_RESOURCE_RESP_HEADERS);
        'false' -> {'error', "Proplist failed validation for offnet_resource_resp"}
    end.
-spec resp_v(api_terms()) -> boolean().
%% Validate an offnet response payload against the RESP definitions.
resp_v(JObj) when not is_list(JObj) ->
    resp_v(wh_json:to_proplist(JObj));
resp_v(Prop) ->
    wh_api:validate(Prop, ?OFFNET_RESOURCE_RESP_HEADERS, ?OFFNET_RESOURCE_RESP_VALUES, ?OFFNET_RESOURCE_RESP_TYPES).
%% Bind/unbind a queue on the resource exchange for offnet requests.
%% Fix: bind_q's spec said proplist() while unbind_q used the
%% codebase-standard wh_proplist(); unified on wh_proplist() (spec
%% change only, no runtime effect).
-spec bind_q(ne_binary(), wh_proplist()) -> 'ok'.
bind_q(Queue, _Props) ->
    amqp_util:bind_q_to_resource(Queue, ?KEY_OFFNET_RESOURCE_REQ).

-spec unbind_q(ne_binary(), wh_proplist()) -> 'ok'.
unbind_q(Queue, _Props) ->
    amqp_util:unbind_q_from_resource(Queue, ?KEY_OFFNET_RESOURCE_REQ).
%%--------------------------------------------------------------------
%% @doc
%% Declare the AMQP exchanges used by this API (the resource exchange).
%% @end
%%--------------------------------------------------------------------
-spec declare_exchanges() -> 'ok'.
declare_exchanges() ->
    amqp_util:resource_exchange().

%% Publish an offnet request: stamp the fixed REQ values, render via
%% req/1, then publish to the resource exchange.  Crashes on a payload
%% that fails validation (the {'ok', _} match).
-spec publish_req(api_terms()) -> 'ok'.
-spec publish_req(api_terms(), ne_binary()) -> 'ok'.
publish_req(JObj) ->
    publish_req(JObj, ?DEFAULT_CONTENT_TYPE).
publish_req(Req, ContentType) ->
    {'ok', Payload} = wh_api:prepare_api_payload(Req, ?OFFNET_RESOURCE_REQ_VALUES, fun ?MODULE:req/1),
    amqp_util:offnet_resource_publish(Payload, ContentType).

%% Publish an offnet response directly to the requester's queue
%% (targeted publish), after stamping the fixed RESP values.
-spec publish_resp(ne_binary(), api_terms()) -> 'ok'.
-spec publish_resp(ne_binary(), api_terms(), ne_binary()) -> 'ok'.
publish_resp(TargetQ, JObj) ->
    publish_resp(TargetQ, JObj, ?DEFAULT_CONTENT_TYPE).
publish_resp(TargetQ, Resp, ContentType) ->
    {'ok', Payload} = wh_api:prepare_api_payload(Resp, ?OFFNET_RESOURCE_RESP_VALUES, fun ?MODULE:resp/1),
    amqp_util:targeted_publish(TargetQ, Payload, ContentType).
%%--------------------------------------------------------------------
%% Accessors over the opaque req() handle.  Pattern: the arity-1 form
%% delegates to the arity-2 form with a fixed default; the arity-2
%% form unwraps the ?REQ_TYPE() envelope and reads a single key via
%% wh_json.  get_ne_value-based accessors treat empty values as
%% missing; is_true-based accessors coerce the stored value to a
%% boolean.
%%--------------------------------------------------------------------

%% 'Force-Outbound' flag; defaults to 'false' when absent.
-spec force_outbound(req()) -> boolean().
-spec force_outbound(req(), Default) -> boolean() | Default.
force_outbound(Req) ->
    force_outbound(Req, 'false').
force_outbound(?REQ_TYPE(JObj), Default) ->
    wh_json:is_true(?KEY_FORCE_OUTBOUND, JObj, Default).

%% 'Resource-Type'; note the non-'undefined' default of audio.
-spec resource_type(req()) -> ne_binary().
-spec resource_type(req(), Default) -> ne_binary() | Default.
resource_type(Req) ->
    resource_type(Req, ?RESOURCE_TYPE_AUDIO).
resource_type(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_RESOURCE_TYPE, JObj, Default).

-spec account_id(req()) -> api_binary().
-spec account_id(req(), Default) -> ne_binary() | Default.
account_id(Req) ->
    account_id(Req, 'undefined').
account_id(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_ACCOUNT_ID, JObj, Default).

-spec hunt_account_id(req()) -> api_binary().
-spec hunt_account_id(req(), Default) -> ne_binary() | Default.
hunt_account_id(Req) ->
    hunt_account_id(Req, 'undefined').
hunt_account_id(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_HUNT_ACCOUNT_ID, JObj, Default).

-spec outbound_call_id(req()) -> api_binary().
-spec outbound_call_id(req(), Default) -> ne_binary() | Default.
outbound_call_id(Req) ->
    outbound_call_id(Req, 'undefined').
outbound_call_id(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_OUTBOUND_CALL_ID, JObj, Default).

-spec outbound_caller_id_number(req()) -> api_binary().
-spec outbound_caller_id_number(req(), Default) -> ne_binary() | Default.
outbound_caller_id_number(Req) ->
    outbound_caller_id_number(Req, 'undefined').
outbound_caller_id_number(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_OUTBOUND_CALLER_ID_NUMBER, JObj, Default).

-spec outbound_caller_id_name(req()) -> api_binary().
-spec outbound_caller_id_name(req(), Default) -> ne_binary() | Default.
outbound_caller_id_name(Req) ->
    outbound_caller_id_name(Req, 'undefined').
outbound_caller_id_name(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_OUTBOUND_CALLER_ID_NAME, JObj, Default).

-spec caller_classifier(req()) -> api_binary().
-spec caller_classifier(req(), Default) -> ne_binary() | Default.
caller_classifier(Req) ->
    caller_classifier(Req, 'undefined').
caller_classifier(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_CALLER_CLASSIFIER, JObj, Default).

-spec callee_classifier(req()) -> api_binary().
-spec callee_classifier(req(), Default) -> ne_binary() | Default.
callee_classifier(Req) ->
    callee_classifier(Req, 'undefined').
callee_classifier(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_CALLEE_CLASSIFIER, JObj, Default).

-spec emergency_caller_id_number(req()) -> api_binary().
-spec emergency_caller_id_number(req(), Default) -> ne_binary() | Default.
emergency_caller_id_number(Req) ->
    emergency_caller_id_number(Req, 'undefined').
emergency_caller_id_number(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_E_CALLER_ID_NUMBER, JObj, Default).

-spec emergency_caller_id_name(req()) -> api_binary().
-spec emergency_caller_id_name(req(), Default) -> ne_binary() | Default.
emergency_caller_id_name(Req) ->
    emergency_caller_id_name(Req, 'undefined').
emergency_caller_id_name(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_E_CALLER_ID_NAME, JObj, Default).

-spec to_did(req()) -> api_binary().
-spec to_did(req(), Default) -> ne_binary() | Default.
to_did(Req) ->
    to_did(Req, 'undefined').
to_did(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_TO_DID, JObj, Default).

-spec call_id(req()) -> api_binary().
-spec call_id(req(), Default) -> ne_binary() | Default.
call_id(Req) ->
    call_id(Req, 'undefined').
call_id(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_CALL_ID, JObj, Default).

-spec control_queue(req()) -> api_binary().
-spec control_queue(req(), Default) -> ne_binary() | Default.
control_queue(Req) ->
    control_queue(Req, 'undefined').
control_queue(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_CONTROL_QUEUE, JObj, Default).

%% Resource selection flags; defaults to the empty list (not
%% 'undefined') so callers can iterate without a guard.
-spec flags(req()) -> ne_binaries().
-spec flags(req(), Default) -> ne_binaries() | Default.
flags(Req) ->
    flags(Req, []).
flags(?REQ_TYPE(JObj), Default) ->
    wh_json:get_list_value(?KEY_FLAGS, JObj, Default).
%% Wrap/unwrap between a raw wh_json object and the opaque req()
%% handle; ?REQ_TYPE() is currently the identity, so both are
%% zero-cost.
-spec jobj_to_req(wh_json:object()) -> wapi_offnet_resource:req().
jobj_to_req(JObj) -> ?REQ_TYPE(JObj).

-spec req_to_jobj(wapi_offnet_resource:req()) -> wh_json:object().
req_to_jobj(?REQ_TYPE(JObj)) -> JObj.

%% Delegates to wh_util:put_callid/1 — presumably stores the request's
%% call-id for logging context; confirm semantics in wh_util.
-spec put_callid(req()) -> api_binary().
put_callid(?REQ_TYPE(JObj)) ->
    wh_util:put_callid(JObj).

%% Stores CallId under 'Outbound-Call-ID' via wh_json:insert_value
%% (NOTE(review): insert_value appears to differ from set_value by not
%% overwriting an existing entry — verify against wh_json).
-spec set_outbound_call_id(req(), ne_binary()) -> req().
set_outbound_call_id(?REQ_TYPE(JObj), CallId) ->
    ?REQ_TYPE(wh_json:insert_value(?KEY_OUTBOUND_CALL_ID, CallId, JObj)).
%% Custom Channel Vars: nested JSON object of channel variables.
-spec custom_channel_vars(req()) -> api_object().
-spec custom_channel_vars(req(), Default) -> wh_json:object() | Default.
custom_channel_vars(Req) ->
    custom_channel_vars(Req, 'undefined').
custom_channel_vars(?REQ_TYPE(JObj), Default) ->
    wh_json:get_json_value(?KEY_CCVS, JObj, Default).

%% Custom SIP Headers: nested JSON object of extra SIP headers.
-spec custom_sip_headers(req()) -> api_object().
-spec custom_sip_headers(req(), Default) -> wh_json:object() | Default.
custom_sip_headers(Req) ->
    custom_sip_headers(Req, 'undefined').
custom_sip_headers(?REQ_TYPE(JObj), Default) ->
    wh_json:get_json_value(?KEY_CSHS, JObj, Default).

%% Timeout as an integer (units not stated here — see callers).
-spec timeout(req()) -> api_integer().
-spec timeout(req(), Default) -> integer() | Default.
timeout(Req) ->
    timeout(Req, 'undefined').
timeout(?REQ_TYPE(JObj), Default) ->
    wh_json:get_integer_value(?KEY_TIMEOUT, JObj, Default).

%% Boolean coercion; note the 'undefined' default, distinguishing
%% "unset" from an explicit 'false'.
-spec ignore_early_media(req()) -> api_boolean().
-spec ignore_early_media(req(), Default) -> boolean() | Default.
ignore_early_media(Req) ->
    ignore_early_media(Req, 'undefined').
ignore_early_media(?REQ_TYPE(JObj), Default) ->
    wh_json:is_true(?KEY_IGNORE_EARLY_MEDIA, JObj, Default).

%% Media handling mode (see ?OFFNET_RESOURCE_REQ_VALUES for the
%% allowed values: process/bypass/auto).
-spec media(req()) -> api_binary().
-spec media(req(), Default) -> ne_binary() | Default.
media(Req) ->
    media(Req, 'undefined').
media(?REQ_TYPE(JObj), Default) ->
    wh_json:get_binary_value(?KEY_MEDIA, JObj, Default).

-spec message_id(req()) -> api_binary().
-spec message_id(req(), Default) -> ne_binary() | Default.
message_id(Req) ->
    message_id(Req, 'undefined').
message_id(?REQ_TYPE(JObj), Default) ->
    wh_json:get_binary_value(?KEY_MESSAGE_ID, JObj, Default).

-spec hold_media(req()) -> api_binary().
-spec hold_media(req(), Default) -> ne_binary() | Default.
hold_media(Req) ->
    hold_media(Req, 'undefined').
hold_media(?REQ_TYPE(JObj), Default) ->
    wh_json:get_binary_value(?KEY_HOLD_MEDIA, JObj, Default).

-spec presence_id(req()) -> api_binary().
-spec presence_id(req(), Default) -> ne_binary() | Default.
presence_id(Req) ->
    presence_id(Req, 'undefined').
presence_id(?REQ_TYPE(JObj), Default) ->
    wh_json:get_binary_value(?KEY_PRESENCE_ID, JObj, Default).

-spec ringback(req()) -> api_binary().
-spec ringback(req(), Default) -> ne_binary() | Default.
ringback(Req) ->
    ringback(Req, 'undefined').
ringback(?REQ_TYPE(JObj), Default) ->
    wh_json:get_binary_value(?KEY_RINGBACK, JObj, Default).

-spec fax_identity_number(req()) -> api_binary().
-spec fax_identity_number(req(), Default) -> ne_binary() | Default.
fax_identity_number(Req) ->
    fax_identity_number(Req, 'undefined').
fax_identity_number(?REQ_TYPE(JObj), Default) ->
    wh_json:get_binary_value(?KEY_FAX_IDENTITY_NUMBER, JObj, Default).

-spec fax_identity_name(req()) -> api_binary().
-spec fax_identity_name(req(), Default) -> ne_binary() | Default.
fax_identity_name(Req) ->
    fax_identity_name(Req, 'undefined').
fax_identity_name(?REQ_TYPE(JObj), Default) ->
    wh_json:get_binary_value(?KEY_FAX_IDENTITY_NAME, JObj, Default).
-spec outbound_callee_id_number(req()) -> api_binary().
-spec outbound_callee_id_number(req(), Default) -> ne_binary() | Default.
outbound_callee_id_number(Req) ->
    outbound_callee_id_number(Req, 'undefined').
outbound_callee_id_number(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_OUTBOUND_CALLEE_ID_NUMBER, JObj, Default).

-spec outbound_callee_id_name(req()) -> api_binary().
-spec outbound_callee_id_name(req(), Default) -> ne_binary() | Default.
outbound_callee_id_name(Req) ->
    outbound_callee_id_name(Req, 'undefined').
outbound_callee_id_name(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_OUTBOUND_CALLEE_ID_NAME, JObj, Default).

%% Events requested on the B-leg; validated upstream by
%% wapi_dialplan:b_leg_events_v/1 (see ?OFFNET_RESOURCE_REQ_TYPES).
-spec b_leg_events(req()) -> api_binaries().
-spec b_leg_events(req(), Default) -> ne_binaries() | Default.
b_leg_events(Req) ->
    b_leg_events(Req, 'undefined').
b_leg_events(?REQ_TYPE(JObj), Default) ->
    wh_json:get_list_value(?KEY_B_LEG_EVENTS, JObj, Default).

-spec from_uri_realm(req()) -> api_binary().
-spec from_uri_realm(req(), Default) -> ne_binary() | Default.
from_uri_realm(Req) ->
    from_uri_realm(Req, 'undefined').
from_uri_realm(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_FROM_URI_REALM, JObj, Default).

-spec account_realm(req()) -> api_binary().
-spec account_realm(req(), Default) -> ne_binary() | Default.
account_realm(Req) ->
    account_realm(Req, 'undefined').
account_realm(?REQ_TYPE(JObj), Default) ->
    wh_json:get_ne_value(?KEY_ACCOUNT_REALM, JObj, Default).

%% Boolean flags below default to 'false' (not 'undefined').
-spec format_from_uri(req()) -> boolean().
-spec format_from_uri(req(), Default) -> boolean() | Default.
format_from_uri(Req) ->
    format_from_uri(Req, 'false').
format_from_uri(?REQ_TYPE(JObj), Default) ->
    wh_json:is_true(?KEY_FORMAT_FROM_URI, JObj, Default).

%% Message body (e.g. for SMS requests); plain get_value, so an empty
%% binary is returned as-is rather than replaced by the default.
-spec body(req()) -> api_binary().
-spec body(req(), Default) -> ne_binary() | Default.
body(Req) ->
    body(Req, 'undefined').
body(?REQ_TYPE(JObj), Default) ->
    wh_json:get_value(?KEY_BODY, JObj, Default).

-spec bypass_e164(req()) -> boolean().
-spec bypass_e164(req(), Default) -> boolean() | Default.
bypass_e164(Req) ->
    bypass_e164(Req, 'false').
bypass_e164(?REQ_TYPE(JObj), Default) ->
    wh_json:is_true(?KEY_BYPASS_E164, JObj, Default).

-spec t38_enabled(req()) -> boolean().
-spec t38_enabled(req(), Default) -> boolean() | Default.
t38_enabled(Req) ->
    t38_enabled(Req, 'false').
t38_enabled(?REQ_TYPE(JObj), Default) ->
    wh_json:is_true(?KEY_T38_ENABLED, JObj, Default).

%% Standard AMQP envelope fields, read via wh_api.
-spec msg_id(req()) -> api_binary().
msg_id(?REQ_TYPE(JObj)) ->
    wh_api:msg_id(JObj).

-spec server_id(req()) -> api_binary().
server_id(?REQ_TYPE(JObj)) ->
    wh_api:server_id(JObj).

%% Bulk mutation helpers that keep the opaque envelope intact.
-spec delete_keys(req(), ne_binaries()) -> req().
delete_keys(?REQ_TYPE(JObj), Keys) ->
    ?REQ_TYPE(wh_json:delete_keys(Keys, JObj)).

-spec set_values(req(), wh_proplist()) -> req().
set_values(?REQ_TYPE(JObj), Props) ->
    ?REQ_TYPE(wh_json:set_values(Props, JObj)).
| null | https://raw.githubusercontent.com/urueedi/monster-ui-phonebook/00b1b54996785d7a67fa902a46c82ead12ef022b/kazoo-3/core/whistle-1.0.0/src/api/wapi_offnet_resource.erl | erlang | -------------------------------------------------------------------
@doc
@end
@contributors
-------------------------------------------------------------------
helpers for working with opaque object
Offnet Resource Request
talk to both sides
hear both sides - default
Offnet Resource Response
--------------------------------------------------------------------
@doc Offnet resource request - see wiki
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc Offnet resource request - see wiki
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
declare the exchanges used by this API
@end
-------------------------------------------------------------------- | ( C ) 2011 - 2015 , 2600Hz INC
-module(wapi_offnet_resource).
-export([req/1, req_v/1]).
-export([resp/1, resp_v/1]).
-export([publish_req/1, publish_req/2]).
-export([publish_resp/2, publish_resp/3]).
-export([bind_q/2]).
-export([unbind_q/2]).
-export([declare_exchanges/0]).
-export([account_id/1, account_id/2
,account_realm/1, account_realm/2
,b_leg_events/1, b_leg_events/2
,body/1, body/2
,bypass_e164/1, bypass_e164/2
,call_id/1, call_id/2
,control_queue/1, control_queue/2
,custom_channel_vars/1, custom_channel_vars/2
,custom_sip_headers/1, custom_sip_headers/2
,emergency_caller_id_name/1, emergency_caller_id_name/2
,emergency_caller_id_number/1, emergency_caller_id_number/2
,fax_identity_name/1, fax_identity_name/2
,fax_identity_number/1, fax_identity_number/2
,flags/1, flags/2
,force_outbound/1, force_outbound/2
,format_from_uri/1, format_from_uri/2
,from_uri_realm/1, from_uri_realm/2
,hold_media/1, hold_media/2
,hunt_account_id/1, hunt_account_id/2
,ignore_early_media/1, ignore_early_media/2
,media/1, media/2
,message_id/1, message_id/2
,outbound_call_id/1, outbound_call_id/2
,outbound_callee_id_name/1, outbound_callee_id_name/2
,outbound_callee_id_number/1, outbound_callee_id_number/2
,outbound_caller_id_name/1, outbound_caller_id_name/2
,outbound_caller_id_number/1, outbound_caller_id_number/2
,caller_classifier/1, caller_classifier/2
,callee_classifier/1, callee_classifier/2
,presence_id/1, presence_id/2
,resource_type/1, resource_type/2
,ringback/1, ringback/2
,timeout/1, timeout/2
,t38_enabled/1, t38_enabled/2
,to_did/1, to_did/2
,msg_id/1
,server_id/1
,set_outbound_call_id/2
,delete_keys/2
,set_values/2
]).
-export([jobj_to_req/1
,req_to_jobj/1
,put_callid/1
]).
-include_lib("whistle/include/wh_api.hrl").
-include_lib("whistle/include/wh_amqp.hrl").
-include_lib("whistle/include/wapi_offnet_resource.hrl").
-export_type([req/0
,resp/0
]).
-define(REQ_TYPE(JObj), JObj).
-define(RESP_TYPE(JObj), JObj).
-type req() :: wh_json:object().
-type resp() :: wh_json:object().
-define(OFFNET_RESOURCE_REQ_HEADERS, [?KEY_APPLICATION_NAME
,?KEY_RESOURCE_TYPE
,?KEY_TO_DID
]).
-define(OPTIONAL_OFFNET_RESOURCE_REQ_HEADERS
,[?KEY_ACCOUNT_ID
,?KEY_ACCOUNT_REALM
,?KEY_APPLICATION_DATA
,?KEY_B_LEG_EVENTS
,?KEY_BODY
,?KEY_BYPASS_E164
,?KEY_CALL_ID
,?KEY_CALL_ID
,?KEY_CONTROL_QUEUE
,?KEY_CCVS
,?KEY_CSHS
,?KEY_E_CALLER_ID_NAME
,?KEY_E_CALLER_ID_NUMBER
,?KEY_ENABLE_T38
,?KEY_ENABLE_T38_REQUEST
,?KEY_ENABLE_T38_GATEWAY
,?KEY_ENABLE_T38_PASSTHROUGH
,?KEY_FAX_IDENTITY_NAME
,?KEY_FAX_IDENTITY_NUMBER
,?KEY_FAX_TIMEZONE
,?KEY_T38_ENABLED
,?KEY_FLAGS
,?KEY_FORCE_FAX
,?KEY_FORCE_OUTBOUND
,?KEY_FORMAT_FROM_URI
,?KEY_FROM_URI_REALM
,?KEY_GROUP_ID
,?KEY_HOLD_MEDIA
,?KEY_HUNT_ACCOUNT_ID
,?KEY_IGNORE_EARLY_MEDIA
,?KEY_INCEPTION
,?KEY_MEDIA
,?KEY_MESSAGE_ID
,?KEY_MODE
,?KEY_OUTBOUND_CALL_ID
,?KEY_OUTBOUND_CALLER_ID_NAME
,?KEY_OUTBOUND_CALLER_ID_NUMBER
,?KEY_CALLER_CLASSIFIER
,?KEY_CALLEE_CLASSIFIER
,?KEY_PRESENCE_ID
,?KEY_RINGBACK
,?KEY_TIMEOUT
]).
-define(OFFNET_RESOURCE_REQ_VALUES
,[{?KEY_EVENT_CATEGORY, ?CATEGORY_REQ}
,{?KEY_EVENT_NAME, ?EVENT_REQ}
,{?KEY_RESOURCE_TYPE, [?RESOURCE_TYPE_AUDIO, ?RESOURCE_TYPE_VIDEO, ?RESOURCE_TYPE_ORIGINATE, ?RESOURCE_TYPE_SMS]}
,{?KEY_APPLICATION_NAME, [?APPLICATION_BRIDGE
,?APPLICATION_EAVESDROP
,?APPLICATION_FAX
,?APPLICATION_PARK
,?APPLICATION_SMS
,?APPLICATION_TRANSFER
]}
,{?KEY_MEDIA, [?MEDIA_PROCESS, ?MEDIA_BYPASS, ?MEDIA_AUTO]}
Eavesdrop
talk to one side
]}
]).
-define(OFFNET_RESOURCE_REQ_TYPES
,[{?KEY_ACCOUNT_ID, fun is_binary/1}
,{?KEY_B_LEG_EVENTS, fun wapi_dialplan:b_leg_events_v/1}
,{?KEY_CALL_ID, fun is_binary/1}
,{?KEY_CONTROL_QUEUE, fun is_binary/1}
,{?KEY_CCVS, fun wh_json:is_json_object/1}
,{?KEY_CSHS, fun wh_json:is_json_object/1}
,{?KEY_ENABLE_T38_GATEWAY, fun is_binary/1}
,{?KEY_FLAGS, fun is_list/1}
,{?KEY_FORCE_FAX, fun wh_util:is_boolean/1}
,{?KEY_FORCE_OUTBOUND, fun wh_util:is_boolean/1}
,{?KEY_TO_DID, fun is_binary/1}
,{?KEY_BYPASS_E164, fun wh_util:is_boolean/1}
]).
-define(OFFNET_RESOURCE_RESP_HEADERS, [<<"Response-Message">>]).
-define(OPTIONAL_OFFNET_RESOURCE_RESP_HEADERS, [<<"Error-Message">>, <<"Response-Code">>
,?KEY_CALL_ID, <<"Resource-Response">>
,?KEY_CONTROL_QUEUE
]).
-define(OFFNET_RESOURCE_RESP_VALUES, [{<<"Event-Category">>, <<"resource">>}
,{<<"Event-Name">>, <<"offnet_resp">>}
]).
-define(OFFNET_RESOURCE_RESP_TYPES, []).
Takes proplist , creates JSON string or error
-spec req(api_terms()) ->
{'ok', iolist()} |
{'error', string()}.
req(Prop) when is_list(Prop) ->
case req_v(Prop) of
'true' -> wh_api:build_message(Prop, ?OFFNET_RESOURCE_REQ_HEADERS, ?OPTIONAL_OFFNET_RESOURCE_REQ_HEADERS);
'false' -> {'error', "Proplist failed validation for offnet_resource_req"}
end;
req(JObj) -> req(wh_json:to_proplist(JObj)).
-spec req_v(api_terms()) -> boolean().
req_v(Prop) when is_list(Prop) ->
wh_api:validate(Prop, ?OFFNET_RESOURCE_REQ_HEADERS, ?OFFNET_RESOURCE_REQ_VALUES, ?OFFNET_RESOURCE_REQ_TYPES);
req_v(JObj) -> req_v(wh_json:to_proplist(JObj)).
Takes proplist , creates JSON string or error
-spec resp(api_terms()) ->
{'ok', iolist()} |
{'error', string()}.
resp(Prop) when is_list(Prop) ->
case resp_v(Prop) of
'true' -> wh_api:build_message(Prop, ?OFFNET_RESOURCE_RESP_HEADERS, ?OPTIONAL_OFFNET_RESOURCE_RESP_HEADERS);
'false' -> {'error', "Proplist failed validation for offnet_resource_resp"}
end;
resp(JObj) -> resp(wh_json:to_proplist(JObj)).
-spec resp_v(api_terms()) -> boolean().
resp_v(Prop) when is_list(Prop) ->
wh_api:validate(Prop, ?OFFNET_RESOURCE_RESP_HEADERS, ?OFFNET_RESOURCE_RESP_VALUES, ?OFFNET_RESOURCE_RESP_TYPES);
resp_v(JObj) -> resp_v(wh_json:to_proplist(JObj)).
-spec bind_q(ne_binary(), proplist()) -> 'ok'.
bind_q(Queue, _Props) ->
amqp_util:bind_q_to_resource(Queue, ?KEY_OFFNET_RESOURCE_REQ).
-spec unbind_q(ne_binary(), wh_proplist()) -> 'ok'.
unbind_q(Queue, _Props) ->
amqp_util:unbind_q_from_resource(Queue, ?KEY_OFFNET_RESOURCE_REQ).
-spec declare_exchanges() -> 'ok'.
declare_exchanges() ->
amqp_util:resource_exchange().
-spec publish_req(api_terms()) -> 'ok'.
-spec publish_req(api_terms(), ne_binary()) -> 'ok'.
publish_req(JObj) ->
publish_req(JObj, ?DEFAULT_CONTENT_TYPE).
publish_req(Req, ContentType) ->
{'ok', Payload} = wh_api:prepare_api_payload(Req, ?OFFNET_RESOURCE_REQ_VALUES, fun ?MODULE:req/1),
amqp_util:offnet_resource_publish(Payload, ContentType).
-spec publish_resp(ne_binary(), api_terms()) -> 'ok'.
-spec publish_resp(ne_binary(), api_terms(), ne_binary()) -> 'ok'.
publish_resp(TargetQ, JObj) ->
publish_resp(TargetQ, JObj, ?DEFAULT_CONTENT_TYPE).
publish_resp(TargetQ, Resp, ContentType) ->
{'ok', Payload} = wh_api:prepare_api_payload(Resp, ?OFFNET_RESOURCE_RESP_VALUES, fun ?MODULE:resp/1),
amqp_util:targeted_publish(TargetQ, Payload, ContentType).
-spec force_outbound(req()) -> boolean().
-spec force_outbound(req(), Default) -> boolean() | Default.
force_outbound(Req) ->
force_outbound(Req, 'false').
force_outbound(?REQ_TYPE(JObj), Default) ->
wh_json:is_true(?KEY_FORCE_OUTBOUND, JObj, Default).
-spec resource_type(req()) -> ne_binary().
-spec resource_type(req(), Default) -> ne_binary() | Default.
resource_type(Req) ->
resource_type(Req, ?RESOURCE_TYPE_AUDIO).
resource_type(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_RESOURCE_TYPE, JObj, Default).
-spec account_id(req()) -> api_binary().
-spec account_id(req(), Default) -> ne_binary() | Default.
account_id(Req) ->
account_id(Req, 'undefined').
account_id(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_ACCOUNT_ID, JObj, Default).
-spec hunt_account_id(req()) -> api_binary().
-spec hunt_account_id(req(), Default) -> ne_binary() | Default.
hunt_account_id(Req) ->
hunt_account_id(Req, 'undefined').
hunt_account_id(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_HUNT_ACCOUNT_ID, JObj, Default).
-spec outbound_call_id(req()) -> api_binary().
-spec outbound_call_id(req(), Default) -> ne_binary() | Default.
outbound_call_id(Req) ->
outbound_call_id(Req, 'undefined').
outbound_call_id(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_OUTBOUND_CALL_ID, JObj, Default).
-spec outbound_caller_id_number(req()) -> api_binary().
-spec outbound_caller_id_number(req(), Default) -> ne_binary() | Default.
outbound_caller_id_number(Req) ->
outbound_caller_id_number(Req, 'undefined').
outbound_caller_id_number(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_OUTBOUND_CALLER_ID_NUMBER, JObj, Default).
-spec outbound_caller_id_name(req()) -> api_binary().
-spec outbound_caller_id_name(req(), Default) -> ne_binary() | Default.
outbound_caller_id_name(Req) ->
outbound_caller_id_name(Req, 'undefined').
outbound_caller_id_name(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_OUTBOUND_CALLER_ID_NAME, JObj, Default).
-spec caller_classifier(req()) -> api_binary().
-spec caller_classifier(req(), Default) -> ne_binary() | Default.
caller_classifier(Req) ->
caller_classifier(Req, 'undefined').
caller_classifier(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_CALLER_CLASSIFIER, JObj, Default).
-spec callee_classifier(req()) -> api_binary().
-spec callee_classifier(req(), Default) -> ne_binary() | Default.
callee_classifier(Req) ->
callee_classifier(Req, 'undefined').
callee_classifier(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_CALLEE_CLASSIFIER, JObj, Default).
-spec emergency_caller_id_number(req()) -> api_binary().
-spec emergency_caller_id_number(req(), Default) -> ne_binary() | Default.
emergency_caller_id_number(Req) ->
emergency_caller_id_number(Req, 'undefined').
emergency_caller_id_number(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_E_CALLER_ID_NUMBER, JObj, Default).
-spec emergency_caller_id_name(req()) -> api_binary().
-spec emergency_caller_id_name(req(), Default) -> ne_binary() | Default.
emergency_caller_id_name(Req) ->
emergency_caller_id_name(Req, 'undefined').
emergency_caller_id_name(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_E_CALLER_ID_NAME, JObj, Default).
-spec to_did(req()) -> api_binary().
-spec to_did(req(), Default) -> ne_binary() | Default.
to_did(Req) ->
to_did(Req, 'undefined').
to_did(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_TO_DID, JObj, Default).
-spec call_id(req()) -> api_binary().
-spec call_id(req(), Default) -> ne_binary() | Default.
call_id(Req) ->
call_id(Req, 'undefined').
call_id(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_CALL_ID, JObj, Default).
-spec control_queue(req()) -> api_binary().
-spec control_queue(req(), Default) -> ne_binary() | Default.
control_queue(Req) ->
control_queue(Req, 'undefined').
control_queue(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_CONTROL_QUEUE, JObj, Default).
-spec flags(req()) -> ne_binaries().
-spec flags(req(), Default) -> ne_binaries() | Default.
flags(Req) ->
flags(Req, []).
flags(?REQ_TYPE(JObj), Default) ->
wh_json:get_list_value(?KEY_FLAGS, JObj, Default).
-spec jobj_to_req(wh_json:object()) -> wapi_offnet_resource:req().
jobj_to_req(JObj) -> ?REQ_TYPE(JObj).
-spec req_to_jobj(wapi_offnet_resource:req()) -> wh_json:object().
req_to_jobj(?REQ_TYPE(JObj)) -> JObj.
-spec put_callid(req()) -> api_binary().
put_callid(?REQ_TYPE(JObj)) ->
wh_util:put_callid(JObj).
-spec set_outbound_call_id(req(), ne_binary()) -> req().
set_outbound_call_id(?REQ_TYPE(JObj), CallId) ->
?REQ_TYPE(wh_json:insert_value(?KEY_OUTBOUND_CALL_ID, CallId, JObj)).
-spec custom_channel_vars(req()) -> api_object().
-spec custom_channel_vars(req(), Default) -> wh_json:object() | Default.
custom_channel_vars(Req) ->
custom_channel_vars(Req, 'undefined').
custom_channel_vars(?REQ_TYPE(JObj), Default) ->
wh_json:get_json_value(?KEY_CCVS, JObj, Default).
-spec custom_sip_headers(req()) -> api_object().
-spec custom_sip_headers(req(), Default) -> wh_json:object() | Default.
custom_sip_headers(Req) ->
custom_sip_headers(Req, 'undefined').
custom_sip_headers(?REQ_TYPE(JObj), Default) ->
wh_json:get_json_value(?KEY_CSHS, JObj, Default).
-spec timeout(req()) -> api_integer().
-spec timeout(req(), Default) -> integer() | Default.
timeout(Req) ->
timeout(Req, 'undefined').
timeout(?REQ_TYPE(JObj), Default) ->
wh_json:get_integer_value(?KEY_TIMEOUT, JObj, Default).
-spec ignore_early_media(req()) -> api_boolean().
-spec ignore_early_media(req(), Default) -> boolean() | Default.
ignore_early_media(Req) ->
ignore_early_media(Req, 'undefined').
ignore_early_media(?REQ_TYPE(JObj), Default) ->
wh_json:is_true(?KEY_IGNORE_EARLY_MEDIA, JObj, Default).
-spec media(req()) -> api_binary().
-spec media(req(), Default) -> ne_binary() | Default.
media(Req) ->
media(Req, 'undefined').
media(?REQ_TYPE(JObj), Default) ->
wh_json:get_binary_value(?KEY_MEDIA, JObj, Default).
-spec message_id(req()) -> api_binary().
-spec message_id(req(), Default) -> ne_binary() | Default.
message_id(Req) ->
message_id(Req, 'undefined').
message_id(?REQ_TYPE(JObj), Default) ->
wh_json:get_binary_value(?KEY_MESSAGE_ID, JObj, Default).
-spec hold_media(req()) -> api_binary().
-spec hold_media(req(), Default) -> ne_binary() | Default.
hold_media(Req) ->
hold_media(Req, 'undefined').
hold_media(?REQ_TYPE(JObj), Default) ->
wh_json:get_binary_value(?KEY_HOLD_MEDIA, JObj, Default).
-spec presence_id(req()) -> api_binary().
-spec presence_id(req(), Default) -> ne_binary() | Default.
presence_id(Req) ->
presence_id(Req, 'undefined').
presence_id(?REQ_TYPE(JObj), Default) ->
wh_json:get_binary_value(?KEY_PRESENCE_ID, JObj, Default).
-spec ringback(req()) -> api_binary().
-spec ringback(req(), Default) -> ne_binary() | Default.
ringback(Req) ->
ringback(Req, 'undefined').
ringback(?REQ_TYPE(JObj), Default) ->
wh_json:get_binary_value(?KEY_RINGBACK, JObj, Default).
-spec fax_identity_number(req()) -> api_binary().
-spec fax_identity_number(req(), Default) -> ne_binary() | Default.
fax_identity_number(Req) ->
fax_identity_number(Req, 'undefined').
fax_identity_number(?REQ_TYPE(JObj), Default) ->
wh_json:get_binary_value(?KEY_FAX_IDENTITY_NUMBER, JObj, Default).
-spec fax_identity_name(req()) -> api_binary().
-spec fax_identity_name(req(), Default) -> ne_binary() | Default.
fax_identity_name(Req) ->
fax_identity_name(Req, 'undefined').
fax_identity_name(?REQ_TYPE(JObj), Default) ->
wh_json:get_binary_value(?KEY_FAX_IDENTITY_NAME, JObj, Default).
-spec outbound_callee_id_number(req()) -> api_binary().
-spec outbound_callee_id_number(req(), Default) -> ne_binary() | Default.
outbound_callee_id_number(Req) ->
outbound_callee_id_number(Req, 'undefined').
outbound_callee_id_number(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_OUTBOUND_CALLEE_ID_NUMBER, JObj, Default).
-spec outbound_callee_id_name(req()) -> api_binary().
-spec outbound_callee_id_name(req(), Default) -> ne_binary() | Default.
outbound_callee_id_name(Req) ->
outbound_callee_id_name(Req, 'undefined').
outbound_callee_id_name(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_OUTBOUND_CALLEE_ID_NAME, JObj, Default).
-spec b_leg_events(req()) -> api_binaries().
-spec b_leg_events(req(), Default) -> ne_binaries() | Default.
b_leg_events(Req) ->
b_leg_events(Req, 'undefined').
b_leg_events(?REQ_TYPE(JObj), Default) ->
wh_json:get_list_value(?KEY_B_LEG_EVENTS, JObj, Default).
-spec from_uri_realm(req()) -> api_binary().
-spec from_uri_realm(req(), Default) -> ne_binary() | Default.
from_uri_realm(Req) ->
from_uri_realm(Req, 'undefined').
from_uri_realm(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_FROM_URI_REALM, JObj, Default).
-spec account_realm(req()) -> api_binary().
-spec account_realm(req(), Default) -> ne_binary() | Default.
account_realm(Req) ->
account_realm(Req, 'undefined').
account_realm(?REQ_TYPE(JObj), Default) ->
wh_json:get_ne_value(?KEY_ACCOUNT_REALM, JObj, Default).
-spec format_from_uri(req()) -> boolean().
-spec format_from_uri(req(), Default) -> boolean() | Default.
format_from_uri(Req) ->
format_from_uri(Req, 'false').
format_from_uri(?REQ_TYPE(JObj), Default) ->
wh_json:is_true(?KEY_FORMAT_FROM_URI, JObj, Default).
-spec body(req()) -> api_binary().
-spec body(req(), Default) -> ne_binary() | Default.
body(Req) ->
body(Req, 'undefined').
body(?REQ_TYPE(JObj), Default) ->
wh_json:get_value(?KEY_BODY, JObj, Default).
-spec bypass_e164(req()) -> boolean().
-spec bypass_e164(req(), Default) -> boolean() | Default.
bypass_e164(Req) ->
bypass_e164(Req, 'false').
bypass_e164(?REQ_TYPE(JObj), Default) ->
wh_json:is_true(?KEY_BYPASS_E164, JObj, Default).
-spec t38_enabled(req()) -> boolean().
-spec t38_enabled(req(), Default) -> boolean() | Default.
t38_enabled(Req) ->
t38_enabled(Req, 'false').
t38_enabled(?REQ_TYPE(JObj), Default) ->
wh_json:is_true(?KEY_T38_ENABLED, JObj, Default).
-spec msg_id(req()) -> api_binary().
msg_id(?REQ_TYPE(JObj)) ->
wh_api:msg_id(JObj).
-spec server_id(req()) -> api_binary().
server_id(?REQ_TYPE(JObj)) ->
wh_api:server_id(JObj).
-spec delete_keys(req(), ne_binaries()) -> req().
delete_keys(?REQ_TYPE(JObj), Keys) ->
?REQ_TYPE(wh_json:delete_keys(Keys, JObj)).
-spec set_values(req(), wh_proplist()) -> req().
set_values(?REQ_TYPE(JObj), Props) ->
?REQ_TYPE(wh_json:set_values(Props, JObj)).
|
8f2b966fb4c51d33cc787021807c6a63a52e877b1a9bfae4642b6a67a2567559 | karlll/bioerl | origc.erl | %
2013 - 11 - 24 karlll < >
%
%
% Finding origc
%
-module(origc).
-compile([export_all]).
%% -------------------------------------------------------------------------- %%
%% Frequency %%
%% -------------------------------------------------------------------------- %%
%
% Print most frequent k-mers of a specified length in a providede string
%
print_most_frequent_kmers([],_) ->
io:format("None.");
print_most_frequent_kmers(_,0) ->
io:format("None.");
print_most_frequent_kmers(String,Length) ->
TopKmers = most_frequent_kmers(String,Length),
{_,Freq,_} = hd(TopKmers),
io:format("Most frequent K-mer(s), length ~p :~n",[Length]),
lists:keymap(fun(El) -> io:format("~s~n",[atom_to_list(El)]) end, 1, TopKmers),
io:format("Frequency : ~p~n",[Freq]).
%
Returns a tuple list of the most frequent kmers of in
% the provided string, of the format
% [{kmer_atom, frequency}]
%
most_frequent_kmers(String,Length) ->
Freqs = get_kmer_freq(String,Length),
{_,TopFreq,_} = hd(Freqs),
lists:filter(fun(El) -> case El of
{_,TopFreq,_} ->
true;
_ ->
false
end
end,
Freqs
).
%
% Returns a list of the format
% [{kmer_atom,{frequency,position_list}}], sorted in descending frequency
%
get_kmer_freq(String,Length) ->
get_kmer_freq(String,length(String),Length,0,[]).
get_kmer_freq([],_,_,_,Acc) ->
lists:reverse(lists:keysort(2,Acc));
get_kmer_freq(String,StringLength,KmerLength,Pos,Acc) ->
case StringLength of
L when L < KmerLength ->
lists:reverse(lists:keysort(2,Acc));
_L ->
{Kmer,_} = lists:split(KmerLength,String),
Key = list_to_atom(Kmer),
NewAcc = case lists:keyfind(Key,1,Acc) of
first entry
{Key,Count,PosList} -> lists:keyreplace(Key,1,Acc,{Key,Count+1,[Pos|PosList]})
end,
get_kmer_freq(lists:nthtail(1,String),StringLength-1,KmerLength,Pos+1,NewAcc)
end.
%% -------------------------------------------------------------------------- %%
%% Complement %%
%% -------------------------------------------------------------------------- %%
%
% compl(S) -> S'
% Returns the complement of string S
%
compl($A) -> $T;
compl($T) -> $A;
compl($G) -> $C;
compl($C) -> $G;
compl(String) ->
compl(String,[]).
compl([N|Tail],Acc) ->
compl(Tail,[compl(N)|Acc]);
compl([],Acc) ->
Acc.
%% -------------------------------------------------------------------------- %%
%% Find substrings %%
%% -------------------------------------------------------------------------- %%
print_substring_positions(SubString,String) ->
Pos = find_substring(SubString,String),
io:format("Positions :~n"),
lists:foreach(fun(El) -> io:format("~p ",[El]) end, Pos).
%
Find the positions of substring SubString in String
%
find_substring(SubString,String) ->
LenSub = length(SubString),
LenStr = length(String),
lists:reverse(find_substring(SubString,String,LenSub,LenStr,0,[])).
find_substring(_,[],_,0,_,Acc) ->
Acc; % end of string
find_substring(SubString,String,LenSub,LenStr,Pos,Acc) ->
case LenStr of
L when L < LenSub ->
Acc; % no more possible matches
_ ->
case lists:prefix(SubString,String) of
true -> NewAcc = [Pos|Acc];
false -> NewAcc = Acc
end,
find_substring(SubString,lists:nthtail(1,String),LenSub,LenStr-1,Pos+1,NewAcc)
end.
%% -------------------------------------------------------------------------- %%
%% Find clumps %%
%% -------------------------------------------------------------------------- %%
%
% TODO: this is not very efficient, re-write!
%
print_find_clumps(String,WindowLength,KmerLength,ClumpNum) ->
Clumps = find_clumps(String,WindowLength,KmerLength,ClumpNum),
ClumpsStr = lists:map(fun(El) -> {K,_,_} = El, atom_to_list(K) end, lists:flatten(Clumps)),
lists:foreach(fun(El) -> io:format("~s~n",[El]) end, lists:usort(ClumpsStr)).
%
Find the k - mers of length KmerLength within a substring of WindowLength
in String occuring ClumpNum number of times
%
find_clumps(String,WindowLength,KmerLength,ClumpNum) ->
find_clumps(String,length(String),WindowLength,KmerLength,ClumpNum,0,[]).
find_clumps([],_,_,_,_,_,Acc) ->
Acc;
find_clumps(String,StringLen,WindowLength,KmerLength,ClumpNum,OffsetCounter,Acc) ->
case StringLen of
L when L < WindowLength ->
Acc;
_L ->
{Window,_} = lists:split(WindowLength,String),
Clumps = clumps(Window,KmerLength,ClumpNum),
NewAcc = case Clumps of
[] -> Acc;
_ -> [Clumps|Acc]
end,
find_clumps(lists:nthtail(1,String),StringLen-1,WindowLength,KmerLength,ClumpNum,OffsetCounter+1,NewAcc)
end.
clumps(String,KmerLength,ClumpNum) ->
Freqs = get_kmer_freq(String,KmerLength),
lists:filter(fun(El) -> case El of
{_,ClumpNum,_} -> true;
_ -> false
end
end, Freqs).
%% -------------------------------------------------------------------------- %%
%% Skew %%
%% -------------------------------------------------------------------------- %%
print_skew(String) ->
S = skew(String),
lists:foreach(fun(El) -> io:format("~p ", [El]) end, S).
%
% skew(S) -> Skew::list().
%
Caluculate skew for string S
%
skew(String) when is_list(String) ->
skew(String,[0]);
skew($A) -> 0;
skew($T) -> 0;
skew($C) -> -1;
skew($G) -> 1.
skew([],Acc) ->
lists:reverse(Acc);
skew([H|Tail],Acc) ->
skew(Tail,[skew(H)+hd(Acc)|Acc]).
print_min_skew(String) ->
S = min_skew(String),
lists:foreach(fun(El) -> io:format("~p ", [El]) end, S).
%
% min_skew(S) -> Pos::list().
%
% Find position in S which minimizes skew
%
min_skew(String) ->
S = skew(String),
Min = lists:min(S),
min_skew(S,Min,0,[]).
min_skew([],_,_,Acc) ->
lists:reverse(Acc);
min_skew([Min|Tail],Min,Count,Acc) ->
min_skew(Tail,Min,Count+1,[Count|Acc]);
min_skew([_H|Tail],Min,Count,Acc) ->
min_skew(Tail,Min,Count+1,Acc).
%% -------------------------------------------------------------------------- %%
%% Approx. pattern matching %%
%% -------------------------------------------------------------------------- %%
print_approx_match(Pattern,String,Mismatches) ->
ApproxMatches = approx_match(Pattern,String,Mismatches),
lists:foreach(fun(El) -> {Pos,_,_} = El, io:format("~p ", [Pos]) end, ApproxMatches).
write_approx_match_positions(Pattern,String,Mismatches) ->
ApproxMatches = approx_match(Pattern,String,Mismatches),
ResultStr = lists:map(fun(El) -> {Pos,_,_} = El, io_lib:fwrite("~p ", [Pos]) end, ApproxMatches),
util:write_result(lists:flatten(ResultStr),"out.result").
%
Find all patterns in string String which has at most differences
from Pattern .
%
approx_match(P , S , M ) - > [ { Pos::integer(),Count::integer(),Pattern2::string ( ) } ]
%
approx_match(Pattern,String,Mismatches) ->
LenPattern = length(Pattern),
LenString = length(String),
approx_match(Pattern,String,Mismatches,LenPattern,LenString,0,[]).
approx_match(_,[],_,_,_,_,Acc) ->
lists:reverse(Acc);
approx_match(Pattern,String,Mismatches,LenPattern,LenString,Pos,Acc) ->
case LenString of
L when L < LenPattern ->
lists:reverse(Acc);
_ ->
{Pattern2,_} = lists:split(LenPattern,String),
NewAcc = case count_mismatch(Pattern,Pattern2) of
C when C =< Mismatches ->
[{Pos,C,Pattern2}|Acc];
_ ->
Acc
end,
approx_match(Pattern,lists:nthtail(1,String),Mismatches,LenPattern,LenString-1,Pos+1,NewAcc)
end.
count_mismatch(String1,String2) when length(String1) == length(String2) ->
count_mismatch(String1,String2,0).
count_mismatch([],[],Count) ->
Count;
count_mismatch([H|Tail1],[H|Tail2],Count) ->
count_mismatch(Tail1,Tail2,Count);
count_mismatch([_H|Tail1],[_N|Tail2],Count) ->
count_mismatch(Tail1,Tail2,Count+1).
| null | https://raw.githubusercontent.com/karlll/bioerl/6ade2d63bb37f1312e33c3bbad9b7252323ad369/src/origc.erl | erlang |
Finding origc
-------------------------------------------------------------------------- %%
Frequency %%
-------------------------------------------------------------------------- %%
Print most frequent k-mers of a specified length in a providede string
the provided string, of the format
[{kmer_atom, frequency}]
Returns a list of the format
[{kmer_atom,{frequency,position_list}}], sorted in descending frequency
-------------------------------------------------------------------------- %%
Complement %%
-------------------------------------------------------------------------- %%
compl(S) -> S'
Returns the complement of string S
-------------------------------------------------------------------------- %%
Find substrings %%
-------------------------------------------------------------------------- %%
end of string
no more possible matches
-------------------------------------------------------------------------- %%
Find clumps %%
-------------------------------------------------------------------------- %%
TODO: this is not very efficient, re-write!
-------------------------------------------------------------------------- %%
Skew %%
-------------------------------------------------------------------------- %%
skew(S) -> Skew::list().
min_skew(S) -> Pos::list().
Find position in S which minimizes skew
-------------------------------------------------------------------------- %%
Approx. pattern matching %%
-------------------------------------------------------------------------- %%
| 2013 - 11 - 24 karlll < >
-module(origc).
-compile([export_all]).
print_most_frequent_kmers([],_) ->
io:format("None.");
print_most_frequent_kmers(_,0) ->
io:format("None.");
print_most_frequent_kmers(String,Length) ->
TopKmers = most_frequent_kmers(String,Length),
{_,Freq,_} = hd(TopKmers),
io:format("Most frequent K-mer(s), length ~p :~n",[Length]),
lists:keymap(fun(El) -> io:format("~s~n",[atom_to_list(El)]) end, 1, TopKmers),
io:format("Frequency : ~p~n",[Freq]).
Returns a tuple list of the most frequent kmers of in
most_frequent_kmers(String,Length) ->
Freqs = get_kmer_freq(String,Length),
{_,TopFreq,_} = hd(Freqs),
lists:filter(fun(El) -> case El of
{_,TopFreq,_} ->
true;
_ ->
false
end
end,
Freqs
).
get_kmer_freq(String,Length) ->
get_kmer_freq(String,length(String),Length,0,[]).
get_kmer_freq([],_,_,_,Acc) ->
lists:reverse(lists:keysort(2,Acc));
get_kmer_freq(String,StringLength,KmerLength,Pos,Acc) ->
case StringLength of
L when L < KmerLength ->
lists:reverse(lists:keysort(2,Acc));
_L ->
{Kmer,_} = lists:split(KmerLength,String),
Key = list_to_atom(Kmer),
NewAcc = case lists:keyfind(Key,1,Acc) of
first entry
{Key,Count,PosList} -> lists:keyreplace(Key,1,Acc,{Key,Count+1,[Pos|PosList]})
end,
get_kmer_freq(lists:nthtail(1,String),StringLength-1,KmerLength,Pos+1,NewAcc)
end.
compl($A) -> $T;
compl($T) -> $A;
compl($G) -> $C;
compl($C) -> $G;
compl(String) ->
compl(String,[]).
compl([N|Tail],Acc) ->
compl(Tail,[compl(N)|Acc]);
compl([],Acc) ->
Acc.
print_substring_positions(SubString,String) ->
Pos = find_substring(SubString,String),
io:format("Positions :~n"),
lists:foreach(fun(El) -> io:format("~p ",[El]) end, Pos).
Find the positions of substring SubString in String
find_substring(SubString,String) ->
LenSub = length(SubString),
LenStr = length(String),
lists:reverse(find_substring(SubString,String,LenSub,LenStr,0,[])).
find_substring(_,[],_,0,_,Acc) ->
find_substring(SubString,String,LenSub,LenStr,Pos,Acc) ->
case LenStr of
L when L < LenSub ->
_ ->
case lists:prefix(SubString,String) of
true -> NewAcc = [Pos|Acc];
false -> NewAcc = Acc
end,
find_substring(SubString,lists:nthtail(1,String),LenSub,LenStr-1,Pos+1,NewAcc)
end.
print_find_clumps(String,WindowLength,KmerLength,ClumpNum) ->
Clumps = find_clumps(String,WindowLength,KmerLength,ClumpNum),
ClumpsStr = lists:map(fun(El) -> {K,_,_} = El, atom_to_list(K) end, lists:flatten(Clumps)),
lists:foreach(fun(El) -> io:format("~s~n",[El]) end, lists:usort(ClumpsStr)).
Find the k - mers of length KmerLength within a substring of WindowLength
in String occuring ClumpNum number of times
find_clumps(String,WindowLength,KmerLength,ClumpNum) ->
find_clumps(String,length(String),WindowLength,KmerLength,ClumpNum,0,[]).
find_clumps([],_,_,_,_,_,Acc) ->
Acc;
find_clumps(String,StringLen,WindowLength,KmerLength,ClumpNum,OffsetCounter,Acc) ->
case StringLen of
L when L < WindowLength ->
Acc;
_L ->
{Window,_} = lists:split(WindowLength,String),
Clumps = clumps(Window,KmerLength,ClumpNum),
NewAcc = case Clumps of
[] -> Acc;
_ -> [Clumps|Acc]
end,
find_clumps(lists:nthtail(1,String),StringLen-1,WindowLength,KmerLength,ClumpNum,OffsetCounter+1,NewAcc)
end.
clumps(String,KmerLength,ClumpNum) ->
Freqs = get_kmer_freq(String,KmerLength),
lists:filter(fun(El) -> case El of
{_,ClumpNum,_} -> true;
_ -> false
end
end, Freqs).
print_skew(String) ->
S = skew(String),
lists:foreach(fun(El) -> io:format("~p ", [El]) end, S).
Caluculate skew for string S
skew(String) when is_list(String) ->
skew(String,[0]);
skew($A) -> 0;
skew($T) -> 0;
skew($C) -> -1;
skew($G) -> 1.
skew([],Acc) ->
lists:reverse(Acc);
skew([H|Tail],Acc) ->
skew(Tail,[skew(H)+hd(Acc)|Acc]).
print_min_skew(String) ->
S = min_skew(String),
lists:foreach(fun(El) -> io:format("~p ", [El]) end, S).
min_skew(String) ->
S = skew(String),
Min = lists:min(S),
min_skew(S,Min,0,[]).
min_skew([],_,_,Acc) ->
lists:reverse(Acc);
min_skew([Min|Tail],Min,Count,Acc) ->
min_skew(Tail,Min,Count+1,[Count|Acc]);
min_skew([_H|Tail],Min,Count,Acc) ->
min_skew(Tail,Min,Count+1,Acc).
print_approx_match(Pattern,String,Mismatches) ->
ApproxMatches = approx_match(Pattern,String,Mismatches),
lists:foreach(fun(El) -> {Pos,_,_} = El, io:format("~p ", [Pos]) end, ApproxMatches).
write_approx_match_positions(Pattern,String,Mismatches) ->
ApproxMatches = approx_match(Pattern,String,Mismatches),
ResultStr = lists:map(fun(El) -> {Pos,_,_} = El, io_lib:fwrite("~p ", [Pos]) end, ApproxMatches),
util:write_result(lists:flatten(ResultStr),"out.result").
Find all patterns in string String which has at most differences
from Pattern .
approx_match(P , S , M ) - > [ { Pos::integer(),Count::integer(),Pattern2::string ( ) } ]
approx_match(Pattern,String,Mismatches) ->
LenPattern = length(Pattern),
LenString = length(String),
approx_match(Pattern,String,Mismatches,LenPattern,LenString,0,[]).
approx_match(_,[],_,_,_,_,Acc) ->
lists:reverse(Acc);
approx_match(Pattern,String,Mismatches,LenPattern,LenString,Pos,Acc) ->
case LenString of
L when L < LenPattern ->
lists:reverse(Acc);
_ ->
{Pattern2,_} = lists:split(LenPattern,String),
NewAcc = case count_mismatch(Pattern,Pattern2) of
C when C =< Mismatches ->
[{Pos,C,Pattern2}|Acc];
_ ->
Acc
end,
approx_match(Pattern,lists:nthtail(1,String),Mismatches,LenPattern,LenString-1,Pos+1,NewAcc)
end.
count_mismatch(String1,String2) when length(String1) == length(String2) ->
count_mismatch(String1,String2,0).
count_mismatch([],[],Count) ->
Count;
count_mismatch([H|Tail1],[H|Tail2],Count) ->
count_mismatch(Tail1,Tail2,Count);
count_mismatch([_H|Tail1],[_N|Tail2],Count) ->
count_mismatch(Tail1,Tail2,Count+1).
|
5e497a12fa0d108f15da1119fd19b18482f75bf725874dbabf5fe8be0f765520 | mirage/ocaml-qcow | qcow_config.mli |
* Copyright ( C ) 2017 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (C) 2017 David Scott <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
type t = {
id: string;
(** unique name for the prometheus metrics *)
discard: bool;
(** discard (aka TRIM) is enabled *)
keep_erased: int64 option;
(** maintain a free pool of this many erased sectors *)
compact_after_unmaps: int64 option;
(** once more than this many sectors are free, perform a compact *)
check_on_connect: bool;
(** perform an integrity check on connect *)
runtime_asserts: bool;
* constantly verify GC invariants are held
read_only: bool;
(** guarantee to not modify the file *)
}
val create:
?id:string ->
?discard:bool -> ?keep_erased:int64 ->
?compact_after_unmaps:int64 -> ?check_on_connect:bool ->
?runtime_asserts:bool -> ?read_only:bool ->
unit -> t
val default: unit -> t
(** default configuration values *)
val to_string: t -> string
(** convert the configuration to a string *)
val of_string: string -> (t, [> `Msg of string ]) result
(** parse the output of [to_string t] *)
| null | https://raw.githubusercontent.com/mirage/ocaml-qcow/2418c66627dcce8420bcb06d7547db171528060a/lib/qcow_config.mli | ocaml | * unique name for the prometheus metrics
* discard (aka TRIM) is enabled
* maintain a free pool of this many erased sectors
* once more than this many sectors are free, perform a compact
* perform an integrity check on connect
* guarantee to not modify the file
* default configuration values
* convert the configuration to a string
* parse the output of [to_string t] |
* Copyright ( C ) 2017 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (C) 2017 David Scott <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
type t = {
id: string;
discard: bool;
keep_erased: int64 option;
compact_after_unmaps: int64 option;
check_on_connect: bool;
runtime_asserts: bool;
* constantly verify GC invariants are held
read_only: bool;
}
val create:
?id:string ->
?discard:bool -> ?keep_erased:int64 ->
?compact_after_unmaps:int64 -> ?check_on_connect:bool ->
?runtime_asserts:bool -> ?read_only:bool ->
unit -> t
val default: unit -> t
val to_string: t -> string
val of_string: string -> (t, [> `Msg of string ]) result
|
d64f3c6858755cf9aef1f0f4fd26623b9b805c298931273084be5cc9a9dc3931 | ocaml-flambda/flambda-backend | unboxed_invariant_ref.ml | type t = { mutable x : int }
let[@inline] f l =
let t = { x = 0 } in
List.iter (fun () -> t.x <- 1 + t.x) l;
t
let test l = (f l).x
| null | https://raw.githubusercontent.com/ocaml-flambda/flambda-backend/92dbdba868235321a48916b8f1bb3f04ee884d3f/middle_end/flambda2/tests/ref_to_var/unboxed_invariant_ref.ml | ocaml | type t = { mutable x : int }
let[@inline] f l =
let t = { x = 0 } in
List.iter (fun () -> t.x <- 1 + t.x) l;
t
let test l = (f l).x
| |
e5554760d5a8d6d2952c3b5d3397fa111ed59998e253c91022ed3d74704e2ff6 | Emmanuel-PLF/facile | fcl_boolean.ml | (***********************************************************************)
(* *)
FaCiLe
A Functional Constraint Library
(* *)
, , LOG , CENA
(* *)
Copyright 2004 CENA . All rights reserved . This file is distributed
(* under the terms of the GNU Lesser General Public License. *)
(***********************************************************************)
open Fcl_var
open Fcl_misc.Operators
(* sum xi = v ou sum xi <> v*)
let linear (terms : Fd.t array) v shared_min shared_max equal =
let name = "Boolean.linear" in
let monotonic_propagate subst =
Array.fold_left
(fun sum x ->
match Fd.value x with
Val 0 -> sum
| Val 1 -> 1 + sum
| Unk attr -> begin Fd.subst x subst;
ignore attr;
subst + sum;
end
| _ -> Fcl_debug.internal_error (name ^ ": non boolean variable"))
0 terms in
let delay c =
Array.iter (fun x -> delay [Fd.on_subst] x c) terms;
delay [Fd.on_min] v c; delay [Fd.on_max] v c
and fprint c =
Printf.fprintf c "%a %s(bool) " Fd.fprint v (if equal then "=" else "<>");
if Array.length terms > 0 then begin
Fd.fprint c terms.(0);
for i = 1 to Array.length terms - 1 do
Printf.fprintf c "+%a" Fd.fprint terms.(i) done end;
flush c
and update _ =
let shared_min = Fcl_stak.get shared_min
and shared_max = Fcl_stak.get shared_max in
if equal then
if the maximum of v is reached , all other variables can be set to 0
if shared_min = Fd.max v then begin
if monotonic_propagate 0 > Fd.max v then
Fcl_stak.fail (name ^ ": monotonic_propagate > max");
Fd.unify v shared_min;
true end
(* and vice versa *)
else if shared_max = Fd.min v then begin
if monotonic_propagate 1 < Fd.min v then
Fcl_stak.fail (name ^ ": monotonic_propagate < min");
Fd.unify v shared_max;
true end
else if shared_min = shared_max then begin
Fd.unify v shared_min; true end
else begin Fd.refine_low_up v shared_min shared_max; false end
else begin (* not equal *)
if shared_min = shared_max then begin
begin match Fd.value v with
Val x -> if x = shared_min then Fcl_stak.fail (name ^ ": (<>)")
| Unk attr ->
Fd.refine v (Fcl_domain.remove shared_min (Attr.dom attr)) end;
true end
else (shared_min > Fd.max v || shared_max < Fd.min v) end in
Fcl_cstr.create ~name ~fprint update delay
let set_cr op stakref = Fcl_stak.set stakref (op (Fcl_stak.get stakref) 1)
let set_decr = set_cr (-)
let set_incr = set_cr (+)
let demon xs shared_min shared_max =
let name = "Boolean.demon" in
let delay c =
Array.iteri (fun i xi -> delay [Fd.on_subst] xi ~waking_id:i c) xs
and fprint c = Printf.fprintf c "%s: %a" name Fd.fprint_array xs
and init () = ()
and update i =
begin match Fd.value xs.(i) with
Val 0 -> set_decr shared_max
| Val 1 -> set_incr shared_min
| _ -> Fcl_debug.internal_error "boolean_demon : variable is not ground or not boolean" end;
true in
Fcl_cstr.create ~name ~fprint ~init update ~nb_wakings:(Array.length xs) delay
let is_boolean x =
let min_x, max_x = Fcl_var.Fd.min_max x in min_x = 0 && max_x = 1
let is_boolean_array l =
try
Array.iter (fun b -> if not (is_boolean b) then raise Exit) l;
true
with Exit -> false
let cstr bools sum =
assert (is_boolean_array bools);
let size = Array.length bools in
let shared_min = Fcl_stak.ref 0 and shared_max = Fcl_stak.ref size in
Fcl_cstr.init (demon bools shared_min shared_max);
linear bools sum shared_min shared_max true
let sum bools =
assert (is_boolean_array bools);
let size = Array.length bools in
let shared_min = Fcl_stak.ref 0 and shared_max = Fcl_stak.ref size in
Fcl_cstr.init (demon bools shared_min shared_max);
let sum = Fd.create (Fcl_domain.interval 0 size) in
Fcl_cstr.post (linear bools sum shared_min shared_max true);
sum
| null | https://raw.githubusercontent.com/Emmanuel-PLF/facile/3b6902e479019c25b582042d9a02152fec145eb0/lib/fcl_boolean.ml | ocaml | *********************************************************************
under the terms of the GNU Lesser General Public License.
*********************************************************************
sum xi = v ou sum xi <> v
and vice versa
not equal | FaCiLe
A Functional Constraint Library
, , LOG , CENA
Copyright 2004 CENA . All rights reserved . This file is distributed
open Fcl_var
open Fcl_misc.Operators
let linear (terms : Fd.t array) v shared_min shared_max equal =
let name = "Boolean.linear" in
let monotonic_propagate subst =
Array.fold_left
(fun sum x ->
match Fd.value x with
Val 0 -> sum
| Val 1 -> 1 + sum
| Unk attr -> begin Fd.subst x subst;
ignore attr;
subst + sum;
end
| _ -> Fcl_debug.internal_error (name ^ ": non boolean variable"))
0 terms in
let delay c =
Array.iter (fun x -> delay [Fd.on_subst] x c) terms;
delay [Fd.on_min] v c; delay [Fd.on_max] v c
and fprint c =
Printf.fprintf c "%a %s(bool) " Fd.fprint v (if equal then "=" else "<>");
if Array.length terms > 0 then begin
Fd.fprint c terms.(0);
for i = 1 to Array.length terms - 1 do
Printf.fprintf c "+%a" Fd.fprint terms.(i) done end;
flush c
and update _ =
let shared_min = Fcl_stak.get shared_min
and shared_max = Fcl_stak.get shared_max in
if equal then
if the maximum of v is reached , all other variables can be set to 0
if shared_min = Fd.max v then begin
if monotonic_propagate 0 > Fd.max v then
Fcl_stak.fail (name ^ ": monotonic_propagate > max");
Fd.unify v shared_min;
true end
else if shared_max = Fd.min v then begin
if monotonic_propagate 1 < Fd.min v then
Fcl_stak.fail (name ^ ": monotonic_propagate < min");
Fd.unify v shared_max;
true end
else if shared_min = shared_max then begin
Fd.unify v shared_min; true end
else begin Fd.refine_low_up v shared_min shared_max; false end
if shared_min = shared_max then begin
begin match Fd.value v with
Val x -> if x = shared_min then Fcl_stak.fail (name ^ ": (<>)")
| Unk attr ->
Fd.refine v (Fcl_domain.remove shared_min (Attr.dom attr)) end;
true end
else (shared_min > Fd.max v || shared_max < Fd.min v) end in
Fcl_cstr.create ~name ~fprint update delay
let set_cr op stakref = Fcl_stak.set stakref (op (Fcl_stak.get stakref) 1)
let set_decr = set_cr (-)
let set_incr = set_cr (+)
let demon xs shared_min shared_max =
let name = "Boolean.demon" in
let delay c =
Array.iteri (fun i xi -> delay [Fd.on_subst] xi ~waking_id:i c) xs
and fprint c = Printf.fprintf c "%s: %a" name Fd.fprint_array xs
and init () = ()
and update i =
begin match Fd.value xs.(i) with
Val 0 -> set_decr shared_max
| Val 1 -> set_incr shared_min
| _ -> Fcl_debug.internal_error "boolean_demon : variable is not ground or not boolean" end;
true in
Fcl_cstr.create ~name ~fprint ~init update ~nb_wakings:(Array.length xs) delay
let is_boolean x =
let min_x, max_x = Fcl_var.Fd.min_max x in min_x = 0 && max_x = 1
let is_boolean_array l =
try
Array.iter (fun b -> if not (is_boolean b) then raise Exit) l;
true
with Exit -> false
let cstr bools sum =
assert (is_boolean_array bools);
let size = Array.length bools in
let shared_min = Fcl_stak.ref 0 and shared_max = Fcl_stak.ref size in
Fcl_cstr.init (demon bools shared_min shared_max);
linear bools sum shared_min shared_max true
let sum bools =
assert (is_boolean_array bools);
let size = Array.length bools in
let shared_min = Fcl_stak.ref 0 and shared_max = Fcl_stak.ref size in
Fcl_cstr.init (demon bools shared_min shared_max);
let sum = Fd.create (Fcl_domain.interval 0 size) in
Fcl_cstr.post (linear bools sum shared_min shared_max true);
sum
|
44b14f2ab0524a7c37bf752765cc06c4dfdaec0c71b7b19b75b37d35795b9e32 | HunterYIboHu/htdp2-solution | image.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname image) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp")) #f)))
; 定义
(define 7polygon
(star-polygon 40 7 1 "solid" "darkgon"))
(define sta
(star 20 "solid" "red"))
(define cir
(circle 5 "solid" "blue"))
; 运行
( overlay 7polygon )
; (overlay cir sta 7polygon)
( overlay 7polygon )
(overlay/align "right" "bottom"
(rectangle 20 20 "solid" "sliver")
(rectangle 30 30 "solid" "seagreen")
(rectangle 40 40 "solid" "sliver")
(rectangle 50 50 "solid" "seagreen"))
| null | https://raw.githubusercontent.com/HunterYIboHu/htdp2-solution/6182b4c2ef650ac7059f3c143f639d09cd708516/Chapter1/Section2/image.rkt | racket | about the language level of this file in a form that our tools can easily process.
定义
运行
(overlay cir sta 7polygon) | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname image) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp")) #f)))
(define 7polygon
(star-polygon 40 7 1 "solid" "darkgon"))
(define sta
(star 20 "solid" "red"))
(define cir
(circle 5 "solid" "blue"))
( overlay 7polygon )
( overlay 7polygon )
(overlay/align "right" "bottom"
(rectangle 20 20 "solid" "sliver")
(rectangle 30 30 "solid" "seagreen")
(rectangle 40 40 "solid" "sliver")
(rectangle 50 50 "solid" "seagreen"))
|
b13d4db928204ba87787ec13c750d4c487d08b8de8771a6b5b015a75adc1e39c | harpocrates/pretty-ghci | Value.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
module Text.PrettyPrint.GHCi.Value (
prettyPrintValue, value2Doc,
ValuePrintConf(..),
defaultValueConf,
) where
import Text.PrettyPrint.GHCi.Value.Lexer
import Text.PrettyPrint.GHCi.Value.Parser
import System.Terminal.Utils
-- base
import Data.String ( fromString )
import Control.Exception ( catch, ErrorCall )
import System.IO ( stdout )
import qualified Data.List.NonEmpty as N
-- prettyprinter, prettyprinter-ansi-terminal
import Data.Text.Prettyprint.Doc
import Data.Text.Prettyprint.Doc.Render.Terminal
-- | Given a 'Show'-ed value, print that value out to the terminal, add helpful
-- indentation and colours whenever possible. If a structured value cannot be
-- parsed out, this falls back on 'print'.
--
-- The 'Bool' is to enable a slower but potentially smarter layout algorithm.
prettyPrintValue :: Bool -> String -> IO ()
prettyPrintValue smarter str = do
termSize <- getTerminalSize
let layoutOpts = LayoutOptions (AvailablePerLine (maybe 80 snd termSize) 1.0)
layoutAlgo = if smarter then layoutSmart else layoutPretty
rendered = layoutAlgo layoutOpts (value2Doc str)
renderIO stdout rendered `catch` \(_ :: ErrorCall) -> putStr str
-- | Parse a shown value into a pretty 'Doc'. Can throw an error on outputs
-- that could not be parsed properly, but should not throw errors for inputs
-- which are the outputs of 'show' from derived 'Show' instances.
value2Doc :: String -> Doc AnsiStyle
value2Doc shown = case parseValue shown of
Just v -> renderValue defaultValueConf v <> hardline
Nothing -> renderTokens defaultValueConf tokens
where
tokens = lexTokens shown
-- | A Good Enough colour scheme
defaultValueConf :: ValuePrintConf
defaultValueConf = ValuePrintConf
{ vpc_number = color Cyan
, vpc_character = color Blue
, vpc_string = color Green
, vpc_control = bold <> color Magenta
, vpc_comma = color Yellow
, vpc_operator = color White
, vpc_field = italicized <> colorDull Red
, vpc_indent = 2
}
-- | Options for how to colour the terminal output
data ValuePrintConf = ValuePrintConf
{ vpc_number :: AnsiStyle -- ^ all sorts of numeric literals
, vpc_character :: AnsiStyle -- ^ character literals
, vpc_string :: AnsiStyle -- ^ string literals
, vpc_control :: AnsiStyle -- ^ various control characters (ex: parens)
, vpc_comma :: AnsiStyle -- ^ commas
, vpc_operator :: AnsiStyle -- ^ general operators
, vpc_field :: AnsiStyle -- ^ field in a record
, vpc_indent :: Int -- ^ how many spaces is one indent?
}
-- | Function for turning a 'Value' into a 'Doc'
renderValue :: ValuePrintConf -> Value -> Doc AnsiStyle
renderValue vpc = renderVal
where
renderVal v = case v of
Num i -> num (fromString i)
Char c -> char (fromString c)
Str s -> string (fromString s)
List vs -> renderSeq (ctrl "[") (map (align . renderVal) vs) (ctrl "]")
Tuple vs -> renderSeq (ctrl "(") (map (align . renderVal) vs) (ctrl ")")
Either everything goes on one line or the constructor and args each
-- start on a new line (with args indented)
Prefix c [] -> fromString c
Prefix c vs ->
let args = map (align . renderVal) vs
in fromString c <> group (nest n (line <> align (vsep args)))
Either everything goes on one line , or each argument gets its own
-- line with operators at the beginning of the lines
Infix arg0 ops ->
let tails = fmap (\(op,arg) -> optr (fromString op) <+> align (renderVal arg)) ops
in renderVal arg0 <> group (nest n (line <> align (vsep (N.toList tails))))
Either everything goes on one line or the constructor and fields each
-- start on a new line (with fields indented)
Record c vs ->
let fields = zipWith (\l (f,x) -> hsep [ l, field (fromString f)
, ctrl "=", align (renderVal x) ])
(ctrl "{" : repeat (coma ",")) (N.toList vs)
in fromString c <> group (nest n (line <> align (vcat fields) <+> ctrl "}"))
Paren x -> ctrl "(" <> align (renderVal x) <> ctrl ")"
-- Haskell style formatting of sequence-like things, with the comma at the
-- start of the line
renderSeq :: Doc AnsiStyle -> [Doc AnsiStyle] -> Doc AnsiStyle -> Doc AnsiStyle
renderSeq opn [] cls = opn <> cls
renderSeq opn vs cls = align . group . encloseSep opn' cls' (coma ", ") $ vs
where
opn' = flatAlt (opn <> space) opn
cls' = flatAlt (space <> cls) cls
n = vpc_indent vpc
-- Useful annotations
num = annotate (vpc_number vpc)
char = annotate (vpc_character vpc)
string = annotate (vpc_string vpc)
ctrl = annotate (vpc_control vpc)
coma = annotate (vpc_comma vpc)
optr = annotate (vpc_operator vpc)
field = annotate (vpc_field vpc)
| Function for turning a list of ' 's into a ' Doc '
renderTokens :: ValuePrintConf -> [Token] -> Doc AnsiStyle
renderTokens vpc = mconcat . map renderTok
where
renderTok tok = case tok of
WhiteTok w -> renderWhite w
NumberTok i -> num (fromString i)
CharacterTok c -> char (fromString c)
StringTok s -> string (fromString s)
OpenBracket -> ctrl "["
CloseBracket -> ctrl "]"
OpenParen -> ctrl "("
CloseParen -> ctrl ")"
OpenBrace -> ctrl "{"
CloseBrace -> ctrl "}"
Equal -> ctrl "="
OperatorTok op -> optr (fromString op)
IdentifierTok c -> fromString c
Comma -> coma ","
-- Render whitespace (which might have newlines)
renderWhite :: String -> Doc AnsiStyle
renderWhite "" = mempty
renderWhite str = let (ln, str') = span (/= '\n') str
in fromString ln <> hardline <> renderWhite (drop 1 str')
-- Useful annotations
num = annotate (vpc_number vpc)
char = annotate (vpc_character vpc)
string = annotate (vpc_string vpc)
ctrl = annotate (vpc_control vpc)
coma = annotate (vpc_comma vpc)
optr = annotate (vpc_operator vpc)
| null | https://raw.githubusercontent.com/harpocrates/pretty-ghci/179e8453d8af84a2c64fd4dc506c119fa4f971df/src/Text/PrettyPrint/GHCi/Value.hs | haskell | # LANGUAGE OverloadedStrings #
base
prettyprinter, prettyprinter-ansi-terminal
| Given a 'Show'-ed value, print that value out to the terminal, add helpful
indentation and colours whenever possible. If a structured value cannot be
parsed out, this falls back on 'print'.
The 'Bool' is to enable a slower but potentially smarter layout algorithm.
| Parse a shown value into a pretty 'Doc'. Can throw an error on outputs
that could not be parsed properly, but should not throw errors for inputs
which are the outputs of 'show' from derived 'Show' instances.
| A Good Enough colour scheme
| Options for how to colour the terminal output
^ all sorts of numeric literals
^ character literals
^ string literals
^ various control characters (ex: parens)
^ commas
^ general operators
^ field in a record
^ how many spaces is one indent?
| Function for turning a 'Value' into a 'Doc'
start on a new line (with args indented)
line with operators at the beginning of the lines
start on a new line (with fields indented)
Haskell style formatting of sequence-like things, with the comma at the
start of the line
Useful annotations
Render whitespace (which might have newlines)
Useful annotations | # LANGUAGE ScopedTypeVariables #
module Text.PrettyPrint.GHCi.Value (
prettyPrintValue, value2Doc,
ValuePrintConf(..),
defaultValueConf,
) where
import Text.PrettyPrint.GHCi.Value.Lexer
import Text.PrettyPrint.GHCi.Value.Parser
import System.Terminal.Utils
import Data.String ( fromString )
import Control.Exception ( catch, ErrorCall )
import System.IO ( stdout )
import qualified Data.List.NonEmpty as N
import Data.Text.Prettyprint.Doc
import Data.Text.Prettyprint.Doc.Render.Terminal
prettyPrintValue :: Bool -> String -> IO ()
prettyPrintValue smarter str = do
termSize <- getTerminalSize
let layoutOpts = LayoutOptions (AvailablePerLine (maybe 80 snd termSize) 1.0)
layoutAlgo = if smarter then layoutSmart else layoutPretty
rendered = layoutAlgo layoutOpts (value2Doc str)
renderIO stdout rendered `catch` \(_ :: ErrorCall) -> putStr str
value2Doc :: String -> Doc AnsiStyle
value2Doc shown = case parseValue shown of
Just v -> renderValue defaultValueConf v <> hardline
Nothing -> renderTokens defaultValueConf tokens
where
tokens = lexTokens shown
defaultValueConf :: ValuePrintConf
defaultValueConf = ValuePrintConf
{ vpc_number = color Cyan
, vpc_character = color Blue
, vpc_string = color Green
, vpc_control = bold <> color Magenta
, vpc_comma = color Yellow
, vpc_operator = color White
, vpc_field = italicized <> colorDull Red
, vpc_indent = 2
}
data ValuePrintConf = ValuePrintConf
}
renderValue :: ValuePrintConf -> Value -> Doc AnsiStyle
renderValue vpc = renderVal
where
renderVal v = case v of
Num i -> num (fromString i)
Char c -> char (fromString c)
Str s -> string (fromString s)
List vs -> renderSeq (ctrl "[") (map (align . renderVal) vs) (ctrl "]")
Tuple vs -> renderSeq (ctrl "(") (map (align . renderVal) vs) (ctrl ")")
Either everything goes on one line or the constructor and args each
Prefix c [] -> fromString c
Prefix c vs ->
let args = map (align . renderVal) vs
in fromString c <> group (nest n (line <> align (vsep args)))
Either everything goes on one line , or each argument gets its own
Infix arg0 ops ->
let tails = fmap (\(op,arg) -> optr (fromString op) <+> align (renderVal arg)) ops
in renderVal arg0 <> group (nest n (line <> align (vsep (N.toList tails))))
Either everything goes on one line or the constructor and fields each
Record c vs ->
let fields = zipWith (\l (f,x) -> hsep [ l, field (fromString f)
, ctrl "=", align (renderVal x) ])
(ctrl "{" : repeat (coma ",")) (N.toList vs)
in fromString c <> group (nest n (line <> align (vcat fields) <+> ctrl "}"))
Paren x -> ctrl "(" <> align (renderVal x) <> ctrl ")"
renderSeq :: Doc AnsiStyle -> [Doc AnsiStyle] -> Doc AnsiStyle -> Doc AnsiStyle
renderSeq opn [] cls = opn <> cls
renderSeq opn vs cls = align . group . encloseSep opn' cls' (coma ", ") $ vs
where
opn' = flatAlt (opn <> space) opn
cls' = flatAlt (space <> cls) cls
n = vpc_indent vpc
num = annotate (vpc_number vpc)
char = annotate (vpc_character vpc)
string = annotate (vpc_string vpc)
ctrl = annotate (vpc_control vpc)
coma = annotate (vpc_comma vpc)
optr = annotate (vpc_operator vpc)
field = annotate (vpc_field vpc)
| Function for turning a list of ' 's into a ' Doc '
renderTokens :: ValuePrintConf -> [Token] -> Doc AnsiStyle
renderTokens vpc = mconcat . map renderTok
where
renderTok tok = case tok of
WhiteTok w -> renderWhite w
NumberTok i -> num (fromString i)
CharacterTok c -> char (fromString c)
StringTok s -> string (fromString s)
OpenBracket -> ctrl "["
CloseBracket -> ctrl "]"
OpenParen -> ctrl "("
CloseParen -> ctrl ")"
OpenBrace -> ctrl "{"
CloseBrace -> ctrl "}"
Equal -> ctrl "="
OperatorTok op -> optr (fromString op)
IdentifierTok c -> fromString c
Comma -> coma ","
renderWhite :: String -> Doc AnsiStyle
renderWhite "" = mempty
renderWhite str = let (ln, str') = span (/= '\n') str
in fromString ln <> hardline <> renderWhite (drop 1 str')
num = annotate (vpc_number vpc)
char = annotate (vpc_character vpc)
string = annotate (vpc_string vpc)
ctrl = annotate (vpc_control vpc)
coma = annotate (vpc_comma vpc)
optr = annotate (vpc_operator vpc)
|
bee2d58a0bbeca84391d98540e4faa8026d73ea4153edef9f74aec5973499955 | alexbs01/OCaml | tour.mli | val tour : int -> int -> (int * int) list -> int -> (int * int) list
| null | https://raw.githubusercontent.com/alexbs01/OCaml/c71ccf1289e7b158847ec2a3a8209ddb126b834c/p10/tour.mli | ocaml | val tour : int -> int -> (int * int) list -> int -> (int * int) list
| |
8e852fe227deab25cdea0cdaea23056be66866417523c7fcb1b8bc05678ff665 | jaspervdj/number-six | Quote.hs | --------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module NumberSix.Handlers.Quote
( handler
) where
--------------------------------------------------------------------------------
import Control.Monad.Trans (liftIO)
import Data.Char (isDigit)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Database.SQLite.Simple as Sqlite
import System.Random (randomRIO)
--------------------------------------------------------------------------------
import NumberSix.Bang
import NumberSix.Irc
import NumberSix.Message
import NumberSix.Util
import NumberSix.Util.Error
--------------------------------------------------------------------------------
handler :: UninitializedHandler
handler = makeHandlerWith "Quote"
(map const [addQuoteHook, quoteHook, lastQuoteHook]) initialize
--------------------------------------------------------------------------------
initialize :: Irc ()
initialize = withDatabase $ \db -> Sqlite.execute_ db
-- A global ID and an ID per channel
"CREATE TABLE IF NOT EXISTS quotes ( \
\ id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, \
\ local_id INTEGER NOT NULL, \
\ host TEXT NOT NULL, \
\ channel TEXT NOT NULL, \
\ text TEXT NOT NULL \
\)"
--------------------------------------------------------------------------------
addQuoteHook :: Irc ()
addQuoteHook = onBangCommand "!addquote" $ do
text <- getBangCommandText
host <- getHost
channel <- getChannel
localId <- (fmap (+ 1)) getLastId
withDatabase $ \db -> Sqlite.execute db
"INSERT INTO quotes (local_id, host, channel, text) VALUES (?, ?, ?, ?)"
(localId, host, channel, text)
write $ "Quote " <> T.pack (show localId) <> " added"
--------------------------------------------------------------------------------
quoteHook :: Irc ()
quoteHook = onBangCommand "!quote" $ do
query <- getBangCommandText
if T.null query
-- No query, return a random quote
then do
lastId <- getLastId
r <- liftIO $ randomRIO (1, lastId)
showQuote r
else if T.all isDigit query
-- A number was given, lookup the quote
then showQuote (read $ T.unpack query)
-- A search term was given, search through quotes
else do
qs <- getMatching query
case qs of
[] -> write =<< liftIO randomError
_ -> showQuote =<< liftIO (randomElement qs)
where
getMatching :: Text -> Irc [Integer]
getMatching query = do
host <- getHost
channel <- getChannel
ls <- withDatabase $ \db -> Sqlite.query db
"SELECT local_id FROM quotes \
\WHERE host = ? AND channel = ? AND LOWER(text) LIKE ?"
(host, channel, "%" <> toLower query <> "%")
return [i | Sqlite.Only i <- ls]
--------------------------------------------------------------------------------
lastQuoteHook :: Irc ()
lastQuoteHook = onBangCommand "!lastquote" $ getLastId >>= showQuote
--------------------------------------------------------------------------------
getLastId :: Irc Integer
getLastId = do
host <- getHost
channel <- getChannel
rs <- withDatabase $ \db -> Sqlite.query db
"SELECT MAX(local_id) FROM quotes \
\WHERE host = ? AND channel = ?"
(host, channel)
return $ case rs of
[Sqlite.Only (Just r)] -> r
_ -> 0
--------------------------------------------------------------------------------
showQuote :: Integer -> Irc ()
showQuote n = do
host <- getHost
channel <- getChannel
[Sqlite.Only r] <- withDatabase $ \db -> Sqlite.query db
"SELECT text FROM quotes \
\WHERE host = ? AND channel = ? AND local_id = ?"
(host, channel, n)
write $ "Quote " <> (T.pack $ show n) <> ": " <> r
| null | https://raw.githubusercontent.com/jaspervdj/number-six/1aba681786bd85bd20f79406c681ea581b982cd6/src/NumberSix/Handlers/Quote.hs | haskell | ------------------------------------------------------------------------------
# LANGUAGE OverloadedStrings #
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
A global ID and an ID per channel
------------------------------------------------------------------------------
------------------------------------------------------------------------------
No query, return a random quote
A number was given, lookup the quote
A search term was given, search through quotes
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | module NumberSix.Handlers.Quote
( handler
) where
import Control.Monad.Trans (liftIO)
import Data.Char (isDigit)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Database.SQLite.Simple as Sqlite
import System.Random (randomRIO)
import NumberSix.Bang
import NumberSix.Irc
import NumberSix.Message
import NumberSix.Util
import NumberSix.Util.Error
handler :: UninitializedHandler
handler = makeHandlerWith "Quote"
(map const [addQuoteHook, quoteHook, lastQuoteHook]) initialize
initialize :: Irc ()
initialize = withDatabase $ \db -> Sqlite.execute_ db
"CREATE TABLE IF NOT EXISTS quotes ( \
\ id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, \
\ local_id INTEGER NOT NULL, \
\ host TEXT NOT NULL, \
\ channel TEXT NOT NULL, \
\ text TEXT NOT NULL \
\)"
addQuoteHook :: Irc ()
addQuoteHook = onBangCommand "!addquote" $ do
text <- getBangCommandText
host <- getHost
channel <- getChannel
localId <- (fmap (+ 1)) getLastId
withDatabase $ \db -> Sqlite.execute db
"INSERT INTO quotes (local_id, host, channel, text) VALUES (?, ?, ?, ?)"
(localId, host, channel, text)
write $ "Quote " <> T.pack (show localId) <> " added"
quoteHook :: Irc ()
quoteHook = onBangCommand "!quote" $ do
query <- getBangCommandText
if T.null query
then do
lastId <- getLastId
r <- liftIO $ randomRIO (1, lastId)
showQuote r
else if T.all isDigit query
then showQuote (read $ T.unpack query)
else do
qs <- getMatching query
case qs of
[] -> write =<< liftIO randomError
_ -> showQuote =<< liftIO (randomElement qs)
where
getMatching :: Text -> Irc [Integer]
getMatching query = do
host <- getHost
channel <- getChannel
ls <- withDatabase $ \db -> Sqlite.query db
"SELECT local_id FROM quotes \
\WHERE host = ? AND channel = ? AND LOWER(text) LIKE ?"
(host, channel, "%" <> toLower query <> "%")
return [i | Sqlite.Only i <- ls]
lastQuoteHook :: Irc ()
lastQuoteHook = onBangCommand "!lastquote" $ getLastId >>= showQuote
getLastId :: Irc Integer
getLastId = do
host <- getHost
channel <- getChannel
rs <- withDatabase $ \db -> Sqlite.query db
"SELECT MAX(local_id) FROM quotes \
\WHERE host = ? AND channel = ?"
(host, channel)
return $ case rs of
[Sqlite.Only (Just r)] -> r
_ -> 0
showQuote :: Integer -> Irc ()
showQuote n = do
host <- getHost
channel <- getChannel
[Sqlite.Only r] <- withDatabase $ \db -> Sqlite.query db
"SELECT text FROM quotes \
\WHERE host = ? AND channel = ? AND local_id = ?"
(host, channel, n)
write $ "Quote " <> (T.pack $ show n) <> ": " <> r
|
a9f74f3f35c8e2315db582db0191da19810d285f5f2edaf7aac0e00cede1921b | michalkonecny/aern2 | Integration.hs | # LANGUAGE CPP #
-- #define DEBUG
|
Module : AERN2.RealFun . UnaryBallFun . Integration
Description : unary function integration
Copyright : ( c ) : :
Stability : experimental
Portability : portable
Unary function integration
Module : AERN2.RealFun.UnaryBallFun.Integration
Description : unary function integration
Copyright : (c) Michal Konecny
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Unary function integration
-}
module AERN2.RealFun.UnaryBallFun.Integration
(
integralOnIntervalSubdivide
, integralOnIntervalIncreasePrecision
)
where
#ifdef DEBUG
import Debug.Trace (trace)
#define maybeTrace trace
#else
#define maybeTrace (flip const)
#endif
import MixedTypesNumPrelude
import qualified Prelude as P
-- import Text.Printf
import Control . Arrow
import Control . Applicative
import Control . Lens . Operators
import Control . Lens ( _ Just )
-- import AERN2.MP.Dyadic
import AERN2.MP
import qualified AERN2.MP.Ball as MPBall
import AERN2.QA
import AERN2.Real
import AERN2.Interval (DyadicInterval)
import AERN2.Interval ( Interval ( .. ) , , )
import qualified AERN2.Interval as Interval
import AERN2.RealFun.Operations
import AERN2.RealFun.UnaryBallFun.Type
import AERN2.RealFun.UnaryBallFun.Evaluation ()
instance CanIntegrateOverDom UnaryBallFun DyadicInterval where
type IntegralOverDomType UnaryBallFun DyadicInterval = CauchyRealCN
integrateOverDom f =
integralOnIntervalSubdivide (integralOnIntervalIncreasePrecision getArea)
(\ (AccuracySG _ acG) -> standardPrecisions (ac2prec acG))
integralOnIntervalSubdivide ( ( s , getArea di ) ) standardPrecisions
where
getArea di p =
(apply f diB)*(Interval.width di)
where
diB = raisePrecisionIfBelow p $ mpBall di
integralOnIntervalIncreasePrecision ::
(DyadicInterval -> Precision -> CN MPBall) ->
[Precision] -> DyadicInterval -> Accuracy ->
([Precision], CN MPBall)
integralOnIntervalIncreasePrecision _getArea [] _di _ac =
error "AERN2.RealFun.UnaryBallFun: internal error in integrateOverDom"
integralOnIntervalIncreasePrecision getArea ps@(p1_O:_) di ac =
aux (getArea di p1_O) ps
where
aux diArea1 ps2@(p1:p2:p3rest) =
maybeTrace
(
"integralOnIntervalIncreasePrecision: "
++ "\n di = " ++ show di
++ "\n ac = " ++ show ac
++ "\n p1 = " ++ show p1
++ "\n getAccuracy diArea1 = " ++ show (getAccuracy diArea1)
++ "\n p2 = " ++ show p2
++ "\n getAccuracy diArea2 = " ++ show (getAccuracy diArea2)
)
res
where
res
| getAccuracy diArea1 >= ac
= (ps2, diArea1)
| getAccuracy diArea1 < getAccuracy diArea2
= (p2:p3rest, diArea2)
-- aux diArea2 (p2:p3rest)
| otherwise
= (ps2, diArea2)
diArea2 = getArea di p2
aux diArea1 ps2 = (ps2, diArea1)
integralOnIntervalSubdivide ::
(s -> DyadicInterval -> Accuracy -> (s, CN MPBall))
->
(AccuracySG -> s) -> (DyadicInterval -> CauchyRealCN)
integralOnIntervalSubdivide integralOnInterval initS diO =
newCRCN "integral" [] makeQ
where
makeQ _ ac =
integr (initS ac) diO (_acStrict ac)
integr s di ac
| getAccuracy value >= ac =
maybeTrace
("integrate by subdivide:"
++ "\n di = " ++ show di
++ "\n ac = " ++ show ac
++ "\n getAccuracy value = " ++ show (getAccuracy value)
++ "\n getPrecision value = " ++ show (fmap getPrecision value)
)
value
| otherwise =
maybeTrace
("integrate by subdivide:"
++ "\n di = " ++ show di
++ "\n ac = " ++ show ac
++ "\n getAccuracy value = " ++ show (getAccuracy value)
++ "\n getPrecision value = " ++ show (fmap getPrecision value)
) $
(integr s' diL (ac+1))
+
(integr s' diR (ac+1))
where
(diL, diR) = Interval.split di
(s', value) = integralOnInterval s di ac
| null | https://raw.githubusercontent.com/michalkonecny/aern2/1c8f12dfcb287bd8e3353802a94865d7c2c121ec/aern2-fun-univariate/src/AERN2/RealFun/UnaryBallFun/Integration.hs | haskell | #define DEBUG
import Text.Printf
import AERN2.MP.Dyadic
aux diArea2 (p2:p3rest) | # LANGUAGE CPP #
|
Module : AERN2.RealFun . UnaryBallFun . Integration
Description : unary function integration
Copyright : ( c ) : :
Stability : experimental
Portability : portable
Unary function integration
Module : AERN2.RealFun.UnaryBallFun.Integration
Description : unary function integration
Copyright : (c) Michal Konecny
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Unary function integration
-}
module AERN2.RealFun.UnaryBallFun.Integration
(
integralOnIntervalSubdivide
, integralOnIntervalIncreasePrecision
)
where
#ifdef DEBUG
import Debug.Trace (trace)
#define maybeTrace trace
#else
#define maybeTrace (flip const)
#endif
import MixedTypesNumPrelude
import qualified Prelude as P
import Control . Arrow
import Control . Applicative
import Control . Lens . Operators
import Control . Lens ( _ Just )
import AERN2.MP
import qualified AERN2.MP.Ball as MPBall
import AERN2.QA
import AERN2.Real
import AERN2.Interval (DyadicInterval)
import AERN2.Interval ( Interval ( .. ) , , )
import qualified AERN2.Interval as Interval
import AERN2.RealFun.Operations
import AERN2.RealFun.UnaryBallFun.Type
import AERN2.RealFun.UnaryBallFun.Evaluation ()
instance CanIntegrateOverDom UnaryBallFun DyadicInterval where
type IntegralOverDomType UnaryBallFun DyadicInterval = CauchyRealCN
integrateOverDom f =
integralOnIntervalSubdivide (integralOnIntervalIncreasePrecision getArea)
(\ (AccuracySG _ acG) -> standardPrecisions (ac2prec acG))
integralOnIntervalSubdivide ( ( s , getArea di ) ) standardPrecisions
where
getArea di p =
(apply f diB)*(Interval.width di)
where
diB = raisePrecisionIfBelow p $ mpBall di
integralOnIntervalIncreasePrecision ::
(DyadicInterval -> Precision -> CN MPBall) ->
[Precision] -> DyadicInterval -> Accuracy ->
([Precision], CN MPBall)
integralOnIntervalIncreasePrecision _getArea [] _di _ac =
error "AERN2.RealFun.UnaryBallFun: internal error in integrateOverDom"
integralOnIntervalIncreasePrecision getArea ps@(p1_O:_) di ac =
aux (getArea di p1_O) ps
where
aux diArea1 ps2@(p1:p2:p3rest) =
maybeTrace
(
"integralOnIntervalIncreasePrecision: "
++ "\n di = " ++ show di
++ "\n ac = " ++ show ac
++ "\n p1 = " ++ show p1
++ "\n getAccuracy diArea1 = " ++ show (getAccuracy diArea1)
++ "\n p2 = " ++ show p2
++ "\n getAccuracy diArea2 = " ++ show (getAccuracy diArea2)
)
res
where
res
| getAccuracy diArea1 >= ac
= (ps2, diArea1)
| getAccuracy diArea1 < getAccuracy diArea2
= (p2:p3rest, diArea2)
| otherwise
= (ps2, diArea2)
diArea2 = getArea di p2
aux diArea1 ps2 = (ps2, diArea1)
integralOnIntervalSubdivide ::
(s -> DyadicInterval -> Accuracy -> (s, CN MPBall))
->
(AccuracySG -> s) -> (DyadicInterval -> CauchyRealCN)
integralOnIntervalSubdivide integralOnInterval initS diO =
newCRCN "integral" [] makeQ
where
makeQ _ ac =
integr (initS ac) diO (_acStrict ac)
integr s di ac
| getAccuracy value >= ac =
maybeTrace
("integrate by subdivide:"
++ "\n di = " ++ show di
++ "\n ac = " ++ show ac
++ "\n getAccuracy value = " ++ show (getAccuracy value)
++ "\n getPrecision value = " ++ show (fmap getPrecision value)
)
value
| otherwise =
maybeTrace
("integrate by subdivide:"
++ "\n di = " ++ show di
++ "\n ac = " ++ show ac
++ "\n getAccuracy value = " ++ show (getAccuracy value)
++ "\n getPrecision value = " ++ show (fmap getPrecision value)
) $
(integr s' diL (ac+1))
+
(integr s' diR (ac+1))
where
(diL, diR) = Interval.split di
(s', value) = integralOnInterval s di ac
|
a2b4d3bbface7b99f4b589347e45a0b0270a1428438906ff8aa7764364f0413e | snapframework/xmlhtml | CursorTests.hs | {-# LANGUAGE OverloadedStrings #-}
module Text.XmlHtml.CursorTests (cursorTests) where
import Data.Maybe
import Test.Hspec
import Test.HUnit hiding (Test, Node)
import Text.XmlHtml
import Text.XmlHtml.Cursor
import Text.XmlHtml.TestCommon
------------------------------------------------------------------------------
Tests of navigating with the Cursor type ----------------------------------
------------------------------------------------------------------------------
cursorTests :: Spec
cursorTests = do
testIt "fromNodeAndCurrent " $ fromNodeAndCurrent
testIt "fromNodesAndSiblings " $ fromNodesAndSiblings
testIt "leftSiblings " $ leftSiblings
testIt "emptyFromNodes " $ emptyFromNodes
testIt "cursorNEQ " $ cursorNEQ
it "cursorNavigation " $ cursorNavigation
it "cursorSearch " $ cursorSearch
it "cursorMutation " $ cursorMutation
it "cursorInsertion " $ cursorInsertion
it "cursorDeletion " $ cursorDeletion
fromNodeAndCurrent :: Bool
fromNodeAndCurrent = all (\n -> n == current (fromNode n)) ns
where ns = [
TextNode "foo",
Comment "bar",
Element "foo" [] [],
Element "root" [] [
TextNode "foo",
Comment "bar",
Element "foo" [] []
]
]
fromNodesAndSiblings :: Bool
fromNodesAndSiblings = n == siblings (fromJust $ fromNodes n)
where n = [
TextNode "foo",
Comment "bar",
Element "foo" [] [],
Element "root" [] [
TextNode "foo",
Comment "bar",
Element "foo" [] []
]
]
leftSiblings :: Bool
leftSiblings = fromJust $ do
r <- do
c1 <- fromNodes n
c2 <- right c1
c3 <- right c2
return c3
return (n == siblings r)
where n = [
TextNode "foo",
Comment "bar",
Element "foo" [] [],
Element "root" [] [
TextNode "foo",
Comment "bar",
Element "foo" [] []
]
]
emptyFromNodes :: Bool
emptyFromNodes = isNothing (fromNodes [])
cursorNEQ :: Bool
cursorNEQ = let a = fromNode (Element "a" [] [])
b = fromNode (Element "b" [] [])
in a /= b
-- Sample node structure for running cursor tests.
cursorTestTree :: Node
cursorTestTree = Element "department" [("code", "A17")] [
Element "employee" [("name", "alice")] [
Element "address" [] [
TextNode "124 My Road"
],
Element "phone" [] [
TextNode "555-1234"
]
],
Element "employee" [("name", "bob")] [
Comment "My best friend",
Element "address" [] [
TextNode "123 My Road"
],
Element "temp" [] []
],
Element "employee" [("name", "camille")] [
Element "phone" [] [
TextNode "800-888-8888"
]
]
]
cursorNavigation :: Assertion
cursorNavigation = do
let r = fromNode cursorTestTree
let Just e1 = firstChild r
let Just e2 = getChild 1 r
let Just e3 = lastChild r
assertBool "rootElem" $ isElement (current r)
assertBool "parent of root" $ isNothing (parent r)
assertBool "getChild bounds" $ isNothing (getChild 3 r)
assertBool "getChild negative bounds" $ isNothing (getChild (-1) r)
assertBool "firstChild" $
getAttribute "name" (current e1) == Just "alice"
assertBool "childAt 1 " $
getAttribute "name" (current e2) == Just "bob"
assertBool "lastChild " $
getAttribute "name" (current e3) == Just "camille"
do let Just a = lastChild e2
assertBool "firstChild on empty element" $ isNothing (firstChild a)
assertBool "getChild on empty element" $ isNothing (getChild 0 a)
assertBool "lastChild on empty element" $ isNothing (lastChild a)
do let Just a = right e1
let Just b = right a
assertBool "two paths #1" $ a == e2
assertBool "two paths #2" $ b == e3
assertBool "right off end" $ isNothing (right b)
let Just c = left e3
let Just d = left e2
assertBool "two paths #3" $ c == e2
assertBool "two paths #4" $ d == e1
assertBool "left off end" $ isNothing (left d)
do let Just r1 = parent e2
assertEqual "child -> parent" (current r) (current r1)
do let Just cmt = firstChild e2
assertBool "topNode" $ tagName (topNode cmt) == Just "department"
assertBool "topNodes" $ map tagName (topNodes cmt) == [ Just "department" ]
assertBool "first child of comment" $ isNothing (firstChild cmt)
assertBool "last child of comment" $ isNothing (lastChild cmt)
do assertBool "nextDF down" $ nextDF r == Just e1
let Just cmt = firstChild e2
assertBool "nextDF right" $ nextDF cmt == right cmt
let Just em = lastChild e2
assertBool "nextDF up-right" $ nextDF em == Just e3
let Just pelem = lastChild e3
let Just ptext = lastChild pelem
assertBool "nextDF end" $ isNothing (nextDF ptext)
cursorSearch :: Assertion
cursorSearch = do
let r = fromNode cursorTestTree
let Just e1 = findChild isFirst r
let Just e2 = findChild ((==(Just "bob")) . getAttribute "name" . current) r
let Just e3 = findChild isLast r
assertBool "findChild isFirst" $
getAttribute "name" (current e1) == Just "alice"
assertBool "findChild" $
getAttribute "name" (current e2) == Just "bob"
assertBool "findChild isLast" $
getAttribute "name" (current e3) == Just "camille"
assertBool "findLeft Just" $ findLeft (const True) e2 == Just e1
assertBool "findLeft Nothing" $ findLeft (const False) e2 == Nothing
assertBool "findRight Just" $ findRight (const True) e2 == Just e3
assertBool "findRight Nothing" $ findRight (const False) e2 == Nothing
assertBool "findRec" $ findRec (not . hasChildren) r == (firstChild =<< firstChild e1)
assertBool "isChild true" $ isChild e1
assertBool "isChild false" $ not $ isChild r
assertBool "getNodeIndex" $ getNodeIndex e2 == 1
cursorMutation :: Assertion
cursorMutation = do
let r = fromNode cursorTestTree
let Just e1 = firstChild r
let Just e2 = right e1
let Just e3 = lastChild r
do let Just cmt = firstChild e2
let cmt' = setNode (Comment "Not my friend any more") cmt
let Just e2' = parent cmt'
assertBool "setNode" $ current e2' ==
Element "employee" [("name", "bob")] [
Comment "Not my friend any more",
Element "address" [] [
TextNode "123 My Road"
],
Element "temp" [] []
]
do let e1' = modifyNode (setAttribute "name" "bryan") e1
let n = current e1'
assertBool "modifyNode" $ getAttribute "name" n == Just "bryan"
do let myModifyM = return . setAttribute "name" "chelsea"
e3' <- modifyNodeM myModifyM e3
let n = current e3'
assertBool "modifyNode" $ getAttribute "name" n == Just "chelsea"
cursorInsertion :: Assertion
cursorInsertion = do
let r = fromNode cursorTestTree
let Just alice = firstChild r
let Just bob = getChild 1 r
let Just camille = lastChild r
let fred = Element "employee" [("name", "fred")] []
-- Stock insertLeft
do let ins = insertLeft fred bob
assertBool "insertLeft leaves cursor" $
getAttribute "name" (current ins) == Just "bob"
let Just a = findLeft isFirst ins
assertBool "insertLeft 1" $
getAttribute "name" (current a) == Just "alice"
let Just b = right a
assertBool "insertLeft 2" $
getAttribute "name" (current b) == Just "fred"
let Just c = right b
assertBool "insertLeft 3" $
getAttribute "name" (current c) == Just "bob"
let Just d = right c
assertBool "insertLeft 4" $
getAttribute "name" (current d) == Just "camille"
insertLeft on first child
do let ins = insertLeft fred alice
assertBool "insertLeft firstChild" $
getAttribute "name" (current ins) == Just "alice"
let Just a = findLeft isFirst ins
assertBool "insertLeft firstChild 1" $
getAttribute "name" (current a) == Just "fred"
-- Stock insertRight
do let ins = insertRight fred alice
assertBool "insertRight leaves cursor" $
getAttribute "name" (current ins) == Just "alice"
let Just a = findRight isLast ins
assertBool "insertRight 1" $
getAttribute "name" (current a) == Just "camille"
let Just b = left a
assertBool "insertRight 2" $
getAttribute "name" (current b) == Just "bob"
let Just c = left b
assertBool "insertRight 3" $
getAttribute "name" (current c) == Just "fred"
let Just d = left c
assertBool "insertRight 4" $
getAttribute "name" (current d) == Just "alice"
-- insertRight on last child
do let ins = insertRight fred camille
assertBool "insertRight lastChild" $
getAttribute "name" (current ins) == Just "camille"
let Just a = findRight isLast ins
assertBool "insertRight lastChild 1" $
getAttribute "name" (current a) == Just "fred"
let mary = Element "employee" [("name", "mary")] []
let new = [fred, mary]
-- insertManyLeft
do let ins = insertManyLeft new camille
assertBool "insertManyLeft leaves cursor" $
getAttribute "name" (current ins) == Just "camille"
let Just a = left ins
assertBool "insertManyLeft 1" $
getAttribute "name" (current a) == Just "mary"
let Just b = left a
assertBool "insertManyLeft 2" $
getAttribute "name" (current b) == Just "fred"
let Just c = left b
assertBool "insertManyLeft 3" $
getAttribute "name" (current c) == Just "bob"
-- insertManyRight
do let ins = insertManyRight new alice
assertBool "insertManyRight leaves cursor" $
getAttribute "name" (current ins) == Just "alice"
let Just a = right ins
assertBool "insertManyRight 1" $
getAttribute "name" (current a) == Just "fred"
let Just b = right a
assertBool "insertManyRight 2" $
getAttribute "name" (current b) == Just "mary"
let Just c = right b
assertBool "insertManyRight 3" $
getAttribute "name" (current c) == Just "bob"
-- insertFirstChild and insertLastChild
do let Just ins1 = insertFirstChild fred r
let Just ins2 = insertLastChild mary ins1
let Just a = firstChild ins2
assertBool "insert children 1" $
getAttribute "name" (current a) == Just "fred"
let Just b = right a
assertBool "insert children 2" $
getAttribute "name" (current b) == Just "alice"
let Just c = right b
assertBool "insert children 3" $
getAttribute "name" (current c) == Just "bob"
let Just d = right c
assertBool "insert children 4" $
getAttribute "name" (current d) == Just "camille"
let Just e = right d
assertBool "insert children 5" $
getAttribute "name" (current e) == Just "mary"
assertBool "insert children 6" $ isLast e
-- non-element insertFirstChild and insertLastChild
do let Just cmt = firstChild bob
assertBool "non-elem insertFirstChild" $
insertFirstChild fred cmt == Nothing
assertBool "non-elem insertLastChild" $
insertLastChild fred cmt == Nothing
assertBool "non-elem insertManyFirstChild" $
insertManyFirstChild new cmt == Nothing
assertBool "non-elem insertManyLastChild" $
insertManyLastChild new cmt == Nothing
insertManyFirstChild
do let Just ins = insertManyFirstChild new r
let Just a = firstChild ins
assertBool "insertManyFirstChild 1" $
getAttribute "name" (current a) == Just "fred"
let Just b = right a
assertBool "insertManyFirstChild 2" $
getAttribute "name" (current b) == Just "mary"
let Just c = right b
assertBool "insertManyFirstChild 3" $
getAttribute "name" (current c) == Just "alice"
let Just d = right c
assertBool "insertManyFirstChild 4" $
getAttribute "name" (current d) == Just "bob"
let Just e = right d
assertBool "insertManyFirstChild 5" $
getAttribute "name" (current e) == Just "camille"
assertBool "insertManyFirstChild 6" $ isLast e
-- insertManyLastChild
do let Just ins = insertManyLastChild new r
let Just a = firstChild ins
assertBool "insertManyFirstChild 1" $
getAttribute "name" (current a) == Just "alice"
let Just b = right a
assertBool "insertManyFirstChild 2" $
getAttribute "name" (current b) == Just "bob"
let Just c = right b
assertBool "insertManyFirstChild 3" $
getAttribute "name" (current c) == Just "camille"
let Just d = right c
assertBool "insertManyFirstChild 4" $
getAttribute "name" (current d) == Just "fred"
let Just e = right d
assertBool "insertManyFirstChild 5" $
getAttribute "name" (current e) == Just "mary"
assertBool "insertManyFirstChild 6" $ isLast e
-- insertGoLeft from middle
do let ins = insertGoLeft fred bob
let Just a = right ins
assertBool "insertGoLeft 1" $
getAttribute "name" (current ins) == Just "fred"
assertBool "insertGoLeft 2" $
getAttribute "name" (current a) == Just "bob"
-- insertGoLeft from end
do let ins = insertGoLeft fred alice
let Just a = right ins
assertBool "insertGoLeft 3" $
getAttribute "name" (current ins) == Just "fred"
assertBool "insertGoLeft 4" $
getAttribute "name" (current a) == Just "alice"
insertGoRight from middle
do let ins = insertGoRight fred bob
let Just a = left ins
assertBool "insertGoRight 1" $
getAttribute "name" (current ins) == Just "fred"
assertBool "insertGoRight 2" $
getAttribute "name" (current a) == Just "bob"
insertGoRight from end
do let ins = insertGoRight fred camille
let Just a = left ins
assertBool "insertGoRight 3" $
getAttribute "name" (current ins) == Just "fred"
assertBool "insertGoRight 4" $
getAttribute "name" (current a) == Just "camille"
cursorDeletion :: Assertion
cursorDeletion = do
let r = fromNode cursorTestTree
let Just alice = firstChild r
let Just bob = getChild 1 r
let Just camille = lastChild r
-- removeLeft success
do let Just (n,del) = removeLeft bob
let [b,c] = siblings del
assertBool "removeLeft node1" $ getAttribute "name" n == Just "alice"
assertBool "removeLeft node2" $ getAttribute "name" b == Just "bob"
assertBool "removeLeft node3" $ getAttribute "name" c == Just "camille"
-- removeLeft failure
do assertBool "removeLeft failure" $ isNothing (removeLeft alice)
-- removeRight success
do let Just (n,del) = removeRight bob
let [a,b] = siblings del
assertBool "removeLeft node1" $ getAttribute "name" a == Just "alice"
assertBool "removeLeft node2" $ getAttribute "name" b == Just "bob"
assertBool "removeLeft node3" $ getAttribute "name" n == Just "camille"
failure
do assertBool "removeLeft failure" $ isNothing (removeRight camille)
-- removeGoLeft success
do let Just del = removeGoLeft bob
let Just c = right del
assertBool "removeGoLeft 1" $
getAttribute "name" (current del) == Just "alice"
assertBool "removeGoLeft 2" $
getAttribute "name" (current c) == Just "camille"
-- removeGoLeft failure
do assertBool "removeGoLeft failure" $ isNothing (removeGoLeft alice)
removeGoRight success
do let Just del = removeGoRight bob
let Just a = left del
assertBool "removeGoRight 1" $
getAttribute "name" (current del) == Just "camille"
assertBool "removeGoRight 2" $
getAttribute "name" (current a) == Just "alice"
-- removeGoLeft failure
do assertBool "removeGoRight failure" $ isNothing (removeGoRight camille)
-- removeGoUp success
do let Just del = removeGoUp bob
let [a,c] = childNodes (current del)
assertBool "removeGoUp 1" $ getAttribute "name" a == Just "alice"
assertBool "removeGoUp 2" $ getAttribute "name" c == Just "camille"
-- removeGoUp failure
do assertBool "removeGoUp failure" $ isNothing (removeGoUp r)
| null | https://raw.githubusercontent.com/snapframework/xmlhtml/54463f1691c7b31cc3c4c336a6fe328b1f0ebb95/test/src/Text/XmlHtml/CursorTests.hs | haskell | # LANGUAGE OverloadedStrings #
----------------------------------------------------------------------------
--------------------------------
----------------------------------------------------------------------------
Sample node structure for running cursor tests.
Stock insertLeft
Stock insertRight
insertRight on last child
insertManyLeft
insertManyRight
insertFirstChild and insertLastChild
non-element insertFirstChild and insertLastChild
insertManyLastChild
insertGoLeft from middle
insertGoLeft from end
removeLeft success
removeLeft failure
removeRight success
removeGoLeft success
removeGoLeft failure
removeGoLeft failure
removeGoUp success
removeGoUp failure |
module Text.XmlHtml.CursorTests (cursorTests) where
import Data.Maybe
import Test.Hspec
import Test.HUnit hiding (Test, Node)
import Text.XmlHtml
import Text.XmlHtml.Cursor
import Text.XmlHtml.TestCommon
cursorTests :: Spec
cursorTests = do
testIt "fromNodeAndCurrent " $ fromNodeAndCurrent
testIt "fromNodesAndSiblings " $ fromNodesAndSiblings
testIt "leftSiblings " $ leftSiblings
testIt "emptyFromNodes " $ emptyFromNodes
testIt "cursorNEQ " $ cursorNEQ
it "cursorNavigation " $ cursorNavigation
it "cursorSearch " $ cursorSearch
it "cursorMutation " $ cursorMutation
it "cursorInsertion " $ cursorInsertion
it "cursorDeletion " $ cursorDeletion
fromNodeAndCurrent :: Bool
fromNodeAndCurrent = all (\n -> n == current (fromNode n)) ns
where ns = [
TextNode "foo",
Comment "bar",
Element "foo" [] [],
Element "root" [] [
TextNode "foo",
Comment "bar",
Element "foo" [] []
]
]
fromNodesAndSiblings :: Bool
fromNodesAndSiblings = n == siblings (fromJust $ fromNodes n)
where n = [
TextNode "foo",
Comment "bar",
Element "foo" [] [],
Element "root" [] [
TextNode "foo",
Comment "bar",
Element "foo" [] []
]
]
leftSiblings :: Bool
leftSiblings = fromJust $ do
r <- do
c1 <- fromNodes n
c2 <- right c1
c3 <- right c2
return c3
return (n == siblings r)
where n = [
TextNode "foo",
Comment "bar",
Element "foo" [] [],
Element "root" [] [
TextNode "foo",
Comment "bar",
Element "foo" [] []
]
]
emptyFromNodes :: Bool
emptyFromNodes = isNothing (fromNodes [])
cursorNEQ :: Bool
cursorNEQ = let a = fromNode (Element "a" [] [])
b = fromNode (Element "b" [] [])
in a /= b
cursorTestTree :: Node
cursorTestTree = Element "department" [("code", "A17")] [
Element "employee" [("name", "alice")] [
Element "address" [] [
TextNode "124 My Road"
],
Element "phone" [] [
TextNode "555-1234"
]
],
Element "employee" [("name", "bob")] [
Comment "My best friend",
Element "address" [] [
TextNode "123 My Road"
],
Element "temp" [] []
],
Element "employee" [("name", "camille")] [
Element "phone" [] [
TextNode "800-888-8888"
]
]
]
cursorNavigation :: Assertion
cursorNavigation = do
let r = fromNode cursorTestTree
let Just e1 = firstChild r
let Just e2 = getChild 1 r
let Just e3 = lastChild r
assertBool "rootElem" $ isElement (current r)
assertBool "parent of root" $ isNothing (parent r)
assertBool "getChild bounds" $ isNothing (getChild 3 r)
assertBool "getChild negative bounds" $ isNothing (getChild (-1) r)
assertBool "firstChild" $
getAttribute "name" (current e1) == Just "alice"
assertBool "childAt 1 " $
getAttribute "name" (current e2) == Just "bob"
assertBool "lastChild " $
getAttribute "name" (current e3) == Just "camille"
do let Just a = lastChild e2
assertBool "firstChild on empty element" $ isNothing (firstChild a)
assertBool "getChild on empty element" $ isNothing (getChild 0 a)
assertBool "lastChild on empty element" $ isNothing (lastChild a)
do let Just a = right e1
let Just b = right a
assertBool "two paths #1" $ a == e2
assertBool "two paths #2" $ b == e3
assertBool "right off end" $ isNothing (right b)
let Just c = left e3
let Just d = left e2
assertBool "two paths #3" $ c == e2
assertBool "two paths #4" $ d == e1
assertBool "left off end" $ isNothing (left d)
do let Just r1 = parent e2
assertEqual "child -> parent" (current r) (current r1)
do let Just cmt = firstChild e2
assertBool "topNode" $ tagName (topNode cmt) == Just "department"
assertBool "topNodes" $ map tagName (topNodes cmt) == [ Just "department" ]
assertBool "first child of comment" $ isNothing (firstChild cmt)
assertBool "last child of comment" $ isNothing (lastChild cmt)
do assertBool "nextDF down" $ nextDF r == Just e1
let Just cmt = firstChild e2
assertBool "nextDF right" $ nextDF cmt == right cmt
let Just em = lastChild e2
assertBool "nextDF up-right" $ nextDF em == Just e3
let Just pelem = lastChild e3
let Just ptext = lastChild pelem
assertBool "nextDF end" $ isNothing (nextDF ptext)
cursorSearch :: Assertion
cursorSearch = do
let r = fromNode cursorTestTree
let Just e1 = findChild isFirst r
let Just e2 = findChild ((==(Just "bob")) . getAttribute "name" . current) r
let Just e3 = findChild isLast r
assertBool "findChild isFirst" $
getAttribute "name" (current e1) == Just "alice"
assertBool "findChild" $
getAttribute "name" (current e2) == Just "bob"
assertBool "findChild isLast" $
getAttribute "name" (current e3) == Just "camille"
assertBool "findLeft Just" $ findLeft (const True) e2 == Just e1
assertBool "findLeft Nothing" $ findLeft (const False) e2 == Nothing
assertBool "findRight Just" $ findRight (const True) e2 == Just e3
assertBool "findRight Nothing" $ findRight (const False) e2 == Nothing
assertBool "findRec" $ findRec (not . hasChildren) r == (firstChild =<< firstChild e1)
assertBool "isChild true" $ isChild e1
assertBool "isChild false" $ not $ isChild r
assertBool "getNodeIndex" $ getNodeIndex e2 == 1
cursorMutation :: Assertion
cursorMutation = do
let r = fromNode cursorTestTree
let Just e1 = firstChild r
let Just e2 = right e1
let Just e3 = lastChild r
do let Just cmt = firstChild e2
let cmt' = setNode (Comment "Not my friend any more") cmt
let Just e2' = parent cmt'
assertBool "setNode" $ current e2' ==
Element "employee" [("name", "bob")] [
Comment "Not my friend any more",
Element "address" [] [
TextNode "123 My Road"
],
Element "temp" [] []
]
do let e1' = modifyNode (setAttribute "name" "bryan") e1
let n = current e1'
assertBool "modifyNode" $ getAttribute "name" n == Just "bryan"
do let myModifyM = return . setAttribute "name" "chelsea"
e3' <- modifyNodeM myModifyM e3
let n = current e3'
assertBool "modifyNode" $ getAttribute "name" n == Just "chelsea"
cursorInsertion :: Assertion
cursorInsertion = do
let r = fromNode cursorTestTree
let Just alice = firstChild r
let Just bob = getChild 1 r
let Just camille = lastChild r
let fred = Element "employee" [("name", "fred")] []
do let ins = insertLeft fred bob
assertBool "insertLeft leaves cursor" $
getAttribute "name" (current ins) == Just "bob"
let Just a = findLeft isFirst ins
assertBool "insertLeft 1" $
getAttribute "name" (current a) == Just "alice"
let Just b = right a
assertBool "insertLeft 2" $
getAttribute "name" (current b) == Just "fred"
let Just c = right b
assertBool "insertLeft 3" $
getAttribute "name" (current c) == Just "bob"
let Just d = right c
assertBool "insertLeft 4" $
getAttribute "name" (current d) == Just "camille"
insertLeft on first child
do let ins = insertLeft fred alice
assertBool "insertLeft firstChild" $
getAttribute "name" (current ins) == Just "alice"
let Just a = findLeft isFirst ins
assertBool "insertLeft firstChild 1" $
getAttribute "name" (current a) == Just "fred"
do let ins = insertRight fred alice
assertBool "insertRight leaves cursor" $
getAttribute "name" (current ins) == Just "alice"
let Just a = findRight isLast ins
assertBool "insertRight 1" $
getAttribute "name" (current a) == Just "camille"
let Just b = left a
assertBool "insertRight 2" $
getAttribute "name" (current b) == Just "bob"
let Just c = left b
assertBool "insertRight 3" $
getAttribute "name" (current c) == Just "fred"
let Just d = left c
assertBool "insertRight 4" $
getAttribute "name" (current d) == Just "alice"
do let ins = insertRight fred camille
assertBool "insertRight lastChild" $
getAttribute "name" (current ins) == Just "camille"
let Just a = findRight isLast ins
assertBool "insertRight lastChild 1" $
getAttribute "name" (current a) == Just "fred"
let mary = Element "employee" [("name", "mary")] []
let new = [fred, mary]
do let ins = insertManyLeft new camille
assertBool "insertManyLeft leaves cursor" $
getAttribute "name" (current ins) == Just "camille"
let Just a = left ins
assertBool "insertManyLeft 1" $
getAttribute "name" (current a) == Just "mary"
let Just b = left a
assertBool "insertManyLeft 2" $
getAttribute "name" (current b) == Just "fred"
let Just c = left b
assertBool "insertManyLeft 3" $
getAttribute "name" (current c) == Just "bob"
do let ins = insertManyRight new alice
assertBool "insertManyRight leaves cursor" $
getAttribute "name" (current ins) == Just "alice"
let Just a = right ins
assertBool "insertManyRight 1" $
getAttribute "name" (current a) == Just "fred"
let Just b = right a
assertBool "insertManyRight 2" $
getAttribute "name" (current b) == Just "mary"
let Just c = right b
assertBool "insertManyRight 3" $
getAttribute "name" (current c) == Just "bob"
do let Just ins1 = insertFirstChild fred r
let Just ins2 = insertLastChild mary ins1
let Just a = firstChild ins2
assertBool "insert children 1" $
getAttribute "name" (current a) == Just "fred"
let Just b = right a
assertBool "insert children 2" $
getAttribute "name" (current b) == Just "alice"
let Just c = right b
assertBool "insert children 3" $
getAttribute "name" (current c) == Just "bob"
let Just d = right c
assertBool "insert children 4" $
getAttribute "name" (current d) == Just "camille"
let Just e = right d
assertBool "insert children 5" $
getAttribute "name" (current e) == Just "mary"
assertBool "insert children 6" $ isLast e
do let Just cmt = firstChild bob
assertBool "non-elem insertFirstChild" $
insertFirstChild fred cmt == Nothing
assertBool "non-elem insertLastChild" $
insertLastChild fred cmt == Nothing
assertBool "non-elem insertManyFirstChild" $
insertManyFirstChild new cmt == Nothing
assertBool "non-elem insertManyLastChild" $
insertManyLastChild new cmt == Nothing
insertManyFirstChild
do let Just ins = insertManyFirstChild new r
let Just a = firstChild ins
assertBool "insertManyFirstChild 1" $
getAttribute "name" (current a) == Just "fred"
let Just b = right a
assertBool "insertManyFirstChild 2" $
getAttribute "name" (current b) == Just "mary"
let Just c = right b
assertBool "insertManyFirstChild 3" $
getAttribute "name" (current c) == Just "alice"
let Just d = right c
assertBool "insertManyFirstChild 4" $
getAttribute "name" (current d) == Just "bob"
let Just e = right d
assertBool "insertManyFirstChild 5" $
getAttribute "name" (current e) == Just "camille"
assertBool "insertManyFirstChild 6" $ isLast e
do let Just ins = insertManyLastChild new r
let Just a = firstChild ins
assertBool "insertManyFirstChild 1" $
getAttribute "name" (current a) == Just "alice"
let Just b = right a
assertBool "insertManyFirstChild 2" $
getAttribute "name" (current b) == Just "bob"
let Just c = right b
assertBool "insertManyFirstChild 3" $
getAttribute "name" (current c) == Just "camille"
let Just d = right c
assertBool "insertManyFirstChild 4" $
getAttribute "name" (current d) == Just "fred"
let Just e = right d
assertBool "insertManyFirstChild 5" $
getAttribute "name" (current e) == Just "mary"
assertBool "insertManyFirstChild 6" $ isLast e
do let ins = insertGoLeft fred bob
let Just a = right ins
assertBool "insertGoLeft 1" $
getAttribute "name" (current ins) == Just "fred"
assertBool "insertGoLeft 2" $
getAttribute "name" (current a) == Just "bob"
do let ins = insertGoLeft fred alice
let Just a = right ins
assertBool "insertGoLeft 3" $
getAttribute "name" (current ins) == Just "fred"
assertBool "insertGoLeft 4" $
getAttribute "name" (current a) == Just "alice"
insertGoRight from middle
do let ins = insertGoRight fred bob
let Just a = left ins
assertBool "insertGoRight 1" $
getAttribute "name" (current ins) == Just "fred"
assertBool "insertGoRight 2" $
getAttribute "name" (current a) == Just "bob"
insertGoRight from end
do let ins = insertGoRight fred camille
let Just a = left ins
assertBool "insertGoRight 3" $
getAttribute "name" (current ins) == Just "fred"
assertBool "insertGoRight 4" $
getAttribute "name" (current a) == Just "camille"
cursorDeletion :: Assertion
cursorDeletion = do
let r = fromNode cursorTestTree
let Just alice = firstChild r
let Just bob = getChild 1 r
let Just camille = lastChild r
do let Just (n,del) = removeLeft bob
let [b,c] = siblings del
assertBool "removeLeft node1" $ getAttribute "name" n == Just "alice"
assertBool "removeLeft node2" $ getAttribute "name" b == Just "bob"
assertBool "removeLeft node3" $ getAttribute "name" c == Just "camille"
do assertBool "removeLeft failure" $ isNothing (removeLeft alice)
do let Just (n,del) = removeRight bob
let [a,b] = siblings del
assertBool "removeLeft node1" $ getAttribute "name" a == Just "alice"
assertBool "removeLeft node2" $ getAttribute "name" b == Just "bob"
assertBool "removeLeft node3" $ getAttribute "name" n == Just "camille"
failure
do assertBool "removeLeft failure" $ isNothing (removeRight camille)
do let Just del = removeGoLeft bob
let Just c = right del
assertBool "removeGoLeft 1" $
getAttribute "name" (current del) == Just "alice"
assertBool "removeGoLeft 2" $
getAttribute "name" (current c) == Just "camille"
do assertBool "removeGoLeft failure" $ isNothing (removeGoLeft alice)
removeGoRight success
do let Just del = removeGoRight bob
let Just a = left del
assertBool "removeGoRight 1" $
getAttribute "name" (current del) == Just "camille"
assertBool "removeGoRight 2" $
getAttribute "name" (current a) == Just "alice"
do assertBool "removeGoRight failure" $ isNothing (removeGoRight camille)
do let Just del = removeGoUp bob
let [a,c] = childNodes (current del)
assertBool "removeGoUp 1" $ getAttribute "name" a == Just "alice"
assertBool "removeGoUp 2" $ getAttribute "name" c == Just "camille"
do assertBool "removeGoUp failure" $ isNothing (removeGoUp r)
|
26b4d0cf96221211a2058a71e1522cf0d5a590858dd97085b96c12b595f7fc1d | input-output-hk/cardano-explorer | AddressBalance.hs | # LANGUAGE DataKinds #
{-# LANGUAGE OverloadedStrings #-}
module Explorer.Web.Api.HttpBridge.AddressBalance
( addressBalance
) where
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Trans.Reader (ReaderT)
import Data.ByteString.Char8 (ByteString)
import Data.Text (Text)
import Data.Word (Word16, Word64)
import Database.Esqueleto (InnerJoin (..), Value (..),
(^.), (==.), from, on, select, val, where_)
import Database.Persist.Sql (SqlBackend)
import Explorer.DB (EntityField (..), txOutUnspentP, queryNetworkName)
import Explorer.Web.ClientTypes (CAddress (..), CNetwork (..), CAddressBalance (..),
CAddressBalanceError (..))
import Explorer.Web.Api.Legacy.Util (bsBase16Encode, decodeTextAddress, runQuery)
import Servant (Handler)
This endpoint emulates the endpoint :
--
-- GET: /:network/utxos/:address
--
and returns the current output details :
--
[ { " address " : " 2cWKMJemoBamE3kYCuVLq6pwWwNBJVZmv471Zcb2ok8cH9NjJC4JUkq5rV5ss9ALXWCKN "
, " coin " : 310025
, " index " : 0
-- , "txid": "89eb0d6a8a691dae2cd15ed0369931ce0a949ecafa5c3f93f8121833646e15c3"
-- }
-- ]
-- This endpoint always returns a list (which may be empty).
There are a number of potential failures and wyat
addressBalance
:: SqlBackend -> CNetwork -> CAddress
-> Handler CAddressBalanceError
addressBalance backend (CNetwork networkName) (CAddress addrTxt) =
-- Currently ignore the 'CNetwork' parameter (eg mainnet, testnet etc) as the explorer only
-- supports a single network and returns a result for whichever network its running on.
case decodeTextAddress addrTxt of
Left _ -> pure $ CABError "Invalid address"
Right _ -> runQuery backend $ do
mNetName <- queryNetworkName
case mNetName of
Nothing -> pure $ CABError "Invalid network name"
Just name -> if name /= networkName
then pure $ CABError "Network name mismatch"
else CABValue <$> queryAddressBalance addrTxt
-- -------------------------------------------------------------------------------------------------
queryAddressBalance :: MonadIO m => Text -> ReaderT SqlBackend m [CAddressBalance]
queryAddressBalance addrTxt = do
rows <- select . from $ \ (tx `InnerJoin` txOut) -> do
on (tx ^. TxId ==. txOut ^. TxOutTxId)
txOutUnspentP txOut
where_ (txOut ^. TxOutAddress ==. val addrTxt)
pure (txOut ^. TxOutAddress, tx ^. TxHash, txOut ^. TxOutIndex, txOut ^. TxOutValue)
pure $ map convert rows
where
convert :: (Value Text, Value ByteString, Value Word16, Value Word64) -> CAddressBalance
convert (Value addr, Value txhash, Value index, Value coin) =
CAddressBalance
{ cuaAddress = addr
, cuaTxHash = bsBase16Encode txhash
, cuaIndex = index
, cuaCoin = coin
}
| null | https://raw.githubusercontent.com/input-output-hk/cardano-explorer/3abcb32339edac7c2397114a1d170cc76b82e9b6/cardano-explorer-webapi/src/Explorer/Web/Api/HttpBridge/AddressBalance.hs | haskell | # LANGUAGE OverloadedStrings #
GET: /:network/utxos/:address
, "txid": "89eb0d6a8a691dae2cd15ed0369931ce0a949ecafa5c3f93f8121833646e15c3"
}
]
This endpoint always returns a list (which may be empty).
Currently ignore the 'CNetwork' parameter (eg mainnet, testnet etc) as the explorer only
supports a single network and returns a result for whichever network its running on.
------------------------------------------------------------------------------------------------- | # LANGUAGE DataKinds #
module Explorer.Web.Api.HttpBridge.AddressBalance
( addressBalance
) where
import Control.Monad.IO.Class (MonadIO)
import Control.Monad.Trans.Reader (ReaderT)
import Data.ByteString.Char8 (ByteString)
import Data.Text (Text)
import Data.Word (Word16, Word64)
import Database.Esqueleto (InnerJoin (..), Value (..),
(^.), (==.), from, on, select, val, where_)
import Database.Persist.Sql (SqlBackend)
import Explorer.DB (EntityField (..), txOutUnspentP, queryNetworkName)
import Explorer.Web.ClientTypes (CAddress (..), CNetwork (..), CAddressBalance (..),
CAddressBalanceError (..))
import Explorer.Web.Api.Legacy.Util (bsBase16Encode, decodeTextAddress, runQuery)
import Servant (Handler)
This endpoint emulates the endpoint :
and returns the current output details :
[ { " address " : " 2cWKMJemoBamE3kYCuVLq6pwWwNBJVZmv471Zcb2ok8cH9NjJC4JUkq5rV5ss9ALXWCKN "
, " coin " : 310025
, " index " : 0
There are a number of potential failures and wyat
addressBalance
:: SqlBackend -> CNetwork -> CAddress
-> Handler CAddressBalanceError
addressBalance backend (CNetwork networkName) (CAddress addrTxt) =
case decodeTextAddress addrTxt of
Left _ -> pure $ CABError "Invalid address"
Right _ -> runQuery backend $ do
mNetName <- queryNetworkName
case mNetName of
Nothing -> pure $ CABError "Invalid network name"
Just name -> if name /= networkName
then pure $ CABError "Network name mismatch"
else CABValue <$> queryAddressBalance addrTxt
queryAddressBalance :: MonadIO m => Text -> ReaderT SqlBackend m [CAddressBalance]
queryAddressBalance addrTxt = do
rows <- select . from $ \ (tx `InnerJoin` txOut) -> do
on (tx ^. TxId ==. txOut ^. TxOutTxId)
txOutUnspentP txOut
where_ (txOut ^. TxOutAddress ==. val addrTxt)
pure (txOut ^. TxOutAddress, tx ^. TxHash, txOut ^. TxOutIndex, txOut ^. TxOutValue)
pure $ map convert rows
where
convert :: (Value Text, Value ByteString, Value Word16, Value Word64) -> CAddressBalance
convert (Value addr, Value txhash, Value index, Value coin) =
CAddressBalance
{ cuaAddress = addr
, cuaTxHash = bsBase16Encode txhash
, cuaIndex = index
, cuaCoin = coin
}
|
d26c6b87025a486c592439e735e83c93e9eecf8d52b9975ddc3e3562a7ff3028 | EmileTrotignon/embedded_ocaml_templates | file_handling.ml | type file = File of string | Directory of (string * file array)
let sort_by_int array ~to_int =
ArrayLabels.sort array ~cmp:(fun a b -> compare (to_int a) (to_int b))
let rec print_file file =
match file with
| File f ->
Printf.printf "File %s\n" f
| Directory (s, fa) ->
Printf.printf "File %s (\n" s ;
ArrayLabels.iter fa ~f:print_file ;
print_endline ")"
let path_readdir dirname =
ArrayLabels.map ~f:(Filename.concat dirname) (Sys.readdir dirname)
let rec read_file_or_directory ?(filter = fun _ -> true) ?(sorted = false)
filename =
if not (Sys.file_exists filename) then
Error.fail "file or directory `%s` does not exist" filename ;
let directories_first_sort files =
sort_by_int files ~to_int:(fun f ->
match f with Directory _ -> 0 | File _ -> 1 )
in
match Sys.is_directory filename with
| true ->
Directory
( filename
, let files =
ArrayLabels.map
~f:(fun file ->
match file with
| File name ->
File name
| Directory (name, files) ->
Directory (name, files) )
(CCArrayLabels.filter
~f:(fun file ->
match file with File s -> filter s | Directory _ -> true )
(ArrayLabels.map
~f:(read_file_or_directory ~filter ~sorted)
(ArrayLabels.map ~f:(Filename.concat filename)
(Sys.readdir filename) ) ) )
in
if sorted then directories_first_sort files ;
files )
| false -> (
match Sys.file_exists filename with
| true ->
File filename
| false ->
Printf.eprintf "Unknown file %s\n" filename ;
exit 1 )
| null | https://raw.githubusercontent.com/EmileTrotignon/embedded_ocaml_templates/2baf51d1b07db050fe535615018864e7e850ce38/src/common/file_handling.ml | ocaml | type file = File of string | Directory of (string * file array)
let sort_by_int array ~to_int =
ArrayLabels.sort array ~cmp:(fun a b -> compare (to_int a) (to_int b))
let rec print_file file =
match file with
| File f ->
Printf.printf "File %s\n" f
| Directory (s, fa) ->
Printf.printf "File %s (\n" s ;
ArrayLabels.iter fa ~f:print_file ;
print_endline ")"
let path_readdir dirname =
ArrayLabels.map ~f:(Filename.concat dirname) (Sys.readdir dirname)
let rec read_file_or_directory ?(filter = fun _ -> true) ?(sorted = false)
filename =
if not (Sys.file_exists filename) then
Error.fail "file or directory `%s` does not exist" filename ;
let directories_first_sort files =
sort_by_int files ~to_int:(fun f ->
match f with Directory _ -> 0 | File _ -> 1 )
in
match Sys.is_directory filename with
| true ->
Directory
( filename
, let files =
ArrayLabels.map
~f:(fun file ->
match file with
| File name ->
File name
| Directory (name, files) ->
Directory (name, files) )
(CCArrayLabels.filter
~f:(fun file ->
match file with File s -> filter s | Directory _ -> true )
(ArrayLabels.map
~f:(read_file_or_directory ~filter ~sorted)
(ArrayLabels.map ~f:(Filename.concat filename)
(Sys.readdir filename) ) ) )
in
if sorted then directories_first_sort files ;
files )
| false -> (
match Sys.file_exists filename with
| true ->
File filename
| false ->
Printf.eprintf "Unknown file %s\n" filename ;
exit 1 )
| |
d547392148142deb0b2c65bc4aa43003afe0a0e43e2396104f2cd04915634f67 | PacktWorkshops/The-Clojure-Workshop | core.cljs | (ns ^:figwheel-hooks support-desk.core
(:require [cuerdas.core :as str]
[goog.dom :as gdom]
[jayq.core :as jayq :refer [$]]
[rum.core :as rum]
[support-desk.utils :as utils]))
(def priorities-list [{:title "IE bugs" :priority 2} {:title "404 page" :priority 1} {:title "Forgotten username" :priority 2}
{:title "Login token" :priority 1} {:title "Mobile version" :priority 3} {:title "Load time" :priority 5}])
;; define your app data so that it doesn't get over-written on reload
(defonce app-state (atom {:sort-counter 0
:items (utils/get-sorted-priorities-list (utils/get-priorities-list priorities-list 3))}))
(rum/defc counter [number]
[:div
(str/format "Sorting done: %s times" (utils/get-sort-message number))])
(defn done-button-click [item]
(swap! app-state update-in [:items] #(utils/delete-item-from-list-by-title (:title item) %)))
(rum/defc item [item]
[:li.ui-state-default {:key (:title item)}
(str/format "Priority %s for: %s " (:priority item) (:title item))
[:button.delete
{:on-click #(done-button-click item)}
"Done"]])
(rum/defc items < rum/reactive [num]
[:ul#sortable (vec (for [n num]
(item n)))])
(rum/defc content < rum/reactive []
[:div {}
(items (:items (deref app-state)))
(counter (:sort-counter (rum/react app-state)))])
(defn attrs [a]
(clj->js (sablono.util/html-to-dom-attrs a)))
(defn make-sortable []
(.sortable ($ (str "#sortable"))
(attrs {:stop (utils/handle-sort-finish app-state)})))
(defn get-app-element []
(gdom/getElement "app"))
(defn mount [el]
(rum/mount (content) el))
(defn mount-app-element []
(when-let [el (get-app-element)]
(mount el)))
;; conditionally start your application based on the presence of an "app" element
;; this is particularly helpful for testing this ns without launching the app
(mount-app-element)
(make-sortable)
;; specify reload hook with ^;after-load metadata
(defn ^:after-load on-reload []
(mount-app-element)
(make-sortable)) | null | https://raw.githubusercontent.com/PacktWorkshops/The-Clojure-Workshop/3d309bb0e46a41ce2c93737870433b47ce0ba6a2/Chapter10/Activity10.02/support-desk/src/support_desk/core.cljs | clojure | define your app data so that it doesn't get over-written on reload
conditionally start your application based on the presence of an "app" element
this is particularly helpful for testing this ns without launching the app
specify reload hook with ^;after-load metadata | (ns ^:figwheel-hooks support-desk.core
(:require [cuerdas.core :as str]
[goog.dom :as gdom]
[jayq.core :as jayq :refer [$]]
[rum.core :as rum]
[support-desk.utils :as utils]))
(def priorities-list [{:title "IE bugs" :priority 2} {:title "404 page" :priority 1} {:title "Forgotten username" :priority 2}
{:title "Login token" :priority 1} {:title "Mobile version" :priority 3} {:title "Load time" :priority 5}])
(defonce app-state (atom {:sort-counter 0
:items (utils/get-sorted-priorities-list (utils/get-priorities-list priorities-list 3))}))
(rum/defc counter [number]
[:div
(str/format "Sorting done: %s times" (utils/get-sort-message number))])
(defn done-button-click [item]
(swap! app-state update-in [:items] #(utils/delete-item-from-list-by-title (:title item) %)))
(rum/defc item [item]
[:li.ui-state-default {:key (:title item)}
(str/format "Priority %s for: %s " (:priority item) (:title item))
[:button.delete
{:on-click #(done-button-click item)}
"Done"]])
(rum/defc items < rum/reactive [num]
[:ul#sortable (vec (for [n num]
(item n)))])
(rum/defc content < rum/reactive []
[:div {}
(items (:items (deref app-state)))
(counter (:sort-counter (rum/react app-state)))])
(defn attrs [a]
(clj->js (sablono.util/html-to-dom-attrs a)))
(defn make-sortable []
(.sortable ($ (str "#sortable"))
(attrs {:stop (utils/handle-sort-finish app-state)})))
(defn get-app-element []
(gdom/getElement "app"))
(defn mount [el]
(rum/mount (content) el))
(defn mount-app-element []
(when-let [el (get-app-element)]
(mount el)))
(mount-app-element)
(make-sortable)
(defn ^:after-load on-reload []
(mount-app-element)
(make-sortable)) |
69a796d44d7a7833f01941c73524f2c2cf6c09d4476f877f23bb00beb0bc963a | geophf/1HaskellADay | Solution.hs | module Y2016.M08.D01.Solution where
import Control.List (choose, permute)
import Control.Logic.Frege (assert)
summer :: Int -> Int -> [[Int]]
summer result slotCount =
choose slotCount [1..9] >>= assert ((== result) . sum) >>= permute
-- Note the return value: there may be multiple solutions.
Note also [ 1,2,3 ] for the sum of 6 is a different value than [ 3,2,1 ]
sumThese :: [(Int, Int)]
sumThese = [(4,2), (10,4), (17,2), (12,2), (13,4)]
Answer ( for ( 4,2 ) ) is : [ [ 1,3],[3,1 ] ]
-
* Y2016.M08.D01.Exercise > summer 4 2 ~ > [ [ 1,3],[3,1 ] ]
and , generally :
* Y2016.M08.D01.Exercise > mapM _ ( print . uncurry summer ) sumThese ~ >
[ [ 1,3],[3,1 ] ]
[ [ 1,2,3,4],[1,2,4,3],[1,3,2,4],[1,3,4,2],[1,4,2,3 ] , ...
[ [ 8,9],[9,8 ] ]
[ [ 3,9],[9,3],[4,8],[8,4],[5,7],[7,5 ] ]
[ [ 1,2,3,7],[1,2,7,3],[1,3,2,7],[1,3,7,2],[1,7,2,3 ] , ...
-
*Y2016.M08.D01.Exercise> summer 4 2 ~> [[1,3],[3,1]]
and, generally:
*Y2016.M08.D01.Exercise> mapM_ (print . uncurry summer) sumThese ~>
[[1,3],[3,1]]
[[1,2,3,4],[1,2,4,3],[1,3,2,4],[1,3,4,2],[1,4,2,3],...
[[8,9],[9,8]]
[[3,9],[9,3],[4,8],[8,4],[5,7],[7,5]]
[[1,2,3,7],[1,2,7,3],[1,3,2,7],[1,3,7,2],[1,7,2,3],...
--}
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2016/M08/D01/Solution.hs | haskell | Note the return value: there may be multiple solutions.
} | module Y2016.M08.D01.Solution where
import Control.List (choose, permute)
import Control.Logic.Frege (assert)
summer :: Int -> Int -> [[Int]]
summer result slotCount =
choose slotCount [1..9] >>= assert ((== result) . sum) >>= permute
Note also [ 1,2,3 ] for the sum of 6 is a different value than [ 3,2,1 ]
sumThese :: [(Int, Int)]
sumThese = [(4,2), (10,4), (17,2), (12,2), (13,4)]
Answer ( for ( 4,2 ) ) is : [ [ 1,3],[3,1 ] ]
-
* Y2016.M08.D01.Exercise > summer 4 2 ~ > [ [ 1,3],[3,1 ] ]
and , generally :
* Y2016.M08.D01.Exercise > mapM _ ( print . uncurry summer ) sumThese ~ >
[ [ 1,3],[3,1 ] ]
[ [ 1,2,3,4],[1,2,4,3],[1,3,2,4],[1,3,4,2],[1,4,2,3 ] , ...
[ [ 8,9],[9,8 ] ]
[ [ 3,9],[9,3],[4,8],[8,4],[5,7],[7,5 ] ]
[ [ 1,2,3,7],[1,2,7,3],[1,3,2,7],[1,3,7,2],[1,7,2,3 ] , ...
-
*Y2016.M08.D01.Exercise> summer 4 2 ~> [[1,3],[3,1]]
and, generally:
*Y2016.M08.D01.Exercise> mapM_ (print . uncurry summer) sumThese ~>
[[1,3],[3,1]]
[[1,2,3,4],[1,2,4,3],[1,3,2,4],[1,3,4,2],[1,4,2,3],...
[[8,9],[9,8]]
[[3,9],[9,3],[4,8],[8,4],[5,7],[7,5]]
[[1,2,3,7],[1,2,7,3],[1,3,2,7],[1,3,7,2],[1,7,2,3],...
|
88903d074e27d829649cdc88a7ab4fa9106d47bff9271c6d5188579bf950420b | mfp/oraft | oraft_lwt_conn_wrapper.mli | type -'a conn_wrapper
type simple_wrapper =
Lwt_unix.file_descr -> (Lwt_io.input_channel * Lwt_io.output_channel) Lwt.t
val make_client_conn_wrapper :
simple_wrapper -> [`Outgoing] conn_wrapper
val make_server_conn_wrapper :
incoming:simple_wrapper ->
outgoing:simple_wrapper ->
[`Incoming | `Outgoing] conn_wrapper
val wrap_outgoing_conn :
[> `Outgoing] conn_wrapper -> Lwt_unix.file_descr ->
(Lwt_io.input_channel * Lwt_io.output_channel) Lwt.t
val wrap_incoming_conn :
[> `Incoming] conn_wrapper -> Lwt_unix.file_descr ->
(Lwt_io.input_channel * Lwt_io.output_channel) Lwt.t
val trivial_conn_wrapper :
?buffer_size:int -> unit -> [< `Incoming | `Outgoing] conn_wrapper
| null | https://raw.githubusercontent.com/mfp/oraft/cf7352eb8f1324717d47dc294a058c857ebef9eb/src/oraft_lwt_conn_wrapper.mli | ocaml | type -'a conn_wrapper
type simple_wrapper =
Lwt_unix.file_descr -> (Lwt_io.input_channel * Lwt_io.output_channel) Lwt.t
val make_client_conn_wrapper :
simple_wrapper -> [`Outgoing] conn_wrapper
val make_server_conn_wrapper :
incoming:simple_wrapper ->
outgoing:simple_wrapper ->
[`Incoming | `Outgoing] conn_wrapper
val wrap_outgoing_conn :
[> `Outgoing] conn_wrapper -> Lwt_unix.file_descr ->
(Lwt_io.input_channel * Lwt_io.output_channel) Lwt.t
val wrap_incoming_conn :
[> `Incoming] conn_wrapper -> Lwt_unix.file_descr ->
(Lwt_io.input_channel * Lwt_io.output_channel) Lwt.t
val trivial_conn_wrapper :
?buffer_size:int -> unit -> [< `Incoming | `Outgoing] conn_wrapper
| |
9202e05167c590dabcef6ac3b7a7334b18e02437b3efc05cf1080a136a1545fd | mirleft/ocaml-tls | tls_eio.ml | module Flow = Eio.Flow
exception Tls_alert of Tls.Packet.alert_type
exception Tls_failure of Tls.Engine.failure
module Raw = struct
(* We could replace [`Eof] with [`Error End_of_file] and then use
a regular [result] type here. *)
type t = {
flow : Flow.two_way ;
mutable state : [ `Active of Tls.Engine.state
| `Eof
| `Error of exn ] ;
mutable linger : Cstruct.t option ;
recv_buf : Cstruct.t ;
}
let read_t t cs =
try Flow.single_read t.flow cs
with
| End_of_file as ex ->
t.state <- `Eof;
raise ex
| exn ->
(match t.state with
| `Error _ | `Eof -> ()
| `Active _ -> t.state <- `Error exn) ;
raise exn
let write_t t cs =
try Flow.copy (Flow.cstruct_source [cs]) t.flow
with exn ->
(match t.state with
| `Error _ | `Eof -> ()
| `Active _ -> t.state <- `Error exn) ;
raise exn
let try_write_t t cs =
try write_t t cs
with _ -> Eio.Fiber.check () (* Error is in [t.state] *)
let rec read_react t =
let handle tls buf =
match Tls.Engine.handle_tls tls buf with
| Ok (state', `Response resp, `Data data) ->
let state' = match state' with
| `Ok tls -> `Active tls
| `Eof -> `Eof
| `Alert a -> `Error (Tls_alert a)
in
t.state <- state' ;
Option.iter (try_write_t t) resp;
data
| Error (alert, `Response resp) ->
t.state <- `Error (Tls_failure alert) ;
write_t t resp; read_react t
in
match t.state with
| `Error e -> raise e
| `Eof -> raise End_of_file
| `Active _ ->
let n = read_t t t.recv_buf in
match (t.state, n) with
| (`Active tls, n) -> handle tls (Cstruct.sub t.recv_buf 0 n)
| (`Error e, _) -> raise e
| (`Eof, _) -> raise End_of_file
let rec read t buf =
let writeout res =
let open Cstruct in
let rlen = length res in
let n = min (length buf) rlen in
blit res 0 buf 0 n ;
t.linger <-
(if n < rlen then Some (sub res n (rlen - n)) else None) ;
n in
match t.linger with
| Some res -> writeout res
| None ->
match read_react t with
| None -> read t buf
| Some res -> writeout res
let writev t css =
match t.state with
| `Error err -> raise err
| `Eof -> raise End_of_file
| `Active tls ->
match Tls.Engine.send_application_data tls css with
| Some (tls, tlsdata) ->
( t.state <- `Active tls ; write_t t tlsdata )
| None -> invalid_arg "tls: write: socket not ready"
let write t cs = writev t [cs]
(*
* XXX bad XXX
* This is a point that should particularly be protected from concurrent r/w.
* Doing this before a `t` is returned is safe; redoing it during rekeying is
* not, as the API client already sees the `t` and can mistakenly interleave
* writes while this is in progress.
* *)
let rec drain_handshake t =
let push_linger t mcs =
match (mcs, t.linger) with
| (None, _) -> ()
| (scs, None) -> t.linger <- scs
| (Some cs, Some l) -> t.linger <- Some (Cstruct.append l cs)
in
match t.state with
| `Active tls when not (Tls.Engine.handshake_in_progress tls) ->
t
| _ ->
let cs = read_react t in
push_linger t cs; drain_handshake t
let reneg ?authenticator ?acceptable_cas ?cert ?(drop = true) t =
match t.state with
| `Error err -> raise err
| `Eof -> raise End_of_file
| `Active tls ->
match Tls.Engine.reneg ?authenticator ?acceptable_cas ?cert tls with
| None -> invalid_arg "tls: can't renegotiate"
| Some (tls', buf) ->
if drop then t.linger <- None ;
t.state <- `Active tls' ;
write_t t buf;
ignore (drain_handshake t : t)
let key_update ?request t =
match t.state with
| `Error err -> raise err
| `Eof -> raise End_of_file
| `Active tls ->
match Tls.Engine.key_update ?request tls with
| Error _ -> invalid_arg "tls: can't update key"
| Ok (tls', buf) ->
t.state <- `Active tls' ;
write_t t buf
let close_tls t =
match t.state with
| `Active tls ->
let (_, buf) = Tls.Engine.send_close_notify tls in
t.state <- `Eof ; (* XXX: this looks wrong - we're only trying to close the sending side *)
write_t t buf
| _ -> ()
Not sure if we need to keep both directions open on the underlying flow when closing
one direction at the TLS level .
one direction at the TLS level. *)
let shutdown t = function
| `Send -> close_tls t
| `All -> close_tls t; Flow.shutdown t.flow `All
Not obvious how to do this with TLS , so ignore for now .
let server_of_flow config flow =
drain_handshake {
state = `Active (Tls.Engine.server config) ;
flow = (flow :> Flow.two_way) ;
linger = None ;
recv_buf = Cstruct.create 4096
}
let client_of_flow config ?host flow =
let config' = match host with
| None -> config
| Some host -> Tls.Config.peer config host
in
let t = {
state = `Eof ;
flow = (flow :> Flow.two_way);
linger = None ;
recv_buf = Cstruct.create 4096
} in
let (tls, init) = Tls.Engine.client config' in
let t = { t with state = `Active tls } in
write_t t init;
drain_handshake t
let epoch t =
match t.state with
| `Active tls -> ( match Tls.Engine.epoch tls with
| `InitialEpoch -> assert false (* can never occur! *)
| `Epoch data -> Ok data )
| `Eof -> Error ()
| `Error _ -> Error ()
let copy_from t src =
try
while true do
let buf = Cstruct.create 4096 in
let got = Flow.single_read src buf in
write t (Cstruct.sub buf 0 got)
done
with End_of_file -> ()
end
type t = <
Eio.Flow.two_way;
t : Raw.t;
>
let of_t t =
object
inherit Eio.Flow.two_way
method read_into = Raw.read t
method copy = Raw.copy_from t
method shutdown = Raw.shutdown t
method t = t
end
let server_of_flow config flow = Raw.server_of_flow config flow |> of_t
let client_of_flow config ?host flow = Raw.client_of_flow config ?host flow |> of_t
let reneg ?authenticator ?acceptable_cas ?cert ?drop (t:t) = Raw.reneg ?authenticator ?acceptable_cas ?cert ?drop t#t
let key_update ?request (t:t) = Raw.key_update ?request t#t
let epoch (t:t) = Raw.epoch t#t
let () =
Printexc.register_printer (function
| Tls_alert typ ->
Some ("TLS alert from peer: " ^ Tls.Packet.alert_type_to_string typ)
| Tls_failure f ->
Some ("TLS failure: " ^ Tls.Engine.string_of_failure f)
| _ -> None)
| null | https://raw.githubusercontent.com/mirleft/ocaml-tls/7c3fda0b4321450f48c7b42f64781de16dea0358/eio/tls_eio.ml | ocaml | We could replace [`Eof] with [`Error End_of_file] and then use
a regular [result] type here.
Error is in [t.state]
* XXX bad XXX
* This is a point that should particularly be protected from concurrent r/w.
* Doing this before a `t` is returned is safe; redoing it during rekeying is
* not, as the API client already sees the `t` and can mistakenly interleave
* writes while this is in progress.
*
XXX: this looks wrong - we're only trying to close the sending side
can never occur! | module Flow = Eio.Flow
exception Tls_alert of Tls.Packet.alert_type
exception Tls_failure of Tls.Engine.failure
module Raw = struct
type t = {
flow : Flow.two_way ;
mutable state : [ `Active of Tls.Engine.state
| `Eof
| `Error of exn ] ;
mutable linger : Cstruct.t option ;
recv_buf : Cstruct.t ;
}
let read_t t cs =
try Flow.single_read t.flow cs
with
| End_of_file as ex ->
t.state <- `Eof;
raise ex
| exn ->
(match t.state with
| `Error _ | `Eof -> ()
| `Active _ -> t.state <- `Error exn) ;
raise exn
let write_t t cs =
try Flow.copy (Flow.cstruct_source [cs]) t.flow
with exn ->
(match t.state with
| `Error _ | `Eof -> ()
| `Active _ -> t.state <- `Error exn) ;
raise exn
let try_write_t t cs =
try write_t t cs
let rec read_react t =
let handle tls buf =
match Tls.Engine.handle_tls tls buf with
| Ok (state', `Response resp, `Data data) ->
let state' = match state' with
| `Ok tls -> `Active tls
| `Eof -> `Eof
| `Alert a -> `Error (Tls_alert a)
in
t.state <- state' ;
Option.iter (try_write_t t) resp;
data
| Error (alert, `Response resp) ->
t.state <- `Error (Tls_failure alert) ;
write_t t resp; read_react t
in
match t.state with
| `Error e -> raise e
| `Eof -> raise End_of_file
| `Active _ ->
let n = read_t t t.recv_buf in
match (t.state, n) with
| (`Active tls, n) -> handle tls (Cstruct.sub t.recv_buf 0 n)
| (`Error e, _) -> raise e
| (`Eof, _) -> raise End_of_file
let rec read t buf =
let writeout res =
let open Cstruct in
let rlen = length res in
let n = min (length buf) rlen in
blit res 0 buf 0 n ;
t.linger <-
(if n < rlen then Some (sub res n (rlen - n)) else None) ;
n in
match t.linger with
| Some res -> writeout res
| None ->
match read_react t with
| None -> read t buf
| Some res -> writeout res
let writev t css =
match t.state with
| `Error err -> raise err
| `Eof -> raise End_of_file
| `Active tls ->
match Tls.Engine.send_application_data tls css with
| Some (tls, tlsdata) ->
( t.state <- `Active tls ; write_t t tlsdata )
| None -> invalid_arg "tls: write: socket not ready"
let write t cs = writev t [cs]
let rec drain_handshake t =
let push_linger t mcs =
match (mcs, t.linger) with
| (None, _) -> ()
| (scs, None) -> t.linger <- scs
| (Some cs, Some l) -> t.linger <- Some (Cstruct.append l cs)
in
match t.state with
| `Active tls when not (Tls.Engine.handshake_in_progress tls) ->
t
| _ ->
let cs = read_react t in
push_linger t cs; drain_handshake t
let reneg ?authenticator ?acceptable_cas ?cert ?(drop = true) t =
match t.state with
| `Error err -> raise err
| `Eof -> raise End_of_file
| `Active tls ->
match Tls.Engine.reneg ?authenticator ?acceptable_cas ?cert tls with
| None -> invalid_arg "tls: can't renegotiate"
| Some (tls', buf) ->
if drop then t.linger <- None ;
t.state <- `Active tls' ;
write_t t buf;
ignore (drain_handshake t : t)
let key_update ?request t =
match t.state with
| `Error err -> raise err
| `Eof -> raise End_of_file
| `Active tls ->
match Tls.Engine.key_update ?request tls with
| Error _ -> invalid_arg "tls: can't update key"
| Ok (tls', buf) ->
t.state <- `Active tls' ;
write_t t buf
let close_tls t =
match t.state with
| `Active tls ->
let (_, buf) = Tls.Engine.send_close_notify tls in
write_t t buf
| _ -> ()
Not sure if we need to keep both directions open on the underlying flow when closing
one direction at the TLS level .
one direction at the TLS level. *)
let shutdown t = function
| `Send -> close_tls t
| `All -> close_tls t; Flow.shutdown t.flow `All
Not obvious how to do this with TLS , so ignore for now .
let server_of_flow config flow =
drain_handshake {
state = `Active (Tls.Engine.server config) ;
flow = (flow :> Flow.two_way) ;
linger = None ;
recv_buf = Cstruct.create 4096
}
let client_of_flow config ?host flow =
let config' = match host with
| None -> config
| Some host -> Tls.Config.peer config host
in
let t = {
state = `Eof ;
flow = (flow :> Flow.two_way);
linger = None ;
recv_buf = Cstruct.create 4096
} in
let (tls, init) = Tls.Engine.client config' in
let t = { t with state = `Active tls } in
write_t t init;
drain_handshake t
let epoch t =
match t.state with
| `Active tls -> ( match Tls.Engine.epoch tls with
| `Epoch data -> Ok data )
| `Eof -> Error ()
| `Error _ -> Error ()
let copy_from t src =
try
while true do
let buf = Cstruct.create 4096 in
let got = Flow.single_read src buf in
write t (Cstruct.sub buf 0 got)
done
with End_of_file -> ()
end
type t = <
Eio.Flow.two_way;
t : Raw.t;
>
let of_t t =
object
inherit Eio.Flow.two_way
method read_into = Raw.read t
method copy = Raw.copy_from t
method shutdown = Raw.shutdown t
method t = t
end
let server_of_flow config flow = Raw.server_of_flow config flow |> of_t
let client_of_flow config ?host flow = Raw.client_of_flow config ?host flow |> of_t
let reneg ?authenticator ?acceptable_cas ?cert ?drop (t:t) = Raw.reneg ?authenticator ?acceptable_cas ?cert ?drop t#t
let key_update ?request (t:t) = Raw.key_update ?request t#t
let epoch (t:t) = Raw.epoch t#t
let () =
Printexc.register_printer (function
| Tls_alert typ ->
Some ("TLS alert from peer: " ^ Tls.Packet.alert_type_to_string typ)
| Tls_failure f ->
Some ("TLS failure: " ^ Tls.Engine.string_of_failure f)
| _ -> None)
|
d9cfd5bc3326debe630f18b51f97e01cc266a6f348d92200812599d03ad840ae | zonotope/zanmi | time.clj | (ns zanmi.util.time
(:require [clj-time.core :as time]))
(defn now []
(.toDate (time/now)))
(defn in-hours [hours]
(.toDate (time/plus (time/now) (time/hours hours))))
| null | https://raw.githubusercontent.com/zonotope/zanmi/26e448db2dfb1b5edb3cb6ca9e49d6ff8cac607c/src/zanmi/util/time.clj | clojure | (ns zanmi.util.time
(:require [clj-time.core :as time]))
(defn now []
(.toDate (time/now)))
(defn in-hours [hours]
(.toDate (time/plus (time/now) (time/hours hours))))
| |
2c1ae3b7076be3885ac8ae7514b59c931939be085d792eea5a757511a8a1131b | walang/wa | wa.lisp | ; Wa main.
(in-package :cl-user)
(declaim (optimize (speed 3) (safety 1) (debug 0)))
(load "wc.lisp")
(load "bi.lisp")
(defun main ()
(let ((file (cadr *posix-argv*)))
(if file
(with-wa-readtable (wa-load file))
(with-wa-readtable (wa-repl)))))
(loop repeat 10 do (gc :full t))
(save-lisp-and-die "wa" :toplevel #'main :executable t)
| null | https://raw.githubusercontent.com/walang/wa/74ac218cd222c0db1176775965656fa75b09d5f4/src/wa/wa.lisp | lisp | Wa main. |
(in-package :cl-user)
(declaim (optimize (speed 3) (safety 1) (debug 0)))
(load "wc.lisp")
(load "bi.lisp")
(defun main ()
(let ((file (cadr *posix-argv*)))
(if file
(with-wa-readtable (wa-load file))
(with-wa-readtable (wa-repl)))))
(loop repeat 10 do (gc :full t))
(save-lisp-and-die "wa" :toplevel #'main :executable t)
|
4e88a423a4089754467329576b985dd4144806d6ef508b062e38b0f278b5ceab | zenspider/schemers | exercise.4.47.scm | #!/usr/bin/env csi -s
(require rackunit)
Exercise 4.47
suggests that , since a verb phrase
;; is either a verb or a verb phrase followed by a prepositional
;; phrase, it would be much more straightforward to define the
;; procedure `parse-verb-phrase' as follows (and similarly for noun
;; phrases):
;;
;; (define (parse-verb-phrase)
;; (amb (parse-word verbs)
;; (list 'verb-phrase
;; (parse-verb-phrase)
;; (parse-prepositional-phrase))))
;;
;; Does this work? Does the program's behavior change if we
;; interchange the order of expressions in the `amb'
| null | https://raw.githubusercontent.com/zenspider/schemers/2939ca553ac79013a4c3aaaec812c1bad3933b16/sicp/ch_4/exercise.4.47.scm | scheme | is either a verb or a verb phrase followed by a prepositional
phrase, it would be much more straightforward to define the
procedure `parse-verb-phrase' as follows (and similarly for noun
phrases):
(define (parse-verb-phrase)
(amb (parse-word verbs)
(list 'verb-phrase
(parse-verb-phrase)
(parse-prepositional-phrase))))
Does this work? Does the program's behavior change if we
interchange the order of expressions in the `amb' | #!/usr/bin/env csi -s
(require rackunit)
Exercise 4.47
suggests that , since a verb phrase
|
87d2e069faacfad5ff53e652ad0c71af6abd0b99af78a6aee51971cf601d9b89 | luminus-framework/examples | middleware.clj | (ns multi-client-ws-aleph.middleware
(:require
[multi-client-ws-aleph.env :refer [defaults]]
[cheshire.generate :as cheshire]
[cognitect.transit :as transit]
[clojure.tools.logging :as log]
[multi-client-ws-aleph.layout :refer [error-page]]
[ring.middleware.anti-forgery :refer [wrap-anti-forgery]]
[multi-client-ws-aleph.middleware.formats :as formats]
[muuntaja.middleware :refer [wrap-format wrap-params]]
[multi-client-ws-aleph.config :refer [env]]
[ring-ttl-session.core :refer [ttl-memory-store]]
[ring.middleware.defaults :refer [site-defaults wrap-defaults]])
(:import
))
(defn wrap-internal-error [handler]
(fn [req]
(try
(handler req)
(catch Throwable t
(log/error t (.getMessage t))
(error-page {:status 500
:title "Something very bad has happened!"
:message "We've dispatched a team of highly trained gnomes to take care of the problem."})))))
(defn wrap-csrf [handler]
(wrap-anti-forgery
handler
{:error-response
(error-page
{:status 403
:title "Invalid anti-forgery token"})}))
(defn wrap-formats [handler]
(let [wrapped (-> handler wrap-params (wrap-format formats/instance))]
(fn [request]
disable wrap - formats for websockets
;; since they're not compatible with this middleware
((if (:websocket? request) handler wrapped) request))))
(defn wrap-base [handler]
(-> ((:middleware defaults) handler)
(wrap-defaults
(-> site-defaults
(assoc-in [:security :anti-forgery] false)
(assoc-in [:session :store] (ttl-memory-store (* 60 30)))))
wrap-internal-error))
| null | https://raw.githubusercontent.com/luminus-framework/examples/cbeee2fef8f457a6a6bac2cae0b640370ae2499b/multi-client-ws-aleph/src/clj/multi_client_ws_aleph/middleware.clj | clojure | since they're not compatible with this middleware | (ns multi-client-ws-aleph.middleware
(:require
[multi-client-ws-aleph.env :refer [defaults]]
[cheshire.generate :as cheshire]
[cognitect.transit :as transit]
[clojure.tools.logging :as log]
[multi-client-ws-aleph.layout :refer [error-page]]
[ring.middleware.anti-forgery :refer [wrap-anti-forgery]]
[multi-client-ws-aleph.middleware.formats :as formats]
[muuntaja.middleware :refer [wrap-format wrap-params]]
[multi-client-ws-aleph.config :refer [env]]
[ring-ttl-session.core :refer [ttl-memory-store]]
[ring.middleware.defaults :refer [site-defaults wrap-defaults]])
(:import
))
(defn wrap-internal-error [handler]
(fn [req]
(try
(handler req)
(catch Throwable t
(log/error t (.getMessage t))
(error-page {:status 500
:title "Something very bad has happened!"
:message "We've dispatched a team of highly trained gnomes to take care of the problem."})))))
(defn wrap-csrf [handler]
(wrap-anti-forgery
handler
{:error-response
(error-page
{:status 403
:title "Invalid anti-forgery token"})}))
(defn wrap-formats [handler]
(let [wrapped (-> handler wrap-params (wrap-format formats/instance))]
(fn [request]
disable wrap - formats for websockets
((if (:websocket? request) handler wrapped) request))))
(defn wrap-base [handler]
(-> ((:middleware defaults) handler)
(wrap-defaults
(-> site-defaults
(assoc-in [:security :anti-forgery] false)
(assoc-in [:session :store] (ttl-memory-store (* 60 30)))))
wrap-internal-error))
|
25070c64998e745c3e0ae7c7f66ec42aae225e17f4328917d86daf945521fe9e | xh4/web-toolkit | handler.lisp | (in-package :http)
(defclass handler-class (standard-class)
((function
:initarg :function
:initform nil)
(%function
:initform nil
:accessor handler-function)
(function-lambda-list
:initarg :function-lambda-list
:initform nil
:accessor handler-function-lambda-list)))
(defmethod validate-superclass ((class handler-class) (super-class standard-class))
t)
(defun check-handler-function (function)
(typecase function
(null "Missing handler function body")
(symbol (if (ignore-errors (symbol-function function))
(check-handler-function (symbol-function function))
(error "Symbol not associate with function")))
((or function cl-cont::funcallable/cc)
(let ((function-lambda-list (function-lambda-list function)))
(check-handler-function-lambda-list function-lambda-list)))
(t (error "Bad handler function ~A" function))))
(defun check-handler-function-lambda-list (lambda-list)
(when (> (length lambda-list) 2)
(error "Bad handler function lambda list ~A" lambda-list)))
(defmethod shared-initialize :after ((class handler-class) slot-names &key function &allow-other-keys)
(declare (ignore slot-names))
(if function
(progn (setf function (eval (car function)))
(check-handler-function function)
(let ((function-lambda-list (function-lambda-list function)))
(setf (slot-value class '%function) function)
(setf (slot-value class 'function-lambda-list) function-lambda-list)))
(progn (setf (slot-value class '%function) nil
(slot-value class 'function-lambda-list) nil))))
(eval-when (:compile-toplevel :load-toplevel :execute)
(defvar *request* nil)
(defvar *response* nil)
(defvar *handler* nil)
(defvar *next-handlers* nil)
(defmacro call-next-handler ()
(with-gensyms (result)
`(if *next-handlers*
(let ((*handler* (first *next-handlers*))
(*next-handlers* (rest *next-handlers*)))
(let ((,result (call-handler *handler* *request*)))
(when (or (typep ,result 'response)
(typep ,result 'entity))
(setf *response* ,result))
*response*))
*response*)))
(defmacro next-handler ()
`(first *next-handlers*))
(define-condition abort-handler () ())
(defmacro abort-handler ()
`(signal 'abort-handler)))
;; Mapping from handler names (class names of handlers) to handler instances
(eval-when (:compile-toplevel :load-toplevel :execute)
(defvar *static-handlers* (make-hash-table)))
(defclass handler ()
()
(:metaclass handler-class)
(:function (lambda (request) (call-next-handler))))
(defmethod handler-function ((handler handler))
(handler-function (class-of handler)))
(defmethod (setf handler-function) (function (handler handler))
(check-handler-function function)
(setf (handler-function (class-of handler)) function))
(defmethod handler-function-lambda-list ((handler handler))
(handler-function-lambda-list (class-of handler)))
(defmethod (setf handler-function-lambda-list) (lambda-list (handler handler))
(check-handler-function-lambda-list lambda-list)
(setf (handler-function-lambda-list (class-of handler)) lambda-list))
(defmacro define-handler (handler-name superclasses slots &rest options)
(unless (find 'application-handler superclasses)
(appendf superclasses '(application-handler)))
(let ((function (second (find :function options :key 'first)))
(metaclass (second (find :metaclass options :key 'first)))
(instanize (if-let ((option (find :instanize options :key 'first)))
(second option)
t)))
(let ((options (remove-if (lambda (options)
(member (first options) '(:instanize)))
options)))
(unless metaclass
(rewrite-class-option options :metaclass handler-class))
`(progn
(eval-when (:compile-toplevel :load-toplevel :execute)
(defclass ,handler-name ,superclasses
,slots
,@options)
,@(if instanize
`((defvar ,handler-name
(make-instance ',handler-name))
(setf (gethash ',handler-name *static-handlers*) ,handler-name)
,handler-name)
`((remhash ',handler-name *static-handlers*)
(find-class ',handler-name))))))))
(defgeneric handler-class-precedence-list (handler-class)
(:method ((handler-class handler-class))
(compute-handler-class-precedence-list handler-class))
(:method ((handler handler))
(compute-handler-class-precedence-list (class-of handler))))
(defun compute-handler-class-precedence-list (handler-class)
(let ((handler-classes (compute-class-precedence-list handler-class))
(root-handler-class (find-class 'handler)))
(remove-if-not
(lambda (handler-class)
(subclassp handler-class root-handler-class))
handler-classes)))
(defun make-handler (handler-class)
(make-instance handler-class))
(defun compute-handler-precedence-list (handler)
(let ((handler-classes (handler-class-precedence-list handler)))
(let ((handlers (loop for handler-class in handler-classes
for handler-instance = (gethash (class-name handler-class)
*static-handlers*)
when handler-instance collect handler-instance
else collect (make-handler handler-class))))
(typecase handler
(handler (cons handler (rest handlers)))
(t handlers)))))
(defun invoke-handler (handler request)
(check-type handler handler)
(let ((*request* request)
(*response* (make-instance 'response))
(*next-handlers* (reverse (compute-handler-precedence-list handler))))
(block nil
(handler-bind ((abort-handler
(lambda (c)
(declare (ignore c))
(return *response*)))
(redirect
(lambda (c)
(with-slots (location status) c
(reply (status status))
(reply (header "Location" (case location
;; TODO: handle referer missing condition
(:back (header-field-value
(find-header-field
"Referer"
request)))
(t location))
"Content-Length" 0))
(return)))))
(call-next-handler)))
*response*))
(defun call-handler (handler request)
(check-type handler handler)
(when-let ((function (handler-function handler)))
(let ((function-lambda-list (handler-function-lambda-list handler)))
(cond
((= 0 (length function-lambda-list))
(funcall function))
((= 1 (length function-lambda-list))
(funcall function request))
((= 2 (length function-lambda-list))
(funcall function handler request))))))
(defun handler-form (form)
(typecase form
(symbol form)
(list (let ((object (eval form)))
(typecase object
(handler object)
((or function funcallable/cc)
(let ((function object)
(handler (make-instance 'anonymous-handler)))
(check-handler-function function)
(setf (handler-function handler) function
(handler-function-lambda-list handler) (function-lambda-list function))
handler))
(t "~A evaluate to ~A which is not a valid handler form" form object))))
(t (error "Bad handler form ~A" form))))
| null | https://raw.githubusercontent.com/xh4/web-toolkit/e510d44a25b36ca8acd66734ed1ee9f5fe6ecd09/http/handler.lisp | lisp | Mapping from handler names (class names of handlers) to handler instances
TODO: handle referer missing condition | (in-package :http)
(defclass handler-class (standard-class)
((function
:initarg :function
:initform nil)
(%function
:initform nil
:accessor handler-function)
(function-lambda-list
:initarg :function-lambda-list
:initform nil
:accessor handler-function-lambda-list)))
(defmethod validate-superclass ((class handler-class) (super-class standard-class))
t)
(defun check-handler-function (function)
(typecase function
(null "Missing handler function body")
(symbol (if (ignore-errors (symbol-function function))
(check-handler-function (symbol-function function))
(error "Symbol not associate with function")))
((or function cl-cont::funcallable/cc)
(let ((function-lambda-list (function-lambda-list function)))
(check-handler-function-lambda-list function-lambda-list)))
(t (error "Bad handler function ~A" function))))
(defun check-handler-function-lambda-list (lambda-list)
(when (> (length lambda-list) 2)
(error "Bad handler function lambda list ~A" lambda-list)))
(defmethod shared-initialize :after ((class handler-class) slot-names &key function &allow-other-keys)
(declare (ignore slot-names))
(if function
(progn (setf function (eval (car function)))
(check-handler-function function)
(let ((function-lambda-list (function-lambda-list function)))
(setf (slot-value class '%function) function)
(setf (slot-value class 'function-lambda-list) function-lambda-list)))
(progn (setf (slot-value class '%function) nil
(slot-value class 'function-lambda-list) nil))))
(eval-when (:compile-toplevel :load-toplevel :execute)
(defvar *request* nil)
(defvar *response* nil)
(defvar *handler* nil)
(defvar *next-handlers* nil)
(defmacro call-next-handler ()
(with-gensyms (result)
`(if *next-handlers*
(let ((*handler* (first *next-handlers*))
(*next-handlers* (rest *next-handlers*)))
(let ((,result (call-handler *handler* *request*)))
(when (or (typep ,result 'response)
(typep ,result 'entity))
(setf *response* ,result))
*response*))
*response*)))
(defmacro next-handler ()
`(first *next-handlers*))
(define-condition abort-handler () ())
(defmacro abort-handler ()
`(signal 'abort-handler)))
(eval-when (:compile-toplevel :load-toplevel :execute)
(defvar *static-handlers* (make-hash-table)))
(defclass handler ()
()
(:metaclass handler-class)
(:function (lambda (request) (call-next-handler))))
(defmethod handler-function ((handler handler))
(handler-function (class-of handler)))
(defmethod (setf handler-function) (function (handler handler))
(check-handler-function function)
(setf (handler-function (class-of handler)) function))
(defmethod handler-function-lambda-list ((handler handler))
(handler-function-lambda-list (class-of handler)))
(defmethod (setf handler-function-lambda-list) (lambda-list (handler handler))
(check-handler-function-lambda-list lambda-list)
(setf (handler-function-lambda-list (class-of handler)) lambda-list))
(defmacro define-handler (handler-name superclasses slots &rest options)
(unless (find 'application-handler superclasses)
(appendf superclasses '(application-handler)))
(let ((function (second (find :function options :key 'first)))
(metaclass (second (find :metaclass options :key 'first)))
(instanize (if-let ((option (find :instanize options :key 'first)))
(second option)
t)))
(let ((options (remove-if (lambda (options)
(member (first options) '(:instanize)))
options)))
(unless metaclass
(rewrite-class-option options :metaclass handler-class))
`(progn
(eval-when (:compile-toplevel :load-toplevel :execute)
(defclass ,handler-name ,superclasses
,slots
,@options)
,@(if instanize
`((defvar ,handler-name
(make-instance ',handler-name))
(setf (gethash ',handler-name *static-handlers*) ,handler-name)
,handler-name)
`((remhash ',handler-name *static-handlers*)
(find-class ',handler-name))))))))
(defgeneric handler-class-precedence-list (handler-class)
(:method ((handler-class handler-class))
(compute-handler-class-precedence-list handler-class))
(:method ((handler handler))
(compute-handler-class-precedence-list (class-of handler))))
(defun compute-handler-class-precedence-list (handler-class)
(let ((handler-classes (compute-class-precedence-list handler-class))
(root-handler-class (find-class 'handler)))
(remove-if-not
(lambda (handler-class)
(subclassp handler-class root-handler-class))
handler-classes)))
(defun make-handler (handler-class)
(make-instance handler-class))
(defun compute-handler-precedence-list (handler)
(let ((handler-classes (handler-class-precedence-list handler)))
(let ((handlers (loop for handler-class in handler-classes
for handler-instance = (gethash (class-name handler-class)
*static-handlers*)
when handler-instance collect handler-instance
else collect (make-handler handler-class))))
(typecase handler
(handler (cons handler (rest handlers)))
(t handlers)))))
(defun invoke-handler (handler request)
(check-type handler handler)
(let ((*request* request)
(*response* (make-instance 'response))
(*next-handlers* (reverse (compute-handler-precedence-list handler))))
(block nil
(handler-bind ((abort-handler
(lambda (c)
(declare (ignore c))
(return *response*)))
(redirect
(lambda (c)
(with-slots (location status) c
(reply (status status))
(reply (header "Location" (case location
(:back (header-field-value
(find-header-field
"Referer"
request)))
(t location))
"Content-Length" 0))
(return)))))
(call-next-handler)))
*response*))
(defun call-handler (handler request)
(check-type handler handler)
(when-let ((function (handler-function handler)))
(let ((function-lambda-list (handler-function-lambda-list handler)))
(cond
((= 0 (length function-lambda-list))
(funcall function))
((= 1 (length function-lambda-list))
(funcall function request))
((= 2 (length function-lambda-list))
(funcall function handler request))))))
(defun handler-form (form)
(typecase form
(symbol form)
(list (let ((object (eval form)))
(typecase object
(handler object)
((or function funcallable/cc)
(let ((function object)
(handler (make-instance 'anonymous-handler)))
(check-handler-function function)
(setf (handler-function handler) function
(handler-function-lambda-list handler) (function-lambda-list function))
handler))
(t "~A evaluate to ~A which is not a valid handler form" form object))))
(t (error "Bad handler form ~A" form))))
|
38549c8a4476c4b8074b23b9f5e6743a1fd726f909f6bbac86af6831dc81ae0a | footprintanalytics/footprint-web | h2.clj | (ns metabase.cmd.copy.h2
"Functions for working with H2 databases shared between the `load-from-h2` and `dump-to-h2` commands."
(:require [clojure.java.io :as io]
[clojure.string :as str]
[clojure.tools.logging :as log]
[metabase.db.data-source :as mdb.data-source]
[metabase.util :as u]
[metabase.util.i18n :refer [trs]]))
(defn- add-file-prefix-if-needed [h2-filename]
(letfn [(prepend-protocol [s]
(str "file:" (.getAbsolutePath (io/file s))))
(remove-extension [s]
(str/replace s #"\.mv\.db$" ""))]
(cond-> h2-filename
(not (str/starts-with? h2-filename "file:"))
prepend-protocol
(str/ends-with? h2-filename ".mv.db")
remove-extension)))
(defn h2-data-source
"Create a [[javax.sql.DataSource]] for the H2 database with `h2-filename`."
^javax.sql.DataSource [h2-filename]
(let [h2-filename (add-file-prefix-if-needed h2-filename)]
(mdb.data-source/broken-out-details->DataSource :h2 {:db h2-filename})))
(defn delete-existing-h2-database-files!
"Delete existing h2 database files."
[h2-filename]
(doseq [filename [h2-filename
(str h2-filename ".mv.db")]]
(when (.exists (io/file filename))
(io/delete-file filename)
(log/warn (u/format-color 'red (trs "Output H2 database already exists: %s, removing.") filename)))))
| null | https://raw.githubusercontent.com/footprintanalytics/footprint-web/d3090d943dd9fcea493c236f79e7ef8a36ae17fc/src/metabase/cmd/copy/h2.clj | clojure | (ns metabase.cmd.copy.h2
"Functions for working with H2 databases shared between the `load-from-h2` and `dump-to-h2` commands."
(:require [clojure.java.io :as io]
[clojure.string :as str]
[clojure.tools.logging :as log]
[metabase.db.data-source :as mdb.data-source]
[metabase.util :as u]
[metabase.util.i18n :refer [trs]]))
(defn- add-file-prefix-if-needed [h2-filename]
(letfn [(prepend-protocol [s]
(str "file:" (.getAbsolutePath (io/file s))))
(remove-extension [s]
(str/replace s #"\.mv\.db$" ""))]
(cond-> h2-filename
(not (str/starts-with? h2-filename "file:"))
prepend-protocol
(str/ends-with? h2-filename ".mv.db")
remove-extension)))
(defn h2-data-source
"Create a [[javax.sql.DataSource]] for the H2 database with `h2-filename`."
^javax.sql.DataSource [h2-filename]
(let [h2-filename (add-file-prefix-if-needed h2-filename)]
(mdb.data-source/broken-out-details->DataSource :h2 {:db h2-filename})))
(defn delete-existing-h2-database-files!
"Delete existing h2 database files."
[h2-filename]
(doseq [filename [h2-filename
(str h2-filename ".mv.db")]]
(when (.exists (io/file filename))
(io/delete-file filename)
(log/warn (u/format-color 'red (trs "Output H2 database already exists: %s, removing.") filename)))))
| |
0f54159043094d0684c2cf54cbf84b366817321288c920b112d1a6a547e5b5d9 | g-andrade/erlchronos | erlchronos_tests.erl | % vim: set expandtab softtabstop=2 shiftwidth=4:
-module(erlchronos_tests).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-define(IN_RANGE(X, Y, Range), (abs((X) - (Y)) =< Range)).
erlchronos_test_() ->
{foreach,
fun() ->
%error_logger:tty(false)
ok
end,
fun(_) ->
%error_logger:tty(true)
ok
end,
[
{<<"Basic tick works">>,
fun basic_tick_works/0
},
{<<"Multiple ticks work">>,
fun multiple_ticks_work/0
},
{<<"A whole lot of ticks work">>,
fun many_ticks_work/0
},
{<<"gen_server wrapping works">>,
fun gen_server_wrapping_works/0
},
{<<"Moderate message flood is tolerable">>,
fun moderate_message_flood_is_tolerable/0
},
{<<"High message flood is tolerable">>,
fun high_message_flood_is_tolerable/0
}
]
}.
basic_tick_works() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
TickDuration = 100,
Ticks = [{basic_tick, TickDuration}],
NapTime = 1000,
{ok, Pid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, NapTime),
timer:sleep(NapTime),
{ok, Counters} = gen_server:call(Pid, get_counters),
{ok, Timestamps} = gen_server:call(Pid, get_timestamps),
exit(Pid, normal),
BasicTickCount = dict:fetch(basic_tick, Counters),
?assert( ?IN_RANGE(BasicTickCount, NapTime div TickDuration, 1) ),
TickTimestamps = dict:fetch(basic_tick, Timestamps),
Offsets = calc_offsets(TickTimestamps),
AvgOffset = avg(Offsets),
Percentile90 = percentile(Offsets, 0.90),
Percentile95 = percentile(Offsets, 0.95),
Percentile99 = percentile(Offsets, 0.99),
?assert( ?IN_RANGE(AvgOffset, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile90, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile95, TickDuration, 2) ),
?assert( ?IN_RANGE(Percentile99, TickDuration, 5) ),
ok.
multiple_ticks_work() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
Ticks = [{tick1, 25}, {tick2, 33}, {tick3, 231}],
NapTime = 1000,
{ok, Pid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, NapTime),
timer:sleep(NapTime),
{ok, Counters} = gen_server:call(Pid, get_counters),
{ok, Timestamps} = gen_server:call(Pid, get_timestamps),
exit(Pid, normal),
Tolerance = 1,
ok = lists:foreach(
fun ({TickId, TickDuration}) ->
Tolerance = trunc((NapTime div Duration ) * 0.1 ) ,
Count = dict:fetch(TickId, Counters),
?assert( ?IN_RANGE(Count, NapTime div TickDuration, Tolerance) ),
TickTimestamps = dict:fetch(TickId, Timestamps),
Offsets = calc_offsets(TickTimestamps),
AvgOffset = avg(Offsets),
Percentile90 = percentile(Offsets, 0.90),
Percentile95 = percentile(Offsets, 0.95),
Percentile99 = percentile(Offsets, 0.99),
?assert( ?IN_RANGE(AvgOffset, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile90, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile95, TickDuration, 2) ),
?assert( ?IN_RANGE(Percentile99, TickDuration, 10) )
end,
Ticks).
many_ticks_work() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
Ticks = [{{tick, N}, N} || N <- lists:seq(1, 100)],
NapTime = 1000,
{ok, Pid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, NapTime),
timer:sleep(NapTime),
{ok, Counters} = gen_server:call(Pid, get_counters),
{ok, Timestamps} = gen_server:call(Pid, get_timestamps),
exit(Pid, normal),
Tolerance = 1,
ok = lists:foreach(
fun ({{tick, TickDuration}=TickId, TickDuration}) ->
Tolerance = trunc((NapTime div Duration ) * 0.1 ) ,
Count = dict:fetch(TickId, Counters),
?assert( ?IN_RANGE(Count, NapTime div TickDuration, Tolerance) ),
TickTimestamps = dict:fetch(TickId, Timestamps),
Offsets = calc_offsets(TickTimestamps),
AvgOffset = avg(Offsets),
Percentile90 = percentile(Offsets, 0.90),
Percentile95 = percentile(Offsets, 0.95),
Percentile99 = percentile(Offsets, 0.99),
?assert( ?IN_RANGE(AvgOffset, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile90, TickDuration, 2) ),
?assert( ?IN_RANGE(Percentile95, TickDuration, 3) ),
?assert( ?IN_RANGE(Percentile99, TickDuration, 10) )
end,
Ticks).
gen_server_wrapping_works() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
Ticks = [{tick, 10}],
{ok, TickedPid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, 0),
{ok, TicklessPid} = erlchronos_ticked_gen_serv:start_link(SpawnOpts, [], 0),
?assertMatch({ok, _}, gen_server:call(TickedPid, get_counters)),
?assertMatch({ok, _}, gen_server:call(TicklessPid, get_counters)),
?assertMatch({'EXIT',{timeout,{gen_server,call,[_|_]}}},
catch gen_server:call(TickedPid, get_nothing, 10)),
?assertMatch({'EXIT',{timeout,{gen_server,call,[_|_]}}},
catch gen_server:call(TicklessPid, get_nothing, 10)),
ok = gen_server:cast(TickedPid, 'this is a cast'),
ok = gen_server:cast(TicklessPid, 'this is a cast'),
timer:sleep(1000),
TickedPid ! 'this is an info',
TicklessPid ! 'this is an info',
{ok, {TickedPidCalls, TickedPidCasts, TickedPidInfos}} = gen_server:call(TickedPid, get_histories),
{ok, {TicklessPidCalls, TicklessPidCasts, TicklessPidInfos}} = gen_server:call(TickedPid, get_histories),
ok = gen_server:cast(TickedPid, stop),
ok = gen_server:cast(TicklessPid, stop),
?assertEqual(TickedPidCalls, lists:reverse([get_counters, get_nothing])),
?assertEqual(TickedPidCalls, TicklessPidCalls),
?assertEqual(TickedPidCasts, ['this is a cast']),
?assertEqual(TickedPidCasts, TicklessPidCasts),
?assertEqual(TickedPidInfos, ['this is an info']),
?assertEqual(TickedPidInfos, TicklessPidInfos),
ok.
moderate_message_flood_is_tolerable() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
TickDuration = 10,
Ticks = [{tick, TickDuration}],
FloodDuration = 2000,
{ok, Pid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, FloodDuration),
ok = flood(Pid, 500, timestamp_ms() + FloodDuration),
{ok, Counters} = gen_server:call(Pid, get_counters),
{ok, Timestamps} = gen_server:call(Pid, get_timestamps),
exit(Pid, normal),
Count = dict:fetch(tick, Counters),
Tolerance = 1,
?assert( ?IN_RANGE(Count, FloodDuration div TickDuration, Tolerance) ),
TickTimestamps = dict:fetch(tick, Timestamps),
Offsets = calc_offsets(TickTimestamps),
AvgOffset = avg(Offsets),
Percentile90 = percentile(Offsets, 0.90),
Percentile95 = percentile(Offsets, 0.95),
Percentile99 = percentile(Offsets, 0.99),
?assert( ?IN_RANGE(AvgOffset, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile90, TickDuration, 5) ),
?assert( ?IN_RANGE(Percentile95, TickDuration, 10) ),
?assert( ?IN_RANGE(Percentile99, TickDuration, 15) ),
ok.
high_message_flood_is_tolerable() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
TickDuration = 10,
Ticks = [{tick, TickDuration}],
FloodDuration = 4000,
{ok, Pid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, FloodDuration),
ok = flood(Pid, 5000, timestamp_ms() + FloodDuration),
{ok, Counters} = gen_server:call(Pid, get_counters),
{ok, Timestamps} = gen_server:call(Pid, get_timestamps),
exit(Pid, normal),
Count = dict:fetch(tick, Counters),
Tolerance = 1,
?assert( ?IN_RANGE(Count, FloodDuration div TickDuration, Tolerance) ),
TickTimestamps = dict:fetch(tick, Timestamps),
Offsets = calc_offsets(TickTimestamps),
AvgOffset = avg(Offsets),
Percentile90 = percentile(Offsets, 0.90),
Percentile95 = percentile(Offsets, 0.95),
Percentile99 = percentile(Offsets, 0.99),
?assert( ?IN_RANGE(AvgOffset, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile90, TickDuration, 5) ),
?assert( ?IN_RANGE(Percentile95, TickDuration, 10) ),
?assert( ?IN_RANGE(Percentile99, TickDuration, 20) ),
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
flood(Pid, FloodLevel, Deadline) ->
SchedulersOnline = erlang:system_info(schedulers_online),
[spawn(fun () -> flood_exec(Pid, FloodLevel, Deadline) end)
|| _ <- lists:seq(1, SchedulersOnline - 1)],
flood_exec(Pid, FloodLevel, Deadline).
flood_exec(Pid, FloodLevel, Deadline) ->
case timestamp_ms() >= Deadline of
true -> ok;
false ->
{message_queue_len, QueueSize} = erlang:process_info(Pid, message_queue_len),
case QueueSize < FloodLevel of
true -> Pid ! "o hai ther";
false -> ok
end,
flood_exec(Pid, FloodLevel, Deadline)
end.
timestamp_ms() ->
{MegaSecs, Secs, MicroSecs} = os:timestamp(),
((((MegaSecs * 1000000) + Secs) * 1000000) + MicroSecs) div 1000.
calc_offsets(L) when is_list(L), length(L) > 1 ->
Zipped = lists:zip(lists:sublist(L, length(L) - 1),
lists:nthtail(1, L)),
[B - A || {A, B} <- Zipped].
avg(L) when is_list(L), length(L) > 0 ->
lists:sum(L) / length(L).
percentile(L, V) ->
N = length(L),
SortedL = lists:sort(L),
Index = V * N,
TruncatedIndex = trunc(Index),
case TruncatedIndex == Index of
true ->
[X, Y] = lists:sublist(SortedL, TruncatedIndex, 2),
(X + Y) / 2.0;
false ->
RoundedIndex = TruncatedIndex + 1,
lists:nth(RoundedIndex, SortedL)
end.
-endif. % ifdef(TEST)
| null | https://raw.githubusercontent.com/g-andrade/erlchronos/45bd22a78c0a27d78be44b2b07f330511747e7cd/test/erlchronos_tests.erl | erlang | vim: set expandtab softtabstop=2 shiftwidth=4:
error_logger:tty(false)
error_logger:tty(true)
ifdef(TEST) | -module(erlchronos_tests).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-define(IN_RANGE(X, Y, Range), (abs((X) - (Y)) =< Range)).
erlchronos_test_() ->
{foreach,
fun() ->
ok
end,
fun(_) ->
ok
end,
[
{<<"Basic tick works">>,
fun basic_tick_works/0
},
{<<"Multiple ticks work">>,
fun multiple_ticks_work/0
},
{<<"A whole lot of ticks work">>,
fun many_ticks_work/0
},
{<<"gen_server wrapping works">>,
fun gen_server_wrapping_works/0
},
{<<"Moderate message flood is tolerable">>,
fun moderate_message_flood_is_tolerable/0
},
{<<"High message flood is tolerable">>,
fun high_message_flood_is_tolerable/0
}
]
}.
basic_tick_works() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
TickDuration = 100,
Ticks = [{basic_tick, TickDuration}],
NapTime = 1000,
{ok, Pid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, NapTime),
timer:sleep(NapTime),
{ok, Counters} = gen_server:call(Pid, get_counters),
{ok, Timestamps} = gen_server:call(Pid, get_timestamps),
exit(Pid, normal),
BasicTickCount = dict:fetch(basic_tick, Counters),
?assert( ?IN_RANGE(BasicTickCount, NapTime div TickDuration, 1) ),
TickTimestamps = dict:fetch(basic_tick, Timestamps),
Offsets = calc_offsets(TickTimestamps),
AvgOffset = avg(Offsets),
Percentile90 = percentile(Offsets, 0.90),
Percentile95 = percentile(Offsets, 0.95),
Percentile99 = percentile(Offsets, 0.99),
?assert( ?IN_RANGE(AvgOffset, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile90, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile95, TickDuration, 2) ),
?assert( ?IN_RANGE(Percentile99, TickDuration, 5) ),
ok.
multiple_ticks_work() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
Ticks = [{tick1, 25}, {tick2, 33}, {tick3, 231}],
NapTime = 1000,
{ok, Pid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, NapTime),
timer:sleep(NapTime),
{ok, Counters} = gen_server:call(Pid, get_counters),
{ok, Timestamps} = gen_server:call(Pid, get_timestamps),
exit(Pid, normal),
Tolerance = 1,
ok = lists:foreach(
fun ({TickId, TickDuration}) ->
Tolerance = trunc((NapTime div Duration ) * 0.1 ) ,
Count = dict:fetch(TickId, Counters),
?assert( ?IN_RANGE(Count, NapTime div TickDuration, Tolerance) ),
TickTimestamps = dict:fetch(TickId, Timestamps),
Offsets = calc_offsets(TickTimestamps),
AvgOffset = avg(Offsets),
Percentile90 = percentile(Offsets, 0.90),
Percentile95 = percentile(Offsets, 0.95),
Percentile99 = percentile(Offsets, 0.99),
?assert( ?IN_RANGE(AvgOffset, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile90, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile95, TickDuration, 2) ),
?assert( ?IN_RANGE(Percentile99, TickDuration, 10) )
end,
Ticks).
many_ticks_work() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
Ticks = [{{tick, N}, N} || N <- lists:seq(1, 100)],
NapTime = 1000,
{ok, Pid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, NapTime),
timer:sleep(NapTime),
{ok, Counters} = gen_server:call(Pid, get_counters),
{ok, Timestamps} = gen_server:call(Pid, get_timestamps),
exit(Pid, normal),
Tolerance = 1,
ok = lists:foreach(
fun ({{tick, TickDuration}=TickId, TickDuration}) ->
Tolerance = trunc((NapTime div Duration ) * 0.1 ) ,
Count = dict:fetch(TickId, Counters),
?assert( ?IN_RANGE(Count, NapTime div TickDuration, Tolerance) ),
TickTimestamps = dict:fetch(TickId, Timestamps),
Offsets = calc_offsets(TickTimestamps),
AvgOffset = avg(Offsets),
Percentile90 = percentile(Offsets, 0.90),
Percentile95 = percentile(Offsets, 0.95),
Percentile99 = percentile(Offsets, 0.99),
?assert( ?IN_RANGE(AvgOffset, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile90, TickDuration, 2) ),
?assert( ?IN_RANGE(Percentile95, TickDuration, 3) ),
?assert( ?IN_RANGE(Percentile99, TickDuration, 10) )
end,
Ticks).
gen_server_wrapping_works() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
Ticks = [{tick, 10}],
{ok, TickedPid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, 0),
{ok, TicklessPid} = erlchronos_ticked_gen_serv:start_link(SpawnOpts, [], 0),
?assertMatch({ok, _}, gen_server:call(TickedPid, get_counters)),
?assertMatch({ok, _}, gen_server:call(TicklessPid, get_counters)),
?assertMatch({'EXIT',{timeout,{gen_server,call,[_|_]}}},
catch gen_server:call(TickedPid, get_nothing, 10)),
?assertMatch({'EXIT',{timeout,{gen_server,call,[_|_]}}},
catch gen_server:call(TicklessPid, get_nothing, 10)),
ok = gen_server:cast(TickedPid, 'this is a cast'),
ok = gen_server:cast(TicklessPid, 'this is a cast'),
timer:sleep(1000),
TickedPid ! 'this is an info',
TicklessPid ! 'this is an info',
{ok, {TickedPidCalls, TickedPidCasts, TickedPidInfos}} = gen_server:call(TickedPid, get_histories),
{ok, {TicklessPidCalls, TicklessPidCasts, TicklessPidInfos}} = gen_server:call(TickedPid, get_histories),
ok = gen_server:cast(TickedPid, stop),
ok = gen_server:cast(TicklessPid, stop),
?assertEqual(TickedPidCalls, lists:reverse([get_counters, get_nothing])),
?assertEqual(TickedPidCalls, TicklessPidCalls),
?assertEqual(TickedPidCasts, ['this is a cast']),
?assertEqual(TickedPidCasts, TicklessPidCasts),
?assertEqual(TickedPidInfos, ['this is an info']),
?assertEqual(TickedPidInfos, TicklessPidInfos),
ok.
moderate_message_flood_is_tolerable() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
TickDuration = 10,
Ticks = [{tick, TickDuration}],
FloodDuration = 2000,
{ok, Pid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, FloodDuration),
ok = flood(Pid, 500, timestamp_ms() + FloodDuration),
{ok, Counters} = gen_server:call(Pid, get_counters),
{ok, Timestamps} = gen_server:call(Pid, get_timestamps),
exit(Pid, normal),
Count = dict:fetch(tick, Counters),
Tolerance = 1,
?assert( ?IN_RANGE(Count, FloodDuration div TickDuration, Tolerance) ),
TickTimestamps = dict:fetch(tick, Timestamps),
Offsets = calc_offsets(TickTimestamps),
AvgOffset = avg(Offsets),
Percentile90 = percentile(Offsets, 0.90),
Percentile95 = percentile(Offsets, 0.95),
Percentile99 = percentile(Offsets, 0.99),
?assert( ?IN_RANGE(AvgOffset, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile90, TickDuration, 5) ),
?assert( ?IN_RANGE(Percentile95, TickDuration, 10) ),
?assert( ?IN_RANGE(Percentile99, TickDuration, 15) ),
ok.
high_message_flood_is_tolerable() ->
SpawnOpts = [{spawn_opts, [{priority, high}]}],
TickDuration = 10,
Ticks = [{tick, TickDuration}],
FloodDuration = 4000,
{ok, Pid} = erlchronos_ticked_gen_serv:start_link([{ticks, proplists:get_keys(Ticks)}] ++ SpawnOpts,
Ticks, FloodDuration),
ok = flood(Pid, 5000, timestamp_ms() + FloodDuration),
{ok, Counters} = gen_server:call(Pid, get_counters),
{ok, Timestamps} = gen_server:call(Pid, get_timestamps),
exit(Pid, normal),
Count = dict:fetch(tick, Counters),
Tolerance = 1,
?assert( ?IN_RANGE(Count, FloodDuration div TickDuration, Tolerance) ),
TickTimestamps = dict:fetch(tick, Timestamps),
Offsets = calc_offsets(TickTimestamps),
AvgOffset = avg(Offsets),
Percentile90 = percentile(Offsets, 0.90),
Percentile95 = percentile(Offsets, 0.95),
Percentile99 = percentile(Offsets, 0.99),
?assert( ?IN_RANGE(AvgOffset, TickDuration, 1) ),
?assert( ?IN_RANGE(Percentile90, TickDuration, 5) ),
?assert( ?IN_RANGE(Percentile95, TickDuration, 10) ),
?assert( ?IN_RANGE(Percentile99, TickDuration, 20) ),
ok.
flood(Pid, FloodLevel, Deadline) ->
SchedulersOnline = erlang:system_info(schedulers_online),
[spawn(fun () -> flood_exec(Pid, FloodLevel, Deadline) end)
|| _ <- lists:seq(1, SchedulersOnline - 1)],
flood_exec(Pid, FloodLevel, Deadline).
flood_exec(Pid, FloodLevel, Deadline) ->
case timestamp_ms() >= Deadline of
true -> ok;
false ->
{message_queue_len, QueueSize} = erlang:process_info(Pid, message_queue_len),
case QueueSize < FloodLevel of
true -> Pid ! "o hai ther";
false -> ok
end,
flood_exec(Pid, FloodLevel, Deadline)
end.
timestamp_ms() ->
{MegaSecs, Secs, MicroSecs} = os:timestamp(),
((((MegaSecs * 1000000) + Secs) * 1000000) + MicroSecs) div 1000.
calc_offsets(L) when is_list(L), length(L) > 1 ->
Zipped = lists:zip(lists:sublist(L, length(L) - 1),
lists:nthtail(1, L)),
[B - A || {A, B} <- Zipped].
avg(L) when is_list(L), length(L) > 0 ->
lists:sum(L) / length(L).
percentile(L, V) ->
N = length(L),
SortedL = lists:sort(L),
Index = V * N,
TruncatedIndex = trunc(Index),
case TruncatedIndex == Index of
true ->
[X, Y] = lists:sublist(SortedL, TruncatedIndex, 2),
(X + Y) / 2.0;
false ->
RoundedIndex = TruncatedIndex + 1,
lists:nth(RoundedIndex, SortedL)
end.
|
ad292c388830898f56edd247c7bbcbacfe2fb0d1e16e9814206d17e9d6fd0431 | fiddlerwoaroof/lisp-sandbox | lambda-test.lisp | (defpackage :lambda-test
(:use cl)
(:export :mksym :alambda))
(in-package :lambda-test)
(defun mksym (str)
(intern (string-upcase str) *package*))
(defmacro alambda ((&optional (nargs 1)) &body body)
(let ((args (loop for x from 1 to nargs collect (mksym (format nil "$~a" x)))))
`(lambda ,args ,@body)))
(defmacro /. ((&optional (nargs 1)) &body body)
`(alambda (,nargs) ,@body))
| null | https://raw.githubusercontent.com/fiddlerwoaroof/lisp-sandbox/38ff817c95af35db042faf760b477675264220d2/lambda-test.lisp | lisp | (defpackage :lambda-test
(:use cl)
(:export :mksym :alambda))
(in-package :lambda-test)
(defun mksym (str)
(intern (string-upcase str) *package*))
(defmacro alambda ((&optional (nargs 1)) &body body)
(let ((args (loop for x from 1 to nargs collect (mksym (format nil "$~a" x)))))
`(lambda ,args ,@body)))
(defmacro /. ((&optional (nargs 1)) &body body)
`(alambda (,nargs) ,@body))
| |
110c4624672bca1a9b4e43a6ebde6361dcb19f3845fb310c854564625f1a0006 | criticic/llpp | glutils.mli | val vraw : [< Raw.kind > `float ] Raw.t
val filledrect2 :
float ->
float -> float -> float -> float -> float -> float -> float -> unit
val filledrect1 : float -> float -> float -> float -> unit
val filledrect : float -> float -> float -> float -> unit
val linerect : float -> float -> float -> float -> unit
val drawstring : int -> int -> int -> string -> unit
val drawstringf : int -> int -> int -> ('a, unit, string, unit) format4 -> 'a
val redisplay : bool ref
val postRedisplay : string -> unit
| null | https://raw.githubusercontent.com/criticic/llpp/04431d79a40dcc0215f87a2ad577f126a85c1e61/glutils.mli | ocaml | val vraw : [< Raw.kind > `float ] Raw.t
val filledrect2 :
float ->
float -> float -> float -> float -> float -> float -> float -> unit
val filledrect1 : float -> float -> float -> float -> unit
val filledrect : float -> float -> float -> float -> unit
val linerect : float -> float -> float -> float -> unit
val drawstring : int -> int -> int -> string -> unit
val drawstringf : int -> int -> int -> ('a, unit, string, unit) format4 -> 'a
val redisplay : bool ref
val postRedisplay : string -> unit
| |
b9338b28a3e4a5e7a15543b27af599503235f9d89e37a78e385e993e53c733b7 | kupl/FixML | sub66.ml |
type aexp =
| Const of int
| Var of string
| Power of string * int
| Times of aexp list
| Sum of aexp list
let rec diff : aexp * string -> aexp
= fun (exp, var) -> match exp with
|Const n -> Const 0
| Var t -> if t=var then Const 1 else Const 0
|Power (t,n) -> if t=var then Times [Const n; Power (t,n-1)] else Const 0
|Times lst ->Times ( timediff (lst,var))
|Sum lst -> Sum (sumdiff (lst,var))
and timediff : aexp list*string -> aexp list
= fun (lst, var) -> match lst with
|[]-> [Const 1]
|hd::tl -> (match hd with
|Const n->Const n
|Var t->if t=var then Const 1 else Const 0
|Power (t,n) -> if t=var then Times [Const n; Power (t,n-1)] else Const 0
|Times l2 ->Times ( timediff (l2,var))
|Sum l2 -> Sum (sumdiff (l2,var)))::(timediff (tl,var))
and sumdiff : aexp list*string -> aexp list
= fun (lst, var) -> match lst with
|[]->[Const 0]
|hd::tl -> (match hd with
|Const n->Const 0
|Var t-> if t=var then Const 1 else Const 0
|Power (t,n) -> if t=var then Times [Const n; Power (t,n-1)] else Const 0
|Times l2 -> Times (timediff (l2,var))
|Sum l2 -> Sum (sumdiff (l2,var)))::(sumdiff (tl,var)) | null | https://raw.githubusercontent.com/kupl/FixML/0a032a733d68cd8ccc8b1034d2908cd43b241fce/benchmarks/differentiate/diff1/submissions/sub66.ml | ocaml |
type aexp =
| Const of int
| Var of string
| Power of string * int
| Times of aexp list
| Sum of aexp list
let rec diff : aexp * string -> aexp
= fun (exp, var) -> match exp with
|Const n -> Const 0
| Var t -> if t=var then Const 1 else Const 0
|Power (t,n) -> if t=var then Times [Const n; Power (t,n-1)] else Const 0
|Times lst ->Times ( timediff (lst,var))
|Sum lst -> Sum (sumdiff (lst,var))
and timediff : aexp list*string -> aexp list
= fun (lst, var) -> match lst with
|[]-> [Const 1]
|hd::tl -> (match hd with
|Const n->Const n
|Var t->if t=var then Const 1 else Const 0
|Power (t,n) -> if t=var then Times [Const n; Power (t,n-1)] else Const 0
|Times l2 ->Times ( timediff (l2,var))
|Sum l2 -> Sum (sumdiff (l2,var)))::(timediff (tl,var))
and sumdiff : aexp list*string -> aexp list
= fun (lst, var) -> match lst with
|[]->[Const 0]
|hd::tl -> (match hd with
|Const n->Const 0
|Var t-> if t=var then Const 1 else Const 0
|Power (t,n) -> if t=var then Times [Const n; Power (t,n-1)] else Const 0
|Times l2 -> Times (timediff (l2,var))
|Sum l2 -> Sum (sumdiff (l2,var)))::(sumdiff (tl,var)) | |
8fb953d2dcfc147ef417b38872e3896a35895ab2e0867b642480610e4da32979 | karamellpelle/grid | File.hs | grid is a game written in Haskell
Copyright ( C ) 2018
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
You should have received a copy of the GNU General Public License
-- along with grid. If not, see </>.
--
-- | Umbrella module for file-system access.
--
-- Re-exports exactly one platform backend ("File.IOS" or "File.GLFW"),
-- selected at compile time by the @GRID_PLATFORM_*@ CPP flags, plus the
-- standard "System.Directory" and "System.FilePath" utilities, so the
-- rest of the code base only ever imports 'File'.
module File
  (
#ifdef GRID_PLATFORM_IOS
    module File.IOS,
#endif
#ifdef GRID_PLATFORM_GLFW
    module File.GLFW,
#endif
    module System.Directory,
    module System.FilePath,
  ) where
#ifdef GRID_PLATFORM_IOS
import File.IOS
#endif
#ifdef GRID_PLATFORM_GLFW
import File.GLFW
#endif
import System.Directory
import System.FilePath
| null | https://raw.githubusercontent.com/karamellpelle/grid/56729e63ed6404fd6cfd6d11e73fa358f03c386f/designer/source/File.hs | haskell |
This file is part of grid.
grid is free software: you can redistribute it and/or modify
(at your option) any later version.
grid is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with grid. If not, see </>.
| grid is a game written in Haskell
Copyright ( C ) 2018
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
-- | Umbrella file-system module: re-exports the platform backend chosen
-- at build time via the @GRID_PLATFORM_*@ CPP flags ("File.IOS" or
-- "File.GLFW") together with "System.Directory" and "System.FilePath".
module File
  (
#ifdef GRID_PLATFORM_IOS
    module File.IOS,
#endif
#ifdef GRID_PLATFORM_GLFW
    module File.GLFW,
#endif
    module System.Directory,
    module System.FilePath,
  ) where
#ifdef GRID_PLATFORM_IOS
import File.IOS
#endif
#ifdef GRID_PLATFORM_GLFW
import File.GLFW
#endif
import System.Directory
import System.FilePath
|
fa4b683227e8ecb56c4e87e4f2e10496a19b6a8b5ea6d4a563d43898a1750040 | robert-stuttaford/bridge | base.cljs | (ns bridge.ui.base)
;; Per-view data loader, dispatched on the :view key of its argument.
(defmulti load-on-view :view)

;; Default: a view with no dedicated loader loads nothing.
(defmethod load-on-view :default [_]
  nil)

;; Per-view render multimethod, also dispatched on :view; concrete
;; methods are registered elsewhere (not visible in this file).
(defmulti view :view)
| null | https://raw.githubusercontent.com/robert-stuttaford/bridge/867d81354457c600cc5c25917de267a8e267c853/src/bridge/ui/base.cljs | clojure | (ns bridge.ui.base)
;; Optional per-view loader; dispatch value is (:view arg).
(defmulti load-on-view :view)

;; Views without a dedicated loader fall through to nil (load nothing).
(defmethod load-on-view :default [_]
  nil)

;; Render multimethod, likewise dispatched on the :view key; its
;; concrete methods live in other namespaces.
(defmulti view :view)
| |
6a858fd81c74cf137e7c1a6e0096eb673d1a7c8321a924f0957a3bd9b7d4ae64 | mfoemmel/erlang-otp | pg.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
-module(pg).
%% pg provides a process group facility. Messages
%% can be multicasted to all members in the group
-export([create/1,
create/2,
standby/2,
join/2,
send/2,
esend/2,
members/1,
name_to_pid/1,
master/1]).
%% Create a brand new empty process group with the master residing
%% at the local node
-spec create(term()) -> 'ok' | {'error', term()}.
%% Start a brand-new, empty process group named PgName whose master
%% process runs on the local node.  Returns ok, or {error,already_created}
%% (thrown by check/1 and trapped by the catch) if the name is taken.
create(PgName) ->
    catch begin
              check(PgName),
              Master = spawn(pg, master, [PgName]),
              global:register_name(PgName, Master),
              ok
          end.
%% Create a brand new empty process group with the master
%% residing at Node
-spec create(term(), node()) -> 'ok' | {'error', term()}.
%% Same as create/1, except that the group master is spawned on Node.
create(PgName, Node) ->
    catch begin
              check(PgName),
              Master = spawn(Node, pg, master, [PgName]),
              global:register_name(PgName, Master),
              ok
          end.
Have a process on Node that will act as a standby for the process
%% group manager. So if the node where the manager runs fails, the
%% process group will continue to function.
-spec standby(term(), node()) -> 'ok'.
%% Placeholder: intended to start a standby for the group manager on
%% Node (so the group survives the master's node failing), but the body
%% is not implemented — it unconditionally returns ok.
standby(_PgName, _Node) ->
    ok.
Tell process group PgName that Pid is a new member of the group
%% synchronously return a list of all old members in the group
-spec join(atom(), pid()) -> [pid()].
%% Add Pid to the group registered globally as PgName: posts a
%% {join,self(),Pid} request to the group master and blocks until the
%% master replies.  Returns the member list as it was BEFORE Pid joined.
join(PgName, Pid) when is_atom(PgName) ->
    global:send(PgName, {join,self(),Pid}),
    receive
	{_P,{members,Members}} ->
	    Members
    end.
%% Multicast Mess to every member of the group.  The first clause
%% resolves a globally registered group name, the second sends straight
%% to a group-master pid.  Asynchronous: ok only means the request was
%% posted to the master, not that members received it.
-spec send(atom() | pid(), term()) -> 'ok'.
send(PgName, Mess) when is_atom(PgName) ->
    global:send(PgName, {send, self(), Mess}),
    ok;
send(Pg, Mess) when is_pid(Pg) ->
    Pg ! {send,self(),Mess},
    ok.
%% Like send/2, but the master excludes the calling process from the
%% broadcast (relevant when the caller is itself a member).
-spec esend(atom() | pid(), term()) -> 'ok'.
esend(PgName, Mess) when is_atom(PgName) ->
    global:send(PgName, {esend,self(),Mess}),
    ok;
esend(Pg, Mess) when is_pid(Pg) ->
    Pg ! {esend,self(),Mess},
    ok.
%% Return the current members of the group as known by its master.
%% Accepts either the globally registered group name or the master pid.
-spec members(atom() | pid()) -> [pid()].
members(PgName) when is_atom(PgName) ->
    global:send(PgName, {self() ,members}),
    receive
	{_P,{members,Members}} ->
	    Members
    end;
members(Pg) when is_pid(Pg) ->
    %% BUG FIX: this clause previously sent {self,members} — the atom
    %% 'self', not the caller's pid.  master_loop then executed
    %% From ! {self(),{members,Members}} with From = 'self', an
    %% unregistered atom, raising badarg in the master while this
    %% receive blocked forever.  Send self(), as the first clause does.
    Pg ! {self(),members},
    receive
	{_P,{members,Members}} ->
	    Members
    end.
%% Resolve a group name to its master pid via the global registry;
%% returns undefined if no such group is registered.
-spec name_to_pid(atom()) -> pid() | 'undefined'.
name_to_pid(PgName) when is_atom(PgName) ->
    global:whereis_name(PgName).
%% Entry point of the group-master process (spawned by create/1,2).
%% Traps exits so member deaths arrive as {'EXIT',...} messages handled
%% in master_loop/2, then starts serving with an empty member list.
-spec master(term()) -> no_return().
master(PgName) ->
    process_flag(trap_exit, true),
    master_loop(PgName, []).
%% Group-master server loop.  Members is the list of joined pids; the
%% master is linked to every member and traps exits (set in master/1).
master_loop(PgName,Members) ->
    receive
	%% broadcast to all members
	{send,From,Message} ->
	    send_all(Members,{pg_message,From,PgName,Message}),
	    master_loop(PgName,Members);
	%% broadcast to all members except the sender
	{esend,From,Message} ->
	    send_all(lists:delete(From,Members),
		     {pg_message,From,PgName,Message}),
	    master_loop(PgName,Members);
	%% add Pid: link to it, notify the old members, and reply to
	%% From with the member list as it was before the join
	{join,From,Pid} ->
	    link(Pid),
	    send_all(Members,{new_member,PgName,Pid}),
	    From ! {self(),{members,Members}},
	    master_loop(PgName,[Pid|Members]);
	%% membership query; From must be a pid (or registered name)
	{From,members} ->
	    From ! {self(),{members,Members}},
	    master_loop(PgName,Members);
	%% a linked process died: if it was a member, drop it and tell
	%% the survivors; otherwise keep the member list unchanged
	{'EXIT',From,_} ->
	    L =
		case lists:member(From,Members) of
		    true ->
			NewMembers = lists:delete(From,Members),
			send_all(NewMembers, {crashed_member,PgName,From}),
			NewMembers;
		    false ->
			Members
		end,
	    master_loop(PgName,L)
    end.
%% Deliver Msg to every pid in Pids; always returns ok.
send_all(Pids, Msg) ->
    lists:foreach(fun(P) -> P ! Msg end, Pids).
%% Throw {error,already_created} if PgName is already registered
%% globally; otherwise return ok.  (The throw is trapped by the catch
%% in create/1,2.)
check(PgName) ->
    case global:whereis_name(PgName) of
        undefined ->
            ok;
        Pid when is_pid(Pid) ->
            throw({error,already_created})
    end.
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/stdlib/src/pg.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
pg provides a process group facility. Messages
can be multicasted to all members in the group
Create a brand new empty process group with the master residing
at the local node
Create a brand new empty process group with the master
residing at Node
group manager. So if the node where the manager runs fails, the
process group will continue to function.
synchronously return a list of all old members in the group
Multi cast Mess to all members in the group
multi cast a message to all members in the group but ourselves
If we are a member
Return the members of the group
Check if the process group already exists | Copyright Ericsson AB 1996 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(pg).
-export([create/1,
create/2,
standby/2,
join/2,
send/2,
esend/2,
members/1,
name_to_pid/1,
master/1]).
%% Start a brand-new, empty process group named PgName with its master
%% on the local node.  check/1 throws {error,already_created} (trapped
%% by the catch) when the name is already registered globally.
-spec create(term()) -> 'ok' | {'error', term()}.
create(PgName) ->
    catch begin check(PgName),
	      Pid = spawn(pg,master,[PgName]),
	      global:register_name(PgName,Pid),
	      ok end.
%% Same as create/1, but the group master is spawned on Node.
-spec create(term(), node()) -> 'ok' | {'error', term()}.
create(PgName, Node) ->
    catch begin check(PgName),
	      Pid = spawn(Node,pg,master,[PgName]),
	      global:register_name(PgName,Pid),
	      ok end.
Have a process on Node that will act as a standby for the process
-spec standby(term(), node()) -> 'ok'.
%% Placeholder for starting a standby manager on Node; the body is not
%% implemented and unconditionally returns ok.
standby(_PgName, _Node) ->
    ok.
Tell process group PgName that Pid is a new member of the group
-spec join(atom(), pid()) -> [pid()].
%% Ask the group master registered as PgName to add Pid as a member.
%% Blocks until the master replies; returns the member list as it was
%% BEFORE Pid was added.
join(PgName, Pid) when is_atom(PgName) ->
    global:send(PgName, {join,self(),Pid}),
    receive
	{_P,{members,Members}} ->
	    Members
    end.
%% Multicast Mess to every member; accepts the registered group name or
%% the master pid.  Asynchronous: ok only means the request was posted.
-spec send(atom() | pid(), term()) -> 'ok'.
send(PgName, Mess) when is_atom(PgName) ->
    global:send(PgName, {send, self(), Mess}),
    ok;
send(Pg, Mess) when is_pid(Pg) ->
    Pg ! {send,self(),Mess},
    ok.
%% Like send/2, but the master excludes the caller from the broadcast.
-spec esend(atom() | pid(), term()) -> 'ok'.
esend(PgName, Mess) when is_atom(PgName) ->
    global:send(PgName, {esend,self(),Mess}),
    ok;
esend(Pg, Mess) when is_pid(Pg) ->
    Pg ! {esend,self(),Mess},
    ok.
-spec members(atom() | pid()) -> [pid()].
%% Return the current members of the group as known by its master;
%% accepts the globally registered group name or the master pid.
members(PgName) when is_atom(PgName) ->
    global:send(PgName, {self() ,members}),
    receive
	{_P,{members,Members}} ->
	    Members
    end;
members(Pg) when is_pid(Pg) ->
    %% BUG FIX: previously sent {self,members} (the atom 'self'), so
    %% the master replied with From ! ... to an unregistered atom —
    %% badarg in the master — while this receive blocked forever.
    Pg ! {self(),members},
    receive
	{_P,{members,Members}} ->
	    Members
    end.
-spec name_to_pid(atom()) -> pid() | 'undefined'.
%% Resolve a group name to its master pid via the global registry.
name_to_pid(PgName) when is_atom(PgName) ->
    global:whereis_name(PgName).
-spec master(term()) -> no_return().
%% Group-master entry point (spawned by create/1,2): trap exits so that
%% member deaths arrive as {'EXIT',...} messages, then serve requests.
master(PgName) ->
    process_flag(trap_exit, true),
    master_loop(PgName, []).
%% Group-master server loop; Members holds the joined pids.  The master
%% links to every member and traps exits (set up in master/1).
master_loop(PgName,Members) ->
    receive
	%% broadcast to all members
	{send,From,Message} ->
	    send_all(Members,{pg_message,From,PgName,Message}),
	    master_loop(PgName,Members);
	%% broadcast to everyone except the sender
	{esend,From,Message} ->
	    send_all(lists:delete(From,Members),
		     {pg_message,From,PgName,Message}),
	    master_loop(PgName,Members);
	%% add Pid: link, notify old members, reply to From with the
	%% member list as it was before the join
	{join,From,Pid} ->
	    link(Pid),
	    send_all(Members,{new_member,PgName,Pid}),
	    From ! {self(),{members,Members}},
	    master_loop(PgName,[Pid|Members]);
	%% membership query; From must be a pid (or registered name)
	{From,members} ->
	    From ! {self(),{members,Members}},
	    master_loop(PgName,Members);
	%% a linked process died: drop it if it was a member and inform
	%% the survivors; otherwise leave the member list unchanged
	{'EXIT',From,_} ->
	    L =
		case lists:member(From,Members) of
		    true ->
			NewMembers = lists:delete(From,Members),
			send_all(NewMembers, {crashed_member,PgName,From}),
			NewMembers;
		    false ->
			Members
		end,
	    master_loop(PgName,L)
    end.
%% Post Msg to each pid in the list; always returns ok.
send_all(Targets, Msg) ->
    lists:foreach(fun(Pid) -> Pid ! Msg end, Targets).
%% ok if PgName is free in the global registry; otherwise throw
%% {error,already_created} (trapped by the catch in create/1,2).
check(PgName) ->
    case global:whereis_name(PgName) of
        undefined ->
            ok;
        Pid when is_pid(Pid) ->
            throw({error,already_created})
    end.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.