_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
7096fbd29208105a38b6ba01a9948bee99f9253daee69937a42bac199817c5ba | stuarthalloway/programming-clojure | interop.clj | (ns examples.test.interop
(:use clojure.test)
(:use examples.interop))
(deftest sum-to-variants
(is (= (sum-to 10) 55))
(is (= (integer-sum-to 10) 55))
(is (= (unchecked-sum-to 10) 55))
(is (= (better-sum-to 10) 55))
(is (= (best-sum-to 10) 55))
)
(deftest test-painstakingly-create-array
(is (= (seq (painstakingly-create-array))
["Painstaking" "to" "fill" "in" "arrays"])))
(deftest sax-parsing
(is (= (with-out-str (demo-sax-parse "<foo><bar>hello</bar></foo>" print-element-handler))
"Saw element: foo\nSaw element: bar\n")))
;; skipping test of demo-threads
;; to write this test you would need a cross-thread with-out-str
(deftest test-try-finally
(is (= (with-out-str (is (thrown? Exception (demo-try-finally))))
"we get to clean up\n"))
)
(deftest test-class-available
(is (thrown? ClassNotFoundException (poor-class-available? "java.lang.MicrosoftRocks")))
(is (= String (poor-class-available? "java.lang.String")))
(is (false? (class-available? "java.lang.MicrosoftRocks")))
(is (class-available? "java.lang.String"))
)
(deftest test-describe-class
(is (= {:name "java.lang.String", :final true} (untyped-describe-class String)))
(is (= {:name "java.lang.String", :final true} (typed-describe-class String)))
(is (thrown? IllegalArgumentException (untyped-describe-class "foo")))
(is (thrown? ClassCastException (typed-describe-class "foo")))
)
| null | https://raw.githubusercontent.com/stuarthalloway/programming-clojure/192e2f28d797fd70e50778aabd031b3ff55bd2b9/test/examples/test/interop.clj | clojure | skipping test of demo-threads
to write this test you would need a cross-thread with-out-str | (ns examples.test.interop
(:use clojure.test)
(:use examples.interop))
(deftest sum-to-variants
(is (= (sum-to 10) 55))
(is (= (integer-sum-to 10) 55))
(is (= (unchecked-sum-to 10) 55))
(is (= (better-sum-to 10) 55))
(is (= (best-sum-to 10) 55))
)
(deftest test-painstakingly-create-array
(is (= (seq (painstakingly-create-array))
["Painstaking" "to" "fill" "in" "arrays"])))
(deftest sax-parsing
(is (= (with-out-str (demo-sax-parse "<foo><bar>hello</bar></foo>" print-element-handler))
"Saw element: foo\nSaw element: bar\n")))
(deftest test-try-finally
(is (= (with-out-str (is (thrown? Exception (demo-try-finally))))
"we get to clean up\n"))
)
(deftest test-class-available
(is (thrown? ClassNotFoundException (poor-class-available? "java.lang.MicrosoftRocks")))
(is (= String (poor-class-available? "java.lang.String")))
(is (false? (class-available? "java.lang.MicrosoftRocks")))
(is (class-available? "java.lang.String"))
)
(deftest test-describe-class
(is (= {:name "java.lang.String", :final true} (untyped-describe-class String)))
(is (= {:name "java.lang.String", :final true} (typed-describe-class String)))
(is (thrown? IllegalArgumentException (untyped-describe-class "foo")))
(is (thrown? ClassCastException (typed-describe-class "foo")))
)
|
cd50ef2d9f5d13554bdd1bdba8a165dbcdfbcbf62dd3c89e1d912b9c854766d7 | AndrasKovacs/ELTE-func-lang | Tut07.hs | {-# OPTIONS -Wincomplete-patterns #-}
# LANGUAGE InstanceSigs , DeriveFunctor , DeriveFoldable #
module Tut07 where
import Prelude hiding (Traversable(..))
import Control.Monad
import Data.Monoid
data State s a = State { runState :: s -> (a, s) }
deriving (Functor)
instance Applicative (State s) where
pure = return
(<*>) = ap
instance Monad (State s) where
return a = State (\s -> (a, s))
State f >>= g = State (\s -> case f s of (a, s') -> runState (g a) s')
get :: State s s
get = State (\s -> (s, s))
put :: s -> State s ()
put s = State (\_ -> ((), s))
modify :: (s -> s) -> State s ()
modify f = do s <- get; put (f s)
evalState :: State s a -> s -> a
evalState ma = fst . runState ma
execState :: State s a -> s -> s
execState ma = snd . runState ma
--------------------------------------------------------------------------------
data Tree a = Leaf a
| Node (Tree a) (Tree a)
deriving (Show, Eq, Functor, Foldable)
-- Define the function `labelTree :: Tree a -> Tree Int`.
` labelTree t ` should label the leaves by the integers 0,1 , ...
-- Examples:
-- labelTree (Leaf ()) == Leaf 0
-- labelTree (Node (Leaf ()) (Leaf ()))
-- == Node (Leaf 0) (Leaf 1)
-- labelTree (Node (Node (Leaf ()) (Leaf ())) (Leaf ()))
= = Node ( Node ( Leaf 0 ) ( Leaf 1 ) ) ( Leaf 2 )
labelTree :: Tree a -> Tree Int
labelTree t = evalState (go t) 0
where go :: Tree a -> State Int (Tree Int)
go (Leaf x) = do
a <- get
put (a+1)
pure (Leaf a)
go (Node l r) = Node <$> go l <*> go r
-- go (Node l r) = do
-- -- state: a
-- l' <- go l
-- -- state: a + length l
-- r' <- go r
-- -- state: a + length l + length r
-- pure (Node l' r')
-- go (Node l r) = liftM2 Node (go l) (go r)
-- `relabelTree xs t` should label the leaves using the values of xs.
-- (You can assume that length xs >= length t)
-- Examples:
relabelTree [ 10 ] ( Leaf ( ) ) = = Leaf 10
relabelTree [ 2,1 ] ( Node ( Leaf ( ) ) ( Leaf ( ) ) )
-- == Node (Leaf 2) (Leaf 1)
-- relabelTree [9,2,7] (Node (Node (Leaf ()) (Leaf ()) (Leaf ()))
-- == Node (Node (Leaf 9) (Leaf 2)) (Leaf 7)
relabelTree :: [b] -> Tree a -> Tree b
relabelTree xs t = evalState (go t) xs
where go :: Tree a -> State [b] (Tree b)
go (Leaf x) = do
ys <- get
let z:zs = ys
put zs
pure (Leaf z)
go (Node l r) = Node <$> go l <*> go r
--
In Prelude :
-- lookup :: Eq a => a -> [(a,b)] -> Maybe b
lookup " key1 " [ ( " key1 " , 0 ) , ( " key2 " , 1 ) ] = = Just 0
lookup " key3 " [ ( " key1 " , 0 ) , ( " key2 " , 1 ) ] = = Nothing
-- Examples:
[ ( " a " , 0 ) , ( " b " , 1 ) ] ( Node ( Leaf " b " ) ( Leaf " a " ) )
-- == Just (Node (Leaf 1) (Leaf 0))
[ ( " a " , 0 ) , ( " b " , 1 ) ] ( Node ( Leaf " a " ) ( Leaf " c " ) )
-- == Nothing
-- `mapLookup xs t` should apply the function `swap lookup xs` to the
-- values at the leaves of `t`, and fail (return Nothing)
-- if any of the lookups fails.
mapLookup :: Eq a => [(a,b)] -> Tree a -> Maybe (Tree b)
mapLookup xs (Leaf a) = do
b <- lookup a xs
pure (Leaf b)
mapLookup xs (Node l r) = Node <$> mapLookup xs l <*> mapLookup xs r
--------------------------------------------------------------------------------
class Foldable f => Traversable f where
-- fmap :: (a -> b) -> f a -> f b
-- foldMap :: Monoid m => (a -> m) -> f a -> m
traverse :: Applicative m => (a -> m b) -> f a -> m (f b)
instance Traversable [] where
traverse f [] = pure []
traverse f (x:xs) = (:) <$> f x <*> traverse f xs
relabel' :: Traversable f => [b] -> f a -> f b
relabel' xs t = evalState (go t) xs
where go :: Traversable f => f a -> State [b] (f b)
go = traverse $ \x -> do
ys <- get
let z:zs = ys
put zs
pure z
mapLookup' :: (Eq a, Traversable f) => [(a,b)] -> f a -> Maybe (f b)
mapLookup' xs = traverse $ \a -> lookup a xs
mapLookup ' xs = traverse ( swap lookup xs )
instance Traversable Maybe where
traverse f Nothing = pure Nothing
traverse f (Just x) = Just <$> f x
instance Traversable (Either x) where
traverse = undefined
instance Traversable Tree where
traverse f (Leaf x) = Leaf <$> f x
traverse f (Node x y) = Node <$> traverse f x <*> traverse f y
data Tree2 a = Leaf2 a
| Node2 [Tree2 a]
deriving (Functor, Foldable)
-- fmap f (Node2 xs) = Node2 (fmap (fmap f) xs)
instance Traversable Tree2 where
traverse f (Leaf2 x) = Leaf2 <$> f x
traverse f (Node2 xs) = Node2 <$> traverse (traverse f) xs
Bonus ( fmapDefault and foldMapDefault in Data . ):
fmapFromTraverse :: (Traversable f, Monoid m) => (a -> m) -> f a -> m
fmapFromTraverse = undefined
foldMapFromTraverse :: Traversable f => (a -> b) -> f a -> f b
foldMapFromTraverse = undefined
| null | https://raw.githubusercontent.com/AndrasKovacs/ELTE-func-lang/5ae6c1493b0cb2712f41740b0783be9bc8a8a704/2021-22-1/gyak_3/Tut07.hs | haskell | # OPTIONS -Wincomplete-patterns #
------------------------------------------------------------------------------
Define the function `labelTree :: Tree a -> Tree Int`.
Examples:
labelTree (Leaf ()) == Leaf 0
labelTree (Node (Leaf ()) (Leaf ()))
== Node (Leaf 0) (Leaf 1)
labelTree (Node (Node (Leaf ()) (Leaf ())) (Leaf ()))
go (Node l r) = do
-- state: a
l' <- go l
-- state: a + length l
r' <- go r
-- state: a + length l + length r
pure (Node l' r')
go (Node l r) = liftM2 Node (go l) (go r)
`relabelTree xs t` should label the leaves using the values of xs.
(You can assume that length xs >= length t)
Examples:
== Node (Leaf 2) (Leaf 1)
relabelTree [9,2,7] (Node (Node (Leaf ()) (Leaf ()) (Leaf ()))
== Node (Node (Leaf 9) (Leaf 2)) (Leaf 7)
lookup :: Eq a => a -> [(a,b)] -> Maybe b
Examples:
== Just (Node (Leaf 1) (Leaf 0))
== Nothing
`mapLookup xs t` should apply the function `swap lookup xs` to the
values at the leaves of `t`, and fail (return Nothing)
if any of the lookups fails.
------------------------------------------------------------------------------
fmap :: (a -> b) -> f a -> f b
foldMap :: Monoid m => (a -> m) -> f a -> m
fmap f (Node2 xs) = Node2 (fmap (fmap f) xs) | # LANGUAGE InstanceSigs , DeriveFunctor , DeriveFoldable #
module Tut07 where
import Prelude hiding (Traversable(..))
import Control.Monad
import Data.Monoid
data State s a = State { runState :: s -> (a, s) }
deriving (Functor)
instance Applicative (State s) where
pure = return
(<*>) = ap
instance Monad (State s) where
return a = State (\s -> (a, s))
State f >>= g = State (\s -> case f s of (a, s') -> runState (g a) s')
get :: State s s
get = State (\s -> (s, s))
put :: s -> State s ()
put s = State (\_ -> ((), s))
modify :: (s -> s) -> State s ()
modify f = do s <- get; put (f s)
evalState :: State s a -> s -> a
evalState ma = fst . runState ma
execState :: State s a -> s -> s
execState ma = snd . runState ma
data Tree a = Leaf a
| Node (Tree a) (Tree a)
deriving (Show, Eq, Functor, Foldable)
` labelTree t ` should label the leaves by the integers 0,1 , ...
= = Node ( Node ( Leaf 0 ) ( Leaf 1 ) ) ( Leaf 2 )
labelTree :: Tree a -> Tree Int
labelTree t = evalState (go t) 0
where go :: Tree a -> State Int (Tree Int)
go (Leaf x) = do
a <- get
put (a+1)
pure (Leaf a)
go (Node l r) = Node <$> go l <*> go r
relabelTree [ 10 ] ( Leaf ( ) ) = = Leaf 10
relabelTree [ 2,1 ] ( Node ( Leaf ( ) ) ( Leaf ( ) ) )
relabelTree :: [b] -> Tree a -> Tree b
relabelTree xs t = evalState (go t) xs
where go :: Tree a -> State [b] (Tree b)
go (Leaf x) = do
ys <- get
let z:zs = ys
put zs
pure (Leaf z)
go (Node l r) = Node <$> go l <*> go r
In Prelude :
lookup " key1 " [ ( " key1 " , 0 ) , ( " key2 " , 1 ) ] = = Just 0
lookup " key3 " [ ( " key1 " , 0 ) , ( " key2 " , 1 ) ] = = Nothing
[ ( " a " , 0 ) , ( " b " , 1 ) ] ( Node ( Leaf " b " ) ( Leaf " a " ) )
[ ( " a " , 0 ) , ( " b " , 1 ) ] ( Node ( Leaf " a " ) ( Leaf " c " ) )
mapLookup :: Eq a => [(a,b)] -> Tree a -> Maybe (Tree b)
mapLookup xs (Leaf a) = do
b <- lookup a xs
pure (Leaf b)
mapLookup xs (Node l r) = Node <$> mapLookup xs l <*> mapLookup xs r
class Foldable f => Traversable f where
traverse :: Applicative m => (a -> m b) -> f a -> m (f b)
instance Traversable [] where
traverse f [] = pure []
traverse f (x:xs) = (:) <$> f x <*> traverse f xs
relabel' :: Traversable f => [b] -> f a -> f b
relabel' xs t = evalState (go t) xs
where go :: Traversable f => f a -> State [b] (f b)
go = traverse $ \x -> do
ys <- get
let z:zs = ys
put zs
pure z
mapLookup' :: (Eq a, Traversable f) => [(a,b)] -> f a -> Maybe (f b)
mapLookup' xs = traverse $ \a -> lookup a xs
mapLookup ' xs = traverse ( swap lookup xs )
instance Traversable Maybe where
traverse f Nothing = pure Nothing
traverse f (Just x) = Just <$> f x
instance Traversable (Either x) where
traverse = undefined
instance Traversable Tree where
traverse f (Leaf x) = Leaf <$> f x
traverse f (Node x y) = Node <$> traverse f x <*> traverse f y
data Tree2 a = Leaf2 a
| Node2 [Tree2 a]
deriving (Functor, Foldable)
instance Traversable Tree2 where
traverse f (Leaf2 x) = Leaf2 <$> f x
traverse f (Node2 xs) = Node2 <$> traverse (traverse f) xs
Bonus ( fmapDefault and foldMapDefault in Data . ):
fmapFromTraverse :: (Traversable f, Monoid m) => (a -> m) -> f a -> m
fmapFromTraverse = undefined
foldMapFromTraverse :: Traversable f => (a -> b) -> f a -> f b
foldMapFromTraverse = undefined
|
93a3fa877146c1da7db34d5f8de7ad241502337c84722d3f7a963a4eb0299666 | jimmythompson/halboy | httpkit_test.clj | (ns halboy.http.httpkit-test
(:use org.httpkit.fake)
(:require [clojure.test :refer [deftest testing is]]
[halboy.http.http-kit :as httpkit-client]
[halboy.http.protocol :as http]))
(def base-url "")
(deftest halboy-http
(testing "http_kit"
(with-fake-http
[{:url base-url :method :get} {:status 201 :body "{}"}]
(let [client (httpkit-client/new-http-client)
request {:url base-url
:method :get}]
(is (=
(http/exchange client request)
{:body {}
:headers {:content-type "text/html"
:server "org.httpkit.fake"}
:raw {:body {}
:headers {:content-type "text/html"
:server "org.httpkit.fake"}
:opts {:as :text
:headers {"Accept" "application/hal+json"
"Content-Type" "application/json"}
:method :get
:url ""}
:status 201}
:status 201
:url ""}))))))
| null | https://raw.githubusercontent.com/jimmythompson/halboy/7e0b1fcb072851520917476ade692f7dbf6f9962/test/halboy/http/httpkit_test.clj | clojure | (ns halboy.http.httpkit-test
(:use org.httpkit.fake)
(:require [clojure.test :refer [deftest testing is]]
[halboy.http.http-kit :as httpkit-client]
[halboy.http.protocol :as http]))
(def base-url "")
(deftest halboy-http
(testing "http_kit"
(with-fake-http
[{:url base-url :method :get} {:status 201 :body "{}"}]
(let [client (httpkit-client/new-http-client)
request {:url base-url
:method :get}]
(is (=
(http/exchange client request)
{:body {}
:headers {:content-type "text/html"
:server "org.httpkit.fake"}
:raw {:body {}
:headers {:content-type "text/html"
:server "org.httpkit.fake"}
:opts {:as :text
:headers {"Accept" "application/hal+json"
"Content-Type" "application/json"}
:method :get
:url ""}
:status 201}
:status 201
:url ""}))))))
| |
50021451c662e9cf7a8309c48a4e0ec3dd513530664525951419641e17a53f88 | gsakkas/rite | 3407.ml |
let rec clone x n =
if n < 1
then []
else
(let rec helper acc f x =
match x with | 0 -> acc | _ -> helper (f :: acc) f (x - 1) in
helper [] x n);;
let padZero l1 l2 =
let x = (List.length l1) - (List.length l2) in
if x != 0
then
(if x < 0
then (((clone 0 (abs x)) @ l1), l2)
else (l1, ((clone 0 (abs x)) @ l2)))
else (l1, l2);;
let rec removeZero l =
match l with | x::xs -> if x = 0 then removeZero xs else l | _ -> l;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x =
match x with
| (b,c) ->
let sum = b + c in
if sum < 10
then
(match a with
| (len,[]) -> (len, [sum])
| (len,x'::xs') ->
if x' = (-1)
then
(if sum = 9
then (len, ((-1) :: 0 :: xs'))
else (len, ((sum + 1) :: xs')))
else (len, (sum :: x' :: xs')))
else
(match a with
| (len,[]) -> (len, [(-1); sum mod 10])
| (len,x'::xs') ->
if x' = (-1)
then (-1) :: ((sum mod 10) + 1) :: a
else (len, ((-1) :: (sum mod 10) :: x' :: xs'))) in
let base = ((List.length l1), []) in
let args = List.combine (List.rev l1) (List.rev l2) in
let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
fix
let rec clone x n =
if n < 1
then [ ]
else
( let rec helper acc f x =
match x with | 0 - > acc | _ - > helper ( f : : acc ) f ( x - 1 ) in
helper [ ] x n ) ; ;
let =
let x = ( l1 ) - ( List.length l2 ) in
if x ! = 0
then
( if x < 0
then ( ( ( clone 0 ( abs x ) ) @ l1 ) , l2 )
else ( l1 , ( ( clone 0 ( abs x ) ) @ l2 ) ) )
else ( l1 , l2 ) ; ;
let rec removeZero l =
match l with | x::xs - > if x = 0 then removeZero xs else l | _ - > l ; ;
let bigAdd l1 l2 =
let add ( l1,l2 ) =
let f a x =
match x with
| ( b , c ) - >
let sum = b + c in
if sum < 10
then
( match a with
| ( len , [ ] ) - > ( len , [ sum ] )
| ( len , x'::xs ' ) - >
if x ' = ( -1 )
then
( if sum = 9
then ( len , ( ( -1 ) : : 0 : : xs ' ) )
else ( len , ( ( sum + 1 ) : : xs ' ) ) )
else ( len , ( sum : : x ' : : xs ' ) ) )
else
( match a with
| ( len , [ ] ) - > ( len , [ ( -1 ) ; sum mod 10 ] )
| ( len , x'::xs ' ) - >
if x ' = ( -1 )
then ( len , ( ( -1 ) : : ( ( sum mod 10 ) + 1 ) : : xs ' ) )
else ( len , ( ( -1 ) : : ( sum mod 10 ) : : x ' : : xs ' ) ) ) in
let base = ( ( List.length l1 ) , [ ] ) in
let args = List.combine ( List.rev l1 ) ( List.rev l2 ) in
let ( _ , res ) = List.fold_left f base args in res in
removeZero ( add ( ) ) ; ;
let rec clone x n =
if n < 1
then []
else
(let rec helper acc f x =
match x with | 0 -> acc | _ -> helper (f :: acc) f (x - 1) in
helper [] x n);;
let padZero l1 l2 =
let x = (List.length l1) - (List.length l2) in
if x != 0
then
(if x < 0
then (((clone 0 (abs x)) @ l1), l2)
else (l1, ((clone 0 (abs x)) @ l2)))
else (l1, l2);;
let rec removeZero l =
match l with | x::xs -> if x = 0 then removeZero xs else l | _ -> l;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x =
match x with
| (b,c) ->
let sum = b + c in
if sum < 10
then
(match a with
| (len,[]) -> (len, [sum])
| (len,x'::xs') ->
if x' = (-1)
then
(if sum = 9
then (len, ((-1) :: 0 :: xs'))
else (len, ((sum + 1) :: xs')))
else (len, (sum :: x' :: xs')))
else
(match a with
| (len,[]) -> (len, [(-1); sum mod 10])
| (len,x'::xs') ->
if x' = (-1)
then (len, ((-1) :: ((sum mod 10) + 1) :: xs'))
else (len, ((-1) :: (sum mod 10) :: x' :: xs'))) in
let base = ((List.length l1), []) in
let args = List.combine (List.rev l1) (List.rev l2) in
let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
*)
changed spans
( 44,23)-(44,54 )
( len , ( -1 ) : : ( ( ( sum mod 10 ) + 1 ) : : xs ' ) )
TupleG [ VarG , AppG [ EmptyG , EmptyG ] ]
(44,23)-(44,54)
(len , (-1) :: (((sum mod 10) + 1) :: xs'))
TupleG [VarG,AppG [EmptyG,EmptyG]]
*)
type error slice
( 30,13)-(38,49 )
( 30,20)-(30,21 )
( 43,18)-(45,65 )
( 44,23)-(44,54 )
( 44,31)-(44,54 )
( 44,53)-(44,54 )
( 45,23)-(45,65 )
(30,13)-(38,49)
(30,20)-(30,21)
(43,18)-(45,65)
(44,23)-(44,54)
(44,31)-(44,54)
(44,53)-(44,54)
(45,23)-(45,65)
*)
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/data/sp14_min/3407.ml | ocaml |
let rec clone x n =
if n < 1
then []
else
(let rec helper acc f x =
match x with | 0 -> acc | _ -> helper (f :: acc) f (x - 1) in
helper [] x n);;
let padZero l1 l2 =
let x = (List.length l1) - (List.length l2) in
if x != 0
then
(if x < 0
then (((clone 0 (abs x)) @ l1), l2)
else (l1, ((clone 0 (abs x)) @ l2)))
else (l1, l2);;
let rec removeZero l =
match l with | x::xs -> if x = 0 then removeZero xs else l | _ -> l;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x =
match x with
| (b,c) ->
let sum = b + c in
if sum < 10
then
(match a with
| (len,[]) -> (len, [sum])
| (len,x'::xs') ->
if x' = (-1)
then
(if sum = 9
then (len, ((-1) :: 0 :: xs'))
else (len, ((sum + 1) :: xs')))
else (len, (sum :: x' :: xs')))
else
(match a with
| (len,[]) -> (len, [(-1); sum mod 10])
| (len,x'::xs') ->
if x' = (-1)
then (-1) :: ((sum mod 10) + 1) :: a
else (len, ((-1) :: (sum mod 10) :: x' :: xs'))) in
let base = ((List.length l1), []) in
let args = List.combine (List.rev l1) (List.rev l2) in
let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
fix
let rec clone x n =
if n < 1
then [ ]
else
( let rec helper acc f x =
match x with | 0 - > acc | _ - > helper ( f : : acc ) f ( x - 1 ) in
helper [ ] x n ) ; ;
let =
let x = ( l1 ) - ( List.length l2 ) in
if x ! = 0
then
( if x < 0
then ( ( ( clone 0 ( abs x ) ) @ l1 ) , l2 )
else ( l1 , ( ( clone 0 ( abs x ) ) @ l2 ) ) )
else ( l1 , l2 ) ; ;
let rec removeZero l =
match l with | x::xs - > if x = 0 then removeZero xs else l | _ - > l ; ;
let bigAdd l1 l2 =
let add ( l1,l2 ) =
let f a x =
match x with
| ( b , c ) - >
let sum = b + c in
if sum < 10
then
( match a with
| ( len , [ ] ) - > ( len , [ sum ] )
| ( len , x'::xs ' ) - >
if x ' = ( -1 )
then
( if sum = 9
then ( len , ( ( -1 ) : : 0 : : xs ' ) )
else ( len , ( ( sum + 1 ) : : xs ' ) ) )
else ( len , ( sum : : x ' : : xs ' ) ) )
else
( match a with
| ( len , [ ] ) - > ( len , [ ( -1 ) ; sum mod 10 ] )
| ( len , x'::xs ' ) - >
if x ' = ( -1 )
then ( len , ( ( -1 ) : : ( ( sum mod 10 ) + 1 ) : : xs ' ) )
else ( len , ( ( -1 ) : : ( sum mod 10 ) : : x ' : : xs ' ) ) ) in
let base = ( ( List.length l1 ) , [ ] ) in
let args = List.combine ( List.rev l1 ) ( List.rev l2 ) in
let ( _ , res ) = List.fold_left f base args in res in
removeZero ( add ( ) ) ; ;
let rec clone x n =
if n < 1
then []
else
(let rec helper acc f x =
match x with | 0 -> acc | _ -> helper (f :: acc) f (x - 1) in
helper [] x n);;
let padZero l1 l2 =
let x = (List.length l1) - (List.length l2) in
if x != 0
then
(if x < 0
then (((clone 0 (abs x)) @ l1), l2)
else (l1, ((clone 0 (abs x)) @ l2)))
else (l1, l2);;
let rec removeZero l =
match l with | x::xs -> if x = 0 then removeZero xs else l | _ -> l;;
let bigAdd l1 l2 =
let add (l1,l2) =
let f a x =
match x with
| (b,c) ->
let sum = b + c in
if sum < 10
then
(match a with
| (len,[]) -> (len, [sum])
| (len,x'::xs') ->
if x' = (-1)
then
(if sum = 9
then (len, ((-1) :: 0 :: xs'))
else (len, ((sum + 1) :: xs')))
else (len, (sum :: x' :: xs')))
else
(match a with
| (len,[]) -> (len, [(-1); sum mod 10])
| (len,x'::xs') ->
if x' = (-1)
then (len, ((-1) :: ((sum mod 10) + 1) :: xs'))
else (len, ((-1) :: (sum mod 10) :: x' :: xs'))) in
let base = ((List.length l1), []) in
let args = List.combine (List.rev l1) (List.rev l2) in
let (_,res) = List.fold_left f base args in res in
removeZero (add (padZero l1 l2));;
*)
changed spans
( 44,23)-(44,54 )
( len , ( -1 ) : : ( ( ( sum mod 10 ) + 1 ) : : xs ' ) )
TupleG [ VarG , AppG [ EmptyG , EmptyG ] ]
(44,23)-(44,54)
(len , (-1) :: (((sum mod 10) + 1) :: xs'))
TupleG [VarG,AppG [EmptyG,EmptyG]]
*)
type error slice
( 30,13)-(38,49 )
( 30,20)-(30,21 )
( 43,18)-(45,65 )
( 44,23)-(44,54 )
( 44,31)-(44,54 )
( 44,53)-(44,54 )
( 45,23)-(45,65 )
(30,13)-(38,49)
(30,20)-(30,21)
(43,18)-(45,65)
(44,23)-(44,54)
(44,31)-(44,54)
(44,53)-(44,54)
(45,23)-(45,65)
*)
| |
011c06f36b4ba0d98acf7c6b161a2658837b5572d23c8dabca7718f70993a9ad | neilprosser/mr-maestro | healthy_test.clj | (ns maestro.messages.healthy-test
(:require [maestro.healthy :as healthy]
[maestro.messages.healthy :refer :all]
[midje.sweet :refer :all]))
(def register-with-healthy-params
{:environment "environment"
:new-state {:auto-scaling-group-name "new-asg"
:tyranitar {:application-properties {:service.port "1234"}
:deployment-params {:health-check-type "EC2"
:healthy {:scheme "scheme"
:path "path"
:port "2345"
:timeout 100}}}}
:region "region"})
(fact "that we don't register anything with Healthy if we've not asked for it"
(let [params (assoc-in register-with-healthy-params [:new-state :tyranitar :deployment-params :healthy] nil)]
(register-with-healthy {:parameters params}) => (contains {:status :success})
(provided
(healthy/register-auto-scaling-group "environment" "region" "new-asg" anything anything anything anything) => nil :times 0)))
(fact "that we register with Healthy if we've asked for it"
(register-with-healthy {:parameters register-with-healthy-params}) => (contains {:status :success})
(provided
(healthy/register-auto-scaling-group "environment" "region" "new-asg" "path" "2345" "scheme" 100) => true))
(fact "that a failure while registering with Healthy is allowed"
(register-with-healthy {:parameters register-with-healthy-params}) => (contains {:status :success})
(provided
(healthy/register-auto-scaling-group "environment" "region" "new-asg" "path" "2345" "scheme" 100) => false))
(fact "that we register with Healthy if we've asked for it but fallback to a port provided in the application properties if nothing is present in deployment params"
(let [params (assoc-in register-with-healthy-params [:new-state :tyranitar :deployment-params :healthy :port] nil)]
(register-with-healthy {:parameters params}) => (contains {:status :success})
(provided
(healthy/register-auto-scaling-group "environment" "region" "new-asg" "path" "1234" "scheme" 100) => true)))
(def deregister-from-healthy-params
{:environment "environment"
:previous-state {:auto-scaling-group-name "previous-asg"
:tyranitar {:deployment-params {:health-check-type "EC2"
:healthy {:scheme "scheme"
:path "path"
:port "2345"
:timeout 100}}}}
:region "region"})
(fact "that we don't deregister from Healthy if the previous state wasn't using Healthy"
(let [params (assoc-in deregister-from-healthy-params [:previous-state :tyranitar :deployment-params :healthy] nil)]
(deregister-from-healthy {:parameters params}) => (contains {:status :success})
(provided
(healthy/deregister-auto-scaling-group anything anything anything) => nil :times 0)))
(fact "that we don't deregister from Healthy if there's nothing which needs deregistering"
(let [params (assoc-in deregister-from-healthy-params [:previous-state :auto-scaling-group-name] nil)]
(deregister-from-healthy {:parameters params}) => (contains {:status :success})
(provided
(healthy/deregister-auto-scaling-group anything anything anything) => nil :times 0)))
(fact "that we deregister from Healthy if we've been told to"
(deregister-from-healthy {:parameters deregister-from-healthy-params}) => (contains {:status :success})
(provided
(healthy/deregister-auto-scaling-group "environment" "region" "previous-asg") => true))
(fact "that a failure deregistering from from Healthy is allowed"
(deregister-from-healthy {:parameters deregister-from-healthy-params}) => (contains {:status :success})
(provided
(healthy/deregister-auto-scaling-group "environment" "region" "previous-asg") => false))
| null | https://raw.githubusercontent.com/neilprosser/mr-maestro/469790fd712262016729c1d83d4b4e11869237a2/test/maestro/messages/healthy_test.clj | clojure | (ns maestro.messages.healthy-test
(:require [maestro.healthy :as healthy]
[maestro.messages.healthy :refer :all]
[midje.sweet :refer :all]))
(def register-with-healthy-params
{:environment "environment"
:new-state {:auto-scaling-group-name "new-asg"
:tyranitar {:application-properties {:service.port "1234"}
:deployment-params {:health-check-type "EC2"
:healthy {:scheme "scheme"
:path "path"
:port "2345"
:timeout 100}}}}
:region "region"})
(fact "that we don't register anything with Healthy if we've not asked for it"
(let [params (assoc-in register-with-healthy-params [:new-state :tyranitar :deployment-params :healthy] nil)]
(register-with-healthy {:parameters params}) => (contains {:status :success})
(provided
(healthy/register-auto-scaling-group "environment" "region" "new-asg" anything anything anything anything) => nil :times 0)))
(fact "that we register with Healthy if we've asked for it"
(register-with-healthy {:parameters register-with-healthy-params}) => (contains {:status :success})
(provided
(healthy/register-auto-scaling-group "environment" "region" "new-asg" "path" "2345" "scheme" 100) => true))
(fact "that a failure while registering with Healthy is allowed"
(register-with-healthy {:parameters register-with-healthy-params}) => (contains {:status :success})
(provided
(healthy/register-auto-scaling-group "environment" "region" "new-asg" "path" "2345" "scheme" 100) => false))
(fact "that we register with Healthy if we've asked for it but fallback to a port provided in the application properties if nothing is present in deployment params"
(let [params (assoc-in register-with-healthy-params [:new-state :tyranitar :deployment-params :healthy :port] nil)]
(register-with-healthy {:parameters params}) => (contains {:status :success})
(provided
(healthy/register-auto-scaling-group "environment" "region" "new-asg" "path" "1234" "scheme" 100) => true)))
(def deregister-from-healthy-params
{:environment "environment"
:previous-state {:auto-scaling-group-name "previous-asg"
:tyranitar {:deployment-params {:health-check-type "EC2"
:healthy {:scheme "scheme"
:path "path"
:port "2345"
:timeout 100}}}}
:region "region"})
(fact "that we don't deregister from Healthy if the previous state wasn't using Healthy"
(let [params (assoc-in deregister-from-healthy-params [:previous-state :tyranitar :deployment-params :healthy] nil)]
(deregister-from-healthy {:parameters params}) => (contains {:status :success})
(provided
(healthy/deregister-auto-scaling-group anything anything anything) => nil :times 0)))
(fact "that we don't deregister from Healthy if there's nothing which needs deregistering"
(let [params (assoc-in deregister-from-healthy-params [:previous-state :auto-scaling-group-name] nil)]
(deregister-from-healthy {:parameters params}) => (contains {:status :success})
(provided
(healthy/deregister-auto-scaling-group anything anything anything) => nil :times 0)))
(fact "that we deregister from Healthy if we've been told to"
(deregister-from-healthy {:parameters deregister-from-healthy-params}) => (contains {:status :success})
(provided
(healthy/deregister-auto-scaling-group "environment" "region" "previous-asg") => true))
(fact "that a failure deregistering from from Healthy is allowed"
(deregister-from-healthy {:parameters deregister-from-healthy-params}) => (contains {:status :success})
(provided
(healthy/deregister-auto-scaling-group "environment" "region" "previous-asg") => false))
| |
e45d074a4192a8239c8b10b452e9f77c336b1a36ece0a102bec198ba9ddfb9ad | Octachron/olivine | vk__builtin__types.ml | module U32 = Unsigned.UInt32
module U64 = Unsigned.UInt64
module U16 = Unsigned.UInt16
module S = Unsigned.Size_t
module type intlike = sig type t val zero: t val ctype: t Ctypes.typ end
let integer_opt (type a) (module I:intlike with type t = a) =
let read x = if x = I.zero then None else Some x in
let write = function None -> I.zero | Some x -> x in
Ctypes.view ~read ~write I.ctype
let integer_opt' zero ctype =
let read x = if x = zero then None else Some x in
let write = function None -> zero | Some x -> x in
Ctypes.view ~read ~write ctype
type uint_64_t = U64.t
let uint_64_t = Ctypes.uint64_t
* { 2 Uint32 special handling }
ASSUME 64 bits
let uint_32_t = Ctypes.view ~read:(U32.to_int) ~write:(U32.of_int)
Ctypes.uint32_t
ASSUME 64 bits
let uint_16_t = Ctypes.view ~read:(U16.to_int) ~write:(U16.of_int)
Ctypes.uint16_t
type void = unit
let void = Ctypes.void
type int_32_t = int
type bool_32 = bool
module Int = struct
type t = int
let zero = 0
let pp = Format.pp_print_int
let ctype = Ctypes.int
end
module U8 = Unsigned.UInt8
module Uint_8_t = struct
open U8
type t = U8.t
let ctype = Ctypes.uint8_t
let zero = of_int 0
let of_int = of_int
let to_int = to_int
let to_string = to_string
let pp ppf x = Format.pp_print_string ppf (to_string x)
end
type uint_8_t = U8.t
let uint_8_t = Ctypes.uint8_t
let bool_32 =
let true' = U32.of_int Vk__Const.true'
and false' = U32.of_int Vk__Const.false' in
Ctypes.view
~read:( (=) true' )
~write:( fun x -> if x then true' else false' )
Ctypes.uint32_t
let bool_32_opt =
let true' = U32.of_int Vk__Const.true'
and false' = U32.of_int Vk__Const.false' in
Ctypes.view
~read:( fun x -> if U32.zero = x then None else if x = true' then Some true else Some false )
~write:(function None -> U32.zero | Some x -> if x then true' else false' )
Ctypes.uint32_t
let bool = bool_32
let integer_opt Ctypes.uint64_t ( module U64 )
module Size_t_0 = struct
let of_int = S.of_int
let to_int = S.to_int
let zero = of_int 0
let to_string = S.to_string
let pp ppf x = Format.fprintf ppf "%s" (S.to_string x)
type t = S.t
let ctype = Ctypes.size_t
end
module Size_t = struct
include Size_t_0
let ctype_opt = integer_opt (module Size_t_0)
end
type size_t = Size_t.t
let size_t = Size_t.ctype
let size_t_opt = integer_opt (module Size_t)
module Uint_32_t_0 = struct
let zero = 0
let of_int x = x
let to_int x = x
let to_string = string_of_int
let pp ppf x = Format.fprintf ppf "%d" x
type t = int
let ctype = uint_32_t
end
module Uint_32_t = struct include Uint_32_t_0
let ctype_opt = integer_opt (module Uint_32_t_0)
end
module Uint_16_t_0 = struct
let zero = 0
let of_int x = x
let to_int x = x
let to_string = string_of_int
let pp ppf x = Format.fprintf ppf "%d" x
type t = int
let ctype = uint_16_t
end
module Uint_16_t = struct include Uint_16_t_0
let ctype_opt = integer_opt (module Uint_16_t_0)
end
module Bool_32 = struct
type t = bool
let t = bool
let ctype = bool_32
let ctype_opt = bool_32_opt
let zero = true
let pp = Format.pp_print_bool
end
module Int_32_t = struct
let zero = 0
let of_int x = x
let to_int x = x
let to_string = string_of_int
let pp ppf x = Format.fprintf ppf "%d" x
type t = int
let read = Int32.to_int
let write = Int32.of_int
let ctype = Ctypes.view ~read ~write Ctypes.int32_t
let ctype_opt = integer_opt' zero ctype
end
let int_32_t = Int_32_t.ctype
module Int_64_t = struct
let zero = 0
let of_int x = x
let to_int x = x
let to_string = string_of_int
let pp ppf x = Format.fprintf ppf "%d" x
type t = int
let read = Int64.to_int
let write = Int64.of_int
let ctype = Ctypes.view ~read ~write Ctypes.int64_t
let ctype_opt = integer_opt' zero ctype
end
let int_64_t = Int_64_t.ctype
module Uint_64_t = struct
let of_int = U64.of_int
let to_int = U64.to_int
let zero = of_int 0
let to_string = U64.to_string
let pp ppf x = Format.fprintf ppf "%s" (U64.to_string x)
type t = U64.t
let ctype = Ctypes.uint64_t
let ctype_opt = integer_opt' zero ctype
end
module type aliased = sig
type t
val zero: t
val ctype:t Ctypes.typ
val ctype_opt: t option Ctypes.typ
val of_int: int -> t
val to_int: t -> int
val to_string: t -> string
end
module type alias = sig
type x
type t = private x
val make: x -> t
val ctype: t Ctypes.typ
val ctype_opt: t option Ctypes.typ
val of_int : int -> t
val zero: t
val to_int: t -> int
val pp: Format.formatter -> t -> unit
end
module Alias(X:aliased): alias with type x := X.t = struct
type t = X.t
let make x = x
let zero = X.zero
let ctype = X.ctype
let ctype_opt = X.ctype_opt
let of_int = X.of_int
let to_int = X.to_int
let pp ppf x = Format.fprintf ppf "%s" (X.to_string x)
end
module Float = struct
type t = float
let pp = Format.pp_print_float
let ctype = Ctypes.float
end
module Double = struct
type t = float
let pp = Format.pp_print_float
let ctype = Ctypes.double
end
module Void = struct
type t = void
let ctype = Ctypes.void
let pp = Vk__helpers.Pp.abstract
end
module Cametallayer = struct
type m
type t = m Ctypes.structure
let ctype : t Ctypes.typ = Ctypes.structure "CAmetallayer"
let pp = Vk__helpers.Pp.abstract
end
type cametallayer = Cametallayer.t Ctypes.structure
| null | https://raw.githubusercontent.com/Octachron/olivine/e93df595ad1e8bad5a8af689bac7d150753ab9fb/lib_aux/vk__builtin__types.ml | ocaml | module U32 = Unsigned.UInt32
module U64 = Unsigned.UInt64
module U16 = Unsigned.UInt16
module S = Unsigned.Size_t
module type intlike = sig type t val zero: t val ctype: t Ctypes.typ end
let integer_opt (type a) (module I:intlike with type t = a) =
let read x = if x = I.zero then None else Some x in
let write = function None -> I.zero | Some x -> x in
Ctypes.view ~read ~write I.ctype
let integer_opt' zero ctype =
let read x = if x = zero then None else Some x in
let write = function None -> zero | Some x -> x in
Ctypes.view ~read ~write ctype
type uint_64_t = U64.t
let uint_64_t = Ctypes.uint64_t
* { 2 Uint32 special handling }
ASSUME 64 bits
let uint_32_t = Ctypes.view ~read:(U32.to_int) ~write:(U32.of_int)
Ctypes.uint32_t
ASSUME 64 bits
let uint_16_t = Ctypes.view ~read:(U16.to_int) ~write:(U16.of_int)
Ctypes.uint16_t
type void = unit
let void = Ctypes.void
type int_32_t = int
type bool_32 = bool
module Int = struct
type t = int
let zero = 0
let pp = Format.pp_print_int
let ctype = Ctypes.int
end
module U8 = Unsigned.UInt8
module Uint_8_t = struct
open U8
type t = U8.t
let ctype = Ctypes.uint8_t
let zero = of_int 0
let of_int = of_int
let to_int = to_int
let to_string = to_string
let pp ppf x = Format.pp_print_string ppf (to_string x)
end
type uint_8_t = U8.t
let uint_8_t = Ctypes.uint8_t
let bool_32 =
let true' = U32.of_int Vk__Const.true'
and false' = U32.of_int Vk__Const.false' in
Ctypes.view
~read:( (=) true' )
~write:( fun x -> if x then true' else false' )
Ctypes.uint32_t
let bool_32_opt =
let true' = U32.of_int Vk__Const.true'
and false' = U32.of_int Vk__Const.false' in
Ctypes.view
~read:( fun x -> if U32.zero = x then None else if x = true' then Some true else Some false )
~write:(function None -> U32.zero | Some x -> if x then true' else false' )
Ctypes.uint32_t
let bool = bool_32
let integer_opt Ctypes.uint64_t ( module U64 )
module Size_t_0 = struct
let of_int = S.of_int
let to_int = S.to_int
let zero = of_int 0
let to_string = S.to_string
let pp ppf x = Format.fprintf ppf "%s" (S.to_string x)
type t = S.t
let ctype = Ctypes.size_t
end
module Size_t = struct
include Size_t_0
let ctype_opt = integer_opt (module Size_t_0)
end
type size_t = Size_t.t
let size_t = Size_t.ctype
let size_t_opt = integer_opt (module Size_t)
module Uint_32_t_0 = struct
let zero = 0
let of_int x = x
let to_int x = x
let to_string = string_of_int
let pp ppf x = Format.fprintf ppf "%d" x
type t = int
let ctype = uint_32_t
end
module Uint_32_t = struct include Uint_32_t_0
let ctype_opt = integer_opt (module Uint_32_t_0)
end
module Uint_16_t_0 = struct
let zero = 0
let of_int x = x
let to_int x = x
let to_string = string_of_int
let pp ppf x = Format.fprintf ppf "%d" x
type t = int
let ctype = uint_16_t
end
module Uint_16_t = struct include Uint_16_t_0
let ctype_opt = integer_opt (module Uint_16_t_0)
end
module Bool_32 = struct
type t = bool
let t = bool
let ctype = bool_32
let ctype_opt = bool_32_opt
let zero = true
let pp = Format.pp_print_bool
end
module Int_32_t = struct
let zero = 0
let of_int x = x
let to_int x = x
let to_string = string_of_int
let pp ppf x = Format.fprintf ppf "%d" x
type t = int
let read = Int32.to_int
let write = Int32.of_int
let ctype = Ctypes.view ~read ~write Ctypes.int32_t
let ctype_opt = integer_opt' zero ctype
end
let int_32_t = Int_32_t.ctype
module Int_64_t = struct
let zero = 0
let of_int x = x
let to_int x = x
let to_string = string_of_int
let pp ppf x = Format.fprintf ppf "%d" x
type t = int
let read = Int64.to_int
let write = Int64.of_int
let ctype = Ctypes.view ~read ~write Ctypes.int64_t
let ctype_opt = integer_opt' zero ctype
end
let int_64_t = Int_64_t.ctype
module Uint_64_t = struct
let of_int = U64.of_int
let to_int = U64.to_int
let zero = of_int 0
let to_string = U64.to_string
let pp ppf x = Format.fprintf ppf "%s" (U64.to_string x)
type t = U64.t
let ctype = Ctypes.uint64_t
let ctype_opt = integer_opt' zero ctype
end
module type aliased = sig
type t
val zero: t
val ctype:t Ctypes.typ
val ctype_opt: t option Ctypes.typ
val of_int: int -> t
val to_int: t -> int
val to_string: t -> string
end
module type alias = sig
type x
type t = private x
val make: x -> t
val ctype: t Ctypes.typ
val ctype_opt: t option Ctypes.typ
val of_int : int -> t
val zero: t
val to_int: t -> int
val pp: Format.formatter -> t -> unit
end
module Alias(X:aliased): alias with type x := X.t = struct
type t = X.t
let make x = x
let zero = X.zero
let ctype = X.ctype
let ctype_opt = X.ctype_opt
let of_int = X.of_int
let to_int = X.to_int
let pp ppf x = Format.fprintf ppf "%s" (X.to_string x)
end
module Float = struct
type t = float
let pp = Format.pp_print_float
let ctype = Ctypes.float
end
module Double = struct
type t = float
let pp = Format.pp_print_float
let ctype = Ctypes.double
end
module Void = struct
type t = void
let ctype = Ctypes.void
let pp = Vk__helpers.Pp.abstract
end
module Cametallayer = struct
type m
type t = m Ctypes.structure
let ctype : t Ctypes.typ = Ctypes.structure "CAmetallayer"
let pp = Vk__helpers.Pp.abstract
end
type cametallayer = Cametallayer.t Ctypes.structure
| |
2b7546494d009db567c24eedba690363a680c92405e093e2c5133bb6f272d59f | patrickt/possession | Exts.hs | # LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE StandaloneDeriving #
# OPTIONS_GHC -fno - warn - orphans #
module Dhall.Exts where
import Data.Monoid
import Data.Semigroup (Max (..))
import Dhall
deriving newtype instance ToDhall a => ToDhall (Sum a)
deriving newtype instance ToDhall a => ToDhall (Max a)
deriving newtype instance FromDhall a => FromDhall (Sum a)
deriving newtype instance FromDhall a => FromDhall (Max a)
| null | https://raw.githubusercontent.com/patrickt/possession/f771bc755dfee7a94ba77310d6f5c69b82e7bfce/src/Dhall/Exts.hs | haskell | # LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE StandaloneDeriving #
# OPTIONS_GHC -fno - warn - orphans #
module Dhall.Exts where
import Data.Monoid
import Data.Semigroup (Max (..))
import Dhall
deriving newtype instance ToDhall a => ToDhall (Sum a)
deriving newtype instance ToDhall a => ToDhall (Max a)
deriving newtype instance FromDhall a => FromDhall (Sum a)
deriving newtype instance FromDhall a => FromDhall (Max a)
| |
6e4e7991ca7ac0992adeeeb6c02bfd7a0f3502ac20b1a8848303406f65afd205 | bsansouci/bsb-native | type_IncompatibleType_1.ml | if 123 then "asd" else "a"
| null | https://raw.githubusercontent.com/bsansouci/bsb-native/9a89457783d6e80deb0fba9ca7372c10a768a9ea/vendor/BetterErrors/tests/type_IncompatibleType/type_IncompatibleType_1.ml | ocaml | if 123 then "asd" else "a"
| |
b597635a1d613e57a0f99c677e213b995e6be83a845f11571f4504caa94a40aa | ianmbloom/gudni | Serialize.hs | # LANGUAGE TemplateHaskell #
# LANGUAGE FlexibleContexts #
-----------------------------------------------------------------------------
-- |
Module : Graphics . Gudni . Raster . Serialize
Copyright : ( c ) 2019
-- License : BSD-style (see the file libraries/base/LICENSE)
--
Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- Functions used by TraverseShapeTree to serialize a scene into data buffers that can be parsed by
-- the rasterizer kernel and building a partitioned tree of tiles.
module Graphics.Gudni.Raster.Serialize
( GeometryPile(..)
, GeometryState(..)
, geoReorderTable
, geoMaxStrandSize
, geoMaxStrandsPerTile
, geoGeometryPile
, geoTileTree
, geoCanvasSize
, geoRandomField
, GeometryMonad(..)
, runGeometryMonad
, resetGeometryMonad
, SubstanceMonad(..)
, execSubstanceMonad
, buildOverScene
, SubstanceState(..)
, suSubstanceId
, suTokenMap
, suCurrentPictureUsage
, suPictureUsages
, suPictureMapping
, suBackgroundColor
, suSubstancePile
, outputGeometryState
, outputSubstanceState
)
where
import Graphics.Gudni.Figure
import Graphics.Gudni.OpenCL.Rasterizer
import Graphics.Gudni.Raster.Constants
import Graphics.Gudni.Raster.ShapeInfo
import Graphics.Gudni.Raster.Types
import Graphics.Gudni.Raster.Enclosure
import Graphics.Gudni.Raster.TraverseShapeTree
import Graphics.Gudni.Raster.ReorderTable
import Graphics.Gudni.Raster.TileTree
import Graphics.Gudni.Util.RandomField
import Graphics.Gudni.Util.Pile
import Graphics.Gudni.Util.Util
import Graphics.Gudni.Util.Debug
import Control.Monad
import Control.Monad.State
import Control.Lens
import Foreign.Storable
import Control.DeepSeq
import qualified Data.Map as M
import Data.Maybe
import Data.List
import Control.Parallel.Strategies
| GeometryPile is just a synonym for a bytepile that stores serialized geometry data for the scene .
type GeometryPile = BytePile
-- add the enclosure data to the geometry pile
appendGeoRef :: Enclosure
-> StateT GeometryPile IO GeoReference
appendGeoRef enclosure =
do geometryPile <- get
(pile', offsetShapeStartBytes) <- liftIO $ addToBytePile "appendGeoRef" geometryPile enclosure
put pile'
the size of the shape data is measured in 64 bit chunks so that a short int can address more data .
let offsetShapeStart = Ref $ fromIntegral offsetShapeStartBytes `div` fromIntegral (sizeOf (undefined :: Point2 SubSpace) * 2)
return $ GeoRef offsetShapeStart (enclosureNumStrands enclosure)
| Append a shape to the Geometry pile and return a ShapeEntry that contains a reference to its geometry data , the strand count
-- and a the bounding box. This can be added to the tiletree to determine in which tiles the shape is present.
makeShapeEntry :: BoundingBox
-> Enclosure
-> StateT GeometryPile IO ShapeEntry
makeShapeEntry box enclosure =
do -- append the geometric enclosure data to the heap and return a reference
geoRef <- appendGeoRef enclosure
return $ ShapeEntry geoRef (enclosureNumStrands enclosure) box
| Return True if a BoundingBox is outside of the canvas .
excludeBox :: Point2 SubSpace
-> BoundingBox
-> Bool
excludeBox canvasSize box =
box ^. leftSide >= canvasSize ^. pX
|| box ^. topSide >= canvasSize ^. pY
|| box ^. rightSide <= 0
|| box ^. bottomSide <= 0
-- | A constructor for holding the state of serializing the geometry from a scene.
data GeometryState = GeometryState
{ _geoReorderTable :: ReorderTable
, _geoMaxStrandSize :: Int
, _geoMaxStrandsPerTile :: NumStrands
, _geoGeometryPile :: GeometryPile
, _geoTileTree :: TileTree
, _geoCanvasSize :: Point2 SubSpace
, _geoRandomField :: RandomField
}
makeLenses ''GeometryState
| A monad for passing a GeometryState
type GeometryMonad m = StateT GeometryState m
| Function for initializing the geometry monad and running a function inside of it∘
-- The geoTileTree and geoCanvas size must be defined later.
runGeometryMonad :: (MonadIO m)
=> RasterSpec
-> RandomField
-> StateT GeometryState m t
-> m t
runGeometryMonad rasterSpec randomField mf =
do geometryPile <- liftIO (newPileSize iNITgEOMETRYpILEsIZE :: IO BytePile)
let reorderTable = buildReorderTable mAXsECTIONsIZE
let geometryState = GeometryState reorderTable mAXsECTIONsIZE (NumStrands . fromIntegral $ rasterSpec ^. specMaxStrandsPerTile) geometryPile undefined undefined randomField
evalStateT mf geometryState
-- | Reuse the geometry monad without reallocating the geometry pile.
resetGeometryMonad :: (MonadIO m)
=> StateT GeometryState m ()
resetGeometryMonad = do geoGeometryPile %= resetPile
-- | Build a shape with the supplied metadata and representation type.
makeShape :: SubstanceId
-> Substance a
-> Compound
-> rep
-> Shape rep
makeShape substanceId substance combineType rep = Shape (ShapeInfo (substanceToSubstanceType substance) combineType substanceId) rep
| On each shape in the shape tree run add the appropriate data to the appropriate buffers and the .
onShape :: MonadIO m
=> (Compound -> ShapeEntry -> Shape ShapeEntry)
-> Compound
-> Transformer SubSpace
-> [Outline SubSpace]
-> GeometryMonad m ()
onShape wrapShape combineType transformer outlines =
do let transformedOutlines = fmap (applyTransformer transformer) $ Group outlines
boundingBox = boxOf transformedOutlines
canvasSize <- use geoCanvasSize
if excludeBox canvasSize boundingBox
then return ()
else do -- Table used to convert strands of coordinates to trees.
reorderTable <- use geoReorderTable
-- Maximum size of a strand.
maxStrandSize <- use geoMaxStrandSize
-- Maximum strands per tile
maxStrandsPerTile <- use geoMaxStrandsPerTile
-- Build an enclosure from the outlines.
let enclosure = enclose reorderTable maxStrandSize (unGroup transformedOutlines)
-- Get the geometry pile.
geometryPile <- use geoGeometryPile
-- Add the shape to the geometry pile.
(entry, geometryPile') <- liftIO $ runStateT (makeShapeEntry boundingBox enclosure) $ geometryPile
-- Put the geometry pile back in the monad.
geoGeometryPile .= geometryPile'
-- Get the tiletree.
tileTree <- use geoTileTree
-- Add the shape to the tile tree.
geoTileTree .= addShapeToTree maxStrandsPerTile tileTree (wrapShape combineType entry)
-- | Constructor for holding the state of serializing substance information from the scene.
data SubstanceState token s = SubstanceState
{ -- | The current substance id incremented with each new substance.
_suSubstanceId :: SubstanceId
-- | A map from tokens to substance id for later identification of shapes.
The token is any type with an instance of that the client program can use to identify shapes in the scene .
, _suTokenMap :: M.Map token SubstanceId
-- | A the latest id for a usage of a picture source, incremented with each new usage of a picture by a new substance from the scene.
, _suCurrentPictureUsage :: Int
-- | A list of picture references collected from the scene.
, _suPictureUsages :: [PictureUsage String s]
-- | The picture memory objects for the scene.
, _suPictureMapping :: PictureMap
-- | The background color for the scene.
, _suBackgroundColor :: Color
-- | A pile of every substance collected from the scene.
, _suSubstancePile :: Pile SubstanceInfo
}
makeLenses ''SubstanceState
instance (NFData token, NFData s) => NFData (SubstanceState token s) where
rnf (SubstanceState a b c d e f g) =
a `deepseq` b `deepseq` c `deepseq` d `deepseq` e `deepseq` f `deepseq` g `deepseq` ()
-- | A monad for serializing substance data from a scene.
type SubstanceMonad token s m = StateT (SubstanceState token s) m
| Function for executing a new SubstanceMonad
execSubstanceMonad :: (MonadIO m, Storable (PictureUsage PictureMemoryReference s))
=> PictureMap
-> SubstanceMonad token s m a
-> m (SubstanceState token s)
execSubstanceMonad pictureMap mf =
do substancePile <- liftIO $ newPile
execStateT mf (SubstanceState (SubstanceId 0) M.empty 0 [] pictureMap clearBlack substancePile)
-- | For each shape in the shapeTree add the serialize the substance metadata and serialize the compound subtree.
onSubstance :: (Storable (PictureUsage PictureMemoryReference (SpaceOf item)),
Space (SpaceOf item),
MonadIO m,
Ord token)
=> ((Compound -> ShapeEntry -> Shape ShapeEntry)
-> Compound -> Transformer (SpaceOf item) -> item -> GeometryMonad m ())
-> ()
-> Transformer (SpaceOf item)
-> SRep token (STree Compound item)
-> SubstanceMonad token (SpaceOf item) (GeometryMonad m) ()
onSubstance onShape () transformer (SRep token substance subTree) =
do -- Get the current substanceId
substanceId <- use suSubstanceId
-- Increment it for the next shape.
suSubstanceId += 1
-- Get the token map.
tokenMap <- use suTokenMap
-- Store the token in the map.
suTokenMap .= M.insert token substanceId tokenMap
-- Depending on the substance of the shape take appropriate actions.
colorOrPicture <-
case substance of
Texture namedTexture -> do
name <- case namedTexture of
NewTexture name image ->
do suPictureMapping %= M.insert name image
return name
SharedTexture name -> return name
-- Transformation information is transfered to the texture here.
let newUsage = applyTransformer transformer $
PictureUsage { pictSource = name
, pictScale = 1
, pictTranslate = zeroPoint
}
-- Add the new usage of the picture to the pile.
suPictureUsages %= (++ [newUsage])
-- Get the current usage id.
current <- use suCurrentPictureUsage
-- Increment for the next usage.
suCurrentPictureUsage += 1
-- return a Substance with the right usage id.
return . Texture . fromIntegral $ current
Solid color -> return . Solid $ color
-- Add the new substance to the pile.
addToPileState suSubstancePile (SubstanceInfo colorOrPicture)
-- Make a closure to pass to the onShape monad with the metadata for the shape.
let wrapShape = makeShape substanceId substance
Traverse the compound tree and serialize each component shape .
lift $ traverseCompoundTree defaultValue transformer (onShape wrapShape) subTree
buildOverScene :: (MonadIO m, Ord token)
=> Scene token
-> SubstanceMonad token SubSpace (GeometryMonad m) ()
buildOverScene scene =
do -- Move the backgound color into the serializer state.
suBackgroundColor .= scene ^. sceneBackgroundColor
Serialize the shape tree .
traverseShapeTree (onSubstance onShape) $ scene ^. sceneShapeTree
outputGeometryState :: GeometryState -> IO ()
outputGeometryState state =
do putStrLn "---------------- geoGeometryPile -----------------------"
putStr =<< fmap unlines (bytePileToGeometry . view geoGeometryPile $ state)
putStrLn "---------------- ReorderTable --------------------------"
putStrLn . show . view geoReorderTable $ state
putStrLn . show . view geoMaxStrandSize $ state
putStrLn . show . view geoTileTree $ state
putStrLn . show . view geoCanvasSize $ state
putStrLn . show . view geoRandomField $ state
outputSubstanceState :: (Show s, Storable (PictureUsage PictureMemoryReference s), Show token)
=> SubstanceState token s -> IO ()
outputSubstanceState state =
do putStrLn $ "suSubstanceId " ++ (show . view suSubstanceId $ state)
putStrLn $ "suTokenMap " ++ (show . view suTokenMap $ state)
putStrLn $ "suCurrentPictureRef" ++ (show . view suCurrentPictureUsage $ state)
putStrLn $ "suPictureMems " ++ (show . view suPictureMapping $ state)
putStrLn $ "suBackgroundColor " ++ (show . view suBackgroundColor $ state)
putStrLn "---------------- suPictureUsages -----------------------"
putStrLn $ show (view suPictureUsages $ state)
putStrLn "---------------- suSubstancePile -----------------------"
putStrList =<< (pileToList . view suSubstancePile $ state)
| null | https://raw.githubusercontent.com/ianmbloom/gudni/fa69f1bf08c194effca05753afe5455ebae51234/src/Graphics/Gudni/Raster/Serialize.hs | haskell | ---------------------------------------------------------------------------
|
License : BSD-style (see the file libraries/base/LICENSE)
Stability : experimental
Portability : portable
Functions used by TraverseShapeTree to serialize a scene into data buffers that can be parsed by
the rasterizer kernel and building a partitioned tree of tiles.
add the enclosure data to the geometry pile
and a the bounding box. This can be added to the tiletree to determine in which tiles the shape is present.
append the geometric enclosure data to the heap and return a reference
| A constructor for holding the state of serializing the geometry from a scene.
The geoTileTree and geoCanvas size must be defined later.
| Reuse the geometry monad without reallocating the geometry pile.
| Build a shape with the supplied metadata and representation type.
Table used to convert strands of coordinates to trees.
Maximum size of a strand.
Maximum strands per tile
Build an enclosure from the outlines.
Get the geometry pile.
Add the shape to the geometry pile.
Put the geometry pile back in the monad.
Get the tiletree.
Add the shape to the tile tree.
| Constructor for holding the state of serializing substance information from the scene.
| The current substance id incremented with each new substance.
| A map from tokens to substance id for later identification of shapes.
| A the latest id for a usage of a picture source, incremented with each new usage of a picture by a new substance from the scene.
| A list of picture references collected from the scene.
| The picture memory objects for the scene.
| The background color for the scene.
| A pile of every substance collected from the scene.
| A monad for serializing substance data from a scene.
| For each shape in the shapeTree add the serialize the substance metadata and serialize the compound subtree.
Get the current substanceId
Increment it for the next shape.
Get the token map.
Store the token in the map.
Depending on the substance of the shape take appropriate actions.
Transformation information is transfered to the texture here.
Add the new usage of the picture to the pile.
Get the current usage id.
Increment for the next usage.
return a Substance with the right usage id.
Add the new substance to the pile.
Make a closure to pass to the onShape monad with the metadata for the shape.
Move the backgound color into the serializer state. | # LANGUAGE TemplateHaskell #
# LANGUAGE FlexibleContexts #
Module : Graphics . Gudni . Raster . Serialize
Copyright : ( c ) 2019
Maintainer :
module Graphics.Gudni.Raster.Serialize
( GeometryPile(..)
, GeometryState(..)
, geoReorderTable
, geoMaxStrandSize
, geoMaxStrandsPerTile
, geoGeometryPile
, geoTileTree
, geoCanvasSize
, geoRandomField
, GeometryMonad(..)
, runGeometryMonad
, resetGeometryMonad
, SubstanceMonad(..)
, execSubstanceMonad
, buildOverScene
, SubstanceState(..)
, suSubstanceId
, suTokenMap
, suCurrentPictureUsage
, suPictureUsages
, suPictureMapping
, suBackgroundColor
, suSubstancePile
, outputGeometryState
, outputSubstanceState
)
where
import Graphics.Gudni.Figure
import Graphics.Gudni.OpenCL.Rasterizer
import Graphics.Gudni.Raster.Constants
import Graphics.Gudni.Raster.ShapeInfo
import Graphics.Gudni.Raster.Types
import Graphics.Gudni.Raster.Enclosure
import Graphics.Gudni.Raster.TraverseShapeTree
import Graphics.Gudni.Raster.ReorderTable
import Graphics.Gudni.Raster.TileTree
import Graphics.Gudni.Util.RandomField
import Graphics.Gudni.Util.Pile
import Graphics.Gudni.Util.Util
import Graphics.Gudni.Util.Debug
import Control.Monad
import Control.Monad.State
import Control.Lens
import Foreign.Storable
import Control.DeepSeq
import qualified Data.Map as M
import Data.Maybe
import Data.List
import Control.Parallel.Strategies
| GeometryPile is just a synonym for a bytepile that stores serialized geometry data for the scene .
type GeometryPile = BytePile
appendGeoRef :: Enclosure
-> StateT GeometryPile IO GeoReference
appendGeoRef enclosure =
do geometryPile <- get
(pile', offsetShapeStartBytes) <- liftIO $ addToBytePile "appendGeoRef" geometryPile enclosure
put pile'
the size of the shape data is measured in 64 bit chunks so that a short int can address more data .
let offsetShapeStart = Ref $ fromIntegral offsetShapeStartBytes `div` fromIntegral (sizeOf (undefined :: Point2 SubSpace) * 2)
return $ GeoRef offsetShapeStart (enclosureNumStrands enclosure)
| Append a shape to the Geometry pile and return a ShapeEntry that contains a reference to its geometry data , the strand count
makeShapeEntry :: BoundingBox
-> Enclosure
-> StateT GeometryPile IO ShapeEntry
makeShapeEntry box enclosure =
geoRef <- appendGeoRef enclosure
return $ ShapeEntry geoRef (enclosureNumStrands enclosure) box
| Return True if a BoundingBox is outside of the canvas .
excludeBox :: Point2 SubSpace
-> BoundingBox
-> Bool
excludeBox canvasSize box =
box ^. leftSide >= canvasSize ^. pX
|| box ^. topSide >= canvasSize ^. pY
|| box ^. rightSide <= 0
|| box ^. bottomSide <= 0
data GeometryState = GeometryState
{ _geoReorderTable :: ReorderTable
, _geoMaxStrandSize :: Int
, _geoMaxStrandsPerTile :: NumStrands
, _geoGeometryPile :: GeometryPile
, _geoTileTree :: TileTree
, _geoCanvasSize :: Point2 SubSpace
, _geoRandomField :: RandomField
}
makeLenses ''GeometryState
| A monad for passing a GeometryState
type GeometryMonad m = StateT GeometryState m
| Function for initializing the geometry monad and running a function inside of it∘
runGeometryMonad :: (MonadIO m)
=> RasterSpec
-> RandomField
-> StateT GeometryState m t
-> m t
runGeometryMonad rasterSpec randomField mf =
do geometryPile <- liftIO (newPileSize iNITgEOMETRYpILEsIZE :: IO BytePile)
let reorderTable = buildReorderTable mAXsECTIONsIZE
let geometryState = GeometryState reorderTable mAXsECTIONsIZE (NumStrands . fromIntegral $ rasterSpec ^. specMaxStrandsPerTile) geometryPile undefined undefined randomField
evalStateT mf geometryState
resetGeometryMonad :: (MonadIO m)
=> StateT GeometryState m ()
resetGeometryMonad = do geoGeometryPile %= resetPile
makeShape :: SubstanceId
-> Substance a
-> Compound
-> rep
-> Shape rep
makeShape substanceId substance combineType rep = Shape (ShapeInfo (substanceToSubstanceType substance) combineType substanceId) rep
| On each shape in the shape tree run add the appropriate data to the appropriate buffers and the .
onShape :: MonadIO m
=> (Compound -> ShapeEntry -> Shape ShapeEntry)
-> Compound
-> Transformer SubSpace
-> [Outline SubSpace]
-> GeometryMonad m ()
onShape wrapShape combineType transformer outlines =
do let transformedOutlines = fmap (applyTransformer transformer) $ Group outlines
boundingBox = boxOf transformedOutlines
canvasSize <- use geoCanvasSize
if excludeBox canvasSize boundingBox
then return ()
reorderTable <- use geoReorderTable
maxStrandSize <- use geoMaxStrandSize
maxStrandsPerTile <- use geoMaxStrandsPerTile
let enclosure = enclose reorderTable maxStrandSize (unGroup transformedOutlines)
geometryPile <- use geoGeometryPile
(entry, geometryPile') <- liftIO $ runStateT (makeShapeEntry boundingBox enclosure) $ geometryPile
geoGeometryPile .= geometryPile'
tileTree <- use geoTileTree
geoTileTree .= addShapeToTree maxStrandsPerTile tileTree (wrapShape combineType entry)
data SubstanceState token s = SubstanceState
_suSubstanceId :: SubstanceId
The token is any type with an instance of that the client program can use to identify shapes in the scene .
, _suTokenMap :: M.Map token SubstanceId
, _suCurrentPictureUsage :: Int
, _suPictureUsages :: [PictureUsage String s]
, _suPictureMapping :: PictureMap
, _suBackgroundColor :: Color
, _suSubstancePile :: Pile SubstanceInfo
}
makeLenses ''SubstanceState
instance (NFData token, NFData s) => NFData (SubstanceState token s) where
rnf (SubstanceState a b c d e f g) =
a `deepseq` b `deepseq` c `deepseq` d `deepseq` e `deepseq` f `deepseq` g `deepseq` ()
type SubstanceMonad token s m = StateT (SubstanceState token s) m
| Function for executing a new SubstanceMonad
execSubstanceMonad :: (MonadIO m, Storable (PictureUsage PictureMemoryReference s))
=> PictureMap
-> SubstanceMonad token s m a
-> m (SubstanceState token s)
execSubstanceMonad pictureMap mf =
do substancePile <- liftIO $ newPile
execStateT mf (SubstanceState (SubstanceId 0) M.empty 0 [] pictureMap clearBlack substancePile)
-- | Serialize one substance (an 'SRep'): allocate a fresh SubstanceId,
-- register the caller's token, record the substance's color or texture,
-- and then serialize every shape in its compound subtree.
-- NOTE(review): the `do` keyword after `=` was missing in the extracted
-- source; without it the sequence of monadic binds cannot parse.
onSubstance :: (Storable (PictureUsage PictureMemoryReference (SpaceOf item)),
                Space (SpaceOf item),
                MonadIO m,
                Ord token)
            => ((Compound -> ShapeEntry -> Shape ShapeEntry)
                -> Compound -> Transformer (SpaceOf item) -> item -> GeometryMonad m ())
            -> ()
            -> Transformer (SpaceOf item)
            -> SRep token (STree Compound item)
            -> SubstanceMonad token (SpaceOf item) (GeometryMonad m) ()
onSubstance onShape () transformer (SRep token substance subTree) =
    do -- Allocate the next substance id.
       substanceId <- use suSubstanceId
       suSubstanceId += 1
       -- Remember which external token maps to the new id.
       tokenMap <- use suTokenMap
       suTokenMap .= M.insert token substanceId tokenMap
       colorOrPicture <-
           case substance of
               Texture namedTexture ->
                   do name <- case namedTexture of
                                  NewTexture name image ->
                                      -- Register a newly supplied image under its name.
                                      do suPictureMapping %= M.insert name image
                                         return name
                                  SharedTexture name -> return name
                      -- Record the transformed usage of the picture and replace
                      -- the texture by its index in the usage list.
                      let newUsage = applyTransformer transformer $
                                         PictureUsage { pictSource = name
                                                      , pictScale = 1
                                                      , pictTranslate = zeroPoint
                                                      }
                      suPictureUsages %= (++ [newUsage])
                      current <- use suCurrentPictureUsage
                      suCurrentPictureUsage += 1
                      return . Texture . fromIntegral $ current
               Solid color -> return . Solid $ color
       addToPileState suSubstancePile (SubstanceInfo colorOrPicture)
       let wrapShape = makeShape substanceId substance
       -- Traverse the compound tree and serialize each component shape.
       lift $ traverseCompoundTree defaultValue transformer (onShape wrapShape) subTree
-- | Serialize an entire scene into the substance/geometry state.
-- NOTE(review): the `do` keyword after `=` was missing in the extracted
-- source; the two monadic statements below require do-notation.
buildOverScene :: (MonadIO m, Ord token)
               => Scene token
               -> SubstanceMonad token SubSpace (GeometryMonad m) ()
buildOverScene scene =
    do -- Remember the scene's background color in the substance state.
       suBackgroundColor .= scene ^. sceneBackgroundColor
       -- Serialize the shape tree.
       traverseShapeTree (onSubstance onShape) $ scene ^. sceneShapeTree
-- | Dump the contents of a 'GeometryState' to stdout for debugging.
outputGeometryState :: GeometryState -> IO ()
outputGeometryState state =
    do putStrLn "---------------- geoGeometryPile -----------------------"
       geometryLines <- bytePileToGeometry (view geoGeometryPile state)
       putStr (unlines geometryLines)
       putStrLn "---------------- ReorderTable --------------------------"
       print (view geoReorderTable state)
       print (view geoMaxStrandSize state)
       print (view geoTileTree state)
       print (view geoCanvasSize state)
       print (view geoRandomField state)
-- | Dump the contents of a 'SubstanceState' to stdout for debugging.
-- Labels are padded so the values line up in one column.
outputSubstanceState :: (Show s, Storable (PictureUsage PictureMemoryReference s), Show token)
                     => SubstanceState token s -> IO ()
outputSubstanceState state =
  do putStrLn $ "suSubstanceId      " ++ (show . view suSubstanceId $ state)
     putStrLn $ "suTokenMap         " ++ (show . view suTokenMap $ state)
     putStrLn $ "suCurrentPictureRef" ++ (show . view suCurrentPictureUsage $ state)
     putStrLn $ "suPictureMems      " ++ (show . view suPictureMapping $ state)
     putStrLn $ "suBackgroundColor  " ++ (show . view suBackgroundColor $ state)
     putStrLn "---------------- suPictureUsages -----------------------"
     putStrLn $ show (view suPictureUsages $ state)
     putStrLn "---------------- suSubstancePile -----------------------"
     -- putStrList is a project helper; the pile is converted to a list first.
     putStrList =<< (pileToList . view suSubstancePile $ state)
|
3e9f0e39703d15a1696e3c8cf700742245a3e65de6aef25ba0f4c6714a78ca19 | rurban/clisp | gdbm.lisp | Module for GDBM / CLISP
;; </>
;; Copyright (C) 2007 < >
;; Copyright (C) 2007-2008, 2018 < >
;; This is Free Software, distributed under the GNU GPL v2+
;; make sure there are no FFI forms in this file,
;; otherwise gdbm.c will get overwritten on compilation.
;; Public interface of the GDBM binding.
(defpackage #:gdbm
  (:use #:lisp)
  (:documentation
   "GDBM - The GNU database manager - </>")
  (:export
   ;; handle type, its readers, and the error condition
   #:gdbm #:gdbm-p #:gdbm-version #:gdbm-path
   #:gdbm-error #:gdbm-error-message #:gdbm-error-code
   ;; opening, closing, and iteration helpers
   #:gdbm-open #:gdbm-open-p #:gdbm-close #:with-open-db #:do-db
   ;; record-level operations
   #:gdbm-store #:gdbm-fetch #:gdbm-delete #:gdbm-exists
   #:gdbm-firstkey #:gdbm-nextkey
   ;; whole-database operations
   #:gdbm-file-size #:gdbm-reorganize #:gdbm-sync #:gdbm-opt #:gdbm-count))
(in-package "GDBM")
;; Advertise the module on *FEATURES* and via old-style PROVIDE.
(pushnew :gdbm *features*)
(provide "gdbm")
;; Register the package as a system package (CLISP-specific list).
(pushnew "GDBM" custom:*system-package-list* :test #'string=)
;; Link the package to its section in the implementation notes
;; (sys::impnotes is a CLISP-internal documentation key).
(setf (documentation (find-package "GDBM") 'sys::impnotes) "gdbm")
;; keep this definition in sync with check_gdbm in gdbm.c
;; Lisp-side handle for an open GDBM database.
(defstruct (gdbm (:constructor make-gdbm (dbf path key-type value-type)))
  dbf          ; underlying database object; non-NIL while open (see GDBM-OPEN-P)
  path         ; file the database was opened from
  key-type     ; default type for converting keys -- TODO confirm against gdbm.c
  value-type)  ; default type for converting values -- TODO confirm against gdbm.c
;; GDBM-OPT is also a place: (SETF (GDBM-OPT db option) value)
;; expands into a call to the internal %SET-GDBM-OPT.
(defsetf gdbm-opt %set-gdbm-opt)
;; A handle counts as open while its DBF slot is still non-NIL.
(defun gdbm-open-p (gdbm) (and (gdbm-dbf gdbm) t))
;; Condition signaled when a GDBM operation fails; carries the numeric
;; error code and its message text (presumably filled in by the C side
;; of the module -- see the gdbm.c note above).
(define-condition gdbm-error (simple-error)
  ((code :reader gdbm-error-code :initarg :code)       ; numeric error code
   (message :reader gdbm-error-message :initarg :message)) ; human-readable text
  ;; Reporting just prints the stored message.
  (:report (lambda (condition stream)
             (princ (gdbm-error-message condition) stream))))
;; Expands into a LOOP that walks the key chain: the first iteration calls
;; GDBM-FIRSTKEY, each later one GDBM-NEXTKEY on the previous key; iteration
;; stops when a key lookup returns NIL.  OPTIONS are spliced into both key
;; calls; BODY is spliced into the LOOP, so it may use LOOP keywords such as
;; :collect or :do.
(defmacro do-db ((key-var gdbm &rest options) &body body)
  "Iterate over the GDBM keys in LOOP."
  (let ((db (gensym "DO-DB")))  ; evaluate the GDBM form only once
    `(loop :with ,db = ,gdbm
       :for ,key-var = (gdbm:gdbm-firstkey ,db ,@options)
       :then (gdbm:gdbm-nextkey ,db ,key-var ,@options)
       :while ,key-var ,@body)))
;; BUG FIX: the previous expansion closed the database TWICE on normal
;; exit -- once inside MULTIPLE-VALUE-PROG1 and again in the
;; UNWIND-PROTECT cleanup.  UNWIND-PROTECT already returns all values of
;; its protected form, so a single close in the cleanup suffices and
;; runs on both normal and non-local exit.
(defmacro with-open-db ((db filename &rest options) &body body)
  "Open a GDBM database, execute BODY, ensure that the DB is closed."
  (multiple-value-bind (body-rest declarations) (system::parse-body body)
    `(let ((,db (gdbm-open ,filename ,@options)))
       (declare (read-only ,db) ,@declarations)
       (unwind-protect (progn ,@body-rest)
         (when ,db (gdbm-close ,db))))))
| null | https://raw.githubusercontent.com/rurban/clisp/75ed2995ff8f5364bcc18727cde9438cca4e7c2c/modules/gdbm/gdbm.lisp | lisp | </>
otherwise gdbm.c will get overwritten on compilation.
keep this definition in sync with check_gdbm in gdbm.c | Module for GDBM / CLISP
Copyright ( C ) 2007 < >
Copyright ( C ) 2007 - 2008 , 2018 < >
This is Free Software , distributed under the GNU GPL v2 +
make sure there are no FFI forms in this file ,
(defpackage #:gdbm
(:documentation
"GDBM - The GNU database manager - </>")
(:use #:lisp)
(:export #:gdbm #:gdbm-p #:gdbm-error #:gdbm-version #:gdbm-path
#:gdbm-error-message #:gdbm-error-code
#:gdbm-open #:gdbm-open-p #:gdbm-close #:do-db #:with-open-db
#:gdbm-store #:gdbm-fetch #:gdbm-delete #:gdbm-exists
#:gdbm-firstkey #:gdbm-nextkey #:gdbm-file-size
#:gdbm-reorganize #:gdbm-sync #:gdbm-opt #:gdbm-count))
(in-package "GDBM")
(pushnew :gdbm *features*)
(provide "gdbm")
(pushnew "GDBM" custom:*system-package-list* :test #'string=)
(setf (documentation (find-package "GDBM") 'sys::impnotes) "gdbm")
(defstruct (gdbm (:constructor make-gdbm (dbf path key-type value-type)))
dbf
path
key-type
value-type)
(defsetf gdbm-opt %set-gdbm-opt)
(defun gdbm-open-p (gdbm) (not (null (gdbm-dbf gdbm))))
(define-condition gdbm-error (simple-error)
((code :reader gdbm-error-code :initarg :code)
(message :reader gdbm-error-message :initarg :message))
(:report (lambda (condition stream)
(princ (gdbm-error-message condition) stream))))
(defmacro do-db ((key-var gdbm &rest options) &body body)
"Iterate over the GDBM keys in LOOP."
(let ((db (gensym "DO-DB")))
`(loop :with ,db = ,gdbm
:for ,key-var = (gdbm:gdbm-firstkey ,db ,@options)
:then (gdbm:gdbm-nextkey ,db ,key-var ,@options)
:while ,key-var ,@body)))
(defmacro with-open-db ((db filename &rest options) &body body)
"Open a GDBM database, execute BODY, ensure that the DB is closed."
(multiple-value-bind (body-rest declarations) (system::parse-body body)
`(let ((,db (gdbm-open ,filename ,@options)))
(declare (read-only ,db) ,@declarations)
(unwind-protect (multiple-value-prog1 (progn ,@body-rest)
(when ,db (gdbm-close ,db)))
(when ,db (gdbm-close ,db))))))
|
d9ccab41cb5433b8f2c408933d1e3c76077ac87cd9eda468ca5e9a49af8303b7 | Rober-t/apxr_run | flatlog.erl | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Apache License
%%% Version 2.0, January 2004
%%% /
%%%
%%% TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
%%%
%%% 1. Definitions.
%%%
%%% "License" shall mean the terms and conditions for use, reproduction,
%%% and distribution as defined by Sections 1 through 9 of this document.
%%%
" Licensor " shall mean the copyright owner or entity authorized by
%%% the copyright owner that is granting the License.
%%%
%%% "Legal Entity" shall mean the union of the acting entity and all
%%% other entities that control, are controlled by, or are under common
%%% control with that entity. For the purposes of this definition,
%%% "control" means (i) the power, direct or indirect, to cause the
%%% direction or management of such entity, whether by contract or
otherwise , or ( ii ) ownership of fifty percent ( 50 % ) or more of the
%%% outstanding shares, or (iii) beneficial ownership of such entity.
%%%
%%% "You" (or "Your") shall mean an individual or Legal Entity
%%% exercising permissions granted by this License.
%%%
%%% "Source" form shall mean the preferred form for making modifications,
%%% including but not limited to software source code, documentation
%%% source, and configuration files.
%%%
%%% "Object" form shall mean any form resulting from mechanical
%%% transformation or translation of a Source form, including but
%%% not limited to compiled object code, generated documentation,
%%% and conversions to other media types.
%%%
%%% "Work" shall mean the work of authorship, whether in Source or
%%% Object form, made available under the License, as indicated by a
%%% copyright notice that is included in or attached to the work
%%% (an example is provided in the Appendix below).
%%%
" Derivative Works " shall mean any work , whether in Source or Object
%%% form, that is based on (or derived from) the Work and for which the
%%% editorial revisions, annotations, elaborations, or other modifications
%%% represent, as a whole, an original work of authorship. For the purposes
%%% of this License, Derivative Works shall not include works that remain
%%% separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof .
%%%
%%% "Contribution" shall mean any work of authorship, including
%%% the original version of the Work and any modifications or additions
%%% to that Work or Derivative Works thereof, that is intentionally
%%% submitted to Licensor for inclusion in the Work by the copyright owner
%%% or by an individual or Legal Entity authorized to submit on behalf of
%%% the copyright owner. For the purposes of this definition, "submitted"
%%% means any form of electronic, verbal, or written communication sent
%%% to the Licensor or its representatives, including but not limited to
%%% communication on electronic mailing lists, source code control systems,
%%% and issue tracking systems that are managed by, or on behalf of, the
%%% Licensor for the purpose of discussing and improving the Work, but
%%% excluding communication that is conspicuously marked or otherwise
%%% designated in writing by the copyright owner as "Not a Contribution."
%%%
%%% "Contributor" shall mean Licensor and any individual or Legal Entity
%%% on behalf of whom a Contribution has been received by Licensor and
%%% subsequently incorporated within the Work.
%%%
2 . Grant of Copyright License . Subject to the terms and conditions of
%%% this License, each Contributor hereby grants to You a perpetual,
%%% worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce , prepare Derivative Works of ,
%%% publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form .
%%%
3 . Grant of Patent License . Subject to the terms and conditions of
%%% this License, each Contributor hereby grants to You a perpetual,
%%% worldwide, non-exclusive, no-charge, royalty-free, irrevocable
%%% (except as stated in this section) patent license to make, have made,
%%% use, offer to sell, sell, import, and otherwise transfer the Work,
%%% where such license applies only to those patent claims licensable
%%% by such Contributor that are necessarily infringed by their
) alone or by combination of their )
with the Work to which such ) was submitted . If You
%%% institute patent litigation against any entity (including a
%%% cross-claim or counterclaim in a lawsuit) alleging that the Work
%%% or a Contribution incorporated within the Work constitutes direct
%%% or contributory patent infringement, then any patent licenses
%%% granted to You under this License for that Work shall terminate
%%% as of the date such litigation is filed.
%%%
%%% 4. Redistribution. You may reproduce and distribute copies of the
%%% Work or Derivative Works thereof in any medium, with or without
%%% modifications, and in Source or Object form, provided that You
%%% meet the following conditions:
%%%
%%% (a) You must give any other recipients of the Work or
Derivative Works a copy of this License ; and
%%%
%%% (b) You must cause any modified files to carry prominent notices
%%% stating that You changed the files; and
%%%
( c ) You must retain , in the Source form of any Derivative Works
%%% that You distribute, all copyright, patent, trademark, and
%%% attribution notices from the Source form of the Work,
%%% excluding those notices that do not pertain to any part of
the Derivative Works ; and
%%%
%%% (d) If the Work includes a "NOTICE" text file as part of its
distribution , then any Derivative Works that You distribute must
%%% include a readable copy of the attribution notices contained
%%% within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works , in at least one
%%% of the following places: within a NOTICE text file distributed
as part of the Derivative Works ; within the Source form or
documentation , if provided along with the Derivative Works ; or ,
within a display generated by the Derivative Works , if and
wherever such third - party notices normally appear . The contents
%%% of the NOTICE file are for informational purposes only and
%%% do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute , alongside
%%% or as an addendum to the NOTICE text from the Work, provided
%%% that such additional attribution notices cannot be construed
%%% as modifying the License.
%%%
%%% You may add Your own copyright statement to Your modifications and
%%% may provide additional or different license terms and conditions
%%% for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole , provided Your use ,
%%% reproduction, and distribution of the Work otherwise complies with
%%% the conditions stated in this License.
%%%
%%% 5. Submission of Contributions. Unless You explicitly state otherwise,
%%% any Contribution intentionally submitted for inclusion in the Work
%%% by You to the Licensor shall be under the terms and conditions of
%%% this License, without any additional terms or conditions.
%%% Notwithstanding the above, nothing herein shall supersede or modify
%%% the terms of any separate license agreement you may have executed
%%% with Licensor regarding such Contributions.
%%%
%%% 6. Trademarks. This License does not grant permission to use the trade
%%% names, trademarks, service marks, or product names of the Licensor,
%%% except as required for reasonable and customary use in describing the
%%% origin of the Work and reproducing the content of the NOTICE file.
%%%
%%% 7. Disclaimer of Warranty. Unless required by applicable law or
%%% agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions ) on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
%%% implied, including, without limitation, any warranties or conditions
%%% of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
%%% PARTICULAR PURPOSE. You are solely responsible for determining the
%%% appropriateness of using or redistributing the Work and assume any
%%% risks associated with Your exercise of permissions under this License.
%%%
%%% 8. Limitation of Liability. In no event and under no legal theory,
%%% whether in tort (including negligence), contract, or otherwise,
%%% unless required by applicable law (such as deliberate and grossly
%%% negligent acts) or agreed to in writing, shall any Contributor be
%%% liable to You for damages, including any direct, indirect, special,
%%% incidental, or consequential damages of any character arising as a
%%% result of this License or out of the use or inability to use the
%%% Work (including but not limited to damages for loss of goodwill,
%%% work stoppage, computer failure or malfunction, or any and all
%%% other commercial damages or losses), even if such Contributor
%%% has been advised of the possibility of such damages.
%%%
%%% 9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof , You may choose to offer ,
%%% and charge a fee for, acceptance of support, warranty, indemnity,
%%% or other liability obligations and/or rights consistent with this
%%% License. However, in accepting such obligations, You may act only
%%% on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor , and only if You agree to indemnify ,
%%% defend, and hold each Contributor harmless for any liability
%%% incurred by, or claims asserted against, such Contributor by reason
%%% of your accepting any such warranty or additional liability.
%%%
%%% END OF TERMS AND CONDITIONS
%%%
%%% Copyright 2018, < >.
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%%     http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Modified Copyright (C) 2018 ApproximateReality
%%%
%%% Removed 'at'
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% @doc
%%% This is the main module that exposes custom formatting to the OTP
%%% logger library (part of the `kernel' application since OTP-21).
%%%
%%% The module honors the standard configuration of the kernel's default
%%% logger formatter regarding: max depth, templates.
%%% @end
-module(flatlog).
%% API exports
-export([format/2]).
-ifdef(TEST).
-export([format_msg/2, to_string/2]).
-endif.
-type template() :: [metakey() | {metakey(), template(), template()} | string()].
-type metakey() :: atom() | [atom()].
%% Xref
-ignore_xref([
format/2
]).
%%====================================================================
%% API functions
%%====================================================================
%% Entry point called by the OTP logger.  The first clause handles the
%% canonical case (a report map); the remaining clauses normalize the
%% other msg shapes into a report map and recurse.
-spec format(LogEvent, Config) -> unicode:chardata() when
      LogEvent :: logger:log_event(),
      Config :: logger:formatter_config().
format(#{level := Level, msg := {report, Report}, meta := Meta}, UsrConfig)
  when is_map(Report) ->
    Conf = apply_defaults(UsrConfig),
    %% The log level rides along in the metadata for template expansion.
    format_log(maps:get(template, Conf), Conf, Report, Meta#{level => Level});
format(Event = #{msg := {report, KeyVals}}, UsrConfig) when is_list(KeyVals) ->
    %% Proplist report: turn it into a map and retry.
    format(Event#{msg := {report, maps:from_list(KeyVals)}}, UsrConfig);
format(Event = #{msg := {string, Str}}, UsrConfig) ->
    %% Plain string: wrap it as an unstructured_log report.
    Bin = unicode:characters_to_binary(Str),
    format(Event#{msg := {report, #{unstructured_log => Bin}}}, UsrConfig);
format(Event = #{msg := {FormatStr, Args}}, UsrConfig) ->
    %% io:format-style call: render it, then wrap as unstructured_log.
    Bin = unicode:characters_to_binary(io_lib:format(FormatStr, Args)),
    format(Event#{msg := {report, #{unstructured_log => Bin}}}, UsrConfig).
%%====================================================================
Internal functions
%%====================================================================
%% Fill in any formatter option the user did not supply.  User-provided
%% keys win because maps:merge prefers its second argument.
apply_defaults(UserConfig) ->
    Defaults = #{term_depth => undefined,
                 map_depth => -1,
                 time_offset => 0,
                 time_designator => $T,
                 template => ["when=", time, " level=", level,
                              {id, [" id=", id], ""}, {parent_id, [" parent_id=", parent_id], ""},
                              {correlation_id, [" correlation_id=", correlation_id], ""},
                              {pid, [" pid=", pid], ""}, " ", msg, "\n"]},
    maps:merge(Defaults, UserConfig).
%% Expand the template against the message and metadata.
%% BUG FIX: the extracted source had lost the guard continuation on the
%% atom-key clause (only the trailing "from OTP" comment text survived),
%% leaving an unterminated clause head.  Restored per upstream flatlog:
%% the clause accepts a bare atom key or an OTP-style list-of-atoms key.
-spec format_log(template(), Config, Msg, Meta) -> unicode:chardata() when
      Config :: logger:formatter_config(),
      Msg :: Data,
      Meta :: Data,
      Data :: #{string() | binary() | atom() => term()}.
format_log(Tpl, Config, Msg, Meta) -> format_log(Tpl, Config, Msg, Meta, []).

format_log([], _Config, _Msg, _Meta, Acc) ->
    lists:reverse(Acc);
format_log([msg | Rest], Config, Msg, Meta, Acc) ->
    %% The special `msg' placeholder renders the whole report map.
    format_log(Rest, Config, Msg, Meta, [format_msg(Msg, Config) | Acc]);
format_log([Key | Rest], Config, Msg, Meta, Acc) when is_atom(Key)
                                               orelse is_atom(hd(Key)) -> %% from OTP
    %% Metadata lookup; missing keys are silently skipped.
    case maps:find(Key, Meta) of
        error ->
            format_log(Rest, Config, Msg, Meta, Acc);
        {ok, Val} ->
            format_log(Rest, Config, Msg, Meta, [format_val(Key, Val, Config) | Acc])
    end;
format_log([{Key, IfExists, Else} | Rest], Config, Msg, Meta, Acc) ->
    %% Conditional template element: render IfExists with the value bound,
    %% or the Else branch when the key is absent.
    case maps:find(Key, Meta) of
        error ->
            format_log(Rest, Config, Msg, Meta, [Else | Acc]);
        {ok, Val} ->
            format_log(Rest, Config, Msg, Meta,
                       [format_log(IfExists, Config, Msg, #{Key => Val}, []) | Acc])
    end;
format_log([Term | Rest], Config, Msg, Meta, Acc) when is_list(Term) ->
    %% Literal string element: copied through verbatim.
    format_log(Rest, Config, Msg, Meta, [Term | Acc]).
%% Render the report map as "key=value " pairs, flattening nested maps by
%% joining key paths with "_" down to the configured map_depth.
format_msg(Data, Config) -> format_msg("", Data, Config).

format_msg(Parents, Data, Config = #{map_depth := 0}) when is_map(Data) ->
    %% Depth budget exhausted: elide the whole sub-map.
    to_string(truncate_key(Parents), Config) ++ "=... ";
format_msg(Parents, Data, Config = #{map_depth := Depth}) when is_map(Data) ->
    Render = fun(Key, Val, Acc) ->
                 case is_map(Val) of
                     true ->
                         Prefix = Parents ++ to_string(Key, Config) ++ "_",
                         [format_msg(Prefix, Val, Config#{map_depth := Depth - 1}) | Acc];
                     false ->
                         [Parents ++ to_string(Key, Config), $=,
                          to_string(Val, Config), $\s | Acc]
                 end
             end,
    maps:fold(Render, [], Data).
%% Per-key rendering of metadata values: timestamps and MFAs get special
%% treatment, everything else goes through the generic stringifier.
format_val(time, Timestamp, Config) ->
    format_time(Timestamp, Config);
format_val(mfa, MFA, Config) ->
    escape(format_mfa(MFA, Config));
format_val(_AnyKey, Value, Config) ->
    to_string(Value, Config).
%% Render a system time (microseconds) as an RFC 3339 timestamp using the
%% configured UTC offset and date/time separator character.
format_time(SysTime, #{time_offset := Offset, time_designator := Designator})
  when is_integer(SysTime) ->
    Opts = [{unit, microsecond},
            {offset, Offset},
            {time_designator, Designator}],
    calendar:system_time_to_rfc3339(SysTime, Opts).
%% Render an MFA triple as "mod:fun/arity".
format_mfa({Mod, Fun, Arity}, _Config)
  when is_atom(Mod), is_atom(Fun), is_integer(Arity) ->
    [atom_to_list(Mod), $:, atom_to_list(Fun), $/, integer_to_list(Arity)];
format_mfa({Mod, Fun, Args}, Config)
  when is_atom(Mod), is_atom(Fun), is_list(Args) ->
    %% A literal argument list was given ({mod, fun, [a, b, c]}):
    %% report only its length as the arity.
    format_mfa({Mod, Fun, length(Args)}, Config);
format_mfa(Preformatted, Config) ->
    %% Already a pre-formatted string value.
    to_string(Preformatted, Config).
%% Generic term-to-string conversion; printable text is escaped/quoted,
%% anything else is rendered via format_str and then escaped.
to_string(Value, _) when is_atom(Value) ->
    escape(atom_to_list(Value));
to_string(Value, _) when is_integer(Value) ->
    integer_to_list(Value);
to_string(Value, _) when is_pid(Value) ->
    pid_to_list(Value);
to_string(Value, _) when is_reference(Value) ->
    ref_to_list(Value);
to_string(Value, Config) when is_binary(Value) ->
    case unicode:characters_to_list(Value) of
        Chars when is_list(Chars) ->
            case io_lib:printable_list(Chars) of
                true  -> escape(Chars);
                false -> escape(format_str(Config, Value))
            end;
        _DecodeFailure ->
            %% error or incomplete unicode decode: format as a raw term
            escape(format_str(Config, Value))
    end;
to_string(Value, Config) when is_list(Value) ->
    case io_lib:printable_list(Value) of
        true  -> escape(Value);
        false -> escape(format_str(Config, Value))
    end;
to_string(Value, Config) ->
    escape(format_str(Config, Value)).
%% Format an arbitrary term on one line, optionally capped at term_depth.
format_str(#{term_depth := undefined}, Term) ->
    io_lib:format("~0tp", [Term]);
format_str(#{term_depth := MaxDepth}, Term) ->
    io_lib:format("~0tP", [Term, MaxDepth]).
%% Quote and/or escape a rendered value so the flat key=value line stays
%% parseable: escaping implies quoting; spaces or '=' force quoting only.
escape(Str) ->
    case {needs_escape(Str), needs_quoting(Str)} of
        {true, _}      -> [$", do_escape(Str), $"];
        {false, true}  -> [$", Str, $"];
        {false, false} -> Str
    end.
%% A value containing a space or '=' must be wrapped in quotes.
needs_quoting(Str) ->
    lists:any(fun(Pattern) -> string:find(Str, Pattern) =/= nomatch end,
              [" ", "="]).
%% A value containing a quote, backslash, or newline needs character escaping.
needs_escape(Str) ->
    lists:any(fun(Pattern) -> string:find(Str, Pattern) =/= nomatch end,
              ["\"", "\\", "\n"]).
%% Walk the string one grapheme at a time, backslash-escaping quotes,
%% backslashes and line breaks.  string:next_grapheme/1 yields "\r\n" as a
%% single grapheme (a sublist), which can never collide with the
%% single-character clauses below.
do_escape([]) ->
    [];
do_escape(Str) ->
    case string:next_grapheme(Str) of
        ["\r\n" | Rest] -> [$\\, $\r, $\\, $\n | do_escape(Rest)];
        [$\n | Rest]    -> [$\\, $\n | do_escape(Rest)];
        [$" | Rest]     -> [$\\, $" | do_escape(Rest)];
        [$\\ | Rest]    -> [$\\, $\\ | do_escape(Rest)];
        [Other | Rest]  -> [Other | do_escape(Rest)]
    end.
%% Drop a single trailing underscore from a flattened key path
%% (the separator left over after eliding a nested map).
truncate_key(Key) ->
    case Key of
        [] -> [];
        "_" -> "";
        [Ch | Rest] -> [Ch | truncate_key(Rest)]
    end.
Apache License
/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
publicly display, publicly perform, sublicense, and distribute the
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
(d) If the Work includes a "NOTICE" text file as part of its
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
of the following places: within a NOTICE text file distributed
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Removed 'at'
@doc
This is the main module that exposes custom formatting to the OTP
logger library (part of the `kernel' application since OTP-21).
The module honors the standard configuration of the kernel's default
logger formatter regarding: max depth, templates.
@end
API exports
====================================================================
API functions
====================================================================
====================================================================
====================================================================
arguments are passed as a literal list ({mod, fun, [a, b, c]})
passing in a pre-formatted string value
error or incomplete | Version 2.0 , January 2004
" Licensor " shall mean the copyright owner or entity authorized by
" Derivative Works " shall mean any work , whether in Source or Object
the Work and Derivative Works thereof .
2 . Grant of Copyright License . Subject to the terms and conditions of
copyright license to reproduce , prepare Derivative Works of ,
Work and such Derivative Works in Source or Object form .
3 . Grant of Patent License . Subject to the terms and conditions of
) alone or by combination of their )
with the Work to which such ) was submitted . If You
Derivative Works a copy of this License ; and
( c ) You must retain , in the Source form of any Derivative Works
the Derivative Works ; and
distribution , then any Derivative Works that You distribute must
pertain to any part of the Derivative Works , in at least one
as part of the Derivative Works ; within the Source form or
documentation , if provided along with the Derivative Works ; or ,
within a display generated by the Derivative Works , if and
wherever such third - party notices normally appear . The contents
notices within Derivative Works that You distribute , alongside
for any such Derivative Works as a whole , provided Your use ,
Contributor provides its Contributions ) on an " AS IS " BASIS ,
the Work or Derivative Works thereof , You may choose to offer ,
of any other Contributor , and only if You agree to indemnify ,
Copyright 2018 , < > .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
Modified Copyright ( C ) 2018 ApproximateReality
-module(flatlog).
-export([format/2]).
-ifdef(TEST).
-export([format_msg/2, to_string/2]).
-endif.
-type template() :: [metakey() | {metakey(), template(), template()} | string()].
-type metakey() :: atom() | [atom()].
Xref
-ignore_xref([
format/2
]).
-spec format(LogEvent, Config) -> unicode:chardata() when
LogEvent :: logger:log_event(),
Config :: logger:formatter_config().
format(#{level:=Level, msg:={report, Msg}, meta:=Meta}, UsrConfig) when is_map(Msg) ->
Config = apply_defaults(UsrConfig),
NewMeta = maps:put(level, Level, Meta),
format_log(maps:get(template, Config), Config, Msg, NewMeta);
format(Map = #{msg := {report, KeyVal}}, UsrConfig) when is_list(KeyVal) ->
format(Map#{msg := {report, maps:from_list(KeyVal)}}, UsrConfig);
format(Map = #{msg := {string, String}}, UsrConfig) ->
format(Map#{msg := {report,
#{unstructured_log =>
unicode:characters_to_binary(String)}}}, UsrConfig);
format(Map = #{msg := {Format, Terms}}, UsrConfig) ->
format(Map#{msg := {report,
#{unstructured_log =>
unicode:characters_to_binary(io_lib:format(Format, Terms))}}},
UsrConfig).
Internal functions
apply_defaults(Map) ->
maps:merge(
#{term_depth => undefined,
map_depth => -1,
time_offset => 0,
time_designator => $T,
template => ["when=", time, " level=", level,
{id, [" id=", id], ""}, {parent_id, [" parent_id=", parent_id], ""},
{correlation_id, [" correlation_id=", correlation_id], ""},
{pid, [" pid=", pid], ""}, " ", msg, "\n"]
},
Map
).
-spec format_log(template(), Config, Msg, Meta) -> unicode:chardata() when
Config :: logger:formatter_config(),
Msg :: Data,
Meta :: Data,
Data :: #{string() | binary() | atom() => term()}.
format_log(Tpl, Config, Msg, Meta) -> format_log(Tpl, Config, Msg, Meta, []).
format_log([], _Config, _Msg, _Meta, Acc) ->
lists:reverse(Acc);
format_log([msg | Rest], Config, Msg, Meta, Acc) ->
format_log(Rest, Config, Msg, Meta, [format_msg(Msg, Config) | Acc]);
format_log([Key | Rest], Config, Msg, Meta, Acc) when is_atom(Key)
from OTP
case maps:find(Key, Meta) of
error ->
format_log(Rest, Config, Msg, Meta, Acc);
{ok, Val} ->
format_log(Rest, Config, Msg, Meta, [format_val(Key, Val, Config) | Acc])
end;
format_log([{Key, IfExists, Else} | Rest], Config, Msg, Meta, Acc) ->
case maps:find(Key, Meta) of
error ->
format_log(Rest, Config, Msg, Meta, [Else | Acc]);
{ok, Val} ->
format_log(Rest, Config, Msg, Meta,
[format_log(IfExists, Config, Msg, #{Key => Val}, []) | Acc])
end;
format_log([Term | Rest], Config, Msg, Meta, Acc) when is_list(Term) ->
format_log(Rest, Config, Msg, Meta, [Term | Acc]).
format_msg(Data, Config) -> format_msg("", Data, Config).
format_msg(Parents, Data, Config=#{map_depth := 0}) when is_map(Data) ->
to_string(truncate_key(Parents), Config) ++ "=... ";
format_msg(Parents, Data, Config = #{map_depth := Depth}) when is_map(Data) ->
maps:fold(
fun(K, V, Acc) when is_map(V) ->
[format_msg(Parents ++ to_string(K, Config) ++ "_",
V,
Config#{map_depth := Depth-1}) | Acc]
; (K, V, Acc) ->
[Parents ++ to_string(K, Config), $=,
to_string(V, Config), $\s | Acc]
end,
[],
Data
).
format_val(time, Time, Config) ->
format_time(Time, Config);
format_val(mfa, MFA, Config) ->
escape(format_mfa(MFA, Config));
format_val(_Key, Val, Config) ->
to_string(Val, Config).
format_time(N, #{time_offset := O, time_designator := D}) when is_integer(N) ->
calendar:system_time_to_rfc3339(N, [{unit, microsecond},
{offset, O},
{time_designator, D}]).
format_mfa({M, F, A}, _) when is_atom(M), is_atom(F), is_integer(A) ->
[atom_to_list(M), $:, atom_to_list(F), $/, integer_to_list(A)];
format_mfa({M, F, A}, Config) when is_atom(M), is_atom(F), is_list(A) ->
format_mfa({M, F, length(A)}, Config);
to_string(MFAStr, Config).
to_string(X, _) when is_atom(X) ->
escape(atom_to_list(X));
to_string(X, _) when is_integer(X) ->
integer_to_list(X);
to_string(X, _) when is_pid(X) ->
pid_to_list(X);
to_string(X, _) when is_reference(X) ->
ref_to_list(X);
to_string(X, C) when is_binary(X) ->
case unicode:characters_to_list(X) of
escape(format_str(C, X));
List ->
case io_lib:printable_list(List) of
true -> escape(List);
_ -> escape(format_str(C, X))
end
end;
to_string(X, C) when is_list(X) ->
case io_lib:printable_list(X) of
true -> escape(X);
_ -> escape(format_str(C, X))
end;
to_string(X, C) ->
escape(format_str(C, X)).
format_str(#{term_depth := undefined}, T) ->
io_lib:format("~0tp", [T]);
format_str(#{term_depth := D}, T) ->
io_lib:format("~0tP", [T, D]).
escape(Str) ->
case needs_escape(Str) of
false ->
case needs_quoting(Str) of
true -> [$", Str, $"];
false -> Str
end;
true ->
[$", do_escape(Str), $"]
end.
needs_quoting(Str) ->
string:find(Str, " ") =/= nomatch orelse
string:find(Str, "=") =/= nomatch.
needs_escape(Str) ->
string:find(Str, "\"") =/= nomatch orelse
string:find(Str, "\\") =/= nomatch orelse
string:find(Str, "\n") =/= nomatch.
do_escape([]) ->
[];
do_escape(Str) ->
case string:next_grapheme(Str) of
[$\n | Rest] -> [$\\, $\n | do_escape(Rest)];
["\r\n" | Rest] -> [$\\, $\r, $\\, $\n | do_escape(Rest)];
[$" | Rest] -> [$\\, $" | do_escape(Rest)];
[$\\ | Rest] -> [$\\, $\\ | do_escape(Rest)];
[Grapheme | Rest] -> [Grapheme | do_escape(Rest)]
end.
truncate_key([]) -> [];
truncate_key("_") -> "";
truncate_key([H|T]) -> [H | truncate_key(T)]. |
4bbd3743b36ae7b6e5ef82330bc6fdc9a24f7bd9428ef816f8c7e80d9c7d7f58 | zyla/ceburilo | Main.hs | # LANGUAGE DataKinds , TypeOperators , RecordWildCards , OverloadedStrings #
module Main where
import Network.Wai
import Network.Wai.Handler.Warp
import Network.Wai.Middleware.Cors (simpleCors)
import Servant
import Data.Maybe
import Data.Ord
import Data.Foldable (minimumBy)
import qualified Data.ByteString as BS
import qualified Data.IntMap.Strict as IM
import Data.Aeson (FromJSON, ToJSON, toJSON, (.=), object, decodeStrict)
import System.Environment (lookupEnv)
import Control.Exception (evaluate)
import Control.DeepSeq (force)
-------------------------------
import Ceburilo.Types
import Ceburilo.Graph
type IMap = IM.IntMap StationPaths
data RouteView = RouteView
{ rvPath :: Path
, rvStations :: [Station]
, rvBegCoord :: Point
, rvDestCoord :: Point
}
instance ToJSON RouteView where
toJSON RouteView{..} = object [ "beg_coord" .= rvBegCoord
, "dest_coord" .= rvDestCoord
, "path" .= rvPath
, "stations" .= rvStations ]
type API = "route" :> QueryParam "beg_lat" Float :> QueryParam "beg_lon" Float
:> QueryParam "dest_lat" Float :> QueryParam "dest_lon" Float
:> Get '[JSON] RouteView
proxyAPI :: Proxy API
proxyAPI = Proxy
generateRouteView :: Point -- ^Begginning lat,lon
-> Point -- ^Destination lat.lon
-> Graph -> IMap -- ^Given graph
-> Maybe RouteView
generateRouteView begPoint dstPoint graph spath =
let lookupStation = fmap spStation . flip IM.lookup spath
lookupPath from to = IM.lookup from spath >>= IM.lookup to . spPaths
allStations = spStation <$> IM.elems spath
nearestStation point = stationNumber $
minimumBy (comparing $ distanceEarth point . stationLocation)
allStations
beginStation = nearestStation begPoint
destStation = nearestStation dstPoint
in fmap (\stationNumbers ->
let stations = mapMaybe lookupStation (beginStation:stationNumbers)
stationPairs = zip (beginStation:stationNumbers) stationNumbers
path = mconcat $ mapMaybe (uncurry lookupPath) stationPairs
in RouteView path stations begPoint dstPoint
) (generateRoute graph beginStation destStation)
parseInput :: Graph -> IMap
-> Maybe Float -> Maybe Float
-> Maybe Float -> Maybe Float
-> Handler RouteView
parseInput g mp (Just blat) (Just blon) (Just dlat) (Just dlon) =
case generateRouteView (Point blat blon) (Point dlat dlon) g mp of
Nothing -> throwError err500
(Just x) -> return x
parseInput _ _ _ _ _ _ = throwError err400
app :: Graph -> IMap -> Application
app gr mp = serve proxyAPI (parseInput gr mp)
parseJSONFromFile :: FromJSON a => FilePath -> IO [Maybe a]
parseJSONFromFile file =
fmap decodeStrict . Prelude.filter (not . BS.null) . BS.split newline <$> BS.readFile file
where
newline = 10
stationsToMap :: [StationPaths] -> IMap
stationsToMap = IM.fromList . map sspToPair
where
sspToPair sp@(StationPaths (Station number _ _) _) = (number, sp)
main :: IO ()
main = do
port <- maybe 4000 read <$> lookupEnv "PORT"
putStrLn "Parsing data file..."
paths <- parseJSONFromFile "paths.json"
putStrLn "Building graph..."
graph <- evaluate $ force $ fromMaybe (error "error loading graph") $ buildGraph paths
imap <- evaluate $ force $ stationsToMap $ fromMaybe (error "error loading graph") $ sequence paths
putStrLn $ "Running server on port " ++ show port
run port $ simpleCors $ app graph imap
| null | https://raw.githubusercontent.com/zyla/ceburilo/7bb24940759bee7b3dff3407fcb093ebf130e710/app/server/Main.hs | haskell | -----------------------------
^Begginning lat,lon
^Destination lat.lon
^Given graph | # LANGUAGE DataKinds , TypeOperators , RecordWildCards , OverloadedStrings #
module Main where
import Network.Wai
import Network.Wai.Handler.Warp
import Network.Wai.Middleware.Cors (simpleCors)
import Servant
import Data.Maybe
import Data.Ord
import Data.Foldable (minimumBy)
import qualified Data.ByteString as BS
import qualified Data.IntMap.Strict as IM
import Data.Aeson (FromJSON, ToJSON, toJSON, (.=), object, decodeStrict)
import System.Environment (lookupEnv)
import Control.Exception (evaluate)
import Control.DeepSeq (force)
import Ceburilo.Types
import Ceburilo.Graph
type IMap = IM.IntMap StationPaths
data RouteView = RouteView
{ rvPath :: Path
, rvStations :: [Station]
, rvBegCoord :: Point
, rvDestCoord :: Point
}
instance ToJSON RouteView where
toJSON RouteView{..} = object [ "beg_coord" .= rvBegCoord
, "dest_coord" .= rvDestCoord
, "path" .= rvPath
, "stations" .= rvStations ]
type API = "route" :> QueryParam "beg_lat" Float :> QueryParam "beg_lon" Float
:> QueryParam "dest_lat" Float :> QueryParam "dest_lon" Float
:> Get '[JSON] RouteView
proxyAPI :: Proxy API
proxyAPI = Proxy
-> Maybe RouteView
generateRouteView begPoint dstPoint graph spath =
let lookupStation = fmap spStation . flip IM.lookup spath
lookupPath from to = IM.lookup from spath >>= IM.lookup to . spPaths
allStations = spStation <$> IM.elems spath
nearestStation point = stationNumber $
minimumBy (comparing $ distanceEarth point . stationLocation)
allStations
beginStation = nearestStation begPoint
destStation = nearestStation dstPoint
in fmap (\stationNumbers ->
let stations = mapMaybe lookupStation (beginStation:stationNumbers)
stationPairs = zip (beginStation:stationNumbers) stationNumbers
path = mconcat $ mapMaybe (uncurry lookupPath) stationPairs
in RouteView path stations begPoint dstPoint
) (generateRoute graph beginStation destStation)
parseInput :: Graph -> IMap
-> Maybe Float -> Maybe Float
-> Maybe Float -> Maybe Float
-> Handler RouteView
parseInput g mp (Just blat) (Just blon) (Just dlat) (Just dlon) =
case generateRouteView (Point blat blon) (Point dlat dlon) g mp of
Nothing -> throwError err500
(Just x) -> return x
parseInput _ _ _ _ _ _ = throwError err400
app :: Graph -> IMap -> Application
app gr mp = serve proxyAPI (parseInput gr mp)
parseJSONFromFile :: FromJSON a => FilePath -> IO [Maybe a]
parseJSONFromFile file =
fmap decodeStrict . Prelude.filter (not . BS.null) . BS.split newline <$> BS.readFile file
where
newline = 10
stationsToMap :: [StationPaths] -> IMap
stationsToMap = IM.fromList . map sspToPair
where
sspToPair sp@(StationPaths (Station number _ _) _) = (number, sp)
main :: IO ()
main = do
port <- maybe 4000 read <$> lookupEnv "PORT"
putStrLn "Parsing data file..."
paths <- parseJSONFromFile "paths.json"
putStrLn "Building graph..."
graph <- evaluate $ force $ fromMaybe (error "error loading graph") $ buildGraph paths
imap <- evaluate $ force $ stationsToMap $ fromMaybe (error "error loading graph") $ sequence paths
putStrLn $ "Running server on port " ++ show port
run port $ simpleCors $ app graph imap
|
8dc00bbc92199c247b7d8721b006d0bcbbedb4aa68e85b2edc4910f2f4463409 | rcherrueau/rastache | partials.rkt | #lang racket/base
; /\ \__ /\ \
; _ __ __ ____\ \ ,_\ __ ___\ \ \___ __
/\`'__\/'__`\ /',__\\ \ \/ /'__`\ /'___\ \ _ ` \ /'__`\
; \ \ \//\ \L\.\_/\__, `\\ \ \_/\ \L\.\_/\ \__/\ \ \ \ \/\ __/
\ \_\\ \__/.\_\/\____/ \ \__\ \__/.\_\ \____\\ \_\ \_\
; \/_/ \/__/\/_/\/___/ \/__/\/__/\/_/\/____/ \/_/\/_/\/____/
; Mustache template engine for Racket
; Partial tags are used to expand an external template into the
; current template.
;
; The tag's content MUST be a non-whitespace character sequence NOT
; containing the current closing delimiter.
;
; This tag's content names the partial to inject. Set Delimiter tags
; MUST NOT affect the parsing of a partial. The partial MUST be
; rendered against the context stack local to the tag. If the named
; partial cannot be found, the empty string SHOULD be used instead, as
; in interpolations.
;
; Partial tags SHOULD be treated as standalone when appropriate. If
; this tag is used standalone, any whitespace preceding the tag should
; treated as indentation, and prepended to each line of the partial
; before rendering.
(provide partials-tests)
(require rackunit
rackunit/text-ui
net/url
"../commons.rkt"
"rastache-test-case.rkt")
(define partials-tests
(test-suite
"Partials tests"
(rast-t-case "Basic Behavior"
#hash()
"\"{{>partials/text1}}\""
"\"from partial\n\""
(list (token-delimiter "{{" "}}")
(token-static "\"")
(token-partial (string->url "partials/text1"))
(token-static "\""))
"The greater-than operator should expand to the named partial.")
(rast-t-case "Failed Lookup"
#hash()
"\"{{>partials/text2}}\""
"\"\""
(list (token-delimiter "{{" "}}")
(token-static "\"")
(token-partial (string->url "partials/text2"))
(token-static "\""))
"The empty string should be used when the named partial is not found.")
(rast-t-case "Context"
#hash{ (text . "content") }
"\"{{>partials/partial1}}\""
"\"*content*\""
(list (token-delimiter "{{" "}}")
(token-static "\"")
(token-partial (string->url "partials/partial1"))
(token-static "\""))
"The greater-than operator should operate within the current context.")
(rast-t-case "Recursion"
#hash{ (content . "X")
(nodes . [ #hash{ (content . "Y")
(nodes . []) } ]) }
"{{>partials/node}}"
"X<Y<>\n>\n"
(list (token-delimiter "{{" "}}")
(token-partial (string->url "partials/node")))
"The greater-than operator should properly recurse.")
Whitespace Sensitivity
(rast-t-case "Surrounding Whitespace"
#hash()
"| {{>partials/partial2}} |"
"| \t|\t\n |"
(list (token-delimiter "{{" "}}")
(token-static "| ")
(token-partial (string->url "partials/partial2"))
(token-static " |"))
"The greater-than operator should not alter surrounding whitespace.")
(rast-t-case "Inline Indentation"
#hash{ (data . "|") }
" {{data}} {{> partials/partial3}}\n"
" | >\n>\n"
(list (token-delimiter "{{" "}}")
(token-static " ")
(token-etag 'data)
(token-static " ")
(token-partial (string->url "partials/partial3"))
(token-static "")
(token-static "\n"))
"Whitespace should be left untouched.")
(rast-t-case "Standalone Line Endings"
#hash()
"|\r\n{{>partials/partial4}}\r\n|"
"|\r\n>|"
(list (token-delimiter "{{" "}}")
(token-static "|\r")
(token-static "\n")
(token-partial (string->url "partials/partial4"))
(token-static "|"))
"'\r\n' should be considered a newline for standalone tags.")
#;
Unsuported by mustache.js
(rast-t-case "Standalone Without Previous Line"
#hash()
" {{>partials/partial3}}\n>"
;; Each line of the partial should be indented before
;; rendering.
" >\n >>"
(list (token-delimiter "{{" "}}")
(token-partial (string->url "partials/partial3"))
(token-static ">"))
"Standalone tags should not require a newline to precede them.")
#;
Unsuported by mustache.js
(rast-t-case "Standalone Without Newline"
#hash()
">\n {{>partials/partial3}}"
;; Each line of the partial should be indented before
;; rendering.
">\n >\n >"
(list (token-delimiter "{{" "}}")
(token-static ">")
(token-static "\n")
(token-partial (string->url "partials/partial3")))
"Standalone tags should not require a newline to follow them.")
#;
Unsuported by mustache.js
(rast-t-case "Standalone Indentation"
#hash()
"
\\
{{>partials/partial5}}
/"
"
\
|
<
->
|
/"
(list (token-delimiter "{{" "}}")
(token-static "")
(token-static "\n")
(token-static " \\")
(token-static "\n")
(token-partial (string->url "partials/partial5"))
(token-static " /"))
"Each line of the partial should be indented before rendering.")
Whitespace Insensitivity
(rast-t-case "Padding Whitespace"
#hash{ (boolean . #t) }
"|{{> partials/partial6 }}|"
"|[]|"
(list (token-delimiter "{{" "}}")
(token-static "|")
(token-partial (string->url "partials/partial6"))
(token-static "|"))
"Superfluous in-tag whitespace should be ignored.")))
| null | https://raw.githubusercontent.com/rcherrueau/rastache/059d00c83416f8ba27cc38fa7f8321b075756d14/rastache/tests/partials.rkt | racket | /\ \__ /\ \
_ __ __ ____\ \ ,_\ __ ___\ \ \___ __
\ \ \//\ \L\.\_/\__, `\\ \ \_/\ \L\.\_/\ \__/\ \ \ \ \/\ __/
\/_/ \/__/\/_/\/___/ \/__/\/__/\/_/\/____/ \/_/\/_/\/____/
Mustache template engine for Racket
Partial tags are used to expand an external template into the
current template.
The tag's content MUST be a non-whitespace character sequence NOT
containing the current closing delimiter.
This tag's content names the partial to inject. Set Delimiter tags
MUST NOT affect the parsing of a partial. The partial MUST be
rendered against the context stack local to the tag. If the named
partial cannot be found, the empty string SHOULD be used instead, as
in interpolations.
Partial tags SHOULD be treated as standalone when appropriate. If
this tag is used standalone, any whitespace preceding the tag should
treated as indentation, and prepended to each line of the partial
before rendering.
Each line of the partial should be indented before
rendering.
Each line of the partial should be indented before
rendering.
| #lang racket/base
/\`'__\/'__`\ /',__\\ \ \/ /'__`\ /'___\ \ _ ` \ /'__`\
\ \_\\ \__/.\_\/\____/ \ \__\ \__/.\_\ \____\\ \_\ \_\
(provide partials-tests)
(require rackunit
rackunit/text-ui
net/url
"../commons.rkt"
"rastache-test-case.rkt")
(define partials-tests
(test-suite
"Partials tests"
(rast-t-case "Basic Behavior"
#hash()
"\"{{>partials/text1}}\""
"\"from partial\n\""
(list (token-delimiter "{{" "}}")
(token-static "\"")
(token-partial (string->url "partials/text1"))
(token-static "\""))
"The greater-than operator should expand to the named partial.")
(rast-t-case "Failed Lookup"
#hash()
"\"{{>partials/text2}}\""
"\"\""
(list (token-delimiter "{{" "}}")
(token-static "\"")
(token-partial (string->url "partials/text2"))
(token-static "\""))
"The empty string should be used when the named partial is not found.")
(rast-t-case "Context"
#hash{ (text . "content") }
"\"{{>partials/partial1}}\""
"\"*content*\""
(list (token-delimiter "{{" "}}")
(token-static "\"")
(token-partial (string->url "partials/partial1"))
(token-static "\""))
"The greater-than operator should operate within the current context.")
(rast-t-case "Recursion"
#hash{ (content . "X")
(nodes . [ #hash{ (content . "Y")
(nodes . []) } ]) }
"{{>partials/node}}"
"X<Y<>\n>\n"
(list (token-delimiter "{{" "}}")
(token-partial (string->url "partials/node")))
"The greater-than operator should properly recurse.")
Whitespace Sensitivity
(rast-t-case "Surrounding Whitespace"
#hash()
"| {{>partials/partial2}} |"
"| \t|\t\n |"
(list (token-delimiter "{{" "}}")
(token-static "| ")
(token-partial (string->url "partials/partial2"))
(token-static " |"))
"The greater-than operator should not alter surrounding whitespace.")
(rast-t-case "Inline Indentation"
#hash{ (data . "|") }
" {{data}} {{> partials/partial3}}\n"
" | >\n>\n"
(list (token-delimiter "{{" "}}")
(token-static " ")
(token-etag 'data)
(token-static " ")
(token-partial (string->url "partials/partial3"))
(token-static "")
(token-static "\n"))
"Whitespace should be left untouched.")
(rast-t-case "Standalone Line Endings"
#hash()
"|\r\n{{>partials/partial4}}\r\n|"
"|\r\n>|"
(list (token-delimiter "{{" "}}")
(token-static "|\r")
(token-static "\n")
(token-partial (string->url "partials/partial4"))
(token-static "|"))
"'\r\n' should be considered a newline for standalone tags.")
Unsuported by mustache.js
(rast-t-case "Standalone Without Previous Line"
#hash()
" {{>partials/partial3}}\n>"
" >\n >>"
(list (token-delimiter "{{" "}}")
(token-partial (string->url "partials/partial3"))
(token-static ">"))
"Standalone tags should not require a newline to precede them.")
Unsuported by mustache.js
(rast-t-case "Standalone Without Newline"
#hash()
">\n {{>partials/partial3}}"
">\n >\n >"
(list (token-delimiter "{{" "}}")
(token-static ">")
(token-static "\n")
(token-partial (string->url "partials/partial3")))
"Standalone tags should not require a newline to follow them.")
Unsuported by mustache.js
(rast-t-case "Standalone Indentation"
#hash()
"
\\
{{>partials/partial5}}
/"
"
\
|
<
->
|
/"
(list (token-delimiter "{{" "}}")
(token-static "")
(token-static "\n")
(token-static " \\")
(token-static "\n")
(token-partial (string->url "partials/partial5"))
(token-static " /"))
"Each line of the partial should be indented before rendering.")
Whitespace Insensitivity
(rast-t-case "Padding Whitespace"
#hash{ (boolean . #t) }
"|{{> partials/partial6 }}|"
"|[]|"
(list (token-delimiter "{{" "}}")
(token-static "|")
(token-partial (string->url "partials/partial6"))
(token-static "|"))
"Superfluous in-tag whitespace should be ignored.")))
|
c4214c4bb8a5a4487220dd7489dd07e87df05867026625e79da2f86b3c5434f5 | glguy/advent2015 | Day10.hs | module Main where
import Data.List
main :: IO ()
main =
do steps <- iterate lookAndSay <$> loadInput
print (length (steps !! 40))
print (length (steps !! 50))
loadInput :: IO String
loadInput = head . words <$> readFile "input10.txt"
lookAndSay :: String -> String
lookAndSay = foldr aux [] . group
where
aux xs = shows (length xs)
. showChar (head xs)
| null | https://raw.githubusercontent.com/glguy/advent2015/e59b93c41363be85eb7f11396db5c95e79e485ad/Day10.hs | haskell | module Main where
import Data.List
main :: IO ()
main =
do steps <- iterate lookAndSay <$> loadInput
print (length (steps !! 40))
print (length (steps !! 50))
loadInput :: IO String
loadInput = head . words <$> readFile "input10.txt"
lookAndSay :: String -> String
lookAndSay = foldr aux [] . group
where
aux xs = shows (length xs)
. showChar (head xs)
| |
7f20117735376b15787d870d0ea912b516bd8b7b66d7434200a0f5fa00a93268 | TheBestTvarynka/Lisp-SQL-Parser | cli.lisp |
(require 'asdf)
(load "priority-queue/priority-queue.asd")
(asdf:load-system 'priority-queue)
; load all functionality code
(load "getenv.lisp")
(load "importer.lisp")
(load "print.lisp")
(load "where.lisp")
(load "orderby.lisp")
(load "select.lisp")
(load "joins.lisp")
(load "union.lisp")
(load "groupby.lisp")
(load "having.lisp")
(load "limit.lisp")
; tables - hashmap where key is tablename and value is a table
(defvar tables (make-hash-table :test 'equal))
(setf (gethash "map_zal-skl9" tables) (readTableFromFile "datasource/map_zal-skl9.csv"))
(setf (gethash "mp-assistants" tables) (readTableFromFile "datasource/mp-assistants.csv"))
(setf (gethash "mp-posts_full" tables) (readTableFromFile "datasource/mp-posts_full.csv"))
(setf (gethash "mps-declarations_rada" tables) (readTableFromFile "datasource/mps-declarations_rada.json"))
(setf (gethash "test" tables) (readTableFromFile "datasource/test.csv"))
(setf (gethash "test2" tables) (readTableFromFile "datasource/test2.csv"))
(setf (gethash "test3" tables) (readTableFromFile "datasource/test3.csv"))
(setf (gethash "test4" tables) (readTableFromFile "datasource/test4.csv"))
( setf ( gethash " plenary_register_mps - skl9 " tables ) mps_declarations_rada )
; define keywords for sql-query
(defvar keyWords #("select"
"from"
"right join"
"left join"
"full outer join"
"inner join"
"where"
"order by"
"group by"
"having"
"limit"))
; priorities for every operation
(defvar priorities (make-hash-table :test 'equal))
(setf (gethash "from" priorities) 1)
(setf (gethash "inner join" priorities) 2)
(setf (gethash "left join" priorities) 2)
(setf (gethash "right join" priorities) 2)
(setf (gethash "full outer join" priorities) 2)
(setf (gethash "where" priorities) 3)
(setf (gethash "group by" priorities) 4)
(setf (gethash "having" priorities) 5)
(setf (gethash "select" priorities) 6)
(setf (gethash "order by" priorities) 7)
(setf (gethash "limit" priorities) 8)
(setf (gethash "" priorities) 0)
(defun from (tableStr &optional table)
"returns table by tablename"
(gethash (string-trim " " tableStr) tables)
)
; hashmap with all functions for quering
(defvar functions (make-hash-table :test 'equal))
(setf (gethash "from" functions) #'from)
(setf (gethash "inner join" functions) #'join)
(setf (gethash "left join" functions) #'join)
(setf (gethash "right join" functions) #'join)
(setf (gethash "full outer join" functions) #'join)
(setf (gethash "where" functions) #'where)
(setf (gethash "group by" functions) #'groupBy)
(setf (gethash "having" functions) #'having)
(setf (gethash "order by" functions) #'orderBy)
(setf (gethash "select" functions) #'select)
(setf (gethash "limit" functions) #'limit)
(setf (gethash "" functions) nil)
(defun getPriority (kword priorities)
"returns priority of operation"
(gethash kword priorities)
)
(defun iterateArr (index arr str)
(cond
((= index (length arr)) "")
((starts-with str (aref arr index)) (aref arr index))
(t (iterateArr (+ index 1) arr str))
)
)
(defun getKeyWord (queryStr keyWords)
"returns sql-keyword if queryStr starts with one of keyWords or '' if not"
(iterateArr 0 keyWords queryStr)
)
(defun make-fn (functionStr parametersStr)
"makes function (lambda) that do some operation on the table corresponds to function name (functionStr)"
(cond
((search "join" functionStr)
(lambda (table)
(funcall (gethash functionStr functions) (concatenate 'string functionStr parametersStr) table tables)
))
(t (lambda (table)
(funcall (gethash functionStr functions) parametersStr table)
))
)
)
(defun parseQuery (queryStr fnStr parameters queue)
"this function parses one query. means without 'union'. only one select"
(let ((kword (getKeyWord queryStr keyWords)))
(cond
((string= queryStr "") (pqueue:pqueue-push (make-fn fnStr parameters)
(getPriority fnStr priorities)
queue))
((string= kword "") (parseQuery (removeOperator queryStr)
fnStr
(concatenate 'string parameters " " (getOperator queryStr))
queue))
(t (setf queue (pqueue:pqueue-push (make-fn fnStr parameters)
(getPriority fnStr priorities)
queue))
(parseQuery (subseq queryStr (length kword)) kword "" queue))
)
)
)
(defun execute-queue (table queue)
"execute query. queue - queue with all operation that query contains"
(cond
((pqueue:pqueue-empty-p queue) table)
(t (execute-queue (funcall (pqueue:pqueue-pop queue) (copy-table table)) queue))
)
)
(defun makeSimpleQuery (queryStr)
"returns lambda that executes the query if we call it"
(let ((queue (parseQuery queryStr "" "" (pqueue:make-pqueue #'<
:key-type 'integer
:value-type 'function))))
(pqueue:pqueue-pop queue)
(lambda ()
(execute-queue (make-table) queue)
)
)
)
(defun buildQueries (queryStr)
"splits queryStr on the queries by 'union' operator and make tree-like structure.
returns lambda that executes all queries and does union if it needs"
(let ((unionPos (search "union" queryStr)))
(cond
((not unionPos) (makeSimpleQuery queryStr))
(t (let ()
(lambda ()
(funcall #'unionTables (makeSimpleQuery (subseq queryStr 0 unionPos)) (buildQueries (subseq queryStr (+ unionPos 6))))
)
))
)
)
)
(defun query (queryStr)
"builds function for executing the query and executes its
prints result of quering"
(let ((fn (buildQueries queryStr)))
(printTable (funcall fn))
)
)
(defun loadTable (tableName)
"loads table command: print whole table"
(printTable (gethash tableName tables))
)
(defun execute (commandQuery)
"executes entered text"
(let ((command (parseCommand commandQuery)))
(cond
((string= command "exit") (exit))
((string= command "query") (query (cutParameter commandQuery)))
((string= command "load") (loadTable (cutParameter commandQuery)))
(t (princ "Error: entered command not found!!!"))
)
)
)
(defun run ()
"runs cli"
(terpri)
(princ (format nil "[~A@~A] $: " (getEnvVariable "USER") (getEnvVariable "PWD")))
(terpri)
(execute (read-line))
(run)
)
(run)
| null | https://raw.githubusercontent.com/TheBestTvarynka/Lisp-SQL-Parser/d8f1283fc00e394d76e4ac28e434c99d1bee72aa/src/cli.lisp | lisp | load all functionality code
tables - hashmap where key is tablename and value is a table
define keywords for sql-query
priorities for every operation
hashmap with all functions for quering |
(require 'asdf)
(load "priority-queue/priority-queue.asd")
(asdf:load-system 'priority-queue)
(load "getenv.lisp")
(load "importer.lisp")
(load "print.lisp")
(load "where.lisp")
(load "orderby.lisp")
(load "select.lisp")
(load "joins.lisp")
(load "union.lisp")
(load "groupby.lisp")
(load "having.lisp")
(load "limit.lisp")
(defvar tables (make-hash-table :test 'equal))
(setf (gethash "map_zal-skl9" tables) (readTableFromFile "datasource/map_zal-skl9.csv"))
(setf (gethash "mp-assistants" tables) (readTableFromFile "datasource/mp-assistants.csv"))
(setf (gethash "mp-posts_full" tables) (readTableFromFile "datasource/mp-posts_full.csv"))
(setf (gethash "mps-declarations_rada" tables) (readTableFromFile "datasource/mps-declarations_rada.json"))
(setf (gethash "test" tables) (readTableFromFile "datasource/test.csv"))
(setf (gethash "test2" tables) (readTableFromFile "datasource/test2.csv"))
(setf (gethash "test3" tables) (readTableFromFile "datasource/test3.csv"))
(setf (gethash "test4" tables) (readTableFromFile "datasource/test4.csv"))
( setf ( gethash " plenary_register_mps - skl9 " tables ) mps_declarations_rada )
;; SQL clause keywords recognized by the tokenizer. Multi-word joins
;; are listed as single entries so longest-prefix matching finds them.
(defvar keyWords #("select"
                   "from"
                   "right join"
                   "left join"
                   "full outer join"
                   "inner join"
                   "where"
                   "order by"
                   "group by"
                   "having"
                   "limit"))
;; Execution priority of each clause; lower numbers run first when the
;; parsed operations are replayed from the priority queue:
;; FROM -> JOINs -> WHERE -> GROUP BY -> HAVING -> SELECT -> ORDER BY -> LIMIT.
;; "" (priority 0) is the placeholder clause pushed before the first
;; real keyword is seen.
(defvar priorities (make-hash-table :test 'equal))
(setf (gethash "from" priorities) 1)
(setf (gethash "inner join" priorities) 2)
(setf (gethash "left join" priorities) 2)
(setf (gethash "right join" priorities) 2)
(setf (gethash "full outer join" priorities) 2)
(setf (gethash "where" priorities) 3)
(setf (gethash "group by" priorities) 4)
(setf (gethash "having" priorities) 5)
(setf (gethash "select" priorities) 6)
(setf (gethash "order by" priorities) 7)
(setf (gethash "limit" priorities) 8)
(setf (gethash "" priorities) 0)
(defun from (tableStr &optional table)
  "returns table by tablename"
  ;; TABLE is accepted only so FROM shares the uniform clause-function
  ;; signature; it is ignored, since FROM replaces whatever came before.
  (gethash (string-trim " " tableStr) tables))
;; Clause keyword -> implementing function. Each function receives the
;; clause's parameter string plus the table produced so far (joins get
;; extra arguments via the wrapper built in make-fn). "" maps to NIL:
;; it is the placeholder clause discarded by makeSimpleQuery.
(defvar functions (make-hash-table :test 'equal))
(setf (gethash "from" functions) #'from)
(setf (gethash "inner join" functions) #'join)
(setf (gethash "left join" functions) #'join)
(setf (gethash "right join" functions) #'join)
(setf (gethash "full outer join" functions) #'join)
(setf (gethash "where" functions) #'where)
(setf (gethash "group by" functions) #'groupBy)
(setf (gethash "having" functions) #'having)
(setf (gethash "order by" functions) #'orderBy)
(setf (gethash "select" functions) #'select)
(setf (gethash "limit" functions) #'limit)
(setf (gethash "" functions) nil)
(defun getPriority (kword priorities)
  "returns priority of operation"
  ;; Unknown keywords yield NIL (gethash default); callers only use the
  ;; primary return value.
  (gethash kword priorities))
(defun iterateArr (index arr str)
  "Scan ARR from INDEX onward and return the first element that STR
starts with; the empty string when no element matches."
  (if (= index (length arr))
      ""
      (let ((candidate (aref arr index)))
        (if (starts-with str candidate)
            candidate
            (iterateArr (+ index 1) arr str)))))
(defun getKeyWord (queryStr keyWords)
  "returns sql-keyword if queryStr starts with one of keyWords or '' if not"
  ;; Thin wrapper: scan the keyword vector from its start.
  (iterateArr 0 keyWords queryStr))
(defun make-fn (functionStr parametersStr)
  "makes function (lambda) that do some operation on the table corresponds to function name (functionStr)"
  ;; JOIN clauses get the keyword itself back in the argument string
  ;; (so the handler can tell inner/left/right/full apart) plus the
  ;; global `tables` registry; every other clause receives only its
  ;; parameter string and the table built so far.
  (cond
    ((search "join" functionStr)
     (lambda (table)
       (funcall (gethash functionStr functions) (concatenate 'string functionStr parametersStr) table tables)))
    (t (lambda (table)
         (funcall (gethash functionStr functions) parametersStr table)))))
(defun parseQuery (queryStr fnStr parameters queue)
  "this function parses one query. means without 'union'. only one select"
  ;; Recursive descent over the token stream:
  ;;   queryStr   - unread remainder of the query
  ;;   fnStr      - clause keyword currently being accumulated ("" at start)
  ;;   parameters - argument text collected for fnStr so far
  ;;   queue      - priority queue of (lambda (table) ...) operations,
  ;;                keyed by the clause's execution priority
  ;; Note: the initial "" clause is also pushed (priority 0, NIL fn);
  ;; makeSimpleQuery pops and discards it.
  (let ((kword (getKeyWord queryStr keyWords)))
    (cond
      ;; End of input: flush the clause being accumulated.
      ((string= queryStr "") (pqueue:pqueue-push (make-fn fnStr parameters)
                                                 (getPriority fnStr priorities)
                                                 queue))
      ;; Not at a keyword: move one token into the current clause's
      ;; parameters. (NOTE(review): getOperator/removeOperator come from
      ;; another file -- presumably token head/tail of queryStr; confirm.)
      ((string= kword "") (parseQuery (removeOperator queryStr)
                                      fnStr
                                      (concatenate 'string parameters " " (getOperator queryStr))
                                      queue))
      ;; At a keyword: flush the previous clause, start accumulating the new one.
      (t (setf queue (pqueue:pqueue-push (make-fn fnStr parameters)
                                         (getPriority fnStr priorities)
                                         queue))
         (parseQuery (subseq queryStr (length kword)) kword "" queue)))))
(defun execute-queue (table queue)
  "execute query. queue - queue with all operation that query contains"
  ;; Pop clause operations in priority order, feeding each the table
  ;; produced so far. A copy is passed so clause handlers may mutate
  ;; their input freely.
  (cond
    ((pqueue:pqueue-empty-p queue) table)
    (t (execute-queue (funcall (pqueue:pqueue-pop queue) (copy-table table)) queue))))
(defun makeSimpleQuery (queryStr)
  "returns lambda that executes the query if we call it"
  (let ((queue (parseQuery queryStr "" "" (pqueue:make-pqueue #'<
                                                              :key-type 'integer
                                                              :value-type 'function))))
    ;; Discard the placeholder entry pushed for the initial "" clause
    ;; (priority 0, NIL function) before the queue is executed.
    (pqueue:pqueue-pop queue)
    (lambda ()
      (execute-queue (make-table) queue))))
(defun buildQueries (queryStr)
  "splits queryStr on the queries by 'union' operator and make tree-like structure.
returns lambda that executes all queries and does union if it needs"
  ;; Split at the first "union" (5 chars + separator => skip 6) and
  ;; union the left simple query with the recursively built right side.
  ;; NOTE(review): unionTables receives two thunks here; presumably it
  ;; funcalls them -- confirm against union.lisp.
  (let ((unionPos (search "union" queryStr)))
    (cond
      ((not unionPos) (makeSimpleQuery queryStr))
      (t (let ()
           (lambda ()
             (funcall #'unionTables (makeSimpleQuery (subseq queryStr 0 unionPos)) (buildQueries (subseq queryStr (+ unionPos 6))))))))))
(defun query (queryStr)
  "builds function for executing the query and executes its
prints result of quering"
  (let ((fn (buildQueries queryStr)))
    (printTable (funcall fn))))
(defun loadTable (tableName)
  "loads table command: print whole table"
  ;; Unknown names print NIL's table representation (gethash default).
  (printTable (gethash tableName tables)))
(defun execute (commandQuery)
  "executes entered text"
  ;; The first word selects the command; cutParameter yields the rest
  ;; of the line as that command's argument string.
  (let ((command (parseCommand commandQuery)))
    (cond
      ((string= command "exit") (exit))
      ((string= command "query") (query (cutParameter commandQuery)))
      ((string= command "load") (loadTable (cutParameter commandQuery)))
      (t (princ "Error: entered command not found!!!")))))
(defun run ()
  "runs cli"
  ;; Print a shell-like prompt, execute one input line, then loop
  ;; forever; the only way out is the "exit" command handled in execute.
  (terpri)
  (princ (format nil "[~A@~A] $: " (getEnvVariable "USER") (getEnvVariable "PWD")))
  (terpri)
  (execute (read-line))
  (run))
;; Start the REPL as soon as the file is loaded.
(run)
|
4e10a281980e48211919b6bf96c75784747e8a9da865f94669b04af0ae1c4325 | OCamlPro/techelson | macro.ml | Macro parsing .
(* Recognizes strings corresponding to macro operators. *)

(* [op token] maps a comparison suffix ("EQ", "NEQ", "LT", "LE", "GE"
   or "GT") to the corresponding [Base.Mic.Macro.op] constructor, or
   [None] when [token] is none of those. *)
let op (token : string) : Base.Mic.Macro.op option = match token with
| "EQ" -> Some Eq
| "NEQ" -> Some Neq
| "LT" -> Some Lt
| "LE" -> Some Le
| "GE" -> Some Ge
| "GT" -> Some Gt
| _ -> None
(* Parses a prefix and then an operator. *)

(* [prefixed_op err pref build token] returns
   - [None] when [token] does not start with [pref];
   - [Some (build o)] when the tail after [pref] is a comparison
     operator [o] recognized by [op];
   - otherwise reports the error lines built by [err] through
     [Base.Exc.throws] (NOTE(review): presumably raises, so the value
     of that branch is never produced -- confirm in Base.Exc). *)
let prefixed_op
    (err : unit -> string list)
    (pref : string)
    (build : Base.Mic.Macro.op -> 'a)
    (token : string)
    : 'a option
= match Utils.tail_of_pref ~pref token with
| None -> None
| Some tail -> (
    match op tail with
    | Some op -> Some (build op)
    | None -> err () |> Base.Exc.throws
)
(* [pair_op c] decodes one character of a PAIR macro: 'P', 'A' or 'I'. *)
let pair_op (c : char) : Base.Mic.Macro.pair_op option = match c with
| 'P' -> Some P
| 'A' -> Some A
| 'I' -> Some I
| _ -> None

(* [pair_ops token] decodes leading PAIR opcodes of [token], returning
   the opcodes and the remaining suffix (NOTE(review): presumably
   [Utils.sequence] stops at the first non-opcode char -- confirm). *)
let pair_ops (token : string) : (Base.Mic.Macro.pair_op list * string) =
    Utils.sequence pair_op token

(* [unpair_op c] decodes one character of an UNPAIR macro: 'A' or 'D'. *)
let unpair_op (c : char) : Base.Mic.Macro.unpair_op option = match c with
| 'A' -> Some A
| 'D' -> Some D
| _ -> None

(* [unpair_ops token] decodes leading UNPAIR opcodes of [token],
   returning the opcodes and the remaining suffix. *)
let unpair_ops (token : string) : (Base.Mic.Macro.unpair_op list * string) =
    Utils.sequence unpair_op token
| null | https://raw.githubusercontent.com/OCamlPro/techelson/932fbf08675cd13d34a07e3b3d77234bdafcf5bc/src/2_parse/macro.ml | ocaml | Recognizes strings corresponding to macro operators.
Parses a prefix and then an operator. | Macro parsing .
let op (token : string) : Base.Mic.Macro.op option = match token with
| "EQ" -> Some Eq
| "NEQ" -> Some Neq
| "LT" -> Some Lt
| "LE" -> Some Le
| "GE" -> Some Ge
| "GT" -> Some Gt
| _ -> None
let prefixed_op
(err : unit -> string list)
(pref : string)
(build : Base.Mic.Macro.op -> 'a)
(token : string)
: 'a option
= match Utils.tail_of_pref ~pref token with
| None -> None
| Some tail -> (
match op tail with
| Some op -> Some (build op)
| None -> err () |> Base.Exc.throws
)
let pair_op (c : char) : Base.Mic.Macro.pair_op option = match c with
| 'P' -> Some P
| 'A' -> Some A
| 'I' -> Some I
| _ -> None
let pair_ops (token : string) : (Base.Mic.Macro.pair_op list * string) =
Utils.sequence pair_op token
let unpair_op (c : char) : Base.Mic.Macro.unpair_op option = match c with
| 'A' -> Some A
| 'D' -> Some D
| _ -> None
let unpair_ops (token : string) : (Base.Mic.Macro.unpair_op list * string) =
Utils.sequence unpair_op token
|
454695ba1a58a7fe5ceb07e348a839768f095d6a68587b56ae353c1a39ab9a1a | haskellfoundation/error-message-index | Tab.hs | module Tab where
-- | Sum two machine integers.
add :: Int -> Int -> Int
add = (+)
| null | https://raw.githubusercontent.com/haskellfoundation/error-message-index/7dcad9cd5d7b51de61621d6a6eeb0ece68dea539/message-index/messages/GHC-94817/tab/after/Tab.hs | haskell | module Tab where
add :: Int -> Int -> Int
add x y =
x + y
| |
3b408e700bfb9ea55186fb63251b3daf85b7ee3d44252528209e26645c4f6af1 | threatgrid/ctia | print_matrix.clj | #!/usr/bin/env bb
;; determines the build matrix for the GitHub Actions build.
;; try it locally:
;; # normal builds
;; $ GITHUB_ENV=$(mktemp) GITHUB_EVENT_NAME=pull_request ./scripts/actions/print-matrix.clj
;; $ GITHUB_ENV=$(mktemp) GITHUB_EVENT_NAME=push ./scripts/actions/print-matrix.clj
;; # cron build
;; $ GITHUB_ENV=$(mktemp) GITHUB_EVENT_NAME=schedule ./scripts/actions/print-matrix.clj
;; $ GITHUB_ENV=$(mktemp) CTIA_COMMIT_MESSAGE='{:test-suite :cron} try cron build' GITHUB_EVENT_NAME=push ./scripts/actions/print-matrix.clj
(ns actions.print-matrix
  (:require [actions.actions-helpers :as h]
            [clojure.edn :as edn]))
(def ^:private default-java-version "11")
;; LTS version, do not remove from cron
(def ^:private java-17-version "17")
(def ^:private java-18-version "18")

(def non-cron-ctia-nsplits
  "Job parallelism for non cron tests."
  10)

(def ^:private cron-ctia-nsplits
  "Job parallelism for cron tests."
  2)
(defn parse-build-config
  "Derive the build configuration for this workflow run.

  The commit message (CTIA_COMMIT_MESSAGE) may carry an EDN map such as
  {:test-suite :cron} to override defaults; anything unparseable or
  non-map is ignored. When the map does not pin :test-suite, it is
  derived from the triggering GitHub event: schedule => :cron,
  pull_request/push => :pr.

  Uses clojure.edn/read-string rather than clojure.core/read-string:
  commit messages are untrusted input, and the core reader can execute
  arbitrary code via #=(...)."
  [{:keys [getenv]}]
  (let [m (try (edn/read-string (getenv "CTIA_COMMIT_MESSAGE"))
               (catch Exception _ nil))]
    (-> (when (map? m) m)
        (update :test-suite (fn [test-suite]
                              (or test-suite
                                  (case (getenv "GITHUB_EVENT_NAME")
                                    "schedule" :cron
                                    ("pull_request" "push") :pr)))))))
(defn- valid-split?
  "Predicate for one build-matrix entry: exactly the expected keys, a
  known test suite, a split index within [0, total_splits), and
  non-empty string java/ci fields."
  [{:keys [this_split total_splits java_version ci_profiles] :as m}]
  (let [expected-keys #{:this_split :total_splits
                        :java_version :ci_profiles :test_suite}
        non-empty-string? (every-pred string? seq)]
    (and (= expected-keys (set (keys m)))
         (contains? #{:ci :cron} (:test_suite m))
         (nat-int? this_split)
         (pos-int? total_splits)
         (<= 0 this_split)
         (< this_split total_splits)
         (non-empty-string? java_version)
         (non-empty-string? ci_profiles))))
(defn- splits-for
  "Expand the map `base` into `nsplits` entries numbered 0..nsplits-1
  via :this_split, each also carrying :total_splits."
  [base nsplits]
  {:pre [(pos? nsplits)]
   :post [(= (range nsplits) (map :this_split %))
          (= #{nsplits} (into #{} (map :total_splits) %))]}
  (map (fn [split-index]
         (assoc base
                :this_split split-index
                :total_splits nsplits))
       (range nsplits)))
(defn non-cron-matrix
  "Actions matrix for non cron builds: every split runs the :ci suite
  on the default JDK with the default CI profiles."
  []
  {:post [(every? valid-split? %)
          (zero? (mod (count %) non-cron-ctia-nsplits))]}
  (for [split (splits-for {:ci_profiles "default"
                           :java_version default-java-version}
                          non-cron-ctia-nsplits)]
    (assoc split :test_suite :ci)))
(defn cron-matrix
  "Actions matrix for cron builds: the default profile on the default
  JDK, plus the next-clojure profile on several JDKs; every base is
  expanded into cron-ctia-nsplits splits and tagged :cron."
  []
  {:post [(every? valid-split? %)
          (zero? (mod (count %) cron-ctia-nsplits))]}
  (let [bases (cons {:ci_profiles "default"
                     :java_version default-java-version}
                    (for [jdk [default-java-version
                               java-17-version
                               java-18-version]]
                      {:ci_profiles "next-clojure"
                       :java_version jdk}))]
    (for [base bases
          split (splits-for base cron-ctia-nsplits)]
      (assoc split :test_suite :cron))))
(defn edn-matrix
  "Select the matrix matching the build configuration's :test-suite
  (:cron or :pr); any other value throws via `case`."
  [build-config]
  {:post [(seq %)
          (every? valid-split? %)]}
  (let [suite (:test-suite build-config)]
    (case suite
      :cron (cron-matrix)
      :pr (non-cron-matrix))))
(defn print-matrix
  "Workflow entry point: compute the build matrix for this run, export
  CTIA_TEST_SUITE into the job environment, and publish the matrix as
  the `matrix` JSON step output."
  [{:keys [add-env set-json-output] :as utils}]
  (let [build-config (parse-build-config utils)
        _ (println "build-config:" (pr-str build-config))
        ;; inform run-tests.sh which test suite to run
        _ (add-env utils
                   "CTIA_TEST_SUITE"
                   (case (:test-suite build-config)
                     :cron "cron"
                     :pr "ci"))]
    (set-json-output utils "matrix" (edn-matrix build-config))))
(defn -main
  "Script entry point; runs with the real GitHub Actions helpers."
  [& _args]
  (print-matrix h/utils))
;; Execute immediately when invoked as a babashka script, but not when
;; this namespace is merely required (e.g. by tests).
(when (= *file* (System/getProperty "babashka.file")) (-main))
| null | https://raw.githubusercontent.com/threatgrid/ctia/e9ccd310fa0c6ae58b26e8d25e7852be1fe8e710/scripts/actions/print_matrix.clj | clojure | determines the build matrix for the GitHub Actions build.
try it locally:
# normal builds
$ GITHUB_ENV=$(mktemp) GITHUB_EVENT_NAME=pull_request ./scripts/actions/print-matrix.clj
$ GITHUB_ENV=$(mktemp) GITHUB_EVENT_NAME=push ./scripts/actions/print-matrix.clj
$ GITHUB_ENV=$(mktemp) GITHUB_EVENT_NAME=schedule ./scripts/actions/print-matrix.clj | #!/usr/bin/env bb
# cron build
$ GITHUB_ENV=$(mktemp ) CTIA_COMMIT_MESSAGE='{:test - suite : cron } try cron build ' GITHUB_EVENT_NAME = push ./scripts / actions / print - matrix.clj
(ns actions.print-matrix
(:require [actions.actions-helpers :as h]))
(def ^:private default-java-version "11")
LTS version , do not remove from cron
(def ^:private java-17-version "17")
(def ^:private java-18-version "18")
(def non-cron-ctia-nsplits
"Job parallelism for non cron tests."
10)
(def ^:private cron-ctia-nsplits
"Job parallelism for cron tests."
2)
(defn parse-build-config [{:keys [getenv]}]
(let [m (try (read-string (getenv "CTIA_COMMIT_MESSAGE"))
(catch Exception _))]
(-> (when (map? m) m)
(update :test-suite (fn [test-suite]
(or test-suite
(case (getenv "GITHUB_EVENT_NAME")
"schedule" :cron
("pull_request" "push") :pr)))))))
(defn- valid-split? [{:keys [this_split total_splits
java_version ci_profiles] :as m}]
(and (= #{:this_split :total_splits
:java_version :ci_profiles
:test_suite} (set (keys m)))
(#{:ci :cron} (:test_suite m))
(nat-int? this_split)
((every-pred nat-int? pos?) total_splits)
(<= 0 this_split)
(< this_split total_splits)
((every-pred string? seq) java_version)
((every-pred string? seq) ci_profiles)))
(defn- splits-for [base nsplits]
{:pre [(pos? nsplits)]
:post [(= (range nsplits)
(map :this_split %))
(= #{nsplits}
(into #{} (map :total_splits) %))]}
(for [this-split (range nsplits)]
(assoc base
:this_split this-split
:total_splits nsplits)))
(defn non-cron-matrix
"Actions matrix for non cron builds"
[]
{:post [(every? valid-split? %)
(zero? (mod (count %) non-cron-ctia-nsplits))]}
(sequence
(map #(assoc % :test_suite :ci))
(splits-for
{:ci_profiles "default"
:java_version default-java-version}
non-cron-ctia-nsplits)))
(defn cron-matrix
"Actions matrix for cron builds"
[]
{:post [(every? valid-split? %)
(zero? (mod (count %) cron-ctia-nsplits))]}
(sequence
(comp (mapcat #(splits-for % cron-ctia-nsplits))
(map #(assoc % :test_suite :cron)))
(concat
[{:ci_profiles "default"
:java_version default-java-version}]
(map #(into {:ci_profiles "next-clojure"} %)
[{:java_version default-java-version}
{:java_version java-17-version}
{:java_version java-18-version}]))))
(defn edn-matrix [build-config]
{:post [(seq %)
(every? valid-split? %)]}
(case (:test-suite build-config)
:cron (cron-matrix)
:pr (non-cron-matrix)))
(defn print-matrix [{:keys [add-env set-json-output] :as utils}]
(let [build-config (parse-build-config utils)
_ (println "build-config:" (pr-str build-config))
inform / run - tests.sh which test suite to run
_ (add-env utils
"CTIA_TEST_SUITE"
(case (:test-suite build-config)
:cron "cron"
:pr "ci"))]
(set-json-output utils "matrix" (edn-matrix build-config))))
(defn -main [& _args]
(print-matrix h/utils))
(when (= *file* (System/getProperty "babashka.file")) (-main))
|
c10a545d06b2cb4e7b996e3f54dc5b737d0f4ac6ea46e35bc48be3f349a1e03b | rowangithub/DOrder | 123_icse2014.ml | let rec loop x y =
if x < 11 then
if (x >= 5) then loop (x+1) (y+1)
else loop (x+1) y
else y
let main x =
let y = 5 in
let x = if (x > y) then y else x in
let res = loop x y in
assert (res = 11) | null | https://raw.githubusercontent.com/rowangithub/DOrder/e0d5efeb8853d2a51cc4796d7db0f8be3185d7df/tests/mochi2/benchs/123_icse2014.ml | ocaml | let rec loop x y =
if x < 11 then
if (x >= 5) then loop (x+1) (y+1)
else loop (x+1) y
else y
(* Clamp the input to at most 5, run [loop] from there with counter 5,
   and check the invariant that the loop always finishes at 11. *)
let main x =
  let cap = 5 in
  let start = if x > cap then cap else x in
  assert (loop start cap = 11)
e80b71d2932585181e26edceb790c8850f5c1fdeb2d00160a9a1313076ef079f | ekmett/lens | Lens.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
-----------------------------------------------------------------------------
-- |
-- Module : System.IO.Error.Lens
Copyright : ( C ) 2012 - 2016
-- License : BSD-style (see the file LICENSE)
Maintainer : < >
-- Stability : experimental
-- Portability : Rank2Types
--
----------------------------------------------------------------------------
module System.IO.Error.Lens where
import Control.Lens
import GHC.IO.Exception
import System.IO
import Foreign.C.Types
-- * IOException Lenses
-- | Where the error happened.
location :: Lens' IOException String
location f s = f (ioe_location s) <&> \e -> s { ioe_location = e }
# INLINE location #
-- | Error type specific information.
description :: Lens' IOException String
description f s = f (ioe_description s) <&> \e -> s { ioe_description = e }
# INLINE description #
-- | The handle used by the action flagging this error.
handle :: Lens' IOException (Maybe Handle)
handle f s = f (ioe_handle s) <&> \e -> s { ioe_handle = e }
# INLINE handle #
-- | 'fileName' the error is related to.
--
fileName :: Lens' IOException (Maybe FilePath)
fileName f s = f (ioe_filename s) <&> \e -> s { ioe_filename = e }
# INLINE fileName #
-- | 'errno' leading to this error, if any.
--
errno :: Lens' IOException (Maybe CInt)
errno f s = f (ioe_errno s) <&> \e -> s { ioe_errno = e }
# INLINE errno #
------------------------------------------------------------------------------
-- Error Types
------------------------------------------------------------------------------
-- | What type of error it is
errorType :: Lens' IOException IOErrorType
errorType f s = f (ioe_type s) <&> \e -> s { ioe_type = e }
# INLINE errorType #
-- * IOErrorType Prisms
--
_AlreadyExists :: Prism' IOErrorType ()
_AlreadyExists = only AlreadyExists
_NoSuchThing :: Prism' IOErrorType ()
_NoSuchThing = only NoSuchThing
_ResourceBusy :: Prism' IOErrorType ()
_ResourceBusy = only ResourceBusy
_ResourceExhausted :: Prism' IOErrorType ()
_ResourceExhausted = only ResourceExhausted
_EOF :: Prism' IOErrorType ()
_EOF = only EOF
_IllegalOperation :: Prism' IOErrorType ()
_IllegalOperation = only IllegalOperation
_PermissionDenied :: Prism' IOErrorType ()
_PermissionDenied = only PermissionDenied
_UserError :: Prism' IOErrorType ()
_UserError = only UserError
_UnsatisfiedConstraints :: Prism' IOErrorType ()
_UnsatisfiedConstraints = only UnsatisfiedConstraints
_SystemError :: Prism' IOErrorType ()
_SystemError = only SystemError
_ProtocolError :: Prism' IOErrorType ()
_ProtocolError = only ProtocolError
_OtherError :: Prism' IOErrorType ()
_OtherError = only OtherError
_InvalidArgument :: Prism' IOErrorType ()
_InvalidArgument = only InvalidArgument
_InappropriateType :: Prism' IOErrorType ()
_InappropriateType = only InappropriateType
_HardwareFault :: Prism' IOErrorType ()
_HardwareFault = only HardwareFault
_UnsupportedOperation :: Prism' IOErrorType ()
_UnsupportedOperation = only UnsupportedOperation
_TimeExpired :: Prism' IOErrorType ()
_TimeExpired = only TimeExpired
_ResourceVanished :: Prism' IOErrorType ()
_ResourceVanished = only ResourceVanished
_Interrupted :: Prism' IOErrorType ()
_Interrupted = only Interrupted
| null | https://raw.githubusercontent.com/ekmett/lens/a26281a49e85af46821964d8f7455b82cfd4251d/src/System/IO/Error/Lens.hs | haskell | ---------------------------------------------------------------------------
|
Module : System.IO.Error.Lens
License : BSD-style (see the file LICENSE)
Stability : experimental
Portability : Rank2Types
--------------------------------------------------------------------------
* IOException Lenses
| Where the error happened.
| Error type specific information.
| The handle used by the action flagging this error.
| 'fileName' the error is related to.
| 'errno' leading to this error, if any.
----------------------------------------------------------------------------
Error Types
----------------------------------------------------------------------------
| What type of error it is
* IOErrorType Prisms
| # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
Copyright : ( C ) 2012 - 2016
Maintainer : < >
module System.IO.Error.Lens where
import Control.Lens
import GHC.IO.Exception
import System.IO
import Foreign.C.Types
location :: Lens' IOException String
location f s = f (ioe_location s) <&> \e -> s { ioe_location = e }
# INLINE location #
description :: Lens' IOException String
description f s = f (ioe_description s) <&> \e -> s { ioe_description = e }
# INLINE description #
handle :: Lens' IOException (Maybe Handle)
handle f s = f (ioe_handle s) <&> \e -> s { ioe_handle = e }
# INLINE handle #
fileName :: Lens' IOException (Maybe FilePath)
fileName f s = f (ioe_filename s) <&> \e -> s { ioe_filename = e }
# INLINE fileName #
errno :: Lens' IOException (Maybe CInt)
errno f s = f (ioe_errno s) <&> \e -> s { ioe_errno = e }
# INLINE errno #
errorType :: Lens' IOException IOErrorType
errorType f s = f (ioe_type s) <&> \e -> s { ioe_type = e }
# INLINE errorType #
_AlreadyExists :: Prism' IOErrorType ()
_AlreadyExists = only AlreadyExists
_NoSuchThing :: Prism' IOErrorType ()
_NoSuchThing = only NoSuchThing
_ResourceBusy :: Prism' IOErrorType ()
_ResourceBusy = only ResourceBusy
_ResourceExhausted :: Prism' IOErrorType ()
_ResourceExhausted = only ResourceExhausted
_EOF :: Prism' IOErrorType ()
_EOF = only EOF
_IllegalOperation :: Prism' IOErrorType ()
_IllegalOperation = only IllegalOperation
_PermissionDenied :: Prism' IOErrorType ()
_PermissionDenied = only PermissionDenied
_UserError :: Prism' IOErrorType ()
_UserError = only UserError
_UnsatisfiedConstraints :: Prism' IOErrorType ()
_UnsatisfiedConstraints = only UnsatisfiedConstraints
_SystemError :: Prism' IOErrorType ()
_SystemError = only SystemError
_ProtocolError :: Prism' IOErrorType ()
_ProtocolError = only ProtocolError
_OtherError :: Prism' IOErrorType ()
_OtherError = only OtherError
_InvalidArgument :: Prism' IOErrorType ()
_InvalidArgument = only InvalidArgument
_InappropriateType :: Prism' IOErrorType ()
_InappropriateType = only InappropriateType
_HardwareFault :: Prism' IOErrorType ()
_HardwareFault = only HardwareFault
_UnsupportedOperation :: Prism' IOErrorType ()
_UnsupportedOperation = only UnsupportedOperation
_TimeExpired :: Prism' IOErrorType ()
_TimeExpired = only TimeExpired
_ResourceVanished :: Prism' IOErrorType ()
_ResourceVanished = only ResourceVanished
_Interrupted :: Prism' IOErrorType ()
_Interrupted = only Interrupted
|
b1c19f4ec950a14842297ec1c470fbef869565b6fc626116f3c918321a920aca | haroldcarr/learn-haskell-coq-ml-etc | LibSpec.hs | # LANGUAGE NoImplicitPrelude #
module LibSpec where
import Test.Hspec
------------------------------------------------------------------------------
import Lib
{-
spec :: Spec
spec = do
t01
t02
t03
t04
t05
-}
| null | https://raw.githubusercontent.com/haroldcarr/learn-haskell-coq-ml-etc/b4e83ec7c7af730de688b7376497b9f49dc24a0e/haskell/topic/existentials/2016-06-justin-le-practical-dependent-types-in-haskell-existential-neural-networks-and-types-at-runtime/test/LibSpec.hs | haskell | ----------------------------------------------------------------------------
spec :: Spec
spec = do
t01
t02
t03
t04
t05
| # LANGUAGE NoImplicitPrelude #
module LibSpec where
import Test.Hspec
import Lib
|
95415d6817064f678770432c3d57185baecab0ee2e629452b09dcefd35e7e36b | racket/racket7 | main.rkt | #lang racket/base
(require "config.rkt"
"special.rkt"
"wrap.rkt"
"coerce.rkt"
"readtable.rkt"
"whitespace.rkt"
"delimiter.rkt"
"closer.rkt"
"consume.rkt"
"location.rkt"
"accum-string.rkt"
"error.rkt"
"indentation.rkt"
"parameter.rkt"
"primitive-parameter.rkt"
"special-comment.rkt"
"sequence.rkt"
"vector.rkt"
"struct.rkt"
"graph.rkt"
"hash.rkt"
"symbol-or-number.rkt"
"string.rkt"
"char.rkt"
"quote.rkt"
"constant.rkt"
"box.rkt"
"regexp.rkt"
"extension.rkt"
"language.rkt"
"number.rkt")
(provide read
read-language
current-readtable
make-readtable
readtable?
readtable-mapping
string->number
(all-from-out "primitive-parameter.rkt")
(all-from-out "special-comment.rkt"))
;; This is not the `read` to be exposed from `racket/base`, but a
;; general entry to point implement `read` and variants like
;; `read-syntax` and `read/recursive`. To support syntax objects, the
;; caller should provide the `dynamic-require`, `read-compiled`,
;; `module-declared?`, and `coerce` functions, even when implementing
;; a plain `read`, since those might be needed by a
;; `read-syntax/recursive`.
;; Generic reader entry point behind `read`, `read-syntax`,
;; `read/recursive`, etc. The "see config.rkt" keyword arguments thread
;; host facilities into the reader configuration; they were garbled in
;; this copy and are restored to match the body's uses and the
;; identical keyword list of `read-language` below.
(define (read in
              #:wrap [wrap #f]
              #:init-c [init-c #f]
              #:next-readtable [next-readtable (current-readtable)]
              #:readtable [readtable next-readtable]
              #:recursive? [recursive? #f]
              #:local-graph? [local-graph? #f] ; ignored unless `recursive?`
              #:source [source #f]
              #:for-syntax? [for-syntax? #f]
              #:read-compiled [read-compiled #f]       ; see "config.rkt"
              #:dynamic-require [dynamic-require #f]   ; see "config.rkt"
              #:module-declared? [module-declared? #f] ; see "config.rkt"
              #:coerce [coerce #f]                     ; see "config.rkt"
              #:coerce-key [coerce-key #f]             ; see "config.rkt"
              #:keep-comment? [keep-comment? recursive?])
  ;; A recursive read reuses (and updates) the enclosing read's
  ;; configuration, so state such as #n= graph entries can be shared.
  (define config
    (cond
     [(and recursive?
           (current-read-config))
      => (lambda (config)
           (read-config-update config
                               #:for-syntax? for-syntax?
                               #:wrap wrap
                               #:readtable readtable
                               #:next-readtable next-readtable
                               #:reset-graph? local-graph?
                               #:keep-comment? keep-comment?))]
     [else
      (make-read-config #:readtable readtable
                        #:next-readtable next-readtable
                        #:source source
                        #:for-syntax? for-syntax?
                        #:wrap wrap
                        #:read-compiled read-compiled
                        #:dynamic-require dynamic-require
                        #:module-declared? module-declared?
                        #:coerce coerce
                        #:coerce-key coerce-key
                        #:keep-comment? keep-comment?)]))
  (define v (read-one init-c in config))
  (cond
   [(and (or (not recursive?) local-graph?)
         (read-config-state-graph (read-config-st config)))
    ;; Resolve #n# graph references into a (possibly cyclic) value.
    (catch-and-reraise-as-reader
     #f config
     (make-reader-graph v))]
   [(and recursive?
         (not local-graph?)
         (not for-syntax?)
         (not (eof-object? v))
         (not (special-comment? v)))
    (get-graph-hash config) ; to trigger placeholder resolution
    (make-placeholder v)]
   [else v]))
;; Reads a language specification (as used for `#lang`) from `in`,
;; producing the language's info result or calling `fail-k` when no
;; language is present. Uses a fresh configuration with no readtable.
(define (read-language in fail-k
                       #:for-syntax? [for-syntax? #f]
                       #:wrap [wrap #f]
                       #:read-compiled [read-compiled #f]
                       #:dynamic-require [dynamic-require #f]
                       #:module-declared? [module-declared? #f]
                       #:coerce [coerce #f]
                       #:coerce-key [coerce-key #f])
  (define config (make-read-config #:readtable #f
                                   #:next-readtable #f
                                   #:for-syntax? for-syntax?
                                   #:wrap wrap
                                   #:read-compiled read-compiled
                                   #:dynamic-require dynamic-require
                                   #:module-declared? module-declared?
                                   #:coerce coerce
                                   #:coerce-key coerce-key))
  ;; NOTE(review): `l-config` (which disables `read-accept-reader`) is
  ;; computed but unused -- the call below passes `config`. Confirm
  ;; whether `read-language/get-info` should receive `l-config`.
  (define l-config (override-parameter read-accept-reader config #f))
  (read-language/get-info read-undotted in config fail-k))
;; ----------------------------------------
;; The top-level reading layer that takes care of parsing into
;; `#%cdot`.
;; Reads one datum, optionally grouping `.`-separated terms into
;; `(#%dot <left> <right>)` forms when the `read-cdot` parameter is on.
(define (read-one init-c in config)
  (cond
   [(not (check-parameter read-cdot config))
    ;; No parsing of `.` as `#%dot`
    (read-undotted init-c in config)]
   [(check-parameter read-cdot config)
    ;; Look for `<something> . <something>`
    (define-values (line col pos) (port-next-location in))
    (define v (read-undotted init-c in config))
    (cond
     [(special-comment? v) v]
     [else
      ;; Fold any number of trailing `. <datum>` onto `v`,
      ;; left-associatively, skipping intervening whitespace.
      (let loop ([v v])
        (define c (peek-char/special in config))
        (define ec (effective-char c config))
        (cond
         [(not (char? ec)) v]
         [(char-whitespace? ec)
          (consume-char in c)
          (loop v)]
         [(char=? ec #\.)
          (define-values (dot-line dot-col dot-pos) (port-next-location in))
          (consume-char in c)
          (define cdot (wrap '#%dot in (reading-at config dot-line dot-col dot-pos) #\.))
          (define post-v (read-undotted #f in config))
          (loop (wrap (list cdot v post-v) in (reading-at config line col pos) #\.))]
         [else v]))])]))
;; ----------------------------------------
;; The top-level reading layer within `#%dot` handling --- which is
;; the reader's main dispatch layer.
;; Reads one datum after whitespace/comments, dispatching on the
;; (readtable-mapped) character: quotes, parens/brackets/braces,
;; strings, `#` forms, and symbols/numbers.
(define (read-undotted init-c in config)
  (define c (read-char/skip-whitespace-and-comments init-c read-one in config))
  (define-values (line col pos) (port-next-location* in c))
  (cond
   [(eof-object? c) eof]
   [(not (char? c))
    ;; A "special" non-character result from the port.
    (define v (special-value c))
    (cond
     [(special-comment? v)
      (if (read-config-keep-comment? config)
          v
          (read-undotted #f in config))]
     [else (coerce v in (reading-at config line col pos))])]
   [(readtable-handler config c)
    => (lambda (handler)
         (define v (readtable-apply handler c in config line col pos))
         (retry-special-comment v in config))]
   [else
    ;; Map character via readtable:
    (define ec (effective-char c config))
    ;; Track indentation, unless it's a spurious closer:
    (when (not (char-closer? ec config))
      (track-indentation! config line col))
    (define r-config (reading-at (discard-comment config) line col pos))
    ;; Report an error unless `e` holds (for parameters that gate a
    ;; character's special meaning).
    (define-syntax-rule (guard-legal e body ...)
      (cond
       [e body ...]
       [else (reader-error in r-config "illegal use of `~a`" c)]))
    ;; Dispatch on character:
    (case ec
      [(#\#)
       (read-dispatch c in r-config config)]
      [(#\')
       (read-quote read-one 'quote "quoting '" c in r-config)]
      [(#\`)
       (guard-legal
        (check-parameter read-accept-quasiquote config)
        (read-quote read-one 'quasiquote "quasiquoting `" c in r-config))]
      [(#\,)
       (guard-legal
        (check-parameter read-accept-quasiquote config)
        (define c2 (peek-char/special in config))
        (if (eqv? c2 #\@)
            (begin
              (consume-char in c2)
              (read-quote read-one 'unquote-splicing "unquoting ,@" c in r-config))
            (read-quote read-one 'unquote "unquoting ," c in r-config)))]
      [(#\()
       (wrap (read-unwrapped-sequence read-one ec #\( #\) in r-config #:shape-tag? #t) in r-config ec)]
      [(#\))
       (reader-error in r-config "~a" (indentation-unexpected-closer-message ec c r-config))]
      [(#\[)
       (guard-legal
        (or (check-parameter read-square-bracket-as-paren config)
            (check-parameter read-square-bracket-with-tag config))
        (wrap (read-unwrapped-sequence read-one ec #\[ #\] in r-config #:shape-tag? #t) in r-config ec))]
      [(#\])
       (guard-legal
        (or (check-parameter read-square-bracket-as-paren config)
            (check-parameter read-square-bracket-with-tag config))
        (reader-error in r-config "~a" (indentation-unexpected-closer-message ec c r-config)))]
      [(#\{)
       (guard-legal
        (or (check-parameter read-curly-brace-as-paren config)
            (check-parameter read-curly-brace-with-tag config))
        (wrap (read-unwrapped-sequence read-one ec #\{ #\} in r-config #:shape-tag? #t) in r-config ec))]
      [(#\})
       (guard-legal
        (or (check-parameter read-curly-brace-as-paren config)
            (check-parameter read-curly-brace-with-tag config))
        (reader-error in r-config "~a" (indentation-unexpected-closer-message ec c r-config)))]
      [(#\")
       (read-string in r-config)]
      [(#\|)
       (read-symbol-or-number c in r-config #:mode 'symbol)]
      [else
       (define v
         (read-symbol-or-number c in r-config
                                ;; Don't read as a number if the effective char
                                ;; is non-numeric:
                                #:mode (if (or (eq? c ec)
                                               (and ((char->integer ec) . < . 128)
                                                    (char-numeric? ec)))
                                           'symbol-or-number
                                           'symbol/indirect)))
       (retry-special-comment v in config)])]))
;; Dispatch on `#` character
;; Handles the character after a `#` (dispatch-c). `config` is the
;; reading-at configuration; `orig-config` is the pre-dispatch one,
;; used for readtable lookups and special-comment retries.
(define (read-dispatch dispatch-c in config orig-config)
  (define c (read-char/special in config))
  (cond
   [(eof-object? c)
    (reader-error in config #:due-to c "bad syntax `~a`" dispatch-c)]
   [(not (char? c))
    (reader-error in config #:due-to c "bad syntax `~a`" dispatch-c)]
   [(readtable-dispatch-handler orig-config c)
    => (lambda (handler)
         (define line (read-config-line config))
         (define col (read-config-col config))
         (define pos (read-config-pos config))
         (define v (readtable-apply handler c in config line col pos))
         (retry-special-comment v in orig-config))]
   [else
    (define-syntax-rule (guard-legal e c body ...)
      (cond
       [e body ...]
       [else (bad-syntax-error in config (format "~a~a" dispatch-c c))]))
    (case c
      [(#\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9)
       ;; Vector, graph definition, or graph reference
       (read-vector-or-graph read-one dispatch-c c in config)]
      [(#\()
       (read-vector read-one #\( #\( #\) in config)]
      [(#\[)
       (guard-legal
        (check-parameter read-square-bracket-as-paren config)
        c
        (read-vector read-one #\[ #\[ #\] in config))]
      [(#\{)
       (guard-legal
        (check-parameter read-curly-brace-as-paren config)
        c
        (read-vector read-one #\{ #\{ #\} in config))]
      [(#\s)
       (read-struct read-one dispatch-c in config)]
      [(#\&)
       (read-box read-one dispatch-c in config)]
      [(#\')
       (read-quote read-one 'syntax "quoting #'" c in config)]
      [(#\`)
       (read-quote read-one 'quasisyntax "quasiquoting #`" c in config)]
      [(#\,)
       (define c2 (peek-char/special in config))
       (if (eqv? c2 #\@)
           (begin
             (consume-char in c2)
             (read-quote read-one 'unsyntax-splicing "unquoting #,@" c in config))
           (read-quote read-one 'unsyntax "unquoting #," c in config))]
      [(#\\)
       (read-character in config)]
      [(#\")
       (read-string in config #:mode '|byte string|)]
      [(#\<)
       (define c2 (peek-char/special in config))
       (cond
        [(eqv? #\< c2)
         (consume-char in #\<)
         (read-here-string in config)]
        [else
         (reader-error in config #:due-to c2 "bad syntax `~a<`" dispatch-c)])]
      [(#\%)
       (read-symbol-or-number c in config #:extra-prefix dispatch-c #:mode 'symbol)]
      [(#\:)
       (read-symbol-or-number #f in config #:mode 'keyword)]
      [(#\t #\T)
       (define c2 (peek-char/special in config))
       (cond
        [(char-delimiter? c2 config) (wrap #t in config c)]
        [else (read-delimited-constant c (char=? c #\t) '(#\r #\u #\e) #t in config)])]
      [(#\f #\F)
       (define c2 (peek-char/special in config))
       (cond
        [(char-delimiter? c2 config) (wrap #f in config c)]
        [(or (char=? c2 #\x) (char=? c2 #\l))
         (read-fixnum-or-flonum-vector read-one dispatch-c c c2 in config)]
        [else (read-delimited-constant c (char=? c #\f) '(#\a #\l #\s #\e) #f in config)])]
      [(#\e) (read-symbol-or-number #f in config #:mode "#e")]
      [(#\E) (read-symbol-or-number #f in config #:mode "#E")]
      [(#\i) (read-symbol-or-number #f in config #:mode "#i")]
      [(#\I) (read-symbol-or-number #f in config #:mode "#I")]
      [(#\d) (read-symbol-or-number #f in config #:mode "#d")]
      [(#\B) (read-symbol-or-number #f in config #:mode "#B")]
      [(#\o) (read-symbol-or-number #f in config #:mode "#o")]
      [(#\O) (read-symbol-or-number #f in config #:mode "#O")]
      [(#\D) (read-symbol-or-number #f in config #:mode "#D")]
      [(#\b) (read-symbol-or-number #f in config #:mode "#b")]
      [(#\x) (read-symbol-or-number #f in config #:mode "#x")]
      [(#\X) (read-symbol-or-number #f in config #:mode "#X")]
      [(#\c #\C)
       (define c2 (read-char/special in config))
       (case c2
         [(#\s #\S) (read-one #f in (override-parameter read-case-sensitive config #t))]
         [(#\i #\I) (read-one #f in (override-parameter read-case-sensitive config #f))]
         [else
          (reader-error in config #:due-to c2
                        "expected `s', `S`, `i', or `I` after `~a~a`"
                        dispatch-c c)])]
      [(#\h #\H) (read-hash read-one dispatch-c c in config)]
      [(#\r)
       ;; Maybe regexp or `#reader`
       (define accum-str (accum-string-init! config))
       (accum-string-add! accum-str dispatch-c)
       (accum-string-add! accum-str c)
       (define c2 (read-char/special in config))
       (when (char? c2) (accum-string-add! accum-str c2))
       (case c2
         [(#\x) (read-regexp c accum-str in config)]
         [(#\e) (read-extension-reader read-one read-undotted dispatch-c in config)]
         [else
          (bad-syntax-error in config
                            #:due-to c2
                            (accum-string-get! accum-str config))])]
      [(#\p)
       ;; Maybe a `#px` regexp (this comment's `;;` marker was garbled
       ;; to a bare `Maybe` token, which would be an unbound identifier
       ;; here; restored as a comment)
       (define accum-str (accum-string-init! config))
       (accum-string-add! accum-str dispatch-c)
       (accum-string-add! accum-str c)
       (define c2 (read-char/special in config))
       (when (char? c2) (accum-string-add! accum-str c2))
       (case c2
         [(#\x) (read-regexp c accum-str in config)]
         [else (bad-syntax-error in config #:due-to c2
                                 (accum-string-get! accum-str config))])]
      [(#\l)
       ;; Maybe `#lang`
       (read-extension-lang read-undotted dispatch-c in config)]
      [(#\!)
       ;; Maybe `#lang`-style `#!` (or `#!/...` line)
       (read-extension-#! read-undotted dispatch-c in config)]
      [(#\~)
       ;; Compiled code
       (cond
        [(check-parameter read-accept-compiled config)
         (wrap ((read-config-read-compiled config) in) in config c)]
        [else
         (reader-error in config
                       "`~a~~` compiled expressions not enabled"
                       dispatch-c)])]
      [else
       (reader-error in config "bad syntax `~a~a`" dispatch-c c)])]))
;; If `v` is a special-comment result and comments are not being kept,
;; discard it and read the next datum; otherwise pass `v` through.
(define (retry-special-comment v in config)
  (if (and (special-comment? v)
           (not (read-config-keep-comment? config)))
      (read-undotted #f in config)
      v))
| null | https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/racket/src/expander/read/main.rkt | racket | This is not the `read` to be exposed from `racket/base`, but a
general entry to point implement `read` and variants like
`read-syntax` and `read/recursive`. To support syntax objects, the
caller should provide the `dynamic-require`, `read-compiled`,
a plain `read`, since those might be needed by a
`read-syntax/recursive`.
ignored unless `recursive?`
to trigger placeholder resolution
----------------------------------------
The top-level reading layer that takes care of parsing into
`#%cdot`.
No parsing of `.` as `#%dot`
Look for `<something> . <something>`
----------------------------------------
the reader's main dispatch layer.
Map character via readtable:
Track indentation, unless it's a spurious closer:
Dispatch on character:
Don't read as a number if the effective char
is non-numeric:
Vector, graph definition, or graph reference
Maybe regexp or `#reader`
Maybe `#lang`
Maybe `#lang`
Compiled code | #lang racket/base
(require "config.rkt"
"special.rkt"
"wrap.rkt"
"coerce.rkt"
"readtable.rkt"
"whitespace.rkt"
"delimiter.rkt"
"closer.rkt"
"consume.rkt"
"location.rkt"
"accum-string.rkt"
"error.rkt"
"indentation.rkt"
"parameter.rkt"
"primitive-parameter.rkt"
"special-comment.rkt"
"sequence.rkt"
"vector.rkt"
"struct.rkt"
"graph.rkt"
"hash.rkt"
"symbol-or-number.rkt"
"string.rkt"
"char.rkt"
"quote.rkt"
"constant.rkt"
"box.rkt"
"regexp.rkt"
"extension.rkt"
"language.rkt"
"number.rkt")
(provide read
read-language
current-readtable
make-readtable
readtable?
readtable-mapping
string->number
(all-from-out "primitive-parameter.rkt")
(all-from-out "special-comment.rkt"))
;; `module-declared?`, and `coerce` functions, even when implementing
;; General entry point for reading one datum from `in`; used to implement
;; `read`, `read-syntax`, and the `/recursive` variants. Syntax-object
;; support is threaded through the `#:read-compiled`, `#:dynamic-require`,
;; `#:module-declared?`, and `#:coerce` callbacks (see "config.rkt").
;; NOTE(review): the keyword parameters `#:local-graph?` and
;; `#:read-compiled` through `#:coerce-key` were missing in this copy of
;; the file although the body references all of them (they had been
;; replaced by stray comment residue); restored here with `#f` defaults,
;; which is backward compatible for existing callers.
(define (read in
              #:wrap [wrap #f]
              #:init-c [init-c #f]
              #:next-readtable [next-readtable (current-readtable)]
              #:readtable [readtable next-readtable]
              #:recursive? [recursive? #f]
              #:local-graph? [local-graph? #f] ; ignored unless `recursive?`
              #:source [source #f]
              #:for-syntax? [for-syntax? #f]
              #:read-compiled [read-compiled #f]       ; see "config.rkt"
              #:dynamic-require [dynamic-require #f]   ; see "config.rkt"
              #:module-declared? [module-declared? #f] ; see "config.rkt"
              #:coerce [coerce #f]                     ; see "config.rkt"
              #:coerce-key [coerce-key #f]             ; see "config.rkt"
              #:keep-comment? [keep-comment? recursive?])
  (define config
    (cond
     [(and recursive?
           (current-read-config))
      ;; Recursive read: refine the configuration already in effect
      => (lambda (config)
           (read-config-update config
                               #:for-syntax? for-syntax?
                               #:wrap wrap
                               #:readtable readtable
                               #:next-readtable next-readtable
                               #:reset-graph? local-graph?
                               #:keep-comment? keep-comment?))]
     [else
      (make-read-config #:readtable readtable
                        #:next-readtable next-readtable
                        #:source source
                        #:for-syntax? for-syntax?
                        #:wrap wrap
                        #:read-compiled read-compiled
                        #:dynamic-require dynamic-require
                        #:module-declared? module-declared?
                        #:coerce coerce
                        #:coerce-key coerce-key
                        #:keep-comment? keep-comment?)]))
  (define v (read-one init-c in config))
  (cond
   [(and (or (not recursive?) local-graph?)
         (read-config-state-graph (read-config-st config)))
    ;; Resolve `#N=`/`#N#` graph placeholders before returning
    (catch-and-reraise-as-reader
     #f config
     (make-reader-graph v))]
   [(and recursive?
         (not local-graph?)
         (not for-syntax?)
         (not (eof-object? v))
         (not (special-comment? v)))
    ;; A recursive read returns a placeholder so the enclosing read can
    ;; resolve graph references later
    (make-placeholder v)]
   [else v]))
;; Reads a `#lang`/`#!` language specification from `in` and returns its
;; `get-info` function, or invokes `fail-k` if no language line is found.
(define (read-language in fail-k
                       #:for-syntax? [for-syntax? #f]
                       #:wrap [wrap #f]
                       #:read-compiled [read-compiled #f]
                       #:dynamic-require [dynamic-require #f]
                       #:module-declared? [module-declared? #f]
                       #:coerce [coerce #f]
                       #:coerce-key [coerce-key #f])
  (define config (make-read-config #:readtable #f
                                   #:next-readtable #f
                                   #:for-syntax? for-syntax?
                                   #:wrap wrap
                                   #:read-compiled read-compiled
                                   #:dynamic-require dynamic-require
                                   #:module-declared? module-declared?
                                   #:coerce coerce
                                   #:coerce-key coerce-key))
  ;; Disable `#reader`/`#lang` nesting while parsing the language line.
  ;; Bug fix: `l-config` was created but unused (`config` was passed
  ;; below), so the `read-accept-reader` override never took effect.
  (define l-config (override-parameter read-accept-reader config #f))
  (read-language/get-info read-undotted in l-config fail-k))
;; Reading layer that implements `#%dot` parsing: after reading one datum,
;; repeatedly looks for `<datum> . <datum>` and wraps each pair as
;; `(#%dot <datum> <datum>)`. When `read-cdot` is disabled this is just
;; `read-undotted`.
(define (read-one init-c in config)
  (cond
   [(not (check-parameter read-cdot config))
    ;; No parsing of `.` as `#%dot`
    (read-undotted init-c in config)]
   [else
    ;; `read-cdot` is enabled. (The original re-tested the parameter in
    ;; this clause; `else` is equivalent and avoids the second lookup.)
    (define-values (line col pos) (port-next-location in))
    (define v (read-undotted init-c in config))
    (cond
     [(special-comment? v) v]
     [else
      ;; Look for `<something> . <something>`, skipping whitespace
      (let loop ([v v])
        (define c (peek-char/special in config))
        (define ec (effective-char c config))
        (cond
         [(not (char? ec)) v]
         [(char-whitespace? ec)
          (consume-char in c)
          (loop v)]
         [(char=? ec #\.)
          (define-values (dot-line dot-col dot-pos) (port-next-location in))
          (consume-char in c)
          (define cdot (wrap '#%dot in (reading-at config dot-line dot-col dot-pos) #\.))
          (define post-v (read-undotted #f in config))
          (loop (wrap (list cdot v post-v) in (reading-at config line col pos) #\.))]
         [else v]))])]))
;; The top-level reading layer within `#%dot` handling --- which is the
;; reader's main dispatch layer.
;; The reader's main dispatch layer, below `#%dot` handling: skips
;; whitespace and comments, consults the readtable, tracks indentation
;; for error reporting, and dispatches on the effective character.
(define (read-undotted init-c in config)
  (define c (read-char/skip-whitespace-and-comments init-c read-one in config))
  (define-values (line col pos) (port-next-location* in c))
  (cond
   [(eof-object? c) eof]
   [(not (char? c))
    ;; A non-character result is a "special" value read from the port
    (define v (special-value c))
    (cond
     [(special-comment? v)
      (if (read-config-keep-comment? config)
          v
          (read-undotted #f in config))]
     [else (coerce v in (reading-at config line col pos))])]
   [(readtable-handler config c)
    ;; A custom readtable mapping takes precedence over built-in syntax
    => (lambda (handler)
         (define v (readtable-apply handler c in config line col pos))
         (retry-special-comment v in config))]
   [else
    ;; Track indentation, unless it's a spurious closer
    (define ec (effective-char c config))
    (when (not (char-closer? ec config))
      (track-indentation! config line col))
    (define r-config (reading-at (discard-comment config) line col pos))
    (define-syntax-rule (guard-legal e body ...)
      (cond
       [e body ...]
       [else (reader-error in r-config "illegal use of `~a`" c)]))
    ;; Dispatch on the effective character:
    (case ec
      [(#\#)
       (read-dispatch c in r-config config)]
      [(#\')
       (read-quote read-one 'quote "quoting '" c in r-config)]
      [(#\`)
       (guard-legal
        (check-parameter read-accept-quasiquote config)
        (read-quote read-one 'quasiquote "quasiquoting `" c in r-config))]
      [(#\,)
       (guard-legal
        (check-parameter read-accept-quasiquote config)
        (define c2 (peek-char/special in config))
        (if (eqv? c2 #\@)
            (begin
              (consume-char in c2)
              (read-quote read-one 'unquote-splicing "unquoting ,@" c in r-config))
            (read-quote read-one 'unquote "unquoting ," c in r-config)))]
      [(#\()
       (wrap (read-unwrapped-sequence read-one ec #\( #\) in r-config #:shape-tag? #t) in r-config ec)]
      [(#\))
       (reader-error in r-config "~a" (indentation-unexpected-closer-message ec c r-config))]
      [(#\[)
       (guard-legal
        (or (check-parameter read-square-bracket-as-paren config)
            (check-parameter read-square-bracket-with-tag config))
        (wrap (read-unwrapped-sequence read-one ec #\[ #\] in r-config #:shape-tag? #t) in r-config ec))]
      [(#\])
       (guard-legal
        (or (check-parameter read-square-bracket-as-paren config)
            (check-parameter read-square-bracket-with-tag config))
        (reader-error in r-config "~a" (indentation-unexpected-closer-message ec c r-config)))]
      [(#\{)
       (guard-legal
        (or (check-parameter read-curly-brace-as-paren config)
            (check-parameter read-curly-brace-with-tag config))
        (wrap (read-unwrapped-sequence read-one ec #\{ #\} in r-config #:shape-tag? #t) in r-config ec))]
      [(#\})
       (guard-legal
        (or (check-parameter read-curly-brace-as-paren config)
            (check-parameter read-curly-brace-with-tag config))
        (reader-error in r-config "~a" (indentation-unexpected-closer-message ec c r-config)))]
      [(#\")
       (read-string in r-config)]
      [(#\|)
       ;; `|...|` always reads as a symbol
       (read-symbol-or-number c in r-config #:mode 'symbol)]
      [else
       ;; Don't read as a number if the effective char is non-numeric
       (define v
         (read-symbol-or-number c in r-config
                                #:mode (if (or (eq? c ec)
                                               (and ((char->integer ec) . < . 128)
                                                    (char-numeric? ec)))
                                           'symbol-or-number
                                           'symbol/indirect)))
       (retry-special-comment v in config)])]))
;; Dispatch on `#` character
(define (read-dispatch dispatch-c in config orig-config)
(define c (read-char/special in config))
(cond
[(eof-object? c)
(reader-error in config #:due-to c "bad syntax `~a`" dispatch-c)]
[(not (char? c))
(reader-error in config #:due-to c "bad syntax `~a`" dispatch-c)]
[(readtable-dispatch-handler orig-config c)
=> (lambda (handler)
(define line (read-config-line config))
(define col (read-config-col config))
(define pos (read-config-pos config))
(define v (readtable-apply handler c in config line col pos))
(retry-special-comment v in orig-config))]
[else
(define-syntax-rule (guard-legal e c body ...)
(cond
[e body ...]
[else (bad-syntax-error in config (format "~a~a" dispatch-c c))]))
(case c
[(#\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9)
(read-vector-or-graph read-one dispatch-c c in config)]
[(#\()
(read-vector read-one #\( #\( #\) in config)]
[(#\[)
(guard-legal
(check-parameter read-square-bracket-as-paren config)
c
(read-vector read-one #\[ #\[ #\] in config))]
[(#\{)
(guard-legal
(check-parameter read-curly-brace-as-paren config)
c
(read-vector read-one #\{ #\{ #\} in config))]
[(#\s)
(read-struct read-one dispatch-c in config)]
[(#\&)
(read-box read-one dispatch-c in config)]
[(#\')
(read-quote read-one 'syntax "quoting #'" c in config)]
[(#\`)
(read-quote read-one 'quasisyntax "quasiquoting #`" c in config)]
[(#\,)
(define c2 (peek-char/special in config))
(if (eqv? c2 #\@)
(begin
(consume-char in c2)
(read-quote read-one 'unsyntax-splicing "unquoting #,@" c in config))
(read-quote read-one 'unsyntax "unquoting #," c in config))]
[(#\\)
(read-character in config)]
[(#\")
(read-string in config #:mode '|byte string|)]
[(#\<)
(define c2 (peek-char/special in config))
(cond
[(eqv? #\< c2)
(consume-char in #\<)
(read-here-string in config)]
[else
(reader-error in config #:due-to c2 "bad syntax `~a<`" dispatch-c)])]
[(#\%)
(read-symbol-or-number c in config #:extra-prefix dispatch-c #:mode 'symbol)]
[(#\:)
(read-symbol-or-number #f in config #:mode 'keyword)]
[(#\t #\T)
(define c2 (peek-char/special in config))
(cond
[(char-delimiter? c2 config) (wrap #t in config c)]
[else (read-delimited-constant c (char=? c #\t) '(#\r #\u #\e) #t in config)])]
[(#\f #\F)
(define c2 (peek-char/special in config))
(cond
[(char-delimiter? c2 config) (wrap #f in config c)]
[(or (char=? c2 #\x) (char=? c2 #\l))
(read-fixnum-or-flonum-vector read-one dispatch-c c c2 in config)]
[else (read-delimited-constant c (char=? c #\f) '(#\a #\l #\s #\e) #f in config)])]
[(#\e) (read-symbol-or-number #f in config #:mode "#e")]
[(#\E) (read-symbol-or-number #f in config #:mode "#E")]
[(#\i) (read-symbol-or-number #f in config #:mode "#i")]
[(#\I) (read-symbol-or-number #f in config #:mode "#I")]
[(#\d) (read-symbol-or-number #f in config #:mode "#d")]
[(#\B) (read-symbol-or-number #f in config #:mode "#B")]
[(#\o) (read-symbol-or-number #f in config #:mode "#o")]
[(#\O) (read-symbol-or-number #f in config #:mode "#O")]
[(#\D) (read-symbol-or-number #f in config #:mode "#D")]
[(#\b) (read-symbol-or-number #f in config #:mode "#b")]
[(#\x) (read-symbol-or-number #f in config #:mode "#x")]
[(#\X) (read-symbol-or-number #f in config #:mode "#X")]
[(#\c #\C)
(define c2 (read-char/special in config))
(case c2
[(#\s #\S) (read-one #f in (override-parameter read-case-sensitive config #t))]
[(#\i #\I) (read-one #f in (override-parameter read-case-sensitive config #f))]
[else
(reader-error in config #:due-to c2
"expected `s', `S`, `i', or `I` after `~a~a`"
dispatch-c c)])]
[(#\h #\H) (read-hash read-one dispatch-c c in config)]
[(#\r)
(define accum-str (accum-string-init! config))
(accum-string-add! accum-str dispatch-c)
(accum-string-add! accum-str c)
(define c2 (read-char/special in config))
(when (char? c2) (accum-string-add! accum-str c2))
(case c2
[(#\x) (read-regexp c accum-str in config)]
[(#\e) (read-extension-reader read-one read-undotted dispatch-c in config)]
[else
(bad-syntax-error in config
#:due-to c2
(accum-string-get! accum-str config))])]
[(#\p)
       ;; Maybe a `#px` regexp
(define accum-str (accum-string-init! config))
(accum-string-add! accum-str dispatch-c)
(accum-string-add! accum-str c)
(define c2 (read-char/special in config))
(when (char? c2) (accum-string-add! accum-str c2))
(case c2
[(#\x) (read-regexp c accum-str in config)]
[else (bad-syntax-error in config #:due-to c2
(accum-string-get! accum-str config))])]
[(#\l)
(read-extension-lang read-undotted dispatch-c in config)]
[(#\!)
(read-extension-#! read-undotted dispatch-c in config)]
[(#\~)
(cond
[(check-parameter read-accept-compiled config)
(wrap ((read-config-read-compiled config) in) in config c)]
[else
(reader-error in config
"`~a~~` compiled expressions not enabled"
dispatch-c)])]
[else
(reader-error in config "bad syntax `~a~a`" dispatch-c c)])]))
;; If `v` is a special comment and comments are not being kept, read the
;; next datum instead; otherwise return `v` unchanged.
(define (retry-special-comment v in config)
  (cond
   [(special-comment? v)
    (if (read-config-keep-comment? config)
        v
        (read-undotted #f in config))]
   [else v]))
|
32cc42d3fe7efd10bf963c07b45067640a47e755fe5fdc4dbe798902a62b7c0c | heidegger/JSConTest | TCJS.ml | open AST
open Annotation
open ProglangUtils
open ExtList
open Contract
open BaseContract
open Analyse
open ASTUtil
open String_of
(* Abbreviations for the contract AST instantiated with the concrete
   base-contract, analysis and dependency annotations used here. *)
type bc = BaseContract.t
type a = Analyse.t
type d = Depend.t
type tc = (bc,a,d,DependDown.t) Contract.t
type c = (bc,a,d,DependDown.t) Contract.contract
type exp = tc AST.expression
(* Code-generation environment: which artefacts to emit (tests/asserts),
   the JavaScript namespaces to emit into, the prefix for generated
   variables, and the state carried by the effect transformer. *)
type 'a env = {
  tests: bool;
  asserts: bool;
  js_namespace: string;
  js_test_namespace: string;
  js_contract_namespace: string;
  variable_prefix: string;
  effects_env: 'a;
}
(* Interface of the statement/effect transformer plugged into [Make]:
   [transform] rewrites a function body; [before_wrapper] and
   [after_wrapper] wrap the resulting function expression. *)
module type TRANS = sig
  type t
  val transform : t
    -> bool option
    -> 'c source_element list
    -> 'c source_element list
  val before_wrapper : t -> 'c identifier list -> 'c expression -> 'c expression
  val after_wrapper : t -> 'c identifier list -> 'c expression -> 'c expression
end
(* Result signature of [Make]: turns a contract-annotated program into a
   plain (annotation-free) program with instrumentation inserted. *)
module type S = sig
  type t
  val transform : t env
    -> (BaseContract.t, Analyse.t,Depend.t,unit) Contract.t program
    -> unit program
end
(* Pretty-printer for identifiers carrying full contract annotations. *)
let so_i = so_identifier
  (Contract.string_of
     BaseContract.string_of
     Analyse.string_of
     Depend.string_of)
(* Pretty-printer for annotated expressions (diagnostics only). *)
let so_e = so_expression
  (Contract.string_of
     BaseContract.string_of
     Analyse.string_of
     Depend.string_of)
(* [fopt f o] maps [f] over an option: [None] stays [None],
   [Some a] becomes [Some (f a)]. *)
let fopt f o =
  match o with
  | Some v -> Some (f v)
  | None -> None
module Make(T: TRANS) : S with type t = T.t = struct
  type t = T.t
  (* JavaScript expressions naming the runtime namespaces (test library,
     contract library) plus the prefix used for generated temporaries. *)
  type 'c prefix_infos = {
    ptest: 'c AST.expression;
    pcontract: 'c AST.expression;
    ptmp: string;
  }
  (* Literal pools harvested from the program (labels, strings, numbers),
     already packaged as JavaScript array-literal expressions. *)
  type infos = {
    labels: exp;
    strings: exp;
    numbers: exp;
  }
(* generate code for contracts *)
let generate_contract : tc prefix_infos -> infos -> Testlib.varname -> c -> exp =
fun prefix info fname c ->
let rec generate_contractl : c list -> exp list =
fun cl ->
List.map generate_contract cl
and generate_contract : c -> exp =
function
| CUnion cl ->
let el = generate_contractl cl in
(do_mcalle_el
prefix.pcontract "Union"
el)
| BObjectPL (pl,r,al,_) ->
let el = generate_contractl (List.map snd pl) in
let ple =
new_array
((List.map2
(fun (name,c) e ->
new_object
["name",c_to_e (s_to_c name);
"contract",e
]
)
pl
el
) @
(if r
then [(new_object ["random",c_to_e (b_to_c true)])]
else []))
in
do_mcalle_el prefix.pcontract "EObject" [ple]
| BArray c ->
let e = generate_contract c in
do_mcalle_el prefix.pcontract "Array" [e]
| CBase (bc,al,depl) -> generate_basecontract al bc
| CFunction (th,cl,c,dd,effects) ->
let effects_compl = TCssEffJS.js_of_t fname effects in
let el = generate_contractl cl in
let e = generate_contract c in
let theo =
match th with
| None -> None
| Some o -> let the = generate_contract o in
Some the
in
if (DependDown.is_depend dd) then begin
let int_of_exp i = float_to_exp (float_of_int i) in
let oe = List.map int_of_exp (DependDown.get_order dd) in
let iill = DependDown.get_depend dd in
let de =
List.map
(fun iil ->
new_array
(List.map
(fun d ->
let s = Depend.get_scope d in
let p = Depend.get_param d in
new_array [int_of_exp s;int_of_exp p])
iil))
iill
in
(do_mcalle_el prefix.pcontract "DFunction"
[new_array el;e;
new_array oe;
new_array de
])
end else begin
match theo with
| None ->
(do_mcalle_el
prefix.pcontract "Function"
[new_array el; (* Parameter *)
e; (* return *)
effects_compl; (* effekte *)
Testlib.var_name_to_string_exp fname (* fname *)
]
)
| Some the ->
(do_mcalle_el
prefix.pcontract "Method"
[the; (* this object*)
(new_array el); (* Parameter *)
e; (* return *)
effects_compl; (* effekte *)
Testlib.var_name_to_string_exp fname (* fname *)
]
)
end
and generate_basecontract : a list -> bc -> exp
= fun al bc -> match bc with
| BLength ->
read_prop_e prefix.pcontract "Length"
| BNatural ->
read_prop_e prefix.pcontract "Natural"
| BId ->
read_prop_e prefix.pcontract "Id"
| BTop ->
read_prop_e prefix.pcontract "Top"
| BVoid | BUndf ->
read_prop_e prefix.pcontract "Undefined"
| BNull ->
read_prop_e prefix.pcontract "Null"
| BJavaScriptVar jsv ->
if (List.length al > 0) then begin
let params =
if (List.mem Numbers al)
then [info.numbers]
else []
in
let params =
if (List.mem Strings al)
then info.strings :: params
else params
in
let params =
if (List.mem Labels al)
then info.labels :: params
else params
in
do_fcall (s_to_i jsv) params
end else begin
i_to_e (s_to_i jsv)
end
| BJSCContract cc ->
do_mcalle_el prefix.pcontract "load" [s_to_e cc]
| BSBool b ->
read_prop_e prefix.pcontract (if b then "True" else "False")
| BBool -> read_prop_e prefix.pcontract "Boolean"
| BInteger ->
if (List.mem Numbers al) then begin
do_mcalle_el prefix.pcontract "AInteger" [info.numbers]
end else begin
read_prop_e prefix.pcontract "Integer"
end
| BSInteger i ->
do_mcalle_el
prefix.pcontract
"SingletonContract"
[int_to_exp i; s_to_e (string_of_int i)]
| BIInterval (left,right) ->
do_mcalle_el prefix.pcontract "IIntervall"
[int_to_exp left;int_to_exp right]
| BFInterval (f1,f2) ->
do_mcalle_el prefix.pcontract "NIntervall"
[float_to_exp f1; float_to_exp f2]
| BSFloat f ->
do_mcalle_el
prefix.pcontract
"SingletonContract"
[float_to_exp f; s_to_e (string_of_float f)]
| BFloat ->
if (List.mem Numbers al) then begin
do_mcalle_el prefix.pcontract "ANumber" [info.numbers]
end else begin
read_prop_e prefix.pcontract "Number"
end
| BString ->
if (List.mem Strings al) then begin
do_mcalle_el prefix.pcontract "AString" [info.strings]
end else begin
read_prop_e prefix.pcontract "String"
end
| BSString s ->
do_mcalle_el prefix.pcontract "Singleton" [c_to_e (s_to_c s)]
| BObject ->
if (List.mem Labels al) then begin
do_mcalle_el prefix.pcontract "PObject" [info.labels]
end else begin
read_prop_e prefix.pcontract "Object"
end
in
generate_contract c
  (* For each (contract, generation-info) pair attached to [tc], emits the
     JavaScript expression that constructs the contract at runtime. *)
  let generate_tc : tc prefix_infos -> infos -> Testlib.varname -> tc -> (exp * c * GenInfo.t) list =
    fun prefix infos fname tc ->
      List.map
        (fun (c,gI) ->
           (generate_contract prefix infos fname c, c, gI))
        (Contract.get_clgI tc)

  (* As [generate_tc], but additionally binds each generated contract
     expression to a fresh test-library variable and returns that name. *)
  let generate_named_tc : tc prefix_infos -> infos -> Testlib.varname
    -> tc -> (exp * c * GenInfo.t * Testlib.varname) list =
    fun prefix infos fname tc ->
      let ecgIl = generate_tc prefix infos fname tc in
      List.map
        (fun (e,c,gI) ->
           let cn = Testlib.gen_lib_var_name () in
           let enamed = Testlib.set_var prefix.ptest cn e in
           enamed,c,gI,cn)
        ecgIl
  (* let generate_tests : ( bool -> tc prefix_infos -> infos -> exp -> tc -> exp ) =
fun genTests prefix
let add_test fname function_exp contract_exp count_exp =
" add "
[ c_to_e ( s_to_c module_name ) ;
function_exp ;
contract_exp ;
c_to_e ( n_to_c ( float_of_int count_exp ) ) ]
in
let cl = Contract.get_cl tc in
let ecl = List.map
( fun c - > generate_contract prefix infos - > string - > c , c )
cl
in
fun genTests prefix infos v tc ->
let add_test fname function_exp contract_exp count_exp =
do_mcalle_el prefix.ptest "add"
[c_to_e (s_to_c module_name);
function_exp;
contract_exp;
c_to_e (n_to_c (float_of_int count_exp))]
in
let cl = Contract.get_cl tc in
let ecl = List.map
(fun c -> generate_contract prefix infos -> string -> c, c)
cl
in *)
  (* let gen_for_on_c res (e, gen) gI =
let at =
if ( genTests & & ( GenInfo.getTests gI ) )
then ( * [ add_test e ( GenInfo.getTestNumber gI ) ]
let at =
if (genTests && (GenInfo.getTests gI))
then (* [add_test e (GenInfo.getTestNumber gI)] *) []
else []
in
let ngn,setV = register e in
((ngn,gI),(gen_run_anonym_fun (gen @ at @ [setV]))) :: res
in
List.fold_left2 gen_for_on_c [] esel (List.map snd cl) *)
  (* Wraps a function expression in a call to <ns>.<testns>.enableAsserts
     when at least one contract name requires runtime assertions.
     Returns the (possibly wrapped) expression paired with a flag telling
     whether asserts were enabled. *)
  let enableAsserts env fnametest = function
    | [] -> (fun e -> e, false)
    | cis -> (fun e ->
        let cl =
          new_array
            (List.map
               Testlib.var_name_to_string_exp
               cis)
        in
        do_mcalle_el
          (read_prop (s_to_i env.js_namespace) env.js_test_namespace)
          "enableAsserts"
          [e; cl; Testlib.var_name_to_string_exp fnametest ], true)

  (* Everything needed to re-emit a function: its contract, parameters,
     optional name for recursion, and body. *)
  type fun_info = {
    contract: tc;
    params: tc AST.identifier list;
    recursive_name: tc AST.identifier option;
    body: tc AST.source_element list;
  }
(* transforms the body of a function expression or function statement, returns an
* expression representing the function as a function expression. *)
let transform_body : T.t env -> fun_info -> (exp -> exp * bool) -> exp =
fun env finfo gen_asserts ->
    (* - make function expression for toString override with original function body
-- > org_fcode
* - transform body -- > T.transform
* - add wrapper -- > T.before_wrapper
* - add asserts -- > enableAsserts
* - add wrapper -- > T.afert_wrapper
* - overrideToString -- > use and org_fcode , call overrideToStringOfFunction
--> org_fcode
* - transform body --> T.transform
* - add wrapper --> T.before_wrapper
* - add asserts --> enableAsserts
* - add wrapper --> T.afert_wrapper
* - overrideToString --> use fcode and org_fcode, call overrideToStringOfFunction
*)
let org_fcode : exp =
Function_expression (null_annotation,
Some finfo.contract,
finfo.recursive_name,
finfo.params,
None,
finfo.body)
in
let fcode =
let fbody =
T.transform
env.effects_env
(Contract.get_trans finfo.contract)
finfo.body
in
Function_expression (null_annotation,
Some finfo.contract,
None,
finfo.params,
None,
fbody)
in
let fcode = T.before_wrapper env.effects_env finfo.params fcode in
let fcode, asserts = gen_asserts fcode in
let fcode = T.after_wrapper env.effects_env finfo.params fcode in
do_mcalle_el
(read_prop (s_to_i env.js_namespace) env.js_test_namespace)
"overrideToStringOfFunction"
[fcode;
org_fcode;
c_to_e (b_to_c asserts)
]
let create_code : (T.t env -> infos -> Testlib.varname -> fun_info -> exp) =
    (* TODO:
* - transform function body -- > transform_body
* - generate contract code -- >
* - generate test code -- > add_to_test_suite
* - transform function body --> transform_body
* - generate contract code --> generate_tc
* - generate test code --> add_to_test_suite
*)
fun env info fnametest finfo ->
    (* the prefixes needed by the generated code *)
let prefix = { ptest= read_prop (s_to_i env.js_namespace) env.js_test_namespace;
pcontract = read_prop (s_to_i env.js_namespace) env.js_contract_namespace;
ptmp = env.variable_prefix;
}
in
    (* the expressions for the contracts together with generate-infos and the contracts itself. *)
(* It also creates the code, that registers the contracts in the test suite namespace *)
let e_c_gI_name_list = generate_named_tc prefix info fnametest finfo.contract in
let assert_contract_names =
if (env.asserts) then
List.map
(fun (_,_,_,name) -> name)
(List.filter
(fun (_,_,gI,_) -> GenInfo.getAsserts gI)
e_c_gI_name_list)
else
[]
in
    (* the new expression that represents the transformed function. To decide if the enableAssert
* part is needed , we need the list of contract names , for which assersts should be generatd .
* If this is empty , no asserts are generated . If it contains at least one element , the contract
* names are passert to enableAssert , and the overrideToStringOfFunction method gets a true as
* last parameter .
* part is needed, we need the list of contract names, for which assersts should be generatd.
* If this is empty, no asserts are generated. If it contains at least one element, the contract
* names are passert to enableAssert, and the overrideToStringOfFunction method gets a true as
* last parameter.
*)
let fcode = transform_body env finfo
(enableAsserts env fnametest assert_contract_names)
in
(* register function under its name in test suite *)
let fcode =
Testlib.set_var
prefix.ptest
fnametest
fcode
in
    (* to add the contracts, for which test cases should be generated, to the library, we
* split the list of contracts into two parts . The first contains all contracts , for which
* test cases should be generated . The second part just creates the contracts and registers them
* under their name in the library . This is done by passing for parameters to
* addContracts(module , value , ccdlist ) , even if the method just ignores the 4th parameter .
* split the list of contracts into two parts. The first contains all contracts, for which
* test cases should be generated. The second part just creates the contracts and registers them
* under their name in the library. This is done by passing for parameters to
* addContracts(module, value, ccdlist), even if the method just ignores the 4th parameter. *)
let e_to_test_list, e_no_test_list =
let tmp1, tmp2 =
(List.partition
(fun (_,_,gI,_) -> GenInfo.getTests gI)
e_c_gI_name_list)
in
List.map
(fun (e,_,gI,_) -> new_object
["contract",e;
"count", int_to_exp (GenInfo.getTestNumber gI)])
tmp1,
List.map (fun (e,_,_,_) -> e) tmp2
in
(* Adds the contract,value pairs to the library. *)
do_mcalle_el
(read_prop (s_to_i env.js_namespace) env.js_test_namespace)
"addContracts"
[Testlib.var_name_to_string_exp fnametest;
fcode;
new_array e_to_test_list;
new_array e_no_test_list]
let create_infos labels strings numbers =
let to_array f l = new_array (List.map (fun x -> c_to_e (f x)) l) in
let numbers_exp = to_array n_to_c numbers in
let strings_exp = to_array s_to_c strings in
let labels_exp = to_array s_to_c labels in
{ numbers= numbers_exp; strings= strings_exp; labels= labels_exp }
  (* creates a name from an expression. Used to build the default
   * function of get_test_name. *)
  (* Shape: [x] -> "x", [x.y] -> "x_y", [x[y]] -> "x_y"; [None] when the
     expression contains anything other than variables and accesses. *)
  let rec pathname = function
    | Variable (an,i) -> Some (so_i i)
    | Object_access (an,e,i) -> fopt (fun s -> s ^ "_" ^ so_i i) (pathname e)
    | Array_access (an,e1,e2) ->
        begin
          match pathname e1,pathname e2 with
            | None, _ | _, None -> None
            | Some s1, Some s2 -> Some (s1 ^ "_" ^ s2)
        end
    | e -> None
  (* creates the name, under which the function is stored inside the test library scope.
* First the function name itself is considerd . If this does not exists ,
* the contract is consulted for a name . If both does not have a name , the
* function default is called . Usually it first tries to generate a new name from
* the left hand side of an assignment , if the function is a right hand side of an
* assignment , or it completely generates a new unique name .
* First the function name itself is considerd. If this does not exists,
* the contract is consulted for a name. If both does not have a name, the
* function default is called. Usually it first tries to generate a new name from
* the left hand side of an assignment, if the function is a right hand side of an
* assignment, or it completely generates a new unique name. *)
let get_test_name fn tc default =
let s =
match fn with
| None -> begin
match Contract.get_name tc with
| None -> ""
| Some s -> s
end
| Some i -> i
in
if String.length s < 1 then
default ()
else
s
let generate_tests env program =
    (* gt ga ge gcsseff trans_prefix = *)
let add_number,add_string,add_label,
new_scope,close_scope,
get_numbers,get_strings,get_labels
=
let labels = ref [] in
let strings = ref [] in
let numbers = ref [] in
let rec add_f e = function
| [] -> []
| h :: t ->
(List.add e h) :: add_f e t
in
let add e ll = ll := add_f e !ll in
let add_number e = add e numbers in
let add_string s = add s strings in
let add_label l = add l labels in
let print_numbers _ = () in
let new_scope () =
labels := [] :: !labels;
strings := [] :: !strings;
numbers := [] :: !numbers;
print_numbers "new_scope: ";
in
let close_scope () =
labels := List.tl !labels;
strings := List.tl !strings;
numbers := List.tl !numbers;
print_numbers "close_scope: ";
in
let get ll =
List.fold_right
List.union
ll
[]
in
let get_numbers () =
print_numbers "get_numbers: ";
get !numbers
in
let get_strings () = get !strings in
let get_labels () = get !labels in
add_number,add_string,add_label,
new_scope,close_scope,
get_numbers,get_strings,get_labels
in
let ba_constant = function
| Number (an,n) ->
add_number n;
Number (an,n)
| String (an,s) ->
add_string s;
String (an,s)
| c -> c
in
let transform_se = function
| Function_declaration (a,c,n,pl,_,sel) as forg ->
let fname = ASTUtil.i_to_s n in
let info = create_infos (get_labels ()) (get_strings ()) (get_numbers ()) in
let ftestname = (Testlib.gen_fun_var_name fname) in
let finfo = {
contract= c;
params= pl;
recursive_name=Some n;
body=sel; }
in
let mod_fd = create_code env info ftestname finfo in
let _ = close_scope () in
[AST.VarDecl
(s_to_i fname,
mod_fd)]
| se -> [se]
in
let lhs_ref = ref [] in
let transform_e = function
| Object_access (_,_,i) as e ->
begin
match i with
| Identifier (_,s) -> add_label s
| _ -> ()
end;
e
| Function_expression (a,Some c,no,pl,lvo,sel) as e ->
let nos = match no with
| None -> None
| Some n -> Some (ASTUtil.i_to_s n)
in
let fname = get_test_name
nos
c
(fun () ->
match List.find
(function | None -> false | Some s -> true)
!lhs_ref
with
| None -> ""
| Some s -> s)
in
let _ = print_endline fname in
let info = create_infos (get_labels ()) (get_strings ()) (get_numbers ()) in
let ftestname = (Testlib.gen_fun_var_name fname) in
let finfo = {
contract= c;
params= pl;
recursive_name=no;
body=sel; }
in
create_code env info ftestname finfo
| Assign (_,lhs,Regular_assign _,rhs) as e ->
lhs_ref := List.tl !lhs_ref;
(* print_endline "remove from stack"; *)
e
| e -> e
in
AST.visit
~ba_c:(fun x -> x)
~ba_constant:ba_constant
~a_source_element:transform_se
~a_expression:transform_e
~b_source_element:
(function
| Function_declaration _ as se -> new_scope ();
[se]
| se -> [se])
~b_expression:
(function
| Function_expression _ as e -> new_scope ();
e
| Assign (_,lhs,Regular_assign _,rhs) as e ->
let so = pathname lhs in
           (* ( match so with
| Some s - > ( * print_endline ( " lhs set " ^ s )
| Some s -> (* print_endline ("lhs set " ^ s) *)
| _ -> ()); *)
lhs_ref := so :: !lhs_ref;
e
| e -> e)
program
(* transforms a contract with no informaiton about the dependentcies
into contract that stores the dd information. *)
let ucToddC tc =
let id = fun x -> x in
let gl = ContractCycle.check tc in
match gl with
| None ->
Contract.transform
~ba_bcontract:id
~ba_analyse:id
~ba_depend_up:id
~ba_depend_down: DependDown.create
tc
| Some gl ->
let get_order,next =
let orders : c list list ref =
ref (List.rev
(List.map
(List.map
(fun c ->
Contract.transform_c
~ba_bcontract:id
~ba_analyse:id
~ba_depend_up:id
~ba_depend_down:(DependDown.create)
c)
)
(ContractCycle.get_order gl)))
in
let ao = ref None in
let next : unit -> unit = fun () ->
match !orders with
| o :: ol -> orders := ol; ao := Some o
| _ -> failwith "This should never happen"
in
let get_order : unit -> c list = fun () ->
match !ao with
| None -> failwith "This should never happen"
| Some o -> o
in
get_order,next
in
let compute_order = function
| CFunction (th,cl,r,dd,eff) ->
(* let c_to_s =
Contract.so_contract
BaseContract.string_of
Analyse.string_of
Depend.string_of
in *)
let o = get_order () in
let _ = print_endline
( String_of.string_of_list c_to_s o )
in
(String_of.string_of_list c_to_s o)
in *)
let fo = Utils.first o in
let cli = List.make_index_assoc cl in
let ord (pos1,c1) (pos2,c2) =
(* print_endline (c_to_s c1);
print_endline (c_to_s c2); *)
let g =
match fo c1 c2 with
| None -> pos2 - pos1
| Some b -> if b then -1 else 1
in
(* print_int g; *)
g
in
let clis = List.sort ord cli in
let is = List.map fst clis in
(* print_endline
(String_of.string_of_list
string_of_int
is); *)
DependDown.set_order dd is;
CFunction (th,cl,r,dd,eff)
| c -> c
in
let new_scope,get_scope,add_dul =
let dull = ref [[]] in
let new_scope () =
dull := [] :: !dull
in
let get_scope () =
let r = List.hd !dull in
dull := List.tl !dull;
r
in
let add_dul a =
let s = List.hd !dull in
let t = List.tl !dull in
dull := (a :: s) :: t
in
new_scope,get_scope,add_dul
in
let dependInfo = function
| CBase (_,_,dul) as c ->
print_endline ( string_of_list ) ;
add_dul dul; c
| CFunction (None,pl,r,dd,_) as c ->
DependDown.set_paramnr dd ((List.length pl) + 1);
DependDown.register_dinfo dd (get_scope ());
let dul = DependDown.get_dul dd in
let dul' = List.map Depend.raise_up dul in
add_dul dul';
c
| c -> c
in
let b_c = function
| CFunction _ as c -> new_scope (); c
| c -> c
in
let a_c c =
let c = dependInfo c in
compute_order c
in
(* (print_endline
((Contract.string_of
BaseContract.string_of
Analyse.string_of
Depend.string_of)
tc)); *)
Contract.transform
~b_tcontract:(fun c -> next (); c)
~b_contract:(b_c)
~a_contract:(a_c)
~ba_bcontract:id
~ba_analyse:id
~ba_depend_up:id
~ba_depend_down:DependDown.create
tc
We make two visits here , one in transform_priv to
generate the JavaScript Code for the contracts ,
and an other one to remove the contracts .
TODO : This should be done with one visit
generate the JavaScript Code for the contracts,
and an other one to remove the contracts.
TODO: This should be done with one visit
*)
let transform env program =
(* add depend down informaion *)
print_endline
( AST.string_of_ast
( Contract.string_of
BaseContract.string_of
Analyse.string_of
Depend.string_of )
program ) ;
(AST.string_of_ast
(Contract.string_of
BaseContract.string_of
Analyse.string_of
Depend.string_of)
program); *)
let p =
AST.visit
~ba_c:ucToddC
program
in
(* compile contracts *)
AST.visit
~ba_c:(fun _ -> ())
(generate_tests env p)
end
| null | https://raw.githubusercontent.com/heidegger/JSConTest/7c807a76af998da25775fba1f5cbe1cf8031d121/ocaml/transform/TCJS.ml | ocaml | generate code for contracts
Parameter
return
effekte
fname
this object
Parameter
return
effekte
fname
[add_test e (GenInfo.getTestNumber gI)]
transforms the body of a function expression or function statement, returns an
* expression representing the function as a function expression.
It also creates the code, that registers the contracts in the test suite namespace
register function under its name in test suite
Adds the contract,value pairs to the library.
creates a name from an expression. Used to build the default
* function of get_test_name.
print_endline "remove from stack";
print_endline ("lhs set " ^ s)
transforms a contract with no informaiton about the dependentcies
into contract that stores the dd information.
let c_to_s =
Contract.so_contract
BaseContract.string_of
Analyse.string_of
Depend.string_of
in
print_endline (c_to_s c1);
print_endline (c_to_s c2);
print_int g;
print_endline
(String_of.string_of_list
string_of_int
is);
(print_endline
((Contract.string_of
BaseContract.string_of
Analyse.string_of
Depend.string_of)
tc));
add depend down informaion
compile contracts | open AST
open Annotation
open ProglangUtils
open ExtList
open Contract
open BaseContract
open Analyse
open ASTUtil
open String_of
type bc = BaseContract.t
type a = Analyse.t
type d = Depend.t
type tc = (bc,a,d,DependDown.t) Contract.t
type c = (bc,a,d,DependDown.t) Contract.contract
type exp = tc AST.expression
type 'a env = {
tests: bool;
asserts: bool;
js_namespace: string;
js_test_namespace: string;
js_contract_namespace: string;
variable_prefix: string;
effects_env: 'a;
}
module type TRANS = sig
type t
val transform : t
-> bool option
-> 'c source_element list
-> 'c source_element list
val before_wrapper : t -> 'c identifier list -> 'c expression -> 'c expression
val after_wrapper : t -> 'c identifier list -> 'c expression -> 'c expression
end
module type S = sig
type t
val transform : t env
-> (BaseContract.t, Analyse.t,Depend.t,unit) Contract.t program
-> unit program
end
let so_i = so_identifier
(Contract.string_of
BaseContract.string_of
Analyse.string_of
Depend.string_of)
let so_e = so_expression
(Contract.string_of
BaseContract.string_of
Analyse.string_of
Depend.string_of)
let fopt f = function
| None -> None
| Some a -> Some (f a)
module Make(T: TRANS) : S with type t = T.t = struct
type t = T.t
type 'c prefix_infos = {
ptest: 'c AST.expression;
pcontract: 'c AST.expression;
ptmp: string;
}
type infos = {
labels: exp;
strings: exp;
numbers: exp;
}
let generate_contract : tc prefix_infos -> infos -> Testlib.varname -> c -> exp =
fun prefix info fname c ->
let rec generate_contractl : c list -> exp list =
fun cl ->
List.map generate_contract cl
and generate_contract : c -> exp =
function
| CUnion cl ->
let el = generate_contractl cl in
(do_mcalle_el
prefix.pcontract "Union"
el)
| BObjectPL (pl,r,al,_) ->
let el = generate_contractl (List.map snd pl) in
let ple =
new_array
((List.map2
(fun (name,c) e ->
new_object
["name",c_to_e (s_to_c name);
"contract",e
]
)
pl
el
) @
(if r
then [(new_object ["random",c_to_e (b_to_c true)])]
else []))
in
do_mcalle_el prefix.pcontract "EObject" [ple]
| BArray c ->
let e = generate_contract c in
do_mcalle_el prefix.pcontract "Array" [e]
| CBase (bc,al,depl) -> generate_basecontract al bc
| CFunction (th,cl,c,dd,effects) ->
let effects_compl = TCssEffJS.js_of_t fname effects in
let el = generate_contractl cl in
let e = generate_contract c in
let theo =
match th with
| None -> None
| Some o -> let the = generate_contract o in
Some the
in
if (DependDown.is_depend dd) then begin
let int_of_exp i = float_to_exp (float_of_int i) in
let oe = List.map int_of_exp (DependDown.get_order dd) in
let iill = DependDown.get_depend dd in
let de =
List.map
(fun iil ->
new_array
(List.map
(fun d ->
let s = Depend.get_scope d in
let p = Depend.get_param d in
new_array [int_of_exp s;int_of_exp p])
iil))
iill
in
(do_mcalle_el prefix.pcontract "DFunction"
[new_array el;e;
new_array oe;
new_array de
])
end else begin
match theo with
| None ->
(do_mcalle_el
prefix.pcontract "Function"
]
)
| Some the ->
(do_mcalle_el
prefix.pcontract "Method"
]
)
end
and generate_basecontract : a list -> bc -> exp
= fun al bc -> match bc with
| BLength ->
read_prop_e prefix.pcontract "Length"
| BNatural ->
read_prop_e prefix.pcontract "Natural"
| BId ->
read_prop_e prefix.pcontract "Id"
| BTop ->
read_prop_e prefix.pcontract "Top"
| BVoid | BUndf ->
read_prop_e prefix.pcontract "Undefined"
| BNull ->
read_prop_e prefix.pcontract "Null"
| BJavaScriptVar jsv ->
if (List.length al > 0) then begin
let params =
if (List.mem Numbers al)
then [info.numbers]
else []
in
let params =
if (List.mem Strings al)
then info.strings :: params
else params
in
let params =
if (List.mem Labels al)
then info.labels :: params
else params
in
do_fcall (s_to_i jsv) params
end else begin
i_to_e (s_to_i jsv)
end
| BJSCContract cc ->
do_mcalle_el prefix.pcontract "load" [s_to_e cc]
| BSBool b ->
read_prop_e prefix.pcontract (if b then "True" else "False")
| BBool -> read_prop_e prefix.pcontract "Boolean"
| BInteger ->
if (List.mem Numbers al) then begin
do_mcalle_el prefix.pcontract "AInteger" [info.numbers]
end else begin
read_prop_e prefix.pcontract "Integer"
end
| BSInteger i ->
do_mcalle_el
prefix.pcontract
"SingletonContract"
[int_to_exp i; s_to_e (string_of_int i)]
| BIInterval (left,right) ->
do_mcalle_el prefix.pcontract "IIntervall"
[int_to_exp left;int_to_exp right]
| BFInterval (f1,f2) ->
do_mcalle_el prefix.pcontract "NIntervall"
[float_to_exp f1; float_to_exp f2]
| BSFloat f ->
do_mcalle_el
prefix.pcontract
"SingletonContract"
[float_to_exp f; s_to_e (string_of_float f)]
| BFloat ->
if (List.mem Numbers al) then begin
do_mcalle_el prefix.pcontract "ANumber" [info.numbers]
end else begin
read_prop_e prefix.pcontract "Number"
end
| BString ->
if (List.mem Strings al) then begin
do_mcalle_el prefix.pcontract "AString" [info.strings]
end else begin
read_prop_e prefix.pcontract "String"
end
| BSString s ->
do_mcalle_el prefix.pcontract "Singleton" [c_to_e (s_to_c s)]
| BObject ->
if (List.mem Labels al) then begin
do_mcalle_el prefix.pcontract "PObject" [info.labels]
end else begin
read_prop_e prefix.pcontract "Object"
end
in
generate_contract c
let generate_tc : tc prefix_infos -> infos -> Testlib.varname -> tc -> (exp * c * GenInfo.t) list =
fun prefix infos fname tc ->
List.map
(fun (c,gI) ->
(generate_contract prefix infos fname c, c, gI))
(Contract.get_clgI tc)
let generate_named_tc : tc prefix_infos -> infos -> Testlib.varname
-> tc -> (exp * c * GenInfo.t * Testlib.varname) list =
fun prefix infos fname tc ->
let ecgIl = generate_tc prefix infos fname tc in
List.map
(fun (e,c,gI) ->
let cn = Testlib.gen_lib_var_name () in
let enamed = Testlib.set_var prefix.ptest cn e in
enamed,c,gI,cn)
ecgIl
let generate_tests : ( bool - > tc prefix_infos - > infos - > exp - > tc - > exp ) =
fun genTests prefix
let add_test fname function_exp contract_exp count_exp =
" add "
[ c_to_e ( s_to_c module_name ) ;
function_exp ;
contract_exp ;
c_to_e ( n_to_c ( float_of_int count_exp ) ) ]
in
let cl = Contract.get_cl tc in
let ecl = List.map
( fun c - > generate_contract prefix infos - > string - > c , c )
cl
in
fun genTests prefix infos v tc ->
let add_test fname function_exp contract_exp count_exp =
do_mcalle_el prefix.ptest "add"
[c_to_e (s_to_c module_name);
function_exp;
contract_exp;
c_to_e (n_to_c (float_of_int count_exp))]
in
let cl = Contract.get_cl tc in
let ecl = List.map
(fun c -> generate_contract prefix infos -> string -> c, c)
cl
in *)
let gen_for_on_c res ( e , gen ) gI =
let at =
if ( genTests & & ( GenInfo.getTests gI ) )
then ( * [ add_test e ( GenInfo.getTestNumber gI ) ]
let at =
if (genTests && (GenInfo.getTests gI))
else []
in
let ngn,setV = register e in
((ngn,gI),(gen_run_anonym_fun (gen @ at @ [setV]))) :: res
in
List.fold_left2 gen_for_on_c [] esel (List.map snd cl) *)
let enableAsserts env fnametest = function
| [] -> (fun e -> e, false)
| cis -> (fun e ->
let cl =
new_array
(List.map
Testlib.var_name_to_string_exp
cis)
in
do_mcalle_el
(read_prop (s_to_i env.js_namespace) env.js_test_namespace)
"enableAsserts"
[e; cl; Testlib.var_name_to_string_exp fnametest ], true)
type fun_info = {
contract: tc;
params: tc AST.identifier list;
recursive_name: tc AST.identifier option;
body: tc AST.source_element list;
}
let transform_body : T.t env -> fun_info -> (exp -> exp * bool) -> exp =
fun env finfo gen_asserts ->
- make function expression for toString override with original function body
-- > org_fcode
* - transform body -- > T.transform
* - add wrapper -- > T.before_wrapper
* - add asserts -- > enableAsserts
* - add wrapper -- > T.afert_wrapper
* - overrideToString -- > use and org_fcode , call overrideToStringOfFunction
--> org_fcode
* - transform body --> T.transform
* - add wrapper --> T.before_wrapper
* - add asserts --> enableAsserts
* - add wrapper --> T.afert_wrapper
* - overrideToString --> use fcode and org_fcode, call overrideToStringOfFunction
*)
let org_fcode : exp =
Function_expression (null_annotation,
Some finfo.contract,
finfo.recursive_name,
finfo.params,
None,
finfo.body)
in
let fcode =
let fbody =
T.transform
env.effects_env
(Contract.get_trans finfo.contract)
finfo.body
in
Function_expression (null_annotation,
Some finfo.contract,
None,
finfo.params,
None,
fbody)
in
let fcode = T.before_wrapper env.effects_env finfo.params fcode in
let fcode, asserts = gen_asserts fcode in
let fcode = T.after_wrapper env.effects_env finfo.params fcode in
do_mcalle_el
(read_prop (s_to_i env.js_namespace) env.js_test_namespace)
"overrideToStringOfFunction"
[fcode;
org_fcode;
c_to_e (b_to_c asserts)
]
let create_code : (T.t env -> infos -> Testlib.varname -> fun_info -> exp) =
TODO :
* - transform function body -- > transform_body
* - generate contract code -- >
* - generate test code -- > add_to_test_suite
* - transform function body --> transform_body
* - generate contract code --> generate_tc
* - generate test code --> add_to_test_suite
*)
fun env info fnametest finfo ->
the prefixes needed by
let prefix = { ptest= read_prop (s_to_i env.js_namespace) env.js_test_namespace;
pcontract = read_prop (s_to_i env.js_namespace) env.js_contract_namespace;
ptmp = env.variable_prefix;
}
in
the expressions for the contracts together with generate Infos and the contracts itself .
let e_c_gI_name_list = generate_named_tc prefix info fnametest finfo.contract in
let assert_contract_names =
if (env.asserts) then
List.map
(fun (_,_,_,name) -> name)
(List.filter
(fun (_,_,gI,_) -> GenInfo.getAsserts gI)
e_c_gI_name_list)
else
[]
in
the new expression that represents the transformed function . To decide if the enableAssert
* part is needed , we need the list of contract names , for which assersts should be generatd .
* If this is empty , no asserts are generated . If it contains at least one element , the contract
* names are passert to enableAssert , and the overrideToStringOfFunction method gets a true as
* last parameter .
* part is needed, we need the list of contract names, for which assersts should be generatd.
* If this is empty, no asserts are generated. If it contains at least one element, the contract
* names are passert to enableAssert, and the overrideToStringOfFunction method gets a true as
* last parameter.
*)
let fcode = transform_body env finfo
(enableAsserts env fnametest assert_contract_names)
in
let fcode =
Testlib.set_var
prefix.ptest
fnametest
fcode
in
to add the contracts , for which test cases should be generated , to the library , we
* split the list of contracts into two parts . The first contains all contracts , for which
* test cases should be generated . The second part just creates the contracts and registers them
* under their name in the library . This is done by passing for parameters to
* addContracts(module , value , ccdlist ) , even if the method just ignores the 4th parameter .
* split the list of contracts into two parts. The first contains all contracts, for which
* test cases should be generated. The second part just creates the contracts and registers them
* under their name in the library. This is done by passing for parameters to
* addContracts(module, value, ccdlist), even if the method just ignores the 4th parameter. *)
let e_to_test_list, e_no_test_list =
let tmp1, tmp2 =
(List.partition
(fun (_,_,gI,_) -> GenInfo.getTests gI)
e_c_gI_name_list)
in
List.map
(fun (e,_,gI,_) -> new_object
["contract",e;
"count", int_to_exp (GenInfo.getTestNumber gI)])
tmp1,
List.map (fun (e,_,_,_) -> e) tmp2
in
do_mcalle_el
(read_prop (s_to_i env.js_namespace) env.js_test_namespace)
"addContracts"
[Testlib.var_name_to_string_exp fnametest;
fcode;
new_array e_to_test_list;
new_array e_no_test_list]
let create_infos labels strings numbers =
let to_array f l = new_array (List.map (fun x -> c_to_e (f x)) l) in
let numbers_exp = to_array n_to_c numbers in
let strings_exp = to_array s_to_c strings in
let labels_exp = to_array s_to_c labels in
{ numbers= numbers_exp; strings= strings_exp; labels= labels_exp }
let rec pathname = function
| Variable (an,i) -> Some (so_i i)
| Object_access (an,e,i) -> fopt (fun s -> s ^ "_" ^ so_i i) (pathname e)
| Array_access (an,e1,e2) ->
begin
match pathname e1,pathname e2 with
| None, _ | _, None -> None
| Some s1, Some s2 -> Some (s1 ^ "_" ^ s2)
end
| e -> None
creates the name , under which the function is stored inside the test library scope
* First the function name itself is considerd . If this does not exists ,
* the contract is consulted for a name . If both does not have a name , the
* function default is called . Usually it first tries to generate a new name from
* the left hand side of an assignment , if the function is a right hand side of an
* assignment , or it completely generates a new unique name .
* First the function name itself is considerd. If this does not exists,
* the contract is consulted for a name. If both does not have a name, the
* function default is called. Usually it first tries to generate a new name from
* the left hand side of an assignment, if the function is a right hand side of an
* assignment, or it completely generates a new unique name. *)
let get_test_name fn tc default =
let s =
match fn with
| None -> begin
match Contract.get_name tc with
| None -> ""
| Some s -> s
end
| Some i -> i
in
if String.length s < 1 then
default ()
else
s
let generate_tests env program =
gt ga ge gcsseff trans_prefix =
let add_number,add_string,add_label,
new_scope,close_scope,
get_numbers,get_strings,get_labels
=
let labels = ref [] in
let strings = ref [] in
let numbers = ref [] in
let rec add_f e = function
| [] -> []
| h :: t ->
(List.add e h) :: add_f e t
in
let add e ll = ll := add_f e !ll in
let add_number e = add e numbers in
let add_string s = add s strings in
let add_label l = add l labels in
let print_numbers _ = () in
let new_scope () =
labels := [] :: !labels;
strings := [] :: !strings;
numbers := [] :: !numbers;
print_numbers "new_scope: ";
in
let close_scope () =
labels := List.tl !labels;
strings := List.tl !strings;
numbers := List.tl !numbers;
print_numbers "close_scope: ";
in
let get ll =
List.fold_right
List.union
ll
[]
in
let get_numbers () =
print_numbers "get_numbers: ";
get !numbers
in
let get_strings () = get !strings in
let get_labels () = get !labels in
add_number,add_string,add_label,
new_scope,close_scope,
get_numbers,get_strings,get_labels
in
let ba_constant = function
| Number (an,n) ->
add_number n;
Number (an,n)
| String (an,s) ->
add_string s;
String (an,s)
| c -> c
in
let transform_se = function
| Function_declaration (a,c,n,pl,_,sel) as forg ->
let fname = ASTUtil.i_to_s n in
let info = create_infos (get_labels ()) (get_strings ()) (get_numbers ()) in
let ftestname = (Testlib.gen_fun_var_name fname) in
let finfo = {
contract= c;
params= pl;
recursive_name=Some n;
body=sel; }
in
let mod_fd = create_code env info ftestname finfo in
let _ = close_scope () in
[AST.VarDecl
(s_to_i fname,
mod_fd)]
| se -> [se]
in
let lhs_ref = ref [] in
let transform_e = function
| Object_access (_,_,i) as e ->
begin
match i with
| Identifier (_,s) -> add_label s
| _ -> ()
end;
e
| Function_expression (a,Some c,no,pl,lvo,sel) as e ->
let nos = match no with
| None -> None
| Some n -> Some (ASTUtil.i_to_s n)
in
let fname = get_test_name
nos
c
(fun () ->
match List.find
(function | None -> false | Some s -> true)
!lhs_ref
with
| None -> ""
| Some s -> s)
in
let _ = print_endline fname in
let info = create_infos (get_labels ()) (get_strings ()) (get_numbers ()) in
let ftestname = (Testlib.gen_fun_var_name fname) in
let finfo = {
contract= c;
params= pl;
recursive_name=no;
body=sel; }
in
create_code env info ftestname finfo
| Assign (_,lhs,Regular_assign _,rhs) as e ->
lhs_ref := List.tl !lhs_ref;
e
| e -> e
in
AST.visit
~ba_c:(fun x -> x)
~ba_constant:ba_constant
~a_source_element:transform_se
~a_expression:transform_e
~b_source_element:
(function
| Function_declaration _ as se -> new_scope ();
[se]
| se -> [se])
~b_expression:
(function
| Function_expression _ as e -> new_scope ();
e
| Assign (_,lhs,Regular_assign _,rhs) as e ->
let so = pathname lhs in
( match so with
| Some s - > ( * print_endline ( " lhs set " ^ s )
| _ -> ()); *)
lhs_ref := so :: !lhs_ref;
e
| e -> e)
program
let ucToddC tc =
let id = fun x -> x in
let gl = ContractCycle.check tc in
match gl with
| None ->
Contract.transform
~ba_bcontract:id
~ba_analyse:id
~ba_depend_up:id
~ba_depend_down: DependDown.create
tc
| Some gl ->
let get_order,next =
let orders : c list list ref =
ref (List.rev
(List.map
(List.map
(fun c ->
Contract.transform_c
~ba_bcontract:id
~ba_analyse:id
~ba_depend_up:id
~ba_depend_down:(DependDown.create)
c)
)
(ContractCycle.get_order gl)))
in
let ao = ref None in
let next : unit -> unit = fun () ->
match !orders with
| o :: ol -> orders := ol; ao := Some o
| _ -> failwith "This should never happen"
in
let get_order : unit -> c list = fun () ->
match !ao with
| None -> failwith "This should never happen"
| Some o -> o
in
get_order,next
in
let compute_order = function
| CFunction (th,cl,r,dd,eff) ->
let o = get_order () in
let _ = print_endline
( String_of.string_of_list c_to_s o )
in
(String_of.string_of_list c_to_s o)
in *)
let fo = Utils.first o in
let cli = List.make_index_assoc cl in
let ord (pos1,c1) (pos2,c2) =
let g =
match fo c1 c2 with
| None -> pos2 - pos1
| Some b -> if b then -1 else 1
in
g
in
let clis = List.sort ord cli in
let is = List.map fst clis in
DependDown.set_order dd is;
CFunction (th,cl,r,dd,eff)
| c -> c
in
let new_scope,get_scope,add_dul =
let dull = ref [[]] in
let new_scope () =
dull := [] :: !dull
in
let get_scope () =
let r = List.hd !dull in
dull := List.tl !dull;
r
in
let add_dul a =
let s = List.hd !dull in
let t = List.tl !dull in
dull := (a :: s) :: t
in
new_scope,get_scope,add_dul
in
let dependInfo = function
| CBase (_,_,dul) as c ->
print_endline ( string_of_list ) ;
add_dul dul; c
| CFunction (None,pl,r,dd,_) as c ->
DependDown.set_paramnr dd ((List.length pl) + 1);
DependDown.register_dinfo dd (get_scope ());
let dul = DependDown.get_dul dd in
let dul' = List.map Depend.raise_up dul in
add_dul dul';
c
| c -> c
in
let b_c = function
| CFunction _ as c -> new_scope (); c
| c -> c
in
let a_c c =
let c = dependInfo c in
compute_order c
in
Contract.transform
~b_tcontract:(fun c -> next (); c)
~b_contract:(b_c)
~a_contract:(a_c)
~ba_bcontract:id
~ba_analyse:id
~ba_depend_up:id
~ba_depend_down:DependDown.create
tc
We make two visits here , one in transform_priv to
generate the JavaScript Code for the contracts ,
and an other one to remove the contracts .
TODO : This should be done with one visit
generate the JavaScript Code for the contracts,
and an other one to remove the contracts.
TODO: This should be done with one visit
*)
let transform env program =
print_endline
( AST.string_of_ast
( Contract.string_of
BaseContract.string_of
Analyse.string_of
Depend.string_of )
program ) ;
(AST.string_of_ast
(Contract.string_of
BaseContract.string_of
Analyse.string_of
Depend.string_of)
program); *)
let p =
AST.visit
~ba_c:ucToddC
program
in
AST.visit
~ba_c:(fun _ -> ())
(generate_tests env p)
end
|
632ba937a133bfa6251ea3a24b36bde3ae8979aa7748e363c19bfa9663cc6426 | FranklinChen/Ebnf2ps | Color.hs | -- -*- Mode: Haskell -*-
Copyright 1994 by
Color.hs --- string converter for colors
Author :
Created On : Thu Dec 2 16:58:33 1993
Last Modified By :
Last Modified On : Fri Dec 3 14:13:34 1993
Update Count : 3
-- Status : Unknown, Use with caution!
--
-- $Locker: $
$ Log : Color.hs , v $
Revision 1.1.1.1 1998/12/09 13:34:08 pjt
-- Imported sources
--
Revision 1.1 1994/03/15 15:34:53 thiemann
-- Initial revision
--
--
Last Modified By :
module Color where
( Color ( .. ) , , showsColor , showsAColor , prepareColors )
import Data.Char
import Numeric
type Color = (Int, Int, Int)
noColor :: Color
noColor = (-1, -1, -1)
readColor : : String - > Color
readColor = readColor1 . map toLower
readColor1 : : String - > Color
readColor1 ( ' b':'l':'a ' : _ ) = 0
readColor1 ( ' b':'l':'u ' : _ ) = 1
readColor1 ( ' g ' : _ ) = 2
readColor1 ( ' c ' : _ ) = 3
readColor1 ( ' r ' : _ ) = 4
readColor1 ( ' m ' : _ ) = 5
readColor1 ( ' y ' : _ ) = 6
readColor1 ( ' w ' : _ ) = 7
readColor1 _ = -1
readColor :: String -> Color
readColor = readColor1 . map toLower
readColor1 :: String -> Color
readColor1 ('b':'l':'a':_) = 0
readColor1 ('b':'l':'u':_) = 1
readColor1 ('g':_) = 2
readColor1 ('c':_) = 3
readColor1 ('r':_) = 4
readColor1 ('m':_) = 5
readColor1 ('y':_) = 6
readColor1 ('w':_) = 7
readColor1 _ = -1
-}
-- Gofer-like stuff:
ljustify' :: Int -> String -> String
ljustify' n s = s ++ space' (max 0 (n - length s))
space' :: Int -> String
space' n = copy' n ' '
copy' :: Int -> a -> [a] -- make list of n copies of x
copy' n x = take n xs where xs = x:xs
--
lookupColor :: String -> [(String,(Int,Int,Int))] -> (Int,Int,Int)
lookupColor colorName colorTable =
head [(r,g,b) | (c,(r,g,b)) <- colorTable, c == map toLower colorName]
showsColor :: Color -> ShowS
showsColor (r,g,b) = showString " (" . shows r . showChar ',' .
shows g . showChar ',' .
shows b . showChar ')'
showsAColor :: Color -> String -> ShowS
showsAColor color str = showString ('\t': ljustify' 16 str) . showsColor color . showChar '\n'
abgeändert und durch , weil \\ nicht
verfügbar , :
--
-- prepareColors rgbFile colors =
-- decodeColors (map (map toLower) colors) (fallBackRgb++parsedRgbFile) []
-- where parsedRgbFile = (map parseLine (lines rgbFile))
--
-- decodeColors [] parsedRgbFile decoded = decoded
-- decodeColors clrs [] decoded = [(name,(128,128,128)) | name <- clrs ]++decoded
decodeColors clrs ( ( r , , b , name):parsedRgbFile ) decoded
-- = decodeColors (clrs \\ found) parsedRgbFile (foundDecoded++decoded)
-- where found = [ c | c <- clrs, name == c ]
foundDecoded = [ ( c,(r , g , b ) ) | c < - found ]
prepareColors rgbFile colors =
decodeColors (map (map toLower) colors) (fallBackRgb++parsedRgbFile)
where parsedRgbFile = [parseLine l | l <- lines rgbFile, notComment l]
notComment ('!':_) = False
notComment _ = True
decodeColors :: [String] -> [(Int,Int,Int,String)] -> [(String,(Int,Int,Int))]
decodeColors clrs parsedRgbFile = [ (name,(r,g,b)) | name <- clrs,
(r,g,b,name') <- parsedRgbFile,
name == name' ]
-- bis hier
parseLine str = let (r,restr):_ = readDec (skipWhite str)
(g,restg):_ = readDec (skipWhite restr)
(b,restb):_ = readDec (skipWhite restg)
name = map toLower (skipWhite restb)
in (r,g,b,name)
where skipWhite = dropWhile isSpace
fallBackRgb :: [(Int,Int,Int,String)]
fallBackRgb = [
( 0, 0, 0,"black"),
( 0, 0,255,"blue"),
( 0,255, 0,"green"),
( 0,255,255,"cyan"),
(255, 0, 0,"red"),
(255, 0,255,"magenta"),
(255,255, 0,"yellow"),
(255,255,255,"white")]
showsPsColor (r,g,b) = showChar ' ' . shows r .
showChar ' ' . shows g .
showChar ' ' . shows b .
showString " scol "
showsFigColor (r,g,b) = showChar ' ' . shows (minPosition 0 (-1,32768*32768)
[ (x-r)*(x-r) + (y-g)*(y-g) + (z-b)*(z-b) | (x,y,z,_) <- fallBackRgb ])
--
-- find position of minimal element in list
--
minPosition i (pos,min) [] = pos
minPosition i (pos,min) (x:rest) | x < min = minPosition (i+1) (i,x) rest
| otherwise = minPosition (i+1) (pos,min) rest
| null | https://raw.githubusercontent.com/FranklinChen/Ebnf2ps/131bf89bc56c9503dc941f0b5bad0e2fe8eb4551/src/Color.hs | haskell | -*- Mode: Haskell -*-
- string converter for colors
Status : Unknown, Use with caution!
$Locker: $
Imported sources
Initial revision
Gofer-like stuff:
make list of n copies of x
prepareColors rgbFile colors =
decodeColors (map (map toLower) colors) (fallBackRgb++parsedRgbFile) []
where parsedRgbFile = (map parseLine (lines rgbFile))
decodeColors [] parsedRgbFile decoded = decoded
decodeColors clrs [] decoded = [(name,(128,128,128)) | name <- clrs ]++decoded
= decodeColors (clrs \\ found) parsedRgbFile (foundDecoded++decoded)
where found = [ c | c <- clrs, name == c ]
bis hier
find position of minimal element in list
| Copyright 1994 by
Author :
Created On : Thu Dec 2 16:58:33 1993
Last Modified By :
Last Modified On : Fri Dec 3 14:13:34 1993
Update Count : 3
$ Log : Color.hs , v $
Revision 1.1.1.1 1998/12/09 13:34:08 pjt
Revision 1.1 1994/03/15 15:34:53 thiemann
Last Modified By :
module Color where
( Color ( .. ) , , showsColor , showsAColor , prepareColors )
import Data.Char
import Numeric
type Color = (Int, Int, Int)
noColor :: Color
noColor = (-1, -1, -1)
readColor : : String - > Color
readColor = readColor1 . map toLower
readColor1 : : String - > Color
readColor1 ( ' b':'l':'a ' : _ ) = 0
readColor1 ( ' b':'l':'u ' : _ ) = 1
readColor1 ( ' g ' : _ ) = 2
readColor1 ( ' c ' : _ ) = 3
readColor1 ( ' r ' : _ ) = 4
readColor1 ( ' m ' : _ ) = 5
readColor1 ( ' y ' : _ ) = 6
readColor1 ( ' w ' : _ ) = 7
readColor1 _ = -1
readColor :: String -> Color
readColor = readColor1 . map toLower
readColor1 :: String -> Color
readColor1 ('b':'l':'a':_) = 0
readColor1 ('b':'l':'u':_) = 1
readColor1 ('g':_) = 2
readColor1 ('c':_) = 3
readColor1 ('r':_) = 4
readColor1 ('m':_) = 5
readColor1 ('y':_) = 6
readColor1 ('w':_) = 7
readColor1 _ = -1
-}
ljustify' :: Int -> String -> String
ljustify' n s = s ++ space' (max 0 (n - length s))
space' :: Int -> String
space' n = copy' n ' '
copy' n x = take n xs where xs = x:xs
lookupColor :: String -> [(String,(Int,Int,Int))] -> (Int,Int,Int)
lookupColor colorName colorTable =
head [(r,g,b) | (c,(r,g,b)) <- colorTable, c == map toLower colorName]
showsColor :: Color -> ShowS
showsColor (r,g,b) = showString " (" . shows r . showChar ',' .
shows g . showChar ',' .
shows b . showChar ')'
showsAColor :: Color -> String -> ShowS
showsAColor color str = showString ('\t': ljustify' 16 str) . showsColor color . showChar '\n'
abgeändert und durch , weil \\ nicht
verfügbar , :
decodeColors clrs ( ( r , , b , name):parsedRgbFile ) decoded
foundDecoded = [ ( c,(r , g , b ) ) | c < - found ]
prepareColors rgbFile colors =
decodeColors (map (map toLower) colors) (fallBackRgb++parsedRgbFile)
where parsedRgbFile = [parseLine l | l <- lines rgbFile, notComment l]
notComment ('!':_) = False
notComment _ = True
decodeColors :: [String] -> [(Int,Int,Int,String)] -> [(String,(Int,Int,Int))]
decodeColors clrs parsedRgbFile = [ (name,(r,g,b)) | name <- clrs,
(r,g,b,name') <- parsedRgbFile,
name == name' ]
parseLine str = let (r,restr):_ = readDec (skipWhite str)
(g,restg):_ = readDec (skipWhite restr)
(b,restb):_ = readDec (skipWhite restg)
name = map toLower (skipWhite restb)
in (r,g,b,name)
where skipWhite = dropWhile isSpace
fallBackRgb :: [(Int,Int,Int,String)]
fallBackRgb = [
( 0, 0, 0,"black"),
( 0, 0,255,"blue"),
( 0,255, 0,"green"),
( 0,255,255,"cyan"),
(255, 0, 0,"red"),
(255, 0,255,"magenta"),
(255,255, 0,"yellow"),
(255,255,255,"white")]
showsPsColor (r,g,b) = showChar ' ' . shows r .
showChar ' ' . shows g .
showChar ' ' . shows b .
showString " scol "
showsFigColor (r,g,b) = showChar ' ' . shows (minPosition 0 (-1,32768*32768)
[ (x-r)*(x-r) + (y-g)*(y-g) + (z-b)*(z-b) | (x,y,z,_) <- fallBackRgb ])
minPosition i (pos,min) [] = pos
minPosition i (pos,min) (x:rest) | x < min = minPosition (i+1) (i,x) rest
| otherwise = minPosition (i+1) (pos,min) rest
|
3bffa7a396a03effc9334189fbe143c27617eb4c58c66499fa4f813c833b8cfd | seancorfield/next-jdbc | build.clj | (ns build
"next.jdbc's build script.
clojure -T:build ci
clojure -T:build deploy
Run tests via:
clojure -X:test
For more information, run:
clojure -A:deps -T:build help/doc"
(:refer-clojure :exclude [test])
(:require [clojure.tools.build.api :as b]
[clojure.tools.deps :as t]
[deps-deploy.deps-deploy :as dd]))
(def lib 'com.github.seancorfield/next.jdbc)
(defn- the-version [patch] (format "1.3.%s" patch))
(def version (the-version (b/git-count-revs nil)))
(def snapshot (the-version "999-SNAPSHOT"))
(def class-dir "target/classes")
(defn test "Run all the tests." [opts]
(doseq [alias [:1.10 :1.11 :master]]
(println "\nRunning tests for Clojure" (name alias))
(let [basis (b/create-basis {:aliases [:test alias]})
combined (t/combine-aliases basis [:test alias])
cmds (b/java-command
{:basis basis
:java-opts (:jvm-opts combined)
:main 'clojure.main
:main-args ["-m" "cognitect.test-runner"]})
{:keys [exit]} (b/process cmds)]
(when-not (zero? exit) (throw "Tests failed"))))
opts)
(defn- jar-opts [opts]
(let [version (if (:snapshot opts) snapshot version)]
(assoc opts
:lib lib :version version
:jar-file (format "target/%s-%s.jar" lib version)
:scm {:tag (str "v" version)}
:basis (b/create-basis {})
:class-dir class-dir
:target "target"
:src-dirs ["src"]
:src-pom "template/pom.xml")))
(defn ci "Run the CI pipeline of tests (and build the JAR)." [opts]
(test opts)
(b/delete {:path "target"})
(let [opts (jar-opts opts)]
(println "\nWriting pom.xml...")
(b/write-pom opts)
(println "\nCopying source...")
(b/copy-dir {:src-dirs ["resources" "src"] :target-dir class-dir})
(println "\nBuilding JAR...")
(b/jar opts))
opts)
(defn deploy "Deploy the JAR to Clojars." [opts]
(let [{:keys [jar-file] :as opts} (jar-opts opts)]
(dd/deploy {:installer :remote :artifact (b/resolve-path jar-file)
:pom-file (b/pom-path (select-keys opts [:lib :class-dir]))}))
opts)
| null | https://raw.githubusercontent.com/seancorfield/next-jdbc/a7833b1858ef908f69cf33b514b529ad216b297d/build.clj | clojure | (ns build
"next.jdbc's build script.
clojure -T:build ci
clojure -T:build deploy
Run tests via:
clojure -X:test
For more information, run:
clojure -A:deps -T:build help/doc"
(:refer-clojure :exclude [test])
(:require [clojure.tools.build.api :as b]
[clojure.tools.deps :as t]
[deps-deploy.deps-deploy :as dd]))
(def lib 'com.github.seancorfield/next.jdbc)
(defn- the-version [patch] (format "1.3.%s" patch))
(def version (the-version (b/git-count-revs nil)))
(def snapshot (the-version "999-SNAPSHOT"))
(def class-dir "target/classes")
(defn test "Run all the tests." [opts]
(doseq [alias [:1.10 :1.11 :master]]
(println "\nRunning tests for Clojure" (name alias))
(let [basis (b/create-basis {:aliases [:test alias]})
combined (t/combine-aliases basis [:test alias])
cmds (b/java-command
{:basis basis
:java-opts (:jvm-opts combined)
:main 'clojure.main
:main-args ["-m" "cognitect.test-runner"]})
{:keys [exit]} (b/process cmds)]
(when-not (zero? exit) (throw "Tests failed"))))
opts)
(defn- jar-opts [opts]
(let [version (if (:snapshot opts) snapshot version)]
(assoc opts
:lib lib :version version
:jar-file (format "target/%s-%s.jar" lib version)
:scm {:tag (str "v" version)}
:basis (b/create-basis {})
:class-dir class-dir
:target "target"
:src-dirs ["src"]
:src-pom "template/pom.xml")))
(defn ci "Run the CI pipeline of tests (and build the JAR)." [opts]
(test opts)
(b/delete {:path "target"})
(let [opts (jar-opts opts)]
(println "\nWriting pom.xml...")
(b/write-pom opts)
(println "\nCopying source...")
(b/copy-dir {:src-dirs ["resources" "src"] :target-dir class-dir})
(println "\nBuilding JAR...")
(b/jar opts))
opts)
(defn deploy "Deploy the JAR to Clojars." [opts]
(let [{:keys [jar-file] :as opts} (jar-opts opts)]
(dd/deploy {:installer :remote :artifact (b/resolve-path jar-file)
:pom-file (b/pom-path (select-keys opts [:lib :class-dir]))}))
opts)
| |
8c85900de71dfd1fe2bc89883950050dbb9057c398fc3d1bf38037f3a7785770 | immutant/immutant | project.clj | Copyright 2014 - 2017 Red Hat , Inc , and individual contributors .
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(defproject org.immutant/integs "2.1.11-SNAPSHOT"
:plugins [[lein-modules "0.3.11"]]
:dependencies [[org.immutant/immutant _]
[org.immutant/wildfly _]]
:aliases {"all" ^:replace ["do" "clean," "test"]}
:modules {:parent nil}
:main clojure.core/+ ; immutant war build requires
; a main... any no-arg taking
; fn will do
:profiles {:integ-base {:plugins [[lein-immutant "2.1.0"]]
:aliases {"test" ^:displace ["immutant" "test"]}
:modules {:parent ".."}}
:integ-messaging {:test-paths ["../messaging/test"]}
:integ-scheduling {:dependencies [[clj-time _]]
:test-paths ["../scheduling/test"]}
:integ-caching {:dependencies [[cheshire _]
[org.clojure/data.fressian _]
[org.clojure/core.memoize _]]
:test-paths ["../caching/test"]}
:integ-web {:dependencies [[io.pedestal/pedestal.service _]
[http.async.client _]
[org.clojars.tcrawley/gniazdo _]
[ring/ring-devel _]
[compojure _]
[org.glassfish.jersey.media/jersey-media-sse _
:exclusions [org.glassfish.jersey.core/jersey-server]]
[org.glassfish.jersey.core/jersey-client _]
[javax.ws.rs/javax.ws.rs-api "2.0.1"]]
:resource-paths ["../web/dev-resources"]
:test-paths ["../web/test-integration"]
:main integs.web}
:integ-transactions {:test-paths ["../transactions/test"]
:dependencies [[org.clojure/java.jdbc _]
[com.h2database/h2 _]]}
:web [:integ-base :integ-web]
:scheduling [:integ-base :integ-scheduling]
:messaging [:integ-base :integ-messaging]
:caching [:integ-base :integ-caching]
:transactions [:integ-base :integ-transactions]
:integs [:web :messaging :caching :scheduling :transactions]
because prj / read , , hooks , etc
:modules {:parent ".."}
:main integs.cluster
:dependencies [[org.immutant/fntest _]
[clj-http _]
[environ _]]
:plugins [[lein-environ "1.0.0"]]
:test-paths ^:replace ["test-clustering"]}
:eap-base {:env {:eap true}
:immutant {:war {:resource-paths ["eap-resources"]}}
:exclusions [org.hornetq/hornetq-jms-server org.hornetq/hornetq-server org.jboss.narayana.jta/narayana-jta]
required by http.async.client 1.2
[org.hornetq/hornetq-jms-server "2.3.25.Final"]
[org.hornetq/hornetq-server "2.3.25.Final"]
[org.jboss.jbossts.jta/narayana-jta "4.17.29.Final"]]}
:eap [:web :scheduling :messaging :caching :transactions :eap-base]})
| null | https://raw.githubusercontent.com/immutant/immutant/6ff8fa03acf73929f61f2ca75446cb559ddfc1ef/integration-tests/project.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
immutant war build requires
a main... any no-arg taking
fn will do | Copyright 2014 - 2017 Red Hat , Inc , and individual contributors .
distributed under the License is distributed on an " AS IS " BASIS ,
(defproject org.immutant/integs "2.1.11-SNAPSHOT"
:plugins [[lein-modules "0.3.11"]]
:dependencies [[org.immutant/immutant _]
[org.immutant/wildfly _]]
:aliases {"all" ^:replace ["do" "clean," "test"]}
:modules {:parent nil}
:profiles {:integ-base {:plugins [[lein-immutant "2.1.0"]]
:aliases {"test" ^:displace ["immutant" "test"]}
:modules {:parent ".."}}
:integ-messaging {:test-paths ["../messaging/test"]}
:integ-scheduling {:dependencies [[clj-time _]]
:test-paths ["../scheduling/test"]}
:integ-caching {:dependencies [[cheshire _]
[org.clojure/data.fressian _]
[org.clojure/core.memoize _]]
:test-paths ["../caching/test"]}
:integ-web {:dependencies [[io.pedestal/pedestal.service _]
[http.async.client _]
[org.clojars.tcrawley/gniazdo _]
[ring/ring-devel _]
[compojure _]
[org.glassfish.jersey.media/jersey-media-sse _
:exclusions [org.glassfish.jersey.core/jersey-server]]
[org.glassfish.jersey.core/jersey-client _]
[javax.ws.rs/javax.ws.rs-api "2.0.1"]]
:resource-paths ["../web/dev-resources"]
:test-paths ["../web/test-integration"]
:main integs.web}
:integ-transactions {:test-paths ["../transactions/test"]
:dependencies [[org.clojure/java.jdbc _]
[com.h2database/h2 _]]}
:web [:integ-base :integ-web]
:scheduling [:integ-base :integ-scheduling]
:messaging [:integ-base :integ-messaging]
:caching [:integ-base :integ-caching]
:transactions [:integ-base :integ-transactions]
:integs [:web :messaging :caching :scheduling :transactions]
because prj / read , , hooks , etc
:modules {:parent ".."}
:main integs.cluster
:dependencies [[org.immutant/fntest _]
[clj-http _]
[environ _]]
:plugins [[lein-environ "1.0.0"]]
:test-paths ^:replace ["test-clustering"]}
:eap-base {:env {:eap true}
:immutant {:war {:resource-paths ["eap-resources"]}}
:exclusions [org.hornetq/hornetq-jms-server org.hornetq/hornetq-server org.jboss.narayana.jta/narayana-jta]
required by http.async.client 1.2
[org.hornetq/hornetq-jms-server "2.3.25.Final"]
[org.hornetq/hornetq-server "2.3.25.Final"]
[org.jboss.jbossts.jta/narayana-jta "4.17.29.Final"]]}
:eap [:web :scheduling :messaging :caching :transactions :eap-base]})
|
99b4f4cb23d4a5bcdf858ebb9fd9fbe8ebcfb68eec149a014aea250969bdb91a | Spivoxity/obc-3 | stack.ml |
* Oxford Oberon-2 compiler
* stack.ml
* Copyright ( C ) 1995 , 1998
* Oxford Oberon-2 compiler
* stack.ml
* Copyright (C) J. M. Spivey 1995, 1998
*)
open Icode
open Symtab
open Print
open Dict
open Gcmap
We keep a stack of Booleans , each indicating whether an item on the
evaluation stack at runtime is a pointer or not .
evaluation stack at runtime is a pointer or not. *)
let stk = ref []
let maxd = ref 0
let labstate = Hashtbl.create 100
let push_stack flag s = flag :: s
let rec pop_stack r s = Util.drop r s
let rec nth_stack s r = List.nth s r
let count = function VoidT -> 0 | (DoubleT|LongT) -> 2 | _ -> 1
let f = false and t = true
let flags = function VoidT -> [] | (DoubleT|LongT) -> [f; f] | _ -> [f]
let arity =
function
ERROR (_, _) | STKMAP _ -> (0, [])
| CONST _ | HEXCONST _ -> (0, [f])
| TCONST (k, _) -> (0, flags k)
| (LOCAL _ | GLOBAL _) -> (0, [f])
| LOAD k -> (1, flags k)
| CHECK (NullPtr, _) -> (1, [t])
| ALIGN _ -> (1, [f])
| BOUND _ -> (1, [])
| POP n -> (n, [])
| STORE k -> (count k + 1, [])
| FLEXCOPY -> (2, []) | FIXCOPY -> (3, [])
| RETURN -> (0, [])
| LINE _ -> (0, [])
| STATLINK -> (1, [])
| CALL (n, k) -> (n+1, flags k)
| CHECK (GlobProc, _) -> (1, [])
| MONOP (k, _) -> (count k, flags k)
| CHECK (DivZero k, _) -> (count k, flags k)
| BINOP (k, (Eq|Lt|Gt|Leq|Geq|Neq)) -> (2 * count k, [f])
| BINOP (k, _) -> (2 * count k, flags k)
| OFFSET -> (2, [t])
| CONV (k1, k2) -> (count k1, flags k2)
| JCASE _ -> (1, [])
| JRANGE _ -> (3, [])
| i -> failwith (sprintf "stack_sim $" [fInst i])
let simulate i =
begin match i with
JUMP lab ->
Hashtbl.add labstate lab !stk
| TESTGEQ lab ->
let s = pop_stack 1 !stk in
Hashtbl.add labstate lab s; stk := s
| DUP n ->
stk := push_stack (nth_stack !stk n) !stk
| JUMPC (k, _, lab) ->
let s = pop_stack (2 * count k) !stk in
Hashtbl.add labstate lab s; stk := s
| SWAP ->
let x = nth_stack !stk 0 and y = nth_stack !stk 1 in
stk := push_stack y (push_stack x (pop_stack 2 !stk))
| LABEL lab ->
(* This assumes that a label has an empty stack if it is
not the target of some forward branch. *)
stk := (try Hashtbl.find labstate lab with Not_found -> [])
| _ ->
let (k, xs) = arity i in
stk := List.fold_right push_stack xs (pop_stack k !stk)
end;
let d = List.length !stk in
maxd := max d !maxd;
if !Config.debug > 1 then
printf "! Sim: $ [$/$]\n" [fInst i; fNum d; fNum !maxd]
let reset () =
Hashtbl.clear labstate; stk := []; maxd := 0
let mark () =
stk := push_stack true (pop_stack 1 !stk)
The stack map for a procedure call shows the pointer layout of the
eval stack of the calling procedure . It 's based at ( bp+HEAD+args ) ,
the address of the stack word just beyond the parameters of the
callee . Thus the stack map will be zero except in the rare case that
extra pointers stay on the stack of the caller throughout execution of
the callee .
When make_map is called , the n parameters ( maybe including a static
link ) and the code address of the callee are on the simulated stack ;
so we drop n+1 stack items before computing the map .
eval stack of the calling procedure. It's based at (bp+HEAD+args),
the address of the stack word just beyond the parameters of the
callee. Thus the stack map will be zero except in the rare case that
extra pointers stay on the stack of the caller throughout execution of
the callee.
When make_map is called, the n parameters (maybe including a static
link) and the code address of the callee are on the simulated stack;
so we drop n+1 stack items before computing the map. *)
let make_map k n =
let h p m = join (if p then ptr_map else null_map) (shift 4 m) in
shift (4*n) (List.fold_right h (pop_stack (n+k) !stk) null_map)
let max_depth () = !maxd
let fStack =
let f prf =
let n = List.length !stk in
for i = 0 to n-1 do
prf "$" [fChr (if nth_stack !stk i then 't' else 'f')]
done in
fExt f
| null | https://raw.githubusercontent.com/Spivoxity/obc-3/9e5094df8382ac5dd25ff08768277be6bd71a4ae/compiler/stack.ml | ocaml | This assumes that a label has an empty stack if it is
not the target of some forward branch. |
* Oxford Oberon-2 compiler
* stack.ml
* Copyright ( C ) 1995 , 1998
* Oxford Oberon-2 compiler
* stack.ml
* Copyright (C) J. M. Spivey 1995, 1998
*)
open Icode
open Symtab
open Print
open Dict
open Gcmap
We keep a stack of Booleans , each indicating whether an item on the
evaluation stack at runtime is a pointer or not .
evaluation stack at runtime is a pointer or not. *)
let stk = ref []
let maxd = ref 0
let labstate = Hashtbl.create 100
let push_stack flag s = flag :: s
let rec pop_stack r s = Util.drop r s
let rec nth_stack s r = List.nth s r
let count = function VoidT -> 0 | (DoubleT|LongT) -> 2 | _ -> 1
let f = false and t = true
let flags = function VoidT -> [] | (DoubleT|LongT) -> [f; f] | _ -> [f]
let arity =
function
ERROR (_, _) | STKMAP _ -> (0, [])
| CONST _ | HEXCONST _ -> (0, [f])
| TCONST (k, _) -> (0, flags k)
| (LOCAL _ | GLOBAL _) -> (0, [f])
| LOAD k -> (1, flags k)
| CHECK (NullPtr, _) -> (1, [t])
| ALIGN _ -> (1, [f])
| BOUND _ -> (1, [])
| POP n -> (n, [])
| STORE k -> (count k + 1, [])
| FLEXCOPY -> (2, []) | FIXCOPY -> (3, [])
| RETURN -> (0, [])
| LINE _ -> (0, [])
| STATLINK -> (1, [])
| CALL (n, k) -> (n+1, flags k)
| CHECK (GlobProc, _) -> (1, [])
| MONOP (k, _) -> (count k, flags k)
| CHECK (DivZero k, _) -> (count k, flags k)
| BINOP (k, (Eq|Lt|Gt|Leq|Geq|Neq)) -> (2 * count k, [f])
| BINOP (k, _) -> (2 * count k, flags k)
| OFFSET -> (2, [t])
| CONV (k1, k2) -> (count k1, flags k2)
| JCASE _ -> (1, [])
| JRANGE _ -> (3, [])
| i -> failwith (sprintf "stack_sim $" [fInst i])
let simulate i =
begin match i with
JUMP lab ->
Hashtbl.add labstate lab !stk
| TESTGEQ lab ->
let s = pop_stack 1 !stk in
Hashtbl.add labstate lab s; stk := s
| DUP n ->
stk := push_stack (nth_stack !stk n) !stk
| JUMPC (k, _, lab) ->
let s = pop_stack (2 * count k) !stk in
Hashtbl.add labstate lab s; stk := s
| SWAP ->
let x = nth_stack !stk 0 and y = nth_stack !stk 1 in
stk := push_stack y (push_stack x (pop_stack 2 !stk))
| LABEL lab ->
stk := (try Hashtbl.find labstate lab with Not_found -> [])
| _ ->
let (k, xs) = arity i in
stk := List.fold_right push_stack xs (pop_stack k !stk)
end;
let d = List.length !stk in
maxd := max d !maxd;
if !Config.debug > 1 then
printf "! Sim: $ [$/$]\n" [fInst i; fNum d; fNum !maxd]
let reset () =
Hashtbl.clear labstate; stk := []; maxd := 0
let mark () =
stk := push_stack true (pop_stack 1 !stk)
The stack map for a procedure call shows the pointer layout of the
eval stack of the calling procedure . It 's based at ( bp+HEAD+args ) ,
the address of the stack word just beyond the parameters of the
callee . Thus the stack map will be zero except in the rare case that
extra pointers stay on the stack of the caller throughout execution of
the callee .
When make_map is called , the n parameters ( maybe including a static
link ) and the code address of the callee are on the simulated stack ;
so we drop n+1 stack items before computing the map .
eval stack of the calling procedure. It's based at (bp+HEAD+args),
the address of the stack word just beyond the parameters of the
callee. Thus the stack map will be zero except in the rare case that
extra pointers stay on the stack of the caller throughout execution of
the callee.
When make_map is called, the n parameters (maybe including a static
link) and the code address of the callee are on the simulated stack;
so we drop n+1 stack items before computing the map. *)
let make_map k n =
let h p m = join (if p then ptr_map else null_map) (shift 4 m) in
shift (4*n) (List.fold_right h (pop_stack (n+k) !stk) null_map)
let max_depth () = !maxd
let fStack =
let f prf =
let n = List.length !stk in
for i = 0 to n-1 do
prf "$" [fChr (if nth_stack !stk i then 't' else 'f')]
done in
fExt f
|
5314ebd482ee93cb934a0a637450e6cbcc640ebd0487f4b4672339941e659ca0 | flexsurfer/re-frame-steroid | views.clj | (ns steroid.views
(:require [clojure.walk :as w]))
;; source -im/status-react/blob/develop/src/status_im/utils/views.clj
(defn atom? [sub]
(or (vector? sub)
(and (seq sub)
(#{`reagent.core/atom} (first sub)))))
(defn walk-sub [sub form->sym]
(if (coll? sub)
(w/postwalk (fn [f]
(or (form->sym f) f)) sub)
(or (form->sym sub) sub)))
(defn prepare-subs [subs]
(let [pairs (map (fn [[form sub]]
{:form form
:sub sub
:sym (if (atom? sub)
(gensym (str (if (map? form) "keys" form)))
form)})
(partition 2 subs))
form->sym (->> pairs
(map (fn [{:keys [form sym]}]
[form sym]))
(into {}))]
[(mapcat (fn [{:keys [form sym sub]}]
(if (vector? sub)
[sym `(re-frame.core/subscribe ~(walk-sub sub form->sym))]
[form (walk-sub sub form->sym)]))
pairs)
(apply concat (keep (fn [{:keys [sym form sub]}]
(when (atom? sub)
[form `(deref ~sym)]))
pairs))]))
(defmacro letsubs [args & body])
(defmacro defview
[n params & rest-body]
(let [first-symbol (ffirst rest-body)
rest-body' (if (and (symbol? first-symbol)
(= (name first-symbol) "letsubs"))
(rest (first rest-body))
rest-body)
[subs component-map body] (case (count rest-body')
1 [nil {} (first rest-body')]
2 (let [first-element (first rest-body')]
(if (map? first-element)
[nil first-element (second rest-body')]
[(first rest-body') {} (second rest-body')]))
3 rest-body')
[subs-bindings vars-bindings] (prepare-subs subs)]
`(defn ~n ~params
(let [~@subs-bindings]
(reagent.core/create-class
(merge ~(->> component-map
(map (fn [[k f]]
(let [args (gensym "args")]
[k `(fn [& ~args]
(let [~@vars-bindings]
(apply ~f ~args)))])))
(into {}))
{:display-name (name '~n)
:reagent-render
(fn ~params
(let [~@vars-bindings]
~body))})))))) | null | https://raw.githubusercontent.com/flexsurfer/re-frame-steroid/c763cb80e34b191824af204bded87a9ccbc23dc3/src/steroid/views.clj | clojure | source -im/status-react/blob/develop/src/status_im/utils/views.clj | (ns steroid.views
(:require [clojure.walk :as w]))
(defn atom? [sub]
(or (vector? sub)
(and (seq sub)
(#{`reagent.core/atom} (first sub)))))
(defn walk-sub [sub form->sym]
(if (coll? sub)
(w/postwalk (fn [f]
(or (form->sym f) f)) sub)
(or (form->sym sub) sub)))
(defn prepare-subs [subs]
(let [pairs (map (fn [[form sub]]
{:form form
:sub sub
:sym (if (atom? sub)
(gensym (str (if (map? form) "keys" form)))
form)})
(partition 2 subs))
form->sym (->> pairs
(map (fn [{:keys [form sym]}]
[form sym]))
(into {}))]
[(mapcat (fn [{:keys [form sym sub]}]
(if (vector? sub)
[sym `(re-frame.core/subscribe ~(walk-sub sub form->sym))]
[form (walk-sub sub form->sym)]))
pairs)
(apply concat (keep (fn [{:keys [sym form sub]}]
(when (atom? sub)
[form `(deref ~sym)]))
pairs))]))
(defmacro letsubs [args & body])
(defmacro defview
[n params & rest-body]
(let [first-symbol (ffirst rest-body)
rest-body' (if (and (symbol? first-symbol)
(= (name first-symbol) "letsubs"))
(rest (first rest-body))
rest-body)
[subs component-map body] (case (count rest-body')
1 [nil {} (first rest-body')]
2 (let [first-element (first rest-body')]
(if (map? first-element)
[nil first-element (second rest-body')]
[(first rest-body') {} (second rest-body')]))
3 rest-body')
[subs-bindings vars-bindings] (prepare-subs subs)]
`(defn ~n ~params
(let [~@subs-bindings]
(reagent.core/create-class
(merge ~(->> component-map
(map (fn [[k f]]
(let [args (gensym "args")]
[k `(fn [& ~args]
(let [~@vars-bindings]
(apply ~f ~args)))])))
(into {}))
{:display-name (name '~n)
:reagent-render
(fn ~params
(let [~@vars-bindings]
~body))})))))) |
0f6cdb80a97b9524092dd4755100eedfe8a2c261a2891d93f2ce59eba20b7cb9 | dongcarl/guix | nar.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2012 , 2013 , 2014 , 2015 , 2016 , 2017 , 2018 , 2019 , 2020 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (test-nar)
#:use-module (guix tests)
#:use-module (guix nar)
#:use-module (guix serialization)
#:use-module (guix store)
#:use-module ((gcrypt hash)
#:select (open-sha256-port open-sha256-input-port))
#:use-module ((guix packages)
#:select (base32))
#:use-module ((guix build utils)
#:select (find-files))
#:use-module (rnrs bytevectors)
#:use-module (rnrs io ports)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-11)
#:use-module (srfi srfi-26)
#:use-module (srfi srfi-34)
#:use-module (srfi srfi-35)
#:use-module (srfi srfi-64)
#:use-module (ice-9 ftw)
#:use-module (ice-9 regex)
#:use-module ((ice-9 control) #:select (let/ec))
#:use-module (ice-9 match))
;; Test the (guix nar) module.
;;;
File system testing tools , initially contributed to , then libchop .
;;;
(define (random-file-size)
512 KiB
64 KiB
(inexact->exact
(max 0 (round (+ %average (* %stddev (random:normal)))))))
(define (make-file-tree dir tree)
"Make file system TREE at DIR."
(let loop ((dir dir)
(tree tree))
(define (scope file)
(string-append dir "/" file))
(match tree
(('directory name (body ...))
(mkdir (scope name))
(for-each (cute loop (scope name) <>) body))
(('directory name (? integer? mode) (body ...))
(mkdir (scope name))
(for-each (cute loop (scope name) <>) body)
(chmod (scope name) mode))
((file)
(populate-file (scope file) (random-file-size)))
((file (? integer? mode))
(populate-file (scope file) (random-file-size))
(chmod (scope file) mode))
((from '-> to)
(symlink to (scope from))))))
(define (delete-file-tree dir tree)
"Delete file TREE from DIR."
(let loop ((dir dir)
(tree tree))
(define (scope file)
(string-append dir "/" file))
(match tree
(('directory name (body ...))
(for-each (cute loop (scope name) <>) body)
(rmdir (scope name)))
(('directory name (? integer? mode) (body ...))
(chmod (scope name) #o755) ; make sure it can be entered
(for-each (cute loop (scope name) <>) body)
(rmdir (scope name)))
((from '-> _)
(delete-file (scope from)))
((file _ ...)
(delete-file (scope file))))))
(define-syntax-rule (with-file-tree dir tree body ...)
(dynamic-wind
(lambda ()
(make-file-tree dir 'tree))
(lambda ()
body ...)
(lambda ()
(delete-file-tree dir 'tree))))
(define (file-tree-equal? input output)
"Return #t if the file trees at INPUT and OUTPUT are equal."
(define strip
(cute string-drop <> (string-length input)))
(define sibling
(compose (cut string-append output <>) strip))
(file-system-fold (const #t)
(lambda (name stat result) ; leaf
(and result
(file=? name (sibling name))))
(lambda (name stat result) ; down
result)
(lambda (name stat result) ; up
result)
(const #f) ; skip
(lambda (name stat errno result)
(pk 'error name stat errno)
#f)
#t ; result
input
lstat))
(define (populate-file file size)
(call-with-output-file file
(lambda (p)
(put-bytevector p (random-bytevector size)))))
(define (rm-rf dir)
(file-system-fold (const #t) ; enter?
(lambda (file stat result) ; leaf
(unless (eq? 'symlink (stat:type stat))
(chmod file #o644))
(delete-file file))
(lambda (dir stat result) ; down
(chmod dir #o755))
(lambda (dir stat result) ; up
(rmdir dir))
(const #t) ; skip
(const #t) ; error
#t
dir
lstat))
(define %test-dir
An output directory under $ top_builddir .
(string-append (dirname (search-path %load-path "pre-inst-env"))
"/test-nar-" (number->string (getpid))))
(test-begin "nar")
(test-assert "write-file-tree + restore-file"
(let* ((file1 (search-path %load-path "guix.scm"))
(file2 (search-path %load-path "guix/base32.scm"))
(file3 "#!/bin/something")
(output (string-append %test-dir "/output")))
(dynamic-wind
(lambda () #t)
(lambda ()
(define-values (port get-bytevector)
(open-bytevector-output-port))
(write-file-tree "root" port
#:file-type+size
(match-lambda
("root"
(values 'directory 0))
("root/foo"
(values 'regular (stat:size (stat file1))))
("root/lnk"
(values 'symlink 0))
("root/dir"
(values 'directory 0))
("root/dir/bar"
(values 'regular (stat:size (stat file2))))
("root/dir/exe"
(values 'executable (string-length file3))))
#:file-port
(match-lambda
("root/foo" (open-input-file file1))
("root/dir/bar" (open-input-file file2))
("root/dir/exe" (open-input-string file3)))
#:symlink-target
(match-lambda
("root/lnk" "foo"))
#:directory-entries
(match-lambda
("root" '("foo" "dir" "lnk"))
("root/dir" '("bar" "exe"))))
(close-port port)
(rm-rf %test-dir)
(mkdir %test-dir)
(restore-file (open-bytevector-input-port (get-bytevector))
output)
(and (file=? (string-append output "/foo") file1)
(string=? (readlink (string-append output "/lnk"))
"foo")
(file=? (string-append output "/dir/bar") file2)
(string=? (call-with-input-file (string-append output "/dir/exe")
get-string-all)
file3)
(> (logand (stat:mode (lstat (string-append output "/dir/exe")))
#o100)
0)
(equal? '("." ".." "bar" "exe")
(scandir (string-append output "/dir")))
(equal? '("." ".." "dir" "foo" "lnk")
(scandir output))))
(lambda ()
(false-if-exception (rm-rf %test-dir))))))
(test-equal "write-file-tree + fold-archive"
'(("R" directory #f)
("R/dir" directory #f)
("R/dir/exe" executable "1234")
("R/dir" directory-complete #f)
("R/foo" regular "abcdefg")
("R/lnk" symlink "foo")
("R" directory-complete #f))
(let ()
(define-values (port get-bytevector)
(open-bytevector-output-port))
(write-file-tree "root" port
#:file-type+size
(match-lambda
("root"
(values 'directory 0))
("root/foo"
(values 'regular 7))
("root/lnk"
(values 'symlink 0))
("root/dir"
(values 'directory 0))
("root/dir/exe"
(values 'executable 4)))
#:file-port
(match-lambda
("root/foo" (open-input-string "abcdefg"))
("root/dir/exe" (open-input-string "1234")))
#:symlink-target
(match-lambda
("root/lnk" "foo"))
#:directory-entries
(match-lambda
("root" '("foo" "dir" "lnk"))
("root/dir" '("exe"))))
(close-port port)
(reverse
(fold-archive (lambda (file type contents result)
(let ((contents (if (memq type '(regular executable))
(utf8->string
(get-bytevector-n (car contents)
(cdr contents)))
contents)))
(cons `(,file ,type ,contents)
result)))
'()
(open-bytevector-input-port (get-bytevector))
"R"))))
(test-equal "write-file-tree + fold-archive, flat file"
'(("R" regular "abcdefg"))
(let ()
(define-values (port get-bytevector)
(open-bytevector-output-port))
(write-file-tree "root" port
#:file-type+size
(match-lambda
("root" (values 'regular 7)))
#:file-port
(match-lambda
("root" (open-input-string "abcdefg"))))
(close-port port)
(reverse
(fold-archive (lambda (file type contents result)
(let ((contents (utf8->string
(get-bytevector-n (car contents)
(cdr contents)))))
(cons `(,file ,type ,contents) result)))
'()
(open-bytevector-input-port (get-bytevector))
"R"))))
(test-assert "write-file supports non-file output ports"
(let ((input (string-append (dirname (search-path %load-path "guix.scm"))
"/guix"))
(output (%make-void-port "w")))
(write-file input output)
#t))
(test-equal "write-file puts file in C locale collation order"
(base32 "0sfn5r63k88w9ls4hivnvscg82bqg8a0w7955l6xlk4g96jnb2z3")
(let ((input (string-append %test-dir ".input")))
(dynamic-wind
(lambda ()
(define (touch file)
(call-with-output-file (string-append input "/" file)
(const #t)))
(mkdir input)
(touch "B")
(touch "Z")
(touch "a")
(symlink "B" (string-append input "/z")))
(lambda ()
(let-values (((port get-hash) (open-sha256-port)))
(write-file input port)
(close-port port)
(get-hash)))
(lambda ()
(rm-rf input)))))
(test-equal "restore-file with incomplete input"
(string-append %test-dir "/foo")
(let ((port (open-bytevector-input-port #vu8(1 2 3))))
(guard (c ((nar-error? c)
(and (eq? port (nar-error-port c))
(nar-error-file c))))
(restore-file port (string-append %test-dir "/foo"))
#f)))
(test-assert "write-file + restore-file"
(let* ((input (string-append (dirname (search-path %load-path "guix.scm"))
"/guix"))
(output %test-dir)
(nar (string-append output ".nar")))
(dynamic-wind
(lambda () #t)
(lambda ()
(call-with-output-file nar
(cut write-file input <>))
(call-with-input-file nar
(cut restore-file <> output))
(file-tree-equal? input output))
(lambda ()
(false-if-exception (delete-file nar))
(false-if-exception (rm-rf output))))))
(test-assert "write-file + restore-file with symlinks"
(let ((input (string-append %test-dir ".input")))
(mkdir input)
(dynamic-wind
(const #t)
(lambda ()
(with-file-tree input
(directory "root"
(("reg") ("exe" #o777) ("sym" -> "reg")))
(let* ((output %test-dir)
(nar (string-append output ".nar")))
(dynamic-wind
(lambda () #t)
(lambda ()
(call-with-output-file nar
(cut write-file input <>))
(call-with-input-file nar
(cut restore-file <> output))
(and (file-tree-equal? input output)
(every (lambda (file)
(canonical-file?
(string-append output "/" file)))
'("root" "root/reg" "root/exe"))))
(lambda ()
(false-if-exception (delete-file nar))
(false-if-exception (rm-rf output)))))))
(lambda ()
(rmdir input)))))
(test-assert "write-file #:select? + restore-file"
(let ((input (string-append %test-dir ".input")))
(mkdir input)
(dynamic-wind
(const #t)
(lambda ()
(with-file-tree input
(directory "root"
((directory "a" (("x") ("y") ("z")))
("b") ("c") ("d" -> "b")))
(let* ((output %test-dir)
(nar (string-append output ".nar")))
(dynamic-wind
(lambda () #t)
(lambda ()
(call-with-output-file nar
(lambda (port)
(write-file input port
#:select?
(lambda (file stat)
(and (not (string=? (basename file)
"a"))
(not (eq? (stat:type stat)
'symlink)))))))
(call-with-input-file nar
(cut restore-file <> output))
;; Make sure "a" and "d" have been filtered out.
(and (not (file-exists? (string-append output "/root/a")))
(file=? (string-append output "/root/b")
(string-append input "/root/b"))
(file=? (string-append output "/root/c")
(string-append input "/root/c"))
(not (file-exists? (string-append output "/root/d")))))
(lambda ()
(false-if-exception (delete-file nar))
(false-if-exception (rm-rf output)))))))
(lambda ()
(rmdir input)))))
(test-eq "restore-file with non-UTF8 locale" ;<>
'encoding-error
(let* ((file (search-path %load-path "guix.scm"))
(output (string-append %test-dir "/output"))
(locale (setlocale LC_ALL "C")))
(dynamic-wind
(lambda () #t)
(lambda ()
(define-values (port get-bytevector)
(open-bytevector-output-port))
(write-file-tree "root" port
#:file-type+size
(match-lambda
("root" (values 'directory 0))
("root/λ" (values 'regular 0)))
#:file-port (const (%make-void-port "r"))
#:symlink-target (const #f)
#:directory-entries (const '("λ")))
(close-port port)
(mkdir %test-dir)
(catch 'encoding-error
(lambda ()
;; This show throw to 'encoding-error.
(restore-file (open-bytevector-input-port (get-bytevector))
output)
(scandir output))
(lambda args
'encoding-error)))
(lambda ()
(false-if-exception (rm-rf %test-dir))
(setlocale LC_ALL locale)))))
;; XXX: Tell the 'deduplicate' procedure what store we're actually using.
(setenv "NIX_STORE" (%store-prefix))
(test-assert "restore-file-set (signed, valid)"
(with-store store
(let* ((texts (unfold (cut >= <> 10)
(lambda _ (random-text))
1+
0))
(files (map (cut add-text-to-store store "text" <>) texts))
(dump (call-with-bytevector-output-port
(cut export-paths store files <>))))
(delete-paths store files)
(and (every (negate file-exists?) files)
(let* ((source (open-bytevector-input-port dump))
(imported (restore-file-set source)))
(and (equal? imported files)
(every (lambda (file)
(and (file-exists? file)
(valid-path? store file)))
files)
(equal? texts
(map (lambda (file)
(call-with-input-file file
get-string-all))
files))
(every canonical-file? files)))))))
(test-assert "restore-file-set with directories (signed, valid)"
;; <> describes a bug whereby directories
;; containing files subject to deduplication were not canonicalized--i.e.,
their mtime and permissions were not reset . Ensure that this bug is
;; gone.
(with-store store
(let* ((text1 (random-text))
(text2 (random-text))
(tree `("tree" directory
("a" regular (data ,text1))
("b" directory
("c" regular (data ,text2))
("d" regular (data ,text1))))) ;duplicate
(file (add-file-tree-to-store store tree))
(dump (call-with-bytevector-output-port
(cute export-paths store (list file) <>))))
(delete-paths store (list file))
(and (not (file-exists? file))
(let* ((source (open-bytevector-input-port dump))
(imported (restore-file-set source)))
(and (equal? imported (list file))
(file-exists? file)
(valid-path? store file)
(string=? text1
(call-with-input-file (string-append file "/a")
get-string-all))
(string=? text2
(call-with-input-file
(string-append file "/b/c")
get-string-all))
(= (stat:ino (stat (string-append file "/a"))) ;deduplication
(stat:ino (stat (string-append file "/b/d"))))
(every canonical-file?
(find-files file #:directories? #t))))))))
(test-assert "restore-file-set (missing signature)"
(let/ec return
(with-store store
(let* ((file (add-text-to-store store "foo" (random-text)))
(dump (call-with-bytevector-output-port
(cute export-paths store (list file) <>
#:sign? #f))))
(delete-paths store (list file))
(and (not (file-exists? file))
(let ((source (open-bytevector-input-port dump)))
(guard (c ((nar-signature-error? c)
(let ((message (condition-message c))
(port (nar-error-port c)))
(return
(and (string-match "lacks.*signature" message)
(string=? file (nar-error-file c))
(eq? source port))))))
(restore-file-set source))
#f))))))
(test-assert "restore-file-set (corrupt)"
(let/ec return
(with-store store
(let* ((file (add-text-to-store store "foo"
(random-text)))
(dump (call-with-bytevector-output-port
(cute export-paths store (list file) <>))))
(delete-paths store (list file))
;; Flip a byte in the file contents.
(let* ((index 120)
(byte (bytevector-u8-ref dump index)))
(bytevector-u8-set! dump index (logxor #xff byte)))
(and (not (file-exists? file))
(let ((source (open-bytevector-input-port dump)))
(guard (c ((nar-invalid-hash-error? c)
(let ((message (condition-message c))
(port (nar-error-port c)))
(return
(and (string-contains message "hash")
(string=? file (nar-error-file c))
(eq? source port))))))
(restore-file-set source))
#f))))))
(test-end "nar")
;;; Local Variables:
eval : ( put ' with - file - tree ' scheme - indent - function 2 )
;;; End:
| null | https://raw.githubusercontent.com/dongcarl/guix/82543e9649da2da9a5285ede4ec4f718fd740fcb/tests/nar.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Test the (guix nar) module.
make sure it can be entered
leaf
down
up
skip
result
enter?
leaf
down
up
skip
error
Make sure "a" and "d" have been filtered out.
<>
This show throw to 'encoding-error.
XXX: Tell the 'deduplicate' procedure what store we're actually using.
<> describes a bug whereby directories
containing files subject to deduplication were not canonicalized--i.e.,
gone.
duplicate
deduplication
Flip a byte in the file contents.
Local Variables:
End: | Copyright © 2012 , 2013 , 2014 , 2015 , 2016 , 2017 , 2018 , 2019 , 2020 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (test-nar)
#:use-module (guix tests)
#:use-module (guix nar)
#:use-module (guix serialization)
#:use-module (guix store)
#:use-module ((gcrypt hash)
#:select (open-sha256-port open-sha256-input-port))
#:use-module ((guix packages)
#:select (base32))
#:use-module ((guix build utils)
#:select (find-files))
#:use-module (rnrs bytevectors)
#:use-module (rnrs io ports)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-11)
#:use-module (srfi srfi-26)
#:use-module (srfi srfi-34)
#:use-module (srfi srfi-35)
#:use-module (srfi srfi-64)
#:use-module (ice-9 ftw)
#:use-module (ice-9 regex)
#:use-module ((ice-9 control) #:select (let/ec))
#:use-module (ice-9 match))
File system testing tools , initially contributed to , then libchop .
(define (random-file-size)
512 KiB
64 KiB
(inexact->exact
(max 0 (round (+ %average (* %stddev (random:normal)))))))
(define (make-file-tree dir tree)
"Make file system TREE at DIR."
(let loop ((dir dir)
(tree tree))
(define (scope file)
(string-append dir "/" file))
(match tree
(('directory name (body ...))
(mkdir (scope name))
(for-each (cute loop (scope name) <>) body))
(('directory name (? integer? mode) (body ...))
(mkdir (scope name))
(for-each (cute loop (scope name) <>) body)
(chmod (scope name) mode))
((file)
(populate-file (scope file) (random-file-size)))
((file (? integer? mode))
(populate-file (scope file) (random-file-size))
(chmod (scope file) mode))
((from '-> to)
(symlink to (scope from))))))
(define (delete-file-tree dir tree)
"Delete file TREE from DIR."
(let loop ((dir dir)
(tree tree))
(define (scope file)
(string-append dir "/" file))
(match tree
(('directory name (body ...))
(for-each (cute loop (scope name) <>) body)
(rmdir (scope name)))
(('directory name (? integer? mode) (body ...))
(for-each (cute loop (scope name) <>) body)
(rmdir (scope name)))
((from '-> _)
(delete-file (scope from)))
((file _ ...)
(delete-file (scope file))))))
(define-syntax-rule (with-file-tree dir tree body ...)
(dynamic-wind
(lambda ()
(make-file-tree dir 'tree))
(lambda ()
body ...)
(lambda ()
(delete-file-tree dir 'tree))))
(define (file-tree-equal? input output)
"Return #t if the file trees at INPUT and OUTPUT are equal."
(define strip
(cute string-drop <> (string-length input)))
(define sibling
(compose (cut string-append output <>) strip))
(file-system-fold (const #t)
(and result
(file=? name (sibling name))))
result)
result)
(lambda (name stat errno result)
(pk 'error name stat errno)
#f)
input
lstat))
(define (populate-file file size)
(call-with-output-file file
(lambda (p)
(put-bytevector p (random-bytevector size)))))
(define (rm-rf dir)
(unless (eq? 'symlink (stat:type stat))
(chmod file #o644))
(delete-file file))
(chmod dir #o755))
(rmdir dir))
#t
dir
lstat))
(define %test-dir
An output directory under $ top_builddir .
(string-append (dirname (search-path %load-path "pre-inst-env"))
"/test-nar-" (number->string (getpid))))
(test-begin "nar")
(test-assert "write-file-tree + restore-file"
(let* ((file1 (search-path %load-path "guix.scm"))
(file2 (search-path %load-path "guix/base32.scm"))
(file3 "#!/bin/something")
(output (string-append %test-dir "/output")))
(dynamic-wind
(lambda () #t)
(lambda ()
(define-values (port get-bytevector)
(open-bytevector-output-port))
(write-file-tree "root" port
#:file-type+size
(match-lambda
("root"
(values 'directory 0))
("root/foo"
(values 'regular (stat:size (stat file1))))
("root/lnk"
(values 'symlink 0))
("root/dir"
(values 'directory 0))
("root/dir/bar"
(values 'regular (stat:size (stat file2))))
("root/dir/exe"
(values 'executable (string-length file3))))
#:file-port
(match-lambda
("root/foo" (open-input-file file1))
("root/dir/bar" (open-input-file file2))
("root/dir/exe" (open-input-string file3)))
#:symlink-target
(match-lambda
("root/lnk" "foo"))
#:directory-entries
(match-lambda
("root" '("foo" "dir" "lnk"))
("root/dir" '("bar" "exe"))))
(close-port port)
(rm-rf %test-dir)
(mkdir %test-dir)
(restore-file (open-bytevector-input-port (get-bytevector))
output)
(and (file=? (string-append output "/foo") file1)
(string=? (readlink (string-append output "/lnk"))
"foo")
(file=? (string-append output "/dir/bar") file2)
(string=? (call-with-input-file (string-append output "/dir/exe")
get-string-all)
file3)
(> (logand (stat:mode (lstat (string-append output "/dir/exe")))
#o100)
0)
(equal? '("." ".." "bar" "exe")
(scandir (string-append output "/dir")))
(equal? '("." ".." "dir" "foo" "lnk")
(scandir output))))
(lambda ()
(false-if-exception (rm-rf %test-dir))))))
(test-equal "write-file-tree + fold-archive"
'(("R" directory #f)
("R/dir" directory #f)
("R/dir/exe" executable "1234")
("R/dir" directory-complete #f)
("R/foo" regular "abcdefg")
("R/lnk" symlink "foo")
("R" directory-complete #f))
(let ()
(define-values (port get-bytevector)
(open-bytevector-output-port))
(write-file-tree "root" port
#:file-type+size
(match-lambda
("root"
(values 'directory 0))
("root/foo"
(values 'regular 7))
("root/lnk"
(values 'symlink 0))
("root/dir"
(values 'directory 0))
("root/dir/exe"
(values 'executable 4)))
#:file-port
(match-lambda
("root/foo" (open-input-string "abcdefg"))
("root/dir/exe" (open-input-string "1234")))
#:symlink-target
(match-lambda
("root/lnk" "foo"))
#:directory-entries
(match-lambda
("root" '("foo" "dir" "lnk"))
("root/dir" '("exe"))))
(close-port port)
(reverse
(fold-archive (lambda (file type contents result)
(let ((contents (if (memq type '(regular executable))
(utf8->string
(get-bytevector-n (car contents)
(cdr contents)))
contents)))
(cons `(,file ,type ,contents)
result)))
'()
(open-bytevector-input-port (get-bytevector))
"R"))))
(test-equal "write-file-tree + fold-archive, flat file"
'(("R" regular "abcdefg"))
(let ()
(define-values (port get-bytevector)
(open-bytevector-output-port))
(write-file-tree "root" port
#:file-type+size
(match-lambda
("root" (values 'regular 7)))
#:file-port
(match-lambda
("root" (open-input-string "abcdefg"))))
(close-port port)
(reverse
(fold-archive (lambda (file type contents result)
(let ((contents (utf8->string
(get-bytevector-n (car contents)
(cdr contents)))))
(cons `(,file ,type ,contents) result)))
'()
(open-bytevector-input-port (get-bytevector))
"R"))))
(test-assert "write-file supports non-file output ports"
(let ((input (string-append (dirname (search-path %load-path "guix.scm"))
"/guix"))
(output (%make-void-port "w")))
(write-file input output)
#t))
(test-equal "write-file puts file in C locale collation order"
(base32 "0sfn5r63k88w9ls4hivnvscg82bqg8a0w7955l6xlk4g96jnb2z3")
(let ((input (string-append %test-dir ".input")))
(dynamic-wind
(lambda ()
(define (touch file)
(call-with-output-file (string-append input "/" file)
(const #t)))
(mkdir input)
(touch "B")
(touch "Z")
(touch "a")
(symlink "B" (string-append input "/z")))
(lambda ()
(let-values (((port get-hash) (open-sha256-port)))
(write-file input port)
(close-port port)
(get-hash)))
(lambda ()
(rm-rf input)))))
(test-equal "restore-file with incomplete input"
(string-append %test-dir "/foo")
(let ((port (open-bytevector-input-port #vu8(1 2 3))))
(guard (c ((nar-error? c)
(and (eq? port (nar-error-port c))
(nar-error-file c))))
(restore-file port (string-append %test-dir "/foo"))
#f)))
(test-assert "write-file + restore-file"
(let* ((input (string-append (dirname (search-path %load-path "guix.scm"))
"/guix"))
(output %test-dir)
(nar (string-append output ".nar")))
(dynamic-wind
(lambda () #t)
(lambda ()
(call-with-output-file nar
(cut write-file input <>))
(call-with-input-file nar
(cut restore-file <> output))
(file-tree-equal? input output))
(lambda ()
(false-if-exception (delete-file nar))
(false-if-exception (rm-rf output))))))
(test-assert "write-file + restore-file with symlinks"
(let ((input (string-append %test-dir ".input")))
(mkdir input)
(dynamic-wind
(const #t)
(lambda ()
(with-file-tree input
(directory "root"
(("reg") ("exe" #o777) ("sym" -> "reg")))
(let* ((output %test-dir)
(nar (string-append output ".nar")))
(dynamic-wind
(lambda () #t)
(lambda ()
(call-with-output-file nar
(cut write-file input <>))
(call-with-input-file nar
(cut restore-file <> output))
(and (file-tree-equal? input output)
(every (lambda (file)
(canonical-file?
(string-append output "/" file)))
'("root" "root/reg" "root/exe"))))
(lambda ()
(false-if-exception (delete-file nar))
(false-if-exception (rm-rf output)))))))
(lambda ()
(rmdir input)))))
(test-assert "write-file #:select? + restore-file"
(let ((input (string-append %test-dir ".input")))
(mkdir input)
(dynamic-wind
(const #t)
(lambda ()
(with-file-tree input
(directory "root"
((directory "a" (("x") ("y") ("z")))
("b") ("c") ("d" -> "b")))
(let* ((output %test-dir)
(nar (string-append output ".nar")))
(dynamic-wind
(lambda () #t)
(lambda ()
(call-with-output-file nar
(lambda (port)
(write-file input port
#:select?
(lambda (file stat)
(and (not (string=? (basename file)
"a"))
(not (eq? (stat:type stat)
'symlink)))))))
(call-with-input-file nar
(cut restore-file <> output))
(and (not (file-exists? (string-append output "/root/a")))
(file=? (string-append output "/root/b")
(string-append input "/root/b"))
(file=? (string-append output "/root/c")
(string-append input "/root/c"))
(not (file-exists? (string-append output "/root/d")))))
(lambda ()
(false-if-exception (delete-file nar))
(false-if-exception (rm-rf output)))))))
(lambda ()
(rmdir input)))))
'encoding-error
(let* ((file (search-path %load-path "guix.scm"))
(output (string-append %test-dir "/output"))
(locale (setlocale LC_ALL "C")))
(dynamic-wind
(lambda () #t)
(lambda ()
(define-values (port get-bytevector)
(open-bytevector-output-port))
(write-file-tree "root" port
#:file-type+size
(match-lambda
("root" (values 'directory 0))
("root/λ" (values 'regular 0)))
#:file-port (const (%make-void-port "r"))
#:symlink-target (const #f)
#:directory-entries (const '("λ")))
(close-port port)
(mkdir %test-dir)
(catch 'encoding-error
(lambda ()
(restore-file (open-bytevector-input-port (get-bytevector))
output)
(scandir output))
(lambda args
'encoding-error)))
(lambda ()
(false-if-exception (rm-rf %test-dir))
(setlocale LC_ALL locale)))))
(setenv "NIX_STORE" (%store-prefix))
(test-assert "restore-file-set (signed, valid)"
(with-store store
(let* ((texts (unfold (cut >= <> 10)
(lambda _ (random-text))
1+
0))
(files (map (cut add-text-to-store store "text" <>) texts))
(dump (call-with-bytevector-output-port
(cut export-paths store files <>))))
(delete-paths store files)
(and (every (negate file-exists?) files)
(let* ((source (open-bytevector-input-port dump))
(imported (restore-file-set source)))
(and (equal? imported files)
(every (lambda (file)
(and (file-exists? file)
(valid-path? store file)))
files)
(equal? texts
(map (lambda (file)
(call-with-input-file file
get-string-all))
files))
(every canonical-file? files)))))))
(test-assert "restore-file-set with directories (signed, valid)"
their mtime and permissions were not reset . Ensure that this bug is
(with-store store
(let* ((text1 (random-text))
(text2 (random-text))
(tree `("tree" directory
("a" regular (data ,text1))
("b" directory
("c" regular (data ,text2))
(file (add-file-tree-to-store store tree))
(dump (call-with-bytevector-output-port
(cute export-paths store (list file) <>))))
(delete-paths store (list file))
(and (not (file-exists? file))
(let* ((source (open-bytevector-input-port dump))
(imported (restore-file-set source)))
(and (equal? imported (list file))
(file-exists? file)
(valid-path? store file)
(string=? text1
(call-with-input-file (string-append file "/a")
get-string-all))
(string=? text2
(call-with-input-file
(string-append file "/b/c")
get-string-all))
(stat:ino (stat (string-append file "/b/d"))))
(every canonical-file?
(find-files file #:directories? #t))))))))
(test-assert "restore-file-set (missing signature)"
(let/ec return
(with-store store
(let* ((file (add-text-to-store store "foo" (random-text)))
(dump (call-with-bytevector-output-port
(cute export-paths store (list file) <>
#:sign? #f))))
(delete-paths store (list file))
(and (not (file-exists? file))
(let ((source (open-bytevector-input-port dump)))
(guard (c ((nar-signature-error? c)
(let ((message (condition-message c))
(port (nar-error-port c)))
(return
(and (string-match "lacks.*signature" message)
(string=? file (nar-error-file c))
(eq? source port))))))
(restore-file-set source))
#f))))))
(test-assert "restore-file-set (corrupt)"
(let/ec return
(with-store store
(let* ((file (add-text-to-store store "foo"
(random-text)))
(dump (call-with-bytevector-output-port
(cute export-paths store (list file) <>))))
(delete-paths store (list file))
(let* ((index 120)
(byte (bytevector-u8-ref dump index)))
(bytevector-u8-set! dump index (logxor #xff byte)))
(and (not (file-exists? file))
(let ((source (open-bytevector-input-port dump)))
(guard (c ((nar-invalid-hash-error? c)
(let ((message (condition-message c))
(port (nar-error-port c)))
(return
(and (string-contains message "hash")
(string=? file (nar-error-file c))
(eq? source port))))))
(restore-file-set source))
#f))))))
(test-end "nar")
eval : ( put ' with - file - tree ' scheme - indent - function 2 )
|
6b284d9effa2d258269384e4720f455b97106a9aafd3d2b4c28e1afd99f9cbae | music-suite/music-suite | Score.hs |
# LANGUAGE TypeFamilies #
module Main where
import Music.Prelude
import Util
{-
Describe project here.
-}
music = c
Develop tools for basic counterpoint techniques .
- Presentations ( offset in time and pitch )
- Transformations
- Inversion , augment / dimin etc .
Try to encode the form of classical fugues in table form , see for example
Develop tools for basic counterpoint techniques.
- Presentations (offset in time and pitch)
- Transformations
- Inversion, augment/dimin etc.
Try to encode the form of classical fugues in table form, see for example
-}
subj :: Music
subj = pseq [a_ |*2,c,d,e|*3,g,
a |*2,g,a,e|*3,e,
d |*4,e,a_,b_,c,d]
fugue :: [(Part, Duration, Interval)] -> Music -> Music
fugue exps subj = ppar $ fmap (\(p,t,i) -> set parts' p . delay t . up i $ subj) exps
type Subject = Music
fugue1 :: Subject -> Music
fugue1 = fugue
[ (p1, 0, _P1)
, (p2, 1*4, _P5)
, (p3, 3*4, _P8)
, (p4, 4*4, -_P8)
, (p1, 9+0*4, _P8)
, (p2, 9+1*4, _P5)
, (p3, 9+3*4, _P1)
, (p4, 9+4*4, -_P8)
, (p1, 14+0*4, _P8)
, (p2, 14+2*4, -_P8)
, (p3, 14+3*4, _P5)
, (p4, 14+4*4, _P1)
, (p1, 20+0*4, _P5)
, (p2, 20+2*4, _P8)
, (p3, 20+3*4, _P8+_P5)
, (p4, 20+4*4, _P1)
]
where [p1,p2,p3,p4] = divide 4 violins
TODO
- Similar for double / triple fugues etc
- Functions to get distance between expositions and so on
- Terminology for the various combinations of subjects ( _ P4 above delayed 2 etc )
- Given a subject , list allowed permutations ( requires checking obviously )
- Or : just enumerate the permutations to allow for manual checking
TODO
- Similar for double/triple fugues etc
- Functions to get distance between expositions and so on
- Terminology for the various combinations of subjects (_P4 above delayed 2 etc)
- Given a subject, list allowed permutations (requires checking obviously)
- Or: just enumerate the permutations to allow for manual checking
-}
fugue1a = fugue1 $ pseq [c,cs] |> compress 4 (pseq [d,b,bb,a] |> pseq [gs,cs,d,ds])
fugue1a' = fugue1 $ pseq [c,cs] |> compress 4 (pseq [d,b,bb,b] |> pseq [gs,cs,d,ds])
fugue1b = fugue1 $ pseq [c,e] |> compress 4 (pseq [f,a,g,a] |> pseq [bb,a,g,f])
fugue1c = fugue1 $ pseq [c,d,b] |> compress 8 (pseq [e,d,e,d,e,g,a,e,d])
fugue1d = fugue1 $ compress 8 (pseq
[e,d,c|*2,f|*2,e,f,g,b,a|*2]
)
A nice 12 - tone style fugue
fugueX = fugue1 $ subjX
subjX =
[(0<->(1/4),c)^.event,((1/4)<->(1/2),bb)^.event,((1/2)<->(3/4),cs)^.event,((3/4)<->1,a)^.event,(1<->(5/4),d)^.event,((5/4)
<->(11/8),gs)^.event,((11/8)<->(3/2),g)^.event,((3/2)<->(13/8),g)^.event,((13/8)<->(27/16),f)^.event,((27/16)<->
(7/4),e)^.event,((7/4)<->2,fs)^.event,(2<->(17/8),fs)^.event,((17/8)<->(35/16),e)^.event,((35/16)<->(9/4),ds)^.event,((9/4)
<->(5/2),es)^.event,((5/2)<->(21/8),es)^.event,((21/8)<->(43/16),eb)^.event,((43/16)<->(11/4),d)^.event,((11/4)<->
(23/8),e)^.event,((23/8)<->3,b)^.event,(3<->(25/8),as)^.event,((25/8)<->(13/4),a)^.event,((13/4)<->(27/8),a)^.event,((27/8)
<->(55/16),g)^.event,((55/16)<->(7/2),fs)^.event,((7/2)<->(15/4),gs)^.event,((15/4)<->(31/8),gs)^.event,((31/8)<->
(63/16),fs)^.event,((63/16)<->4,es)^.event,(4<->(17/4),g)^.event,((17/4)<->(35/8),g)^.event,((35/8)<->
(71/16),f)^.event,((71/16)<->(9/2),e)^.event]^.score
| null | https://raw.githubusercontent.com/music-suite/music-suite/7f01fd62334c66418043b7a2d662af127f98685d/examples/pieces/Fugue/Score.hs | haskell |
Describe project here.
|
# LANGUAGE TypeFamilies #
module Main where
import Music.Prelude
import Util
music = c
Develop tools for basic counterpoint techniques .
- Presentations ( offset in time and pitch )
- Transformations
- Inversion , augment / dimin etc .
Try to encode the form of classical fugues in table form , see for example
Develop tools for basic counterpoint techniques.
- Presentations (offset in time and pitch)
- Transformations
- Inversion, augment/dimin etc.
Try to encode the form of classical fugues in table form, see for example
-}
subj :: Music
subj = pseq [a_ |*2,c,d,e|*3,g,
a |*2,g,a,e|*3,e,
d |*4,e,a_,b_,c,d]
fugue :: [(Part, Duration, Interval)] -> Music -> Music
fugue exps subj = ppar $ fmap (\(p,t,i) -> set parts' p . delay t . up i $ subj) exps
type Subject = Music
fugue1 :: Subject -> Music
fugue1 = fugue
[ (p1, 0, _P1)
, (p2, 1*4, _P5)
, (p3, 3*4, _P8)
, (p4, 4*4, -_P8)
, (p1, 9+0*4, _P8)
, (p2, 9+1*4, _P5)
, (p3, 9+3*4, _P1)
, (p4, 9+4*4, -_P8)
, (p1, 14+0*4, _P8)
, (p2, 14+2*4, -_P8)
, (p3, 14+3*4, _P5)
, (p4, 14+4*4, _P1)
, (p1, 20+0*4, _P5)
, (p2, 20+2*4, _P8)
, (p3, 20+3*4, _P8+_P5)
, (p4, 20+4*4, _P1)
]
where [p1,p2,p3,p4] = divide 4 violins
TODO
- Similar for double / triple fugues etc
- Functions to get distance between expositions and so on
- Terminology for the various combinations of subjects ( _ P4 above delayed 2 etc )
- Given a subject , list allowed permutations ( requires checking obviously )
- Or : just enumerate the permutations to allow for manual checking
TODO
- Similar for double/triple fugues etc
- Functions to get distance between expositions and so on
- Terminology for the various combinations of subjects (_P4 above delayed 2 etc)
- Given a subject, list allowed permutations (requires checking obviously)
- Or: just enumerate the permutations to allow for manual checking
-}
fugue1a = fugue1 $ pseq [c,cs] |> compress 4 (pseq [d,b,bb,a] |> pseq [gs,cs,d,ds])
fugue1a' = fugue1 $ pseq [c,cs] |> compress 4 (pseq [d,b,bb,b] |> pseq [gs,cs,d,ds])
fugue1b = fugue1 $ pseq [c,e] |> compress 4 (pseq [f,a,g,a] |> pseq [bb,a,g,f])
fugue1c = fugue1 $ pseq [c,d,b] |> compress 8 (pseq [e,d,e,d,e,g,a,e,d])
fugue1d = fugue1 $ compress 8 (pseq
[e,d,c|*2,f|*2,e,f,g,b,a|*2]
)
A nice 12 - tone style fugue
fugueX = fugue1 $ subjX
subjX =
[(0<->(1/4),c)^.event,((1/4)<->(1/2),bb)^.event,((1/2)<->(3/4),cs)^.event,((3/4)<->1,a)^.event,(1<->(5/4),d)^.event,((5/4)
<->(11/8),gs)^.event,((11/8)<->(3/2),g)^.event,((3/2)<->(13/8),g)^.event,((13/8)<->(27/16),f)^.event,((27/16)<->
(7/4),e)^.event,((7/4)<->2,fs)^.event,(2<->(17/8),fs)^.event,((17/8)<->(35/16),e)^.event,((35/16)<->(9/4),ds)^.event,((9/4)
<->(5/2),es)^.event,((5/2)<->(21/8),es)^.event,((21/8)<->(43/16),eb)^.event,((43/16)<->(11/4),d)^.event,((11/4)<->
(23/8),e)^.event,((23/8)<->3,b)^.event,(3<->(25/8),as)^.event,((25/8)<->(13/4),a)^.event,((13/4)<->(27/8),a)^.event,((27/8)
<->(55/16),g)^.event,((55/16)<->(7/2),fs)^.event,((7/2)<->(15/4),gs)^.event,((15/4)<->(31/8),gs)^.event,((31/8)<->
(63/16),fs)^.event,((63/16)<->4,es)^.event,(4<->(17/4),g)^.event,((17/4)<->(35/8),g)^.event,((35/8)<->
(71/16),f)^.event,((71/16)<->(9/2),e)^.event]^.score
|
dbc6f3037aac83e55e829c1eca6554246e8565e845264f971552d2b3c87764d7 | S8A/htdp-exercises | ex510.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex510) (read-case-sensitive #t) (teachpacks ((lib "batch-io.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "batch-io.rkt" "teachpack" "2htdp")) #f)))
; N String String -> String
; arranges all the words from the in-f into lines of maximal width w
; and writes them out to out-f
(define (fmt w in-f out-f)
(write-file out-f (lines->string (arrange-lines w (read-words/line in-f)))))
; [List-of [List-of String]] -> [List-of [List-of String]]
; arranges the given lines into new ones of maximal width w0
(define (arrange-lines w0 lines)
(local (; [List-of String] -> [List-of [List-of String]]
; arranges the given line into new ones of maximal width w0
(define (arrange-line l)
(cond
[(empty? l) '()]
[(cons? l)
(local ((define separated (separate-first '() l 0)))
(cons (first separated)
(arrange-line (first (rest separated)))))]))
; [List-of String] -> [List [List-of String] [List-of String]]
separates the first line of maximal width w0 from the rest of the
; given line
accumulator fst contains the words that form the first line .
; accumulator rst contains the rest of the words.
accumulator w is the width of the first line i.e. the number of
; characters in all words of fst plus a space after each one.
(define (separate-first fst rst w)
(cond
[(<= w (add1 w0))
(cond
[(empty? rst) (list (reverse fst) '())]
[(cons? rst)
(local ((define next (first rst)))
(separate-first (cons next fst)
(rest rst)
(+ w (length (explode next)) 1)))])]
[else (list (reverse (rest fst)) (cons (first fst) rst))])))
(foldr (lambda (fst rst)
(append (if (equal? fst '())
(list '())
fst)
rst))
'() (map arrange-line lines))))
(check-expect (arrange-lines 12
'(("hello" "world," "great" "big" "white" "world")
("my" "name" "is" "Samuel")
()
("Cheers.")))
'(("hello" "world,")
("great" "big")
("white" "world")
("my" "name" "is")
("Samuel")
()
("Cheers.")))
; [List-of [List-of String]] -> String
; converts the given lines into a string
(define (lines->string lines)
(local (; [List-of String] -> String
; converts the given line into a single string ending with a newline
(define (line->string l)
(local ((define str
(foldr (lambda (fst rst) (string-append fst rst)) "\n"
(map (lambda (w) (string-append " " w)) l))))
(if (string=? str "\n") str (substring str 1)))))
(foldr string-append "" (map line->string lines))))
(check-expect (lines->string '(("I'm" "not" "attached")
("to" "your" "world")
("nothing" "heals")
("nothing" "grows")
()
("M.M.")))
(string-append "I'm not attached\n"
"to your world\n"
"nothing heals\n"
"nothing grows\n\n"
"M.M.\n"))
| null | https://raw.githubusercontent.com/S8A/htdp-exercises/578e49834a9513f29ef81b7589b28081c5e0b69f/ex510.rkt | racket | about the language level of this file in a form that our tools can easily process.
N String String -> String
arranges all the words from the in-f into lines of maximal width w
and writes them out to out-f
[List-of [List-of String]] -> [List-of [List-of String]]
arranges the given lines into new ones of maximal width w0
[List-of String] -> [List-of [List-of String]]
arranges the given line into new ones of maximal width w0
[List-of String] -> [List [List-of String] [List-of String]]
given line
accumulator rst contains the rest of the words.
characters in all words of fst plus a space after each one.
[List-of [List-of String]] -> String
converts the given lines into a string
[List-of String] -> String
converts the given line into a single string ending with a newline | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex510) (read-case-sensitive #t) (teachpacks ((lib "batch-io.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "batch-io.rkt" "teachpack" "2htdp")) #f)))
(define (fmt w in-f out-f)
(write-file out-f (lines->string (arrange-lines w (read-words/line in-f)))))
(define (arrange-lines w0 lines)
(define (arrange-line l)
(cond
[(empty? l) '()]
[(cons? l)
(local ((define separated (separate-first '() l 0)))
(cons (first separated)
(arrange-line (first (rest separated)))))]))
separates the first line of maximal width w0 from the rest of the
accumulator fst contains the words that form the first line .
accumulator w is the width of the first line i.e. the number of
(define (separate-first fst rst w)
(cond
[(<= w (add1 w0))
(cond
[(empty? rst) (list (reverse fst) '())]
[(cons? rst)
(local ((define next (first rst)))
(separate-first (cons next fst)
(rest rst)
(+ w (length (explode next)) 1)))])]
[else (list (reverse (rest fst)) (cons (first fst) rst))])))
(foldr (lambda (fst rst)
(append (if (equal? fst '())
(list '())
fst)
rst))
'() (map arrange-line lines))))
(check-expect (arrange-lines 12
'(("hello" "world," "great" "big" "white" "world")
("my" "name" "is" "Samuel")
()
("Cheers.")))
'(("hello" "world,")
("great" "big")
("white" "world")
("my" "name" "is")
("Samuel")
()
("Cheers.")))
(define (lines->string lines)
(define (line->string l)
(local ((define str
(foldr (lambda (fst rst) (string-append fst rst)) "\n"
(map (lambda (w) (string-append " " w)) l))))
(if (string=? str "\n") str (substring str 1)))))
(foldr string-append "" (map line->string lines))))
(check-expect (lines->string '(("I'm" "not" "attached")
("to" "your" "world")
("nothing" "heals")
("nothing" "grows")
()
("M.M.")))
(string-append "I'm not attached\n"
"to your world\n"
"nothing heals\n"
"nothing grows\n\n"
"M.M.\n"))
|
b39b8fa871935f901709bab8e05d66fe3185075b41cc5c040ca3b79457e6d683 | igrep/typesafe-precure | Words.hs | # OPTIONS_GHC -fno - warn - missing - signatures #
module ACME.PreCure.Textbook.Suite.Words where
groupName_Suite = "ハートキャッチプリキュア!"
girlName_Hibiki = "北条 響"
girlName_Kanade = "南野 奏"
girlName_Ellen = "黒川 エレン"
girlName_Ako = "調辺 アコ"
cureName_Melody = "キュアメロディ"
cureName_Rhythm = "キュアリズム"
cureName_Beat = "キュアビート"
cureName_Muse = "キュアミューズ"
--
introducesHerselfAs_Melody = "爪弾くは荒ぶる調べ! キュアメロディー!"
introducesHerselfAs_Rhythm = "爪弾くはたおやかな調べ! キュアリズム!"
introducesHerselfAs_Beat = "爪弾くは魂の調べ! キュアビート!"
introducesHerselfAs_Muse = "爪弾くは女神の調べ! キュアミューズ!"
modulation = "レッツプレイ! プリキュア・モジュレーション!"
-- #Suite_Pretty_Cure.E2.99.AA
resonateSuiteOfN :: Int -> String
resonateSuiteOfN n =
"届け!" ++ show n ++ "人の組曲! スイートプリキュア!"
--
transformationSpeech_Melody_Rhythm =
[ modulation
, introducesHerselfAs_Melody
, introducesHerselfAs_Rhythm
, resonateSuiteOfN 2
]
-- -olfeXL8
transformationSpeech_Beat = [modulation, introducesHerselfAs_Beat]
--
transformationSpeech_Muse = [modulation, introducesHerselfAs_Muse]
g
transformationSpeech_Melody_Rhythm_Beat =
[ modulation
, introducesHerselfAs_Melody
, introducesHerselfAs_Rhythm
, introducesHerselfAs_Beat
, resonateSuiteOfN 3
]
-- -9PB0D6_Lc
transformationSpeech_Suite =
[ modulation
, introducesHerselfAs_Melody
, introducesHerselfAs_Rhythm
, introducesHerselfAs_Beat
, introducesHerselfAs_Muse
, resonateSuiteOfN 4
]
--
purificationSpeech_Melody =
[ "(ミミ~!)"
, "奏でましょう、奇跡のメロディー!ミラクル・ベルティエ!"
, "おいで、ミリー!"
, "(ミミ~!)"
, "翔けめぐれ、トーンのリング!"
, "プリキュア!ミュージックロンド!"
, "三拍子!1、2、3"
, "フィナーレ!"
]
--
purificationSpeech_Rhythm =
[ "(ファファ~!)"
, "刻みましょう、大いなるリズム!ファンタスティック・ベルティエ!"
, "おいで、ファリー!"
, "(ファファ~!)"
, "翔けめぐれ、トーンのリング!"
, "プリキュア!ミュージックロンド!"
, "三拍子!1、2、3"
, "フィナーレ!"
]
--
purificationSpeech_Beat =
[ "(ソソッ)"
, "弾き鳴らせ、愛の魂!ラブギターロッド!"
, "おいで、ソリー!"
, "(ソソッ)"
, "チェンジ!ソウルロッド!"
, "翔けめぐれ、トーンのリング!"
, "プリキュア!ハートフル・ビート・ロック!"
, "三拍子!1、2、3"
, "フィナーレ!"
]
purificationSpeech_Muse =
[ "おいで、シリー!"
, "(シシ~)"
, "「シ」の音符の、シャイニングメロディ!"
, "プリキュア!スパーリング・シャワー!!"
, "三拍子!1、2、3"
, "フィナーレ!"
]
--
purificationSpeech_Suite =
[ "いでよ、すべての音の源よ!"
, "届けましょう、希望のシンフォニー!"
, "プリキュア!スイートセッションアンサンブル!"
, "クレッシェンド!!"
, "フィナーレ!"
]
| null | https://raw.githubusercontent.com/igrep/typesafe-precure/fc94f5f2c0ca8d8acc0b8eabe890a85bc305d7b3/src/ACME/PreCure/Textbook/Suite/Words.hs | haskell |
#Suite_Pretty_Cure.E2.99.AA
-olfeXL8
-9PB0D6_Lc
| # OPTIONS_GHC -fno - warn - missing - signatures #
module ACME.PreCure.Textbook.Suite.Words where
groupName_Suite = "ハートキャッチプリキュア!"
girlName_Hibiki = "北条 響"
girlName_Kanade = "南野 奏"
girlName_Ellen = "黒川 エレン"
girlName_Ako = "調辺 アコ"
cureName_Melody = "キュアメロディ"
cureName_Rhythm = "キュアリズム"
cureName_Beat = "キュアビート"
cureName_Muse = "キュアミューズ"
introducesHerselfAs_Melody = "爪弾くは荒ぶる調べ! キュアメロディー!"
introducesHerselfAs_Rhythm = "爪弾くはたおやかな調べ! キュアリズム!"
introducesHerselfAs_Beat = "爪弾くは魂の調べ! キュアビート!"
introducesHerselfAs_Muse = "爪弾くは女神の調べ! キュアミューズ!"
modulation = "レッツプレイ! プリキュア・モジュレーション!"
resonateSuiteOfN :: Int -> String
resonateSuiteOfN n =
"届け!" ++ show n ++ "人の組曲! スイートプリキュア!"
transformationSpeech_Melody_Rhythm =
[ modulation
, introducesHerselfAs_Melody
, introducesHerselfAs_Rhythm
, resonateSuiteOfN 2
]
transformationSpeech_Beat = [modulation, introducesHerselfAs_Beat]
transformationSpeech_Muse = [modulation, introducesHerselfAs_Muse]
g
transformationSpeech_Melody_Rhythm_Beat =
[ modulation
, introducesHerselfAs_Melody
, introducesHerselfAs_Rhythm
, introducesHerselfAs_Beat
, resonateSuiteOfN 3
]
transformationSpeech_Suite =
[ modulation
, introducesHerselfAs_Melody
, introducesHerselfAs_Rhythm
, introducesHerselfAs_Beat
, introducesHerselfAs_Muse
, resonateSuiteOfN 4
]
purificationSpeech_Melody =
[ "(ミミ~!)"
, "奏でましょう、奇跡のメロディー!ミラクル・ベルティエ!"
, "おいで、ミリー!"
, "(ミミ~!)"
, "翔けめぐれ、トーンのリング!"
, "プリキュア!ミュージックロンド!"
, "三拍子!1、2、3"
, "フィナーレ!"
]
purificationSpeech_Rhythm =
[ "(ファファ~!)"
, "刻みましょう、大いなるリズム!ファンタスティック・ベルティエ!"
, "おいで、ファリー!"
, "(ファファ~!)"
, "翔けめぐれ、トーンのリング!"
, "プリキュア!ミュージックロンド!"
, "三拍子!1、2、3"
, "フィナーレ!"
]
purificationSpeech_Beat =
[ "(ソソッ)"
, "弾き鳴らせ、愛の魂!ラブギターロッド!"
, "おいで、ソリー!"
, "(ソソッ)"
, "チェンジ!ソウルロッド!"
, "翔けめぐれ、トーンのリング!"
, "プリキュア!ハートフル・ビート・ロック!"
, "三拍子!1、2、3"
, "フィナーレ!"
]
purificationSpeech_Muse =
[ "おいで、シリー!"
, "(シシ~)"
, "「シ」の音符の、シャイニングメロディ!"
, "プリキュア!スパーリング・シャワー!!"
, "三拍子!1、2、3"
, "フィナーレ!"
]
purificationSpeech_Suite =
[ "いでよ、すべての音の源よ!"
, "届けましょう、希望のシンフォニー!"
, "プリキュア!スイートセッションアンサンブル!"
, "クレッシェンド!!"
, "フィナーレ!"
]
|
9e2e84e0cc9258a93b8d462ca2dc1345948ad061c52be09b1b4dd65b2584308a | inventi/jenkins-rabbitz | api.clj | (ns karotz.api
(:require [clojure.xml :as xml]
[clojure.string :as st])
(:import java.io.IOException
java.net.URLEncoder
javax.crypto.Mac
javax.crypto.spec.SecretKeySpec
org.apache.commons.codec.binary.Base64
(java.util.logging Logger Level)))
(def log (Logger/getLogger "lt.inventi.karotz.api"))
(defn karotz-api [host]
(str "http://" host "/cgi-bin"))
(defn karotz-request
([karot url]
(let [url (str (karotz-api karot) "/" url)]
(future
(try
(slurp url)
(catch Exception e
(.log log Level/SEVERE "failed to send request to karotz" e))))
url)))
(defn move-ears
([karot]
(move-ears karot 5 3))
([karot left right]
(let [request-url (str "ears?left=" left "&right=" right)]
(karotz-request karot request-url))))
(defn say-out-loud [karot media-url]
(let [escaped-url (st/replace (str "sound?url=" media-url) #" " "%20")]
(karotz-request karot escaped-url)))
| null | https://raw.githubusercontent.com/inventi/jenkins-rabbitz/2a15a7df842ef9794df4a2932850867683d918d8/src/main/clojure/karotz/api.clj | clojure | (ns karotz.api
(:require [clojure.xml :as xml]
[clojure.string :as st])
(:import java.io.IOException
java.net.URLEncoder
javax.crypto.Mac
javax.crypto.spec.SecretKeySpec
org.apache.commons.codec.binary.Base64
(java.util.logging Logger Level)))
(def log (Logger/getLogger "lt.inventi.karotz.api"))
(defn karotz-api [host]
(str "http://" host "/cgi-bin"))
(defn karotz-request
([karot url]
(let [url (str (karotz-api karot) "/" url)]
(future
(try
(slurp url)
(catch Exception e
(.log log Level/SEVERE "failed to send request to karotz" e))))
url)))
(defn move-ears
([karot]
(move-ears karot 5 3))
([karot left right]
(let [request-url (str "ears?left=" left "&right=" right)]
(karotz-request karot request-url))))
(defn say-out-loud [karot media-url]
(let [escaped-url (st/replace (str "sound?url=" media-url) #" " "%20")]
(karotz-request karot escaped-url)))
| |
4710bf5b9e1dd5c754aac7b079aefae6238baf7cb0286ff5138e1ca7d0dbbc1c | erlang/otp | beam_lib_SUITE.erl | %%
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1997-2022. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(beam_lib_SUITE).
%%-define(debug, true).
-ifdef(debug).
-define(format(S, A), io:format(S, A)).
-define(line, put(line, ?LINE), ).
-define(config(X,Y), "./log_dir/").
-define(privdir, "beam_lib_SUITE_priv").
-else.
-include_lib("common_test/include/ct.hrl").
-define(format(S, A), ok).
-define(privdir, proplists:get_value(priv_dir, Conf)).
-endif.
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
normal/1, error/1, cmp/1, cmp_literals/1, strip/1, strip_add_chunks/1, otp_6711/1,
building/1, md5/1, encrypted_abstr/1, encrypted_abstr_file/1,
missing_debug_info_backend/1]).
-export([test_makedep_abstract_code/1]).
-export([init_per_testcase/2, end_per_testcase/2]).
%% Common Test callback: suite-wide configuration -- install the
%% standard CT hook and give every test case a two-minute timetrap.
suite() ->
    [{ct_hooks,[ts_install_cth]},
     {timetrap,{minutes,2}}].
%% Common Test callback: the list of test cases run by this suite.
all() ->
    [error, normal, cmp, cmp_literals, strip, strip_add_chunks, otp_6711,
     building, md5, encrypted_abstr, encrypted_abstr_file,
     missing_debug_info_backend, test_makedep_abstract_code
    ].
%% Common Test callback: no test case groups are defined.
groups() ->
    [].
%% Common Test callback: nothing to set up for the whole suite.
init_per_suite(Config) ->
    Config.
%% Common Test callback: clean up after the strip/strip_add_chunks
%% cases, which unstick sofs and load a stripped copy of it.  If sofs
%% is no longer sticky, reload the original module and stick it again.
end_per_suite(_Config) ->
    case code:is_sticky(sofs) of
        true ->
            ok;
        false ->
            false = code:purge(sofs),
            {module, sofs} = code:load_file(sofs),
            code:stick_mod(sofs),
            ok
    end.
%% Common Test callback: nothing to set up per group.
init_per_group(_GroupName, Config) ->
    Config.
%% Common Test callback: nothing to clean up per group.
end_per_group(_GroupName, Config) ->
    Config.
%% Common Test callback: nothing to set up per test case.
init_per_testcase(_Case, Config) ->
    Config.
%% Common Test callback: nothing to clean up per test case.
end_per_testcase(_Case, _Config) ->
    ok.
%% Read correct beam file.
%% Compiles priv_dir/simple.erl with debug_info and runs the full chunk
%% checks (do_normal/4), then recompiles with no_debug_info and checks
%% that the debug_info/abstract_code chunks are reported as absent.
%% The ets_count and pps() snapshots (pps/0 is defined later in this
%% file) are re-checked at the end to detect resource leaks.
normal(Conf) when is_list(Conf) ->
    PrivDir = ?privdir,
    Simple = filename:join(PrivDir, "simple"),
    Source = Simple ++ ".erl",
    BeamFile = Simple ++ ".beam",
    simple_file(Source),
    NoOfTables = erlang:system_info(ets_count),
    P0 = pps(),
    do_normal(Source, PrivDir, BeamFile, []),
    {ok,_} = compile:file(Source, [{outdir,PrivDir}, no_debug_info]),
    %% Without debug_info the Dbgi chunk holds {none,_} and the old
    %% abstract_code view reports no_abstract_code.
    {ok, {simple, [{debug_info, {debug_info_v1, erl_abstract_code, {none, _}}}]}} =
        beam_lib:chunks(BeamFile, [debug_info]),
    {ok, {simple, [{abstract_code, no_abstract_code}]}} =
        beam_lib:chunks(BeamFile, [abstract_code]),
    %% {ok,_} = compile:file(Source, [compressed | CompileFlags]),
    %% do_normal(BeamFile),
    file:delete(BeamFile),
    file:delete(Source),
    NoOfTables = erlang:system_info(ets_count),
    true = (P0 == pps()),
    ok.
%% Compile Source with debug_info (plus any extra Opts) into PrivDir
%% and run the chunk checks both on the file name and on the raw
%% binary contents of the resulting beam file.
do_normal(Source, PrivDir, BeamFile, Opts) ->
    Flags = [{outdir,PrivDir}, debug_info | Opts],
    {ok,_} = compile:file(Source, Flags),
    {ok, Bin} = file:read_file(BeamFile),
    do_normal(BeamFile, Opts),
    do_normal(Bin, Opts).
%% Exercise beam_lib:chunks/2,3 and beam_lib:version/1 on the compiled
%% 'simple' module, checking the exact contents of the decoded chunks.
%% BeamFile may be a file name or a binary; Opts is [] or
%% [no_utf8_atoms] and decides whether "Atom" or "AtU8" is expected.
do_normal(BeamFile, Opts) ->
    Imports = {imports, [{erlang, get_module_info, 1},
                         {erlang, get_module_info, 2},
                         {lists, member, 2}]},
    Exports = {exports, [{module_info, 0}, {module_info, 1}, {t, 0}]},
    Local = {locals, [{t, 1}]},
    {ok, {simple, [Imports]}} = beam_lib:chunks(BeamFile, [imports]),
    {ok, {simple, [{"ImpT",_Bin}]}} =
        beam_lib:chunks(BeamFile, ["ImpT"]),
    {ok, {simple, [Exports]}} = beam_lib:chunks(BeamFile, [exports]),
    {ok, {simple, [{attributes, [{vsn, [_]}]}]}} =
        beam_lib:chunks(BeamFile, [attributes]),
    {ok, {simple, [{compile_info, _}=CompileInfo]}} =
        beam_lib:chunks(BeamFile, [compile_info]),
    {ok, {simple, [Local]}} = beam_lib:chunks(BeamFile, [locals]),
    %% Several chunks at once; the result list follows the request order.
    {ok, {simple, [{attributes, [{vsn, [_]}]}, CompileInfo,
                   Exports, Imports, Local]}} =
        beam_lib:chunks(BeamFile, [attributes, compile_info, exports, imports, locals]),
    {ok, {simple, [{atoms, _Atoms}]}} =
        beam_lib:chunks(BeamFile, [atoms]),
    {ok, {simple, [{labeled_exports, _LExports}]}} =
        beam_lib:chunks(BeamFile, [labeled_exports]),
    {ok, {simple, [{labeled_locals, _LLocals}]}} =
        beam_lib:chunks(BeamFile, [labeled_locals]),
    {ok, {simple, [_Vsn]}} = beam_lib:version(BeamFile),
    {ok, {simple, [{abstract_code, {_, _}}]}} =
        beam_lib:chunks(BeamFile, [abstract_code]),
    {ok, {simple, [{debug_info, {debug_info_v1, erl_abstract_code, _}}]}} =
        beam_lib:chunks(BeamFile, [debug_info]),
    %% Test reading optional chunks.
    All = ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT", "AtU8"],
    {ok,{simple,Chunks}} = beam_lib:chunks(BeamFile, All, [allow_missing_chunks]),
    case {verify_simple(Chunks),Opts} of
        {{missing_chunk, AtomBin}, []} when is_binary(AtomBin) -> ok;
        {{AtomBin, missing_chunk}, [no_utf8_atoms]} when is_binary(AtomBin) -> ok
    end,
    %% 'allow_missing_chunks' should work for named chunks too.
    {ok, {simple, StrippedBeam}} = beam_lib:strip(BeamFile),
    {ok, {simple, MChunks}} = beam_lib:chunks(StrippedBeam,
                                              [attributes, locals],
                                              [allow_missing_chunks]),
    [{attributes, missing_chunk}, {locals, missing_chunk}] = MChunks,
    %% Make sure that reading the atom chunk works when the 'allow_missing_chunks'
    %% option is used.
    Some = ["Code",atoms,"ExpT","LitT"],
    {ok,{simple,SomeChunks}} = beam_lib:chunks(BeamFile, Some, [allow_missing_chunks]),
    [{"Code",<<_/binary>>},{atoms,[_|_]},{"ExpT",<<_/binary>>},{"LitT",missing_chunk}] =
        SomeChunks.
%% Check the chunk list read for 'simple' with allow_missing_chunks:
%% "Code"/"StrT"/"ImpT"/"ExpT" must be binaries, "FunT" and "LitT"
%% must be reported missing.  Returns the {"Atom","AtU8"} chunk pair
%% so the caller can decide which of the two atom chunks must exist.
verify_simple([{"Atom", AtomChunk},
               {"Code", Code},
               {"StrT", Str},
               {"ImpT", Imp},
               {"ExpT", Exp},
               {"FunT", missing_chunk},
               {"LitT", missing_chunk},
               {"AtU8", AtU8Chunk}])
  when is_binary(Code), is_binary(Str), is_binary(Imp), is_binary(Exp) ->
    {AtomChunk, AtU8Chunk}.
%% Read invalid beam files.
%% Exercises beam_lib's error handling on non-existing files, corrupted
%% copies (see do_error/2), truncated binaries and files too short to
%% be BEAM files.  verify/2, copy_file/2, chunk_info/1 and pps/0 are
%% helpers defined later in this file.
error(Conf) when is_list(Conf) ->
    PrivDir = ?privdir,
    Simple = filename:join(PrivDir, "simple"),
    Source = Simple ++ ".erl",
    BeamFile = Simple ++ ".beam",
    WrongFile = Simple ++ "foo.beam",
    simple_file(Source),
    NoOfTables = erlang:system_info(ets_count),
    P0 = pps(),
    {ok,_} = compile:file(Source, [{outdir,PrivDir},debug_info]),
    ACopy = filename:join(PrivDir, "a_copy.beam"),
    copy_file(BeamFile, ACopy),
    {ok, Binary} = file:read_file(BeamFile),
    copy_file(ACopy, WrongFile),
    verify(file_error, beam_lib:info("./does_simply_not_exist")),
    do_error(BeamFile, ACopy),
    do_error(Binary, ACopy),
    copy_file(ACopy, BeamFile),
    verify(unknown_chunk, beam_lib:chunks(BeamFile, [not_a_chunk])),
    ok = file:write_file(BeamFile, <<>>),
    verify(not_a_beam_file, beam_lib:info(BeamFile)),
    verify(not_a_beam_file, beam_lib:info(<<>>)),
    ok = file:write_file(BeamFile, <<"short">>),
    verify(not_a_beam_file, beam_lib:info(BeamFile)),
    verify(not_a_beam_file, beam_lib:info(<<"short">>)),
    %% Truncated binaries: the last chunk, and the Dbgi chunk, are cut off.
    {Binary1, _} = split_binary(Binary, byte_size(Binary)-10),
    LastChunk = last_chunk(Binary),
    verify(chunk_too_big, beam_lib:chunks(Binary1, [LastChunk])),
    Chunks = chunk_info(Binary),
    {value, {_, DebugInfoStart, _}} = lists:keysearch("Dbgi", 1, Chunks),
    {Binary2, _} = split_binary(Binary, DebugInfoStart),
    verify(chunk_too_big, beam_lib:chunks(Binary2, ["Dbgi"])),
    {Binary3, _} = split_binary(Binary, DebugInfoStart-4),
    verify(invalid_beam_file, beam_lib:chunks(Binary3, ["Dbgi"])),
    %% Instead of the 5:32 field below, there used to be control characters
    %% (including zero bytes) directly in the string. Because inferior programs
    %% such as sed don't like zero bytes in text files,
    %% we have eliminated them.
    ok = file:write_file(BeamFile, <<"FOR1",5:32,"BEAMfel">>),
    NoOfTables = erlang:system_info(ets_count),
    true = (P0 == pps()),
    file:delete(Source),
    file:delete(WrongFile),
    file:delete(BeamFile),
    file:delete(ACopy),
    ok.
%% Return the id of the chunk that is stored last in the given beam
%% binary, as reported by beam_lib:info/1.
last_chunk(Bin) ->
    Info = beam_lib:info(Bin),
    {chunks, ChunkList} = lists:keyfind(chunks, 1, Info),
    {Id, _Start, _Size} = lists:last(ChunkList),
    Id.
%% Patch single bytes at strategic offsets of the beam file and check
%% that beam_lib reports the expected error for each corruption.
%% ACopy is the pristine copy; set_byte/4 (defined later in this file)
%% recreates BeamFile from it with one byte changed.
do_error(BeamFile, ACopy) ->
    %% evil tests
    Chunks = chunk_info(BeamFile),
    {value, {_, AtomStart, _}} = lists:keysearch("AtU8", 1, Chunks),
    {value, {_, ImportStart, _}} = lists:keysearch("ImpT", 1, Chunks),
    {value, {_, DebugInfoStart, _}} = lists:keysearch("Dbgi", 1, Chunks),
    {value, {_, AttributesStart, _}} =
        lists:keysearch("Attr", 1, Chunks),
    {value, {_, CompileInfoStart, _}} =
        lists:keysearch("CInf", 1, Chunks),
    verify(missing_chunk, beam_lib:chunks(BeamFile, ["__"])),
    BF2 = set_byte(ACopy, BeamFile, ImportStart+4, 17),
    verify(invalid_chunk, beam_lib:chunks(BF2, [imports])),
    BF3 = set_byte(ACopy, BeamFile, AtomStart-6, 17),
    verify(missing_chunk, beam_lib:chunks(BF3, [imports])),
    BF4 = set_byte(ACopy, BeamFile, DebugInfoStart+10, 17),
    verify(invalid_chunk, beam_lib:chunks(BF4, [debug_info])),
    BF5 = set_byte(ACopy, BeamFile, AttributesStart+8, 17),
    verify(invalid_chunk, beam_lib:chunks(BF5, [attributes])),
    %% Corrupting the file header must make the whole file unreadable.
    BF6 = set_byte(ACopy, BeamFile, 1, 17),
    verify(not_a_beam_file, beam_lib:info(BF6)),
    BF7 = set_byte(ACopy, BeamFile, 9, 17),
    verify(not_a_beam_file, beam_lib:info(BF7)),
    BF8 = set_byte(ACopy, BeamFile, 13, 17),
    verify(missing_chunk, beam_lib:chunks(BF8, ["AtU8"])),
    BF9 = set_byte(ACopy, BeamFile, CompileInfoStart+8, 17),
    verify(invalid_chunk, beam_lib:chunks(BF9, [compile_info])).
%% Compare contents of BEAM files and directories.
%% Uses beam_lib:cmp/2, cmp_dirs/2 and diff_dirs/2 on modules that are
%% identical, differ in module name, or differ in code.  make_beam/3,
%% ver/2, verify/2 and delete_files/1 are defined later in this file.
cmp(Conf) when is_list(Conf) ->
    PrivDir = ?privdir,
    Dir1 = filename:join(PrivDir, "dir1"),
    Dir2 = filename:join(PrivDir, "dir2"),
    ok = file:make_dir(Dir1),
    ok = file:make_dir(Dir2),
    {SourceD1, BeamFileD1} = make_beam(Dir1, simple, member),
    {Source2D1, BeamFile2D1} = make_beam(Dir1, simple2, concat),
    {SourceD2, BeamFileD2} = make_beam(Dir2, simple, concat),
    NoOfTables = erlang:system_info(ets_count),
    P0 = pps(),
    %% cmp
    ok = beam_lib:cmp(BeamFileD1, BeamFileD1),
    ver(modules_different, beam_lib:cmp(BeamFileD1, BeamFile2D1)),
    ver(chunks_different, beam_lib:cmp(BeamFileD1, BeamFileD2)),
    verify(file_error, beam_lib:cmp(foo, bar)),
    {ok, B1} = file:read_file(BeamFileD1),
    ok = beam_lib:cmp(B1, BeamFileD1),
    {ok, B2} = file:read_file(BeamFileD2),
    ver(chunks_different, beam_lib:cmp(B1, B2)),
    %% cmp_dirs
    {[],[],[]} = beam_lib:cmp_dirs(Dir1, Dir1),
    true = {[BeamFile2D1], [], [{BeamFileD1,BeamFileD2}]} ==
        beam_lib:cmp_dirs(Dir1, Dir2),
    true = {[], [BeamFile2D1], [{BeamFileD2,BeamFileD1}]} ==
        beam_lib:cmp_dirs(Dir2, Dir1),
    ver(not_a_directory, beam_lib:cmp_dirs(foo, bar)),
    %% diff_dirs
    ok = beam_lib:diff_dirs(Dir1, Dir1),
    ver(not_a_directory, beam_lib:diff_dirs(foo, bar)),
    true = (P0 == pps()),
    NoOfTables = erlang:system_info(ets_count),
    delete_files([SourceD1, BeamFileD1, Source2D1,
                  BeamFile2D1, SourceD2, BeamFileD2]),
    file:del_dir(Dir1),
    file:del_dir(Dir2),
    ok.
%% Compare contents of BEAM files having literals.
%% Two versions of 'simple' compiled with different constants must
%% compare as chunks_different, both by file name and by binary.
cmp_literals(Conf) when is_list(Conf) ->
    PrivDir = ?privdir,
    Dir1 = filename:join(PrivDir, "dir1"),
    Dir2 = filename:join(PrivDir, "dir2"),
    ok = file:make_dir(Dir1),
    ok = file:make_dir(Dir2),
    {SourceD1, BeamFileD1} = make_beam(Dir1, simple, constant),
    {SourceD2, BeamFileD2} = make_beam(Dir2, simple, constant2),
    NoOfTables = erlang:system_info(ets_count),
    P0 = pps(),
    %% cmp
    ok = beam_lib:cmp(BeamFileD1, BeamFileD1),
    ver(chunks_different, beam_lib:cmp(BeamFileD1, BeamFileD2)),
    {ok, B1} = file:read_file(BeamFileD1),
    ok = beam_lib:cmp(B1, BeamFileD1),
    {ok, B2} = file:read_file(BeamFileD2),
    ver(chunks_different, beam_lib:cmp(B1, B2)),
    true = (P0 == pps()),
    NoOfTables = erlang:system_info(ets_count),
    delete_files([SourceD1, BeamFileD1, SourceD2, BeamFileD2]),
    file:del_dir(Dir1),
    file:del_dir(Dir2),
    ok.
%% Strip BEAM files.
%% Strips single binaries, single files and lists of files, checks that
%% the stripped result has fewer chunks but that the remaining chunks
%% are unchanged, and that every stripped module can still be loaded.
%% Also checks that line number information survives stripping.
strip(Conf) when is_list(Conf) ->
    PrivDir = ?privdir,
    {SourceD1, BeamFileD1} = make_beam(PrivDir, simple, member),
    {Source2D1, BeamFile2D1} = make_beam(PrivDir, simple2, concat),
    {Source3D1, BeamFile3D1} = make_beam(PrivDir, make_fun, make_fun),
    {Source4D1, BeamFile4D1} = make_beam(PrivDir, constant, constant),
    {Source5D1, BeamFile5D1} = make_beam(PrivDir, lines, lines),
    NoOfTables = erlang:system_info(ets_count),
    P0 = pps(),
    %% strip binary
    verify(not_a_beam_file, beam_lib:strip(<<>>)),
    {ok, B1} = file:read_file(BeamFileD1),
    {ok, {simple, NB1}} = beam_lib:strip(B1),
    BId1 = chunk_ids(B1),
    NBId1 = chunk_ids(NB1),
    true = length(BId1) > length(NBId1),
    compare_chunks(B1, NB1, NBId1),
    %% strip file
    verify(file_error, beam_lib:strip(foo)),
    {ok, {simple, _}} = beam_lib:strip(BeamFileD1),
    compare_chunks(NB1, BeamFileD1, NBId1),
    %% strip_files
    {ok, B2} = file:read_file(BeamFile2D1),
    {ok, [{simple,_},{simple2,_}]} = beam_lib:strip_files([B1, B2]),
    {ok, [{simple,_},{simple2,_},{make_fun,_},{constant,_}]} =
        beam_lib:strip_files([BeamFileD1, BeamFile2D1, BeamFile3D1, BeamFile4D1]),
    %% strip a complex module
    OrigSofsPath = code:where_is_file("sofs.beam"),
    BeamFileSofs = filename:join(PrivDir,"sofs.beam"),
    file:copy(OrigSofsPath, BeamFileSofs),
    {ok, {sofs,_}} = beam_lib:strip(BeamFileSofs),
    code:unstick_mod(sofs),
    false = code:purge(sofs),
    %% check that each module can be loaded.
    {module, simple} = code:load_abs(filename:rootname(BeamFileD1)),
    {module, simple2} = code:load_abs(filename:rootname(BeamFile2D1)),
    {module, make_fun} = code:load_abs(filename:rootname(BeamFile3D1)),
    {module, constant} = code:load_abs(filename:rootname(BeamFile4D1)),
    {module, sofs} = code:load_abs(filename:rootname(BeamFileSofs)),
    %% check that line number information is still present after stripping
    {module, lines} = code:load_abs(filename:rootname(BeamFile5D1)),
    Info = get_line_number_info(),
    true = code:delete(lines),
    false = code:purge(lines),
    {ok, {lines,BeamFile5D1}} = beam_lib:strip(BeamFile5D1),
    {module, lines} = code:load_abs(filename:rootname(BeamFile5D1)),
    Info = get_line_number_info(),
    true = (P0 == pps()),
    NoOfTables = erlang:system_info(ets_count),
    delete_files([SourceD1, BeamFileD1,
                  Source2D1, BeamFile2D1,
                  Source3D1, BeamFile3D1,
                  Source4D1, BeamFile4D1,
                  Source5D1, BeamFile5D1,
                  BeamFileSofs]),
    false = code:purge(sofs),
    {module, sofs} = code:load_file(sofs),
    code:stick_mod(sofs),
    ok.
%% Like strip/1, but uses beam_lib:strip/2 and strip_files/2 with an
%% explicit list of extra chunks to preserve; when all normally
%% stripped chunks are listed, the result must keep every chunk of the
%% original file.
strip_add_chunks(Conf) when is_list(Conf) ->
    PrivDir = ?privdir,
    {SourceD1, BeamFileD1} = make_beam(PrivDir, simple, member),
    {Source2D1, BeamFile2D1} = make_beam(PrivDir, simple2, concat),
    {Source3D1, BeamFile3D1} = make_beam(PrivDir, make_fun, make_fun),
    {Source4D1, BeamFile4D1} = make_beam(PrivDir, constant, constant),
    {Source5D1, BeamFile5D1} = make_beam(PrivDir, lines, lines),
    NoOfTables = erlang:system_info(ets_count),
    P0 = pps(),
    %% strip binary
    verify(not_a_beam_file, beam_lib:strip(<<>>)),
    {ok, B1} = file:read_file(BeamFileD1),
    {ok, {simple, NB1}} = beam_lib:strip(B1),
    BId1 = chunk_ids(B1),
    NBId1 = chunk_ids(NB1),
    true = length(BId1) > length(NBId1),
    compare_chunks(B1, NB1, NBId1),
    %% Keep all the extra chunks
    ExtraChunks = ["Abst", "Dbgi", "Attr", "CInf", "LocT", "Atom"],
    {ok, {simple, AB1}} = beam_lib:strip(B1, ExtraChunks),
    ABId1 = chunk_ids(AB1),
    true = length(BId1) == length(ABId1),
    compare_chunks(B1, AB1, ABId1),
    %% strip file - Keep extra chunks
    verify(file_error, beam_lib:strip(foo)),
    {ok, {simple, _}} = beam_lib:strip(BeamFileD1, ExtraChunks),
    compare_chunks(B1, BeamFileD1, ABId1),
    %% strip_files
    {ok, B2} = file:read_file(BeamFile2D1),
    {ok, [{simple,_},{simple2,_}]} = beam_lib:strip_files([B1, B2], ExtraChunks),
    {ok, [{simple,_},{simple2,_},{make_fun,_},{constant,_}]} =
        beam_lib:strip_files([BeamFileD1, BeamFile2D1, BeamFile3D1, BeamFile4D1], ExtraChunks),
    %% strip a complex module
    OrigSofsPath = code:where_is_file("sofs.beam"),
    BeamFileSofs = filename:join(PrivDir,"sofs.beam"),
    file:copy(OrigSofsPath, BeamFileSofs),
    {ok, {sofs,_}} = beam_lib:strip(BeamFileSofs, ExtraChunks),
    code:unstick_mod(sofs),
    false = code:purge(sofs),
    %% check that each module can be loaded.
    {module, simple} = code:load_abs(filename:rootname(BeamFileD1)),
    {module, simple2} = code:load_abs(filename:rootname(BeamFile2D1)),
    {module, make_fun} = code:load_abs(filename:rootname(BeamFile3D1)),
    {module, constant} = code:load_abs(filename:rootname(BeamFile4D1)),
    {module, sofs} = code:load_abs(filename:rootname(BeamFileSofs)),
    %% check that line number information is still present after stripping
    {module, lines} = code:load_abs(filename:rootname(BeamFile5D1)),
    Info = get_line_number_info(),
    false = code:purge(lines),
    true = code:delete(lines),
    {ok, {lines,BeamFile5D1}} = beam_lib:strip(BeamFile5D1),
    {module, lines} = code:load_abs(filename:rootname(BeamFile5D1)),
    Info = get_line_number_info(),
    true = (P0 == pps()),
    NoOfTables = erlang:system_info(ets_count),
    delete_files([SourceD1, BeamFileD1,
                  Source2D1, BeamFile2D1,
                  Source3D1, BeamFile3D1,
                  Source4D1, BeamFile4D1,
                  Source5D1, BeamFile5D1,
                  BeamFileSofs]),
    false = code:purge(sofs),
    {module, sofs} = code:load_file(sofs),
    code:stick_mod(sofs),
    ok.
%% Regression test for OTP-6711: beam_lib functions exit with
%% function_clause on arguments of the wrong type, while
%% strip_release/1 returns a proper {error,_,{file_error,_,_}} tuple
%% when a beam file in the release cannot be written.
otp_6711(Conf) when is_list(Conf) ->
    {'EXIT',{function_clause,_}} = (catch {a, beam_lib:info(3)}),
    {'EXIT',{function_clause,_}} = (catch {a, beam_lib:chunks(a, b)}),
    {'EXIT',{function_clause,_}} = (catch {a, beam_lib:chunks(a,b,c)}),
    {'EXIT',{function_clause,_}} = (catch {a, beam_lib:all_chunks(3)}),
    {'EXIT',{function_clause,_}} = (catch {a, beam_lib:cmp(3,4)}),
    {'EXIT',{function_clause,_}} = (catch {a, beam_lib:strip(3)}),
    {'EXIT',{function_clause,_}} =
        (catch {a, beam_lib:strip_files([3])}),
    %% Build a minimal release layout dir/lib/app/ebin with one module.
    PrivDir = ?privdir,
    Dir = filename:join(PrivDir, "dir"),
    Lib = filename:join(Dir, "lib"),
    App = filename:join(Lib, "app"),
    EBin = filename:join(App, "ebin"),
    ok = file:make_dir(Dir),
    ok = file:make_dir(Lib),
    ok = file:make_dir(App),
    ok = file:make_dir(EBin),
    {SourceD, BeamFileD} = make_beam(EBin, simple, member),
    unwritable(BeamFileD),
    %% There is no way that strip_release can fail with
    %% function_clause or something like that...
    {error,_,{file_error,_,_}} = beam_lib:strip_release(Dir),
    delete_files([SourceD, BeamFileD]),
    file:del_dir(EBin),
    file:del_dir(App),
    file:del_dir(Lib),
    file:del_dir(Dir),
    ok.
-include_lib("kernel/include/file.hrl").
%% Make Fname unwritable for the owner by clearing the user write bit
%% (0200) of its mode.
unwritable(Fname) ->
    {ok, Info} = file:read_file_info(Fname),
    %% Clear the bit with band/bnot instead of subtracting 8#00200,
    %% which would corrupt the mode if the write bit was already clear.
    Mode = Info#file_info.mode band (bnot 8#00200),
    file:write_file_info(Fname, Info#file_info{mode = Mode}).
%% Testing building of BEAM files.
%% Reads all chunks of a compiled module, rebuilds a new beam file with
%% the chunks in reversed order via beam_lib:build_module/1, and checks
%% that both files contain the same chunks and that every chunk can be
%% retrieved individually from both files.
building(Conf) when is_list(Conf) ->
    PrivDir = ?privdir,
    Dir1 = filename:join(PrivDir, "b_dir1"),
    Dir2 = filename:join(PrivDir, "b_dir2"),
    ok = file:make_dir(Dir1),
    ok = file:make_dir(Dir2),
    {SourceD1, BeamFileD1} = make_beam(Dir1, building, member),
    NoOfTables = erlang:system_info(ets_count),
    P0 = pps(),
    %% read all chunks
    ChunkIds = chunk_ids(BeamFileD1),
    {ok, _Mod, Chunks} = beam_lib:all_chunks(BeamFileD1),
    ChunkIds = lists:map(fun ({Id, Data}) when is_binary(Data) -> Id
                         end, Chunks),
    %% write a new beam file, with reversed chunk order
    BeamFileD2 = filename:join(Dir2, "building.beam"),
    {ok,RevBeam} = beam_lib:build_module(lists:reverse(Chunks)),
    file:write_file(BeamFileD2, RevBeam),
    %% compare files
    compare_chunks(BeamFileD1, BeamFileD2, ChunkIds),
    %% test that we can retrieve a chunk before the atom table
    %% (actually, try to retrieve all chunks)
    lists:foreach(fun(Id) ->
                          {ok, {building, [{Id, _Data}]}} =
                              beam_lib:chunks(BeamFileD1, [Id])
                  end, ChunkIds),
    lists:foreach(fun(Id) ->
                          {ok, {building, [{Id, _Data}]}} =
                              beam_lib:chunks(BeamFileD2, [Id])
                  end, ChunkIds),
    true = (P0 == pps()),
    NoOfTables = erlang:system_info(ets_count),
    delete_files([SourceD1, BeamFileD1, BeamFileD2]),
    file:del_dir(Dir1),
    file:del_dir(Dir2),
    ok.
%% Compare beam_lib:md5/1 and code:module_md5/1.
%% Runs the comparison over every beam file found by collect_beams/0.
md5(Conf) when is_list(Conf) ->
    Beams = collect_beams(),
    io:format("Found ~w beam files", [length(Beams)]),
    md5_1(Beams).
%% For every beam file name in the list, check that beam_lib:md5/1
%% computes the same module MD5 as code:module_md5/1.
md5_1(Names) ->
    lists:foreach(
      fun(Name) ->
              {ok, Read} = file:read_file(Name),
              Beam = maybe_uncompress(Read),
              {ok, {Mod, MD5}} = beam_lib:md5(Beam),
              {Mod, MD5} = {Mod, code:module_md5(Beam)}
      end, Names),
    ok.
%% Collect the beam files of every directory in the code path and of
%% the sibling *_test directories next to this suite's beam file.
collect_beams() ->
    SuperDir = filename:dirname(filename:dirname(code:which(?MODULE))),
    TestDirs = filelib:wildcard(filename:join([SuperDir,"*_test"])),
    AbsDirs = [filename:absname(X) || X <- code:get_path()],
    collect_beams_1(AbsDirs ++ TestDirs).
%% Collect all *.beam files found directly in each of the given
%% directories, preserving the directory order.
collect_beams_1(Dirs) ->
    lists:append([filelib:wildcard(filename:join(Dir, "*.beam")) ||
                     Dir <- Dirs]).
%% Return the beam code unchanged if it starts with the uncompressed
%% IFF marker "FOR1"; otherwise treat it as gzip-compressed data and
%% uncompress it.
maybe_uncompress(Beam) ->
    case Beam of
        <<"FOR1", _/binary>> -> Beam;
        _ -> zlib:gunzip(Beam)
    end.
%% Test encrypted abstract format.
%% The real work is in encrypted_abstr_1/1; run_if_crypto_works/1
%% (defined later in this file) decides whether crypto is usable.
encrypted_abstr(Conf) when is_list(Conf) ->
    run_if_crypto_works(fun() -> encrypted_abstr_1(Conf) end).
%% Compile 'simple' with an encrypted debug_info chunk
%% ({debug_info_key,Key}) and run the crypto key fun checks on both
%% the file name and the raw binary.
encrypted_abstr_1(Conf) ->
    PrivDir = ?privdir,
    Simple = filename:join(PrivDir, "simple"),
    Source = Simple ++ ".erl",
    BeamFile = Simple ++ ".beam",
    simple_file(Source),
    %% Avoid getting an extra port when crypto starts erl_ddll.
    erl_ddll:start(),
    NoOfTables = erlang:system_info(ets_count),
    P0 = pps(),
    Key = "#a_crypto_key",
    CompileFlags = [{outdir,PrivDir}, debug_info, {debug_info_key,Key}],
    {ok,_} = compile:file(Source, CompileFlags),
    {ok, Binary} = file:read_file(BeamFile),
    do_encrypted_abstr(BeamFile, Key),
    do_encrypted_abstr(Binary, Key),
    ok = crypto:stop(), %To get rid of extra ets tables.
    file:delete(BeamFile),
    file:delete(Source),
    NoOfTables = erlang:system_info(ets_count),
    true = (P0 == pps()),
    ok.
%% Check reading the encrypted debug_info of 'simple': without a key it
%% must fail, invalid crypto key funs must be rejected, and both the
%% simplest fun and an ets-backed fun must make the abstract code
%% readable once installed (after clearing any previous fun).
do_encrypted_abstr(Beam, Key) ->
    verify(key_missing_or_invalid, beam_lib:chunks(Beam, [debug_info])),
    %% The raw chunk "Dbgi" can still be read even without a key.
    {ok,{simple,[{"Dbgi",Dbgi}]}} = beam_lib:chunks(Beam, ["Dbgi"]),
    <<0:8,8:8,"des3_cbc",_/binary>> = Dbgi,
    %% Try some invalid funs.
    bad_fun(badfun, fun() -> ok end),
    bad_fun(badfun, {a,b}),
    bad_fun(blurf),
    {function_clause,_} = bad_fun(fun(glurf) -> ok end),
    %% Funs that return something strange.
    bad_fun(badfun, fun(init) -> {ok,fun() -> ok end} end),
    glurf = bad_fun(fun(init) -> {error,glurf} end),
    %% Try clearing (non-existing fun).
    undefined = beam_lib:clear_crypto_key_fun(),
    %% Install a fun which cannot retrieve a key.
    ok = beam_lib:crypto_key_fun(fun(init) -> ok end),
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    %% Install a fun which returns an incorrect key.
    {ok,_} = beam_lib:clear_crypto_key_fun(),
    ok = beam_lib:crypto_key_fun(simple_crypto_fun("wrong key...")),
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    %% Installing a new key fun is not possible without clearing the old.
    verify(exists, beam_lib:crypto_key_fun(simple_crypto_fun(Key))),
    %% Install the simplest possible working key fun.
    {ok,_} = beam_lib:clear_crypto_key_fun(),
    ok = beam_lib:crypto_key_fun(simple_crypto_fun(Key)),
    verify_abstract(Beam),
    {ok,{simple,[{"Dbgi",Dbgi}]}} = beam_lib:chunks(Beam, ["Dbgi"]),
    %% Installing a new key fun is not possible without clearing the old.
    verify(exists, beam_lib:crypto_key_fun(ets_crypto_fun(Key))),
    %% Install a key using an ets table.
    {ok,_} = beam_lib:clear_crypto_key_fun(),
    ok = beam_lib:crypto_key_fun(ets_crypto_fun(Key)),
    verify_abstract(Beam),
    {ok,{simple,[{"Dbgi",Dbgi}]}} = beam_lib:chunks(Beam, ["Dbgi"]),
    {ok,cleared} = beam_lib:clear_crypto_key_fun(),
    %% Try to force a stop/start race.
    start_stop_race(10000),
    ok.
%% Repeatedly install an invalid crypto key fun (always rejected with
%% {error,_}) and clear it again N times, trying to provoke a
%% stop/start race in beam_lib's crypto key handling.
start_stop_race(0) ->
    ok;
start_stop_race(N) ->
    {error,_} = beam_lib:crypto_key_fun(bad_fun),
    undefined = beam_lib:clear_crypto_key_fun(),
    start_stop_race(N-1).
%% Try to install F as crypto key fun, assert that the installation is
%% rejected, and return the error reason for the caller to inspect.
bad_fun(F) ->
    {error, Reason} = beam_lib:crypto_key_fun(F),
    Reason.
%% Try to install F as crypto key fun and check via verify/2 (defined
%% later in this file) that it fails with reason S.
bad_fun(S, F) ->
    verify(S, beam_lib:crypto_key_fun(F)).
%% Check that both the abstract_code view and the raw debug_info chunk
%% of the encrypted 'simple' module can be decoded with the currently
%% available key.
verify_abstract(Beam) ->
    {ok, {simple, [AbstChunk, DbgiChunk]}} =
        beam_lib:chunks(Beam, [abstract_code, debug_info]),
    {abstract_code, {raw_abstract_v1, _}} = AbstChunk,
    {debug_info, {debug_info_v1, erl_abstract_code, _}} = DbgiChunk.
%% The simplest possible crypto key fun: answers ok to init and hands
%% out Key for the debug_info of the 'simple' module.
simple_crypto_fun(Key) ->
    fun(init) ->
            ok;
       ({debug_info, des3_cbc, simple, _}) ->
            Key
    end.
%% A crypto key fun that stores the key in a private ets table created
%% at init time.  The returned inner fun looks the key up on demand
%% and deletes the table when asked to clear.
ets_crypto_fun(Key) ->
    fun(init) ->
            Tab = ets:new(beam_lib_SUITE_keys, [private, set]),
            true = ets:insert(Tab, {key, Key}),
            {ok, fun({debug_info, des3_cbc, simple, _}) ->
                         [{key, Val}] = ets:lookup(Tab, key),
                         Val;
                    (clear) ->
                         ets:delete(Tab),
                         cleared
                 end}
    end.
%% Test encrypted abstract format with the key in a .erlang.crypt file.
%% The real work is in encrypted_abstr_file_1/1; run_if_crypto_works/1
%% (defined later in this file) decides whether crypto is usable.
encrypted_abstr_file(Conf) when is_list(Conf) ->
    run_if_crypto_works(fun() -> encrypted_abstr_file_1(Conf) end).
%% Like encrypted_abstr_1/1, but the key is taken from a .erlang.crypt
%% file written in the current directory (priv_dir during the test)
%% instead of an explicitly installed crypto key fun.
encrypted_abstr_file_1(Conf) ->
    PrivDir = ?privdir,
    Simple = filename:join(PrivDir, "simple"),
    Source = Simple ++ ".erl",
    BeamFile = Simple ++ ".beam",
    simple_file(Source),
    %% Avoid getting an extra port when crypto starts erl_ddll.
    erl_ddll:start(),
    NoOfTables = erlang:system_info(ets_count),
    P0 = pps(),
    Key = "Long And niCe 99Krypto Key",
    CompileFlags = [{outdir,PrivDir}, debug_info, {debug_info_key,Key}],
    {ok,_} = compile:file(Source, CompileFlags),
    {ok, Binary} = file:read_file(BeamFile),
    {ok,OldCwd} = file:get_cwd(),
    ok = file:set_cwd(PrivDir),
    do_encrypted_abstr_file(BeamFile, Key),
    do_encrypted_abstr_file(Binary, Key),
    ok = file:set_cwd(OldCwd),
    ok = crypto:stop(), %To get rid of extra ets tables.
    file:delete(filename:join(PrivDir, ".erlang.crypt")),
    file:delete(BeamFile),
    file:delete(Source),
    NoOfTables = erlang:system_info(ets_count),
    true = (P0 == pps()),
    ok.
%% Exercise decryption of the debug_info chunk with the key supplied
%% via a .erlang.crypt file in the current directory. The sequence is
%% order-dependent: a failed lookup caches the (wrong) key fun until
%% beam_lib:clear_crypto_key_fun/0 is called.
do_encrypted_abstr_file(Beam, Key) ->
    %% No key.
    write_crypt_file(""),
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    %% A wrong key.
    write_crypt_file(["[{debug_info,des3_cbc,simple,\"A Wrong Key\"}].\n"]),
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    %% Write correct key...
    write_crypt_file(["[{debug_info,des3_cbc,simple,\"",Key,"\"}].\n"]),
    %% ... but the fun with the wrong key is still there.
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    %% Clear the fun. Now it should work.
    {ok,_} = beam_lib:clear_crypto_key_fun(),
    verify_abstract(Beam),
    verify_abstract(Beam),
    %% Once loaded, the key is cached, so the key file may be deleted.
    ok = file:delete(".erlang.crypt"),
    verify_abstract(Beam),
    %% Clear, otherwise the second pass will fail.
    {ok,_} = beam_lib:clear_crypto_key_fun(),
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    ok.
%% Check that a module compiled with both debug_info and
%% makedep_side_effect still gets a debug_info chunk from which
%% erl_abstract_code can reconstruct Core Erlang.
test_makedep_abstract_code(Conf) ->
    PrivDir = ?privdir,
    ErlFile = filename:join(PrivDir, "hello.erl"),
    BeamFile = filename:join(PrivDir, "hello.beam"),
    file:write_file(ErlFile,
                    ["-module(hello).\n",
                     "-export([start/0]).\n",
                     "start() -> ok.\n"
                    ]),
    DependDir = filename:join(PrivDir, "depend"),
    file:make_dir(DependDir),
    DependFile = filename:join(DependDir,"hello.d"),
    %% Compile with dependency-file generation as a side effect.
    compile:file(ErlFile,
                 [debug_info,
                  makedep_side_effect,
                  {outdir, PrivDir},
                  {makedep_output, DependFile}]),
    file:delete(DependFile),
    file:del_dir(DependDir),
    case beam_lib:chunks(BeamFile, [debug_info]) of
        {ok, {Module, [{debug_info, {debug_info_v1,
                                     _Backend=erl_abstract_code,Metadata}}]}} ->
            SrcOpts = [no_copt, to_core, binary, return_errors,
                       no_inline, strict_record_tests, strict_record_updates,
                       dialyzer, no_spawn_compiler_process],
            %% The backend must be able to produce Core Erlang from the
            %% stored metadata.
            {ok,_} = erl_abstract_code:debug_info(core_v1, Module, Metadata,
                                                  SrcOpts),
            ok
    end.
%% Write the .erlang.crypt key file in the current directory and echo
%% its contents to the test log.
write_crypt_file(Contents) ->
    Bin = list_to_binary([Contents]),
    io:format("~s\n", [binary_to_list(Bin)]),
    ok = file:write_file(".erlang.crypt", Bin).
: Do n't crash when the backend for generating the abstract code
%% is missing.
%% Build a beam file whose "Dbgi" chunk names a non-existing
%% debug_info backend and check that beam_lib returns an error
%% instead of crashing.
missing_debug_info_backend(Conf) ->
    PrivDir = ?privdir,
    Simple = filename:join(PrivDir, "simple"),
    Source = Simple ++ ".erl",
    BeamFile = Simple ++ ".beam",
    simple_file(Source),
    %% Create a debug_info chunk with a non-existing backend.
    {ok,simple} = compile:file(Source, [{outdir,PrivDir}]),
    {ok,simple,All0} = beam_lib:all_chunks(BeamFile),
    FakeBackend = definitely__not__an__existing__backend,
    FakeDebugInfo = {debug_info_v1, FakeBackend, nothing_here},
    All = lists:keyreplace("Dbgi", 1, All0, {"Dbgi", term_to_binary(FakeDebugInfo)}),
    {ok,NewBeam} = beam_lib:build_module(All),
    ok = file:write_file(BeamFile, NewBeam),
    %% beam_lib should not crash, but return an error.
    verify(missing_backend, beam_lib:chunks(BeamFile, [abstract_code])),
    file:delete(BeamFile),
    ok.
%% Assert that two beam files (or binaries) have identical contents
%% for each of the given chunk ids.
compare_chunks(FileA, FileB, ChunkIds) ->
    Read = fun(File) ->
                   {ok, {_Mod, Chunks}} = beam_lib:chunks(File, ChunkIds),
                   Chunks
           end,
    true = Read(FileA) == Read(FileB).
%% Return the ids of all chunks in a beam file, in file order.
chunk_ids(File) ->
    [Id || {Id,_Start,_Size} <- chunk_info(File)].
%% Return the {Id,Start,Size} chunk list from beam_lib:info/1.
chunk_info(File) ->
    {chunks, Chunks} = lists:keyfind(chunks, 1, beam_lib:info(File)),
    Chunks.
%% Generate a source file for Module (with body variant F, see
%% simple_file/3), compile it into Dir with debug_info, and return
%% the paths of the source and the beam file.
make_beam(Dir, Module, F) ->
    Base = filename:join(Dir, atom_to_list(Module)),
    Source = Base ++ ".erl",
    BeamFile = Base ++ ".beam",
    file:delete(BeamFile),
    simple_file(Source, Module, F),
    {ok, _} = compile:file(Source, [{outdir,Dir}, debug_info, report]),
    {Source, BeamFile}.
%% Replace the byte at offset Pos with Byte. For a binary, return the
%% patched copy; for a file, first restore it from Backup, then patch
%% it in place and return the file name.
set_byte(_Backup, Bin, Pos, Byte) when is_binary(Bin) ->
    <<Before:Pos/binary, _Old:1/binary, After/binary>> = Bin,
    <<Before/binary, Byte:8, After/binary>>;
set_byte(Backup, File, Pos, Byte) ->
    copy_file(Backup, File),
    set_byte(File, Pos, Byte),
    File.
%% Destructively overwrite the byte at offset Pos in File with Byte.
set_byte(File, Pos, Byte) ->
    {ok, Dev} = file:open(File, [read, write]),
    {ok, _} = file:position(Dev, Pos),
    ok = file:write(Dev, [Byte]),
    file:close(Dev).
%% Copy Src to Dest and make the destination writable so that it can
%% be patched afterwards (see set_byte/4).
copy_file(Src, Dest) ->
    {ok, _BytesCopied} = file:copy(Src, Dest),
    ok = file:change_mode(Dest, 8#0666).
%% Best-effort deletion of a list of files; individual errors (e.g.
%% a file that does not exist) are ignored.
delete_files(Files) ->
    _ = [file:delete(F) || F <- Files],
    ok.
%% Assert that Result is an error tuple whose reason matches the
%% expected symbol; see verify_error/2 for the matching rules.
verify(Expected, {error, beam_lib, Reason}) ->
    verify_error(Expected, Reason);
verify(Expected, {error, Reason}) ->
    verify_error(Expected, Reason).
%% Check an error reason R against the expected symbol S: either the
%% reason is S itself or a tuple tagged with S. Additionally
%% sanity-check beam_lib:format_error/1 for the reason.
verify_error(S, R) ->
    if
        S =:= R -> ok;
        true -> [S|_] = tuple_to_list(R)
    end,
    %% Most formatted messages begin with "./simple.beam:" or "<<...".
    FM = string:str(lists:flatten(beam_lib:format_error(R)), "simpl") > 0,
    BM = string:str(lists:flatten(beam_lib:format_error(R)), "<<") > 0,
    %% Also make sure that formatted message is not just the term printed.
    Handled = beam_lib:format_error(R) =/= io_lib:format("~p~n", [R]),
    %% FM and BM are already booleans. The previous "(FM > 0) or (BM > 0)"
    %% was always true, because in Erlang's term order any atom (including
    %% 'false') compares greater than any number, so the check never failed.
    true = (FM or BM) and Handled.
%% Check that the error reason is a tuple tagged S and that
%% beam_lib:format_error/1 produces a real message for it rather than
%% just printing the raw term (which would start with "{").
ver(S, {error, beam_lib, Reason}) ->
    [S|_] = tuple_to_list(Reason),
    Message = lists:flatten(beam_lib:format_error(Reason)),
    case Message of
        [${ | _] ->
            ct:fail({bad_format_error, Reason});
        _ ->
            ok
    end.
%% Snapshot of process-external resources (currently only open ports),
%% used to detect leaks across test cases.
pps() ->
    Ports = erlang:ports(),
    {Ports}.
%% Generate the default source file defining module 'simple'.
simple_file(Path) ->
    simple_file(Path, simple).
%% Generate a source file for Module with the default 'member' body.
simple_file(Path, Module) ->
    simple_file(Path, Module, member).
%% Write a small source file for Module to File. The third argument
%% selects the body variant: 'make_fun' (returns a closure),
%% 'constant'/'constant2' (differing literal tuples, used by the
%% cmp_literals test), 'lines' (multi-line, for line-number checks),
%% or any lists-function name F (the default path, calling lists:F/2).
%% NOTE: the string contents are significant byte-for-byte — the
%% constant/constant2 variants must differ only in their literals.
simple_file(File, Module, make_fun) ->
    B = list_to_binary(["-module(", atom_to_list(Module), "). "
                        "-export([t/1]). "
                        "t(A) -> "
                        "    fun(X) -> A+X end. "]),
    ok = file:write_file(File, B);
simple_file(File, Module, constant) ->
    B = list_to_binary(["-module(", atom_to_list(Module), "). "
                        "-export([t/1]). "
                        "t(A) -> "
                        "    {a,b,[2,3],c,d}. "]),
    ok = file:write_file(File, B);
simple_file(File, Module, constant2) ->
    B = list_to_binary(["-module(", atom_to_list(Module), "). "
                        "-export([t/1]). "
                        "t(A) -> "
                        "    {a,b,[2,3],x,y}. "]),
    ok = file:write_file(File, B);
simple_file(File, Module, lines) ->
    B = list_to_binary(["-module(", atom_to_list(Module), ").\n"
                        "-export([t/1]).\n"
                        "t(A) ->\n"
                        "    A+1.\n"]),
    ok = file:write_file(File, B);
simple_file(File, Module, F) ->
    B = list_to_binary(["-module(", atom_to_list(Module), "). "
                        "-export([t/0]). "
                        "t() -> "
                        "    t([]). "
                        "t(L) -> "
                        "    lists:",
                        atom_to_list(F), "(a, L). "]),
    ok = file:write_file(File, B).
%% Run Test() only if the crypto application can be started; otherwise
%% return a skip tuple for the test framework. Note that Test() itself
%% runs outside the protected section, so its exceptions propagate.
run_if_crypto_works(Test) ->
    CryptoWorks = try
                      crypto:start(),
                      crypto:stop(),
                      true
                  catch
                      error:_ -> false
                  end,
    case CryptoWorks of
        true -> Test();
        false -> {skip,"The crypto application is missing or broken"}
    end.
%% Provoke a badarith in the dynamically loaded 'lines' module (see
%% simple_file/3, variant 'lines') and return the per-frame location
%% info of the lines:t/1 stack frame. Used by the strip tests to check
%% that line-number information survives stripping.
get_line_number_info() ->
    %% The stacktrace for operators such a '+' can vary depending on
    %% whether the JIT is used or not.
    case catch lines:t(atom) of
        {'EXIT',{badarith,[{erlang,'+',[atom,1],_},
                           {lines,t,1,Info}|_]}} ->
            Info;
        {'EXIT',{badarith,[{lines,t,1,Info}|_]}} ->
            Info
    end.
| null | https://raw.githubusercontent.com/erlang/otp/1a36de1974cdb843baee80140b41e9323820f2c1/lib/stdlib/test/beam_lib_SUITE.erl | erlang |
%% %CopyrightBegin%
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%% http://www.apache.org/licenses/LICENSE-2.0
%% Unless required by applicable law or agreed to in writing, software
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% %CopyrightEnd%
%% -define(debug, true).
%% Read correct beam file.
%% {ok,_} = compile:file(Source, [compressed | CompileFlags]),
%% do_normal(BeamFile),
%% Test reading optional chunks.
%% 'allow_missing_chunks' should work for named chunks too.
%% Make sure that reading the atom chunk works when the 'allow_missing_chunks'
%% option is used.
%% Read invalid beam files.
%% we have eliminated them.
%% evil tests
%% cmp
%% cmp_dirs
%% diff_dirs
%% cmp
%% strip binary
%% strip file
%% strip_files
%% strip a complex module
%% check that each module can be loaded.
%% check that line number information is still present after stripping
%% strip binary
%% Keep all the extra chunks
%% strip file - Keep extra chunks
%% strip_files
%% strip a complex module
%% check that each module can be loaded.
%% check that line number information is still present after stripping
%% There is no way that strip_release can fail with
%% function_clause or something like that...
%% read all chunks
%% write a new beam file, with reversed chunk order
%% compare files
%% test that we can retrieve a chunk before the atom table
%% (actually, try to retrieve all chunks)
%% Test encrypted abstract format.
%% Avoid getting an extra port when crypto starts erl_ddll.
%% To get rid of extra ets tables.
%% The raw chunk "Dbgi" can still be read even without a key.
%% Try clearing (non-existing fun).
%% Install a fun which cannot retrieve a key.
%% Install a fun which returns an incorrect key.
%% Installing a new key fun is not possible without clearing the old.
%% Install the simplest possible working key fun.
%% Installing a new key fun is not possible without clearing the old.
%% Install a key using an ets table.
%% Try to force a stop/start race.
%% Avoid getting an extra port when crypto starts erl_ddll.
%% To get rid of extra ets tables.
%% No key.
%% A wrong key.
%% Write correct key...
%% ... but the fun with the wrong key is still there.
%% Clear the fun. Now it should work.
%% is missing.
%% Create a debug_info chunk with a non-existing backend.
%% beam_lib should not crash, but return an error.
%% Most formatted messages begin with "./simple.beam:" or "<<...".
%% Also make sure that formatted message is not just the term printed.
%% The stacktrace for operators such a '+' can vary depending on
%% whether the JIT is used or not.
%% Copyright Ericsson AB 1997-2022. All Rights Reserved.
%% Licensed under the Apache License, Version 2.0 (the "License");
-module(beam_lib_SUITE).
-ifdef(debug).
-define(format(S, A), io:format(S, A)).
-define(line, put(line, ?LINE), ).
-define(config(X,Y), "./log_dir/").
-define(privdir, "beam_lib_SUITE_priv").
-else.
-include_lib("common_test/include/ct.hrl").
-define(format(S, A), ok).
-define(privdir, proplists:get_value(priv_dir, Conf)).
-endif.
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
normal/1, error/1, cmp/1, cmp_literals/1, strip/1, strip_add_chunks/1, otp_6711/1,
building/1, md5/1, encrypted_abstr/1, encrypted_abstr_file/1,
missing_debug_info_backend/1]).
-export([test_makedep_abstract_code/1]).
-export([init_per_testcase/2, end_per_testcase/2]).
suite() ->
[{ct_hooks,[ts_install_cth]},
{timetrap,{minutes,2}}].
all() ->
[error, normal, cmp, cmp_literals, strip, strip_add_chunks, otp_6711,
building, md5, encrypted_abstr, encrypted_abstr_file,
missing_debug_info_backend, test_makedep_abstract_code
].
groups() ->
[].
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
    %% Cleanup after strip and strip_add_chunks: those cases unstick
    %% and reload sofs, so restore it to a loaded, sticky state.
    %% (The bare comment line here previously lacked its "%%" marker,
    %% which made the function a syntax error.)
    case code:is_sticky(sofs) of
        false ->
            false = code:purge(sofs),
            {module, sofs} = code:load_file(sofs),
            code:stick_mod(sofs),
            ok;
        true ->
            ok
    end.
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, Config) ->
Config.
init_per_testcase(_Case, Config) ->
Config.
end_per_testcase(_Case, _Config) ->
ok.
normal(Conf) when is_list(Conf) ->
PrivDir = ?privdir,
Simple = filename:join(PrivDir, "simple"),
Source = Simple ++ ".erl",
BeamFile = Simple ++ ".beam",
simple_file(Source),
NoOfTables = erlang:system_info(ets_count),
P0 = pps(),
do_normal(Source, PrivDir, BeamFile, []),
{ok,_} = compile:file(Source, [{outdir,PrivDir}, no_debug_info]),
{ok, {simple, [{debug_info, {debug_info_v1, erl_abstract_code, {none, _}}}]}} =
beam_lib:chunks(BeamFile, [debug_info]),
{ok, {simple, [{abstract_code, no_abstract_code}]}} =
beam_lib:chunks(BeamFile, [abstract_code]),
file:delete(BeamFile),
file:delete(Source),
NoOfTables = erlang:system_info(ets_count),
true = (P0 == pps()),
ok.
do_normal(Source, PrivDir, BeamFile, Opts) ->
CompileFlags = [{outdir,PrivDir}, debug_info | Opts],
{ok,_} = compile:file(Source, CompileFlags),
{ok, Binary} = file:read_file(BeamFile),
do_normal(BeamFile, Opts),
do_normal(Binary, Opts).
do_normal(BeamFile, Opts) ->
Imports = {imports, [{erlang, get_module_info, 1},
{erlang, get_module_info, 2},
{lists, member, 2}]},
Exports = {exports, [{module_info, 0}, {module_info, 1}, {t, 0}]},
Local = {locals, [{t, 1}]},
{ok, {simple, [Imports]}} = beam_lib:chunks(BeamFile, [imports]),
{ok, {simple, [{"ImpT",_Bin}]}} =
beam_lib:chunks(BeamFile, ["ImpT"]),
{ok, {simple, [Exports]}} = beam_lib:chunks(BeamFile, [exports]),
{ok, {simple, [{attributes, [{vsn, [_]}]}]}} =
beam_lib:chunks(BeamFile, [attributes]),
{ok, {simple, [{compile_info, _}=CompileInfo]}} =
beam_lib:chunks(BeamFile, [compile_info]),
{ok, {simple, [Local]}} = beam_lib:chunks(BeamFile, [locals]),
{ok, {simple, [{attributes, [{vsn, [_]}]}, CompileInfo,
Exports, Imports, Local]}} =
beam_lib:chunks(BeamFile, [attributes, compile_info, exports, imports, locals]),
{ok, {simple, [{atoms, _Atoms}]}} =
beam_lib:chunks(BeamFile, [atoms]),
{ok, {simple, [{labeled_exports, _LExports}]}} =
beam_lib:chunks(BeamFile, [labeled_exports]),
{ok, {simple, [{labeled_locals, _LLocals}]}} =
beam_lib:chunks(BeamFile, [labeled_locals]),
{ok, {simple, [_Vsn]}} = beam_lib:version(BeamFile),
{ok, {simple, [{abstract_code, {_, _}}]}} =
beam_lib:chunks(BeamFile, [abstract_code]),
{ok, {simple, [{debug_info, {debug_info_v1, erl_abstract_code, _}}]}} =
beam_lib:chunks(BeamFile, [debug_info]),
All = ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT", "AtU8"],
{ok,{simple,Chunks}} = beam_lib:chunks(BeamFile, All, [allow_missing_chunks]),
case {verify_simple(Chunks),Opts} of
{{missing_chunk, AtomBin}, []} when is_binary(AtomBin) -> ok;
{{AtomBin, missing_chunk}, [no_utf8_atoms]} when is_binary(AtomBin) -> ok
end,
{ok, {simple, StrippedBeam}} = beam_lib:strip(BeamFile),
{ok, {simple, MChunks}} = beam_lib:chunks(StrippedBeam,
[attributes, locals],
[allow_missing_chunks]),
[{attributes, missing_chunk}, {locals, missing_chunk}] = MChunks,
Some = ["Code",atoms,"ExpT","LitT"],
{ok,{simple,SomeChunks}} = beam_lib:chunks(BeamFile, Some, [allow_missing_chunks]),
[{"Code",<<_/binary>>},{atoms,[_|_]},{"ExpT",<<_/binary>>},{"LitT",missing_chunk}] =
SomeChunks.
verify_simple([{"Atom", PlainAtomChunk},
{"Code", CodeBin},
{"StrT", StrBin},
{"ImpT", ImpBin},
{"ExpT", ExpBin},
{"FunT", missing_chunk},
{"LitT", missing_chunk},
{"AtU8", AtU8Chunk}])
when is_binary(CodeBin), is_binary(StrBin),
is_binary(ImpBin), is_binary(ExpBin) ->
{PlainAtomChunk, AtU8Chunk}.
%% Read invalid beam files: truncated binaries, corrupted chunks, and
%% plain garbage must all yield descriptive error tuples.
error(Conf) when is_list(Conf) ->
    PrivDir = ?privdir,
    Simple = filename:join(PrivDir, "simple"),
    Source = Simple ++ ".erl",
    BeamFile = Simple ++ ".beam",
    WrongFile = Simple ++ "foo.beam",
    simple_file(Source),
    NoOfTables = erlang:system_info(ets_count),
    P0 = pps(),
    {ok,_} = compile:file(Source, [{outdir,PrivDir},debug_info]),
    ACopy = filename:join(PrivDir, "a_copy.beam"),
    copy_file(BeamFile, ACopy),
    {ok, Binary} = file:read_file(BeamFile),
    copy_file(ACopy, WrongFile),
    verify(file_error, beam_lib:info("./does_simply_not_exist")),
    do_error(BeamFile, ACopy),
    do_error(Binary, ACopy),
    copy_file(ACopy, BeamFile),
    verify(unknown_chunk, beam_lib:chunks(BeamFile, [not_a_chunk])),
    ok = file:write_file(BeamFile, <<>>),
    verify(not_a_beam_file, beam_lib:info(BeamFile)),
    verify(not_a_beam_file, beam_lib:info(<<>>)),
    ok = file:write_file(BeamFile, <<"short">>),
    verify(not_a_beam_file, beam_lib:info(BeamFile)),
    verify(not_a_beam_file, beam_lib:info(<<"short">>)),
    {Binary1, _} = split_binary(Binary, byte_size(Binary)-10),
    LastChunk = last_chunk(Binary),
    verify(chunk_too_big, beam_lib:chunks(Binary1, [LastChunk])),
    Chunks = chunk_info(Binary),
    {value, {_, DebugInfoStart, _}} = lists:keysearch("Dbgi", 1, Chunks),
    {Binary2, _} = split_binary(Binary, DebugInfoStart),
    verify(chunk_too_big, beam_lib:chunks(Binary2, ["Dbgi"])),
    {Binary3, _} = split_binary(Binary, DebugInfoStart-4),
    verify(invalid_beam_file, beam_lib:chunks(Binary3, ["Dbgi"])),
    %% Instead of the 5:32 field below, there used to be control characters
    %% (including zero bytes) directly in the string. Because inferior programs
    %% such as sed and grep don't like zero bytes in text files,
    %% we have eliminated them.
    ok = file:write_file(BeamFile, <<"FOR1",5:32,"BEAMfel">>),
    NoOfTables = erlang:system_info(ets_count),
    true = (P0 == pps()),
    file:delete(Source),
    file:delete(WrongFile),
    file:delete(BeamFile),
    file:delete(ACopy),
    ok.
last_chunk(Bin) ->
L = beam_lib:info(Bin),
{chunks,Chunks} = lists:keyfind(chunks, 1, L),
{Last,_,_} = lists:last(Chunks),
Last.
do_error(BeamFile, ACopy) ->
Chunks = chunk_info(BeamFile),
{value, {_, AtomStart, _}} = lists:keysearch("AtU8", 1, Chunks),
{value, {_, ImportStart, _}} = lists:keysearch("ImpT", 1, Chunks),
{value, {_, DebugInfoStart, _}} = lists:keysearch("Dbgi", 1, Chunks),
{value, {_, AttributesStart, _}} =
lists:keysearch("Attr", 1, Chunks),
{value, {_, CompileInfoStart, _}} =
lists:keysearch("CInf", 1, Chunks),
verify(missing_chunk, beam_lib:chunks(BeamFile, ["__"])),
BF2 = set_byte(ACopy, BeamFile, ImportStart+4, 17),
verify(invalid_chunk, beam_lib:chunks(BF2, [imports])),
BF3 = set_byte(ACopy, BeamFile, AtomStart-6, 17),
verify(missing_chunk, beam_lib:chunks(BF3, [imports])),
BF4 = set_byte(ACopy, BeamFile, DebugInfoStart+10, 17),
verify(invalid_chunk, beam_lib:chunks(BF4, [debug_info])),
BF5 = set_byte(ACopy, BeamFile, AttributesStart+8, 17),
verify(invalid_chunk, beam_lib:chunks(BF5, [attributes])),
BF6 = set_byte(ACopy, BeamFile, 1, 17),
verify(not_a_beam_file, beam_lib:info(BF6)),
BF7 = set_byte(ACopy, BeamFile, 9, 17),
verify(not_a_beam_file, beam_lib:info(BF7)),
BF8 = set_byte(ACopy, BeamFile, 13, 17),
verify(missing_chunk, beam_lib:chunks(BF8, ["AtU8"])),
BF9 = set_byte(ACopy, BeamFile, CompileInfoStart+8, 17),
verify(invalid_chunk, beam_lib:chunks(BF9, [compile_info])).
%% Compare contents of BEAM files and directories.
cmp(Conf) when is_list(Conf) ->
PrivDir = ?privdir,
Dir1 = filename:join(PrivDir, "dir1"),
Dir2 = filename:join(PrivDir, "dir2"),
ok = file:make_dir(Dir1),
ok = file:make_dir(Dir2),
{SourceD1, BeamFileD1} = make_beam(Dir1, simple, member),
{Source2D1, BeamFile2D1} = make_beam(Dir1, simple2, concat),
{SourceD2, BeamFileD2} = make_beam(Dir2, simple, concat),
NoOfTables = erlang:system_info(ets_count),
P0 = pps(),
ok = beam_lib:cmp(BeamFileD1, BeamFileD1),
ver(modules_different, beam_lib:cmp(BeamFileD1, BeamFile2D1)),
ver(chunks_different, beam_lib:cmp(BeamFileD1, BeamFileD2)),
verify(file_error, beam_lib:cmp(foo, bar)),
{ok, B1} = file:read_file(BeamFileD1),
ok = beam_lib:cmp(B1, BeamFileD1),
{ok, B2} = file:read_file(BeamFileD2),
ver(chunks_different, beam_lib:cmp(B1, B2)),
{[],[],[]} = beam_lib:cmp_dirs(Dir1, Dir1),
true = {[BeamFile2D1], [], [{BeamFileD1,BeamFileD2}]} ==
beam_lib:cmp_dirs(Dir1, Dir2),
true = {[], [BeamFile2D1], [{BeamFileD2,BeamFileD1}]} ==
beam_lib:cmp_dirs(Dir2, Dir1),
ver(not_a_directory, beam_lib:cmp_dirs(foo, bar)),
ok = beam_lib:diff_dirs(Dir1, Dir1),
ver(not_a_directory, beam_lib:diff_dirs(foo, bar)),
true = (P0 == pps()),
NoOfTables = erlang:system_info(ets_count),
delete_files([SourceD1, BeamFileD1, Source2D1,
BeamFile2D1, SourceD2, BeamFileD2]),
file:del_dir(Dir1),
file:del_dir(Dir2),
ok.
%% Compare contents of BEAM files having literals.
cmp_literals(Conf) when is_list(Conf) ->
PrivDir = ?privdir,
Dir1 = filename:join(PrivDir, "dir1"),
Dir2 = filename:join(PrivDir, "dir2"),
ok = file:make_dir(Dir1),
ok = file:make_dir(Dir2),
{SourceD1, BeamFileD1} = make_beam(Dir1, simple, constant),
{SourceD2, BeamFileD2} = make_beam(Dir2, simple, constant2),
NoOfTables = erlang:system_info(ets_count),
P0 = pps(),
ok = beam_lib:cmp(BeamFileD1, BeamFileD1),
ver(chunks_different, beam_lib:cmp(BeamFileD1, BeamFileD2)),
{ok, B1} = file:read_file(BeamFileD1),
ok = beam_lib:cmp(B1, BeamFileD1),
{ok, B2} = file:read_file(BeamFileD2),
ver(chunks_different, beam_lib:cmp(B1, B2)),
true = (P0 == pps()),
NoOfTables = erlang:system_info(ets_count),
delete_files([SourceD1, BeamFileD1, SourceD2, BeamFileD2]),
file:del_dir(Dir1),
file:del_dir(Dir2),
ok.
Strip BEAM files .
strip(Conf) when is_list(Conf) ->
PrivDir = ?privdir,
{SourceD1, BeamFileD1} = make_beam(PrivDir, simple, member),
{Source2D1, BeamFile2D1} = make_beam(PrivDir, simple2, concat),
{Source3D1, BeamFile3D1} = make_beam(PrivDir, make_fun, make_fun),
{Source4D1, BeamFile4D1} = make_beam(PrivDir, constant, constant),
{Source5D1, BeamFile5D1} = make_beam(PrivDir, lines, lines),
NoOfTables = erlang:system_info(ets_count),
P0 = pps(),
verify(not_a_beam_file, beam_lib:strip(<<>>)),
{ok, B1} = file:read_file(BeamFileD1),
{ok, {simple, NB1}} = beam_lib:strip(B1),
BId1 = chunk_ids(B1),
NBId1 = chunk_ids(NB1),
true = length(BId1) > length(NBId1),
compare_chunks(B1, NB1, NBId1),
verify(file_error, beam_lib:strip(foo)),
{ok, {simple, _}} = beam_lib:strip(BeamFileD1),
compare_chunks(NB1, BeamFileD1, NBId1),
{ok, B2} = file:read_file(BeamFile2D1),
{ok, [{simple,_},{simple2,_}]} = beam_lib:strip_files([B1, B2]),
{ok, [{simple,_},{simple2,_},{make_fun,_},{constant,_}]} =
beam_lib:strip_files([BeamFileD1, BeamFile2D1, BeamFile3D1, BeamFile4D1]),
OrigSofsPath = code:where_is_file("sofs.beam"),
BeamFileSofs = filename:join(PrivDir,"sofs.beam"),
file:copy(OrigSofsPath, BeamFileSofs),
{ok, {sofs,_}} = beam_lib:strip(BeamFileSofs),
code:unstick_mod(sofs),
false = code:purge(sofs),
{module, simple} = code:load_abs(filename:rootname(BeamFileD1)),
{module, simple2} = code:load_abs(filename:rootname(BeamFile2D1)),
{module, make_fun} = code:load_abs(filename:rootname(BeamFile3D1)),
{module, constant} = code:load_abs(filename:rootname(BeamFile4D1)),
{module, sofs} = code:load_abs(filename:rootname(BeamFileSofs)),
{module, lines} = code:load_abs(filename:rootname(BeamFile5D1)),
Info = get_line_number_info(),
true = code:delete(lines),
false = code:purge(lines),
{ok, {lines,BeamFile5D1}} = beam_lib:strip(BeamFile5D1),
{module, lines} = code:load_abs(filename:rootname(BeamFile5D1)),
Info = get_line_number_info(),
true = (P0 == pps()),
NoOfTables = erlang:system_info(ets_count),
delete_files([SourceD1, BeamFileD1,
Source2D1, BeamFile2D1,
Source3D1, BeamFile3D1,
Source4D1, BeamFile4D1,
Source5D1, BeamFile5D1,
BeamFileSofs]),
false = code:purge(sofs),
{module, sofs} = code:load_file(sofs),
code:stick_mod(sofs),
ok.
strip_add_chunks(Conf) when is_list(Conf) ->
PrivDir = ?privdir,
{SourceD1, BeamFileD1} = make_beam(PrivDir, simple, member),
{Source2D1, BeamFile2D1} = make_beam(PrivDir, simple2, concat),
{Source3D1, BeamFile3D1} = make_beam(PrivDir, make_fun, make_fun),
{Source4D1, BeamFile4D1} = make_beam(PrivDir, constant, constant),
{Source5D1, BeamFile5D1} = make_beam(PrivDir, lines, lines),
NoOfTables = erlang:system_info(ets_count),
P0 = pps(),
verify(not_a_beam_file, beam_lib:strip(<<>>)),
{ok, B1} = file:read_file(BeamFileD1),
{ok, {simple, NB1}} = beam_lib:strip(B1),
BId1 = chunk_ids(B1),
NBId1 = chunk_ids(NB1),
true = length(BId1) > length(NBId1),
compare_chunks(B1, NB1, NBId1),
ExtraChunks = ["Abst", "Dbgi", "Attr", "CInf", "LocT", "Atom"],
{ok, {simple, AB1}} = beam_lib:strip(B1, ExtraChunks),
ABId1 = chunk_ids(AB1),
true = length(BId1) == length(ABId1),
compare_chunks(B1, AB1, ABId1),
verify(file_error, beam_lib:strip(foo)),
{ok, {simple, _}} = beam_lib:strip(BeamFileD1, ExtraChunks),
compare_chunks(B1, BeamFileD1, ABId1),
{ok, B2} = file:read_file(BeamFile2D1),
{ok, [{simple,_},{simple2,_}]} = beam_lib:strip_files([B1, B2], ExtraChunks),
{ok, [{simple,_},{simple2,_},{make_fun,_},{constant,_}]} =
beam_lib:strip_files([BeamFileD1, BeamFile2D1, BeamFile3D1, BeamFile4D1], ExtraChunks),
OrigSofsPath = code:where_is_file("sofs.beam"),
BeamFileSofs = filename:join(PrivDir,"sofs.beam"),
file:copy(OrigSofsPath, BeamFileSofs),
{ok, {sofs,_}} = beam_lib:strip(BeamFileSofs, ExtraChunks),
code:unstick_mod(sofs),
false = code:purge(sofs),
{module, simple} = code:load_abs(filename:rootname(BeamFileD1)),
{module, simple2} = code:load_abs(filename:rootname(BeamFile2D1)),
{module, make_fun} = code:load_abs(filename:rootname(BeamFile3D1)),
{module, constant} = code:load_abs(filename:rootname(BeamFile4D1)),
{module, sofs} = code:load_abs(filename:rootname(BeamFileSofs)),
{module, lines} = code:load_abs(filename:rootname(BeamFile5D1)),
Info = get_line_number_info(),
false = code:purge(lines),
true = code:delete(lines),
{ok, {lines,BeamFile5D1}} = beam_lib:strip(BeamFile5D1),
{module, lines} = code:load_abs(filename:rootname(BeamFile5D1)),
Info = get_line_number_info(),
true = (P0 == pps()),
NoOfTables = erlang:system_info(ets_count),
delete_files([SourceD1, BeamFileD1,
Source2D1, BeamFile2D1,
Source3D1, BeamFile3D1,
Source4D1, BeamFile4D1,
Source5D1, BeamFile5D1,
BeamFileSofs]),
false = code:purge(sofs),
{module, sofs} = code:load_file(sofs),
code:stick_mod(sofs),
ok.
otp_6711(Conf) when is_list(Conf) ->
{'EXIT',{function_clause,_}} = (catch {a, beam_lib:info(3)}),
{'EXIT',{function_clause,_}} = (catch {a, beam_lib:chunks(a, b)}),
{'EXIT',{function_clause,_}} = (catch {a, beam_lib:chunks(a,b,c)}),
{'EXIT',{function_clause,_}} = (catch {a, beam_lib:all_chunks(3)}),
{'EXIT',{function_clause,_}} = (catch {a, beam_lib:cmp(3,4)}),
{'EXIT',{function_clause,_}} = (catch {a, beam_lib:strip(3)}),
{'EXIT',{function_clause,_}} =
(catch {a, beam_lib:strip_files([3])}),
PrivDir = ?privdir,
Dir = filename:join(PrivDir, "dir"),
Lib = filename:join(Dir, "lib"),
App = filename:join(Lib, "app"),
EBin = filename:join(App, "ebin"),
ok = file:make_dir(Dir),
ok = file:make_dir(Lib),
ok = file:make_dir(App),
ok = file:make_dir(EBin),
{SourceD, BeamFileD} = make_beam(EBin, simple, member),
unwritable(BeamFileD),
{error,_,{file_error,_,_}} = beam_lib:strip_release(Dir),
delete_files([SourceD, BeamFileD]),
file:del_dir(EBin),
file:del_dir(App),
file:del_dir(Lib),
file:del_dir(Dir),
ok.
-include_lib("kernel/include/file.hrl").
unwritable(Fname) ->
{ok, Info} = file:read_file_info(Fname),
Mode = Info#file_info.mode - 8#00200,
file:write_file_info(Fname, Info#file_info{mode = Mode}).
Testing building of BEAM files .
building(Conf) when is_list(Conf) ->
PrivDir = ?privdir,
Dir1 = filename:join(PrivDir, "b_dir1"),
Dir2 = filename:join(PrivDir, "b_dir2"),
ok = file:make_dir(Dir1),
ok = file:make_dir(Dir2),
{SourceD1, BeamFileD1} = make_beam(Dir1, building, member),
NoOfTables = erlang:system_info(ets_count),
P0 = pps(),
ChunkIds = chunk_ids(BeamFileD1),
{ok, _Mod, Chunks} = beam_lib:all_chunks(BeamFileD1),
ChunkIds = lists:map(fun ({Id, Data}) when is_binary(Data) -> Id
end, Chunks),
BeamFileD2 = filename:join(Dir2, "building.beam"),
{ok,RevBeam} = beam_lib:build_module(lists:reverse(Chunks)),
file:write_file(BeamFileD2, RevBeam),
compare_chunks(BeamFileD1, BeamFileD2, ChunkIds),
lists:foreach(fun(Id) ->
{ok, {building, [{Id, _Data}]}} =
beam_lib:chunks(BeamFileD1, [Id])
end, ChunkIds),
lists:foreach(fun(Id) ->
{ok, {building, [{Id, _Data}]}} =
beam_lib:chunks(BeamFileD2, [Id])
end, ChunkIds),
true = (P0 == pps()),
NoOfTables = erlang:system_info(ets_count),
delete_files([SourceD1, BeamFileD1, BeamFileD2]),
file:del_dir(Dir1),
file:del_dir(Dir2),
ok.
Compare beam_lib : md5/1 and code : .
md5(Conf) when is_list(Conf) ->
Beams = collect_beams(),
io:format("Found ~w beam files", [length(Beams)]),
md5_1(Beams).
md5_1([N|Ns]) ->
{ok,Beam0} = file:read_file(N),
Beam = maybe_uncompress(Beam0),
{ok,{Mod,MD5}} = beam_lib:md5(Beam),
{Mod,MD5} = {Mod,code:module_md5(Beam)},
md5_1(Ns);
md5_1([]) -> ok.
collect_beams() ->
SuperDir = filename:dirname(filename:dirname(code:which(?MODULE))),
TestDirs = filelib:wildcard(filename:join([SuperDir,"*_test"])),
AbsDirs = [filename:absname(X) || X <- code:get_path()],
collect_beams_1(AbsDirs ++ TestDirs).
collect_beams_1([Dir|Dirs]) ->
filelib:wildcard(filename:join(Dir, "*.beam")) ++ collect_beams_1(Dirs);
collect_beams_1([]) -> [].
maybe_uncompress(<<"FOR1",_/binary>>=Beam) -> Beam;
maybe_uncompress(Beam) -> zlib:gunzip(Beam).
encrypted_abstr(Conf) when is_list(Conf) ->
run_if_crypto_works(fun() -> encrypted_abstr_1(Conf) end).
encrypted_abstr_1(Conf) ->
PrivDir = ?privdir,
Simple = filename:join(PrivDir, "simple"),
Source = Simple ++ ".erl",
BeamFile = Simple ++ ".beam",
simple_file(Source),
erl_ddll:start(),
NoOfTables = erlang:system_info(ets_count),
P0 = pps(),
Key = "#a_crypto_key",
CompileFlags = [{outdir,PrivDir}, debug_info, {debug_info_key,Key}],
{ok,_} = compile:file(Source, CompileFlags),
{ok, Binary} = file:read_file(BeamFile),
do_encrypted_abstr(BeamFile, Key),
do_encrypted_abstr(Binary, Key),
file:delete(BeamFile),
file:delete(Source),
NoOfTables = erlang:system_info(ets_count),
true = (P0 == pps()),
ok.
%% Exercise the crypto key fun API against a beam with an encrypted
%% debug_info chunk. (Several comment lines in this copy had lost
%% their "%%" markers, which made the function a syntax error; they
%% are restored here.)
do_encrypted_abstr(Beam, Key) ->
    verify(key_missing_or_invalid, beam_lib:chunks(Beam, [debug_info])),
    %% The raw chunk "Dbgi" can still be read even without a key.
    {ok,{simple,[{"Dbgi",Dbgi}]}} = beam_lib:chunks(Beam, ["Dbgi"]),
    <<0:8,8:8,"des3_cbc",_/binary>> = Dbgi,
    %% Try some invalid funs.
    bad_fun(badfun, fun() -> ok end),
    bad_fun(badfun, {a,b}),
    bad_fun(blurf),
    {function_clause,_} = bad_fun(fun(glurf) -> ok end),
    %% Funs that return something strange.
    bad_fun(badfun, fun(init) -> {ok,fun() -> ok end} end),
    glurf = bad_fun(fun(init) -> {error,glurf} end),
    %% Try clearing (non-existing fun).
    undefined = beam_lib:clear_crypto_key_fun(),
    %% Install a fun which cannot retrieve a key.
    ok = beam_lib:crypto_key_fun(fun(init) -> ok end),
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    %% Install a fun which returns an incorrect key.
    {ok,_} = beam_lib:clear_crypto_key_fun(),
    ok = beam_lib:crypto_key_fun(simple_crypto_fun("wrong key...")),
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    %% Installing a new key fun is not possible without clearing the old.
    verify(exists, beam_lib:crypto_key_fun(simple_crypto_fun(Key))),
    %% Install the simplest possible working key fun.
    {ok,_} = beam_lib:clear_crypto_key_fun(),
    ok = beam_lib:crypto_key_fun(simple_crypto_fun(Key)),
    verify_abstract(Beam),
    {ok,{simple,[{"Dbgi",Dbgi}]}} = beam_lib:chunks(Beam, ["Dbgi"]),
    %% Installing a new key fun is not possible without clearing the old.
    verify(exists, beam_lib:crypto_key_fun(ets_crypto_fun(Key))),
    %% Install a key using an ets table.
    {ok,_} = beam_lib:clear_crypto_key_fun(),
    ok = beam_lib:crypto_key_fun(ets_crypto_fun(Key)),
    verify_abstract(Beam),
    {ok,{simple,[{"Dbgi",Dbgi}]}} = beam_lib:chunks(Beam, ["Dbgi"]),
    {ok,cleared} = beam_lib:clear_crypto_key_fun(),
    %% Try to force a stop/start race.
    start_stop_race(10000),
    ok.
start_stop_race(0) ->
ok;
start_stop_race(N) ->
{error,_} = beam_lib:crypto_key_fun(bad_fun),
undefined = beam_lib:clear_crypto_key_fun(),
start_stop_race(N-1).
bad_fun(F) ->
{error,E} = beam_lib:crypto_key_fun(F),
E.
bad_fun(S, F) ->
verify(S, beam_lib:crypto_key_fun(F)).
verify_abstract(Beam) ->
{ok,{simple,[Abst, Dbgi]}} = beam_lib:chunks(Beam, [abstract_code, debug_info]),
{abstract_code,{raw_abstract_v1,_}} = Abst,
{debug_info,{debug_info_v1,erl_abstract_code,_}} = Dbgi.
simple_crypto_fun(Key) ->
fun(init) -> ok;
({debug_info, des3_cbc, simple, _}) -> Key
end.
ets_crypto_fun(Key) ->
fun(init) ->
T = ets:new(beam_lib_SUITE_keys, [private, set]),
true = ets:insert(T, {key,Key}),
{ok,fun({debug_info, des3_cbc, simple, _}) ->
[{key,Val}] = ets:lookup(T, key),
Val;
(clear) ->
ets:delete(T),
cleared
end}
end.
%% Test encrypted abstract format with the key in .erlang.crypt.
encrypted_abstr_file(Conf) when is_list(Conf) ->
run_if_crypto_works(fun() -> encrypted_abstr_file_1(Conf) end).
encrypted_abstr_file_1(Conf) ->
PrivDir = ?privdir,
Simple = filename:join(PrivDir, "simple"),
Source = Simple ++ ".erl",
BeamFile = Simple ++ ".beam",
simple_file(Source),
erl_ddll:start(),
NoOfTables = erlang:system_info(ets_count),
P0 = pps(),
Key = "Long And niCe 99Krypto Key",
CompileFlags = [{outdir,PrivDir}, debug_info, {debug_info_key,Key}],
{ok,_} = compile:file(Source, CompileFlags),
{ok, Binary} = file:read_file(BeamFile),
{ok,OldCwd} = file:get_cwd(),
ok = file:set_cwd(PrivDir),
do_encrypted_abstr_file(BeamFile, Key),
do_encrypted_abstr_file(Binary, Key),
ok = file:set_cwd(OldCwd),
file:delete(filename:join(PrivDir, ".erlang.crypt")),
file:delete(BeamFile),
file:delete(Source),
NoOfTables = erlang:system_info(ets_count),
true = (P0 == pps()),
ok.
%% Exercise decryption of the debug_info chunk with the key supplied
%% via a .erlang.crypt file in the current directory. (A bare comment
%% line in this copy had lost its "%%" marker, which made the function
%% a syntax error; it is restored below.)
do_encrypted_abstr_file(Beam, Key) ->
    %% No key.
    write_crypt_file(""),
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    %% A wrong key.
    write_crypt_file(["[{debug_info,des3_cbc,simple,\"A Wrong Key\"}].\n"]),
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    %% Write correct key...
    write_crypt_file(["[{debug_info,des3_cbc,simple,\"",Key,"\"}].\n"]),
    %% ... but the fun with the wrong key is still there.
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    %% Clear the fun. Now it should work.
    {ok,_} = beam_lib:clear_crypto_key_fun(),
    verify_abstract(Beam),
    verify_abstract(Beam),
    ok = file:delete(".erlang.crypt"),
    verify_abstract(Beam),
    %% Clear, otherwise the second pass will fail.
    {ok,_} = beam_lib:clear_crypto_key_fun(),
    {error,beam_lib,Error} = beam_lib:chunks(Beam, [abstract_code]),
    ok.
test_makedep_abstract_code(Conf) ->
PrivDir = ?privdir,
ErlFile = filename:join(PrivDir, "hello.erl"),
BeamFile = filename:join(PrivDir, "hello.beam"),
file:write_file(ErlFile,
["-module(hello).\n",
"-export([start/0]).\n",
"start() -> ok.\n"
]),
DependDir = filename:join(PrivDir, "depend"),
file:make_dir(DependDir),
DependFile = filename:join(DependDir,"hello.d"),
compile:file(ErlFile,
[debug_info,
makedep_side_effect,
{outdir, PrivDir},
{makedep_output, DependFile}]),
file:delete(DependFile),
file:del_dir(DependDir),
case beam_lib:chunks(BeamFile, [debug_info]) of
{ok, {Module, [{debug_info, {debug_info_v1,
_Backend=erl_abstract_code,Metadata}}]}} ->
SrcOpts = [no_copt, to_core, binary, return_errors,
no_inline, strict_record_tests, strict_record_updates,
dialyzer, no_spawn_compiler_process],
{ok,_} = erl_abstract_code:debug_info(core_v1, Module, Metadata,
SrcOpts),
ok
end.
write_crypt_file(Contents0) ->
Contents = list_to_binary([Contents0]),
io:format("~s\n", [binary_to_list(Contents)]),
ok = file:write_file(".erlang.crypt", Contents).
: Do n't crash when the backend for generating the abstract code
missing_debug_info_backend(Conf) ->
PrivDir = ?privdir,
Simple = filename:join(PrivDir, "simple"),
Source = Simple ++ ".erl",
BeamFile = Simple ++ ".beam",
simple_file(Source),
{ok,simple} = compile:file(Source, [{outdir,PrivDir}]),
{ok,simple,All0} = beam_lib:all_chunks(BeamFile),
FakeBackend = definitely__not__an__existing__backend,
FakeDebugInfo = {debug_info_v1, FakeBackend, nothing_here},
All = lists:keyreplace("Dbgi", 1, All0, {"Dbgi", term_to_binary(FakeDebugInfo)}),
{ok,NewBeam} = beam_lib:build_module(All),
ok = file:write_file(BeamFile, NewBeam),
verify(missing_backend, beam_lib:chunks(BeamFile, [abstract_code])),
file:delete(BeamFile),
ok.
compare_chunks(File1, File2, ChunkIds) ->
{ok, {_, Chunks1}} = beam_lib:chunks(File1, ChunkIds),
{ok, {_, Chunks2}} = beam_lib:chunks(File2, ChunkIds),
true = Chunks1 == Chunks2.
chunk_ids(File) ->
lists:map(fun({Id,_Start,_Size}) -> Id end, chunk_info(File)).
chunk_info(File) ->
{value, {chunks, Chunks}} =
lists:keysearch(chunks, 1, beam_lib:info(File)),
Chunks.
make_beam(Dir, Module, F) ->
FileBase = filename:join(Dir, atom_to_list(Module)),
Source = FileBase ++ ".erl",
BeamFile = FileBase ++ ".beam",
file:delete(BeamFile),
simple_file(Source, Module, F),
{ok, _} = compile:file(Source, [{outdir,Dir}, debug_info, report]),
{Source, BeamFile}.
set_byte(_Backup, Binary, Pos, Byte) when is_binary(Binary) ->
<<B1:Pos/binary, _:1/binary, B2/binary>> = Binary,
NB = <<B1/binary, Byte:8, B2/binary>>,
NB;
set_byte(Backup, File, Pos, Byte) ->
copy_file(Backup, File),
set_byte(File, Pos, Byte),
File.
set_byte(File, Pos, Byte) ->
{ok, Fd} = file:open(File, [read, write]),
{ok, _} = file:position(Fd, Pos),
ok = file:write(Fd, [Byte]),
file:close(Fd).
copy_file(Src, Dest) ->
{ok, _} = file:copy(Src, Dest),
ok = file:change_mode(Dest, 8#0666).
delete_files(Files) ->
lists:foreach(fun(F) -> file:delete(F) end, Files).
verify(S, {error, beam_lib, R}) ->
verify_error(S, R);
verify(S, {error, R}) ->
verify_error(S, R).
verify_error(S, R) ->
if
S =:= R -> ok;
true -> [S|_] = tuple_to_list(R)
end,
FM = string:str(lists:flatten(beam_lib:format_error(R)), "simpl") > 0,
BM = string:str(lists:flatten(beam_lib:format_error(R)), "<<") > 0,
Handled = beam_lib:format_error(R) =/= io_lib:format("~p~n", [R]),
true = ((FM > 0) or (BM > 0)) and Handled.
ver(S, {error, beam_lib, R}) ->
[S|_] = tuple_to_list(R),
case lists:flatten(beam_lib:format_error(R)) of
[${ | _] ->
ct:fail({bad_format_error, R});
_ ->
ok
end.
pps() ->
{erlang:ports()}.
simple_file(File) ->
simple_file(File, simple).
simple_file(File, Module) ->
simple_file(File, Module, member).
simple_file(File, Module, make_fun) ->
B = list_to_binary(["-module(", atom_to_list(Module), "). "
"-export([t/1]). "
"t(A) -> "
" fun(X) -> A+X end. "]),
ok = file:write_file(File, B);
simple_file(File, Module, constant) ->
B = list_to_binary(["-module(", atom_to_list(Module), "). "
"-export([t/1]). "
"t(A) -> "
" {a,b,[2,3],c,d}. "]),
ok = file:write_file(File, B);
simple_file(File, Module, constant2) ->
B = list_to_binary(["-module(", atom_to_list(Module), "). "
"-export([t/1]). "
"t(A) -> "
" {a,b,[2,3],x,y}. "]),
ok = file:write_file(File, B);
simple_file(File, Module, lines) ->
B = list_to_binary(["-module(", atom_to_list(Module), ").\n"
"-export([t/1]).\n"
"t(A) ->\n"
" A+1.\n"]),
ok = file:write_file(File, B);
simple_file(File, Module, F) ->
B = list_to_binary(["-module(", atom_to_list(Module), "). "
"-export([t/0]). "
"t() -> "
" t([]). "
"t(L) -> "
" lists:",
atom_to_list(F), "(a, L). "]),
ok = file:write_file(File, B).
run_if_crypto_works(Test) ->
try begin crypto:start(), crypto:stop(), ok end of
ok ->
Test()
catch
error:_ ->
{skip,"The crypto application is missing or broken"}
end.
get_line_number_info() ->
case catch lines:t(atom) of
{'EXIT',{badarith,[{erlang,'+',[atom,1],_},
{lines,t,1,Info}|_]}} ->
Info;
{'EXIT',{badarith,[{lines,t,1,Info}|_]}} ->
Info
end.
|
cf237011ea64fcb53c4bf577096c1742dfe497127aac9809694c424f2eaf5b7a | esumii/min-caml | ack.ml | let rec ack x y =
if x <= 0 then y + 1 else
if y <= 0 then ack (x - 1) 1 else
ack (x - 1) (ack x (y - 1)) in
print_int (ack 3 10)
| null | https://raw.githubusercontent.com/esumii/min-caml/8860b6fbc50786a27963aff1f7639b94c244618a/shootout/ack.ml | ocaml | let rec ack x y =
if x <= 0 then y + 1 else
if y <= 0 then ack (x - 1) 1 else
ack (x - 1) (ack x (y - 1)) in
print_int (ack 3 10)
| |
c866a9d04fcdfa1d537ab6fcc8ea2c84ec66b1ae3f2ea26dde57a2a0059c7bef | techascent/tech.queue | task.clj | (ns tech.queue.task
"Generic things that can be executed on the queue. Uses keyword dispatch"
(:require [tech.queue.protocols :as q]))
(defn keyword->fn
[kwd]
(require (symbol (namespace kwd)))
(resolve (symbol (namespace kwd) (name kwd))))
(defn msg->task-object
[context msg]
((keyword->fn (:tech.queue.task/msg->obj msg)) context))
(defn var->keyword
"Given a var, make a keyword that points to it"
[var]
(let [{:keys [ns name]} (meta var)]
(keyword (clojure.core/name
(clojure.core/ns-name ns))
(clojure.core/name name))))
(defn add-processor-to-msg
[task-obj-constructor-var msg]
(assoc msg :tech.queue.task/msg->obj
(var->keyword task-obj-constructor-var)))
;;Forward everything to something created via the message
(defrecord TaskQueueProcessor [dispatch-fn context]
q/QueueProcessor
(msg->log-context [this msg]
(q/msg->log-context (dispatch-fn context msg) msg))
(msg-ready? [this msg]
(q/msg-ready? (dispatch-fn context msg) msg))
(process! [this msg]
(q/process! (dispatch-fn context msg) msg))
(retire! [this msg last-attempt-result]
(q/retire! (dispatch-fn context msg) msg last-attempt-result))
q/PResourceLimit
(resource-map [this msg initial-res]
(q/resource-map (dispatch-fn context msg) msg initial-res)))
(defn task-processor
[context & {:keys [dispatch-fn]
:or {dispatch-fn msg->task-object}}]
(->TaskQueueProcessor dispatch-fn context))
(defn put!
[queue constructor-var msg options]
(q/put! queue (add-processor-to-msg constructor-var msg) options))
(defrecord ForwardQueueProcessor [process-fn log-context-fn ready-fn retire-fn
resource-map-fn]
q/QueueProcessor
(msg->log-context [this msg]
(if log-context-fn
(log-context-fn msg)
{}))
(msg-ready? [this msg]
(if ready-fn
(ready-fn msg)
true))
(process! [this msg]
(process-fn msg)
{:status :success
:msg msg})
(retire! [this msg last-attempt-result]
(when retire-fn
(retire-fn msg)))
q/PResourceLimit
(resource-map [this msg initial-res-map]
(if resource-map-fn
(resource-map-fn msg initial-res-map)
{})))
(defn forward-queue-processor
[process-fn & {:keys [log-context-fn ready-fn retire-fn
resource-map-fn]}]
(->ForwardQueueProcessor process-fn log-context-fn ready-fn retire-fn
resource-map-fn))
| null | https://raw.githubusercontent.com/techascent/tech.queue/117d054de71fa3c3d93e718eb0c4ac209e62ff33/src/tech/queue/task.clj | clojure | Forward everything to something created via the message | (ns tech.queue.task
"Generic things that can be executed on the queue. Uses keyword dispatch"
(:require [tech.queue.protocols :as q]))
(defn keyword->fn
[kwd]
(require (symbol (namespace kwd)))
(resolve (symbol (namespace kwd) (name kwd))))
(defn msg->task-object
[context msg]
((keyword->fn (:tech.queue.task/msg->obj msg)) context))
(defn var->keyword
"Given a var, make a keyword that points to it"
[var]
(let [{:keys [ns name]} (meta var)]
(keyword (clojure.core/name
(clojure.core/ns-name ns))
(clojure.core/name name))))
(defn add-processor-to-msg
[task-obj-constructor-var msg]
(assoc msg :tech.queue.task/msg->obj
(var->keyword task-obj-constructor-var)))
(defrecord TaskQueueProcessor [dispatch-fn context]
q/QueueProcessor
(msg->log-context [this msg]
(q/msg->log-context (dispatch-fn context msg) msg))
(msg-ready? [this msg]
(q/msg-ready? (dispatch-fn context msg) msg))
(process! [this msg]
(q/process! (dispatch-fn context msg) msg))
(retire! [this msg last-attempt-result]
(q/retire! (dispatch-fn context msg) msg last-attempt-result))
q/PResourceLimit
(resource-map [this msg initial-res]
(q/resource-map (dispatch-fn context msg) msg initial-res)))
(defn task-processor
[context & {:keys [dispatch-fn]
:or {dispatch-fn msg->task-object}}]
(->TaskQueueProcessor dispatch-fn context))
(defn put!
[queue constructor-var msg options]
(q/put! queue (add-processor-to-msg constructor-var msg) options))
(defrecord ForwardQueueProcessor [process-fn log-context-fn ready-fn retire-fn
resource-map-fn]
q/QueueProcessor
(msg->log-context [this msg]
(if log-context-fn
(log-context-fn msg)
{}))
(msg-ready? [this msg]
(if ready-fn
(ready-fn msg)
true))
(process! [this msg]
(process-fn msg)
{:status :success
:msg msg})
(retire! [this msg last-attempt-result]
(when retire-fn
(retire-fn msg)))
q/PResourceLimit
(resource-map [this msg initial-res-map]
(if resource-map-fn
(resource-map-fn msg initial-res-map)
{})))
(defn forward-queue-processor
[process-fn & {:keys [log-context-fn ready-fn retire-fn
resource-map-fn]}]
(->ForwardQueueProcessor process-fn log-context-fn ready-fn retire-fn
resource-map-fn))
|
46043416a3d51826e0bf018ec4422244f52d097958f593a4ba92fbb35fc3cd6a | kiranlak/austin-sbst | traceManager.ml | Copyright : , University College London , 2011
open Cil
let htToInsert : (int, instr list) Hashtbl.t = Hashtbl.create 100
let htToAppend : (int, instr list) Hashtbl.t = Hashtbl.create 50
module Log = LogManager
let newSid = ref (-2)
let reset() =
Hashtbl.clear htToInsert;
Hashtbl.clear htToAppend;
newSid := (-2)
let pushInstrBeforeStmt (s:stmt) (ilist:instr list) =
let existing =
try
Hashtbl.find htToInsert s.sid
with
| Not_found -> []
in
Hashtbl.replace htToInsert s.sid (ilist@existing)
let appendInstr (s:stmt) (ilist:instr list) =
let existing =
try
Hashtbl.find htToAppend s.sid
with
| Not_found -> []
in
Hashtbl.replace htToAppend s.sid (ilist@existing)
let addInstrsToBlock (b:block) (ilist:instr list) =
match b.bstmts with
| [] -> (
let s' = mkStmt (Instr(ilist)) in
s'.sid <- !newSid;
decr newSid;
b.bstmts <- [s']
)
| s::rem -> (
let existing =
try
Hashtbl.find htToInsert s.sid
with
| Not_found -> []
in
Hashtbl.replace htToInsert s.sid (ilist@existing)
)
class insertInstructionsVis = object(this)
inherit nopCilVisitor
method vstmt (s:stmt) =
if Hashtbl.mem htToInsert s.sid then (
let existing = Hashtbl.find htToInsert s.sid in
ChangeDoChildrenPost(s, (fun s' -> this#queueInstr existing; s'))
) else if Hashtbl.mem htToAppend s.sid then (
let existing =
List.map(fun i -> mkStmtOneInstr i)(Hashtbl.find htToAppend s.sid)
in
ChangeDoChildrenPost(s, (
fun s' ->
let skind' = Block(mkBlock (compactStmts (s'::(existing)))) in
s'.skind <- skind';
s'))
) else
DoChildren
end
let insertInstructions (source:file) =
let vis = new insertInstructionsVis in
visitCilFileSameGlobals vis source; | null | https://raw.githubusercontent.com/kiranlak/austin-sbst/9c8aac72692dca952302e0e4fdb9ff381bba58ae/AustinOcaml/instrumentation/traceManager.ml | ocaml | Copyright : , University College London , 2011
open Cil
let htToInsert : (int, instr list) Hashtbl.t = Hashtbl.create 100
let htToAppend : (int, instr list) Hashtbl.t = Hashtbl.create 50
module Log = LogManager
let newSid = ref (-2)
let reset() =
Hashtbl.clear htToInsert;
Hashtbl.clear htToAppend;
newSid := (-2)
let pushInstrBeforeStmt (s:stmt) (ilist:instr list) =
let existing =
try
Hashtbl.find htToInsert s.sid
with
| Not_found -> []
in
Hashtbl.replace htToInsert s.sid (ilist@existing)
let appendInstr (s:stmt) (ilist:instr list) =
let existing =
try
Hashtbl.find htToAppend s.sid
with
| Not_found -> []
in
Hashtbl.replace htToAppend s.sid (ilist@existing)
let addInstrsToBlock (b:block) (ilist:instr list) =
match b.bstmts with
| [] -> (
let s' = mkStmt (Instr(ilist)) in
s'.sid <- !newSid;
decr newSid;
b.bstmts <- [s']
)
| s::rem -> (
let existing =
try
Hashtbl.find htToInsert s.sid
with
| Not_found -> []
in
Hashtbl.replace htToInsert s.sid (ilist@existing)
)
class insertInstructionsVis = object(this)
inherit nopCilVisitor
method vstmt (s:stmt) =
if Hashtbl.mem htToInsert s.sid then (
let existing = Hashtbl.find htToInsert s.sid in
ChangeDoChildrenPost(s, (fun s' -> this#queueInstr existing; s'))
) else if Hashtbl.mem htToAppend s.sid then (
let existing =
List.map(fun i -> mkStmtOneInstr i)(Hashtbl.find htToAppend s.sid)
in
ChangeDoChildrenPost(s, (
fun s' ->
let skind' = Block(mkBlock (compactStmts (s'::(existing)))) in
s'.skind <- skind';
s'))
) else
DoChildren
end
let insertInstructions (source:file) =
let vis = new insertInstructionsVis in
visitCilFileSameGlobals vis source; | |
7bfdb5fbe9197e260b6360bb69a473a6337b9279a40d12b3901c7a8932dc8323 | pyrocat101/opal | intuitive.ml |
* Intuitive Language - Hackerrank FP Contest Challenge :
* -calculi-jun14/challenges/intuitive-language
*
* The language is case - INSENSITIVE !
*
* letter : : = [ a - zA - Z ]
* ident : : = < letter > ( < digit > | < letter > ) *
*
* kwd : : = function | is | of | assign | and | to | do | what
* int : : = < digit>+
* num : : = < int > [ / < int > ]
* var : : = < ident >
* func : : = function of < int > : < exp > ( , < exp > ) *
* | < exp >
* : : = < var > is < func > .
*
* assn : : = Assign < exp > to < var > ( AND < exp > to < var > ) * !
*
* loop : : = do { < exp > } < assn >
*
* ask : : = what is ( < call > ) ( AND < call > ) * ?
*
* exp : : = < term > ( ( + | - ) < exp > ) ?
* term : : = < value > ( ( * | / ) < term > ) ?
* value : : = [ + | - ] < num > | < call > | \ ( exp \ )
* call : : = < var > ( \ [ < exp > \ ] ) *
*
* program : : = ( decl | assn | loop | ask ) *
*
* Intuitive Language - Hackerrank FP Contest Challenge:
* -calculi-jun14/challenges/intuitive-language
*
* The language is case-INSENSITIVE!
*
* letter ::= [a-zA-Z]
* ident ::= <letter> ( <digit> | <letter> )*
*
* kwd ::= function | is | of | assign | and | to | do | what
* int ::= <digit>+
* num ::= <int> [ / <int> ]
* var ::= <ident>
* func ::= function of <int> : <exp> (, <exp>)*
* | <exp>
* decl ::= <var> is <func> .
*
* assn ::= Assign <exp> to <var> ( AND <exp> to <var> )* !
*
* loop ::= do { <exp> } <assn>
*
* ask ::= what is ( <call> ) ( AND <call> )* ?
*
* exp ::= <term> ( ( + | - ) <exp> )?
* term ::= <value> ( ( * | / ) <term> )?
* value ::= [+ | -] <num> | <call> | \( exp \)
* call ::= <var> ( \[ <exp> \] )*
*
* program ::= ( decl | assn | loop | ask )*
*
*)
(* ----------------------------- opal.ml START ------------------------------ *)
module LazyStream = struct
type 'a t = Cons of 'a * 'a t Lazy.t | Nil
let of_stream stream =
let rec next stream =
try Cons(Stream.next stream, lazy (next stream))
with Stream.Failure -> Nil
in
next stream
let of_string str = str |> Stream.of_string |> of_stream
let of_channel ic = ic |> Stream.of_channel |> of_stream
let of_function f =
let rec next f =
match f () with
| Some x -> Cons(x, lazy (next f))
| None -> Nil
in
next f
end
let implode l = String.concat "" (List.map (String.make 1) l)
let explode s =
let l = ref [] in
String.iter (fun c -> l := c :: !l) s;
List.rev !l
let (%) f g = fun x -> g (f x)
type 'token input = 'token LazyStream.t
type ('token, 'result) parser = 'token input -> ('result * 'token input) option
let parse parser input =
match parser input with
| Some(res, _) -> Some res
| None -> None
let return x input = Some(x, input)
let (>>=) x f =
fun input ->
match x input with
| Some(result', input') -> f result' input'
| None -> None
let (<|>) x y =
fun input ->
match x input with
| Some _ as ret -> ret
| None -> y input
let rec scan x input =
match x input with
| Some(result', input') -> LazyStream.Cons(result', lazy (scan x input'))
| None -> LazyStream.Nil
let mzero _ = None
let any = function
| LazyStream.Cons(token, input') -> Some(token, Lazy.force input')
| LazyStream.Nil -> None
let satisfy test = any >>= (fun res -> if test res then return res else mzero)
let eof x = function LazyStream.Nil -> Some(x, LazyStream.Nil) | _ -> None
let (=>) x f = x >>= fun r -> return (f r)
let (>>) x y = x >>= fun _ -> y
let (<<) x y = x >>= fun r -> y >>= fun _ -> return r
let (<~>) x xs = x >>= fun r -> xs >>= fun rs -> return (r :: rs)
let rec choice = function [] -> mzero | h :: t -> (h <|> choice t)
let rec count n x = if n > 0 then x <~> count (n - 1) x else return []
let between op ed x = op >> x << ed
let option default x = x <|> return default
let optional x = option () (x >> return ())
let rec skip_many x = option () (x >>= fun _ -> skip_many x)
let skip_many1 x = x >> skip_many x
let rec many x = option [] (x >>= fun r -> many x >>= fun rs -> return (r :: rs))
let many1 x = x <~> many x
let sep_by1 x sep = x <~> many (sep >> x)
let sep_by x sep = sep_by1 x sep <|> return []
let end_by1 x sep = sep_by1 x sep << sep
let end_by x sep = end_by1 x sep <|> return []
let chainl1 x op =
let rec loop a = (op >>= fun f -> x >>= fun b -> loop (f a b)) <|> return a in
x >>= loop
let chainl x op default = chainl1 x op <|> return default
let rec chainr1 x op =
x >>= fun a -> (op >>= fun f -> chainr1 x op >>= f a) <|> return a
let chainr x op default = chainr1 x op <|> return default
let exactly x = satisfy ((=) x)
let one_of l = satisfy (fun x -> List.mem x l)
let none_of l = satisfy (fun x -> not (List.mem l x))
let range l r = satisfy (fun x -> l <= x && x <= r)
let space = one_of [' '; '\t'; '\r'; '\n']
let spaces = skip_many space
let newline = exactly '\n'
let tab = exactly '\t'
let upper = range 'A' 'Z'
let lower = range 'a' 'z'
let digit = range '0' '9'
let letter = lower <|> upper
let alpha_num = letter <|> digit
let hex_digit = range 'a' 'f' <|> range 'A' 'F'
let oct_digit = range '0' '7'
let lexeme x = spaces >> x
let token s =
let rec loop s i =
if i >= String.length s
then return s
else exactly s.[i] >> loop s (i + 1)
in
lexeme (loop s 0)
(* ------------------------------ opal.ml END ------------------------------- *)
(* rational number *)
type num = Ratio of int * int
let rec num_of_string s =
if String.contains s '/' then
let len = String.length s in
let delim = String.index s '/' in
let numer = String.sub s 0 delim
and denom = String.sub s (delim + 1) (len - delim - 1) in
Ratio (int_of_string numer, int_of_string denom) |> simplify
else
Ratio ((int_of_string s), 1) |> simplify
and sign x =
if x < 0 then
-1
else if x = 0 then
0
else
1
and string_of_num (Ratio (numer, denom)) =
if denom = 1 then
string_of_int numer
else
Format.sprintf "%s/%s" (string_of_int numer) (string_of_int denom)
and simplify (Ratio (numer, denom)) =
if numer = 0 || denom = 0 then
Ratio (0, 1)
else
let sign = (sign numer) * (sign denom) in
let numer = abs numer in
let denom = abs denom in
let divisor = gcd numer denom in
Ratio (sign * numer / divisor, denom / divisor)
and gcd a b =
if a = 0 then b
else if b = 0 then a
else if a > b then gcd b (a mod b)
else gcd a (b mod a)
a c ad + bc
- + - = -------
b d b*d
a c ad + bc
- + - = -------
b d b*d
*)
let ( +/ ) (Ratio (a, b)) (Ratio (c, d)) =
Ratio (a * d + b * c, b * d) |> simplify
a c ad - bc
- - - = -------
b d b*d
a c ad - bc
- - - = -------
b d b*d
*)
let ( -/ ) (Ratio (a, b)) (Ratio (c, d)) =
Ratio (a * d - b * c, b * d) |> simplify
(*
a c ac
- * - = --
b d bd
*)
let ( */ ) (Ratio (a, b)) (Ratio (c, d)) =
Ratio (a * c, b * d) |> simplify
(*
a c ad
- / - = --
b d bc
*)
let ( // ) (Ratio (a, b)) (Ratio (c, d)) =
Ratio (a * d, b * c) |> simplify
let minus_num (Ratio (a, b)) = Ratio (-a, b)
let is_integer_num (Ratio (a, b)) = b = 1
let sign_num (Ratio (a, b)) = sign a
let int_of_num (Ratio(a, b)) = a / b
(* interpreter *)
type exp = AddExp of exp * exp
| SubExp of exp * exp
| MulExp of exp * exp
| DivExp of exp * exp
| PosExp of exp
| NegExp of exp
| Number of num
| Call of call
and func = exp array
and call = string * exp list
and assign = (string * exp) list
and stmt = Decl of string * func
| Assign of assign
| Loop of exp * assign
| Ask of call list
type program = stmt list
type value = NumVal of num
| FuncVal of num * num list
exception Syntax_error
exception Runtime_error
(* parser *)
let kwd s =
let rec loop s i =
if i >= String.length s
then return s
else satisfy (fun c -> Char.lowercase c = s.[i]) >> loop s (i + 1)
in
lexeme (loop s 0)
let comma_list x = sep_by1 x (token ",")
let parens = between (token "(") (token ")")
let bracks = between (token "[") (token "]")
let braces = between (token "{") (token "}")
let reserved = ["function"; "is"; "of"; "assign"; "and"; "to"; "do"; "what"]
let ident = (spaces >> letter <~> many alpha_num) =>
implode % String.lowercase >>= function
| s when List.mem s reserved -> mzero
| s -> return s
let digits = spaces >> many1 digit => implode
let integer = digits => int_of_string
let number = digits => num_of_string
let add = token "+" >> return (fun x y -> AddExp(x, y))
let sub = token "-" >> return (fun x y -> SubExp(x, y))
let mul = token "*" >> return (fun x y -> MulExp(x, y))
let div = token "/" >> return (fun x y -> DivExp(x, y))
let pos = token "+" >> return (fun x -> PosExp(x))
let neg = token "-" >> return (fun x -> NegExp(x))
let rec expr input = (chainl1 term (add <|> sub)) input
and term input = (chainl1 value (mul <|> div)) input
and unary input = ((pos <|> neg) >>= fun op -> num_val => fun x -> op x) input
and value input = (unary <|> call_val <|> num_val <|> parens expr) input
and call_val input = (call => fun c -> Call c) input
and num_val input = (number => fun x -> Number x) input
and args input = (many (bracks expr)) input
and call input = (ident >>= fun fn -> args => fun args -> (fn, args)) input
let func_1 = expr => fun x -> [|x|]
let func_n =
kwd "function" >>
kwd "of" >>
integer >>= fun argc ->
token ":" >>
comma_list expr >>= fun argv ->
let args = Array.of_list argv in
if argc + 1 <> Array.length args then mzero else return args
let func = func_1 <|> func_n
let decl =
ident >>= fun name ->
kwd "is" >>
func >>= fun func ->
token "." >>
return (Decl (name, func))
let pair = expr >>= fun rhs -> kwd "to" >> ident => fun lhs -> (lhs, rhs)
let assign_impl = kwd "assign" >> sep_by1 pair (kwd "and") << token "!"
let assign = assign_impl => fun x -> Assign(x)
let loop =
kwd "do" >>
braces expr >>= fun times ->
assign_impl >>= fun body ->
return (Loop(times, body))
let queries = sep_by1 call (kwd "and")
let ask = kwd "what" >> kwd "is" >> queries << token "?" => fun q -> Ask(q)
let program = many (decl <|> assign <|> loop <|> ask)
let parser = parse program
(* eval *)
let rec evlis env l =
List.iter (function
| Decl (name, func) -> eval_decl env name func
| Assign pairs -> eval_assign env pairs
| Loop (times, body) -> eval_loop env times body
| Ask queries -> eval_ask env queries
) l
and eval_decl env name func =
let value = eval_func env func in
Hashtbl.replace env name value
and eval_assign env pairs =
List.iter (function (name, exp) ->
let value = eval_exp env exp in
Hashtbl.replace env name value
) pairs
and eval_loop env times body =
match eval_exp env times with
| NumVal n when is_integer_num n ->
let times' = int_of_num n in
for i = 1 to times' do
eval_assign env body
done
| _ -> raise Runtime_error
and eval_ask env queries =
List.iter (function query ->
let value = eval_call env query in
value |> string_of_value |> print_endline
) queries
and string_of_value v =
match v with
| NumVal n -> string_of_num n
| FuncVal (k0, ki) ->
(ki @ [k0]) |> List.map string_of_num |> String.concat ", "
and eval_func env f =
match f with
| [|k0|] -> eval_exp env k0
| _ ->
let f' = Array.map (eval_num env) f in
let k0 = f'.(Array.length f' - 1) in
let ki = Array.sub f' 0 (Array.length f' - 1) |> Array.to_list in
FuncVal (k0, ki)
and eval_num env exp =
match eval_exp env exp with
| NumVal n -> n
| _ -> raise Runtime_error
and binary_op f l r =
match (l, r) with
| (NumVal l, NumVal r) -> NumVal (f l r)
| _ -> raise Runtime_error
and unary_op f e =
match e with
| NumVal e -> NumVal (f e)
| _ -> raise Runtime_error
and ( +++ ) l r = binary_op ( +/ ) l r
and ( --- ) l r = binary_op ( -/ ) l r
and ( *** ) l r = binary_op ( */ ) l r
and ( /// ) l r = binary_op ( // ) l r
and eval_exp env exp =
match exp with
| AddExp (l, r) -> (eval_exp env l) +++ (eval_exp env r)
| SubExp (l, r) -> (eval_exp env l) --- (eval_exp env r)
| MulExp (l, r) -> (eval_exp env l) *** (eval_exp env r)
| DivExp (l, r) -> (eval_exp env l) /// (eval_exp env r)
| PosExp e -> unary_op (function x -> x) (eval_exp env e)
| NegExp e -> unary_op minus_num (eval_exp env e)
| Number n -> NumVal n
| Call c -> eval_call env c
and eval_call env (name, args) =
let value = Hashtbl.find env name in
match value with
| NumVal n when args = [] -> value
| NumVal _ -> raise Runtime_error
| FuncVal (k0, ki) ->
let args' = List.map (eval_num env) args in
let f' = List.fold_left apply (k0, ki) args' in
match f' with
| (k0, []) -> NumVal k0
| (k0, ki) -> FuncVal (k0, ki)
and apply (k0, ki) x =
match ki with
| k :: rest -> (k0 +/ x */ k, rest)
| _ -> raise Runtime_error
and num_of_value v =
match v with
| NumVal n -> n
| _ -> raise Runtime_error
and make_env () = Hashtbl.create 10
(* parse & eval *)
let rec run src =
match parser src with
| None -> raise Syntax_error
| Some ast ->
let env = make_env () in
evlis env ast
and run_of_channel channel =
channel |> LazyStream.of_channel |> run
and run_of_string str =
str |> LazyStream.of_string |> run
let () = run_of_channel stdin
| null | https://raw.githubusercontent.com/pyrocat101/opal/ac495a4fc141cf843da74d223baecca47324acd4/examples/intuitive.ml | ocaml | ----------------------------- opal.ml START ------------------------------
------------------------------ opal.ml END -------------------------------
rational number
a c ac
- * - = --
b d bd
a c ad
- / - = --
b d bc
interpreter
parser
eval
parse & eval |
* Intuitive Language - Hackerrank FP Contest Challenge :
* -calculi-jun14/challenges/intuitive-language
*
* The language is case - INSENSITIVE !
*
* letter : : = [ a - zA - Z ]
* ident : : = < letter > ( < digit > | < letter > ) *
*
* kwd : : = function | is | of | assign | and | to | do | what
* int : : = < digit>+
* num : : = < int > [ / < int > ]
* var : : = < ident >
* func : : = function of < int > : < exp > ( , < exp > ) *
* | < exp >
* : : = < var > is < func > .
*
* assn : : = Assign < exp > to < var > ( AND < exp > to < var > ) * !
*
* loop : : = do { < exp > } < assn >
*
* ask : : = what is ( < call > ) ( AND < call > ) * ?
*
* exp : : = < term > ( ( + | - ) < exp > ) ?
* term : : = < value > ( ( * | / ) < term > ) ?
* value : : = [ + | - ] < num > | < call > | \ ( exp \ )
* call : : = < var > ( \ [ < exp > \ ] ) *
*
* program : : = ( decl | assn | loop | ask ) *
*
* Intuitive Language - Hackerrank FP Contest Challenge:
* -calculi-jun14/challenges/intuitive-language
*
* The language is case-INSENSITIVE!
*
* letter ::= [a-zA-Z]
* ident ::= <letter> ( <digit> | <letter> )*
*
* kwd ::= function | is | of | assign | and | to | do | what
* int ::= <digit>+
* num ::= <int> [ / <int> ]
* var ::= <ident>
* func ::= function of <int> : <exp> (, <exp>)*
* | <exp>
* decl ::= <var> is <func> .
*
* assn ::= Assign <exp> to <var> ( AND <exp> to <var> )* !
*
* loop ::= do { <exp> } <assn>
*
* ask ::= what is ( <call> ) ( AND <call> )* ?
*
* exp ::= <term> ( ( + | - ) <exp> )?
* term ::= <value> ( ( * | / ) <term> )?
* value ::= [+ | -] <num> | <call> | \( exp \)
* call ::= <var> ( \[ <exp> \] )*
*
* program ::= ( decl | assn | loop | ask )*
*
*)
module LazyStream = struct
type 'a t = Cons of 'a * 'a t Lazy.t | Nil
let of_stream stream =
let rec next stream =
try Cons(Stream.next stream, lazy (next stream))
with Stream.Failure -> Nil
in
next stream
let of_string str = str |> Stream.of_string |> of_stream
let of_channel ic = ic |> Stream.of_channel |> of_stream
let of_function f =
let rec next f =
match f () with
| Some x -> Cons(x, lazy (next f))
| None -> Nil
in
next f
end
let implode l = String.concat "" (List.map (String.make 1) l)
let explode s =
let l = ref [] in
String.iter (fun c -> l := c :: !l) s;
List.rev !l
let (%) f g = fun x -> g (f x)
type 'token input = 'token LazyStream.t
type ('token, 'result) parser = 'token input -> ('result * 'token input) option
let parse parser input =
match parser input with
| Some(res, _) -> Some res
| None -> None
let return x input = Some(x, input)
let (>>=) x f =
fun input ->
match x input with
| Some(result', input') -> f result' input'
| None -> None
let (<|>) x y =
fun input ->
match x input with
| Some _ as ret -> ret
| None -> y input
let rec scan x input =
match x input with
| Some(result', input') -> LazyStream.Cons(result', lazy (scan x input'))
| None -> LazyStream.Nil
let mzero _ = None
let any = function
| LazyStream.Cons(token, input') -> Some(token, Lazy.force input')
| LazyStream.Nil -> None
let satisfy test = any >>= (fun res -> if test res then return res else mzero)
let eof x = function LazyStream.Nil -> Some(x, LazyStream.Nil) | _ -> None
let (=>) x f = x >>= fun r -> return (f r)
let (>>) x y = x >>= fun _ -> y
let (<<) x y = x >>= fun r -> y >>= fun _ -> return r
let (<~>) x xs = x >>= fun r -> xs >>= fun rs -> return (r :: rs)
let rec choice = function [] -> mzero | h :: t -> (h <|> choice t)
let rec count n x = if n > 0 then x <~> count (n - 1) x else return []
let between op ed x = op >> x << ed
let option default x = x <|> return default
let optional x = option () (x >> return ())
let rec skip_many x = option () (x >>= fun _ -> skip_many x)
let skip_many1 x = x >> skip_many x
let rec many x = option [] (x >>= fun r -> many x >>= fun rs -> return (r :: rs))
let many1 x = x <~> many x
let sep_by1 x sep = x <~> many (sep >> x)
let sep_by x sep = sep_by1 x sep <|> return []
let end_by1 x sep = sep_by1 x sep << sep
let end_by x sep = end_by1 x sep <|> return []
let chainl1 x op =
let rec loop a = (op >>= fun f -> x >>= fun b -> loop (f a b)) <|> return a in
x >>= loop
let chainl x op default = chainl1 x op <|> return default
let rec chainr1 x op =
x >>= fun a -> (op >>= fun f -> chainr1 x op >>= f a) <|> return a
let chainr x op default = chainr1 x op <|> return default
let exactly x = satisfy ((=) x)
let one_of l = satisfy (fun x -> List.mem x l)
let none_of l = satisfy (fun x -> not (List.mem l x))
(* Token within the inclusive range [l, r]. *)
let range l r = satisfy (fun x -> l <= x && x <= r)
(* Single whitespace character. *)
let space = one_of [' '; '\t'; '\r'; '\n']
(* Zero or more whitespace characters, discarded. *)
let spaces = skip_many space
let newline = exactly '\n'
let tab = exactly '\t'
let upper = range 'A' 'Z'
let lower = range 'a' 'z'
let digit = range '0' '9'
let letter = lower <|> upper
let alpha_num = letter <|> digit
(* Hex letters only; combine with [digit] for a full hexadecimal digit. *)
let hex_digit = range 'a' 'f' <|> range 'A' 'F'
let oct_digit = range '0' '7'
(* Skip leading whitespace before running [x]. *)
let lexeme x = spaces >> x
(* Match the literal string [s], after skipping leading whitespace. *)
let token s =
  let rec loop s i =
    if i >= String.length s
    then return s
    else exactly s.[i] >> loop s (i + 1)
  in
  lexeme (loop s 0)
(* Rational numbers backed by machine ints, kept in lowest terms. *)
type num = Ratio of int * int

(* Parse "a/b" or a bare integer "a" into a reduced Ratio. *)
let rec num_of_string s =
  if String.contains s '/' then
    let len = String.length s in
    let delim = String.index s '/' in
    let numer = String.sub s 0 delim
    and denom = String.sub s (delim + 1) (len - delim - 1) in
    Ratio (int_of_string numer, int_of_string denom) |> simplify
  else
    Ratio ((int_of_string s), 1) |> simplify
(* -1, 0 or 1 according to the sign of x. *)
and sign x =
  if x < 0 then
    -1
  else if x = 0 then
    0
  else
    1
(* Render as "n" for integers, "n/d" otherwise. *)
and string_of_num (Ratio (numer, denom)) =
  if denom = 1 then
    string_of_int numer
  else
    Format.sprintf "%s/%s" (string_of_int numer) (string_of_int denom)
(* Reduce to lowest terms, the sign carried by the numerator.
   A zero numerator OR denominator collapses to 0/1. *)
and simplify (Ratio (numer, denom)) =
  if numer = 0 || denom = 0 then
    Ratio (0, 1)
  else
    let sign = (sign numer) * (sign denom) in
    let numer = abs numer in
    let denom = abs denom in
    let divisor = gcd numer denom in
    Ratio (sign * numer / divisor, denom / divisor)
(* Euclid's algorithm; gcd 0 b = b. *)
and gcd a b =
  if a = 0 then b
  else if b = 0 then a
  else if a > b then gcd b (a mod b)
  else gcd a (b mod a)
(*
   a   c   a*d + b*c
   - + - = ---------
   b   d      b*d
*)
(* Rational addition: a/b + c/d = (a*d + c*b)/(b*d), reduced. *)
let ( +/ ) (Ratio (n1, d1)) (Ratio (n2, d2)) =
  simplify (Ratio (n1 * d2 + n2 * d1, d1 * d2))
(*
   a   c   a*d - b*c
   - - - = ---------
   b   d      b*d
*)
(* Rational subtraction: a/b - c/d = (ad - bc)/(bd), reduced. *)
let ( -/ ) (Ratio (a, b)) (Ratio (c, d)) =
  Ratio (a * d - b * c, b * d) |> simplify
(* Rational multiplication. *)
let ( */ ) (Ratio (a, b)) (Ratio (c, d)) =
  Ratio (a * c, b * d) |> simplify
(* Rational division: multiply by the reciprocal. *)
let ( // ) (Ratio (a, b)) (Ratio (c, d)) =
  Ratio (a * d, b * c) |> simplify
(* Unary negation. *)
let minus_num (Ratio (a, b)) = Ratio (-a, b)
(* True when the (reduced) denominator is 1. *)
let is_integer_num (Ratio (a, b)) = b = 1
let sign_num (Ratio (a, b)) = sign a
(* Truncating conversion to int. *)
let int_of_num (Ratio(a, b)) = a / b
(* Abstract syntax of the little expression language. *)
type exp = AddExp of exp * exp
         | SubExp of exp * exp
         | MulExp of exp * exp
         | DivExp of exp * exp
         | PosExp of exp
         | NegExp of exp
         | Number of num
         | Call of call
(* A function is its coefficient expressions [|k_n; ...; k_1; k_0|]. *)
and func = exp array
(* A call is a name plus actual-argument expressions. *)
and call = string * exp list
(* Parallel assignment: (variable, expression) pairs. *)
and assign = (string * exp) list
and stmt = Decl of string * func
         | Assign of assign
         | Loop of exp * assign
         | Ask of call list
type program = stmt list
(* Runtime values: plain numbers, or linear functions k0 + sum(ki * xi). *)
type value = NumVal of num
           | FuncVal of num * num list
exception Syntax_error
exception Runtime_error

(* Case-insensitive keyword, with leading whitespace skipped. *)
let kwd s =
  let rec loop s i =
    if i >= String.length s
    then return s
    else satisfy (fun c -> Char.lowercase c = s.[i]) >> loop s (i + 1)
  in
  lexeme (loop s 0)
(* One or more [x] separated by commas. *)
let comma_list x = sep_by1 x (token ",")
let parens = between (token "(") (token ")")
let bracks = between (token "[") (token "]")
let braces = between (token "{") (token "}")
(* Keywords that may not be used as identifiers. *)
let reserved = ["function"; "is"; "of"; "assign"; "and"; "to"; "do"; "what"]
(* Identifier: a letter followed by alphanumerics, lowercased; keywords are
   rejected so the statement parsers can backtrack. *)
let ident = (spaces >> letter <~> many alpha_num) =>
  implode % String.lowercase >>= function
  | s when List.mem s reserved -> mzero
  | s -> return s
let digits = spaces >> many1 digit => implode
let integer = digits => int_of_string
let number = digits => num_of_string
(* Operator parsers yielding AST constructors. *)
let add = token "+" >> return (fun x y -> AddExp(x, y))
let sub = token "-" >> return (fun x y -> SubExp(x, y))
let mul = token "*" >> return (fun x y -> MulExp(x, y))
let div = token "/" >> return (fun x y -> DivExp(x, y))
let pos = token "+" >> return (fun x -> PosExp(x))
let neg = token "-" >> return (fun x -> NegExp(x))

(* Expression grammar with the usual precedence:
     expr := term (("+" | "-") term)*     left associative
     term := value (("*" | "/") value)*   left associative *)
let rec expr input = (chainl1 term (add <|> sub)) input
and term input = (chainl1 value (mul <|> div)) input
and unary input = ((pos <|> neg) >>= fun op -> num_val => fun x -> op x) input
and value input = (unary <|> call_val <|> num_val <|> parens expr) input
and call_val input = (call => fun c -> Call c) input
and num_val input = (number => fun x -> Number x) input
(* Arguments are bracketed expressions: f[x][y]... *)
and args input = (many (bracks expr)) input
and call input = (ident >>= fun fn -> args => fun args -> (fn, args)) input

(* A single-coefficient "function" is just a bare expression. *)
let func_1 = expr => fun x -> [|x|]
(* "function of N: kN, ..., k1, k0" -- requires exactly N+1 coefficients. *)
let func_n =
  kwd "function" >>
  kwd "of" >>
  integer >>= fun argc ->
  token ":" >>
  comma_list expr >>= fun argv ->
  let args = Array.of_list argv in
  if argc + 1 <> Array.length args then mzero else return args
let func = func_1 <|> func_n
(* Declaration: "<name> is <func>." *)
let decl =
  ident >>= fun name ->
  kwd "is" >>
  func >>= fun func ->
  token "." >>
  return (Decl (name, func))
(* One assignment pair: "<expr> to <name>". *)
let pair = expr >>= fun rhs -> kwd "to" >> ident => fun lhs -> (lhs, rhs)
(* "assign <pair> and <pair> ... !" *)
let assign_impl = kwd "assign" >> sep_by1 pair (kwd "and") << token "!"
let assign = assign_impl => fun x -> Assign(x)
(* "do {<times>} assign ... !" *)
let loop =
  kwd "do" >>
  braces expr >>= fun times ->
  assign_impl >>= fun body ->
  return (Loop(times, body))
let queries = sep_by1 call (kwd "and")
(* "what is <call> and <call> ... ?" *)
let ask = kwd "what" >> kwd "is" >> queries << token "?" => fun q -> Ask(q)
let program = many (decl <|> assign <|> loop <|> ask)
let parser = parse program
(* Execute a statement list against the mutable environment [env]
   (a name -> value hash table). *)
let rec evlis env l =
  List.iter (function
    | Decl (name, func) -> eval_decl env name func
    | Assign pairs -> eval_assign env pairs
    | Loop (times, body) -> eval_loop env times body
    | Ask queries -> eval_ask env queries
  ) l
(* Bind [name] to the evaluated function (or number). *)
and eval_decl env name func =
  let value = eval_func env func in
  Hashtbl.replace env name value
and eval_assign env pairs =
  List.iter (function (name, exp) ->
    let value = eval_exp env exp in
    Hashtbl.replace env name value
  ) pairs
(* Repeat the assignment body [times] times; the count must evaluate to an
   integer NumVal. *)
and eval_loop env times body =
  match eval_exp env times with
  | NumVal n when is_integer_num n ->
    let times' = int_of_num n in
    for i = 1 to times' do
      eval_assign env body
    done
  | _ -> raise Runtime_error
(* Print the value of each query on its own line. *)
and eval_ask env queries =
  List.iter (function query ->
    let value = eval_call env query in
    value |> string_of_value |> print_endline
  ) queries
and string_of_value v =
  match v with
  | NumVal n -> string_of_num n
  | FuncVal (k0, ki) ->
    (* coefficients printed as "k_n, ..., k_1, k_0" *)
    (ki @ [k0]) |> List.map string_of_num |> String.concat ", "
(* A single-coefficient function evaluates directly to its value; otherwise
   build a FuncVal of the constant term k0 plus the remaining coefficients. *)
and eval_func env f =
  match f with
  | [|k0|] -> eval_exp env k0
  | _ ->
    let f' = Array.map (eval_num env) f in
    let k0 = f'.(Array.length f' - 1) in
    let ki = Array.sub f' 0 (Array.length f' - 1) |> Array.to_list in
    FuncVal (k0, ki)
(* Evaluate [exp] and require a numeric result. *)
and eval_num env exp =
  match eval_exp env exp with
  | NumVal n -> n
  | _ -> raise Runtime_error
(* Lift rational operators to values; non-numeric operands are errors. *)
and binary_op f l r =
  match (l, r) with
  | (NumVal l, NumVal r) -> NumVal (f l r)
  | _ -> raise Runtime_error
and unary_op f e =
  match e with
  | NumVal e -> NumVal (f e)
  | _ -> raise Runtime_error
and ( +++ ) l r = binary_op ( +/ ) l r
and ( --- ) l r = binary_op ( -/ ) l r
and ( *** ) l r = binary_op ( */ ) l r
and ( /// ) l r = binary_op ( // ) l r
and eval_exp env exp =
  match exp with
  | AddExp (l, r) -> (eval_exp env l) +++ (eval_exp env r)
  | SubExp (l, r) -> (eval_exp env l) --- (eval_exp env r)
  | MulExp (l, r) -> (eval_exp env l) *** (eval_exp env r)
  | DivExp (l, r) -> (eval_exp env l) /// (eval_exp env r)
  | PosExp e -> unary_op (function x -> x) (eval_exp env e)
  | NegExp e -> unary_op minus_num (eval_exp env e)
  | Number n -> NumVal n
  | Call c -> eval_call env c
(* Look up [name]; fold the evaluated arguments into the stored linear
   function via [apply].  A fully applied function yields a NumVal. *)
and eval_call env (name, args) =
  let value = Hashtbl.find env name in
  match value with
  | NumVal n when args = [] -> value
  | NumVal _ -> raise Runtime_error
  | FuncVal (k0, ki) ->
    let args' = List.map (eval_num env) args in
    let f' = List.fold_left apply (k0, ki) args' in
    match f' with
    | (k0, []) -> NumVal k0
    | (k0, ki) -> FuncVal (k0, ki)
(* Consume one coefficient: k0' = k0 + x*k.  Too many arguments is an error. *)
and apply (k0, ki) x =
  match ki with
  | k :: rest -> (k0 +/ x */ k, rest)
  | _ -> raise Runtime_error
and num_of_value v =
  match v with
  | NumVal n -> n
  | _ -> raise Runtime_error
and make_env () = Hashtbl.create 10

(* Parse then evaluate; Syntax_error if the parser fails. *)
let rec run src =
  match parser src with
  | None -> raise Syntax_error
  | Some ast ->
    let env = make_env () in
    evlis env ast
and run_of_channel channel =
  channel |> LazyStream.of_channel |> run
and run_of_string str =
  str |> LazyStream.of_string |> run

(* Entry point: interpret the program arriving on stdin. *)
let () = run_of_channel stdin
|
d9e6e0c60d8f6372b2a8fe80660faa4d4671427988dd7960315eeb141db73386 | mzp/min-caml | simm.ml | let f x =
x
| null | https://raw.githubusercontent.com/mzp/min-caml/cf1180792a2a0bc895ba17a0052e3b6f4a74e444/arch/llvm/simm.ml | ocaml | let f x =
x
| |
5459c4bf36b26595b0039756d367778101112143168029aaae84daa3506149a5 | mbutterick/sugar | include.rkt | #lang racket/base
(require (for-syntax racket/base
syntax/path-spec
racket/private/increader
compiler/cm-accomplice
racket/match racket/function)
"../define.rkt")
(provide+safe include-without-lang-line)
;; Core include machinery: read the target file, strip any #lang module
;; wrapper, and splice the file's top-level expressions into the macro use
;; site with the use site's lexical context.
;; Fixes: several ";;" comment markers had been stripped (leaving bare
;; English words as unbound identifiers inside the macro body), and the
;; (define content-syntax ...) binding referenced below was missing.
(define-syntax (do-include stx)
  (syntax-case stx ()
    [(_ orig-stx ctx loc fn reader)
     ;; Parse the file name
     (let ([orig-c-file (resolve-path-spec (syntax fn) (syntax loc) (syntax orig-stx))]
           [ctx (syntax ctx)]
           [loc (syntax loc)]
           [reader (syntax reader)]
           [orig-stx (syntax orig-stx)]
           [rkt->ss (lambda (p)
                      (let ([b (path->bytes p)])
                        (if (regexp-match? #rx#"[.]rkt$" b)
                            (path-replace-suffix p #".ss")
                            p)))])
       ;; Fall back to the legacy ".ss" name when the ".rkt" file is absent.
       (let ([c-file (if (file-exists? orig-c-file)
                         orig-c-file
                         (let ([p2 (rkt->ss orig-c-file)])
                           (if (file-exists? p2)
                               p2
                               orig-c-file)))])
         (register-external-file c-file)
         (let ([read-syntax (if (syntax-e reader)
                                (reader-val
                                 (let loop ([e (syntax->datum
                                                (local-expand reader 'expression null))])
                                   (cond
                                     [(reader? e) e]
                                     [(pair? e) (or (loop (car e))
                                                    (loop (cdr e)))]
                                     [else #f])))
                                (lambda (src in)
                                  (parameterize ([read-accept-reader #t])
                                    (read-syntax src in))))])
           (unless (and (procedure? read-syntax)
                        (procedure-arity-includes? read-syntax 2))
             (raise-syntax-error
              #f
              "reader is not a procedure of two arguments"
              orig-stx))
           ;; Open the included file
           (let ([p (with-handlers ([exn:fail?
                                     (lambda (exn)
                                       (raise-syntax-error
                                        #f
                                        (format
                                         "can't open include file (~a)"
                                         (if (exn? exn)
                                             (exn-message exn)
                                             exn))
                                        orig-stx
                                        c-file))])
                      (open-input-file c-file))])
             (port-count-lines! p)
             ;; Read expressions from file
             (let ([content
                    (let loop ()
                      (let ([r (with-handlers ([exn:fail?
                                                (lambda (exn)
                                                  (close-input-port p)
                                                  (raise-syntax-error
                                                   #f
                                                   (format
                                                    "read error (~a)"
                                                    (if (exn? exn)
                                                        (exn-message exn)
                                                        exn))
                                                   orig-stx))])
                                 (read-syntax c-file p))])
                        (if (eof-object? r)
                            null
                            (cons r (loop)))))])
               ;; Here's where we'll separate the content of the file from the #lang line.
               ;; the resulting material will be stored in 'content-guts'.
               ;; 'content' is a list of syntax objects from the source file.
               ;; Each object corresponds to a top-level expression in the file, converted to syntax.
               ;; If the file has a #lang line, there's only one expression (because the #lang expands to a single `module` form).
               ;; If it doesn't, then there are an indefinite number of expressions.
               ;; So we'll handle both types with a match.
               (define content-guts
                 (cond
                   [(not (null? content))
                    ;; save the first syntax object (its context will be needed momentarily)
                    (define content-syntax (car content))
                    ;; peel the wrapper off the file. it will come in like so:
                    ;; (module foo whatever/lang (#%module-begin expr ...))
                    ;; the guts are the (expr ...); we don't have `fourth` in the
                    ;; syntax environment, so destructure with `match` instead.
                    ;; get the guts and package them back into a syntax object
                    ;; using the saved content-syntax as context.
                    (define guts-data (match (map syntax->datum content)
                                        [(list (list 'module modname lang (list '#%module-begin exprs ...))) exprs]
                                        [(list exprs ...) exprs]))
                    (map (curry datum->syntax content-syntax) guts-data)]
                   [else null]))
               (close-input-port p)
               ;; Preserve src info for content, but set its
               ;; lexical context to be that of the include expression
               (let ([lexed-content
                      (let loop ([content content-guts]) ;; start with the new content-guts
                        (cond
                          [(pair? content)
                           (cons (loop (car content))
                                 (loop (cdr content)))]
                          [(null? content) null]
                          [else
                           (let ([v (syntax-e content)])
                             (datum->syntax
                              ctx
                              (cond
                                [(pair? v)
                                 (loop v)]
                                [(vector? v)
                                 (list->vector (loop (vector->list v)))]
                                [(box? v)
                                 (box (loop (unbox v)))]
                                [else
                                 v])
                              content
                              content))]))])
                 (datum->syntax
                  (quote-syntax here)
                  `(begin ,@lexed-content)
                  orig-stx)))))))]))
;; Public entry point: include a file's top-level forms, discarding any
;; #lang module wrapper.  Expands to a do-include call whose context,
;; location, and original-syntax arguments are all this use site.
(define-syntax (include-without-lang-line stx)
  (syntax-case stx ()
    [(_ path-spec)
     (with-syntax ([whole-stx stx])
       (syntax/loc stx
         (do-include whole-stx whole-stx whole-stx path-spec #f)))]))
| null | https://raw.githubusercontent.com/mbutterick/sugar/990b0b589274a36a58e27197e771500c5898b5a2/sugar/unstable/include.rkt | racket | Open the included file
Read expressions from file
Here's where we'll separate the content of the file from the #lang line.
the resulting material will be stored in 'content-guts'.
'content' is a list of syntax objects from the source file.
Each object corresponds to a top-level expression in the file, converted to syntax.
If it doesn't, then there are an indefinite number of expressions.
So we'll handle both types with a match.
peel the wrapper off the file. it will come in like so
get the guts and package them back into a syntax object using the saved content-syntax as context.
Preserve src info for content, but set its
lexical context to be that of the include expression
start with the new content-guts | #lang racket/base
(require (for-syntax racket/base
syntax/path-spec
racket/private/increader
compiler/cm-accomplice
racket/match racket/function)
"../define.rkt")
(provide+safe include-without-lang-line)
(define-syntax (do-include stx)
(syntax-case stx ()
[(_ orig-stx ctx loc fn reader)
Parse the file name
(let ([orig-c-file (resolve-path-spec (syntax fn) (syntax loc) (syntax orig-stx))]
[ctx (syntax ctx)]
[loc (syntax loc)]
[reader (syntax reader)]
[orig-stx (syntax orig-stx)]
[rkt->ss (lambda (p)
(let ([b (path->bytes p)])
(if (regexp-match? #rx#"[.]rkt$" b)
(path-replace-suffix p #".ss")
p)))])
(let ([c-file (if (file-exists? orig-c-file)
orig-c-file
(let ([p2 (rkt->ss orig-c-file)])
(if (file-exists? p2)
p2
orig-c-file)))])
(register-external-file c-file)
(let ([read-syntax (if (syntax-e reader)
(reader-val
(let loop ([e (syntax->datum
(local-expand reader 'expression null))])
(cond
[(reader? e) e]
[(pair? e) (or (loop (car e))
(loop (cdr e)))]
[else #f])))
(lambda (src in)
(parameterize ([read-accept-reader #t])
(read-syntax src in))))])
(unless (and (procedure? read-syntax)
(procedure-arity-includes? read-syntax 2))
(raise-syntax-error
#f
"reader is not a procedure of two arguments"
orig-stx))
(let ([p (with-handlers ([exn:fail?
(lambda (exn)
(raise-syntax-error
#f
(format
"can't open include file (~a)"
(if (exn? exn)
(exn-message exn)
exn))
orig-stx
c-file))])
(open-input-file c-file))])
(port-count-lines! p)
(let ([content
(let loop ()
(let ([r (with-handlers ([exn:fail?
(lambda (exn)
(close-input-port p)
(raise-syntax-error
#f
(format
"read error (~a)"
(if (exn? exn)
(exn-message exn)
exn))
orig-stx))])
(read-syntax c-file p))])
(if (eof-object? r)
null
(cons r (loop)))))])
If the file has a # lang line , there 's only one expression ( because the # lang expands to a single ` module ` form ) .
(define content-guts
(cond
[(not (null? content))
save the first syntax object ( its context will be needed momentarily )
( module foo whatever / lang ( # % module - begin expr ... ) )
the guts are the ( expr ... ) . To get them , we want the cdr of the fourth element .
we do n't have ` fourth ` in the syntax environment .
(define guts-data (match (map syntax->datum content)
[(list (list 'module modname lang (list '#%module-begin exprs ...))) exprs]
[(list exprs ...) exprs]))
(map (curry datum->syntax content-syntax) guts-data)]
[else null]))
(close-input-port p)
(let ([lexed-content
(cond
[(pair? content)
(cons (loop (car content))
(loop (cdr content)))]
[(null? content) null]
[else
(let ([v (syntax-e content)])
(datum->syntax
ctx
(cond
[(pair? v)
(loop v)]
[(vector? v)
(list->vector (loop (vector->list v)))]
[(box? v)
(box (loop (unbox v)))]
[else
v])
content
content))]))])
*
(datum->syntax
(quote-syntax here)
`(begin ,@lexed-content)
orig-stx)))))))]))
(define-syntax (include-without-lang-line stx)
(syntax-case stx ()
[(_ fn)
(with-syntax ([_stx stx])
(syntax/loc stx (do-include _stx _stx _stx fn #f)))]))
|
8b4255855353f979596ee712c230fcaa9d65f90035727bdd09789ec54cf9a336 | dmitryvk/cl-gtk2 | gtk.tree-model.lisp | (in-package :gtk)
;; GObject interface vtable for GtkTreeModel.  Lisp subclasses supply the
;; behavior by specializing the corresponding tree-model-*-impl generic
;; functions (see the array-list-store / tree-lisp-store methods below).
(define-vtable ("GtkTreeModel" tree-model)
  (:skip parent-instance g-type-interface)
  ;;some signals
  (:skip tree-model-row-changed :pointer)
  (:skip tree-model-row-inserted :pointer)
  (:skip tree-model-row-has-child-toggled :pointer)
  (:skip tree-model-row-deleted :pointer)
  (:skip tree-model-rows-reordered :pointer)
  ;;methods
  (get-flags (tree-model-flags (tree-model g-object)))
  (get-n-columns (:int (tree-model g-object)))
  (get-column-type (g-type-designator
                    (tree-model g-object)
                    (index :int)))
  (get-iter (:boolean
             (tree-model g-object)
             (iter (g-boxed-foreign tree-iter))
             (path (g-boxed-foreign tree-path))))
  (get-path ((g-boxed-foreign tree-path :return)
             (tree-model g-object)
             (iter (g-boxed-foreign tree-iter))))
  ;; get-value marshals the impl's (values value type) into the caller's
  ;; GValue out-parameter.
  (get-value (:void
              (tree-model g-object)
              (iter (g-boxed-foreign tree-iter))
              (n :int)
              (value (:pointer g-value)))
             :impl-call
             ((tree-model iter n)
              (multiple-value-bind (v type) (tree-model-get-value-impl tree-model iter n)
                (set-g-value value v type))))
  (iter-next (:boolean
              (tree-model g-object)
              (iter (g-boxed-foreign tree-iter))))
  (iter-children (:boolean
                  (tree-model g-object)
                  (iter (g-boxed-foreign tree-iter))
                  (parent (g-boxed-foreign tree-iter))))
  (iter-has-child (:boolean
                   (tree-model g-object)
                   (iter (g-boxed-foreign tree-iter))))
  (iter-n-children (:int
                    (tree-model g-object)
                    (iter (g-boxed-foreign tree-iter))))
  (iter-nth-child (:boolean
                   (tree-model g-object)
                   (iter (g-boxed-foreign tree-iter))
                   (parent (g-boxed-foreign tree-iter))
                   (n :int)))
  (iter-parent (:boolean
                (tree-model g-object)
                (iter (g-boxed-foreign tree-iter))
                (child (g-boxed-foreign tree-iter))))
  (ref-node (:void
             (tree-model g-object)
             (iter (g-boxed-foreign tree-iter))))
  (unref-node (:void
               (tree-model g-object)
               (iter (g-boxed-foreign tree-iter)))))
;; GObject interface vtable for GtkTreeSortable.
(define-vtable ("GtkTreeSortable" tree-sortable)
  (:skip parent-instance g-type-interface)
  ;; signal
  (:skip sort-columns-changed :pointer)
  ;; methods
  ;; get-sort-column-id: the impl returns (values sorted-p column-id order);
  ;; the C out-parameters are written only when the caller passed non-null
  ;; pointers for them.
  (get-sort-column-id
   (:boolean (sortable (g-object tree-sortable))
             (sort-column-id (:pointer :int))
             (order (:pointer sort-type)))
   :impl-call ((sortable)
               (multiple-value-bind (sorted-p r-sort-column-id r-order) (tree-sortable-get-sort-column-id-impl sortable)
                 (unless (null-pointer-p sort-column-id)
                   (setf (mem-ref sort-column-id :int) r-sort-column-id))
                 (unless (null-pointer-p order)
                   (setf (mem-ref order 'sort-type) r-order))
                 sorted-p)))
  (set-sort-column-id (:void (sortable (g-object tree-sortable)) (sort-column-id :int) (order sort-type)))
  (set-sort-func (:void (sortable (g-object tree-sortable)) (sort-column-id :int) (func :pointer) (data :pointer) (destroy-notify :pointer)))
  (set-default-sort-func (:void (sortable (g-object tree-sortable)) (func :pointer) (data :pointer) (destroy-notify :pointer)))
  (has-default-sort-func (:boolean (sortable (g-object tree-sortable)))))
;; GtkTreeModelSort: conversion of paths/iters between the sorted model and
;; its underlying child model.

(defcfun (tree-model-sort-convert-child-path-to-path "gtk_tree_model_sort_convert_child_path_to_path") (g-boxed-foreign tree-path :return)
  (tree-model-sort (g-object tree-model-sort))
  (child-path (g-boxed-foreign tree-path)))

(export 'tree-model-sort-convert-child-path-to-path)

(defcfun gtk-tree-model-sort-convert-child-iter-to-iter :boolean
  (tree-model-sort (g-object tree-model-sort))
  (sort-iter (g-boxed-foreign tree-iter))
  (child-iter (g-boxed-foreign tree-iter)))

;; Returns the iter in the sorted model corresponding to CHILD-ITER, or NIL
;; when the C call reports failure.
(defun tree-model-sort-convert-child-iter-to-iter (tree-model-sort child-iter)
  (let ((sort-iter (make-tree-iter)))
    (when (gtk-tree-model-sort-convert-child-iter-to-iter tree-model-sort sort-iter child-iter)
      sort-iter)))

(export 'tree-model-sort-convert-child-iter-to-iter)

(defcfun (tree-model-sort-convert-path-to-child-path "gtk_tree_model_sort_convert_path_to_child_path") (g-boxed-foreign tree-path :return)
  (tree-model-sort (g-object tree-model-sort))
  (sort-path (g-boxed-foreign tree-path)))

(export 'tree-model-sort-convert-path-to-child-path)

(defcfun gtk-tree-model-sort-convert-iter-to-child-iter :void
  (tree-model-sort (g-object tree-model-sort))
  (child-iter (g-boxed-foreign tree-iter))
  (sorted-iter (g-boxed-foreign tree-iter)))

;; Returns the child-model iter for SORTED-ITER; the C function reports no
;; status, so a fresh iter is returned unconditionally.
(defun tree-model-sort-convert-iter-to-child-iter (tree-model-sort sorted-iter)
  (let ((child-iter (make-tree-iter)))
    (gtk-tree-model-sort-convert-iter-to-child-iter tree-model-sort child-iter sorted-iter)
    child-iter))

(export 'tree-model-sort-convert-iter-to-child-iter)

(defcfun (tree-model-sort-reset-default-sort-func "gtk_tree_model_sort_reset_default_sort_func") :void
  (tree-model-sort (g-object tree-model-sort)))

(export 'tree-model-sort-reset-default-sort-func)
(defcfun (tree-model-sort-clear-cache "gtk_tree_model_sort_clear_cache") :void
  (tree-model-sort (g-object tree-model-sort)))

;; Fixed: the export used the misspelled symbol TREE-MODEL-SORT-CLEAR-CACHED,
;; so the function defined above was never exported.
(export 'tree-model-sort-clear-cache)
(defcfun (tree-model-sort-iter-is-valid "gtk_tree_model_sort_iter_is_valid") :boolean
  (tree-model-sort (g-object tree-model-sort))
  (iter (g-boxed-foreign tree-iter)))

(export 'tree-model-sort-iter-is-valid)

; TODO: GtkTreeModelFilter

;; Flat (list-only) GtkTreeModel backed by an adjustable Lisp vector of
;; arbitrary items.  Columns are declared at runtime via STORE-ADD-COLUMN
;; as (type getter) pairs; each getter maps an item to its column value.
(defclass array-list-store (tree-model)
  ((items :initform (make-array 0 :adjustable t :fill-pointer t) :reader store-items)
   (columns-getters :initform (make-array 0 :adjustable t :fill-pointer t) :reader store-getters)
   (columns-types :initform (make-array 0 :adjustable t :fill-pointer t) :reader store-types))
  (:metaclass gobject-class)
  (:g-type-name . "LispArrayListStore"))

(export 'array-list-store)

(register-object-type-implementation "LispArrayListStore" array-list-store "GObject" ("GtkTreeModel") nil)
;; Number of items currently held by the store.
(defun store-items-count (store)
  (length (store-items store)))

(export 'store-items-count)

;; Item at row INDEX.
(defun store-item (store index)
  (aref (store-items store) index))

(export 'store-item)

;; Append ITEM and emit "row-inserted" so attached views pick it up.
(defun store-add-item (store item)
  (vector-push-extend item (store-items store))
  (let* ((path (make-instance 'tree-path))
         (iter (make-tree-iter)))
    (setf (tree-path-indices path) (list (1- (length (store-items store)))))
    (setf (tree-iter-stamp iter) 0 (tree-iter-user-data iter) (1- (length (store-items store))))
    (emit-signal store "row-inserted" path iter)))

(export 'store-add-item)

;; Remove ITEM (compared via TEST) and emit "row-deleted".  Signals an
;; error when the item is not present.
(defun store-remove-item (store item &key (test 'eq))
  (with-slots (items) store
    (let ((index (position item items :test test)))
      (unless index (error "No such item~%~A~%in list-store~%~A" item store))
      (setf items (delete item items :test test))
      (let ((path (make-instance 'tree-path)))
        (setf (tree-path-indices path) (list index))
        (emit-signal store "row-deleted" path)))))

(export 'store-remove-item)

;; Register a new column; GETTER maps an item to the column's value.
;; Returns the new column's index.
(defun store-add-column (store type getter)
  (vector-push-extend type (store-types store))
  (vector-push-extend getter (store-getters store))
  (1- (length (store-types store))))

(export 'store-add-column)
;; GtkTreeModel implementation for ARRAY-LIST-STORE.  Iters carry the row
;; index directly in tree-iter-user-data; the stamp is always 0.

(defmethod tree-model-get-flags-impl ((model array-list-store))
  '(:list-only))

(defmethod tree-model-get-n-columns-impl ((model array-list-store))
  (length (store-types model)))

(defmethod tree-model-get-column-type-impl ((tree-model array-list-store) index)
  (aref (store-types tree-model) index))

;; Valid paths are single-index paths within the item range; returns T on
;; success, NIL otherwise.
(defmethod tree-model-get-iter-impl ((model array-list-store) iter path)
  (let ((indices (tree-path-indices path)))
    (when (and (= 1 (length indices))
               (< (first indices) (length (store-items model))))
      (setf (tree-iter-stamp iter) 0 (tree-iter-user-data iter) (first indices))
      t)))

;; No per-node bookkeeping is needed for a flat vector.
(defmethod tree-model-ref-node-impl ((model array-list-store) iter)
  (declare (ignorable model iter)))

(defmethod tree-model-unref-node-impl ((model array-list-store) iter)
  (declare (ignorable model iter)))

(defmethod tree-model-iter-next-impl ((model array-list-store) iter)
  (let ((n (tree-iter-user-data iter)))
    (when (< n (1- (length (store-items model))))
      (setf (tree-iter-user-data iter) (1+ n))
      t)))

;; NOTE(review): N is not range-checked, so an out-of-range N produces an
;; iter past the data yet still returns T -- confirm callers stay in range.
(defmethod tree-model-iter-nth-child-impl ((model array-list-store) iter parent n)
  (declare (ignorable parent))
  (setf (tree-iter-stamp iter) 0
        (tree-iter-user-data iter) n)
  t)

;; Rows of a flat list never have children.
(defmethod tree-model-iter-children-impl ((model array-list-store) iter parent)
  (declare (ignore iter parent))
  nil)

;; A NIL iter designates the virtual root, whose child count is the item
;; count; real rows have no children.
(defmethod tree-model-iter-n-children-impl ((model array-list-store) iter)
  (if (null iter)
      (length (store-items model))
      0))

(defmethod tree-model-get-path-impl ((model array-list-store) iter)
  (let ((path (make-instance 'tree-path)))
    (setf (tree-path-indices path) (list (tree-iter-user-data iter)))
    path))

(defmethod tree-model-iter-has-child-impl ((model array-list-store) iter)
  (declare (ignorable iter))
  nil)

;; Retrieve the stored Lisp item designated by an iter or a path.
(defgeneric tree-model-item (model iter-or-path))

(defmethod tree-model-item ((model array-list-store) (iter tree-iter))
  (let ((n-row (tree-iter-user-data iter)))
    (aref (store-items model) n-row)))

(defmethod tree-model-item ((model array-list-store) (path tree-path))
  (let ((n-row (first (tree-path-indices path))))
    (aref (store-items model) n-row)))

(export 'tree-model-item)

;; Returns (values column-value column-type) for the vtable's get-value.
(defmethod tree-model-get-value-impl ((model array-list-store) iter n)
  (let ((n-row (tree-iter-user-data iter)))
    (values (funcall (aref (store-getters model) n)
                     (aref (store-items model) n-row))
            (aref (store-types model) n))))
;; Thin wrappers over the GtkTreeModel C API.
(defcfun (tree-model-flags "gtk_tree_model_get_flags") tree-model-flags
  (tree-model g-object))

(export 'tree-model-flags)
(defcfun (tree-model-n-columns "gtk_tree_model_get_n_columns") :int
  (tree-model g-object))

;; Fixed: this export previously repeated 'TREE-MODEL-FLAGS (copy/paste
;; slip), leaving TREE-MODEL-N-COLUMNS unexported.
(export 'tree-model-n-columns)
(defcfun (tree-model-column-type "gtk_tree_model_get_column_type") g-type-designator
  (tree-model g-object)
  (index :int))

(export 'tree-model-column-type)

(defcfun (tree-model-set-iter-to-path "gtk_tree_model_get_iter") :boolean
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter))
  (path (g-boxed-foreign tree-path)))

;; Returns a fresh iter pointing at TREE-PATH, or NIL if the path is
;; invalid for this model.
(defun tree-model-iter-by-path (tree-model tree-path)
  (let ((iter (make-tree-iter)))
    (if (tree-model-set-iter-to-path tree-model iter tree-path)
        iter
        nil)))

(export 'tree-model-iter-by-path)

(defcfun (tree-model-set-iter-from-string "gtk_tree_model_get_iter_from_string") :boolean
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter))
  (path-string :string))

;; Returns a fresh iter for a textual path such as "3:2:5", or NIL.
(defun tree-model-iter-from-string (tree-model path-string)
  (let ((iter (make-tree-iter)))
    (if (tree-model-set-iter-from-string tree-model iter path-string)
        iter
        nil)))

(export 'tree-model-iter-from-string)

(defcfun (tree-model-set-iter-to-first "gtk_tree_model_get_iter_first") :boolean
  (model g-object)
  (iter (g-boxed-foreign tree-iter)))

;; Returns an iter on the first row, or NIL when the model is empty.
(defun tree-model-iter-first (tree-model)
  (let ((iter (make-tree-iter)))
    (if (tree-model-set-iter-to-first tree-model iter)
        iter
        nil)))

(export 'tree-model-iter-first)

(defcfun (tree-model-path "gtk_tree_model_get_path") (g-boxed-foreign tree-path :return)
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter)))

(export 'tree-model-path)

(defcfun gtk-tree-model-get-value :void
  (model g-object)
  (iter (g-boxed-foreign tree-iter))
  (column :int)
  (value (:pointer g-value)))

;; Read COLUMN at ITER, converting the temporary GValue to a Lisp value and
;; unsetting it afterwards so no GValue resources leak.
(defun tree-model-value (tree-model iter column)
  (with-foreign-object (v 'g-value)
    (g-value-zero v)
    (gtk-tree-model-get-value tree-model iter column v)
    (prog1 (parse-g-value v)
      (g-value-unset v))))

(export 'tree-model-value)

(defcfun (tree-model-iter-next "gtk_tree_model_iter_next") :boolean
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter)))

(export 'tree-model-iter-next)

(defcfun gtk-tree-model-iter-children :boolean
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter))
  (parent (g-boxed-foreign tree-iter)))

;; Returns an iter on PARENT's first child, or NIL if it has none.
(defun tree-model-iter-first-child (tree-model parent)
  (let ((iter (make-tree-iter)))
    (if (gtk-tree-model-iter-children tree-model iter parent)
        iter
        nil)))

(export 'tree-model-iter-first-child)

(defcfun (tree-model-iter-has-child "gtk_tree_model_iter_has_child") :boolean
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter)))

(export 'tree-model-iter-has-child)

(defcfun (tree-model-iter-n-children "gtk_tree_model_iter_n_children") :int
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter)))

(export 'tree-model-iter-n-children)
(defcfun gtk-tree-model-iter-nth-child :boolean
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter))
  (parent (g-boxed-foreign tree-iter))
  (n :int))

;; Returns an iter on the N-th child of PARENT, or NIL when there is none.
;; Fixed: the failure branch returned N itself instead of NIL, unlike every
;; other wrapper here (iter-by-path, iter-first, iter-first-child, ...).
(defun tree-model-iter-nth-child (tree-model parent n)
  (let ((iter (make-tree-iter)))
    (if (gtk-tree-model-iter-nth-child tree-model iter parent n)
        iter
        nil)))

(export 'tree-model-iter-nth-child)
(defcfun gtk-tree-model-iter-parent :boolean
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter))
  (child (g-boxed-foreign tree-iter)))

;; Returns ITER's parent iter, or NIL for top-level rows.
(defun tree-model-iter-parent (tree-model iter)
  (let ((parent (make-tree-iter)))
    (if (gtk-tree-model-iter-parent tree-model parent iter)
        parent
        nil)))

(export 'tree-model-iter-parent)

(defcfun (tree-model-iter-to-string "gtk_tree_model_get_string_from_iter") (g-string :free-from-foreign t)
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter)))

(export 'tree-model-iter-to-string)

(defcfun (tree-model-ref-node "gtk_tree_model_ref_node") :void
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter)))

(export 'tree-model-ref-node)

(defcfun (tree-model-unref-node "gtk_tree_model_unref_node") :void
  (tree-model g-object)
  (iter (g-boxed-foreign tree-iter)))

(export 'tree-model-unref-node)

;; Trampoline for gtk_tree_model_foreach: DATA is a stable pointer to the
;; Lisp callback.  The restarts let the callback abort the whole walk
;; (return T to GTK) or merely skip the current row (return NIL).
(defcallback gtk-tree-model-foreach-cb :boolean ((model g-object) (path (g-boxed-foreign tree-path)) (iter (g-boxed-foreign tree-iter)) (data :pointer))
  (let ((fn (get-stable-pointer-value data)))
    (restart-case
        (funcall fn model path iter)
      (stop-tree-model-iteration () t)
      (skip-tree-model-current () nil))))

(defcfun gtk-tree-model-foreach :void
  (model g-object)
  (func :pointer)
  (data :pointer))

;; Call FN with (model path iter) for every row of MODEL.
(defun do-tree-model (model fn)
  (with-stable-pointer (ptr fn)
    (gtk-tree-model-foreach model (callback gtk-tree-model-foreach-cb) ptr)))

(export 'do-tree-model)
;; Destructively insert ELEMENT into the adjustable, fill-pointered ARRAY
;; at position INDEX, shifting later elements one slot right.
;; Returns ARRAY.
(defun array-insert-at (array element index)
  (assert (adjustable-array-p array))
  (adjust-array array (1+ (length array)) :fill-pointer t)
  (loop for i downfrom (1- (length array)) above index
        do (setf (aref array i) (aref array (1- i))))
  (setf (aref array index) element)
  array)
;; Destructively remove the element at INDEX from the adjustable,
;; fill-pointered ARRAY, shifting later elements one slot left.
;; Returns ARRAY.
(defun array-remove-at (array index)
  (assert (adjustable-array-p array))
  (loop for i from index below (1- (length array))
        do (setf (aref array i) (aref array (1+ i))))
  (adjust-array array (1- (length array)) :fill-pointer t)
  array)
;; Node of the tree backing TREE-LISP-STORE.  TREE points back to the
;; owning store; ID is an integer handle assigned lazily so the node can be
;; referenced from GTK tree iters.
(defstruct tree-node
  (tree nil)
  (parent nil)
  (id nil)
  (item nil)
  (children (make-array 0 :element-type 'tree-node :adjustable t :fill-pointer t)))

;; Hierarchical GtkTreeModel backed by a tree of TREE-NODEs.  Iters store a
;; node id in their user-data slot; ID-MAP resolves ids back to nodes.
(defclass tree-lisp-store (tree-model)
  ((columns-getters :initform (make-array 0 :adjustable t :fill-pointer t) :reader tree-lisp-store-getters)
   (columns-types :initform (make-array 0 :adjustable t :fill-pointer t) :reader tree-lisp-store-types)
   (root :initform (make-tree-node) :reader tree-lisp-store-root)
   (id-map :initform (make-hash-table) :reader tree-lisp-store-id-map)
   (next-id-value :initform 0 :accessor tree-lisp-store-next-id-value))
  (:metaclass gobject-class)
  (:g-type-name . "LispTreeStore"))

;; The root node must know its owning store from the start.
(defmethod initialize-instance :after ((object tree-lisp-store) &key &allow-other-keys)
  (setf (tree-node-tree (tree-lisp-store-root object)) object))

(register-object-type-implementation "LispTreeStore" tree-lisp-store "GObject" ("GtkTreeModel") nil)
;; Apply FN to NODE and, depth-first, to every node below it.
(defun map-subtree (node fn)
  (funcall fn node)
  (map nil
       (lambda (child) (map-subtree child fn))
       (tree-node-children node)))

;; Forget the cached ids of NODE's whole subtree, removing each id from the
;; owning store's id table when the node belongs to a store.
(defun clear-id (node)
  (map-subtree node
               (lambda (n)
                 (let ((id (tree-node-id n))
                       (tree (tree-node-tree n)))
                   (when (and id tree)
                     (remhash id (tree-lisp-store-id-map tree)))
                   (setf (tree-node-id n) nil)))))

;; Point every node of NODE's subtree at the store TREE (or NIL to detach).
(defun set-node-tree (node tree)
  (map-subtree node
               (lambda (n) (setf (tree-node-tree n) tree))))
;; Attach CHILD under NODE at position INDEX and notify the store so views
;; update.  CHILD must currently be detached (no parent).
(defun tree-node-insert-at (node child index)
  (assert (null (tree-node-parent child)))
  (clear-id child)
  (setf (tree-node-parent child) node)
  (set-node-tree child (tree-node-tree node))
  (array-insert-at (tree-node-children node) child index)
  (notice-tree-node-insertion (tree-node-tree node) node child index)
  node)

(defun tree-node-child-at (node index)
  (aref (tree-node-children node) index))

;; Detach the INDEX-th child of NODE (clearing its cached ids and store
;; pointer) and notify the store.
(defun tree-node-remove-at (node index)
  (assert (<= 0 index (1- (length (tree-node-children node)))))
  (let ((child (tree-node-child-at node index)))
    (clear-id child)
    (setf (tree-node-parent child) nil)
    (set-node-tree child nil)
    (array-remove-at (tree-node-children node) index)
    (notice-tree-node-removal (tree-node-tree node) node child index)))

;; Register a new column as a (type getter) pair.
(defun tree-lisp-store-add-column (store column-type column-getter)
  (vector-push-extend column-getter (tree-lisp-store-getters store))
  (vector-push-extend column-type (tree-lisp-store-types store)))
nil)
(defmethod tree-model-get-n-columns-impl ((store tree-lisp-store))
(length (tree-lisp-store-getters store)))
(defmethod tree-model-get-column-type-impl ((store tree-lisp-store) index)
(aref (tree-lisp-store-types store) index))
(defun get-node-by-indices (root indices)
(if indices
(get-node-by-indices (tree-node-child-at root (first indices)) (rest indices))
root))
(defun get-node-by-path (tree path)
(let ((indices (tree-path-indices path)))
(get-node-by-indices (tree-lisp-store-root tree) indices)))
(defun get-node-path (node)
(iter (with z = nil)
(for parent = (tree-node-parent node))
(while parent)
(for index = (position node (tree-node-children parent)))
(push index z)
(setf node parent)
(finally (return z))))
(defun tree-lisp-store-get-next-id (tree)
(incf (tree-lisp-store-next-id-value tree)))
(defun tree-lisp-store-add-id-map (tree id node)
(setf (gethash id (tree-lisp-store-id-map tree)) node))
(defun get-assigned-id (tree node)
(or (tree-node-id node)
(let ((id (tree-lisp-store-get-next-id tree)))
(tree-lisp-store-add-id-map tree id node)
(setf (tree-node-id node) id)
id)))
(defun get-node-by-id (tree id)
(gethash id (tree-lisp-store-id-map tree)))
(defmethod tree-model-get-iter-impl ((store tree-lisp-store) iter path)
(ignore-errors
(let* ((node (get-node-by-path store path))
(node-idx (get-assigned-id store node)))
(setf (tree-iter-stamp iter) 0
(tree-iter-user-data iter) node-idx))))
(defun get-node-by-iter (tree iter)
(get-node-by-id tree (tree-iter-user-data iter)))
(defmethod tree-model-get-path-impl ((store tree-lisp-store) iter)
(let* ((path (make-instance 'tree-path))
(node (get-node-by-iter store iter))
(indices (get-node-path node)))
(setf (tree-path-indices path) indices)
path))
(defmethod tree-model-get-value-impl ((store tree-lisp-store) iter n)
(let* ((node (get-node-by-iter store iter))
(getter (aref (tree-lisp-store-getters store) n))
(type (aref (tree-lisp-store-types store) n)))
(values (funcall getter (tree-node-item node))
type)))
(defmethod tree-model-iter-next-impl ((store tree-lisp-store) iter)
(let* ((node (get-node-by-iter store iter))
(parent (tree-node-parent node))
(index (position node (tree-node-children parent))))
(when (< (1+ index) (length (tree-node-children parent)))
(setf (tree-iter-stamp iter)
0
(tree-iter-user-data iter)
(get-assigned-id store (tree-node-child-at parent (1+ index))))
t)))
(defmethod tree-model-iter-children-impl ((store tree-lisp-store) iter parent)
(let* ((node (if parent
(get-node-by-iter store parent)
(tree-lisp-store-root store))))
(when (plusp (length (tree-node-children node)))
(setf (tree-iter-stamp iter)
0
(tree-iter-user-data iter)
(get-assigned-id store (tree-node-child-at node 0)))
t)))
(defmethod tree-model-iter-has-child-impl ((store tree-lisp-store) iter)
(let ((node (get-node-by-iter store iter)))
(plusp (length (tree-node-children node)))))
(defmethod tree-model-iter-n-children-impl ((store tree-lisp-store) iter)
(let* ((node (if iter
(get-node-by-iter store iter)
(tree-lisp-store-root store))))
(length (tree-node-children node))))
(defmethod tree-model-iter-nth-child-impl ((store tree-lisp-store) iter parent n)
(let* ((node (if parent
(get-node-by-iter store parent)
(tree-lisp-store-root store)))
(requested-node (tree-node-child-at node n)))
(setf (tree-iter-stamp iter) 0
(tree-iter-user-data iter) (get-assigned-id store requested-node))
t))
(defmethod tree-model-iter-parent-impl ((store tree-lisp-store) iter child)
(let ((node (get-node-by-iter store child)))
(when (tree-node-parent node)
(setf (tree-iter-stamp iter) 0
(tree-iter-user-data iter) (get-assigned-id store (tree-node-parent node))))))
(defmethod tree-model-ref-node-impl ((store tree-lisp-store) iter)
(declare (ignorable iter)))
(defmethod tree-model-unref-node-impl ((store tree-lisp-store) iter)
(declare (ignorable iter)))
(defun notice-tree-node-insertion (tree node child index)
(declare (ignore node index))
(when tree
(let* ((path (make-instance 'tree-path))
(iter (make-tree-iter)))
(setf (tree-path-indices path) (get-node-path child)
(tree-iter-stamp iter) 0
(tree-iter-user-data iter) (get-assigned-id tree child))
(emit-signal tree "row-inserted" path iter)
(when (plusp (length (tree-node-children child)))
(emit-signal tree "row-has-child-toggled" path iter)))))
(defun notice-tree-node-removal (tree node child index)
(declare (ignore child))
(when tree
(let ((path (make-instance 'tree-path)))
(setf (tree-path-indices path) (nconc (get-node-path node) (list index)))
(emit-signal tree "row-deleted" path))
(when (zerop (length (tree-node-children node)))
(let* ((path (make-instance 'tree-path))
(iter (make-tree-iter)))
(setf (tree-path-indices path) (get-node-path node)
(tree-iter-stamp iter) 0
(tree-iter-user-data iter) (get-assigned-id tree node))
(emit-signal tree "row-has-child-toggled" path iter)))))
| null | https://raw.githubusercontent.com/dmitryvk/cl-gtk2/a3108fbc701dbab93b899e04b9637ded2f813410/gtk/gtk.tree-model.lisp | lisp | some signals
methods
signal
methods
TODO: GtkTreeModelFilter | (in-package :gtk)
(define-vtable ("GtkTreeModel" tree-model)
(:skip parent-instance g-type-interface)
(:skip tree-model-row-changed :pointer)
(:skip tree-model-row-inserted :pointer)
(:skip tree-model-row-has-child-toggled :pointer)
(:skip tree-model-row-deleted :pointer)
(:skip tree-model-rows-reordered :pointer)
(get-flags (tree-model-flags (tree-model g-object)))
(get-n-columns (:int (tree-model g-object)))
(get-column-type (g-type-designator
(tree-model g-object)
(index :int)))
(get-iter (:boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))
(path (g-boxed-foreign tree-path))))
(get-path ((g-boxed-foreign tree-path :return)
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))))
(get-value (:void
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))
(n :int)
(value (:pointer g-value)))
:impl-call
((tree-model iter n)
(multiple-value-bind (v type) (tree-model-get-value-impl tree-model iter n)
(set-g-value value v type))))
(iter-next (:boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))))
(iter-children (:boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))
(parent (g-boxed-foreign tree-iter))))
(iter-has-child (:boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))))
(iter-n-children (:int
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))))
(iter-nth-child (:boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))
(parent (g-boxed-foreign tree-iter))
(n :int)))
(iter-parent (:boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))
(child (g-boxed-foreign tree-iter))))
(ref-node (:void
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))))
(unref-node (:void
(tree-model g-object)
(iter (g-boxed-foreign tree-iter)))))
(define-vtable ("GtkTreeSortable" tree-sortable)
(:skip parent-instance g-type-interface)
(:skip sort-columns-changed :pointer)
(get-sort-column-id
(:boolean (sortable (g-object tree-sortable))
(sort-column-id (:pointer :int))
(order (:pointer sort-type)))
:impl-call ((sortable)
(multiple-value-bind (sorted-p r-sort-column-id r-order) (tree-sortable-get-sort-column-id-impl sortable)
(unless (null-pointer-p sort-column-id)
(setf (mem-ref sort-column-id :int) r-sort-column-id))
(unless (null-pointer-p order)
(setf (mem-ref order 'sort-type) r-order))
sorted-p)))
(set-sort-column-id (:void (sortable (g-object tree-sortable)) (sort-column-id :int) (order sort-type)))
(set-sort-func (:void (sortable (g-object tree-sortable)) (sort-column-id :int) (func :pointer) (data :pointer) (destroy-notify :pointer)))
(set-default-sort-func (:void (sortable (g-object tree-sortable)) (func :pointer) (data :pointer) (destroy-notify :pointer)))
(has-default-sort-func (:boolean (sortable (g-object tree-sortable)))))
(defcfun (tree-model-sort-convert-child-path-to-path "gtk_tree_model_sort_convert_child_path_to_path") (g-boxed-foreign tree-path :return)
(tree-model-sort (g-object tree-model-sort))
(child-path (g-boxed-foreign tree-path)))
(export 'tree-model-sort-convert-child-path-to-path)
(defcfun gtk-tree-model-sort-convert-child-iter-to-iter :boolean
(tree-model-sort (g-object tree-model-sort))
(sort-iter (g-boxed-foreign tree-iter))
(child-iter (g-boxed-foreign tree-iter)))
(defun tree-model-sort-convert-child-iter-to-iter (tree-model-sort child-iter)
(let ((sort-iter (make-tree-iter)))
(when (gtk-tree-model-sort-convert-child-iter-to-iter tree-model-sort sort-iter child-iter)
sort-iter)))
(export 'tree-model-sort-convert-child-iter-to-iter)
(defcfun (tree-model-sort-convert-path-to-child-path "gtk_tree_model_sort_convert_path_to_child_path") (g-boxed-foreign tree-path :return)
(tree-model-sort (g-object tree-model-sort))
(sort-path (g-boxed-foreign tree-path)))
(export 'tree-model-sort-convert-path-to-child-path)
(defcfun gtk-tree-model-sort-convert-iter-to-child-iter :void
(tree-model-sort (g-object tree-model-sort))
(child-iter (g-boxed-foreign tree-iter))
(sorted-iter (g-boxed-foreign tree-iter)))
(defun tree-model-sort-convert-iter-to-child-iter (tree-model-sort sorted-iter)
(let ((child-iter (make-tree-iter)))
(gtk-tree-model-sort-convert-iter-to-child-iter tree-model-sort child-iter sorted-iter)
child-iter))
(export 'tree-model-sort-convert-iter-to-child-iter)
(defcfun (tree-model-sort-reset-default-sort-func "gtk_tree_model_sort_reset_default_sort_func") :void
(tree-model-sort (g-object tree-model-sort)))
(export 'tree-model-sort-reset-default-sort-func)
(defcfun (tree-model-sort-clear-cache "gtk_tree_model_sort_clear_cache") :void
(tree-model-sort (g-object tree-model-sort)))
(export 'tree-model-sort-clear-cached)
(defcfun (tree-model-sort-iter-is-valid "gtk_tree_model_sort_iter_is_valid") :boolean
(tree-model-sort (g-object tree-model-sort))
(iter (g-boxed-foreign tree-iter)))
(export 'tree-model-sort-iter-is-valid)
(defclass array-list-store (tree-model)
((items :initform (make-array 0 :adjustable t :fill-pointer t) :reader store-items)
(columns-getters :initform (make-array 0 :adjustable t :fill-pointer t) :reader store-getters)
(columns-types :initform (make-array 0 :adjustable t :fill-pointer t) :reader store-types))
(:metaclass gobject-class)
(:g-type-name . "LispArrayListStore"))
(export 'array-list-store)
(register-object-type-implementation "LispArrayListStore" array-list-store "GObject" ("GtkTreeModel") nil)
(defun store-items-count (store)
(length (store-items store)))
(export 'store-items-count)
(defun store-item (store index)
(aref (store-items store) index))
(export 'store-item)
(defun store-add-item (store item)
(vector-push-extend item (store-items store))
(let* ((path (make-instance 'tree-path))
(iter (make-tree-iter)))
(setf (tree-path-indices path) (list (1- (length (store-items store)))))
(setf (tree-iter-stamp iter) 0 (tree-iter-user-data iter) (1- (length (store-items store))))
(emit-signal store "row-inserted" path iter)))
(export 'store-add-item)
(defun store-remove-item (store item &key (test 'eq))
(with-slots (items) store
(let ((index (position item items :test test)))
(unless index (error "No such item~%~A~%in list-store~%~A" item store))
(setf items (delete item items :test test))
(let ((path (make-instance 'tree-path)))
(setf (tree-path-indices path) (list index))
(emit-signal store "row-deleted" path)))))
(export 'store-remove-item)
(defun store-add-column (store type getter)
(vector-push-extend type (store-types store))
(vector-push-extend getter (store-getters store))
(1- (length (store-types store))))
(export 'store-add-column)
(defmethod tree-model-get-flags-impl ((model array-list-store))
'(:list-only))
(defmethod tree-model-get-n-columns-impl ((model array-list-store))
(length (store-types model)))
(defmethod tree-model-get-column-type-impl ((tree-model array-list-store) index)
(aref (store-types tree-model) index))
(defmethod tree-model-get-iter-impl ((model array-list-store) iter path)
(let ((indices (tree-path-indices path)))
(when (and (= 1 (length indices))
(< (first indices) (length (store-items model))))
(setf (tree-iter-stamp iter) 0 (tree-iter-user-data iter) (first indices))
t)))
(defmethod tree-model-ref-node-impl ((model array-list-store) iter)
(declare (ignorable model iter)))
(defmethod tree-model-unref-node-impl ((model array-list-store) iter)
(declare (ignorable model iter)))
(defmethod tree-model-iter-next-impl ((model array-list-store) iter)
(let ((n (tree-iter-user-data iter)))
(when (< n (1- (length (store-items model))))
(setf (tree-iter-user-data iter) (1+ n))
t)))
(defmethod tree-model-iter-nth-child-impl ((model array-list-store) iter parent n)
(declare (ignorable parent))
(setf (tree-iter-stamp iter) 0
(tree-iter-user-data iter) n)
t)
(defmethod tree-model-iter-children-impl ((model array-list-store) iter parent)
(declare (ignore iter parent))
nil)
(defmethod tree-model-iter-n-children-impl ((model array-list-store) iter)
(if (null iter)
(length (store-items model))
0))
(defmethod tree-model-get-path-impl ((model array-list-store) iter)
(let ((path (make-instance 'tree-path)))
(setf (tree-path-indices path) (list (tree-iter-user-data iter)))
path))
(defmethod tree-model-iter-has-child-impl ((model array-list-store) iter)
(declare (ignorable iter))
nil)
(defgeneric tree-model-item (model iter-or-path))
(defmethod tree-model-item ((model array-list-store) (iter tree-iter))
(let ((n-row (tree-iter-user-data iter)))
(aref (store-items model) n-row)))
(defmethod tree-model-item ((model array-list-store) (path tree-path))
(let ((n-row (first (tree-path-indices path))))
(aref (store-items model) n-row)))
(export 'tree-model-item)
(defmethod tree-model-get-value-impl ((model array-list-store) iter n)
(let ((n-row (tree-iter-user-data iter)))
(values (funcall (aref (store-getters model) n)
(aref (store-items model) n-row))
(aref (store-types model) n))))
(defcfun (tree-model-flags "gtk_tree_model_get_flags") tree-model-flags
(tree-model g-object))
(export 'tree-model-flags)
(defcfun (tree-model-n-columns "gtk_tree_model_get_n_columns") :int
(tree-model g-object))
(export 'tree-model-flags)
(defcfun (tree-model-column-type "gtk_tree_model_get_column_type") g-type-designator
(tree-model g-object)
(index :int))
(export 'tree-model-column-type)
(defcfun (tree-model-set-iter-to-path "gtk_tree_model_get_iter") :boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))
(path (g-boxed-foreign tree-path)))
(defun tree-model-iter-by-path (tree-model tree-path)
(let ((iter (make-tree-iter)))
(if (tree-model-set-iter-to-path tree-model iter tree-path)
iter
nil)))
(export 'tree-model-iter-by-path)
(defcfun (tree-model-set-iter-from-string "gtk_tree_model_get_iter_from_string") :boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))
(path-string :string))
(defun tree-model-iter-from-string (tree-model path-string)
(let ((iter (make-tree-iter)))
(if (tree-model-set-iter-from-string tree-model iter path-string)
iter
nil)))
(export 'tree-model-iter-from-string)
(defcfun (tree-model-set-iter-to-first "gtk_tree_model_get_iter_first") :boolean
(model g-object)
(iter (g-boxed-foreign tree-iter)))
(defun tree-model-iter-first (tree-model)
(let ((iter (make-tree-iter)))
(if (tree-model-set-iter-to-first tree-model iter)
iter
nil)))
(export 'tree-model-iter-first)
(defcfun (tree-model-path "gtk_tree_model_get_path") (g-boxed-foreign tree-path :return)
(tree-model g-object)
(iter (g-boxed-foreign tree-iter)))
(export 'tree-model-path)
(defcfun gtk-tree-model-get-value :void
(model g-object)
(iter (g-boxed-foreign tree-iter))
(column :int)
(value (:pointer g-value)))
(defun tree-model-value (tree-model iter column)
(with-foreign-object (v 'g-value)
(g-value-zero v)
(gtk-tree-model-get-value tree-model iter column v)
(prog1 (parse-g-value v)
(g-value-unset v))))
(export 'tree-model-value)
(defcfun (tree-model-iter-next "gtk_tree_model_iter_next") :boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter)))
(export 'tree-model-iter-next)
(defcfun gtk-tree-model-iter-children :boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))
(parent (g-boxed-foreign tree-iter)))
(defun tree-model-iter-first-child (tree-model parent)
(let ((iter (make-tree-iter)))
(if (gtk-tree-model-iter-children tree-model iter parent)
iter
nil)))
(export 'tree-model-iter-first-child)
(defcfun (tree-model-iter-has-child "gtk_tree_model_iter_has_child") :boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter)))
(export 'tree-model-iter-has-child)
(defcfun (tree-model-iter-n-children "gtk_tree_model_iter_n_children") :int
(tree-model g-object)
(iter (g-boxed-foreign tree-iter)))
(export 'tree-model-iter-n-children)
(defcfun gtk-tree-model-iter-nth-child :boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))
(parent (g-boxed-foreign tree-iter))
(n :int))
(defun tree-model-iter-nth-child (tree-model parent n)
(let ((iter (make-tree-iter)))
(if (gtk-tree-model-iter-nth-child tree-model iter parent n)
iter
n)))
(export 'tree-model-iter-nth-child)
(defcfun gtk-tree-model-iter-parent :boolean
(tree-model g-object)
(iter (g-boxed-foreign tree-iter))
(child (g-boxed-foreign tree-iter)))
(defun tree-model-iter-parent (tree-model iter)
(let ((parent (make-tree-iter)))
(if (gtk-tree-model-iter-parent tree-model parent iter)
parent
nil)))
(export 'tree-model-iter-parent)
(defcfun (tree-model-iter-to-string "gtk_tree_model_get_string_from_iter") (g-string :free-from-foreign t)
(tree-model g-object)
(iter (g-boxed-foreign tree-iter)))
(export 'tree-model-iter-to-string)
(defcfun (tree-model-ref-node "gtk_tree_model_ref_node") :void
(tree-model g-object)
(iter (g-boxed-foreign tree-iter)))
(export 'tree-model-ref-node)
(defcfun (tree-model-unref-node "gtk_tree_model_unref_node") :void
(tree-model g-object)
(iter (g-boxed-foreign tree-iter)))
(export 'tree-model-unref-node)
(defcallback gtk-tree-model-foreach-cb :boolean ((model g-object) (path (g-boxed-foreign tree-path)) (iter (g-boxed-foreign tree-iter)) (data :pointer))
(let ((fn (get-stable-pointer-value data)))
(restart-case
(funcall fn model path iter)
(stop-tree-model-iteration () t)
(skip-tree-model-current () nil))))
(defcfun gtk-tree-model-foreach :void
(model g-object)
(func :pointer)
(data :pointer))
(defun do-tree-model (model fn)
(with-stable-pointer (ptr fn)
(gtk-tree-model-foreach model (callback gtk-tree-model-foreach-cb) ptr)))
(export 'do-tree-model)
(defun array-insert-at (array element index)
(assert (adjustable-array-p array))
(adjust-array array (1+ (length array)) :fill-pointer t)
(iter (for i from (1- (length array)) above index)
(setf (aref array i)
(aref array (1- i))))
(setf (aref array index) element)
array)
(defun array-remove-at (array index)
(assert (adjustable-array-p array))
(iter (for i from index below (1- (length array)))
(setf (aref array i)
(aref array (1+ i))))
(adjust-array array (1- (length array)) :fill-pointer t)
array)
(defstruct tree-node
(tree nil)
(parent nil)
(id nil)
(item nil)
(children (make-array 0 :element-type 'tree-node :adjustable t :fill-pointer t)))
(defclass tree-lisp-store (tree-model)
((columns-getters :initform (make-array 0 :adjustable t :fill-pointer t) :reader tree-lisp-store-getters)
(columns-types :initform (make-array 0 :adjustable t :fill-pointer t) :reader tree-lisp-store-types)
(root :initform (make-tree-node) :reader tree-lisp-store-root)
(id-map :initform (make-hash-table) :reader tree-lisp-store-id-map)
(next-id-value :initform 0 :accessor tree-lisp-store-next-id-value))
(:metaclass gobject-class)
(:g-type-name . "LispTreeStore"))
(defmethod initialize-instance :after ((object tree-lisp-store) &key &allow-other-keys)
(setf (tree-node-tree (tree-lisp-store-root object)) object))
(register-object-type-implementation "LispTreeStore" tree-lisp-store "GObject" ("GtkTreeModel") nil)
(defun map-subtree (node fn)
(funcall fn node)
(iter (for child in-vector (tree-node-children node))
(map-subtree child fn)))
(defun clear-id (node)
(map-subtree node
(lambda (n)
(when (and (tree-node-id n)
(tree-node-tree n))
(remhash (tree-node-id n)
(tree-lisp-store-id-map (tree-node-tree n))))
(setf (tree-node-id n) nil))))
(defun set-node-tree (node tree)
(map-subtree node
(lambda (n)
(setf (tree-node-tree n) tree))))
(defun tree-node-insert-at (node child index)
(assert (null (tree-node-parent child)))
(clear-id child)
(setf (tree-node-parent child) node)
(set-node-tree child (tree-node-tree node))
(array-insert-at (tree-node-children node) child index)
(notice-tree-node-insertion (tree-node-tree node) node child index)
node)
(defun tree-node-child-at (node index)
(aref (tree-node-children node) index))
(defun tree-node-remove-at (node index)
(assert (<= 0 index (1- (length (tree-node-children node)))))
(let ((child (tree-node-child-at node index)))
(clear-id child)
(setf (tree-node-parent child) nil)
(set-node-tree child nil)
(array-remove-at (tree-node-children node) index)
(notice-tree-node-removal (tree-node-tree node) node child index)))
(defun tree-lisp-store-add-column (store column-type column-getter)
(vector-push-extend column-getter (tree-lisp-store-getters store))
(vector-push-extend column-type (tree-lisp-store-types store)))
(defmethod tree-model-get-flags-impl ((store tree-lisp-store))
nil)
(defmethod tree-model-get-n-columns-impl ((store tree-lisp-store))
(length (tree-lisp-store-getters store)))
(defmethod tree-model-get-column-type-impl ((store tree-lisp-store) index)
(aref (tree-lisp-store-types store) index))
(defun get-node-by-indices (root indices)
(if indices
(get-node-by-indices (tree-node-child-at root (first indices)) (rest indices))
root))
(defun get-node-by-path (tree path)
(let ((indices (tree-path-indices path)))
(get-node-by-indices (tree-lisp-store-root tree) indices)))
(defun get-node-path (node)
(iter (with z = nil)
(for parent = (tree-node-parent node))
(while parent)
(for index = (position node (tree-node-children parent)))
(push index z)
(setf node parent)
(finally (return z))))
(defun tree-lisp-store-get-next-id (tree)
(incf (tree-lisp-store-next-id-value tree)))
(defun tree-lisp-store-add-id-map (tree id node)
(setf (gethash id (tree-lisp-store-id-map tree)) node))
(defun get-assigned-id (tree node)
(or (tree-node-id node)
(let ((id (tree-lisp-store-get-next-id tree)))
(tree-lisp-store-add-id-map tree id node)
(setf (tree-node-id node) id)
id)))
(defun get-node-by-id (tree id)
(gethash id (tree-lisp-store-id-map tree)))
(defmethod tree-model-get-iter-impl ((store tree-lisp-store) iter path)
(ignore-errors
(let* ((node (get-node-by-path store path))
(node-idx (get-assigned-id store node)))
(setf (tree-iter-stamp iter) 0
(tree-iter-user-data iter) node-idx))))
(defun get-node-by-iter (tree iter)
(get-node-by-id tree (tree-iter-user-data iter)))
(defmethod tree-model-get-path-impl ((store tree-lisp-store) iter)
(let* ((path (make-instance 'tree-path))
(node (get-node-by-iter store iter))
(indices (get-node-path node)))
(setf (tree-path-indices path) indices)
path))
(defmethod tree-model-get-value-impl ((store tree-lisp-store) iter n)
(let* ((node (get-node-by-iter store iter))
(getter (aref (tree-lisp-store-getters store) n))
(type (aref (tree-lisp-store-types store) n)))
(values (funcall getter (tree-node-item node))
type)))
(defmethod tree-model-iter-next-impl ((store tree-lisp-store) iter)
(let* ((node (get-node-by-iter store iter))
(parent (tree-node-parent node))
(index (position node (tree-node-children parent))))
(when (< (1+ index) (length (tree-node-children parent)))
(setf (tree-iter-stamp iter)
0
(tree-iter-user-data iter)
(get-assigned-id store (tree-node-child-at parent (1+ index))))
t)))
(defmethod tree-model-iter-children-impl ((store tree-lisp-store) iter parent)
(let* ((node (if parent
(get-node-by-iter store parent)
(tree-lisp-store-root store))))
(when (plusp (length (tree-node-children node)))
(setf (tree-iter-stamp iter)
0
(tree-iter-user-data iter)
(get-assigned-id store (tree-node-child-at node 0)))
t)))
(defmethod tree-model-iter-has-child-impl ((store tree-lisp-store) iter)
(let ((node (get-node-by-iter store iter)))
(plusp (length (tree-node-children node)))))
(defmethod tree-model-iter-n-children-impl ((store tree-lisp-store) iter)
(let* ((node (if iter
(get-node-by-iter store iter)
(tree-lisp-store-root store))))
(length (tree-node-children node))))
(defmethod tree-model-iter-nth-child-impl ((store tree-lisp-store) iter parent n)
(let* ((node (if parent
(get-node-by-iter store parent)
(tree-lisp-store-root store)))
(requested-node (tree-node-child-at node n)))
(setf (tree-iter-stamp iter) 0
(tree-iter-user-data iter) (get-assigned-id store requested-node))
t))
(defmethod tree-model-iter-parent-impl ((store tree-lisp-store) iter child)
(let ((node (get-node-by-iter store child)))
(when (tree-node-parent node)
(setf (tree-iter-stamp iter) 0
(tree-iter-user-data iter) (get-assigned-id store (tree-node-parent node))))))
(defmethod tree-model-ref-node-impl ((store tree-lisp-store) iter)
(declare (ignorable iter)))
(defmethod tree-model-unref-node-impl ((store tree-lisp-store) iter)
(declare (ignorable iter)))
(defun notice-tree-node-insertion (tree node child index)
(declare (ignore node index))
(when tree
(let* ((path (make-instance 'tree-path))
(iter (make-tree-iter)))
(setf (tree-path-indices path) (get-node-path child)
(tree-iter-stamp iter) 0
(tree-iter-user-data iter) (get-assigned-id tree child))
(emit-signal tree "row-inserted" path iter)
(when (plusp (length (tree-node-children child)))
(emit-signal tree "row-has-child-toggled" path iter)))))
(defun notice-tree-node-removal (tree node child index)
(declare (ignore child))
(when tree
(let ((path (make-instance 'tree-path)))
(setf (tree-path-indices path) (nconc (get-node-path node) (list index)))
(emit-signal tree "row-deleted" path))
(when (zerop (length (tree-node-children node)))
(let* ((path (make-instance 'tree-path))
(iter (make-tree-iter)))
(setf (tree-path-indices path) (get-node-path node)
(tree-iter-stamp iter) 0
(tree-iter-user-data iter) (get-assigned-id tree node))
(emit-signal tree "row-has-child-toggled" path iter)))))
|
8cf5027201cfe1437781a194262a3a27a5bf9ad7cc60e6884b48f4058f42b657 | ssomayyajula/refinery | propLexer.ml | # 1 "propLexer.mll"
open PropParser
# 6 "propLexer.ml"
let __ocaml_lex_tables = {
Lexing.lex_base =
"\000\000\245\255\246\255\062\000\000\000\000\000\001\000\001\000\
\252\255\253\255\254\255\001\000\251\255\250\255\249\255\000\000\
\000\000\000\000\248\255";
Lexing.lex_backtrk =
"\255\255\255\255\255\255\008\000\009\000\009\000\009\000\009\000\
\255\255\255\255\255\255\000\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255";
Lexing.lex_default =
"\002\000\000\000\000\000\255\255\255\255\255\255\255\255\255\255\
\000\000\000\000\000\000\255\255\000\000\000\000\000\000\255\255\
\255\255\255\255\000\000";
Lexing.lex_trans =
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\011\000\011\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\011\000\011\000\000\000\000\000\000\000\000\000\000\000\000\000\
\010\000\009\000\000\000\000\000\000\000\000\000\000\000\007\000\
\013\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\005\000\014\000\000\000\
\000\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\000\000\006\000\012\000\000\000\000\000\
\000\000\015\000\000\000\000\000\000\000\018\000\004\000\000\000\
\000\000\000\000\000\000\000\000\016\000\000\000\000\000\000\000\
\000\000\000\000\000\000\017\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\008\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\000\000\000\000\000\000\000\000\000\000\000\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000";
Lexing.lex_check =
"\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\000\000\011\000\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\011\000\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\000\000\255\255\255\255\255\255\255\255\255\255\000\000\
\006\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\000\000\005\000\255\255\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\255\255\000\000\007\000\255\255\255\255\
\255\255\004\000\255\255\255\255\255\255\017\000\000\000\255\255\
\255\255\255\255\255\255\255\255\015\000\255\255\255\255\255\255\
\255\255\255\255\255\255\016\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\000\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\255\255\255\255\255\255\255\255\255\255\255\255\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255";
Lexing.lex_base_code =
"";
Lexing.lex_backtrk_code =
"";
Lexing.lex_default_code =
"";
Lexing.lex_trans_code =
"";
Lexing.lex_check_code =
"";
Lexing.lex_code =
"";
}
let rec read lexbuf =
__ocaml_lex_read_rec lexbuf 0
and __ocaml_lex_read_rec lexbuf __ocaml_lex_state =
match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with
| 0 ->
# 14 "propLexer.mll"
( read lexbuf )
# 123 "propLexer.ml"
| 1 ->
# 15 "propLexer.mll"
( LPAREN )
# 128 "propLexer.ml"
| 2 ->
# 16 "propLexer.mll"
( RPAREN )
# 133 "propLexer.ml"
| 3 ->
# 17 "propLexer.mll"
( NOT )
# 138 "propLexer.ml"
| 4 ->
# 18 "propLexer.mll"
( AND )
# 143 "propLexer.ml"
| 5 ->
# 19 "propLexer.mll"
( OR )
# 148 "propLexer.ml"
| 6 ->
# 20 "propLexer.mll"
( IMPLIES )
# 153 "propLexer.ml"
| 7 ->
# 21 "propLexer.mll"
( FALSE )
# 158 "propLexer.ml"
| 8 ->
# 22 "propLexer.mll"
( ATOM (Lexing.lexeme lexbuf) )
# 163 "propLexer.ml"
| 9 ->
let
# 23 "propLexer.mll"
c
# 169 "propLexer.ml"
= Lexing.sub_lexeme_char lexbuf lexbuf.Lexing.lex_start_pos in
# 23 "propLexer.mll"
( failwith ("Unexpected char: " ^ (Char.escaped c)) )
# 173 "propLexer.ml"
| 10 ->
# 24 "propLexer.mll"
( EOF )
# 178 "propLexer.ml"
| __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf;
__ocaml_lex_read_rec lexbuf __ocaml_lex_state
;;
| null | https://raw.githubusercontent.com/ssomayyajula/refinery/2be3e85e25d40e0172cfa82e6352d78a95b342a6/propLexer.ml | ocaml | # 1 "propLexer.mll"
open PropParser
# 6 "propLexer.ml"
let __ocaml_lex_tables = {
Lexing.lex_base =
"\000\000\245\255\246\255\062\000\000\000\000\000\001\000\001\000\
\252\255\253\255\254\255\001\000\251\255\250\255\249\255\000\000\
\000\000\000\000\248\255";
Lexing.lex_backtrk =
"\255\255\255\255\255\255\008\000\009\000\009\000\009\000\009\000\
\255\255\255\255\255\255\000\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255";
Lexing.lex_default =
"\002\000\000\000\000\000\255\255\255\255\255\255\255\255\255\255\
\000\000\000\000\000\000\255\255\000\000\000\000\000\000\255\255\
\255\255\255\255\000\000";
Lexing.lex_trans =
"\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\011\000\011\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\011\000\011\000\000\000\000\000\000\000\000\000\000\000\000\000\
\010\000\009\000\000\000\000\000\000\000\000\000\000\000\007\000\
\013\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\005\000\014\000\000\000\
\000\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\000\000\006\000\012\000\000\000\000\000\
\000\000\015\000\000\000\000\000\000\000\018\000\004\000\000\000\
\000\000\000\000\000\000\000\000\016\000\000\000\000\000\000\000\
\000\000\000\000\000\000\017\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\008\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\000\000\000\000\000\000\000\000\000\000\000\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000";
Lexing.lex_check =
"\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\000\000\011\000\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\011\000\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\000\000\255\255\255\255\255\255\255\255\255\255\000\000\
\006\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\000\000\005\000\255\255\
\255\255\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\255\255\000\000\007\000\255\255\255\255\
\255\255\004\000\255\255\255\255\255\255\017\000\000\000\255\255\
\255\255\255\255\255\255\255\255\015\000\255\255\255\255\255\255\
\255\255\255\255\255\255\016\000\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\000\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\255\255\255\255\255\255\255\255\255\255\255\255\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\003\000\003\000\003\000\003\000\003\000\003\000\003\000\
\003\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\000\000\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\255\
\255\255\255\255\255\255\255\255\255\255\255\255\255\255";
Lexing.lex_base_code =
"";
Lexing.lex_backtrk_code =
"";
Lexing.lex_default_code =
"";
Lexing.lex_trans_code =
"";
Lexing.lex_check_code =
"";
Lexing.lex_code =
"";
}
let rec read lexbuf =
__ocaml_lex_read_rec lexbuf 0
and __ocaml_lex_read_rec lexbuf __ocaml_lex_state =
match Lexing.engine __ocaml_lex_tables __ocaml_lex_state lexbuf with
| 0 ->
# 14 "propLexer.mll"
( read lexbuf )
# 123 "propLexer.ml"
| 1 ->
# 15 "propLexer.mll"
( LPAREN )
# 128 "propLexer.ml"
| 2 ->
# 16 "propLexer.mll"
( RPAREN )
# 133 "propLexer.ml"
| 3 ->
# 17 "propLexer.mll"
( NOT )
# 138 "propLexer.ml"
| 4 ->
# 18 "propLexer.mll"
( AND )
# 143 "propLexer.ml"
| 5 ->
# 19 "propLexer.mll"
( OR )
# 148 "propLexer.ml"
| 6 ->
# 20 "propLexer.mll"
( IMPLIES )
# 153 "propLexer.ml"
| 7 ->
# 21 "propLexer.mll"
( FALSE )
# 158 "propLexer.ml"
| 8 ->
# 22 "propLexer.mll"
( ATOM (Lexing.lexeme lexbuf) )
# 163 "propLexer.ml"
| 9 ->
let
# 23 "propLexer.mll"
c
# 169 "propLexer.ml"
= Lexing.sub_lexeme_char lexbuf lexbuf.Lexing.lex_start_pos in
# 23 "propLexer.mll"
( failwith ("Unexpected char: " ^ (Char.escaped c)) )
# 173 "propLexer.ml"
| 10 ->
# 24 "propLexer.mll"
( EOF )
# 178 "propLexer.ml"
| __ocaml_lex_state -> lexbuf.Lexing.refill_buff lexbuf;
__ocaml_lex_read_rec lexbuf __ocaml_lex_state
;;
| |
307d9565e08fe72b1ab50a82019eecd941bab47867e95246e558603df3b68f24 | jfrederickson/dotfiles | services.scm | (use-modules
(gnu services shepherd)
(gnu packages wm))
(define kanshi-service
(shepherd-service
(provision '(kanshi))
(start #~(make-forkexec-constructor
(list #$(file-append kanshi "/bin/kanshi"))))
(stop #~(make-kill-destructor))))
| null | https://raw.githubusercontent.com/jfrederickson/dotfiles/12476f2e719d2274bbdcc43a97b336fab53f84ff/guix/guix/home/services.scm | scheme | (use-modules
(gnu services shepherd)
(gnu packages wm))
(define kanshi-service
(shepherd-service
(provision '(kanshi))
(start #~(make-forkexec-constructor
(list #$(file-append kanshi "/bin/kanshi"))))
(stop #~(make-kill-destructor))))
| |
ea24eac298daa9ea811cbd1c4fce4906fd4282a92a2f701358764a360600c8d3 | williamleferrand/accretio | basics.ml | (* basics.ml *)
open Lwt
open Printf
open CalendarLib
open Api
open Eliom_content.Html5
open Eliom_content.Html5.D
open Message_parsers
let alert_supervisor context () =
context.log_info "alerting supervisor, manual action is needed" ;
lwt _ =
context.message_supervisor
~subject:"Manual action is needed"
~content:[
pcdata "Hi," ; br () ;
br () ;
pcdata "Please connect to your dashboard and check the society. You can also use this direct link:" ; br () ;
br () ;
Raw.a ~a:[ a_href (uri_of_string (fun () -> context.direct_link)) ] [ pcdata context.direct_link ] ; br ();
br () ;
]
()
in
return `None
COMPONENT
*alert_supervisor
| null | https://raw.githubusercontent.com/williamleferrand/accretio/394f855e9c2a6a18f0c2da35058d5a01aacf6586/playbooks/basics.ml | ocaml | basics.ml |
open Lwt
open Printf
open CalendarLib
open Api
open Eliom_content.Html5
open Eliom_content.Html5.D
open Message_parsers
let alert_supervisor context () =
context.log_info "alerting supervisor, manual action is needed" ;
lwt _ =
context.message_supervisor
~subject:"Manual action is needed"
~content:[
pcdata "Hi," ; br () ;
br () ;
pcdata "Please connect to your dashboard and check the society. You can also use this direct link:" ; br () ;
br () ;
Raw.a ~a:[ a_href (uri_of_string (fun () -> context.direct_link)) ] [ pcdata context.direct_link ] ; br ();
br () ;
]
()
in
return `None
COMPONENT
*alert_supervisor
|
11ecfd91e803f3906245ac9d5e78d9db232d843e2fc4eb0e5f556a6f8a5c9bfb | brainsickcc/bscc | Token.hs | Copyright © 2012
-- This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see </>.
| Tokens are produced by lexical analysis ( " Bscc . ) and consumed
when parsing ( " Bscc . " ) .
module Bscc.Token
(Token, TokenNoPos (..))
where
import Bscc.Symbol.Name
import Text.Parsec.Pos (SourcePos)
-- | A token, with position information.
type Token = (SourcePos, TokenNoPos)
-- | A token, without position information.
data TokenNoPos =
-- | Identifier
TIdent SymbolName
-- | Newline
| TNl
-- | Keywords begin here
| TKwCall
| TKwEnd
| TKwSub
-- | String literal
| TStringLit String
-- | Symbol
| TSym Char
deriving (Show, Eq)
| null | https://raw.githubusercontent.com/brainsickcc/bscc/f47a83f5c77d0ef483e957032715ac4007fcb44d/Bscc/Token.hs | haskell | This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
along with this program. If not, see </>.
| A token, with position information.
| A token, without position information.
| Identifier
| Newline
| Keywords begin here
| String literal
| Symbol | Copyright © 2012
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU Affero General Public License
| Tokens are produced by lexical analysis ( " Bscc . ) and consumed
when parsing ( " Bscc . " ) .
module Bscc.Token
(Token, TokenNoPos (..))
where
import Bscc.Symbol.Name
import Text.Parsec.Pos (SourcePos)
type Token = (SourcePos, TokenNoPos)
data TokenNoPos =
TIdent SymbolName
| TNl
| TKwCall
| TKwEnd
| TKwSub
| TStringLit String
| TSym Char
deriving (Show, Eq)
|
3a5c5695255144cad48064ca319c3456cd94fc38df8e628e1ebaeea36a04ccbf | obsidiansystems/hydra-pay | Api.hs | # LANGUAGE TemplateHaskell #
module HydraPay.Api where
import Data.Map
import Data.Int
import GHC.Generics
import Data.Aeson as Aeson
import qualified Data.Text as T
import Control.Applicative((<|>))
import Control.Lens.TH
import Data.Fixed (Pico)
import Hydra.Types
import Hydra.ServerOutput as ServerOutput
import qualified HydraPay.Config as Config
type HeadName = T.Text
data HydraPayStats = HydraPayStats
{ _hydraPayStats_heads :: Integer
, _hydraPayStats_nodes :: Integer
}
deriving (Eq, Show, Generic)
instance ToJSON HydraPayStats
instance FromJSON HydraPayStats
data HeadCreate = HeadCreate
{ headCreate_name :: HeadName
, headCreate_participants :: [Address]
}
deriving (Eq, Show, Generic)
instance ToJSON HeadCreate
instance FromJSON HeadCreate
data HeadInit = HeadInit
{ headInit_name :: HeadName
, headInit_contestation :: Integer
}
deriving (Eq, Show, Generic)
instance ToJSON HeadInit
instance FromJSON HeadInit
data HeadCommit = HeadCommit
{ headCommit_name :: HeadName
, headCommit_participant :: Address
, headCommit_amount :: Lovelace
}
deriving (Eq, Show, Generic)
instance ToJSON HeadCommit
instance FromJSON HeadCommit
data HeadSubmitTx = HeadSubmitTx
{ headSubmitTx_name :: HeadName
, headSubmitTx_toAddr :: Address
, amount :: Lovelace
}
deriving (Eq, Show, Generic)
instance ToJSON HeadSubmitTx
instance FromJSON HeadSubmitTx
-- This is the API json type that we need to send back out
data HeadStatus = HeadStatus
{ headStatus_name :: HeadName
, headStatus_running :: Bool
, headStatus_status :: Status
, headStatus_balances :: Map Address Lovelace
}
deriving (Eq, Show, Generic)
instance ToJSON HeadStatus
instance FromJSON HeadStatus
data Tagged a = Tagged
{ tagged_id :: Int64
, tagged_payload :: a
}
deriving (Eq, Show, Generic)
instance ToJSON a => ToJSON (Tagged a)
instance FromJSON a => FromJSON (Tagged a)
data HydraPayError
= InvalidPayload
| HeadCreationFailed
| NotEnoughParticipants
| HeadExists HeadName
| HeadDoesn'tExist
| NetworkIsn'tRunning
| FailedToBuildFundsTx
| NodeCommandFailed
-- ^ Anytime a command fails
| NotAParticipant
| ProcessError String
| NoValidUTXOToCommit
| InsufficientFunds
| FanoutNotPossible
| TxInvalid {utxo :: WholeUTXO, transaction :: Value, validationError :: ValidationError}
deriving (Eq, Show, Generic)
instance ToJSON HydraPayError
instance FromJSON HydraPayError
-- | State the head can be in, progressing linearly though the states.
data Status
= Status_Pending
| Status_Init
| Status_Committing
| Status_Aborted
| Status_Open
| Status_Closed
| Status_Fanout
| Status_Finalized
deriving (Eq, Ord, Show, Generic)
instance ToJSON Status
instance FromJSON Status
data ClientMsg
= ClientHello
| Authenticate T.Text
| DoesHeadExist T.Text
| CreateHead HeadCreate
| InitHead HeadInit
| CommitHead HeadCommit
| CloseHead HeadName
| TearDownHead HeadName
-- ^ Kills network and removes head
| CheckFuel Address
| Withdraw Address Bool
| GetAddTx TxType Address Lovelace
| SubscribeTo HeadName
| SubmitHeadTx Address HeadSubmitTx
| RestartDevnet
| GetStats
| GetDevnetAddresses Integer -- Amount of addresses
| GetL1Balance Address
| GetHeadBalance HeadName Address
| LiveDocEzSubmitTx Tx Address
| GetIsManagedDevnet
| GetHydraPayMode
| GetProxyInfo Address
deriving (Eq, Show, Generic)
instance ToJSON ClientMsg
instance FromJSON ClientMsg
type Version = T.Text
versionStr :: Version
versionStr = "0.1.0"
data ServerMsg
= ServerHello Version
| OperationSuccess
| HeadInfo HeadStatus
| TxConfirmed Pico
| FundsTx Tx
| FuelAmount Lovelace
| SubscriptionStarted HeadStatus
| AlreadySubscribed HeadName
| InvalidMessage T.Text
| UnhandledMessage
| HeadExistsResult Bool
| DevnetRestarted
| ServerError HydraPayError
| HeadStatusChanged HeadName Status (Map Address Lovelace)
| NodeMessage (ServerOutput Value)
| DevnetAddresses [Address]
| CurrentStats HydraPayStats
| RequestError T.Text
| NotAuthenticated
| AuthResult Bool
| L1Balance Lovelace
| HeadBalance Lovelace
| BalanceChange HeadName (Map Address Lovelace)
| HeadRemoved HeadName
| ApiError T.Text
| HydraPayMode Config.HydraPayMode
| IsManagedDevnet Bool
| ProxyAddressInfo ProxyInfo
| WithdrawSubmitted TxId
deriving (Eq, Show, Generic)
instance ToJSON ServerMsg
instance FromJSON ServerMsg
-- | Information about the managed proxy-address
-- for a specific address
data ProxyInfo = ProxyInfo
{ proxyInfo_address :: Address
, proxyInfo_proxyAddress :: Address
, proxyInfo_balance :: Lovelace
, proxyInfo_fuel :: Lovelace
}
deriving (Eq, Show, Generic)
instance ToJSON ProxyInfo
instance FromJSON ProxyInfo
data ApiMsg
= TaggedMsg (Tagged ServerMsg)
| PlainMsg ServerMsg
instance FromJSON ApiMsg where
parseJSON v = (TaggedMsg <$> parseJSON v) <|> (PlainMsg <$> parseJSON v)
data Tx = Tx
{ txType :: T.Text
, txDescription :: T.Text
, txCborHex :: T.Text
}
deriving (Eq, Show, Generic)
instance ToJSON Tx where
toJSON (Tx t d c) =
object [ "type" .= t
, "description" .= d
, "cborHex" .= c
]
instance FromJSON Tx where
parseJSON = withObject "Tx" $ \v -> Tx
<$> v .: "type"
<*> v .: "description"
<*> v .: "cborHex"
data TxType =
Funds | Fuel
deriving (Eq, Show, Generic)
instance ToJSON TxType
instance FromJSON TxType
isFuelType :: TxType -> Bool
isFuelType Fuel = True
isFuelType _ = False
makeLenses ''HydraPayStats
makePrisms ''ApiMsg
makePrisms ''ServerMsg
| null | https://raw.githubusercontent.com/obsidiansystems/hydra-pay/263ccf817c5ed447b1bb2f433253b0cde749082a/common/src/HydraPay/Api.hs | haskell | This is the API json type that we need to send back out
^ Anytime a command fails
| State the head can be in, progressing linearly though the states.
^ Kills network and removes head
Amount of addresses
| Information about the managed proxy-address
for a specific address | # LANGUAGE TemplateHaskell #
module HydraPay.Api where
import Data.Map
import Data.Int
import GHC.Generics
import Data.Aeson as Aeson
import qualified Data.Text as T
import Control.Applicative((<|>))
import Control.Lens.TH
import Data.Fixed (Pico)
import Hydra.Types
import Hydra.ServerOutput as ServerOutput
import qualified HydraPay.Config as Config
type HeadName = T.Text
data HydraPayStats = HydraPayStats
{ _hydraPayStats_heads :: Integer
, _hydraPayStats_nodes :: Integer
}
deriving (Eq, Show, Generic)
instance ToJSON HydraPayStats
instance FromJSON HydraPayStats
data HeadCreate = HeadCreate
{ headCreate_name :: HeadName
, headCreate_participants :: [Address]
}
deriving (Eq, Show, Generic)
instance ToJSON HeadCreate
instance FromJSON HeadCreate
data HeadInit = HeadInit
{ headInit_name :: HeadName
, headInit_contestation :: Integer
}
deriving (Eq, Show, Generic)
instance ToJSON HeadInit
instance FromJSON HeadInit
data HeadCommit = HeadCommit
{ headCommit_name :: HeadName
, headCommit_participant :: Address
, headCommit_amount :: Lovelace
}
deriving (Eq, Show, Generic)
instance ToJSON HeadCommit
instance FromJSON HeadCommit
data HeadSubmitTx = HeadSubmitTx
{ headSubmitTx_name :: HeadName
, headSubmitTx_toAddr :: Address
, amount :: Lovelace
}
deriving (Eq, Show, Generic)
instance ToJSON HeadSubmitTx
instance FromJSON HeadSubmitTx
data HeadStatus = HeadStatus
{ headStatus_name :: HeadName
, headStatus_running :: Bool
, headStatus_status :: Status
, headStatus_balances :: Map Address Lovelace
}
deriving (Eq, Show, Generic)
instance ToJSON HeadStatus
instance FromJSON HeadStatus
data Tagged a = Tagged
{ tagged_id :: Int64
, tagged_payload :: a
}
deriving (Eq, Show, Generic)
instance ToJSON a => ToJSON (Tagged a)
instance FromJSON a => FromJSON (Tagged a)
data HydraPayError
= InvalidPayload
| HeadCreationFailed
| NotEnoughParticipants
| HeadExists HeadName
| HeadDoesn'tExist
| NetworkIsn'tRunning
| FailedToBuildFundsTx
| NodeCommandFailed
| NotAParticipant
| ProcessError String
| NoValidUTXOToCommit
| InsufficientFunds
| FanoutNotPossible
| TxInvalid {utxo :: WholeUTXO, transaction :: Value, validationError :: ValidationError}
deriving (Eq, Show, Generic)
instance ToJSON HydraPayError
instance FromJSON HydraPayError
data Status
= Status_Pending
| Status_Init
| Status_Committing
| Status_Aborted
| Status_Open
| Status_Closed
| Status_Fanout
| Status_Finalized
deriving (Eq, Ord, Show, Generic)
instance ToJSON Status
instance FromJSON Status
data ClientMsg
= ClientHello
| Authenticate T.Text
| DoesHeadExist T.Text
| CreateHead HeadCreate
| InitHead HeadInit
| CommitHead HeadCommit
| CloseHead HeadName
| TearDownHead HeadName
| CheckFuel Address
| Withdraw Address Bool
| GetAddTx TxType Address Lovelace
| SubscribeTo HeadName
| SubmitHeadTx Address HeadSubmitTx
| RestartDevnet
| GetStats
| GetL1Balance Address
| GetHeadBalance HeadName Address
| LiveDocEzSubmitTx Tx Address
| GetIsManagedDevnet
| GetHydraPayMode
| GetProxyInfo Address
deriving (Eq, Show, Generic)
instance ToJSON ClientMsg
instance FromJSON ClientMsg
type Version = T.Text
versionStr :: Version
versionStr = "0.1.0"
data ServerMsg
= ServerHello Version
| OperationSuccess
| HeadInfo HeadStatus
| TxConfirmed Pico
| FundsTx Tx
| FuelAmount Lovelace
| SubscriptionStarted HeadStatus
| AlreadySubscribed HeadName
| InvalidMessage T.Text
| UnhandledMessage
| HeadExistsResult Bool
| DevnetRestarted
| ServerError HydraPayError
| HeadStatusChanged HeadName Status (Map Address Lovelace)
| NodeMessage (ServerOutput Value)
| DevnetAddresses [Address]
| CurrentStats HydraPayStats
| RequestError T.Text
| NotAuthenticated
| AuthResult Bool
| L1Balance Lovelace
| HeadBalance Lovelace
| BalanceChange HeadName (Map Address Lovelace)
| HeadRemoved HeadName
| ApiError T.Text
| HydraPayMode Config.HydraPayMode
| IsManagedDevnet Bool
| ProxyAddressInfo ProxyInfo
| WithdrawSubmitted TxId
deriving (Eq, Show, Generic)
instance ToJSON ServerMsg
instance FromJSON ServerMsg
data ProxyInfo = ProxyInfo
{ proxyInfo_address :: Address
, proxyInfo_proxyAddress :: Address
, proxyInfo_balance :: Lovelace
, proxyInfo_fuel :: Lovelace
}
deriving (Eq, Show, Generic)
instance ToJSON ProxyInfo
instance FromJSON ProxyInfo
data ApiMsg
= TaggedMsg (Tagged ServerMsg)
| PlainMsg ServerMsg
instance FromJSON ApiMsg where
parseJSON v = (TaggedMsg <$> parseJSON v) <|> (PlainMsg <$> parseJSON v)
data Tx = Tx
{ txType :: T.Text
, txDescription :: T.Text
, txCborHex :: T.Text
}
deriving (Eq, Show, Generic)
instance ToJSON Tx where
toJSON (Tx t d c) =
object [ "type" .= t
, "description" .= d
, "cborHex" .= c
]
instance FromJSON Tx where
parseJSON = withObject "Tx" $ \v -> Tx
<$> v .: "type"
<*> v .: "description"
<*> v .: "cborHex"
data TxType =
Funds | Fuel
deriving (Eq, Show, Generic)
instance ToJSON TxType
instance FromJSON TxType
isFuelType :: TxType -> Bool
isFuelType Fuel = True
isFuelType _ = False
makeLenses ''HydraPayStats
makePrisms ''ApiMsg
makePrisms ''ServerMsg
|
85f4e76d37f0517745f199fe8b4077e13532ddb47392fc75f65b369ebfb3845d | generateme/fastmath | f.clj | (ns fastmath.fields.f
(:require [fastmath.core :as m]
[fastmath.random :as r]
[fastmath.vector :as v]
[fastmath.fields.utils :as u])
(:import [fastmath.vector Vec2]))
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defn fdisc
([] {:type :regular
:config (fn [] {:ashift (r/drand m/-PI m/PI)
:rshift (r/drand m/-PI m/PI)
:xshift (r/drand -2.0 2.0)
:yshift (r/drand -2.0 2.0)
:term1 (r/randval 0.0 (r/drand -2.0 2.0))
:term2 (r/randval 0.0 (r/drand -2.0 2.0))
:term3 (r/randval 0.0 (r/drand -2.0 2.0))
:term4 (u/sdrand 0.5 1.5)})})
([^double amount {:keys [^double ashift ^double rshift ^double xshift ^double yshift
^double term1 ^double term2 ^double term3 ^double term4]}]
(fn [^Vec2 v]
(let [afactor (/ m/TWO_PI (+ (v/mag v) ashift))
r (* 0.5 (+ (* (v/heading v) m/M_1_PI) rshift))
xfactor (m/cos (+ afactor xshift))
yfactor (m/sin (+ afactor yshift))
pr (* amount r)
t3pr (* term3 pr)
prx (* pr xfactor)
pry (* pr yfactor)]
(Vec2. (+ (* term1 prx) (* term2 prx (.x v)) (* t3pr (.x v)) (* term4 (.x v)))
(+ (* term1 pry) (* term2 pry (.y v)) (* t3pr (.y v)) (* term4 (.y v))))))))
(defn fan2
"Fan2"
([] {:type :regular
:config (fn [] {:x (r/drand -2.0 2.0)
:y (r/drand -2.0 2.0)})})
([^double amount {:keys [^double x ^double y]}]
(fn [^Vec2 v]
(let [r (v/mag v)
angle (v/heading v)
ac (+ angle y)
dx (+ m/EPSILON (* m/PI x x))
dx2 (* 0.5 dx)
t (- ac (* dx (long (/ ac dx))))
a (if (> t dx2)
(- angle dx2)
(+ angle dx2))]
(Vec2. (* amount r (m/sin a))
(* amount r (m/cos a)))))))
(defn fan
"Fan"
([] {:type :regular
:config (fn [] {:coeff20 (r/drand -2.0 2.0)
:coeff21 (r/drand -2.0 2.0)})})
([^double amount {:keys [^double coeff20 ^double coeff21]}]
(let [dx (+ m/EPSILON (* m/PI (m/sq coeff20)))
dx2 (* 0.5 dx)]
(fn [^Vec2 v]
(let [angle (v/heading v)
r (* amount (v/mag v))
ac (+ angle coeff21)
a (if (> ^double (mod ac dx) dx2)
(- angle dx2)
(+ angle dx2))]
(Vec2. (* r (m/cos a))
(* r (m/sin a))))))))
(def ^:const ^:private ^double fib-fnatlog (m/log m/PHI))
(defn fibonacci2
([] {:type :regular
:config (fn [] {:sc (u/sdrand 0.2 2.0)
:sc2 (u/sdrand 0.2 2.0)})})
([^double amount {:keys [^double sc ^double sc2]}]
(let [affive (/ amount m/SQRT5)]
(fn [^Vec2 v]
(let [a (* (.y v) fib-fnatlog)
snum1 (m/sin a)
cnum1 (m/cos a)
b (- (+ (* (.x v) m/PI) a))
snum2 (m/sin b)
cnum2 (m/cos b)
aa (* (.x v) fib-fnatlog)
eradius1 (* sc (m/exp (* sc2 aa)))
eradius2 (* sc (m/exp (* sc2 -1.0 (- aa (* (.y v) m/PI)))))]
(Vec2. (* affive (- (* eradius1 cnum1) (* eradius2 cnum2)))
(* affive (- (* eradius1 snum1) (* eradius2 snum2)))))))))
(defn fisheye
"Fisheye"
([] {:type :regular})
([^double amount _]
(fn [^Vec2 v]
(let [r (/ (* amount 4.0) (inc (v/mag v)))]
(Vec2. (* r (.y v)) (* r (.x v)))))))
(defn flipcircle
([] {:type :regular})
([^double amount _]
(let [samount (* amount amount)]
(fn [^Vec2 v]
(if (> (v/magsq v) samount)
(v/mult v amount)
(Vec2. (* amount (.x v)) (* amount -1.0 (.y v))))))))
(defn flipy
([] {:type :regular})
([^double amount _]
(fn [^Vec2 v]
(if-not (pos? (.x v))
(v/mult v amount)
(Vec2. (* amount (.x v)) (* amount -1.0 (.y v)))))))
(defn flower
"Flower"
([] {:type :random
:config (fn [] {:petals (r/randval (u/sirand 1 11)(u/sdrand 0.1 10.0))
:holes (r/drand -2.0 2.0)})})
([^double amount {:keys [^double petals ^double holes]}]
(fn [v]
(let [theta (v/heading v)
d (/ 1.0 (+ m/EPSILON (v/mag v)))
r (* amount (- (r/drand) holes) (m/cos (* petals theta)) d)]
(v/mult v r)))))
(defn flux
"Flux"
([] {:type :regular
:config (fn [] {:spread (u/sdrand 0.1 2.0)})})
([^double amount {:keys [^double spread]}]
(let [aspread2 (* amount (+ 2.0 spread))]
(fn [^Vec2 v]
(let [xpw (+ (.x v) amount)
xmw (- (.x v) amount)
y2 (* (.y v) (.y v))
avgr (* aspread2 (m/sqrt (/ (m/sqrt (+ y2 (* xpw xpw)))
(m/sqrt (+ y2 (* xmw xmw))))))
avga (* 0.5 (- (m/atan2 (.y v) xmw) (m/atan2 (.y v) xpw)))]
(Vec2. (* avgr (m/cos avga))
(* avgr (m/sin avga))))))))
(defn foci
"Foci"
([] {:type :regular})
([^double amount _]
(fn [^Vec2 v]
(let [expx (* 0.5 (m/exp (.x v)))
expnx (/ 0.25 expx)
sy (m/sin (.y v))
cy (m/cos (.y v))
tmp (- (+ expx expnx) cy)
tmp (/ amount (if (zero? tmp) m/EPSILON tmp))]
(Vec2. (* tmp (- expx expnx))
(* tmp sy))))))
(defn fourth
([] {:type :regular
:config (fn [] {:spin (r/drand m/-TWO_PI m/TWO_PI)
:space (u/sdrand 0.1 2.0)
:twist (r/drand m/-TWO_PI m/TWO_PI)
:x (r/randval 0.0 (r/drand -0.5 0.5))
:y (r/randval 0.0 (r/drand -0.5 0.5))})})
([^double amount {:keys [^double spin ^double space ^double twist
^double x ^double y]}]
(let [sqrvvar (* amount amount)
^Vec2 x-y (Vec2. (- x) y)
^Vec2 xy- (Vec2. x (- y))]
(fn [^Vec2 v]
(cond
(and (pos? (.x v))
(pos? (.y v))) (let [a (v/heading v)
r (/ (v/mag v))
s (m/sin a)
c (m/cos a)]
(Vec2. (* amount r c) (* amount r s)))
(and (pos? (.x v))
(neg? (.y v))) (let [r2 (v/magsq v)]
(if (< r2 sqrvvar)
(let [r (* amount (m/sqrt (dec (/ sqrvvar r2))))]
(v/mult v r))
(v/mult v amount)))
(and (neg? (.x v))
(pos? (.y v))) (let [xy (v/add v x-y)
r (v/mag xy)]
(if (< r amount)
(let [a (+ (v/heading xy) spin (* twist (- amount r)))
r (* amount r)]
(v/add (Vec2. (* r (m/cos a))
(* r (m/sin a))) xy-))
(let [r (* amount (inc (/ space r)))]
(v/add (v/mult xy r) xy-))))
:else (v/mult v amount))))))
(defn funnel
([] {:type :regular
:config (fn [] {:effect (r/drand -1.6 1.6)})})
([^double amount {:keys [^double effect]}]
(let [effect (* effect m/PI)]
(fn [v]
(v/emult (v/mult (v/tanh v) amount)
(v/shift (v/reciprocal (v/cos v)) effect))))))
;;
(defn foucaut
"Foucaut"
([] {:type :regular})
([^double amount _]
(let [as (* amount m/SQRTPI)]
(fn [^Vec2 v]
(let [k (* 0.5 (.y v))
cosk (m/cos k)
xx (->> cosk
(* cosk)
(* (m/cos (.y v)))
(* (/ (.x v) m/SQRTPI))
(* 2.0)
(* amount))
yy (* as (m/tan k))]
(Vec2. xx yy))))))
| null | https://raw.githubusercontent.com/generateme/fastmath/820d9e5c432441465d5d6576d4a583677a3bd84f/src/fastmath/fields/f.clj | clojure | (ns fastmath.fields.f
(:require [fastmath.core :as m]
[fastmath.random :as r]
[fastmath.vector :as v]
[fastmath.fields.utils :as u])
(:import [fastmath.vector Vec2]))
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defn fdisc
([] {:type :regular
:config (fn [] {:ashift (r/drand m/-PI m/PI)
:rshift (r/drand m/-PI m/PI)
:xshift (r/drand -2.0 2.0)
:yshift (r/drand -2.0 2.0)
:term1 (r/randval 0.0 (r/drand -2.0 2.0))
:term2 (r/randval 0.0 (r/drand -2.0 2.0))
:term3 (r/randval 0.0 (r/drand -2.0 2.0))
:term4 (u/sdrand 0.5 1.5)})})
([^double amount {:keys [^double ashift ^double rshift ^double xshift ^double yshift
^double term1 ^double term2 ^double term3 ^double term4]}]
(fn [^Vec2 v]
(let [afactor (/ m/TWO_PI (+ (v/mag v) ashift))
r (* 0.5 (+ (* (v/heading v) m/M_1_PI) rshift))
xfactor (m/cos (+ afactor xshift))
yfactor (m/sin (+ afactor yshift))
pr (* amount r)
t3pr (* term3 pr)
prx (* pr xfactor)
pry (* pr yfactor)]
(Vec2. (+ (* term1 prx) (* term2 prx (.x v)) (* t3pr (.x v)) (* term4 (.x v)))
(+ (* term1 pry) (* term2 pry (.y v)) (* t3pr (.y v)) (* term4 (.y v))))))))
(defn fan2
"Fan2"
([] {:type :regular
:config (fn [] {:x (r/drand -2.0 2.0)
:y (r/drand -2.0 2.0)})})
([^double amount {:keys [^double x ^double y]}]
(fn [^Vec2 v]
(let [r (v/mag v)
angle (v/heading v)
ac (+ angle y)
dx (+ m/EPSILON (* m/PI x x))
dx2 (* 0.5 dx)
t (- ac (* dx (long (/ ac dx))))
a (if (> t dx2)
(- angle dx2)
(+ angle dx2))]
(Vec2. (* amount r (m/sin a))
(* amount r (m/cos a)))))))
(defn fan
"Fan"
([] {:type :regular
:config (fn [] {:coeff20 (r/drand -2.0 2.0)
:coeff21 (r/drand -2.0 2.0)})})
([^double amount {:keys [^double coeff20 ^double coeff21]}]
(let [dx (+ m/EPSILON (* m/PI (m/sq coeff20)))
dx2 (* 0.5 dx)]
(fn [^Vec2 v]
(let [angle (v/heading v)
r (* amount (v/mag v))
ac (+ angle coeff21)
a (if (> ^double (mod ac dx) dx2)
(- angle dx2)
(+ angle dx2))]
(Vec2. (* r (m/cos a))
(* r (m/sin a))))))))
(def ^:const ^:private ^double fib-fnatlog (m/log m/PHI))
(defn fibonacci2
([] {:type :regular
:config (fn [] {:sc (u/sdrand 0.2 2.0)
:sc2 (u/sdrand 0.2 2.0)})})
([^double amount {:keys [^double sc ^double sc2]}]
(let [affive (/ amount m/SQRT5)]
(fn [^Vec2 v]
(let [a (* (.y v) fib-fnatlog)
snum1 (m/sin a)
cnum1 (m/cos a)
b (- (+ (* (.x v) m/PI) a))
snum2 (m/sin b)
cnum2 (m/cos b)
aa (* (.x v) fib-fnatlog)
eradius1 (* sc (m/exp (* sc2 aa)))
eradius2 (* sc (m/exp (* sc2 -1.0 (- aa (* (.y v) m/PI)))))]
(Vec2. (* affive (- (* eradius1 cnum1) (* eradius2 cnum2)))
(* affive (- (* eradius1 snum1) (* eradius2 snum2)))))))))
(defn fisheye
"Fisheye"
([] {:type :regular})
([^double amount _]
(fn [^Vec2 v]
(let [r (/ (* amount 4.0) (inc (v/mag v)))]
(Vec2. (* r (.y v)) (* r (.x v)))))))
(defn flipcircle
([] {:type :regular})
([^double amount _]
(let [samount (* amount amount)]
(fn [^Vec2 v]
(if (> (v/magsq v) samount)
(v/mult v amount)
(Vec2. (* amount (.x v)) (* amount -1.0 (.y v))))))))
(defn flipy
([] {:type :regular})
([^double amount _]
(fn [^Vec2 v]
(if-not (pos? (.x v))
(v/mult v amount)
(Vec2. (* amount (.x v)) (* amount -1.0 (.y v)))))))
(defn flower
"Flower"
([] {:type :random
:config (fn [] {:petals (r/randval (u/sirand 1 11)(u/sdrand 0.1 10.0))
:holes (r/drand -2.0 2.0)})})
([^double amount {:keys [^double petals ^double holes]}]
(fn [v]
(let [theta (v/heading v)
d (/ 1.0 (+ m/EPSILON (v/mag v)))
r (* amount (- (r/drand) holes) (m/cos (* petals theta)) d)]
(v/mult v r)))))
(defn flux
"Flux"
([] {:type :regular
:config (fn [] {:spread (u/sdrand 0.1 2.0)})})
([^double amount {:keys [^double spread]}]
(let [aspread2 (* amount (+ 2.0 spread))]
(fn [^Vec2 v]
(let [xpw (+ (.x v) amount)
xmw (- (.x v) amount)
y2 (* (.y v) (.y v))
avgr (* aspread2 (m/sqrt (/ (m/sqrt (+ y2 (* xpw xpw)))
(m/sqrt (+ y2 (* xmw xmw))))))
avga (* 0.5 (- (m/atan2 (.y v) xmw) (m/atan2 (.y v) xpw)))]
(Vec2. (* avgr (m/cos avga))
(* avgr (m/sin avga))))))))
(defn foci
"Foci"
([] {:type :regular})
([^double amount _]
(fn [^Vec2 v]
(let [expx (* 0.5 (m/exp (.x v)))
expnx (/ 0.25 expx)
sy (m/sin (.y v))
cy (m/cos (.y v))
tmp (- (+ expx expnx) cy)
tmp (/ amount (if (zero? tmp) m/EPSILON tmp))]
(Vec2. (* tmp (- expx expnx))
(* tmp sy))))))
(defn fourth
([] {:type :regular
:config (fn [] {:spin (r/drand m/-TWO_PI m/TWO_PI)
:space (u/sdrand 0.1 2.0)
:twist (r/drand m/-TWO_PI m/TWO_PI)
:x (r/randval 0.0 (r/drand -0.5 0.5))
:y (r/randval 0.0 (r/drand -0.5 0.5))})})
([^double amount {:keys [^double spin ^double space ^double twist
^double x ^double y]}]
(let [sqrvvar (* amount amount)
^Vec2 x-y (Vec2. (- x) y)
^Vec2 xy- (Vec2. x (- y))]
(fn [^Vec2 v]
(cond
(and (pos? (.x v))
(pos? (.y v))) (let [a (v/heading v)
r (/ (v/mag v))
s (m/sin a)
c (m/cos a)]
(Vec2. (* amount r c) (* amount r s)))
(and (pos? (.x v))
(neg? (.y v))) (let [r2 (v/magsq v)]
(if (< r2 sqrvvar)
(let [r (* amount (m/sqrt (dec (/ sqrvvar r2))))]
(v/mult v r))
(v/mult v amount)))
(and (neg? (.x v))
(pos? (.y v))) (let [xy (v/add v x-y)
r (v/mag xy)]
(if (< r amount)
(let [a (+ (v/heading xy) spin (* twist (- amount r)))
r (* amount r)]
(v/add (Vec2. (* r (m/cos a))
(* r (m/sin a))) xy-))
(let [r (* amount (inc (/ space r)))]
(v/add (v/mult xy r) xy-))))
:else (v/mult v amount))))))
(defn funnel
([] {:type :regular
:config (fn [] {:effect (r/drand -1.6 1.6)})})
([^double amount {:keys [^double effect]}]
(let [effect (* effect m/PI)]
(fn [v]
(v/emult (v/mult (v/tanh v) amount)
(v/shift (v/reciprocal (v/cos v)) effect))))))
(defn foucaut
"Foucaut"
([] {:type :regular})
([^double amount _]
(let [as (* amount m/SQRTPI)]
(fn [^Vec2 v]
(let [k (* 0.5 (.y v))
cosk (m/cos k)
xx (->> cosk
(* cosk)
(* (m/cos (.y v)))
(* (/ (.x v) m/SQRTPI))
(* 2.0)
(* amount))
yy (* as (m/tan k))]
(Vec2. xx yy))))))
| |
b00ca76a10b67a88ae5840c864986cb6e99f2c161face4e0e8a997736b8dd01e | georgegarrington/Syphon | Primitive.hs | module Transpile.Primitive where
import Data.Maybe
import AST.Definition
import AST.Module
import Transpile.Expression
import Transpile.Subscribe2
import Transpile.Function
Reducer in React hook ca n't be a curried function so we have to do this specific syntax
writeUpdateFn :: Dfn -> Module -> String
writeUpdateFn (Fun name lineNum _ cases wheres) mod =
"const update = ($arg0,$arg1) => {\n\n\
\\tconst $name = \"update\"\n" ++
(if length cases == 1 then writeCaseless 1 (cases !! 0) mod else concatMap (\cse -> writeCase 1 cse mod) cases) ++
"}\n"
writeEffectFn :: Dfn -> Module -> String
writeEffectFn (Fun name lineNum _ cases wheres) mod =
"const effect = ($arg0,$arg1) =>{\n\
\\tconst $name = \"effect\"\n" ++
(if length cases == 1 then writeCaseless 1 (cases !! 0) mod
else concatMap (\cse -> writeCase 1 cse mod) cases) ++
"}\n"
--Given the view definition and (maybe) a subscribe block definition, write the react component
writeReactComponent :: Dfn -> Module -> String -> String
writeReactComponent fn@(Fun name lineNum _ cases wheres) mod hook =
"export const App = () => {\n\n\
\\tconst [state,$dispatch] = " ++ hook ++ "(update,init" ++ (if hook == "useSyphon" then ",effect)" else ")") ++ "\n\
\\twindow.$dispatchRef = $dispatch\n" ++
(case lookupSpecialDfn "initEffects" mod of Nothing -> ""; Just dfn -> writeInitEffects dfn mod) ++
(if isJust $ lookupSpecialDfn "subscribe" mod then writeSubscriptions mod else "") ++
"\treturn (<Context.Provider value = {$dispatch}>{(() => {\n\n\
\\t\treturn view(state)\n\
\\n\t})()}</Context.Provider>)\n" ++
"}\n\n" ++
--The view function
writeFn fn mod
There should only ever be one case for initEffect
writeInitEffects :: Dfn -> Module -> String
writeInitEffects (Fun _ lineNum _ [Case _ _ e] wheres) mod =
"\tReact.useEffect(() => {\n\
\\t\tconst effectList = " ++ writeExpr 2 e mod ++ "\n\
\\t}, [])\n" | null | https://raw.githubusercontent.com/georgegarrington/Syphon/402a326b482e3ce627a15b651b3097c2e09e8a53/src/Transpile/Primitive.hs | haskell | Given the view definition and (maybe) a subscribe block definition, write the react component
The view function | module Transpile.Primitive where
import Data.Maybe
import AST.Definition
import AST.Module
import Transpile.Expression
import Transpile.Subscribe2
import Transpile.Function
Reducer in React hook ca n't be a curried function so we have to do this specific syntax
writeUpdateFn :: Dfn -> Module -> String
writeUpdateFn (Fun name lineNum _ cases wheres) mod =
"const update = ($arg0,$arg1) => {\n\n\
\\tconst $name = \"update\"\n" ++
(if length cases == 1 then writeCaseless 1 (cases !! 0) mod else concatMap (\cse -> writeCase 1 cse mod) cases) ++
"}\n"
writeEffectFn :: Dfn -> Module -> String
writeEffectFn (Fun name lineNum _ cases wheres) mod =
"const effect = ($arg0,$arg1) =>{\n\
\\tconst $name = \"effect\"\n" ++
(if length cases == 1 then writeCaseless 1 (cases !! 0) mod
else concatMap (\cse -> writeCase 1 cse mod) cases) ++
"}\n"
writeReactComponent :: Dfn -> Module -> String -> String
writeReactComponent fn@(Fun name lineNum _ cases wheres) mod hook =
"export const App = () => {\n\n\
\\tconst [state,$dispatch] = " ++ hook ++ "(update,init" ++ (if hook == "useSyphon" then ",effect)" else ")") ++ "\n\
\\twindow.$dispatchRef = $dispatch\n" ++
(case lookupSpecialDfn "initEffects" mod of Nothing -> ""; Just dfn -> writeInitEffects dfn mod) ++
(if isJust $ lookupSpecialDfn "subscribe" mod then writeSubscriptions mod else "") ++
"\treturn (<Context.Provider value = {$dispatch}>{(() => {\n\n\
\\t\treturn view(state)\n\
\\n\t})()}</Context.Provider>)\n" ++
"}\n\n" ++
writeFn fn mod
There should only ever be one case for initEffect
writeInitEffects :: Dfn -> Module -> String
writeInitEffects (Fun _ lineNum _ [Case _ _ e] wheres) mod =
"\tReact.useEffect(() => {\n\
\\t\tconst effectList = " ++ writeExpr 2 e mod ++ "\n\
\\t}, [])\n" |
a4edb8bb068fd797906acc3a8d4d00556cfcf2bf7c0cbd0f40b956261fcf42aa | realworldocaml/book | ppx_string.ml | open Base
open Ppxlib
open Ast_builder.Default
module Where = struct
type t =
| Imprecise of Location.t
| Precise of { mutable position : position }
let is_precise = function
| Imprecise _ -> false
| Precise _ -> true
;;
let advance position char =
let pos_cnum = position.pos_cnum + 1 in
match char with
| '\n' ->
{ position with pos_lnum = position.pos_lnum + 1; pos_bol = pos_cnum; pos_cnum }
| _ -> { position with pos_cnum }
;;
let skip t string =
match t with
| Imprecise _ -> ()
| Precise at ->
for pos = 0 to String.length string - 1 do
at.position <- advance at.position string.[pos]
done
;;
let loc_start = function
| Imprecise loc -> loc.loc_start
| Precise { position } -> position
;;
let loc_end = function
| Imprecise loc -> loc.loc_end
| Precise { position } -> position
;;
let skip_with_loc t string =
let loc_start = loc_start t in
skip t string;
let loc_end = loc_end t in
{ loc_ghost = true; loc_start; loc_end }
;;
let has_escapes ~loc ~string ~delimiter =
match delimiter with
| Some _ -> false
| None ->
let unescaped_len = 1 + String.length string + 1 in
let actual_len = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum in
unescaped_len <> actual_len
;;
let literal_prefix ~delimiter =
match delimiter with
| None -> "\""
| Some id -> Printf.sprintf "{%s|" id
;;
let create ~loc ~string ~delimiter =
if has_escapes ~loc ~string ~delimiter
then Imprecise { loc with loc_ghost = true }
else (
let t = Precise { position = loc.loc_start } in
skip t (literal_prefix ~delimiter);
t)
;;
end
let add_literal string ~where ~start ~until ~acc =
if start >= until
then acc
else (
let literal = String.sub string ~pos:start ~len:(until - start) in
let loc = Where.skip_with_loc where literal in
estring ~loc literal :: acc)
;;
let set_locs loc =
object
inherit Ast_traverse.map
method! location _ = loc
end
;;
let parse_error ~loc ~name string =
Location.raise_errorf ~loc "invalid %s: %S" name string
;;
let parse_expression ~where ~loc ~name string =
let lexbuf = Lexing.from_string string in
lexbuf.lex_abs_pos <- loc.loc_start.pos_cnum;
lexbuf.lex_curr_p <- loc.loc_start;
match Parse.expression lexbuf with
| exception _ -> parse_error ~loc ~name string
| expr -> if Where.is_precise where then expr else (set_locs loc)#expression expr
;;
let parse_ident ~where ~loc ~name module_path =
We want to parse a module path here , such as [ Core . Int ] . Whitespace , comments , etc .
have been allowed here historically .
Parsing a module path using [ ] would be too restrictive and disallow
whitespace , comments , etc .
Parsing [ module_path ^ " .to_string " ] assigns locations incorrectly because of the
synthetic suffix .
So we parse [ module_path ] as an expression . A valid module path , used in an
expression context , looks like a nullary variant constructor . So we convert nullary
variant constructors to module paths , and reject all other expressions .
have been allowed here historically.
Parsing a module path using [Longident.parse] would be too restrictive and disallow
whitespace, comments, etc.
Parsing [module_path ^ ".to_string"] assigns locations incorrectly because of the
synthetic suffix.
So we parse [module_path] as an expression. A valid module path, used in an
expression context, looks like a nullary variant constructor. So we convert nullary
variant constructors to module paths, and reject all other expressions. *)
match parse_expression ~where ~loc ~name module_path with
| { pexp_desc = Pexp_construct (ident, None); _ } ->
pexp_ident ~loc { ident with txt = Ldot (ident.txt, "to_string") }
| _ -> parse_error ~loc ~name module_path
;;
let parse_body ~where string =
let loc = Where.skip_with_loc where string in
parse_expression ~where ~loc ~name:"#{...} expression" string
;;
let parse_module_path ~where string =
let loc = Where.skip_with_loc where string in
parse_ident ~where ~loc ~name:"%{...} module path" string
;;
let parse_pad_length ~where string =
let loc = Where.skip_with_loc where string in
parse_expression ~where ~loc ~name:"#{...} pad length" string
;;
let add_interpreted string ~where ~start ~until ~acc =
Where.skip where "%{";
let loc_start = Where.loc_start where in
let expression =
let string = String.sub string ~pos:start ~len:(until - start) in
let value, module_path, pad_length =
match String.rsplit2 string ~on:'#' with
| None ->
let value = parse_body ~where string in
value, None, None
| Some (body, formatting) ->
let body = parse_body ~where body in
Where.skip where "#";
let module_path, pad_length =
match String.rsplit2 formatting ~on:':' with
| None ->
let fn = parse_module_path ~where formatting in
Some fn, None
| Some (module_path, pad_length) ->
let fn =
if String.is_empty module_path
then None
else Some (parse_module_path ~where module_path)
in
Where.skip where ":";
let len = parse_pad_length ~where pad_length in
fn, Some len
in
body, module_path, pad_length
in
let unpadded =
match module_path with
| None -> fun ~loc:_ -> value
| Some fn -> fun ~loc -> pexp_apply ~loc fn [ Nolabel, value ]
in
match pad_length with
| None -> unpadded
| Some len ->
fun ~loc ->
let ex_var = gen_symbol ~prefix:"__string_exp" () in
let ex = evar ~loc ex_var in
let lenvar = gen_symbol ~prefix:"__string_len" () in
[%expr
let [%p pvar ~loc ex_var] = [%e unpadded ~loc] in
let [%p pvar ~loc lenvar] = Stdlib.String.length [%e ex] in
Stdlib.( ^ )
(Stdlib.String.make (Stdlib.max 0 ([%e len] - [%e evar ~loc lenvar])) ' ')
[%e ex]]
in
let loc_end = Where.loc_end where in
Where.skip where "}";
expression ~loc:{ loc_ghost = true; loc_start; loc_end } :: acc
;;
type interpreted =
{ percent : int
; lbrace : int
; rbrace : int
}
let find_interpreted string ~where ~pos =
String.substr_index string ~pos ~pattern:"%{"
|> Option.map ~f:(fun percent ->
let lbrace = percent + 1 in
match String.substr_index string ~pos:(lbrace + 1) ~pattern:"}" with
| None ->
Where.skip where (String.sub string ~pos ~len:(percent - pos));
let loc = Where.skip_with_loc where "%{" in
Location.raise_errorf ~loc "unterminated %%{"
| Some rbrace -> { percent; lbrace; rbrace })
;;
let rec expand_expressions_from string ~where ~pos ~acc =
match find_interpreted string ~where ~pos with
| None ->
let len = String.length string in
let acc = add_literal string ~where ~start:pos ~until:len ~acc in
List.rev acc
| Some { percent; lbrace; rbrace } ->
let acc = add_literal string ~where ~start:pos ~until:percent ~acc in
let acc = add_interpreted string ~where ~start:(lbrace + 1) ~until:rbrace ~acc in
expand_expressions_from string ~where ~pos:(rbrace + 1) ~acc
;;
let expand_expressions ~loc ~delimiter string =
let where = Where.create ~loc ~delimiter ~string in
expand_expressions_from string ~where ~pos:0 ~acc:[]
;;
let concatenate ~loc expressions =
match expressions with
| [] -> [%expr ""]
| [ expr ] -> [%expr ([%e expr] : Stdlib.String.t)]
| multiple -> [%expr Stdlib.String.concat "" [%e elist ~loc multiple]]
;;
let expand ~expr_loc ~string_loc ~string ~delimiter =
concatenate ~loc:expr_loc (expand_expressions ~loc:string_loc ~delimiter string)
;;
let () =
Ppxlib.Driver.register_transformation
"ppx_string"
~extensions:
[ Extension.declare
"ppx_string.string"
Extension.Context.expression
Ast_pattern.(
pstr (pstr_eval (pexp_constant (pconst_string __' __ __)) nil ^:: nil))
(fun ~loc:expr_loc ~path:_ { loc = string_loc; txt = string } _ delimiter ->
Merlin_helpers.hide_expression
(expand ~expr_loc ~string_loc ~string ~delimiter))
]
;;
| null | https://raw.githubusercontent.com/realworldocaml/book/d822fd065f19dbb6324bf83e0143bc73fd77dbf9/duniverse/ppx_string/src/ppx_string.ml | ocaml | open Base
open Ppxlib
open Ast_builder.Default
module Where = struct
type t =
| Imprecise of Location.t
| Precise of { mutable position : position }
let is_precise = function
| Imprecise _ -> false
| Precise _ -> true
;;
let advance position char =
let pos_cnum = position.pos_cnum + 1 in
match char with
| '\n' ->
{ position with pos_lnum = position.pos_lnum + 1; pos_bol = pos_cnum; pos_cnum }
| _ -> { position with pos_cnum }
;;
let skip t string =
match t with
| Imprecise _ -> ()
| Precise at ->
for pos = 0 to String.length string - 1 do
at.position <- advance at.position string.[pos]
done
;;
let loc_start = function
| Imprecise loc -> loc.loc_start
| Precise { position } -> position
;;
let loc_end = function
| Imprecise loc -> loc.loc_end
| Precise { position } -> position
;;
let skip_with_loc t string =
let loc_start = loc_start t in
skip t string;
let loc_end = loc_end t in
{ loc_ghost = true; loc_start; loc_end }
;;
let has_escapes ~loc ~string ~delimiter =
match delimiter with
| Some _ -> false
| None ->
let unescaped_len = 1 + String.length string + 1 in
let actual_len = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum in
unescaped_len <> actual_len
;;
let literal_prefix ~delimiter =
match delimiter with
| None -> "\""
| Some id -> Printf.sprintf "{%s|" id
;;
let create ~loc ~string ~delimiter =
if has_escapes ~loc ~string ~delimiter
then Imprecise { loc with loc_ghost = true }
else (
let t = Precise { position = loc.loc_start } in
skip t (literal_prefix ~delimiter);
t)
;;
end
let add_literal string ~where ~start ~until ~acc =
if start >= until
then acc
else (
let literal = String.sub string ~pos:start ~len:(until - start) in
let loc = Where.skip_with_loc where literal in
estring ~loc literal :: acc)
;;
let set_locs loc =
object
inherit Ast_traverse.map
method! location _ = loc
end
;;
let parse_error ~loc ~name string =
Location.raise_errorf ~loc "invalid %s: %S" name string
;;
let parse_expression ~where ~loc ~name string =
let lexbuf = Lexing.from_string string in
lexbuf.lex_abs_pos <- loc.loc_start.pos_cnum;
lexbuf.lex_curr_p <- loc.loc_start;
match Parse.expression lexbuf with
| exception _ -> parse_error ~loc ~name string
| expr -> if Where.is_precise where then expr else (set_locs loc)#expression expr
;;
let parse_ident ~where ~loc ~name module_path =
We want to parse a module path here , such as [ Core . Int ] . Whitespace , comments , etc .
have been allowed here historically .
Parsing a module path using [ ] would be too restrictive and disallow
whitespace , comments , etc .
Parsing [ module_path ^ " .to_string " ] assigns locations incorrectly because of the
synthetic suffix .
So we parse [ module_path ] as an expression . A valid module path , used in an
expression context , looks like a nullary variant constructor . So we convert nullary
variant constructors to module paths , and reject all other expressions .
have been allowed here historically.
Parsing a module path using [Longident.parse] would be too restrictive and disallow
whitespace, comments, etc.
Parsing [module_path ^ ".to_string"] assigns locations incorrectly because of the
synthetic suffix.
So we parse [module_path] as an expression. A valid module path, used in an
expression context, looks like a nullary variant constructor. So we convert nullary
variant constructors to module paths, and reject all other expressions. *)
match parse_expression ~where ~loc ~name module_path with
| { pexp_desc = Pexp_construct (ident, None); _ } ->
pexp_ident ~loc { ident with txt = Ldot (ident.txt, "to_string") }
| _ -> parse_error ~loc ~name module_path
;;
let parse_body ~where string =
let loc = Where.skip_with_loc where string in
parse_expression ~where ~loc ~name:"#{...} expression" string
;;
let parse_module_path ~where string =
let loc = Where.skip_with_loc where string in
parse_ident ~where ~loc ~name:"%{...} module path" string
;;
let parse_pad_length ~where string =
let loc = Where.skip_with_loc where string in
parse_expression ~where ~loc ~name:"#{...} pad length" string
;;
let add_interpreted string ~where ~start ~until ~acc =
Where.skip where "%{";
let loc_start = Where.loc_start where in
let expression =
let string = String.sub string ~pos:start ~len:(until - start) in
let value, module_path, pad_length =
match String.rsplit2 string ~on:'#' with
| None ->
let value = parse_body ~where string in
value, None, None
| Some (body, formatting) ->
let body = parse_body ~where body in
Where.skip where "#";
let module_path, pad_length =
match String.rsplit2 formatting ~on:':' with
| None ->
let fn = parse_module_path ~where formatting in
Some fn, None
| Some (module_path, pad_length) ->
let fn =
if String.is_empty module_path
then None
else Some (parse_module_path ~where module_path)
in
Where.skip where ":";
let len = parse_pad_length ~where pad_length in
fn, Some len
in
body, module_path, pad_length
in
let unpadded =
match module_path with
| None -> fun ~loc:_ -> value
| Some fn -> fun ~loc -> pexp_apply ~loc fn [ Nolabel, value ]
in
match pad_length with
| None -> unpadded
| Some len ->
fun ~loc ->
let ex_var = gen_symbol ~prefix:"__string_exp" () in
let ex = evar ~loc ex_var in
let lenvar = gen_symbol ~prefix:"__string_len" () in
[%expr
let [%p pvar ~loc ex_var] = [%e unpadded ~loc] in
let [%p pvar ~loc lenvar] = Stdlib.String.length [%e ex] in
Stdlib.( ^ )
(Stdlib.String.make (Stdlib.max 0 ([%e len] - [%e evar ~loc lenvar])) ' ')
[%e ex]]
in
let loc_end = Where.loc_end where in
Where.skip where "}";
expression ~loc:{ loc_ghost = true; loc_start; loc_end } :: acc
;;
type interpreted =
{ percent : int
; lbrace : int
; rbrace : int
}
let find_interpreted string ~where ~pos =
String.substr_index string ~pos ~pattern:"%{"
|> Option.map ~f:(fun percent ->
let lbrace = percent + 1 in
match String.substr_index string ~pos:(lbrace + 1) ~pattern:"}" with
| None ->
Where.skip where (String.sub string ~pos ~len:(percent - pos));
let loc = Where.skip_with_loc where "%{" in
Location.raise_errorf ~loc "unterminated %%{"
| Some rbrace -> { percent; lbrace; rbrace })
;;
let rec expand_expressions_from string ~where ~pos ~acc =
match find_interpreted string ~where ~pos with
| None ->
let len = String.length string in
let acc = add_literal string ~where ~start:pos ~until:len ~acc in
List.rev acc
| Some { percent; lbrace; rbrace } ->
let acc = add_literal string ~where ~start:pos ~until:percent ~acc in
let acc = add_interpreted string ~where ~start:(lbrace + 1) ~until:rbrace ~acc in
expand_expressions_from string ~where ~pos:(rbrace + 1) ~acc
;;
let expand_expressions ~loc ~delimiter string =
let where = Where.create ~loc ~delimiter ~string in
expand_expressions_from string ~where ~pos:0 ~acc:[]
;;
let concatenate ~loc expressions =
match expressions with
| [] -> [%expr ""]
| [ expr ] -> [%expr ([%e expr] : Stdlib.String.t)]
| multiple -> [%expr Stdlib.String.concat "" [%e elist ~loc multiple]]
;;
let expand ~expr_loc ~string_loc ~string ~delimiter =
concatenate ~loc:expr_loc (expand_expressions ~loc:string_loc ~delimiter string)
;;
let () =
Ppxlib.Driver.register_transformation
"ppx_string"
~extensions:
[ Extension.declare
"ppx_string.string"
Extension.Context.expression
Ast_pattern.(
pstr (pstr_eval (pexp_constant (pconst_string __' __ __)) nil ^:: nil))
(fun ~loc:expr_loc ~path:_ { loc = string_loc; txt = string } _ delimiter ->
Merlin_helpers.hide_expression
(expand ~expr_loc ~string_loc ~string ~delimiter))
]
;;
| |
7cd6230d3f3492c40cb1f13df4af2e3918478393c546cef76aa7a36fa271266f | thi-ng/demos | ex02.clj | (ns ws-ldn-10.ex02
(:require
[thi.ng.math.core :as m]
[thi.ng.geom.core :as g]
[thi.ng.geom.vector :as v]
[thi.ng.geom.circle :as c]
[thi.ng.geom.svg.core :as svg]
[thi.ng.color.core :as col]
[piksel.core :as pix]))
(defn svg-doc
[width body]
(->> body
(svg/svg {:width width :height width})
(svg/serialize)))
(defn compute-dejong
"Computes a single DeJong 2d point vector for given params and XY pos"
[a b c d x y]
(v/vec2
(+ (Math/sin (* a y)) (Math/cos (* (* b x) x)))
(+ (Math/sin (* (* c x) x)) (Math/cos (* d y)))))
(defn dejong-svg
[{:keys [width iter a b c d color bg]}]
(let [scale (/ width 4)
center (v/vec2 (/ width 2))]
(->> (range iter)
;; iterative system: f(x+1) = f(f(x))
(reduce
(fn [[points [x y]] _]
(let [pos (compute-dejong a b c d x y)]
[(conj points (svg/circle (m/madd pos scale center) 1)) pos]))
['() [(m/random width) (m/random width)]])
(first)
(svg/group
{:fill (or color "rgba(0,0,0,0.25)") :stroke "none"}
(if bg (svg/rect [0 0] width width {:fill bg})))
(svg-doc width))))
(comment
(spit "dejong.svg"
(dejong-svg
{:width 600
:iter 100000
:a (m/random -3 3)
:b (m/random -3 3)
:c (m/random -3 3)
:d (m/random -3 3)}))
)
(defn dejong-bitmap
[{:keys [width iter a b c d color bg blend]}]
(prn :coeffs a b c d)
(let [scale (/ width 4.1)
center (v/vec2 (/ width 2))
img (pix/make-image width width)
pixels (pix/get-pixels img)
[red green blue] (map #(* % 255) @color)
alpha (peek @color)
blend-fn (pix/blend-modes blend)]
(when bg
(apply pix/fill-array pixels @bg))
(reduce
(fn [[x y] _]
(let [pos (compute-dejong a b c d x y)
[x y] (m/madd pos scale center)]
(pix/blend-pixel pixels x y width red green blue alpha blend-fn)
pos))
[(m/random width) (m/random width)]
(range iter))
(pix/set-pixels img pixels)
(pix/save-png "dejong.png" img)))
(comment
(dejong-bitmap
{:width 600
:iter 1000000
:a (m/random -3 3)
:b (m/random -3 3)
:c (m/random -3 3)
:d (m/random -3 3)
:color (col/rgba 0.6 0.3 0.1 0.15)
:bg (col/rgba 0 0 0.1)
:blend :add})
(dejong-bitmap
{:width 1280
:iter 3000000
:a 1.815
:b -1.687
:c 2.551
:d 1.151
:color (col/rgba 0.7 0.2 0.25 0.15)
:bg (col/rgba 0 0 0.1)
:blend :add})
;; -2.726 -2.082 -2.239 -2.340
)
(defn dejong-bitmap-duo
[{:keys [width iter a b c d col1 col2 alpha bg blend]}]
(prn :coeffs a b c d)
(let [scale (/ width 4.1)
center (v/vec2 (/ width 2))
img (pix/make-image width width)
pixels (pix/get-pixels img)
[r1 g1 b1] (map #(* % 255) @col1)
[r2 g2 b2] (map #(* % 255) @col2)
blend-fn (pix/blend-modes blend)]
(when bg
(apply pix/fill-array pixels @bg))
(reduce
(fn [[x y] _]
(let [pos (compute-dejong a b c d x y)
red (m/mix* r1 r2 (+ (* x 0.25) 0.5))
green (m/mix* g1 g2 (+ (* x 0.25) 0.5))
blue (m/mix* b1 b2 (+ (* y 0.25) 0.5))
[x y] (m/madd pos scale center)]
(pix/blend-pixel pixels x y width red green blue alpha blend-fn)
pos))
[(m/random width) (m/random width)]
(range iter))
(pix/set-pixels img pixels)
(pix/save-png "dejong.png" img)))
(comment
(dejong-bitmap-duo
{:width 1280
:iter 3000000
:a 1.815
:b -1.687
:c 2.551
:d 1.151
:col1 (col/rgba 0.7 0.2 0.15)
:col2 (col/rgba 0.1 0.2 0.5)
:alpha 0.15
:bg (col/rgba 0 0 0.1)
:blend :add})
)
| null | https://raw.githubusercontent.com/thi-ng/demos/048cd131099a7db29be56b965c053908acad4166/ws-ldn-10/src/clj/ws_ldn_10/ex02.clj | clojure | iterative system: f(x+1) = f(f(x))
-2.726 -2.082 -2.239 -2.340 | (ns ws-ldn-10.ex02
(:require
[thi.ng.math.core :as m]
[thi.ng.geom.core :as g]
[thi.ng.geom.vector :as v]
[thi.ng.geom.circle :as c]
[thi.ng.geom.svg.core :as svg]
[thi.ng.color.core :as col]
[piksel.core :as pix]))
(defn svg-doc
[width body]
(->> body
(svg/svg {:width width :height width})
(svg/serialize)))
(defn compute-dejong
"Computes a single DeJong 2d point vector for given params and XY pos"
[a b c d x y]
(v/vec2
(+ (Math/sin (* a y)) (Math/cos (* (* b x) x)))
(+ (Math/sin (* (* c x) x)) (Math/cos (* d y)))))
(defn dejong-svg
[{:keys [width iter a b c d color bg]}]
(let [scale (/ width 4)
center (v/vec2 (/ width 2))]
(->> (range iter)
(reduce
(fn [[points [x y]] _]
(let [pos (compute-dejong a b c d x y)]
[(conj points (svg/circle (m/madd pos scale center) 1)) pos]))
['() [(m/random width) (m/random width)]])
(first)
(svg/group
{:fill (or color "rgba(0,0,0,0.25)") :stroke "none"}
(if bg (svg/rect [0 0] width width {:fill bg})))
(svg-doc width))))
(comment
(spit "dejong.svg"
(dejong-svg
{:width 600
:iter 100000
:a (m/random -3 3)
:b (m/random -3 3)
:c (m/random -3 3)
:d (m/random -3 3)}))
)
(defn dejong-bitmap
[{:keys [width iter a b c d color bg blend]}]
(prn :coeffs a b c d)
(let [scale (/ width 4.1)
center (v/vec2 (/ width 2))
img (pix/make-image width width)
pixels (pix/get-pixels img)
[red green blue] (map #(* % 255) @color)
alpha (peek @color)
blend-fn (pix/blend-modes blend)]
(when bg
(apply pix/fill-array pixels @bg))
(reduce
(fn [[x y] _]
(let [pos (compute-dejong a b c d x y)
[x y] (m/madd pos scale center)]
(pix/blend-pixel pixels x y width red green blue alpha blend-fn)
pos))
[(m/random width) (m/random width)]
(range iter))
(pix/set-pixels img pixels)
(pix/save-png "dejong.png" img)))
(comment
(dejong-bitmap
{:width 600
:iter 1000000
:a (m/random -3 3)
:b (m/random -3 3)
:c (m/random -3 3)
:d (m/random -3 3)
:color (col/rgba 0.6 0.3 0.1 0.15)
:bg (col/rgba 0 0 0.1)
:blend :add})
(dejong-bitmap
{:width 1280
:iter 3000000
:a 1.815
:b -1.687
:c 2.551
:d 1.151
:color (col/rgba 0.7 0.2 0.25 0.15)
:bg (col/rgba 0 0 0.1)
:blend :add})
)
(defn dejong-bitmap-duo
[{:keys [width iter a b c d col1 col2 alpha bg blend]}]
(prn :coeffs a b c d)
(let [scale (/ width 4.1)
center (v/vec2 (/ width 2))
img (pix/make-image width width)
pixels (pix/get-pixels img)
[r1 g1 b1] (map #(* % 255) @col1)
[r2 g2 b2] (map #(* % 255) @col2)
blend-fn (pix/blend-modes blend)]
(when bg
(apply pix/fill-array pixels @bg))
(reduce
(fn [[x y] _]
(let [pos (compute-dejong a b c d x y)
red (m/mix* r1 r2 (+ (* x 0.25) 0.5))
green (m/mix* g1 g2 (+ (* x 0.25) 0.5))
blue (m/mix* b1 b2 (+ (* y 0.25) 0.5))
[x y] (m/madd pos scale center)]
(pix/blend-pixel pixels x y width red green blue alpha blend-fn)
pos))
[(m/random width) (m/random width)]
(range iter))
(pix/set-pixels img pixels)
(pix/save-png "dejong.png" img)))
(comment
(dejong-bitmap-duo
{:width 1280
:iter 3000000
:a 1.815
:b -1.687
:c 2.551
:d 1.151
:col1 (col/rgba 0.7 0.2 0.15)
:col2 (col/rgba 0.1 0.2 0.5)
:alpha 0.15
:bg (col/rgba 0 0 0.1)
:blend :add})
)
|
9f017da227ffb379ef80754d7753fd61d8edd9c6c0524d380dc837686b460181 | HealthSamurai/ci3 | core_test.clj | (ns ci3.core-test
(:require [clojure.test :refer :all]
[ci3.core :refer :all]))
| null | https://raw.githubusercontent.com/HealthSamurai/ci3/d14a459e677a1f2d89c5df5f09276059f6debccd/test/ci3/core_test.clj | clojure | (ns ci3.core-test
(:require [clojure.test :refer :all]
[ci3.core :refer :all]))
| |
480bd299de512dbca1c63adae08fd1f5c2b588c1bb053a909897cec15e270bc3 | scsibug/hS3 | AWSConnection.hs | -----------------------------------------------------------------------------
-- |
-- Module : Network.AWS.AWSConnection
Copyright : ( c ) 2007
-- License : BSD3
--
Connection and authentication info for an Amazon AWS request .
-----------------------------------------------------------------------------
module Network.AWS.AWSConnection (
-- * Constants
defaultAmazonS3Host, defaultAmazonS3Port,
-- * Function Types
amazonS3Connection, amazonS3ConnectionFromEnv,
-- * Data Types
AWSConnection(..)
) where
import System.Environment
| An Amazon Web Services connection . Everything needed to connect
-- and authenticate requests.
data AWSConnection =
AWSConnection { awsHost :: String, -- ^ Service provider hostname
awsPort :: Int, -- ^ Service provider port number
awsAccessKey :: String, -- ^ Access Key ID
awsSecretKey :: String -- ^ Secret Access Key
} deriving (Show)
| Hostname used for connecting to Amazon 's production S3 service ( @s3.amazonaws.com@ ) .
defaultAmazonS3Host :: String
defaultAmazonS3Host = "s3.amazonaws.com"
| Port number used for connecting to Amazon 's production S3 service ( @80@ ) .
defaultAmazonS3Port :: Int
defaultAmazonS3Port = 80
| Create an AWSConnection to Amazon from credentials . Uses the
-- production service.
amazonS3Connection :: String -- ^ Access Key ID
-> String -- ^ Secret Access Key
^ Connection to Amazon S3
amazonS3Connection = AWSConnection defaultAmazonS3Host defaultAmazonS3Port
-- | Retrieve Access and Secret keys from environment variables
AWS_ACCESS_KEY_ID and , respectively .
-- Either variable being undefined or empty will result in
-- 'Nothing'.
amazonS3ConnectionFromEnv :: IO (Maybe AWSConnection)
amazonS3ConnectionFromEnv =
do ak <- getEnvKey "AWS_ACCESS_KEY_ID"
sk0 <- getEnvKey "AWS_ACCESS_KEY_SECRET"
sk1 <- getEnvKey "AWS_SECRET_ACCESS_KEY"
return $ case (ak, sk0, sk1) of
("", _, _) -> Nothing
( _, "", "") -> Nothing
( _, "", _) -> Just (amazonS3Connection ak sk1)
( _, _, _) -> Just (amazonS3Connection ak sk0)
where getEnvKey s = fmap (maybe "" id . lookup s) getEnvironment
| null | https://raw.githubusercontent.com/scsibug/hS3/ca0c0a4f48f2343324fce94da9c30a8718db7905/Network/AWS/AWSConnection.hs | haskell | ---------------------------------------------------------------------------
|
Module : Network.AWS.AWSConnection
License : BSD3
---------------------------------------------------------------------------
* Constants
* Function Types
* Data Types
and authenticate requests.
^ Service provider hostname
^ Service provider port number
^ Access Key ID
^ Secret Access Key
production service.
^ Access Key ID
^ Secret Access Key
| Retrieve Access and Secret keys from environment variables
Either variable being undefined or empty will result in
'Nothing'. | Copyright : ( c ) 2007
Connection and authentication info for an Amazon AWS request .
module Network.AWS.AWSConnection (
defaultAmazonS3Host, defaultAmazonS3Port,
amazonS3Connection, amazonS3ConnectionFromEnv,
AWSConnection(..)
) where
import System.Environment
| An Amazon Web Services connection . Everything needed to connect
data AWSConnection =
} deriving (Show)
| Hostname used for connecting to Amazon 's production S3 service ( @s3.amazonaws.com@ ) .
defaultAmazonS3Host :: String
defaultAmazonS3Host = "s3.amazonaws.com"
| Port number used for connecting to Amazon 's production S3 service ( @80@ ) .
defaultAmazonS3Port :: Int
defaultAmazonS3Port = 80
| Create an AWSConnection to Amazon from credentials . Uses the
^ Connection to Amazon S3
amazonS3Connection = AWSConnection defaultAmazonS3Host defaultAmazonS3Port
AWS_ACCESS_KEY_ID and , respectively .
amazonS3ConnectionFromEnv :: IO (Maybe AWSConnection)
amazonS3ConnectionFromEnv =
do ak <- getEnvKey "AWS_ACCESS_KEY_ID"
sk0 <- getEnvKey "AWS_ACCESS_KEY_SECRET"
sk1 <- getEnvKey "AWS_SECRET_ACCESS_KEY"
return $ case (ak, sk0, sk1) of
("", _, _) -> Nothing
( _, "", "") -> Nothing
( _, "", _) -> Just (amazonS3Connection ak sk1)
( _, _, _) -> Just (amazonS3Connection ak sk0)
where getEnvKey s = fmap (maybe "" id . lookup s) getEnvironment
|
61f867961ea595cdabd9be940d0ad1ec2613ceb48e8f62c8cca742c960849dcb | nikita-volkov/postgresql-binary | Main.hs | module Main where
import Criterion
import Criterion.Main
import qualified PostgreSQL.Binary.Decoding as D
import qualified PostgreSQL.Binary.Encoding as E
import Prelude
main =
defaultMain
[ b "bool" D.bool ((E.encodingBytes . E.bool) True),
b "int2" (D.int :: D.Value Int16) ((E.encodingBytes . E.int2_int16) 1000),
b "int4" (D.int :: D.Value Int32) ((E.encodingBytes . E.int4_int32) 1000),
b "int8" (D.int :: D.Value Int64) ((E.encodingBytes . E.int8_int64) 1000),
b "float4" D.float4 ((E.encodingBytes . E.float4) 12.65468468),
b "float8" D.float8 ((E.encodingBytes . E.float8) 12.65468468),
b "numeric" D.numeric ((E.encodingBytes . E.numeric) (read "20.213290183")),
b "char" D.char ((E.encodingBytes . E.char_utf8) 'Я'),
b "text" D.text_strict ((E.encodingBytes . E.text_strict) "alsdjflskjдывлоаы оады"),
b "bytea" D.bytea_strict ((E.encodingBytes . E.bytea_strict) "alskdfj;dasjfl;dasjflksdj"),
b "date" D.date ((E.encodingBytes . E.date) (read "2000-01-19")),
b "time" D.time_int ((E.encodingBytes . E.time_int) (read "10:41:06")),
b "timetz" D.timetz_int ((E.encodingBytes . E.timetz_int) (read "(10:41:06, +0300)")),
b "timestamp" D.timestamp_int ((E.encodingBytes . E.timestamp_int) (read "2000-01-19 10:41:06")),
b "timestamptz" D.timestamptz_int ((E.encodingBytes . E.timestamptz_int) (read "2000-01-19 10:41:06")),
b "interval" D.interval_int ((E.encodingBytes . E.interval_int) (secondsToDiffTime 23472391128374)),
b "uuid" D.uuid ((E.encodingBytes . E.uuid) (read "550e8400-e29b-41d4-a716-446655440000")),
let encoder =
E.array 23 . E.dimensionArray foldl' (E.encodingArray . E.int4_int32)
decoder =
D.array $
D.dimensionArray replicateM $
D.valueArray $
(D.int :: D.Value Int32)
in b "array" decoder (E.encodingBytes (encoder [1, 2, 3, 4]))
]
where
b name decoder value =
bench name $ nf (D.valueParser decoder) value
| null | https://raw.githubusercontent.com/nikita-volkov/postgresql-binary/406cb9a69890eaa92f5aa69092c93e95fc1e0c89/decoding/Main.hs | haskell | module Main where
import Criterion
import Criterion.Main
import qualified PostgreSQL.Binary.Decoding as D
import qualified PostgreSQL.Binary.Encoding as E
import Prelude
main =
defaultMain
[ b "bool" D.bool ((E.encodingBytes . E.bool) True),
b "int2" (D.int :: D.Value Int16) ((E.encodingBytes . E.int2_int16) 1000),
b "int4" (D.int :: D.Value Int32) ((E.encodingBytes . E.int4_int32) 1000),
b "int8" (D.int :: D.Value Int64) ((E.encodingBytes . E.int8_int64) 1000),
b "float4" D.float4 ((E.encodingBytes . E.float4) 12.65468468),
b "float8" D.float8 ((E.encodingBytes . E.float8) 12.65468468),
b "numeric" D.numeric ((E.encodingBytes . E.numeric) (read "20.213290183")),
b "char" D.char ((E.encodingBytes . E.char_utf8) 'Я'),
b "text" D.text_strict ((E.encodingBytes . E.text_strict) "alsdjflskjдывлоаы оады"),
b "bytea" D.bytea_strict ((E.encodingBytes . E.bytea_strict) "alskdfj;dasjfl;dasjflksdj"),
b "date" D.date ((E.encodingBytes . E.date) (read "2000-01-19")),
b "time" D.time_int ((E.encodingBytes . E.time_int) (read "10:41:06")),
b "timetz" D.timetz_int ((E.encodingBytes . E.timetz_int) (read "(10:41:06, +0300)")),
b "timestamp" D.timestamp_int ((E.encodingBytes . E.timestamp_int) (read "2000-01-19 10:41:06")),
b "timestamptz" D.timestamptz_int ((E.encodingBytes . E.timestamptz_int) (read "2000-01-19 10:41:06")),
b "interval" D.interval_int ((E.encodingBytes . E.interval_int) (secondsToDiffTime 23472391128374)),
b "uuid" D.uuid ((E.encodingBytes . E.uuid) (read "550e8400-e29b-41d4-a716-446655440000")),
let encoder =
E.array 23 . E.dimensionArray foldl' (E.encodingArray . E.int4_int32)
decoder =
D.array $
D.dimensionArray replicateM $
D.valueArray $
(D.int :: D.Value Int32)
in b "array" decoder (E.encodingBytes (encoder [1, 2, 3, 4]))
]
where
b name decoder value =
bench name $ nf (D.valueParser decoder) value
| |
ceb95315c7945a0a4983c3fbf538822d5f786f4ec8ca1c454819f92698a79817 | ewestern/geos | STRTreeSpec.hs | module Data.Geometry.Geos.STRTreeSpec where
import Data.Either
import qualified Data.Vector as V
import qualified Data.Geometry.Geos.STRTree as STR
import Data.Geometry.Geos.Geometry
import Test.Hspec
strSpec = describe "STRTree" $ do
describe "fromList" $ do
it "should correctly build an STRTree" $ do
let points = fmap (flip PointGeometry Nothing . point . uncurry coordinate2)
[ (0.1, 0.2)
, (0.2, 0.3)
, (0.3, 0.4)
]
mapping :: [(Geometry Point, Int)]
mapping = zip points [1..]
strtree = STR.fromList mapping
env = V.fromList $ fmap (uncurry coordinate2)
[ (0, 0)
, (1, 0)
, (1, 1)
, (0, 1)
, (0, 0)
]
eitherQuery = linearRing env >>= polygon . V.singleton
query = PolygonGeometry (fromRight (error "shouldn't happen") eitherQuery) Nothing
results = STR.lookup query strtree
results `shouldBe` (V.fromList [1..3])
describe "unfoldr" $ do
it "should correctly build an STRTree" $ do
let buildFunc i =
if i < 1
then Just ((flip PointGeometry Nothing . point $ coordinate2 i i, 100 + i), i+0.1 )
else Nothing
init :: Double
init = 0.1
strtree = STR.unfoldr buildFunc init
env = V.fromList $ fmap (uncurry coordinate2)
[ (0, 0)
, (1, 0)
, (1, 1)
, (0, 1)
, (0, 0)
]
eitherQuery = linearRing env >>= polygon . V.singleton
query = PolygonGeometry (fromRight (error "shouldn't happen") eitherQuery) Nothing
results = STR.lookup query strtree
V.length results `shouldBe` 10
| null | https://raw.githubusercontent.com/ewestern/geos/3568c3449efe180bd89959c9247d4667137662b6/tests/Data/Geometry/Geos/STRTreeSpec.hs | haskell | module Data.Geometry.Geos.STRTreeSpec where
import Data.Either
import qualified Data.Vector as V
import qualified Data.Geometry.Geos.STRTree as STR
import Data.Geometry.Geos.Geometry
import Test.Hspec
strSpec = describe "STRTree" $ do
describe "fromList" $ do
it "should correctly build an STRTree" $ do
let points = fmap (flip PointGeometry Nothing . point . uncurry coordinate2)
[ (0.1, 0.2)
, (0.2, 0.3)
, (0.3, 0.4)
]
mapping :: [(Geometry Point, Int)]
mapping = zip points [1..]
strtree = STR.fromList mapping
env = V.fromList $ fmap (uncurry coordinate2)
[ (0, 0)
, (1, 0)
, (1, 1)
, (0, 1)
, (0, 0)
]
eitherQuery = linearRing env >>= polygon . V.singleton
query = PolygonGeometry (fromRight (error "shouldn't happen") eitherQuery) Nothing
results = STR.lookup query strtree
results `shouldBe` (V.fromList [1..3])
describe "unfoldr" $ do
it "should correctly build an STRTree" $ do
let buildFunc i =
if i < 1
then Just ((flip PointGeometry Nothing . point $ coordinate2 i i, 100 + i), i+0.1 )
else Nothing
init :: Double
init = 0.1
strtree = STR.unfoldr buildFunc init
env = V.fromList $ fmap (uncurry coordinate2)
[ (0, 0)
, (1, 0)
, (1, 1)
, (0, 1)
, (0, 0)
]
eitherQuery = linearRing env >>= polygon . V.singleton
query = PolygonGeometry (fromRight (error "shouldn't happen") eitherQuery) Nothing
results = STR.lookup query strtree
V.length results `shouldBe` 10
| |
181e54d720698b9b42c3e96fd354c0af722f047c455603a7cda8e3c6cf1d0047 | junjihashimoto/hugs-js | sort.hs | import Data.List(sort)
import Control.Monad(forM_)
main = do
str <- readFile "sort.hs"
let lstr = sort $ lines str
forM_ lstr putStrLn
| null | https://raw.githubusercontent.com/junjihashimoto/hugs-js/5a38dbe8310b5d56746ec83c24f7a9f520fbdcd3/npm-sample/sort.hs | haskell | import Data.List(sort)
import Control.Monad(forM_)
main = do
str <- readFile "sort.hs"
let lstr = sort $ lines str
forM_ lstr putStrLn
| |
d42b1b19c3bd49bd32999ab01ece8fe952eeade16948f119fbda648a5afc6a1a | Clojure2D/clojure2d-examples | M_4_1_01.clj | (ns GG.M.M-4-1-01
(:require [clojure2d.core :refer :all]
[fastmath.vector :as v]
[fastmath.random :as r]
[fastmath.core :as m]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(def ^:const w 600)
(def ^:const h 600)
(def ^:const min-x 5)
(def ^:const min-y 5)
(def ^:const max-x (- w 5))
(def ^:const max-y (- h 5))
(defrecord Node [pos velocity damping])
(defn update-node
""
[{:keys [pos velocity ^double damping]}]
(let [npos (v/add pos velocity)
^double x (npos 0)
^double y (npos 1)
vx (if (or (< x min-x)
(> x max-x))
(- ^double (velocity 0))
(velocity 0))
vy (if (or (< y min-y)
(> y max-y))
(- ^double (velocity 1))
(velocity 1))]
(Node. npos (v/mult (v/vec2 vx vy) (- 1.0 damping)) damping)))
(defn random-velocity
""
[n]
(assoc n :velocity (v/vec2 (r/drand -5 5) (r/drand -5 5))))
(defn generate-nodes
""
[cnt]
(for [i (range cnt)]
(Node. (v/vec2 (r/drand w) (r/drand h))
(v/vec2 (r/drand -3 3) (r/drand -3 3))
0.01)))
(defn draw
""
[canvas window _ nodes]
(let [nodes (if (and (key-pressed? window)
(= (key-char window) \s))
(map random-velocity nodes)
nodes)]
(-> canvas
(set-color :white 10)
(rect 0 0 w h)
(set-color :black))
(doseq [^Node node nodes
:let [[x y] (.pos node)]]
(ellipse canvas x y 10 10))
(map update-node nodes)))
(def window (show-window {:canvas (canvas w h)
:draw-fn draw
:setup (fn [canvas _]
(set-background canvas :white)
(generate-nodes 20))}))
| null | https://raw.githubusercontent.com/Clojure2D/clojure2d-examples/9de82f5ac0737b7e78e07a17cf03ac577d973817/src/GG/M/M_4_1_01.clj | clojure | (ns GG.M.M-4-1-01
(:require [clojure2d.core :refer :all]
[fastmath.vector :as v]
[fastmath.random :as r]
[fastmath.core :as m]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(def ^:const w 600)
(def ^:const h 600)
(def ^:const min-x 5)
(def ^:const min-y 5)
(def ^:const max-x (- w 5))
(def ^:const max-y (- h 5))
(defrecord Node [pos velocity damping])
(defn update-node
""
[{:keys [pos velocity ^double damping]}]
(let [npos (v/add pos velocity)
^double x (npos 0)
^double y (npos 1)
vx (if (or (< x min-x)
(> x max-x))
(- ^double (velocity 0))
(velocity 0))
vy (if (or (< y min-y)
(> y max-y))
(- ^double (velocity 1))
(velocity 1))]
(Node. npos (v/mult (v/vec2 vx vy) (- 1.0 damping)) damping)))
(defn random-velocity
""
[n]
(assoc n :velocity (v/vec2 (r/drand -5 5) (r/drand -5 5))))
(defn generate-nodes
""
[cnt]
(for [i (range cnt)]
(Node. (v/vec2 (r/drand w) (r/drand h))
(v/vec2 (r/drand -3 3) (r/drand -3 3))
0.01)))
(defn draw
""
[canvas window _ nodes]
(let [nodes (if (and (key-pressed? window)
(= (key-char window) \s))
(map random-velocity nodes)
nodes)]
(-> canvas
(set-color :white 10)
(rect 0 0 w h)
(set-color :black))
(doseq [^Node node nodes
:let [[x y] (.pos node)]]
(ellipse canvas x y 10 10))
(map update-node nodes)))
(def window (show-window {:canvas (canvas w h)
:draw-fn draw
:setup (fn [canvas _]
(set-background canvas :white)
(generate-nodes 20))}))
| |
29fad0e284b1c3f64abebefc774901bc69604f5294a1f0356333582c604b3879 | ghc/ghc | GhcInGhci.hs | module Settings.Flavours.GhcInGhci (ghcInGhciFlavour) where
import qualified Data.Set as Set
import Expression
import Flavour
import Oracles.Flag
import {-# SOURCE #-} Settings.Default
-- Please update doc/flavours.md when changing this file.
ghcInGhciFlavour :: Flavour
ghcInGhciFlavour = defaultFlavour
{ name = "ghc-in-ghci"
, args = defaultBuilderArgs <> ghciArgs <> defaultPackageArgs
We ca n't build DLLs on Windows ( yet ) . Actually we should only
include the dynamic way when we have a dynamic host GHC , but just
checking for Windows seems simpler for now .
, libraryWays = pure (Set.fromList [vanilla]) <> pure (Set.fromList [ dynamic | not windowsHost ])
, rtsWays = pure (Set.fromList [vanilla]) <> (targetSupportsThreadedRts ? pure (Set.fromList [threaded])) <> pure (Set.fromList [ dynamic | not windowsHost ])
, dynamicGhcPrograms = return False }
ghciArgs :: Args
ghciArgs = sourceArgs SourceArgs
{ hsDefault = mconcat $
[ pure ["-O0", "-H64m"]
]
, hsLibrary = mempty
, hsCompiler = mempty
, hsGhc = mempty }
| null | https://raw.githubusercontent.com/ghc/ghc/37cfe3c0f4fb16189bbe3bb735f758cd6e3d9157/hadrian/src/Settings/Flavours/GhcInGhci.hs | haskell | # SOURCE #
Please update doc/flavours.md when changing this file. | module Settings.Flavours.GhcInGhci (ghcInGhciFlavour) where
import qualified Data.Set as Set
import Expression
import Flavour
import Oracles.Flag
ghcInGhciFlavour :: Flavour
ghcInGhciFlavour = defaultFlavour
{ name = "ghc-in-ghci"
, args = defaultBuilderArgs <> ghciArgs <> defaultPackageArgs
We ca n't build DLLs on Windows ( yet ) . Actually we should only
include the dynamic way when we have a dynamic host GHC , but just
checking for Windows seems simpler for now .
, libraryWays = pure (Set.fromList [vanilla]) <> pure (Set.fromList [ dynamic | not windowsHost ])
, rtsWays = pure (Set.fromList [vanilla]) <> (targetSupportsThreadedRts ? pure (Set.fromList [threaded])) <> pure (Set.fromList [ dynamic | not windowsHost ])
, dynamicGhcPrograms = return False }
ghciArgs :: Args
ghciArgs = sourceArgs SourceArgs
{ hsDefault = mconcat $
[ pure ["-O0", "-H64m"]
]
, hsLibrary = mempty
, hsCompiler = mempty
, hsGhc = mempty }
|
cb69f855f917c77ce91ef4860ab4ae554a93189dabb36cf52623451f09a99853 | MagnusS/okra | test_filter.ml |
* Copyright ( c ) 2021 < >
* Copyright ( c ) 2021 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2021 Magnus Skjegstad <>
* Copyright (c) 2021 Thomas Gazagnaire <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open Okra
let p1 = "Project 1"
let p2 = "Project 2"
let o1 = "O1"
let o2 = "O2"
let t1 = "title1"
let t2 = "title2"
let t3 = "title3"
let e1 = "foo"
let e2 = "bar"
let e3 = "john"
let te1 = [ [ (e1, 1.) ]; [ (e1, 2.); (e2, 2.) ] ]
let te2 = [ [ (e1, 10.) ] ]
let te3 = [ [ (e2, 10.) ]; [ (e3, 5.) ] ]
let id2 = "Id2"
let id3 = "ID3"
let kr1 =
KR.v ~project:p1 ~objective:o1 ~title:t1 ~id:KR.New_KR ~time_entries:te1 []
let kr2 =
KR.v ~project:p2 ~objective:o2 ~title:t2 ~id:(ID id2) ~time_entries:te2 []
let kr3 =
KR.v ~project:p2 ~objective:o2 ~title:t3 ~id:(ID id3) ~time_entries:te3 []
let report () = Okra.Report.of_krs [ kr1; kr2; kr3 ]
let filter ?include_projects ?exclude_projects ?include_objectives
?exclude_objectives ?include_krs ?exclude_krs ?include_engineers
?exclude_engineers t =
let f =
Okra.Report.Filter.v ?include_projects ?exclude_projects ?include_objectives
?exclude_objectives ?include_krs ?exclude_krs ?include_engineers
?exclude_engineers ()
in
Okra.Report.filter f t
let test_include_projects () =
let t = report () in
let t1 = filter t ~include_projects:[ p1 ] in
Alcotest.(check int) "include project 1" 1 (List.length (Report.all_krs t1));
let t2 = filter t ~include_projects:[ p1; p2 ] in
Alcotest.(check int)
"include projects 1,2" 3
(List.length (Report.all_krs t2))
let test_exclude_projects () =
let t = report () in
let t1 = filter t ~exclude_projects:[ p1 ] in
Alcotest.(check int) "exclude project 1" 2 (List.length (Report.all_krs t1));
let t1 = filter t ~include_projects:[ p2 ] ~exclude_projects:[ p1 ] in
Alcotest.(check int) "exclude project 1" 2 (List.length (Report.all_krs t1));
let t2 = filter t ~exclude_projects:[ p1; p2 ] in
Alcotest.(check int)
"exclude projects 1,2" 0
(List.length (Report.all_krs t2))
let test_include_objectives () =
let t = report () in
let t1 = filter t ~include_objectives:[ o1 ] in
Alcotest.(check int) "include objective 1" 1 (List.length (Report.all_krs t1));
let t2 = filter t ~include_objectives:[ o1; o2 ] in
Alcotest.(check int)
"include objectives 1,2" 3
(List.length (Report.all_krs t2))
let test_exclude_objectives () =
let t = report () in
let t1 = filter t ~exclude_objectives:[ o1 ] in
Alcotest.(check int) "exclude objective 1" 2 (List.length (Report.all_krs t1));
let t1 = filter t ~include_objectives:[ o2 ] ~exclude_objectives:[ o1 ] in
Alcotest.(check int) "exclude objective 1" 2 (List.length (Report.all_krs t1));
let t2 = filter t ~exclude_projects:[ p1; p2 ] in
Alcotest.(check int)
"exclude objectives 1,2" 0
(List.length (Report.all_krs t2))
let test_include_krs () =
let t = report () in
let t1 = filter t ~include_krs:[ ID id2 ] in
Alcotest.(check int) "include KRs 2" 1 (List.length (Report.all_krs t1));
let t1' = filter t ~include_krs:[ ID (String.uppercase_ascii id2) ] in
Alcotest.(check int) "include KRs 2" 1 (List.length (Report.all_krs t1'));
let t2 = filter t ~include_krs:[ ID id2; ID id3 ] in
Alcotest.(check int) "include KRs 2,3" 2 (List.length (Report.all_krs t2));
let t3 = filter t ~include_krs:[ New_KR ] in
Alcotest.(check int) "include New KRs" 1 (List.length (Report.all_krs t3))
let test_exclude_krs () =
let t = report () in
let t1 = filter t ~exclude_krs:[ ID id2 ] in
Alcotest.(check int) "exclude KRs 2" 2 (List.length (Report.all_krs t1));
let t1 = filter t ~include_krs:[ New_KR; ID id3 ] ~exclude_krs:[ ID id2 ] in
Alcotest.(check int) "exclude KRs 2" 2 (List.length (Report.all_krs t1));
let t2 = filter t ~exclude_krs:[ ID id2; ID id3 ] in
Alcotest.(check int) "exclude KRs 2,3" 1 (List.length (Report.all_krs t2))
let test_include_engineers () =
let t = report () in
let t1 = filter t ~include_engineers:[ e1 ] in
Alcotest.(check int) "include foo" 2 (List.length (Report.all_krs t1));
let t2 = filter t ~include_engineers:[ e1; e2 ] in
Alcotest.(check int) "include foo,bar" 3 (List.length (Report.all_krs t2))
let get_kr t =
match Report.all_krs t with
| [] -> Alcotest.fail "invalide filter: empty result"
| [ x ] -> x
| _ -> Alcotest.fail "invalid filter: too many results"
let test_exclude_engineers () =
let t = report () in
let t1 = filter t ~exclude_engineers:[ e1 ] in
Alcotest.(check int) "exclude foo" 1 (List.length (Report.all_krs t1));
let t2 = filter t ~exclude_engineers:[ e1; e2 ] in
Alcotest.(check int) "exclude foo,bar" 0 (List.length (Report.all_krs t2));
check that counter do not change if the KR filter return total KRs .
let t1 = filter t ~include_krs:[ ID id2 ] ~include_engineers:[ e1 ] in
let kr = get_kr t1 in
Alcotest.(check int) "check counter" kr2.KR.counter kr.KR.counter;
let t2 = filter t ~include_krs:[ ID id3 ] ~include_engineers:[ e2 ] in
let kr = get_kr t2 in
Alcotest.(check (list (list (pair string (float 0.)))))
"check time entries"
[ [ (e2, 10.) ] ]
kr.KR.time_entries
let tests =
[
("include projects", `Quick, test_include_projects);
("exclude projects", `Quick, test_exclude_projects);
("include objectives", `Quick, test_include_objectives);
("exclude objectives", `Quick, test_exclude_objectives);
("include KRs", `Quick, test_include_krs);
("exclude KRs", `Quick, test_exclude_krs);
("include engineers", `Quick, test_include_engineers);
("exclude engineers", `Quick, test_exclude_engineers);
]
| null | https://raw.githubusercontent.com/MagnusS/okra/13f473baae1e8490887bde156f7bb238ef7c645e/test/test_filter.ml | ocaml |
* Copyright ( c ) 2021 < >
* Copyright ( c ) 2021 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2021 Magnus Skjegstad <>
* Copyright (c) 2021 Thomas Gazagnaire <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open Okra
let p1 = "Project 1"
let p2 = "Project 2"
let o1 = "O1"
let o2 = "O2"
let t1 = "title1"
let t2 = "title2"
let t3 = "title3"
let e1 = "foo"
let e2 = "bar"
let e3 = "john"
let te1 = [ [ (e1, 1.) ]; [ (e1, 2.); (e2, 2.) ] ]
let te2 = [ [ (e1, 10.) ] ]
let te3 = [ [ (e2, 10.) ]; [ (e3, 5.) ] ]
let id2 = "Id2"
let id3 = "ID3"
let kr1 =
KR.v ~project:p1 ~objective:o1 ~title:t1 ~id:KR.New_KR ~time_entries:te1 []
let kr2 =
KR.v ~project:p2 ~objective:o2 ~title:t2 ~id:(ID id2) ~time_entries:te2 []
let kr3 =
KR.v ~project:p2 ~objective:o2 ~title:t3 ~id:(ID id3) ~time_entries:te3 []
let report () = Okra.Report.of_krs [ kr1; kr2; kr3 ]
let filter ?include_projects ?exclude_projects ?include_objectives
?exclude_objectives ?include_krs ?exclude_krs ?include_engineers
?exclude_engineers t =
let f =
Okra.Report.Filter.v ?include_projects ?exclude_projects ?include_objectives
?exclude_objectives ?include_krs ?exclude_krs ?include_engineers
?exclude_engineers ()
in
Okra.Report.filter f t
let test_include_projects () =
let t = report () in
let t1 = filter t ~include_projects:[ p1 ] in
Alcotest.(check int) "include project 1" 1 (List.length (Report.all_krs t1));
let t2 = filter t ~include_projects:[ p1; p2 ] in
Alcotest.(check int)
"include projects 1,2" 3
(List.length (Report.all_krs t2))
let test_exclude_projects () =
let t = report () in
let t1 = filter t ~exclude_projects:[ p1 ] in
Alcotest.(check int) "exclude project 1" 2 (List.length (Report.all_krs t1));
let t1 = filter t ~include_projects:[ p2 ] ~exclude_projects:[ p1 ] in
Alcotest.(check int) "exclude project 1" 2 (List.length (Report.all_krs t1));
let t2 = filter t ~exclude_projects:[ p1; p2 ] in
Alcotest.(check int)
"exclude projects 1,2" 0
(List.length (Report.all_krs t2))
let test_include_objectives () =
let t = report () in
let t1 = filter t ~include_objectives:[ o1 ] in
Alcotest.(check int) "include objective 1" 1 (List.length (Report.all_krs t1));
let t2 = filter t ~include_objectives:[ o1; o2 ] in
Alcotest.(check int)
"include objectives 1,2" 3
(List.length (Report.all_krs t2))
let test_exclude_objectives () =
let t = report () in
let t1 = filter t ~exclude_objectives:[ o1 ] in
Alcotest.(check int) "exclude objective 1" 2 (List.length (Report.all_krs t1));
let t1 = filter t ~include_objectives:[ o2 ] ~exclude_objectives:[ o1 ] in
Alcotest.(check int) "exclude objective 1" 2 (List.length (Report.all_krs t1));
let t2 = filter t ~exclude_projects:[ p1; p2 ] in
Alcotest.(check int)
"exclude objectives 1,2" 0
(List.length (Report.all_krs t2))
let test_include_krs () =
let t = report () in
let t1 = filter t ~include_krs:[ ID id2 ] in
Alcotest.(check int) "include KRs 2" 1 (List.length (Report.all_krs t1));
let t1' = filter t ~include_krs:[ ID (String.uppercase_ascii id2) ] in
Alcotest.(check int) "include KRs 2" 1 (List.length (Report.all_krs t1'));
let t2 = filter t ~include_krs:[ ID id2; ID id3 ] in
Alcotest.(check int) "include KRs 2,3" 2 (List.length (Report.all_krs t2));
let t3 = filter t ~include_krs:[ New_KR ] in
Alcotest.(check int) "include New KRs" 1 (List.length (Report.all_krs t3))
let test_exclude_krs () =
let t = report () in
let t1 = filter t ~exclude_krs:[ ID id2 ] in
Alcotest.(check int) "exclude KRs 2" 2 (List.length (Report.all_krs t1));
let t1 = filter t ~include_krs:[ New_KR; ID id3 ] ~exclude_krs:[ ID id2 ] in
Alcotest.(check int) "exclude KRs 2" 2 (List.length (Report.all_krs t1));
let t2 = filter t ~exclude_krs:[ ID id2; ID id3 ] in
Alcotest.(check int) "exclude KRs 2,3" 1 (List.length (Report.all_krs t2))
let test_include_engineers () =
let t = report () in
let t1 = filter t ~include_engineers:[ e1 ] in
Alcotest.(check int) "include foo" 2 (List.length (Report.all_krs t1));
let t2 = filter t ~include_engineers:[ e1; e2 ] in
Alcotest.(check int) "include foo,bar" 3 (List.length (Report.all_krs t2))
let get_kr t =
match Report.all_krs t with
| [] -> Alcotest.fail "invalide filter: empty result"
| [ x ] -> x
| _ -> Alcotest.fail "invalid filter: too many results"
let test_exclude_engineers () =
let t = report () in
let t1 = filter t ~exclude_engineers:[ e1 ] in
Alcotest.(check int) "exclude foo" 1 (List.length (Report.all_krs t1));
let t2 = filter t ~exclude_engineers:[ e1; e2 ] in
Alcotest.(check int) "exclude foo,bar" 0 (List.length (Report.all_krs t2));
check that counter do not change if the KR filter return total KRs .
let t1 = filter t ~include_krs:[ ID id2 ] ~include_engineers:[ e1 ] in
let kr = get_kr t1 in
Alcotest.(check int) "check counter" kr2.KR.counter kr.KR.counter;
let t2 = filter t ~include_krs:[ ID id3 ] ~include_engineers:[ e2 ] in
let kr = get_kr t2 in
Alcotest.(check (list (list (pair string (float 0.)))))
"check time entries"
[ [ (e2, 10.) ] ]
kr.KR.time_entries
let tests =
[
("include projects", `Quick, test_include_projects);
("exclude projects", `Quick, test_exclude_projects);
("include objectives", `Quick, test_include_objectives);
("exclude objectives", `Quick, test_exclude_objectives);
("include KRs", `Quick, test_include_krs);
("exclude KRs", `Quick, test_exclude_krs);
("include engineers", `Quick, test_include_engineers);
("exclude engineers", `Quick, test_exclude_engineers);
]
| |
471b89c8c6b91ada424a077f62b44fec78ece9c7ea1be68d562dc4f727c2aec7 | slagyr/gaeshi | version.clj | (ns gaeshi.kuzushi.version
(:require
[clojure.string :as str]))
(def major 0)
(def minor 10)
(def tiny 0)
(def snapshot false)
(def string
(str
(str/join "." (filter identity [major minor tiny]))
(if snapshot "-SNAPSHOT" "")))
(def summary (str "gaeshi/lein-gaeshi " string))
(def gaeshi-version "0.10.0")
(def gaeshi-dev-version "0.10.0")
| null | https://raw.githubusercontent.com/slagyr/gaeshi/a5677ed1c8d9269d412f07a7ab33bbc40aa7011a/lein-gaeshi/src/gaeshi/kuzushi/version.clj | clojure | (ns gaeshi.kuzushi.version
(:require
[clojure.string :as str]))
(def major 0)
(def minor 10)
(def tiny 0)
(def snapshot false)
(def string
(str
(str/join "." (filter identity [major minor tiny]))
(if snapshot "-SNAPSHOT" "")))
(def summary (str "gaeshi/lein-gaeshi " string))
(def gaeshi-version "0.10.0")
(def gaeshi-dev-version "0.10.0")
| |
477182fbed052f18d4ba004a734ed562f3425f1a9c8ce866970bcf919163ac15 | Octachron/codept | NAME__a.ml | open Main
open B
open C
open M
| null | https://raw.githubusercontent.com/Octachron/codept/2d2a95fde3f67cdd0f5a1b68d8b8b47aefef9290/tests/complex/namespaced/NAME__a.ml | ocaml | open Main
open B
open C
open M
| |
4acebfccaf56d5983fb21e5624a76cdd5d8ffd72b81ad26c5e7d85e0503ab78b | radian-software/kalyn | Primitives.hs | module Primitives where
import Assembly
import OS
import Subroutines
-- -syscall-table/
see also section 2 of the Linux man pages
basicOp :: String -> BinOp -> Stateful VirtualFunction
basicOp name op = do
temp <- newTemp
return $ function
(name ++ "__uncurried")
[ OP MOV $ MR (getArg 2) temp
, OP op $ MR (getArg 1) temp
, OP MOV $ RR temp rax
, RET
]
plus :: Stateful VirtualFunction
plus = basicOp "plus" ADD
minus :: Stateful VirtualFunction
minus = basicOp "minus" SUB
times :: Stateful VirtualFunction
times = basicOp "times" IMUL
divOp :: String -> [VirtualInstruction] -> Stateful VirtualFunction
divOp name post = do
temp <- newTemp
return
$ function (name ++ "__uncurried")
$ [OP MOV $ MR (getArg 2) rax, CQTO, OP MOV $ MR (getArg 1) temp, IDIV temp]
++ post
++ [RET]
divide :: Stateful VirtualFunction
divide = divOp "divide" []
modulo :: Stateful VirtualFunction
modulo = divOp "modulo" [OP MOV $ RR rdx rax]
bitAnd :: Stateful VirtualFunction
bitAnd = basicOp "and" AND
bitOr :: Stateful VirtualFunction
bitOr = basicOp "or" OR
xor :: Stateful VirtualFunction
xor = basicOp "xor" XOR
bitNot :: Stateful VirtualFunction
bitNot = do
temp <- newTemp
return $ function
"not"
[OP MOV $ MR (getArg 1) temp, UN NOT $ R temp, OP MOV $ RR temp rax]
shiftOp :: String -> Shift -> Stateful VirtualFunction
shiftOp name op = do
arg <- newTemp
fixup <- newLabel
fixupDone <- newLabel
let needsZeroing = case op of
SHL -> True
SAL -> True
SHR -> True
SAR -> False
return $ function
(name ++ "__uncurried")
[ OP MOV $ MR (getArg 2) arg
, OP MOV $ MR (getArg 1) rcx
, OP CMP $ IR 64 rcx
, JUMP JGE fixup
, SHIFT Nothing op arg
, LABEL fixupDone
, OP MOV $ RR arg rax
, RET
, LABEL fixup
, if needsZeroing then OP MOV $ IR 0 arg else SHIFT (Just 63) op arg
, OP MOV $ RR arg rax
, RET
]
shl :: Stateful VirtualFunction
shl = shiftOp "shl" SHL
shr :: Stateful VirtualFunction
shr = shiftOp "shr" SHR
sal :: Stateful VirtualFunction
sal = shiftOp "sal" SAL
sar :: Stateful VirtualFunction
sar = shiftOp "sar" SAR
monadPrint :: Stateful VirtualFunction
monadPrint = do
temp <- newTemp
str <- newTemp
return $ function
"print__uncurried__unmonadified"
[ OP MOV $ MR (getArg 1) temp
, UN PUSH $ R temp
, JUMP CALL "packString"
, OP MOV $ RR rax str
, unpush 1
, OP MOV $ IR 1 rax
, OP MOV $ IR 1 rdi
, LEA (Mem (Right 8) str Nothing) rsi
, OP MOV $ MR (deref str) rdx
, SYSCALL 3 -- write
, OP MOV $ IR 0 rax
, RET
]
monadWriteFile :: Stateful VirtualFunction
monadWriteFile = do
temp <- newTemp
filename <- newTemp
contents <- newTemp
fd <- newTemp
ptr <- newTemp
bytesLeft <- newTemp
notExists <- newLabel
writeStart <- newLabel
writeDone <- newLabel
crash <- newLabel
msg <- newTemp
return $ function
"writeFile__uncurried__unmonadified"
[ OP MOV $ MR (getArg 2) temp
, UN PUSH $ R temp
, JUMP CALL "packString"
, unpush 1
, OP MOV $ RR rax filename
, OP MOV $ MR (getArg 1) temp
, UN PUSH $ R temp
, JUMP CALL "packString"
, unpush 1
, OP MOV $ RR rax contents
, OP MOV $ IR 87 rax
, LEA (getField 1 filename) rdi
, SYSCALL 1 -- unlink
, OP CMP $ IR (-2) rax
, JUMP JE notExists
, OP CMP $ IR 0 rax
, JUMP JL crash
, LABEL notExists
, OP MOV $ IR 2 rax
, LEA (getField 1 filename) rdi
, OP MOV $ IR 0x41 rsi
, OP MOV $ IR 0o666 rdx
, SYSCALL 3 -- open
, OP CMP $ IR 0 rax
, JUMP JL crash
, OP MOV $ RR rax fd
, LEA (getField 1 contents) ptr
, OP MOV $ MR (getField 0 contents) bytesLeft
, LABEL writeStart
, OP CMP $ IR 0 bytesLeft
, JUMP JLE writeDone
, OP MOV $ IR 1 rax
, OP MOV $ RR fd rdi
, OP MOV $ RR ptr rsi
, OP MOV $ RR bytesLeft rdx
, SYSCALL 3 -- write
, OP CMP $ IR 0 rax
, JUMP JL crash
, OP ADD $ RR rax ptr
, OP SUB $ RR rax bytesLeft
, JUMP JMP writeStart
, LABEL writeDone
, OP MOV $ IR 3 rax
, OP MOV $ RR fd rdi
, SYSCALL 1 -- close
, OP CMP $ IR 0 rax
, JUMP JL crash
, OP MOV $ IR 0 rax
, RET
, LABEL crash
, LEA (memLabel "msgWriteFileFailed") msg
, UN PUSH $ R msg
, JUMP CALL "crash"
]
setFileMode :: Stateful VirtualFunction
setFileMode = do
temp <- newTemp
filename <- newTemp
crash <- newLabel
msg <- newTemp
return $ function
"setFileMode__uncurried__unmonadified"
[ OP MOV $ MR (getArg 2) temp
, UN PUSH $ R temp
, JUMP CALL "packString"
, unpush 1
, OP MOV $ RR rax filename
, OP MOV $ IR 90 rax
, LEA (getField 1 filename) rdi
, OP MOV $ MR (getArg 1) rsi
, SYSCALL 2 -- chmod
, OP CMP $ IR 0 rax
, JUMP JL crash
, OP MOV $ IR 0 rax
, RET
, LABEL crash
, LEA (memLabel "msgSetFileModeFailed") msg
, UN PUSH $ R msg
, JUMP CALL "crash"
]
monadGetWorkingDirectory :: Stateful VirtualFunction
monadGetWorkingDirectory = do
buf <- newTemp
msg <- newTemp
crash <- newLabel
return $ function
"getWorkingDirectory__unmonadified"
[ OP MOV $ IR 79 rax
, LEA (memLabel "syscallBuffer") buf
, OP MOV $ RR buf rdi
, OP MOV $ IR (fromIntegral syscallBufferSize) rsi
, SYSCALL 2 -- getcwd
, UN PUSH $ R buf
, UN DEC $ R rax
, UN PUSH $ R rax
, JUMP CALL "unpackString"
, unpush 2
, RET
, LABEL crash
, LEA (memLabel "msgGetWorkingDirectoryFailed") msg
, UN PUSH $ R msg
, JUMP CALL "crash"
]
monadReadFile :: Stateful VirtualFunction
monadReadFile = do
buffer <- newTemp
filename <- newTemp
fd <- newTemp
strStart <- newTemp
strEnd <- newTemp
bytesRead <- newTemp
msg <- newTemp
newString <- newTemp
allocedLength <- newTemp
readStart <- newLabel
readDone <- newLabel
skipStartUpdate <- newLabel
skipEndUpdate <- newLabel
crash <- newLabel
return $ function
"readFile__uncurried__unmonadified"
[ LEA (memLabel "syscallBuffer") buffer
, UN PUSH $ M (getArg 1)
, JUMP CALL "packString"
, unpush 1
, OP MOV $ RR rax filename
, OP MOV $ IR 2 rax
, LEA (getField 1 filename) rdi
, OP MOV $ IR 0 rsi
, SYSCALL 2 -- open
, OP MOV $ RR rax fd
, OP MOV $ IR 0 strStart
, OP MOV $ IR 0 strEnd
, LABEL readStart
, OP MOV $ IR 0 rax
, OP MOV $ RR fd rdi
, OP MOV $ RR buffer rsi
, OP MOV $ IR (fromIntegral syscallBufferSize) rdx
, SYSCALL 3 -- read
, OP MOV $ RR rax bytesRead
, OP CMP $ IR 0 bytesRead
, JUMP JE readDone
, JUMP JL crash
, UN PUSH $ R buffer
, UN PUSH $ R bytesRead
, JUMP CALL "unpackString"
, unpush 2
, OP MOV $ RR rax newString
, OP CMP $ IR 0 strStart
, JUMP JNE skipStartUpdate
, OP MOV $ RR newString strStart
, JUMP JMP skipEndUpdate
, LABEL skipStartUpdate
, OP MOV $ RM newString (deref strEnd)
, LABEL skipEndUpdate
, OP MOV $ RR bytesRead allocedLength
, OP IMUL $ IR 24 allocedLength
, LEA (Mem (Right $ -8) newString (Just (Scale1, allocedLength))) strEnd
, JUMP JMP readStart
, LABEL readDone
, OP MOV $ IR 3 rax
, OP MOV $ RR fd rdi
, SYSCALL 1 -- close
, OP CMP $ IR 0 rax
, JUMP JL crash
, OP MOV $ RR strStart rax
, RET
, LABEL crash
, LEA (memLabel "msgReadFileFailed") msg
, UN PUSH $ R msg
, JUMP CALL "crash"
]
primitiveError :: Stateful VirtualFunction
primitiveError = return $ function
"error__uncurried"
[ UN PUSH $ M (getArg 1)
, JUMP CALL "packString"
, unpush 1
, UN PUSH $ R rax
, JUMP CALL "crash"
]
compareOp :: String -> Jump -> Stateful VirtualFunction
compareOp name op = do
temp <- newTemp
yes <- newLabel
return $ function
name
[ OP MOV $ MR (getArg 2) temp
, OP CMP $ MR (getArg 1) temp
, JUMP op yes
, OP MOV $ IR 0 rax
, RET
, LABEL yes
, OP MOV $ IR 1 rax
, RET
]
equal :: Stateful VirtualFunction
equal = compareOp "equal__uncurried" JE
notEqual :: Stateful VirtualFunction
notEqual = compareOp "notEqual__uncurried" JNE
lessThan :: Stateful VirtualFunction
lessThan = compareOp "lessThan__uncurried" JL
lessThanEqual :: Stateful VirtualFunction
lessThanEqual = compareOp "lessThanEqual__uncurried" JLE
greaterThan :: Stateful VirtualFunction
greaterThan = compareOp "greaterThan__uncurried" JG
greaterThanEqual :: Stateful VirtualFunction
greaterThanEqual = compareOp "greaterThanEqual__uncurried" JGE
monadReturn :: Stateful VirtualFunction
monadReturn = return $ function "return__uncurried__unmonadified"
[OP MOV $ MR (getArg 1) rax, RET]
monadBind :: Stateful VirtualFunction
monadBind = do
firstMonad <- newTemp
secondMonad <- newTemp
arg <- newTemp
fn <- newTemp
firstCallCode <- translateCall firstMonad Nothing
secondCallCode <- translateCall fn (Just arg)
thirdCallCode <- translateCall secondMonad Nothing
return $ function
"bind__uncurried__unmonadified"
( [OP MOV $ MR (getArg 2) firstMonad]
++ firstCallCode
++ [OP MOV $ RR rax arg, OP MOV $ MR (getArg 1) fn]
++ secondCallCode
++ [OP MOV $ RR rax secondMonad]
++ thirdCallCode
++ [RET]
)
primitiveCrash :: Stateful VirtualFunction
primitiveCrash = do
msg <- newTemp
return $ function
"crash"
[ OP MOV $ MR (getArg 1) msg
, OP MOV $ IR 1 rax
, OP MOV $ IR 2 rdi
, LEA (Mem (Right 8) msg Nothing) rsi
, OP MOV $ MR (deref msg) rdx
, SYSCALL 3 -- write
, OP MOV $ IR 60 rax
, OP MOV $ IR 1 rdi
, SYSCALL 1 -- exit
]
primitiveTrace :: Stateful VirtualFunction
primitiveTrace = return $ function
"trace__uncurried"
[ UN PUSH $ M (getArg 2)
, JUMP CALL "print__uncurried__unmonadified"
, unpush 1
, OP MOV $ MR (getArg 1) rax
, RET
]
| null | https://raw.githubusercontent.com/radian-software/kalyn/03d279eee4cf73cd355ed7a13c46340fbb15540b/src/Primitives.hs | haskell | -syscall-table/
write
unlink
open
write
close
chmod
getcwd
open
read
close
write
exit | module Primitives where
import Assembly
import OS
import Subroutines
see also section 2 of the Linux man pages
basicOp :: String -> BinOp -> Stateful VirtualFunction
basicOp name op = do
temp <- newTemp
return $ function
(name ++ "__uncurried")
[ OP MOV $ MR (getArg 2) temp
, OP op $ MR (getArg 1) temp
, OP MOV $ RR temp rax
, RET
]
plus :: Stateful VirtualFunction
plus = basicOp "plus" ADD
minus :: Stateful VirtualFunction
minus = basicOp "minus" SUB
times :: Stateful VirtualFunction
times = basicOp "times" IMUL
divOp :: String -> [VirtualInstruction] -> Stateful VirtualFunction
divOp name post = do
temp <- newTemp
return
$ function (name ++ "__uncurried")
$ [OP MOV $ MR (getArg 2) rax, CQTO, OP MOV $ MR (getArg 1) temp, IDIV temp]
++ post
++ [RET]
divide :: Stateful VirtualFunction
divide = divOp "divide" []
modulo :: Stateful VirtualFunction
modulo = divOp "modulo" [OP MOV $ RR rdx rax]
bitAnd :: Stateful VirtualFunction
bitAnd = basicOp "and" AND
bitOr :: Stateful VirtualFunction
bitOr = basicOp "or" OR
xor :: Stateful VirtualFunction
xor = basicOp "xor" XOR
bitNot :: Stateful VirtualFunction
bitNot = do
temp <- newTemp
return $ function
"not"
[OP MOV $ MR (getArg 1) temp, UN NOT $ R temp, OP MOV $ RR temp rax]
shiftOp :: String -> Shift -> Stateful VirtualFunction
shiftOp name op = do
arg <- newTemp
fixup <- newLabel
fixupDone <- newLabel
let needsZeroing = case op of
SHL -> True
SAL -> True
SHR -> True
SAR -> False
return $ function
(name ++ "__uncurried")
[ OP MOV $ MR (getArg 2) arg
, OP MOV $ MR (getArg 1) rcx
, OP CMP $ IR 64 rcx
, JUMP JGE fixup
, SHIFT Nothing op arg
, LABEL fixupDone
, OP MOV $ RR arg rax
, RET
, LABEL fixup
, if needsZeroing then OP MOV $ IR 0 arg else SHIFT (Just 63) op arg
, OP MOV $ RR arg rax
, RET
]
shl :: Stateful VirtualFunction
shl = shiftOp "shl" SHL
shr :: Stateful VirtualFunction
shr = shiftOp "shr" SHR
sal :: Stateful VirtualFunction
sal = shiftOp "sal" SAL
sar :: Stateful VirtualFunction
sar = shiftOp "sar" SAR
monadPrint :: Stateful VirtualFunction
monadPrint = do
temp <- newTemp
str <- newTemp
return $ function
"print__uncurried__unmonadified"
[ OP MOV $ MR (getArg 1) temp
, UN PUSH $ R temp
, JUMP CALL "packString"
, OP MOV $ RR rax str
, unpush 1
, OP MOV $ IR 1 rax
, OP MOV $ IR 1 rdi
, LEA (Mem (Right 8) str Nothing) rsi
, OP MOV $ MR (deref str) rdx
, OP MOV $ IR 0 rax
, RET
]
monadWriteFile :: Stateful VirtualFunction
monadWriteFile = do
temp <- newTemp
filename <- newTemp
contents <- newTemp
fd <- newTemp
ptr <- newTemp
bytesLeft <- newTemp
notExists <- newLabel
writeStart <- newLabel
writeDone <- newLabel
crash <- newLabel
msg <- newTemp
return $ function
"writeFile__uncurried__unmonadified"
[ OP MOV $ MR (getArg 2) temp
, UN PUSH $ R temp
, JUMP CALL "packString"
, unpush 1
, OP MOV $ RR rax filename
, OP MOV $ MR (getArg 1) temp
, UN PUSH $ R temp
, JUMP CALL "packString"
, unpush 1
, OP MOV $ RR rax contents
, OP MOV $ IR 87 rax
, LEA (getField 1 filename) rdi
, OP CMP $ IR (-2) rax
, JUMP JE notExists
, OP CMP $ IR 0 rax
, JUMP JL crash
, LABEL notExists
, OP MOV $ IR 2 rax
, LEA (getField 1 filename) rdi
, OP MOV $ IR 0x41 rsi
, OP MOV $ IR 0o666 rdx
, OP CMP $ IR 0 rax
, JUMP JL crash
, OP MOV $ RR rax fd
, LEA (getField 1 contents) ptr
, OP MOV $ MR (getField 0 contents) bytesLeft
, LABEL writeStart
, OP CMP $ IR 0 bytesLeft
, JUMP JLE writeDone
, OP MOV $ IR 1 rax
, OP MOV $ RR fd rdi
, OP MOV $ RR ptr rsi
, OP MOV $ RR bytesLeft rdx
, OP CMP $ IR 0 rax
, JUMP JL crash
, OP ADD $ RR rax ptr
, OP SUB $ RR rax bytesLeft
, JUMP JMP writeStart
, LABEL writeDone
, OP MOV $ IR 3 rax
, OP MOV $ RR fd rdi
, OP CMP $ IR 0 rax
, JUMP JL crash
, OP MOV $ IR 0 rax
, RET
, LABEL crash
, LEA (memLabel "msgWriteFileFailed") msg
, UN PUSH $ R msg
, JUMP CALL "crash"
]
setFileMode :: Stateful VirtualFunction
setFileMode = do
temp <- newTemp
filename <- newTemp
crash <- newLabel
msg <- newTemp
return $ function
"setFileMode__uncurried__unmonadified"
[ OP MOV $ MR (getArg 2) temp
, UN PUSH $ R temp
, JUMP CALL "packString"
, unpush 1
, OP MOV $ RR rax filename
, OP MOV $ IR 90 rax
, LEA (getField 1 filename) rdi
, OP MOV $ MR (getArg 1) rsi
, OP CMP $ IR 0 rax
, JUMP JL crash
, OP MOV $ IR 0 rax
, RET
, LABEL crash
, LEA (memLabel "msgSetFileModeFailed") msg
, UN PUSH $ R msg
, JUMP CALL "crash"
]
monadGetWorkingDirectory :: Stateful VirtualFunction
monadGetWorkingDirectory = do
buf <- newTemp
msg <- newTemp
crash <- newLabel
return $ function
"getWorkingDirectory__unmonadified"
[ OP MOV $ IR 79 rax
, LEA (memLabel "syscallBuffer") buf
, OP MOV $ RR buf rdi
, OP MOV $ IR (fromIntegral syscallBufferSize) rsi
, UN PUSH $ R buf
, UN DEC $ R rax
, UN PUSH $ R rax
, JUMP CALL "unpackString"
, unpush 2
, RET
, LABEL crash
, LEA (memLabel "msgGetWorkingDirectoryFailed") msg
, UN PUSH $ R msg
, JUMP CALL "crash"
]
monadReadFile :: Stateful VirtualFunction
monadReadFile = do
buffer <- newTemp
filename <- newTemp
fd <- newTemp
strStart <- newTemp
strEnd <- newTemp
bytesRead <- newTemp
msg <- newTemp
newString <- newTemp
allocedLength <- newTemp
readStart <- newLabel
readDone <- newLabel
skipStartUpdate <- newLabel
skipEndUpdate <- newLabel
crash <- newLabel
return $ function
"readFile__uncurried__unmonadified"
[ LEA (memLabel "syscallBuffer") buffer
, UN PUSH $ M (getArg 1)
, JUMP CALL "packString"
, unpush 1
, OP MOV $ RR rax filename
, OP MOV $ IR 2 rax
, LEA (getField 1 filename) rdi
, OP MOV $ IR 0 rsi
, OP MOV $ RR rax fd
, OP MOV $ IR 0 strStart
, OP MOV $ IR 0 strEnd
, LABEL readStart
, OP MOV $ IR 0 rax
, OP MOV $ RR fd rdi
, OP MOV $ RR buffer rsi
, OP MOV $ IR (fromIntegral syscallBufferSize) rdx
, OP MOV $ RR rax bytesRead
, OP CMP $ IR 0 bytesRead
, JUMP JE readDone
, JUMP JL crash
, UN PUSH $ R buffer
, UN PUSH $ R bytesRead
, JUMP CALL "unpackString"
, unpush 2
, OP MOV $ RR rax newString
, OP CMP $ IR 0 strStart
, JUMP JNE skipStartUpdate
, OP MOV $ RR newString strStart
, JUMP JMP skipEndUpdate
, LABEL skipStartUpdate
, OP MOV $ RM newString (deref strEnd)
, LABEL skipEndUpdate
, OP MOV $ RR bytesRead allocedLength
, OP IMUL $ IR 24 allocedLength
, LEA (Mem (Right $ -8) newString (Just (Scale1, allocedLength))) strEnd
, JUMP JMP readStart
, LABEL readDone
, OP MOV $ IR 3 rax
, OP MOV $ RR fd rdi
, OP CMP $ IR 0 rax
, JUMP JL crash
, OP MOV $ RR strStart rax
, RET
, LABEL crash
, LEA (memLabel "msgReadFileFailed") msg
, UN PUSH $ R msg
, JUMP CALL "crash"
]
primitiveError :: Stateful VirtualFunction
primitiveError = return $ function
"error__uncurried"
[ UN PUSH $ M (getArg 1)
, JUMP CALL "packString"
, unpush 1
, UN PUSH $ R rax
, JUMP CALL "crash"
]
compareOp :: String -> Jump -> Stateful VirtualFunction
compareOp name op = do
temp <- newTemp
yes <- newLabel
return $ function
name
[ OP MOV $ MR (getArg 2) temp
, OP CMP $ MR (getArg 1) temp
, JUMP op yes
, OP MOV $ IR 0 rax
, RET
, LABEL yes
, OP MOV $ IR 1 rax
, RET
]
equal :: Stateful VirtualFunction
equal = compareOp "equal__uncurried" JE
notEqual :: Stateful VirtualFunction
notEqual = compareOp "notEqual__uncurried" JNE
lessThan :: Stateful VirtualFunction
lessThan = compareOp "lessThan__uncurried" JL
lessThanEqual :: Stateful VirtualFunction
lessThanEqual = compareOp "lessThanEqual__uncurried" JLE
greaterThan :: Stateful VirtualFunction
greaterThan = compareOp "greaterThan__uncurried" JG
greaterThanEqual :: Stateful VirtualFunction
greaterThanEqual = compareOp "greaterThanEqual__uncurried" JGE
monadReturn :: Stateful VirtualFunction
monadReturn = return $ function "return__uncurried__unmonadified"
[OP MOV $ MR (getArg 1) rax, RET]
monadBind :: Stateful VirtualFunction
monadBind = do
firstMonad <- newTemp
secondMonad <- newTemp
arg <- newTemp
fn <- newTemp
firstCallCode <- translateCall firstMonad Nothing
secondCallCode <- translateCall fn (Just arg)
thirdCallCode <- translateCall secondMonad Nothing
return $ function
"bind__uncurried__unmonadified"
( [OP MOV $ MR (getArg 2) firstMonad]
++ firstCallCode
++ [OP MOV $ RR rax arg, OP MOV $ MR (getArg 1) fn]
++ secondCallCode
++ [OP MOV $ RR rax secondMonad]
++ thirdCallCode
++ [RET]
)
primitiveCrash :: Stateful VirtualFunction
primitiveCrash = do
msg <- newTemp
return $ function
"crash"
[ OP MOV $ MR (getArg 1) msg
, OP MOV $ IR 1 rax
, OP MOV $ IR 2 rdi
, LEA (Mem (Right 8) msg Nothing) rsi
, OP MOV $ MR (deref msg) rdx
, OP MOV $ IR 60 rax
, OP MOV $ IR 1 rdi
]
primitiveTrace :: Stateful VirtualFunction
primitiveTrace = return $ function
"trace__uncurried"
[ UN PUSH $ M (getArg 2)
, JUMP CALL "print__uncurried__unmonadified"
, unpush 1
, OP MOV $ MR (getArg 1) rax
, RET
]
|
fcd4d692fef6f895c0502e91819583282a32af45383f37aedac492031de1988b | Incubaid/arakoon | arakoon_exc.ml |
Copyright ( 2010 - 2014 ) INCUBAID BVBA
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright (2010-2014) INCUBAID BVBA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
type rc =
| E_OK
| E_NO_MAGIC
| E_NO_HELLO
| E_NOT_MASTER
| E_NOT_FOUND
| E_WRONG_CLUSTER
| E_ASSERTION_FAILED
| E_READ_ONLY
| E_OUTSIDE_INTERVAL
| E_GOING_DOWN
| E_NOT_SUPPORTED
| E_NO_LONGER_MASTER
| E_BAD_INPUT
| E_INCONSISTENT_READ
| E_USERFUNCTION_FAILURE
| E_USERHOOK_NOT_FOUND
| E_MAX_CONNECTIONS
| E_UNKNOWN_FAILURE
let mapping = [
( E_OK , 0x00l);
( E_NO_MAGIC , 0x01l);
( E_NO_HELLO , 0x03l);
( E_NOT_MASTER , 0x04l);
( E_NOT_FOUND , 0x05l);
( E_WRONG_CLUSTER , 0x06l);
( E_ASSERTION_FAILED , 0x07l);
( E_READ_ONLY , 0x08l);
( E_OUTSIDE_INTERVAL , 0x09l);
( E_GOING_DOWN , 0x10l);
( E_NOT_SUPPORTED , 0x20l);
( E_NO_LONGER_MASTER , 0x21l);
( E_BAD_INPUT , 0x26l);
( E_INCONSISTENT_READ , 0x80l);
( E_USERFUNCTION_FAILURE, 0x81l);
( E_USERHOOK_NOT_FOUND , 0x82l);
( E_MAX_CONNECTIONS , 0xfel);
( E_UNKNOWN_FAILURE , 0xffl );
]
let create f =
let h = Hashtbl.create 47 in
let () = List.iter
(fun (a,b) -> let (a',b') = f (a,b) in Hashtbl.add h a' b')
mapping
in
h
let hmap = create (fun x -> x)
let rmap = create (fun (a,b) -> (b,a))
let int32_of_rc rc = Hashtbl.find hmap rc
let rc_of_int32 i32 =
try Hashtbl.find rmap i32 with Not_found -> E_UNKNOWN_FAILURE
let string_of_rc = function
| E_OK -> "E_OK"
| E_NO_MAGIC -> "E_NO_MAGIC"
| E_NO_HELLO -> "E_NO_HELLO"
| E_NOT_MASTER -> "E_NOT_MASTER"
| E_NOT_FOUND -> "E_NOT_FOUND"
| E_WRONG_CLUSTER -> "E_WRONG_CLUSTER"
| E_ASSERTION_FAILED -> "E_ASSERTION_FAILED"
| E_READ_ONLY -> "E_READ_ONLY"
| E_OUTSIDE_INTERVAL -> "E_OUTSIDE_INTERVAL"
| E_GOING_DOWN -> "E_GOING_DOWN"
| E_USERFUNCTION_FAILURE -> "E_USERFUNCTION_FAILURE"
| E_USERHOOK_NOT_FOUND -> "E_USERHOOK_NOT_FOUND"
| E_MAX_CONNECTIONS -> "E_MAX_CONNECTIONS"
| E_NOT_SUPPORTED -> "E_NOT_SUPPORTED"
| E_NO_LONGER_MASTER -> "E_NO_LONGER_MASTER"
| E_BAD_INPUT -> "E_BAD_INPUT"
| E_INCONSISTENT_READ -> "E_INCONSISTENT_READ"
| E_UNKNOWN_FAILURE -> "E_UNKNOWN_FAILURE"
exception Exception of rc * string
open Lwt
let output_exception oc rc msg =
Llio.output_int32 oc (int32_of_rc rc) >>= fun () ->
Llio.output_string oc msg
| null | https://raw.githubusercontent.com/Incubaid/arakoon/43a8d0b26e4876ef91d9657149f105c7e57e0cb0/src/client/arakoon_exc.ml | ocaml |
Copyright ( 2010 - 2014 ) INCUBAID BVBA
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright (2010-2014) INCUBAID BVBA
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
type rc =
| E_OK
| E_NO_MAGIC
| E_NO_HELLO
| E_NOT_MASTER
| E_NOT_FOUND
| E_WRONG_CLUSTER
| E_ASSERTION_FAILED
| E_READ_ONLY
| E_OUTSIDE_INTERVAL
| E_GOING_DOWN
| E_NOT_SUPPORTED
| E_NO_LONGER_MASTER
| E_BAD_INPUT
| E_INCONSISTENT_READ
| E_USERFUNCTION_FAILURE
| E_USERHOOK_NOT_FOUND
| E_MAX_CONNECTIONS
| E_UNKNOWN_FAILURE
let mapping = [
( E_OK , 0x00l);
( E_NO_MAGIC , 0x01l);
( E_NO_HELLO , 0x03l);
( E_NOT_MASTER , 0x04l);
( E_NOT_FOUND , 0x05l);
( E_WRONG_CLUSTER , 0x06l);
( E_ASSERTION_FAILED , 0x07l);
( E_READ_ONLY , 0x08l);
( E_OUTSIDE_INTERVAL , 0x09l);
( E_GOING_DOWN , 0x10l);
( E_NOT_SUPPORTED , 0x20l);
( E_NO_LONGER_MASTER , 0x21l);
( E_BAD_INPUT , 0x26l);
( E_INCONSISTENT_READ , 0x80l);
( E_USERFUNCTION_FAILURE, 0x81l);
( E_USERHOOK_NOT_FOUND , 0x82l);
( E_MAX_CONNECTIONS , 0xfel);
( E_UNKNOWN_FAILURE , 0xffl );
]
let create f =
let h = Hashtbl.create 47 in
let () = List.iter
(fun (a,b) -> let (a',b') = f (a,b) in Hashtbl.add h a' b')
mapping
in
h
let hmap = create (fun x -> x)
let rmap = create (fun (a,b) -> (b,a))
let int32_of_rc rc = Hashtbl.find hmap rc
let rc_of_int32 i32 =
try Hashtbl.find rmap i32 with Not_found -> E_UNKNOWN_FAILURE
let string_of_rc = function
| E_OK -> "E_OK"
| E_NO_MAGIC -> "E_NO_MAGIC"
| E_NO_HELLO -> "E_NO_HELLO"
| E_NOT_MASTER -> "E_NOT_MASTER"
| E_NOT_FOUND -> "E_NOT_FOUND"
| E_WRONG_CLUSTER -> "E_WRONG_CLUSTER"
| E_ASSERTION_FAILED -> "E_ASSERTION_FAILED"
| E_READ_ONLY -> "E_READ_ONLY"
| E_OUTSIDE_INTERVAL -> "E_OUTSIDE_INTERVAL"
| E_GOING_DOWN -> "E_GOING_DOWN"
| E_USERFUNCTION_FAILURE -> "E_USERFUNCTION_FAILURE"
| E_USERHOOK_NOT_FOUND -> "E_USERHOOK_NOT_FOUND"
| E_MAX_CONNECTIONS -> "E_MAX_CONNECTIONS"
| E_NOT_SUPPORTED -> "E_NOT_SUPPORTED"
| E_NO_LONGER_MASTER -> "E_NO_LONGER_MASTER"
| E_BAD_INPUT -> "E_BAD_INPUT"
| E_INCONSISTENT_READ -> "E_INCONSISTENT_READ"
| E_UNKNOWN_FAILURE -> "E_UNKNOWN_FAILURE"
exception Exception of rc * string
open Lwt
let output_exception oc rc msg =
Llio.output_int32 oc (int32_of_rc rc) >>= fun () ->
Llio.output_string oc msg
| |
4932c26e2f52cc9b82f95a83d23b0f4ec065caf7da6beda8e9f6feffce492b7b | programaker-project/Programaker-Core | automate_mail_app.erl | %%%-------------------------------------------------------------------
%% @doc automate_mail app API
%% @end
%%%-------------------------------------------------------------------
-module(automate_mail_app).
-behaviour(application).
%% Application callbacks
-export([start/2, stop/1]).
%%====================================================================
%% API
%%====================================================================
start(_StartType, _StartArgs) ->
{ok, self()}.
%%--------------------------------------------------------------------
stop(_State) ->
ok.
%%====================================================================
Internal functions
%%====================================================================
| null | https://raw.githubusercontent.com/programaker-project/Programaker-Core/ef10fc6d2a228b2096b121170c421f5c29f9f270/backend/apps/automate_mail/src/automate_mail_app.erl | erlang | -------------------------------------------------------------------
@doc automate_mail app API
@end
-------------------------------------------------------------------
Application callbacks
====================================================================
API
====================================================================
--------------------------------------------------------------------
====================================================================
==================================================================== |
-module(automate_mail_app).
-behaviour(application).
-export([start/2, stop/1]).
start(_StartType, _StartArgs) ->
{ok, self()}.
stop(_State) ->
ok.
Internal functions
|
95c275425dc57b1fed0c28a6186ac1b7910cf3e557eba4ec3f703433d386d7f3 | AdaCore/why3 | gnat_ast.ml | (* -*- mode: tuareg -*- *)
(* This package is automatically generated by xtree. Do not edit manually. *)
[@@@warning "-42"]
(* TODO Use appropriate OCaml integer type *)
type uint = Uint of string
type ureal = Ureal of { numerator : uint ; denominator : uint; base : int; negative : bool }
GNAT
type source_ptr =
| No_location
| Source_ptr of { filename: string; line: int }
type node_id = Node_id
GNATCOLL.Symbols
type symbol = No_symbol | Symbol of string
type symbol_set = symbol list
type string_sets_set = symbol list
type why_node_set = unit
Why .
type odomain =
| Expr
| Term
| Pterm
| Pred
| Prog
type domain =
| Term
| Pterm
| Pred
| Prog
type type_ =
| Builtin
| Split
| Abstract
type literal =
| True
| False
type theory_type =
| Theory
| Module
type clone_type =
| Import
| Export
| Clone_default
type subst_type =
| Type_subst
| Function
| Predicate
| Namepace
| Lemma
| Goal
type connector =
| Or_else
| And_then
| Imply
| Equivalent
| Or
| And
type assert_kind =
| Assert
| Check
| Assume
type axiom_dep_kind =
| Axdep_func
| Axdep_pred
type 'a nonempty = { elt0: 'a; elts: 'a list }
let list_of_nonempty n = n.elt0 :: n.elts
(* Kind tags *)
type type_tag = [`Type]
type name_tag = [`Name]
type effects_tag = [`Effects]
type raise_effect_tag = [`Raise_effect]
type binder_tag = [`Binder]
type transparent_type_definition_tag = [`Transparent_type_definition]
type record_binder_tag = [`Record_binder]
type record_definition_tag = [`Record_definition]
type range_type_definition_tag = [`Range_type_definition]
type triggers_tag = [`Triggers]
type trigger_tag = [`Trigger]
type axiom_dep_tag = [`Axiom_dep]
type handler_tag = [`Handler]
type field_association_tag = [`Field_association]
type variant_tag = [`Variant]
type variants_tag = [`Variants]
type universal_quantif_tag = [`Universal_quantif]
type existential_quantif_tag = [`Existential_quantif]
type not_tag = [`Not]
type connection_tag = [`Connection]
type label_tag = [`Label]
type loc_label_tag = [`Loc_label]
type identifier_tag = [`Identifier]
type tagged_tag = [`Tagged]
type call_tag = [`Call]
type literal_tag = [`Literal]
type binding_tag = [`Binding]
type elsif_tag = [`Elsif]
type epsilon_tag = [`Epsilon]
type conditional_tag = [`Conditional]
type integer_constant_tag = [`Integer_constant]
type range_constant_tag = [`Range_constant]
type modular_constant_tag = [`Modular_constant]
type fixed_constant_tag = [`Fixed_constant]
type real_constant_tag = [`Real_constant]
type float_constant_tag = [`Float_constant]
type comment_tag = [`Comment]
type deref_tag = [`Deref]
type record_access_tag = [`Record_access]
type record_update_tag = [`Record_update]
type record_aggregate_tag = [`Record_aggregate]
type any_expr_tag = [`Any_expr]
type assignment_tag = [`Assignment]
type binding_ref_tag = [`Binding_ref]
type loop_tag = [`Loop]
type statement_sequence_tag = [`Statement_sequence]
type abstract_expr_tag = [`Abstract_expr]
type assert_tag = [`Assert]
type raise_tag = [`Raise]
type try_block_tag = [`Try_block]
type function_decl_tag = [`Function_decl]
type axiom_tag = [`Axiom]
type goal_tag = [`Goal]
type type_decl_tag = [`Type_decl]
type global_ref_declaration_tag = [`Global_ref_declaration]
type namespace_declaration_tag = [`Namespace_declaration]
type exception_declaration_tag = [`Exception_declaration]
type meta_declaration_tag = [`Meta_declaration]
type clone_declaration_tag = [`Clone_declaration]
type clone_substitution_tag = [`Clone_substitution]
type include_declaration_tag = [`Include_declaration]
type theory_declaration_tag = [`Theory_declaration]
type module_tag = [`Module]
(* Class tags *)
type expr_tag = [
| `Universal_quantif
| `Existential_quantif
| `Not
| `Connection
| `Label
| `Loc_label
| `Identifier
| `Tagged
| `Call
| `Literal
| `Binding
| `Elsif
| `Epsilon
| `Conditional
| `Integer_constant
| `Range_constant
| `Modular_constant
| `Fixed_constant
| `Real_constant
| `Float_constant
| `Comment
| `Deref
| `Record_access
| `Record_update
| `Record_aggregate
| `Any_expr
| `Assignment
| `Binding_ref
| `Loop
| `Statement_sequence
| `Abstract_expr
| `Assert
| `Raise
| `Try_block
]
type pred_tag = [
| `Universal_quantif
| `Existential_quantif
| `Not
| `Connection
| `Label
| `Loc_label
| `Identifier
| `Tagged
| `Call
| `Literal
| `Binding
| `Elsif
| `Epsilon
| `Conditional
]
type term_tag = [
| `Label
| `Loc_label
| `Identifier
| `Tagged
| `Call
| `Literal
| `Binding
| `Elsif
| `Epsilon
| `Conditional
| `Integer_constant
| `Range_constant
| `Modular_constant
| `Fixed_constant
| `Real_constant
| `Float_constant
| `Comment
| `Deref
| `Record_access
| `Record_update
| `Record_aggregate
]
type prog_tag = [
| `Not
| `Connection
| `Label
| `Loc_label
| `Identifier
| `Tagged
| `Call
| `Literal
| `Binding
| `Elsif
| `Epsilon
| `Conditional
| `Integer_constant
| `Range_constant
| `Modular_constant
| `Fixed_constant
| `Real_constant
| `Float_constant
| `Comment
| `Deref
| `Record_access
| `Record_update
| `Record_aggregate
| `Any_expr
| `Assignment
| `Binding_ref
| `Loop
| `Statement_sequence
| `Abstract_expr
| `Assert
| `Raise
| `Try_block
]
type type_definition_tag = [
| `Transparent_type_definition
| `Record_binder
| `Record_definition
| `Range_type_definition
]
type declaration_tag = [
| `Function_decl
| `Axiom
| `Goal
| `Type_decl
| `Global_ref_declaration
| `Namespace_declaration
| `Exception_declaration
| `Meta_declaration
| `Clone_declaration
]
type any_node_tag = [
| `Type
| `Name
| `Effects
| `Raise_effect
| `Binder
| `Transparent_type_definition
| `Record_binder
| `Record_definition
| `Range_type_definition
| `Triggers
| `Trigger
| `Axiom_dep
| `Handler
| `Field_association
| `Variant
| `Variants
| `Universal_quantif
| `Existential_quantif
| `Not
| `Connection
| `Label
| `Loc_label
| `Identifier
| `Tagged
| `Call
| `Literal
| `Binding
| `Elsif
| `Epsilon
| `Conditional
| `Integer_constant
| `Range_constant
| `Modular_constant
| `Fixed_constant
| `Real_constant
| `Float_constant
| `Comment
| `Deref
| `Record_access
| `Record_update
| `Record_aggregate
| `Any_expr
| `Assignment
| `Binding_ref
| `Loop
| `Statement_sequence
| `Abstract_expr
| `Assert
| `Raise
| `Try_block
| `Function_decl
| `Axiom
| `Goal
| `Type_decl
| `Global_ref_declaration
| `Namespace_declaration
| `Exception_declaration
| `Meta_declaration
| `Clone_declaration
| `Clone_substitution
| `Include_declaration
| `Theory_declaration
| `Module
]
(* Typed AST nodes.  A node pairs bookkeeping metadata ([why_node_info])
   with a kind-specific descriptor; the phantom row parameter ['a] records
   which kinds the node may be, so that e.g. a [prog_id] child statically
   excludes declaration nodes.  Each GADT constructor below produces a
   descriptor at the open row [> <its own tag>], letting one constructor
   inhabit every tag set that lists it.
   NOTE(review): this file appears machine-generated (uniform shapes, no
   indentation) — confirm against the generator before hand-editing. *)
type 'a why_node = { info : why_node_info ; desc: 'a why_node_desc }
(* Per-node metadata: [id]/[node] identify the node, [domain] its logical
   domain, [link] related nodes, [checked] a validation flag.  [node_id],
   [domain] and [why_node_set] are defined elsewhere in this file. *)
and why_node_info = {id: int; node: node_id; domain: domain; link: why_node_set; checked: bool}
and 'a why_node_desc =
| Type : {type_kind: type_; name: name_id; is_mutable: bool; relaxed_init: bool} -> [> type_tag] why_node_desc
| Name : {symb: symbol; namespace: symbol; module_: module_oid; infix: bool} -> [> name_tag] why_node_desc
| Effects : {reads: identifier_olist; writes: identifier_olist; raises: raise_effect_olist} -> [> effects_tag] why_node_desc
| Raise_effect : {name: name_id; arg_id: identifier_oid; post: pred_oid} -> [> raise_effect_tag] why_node_desc
| Binder : {name: identifier_oid; arg_type: type_id} -> [> binder_tag] why_node_desc
| Transparent_type_definition : {type_definition: type_id} -> [> transparent_type_definition_tag] why_node_desc
| Record_binder : {name: identifier_oid; arg_type: type_id; labels: symbol_set; is_mutable: bool} -> [> record_binder_tag] why_node_desc
| Record_definition : {fields: record_binder_list} -> [> record_definition_tag] why_node_desc
| Range_type_definition : {first: uint; last: uint} -> [> range_type_definition_tag] why_node_desc
| Triggers : {triggers: trigger_list} -> [> triggers_tag] why_node_desc
| Trigger : {terms: expr_list} -> [> trigger_tag] why_node_desc
| Axiom_dep : {name: identifier_id; kind: axiom_dep_kind} -> [> axiom_dep_tag] why_node_desc
| Handler : {name: name_id; arg_id: identifier_oid; def: prog_id} -> [> handler_tag] why_node_desc
| Field_association : {field: identifier_id; value: expr_id} -> [> field_association_tag] why_node_desc
| Variant : {cmp_op: identifier_id; labels: symbol_set; expr: term_id} -> [> variant_tag] why_node_desc
| Variants : {variants: variant_list} -> [> variants_tag] why_node_desc
| Universal_quantif : {variables: identifier_list; labels: symbol_set; var_type: type_id; triggers: triggers_oid; pred: pred_id} -> [> universal_quantif_tag] why_node_desc
| Existential_quantif : {variables: identifier_list; labels: symbol_set; var_type: type_id; pred: pred_id} -> [> existential_quantif_tag] why_node_desc
| Not : {right: expr_id} -> [> not_tag] why_node_desc
| Connection : {left: expr_id; op: connector; right: expr_id; more_right: expr_olist} -> [> connection_tag] why_node_desc
| Label : {labels: symbol_set; def: expr_id; typ: type_oid} -> [> label_tag] why_node_desc
| Loc_label : {sloc: source_ptr; def: expr_id; marker: symbol} -> [> loc_label_tag] why_node_desc
| Identifier : {name: name_id; typ: type_oid; labels: string_sets_set} -> [> identifier_tag] why_node_desc
| Tagged : {tag: symbol; def: expr_id; typ: type_oid} -> [> tagged_tag] why_node_desc
| Call : {name: identifier_id; args: expr_olist; typ: type_oid} -> [> call_tag] why_node_desc
| Literal : {value: literal; typ: type_oid} -> [> literal_tag] why_node_desc
| Binding : {name: identifier_id; def: expr_id; context: expr_id; typ: type_oid} -> [> binding_tag] why_node_desc
| Elsif : {condition: expr_id; then_part: expr_id; typ: type_oid} -> [> elsif_tag] why_node_desc
| Epsilon : {name: identifier_id; typ: type_id; pred: pred_id} -> [> epsilon_tag] why_node_desc
| Conditional : {condition: expr_id; then_part: expr_id; elsif_parts: expr_olist; else_part: expr_oid; typ: type_oid} -> [> conditional_tag] why_node_desc
| Integer_constant : {value: uint} -> [> integer_constant_tag] why_node_desc
| Range_constant : {value: uint; typ: type_id} -> [> range_constant_tag] why_node_desc
| Modular_constant : {value: uint; typ: type_id} -> [> modular_constant_tag] why_node_desc
| Fixed_constant : {value: uint; typ: type_id} -> [> fixed_constant_tag] why_node_desc
| Real_constant : {value: ureal} -> [> real_constant_tag] why_node_desc
| Float_constant : {value: ureal; typ: type_id} -> [> float_constant_tag] why_node_desc
| Comment : {comment: symbol} -> [> comment_tag] why_node_desc
| Deref : {right: identifier_id; typ: type_id} -> [> deref_tag] why_node_desc
| Record_access : {name: expr_id; field: identifier_id; typ: type_oid} -> [> record_access_tag] why_node_desc
| Record_update : {name: expr_id; updates: field_association_list; typ: type_oid} -> [> record_update_tag] why_node_desc
| Record_aggregate : {associations: field_association_list; typ: type_oid} -> [> record_aggregate_tag] why_node_desc
| Any_expr : {effects: effects_oid; pre: pred_oid; post: pred_oid; return_type: type_id; labels: symbol_set} -> [> any_expr_tag] why_node_desc
| Assignment : {name: identifier_id; value: prog_id; typ: type_id; labels: symbol_set} -> [> assignment_tag] why_node_desc
| Binding_ref : {name: identifier_id; def: prog_id; context: prog_id; typ: type_id} -> [> binding_ref_tag] why_node_desc
| Loop : {code_before: prog_id; invariants: pred_olist; variants: variants_olist; code_after: prog_id} -> [> loop_tag] why_node_desc
| Statement_sequence : {statements: prog_list} -> [> statement_sequence_tag] why_node_desc
| Abstract_expr : {expr: prog_id; post: pred_id; typ: type_oid} -> [> abstract_expr_tag] why_node_desc
| Assert : {pred: pred_id; assert_kind: assert_kind} -> [> assert_tag] why_node_desc
| Raise : {name: name_id; arg: expr_oid; typ: type_oid} -> [> raise_tag] why_node_desc
| Try_block : {prog: prog_id; handler: handler_list; typ: type_oid} -> [> try_block_tag] why_node_desc
| Function_decl : {name: identifier_id; binders: binder_olist; effects: effects_oid; pre: pred_oid; post: pred_oid; return_type: type_oid; def: expr_oid; labels: symbol_set; location: source_ptr} -> [> function_decl_tag] why_node_desc
| Axiom : {name: symbol; def: pred_id; dep: axiom_dep_oid} -> [> axiom_tag] why_node_desc
| Goal : {name: symbol; def: pred_id} -> [> goal_tag] why_node_desc
| Type_decl : {args: identifier_olist; name: name_id; labels: symbol_set; definition: type_definition_oid} -> [> type_decl_tag] why_node_desc
| Global_ref_declaration : {name: identifier_id; ref_type: type_id; labels: symbol_set; location: source_ptr} -> [> global_ref_declaration_tag] why_node_desc
| Namespace_declaration : {declarations: declaration_olist; name: symbol} -> [> namespace_declaration_tag] why_node_desc
| Exception_declaration : {name: name_id; arg: type_oid} -> [> exception_declaration_tag] why_node_desc
| Meta_declaration : {name: symbol; parameter: symbol} -> [> meta_declaration_tag] why_node_desc
| Clone_declaration : {origin: module_id; as_name: symbol; clone_kind: clone_type; substitutions: clone_substitution_olist; theory_kind: theory_type} -> [> clone_declaration_tag] why_node_desc
| Clone_substitution : {kind: subst_type; orig_name: name_id; image: name_id} -> [> clone_substitution_tag] why_node_desc
| Include_declaration : {module_: module_id; kind: theory_type; use_kind: clone_type} -> [> include_declaration_tag] why_node_desc
| Theory_declaration : {declarations: declaration_olist; name: symbol; kind: theory_type; includes: include_declaration_olist; comment: symbol} -> [> theory_declaration_tag] why_node_desc
| Module : {file: symbol; name: symbol} -> [> module_tag] why_node_desc
(* Generic child shapes: optional child, possibly-empty list, required
   child, and [nonempty] collection ([nonempty] is defined elsewhere;
   presumably a non-empty list — confirm at its definition site). *)
and 'a why_node_oid = 'a why_node option
and 'a why_node_olist = 'a why_node list
and 'a why_node_id = 'a why_node
and 'a why_node_list = 'a why_node nonempty
(* Kind nodes: for every concrete node kind K, [K_id] is a required child,
   [K_oid] an optional child, [K_olist] a possibly-empty list and [K_list]
   a [nonempty] of K nodes. *)
and type_oid = type_tag why_node_oid
and type_olist = type_tag why_node_olist
and type_id = type_tag why_node_id
and type_list = type_tag why_node_list
and name_oid = name_tag why_node_oid
and name_olist = name_tag why_node_olist
and name_id = name_tag why_node_id
and name_list = name_tag why_node_list
and effects_oid = effects_tag why_node_oid
and effects_olist = effects_tag why_node_olist
and effects_id = effects_tag why_node_id
and effects_list = effects_tag why_node_list
and raise_effect_oid = raise_effect_tag why_node_oid
and raise_effect_olist = raise_effect_tag why_node_olist
and raise_effect_id = raise_effect_tag why_node_id
and raise_effect_list = raise_effect_tag why_node_list
and binder_oid = binder_tag why_node_oid
and binder_olist = binder_tag why_node_olist
and binder_id = binder_tag why_node_id
and binder_list = binder_tag why_node_list
and transparent_type_definition_oid = transparent_type_definition_tag why_node_oid
and transparent_type_definition_olist = transparent_type_definition_tag why_node_olist
and transparent_type_definition_id = transparent_type_definition_tag why_node_id
and transparent_type_definition_list = transparent_type_definition_tag why_node_list
and record_binder_oid = record_binder_tag why_node_oid
and record_binder_olist = record_binder_tag why_node_olist
and record_binder_id = record_binder_tag why_node_id
and record_binder_list = record_binder_tag why_node_list
and record_definition_oid = record_definition_tag why_node_oid
and record_definition_olist = record_definition_tag why_node_olist
and record_definition_id = record_definition_tag why_node_id
and record_definition_list = record_definition_tag why_node_list
and range_type_definition_oid = range_type_definition_tag why_node_oid
and range_type_definition_olist = range_type_definition_tag why_node_olist
and range_type_definition_id = range_type_definition_tag why_node_id
and range_type_definition_list = range_type_definition_tag why_node_list
and triggers_oid = triggers_tag why_node_oid
and triggers_olist = triggers_tag why_node_olist
and triggers_id = triggers_tag why_node_id
and triggers_list = triggers_tag why_node_list
and trigger_oid = trigger_tag why_node_oid
and trigger_olist = trigger_tag why_node_olist
and trigger_id = trigger_tag why_node_id
and trigger_list = trigger_tag why_node_list
and axiom_dep_oid = axiom_dep_tag why_node_oid
and axiom_dep_olist = axiom_dep_tag why_node_olist
and axiom_dep_id = axiom_dep_tag why_node_id
and axiom_dep_list = axiom_dep_tag why_node_list
and handler_oid = handler_tag why_node_oid
and handler_olist = handler_tag why_node_olist
and handler_id = handler_tag why_node_id
and handler_list = handler_tag why_node_list
and field_association_oid = field_association_tag why_node_oid
and field_association_olist = field_association_tag why_node_olist
and field_association_id = field_association_tag why_node_id
and field_association_list = field_association_tag why_node_list
and variant_oid = variant_tag why_node_oid
and variant_olist = variant_tag why_node_olist
and variant_id = variant_tag why_node_id
and variant_list = variant_tag why_node_list
and variants_oid = variants_tag why_node_oid
and variants_olist = variants_tag why_node_olist
and variants_id = variants_tag why_node_id
and variants_list = variants_tag why_node_list
and universal_quantif_oid = universal_quantif_tag why_node_oid
and universal_quantif_olist = universal_quantif_tag why_node_olist
and universal_quantif_id = universal_quantif_tag why_node_id
and universal_quantif_list = universal_quantif_tag why_node_list
and existential_quantif_oid = existential_quantif_tag why_node_oid
and existential_quantif_olist = existential_quantif_tag why_node_olist
and existential_quantif_id = existential_quantif_tag why_node_id
and existential_quantif_list = existential_quantif_tag why_node_list
and not_oid = not_tag why_node_oid
and not_olist = not_tag why_node_olist
and not_id = not_tag why_node_id
and not_list = not_tag why_node_list
and connection_oid = connection_tag why_node_oid
and connection_olist = connection_tag why_node_olist
and connection_id = connection_tag why_node_id
and connection_list = connection_tag why_node_list
and label_oid = label_tag why_node_oid
and label_olist = label_tag why_node_olist
and label_id = label_tag why_node_id
and label_list = label_tag why_node_list
and loc_label_oid = loc_label_tag why_node_oid
and loc_label_olist = loc_label_tag why_node_olist
and loc_label_id = loc_label_tag why_node_id
and loc_label_list = loc_label_tag why_node_list
and identifier_oid = identifier_tag why_node_oid
and identifier_olist = identifier_tag why_node_olist
and identifier_id = identifier_tag why_node_id
and identifier_list = identifier_tag why_node_list
and tagged_oid = tagged_tag why_node_oid
and tagged_olist = tagged_tag why_node_olist
and tagged_id = tagged_tag why_node_id
and tagged_list = tagged_tag why_node_list
and call_oid = call_tag why_node_oid
and call_olist = call_tag why_node_olist
and call_id = call_tag why_node_id
and call_list = call_tag why_node_list
and literal_oid = literal_tag why_node_oid
and literal_olist = literal_tag why_node_olist
and literal_id = literal_tag why_node_id
and literal_list = literal_tag why_node_list
and binding_oid = binding_tag why_node_oid
and binding_olist = binding_tag why_node_olist
and binding_id = binding_tag why_node_id
and binding_list = binding_tag why_node_list
and elsif_oid = elsif_tag why_node_oid
and elsif_olist = elsif_tag why_node_olist
and elsif_id = elsif_tag why_node_id
and elsif_list = elsif_tag why_node_list
and epsilon_oid = epsilon_tag why_node_oid
and epsilon_olist = epsilon_tag why_node_olist
and epsilon_id = epsilon_tag why_node_id
and epsilon_list = epsilon_tag why_node_list
and conditional_oid = conditional_tag why_node_oid
and conditional_olist = conditional_tag why_node_olist
and conditional_id = conditional_tag why_node_id
and conditional_list = conditional_tag why_node_list
and integer_constant_oid = integer_constant_tag why_node_oid
and integer_constant_olist = integer_constant_tag why_node_olist
and integer_constant_id = integer_constant_tag why_node_id
and integer_constant_list = integer_constant_tag why_node_list
and range_constant_oid = range_constant_tag why_node_oid
and range_constant_olist = range_constant_tag why_node_olist
and range_constant_id = range_constant_tag why_node_id
and range_constant_list = range_constant_tag why_node_list
and modular_constant_oid = modular_constant_tag why_node_oid
and modular_constant_olist = modular_constant_tag why_node_olist
and modular_constant_id = modular_constant_tag why_node_id
and modular_constant_list = modular_constant_tag why_node_list
and fixed_constant_oid = fixed_constant_tag why_node_oid
and fixed_constant_olist = fixed_constant_tag why_node_olist
and fixed_constant_id = fixed_constant_tag why_node_id
and fixed_constant_list = fixed_constant_tag why_node_list
and real_constant_oid = real_constant_tag why_node_oid
and real_constant_olist = real_constant_tag why_node_olist
and real_constant_id = real_constant_tag why_node_id
and real_constant_list = real_constant_tag why_node_list
and float_constant_oid = float_constant_tag why_node_oid
and float_constant_olist = float_constant_tag why_node_olist
and float_constant_id = float_constant_tag why_node_id
and float_constant_list = float_constant_tag why_node_list
and comment_oid = comment_tag why_node_oid
and comment_olist = comment_tag why_node_olist
and comment_id = comment_tag why_node_id
and comment_list = comment_tag why_node_list
and deref_oid = deref_tag why_node_oid
and deref_olist = deref_tag why_node_olist
and deref_id = deref_tag why_node_id
and deref_list = deref_tag why_node_list
and record_access_oid = record_access_tag why_node_oid
and record_access_olist = record_access_tag why_node_olist
and record_access_id = record_access_tag why_node_id
and record_access_list = record_access_tag why_node_list
and record_update_oid = record_update_tag why_node_oid
and record_update_olist = record_update_tag why_node_olist
and record_update_id = record_update_tag why_node_id
and record_update_list = record_update_tag why_node_list
and record_aggregate_oid = record_aggregate_tag why_node_oid
and record_aggregate_olist = record_aggregate_tag why_node_olist
and record_aggregate_id = record_aggregate_tag why_node_id
and record_aggregate_list = record_aggregate_tag why_node_list
and any_expr_oid = any_expr_tag why_node_oid
and any_expr_olist = any_expr_tag why_node_olist
and any_expr_id = any_expr_tag why_node_id
and any_expr_list = any_expr_tag why_node_list
and assignment_oid = assignment_tag why_node_oid
and assignment_olist = assignment_tag why_node_olist
and assignment_id = assignment_tag why_node_id
and assignment_list = assignment_tag why_node_list
and binding_ref_oid = binding_ref_tag why_node_oid
and binding_ref_olist = binding_ref_tag why_node_olist
and binding_ref_id = binding_ref_tag why_node_id
and binding_ref_list = binding_ref_tag why_node_list
and loop_oid = loop_tag why_node_oid
and loop_olist = loop_tag why_node_olist
and loop_id = loop_tag why_node_id
and loop_list = loop_tag why_node_list
and statement_sequence_oid = statement_sequence_tag why_node_oid
and statement_sequence_olist = statement_sequence_tag why_node_olist
and statement_sequence_id = statement_sequence_tag why_node_id
and statement_sequence_list = statement_sequence_tag why_node_list
and abstract_expr_oid = abstract_expr_tag why_node_oid
and abstract_expr_olist = abstract_expr_tag why_node_olist
and abstract_expr_id = abstract_expr_tag why_node_id
and abstract_expr_list = abstract_expr_tag why_node_list
and assert_oid = assert_tag why_node_oid
and assert_olist = assert_tag why_node_olist
and assert_id = assert_tag why_node_id
and assert_list = assert_tag why_node_list
and raise_oid = raise_tag why_node_oid
and raise_olist = raise_tag why_node_olist
and raise_id = raise_tag why_node_id
and raise_list = raise_tag why_node_list
and try_block_oid = try_block_tag why_node_oid
and try_block_olist = try_block_tag why_node_olist
and try_block_id = try_block_tag why_node_id
and try_block_list = try_block_tag why_node_list
and function_decl_oid = function_decl_tag why_node_oid
and function_decl_olist = function_decl_tag why_node_olist
and function_decl_id = function_decl_tag why_node_id
and function_decl_list = function_decl_tag why_node_list
and axiom_oid = axiom_tag why_node_oid
and axiom_olist = axiom_tag why_node_olist
and axiom_id = axiom_tag why_node_id
and axiom_list = axiom_tag why_node_list
and goal_oid = goal_tag why_node_oid
and goal_olist = goal_tag why_node_olist
and goal_id = goal_tag why_node_id
and goal_list = goal_tag why_node_list
and type_decl_oid = type_decl_tag why_node_oid
and type_decl_olist = type_decl_tag why_node_olist
and type_decl_id = type_decl_tag why_node_id
and type_decl_list = type_decl_tag why_node_list
and global_ref_declaration_oid = global_ref_declaration_tag why_node_oid
and global_ref_declaration_olist = global_ref_declaration_tag why_node_olist
and global_ref_declaration_id = global_ref_declaration_tag why_node_id
and global_ref_declaration_list = global_ref_declaration_tag why_node_list
and namespace_declaration_oid = namespace_declaration_tag why_node_oid
and namespace_declaration_olist = namespace_declaration_tag why_node_olist
and namespace_declaration_id = namespace_declaration_tag why_node_id
and namespace_declaration_list = namespace_declaration_tag why_node_list
and exception_declaration_oid = exception_declaration_tag why_node_oid
and exception_declaration_olist = exception_declaration_tag why_node_olist
and exception_declaration_id = exception_declaration_tag why_node_id
and exception_declaration_list = exception_declaration_tag why_node_list
and meta_declaration_oid = meta_declaration_tag why_node_oid
and meta_declaration_olist = meta_declaration_tag why_node_olist
and meta_declaration_id = meta_declaration_tag why_node_id
and meta_declaration_list = meta_declaration_tag why_node_list
and clone_declaration_oid = clone_declaration_tag why_node_oid
and clone_declaration_olist = clone_declaration_tag why_node_olist
and clone_declaration_id = clone_declaration_tag why_node_id
and clone_declaration_list = clone_declaration_tag why_node_list
and clone_substitution_oid = clone_substitution_tag why_node_oid
and clone_substitution_olist = clone_substitution_tag why_node_olist
and clone_substitution_id = clone_substitution_tag why_node_id
and clone_substitution_list = clone_substitution_tag why_node_list
and include_declaration_oid = include_declaration_tag why_node_oid
and include_declaration_olist = include_declaration_tag why_node_olist
and include_declaration_id = include_declaration_tag why_node_id
and include_declaration_list = include_declaration_tag why_node_list
and theory_declaration_oid = theory_declaration_tag why_node_oid
and theory_declaration_olist = theory_declaration_tag why_node_olist
and theory_declaration_id = theory_declaration_tag why_node_id
and theory_declaration_list = theory_declaration_tag why_node_list
and module_oid = module_tag why_node_oid
and module_olist = module_tag why_node_olist
and module_id = module_tag why_node_id
and module_list = module_tag why_node_list
(* Class nodes: the same four child shapes instantiated at the union rows
   (expr, pred, term, prog, type_definition, declaration, any_node), so a
   child typed e.g. [prog_id] accepts any node kind listed in [prog_tag]. *)
and expr_oid = expr_tag why_node_oid
and expr_olist = expr_tag why_node_olist
and expr_id = expr_tag why_node_id
and expr_list = expr_tag why_node_list
and pred_oid = pred_tag why_node_oid
and pred_olist = pred_tag why_node_olist
and pred_id = pred_tag why_node_id
and pred_list = pred_tag why_node_list
and term_oid = term_tag why_node_oid
and term_olist = term_tag why_node_olist
and term_id = term_tag why_node_id
and term_list = term_tag why_node_list
and prog_oid = prog_tag why_node_oid
and prog_olist = prog_tag why_node_olist
and prog_id = prog_tag why_node_id
and prog_list = prog_tag why_node_list
and type_definition_oid = type_definition_tag why_node_oid
and type_definition_olist = type_definition_tag why_node_olist
and type_definition_id = type_definition_tag why_node_id
and type_definition_list = type_definition_tag why_node_list
and declaration_oid = declaration_tag why_node_oid
and declaration_olist = declaration_tag why_node_olist
and declaration_id = declaration_tag why_node_id
and declaration_list = declaration_tag why_node_list
and any_node_oid = any_node_tag why_node_oid
and any_node_olist = any_node_tag why_node_olist
and any_node_id = any_node_tag why_node_id
and any_node_list = any_node_tag why_node_list
(* A whole Why3 file: its (possibly empty) list of theory declarations. *)
type file = { theory_declarations: theory_declaration_olist }
(* Tag coercions: each [K_coercion] narrows an [any_node_tag why_node] to
   the single-kind tag K, keeping the node's metadata, and raises
   [Invalid_argument] (via [invalid_arg]) with the coercion's name when the
   descriptor is of any other kind. *)
let type_coercion (n : any_node_tag why_node) : type_tag why_node =
  match n.desc with
  | Type _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "type_coercion"
let name_coercion (n : any_node_tag why_node) : name_tag why_node =
  match n.desc with
  | Name _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "name_coercion"
let effects_coercion (n : any_node_tag why_node) : effects_tag why_node =
  match n.desc with
  | Effects _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "effects_coercion"
let raise_effect_coercion (n : any_node_tag why_node) : raise_effect_tag why_node =
  match n.desc with
  | Raise_effect _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "raise_effect_coercion"
let binder_coercion (n : any_node_tag why_node) : binder_tag why_node =
  match n.desc with
  | Binder _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "binder_coercion"
let transparent_type_definition_coercion (n : any_node_tag why_node) : transparent_type_definition_tag why_node =
  match n.desc with
  | Transparent_type_definition _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "transparent_type_definition_coercion"
let record_binder_coercion (n : any_node_tag why_node) : record_binder_tag why_node =
  match n.desc with
  | Record_binder _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "record_binder_coercion"
let record_definition_coercion (n : any_node_tag why_node) : record_definition_tag why_node =
  match n.desc with
  | Record_definition _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "record_definition_coercion"
let range_type_definition_coercion (n : any_node_tag why_node) : range_type_definition_tag why_node =
  match n.desc with
  | Range_type_definition _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "range_type_definition_coercion"
let triggers_coercion (n : any_node_tag why_node) : triggers_tag why_node =
  match n.desc with
  | Triggers _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "triggers_coercion"
let trigger_coercion (n : any_node_tag why_node) : trigger_tag why_node =
  match n.desc with
  | Trigger _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "trigger_coercion"
(* Coercions from [any_node_tag] to specific node tags; each raises
   [Invalid_argument] with its own name when the descriptor mismatches. *)
let axiom_dep_coercion (n : any_node_tag why_node) : axiom_dep_tag why_node =
  match n.desc with
  | Axiom_dep _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "axiom_dep_coercion"
let handler_coercion (n : any_node_tag why_node) : handler_tag why_node =
  match n.desc with
  | Handler _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "handler_coercion"
let field_association_coercion (n : any_node_tag why_node) : field_association_tag why_node =
  match n.desc with
  | Field_association _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "field_association_coercion"
let variant_coercion (n : any_node_tag why_node) : variant_tag why_node =
  match n.desc with
  | Variant _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "variant_coercion"
let variants_coercion (n : any_node_tag why_node) : variants_tag why_node =
  match n.desc with
  | Variants _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "variants_coercion"
let universal_quantif_coercion (n : any_node_tag why_node) : universal_quantif_tag why_node =
  match n.desc with
  | Universal_quantif _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "universal_quantif_coercion"
let existential_quantif_coercion (n : any_node_tag why_node) : existential_quantif_tag why_node =
  match n.desc with
  | Existential_quantif _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "existential_quantif_coercion"
let not_coercion (n : any_node_tag why_node) : not_tag why_node =
  match n.desc with
  | Not _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "not_coercion"
let connection_coercion (n : any_node_tag why_node) : connection_tag why_node =
  match n.desc with
  | Connection _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "connection_coercion"
let label_coercion (n : any_node_tag why_node) : label_tag why_node =
  match n.desc with
  | Label _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "label_coercion"
let loc_label_coercion (n : any_node_tag why_node) : loc_label_tag why_node =
  match n.desc with
  | Loc_label _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "loc_label_coercion"
(* Coercions from [any_node_tag] to specific node tags; each raises
   [Invalid_argument] with its own name when the descriptor mismatches. *)
let identifier_coercion (n : any_node_tag why_node) : identifier_tag why_node =
  match n.desc with
  | Identifier _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "identifier_coercion"
let tagged_coercion (n : any_node_tag why_node) : tagged_tag why_node =
  match n.desc with
  | Tagged _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "tagged_coercion"
let call_coercion (n : any_node_tag why_node) : call_tag why_node =
  match n.desc with
  | Call _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "call_coercion"
let literal_coercion (n : any_node_tag why_node) : literal_tag why_node =
  match n.desc with
  | Literal _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "literal_coercion"
let binding_coercion (n : any_node_tag why_node) : binding_tag why_node =
  match n.desc with
  | Binding _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "binding_coercion"
let elsif_coercion (n : any_node_tag why_node) : elsif_tag why_node =
  match n.desc with
  | Elsif _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "elsif_coercion"
let epsilon_coercion (n : any_node_tag why_node) : epsilon_tag why_node =
  match n.desc with
  | Epsilon _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "epsilon_coercion"
let conditional_coercion (n : any_node_tag why_node) : conditional_tag why_node =
  match n.desc with
  | Conditional _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "conditional_coercion"
let integer_constant_coercion (n : any_node_tag why_node) : integer_constant_tag why_node =
  match n.desc with
  | Integer_constant _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "integer_constant_coercion"
let range_constant_coercion (n : any_node_tag why_node) : range_constant_tag why_node =
  match n.desc with
  | Range_constant _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "range_constant_coercion"
let modular_constant_coercion (n : any_node_tag why_node) : modular_constant_tag why_node =
  match n.desc with
  | Modular_constant _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "modular_constant_coercion"
(* Coercions from [any_node_tag] to specific node tags; each raises
   [Invalid_argument] with its own name when the descriptor mismatches. *)
let fixed_constant_coercion (n : any_node_tag why_node) : fixed_constant_tag why_node =
  match n.desc with
  | Fixed_constant _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "fixed_constant_coercion"
let real_constant_coercion (n : any_node_tag why_node) : real_constant_tag why_node =
  match n.desc with
  | Real_constant _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "real_constant_coercion"
let float_constant_coercion (n : any_node_tag why_node) : float_constant_tag why_node =
  match n.desc with
  | Float_constant _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "float_constant_coercion"
let comment_coercion (n : any_node_tag why_node) : comment_tag why_node =
  match n.desc with
  | Comment _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "comment_coercion"
let deref_coercion (n : any_node_tag why_node) : deref_tag why_node =
  match n.desc with
  | Deref _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "deref_coercion"
let record_access_coercion (n : any_node_tag why_node) : record_access_tag why_node =
  match n.desc with
  | Record_access _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "record_access_coercion"
let record_update_coercion (n : any_node_tag why_node) : record_update_tag why_node =
  match n.desc with
  | Record_update _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "record_update_coercion"
let record_aggregate_coercion (n : any_node_tag why_node) : record_aggregate_tag why_node =
  match n.desc with
  | Record_aggregate _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "record_aggregate_coercion"
let any_expr_coercion (n : any_node_tag why_node) : any_expr_tag why_node =
  match n.desc with
  | Any_expr _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "any_expr_coercion"
let assignment_coercion (n : any_node_tag why_node) : assignment_tag why_node =
  match n.desc with
  | Assignment _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "assignment_coercion"
let binding_ref_coercion (n : any_node_tag why_node) : binding_ref_tag why_node =
  match n.desc with
  | Binding_ref _ as d -> { info = n.info; desc = d }
  | _ -> invalid_arg "binding_ref_coercion"
let loop_coercion (node : any_node_tag why_node) : loop_tag why_node =
match node.desc with
| Loop _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "loop_coercion"
let statement_sequence_coercion (node : any_node_tag why_node) : statement_sequence_tag why_node =
match node.desc with
| Statement_sequence _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "statement_sequence_coercion"
let abstract_expr_coercion (node : any_node_tag why_node) : abstract_expr_tag why_node =
match node.desc with
| Abstract_expr _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "abstract_expr_coercion"
let assert_coercion (node : any_node_tag why_node) : assert_tag why_node =
match node.desc with
| Assert _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "assert_coercion"
let raise_coercion (node : any_node_tag why_node) : raise_tag why_node =
match node.desc with
| Raise _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "raise_coercion"
let try_block_coercion (node : any_node_tag why_node) : try_block_tag why_node =
match node.desc with
| Try_block _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "try_block_coercion"
let function_decl_coercion (node : any_node_tag why_node) : function_decl_tag why_node =
match node.desc with
| Function_decl _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "function_decl_coercion"
let axiom_coercion (node : any_node_tag why_node) : axiom_tag why_node =
match node.desc with
| Axiom _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "axiom_coercion"
let goal_coercion (node : any_node_tag why_node) : goal_tag why_node =
match node.desc with
| Goal _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "goal_coercion"
(* View [node] as a [type_decl_tag] node; [Invalid_argument] otherwise. *)
let type_decl_coercion (node : any_node_tag why_node) : type_decl_tag why_node =
  let {info; desc} = node in
  match desc with
  | Type_decl _ -> {info; desc}
  | _ -> invalid_arg "type_decl_coercion"
(* View [node] as a [global_ref_declaration_tag] node; [Invalid_argument] otherwise. *)
let global_ref_declaration_coercion (node : any_node_tag why_node) : global_ref_declaration_tag why_node =
  let {info; desc} = node in
  match desc with
  | Global_ref_declaration _ -> {info; desc}
  | _ -> invalid_arg "global_ref_declaration_coercion"
(* View [node] as a [namespace_declaration_tag] node; [Invalid_argument] otherwise. *)
let namespace_declaration_coercion (node : any_node_tag why_node) : namespace_declaration_tag why_node =
  let {info; desc} = node in
  match desc with
  | Namespace_declaration _ -> {info; desc}
  | _ -> invalid_arg "namespace_declaration_coercion"
(* View [node] as an [exception_declaration_tag] node; [Invalid_argument] otherwise. *)
let exception_declaration_coercion (node : any_node_tag why_node) : exception_declaration_tag why_node =
  let {info; desc} = node in
  match desc with
  | Exception_declaration _ -> {info; desc}
  | _ -> invalid_arg "exception_declaration_coercion"
(* View [node] as a [meta_declaration_tag] node; [Invalid_argument] otherwise. *)
let meta_declaration_coercion (node : any_node_tag why_node) : meta_declaration_tag why_node =
  let {info; desc} = node in
  match desc with
  | Meta_declaration _ -> {info; desc}
  | _ -> invalid_arg "meta_declaration_coercion"
(* View [node] as a [clone_declaration_tag] node; [Invalid_argument] otherwise. *)
let clone_declaration_coercion (node : any_node_tag why_node) : clone_declaration_tag why_node =
  let {info; desc} = node in
  match desc with
  | Clone_declaration _ -> {info; desc}
  | _ -> invalid_arg "clone_declaration_coercion"
(* View [node] as a [clone_substitution_tag] node; [Invalid_argument] otherwise. *)
let clone_substitution_coercion (node : any_node_tag why_node) : clone_substitution_tag why_node =
  let {info; desc} = node in
  match desc with
  | Clone_substitution _ -> {info; desc}
  | _ -> invalid_arg "clone_substitution_coercion"
(* View [node] as an [include_declaration_tag] node; [Invalid_argument] otherwise. *)
let include_declaration_coercion (node : any_node_tag why_node) : include_declaration_tag why_node =
  let {info; desc} = node in
  match desc with
  | Include_declaration _ -> {info; desc}
  | _ -> invalid_arg "include_declaration_coercion"
(* View [node] as a [theory_declaration_tag] node; [Invalid_argument] otherwise. *)
let theory_declaration_coercion (node : any_node_tag why_node) : theory_declaration_tag why_node =
  let {info; desc} = node in
  match desc with
  | Theory_declaration _ -> {info; desc}
  | _ -> invalid_arg "theory_declaration_coercion"
(* View [node] as a [module_tag] node; [Invalid_argument] otherwise. *)
let module_coercion (node : any_node_tag why_node) : module_tag why_node =
  let {info; desc} = node in
  match desc with
  | Module _ -> {info; desc}
  | _ -> invalid_arg "module_coercion"
(* Class coercions *)
(* View [node] as an [expr_tag] node: accepts every constructor of the
   expression class, raises [Invalid_argument] on anything else.
   The or-pattern replaces one branch per constructor; all branches were
   identical. Warning 11 (redundant match case) stays suppressed as in the
   generated original. *)
let expr_coercion (node : any_node_tag why_node) : expr_tag why_node =
  let {info; desc} = node in
  match desc with
  | Universal_quantif _ | Existential_quantif _ | Not _ | Connection _
  | Label _ | Loc_label _ | Identifier _ | Tagged _ | Call _ | Literal _
  | Binding _ | Elsif _ | Epsilon _ | Conditional _ | Integer_constant _
  | Range_constant _ | Modular_constant _ | Fixed_constant _
  | Real_constant _ | Float_constant _ | Comment _ | Deref _
  | Record_access _ | Record_update _ | Record_aggregate _ | Any_expr _
  | Assignment _ | Binding_ref _ | Loop _ | Statement_sequence _
  | Abstract_expr _ | Assert _ | Raise _ | Try_block _ ->
    {info; desc}
  | _ -> invalid_arg "expr_coercion"
[@@warning "-11"]
(* View [node] as a [pred_tag] node: accepts every constructor of the
   predicate class, raises [Invalid_argument] on anything else. *)
let pred_coercion (node : any_node_tag why_node) : pred_tag why_node =
  let {info; desc} = node in
  match desc with
  | Universal_quantif _ | Existential_quantif _ | Not _ | Connection _
  | Label _ | Loc_label _ | Identifier _ | Tagged _ | Call _ | Literal _
  | Binding _ | Elsif _ | Epsilon _ | Conditional _ ->
    {info; desc}
  | _ -> invalid_arg "pred_coercion"
[@@warning "-11"]
(* View [node] as a [term_tag] node: accepts every constructor of the
   term class, raises [Invalid_argument] on anything else. *)
let term_coercion (node : any_node_tag why_node) : term_tag why_node =
  let {info; desc} = node in
  match desc with
  | Label _ | Loc_label _ | Identifier _ | Tagged _ | Call _ | Literal _
  | Binding _ | Elsif _ | Epsilon _ | Conditional _ | Integer_constant _
  | Range_constant _ | Modular_constant _ | Fixed_constant _
  | Real_constant _ | Float_constant _ | Comment _ | Deref _
  | Record_access _ | Record_update _ | Record_aggregate _ ->
    {info; desc}
  | _ -> invalid_arg "term_coercion"
[@@warning "-11"]
(* View [node] as a [prog_tag] node: accepts every constructor of the
   program-expression class, raises [Invalid_argument] on anything else. *)
let prog_coercion (node : any_node_tag why_node) : prog_tag why_node =
  let {info; desc} = node in
  match desc with
  | Not _ | Connection _ | Label _ | Loc_label _ | Identifier _ | Tagged _
  | Call _ | Literal _ | Binding _ | Elsif _ | Epsilon _ | Conditional _
  | Integer_constant _ | Range_constant _ | Modular_constant _
  | Fixed_constant _ | Real_constant _ | Float_constant _ | Comment _
  | Deref _ | Record_access _ | Record_update _ | Record_aggregate _
  | Any_expr _ | Assignment _ | Binding_ref _ | Loop _
  | Statement_sequence _ | Abstract_expr _ | Assert _ | Raise _
  | Try_block _ ->
    {info; desc}
  | _ -> invalid_arg "prog_coercion"
[@@warning "-11"]
(* View [node] as a [type_definition_tag] node: accepts the four
   type-definition constructors, raises [Invalid_argument] otherwise. *)
let type_definition_coercion (node : any_node_tag why_node) : type_definition_tag why_node =
  let {info; desc} = node in
  match desc with
  | Transparent_type_definition _ | Record_binder _ | Record_definition _
  | Range_type_definition _ ->
    {info; desc}
  | _ -> invalid_arg "type_definition_coercion"
[@@warning "-11"]
(* View [node] as a [declaration_tag] node: accepts every constructor of
   the declaration class, raises [Invalid_argument] otherwise. *)
let declaration_coercion (node : any_node_tag why_node) : declaration_tag why_node =
  let {info; desc} = node in
  match desc with
  | Function_decl _ | Axiom _ | Goal _ | Type_decl _
  | Global_ref_declaration _ | Namespace_declaration _
  | Exception_declaration _ | Meta_declaration _ | Clone_declaration _ ->
    {info; desc}
  | _ -> invalid_arg "declaration_coercion"
[@@warning "-11"]
(* View [node] as an [any_node_tag] node. Every known constructor is
   accepted; the catch-all mirrors the generated original (the generator
   always emits a fallback, hence the suppressed warning 11). *)
let any_node_coercion (node : any_node_tag why_node) : any_node_tag why_node =
  let {info; desc} = node in
  match desc with
  | Type _ | Name _ | Effects _ | Raise_effect _ | Binder _
  | Transparent_type_definition _ | Record_binder _ | Record_definition _
  | Range_type_definition _ | Triggers _ | Trigger _ | Axiom_dep _
  | Handler _ | Field_association _ | Variant _ | Variants _
  | Universal_quantif _ | Existential_quantif _ | Not _ | Connection _
  | Label _ | Loc_label _ | Identifier _ | Tagged _ | Call _ | Literal _
  | Binding _ | Elsif _ | Epsilon _ | Conditional _ | Integer_constant _
  | Range_constant _ | Modular_constant _ | Fixed_constant _
  | Real_constant _ | Float_constant _ | Comment _ | Deref _
  | Record_access _ | Record_update _ | Record_aggregate _ | Any_expr _
  | Assignment _ | Binding_ref _ | Loop _ | Statement_sequence _
  | Abstract_expr _ | Assert _ | Raise _ | Try_block _ | Function_decl _
  | Axiom _ | Goal _ | Type_decl _ | Global_ref_declaration _
  | Namespace_declaration _ | Exception_declaration _ | Meta_declaration _
  | Clone_declaration _ | Clone_substitution _ | Include_declaration _
  | Theory_declaration _ | Module _ ->
    {info; desc}
  | _ -> invalid_arg "any_node_coercion"
[@@warning "-11"]
module From_json = struct
(* JSON values, structurally compatible with [Yojson.Safe.t]. *)
type t = [
  | `Null
  | `Bool of bool
  | `Int of int
  (* Integer literal that does not fit in an OCaml [int]. *)
  | `Intlit of string
  | `Float of float
  | `String of string
  | `Assoc of (string * t) list
  | `List of t list
  | `Tuple of t list
  | `Variant of string * t option
]
(* A decoder: turns a JSON value into an ['a], or raises [Unexpected_Json]. *)
type 'a from_json = t -> 'a
(* Carries the expected type name and the offending JSON value. *)
exception Unexpected_Json of string * t
(* Abort decoding: report the expected type name and the offending value. *)
let unexpected_json expected value = raise (Unexpected_Json (expected, value))
(* Decode a JSON boolean. *)
let boolean_from_json : bool from_json = fun json ->
  match json with
  | `Bool b -> b
  | _ -> unexpected_json "bool" json
(* Decode a JSON integer. *)
let int_from_json : int from_json = fun json ->
  match json with
  | `Int i -> i
  | _ -> unexpected_json "int" json
(* Decode an unbounded integer, kept as its string representation. *)
let uint_from_json : uint from_json = fun json ->
  match json with
  | `String digits -> Uint digits
  | _ -> unexpected_json "uint" json
(* Decode a universal real, serialized as the 4-element list
   [numerator; denominator; base; negative]. *)
let ureal_from_json : ureal from_json = fun json ->
  match json with
  | `List [num; den; base; neg] ->
    Ureal {
      numerator = uint_from_json num;
      denominator = uint_from_json den;
      base = int_from_json base;
      negative = boolean_from_json neg;
    }
  | _ ->
    unexpected_json "ureal" json
(* Decode a source location: [`Null] means no location, otherwise a
   [filename; line] pair. *)
let source_ptr_from_json : source_ptr from_json = fun json ->
  match json with
  | `Null -> No_location
  | `List [`String filename; `Int line] -> Source_ptr {filename; line}
  | _ -> unexpected_json "source_ptr" json
(* Node ids are serialized as an opaque placeholder string. *)
let node_id_from_json : node_id from_json = fun json ->
  match json with
  | `String "<Node_Id>" -> Node_id
  | _ -> unexpected_json "node_id" json
(* Node sets are serialized as an opaque placeholder; decoded to unit. *)
let why_node_set_from_json : why_node_set from_json = fun json ->
  match json with
  | `String "<Why_Node_Set>" -> ()
  | _ -> unexpected_json "why_node_set" json
(* Decode a symbol; the empty string encodes the absent symbol, so it must
   be matched before the generic string case. *)
let symbol_from_json : symbol from_json = fun json ->
  match json with
  | `String "" -> No_symbol
  | `String name -> Symbol name
  | _ -> unexpected_json "symbol" json
(* Decode a list of symbols. *)
let symbol_set_from_json : symbol_set from_json = fun json ->
  match json with
  | `List elements -> List.map symbol_from_json elements
  | _ -> unexpected_json "symbol_set" json
(* Decode a label set. NOTE(review): this maps [symbol_from_json], i.e. the
   same element decoder as [symbol_set_from_json] — presumably
   [string_sets_set] is represented as a symbol list; confirm against the
   generator. *)
let string_sets_set_from_json : string_sets_set from_json = fun json ->
  match json with
  | `List elements -> List.map symbol_from_json elements
  | _ -> unexpected_json "string_sets_set" json
(* NOTE(review): this line read "Why ." in the original -- apparently a
   comment whose delimiters were lost; re-wrapped as a comment so the
   text cannot be parsed as code. *)
(* Decode a [domain] from its integer encoding.
   NOTE(review): the encoding starts at 1 while the other enums here start
   at 0 — assumed to mirror the generator's enum positions; confirm there. *)
let domain_from_json : domain from_json = fun json ->
  match json with
  | `Int 1 -> Term
  | `Int 2 -> Pterm
  | `Int 3 -> Pred
  | `Int 4 -> Prog
  | _ -> unexpected_json "domain" json
(* Decode a [type_] kind from its integer encoding. *)
let type_from_json : type_ from_json = fun json ->
  match json with
  | `Int 0 -> Builtin
  | `Int 1 -> Split
  | `Int 2 -> Abstract
  | _ -> unexpected_json "type_" json
(* Decode a boolean [literal] from its integer encoding. *)
let literal_from_json : literal from_json = fun json ->
  match json with
  | `Int 0 -> True
  | `Int 1 -> False
  | _ -> unexpected_json "literal" json
(* Decode a [theory_type] from its integer encoding. *)
let theory_type_from_json : theory_type from_json = fun json ->
  match json with
  | `Int 0 -> Theory
  | `Int 1 -> Module
  | _ -> unexpected_json "theory_type" json
(* Decode a [clone_type] from its integer encoding. *)
let clone_type_from_json : clone_type from_json = fun json ->
  match json with
  | `Int 0 -> Import
  | `Int 1 -> Export
  | `Int 2 -> Clone_default
  | _ -> unexpected_json "clone_type" json
(* Decode a [subst_type] from its integer encoding.
   NOTE(review): [Namepace] (sic) is the constructor's spelling as declared
   elsewhere in this file; it cannot be corrected here. *)
let subst_type_from_json : subst_type from_json = fun json ->
  match json with
  | `Int 0 -> Type_subst
  | `Int 1 -> Function
  | `Int 2 -> Predicate
  | `Int 3 -> Namepace
  | `Int 4 -> Lemma
  | `Int 5 -> Goal
  | _ -> unexpected_json "subst_type" json
(* Decode a logical [connector] from its integer encoding. *)
let connector_from_json : connector from_json = fun json ->
  match json with
  | `Int 0 -> Or_else
  | `Int 1 -> And_then
  | `Int 2 -> Imply
  | `Int 3 -> Equivalent
  | `Int 4 -> Or
  | `Int 5 -> And
  | _ -> unexpected_json "connector" json
(* Decode an [assert_kind] from its integer encoding. *)
let assert_kind_from_json : assert_kind from_json = fun json ->
  match json with
  | `Int 0 -> Assert
  | `Int 1 -> Check
  | `Int 2 -> Assume
  | _ -> unexpected_json "assert_kind" json
(* Decode an [axiom_dep_kind] from its integer encoding. *)
let axiom_dep_kind_from_json : axiom_dep_kind from_json = fun json ->
  match json with
  | `Int 0 -> Axdep_func
  | `Int 1 -> Axdep_pred
  | _ -> unexpected_json "axiom_dep_kind" json
let rec why_node_from_json : 'a why_node from_json = function
| `List [`String "W_TYPE"; id; node; domain; link; checked; type_kind; name; is_mutable; relaxed_init] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Type {
type_kind = type_from_json type_kind;
name = name_opaque_id_from_json name;
is_mutable = boolean_from_json is_mutable;
relaxed_init = boolean_from_json relaxed_init;
} in
{info; desc}
| `List [`String "W_NAME"; id; node; domain; link; checked; symb; namespace; module_; infix] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Name {
symb = symbol_from_json symb;
namespace = symbol_from_json namespace;
module_ = module_opaque_oid_from_json module_;
infix = boolean_from_json infix;
} in
{info; desc}
| `List [`String "W_EFFECTS"; id; node; domain; link; checked; reads; writes; raises] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Effects {
reads = identifier_opaque_olist_from_json reads;
writes = identifier_opaque_olist_from_json writes;
raises = raise_effect_opaque_olist_from_json raises;
} in
{info; desc}
| `List [`String "W_RAISE_EFFECT"; id; node; domain; link; checked; name; arg_id; post] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Raise_effect {
name = name_opaque_id_from_json name;
arg_id = identifier_opaque_oid_from_json arg_id;
post = pred_opaque_oid_from_json post;
} in
{info; desc}
| `List [`String "W_BINDER"; id; node; domain; link; checked; name; arg_type] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Binder {
name = identifier_opaque_oid_from_json name;
arg_type = type_opaque_id_from_json arg_type;
} in
{info; desc}
| `List [`String "W_TRANSPARENT_TYPE_DEFINITION"; id; node; domain; link; checked; type_definition] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Transparent_type_definition {
type_definition = type_opaque_id_from_json type_definition;
} in
{info; desc}
| `List [`String "W_RECORD_BINDER"; id; node; domain; link; checked; name; arg_type; labels; is_mutable] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Record_binder {
name = identifier_opaque_oid_from_json name;
arg_type = type_opaque_id_from_json arg_type;
labels = symbol_set_from_json labels;
is_mutable = boolean_from_json is_mutable;
} in
{info; desc}
| `List [`String "W_RECORD_DEFINITION"; id; node; domain; link; checked; fields] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Record_definition {
fields = record_binder_opaque_list_from_json fields;
} in
{info; desc}
| `List [`String "W_RANGE_TYPE_DEFINITION"; id; node; domain; link; checked; first; last] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Range_type_definition {
first = uint_from_json first;
last = uint_from_json last;
} in
{info; desc}
| `List [`String "W_TRIGGERS"; id; node; domain; link; checked; triggers] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Triggers {
triggers = trigger_opaque_list_from_json triggers;
} in
{info; desc}
| `List [`String "W_TRIGGER"; id; node; domain; link; checked; terms] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Trigger {
terms = expr_opaque_list_from_json terms;
} in
{info; desc}
| `List [`String "W_AXIOM_DEP"; id; node; domain; link; checked; name; kind] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Axiom_dep {
name = identifier_opaque_id_from_json name;
kind = axiom_dep_kind_from_json kind;
} in
{info; desc}
| `List [`String "W_HANDLER"; id; node; domain; link; checked; name; arg_id; def] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Handler {
name = name_opaque_id_from_json name;
arg_id = identifier_opaque_oid_from_json arg_id;
def = prog_opaque_id_from_json def;
} in
{info; desc}
| `List [`String "W_FIELD_ASSOCIATION"; id; node; domain; link; checked; field; value] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Field_association {
field = identifier_opaque_id_from_json field;
value = expr_opaque_id_from_json value;
} in
{info; desc}
| `List [`String "W_VARIANT"; id; node; domain; link; checked; cmp_op; labels; expr] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Variant {
cmp_op = identifier_opaque_id_from_json cmp_op;
labels = symbol_set_from_json labels;
expr = term_opaque_id_from_json expr;
} in
{info; desc}
| `List [`String "W_VARIANTS"; id; node; domain; link; checked; variants] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Variants {
variants = variant_opaque_list_from_json variants;
} in
{info; desc}
| `List [`String "W_UNIVERSAL_QUANTIF"; id; node; domain; link; checked; variables; labels; var_type; triggers; pred] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Universal_quantif {
variables = identifier_opaque_list_from_json variables;
labels = symbol_set_from_json labels;
var_type = type_opaque_id_from_json var_type;
triggers = triggers_opaque_oid_from_json triggers;
pred = pred_opaque_id_from_json pred;
} in
{info; desc}
| `List [`String "W_EXISTENTIAL_QUANTIF"; id; node; domain; link; checked; variables; labels; var_type; pred] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Existential_quantif {
variables = identifier_opaque_list_from_json variables;
labels = symbol_set_from_json labels;
var_type = type_opaque_id_from_json var_type;
pred = pred_opaque_id_from_json pred;
} in
{info; desc}
| `List [`String "W_NOT"; id; node; domain; link; checked; right] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Not {
right = expr_opaque_id_from_json right;
} in
{info; desc}
| `List [`String "W_CONNECTION"; id; node; domain; link; checked; left; op; right; more_right] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Connection {
left = expr_opaque_id_from_json left;
op = connector_from_json op;
right = expr_opaque_id_from_json right;
more_right = expr_opaque_olist_from_json more_right;
} in
{info; desc}
| `List [`String "W_LABEL"; id; node; domain; link; checked; labels; def; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Label {
labels = symbol_set_from_json labels;
def = expr_opaque_id_from_json def;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_LOC_LABEL"; id; node; domain; link; checked; sloc; def; marker] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Loc_label {
sloc = source_ptr_from_json sloc;
def = expr_opaque_id_from_json def;
marker = symbol_from_json marker;
} in
{info; desc}
| `List [`String "W_IDENTIFIER"; id; node; domain; link; checked; name; typ; labels] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Identifier {
name = name_opaque_id_from_json name;
typ = type_opaque_oid_from_json typ;
labels = string_sets_set_from_json labels;
} in
{info; desc}
| `List [`String "W_TAGGED"; id; node; domain; link; checked; tag; def; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Tagged {
tag = symbol_from_json tag;
def = expr_opaque_id_from_json def;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_CALL"; id; node; domain; link; checked; name; args; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Call {
name = identifier_opaque_id_from_json name;
args = expr_opaque_olist_from_json args;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_LITERAL"; id; node; domain; link; checked; value; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Literal {
value = literal_from_json value;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_BINDING"; id; node; domain; link; checked; name; def; context; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Binding {
name = identifier_opaque_id_from_json name;
def = expr_opaque_id_from_json def;
context = expr_opaque_id_from_json context;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_ELSIF"; id; node; domain; link; checked; condition; then_part; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Elsif {
condition = expr_opaque_id_from_json condition;
then_part = expr_opaque_id_from_json then_part;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_EPSILON"; id; node; domain; link; checked; name; typ; pred] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Epsilon {
name = identifier_opaque_id_from_json name;
typ = type_opaque_id_from_json typ;
pred = pred_opaque_id_from_json pred;
} in
{info; desc}
| `List [`String "W_CONDITIONAL"; id; node; domain; link; checked; condition; then_part; elsif_parts; else_part; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Conditional {
condition = expr_opaque_id_from_json condition;
then_part = expr_opaque_id_from_json then_part;
elsif_parts = expr_opaque_olist_from_json elsif_parts;
else_part = expr_opaque_oid_from_json else_part;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_INTEGER_CONSTANT"; id; node; domain; link; checked; value] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Integer_constant {
value = uint_from_json value;
} in
{info; desc}
| `List [`String "W_RANGE_CONSTANT"; id; node; domain; link; checked; value; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Range_constant {
value = uint_from_json value;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_MODULAR_CONSTANT"; id; node; domain; link; checked; value; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Modular_constant {
value = uint_from_json value;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_FIXED_CONSTANT"; id; node; domain; link; checked; value; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Fixed_constant {
value = uint_from_json value;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_REAL_CONSTANT"; id; node; domain; link; checked; value] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Real_constant {
value = ureal_from_json value;
} in
{info; desc}
| `List [`String "W_FLOAT_CONSTANT"; id; node; domain; link; checked; value; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Float_constant {
value = ureal_from_json value;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_COMMENT"; id; node; domain; link; checked; comment] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Comment {
comment = symbol_from_json comment;
} in
{info; desc}
| `List [`String "W_DEREF"; id; node; domain; link; checked; right; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Deref {
right = identifier_opaque_id_from_json right;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_RECORD_ACCESS"; id; node; domain; link; checked; name; field; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Record_access {
name = expr_opaque_id_from_json name;
field = identifier_opaque_id_from_json field;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_RECORD_UPDATE"; id; node; domain; link; checked; name; updates; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Record_update {
name = expr_opaque_id_from_json name;
updates = field_association_opaque_list_from_json updates;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_RECORD_AGGREGATE"; id; node; domain; link; checked; associations; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Record_aggregate {
associations = field_association_opaque_list_from_json associations;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_ANY_EXPR"; id; node; domain; link; checked; effects; pre; post; return_type; labels] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Any_expr {
effects = effects_opaque_oid_from_json effects;
pre = pred_opaque_oid_from_json pre;
post = pred_opaque_oid_from_json post;
return_type = type_opaque_id_from_json return_type;
labels = symbol_set_from_json labels;
} in
{info; desc}
| `List [`String "W_ASSIGNMENT"; id; node; domain; link; checked; name; value; typ; labels] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Assignment {
name = identifier_opaque_id_from_json name;
value = prog_opaque_id_from_json value;
typ = type_opaque_id_from_json typ;
labels = symbol_set_from_json labels;
} in
{info; desc}
| `List [`String "W_BINDING_REF"; id; node; domain; link; checked; name; def; context; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Binding_ref {
name = identifier_opaque_id_from_json name;
def = prog_opaque_id_from_json def;
context = prog_opaque_id_from_json context;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_LOOP"; id; node; domain; link; checked; code_before; invariants; variants; code_after] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Loop {
code_before = prog_opaque_id_from_json code_before;
invariants = pred_opaque_olist_from_json invariants;
variants = variants_opaque_olist_from_json variants;
code_after = prog_opaque_id_from_json code_after;
} in
{info; desc}
| `List [`String "W_STATEMENT_SEQUENCE"; id; node; domain; link; checked; statements] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Statement_sequence {
statements = prog_opaque_list_from_json statements;
} in
{info; desc}
| `List [`String "W_ABSTRACT_EXPR"; id; node; domain; link; checked; expr; post; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Abstract_expr {
expr = prog_opaque_id_from_json expr;
post = pred_opaque_id_from_json post;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_ASSERT"; id; node; domain; link; checked; pred; assert_kind] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Assert {
pred = pred_opaque_id_from_json pred;
assert_kind = assert_kind_from_json assert_kind;
} in
{info; desc}
| `List [`String "W_RAISE"; id; node; domain; link; checked; name; arg; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Raise {
name = name_opaque_id_from_json name;
arg = expr_opaque_oid_from_json arg;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_TRY_BLOCK"; id; node; domain; link; checked; prog; handler; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Try_block {
prog = prog_opaque_id_from_json prog;
handler = handler_opaque_list_from_json handler;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_FUNCTION_DECL"; id; node; domain; link; checked; name; binders; effects; pre; post; return_type; def; labels; location] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Function_decl {
name = identifier_opaque_id_from_json name;
binders = binder_opaque_olist_from_json binders;
effects = effects_opaque_oid_from_json effects;
pre = pred_opaque_oid_from_json pre;
post = pred_opaque_oid_from_json post;
return_type = type_opaque_oid_from_json return_type;
def = expr_opaque_oid_from_json def;
labels = symbol_set_from_json labels;
location = source_ptr_from_json location;
} in
{info; desc}
| `List [`String "W_AXIOM"; id; node; domain; link; checked; name; def; dep] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Axiom {
name = symbol_from_json name;
def = pred_opaque_id_from_json def;
dep = axiom_dep_opaque_oid_from_json dep;
} in
{info; desc}
| `List [`String "W_GOAL"; id; node; domain; link; checked; name; def] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Goal {
name = symbol_from_json name;
def = pred_opaque_id_from_json def;
} in
{info; desc}
| `List [`String "W_TYPE_DECL"; id; node; domain; link; checked; args; name; labels; definition] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Type_decl {
args = identifier_opaque_olist_from_json args;
name = name_opaque_id_from_json name;
labels = symbol_set_from_json labels;
definition = type_definition_opaque_oid_from_json definition;
} in
{info; desc}
| `List [`String "W_GLOBAL_REF_DECLARATION"; id; node; domain; link; checked; name; ref_type; labels; location] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Global_ref_declaration {
name = identifier_opaque_id_from_json name;
ref_type = type_opaque_id_from_json ref_type;
labels = symbol_set_from_json labels;
location = source_ptr_from_json location;
} in
{info; desc}
| `List [`String "W_NAMESPACE_DECLARATION"; id; node; domain; link; checked; declarations; name] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Namespace_declaration {
declarations = declaration_opaque_olist_from_json declarations;
name = symbol_from_json name;
} in
{info; desc}
| `List [`String "W_EXCEPTION_DECLARATION"; id; node; domain; link; checked; name; arg] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Exception_declaration {
name = name_opaque_id_from_json name;
arg = type_opaque_oid_from_json arg;
} in
{info; desc}
| `List [`String "W_META_DECLARATION"; id; node; domain; link; checked; name; parameter] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Meta_declaration {
name = symbol_from_json name;
parameter = symbol_from_json parameter;
} in
{info; desc}
| `List [`String "W_CLONE_DECLARATION"; id; node; domain; link; checked; origin; as_name; clone_kind; substitutions; theory_kind] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Clone_declaration {
origin = module_opaque_id_from_json origin;
as_name = symbol_from_json as_name;
clone_kind = clone_type_from_json clone_kind;
substitutions = clone_substitution_opaque_olist_from_json substitutions;
theory_kind = theory_type_from_json theory_kind;
} in
{info; desc}
| `List [`String "W_CLONE_SUBSTITUTION"; id; node; domain; link; checked; kind; orig_name; image] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Clone_substitution {
kind = subst_type_from_json kind;
orig_name = name_opaque_id_from_json orig_name;
image = name_opaque_id_from_json image;
} in
{info; desc}
| `List [`String "W_INCLUDE_DECLARATION"; id; node; domain; link; checked; module_; kind; use_kind] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Include_declaration {
module_ = module_opaque_id_from_json module_;
kind = theory_type_from_json kind;
use_kind = clone_type_from_json use_kind;
} in
{info; desc}
| `List [`String "W_THEORY_DECLARATION"; id; node; domain; link; checked; declarations; name; kind; includes; comment] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Theory_declaration {
declarations = declaration_opaque_olist_from_json declarations;
name = symbol_from_json name;
kind = theory_type_from_json kind;
includes = include_declaration_opaque_olist_from_json includes;
comment = symbol_from_json comment;
} in
{info; desc}
| `List [`String "W_MODULE"; id; node; domain; link; checked; file; name] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Module {
file = symbol_from_json file;
name = symbol_from_json name;
} in
{info; desc}
| json ->
unexpected_json "why_node" json
(* Deserialize a mandatory child node: parse the JSON as a generic node,
   then narrow it to the expected node kind via [coerce]. *)
and why_node_id_from_json : 'a . (any_node_tag why_node -> 'a why_node) -> 'a why_node_id from_json =
  fun coerce json ->
    let node = why_node_from_json json in
    coerce node
(* Deserialize an optional child node: [`Null] maps to [None]; any other
   JSON value is parsed as a node and narrowed via [coerce]. *)
and why_node_oid_from_json : 'a . (any_node_tag why_node -> 'a why_node) -> 'a why_node_oid from_json =
  fun coerce json ->
    match json with
    | `Null ->
      None
    | node_json ->
      Some (coerce (why_node_from_json node_json))
(* Deserialize a mandatory, non-empty list of child nodes.  The result
   record keeps the first element apart ([elt0]) from the remainder
   ([elts]); an empty or non-list JSON value is rejected. *)
and why_node_list_from_json : 'a . (any_node_tag why_node -> 'a why_node) -> 'a why_node_list from_json =
  fun coerce json ->
    (* Parse one element and narrow it in a single step. *)
    let parse j = coerce (why_node_from_json j) in
    match json with
    | `List (first :: rest) ->
      {elt0 = parse first; elts = List.map parse rest}
    | json ->
      unexpected_json "why_node_list" json
(* Deserialize a possibly-absent list of child nodes: [`Null] maps to
   the empty list; a JSON list is parsed element-wise. *)
and why_node_olist_from_json : 'a . (any_node_tag why_node -> 'a why_node) -> 'a why_node_olist from_json =
  fun coerce json ->
    (* Parse one element and narrow it in a single step. *)
    let parse j = coerce (why_node_from_json j) in
    match json with
    | `Null ->
      []
    | `List items ->
      List.map parse items
    | json ->
      unexpected_json "why_node_olist_from_json" json
(* Opaque tags from json *)
(* Per-kind wrappers over the generic node deserializers.  For each node
   kind NAME there are four variants, differing only in cardinality:
     _oid   : optional single node   ([`Null] becomes [None])
     _olist : possibly-absent list   ([`Null] becomes [])
     _id    : mandatory single node
     _list  : mandatory non-empty list
   Each wrapper simply instantiates the corresponding generic
   [why_node_{oid,olist,id,list}_from_json] with the kind's coercion
   function.  Generated code: lines are kept uniform on purpose. *)
and type_opaque_oid_from_json json = why_node_oid_from_json type_coercion json
and type_opaque_olist_from_json json = why_node_olist_from_json type_coercion json
and type_opaque_id_from_json json = why_node_id_from_json type_coercion json
and type_opaque_list_from_json json = why_node_list_from_json type_coercion json
and name_opaque_oid_from_json json = why_node_oid_from_json name_coercion json
and name_opaque_olist_from_json json = why_node_olist_from_json name_coercion json
and name_opaque_id_from_json json = why_node_id_from_json name_coercion json
and name_opaque_list_from_json json = why_node_list_from_json name_coercion json
and effects_opaque_oid_from_json json = why_node_oid_from_json effects_coercion json
and effects_opaque_olist_from_json json = why_node_olist_from_json effects_coercion json
and effects_opaque_id_from_json json = why_node_id_from_json effects_coercion json
and effects_opaque_list_from_json json = why_node_list_from_json effects_coercion json
and raise_effect_opaque_oid_from_json json = why_node_oid_from_json raise_effect_coercion json
and raise_effect_opaque_olist_from_json json = why_node_olist_from_json raise_effect_coercion json
and raise_effect_opaque_id_from_json json = why_node_id_from_json raise_effect_coercion json
and raise_effect_opaque_list_from_json json = why_node_list_from_json raise_effect_coercion json
and binder_opaque_oid_from_json json = why_node_oid_from_json binder_coercion json
and binder_opaque_olist_from_json json = why_node_olist_from_json binder_coercion json
and binder_opaque_id_from_json json = why_node_id_from_json binder_coercion json
and binder_opaque_list_from_json json = why_node_list_from_json binder_coercion json
and transparent_type_definition_opaque_oid_from_json json = why_node_oid_from_json transparent_type_definition_coercion json
and transparent_type_definition_opaque_olist_from_json json = why_node_olist_from_json transparent_type_definition_coercion json
and transparent_type_definition_opaque_id_from_json json = why_node_id_from_json transparent_type_definition_coercion json
and transparent_type_definition_opaque_list_from_json json = why_node_list_from_json transparent_type_definition_coercion json
and record_binder_opaque_oid_from_json json = why_node_oid_from_json record_binder_coercion json
and record_binder_opaque_olist_from_json json = why_node_olist_from_json record_binder_coercion json
and record_binder_opaque_id_from_json json = why_node_id_from_json record_binder_coercion json
and record_binder_opaque_list_from_json json = why_node_list_from_json record_binder_coercion json
and record_definition_opaque_oid_from_json json = why_node_oid_from_json record_definition_coercion json
and record_definition_opaque_olist_from_json json = why_node_olist_from_json record_definition_coercion json
and record_definition_opaque_id_from_json json = why_node_id_from_json record_definition_coercion json
and record_definition_opaque_list_from_json json = why_node_list_from_json record_definition_coercion json
and range_type_definition_opaque_oid_from_json json = why_node_oid_from_json range_type_definition_coercion json
and range_type_definition_opaque_olist_from_json json = why_node_olist_from_json range_type_definition_coercion json
and range_type_definition_opaque_id_from_json json = why_node_id_from_json range_type_definition_coercion json
and range_type_definition_opaque_list_from_json json = why_node_list_from_json range_type_definition_coercion json
and triggers_opaque_oid_from_json json = why_node_oid_from_json triggers_coercion json
and triggers_opaque_olist_from_json json = why_node_olist_from_json triggers_coercion json
and triggers_opaque_id_from_json json = why_node_id_from_json triggers_coercion json
and triggers_opaque_list_from_json json = why_node_list_from_json triggers_coercion json
and trigger_opaque_oid_from_json json = why_node_oid_from_json trigger_coercion json
and trigger_opaque_olist_from_json json = why_node_olist_from_json trigger_coercion json
and trigger_opaque_id_from_json json = why_node_id_from_json trigger_coercion json
and trigger_opaque_list_from_json json = why_node_list_from_json trigger_coercion json
and axiom_dep_opaque_oid_from_json json = why_node_oid_from_json axiom_dep_coercion json
and axiom_dep_opaque_olist_from_json json = why_node_olist_from_json axiom_dep_coercion json
and axiom_dep_opaque_id_from_json json = why_node_id_from_json axiom_dep_coercion json
and axiom_dep_opaque_list_from_json json = why_node_list_from_json axiom_dep_coercion json
and handler_opaque_oid_from_json json = why_node_oid_from_json handler_coercion json
and handler_opaque_olist_from_json json = why_node_olist_from_json handler_coercion json
and handler_opaque_id_from_json json = why_node_id_from_json handler_coercion json
and handler_opaque_list_from_json json = why_node_list_from_json handler_coercion json
and field_association_opaque_oid_from_json json = why_node_oid_from_json field_association_coercion json
and field_association_opaque_olist_from_json json = why_node_olist_from_json field_association_coercion json
and field_association_opaque_id_from_json json = why_node_id_from_json field_association_coercion json
and field_association_opaque_list_from_json json = why_node_list_from_json field_association_coercion json
and variant_opaque_oid_from_json json = why_node_oid_from_json variant_coercion json
and variant_opaque_olist_from_json json = why_node_olist_from_json variant_coercion json
and variant_opaque_id_from_json json = why_node_id_from_json variant_coercion json
and variant_opaque_list_from_json json = why_node_list_from_json variant_coercion json
and variants_opaque_oid_from_json json = why_node_oid_from_json variants_coercion json
and variants_opaque_olist_from_json json = why_node_olist_from_json variants_coercion json
and variants_opaque_id_from_json json = why_node_id_from_json variants_coercion json
and variants_opaque_list_from_json json = why_node_list_from_json variants_coercion json
and universal_quantif_opaque_oid_from_json json = why_node_oid_from_json universal_quantif_coercion json
and universal_quantif_opaque_olist_from_json json = why_node_olist_from_json universal_quantif_coercion json
and universal_quantif_opaque_id_from_json json = why_node_id_from_json universal_quantif_coercion json
and universal_quantif_opaque_list_from_json json = why_node_list_from_json universal_quantif_coercion json
and existential_quantif_opaque_oid_from_json json = why_node_oid_from_json existential_quantif_coercion json
and existential_quantif_opaque_olist_from_json json = why_node_olist_from_json existential_quantif_coercion json
and existential_quantif_opaque_id_from_json json = why_node_id_from_json existential_quantif_coercion json
and existential_quantif_opaque_list_from_json json = why_node_list_from_json existential_quantif_coercion json
and not_opaque_oid_from_json json = why_node_oid_from_json not_coercion json
and not_opaque_olist_from_json json = why_node_olist_from_json not_coercion json
and not_opaque_id_from_json json = why_node_id_from_json not_coercion json
and not_opaque_list_from_json json = why_node_list_from_json not_coercion json
and connection_opaque_oid_from_json json = why_node_oid_from_json connection_coercion json
and connection_opaque_olist_from_json json = why_node_olist_from_json connection_coercion json
and connection_opaque_id_from_json json = why_node_id_from_json connection_coercion json
and connection_opaque_list_from_json json = why_node_list_from_json connection_coercion json
and label_opaque_oid_from_json json = why_node_oid_from_json label_coercion json
and label_opaque_olist_from_json json = why_node_olist_from_json label_coercion json
and label_opaque_id_from_json json = why_node_id_from_json label_coercion json
and label_opaque_list_from_json json = why_node_list_from_json label_coercion json
and loc_label_opaque_oid_from_json json = why_node_oid_from_json loc_label_coercion json
and loc_label_opaque_olist_from_json json = why_node_olist_from_json loc_label_coercion json
and loc_label_opaque_id_from_json json = why_node_id_from_json loc_label_coercion json
and loc_label_opaque_list_from_json json = why_node_list_from_json loc_label_coercion json
and identifier_opaque_oid_from_json json = why_node_oid_from_json identifier_coercion json
and identifier_opaque_olist_from_json json = why_node_olist_from_json identifier_coercion json
and identifier_opaque_id_from_json json = why_node_id_from_json identifier_coercion json
and identifier_opaque_list_from_json json = why_node_list_from_json identifier_coercion json
and tagged_opaque_oid_from_json json = why_node_oid_from_json tagged_coercion json
and tagged_opaque_olist_from_json json = why_node_olist_from_json tagged_coercion json
and tagged_opaque_id_from_json json = why_node_id_from_json tagged_coercion json
and tagged_opaque_list_from_json json = why_node_list_from_json tagged_coercion json
and call_opaque_oid_from_json json = why_node_oid_from_json call_coercion json
and call_opaque_olist_from_json json = why_node_olist_from_json call_coercion json
and call_opaque_id_from_json json = why_node_id_from_json call_coercion json
and call_opaque_list_from_json json = why_node_list_from_json call_coercion json
and literal_opaque_oid_from_json json = why_node_oid_from_json literal_coercion json
and literal_opaque_olist_from_json json = why_node_olist_from_json literal_coercion json
and literal_opaque_id_from_json json = why_node_id_from_json literal_coercion json
and literal_opaque_list_from_json json = why_node_list_from_json literal_coercion json
and binding_opaque_oid_from_json json = why_node_oid_from_json binding_coercion json
and binding_opaque_olist_from_json json = why_node_olist_from_json binding_coercion json
and binding_opaque_id_from_json json = why_node_id_from_json binding_coercion json
and binding_opaque_list_from_json json = why_node_list_from_json binding_coercion json
and elsif_opaque_oid_from_json json = why_node_oid_from_json elsif_coercion json
and elsif_opaque_olist_from_json json = why_node_olist_from_json elsif_coercion json
and elsif_opaque_id_from_json json = why_node_id_from_json elsif_coercion json
and elsif_opaque_list_from_json json = why_node_list_from_json elsif_coercion json
and epsilon_opaque_oid_from_json json = why_node_oid_from_json epsilon_coercion json
and epsilon_opaque_olist_from_json json = why_node_olist_from_json epsilon_coercion json
and epsilon_opaque_id_from_json json = why_node_id_from_json epsilon_coercion json
and epsilon_opaque_list_from_json json = why_node_list_from_json epsilon_coercion json
and conditional_opaque_oid_from_json json = why_node_oid_from_json conditional_coercion json
and conditional_opaque_olist_from_json json = why_node_olist_from_json conditional_coercion json
and conditional_opaque_id_from_json json = why_node_id_from_json conditional_coercion json
and conditional_opaque_list_from_json json = why_node_list_from_json conditional_coercion json
and integer_constant_opaque_oid_from_json json = why_node_oid_from_json integer_constant_coercion json
and integer_constant_opaque_olist_from_json json = why_node_olist_from_json integer_constant_coercion json
and integer_constant_opaque_id_from_json json = why_node_id_from_json integer_constant_coercion json
and integer_constant_opaque_list_from_json json = why_node_list_from_json integer_constant_coercion json
and range_constant_opaque_oid_from_json json = why_node_oid_from_json range_constant_coercion json
and range_constant_opaque_olist_from_json json = why_node_olist_from_json range_constant_coercion json
and range_constant_opaque_id_from_json json = why_node_id_from_json range_constant_coercion json
and range_constant_opaque_list_from_json json = why_node_list_from_json range_constant_coercion json
and modular_constant_opaque_oid_from_json json = why_node_oid_from_json modular_constant_coercion json
and modular_constant_opaque_olist_from_json json = why_node_olist_from_json modular_constant_coercion json
and modular_constant_opaque_id_from_json json = why_node_id_from_json modular_constant_coercion json
and modular_constant_opaque_list_from_json json = why_node_list_from_json modular_constant_coercion json
and fixed_constant_opaque_oid_from_json json = why_node_oid_from_json fixed_constant_coercion json
and fixed_constant_opaque_olist_from_json json = why_node_olist_from_json fixed_constant_coercion json
and fixed_constant_opaque_id_from_json json = why_node_id_from_json fixed_constant_coercion json
and fixed_constant_opaque_list_from_json json = why_node_list_from_json fixed_constant_coercion json
and real_constant_opaque_oid_from_json json = why_node_oid_from_json real_constant_coercion json
and real_constant_opaque_olist_from_json json = why_node_olist_from_json real_constant_coercion json
and real_constant_opaque_id_from_json json = why_node_id_from_json real_constant_coercion json
and real_constant_opaque_list_from_json json = why_node_list_from_json real_constant_coercion json
and float_constant_opaque_oid_from_json json = why_node_oid_from_json float_constant_coercion json
and float_constant_opaque_olist_from_json json = why_node_olist_from_json float_constant_coercion json
and float_constant_opaque_id_from_json json = why_node_id_from_json float_constant_coercion json
and float_constant_opaque_list_from_json json = why_node_list_from_json float_constant_coercion json
and comment_opaque_oid_from_json json = why_node_oid_from_json comment_coercion json
and comment_opaque_olist_from_json json = why_node_olist_from_json comment_coercion json
and comment_opaque_id_from_json json = why_node_id_from_json comment_coercion json
and comment_opaque_list_from_json json = why_node_list_from_json comment_coercion json
and deref_opaque_oid_from_json json = why_node_oid_from_json deref_coercion json
and deref_opaque_olist_from_json json = why_node_olist_from_json deref_coercion json
and deref_opaque_id_from_json json = why_node_id_from_json deref_coercion json
and deref_opaque_list_from_json json = why_node_list_from_json deref_coercion json
and record_access_opaque_oid_from_json json = why_node_oid_from_json record_access_coercion json
and record_access_opaque_olist_from_json json = why_node_olist_from_json record_access_coercion json
and record_access_opaque_id_from_json json = why_node_id_from_json record_access_coercion json
and record_access_opaque_list_from_json json = why_node_list_from_json record_access_coercion json
and record_update_opaque_oid_from_json json = why_node_oid_from_json record_update_coercion json
and record_update_opaque_olist_from_json json = why_node_olist_from_json record_update_coercion json
and record_update_opaque_id_from_json json = why_node_id_from_json record_update_coercion json
and record_update_opaque_list_from_json json = why_node_list_from_json record_update_coercion json
and record_aggregate_opaque_oid_from_json json = why_node_oid_from_json record_aggregate_coercion json
and record_aggregate_opaque_olist_from_json json = why_node_olist_from_json record_aggregate_coercion json
and record_aggregate_opaque_id_from_json json = why_node_id_from_json record_aggregate_coercion json
and record_aggregate_opaque_list_from_json json = why_node_list_from_json record_aggregate_coercion json
and any_expr_opaque_oid_from_json json = why_node_oid_from_json any_expr_coercion json
and any_expr_opaque_olist_from_json json = why_node_olist_from_json any_expr_coercion json
and any_expr_opaque_id_from_json json = why_node_id_from_json any_expr_coercion json
and any_expr_opaque_list_from_json json = why_node_list_from_json any_expr_coercion json
and assignment_opaque_oid_from_json json = why_node_oid_from_json assignment_coercion json
and assignment_opaque_olist_from_json json = why_node_olist_from_json assignment_coercion json
and assignment_opaque_id_from_json json = why_node_id_from_json assignment_coercion json
and assignment_opaque_list_from_json json = why_node_list_from_json assignment_coercion json
and binding_ref_opaque_oid_from_json json = why_node_oid_from_json binding_ref_coercion json
and binding_ref_opaque_olist_from_json json = why_node_olist_from_json binding_ref_coercion json
and binding_ref_opaque_id_from_json json = why_node_id_from_json binding_ref_coercion json
and binding_ref_opaque_list_from_json json = why_node_list_from_json binding_ref_coercion json
and loop_opaque_oid_from_json json = why_node_oid_from_json loop_coercion json
and loop_opaque_olist_from_json json = why_node_olist_from_json loop_coercion json
and loop_opaque_id_from_json json = why_node_id_from_json loop_coercion json
and loop_opaque_list_from_json json = why_node_list_from_json loop_coercion json
and statement_sequence_opaque_oid_from_json json = why_node_oid_from_json statement_sequence_coercion json
and statement_sequence_opaque_olist_from_json json = why_node_olist_from_json statement_sequence_coercion json
and statement_sequence_opaque_id_from_json json = why_node_id_from_json statement_sequence_coercion json
and statement_sequence_opaque_list_from_json json = why_node_list_from_json statement_sequence_coercion json
and abstract_expr_opaque_oid_from_json json = why_node_oid_from_json abstract_expr_coercion json
and abstract_expr_opaque_olist_from_json json = why_node_olist_from_json abstract_expr_coercion json
and abstract_expr_opaque_id_from_json json = why_node_id_from_json abstract_expr_coercion json
and abstract_expr_opaque_list_from_json json = why_node_list_from_json abstract_expr_coercion json
and assert_opaque_oid_from_json json = why_node_oid_from_json assert_coercion json
and assert_opaque_olist_from_json json = why_node_olist_from_json assert_coercion json
and assert_opaque_id_from_json json = why_node_id_from_json assert_coercion json
and assert_opaque_list_from_json json = why_node_list_from_json assert_coercion json
and raise_opaque_oid_from_json json = why_node_oid_from_json raise_coercion json
and raise_opaque_olist_from_json json = why_node_olist_from_json raise_coercion json
and raise_opaque_id_from_json json = why_node_id_from_json raise_coercion json
and raise_opaque_list_from_json json = why_node_list_from_json raise_coercion json
and try_block_opaque_oid_from_json json = why_node_oid_from_json try_block_coercion json
and try_block_opaque_olist_from_json json = why_node_olist_from_json try_block_coercion json
and try_block_opaque_id_from_json json = why_node_id_from_json try_block_coercion json
and try_block_opaque_list_from_json json = why_node_list_from_json try_block_coercion json
and function_decl_opaque_oid_from_json json = why_node_oid_from_json function_decl_coercion json
and function_decl_opaque_olist_from_json json = why_node_olist_from_json function_decl_coercion json
and function_decl_opaque_id_from_json json = why_node_id_from_json function_decl_coercion json
and function_decl_opaque_list_from_json json = why_node_list_from_json function_decl_coercion json
and axiom_opaque_oid_from_json json = why_node_oid_from_json axiom_coercion json
and axiom_opaque_olist_from_json json = why_node_olist_from_json axiom_coercion json
and axiom_opaque_id_from_json json = why_node_id_from_json axiom_coercion json
and axiom_opaque_list_from_json json = why_node_list_from_json axiom_coercion json
and goal_opaque_oid_from_json json = why_node_oid_from_json goal_coercion json
and goal_opaque_olist_from_json json = why_node_olist_from_json goal_coercion json
and goal_opaque_id_from_json json = why_node_id_from_json goal_coercion json
and goal_opaque_list_from_json json = why_node_list_from_json goal_coercion json
and type_decl_opaque_oid_from_json json = why_node_oid_from_json type_decl_coercion json
and type_decl_opaque_olist_from_json json = why_node_olist_from_json type_decl_coercion json
and type_decl_opaque_id_from_json json = why_node_id_from_json type_decl_coercion json
and type_decl_opaque_list_from_json json = why_node_list_from_json type_decl_coercion json
and global_ref_declaration_opaque_oid_from_json json = why_node_oid_from_json global_ref_declaration_coercion json
and global_ref_declaration_opaque_olist_from_json json = why_node_olist_from_json global_ref_declaration_coercion json
and global_ref_declaration_opaque_id_from_json json = why_node_id_from_json global_ref_declaration_coercion json
and global_ref_declaration_opaque_list_from_json json = why_node_list_from_json global_ref_declaration_coercion json
and namespace_declaration_opaque_oid_from_json json = why_node_oid_from_json namespace_declaration_coercion json
and namespace_declaration_opaque_olist_from_json json = why_node_olist_from_json namespace_declaration_coercion json
and namespace_declaration_opaque_id_from_json json = why_node_id_from_json namespace_declaration_coercion json
and namespace_declaration_opaque_list_from_json json = why_node_list_from_json namespace_declaration_coercion json
and exception_declaration_opaque_oid_from_json json = why_node_oid_from_json exception_declaration_coercion json
and exception_declaration_opaque_olist_from_json json = why_node_olist_from_json exception_declaration_coercion json
and exception_declaration_opaque_id_from_json json = why_node_id_from_json exception_declaration_coercion json
and exception_declaration_opaque_list_from_json json = why_node_list_from_json exception_declaration_coercion json
and meta_declaration_opaque_oid_from_json json = why_node_oid_from_json meta_declaration_coercion json
and meta_declaration_opaque_olist_from_json json = why_node_olist_from_json meta_declaration_coercion json
and meta_declaration_opaque_id_from_json json = why_node_id_from_json meta_declaration_coercion json
and meta_declaration_opaque_list_from_json json = why_node_list_from_json meta_declaration_coercion json
and clone_declaration_opaque_oid_from_json json = why_node_oid_from_json clone_declaration_coercion json
and clone_declaration_opaque_olist_from_json json = why_node_olist_from_json clone_declaration_coercion json
and clone_declaration_opaque_id_from_json json = why_node_id_from_json clone_declaration_coercion json
and clone_declaration_opaque_list_from_json json = why_node_list_from_json clone_declaration_coercion json
and clone_substitution_opaque_oid_from_json json = why_node_oid_from_json clone_substitution_coercion json
and clone_substitution_opaque_olist_from_json json = why_node_olist_from_json clone_substitution_coercion json
and clone_substitution_opaque_id_from_json json = why_node_id_from_json clone_substitution_coercion json
and clone_substitution_opaque_list_from_json json = why_node_list_from_json clone_substitution_coercion json
and include_declaration_opaque_oid_from_json json = why_node_oid_from_json include_declaration_coercion json
and include_declaration_opaque_olist_from_json json = why_node_olist_from_json include_declaration_coercion json
and include_declaration_opaque_id_from_json json = why_node_id_from_json include_declaration_coercion json
and include_declaration_opaque_list_from_json json = why_node_list_from_json include_declaration_coercion json
and theory_declaration_opaque_oid_from_json json = why_node_oid_from_json theory_declaration_coercion json
and theory_declaration_opaque_olist_from_json json = why_node_olist_from_json theory_declaration_coercion json
and theory_declaration_opaque_id_from_json json = why_node_id_from_json theory_declaration_coercion json
and theory_declaration_opaque_list_from_json json = why_node_list_from_json theory_declaration_coercion json
and module_opaque_oid_from_json json = why_node_oid_from_json module_coercion json
and module_opaque_olist_from_json json = why_node_olist_from_json module_coercion json
and module_opaque_id_from_json json = why_node_id_from_json module_coercion json
and module_opaque_list_from_json json = why_node_list_from_json module_coercion json
(* Opaque classes from json *)
and expr_opaque_oid_from_json json = why_node_oid_from_json expr_coercion json
and expr_opaque_olist_from_json json = why_node_olist_from_json expr_coercion json
and expr_opaque_id_from_json json = why_node_id_from_json expr_coercion json
and expr_opaque_list_from_json json = why_node_list_from_json expr_coercion json
and pred_opaque_oid_from_json json = why_node_oid_from_json pred_coercion json
and pred_opaque_olist_from_json json = why_node_olist_from_json pred_coercion json
and pred_opaque_id_from_json json = why_node_id_from_json pred_coercion json
and pred_opaque_list_from_json json = why_node_list_from_json pred_coercion json
and term_opaque_oid_from_json json = why_node_oid_from_json term_coercion json
and term_opaque_olist_from_json json = why_node_olist_from_json term_coercion json
and term_opaque_id_from_json json = why_node_id_from_json term_coercion json
and term_opaque_list_from_json json = why_node_list_from_json term_coercion json
and prog_opaque_oid_from_json json = why_node_oid_from_json prog_coercion json
and prog_opaque_olist_from_json json = why_node_olist_from_json prog_coercion json
and prog_opaque_id_from_json json = why_node_id_from_json prog_coercion json
and prog_opaque_list_from_json json = why_node_list_from_json prog_coercion json
and type_definition_opaque_oid_from_json json = why_node_oid_from_json type_definition_coercion json
and type_definition_opaque_olist_from_json json = why_node_olist_from_json type_definition_coercion json
and type_definition_opaque_id_from_json json = why_node_id_from_json type_definition_coercion json
and type_definition_opaque_list_from_json json = why_node_list_from_json type_definition_coercion json
and declaration_opaque_oid_from_json json = why_node_oid_from_json declaration_coercion json
and declaration_opaque_olist_from_json json = why_node_olist_from_json declaration_coercion json
and declaration_opaque_id_from_json json = why_node_id_from_json declaration_coercion json
and declaration_opaque_list_from_json json = why_node_list_from_json declaration_coercion json
and any_node_opaque_oid_from_json json = why_node_oid_from_json any_node_coercion json
and any_node_opaque_olist_from_json json = why_node_olist_from_json any_node_coercion json
and any_node_opaque_id_from_json json = why_node_id_from_json any_node_coercion json
and any_node_opaque_list_from_json json = why_node_list_from_json any_node_coercion json
(* Decode a whole compilation-unit [file] from its JSON image.
   The expected shape is a one-field object:
     { "theory_declarations": <list of theory declaration nodes> }
   Any other shape is reported through [unexpected_json]. *)
let file_from_json : file from_json = function
  | `Assoc [("theory_declarations", ast_json)] ->
    let theory_declarations = theory_declaration_opaque_olist_from_json ast_json in
    { theory_declarations }
  | json -> unexpected_json "file_from_json" json
end | null | https://raw.githubusercontent.com/AdaCore/why3/97be0f6354f4c5a85896746847192d828cc462d6/plugins/gnat_json/gnat_ast.ml | ocaml | -*- mode: tuareg -*-
(* This package is automatically generated by xtree. Do not edit manually. *)
(* TODO Use appropriate OCaml integer type *)
(* Kind tags *)
(* Class tags *)
(* Kind nodes *)
(* Class nodes *)
(* Tag coercions *)
(* Class coercions *)
(* Opaque tags from json *)
(* Opaque classes from json *)
[@@@warning "-42"]
(* Unbounded non-negative integer carried as a string (presumably the
   literal's textual image; the generated header has a TODO to switch to a
   proper OCaml integer type — confirm against the xtree generator). *)
type uint = Uint of string
(* Universal real literal as a [numerator]/[denominator] rational with a
   [base] and an explicit [negative] sign flag; exact semantics of [base]
   follow GNAT's Ureal representation — TODO confirm. *)
type ureal = Ureal of { numerator : uint ; denominator : uint; base : int; negative : bool }
(* GNAT *)
(* Source location: either absent or a filename/line pair (presumably
   mirroring GNAT's Source_Ptr — see the "GNAT" section marker above). *)
type source_ptr =
| No_location
| Source_ptr of { filename: string; line: int }
(* GNAT node identifier; carries no payload in this OCaml mirror. *)
type node_id = Node_id
(* GNATCOLL.Symbols *)
(* Interned string (GNATCOLL.Symbols mirror); [No_symbol] is the absent value. *)
type symbol = No_symbol | Symbol of string
type symbol_set = symbol list
(* NOTE(review): despite the name, represented as a flat symbol list —
   confirm intended meaning against the xtree generator. *)
type string_sets_set = symbol list
(* Node-set links carry no information in this mirror (unit). *)
type why_node_set = unit
(* Why . *)
(* Optional domain of a Why3 node; [Expr] presumably stands for a
   not-yet-determined domain — confirm against the generator. *)
type odomain =
| Expr
| Term
| Pterm
| Pred
| Prog
(* Concrete domain of a Why3 node: logic term, program term, predicate or
   program expression. *)
type domain =
| Term
| Pterm
| Pred
| Prog
(* Kind of a Why3 type declaration. *)
type type_ =
| Builtin
| Split
| Abstract
(* Boolean literal values. *)
type literal =
| True
| False
(* Why3 container kind: [theory] vs [module]. *)
type theory_type =
| Theory
| Module
(* How a theory/module is brought in by a clone or use. *)
type clone_type =
| Import
| Export
| Clone_default
(* Kind of entity substituted in a clone. NOTE(review): [Namepace] is a
   generated misspelling of "Namespace"; it is part of the public interface
   and must be fixed in the xtree generator, not here. *)
type subst_type =
| Type_subst
| Function
| Predicate
| Namepace
| Lemma
| Goal
(* Logical connectors for [Connection] nodes. *)
type connector =
| Or_else
| And_then
| Imply
| Equivalent
| Or
| And
(* Kind of an assertion statement. *)
type assert_kind =
| Assert
| Check
| Assume
(* Whether an axiom dependency targets a function or a predicate. *)
type axiom_dep_kind =
| Axdep_func
| Axdep_pred
(* A sequence with at least one element: a guaranteed head [elt0] plus a
   possibly empty tail [elts]. *)
type 'a nonempty = { elt0: 'a; elts: 'a list }

(* Flatten a non-empty sequence into an ordinary list, head first. *)
let list_of_nonempty { elt0; elts } = elt0 :: elts
(* One closed polymorphic-variant tag per node kind. These are used as the
   phantom index of ['a why_node] below, so each node kind gets its own
   static type while sharing one representation. *)
type type_tag = [`Type]
type name_tag = [`Name]
type effects_tag = [`Effects]
type raise_effect_tag = [`Raise_effect]
type binder_tag = [`Binder]
type transparent_type_definition_tag = [`Transparent_type_definition]
type record_binder_tag = [`Record_binder]
type record_definition_tag = [`Record_definition]
type range_type_definition_tag = [`Range_type_definition]
type triggers_tag = [`Triggers]
type trigger_tag = [`Trigger]
type axiom_dep_tag = [`Axiom_dep]
type handler_tag = [`Handler]
type field_association_tag = [`Field_association]
type variant_tag = [`Variant]
type variants_tag = [`Variants]
type universal_quantif_tag = [`Universal_quantif]
type existential_quantif_tag = [`Existential_quantif]
type not_tag = [`Not]
type connection_tag = [`Connection]
type label_tag = [`Label]
type loc_label_tag = [`Loc_label]
type identifier_tag = [`Identifier]
type tagged_tag = [`Tagged]
type call_tag = [`Call]
type literal_tag = [`Literal]
type binding_tag = [`Binding]
type elsif_tag = [`Elsif]
type epsilon_tag = [`Epsilon]
type conditional_tag = [`Conditional]
type integer_constant_tag = [`Integer_constant]
type range_constant_tag = [`Range_constant]
type modular_constant_tag = [`Modular_constant]
type fixed_constant_tag = [`Fixed_constant]
type real_constant_tag = [`Real_constant]
type float_constant_tag = [`Float_constant]
type comment_tag = [`Comment]
type deref_tag = [`Deref]
type record_access_tag = [`Record_access]
type record_update_tag = [`Record_update]
type record_aggregate_tag = [`Record_aggregate]
type any_expr_tag = [`Any_expr]
type assignment_tag = [`Assignment]
type binding_ref_tag = [`Binding_ref]
type loop_tag = [`Loop]
type statement_sequence_tag = [`Statement_sequence]
type abstract_expr_tag = [`Abstract_expr]
type assert_tag = [`Assert]
type raise_tag = [`Raise]
type try_block_tag = [`Try_block]
type function_decl_tag = [`Function_decl]
type axiom_tag = [`Axiom]
type goal_tag = [`Goal]
type type_decl_tag = [`Type_decl]
type global_ref_declaration_tag = [`Global_ref_declaration]
type namespace_declaration_tag = [`Namespace_declaration]
type exception_declaration_tag = [`Exception_declaration]
type meta_declaration_tag = [`Meta_declaration]
type clone_declaration_tag = [`Clone_declaration]
type clone_substitution_tag = [`Clone_substitution]
type include_declaration_tag = [`Include_declaration]
type theory_declaration_tag = [`Theory_declaration]
type module_tag = [`Module]
(* Node-class unions: each class tag enumerates the node kinds admitted
   where a value of that class is expected. *)
(* Kinds usable as a general expression. *)
type expr_tag = [
| `Universal_quantif
| `Existential_quantif
| `Not
| `Connection
| `Label
| `Loc_label
| `Identifier
| `Tagged
| `Call
| `Literal
| `Binding
| `Elsif
| `Epsilon
| `Conditional
| `Integer_constant
| `Range_constant
| `Modular_constant
| `Fixed_constant
| `Real_constant
| `Float_constant
| `Comment
| `Deref
| `Record_access
| `Record_update
| `Record_aggregate
| `Any_expr
| `Assignment
| `Binding_ref
| `Loop
| `Statement_sequence
| `Abstract_expr
| `Assert
| `Raise
| `Try_block
]
(* Kinds usable as a predicate. *)
type pred_tag = [
| `Universal_quantif
| `Existential_quantif
| `Not
| `Connection
| `Label
| `Loc_label
| `Identifier
| `Tagged
| `Call
| `Literal
| `Binding
| `Elsif
| `Epsilon
| `Conditional
]
(* Kinds usable as a logic term. *)
type term_tag = [
| `Label
| `Loc_label
| `Identifier
| `Tagged
| `Call
| `Literal
| `Binding
| `Elsif
| `Epsilon
| `Conditional
| `Integer_constant
| `Range_constant
| `Modular_constant
| `Fixed_constant
| `Real_constant
| `Float_constant
| `Comment
| `Deref
| `Record_access
| `Record_update
| `Record_aggregate
]
(* Kinds usable as a program expression. *)
type prog_tag = [
| `Not
| `Connection
| `Label
| `Loc_label
| `Identifier
| `Tagged
| `Call
| `Literal
| `Binding
| `Elsif
| `Epsilon
| `Conditional
| `Integer_constant
| `Range_constant
| `Modular_constant
| `Fixed_constant
| `Real_constant
| `Float_constant
| `Comment
| `Deref
| `Record_access
| `Record_update
| `Record_aggregate
| `Any_expr
| `Assignment
| `Binding_ref
| `Loop
| `Statement_sequence
| `Abstract_expr
| `Assert
| `Raise
| `Try_block
]
(* Kinds usable as a type definition. *)
type type_definition_tag = [
| `Transparent_type_definition
| `Record_binder
| `Record_definition
| `Range_type_definition
]
(* Kinds usable as a top-level declaration inside a theory or namespace. *)
type declaration_tag = [
| `Function_decl
| `Axiom
| `Goal
| `Type_decl
| `Global_ref_declaration
| `Namespace_declaration
| `Exception_declaration
| `Meta_declaration
| `Clone_declaration
]
(* The union of every node kind. *)
type any_node_tag = [
| `Type
| `Name
| `Effects
| `Raise_effect
| `Binder
| `Transparent_type_definition
| `Record_binder
| `Record_definition
| `Range_type_definition
| `Triggers
| `Trigger
| `Axiom_dep
| `Handler
| `Field_association
| `Variant
| `Variants
| `Universal_quantif
| `Existential_quantif
| `Not
| `Connection
| `Label
| `Loc_label
| `Identifier
| `Tagged
| `Call
| `Literal
| `Binding
| `Elsif
| `Epsilon
| `Conditional
| `Integer_constant
| `Range_constant
| `Modular_constant
| `Fixed_constant
| `Real_constant
| `Float_constant
| `Comment
| `Deref
| `Record_access
| `Record_update
| `Record_aggregate
| `Any_expr
| `Assignment
| `Binding_ref
| `Loop
| `Statement_sequence
| `Abstract_expr
| `Assert
| `Raise
| `Try_block
| `Function_decl
| `Axiom
| `Goal
| `Type_decl
| `Global_ref_declaration
| `Namespace_declaration
| `Exception_declaration
| `Meta_declaration
| `Clone_declaration
| `Clone_substitution
| `Include_declaration
| `Theory_declaration
| `Module
]
(* The Why3 AST node, indexed by a polymorphic-variant tag so that a
   [type_tag why_node] can only hold a [Type] description, etc. Bookkeeping
   (ids, domain, links) lives in [why_node_info]; the payload is the GADT
   [why_node_desc], whose every constructor produces an open row
   [[> kind_tag]] so a kind also inhabits each class union it belongs to. *)
type 'a why_node = { info : why_node_info ; desc: 'a why_node_desc }
and why_node_info = {id: int; node: node_id; domain: domain; link: why_node_set; checked: bool}
and 'a why_node_desc =
| Type : {type_kind: type_; name: name_id; is_mutable: bool; relaxed_init: bool} -> [> type_tag] why_node_desc
| Name : {symb: symbol; namespace: symbol; module_: module_oid; infix: bool} -> [> name_tag] why_node_desc
| Effects : {reads: identifier_olist; writes: identifier_olist; raises: raise_effect_olist} -> [> effects_tag] why_node_desc
| Raise_effect : {name: name_id; arg_id: identifier_oid; post: pred_oid} -> [> raise_effect_tag] why_node_desc
| Binder : {name: identifier_oid; arg_type: type_id} -> [> binder_tag] why_node_desc
| Transparent_type_definition : {type_definition: type_id} -> [> transparent_type_definition_tag] why_node_desc
| Record_binder : {name: identifier_oid; arg_type: type_id; labels: symbol_set; is_mutable: bool} -> [> record_binder_tag] why_node_desc
| Record_definition : {fields: record_binder_list} -> [> record_definition_tag] why_node_desc
| Range_type_definition : {first: uint; last: uint} -> [> range_type_definition_tag] why_node_desc
| Triggers : {triggers: trigger_list} -> [> triggers_tag] why_node_desc
| Trigger : {terms: expr_list} -> [> trigger_tag] why_node_desc
| Axiom_dep : {name: identifier_id; kind: axiom_dep_kind} -> [> axiom_dep_tag] why_node_desc
| Handler : {name: name_id; arg_id: identifier_oid; def: prog_id} -> [> handler_tag] why_node_desc
| Field_association : {field: identifier_id; value: expr_id} -> [> field_association_tag] why_node_desc
| Variant : {cmp_op: identifier_id; labels: symbol_set; expr: term_id} -> [> variant_tag] why_node_desc
| Variants : {variants: variant_list} -> [> variants_tag] why_node_desc
| Universal_quantif : {variables: identifier_list; labels: symbol_set; var_type: type_id; triggers: triggers_oid; pred: pred_id} -> [> universal_quantif_tag] why_node_desc
| Existential_quantif : {variables: identifier_list; labels: symbol_set; var_type: type_id; pred: pred_id} -> [> existential_quantif_tag] why_node_desc
| Not : {right: expr_id} -> [> not_tag] why_node_desc
| Connection : {left: expr_id; op: connector; right: expr_id; more_right: expr_olist} -> [> connection_tag] why_node_desc
| Label : {labels: symbol_set; def: expr_id; typ: type_oid} -> [> label_tag] why_node_desc
| Loc_label : {sloc: source_ptr; def: expr_id; marker: symbol} -> [> loc_label_tag] why_node_desc
| Identifier : {name: name_id; typ: type_oid; labels: string_sets_set} -> [> identifier_tag] why_node_desc
| Tagged : {tag: symbol; def: expr_id; typ: type_oid} -> [> tagged_tag] why_node_desc
| Call : {name: identifier_id; args: expr_olist; typ: type_oid} -> [> call_tag] why_node_desc
| Literal : {value: literal; typ: type_oid} -> [> literal_tag] why_node_desc
| Binding : {name: identifier_id; def: expr_id; context: expr_id; typ: type_oid} -> [> binding_tag] why_node_desc
| Elsif : {condition: expr_id; then_part: expr_id; typ: type_oid} -> [> elsif_tag] why_node_desc
| Epsilon : {name: identifier_id; typ: type_id; pred: pred_id} -> [> epsilon_tag] why_node_desc
| Conditional : {condition: expr_id; then_part: expr_id; elsif_parts: expr_olist; else_part: expr_oid; typ: type_oid} -> [> conditional_tag] why_node_desc
| Integer_constant : {value: uint} -> [> integer_constant_tag] why_node_desc
| Range_constant : {value: uint; typ: type_id} -> [> range_constant_tag] why_node_desc
| Modular_constant : {value: uint; typ: type_id} -> [> modular_constant_tag] why_node_desc
| Fixed_constant : {value: uint; typ: type_id} -> [> fixed_constant_tag] why_node_desc
| Real_constant : {value: ureal} -> [> real_constant_tag] why_node_desc
| Float_constant : {value: ureal; typ: type_id} -> [> float_constant_tag] why_node_desc
| Comment : {comment: symbol} -> [> comment_tag] why_node_desc
| Deref : {right: identifier_id; typ: type_id} -> [> deref_tag] why_node_desc
| Record_access : {name: expr_id; field: identifier_id; typ: type_oid} -> [> record_access_tag] why_node_desc
| Record_update : {name: expr_id; updates: field_association_list; typ: type_oid} -> [> record_update_tag] why_node_desc
| Record_aggregate : {associations: field_association_list; typ: type_oid} -> [> record_aggregate_tag] why_node_desc
| Any_expr : {effects: effects_oid; pre: pred_oid; post: pred_oid; return_type: type_id; labels: symbol_set} -> [> any_expr_tag] why_node_desc
| Assignment : {name: identifier_id; value: prog_id; typ: type_id; labels: symbol_set} -> [> assignment_tag] why_node_desc
| Binding_ref : {name: identifier_id; def: prog_id; context: prog_id; typ: type_id} -> [> binding_ref_tag] why_node_desc
| Loop : {code_before: prog_id; invariants: pred_olist; variants: variants_olist; code_after: prog_id} -> [> loop_tag] why_node_desc
| Statement_sequence : {statements: prog_list} -> [> statement_sequence_tag] why_node_desc
| Abstract_expr : {expr: prog_id; post: pred_id; typ: type_oid} -> [> abstract_expr_tag] why_node_desc
| Assert : {pred: pred_id; assert_kind: assert_kind} -> [> assert_tag] why_node_desc
| Raise : {name: name_id; arg: expr_oid; typ: type_oid} -> [> raise_tag] why_node_desc
| Try_block : {prog: prog_id; handler: handler_list; typ: type_oid} -> [> try_block_tag] why_node_desc
| Function_decl : {name: identifier_id; binders: binder_olist; effects: effects_oid; pre: pred_oid; post: pred_oid; return_type: type_oid; def: expr_oid; labels: symbol_set; location: source_ptr} -> [> function_decl_tag] why_node_desc
| Axiom : {name: symbol; def: pred_id; dep: axiom_dep_oid} -> [> axiom_tag] why_node_desc
| Goal : {name: symbol; def: pred_id} -> [> goal_tag] why_node_desc
| Type_decl : {args: identifier_olist; name: name_id; labels: symbol_set; definition: type_definition_oid} -> [> type_decl_tag] why_node_desc
| Global_ref_declaration : {name: identifier_id; ref_type: type_id; labels: symbol_set; location: source_ptr} -> [> global_ref_declaration_tag] why_node_desc
| Namespace_declaration : {declarations: declaration_olist; name: symbol} -> [> namespace_declaration_tag] why_node_desc
| Exception_declaration : {name: name_id; arg: type_oid} -> [> exception_declaration_tag] why_node_desc
| Meta_declaration : {name: symbol; parameter: symbol} -> [> meta_declaration_tag] why_node_desc
| Clone_declaration : {origin: module_id; as_name: symbol; clone_kind: clone_type; substitutions: clone_substitution_olist; theory_kind: theory_type} -> [> clone_declaration_tag] why_node_desc
| Clone_substitution : {kind: subst_type; orig_name: name_id; image: name_id} -> [> clone_substitution_tag] why_node_desc
| Include_declaration : {module_: module_id; kind: theory_type; use_kind: clone_type} -> [> include_declaration_tag] why_node_desc
| Theory_declaration : {declarations: declaration_olist; name: symbol; kind: theory_type; includes: include_declaration_olist; comment: symbol} -> [> theory_declaration_tag] why_node_desc
| Module : {file: symbol; name: symbol} -> [> module_tag] why_node_desc
(* Generic wrappers: for each tag, [_oid] is an optional node, [_olist] a
   possibly empty list, [_id] a mandatory node, [_list] a non-empty list.
   The per-kind and per-class aliases below are mechanical instantiations. *)
and 'a why_node_oid = 'a why_node option
and 'a why_node_olist = 'a why_node list
and 'a why_node_id = 'a why_node
and 'a why_node_list = 'a why_node nonempty
and type_oid = type_tag why_node_oid
and type_olist = type_tag why_node_olist
and type_id = type_tag why_node_id
and type_list = type_tag why_node_list
and name_oid = name_tag why_node_oid
and name_olist = name_tag why_node_olist
and name_id = name_tag why_node_id
and name_list = name_tag why_node_list
and effects_oid = effects_tag why_node_oid
and effects_olist = effects_tag why_node_olist
and effects_id = effects_tag why_node_id
and effects_list = effects_tag why_node_list
and raise_effect_oid = raise_effect_tag why_node_oid
and raise_effect_olist = raise_effect_tag why_node_olist
and raise_effect_id = raise_effect_tag why_node_id
and raise_effect_list = raise_effect_tag why_node_list
and binder_oid = binder_tag why_node_oid
and binder_olist = binder_tag why_node_olist
and binder_id = binder_tag why_node_id
and binder_list = binder_tag why_node_list
and transparent_type_definition_oid = transparent_type_definition_tag why_node_oid
and transparent_type_definition_olist = transparent_type_definition_tag why_node_olist
and transparent_type_definition_id = transparent_type_definition_tag why_node_id
and transparent_type_definition_list = transparent_type_definition_tag why_node_list
and record_binder_oid = record_binder_tag why_node_oid
and record_binder_olist = record_binder_tag why_node_olist
and record_binder_id = record_binder_tag why_node_id
and record_binder_list = record_binder_tag why_node_list
and record_definition_oid = record_definition_tag why_node_oid
and record_definition_olist = record_definition_tag why_node_olist
and record_definition_id = record_definition_tag why_node_id
and record_definition_list = record_definition_tag why_node_list
and range_type_definition_oid = range_type_definition_tag why_node_oid
and range_type_definition_olist = range_type_definition_tag why_node_olist
and range_type_definition_id = range_type_definition_tag why_node_id
and range_type_definition_list = range_type_definition_tag why_node_list
and triggers_oid = triggers_tag why_node_oid
and triggers_olist = triggers_tag why_node_olist
and triggers_id = triggers_tag why_node_id
and triggers_list = triggers_tag why_node_list
and trigger_oid = trigger_tag why_node_oid
and trigger_olist = trigger_tag why_node_olist
and trigger_id = trigger_tag why_node_id
and trigger_list = trigger_tag why_node_list
and axiom_dep_oid = axiom_dep_tag why_node_oid
and axiom_dep_olist = axiom_dep_tag why_node_olist
and axiom_dep_id = axiom_dep_tag why_node_id
and axiom_dep_list = axiom_dep_tag why_node_list
and handler_oid = handler_tag why_node_oid
and handler_olist = handler_tag why_node_olist
and handler_id = handler_tag why_node_id
and handler_list = handler_tag why_node_list
and field_association_oid = field_association_tag why_node_oid
and field_association_olist = field_association_tag why_node_olist
and field_association_id = field_association_tag why_node_id
and field_association_list = field_association_tag why_node_list
and variant_oid = variant_tag why_node_oid
and variant_olist = variant_tag why_node_olist
and variant_id = variant_tag why_node_id
and variant_list = variant_tag why_node_list
and variants_oid = variants_tag why_node_oid
and variants_olist = variants_tag why_node_olist
and variants_id = variants_tag why_node_id
and variants_list = variants_tag why_node_list
and universal_quantif_oid = universal_quantif_tag why_node_oid
and universal_quantif_olist = universal_quantif_tag why_node_olist
and universal_quantif_id = universal_quantif_tag why_node_id
and universal_quantif_list = universal_quantif_tag why_node_list
and existential_quantif_oid = existential_quantif_tag why_node_oid
and existential_quantif_olist = existential_quantif_tag why_node_olist
and existential_quantif_id = existential_quantif_tag why_node_id
and existential_quantif_list = existential_quantif_tag why_node_list
and not_oid = not_tag why_node_oid
and not_olist = not_tag why_node_olist
and not_id = not_tag why_node_id
and not_list = not_tag why_node_list
and connection_oid = connection_tag why_node_oid
and connection_olist = connection_tag why_node_olist
and connection_id = connection_tag why_node_id
and connection_list = connection_tag why_node_list
and label_oid = label_tag why_node_oid
and label_olist = label_tag why_node_olist
and label_id = label_tag why_node_id
and label_list = label_tag why_node_list
and loc_label_oid = loc_label_tag why_node_oid
and loc_label_olist = loc_label_tag why_node_olist
and loc_label_id = loc_label_tag why_node_id
and loc_label_list = loc_label_tag why_node_list
and identifier_oid = identifier_tag why_node_oid
and identifier_olist = identifier_tag why_node_olist
and identifier_id = identifier_tag why_node_id
and identifier_list = identifier_tag why_node_list
and tagged_oid = tagged_tag why_node_oid
and tagged_olist = tagged_tag why_node_olist
and tagged_id = tagged_tag why_node_id
and tagged_list = tagged_tag why_node_list
and call_oid = call_tag why_node_oid
and call_olist = call_tag why_node_olist
and call_id = call_tag why_node_id
and call_list = call_tag why_node_list
and literal_oid = literal_tag why_node_oid
and literal_olist = literal_tag why_node_olist
and literal_id = literal_tag why_node_id
and literal_list = literal_tag why_node_list
and binding_oid = binding_tag why_node_oid
and binding_olist = binding_tag why_node_olist
and binding_id = binding_tag why_node_id
and binding_list = binding_tag why_node_list
and elsif_oid = elsif_tag why_node_oid
and elsif_olist = elsif_tag why_node_olist
and elsif_id = elsif_tag why_node_id
and elsif_list = elsif_tag why_node_list
and epsilon_oid = epsilon_tag why_node_oid
and epsilon_olist = epsilon_tag why_node_olist
and epsilon_id = epsilon_tag why_node_id
and epsilon_list = epsilon_tag why_node_list
and conditional_oid = conditional_tag why_node_oid
and conditional_olist = conditional_tag why_node_olist
and conditional_id = conditional_tag why_node_id
and conditional_list = conditional_tag why_node_list
and integer_constant_oid = integer_constant_tag why_node_oid
and integer_constant_olist = integer_constant_tag why_node_olist
and integer_constant_id = integer_constant_tag why_node_id
and integer_constant_list = integer_constant_tag why_node_list
and range_constant_oid = range_constant_tag why_node_oid
and range_constant_olist = range_constant_tag why_node_olist
and range_constant_id = range_constant_tag why_node_id
and range_constant_list = range_constant_tag why_node_list
and modular_constant_oid = modular_constant_tag why_node_oid
and modular_constant_olist = modular_constant_tag why_node_olist
and modular_constant_id = modular_constant_tag why_node_id
and modular_constant_list = modular_constant_tag why_node_list
and fixed_constant_oid = fixed_constant_tag why_node_oid
and fixed_constant_olist = fixed_constant_tag why_node_olist
and fixed_constant_id = fixed_constant_tag why_node_id
and fixed_constant_list = fixed_constant_tag why_node_list
and real_constant_oid = real_constant_tag why_node_oid
and real_constant_olist = real_constant_tag why_node_olist
and real_constant_id = real_constant_tag why_node_id
and real_constant_list = real_constant_tag why_node_list
and float_constant_oid = float_constant_tag why_node_oid
and float_constant_olist = float_constant_tag why_node_olist
and float_constant_id = float_constant_tag why_node_id
and float_constant_list = float_constant_tag why_node_list
and comment_oid = comment_tag why_node_oid
and comment_olist = comment_tag why_node_olist
and comment_id = comment_tag why_node_id
and comment_list = comment_tag why_node_list
and deref_oid = deref_tag why_node_oid
and deref_olist = deref_tag why_node_olist
and deref_id = deref_tag why_node_id
and deref_list = deref_tag why_node_list
and record_access_oid = record_access_tag why_node_oid
and record_access_olist = record_access_tag why_node_olist
and record_access_id = record_access_tag why_node_id
and record_access_list = record_access_tag why_node_list
and record_update_oid = record_update_tag why_node_oid
and record_update_olist = record_update_tag why_node_olist
and record_update_id = record_update_tag why_node_id
and record_update_list = record_update_tag why_node_list
and record_aggregate_oid = record_aggregate_tag why_node_oid
and record_aggregate_olist = record_aggregate_tag why_node_olist
and record_aggregate_id = record_aggregate_tag why_node_id
and record_aggregate_list = record_aggregate_tag why_node_list
and any_expr_oid = any_expr_tag why_node_oid
and any_expr_olist = any_expr_tag why_node_olist
and any_expr_id = any_expr_tag why_node_id
and any_expr_list = any_expr_tag why_node_list
and assignment_oid = assignment_tag why_node_oid
and assignment_olist = assignment_tag why_node_olist
and assignment_id = assignment_tag why_node_id
and assignment_list = assignment_tag why_node_list
and binding_ref_oid = binding_ref_tag why_node_oid
and binding_ref_olist = binding_ref_tag why_node_olist
and binding_ref_id = binding_ref_tag why_node_id
and binding_ref_list = binding_ref_tag why_node_list
and loop_oid = loop_tag why_node_oid
and loop_olist = loop_tag why_node_olist
and loop_id = loop_tag why_node_id
and loop_list = loop_tag why_node_list
and statement_sequence_oid = statement_sequence_tag why_node_oid
and statement_sequence_olist = statement_sequence_tag why_node_olist
and statement_sequence_id = statement_sequence_tag why_node_id
and statement_sequence_list = statement_sequence_tag why_node_list
and abstract_expr_oid = abstract_expr_tag why_node_oid
and abstract_expr_olist = abstract_expr_tag why_node_olist
and abstract_expr_id = abstract_expr_tag why_node_id
and abstract_expr_list = abstract_expr_tag why_node_list
and assert_oid = assert_tag why_node_oid
and assert_olist = assert_tag why_node_olist
and assert_id = assert_tag why_node_id
and assert_list = assert_tag why_node_list
and raise_oid = raise_tag why_node_oid
and raise_olist = raise_tag why_node_olist
and raise_id = raise_tag why_node_id
and raise_list = raise_tag why_node_list
and try_block_oid = try_block_tag why_node_oid
and try_block_olist = try_block_tag why_node_olist
and try_block_id = try_block_tag why_node_id
and try_block_list = try_block_tag why_node_list
and function_decl_oid = function_decl_tag why_node_oid
and function_decl_olist = function_decl_tag why_node_olist
and function_decl_id = function_decl_tag why_node_id
and function_decl_list = function_decl_tag why_node_list
and axiom_oid = axiom_tag why_node_oid
and axiom_olist = axiom_tag why_node_olist
and axiom_id = axiom_tag why_node_id
and axiom_list = axiom_tag why_node_list
and goal_oid = goal_tag why_node_oid
and goal_olist = goal_tag why_node_olist
and goal_id = goal_tag why_node_id
and goal_list = goal_tag why_node_list
and type_decl_oid = type_decl_tag why_node_oid
and type_decl_olist = type_decl_tag why_node_olist
and type_decl_id = type_decl_tag why_node_id
and type_decl_list = type_decl_tag why_node_list
and global_ref_declaration_oid = global_ref_declaration_tag why_node_oid
and global_ref_declaration_olist = global_ref_declaration_tag why_node_olist
and global_ref_declaration_id = global_ref_declaration_tag why_node_id
and global_ref_declaration_list = global_ref_declaration_tag why_node_list
and namespace_declaration_oid = namespace_declaration_tag why_node_oid
and namespace_declaration_olist = namespace_declaration_tag why_node_olist
and namespace_declaration_id = namespace_declaration_tag why_node_id
and namespace_declaration_list = namespace_declaration_tag why_node_list
and exception_declaration_oid = exception_declaration_tag why_node_oid
and exception_declaration_olist = exception_declaration_tag why_node_olist
and exception_declaration_id = exception_declaration_tag why_node_id
and exception_declaration_list = exception_declaration_tag why_node_list
and meta_declaration_oid = meta_declaration_tag why_node_oid
and meta_declaration_olist = meta_declaration_tag why_node_olist
and meta_declaration_id = meta_declaration_tag why_node_id
and meta_declaration_list = meta_declaration_tag why_node_list
and clone_declaration_oid = clone_declaration_tag why_node_oid
and clone_declaration_olist = clone_declaration_tag why_node_olist
and clone_declaration_id = clone_declaration_tag why_node_id
and clone_declaration_list = clone_declaration_tag why_node_list
and clone_substitution_oid = clone_substitution_tag why_node_oid
and clone_substitution_olist = clone_substitution_tag why_node_olist
and clone_substitution_id = clone_substitution_tag why_node_id
and clone_substitution_list = clone_substitution_tag why_node_list
and include_declaration_oid = include_declaration_tag why_node_oid
and include_declaration_olist = include_declaration_tag why_node_olist
and include_declaration_id = include_declaration_tag why_node_id
and include_declaration_list = include_declaration_tag why_node_list
and theory_declaration_oid = theory_declaration_tag why_node_oid
and theory_declaration_olist = theory_declaration_tag why_node_olist
and theory_declaration_id = theory_declaration_tag why_node_id
and theory_declaration_list = theory_declaration_tag why_node_list
and module_oid = module_tag why_node_oid
and module_olist = module_tag why_node_olist
and module_id = module_tag why_node_id
and module_list = module_tag why_node_list
(* Class-level aliases: one quadruple per node-class union. *)
and expr_oid = expr_tag why_node_oid
and expr_olist = expr_tag why_node_olist
and expr_id = expr_tag why_node_id
and expr_list = expr_tag why_node_list
and pred_oid = pred_tag why_node_oid
and pred_olist = pred_tag why_node_olist
and pred_id = pred_tag why_node_id
and pred_list = pred_tag why_node_list
and term_oid = term_tag why_node_oid
and term_olist = term_tag why_node_olist
and term_id = term_tag why_node_id
and term_list = term_tag why_node_list
and prog_oid = prog_tag why_node_oid
and prog_olist = prog_tag why_node_olist
and prog_id = prog_tag why_node_id
and prog_list = prog_tag why_node_list
and type_definition_oid = type_definition_tag why_node_oid
and type_definition_olist = type_definition_tag why_node_olist
and type_definition_id = type_definition_tag why_node_id
and type_definition_list = type_definition_tag why_node_list
and declaration_oid = declaration_tag why_node_oid
and declaration_olist = declaration_tag why_node_olist
and declaration_id = declaration_tag why_node_id
and declaration_list = declaration_tag why_node_list
and any_node_oid = any_node_tag why_node_oid
and any_node_olist = any_node_tag why_node_olist
and any_node_id = any_node_tag why_node_id
and any_node_list = any_node_tag why_node_list
(* A whole compilation unit: the ordered list of theory declarations it contains. *)
type file = { theory_declarations: theory_declaration_olist }
(* Narrow an arbitrary AST node to a [Type] node.
   The [Type _ as d] pattern refines the GADT index, so the rebuilt
   record is well-typed as [type_tag why_node]; every other
   constructor is rejected with [Invalid_argument "type_coercion"]. *)
let type_coercion (node : any_node_tag why_node) : type_tag why_node =
match node.desc with
| Type _ as d -> { info = node.info; desc = d }
| _ -> invalid_arg "type_coercion"
(* Generated single-constructor narrowing functions.
   Each [X_coercion] accepts an [any_node_tag why_node], keeps the shared
   [info] record, and re-wraps the description when it matches the one
   constructor belonging to kind [X]; the [as desc] alias refines the GADT
   index so the result is typed [X_tag why_node].  Any other constructor
   raises [Invalid_argument "X_coercion"].  The bodies are intentionally
   uniform — do not edit them by hand; regenerate instead. *)
let name_coercion (node : any_node_tag why_node) : name_tag why_node =
match node.desc with
| Name _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "name_coercion"
let effects_coercion (node : any_node_tag why_node) : effects_tag why_node =
match node.desc with
| Effects _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "effects_coercion"
let raise_effect_coercion (node : any_node_tag why_node) : raise_effect_tag why_node =
match node.desc with
| Raise_effect _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "raise_effect_coercion"
let binder_coercion (node : any_node_tag why_node) : binder_tag why_node =
match node.desc with
| Binder _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "binder_coercion"
let transparent_type_definition_coercion (node : any_node_tag why_node) : transparent_type_definition_tag why_node =
match node.desc with
| Transparent_type_definition _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "transparent_type_definition_coercion"
let record_binder_coercion (node : any_node_tag why_node) : record_binder_tag why_node =
match node.desc with
| Record_binder _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "record_binder_coercion"
let record_definition_coercion (node : any_node_tag why_node) : record_definition_tag why_node =
match node.desc with
| Record_definition _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "record_definition_coercion"
let range_type_definition_coercion (node : any_node_tag why_node) : range_type_definition_tag why_node =
match node.desc with
| Range_type_definition _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "range_type_definition_coercion"
let triggers_coercion (node : any_node_tag why_node) : triggers_tag why_node =
match node.desc with
| Triggers _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "triggers_coercion"
let trigger_coercion (node : any_node_tag why_node) : trigger_tag why_node =
match node.desc with
| Trigger _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "trigger_coercion"
let axiom_dep_coercion (node : any_node_tag why_node) : axiom_dep_tag why_node =
match node.desc with
| Axiom_dep _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "axiom_dep_coercion"
let handler_coercion (node : any_node_tag why_node) : handler_tag why_node =
match node.desc with
| Handler _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "handler_coercion"
let field_association_coercion (node : any_node_tag why_node) : field_association_tag why_node =
match node.desc with
| Field_association _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "field_association_coercion"
let variant_coercion (node : any_node_tag why_node) : variant_tag why_node =
match node.desc with
| Variant _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "variant_coercion"
let variants_coercion (node : any_node_tag why_node) : variants_tag why_node =
match node.desc with
| Variants _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "variants_coercion"
let universal_quantif_coercion (node : any_node_tag why_node) : universal_quantif_tag why_node =
match node.desc with
| Universal_quantif _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "universal_quantif_coercion"
let existential_quantif_coercion (node : any_node_tag why_node) : existential_quantif_tag why_node =
match node.desc with
| Existential_quantif _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "existential_quantif_coercion"
let not_coercion (node : any_node_tag why_node) : not_tag why_node =
match node.desc with
| Not _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "not_coercion"
let connection_coercion (node : any_node_tag why_node) : connection_tag why_node =
match node.desc with
| Connection _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "connection_coercion"
let label_coercion (node : any_node_tag why_node) : label_tag why_node =
match node.desc with
| Label _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "label_coercion"
let loc_label_coercion (node : any_node_tag why_node) : loc_label_tag why_node =
match node.desc with
| Loc_label _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "loc_label_coercion"
let identifier_coercion (node : any_node_tag why_node) : identifier_tag why_node =
match node.desc with
| Identifier _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "identifier_coercion"
let tagged_coercion (node : any_node_tag why_node) : tagged_tag why_node =
match node.desc with
| Tagged _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "tagged_coercion"
let call_coercion (node : any_node_tag why_node) : call_tag why_node =
match node.desc with
| Call _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "call_coercion"
let literal_coercion (node : any_node_tag why_node) : literal_tag why_node =
match node.desc with
| Literal _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "literal_coercion"
let binding_coercion (node : any_node_tag why_node) : binding_tag why_node =
match node.desc with
| Binding _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "binding_coercion"
let elsif_coercion (node : any_node_tag why_node) : elsif_tag why_node =
match node.desc with
| Elsif _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "elsif_coercion"
let epsilon_coercion (node : any_node_tag why_node) : epsilon_tag why_node =
match node.desc with
| Epsilon _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "epsilon_coercion"
let conditional_coercion (node : any_node_tag why_node) : conditional_tag why_node =
match node.desc with
| Conditional _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "conditional_coercion"
let integer_constant_coercion (node : any_node_tag why_node) : integer_constant_tag why_node =
match node.desc with
| Integer_constant _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "integer_constant_coercion"
let range_constant_coercion (node : any_node_tag why_node) : range_constant_tag why_node =
match node.desc with
| Range_constant _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "range_constant_coercion"
let modular_constant_coercion (node : any_node_tag why_node) : modular_constant_tag why_node =
match node.desc with
| Modular_constant _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "modular_constant_coercion"
let fixed_constant_coercion (node : any_node_tag why_node) : fixed_constant_tag why_node =
match node.desc with
| Fixed_constant _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "fixed_constant_coercion"
let real_constant_coercion (node : any_node_tag why_node) : real_constant_tag why_node =
match node.desc with
| Real_constant _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "real_constant_coercion"
let float_constant_coercion (node : any_node_tag why_node) : float_constant_tag why_node =
match node.desc with
| Float_constant _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "float_constant_coercion"
let comment_coercion (node : any_node_tag why_node) : comment_tag why_node =
match node.desc with
| Comment _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "comment_coercion"
let deref_coercion (node : any_node_tag why_node) : deref_tag why_node =
match node.desc with
| Deref _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "deref_coercion"
let record_access_coercion (node : any_node_tag why_node) : record_access_tag why_node =
match node.desc with
| Record_access _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "record_access_coercion"
let record_update_coercion (node : any_node_tag why_node) : record_update_tag why_node =
match node.desc with
| Record_update _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "record_update_coercion"
let record_aggregate_coercion (node : any_node_tag why_node) : record_aggregate_tag why_node =
match node.desc with
| Record_aggregate _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "record_aggregate_coercion"
let any_expr_coercion (node : any_node_tag why_node) : any_expr_tag why_node =
match node.desc with
| Any_expr _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "any_expr_coercion"
let assignment_coercion (node : any_node_tag why_node) : assignment_tag why_node =
match node.desc with
| Assignment _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "assignment_coercion"
let binding_ref_coercion (node : any_node_tag why_node) : binding_ref_tag why_node =
match node.desc with
| Binding_ref _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "binding_ref_coercion"
let loop_coercion (node : any_node_tag why_node) : loop_tag why_node =
match node.desc with
| Loop _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "loop_coercion"
let statement_sequence_coercion (node : any_node_tag why_node) : statement_sequence_tag why_node =
match node.desc with
| Statement_sequence _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "statement_sequence_coercion"
let abstract_expr_coercion (node : any_node_tag why_node) : abstract_expr_tag why_node =
match node.desc with
| Abstract_expr _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "abstract_expr_coercion"
let assert_coercion (node : any_node_tag why_node) : assert_tag why_node =
match node.desc with
| Assert _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "assert_coercion"
let raise_coercion (node : any_node_tag why_node) : raise_tag why_node =
match node.desc with
| Raise _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "raise_coercion"
let try_block_coercion (node : any_node_tag why_node) : try_block_tag why_node =
match node.desc with
| Try_block _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "try_block_coercion"
let function_decl_coercion (node : any_node_tag why_node) : function_decl_tag why_node =
match node.desc with
| Function_decl _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "function_decl_coercion"
let axiom_coercion (node : any_node_tag why_node) : axiom_tag why_node =
match node.desc with
| Axiom _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "axiom_coercion"
let goal_coercion (node : any_node_tag why_node) : goal_tag why_node =
match node.desc with
| Goal _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "goal_coercion"
let type_decl_coercion (node : any_node_tag why_node) : type_decl_tag why_node =
match node.desc with
| Type_decl _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "type_decl_coercion"
let global_ref_declaration_coercion (node : any_node_tag why_node) : global_ref_declaration_tag why_node =
match node.desc with
| Global_ref_declaration _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "global_ref_declaration_coercion"
let namespace_declaration_coercion (node : any_node_tag why_node) : namespace_declaration_tag why_node =
match node.desc with
| Namespace_declaration _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "namespace_declaration_coercion"
let exception_declaration_coercion (node : any_node_tag why_node) : exception_declaration_tag why_node =
match node.desc with
| Exception_declaration _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "exception_declaration_coercion"
let meta_declaration_coercion (node : any_node_tag why_node) : meta_declaration_tag why_node =
match node.desc with
| Meta_declaration _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "meta_declaration_coercion"
let clone_declaration_coercion (node : any_node_tag why_node) : clone_declaration_tag why_node =
match node.desc with
| Clone_declaration _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "clone_declaration_coercion"
let clone_substitution_coercion (node : any_node_tag why_node) : clone_substitution_tag why_node =
match node.desc with
| Clone_substitution _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "clone_substitution_coercion"
let include_declaration_coercion (node : any_node_tag why_node) : include_declaration_tag why_node =
match node.desc with
| Include_declaration _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "include_declaration_coercion"
let theory_declaration_coercion (node : any_node_tag why_node) : theory_declaration_tag why_node =
match node.desc with
| Theory_declaration _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "theory_declaration_coercion"
let module_coercion (node : any_node_tag why_node) : module_tag why_node =
match node.desc with
| Module _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "module_coercion"
(* Narrow a node to the [expr] class: every constructor valid in the
   expression domain (quantifiers, connectives, labels, constants and
   program constructs) is re-wrapped with the refined GADT index; any
   other constructor raises [Invalid_argument "expr_coercion"].
   Warning 11 (unused match case) is silenced because the generated
   catch-all may be redundant for some instantiations of the GADT. *)
let expr_coercion (node : any_node_tag why_node) : expr_tag why_node =
match node.desc with
| Universal_quantif _ as desc -> {info=node.info; desc}
| Existential_quantif _ as desc -> {info=node.info; desc}
| Not _ as desc -> {info=node.info; desc}
| Connection _ as desc -> {info=node.info; desc}
| Label _ as desc -> {info=node.info; desc}
| Loc_label _ as desc -> {info=node.info; desc}
| Identifier _ as desc -> {info=node.info; desc}
| Tagged _ as desc -> {info=node.info; desc}
| Call _ as desc -> {info=node.info; desc}
| Literal _ as desc -> {info=node.info; desc}
| Binding _ as desc -> {info=node.info; desc}
| Elsif _ as desc -> {info=node.info; desc}
| Epsilon _ as desc -> {info=node.info; desc}
| Conditional _ as desc -> {info=node.info; desc}
| Integer_constant _ as desc -> {info=node.info; desc}
| Range_constant _ as desc -> {info=node.info; desc}
| Modular_constant _ as desc -> {info=node.info; desc}
| Fixed_constant _ as desc -> {info=node.info; desc}
| Real_constant _ as desc -> {info=node.info; desc}
| Float_constant _ as desc -> {info=node.info; desc}
| Comment _ as desc -> {info=node.info; desc}
| Deref _ as desc -> {info=node.info; desc}
| Record_access _ as desc -> {info=node.info; desc}
| Record_update _ as desc -> {info=node.info; desc}
| Record_aggregate _ as desc -> {info=node.info; desc}
| Any_expr _ as desc -> {info=node.info; desc}
| Assignment _ as desc -> {info=node.info; desc}
| Binding_ref _ as desc -> {info=node.info; desc}
| Loop _ as desc -> {info=node.info; desc}
| Statement_sequence _ as desc -> {info=node.info; desc}
| Abstract_expr _ as desc -> {info=node.info; desc}
| Assert _ as desc -> {info=node.info; desc}
| Raise _ as desc -> {info=node.info; desc}
| Try_block _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "expr_coercion"
[@@warning "-11"]
(* Narrow a node to the [pred] (predicate) class: quantifiers, logical
   connectives, labels and the binding/conditional forms.  Anything
   else raises [Invalid_argument "pred_coercion"].  Warning 11 is
   silenced for the possibly-redundant generated catch-all. *)
let pred_coercion (node : any_node_tag why_node) : pred_tag why_node =
match node.desc with
| Universal_quantif _ as desc -> {info=node.info; desc}
| Existential_quantif _ as desc -> {info=node.info; desc}
| Not _ as desc -> {info=node.info; desc}
| Connection _ as desc -> {info=node.info; desc}
| Label _ as desc -> {info=node.info; desc}
| Loc_label _ as desc -> {info=node.info; desc}
| Identifier _ as desc -> {info=node.info; desc}
| Tagged _ as desc -> {info=node.info; desc}
| Call _ as desc -> {info=node.info; desc}
| Literal _ as desc -> {info=node.info; desc}
| Binding _ as desc -> {info=node.info; desc}
| Elsif _ as desc -> {info=node.info; desc}
| Epsilon _ as desc -> {info=node.info; desc}
| Conditional _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "pred_coercion"
[@@warning "-11"]
(* Narrow a node to the [term] class: labels, identifiers, calls,
   constants and record operations — but no quantifiers, connectives
   or program constructs.  Anything else raises
   [Invalid_argument "term_coercion"].  Warning 11 is silenced for the
   possibly-redundant generated catch-all. *)
let term_coercion (node : any_node_tag why_node) : term_tag why_node =
match node.desc with
| Label _ as desc -> {info=node.info; desc}
| Loc_label _ as desc -> {info=node.info; desc}
| Identifier _ as desc -> {info=node.info; desc}
| Tagged _ as desc -> {info=node.info; desc}
| Call _ as desc -> {info=node.info; desc}
| Literal _ as desc -> {info=node.info; desc}
| Binding _ as desc -> {info=node.info; desc}
| Elsif _ as desc -> {info=node.info; desc}
| Epsilon _ as desc -> {info=node.info; desc}
| Conditional _ as desc -> {info=node.info; desc}
| Integer_constant _ as desc -> {info=node.info; desc}
| Range_constant _ as desc -> {info=node.info; desc}
| Modular_constant _ as desc -> {info=node.info; desc}
| Fixed_constant _ as desc -> {info=node.info; desc}
| Real_constant _ as desc -> {info=node.info; desc}
| Float_constant _ as desc -> {info=node.info; desc}
| Comment _ as desc -> {info=node.info; desc}
| Deref _ as desc -> {info=node.info; desc}
| Record_access _ as desc -> {info=node.info; desc}
| Record_update _ as desc -> {info=node.info; desc}
| Record_aggregate _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "term_coercion"
[@@warning "-11"]
(* Narrow a node to the [prog] (program expression) class: like [expr]
   but without the quantifier forms.  Anything else raises
   [Invalid_argument "prog_coercion"].  Warning 11 is silenced for the
   possibly-redundant generated catch-all. *)
let prog_coercion (node : any_node_tag why_node) : prog_tag why_node =
match node.desc with
| Not _ as desc -> {info=node.info; desc}
| Connection _ as desc -> {info=node.info; desc}
| Label _ as desc -> {info=node.info; desc}
| Loc_label _ as desc -> {info=node.info; desc}
| Identifier _ as desc -> {info=node.info; desc}
| Tagged _ as desc -> {info=node.info; desc}
| Call _ as desc -> {info=node.info; desc}
| Literal _ as desc -> {info=node.info; desc}
| Binding _ as desc -> {info=node.info; desc}
| Elsif _ as desc -> {info=node.info; desc}
| Epsilon _ as desc -> {info=node.info; desc}
| Conditional _ as desc -> {info=node.info; desc}
| Integer_constant _ as desc -> {info=node.info; desc}
| Range_constant _ as desc -> {info=node.info; desc}
| Modular_constant _ as desc -> {info=node.info; desc}
| Fixed_constant _ as desc -> {info=node.info; desc}
| Real_constant _ as desc -> {info=node.info; desc}
| Float_constant _ as desc -> {info=node.info; desc}
| Comment _ as desc -> {info=node.info; desc}
| Deref _ as desc -> {info=node.info; desc}
| Record_access _ as desc -> {info=node.info; desc}
| Record_update _ as desc -> {info=node.info; desc}
| Record_aggregate _ as desc -> {info=node.info; desc}
| Any_expr _ as desc -> {info=node.info; desc}
| Assignment _ as desc -> {info=node.info; desc}
| Binding_ref _ as desc -> {info=node.info; desc}
| Loop _ as desc -> {info=node.info; desc}
| Statement_sequence _ as desc -> {info=node.info; desc}
| Abstract_expr _ as desc -> {info=node.info; desc}
| Assert _ as desc -> {info=node.info; desc}
| Raise _ as desc -> {info=node.info; desc}
| Try_block _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "prog_coercion"
[@@warning "-11"]
(* Narrow a node to the [type_definition] class (transparent, record
   binder, record and range definitions).  Anything else raises
   [Invalid_argument "type_definition_coercion"].  Warning 11 is
   silenced for the possibly-redundant generated catch-all. *)
let type_definition_coercion (node : any_node_tag why_node) : type_definition_tag why_node =
match node.desc with
| Transparent_type_definition _ as desc -> {info=node.info; desc}
| Record_binder _ as desc -> {info=node.info; desc}
| Record_definition _ as desc -> {info=node.info; desc}
| Range_type_definition _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "type_definition_coercion"
[@@warning "-11"]
(* Narrow a node to the [declaration] class (top-level items of a
   theory/module).  Anything else raises
   [Invalid_argument "declaration_coercion"].  Warning 11 is silenced
   for the possibly-redundant generated catch-all. *)
let declaration_coercion (node : any_node_tag why_node) : declaration_tag why_node =
match node.desc with
| Function_decl _ as desc -> {info=node.info; desc}
| Axiom _ as desc -> {info=node.info; desc}
| Goal _ as desc -> {info=node.info; desc}
| Type_decl _ as desc -> {info=node.info; desc}
| Global_ref_declaration _ as desc -> {info=node.info; desc}
| Namespace_declaration _ as desc -> {info=node.info; desc}
| Exception_declaration _ as desc -> {info=node.info; desc}
| Meta_declaration _ as desc -> {info=node.info; desc}
| Clone_declaration _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "declaration_coercion"
[@@warning "-11"]
(* "Coercion" of a node to itself: re-wraps the description through a
   match over every constructor.  NOTE(review): the enumeration appears
   to cover every constructor visible in this file, which would make
   this an identity re-wrap and the catch-all unreachable (hence the
   suppressed warning 11) — confirm against the full GADT declaration
   before relying on that.  Kept as generated. *)
let any_node_coercion (node : any_node_tag why_node) : any_node_tag why_node =
match node.desc with
| Type _ as desc -> {info=node.info; desc}
| Name _ as desc -> {info=node.info; desc}
| Effects _ as desc -> {info=node.info; desc}
| Raise_effect _ as desc -> {info=node.info; desc}
| Binder _ as desc -> {info=node.info; desc}
| Transparent_type_definition _ as desc -> {info=node.info; desc}
| Record_binder _ as desc -> {info=node.info; desc}
| Record_definition _ as desc -> {info=node.info; desc}
| Range_type_definition _ as desc -> {info=node.info; desc}
| Triggers _ as desc -> {info=node.info; desc}
| Trigger _ as desc -> {info=node.info; desc}
| Axiom_dep _ as desc -> {info=node.info; desc}
| Handler _ as desc -> {info=node.info; desc}
| Field_association _ as desc -> {info=node.info; desc}
| Variant _ as desc -> {info=node.info; desc}
| Variants _ as desc -> {info=node.info; desc}
| Universal_quantif _ as desc -> {info=node.info; desc}
| Existential_quantif _ as desc -> {info=node.info; desc}
| Not _ as desc -> {info=node.info; desc}
| Connection _ as desc -> {info=node.info; desc}
| Label _ as desc -> {info=node.info; desc}
| Loc_label _ as desc -> {info=node.info; desc}
| Identifier _ as desc -> {info=node.info; desc}
| Tagged _ as desc -> {info=node.info; desc}
| Call _ as desc -> {info=node.info; desc}
| Literal _ as desc -> {info=node.info; desc}
| Binding _ as desc -> {info=node.info; desc}
| Elsif _ as desc -> {info=node.info; desc}
| Epsilon _ as desc -> {info=node.info; desc}
| Conditional _ as desc -> {info=node.info; desc}
| Integer_constant _ as desc -> {info=node.info; desc}
| Range_constant _ as desc -> {info=node.info; desc}
| Modular_constant _ as desc -> {info=node.info; desc}
| Fixed_constant _ as desc -> {info=node.info; desc}
| Real_constant _ as desc -> {info=node.info; desc}
| Float_constant _ as desc -> {info=node.info; desc}
| Comment _ as desc -> {info=node.info; desc}
| Deref _ as desc -> {info=node.info; desc}
| Record_access _ as desc -> {info=node.info; desc}
| Record_update _ as desc -> {info=node.info; desc}
| Record_aggregate _ as desc -> {info=node.info; desc}
| Any_expr _ as desc -> {info=node.info; desc}
| Assignment _ as desc -> {info=node.info; desc}
| Binding_ref _ as desc -> {info=node.info; desc}
| Loop _ as desc -> {info=node.info; desc}
| Statement_sequence _ as desc -> {info=node.info; desc}
| Abstract_expr _ as desc -> {info=node.info; desc}
| Assert _ as desc -> {info=node.info; desc}
| Raise _ as desc -> {info=node.info; desc}
| Try_block _ as desc -> {info=node.info; desc}
| Function_decl _ as desc -> {info=node.info; desc}
| Axiom _ as desc -> {info=node.info; desc}
| Goal _ as desc -> {info=node.info; desc}
| Type_decl _ as desc -> {info=node.info; desc}
| Global_ref_declaration _ as desc -> {info=node.info; desc}
| Namespace_declaration _ as desc -> {info=node.info; desc}
| Exception_declaration _ as desc -> {info=node.info; desc}
| Meta_declaration _ as desc -> {info=node.info; desc}
| Clone_declaration _ as desc -> {info=node.info; desc}
| Clone_substitution _ as desc -> {info=node.info; desc}
| Include_declaration _ as desc -> {info=node.info; desc}
| Theory_declaration _ as desc -> {info=node.info; desc}
| Module _ as desc -> {info=node.info; desc}
| _ -> invalid_arg "any_node_coercion"
[@@warning "-11"]
module From_json = struct
(* Polymorphic-variant JSON tree consumed by the decoders below.
   NOTE(review): the constructor set matches Yojson's extended tree
   (with `Intlit/`Tuple/`Variant) — confirm which JSON library
   produces it. *)
type t = [
| `Null
| `Bool of bool
| `Int of int
| `Intlit of string
| `Float of float
| `String of string
| `Assoc of (string * t) list
| `List of t list
| `Tuple of t list
| `Variant of string * t option
]
(* A decoder turning a JSON tree into a value of type ['a]. *)
type 'a from_json = t -> 'a
(* Raised on a shape mismatch; carries the expected-type name and the
   offending JSON fragment. *)
exception Unexpected_Json of string * t
(* Uniform failure helper used by every decoder below. *)
let unexpected_json type_str json =
raise (Unexpected_Json (type_str, json))
(* Decoders for the primitive/leaf types of the node tree.  Each one
   accepts exactly the JSON shape the generator emits and calls
   [unexpected_json] (raising [Unexpected_Json]) on anything else. *)
let boolean_from_json : bool from_json = function
| `Bool b -> b
| json -> unexpected_json "bool" json
let int_from_json : int from_json = function
| `Int i -> i
| json -> unexpected_json "int" json
(* Unbounded naturals travel as decimal strings to avoid overflow. *)
let uint_from_json : uint from_json = function
| `String s -> Uint s
| json -> unexpected_json "uint" json
(* A universal real: [numerator / denominator] in [base], negated when
   [negative] holds — encoded as a 4-element JSON list. *)
let ureal_from_json : ureal from_json = function
| `List [numerator; denominator; base; negative] ->
Ureal {
numerator = uint_from_json numerator;
denominator = uint_from_json denominator;
base = int_from_json base;
negative = boolean_from_json negative;
}
| json ->
unexpected_json "ureal" json
(* [`Null] means "no source location"; otherwise [filename, line]. *)
let source_ptr_from_json : source_ptr from_json = function
| `Null -> No_location
| `List [`String filename; `Int line] ->
Source_ptr {filename; line}
| json -> unexpected_json "source_ptr" json
(* Front-end node ids are serialized as an opaque placeholder string. *)
let node_id_from_json : node_id from_json = function
| `String "<Node_Id>" -> Node_id
| json -> unexpected_json "node_id" json
(* Node sets are likewise a placeholder; the decoded value is unit. *)
let why_node_set_from_json : why_node_set from_json = function
| `String "<Why_Node_Set>" -> ()
| json -> unexpected_json "why_node_set" json
(* The empty string denotes the absent symbol. *)
let symbol_from_json : symbol from_json = function
| `String "" -> No_symbol
| `String s -> Symbol s
| json -> unexpected_json "symbol" json
let symbol_set_from_json : symbol_set from_json = function
| `List l -> List.map symbol_from_json l
| json -> unexpected_json "symbol_set" json
(* NOTE(review): identical body to [symbol_set_from_json] — it decodes
   each element as a plain symbol, not a nested set.  Confirm that
   [string_sets_set] really is a flat symbol list. *)
let string_sets_set_from_json : string_sets_set from_json = function
| `List l -> List.map symbol_from_json l
| json -> unexpected_json "string_sets_set" json
(* Decoders for the enumeration types defined in Why. *)
(* Decoders for enumeration types: each integer code is mapped to its
   constructor.  NOTE(review): [domain] codes start at 1 while every
   other enumeration starts at 0 — matches the generated encoder, but
   worth confirming when regenerating. *)
let domain_from_json : domain from_json = function
| `Int 1 -> Term
| `Int 2 -> Pterm
| `Int 3 -> Pred
| `Int 4 -> Prog
| json -> unexpected_json "domain" json
let type_from_json : type_ from_json = function
| `Int 0 -> Builtin
| `Int 1 -> Split
| `Int 2 -> Abstract
| json -> unexpected_json "type_" json
let literal_from_json : literal from_json = function
| `Int 0 -> True
| `Int 1 -> False
| json -> unexpected_json "literal" json
let theory_type_from_json : theory_type from_json = function
| `Int 0 -> Theory
| `Int 1 -> Module
| json -> unexpected_json "theory_type" json
let clone_type_from_json : clone_type from_json = function
| `Int 0 -> Import
| `Int 1 -> Export
| `Int 2 -> Clone_default
| json -> unexpected_json "clone_type" json
(* NOTE(review): "Namepace" is the constructor name as declared
   upstream (apparently a typo for "Namespace"); it must be kept
   as-is here to match the type definition. *)
let subst_type_from_json : subst_type from_json = function
| `Int 0 -> Type_subst
| `Int 1 -> Function
| `Int 2 -> Predicate
| `Int 3 -> Namepace
| `Int 4 -> Lemma
| `Int 5 -> Goal
| json -> unexpected_json "subst_type" json
let connector_from_json : connector from_json = function
| `Int 0 -> Or_else
| `Int 1 -> And_then
| `Int 2 -> Imply
| `Int 3 -> Equivalent
| `Int 4 -> Or
| `Int 5 -> And
| json -> unexpected_json "connector" json
let assert_kind_from_json : assert_kind from_json = function
| `Int 0 -> Assert
| `Int 1 -> Check
| `Int 2 -> Assume
| json -> unexpected_json "assert_kind" json
let axiom_dep_kind_from_json : axiom_dep_kind from_json = function
| `Int 0 -> Axdep_func
| `Int 1 -> Axdep_pred
| json -> unexpected_json "axiom_dep_kind" json
let rec why_node_from_json : 'a why_node from_json = function
| `List [`String "W_TYPE"; id; node; domain; link; checked; type_kind; name; is_mutable; relaxed_init] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Type {
type_kind = type_from_json type_kind;
name = name_opaque_id_from_json name;
is_mutable = boolean_from_json is_mutable;
relaxed_init = boolean_from_json relaxed_init;
} in
{info; desc}
| `List [`String "W_NAME"; id; node; domain; link; checked; symb; namespace; module_; infix] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Name {
symb = symbol_from_json symb;
namespace = symbol_from_json namespace;
module_ = module_opaque_oid_from_json module_;
infix = boolean_from_json infix;
} in
{info; desc}
| `List [`String "W_EFFECTS"; id; node; domain; link; checked; reads; writes; raises] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Effects {
reads = identifier_opaque_olist_from_json reads;
writes = identifier_opaque_olist_from_json writes;
raises = raise_effect_opaque_olist_from_json raises;
} in
{info; desc}
| `List [`String "W_RAISE_EFFECT"; id; node; domain; link; checked; name; arg_id; post] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Raise_effect {
name = name_opaque_id_from_json name;
arg_id = identifier_opaque_oid_from_json arg_id;
post = pred_opaque_oid_from_json post;
} in
{info; desc}
| `List [`String "W_BINDER"; id; node; domain; link; checked; name; arg_type] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Binder {
name = identifier_opaque_oid_from_json name;
arg_type = type_opaque_id_from_json arg_type;
} in
{info; desc}
| `List [`String "W_TRANSPARENT_TYPE_DEFINITION"; id; node; domain; link; checked; type_definition] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Transparent_type_definition {
type_definition = type_opaque_id_from_json type_definition;
} in
{info; desc}
| `List [`String "W_RECORD_BINDER"; id; node; domain; link; checked; name; arg_type; labels; is_mutable] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Record_binder {
name = identifier_opaque_oid_from_json name;
arg_type = type_opaque_id_from_json arg_type;
labels = symbol_set_from_json labels;
is_mutable = boolean_from_json is_mutable;
} in
{info; desc}
| `List [`String "W_RECORD_DEFINITION"; id; node; domain; link; checked; fields] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Record_definition {
fields = record_binder_opaque_list_from_json fields;
} in
{info; desc}
| `List [`String "W_RANGE_TYPE_DEFINITION"; id; node; domain; link; checked; first; last] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Range_type_definition {
first = uint_from_json first;
last = uint_from_json last;
} in
{info; desc}
| `List [`String "W_TRIGGERS"; id; node; domain; link; checked; triggers] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Triggers {
triggers = trigger_opaque_list_from_json triggers;
} in
{info; desc}
| `List [`String "W_TRIGGER"; id; node; domain; link; checked; terms] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Trigger {
terms = expr_opaque_list_from_json terms;
} in
{info; desc}
| `List [`String "W_AXIOM_DEP"; id; node; domain; link; checked; name; kind] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Axiom_dep {
name = identifier_opaque_id_from_json name;
kind = axiom_dep_kind_from_json kind;
} in
{info; desc}
| `List [`String "W_HANDLER"; id; node; domain; link; checked; name; arg_id; def] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Handler {
name = name_opaque_id_from_json name;
arg_id = identifier_opaque_oid_from_json arg_id;
def = prog_opaque_id_from_json def;
} in
{info; desc}
| `List [`String "W_FIELD_ASSOCIATION"; id; node; domain; link; checked; field; value] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Field_association {
field = identifier_opaque_id_from_json field;
value = expr_opaque_id_from_json value;
} in
{info; desc}
| `List [`String "W_VARIANT"; id; node; domain; link; checked; cmp_op; labels; expr] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Variant {
cmp_op = identifier_opaque_id_from_json cmp_op;
labels = symbol_set_from_json labels;
expr = term_opaque_id_from_json expr;
} in
{info; desc}
| `List [`String "W_VARIANTS"; id; node; domain; link; checked; variants] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Variants {
variants = variant_opaque_list_from_json variants;
} in
{info; desc}
| `List [`String "W_UNIVERSAL_QUANTIF"; id; node; domain; link; checked; variables; labels; var_type; triggers; pred] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Universal_quantif {
variables = identifier_opaque_list_from_json variables;
labels = symbol_set_from_json labels;
var_type = type_opaque_id_from_json var_type;
triggers = triggers_opaque_oid_from_json triggers;
pred = pred_opaque_id_from_json pred;
} in
{info; desc}
| `List [`String "W_EXISTENTIAL_QUANTIF"; id; node; domain; link; checked; variables; labels; var_type; pred] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Existential_quantif {
variables = identifier_opaque_list_from_json variables;
labels = symbol_set_from_json labels;
var_type = type_opaque_id_from_json var_type;
pred = pred_opaque_id_from_json pred;
} in
{info; desc}
| `List [`String "W_NOT"; id; node; domain; link; checked; right] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Not {
right = expr_opaque_id_from_json right;
} in
{info; desc}
| `List [`String "W_CONNECTION"; id; node; domain; link; checked; left; op; right; more_right] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Connection {
left = expr_opaque_id_from_json left;
op = connector_from_json op;
right = expr_opaque_id_from_json right;
more_right = expr_opaque_olist_from_json more_right;
} in
{info; desc}
| `List [`String "W_LABEL"; id; node; domain; link; checked; labels; def; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Label {
labels = symbol_set_from_json labels;
def = expr_opaque_id_from_json def;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_LOC_LABEL"; id; node; domain; link; checked; sloc; def; marker] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Loc_label {
sloc = source_ptr_from_json sloc;
def = expr_opaque_id_from_json def;
marker = symbol_from_json marker;
} in
{info; desc}
| `List [`String "W_IDENTIFIER"; id; node; domain; link; checked; name; typ; labels] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Identifier {
name = name_opaque_id_from_json name;
typ = type_opaque_oid_from_json typ;
labels = string_sets_set_from_json labels;
} in
{info; desc}
| `List [`String "W_TAGGED"; id; node; domain; link; checked; tag; def; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Tagged {
tag = symbol_from_json tag;
def = expr_opaque_id_from_json def;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_CALL"; id; node; domain; link; checked; name; args; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Call {
name = identifier_opaque_id_from_json name;
args = expr_opaque_olist_from_json args;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_LITERAL"; id; node; domain; link; checked; value; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Literal {
value = literal_from_json value;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_BINDING"; id; node; domain; link; checked; name; def; context; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Binding {
name = identifier_opaque_id_from_json name;
def = expr_opaque_id_from_json def;
context = expr_opaque_id_from_json context;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_ELSIF"; id; node; domain; link; checked; condition; then_part; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Elsif {
condition = expr_opaque_id_from_json condition;
then_part = expr_opaque_id_from_json then_part;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_EPSILON"; id; node; domain; link; checked; name; typ; pred] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Epsilon {
name = identifier_opaque_id_from_json name;
typ = type_opaque_id_from_json typ;
pred = pred_opaque_id_from_json pred;
} in
{info; desc}
| `List [`String "W_CONDITIONAL"; id; node; domain; link; checked; condition; then_part; elsif_parts; else_part; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Conditional {
condition = expr_opaque_id_from_json condition;
then_part = expr_opaque_id_from_json then_part;
elsif_parts = expr_opaque_olist_from_json elsif_parts;
else_part = expr_opaque_oid_from_json else_part;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_INTEGER_CONSTANT"; id; node; domain; link; checked; value] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Integer_constant {
value = uint_from_json value;
} in
{info; desc}
| `List [`String "W_RANGE_CONSTANT"; id; node; domain; link; checked; value; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Range_constant {
value = uint_from_json value;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_MODULAR_CONSTANT"; id; node; domain; link; checked; value; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Modular_constant {
value = uint_from_json value;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_FIXED_CONSTANT"; id; node; domain; link; checked; value; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Fixed_constant {
value = uint_from_json value;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_REAL_CONSTANT"; id; node; domain; link; checked; value] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Real_constant {
value = ureal_from_json value;
} in
{info; desc}
| `List [`String "W_FLOAT_CONSTANT"; id; node; domain; link; checked; value; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Float_constant {
value = ureal_from_json value;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_COMMENT"; id; node; domain; link; checked; comment] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Comment {
comment = symbol_from_json comment;
} in
{info; desc}
| `List [`String "W_DEREF"; id; node; domain; link; checked; right; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Deref {
right = identifier_opaque_id_from_json right;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_RECORD_ACCESS"; id; node; domain; link; checked; name; field; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Record_access {
name = expr_opaque_id_from_json name;
field = identifier_opaque_id_from_json field;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_RECORD_UPDATE"; id; node; domain; link; checked; name; updates; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Record_update {
name = expr_opaque_id_from_json name;
updates = field_association_opaque_list_from_json updates;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_RECORD_AGGREGATE"; id; node; domain; link; checked; associations; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Record_aggregate {
associations = field_association_opaque_list_from_json associations;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_ANY_EXPR"; id; node; domain; link; checked; effects; pre; post; return_type; labels] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Any_expr {
effects = effects_opaque_oid_from_json effects;
pre = pred_opaque_oid_from_json pre;
post = pred_opaque_oid_from_json post;
return_type = type_opaque_id_from_json return_type;
labels = symbol_set_from_json labels;
} in
{info; desc}
| `List [`String "W_ASSIGNMENT"; id; node; domain; link; checked; name; value; typ; labels] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Assignment {
name = identifier_opaque_id_from_json name;
value = prog_opaque_id_from_json value;
typ = type_opaque_id_from_json typ;
labels = symbol_set_from_json labels;
} in
{info; desc}
| `List [`String "W_BINDING_REF"; id; node; domain; link; checked; name; def; context; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Binding_ref {
name = identifier_opaque_id_from_json name;
def = prog_opaque_id_from_json def;
context = prog_opaque_id_from_json context;
typ = type_opaque_id_from_json typ;
} in
{info; desc}
| `List [`String "W_LOOP"; id; node; domain; link; checked; code_before; invariants; variants; code_after] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Loop {
code_before = prog_opaque_id_from_json code_before;
invariants = pred_opaque_olist_from_json invariants;
variants = variants_opaque_olist_from_json variants;
code_after = prog_opaque_id_from_json code_after;
} in
{info; desc}
| `List [`String "W_STATEMENT_SEQUENCE"; id; node; domain; link; checked; statements] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Statement_sequence {
statements = prog_opaque_list_from_json statements;
} in
{info; desc}
| `List [`String "W_ABSTRACT_EXPR"; id; node; domain; link; checked; expr; post; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Abstract_expr {
expr = prog_opaque_id_from_json expr;
post = pred_opaque_id_from_json post;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_ASSERT"; id; node; domain; link; checked; pred; assert_kind] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Assert {
pred = pred_opaque_id_from_json pred;
assert_kind = assert_kind_from_json assert_kind;
} in
{info; desc}
| `List [`String "W_RAISE"; id; node; domain; link; checked; name; arg; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Raise {
name = name_opaque_id_from_json name;
arg = expr_opaque_oid_from_json arg;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_TRY_BLOCK"; id; node; domain; link; checked; prog; handler; typ] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Try_block {
prog = prog_opaque_id_from_json prog;
handler = handler_opaque_list_from_json handler;
typ = type_opaque_oid_from_json typ;
} in
{info; desc}
| `List [`String "W_FUNCTION_DECL"; id; node; domain; link; checked; name; binders; effects; pre; post; return_type; def; labels; location] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Function_decl {
name = identifier_opaque_id_from_json name;
binders = binder_opaque_olist_from_json binders;
effects = effects_opaque_oid_from_json effects;
pre = pred_opaque_oid_from_json pre;
post = pred_opaque_oid_from_json post;
return_type = type_opaque_oid_from_json return_type;
def = expr_opaque_oid_from_json def;
labels = symbol_set_from_json labels;
location = source_ptr_from_json location;
} in
{info; desc}
| `List [`String "W_AXIOM"; id; node; domain; link; checked; name; def; dep] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Axiom {
name = symbol_from_json name;
def = pred_opaque_id_from_json def;
dep = axiom_dep_opaque_oid_from_json dep;
} in
{info; desc}
| `List [`String "W_GOAL"; id; node; domain; link; checked; name; def] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Goal {
name = symbol_from_json name;
def = pred_opaque_id_from_json def;
} in
{info; desc}
| `List [`String "W_TYPE_DECL"; id; node; domain; link; checked; args; name; labels; definition] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Type_decl {
args = identifier_opaque_olist_from_json args;
name = name_opaque_id_from_json name;
labels = symbol_set_from_json labels;
definition = type_definition_opaque_oid_from_json definition;
} in
{info; desc}
| `List [`String "W_GLOBAL_REF_DECLARATION"; id; node; domain; link; checked; name; ref_type; labels; location] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Global_ref_declaration {
name = identifier_opaque_id_from_json name;
ref_type = type_opaque_id_from_json ref_type;
labels = symbol_set_from_json labels;
location = source_ptr_from_json location;
} in
{info; desc}
| `List [`String "W_NAMESPACE_DECLARATION"; id; node; domain; link; checked; declarations; name] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Namespace_declaration {
declarations = declaration_opaque_olist_from_json declarations;
name = symbol_from_json name;
} in
{info; desc}
| `List [`String "W_EXCEPTION_DECLARATION"; id; node; domain; link; checked; name; arg] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Exception_declaration {
name = name_opaque_id_from_json name;
arg = type_opaque_oid_from_json arg;
} in
{info; desc}
| `List [`String "W_META_DECLARATION"; id; node; domain; link; checked; name; parameter] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Meta_declaration {
name = symbol_from_json name;
parameter = symbol_from_json parameter;
} in
{info; desc}
| `List [`String "W_CLONE_DECLARATION"; id; node; domain; link; checked; origin; as_name; clone_kind; substitutions; theory_kind] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Clone_declaration {
origin = module_opaque_id_from_json origin;
as_name = symbol_from_json as_name;
clone_kind = clone_type_from_json clone_kind;
substitutions = clone_substitution_opaque_olist_from_json substitutions;
theory_kind = theory_type_from_json theory_kind;
} in
{info; desc}
| `List [`String "W_CLONE_SUBSTITUTION"; id; node; domain; link; checked; kind; orig_name; image] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Clone_substitution {
kind = subst_type_from_json kind;
orig_name = name_opaque_id_from_json orig_name;
image = name_opaque_id_from_json image;
} in
{info; desc}
| `List [`String "W_INCLUDE_DECLARATION"; id; node; domain; link; checked; module_; kind; use_kind] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Include_declaration {
module_ = module_opaque_id_from_json module_;
kind = theory_type_from_json kind;
use_kind = clone_type_from_json use_kind;
} in
{info; desc}
| `List [`String "W_THEORY_DECLARATION"; id; node; domain; link; checked; declarations; name; kind; includes; comment] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Theory_declaration {
declarations = declaration_opaque_olist_from_json declarations;
name = symbol_from_json name;
kind = theory_type_from_json kind;
includes = include_declaration_opaque_olist_from_json includes;
comment = symbol_from_json comment;
} in
{info; desc}
| `List [`String "W_MODULE"; id; node; domain; link; checked; file; name] ->
let info = {
id = int_from_json id;
node = node_id_from_json node;
domain = domain_from_json domain;
link = why_node_set_from_json link;
checked = boolean_from_json checked;
} in
let desc = Module {
file = symbol_from_json file;
name = symbol_from_json name;
} in
{info; desc}
| json ->
unexpected_json "why_node" json
(* Decode a mandatory node reference: parse the JSON into an untyped node,
   then narrow it to the expected node kind with the supplied coercion. *)
and why_node_id_from_json : 'a . (any_node_tag why_node -> 'a why_node) -> 'a why_node_id from_json =
  fun narrow j ->
    let node = why_node_from_json j in
    narrow node
(* Decode an optional node reference: [`Null] stands for an absent node
   and maps to [None]; any other JSON value is decoded and narrowed. *)
and why_node_oid_from_json : 'a . (any_node_tag why_node -> 'a why_node) -> 'a why_node_oid from_json =
  fun narrow j ->
    match j with
    | `Null -> None
    | present ->
      let node = why_node_from_json present in
      Some (narrow node)
(* Decode a non-empty node list encoded as a JSON array.
   The first element becomes [elt0], the remainder [elts]; any JSON value
   that is not a non-empty [`List ...] is rejected via [unexpected_json]. *)
and why_node_list_from_json : 'a . (any_node_tag why_node -> 'a why_node) -> 'a why_node_list from_json =
  fun coerce json ->
    (* Decode and narrow in a single pass: the original chained
       [List.map coerce (List.map why_node_from_json elts)], which traverses
       the list twice and allocates an intermediate list for no benefit. *)
    let decode j = coerce (why_node_from_json j) in
    match json with
    | `List (elt0 :: elts) ->
      {elt0 = decode elt0; elts = List.map decode elts}
    | json ->
      unexpected_json "why_node_list" json
and why_node_olist_from_json : 'a . (any_node_tag why_node -> 'a why_node) -> 'a why_node_olist from_json =
fun coerce json ->
match json with
| `Null ->
[]
| `List l ->
List.map coerce
(List.map why_node_from_json l)
| json ->
unexpected_json "why_node_olist_from_json" json
and type_opaque_oid_from_json json = why_node_oid_from_json type_coercion json
and type_opaque_olist_from_json json = why_node_olist_from_json type_coercion json
and type_opaque_id_from_json json = why_node_id_from_json type_coercion json
and type_opaque_list_from_json json = why_node_list_from_json type_coercion json
and name_opaque_oid_from_json json = why_node_oid_from_json name_coercion json
and name_opaque_olist_from_json json = why_node_olist_from_json name_coercion json
and name_opaque_id_from_json json = why_node_id_from_json name_coercion json
and name_opaque_list_from_json json = why_node_list_from_json name_coercion json
and effects_opaque_oid_from_json json = why_node_oid_from_json effects_coercion json
and effects_opaque_olist_from_json json = why_node_olist_from_json effects_coercion json
and effects_opaque_id_from_json json = why_node_id_from_json effects_coercion json
and effects_opaque_list_from_json json = why_node_list_from_json effects_coercion json
and raise_effect_opaque_oid_from_json json = why_node_oid_from_json raise_effect_coercion json
and raise_effect_opaque_olist_from_json json = why_node_olist_from_json raise_effect_coercion json
and raise_effect_opaque_id_from_json json = why_node_id_from_json raise_effect_coercion json
and raise_effect_opaque_list_from_json json = why_node_list_from_json raise_effect_coercion json
and binder_opaque_oid_from_json json = why_node_oid_from_json binder_coercion json
and binder_opaque_olist_from_json json = why_node_olist_from_json binder_coercion json
and binder_opaque_id_from_json json = why_node_id_from_json binder_coercion json
and binder_opaque_list_from_json json = why_node_list_from_json binder_coercion json
and transparent_type_definition_opaque_oid_from_json json = why_node_oid_from_json transparent_type_definition_coercion json
and transparent_type_definition_opaque_olist_from_json json = why_node_olist_from_json transparent_type_definition_coercion json
and transparent_type_definition_opaque_id_from_json json = why_node_id_from_json transparent_type_definition_coercion json
and transparent_type_definition_opaque_list_from_json json = why_node_list_from_json transparent_type_definition_coercion json
and record_binder_opaque_oid_from_json json = why_node_oid_from_json record_binder_coercion json
and record_binder_opaque_olist_from_json json = why_node_olist_from_json record_binder_coercion json
and record_binder_opaque_id_from_json json = why_node_id_from_json record_binder_coercion json
and record_binder_opaque_list_from_json json = why_node_list_from_json record_binder_coercion json
and record_definition_opaque_oid_from_json json = why_node_oid_from_json record_definition_coercion json
and record_definition_opaque_olist_from_json json = why_node_olist_from_json record_definition_coercion json
and record_definition_opaque_id_from_json json = why_node_id_from_json record_definition_coercion json
and record_definition_opaque_list_from_json json = why_node_list_from_json record_definition_coercion json
and range_type_definition_opaque_oid_from_json json = why_node_oid_from_json range_type_definition_coercion json
and range_type_definition_opaque_olist_from_json json = why_node_olist_from_json range_type_definition_coercion json
and range_type_definition_opaque_id_from_json json = why_node_id_from_json range_type_definition_coercion json
and range_type_definition_opaque_list_from_json json = why_node_list_from_json range_type_definition_coercion json
and triggers_opaque_oid_from_json json = why_node_oid_from_json triggers_coercion json
and triggers_opaque_olist_from_json json = why_node_olist_from_json triggers_coercion json
and triggers_opaque_id_from_json json = why_node_id_from_json triggers_coercion json
and triggers_opaque_list_from_json json = why_node_list_from_json triggers_coercion json
and trigger_opaque_oid_from_json json = why_node_oid_from_json trigger_coercion json
and trigger_opaque_olist_from_json json = why_node_olist_from_json trigger_coercion json
and trigger_opaque_id_from_json json = why_node_id_from_json trigger_coercion json
and trigger_opaque_list_from_json json = why_node_list_from_json trigger_coercion json
and axiom_dep_opaque_oid_from_json json = why_node_oid_from_json axiom_dep_coercion json
and axiom_dep_opaque_olist_from_json json = why_node_olist_from_json axiom_dep_coercion json
and axiom_dep_opaque_id_from_json json = why_node_id_from_json axiom_dep_coercion json
and axiom_dep_opaque_list_from_json json = why_node_list_from_json axiom_dep_coercion json
and handler_opaque_oid_from_json json = why_node_oid_from_json handler_coercion json
and handler_opaque_olist_from_json json = why_node_olist_from_json handler_coercion json
and handler_opaque_id_from_json json = why_node_id_from_json handler_coercion json
and handler_opaque_list_from_json json = why_node_list_from_json handler_coercion json
and field_association_opaque_oid_from_json json = why_node_oid_from_json field_association_coercion json
and field_association_opaque_olist_from_json json = why_node_olist_from_json field_association_coercion json
and field_association_opaque_id_from_json json = why_node_id_from_json field_association_coercion json
and field_association_opaque_list_from_json json = why_node_list_from_json field_association_coercion json
and variant_opaque_oid_from_json json = why_node_oid_from_json variant_coercion json
and variant_opaque_olist_from_json json = why_node_olist_from_json variant_coercion json
and variant_opaque_id_from_json json = why_node_id_from_json variant_coercion json
and variant_opaque_list_from_json json = why_node_list_from_json variant_coercion json
and variants_opaque_oid_from_json json = why_node_oid_from_json variants_coercion json
and variants_opaque_olist_from_json json = why_node_olist_from_json variants_coercion json
and variants_opaque_id_from_json json = why_node_id_from_json variants_coercion json
and variants_opaque_list_from_json json = why_node_list_from_json variants_coercion json
and universal_quantif_opaque_oid_from_json json = why_node_oid_from_json universal_quantif_coercion json
and universal_quantif_opaque_olist_from_json json = why_node_olist_from_json universal_quantif_coercion json
and universal_quantif_opaque_id_from_json json = why_node_id_from_json universal_quantif_coercion json
and universal_quantif_opaque_list_from_json json = why_node_list_from_json universal_quantif_coercion json
and existential_quantif_opaque_oid_from_json json = why_node_oid_from_json existential_quantif_coercion json
and existential_quantif_opaque_olist_from_json json = why_node_olist_from_json existential_quantif_coercion json
and existential_quantif_opaque_id_from_json json = why_node_id_from_json existential_quantif_coercion json
and existential_quantif_opaque_list_from_json json = why_node_list_from_json existential_quantif_coercion json
and not_opaque_oid_from_json json = why_node_oid_from_json not_coercion json
and not_opaque_olist_from_json json = why_node_olist_from_json not_coercion json
and not_opaque_id_from_json json = why_node_id_from_json not_coercion json
and not_opaque_list_from_json json = why_node_list_from_json not_coercion json
and connection_opaque_oid_from_json json = why_node_oid_from_json connection_coercion json
and connection_opaque_olist_from_json json = why_node_olist_from_json connection_coercion json
and connection_opaque_id_from_json json = why_node_id_from_json connection_coercion json
and connection_opaque_list_from_json json = why_node_list_from_json connection_coercion json
and label_opaque_oid_from_json json = why_node_oid_from_json label_coercion json
and label_opaque_olist_from_json json = why_node_olist_from_json label_coercion json
and label_opaque_id_from_json json = why_node_id_from_json label_coercion json
and label_opaque_list_from_json json = why_node_list_from_json label_coercion json
and loc_label_opaque_oid_from_json json = why_node_oid_from_json loc_label_coercion json
and loc_label_opaque_olist_from_json json = why_node_olist_from_json loc_label_coercion json
and loc_label_opaque_id_from_json json = why_node_id_from_json loc_label_coercion json
and loc_label_opaque_list_from_json json = why_node_list_from_json loc_label_coercion json
and identifier_opaque_oid_from_json json = why_node_oid_from_json identifier_coercion json
and identifier_opaque_olist_from_json json = why_node_olist_from_json identifier_coercion json
and identifier_opaque_id_from_json json = why_node_id_from_json identifier_coercion json
and identifier_opaque_list_from_json json = why_node_list_from_json identifier_coercion json
and tagged_opaque_oid_from_json json = why_node_oid_from_json tagged_coercion json
and tagged_opaque_olist_from_json json = why_node_olist_from_json tagged_coercion json
and tagged_opaque_id_from_json json = why_node_id_from_json tagged_coercion json
and tagged_opaque_list_from_json json = why_node_list_from_json tagged_coercion json
and call_opaque_oid_from_json json = why_node_oid_from_json call_coercion json
and call_opaque_olist_from_json json = why_node_olist_from_json call_coercion json
and call_opaque_id_from_json json = why_node_id_from_json call_coercion json
and call_opaque_list_from_json json = why_node_list_from_json call_coercion json
and literal_opaque_oid_from_json json = why_node_oid_from_json literal_coercion json
and literal_opaque_olist_from_json json = why_node_olist_from_json literal_coercion json
and literal_opaque_id_from_json json = why_node_id_from_json literal_coercion json
and literal_opaque_list_from_json json = why_node_list_from_json literal_coercion json
and binding_opaque_oid_from_json json = why_node_oid_from_json binding_coercion json
and binding_opaque_olist_from_json json = why_node_olist_from_json binding_coercion json
and binding_opaque_id_from_json json = why_node_id_from_json binding_coercion json
and binding_opaque_list_from_json json = why_node_list_from_json binding_coercion json
and elsif_opaque_oid_from_json json = why_node_oid_from_json elsif_coercion json
and elsif_opaque_olist_from_json json = why_node_olist_from_json elsif_coercion json
and elsif_opaque_id_from_json json = why_node_id_from_json elsif_coercion json
and elsif_opaque_list_from_json json = why_node_list_from_json elsif_coercion json
and epsilon_opaque_oid_from_json json = why_node_oid_from_json epsilon_coercion json
and epsilon_opaque_olist_from_json json = why_node_olist_from_json epsilon_coercion json
and epsilon_opaque_id_from_json json = why_node_id_from_json epsilon_coercion json
and epsilon_opaque_list_from_json json = why_node_list_from_json epsilon_coercion json
and conditional_opaque_oid_from_json json = why_node_oid_from_json conditional_coercion json
and conditional_opaque_olist_from_json json = why_node_olist_from_json conditional_coercion json
and conditional_opaque_id_from_json json = why_node_id_from_json conditional_coercion json
and conditional_opaque_list_from_json json = why_node_list_from_json conditional_coercion json
and integer_constant_opaque_oid_from_json json = why_node_oid_from_json integer_constant_coercion json
and integer_constant_opaque_olist_from_json json = why_node_olist_from_json integer_constant_coercion json
and integer_constant_opaque_id_from_json json = why_node_id_from_json integer_constant_coercion json
and integer_constant_opaque_list_from_json json = why_node_list_from_json integer_constant_coercion json
and range_constant_opaque_oid_from_json json = why_node_oid_from_json range_constant_coercion json
and range_constant_opaque_olist_from_json json = why_node_olist_from_json range_constant_coercion json
and range_constant_opaque_id_from_json json = why_node_id_from_json range_constant_coercion json
and range_constant_opaque_list_from_json json = why_node_list_from_json range_constant_coercion json
and modular_constant_opaque_oid_from_json json = why_node_oid_from_json modular_constant_coercion json
and modular_constant_opaque_olist_from_json json = why_node_olist_from_json modular_constant_coercion json
and modular_constant_opaque_id_from_json json = why_node_id_from_json modular_constant_coercion json
and modular_constant_opaque_list_from_json json = why_node_list_from_json modular_constant_coercion json
and fixed_constant_opaque_oid_from_json json = why_node_oid_from_json fixed_constant_coercion json
and fixed_constant_opaque_olist_from_json json = why_node_olist_from_json fixed_constant_coercion json
and fixed_constant_opaque_id_from_json json = why_node_id_from_json fixed_constant_coercion json
and fixed_constant_opaque_list_from_json json = why_node_list_from_json fixed_constant_coercion json
and real_constant_opaque_oid_from_json json = why_node_oid_from_json real_constant_coercion json
and real_constant_opaque_olist_from_json json = why_node_olist_from_json real_constant_coercion json
and real_constant_opaque_id_from_json json = why_node_id_from_json real_constant_coercion json
and real_constant_opaque_list_from_json json = why_node_list_from_json real_constant_coercion json
and float_constant_opaque_oid_from_json json = why_node_oid_from_json float_constant_coercion json
and float_constant_opaque_olist_from_json json = why_node_olist_from_json float_constant_coercion json
and float_constant_opaque_id_from_json json = why_node_id_from_json float_constant_coercion json
and float_constant_opaque_list_from_json json = why_node_list_from_json float_constant_coercion json
and comment_opaque_oid_from_json json = why_node_oid_from_json comment_coercion json
and comment_opaque_olist_from_json json = why_node_olist_from_json comment_coercion json
and comment_opaque_id_from_json json = why_node_id_from_json comment_coercion json
and comment_opaque_list_from_json json = why_node_list_from_json comment_coercion json
and deref_opaque_oid_from_json json = why_node_oid_from_json deref_coercion json
and deref_opaque_olist_from_json json = why_node_olist_from_json deref_coercion json
and deref_opaque_id_from_json json = why_node_id_from_json deref_coercion json
and deref_opaque_list_from_json json = why_node_list_from_json deref_coercion json
and record_access_opaque_oid_from_json json = why_node_oid_from_json record_access_coercion json
and record_access_opaque_olist_from_json json = why_node_olist_from_json record_access_coercion json
and record_access_opaque_id_from_json json = why_node_id_from_json record_access_coercion json
and record_access_opaque_list_from_json json = why_node_list_from_json record_access_coercion json
and record_update_opaque_oid_from_json json = why_node_oid_from_json record_update_coercion json
and record_update_opaque_olist_from_json json = why_node_olist_from_json record_update_coercion json
and record_update_opaque_id_from_json json = why_node_id_from_json record_update_coercion json
and record_update_opaque_list_from_json json = why_node_list_from_json record_update_coercion json
and record_aggregate_opaque_oid_from_json json = why_node_oid_from_json record_aggregate_coercion json
and record_aggregate_opaque_olist_from_json json = why_node_olist_from_json record_aggregate_coercion json
and record_aggregate_opaque_id_from_json json = why_node_id_from_json record_aggregate_coercion json
and record_aggregate_opaque_list_from_json json = why_node_list_from_json record_aggregate_coercion json
and any_expr_opaque_oid_from_json json = why_node_oid_from_json any_expr_coercion json
and any_expr_opaque_olist_from_json json = why_node_olist_from_json any_expr_coercion json
and any_expr_opaque_id_from_json json = why_node_id_from_json any_expr_coercion json
and any_expr_opaque_list_from_json json = why_node_list_from_json any_expr_coercion json
and assignment_opaque_oid_from_json json = why_node_oid_from_json assignment_coercion json
and assignment_opaque_olist_from_json json = why_node_olist_from_json assignment_coercion json
and assignment_opaque_id_from_json json = why_node_id_from_json assignment_coercion json
and assignment_opaque_list_from_json json = why_node_list_from_json assignment_coercion json
and binding_ref_opaque_oid_from_json json = why_node_oid_from_json binding_ref_coercion json
and binding_ref_opaque_olist_from_json json = why_node_olist_from_json binding_ref_coercion json
and binding_ref_opaque_id_from_json json = why_node_id_from_json binding_ref_coercion json
and binding_ref_opaque_list_from_json json = why_node_list_from_json binding_ref_coercion json
and loop_opaque_oid_from_json json = why_node_oid_from_json loop_coercion json
and loop_opaque_olist_from_json json = why_node_olist_from_json loop_coercion json
and loop_opaque_id_from_json json = why_node_id_from_json loop_coercion json
and loop_opaque_list_from_json json = why_node_list_from_json loop_coercion json
and statement_sequence_opaque_oid_from_json json = why_node_oid_from_json statement_sequence_coercion json
and statement_sequence_opaque_olist_from_json json = why_node_olist_from_json statement_sequence_coercion json
and statement_sequence_opaque_id_from_json json = why_node_id_from_json statement_sequence_coercion json
and statement_sequence_opaque_list_from_json json = why_node_list_from_json statement_sequence_coercion json
and abstract_expr_opaque_oid_from_json json = why_node_oid_from_json abstract_expr_coercion json
and abstract_expr_opaque_olist_from_json json = why_node_olist_from_json abstract_expr_coercion json
and abstract_expr_opaque_id_from_json json = why_node_id_from_json abstract_expr_coercion json
and abstract_expr_opaque_list_from_json json = why_node_list_from_json abstract_expr_coercion json
and assert_opaque_oid_from_json json = why_node_oid_from_json assert_coercion json
and assert_opaque_olist_from_json json = why_node_olist_from_json assert_coercion json
and assert_opaque_id_from_json json = why_node_id_from_json assert_coercion json
and assert_opaque_list_from_json json = why_node_list_from_json assert_coercion json
and raise_opaque_oid_from_json json = why_node_oid_from_json raise_coercion json
and raise_opaque_olist_from_json json = why_node_olist_from_json raise_coercion json
and raise_opaque_id_from_json json = why_node_id_from_json raise_coercion json
and raise_opaque_list_from_json json = why_node_list_from_json raise_coercion json
and try_block_opaque_oid_from_json json = why_node_oid_from_json try_block_coercion json
and try_block_opaque_olist_from_json json = why_node_olist_from_json try_block_coercion json
and try_block_opaque_id_from_json json = why_node_id_from_json try_block_coercion json
and try_block_opaque_list_from_json json = why_node_list_from_json try_block_coercion json
and function_decl_opaque_oid_from_json json = why_node_oid_from_json function_decl_coercion json
and function_decl_opaque_olist_from_json json = why_node_olist_from_json function_decl_coercion json
and function_decl_opaque_id_from_json json = why_node_id_from_json function_decl_coercion json
and function_decl_opaque_list_from_json json = why_node_list_from_json function_decl_coercion json
and axiom_opaque_oid_from_json json = why_node_oid_from_json axiom_coercion json
and axiom_opaque_olist_from_json json = why_node_olist_from_json axiom_coercion json
and axiom_opaque_id_from_json json = why_node_id_from_json axiom_coercion json
and axiom_opaque_list_from_json json = why_node_list_from_json axiom_coercion json
and goal_opaque_oid_from_json json = why_node_oid_from_json goal_coercion json
and goal_opaque_olist_from_json json = why_node_olist_from_json goal_coercion json
and goal_opaque_id_from_json json = why_node_id_from_json goal_coercion json
and goal_opaque_list_from_json json = why_node_list_from_json goal_coercion json
and type_decl_opaque_oid_from_json json = why_node_oid_from_json type_decl_coercion json
and type_decl_opaque_olist_from_json json = why_node_olist_from_json type_decl_coercion json
and type_decl_opaque_id_from_json json = why_node_id_from_json type_decl_coercion json
and type_decl_opaque_list_from_json json = why_node_list_from_json type_decl_coercion json
and global_ref_declaration_opaque_oid_from_json json = why_node_oid_from_json global_ref_declaration_coercion json
and global_ref_declaration_opaque_olist_from_json json = why_node_olist_from_json global_ref_declaration_coercion json
and global_ref_declaration_opaque_id_from_json json = why_node_id_from_json global_ref_declaration_coercion json
and global_ref_declaration_opaque_list_from_json json = why_node_list_from_json global_ref_declaration_coercion json
and namespace_declaration_opaque_oid_from_json json = why_node_oid_from_json namespace_declaration_coercion json
and namespace_declaration_opaque_olist_from_json json = why_node_olist_from_json namespace_declaration_coercion json
and namespace_declaration_opaque_id_from_json json = why_node_id_from_json namespace_declaration_coercion json
and namespace_declaration_opaque_list_from_json json = why_node_list_from_json namespace_declaration_coercion json
and exception_declaration_opaque_oid_from_json json = why_node_oid_from_json exception_declaration_coercion json
and exception_declaration_opaque_olist_from_json json = why_node_olist_from_json exception_declaration_coercion json
and exception_declaration_opaque_id_from_json json = why_node_id_from_json exception_declaration_coercion json
and exception_declaration_opaque_list_from_json json = why_node_list_from_json exception_declaration_coercion json
and meta_declaration_opaque_oid_from_json json = why_node_oid_from_json meta_declaration_coercion json
and meta_declaration_opaque_olist_from_json json = why_node_olist_from_json meta_declaration_coercion json
and meta_declaration_opaque_id_from_json json = why_node_id_from_json meta_declaration_coercion json
and meta_declaration_opaque_list_from_json json = why_node_list_from_json meta_declaration_coercion json
and clone_declaration_opaque_oid_from_json json = why_node_oid_from_json clone_declaration_coercion json
and clone_declaration_opaque_olist_from_json json = why_node_olist_from_json clone_declaration_coercion json
and clone_declaration_opaque_id_from_json json = why_node_id_from_json clone_declaration_coercion json
and clone_declaration_opaque_list_from_json json = why_node_list_from_json clone_declaration_coercion json
and clone_substitution_opaque_oid_from_json json = why_node_oid_from_json clone_substitution_coercion json
and clone_substitution_opaque_olist_from_json json = why_node_olist_from_json clone_substitution_coercion json
and clone_substitution_opaque_id_from_json json = why_node_id_from_json clone_substitution_coercion json
and clone_substitution_opaque_list_from_json json = why_node_list_from_json clone_substitution_coercion json
and include_declaration_opaque_oid_from_json json = why_node_oid_from_json include_declaration_coercion json
and include_declaration_opaque_olist_from_json json = why_node_olist_from_json include_declaration_coercion json
and include_declaration_opaque_id_from_json json = why_node_id_from_json include_declaration_coercion json
and include_declaration_opaque_list_from_json json = why_node_list_from_json include_declaration_coercion json
and theory_declaration_opaque_oid_from_json json = why_node_oid_from_json theory_declaration_coercion json
and theory_declaration_opaque_olist_from_json json = why_node_olist_from_json theory_declaration_coercion json
and theory_declaration_opaque_id_from_json json = why_node_id_from_json theory_declaration_coercion json
and theory_declaration_opaque_list_from_json json = why_node_list_from_json theory_declaration_coercion json
and module_opaque_oid_from_json json = why_node_oid_from_json module_coercion json
and module_opaque_olist_from_json json = why_node_olist_from_json module_coercion json
and module_opaque_id_from_json json = why_node_id_from_json module_coercion json
and module_opaque_list_from_json json = why_node_list_from_json module_coercion json
and expr_opaque_oid_from_json json = why_node_oid_from_json expr_coercion json
and expr_opaque_olist_from_json json = why_node_olist_from_json expr_coercion json
and expr_opaque_id_from_json json = why_node_id_from_json expr_coercion json
and expr_opaque_list_from_json json = why_node_list_from_json expr_coercion json
and pred_opaque_oid_from_json json = why_node_oid_from_json pred_coercion json
and pred_opaque_olist_from_json json = why_node_olist_from_json pred_coercion json
and pred_opaque_id_from_json json = why_node_id_from_json pred_coercion json
and pred_opaque_list_from_json json = why_node_list_from_json pred_coercion json
and term_opaque_oid_from_json json = why_node_oid_from_json term_coercion json
and term_opaque_olist_from_json json = why_node_olist_from_json term_coercion json
and term_opaque_id_from_json json = why_node_id_from_json term_coercion json
and term_opaque_list_from_json json = why_node_list_from_json term_coercion json
and prog_opaque_oid_from_json json = why_node_oid_from_json prog_coercion json
and prog_opaque_olist_from_json json = why_node_olist_from_json prog_coercion json
and prog_opaque_id_from_json json = why_node_id_from_json prog_coercion json
and prog_opaque_list_from_json json = why_node_list_from_json prog_coercion json
and type_definition_opaque_oid_from_json json = why_node_oid_from_json type_definition_coercion json
and type_definition_opaque_olist_from_json json = why_node_olist_from_json type_definition_coercion json
and type_definition_opaque_id_from_json json = why_node_id_from_json type_definition_coercion json
and type_definition_opaque_list_from_json json = why_node_list_from_json type_definition_coercion json
and declaration_opaque_oid_from_json json = why_node_oid_from_json declaration_coercion json
and declaration_opaque_olist_from_json json = why_node_olist_from_json declaration_coercion json
and declaration_opaque_id_from_json json = why_node_id_from_json declaration_coercion json
and declaration_opaque_list_from_json json = why_node_list_from_json declaration_coercion json
and any_node_opaque_oid_from_json json = why_node_oid_from_json any_node_coercion json
and any_node_opaque_olist_from_json json = why_node_olist_from_json any_node_coercion json
and any_node_opaque_id_from_json json = why_node_id_from_json any_node_coercion json
and any_node_opaque_list_from_json json = why_node_list_from_json any_node_coercion json
(* Deserialize a [file]: accepts only a JSON object carrying exactly one
   field, ["theory_declarations"], whose value is the (possibly null)
   list of theory declarations.  Anything else is rejected through
   [unexpected_json]. *)
let file_from_json : file from_json = function
  | `Assoc [("theory_declarations", ast_json)] ->
    let theory_declarations =
      theory_declaration_opaque_olist_from_json ast_json in
    { theory_declarations }
  | json -> unexpected_json "file_from_json" json
end |
7f933f023eb810d0d6531c1515ab994e2b5e0e5aaf38acfecc8f214eda66c275 | janestreet/lwt-async | lwt_timeout.ml | Lightweight thread library for
* Module Lwt_timeout
* Copyright ( C ) 2005 - 2008
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation , with linking exceptions ;
* either version 2.1 of the License , or ( at your option ) any later
* version . See COPYING file for details .
*
* This program is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA
* 02111 - 1307 , USA .
*
* Module Lwt_timeout
* Copyright (C) 2005-2008 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, with linking exceptions;
* either version 2.1 of the License, or (at your option) any later
* version. See COPYING file for details.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*)
(* Logging section used by the default timeout exception handler below. *)
let section = Lwt_log.Section.make "lwt(timeout)"
(* A timeout node: [delay] in whole seconds (one bucket per second in the
   wheel below), [action] run when the timeout expires, and [prev]/[next]
   links of the doubly-linked bucket list the node currently sits in.
   A node whose links point to itself is in no list. *)
type t =
{ mutable delay : int; action : unit -> unit;
mutable prev : t; mutable next : t }
(* Build a fresh node linked to itself, i.e. member of no list (see
   [lst_in_list]).  [let rec] is required to tie the [prev]/[next]
   cycle at allocation time. *)
let make delay action =
let rec x = { delay = delay; action = action; prev = x; next = x } in
x
(* Sentinel node of a fresh, empty list; delay -1 marks it as a sentinel
   and its action is a no-op. *)
let lst_empty () = make (-1) (fun () -> ())
(* Unlink [node] from whatever list it is in and leave it self-linked
   (the "not in a list" state tested by [lst_in_list]).  Calling this on
   an already-detached node is harmless: all updates are then no-ops. *)
let lst_remove node =
  let before = node.prev in
  let after = node.next in
  before.next <- after;
  after.prev <- before;
  node.prev <- node;
  node.next <- node
(* Splice [node] into a list immediately after [anchor]. *)
let lst_insert anchor node =
  let after = anchor.next in
  anchor.next <- node;
  node.prev <- anchor;
  node.next <- after;
  after.prev <- node
(* A node is in a list iff it is not linked to itself (physical
   inequality). *)
let lst_in_list x = x.next != x
(* A list is empty when the sentinel's successor is the sentinel itself
   (physical equality). *)
let lst_is_empty set = set.next == set
(* Detach and return the first element after the sentinel [s].  On an
   empty list this returns the sentinel itself (since [s.next == s]),
   so callers check [lst_is_empty] first. *)
let lst_peek s =
  let first = s.next in
  lst_remove first;
  first
(****)
(* Global state of the timing wheel:
   - [count]:   number of timeouts currently armed (sitting in a bucket);
   - [buckets]: circular array of lists, one bucket per future second;
   - [curr]:    index of the bucket that expires at the next tick;
   - [stopped]: true while the ticking [loop] below is not running. *)
let count = ref 0
let buckets = ref [||]
let curr = ref 0
let stopped = ref true
(* Ensure the wheel can hold delays of up to [l] seconds.  When growing,
   the circular array is rewritten so that the bucket at [!curr] moves to
   index 0 — the two blits copy the wrap-around halves — preserving the
   relative expiry order of already-armed timeouts. *)
let size l =
let len = Array.length !buckets in
if l >= len then begin
let b = Array.init (l + 1) (fun _ -> lst_empty ()) in
Array.blit !buckets !curr b 0 (len - !curr);
Array.blit !buckets 0 b (len - !curr) !curr;
buckets := b; curr := 0;
end
(****)
(* Handler invoked when a timeout action raises.  The default logs the
   exception under [section] and terminates the program; it can be
   replaced with [set_exn_handler]. *)
let handle_exn =
ref
(fun exn ->
ignore (Lwt_log.error ~section ~exn "uncaught exception after timeout");
exit 1)
(* Replace the handler called when a timeout action raises. *)
let set_exn_handler f = handle_exn := f
(* One tick of the wheel per second: drain the current bucket, running
   each expired action with exceptions routed to [handle_exn], then
   advance [curr].  The loop reschedules itself while any timeout
   remains armed; otherwise it flags [stopped] so [start] can relaunch
   it when the next timeout is armed. *)
let rec loop () =
stopped := false;
Lwt.bind (Lwt_unix.sleep 1.) (fun () ->
let s = !buckets.(!curr) in
while not (lst_is_empty s) do
let x = lst_peek s in
decr count;
(* Exceptions escaping the action go to the configurable [handle_exn]. *)
try
x.action ()
with e -> !handle_exn e
done;
curr := (!curr + 1) mod (Array.length !buckets);
if !count > 0 then loop () else begin stopped := true; Lwt.return () end)
(* (Re)arm timeout [x]: move it into the bucket that expires [x.delay]
   ticks from now (rearming first removes it from its old bucket).  If
   it was not already armed, account for it in [count], and launch the
   ticking [loop] when it is the first live timeout and no loop is
   currently running. *)
let start x =
let in_list = lst_in_list x in
let slot = (!curr + x.delay) mod (Array.length !buckets) in
lst_remove x;
lst_insert !buckets.(slot) x;
if not in_list then begin
incr count;
if !count = 1 && !stopped then ignore (loop ())
end
(* Build a timeout that will run [action] [delay] seconds after [start]
   is called on it.  The wheel is grown eagerly so that [start] can
   index bucket [delay] directly.
   @raise Invalid_argument if [delay < 1]. *)
let create delay action =
  if delay < 1 then invalid_arg "Lwt_timeout.create";
  size delay;
  make delay action
(* Disarm [timeout] if it is currently armed; do nothing otherwise. *)
let stop timeout =
  if lst_in_list timeout then begin
    decr count;
    lst_remove timeout
  end
(* Change the delay of [x] to [delay] seconds, growing the wheel if
   needed.  If [x] is currently armed, it is rearmed with the new delay.
   @raise Invalid_argument if [delay < 1]. *)
let change x delay =
if delay < 1 then invalid_arg "Lwt_timeout.change";
x.delay <- delay;
size delay;
if lst_in_list x then start x
| null | https://raw.githubusercontent.com/janestreet/lwt-async/c738e6202c1c7409e079e513c7bdf469f7f9984c/src/unix/lwt_timeout.ml | ocaml | **
**
XXX Should probably report any exception | Lightweight thread library for
* Module Lwt_timeout
* Copyright ( C ) 2005 - 2008
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation , with linking exceptions ;
* either version 2.1 of the License , or ( at your option ) any later
* version . See COPYING file for details .
*
* This program is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA
* 02111 - 1307 , USA .
*
* Module Lwt_timeout
* Copyright (C) 2005-2008 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, with linking exceptions;
* either version 2.1 of the License, or (at your option) any later
* version. See COPYING file for details.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*)
(* Log section used when reporting uncaught exceptions raised by timeout
   actions. *)
let section = Lwt_log.Section.make "lwt(timeout)"

(* A timeout is a node of an intrusive doubly-linked list.
   [delay] is the timeout in seconds, [action] runs on expiry.
   A node whose [prev]/[next] point to itself is detached (in no list). *)
type t =
  { mutable delay : int; action : unit -> unit;
    mutable prev : t; mutable next : t }

(* Build a detached node: it points to itself. *)
let make delay action =
  let rec x = { delay = delay; action = action; prev = x; next = x } in
  x

(* A fresh list head; the dummy sentinel carries an unusable delay/action. *)
let lst_empty () = make (-1) (fun () -> ())

(* Unlink [x] from its list and leave it self-linked (detached). *)
let lst_remove x =
  let p = x.prev in
  let n = x.next in
  p.next <- n;
  n.prev <- p;
  x.next <- x;
  x.prev <- x

(* Insert [x] immediately after [p]. *)
let lst_insert p x =
  let n = p.next in
  p.next <- x;
  x.prev <- p;
  x.next <- n;
  n.prev <- x

(* Physical-equality tests: a detached node points to itself. *)
let lst_in_list x = x.next != x

let lst_is_empty set = set.next == set

(* Pop and return the first element; caller must ensure the list is
   non-empty (otherwise the sentinel itself is returned). *)
let lst_peek s = let x = s.next in lst_remove x; x
(* Number of timeouts currently scheduled in the wheel. *)
let count = ref 0
(* Timer wheel: bucket [i] slots (relative to [curr]) hold the timeouts
   expiring in [i] seconds.  Grown on demand by [size]. *)
let buckets = ref [||]
(* Index of the bucket whose timeouts fire at the next tick. *)
let curr = ref 0
(* True while the once-per-second [loop] below is not running. *)
let stopped = ref true

(* Ensure the wheel can represent a delay of [l] seconds.  When growing,
   the old buckets are rotated so the bucket at [curr] becomes slot 0 of
   the new array, preserving relative expiry times. *)
let size l =
  let len = Array.length !buckets in
  if l >= len then begin
    let b = Array.init (l + 1) (fun _ -> lst_empty ()) in
    Array.blit !buckets !curr b 0 (len - !curr);
    Array.blit !buckets 0 b (len - !curr) !curr;
    buckets := b; curr := 0;
  end

(* Handler applied to exceptions raised by timeout actions.
   The default logs the exception and terminates the program. *)
let handle_exn =
  ref
    (fun exn ->
       ignore (Lwt_log.error ~section ~exn "uncaught exception after timeout");
       exit 1)

(* Replace the exception handler used for timeout actions. *)
let set_exn_handler f = handle_exn := f
(* Tick once per second: drain the current bucket (running each expired
   action, routing exceptions to [handle_exn]), advance the wheel, and
   keep ticking while any timeout remains scheduled; otherwise mark the
   loop stopped so [start] can restart it later. *)
let rec loop () =
  stopped := false;
  Lwt.bind (Lwt_unix.sleep 1.) (fun () ->
    let s = !buckets.(!curr) in
    while not (lst_is_empty s) do
      let x = lst_peek s in
      decr count;
      try
        x.action ()
      with e -> !handle_exn e
    done;
    curr := (!curr + 1) mod (Array.length !buckets);
    if !count > 0 then loop () else begin stopped := true; Lwt.return () end)
(* (Re)arm timeout [x]: move it into the bucket [x.delay] seconds ahead of
   [curr], and kick off the ticking loop if this is the first scheduled
   timeout and the loop is idle. *)
let start x =
  let in_list = lst_in_list x in
  let slot = (!curr + x.delay) mod (Array.length !buckets) in
  lst_remove x;
  lst_insert !buckets.(slot) x;
  if not in_list then begin
    incr count;
    if !count = 1 && !stopped then ignore (loop ())
  end

(* Create a timeout of [delay] seconds (must be >= 1) running [action] on
   expiry.  The timeout is created disarmed; call [start] to schedule it.
   [size] grows the wheel so the delay fits. *)
let create delay action =
  if delay < 1 then invalid_arg "Lwt_timeout.create";
  let x = make delay action in
  size delay;
  x

(* Disarm [x] if it is currently scheduled; no-op otherwise. *)
let stop x =
  if lst_in_list x then begin
    lst_remove x;
    decr count
  end

(* Change the delay of [x] (must be >= 1) and immediately re-arm it with
   the new delay if it was scheduled. *)
let change x delay =
  if delay < 1 then invalid_arg "Lwt_timeout.change";
  x.delay <- delay;
  size delay;
  if lst_in_list x then start x
|
721950b988f61036947bbc162d9ba945fdfb7df7584b7093dbd932f4f40787b7 | Cumulus/Cumulus | db.ml |
Copyright ( c ) 2012
Permission is hereby granted , free of charge , to any person obtaining a copy of
this software and associated documentation files ( the " Software " ) , to deal in
the Software without restriction , including without limitation the rights to
use , copy , modify , merge , publish , distribute , sublicense , and/or sell copies of
the Software , and to permit persons to whom the Software is furnished to do so ,
subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER
IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE .
Copyright (c) 2012 Enguerrand Decorne
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*)
open Batteries
open Eliom_lib.Lwt_ops
(* Threading module expected by PG'OCaml's generic functor, implemented on
   top of Lwt. *)
module Lwt_thread = struct
  include Lwt
  include Lwt_chan
end
module Lwt_PGOCaml = PGOCaml_generic.Make(Lwt_thread)
module Lwt_Query = Query.Make_with_Db(Lwt_thread)(Lwt_PGOCaml)

(* Open one PostgreSQL connection, taking every (optional) parameter from
   [Config_db]. *)
let connect =
  let open Config_db in
  Lwt_PGOCaml.connect
    ?database
    ?host
    ?port
    ?password
    ?user
    ?unix_domain_socket_dir

(* Pool of up to 16 connections; [Lwt_PGOCaml.alive] validates entries so
   dead connections are replaced on checkout. *)
let pool = Lwt_pool.create 16 ~validate:Lwt_PGOCaml.alive connect

(* Run [f] with a connection checked out of the pool. *)
let use f = Lwt_pool.use pool f

(** Debugging *)
(*let log = Some Pervasives.stdout*)
let log = None

(* Convenience wrappers: run a Lwt_Query operation on a pooled connection,
   logging the generated SQL when [log] is set. *)
let view x = use (fun db -> Lwt_Query.view db ?log x)
let view_opt x = use (fun db -> Lwt_Query.view_opt db ?log x)
let view_one x = use (fun db -> Lwt_Query.view_one db ?log x)
let query x = use (fun db -> Lwt_Query.query db ?log x)
let value x = use (fun db -> Lwt_Query.value db ?log x)
let value_opt x = use (fun db -> Lwt_Query.value_opt db ?log x)
let alter x = use (fun db -> Lwt_PGOCaml.alter db x)
| null | https://raw.githubusercontent.com/Cumulus/Cumulus/3b6de05d76c57d528e052aa382f98e40354cf581/src/base/db/db.ml | ocaml | * Debugging
let log = Some Pervasives.stdout |
Copyright ( c ) 2012
Permission is hereby granted , free of charge , to any person obtaining a copy of
this software and associated documentation files ( the " Software " ) , to deal in
the Software without restriction , including without limitation the rights to
use , copy , modify , merge , publish , distribute , sublicense , and/or sell copies of
the Software , and to permit persons to whom the Software is furnished to do so ,
subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
IMPLIED , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER
IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE .
Copyright (c) 2012 Enguerrand Decorne
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*)
open Batteries
open Eliom_lib.Lwt_ops
module Lwt_thread = struct
include Lwt
include Lwt_chan
end
module Lwt_PGOCaml = PGOCaml_generic.Make(Lwt_thread)
module Lwt_Query = Query.Make_with_Db(Lwt_thread)(Lwt_PGOCaml)
let connect =
let open Config_db in
Lwt_PGOCaml.connect
?database
?host
?port
?password
?user
?unix_domain_socket_dir
let pool = Lwt_pool.create 16 ~validate:Lwt_PGOCaml.alive connect
let use f = Lwt_pool.use pool f
let log = None
let view x = use (fun db -> Lwt_Query.view db ?log x)
let view_opt x = use (fun db -> Lwt_Query.view_opt db ?log x)
let view_one x = use (fun db -> Lwt_Query.view_one db ?log x)
let query x = use (fun db -> Lwt_Query.query db ?log x)
let value x = use (fun db -> Lwt_Query.value db ?log x)
let value_opt x = use (fun db -> Lwt_Query.value_opt db ?log x)
let alter x = use (fun db -> Lwt_PGOCaml.alter db x)
|
01ae4e87ea00ad11ead97ec70c93779e734c5c45f7d0ca30ea8c039f79dd79e5 | dQuadrant/cardano-marketplace | Core.hs | # LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE NumericUnderscores #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeApplications #
module Cardano.Marketplace.V2.Core where
import Cardano.Api
import Cardano.Api.Shelley (ProtocolParameters, ReferenceScript (ReferenceScriptNone), fromPlutusData, scriptDataToJsonDetailedSchema, toPlutusData)
import qualified Cardano.Api.Shelley as Shelley
import Cardano.Kuber.Api
import Cardano.Kuber.Data.Parsers
import Cardano.Kuber.Util
import Cardano.Ledger.Alonzo.Tx (TxBody (txfee))
import qualified Cardano.Ledger.BaseTypes as Shelley (Network (..))
import Cardano.Marketplace.Common.TextUtils
import Cardano.Marketplace.Common.TransactionUtils
import Codec.Serialise (serialise)
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.Text as Aeson
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy as TLE
import Plutus.Contracts.V2.SimpleMarketplace hiding (Withdraw)
import qualified Plutus.Contracts.V2.SimpleMarketplace as SMP
import Plutus.V1.Ledger.Api hiding (Address, TxOut, Value, getTxId)
import qualified Plutus.V1.Ledger.Api (Address)
import qualified Plutus.V1.Ledger.Api as Plutus
import qualified Debug.Trace as Debug
import Data.Functor ((<&>))
import Control.Exception (throw)
-- | Mint (or burn, for a negative @amount@) a native asset whose policy is a
-- single-signature simple script over @signKey@, and submit the transaction.
-- @addrEra@ is the wallet address used for fees/change.
-- NOTE(review): no explicit type signature in the original; types inferred
-- from use of the Kuber builder API.
mint ctx signKey addrEra assetName amount = do
  -- Policy: anyone holding signKey may mint/burn under this policy id.
  let script = RequireSignature (verificationKeyHash $ getVerificationKey signKey)
      txBuilder =
        txWalletAddress addrEra
          <> txWalletSignKey signKey
          <> txMintSimpleScript @(SimpleScript SimpleScriptV2) script [(assetName, Quantity amount)]
  submitTransaction ctx txBuilder
-- | Publish the marketplace validator on-chain as a reference script:
-- pays 20 ada from the signer's own wallet back to itself, attaching
-- 'simpleMarketplaceScript' as the output's reference script so later
-- transactions can reference it instead of carrying the full validator.
createReferenceScript :: ChainInfo v => v -> SigningKey PaymentKey -> IO ()
createReferenceScript ctx sKey = do
  let walletAddrInEra = getAddrEraFromSignKey ctx sKey
      txOperations = txPayToWithReference walletAddrInEra (lovelaceToValue $ Lovelace 20_000_000) simpleMarketplaceScript
        <> txWalletAddress walletAddrInEra   -- fee/change wallet
        <> txWalletSignKey sKey
  submitTransaction ctx txOperations
-- | List an asset for sale: lock it at the marketplace script address with an
-- inline 'SimpleSale' datum recording the seller's payout address and price.
--
-- * @itemStr@     — asset to sell, parsed by 'parseAssetNQuantity'.
-- * @cost@        — asking price in lovelace, stored in the datum.
-- * @mSellerAddr@ — optional payout address; defaults to the signing key's
--                   own address when 'Nothing'.
--
-- Prints the datum (JSON) so the seller can later supply it if needed.
-- NOTE(review): the @marketAddr@ parameter is kept for interface
-- compatibility but is unused; the script address comes from
-- 'marketAddressInEra' — confirm intent.
-- Fix: removed the unused local binding @addrShelley@.
sellToken :: ChainInfo v => v -> String -> Integer -> SigningKey PaymentKey -> Maybe (AddressInEra BabbageEra ) -> Address ShelleyAddr -> IO ()
sellToken ctx itemStr cost sKey mSellerAddr marketAddr = do
  let sellerAddr = case mSellerAddr of
        Nothing -> skeyToAddrInEra sKey (getNetworkId ctx)
        Just ad -> ad
  item <- parseAssetNQuantity $ T.pack itemStr
  let saleDatum = constructDatum sellerAddr cost
      marketAddrInEra = marketAddressInEra (getNetworkId ctx)
      txOperations =
        txPayToScriptWithData marketAddrInEra (valueFromList [item]) saleDatum
          <> txWalletSignKey sKey
  putStrLn $ "InlineDatum : " ++ encodeScriptData saleDatum
  submitTransaction ctx txOperations
-- | A marketplace UTxO together with everything needed to spend it:
-- the locked output, its datum in both raw and decoded form, and the
-- seller's address recovered from the datum.
data UtxoWithData = UtxoWithData
  {
    uwdTxIn :: TxIn,                          -- ^ reference of the locked output
    uwdTxOut :: TxOut CtxUTxO BabbageEra,     -- ^ the locked output itself
    uwdScriptData :: ScriptData,              -- ^ raw datum attached to the output
    uwdSimpleSale :: SimpleSale,              -- ^ datum decoded to the sale record
    uwdSellerAddr :: AddressInEra BabbageEra  -- ^ seller address from the datum
  }
-- | Buy a listed asset: spend the market UTxO with the 'SMP.Buy' redeemer
-- and pay the asking price to the seller (topped up to the protocol's
-- min-UTxO lovelace via 'ensureMinAda').  @datumStrM@ supplies the datum
-- when the UTxO carries only a datum hash; 'Nothing' means inline datum.
buyToken :: ChainInfo v => v -> Text -> Maybe String -> SigningKey PaymentKey -> Address ShelleyAddr -> IO ()
buyToken ctx txInText datumStrM sKey marketAddr = do
  dcInfo <- withDetails ctx
  UtxoWithData txIn txOut scriptData sSale@(SimpleSale _ priceOfAsset) sellerAddrInEra <- getUtxoWithData ctx txInText datumStrM marketAddr
  -- Seller payout: the datum's price, raised to min-ada if necessary.
  let sellerPayOperation = txPayTo sellerAddrInEra (ensureMinAda sellerAddrInEra (lovelaceToValue $ Lovelace priceOfAsset) (dciProtocolParams dcInfo))
  redeemMarketUtxo dcInfo txIn txOut sKey sellerPayOperation scriptData SMP.Buy

-- | Withdraw (cancel) a listing: spend the market UTxO with 'SMP.Withdraw'.
-- Adds a signature requirement for the seller address from the datum.
withdrawToken :: ChainInfo v => v -> Text -> Maybe String -> SigningKey PaymentKey -> Address ShelleyAddr -> IO ()
withdrawToken ctx txInText datumStrM sKey marketAddr = do
  dcInfo <- withDetails ctx
  UtxoWithData txIn txOut scriptData _ sellerAddrInEra <- getUtxoWithData ctx txInText datumStrM marketAddr
  let sellerSignOperation = txSignBy sellerAddrInEra
  redeemMarketUtxo dcInfo txIn txOut sKey sellerSignOperation scriptData SMP.Withdraw
-- | Resolve a user-supplied tx-in string to a 'UtxoWithData'.
-- Errors out (via 'unMaybe' / downstream 'error') when the tx-in is not
-- among the market's UTxOs or its datum cannot be obtained and decoded.
getUtxoWithData :: ChainInfo v => v -> Text -> Maybe String -> Address ShelleyAddr -> IO UtxoWithData
getUtxoWithData ctx txInText datumStrM marketAddr = do
  txIn <- parseTxIn txInText
  UTxO uMap <- queryMarketUtxos ctx marketAddr
  let txOut = unMaybe "Error couldn't find the given txin in market utxos." $ Map.lookup txIn uMap
  (scriptData, simpleSale) <- getSimpleSaleTuple datumStrM txOut
  let nwId = getNetworkId ctx
      -- The datum stores a Plutus-level address; convert it back to a
      -- Cardano-API address for this network.
      sellerAddrInEra = plutusAddressToAddressInEra nwId (sellerAddress simpleSale)
  pure $ UtxoWithData txIn txOut scriptData simpleSale sellerAddrInEra
-- | Obtain the raw datum and its decoded 'SimpleSale' for a market UTxO.
--
-- * 'Nothing'  — the output must carry an inline datum, which is decoded.
-- * @Just str@ — @str@ is parsed as the datum and its hash is checked
--   against the output's datum hash; mismatch is a hard error.
--
-- Fix: removed leftover 'Debug.trace' calls from the production path
-- (values are returned unchanged).
getSimpleSaleTuple :: Maybe String -> TxOut CtxUTxO BabbageEra -> IO (ScriptData, SimpleSale)
getSimpleSaleTuple datumStrM txOut = case datumStrM of
  Nothing -> do
    let inlineDatum = findInlineDatumFromTxOut txOut
        simpleSale = unMaybe "Failed to convert datum to SimpleSale" $ Plutus.fromBuiltinData $ dataToBuiltinData $ toPlutusData inlineDatum
    pure (inlineDatum, simpleSale)
  Just datumStr -> do
    simpleSaleTuple@(scriptData, _) <- parseSimpleSale datumStr
    let datumHashMatches = matchesDatumhash (hashScriptData scriptData) txOut
    if not datumHashMatches
      then error "Error : The given txin doesn't match the datumhash of the datum."
      else pure simpleSaleTuple
-- | Build, sign and submit a transaction that spends a marketplace UTxO with
-- the given redeemer ('SMP.Buy' or 'SMP.Withdraw').  @extraOperations@ lets
-- callers add their own constraints (seller payout, required seller
-- signature, ...).
-- NOTE(review): @scriptData@ is accepted but unused in this body; the datum
-- is presumably resolved from the chain — confirm against callers.
redeemMarketUtxo :: DetailedChainInfo -> TxIn -> TxOut CtxUTxO BabbageEra -> SigningKey PaymentKey -> TxBuilder -> ScriptData -> SMP.MarketRedeemer -> IO ()
redeemMarketUtxo dcInfo txIn txOut sKey extraOperations scriptData redeemer = do
  let walletAddr = getAddrEraFromSignKey dcInfo sKey
      redeemUtxoOperation = txRedeemUtxo txIn txOut simpleMarketplacePlutusV2 (fromPlutusData $ toData redeemer) Nothing
      txOperations =
        redeemUtxoOperation
          <> txWalletAddress walletAddr   -- fee/change/collateral wallet
          <> txWalletSignKey sKey
          <> extraOperations
  submitTransaction dcInfo txOperations
  putStrLn "Done"
-- | Top up @value@ with enough lovelace to satisfy the protocol's min-UTxO
-- requirement for an output at @addr@; returns the value unchanged when it
-- already meets the minimum.
ensureMinAda :: AddressInEra BabbageEra -> Value -> ProtocolParameters -> Value
ensureMinAda addr value pParams =
  if diff > 0
    then value <> lovelaceToValue diff
    else value
  where
    diff = minLovelace - currentLovelace
    -- Minimum computed against a datum-less, reference-script-less output
    -- carrying exactly this value.
    minLovelace = unMaybe "minLovelace calculation error" $ calculateTxoutMinLovelace (TxOut addr (TxOutValue MultiAssetInBabbageEra value) TxOutDatumNone ReferenceScriptNone) pParams
    currentLovelace = selectLovelace value
-- | Extract the inline datum from an output; hard error when the output has
-- no inline datum (caller should pass @--datum@ instead).
-- Fix: removed a leftover 'Debug.trace' (return value unchanged).
findInlineDatumFromTxOut :: TxOut CtxUTxO BabbageEra -> ScriptData
findInlineDatumFromTxOut (TxOut _ _ (TxOutDatumInline _ sd) _) = sd
findInlineDatumFromTxOut _ = error "Error : The given txin doesn't have an inline datum. Please provide a datum using --datum '<datum string>'."
-- | True iff the output carries a datum *hash* (not inline) equal to the
-- given hash.  Outputs with inline datums or no datum yield 'False'.
-- Fix: replaced the unused pattern binding @value@ with a wildcard (the
-- 'TxOutValue' constructor match is kept, preserving behavior).
matchesDatumhash :: Hash ScriptData -> TxOut ctx era -> Bool
matchesDatumhash datumHash (TxOut _ (TxOutValue _ _) (TxOutDatumHash _ hash) _) = hash == datumHash
matchesDatumhash _ _ = False
-- | Unwrap an 'Either': return the 'Right' value, throw the 'Left' value as
-- an exception (via 'Control.Exception.throw').
-- Fix: renamed the inner binder so it no longer shadows the argument @e@.
throwLeft e = case e of
  Left err -> throw err
  Right v -> pure v
-- | The simple-sale marketplace validator wrapped as a PlutusV2 script value.
txSimpleSaleScript = PlutusScript PlutusScriptV2 simpleMarketplacePlutusV2
# LANGUAGE OverloadedStrings # | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE FlexibleContexts #
# LANGUAGE NumericUnderscores #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeApplications #
module Cardano.Marketplace.V2.Core where
import Cardano.Api
import Cardano.Api.Shelley (ProtocolParameters, ReferenceScript (ReferenceScriptNone), fromPlutusData, scriptDataToJsonDetailedSchema, toPlutusData)
import qualified Cardano.Api.Shelley as Shelley
import Cardano.Kuber.Api
import Cardano.Kuber.Data.Parsers
import Cardano.Kuber.Util
import Cardano.Ledger.Alonzo.Tx (TxBody (txfee))
import qualified Cardano.Ledger.BaseTypes as Shelley (Network (..))
import Cardano.Marketplace.Common.TextUtils
import Cardano.Marketplace.Common.TransactionUtils
import Codec.Serialise (serialise)
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.Text as Aeson
import qualified Data.Map as Map
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy as TLE
import Plutus.Contracts.V2.SimpleMarketplace hiding (Withdraw)
import qualified Plutus.Contracts.V2.SimpleMarketplace as SMP
import Plutus.V1.Ledger.Api hiding (Address, TxOut, Value, getTxId)
import qualified Plutus.V1.Ledger.Api (Address)
import qualified Plutus.V1.Ledger.Api as Plutus
import qualified Debug.Trace as Debug
import Data.Functor ((<&>))
import Control.Exception (throw)
mint ctx signKey addrEra assetName amount = do
let script = RequireSignature (verificationKeyHash $ getVerificationKey signKey)
txBuilder =
txWalletAddress addrEra
<> txWalletSignKey signKey
<> txMintSimpleScript @(SimpleScript SimpleScriptV2) script [(assetName, Quantity amount)]
submitTransaction ctx txBuilder
createReferenceScript :: ChainInfo v => v -> SigningKey PaymentKey -> IO ()
createReferenceScript ctx sKey = do
let walletAddrInEra = getAddrEraFromSignKey ctx sKey
txOperations = txPayToWithReference walletAddrInEra (lovelaceToValue $ Lovelace 20_000_000) simpleMarketplaceScript
<> txWalletAddress walletAddrInEra
<> txWalletSignKey sKey
submitTransaction ctx txOperations
sellToken :: ChainInfo v => v -> String -> Integer -> SigningKey PaymentKey -> Maybe (AddressInEra BabbageEra ) -> Address ShelleyAddr -> IO ()
sellToken ctx itemStr cost sKey mSellerAddr marketAddr = do
let addrShelley = skeyToAddr sKey (getNetworkId ctx)
sellerAddr =case mSellerAddr of
Nothing -> skeyToAddrInEra sKey (getNetworkId ctx)
Just ad -> ad
item <- parseAssetNQuantity $ T.pack itemStr
let saleDatum = constructDatum sellerAddr cost
marketAddrInEra = marketAddressInEra (getNetworkId ctx)
txOperations =
txPayToScriptWithData marketAddrInEra (valueFromList [item]) saleDatum
<> txWalletSignKey sKey
putStrLn $ "InlineDatum : " ++ encodeScriptData saleDatum
submitTransaction ctx txOperations
data UtxoWithData = UtxoWithData
{
uwdTxIn :: TxIn,
uwdTxOut :: TxOut CtxUTxO BabbageEra,
uwdScriptData :: ScriptData,
uwdSimpleSale :: SimpleSale,
uwdSellerAddr :: AddressInEra BabbageEra
}
buyToken :: ChainInfo v => v -> Text -> Maybe String -> SigningKey PaymentKey -> Address ShelleyAddr -> IO ()
buyToken ctx txInText datumStrM sKey marketAddr = do
dcInfo <- withDetails ctx
UtxoWithData txIn txOut scriptData sSale@(SimpleSale _ priceOfAsset) sellerAddrInEra <- getUtxoWithData ctx txInText datumStrM marketAddr
let sellerPayOperation = txPayTo sellerAddrInEra (ensureMinAda sellerAddrInEra (lovelaceToValue $ Lovelace priceOfAsset) (dciProtocolParams dcInfo))
redeemMarketUtxo dcInfo txIn txOut sKey sellerPayOperation scriptData SMP.Buy
withdrawToken :: ChainInfo v => v -> Text -> Maybe String -> SigningKey PaymentKey -> Address ShelleyAddr -> IO ()
withdrawToken ctx txInText datumStrM sKey marketAddr = do
dcInfo <- withDetails ctx
UtxoWithData txIn txOut scriptData _ sellerAddrInEra <- getUtxoWithData ctx txInText datumStrM marketAddr
let sellerSignOperation = txSignBy sellerAddrInEra
redeemMarketUtxo dcInfo txIn txOut sKey sellerSignOperation scriptData SMP.Withdraw
getUtxoWithData :: ChainInfo v => v -> Text -> Maybe String -> Address ShelleyAddr -> IO UtxoWithData
getUtxoWithData ctx txInText datumStrM marketAddr= do
txIn <- parseTxIn txInText
UTxO uMap <- queryMarketUtxos ctx marketAddr
let txOut = unMaybe "Error couldn't find the given txin in market utxos." $ Map.lookup txIn uMap
(scriptData, simpleSale) <- getSimpleSaleTuple datumStrM txOut
let nwId = getNetworkId ctx
sellerAddrInEra = plutusAddressToAddressInEra nwId (sellerAddress simpleSale)
pure $ UtxoWithData txIn txOut scriptData simpleSale sellerAddrInEra
getSimpleSaleTuple :: Maybe String -> TxOut CtxUTxO BabbageEra -> IO (ScriptData, SimpleSale)
getSimpleSaleTuple datumStrM txOut = case datumStrM of
Nothing -> do
let inlineDatum = findInlineDatumFromTxOut txOut
simpleSale = unMaybe "Failed to convert datum to SimpleSale" $ Plutus.fromBuiltinData $ dataToBuiltinData $ toPlutusData inlineDatum
pure $ Debug.trace (show simpleSale) (inlineDatum, simpleSale)
Just datumStr -> do
simpleSaleTuple@(scriptData, _) <- parseSimpleSale datumStr
let datumHashMatches = matchesDatumhash (hashScriptData scriptData) txOut
if not datumHashMatches
then error "Error : The given txin doesn't match the datumhash of the datum."
else pure $ Debug.trace (show simpleSaleTuple) simpleSaleTuple
redeemMarketUtxo :: DetailedChainInfo -> TxIn -> TxOut CtxUTxO BabbageEra -> SigningKey PaymentKey -> TxBuilder -> ScriptData -> SMP.MarketRedeemer -> IO ()
redeemMarketUtxo dcInfo txIn txOut sKey extraOperations scriptData redeemer = do
let walletAddr = getAddrEraFromSignKey dcInfo sKey
redeemUtxoOperation = txRedeemUtxo txIn txOut simpleMarketplacePlutusV2 (fromPlutusData $ toData redeemer) Nothing
txOperations =
redeemUtxoOperation
<> txWalletAddress walletAddr
<> txWalletSignKey sKey
<> extraOperations
submitTransaction dcInfo txOperations
putStrLn "Done"
ensureMinAda :: AddressInEra BabbageEra -> Value -> ProtocolParameters -> Value
ensureMinAda addr value pParams =
if diff > 0
then value <> lovelaceToValue diff
else value
where
diff = minLovelace - currentLovelace
minLovelace = unMaybe "minLovelace calculation error" $ calculateTxoutMinLovelace (TxOut addr (TxOutValue MultiAssetInBabbageEra value) TxOutDatumNone ReferenceScriptNone) pParams
currentLovelace = selectLovelace value
findInlineDatumFromTxOut :: TxOut CtxUTxO BabbageEra -> ScriptData
findInlineDatumFromTxOut (TxOut _ _ (TxOutDatumInline _ sd) _) = Debug.trace (show sd) sd
findInlineDatumFromTxOut _ = error "Error : The given txin doesn't have an inline datum. Please provide a datum using --datum '<datum string>'."
matchesDatumhash :: Hash ScriptData -> TxOut ctx era -> Bool
matchesDatumhash datumHash (TxOut _ (TxOutValue _ value) (TxOutDatumHash _ hash) _) = hash == datumHash
matchesDatumhash _ _ = False
throwLeft e = case e of
Left e -> throw e
Right v -> pure v
txSimpleSaleScript = PlutusScript PlutusScriptV2 simpleMarketplacePlutusV2 |
bc31f9d07f13553db374466841ba93bcfab7cd2eab1b40953747dbdd6b833d46 | scrintal/heroicons-reagent | bars_arrow_up.cljs | (ns com.scrintal.heroicons.outline.bars-arrow-up)
(defn render
  "Hiccup markup for the outline `bars-arrow-up` Heroicon
  (24x24 viewBox, 1.5 stroke width, stroked with currentColor)."
  []
  (let [svg-attrs  {:xmlns ""
                    :fill "none"
                    :viewBox "0 0 24 24"
                    :strokeWidth "1.5"
                    :stroke "currentColor"
                    :aria-hidden "true"}
        path-attrs {:strokeLinecap "round"
                    :strokeLinejoin "round"
                    :d "M3 4.5h14.25M3 9h9.75M3 13.5h5.25m5.25-.75L17.25 9m0 0L21 12.75M17.25 9v12"}]
    [:svg svg-attrs
     [:path path-attrs]]))
d948419a3ea1841fa13311ec8da18da972b67946889749a630435ee83859f09c | MyDataFlow/ttalk-server | mod_shared_roster_ldap.erl | %%%-------------------------------------------------------------------
File :
%%% Author : Realloc <>
< >
< >
%%% Description : LDAP shared roster management
Created : 5 Mar 2005 by < >
%%%
%%%
ejabberd , Copyright ( C ) 2002 - 2013 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
You should have received a copy of the GNU General Public License
%%% along with this program; if not, write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA
02111 - 1307 USA
%%%
%%%-------------------------------------------------------------------
-module(mod_shared_roster_ldap).
-behaviour(gen_server).
-behaviour(gen_mod).
%% API
-export([start_link/2, start/2, stop/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2,
handle_info/2, terminate/2, code_change/3]).
-export([get_user_roster/2, get_subscription_lists/3,
get_jid_info/4, process_item/2, in_subscription/6,
out_subscription/4]).
-export([config_change/4]).
-include("ejabberd.hrl").
-include("jlib.hrl").
-include("mod_roster.hrl").
-include("eldap.hrl").
%% Cache tuning defaults: maximum entry counts and entry lifetimes (seconds).
-define(CACHE_SIZE, 1000).
-define(USER_CACHE_VALIDITY, 300).
-define(GROUP_CACHE_VALIDITY, 300).
%% LDAP search timeout, in seconds.
-define(LDAP_SEARCH_TIMEOUT, 5).

%% Per-host configuration, built from module options at init time and kept
%% as the gen_server state.  Filters are LDAP filter templates with
%% %u (user) / %g (group) placeholders.
-record(state,
	{host = <<"">> :: binary(),
	 eldap_id = <<"">> :: binary(),
	 servers = [] :: [binary()],
	 backups = [] :: [binary()],
	 port = ?LDAP_PORT :: inet:port_number(),
	 tls_options = [] :: list(),
	 dn = <<"">> :: binary(),
	 base = <<"">> :: binary(),
	 password = <<"">> :: binary(),
	 uid = <<"">> :: binary(),
	 deref_aliases = never :: never | searching |
				  finding | always,
	 group_attr = <<"">> :: binary(),
	 group_desc = <<"">> :: binary(),
	 user_desc = <<"">> :: binary(),
	 user_uid = <<"">> :: binary(),
	 uid_format = <<"">> :: binary(),
	 uid_format_re = <<"">> :: binary(),
	 filter = <<"">> :: binary(),
	 ufilter = <<"">> :: binary(),
	 rfilter = <<"">> :: binary(),
	 gfilter = <<"">> :: binary(),
	 auth_check = true :: boolean(),
	 user_cache_size = ?CACHE_SIZE :: non_neg_integer(),
	 group_cache_size = ?CACHE_SIZE :: non_neg_integer(),
	 user_cache_validity = ?USER_CACHE_VALIDITY :: non_neg_integer(),
	 group_cache_validity = ?GROUP_CACHE_VALIDITY :: non_neg_integer()}).

%% Cached group lookup result: display name and member list.
-record(group_info, {desc, members}).
%%====================================================================
%% API
%%====================================================================
%% Start this module's gen_server under a per-host registered name.
start_link(Host, Opts) ->
    Proc = gen_mod:get_module_proc(Host, ?MODULE),
    gen_server:start_link({local, Proc}, ?MODULE,
			  [Host, Opts], []).

%% gen_mod callback: attach the per-host worker to ejabberd's supervisor.
start(Host, Opts) ->
    Proc = gen_mod:get_module_proc(Host, ?MODULE),
    ChildSpec = {Proc, {?MODULE, start_link, [Host, Opts]},
		 permanent, 1000, worker, [?MODULE]},
    supervisor:start_child(ejabberd_sup, ChildSpec).

%% gen_mod callback: stop and remove the per-host worker.
stop(Host) ->
    Proc = gen_mod:get_module_proc(Host, ?MODULE),
    supervisor:terminate_child(ejabberd_sup, Proc),
    supervisor:delete_child(ejabberd_sup, Proc).
%%--------------------------------------------------------------------
%% Hooks
%%--------------------------------------------------------------------
%% roster_get hook: merge the user's LDAP shared-roster contacts into the
%% roster items.  Stored items that are also shared contacts are forced to
%% subscription 'both'; shared contacts without a stored item are appended
%% as synthetic items carrying their shared group names.
get_user_roster(Items, {U, S} = US) ->
    SRUsers = get_user_to_groups_map(US, true),
    %% Fold over the stored items, consuming matches from the shared map so
    %% only the leftovers become synthetic items below.
    {NewItems1, SRUsersRest} =
	lists:mapfoldl(
	  fun (Item, SRUsers1) ->
		  {_, _, {U1, S1, _}} = Item#roster.usj,
		  US1 = {U1, S1},
		  case dict:find(US1, SRUsers1) of
		      {ok, _GroupNames} ->
			  {Item#roster{subscription = both, ask = none},
			   dict:erase(US1, SRUsers1)};
		      error ->
			  {Item, SRUsers1}
		  end
	  end,
	  SRUsers, Items),
    %% Remaining shared contacts: build roster items with 'both'
    %% subscription and the LDAP display name of the contact.
    SRItems = [#roster{usj = {U, S, {U1, S1, <<"">>}},
		       us = US, jid = {U1, S1, <<"">>},
		       name = get_user_name(U1, S1), subscription = both,
		       ask = none, groups = GroupNames}
	       || {{U1, S1}, GroupNames} <- dict:to_list(SRUsersRest)],
    SRItems ++ NewItems1.
%% roster_process_item hook: rewrite roster entries when the user moves or
%% renames them in his contact list.  Shared contacts are pinned to
%% subscription 'both'; an attempted removal is reverted and the shared
%% group names are restored.
process_item(RosterItem, _Host) ->
    USFrom = RosterItem#roster.us,
    {User, Server, _Resource} = RosterItem#roster.jid,
    USTo = {User, Server},
    Map = get_user_to_groups_map(USFrom, false),
    case dict:find(USTo, Map) of
      error -> RosterItem;                  %% not a shared contact: keep as is
      {ok, []} -> RosterItem;
      {ok, GroupNames}
	  when RosterItem#roster.subscription == remove ->
	  %% Deleting a shared contact is not allowed: restore it.
	  RosterItem#roster{subscription = both, ask = none,
			    groups = GroupNames};
      _ -> RosterItem#roster{subscription = both, ask = none}
    end.
%% roster_get_subscription_lists hook: add every member of the user's
%% displayed shared groups to both the "from" and "to" presence
%% subscription lists.
get_subscription_lists({F, T, P}, User, Server) ->
    LUser = jid:nodeprep(User),
    LServer = jid:nameprep(Server),
    US = {LUser, LServer},
    DisplayedGroups = get_user_displayed_groups(US),
    SRUsers = lists:usort(lists:flatmap(fun (Group) ->
						get_group_users(LServer, Group)
					end,
					DisplayedGroups)),
    SRJIDs = [{U1, S1, <<"">>} || {U1, S1} <- SRUsers],
    {lists:usort(SRJIDs ++ F), lists:usort(SRJIDs ++ T), P}.

%% roster_get_jid_info hook: if JID is one of User's shared contacts,
%% report subscription 'both' and the shared group names (unless the
%% stored roster item already provides groups).
get_jid_info({Subscription, Groups}, User, Server, JID) ->
    LUser = jid:nodeprep(User),
    LServer = jid:nameprep(Server),
    US = {LUser, LServer},
    {U1, S1, _} = jid:to_lower(JID),
    US1 = {U1, S1},
    SRUsers = get_user_to_groups_map(US, false),
    case dict:find(US1, SRUsers) of
      {ok, GroupNames} ->
	  NewGroups = if Groups == [] -> GroupNames;
			 true -> Groups
		      end,
	  {both, NewGroups};
      error -> {Subscription, Groups}
    end.
%% roster_in_subscription hook: delegate to process_subscription/6.
in_subscription(Acc, User, Server, JID, Type, _Reason) ->
    process_subscription(in, User, Server, JID, Type, Acc).

%% roster_out_subscription hook: delegate to process_subscription/6.
out_subscription(User, Server, JID, Type) ->
    process_subscription(out, User, Server, JID, Type, false).

%% Suppress subscription traffic between a user and his shared-roster
%% contacts: the shared roster already implies mutual subscription, so
%% incoming requests are rejected ({stop, false}) and outgoing ones are
%% stopped.  Non-shared JIDs fall through with Acc unchanged.
process_subscription(Direction, User, Server, JID, _Type, Acc) ->
    LUser = jid:nodeprep(User),
    LServer = jid:nameprep(Server),
    US = {LUser, LServer},
    {U1, S1, _} = jid:to_lower(jid:to_bare(JID)),
    US1 = {U1, S1},
    DisplayedGroups = get_user_displayed_groups(US),
    SRUsers = lists:usort(lists:flatmap(
			    fun (Group) ->
				    get_group_users(LServer, Group)
			    end,
			    DisplayedGroups)),
    case lists:member(US1, SRUsers) of
      true ->
	  case Direction of
	    in -> {stop, false};
	    out -> stop
	  end;
      false -> Acc
    end.
%%====================================================================
%% config change hook
%%====================================================================
%% host_config_update hook: when the host's global 'ldap' section changes,
%% push the current module options to the per-host gen_server.
%% NOTE(review): handle_call/3 in this module has no {new_config, ...}
%% clause and replies {error, badarg}, so the `ok = gen_server:call(...)`
%% match below would crash — verify against the rest of the module.
config_change(Acc, Host, ldap, _NewConfig) ->
    Proc = gen_mod:get_module_proc(Host, ?MODULE),
    Mods = ejabberd_config:get_local_option({modules, Host}),
    Opts = proplists:get_value(?MODULE,Mods,[]),
    ok = gen_server:call(Proc,{new_config, Host, Opts}),
    Acc;
%% Any other config section: pass through unchanged.
config_change(Acc, _, _, _) ->
    Acc.
%%====================================================================
%% gen_server callbacks
%%====================================================================
%% gen_server init: parse module options into #state{}, create the user and
%% group caches, register all roster hooks for this host, and start the
%% LDAP connection pool.
init([Host, Opts]) ->
    State = parse_options(Host, Opts),
    process_flag(trap_exit,true),
    cache_tab:new(shared_roster_ldap_user,
		  [{max_size, State#state.user_cache_size}, {lru, false},
		   {life_time, State#state.user_cache_validity}]),
    cache_tab:new(shared_roster_ldap_group,
		  [{max_size, State#state.group_cache_size}, {lru, false},
		   {life_time, State#state.group_cache_validity}]),
    ejabberd_hooks:add(host_config_update, Host, ?MODULE,
		       config_change, 50),
    ejabberd_hooks:add(roster_get, Host, ?MODULE,
		       get_user_roster, 70),
    ejabberd_hooks:add(roster_in_subscription, Host, ?MODULE,
		       in_subscription, 30),
    ejabberd_hooks:add(roster_out_subscription, Host, ?MODULE,
		       out_subscription, 30),
    ejabberd_hooks:add(roster_get_subscription_lists, Host, ?MODULE,
		       get_subscription_lists, 70),
    ejabberd_hooks:add(roster_get_jid_info, Host, ?MODULE,
		       get_jid_info, 70),
    ejabberd_hooks:add(roster_process_item, Host, ?MODULE,
		       process_item, 50),
    eldap_pool:start_link(State#state.eldap_id,
			  State#state.servers, State#state.backups,
			  State#state.port, State#state.dn,
			  State#state.password, State#state.tls_options),
    {ok, State}.
%% get_state is used by eldap_utils:get_state/2 from the hook functions.
%% NOTE(review): there is no {new_config, ...} clause although
%% config_change/4 calls it and matches on 'ok' — verify.
handle_call(get_state, _From, State) ->
    {reply, {ok, State}, State};
handle_call(_Request, _From, State) ->
    {reply, {error, badarg}, State}.

%% No casts or info messages are expected; ignore them.
handle_cast(_Msg, State) -> {noreply, State}.

handle_info(_Info, State) -> {noreply, State}.

%% Unregister every hook added in init/1 for this host.
terminate(_Reason, State) ->
    Host = State#state.host,
    ejabberd_hooks:delete(host_config_update, Host, ?MODULE, config_change, 50),
    ejabberd_hooks:delete(roster_get, Host, ?MODULE,
			  get_user_roster, 70),
    ejabberd_hooks:delete(roster_in_subscription, Host,
			  ?MODULE, in_subscription, 30),
    ejabberd_hooks:delete(roster_out_subscription, Host,
			  ?MODULE, out_subscription, 30),
    ejabberd_hooks:delete(roster_get_subscription_lists,
			  Host, ?MODULE, get_subscription_lists, 70),
    ejabberd_hooks:delete(roster_get_jid_info, Host,
			  ?MODULE, get_jid_info, 70),
    ejabberd_hooks:delete(roster_process_item, Host,
			  ?MODULE, process_item, 50).

code_change(_OldVsn, State, _Extra) -> {ok, State}.
%%--------------------------------------------------------------------
Internal functions
%%--------------------------------------------------------------------
%% For a given user, map all his shared roster contacts to groups they are
%% members of. Skip the user himself iff SkipUS is true.
get_user_to_groups_map({_, Server} = US, SkipUS) ->
    DisplayedGroups = get_user_displayed_groups(US),
    lists:foldl(fun (Group, Dict1) ->
			GroupName = get_group_name(Server, Group),
			lists:foldl(fun (Contact, Dict) ->
					    if
					      %% Optionally leave the user
					      %% himself out of the map.
					      SkipUS, Contact == US -> Dict;
					      true -> dict:append(Contact, GroupName, Dict)
					    end
				    end,
				    Dict1, get_group_users(Server, Group))
		end,
		dict:new(), DisplayedGroups).

%% Pass given FilterParseArgs to eldap_filter:parse, and if successful, run
%% the resulting filter, retrieving given AttributesList. Return the
%% result entries. On any error silently return an empty list of results.
%%
%% Eldap server ID and base DN for the query are both retrieved from the
%% State record.
eldap_search(State, FilterParseArgs, AttributesList) ->
    case apply(eldap_filter, parse, FilterParseArgs) of
      {ok, EldapFilter} ->
	  case eldap_pool:search(State#state.eldap_id,
				 [{base, State#state.base},
				  {filter, EldapFilter},
				  {timeout, ?LDAP_SEARCH_TIMEOUT},
				  {deref_aliases, State#state.deref_aliases},
				  {attributes, AttributesList}])
	      of
	    #eldap_search_result{entries = Es} ->
		%% A result with entries. Return their list.
		Es;
	    _ ->
		%% Something else. Pretend we got no results.
		[]
	  end;
      _->
	  %% Filter parsing failed. Pretend we got no results.
	  []
    end.
get_user_displayed_groups({User, Host}) ->
{ok, State} = eldap_utils:get_state(Host, ?MODULE),
GroupAttr = State#state.group_attr,
Entries = eldap_search(State,
[eldap_filter:do_sub(State#state.rfilter, [{<<"%u">>, User}])],
[GroupAttr]),
Reply = lists:flatmap(fun (#eldap_entry{attributes = Attrs}) ->
case eldap_utils:singleton_value(Attrs) of
{GroupAttr, Value} -> [Value];
_ -> []
end
end,
Entries),
lists:usort(Reply).
get_group_users(Host, Group) ->
{ok, State} = eldap_utils:get_state(Host, ?MODULE),
case cache_tab:dirty_lookup(shared_roster_ldap_group,
{Group, Host},
fun () -> search_group_info(State, Group) end)
of
{ok, #group_info{members = Members}}
when Members /= undefined ->
Members;
_ -> []
end.
get_group_name(Host, Group) ->
{ok, State} = eldap_utils:get_state(Host, ?MODULE),
case cache_tab:dirty_lookup(shared_roster_ldap_group,
{Group, Host},
fun () -> search_group_info(State, Group) end)
of
{ok, #group_info{desc = GroupName}}
when GroupName /= undefined ->
GroupName;
_ -> Group
end.
get_user_name(User, Host) ->
{ok, State} = eldap_utils:get_state(Host, ?MODULE),
case cache_tab:dirty_lookup(shared_roster_ldap_user,
{User, Host},
fun () -> search_user_name(State, User) end)
of
{ok, UserName} -> UserName;
error -> User
end.
search_group_info(State, Group) ->
Extractor = case State#state.uid_format_re of
<<"">> ->
fun (UID) ->
catch eldap_utils:get_user_part(
UID,
State#state.uid_format)
end;
_ ->
fun (UID) ->
catch get_user_part_re(
UID,
State#state.uid_format_re)
end
end,
AuthChecker = case State#state.auth_check of
true -> fun ejabberd_auth:is_user_exists/2;
_ -> fun (_U, _S) -> true end
end,
Host = State#state.host,
case eldap_search(State,
[eldap_filter:do_sub(State#state.gfilter,
[{<<"%g">>, Group}])],
[State#state.group_attr, State#state.group_desc,
State#state.uid])
of
[] ->
error;
LDAPEntries ->
{GroupDesc, MembersLists} =
lists:foldl(
fun (#eldap_entry{attributes = Attrs}, {DescAcc, JIDsAcc}) ->
case
{eldap_utils:get_ldap_attr(State#state.group_attr, Attrs),
eldap_utils:get_ldap_attr(State#state.group_desc, Attrs),
lists:keysearch(State#state.uid, 1, Attrs)}
of
{ID, Desc, {value, {GroupMemberAttr, MemberIn}}}
when ID /= <<"">>, GroupMemberAttr == State#state.uid ->
Member = case MemberIn of
[M] ->
M;
_ ->
MemberIn
end,
JIDs = lists:foldl(
fun ({ok, UID}, L) ->
PUID = jid:nodeprep(UID),
case PUID of
error ->
L;
_ ->
case AuthChecker(PUID, Host) of
true ->
[{PUID, Host} | L];
_ ->
L
end
end;
(_, L) -> L
end,
[],
[Extractor(Member)]),
{Desc, [JIDs | JIDsAcc]};
_ ->
{DescAcc, JIDsAcc}
end
end,
{Group, []}, LDAPEntries),
{ok,
#group_info{desc = GroupDesc,
members = lists:usort(lists:flatten(MembersLists))}}
end.
search_user_name(State, User) ->
case eldap_search(State,
[eldap_filter:do_sub(State#state.ufilter,
[{<<"%u">>, User}])],
[State#state.user_desc, State#state.user_uid])
of
[#eldap_entry{attributes = Attrs} | _] ->
case {eldap_utils:get_ldap_attr(State#state.user_uid, Attrs),
eldap_utils:get_ldap_attr(State#state.user_desc, Attrs)}
of
{UID, Desc} when UID /= <<"">> -> {ok, Desc};
_ -> error
end;
[] -> error
end.
%% Getting User ID part by regex pattern
get_user_part_re(String, Pattern) ->
case catch re:run(String, Pattern) of
{match, Captured} ->
{First, Len} = lists:nth(2, Captured),
Result = binary:part(String, First, Len),
{ok, Result};
_ -> {error, badmatch}
end.
parse_options(Host, Opts) ->
Eldap_ID = atom_to_binary(gen_mod:get_module_proc(Host, ?MODULE),utf8),
Cfg = eldap_utils:get_config(Host, Opts),
GroupAttr = eldap_utils:get_mod_opt(ldap_groupattr, Opts,
fun iolist_to_binary/1,
<<"cn">>),
GroupDesc = eldap_utils:get_mod_opt(ldap_groupdesc, Opts,
fun iolist_to_binary/1,
GroupAttr),
UserDesc = eldap_utils:get_mod_opt(ldap_userdesc, Opts,
fun iolist_to_binary/1,
<<"cn">>),
UserUID = eldap_utils:get_mod_opt(ldap_useruid, Opts,
fun iolist_to_binary/1,
<<"cn">>),
UIDAttr = eldap_utils:get_mod_opt(ldap_memberattr, Opts,
fun iolist_to_binary/1,
<<"memberUid">>),
UIDAttrFormat = eldap_utils:get_mod_opt(ldap_memberattr_format, Opts,
fun iolist_to_binary/1,
<<"%u">>),
UIDAttrFormatRe = eldap_utils:get_mod_opt(ldap_memberattr_format_re, Opts,
fun(S) ->
Re = iolist_to_binary(S),
{ok, MP} = re:compile(Re),
MP
end, <<"">>),
AuthCheck = eldap_utils:get_mod_opt(ldap_auth_check, Opts,
fun(on) -> true;
(off) -> false;
(false) -> false;
(true) -> true
end, true),
UserCacheValidity = eldap_utils:get_opt(
{ldap_user_cache_validity, Host}, Opts,
fun(I) when is_integer(I), I>0 -> I end,
?USER_CACHE_VALIDITY),
GroupCacheValidity = eldap_utils:get_opt(
{ldap_group_cache_validity, Host}, Opts,
fun(I) when is_integer(I), I>0 -> I end,
?GROUP_CACHE_VALIDITY),
UserCacheSize = eldap_utils:get_opt(
{ldap_user_cache_size, Host}, Opts,
fun(I) when is_integer(I), I>0 -> I end,
?CACHE_SIZE),
GroupCacheSize = eldap_utils:get_opt(
{ldap_group_cache_size, Host}, Opts,
fun(I) when is_integer(I), I>0 -> I end,
?CACHE_SIZE),
ConfigFilter = eldap_utils:get_opt({ldap_filter, Host}, Opts,
fun check_filter/1, <<"">>),
ConfigUserFilter = eldap_utils:get_opt({ldap_ufilter, Host}, Opts,
fun check_filter/1, <<"">>),
ConfigGroupFilter = eldap_utils:get_opt({ldap_gfilter, Host}, Opts,
fun check_filter/1, <<"">>),
RosterFilter = eldap_utils:get_opt({ldap_rfilter, Host}, Opts,
fun check_filter/1, <<"">>),
SubFilter = <<"(&(", UIDAttr/binary, "=", UIDAttrFormat/binary,
")(", GroupAttr/binary, "=%g))">>,
UserSubFilter = case ConfigUserFilter of
<<"">> ->
eldap_filter:do_sub(SubFilter, [{<<"%g">>, <<"*">>}]);
UString -> UString
end,
GroupSubFilter = case ConfigGroupFilter of
<<"">> ->
eldap_filter:do_sub(SubFilter, [{<<"%u">>, <<"*">>}]);
GString -> GString
end,
Filter = case ConfigFilter of
<<"">> -> SubFilter;
_ ->
<<"(&", SubFilter/binary, ConfigFilter/binary, ")">>
end,
UserFilter = case ConfigFilter of
<<"">> -> UserSubFilter;
_ ->
<<"(&", UserSubFilter/binary, ConfigFilter/binary, ")">>
end,
GroupFilter = case ConfigFilter of
<<"">> -> GroupSubFilter;
_ ->
<<"(&", GroupSubFilter/binary, ConfigFilter/binary,")">>
end,
#state{host = Host, eldap_id = Eldap_ID,
servers = Cfg#eldap_config.servers,
backups = Cfg#eldap_config.backups,
port = Cfg#eldap_config.port,
tls_options = Cfg#eldap_config.tls_options,
dn = Cfg#eldap_config.dn,
password = Cfg#eldap_config.password,
base = Cfg#eldap_config.base,
deref_aliases = Cfg#eldap_config.deref_aliases,
uid = UIDAttr,
group_attr = GroupAttr, group_desc = GroupDesc,
user_desc = UserDesc, user_uid = UserUID,
uid_format = UIDAttrFormat,
uid_format_re = UIDAttrFormatRe, filter = Filter,
ufilter = UserFilter, rfilter = RosterFilter,
gfilter = GroupFilter, auth_check = AuthCheck,
user_cache_size = UserCacheSize,
user_cache_validity = UserCacheValidity,
group_cache_size = GroupCacheSize,
group_cache_validity = GroupCacheValidity}.
check_filter(F) ->
NewF = iolist_to_binary(F),
{ok, _} = eldap_filter:parse(NewF),
NewF.
| null | https://raw.githubusercontent.com/MyDataFlow/ttalk-server/07a60d5d74cd86aedd1f19c922d9d3abf2ebf28d/apps/ejabberd/src/mod_shared_roster_ldap.erl | erlang | -------------------------------------------------------------------
Author : Realloc <>
Description : LDAP shared roster management
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with this program; if not, write to the Free Software
-------------------------------------------------------------------
API
gen_server callbacks
====================================================================
API
====================================================================
--------------------------------------------------------------------
Hooks
--------------------------------------------------------------------
This function in use to rewrite the roster entries when moving or renaming
them in the user contact list.
====================================================================
config change hook
====================================================================
react to "global" config change
====================================================================
gen_server callbacks
====================================================================
--------------------------------------------------------------------
--------------------------------------------------------------------
For a given user, map all his shared roster contacts to groups they are
members of. Skip the user himself iff SkipUS is true.
Pass given FilterParseArgs to eldap_filter:parse, and if successful, run and
result entries. On any error silently return an empty list of results.
record.
A result with entries. Return their list.
Something else. Pretend we got no results.
Filter parsing failed. Pretend we got no results.
Getting User ID part by regex pattern | File :
< >
< >
Created : 5 Mar 2005 by < >
ejabberd , Copyright ( C ) 2002 - 2013 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA
02111 - 1307 USA
-module(mod_shared_roster_ldap).
-behaviour(gen_server).
-behaviour(gen_mod).
-export([start_link/2, start/2, stop/1]).
-export([init/1, handle_call/3, handle_cast/2,
handle_info/2, terminate/2, code_change/3]).
-export([get_user_roster/2, get_subscription_lists/3,
get_jid_info/4, process_item/2, in_subscription/6,
out_subscription/4]).
-export([config_change/4]).
-include("ejabberd.hrl").
-include("jlib.hrl").
-include("mod_roster.hrl").
-include("eldap.hrl").
-define(CACHE_SIZE, 1000).
-define(USER_CACHE_VALIDITY, 300).
-define(GROUP_CACHE_VALIDITY, 300).
-define(LDAP_SEARCH_TIMEOUT, 5).
-record(state,
{host = <<"">> :: binary(),
eldap_id = <<"">> :: binary(),
servers = [] :: [binary()],
backups = [] :: [binary()],
port = ?LDAP_PORT :: inet:port_number(),
tls_options = [] :: list(),
dn = <<"">> :: binary(),
base = <<"">> :: binary(),
password = <<"">> :: binary(),
uid = <<"">> :: binary(),
deref_aliases = never :: never | searching |
finding | always,
group_attr = <<"">> :: binary(),
group_desc = <<"">> :: binary(),
user_desc = <<"">> :: binary(),
user_uid = <<"">> :: binary(),
uid_format = <<"">> :: binary(),
uid_format_re = <<"">> :: binary(),
filter = <<"">> :: binary(),
ufilter = <<"">> :: binary(),
rfilter = <<"">> :: binary(),
gfilter = <<"">> :: binary(),
auth_check = true :: boolean(),
user_cache_size = ?CACHE_SIZE :: non_neg_integer(),
group_cache_size = ?CACHE_SIZE :: non_neg_integer(),
user_cache_validity = ?USER_CACHE_VALIDITY :: non_neg_integer(),
group_cache_validity = ?GROUP_CACHE_VALIDITY :: non_neg_integer()}).
-record(group_info, {desc, members}).
start_link(Host, Opts) ->
Proc = gen_mod:get_module_proc(Host, ?MODULE),
gen_server:start_link({local, Proc}, ?MODULE,
[Host, Opts], []).
start(Host, Opts) ->
Proc = gen_mod:get_module_proc(Host, ?MODULE),
ChildSpec = {Proc, {?MODULE, start_link, [Host, Opts]},
permanent, 1000, worker, [?MODULE]},
supervisor:start_child(ejabberd_sup, ChildSpec).
stop(Host) ->
Proc = gen_mod:get_module_proc(Host, ?MODULE),
supervisor:terminate_child(ejabberd_sup, Proc),
supervisor:delete_child(ejabberd_sup, Proc).
get_user_roster(Items, {U, S} = US) ->
SRUsers = get_user_to_groups_map(US, true),
{NewItems1, SRUsersRest} =
lists:mapfoldl(
fun (Item, SRUsers1) ->
{_, _, {U1, S1, _}} = Item#roster.usj,
US1 = {U1, S1},
case dict:find(US1, SRUsers1) of
{ok, _GroupNames} ->
{Item#roster{subscription = both, ask = none},
dict:erase(US1, SRUsers1)};
error ->
{Item, SRUsers1}
end
end,
SRUsers, Items),
SRItems = [#roster{usj = {U, S, {U1, S1, <<"">>}},
us = US, jid = {U1, S1, <<"">>},
name = get_user_name(U1, S1), subscription = both,
ask = none, groups = GroupNames}
|| {{U1, S1}, GroupNames} <- dict:to_list(SRUsersRest)],
SRItems ++ NewItems1.
process_item(RosterItem, _Host) ->
USFrom = RosterItem#roster.us,
{User, Server, _Resource} = RosterItem#roster.jid,
USTo = {User, Server},
Map = get_user_to_groups_map(USFrom, false),
case dict:find(USTo, Map) of
error -> RosterItem;
{ok, []} -> RosterItem;
{ok, GroupNames}
when RosterItem#roster.subscription == remove ->
RosterItem#roster{subscription = both, ask = none,
groups = GroupNames};
_ -> RosterItem#roster{subscription = both, ask = none}
end.
get_subscription_lists({F, T, P}, User, Server) ->
LUser = jid:nodeprep(User),
LServer = jid:nameprep(Server),
US = {LUser, LServer},
DisplayedGroups = get_user_displayed_groups(US),
SRUsers = lists:usort(lists:flatmap(fun (Group) ->
get_group_users(LServer, Group)
end,
DisplayedGroups)),
SRJIDs = [{U1, S1, <<"">>} || {U1, S1} <- SRUsers],
{lists:usort(SRJIDs ++ F), lists:usort(SRJIDs ++ T), P}.
get_jid_info({Subscription, Groups}, User, Server, JID) ->
LUser = jid:nodeprep(User),
LServer = jid:nameprep(Server),
US = {LUser, LServer},
{U1, S1, _} = jid:to_lower(JID),
US1 = {U1, S1},
SRUsers = get_user_to_groups_map(US, false),
case dict:find(US1, SRUsers) of
{ok, GroupNames} ->
NewGroups = if Groups == [] -> GroupNames;
true -> Groups
end,
{both, NewGroups};
error -> {Subscription, Groups}
end.
in_subscription(Acc, User, Server, JID, Type, _Reason) ->
process_subscription(in, User, Server, JID, Type, Acc).
out_subscription(User, Server, JID, Type) ->
process_subscription(out, User, Server, JID, Type, false).
process_subscription(Direction, User, Server, JID, _Type, Acc) ->
LUser = jid:nodeprep(User),
LServer = jid:nameprep(Server),
US = {LUser, LServer},
{U1, S1, _} = jid:to_lower(jid:to_bare(JID)),
US1 = {U1, S1},
DisplayedGroups = get_user_displayed_groups(US),
SRUsers = lists:usort(lists:flatmap(
fun (Group) ->
get_group_users(LServer, Group)
end,
DisplayedGroups)),
case lists:member(US1, SRUsers) of
true ->
case Direction of
in -> {stop, false};
out -> stop
end;
false -> Acc
end.
config_change(Acc, Host, ldap, _NewConfig) ->
Proc = gen_mod:get_module_proc(Host, ?MODULE),
Mods = ejabberd_config:get_local_option({modules, Host}),
Opts = proplists:get_value(?MODULE,Mods,[]),
ok = gen_server:call(Proc,{new_config, Host, Opts}),
Acc;
config_change(Acc, _, _, _) ->
Acc.
init([Host, Opts]) ->
State = parse_options(Host, Opts),
process_flag(trap_exit,true),
cache_tab:new(shared_roster_ldap_user,
[{max_size, State#state.user_cache_size}, {lru, false},
{life_time, State#state.user_cache_validity}]),
cache_tab:new(shared_roster_ldap_group,
[{max_size, State#state.group_cache_size}, {lru, false},
{life_time, State#state.group_cache_validity}]),
ejabberd_hooks:add(host_config_update, Host, ?MODULE,
config_change, 50),
ejabberd_hooks:add(roster_get, Host, ?MODULE,
get_user_roster, 70),
ejabberd_hooks:add(roster_in_subscription, Host, ?MODULE,
in_subscription, 30),
ejabberd_hooks:add(roster_out_subscription, Host, ?MODULE,
out_subscription, 30),
ejabberd_hooks:add(roster_get_subscription_lists, Host, ?MODULE,
get_subscription_lists, 70),
ejabberd_hooks:add(roster_get_jid_info, Host, ?MODULE,
get_jid_info, 70),
ejabberd_hooks:add(roster_process_item, Host, ?MODULE,
process_item, 50),
eldap_pool:start_link(State#state.eldap_id,
State#state.servers, State#state.backups,
State#state.port, State#state.dn,
State#state.password, State#state.tls_options),
{ok, State}.
handle_call(get_state, _From, State) ->
{reply, {ok, State}, State};
handle_call(_Request, _From, State) ->
{reply, {error, badarg}, State}.
handle_cast(_Msg, State) -> {noreply, State}.
handle_info(_Info, State) -> {noreply, State}.
terminate(_Reason, State) ->
Host = State#state.host,
ejabberd_hooks:delete(host_config_update, Host, ?MODULE, config_change, 50),
ejabberd_hooks:delete(roster_get, Host, ?MODULE,
get_user_roster, 70),
ejabberd_hooks:delete(roster_in_subscription, Host,
?MODULE, in_subscription, 30),
ejabberd_hooks:delete(roster_out_subscription, Host,
?MODULE, out_subscription, 30),
ejabberd_hooks:delete(roster_get_subscription_lists,
Host, ?MODULE, get_subscription_lists, 70),
ejabberd_hooks:delete(roster_get_jid_info, Host,
?MODULE, get_jid_info, 70),
ejabberd_hooks:delete(roster_process_item, Host,
?MODULE, process_item, 50).
code_change(_OldVsn, State, _Extra) -> {ok, State}.
Internal functions
get_user_to_groups_map({_, Server} = US, SkipUS) ->
DisplayedGroups = get_user_displayed_groups(US),
return the resulting filter , retrieving given AttributesList . Return the
Eldap server ID and base DN for the query are both retrieved from the State
lists:foldl(fun (Group, Dict1) ->
GroupName = get_group_name(Server, Group),
lists:foldl(fun (Contact, Dict) ->
if
SkipUS, Contact == US -> Dict;
true -> dict:append(Contact, GroupName, Dict)
end
end,
Dict1, get_group_users(Server, Group))
end,
dict:new(), DisplayedGroups).
eldap_search(State, FilterParseArgs, AttributesList) ->
case apply(eldap_filter, parse, FilterParseArgs) of
{ok, EldapFilter} ->
case eldap_pool:search(State#state.eldap_id,
[{base, State#state.base},
{filter, EldapFilter},
{timeout, ?LDAP_SEARCH_TIMEOUT},
{deref_aliases, State#state.deref_aliases},
{attributes, AttributesList}])
of
#eldap_search_result{entries = Es} ->
Es;
_ ->
[]
end;
_->
[]
end.
get_user_displayed_groups({User, Host}) ->
{ok, State} = eldap_utils:get_state(Host, ?MODULE),
GroupAttr = State#state.group_attr,
Entries = eldap_search(State,
[eldap_filter:do_sub(State#state.rfilter, [{<<"%u">>, User}])],
[GroupAttr]),
Reply = lists:flatmap(fun (#eldap_entry{attributes = Attrs}) ->
case eldap_utils:singleton_value(Attrs) of
{GroupAttr, Value} -> [Value];
_ -> []
end
end,
Entries),
lists:usort(Reply).
get_group_users(Host, Group) ->
{ok, State} = eldap_utils:get_state(Host, ?MODULE),
case cache_tab:dirty_lookup(shared_roster_ldap_group,
{Group, Host},
fun () -> search_group_info(State, Group) end)
of
{ok, #group_info{members = Members}}
when Members /= undefined ->
Members;
_ -> []
end.
get_group_name(Host, Group) ->
{ok, State} = eldap_utils:get_state(Host, ?MODULE),
case cache_tab:dirty_lookup(shared_roster_ldap_group,
{Group, Host},
fun () -> search_group_info(State, Group) end)
of
{ok, #group_info{desc = GroupName}}
when GroupName /= undefined ->
GroupName;
_ -> Group
end.
get_user_name(User, Host) ->
{ok, State} = eldap_utils:get_state(Host, ?MODULE),
case cache_tab:dirty_lookup(shared_roster_ldap_user,
{User, Host},
fun () -> search_user_name(State, User) end)
of
{ok, UserName} -> UserName;
error -> User
end.
search_group_info(State, Group) ->
Extractor = case State#state.uid_format_re of
<<"">> ->
fun (UID) ->
catch eldap_utils:get_user_part(
UID,
State#state.uid_format)
end;
_ ->
fun (UID) ->
catch get_user_part_re(
UID,
State#state.uid_format_re)
end
end,
AuthChecker = case State#state.auth_check of
true -> fun ejabberd_auth:is_user_exists/2;
_ -> fun (_U, _S) -> true end
end,
Host = State#state.host,
case eldap_search(State,
[eldap_filter:do_sub(State#state.gfilter,
[{<<"%g">>, Group}])],
[State#state.group_attr, State#state.group_desc,
State#state.uid])
of
[] ->
error;
LDAPEntries ->
{GroupDesc, MembersLists} =
lists:foldl(
fun (#eldap_entry{attributes = Attrs}, {DescAcc, JIDsAcc}) ->
case
{eldap_utils:get_ldap_attr(State#state.group_attr, Attrs),
eldap_utils:get_ldap_attr(State#state.group_desc, Attrs),
lists:keysearch(State#state.uid, 1, Attrs)}
of
{ID, Desc, {value, {GroupMemberAttr, MemberIn}}}
when ID /= <<"">>, GroupMemberAttr == State#state.uid ->
Member = case MemberIn of
[M] ->
M;
_ ->
MemberIn
end,
JIDs = lists:foldl(
fun ({ok, UID}, L) ->
PUID = jid:nodeprep(UID),
case PUID of
error ->
L;
_ ->
case AuthChecker(PUID, Host) of
true ->
[{PUID, Host} | L];
_ ->
L
end
end;
(_, L) -> L
end,
[],
[Extractor(Member)]),
{Desc, [JIDs | JIDsAcc]};
_ ->
{DescAcc, JIDsAcc}
end
end,
{Group, []}, LDAPEntries),
{ok,
#group_info{desc = GroupDesc,
members = lists:usort(lists:flatten(MembersLists))}}
end.
search_user_name(State, User) ->
case eldap_search(State,
[eldap_filter:do_sub(State#state.ufilter,
[{<<"%u">>, User}])],
[State#state.user_desc, State#state.user_uid])
of
[#eldap_entry{attributes = Attrs} | _] ->
case {eldap_utils:get_ldap_attr(State#state.user_uid, Attrs),
eldap_utils:get_ldap_attr(State#state.user_desc, Attrs)}
of
{UID, Desc} when UID /= <<"">> -> {ok, Desc};
_ -> error
end;
[] -> error
end.
get_user_part_re(String, Pattern) ->
case catch re:run(String, Pattern) of
{match, Captured} ->
{First, Len} = lists:nth(2, Captured),
Result = binary:part(String, First, Len),
{ok, Result};
_ -> {error, badmatch}
end.
parse_options(Host, Opts) ->
Eldap_ID = atom_to_binary(gen_mod:get_module_proc(Host, ?MODULE),utf8),
Cfg = eldap_utils:get_config(Host, Opts),
GroupAttr = eldap_utils:get_mod_opt(ldap_groupattr, Opts,
fun iolist_to_binary/1,
<<"cn">>),
GroupDesc = eldap_utils:get_mod_opt(ldap_groupdesc, Opts,
fun iolist_to_binary/1,
GroupAttr),
UserDesc = eldap_utils:get_mod_opt(ldap_userdesc, Opts,
fun iolist_to_binary/1,
<<"cn">>),
UserUID = eldap_utils:get_mod_opt(ldap_useruid, Opts,
fun iolist_to_binary/1,
<<"cn">>),
UIDAttr = eldap_utils:get_mod_opt(ldap_memberattr, Opts,
fun iolist_to_binary/1,
<<"memberUid">>),
UIDAttrFormat = eldap_utils:get_mod_opt(ldap_memberattr_format, Opts,
fun iolist_to_binary/1,
<<"%u">>),
UIDAttrFormatRe = eldap_utils:get_mod_opt(ldap_memberattr_format_re, Opts,
fun(S) ->
Re = iolist_to_binary(S),
{ok, MP} = re:compile(Re),
MP
end, <<"">>),
AuthCheck = eldap_utils:get_mod_opt(ldap_auth_check, Opts,
fun(on) -> true;
(off) -> false;
(false) -> false;
(true) -> true
end, true),
UserCacheValidity = eldap_utils:get_opt(
{ldap_user_cache_validity, Host}, Opts,
fun(I) when is_integer(I), I>0 -> I end,
?USER_CACHE_VALIDITY),
GroupCacheValidity = eldap_utils:get_opt(
{ldap_group_cache_validity, Host}, Opts,
fun(I) when is_integer(I), I>0 -> I end,
?GROUP_CACHE_VALIDITY),
UserCacheSize = eldap_utils:get_opt(
{ldap_user_cache_size, Host}, Opts,
fun(I) when is_integer(I), I>0 -> I end,
?CACHE_SIZE),
GroupCacheSize = eldap_utils:get_opt(
{ldap_group_cache_size, Host}, Opts,
fun(I) when is_integer(I), I>0 -> I end,
?CACHE_SIZE),
ConfigFilter = eldap_utils:get_opt({ldap_filter, Host}, Opts,
fun check_filter/1, <<"">>),
ConfigUserFilter = eldap_utils:get_opt({ldap_ufilter, Host}, Opts,
fun check_filter/1, <<"">>),
ConfigGroupFilter = eldap_utils:get_opt({ldap_gfilter, Host}, Opts,
fun check_filter/1, <<"">>),
RosterFilter = eldap_utils:get_opt({ldap_rfilter, Host}, Opts,
fun check_filter/1, <<"">>),
SubFilter = <<"(&(", UIDAttr/binary, "=", UIDAttrFormat/binary,
")(", GroupAttr/binary, "=%g))">>,
UserSubFilter = case ConfigUserFilter of
<<"">> ->
eldap_filter:do_sub(SubFilter, [{<<"%g">>, <<"*">>}]);
UString -> UString
end,
GroupSubFilter = case ConfigGroupFilter of
<<"">> ->
eldap_filter:do_sub(SubFilter, [{<<"%u">>, <<"*">>}]);
GString -> GString
end,
Filter = case ConfigFilter of
<<"">> -> SubFilter;
_ ->
<<"(&", SubFilter/binary, ConfigFilter/binary, ")">>
end,
UserFilter = case ConfigFilter of
<<"">> -> UserSubFilter;
_ ->
<<"(&", UserSubFilter/binary, ConfigFilter/binary, ")">>
end,
GroupFilter = case ConfigFilter of
<<"">> -> GroupSubFilter;
_ ->
<<"(&", GroupSubFilter/binary, ConfigFilter/binary,")">>
end,
#state{host = Host, eldap_id = Eldap_ID,
servers = Cfg#eldap_config.servers,
backups = Cfg#eldap_config.backups,
port = Cfg#eldap_config.port,
tls_options = Cfg#eldap_config.tls_options,
dn = Cfg#eldap_config.dn,
password = Cfg#eldap_config.password,
base = Cfg#eldap_config.base,
deref_aliases = Cfg#eldap_config.deref_aliases,
uid = UIDAttr,
group_attr = GroupAttr, group_desc = GroupDesc,
user_desc = UserDesc, user_uid = UserUID,
uid_format = UIDAttrFormat,
uid_format_re = UIDAttrFormatRe, filter = Filter,
ufilter = UserFilter, rfilter = RosterFilter,
gfilter = GroupFilter, auth_check = AuthCheck,
user_cache_size = UserCacheSize,
user_cache_validity = UserCacheValidity,
group_cache_size = GroupCacheSize,
group_cache_validity = GroupCacheValidity}.
check_filter(F) ->
NewF = iolist_to_binary(F),
{ok, _} = eldap_filter:parse(NewF),
NewF.
|
3cb2d6b1c2ea30a69ef1c8cd977972762ff54081747169ebf25b5e239fd7c5cf | openbadgefactory/salava | pdf.clj | (ns salava.badge.pdf
(:require [yesql.core :refer [defqueries]]
[salava.core.time :refer [unix-time date-from-unix-time]]
[salava.core.i18n :refer [t]]
[salava.core.helper :refer [dump private?]]
[salava.badge.evidence :refer [badge-evidence]]
[salava.badge.main :refer [user-badges-to-export fetch-badge badge-url]]
[salava.core.util :as u :refer [get-db plugin-fun get-plugins md->html str->qr-base64]]
[clj-pdf.core :as pdf]
[clj-pdf-markdown.core :refer [markdown->clj-pdf]]
[clojure.string :refer [ends-with? blank?]]
[salava.user.db :as ud]
[ring.util.io :as io]
[clojure.tools.logging :as log]
[salava.badge.endorsement :refer [user-badge-endorsements]]
[salava.badge.pdf-helper :as pdfh]
[clojure.core.reducers :as r]))
(defqueries "sql/badge/main.sql")
(defn replace-nils [data]
(clojure.walk/postwalk
(fn [d]
(if (map? d)
(let [m (into {} (map (fn [k v] (if (blank? (str v)) {k "-"} {k v})) (keys d) (vals d)))]
(when (seq m) m))
d))
data))
(defn pdf-generator-helper [ctx user-id input]
(let [badges-for-export (user-badges-to-export ctx user-id)
filtered-badges (filter (fn [b] (some #(= % (:id b)) input)) badges-for-export)
badge-with-content (map #(-> (fetch-badge ctx (:id %))
(assoc :tags (:tags %))) filtered-badges)
badge-ids (map #(:badge_id (first (get-in % [:content]))) badge-with-content)
temp (map #(select-multi-language-badge-content {:id %} (u/get-db ctx)) badge-ids)
badges (map #(-> %1
(dissoc :content)
(assoc :qr_code (str->qr-base64 (badge-url ctx (:id %1)))
:endorsements (->> [(vec (select-badge-endorsements {:id (:badge_id %1)} (u/get-db ctx)))
(user-badge-endorsements ctx (:id %1))]
flatten) #_(->> (vec (select-badge-endorsements {:id (:badge_id %1)} (u/get-db ctx))))
:evidences (remove (fn [e] (= true (get-in e [:properties :hidden]))) (badge-evidence ctx (:id %1) user-id))
:content %2)) badge-with-content temp)]
(replace-nils badges)))
(defn- badge-pdf-helper [ctx user-id id]
(let [badge (fetch-badge ctx id)]
(replace-nils (some-> badge
(assoc :qr_code (str->qr-base64 (badge-url ctx id))
:endorsements (->> [(vec (select-badge-endorsements {:id (:badge_id badge)} (u/get-db ctx)))
(user-badge-endorsements ctx id)]
flatten)
:evidences (remove (fn [e] (= true (get-in e [:properties :hidden]))) (badge-evidence ctx id user-id))
:content (select-multi-language-badge-content {:id (:badge_id badge)} (u/get-db ctx)))))))
#_(defn process-pdf-page [stylesheet template badge ul]
(let [file (java.io.File/createTempFile "temp" ".pdf")
pdf (pdf/pdf (into [stylesheet] (template badge)) (.getAbsolutePath file))]
(if (blank? (slurp file))
(pdf/pdf [{} [:paragraph (t :core/Errorpage ul)]] (.getAbsolutePath file))
file)))
(defn process-markdown-helper [markdown id context]
(let [file (java.io.File/createTempFile "markdown" ".pdf")]
(try
(pdf/pdf [{} (pdfh/markdown->clj-pdf {:wrap {:global-wrapper :paragraph}} markdown)] (.getAbsolutePath file))
true
(catch Exception e
(log/error (str "Markdown Error in Badge id: " id " in " context))
false)
(finally (.delete file)))))
(defn process-markdown [markdown id context]
(if (== 1 (count markdown))
markdown
(if-let [p (process-markdown-helper markdown id context)]
} ; : spacer { : extra - starting - value 1 : allow - extra - line - breaks ? true : single - value 2 }
:wrap {:global-wrapper :paragraph}} markdown)
"")))
(defn generatePDF [ctx user-id input lang]
(let [data-dir (get-in ctx [:config :core :data-dir])
site-url (get-in ctx [:config :core :site-url])
badges [(badge-pdf-helper ctx user-id input)] #_(pdf-generator-helper ctx user-id input)
user-data (ud/user-information ctx user-id)
ul (if (blank? (:language user-data)) "en" (:language user-data))
font-path (first (mapcat #(get-in ctx [:config % :font] []) (get-plugins ctx)))
font {:encoding :unicode :ttf-name (str site-url font-path)}
stylesheet {:heading-name {:color [127 113 121]
:family :times-roman
:align :center}
:generic {:family :times-roman
:color [127 113 121]
:indent 20}
:link {:family :times-roman
:color [66 100 162]}
:chunk {:size 11
:style :bold}}
pdf-settings (if (empty? font-path) {:stylesheet stylesheet :bottom-margin 0 :footer {:page-numbers true :align :right}} {:font font :stylesheet stylesheet :bottom-margin 0 :footer {:page-numbers false :align :right}}) ;:register-system-fonts? true})
badge-template (pdf/template
(let [template #(cons [:paragraph] [(if (and (not (= "-" (:image_file %))) (ends-with? (:image_file %) "png"))
[:image {:width 85 :height 85 :align :center} (str data-dir "/" (:image_file %))]
[:image {:width 85 :height 85 :align :center :base64 true} $qr_code])
[:heading.heading-name (:name %)]
[:paragraph {:indent 20 :align :center} [:spacer]]
[:paragraph.generic {:align :left :style :italic} (:description %)] [:spacer]
[:paragraph.generic
[:chunk.chunk (str (t :badge/Recipient ul) ": ")] (str $first_name " " $last_name) "\n"
[:chunk.chunk (str (t :badge/Issuedby ul) ": ")] (:issuer_content_name %) "\n"
(when-not (= "-" (:creator_name %))
[:paragraph.generic
[:chunk.chunk (str (t :badge/Createdby ul) ": ")] (:creator_name %)])
[:chunk.chunk (str (t :badge/Issuedon ul) ": ")] (if (number? $issued_on) (date-from-unix-time (long (* 1000 $issued_on)) "date") $issued_on) "\n"
[:paragraph
[:chunk.chunk (str (t :badge/Expireson ul) ": ")] (if (number? $expires_on) (date-from-unix-time (long (* 1000 $expires_on)) "date") $expires_on)]
(when-not (empty? $tags)
[:paragraph.generic
[:chunk.chunk (str (t :badge/Tags ul) ": ")] (into [:phrase] (for [t $tags] (str t " ")))])
[:paragraph
(let [alignments (replace-nils (select-alignment-content {:badge_content_id (:badge_content_id %)} (u/get-db ctx)))]
(when-not (empty? alignments)
[:paragraph.generic
[:spacer 0]
[:phrase {:size 12 :style :bold} (str (t :badge/Alignments ul) ": " (count alignments))] "\n"
(into [:paragraph] (for [a alignments]
[:paragraph
(:name a) "\n"
[:chunk {:style :italic} (:description a)] "\n"
[:chunk.link (:url a)] [:spacer 0]]))]))]
[:paragraph
[:chunk.chunk (str (t :badge/Criteria ul) ": ")] [:anchor {:target (:criteria_url %)} [:chunk.link (t :badge/Opencriteriapage ul)]] "\n"
[:spacer 0]
(process-markdown (:criteria_content %) $id "Criteria")
[:spacer 1] "\n"]]
(when-not (empty? $evidences)
[:paragraph.generic
[:spacer 0]
[:phrase {:size 12 :style :bold} (if (= 1 (count $evidences)) (t :badge/Evidence ul) (t :badge/Evidences ul))]
[:spacer]
(reduce (fn [r evidence]
(conj r [:phrase
(when (and (not (blank? (:name evidence))) (not= "-" (:name evidence))) [:phrase [:chunk (:name evidence)] "\n"])
(when (and (not (blank? (:narrative evidence))) (not= "-" (:narrative evidence))) [:phrase [:chunk (:narrative evidence)] "\n"])
(when (and (not (blank? (:description evidence))) (not= "-" (:description evidence))) [:phrase [:chunk (:description evidence)] "\n"])
[:anchor {:target (:url evidence)} [:chunk.link (str (t :badge/Openevidencepage ul) "...")]]
[:spacer 2]]))
[:list {:numbered true :indent 0}] $evidences)])
(when (seq $endorsements)
[:paragraph.generic
;[:spacer 0]
[:phrase {:size 12 :style :bold} (t :badge/BadgeEndorsedBy ul)] "\n"
[:spacer 0]
(into [:paragraph {:indent 0}]
(for [e $endorsements]
[:paragraph
(if (or (= "-" (:issuer_name e)) (blank? (:issuer_name e))) (str (:first_name e) " " (:last_name e)) (:issuer_name e)) "\n"
[:anchor {:target (:issuer_url e) :style {:family :times-roman :color [66 100 162]}} (or (:issuer_url e) "-")] "\n"
[:chunk (if (number? (or (:issued_on e) (:mtime e))) (date-from-unix-time (long (* 1000 (or (:issued_on e) (:mtime e))))) (or (:issued_on e) (:mtime e)))] "\n"
(process-markdown (:content e) $id "Endorsements")]))])
[:line {:dotted true}]
[:spacer 0]
[:heading.heading-name (t :badge/IssuerInfo ul)]
[:spacer 0]
[:paragraph.generic
[:chunk.chunk (str (t :badge/IssuerDescription ul) ": ")] (:issuer_description %)]
[:spacer 0]
[:paragraph.generic
[:chunk.chunk (str (t :badge/IssuerWebsite ul) ": ")]
[:anchor {:target (:issuer_content_url %) :style {:family :times-roman :color [66 100 162]}} (:issuer_content_url %)]]
[:paragraph.generic
[:chunk.chunk (str (t :badge/IssuerContact ul) ": ")] (:issuer_contact %)]
(when-not (= "-" (and (:creator_description %) (:creator_url %) (:creator_email %)))
[:paragraph.generic
[:spacer 0]
[:paragraph
[:chunk.chunk (str (t :badge/CreatorDescription ul) ": ")] (:creator_description %) "\n"]
[:paragraph
[:chunk.chunk (str (t :badge/CreatorWebsite ul) ": ")] [:anchor {:target (:creator_url %) :style {:family :times-roman :color [66 100 162]}} (:creator_url %)]]
[:paragraph
[:chunk.chunk (str (t :badge/CreatorContact ul) ": ")] (:creator_email %)]])
(let [issuer-endorsement (replace-nils (select-issuer-endorsements {:id (:issuer_content_id %)} (u/get-db ctx)))]
(when-not (empty? issuer-endorsement)
[:paragraph.generic
[:spacer]
[:phrase {:size 12 :style :bold} (t :badge/IssuerEndorsedBy ul)]
[:spacer 0]
(into [:paragraph {:indent 0}]
(for [e issuer-endorsement]
[:paragraph {:indent 0}
(:issuer_name e) "\n"
[:anchor {:target (:issuer_url e) :style {:family :times-roman :color [66 100 162]}} (:issuer_url e)] "\n"
[:chunk (if (number? (:issued_on e)) (date-from-unix-time (long (* 1000 (:issued_on e)))) (:issued_on e))] "\n"
(process-markdown (:content e) $id "Issuer Endorsements")]))]))
[:pdf-table {:align :right :width-percent 100 :cell-border false}
nil
[[:pdf-cell [:paragraph {:align :right} [:chunk [:image {:width 60 :height 60 :base64 true} $qr_code]] "\n"
[:phrase [:chunk.link {:style :italic} (str site-url "/app/badge/info/" $id)]]]]]]
[:pagebreak]])
content (if (= lang "all") (map template $content) (map template (filter #(= (:default_language_code %) (:language_code %)) $content)))]
(reduce into [] content)))]
(fn [out]
(try
(pdf/pdf (into [pdf-settings] (badge-template badges)) out)
(catch Exception e
(log/error "PDF not generated")
(log/error (.getMessage e))
(pdf/pdf [{} [:paragraph (t :core/Errorpage ul)]] out))))))
| null | https://raw.githubusercontent.com/openbadgefactory/salava/97f05992406e4dcbe3c4bff75c04378d19606b61/src/clj/salava/badge/pdf.clj | clojure | : spacer { : extra - starting - value 1 : allow - extra - line - breaks ? true : single - value 2 }
:register-system-fonts? true})
[:spacer 0] | (ns salava.badge.pdf
(:require [yesql.core :refer [defqueries]]
[salava.core.time :refer [unix-time date-from-unix-time]]
[salava.core.i18n :refer [t]]
[salava.core.helper :refer [dump private?]]
[salava.badge.evidence :refer [badge-evidence]]
[salava.badge.main :refer [user-badges-to-export fetch-badge badge-url]]
[salava.core.util :as u :refer [get-db plugin-fun get-plugins md->html str->qr-base64]]
[clj-pdf.core :as pdf]
[clj-pdf-markdown.core :refer [markdown->clj-pdf]]
[clojure.string :refer [ends-with? blank?]]
[salava.user.db :as ud]
[ring.util.io :as io]
[clojure.tools.logging :as log]
[salava.badge.endorsement :refer [user-badge-endorsements]]
[salava.badge.pdf-helper :as pdfh]
[clojure.core.reducers :as r]))
(defqueries "sql/badge/main.sql")
(defn replace-nils [data]
(clojure.walk/postwalk
(fn [d]
(if (map? d)
(let [m (into {} (map (fn [k v] (if (blank? (str v)) {k "-"} {k v})) (keys d) (vals d)))]
(when (seq m) m))
d))
data))
(defn pdf-generator-helper [ctx user-id input]
(let [badges-for-export (user-badges-to-export ctx user-id)
filtered-badges (filter (fn [b] (some #(= % (:id b)) input)) badges-for-export)
badge-with-content (map #(-> (fetch-badge ctx (:id %))
(assoc :tags (:tags %))) filtered-badges)
badge-ids (map #(:badge_id (first (get-in % [:content]))) badge-with-content)
temp (map #(select-multi-language-badge-content {:id %} (u/get-db ctx)) badge-ids)
badges (map #(-> %1
(dissoc :content)
(assoc :qr_code (str->qr-base64 (badge-url ctx (:id %1)))
:endorsements (->> [(vec (select-badge-endorsements {:id (:badge_id %1)} (u/get-db ctx)))
(user-badge-endorsements ctx (:id %1))]
flatten) #_(->> (vec (select-badge-endorsements {:id (:badge_id %1)} (u/get-db ctx))))
:evidences (remove (fn [e] (= true (get-in e [:properties :hidden]))) (badge-evidence ctx (:id %1) user-id))
:content %2)) badge-with-content temp)]
(replace-nils badges)))
(defn- badge-pdf-helper [ctx user-id id]
(let [badge (fetch-badge ctx id)]
(replace-nils (some-> badge
(assoc :qr_code (str->qr-base64 (badge-url ctx id))
:endorsements (->> [(vec (select-badge-endorsements {:id (:badge_id badge)} (u/get-db ctx)))
(user-badge-endorsements ctx id)]
flatten)
:evidences (remove (fn [e] (= true (get-in e [:properties :hidden]))) (badge-evidence ctx id user-id))
:content (select-multi-language-badge-content {:id (:badge_id badge)} (u/get-db ctx)))))))
#_(defn process-pdf-page [stylesheet template badge ul]
(let [file (java.io.File/createTempFile "temp" ".pdf")
pdf (pdf/pdf (into [stylesheet] (template badge)) (.getAbsolutePath file))]
(if (blank? (slurp file))
(pdf/pdf [{} [:paragraph (t :core/Errorpage ul)]] (.getAbsolutePath file))
file)))
(defn process-markdown-helper [markdown id context]
(let [file (java.io.File/createTempFile "markdown" ".pdf")]
(try
(pdf/pdf [{} (pdfh/markdown->clj-pdf {:wrap {:global-wrapper :paragraph}} markdown)] (.getAbsolutePath file))
true
(catch Exception e
(log/error (str "Markdown Error in Badge id: " id " in " context))
false)
(finally (.delete file)))))
(defn process-markdown [markdown id context]
(if (== 1 (count markdown))
markdown
(if-let [p (process-markdown-helper markdown id context)]
:wrap {:global-wrapper :paragraph}} markdown)
"")))
(defn generatePDF [ctx user-id input lang]
(let [data-dir (get-in ctx [:config :core :data-dir])
site-url (get-in ctx [:config :core :site-url])
badges [(badge-pdf-helper ctx user-id input)] #_(pdf-generator-helper ctx user-id input)
user-data (ud/user-information ctx user-id)
ul (if (blank? (:language user-data)) "en" (:language user-data))
font-path (first (mapcat #(get-in ctx [:config % :font] []) (get-plugins ctx)))
font {:encoding :unicode :ttf-name (str site-url font-path)}
stylesheet {:heading-name {:color [127 113 121]
:family :times-roman
:align :center}
:generic {:family :times-roman
:color [127 113 121]
:indent 20}
:link {:family :times-roman
:color [66 100 162]}
:chunk {:size 11
:style :bold}}
badge-template (pdf/template
(let [template #(cons [:paragraph] [(if (and (not (= "-" (:image_file %))) (ends-with? (:image_file %) "png"))
[:image {:width 85 :height 85 :align :center} (str data-dir "/" (:image_file %))]
[:image {:width 85 :height 85 :align :center :base64 true} $qr_code])
[:heading.heading-name (:name %)]
[:paragraph {:indent 20 :align :center} [:spacer]]
[:paragraph.generic {:align :left :style :italic} (:description %)] [:spacer]
[:paragraph.generic
[:chunk.chunk (str (t :badge/Recipient ul) ": ")] (str $first_name " " $last_name) "\n"
[:chunk.chunk (str (t :badge/Issuedby ul) ": ")] (:issuer_content_name %) "\n"
(when-not (= "-" (:creator_name %))
[:paragraph.generic
[:chunk.chunk (str (t :badge/Createdby ul) ": ")] (:creator_name %)])
[:chunk.chunk (str (t :badge/Issuedon ul) ": ")] (if (number? $issued_on) (date-from-unix-time (long (* 1000 $issued_on)) "date") $issued_on) "\n"
[:paragraph
[:chunk.chunk (str (t :badge/Expireson ul) ": ")] (if (number? $expires_on) (date-from-unix-time (long (* 1000 $expires_on)) "date") $expires_on)]
(when-not (empty? $tags)
[:paragraph.generic
[:chunk.chunk (str (t :badge/Tags ul) ": ")] (into [:phrase] (for [t $tags] (str t " ")))])
[:paragraph
(let [alignments (replace-nils (select-alignment-content {:badge_content_id (:badge_content_id %)} (u/get-db ctx)))]
(when-not (empty? alignments)
[:paragraph.generic
[:spacer 0]
[:phrase {:size 12 :style :bold} (str (t :badge/Alignments ul) ": " (count alignments))] "\n"
(into [:paragraph] (for [a alignments]
[:paragraph
(:name a) "\n"
[:chunk {:style :italic} (:description a)] "\n"
[:chunk.link (:url a)] [:spacer 0]]))]))]
[:paragraph
[:chunk.chunk (str (t :badge/Criteria ul) ": ")] [:anchor {:target (:criteria_url %)} [:chunk.link (t :badge/Opencriteriapage ul)]] "\n"
[:spacer 0]
(process-markdown (:criteria_content %) $id "Criteria")
[:spacer 1] "\n"]]
(when-not (empty? $evidences)
[:paragraph.generic
[:spacer 0]
[:phrase {:size 12 :style :bold} (if (= 1 (count $evidences)) (t :badge/Evidence ul) (t :badge/Evidences ul))]
[:spacer]
(reduce (fn [r evidence]
(conj r [:phrase
(when (and (not (blank? (:name evidence))) (not= "-" (:name evidence))) [:phrase [:chunk (:name evidence)] "\n"])
(when (and (not (blank? (:narrative evidence))) (not= "-" (:narrative evidence))) [:phrase [:chunk (:narrative evidence)] "\n"])
(when (and (not (blank? (:description evidence))) (not= "-" (:description evidence))) [:phrase [:chunk (:description evidence)] "\n"])
[:anchor {:target (:url evidence)} [:chunk.link (str (t :badge/Openevidencepage ul) "...")]]
[:spacer 2]]))
[:list {:numbered true :indent 0}] $evidences)])
(when (seq $endorsements)
[:paragraph.generic
[:phrase {:size 12 :style :bold} (t :badge/BadgeEndorsedBy ul)] "\n"
[:spacer 0]
(into [:paragraph {:indent 0}]
(for [e $endorsements]
[:paragraph
(if (or (= "-" (:issuer_name e)) (blank? (:issuer_name e))) (str (:first_name e) " " (:last_name e)) (:issuer_name e)) "\n"
[:anchor {:target (:issuer_url e) :style {:family :times-roman :color [66 100 162]}} (or (:issuer_url e) "-")] "\n"
[:chunk (if (number? (or (:issued_on e) (:mtime e))) (date-from-unix-time (long (* 1000 (or (:issued_on e) (:mtime e))))) (or (:issued_on e) (:mtime e)))] "\n"
(process-markdown (:content e) $id "Endorsements")]))])
[:line {:dotted true}]
[:spacer 0]
[:heading.heading-name (t :badge/IssuerInfo ul)]
[:spacer 0]
[:paragraph.generic
[:chunk.chunk (str (t :badge/IssuerDescription ul) ": ")] (:issuer_description %)]
[:spacer 0]
[:paragraph.generic
[:chunk.chunk (str (t :badge/IssuerWebsite ul) ": ")]
[:anchor {:target (:issuer_content_url %) :style {:family :times-roman :color [66 100 162]}} (:issuer_content_url %)]]
[:paragraph.generic
[:chunk.chunk (str (t :badge/IssuerContact ul) ": ")] (:issuer_contact %)]
(when-not (= "-" (and (:creator_description %) (:creator_url %) (:creator_email %)))
[:paragraph.generic
[:spacer 0]
[:paragraph
[:chunk.chunk (str (t :badge/CreatorDescription ul) ": ")] (:creator_description %) "\n"]
[:paragraph
[:chunk.chunk (str (t :badge/CreatorWebsite ul) ": ")] [:anchor {:target (:creator_url %) :style {:family :times-roman :color [66 100 162]}} (:creator_url %)]]
[:paragraph
[:chunk.chunk (str (t :badge/CreatorContact ul) ": ")] (:creator_email %)]])
(let [issuer-endorsement (replace-nils (select-issuer-endorsements {:id (:issuer_content_id %)} (u/get-db ctx)))]
(when-not (empty? issuer-endorsement)
[:paragraph.generic
[:spacer]
[:phrase {:size 12 :style :bold} (t :badge/IssuerEndorsedBy ul)]
[:spacer 0]
(into [:paragraph {:indent 0}]
(for [e issuer-endorsement]
[:paragraph {:indent 0}
(:issuer_name e) "\n"
[:anchor {:target (:issuer_url e) :style {:family :times-roman :color [66 100 162]}} (:issuer_url e)] "\n"
[:chunk (if (number? (:issued_on e)) (date-from-unix-time (long (* 1000 (:issued_on e)))) (:issued_on e))] "\n"
(process-markdown (:content e) $id "Issuer Endorsements")]))]))
[:pdf-table {:align :right :width-percent 100 :cell-border false}
nil
[[:pdf-cell [:paragraph {:align :right} [:chunk [:image {:width 60 :height 60 :base64 true} $qr_code]] "\n"
[:phrase [:chunk.link {:style :italic} (str site-url "/app/badge/info/" $id)]]]]]]
[:pagebreak]])
content (if (= lang "all") (map template $content) (map template (filter #(= (:default_language_code %) (:language_code %)) $content)))]
(reduce into [] content)))]
(fn [out]
(try
(pdf/pdf (into [pdf-settings] (badge-template badges)) out)
(catch Exception e
(log/error "PDF not generated")
(log/error (.getMessage e))
(pdf/pdf [{} [:paragraph (t :core/Errorpage ul)]] out))))))
|
7c9a98910d00aa0c4838815318c275bf4ec109c7817a968dcc6c929476c2aab2 | coq/coq | vcs.mli | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
This module builds a VCS like interface on top of Dag , used to build
a Dag instance by the State Transaction Machine .
This data structure does not hold system states :
- Edges are meant to hold a diff .
The delta between two states , or equivalent data like a vernac_expr whose
execution corresponds to the application of the diff .
- Nodes are empty , unless one adds explicit info .
The info could be a system state , obtaind by applying all the diffs
from the initial state , but is not necessarily there .
As a consequence , " checkout " just updates the current branch .
The type [ i d ] is the type of commits ( a node in the dag )
The type [ Vcs.t ] has 4 parameters :
[ ' info ] data attached to a node ( like a system state )
[ ' diff ] data attached to an edge ( the commit content , a " patch " )
[ ' kind ] extra data attached to a branch ( like being the master branch )
[ ' cdata ] extra data hold by dag properties
a Dag instance by the State Transaction Machine.
This data structure does not hold system states:
- Edges are meant to hold a diff.
The delta between two states, or equivalent data like a vernac_expr whose
execution corresponds to the application of the diff.
- Nodes are empty, unless one adds explicit info.
The info could be a system state, obtaind by applying all the diffs
from the initial state, but is not necessarily there.
As a consequence, "checkout" just updates the current branch.
The type [id] is the type of commits (a node in the dag)
The type [Vcs.t] has 4 parameters:
['info] data attached to a node (like a system state)
['diff] data attached to an edge (the commit content, a "patch")
['kind] extra data attached to a branch (like being the master branch)
['cdata] extra data hold by dag properties
*)
module type Kind =
sig
type 'a t
val master : 'a t
end
module type S = sig
module Branch :
sig
type t
val make : string -> t
val equal : t -> t -> bool
val compare : t -> t -> int
val to_string : t -> string
val master : t
end
type id
type 'a kind_gen
type kind = Branch.t kind_gen
type branch_info = {
kind : kind;
root : id;
pos : id;
}
type ('diff,'info,'property_data) t
val empty : id -> ('diff,'info,'property_data) t
val current_branch : ('e,'i,'c) t -> Branch.t
val branches : ('e,'i,'c) t -> Branch.t list
val get_branch : ('e,'i,'c) t -> Branch.t -> branch_info
val reset_branch : ('e,'i,'c) t -> Branch.t -> id -> ('e,'i,'c) t
val branch :
('e,'i,'c) t -> ?root:id -> ?pos:id ->
Branch.t -> kind -> ('e,'i,'c) t
val delete_branch : ('e,'i,'c) t -> Branch.t -> ('e,'i,'c) t
val merge :
('diff,'i,'c) t -> id -> ours:'diff -> theirs:'diff -> ?into:Branch.t ->
Branch.t -> ('diff,'i,'c) t
val commit : ('diff,'i,'c) t -> id -> 'diff -> ('diff,'i,'c) t
val rewrite_merge :
('diff,'i,'c) t -> id -> ours:'diff -> theirs:'diff -> at:id ->
Branch.t -> ('diff,'i,'c) t
val checkout : ('e,'i,'c) t -> Branch.t -> ('e,'i,'c) t
val set_info : ('e,'info,'c) t -> id -> 'info -> ('e,'info,'c) t
val get_info : ('e,'info,'c) t -> id -> 'info option
Read only dag
module Dag : Dag.S with type node = id
val dag : ('diff,'info,'cdata) t -> ('diff,'info,'cdata) Dag.t
Properties are not a concept typical of a VCS , but a useful metadata
* of a DAG ( or graph ) .
* of a DAG (or graph). *)
val create_property : ('e,'i,'c) t -> id list -> 'c -> ('e,'i,'c) t
val property_of : ('e,'i,'c) t -> id -> 'c Dag.Property.t list
val delete_property : ('e,'i,'c) t -> 'c Dag.Property.t -> ('e,'i,'c) t
(* Removes all unreachable nodes and returns them *)
val gc : ('e,'info,'c) t -> ('e,'info,'c) t * Dag.NodeSet.t
val reachable : ('e,'info,'c) t -> id -> Dag.NodeSet.t
end
module Make(OT : Map.OrderedType)(K : Kind) : S
with type id = OT.t
and type Dag.node = OT.t
and type 'a kind_gen = 'a K.t
and type Dag.NodeSet.t = Set.Make(OT).t
and type Dag.NodeSet.elt = OT.t
| null | https://raw.githubusercontent.com/coq/coq/92b9d3ca9928e8332ac81175272e8e4489961d71/stm/vcs.mli | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
Removes all unreachable nodes and returns them | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
This module builds a VCS like interface on top of Dag , used to build
a Dag instance by the State Transaction Machine .
This data structure does not hold system states :
- Edges are meant to hold a diff .
The delta between two states , or equivalent data like a vernac_expr whose
execution corresponds to the application of the diff .
- Nodes are empty , unless one adds explicit info .
The info could be a system state , obtaind by applying all the diffs
from the initial state , but is not necessarily there .
As a consequence , " checkout " just updates the current branch .
The type [ i d ] is the type of commits ( a node in the dag )
The type [ Vcs.t ] has 4 parameters :
[ ' info ] data attached to a node ( like a system state )
[ ' diff ] data attached to an edge ( the commit content , a " patch " )
[ ' kind ] extra data attached to a branch ( like being the master branch )
[ ' cdata ] extra data hold by dag properties
a Dag instance by the State Transaction Machine.
This data structure does not hold system states:
- Edges are meant to hold a diff.
The delta between two states, or equivalent data like a vernac_expr whose
execution corresponds to the application of the diff.
- Nodes are empty, unless one adds explicit info.
The info could be a system state, obtaind by applying all the diffs
from the initial state, but is not necessarily there.
As a consequence, "checkout" just updates the current branch.
The type [id] is the type of commits (a node in the dag)
The type [Vcs.t] has 4 parameters:
['info] data attached to a node (like a system state)
['diff] data attached to an edge (the commit content, a "patch")
['kind] extra data attached to a branch (like being the master branch)
['cdata] extra data hold by dag properties
*)
module type Kind =
sig
type 'a t
val master : 'a t
end
module type S = sig
module Branch :
sig
type t
val make : string -> t
val equal : t -> t -> bool
val compare : t -> t -> int
val to_string : t -> string
val master : t
end
type id
type 'a kind_gen
type kind = Branch.t kind_gen
type branch_info = {
kind : kind;
root : id;
pos : id;
}
type ('diff,'info,'property_data) t
val empty : id -> ('diff,'info,'property_data) t
val current_branch : ('e,'i,'c) t -> Branch.t
val branches : ('e,'i,'c) t -> Branch.t list
val get_branch : ('e,'i,'c) t -> Branch.t -> branch_info
val reset_branch : ('e,'i,'c) t -> Branch.t -> id -> ('e,'i,'c) t
val branch :
('e,'i,'c) t -> ?root:id -> ?pos:id ->
Branch.t -> kind -> ('e,'i,'c) t
val delete_branch : ('e,'i,'c) t -> Branch.t -> ('e,'i,'c) t
val merge :
('diff,'i,'c) t -> id -> ours:'diff -> theirs:'diff -> ?into:Branch.t ->
Branch.t -> ('diff,'i,'c) t
val commit : ('diff,'i,'c) t -> id -> 'diff -> ('diff,'i,'c) t
val rewrite_merge :
('diff,'i,'c) t -> id -> ours:'diff -> theirs:'diff -> at:id ->
Branch.t -> ('diff,'i,'c) t
val checkout : ('e,'i,'c) t -> Branch.t -> ('e,'i,'c) t
val set_info : ('e,'info,'c) t -> id -> 'info -> ('e,'info,'c) t
val get_info : ('e,'info,'c) t -> id -> 'info option
Read only dag
module Dag : Dag.S with type node = id
val dag : ('diff,'info,'cdata) t -> ('diff,'info,'cdata) Dag.t
Properties are not a concept typical of a VCS , but a useful metadata
* of a DAG ( or graph ) .
* of a DAG (or graph). *)
val create_property : ('e,'i,'c) t -> id list -> 'c -> ('e,'i,'c) t
val property_of : ('e,'i,'c) t -> id -> 'c Dag.Property.t list
val delete_property : ('e,'i,'c) t -> 'c Dag.Property.t -> ('e,'i,'c) t
val gc : ('e,'info,'c) t -> ('e,'info,'c) t * Dag.NodeSet.t
val reachable : ('e,'info,'c) t -> id -> Dag.NodeSet.t
end
module Make(OT : Map.OrderedType)(K : Kind) : S
with type id = OT.t
and type Dag.node = OT.t
and type 'a kind_gen = 'a K.t
and type Dag.NodeSet.t = Set.Make(OT).t
and type Dag.NodeSet.elt = OT.t
|
87cde6ead0a6d87046a8b9f73b9b8bd1ec5e8071cec98a06ffd23f3b65187065 | cyborgize/es-cli | es.ml | open Devkit
open ExtLib
open Printf
module J = Yojson.Safe
module SS = Set.Make(String)
let log = Log.from "es"
let http_timeout = ref (Time.seconds 60)
type common_args = {
es_version : Config_t.version option;
verbose : bool;
}
let args =
ExtArg.[
"-T", String (fun t -> http_timeout := Time.of_compact_duration t), " set HTTP request timeout (format: 45s, 2m, or 1m30s)";
]
let json_content_type = "application/json"
let ndjson_content_type = "application/x-ndjson"
type content_type =
| JSON of string
| NDJSON of string
let json_body_opt = function
| Some JSON body -> Some (`Raw (json_content_type, body))
| Some NDJSON body -> Some (`Raw (ndjson_content_type, body))
| None -> None
let make_url host path args =
let args =
List.filter_map begin function
| name, Some Some value -> Some (String.concat "=" Web.[ urlencode name; urlencode value; ])
| name, Some None -> Some (Web.urlencode name)
| _name, None -> None
end args
in
let args = match args with [] -> [] | _ -> [ String.concat "&" args; ] in
let path = String.concat "/" ("" :: List.filter_map id path) in
let path = String.concat "?" (path :: args) in
String.concat "" [ host; path; ]
let request ?verbose ?body action host path args =
Web.http_request_lwt' ?verbose ~timeout:(Time.to_sec !http_timeout) ?body:(json_body_opt body) action (make_url host path args)
let request ?verbose ?body action host path args unformat =
match%lwt request ?verbose ?body action host path args with
| `Error code -> Exn_lwt.fail "(%d) %s" (Curl.errno code) (Curl.strerror code)
| `Ok (code, result) ->
let is_error_response result = Elastic_j.((response''_of_string result).error) <> None in
let is_severe_error code result = code / 100 <> 2 && (code <> 404 || is_error_response result) in
match is_severe_error code result with
| exception exn -> Exn_lwt.fail ~exn "http %d : %s" code result
| true -> Lwt.return_error result
| false ->
match unformat result with
| exception exn -> Exn_lwt.fail ~exn "unformat %s" result
| docs -> Lwt.return_ok docs
exception ErrorExit
let fail_lwt fmt =
ksprintf begin fun s ->
let%lwt () = Lwt_io.eprintl s in
Lwt.fail ErrorExit
end fmt
let default_doc_type = "_doc"
type 't json_reader = J.lexer_state -> Lexing.lexbuf -> 't
type 't json_writer = Bi_outbuf.t -> 't -> unit
type es_version_config = {
read_total : Elastic_t.total json_reader;
write_total : Elastic_t.total json_writer;
default_get_doc_type : string;
default_put_doc_type : string option;
}
let es6_config = {
read_total = Elastic_j.read_es6_total;
write_total = Elastic_j.write_es6_total;
default_get_doc_type = "_all";
default_put_doc_type = None;
}
let es7_config = {
read_total = Elastic_j.read_total;
write_total = Elastic_j.write_total;
default_get_doc_type = default_doc_type;
default_put_doc_type = Some default_doc_type;
}
let rec coalesce = function Some _ as hd :: _ -> hd | None :: tl -> coalesce tl | [] -> None
let get_es_version { verbose; _ } host =
match%lwt request ~verbose `GET host [] [] Elastic_j.main_of_string with
| Error error -> fail_lwt "could not get ES version:\n%s" error
| Ok { Elastic_t.version = { number; }; } ->
match Stre.nsplitc number '.' with
| [] -> Exn_lwt.fail "empty ES version number"
| "5" :: _ -> Lwt.return `ES5
| "6" :: _ -> Lwt.return `ES6
| "7" :: _ -> Lwt.return `ES7
| "8" :: _ -> Lwt.return `ES8
| other :: _ ->
match int_of_string other with
| exception exn -> Exn_lwt.fail ~exn "invalid ES version number : %s" number
| _ -> Exn_lwt.fail "unsupported ES version number : %s" number
let get_es_version_config' = function
| `ES5 | `ES6 -> es6_config
| `ES7 | `ES8 -> es7_config
let get_es_version_config common_args host es_version { Config_t.version = config_version; _ } cluster_version =
let version = coalesce [ es_version; cluster_version; config_version; ] in
let%lwt version =
match version with
| Some (#Wrap.Version.exact as version) -> Lwt.return version
| None | Some `Auto -> get_es_version common_args host
in
Lwt.return (get_es_version_config' version)
let get_body_query_file body_query =
match body_query <> "" && body_query.[0] = '@' with
| true -> Control.with_input_txt (String.slice ~first:1 body_query) IO.read_all
| false -> body_query
let usage tools =
fprintf stderr "Usage: %s {<tool>|-help|version}\n" Sys.executable_name;
fprintf stderr "where <tool> is one of:\n";
List.sort ~cmp:compare tools |>
List.iter (fun (s,_) -> fprintf stderr " %s\n" s)
let str_list =
ExtArg.make_arg @@ object
method store v = Arg.String (tuck v)
method kind = "string"
method show v = match !v with [] -> "none" | l -> String.concat "," l
end
let csv ?(sep=",") = function [] -> None | l -> Some (String.concat sep l)
let int = Option.map string_of_int
let one = function [] -> None | [x] -> Some x | _ -> assert false
let flag ?(default=false) = function x when x = default -> None | true -> Some "true" | false -> Some "false"
type hit_format = [
| `FullID
| `ID
| `Type
| `Index
| `Routing
| `Hit
| `Source
]
type hit_formatter = J.t Elastic_t.option_hit -> string
let hit_format_of_string = function
| "full_id" -> `FullID
| "id" -> `ID
| "type" -> `Type
| "index" -> `Index
| "routing" -> `Routing
| "hit" -> `Hit
| "source" -> `Source
| s -> Exn.fail "unknown hit field \"%s\"" s
let string_of_hit_format = function
| `FullID -> "full_id"
| `ID -> "id"
| `Type -> "type"
| `Index -> "index"
| `Routing -> "routing"
| `Hit -> "hit"
| `Source -> "source"
(* Map a [hit_format] selector to a formatter extracting that field from a hit.
   Missing doc types fall back to [default_doc_type]; missing routing renders
   as the empty string; a missing source renders as "". *)
let map_of_hit_format =
  let open Elastic_t in function
  | `FullID -> (fun ({ index; doc_type; id; _ } : 'a Elastic_t.option_hit) ->
      sprintf "/%s/%s/%s" index (Option.default default_doc_type doc_type) id)
  | `ID -> (fun hit -> hit.id)
  | `Type -> (fun hit -> Option.default default_doc_type hit.doc_type)
  | `Index -> (fun hit -> hit.index)
  | `Routing -> (fun hit -> Option.default "" hit.routing)
  | `Hit -> (fun hit -> Elastic_j.string_of_option_hit J.write_json hit)
  | `Source -> (fun { source; _ } -> Option.map_default J.to_string "" source)
(* Columns selectable when printing shard recovery status (see the recovery
   tool below); mirrors the fields of the _recovery API response. *)
type index_shard_format = [
  | `Index
  | `Shard
  | `Time
  | `Type
  | `Stage
  | `SourceHost
  | `SourceNode
  | `TargetHost
  | `TargetNode
  | `Repository
  | `Snapshot
  | `Files
  | `FilesRecovered
  | `FilesPercent
  | `FilesTotal
  | `Bytes
  | `BytesRecovered
  | `BytesPercent
  | `BytesTotal
  | `TranslogOps
  | `TranslogOpsRecovered
  | `TranslogOpsPercent
]
(* Parse a recovery column name as given on the command line; fails on
   unknown names. *)
let index_shard_format_of_string name =
  let table = [
    "index", `Index; "shard", `Shard; "time", `Time; "type", `Type;
    "stage", `Stage; "source_host", `SourceHost; "source_node", `SourceNode;
    "target_host", `TargetHost; "target_node", `TargetNode;
    "repository", `Repository; "snapshot", `Snapshot;
    "files", `Files; "files_recovered", `FilesRecovered;
    "files_percent", `FilesPercent; "files_total", `FilesTotal;
    "bytes", `Bytes; "bytes_recovered", `BytesRecovered;
    "bytes_percent", `BytesPercent; "bytes_total", `BytesTotal;
    "translog_ops", `TranslogOps; "translog_ops_recovered", `TranslogOpsRecovered;
    "translog_ops_percent", `TranslogOpsPercent;
  ] in
  match List.assoc name table with
  | fmt -> fmt
  | exception Not_found -> Exn.fail "unknown index shard field \"%s\"" name
(* Inverse of [index_shard_format_of_string]: command-line name of a column. *)
let string_of_index_shard_format fmt =
  match fmt with
  | `Index -> "index"
  | `Shard -> "shard"
  | `Time -> "time"
  | `Type -> "type"
  | `Stage -> "stage"
  | `SourceHost -> "source_host"
  | `SourceNode -> "source_node"
  | `TargetHost -> "target_host"
  | `TargetNode -> "target_node"
  | `Repository -> "repository"
  | `Snapshot -> "snapshot"
  | `Files -> "files"
  | `FilesRecovered -> "files_recovered"
  | `FilesPercent -> "files_percent"
  | `FilesTotal -> "files_total"
  | `Bytes -> "bytes"
  | `BytesRecovered -> "bytes_recovered"
  | `BytesPercent -> "bytes_percent"
  | `BytesTotal -> "bytes_total"
  | `TranslogOps -> "translog_ops"
  | `TranslogOpsRecovered -> "translog_ops_recovered"
  | `TranslogOpsPercent -> "translog_ops_percent"
(* Map a recovery column selector to an extractor over (index name, shard
   recovery record), producing a typed cell value rendered by [map_show]. *)
let map_of_index_shard_format =
  let open Elastic_t in function
  | `Index -> (fun index (_shard : index_shard) -> `String index)
  | `Shard -> (fun _index shard -> `Int shard.id)
  | `Time -> (fun _index shard -> `Duration (Time.msec shard.index.total_time_in_millis))
  | `Type -> (fun _index shard -> `Symbol shard.kind)
  | `Stage -> (fun _index shard -> `Symbol shard.stage)
  | `SourceHost -> (fun _index shard -> match shard.source.host with Some host -> `String host | None -> `None)
  | `SourceNode -> (fun _index shard -> match shard.source.name with Some name -> `String name | None -> `None)
  | `TargetHost -> (fun _index shard -> match shard.target.host with Some host -> `String host | None -> `None)
  | `TargetNode -> (fun _index shard -> match shard.target.name with Some name -> `String name | None -> `None)
  | `Repository -> (fun _index _shard -> `None) (* FIXME what is repository? *)
  | `Snapshot -> (fun _index _shard -> `None) (* FIXME what is snapshot? *)
  | `Files -> (fun _index shard -> `Int shard.index.files.total) (* FIXME what's the difference w/ files_total? *)
  | `FilesRecovered -> (fun _index shard -> `Int shard.index.files.recovered)
  | `FilesPercent -> (fun _index shard -> `String shard.index.files.percent)
  | `FilesTotal -> (fun _index shard -> `Int shard.index.files.total)
  | `Bytes -> (fun _index shard -> `Int shard.index.size.total_in_bytes) (* FIXME what's the difference w/ bytes_total? *)
  | `BytesRecovered -> (fun _index shard -> `Int shard.index.size.recovered_in_bytes)
  | `BytesPercent -> (fun _index shard -> `String shard.index.size.percent)
  | `BytesTotal -> (fun _index shard -> `Int shard.index.size.total_in_bytes)
  | `TranslogOps -> (fun _index shard -> `Int shard.translog.total)
  | `TranslogOpsRecovered -> (fun _index shard -> `Int shard.translog.recovered)
  | `TranslogOpsPercent -> (fun _index shard -> `String shard.translog.percent)
(* Columns printed by the recovery tool when no -f format is given:
   every available column, in display order. *)
let default_index_shard_format = [
  `Index; `Shard; `Time; `Type; `Stage;
  `SourceHost; `SourceNode; `TargetHost; `TargetNode;
  `Repository; `Snapshot;
  `Files; `FilesRecovered; `FilesPercent; `FilesTotal;
  `Bytes; `BytesRecovered; `BytesPercent; `BytesTotal;
  `TranslogOps; `TranslogOpsRecovered; `TranslogOpsPercent;
]
(* Render a typed cell value for tabular output; [`None] prints as "n/a". *)
let map_show cell =
  match cell with
  | `String s | `Symbol s -> s
  | `Int n -> string_of_int n
  | `Float f -> string_of_float f
  | `Duration d -> Time.compact_duration d
  | `None -> "n/a"
(* Build an equality test between a typed cell value and its command-line
   string representation (used by the recovery tool's include/exclude
   filters). Symbols compare case-insensitively.
   Fix: the match was non-exhaustive — [`Duration] (produced by
   [map_of_index_shard_format] for the `Time column) had no case; it now
   never matches, like [`None], until duration parsing is implemented. *)
let compare_fmt = function
  | `String x -> String.equal x
  | `Symbol x -> String.equal (String.lowercase_ascii x) $ String.lowercase_ascii
  | `Int x -> Factor.Int.equal x $ int_of_string
  | `Float x -> Factor.Float.equal x $ float_of_string
  | `Duration _ -> (fun _ -> false) (* FIXME parse time? *)
  | `None -> (fun _ -> false)
(* Document ids are slash-separated paths, e.g. /index/type/id; [split] and
   [join] convert between the string and segment-list forms. *)
let split doc_id = Stre.nsplitc doc_id '/'
let join segments = String.concat "/" segments
(* A "pure" id carries no index/type prefix: "id" or "/id". *)
let is_pure_id' = function
  | [ _ ] | [ ""; _ ] -> true
  | _ -> false
let is_pure_id doc_id = is_pure_id' (split doc_id)
(* Resolve a doc id that must carry its own index: /index/id or
   /index/type/id (an explicit type in the path overrides the [doc_type]
   argument). Fails when the id has no index segment or too many segments. *)
let map_index_doc_id' doc_type doc_id =
  match doc_id with
  | [ doc_id; ] | [ ""; doc_id; ] -> Exn_lwt.fail "document id missing index name : /%s" doc_id
  | [ index; doc_id; ] | [ ""; index; doc_id; ] -> Lwt.return (index, doc_type, doc_id)
  | [ index; doc_type; doc_id; ] | [ ""; index; doc_type; doc_id; ] -> Lwt.return (index, Some doc_type, doc_id)
  | _ -> Exn_lwt.fail "invalid document id : %s" (join doc_id)
let map_index_doc_id doc_type doc_id = map_index_doc_id' doc_type (split doc_id)
(* Resolve a doc id relative to a known [index]: "id" or "type/id" (the
   latter overrides the [doc_type] argument). The [_opt] variants wrap the
   resulting id in [Some] for APIs taking an optional id. *)
let map_doc_id' index doc_type doc_id =
  match doc_id with
  | [ doc_id; ] | [ ""; doc_id; ] -> Lwt.return (index, doc_type, doc_id)
  | [ doc_type; doc_id; ] | [ ""; doc_type; doc_id; ] -> Lwt.return (index, Some doc_type, doc_id)
  | _ -> Exn_lwt.fail "invalid document id : /%s/%s" index (join doc_id)
let map_doc_id index doc_type doc_id = map_doc_id' index doc_type (split doc_id)
let map_doc_id_opt' index doc_type doc_id =
  let%lwt (index, doc_type, doc_id) = map_doc_id' index doc_type doc_id in
  Lwt.return (index, doc_type, Some doc_id)
let map_doc_id_opt index doc_type doc_id = map_doc_id_opt' index doc_type (split doc_id)
(* Resolve a doc id when both [index] and [doc_type] are already known: the
   id must be a bare "id" (optionally with a leading slash). The [_opt]
   variants wrap the resulting id in [Some]. *)
let map_typed_doc_id' index doc_type doc_id =
  match doc_id with
  | [ doc_id; ] | [ ""; doc_id; ] -> Lwt.return (index, Some doc_type, doc_id)
  | _ -> Exn_lwt.fail "invalid document id : /%s/%s/%s" index doc_type (join doc_id)
let map_typed_doc_id index doc_type doc_id = map_typed_doc_id' index doc_type (split doc_id)
let map_typed_doc_id_opt' index doc_type doc_id =
  let%lwt (index, doc_type, doc_id) = map_typed_doc_id' index doc_type doc_id in
  Lwt.return (index, doc_type, Some doc_id)
let map_typed_doc_id_opt index doc_type doc_id = map_typed_doc_id_opt' index doc_type (split doc_id)
(* Classify the positional INDEX argument by its segment count: a plain
   index name, an index plus one trailing segment (ambiguous: doc type or
   doc id — callers disambiguate), or a full /index/type/id path.
   Consistency: use the shared [split] helper instead of inlining
   [Stre.nsplitc index '/'] (identical behavior). *)
let map_index_mode index =
  match split index with
  | [ index; ] | [ ""; index; ] -> `Index index
  | [ index; doc_type_or_id; ] | [ ""; index; doc_type_or_id; ] -> `IndexOrID (index, doc_type_or_id)
  | [ index; doc_type; doc_id; ] | [ ""; index; doc_type; doc_id; ] -> `ID (index, doc_type, doc_id)
  | _ -> Exn.fail "invalid index name or document id : %s" index
(* Combine the INDEX argument and trailing DOC_ID arguments into a fetch plan:
   [`None] (nothing to do), [`Single (index, doc_type, doc_id)] for one
   document, or [`Multi (docs, index, doc_type)] for a multi-get, where
   [index]/[doc_type] are factored out of [docs] when common to all of them
   (so they can go into the URL path instead of each doc entry). *)
let map_ids ~default_get_doc_type index doc_type doc_ids =
  (* Build the `Multi result: fold over the docs tracking whether all share
     one index and one doc type, then strip the common parts from each doc. *)
  let multiple (first_index, first_doc_type, _doc_id as first_doc) other_doc_ids =
    let first_doc_type = Option.default default_get_doc_type first_doc_type in
    (* keep the running common value only while every doc agrees with it *)
    let merge_equal x y = match x with Some x' when String.equal x' y -> x | _ -> None in
    let (docs, common_index, common_doc_type) =
      List.fold_left begin fun (docs, common_index, common_doc_type) (index, doc_type, _doc_id as doc) ->
        let common_index = merge_equal common_index index in
        let common_doc_type = merge_equal common_doc_type (Option.default default_get_doc_type doc_type) in
        doc :: docs, common_index, common_doc_type
      end ([ first_doc; ], Some first_index, Some first_doc_type) other_doc_ids
    in
    (* note: [docs] is in reverse argument order here *)
    let (docs, index, doc_type) =
      match common_index, common_doc_type with
      | Some _, Some _ ->
        let docs = List.map (fun (_index, _doc_type, doc_id) -> None, None, doc_id) docs in
        docs, common_index, common_doc_type
      | Some _, None ->
        let docs = List.map (fun (_index, doc_type, doc_id) -> None, doc_type, doc_id) docs in
        docs, common_index, None
      | None, _ ->
        let docs = List.map (fun (index, doc_type, doc_id) -> Some index, doc_type, doc_id) docs in
        docs, None, None
    in
    Lwt.return (`Multi (docs, index, doc_type))
  in
  let%lwt mode = Lwt.wrap1 map_index_mode index in
  let doc_ids = List.map split doc_ids in
  match mode, doc_ids with
  | `Index _, [] ->
    let%lwt () = Lwt_io.eprintl "only INDEX is provided and no DOC_ID" in
    Lwt.return `None
  | `Index index, [ doc_id; ] ->
    let%lwt (index, doc_type, doc_id) = map_doc_id' index doc_type doc_id in
    let doc_type = Option.default default_get_doc_type doc_type in
    Lwt.return (`Single (Some index, Some doc_type, Some doc_id))
  | `Index index, doc_id :: doc_ids ->
    let%lwt doc_id = map_doc_id' index doc_type doc_id in
    let%lwt doc_ids = Lwt_list.map_s (map_doc_id' index doc_type) doc_ids in
    multiple doc_id doc_ids
  | `IndexOrID (index, doc_id), [] ->
    (* no trailing args: the ambiguous segment must be the doc id *)
    let doc_type = Option.default default_get_doc_type doc_type in
    Lwt.return (`Single (Some index, Some doc_type, Some doc_id))
  | `IndexOrID (index, doc_type), doc_id :: doc_ids when List.for_all is_pure_id' (doc_id :: doc_ids) ->
    (* all trailing args are bare ids: the ambiguous segment is the doc type
       (shadows the -T option here) *)
    begin match doc_ids with
    | [] ->
      let%lwt (index, doc_type, doc_id) = map_typed_doc_id' index doc_type doc_id in
      Lwt.return (`Single (Some index, doc_type, Some doc_id))
    | _ ->
      let%lwt doc_id = map_typed_doc_id' index doc_type doc_id in
      let%lwt doc_ids = Lwt_list.map_s (map_typed_doc_id' index doc_type) doc_ids in
      multiple doc_id doc_ids
    end
  | `IndexOrID (index, doc_id), doc_ids ->
    (* trailing args carry their own index: the ambiguous segment is a doc id *)
    let%lwt doc_ids = Lwt_list.map_s (map_index_doc_id' doc_type) doc_ids in
    multiple (index, doc_type, doc_id) doc_ids
  | `ID (index, doc_type, doc_id), [] ->
    Lwt.return (`Single (Some index, Some doc_type, Some doc_id))
  | `ID (index, doc_type', doc_id), doc_ids ->
    (* NOTE(review): the trailing ids are resolved against the -T [doc_type]
       while the first doc keeps the type from its path — confirm intended *)
    let%lwt doc_ids = Lwt_list.map_s (map_index_doc_id' doc_type) doc_ids in
    multiple (index, Some doc_type', doc_id) doc_ids
(* Cmdliner argument specifications shared by several subcommands.
   Fix: the module's closing [end] had been mangled into a bare token;
   restored so the file parses. *)
module Common_args = struct
  open Cmdliner
  let host = Arg.(required & pos 0 (some string) None & info [] ~docv:"HOST" ~doc:"host")
  let index = Arg.(required & pos 1 (some string) None & info [] ~docv:"INDEX" ~doc:"index")
  let doc_type = Arg.(value & opt (some string) None & info [ "T"; "doctype"; ] ~docv:"DOC_TYPE" ~doc:"document type")
  (* NOTE(review): unlike the siblings this one has no [value &] — confirm
     callers apply value/required themselves *)
  let doc_id = Arg.(pos 2 (some string) None & info [] ~docv:"DOC_ID" ~doc:"document id")
  let doc_ids =
    let doc = "document ids" in
    Arg.(value & pos_right 1 string [] & info [] ~docv:"DOC_ID1[ DOC_ID2[ DOC_ID3...]]" ~doc)
  let timeout = Arg.(value & opt (some string) None & info [ "t"; "timeout"; ] ~doc:"timeout")
  let source_includes = Arg.(value & opt_all string [] & info [ "i"; "source-includes"; ] ~doc:"source_includes")
  let source_excludes = Arg.(value & opt_all string [] & info [ "e"; "source-excludes"; ] ~doc:"source_excludes")
  let routing = Arg.(value & opt (some string) None & info [ "r"; "routing"; ] ~doc:"routing")
  let preference = Arg.(value & opt_all string [] & info [ "p"; "preference"; ] ~doc:"preference")
  let sort = Arg.(value & opt_all string [] & info [ "s"; "sort"; ] ~doc:"sort")
  (* converter for a comma-separated list of hit fields; shadowed below by
     the actual option built from it *)
  let format =
    let parse format =
      match hit_format_of_string format with
      | exception Failure msg -> Error (`Msg msg)
      | format -> Ok format
    in
    let print fmt format =
      Format.fprintf fmt "%s" (string_of_hit_format format)
    in
    Arg.(list (conv (parse, print)))
  let format = Arg.(value & opt_all format [] & info [ "f"; "format"; ] ~doc:"map hits according to specified format (hit|id|source)")
  (* values accepted by the expand_wildcards query-string parameter *)
  type expand_wildcards =
    | All
    | Open
    | Closed
    | Hidden
    | None_
  let string_of_expand_wildcards = function
    | All -> "all"
    | Open -> "open"
    | Closed -> "closed"
    | Hidden -> "hidden"
    | None_ -> "none"
  let expand_wildcards =
    let conv_expand_wildcards =
      let parse = function
        | "all" -> Ok All
        | "open" -> Ok Open
        | "closed" -> Ok Closed
        | "hidden" -> Ok Hidden
        | "none" -> Ok None_
        | x -> Error (`Msg x)
      in
      Arg.conv (parse, (fun fmt x -> Format.fprintf fmt "%s" (string_of_expand_wildcards x)))
    in
    Arg.(value & opt (some conv_expand_wildcards) None & info [ "w"; "expand-wildcards"; ] ~doc:"expand_wildcards")
end (* Common_args *)
(* One add/remove entry for the _aliases API. *)
type alias_action = {
  action : [ `Add | `Remove ];
  index : string;
  alias : string;
}
(* Arguments of the alias tool; empty [actions] means "list aliases". *)
type alias_args = {
  host : string;
  actions : alias_action list;
}
(* Alias tool: with no actions, GET the current aliases; otherwise POST all
   add/remove actions to /_aliases in a single request. *)
let alias { verbose; _ } {
  host;
  actions;
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  let (action, body) =
    match actions with
    | [] -> `GET, None
    | actions ->
      (* each CLI action becomes a one-entry {add|remove: {index, alias}} object *)
      let actions = List.map (fun { action; index; alias; } -> [ action, { Elastic_t.index; alias; }; ]) actions in
      `POST, Some (JSON (Elastic_j.string_of_aliases { Elastic_t.actions; }) : content_type)
  in
  Lwt_main.run @@
  match%lwt request ~verbose ?body action host [ Some "_aliases"; ] [] id with
  | Error error -> fail_lwt "alias error:\n%s" error
  | Ok result -> Lwt_io.printl result
(* Output formats accepted by the _cat API's "format" parameter. *)
type cat_format =
  | Text
  | JSON
  | Smile
  | YAML
  | CBOR
(* Query-string value for a [cat_format]. *)
let string_of_cat_format fmt =
  match fmt with
  | Text -> "text"
  | JSON -> "json"
  | Smile -> "smile"
  | YAML -> "yaml"
  | CBOR -> "cbor"
(* Arguments of the cat tool; [args] are extra raw query-string parameters
   passed through verbatim. *)
type cat_args = {
  host : string;
  query : string list; (* _cat subpath segments, e.g. ["indices"] *)
  help : bool;
  headers : bool; (* the _cat "v" flag *)
  columns : string list; (* the _cat "h" parameter *)
  sort : string list;
  format : cat_format option;
  time_units : string option;
  size_units : string option;
  byte_units : string option;
  expand_wildcards : Common_args.expand_wildcards option;
  args : (string * string option) list;
}
(* Cat tool: GET /_cat/<query...> with the options rendered as query-string
   parameters, printing the raw (already line-oriented) response. *)
let cat ({ verbose; _ } as _common_args) {
  host;
  query;
  help;
  headers;
  columns;
  sort;
  format;
  time_units;
  size_units;
  byte_units;
  expand_wildcards;
  args;
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  (* valueless boolean flag: present in the query string only when [x] *)
  let flag name ?value x l = if x then (name, Some value) :: l else l in
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "h", csv columns;
      "s", csv sort;
      "time", time_units;
      "size", size_units;
      "bytes", byte_units;
      "format", Option.map string_of_cat_format format;
      "expand_wildcards", Option.map Common_args.string_of_expand_wildcards expand_wildcards;
    ] @
    flag "help" help @@
    flag "v" headers @@
    List.map (fun (k, v) -> k, Some v) args
  in
  Lwt_main.run @@
  match%lwt request ~verbose `GET host (Some "_cat" :: List.map some query) args id with
  | Error error -> fail_lwt "cat error:\n%s" error
  | Ok result -> Lwt_io.print result
(* Arguments of the count tool; [query] is a query-string "q" while
   [body_query] is a request body (or @file reference). *)
type count_args = {
  host : string;
  index : string;
  doc_type : string option;
  timeout : string option;
  routing : string option;
  preference : string list;
  query : string option;
  body_query : string option;
  analyzer : string option;
  analyze_wildcard : bool;
  default_field : string option;
  default_operator : string option;
  retry : bool;
}
(* Count tool: POST /<index>[/<type>]/_count and print the resulting count.
   The [retry] flag is accepted but not yet acted upon (see TODO below). *)
let count ({ verbose; _ } as _common_args) {
  host;
  index;
  doc_type;
  timeout;
  routing;
  preference;
  query;
  body_query;
  analyzer;
  analyze_wildcard;
  default_field;
  default_operator;
  retry = _;
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  let body_query = Option.map get_body_query_file body_query in
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "timeout", timeout;
      "routing", routing;
      "preference", csv ~sep:"|" preference;
      "analyzer", analyzer;
      "analyze_wildcard", flag analyze_wildcard;
      "df", default_field;
      "default_operator", default_operator;
      "q", query;
    ]
  in
  let body_query = match body_query with Some query -> Some (JSON query : content_type) | None -> None in
  let count () =
    match%lwt request ~verbose ?body:body_query `POST host [ Some index; doc_type; Some "_count"; ] args id with
    | Error error -> fail_lwt "count error:\n%s" error
    | Ok result ->
      let { Elastic_t.count; shards = { Elastic_t.failed = _; _ }; } = Elastic_j.count_of_string result in
      Lwt_io.printlf "%d" count
      (* TODO check failed > 0 && retry *)
  in
  count ()
(* Arguments of the delete tool. *)
type delete_args = {
  host : string;
  index : string;
  doc_type : string option;
  doc_ids : string list;
  timeout : string option;
  routing : string option;
}
(* Delete tool: a single id becomes DELETE /<index>/<type>/<id>; several ids
   become a _bulk request of delete actions. Id resolution and default doc
   type follow the cluster's ES version (see [map_ids]). *)
let delete ({ verbose; es_version; _ } as common_args) {
  host;
  index;
  doc_type;
  doc_ids;
  timeout;
  routing;
} =
  let config = Common.load_config () in
  let { Common.host; version; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  let%lwt ({ default_get_doc_type; _ }) =
    get_es_version_config common_args host es_version config version
  in
  match%lwt map_ids ~default_get_doc_type index doc_type doc_ids with
  | `None -> Lwt.return_unit
  | `Single _ | `Multi _ as mode ->
    let (action, body, path) =
      match mode with
      | `Single (index, doc_type, doc_id) -> `DELETE, None, [ index; doc_type; doc_id; ]
      | `Multi (docs, index, doc_type) ->
        (* newline-delimited bulk body: one {delete: ...} action per doc,
           with a trailing newline as the bulk API requires *)
        let body =
          List.fold_left begin fun acc (index, doc_type, doc_id) ->
            let delete = { Elastic_t.index; doc_type; id = doc_id; routing = None; } in
            let bulk = { Elastic_t.index = None; create = None; update = None; delete = Some delete; } in
            "\n" :: Elastic_j.string_of_bulk bulk :: acc
          end [] docs |>
          List.rev |>
          String.concat ""
        in
        `POST, Some (NDJSON body), [ index; doc_type; Some "_bulk"; ]
    in
    let args =
      List.map (fun (k, v) -> k, Option.map some v) [
        "timeout", timeout;
        "routing", routing;
      ]
    in
    match%lwt request ~verbose ?body action host path args id with
    | Error response -> Lwt_io.eprintl response
    | Ok response -> Lwt_io.printl response
(* Arguments of the flush tool. *)
type flush_args = {
  host : string;
  indices : string list; (* empty means all indices *)
  force : bool;
  synced : bool; (* use the _flush/synced endpoint *)
  wait : bool; (* wait_if_ongoing *)
}
(* Flush tool: POST /<indices>/_flush (or _flush/synced when [synced]). *)
let flush { verbose; _ } {
  host;
  indices;
  force;
  synced;
  wait;
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  (* render a bool as an optional query-string value / path segment *)
  let bool' v = function true -> Some v | false -> None in
  let bool = bool' "true" in
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "force", bool force;
      "wait_if_ongoing", bool wait;
    ]
  in
  let path = [ csv indices; Some "_flush"; bool' "synced" synced; ] in
  Lwt_main.run @@
  match%lwt request ~verbose `POST host path args id with
  | Error error -> fail_lwt "flush error:\n%s" error
  | Ok result -> Lwt_io.printl result
(* Arguments of the get tool. *)
type get_args = {
  host : string;
  index : string;
  doc_type : string option;
  doc_ids : string list;
  timeout : string option;
  source_includes : string list;
  source_excludes : string list;
  routing : string option;
  preference : string list;
  format : hit_format list list; (* per -f occurrence, a list of fields *)
}
(* Get tool: a single id becomes GET /<index>/<type>/<id>; several ids become
   a _mget request, with the index/doc type lifted into the URL path when
   common to all ids. Without -f the raw response is printed; with -f each
   hit is mapped through the selected [hit_format] fields. *)
let get ({ verbose; es_version; _ } as common_args) {
  host;
  index;
  doc_type;
  doc_ids;
  timeout;
  source_includes;
  source_excludes;
  routing;
  preference;
  format;
} =
  let config = Common.load_config () in
  let { Common.host; version; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  let%lwt ({ default_get_doc_type; _ }) =
    get_es_version_config common_args host es_version config version
  in
  match%lwt map_ids ~default_get_doc_type index doc_type doc_ids with
  | `None -> Lwt.return_unit
  | `Single _ | `Multi _ as mode ->
    (* [unformat] parses the response into a list of hits for -f output *)
    let (body, path, unformat) =
      match mode with
      | `Single (index, doc_type, doc_id) ->
        let path = [ index; doc_type; doc_id; ] in
        let unformat x = [ Elastic_j.option_hit_of_string J.read_json x; ] in
        None, path, unformat
      | `Multi (docs, index, doc_type) ->
        (* when index and doc type are common they go in the path and the
           body carries bare [ids]; otherwise each doc entry is explicit *)
        let (docs, ids) =
          match index, doc_type with
          | Some _, Some _ ->
            let ids = List.map (fun (_index, _doc_type, doc_id) -> doc_id) docs in
            [], ids
          | _ ->
            let docs =
              List.map begin fun (index, doc_type, id) ->
                { Elastic_t.index; doc_type; id; routing = None; source = None; stored_fields = None; }
              end docs
            in
            docs, []
        in
        let path = [ index; doc_type; Some "_mget"; ] in
        let unformat x =
          let { Elastic_t.docs; } = Elastic_j.docs_of_string (Elastic_j.read_option_hit J.read_json) x in
          docs
        in
        Some (JSON (Elastic_j.string_of_multiget { docs; ids; }) : content_type), path, unformat
    in
    let args =
      List.map (fun (k, v) -> k, Option.map some v) [
        "timeout", timeout;
        (* without excludes the shorter "_source" parameter suffices *)
        (if source_excludes = [] then "_source" else "_source_includes"), csv source_includes;
        "_source_excludes", csv source_excludes;
        "routing", routing;
        "preference", csv ~sep:"|" preference;
      ]
    in
    let request unformat = request ~verbose ?body `GET host path args unformat in
    match format with
    | [] ->
      (* no -f: print the raw response as-is *)
      begin match%lwt request id with
      | Error response -> Lwt_io.eprintl response
      | Ok response -> Lwt_io.printl response
      end
    | _ ->
      match%lwt request unformat with
      | Error response -> Lwt_io.eprintl response
      | Ok docs ->
        (* one output line per hit: selected fields joined by spaces *)
        Lwt_list.iter_s begin fun hit ->
          List.map (List.map map_of_hit_format) format |>
          List.concat |>
          List.map (fun f -> f hit) |>
          String.join " " |>
          Lwt_io.printl
        end docs
(* Arguments of the health tool; empty [hosts] means all configured clusters. *)
type health_args = {
  hosts : string list;
}
(* Health tool: query /_cat/health on each requested cluster (all clusters
   when none given or for the "_all" pseudo-name) concurrently, then print
   the results in the original argument order. *)
let health { verbose; _ } {
  hosts;
} =
  let config = Common.load_config () in
  let all_hosts = lazy (List.map (fun (name, _) -> Common.get_cluster config name) config.Config_t.clusters) in
  let hosts =
    match List.rev hosts with
    | [] -> !!all_hosts
    | hosts ->
      List.map begin function
        | "_all" -> !!all_hosts
        | name -> [ Common.get_cluster config name; ]
      end hosts |>
      List.concat
  in
  Lwt_main.run @@
  let%lwt results =
    (* [i] tags each result so output order matches input order *)
    Lwt_list.mapi_p begin fun i { Common.host; _ } ->
      let columns = [
        "cluster"; "status";
        "node.total"; "node.data";
        "shards"; "pri"; "relo"; "init"; "unassign";
        "pending_tasks"; "max_task_wait_time";
        "active_shards_percent";
      ] in
      let args = [ "h", Some (Some (String.concat "," columns)); ] in
      match%lwt request ~verbose `GET host [ Some "_cat"; Some "health"; ] args id with
      | Error error -> Lwt.return (i, sprintf "%s error %s\n" host error)
      | Ok result -> Lwt.return (i, sprintf "%s %s" host result)
    end hosts
  in
  List.sort ~cmp:(Factor.Int.compare $$ fst) results |>
  Lwt_list.iter_s (fun (_i, result) -> Lwt_io.print result)
(* Operations supported by the index tool. *)
type index_action =
  | Get
  | Create
  | Delete
  | Open
  | Close
  | Freeze
  | Unfreeze
  | Settings
  | Mappings
(* Arguments of the index tool; [body] switches Settings/Mappings to PUT. *)
type index_args = {
  host : string;
  index : string;
  action : index_action;
  expand_wildcards : Common_args.expand_wildcards option;
  body : string option;
}
(* Index tool: map the action to an HTTP method and sub-path on /<index>.
   A provided [body] implies PUT (used by Settings/Mappings, which keep the
   derived method); the other actions force their own method. *)
let index_tool { verbose; _ } {
  host;
  index;
  action;
  expand_wildcards;
  body;
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  let (meth, body) = match body with Some body -> `PUT, Some (JSON body : content_type) | None -> `GET, None in
  let (meth, path) =
    match action with
    | Get -> `GET, None
    | Create -> `PUT, None
    | Delete -> `DELETE, None
    | Open -> `POST, Some "_open"
    | Close -> `POST, Some "_close"
    | Freeze -> `POST, Some "_freeze"
    | Unfreeze -> `POST, Some "_unfreeze"
    | Settings -> meth, Some "_settings"
    | Mappings -> meth, Some "_mappings"
  in
  let path = Some index :: path :: [] in
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "expand_wildcards", Option.map Common_args.string_of_expand_wildcards expand_wildcards;
    ]
  in
  match%lwt request ~verbose ?body meth host path args id with
  | Error error -> fail_lwt "index error:\n%s" error
  | Ok result -> Lwt_io.printl result
(* Arguments of the nodes tool; empty [check_nodes] falls back to the nodes
   listed in the cluster config. *)
type nodes_args = {
  host : string;
  check_nodes : string list;
}
(* Nodes tool: compare the node names reported by /_nodes against the
   expected list, printing those missing from the cluster and those present
   but unlisted in the expected set. *)
let nodes { verbose; _ } {
  host;
  check_nodes;
} =
  let config = Common.load_config () in
  let { Common.host; nodes; _ } = Common.get_cluster config host in
  let check_nodes = match check_nodes with [] -> Option.default [] nodes | nodes -> nodes in
  (* expand patterns like name[1-3] into individual node names *)
  let check_nodes = SS.of_list (List.concat (List.map Common.expand_node check_nodes)) in
  Lwt_main.run @@
  match%lwt request ~verbose `GET host [ Some "_nodes"; ] [] J.from_string with
  | Error error -> fail_lwt "nodes error:\n%s" error
  | Ok result ->
    J.Util.member "nodes" result |>
    J.Util.to_assoc |>
    List.fold_left begin fun (missing, present) (_node_id, node) ->
      let name = J.Util.member "name" node |> J.Util.to_string in
      SS.remove name missing, SS.add name present
    end (check_nodes, SS.empty) |>
    fun (missing, present) ->
    let%lwt () =
      match SS.is_empty missing with
      | true -> Lwt.return_unit
      | false -> Lwt_io.printlf "missing: %s" (String.concat " " (SS.elements missing))
    in
    let%lwt () =
      let unlisted = SS.diff present check_nodes in
      match SS.is_empty unlisted with
      | true -> Lwt.return_unit
      | false -> Lwt_io.printlf "unlisted: %s" (String.concat " " (SS.elements unlisted))
    in
    Lwt.return_unit
(* Arguments of the put tool; [body] defaults to stdin when absent. *)
type put_args = {
  host : string;
  index : string;
  doc_type : string option;
  doc_id : string option;
  routing : string option;
  body : string option;
}
(* Put tool: index a document. With an id the request is PUT (explicit id);
   without one it is POST (server-assigned id). The doc type comes from the
   path, the -T option, or the version-specific default, in that order. *)
let put ({ verbose; es_version; _ } as common_args) {
  host;
  index;
  doc_type;
  doc_id;
  routing;
  body;
} =
  let config = Common.load_config () in
  let { Common.host; version; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  let%lwt { default_put_doc_type; _ } =
    get_es_version_config common_args host es_version config version
  in
  (* untangle the INDEX argument (possibly /index/type/id) and DOC_ID *)
  let%lwt (index, doc_type, doc_id) =
    let%lwt mode = Lwt.wrap1 map_index_mode index in
    match mode, doc_id with
    | `Index index, None -> Lwt.return (index, doc_type, None)
    | `Index index, Some doc_id -> map_doc_id_opt index doc_type doc_id
    | `IndexOrID (index, doc_id), None -> Lwt.return (index, doc_type, Some doc_id)
    | `IndexOrID (index, doc_type), Some doc_id -> map_typed_doc_id_opt index doc_type doc_id
    | `ID (index, doc_type, doc_id), None -> Lwt.return (index, Some doc_type, Some doc_id)
    | `ID (index, doc_type, doc_id1), Some doc_id2 ->
      Exn_lwt.fail "invalid document id : /%s/%s/%s/%s" index doc_type doc_id1 doc_id2
  in
  let%lwt doc_type =
    match coalesce [ doc_type; default_put_doc_type; ] with
    | Some doc_type -> Lwt.return doc_type
    | None -> Exn_lwt.fail "DOC_TYPE is not provided"
  in
  let args = [ "routing", Option.map some routing; ] in
  let%lwt body = match body with Some body -> Lwt.return body | None -> Lwt_io.read Lwt_io.stdin in
  let action = if doc_id <> None then `PUT else `POST in
  match%lwt request ~verbose ~body:(JSON body) action host [ Some index; Some doc_type; doc_id; ] args id with
  | Error error -> fail_lwt "put error:\n%s" error
  | Ok result -> Lwt_io.printl result
(* Arguments of the recovery tool; filters pair a column with the string
   value it must (include) or must not (exclude) equal. *)
type recovery_args = {
  host : string;
  indices : string list;
  filter_include : (index_shard_format * string) list;
  filter_exclude : (index_shard_format * string) list;
  format : index_shard_format list list;
}
(* Recovery tool: GET /<indices>/_recovery, filter shards by the
   include/exclude column predicates, and print one line per shard with the
   selected (or default) columns. *)
let recovery { verbose; _ } {
  host;
  indices;
  filter_include;
  filter_exclude;
  format;
} =
  let format =
    match format with
    | [] -> List.map map_of_index_shard_format default_index_shard_format
    | _ ->
      List.map (List.map map_of_index_shard_format) format |>
      List.concat
  in
  let config = Common.load_config () in
  let filter_include = List.map (fun (k, v) -> map_of_index_shard_format k, v) filter_include in
  let filter_exclude = List.map (fun (k, v) -> map_of_index_shard_format k, v) filter_exclude in
  let { Common.host; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  match%lwt request ~verbose `GET host [ csv indices; Some "_recovery"; ] [] Elastic_j.indices_shards_of_string with
  | Error error -> fail_lwt "recovery error:\n%s" error
  | Ok indices ->
    let indices =
      match filter_include, filter_exclude with
      | [], [] -> indices
      | _ ->
        (* a shard is kept iff it matches every include and no exclude *)
        List.map begin fun (index, ({ shards; } : Elastic_t.index_shards)) ->
          let shards =
            List.filter begin fun shard ->
              List.for_all (fun (f, v) -> compare_fmt (f index shard) v) filter_include &&
              not (List.exists (fun (f, v) -> compare_fmt (f index shard) v) filter_exclude)
            end shards
          in
          index, { Elastic_t.shards; }
        end indices
    in
    Lwt_list.iter_s begin fun (index, ({ shards; } : Elastic_t.index_shards)) ->
      Lwt_list.iter_s begin fun shard ->
        List.map (fun f -> map_show (f index shard)) format |>
        String.concat " " |>
        Lwt_io.printl
      end shards
    end indices
(* Arguments of the refresh tool; empty [indices] means all indices. *)
type refresh_args = {
  host : string;
  indices : string list;
}
(* Refresh tool: POST /<indices>/_refresh and print the raw response. *)
let refresh { verbose; _ } {
  host;
  indices;
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  let path = [ csv indices; Some "_refresh"; ] in
  Lwt_main.run begin
    match%lwt request ~verbose `POST host path [] id with
    | Ok result -> Lwt_io.printl result
    | Error error -> fail_lwt "refresh error:\n%s" error
  end
(* Parameter records for the supported search aggregations. *)
type aggregation_field = {
  field : string;
}
type aggregation_stats = {
  field : string;
  missing : string option;
}
type aggregation_cardinality = {
  common : aggregation_stats;
  precision_threshold : int option;
}
type aggregation_extended_stats = {
  common : aggregation_stats;
  sigma : float option;
}
type aggregation_string_stats = {
  common : aggregation_stats;
  show_distribution : bool option;
}
type aggregation_terms = {
  field : string;
  size : int option;
}
(* One named aggregation as given on the command line. *)
type aggregation =
  | Avg of aggregation_stats
  | Cardinality of aggregation_cardinality
  | ExtendedStats of aggregation_extended_stats
  | Max of aggregation_stats
  | Min of aggregation_stats
  | Stats of aggregation_stats
  | Sum of aggregation_stats
  | StringStats of aggregation_string_stats
  | Terms of aggregation_terms
  | ValueCount of aggregation_field
(* The Elasticsearch aggregation type name used as the JSON key. *)
let string_of_aggregation agg =
  match agg with
  | Avg _ -> "avg"
  | Cardinality _ -> "cardinality"
  | ExtendedStats _ -> "extended_stats"
  | Max _ -> "max"
  | Min _ -> "min"
  | Stats _ -> "stats"
  | Sum _ -> "sum"
  | StringStats _ -> "string_stats"
  | Terms _ -> "terms"
  | ValueCount _ -> "value_count"
(* Arguments of the search tool; [query] is a query-string "q" while
   [body_query] is a request body (or @file reference), and [slice_id]/
   [slice_max] enable sliced scrolling. *)
type search_args = {
  host : string;
  index : string;
  doc_type : string option;
  timeout : string option;
  size : int option;
  from : int option;
  sort : string list;
  source_includes : string list;
  source_excludes : string list;
  fields : string list; (* stored_fields *)
  routing : string option;
  preference : string list;
  scroll : string option;
  slice_id : int option;
  slice_max : int option;
  query : string option;
  body_query : string option;
  aggregations : (string * aggregation) list;
  analyzer : string option;
  analyze_wildcard : bool;
  default_field : string option;
  default_operator : string option;
  explain : bool;
  show_count : bool;
  track_total_hits : string option;
  retry : bool; (* re-fetch hits from failed shards *)
  format : hit_format list list; (* per -f occurrence, a list of fields *)
}
let search ({ verbose; es_version; _ } as common_args) {
host;
index;
doc_type;
timeout;
size;
from;
sort;
source_includes;
source_excludes;
fields;
routing;
preference;
scroll;
slice_id;
slice_max;
query;
body_query;
aggregations;
analyzer;
analyze_wildcard;
default_field;
default_operator;
explain;
show_count;
track_total_hits;
retry;
format;
} =
let config = Common.load_config () in
let { Common.host; version; _ } = Common.get_cluster config host in
Lwt_main.run @@
let%lwt { read_total; write_total; _ } =
get_es_version_config common_args host es_version config version
in
let body_query = Option.map get_body_query_file body_query in
let args =
List.map (fun (k, v) -> k, Option.map some v) [
"timeout", timeout;
"size", int size;
"from", int from;
"track_total_hits", track_total_hits;
"sort", csv sort;
(if source_excludes = [] then "_source" else "_source_includes"), csv source_includes;
"_source_excludes", csv source_excludes;
"stored_fields", csv fields;
"routing", routing;
"preference", csv ~sep:"|" preference;
"explain", flag explain;
"scroll", scroll;
"analyzer", analyzer;
"analyze_wildcard", flag analyze_wildcard;
"df", default_field;
"default_operator", default_operator;
"q", query;
]
in
let format =
List.map (List.map map_of_hit_format) format |>
List.concat
in
let body_query =
match slice_id, slice_max with
| None, _ | _, None -> body_query
| Some slice_id, Some slice_max ->
let slice = "slice", `Assoc [ "id", `Int slice_id; "max", `Int slice_max; ] in
match body_query with
| None -> Some (Util_j.string_of_assoc [slice])
| Some body ->
let body = Util_j.assoc_of_string body in
let body = slice :: List.filter (function "slice", _ -> false | _ -> true) body in
Some (Util_j.string_of_assoc body)
in
let body_query =
match aggregations with
| [] -> body_query
| _ ->
match body_query with
| Some _ -> Exn.fail "providing query body and aggregations at the same time is not supported"
| None ->
let aggregations =
let cons name map hd tl = match hd with Some hd -> (name, map hd) :: tl | None -> tl in
let bool x = `Bool x in
let float x = `Float x in
let int x = `Int x in
let string x = `String x in
let metrics { field; missing; } params =
let params = cons "missing" string missing params in
("field", `String field) :: params
in
List.map begin fun (name, aggregation) ->
let aggregation_params =
match aggregation with
| Avg params | Max params | Min params | Stats params | Sum params ->
metrics params []
| Cardinality { common; precision_threshold; } ->
let params = cons "precision_threshold" int precision_threshold [] in
metrics common params
| ExtendedStats { common; sigma; } ->
let params = cons "sigma" float sigma [] in
metrics common params
| StringStats { common; show_distribution; } ->
let params = cons "show_distribution" bool show_distribution [] in
metrics common params
| Terms { field; size; } ->
let params = cons "size" int size [] in
("field", `String field) :: params
| ValueCount { field; } ->
("field", `String field) :: []
in
name, `Assoc [ string_of_aggregation aggregation, `Assoc aggregation_params; ]
end aggregations
in
Some (Util_j.string_of_assoc [ "aggs", `Assoc aggregations; ])
in
let body_query = match body_query with Some query -> Some (JSON query : content_type) | None -> None in
let htbl = Hashtbl.create (if retry then Option.default 10 size else 0) in
let rec search () =
match%lwt request ~verbose ?body:body_query `POST host [ Some index; doc_type; Some "_search"; ] args id with
| Error error -> fail_lwt "search error:\n%s" error
| Ok result ->
match show_count, format, scroll, retry with
| false, [], None, false -> Lwt_io.printl result
| show_count, format, scroll, retry ->
let scroll_path = [ Some "_search"; Some "scroll"; ] in
let clear_scroll' scroll_id =
let clear_scroll = (JSON (Elastic_j.string_of_clear_scroll { Elastic_t.scroll_id = [ scroll_id; ]; }) : content_type) in
match%lwt request ~verbose ~body:clear_scroll `DELETE host scroll_path [] id with
| Error error -> fail_lwt "clear scroll error:\n%s" error
| Ok _ok -> Lwt.return_unit
in
let clear_scroll scroll_id = Option.map_default clear_scroll' Lwt.return_unit scroll_id in
let rec loop result =
let { Elastic_t.hits = response_hits; scroll_id; shards = { Elastic_t.failed; _ }; _ } as response =
Elastic_j.response'_of_string (Elastic_j.read_option_hit J.read_json) read_total result
in
match response_hits with
| None -> log #error "no hits"; clear_scroll scroll_id
| Some ({ Elastic_t.total; hits; _ } as response_hits) ->
let hits =
match retry with
| false -> hits
| true ->
List.filter begin fun ({ Elastic_t.index; doc_type; id; _ } : 'a Elastic_t.option_hit) ->
let key = index, doc_type, id in
match Hashtbl.mem htbl key with
| false -> Hashtbl.add htbl key (); true
| true -> false
end hits
in
let%lwt () =
match show_count with
| false -> Lwt.return_unit
| true ->
match total with
| None -> Lwt_io.printl "unknown"
| Some { value; relation; } ->
match relation with
| `Eq -> Lwt_io.printlf "%d" value
| `Gte -> Lwt_io.printlf ">=%d" value
in
let%lwt () =
match format, show_count, retry with
| [], true, _ -> Lwt.return_unit
| [], false, false -> Lwt_io.printl result
| [], false, true when hits <> [] || Hashtbl.length htbl = 0 ->
{ response with Elastic_t.hits = Some { response_hits with Elastic_t.hits; }; } |>
Elastic_j.string_of_response' (Elastic_j.write_option_hit J.write_json) write_total |>
Lwt_io.printl
| _ ->
Lwt_list.iter_s begin fun hit ->
List.map (fun f -> f hit) format |>
String.join " " |>
Lwt_io.printl
end hits
in
match failed > 0 && retry with
| true ->
let%lwt () = clear_scroll scroll_id in
search ()
| false ->
match hits, scroll, scroll_id with
| [], _, _ | _, None, _ | _, _, None -> clear_scroll scroll_id
| _, Some scroll, Some scroll_id ->
let scroll = (JSON (Elastic_j.string_of_scroll { Elastic_t.scroll; scroll_id; }) : content_type) in
match%lwt request ~verbose ~body:scroll `POST host scroll_path [] id with
| Error error ->
let%lwt () = Lwt_io.eprintlf "scroll error:\n%s" error in
let%lwt () = clear_scroll' scroll_id in
Lwt.fail ErrorExit
| Ok result -> loop result
in
loop result
in
search ()
(** Cluster-settings tool: read, set, and reset settings via the
    [/_cluster/settings] endpoint. *)
module Settings = struct

  (* Format used to interpret values supplied on the command line. *)
  type input =
    | Text
    | JSON

  (* Format used to print settings back to the user. *)
  type output =
    | Text
    | JSON
    | Raw

  (* Which settings section to operate on. *)
  type type_ =
    | Transient
    | Persistent
    | Defaults

  type args = {
    host : string;
    keys : string list;       (* KEY or KEY=VALUE items from the command line *)
    reset : bool;             (* when set, a bare KEY means "reset KEY to null" *)
    include_defaults : bool;
    input : input;
    output : output;
    type_ : type_ option;
  }

  (* Entry point: split [keys] into gets and sets, perform the PUT for sets
     (if any), then the GET for the remaining keys. *)
  let settings { verbose; _ } {
    host;
    keys;
    reset;
    include_defaults;
    input;
    output;
    type_;
  } =
    let config = Common.load_config () in
    let { Common.host; _ } = Common.get_cluster config host in
    let path = [ Some "_cluster"; Some "settings"; ] in
    let (get_keys, set_keys) =
      List.map begin fun s ->
        match Stre.splitc s '=' with
        | exception Not_found when reset -> `Set (s, None) (* bare key + -r: reset to null *)
        | exception Not_found -> `Get s
        | key, value -> `Set (key, Some value)
      end keys |>
      List.partition (function `Get _ -> true | `Set _ -> false)
    in
    let get_keys = List.map (function `Get key -> key | `Set _ -> assert false) get_keys in
    let set_keys = List.map (function `Get _ -> assert false | `Set pair -> pair) set_keys in
    Lwt_main.run @@
    let%lwt set_mode =
      match set_keys, type_ with
      | [], _ -> Lwt.return_none
      | _, Some Transient -> Lwt.return_some `Transient
      | _, Some Persistent -> Lwt.return_some `Persistent
      (* Repaired cases (source was garbled here): default settings are
         read-only, and setting a value requires an explicit section. *)
      | _, Some Defaults -> fail_lwt "cannot modify default settings"
      | _, None -> fail_lwt "please specify -t (transient) or -p (persistent) when setting values"
    in
    let%lwt () =
      match set_mode with
      | None -> Lwt.return_unit
      | Some mode ->
        let%lwt values =
          Lwt_list.map_s begin fun (key, value) ->
            let%lwt value =
              match value with
              | None -> Lwt.return `Null
              | Some value ->
                match input with
                | Text -> Lwt.return (`String value)
                | JSON -> Lwt.wrap1 J.from_string value
            in
            Lwt.return (key, value)
          end set_keys
        in
        let values = Some (`Assoc values) in
        let (transient, persistent) =
          match mode with
          | `Transient -> values, None
          | `Persistent -> None, values
        in
        let settings = ({ transient; persistent; defaults = None; } : Elastic_t.cluster_tree_settings) in
        let body = (JSON (Elastic_j.string_of_cluster_tree_settings settings) : content_type) in
        match%lwt request ~verbose ~body `PUT host path [] id with
        | Error error -> fail_lwt "settings error:\n%s" error
        | Ok result -> Lwt_io.printl result
    in
    let%lwt () =
      match get_keys, set_keys with
      | [], _ :: _ -> Lwt.return_unit (* pure set: nothing to read back *)
      | _ ->
        let include_defaults = include_defaults || type_ = Some Defaults in
        let args =
          List.map (fun (k, v) -> k, Option.map some v) [
            "flat_settings", Some "true";
            "include_defaults", flag include_defaults;
          ]
        in
        match%lwt request ~verbose `GET host path args id with
        | Error error -> fail_lwt "settings error:\n%s" error
        | Ok result ->
          match get_keys, output, type_ with
          | [], Raw, None -> Lwt_io.printl result (* no filtering needed: dump as is *)
          | _ ->
            let%lwt { Elastic_t.transient; persistent; defaults; } = Lwt.wrap1 Elastic_j.cluster_flat_settings_of_string result in
            let type_settings =
              match type_ with
              | None -> None
              | Some Defaults -> Some defaults
              | Some Transient -> Some transient
              | Some Persistent -> Some persistent
            in
            let output =
              match output with
              | Text -> `Text
              | JSON -> `JSON
              | Raw -> `Raw
            in
            let module SS = Set.Make(String) in
            let get_keys = SS.of_list get_keys in
            let get_keys_empty = SS.is_empty get_keys in
            (* A single key of a single section prints just the value. *)
            let get_keys_typed_single = SS.cardinal get_keys = 1 && Option.is_some type_ in
            match output with
            | `Raw ->
              let filter =
                match get_keys_empty with
                | true -> id
                | false -> (fun settings -> List.filter (fun (key, _value) -> SS.mem key get_keys) settings)
              in
              begin match type_settings with
              | Some settings ->
                let settings = Option.map_default filter [] settings in
                Lwt_io.printl (Elastic_j.string_of_settings settings)
              | None ->
                let transient = Option.map filter transient in
                let persistent = Option.map filter persistent in
                let defaults = Option.map filter defaults in
                Lwt_io.printl (Elastic_j.string_of_cluster_flat_settings { Elastic_t.transient; persistent; defaults; })
              end
            | `Text | `JSON as output ->
              let settings =
                match type_settings with
                | Some settings -> [ None, settings; ]
                | None -> [ Some "transient: ", transient; Some "persistent: ", persistent; Some "defaults: ", defaults; ]
              in
              let string_of_value =
                match output with
                | `JSON -> (fun value -> J.to_string value)
                | `Text ->
                  function
                  | `Null -> "null"
                  | `Intlit s | `String s -> s
                  | `Bool x -> string_of_bool x
                  | `Int x -> string_of_int x
                  | `Float x -> string_of_float x
                  | `List _ | `Assoc _ | `Tuple _ | `Variant _ as value -> J.to_string value
              in
              let print_value value =
                Lwt_io.printl (string_of_value value)
              in
              let print_key_value prefix key value =
                Lwt_io.printlf "%s%s: %s" prefix key (string_of_value value)
              in
              let print prefix (key, value) =
                match prefix, get_keys_typed_single with
                | None, true -> print_value value
                | _ -> print_key_value (Option.default "" prefix) key value
              in
              Lwt_list.iter_s begin function
                | _prefix, None -> Lwt.return_unit
                | prefix, Some settings ->
                  match get_keys_empty with
                  | true -> Lwt_list.iter_s (print prefix) settings
                  | _ ->
                    Lwt_list.iter_s (function key, _ as pair when SS.mem key get_keys -> print prefix pair | _ -> Lwt.return_unit) settings
              end settings
    in
    Lwt.return_unit
end (* Settings *)
open Cmdliner
[@@@alert "-deprecated"]
(* Minimal Let_syntax so ppx_let's [let%map ... and ...] can be used to build
   Cmdliner terms applicatively in the tool definitions below. *)
module Let_syntax = struct
  (* Apply a pure function to the value carried by a term. *)
  let map ~f term = Term.(const f $ term)
  (* Pair the values of two independent terms. *)
  let both left right = Term.(const (fun l r -> l, r) $ left $ right)
end
(** Options shared by every subcommand: forced ES version and verbosity. *)
let common_args =
  let args es_version verbose = { es_version; verbose; } in
  let docs = Manpage.s_common_options in
  let es_version =
    (* [vflag_all] collects every version flag given; [last] keeps the final
       one, defaulting to [None] (auto-detect) when no flag is present. *)
    Arg.(last & vflag_all [ None; ] [
      Some `ES5, Arg.info [ "5"; ] ~docs ~doc:"force ES version 5.x";
      Some `ES6, Arg.info [ "6"; ] ~docs ~doc:"force ES version 6.x";
      Some `ES7, Arg.info [ "7"; ] ~docs ~doc:"force ES version 7.x";
      Some `ES8, Arg.info [ "8"; ] ~docs ~doc:"force ES version 8.x";
    ])
  in
  let verbose =
    let doc = "verbose output" in
    Arg.(value & flag & info [ "v"; "verbose"; ] ~docs ~doc)
  in
  Term.(const args $ es_version $ verbose)
(* Default command (no subcommand given): show the help pager. *)
let default_tool =
  let doc = "a command-line client for ES" in
  let sdocs = Manpage.s_common_options in
  let exits = Term.default_exits in
  let man = [] in
  Term.(ret (const (fun _ -> `Help (`Pager, None)) $ common_args), info "es" ~version:Common.version ~doc ~sdocs ~exits ~man)
(** [es alias]: add or remove index aliases. *)
let alias_tool =
  (* ALIAS[=INDEX] converter; INDEX may be omitted if the positional INDEX
     argument is supplied instead. *)
  let action =
    let parse x =
      match Stre.splitc x '=' with
      | alias, index -> Ok (alias, Some index)
      | exception Not_found -> Ok (x, None)
    in
    let print fmt (alias, index) =
      match index with
      | Some index -> Format.fprintf fmt "%s=%s" alias index
      | None -> Format.fprintf fmt "%s" alias
    in
    Arg.conv (parse, print)
  in
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index =
    let doc = "index to operate on. If not provided, -a and -r must include the =INDEX part." in
    Arg.(value & pos 1 (some string) None & info [] ~docv:"INDEX" ~doc)
  and add =
    let doc = "add index INDEX to alias ALIAS" in
    Arg.(value & opt_all action [] & info [ "a"; "add"; ] ~docv:"ALIAS[=INDEX]" ~doc)
  and remove =
    let doc = "remove index INDEX from alias ALIAS" in
    Arg.(value & opt_all action [] & info [ "r"; "remove"; ] ~docv:"ALIAS[=INDEX]" ~doc)
  in
  (* Resolve each action's index, falling back to the positional INDEX. *)
  let map action = function
    | alias, Some index -> { action; index; alias; }
    | alias, None ->
      match index with
      | Some index -> { action; index; alias; }
      | None -> Exn.fail "INDEX is not specified for %s" alias
  in
  let add = List.map (map `Add) add in
  let remove = List.map (map `Remove) remove in
  alias common_args {
    host;
    actions = add @ remove;
  }
(* Pair the term with its man-page info. *)
let alias_tool =
  alias_tool,
  let open Term in
  let doc = "add or remove index aliases" in
  let exits = default_exits in
  let man = [] in
  info "alias" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es cat]: thin wrapper over the _cat API. *)
let cat_tool =
  (* Output-format name <-> variant converter. *)
  let conv_format =
    let parse = function
      | "text" -> Ok Text
      | "json" -> Ok JSON
      | "smile" -> Ok Smile
      | "yaml" -> Ok YAML
      | "cbor" -> Ok CBOR
      | x -> Error (`Msg x)
    in
    Arg.conv (parse, (fun fmt x -> Format.fprintf fmt "%s" (string_of_cat_format x)))
  in
  (* KEY[=VALUE] converter for arbitrary extra query-string arguments. *)
  let conv_arg =
    let parse x =
      match Stre.splitc x '=' with
      | key, value -> Ok (key, Some value)
      | exception Not_found -> Ok (x, None)
    in
    let print fmt (key, value) =
      match value with
      | Some value -> Format.fprintf fmt "%s=%s" key value
      | None -> Format.fprintf fmt "%s" key
    in
    Arg.conv (parse, print)
  in
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and query = Arg.(value & pos_right 0 string [] & info [] ~docv:"PATH[ SUBPATH1[ SUBPATH2]]" ~doc:"path components")
  and help = Arg.(value & flag & info [ "I"; "H"; ] ~doc:"show columns help")
  and headers = Arg.(value & flag & info [ "h"; "headers"; ] ~doc:"show headers")
  and columns = Arg.(value & opt_all string [] & info [ "i"; "columns"; ] ~doc:"include columns")
  and sort = sort
  and format = Arg.(value & opt (some conv_format) None & info [ "f"; "format"; ] ~doc:"output format")
  and time_units = Arg.(value & opt (some string) None & info [ "T"; "time-units"; ] ~doc:"time units")
  and size_units = Arg.(value & opt (some string) None & info [ "S"; "size-units"; ] ~doc:"size units")
  and byte_units = Arg.(value & opt (some string) None & info [ "B"; "byte-units"; ] ~doc:"byte units")
  and expand_wildcards = expand_wildcards
  and args = Arg.(value & opt_all conv_arg [] & info [ "a"; "arg"; ] ~docv:"KEY[=VALUE]" ~doc:"add arbitrary &key[=value] to the request") in
  cat common_args {
    host;
    query;
    help;
    headers;
    columns;
    sort;
    format;
    time_units;
    size_units;
    byte_units;
    expand_wildcards;
    args;
  }
let cat_tool =
  cat_tool,
  let open Term in
  let doc = "cat" in
  let exits = default_exits in
  let man = [] in
  info "cat" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es count]: count documents matching a query. *)
let count_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and doc_type = doc_type
  and timeout = timeout
  and routing = routing
  and preference = preference
  and query = Arg.(value & opt (some string) None & info [ "q"; "query"; ] ~doc:"query using query_string query")
  and body_query = Arg.(value & pos 2 (some string) None & info [] ~docv:"BODY_QUERY" ~doc:"body query")
  and analyzer = Arg.(value & opt (some string) None & info [ "A"; "analyzer"; ] ~doc:"analyzer to be used for query_string query")
  and analyze_wildcard = Arg.(value & flag & info [ "W"; "analyze-wildcard"; ] ~doc:"analyze wildcard and prefix queries in query_string query")
  and default_field = Arg.(value & opt (some string) None & info [ "d"; "default-field"; ] ~doc:"default field to be used for query_string query")
  and default_operator = Arg.(value & opt (some string) None & info [ "O"; "default-operator"; ] ~doc:"default operator to be used for query_string query")
  and retry = Arg.(value & flag & info [ "R"; "retry"; ] ~doc:"retry if there are any failed shards") in
  count common_args {
    host;
    index;
    doc_type;
    timeout;
    routing;
    preference;
    query;
    body_query;
    analyzer;
    analyze_wildcard;
    default_field;
    default_operator;
    retry;
  }
let count_tool =
  count_tool,
  let open Term in
  let doc = "count" in
  let exits = default_exits in
  let man = [] in
  info "count" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es delete]: delete document(s) by id. *)
let delete_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and doc_type = doc_type
  and doc_ids = doc_ids
  and timeout = timeout
  and routing = routing in
  delete common_args {
    host;
    index;
    doc_type;
    doc_ids;
    timeout;
    routing;
  }
let delete_tool =
  delete_tool,
  let open Term in
  let doc = "delete document(s)" in
  let exits = default_exits in
  let man = [] in
  info "delete" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es flush]: flush indices. *)
let flush_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and indices =
    let doc = "indices to flush" in
    Arg.(value & pos_right 0 string [] & info [] ~docv:"INDEX1[ INDEX2[ INDEX3...]]" ~doc)
  and force = Arg.(value & flag & info [ "f"; "force"; ] ~doc:"force flush")
  and synced = Arg.(value & flag & info [ "s"; "synced"; ] ~doc:"synced flush")
  and wait = Arg.(value & flag & info [ "w"; "wait"; ] ~doc:"wait if another flush is already ongoing") in
  flush common_args {
    host;
    indices;
    force;
    synced;
    wait;
  }
let flush_tool =
  flush_tool,
  let open Term in
  let doc = "flush indices" in
  let exits = default_exits in
  let man = [] in
  info "flush" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es get]: fetch document(s) by id. *)
let get_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and doc_type = doc_type
  and doc_ids = doc_ids
  and timeout = timeout
  and source_includes = source_includes
  and source_excludes = source_excludes
  and routing = routing
  and preference = preference
  and format = format in
  get common_args {
    host;
    index;
    doc_type;
    doc_ids;
    timeout;
    source_includes;
    source_excludes;
    routing;
    preference;
    format;
  }
let get_tool =
  get_tool,
  let open Term in
  let doc = "get document(s)" in
  let exits = default_exits in
  let man = [] in
  info "get" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es health]: cluster health across one or more hosts. *)
let health_tool =
  let%map common_args = common_args
  and hosts = Arg.(value & pos_all string [] & info [] ~docv:"HOST1[ HOST2[ HOST3...]]" ~doc:"hosts") in
  health common_args {
    hosts;
  }
let health_tool =
  health_tool,
  let open Term in
  let doc = "cluster health" in
  let exits = default_exits in
  let man = [] in
  info "health" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es index]: inspect or administer an index (create/delete/open/close/
    freeze/unfreeze, or show/put mappings and settings). *)
let index_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and action =
    (* Mutually exclusive action flags; default is a plain GET of the index. *)
    Arg.(value & vflag (Get : index_action) [
      Create, info [ "C"; "create"; ] ~doc:"create index";
      Delete, info [ "D"; "delete"; ] ~doc:"delete index";
      Open, info [ "o"; "open"; ] ~doc:"open index";
      Close, info [ "c"; "close"; ] ~doc:"close index";
      Freeze, info [ "f"; "freeze"; ] ~doc:"freeze index";
      Unfreeze, info [ "u"; "unfreeze"; ] ~doc:"unfreeze index";
      Mappings, info [ "m"; "mappings"; ] ~doc:"operate on index mappings";
      (* fixed help-text typo: "operator" -> "operate" *)
      Settings, info [ "s"; "settings"; ] ~doc:"operate on index settings";
    ])
  and expand_wildcards = expand_wildcards
  and body = Arg.(value & pos 2 (some string) None & info [] ~docv:"BODY" ~doc:"body to put") in
  index_tool common_args {
    host;
    index;
    action;
    expand_wildcards;
    body;
  }
let index_tool =
  index_tool,
  let open Term in
  let doc = "index" in
  let exits = default_exits in
  let man = [] in
  info "index" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es nodes]: list cluster nodes, optionally checking expected ones. *)
let nodes_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and check_nodes =
    let doc = "check presence of specified nodes" in
    Arg.(value & pos_right 0 string [] & info [] ~docv:"HOST1[ HOST2[ HOST3...]]" ~doc)
  in
  nodes common_args {
    host;
    check_nodes;
  }
let nodes_tool =
  nodes_tool,
  let open Term in
  let doc = "cluster nodes" in
  let exits = default_exits in
  let man = [] in
  info "nodes" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es put]: index a single document. *)
let put_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and doc_type = doc_type
  and doc_id = Arg.value doc_id
  and routing = routing
  and body =
    let doc = "document source to put" in
    Arg.(value & opt (some string) None & info [ "s"; "source"; ] ~docv:"DOC" ~doc)
  in
  put common_args {
    host;
    index;
    doc_type;
    doc_id;
    routing;
    body;
  }
let put_tool =
  put_tool,
  let open Term in
  let doc = "put document" in
  let exits = default_exits in
  let man = [] in
  info "put" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es recovery]: show shard recovery status with optional include/exclude
    filters and per-shard output formatting. *)
let recovery_tool =
  (* Converter for a single shard-column name. *)
  let format =
    let parse format =
      match index_shard_format_of_string format with
      | exception Failure msg -> Error (`Msg msg)
      | format -> Ok format
    in
    let print fmt format =
      Format.fprintf fmt "%s" (string_of_index_shard_format format)
    in
    Arg.conv (parse, print)
  in
  (* COLUMN=VALUE filter and comma-separated column list. *)
  let filter = Arg.pair ~sep:'=' format Arg.string in
  let format = Arg.list format in
  let%map common_args = common_args
  and host = Common_args.host
  and indices =
    let doc = "indices to check" in
    Arg.(value & pos_right 0 string [] & info [] ~docv:"INDEX1[ INDEX2[ INDEX3...]]" ~doc)
  and format = Arg.(value & opt_all format [] & info [ "f"; "format"; ] ~doc:"map hits according to specified format")
  and filter_include =
    let doc = "include only shards matching filter" in
    Arg.(value & opt_all filter [] & info [ "i"; "include"; ] ~doc ~docv:"COLUMN=VALUE")
  and filter_exclude =
    let doc = "exclude shards matching filter" in
    Arg.(value & opt_all filter [] & info [ "e"; "exclude"; ] ~doc ~docv:"COLUMN=VALUE")
  in
  recovery common_args {
    host;
    indices;
    filter_include;
    filter_exclude;
    format;
  }
let recovery_tool =
  recovery_tool,
  let open Term in
  let doc = "cluster recovery" in
  let exits = default_exits in
  let man = [] in
  info "recovery" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es refresh]: refresh indices. *)
let refresh_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and indices =
    let doc = "indices to refresh" in
    Arg.(value & pos_right 0 string [] & info [] ~docv:"INDEX1[ INDEX2[ INDEX3...]]" ~doc)
  in
  refresh common_args {
    host;
    indices;
  }
let refresh_tool =
  refresh_tool,
  let open Term in
  let doc = "refresh indices" in
  let exits = default_exits in
  let man = [] in
  info "refresh" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es search]: run a search, optionally with scroll, slices, retry on
    failed shards, simple aggregations, and per-hit output formatting. *)
let search_tool =
  (* Converter for the compact NAME:TYPE[:PARAM[:PARAM...]] aggregation
     syntax.
     NOTE: the local Let_syntax below is deliberately nonstandard:
     - [map] has *bind* semantics over [result], so [let%map x = e in body]
       short-circuits on [Error] and expects [body] to return a [result];
     - [bind] pops an optional head off the remaining parameter list, so
       [let%bind (p, rest) = params in ...] consumes one optional parameter. *)
  let aggregation =
    let module Let_syntax =
      struct
        let map ~f = function Ok x -> f x | Error _ as error -> error
        let bind ~f = function [] -> f (None, []) | hd :: tl -> f (Some hd, tl)
      end
    in
    let missing_field name = Error (`Msg (sprintf "terms aggregation %s missing field" name)) in
    (* Lift a Cmdliner converter to parse an optional raw parameter. *)
    let parse conv =
      let parse = Arg.conv_parser conv in
      fun x -> Option.map_default (fun x -> let%map x = parse x in Ok (Some x)) (Ok None) x
    in
    let parse_bool = parse Arg.bool in
    let parse_float = parse Arg.float in
    let parse_int = parse Arg.int in
    (* FIELD[:MISSING] — parameters shared by the metrics aggregations. *)
    let parse_metrics name = function
      | [] -> missing_field name
      | field :: params ->
        let%bind (missing, params) = params in
        let agg = { field; missing; } in
        Ok (agg, params)
    in
    let parse_cardinality name params =
      let%map (common, params) = parse_metrics name params in
      let%bind (precision_threshold, params) = params in
      let%map precision_threshold = parse_int precision_threshold in
      Ok (Cardinality { common; precision_threshold; }, params)
    in
    let parse_extended_stats name params =
      let%map (common, params) = parse_metrics name params in
      let%bind (sigma, params) = params in
      let%map sigma = parse_float sigma in
      Ok (ExtendedStats { common; sigma; }, params)
    in
    let parse_string_stats name params =
      let%map (common, params) = parse_metrics name params in
      let%bind (show_distribution, params) = params in
      let%map show_distribution = parse_bool show_distribution in
      Ok (StringStats { common; show_distribution; }, params)
    in
    let parse_terms name = function
      | [] -> missing_field name
      | field :: params ->
        let%bind (size, params) = params in
        let%map size = parse_int size in
        let agg = { field; size; } in
        Ok (Terms agg, params)
    in
    let parse_field name = function
      | [] -> missing_field name
      | field :: params -> Ok ({ field; }, params)
    in
    let parse agg =
      match Stre.nsplitc agg ':' with
      | [] -> assert false
      | name :: [] -> Error (`Msg (sprintf "aggregation %s missing type" name))
      | name :: type_ :: params ->
        let%map (agg, params) =
          match type_ with
          | "a" | "avg" -> let%map (agg, params) = parse_metrics name params in Ok (Avg agg, params)
          | "u" | "cardinal" | "cardinality" -> parse_cardinality name params
          | "e" | "est" | "extended_stats" -> parse_extended_stats name params
          | "min" -> let%map (agg, params) = parse_metrics name params in Ok (Min agg, params)
          | "max" -> let%map (agg, params) = parse_metrics name params in Ok (Max agg, params)
          | "st" | "stats" -> let%map (agg, params) = parse_metrics name params in Ok (Stats agg, params)
          | "sst" | "string_stats" -> parse_string_stats name params
          | "s" | "sum" -> let%map (agg, params) = parse_metrics name params in Ok (Sum agg, params)
          | "t" | "terms" -> parse_terms name params
          | "n" | "count" | "value_count" -> let%map (agg, params) = parse_field name params in Ok (ValueCount agg, params)
          | agg -> Error (`Msg (sprintf "unknown aggregation type: %s" agg))
        in
        (* Any parameters left unconsumed are a user error. *)
        match params with
        | [] -> Ok (name, agg)
        | _ :: _ ->
          let msg = sprintf "%s aggregation %s unknown extra parameters: %s" (string_of_aggregation agg) name (String.concat ":" params) in
          Error (`Msg msg)
    in
    (* Inverse of [parse], used by Cmdliner for display. *)
    let print fmt (name, agg) =
      let cons map hd tl = match hd with Some hd -> map hd :: tl | None -> tl in
      let params =
        match agg with
        | Avg params | Max params | Min params | Stats params | Sum params ->
          let { field; missing } = params in
          name :: field :: cons id missing []
        | Cardinality { common = { field; missing; }; precision_threshold; } ->
          let params = cons string_of_int precision_threshold [] in
          let params = cons id missing params in
          name :: field :: params
        | ExtendedStats { common = { field; missing; }; sigma; } ->
          let params = cons string_of_float sigma [] in
          let params = cons id missing params in
          name :: field :: params
        | StringStats { common = { field; missing; }; show_distribution; } ->
          let params = cons string_of_bool show_distribution [] in
          let params = cons id missing params in
          name :: field :: params
        | Terms { field; size } ->
          name :: field :: cons string_of_int size []
        | ValueCount { field; } ->
          name :: field :: []
      in
      Format.fprintf fmt "%s" (String.concat ":" params)
    in
    Arg.(conv (parse, print))
  in
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and doc_type = doc_type
  and timeout = timeout
  and source_includes = source_includes
  and source_excludes = source_excludes
  and routing = routing
  and preference = preference
  and format = format
  and size = Arg.(value & opt (some int) None & info [ "n"; "size"; ] ~doc:"size")
  and from = Arg.(value & opt (some int) None & info [ "o"; "from"; ] ~doc:"from")
  and sort = sort
  and fields = Arg.(value & opt_all string [] & info [ "F"; "fields"; ] ~doc:"fields")
  and scroll = Arg.(value & opt (some string) None & info [ "S"; "scroll"; ] ~doc:"scroll")
  and slice_max = Arg.(value & opt (some int) None & info [ "N"; "slice-max"; ] ~doc:"slice_max")
  and slice_id = Arg.(value & opt (some int) None & info [ "I"; "slice-id"; ] ~doc:"slice_id")
  and query = Arg.(value & opt (some string) None & info [ "q"; "query"; ] ~doc:"query using query_string query")
  and body_query = Arg.(value & pos 2 (some string) None & info [] ~docv:"BODY_QUERY" ~doc:"body query")
  and aggregations = Arg.(value & opt_all aggregation [] & info [ "a"; "aggregation"; ] ~doc:"add simple aggregation")
  and analyzer = Arg.(value & opt (some string) None & info [ "A"; "analyzer"; ] ~doc:"analyzer to be used for query_string query")
  and analyze_wildcard = Arg.(value & flag & info [ "W"; "analyze-wildcard"; ] ~doc:"analyze wildcard and prefix queries in query_string query")
  and default_field = Arg.(value & opt (some string) None & info [ "d"; "default-field"; ] ~doc:"default field to be used for query_string query")
  and default_operator = Arg.(value & opt (some string) None & info [ "O"; "default-operator"; ] ~doc:"default operator to be used for query_string query")
  and explain = Arg.(value & flag & info [ "E"; "explain"; ] ~doc:"explain hits")
  and show_count = Arg.(value & flag & info [ "c"; "show-count"; ] ~doc:"output total number of hits")
  and track_total_hits = Arg.(value & opt (some string) None & info [ "C"; "track-total-hits"; ] ~doc:"track total number hits (true, false, or a number)")
  and retry = Arg.(value & flag & info [ "R"; "retry"; ] ~doc:"retry if there are any failed shards") in
  search common_args {
    host;
    index;
    doc_type;
    timeout;
    size;
    from;
    sort;
    source_includes;
    source_excludes;
    fields;
    routing;
    preference;
    scroll;
    slice_id;
    slice_max;
    query;
    body_query;
    aggregations;
    analyzer;
    analyze_wildcard;
    default_field;
    default_operator;
    explain;
    show_count;
    track_total_hits;
    retry;
    format;
  }
let search_tool =
  search_tool,
  let open Term in
  let doc = "search" in
  let exits = default_exits in
  let man = [] in
  info "search" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(** [es settings]: command-line front-end for the [Settings] module. *)
let settings_tool =
  let open Common_args in
  let open Settings in
  let%map common_args = common_args
  and host = host
  and keys = Arg.(value & pos_right 0 string [] & info [] ~docv:"KEYS" ~doc:"setting keys")
  and reset = Arg.(value & flag & info [ "r"; "reset"; ] ~doc:"reset keys")
  and include_defaults = Arg.(value & flag & info [ "D"; "include-defaults"; ] ~doc:"include defaults")
  and input = Arg.(value & vflag (Text : input) [ JSON, info [ "j"; "input-json"; ] ~doc:"json input format"; ])
  and output =
    let output =
      let parse output =
        match output with
        | "text" -> Ok (Text : output)
        | "json" -> Ok JSON
        | "raw" -> Ok Raw
        | _ -> Error (`Msg (sprintf "unknown output format: %s" output))
      in
      let print fmt output =
        let output =
          match output with
          | (Text : output) -> "text"
          | JSON -> "json"
          | Raw -> "raw"
        in
        Format.fprintf fmt "%s" output
      in
      Arg.(conv (parse, print))
    in
    (* -J without an explicit value defaults to JSON output. *)
    Arg.(value & opt ~vopt:(JSON : output) output Text & info [ "J"; "output"; ] ~doc:"choose output format")
  and type_ =
    let type_transient = Some Transient, Arg.info [ "t"; "transient"; ] ~doc:"transient setting" in
    let type_persistent = Some Persistent, Arg.info [ "p"; "persistent"; ] ~doc:"persistent setting" in
    let type_defaults = Some Defaults, Arg.info [ "d"; "default"; ] ~doc:"default setting" in
    Arg.(value & vflag None [ type_transient; type_persistent; type_defaults; ])
  in
  settings common_args {
    host;
    keys;
    reset;
    include_defaults;
    input;
    output;
    type_;
  }
let settings_tool =
  settings_tool,
  let open Term in
  let doc = "manage cluster settings" in
  let exits = default_exits in
  let man = [] in
  info "settings" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* All subcommands, dispatched by name by [Term.eval_choice] below. *)
let tools = [
  alias_tool;
  cat_tool;
  count_tool;
  delete_tool;
  flush_tool;
  get_tool;
  health_tool;
  index_tool;
  nodes_tool;
  put_tool;
  recovery_tool;
  refresh_tool;
  search_tool;
  settings_tool;
]

(* Entry point: run the selected tool; [ErrorExit] means the error was
   already printed, anything else is logged as an uncaught exception. *)
let () =
  try
    let argv = Common.get_argv () in
    Term.(exit (eval_choice ~catch:false ~argv default_tool tools))
  with
  | ErrorExit -> exit 1
  | exn -> log #error ~exn "uncaught exception"; exit 125
| null | https://raw.githubusercontent.com/cyborgize/es-cli/fa757156fa08b9da7110dea4898952bf29160e64/src/es.ml | ocaml | FIXME what is repository?
FIXME what is snapshot?
FIXME what's the difference w/ files_total?
FIXME what's the difference w/ bytes_total?
Settings | open Devkit
open ExtLib
open Printf
module J = Yojson.Safe
module SS = Set.Make(String)
let log = Log.from "es"
let http_timeout = ref (Time.seconds 60)
type common_args = {
es_version : Config_t.version option;
verbose : bool;
}
let args =
ExtArg.[
"-T", String (fun t -> http_timeout := Time.of_compact_duration t), " set HTTP request timeout (format: 45s, 2m, or 1m30s)";
]
let json_content_type = "application/json"
let ndjson_content_type = "application/x-ndjson"
type content_type =
| JSON of string
| NDJSON of string
(* Convert an optional tagged body into the [`Raw (mime, payload)] form
   expected by [Web.http_request_lwt']. *)
let json_body_opt body =
  match body with
  | None -> None
  | Some (JSON payload) -> Some (`Raw (json_content_type, payload))
  | Some (NDJSON payload) -> Some (`Raw (ndjson_content_type, payload))
(* Build "HOST/p1/p2?k1=v1&k2" from optional path segments and arguments.
   An argument maps as: (name, None) -> dropped, (name, Some None) -> bare
   "name", (name, Some (Some v)) -> "name=value" (both urlencoded). *)
let make_url host path args =
  let encode_arg = function
    | name, Some (Some value) -> Some (Web.urlencode name ^ "=" ^ Web.urlencode value)
    | name, Some None -> Some (Web.urlencode name)
    | _name, None -> None
  in
  let query =
    match List.filter_map encode_arg args with
    | [] -> ""
    | parts -> "?" ^ String.concat "&" parts
  in
  let path =
    match List.filter_map id path with
    | [] -> ""
    | segments -> "/" ^ String.concat "/" segments
  in
  host ^ path ^ query
(* Low-level request: returns the raw Web result. *)
let request ?verbose ?body action host path args =
  Web.http_request_lwt' ?verbose ~timeout:(Time.to_sec !http_timeout) ?body:(json_body_opt body) action (make_url host path args)

(* High-level request (intentionally shadows the above): classifies the
   outcome and parses the response body with [unformat].
   - curl-level failures fail the Lwt thread;
   - non-2xx responses (and 404s carrying an ES "error" payload) yield
     [Error body];
   - otherwise the parsed body is returned as [Ok]. *)
let request ?verbose ?body action host path args unformat =
  match%lwt request ?verbose ?body action host path args with
  | `Error code -> Exn_lwt.fail "(%d) %s" (Curl.errno code) (Curl.strerror code)
  | `Ok (code, result) ->
    let is_error_response result = Elastic_j.((response''_of_string result).error) <> None in
    (* A 404 without an "error" field is a legitimate "not found" document. *)
    let is_severe_error code result = code / 100 <> 2 && (code <> 404 || is_error_response result) in
    match is_severe_error code result with
    | exception exn -> Exn_lwt.fail ~exn "http %d : %s" code result
    | true -> Lwt.return_error result
    | false ->
      match unformat result with
      | exception exn -> Exn_lwt.fail ~exn "unformat %s" result
      | docs -> Lwt.return_ok docs
(* Raised to exit with status 1 after the error has already been printed. *)
exception ErrorExit

(* Print a formatted message to stderr, then fail the Lwt thread with
   [ErrorExit]. *)
let fail_lwt fmt =
  ksprintf begin fun s ->
    let%lwt () = Lwt_io.eprintl s in
    Lwt.fail ErrorExit
  end fmt

let default_doc_type = "_doc"

(* atdgen-style JSON reader/writer function types. *)
type 't json_reader = J.lexer_state -> Lexing.lexbuf -> 't
type 't json_writer = Bi_outbuf.t -> 't -> unit

(* Per-major-version differences in the search API surface. *)
type es_version_config = {
  read_total : Elastic_t.total json_reader;   (* "total hits" decoder *)
  write_total : Elastic_t.total json_writer;
  default_get_doc_type : string;
  default_put_doc_type : string option;
}

(* ES 5/6: legacy "total" encoding (see [Elastic_j.read_es6_total]) and
   [_all] as the default doc type for reads. *)
let es6_config = {
  read_total = Elastic_j.read_es6_total;
  write_total = Elastic_j.write_es6_total;
  default_get_doc_type = "_all";
  default_put_doc_type = None;
}

(* ES 7/8: current "total" encoding and [_doc] everywhere. *)
let es7_config = {
  read_total = Elastic_j.read_total;
  write_total = Elastic_j.write_total;
  default_get_doc_type = default_doc_type;
  default_put_doc_type = Some default_doc_type;
}
let rec coalesce = function Some _ as hd :: _ -> hd | None :: tl -> coalesce tl | [] -> None
(* Probe the cluster root endpoint and classify its major version.
   Fails on unparsable or unsupported version numbers. *)
let get_es_version { verbose; _ } host =
  match%lwt request ~verbose `GET host [] [] Elastic_j.main_of_string with
  | Error error -> fail_lwt "could not get ES version:\n%s" error
  | Ok { Elastic_t.version = { number; }; } ->
  match Stre.nsplitc number '.' with
  | [] -> Exn_lwt.fail "empty ES version number"
  | "5" :: _ -> Lwt.return `ES5
  | "6" :: _ -> Lwt.return `ES6
  | "7" :: _ -> Lwt.return `ES7
  | "8" :: _ -> Lwt.return `ES8
  | other :: _ ->
  (* distinguish "not a number" from "a number we do not support" *)
  match int_of_string other with
  | exception exn -> Exn_lwt.fail ~exn "invalid ES version number : %s" number
  | _ -> Exn_lwt.fail "unsupported ES version number : %s" number
(* Version-specific behavior for a known major version. *)
let get_es_version_config' = function
  | `ES5 | `ES6 -> es6_config
  | `ES7 | `ES8 -> es7_config
(* Resolve the effective ES version config.  Precedence: command-line
   [es_version], then per-cluster [cluster_version], then global config
   version; [`Auto] or nothing at all probes the server. *)
let get_es_version_config common_args host es_version { Config_t.version = config_version; _ } cluster_version =
  let version = coalesce [ es_version; cluster_version; config_version; ] in
  let%lwt version =
    match version with
    | Some (#Wrap.Version.exact as version) -> Lwt.return version
    | None | Some `Auto -> get_es_version common_args host
  in
  Lwt.return (get_es_version_config' version)
(* curl-style body argument: "@file" reads the query body from [file],
   anything else is used verbatim. *)
let get_body_query_file body_query =
  match body_query <> "" && body_query.[0] = '@' with
  | true -> Control.with_input_txt (String.slice ~first:1 body_query) IO.read_all
  | false -> body_query
(* Print the global usage message and the sorted list of available tools on
   stderr. *)
let usage tools =
  fprintf stderr "Usage: %s {<tool>|-help|version}\n" Sys.executable_name;
  fprintf stderr "where <tool> is one of:\n";
  let sorted = List.sort ~cmp:compare tools in
  List.iter (fun (name, _action) -> fprintf stderr "  %s\n" name) sorted
(* ExtArg spec for a repeatable string option: every occurrence is tucked
   onto a [string list ref] (most recent first). *)
let str_list =
  ExtArg.make_arg @@ object
    method store v = Arg.String (tuck v)
    method kind = "string"
    method show v = match !v with [] -> "none" | l -> String.concat "," l
  end
(* Join a non-empty list with [sep]; [None] for the empty list. *)
let csv ?(sep = ",") items =
  match items with
  | [] -> None
  | _ :: _ -> Some (String.concat sep items)

(* Render an optional int as an optional decimal string. *)
let int = Option.map string_of_int

(* The sole element of a list; [None] when empty, asserts on longer lists. *)
let one = function
  | [] -> None
  | [ x ] -> Some x
  | _ -> assert false

(* Boolean flag rendering: [None] when equal to [default], otherwise the
   literal "true"/"false". *)
let flag ?(default = false) b =
  if b = default then None
  else if b then Some "true"
  else Some "false"
(* Selectable output columns when printing search/get hits. *)
type hit_format = [
  | `FullID
  | `ID
  | `Type
  | `Index
  | `Routing
  | `Hit
  | `Source
]
(* Renders one decoded hit into the string for one output column. *)
type hit_formatter = J.t Elastic_t.option_hit -> string
(* Parse a hit output column name; fails on unknown names. *)
let hit_format_of_string s =
  match s with
  | "full_id" -> `FullID
  | "id" -> `ID
  | "type" -> `Type
  | "index" -> `Index
  | "routing" -> `Routing
  | "hit" -> `Hit
  | "source" -> `Source
  | _ -> Exn.fail "unknown hit field \"%s\"" s
(* Canonical name of a hit output column (inverse of [hit_format_of_string]). *)
let string_of_hit_format fmt =
  match fmt with
  | `FullID -> "full_id" | `ID -> "id" | `Type -> "type" | `Index -> "index"
  | `Routing -> "routing" | `Hit -> "hit" | `Source -> "source"
(* Column -> rendering function for one hit.  [default_doc_type] is
   substituted when the hit carries no doc_type; [`Source] renders the raw
   JSON source, or "" when absent. *)
let map_of_hit_format =
  let open Elastic_t in function
  | `FullID -> (fun ({ index; doc_type; id; _ } : 'a Elastic_t.option_hit) ->
      sprintf "/%s/%s/%s" index (Option.default default_doc_type doc_type) id)
  | `ID -> (fun hit -> hit.id)
  | `Type -> (fun hit -> Option.default default_doc_type hit.doc_type)
  | `Index -> (fun hit -> hit.index)
  | `Routing -> (fun hit -> Option.default "" hit.routing)
  | `Hit -> (fun hit -> Elastic_j.string_of_option_hit J.write_json hit)
  | `Source -> (fun { source; _ } -> Option.map_default J.to_string "" source)
(* Selectable output columns for the recovery subcommand (one row per shard
   recovery). *)
type index_shard_format = [
  | `Index
  | `Shard
  | `Time
  | `Type
  | `Stage
  | `SourceHost
  | `SourceNode
  | `TargetHost
  | `TargetNode
  | `Repository
  | `Snapshot
  | `Files
  | `FilesRecovered
  | `FilesPercent
  | `FilesTotal
  | `Bytes
  | `BytesRecovered
  | `BytesPercent
  | `BytesTotal
  | `TranslogOps
  | `TranslogOpsRecovered
  | `TranslogOpsPercent
]
(* Parse a recovery output column name; fails on unknown names. *)
let index_shard_format_of_string s =
  match s with
  | "index" -> `Index
  | "shard" -> `Shard
  | "time" -> `Time
  | "type" -> `Type
  | "stage" -> `Stage
  | "source_host" -> `SourceHost
  | "source_node" -> `SourceNode
  | "target_host" -> `TargetHost
  | "target_node" -> `TargetNode
  | "repository" -> `Repository
  | "snapshot" -> `Snapshot
  | "files" -> `Files
  | "files_recovered" -> `FilesRecovered
  | "files_percent" -> `FilesPercent
  | "files_total" -> `FilesTotal
  | "bytes" -> `Bytes
  | "bytes_recovered" -> `BytesRecovered
  | "bytes_percent" -> `BytesPercent
  | "bytes_total" -> `BytesTotal
  | "translog_ops" -> `TranslogOps
  | "translog_ops_recovered" -> `TranslogOpsRecovered
  | "translog_ops_percent" -> `TranslogOpsPercent
  | _ -> Exn.fail "unknown index shard field \"%s\"" s
(* Canonical name of a recovery output column (inverse of
   [index_shard_format_of_string]). *)
let string_of_index_shard_format fmt =
  match fmt with
  | `Index -> "index" | `Shard -> "shard" | `Time -> "time"
  | `Type -> "type" | `Stage -> "stage"
  | `SourceHost -> "source_host" | `SourceNode -> "source_node"
  | `TargetHost -> "target_host" | `TargetNode -> "target_node"
  | `Repository -> "repository" | `Snapshot -> "snapshot"
  | `Files -> "files" | `FilesRecovered -> "files_recovered"
  | `FilesPercent -> "files_percent" | `FilesTotal -> "files_total"
  | `Bytes -> "bytes" | `BytesRecovered -> "bytes_recovered"
  | `BytesPercent -> "bytes_percent" | `BytesTotal -> "bytes_total"
  | `TranslogOps -> "translog_ops"
  | `TranslogOpsRecovered -> "translog_ops_recovered"
  | `TranslogOpsPercent -> "translog_ops_percent"
(* Column -> extraction function for one shard recovery record.
   This match must stay exhaustive over [index_shard_format] : the function
   is applied to every member of [default_index_shard_format], so a missing
   case makes those applications ill-typed.  The original text was missing
   the `Repository/`Snapshot/`Files/`Bytes cases. *)
let map_of_index_shard_format =
  let open Elastic_t in function
  | `Index -> (fun index (_shard : index_shard) -> `String index)
  | `Shard -> (fun _index shard -> `Int shard.id)
  | `Time -> (fun _index shard -> `Duration (Time.msec shard.index.total_time_in_millis))
  | `Type -> (fun _index shard -> `Symbol shard.kind)
  | `Stage -> (fun _index shard -> `Symbol shard.stage)
  | `SourceHost -> (fun _index shard -> match shard.source.host with Some host -> `String host | None -> `None)
  | `SourceNode -> (fun _index shard -> match shard.source.name with Some name -> `String name | None -> `None)
  | `TargetHost -> (fun _index shard -> match shard.target.host with Some host -> `String host | None -> `None)
  | `TargetNode -> (fun _index shard -> match shard.target.name with Some name -> `String name | None -> `None)
  (* NOTE(review): snapshot recoveries expose repository/snapshot names under
     the recovery source in the ES API; the exact Elastic_t fields are not
     visible here, so these render "n/a" for now — TODO confirm schema and
     fill in. *)
  | `Repository -> (fun _index _shard -> `None)
  | `Snapshot -> (fun _index _shard -> `None)
  (* NOTE(review): mapped to the totals pending confirmation of the intended
     semantics of the bare "files"/"bytes" columns. *)
  | `Files -> (fun _index shard -> `Int shard.index.files.total)
  | `Bytes -> (fun _index shard -> `Int shard.index.size.total_in_bytes)
  | `FilesRecovered -> (fun _index shard -> `Int shard.index.files.recovered)
  | `FilesPercent -> (fun _index shard -> `String shard.index.files.percent)
  | `FilesTotal -> (fun _index shard -> `Int shard.index.files.total)
  | `BytesRecovered -> (fun _index shard -> `Int shard.index.size.recovered_in_bytes)
  | `BytesPercent -> (fun _index shard -> `String shard.index.size.percent)
  | `BytesTotal -> (fun _index shard -> `Int shard.index.size.total_in_bytes)
  | `TranslogOps -> (fun _index shard -> `Int shard.translog.total)
  | `TranslogOpsRecovered -> (fun _index shard -> `Int shard.translog.recovered)
  | `TranslogOpsPercent -> (fun _index shard -> `String shard.translog.percent)
(* Column order used by the recovery subcommand when no explicit format is
   given. *)
let default_index_shard_format = [
  `Index; `Shard; `Time; `Type; `Stage;
  `SourceHost; `SourceNode; `TargetHost; `TargetNode;
  `Repository; `Snapshot;
  `Files; `FilesRecovered; `FilesPercent; `FilesTotal;
  `Bytes; `BytesRecovered; `BytesPercent; `BytesTotal;
  `TranslogOps; `TranslogOpsRecovered; `TranslogOpsPercent;
]
(* Render an extracted cell value for display; absent values show as "n/a". *)
let map_show cell =
  match cell with
  | `String s | `Symbol s -> s
  | `Int i -> string_of_int i
  | `Float f -> string_of_float f
  | `Duration d -> Time.compact_duration d
  | `None -> "n/a"
(* Build an equality predicate matching an extracted cell value against a
   user-supplied filter string.  Symbols compare case-insensitively; numbers
   are compared after parsing the filter value.  The `Duration case was
   garbled in the original text (its FIXME comment lost its delimiters); it
   is needed because [map_of_index_shard_format] produces `Duration cells. *)
let compare_fmt = function
  | `String x -> String.equal x
  | `Symbol x -> String.equal (String.lowercase_ascii x) $ String.lowercase_ascii
  | `Int x -> Factor.Int.equal x $ int_of_string
  | `Float x -> Factor.Float.equal x $ float_of_string
  (* FIXME parse time ? — for now compare against the rendered duration,
     i.e. the same formatting [map_show] uses *)
  | `Duration x -> String.equal (Time.compact_duration x)
  | `None -> (fun _ -> false)
(* Split a /-separated document id into its components. *)
let split doc_id = Stre.nsplitc doc_id '/'

(* Inverse of [split]. *)
let join parts = String.concat "/" parts

(* A "pure" id has a single component: "id" or "/id" (leading slash allowed). *)
let is_pure_id' = function
  | [ _ ] | [ ""; _ ] -> true
  | _ -> false

let is_pure_id doc_id = is_pure_id' (split doc_id)
(* The helpers below normalize /-separated document ids (already [split])
   into (index, doc_type option, doc_id) triples; primed variants take the
   pre-split component list, unprimed ones split a raw string.  A leading
   empty component (from a leading '/') is tolerated everywhere. *)

(* "/index/id" or "/index/type/id"; the index must be present. *)
let map_index_doc_id' doc_type doc_id =
  match doc_id with
  | [ doc_id; ] | [ ""; doc_id; ] -> Exn_lwt.fail "document id missing index name : /%s" doc_id
  | [ index; doc_id; ] | [ ""; index; doc_id; ] -> Lwt.return (index, doc_type, doc_id)
  | [ index; doc_type; doc_id; ] | [ ""; index; doc_type; doc_id; ] -> Lwt.return (index, Some doc_type, doc_id)
  | _ -> Exn_lwt.fail "invalid document id : %s" (join doc_id)
let map_index_doc_id doc_type doc_id = map_index_doc_id' doc_type (split doc_id)
(* "id" or "type/id" relative to a known [index]. *)
let map_doc_id' index doc_type doc_id =
  match doc_id with
  | [ doc_id; ] | [ ""; doc_id; ] -> Lwt.return (index, doc_type, doc_id)
  | [ doc_type; doc_id; ] | [ ""; doc_type; doc_id; ] -> Lwt.return (index, Some doc_type, doc_id)
  | _ -> Exn_lwt.fail "invalid document id : /%s/%s" index (join doc_id)
let map_doc_id index doc_type doc_id = map_doc_id' index doc_type (split doc_id)
(* Same as [map_doc_id'] but wraps the resulting id in [Some]. *)
let map_doc_id_opt' index doc_type doc_id =
  let%lwt (index, doc_type, doc_id) = map_doc_id' index doc_type doc_id in
  Lwt.return (index, doc_type, Some doc_id)
let map_doc_id_opt index doc_type doc_id = map_doc_id_opt' index doc_type (split doc_id)
(* Bare "id" relative to a known [index] and mandatory [doc_type]. *)
let map_typed_doc_id' index doc_type doc_id =
  match doc_id with
  | [ doc_id; ] | [ ""; doc_id; ] -> Lwt.return (index, Some doc_type, doc_id)
  | _ -> Exn_lwt.fail "invalid document id : /%s/%s/%s" index doc_type (join doc_id)
let map_typed_doc_id index doc_type doc_id = map_typed_doc_id' index doc_type (split doc_id)
(* Same as [map_typed_doc_id'] but wraps the resulting id in [Some]. *)
let map_typed_doc_id_opt' index doc_type doc_id =
  let%lwt (index, doc_type, doc_id) = map_typed_doc_id' index doc_type doc_id in
  Lwt.return (index, doc_type, Some doc_id)
let map_typed_doc_id_opt index doc_type doc_id = map_typed_doc_id_opt' index doc_type (split doc_id)
(* Classify the INDEX argument: a bare index name, an ambiguous
   "index/doc_type-or-id" pair, or a full "index/doc_type/id" triple.
   A leading '/' is tolerated. *)
let map_index_mode index =
  match Stre.nsplitc index '/' with
  | [ name; ] | [ ""; name; ] -> `Index name
  | [ name; rest; ] | [ ""; name; rest; ] -> `IndexOrID (name, rest)
  | [ name; doc_type; doc_id; ] | [ ""; name; doc_type; doc_id; ] -> `ID (name, doc_type, doc_id)
  | _ -> Exn.fail "invalid index name or document id : %s" index
(* Combine the INDEX argument and trailing DOC_ID arguments into a fetch
   plan: [`None] (nothing to do), [`Single (index, doc_type, doc_id)], or
   [`Multi (docs, common_index, common_doc_type)] for a multi-get, where
   [docs] keeps only the components that are NOT common to every document
   (so the common parts can go in the URL path instead). *)
let map_ids ~default_get_doc_type index doc_type doc_ids =
  (* Build the `Multi case: factor out index/doc_type shared by all docs. *)
  let multiple (first_index, first_doc_type, _doc_id as first_doc) other_doc_ids =
    let first_doc_type = Option.default default_get_doc_type first_doc_type in
    (* keep the accumulator only while every element agrees with it *)
    let merge_equal x y = match x with Some x' when String.equal x' y -> x | _ -> None in
    let (docs, common_index, common_doc_type) =
      List.fold_left begin fun (docs, common_index, common_doc_type) (index, doc_type, _doc_id as doc) ->
        let common_index = merge_equal common_index index in
        let common_doc_type = merge_equal common_doc_type (Option.default default_get_doc_type doc_type) in
        doc :: docs, common_index, common_doc_type
      end ([ first_doc; ], Some first_index, Some first_doc_type) other_doc_ids
    in
    let (docs, index, doc_type) =
      match common_index, common_doc_type with
      | Some _, Some _ ->
        (* both common: strip them from every per-doc entry *)
        let docs = List.map (fun (_index, _doc_type, doc_id) -> None, None, doc_id) docs in
        docs, common_index, common_doc_type
      | Some _, None ->
        (* only the index is common *)
        let docs = List.map (fun (_index, doc_type, doc_id) -> None, doc_type, doc_id) docs in
        docs, common_index, None
      | None, _ ->
        (* nothing common: keep everything per-doc *)
        let docs = List.map (fun (index, doc_type, doc_id) -> Some index, doc_type, doc_id) docs in
        docs, None, None
    in
    Lwt.return (`Multi (docs, index, doc_type))
  in
  let%lwt mode = Lwt.wrap1 map_index_mode index in
  let doc_ids = List.map split doc_ids in
  match mode, doc_ids with
  | `Index _, [] ->
    let%lwt () = Lwt_io.eprintl "only INDEX is provided and no DOC_ID" in
    Lwt.return `None
  | `Index index, [ doc_id; ] ->
    let%lwt (index, doc_type, doc_id) = map_doc_id' index doc_type doc_id in
    let doc_type = Option.default default_get_doc_type doc_type in
    Lwt.return (`Single (Some index, Some doc_type, Some doc_id))
  | `Index index, doc_id :: doc_ids ->
    let%lwt doc_id = map_doc_id' index doc_type doc_id in
    let%lwt doc_ids = Lwt_list.map_s (map_doc_id' index doc_type) doc_ids in
    multiple doc_id doc_ids
  | `IndexOrID (index, doc_id), [] ->
    (* no trailing ids: the second path component must be the doc id *)
    let doc_type = Option.default default_get_doc_type doc_type in
    Lwt.return (`Single (Some index, Some doc_type, Some doc_id))
  | `IndexOrID (index, doc_type), doc_id :: doc_ids when List.for_all is_pure_id' (doc_id :: doc_ids) ->
    (* all trailing args are bare ids: the second component is a doc type *)
    begin match doc_ids with
    | [] ->
      let%lwt (index, doc_type, doc_id) = map_typed_doc_id' index doc_type doc_id in
      Lwt.return (`Single (Some index, doc_type, Some doc_id))
    | _ ->
      let%lwt doc_id = map_typed_doc_id' index doc_type doc_id in
      let%lwt doc_ids = Lwt_list.map_s (map_typed_doc_id' index doc_type) doc_ids in
      multiple doc_id doc_ids
    end
  | `IndexOrID (index, doc_id), doc_ids ->
    (* mixed forms: treat the second component as a doc id and require full
       /index[/type]/id forms in the trailing arguments *)
    let%lwt doc_ids = Lwt_list.map_s (map_index_doc_id' doc_type) doc_ids in
    multiple (index, doc_type, doc_id) doc_ids
  | `ID (index, doc_type, doc_id), [] ->
    Lwt.return (`Single (Some index, Some doc_type, Some doc_id))
  | `ID (index, doc_type', doc_id), doc_ids ->
    let%lwt doc_ids = Lwt_list.map_s (map_index_doc_id' doc_type) doc_ids in
    multiple (index, Some doc_type', doc_id) doc_ids
(* Cmdliner argument definitions shared by the subcommands.
   The original text left the module unclosed — line with the bare token
   "Common_args" was the residue of "end (* Common_args *)" after comment
   delimiters were lost; restored here. *)
module Common_args = struct
  open Cmdliner
  let host = Arg.(required & pos 0 (some string) None & info [] ~docv:"HOST" ~doc:"host")
  let index = Arg.(required & pos 1 (some string) None & info [] ~docv:"INDEX" ~doc:"index")
  let doc_type = Arg.(value & opt (some string) None & info [ "T"; "doctype"; ] ~docv:"DOC_TYPE" ~doc:"document type")
  let doc_id = Arg.(pos 2 (some string) None & info [] ~docv:"DOC_ID" ~doc:"document id")
  let doc_ids =
    let doc = "document ids" in
    Arg.(value & pos_right 1 string [] & info [] ~docv:"DOC_ID1[ DOC_ID2[ DOC_ID3...]]" ~doc)
  let timeout = Arg.(value & opt (some string) None & info [ "t"; "timeout"; ] ~doc:"timeout")
  let source_includes = Arg.(value & opt_all string [] & info [ "i"; "source-includes"; ] ~doc:"source_includes")
  let source_excludes = Arg.(value & opt_all string [] & info [ "e"; "source-excludes"; ] ~doc:"source_excludes")
  let routing = Arg.(value & opt (some string) None & info [ "r"; "routing"; ] ~doc:"routing")
  let preference = Arg.(value & opt_all string [] & info [ "p"; "preference"; ] ~doc:"preference")
  let sort = Arg.(value & opt_all string [] & info [ "s"; "sort"; ] ~doc:"sort")
  (* converter for a comma-separated list of hit output columns *)
  let format =
    let parse format =
      match hit_format_of_string format with
      | exception Failure msg -> Error (`Msg msg)
      | format -> Ok format
    in
    let print fmt format =
      Format.fprintf fmt "%s" (string_of_hit_format format)
    in
    Arg.(list (conv (parse, print)))
  let format = Arg.(value & opt_all format [] & info [ "f"; "format"; ] ~doc:"map hits according to specified format (hit|id|source)")
  (* value set of the ES expand_wildcards query parameter *)
  type expand_wildcards =
    | All
    | Open
    | Closed
    | Hidden
    | None_
  let string_of_expand_wildcards = function
    | All -> "all"
    | Open -> "open"
    | Closed -> "closed"
    | Hidden -> "hidden"
    | None_ -> "none"
  let expand_wildcards =
    let conv_expand_wildcards =
      let parse = function
        | "all" -> Ok All
        | "open" -> Ok Open
        | "closed" -> Ok Closed
        | "hidden" -> Ok Hidden
        | "none" -> Ok None_
        | x -> Error (`Msg x)
      in
      Arg.conv (parse, (fun fmt x -> Format.fprintf fmt "%s" (string_of_expand_wildcards x)))
    in
    Arg.(value & opt (some conv_expand_wildcards) None & info [ "w"; "expand-wildcards"; ] ~doc:"expand_wildcards")
end (* Common_args *)
(* One add/remove entry for the _aliases actions array. *)
type alias_action = {
  action : [ `Add | `Remove ];
  index : string;
  alias : string;
}
type alias_args = {
  host : string;
  actions : alias_action list;
}
(* With no actions: GET /_aliases (list).  Otherwise: POST the actions array
   to /_aliases. *)
let alias { verbose; _ } {
  host;
  actions;
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  let (action, body) =
    match actions with
    | [] -> `GET, None
    | actions ->
      (* each action becomes a singleton assoc (variant-keyed JSON object) *)
      let actions = List.map (fun { action; index; alias; } -> [ action, { Elastic_t.index; alias; }; ]) actions in
      `POST, Some (JSON (Elastic_j.string_of_aliases { Elastic_t.actions; }) : content_type)
  in
  Lwt_main.run @@
  match%lwt request ~verbose ?body action host [ Some "_aliases"; ] [] id with
  | Error error -> fail_lwt "alias error:\n%s" error
  | Ok result -> Lwt_io.printl result
(* Output format accepted by the _cat API ("format" query parameter). *)
type cat_format =
  | Text
  | JSON
  | Smile
  | YAML
  | CBOR

(* Query-parameter value for each cat output format. *)
let string_of_cat_format fmt =
  match fmt with
  | Text -> "text" | JSON -> "json" | Smile -> "smile"
  | YAML -> "yaml" | CBOR -> "cbor"
(* Command-line arguments for the cat subcommand; [args] carries raw extra
   query parameters. *)
type cat_args = {
  host : string;
  query : string list;
  help : bool;
  headers : bool;
  columns : string list;
  sort : string list;
  format : cat_format option;
  time_units : string option;
  size_units : string option;
  byte_units : string option;
  expand_wildcards : Common_args.expand_wildcards option;
  args : (string * string option) list;
}
(* GET /_cat/<query...> with the assembled query parameters. *)
let cat ({ verbose; _ } as _common_args) {
  host;
  query;
  help;
  headers;
  columns;
  sort;
  format;
  time_units;
  size_units;
  byte_units;
  expand_wildcards;
  args;
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  (* prepend a valueless flag parameter (e.g. "v", "help") when [x] is set;
     note [value] is an unsupplied optional, so the parameter is rendered
     without "=value" *)
  let flag name ?value x l = if x then (name, Some value) :: l else l in
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "h", csv columns;
      "s", csv sort;
      "time", time_units;
      "size", size_units;
      "bytes", byte_units;
      "format", Option.map string_of_cat_format format;
      "expand_wildcards", Option.map Common_args.string_of_expand_wildcards expand_wildcards;
    ] @
    flag "help" help @@
    flag "v" headers @@
    List.map (fun (k, v) -> k, Some v) args
  in
  Lwt_main.run @@
  match%lwt request ~verbose `GET host (Some "_cat" :: List.map some query) args id with
  | Error error -> fail_lwt "cat error:\n%s" error
  | Ok result -> Lwt_io.print result
(* Command-line arguments for the count subcommand. *)
type count_args = {
  host : string;
  index : string;
  doc_type : string option;
  timeout : string option;
  routing : string option;
  preference : string list;
  query : string option;
  body_query : string option;
  analyzer : string option;
  analyze_wildcard : bool;
  default_field : string option;
  default_operator : string option;
  retry : bool;
}
(* POST /<index>[/<doc_type>]/_count and print the resulting count. *)
let count ({ verbose; _ } as _common_args) {
  host;
  index;
  doc_type;
  timeout;
  routing;
  preference;
  query;
  body_query;
  analyzer;
  analyze_wildcard;
  default_field;
  default_operator;
  retry = _; (* accepted but not acted on yet, see TODO below *)
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  (* "@file" bodies are read from disk *)
  let body_query = Option.map get_body_query_file body_query in
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "timeout", timeout;
      "routing", routing;
      "preference", csv ~sep:"|" preference;
      "analyzer", analyzer;
      "analyze_wildcard", flag analyze_wildcard;
      "df", default_field;
      "default_operator", default_operator;
      "q", query;
    ]
  in
  let body_query = match body_query with Some query -> Some (JSON query : content_type) | None -> None in
  let count () =
    match%lwt request ~verbose ?body:body_query `POST host [ Some index; doc_type; Some "_count"; ] args id with
    | Error error -> fail_lwt "count error:\n%s" error
    | Ok result ->
      let { Elastic_t.count; shards = { Elastic_t.failed = _; _ }; } = Elastic_j.count_of_string result in
      Lwt_io.printlf "%d" count
      (* TODO check failed > 0 && retry *)
  in
  count ()
(* Command-line arguments for the delete subcommand. *)
type delete_args = {
  host : string;
  index : string;
  doc_type : string option;
  doc_ids : string list;
  timeout : string option;
  routing : string option;
}
(* Delete one document with DELETE /index/type/id, or several via a _bulk
   request of delete actions. *)
let delete ({ verbose; es_version; _ } as common_args) {
  host;
  index;
  doc_type;
  doc_ids;
  timeout;
  routing;
} =
  let config = Common.load_config () in
  let { Common.host; version; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  let%lwt ({ default_get_doc_type; _ }) =
    get_es_version_config common_args host es_version config version
  in
  match%lwt map_ids ~default_get_doc_type index doc_type doc_ids with
  | `None -> Lwt.return_unit
  | `Single _ | `Multi _ as mode ->
  let (action, body, path) =
    match mode with
    | `Single (index, doc_type, doc_id) -> `DELETE, None, [ index; doc_type; doc_id; ]
    | `Multi (docs, index, doc_type) ->
      (* newline-delimited JSON: one {"delete":...} action line per doc *)
      let body =
        List.fold_left begin fun acc (index, doc_type, doc_id) ->
          let delete = { Elastic_t.index; doc_type; id = doc_id; routing = None; } in
          let bulk = { Elastic_t.index = None; create = None; update = None; delete = Some delete; } in
          "\n" :: Elastic_j.string_of_bulk bulk :: acc
        end [] docs |>
        List.rev |>
        String.concat ""
      in
      `POST, Some (NDJSON body), [ index; doc_type; Some "_bulk"; ]
  in
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "timeout", timeout;
      "routing", routing;
    ]
  in
  match%lwt request ~verbose ?body action host path args id with
  | Error response -> Lwt_io.eprintl response
  | Ok response -> Lwt_io.printl response
(* Command-line arguments for the flush subcommand. *)
type flush_args = {
  host : string;
  indices : string list;
  force : bool;
  synced : bool;
  wait : bool;
}
(* POST /<indices>/_flush (or /_flush/synced), optionally forcing the flush
   and/or waiting for an ongoing one. *)
let flush { verbose; _ } {
  host;
  indices;
  force;
  synced;
  wait;
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  (* emit [Some value] only when the flag is set *)
  let opt_flag value = function true -> Some value | false -> None in
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "force", opt_flag "true" force;
      "wait_if_ongoing", opt_flag "true" wait;
    ]
  in
  let path = [ csv indices; Some "_flush"; opt_flag "synced" synced; ] in
  Lwt_main.run @@
  match%lwt request ~verbose `POST host path args id with
  | Error error -> fail_lwt "flush error:\n%s" error
  | Ok result -> Lwt_io.printl result
(* Command-line arguments for the get subcommand. *)
type get_args = {
  host : string;
  index : string;
  doc_type : string option;
  doc_ids : string list;
  timeout : string option;
  source_includes : string list;
  source_excludes : string list;
  routing : string option;
  preference : string list;
  format : hit_format list list;
}
(* Fetch one document (GET /index/type/id) or several (_mget), optionally
   reformatting each hit per the -f columns. *)
let get ({ verbose; es_version; _ } as common_args) {
  host;
  index;
  doc_type;
  doc_ids;
  timeout;
  source_includes;
  source_excludes;
  routing;
  preference;
  format;
} =
  let config = Common.load_config () in
  let { Common.host; version; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  let%lwt ({ default_get_doc_type; _ }) =
    get_es_version_config common_args host es_version config version
  in
  match%lwt map_ids ~default_get_doc_type index doc_type doc_ids with
  | `None -> Lwt.return_unit
  | `Single _ | `Multi _ as mode ->
  let (body, path, unformat) =
    match mode with
    | `Single (index, doc_type, doc_id) ->
      let path = [ index; doc_type; doc_id; ] in
      let unformat x = [ Elastic_j.option_hit_of_string J.read_json x; ] in
      None, path, unformat
    | `Multi (docs, index, doc_type) ->
      (* when index and doc_type are common they go in the URL and the body
         is a bare "ids" array; otherwise the body lists full doc specs *)
      let (docs, ids) =
        match index, doc_type with
        | Some _, Some _ ->
          let ids = List.map (fun (_index, _doc_type, doc_id) -> doc_id) docs in
          [], ids
        | _ ->
          let docs =
            List.map begin fun (index, doc_type, id) ->
              { Elastic_t.index; doc_type; id; routing = None; source = None; stored_fields = None; }
            end docs
          in
          docs, []
      in
      let path = [ index; doc_type; Some "_mget"; ] in
      let unformat x =
        let { Elastic_t.docs; } = Elastic_j.docs_of_string (Elastic_j.read_option_hit J.read_json) x in
        docs
      in
      Some (JSON (Elastic_j.string_of_multiget { docs; ids; }) : content_type), path, unformat
  in
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "timeout", timeout;
      (* "_source" doubles as the includes list when there are no excludes *)
      (if source_excludes = [] then "_source" else "_source_includes"), csv source_includes;
      "_source_excludes", csv source_excludes;
      "routing", routing;
      "preference", csv ~sep:"|" preference;
    ]
  in
  let request unformat = request ~verbose ?body `GET host path args unformat in
  match format with
  | [] ->
    (* no -f: print the raw response body *)
    begin match%lwt request id with
    | Error response -> Lwt_io.eprintl response
    | Ok response -> Lwt_io.printl response
    end
  | _ ->
    (* -f given: decode the hits and print the selected columns per hit *)
    match%lwt request unformat with
    | Error response -> Lwt_io.eprintl response
    | Ok docs ->
      Lwt_list.iter_s begin fun hit ->
        List.map (List.map map_of_hit_format) format |>
        List.concat |>
        List.map (fun f -> f hit) |>
        String.join " " |>
        Lwt_io.printl
      end docs
(* Command-line arguments for the health subcommand. *)
type health_args = {
  hosts : string list;
}
(* Print one _cat/health line per cluster; with no hosts (or "_all") every
   configured cluster is queried, concurrently, and output is re-ordered to
   match the input order. *)
let health { verbose; _ } {
  hosts;
} =
  let config = Common.load_config () in
  let all_hosts = lazy (List.map (fun (name, _) -> Common.get_cluster config name) config.Config_t.clusters) in
  let hosts =
    (* [hosts] was accumulated with [tuck], hence the [List.rev] *)
    match List.rev hosts with
    | [] -> !!all_hosts
    | hosts ->
      List.map begin function
        | "_all" -> !!all_hosts
        | name -> [ Common.get_cluster config name; ]
      end hosts |>
      List.concat
  in
  Lwt_main.run @@
  let%lwt results =
    (* query all clusters in parallel, tagging each result with its index *)
    Lwt_list.mapi_p begin fun i { Common.host; _ } ->
      let columns = [
        "cluster"; "status";
        "node.total"; "node.data";
        "shards"; "pri"; "relo"; "init"; "unassign";
        "pending_tasks"; "max_task_wait_time";
        "active_shards_percent";
      ] in
      let args = [ "h", Some (Some (String.concat "," columns)); ] in
      match%lwt request ~verbose `GET host [ Some "_cat"; Some "health"; ] args id with
      | Error error -> Lwt.return (i, sprintf "%s error %s\n" host error)
      | Ok result -> Lwt.return (i, sprintf "%s %s" host result)
    end hosts
  in
  (* restore input order before printing *)
  List.sort ~cmp:(Factor.Int.compare $$ fst) results |>
  Lwt_list.iter_s (fun (_i, result) -> Lwt_io.print result)
(* Index-level operation to perform. *)
type index_action =
  | Get
  | Create
  | Delete
  | Open
  | Close
  | Freeze
  | Unfreeze
  | Settings
  | Mappings
(* Command-line arguments for the index subcommand. *)
type index_args = {
  host : string;
  index : string;
  action : index_action;
  expand_wildcards : Common_args.expand_wildcards option;
  body : string option;
}
(* Dispatch an index-level API call.  For Settings/Mappings the method is
   GET without a body and PUT with one; other actions fix their method. *)
let index_tool { verbose; _ } {
  host;
  index;
  action;
  expand_wildcards;
  body;
} =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  (* method inferred from the presence of a body; may be overridden below *)
  let (meth, body) = match body with Some body -> `PUT, Some (JSON body : content_type) | None -> `GET, None in
  let (meth, path) =
    match action with
    | Get -> `GET, None
    | Create -> `PUT, None
    | Delete -> `DELETE, None
    | Open -> `POST, Some "_open"
    | Close -> `POST, Some "_close"
    | Freeze -> `POST, Some "_freeze"
    | Unfreeze -> `POST, Some "_unfreeze"
    | Settings -> meth, Some "_settings"
    | Mappings -> meth, Some "_mappings"
  in
  let path = Some index :: path :: [] in
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "expand_wildcards", Option.map Common_args.string_of_expand_wildcards expand_wildcards;
    ]
  in
  match%lwt request ~verbose ?body meth host path args id with
  | Error error -> fail_lwt "index error:\n%s" error
  | Ok result -> Lwt_io.printl result
(* Command-line arguments for the nodes subcommand. *)
type nodes_args = {
  host : string;
  check_nodes : string list;
}
(* Compare the node names reported by /_nodes against an expected list
   (from the command line, or the cluster config), printing the missing and
   unlisted ones. *)
let nodes { verbose; _ } {
  host;
  check_nodes;
} =
  let config = Common.load_config () in
  let { Common.host; nodes; _ } = Common.get_cluster config host in
  let check_nodes = match check_nodes with [] -> Option.default [] nodes | nodes -> nodes in
  (* node patterns may expand to several concrete names *)
  let check_nodes = SS.of_list (List.concat (List.map Common.expand_node check_nodes)) in
  Lwt_main.run @@
  match%lwt request ~verbose `GET host [ Some "_nodes"; ] [] J.from_string with
  | Error error -> fail_lwt "nodes error:\n%s" error
  | Ok result ->
  J.Util.member "nodes" result |>
  J.Util.to_assoc |>
  (* walk reported nodes: drop each from [missing], collect into [present] *)
  List.fold_left begin fun (missing, present) (_node_id, node) ->
    let name = J.Util.member "name" node |> J.Util.to_string in
    SS.remove name missing, SS.add name present
  end (check_nodes, SS.empty) |>
  fun (missing, present) ->
  let%lwt () =
    match SS.is_empty missing with
    | true -> Lwt.return_unit
    | false -> Lwt_io.printlf "missing: %s" (String.concat " " (SS.elements missing))
  in
  let%lwt () =
    let unlisted = SS.diff present check_nodes in
    match SS.is_empty unlisted with
    | true -> Lwt.return_unit
    | false -> Lwt_io.printlf "unlisted: %s" (String.concat " " (SS.elements unlisted))
  in
  Lwt.return_unit
(* Command-line arguments for the put subcommand. *)
type put_args = {
  host : string;
  index : string;
  doc_type : string option;
  doc_id : string option;
  routing : string option;
  body : string option;
}
(* Index a document: PUT /index/type/id when an id is known, POST (auto id)
   otherwise.  The body is read from stdin when not given on the command
   line. *)
let put ({ verbose; es_version; _ } as common_args) {
  host;
  index;
  doc_type;
  doc_id;
  routing;
  body;
} =
  let config = Common.load_config () in
  let { Common.host; version; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  let%lwt { default_put_doc_type; _ } =
    get_es_version_config common_args host es_version config version
  in
  (* reconcile id components given inside INDEX with the DOC_ID argument *)
  let%lwt (index, doc_type, doc_id) =
    let%lwt mode = Lwt.wrap1 map_index_mode index in
    match mode, doc_id with
    | `Index index, None -> Lwt.return (index, doc_type, None)
    | `Index index, Some doc_id -> map_doc_id_opt index doc_type doc_id
    | `IndexOrID (index, doc_id), None -> Lwt.return (index, doc_type, Some doc_id)
    | `IndexOrID (index, doc_type), Some doc_id -> map_typed_doc_id_opt index doc_type doc_id
    | `ID (index, doc_type, doc_id), None -> Lwt.return (index, Some doc_type, Some doc_id)
    | `ID (index, doc_type, doc_id1), Some doc_id2 ->
      Exn_lwt.fail "invalid document id : /%s/%s/%s/%s" index doc_type doc_id1 doc_id2
  in
  let%lwt doc_type =
    match coalesce [ doc_type; default_put_doc_type; ] with
    | Some doc_type -> Lwt.return doc_type
    | None -> Exn_lwt.fail "DOC_TYPE is not provided"
  in
  let args = [ "routing", Option.map some routing; ] in
  let%lwt body = match body with Some body -> Lwt.return body | None -> Lwt_io.read Lwt_io.stdin in
  let action = if doc_id <> None then `PUT else `POST in
  match%lwt request ~verbose ~body:(JSON body) action host [ Some index; Some doc_type; doc_id; ] args id with
  | Error error -> fail_lwt "put error:\n%s" error
  | Ok result -> Lwt_io.printl result
(* Command-line arguments for the recovery subcommand; filters are
   (column, value) pairs to keep/drop shards. *)
type recovery_args = {
  host : string;
  indices : string list;
  filter_include : (index_shard_format * string) list;
  filter_exclude : (index_shard_format * string) list;
  format : index_shard_format list list;
}
(* GET /<indices>/_recovery, filter shards by the include/exclude column
   predicates, then print the selected columns (default columns if no -f). *)
let recovery { verbose; _ } {
  host;
  indices;
  filter_include;
  filter_exclude;
  format;
} =
  let format =
    match format with
    | [] -> List.map map_of_index_shard_format default_index_shard_format
    | _ ->
      List.map (List.map map_of_index_shard_format) format |>
      List.concat
  in
  let config = Common.load_config () in
  (* pre-resolve filter columns to extraction functions *)
  let filter_include = List.map (fun (k, v) -> map_of_index_shard_format k, v) filter_include in
  let filter_exclude = List.map (fun (k, v) -> map_of_index_shard_format k, v) filter_exclude in
  let { Common.host; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  match%lwt request ~verbose `GET host [ csv indices; Some "_recovery"; ] [] Elastic_j.indices_shards_of_string with
  | Error error -> fail_lwt "recovery error:\n%s" error
  | Ok indices ->
  let indices =
    match filter_include, filter_exclude with
    | [], [] -> indices
    | _ ->
      (* keep shards matching ALL includes and NO excludes *)
      List.map begin fun (index, ({ shards; } : Elastic_t.index_shards)) ->
        let shards =
          List.filter begin fun shard ->
            List.for_all (fun (f, v) -> compare_fmt (f index shard) v) filter_include &&
            not (List.exists (fun (f, v) -> compare_fmt (f index shard) v) filter_exclude)
          end shards
        in
        index, { Elastic_t.shards; }
      end indices
  in
  Lwt_list.iter_s begin fun (index, ({ shards; } : Elastic_t.index_shards)) ->
    Lwt_list.iter_s begin fun shard ->
      List.map (fun f -> map_show (f index shard)) format |>
      String.concat " " |>
      Lwt_io.printl
    end shards
  end indices
(* Command-line arguments for the refresh subcommand. *)
type refresh_args = {
  host : string;
  indices : string list;
}

(* POST /<indices>/_refresh and print the raw response. *)
let refresh { verbose; _ } { host; indices; } =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  let path = [ csv indices; Some "_refresh"; ] in
  Lwt_main.run @@
  match%lwt request ~verbose `POST host path [] id with
  | Error error -> fail_lwt "refresh error:\n%s" error
  | Ok result -> Lwt_io.printl result
(* Parameter records for the supported aggregations. *)
type aggregation_field = {
  field : string;
}
type aggregation_stats = {
  field : string;
  missing : string option;
}
type aggregation_cardinality = {
  common : aggregation_stats;
  precision_threshold : int option;
}
type aggregation_extended_stats = {
  common : aggregation_stats;
  sigma : float option;
}
type aggregation_string_stats = {
  common : aggregation_stats;
  show_distribution : bool option;
}
type aggregation_terms = {
  field : string;
  size : int option;
}
(* Aggregations that can be requested from the command line. *)
type aggregation =
  | Avg of aggregation_stats
  | Cardinality of aggregation_cardinality
  | ExtendedStats of aggregation_extended_stats
  | Max of aggregation_stats
  | Min of aggregation_stats
  | Stats of aggregation_stats
  | Sum of aggregation_stats
  | StringStats of aggregation_string_stats
  | Terms of aggregation_terms
  | ValueCount of aggregation_field

(* ES aggregation type name for each constructor. *)
let string_of_aggregation agg =
  match agg with
  | Avg _ -> "avg" | Cardinality _ -> "cardinality"
  | ExtendedStats _ -> "extended_stats"
  | Max _ -> "max" | Min _ -> "min" | Stats _ -> "stats" | Sum _ -> "sum"
  | StringStats _ -> "string_stats" | Terms _ -> "terms"
  | ValueCount _ -> "value_count"
(* Command-line arguments for the search subcommand.  [query] is the
   Lucene-syntax q= parameter; [body_query] a full JSON request body
   (possibly "@file"); [slice_id]/[slice_max] configure sliced scrolling;
   [format] the per-hit output columns. *)
type search_args = {
  host : string;
  index : string;
  doc_type : string option;
  timeout : string option;
  size : int option;
  from : int option;
  sort : string list;
  source_includes : string list;
  source_excludes : string list;
  fields : string list;
  routing : string option;
  preference : string list;
  scroll : string option;
  slice_id : int option;
  slice_max : int option;
  query : string option;
  body_query : string option;
  aggregations : (string * aggregation) list;
  analyzer : string option;
  analyze_wildcard : bool;
  default_field : string option;
  default_operator : string option;
  explain : bool;
  show_count : bool;
  track_total_hits : string option;
  retry : bool;
  format : hit_format list list;
}
(* Run an ES [_search], optionally scrolling through all results, retrying on
   failed shards (deduplicating already-seen hits), and formatting output.
   Blocks via [Lwt_main.run]; prints results/errors on stdout/stderr. *)
let search ({ verbose; es_version; _ } as common_args) {
    host;
    index;
    doc_type;
    timeout;
    size;
    from;
    sort;
    source_includes;
    source_excludes;
    fields;
    routing;
    preference;
    scroll;
    slice_id;
    slice_max;
    query;
    body_query;
    aggregations;
    analyzer;
    analyze_wildcard;
    default_field;
    default_operator;
    explain;
    show_count;
    track_total_hits;
    retry;
    format;
  } =
  let config = Common.load_config () in
  let { Common.host; version; _ } = Common.get_cluster config host in
  Lwt_main.run @@
  (* Version-dependent total-hits readers/writers (ES7 changed the shape). *)
  let%lwt { read_total; write_total; _ } =
    get_es_version_config common_args host es_version config version
  in
  let body_query = Option.map get_body_query_file body_query in
  (* Query-string arguments; [None] values are dropped by the request layer. *)
  let args =
    List.map (fun (k, v) -> k, Option.map some v) [
      "timeout", timeout;
      "size", int size;
      "from", int from;
      "track_total_hits", track_total_hits;
      "sort", csv sort;
      (* Plain [_source] filter unless excludes are also given. *)
      (if source_excludes = [] then "_source" else "_source_includes"), csv source_includes;
      "_source_excludes", csv source_excludes;
      "stored_fields", csv fields;
      "routing", routing;
      "preference", csv ~sep:"|" preference;
      "explain", flag explain;
      "scroll", scroll;
      "analyzer", analyzer;
      "analyze_wildcard", flag analyze_wildcard;
      "df", default_field;
      "default_operator", default_operator;
      "q", query;
    ]
  in
  let format =
    List.map (List.map map_of_hit_format) format |>
    List.concat
  in
  (* Inject (or overwrite) the [slice] clause into the body for sliced scroll. *)
  let body_query =
    match slice_id, slice_max with
    | None, _ | _, None -> body_query
    | Some slice_id, Some slice_max ->
    let slice = "slice", `Assoc [ "id", `Int slice_id; "max", `Int slice_max; ] in
    match body_query with
    | None -> Some (Util_j.string_of_assoc [slice])
    | Some body ->
    let body = Util_j.assoc_of_string body in
    let body = slice :: List.filter (function "slice", _ -> false | _ -> true) body in
    Some (Util_j.string_of_assoc body)
  in
  (* Build an [aggs] body from command-line aggregations (mutually exclusive
     with an explicit body query). *)
  let body_query =
    match aggregations with
    | [] -> body_query
    | _ ->
    match body_query with
    | Some _ -> Exn.fail "providing query body and aggregations at the same time is not supported"
    | None ->
    let aggregations =
      let cons name map hd tl = match hd with Some hd -> (name, map hd) :: tl | None -> tl in
      let bool x = `Bool x in
      let float x = `Float x in
      let int x = `Int x in
      let string x = `String x in
      let metrics { field; missing; } params =
        let params = cons "missing" string missing params in
        ("field", `String field) :: params
      in
      List.map begin fun (name, aggregation) ->
        let aggregation_params =
          match aggregation with
          | Avg params | Max params | Min params | Stats params | Sum params ->
            metrics params []
          | Cardinality { common; precision_threshold; } ->
            let params = cons "precision_threshold" int precision_threshold [] in
            metrics common params
          | ExtendedStats { common; sigma; } ->
            let params = cons "sigma" float sigma [] in
            metrics common params
          | StringStats { common; show_distribution; } ->
            let params = cons "show_distribution" bool show_distribution [] in
            metrics common params
          | Terms { field; size; } ->
            let params = cons "size" int size [] in
            ("field", `String field) :: params
          | ValueCount { field; } ->
            ("field", `String field) :: []
        in
        name, `Assoc [ string_of_aggregation aggregation, `Assoc aggregation_params; ]
      end aggregations
    in
    Some (Util_j.string_of_assoc [ "aggs", `Assoc aggregations; ])
  in
  let body_query = match body_query with Some query -> Some (JSON query : content_type) | None -> None in
  (* Dedup table for --retry: remembers (index, doc_type, id) of seen hits. *)
  let htbl = Hashtbl.create (if retry then Option.default 10 size else 0) in
  let rec search () =
    match%lwt request ~verbose ?body:body_query `POST host [ Some index; doc_type; Some "_search"; ] args id with
    | Error error -> fail_lwt "search error:\n%s" error
    | Ok result ->
    match show_count, format, scroll, retry with
    (* Fast path: raw passthrough when no post-processing is requested. *)
    | false, [], None, false -> Lwt_io.printl result
    | show_count, format, scroll, retry ->
    let scroll_path = [ Some "_search"; Some "scroll"; ] in
    let clear_scroll' scroll_id =
      let clear_scroll = (JSON (Elastic_j.string_of_clear_scroll { Elastic_t.scroll_id = [ scroll_id; ]; }) : content_type) in
      match%lwt request ~verbose ~body:clear_scroll `DELETE host scroll_path [] id with
      | Error error -> fail_lwt "clear scroll error:\n%s" error
      | Ok _ok -> Lwt.return_unit
    in
    let clear_scroll scroll_id = Option.map_default clear_scroll' Lwt.return_unit scroll_id in
    (* Process one response page, then continue scrolling if applicable. *)
    let rec loop result =
      let { Elastic_t.hits = response_hits; scroll_id; shards = { Elastic_t.failed; _ }; _ } as response =
        Elastic_j.response'_of_string (Elastic_j.read_option_hit J.read_json) read_total result
      in
      match response_hits with
      | None -> log #error "no hits"; clear_scroll scroll_id
      | Some ({ Elastic_t.total; hits; _ } as response_hits) ->
      (* With --retry, drop hits already printed in a previous attempt. *)
      let hits =
        match retry with
        | false -> hits
        | true ->
        List.filter begin fun ({ Elastic_t.index; doc_type; id; _ } : 'a Elastic_t.option_hit) ->
          let key = index, doc_type, id in
          match Hashtbl.mem htbl key with
          | false -> Hashtbl.add htbl key (); true
          | true -> false
        end hits
      in
      let%lwt () =
        match show_count with
        | false -> Lwt.return_unit
        | true ->
        match total with
        | None -> Lwt_io.printl "unknown"
        | Some { value; relation; } ->
        match relation with
        | `Eq -> Lwt_io.printlf "%d" value
        | `Gte -> Lwt_io.printlf ">=%d" value
      in
      let%lwt () =
        match format, show_count, retry with
        | [], true, _ -> Lwt.return_unit
        | [], false, false -> Lwt_io.printl result
        (* Re-serialize the filtered response; skip empty retry pages unless
           nothing was printed yet. *)
        | [], false, true when hits <> [] || Hashtbl.length htbl = 0 ->
          { response with Elastic_t.hits = Some { response_hits with Elastic_t.hits; }; } |>
          Elastic_j.string_of_response' (Elastic_j.write_option_hit J.write_json) write_total |>
          Lwt_io.printl
        | _ ->
          Lwt_list.iter_s begin fun hit ->
            List.map (fun f -> f hit) format |>
            String.join " " |>
            Lwt_io.printl
          end hits
      in
      (* On failed shards with --retry: drop the scroll and re-issue the
         whole search from scratch. *)
      match failed > 0 && retry with
      | true ->
        let%lwt () = clear_scroll scroll_id in
        search ()
      | false ->
      match hits, scroll, scroll_id with
      | [], _, _ | _, None, _ | _, _, None -> clear_scroll scroll_id
      | _, Some scroll, Some scroll_id ->
      let scroll = (JSON (Elastic_j.string_of_scroll { Elastic_t.scroll; scroll_id; }) : content_type) in
      match%lwt request ~verbose ~body:scroll `POST host scroll_path [] id with
      | Error error ->
        let%lwt () = Lwt_io.eprintlf "scroll error:\n%s" error in
        let%lwt () = clear_scroll' scroll_id in
        Lwt.fail ErrorExit
      | Ok result -> loop result
    in
    loop result
  in
  search ()
(* Types for the "settings" subcommand. NOTE(review): the closing [end] of
   this module is not visible in this chunk — presumably an extraction
   artifact; verify against the original file. *)
module Settings = struct
  (* How command-line setting values are interpreted. *)
  type input =
    | Text
    | JSON
  (* How settings are printed: human-readable, JSON values, or raw response. *)
  type output =
    | Text
    | JSON
    | Raw
  (* Which settings section to operate on. *)
  type type_ =
    | Transient
    | Persistent
    | Defaults
  type args = {
    host : string;
    (* Keys to read, or KEY=VALUE pairs to write (bare KEY resets with -r). *)
    keys : string list;
    reset : bool;
    include_defaults : bool;
    input : input;
    output : output;
    type_ : type_ option;
  }
(* Read and/or write cluster settings ([_cluster/settings]).
   Keys of the form KEY=VALUE are written (to the transient or persistent
   section, per [type_]); bare keys are read, or reset when [reset] is set.
   Blocks via [Lwt_main.run]; prints results on stdout. *)
let settings { verbose; _ } {
    host;
    keys;
    reset;
    include_defaults;
    input;
    output;
    type_;
  } =
  let config = Common.load_config () in
  let { Common.host; _ } = Common.get_cluster config host in
  let path = [ Some "_cluster"; Some "settings"; ] in
  (* Split the positional keys into reads (`Get) and writes (`Set);
     with --reset a bare key becomes a write of null. *)
  let (get_keys, set_keys) =
    List.map begin fun s ->
      match Stre.splitc s '=' with
      | exception Not_found when reset -> `Set (s, None)
      | exception Not_found -> `Get s
      | key, value -> `Set (key, Some value)
    end keys |>
    List.partition (function `Get _ -> true | `Set _ -> false)
  in
  let get_keys = List.map (function `Get key -> key | `Set _ -> assert false) get_keys in
  let set_keys = List.map (function `Get _ -> assert false | `Set pair -> pair) set_keys in
  Lwt_main.run @@
  let%lwt set_mode =
    match set_keys, type_ with
    | [], _ -> Lwt.return_none
    | _, Some Transient -> Lwt.return_some `Transient
    | _, Some Persistent -> Lwt.return_some `Persistent
    (* Fixed: these two arms were literal FIXME placeholders. Writing requires
       an explicit writable section: defaults are read-only, and an
       unspecified type would make the write ambiguous. *)
    | _, Some Defaults -> fail_lwt "cannot set default settings, use transient or persistent instead"
    | _, None -> fail_lwt "please specify the settings type to set: transient or persistent"
  in
  (* Perform the write, if any. *)
  let%lwt () =
    match set_mode with
    | None -> Lwt.return_unit
    | Some mode ->
    let%lwt values =
      Lwt_list.map_s begin fun (key, value) ->
        let%lwt value =
          match value with
          | None -> Lwt.return `Null (* null resets the setting *)
          | Some value ->
          match input with
          | Text -> Lwt.return (`String value)
          | JSON -> Lwt.wrap1 J.from_string value
        in
        Lwt.return (key, value)
      end set_keys
    in
    let values = Some (`Assoc values) in
    let (transient, persistent) =
      match mode with
      | `Transient -> values, None
      | `Persistent -> None, values
    in
    let settings = ({ transient; persistent; defaults = None; } : Elastic_t.cluster_tree_settings) in
    let body = (JSON (Elastic_j.string_of_cluster_tree_settings settings) : content_type) in
    match%lwt request ~verbose ~body `PUT host path [] id with
    | Error error -> fail_lwt "settings error:\n%s" error
    | Ok result -> Lwt_io.printl result
  in
  (* Perform the read: always when no writes were requested, otherwise only
     when explicit read keys were also given. *)
  let%lwt () =
    match get_keys, set_keys with
    | [], _ :: _ -> Lwt.return_unit
    | _ ->
    let include_defaults = include_defaults || type_ = Some Defaults in
    let args =
      List.map (fun (k, v) -> k, Option.map some v) [
        "flat_settings", Some "true";
        "include_defaults", flag include_defaults;
      ]
    in
    match%lwt request ~verbose `GET host path args id with
    | Error error -> fail_lwt "settings error:\n%s" error
    | Ok result ->
    match get_keys, output, type_ with
    | [], Raw, None -> Lwt_io.printl result
    | _ ->
    let%lwt { Elastic_t.transient; persistent; defaults; } = Lwt.wrap1 Elastic_j.cluster_flat_settings_of_string result in
    (* When a section is selected, restrict output to that section. *)
    let type_settings =
      match type_ with
      | None -> None
      | Some Defaults -> Some defaults
      | Some Transient -> Some transient
      | Some Persistent -> Some persistent
    in
    let output =
      match output with
      | Text -> `Text
      | JSON -> `JSON
      | Raw -> `Raw
    in
    let module SS = Set.Make(String) in
    let get_keys = SS.of_list get_keys in
    let get_keys_empty = SS.is_empty get_keys in
    (* Exactly one key and a section selected: print the bare value. *)
    let get_keys_typed_single = SS.cardinal get_keys = 1 && Option.is_some type_ in
    match output with
    | `Raw ->
      let filter =
        match get_keys_empty with
        | true -> id
        | false -> (fun settings -> List.filter (fun (key, _value) -> SS.mem key get_keys) settings)
      in
      begin match type_settings with
      | Some settings ->
        let settings = Option.map_default filter [] settings in
        Lwt_io.printl (Elastic_j.string_of_settings settings)
      | None ->
        let transient = Option.map filter transient in
        let persistent = Option.map filter persistent in
        let defaults = Option.map filter defaults in
        Lwt_io.printl (Elastic_j.string_of_cluster_flat_settings { Elastic_t.transient; persistent; defaults; })
      end
    | `Text | `JSON as output ->
      let settings =
        match type_settings with
        | Some settings -> [ None, settings; ]
        | None -> [ Some "transient: ", transient; Some "persistent: ", persistent; Some "defaults: ", defaults; ]
      in
      let string_of_value =
        match output with
        | `JSON -> (fun value -> J.to_string value)
        | `Text ->
          function
          | `Null -> "null"
          | `Intlit s | `String s -> s
          | `Bool x -> string_of_bool x
          | `Int x -> string_of_int x
          | `Float x -> string_of_float x
          | `List _ | `Assoc _ | `Tuple _ | `Variant _ as value -> J.to_string value
      in
      let print_value value =
        Lwt_io.printl (string_of_value value)
      in
      let print_key_value prefix key value =
        Lwt_io.printlf "%s%s: %s" prefix key (string_of_value value)
      in
      let print prefix (key, value) =
        match prefix, get_keys_typed_single with
        | None, true -> print_value value
        | _ -> print_key_value (Option.default "" prefix) key value
      in
      Lwt_list.iter_s begin function
      | _prefix, None -> Lwt.return_unit
      | prefix, Some settings ->
        match get_keys_empty with
        | true -> Lwt_list.iter_s (print prefix) settings
        | _ ->
          Lwt_list.iter_s (function key, _ as pair when SS.mem key get_keys -> print prefix pair | _ -> Lwt.return_unit) settings
      end settings
  in
  Lwt.return_unit
open Cmdliner
[@@@alert "-deprecated"]
(* Applicative Let_syntax over Cmdliner terms, so [let%map a = t1 and b = t2]
   builds [Term.(const f $ t1 $ t2)]-style terms via the ppx. *)
module Let_syntax = struct
  let map ~f t = Term.(const f $ t)
  let both a b = Term.(const (fun x y -> x, y) $ a $ b)
end
(* Term for options shared by all subcommands: ES version override flags
   (-5/-6/-7/-8, last one wins) and verbosity. *)
let common_args =
  let args es_version verbose = { es_version; verbose; } in
  let docs = Manpage.s_common_options in
  let es_version =
    Arg.(last & vflag_all [ None; ] [
      Some `ES5, Arg.info [ "5"; ] ~docs ~doc:"force ES version 5.x";
      Some `ES6, Arg.info [ "6"; ] ~docs ~doc:"force ES version 6.x";
      Some `ES7, Arg.info [ "7"; ] ~docs ~doc:"force ES version 7.x";
      Some `ES8, Arg.info [ "8"; ] ~docs ~doc:"force ES version 8.x";
    ])
  in
  let verbose =
    let doc = "verbose output" in
    Arg.(value & flag & info [ "v"; "verbose"; ] ~docs ~doc)
  in
  Term.(const args $ es_version $ verbose)
(* Default term: invoked without a subcommand, just shows the help pager. *)
let default_tool =
  let doc = "a command-line client for ES" in
  let sdocs = Manpage.s_common_options in
  let exits = Term.default_exits in
  let man = [] in
  Term.(ret (const (fun _ -> `Help (`Pager, None)) $ common_args), info "es" ~version:Common.version ~doc ~sdocs ~exits ~man)
(* "alias" subcommand: add/remove index aliases. Accepts ALIAS=INDEX pairs,
   or bare ALIAS when a positional INDEX argument is given. *)
let alias_tool =
  (* Converter for ALIAS[=INDEX] arguments. *)
  let action =
    let parse x =
      match Stre.splitc x '=' with
      | alias, index -> Ok (alias, Some index)
      | exception Not_found -> Ok (x, None)
    in
    let print fmt (alias, index) =
      match index with
      | Some index -> Format.fprintf fmt "%s=%s" alias index
      | None -> Format.fprintf fmt "%s" alias
    in
    Arg.conv (parse, print)
  in
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index =
    let doc = "index to operate on. If not provided, -a and -r must include the =INDEX part." in
    Arg.(value & pos 1 (some string) None & info [] ~docv:"INDEX" ~doc)
  and add =
    let doc = "add index INDEX to alias ALIAS" in
    Arg.(value & opt_all action [] & info [ "a"; "add"; ] ~docv:"ALIAS[=INDEX]" ~doc)
  and remove =
    let doc = "remove index INDEX from alias ALIAS" in
    Arg.(value & opt_all action [] & info [ "r"; "remove"; ] ~docv:"ALIAS[=INDEX]" ~doc)
  in
  (* Resolve each pair to a full action record, falling back to the
     positional INDEX; fails if neither is given. *)
  let map action = function
    | alias, Some index -> { action; index; alias; }
    | alias, None ->
    match index with
    | Some index -> { action; index; alias; }
    | None -> Exn.fail "INDEX is not specified for %s" alias
  in
  let add = List.map (map `Add) add in
  let remove = List.map (map `Remove) remove in
  alias common_args {
    host;
    actions = add @ remove;
  }
(* Pair the term with its man-page metadata. *)
let alias_tool =
  alias_tool,
  let open Term in
  let doc = "add or remove index aliases" in
  let exits = default_exits in
  let man = [] in
  info "alias" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "cat" subcommand: access the ES [_cat] APIs with column selection,
   sorting, output format and unit options. *)
let cat_tool =
  (* Converter for the output format name. *)
  let conv_format =
    let parse = function
      | "text" -> Ok Text
      | "json" -> Ok JSON
      | "smile" -> Ok Smile
      | "yaml" -> Ok YAML
      | "cbor" -> Ok CBOR
      | x -> Error (`Msg x)
    in
    Arg.conv (parse, (fun fmt x -> Format.fprintf fmt "%s" (string_of_cat_format x)))
  in
  (* Converter for arbitrary KEY[=VALUE] query-string additions. *)
  let conv_arg =
    let parse x =
      match Stre.splitc x '=' with
      | key, value -> Ok (key, Some value)
      | exception Not_found -> Ok (x, None)
    in
    let print fmt (key, value) =
      match value with
      | Some value -> Format.fprintf fmt "%s=%s" key value
      | None -> Format.fprintf fmt "%s" key
    in
    Arg.conv (parse, print)
  in
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and query = Arg.(value & pos_right 0 string [] & info [] ~docv:"PATH[ SUBPATH1[ SUBPATH2]]" ~doc:"path components")
  and help = Arg.(value & flag & info [ "I"; "H"; ] ~doc:"show columns help")
  and headers = Arg.(value & flag & info [ "h"; "headers"; ] ~doc:"show headers")
  and columns = Arg.(value & opt_all string [] & info [ "i"; "columns"; ] ~doc:"include columns")
  and sort = sort
  and format = Arg.(value & opt (some conv_format) None & info [ "f"; "format"; ] ~doc:"output format")
  and time_units = Arg.(value & opt (some string) None & info [ "T"; "time-units"; ] ~doc:"time units")
  and size_units = Arg.(value & opt (some string) None & info [ "S"; "size-units"; ] ~doc:"size units")
  and byte_units = Arg.(value & opt (some string) None & info [ "B"; "byte-units"; ] ~doc:"byte units")
  and expand_wildcards = expand_wildcards
  and args = Arg.(value & opt_all conv_arg [] & info [ "a"; "arg"; ] ~docv:"KEY[=VALUE]" ~doc:"add arbitrary &key[=value] to the request") in
  cat common_args {
    host;
    query;
    help;
    headers;
    columns;
    sort;
    format;
    time_units;
    size_units;
    byte_units;
    expand_wildcards;
    args;
  }
(* Pair the term with its man-page metadata. *)
let cat_tool =
  cat_tool,
  let open Term in
  let doc = "cat" in
  let exits = default_exits in
  let man = [] in
  info "cat" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "count" subcommand: count documents matching a query. *)
let count_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and doc_type = doc_type
  and timeout = timeout
  and routing = routing
  and preference = preference
  and query = Arg.(value & opt (some string) None & info [ "q"; "query"; ] ~doc:"query using query_string query")
  and body_query = Arg.(value & pos 2 (some string) None & info [] ~docv:"BODY_QUERY" ~doc:"body query")
  and analyzer = Arg.(value & opt (some string) None & info [ "A"; "analyzer"; ] ~doc:"analyzer to be used for query_string query")
  and analyze_wildcard = Arg.(value & flag & info [ "W"; "analyze-wildcard"; ] ~doc:"analyze wildcard and prefix queries in query_string query")
  and default_field = Arg.(value & opt (some string) None & info [ "d"; "default-field"; ] ~doc:"default field to be used for query_string query")
  and default_operator = Arg.(value & opt (some string) None & info [ "O"; "default-operator"; ] ~doc:"default operator to be used for query_string query")
  and retry = Arg.(value & flag & info [ "R"; "retry"; ] ~doc:"retry if there are any failed shards") in
  count common_args {
    host;
    index;
    doc_type;
    timeout;
    routing;
    preference;
    query;
    body_query;
    analyzer;
    analyze_wildcard;
    default_field;
    default_operator;
    retry;
  }
(* Pair the term with its man-page metadata. *)
let count_tool =
  count_tool,
  let open Term in
  let doc = "count" in
  let exits = default_exits in
  let man = [] in
  info "count" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "delete" subcommand: delete documents by id. *)
let delete_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and doc_type = doc_type
  and doc_ids = doc_ids
  and timeout = timeout
  and routing = routing in
  delete common_args {
    host;
    index;
    doc_type;
    doc_ids;
    timeout;
    routing;
  }
(* Pair the term with its man-page metadata. *)
let delete_tool =
  delete_tool,
  let open Term in
  let doc = "delete document(s)" in
  let exits = default_exits in
  let man = [] in
  info "delete" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "flush" subcommand: flush (optionally forced/synced) the given indices. *)
let flush_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and indices =
    let doc = "indices to flush" in
    Arg.(value & pos_right 0 string [] & info [] ~docv:"INDEX1[ INDEX2[ INDEX3...]]" ~doc)
  and force = Arg.(value & flag & info [ "f"; "force"; ] ~doc:"force flush")
  and synced = Arg.(value & flag & info [ "s"; "synced"; ] ~doc:"synced flush")
  and wait = Arg.(value & flag & info [ "w"; "wait"; ] ~doc:"wait if another flush is already ongoing") in
  flush common_args {
    host;
    indices;
    force;
    synced;
    wait;
  }
(* Pair the term with its man-page metadata. *)
let flush_tool =
  flush_tool,
  let open Term in
  let doc = "flush indices" in
  let exits = default_exits in
  let man = [] in
  info "flush" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "get" subcommand: fetch documents by id with source filtering. *)
let get_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and doc_type = doc_type
  and doc_ids = doc_ids
  and timeout = timeout
  and source_includes = source_includes
  and source_excludes = source_excludes
  and routing = routing
  and preference = preference
  and format = format in
  get common_args {
    host;
    index;
    doc_type;
    doc_ids;
    timeout;
    source_includes;
    source_excludes;
    routing;
    preference;
    format;
  }
(* Pair the term with its man-page metadata. *)
let get_tool =
  get_tool,
  let open Term in
  let doc = "get document(s)" in
  let exits = default_exits in
  let man = [] in
  info "get" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "health" subcommand: report cluster health for the given hosts. *)
let health_tool =
  let run common_args hosts = health common_args { hosts; } in
  let hosts =
    let doc = "hosts" in
    Arg.(value & pos_all string [] & info [] ~docv:"HOST1[ HOST2[ HOST3...]]" ~doc)
  in
  Term.(const run $ common_args $ hosts)
(* Pair the term with its man-page metadata. *)
let health_tool =
  health_tool,
  Term.info "health" ~doc:"cluster health" ~sdocs:Manpage.s_common_options ~exits:Term.default_exits ~man:[]
(* "index" subcommand: get/create/delete/open/close/freeze/unfreeze an index,
   or operate on its mappings/settings. Default action is Get. *)
let index_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and action =
    Arg.(value & vflag (Get : index_action) [
      Create, info [ "C"; "create"; ] ~doc:"create index";
      Delete, info [ "D"; "delete"; ] ~doc:"delete index";
      Open, info [ "o"; "open"; ] ~doc:"open index";
      Close, info [ "c"; "close"; ] ~doc:"close index";
      Freeze, info [ "f"; "freeze"; ] ~doc:"freeze index";
      Unfreeze, info [ "u"; "unfreeze"; ] ~doc:"unfreeze index";
      Mappings, info [ "m"; "mappings"; ] ~doc:"operate on index mappings";
      Settings, info [ "s"; "settings"; ] ~doc:"operator on index settings";
    ])
  and expand_wildcards = expand_wildcards
  and body = Arg.(value & pos 2 (some string) None & info [] ~docv:"BODY" ~doc:"body to put") in
  index_tool common_args {
    host;
    index;
    action;
    expand_wildcards;
    body;
  }
(* Pair the term with its man-page metadata. *)
let index_tool =
  index_tool,
  let open Term in
  let doc = "index" in
  let exits = default_exits in
  let man = [] in
  info "index" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "nodes" subcommand: list cluster nodes, optionally checking that specific
   nodes are present. *)
let nodes_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and check_nodes =
    let doc = "check presence of specified nodes" in
    Arg.(value & pos_right 0 string [] & info [] ~docv:"HOST1[ HOST2[ HOST3...]]" ~doc)
  in
  nodes common_args {
    host;
    check_nodes;
  }
(* Pair the term with its man-page metadata. *)
let nodes_tool =
  nodes_tool,
  let open Term in
  let doc = "cluster nodes" in
  let exits = default_exits in
  let man = [] in
  info "nodes" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "put" subcommand: index a single document. *)
let put_tool =
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and doc_type = doc_type
  and doc_id = Arg.value doc_id
  and routing = routing
  and body =
    let doc = "document source to put" in
    Arg.(value & opt (some string) None & info [ "s"; "source"; ] ~docv:"DOC" ~doc)
  in
  put common_args {
    host;
    index;
    doc_type;
    doc_id;
    routing;
    body;
  }
(* Pair the term with its man-page metadata. *)
let put_tool =
  put_tool,
  let open Term in
  let doc = "put document" in
  let exits = default_exits in
  let man = [] in
  info "put" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "recovery" subcommand: show shard recovery status, with include/exclude
   filters of the form COLUMN=VALUE and custom output formats. *)
let recovery_tool =
  (* Converter for a shard-format column name. *)
  let format =
    let parse format =
      match index_shard_format_of_string format with
      | exception Failure msg -> Error (`Msg msg)
      | format -> Ok format
    in
    let print fmt format =
      Format.fprintf fmt "%s" (string_of_index_shard_format format)
    in
    Arg.conv (parse, print)
  in
  let filter = Arg.pair ~sep:'=' format Arg.string in
  let format = Arg.list format in
  let%map common_args = common_args
  and host = Common_args.host
  and indices =
    let doc = "indices to check" in
    Arg.(value & pos_right 0 string [] & info [] ~docv:"INDEX1[ INDEX2[ INDEX3...]]" ~doc)
  and format = Arg.(value & opt_all format [] & info [ "f"; "format"; ] ~doc:"map hits according to specified format")
  and filter_include =
    let doc = "include only shards matching filter" in
    Arg.(value & opt_all filter [] & info [ "i"; "include"; ] ~doc ~docv:"COLUMN=VALUE")
  and filter_exclude =
    let doc = "exclude shards matching filter" in
    Arg.(value & opt_all filter [] & info [ "e"; "exclude"; ] ~doc ~docv:"COLUMN=VALUE")
  in
  recovery common_args {
    host;
    indices;
    filter_include;
    filter_exclude;
    format;
  }
(* Pair the term with its man-page metadata. *)
let recovery_tool =
  recovery_tool,
  let open Term in
  let doc = "cluster recovery" in
  let exits = default_exits in
  let man = [] in
  info "recovery" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "refresh" subcommand: refresh the given indices. *)
let refresh_tool =
  let open Common_args in
  let run common_args host indices = refresh common_args { host; indices; } in
  let indices =
    let doc = "indices to refresh" in
    Arg.(value & pos_right 0 string [] & info [] ~docv:"INDEX1[ INDEX2[ INDEX3...]]" ~doc)
  in
  Term.(const run $ common_args $ host $ indices)
(* Pair the term with its man-page metadata. *)
let refresh_tool =
  refresh_tool,
  Term.info "refresh" ~doc:"refresh indices" ~sdocs:Manpage.s_common_options ~exits:Term.default_exits ~man:[]
(* "search" subcommand. Includes a small parser for the colon-separated
   aggregation syntax NAME:TYPE[:FIELD[:PARAM...]]. *)
let search_tool =
  let aggregation =
    (* NOTE(review): this local Let_syntax is deliberately non-standard —
       [map] is actually a Result bind (f must return a result), and [bind]
       unconses a list into (head option, tail). The %map/%bind uses below
       rely on exactly these semantics. *)
    let module Let_syntax =
      struct
        let map ~f = function Ok x -> f x | Error _ as error -> error
        let bind ~f = function [] -> f (None, []) | hd :: tl -> f (Some hd, tl)
      end
    in
    let missing_field name = Error (`Msg (sprintf "terms aggregation %s missing field" name)) in
    (* Lift a Cmdliner converter to parse an optional string parameter. *)
    let parse conv =
      let parse = Arg.conv_parser conv in
      fun x -> Option.map_default (fun x -> let%map x = parse x in Ok (Some x)) (Ok None) x
    in
    let parse_bool = parse Arg.bool in
    let parse_float = parse Arg.float in
    let parse_int = parse Arg.int in
    (* Common metric parameters: FIELD[:MISSING]. *)
    let parse_metrics name = function
      | [] -> missing_field name
      | field :: params ->
      let%bind (missing, params) = params in
      let agg = { field; missing; } in
      Ok (agg, params)
    in
    let parse_cardinality name params =
      let%map (common, params) = parse_metrics name params in
      let%bind (precision_threshold, params) = params in
      let%map precision_threshold = parse_int precision_threshold in
      Ok (Cardinality { common; precision_threshold; }, params)
    in
    let parse_extended_stats name params =
      let%map (common, params) = parse_metrics name params in
      let%bind (sigma, params) = params in
      let%map sigma = parse_float sigma in
      Ok (ExtendedStats { common; sigma; }, params)
    in
    let parse_string_stats name params =
      let%map (common, params) = parse_metrics name params in
      let%bind (show_distribution, params) = params in
      let%map show_distribution = parse_bool show_distribution in
      Ok (StringStats { common; show_distribution; }, params)
    in
    let parse_terms name = function
      | [] -> missing_field name
      | field :: params ->
      let%bind (size, params) = params in
      let%map size = parse_int size in
      let agg = { field; size; } in
      Ok (Terms agg, params)
    in
    let parse_field name = function
      | [] -> missing_field name
      | field :: params -> Ok ({ field; }, params)
    in
    (* Dispatch on the aggregation type token; any leftover parameters after
       the type-specific parser is an error. *)
    let parse agg =
      match Stre.nsplitc agg ':' with
      | [] -> assert false
      | name :: [] -> Error (`Msg (sprintf "aggregation %s missing type" name))
      | name :: type_ :: params ->
      let%map (agg, params) =
        match type_ with
        | "a" | "avg" -> let%map (agg, params) = parse_metrics name params in Ok (Avg agg, params)
        | "u" | "cardinal" | "cardinality" -> parse_cardinality name params
        | "e" | "est" | "extended_stats" -> parse_extended_stats name params
        | "min" -> let%map (agg, params) = parse_metrics name params in Ok (Min agg, params)
        | "max" -> let%map (agg, params) = parse_metrics name params in Ok (Max agg, params)
        | "st" | "stats" -> let%map (agg, params) = parse_metrics name params in Ok (Stats agg, params)
        | "sst" | "string_stats" -> parse_string_stats name params
        | "s" | "sum" -> let%map (agg, params) = parse_metrics name params in Ok (Sum agg, params)
        | "t" | "terms" -> parse_terms name params
        | "n" | "count" | "value_count" -> let%map (agg, params) = parse_field name params in Ok (ValueCount agg, params)
        | agg -> Error (`Msg (sprintf "unknown aggregation type: %s" agg))
      in
      match params with
      | [] -> Ok (name, agg)
      | _ :: _ ->
        let msg = sprintf "%s aggregation %s unknown extra parameters: %s" (string_of_aggregation agg) name (String.concat ":" params) in
        Error (`Msg msg)
    in
    (* Print back in the same colon-separated syntax (without the type). *)
    let print fmt (name, agg) =
      let cons map hd tl = match hd with Some hd -> map hd :: tl | None -> tl in
      let params =
        match agg with
        | Avg params | Max params | Min params | Stats params | Sum params ->
          let { field; missing } = params in
          name :: field :: cons id missing []
        | Cardinality { common = { field; missing; }; precision_threshold; } ->
          let params = cons string_of_int precision_threshold [] in
          let params = cons id missing params in
          name :: field :: params
        | ExtendedStats { common = { field; missing; }; sigma; } ->
          let params = cons string_of_float sigma [] in
          let params = cons id missing params in
          name :: field :: params
        | StringStats { common = { field; missing; }; show_distribution; } ->
          let params = cons string_of_bool show_distribution [] in
          let params = cons id missing params in
          name :: field :: params
        | Terms { field; size } ->
          name :: field :: cons string_of_int size []
        | ValueCount { field; } ->
          name :: field :: []
      in
      Format.fprintf fmt "%s" (String.concat ":" params)
    in
    Arg.(conv (parse, print))
  in
  let open Common_args in
  let%map common_args = common_args
  and host = host
  and index = index
  and doc_type = doc_type
  and timeout = timeout
  and source_includes = source_includes
  and source_excludes = source_excludes
  and routing = routing
  and preference = preference
  and format = format
  and size = Arg.(value & opt (some int) None & info [ "n"; "size"; ] ~doc:"size")
  and from = Arg.(value & opt (some int) None & info [ "o"; "from"; ] ~doc:"from")
  and sort = sort
  and fields = Arg.(value & opt_all string [] & info [ "F"; "fields"; ] ~doc:"fields")
  and scroll = Arg.(value & opt (some string) None & info [ "S"; "scroll"; ] ~doc:"scroll")
  and slice_max = Arg.(value & opt (some int) None & info [ "N"; "slice-max"; ] ~doc:"slice_max")
  and slice_id = Arg.(value & opt (some int) None & info [ "I"; "slice-id"; ] ~doc:"slice_id")
  and query = Arg.(value & opt (some string) None & info [ "q"; "query"; ] ~doc:"query using query_string query")
  and body_query = Arg.(value & pos 2 (some string) None & info [] ~docv:"BODY_QUERY" ~doc:"body query")
  and aggregations = Arg.(value & opt_all aggregation [] & info [ "a"; "aggregation"; ] ~doc:"add simple aggregation")
  and analyzer = Arg.(value & opt (some string) None & info [ "A"; "analyzer"; ] ~doc:"analyzer to be used for query_string query")
  and analyze_wildcard = Arg.(value & flag & info [ "W"; "analyze-wildcard"; ] ~doc:"analyze wildcard and prefix queries in query_string query")
  and default_field = Arg.(value & opt (some string) None & info [ "d"; "default-field"; ] ~doc:"default field to be used for query_string query")
  and default_operator = Arg.(value & opt (some string) None & info [ "O"; "default-operator"; ] ~doc:"default operator to be used for query_string query")
  and explain = Arg.(value & flag & info [ "E"; "explain"; ] ~doc:"explain hits")
  and show_count = Arg.(value & flag & info [ "c"; "show-count"; ] ~doc:"output total number of hits")
  and track_total_hits = Arg.(value & opt (some string) None & info [ "C"; "track-total-hits"; ] ~doc:"track total number hits (true, false, or a number)")
  and retry = Arg.(value & flag & info [ "R"; "retry"; ] ~doc:"retry if there are any failed shards") in
  search common_args {
    host;
    index;
    doc_type;
    timeout;
    size;
    from;
    sort;
    source_includes;
    source_excludes;
    fields;
    routing;
    preference;
    scroll;
    slice_id;
    slice_max;
    query;
    body_query;
    aggregations;
    analyzer;
    analyze_wildcard;
    default_field;
    default_operator;
    explain;
    show_count;
    track_total_hits;
    retry;
    format;
  }
(* Pair the term with its man-page metadata. *)
let search_tool =
  search_tool,
  let open Term in
  let doc = "search" in
  let exits = default_exits in
  let man = [] in
  info "search" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* "settings" subcommand: read/write/reset cluster settings. *)
let settings_tool =
  let open Common_args in
  let open Settings in
  let%map common_args = common_args
  and host = host
  and keys = Arg.(value & pos_right 0 string [] & info [] ~docv:"KEYS" ~doc:"setting keys")
  and reset = Arg.(value & flag & info [ "r"; "reset"; ] ~doc:"reset keys")
  and include_defaults = Arg.(value & flag & info [ "D"; "include-defaults"; ] ~doc:"include defaults")
  and input = Arg.(value & vflag (Text : input) [ JSON, info [ "j"; "input-json"; ] ~doc:"json input format"; ])
  and output =
    (* Converter for the output format name (text/json/raw). *)
    let output =
      let parse output =
        match output with
        | "text" -> Ok (Text : output)
        | "json" -> Ok JSON
        | "raw" -> Ok Raw
        | _ -> Error (`Msg (sprintf "unknown output format: %s" output))
      in
      let print fmt output =
        let output =
          match output with
          | (Text : output) -> "text"
          | JSON -> "json"
          | Raw -> "raw"
        in
        Format.fprintf fmt "%s" output
      in
      Arg.(conv (parse, print))
    in
    (* -J with no value defaults to JSON output. *)
    Arg.(value & opt ~vopt:(JSON : output) output Text & info [ "J"; "output"; ] ~doc:"choose output format")
  and type_ =
    let type_transient = Some Transient, Arg.info [ "t"; "transient"; ] ~doc:"transient setting" in
    let type_persistent = Some Persistent, Arg.info [ "p"; "persistent"; ] ~doc:"persistent setting" in
    let type_defaults = Some Defaults, Arg.info [ "d"; "default"; ] ~doc:"default setting" in
    Arg.(value & vflag None [ type_transient; type_persistent; type_defaults; ])
  in
  settings common_args {
    host;
    keys;
    reset;
    include_defaults;
    input;
    output;
    type_;
  }
(* Pair the term with its man-page metadata. *)
let settings_tool =
  settings_tool,
  let open Term in
  let doc = "manage cluster settings" in
  let exits = default_exits in
  let man = [] in
  info "settings" ~doc ~sdocs:Manpage.s_common_options ~exits ~man
(* All subcommands, passed to [Term.eval_choice] in the entry point. *)
let tools = [
  alias_tool;
  cat_tool;
  count_tool;
  delete_tool;
  flush_tool;
  get_tool;
  health_tool;
  index_tool;
  nodes_tool;
  put_tool;
  recovery_tool;
  refresh_tool;
  search_tool;
  settings_tool;
]
(* Entry point: dispatch to the selected subcommand. [ErrorExit] maps to
   exit code 1; any other uncaught exception is logged and maps to 125. *)
let () =
  let run () =
    let argv = Common.get_argv () in
    Term.(exit (eval_choice ~catch:false ~argv default_tool tools))
  in
  try run () with
  | ErrorExit -> exit 1
  | exn -> log #error ~exn "uncaught exception"; exit 125
|
61f1c50389fcb051838678df9ebc23138a4ab0217264799bdceccdf319f3a7d7 | snmsts/cl-langserver | slynk-indentation.lisp | (in-package :ls-base)
(defvar *application-hints-tables* '()
"A list of hash tables mapping symbols to indentation hints (lists
of symbols and numbers as per cl-indent.el). Applications can add hash
tables to the list to change the auto indentation sly sends to
emacs.")
(defun has-application-indentation-hint-p (symbol)
(let ((default (load-time-value (gensym))))
(dolist (table *application-hints-tables*)
(let ((indentation (gethash symbol table default)))
(unless (eq default indentation)
(return-from has-application-indentation-hint-p
(values indentation t))))))
(values nil nil))
(defun application-indentation-hint (symbol)
(let ((indentation (has-application-indentation-hint-p symbol)))
(labels ((walk (indentation-spec)
(etypecase indentation-spec
(null nil)
(number indentation-spec)
(symbol (string-downcase indentation-spec))
(cons (cons (walk (car indentation-spec))
(walk (cdr indentation-spec)))))))
(walk indentation))))
;;; override slynk version of this function
(defun symbol-indentation (symbol)
"Return a form describing the indentation of SYMBOL.
The form is to be used as the `common-lisp-indent-function' property
in Emacs."
(cond
((has-application-indentation-hint-p symbol)
(application-indentation-hint symbol))
((and (macro-function symbol)
(not (known-to-emacs-p symbol)))
(let ((arglist (arglist symbol)))
(etypecase arglist
((member :not-available)
nil)
(list
(macro-indentation arglist)))))
(t nil)))
;;; More complex version.
(defun macro-indentation (arglist)
(labels ((frob (list &optional base)
(if (every (lambda (x)
(member x '(nil "&rest") :test #'equal))
list)
;; If there was nothing interesting, don't return anything.
nil
Otherwise substitute leading NIL 's with 4 or 1 .
(let ((ok t))
(substitute-if (if base
4
1)
(lambda (x)
(if (and ok (not x))
t
(setf ok nil)))
list))))
(walk (list level &optional firstp)
(when (consp list)
(let ((head (car list)))
(if (consp head)
(let ((indent (frob (walk head (+ level 1) t))))
(cons (list* "&whole" (if (zerop level)
4
1)
indent) (walk (cdr list) level)))
(case head
;; &BODY is &BODY, this is clear.
(&body
'("&body"))
;; &KEY is tricksy. If it's at the base level, we want
;; to indent them normally:
;;
( foo bar quux
: quux t
: )
;;
If it 's at a destructuring level , we want indent of 1 :
;;
;; (with-foo (var arg
;; :foo t
: quux nil )
;; ...)
(&key
(if (zerop level)
'("&rest" nil)
'("&rest" 1)))
;; &REST is tricksy. If it's at the front of
destructuring , we want to indent by 1 , otherwise
;; normally:
;;
( foo ( bar quux
;; zot)
;; ...)
;;
;; but
;;
( foo bar quux
;; zot)
(&rest
(if (and (plusp level) firstp)
'("&rest" 1)
'("&rest" nil)))
;; &WHOLE and &ENVIRONMENT are skipped as if they weren't there
;; at all.
((&whole &environment)
(walk (cddr list) level firstp))
;; &OPTIONAL is indented normally -- and the &OPTIONAL marker
;; itself is not counted.
(&optional
(walk (cdr list) level))
;; Indent normally, walk the tail -- but
;; unknown lambda-list keywords terminate the walk.
(otherwise
(unless (member head lambda-list-keywords)
(cons nil (walk (cdr list) level))))))))))
(frob (walk arglist 0 t) t)))
#+nil
(progn
(assert (equal '(4 4 ("&whole" 4 "&rest" 1) "&body")
(macro-indentation '(bar quux (&rest slots) &body body))))
(assert (equal nil
(macro-indentation '(a b c &rest more))))
(assert (equal '(4 4 4 "&body")
(macro-indentation '(a b c &body more))))
(assert (equal '(("&whole" 4 1 1 "&rest" 1) "&body")
(macro-indentation '((name zot &key foo bar) &body body))))
(assert (equal nil
(macro-indentation '(x y &key z)))))
(provide :ls-indentation)
| null | https://raw.githubusercontent.com/snmsts/cl-langserver/3b1246a5d0bd58459e7a64708f820bf718cf7175/src/contrib/slynk-indentation.lisp | lisp | override slynk version of this function
More complex version.
If there was nothing interesting, don't return anything.
&BODY is &BODY, this is clear.
&KEY is tricksy. If it's at the base level, we want
to indent them normally:
(with-foo (var arg
:foo t
...)
&REST is tricksy. If it's at the front of
normally:
zot)
...)
but
zot)
&WHOLE and &ENVIRONMENT are skipped as if they weren't there
at all.
&OPTIONAL is indented normally -- and the &OPTIONAL marker
itself is not counted.
Indent normally, walk the tail -- but
unknown lambda-list keywords terminate the walk. | (in-package :ls-base)
(defvar *application-hints-tables* '()
"A list of hash tables mapping symbols to indentation hints (lists
of symbols and numbers as per cl-indent.el). Applications can add hash
tables to the list to change the auto indentation sly sends to
emacs.")
(defun has-application-indentation-hint-p (symbol)
(let ((default (load-time-value (gensym))))
(dolist (table *application-hints-tables*)
(let ((indentation (gethash symbol table default)))
(unless (eq default indentation)
(return-from has-application-indentation-hint-p
(values indentation t))))))
(values nil nil))
(defun application-indentation-hint (symbol)
(let ((indentation (has-application-indentation-hint-p symbol)))
(labels ((walk (indentation-spec)
(etypecase indentation-spec
(null nil)
(number indentation-spec)
(symbol (string-downcase indentation-spec))
(cons (cons (walk (car indentation-spec))
(walk (cdr indentation-spec)))))))
(walk indentation))))
(defun symbol-indentation (symbol)
"Return a form describing the indentation of SYMBOL.
The form is to be used as the `common-lisp-indent-function' property
in Emacs."
(cond
((has-application-indentation-hint-p symbol)
(application-indentation-hint symbol))
((and (macro-function symbol)
(not (known-to-emacs-p symbol)))
(let ((arglist (arglist symbol)))
(etypecase arglist
((member :not-available)
nil)
(list
(macro-indentation arglist)))))
(t nil)))
(defun macro-indentation (arglist)
(labels ((frob (list &optional base)
(if (every (lambda (x)
(member x '(nil "&rest") :test #'equal))
list)
nil
Otherwise substitute leading NIL 's with 4 or 1 .
(let ((ok t))
(substitute-if (if base
4
1)
(lambda (x)
(if (and ok (not x))
t
(setf ok nil)))
list))))
(walk (list level &optional firstp)
(when (consp list)
(let ((head (car list)))
(if (consp head)
(let ((indent (frob (walk head (+ level 1) t))))
(cons (list* "&whole" (if (zerop level)
4
1)
indent) (walk (cdr list) level)))
(case head
(&body
'("&body"))
( foo bar quux
: quux t
: )
If it 's at a destructuring level , we want indent of 1 :
: quux nil )
(&key
(if (zerop level)
'("&rest" nil)
'("&rest" 1)))
destructuring , we want to indent by 1 , otherwise
( foo ( bar quux
( foo bar quux
(&rest
(if (and (plusp level) firstp)
'("&rest" 1)
'("&rest" nil)))
((&whole &environment)
(walk (cddr list) level firstp))
(&optional
(walk (cdr list) level))
(otherwise
(unless (member head lambda-list-keywords)
(cons nil (walk (cdr list) level))))))))))
(frob (walk arglist 0 t) t)))
#+nil
(progn
(assert (equal '(4 4 ("&whole" 4 "&rest" 1) "&body")
(macro-indentation '(bar quux (&rest slots) &body body))))
(assert (equal nil
(macro-indentation '(a b c &rest more))))
(assert (equal '(4 4 4 "&body")
(macro-indentation '(a b c &body more))))
(assert (equal '(("&whole" 4 1 1 "&rest" 1) "&body")
(macro-indentation '((name zot &key foo bar) &body body))))
(assert (equal nil
(macro-indentation '(x y &key z)))))
(provide :ls-indentation)
|
42e5b4127d6010f14d70f382aaa2b175d993ef4155c0ccd2c481acf5a0ed649c | HealthSamurai/dojo.clj | model.cljc | (ns app.rest.model
(:require
[re-frame.core :as rf]
[app.routes :refer [href]]))
(def page-key :rest/index)
(rf/reg-event-fx
page-key
(fn [{db :db} [_ phase params]]
(cond
(= :init phase)
{:db (assoc db page-key
{:title "DB"})}
(= :params phase)
{:db db}
(= :deinit phase)
{:db db})))
(rf/reg-sub
page-key
(fn [db] (get db page-key)))
(comment
(println "Here")
(rf/dispatch [page-key :init {}])
)
| null | https://raw.githubusercontent.com/HealthSamurai/dojo.clj/94922640f534897ab2b181c608b54bfbb8351d7b/ui/src/app/rest/model.cljc | clojure | (ns app.rest.model
(:require
[re-frame.core :as rf]
[app.routes :refer [href]]))
(def page-key :rest/index)
(rf/reg-event-fx
page-key
(fn [{db :db} [_ phase params]]
(cond
(= :init phase)
{:db (assoc db page-key
{:title "DB"})}
(= :params phase)
{:db db}
(= :deinit phase)
{:db db})))
(rf/reg-sub
page-key
(fn [db] (get db page-key)))
(comment
(println "Here")
(rf/dispatch [page-key :init {}])
)
| |
e1cae4ef1d7d3e0afa7f4c67db275eecf2da7d38dac8d3aec3fcf503831aec62 | grin-compiler/ghc-wpc-sample-programs | HTML.hs | {-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE ViewPatterns #
{- |
   Module      : Text.Pandoc.Writers.HTML
   Copyright   : Copyright (C) 2006-2020 John MacFarlane
   License     : GNU GPL, version 2 or above

   Maintainer  : John MacFarlane <>
   Stability   : alpha
   Portability : portable

Conversion of 'Pandoc' documents to HTML.
-}
module Text.Pandoc.Writers.HTML (
writeHtml4,
writeHtml4String,
writeHtml5,
writeHtml5String,
writeHtmlStringForEPUB,
writeS5,
writeSlidy,
writeSlideous,
writeDZSlides,
writeRevealJs,
tagWithAttributes
) where
import Control.Monad.State.Strict
import Data.Char (ord)
import Data.List (intercalate, intersperse, partition, delete, (\\))
import Data.Maybe (fromMaybe, isJust, isNothing, mapMaybe)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Network.HTTP (urlEncode)
import Network.URI (URI (..), parseURIReference)
import Numeric (showHex)
import Text.DocLayout (render, literal)
import Text.Blaze.Internal (MarkupM (Empty), customLeaf, customParent)
import Text.DocTemplates (FromContext (lookupContext), Context (..))
import Text.Blaze.Html hiding (contents)
import Text.Pandoc.Definition
import Text.Pandoc.Highlighting (formatHtmlBlock, formatHtmlInline, highlight,
styleToCss)
import Text.Pandoc.ImageSize
import Text.Pandoc.Options
import Text.Pandoc.Shared
import Text.Pandoc.Slides
import Text.Pandoc.Templates (renderTemplate)
import Text.Pandoc.Walk
import Text.Pandoc.Writers.Math
import Text.Pandoc.Writers.Shared
import Text.Pandoc.XML (escapeStringForXML, fromEntities, toEntities,
html5Attributes, html4Attributes, rdfaAttributes)
import qualified Text.Blaze.XHtml5 as H5
import qualified Text.Blaze.XHtml5.Attributes as A5
import Control.Monad.Except (throwError)
import System.FilePath (takeBaseName)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import qualified Text.Blaze.XHtml1.Transitional as H
import qualified Text.Blaze.XHtml1.Transitional.Attributes as A
import Text.Pandoc.Class.PandocMonad (PandocMonad, report)
import Text.Pandoc.Class.PandocPure (runPure)
import Text.Pandoc.Error
import Text.Pandoc.Logging
import Text.Pandoc.MIME (mediaCategory)
import Text.TeXMath
import Text.XML.Light (elChildren, unode, unqual)
import qualified Text.XML.Light as XML
import Text.XML.Light.Output
data WriterState = WriterState
{ stNotes :: [Html] -- ^ List of notes
, stMath :: Bool -- ^ Math is used in document
, stQuotes :: Bool -- ^ <q> tag is used
, stHighlighting :: Bool -- ^ Syntax highlighting is used
, stHtml5 :: Bool -- ^ Use HTML5
, stEPUBVersion :: Maybe EPUBVersion -- ^ EPUB version if for epub
, stSlideVariant :: HTMLSlideVariant
, stSlideLevel :: Int -- ^ Slide level
, stInSection :: Bool -- ^ Content is in a section (revealjs)
, stCodeBlockNum :: Int -- ^ Number of code block
}
defaultWriterState :: WriterState
defaultWriterState = WriterState {stNotes= [], stMath = False, stQuotes = False,
stHighlighting = False,
stHtml5 = False,
stEPUBVersion = Nothing,
stSlideVariant = NoSlides,
stSlideLevel = 1,
stInSection = False,
stCodeBlockNum = 0}
-- Helpers to render HTML with the appropriate function.

-- | Render Text as escaped Html.  Straight quotes are pre-escaped, and a
-- text-presentation variation selector (U+FE0E) is appended after certain
-- characters so iOS does not substitute emoji glyphs (see #5469).
strToHtml :: Text -> Html
strToHtml = strToHtml' . T.unpack
  where
    strToHtml' ('\'':xs) = preEscapedString "\'" `mappend` strToHtml' xs
    strToHtml' ('"' :xs) = preEscapedString "\"" `mappend` strToHtml' xs
    strToHtml' (x:xs) | needsVariationSelector x
                      = preEscapedString [x, '\xFE0E'] `mappend`
                        -- if the input already carries an explicit U+FE0E,
                        -- consume it so we don't emit it twice
                        case xs of
                          ('\xFE0E':ys) -> strToHtml' ys
                          _ -> strToHtml' xs
    -- fast path: emit the longest run without special characters in one go
    strToHtml' xs@(_:_) = case break (\c -> c == '\'' || c == '"' ||
                                         needsVariationSelector c) xs of
                            (_ ,[]) -> toHtml xs
                            (ys,zs) -> toHtml ys `mappend` strToHtml' zs
    strToHtml' [] = ""
-- See #5469: these characters get a text-presentation variation selector
-- appended, which prevents iOS from substituting emoji glyphs for them.
needsVariationSelector :: Char -> Bool
needsVariationSelector c = c == '↩' || c == '↔'
-- | Hard linebreak, suppressed entirely when wrapping is disabled.
nl :: WriterOptions -> Html
nl opts
  | writerWrapText opts == WrapNone = mempty
  | otherwise                       = preEscapedString "\n"
-- | Convert a 'Pandoc' document to an HTML5 string.
-- (Extraction had stripped the '--' marker from this Haddock comment,
-- leaving an invalid bare '|' line; restored here.)
writeHtml5String :: PandocMonad m => WriterOptions -> Pandoc -> m Text
writeHtml5String = writeHtmlString'
  defaultWriterState{ stHtml5 = True }
-- | Convert a 'Pandoc' document to an HTML5 'Html' structure.
-- (Restores the '--' comment marker lost in extraction.)
writeHtml5 :: PandocMonad m => WriterOptions -> Pandoc -> m Html
writeHtml5 = writeHtml' defaultWriterState{ stHtml5 = True }
-- | Convert a 'Pandoc' document to an HTML4 string.
-- (Restores the '--' comment marker lost in extraction.)
writeHtml4String :: PandocMonad m => WriterOptions -> Pandoc -> m Text
writeHtml4String = writeHtmlString'
  defaultWriterState{ stHtml5 = False }
-- | Convert a 'Pandoc' document to an HTML4 'Html' structure.
-- (Restores the '--' comment marker lost in extraction.)
writeHtml4 :: PandocMonad m => WriterOptions -> Pandoc -> m Html
writeHtml4 = writeHtml' defaultWriterState{ stHtml5 = False }
-- | Convert a 'Pandoc' document to HTML appropriate for a given EPUB
-- version.  EPUB3 output uses HTML5 markup; EPUB2 does not.
writeHtmlStringForEPUB :: PandocMonad m
                       => EPUBVersion -> WriterOptions -> Pandoc
                       -> m Text
writeHtmlStringForEPUB version opts doc =
  writeHtmlString'
    defaultWriterState{ stHtml5 = version == EPUB3
                      , stEPUBVersion = Just version }
    opts doc
-- Thin wrappers selecting a slide-show variant.  The '--' comment markers
-- on the Slidy and Slideous writers had been stripped in extraction,
-- leaving invalid bare '|' lines; restored here.

-- | Convert Pandoc document to reveal.js HTML slide show.
writeRevealJs :: PandocMonad m
              => WriterOptions -> Pandoc -> m Text
writeRevealJs = writeHtmlSlideShow' RevealJsSlides

-- | Convert Pandoc document to S5 HTML slide show.
writeS5 :: PandocMonad m
        => WriterOptions -> Pandoc -> m Text
writeS5 = writeHtmlSlideShow' S5Slides

-- | Convert Pandoc document to Slidy HTML slide show.
writeSlidy :: PandocMonad m
           => WriterOptions -> Pandoc -> m Text
writeSlidy = writeHtmlSlideShow' SlidySlides

-- | Convert Pandoc document to Slideous HTML slide show.
writeSlideous :: PandocMonad m
              => WriterOptions -> Pandoc -> m Text
writeSlideous = writeHtmlSlideShow' SlideousSlides

-- | Convert Pandoc document to DZSlides HTML slide show.
writeDZSlides :: PandocMonad m
              => WriterOptions -> Pandoc -> m Text
writeDZSlides = writeHtmlSlideShow' DZSlides
-- | Shared driver for all slide-show writers: records the variant in the
-- writer state and selects HTML5 markup for the variants that use it.
writeHtmlSlideShow' :: PandocMonad m
                    => HTMLSlideVariant -> WriterOptions -> Pandoc -> m Text
writeHtmlSlideShow' variant = writeHtmlString'
  defaultWriterState{ stSlideVariant = variant
                    , stHtml5 = usesHtml5 variant }
  where
    -- only reveal.js and DZSlides emit HTML5 markup
    usesHtml5 RevealJsSlides = True
    usesHtml5 DZSlides       = True
    usesHtml5 _              = False
-- | Render blaze 'Html' to strict 'Text'.
renderHtml' :: Html -> Text
renderHtml' markup = TL.toStrict (renderHtml markup)
-- | Render a document to a Text string, optionally applying the writer
-- template.  Warns when no @lang@ is set and falls back to a page title
-- derived from the source filename (or \"Untitled\") when none is given.
writeHtmlString' :: PandocMonad m
                 => WriterState -> WriterOptions -> Pandoc -> m Text
writeHtmlString' st opts d = do
  (body, context) <- evalStateT (pandocToHtml opts d) st
  let asciiize = if writerPreferAscii opts then toEntities else id
  fmap asciiize $
    case writerTemplate opts of
      Nothing  -> return $ renderHtml' body
      Just tpl -> do
        -- warn if empty lang
        when (isNothing (getField "lang" context :: Maybe Text)) $
          report NoLangSpecified
        -- check for empty pagetitle; derive a fallback if needed
        context' <-
          case getField "pagetitle" context of
            Just (s :: Text) | not (T.null s) -> return context
            _ -> do
              let fallback = T.pack $
                    case lookupContext "sourcefile" (writerVariables opts) of
                      Nothing    -> "Untitled"
                      Just []    -> "Untitled"
                      Just (x:_) -> takeBaseName $ T.unpack x
              report $ NoTitleElement fallback
              return $ resetField "pagetitle" fallback context
        return $ render Nothing $ renderTemplate tpl
          (defField "body" (renderHtml' body) context')
-- | Render a document to an 'Html' value.  When a template or ASCII-only
-- output is requested, go through the string renderer and re-embed the
-- result as pre-escaped markup; otherwise render the body directly.
writeHtml' :: PandocMonad m => WriterState -> WriterOptions -> Pandoc -> m Html
writeHtml' st opts d
  | isJust (writerTemplate opts) || writerPreferAscii opts =
      preEscapedText <$> writeHtmlString' st opts d
  | otherwise =
      fst <$> evalStateT (pandocToHtml opts d) st
-- result is (title, authors, date, toc, body, new variables)
pandocToHtml :: PandocMonad m
=> WriterOptions
-> Pandoc
-> StateT WriterState m (Html, Context Text)
pandocToHtml opts (Pandoc meta blocks) = do
let slideLevel = fromMaybe (getSlideLevel blocks) $ writerSlideLevel opts
modify $ \st -> st{ stSlideLevel = slideLevel }
metadata <- metaToContext opts
(fmap (literal . renderHtml') . blockListToHtml opts)
(fmap (literal . renderHtml') . inlineListToHtml opts)
meta
let stringifyHTML = escapeStringForXML . stringify
let authsMeta = map stringifyHTML $ docAuthors meta
let dateMeta = stringifyHTML $ docDate meta
slideVariant <- gets stSlideVariant
let sects = adjustNumbers opts $
makeSections (writerNumberSections opts) Nothing $
if slideVariant == NoSlides
then blocks
else prepSlides slideLevel blocks
toc <- if writerTableOfContents opts && slideVariant /= S5Slides
then fmap renderHtml' <$> tableOfContents opts sects
else return Nothing
blocks' <- blockListToHtml opts sects
st <- get
notes <- footnoteSection opts (reverse (stNotes st))
let thebody = blocks' >> notes
let math = case writerHTMLMathMethod opts of
MathJax url
| slideVariant /= RevealJsSlides ->
                     -- mathjax is handled via a special plugin in revealjs
H.script ! A.src (toValue url)
! A.type_ "text/javascript"
$ case slideVariant of
SlideousSlides ->
preEscapedString
"MathJax.Hub.Queue([\"Typeset\",MathJax.Hub]);"
_ -> mempty
KaTeX url -> do
H.script !
A.src (toValue $ url <> "katex.min.js") $ mempty
nl opts
let katexFlushLeft =
case lookupContext "classoption" metadata of
Just clsops | "fleqn" `elem` (clsops :: [Text]) -> "true"
_ -> "false"
H.script $ text $ T.unlines [
"document.addEventListener(\"DOMContentLoaded\", function () {"
, " var mathElements = document.getElementsByClassName(\"math\");"
, " for (var i = 0; i < mathElements.length; i++) {"
, " var texText = mathElements[i].firstChild;"
, " if (mathElements[i].tagName == \"SPAN\") {"
, " katex.render(texText.data, mathElements[i], {"
, " displayMode: mathElements[i].classList.contains('display'),"
, " throwOnError: false,"
, " fleqn: " <> katexFlushLeft
, " });"
, "}}});"
]
nl opts
H.link ! A.rel "stylesheet" !
A.href (toValue $ url <> "katex.min.css")
_ -> case lookupContext "mathml-script"
(writerVariables opts) of
Just s | not (stHtml5 st) ->
H.script ! A.type_ "text/javascript"
$ preEscapedString
("/*<![CDATA[*/\n" ++ T.unpack s ++
"/*]]>*/\n")
| otherwise -> mempty
Nothing -> mempty
let context = (if stHighlighting st
then case writerHighlightStyle opts of
Just sty -> defField "highlighting-css"
(T.pack $ styleToCss sty)
Nothing -> id
else id) $
(if stMath st
then defField "math" (renderHtml' math)
else id) $
(case writerHTMLMathMethod opts of
MathJax u -> defField "mathjax" True .
defField "mathjaxurl"
(T.takeWhile (/='?') u)
_ -> defField "mathjax" False) $
defField "quotes" (stQuotes st) $
-- for backwards compatibility we populate toc
-- with the contents of the toc, rather than a
-- boolean:
maybe id (defField "toc") toc $
maybe id (defField "table-of-contents") toc $
defField "author-meta" authsMeta $
maybe id (defField "date-meta")
(normalizeDate dateMeta) $
defField "pagetitle"
(stringifyHTML . docTitle $ meta) $
defField "idprefix" (writerIdentifierPrefix opts) $
-- these should maybe be set in pandoc.hs
defField "slidy-url"
("" :: Text) $
defField "slideous-url" ("slideous" :: Text) $
defField "revealjs-url" ("reveal.js" :: Text) $
defField "s5-url" ("s5/default" :: Text) $
defField "html5" (stHtml5 st)
metadata
return (thebody, context)
-- | Like Blaze's id attribute, but prepends the writerIdentifierPrefix;
-- yields no attribute at all for an empty identifier.
-- (Restores the '--' comment marker lost in extraction.)
prefixedId :: WriterOptions -> Text -> Attribute
prefixedId opts s =
  case s of
    "" -> mempty
    _  -> A.id $ toValue $ writerIdentifierPrefix opts <> s
-- | Wrap pre-rendered items in a list constructor, applying incremental
-- display classes when requested: reveal.js marks each item as a
-- @fragment@, other formats mark the whole list @incremental@.
toList :: PandocMonad m
       => (Html -> Html)
       -> WriterOptions
       -> [Html]
       -> StateT WriterState m Html
toList listop opts items = do
  slideVariant <- gets stSlideVariant
  let rendered
        | not (writerIncremental opts)   = listop (mconcat items)
        | slideVariant == RevealJsSlides =
            listop $ mconcat $ map (! A.class_ "fragment") items
        | otherwise                      =
            listop (mconcat items) ! A.class_ "incremental"
  return rendered
-- | Render a bullet list from pre-rendered item bodies.
unordList :: PandocMonad m
          => WriterOptions -> [Html] -> StateT WriterState m Html
unordList opts items = toList H.ul opts (toListItems opts items)

-- | Render an ordered list from pre-rendered item bodies.
ordList :: PandocMonad m
        => WriterOptions -> [Html] -> StateT WriterState m Html
ordList opts items = toList H.ol opts (toListItems opts items)

-- | Render a definition list; items arrive already shaped as dt/dd pairs.
defList :: PandocMonad m
        => WriterOptions -> [Html] -> StateT WriterState m Html
defList opts items = toList H.dl opts (items ++ [nl opts])
-- | Does this list item begin with a task-list checkbox marker
-- (an empty "☐" or checked "☒" box followed by a space)?
isTaskListItem :: [Block] -> Bool
isTaskListItem (blk:_) =
  case blk of
    Plain (Str marker : Space : _) -> isCheckbox marker
    Para  (Str marker : Space : _) -> isCheckbox marker
    _                              -> False
  where
    isCheckbox m = m == "☐" || m == "☒"
isTaskListItem [] = False
-- | Render the blocks of a single list item, turning a leading "☐"/"☒"
-- marker into a disabled HTML checkbox; everything else renders normally.
listItemToHtml :: PandocMonad m
               => WriterOptions -> [Block] -> StateT WriterState m Html
listItemToHtml opts bls =
  case bls of
    Plain (Str "☐" : Space : is) : bs -> taskListItem False id  is bs
    Plain (Str "☒" : Space : is) : bs -> taskListItem True  id  is bs
    Para  (Str "☐" : Space : is) : bs -> taskListItem False H.p is bs
    Para  (Str "☒" : Space : is) : bs -> taskListItem True  H.p is bs
    _                                 -> blockListToHtml opts bls
  where
    -- wrap the checkbox plus the first inlines in @constr@ (id for Plain,
    -- H.p for Para), then append the remaining blocks unchanged
    taskListItem checked constr is bs = do
      let baseBox = H.input ! A.type_ "checkbox" ! A.disabled "" >> nl opts
          box = if checked then baseBox ! A.checked "" else baseBox
      isContents <- inlineListToHtml opts is
      bsContents <- blockListToHtml opts bs
      return $ constr (box >> isContents) >> bsContents
-- | Construct a table of contents from the (sectionized) block list.
-- Returns Nothing when there are no sections to list.
tableOfContents :: PandocMonad m => WriterOptions -> [Block]
                -> StateT WriterState m (Maybe Html)
tableOfContents _ [] = return Nothing
tableOfContents opts sects = do
  slideVariant <- gets stSlideVariant
  -- reveal.js uses fragment URLs of the form #/id rather than #id
  let opts' = if slideVariant == RevealJsSlides
                 then opts{ writerIdentifierPrefix =
                              "/" <> writerIdentifierPrefix opts }
                 else opts
  case toTableOfContents opts sects of
    bl@(BulletList (_:_)) -> Just <$> blockToHtml opts' bl
    _                     -> return Nothing
-- | Wrap pre-rendered, already-sorted footnotes in a footnote section:
-- an HTML5 <section> (with epub:type or ARIA role as appropriate) or an
-- HTML4 <div>, containing a horizontal rule and an ordered list.
footnoteSection :: PandocMonad m
                => WriterOptions -> [Html] -> StateT WriterState m Html
footnoteSection opts notes = do
  html5        <- gets stHtml5
  slideVariant <- gets stSlideVariant
  epubVersion  <- gets stEPUBVersion
  let hrtag = if html5 then H5.hr else H.hr
      container x
        | html5, epubVersion == Just EPUB3
          = H5.section ! A.class_ "footnotes"
                       ! customAttribute "epub:type" "footnotes" $ x
        | html5
          = H5.section ! A.class_ "footnotes"
                       ! customAttribute "role" "doc-endnotes" $ x
        | slideVariant /= NoSlides
          = H.div ! A.class_ "footnotes slide" $ x
        | otherwise
          = H.div ! A.class_ "footnotes" $ x
  if null notes
     then return mempty
     else return $ nl opts >> container (nl opts >> hrtag >> nl opts >>
            H.ol (mconcat notes >> nl opts) >> nl opts)
-- | Parse a mailto link; return Just (local part, domain) or Nothing.
parseMailto :: Text -> Maybe (Text, Text)
parseMailto s =
  case T.break (== ':') s of
    (scheme, rest)
      | T.toLower scheme == T.pack "mailto"
      , Just (':', addr) <- T.uncons rest ->
          let (localPart, domainPart) = T.span (/= '@') addr
          in  Just (localPart, T.drop 1 domainPart)
    _ -> Nothing
-- | Obfuscate a "mailto:" link according to the writer's obfuscation
-- method: none (plain link), reference (entity-encoded href/text), or
-- javascript (script that reassembles the address, with a <noscript>
-- fallback).  A link that does not parse as mailto: falls through to a
-- plain anchor.
obfuscateLink :: PandocMonad m
              => WriterOptions -> Attr -> Html -> Text
              -> StateT WriterState m Html
obfuscateLink opts attr txt s | writerEmailObfuscation opts == NoObfuscation =
  addAttrs opts attr $ H.a ! A.href (toValue s) $ txt
obfuscateLink opts attr (TL.toStrict . renderHtml -> txt) s =
  -- lowercase only the scheme so the address itself is preserved
  let meth = writerEmailObfuscation opts
      s' = T.toLower (T.take 7 s) <> T.drop 7 s
  in  case parseMailto s' of
        (Just (name', domain)) ->
          let domain' = T.replace "." " dot " domain
              at' = obfuscateChar '@'
              (linkText, altText) =
                 if txt == T.drop 7 s' -- autolink
                    then ("e", name' <> " at " <> domain')
                    else ("'" <> obfuscateString txt <> "'",
                          txt <> " (" <> name' <> " at " <> domain' <> ")")
              (_, classNames, _) = attr
              classNamesStr = T.concat $ map (" "<>) classNames
          in  case meth of
                ReferenceObfuscation ->
                     -- need to use preEscapedString or &'s are escaped to &amp; in URL
                     return $
                     preEscapedText $ "<a href=\"" <> obfuscateString s'
                     <> "\" class=\"email\">" <> obfuscateString txt <> "</a>"
                JavascriptObfuscation ->
                     return $
                     (H.script ! A.type_ "text/javascript" $
                     preEscapedText ("\n<!--\nh='" <>
                     obfuscateString domain <> "';a='" <> at' <> "';n='" <>
                     obfuscateString name' <> "';e=n+a+h;\n" <>
                     "document.write('<a h'+'ref'+'=\"ma'+'ilto'+':'+e+'\" clas'+'s=\"em' + 'ail" <>
                     classNamesStr <> "\">'+" <>
                     linkText <> "+'<\\/'+'a'+'>');\n// -->\n")) >>
                     H.noscript (preEscapedText $ obfuscateString altText)
                _ -> throwError $ PandocSomeError $ "Unknown obfuscation method: " <> tshow meth
        _ -> addAttrs opts attr $ H.a ! A.href (toValue s) $ toHtml txt -- malformed email
-- | Obfuscate a character as a numeric entity: decimal for even code
-- points, hexadecimal for odd ones (so the output mixes both forms).
obfuscateChar :: Char -> Text
obfuscateChar char =
  let code = ord char
      body = if even code then show code else 'x' : showHex code ""
  in  T.pack ("&#" <> body <> ";")
-- | Obfuscate a whole string, resolving entities first so each character
-- is encoded exactly once.
obfuscateString :: Text -> Text
obfuscateString txt = T.concatMap obfuscateChar (fromEntities txt)
-- | Render a single HTML tag with pandoc-style attributes to Text,
-- outside of any writer monad.  Rendering errors collapse to mempty.
tagWithAttributes :: WriterOptions
                  -> Bool -- ^ True for HTML5
                  -> Bool -- ^ True if self-closing tag
                  -> Text -- ^ Tag text
                  -> Attr -- ^ Pandoc style tag attributes
                  -> Text
tagWithAttributes opts html5 selfClosing tagname attr =
  either (const mempty) id $ runPure $
    TL.toStrict . renderHtml <$>
      evalStateT
        (addAttrs opts attr (customLeaf (textTag tagname) selfClosing))
        defaultWriterState{ stHtml5 = html5 }
-- | Attach pandoc-style attributes to an Html element.
addAttrs :: PandocMonad m
         => WriterOptions -> Attr -> Html -> StateT WriterState m Html
addAttrs opts attr html = do
  attrs <- attrsToHtml opts attr
  return $ foldl (!) html attrs
-- | Convert key/value pairs to blaze attributes, filtering by target
-- dialect: in HTML5 unknown attributes are prefixed with @data-@; in
-- EPUB2 output unknown attributes are dropped entirely.
toAttrs :: PandocMonad m
        => [(Text, Text)] -> StateT WriterState m [Attribute]
toAttrs kvs = do
  html5 <- gets stHtml5
  mbEpubVersion <- gets stEPUBVersion
  return $ mapMaybe (\(x,y) ->
            if html5
               then
                 -- keep known HTML5/RDFa attributes, namespaced ones, and
                 -- data-/aria- attributes as-is; rename everything else
                 -- to a data- attribute so the output stays valid
                 if x `Set.member` (html5Attributes <> rdfaAttributes)
                      || T.any (== ':') x -- e.g. epub: namespace
                      || "data-" `T.isPrefixOf` x
                      || "aria-" `T.isPrefixOf` x
                    then Just $ customAttribute (textTag x) (toValue y)
                    else Just $ customAttribute (textTag ("data-" <> x))
                                (toValue y)
               else
                 -- EPUB2 only admits HTML4/RDFa attributes and xml:*;
                 -- other HTML4 output passes everything through
                 if mbEpubVersion == Just EPUB2 &&
                    not (x `Set.member` (html4Attributes <> rdfaAttributes) ||
                         "xml:" `T.isPrefixOf` x)
                    then Nothing
                    else Just $ customAttribute (textTag x) (toValue y))
           kvs
-- | Convert a full pandoc 'Attr' triple to blaze attributes:
-- optional id (prefixed), optional class list, then key/value pairs.
attrsToHtml :: PandocMonad m
            => WriterOptions -> Attr -> StateT WriterState m [Attribute]
attrsToHtml opts (ident, classes, keyvals) = do
  kvAttrs <- toAttrs keyvals
  let idAttr    = [prefixedId opts ident | not (T.null ident)]
      classAttr = [A.class_ (toValue (T.unwords classes))
                    | not (null classes)]
  return $ idAttr ++ classAttr ++ kvAttrs
-- | Like 'attrsToHtml', but for images: width/height key-values are
-- pulled out and re-expressed as dimension attributes or a style rule.
imgAttrsToHtml :: PandocMonad m
               => WriterOptions -> Attr -> StateT WriterState m [Attribute]
imgAttrsToHtml opts (ident, cls, kvs) = do
  regular <- attrsToHtml opts (ident, cls, filter (not . isDim) kvs)
  dims    <- toAttrs (dimensionsToAttrList (ident, cls, kvs))
  return (regular ++ dims)
  where
    -- width/height are handled by dimensionsToAttrList, not passed raw
    isDim ("width", _)  = True
    isDim ("height", _) = True
    isDim _             = False
-- | Express an image's width/height as attribute pairs: pixel sizes
-- become plain width/height attributes, other units become a single
-- consolidated style attribute.
dimensionsToAttrList :: Attr -> [(Text, Text)]
dimensionsToAttrList attr = consolidateStyles (go Width ++ go Height)
  where
    go dir = case dimension dir attr of
               Just (Pixel n) -> [(tshow dir, tshow n)]
               Just other     -> [("style", tshow dir <> ":" <> tshow other)]
               Nothing        -> []
    -- merge any number of style entries into one ;-separated rule
    consolidateStyles xs =
      case partition isStyle xs of
        ([], _)    -> xs
        (ss, rest) -> ("style", T.intercalate ";" (map snd ss)) : rest
    isStyle = (== "style") . fst
-- | Render an image with caption as a figure: HTML5 <figure>/<figcaption>
-- or an HTML4 div.figure with p.caption.
figure :: PandocMonad m
       => WriterOptions -> Attr -> [Inline] -> (Text, Text)
       -> StateT WriterState m Html
figure opts attr txt (s, tit) = do
  img   <- inlineToHtml opts (Image attr [Str ""] (s, tit))
  html5 <- gets stHtml5
  capt  <- if null txt
              then return mempty
              else do
                inner <- inlineListToHtml opts txt
                return $ if html5
                            then H5.figcaption inner
                            else H.p ! A.class_ "caption" $ inner
  return $ if html5
              then H5.figure $ mconcat [nl opts, img, capt, nl opts]
              else H.div ! A.class_ "figure" $
                     mconcat [nl opts, img, nl opts, capt, nl opts]
-- | Shift every header's \"number\" attribute by the writer's
-- number-offset; a no-op when all offsets are zero.
adjustNumbers :: WriterOptions -> [Block] -> [Block]
adjustNumbers opts doc
  | all (== 0) (writerNumberOffset opts) = doc
  | otherwise = walk go doc
  where
    go (Header level (ident, classes, kvs) lst) =
      Header level (ident, classes, map fixnum kvs) lst
    go x = x
    -- offsets shorter than the section depth pad with zeros
    fixnum ("number", num) =
      ("number", T.intercalate "." $ map tshow $
         zipWith (+) (writerNumberOffset opts ++ repeat 0)
                     (map (fromMaybe 0 . safeRead) (T.split (== '.') num)))
    fixnum kv = kv
-- | Convert Pandoc block element to HTML.
blockToHtml :: PandocMonad m => WriterOptions -> Block -> StateT WriterState m Html
blockToHtml _ Null = return mempty
blockToHtml opts (Plain lst) = inlineListToHtml opts lst
blockToHtml opts (Para [Image attr@(_,classes,_) txt (src,tit)])
| "stretch" `elem` classes = do
slideVariant <- gets stSlideVariant
case slideVariant of
RevealJsSlides ->
-- a "stretched" image in reveal.js must be a direct child
-- of the slide container
inlineToHtml opts (Image attr txt (src, tit))
_ -> figure opts attr txt (src, tit)
-- title beginning with fig: indicates that the image is a figure
blockToHtml opts (Para [Image attr txt (s,T.stripPrefix "fig:" -> Just tit)]) =
figure opts attr txt (s,tit)
blockToHtml opts (Para lst) = do
contents <- inlineListToHtml opts lst
case contents of
Empty _ | not (isEnabled Ext_empty_paragraphs opts) -> return mempty
_ -> return $ H.p contents
blockToHtml opts (LineBlock lns) =
if writerWrapText opts == WrapNone
then blockToHtml opts $ linesToPara lns
else do
htmlLines <- inlineListToHtml opts $ intercalate [LineBreak] lns
return $ H.div ! A.class_ "line-block" $ htmlLines
blockToHtml opts (Div (ident, "section":dclasses, dkvs)
(Header level
hattr@(hident,hclasses,hkvs) ils : xs)) = do
slideVariant <- gets stSlideVariant
slideLevel <- gets stSlideLevel
let slide = slideVariant /= NoSlides &&
              -- NOTE(review): the right-hand operand of the '&&' above
              -- appears lost in extraction (this line was a comment about
              -- a dropped fix for #5168) — confirm against upstream.
html5 <- gets stHtml5
let titleSlide = slide && level < slideLevel
let level' = if level <= slideLevel && slideVariant == SlidySlides
                   -- NOTE(review): the 'then' branch of this 'if' appears
                   -- lost in extraction (see #3566) — confirm upstream.
else level
header' <- if ils == [Str "\0"] -- marker for hrule
then return mempty
else blockToHtml opts (Header level' hattr ils)
let isSec (Div (_,"section":_,_) _) = True
isSec (Div _ zs) = any isSec zs
isSec _ = False
let isPause (Para [Str ".",Space,Str ".",Space,Str "."]) = True
isPause _ = False
let fragmentClass = case slideVariant of
RevealJsSlides -> "fragment"
_ -> "incremental"
let inDiv zs = RawBlock (Format "html") ("<div class=\""
<> fragmentClass <> "\">") :
(zs ++ [RawBlock (Format "html") "</div>"])
let breakOnPauses zs = case splitBy isPause zs of
[] -> []
y:ys -> y ++ concatMap inDiv ys
let (titleBlocks, innerSecs) =
if titleSlide
-- title slides have no content of their own
then let (as, bs) = break isSec xs
in (breakOnPauses as, bs)
else ([], breakOnPauses xs)
let secttag = if html5
then H5.section
else H.div
titleContents <- blockListToHtml opts titleBlocks
inSection <- gets stInSection
innerContents <- do
modify $ \st -> st{ stInSection = True }
res <- blockListToHtml opts innerSecs
modify $ \st -> st{ stInSection = inSection }
return res
let classes' = ordNub $
["title-slide" | titleSlide] ++ ["slide" | slide] ++
["section" | (slide || writerSectionDivs opts) &&
not html5 ] ++
["level" <> tshow level | slide || writerSectionDivs opts ]
<> dclasses
let attr = (ident, classes', dkvs)
if titleSlide
then do
t <- addAttrs opts attr $
secttag $ nl opts <> header' <> nl opts <> titleContents <> nl opts
ensure 2D nesting for revealjs , but only for one level ;
revealjs does n't like more than one level of nesting
return $
if slideVariant == RevealJsSlides && not inSection &&
not (null innerSecs)
then H5.section (nl opts <> t <> nl opts <> innerContents)
else t <> nl opts <> if null innerSecs
then mempty
else innerContents <> nl opts
else if writerSectionDivs opts || slide ||
(hident /= ident && not (T.null hident || T.null ident)) ||
(hclasses /= dclasses) || (hkvs /= dkvs)
then addAttrs opts attr
$ secttag
$ nl opts <> header' <> nl opts <>
if null innerSecs
then mempty
else innerContents <> nl opts
else do
let attr' = (ident, classes' \\ hclasses, dkvs \\ hkvs)
t <- addAttrs opts attr' header'
return $ t <>
if null innerSecs
then mempty
else nl opts <> innerContents
blockToHtml opts (Div attr@(ident, classes, kvs') bs) = do
html5 <- gets stHtml5
slideVariant <- gets stSlideVariant
let kvs = [(k,v) | (k,v) <- kvs', k /= "width"] ++
[("style", "width:" <> w <> ";") | "column" `elem` classes,
("width", w) <- kvs'] ++
[("role", "doc-bibliography") | ident == "refs" && html5] ++
[("role", "doc-biblioentry")
| "ref-item" `T.isPrefixOf` ident && html5]
let speakerNotes = "notes" `elem` classes
we do n't want incremental output inside speaker notes , see # 1394
let opts' = if | speakerNotes -> opts{ writerIncremental = False }
| "incremental" `elem` classes -> opts{ writerIncremental = True }
| "nonincremental" `elem` classes -> opts{ writerIncremental = False }
| otherwise -> opts
we remove " incremental " and " " if we 're in a
-- slide presentaiton format.
classes' = case slideVariant of
NoSlides -> classes
_ -> filter (\k -> k /= "incremental" && k /= "nonincremental") classes
contents <- if "columns" `elem` classes'
we do n't use because it inserts
a newline between the column , which throws
off widths ! see # 4028
mconcat <$> mapM (blockToHtml opts) bs
else blockListToHtml opts' bs
let contents' = nl opts >> contents >> nl opts
let (divtag, classes'') = if html5 && "section" `elem` classes'
then (H5.section, filter (/= "section") classes')
else (H.div, classes')
if speakerNotes
then case slideVariant of
RevealJsSlides -> addAttrs opts' attr $
H5.aside contents'
DZSlides -> do
t <- addAttrs opts' attr $
H5.div contents'
return $ t ! H5.customAttribute "role" "note"
NoSlides -> addAttrs opts' attr $
H.div contents'
_ -> return mempty
else addAttrs opts (ident, classes'', kvs) $
divtag contents'
blockToHtml opts (RawBlock f str) = do
ishtml <- isRawHtml f
if ishtml
then return $ preEscapedText str
else if (f == Format "latex" || f == Format "tex") &&
allowsMathEnvironments (writerHTMLMathMethod opts) &&
isMathEnvironment str
then blockToHtml opts $ Plain [Math DisplayMath str]
else do
report $ BlockNotRendered (RawBlock f str)
return mempty
blockToHtml _ HorizontalRule = do
html5 <- gets stHtml5
return $ if html5 then H5.hr else H.hr
blockToHtml opts (CodeBlock (id',classes,keyvals) rawCode) = do
id'' <- if T.null id'
then do
modify $ \st -> st{ stCodeBlockNum = stCodeBlockNum st + 1 }
codeblocknum <- gets stCodeBlockNum
return (writerIdentifierPrefix opts <> "cb" <> tshow codeblocknum)
else return (writerIdentifierPrefix opts <> id')
let tolhs = isEnabled Ext_literate_haskell opts &&
any (\c -> T.toLower c == "haskell") classes &&
any (\c -> T.toLower c == "literate") classes
classes' = if tolhs
then map (\c -> if T.toLower c == "haskell"
then "literatehaskell"
else c) classes
else classes
adjCode = if tolhs
then T.unlines . map ("> " <>) . T.lines $ rawCode
else rawCode
hlCode = if isJust (writerHighlightStyle opts)
then highlight (writerSyntaxMap opts) formatHtmlBlock
(id'',classes',keyvals) adjCode
else Left ""
case hlCode of
Left msg -> do
unless (T.null msg) $
report $ CouldNotHighlight msg
addAttrs opts (id',classes,keyvals)
$ H.pre $ H.code $ toHtml adjCode
Right h -> modify (\st -> st{ stHighlighting = True }) >>
-- we set writerIdentifierPrefix to "" since id'' already
-- includes it:
addAttrs opts{writerIdentifierPrefix = ""} (id'',[],keyvals) h
blockToHtml opts (BlockQuote blocks) = do
-- in S5, treat list in blockquote specially
-- if default is incremental, make it nonincremental;
-- otherwise incremental
slideVariant <- gets stSlideVariant
if slideVariant /= NoSlides
then let inc = not (writerIncremental opts) in
case blocks of
[BulletList lst] -> blockToHtml (opts {writerIncremental = inc})
(BulletList lst)
[OrderedList attribs lst] ->
blockToHtml (opts {writerIncremental = inc})
(OrderedList attribs lst)
[DefinitionList lst] ->
blockToHtml (opts {writerIncremental = inc})
(DefinitionList lst)
_ -> do contents <- blockListToHtml opts blocks
return $ H.blockquote
$ nl opts >> contents >> nl opts
else do
contents <- blockListToHtml opts blocks
return $ H.blockquote $ nl opts >> contents >> nl opts
blockToHtml opts (Header level attr@(_,classes,kvs) lst) = do
contents <- inlineListToHtml opts lst
let secnum = fromMaybe mempty $ lookup "number" kvs
let contents' = if writerNumberSections opts && not (T.null secnum)
&& "unnumbered" `notElem` classes
then (H.span ! A.class_ "header-section-number"
$ toHtml secnum) >> strToHtml " " >> contents
else contents
addAttrs opts attr
$ case level of
1 -> H.h1 contents'
2 -> H.h2 contents'
3 -> H.h3 contents'
4 -> H.h4 contents'
5 -> H.h5 contents'
6 -> H.h6 contents'
_ -> H.p ! A.class_ "heading" $ contents'
blockToHtml opts (BulletList lst) = do
contents <- mapM (listItemToHtml opts) lst
let isTaskList = not (null lst) && all isTaskListItem lst
(if isTaskList then (! A.class_ "task-list") else id) <$>
unordList opts contents
blockToHtml opts (OrderedList (startnum, numstyle, _) lst) = do
contents <- mapM (listItemToHtml opts) lst
html5 <- gets stHtml5
let numstyle' = case numstyle of
Example -> "decimal"
_ -> camelCaseToHyphenated $ tshow numstyle
let attribs = [A.start $ toValue startnum | startnum /= 1] ++
[A.class_ "example" | numstyle == Example] ++
(if numstyle /= DefaultStyle
then if html5
then [A.type_ $
case numstyle of
Decimal -> "1"
LowerAlpha -> "a"
UpperAlpha -> "A"
LowerRoman -> "i"
UpperRoman -> "I"
_ -> "1"]
else [A.style $ toValue $ "list-style-type: " <>
numstyle']
else [])
l <- ordList opts contents
return $ foldl (!) l attribs
blockToHtml opts (DefinitionList lst) = do
contents <- mapM (\(term, defs) ->
do term' <- liftM H.dt $ inlineListToHtml opts term
defs' <- mapM (liftM (\x -> H.dd (x >> nl opts)) .
blockListToHtml opts) defs
return $ mconcat $ nl opts : term' : nl opts :
intersperse (nl opts) defs') lst
defList opts contents
blockToHtml opts (Table capt aligns widths headers rows') = do
captionDoc <- if null capt
then return mempty
else do
cs <- inlineListToHtml opts capt
return $ H.caption cs >> nl opts
html5 <- gets stHtml5
let percent w = show (truncate (100*w) :: Integer) <> "%"
let coltags = if all (== 0.0) widths
then mempty
else do
H.colgroup $ do
nl opts
mapM_ (\w -> do
if html5
then H.col ! A.style (toValue $ "width: " <>
percent w)
else H.col ! A.width (toValue $ percent w)
nl opts) widths
nl opts
head' <- if all null headers
then return mempty
else do
contents <- tableRowToHtml opts aligns 0 headers
return $ H.thead (nl opts >> contents) >> nl opts
body' <- liftM (\x -> H.tbody (nl opts >> mconcat x)) $
zipWithM (tableRowToHtml opts aligns) [1..] rows'
let tbl = H.table $
nl opts >> captionDoc >> coltags >> head' >> body' >> nl opts
let totalWidth = sum widths
When widths of columns are < 100 % , we need to set width for the whole
-- table, or some browsers give us skinny columns with lots of space between:
return $ if totalWidth == 0 || totalWidth == 1
then tbl
else tbl ! A.style (toValue $ "width:" <>
show (round (totalWidth * 100) :: Int) <> "%;")
-- | Render one table row as a @<tr>@ element.  Row number 0 is the
-- header row (its cells become @<th>@); data rows alternate the
-- classes @odd@/@even@, counting from 1.
tableRowToHtml :: PandocMonad m
               => WriterOptions
               -> [Alignment]   -- ^ column alignments
               -> Int           -- ^ row number (0 = header row)
               -> [[Block]]     -- ^ cell contents
               -> StateT WriterState m Html
tableRowToHtml opts aligns rownum cols' = do
  -- header cells are <th>, body cells <td>
  let mkcell = if rownum == 0 then H.th else H.td
  let rowclass = case rownum of
        0                  -> "header"
        x | x `rem` 2 == 1 -> "odd"
        _                  -> "even"
  cols'' <- zipWithM
            (\alignment item -> tableItemToHtml opts mkcell alignment item)
            aligns cols'
  return $ (H.tr ! A.class_ rowclass $ nl opts >> mconcat cols'')
           >> nl opts
-- | CSS\/HTML alignment keyword for a table 'Alignment'; the default
-- alignment maps to the empty string (meaning: emit no attribute).
alignmentToString :: Alignment -> [Char]
alignmentToString AlignLeft    = "left"
alignmentToString AlignRight   = "right"
alignmentToString AlignCenter  = "center"
alignmentToString AlignDefault = ""
-- | Render a single table cell with the given cell constructor
-- (@<th>@ or @<td>@) and alignment.  HTML5 output uses an inline
-- @text-align@ style; HTML4 uses the legacy @align@ attribute.
tableItemToHtml :: PandocMonad m
                => WriterOptions
                -> (Html -> Html)  -- ^ cell wrapper ('H.th' or 'H.td')
                -> Alignment
                -> [Block]
                -> StateT WriterState m Html
tableItemToHtml opts tag' align' item = do
  contents <- blockListToHtml opts item
  html5 <- gets stHtml5
  let alignStr = alignmentToString align'
  let attribs = if html5
                   then A.style (toValue $ "text-align: " <> alignStr <> ";")
                   else A.align (toValue alignStr)
  -- AlignDefault yields an empty string: emit no attribute at all
  let tag'' = if null alignStr
                 then tag'
                 else tag' ! attribs
  return $ tag'' contents >> nl opts
-- | Wrap each rendered item in @<li>@ and append a trailing newline.
toListItems :: WriterOptions -> [Html] -> [Html]
toListItems opts items = map (toListItem opts) items ++ [nl opts]

-- | Wrap a single rendered item in @<li>@, preceded by a newline.
toListItem :: WriterOptions -> Html -> Html
toListItem opts item = nl opts >> H.li item
-- | Convert a list of Pandoc blocks to HTML, separated by newlines
-- (when wrapping is enabled).  Blocks that render to nothing are
-- dropped first so they don't produce stray separators.
blockListToHtml :: PandocMonad m
                => WriterOptions -> [Block] -> StateT WriterState m Html
blockListToHtml opts lst =
  mconcat . intersperse (nl opts) . filter nonempty
    <$> mapM (blockToHtml opts) lst
  where nonempty (Empty _) = False
        nonempty _         = True
-- | Convert list of Pandoc inline elements to HTML.
inlineListToHtml :: PandocMonad m => WriterOptions -> [Inline] -> StateT WriterState m Html
inlineListToHtml opts lst = mconcat <$> mapM (inlineToHtml opts) lst
-- | Annotates a MathML expression with the TeX source: wraps the
-- element's children in a @semantics@ element whose @annotation@
-- child carries the raw TeX with encoding @application/x-tex@.
annotateMML :: XML.Element -> Text -> XML.Element
annotateMML e tex = math (unode "semantics" [cs, unode "annotation" (annotAttrs, T.unpack tex)])
  where
    -- normalize the original children into a single node
    cs = case elChildren e of
          []  -> unode "mrow" ()
          [x] -> x
          xs  -> unode "mrow" xs
    -- rebuild the top-level element, keeping its name, attributes and
    -- source line, but replacing the children
    math childs = XML.Element q as [XML.Elem childs] l
      where
        (XML.Element q as _ l) = e
    annotAttrs = [XML.Attr (unqual "encoding") "application/x-tex"]
-- | Convert Pandoc inline element to HTML.
inlineToHtml :: PandocMonad m
             => WriterOptions -> Inline -> StateT WriterState m Html
inlineToHtml opts inline = do
  html5 <- gets stHtml5
  case inline of
    (Str str)   -> return $ strToHtml str
    Space       -> return $ strToHtml " "
    SoftBreak   -> return $ case writerWrapText opts of
                                 WrapNone     -> preEscapedText " "
                                 WrapAuto     -> preEscapedText " "
                                 WrapPreserve -> preEscapedText "\n"
    LineBreak   -> return $ do
                     if html5 then H5.br else H.br
                     strToHtml "\n"
    (Span (id',classes,kvs) ils) ->
      -- a span whose first class names a span-like HTML element
      -- (e.g. kbd, mark) is rendered as that element instead of <span>
      let spanLikeTag = case classes of
            (c:_) -> do
              guard (c `Set.member` htmlSpanLikeElements)
              pure $ customParent (textTag c)
            _ -> Nothing
      in case spanLikeTag of
          Just tag -> do
            h <- inlineListToHtml opts ils
            -- drop the head of classes' (the tag name itself); the
            -- csl-no-* classes were filtered, so it is still first
            addAttrs opts (id',tail classes',kvs') $ tag h
          Nothing -> do
            h <- inlineListToHtml opts ils
            addAttrs opts (id',classes',kvs') (H.span h)
      where
        -- citeproc's csl-no-* classes are translated into inline styles
        styles = ["font-style:normal;"
                    | "csl-no-emph" `elem` classes]
              ++ ["font-weight:normal;"
                    | "csl-no-strong" `elem` classes]
              ++ ["font-variant:normal;"
                    | "csl-no-smallcaps" `elem` classes]
        kvs' = if null styles
                  then kvs
                  else ("style", T.concat styles) : kvs
        classes' = [ c | c <- classes
                       , c `notElem` [ "csl-no-emph"
                                     , "csl-no-strong"
                                     , "csl-no-smallcaps"
                                     ]
                   ]
    (Emph lst)   -> H.em <$> inlineListToHtml opts lst
    (Strong lst) -> H.strong <$> inlineListToHtml opts lst
    (Code attr@(ids,cs,kvs) str)
                 -> case hlCode of
                         Left msg -> do
                           -- highlighting unavailable or failed: plain <code>
                           unless (T.null msg) $
                             report $ CouldNotHighlight msg
                           addAttrs opts (ids,cs',kvs) $
                             fromMaybe H.code sampOrVar $
                             strToHtml str
                         Right h -> do
                           modify $ \st -> st{ stHighlighting = True }
                           addAttrs opts (ids,[],kvs) $
                             fromMaybe id sampOrVar h
                    where hlCode = if isJust (writerHighlightStyle opts)
                                      then highlight
                                             (writerSyntaxMap opts)
                                             formatHtmlInline attr str
                                      else Left ""
                          -- the classes "sample"/"variable" select the
                          -- semantic <samp>/<var> wrappers
                          (sampOrVar,cs')
                            | "sample" `elem` cs =
                                (Just H.samp,"sample" `delete` cs)
                            | "variable" `elem` cs =
                                (Just H.var,"variable" `delete` cs)
                            | otherwise = (Nothing,cs)
    (Strikeout lst)   -> H.del <$> inlineListToHtml opts lst
    (SmallCaps lst)   -> (H.span ! A.class_ "smallcaps") <$>
                           inlineListToHtml opts lst
    (Superscript lst) -> H.sup <$> inlineListToHtml opts lst
    (Subscript lst)   -> H.sub <$> inlineListToHtml opts lst
    (Quoted quoteType lst) ->
      let (leftQuote, rightQuote) = case quoteType of
            SingleQuote -> (strToHtml "‘",
                            strToHtml "’")
            DoubleQuote -> (strToHtml "“",
                            strToHtml "”")
      in if writerHtmlQTags opts
            then do
              modify $ \st -> st{ stQuotes = True }
              -- a single span with a "cite" attribute becomes the
              -- attributes of the <q> element itself
              let (maybeAttr, lst') = case lst of
                    [Span attr@(_, _, kvs) cs]
                      | any ((=="cite") . fst) kvs
                        -> (Just attr, cs)
                    cs -> (Nothing, cs)
              H.q `fmap` inlineListToHtml opts lst'
                >>= maybe return (addAttrs opts) maybeAttr
            else (\x -> leftQuote >> x >> rightQuote)
                   `fmap` inlineListToHtml opts lst
    (Math t str) -> do
      modify (\st -> st {stMath = True})
      let mathClass = toValue $ ("math " :: Text) <>
                      if t == InlineMath then "inline" else "display"
      case writerHTMLMathMethod opts of
        WebTeX url -> do
          -- render math as an image served by an external TeX service
          let imtag = if html5 then H5.img else H.img
          let s = case t of
                       InlineMath  -> "\\textstyle "
                       DisplayMath -> "\\displaystyle "
          let m = imtag ! A.style "vertical-align:middle"
                        ! A.src (toValue $ url <> T.pack (urlEncode (T.unpack $ s <> str)))
                        ! A.alt (toValue str)
                        ! A.title (toValue str)
          let brtag = if html5 then H5.br else H.br
          return $ case t of
                        InlineMath  -> m
                        DisplayMath -> brtag >> m >> brtag
        GladTeX ->
          return $
            customParent (textTag "eq") !
              customAttribute "env"
                (toValue $ if t == InlineMath
                              then ("math" :: Text)
                              else "displaymath") $ strToHtml str
        MathML -> do
          let conf = useShortEmptyTags (const False)
                       defaultConfigPP
          res <- lift $ convertMath writeMathML t str
          case res of
                Right r -> return $ preEscapedString $
                             ppcElement conf (annotateMML r str)
                -- conversion failed: fall back to rendering the
                -- fallback inline inside a span
                Left il -> (H.span ! A.class_ mathClass) <$>
                             inlineToHtml opts il
        MathJax _ -> return $ H.span ! A.class_ mathClass $ toHtml $
          case t of
            InlineMath  -> "\\(" <> str <> "\\)"
            DisplayMath -> "\\[" <> str <> "\\]"
        KaTeX _ -> return $ H.span ! A.class_ mathClass $ toHtml $
          case t of
            InlineMath  -> str
            DisplayMath -> str
        PlainMath -> do
          -- no math renderer: approximate with plain inlines
          x <- lift (texMathToInlines t str) >>= inlineListToHtml opts
          let m = H.span ! A.class_ mathClass $ x
          let brtag = if html5 then H5.br else H.br
          return $ case t of
                        InlineMath  -> m
                        DisplayMath -> brtag >> m >> brtag
    (RawInline f str) -> do
      ishtml <- isRawHtml f
      if ishtml
         then return $ preEscapedText str
         else if (f == Format "latex" || f == Format "tex") &&
                 allowsMathEnvironments (writerHTMLMathMethod opts) &&
                 isMathEnvironment str
                 then inlineToHtml opts $ Math DisplayMath str
                 else do
                   report $ InlineNotRendered inline
                   return mempty
    (Link attr txt (s,_)) | "mailto:" `T.isPrefixOf` s -> do
      linkText <- inlineListToHtml opts txt
      obfuscateLink opts attr linkText s
    (Link (ident,classes,kvs) txt (s,tit)) -> do
      linkText <- inlineListToHtml opts txt
      slideVariant <- gets stSlideVariant
      let s' = case T.uncons s of
                 Just ('#',xs) -> let prefix = if slideVariant == RevealJsSlides
                                                  then "/"
                                                  else writerIdentifierPrefix opts
                                  in "#" <> prefix <> xs
                 _ -> s
      let link = H.a ! A.href (toValue s') $ linkText
      link' <- addAttrs opts (ident, classes, kvs) link
      return $ if T.null tit
                  then link'
                  else link' ! A.title (toValue tit)
    (Image attr txt (s,tit)) -> do
      let alternate = stringify txt
      slideVariant <- gets stSlideVariant
      let isReveal = slideVariant == RevealJsSlides
      attrs <- imgAttrsToHtml opts attr
      let attributes =
            -- reveal.js uses data-src for lazy loading
            (if isReveal
                then customAttribute "data-src" $ toValue s
                else A.src $ toValue s) :
            [A.title $ toValue tit | not (T.null tit)] ++
            attrs
          imageTag = (if html5 then H5.img else H.img
                     , [A.alt $ toValue alternate | not (null txt)] )
          -- video/audio get a fallback link inside the element
          mediaTag tg fallbackTxt =
            let linkTxt = if null txt
                             then fallbackTxt
                             else alternate
            in (tg $ H.a ! A.href (toValue s) $ toHtml linkTxt
               , [A5.controls ""] )
          normSrc = maybe (T.unpack s) uriPath (parseURIReference $ T.unpack s)
          -- choose the element from the source's MIME media category
          (tag, specAttrs) = case mediaCategory normSrc of
            Just "image" -> imageTag
            Just "video" -> mediaTag H5.video "Video"
            Just "audio" -> mediaTag H5.audio "Audio"
            Just _       -> (H5.embed, [])
            _            -> imageTag
      return $ foldl (!) tag $ attributes ++ specAttrs
    -- note: null title included, as in Markdown.pl
    (Note contents) -> do
      notes <- gets stNotes
      let number = length notes + 1
      let ref = tshow number
      htmlContents <- blockListToNote opts ref contents
      epubVersion <- gets stEPUBVersion
      -- push contents onto front of notes
      modify $ \st -> st {stNotes = htmlContents:notes}
      slideVariant <- gets stSlideVariant
      let revealSlash = T.pack ['/' | slideVariant == RevealJsSlides]
      let link = H.a ! A.href (toValue $ "#" <>
                       revealSlash <>
                       writerIdentifierPrefix opts <> "fn" <> ref)
                     ! A.class_ "footnote-ref"
                     ! prefixedId opts ("fnref" <> ref)
                     $ (if isJust epubVersion
                           then id
                           else H.sup)
                     $ toHtml ref
      return $ case epubVersion of
                    Just EPUB3 -> link ! customAttribute "epub:type" "noteref"
                    _ | html5  -> link ! H5.customAttribute
                                    "role" "doc-noteref"
                    _          -> link
    (Cite cits il)-> do contents <- inlineListToHtml opts (walk addRoleToLink il)
                        let citationIds = T.unwords $ map citationId cits
                        let result = H.span ! A.class_ "citation" $ contents
                        return $ if html5
                                    then result ! customAttribute "data-cites" (toValue citationIds)
                                    else result
-- | Tag a link inside a citation with the DPUB-ARIA role
-- \"doc-biblioref\"; every other inline passes through unchanged.
addRoleToLink :: Inline -> Inline
addRoleToLink inln = case inln of
  Link (ident, cls, kvs) ils target ->
    Link (ident, cls, ("role", "doc-biblioref") : kvs) ils target
  _ -> inln
-- | Render the blocks of one footnote as an @<li>@ item with id
-- @fnN@, appending a back-reference link (\"↩\") pointing at the
-- corresponding @fnrefN@ marker in the text.
blockListToNote :: PandocMonad m
                => WriterOptions -> Text -> [Block]
                -> StateT WriterState m Html
blockListToNote opts ref blocks = do
  html5 <- gets stHtml5
  -- If last block is Para or Plain, include the backlink at the end of
  -- that block. Otherwise, insert a new Plain block with the backlink.
  let kvs = [("role","doc-backlink") | html5]
  let backlink = [Link ("",["footnote-back"],kvs)
                    [Str "↩"] ("#" <> "fnref" <> ref,"")]
  let blocks' = if null blocks
                   then []
                   else let lastBlock   = last blocks
                            otherBlocks = init blocks
                        in  case lastBlock of
                                 (Para lst)  -> otherBlocks ++
                                                [Para (lst ++ backlink)]
                                 (Plain lst) -> otherBlocks ++
                                                [Plain (lst ++ backlink)]
                                 _           -> otherBlocks ++ [lastBlock,
                                                Plain backlink]
  contents <- blockListToHtml opts blocks'
  let noteItem = H.li ! prefixedId opts ("fn" <> ref) $ contents
  epubVersion <- gets stEPUBVersion
  let noteItem' = case epubVersion of
                       Just EPUB3 -> noteItem !
                                       customAttribute "epub:type" "footnote"
                       _ | html5  -> noteItem !
                                       customAttribute "role" "doc-endnote"
                       _          -> noteItem
  return $ nl opts >> noteItem'
-- | Test whether a raw TeX snippet is a math environment
-- (@\\begin{env}...@) whose environment is one that MathML-capable
-- renderers handle directly.
--
-- Rewritten with 'T.stripPrefix' instead of @isPrefixOf@ plus a magic
-- @T.drop 7@, so the prefix length cannot drift out of sync.
isMathEnvironment :: Text -> Bool
isMathEnvironment s =
  case T.stripPrefix (T.pack "\\begin{") s of
    -- the environment name is everything up to the closing brace
    Just rest -> T.takeWhile (/= '}') rest `elem` mathmlenvs
    Nothing   -> False
  where
    -- environments supported by MathML/MathJax-style renderers
    mathmlenvs = map T.pack
      [ "align"
      , "align*"
      , "alignat"
      , "alignat*"
      , "aligned"
      , "alignedat"
      , "array"
      , "Bmatrix"
      , "bmatrix"
      , "cases"
      , "CD"
      , "eqnarray"
      , "eqnarray*"
      , "equation"
      , "equation*"
      , "gather"
      , "gather*"
      , "gathered"
      , "matrix"
      , "multline"
      , "multline*"
      , "pmatrix"
      , "smallmatrix"
      , "split"
      , "subarray"
      , "Vmatrix"
      , "vmatrix" ]
-- | Can this math rendering method consume raw TeX math environments
-- directly?  True for MathJax, MathML and WebTeX; false otherwise.
allowsMathEnvironments :: HTMLMathMethod -> Bool
allowsMathEnvironments method =
  case method of
    MathJax _ -> True
    MathML    -> True
    WebTeX _  -> True
    _         -> False
-- | Should raw content in this format be passed through verbatim as
-- HTML?  \"html\" and \"html4\" always pass; \"html5\" only when we
-- are emitting HTML5.
isRawHtml :: PandocMonad m => Format -> StateT WriterState m Bool
isRawHtml fmt = do
  isHtml5 <- gets stHtml5
  return $ fmt == Format "html"
        || (isHtml5 && fmt == Format "html5")
        || fmt == Format "html4"
| null | https://raw.githubusercontent.com/grin-compiler/ghc-wpc-sample-programs/0e3a9b8b7cc3fa0da7c77fb7588dd4830fb087f7/pandoc-11df2a3c0f2b1b8e351ad8caaa7cdf583e1b3b2e/src/Text/Pandoc/Writers/HTML.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
^ List of notes
^ Math is used in document
^ <q> tag is used
^ Syntax highlighting is used
^ Use HTML5
^ EPUB version if for epub
^ Slide level
^ Content is in a section (revealjs)
^ Number of code block
Helpers to render HTML with the appropriate function.
See #5469: this prevents iOS from substituting emojis.
| Hard linebreak.
| Convert Pandoc document to Html appropriate for an epub version.
| Convert Pandoc document to Reveal JS HTML slide show.
| Convert Pandoc document to S5 HTML slide show.
| Convert Pandoc document to DZSlides HTML slide show.
warn if empty lang
check for empty pagetitle
result is (title, authors, date, toc, body, new variables)
for backwards compatibility we populate toc
with the contents of the toc, rather than a
boolean:
these should maybe be set in pandoc.hs
| Construct table of contents from list of elements.
in reveal.js, we need #/apples, not #apples:
| Convert list of Note blocks to a footnote <div>.
Assumes notes are sorted.
| Parse a mailto link; return Just (name, domain) or Nothing.
| Obfuscate a "mailto:" link.
autolink
need to use preEscapedString or &'s are escaped to & in URL
malformed email
| Obfuscate character as entity.
| Obfuscate string using entities.
| Create HTML tag with attributes.
^ True for HTML5
^ True if self-closing tag
^ Tag text
^ Pandoc style tag attributes
e.g. epub: namespace
| Convert Pandoc block element to HTML.
a "stretched" image in reveal.js must be a direct child
of the slide container
title beginning with fig: indicates that the image is a figure
marker for hrule
title slides have no content of their own
slide presentaiton format.
we set writerIdentifierPrefix to "" since id'' already
includes it:
in S5, treat list in blockquote specially
if default is incremental, make it nonincremental;
otherwise incremental
table, or some browsers give us skinny columns with lots of space between:
| Convert Pandoc inline element to HTML.
reveal.js uses data-src for lazy loading
push contents onto front of notes | # LANGUAGE ScopedTypeVariables #
# LANGUAGE ViewPatterns #
{- |
   Module      : Text.Pandoc.Writers.HTML
   Copyright   : Copyright (C) 2006-2020 John MacFarlane
   License     : GNU GPL, version 2 or above

   Maintainer  : John MacFarlane <>
   Stability   : alpha
   Portability : portable

Conversion of 'Pandoc' documents to HTML.
-}
module Text.Pandoc.Writers.HTML (
writeHtml4,
writeHtml4String,
writeHtml5,
writeHtml5String,
writeHtmlStringForEPUB,
writeS5,
writeSlidy,
writeSlideous,
writeDZSlides,
writeRevealJs,
tagWithAttributes
) where
import Control.Monad.State.Strict
import Data.Char (ord)
import Data.List (intercalate, intersperse, partition, delete, (\\))
import Data.Maybe (fromMaybe, isJust, isNothing, mapMaybe)
import qualified Data.Set as Set
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Network.HTTP (urlEncode)
import Network.URI (URI (..), parseURIReference)
import Numeric (showHex)
import Text.DocLayout (render, literal)
import Text.Blaze.Internal (MarkupM (Empty), customLeaf, customParent)
import Text.DocTemplates (FromContext (lookupContext), Context (..))
import Text.Blaze.Html hiding (contents)
import Text.Pandoc.Definition
import Text.Pandoc.Highlighting (formatHtmlBlock, formatHtmlInline, highlight,
styleToCss)
import Text.Pandoc.ImageSize
import Text.Pandoc.Options
import Text.Pandoc.Shared
import Text.Pandoc.Slides
import Text.Pandoc.Templates (renderTemplate)
import Text.Pandoc.Walk
import Text.Pandoc.Writers.Math
import Text.Pandoc.Writers.Shared
import Text.Pandoc.XML (escapeStringForXML, fromEntities, toEntities,
html5Attributes, html4Attributes, rdfaAttributes)
import qualified Text.Blaze.XHtml5 as H5
import qualified Text.Blaze.XHtml5.Attributes as A5
import Control.Monad.Except (throwError)
import System.FilePath (takeBaseName)
import Text.Blaze.Html.Renderer.Text (renderHtml)
import qualified Text.Blaze.XHtml1.Transitional as H
import qualified Text.Blaze.XHtml1.Transitional.Attributes as A
import Text.Pandoc.Class.PandocMonad (PandocMonad, report)
import Text.Pandoc.Class.PandocPure (runPure)
import Text.Pandoc.Error
import Text.Pandoc.Logging
import Text.Pandoc.MIME (mediaCategory)
import Text.TeXMath
import Text.XML.Light (elChildren, unode, unqual)
import qualified Text.XML.Light as XML
import Text.XML.Light.Output
-- | Mutable state threaded through the writer.
-- NOTE(review): most field lines were elided in this copy of the file;
-- the set below is reconstructed from 'defaultWriterState' and the
-- record updates/reads elsewhere in the module — confirm the exact
-- types against upstream.
data WriterState = WriterState
  { stNotes        :: [Html]             -- ^ accumulated footnotes (newest first)
  , stMath         :: Bool               -- ^ math is used in document
  , stQuotes       :: Bool               -- ^ <q> tag is used
  , stHighlighting :: Bool               -- ^ syntax highlighting is used
  , stHtml5        :: Bool               -- ^ use HTML5
  , stEPUBVersion  :: Maybe EPUBVersion  -- ^ EPUB version if for epub
  , stSlideLevel   :: Int                -- ^ slide level
  , stSlideVariant :: HTMLSlideVariant
  , stInSection    :: Bool               -- ^ content is in a section (revealjs)
  , stCodeBlockNum :: Int                -- ^ number of code block
  }
-- | Initial writer state: HTML4, no slide variant, nothing
-- accumulated yet.
defaultWriterState :: WriterState
defaultWriterState = WriterState {stNotes= [], stMath = False, stQuotes = False,
                                  stHighlighting = False,
                                  stHtml5 = False,
                                  stEPUBVersion = Nothing,
                                  stSlideVariant = NoSlides,
                                  stSlideLevel = 1,
                                  stInSection = False,
                                  stCodeBlockNum = 0}
-- | Escape a 'Text' for HTML output.  Beyond blaze's own escaping
-- this pre-escapes single and double quotes, and inserts a text
-- variation selector (U+FE0E) after characters that browsers would
-- otherwise render as emoji (see pandoc issue #5469).
strToHtml :: Text -> Html
strToHtml = strToHtml' . T.unpack
  where
    strToHtml' ('\'':xs) = preEscapedString "\'" `mappend` strToHtml' xs
    strToHtml' ('"' :xs) = preEscapedString "\"" `mappend` strToHtml' xs
    strToHtml' (x:xs) | needsVariationSelector x
                      = preEscapedString [x, '\xFE0E'] `mappend`
                        -- don't emit a second selector if the input
                        -- already carries one
                        case xs of
                          ('\xFE0E':ys) -> strToHtml' ys
                          _             -> strToHtml' xs
    -- fast path: emit the longest run free of special characters
    strToHtml' xs@(_:_) = case break (\c -> c == '\'' || c == '"' ||
                                            needsVariationSelector c) xs of
                            (_ ,[]) -> toHtml xs
                            (ys,zs) -> toHtml ys `mappend` strToHtml' zs
    strToHtml' [] = ""
-- | True for characters that need a text variation selector (U+FE0E)
-- appended so browsers render them as text rather than emoji:
-- U+21A9 (↩) and U+2194 (↔).
needsVariationSelector :: Char -> Bool
needsVariationSelector c = c `elem` ['\x21A9', '\x2194']
-- | A newline separator between rendered elements, or nothing at all
-- when wrapping is disabled.
nl :: WriterOptions -> Html
nl opts = if writerWrapText opts == WrapNone
             then mempty
             else preEscapedString "\n"
-- | Convert Pandoc document to Html5 string.
writeHtml5String :: PandocMonad m => WriterOptions -> Pandoc -> m Text
writeHtml5String = writeHtmlString'
                      defaultWriterState{ stHtml5 = True }

-- | Convert Pandoc document to Html5 structure.
writeHtml5 :: PandocMonad m => WriterOptions -> Pandoc -> m Html
writeHtml5 = writeHtml' defaultWriterState{ stHtml5 = True }

-- | Convert Pandoc document to Html4 string.
writeHtml4String :: PandocMonad m => WriterOptions -> Pandoc -> m Text
writeHtml4String = writeHtmlString'
                      defaultWriterState{ stHtml5 = False }

-- | Convert Pandoc document to Html4 structure.
writeHtml4 :: PandocMonad m => WriterOptions -> Pandoc -> m Html
writeHtml4 = writeHtml' defaultWriterState{ stHtml5 = False }

-- | Convert Pandoc document to HTML string appropriate for an EPUB
-- version (HTML5 markup for EPUB3, HTML4 otherwise).
writeHtmlStringForEPUB :: PandocMonad m
                       => EPUBVersion -> WriterOptions -> Pandoc
                       -> m Text
writeHtmlStringForEPUB version o = writeHtmlString'
                                      defaultWriterState{ stHtml5 = version == EPUB3,
                                                          stEPUBVersion = Just version } o

-- | Convert Pandoc document to Reveal.js HTML slide show.
writeRevealJs :: PandocMonad m
              => WriterOptions -> Pandoc -> m Text
writeRevealJs = writeHtmlSlideShow' RevealJsSlides

-- | Convert Pandoc document to S5 HTML slide show.
writeS5 :: PandocMonad m
        => WriterOptions -> Pandoc -> m Text
writeS5 = writeHtmlSlideShow' S5Slides

-- | Convert Pandoc document to Slidy HTML slide show.
writeSlidy :: PandocMonad m
           => WriterOptions -> Pandoc -> m Text
writeSlidy = writeHtmlSlideShow' SlidySlides

-- | Convert Pandoc document to Slideous HTML slide show.
writeSlideous :: PandocMonad m
              => WriterOptions -> Pandoc -> m Text
writeSlideous = writeHtmlSlideShow' SlideousSlides

-- | Convert Pandoc document to DZSlides HTML slide show.
writeDZSlides :: PandocMonad m
              => WriterOptions -> Pandoc -> m Text
writeDZSlides = writeHtmlSlideShow' DZSlides
-- | Shared driver for the slide-show writers: fixes the slide variant
-- and whether that variant is emitted as HTML5.
writeHtmlSlideShow' :: PandocMonad m
                    => HTMLSlideVariant -> WriterOptions -> Pandoc -> m Text
writeHtmlSlideShow' variant = writeHtmlString'
                                 defaultWriterState{ stSlideVariant = variant
                                                   , stHtml5 = case variant of
                                                                 RevealJsSlides -> True
                                                                 S5Slides       -> False
                                                                 SlidySlides    -> False
                                                                 DZSlides       -> True
                                                                 SlideousSlides -> False
                                                                 NoSlides       -> False
                                                   }
-- | Render blaze 'Html' to strict 'Text'.
renderHtml' :: Html -> Text
renderHtml' = TL.toStrict . renderHtml
-- | Shared string writer: convert the document under the given
-- initial state, apply the template (if any), warn about a missing
-- @lang@, and fall back to a page title derived from the source
-- filename when the metadata provides none.
writeHtmlString' :: PandocMonad m
                 => WriterState -> WriterOptions -> Pandoc -> m Text
writeHtmlString' st opts d = do
  (body, context) <- evalStateT (pandocToHtml opts d) st
  (if writerPreferAscii opts
      then toEntities
      else id) <$>
    case writerTemplate opts of
       Nothing -> return $ renderHtml' body
       Just tpl -> do
         -- warn if empty lang
         when (isNothing (getField "lang" context :: Maybe Text)) $
           report NoLangSpecified
         -- check for empty pagetitle
         context' <-
            case getField "pagetitle" context of
                 Just (s :: Text) | not (T.null s) -> return context
                 _ -> do
                   let fallback = T.pack $
                         case lookupContext "sourcefile"
                              (writerVariables opts) of
                           Nothing    -> "Untitled"
                           Just []    -> "Untitled"
                           Just (x:_) -> takeBaseName $ T.unpack x
                   report $ NoTitleElement fallback
                   return $ resetField "pagetitle" fallback context
         return $ render Nothing $ renderTemplate tpl
                    (defField "body" (renderHtml' body) context')
-- | Like 'writeHtmlString'' but returning the 'Html' structure.  When
-- a template is used or ASCII output is requested we render to text
-- first and re-wrap the result as pre-escaped Html.
writeHtml' :: PandocMonad m => WriterState -> WriterOptions -> Pandoc -> m Html
writeHtml' st opts d =
  case writerTemplate opts of
    Just _ -> preEscapedText <$> writeHtmlString' st opts d
    Nothing
      | writerPreferAscii opts
        -> preEscapedText <$> writeHtmlString' st opts d
      | otherwise -> do
          (body, _) <- evalStateT (pandocToHtml opts d) st
          return body
-- | Convert a Pandoc document to HTML, returning the rendered body
-- together with the template context (metadata, toc, math setup,
-- highlighting CSS, default URLs, ...).
pandocToHtml :: PandocMonad m
             => WriterOptions
             -> Pandoc
             -> StateT WriterState m (Html, Context Text)
pandocToHtml opts (Pandoc meta blocks) = do
  let slideLevel = fromMaybe (getSlideLevel blocks) $ writerSlideLevel opts
  modify $ \st -> st{ stSlideLevel = slideLevel }
  metadata <- metaToContext opts
              (fmap (literal . renderHtml') . blockListToHtml opts)
              (fmap (literal . renderHtml') . inlineListToHtml opts)
              meta
  let stringifyHTML = escapeStringForXML . stringify
  let authsMeta = map stringifyHTML $ docAuthors meta
  let dateMeta = stringifyHTML $ docDate meta
  slideVariant <- gets stSlideVariant
  let sects = adjustNumbers opts $
              makeSections (writerNumberSections opts) Nothing $
              if slideVariant == NoSlides
                 then blocks
                 else prepSlides slideLevel blocks
  toc <- if writerTableOfContents opts && slideVariant /= S5Slides
            then fmap renderHtml' <$> tableOfContents opts sects
            else return Nothing
  blocks' <- blockListToHtml opts sects
  st <- get
  -- notes were accumulated newest-first; restore document order
  notes <- footnoteSection opts (reverse (stNotes st))
  let thebody = blocks' >> notes
  let math = case writerHTMLMathMethod opts of
        MathJax url
          | slideVariant /= RevealJsSlides ->
          -- mathjax is handled via a special plugin in revealjs
            H.script ! A.src (toValue url)
                     ! A.type_ "text/javascript"
                     $ case slideVariant of
                            SlideousSlides ->
                              preEscapedString
                                "MathJax.Hub.Queue([\"Typeset\",MathJax.Hub]);"
                            _ -> mempty
        KaTeX url -> do
          H.script !
            A.src (toValue $ url <> "katex.min.js") $ mempty
          nl opts
          let katexFlushLeft =
                case lookupContext "classoption" metadata of
                  Just clsops | "fleqn" `elem` (clsops :: [Text]) -> "true"
                  _ -> "false"
          H.script $ text $ T.unlines [
              "document.addEventListener(\"DOMContentLoaded\", function () {"
            , " var mathElements = document.getElementsByClassName(\"math\");"
            , " for (var i = 0; i < mathElements.length; i++) {"
            , " var texText = mathElements[i].firstChild;"
            , " if (mathElements[i].tagName == \"SPAN\") {"
            , " katex.render(texText.data, mathElements[i], {"
            , " displayMode: mathElements[i].classList.contains('display'),"
            , " throwOnError: false,"
            , " fleqn: " <> katexFlushLeft
            , " });"
            , "}}});"
            ]
          nl opts
          H.link ! A.rel "stylesheet" !
            A.href (toValue $ url <> "katex.min.css")
        _ -> case lookupContext "mathml-script"
                  (writerVariables opts) of
                Just s | not (stHtml5 st) ->
                         H.script ! A.type_ "text/javascript"
                           $ preEscapedString
                             ("/*<![CDATA[*/\n" ++ T.unpack s ++
                              "/*]]>*/\n")
                       | otherwise -> mempty
                Nothing -> mempty
  let context = (if stHighlighting st
                    then case writerHighlightStyle opts of
                              Just sty -> defField "highlighting-css"
                                            (T.pack $ styleToCss sty)
                              Nothing  -> id
                    else id) $
                (if stMath st
                    then defField "math" (renderHtml' math)
                    else id) $
                (case writerHTMLMathMethod opts of
                      MathJax u -> defField "mathjax" True .
                                   defField "mathjaxurl"
                                     (T.takeWhile (/='?') u)
                      _ -> defField "mathjax" False) $
                defField "quotes" (stQuotes st) $
                -- for backwards compatibility we populate toc
                -- with the contents of the toc, rather than a
                -- boolean:
                maybe id (defField "toc") toc $
                maybe id (defField "table-of-contents") toc $
                defField "author-meta" authsMeta $
                maybe id (defField "date-meta")
                  (normalizeDate dateMeta) $
                defField "pagetitle"
                  (stringifyHTML . docTitle $ meta) $
                defField "idprefix" (writerIdentifierPrefix opts) $
                -- these should maybe be set in pandoc.hs
                -- NOTE(review): the slidy-url default string looks
                -- blanked out in this copy of the file; upstream sets
                -- it to the W3C Slidy URL — confirm before relying on it
                defField "slidy-url"
                  ("" :: Text) $
                defField "slideous-url" ("slideous" :: Text) $
                defField "revealjs-url" ("reveal.js" :: Text) $
                defField "s5-url" ("s5/default" :: Text) $
                defField "html5" (stHtml5 st)
                metadata
  return (thebody, context)
-- | Like blaze's @id@ attribute, but adds the writerIdentifierPrefix;
-- an empty identifier produces no attribute at all.
prefixedId :: WriterOptions -> Text -> Attribute
prefixedId opts s =
  case s of
    "" -> mempty
    _  -> A.id $ toValue $ writerIdentifierPrefix opts <> s
-- | Build a list container, handling incremental display for slide
-- shows: either the whole list gets class \"incremental\", or (for
-- reveal.js) each individual item gets class \"fragment\".
toList :: PandocMonad m
       => (Html -> Html)  -- ^ list constructor, e.g. 'H.ul'
       -> WriterOptions
       -> [Html]          -- ^ rendered list items
       -> StateT WriterState m Html
toList listop opts items = do
  slideVariant <- gets stSlideVariant
  return $
    if writerIncremental opts
       then if slideVariant /= RevealJsSlides
               then listop (mconcat items) ! A.class_ "incremental"
               else listop $ mconcat $ map (! A.class_ "fragment") items
       else listop $ mconcat items
-- | Render a bullet list as @<ul>@.
unordList :: PandocMonad m
          => WriterOptions -> [Html] -> StateT WriterState m Html
unordList opts = toList H.ul opts . toListItems opts

-- | Render an ordered list as @<ol>@.
ordList :: PandocMonad m
        => WriterOptions -> [Html] -> StateT WriterState m Html
ordList opts = toList H.ol opts . toListItems opts

-- | Render a definition list as @<dl>@; the items are already
-- rendered dt/dd pairs, so they are not wrapped in @<li>@.
defList :: PandocMonad m
        => WriterOptions -> [Html] -> StateT WriterState m Html
defList opts items = toList H.dl opts (items ++ [nl opts])
-- | Does this list item start with a GitHub-style task-list marker
-- (\"☐\" unchecked or \"☒\" checked) followed by a space?
isTaskListItem :: [Block] -> Bool
isTaskListItem (firstBlock : _) =
  case firstBlock of
    Plain (Str "☐" : Space : _) -> True
    Plain (Str "☒" : Space : _) -> True
    Para  (Str "☐" : Space : _) -> True
    Para  (Str "☒" : Space : _) -> True
    _                           -> False
isTaskListItem [] = False
-- | Render one list item's blocks, special-casing task-list markers
-- (\"☐\"/\"☒\") as disabled HTML checkboxes.
listItemToHtml :: PandocMonad m
               => WriterOptions -> [Block] -> StateT WriterState m Html
listItemToHtml opts bls
  | Plain (Str "☐":Space:is) : bs <- bls = taskListItem False id is bs
  | Plain (Str "☒":Space:is) : bs <- bls = taskListItem True id is bs
  | Para  (Str "☐":Space:is) : bs <- bls = taskListItem False H.p is bs
  | Para  (Str "☒":Space:is) : bs <- bls = taskListItem True H.p is bs
  | otherwise = blockListToHtml opts bls
  where
    -- replace the unicode marker with a real (disabled) checkbox,
    -- keeping the rest of the first line and any following blocks
    taskListItem checked constr is bs = do
      let checkbox  = if checked
                         then checkbox' ! A.checked ""
                         else checkbox'
          checkbox' = H.input ! A.type_ "checkbox" ! A.disabled "" >> nl opts
      isContents <- inlineListToHtml opts is
      bsContents <- blockListToHtml opts bs
      return $ constr (checkbox >> isContents) >> bsContents
-- | Build a table of contents from the given section blocks; Nothing
-- when there are no sections or the generated TOC is empty.
tableOfContents :: PandocMonad m => WriterOptions -> [Block]
                -> StateT WriterState m (Maybe Html)
tableOfContents _ [] = return Nothing
tableOfContents opts sects = do
  slideVariant <- gets stSlideVariant
  -- in reveal.js, internal links need a "/" before the identifier
  let opts' = case slideVariant of
                RevealJsSlides ->
                  opts{ writerIdentifierPrefix =
                          "/" <> writerIdentifierPrefix opts }
                _ -> opts
  case toTableOfContents opts sects of
    bl@(BulletList (_:_)) -> Just <$> blockToHtml opts' bl
    _ -> return Nothing
-- | Wrap rendered footnotes in a container — an HTML5 <section> (with
-- an epub:type or ARIA role as appropriate) or a <div class="footnotes">
-- — preceded by a horizontal rule.  Returns mempty when there are no
-- notes.
footnoteSection :: PandocMonad m
                => WriterOptions -> [Html] -> StateT WriterState m Html
footnoteSection opts notes = do
  html5 <- gets stHtml5
  slideVariant <- gets stSlideVariant
  let hrtag = if html5 then H5.hr else H.hr
  epubVersion <- gets stEPUBVersion
  let container x
        | html5
        , epubVersion == Just EPUB3
        = H5.section ! A.class_ "footnotes"
                     ! customAttribute "epub:type" "footnotes" $ x
        | html5 = H5.section ! A.class_ "footnotes"
                             ! customAttribute "role" "doc-endnotes"
                             $ x
        | slideVariant /= NoSlides = H.div ! A.class_ "footnotes slide" $ x
        | otherwise = H.div ! A.class_ "footnotes" $ x
  return $
    if null notes
       then mempty
       else nl opts >> container (nl opts >> hrtag >> nl opts >>
              H.ol (mconcat notes >> nl opts) >> nl opts)
-- | Split a mailto: URL (scheme matched case-insensitively) into its
-- local part and domain; Nothing for any other kind of URL.
parseMailto :: Text -> Maybe (Text, Text)
parseMailto s =
  case T.break (== ':') s of
    (scheme, rest)
      | T.toLower scheme == T.pack "mailto"
      , Just (':', addr) <- T.uncons rest ->
          let (localPart, atAndDomain) = T.span (/= '@') addr
          in  Just (localPart, T.drop 1 atAndDomain)
    _ -> Nothing
-- | Obfuscate a mailto: link according to the writer's obfuscation
-- method (none, entity references, or javascript).
-- FIX: two dropped source lines restored — the autolink condition
-- ("if txt == T.drop 7 s'") for (linkText, altText), and the fallback
-- branch of the outer case for malformed mailto URLs.
obfuscateLink :: PandocMonad m
              => WriterOptions -> Attr -> Html -> Text
              -> StateT WriterState m Html
obfuscateLink opts attr txt s | writerEmailObfuscation opts == NoObfuscation =
  addAttrs opts attr $ H.a ! A.href (toValue s) $ txt
obfuscateLink opts attr (TL.toStrict . renderHtml -> txt) s =
  let meth = writerEmailObfuscation opts
      -- normalize the scheme to lower case
      s' = T.toLower (T.take 7 s) <> T.drop 7 s
  in  case parseMailto s' of
        (Just (name', domain)) ->
          let domain' = T.replace "." " dot " domain
              at' = obfuscateChar '@'
              -- an autolink's visible text is the address itself
              (linkText, altText) =
                 if txt == T.drop 7 s' -- autolink
                    then ("e", name' <> " at " <> domain')
                    else ("'" <> obfuscateString txt <> "'",
                          txt <> " (" <> name' <> " at " <> domain' <> ")")
              (_, classNames, _) = attr
              classNamesStr = T.concat $ map (" "<>) classNames
          in  case meth of
                ReferenceObfuscation ->
                  -- need preEscaped here to prevent double escaping
                  return $
                    preEscapedText $ "<a href=\"" <> obfuscateString s'
                    <> "\" class=\"email\">" <> obfuscateString txt <> "</a>"
                JavascriptObfuscation ->
                  return $
                    (H.script ! A.type_ "text/javascript" $
                      preEscapedText ("\n<!--\nh='" <>
                        obfuscateString domain <> "';a='" <> at' <> "';n='" <>
                        obfuscateString name' <> "';e=n+a+h;\n" <>
                        "document.write('<a h'+'ref'+'=\"ma'+'ilto'+':'+e+'\" clas'+'s=\"em' + 'ail" <>
                        classNamesStr <> "\">'+" <>
                        linkText <> "+'<\\/'+'a'+'>');\n// -->\n")) >>
                    H.noscript (preEscapedText $ obfuscateString altText)
                _ -> throwError $ PandocSomeError $ "Unknown obfuscation method: " <> tshow meth
        _ -> addAttrs opts attr $ H.a ! A.href (toValue s) $ toHtml txt  -- malformed email
-- | Encode a character as an HTML numeric entity, alternating between
-- decimal (even code points) and hexadecimal (odd code points) so the
-- result is harder for harvesters to match.
obfuscateChar :: Char -> Text
obfuscateChar c =
  let code = ord c
      digits
        | even code = show code
        | otherwise = 'x' : showHex code ""
  in  T.pack ("&#" <> digits <> ";")
-- | Obfuscate every character of a string (entities resolved first).
obfuscateString :: Text -> Text
obfuscateString t = T.concatMap obfuscateChar (fromEntities t)
-- | Render a standalone HTML tag (as text) with pandoc-style attributes
-- applied; yields mempty if rendering fails.
-- FIX: the middle of the type signature (the two Bool flags, the tag
-- name and the Attr) had been dropped; restored.
tagWithAttributes :: WriterOptions
                  -> Bool -- ^ True for HTML5
                  -> Bool -- ^ True for a self-closing tag
                  -> Text -- ^ tag name
                  -> Attr -- ^ pandoc-style attributes
                  -> Text
tagWithAttributes opts html5 selfClosing tagname attr =
  let mktag = (TL.toStrict . renderHtml <$> evalStateT
                 (addAttrs opts attr (customLeaf (textTag tagname) selfClosing))
                 defaultWriterState{ stHtml5 = html5 })
  in  case runPure mktag of
        Left _  -> mempty
        Right t -> t
-- | Apply pandoc-style attributes to an HTML element.
addAttrs :: PandocMonad m
         => WriterOptions -> Attr -> Html -> StateT WriterState m Html
addAttrs opts attr h = do
  attributes <- attrsToHtml opts attr
  return $ foldl (!) h attributes
-- | Convert pandoc key/value pairs to HTML attributes.  In HTML5
-- output, keys that are not known HTML5/RDFa attributes (and are not
-- already data-/aria- prefixed) get a "data-" prefix; in EPUB2 output,
-- keys unknown to HTML4/RDFa (and not xml:-prefixed) are dropped.
toAttrs :: PandocMonad m
        => [(Text, Text)] -> StateT WriterState m [Attribute]
toAttrs kvs = do
  html5 <- gets stHtml5
  mbEpubVersion <- gets stEPUBVersion
  return $ mapMaybe (\(x,y) ->
    if html5
       then
         if x `Set.member` (html5Attributes <> rdfaAttributes)
              || "data-" `T.isPrefixOf` x
              || "aria-" `T.isPrefixOf` x
            then Just $ customAttribute (textTag x) (toValue y)
            else Just $ customAttribute (textTag ("data-" <> x))
                          (toValue y)
       else
         if mbEpubVersion == Just EPUB2 &&
            not (x `Set.member` (html4Attributes <> rdfaAttributes) ||
                 "xml:" `T.isPrefixOf` x)
            then Nothing
            else Just $ customAttribute (textTag x) (toValue y))
    kvs
-- | Convert a full pandoc Attr (identifier, classes, key/values) to a
-- list of HTML attributes; empty identifier and class lists contribute
-- nothing.
attrsToHtml :: PandocMonad m
            => WriterOptions -> Attr -> StateT WriterState m [Attribute]
attrsToHtml opts (ident, classes, kvs) = do
  kvAttrs <- toAttrs kvs
  let idAttr    = [prefixedId opts ident | not (T.null ident)]
      classAttr = [A.class_ (toValue $ T.unwords classes) | not (null classes)]
  return $ idAttr ++ classAttr ++ kvAttrs
-- | Attributes for an image tag: the regular attributes minus any
-- width/height key-values, plus width/height re-rendered as proper
-- dimension attributes (see 'dimensionsToAttrList').
imgAttrsToHtml :: PandocMonad m
               => WriterOptions -> Attr -> StateT WriterState m [Attribute]
imgAttrsToHtml opts attr = do
  attrs <- attrsToHtml opts (ident,cls,kvs')
  dimattrs <- toAttrs (dimensionsToAttrList attr)
  return $ attrs ++ dimattrs
  where
    (ident,cls,kvs) = attr
    -- width/height are dropped here and re-added via dimensionsToAttrList
    kvs' = filter isNotDim kvs
    isNotDim ("width", _) = False
    isNotDim ("height", _) = False
    isNotDim _ = True
-- | Turn the width/height of an Attr into attribute key/values: pixel
-- sizes become plain width="…"/height="…" pairs, while other units are
-- merged into a single style="width:…;height:…" attribute.
dimensionsToAttrList :: Attr -> [(Text, Text)]
dimensionsToAttrList attr = consolidateStyles $ go Width ++ go Height
  where
    -- merge multiple "style" pairs into one, ";"-separated
    consolidateStyles :: [(Text, Text)] -> [(Text, Text)]
    consolidateStyles xs =
      case partition isStyle xs of
           ([], _) -> xs
           (ss, rest) -> ("style", T.intercalate ";" $ map snd ss) : rest
    isStyle ("style", _) = True
    isStyle _ = False
    go dir = case dimension dir attr of
               (Just (Pixel a)) -> [(tshow dir, tshow a)]
               (Just x) -> [("style", tshow dir <> ":" <> tshow x)]
               Nothing -> []
-- | Render an image with a caption as an HTML5 <figure>/<figcaption>,
-- or for HTML4 as a div.figure containing a p.caption.
figure :: PandocMonad m
       => WriterOptions -> Attr -> [Inline] -> (Text, Text)
       -> StateT WriterState m Html
figure opts attr txt (s,tit) = do
  img <- inlineToHtml opts (Image attr [Str ""] (s,tit))
  html5 <- gets stHtml5
  let tocapt = if html5
                  then H5.figcaption
                  else H.p ! A.class_ "caption"
  capt <- if null txt
             then return mempty
             else tocapt `fmap` inlineListToHtml opts txt
  return $ if html5
              then H5.figure $ mconcat
                     [nl opts, img, capt, nl opts]
              else H.div ! A.class_ "figure" $ mconcat
                     [nl opts, img, nl opts, capt, nl opts]
-- | Add writerNumberOffset to the "number" attribute of every header;
-- a no-op when every offset component is 0.
adjustNumbers :: WriterOptions -> [Block] -> [Block]
adjustNumbers opts doc =
  if all (==0) (writerNumberOffset opts)
     then doc
     else walk go doc
  where
    go (Header level (ident,classes,kvs) lst) =
      Header level (ident,classes,map fixnum kvs) lst
    go x = x
    -- offset each dotted section-number component; missing offsets are 0
    fixnum ("number",num) = ("number",
                             showSecNum $ zipWith (+)
                               (writerNumberOffset opts ++ repeat 0)
                               (map (fromMaybe 0 . safeRead) $
                                T.split (=='.') num))
    fixnum x = x
    showSecNum = T.intercalate "." . map tshow
-- | Convert a Pandoc block element to HTML.
blockToHtml :: PandocMonad m => WriterOptions -> Block -> StateT WriterState m Html
blockToHtml _ Null = return mempty
blockToHtml opts (Plain lst) = inlineListToHtml opts lst
-- a "stretched" image in reveal.js is rendered bare, not as a figure
blockToHtml opts (Para [Image attr@(_,classes,_) txt (src,tit)])
  | "stretch" `elem` classes = do
  slideVariant <- gets stSlideVariant
  case slideVariant of
       RevealJsSlides ->
         -- a "stretched" image must be a direct child of the slide
         inlineToHtml opts (Image attr txt (src, tit))
       _ -> figure opts attr txt (src, tit)
-- a title beginning with "fig:" indicates that the image is a figure
blockToHtml opts (Para [Image attr txt (s,T.stripPrefix "fig:" -> Just tit)]) =
  figure opts attr txt (s,tit)
blockToHtml opts (Para lst) = do
  contents <- inlineListToHtml opts lst
  case contents of
       -- drop empty paragraphs unless the extension keeps them
       Empty _ | not (isEnabled Ext_empty_paragraphs opts) -> return mempty
       _ -> return $ H.p contents
blockToHtml opts (LineBlock lns) =
  if writerWrapText opts == WrapNone
     then blockToHtml opts $ linesToPara lns
     else do
       htmlLines <- inlineListToHtml opts $ intercalate [LineBreak] lns
       return $ H.div ! A.class_ "line-block" $ htmlLines
-- | A Div with class "section" (as produced by makeSections) wrapping
-- a header: render as a slide, <section>, or plain div as appropriate.
-- FIX: three dropped source lines restored — the "level <= slideLevel"
-- conjunct of [slide], the "then 1" branch of [level'], and the
-- "header' <- if ils == [Str \"\\0\"]" binding — plus the comment
-- markers of several garbled comments.
blockToHtml opts (Div (ident, "section":dclasses, dkvs)
                   (Header level
                     hattr@(hident,hclasses,hkvs) ils : xs)) = do
  slideVariant <- gets stSlideVariant
  slideLevel <- gets stSlideLevel
  let slide = slideVariant /= NoSlides &&
               level <= slideLevel {- DROPPED old fix for #5168 here -}
  html5 <- gets stHtml5
  let titleSlide = slide && level < slideLevel
  let level' = if level <= slideLevel && slideVariant == SlidySlides
                  then 1 -- see #3566
                  else level
  header' <- if ils == [Str "\0"]  -- marker for hrule
                then return mempty
                else blockToHtml opts (Header level' hattr ils)
  let isSec (Div (_,"section":_,_) _) = True
      isSec (Div _ zs)                = any isSec zs
      isSec _                         = False
  let isPause (Para [Str ".",Space,Str ".",Space,Str "."]) = True
      isPause _                                            = False
  let fragmentClass = case slideVariant of
                           RevealJsSlides -> "fragment"
                           _              -> "incremental"
  let inDiv zs = RawBlock (Format "html") ("<div class=\""
                   <> fragmentClass <> "\">") :
                 (zs ++ [RawBlock (Format "html") "</div>"])
  let breakOnPauses zs = case splitBy isPause zs of
                           []   -> []
                           y:ys -> y ++ concatMap inDiv ys
  let (titleBlocks, innerSecs) =
        if titleSlide
           then let (as, bs) = break isSec xs
                in  (breakOnPauses as, bs)
           else ([], breakOnPauses xs)
  let secttag = if html5
                   then H5.section
                   else H.div
  titleContents <- blockListToHtml opts titleBlocks
  inSection <- gets stInSection
  innerContents <- do
    modify $ \st -> st{ stInSection = True }
    res <- blockListToHtml opts innerSecs
    modify $ \st -> st{ stInSection = inSection }
    return res
  let classes' = ordNub $
                  ["title-slide" | titleSlide] ++ ["slide" | slide] ++
                  ["section" | (slide || writerSectionDivs opts) &&
                               not html5 ] ++
                  ["level" <> tshow level | slide || writerSectionDivs opts ]
                  <> dclasses
  let attr = (ident, classes', dkvs)
  if titleSlide
     then do
       t <- addAttrs opts attr $
              secttag $ nl opts <> header' <> nl opts <> titleContents <> nl opts
       -- ensure 2D nesting for revealjs, but only for one level;
       -- revealjs doesn't like more than one level of nesting
       return $
         if slideVariant == RevealJsSlides && not inSection &&
            not (null innerSecs)
            then H5.section (nl opts <> t <> nl opts <> innerContents)
            else t <> nl opts <> if null innerSecs
                                    then mempty
                                    else innerContents <> nl opts
     else if writerSectionDivs opts || slide ||
             (hident /= ident && not (T.null hident || T.null ident)) ||
             (hclasses /= dclasses) || (hkvs /= dkvs)
          then addAttrs opts attr
               $ secttag
               $ nl opts <> header' <> nl opts <>
                 if null innerSecs
                    then mempty
                    else innerContents <> nl opts
          else do
            let attr' = (ident, classes' \\ hclasses, dkvs \\ hkvs)
            t <- addAttrs opts attr' header'
            return $ t <>
                     if null innerSecs
                        then mempty
                        else nl opts <> innerContents
-- | A generic Div: handle column widths, bibliography roles, speaker
-- notes, and incremental/nonincremental toggles.
-- FIX: the dropped "then" keyword of the columns branch and the comment
-- markers of several garbled comments were restored.
blockToHtml opts (Div attr@(ident, classes, kvs') bs) = do
  html5 <- gets stHtml5
  slideVariant <- gets stSlideVariant
  let kvs = [(k,v) | (k,v) <- kvs', k /= "width"] ++
            [("style", "width:" <> w <> ";") | "column" `elem` classes,
             ("width", w) <- kvs'] ++
            [("role", "doc-bibliography") | ident == "refs" && html5] ++
            [("role", "doc-biblioentry")
              | "ref-item" `T.isPrefixOf` ident && html5]
  let speakerNotes = "notes" `elem` classes
  -- we don't want incremental output inside speaker notes, see #1394
  let opts' = if | speakerNotes -> opts{ writerIncremental = False }
                 | "incremental" `elem` classes -> opts{ writerIncremental = True }
                 | "nonincremental" `elem` classes -> opts{ writerIncremental = False }
                 | otherwise -> opts
      -- we remove "incremental" and "nonincremental" if we're in a
      -- slide presentation format
      classes' = case slideVariant of
        NoSlides -> classes
        _ -> filter (\k -> k /= "incremental" && k /= "nonincremental") classes
  contents <- if "columns" `elem` classes'
                 then -- we don't use blockListToHtml because it inserts
                      -- a newline between the column divs, which throws
                      -- off widths! see #4028
                      mconcat <$> mapM (blockToHtml opts) bs
                 else blockListToHtml opts' bs
  let contents' = nl opts >> contents >> nl opts
  let (divtag, classes'') = if html5 && "section" `elem` classes'
                               then (H5.section, filter (/= "section") classes')
                               else (H.div, classes')
  if speakerNotes
     then case slideVariant of
               RevealJsSlides -> addAttrs opts' attr $
                                   H5.aside contents'
               DZSlides -> do
                 t <- addAttrs opts' attr $
                        H5.div contents'
                 return $ t ! H5.customAttribute "role" "note"
               NoSlides -> addAttrs opts' attr $
                             H.div contents'
               _ -> return mempty
     else addAttrs opts (ident, classes'', kvs) $
            divtag contents'
-- Raw HTML passes through unescaped; raw LaTeX math environments are
-- re-rendered as display math when the math method supports them; any
-- other raw block is dropped with a warning.
blockToHtml opts (RawBlock f str) = do
  ishtml <- isRawHtml f
  if ishtml
     then return $ preEscapedText str
     else if (f == Format "latex" || f == Format "tex") &&
             allowsMathEnvironments (writerHTMLMathMethod opts) &&
             isMathEnvironment str
          then blockToHtml opts $ Plain [Math DisplayMath str]
          else do
            report $ BlockNotRendered (RawBlock f str)
            return mempty
-- A horizontal rule, in its HTML5 or HTML4 flavor.
blockToHtml _ HorizontalRule = do
  html5 <- gets stHtml5
  if html5
     then return H5.hr
     else return H.hr
-- A code block: auto-assign an id ("cb<N>") when none is given, handle
-- literate Haskell, and run the syntax highlighter when a highlight
-- style is configured (falling back to plain <pre><code>).
blockToHtml opts (CodeBlock (id',classes,keyvals) rawCode) = do
  id'' <- if T.null id'
             then do
               modify $ \st -> st{ stCodeBlockNum = stCodeBlockNum st + 1 }
               codeblocknum <- gets stCodeBlockNum
               return (writerIdentifierPrefix opts <> "cb" <> tshow codeblocknum)
             else return (writerIdentifierPrefix opts <> id')
  let tolhs = isEnabled Ext_literate_haskell opts &&
              any (\c -> T.toLower c == "haskell") classes &&
              any (\c -> T.toLower c == "literate") classes
      classes' = if tolhs
                    then map (\c -> if T.toLower c == "haskell"
                                       then "literatehaskell"
                                       else c) classes
                    else classes
      -- literate haskell: prefix each line with "> "
      adjCode = if tolhs
                   then T.unlines . map ("> " <>) . T.lines $ rawCode
                   else rawCode
      hlCode = if isJust (writerHighlightStyle opts)
                  then highlight (writerSyntaxMap opts) formatHtmlBlock
                         (id'',classes',keyvals) adjCode
                  else Left ""
  case hlCode of
       Left msg -> do
         unless (T.null msg) $
           report $ CouldNotHighlight msg
         addAttrs opts (id',classes,keyvals)
           $ H.pre $ H.code $ toHtml adjCode
       Right h -> modify (\st -> st{ stHighlighting = True }) >>
                  -- the highlighted output already carries the id
                  addAttrs opts{writerIdentifierPrefix = ""} (id'',[],keyvals) h
-- In slide formats, a blockquote wrapping a single list toggles
-- incremental display; otherwise render an ordinary <blockquote>.
blockToHtml opts (BlockQuote blocks) = do
  slideVariant <- gets stSlideVariant
  if slideVariant /= NoSlides
     then let inc = not (writerIncremental opts) in
          case blocks of
             [BulletList lst] -> blockToHtml (opts {writerIncremental = inc})
                                   (BulletList lst)
             [OrderedList attribs lst] ->
                               blockToHtml (opts {writerIncremental = inc})
                                   (OrderedList attribs lst)
             [DefinitionList lst] ->
                               blockToHtml (opts {writerIncremental = inc})
                                   (DefinitionList lst)
             _ -> do contents <- blockListToHtml opts blocks
                     return $ H.blockquote
                            $ nl opts >> contents >> nl opts
     else do
       contents <- blockListToHtml opts blocks
       return $ H.blockquote $ nl opts >> contents >> nl opts
-- A header: prepend the section number (from the "number" attribute)
-- when numbering is on, and map levels 1-6 to h1-h6 (anything else
-- becomes p.heading).
blockToHtml opts (Header level attr@(_,classes,kvs) lst) = do
  contents <- inlineListToHtml opts lst
  let secnum = fromMaybe mempty $ lookup "number" kvs
  let contents' = if writerNumberSections opts && not (T.null secnum)
                     && "unnumbered" `notElem` classes
                     then (H.span ! A.class_ "header-section-number"
                             $ toHtml secnum) >> strToHtml " " >> contents
                     else contents
  addAttrs opts attr
    $ case level of
        1 -> H.h1 contents'
        2 -> H.h2 contents'
        3 -> H.h3 contents'
        4 -> H.h4 contents'
        5 -> H.h5 contents'
        6 -> H.h6 contents'
        _ -> H.p ! A.class_ "heading" $ contents'
-- A bullet list; when every item is a task-list item the <ul> gets a
-- "task-list" class.
blockToHtml opts (BulletList items) = do
  renderedItems <- mapM (listItemToHtml opts) items
  list <- unordList opts renderedItems
  let taskList = not (null items) && all isTaskListItem items
  return $ if taskList
              then list ! A.class_ "task-list"
              else list
-- An ordered list: carry the start number, the "example" class, and
-- the numbering style (type attribute in HTML5, list-style-type
-- otherwise).
blockToHtml opts (OrderedList (startnum, numstyle, _) lst) = do
  contents <- mapM (listItemToHtml opts) lst
  html5 <- gets stHtml5
  let numstyle' = case numstyle of
                       Example -> "decimal"
                       _ -> camelCaseToHyphenated $ tshow numstyle
  let attribs = [A.start $ toValue startnum | startnum /= 1] ++
                [A.class_ "example" | numstyle == Example] ++
                (if numstyle /= DefaultStyle
                    then if html5
                            then [A.type_ $
                                  case numstyle of
                                       Decimal    -> "1"
                                       LowerAlpha -> "a"
                                       UpperAlpha -> "A"
                                       LowerRoman -> "i"
                                       UpperRoman -> "I"
                                       _          -> "1"]
                            else [A.style $ toValue $ "list-style-type: " <>
                                  numstyle']
                    else [])
  l <- ordList opts contents
  return $ foldl (!) l attribs
-- A definition list: each term becomes a <dt>, each definition a <dd>.
blockToHtml opts (DefinitionList lst) = do
  contents <- mapM (\(term, defs) ->
                     do term' <- liftM H.dt $ inlineListToHtml opts term
                        defs' <- mapM (liftM (\x -> H.dd (x >> nl opts)) .
                                       blockListToHtml opts) defs
                        return $ mconcat $ nl opts : term' : nl opts :
                                           intersperse (nl opts) defs') lst
  defList opts contents
-- | A table with optional caption, relative column widths, a header
-- row, and body rows.
-- FIX: the comment before the final return had lost its "--" markers
-- (breaking the do block); restored.
blockToHtml opts (Table capt aligns widths headers rows') = do
  captionDoc <- if null capt
                   then return mempty
                   else do
                     cs <- inlineListToHtml opts capt
                     return $ H.caption cs >> nl opts
  html5 <- gets stHtml5
  let percent w = show (truncate (100*w) :: Integer) <> "%"
  let coltags = if all (== 0.0) widths
                   then mempty
                   else do
                     H.colgroup $ do
                       nl opts
                       mapM_ (\w -> do
                            if html5
                               then H.col ! A.style (toValue $ "width: " <>
                                                      percent w)
                               else H.col ! A.width (toValue $ percent w)
                            nl opts) widths
                     nl opts
  head' <- if all null headers
              then return mempty
              else do
                contents <- tableRowToHtml opts aligns 0 headers
                return $ H.thead (nl opts >> contents) >> nl opts
  body' <- liftM (\x -> H.tbody (nl opts >> mconcat x)) $
             zipWithM (tableRowToHtml opts aligns) [1..] rows'
  let tbl = H.table $
              nl opts >> captionDoc >> coltags >> head' >> body' >> nl opts
  let totalWidth = sum widths
  -- When widths of columns are < 100%, we need to set width for the whole
  -- table, or some browsers give us skinny columns with lots of space
  -- between:
  return $ if totalWidth == 0 || totalWidth == 1
              then tbl
              else tbl ! A.style (toValue $ "width:" <>
                     show (round (totalWidth * 100) :: Int) <> "%;")
-- | Render one table row.  Row 0 is the header row (cells use <th>);
-- body rows alternate "odd"/"even" classes.
tableRowToHtml :: PandocMonad m
               => WriterOptions
               -> [Alignment]
               -> Int
               -> [[Block]]
               -> StateT WriterState m Html
tableRowToHtml opts aligns rownum cols' = do
  let mkcell = if rownum == 0 then H.th else H.td
  let rowclass = case rownum of
                      0 -> "header"
                      x | x `rem` 2 == 1 -> "odd"
                      _ -> "even"
  cols'' <- zipWithM
              (\alignment item -> tableItemToHtml opts mkcell alignment item)
              aligns cols'
  return $ (H.tr ! A.class_ rowclass $ nl opts >> mconcat cols'')
           >> nl opts
-- | CSS/HTML name for a column alignment; empty for the default.
alignmentToString :: Alignment -> [Char]
alignmentToString AlignLeft    = "left"
alignmentToString AlignRight   = "right"
alignmentToString AlignCenter  = "center"
alignmentToString AlignDefault = ""
-- | Render one table cell with its alignment: a text-align style in
-- HTML5, an align attribute otherwise; no attribute for the default
-- alignment.
tableItemToHtml :: PandocMonad m
                => WriterOptions
                -> (Html -> Html)
                -> Alignment
                -> [Block]
                -> StateT WriterState m Html
tableItemToHtml opts tag' align' item = do
  contents <- blockListToHtml opts item
  html5 <- gets stHtml5
  let alignStr = alignmentToString align'
  let attribs = if html5
                   then A.style (toValue $ "text-align: " <> alignStr <> ";")
                   else A.align (toValue alignStr)
  let tag'' = if null alignStr
                 then tag'
                 else tag' ! attribs
  return $ tag'' contents >> nl opts
-- | Wrap every rendered item as a list item and append a trailing
-- newline element.
toListItems :: WriterOptions -> [Html] -> [Html]
toListItems opts items = [toListItem opts item | item <- items] ++ [nl opts]
-- | A newline followed by an <li> wrapping the item.
toListItem :: WriterOptions -> Html -> Html
toListItem opts item = do
  nl opts
  H.li item
-- | Render a list of blocks, dropping empty results and separating the
-- rest with newlines.
blockListToHtml :: PandocMonad m
                => WriterOptions -> [Block] -> StateT WriterState m Html
blockListToHtml opts lst = do
  rendered <- mapM (blockToHtml opts) lst
  return $ mconcat $ intersperse (nl opts) $ filter isNonEmpty rendered
  where
    isNonEmpty (Empty _) = False
    isNonEmpty _         = True
-- | Convert a list of Pandoc inline elements to HTML.
inlineListToHtml :: PandocMonad m => WriterOptions -> [Inline] -> StateT WriterState m Html
inlineListToHtml opts lst = do
  rendered <- mapM (inlineToHtml opts) lst
  return $ mconcat rendered
-- | Annotates a MathML expression with the TeX source.
-- | Wrap a MathML element in <semantics> with an <annotation> carrying
-- the original TeX source (encoding application/x-tex).
annotateMML :: XML.Element -> Text -> XML.Element
annotateMML e tex = math (unode "semantics" [cs, unode "annotation" (annotAttrs, T.unpack tex)])
  where
    -- the element's children, wrapped in an <mrow> unless exactly one
    cs = case elChildren e of
           [] -> unode "mrow" ()
           [x] -> x
           xs -> unode "mrow" xs
    -- rebuild the original element around the new child
    math childs = XML.Element q as [XML.Elem childs] l
      where
        (XML.Element q as _ l) = e
    annotAttrs = [XML.Attr (unqual "encoding") "application/x-tex"]
-- | Convert a Pandoc inline element to HTML.
-- FIX: the comment before the Note case ("note: null title included,
-- as in Markdown.pl") had lost its "--" marker, breaking the case
-- expression; restored as a comment.
inlineToHtml :: PandocMonad m
             => WriterOptions -> Inline -> StateT WriterState m Html
inlineToHtml opts inline = do
  html5 <- gets stHtml5
  case inline of
    (Str str)  -> return $ strToHtml str
    Space      -> return $ strToHtml " "
    SoftBreak  -> return $ case writerWrapText opts of
                                WrapNone     -> preEscapedText " "
                                WrapAuto     -> preEscapedText " "
                                WrapPreserve -> preEscapedText "\n"
    LineBreak  -> return $ do
                    if html5 then H5.br else H.br
                    strToHtml "\n"
    (Span (id',classes,kvs) ils) ->
      -- a span whose first class names an HTML span-like element
      -- (kbd, mark, dfn, ...) is rendered as that element
      let spanLikeTag = case classes of
            (c:_) -> do
              guard (c `Set.member` htmlSpanLikeElements)
              pure $ customParent (textTag c)
            _ -> Nothing
      in case spanLikeTag of
           Just tag -> do
             h <- inlineListToHtml opts ils
             addAttrs opts (id',tail classes',kvs') $ tag h
           Nothing -> do
             h <- inlineListToHtml opts ils
             addAttrs opts (id',classes',kvs') (H.span h)
      where
        -- citeproc "csl-no-*" classes are translated into inline styles
        styles = ["font-style:normal;"
                   | "csl-no-emph" `elem` classes]
              ++ ["font-weight:normal;"
                   | "csl-no-strong" `elem` classes]
              ++ ["font-variant:normal;"
                   | "csl-no-smallcaps" `elem` classes]
        kvs' = if null styles
                  then kvs
                  else ("style", T.concat styles) : kvs
        classes' = [ c | c <- classes
                   , c `notElem` [ "csl-no-emph"
                                 , "csl-no-strong"
                                 , "csl-no-smallcaps"
                                 ]
                   ]
    (Emph lst)   -> H.em <$> inlineListToHtml opts lst
    (Strong lst) -> H.strong <$> inlineListToHtml opts lst
    (Code attr@(ids,cs,kvs) str)
                 -> case hlCode of
                         Left msg -> do
                           unless (T.null msg) $
                             report $ CouldNotHighlight msg
                           addAttrs opts (ids,cs',kvs) $
                             fromMaybe H.code sampOrVar $
                             strToHtml str
                         Right h -> do
                           modify $ \st -> st{ stHighlighting = True }
                           addAttrs opts (ids,[],kvs) $
                             fromMaybe id sampOrVar h
                    where hlCode = if isJust (writerHighlightStyle opts)
                                      then highlight
                                             (writerSyntaxMap opts)
                                             formatHtmlInline attr str
                                      else Left ""
                          -- "sample"/"variable" classes select <samp>/<var>
                          (sampOrVar,cs')
                            | "sample" `elem` cs =
                                (Just H.samp,"sample" `delete` cs)
                            | "variable" `elem` cs =
                                (Just H.var,"variable" `delete` cs)
                            | otherwise = (Nothing,cs)
    (Strikeout lst)   -> H.del <$> inlineListToHtml opts lst
    (SmallCaps lst)   -> (H.span ! A.class_ "smallcaps") <$>
                           inlineListToHtml opts lst
    (Superscript lst) -> H.sup <$> inlineListToHtml opts lst
    (Subscript lst)   -> H.sub <$> inlineListToHtml opts lst
    (Quoted quoteType lst) ->
      let (leftQuote, rightQuote) = case quoteType of
                                      SingleQuote -> (strToHtml "‘",
                                                      strToHtml "’")
                                      DoubleQuote -> (strToHtml "“",
                                                      strToHtml "”")
      in if writerHtmlQTags opts
            then do
              modify $ \st -> st{ stQuotes = True }
              -- a single span with a "cite" key supplies the <q> attrs
              let (maybeAttr, lst') = case lst of
                    [Span attr@(_, _, kvs) cs]
                      | any ((=="cite") . fst) kvs
                      -> (Just attr, cs)
                    cs -> (Nothing, cs)
              H.q `fmap` inlineListToHtml opts lst'
                >>= maybe return (addAttrs opts) maybeAttr
            else (\x -> leftQuote >> x >> rightQuote)
                   `fmap` inlineListToHtml opts lst
    (Math t str) -> do
      modify (\st -> st {stMath = True})
      let mathClass = toValue $ ("math " :: Text) <>
                      if t == InlineMath then "inline" else "display"
      case writerHTMLMathMethod opts of
           WebTeX url -> do
              let imtag = if html5 then H5.img else H.img
              let s = case t of
                           InlineMath  -> "\\textstyle "
                           DisplayMath -> "\\displaystyle "
              let m = imtag ! A.style "vertical-align:middle"
                            ! A.src (toValue $ url <> T.pack (urlEncode (T.unpack $ s <> str)))
                            ! A.alt (toValue str)
                            ! A.title (toValue str)
              let brtag = if html5 then H5.br else H.br
              return $ case t of
                            InlineMath  -> m
                            DisplayMath -> brtag >> m >> brtag
           GladTeX ->
              return $
                customParent (textTag "eq") !
                  customAttribute "env"
                    (toValue $ if t == InlineMath
                                  then ("math" :: Text)
                                  else "displaymath") $ strToHtml str
           MathML -> do
              let conf = useShortEmptyTags (const False)
                           defaultConfigPP
              res <- lift $ convertMath writeMathML t str
              case res of
                    Right r  -> return $ preEscapedString $
                                  ppcElement conf (annotateMML r str)
                    Left il  -> (H.span ! A.class_ mathClass) <$>
                                  inlineToHtml opts il
           MathJax _ -> return $ H.span ! A.class_ mathClass $ toHtml $
              case t of
                   InlineMath  -> "\\(" <> str <> "\\)"
                   DisplayMath -> "\\[" <> str <> "\\]"
           KaTeX _ -> return $ H.span ! A.class_ mathClass $ toHtml $
              case t of
                   InlineMath  -> str
                   DisplayMath -> str
           PlainMath -> do
              x <- lift (texMathToInlines t str) >>= inlineListToHtml opts
              let m = H.span ! A.class_ mathClass $ x
              let brtag = if html5 then H5.br else H.br
              return $ case t of
                            InlineMath  -> m
                            DisplayMath -> brtag >> m >> brtag
    (RawInline f str) -> do
      ishtml <- isRawHtml f
      if ishtml
         then return $ preEscapedText str
         else if (f == Format "latex" || f == Format "tex") &&
                 allowsMathEnvironments (writerHTMLMathMethod opts) &&
                 isMathEnvironment str
              then inlineToHtml opts $ Math DisplayMath str
              else do
                report $ InlineNotRendered inline
                return mempty
    (Link attr txt (s,_)) | "mailto:" `T.isPrefixOf` s -> do
      linkText <- inlineListToHtml opts txt
      obfuscateLink opts attr linkText s
    (Link (ident,classes,kvs) txt (s,tit)) -> do
      linkText <- inlineListToHtml opts txt
      slideVariant <- gets stSlideVariant
      -- internal links get the identifier prefix ("/" for reveal.js)
      let s' = case T.uncons s of
                 Just ('#',xs) -> let prefix = if slideVariant == RevealJsSlides
                                                  then "/"
                                                  else writerIdentifierPrefix opts
                                  in  "#" <> prefix <> xs
                 _ -> s
      let link = H.a ! A.href (toValue s') $ linkText
      link' <- addAttrs opts (ident, classes, kvs) link
      return $ if T.null tit
                  then link'
                  else link' ! A.title (toValue tit)
    (Image attr txt (s,tit)) -> do
      let alternate = stringify txt
      slideVariant <- gets stSlideVariant
      let isReveal = slideVariant == RevealJsSlides
      attrs <- imgAttrsToHtml opts attr
      let attributes =
            (if isReveal
                then customAttribute "data-src" $ toValue s
                else A.src $ toValue s) :
            [A.title $ toValue tit | not (T.null tit)] ++
            attrs
          imageTag = (if html5 then H5.img else H.img
                     , [A.alt $ toValue alternate | not (null txt)] )
          -- audio/video: fall back to a link inside the media element
          mediaTag tg fallbackTxt =
            let linkTxt = if null txt
                             then fallbackTxt
                             else alternate
            in (tg $ H.a ! A.href (toValue s) $ toHtml linkTxt
               , [A5.controls ""] )
          normSrc = maybe (T.unpack s) uriPath (parseURIReference $ T.unpack s)
          (tag, specAttrs) = case mediaCategory normSrc of
            Just "image" -> imageTag
            Just "video" -> mediaTag H5.video "Video"
            Just "audio" -> mediaTag H5.audio "Audio"
            Just _       -> (H5.embed, [])
            _            -> imageTag
      return $ foldl (!) tag $ attributes ++ specAttrs
    -- note: null title included, as in Markdown.pl
    (Note contents) -> do
      notes <- gets stNotes
      let number = length notes + 1
      let ref = tshow number
      htmlContents <- blockListToNote opts ref contents
      epubVersion <- gets stEPUBVersion
      -- push contents onto front of notes
      modify $ \st -> st {stNotes = htmlContents:notes}
      slideVariant <- gets stSlideVariant
      let revealSlash = T.pack ['/' | slideVariant == RevealJsSlides]
      let link = H.a ! A.href (toValue $ "#" <>
                        revealSlash <>
                        writerIdentifierPrefix opts <> "fn" <> ref)
                     ! A.class_ "footnote-ref"
                     ! prefixedId opts ("fnref" <> ref)
                     $ (if isJust epubVersion
                           then id
                           else H.sup)
                     $ toHtml ref
      return $ case epubVersion of
                    Just EPUB3 -> link ! customAttribute "epub:type" "noteref"
                    _ | html5  -> link ! H5.customAttribute
                                          "role" "doc-noteref"
                    _          -> link
    (Cite cits il) -> do
      contents <- inlineListToHtml opts (walk addRoleToLink il)
      let citationIds = T.unwords $ map citationId cits
      let result = H.span ! A.class_ "citation" $ contents
      return $ if html5
                  then result ! customAttribute "data-cites" (toValue citationIds)
                  else result
-- | Tag a Link with the ARIA role "doc-biblioref"; any other inline is
-- returned unchanged.
addRoleToLink :: Inline -> Inline
addRoleToLink inl =
  case inl of
    Link (ident, classes, kvs) ils target ->
      Link (ident, classes, ("role","doc-biblioref") : kvs) ils target
    _ -> inl
-- | Render one footnote's blocks as an <li> with an id of "fn<ref>",
-- appending a backlink (↩) to the footnote reference site.
-- FIX: the two comment lines describing backlink placement had lost
-- their "--" markers (breaking the do block); restored.
blockListToNote :: PandocMonad m
                => WriterOptions -> Text -> [Block]
                -> StateT WriterState m Html
blockListToNote opts ref blocks = do
  html5 <- gets stHtml5
  -- If last block is Para or Plain, include the backlink at the end of
  -- that block. Otherwise, insert a new Plain block with the backlink.
  let kvs = [("role","doc-backlink") | html5]
  let backlink = [Link ("",["footnote-back"],kvs)
                    [Str "↩"] ("#" <> "fnref" <> ref,"")]
  let blocks' = if null blocks
                   then []
                   else let lastBlock   = last blocks
                            otherBlocks = init blocks
                        in  case lastBlock of
                                 (Para lst)  -> otherBlocks ++
                                                [Para (lst ++ backlink)]
                                 (Plain lst) -> otherBlocks ++
                                                [Plain (lst ++ backlink)]
                                 _           -> otherBlocks ++ [lastBlock,
                                                Plain backlink]
  contents <- blockListToHtml opts blocks'
  let noteItem = H.li ! prefixedId opts ("fn" <> ref) $ contents
  epubVersion <- gets stEPUBVersion
  let noteItem' = case epubVersion of
                       Just EPUB3 -> noteItem !
                                       customAttribute "epub:type" "footnote"
                       _ | html5  -> noteItem !
                                       customAttribute "role" "doc-endnote"
                       _          -> noteItem
  return $ nl opts >> noteItem'
-- | True when the string starts a LaTeX math environment that MathML
-- (and MathJax/WebTeX) can handle directly.
isMathEnvironment :: Text -> Bool
isMathEnvironment s
  | Just rest <- T.stripPrefix (T.pack "\\begin{") s =
      T.unpack (T.takeWhile (/= '}') rest) `elem` mathmlenvs
  | otherwise = False
  where
    mathmlenvs = [ "align"
                 , "align*"
                 , "alignat"
                 , "alignat*"
                 , "aligned"
                 , "alignedat"
                 , "array"
                 , "Bmatrix"
                 , "bmatrix"
                 , "cases"
                 , "CD"
                 , "eqnarray"
                 , "eqnarray*"
                 , "equation"
                 , "equation*"
                 , "gather"
                 , "gather*"
                 , "gathered"
                 , "matrix"
                 , "multline"
                 , "multline*"
                 , "pmatrix"
                 , "smallmatrix"
                 , "split"
                 , "subarray"
                 , "Vmatrix"
                 , "vmatrix" ]
-- | Whether the math rendering method can process raw LaTeX math
-- environments.
allowsMathEnvironments :: HTMLMathMethod -> Bool
allowsMathEnvironments method =
  case method of
    MathJax _ -> True
    MathML    -> True
    WebTeX _  -> True
    _         -> False
-- | Is this raw format HTML we should pass through?  "html" and
-- "html4" always are; "html5" only when emitting HTML5.
isRawHtml :: PandocMonad m => Format -> StateT WriterState m Bool
isRawHtml f = do
  html5 <- gets stHtml5
  let accepted = f == Format "html"
              || (html5 && f == Format "html5")
              || f == Format "html4"
  return accepted
|
3fb74bf6157aea77da87ff930c51d3e482c4eefea009db61dc52113284e6b944 | kappelmann/eidi2_repetitorium_tum | ha11_sol.ml | open Ha11_angabe
(* Separate-chaining hash map: an array of 37 association-list buckets,
   indexed by [H.hash key mod 37].  Operations mutate the bucket array
   in place but also return it, as required by the [Map] signature. *)
module MakeHashMap (H : Hashable) : (Map with type key = H.key) = struct
  type key = H.key
  type 'v t = (key * 'v) list array

  (* Fixed bucket count; no resizing. *)
  let create () = Array.make 37 []

  (* Total number of bindings across all buckets. *)
  let size hm =
    Array.fold_left (fun acc bucket -> acc + List.length bucket) 0 hm

  (* Bucket index for [k] together with that bucket minus any binding
     for [k].  BUG FIX: the original filtered with physical inequality
     [!=], which keeps structurally equal but physically distinct keys
     (e.g. strings), so insert/remove left stale duplicate bindings;
     use structural [<>], consistent with [lookup]'s use of [=]. *)
  let filtered_bucket hm k =
    let hash = (H.hash k) mod (Array.length hm) in
    let bucket = hm.(hash) in
    let filtered = List.filter (fun (k', _) -> k <> k') bucket in
    (hash, filtered)

  (* Insert or replace the binding for [k]. *)
  let insert hm k v =
    let (hash, filtered) = filtered_bucket hm k in
    hm.(hash) <- (k, v) :: filtered;
    hm

  (* Remove any binding for [k]. *)
  let remove hm k =
    let (hash, filtered) = filtered_bucket hm k in
    hm.(hash) <- filtered;
    hm

  (* [Some v] for the binding of [k], or [None] if absent. *)
  let lookup hm k =
    let hash = (H.hash k) mod (Array.length hm) in
    let bucket = hm.(hash) in
    try
      let (_, v) = List.find (fun (k', _) -> k = k') bucket in
      Some v
    with Not_found -> None
end
(* Hash map keyed by int, hashing with the polymorphic Hashtbl.hash. *)
module IntHashMap = MakeHashMap (struct
  type key = int
  let hash = Hashtbl.hash
end)
(* Tree map built on the [bin_tree] type and the generic [insert] /
   [remove] helpers from Ha11_angabe; bindings are (key, value) pairs
   ordered by [C.compare] on the key alone. *)
module MakeTreeMap (C : Comparable) : (Map with type key = C.key) = struct
  type key = C.key
  type 'v t = (key * 'v) bin_tree

  let create () = Leaf

  (* Number of nodes in the tree. *)
  let size hm =
    let rec inner = function
        Node n -> 1 + (inner n.left) + (inner n.right)
      | Leaf -> 0
    in inner hm

  (* Order two bindings by key; the values are ignored. *)
  let compare_tuple hm (k1, _) (k2, _) = C.compare k1 k2

  (* NOTE: these [let]s are non-recursive, so their right-hand sides
     refer to the generic tree [insert]/[remove] from Ha11_angabe, not
     to themselves. *)
  let insert hm k v = insert hm (k, v) (compare_tuple hm)
  let remove hm k = remove hm k (fun k (k', _) -> C.compare k k')

  (* Standard BST lookup by key. *)
  let rec lookup hm k = match hm with
      Node { data = (k', v); left; right} ->
        let comp = C.compare k k' in
        if comp = 0 then Some v
        else if comp < 0 then lookup left k
        else lookup right k
    | Leaf -> None
end
(* Tree map keyed by int, using the polymorphic compare. *)
module IntTreeMap = MakeTreeMap (struct
  type key = int
  let compare = Pervasives.compare
end)
(* Smoke test: insert into both map implementations and print lookup
   results ("None" when the key is absent). *)
let () =
  let opt_str = (function
      Some x -> x
    | None -> "None") in
  let m = IntHashMap.create () in
  let m = IntHashMap.insert m 5 "Hallo" in
  let m = IntHashMap.insert m 7 "Hugo" in
  Printf.printf "%s\n" (opt_str (IntHashMap.lookup m 5));
  Printf.printf "%s\n" (opt_str (IntHashMap.lookup m 20));
  Printf.printf "%s\n" (opt_str (IntHashMap.lookup m 7));
  let m = IntTreeMap.create () in
  let m = IntTreeMap.insert m 5 "Hallo" in
  let m = IntTreeMap.insert m 7 "Hugo" in
  Printf.printf "%s\n" (opt_str (IntTreeMap.lookup m 5));
  Printf.printf "%s\n" (opt_str (IntTreeMap.lookup m 20));
  Printf.printf "%s\n" (opt_str (IntTreeMap.lookup m 7))
(* Right-to-left function composition: (g % f) x = g (f x). *)
let (%) g f = fun x -> g (f x)

(* Constant function: returns its first argument, ignoring the second. *)
let const x = fun _ -> x

(* Identity function. *)
let id = fun x -> x
(* Derive the extended container operations from the minimal [Base]
   interface (empty / insert / fold).
   FIX: the explanatory comment had been garbled (its opening marker was
   lost, leaving invalid tokens in the module body); reconstructed here
   in English from the surviving German fragments. *)
module Lift (B : Base) : Extended with type 'a t = 'a B.t = struct
  include B

  (* [iter] must visit elements left to right, while [fold] here folds
     from the right.  We therefore fold the structure into a closure
     (unit -> unit) that performs the effects in the correct order and
     is only invoked at the very end. *)
  let iter f x = fold (fun x acc -> (fun () -> f x; acc())) x (fun () -> ()) ()
  let map f x = fold (insert%f) x empty
  let filter f x = fold (fun a b -> if f a then insert a b else b) x empty
  let append x y = fold insert x y
  let flatten x = fold append x empty
  let to_list x = fold List.cons x []
  let of_list x = List.fold_right insert x empty
end
(* Base instance for the built-in list type.  Inside the struct, [List]
   still refers to the stdlib module (module bindings are not
   recursive). *)
module List = struct
  type 'a t = 'a list
  let empty = []
  let insert = List.cons
  let fold = List.fold_right
end

(* Lists with the full set of derived operations. *)
module ExtendedList = Lift (List)
(* Unbalanced binary search tree. *)
module SearchTree = struct
  type 'a t = Leaf | Node of ('a * 'a t * 'a t)

  let empty = Leaf

  (* Insert [x]; values >= the node value go to the right subtree. *)
  let rec insert x t =
    match t with
    | Leaf -> Node (x, Leaf, Leaf)
    | Node (y, l, r) ->
        if x < y then Node (y, insert x l, r) else Node (y, l, insert x r)

  (* Pre-order fold: node value first, then left, then right. *)
  let rec fold f t acc =
    match t with
    | Leaf -> acc
    | Node (v, l, r) -> fold f r (fold f l (f v acc))
end
(* Search trees with the full set of derived operations. *)
module ExtendedSearchTree = Lift (SearchTree)
let () =
let l = 4::2::1::[] in
ExtendedList.iter (fun x -> Printf.printf "%d\n" x) l;
let t = ExtendedSearchTree.insert 4
(ExtendedSearchTree.insert 6
(ExtendedSearchTree.insert 1
ExtendedSearchTree.empty)) in
ExtendedSearchTree.iter (fun x -> Printf.printf "%d\n" x) t | null | https://raw.githubusercontent.com/kappelmann/eidi2_repetitorium_tum/1d16bbc498487a85960e0d83152249eb13944611/additional_exercises/2016_17/Blatt%2011%20L%C3%B6sungen/ocaml/ha11_sol.ml | ocaml | pre-order traversal: node, left, right | open Ha11_angabe
module MakeHashMap (H : Hashable) : (Map with type key = H.key) = struct
type key = H.key
type 'v t = (key * 'v) list array
let create () = Array.make 37 []
let size hm =
Array.fold_left (fun acc bucket -> acc + List.length bucket) 0 hm
let filtered_bucket hm k =
let hash = (H.hash k) mod (Array.length hm) in
let bucket = hm.(hash) in
let filtered = List.filter (fun (k', _) -> k != k') bucket in
(hash, filtered)
let insert hm k v =
let (hash, filtered) = filtered_bucket hm k in
hm.(hash) <- (k, v)::filtered;
hm
let remove hm k =
let (hash, filtered) = filtered_bucket hm k in
hm.(hash) <- filtered;
hm
let lookup hm k =
let hash = (H.hash k) mod (Array.length hm) in
let bucket = hm.(hash) in
try
let (_, v) = List.find (fun (k', v) -> k = k') bucket in
Some v
with Not_found -> None
end
module IntHashMap = MakeHashMap (struct
type key = int
let hash = Hashtbl.hash
end)
module MakeTreeMap (C : Comparable) : (Map with type key = C.key) = struct
type key = C.key
type 'v t = (key * 'v) bin_tree
let create () = Leaf
let size hm =
let rec inner = function
Node n -> 1 + (inner n.left) + (inner n.right)
| Leaf -> 0
in inner hm
let compare_tuple hm (k1, _) (k2, _) = C.compare k1 k2
let insert hm k v = insert hm (k, v) (compare_tuple hm)
let remove hm k = remove hm k (fun k (k', _) -> C.compare k k')
let rec lookup hm k = match hm with
Node { data = (k', v); left; right} ->
let comp = C.compare k k' in
if comp = 0 then Some v
else if comp < 0 then lookup left k
else lookup right k
| Leaf -> None
end
module IntTreeMap = MakeTreeMap (struct
type key = int
let compare = Pervasives.compare
end)
let () =
let opt_str = (function
Some x -> x
| None -> "None") in
let m = IntHashMap.create () in
let m = IntHashMap.insert m 5 "Hallo" in
let m = IntHashMap.insert m 7 "Hugo" in
Printf.printf "%s\n" (opt_str (IntHashMap.lookup m 5));
Printf.printf "%s\n" (opt_str (IntHashMap.lookup m 20));
Printf.printf "%s\n" (opt_str (IntHashMap.lookup m 7));
let m = IntTreeMap.create () in
let m = IntTreeMap.insert m 5 "Hallo" in
let m = IntTreeMap.insert m 7 "Hugo" in
Printf.printf "%s\n" (opt_str (IntTreeMap.lookup m 5));
Printf.printf "%s\n" (opt_str (IntTreeMap.lookup m 20));
Printf.printf "%s\n" (opt_str (IntTreeMap.lookup m 7))
let (%) g f x = g (f x)
let const x _ = x
let id x = x
module Lift (B : Base) : Extended with type 'a t = 'a B.t = struct
include B
' iter ' muss die , während fold hier
. Aus diesem
aufgebaut , welches ganz am .
von rechts faltet. Aus diesem Grund wird hier ein Funktionsobjekt
aufgebaut, welches ganz am Ende aufgerufen wird. *)
let iter f x = fold (fun x acc -> (fun () -> f x; acc())) x (fun () -> ()) ()
let map f x = fold (insert%f) x empty
let filter f x = fold (fun a b -> if f a then insert a b else b) x empty
let append x y = fold insert x y
let flatten x = fold append x empty
let to_list x = fold List.cons x []
let of_list x = List.fold_right insert x empty
end
module List = struct
type 'a t = 'a list
let empty = []
let insert = List.cons
let fold = List.fold_right
end
module ExtendedList = Lift (List)
module SearchTree = struct
type 'a t = Leaf | Node of ('a * 'a t * 'a t)
let empty = Leaf
let rec insert x = function
| Leaf -> Node (x, Leaf, Leaf)
| Node (y, a, b) ->
if x < y then Node (y, insert x a, b) else Node (y, a, insert x b)
| Leaf -> id
| Node (v, l, r) -> fun a -> fold f r (fold f l (f v a))
end
module ExtendedSearchTree = Lift (SearchTree)
let () =
let l = 4::2::1::[] in
ExtendedList.iter (fun x -> Printf.printf "%d\n" x) l;
let t = ExtendedSearchTree.insert 4
(ExtendedSearchTree.insert 6
(ExtendedSearchTree.insert 1
ExtendedSearchTree.empty)) in
ExtendedSearchTree.iter (fun x -> Printf.printf "%d\n" x) t |
2fdda4a89306c8930ed0ad09a3820707f7cbcdce436b7a1df0cbdfc364957173 | icicle-lang/icicle-ambiata | Statement.hs | | Statements and mutable accumulators ( variables ) for Avalanche
# LANGUAGE DeriveGeneric #
# LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
module Icicle.Avalanche.Statement.Statement (
Statement (..)
, Accumulator (..)
, FactBinds (..)
, ForeachType (..)
, WhileType (..)
, nestedIfs
, transformUDStmt
, foldStmt
, factBindsAll
) where
import GHC.Generics (Generic)
import Icicle.Common.Base
import Icicle.Common.Type
import Icicle.Common.Exp
import Icicle.Data.Name
import Icicle.Internal.Pretty
import P
-- | Part of a loop
data Statement a n p
-- Branches
-- | An IF for filters
= If !(Exp a n p) (Statement a n p) (Statement a n p)
-- | Local binding, so the name better be unique
| Let {-# UNPACK #-} !(Name n) !(Exp a n p) (Statement a n p)
-- | A loop with some condition on an accumulator.
| While !WhileType {-# UNPACK #-} !(Name n) !ValType !(Exp a n p) (Statement a n p)
-- | A loop over some ints
| ForeachInts !ForeachType {-# UNPACK #-} !(Name n) !(Exp a n p) !(Exp a n p) (Statement a n p)
-- | A loop over all the facts.
-- This should only occur once in the program, and not inside a loop.
| ForeachFacts !(FactBinds n) !ValType (Statement a n p)
-- | Execute several statements in a block.
| Block [Statement a n p]
Initialise an accumulator
| InitAccumulator !(Accumulator a n p) (Statement a n p)
-- | Read from a non-latest accumulator.
First name is what to call it , second is what accumulator .
-- As let:
-- Let local = accumulator,
-- Read local = accumulator.
| Read {-# UNPACK #-} !(Name n) {-# UNPACK #-} !(Name n) !ValType (Statement a n p)
-- Leaf nodes
-- | Update a resumable or windowed fold accumulator,
with Exp : acc
| Write {-# UNPACK #-} !(Name n) !(Exp a n p)
-- | Emit a value to output
| Output {-# UNPACK #-} !OutputId !ValType ![(Exp a n p, ValType)]
deriving (Eq, Ord, Show, Generic)
instance (NFData a, NFData n, NFData p) => NFData (Statement a n p)
instance Monoid (Statement a n p) where
mempty = Block []
mappend p q
= Block [p, q]
-- | Construct nested ifs. Use this instead of "If (x && y)", since
-- A-normalisation will get rid of the short-circuit.
--
nestedIfs :: [Exp a n p] -> Statement a n p -> Statement a n p -> Statement a n p
nestedIfs [] _ _
= mempty
nestedIfs conds true false
= foldr (\cond st -> If cond st false) true conds
data FactBinds n
= FactBinds {
factBindTime :: !(Name n)
, factBindValue :: ![(Name n, ValType)]
}
deriving (Eq, Ord, Show, Generic)
instance NFData n => NFData (FactBinds n)
factBindsAll :: FactBinds n -> [(Name n, ValType)]
factBindsAll (FactBinds ntime nvalue)
= (ntime, TimeT) : nvalue
-- | Mutable accumulators
data Accumulator a n p
= Accumulator
{ accName :: !(Name n)
, accValType :: !ValType
, accInit :: !(Exp a n p)
}
deriving (Eq, Ord, Show, Generic)
instance (NFData a, NFData n, NFData p) => NFData (Accumulator a n p)
data ForeachType
= ForeachStepUp
| ForeachStepDown
deriving (Eq, Ord, Show, Generic)
instance NFData ForeachType
data WhileType
= WhileEq
| WhileNe
deriving (Eq, Ord, Show, Generic)
instance NFData WhileType
-- Transforming -------------
transformUDStmt
:: Monad m
=> (env -> Statement a n p -> m (env, Statement a n p))
-> env
-> Statement a n p
-> m (Statement a n p)
transformUDStmt fun env statements
= go env statements
where
go e s
= do (e', s') <- fun e s
case s' of
If x ss es
-> If x <$> go e' ss <*> go e' es
Let n x ss
-> Let n x <$> go e' ss
While t n vt end ss
-> While t n vt end <$> go e' ss
ForeachInts t n from to ss
-> ForeachInts t n from to <$> go e' ss
ForeachFacts binds ty ss
-> ForeachFacts binds ty <$> go e' ss
Block ss
-> Block <$> mapM (go e') ss
InitAccumulator acc ss
-> InitAccumulator acc <$> go e' ss
Read n acc vt ss
-> Read n acc vt <$> go e' ss
Write n x
-> return $ Write n x
Output n t xs
-> return $ Output n t xs
# INLINE transformUDStmt #
foldStmt
:: Monad m
=> (env -> Statement a n p -> m env)
-> (env -> res -> Statement a n p -> m res)
-> (res -> res -> res)
-> env
-> res
-> Statement a n p
-> m res
foldStmt down up rjoin env res statements
= go env statements
where
go e s
= do e' <- down e s
let sub1 ss = go e' ss >>= \r' -> up e' r' s
case s of
If _ ss es
-> do r1 <- go e' ss
r2 <- go e' es
let r' = rjoin r1 r2
up e' r' s
Let _ _ ss
-> sub1 ss
While _ _ _ _ ss
-> sub1 ss
ForeachInts _ _ _ _ ss
-> sub1 ss
ForeachFacts _ _ ss
-> sub1 ss
Block ss
-> do rs <- mapM (go e') ss
let r' = foldl' rjoin res rs
up e' r' s
InitAccumulator _ ss
-> sub1 ss
Read _ _ _ ss
-> sub1 ss
Write{}
-> up e' res s
Output{}
-> up e' res s
# INLINE foldStmt #
instance TransformX Statement where
transformX names exps stmt
= case stmt of
If x ss es
-> If <$> exps x <*> go ss <*> go es
Let n x ss
-> Let <$> names n <*> exps x <*> go ss
While t n vt end ss
-> While t <$> names n <*> pure vt <*> exps end <*> go ss
ForeachInts t n from to ss
-> ForeachInts t <$> names n <*> exps from <*> exps to <*> go ss
ForeachFacts (FactBinds ntime ns) v ss
-> let name_go (n, t) = (,) <$> names n <*> pure t
in ForeachFacts <$> (FactBinds <$> names ntime <*> traverse name_go ns) <*> return v <*> go ss
Block ss
-> Block <$> gos ss
InitAccumulator acc ss
-> InitAccumulator <$> transformX names exps acc <*> go ss
Read n acc vt ss
-> Read <$> names n <*> names acc <*> pure vt <*> go ss
Write n x
-> Write <$> names n <*> exps x
Output n ty xs
-> Output n ty <$> traverse (\(x,t) -> (,) <$> exps x <*> pure t) xs
where
go = transformX names exps
gos = mapM go
instance TransformX Accumulator where
transformX names exps (Accumulator n t x)
= do n' <- names n
x' <- exps x
return $ Accumulator n' t x'
-- Pretty printing -------------
flattenBlocks :: Statement a n p -> [Statement a n p]
flattenBlocks = \case
Block xs ->
concatMap flattenBlocks xs
x ->
[x]
instance (Pretty n, Pretty p) => Pretty (Statement a n p) where
pretty = \case
If x stmts elses ->
line <>
prettyKeyword "if" <+> prettyPunctuation "(" <> pretty x <> prettyPunctuation ")" <> line <>
subscope stmts <>
case elses of
Block [] ->
line
_ ->
line <>
prettyKeyword "else" <> line <>
subscope elses <>
line
Let n x stmts ->
annotate AnnBinding (pretty n) <+> "=" <+> pretty x <> line <>
nosubscope stmts
Read n acc _vt stmts ->
annotate AnnBinding (pretty n) <+> prettyPunctuation "=r" <+> annotate AnnVariable (pretty acc) <> line <>
nosubscope stmts
Write n x ->
annotate AnnBinding (pretty n) <+> prettyPunctuation "=w" <+> pretty x
While t n _ end stmts ->
line <>
prettyKeyword "while" <+> prettyPunctuation "(" <> annotate AnnVariable (pretty n) <+> pretty t <+> pretty end <> ")" <> line <>
subscope stmts <>
line
ForeachInts _ n from to stmts ->
line <>
prettyKeyword "foreach" <+>
prettyPunctuation "(" <> annotate AnnBinding (pretty n) <+>
prettyKeyword "in" <+> pretty from <+>
prettyPunctuation ".." <+> pretty to <>
prettyPunctuation ")" <> line <>
subscope stmts <>
line
ForeachFacts binds _ stmts ->
line <>
prettyKeyword "for_facts" <+>
prettyFactParts AnnBinding (factBindsAll binds) <+>
subscope stmts <>
line
x@(Block _) ->
-- We don't actually need to indent here,
-- because it doesn't really introduce scope
vcat (fmap pretty $ flattenBlocks x)
InitAccumulator (Accumulator n vt x) stmts ->
annotate AnnBinding (pretty n) <+> prettyPunctuation "=i" <+> prettyTypedFlat (pretty x) (pretty vt) <> line <>
nosubscope stmts
Output n t xs ->
line <>
prettyKeyword "output" <+>
prettyTypedFlat (annotate AnnBinding $ pretty n) (pretty t) <+> prettyPunctuation "=" <> line <>
case xs of
[x] ->
-- output =
-- foo
indent 2 $ prettyFactPart AnnVariable x
_ ->
-- output =
-- foo
-- , bar
indent 4 $ prettyFactParts AnnVariable xs
where
subscope stmt
= vcat
[ prettyPunctuation "{"
, indent 4 (pretty stmt)
, prettyPunctuation "}"]
-- We don't want to indent for every let or read just for aesthetic reasons:
-- it gets messy very quickly
nosubscope stmt
= pretty stmt
prettyFactPart ann (nf, tf) =
prettyTypedFlat (annotate ann $ pretty nf) (pretty tf)
prettyFactParts ann xs0 =
case reverse xs0 of
[] ->
mempty
x : xs1 ->
align . prettyItems vsep (prettyFactPart ann x) $
fmap (PrettyItem "," . prettyFactPart ann) (reverse xs1)
instance Pretty WhileType where
pretty = \case
WhileEq ->
prettyPunctuation "=="
WhileNe ->
prettyPunctuation "!="
instance (Pretty n, Pretty p) => Pretty (Accumulator a n p) where
pretty (Accumulator n vt x) =
prettyTypedFlat (annotate AnnBinding $ pretty n) (pretty vt) <+> text "=" <+> pretty x
| null | https://raw.githubusercontent.com/icicle-lang/icicle-ambiata/9b9cc45a75f66603007e4db7e5f3ba908cae2df2/icicle-compiler/src/Icicle/Avalanche/Statement/Statement.hs | haskell | # LANGUAGE OverloadedStrings #
| Part of a loop
Branches
| An IF for filters
| Local binding, so the name better be unique
# UNPACK #
| A loop with some condition on an accumulator.
# UNPACK #
| A loop over some ints
# UNPACK #
| A loop over all the facts.
This should only occur once in the program, and not inside a loop.
| Execute several statements in a block.
| Read from a non-latest accumulator.
As let:
Let local = accumulator,
Read local = accumulator.
# UNPACK #
# UNPACK #
Leaf nodes
| Update a resumable or windowed fold accumulator,
# UNPACK #
| Emit a value to output
# UNPACK #
| Construct nested ifs. Use this instead of "If (x && y)", since
A-normalisation will get rid of the short-circuit.
| Mutable accumulators
Transforming -------------
Pretty printing -------------
We don't actually need to indent here,
because it doesn't really introduce scope
output =
foo
output =
foo
, bar
We don't want to indent for every let or read just for aesthetic reasons:
it gets messy very quickly | | Statements and mutable accumulators ( variables ) for Avalanche
# LANGUAGE DeriveGeneric #
# LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
module Icicle.Avalanche.Statement.Statement (
Statement (..)
, Accumulator (..)
, FactBinds (..)
, ForeachType (..)
, WhileType (..)
, nestedIfs
, transformUDStmt
, foldStmt
, factBindsAll
) where
import GHC.Generics (Generic)
import Icicle.Common.Base
import Icicle.Common.Type
import Icicle.Common.Exp
import Icicle.Data.Name
import Icicle.Internal.Pretty
import P
data Statement a n p
= If !(Exp a n p) (Statement a n p) (Statement a n p)
| ForeachFacts !(FactBinds n) !ValType (Statement a n p)
| Block [Statement a n p]
Initialise an accumulator
| InitAccumulator !(Accumulator a n p) (Statement a n p)
First name is what to call it , second is what accumulator .
with Exp : acc
deriving (Eq, Ord, Show, Generic)
instance (NFData a, NFData n, NFData p) => NFData (Statement a n p)
instance Monoid (Statement a n p) where
mempty = Block []
mappend p q
= Block [p, q]
nestedIfs :: [Exp a n p] -> Statement a n p -> Statement a n p -> Statement a n p
nestedIfs [] _ _
= mempty
nestedIfs conds true false
= foldr (\cond st -> If cond st false) true conds
data FactBinds n
= FactBinds {
factBindTime :: !(Name n)
, factBindValue :: ![(Name n, ValType)]
}
deriving (Eq, Ord, Show, Generic)
instance NFData n => NFData (FactBinds n)
factBindsAll :: FactBinds n -> [(Name n, ValType)]
factBindsAll (FactBinds ntime nvalue)
= (ntime, TimeT) : nvalue
data Accumulator a n p
= Accumulator
{ accName :: !(Name n)
, accValType :: !ValType
, accInit :: !(Exp a n p)
}
deriving (Eq, Ord, Show, Generic)
instance (NFData a, NFData n, NFData p) => NFData (Accumulator a n p)
data ForeachType
= ForeachStepUp
| ForeachStepDown
deriving (Eq, Ord, Show, Generic)
instance NFData ForeachType
data WhileType
= WhileEq
| WhileNe
deriving (Eq, Ord, Show, Generic)
instance NFData WhileType
transformUDStmt
:: Monad m
=> (env -> Statement a n p -> m (env, Statement a n p))
-> env
-> Statement a n p
-> m (Statement a n p)
transformUDStmt fun env statements
= go env statements
where
go e s
= do (e', s') <- fun e s
case s' of
If x ss es
-> If x <$> go e' ss <*> go e' es
Let n x ss
-> Let n x <$> go e' ss
While t n vt end ss
-> While t n vt end <$> go e' ss
ForeachInts t n from to ss
-> ForeachInts t n from to <$> go e' ss
ForeachFacts binds ty ss
-> ForeachFacts binds ty <$> go e' ss
Block ss
-> Block <$> mapM (go e') ss
InitAccumulator acc ss
-> InitAccumulator acc <$> go e' ss
Read n acc vt ss
-> Read n acc vt <$> go e' ss
Write n x
-> return $ Write n x
Output n t xs
-> return $ Output n t xs
# INLINE transformUDStmt #
foldStmt
:: Monad m
=> (env -> Statement a n p -> m env)
-> (env -> res -> Statement a n p -> m res)
-> (res -> res -> res)
-> env
-> res
-> Statement a n p
-> m res
foldStmt down up rjoin env res statements
= go env statements
where
go e s
= do e' <- down e s
let sub1 ss = go e' ss >>= \r' -> up e' r' s
case s of
If _ ss es
-> do r1 <- go e' ss
r2 <- go e' es
let r' = rjoin r1 r2
up e' r' s
Let _ _ ss
-> sub1 ss
While _ _ _ _ ss
-> sub1 ss
ForeachInts _ _ _ _ ss
-> sub1 ss
ForeachFacts _ _ ss
-> sub1 ss
Block ss
-> do rs <- mapM (go e') ss
let r' = foldl' rjoin res rs
up e' r' s
InitAccumulator _ ss
-> sub1 ss
Read _ _ _ ss
-> sub1 ss
Write{}
-> up e' res s
Output{}
-> up e' res s
# INLINE foldStmt #
instance TransformX Statement where
transformX names exps stmt
= case stmt of
If x ss es
-> If <$> exps x <*> go ss <*> go es
Let n x ss
-> Let <$> names n <*> exps x <*> go ss
While t n vt end ss
-> While t <$> names n <*> pure vt <*> exps end <*> go ss
ForeachInts t n from to ss
-> ForeachInts t <$> names n <*> exps from <*> exps to <*> go ss
ForeachFacts (FactBinds ntime ns) v ss
-> let name_go (n, t) = (,) <$> names n <*> pure t
in ForeachFacts <$> (FactBinds <$> names ntime <*> traverse name_go ns) <*> return v <*> go ss
Block ss
-> Block <$> gos ss
InitAccumulator acc ss
-> InitAccumulator <$> transformX names exps acc <*> go ss
Read n acc vt ss
-> Read <$> names n <*> names acc <*> pure vt <*> go ss
Write n x
-> Write <$> names n <*> exps x
Output n ty xs
-> Output n ty <$> traverse (\(x,t) -> (,) <$> exps x <*> pure t) xs
where
go = transformX names exps
gos = mapM go
instance TransformX Accumulator where
transformX names exps (Accumulator n t x)
= do n' <- names n
x' <- exps x
return $ Accumulator n' t x'
flattenBlocks :: Statement a n p -> [Statement a n p]
flattenBlocks = \case
Block xs ->
concatMap flattenBlocks xs
x ->
[x]
instance (Pretty n, Pretty p) => Pretty (Statement a n p) where
pretty = \case
If x stmts elses ->
line <>
prettyKeyword "if" <+> prettyPunctuation "(" <> pretty x <> prettyPunctuation ")" <> line <>
subscope stmts <>
case elses of
Block [] ->
line
_ ->
line <>
prettyKeyword "else" <> line <>
subscope elses <>
line
Let n x stmts ->
annotate AnnBinding (pretty n) <+> "=" <+> pretty x <> line <>
nosubscope stmts
Read n acc _vt stmts ->
annotate AnnBinding (pretty n) <+> prettyPunctuation "=r" <+> annotate AnnVariable (pretty acc) <> line <>
nosubscope stmts
Write n x ->
annotate AnnBinding (pretty n) <+> prettyPunctuation "=w" <+> pretty x
While t n _ end stmts ->
line <>
prettyKeyword "while" <+> prettyPunctuation "(" <> annotate AnnVariable (pretty n) <+> pretty t <+> pretty end <> ")" <> line <>
subscope stmts <>
line
ForeachInts _ n from to stmts ->
line <>
prettyKeyword "foreach" <+>
prettyPunctuation "(" <> annotate AnnBinding (pretty n) <+>
prettyKeyword "in" <+> pretty from <+>
prettyPunctuation ".." <+> pretty to <>
prettyPunctuation ")" <> line <>
subscope stmts <>
line
ForeachFacts binds _ stmts ->
line <>
prettyKeyword "for_facts" <+>
prettyFactParts AnnBinding (factBindsAll binds) <+>
subscope stmts <>
line
x@(Block _) ->
vcat (fmap pretty $ flattenBlocks x)
InitAccumulator (Accumulator n vt x) stmts ->
annotate AnnBinding (pretty n) <+> prettyPunctuation "=i" <+> prettyTypedFlat (pretty x) (pretty vt) <> line <>
nosubscope stmts
Output n t xs ->
line <>
prettyKeyword "output" <+>
prettyTypedFlat (annotate AnnBinding $ pretty n) (pretty t) <+> prettyPunctuation "=" <> line <>
case xs of
[x] ->
indent 2 $ prettyFactPart AnnVariable x
_ ->
indent 4 $ prettyFactParts AnnVariable xs
where
subscope stmt
= vcat
[ prettyPunctuation "{"
, indent 4 (pretty stmt)
, prettyPunctuation "}"]
nosubscope stmt
= pretty stmt
prettyFactPart ann (nf, tf) =
prettyTypedFlat (annotate ann $ pretty nf) (pretty tf)
prettyFactParts ann xs0 =
case reverse xs0 of
[] ->
mempty
x : xs1 ->
align . prettyItems vsep (prettyFactPart ann x) $
fmap (PrettyItem "," . prettyFactPart ann) (reverse xs1)
instance Pretty WhileType where
pretty = \case
WhileEq ->
prettyPunctuation "=="
WhileNe ->
prettyPunctuation "!="
instance (Pretty n, Pretty p) => Pretty (Accumulator a n p) where
pretty (Accumulator n vt x) =
prettyTypedFlat (annotate AnnBinding $ pretty n) (pretty vt) <+> text "=" <+> pretty x
|
bf1723e4d9bcdd52cd23305b4e985b6526d17ce393f2a070be42a325622021e2 | input-output-hk/hydra | MonitoringSpec.hs | # LANGUAGE TypeApplications #
module Hydra.Logging.MonitoringSpec where
import Hydra.Prelude
import Test.Hydra.Prelude
import qualified Data.Text as Text
import Hydra.API.ServerOutput (ServerOutput (SnapshotConfirmed))
import Hydra.BehaviorSpec (testHeadId)
import Hydra.HeadLogic (
Effect (ClientEffect),
Event (NetworkEvent),
defaultTTL,
)
import Hydra.Ledger.Simple (aValidTx, utxoRefs)
import Hydra.Logging (nullTracer, traceWith)
import Hydra.Logging.Messages (HydraLog (Node))
import Hydra.Logging.Monitoring
import Hydra.Network.Message (Message (ReqTx))
import Hydra.Node (HydraNodeLog (BeginEvent, EndEffect))
import Hydra.Snapshot (Snapshot (Snapshot))
import Network.HTTP.Req (GET (..), NoReqBody (..), bsResponse, defaultHttpConfig, http, port, req, responseBody, runReq, (/:))
import Test.Hydra.Fixture (alice)
import Test.Network.Ports (randomUnusedTCPPorts)
spec :: Spec
spec =
it "provides prometheus metrics from traces" $ do
failAfter 3 $ do
[p] <- randomUnusedTCPPorts 1
withMonitoring (Just $ fromIntegral p) nullTracer $ \tracer -> do
traceWith tracer (Node $ BeginEvent alice (NetworkEvent defaultTTL (ReqTx alice (aValidTx 42))))
traceWith tracer (Node $ BeginEvent alice (NetworkEvent defaultTTL (ReqTx alice (aValidTx 43))))
threadDelay 0.1
traceWith tracer (Node $ EndEffect alice (ClientEffect (SnapshotConfirmed testHeadId (Snapshot 1 (utxoRefs [1]) [aValidTx 43, aValidTx 42]) mempty)))
metrics <-
Text.lines . decodeUtf8 . responseBody
<$> runReq @IO defaultHttpConfig (req GET (http "localhost" /: "metrics") NoReqBody bsResponse (port p))
metrics `shouldContain` ["hydra_head_confirmed_tx 2"]
metrics `shouldContain` ["hydra_head_tx_confirmation_time_ms_bucket{le=\"1000.0\"} 2.0"]
| null | https://raw.githubusercontent.com/input-output-hk/hydra/3c7df6eb4ed246416e4562797b37da3d4c3c3f3a/hydra-node/test/Hydra/Logging/MonitoringSpec.hs | haskell | # LANGUAGE TypeApplications #
module Hydra.Logging.MonitoringSpec where
import Hydra.Prelude
import Test.Hydra.Prelude
import qualified Data.Text as Text
import Hydra.API.ServerOutput (ServerOutput (SnapshotConfirmed))
import Hydra.BehaviorSpec (testHeadId)
import Hydra.HeadLogic (
Effect (ClientEffect),
Event (NetworkEvent),
defaultTTL,
)
import Hydra.Ledger.Simple (aValidTx, utxoRefs)
import Hydra.Logging (nullTracer, traceWith)
import Hydra.Logging.Messages (HydraLog (Node))
import Hydra.Logging.Monitoring
import Hydra.Network.Message (Message (ReqTx))
import Hydra.Node (HydraNodeLog (BeginEvent, EndEffect))
import Hydra.Snapshot (Snapshot (Snapshot))
import Network.HTTP.Req (GET (..), NoReqBody (..), bsResponse, defaultHttpConfig, http, port, req, responseBody, runReq, (/:))
import Test.Hydra.Fixture (alice)
import Test.Network.Ports (randomUnusedTCPPorts)
spec :: Spec
spec =
it "provides prometheus metrics from traces" $ do
failAfter 3 $ do
[p] <- randomUnusedTCPPorts 1
withMonitoring (Just $ fromIntegral p) nullTracer $ \tracer -> do
traceWith tracer (Node $ BeginEvent alice (NetworkEvent defaultTTL (ReqTx alice (aValidTx 42))))
traceWith tracer (Node $ BeginEvent alice (NetworkEvent defaultTTL (ReqTx alice (aValidTx 43))))
threadDelay 0.1
traceWith tracer (Node $ EndEffect alice (ClientEffect (SnapshotConfirmed testHeadId (Snapshot 1 (utxoRefs [1]) [aValidTx 43, aValidTx 42]) mempty)))
metrics <-
Text.lines . decodeUtf8 . responseBody
<$> runReq @IO defaultHttpConfig (req GET (http "localhost" /: "metrics") NoReqBody bsResponse (port p))
metrics `shouldContain` ["hydra_head_confirmed_tx 2"]
metrics `shouldContain` ["hydra_head_tx_confirmation_time_ms_bucket{le=\"1000.0\"} 2.0"]
| |
a319eb40548dd5c77346c13f234d2dd3903b5a626c68d5200468f9ca9ddd6c02 | ragkousism/Guix-on-Hurd | glib.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2013 , 2014 , 2015 , 2016 < >
Copyright © 2013 , 2015 < >
Copyright © 2013 < >
Copyright © 2014 , 2015 , 2016 , 2017 < >
Copyright © 2016 < >
Copyright © 2016 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages glib)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (guix build-system python)
#:use-module (gnu packages)
#:use-module (gnu packages base)
#:use-module (gnu packages backup)
#:use-module (gnu packages bison)
#:use-module (gnu packages compression)
#:use-module (gnu packages enlightenment)
#:use-module (gnu packages flex)
#:use-module (gnu packages gettext)
#:use-module (gnu packages gnome)
#:use-module (gnu packages gtk)
#:use-module (gnu packages libffi)
#:use-module (gnu packages linux)
#:use-module (gnu packages nettle)
#:use-module (gnu packages pcre)
#:use-module (gnu packages perl)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages python)
#:use-module (gnu packages xml)
#:use-module (gnu packages bash)
#:use-module (gnu packages file)
#:use-module (gnu packages xml)
#:use-module (gnu packages xorg)
#:use-module (gnu packages m4)
;; Export variables up-front to allow circular dependency with the 'xorg'
;; module.
#:export (dbus
glib
gobject-introspection
dbus-glib
intltool
itstool
libsigc++
glibmm
telepathy-glib))
(define dbus
(package
(name "dbus")
(version "1.10.16")
(source (origin
(method url-fetch)
(uri (string-append
"-"
version ".tar.gz"))
(sha256
(base32
"121kqkjsd3vgf8vca8364xl44qa5086h7qy5zs5f1l78ldpbmc57"))
(patches (search-patches "dbus-helper-search-path.patch"))))
(build-system gnu-build-system)
(arguments
'(#:configure-flags
(list
;; Install the system bus socket under /var.
"--localstatedir=/var"
;; Install the session bus socket under /tmp.
"--with-session-socket-dir=/tmp"
;; Use /etc/dbus-1 for system-wide config.
;; Look for configuration file under
;; /etc/dbus-1. This is notably required by
;; 'dbus-daemon-launch-helper', which looks for
;; the 'system.conf' file in that place,
;; regardless of what '--config-file' was
;; passed to 'dbus-daemon' on the command line;
;; see <>.
"--sysconfdir=/etc")
#:phases
(modify-phases %standard-phases
(replace 'install
(lambda _
;; Don't try to create /var and /etc.
(system* "make"
"localstatedir=/tmp/dummy"
"sysconfdir=/tmp/dummy"
"install"))))))
(native-inputs
`(("pkg-config" ,pkg-config)))
(inputs
`(("expat" ,expat)
Add a dependency on so that ' dbus - launch ' has support for
;; '--autolaunch'.
("libx11" ,libx11)))
(home-page "/")
(synopsis "Message bus for inter-process communication (IPC)")
(description
"D-Bus is a message bus system, a simple way for applications to
talk to one another. In addition to interprocess communication, D-Bus
helps coordinate process lifecycle; it makes it simple and reliable to
code a \"single instance\" application or daemon, and to launch
applications and daemons on demand when their services are needed.
D-Bus supplies both a system daemon (for events such as \"new hardware
device added\" or \"printer queue changed\") and a
per-user-login-session daemon (for general IPC needs among user
applications). Also, the message bus is built on top of a general
one-to-one message passing framework, which can be used by any two apps
to communicate directly (without going through the message bus
daemon). Currently the communicating applications are on one computer,
or through unencrypted TCP/IP suitable for use behind a firewall with
shared NFS home directories.")
or Academic Free License 2.1
(define glib
(package
(name "glib")
(version "2.50.2")
(source (origin
(method url-fetch)
(uri (string-append "mirror/"
name "/" (string-take version 4) "/"
name "-" version ".tar.xz"))
(sha256
(base32
"1xgvmiqbhla6grpmbidqs3bl6zrb9mjknfsh7r4hb3163xy76s5y"))
(patches (search-patches "glib-tests-timer.patch"))))
(build-system gnu-build-system)
(outputs '("out" ; everything
glib - mkenums , gtester , etc . ; depends on Python
20 MiB of GTK - Doc reference
(propagated-inputs
in the Requires.private field of glib-2.0.pc
(inputs
`(("coreutils" ,coreutils)
for
("libffi" ,libffi)
("zlib" ,zlib)
("tzdata" ,tzdata))) ; for tests/gdatetime.c
(native-inputs
`(("gettext" ,gettext-minimal)
for GDBus tests
("pkg-config" ,pkg-config)
("python" ,python-wrapper)
needed by GIO tests
("bash" ,bash)))
(arguments
'(#:phases
(modify-phases %standard-phases
(add-before 'build 'pre-build
(lambda* (#:key inputs outputs #:allow-other-keys)
;; For tests/gdatetime.c.
(setenv "TZDIR"
(string-append (assoc-ref inputs "tzdata")
"/share/zoneinfo"))
;; Some tests want write access there.
(setenv "HOME" (getcwd))
(setenv "XDG_CACHE_HOME" (getcwd))
(substitute* '("glib/gspawn.c"
"glib/tests/utils.c"
"tests/spawn-test.c")
(("/bin/sh")
(string-append (assoc-ref inputs "bash") "/bin/sh")))))
(add-before 'check 'disable-failing-tests
(lambda _
(let ((disable
(lambda (test-file test-paths)
(define pattern+procs
(map (lambda (test-path)
(cons
;; XXX: only works for single line statements.
(format #f "g_test_add_func.*\"~a\".*" test-path)
(const "")))
test-paths))
(substitute test-file pattern+procs)))
(failing-tests
'(("glib/tests/thread.c"
prlimit(2 ) returns ENOSYS on Linux 2.6.32 - 5 - xen - amd64
;; as found on hydra.gnu.org, and strace(1) doesn't
;; recognize it.
"/thread/thread4"))
("glib/tests/timer.c"
(;; fails if compiler optimizations are enabled, which they
;; are by default.
"/timer/stop"))
("gio/tests/gapplication.c"
(;; XXX: proven to be unreliable. See:
;; <>
;; <>
"/gapplication/quit"
;; XXX: fails randomly for unknown reason. See:
;; <-devel/2016-04/msg00215.html>
"/gapplication/local-actions"))
("gio/tests/contenttype.c"
(;; XXX: requires shared-mime-info.
"/contenttype/guess"
"/contenttype/subtype"
"/contenttype/list"
"/contenttype/icon"
"/contenttype/symbolic-icon"
"/contenttype/tree"))
("gio/tests/appinfo.c"
(;; XXX: requires update-desktop-database.
"/appinfo/associations"))
("gio/tests/desktop-app-info.c"
(;; XXX: requires update-desktop-database.
"/desktop-app-info/delete"
"/desktop-app-info/default"
"/desktop-app-info/fallback"
"/desktop-app-info/lastused"
"/desktop-app-info/search"))
("gio/tests/gdbus-peer.c"
(;; Requires /etc/machine-id.
"/gdbus/codegen-peer-to-peer"))
("gio/tests/gdbus-unix-addresses.c"
(;; Requires /etc/machine-id.
"/gdbus/x11-autolaunch")))))
(and-map (lambda (x) (apply disable x)) failing-tests)))))
Note : ` --docdir ' and ` --htmldir ' are not honored , so work around it .
#:configure-flags (list (string-append "--with-html-dir="
(assoc-ref %outputs "doc")
"/share/gtk-doc/html"))
In ' gio / tests ' , ' gdbus - test - codegen - generated.h ' is # included in a
;; file that gets compiled possibly before it has been fully generated.
#:parallel-tests? #f))
(native-search-paths
This variable is not really " owned " by GLib , but several related
;; packages refer to it: gobject-introspection's tools use it as a search
path for .gir files , and it 's also a search path for schemas produced
;; by 'glib-compile-schemas'.
(list (search-path-specification
(variable "XDG_DATA_DIRS")
(files '("share")))
To load extra gio modules from glib - networking , etc .
(search-path-specification
(variable "GIO_EXTRA_MODULES")
(files '("lib/gio/modules")))))
(search-paths native-search-paths)
(synopsis "Thread-safe general utility library; basis of GTK+ and GNOME")
(description
"GLib provides data structure handling for C, portability wrappers,
and interfaces for such runtime functionality as an event loop, threads,
dynamic loading, and an object system.")
(home-page "/")
(license license:lgpl2.0+))) ; some files are under lgpl2.1+
;; Tooling that scans GObject-based C libraries and emits .gir/.typelib
;; introspection metadata consumed by language bindings.
(define gobject-introspection
  (package
    (name "gobject-introspection")
    (version "1.50.0")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/"
                    "gobject-introspection/" (version-major+minor version)
                    "/gobject-introspection-" version ".tar.xz"))
              (sha256
               (base32 "1i9pccig8mv6qf0c1z8fcapays190nmr7j6pyc7cfhzmcv39fr8w"))
              (modules '((guix build utils)))
              (snippet
               ;; Use the patched-in Python rather than /usr/bin/env.
               '(substitute* "tools/g-ir-tool-template.in"
                  (("#!/usr/bin/env @PYTHON@") "#!@PYTHON@")))
              (patches (search-patches
                        "gobject-introspection-cc.patch"
                        "gobject-introspection-girepository.patch"
                        "gobject-introspection-absolute-shlib-path.patch"))))
    (build-system gnu-build-system)
    (inputs
     `(("bison" ,bison)
       ("flex" ,flex)
       ("glib" ,glib)
       ("python-2" ,python-2)))
    (native-inputs
     `(("glib" ,glib "bin")
       ("pkg-config" ,pkg-config)))
    (propagated-inputs
     `(;; In practice, GIR users will need libffi when using
       ;; gobject-introspection.
       ("libffi" ,libffi)))
    (native-search-paths
     (list (search-path-specification
            (variable "GI_TYPELIB_PATH")
            (files '("lib/girepository-1.0")))))
    (search-paths native-search-paths)
    (arguments
     `(;; The patch 'gobject-introspection-absolute-shlib-path.patch' causes
       ;; some tests to fail.
       #:tests? #f))
    (home-page "https://wiki.gnome.org/GObjectIntrospection")
    (synopsis "Generate interface introspection data for GObject libraries")
    (description
     "GObject introspection is a middleware layer between C libraries (using
GObject) and language bindings.  The C library can be scanned at compile time
and generate a metadata file, in addition to the actual native C library.  Then
at runtime, language bindings can read this metadata and automatically provide
bindings to call into the C library.")
    ;; Some bits are distributed under the LGPL2+, others under the GPL2+.
    (license license:gpl2+)))
;; Translation-merging helper tools built around gettext PO files.
(define intltool
  (package
    (name "intltool")
    (version "0.51.0")
    (source (origin
             (method url-fetch)
             (uri (string-append "https://launchpad.net/intltool/trunk/"
                                 version "/+download/intltool-"
                                 version ".tar.gz"))
             (sha256
              (base32
               "1karx4sb7bnm2j67q0q74hspkfn6lqprpy5r99vkn5bb36a4viv7"))))
    (build-system gnu-build-system)
    (inputs
     `(("file" ,file)))
    (propagated-inputs
     `(;; Propagate gettext because users expect it to be there, and so does
       ;; the `intltool-update' script.
       ("gettext" ,gettext-minimal)

       ("perl-xml-parser" ,perl-xml-parser)
       ("perl" ,perl)))
    (arguments
     `(#:phases (alist-cons-after
                 'unpack 'patch-file-references
                 ;; Refer to the store copy of file(1) instead of relying on
                 ;; whatever happens to be on $PATH at run time.
                 (lambda* (#:key inputs #:allow-other-keys)
                   (let ((file (assoc-ref inputs "file")))
                     (substitute* "intltool-update.in"
                       (("`file") (string-append "`" file "/bin/file")))))
                 %standard-phases)))
    (home-page "https://launchpad.net/intltool/+download")
    (synopsis "Tools to centralise translations of different file formats")
    (description
     "Intltool is a set of tools to centralise translations of many different
file formats using GNU gettext-compatible PO files.

The intltool collection can be used to do these things:

    Extract translatable strings from various source files (.xml.in,
glade, .desktop.in, .server.in, .oaf.in).

    Collect the extracted strings together with messages from traditional
source files (.c, .h) in po/$(PACKAGE).pot.

    Merge back the translations from .po files into .xml, .desktop and
oaf files.  This merge step will happen at build resp. installation time.")
    (license license:gpl2+)))
;; XML translation tool driven by W3C ITS rules; used to translate
;; documentation (e.g. Mallard/DocBook) through PO files.
(define itstool
  (package
    (name "itstool")
    (version "2.0.2")
    (source (origin
             (method url-fetch)
             (uri (string-append "http://files.itstool.org/itstool/itstool-"
                                 version ".tar.bz2"))
             (sha256
              (base32
               "0fh34wi52i0qikgvlmrcpf1vx6gc1xqdad4539l4d9hikfsrz45z"))))
    (build-system gnu-build-system)
    (inputs
     `(("libxml2" ,libxml2)
       ("python2-libxml2" ,python2-libxml2)
       ("python-2" ,python-2)))
    (arguments
     '(#:phases
       (modify-phases %standard-phases
         (add-after
          'install 'wrap-program
          ;; Make sure the installed script can find its Python modules
          ;; (libxml2 bindings) regardless of the caller's environment.
          (lambda* (#:key outputs #:allow-other-keys)
            (let ((prog (string-append (assoc-ref outputs "out")
                                       "/bin/itstool")))
              (wrap-program prog
                `("PYTHONPATH" = (,(getenv "PYTHONPATH"))))))))))
    (home-page "http://www.itstool.org/")
    (synopsis "Tool to translate XML documents with PO files")
    (description
     "ITS Tool allows you to translate your XML documents with PO files, using
rules from the W3C Internationalization Tag Set (ITS) to determine what to
translate and how to separate it into PO file messages.

PO files are the standard translation format for GNU and other Unix-like
systems.  They present translatable information as discrete messages, allowing
each message to be translated independently.  In contrast to whole-page
translation, translating with a message-based format like PO means you can
easily track changes to the source document down to the paragraph.  When new
strings are added or existing strings are modified, you only need to update the
corresponding messages.

ITS Tool is designed to make XML documents translatable through PO files by
applying standard ITS rules, as well as extension rules specific to ITS Tool.
ITS also provides an industry standard way for authors to override translation
information in their documents, such as whether a particular element should be
translated.")
    (license license:gpl3+)))
;; Legacy GLib bindings for D-Bus; kept for packages that have not yet
;; migrated to GDBus.
(define dbus-glib
  (package
    (name "dbus-glib")
    (version "0.108")
    (source (origin
             (method url-fetch)
             (uri
              (string-append "https://dbus.freedesktop.org/releases/dbus-glib/dbus-glib-"
                             version ".tar.gz"))
             (sha256
              (base32
               "0b307hw9j41npzr6niw1bs6ryp87m5yafg492gqwvsaj4dz0qd4z"))))
    (build-system gnu-build-system)
    (propagated-inputs ; according to dbus-glib-1.pc
     `(("dbus" ,dbus)
       ("glib" ,glib)))
    (inputs
     `(("expat" ,expat)))
    (native-inputs
     `(("glib" ,glib "bin")
       ("pkg-config" ,pkg-config)))
    (home-page "https://dbus.freedesktop.org/doc/dbus-glib/")
    (synopsis "D-Bus GLib bindings")
    (description
     "GLib bindings for D-Bus.  The package is obsolete and superseded
by GDBus included in Glib.")
    (license license:gpl2+)))                 ; or Academic Free License 2.1
;; Type-safe signal/slot library; a core dependency of glibmm and gtkmm.
(define libsigc++
  (package
    (name "libsigc++")
    (version "2.10.0")
    (source (origin
             (method url-fetch)
             (uri (string-append "mirror://gnome/sources/libsigc++/"
                                 (version-major+minor version) "/"
                                 name "-" version ".tar.xz"))
             (sha256
              (base32
               "10cd54l4zihss9qxfhd2iip2k7mr292k37i54r2cpgv0c8sdchzq"))))
    (build-system gnu-build-system)
    (native-inputs `(("pkg-config" ,pkg-config)
                     ("m4" ,m4)))
    (home-page "http://libsigc.sourceforge.net/")
    (synopsis "Type-safe callback system for standard C++")
    (description
     "Libsigc++ implements a type-safe callback system for standard C++.  It
allows you to define signals and to connect those signals to any callback
function, either global or a member function, regardless of whether it is
static or virtual.

It also contains adaptor classes for connection of dissimilar callbacks and
has an ease of use unmatched by other C++ callback libraries.")
    (license license:lgpl2.1+)))
;; C++ wrappers around GLib/GObject/GIO.
(define glibmm
  (package
    (name "glibmm")
    (version "2.50.0")
    (source (origin
             (method url-fetch)
             (uri (string-append "mirror://gnome/sources/glibmm/"
                                 (version-major+minor version)
                                 "/glibmm-" version ".tar.xz"))
             (sha256
              (base32
               "152yz5w0lx0y5j9ml72az7pc83p4l92bc0sb8whpcazldqy6wwnz"))))
    (build-system gnu-build-system)
    (arguments
     `(#:phases (alist-cons-before
                 'build 'pre-build
                 (lambda _
                   ;; This test uses /etc/fstab as an example file to read
                   ;; from; choose a better example.
                   (substitute* "tests/giomm_simple/main.cc"
                     (("/etc/fstab")
                      (string-append (getcwd)
                                     "/tests/giomm_simple/main.cc")))

                   ;; This test does a DNS lookup, and then expects to be able
                   ;; to open a TLS session; just skip it.
                   (substitute* "tests/giomm_tls_client/main.cc"
                     (("Gio::init.*$")
                      "return 77;\n")))
                 %standard-phases)))
    (native-inputs `(("pkg-config" ,pkg-config)
                     ("glib" ,glib "bin")))
    (propagated-inputs
     `(("libsigc++" ,libsigc++)
       ("glib" ,glib)))
    (home-page "http://gtkmm.org/")
    (synopsis "C++ interface to the GLib library")
    (description
     "Glibmm provides a C++ programming interface to the part of GLib that are
useful for C++.")
    (license license:lgpl2.1+)))
;; Old PyGObject 2.x line, kept for packages that still require the
;; pygobject-2.0 pkg-config platform.
(define-public python2-pygobject-2
  (package
    (name "python2-pygobject")
    ;; This was the last version to declare the 2.0 platform number, i.e. its
    ;; pkg-config files were named pygobject-2.0.pc
    (version "2.28.6")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "mirror://gnome/sources/pygobject/"
                           (version-major+minor version)
                           "/pygobject-" version ".tar.xz"))
       (sha256
        (base32
         "1f5dfxjnil2glfwxnqr14d2cjfbkghsbsn8n04js2c2icr7iv2pv"))
       (patches (search-patches
                 "python2-pygobject-2-gi-info-type-error-domain.patch"))))
    (build-system gnu-build-system)
    (native-inputs
     `(("which" ,which)
       ("glib-bin" ,glib "bin")         ;for tests: glib-compile-schemas
       ("pkg-config" ,pkg-config)
       ("dbus" ,dbus)))                 ;for tests
    (inputs
     `(("python" ,python-2)
       ("glib" ,glib)
       ("python2-pycairo" ,python2-pycairo)
       ("gobject-introspection" ,gobject-introspection)))
    (propagated-inputs
     `(("libffi" ,libffi)))             ;mentioned in pygobject-2.0.pc
    (arguments
     `(#:tests? #f                      ;segfaults during tests
       #:configure-flags '("LIBS=-lcairo-gobject")))
    (home-page "https://pypi.python.org/pypi/PyGObject")
    (synopsis "Python bindings for GObject")
    (description
     "Python bindings for GLib, GObject, and GIO.")
    (license license:lgpl2.1+)))
;; Current PyGObject 3.x, the GObject-introspection-based Python bindings.
(define-public python-pygobject
  (package
    (name "python-pygobject")
    (version "3.22.0")
    (source
     (origin
       (method url-fetch)
       (uri (string-append "mirror://gnome/sources/pygobject/"
                           (version-major+minor version)
                           "/pygobject-" version ".tar.xz"))
       (sha256
        (base32
         "1ryblpc4wbhxcwf7grgib4drrab5xi6p78ihhrx0zj7g13xrrch8"))))
    (build-system gnu-build-system)
    (native-inputs
     `(("which" ,which)
       ("glib-bin" ,glib "bin")         ;for tests: glib-compile-schemas
       ("pkg-config" ,pkg-config)))
    (inputs
     `(("python" ,python)
       ("python-pycairo" ,python-pycairo)
       ("gobject-introspection" ,gobject-introspection)))
    (propagated-inputs
     ;; pygobject-3.0.pc refers to all these.
     `(("glib" ,glib)
       ("libffi" ,libffi)))
    (arguments
     ;; TODO: failing tests: test_native_calls_async
     ;; test_native_calls_async_errors test_native_calls_sync
     ;; test_native_calls_sync_errors test_python_calls_async
     ;; test_python_calls_sync test_python_calls_sync_errors
     ;; test_python_calls_sync_noargs test_callback_user_data_middle_none
     ;; test_callback_user_data_middle_single
     ;; test_callback_user_data_middle_tuple
     '(#:tests? #f))
    ;; For finding typelib files, since gobject-introspection isn't propagated.
    (native-search-paths (package-native-search-paths gobject-introspection))
    (home-page "https://pypi.python.org/pypi/PyGObject")
    (synopsis "Python bindings for GObject")
    (description
     "Python bindings for GLib, GObject, and GIO.")
    (license license:lgpl2.1+)
    (properties `((python2-variant . ,(delay python2-pygobject))))))
;; Python 2 variant of 'python-pygobject' (see the python2-variant property
;; above); only the Python-related inputs differ.
(define-public python2-pygobject
  (package (inherit (strip-python2-variant python-pygobject))
    (name "python2-pygobject")
    (inputs
     `(("python" ,python-2)
       ("python-pycairo" ,python2-pycairo)
       ("gobject-introspection" ,gobject-introspection)))))
;; GLib-side library of the Telepathy real-time communication framework.
(define telepathy-glib
  (package
    (name "telepathy-glib")
    (version "0.24.1")
    (source
     (origin
      (method url-fetch)
      (uri
       (string-append
        "https://telepathy.freedesktop.org/releases/telepathy-glib/"
        "telepathy-glib-" version ".tar.gz"))
      (sha256
       (base32
        "1symyzbjmxvksn2ifdkk50lafjm2llf2sbmky062gq2pz3cg23cy"))
      (patches
       (list
        ;; Don't use the same test name for multiple tests.
        ;; <https://bugs.freedesktop.org/show_bug.cgi?id=92245>
        (origin
          (method url-fetch)
          (uri "https://bugs.freedesktop.org/attachment.cgi?id=118608")
          (file-name (string-append "telepathy-glib-duplicate-tests.patch"))
          (sha256
           (base32
            "0z261fwrszxb28ccg3hsg9rizig4s84zvwmx6y31a4pyv7bvs5w3")))))))
    (build-system gnu-build-system)
    (arguments
     '(#:configure-flags '("--enable-vala-bindings")))
    (native-inputs
     `(("glib" ,glib "bin") ; uses glib-mkenums
       ("gobject-introspection" ,gobject-introspection)
       ("pkg-config" ,pkg-config)
       ("python" ,python-2)
       ("vala" ,vala)
       ("xsltproc" ,libxslt)))
    (propagated-inputs
     ;; There are all in the Requires.private field of telepathy-glib.pc.
     `(("dbus" ,dbus)
       ("dbus-glib" ,dbus-glib)
       ("glib" ,glib)))
    (home-page "http://telepathy.freedesktop.org/wiki/")
    (synopsis "GLib Real-time communications framework over D-Bus")
    (description "Telepathy is a flexible, modular communications framework
that enables real-time communication over D-Bus via pluggable protocol
backends.  Telepathy is a communications service that can be accessed by
many applications simultaneously.

This package provides the library for GLib applications.")
    (license license:lgpl2.1+)))
;; Native C++ bindings for D-Bus, plus the dbusxx-xml2cpp code generator.
(define-public dbus-c++
  (package
    (name "dbus-c++")
    (version "0.9.0")
    (source (origin
              (method url-fetch)
              (uri
               (string-append
                "mirror://sourceforge/dbus-cplusplus/dbus-c%2B%2B/"
                version "/libdbus-c%2B%2B-" version ".tar.gz"))
              (file-name (string-append name "-" version ".tar.gz"))
              (sha256
               (base32
                "0qafmy2i6dzx4n1dqp6pygyy6gjljnb7hwjcj2z11c1wgclsq4dw"))))
    (build-system gnu-build-system)
    (propagated-inputs
     `(("dbus" ,dbus)))                      ;mentioned in the pkg-config file
    (inputs
     `(("efl" ,efl)
       ("expat" ,expat)
       ("glib" ,glib)))
    (native-inputs
     `(("pkg-config" ,pkg-config)))
    (arguments
     `(;; The 'configure' machinery fails to detect that it needs -lpthread.
       #:configure-flags (list "LDFLAGS=-lpthread")
       #:phases
       (modify-phases %standard-phases
         (add-before 'configure 'add-missing-header
           ;; The header uses close(2) but forgets to include <unistd.h>.
           (lambda _
             (substitute* "include/dbus-c++/eventloop-integration.h"
               (("#include <errno.h>")
                "#include <errno.h>\n#include <unistd.h>")))))))
    (synopsis "D-Bus API for C++")
    (description "This package provides D-Bus client API bindings for the C++
programming language.  It also contains the utility
@command{dbuscxx-xml2cpp}.")
    (home-page "https://sourceforge.net/projects/dbus-cplusplus/")
    (license license:lgpl2.1+)))
;; Helper library for reading/writing AppStream software metadata.
(define-public appstream-glib
  (package
    (name "appstream-glib")
    (version "0.6.7")
    (source (origin
              (method url-fetch)
              (uri (string-append "https://people.freedesktop.org/~hughsient/"
                                  "appstream-glib/releases/"
                                  "appstream-glib-" version ".tar.xz"))
              (sha256
               (base32
                "08mrf4k0jhnpdd4fig2grmi2vbxkgdhrwk0d0zq0j1wp5ip7arwp"))))
    (build-system gnu-build-system)
    (native-inputs
     `(("pkg-config" ,pkg-config)))
    (inputs
     `(("gdk-pixbuf" ,gdk-pixbuf)
       ("glib" ,glib)
       ("gtk+" ,gtk+)
       ("json-glib" ,json-glib)
       ("libarchive" ,libarchive)
       ("libsoup" ,libsoup)
       ("nettle" ,nettle)
       ("util-linux" ,util-linux)))
    (arguments
     `(#:configure-flags
       '("--disable-firmware" "--disable-dep11")
       #:phases
       (modify-phases %standard-phases
         (add-after 'unpack 'patch-tests
           ;; Drop self-tests that depend on host data or are too slow.
           (lambda _
             (substitute* "libappstream-glib/as-self-test.c"
               (("g_test_add_func.*as_test_store_local_appdata_func);") "")
               (("g_test_add_func.*as_test_store_speed_appdata_func);") "")
               (("g_test_add_func.*as_test_store_speed_desktop_func);") ""))
             #t)))))
    (home-page "https://github.com/hughsie/appstream-glib")
    (synopsis "Library for reading and writing AppStream metadata")
    (description "This library provides objects and helper methods to help
reading and writing @uref{https://www.freedesktop.org/wiki/Distributions/AppStream,AppStream}
metadata.")
    (license license:lgpl2.1+)))
| null | https://raw.githubusercontent.com/ragkousism/Guix-on-Hurd/e951bb2c0c4961dc6ac2bda8f331b9c4cee0da95/gnu/packages/glib.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Export variables up-front to allow circular dependency with the 'xorg'
module.
Install the system bus socket under /var.
Install the session bus socket under /tmp.
Use /etc/dbus-1 for system-wide config.
Look for configuration file under
/etc/dbus-1. This is notably required by
'dbus-daemon-launch-helper', which looks for
the 'system.conf' file in that place,
regardless of what '--config-file' was
passed to 'dbus-daemon' on the command line;
see <>.
Don't try to create /var and /etc.
'--autolaunch'.
it makes it simple and reliable to
everything
depends on Python
for tests/gdatetime.c
For tests/gdatetime.c.
Some tests want write access there.
XXX: only works for single line statements.
as found on hydra.gnu.org, and strace(1) doesn't
recognize it.
fails if compiler optimizations are enabled, which they
are by default.
XXX: proven to be unreliable. See:
<>
<>
XXX: fails randomly for unknown reason. See:
<-devel/2016-04/msg00215.html>
XXX: requires shared-mime-info.
XXX: requires update-desktop-database.
XXX: requires update-desktop-database.
Requires /etc/machine-id.
Requires /etc/machine-id.
file that gets compiled possibly before it has been fully generated.
packages refer to it: gobject-introspection's tools use it as a search
by 'glib-compile-schemas'.
some files are under lgpl2.1+
In practice, GIR users will need libffi when using
gobject-introspection.
The patch 'gobject-introspection-absolute-shlib-path.patch' causes
some tests to fail.
Propagate gettext because users expect it to be there, and so does
according to dbus-glib-1.pc
This test uses /etc/fstab as an example file to read
from; choose a better example.
This test does a DNS lookup, and then expects to be able
just skip it .
pkg-config files were named pygobject-2.0.pc
for tests: glib-compile-schemas
for tests
mentioned in pygobject-2.0.pc
segfaults during tests
for tests: glib-compile-schemas
pygobject-3.0.pc refers to all these.
TODO: failing tests: test_native_calls_async
test_native_calls_async_errors test_native_calls_sync
test_native_calls_sync_errors test_python_calls_async
test_python_calls_sync_noargs test_callback_user_data_middle_none
test_callback_user_data_middle_tuple
Don't use the same test name for multiple tests.
<>
uses glib-mkenums
mentioned in the pkg-config file
The 'configure' machinery fails to detect that it needs -lpthread. | Copyright © 2013 , 2014 , 2015 , 2016 < >
Copyright © 2013 , 2015 < >
Copyright © 2013 < >
Copyright © 2014 , 2015 , 2016 , 2017 < >
Copyright © 2016 < >
Copyright © 2016 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages glib)
#:use-module ((guix licenses) #:prefix license:)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix utils)
#:use-module (guix build-system gnu)
#:use-module (guix build-system python)
#:use-module (gnu packages)
#:use-module (gnu packages base)
#:use-module (gnu packages backup)
#:use-module (gnu packages bison)
#:use-module (gnu packages compression)
#:use-module (gnu packages enlightenment)
#:use-module (gnu packages flex)
#:use-module (gnu packages gettext)
#:use-module (gnu packages gnome)
#:use-module (gnu packages gtk)
#:use-module (gnu packages libffi)
#:use-module (gnu packages linux)
#:use-module (gnu packages nettle)
#:use-module (gnu packages pcre)
#:use-module (gnu packages perl)
#:use-module (gnu packages pkg-config)
#:use-module (gnu packages python)
#:use-module (gnu packages xml)
#:use-module (gnu packages bash)
#:use-module (gnu packages file)
#:use-module (gnu packages xml)
#:use-module (gnu packages xorg)
#:use-module (gnu packages m4)
#:export (dbus
glib
gobject-introspection
dbus-glib
intltool
itstool
libsigc++
glibmm
telepathy-glib))
;; The reference D-Bus message bus daemon and client library.
(define dbus
  (package
    (name "dbus")
    (version "1.10.16")
    (source (origin
             (method url-fetch)
             (uri (string-append
                   "https://dbus.freedesktop.org/releases/dbus/dbus-"
                   version ".tar.gz"))
             (sha256
              (base32
               "121kqkjsd3vgf8vca8364xl44qa5086h7qy5zs5f1l78ldpbmc57"))
             (patches (search-patches "dbus-helper-search-path.patch"))))
    (build-system gnu-build-system)
    (arguments
     '(#:configure-flags
       (list
        ;; Install the system bus socket under /var.
        "--localstatedir=/var"

        ;; Install the session bus socket under /tmp.
        "--with-session-socket-dir=/tmp"

        ;; Look for configuration file under
        ;; /etc/dbus-1.  This is notably required by
        ;; 'dbus-daemon-launch-helper', which looks for
        ;; the 'system.conf' file in that place,
        ;; regardless of what '--config-file' was
        ;; passed to 'dbus-daemon' on the command line;
        ;; see <https://bugs.freedesktop.org/show_bug.cgi?id=92458>.
        "--sysconfdir=/etc")
       #:phases
       (modify-phases %standard-phases
         (replace 'install
                  (lambda _
                    ;; Don't try to create /var and /etc.
                    (system* "make"
                             "localstatedir=/tmp/dummy"
                             "sysconfdir=/tmp/dummy"
                             "install"))))))
    (native-inputs
     `(("pkg-config" ,pkg-config)))
    (inputs
     `(("expat" ,expat)

       ;; Add a dependency on libx11 so that 'dbus-launch' has support for
       ;; '--autolaunch'.
       ("libx11" ,libx11)))

    (home-page "http://dbus.freedesktop.org/")
    (synopsis "Message bus for inter-process communication (IPC)")
    (description
     "D-Bus is a message bus system, a simple way for applications to
talk to one another.  In addition to interprocess communication, D-Bus
helps coordinate process lifecycle; it makes it simple and reliable to
code a \"single instance\" application or daemon, and to launch
applications and daemons on demand when their services are needed.

D-Bus supplies both a system daemon (for events such as \"new hardware
device added\" or \"printer queue changed\") and a
per-user-login-session daemon (for general IPC needs among user
applications).  Also, the message bus is built on top of a general
one-to-one message passing framework, which can be used by any two apps
to communicate directly (without going through the message bus
daemon).  Currently the communicating applications are on one computer,
or through unencrypted TCP/IP suitable for use behind a firewall with
shared NFS home directories.")
    (license license:gpl2+)))                 ; or Academic Free License 2.1
;; The GLib utility library: event loop, threads, object system (GObject),
;; and GIO.  Foundation of GTK+ and GNOME.
(define glib
  (package
   (name "glib")
   (version "2.50.2")
   (source (origin
            (method url-fetch)
            (uri (string-append "mirror://gnome/sources/"
                                name "/" (string-take version 4) "/"
                                name "-" version ".tar.xz"))
            (sha256
             (base32
              "1xgvmiqbhla6grpmbidqs3bl6zrb9mjknfsh7r4hb3163xy76s5y"))
            (patches (search-patches "glib-tests-timer.patch"))))
   (build-system gnu-build-system)
   (outputs '("out"           ; everything
              "bin"           ; glib-mkenums, gtester, etc.; depends on Python
              "doc"))         ; 20 MiB of GTK-Doc reference
   (propagated-inputs
    `(("pcre" ,pcre))) ; in the Requires.private field of glib-2.0.pc
   (inputs
    `(("coreutils" ,coreutils)
      ("tzdata" ,tzdata)     ; for tests/gdatetime.c
      ("libffi" ,libffi)
      ("zlib" ,zlib)))
   (native-inputs
    `(("gettext" ,gettext-minimal)
      ("dbus" ,dbus)                            ; for GDBus tests
      ("pkg-config" ,pkg-config)
      ("python" ,python-wrapper)
      ("perl" ,perl)                            ; needed by GIO tests
      ("bash" ,bash)))
   (arguments
    '(#:phases
      (modify-phases %standard-phases
        (add-before 'build 'pre-build
          (lambda* (#:key inputs outputs #:allow-other-keys)
            ;; For tests/gdatetime.c.
            (setenv "TZDIR"
                    (string-append (assoc-ref inputs "tzdata")
                                   "/share/zoneinfo"))

            ;; Some tests want write access there.
            (setenv "HOME" (getcwd))
            (setenv "XDG_CACHE_HOME" (getcwd))

            (substitute* '("glib/gspawn.c"
                           "glib/tests/utils.c"
                           "tests/spawn-test.c")
              (("/bin/sh")
               (string-append (assoc-ref inputs "bash") "/bin/sh")))))
        (add-before 'check 'disable-failing-tests
          (lambda _
            (let ((disable
                   (lambda (test-file test-paths)
                     (define pattern+procs
                       (map (lambda (test-path)
                              (cons
                               ;; XXX: only works for single line statements.
                               (format #f "g_test_add_func.*\"~a\".*" test-path)
                               (const "")))
                            test-paths))
                     (substitute test-file pattern+procs)))
                  (failing-tests
                   '(("glib/tests/thread.c"
                      (;; prlimit(2) returns ENOSYS on Linux 2.6.32-5-xen-amd64
                       ;; as found on hydra.gnu.org, and strace(1) doesn't
                       ;; recognize it.
                       "/thread/thread4"))

                     ("glib/tests/timer.c"
                      (;; fails if compiler optimizations are enabled, which they
                       ;; are by default.
                       "/timer/stop"))

                     ("gio/tests/gapplication.c"
                      (;; XXX: proven to be unreliable.  See:
                       ;;  <https://debbugs.gnu.org/cgi/bugreport.cgi?bug=24757>
                       ;;  <http://bugs.gnu.org/21868>
                       "/gapplication/quit"

                       ;; XXX: fails randomly for unknown reason.  See:
                       ;;  <https://lists.gnu.org/archive/html/guix-devel/2016-04/msg00215.html>
                       "/gapplication/local-actions"))

                     ("gio/tests/contenttype.c"
                      (;; XXX: requires shared-mime-info.
                       "/contenttype/guess"
                       "/contenttype/subtype"
                       "/contenttype/list"
                       "/contenttype/icon"
                       "/contenttype/symbolic-icon"
                       "/contenttype/tree"))

                     ("gio/tests/appinfo.c"
                      (;; XXX: requires update-desktop-database.
                       "/appinfo/associations"))

                     ("gio/tests/desktop-app-info.c"
                      (;; XXX: requires update-desktop-database.
                       "/desktop-app-info/delete"
                       "/desktop-app-info/default"
                       "/desktop-app-info/fallback"
                       "/desktop-app-info/lastused"
                       "/desktop-app-info/search"))

                     ("gio/tests/gdbus-peer.c"
                      (;; Requires /etc/machine-id.
                       "/gdbus/codegen-peer-to-peer"))

                     ("gio/tests/gdbus-unix-addresses.c"
                      (;; Requires /etc/machine-id.
                       "/gdbus/x11-autolaunch")))))
              (and-map (lambda (x) (apply disable x)) failing-tests)))))

      ;; Note: `--docdir' and `--htmldir' are not honored, so work around it.
      #:configure-flags (list (string-append "--with-html-dir="
                                             (assoc-ref %outputs "doc")
                                             "/share/gtk-doc/html"))

      ;; In 'gio/tests', 'gdbus-test-codegen-generated.h' is #included in a
      ;; file that gets compiled possibly before it has been fully generated.
      #:parallel-tests? #f))
   (native-search-paths
    ;; This variable is not really "owned" by GLib, but several related
    ;; packages refer to it: gobject-introspection's tools use it as a search
    ;; path for .gir files, and it's also a search path for schemas produced
    ;; by 'glib-compile-schemas'.
    (list (search-path-specification
           (variable "XDG_DATA_DIRS")
           (files '("share")))
          ;; To load extra gio modules from glib-networking, etc.
          (search-path-specification
           (variable "GIO_EXTRA_MODULES")
           (files '("lib/gio/modules")))))
   (search-paths native-search-paths)

   (synopsis "Thread-safe general utility library; basis of GTK+ and GNOME")
   (description
    "GLib provides data structure handling for C, portability wrappers,
and interfaces for such runtime functionality as an event loop, threads,
dynamic loading, and an object system.")
   (home-page "https://developer.gnome.org/glib/")
   (license license:lgpl2.0+)))             ; some files are under lgpl2.1+
;; Tooling that scans GObject-based C libraries and emits .gir/.typelib
;; introspection metadata consumed by language bindings.
(define gobject-introspection
  (package
    (name "gobject-introspection")
    (version "1.50.0")
    (source (origin
              (method url-fetch)
              (uri (string-append "mirror://gnome/sources/"
                    "gobject-introspection/" (version-major+minor version)
                    "/gobject-introspection-" version ".tar.xz"))
              (sha256
               (base32 "1i9pccig8mv6qf0c1z8fcapays190nmr7j6pyc7cfhzmcv39fr8w"))
              (modules '((guix build utils)))
              (snippet
               ;; Use the patched-in Python rather than /usr/bin/env.
               '(substitute* "tools/g-ir-tool-template.in"
                  (("#!/usr/bin/env @PYTHON@") "#!@PYTHON@")))
              (patches (search-patches
                        "gobject-introspection-cc.patch"
                        "gobject-introspection-girepository.patch"
                        "gobject-introspection-absolute-shlib-path.patch"))))
    (build-system gnu-build-system)
    (inputs
     `(("bison" ,bison)
       ("flex" ,flex)
       ("glib" ,glib)
       ("python-2" ,python-2)))
    (native-inputs
     `(("glib" ,glib "bin")
       ("pkg-config" ,pkg-config)))
    (propagated-inputs
     `(;; In practice, GIR users will need libffi when using
       ;; gobject-introspection.
       ("libffi" ,libffi)))
    (native-search-paths
     (list (search-path-specification
            (variable "GI_TYPELIB_PATH")
            (files '("lib/girepository-1.0")))))
    (search-paths native-search-paths)
    (arguments
     `(;; The patch 'gobject-introspection-absolute-shlib-path.patch' causes
       ;; some tests to fail.
       #:tests? #f))
    (home-page "https://wiki.gnome.org/GObjectIntrospection")
    (synopsis "Generate interface introspection data for GObject libraries")
    (description
     "GObject introspection is a middleware layer between C libraries (using
GObject) and language bindings.  The C library can be scanned at compile time
and generate a metadata file, in addition to the actual native C library.  Then
at runtime, language bindings can read this metadata and automatically provide
bindings to call into the C library.")
    ;; Some bits are distributed under the LGPL2+, others under the GPL2+.
    (license license:gpl2+)))
;; Translation-merging helper tools built around gettext PO files.
(define intltool
  (package
    (name "intltool")
    (version "0.51.0")
    (source (origin
             (method url-fetch)
             (uri (string-append "https://launchpad.net/intltool/trunk/"
                                 version "/+download/intltool-"
                                 version ".tar.gz"))
             (sha256
              (base32
               "1karx4sb7bnm2j67q0q74hspkfn6lqprpy5r99vkn5bb36a4viv7"))))
    (build-system gnu-build-system)
    (inputs
     `(("file" ,file)))
    (propagated-inputs
     `(;; Propagate gettext because users expect it to be there, and so does
       ;; the `intltool-update' script.
       ("gettext" ,gettext-minimal)

       ("perl-xml-parser" ,perl-xml-parser)
       ("perl" ,perl)))
    (arguments
     `(#:phases (alist-cons-after
                 'unpack 'patch-file-references
                 ;; Refer to the store copy of file(1) instead of relying on
                 ;; whatever happens to be on $PATH at run time.
                 (lambda* (#:key inputs #:allow-other-keys)
                   (let ((file (assoc-ref inputs "file")))
                     (substitute* "intltool-update.in"
                       (("`file") (string-append "`" file "/bin/file")))))
                 %standard-phases)))
    (home-page "https://launchpad.net/intltool/+download")
    (synopsis "Tools to centralise translations of different file formats")
    (description
     "Intltool is a set of tools to centralise translations of many different
file formats using GNU gettext-compatible PO files.

The intltool collection can be used to do these things:

    Extract translatable strings from various source files (.xml.in,
glade, .desktop.in, .server.in, .oaf.in).

    Collect the extracted strings together with messages from traditional
source files (.c, .h) in po/$(PACKAGE).pot.

    Merge back the translations from .po files into .xml, .desktop and
oaf files.  This merge step will happen at build resp. installation time.")
    (license license:gpl2+)))
;; XML translation tool driven by W3C ITS rules; used to translate
;; documentation (e.g. Mallard/DocBook) through PO files.
(define itstool
  (package
    (name "itstool")
    (version "2.0.2")
    (source (origin
             (method url-fetch)
             (uri (string-append "http://files.itstool.org/itstool/itstool-"
                                 version ".tar.bz2"))
             (sha256
              (base32
               "0fh34wi52i0qikgvlmrcpf1vx6gc1xqdad4539l4d9hikfsrz45z"))))
    (build-system gnu-build-system)
    (inputs
     `(("libxml2" ,libxml2)
       ("python2-libxml2" ,python2-libxml2)
       ("python-2" ,python-2)))
    (arguments
     '(#:phases
       (modify-phases %standard-phases
         (add-after
          'install 'wrap-program
          ;; Make sure the installed script can find its Python modules
          ;; (libxml2 bindings) regardless of the caller's environment.
          (lambda* (#:key outputs #:allow-other-keys)
            (let ((prog (string-append (assoc-ref outputs "out")
                                       "/bin/itstool")))
              (wrap-program prog
                `("PYTHONPATH" = (,(getenv "PYTHONPATH"))))))))))
    (home-page "http://www.itstool.org/")
    (synopsis "Tool to translate XML documents with PO files")
    (description
     "ITS Tool allows you to translate your XML documents with PO files, using
rules from the W3C Internationalization Tag Set (ITS) to determine what to
translate and how to separate it into PO file messages.

PO files are the standard translation format for GNU and other Unix-like
systems.  They present translatable information as discrete messages, allowing
each message to be translated independently.  In contrast to whole-page
translation, translating with a message-based format like PO means you can
easily track changes to the source document down to the paragraph.  When new
strings are added or existing strings are modified, you only need to update the
corresponding messages.

ITS Tool is designed to make XML documents translatable through PO files by
applying standard ITS rules, as well as extension rules specific to ITS Tool.
ITS also provides an industry standard way for authors to override translation
information in their documents, such as whether a particular element should be
translated.")
    (license license:gpl3+)))
;; Legacy GLib bindings for D-Bus; kept for packages that have not yet
;; migrated to GDBus.
(define dbus-glib
  (package
    (name "dbus-glib")
    (version "0.108")
    (source (origin
             (method url-fetch)
             (uri
              (string-append "https://dbus.freedesktop.org/releases/dbus-glib/dbus-glib-"
                             version ".tar.gz"))
             (sha256
              (base32
               "0b307hw9j41npzr6niw1bs6ryp87m5yafg492gqwvsaj4dz0qd4z"))))
    (build-system gnu-build-system)
    (propagated-inputs ; according to dbus-glib-1.pc
     `(("dbus" ,dbus)
       ("glib" ,glib)))
    (inputs
     `(("expat" ,expat)))
    (native-inputs
     `(("glib" ,glib "bin")
       ("pkg-config" ,pkg-config)))
    (home-page "https://dbus.freedesktop.org/doc/dbus-glib/")
    (synopsis "D-Bus GLib bindings")
    (description
     "GLib bindings for D-Bus.  The package is obsolete and superseded
by GDBus included in Glib.")
    (license license:gpl2+)))                 ; or Academic Free License 2.1
;; Type-safe signal/slot library; a core dependency of glibmm and gtkmm.
(define libsigc++
  (package
    (name "libsigc++")
    (version "2.10.0")
    (source (origin
             (method url-fetch)
             (uri (string-append "mirror://gnome/sources/libsigc++/"
                                 (version-major+minor version) "/"
                                 name "-" version ".tar.xz"))
             (sha256
              (base32
               "10cd54l4zihss9qxfhd2iip2k7mr292k37i54r2cpgv0c8sdchzq"))))
    (build-system gnu-build-system)
    (native-inputs `(("pkg-config" ,pkg-config)
                     ("m4" ,m4)))
    (home-page "http://libsigc.sourceforge.net/")
    (synopsis "Type-safe callback system for standard C++")
    (description
     "Libsigc++ implements a type-safe callback system for standard C++.  It
allows you to define signals and to connect those signals to any callback
function, either global or a member function, regardless of whether it is
static or virtual.

It also contains adaptor classes for connection of dissimilar callbacks and
has an ease of use unmatched by other C++ callback libraries.")
    (license license:lgpl2.1+)))
(define glibmm
(package
(name "glibmm")
(version "2.50.0")
(source (origin
(method url-fetch)
(uri (string-append "mirror/"
(version-major+minor version)
"/glibmm-" version ".tar.xz"))
(sha256
(base32
"152yz5w0lx0y5j9ml72az7pc83p4l92bc0sb8whpcazldqy6wwnz"))))
(build-system gnu-build-system)
(arguments
`(#:phases (alist-cons-before
'build 'pre-build
(lambda _
(substitute* "tests/giomm_simple/main.cc"
(("/etc/fstab")
(string-append (getcwd)
"/tests/giomm_simple/main.cc")))
(substitute* "tests/giomm_tls_client/main.cc"
(("Gio::init.*$")
"return 77;\n")))
%standard-phases)))
(native-inputs `(("pkg-config" ,pkg-config)
("glib" ,glib "bin")))
(propagated-inputs
`(("libsigc++" ,libsigc++)
("glib" ,glib)))
(home-page "/")
(synopsis "C++ interface to the GLib library")
(description
"Glibmm provides a C++ programming interface to the part of GLib that are
useful for C++.")
(license license:lgpl2.1+)))
(define-public python2-pygobject-2
(package
(name "python2-pygobject")
This was the last version to declare the 2.0 platform number , i.e. its
(version "2.28.6")
(source
(origin
(method url-fetch)
(uri (string-append "mirror/"
(version-major+minor version)
"/pygobject-" version ".tar.xz"))
(sha256
(base32
"1f5dfxjnil2glfwxnqr14d2cjfbkghsbsn8n04js2c2icr7iv2pv"))
(patches (search-patches
"python2-pygobject-2-gi-info-type-error-domain.patch"))))
(build-system gnu-build-system)
(native-inputs
`(("which" ,which)
("pkg-config" ,pkg-config)
(inputs
`(("python" ,python-2)
("glib" ,glib)
("python2-pycairo" ,python2-pycairo)
("gobject-introspection" ,gobject-introspection)))
(propagated-inputs
(arguments
#:configure-flags '("LIBS=-lcairo-gobject")))
(home-page "")
(synopsis "Python bindings for GObject")
(description
"Python bindings for GLib, GObject, and GIO.")
(license license:lgpl2.1+)))
(define-public python-pygobject
(package
(name "python-pygobject")
(version "3.22.0")
(source
(origin
(method url-fetch)
(uri (string-append "mirror/"
(version-major+minor version)
"/pygobject-" version ".tar.xz"))
(sha256
(base32
"1ryblpc4wbhxcwf7grgib4drrab5xi6p78ihhrx0zj7g13xrrch8"))))
(build-system gnu-build-system)
(native-inputs
`(("which" ,which)
("pkg-config" ,pkg-config)))
(inputs
`(("python" ,python)
("python-pycairo" ,python-pycairo)
("gobject-introspection" ,gobject-introspection)))
(propagated-inputs
`(("glib" ,glib)
("libffi" ,libffi)))
(arguments
test_python_calls_sync test_python_calls_sync_errors
test_callback_user_data_middle_single
'(#:tests? #f))
For finding typelib files , since gobject - introscpetion is n't propagated .
(native-search-paths (package-native-search-paths gobject-introspection))
(home-page "")
(synopsis "Python bindings for GObject")
(description
"Python bindings for GLib, GObject, and GIO.")
(license license:lgpl2.1+)
(properties `((python2-variant . ,(delay python2-pygobject))))))
(define-public python2-pygobject
(package (inherit (strip-python2-variant python-pygobject))
(name "python2-pygobject")
(inputs
`(("python" ,python-2)
("python-pycairo" ,python2-pycairo)
("gobject-introspection" ,gobject-introspection)))))
(define telepathy-glib
(package
(name "telepathy-glib")
(version "0.24.1")
(source
(origin
(method url-fetch)
(uri
(string-append
"-glib/"
"telepathy-glib-" version ".tar.gz"))
(sha256
(base32
"1symyzbjmxvksn2ifdkk50lafjm2llf2sbmky062gq2pz3cg23cy"))
(patches
(list
(origin
(method url-fetch)
(uri "")
(file-name (string-append "telepathy-glib-duplicate-tests.patch"))
(sha256
(base32
"0z261fwrszxb28ccg3hsg9rizig4s84zvwmx6y31a4pyv7bvs5w3")))))))
(build-system gnu-build-system)
(arguments
'(#:configure-flags '("--enable-vala-bindings")))
(native-inputs
("gobject-introspection" ,gobject-introspection)
("pkg-config" ,pkg-config)
("python" ,python-2)
("vala" ,vala)
("xsltproc" ,libxslt)))
(propagated-inputs
There are all in the Requires.private field of telepathy-glib.pc .
`(("dbus" ,dbus)
("dbus-glib" ,dbus-glib)
("glib" ,glib)))
(home-page "/")
(synopsis "GLib Real-time communications framework over D-Bus")
(description "Telepathy is a flexible, modular communications framework
that enables real-time communication over D-Bus via pluggable protocol
backends. Telepathy is a communications service that can be accessed by
many applications simultaneously.
This package provides the library for GLib applications.")
(license license:lgpl2.1+)))
(define-public dbus-c++
(package
(name "dbus-c++")
(version "0.9.0")
(source (origin
(method url-fetch)
(uri
(string-append
"mirror-cplusplus/dbus-c%2B%2B/"
version "/libdbus-c%2B%2B-" version ".tar.gz"))
(file-name (string-append name "-" version ".tar.gz"))
(sha256
(base32
"0qafmy2i6dzx4n1dqp6pygyy6gjljnb7hwjcj2z11c1wgclsq4dw"))))
(build-system gnu-build-system)
(propagated-inputs
(inputs
`(("efl" ,efl)
("expat" ,expat)
("glib" ,glib)))
(native-inputs
`(("pkg-config" ,pkg-config)))
(arguments
#:configure-flags (list "LDFLAGS=-lpthread")
#:phases
(modify-phases %standard-phases
(add-before 'configure 'add-missing-header
(lambda _
(substitute* "include/dbus-c++/eventloop-integration.h"
(("#include <errno.h>")
"#include <errno.h>\n#include <unistd.h>")))))))
(synopsis "D-Bus API for C++")
(description "This package provides D-Bus client API bindings for the C++
programming langauage. It also contains the utility
@command{dbuscxx-xml2cpp}.")
(home-page "-cplusplus/")
(license license:lgpl2.1+)))
(define-public appstream-glib
(package
(name "appstream-glib")
(version "0.6.7")
(source (origin
(method url-fetch)
(uri (string-append "/~hughsient/"
"appstream-glib/releases/"
"appstream-glib-" version ".tar.xz"))
(sha256
(base32
"08mrf4k0jhnpdd4fig2grmi2vbxkgdhrwk0d0zq0j1wp5ip7arwp"))))
(build-system gnu-build-system)
(native-inputs
`(("pkg-config" ,pkg-config)))
(inputs
`(("gdk-pixbuf" ,gdk-pixbuf)
("glib" ,glib)
("gtk+" ,gtk+)
("json-glib" ,json-glib)
("libarchive" ,libarchive)
("libsoup" ,libsoup)
("nettle" ,nettle)
("util-linux" ,util-linux)))
(arguments
`(#:configure-flags
'("--disable-firmware" "--disable-dep11")
#:phases
(modify-phases %standard-phases
(add-after 'unpack 'patch-tests
(lambda _
(substitute* "libappstream-glib/as-self-test.c"
(("g_test_add_func.*as_test_store_local_appdata_func);") "")
(("g_test_add_func.*as_test_store_speed_appdata_func);") "")
(("g_test_add_func.*as_test_store_speed_desktop_func);") ""))
#t)))))
(home-page "-glib")
(synopsis "Library for reading and writing AppStream metadata")
(description "This library provides objects and helper methods to help
reading and writing @uref{,AppStream}
metadata.")
(license license:lgpl2.1+)))
|
bb3db93ea87361b86dcb427d20b14a2fa05ee23d887c9ee4d4c71a60683ded92 | audreyt/openafp | MDD.hs |
module OpenAFP.Records.AFP.MDD where
import OpenAFP.Types
import OpenAFP.Internals
data MDD = MDD {
mdd_Type :: !N3
,mdd_ :: !N3
,mdd :: !NStr
} deriving (Show, Typeable)
| null | https://raw.githubusercontent.com/audreyt/openafp/178e0dd427479ac7b8b461e05c263e52dd614b73/src/OpenAFP/Records/AFP/MDD.hs | haskell |
module OpenAFP.Records.AFP.MDD where
import OpenAFP.Types
import OpenAFP.Internals
data MDD = MDD {
mdd_Type :: !N3
,mdd_ :: !N3
,mdd :: !NStr
} deriving (Show, Typeable)
| |
b6b6347863c88ff25e07fd51d498e16f49c5de5e60de6920c5bd0b7633f01be1 | ddssff/refact-global-hse | Utils.hs | # LANGUAGE CPP #
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts , FlexibleInstances , RankNTypes , ScopedTypeVariables #
module Utils where
import Control.Exception (SomeException, throw)
import Control.Exception.Lifted as IO (bracket, catch)
import Control.Monad (MonadPlus, msum, when)
import Control.Monad.Trans (liftIO, MonadIO)
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Bool (bool)
import Data.Generics (Data(gmapM), GenericM, listify, toConstr, Typeable)
import Data.List (intercalate, stripPrefix)
import Data.Maybe (mapMaybe)
import Data.Sequence ((|>), Seq)
import qualified Language.Haskell.Exts.Annotated as A -- (Pretty)
import Language.Haskell.Exts.Annotated.Simplify (sType)
import Language.Haskell.Exts.Pretty (defaultMode, prettyPrint, prettyPrintStyleMode)
import qualified Language.Haskell.Exts.Syntax as S (ModuleName(..), Name)
import System.Directory (createDirectoryIfMissing, doesFileExist, getCurrentDirectory, removeDirectoryRecursive, removeFile, renameFile, setCurrentDirectory)
import System.Exit (ExitCode(..))
import System.FilePath (takeDirectory)
import System.IO (hPutStrLn, stderr)
import System.IO.Error (isDoesNotExistError)
import qualified System.IO.Temp as Temp (createTempDirectory)
import System.Process (readProcess, readProcessWithExitCode)
import Text.PrettyPrint (mode, Mode(OneLineMode), style)
| dropWhile where predicate operates on two list elements .
dropWhile2 :: (a -> Maybe a -> Bool) -> [a] -> [a]
dropWhile2 f (p : q : rs) | f p (Just q) = dropWhile2 f (q : rs)
dropWhile2 f [p] | f p Nothing = []
dropWhile2 _ l = l
| Monadic variation on everywhere '
everywhereM' :: Monad m => GenericM m -> GenericM m
everywhereM' f x
= do x' <- f x
gmapM (everywhereM' f) x'
| find all values of type b in a value of type a
gFind :: (MonadPlus m, Data a, Typeable b) => a -> m b
gFind = msum . map return . listify (const True)
| Monadic version of Data . Sequence.| >
(|$>) :: Applicative m => m (Seq a) -> m a -> m (Seq a)
(|$>) s x = (|>) <$> s <*> x
-- | Do a hard reset of all the files of the repository containing the
-- working directory.
gitResetHard :: IO ()
gitResetHard = do
(code, _out, _err) <- readProcessWithExitCode "git" ["reset", "--hard"] ""
case code of
ExitSuccess -> pure ()
ExitFailure _n -> error "gitResetHard"
-- | Do a hard reset of all the files of a subdirectory within a git
-- repository. (Does this every throw an exception?)
gitResetSubdir :: FilePath -> IO ()
gitResetSubdir dir = do
(readProcess "git" ["checkout", "--", dir] "" >>
readProcess "git" ["clean", "-f", dir] "" >> pure ())
`IO.catch` \(e :: SomeException) -> hPutStrLn stderr ("gitResetSubdir " ++ show dir ++ " failed: " ++ show e) >> throw e
-- | Determine whether the repository containing the working directory
-- is in a clean state.
gitIsClean :: IO Bool
gitIsClean = do
(code, out, _err) <- readProcessWithExitCode "git" ["status", "--porcelain"] ""
case code of
ExitFailure _ -> error "gitCheckClean failure"
ExitSuccess | all unmodified (lines out) -> pure True
ExitSuccess -> pure False
where
unmodified (a : b : _) = elem a "?! " && elem b "?! "
unmodified _ = False
withCleanRepo :: IO a -> IO a
withCleanRepo action = gitIsClean >>= bool (error "withCleanRepo: please commit or revert changes") action
-- | Print a very short and readable version for trace output.
class EZPrint a where
ezPrint :: a -> String
instance EZPrint a => EZPrint [a] where
ezPrint xs = "[" ++ intercalate ", " (map ezPrint xs) ++ "]"
instance EZPrint S.ModuleName where
ezPrint (S.ModuleName s) = s
instance EZPrint S.Name where
ezPrint = prettyPrint'
instance EZPrint (Maybe S.ModuleName) where
ezPrint (Just x) = prettyPrint x
ezPrint Nothing = "Main"
instance A.SrcInfo l => EZPrint (A.InstRule l) where
ezPrint (A.IParen _ r) = ezPrint r
ezPrint (A.IRule _ _ _ h) = "instance " ++ ezPrint h
instance A.SrcInfo l => EZPrint (A.InstHead l) where
ezPrint (A.IHParen _ h) = ezPrint h
ezPrint (A.IHInfix _ t n) = "(" ++ ezPrint n ++ ") " ++ ezPrint t
ezPrint (A.IHCon _ n) = ezPrint n
ezPrint (A.IHApp _ h t) = ezPrint h ++ " " ++ ezPrint t
instance EZPrint (A.QName l) where
ezPrint = prettyPrint'
instance EZPrint (A.Name l) where
ezPrint = prettyPrint'
instance A.SrcInfo l => EZPrint (A.Type l) where
ezPrint = prettyPrint' . sType
instance A.SrcInfo l => EZPrint (A.Exp l) where
ezPrint = prettyPrint'
maybeStripPrefix :: Eq a => [a] -> [a] -> [a]
maybeStripPrefix pre lst = maybe lst id (stripPrefix pre lst)
withCurrentDirectory :: forall m a. (MonadIO m, MonadBaseControl IO m) => FilePath -> m a -> m a
withCurrentDirectory path action =
liftIO (putStrLn ("cd " ++ path)) >>
IO.bracket acquire release action'
where
acquire :: m FilePath
acquire = liftIO getCurrentDirectory >>= \save -> liftIO (setCurrentDirectory path) >> return save
release :: FilePath -> m ()
release = liftIO . setCurrentDirectory
action' :: FilePath -> m a
action' _ = action `IO.catch` (\(e :: SomeException) -> liftIO (putStrLn ("in " ++ path)) >> throw e)
withTempDirectory :: (MonadIO m, MonadBaseControl IO m) =>
Bool
-> FilePath -- ^ Temp directory to create the directory in
-> String -- ^ Directory name template. See 'openTempFile'.
-> (FilePath -> m a) -- ^ Callback that can use the directory
-> m a
withTempDirectory cleanup targetDir template callback =
IO.bracket
(liftIO $ Temp.createTempDirectory targetDir template)
(if cleanup then liftIO . ignoringIOErrors . removeDirectoryRecursive else const (pure ()))
callback
ignoringIOErrors :: IO () -> IO ()
ignoringIOErrors ioe = ioe `IO.catch` (\e -> const (return ()) (e :: IOError))
replaceFile :: FilePath -> String -> IO ()
replaceFile path text = do
createDirectoryIfMissing True (takeDirectory path)
removeFile path `IO.catch` (\e -> if isDoesNotExistError e then return () else ioError e)
writeFile path text
-- text' <- readFile path
-- when (text /= text') (error $ "Failed to replace " ++ show path)
replaceFileWithBackup :: FilePath -> String -> IO ()
replaceFileWithBackup path text = do
removeIfExists (path ++ "~")
renameIfExists path (path ++ "~")
writeFile path text
where
removeIfExists x =
do exists <- doesFileExist x
when exists (removeFile x)
renameIfExists src dst =
do exists <- doesFileExist src
when exists (System.Directory.renameFile src dst)
| Slightly modified lines function from Data . List . It preserves
-- the presence or absence of a terminating newline by appending [""]
if string ends with a newline . Thus , the corresponding unlines
-- function is intercalate "\n".
lines' :: String -> [String]
lines' "" = []
Somehow GHC does n't detect the selector thunks in the below code ,
so s ' keeps a reference to the first line via the pair and we have
a space leak ( cf . # 4334 ) .
So we need to make GHC see the selector thunks with a trick .
lines' s = cons (case break (== '\n') s of
(l, s') -> (l, case s' of
[] -> [] -- no newline
_:"" -> [""]
_:s'' -> lines' s''))
where
cons ~(h, t) = h : t
listPairs :: [a] -> [(Maybe a, Maybe a)]
listPairs [] = [(Nothing, Nothing)]
listPairs (x : xs) =
(Nothing, Just x) : listPairs' x xs
where
listPairs' x1 (x2 : xs') = (Just x1, Just x2) : listPairs' x2 xs'
listPairs' x1 [] = [(Just x1, Nothing)]
-- | listTriples [1,2,3,4] ->
-- [(Nothing,1,Just 2),(Just 1,2,Just 3),(Just 2,3,Just 4),(Just 3,4,Nothing)]
listTriples :: [a] -> [(Maybe a, a, Maybe a)]
listTriples l = zip3 ([Nothing] ++ map Just l) l (tail (map Just l ++ [Nothing]))
| Like dropWhile , except the last element that satisfied p is included :
dropWhileNext even [ 2,4,6,1,3,5,8 ] - > [ 6,1,3,5,8 ]
dropWhileNext :: (a -> Bool) -> [a] -> [a]
dropWhileNext p xs = mapMaybe fst $ dropWhile (\(_,x) -> maybe True p x) $ listPairs xs
simplify :: Functor f => f a -> f ()
simplify = fmap (const ())
con :: (Typeable a, Data a) => a -> String
con = show . toConstr
prettyPrint' :: A.Pretty a => a -> String
prettyPrint' = prettyPrintStyleMode (style {mode = OneLineMode}) defaultMode
| null | https://raw.githubusercontent.com/ddssff/refact-global-hse/519a017009cae8aa1a3db1b46eb560d76bd9895d/tests/input/rgh/Utils.hs | haskell | (Pretty)
| Do a hard reset of all the files of the repository containing the
working directory.
| Do a hard reset of all the files of a subdirectory within a git
repository. (Does this every throw an exception?)
| Determine whether the repository containing the working directory
is in a clean state.
| Print a very short and readable version for trace output.
^ Temp directory to create the directory in
^ Directory name template. See 'openTempFile'.
^ Callback that can use the directory
text' <- readFile path
when (text /= text') (error $ "Failed to replace " ++ show path)
the presence or absence of a terminating newline by appending [""]
function is intercalate "\n".
no newline
| listTriples [1,2,3,4] ->
[(Nothing,1,Just 2),(Just 1,2,Just 3),(Just 2,3,Just 4),(Just 3,4,Nothing)] | # LANGUAGE CPP #
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts , FlexibleInstances , RankNTypes , ScopedTypeVariables #
module Utils where
import Control.Exception (SomeException, throw)
import Control.Exception.Lifted as IO (bracket, catch)
import Control.Monad (MonadPlus, msum, when)
import Control.Monad.Trans (liftIO, MonadIO)
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Bool (bool)
import Data.Generics (Data(gmapM), GenericM, listify, toConstr, Typeable)
import Data.List (intercalate, stripPrefix)
import Data.Maybe (mapMaybe)
import Data.Sequence ((|>), Seq)
import Language.Haskell.Exts.Annotated.Simplify (sType)
import Language.Haskell.Exts.Pretty (defaultMode, prettyPrint, prettyPrintStyleMode)
import qualified Language.Haskell.Exts.Syntax as S (ModuleName(..), Name)
import System.Directory (createDirectoryIfMissing, doesFileExist, getCurrentDirectory, removeDirectoryRecursive, removeFile, renameFile, setCurrentDirectory)
import System.Exit (ExitCode(..))
import System.FilePath (takeDirectory)
import System.IO (hPutStrLn, stderr)
import System.IO.Error (isDoesNotExistError)
import qualified System.IO.Temp as Temp (createTempDirectory)
import System.Process (readProcess, readProcessWithExitCode)
import Text.PrettyPrint (mode, Mode(OneLineMode), style)
| dropWhile where predicate operates on two list elements .
dropWhile2 :: (a -> Maybe a -> Bool) -> [a] -> [a]
dropWhile2 f (p : q : rs) | f p (Just q) = dropWhile2 f (q : rs)
dropWhile2 f [p] | f p Nothing = []
dropWhile2 _ l = l
| Monadic variation on everywhere '
everywhereM' :: Monad m => GenericM m -> GenericM m
everywhereM' f x
= do x' <- f x
gmapM (everywhereM' f) x'
| find all values of type b in a value of type a
gFind :: (MonadPlus m, Data a, Typeable b) => a -> m b
gFind = msum . map return . listify (const True)
| Monadic version of Data . Sequence.| >
(|$>) :: Applicative m => m (Seq a) -> m a -> m (Seq a)
(|$>) s x = (|>) <$> s <*> x
gitResetHard :: IO ()
gitResetHard = do
(code, _out, _err) <- readProcessWithExitCode "git" ["reset", "--hard"] ""
case code of
ExitSuccess -> pure ()
ExitFailure _n -> error "gitResetHard"
gitResetSubdir :: FilePath -> IO ()
gitResetSubdir dir = do
(readProcess "git" ["checkout", "--", dir] "" >>
readProcess "git" ["clean", "-f", dir] "" >> pure ())
`IO.catch` \(e :: SomeException) -> hPutStrLn stderr ("gitResetSubdir " ++ show dir ++ " failed: " ++ show e) >> throw e
gitIsClean :: IO Bool
gitIsClean = do
(code, out, _err) <- readProcessWithExitCode "git" ["status", "--porcelain"] ""
case code of
ExitFailure _ -> error "gitCheckClean failure"
ExitSuccess | all unmodified (lines out) -> pure True
ExitSuccess -> pure False
where
unmodified (a : b : _) = elem a "?! " && elem b "?! "
unmodified _ = False
withCleanRepo :: IO a -> IO a
withCleanRepo action = gitIsClean >>= bool (error "withCleanRepo: please commit or revert changes") action
class EZPrint a where
ezPrint :: a -> String
instance EZPrint a => EZPrint [a] where
ezPrint xs = "[" ++ intercalate ", " (map ezPrint xs) ++ "]"
instance EZPrint S.ModuleName where
ezPrint (S.ModuleName s) = s
instance EZPrint S.Name where
ezPrint = prettyPrint'
instance EZPrint (Maybe S.ModuleName) where
ezPrint (Just x) = prettyPrint x
ezPrint Nothing = "Main"
instance A.SrcInfo l => EZPrint (A.InstRule l) where
ezPrint (A.IParen _ r) = ezPrint r
ezPrint (A.IRule _ _ _ h) = "instance " ++ ezPrint h
instance A.SrcInfo l => EZPrint (A.InstHead l) where
ezPrint (A.IHParen _ h) = ezPrint h
ezPrint (A.IHInfix _ t n) = "(" ++ ezPrint n ++ ") " ++ ezPrint t
ezPrint (A.IHCon _ n) = ezPrint n
ezPrint (A.IHApp _ h t) = ezPrint h ++ " " ++ ezPrint t
instance EZPrint (A.QName l) where
ezPrint = prettyPrint'
instance EZPrint (A.Name l) where
ezPrint = prettyPrint'
instance A.SrcInfo l => EZPrint (A.Type l) where
ezPrint = prettyPrint' . sType
instance A.SrcInfo l => EZPrint (A.Exp l) where
ezPrint = prettyPrint'
maybeStripPrefix :: Eq a => [a] -> [a] -> [a]
maybeStripPrefix pre lst = maybe lst id (stripPrefix pre lst)
withCurrentDirectory :: forall m a. (MonadIO m, MonadBaseControl IO m) => FilePath -> m a -> m a
withCurrentDirectory path action =
liftIO (putStrLn ("cd " ++ path)) >>
IO.bracket acquire release action'
where
acquire :: m FilePath
acquire = liftIO getCurrentDirectory >>= \save -> liftIO (setCurrentDirectory path) >> return save
release :: FilePath -> m ()
release = liftIO . setCurrentDirectory
action' :: FilePath -> m a
action' _ = action `IO.catch` (\(e :: SomeException) -> liftIO (putStrLn ("in " ++ path)) >> throw e)
withTempDirectory :: (MonadIO m, MonadBaseControl IO m) =>
Bool
-> m a
withTempDirectory cleanup targetDir template callback =
IO.bracket
(liftIO $ Temp.createTempDirectory targetDir template)
(if cleanup then liftIO . ignoringIOErrors . removeDirectoryRecursive else const (pure ()))
callback
ignoringIOErrors :: IO () -> IO ()
ignoringIOErrors ioe = ioe `IO.catch` (\e -> const (return ()) (e :: IOError))
replaceFile :: FilePath -> String -> IO ()
replaceFile path text = do
createDirectoryIfMissing True (takeDirectory path)
removeFile path `IO.catch` (\e -> if isDoesNotExistError e then return () else ioError e)
writeFile path text
replaceFileWithBackup :: FilePath -> String -> IO ()
replaceFileWithBackup path text = do
removeIfExists (path ++ "~")
renameIfExists path (path ++ "~")
writeFile path text
where
removeIfExists x =
do exists <- doesFileExist x
when exists (removeFile x)
renameIfExists src dst =
do exists <- doesFileExist src
when exists (System.Directory.renameFile src dst)
| Slightly modified lines function from Data . List . It preserves
if string ends with a newline . Thus , the corresponding unlines
lines' :: String -> [String]
lines' "" = []
Somehow GHC does n't detect the selector thunks in the below code ,
so s ' keeps a reference to the first line via the pair and we have
a space leak ( cf . # 4334 ) .
So we need to make GHC see the selector thunks with a trick .
lines' s = cons (case break (== '\n') s of
(l, s') -> (l, case s' of
_:"" -> [""]
_:s'' -> lines' s''))
where
cons ~(h, t) = h : t
listPairs :: [a] -> [(Maybe a, Maybe a)]
listPairs [] = [(Nothing, Nothing)]
listPairs (x : xs) =
(Nothing, Just x) : listPairs' x xs
where
listPairs' x1 (x2 : xs') = (Just x1, Just x2) : listPairs' x2 xs'
listPairs' x1 [] = [(Just x1, Nothing)]
listTriples :: [a] -> [(Maybe a, a, Maybe a)]
listTriples l = zip3 ([Nothing] ++ map Just l) l (tail (map Just l ++ [Nothing]))
| Like dropWhile , except the last element that satisfied p is included :
dropWhileNext even [ 2,4,6,1,3,5,8 ] - > [ 6,1,3,5,8 ]
dropWhileNext :: (a -> Bool) -> [a] -> [a]
dropWhileNext p xs = mapMaybe fst $ dropWhile (\(_,x) -> maybe True p x) $ listPairs xs
simplify :: Functor f => f a -> f ()
simplify = fmap (const ())
con :: (Typeable a, Data a) => a -> String
con = show . toConstr
prettyPrint' :: A.Pretty a => a -> String
prettyPrint' = prettyPrintStyleMode (style {mode = OneLineMode}) defaultMode
|
1920d2e039f392fd7db4045a6de4d28879ed9253d366226f09a6caf2201edcd3 | projectcs13/sensor-cloud | analyse.erl | -module(analyse).
-export([forecast/1 , forecast/2 , init/0 , stop/0 , this/0 , get_analysis/2 ] ) .
-compile(export_all).
-include("webmachine.hrl").
-include("erlastic_search.hrl").
-define(INDEX, "sensorcloud").
-record(struct, {lst}).
< < " [ { \"value\ " : 3347 , \"date\ " : \"1995 - 06 - 09\ " } , { \"value\ " : 1833 , \"date\ " : \"1995 - 07 - 26\ " } , { \"value\ " : 2470 , \"date\ " : \"1996 - 11 - 19\ " } , { \"value\ " : 2849 , \"date\ " : \"1997 - 11 - 15\ " } , { \"value\ " : 3295 , \"date\ " : \"1998 - 10 - 01\ " } , { \"value\ " : 2853 , \"date\ " : \"1998 - 12 - 26\ " } , { \"value\ " : 3924 , \"date\ " : \"1999 - 11 - 23\ " } , { \"value\ " : 1392 , \"date\ " : \"2000 - 10 - 19\ " } , { \"value\ " : 2127 , \"date\ " : \"2001 - 03 - 09\ " } , { \"value\ " : 2121 , \"date\ " : \"2001 - 05 - 27\ " } , { \"value\ " : 2817 , \"date\ " : \"2002 - 05 - 03\ " } , { \"value\ " : 1713 , \"date\ " : \"2003 - 02 - 13\ " } , { \"value\ " : 3699 , \"date\ " : \"2003 - 05 - 25\ " } , { \"value\ " : 2387 , \"date\ " : \"2003 - 07 - 13\ " } , { \"value\ " : 2409 , \"date\ " : \"2004 - 01 - 11\ " } , { \"value\ " : 3163 , \"date\ " : \"2004 - 12 - 06\ " } , { \"value\ " : 2168 , \"date\ " : \"2005 - 10 - 05\ " } , { \"value\ " : 1276 , \"date\ " : \"2008 - 02 - 12\ " } , { \"value\ " : 2597 , \"date\ " : \"2009 - 12 - 29\ " } , { \"value\ " : 2851 , \"date\ " : \"2010 - 10 - 23\ " } ] " > >
analyse : predict ( " [ { \"value\ " : 3347 , \"date\ " : \"1995 - 06 - 09\ " } , { \"value\ " : 1833 , \"date\ " : \"1995 - 07 - 26\ " } , { \"value\ " : 2470 , \"date\ " : \"1996 - 11 - 19\ " } , { \"value\ " : 2849 , \"date\ " : \"1997 - 11 - 15\ " } , { \"value\ " : 3295 , \"date\ " : \"1998 - 10 - 01\ " } , { \"value\ " : 2853 , \"date\ " : \"1998 - 12 - 26\ " } , { \"value\ " : 3924 , \"date\ " : \"1999 - 11 - 23\ " } , { \"value\ " : 1392 , \"date\ " : \"2000 - 10 - 19\ " } , { \"value\ " : 2127 , \"date\ " : \"2001 - 03 - 09\ " } , { \"value\ " : 2121 , \"date\ " : \"2001 - 05 - 27\ " } , { \"value\ " : 2817 , \"date\ " : \"2002 - 05 - 03\ " } , { \"value\ " : 1713 , \"date\ " : \"2003 - 02 - 13\ " } , { \"value\ " : 3699 , \"date\ " : \"2003 - 05 - 25\ " } , { \"value\ " : 2387 , \"date\ " : \"2003 - 07 - 13\ " } , { \"value\ " : 2409 , \"date\ " : \"2004 - 01 - 11\ " } , { \"value\ " : 3163 , \"date\ " : \"2004 - 12 - 06\ " } , { \"value\ " : 2168 , \"date\ " : \"2005 - 10 - 05\ " } , { \"value\ " : 1276 , \"date\ " : \"2008 - 02 - 12\ " } , { \"value\ " : 2597 , \"date\ " : \"2009 - 12 - 29\ " } , { \"value\ " : 2851 , \"date\ " : \"2010 - 10 - 23\ " } ] " ) .
%% @doc
%% Function: allowed_methods/2
%% Purpose: Defines which HTTP methods are allowed
%% Returns: {List of allowed HTTP requests, string , string()}
%% @end
-spec allowed_methods(ReqData::tuple(), State::string()) -> {list(), tuple(), string()}.
allowed_methods(ReqData, State) ->
case api_help:parse_path(wrq:path(ReqData)) of
[{"streams", _StreamID}, {"_analyse"}] ->
{['GET'], ReqData, State};
[{"vstreams", _VStreamID}, {"_analyse"}] ->
{['GET'], ReqData, State};
[{"users", _UserID}, {"streams", _StreamID}, {"_analyse"}] ->
{['GET'], ReqData, State};
[error] ->
{[], ReqData, State}
end.
%% @doc
%% Function: content_types_provided/2
%% Purpose: based on the Accept header on a 'GET' request, we provide different media types to the client.
A code 406 is returned to the client if we can not return the media - type that the user has requested .
Returns : { [ { Mediatype , Handler } ] , ReqData , State }
%% @end
-spec content_types_provided(ReqData::term(),State::term()) -> {list(), term(), term()}.
content_types_provided(ReqData, State) ->
{[{"application/json", get_analysis}], ReqData, State}.
%% @doc
Function : content_types_accepted/2
%% Purpose: based on the content-type on a 'POST' or 'PUT', we know which kind of data that is allowed to be sent to the server.
A code 406 is returned to the client if we do n't accept a media type that the client has sent .
Returns : { [ { Mediatype , Handler } ] , ReqData , State }
%% @end
-spec content_types_accepted(ReqData::tuple(), State::string()) -> {list(), tuple(), string()}.
content_types_accepted(ReqData, State) ->
{[{"application/json", get_analysis}], ReqData, State}.
%% @doc
%% Function: get_analysis/2
%% Purpose: Used to handle GET requests by giving the document with the given
%% Id or listing the documents that can be found from the restrictions
given by the URI .
Returns : { Success , ReqData , State } , where Success is true if the PUT request is
%% successful and false otherwise.
%% @end
-spec get_analysis(ReqData::term(),State::term()) -> {boolean(), term(), term()}.
get_analysis(ReqData, State) ->
case proplists:get_value('streamid', wrq:path_info(ReqData)) of
undefined ->
case proplists:get_value('vstreamid', wrq:path_info(ReqData)) of
undefined ->
ErrorString = api_help:generate_error(<<"Missing stream id">>, 405),
{{halt, 405}, wrq:set_resp_body(ErrorString, ReqData), State};
VStreamId ->
NrValues = case wrq:get_qs_value("nr_values",ReqData) of
undefined ->
50;
Values ->
{Value,_} = string:to_integer(Values),
if
Value > 500 ->
500;
Value < 3 ->
3;
true ->
Value
end
end,
NrPredictions = case wrq:get_qs_value("nr_preds",ReqData) of
undefined ->
"25";
Predictions ->
{Preds,_} = string:to_integer(Predictions),
if
Preds > 500 ->
"500";
Preds < 1 ->
"1";
true ->
Predictions
end
end,
case erlastic_search:search_limit(?INDEX, "vsdatapoint","stream_id:" ++ VStreamId ++ "&sort=timestamp:desc", NrValues) of
%case erlastic_search:search_json(#erls_params{}, ?INDEX, "datapoint", create_json(StreamId), []) of
{error,{Code, Body}} ->
ErrorString = api_help:generate_error(Body, Code),
{{halt, Code}, wrq:set_resp_body(ErrorString, ReqData), State};
{ok,JsonStruct} ->
{forecast(lib_json:get_field(JsonStruct, "hits.hits"), NrPredictions),ReqData,State}
end
end;
StreamId ->
NrValues = case wrq:get_qs_value("nr_values",ReqData) of
undefined ->
50;
Values ->
{Value,_} = string:to_integer(Values),
if
Value > 500 ->
500;
Value < 3 ->
3;
true ->
Value
end
end,
NrPredictions = case wrq:get_qs_value("nr_preds",ReqData) of
undefined ->
"25";
Predictions ->
{Preds,_} = string:to_integer(Predictions),
if
Preds > 500 ->
"500";
Preds < 1 ->
"1";
true ->
Predictions
end
end,
case erlastic_search:search_limit(?INDEX, "datapoint","stream_id:" ++ StreamId ++ "&sort=timestamp:desc", NrValues) of
%case erlastic_search:search_json(#erls_params{}, ?INDEX, "datapoint", create_json(StreamId), []) of
{error,{Code, Body}} ->
ErrorString = api_help:generate_error(Body, Code),
{{halt, Code}, wrq:set_resp_body(ErrorString, ReqData), State};
{ok,JsonStruct} ->
erlang:display("NUMBER OF VALUES"),
erlang:display(NrValues),
erlang:display("NUMBER OF PREDICTIONS"),
erlang:display(NrPredictions),
{forecast(lib_json:get_field(JsonStruct, "hits.hits"), NrPredictions),ReqData,State}
end
end.
create_json(StreamId) ->
"{ \"sort\" : [{ \"timestamp\" : {\"order\" : \"asc\"}}], \"query\" : { \"term\" : { \"stream_id\" : \""++ StreamId ++ "\" }}}".
%% @doc
%% Function: init/0
%% Purpose: Initializes the analysis engine.
%% Returns: ok.
%% @end
-spec start() -> ok.
start() ->
Pid = eri:start(),
eri:connect(),
ok.
%% @doc
%% Function: init/1
Purpose : init function used to fetch path information from webmachine dispatcher .
%% Returns: {ok, undefined}
%% @end
-spec init([]) -> {ok, undefined}.
init([]) ->
{ok, undefined}.
%% @doc
%% Function: stop/0
Purpose : Tries to stop eri , but eri : stop seems to be broken .
%% Returns: CRASH.
%% @end
-spec stop() -> crash.
stop() ->
eri:stop().
%% @doc
%% Function: this/0
%% Purpose: Does a test of forecast.
%% Returns: Json object with a list of the predictions.
%% @end
-spec this() -> JSON::string().
this() ->
start(),
forecast("[ { \"value\": 3347, \"date\": \"1995-06-09\" }, { \"value\": 1833, \"date\": \"1995-07-26\" }, { \"value\": 2470, \"date\": \"1996-11-19\" }, { \"value\": 2849, \"date\": \"1997-11-15\" }, { \"value\": 3295, \"date\": \"1998-10-01\" }, { \"value\": 2853, \"date\": \"1998-12-26\" }, { \"value\": 3924, \"date\": \"1999-11-23\" }, { \"value\": 1392, \"date\": \"2000-10-19\" }, { \"value\": 2127, \"date\": \"2001-03-09\" }, { \"value\": 2121, \"date\": \"2001-05-27\" }, { \"value\": 2817, \"date\": \"2002-05-03\" }, { \"value\": 1713, \"date\": \"2003-02-13\" }, { \"value\": 3699, \"date\": \"2003-05-25\" }, { \"value\": 2387, \"date\": \"2003-07-13\" }, { \"value\": 2409, \"date\": \"2004-01-11\" }, { \"value\": 3163, \"date\": \"2004-12-06\" }, { \"value\": 2168, \"date\": \"2005-10-05\" }, { \"value\": 1276, \"date\": \"2008-02-12\" }, { \"value\": 2597, \"date\": \"2009-12-29\" }, { \"value\": 2851, \"date\": \"2010-10-23\"}]").
%% @doc
%% Function: forecast/1
Purpose : Used to do a prediction with R given a json object . Uses 10 as a default number of predictions
%% Returns: List
%% @end
-spec forecast(JSON::string()) -> JSON::string().
forecast(Json) ->
forecast(Json, 25).
%% @doc
Function : forecast/2
%% Purpose: Used to do a prediction with R given a json object, and the number of desired predicted datapoints
%% Returns: Json object with list of values
%% @end
-spec forecast(JSON::string(), Nr::integer()) -> JSON::string().
forecast(Json, Nr) ->
eri:eval("library(forecast)"),
case get_time_series(Json) of
no_values ->
"{\"predictions\": []}";
Values ->
eri:eval("V <- " ++ Values),
eri:eval("A <- auto.arima(V)"),
eri:eval("pred <- forecast(A, "++ Nr ++ ")"),
{ok, _, Mean} = eri:eval("data.frame(c(pred$mean))[[1]]"),
{ok, _, Lo80} = eri:eval("head(data.frame(c(pred$lower))[[1]], " ++ Nr ++")"),
{ok, _, Hi80} = eri:eval("head(data.frame(c(pred$upper))[[1]], " ++ Nr ++")"),
{ok, _, Lo95} = eri:eval("tail(data.frame(c(pred$lower))[[1]], " ++ Nr ++")"),
{ok, _, Hi95} = eri:eval("tail(data.frame(c(pred$upper))[[1]], " ++ Nr ++")"),
eri : eval("rm(list = ls ( ) ) " ) ,
start_format_result({Mean, Lo80, Hi80, Lo95, Hi95})
end.
%% @doc
Function :
%% Purpose: Format the results from a forecast.
%% Returns: Json object with list of values
%% @end
-spec start_format_result({Mean::list(), Lo80::list(), Hi80::list(), Lo95::list(), Hi95::list()}) -> JSON::string().
start_format_result({Mean, Lo80, Hi80, Lo95, Hi95}) ->
"{ \"predictions\": [" ++ format_result({Mean, Lo80, Hi80, Lo95, Hi95}).
%% @doc
%% Function: format_result/1
%% Purpose: Format the results from a forecast.
Returns : Everything in a Json object except the beginning .
%% @end
-spec format_result({Mean::list(), Lo80::list(), Hi80::list(), Lo95::list(), Hi95::list()}) -> string().
format_result({[HeadMean|[]], [HeadLo80|[]], [HeadHi80|[]],[HeadLo95|[]], [HeadHi95|[]]}) ->
"{ \"value\":" ++ lists:flatten(io_lib:format("~p", [HeadMean])) ++
",\"lo80\":" ++ lists:flatten(io_lib:format("~p", [HeadLo80])) ++
",\"hi80\":" ++ lists:flatten(io_lib:format("~p", [HeadHi80])) ++
",\"lo95\":" ++ lists:flatten(io_lib:format("~p", [HeadLo95])) ++
",\"hi95\":" ++ lists:flatten(io_lib:format("~p", [HeadHi95])) ++ "}]}";
format_result({[HeadMean|Mean], [HeadLo80|Lo80], [HeadHi80|Hi80],[HeadLo95|Lo95], [HeadHi95|Hi95]}) ->
"{ \"value\":" ++ lists:flatten(io_lib:format("~p", [HeadMean])) ++
",\"lo80\":" ++ lists:flatten(io_lib:format("~p", [HeadLo80])) ++
",\"hi80\":" ++ lists:flatten(io_lib:format("~p", [HeadHi80])) ++
",\"lo95\":" ++ lists:flatten(io_lib:format("~p", [HeadLo95])) ++
",\"hi95\":" ++ lists:flatten(io_lib:format("~p", [HeadHi95])) ++ "},"
++ format_result({Mean, Lo80, Hi80, Lo95, Hi95}).
%% @doc
%% Function: get_forecast_string/1
%% Purpose: Generates a string with a complete command to to forecast on Values in R
%% Returns: String with complete forecast command for R
%% @end
-spec get_forecast_string(Values::string()) -> string().
get_forecast_string(Values) ->
"forecast(auto.arima("++Values++"))".
%% @doc
Function : get_time_series/1
Purpose : Gets information as strings from a Json object ( first time , last time and a list with all values )
%% Returns: Data from JSON object as strings
%% @end
-spec get_time_series(JSON::string()) -> Values::string().
get_time_series(Json) ->
{Values, _} = parse_json_list(Json, [], []),
%{Start, End} = get_times(Times, {}),
get_values_string(Values).
%% @doc
%% Function: parse_json_list/3
Purpose : Get a list of times and values from a Json object
Returns : Lists with data from list of Json objects lists
@TODO Avoid reverse by merging this function with get_values_string
%% @end
-spec parse_json_list(Datapoint::list(), Values::list(), Times::list()) -> {Values::list(), Times::list()}.
parse_json_list([], Values, Times) -> {lists:reverse(Values), lists:reverse(Times)};
parse_json_list([Head|Rest], Values, Times) ->
Val = lib_json:get_field(Head, "_source.value"),
parse_json_list(Rest, [Val|Values], []).
%% @doc
%% Function: get_times/1
Purpose : Get the first and last time from a list ( no longer interesting )
Returns : A tuple with the first and last times in the list .
@TODO No longer necessary . Remove calls to it .
%% @end
-spec get_times(Values::string(), tuple()) -> {list(), list()}.
get_times([], {}) -> {"321", "123"};
get_times([End | List], {}) -> {"321", "123"};
get_times(List, {End}) -> {binary_to_list(lists:last(List)), binary_to_list(End)}.
%% @doc
%% Function: get_values_string/1
%% Purpose: Generates a string with a complete command to to forecast on Values in R
%% Returns: String with complete forecast command for R
%% @end
-spec get_values_string(Values::string()) -> string().
get_values_string([]) -> no_values;
get_values_string([Head | Tail]) -> get_values_string(Tail, lists:flatten(io_lib:format("~p)", [Head]))).
%% @doc
%% Function: get_values_string_/2
%% Purpose: Get a string with values formatted as an R command (to create an equivalent list in R)
%% Returns: A string with all the values in the argument
%% @end
-spec get_values_string(Values::list(), string()) -> string().
get_values_string([], S) -> "c("++S;
get_values_string([Head | Tail], S) -> get_values_string(Tail, lists:flatten(io_lib:format("~p, ", [Head]))++S).
| null | https://raw.githubusercontent.com/projectcs13/sensor-cloud/0302bd74b2e62fddbd832fb4c7a27b9c62852b90/src/analyse.erl | erlang | @doc
Function: allowed_methods/2
Purpose: Defines which HTTP methods are allowed
Returns: {List of allowed HTTP requests, string , string()}
@end
@doc
Function: content_types_provided/2
Purpose: based on the Accept header on a 'GET' request, we provide different media types to the client.
@end
@doc
Purpose: based on the content-type on a 'POST' or 'PUT', we know which kind of data that is allowed to be sent to the server.
@end
@doc
Function: get_analysis/2
Purpose: Used to handle GET requests by giving the document with the given
Id or listing the documents that can be found from the restrictions
successful and false otherwise.
@end
case erlastic_search:search_json(#erls_params{}, ?INDEX, "datapoint", create_json(StreamId), []) of
case erlastic_search:search_json(#erls_params{}, ?INDEX, "datapoint", create_json(StreamId), []) of
@doc
Function: init/0
Purpose: Initializes the analysis engine.
Returns: ok.
@end
@doc
Function: init/1
Returns: {ok, undefined}
@end
@doc
Function: stop/0
Returns: CRASH.
@end
@doc
Function: this/0
Purpose: Does a test of forecast.
Returns: Json object with a list of the predictions.
@end
@doc
Function: forecast/1
Returns: List
@end
@doc
Purpose: Used to do a prediction with R given a json object, and the number of desired predicted datapoints
Returns: Json object with list of values
@end
@doc
Purpose: Format the results from a forecast.
Returns: Json object with list of values
@end
@doc
Function: format_result/1
Purpose: Format the results from a forecast.
@end
@doc
Function: get_forecast_string/1
Purpose: Generates a string with a complete command to to forecast on Values in R
Returns: String with complete forecast command for R
@end
@doc
Returns: Data from JSON object as strings
@end
{Start, End} = get_times(Times, {}),
@doc
Function: parse_json_list/3
@end
@doc
Function: get_times/1
@end
@doc
Function: get_values_string/1
Purpose: Generates a string with a complete command to to forecast on Values in R
Returns: String with complete forecast command for R
@end
@doc
Function: get_values_string_/2
Purpose: Get a string with values formatted as an R command (to create an equivalent list in R)
Returns: A string with all the values in the argument
@end | -module(analyse).
-export([forecast/1 , forecast/2 , init/0 , stop/0 , this/0 , get_analysis/2 ] ) .
-compile(export_all).
-include("webmachine.hrl").
-include("erlastic_search.hrl").
-define(INDEX, "sensorcloud").
-record(struct, {lst}).
< < " [ { \"value\ " : 3347 , \"date\ " : \"1995 - 06 - 09\ " } , { \"value\ " : 1833 , \"date\ " : \"1995 - 07 - 26\ " } , { \"value\ " : 2470 , \"date\ " : \"1996 - 11 - 19\ " } , { \"value\ " : 2849 , \"date\ " : \"1997 - 11 - 15\ " } , { \"value\ " : 3295 , \"date\ " : \"1998 - 10 - 01\ " } , { \"value\ " : 2853 , \"date\ " : \"1998 - 12 - 26\ " } , { \"value\ " : 3924 , \"date\ " : \"1999 - 11 - 23\ " } , { \"value\ " : 1392 , \"date\ " : \"2000 - 10 - 19\ " } , { \"value\ " : 2127 , \"date\ " : \"2001 - 03 - 09\ " } , { \"value\ " : 2121 , \"date\ " : \"2001 - 05 - 27\ " } , { \"value\ " : 2817 , \"date\ " : \"2002 - 05 - 03\ " } , { \"value\ " : 1713 , \"date\ " : \"2003 - 02 - 13\ " } , { \"value\ " : 3699 , \"date\ " : \"2003 - 05 - 25\ " } , { \"value\ " : 2387 , \"date\ " : \"2003 - 07 - 13\ " } , { \"value\ " : 2409 , \"date\ " : \"2004 - 01 - 11\ " } , { \"value\ " : 3163 , \"date\ " : \"2004 - 12 - 06\ " } , { \"value\ " : 2168 , \"date\ " : \"2005 - 10 - 05\ " } , { \"value\ " : 1276 , \"date\ " : \"2008 - 02 - 12\ " } , { \"value\ " : 2597 , \"date\ " : \"2009 - 12 - 29\ " } , { \"value\ " : 2851 , \"date\ " : \"2010 - 10 - 23\ " } ] " > >
analyse : predict ( " [ { \"value\ " : 3347 , \"date\ " : \"1995 - 06 - 09\ " } , { \"value\ " : 1833 , \"date\ " : \"1995 - 07 - 26\ " } , { \"value\ " : 2470 , \"date\ " : \"1996 - 11 - 19\ " } , { \"value\ " : 2849 , \"date\ " : \"1997 - 11 - 15\ " } , { \"value\ " : 3295 , \"date\ " : \"1998 - 10 - 01\ " } , { \"value\ " : 2853 , \"date\ " : \"1998 - 12 - 26\ " } , { \"value\ " : 3924 , \"date\ " : \"1999 - 11 - 23\ " } , { \"value\ " : 1392 , \"date\ " : \"2000 - 10 - 19\ " } , { \"value\ " : 2127 , \"date\ " : \"2001 - 03 - 09\ " } , { \"value\ " : 2121 , \"date\ " : \"2001 - 05 - 27\ " } , { \"value\ " : 2817 , \"date\ " : \"2002 - 05 - 03\ " } , { \"value\ " : 1713 , \"date\ " : \"2003 - 02 - 13\ " } , { \"value\ " : 3699 , \"date\ " : \"2003 - 05 - 25\ " } , { \"value\ " : 2387 , \"date\ " : \"2003 - 07 - 13\ " } , { \"value\ " : 2409 , \"date\ " : \"2004 - 01 - 11\ " } , { \"value\ " : 3163 , \"date\ " : \"2004 - 12 - 06\ " } , { \"value\ " : 2168 , \"date\ " : \"2005 - 10 - 05\ " } , { \"value\ " : 1276 , \"date\ " : \"2008 - 02 - 12\ " } , { \"value\ " : 2597 , \"date\ " : \"2009 - 12 - 29\ " } , { \"value\ " : 2851 , \"date\ " : \"2010 - 10 - 23\ " } ] " ) .
-spec allowed_methods(ReqData::tuple(), State::string()) -> {list(), tuple(), string()}.
allowed_methods(ReqData, State) ->
case api_help:parse_path(wrq:path(ReqData)) of
[{"streams", _StreamID}, {"_analyse"}] ->
{['GET'], ReqData, State};
[{"vstreams", _VStreamID}, {"_analyse"}] ->
{['GET'], ReqData, State};
[{"users", _UserID}, {"streams", _StreamID}, {"_analyse"}] ->
{['GET'], ReqData, State};
[error] ->
{[], ReqData, State}
end.
A code 406 is returned to the client if we can not return the media - type that the user has requested .
Returns : { [ { Mediatype , Handler } ] , ReqData , State }
-spec content_types_provided(ReqData::term(),State::term()) -> {list(), term(), term()}.
content_types_provided(ReqData, State) ->
{[{"application/json", get_analysis}], ReqData, State}.
Function : content_types_accepted/2
A code 406 is returned to the client if we do n't accept a media type that the client has sent .
Returns : { [ { Mediatype , Handler } ] , ReqData , State }
-spec content_types_accepted(ReqData::tuple(), State::string()) -> {list(), tuple(), string()}.
content_types_accepted(ReqData, State) ->
{[{"application/json", get_analysis}], ReqData, State}.
given by the URI .
Returns : { Success , ReqData , State } , where Success is true if the PUT request is
-spec get_analysis(ReqData::term(),State::term()) -> {boolean(), term(), term()}.
get_analysis(ReqData, State) ->
case proplists:get_value('streamid', wrq:path_info(ReqData)) of
undefined ->
case proplists:get_value('vstreamid', wrq:path_info(ReqData)) of
undefined ->
ErrorString = api_help:generate_error(<<"Missing stream id">>, 405),
{{halt, 405}, wrq:set_resp_body(ErrorString, ReqData), State};
VStreamId ->
NrValues = case wrq:get_qs_value("nr_values",ReqData) of
undefined ->
50;
Values ->
{Value,_} = string:to_integer(Values),
if
Value > 500 ->
500;
Value < 3 ->
3;
true ->
Value
end
end,
NrPredictions = case wrq:get_qs_value("nr_preds",ReqData) of
undefined ->
"25";
Predictions ->
{Preds,_} = string:to_integer(Predictions),
if
Preds > 500 ->
"500";
Preds < 1 ->
"1";
true ->
Predictions
end
end,
case erlastic_search:search_limit(?INDEX, "vsdatapoint","stream_id:" ++ VStreamId ++ "&sort=timestamp:desc", NrValues) of
{error,{Code, Body}} ->
ErrorString = api_help:generate_error(Body, Code),
{{halt, Code}, wrq:set_resp_body(ErrorString, ReqData), State};
{ok,JsonStruct} ->
{forecast(lib_json:get_field(JsonStruct, "hits.hits"), NrPredictions),ReqData,State}
end
end;
StreamId ->
NrValues = case wrq:get_qs_value("nr_values",ReqData) of
undefined ->
50;
Values ->
{Value,_} = string:to_integer(Values),
if
Value > 500 ->
500;
Value < 3 ->
3;
true ->
Value
end
end,
NrPredictions = case wrq:get_qs_value("nr_preds",ReqData) of
undefined ->
"25";
Predictions ->
{Preds,_} = string:to_integer(Predictions),
if
Preds > 500 ->
"500";
Preds < 1 ->
"1";
true ->
Predictions
end
end,
case erlastic_search:search_limit(?INDEX, "datapoint","stream_id:" ++ StreamId ++ "&sort=timestamp:desc", NrValues) of
{error,{Code, Body}} ->
ErrorString = api_help:generate_error(Body, Code),
{{halt, Code}, wrq:set_resp_body(ErrorString, ReqData), State};
{ok,JsonStruct} ->
erlang:display("NUMBER OF VALUES"),
erlang:display(NrValues),
erlang:display("NUMBER OF PREDICTIONS"),
erlang:display(NrPredictions),
{forecast(lib_json:get_field(JsonStruct, "hits.hits"), NrPredictions),ReqData,State}
end
end.
create_json(StreamId) ->
"{ \"sort\" : [{ \"timestamp\" : {\"order\" : \"asc\"}}], \"query\" : { \"term\" : { \"stream_id\" : \""++ StreamId ++ "\" }}}".
-spec start() -> ok.
start() ->
Pid = eri:start(),
eri:connect(),
ok.
Purpose : init function used to fetch path information from webmachine dispatcher .
-spec init([]) -> {ok, undefined}.
init([]) ->
{ok, undefined}.
Purpose : Tries to stop eri , but eri : stop seems to be broken .
-spec stop() -> crash.
stop() ->
eri:stop().
-spec this() -> JSON::string().
this() ->
start(),
forecast("[ { \"value\": 3347, \"date\": \"1995-06-09\" }, { \"value\": 1833, \"date\": \"1995-07-26\" }, { \"value\": 2470, \"date\": \"1996-11-19\" }, { \"value\": 2849, \"date\": \"1997-11-15\" }, { \"value\": 3295, \"date\": \"1998-10-01\" }, { \"value\": 2853, \"date\": \"1998-12-26\" }, { \"value\": 3924, \"date\": \"1999-11-23\" }, { \"value\": 1392, \"date\": \"2000-10-19\" }, { \"value\": 2127, \"date\": \"2001-03-09\" }, { \"value\": 2121, \"date\": \"2001-05-27\" }, { \"value\": 2817, \"date\": \"2002-05-03\" }, { \"value\": 1713, \"date\": \"2003-02-13\" }, { \"value\": 3699, \"date\": \"2003-05-25\" }, { \"value\": 2387, \"date\": \"2003-07-13\" }, { \"value\": 2409, \"date\": \"2004-01-11\" }, { \"value\": 3163, \"date\": \"2004-12-06\" }, { \"value\": 2168, \"date\": \"2005-10-05\" }, { \"value\": 1276, \"date\": \"2008-02-12\" }, { \"value\": 2597, \"date\": \"2009-12-29\" }, { \"value\": 2851, \"date\": \"2010-10-23\"}]").
Purpose : Used to do a prediction with R given a json object . Uses 10 as a default number of predictions
-spec forecast(JSON::string()) -> JSON::string().
forecast(Json) ->
forecast(Json, 25).
Function : forecast/2
-spec forecast(JSON::string(), Nr::integer()) -> JSON::string().
forecast(Json, Nr) ->
eri:eval("library(forecast)"),
case get_time_series(Json) of
no_values ->
"{\"predictions\": []}";
Values ->
eri:eval("V <- " ++ Values),
eri:eval("A <- auto.arima(V)"),
eri:eval("pred <- forecast(A, "++ Nr ++ ")"),
{ok, _, Mean} = eri:eval("data.frame(c(pred$mean))[[1]]"),
{ok, _, Lo80} = eri:eval("head(data.frame(c(pred$lower))[[1]], " ++ Nr ++")"),
{ok, _, Hi80} = eri:eval("head(data.frame(c(pred$upper))[[1]], " ++ Nr ++")"),
{ok, _, Lo95} = eri:eval("tail(data.frame(c(pred$lower))[[1]], " ++ Nr ++")"),
{ok, _, Hi95} = eri:eval("tail(data.frame(c(pred$upper))[[1]], " ++ Nr ++")"),
eri : eval("rm(list = ls ( ) ) " ) ,
start_format_result({Mean, Lo80, Hi80, Lo95, Hi95})
end.
Function :
-spec start_format_result({Mean::list(), Lo80::list(), Hi80::list(), Lo95::list(), Hi95::list()}) -> JSON::string().
start_format_result({Mean, Lo80, Hi80, Lo95, Hi95}) ->
"{ \"predictions\": [" ++ format_result({Mean, Lo80, Hi80, Lo95, Hi95}).
Returns : Everything in a Json object except the beginning .
-spec format_result({Mean::list(), Lo80::list(), Hi80::list(), Lo95::list(), Hi95::list()}) -> string().
format_result({[HeadMean|[]], [HeadLo80|[]], [HeadHi80|[]],[HeadLo95|[]], [HeadHi95|[]]}) ->
"{ \"value\":" ++ lists:flatten(io_lib:format("~p", [HeadMean])) ++
",\"lo80\":" ++ lists:flatten(io_lib:format("~p", [HeadLo80])) ++
",\"hi80\":" ++ lists:flatten(io_lib:format("~p", [HeadHi80])) ++
",\"lo95\":" ++ lists:flatten(io_lib:format("~p", [HeadLo95])) ++
",\"hi95\":" ++ lists:flatten(io_lib:format("~p", [HeadHi95])) ++ "}]}";
format_result({[HeadMean|Mean], [HeadLo80|Lo80], [HeadHi80|Hi80],[HeadLo95|Lo95], [HeadHi95|Hi95]}) ->
"{ \"value\":" ++ lists:flatten(io_lib:format("~p", [HeadMean])) ++
",\"lo80\":" ++ lists:flatten(io_lib:format("~p", [HeadLo80])) ++
",\"hi80\":" ++ lists:flatten(io_lib:format("~p", [HeadHi80])) ++
",\"lo95\":" ++ lists:flatten(io_lib:format("~p", [HeadLo95])) ++
",\"hi95\":" ++ lists:flatten(io_lib:format("~p", [HeadHi95])) ++ "},"
++ format_result({Mean, Lo80, Hi80, Lo95, Hi95}).
-spec get_forecast_string(Values::string()) -> string().
get_forecast_string(Values) ->
"forecast(auto.arima("++Values++"))".
Function : get_time_series/1
Purpose : Gets information as strings from a Json object ( first time , last time and a list with all values )
-spec get_time_series(JSON::string()) -> Values::string().
get_time_series(Json) ->
{Values, _} = parse_json_list(Json, [], []),
get_values_string(Values).
Purpose : Get a list of times and values from a Json object
Returns : Lists with data from list of Json objects lists
@TODO Avoid reverse by merging this function with get_values_string
-spec parse_json_list(Datapoint::list(), Values::list(), Times::list()) -> {Values::list(), Times::list()}.
parse_json_list([], Values, Times) -> {lists:reverse(Values), lists:reverse(Times)};
parse_json_list([Head|Rest], Values, Times) ->
Val = lib_json:get_field(Head, "_source.value"),
parse_json_list(Rest, [Val|Values], []).
Purpose : Get the first and last time from a list ( no longer interesting )
Returns : A tuple with the first and last times in the list .
@TODO No longer necessary . Remove calls to it .
-spec get_times(Values::string(), tuple()) -> {list(), list()}.
get_times([], {}) -> {"321", "123"};
get_times([End | List], {}) -> {"321", "123"};
get_times(List, {End}) -> {binary_to_list(lists:last(List)), binary_to_list(End)}.
-spec get_values_string(Values::string()) -> string().
get_values_string([]) -> no_values;
get_values_string([Head | Tail]) -> get_values_string(Tail, lists:flatten(io_lib:format("~p)", [Head]))).
-spec get_values_string(Values::list(), string()) -> string().
get_values_string([], S) -> "c("++S;
get_values_string([Head | Tail], S) -> get_values_string(Tail, lists:flatten(io_lib:format("~p, ", [Head]))++S).
|
5227011c87ab8bc807cabb28fed098014b237148dc90cd6d2d873d125484af06 | Plutonomicon/plutarch-plutus | ByteStringSpec.hs | module Plutarch.ByteStringSpec (spec) where
import Data.ByteString qualified as BS
import Plutarch.Prelude
import Plutarch.Test
import Test.Hspec
# HLINT ignore spec " Monoid law , left identity " #
spec :: Spec
spec = do
describe "bytestring" . pgoldenSpec $ do
"empty" @| mempty #== phexByteStr "" @-> passert
"phexByteStr"
@| ( let a :: [String] = ["42", "ab", "df", "c9"]
in pconstant @PByteString (BS.pack $ fmap readByte a) #== phexByteStr (concat a)
)
@-> passert
"plengthByteStr" @| (plengthBS # phexByteStr "012f") #== 2 @-> passert
"pconsBS"
@| ( let xs = phexByteStr "48fCd1"
in (plengthBS #$ pconsBS # 91 # xs) #== (1 + plengthBS # xs)
)
@-> passert
"pindexByteStr"
@| (pindexBS # phexByteStr "4102af" # 1)
@== pconstant @PInteger 0x02
"psliceByteStr"
@| (psliceBS # 2 # 3 # phexByteStr "4102afde5b2a")
@== phexByteStr "afde5b"
"eq" @| phexByteStr "12" #== phexByteStr "12" @-> passert
let s1 = phexByteStr "12"
s2 = phexByteStr "34"
"semigroup" @\ do
"concats" @| s1 <> s2 @== phexByteStr "1234"
"laws" @\ do
"id.1" @| (mempty <> s1) #== s1 @-> passert
"id.2" @| s1 #== (mempty <> s1) @-> passert
| Interpret a byte .
> > > readByte " 41 "
65
>>> readByte "41"
65
-}
readByte :: Num a => String -> a
readByte a = fromInteger $ read $ "0x" <> a
| null | https://raw.githubusercontent.com/Plutonomicon/plutarch-plutus/9b83892057f2aaaed76e3af6193ad1ae242244cc/plutarch-test/tests/Plutarch/ByteStringSpec.hs | haskell | module Plutarch.ByteStringSpec (spec) where
import Data.ByteString qualified as BS
import Plutarch.Prelude
import Plutarch.Test
import Test.Hspec
# HLINT ignore spec " Monoid law , left identity " #
spec :: Spec
spec = do
describe "bytestring" . pgoldenSpec $ do
"empty" @| mempty #== phexByteStr "" @-> passert
"phexByteStr"
@| ( let a :: [String] = ["42", "ab", "df", "c9"]
in pconstant @PByteString (BS.pack $ fmap readByte a) #== phexByteStr (concat a)
)
@-> passert
"plengthByteStr" @| (plengthBS # phexByteStr "012f") #== 2 @-> passert
"pconsBS"
@| ( let xs = phexByteStr "48fCd1"
in (plengthBS #$ pconsBS # 91 # xs) #== (1 + plengthBS # xs)
)
@-> passert
"pindexByteStr"
@| (pindexBS # phexByteStr "4102af" # 1)
@== pconstant @PInteger 0x02
"psliceByteStr"
@| (psliceBS # 2 # 3 # phexByteStr "4102afde5b2a")
@== phexByteStr "afde5b"
"eq" @| phexByteStr "12" #== phexByteStr "12" @-> passert
let s1 = phexByteStr "12"
s2 = phexByteStr "34"
"semigroup" @\ do
"concats" @| s1 <> s2 @== phexByteStr "1234"
"laws" @\ do
"id.1" @| (mempty <> s1) #== s1 @-> passert
"id.2" @| s1 #== (mempty <> s1) @-> passert
| Interpret a byte .
> > > readByte " 41 "
65
>>> readByte "41"
65
-}
readByte :: Num a => String -> a
readByte a = fromInteger $ read $ "0x" <> a
| |
e885d52c2ac784120e59bdd247c8d784e1473f4b7027a308bc31226643c3df7a | input-output-hk/cardano-ledger-byron | Protocol.hs | module Cardano.Chain.Byron.API.Protocol (
previewDelegationMap
) where
import Cardano.Prelude
import qualified Cardano.Chain.Block as CC
import qualified Cardano.Chain.Delegation as Delegation
import qualified Cardano.Chain.Delegation.Validation.Interface as D.Iface
import qualified Cardano.Chain.Slotting as CC
-- | Preview the delegation map at a slot assuming no new delegations are
-- | scheduled.
previewDelegationMap :: CC.SlotNumber
-> CC.ChainValidationState
-> Delegation.Map
previewDelegationMap slot cvs =
let ds = D.Iface.activateDelegations slot $ CC.cvsDelegationState cvs
in D.Iface.delegationMap ds
| null | https://raw.githubusercontent.com/input-output-hk/cardano-ledger-byron/d309449e6c303a9f0dcc8dcf172df6f0b3195ed5/cardano-ledger/src/Cardano/Chain/Byron/API/Protocol.hs | haskell | | Preview the delegation map at a slot assuming no new delegations are
| scheduled. | module Cardano.Chain.Byron.API.Protocol (
previewDelegationMap
) where
import Cardano.Prelude
import qualified Cardano.Chain.Block as CC
import qualified Cardano.Chain.Delegation as Delegation
import qualified Cardano.Chain.Delegation.Validation.Interface as D.Iface
import qualified Cardano.Chain.Slotting as CC
previewDelegationMap :: CC.SlotNumber
-> CC.ChainValidationState
-> Delegation.Map
previewDelegationMap slot cvs =
let ds = D.Iface.activateDelegations slot $ CC.cvsDelegationState cvs
in D.Iface.delegationMap ds
|
7f3ba11960d4cc8d820a972271d59d2a2e0b61cd7a4305eb13c3cda24b84f16c | ocaml-multicore/ocaml-tsan | arg.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Para , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
type key = string
type doc = string
type usage_msg = string
type anon_fun = (string -> unit)
type spec =
| Unit of (unit -> unit) (* Call the function with unit argument *)
| Bool of (bool -> unit) (* Call the function with a bool argument *)
| Set of bool ref (* Set the reference to true *)
| Clear of bool ref (* Set the reference to false *)
| String of (string -> unit) (* Call the function with a string argument *)
| Set_string of string ref (* Set the reference to the string argument *)
| Int of (int -> unit) (* Call the function with an int argument *)
| Set_int of int ref (* Set the reference to the int argument *)
| Float of (float -> unit) (* Call the function with a float argument *)
| Set_float of float ref (* Set the reference to the float argument *)
| Tuple of spec list (* Take several arguments according to the
spec list *)
| Symbol of string list * (string -> unit)
Take one of the symbols as argument and
call the function with the symbol .
call the function with the symbol. *)
| Rest of (string -> unit) (* Stop interpreting keywords and call the
function with each remaining argument *)
| Rest_all of (string list -> unit)
(* Stop interpreting keywords and call the
function with all remaining arguments. *)
| Expand of (string -> string array) (* If the remaining arguments to process
are of the form
[["-foo"; "arg"] @ rest] where "foo"
is registered as [Expand f], then the
arguments [f "arg" @ rest] are
processed. Only allowed in
[parse_and_expand_argv_dynamic]. *)
exception Bad of string
exception Help of string
type error =
| Unknown of string
| Wrong of string * string * string (* option, actual, expected *)
| Missing of string
| Message of string
exception Stop of error (* used internally *)
open Printf
let rec assoc3 x l =
match l with
| [] -> raise Not_found
| (y1, y2, _) :: _ when y1 = x -> y2
| _ :: t -> assoc3 x t
let split s =
let i = String.index s '=' in
let len = String.length s in
String.sub s 0 i, String.sub s (i+1) (len-(i+1))
let make_symlist prefix sep suffix l =
match l with
| [] -> "<none>"
| h::t -> (List.fold_left (fun x y -> x ^ sep ^ y) (prefix ^ h) t) ^ suffix
let print_spec buf (key, spec, doc) =
if String.length doc > 0 then
match spec with
| Symbol (l, _) ->
bprintf buf " %s %s%s\n" key (make_symlist "{" "|" "}" l) doc
| _ ->
bprintf buf " %s %s\n" key doc
let help_action () = raise (Stop (Unknown "-help"))
let add_help speclist =
let add1 =
try ignore (assoc3 "-help" speclist); []
with Not_found ->
["-help", Unit help_action, " Display this list of options"]
and add2 =
try ignore (assoc3 "--help" speclist); []
with Not_found ->
["--help", Unit help_action, " Display this list of options"]
in
speclist @ (add1 @ add2)
let usage_b buf speclist errmsg =
bprintf buf "%s\n" errmsg;
List.iter (print_spec buf) (add_help speclist)
let usage_string speclist errmsg =
let b = Buffer.create 200 in
usage_b b speclist errmsg;
Buffer.contents b
let usage speclist errmsg =
eprintf "%s" (usage_string speclist errmsg)
let current = ref 0
let bool_of_string_opt x =
try Some (bool_of_string x)
with Invalid_argument _ -> None
let int_of_string_opt x =
try Some (int_of_string x)
with Failure _ -> None
let float_of_string_opt x =
try Some (float_of_string x)
with Failure _ -> None
let parse_and_expand_argv_dynamic_aux allow_expand current argv speclist anonfun
errmsg =
let initpos = !current in
let convert_error error =
(* convert an internal error to a Bad/Help exception
*or* add the program name as a prefix and the usage message as a suffix
to an user-raised Bad exception.
*)
let b = Buffer.create 200 in
let progname =
if initpos < (Array.length !argv) then !argv.(initpos) else "(?)" in
begin match error with
| Unknown "-help" -> ()
| Unknown "--help" -> ()
| Unknown s ->
bprintf b "%s: unknown option '%s'.\n" progname s
| Missing s ->
bprintf b "%s: option '%s' needs an argument.\n" progname s
| Wrong (opt, arg, expected) ->
bprintf b "%s: wrong argument '%s'; option '%s' expects %s.\n"
progname arg opt expected
| Message s -> (* user error message *)
bprintf b "%s: %s.\n" progname s
end;
usage_b b !speclist errmsg;
if error = Unknown "-help" || error = Unknown "--help"
then Help (Buffer.contents b)
else Bad (Buffer.contents b)
in
incr current;
while !current < (Array.length !argv) do
begin try
let s = !argv.(!current) in
if String.starts_with ~prefix:"-" s then begin
let action, follow =
try assoc3 s !speclist, None
with Not_found ->
try
let keyword, arg = split s in
assoc3 keyword !speclist, Some arg
with Not_found -> raise (Stop (Unknown s))
in
let no_arg () =
match follow with
| None -> ()
| Some arg -> raise (Stop (Wrong (s, arg, "no argument"))) in
let get_arg () =
match follow with
| None ->
if !current + 1 < (Array.length !argv) then !argv.(!current + 1)
else raise (Stop (Missing s))
| Some arg -> arg
in
let consume_arg () =
match follow with
| None -> incr current
| Some _ -> ()
in
let rec treat_action = function
| Unit f -> no_arg (); f ();
| Bool f ->
let arg = get_arg () in
begin match bool_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a boolean")))
| Some s -> f s
end;
consume_arg ();
| Set r -> no_arg (); r := true;
| Clear r -> no_arg (); r := false;
| String f ->
let arg = get_arg () in
f arg;
consume_arg ();
| Symbol (symb, f) ->
let arg = get_arg () in
if List.mem arg symb then begin
f arg;
consume_arg ();
end else begin
raise (Stop (Wrong (s, arg, "one of: "
^ (make_symlist "" " " "" symb))))
end
| Set_string r ->
r := get_arg ();
consume_arg ();
| Int f ->
let arg = get_arg () in
begin match int_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "an integer")))
| Some x -> f x
end;
consume_arg ();
| Set_int r ->
let arg = get_arg () in
begin match int_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "an integer")))
| Some x -> r := x
end;
consume_arg ();
| Float f ->
let arg = get_arg () in
begin match float_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a float")))
| Some x -> f x
end;
consume_arg ();
| Set_float r ->
let arg = get_arg () in
begin match float_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a float")))
| Some x -> r := x
end;
consume_arg ();
| Tuple specs ->
no_arg ();
List.iter treat_action specs;
| Rest f ->
no_arg ();
while !current < (Array.length !argv) - 1 do
f !argv.(!current + 1);
consume_arg ();
done;
| Rest_all f ->
no_arg ();
let acc = ref [] in
while !current < Array.length !argv - 1 do
acc := !argv.(!current + 1) :: !acc;
consume_arg ();
done;
f (List.rev !acc)
| Expand f ->
if not allow_expand then
raise (Invalid_argument "Arg.Expand is is only allowed with \
Arg.parse_and_expand_argv_dynamic");
let arg = get_arg () in
let newarg = f arg in
consume_arg ();
let before = Array.sub !argv 0 (!current + 1)
and after =
Array.sub !argv (!current + 1)
((Array.length !argv) - !current - 1) in
argv:= Array.concat [before;newarg;after];
in
treat_action action end
else anonfun s
with | Bad m -> raise (convert_error (Message m));
| Stop e -> raise (convert_error e);
end;
incr current
done
let parse_and_expand_argv_dynamic current argv speclist anonfun errmsg =
parse_and_expand_argv_dynamic_aux true current argv speclist anonfun errmsg
let parse_argv_dynamic ?(current=current) argv speclist anonfun errmsg =
parse_and_expand_argv_dynamic_aux false current (ref argv) speclist anonfun
errmsg
let parse_argv ?(current=current) argv speclist anonfun errmsg =
parse_argv_dynamic ~current:current argv (ref speclist) anonfun errmsg
let parse l f msg =
try
parse_argv Sys.argv l f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let parse_dynamic l f msg =
try
parse_argv_dynamic Sys.argv l f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let parse_expand l f msg =
try
let argv = ref Sys.argv in
let spec = ref l in
let current = ref (!current) in
parse_and_expand_argv_dynamic current argv spec f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let second_word s =
let len = String.length s in
let rec loop n =
if n >= len then len
else if s.[n] = ' ' then loop (n+1)
else n
in
match String.index s '\t' with
| n -> loop (n+1)
| exception Not_found ->
begin match String.index s ' ' with
| n -> loop (n+1)
| exception Not_found -> len
end
let max_arg_len cur (kwd, spec, doc) =
match spec with
| Symbol _ -> Int.max cur (String.length kwd)
| _ -> Int.max cur (String.length kwd + second_word doc)
let replace_leading_tab s =
let seen = ref false in
String.map (function '\t' when not !seen -> seen := true; ' ' | c -> c) s
let add_padding len ksd =
match ksd with
| (_, _, "") ->
(* Do not pad undocumented options, so that they still don't show up when
* run through [usage] or [parse]. *)
ksd
| (kwd, (Symbol _ as spec), msg) ->
let cutcol = second_word msg in
let spaces = String.make ((Int.max 0 (len - cutcol)) + 3) ' ' in
(kwd, spec, "\n" ^ spaces ^ replace_leading_tab msg)
| (kwd, spec, msg) ->
let cutcol = second_word msg in
let kwd_len = String.length kwd in
let diff = len - kwd_len - cutcol in
if diff <= 0 then
(kwd, spec, replace_leading_tab msg)
else
let spaces = String.make diff ' ' in
let prefix = String.sub (replace_leading_tab msg) 0 cutcol in
let suffix = String.sub msg cutcol (String.length msg - cutcol) in
(kwd, spec, prefix ^ spaces ^ suffix)
let align ?(limit=max_int) speclist =
let completed = add_help speclist in
let len = List.fold_left max_arg_len 0 completed in
let len = Int.min len limit in
List.map (add_padding len) completed
let trim_cr s =
let len = String.length s in
if len > 0 && String.get s (len - 1) = '\r' then
String.sub s 0 (len - 1)
else
s
let read_aux trim sep file =
let ic = open_in_bin file in
let buf = Buffer.create 200 in
let words = ref [] in
let stash () =
let word = Buffer.contents buf in
let word = if trim then trim_cr word else word in
words := word :: !words;
Buffer.clear buf
in
begin
try while true do
let c = input_char ic in
if c = sep then stash () else Buffer.add_char buf c
done
with End_of_file -> ()
end;
if Buffer.length buf > 0 then stash ();
close_in ic;
Array.of_list (List.rev !words)
let read_arg = read_aux true '\n'
let read_arg0 = read_aux false '\x00'
let write_aux sep file args =
let oc = open_out_bin file in
Array.iter (fun s -> fprintf oc "%s%c" s sep) args;
close_out oc
let write_arg = write_aux '\n'
let write_arg0 = write_aux '\x00'
| null | https://raw.githubusercontent.com/ocaml-multicore/ocaml-tsan/f54002470cc6ab780963cc81b11a85a820a40819/stdlib/arg.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Call the function with unit argument
Call the function with a bool argument
Set the reference to true
Set the reference to false
Call the function with a string argument
Set the reference to the string argument
Call the function with an int argument
Set the reference to the int argument
Call the function with a float argument
Set the reference to the float argument
Take several arguments according to the
spec list
Stop interpreting keywords and call the
function with each remaining argument
Stop interpreting keywords and call the
function with all remaining arguments.
If the remaining arguments to process
are of the form
[["-foo"; "arg"] @ rest] where "foo"
is registered as [Expand f], then the
arguments [f "arg" @ rest] are
processed. Only allowed in
[parse_and_expand_argv_dynamic].
option, actual, expected
used internally
convert an internal error to a Bad/Help exception
*or* add the program name as a prefix and the usage message as a suffix
to an user-raised Bad exception.
user error message
Do not pad undocumented options, so that they still don't show up when
* run through [usage] or [parse]. | , projet Para , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
type key = string
type doc = string
type usage_msg = string
type anon_fun = (string -> unit)
type spec =
| Symbol of string list * (string -> unit)
Take one of the symbols as argument and
call the function with the symbol .
call the function with the symbol. *)
| Rest_all of (string list -> unit)
exception Bad of string
exception Help of string
type error =
| Unknown of string
| Missing of string
| Message of string
open Printf
let rec assoc3 x l =
match l with
| [] -> raise Not_found
| (y1, y2, _) :: _ when y1 = x -> y2
| _ :: t -> assoc3 x t
let split s =
let i = String.index s '=' in
let len = String.length s in
String.sub s 0 i, String.sub s (i+1) (len-(i+1))
let make_symlist prefix sep suffix l =
match l with
| [] -> "<none>"
| h::t -> (List.fold_left (fun x y -> x ^ sep ^ y) (prefix ^ h) t) ^ suffix
let print_spec buf (key, spec, doc) =
if String.length doc > 0 then
match spec with
| Symbol (l, _) ->
bprintf buf " %s %s%s\n" key (make_symlist "{" "|" "}" l) doc
| _ ->
bprintf buf " %s %s\n" key doc
let help_action () = raise (Stop (Unknown "-help"))
let add_help speclist =
let add1 =
try ignore (assoc3 "-help" speclist); []
with Not_found ->
["-help", Unit help_action, " Display this list of options"]
and add2 =
try ignore (assoc3 "--help" speclist); []
with Not_found ->
["--help", Unit help_action, " Display this list of options"]
in
speclist @ (add1 @ add2)
let usage_b buf speclist errmsg =
bprintf buf "%s\n" errmsg;
List.iter (print_spec buf) (add_help speclist)
let usage_string speclist errmsg =
let b = Buffer.create 200 in
usage_b b speclist errmsg;
Buffer.contents b
let usage speclist errmsg =
eprintf "%s" (usage_string speclist errmsg)
let current = ref 0
let bool_of_string_opt x =
try Some (bool_of_string x)
with Invalid_argument _ -> None
let int_of_string_opt x =
try Some (int_of_string x)
with Failure _ -> None
let float_of_string_opt x =
try Some (float_of_string x)
with Failure _ -> None
let parse_and_expand_argv_dynamic_aux allow_expand current argv speclist anonfun
errmsg =
let initpos = !current in
let convert_error error =
let b = Buffer.create 200 in
let progname =
if initpos < (Array.length !argv) then !argv.(initpos) else "(?)" in
begin match error with
| Unknown "-help" -> ()
| Unknown "--help" -> ()
| Unknown s ->
bprintf b "%s: unknown option '%s'.\n" progname s
| Missing s ->
bprintf b "%s: option '%s' needs an argument.\n" progname s
| Wrong (opt, arg, expected) ->
bprintf b "%s: wrong argument '%s'; option '%s' expects %s.\n"
progname arg opt expected
bprintf b "%s: %s.\n" progname s
end;
usage_b b !speclist errmsg;
if error = Unknown "-help" || error = Unknown "--help"
then Help (Buffer.contents b)
else Bad (Buffer.contents b)
in
incr current;
while !current < (Array.length !argv) do
begin try
let s = !argv.(!current) in
if String.starts_with ~prefix:"-" s then begin
let action, follow =
try assoc3 s !speclist, None
with Not_found ->
try
let keyword, arg = split s in
assoc3 keyword !speclist, Some arg
with Not_found -> raise (Stop (Unknown s))
in
let no_arg () =
match follow with
| None -> ()
| Some arg -> raise (Stop (Wrong (s, arg, "no argument"))) in
let get_arg () =
match follow with
| None ->
if !current + 1 < (Array.length !argv) then !argv.(!current + 1)
else raise (Stop (Missing s))
| Some arg -> arg
in
let consume_arg () =
match follow with
| None -> incr current
| Some _ -> ()
in
let rec treat_action = function
| Unit f -> no_arg (); f ();
| Bool f ->
let arg = get_arg () in
begin match bool_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a boolean")))
| Some s -> f s
end;
consume_arg ();
| Set r -> no_arg (); r := true;
| Clear r -> no_arg (); r := false;
| String f ->
let arg = get_arg () in
f arg;
consume_arg ();
| Symbol (symb, f) ->
let arg = get_arg () in
if List.mem arg symb then begin
f arg;
consume_arg ();
end else begin
raise (Stop (Wrong (s, arg, "one of: "
^ (make_symlist "" " " "" symb))))
end
| Set_string r ->
r := get_arg ();
consume_arg ();
| Int f ->
let arg = get_arg () in
begin match int_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "an integer")))
| Some x -> f x
end;
consume_arg ();
| Set_int r ->
let arg = get_arg () in
begin match int_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "an integer")))
| Some x -> r := x
end;
consume_arg ();
| Float f ->
let arg = get_arg () in
begin match float_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a float")))
| Some x -> f x
end;
consume_arg ();
| Set_float r ->
let arg = get_arg () in
begin match float_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a float")))
| Some x -> r := x
end;
consume_arg ();
| Tuple specs ->
no_arg ();
List.iter treat_action specs;
| Rest f ->
no_arg ();
while !current < (Array.length !argv) - 1 do
f !argv.(!current + 1);
consume_arg ();
done;
| Rest_all f ->
no_arg ();
let acc = ref [] in
while !current < Array.length !argv - 1 do
acc := !argv.(!current + 1) :: !acc;
consume_arg ();
done;
f (List.rev !acc)
| Expand f ->
if not allow_expand then
raise (Invalid_argument "Arg.Expand is is only allowed with \
Arg.parse_and_expand_argv_dynamic");
let arg = get_arg () in
let newarg = f arg in
consume_arg ();
let before = Array.sub !argv 0 (!current + 1)
and after =
Array.sub !argv (!current + 1)
((Array.length !argv) - !current - 1) in
argv:= Array.concat [before;newarg;after];
in
treat_action action end
else anonfun s
with | Bad m -> raise (convert_error (Message m));
| Stop e -> raise (convert_error e);
end;
incr current
done
let parse_and_expand_argv_dynamic current argv speclist anonfun errmsg =
parse_and_expand_argv_dynamic_aux true current argv speclist anonfun errmsg
let parse_argv_dynamic ?(current=current) argv speclist anonfun errmsg =
parse_and_expand_argv_dynamic_aux false current (ref argv) speclist anonfun
errmsg
let parse_argv ?(current=current) argv speclist anonfun errmsg =
parse_argv_dynamic ~current:current argv (ref speclist) anonfun errmsg
let parse l f msg =
try
parse_argv Sys.argv l f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let parse_dynamic l f msg =
try
parse_argv_dynamic Sys.argv l f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let parse_expand l f msg =
try
let argv = ref Sys.argv in
let spec = ref l in
let current = ref (!current) in
parse_and_expand_argv_dynamic current argv spec f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let second_word s =
let len = String.length s in
let rec loop n =
if n >= len then len
else if s.[n] = ' ' then loop (n+1)
else n
in
match String.index s '\t' with
| n -> loop (n+1)
| exception Not_found ->
begin match String.index s ' ' with
| n -> loop (n+1)
| exception Not_found -> len
end
let max_arg_len cur (kwd, spec, doc) =
match spec with
| Symbol _ -> Int.max cur (String.length kwd)
| _ -> Int.max cur (String.length kwd + second_word doc)
let replace_leading_tab s =
let seen = ref false in
String.map (function '\t' when not !seen -> seen := true; ' ' | c -> c) s
let add_padding len ksd =
match ksd with
| (_, _, "") ->
ksd
| (kwd, (Symbol _ as spec), msg) ->
let cutcol = second_word msg in
let spaces = String.make ((Int.max 0 (len - cutcol)) + 3) ' ' in
(kwd, spec, "\n" ^ spaces ^ replace_leading_tab msg)
| (kwd, spec, msg) ->
let cutcol = second_word msg in
let kwd_len = String.length kwd in
let diff = len - kwd_len - cutcol in
if diff <= 0 then
(kwd, spec, replace_leading_tab msg)
else
let spaces = String.make diff ' ' in
let prefix = String.sub (replace_leading_tab msg) 0 cutcol in
let suffix = String.sub msg cutcol (String.length msg - cutcol) in
(kwd, spec, prefix ^ spaces ^ suffix)
let align ?(limit=max_int) speclist =
let completed = add_help speclist in
let len = List.fold_left max_arg_len 0 completed in
let len = Int.min len limit in
List.map (add_padding len) completed
let trim_cr s =
let len = String.length s in
if len > 0 && String.get s (len - 1) = '\r' then
String.sub s 0 (len - 1)
else
s
let read_aux trim sep file =
let ic = open_in_bin file in
let buf = Buffer.create 200 in
let words = ref [] in
let stash () =
let word = Buffer.contents buf in
let word = if trim then trim_cr word else word in
words := word :: !words;
Buffer.clear buf
in
begin
try while true do
let c = input_char ic in
if c = sep then stash () else Buffer.add_char buf c
done
with End_of_file -> ()
end;
if Buffer.length buf > 0 then stash ();
close_in ic;
Array.of_list (List.rev !words)
let read_arg = read_aux true '\n'
let read_arg0 = read_aux false '\x00'
let write_aux sep file args =
let oc = open_out_bin file in
Array.iter (fun s -> fprintf oc "%s%c" s sep) args;
close_out oc
let write_arg = write_aux '\n'
let write_arg0 = write_aux '\x00'
|
51a717f27e2d91c0f2fc022c192908d50f247276923538ed606b5876003537ca | ashinn/chibi-scheme | sha2-native.scm | ;; sha2-native.scm -- SHA-2 digest algorithms native interface
Copyright ( c ) 2015 . All rights reserved .
;; BSD-style license:
(define (process-sha-data! context src)
(cond ((or (bytevector? src) (string? src))
(add-sha-data! context src))
((input-port? src)
(let lp ((chunk (read-bytevector 1024 src)))
(unless (eof-object? chunk)
(add-sha-data! context chunk)
(lp (read-bytevector 1024 src)))))
(else
(error "unknown digest source: " src))))
(define (sha-224 src)
(let ((context (start-sha type-sha-224)))
(process-sha-data! context src)
(get-sha context)))
(define (sha-256 src)
(let ((context (start-sha type-sha-256)))
(process-sha-data! context src)
(get-sha context)))
| null | https://raw.githubusercontent.com/ashinn/chibi-scheme/8b27ce97265e5028c61b2386a86a2c43c1cfba0d/lib/chibi/crypto/sha2-native.scm | scheme | sha2-native.scm -- SHA-2 digest algorithms native interface
BSD-style license: | Copyright ( c ) 2015 . All rights reserved .
(define (process-sha-data! context src)
(cond ((or (bytevector? src) (string? src))
(add-sha-data! context src))
((input-port? src)
(let lp ((chunk (read-bytevector 1024 src)))
(unless (eof-object? chunk)
(add-sha-data! context chunk)
(lp (read-bytevector 1024 src)))))
(else
(error "unknown digest source: " src))))
(define (sha-224 src)
(let ((context (start-sha type-sha-224)))
(process-sha-data! context src)
(get-sha context)))
(define (sha-256 src)
(let ((context (start-sha type-sha-256)))
(process-sha-data! context src)
(get-sha context)))
|
1f1ef9ebbe754c8dd4d1bdcf0a726d1155aad311736ffdca664345b1c105361e | haskell-compat/base-compat | Batteries.hs | {-# LANGUAGE PackageImports #-}
# OPTIONS_GHC -fno - warn - dodgy - exports -fno - warn - unused - imports #
-- | Reexports "Data.Typeable.Compat"
-- from a globally unique namespace.
module Data.Typeable.Compat.Repl.Batteries (
module Data.Typeable.Compat
) where
import "this" Data.Typeable.Compat
| null | https://raw.githubusercontent.com/haskell-compat/base-compat/e18c4664d784542505966a7610bbac43652afda6/base-compat-batteries/src/Data/Typeable/Compat/Repl/Batteries.hs | haskell | # LANGUAGE PackageImports #
| Reexports "Data.Typeable.Compat"
from a globally unique namespace. | # OPTIONS_GHC -fno - warn - dodgy - exports -fno - warn - unused - imports #
module Data.Typeable.Compat.Repl.Batteries (
module Data.Typeable.Compat
) where
import "this" Data.Typeable.Compat
|
bd47ca787eb6867d6ae86c451aee9c57fa6ab41b87b985c7a9b9db0bce9d77da | pookleblinky/lifescripts | attention.rkt | #lang racket
tiny proof of concept . What I want now is to simply choose , at random , one
;issue/news event to focus on at a time.
(require "../machinery/rng.rkt")
(provide boost-topic)
;; Now the hard part: seeding the topics list
(define topics
(list
'foo
'bar
'baz))
(define (boost-topic)
(define topic (randomchoice topics))
(printf "Today's topic to boost: ~a~n" topic))
| null | https://raw.githubusercontent.com/pookleblinky/lifescripts/eab3fe5aaf2c9f5ee9baaa441cb5d556cd7a3a78/social/attention.rkt | racket | issue/news event to focus on at a time.
Now the hard part: seeding the topics list | #lang racket
tiny proof of concept . What I want now is to simply choose , at random , one
(require "../machinery/rng.rkt")
(provide boost-topic)
(define topics
(list
'foo
'bar
'baz))
(define (boost-topic)
(define topic (randomchoice topics))
(printf "Today's topic to boost: ~a~n" topic))
|
552fdc343d2619dac47cd804f5ac096786b04224e2a1acd573deedc2b64d344c | let-def/ocaml-recovery-parser | ast_helper.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, LexiFi
(* *)
Copyright 2012 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
* Helpers to produce Parsetree fragments
{ b Warning } This module is unstable and part of
{ { ! Compiler_libs}compiler - libs } .
{b Warning} This module is unstable and part of
{{!Compiler_libs}compiler-libs}.
*)
open Asttypes
open Docstrings
open Parsetree
type let_binding =
{ lb_pattern: pattern;
lb_expression: expression;
lb_attributes: attributes;
lb_docs: docs Lazy.t;
lb_text: text Lazy.t;
lb_loc: Location.t; }
type let_bindings =
{ lbs_bindings: let_binding list;
lbs_rec: rec_flag;
lbs_extension: string Asttypes.loc option;
lbs_loc: Location.t }
type 'a with_loc = 'a Location.loc
type loc = Location.t
type lid = Longident.t with_loc
type str = string with_loc
type str_opt = string option with_loc
type attrs = attribute list
* { 1 Default locations }
val default_loc: loc ref
(** Default value for all optional location arguments. *)
with_default_loc : loc - > ( unit - > ' a ) - > ' a
(** Set the [default_loc] within the scope of the execution
of the provided function. *)
(** {1 Constants} *)
module Const : sig
val char : char -> constant
val string : ?quotation_delimiter:string -> string -> constant
val integer : ?suffix:char -> string -> constant
val int : ?suffix:char -> int -> constant
val int32 : ?suffix:char -> int32 -> constant
val int64 : ?suffix:char -> int64 -> constant
val nativeint : ?suffix:char -> nativeint -> constant
val float : ?suffix:char -> string -> constant
end
(** {1 Attributes} *)
module Attr : sig
val mk: ?loc:loc -> str -> payload -> attribute
end
* { 1 Core language }
(** Type expressions *)
module Typ :
sig
val mk: ?loc:loc -> ?attrs:attrs -> core_type_desc -> core_type
val attr: core_type -> attribute -> core_type
val any: ?loc:loc -> ?attrs:attrs -> unit -> core_type
val var: ?loc:loc -> ?attrs:attrs -> string -> core_type
val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type -> core_type
-> core_type
val tuple: ?loc:loc -> ?attrs:attrs -> core_type list -> core_type
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type
val object_: ?loc:loc -> ?attrs:attrs -> object_field list
-> closed_flag -> core_type
val class_: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type
val alias: ?loc:loc -> ?attrs:attrs -> core_type -> string -> core_type
val variant: ?loc:loc -> ?attrs:attrs -> row_field list -> closed_flag
-> label list option -> core_type
val poly: ?loc:loc -> ?attrs:attrs -> str list -> core_type -> core_type
val package: ?loc:loc -> ?attrs:attrs -> lid -> (lid * core_type) list
-> core_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> core_type
val force_poly: core_type -> core_type
val varify_constructors: str list -> core_type -> core_type
* [ varify_constructors newtypes te ] is type expression [ te ] , of which
any of nullary type constructor [ tc ] is replaced by type variable of
the same name , if [ tc ] 's name appears in [ newtypes ] .
Raise [ Syntaxerr . Variable_in_scope ] if any type variable inside [ te ]
appears in [ newtypes ] .
@since 4.05
any of nullary type constructor [tc] is replaced by type variable of
the same name, if [tc]'s name appears in [newtypes].
Raise [Syntaxerr.Variable_in_scope] if any type variable inside [te]
appears in [newtypes].
@since 4.05
*)
end
(** Patterns *)
module Pat:
sig
val mk: ?loc:loc -> ?attrs:attrs -> pattern_desc -> pattern
val attr:pattern -> attribute -> pattern
val any: ?loc:loc -> ?attrs:attrs -> unit -> pattern
val var: ?loc:loc -> ?attrs:attrs -> str -> pattern
val alias: ?loc:loc -> ?attrs:attrs -> pattern -> str -> pattern
val constant: ?loc:loc -> ?attrs:attrs -> constant -> pattern
val interval: ?loc:loc -> ?attrs:attrs -> constant -> constant -> pattern
val tuple: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern
val construct: ?loc:loc -> ?attrs:attrs -> lid -> pattern option -> pattern
val variant: ?loc:loc -> ?attrs:attrs -> label -> pattern option -> pattern
val record: ?loc:loc -> ?attrs:attrs -> (lid * pattern) list -> closed_flag
-> pattern
val array: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern
val or_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern -> pattern
val constraint_: ?loc:loc -> ?attrs:attrs -> pattern -> core_type -> pattern
val type_: ?loc:loc -> ?attrs:attrs -> lid -> pattern
val lazy_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern
val unpack: ?loc:loc -> ?attrs:attrs -> str_opt -> pattern
val open_: ?loc:loc -> ?attrs:attrs -> lid -> pattern -> pattern
val exception_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern
val extension: ?loc:loc -> ?attrs:attrs -> extension -> pattern
end
(** Expressions *)
module Exp:
sig
val mk: ?loc:loc -> ?attrs:attrs -> expression_desc -> expression
val attr: expression -> attribute -> expression
val ident: ?loc:loc -> ?attrs:attrs -> lid -> expression
val constant: ?loc:loc -> ?attrs:attrs -> constant -> expression
val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list
-> expression -> expression
val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option
-> pattern -> expression -> expression
val function_: ?loc:loc -> ?attrs:attrs -> case list -> expression
val apply: ?loc:loc -> ?attrs:attrs -> expression
-> (arg_label * expression) list -> expression
val match_: ?loc:loc -> ?attrs:attrs -> expression -> case list
-> expression
val try_: ?loc:loc -> ?attrs:attrs -> expression -> case list -> expression
val tuple: ?loc:loc -> ?attrs:attrs -> expression list -> expression
val construct: ?loc:loc -> ?attrs:attrs -> lid -> expression option
-> expression
val variant: ?loc:loc -> ?attrs:attrs -> label -> expression option
-> expression
val record: ?loc:loc -> ?attrs:attrs -> (lid * expression) list
-> expression option -> expression
val field: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression
val setfield: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression
-> expression
val array: ?loc:loc -> ?attrs:attrs -> expression list -> expression
val ifthenelse: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression option -> expression
val sequence: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression
val while_: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression
val for_: ?loc:loc -> ?attrs:attrs -> pattern -> expression -> expression
-> direction_flag -> expression -> expression
val coerce: ?loc:loc -> ?attrs:attrs -> expression -> core_type option
-> core_type -> expression
val constraint_: ?loc:loc -> ?attrs:attrs -> expression -> core_type
-> expression
val send: ?loc:loc -> ?attrs:attrs -> expression -> str -> expression
val new_: ?loc:loc -> ?attrs:attrs -> lid -> expression
val setinstvar: ?loc:loc -> ?attrs:attrs -> str -> expression -> expression
val override: ?loc:loc -> ?attrs:attrs -> (str * expression) list
-> expression
val letmodule: ?loc:loc -> ?attrs:attrs -> str_opt -> module_expr
-> expression -> expression
val letexception:
?loc:loc -> ?attrs:attrs -> extension_constructor -> expression
-> expression
val assert_: ?loc:loc -> ?attrs:attrs -> expression -> expression
val lazy_: ?loc:loc -> ?attrs:attrs -> expression -> expression
val poly: ?loc:loc -> ?attrs:attrs -> expression -> core_type option
-> expression
val object_: ?loc:loc -> ?attrs:attrs -> class_structure -> expression
val newtype: ?loc:loc -> ?attrs:attrs -> str -> expression -> expression
val pack: ?loc:loc -> ?attrs:attrs -> module_expr -> expression
val open_: ?loc:loc -> ?attrs:attrs -> open_declaration -> expression
-> expression
val letop: ?loc:loc -> ?attrs:attrs -> binding_op
-> binding_op list -> expression -> expression
val extension: ?loc:loc -> ?attrs:attrs -> extension -> expression
val unreachable: ?loc:loc -> ?attrs:attrs -> unit -> expression
val case: pattern -> ?guard:expression -> expression -> case
val binding_op: str -> pattern -> expression -> loc -> binding_op
end
(** Value declarations *)
module Val:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
?prim:string list -> str -> core_type -> value_description
end
(** Type declarations *)
module Type:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?params:(core_type * variance) list ->
?cstrs:(core_type * core_type * loc) list ->
?kind:type_kind -> ?priv:private_flag -> ?manifest:core_type -> str ->
type_declaration
val constructor: ?loc:loc -> ?attrs:attrs -> ?info:info ->
?args:constructor_arguments -> ?res:core_type -> str ->
constructor_declaration
val field: ?loc:loc -> ?attrs:attrs -> ?info:info ->
?mut:mutable_flag -> str -> core_type -> label_declaration
end
(** Type extensions *)
module Te:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
?params:(core_type * variance) list -> ?priv:private_flag ->
lid -> extension_constructor list -> type_extension
val mk_exception: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
extension_constructor -> type_exception
val constructor: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
str -> extension_constructor_kind -> extension_constructor
val decl: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
?args:constructor_arguments -> ?res:core_type -> str ->
extension_constructor
val rebind: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
str -> lid -> extension_constructor
end
(** {1 Module language} *)
(** Module type expressions *)
module Mty:
sig
val mk: ?loc:loc -> ?attrs:attrs -> module_type_desc -> module_type
val attr: module_type -> attribute -> module_type
val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_type
val alias: ?loc:loc -> ?attrs:attrs -> lid -> module_type
val signature: ?loc:loc -> ?attrs:attrs -> signature -> module_type
val functor_: ?loc:loc -> ?attrs:attrs ->
functor_parameter -> module_type -> module_type
val with_: ?loc:loc -> ?attrs:attrs -> module_type ->
with_constraint list -> module_type
val typeof_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_type
end
(** Module expressions *)
module Mod:
sig
val mk: ?loc:loc -> ?attrs:attrs -> module_expr_desc -> module_expr
val attr: module_expr -> attribute -> module_expr
val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_expr
val structure: ?loc:loc -> ?attrs:attrs -> structure -> module_expr
val functor_: ?loc:loc -> ?attrs:attrs ->
functor_parameter -> module_expr -> module_expr
val apply: ?loc:loc -> ?attrs:attrs -> module_expr -> module_expr ->
module_expr
val constraint_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type ->
module_expr
val unpack: ?loc:loc -> ?attrs:attrs -> expression -> module_expr
val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_expr
end
(** Signature items *)
module Sig:
sig
val mk: ?loc:loc -> signature_item_desc -> signature_item
val value: ?loc:loc -> value_description -> signature_item
val type_: ?loc:loc -> rec_flag -> type_declaration list -> signature_item
val type_subst: ?loc:loc -> type_declaration list -> signature_item
val type_extension: ?loc:loc -> type_extension -> signature_item
val exception_: ?loc:loc -> type_exception -> signature_item
val module_: ?loc:loc -> module_declaration -> signature_item
val mod_subst: ?loc:loc -> module_substitution -> signature_item
val rec_module: ?loc:loc -> module_declaration list -> signature_item
val modtype: ?loc:loc -> module_type_declaration -> signature_item
val open_: ?loc:loc -> open_description -> signature_item
val include_: ?loc:loc -> include_description -> signature_item
val class_: ?loc:loc -> class_description list -> signature_item
val class_type: ?loc:loc -> class_type_declaration list -> signature_item
val extension: ?loc:loc -> ?attrs:attrs -> extension -> signature_item
val attribute: ?loc:loc -> attribute -> signature_item
val text: text -> signature_item list
end
(** Structure items *)
module Str:
sig
val mk: ?loc:loc -> structure_item_desc -> structure_item
val eval: ?loc:loc -> ?attrs:attributes -> expression -> structure_item
val value: ?loc:loc -> rec_flag -> value_binding list -> structure_item
val primitive: ?loc:loc -> value_description -> structure_item
val type_: ?loc:loc -> rec_flag -> type_declaration list -> structure_item
val type_extension: ?loc:loc -> type_extension -> structure_item
val exception_: ?loc:loc -> type_exception -> structure_item
val module_: ?loc:loc -> module_binding -> structure_item
val rec_module: ?loc:loc -> module_binding list -> structure_item
val modtype: ?loc:loc -> module_type_declaration -> structure_item
val open_: ?loc:loc -> open_declaration -> structure_item
val class_: ?loc:loc -> class_declaration list -> structure_item
val class_type: ?loc:loc -> class_type_declaration list -> structure_item
val include_: ?loc:loc -> include_declaration -> structure_item
val extension: ?loc:loc -> ?attrs:attrs -> extension -> structure_item
val attribute: ?loc:loc -> attribute -> structure_item
val text: text -> structure_item list
end
(** Module declarations *)
module Md:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str_opt -> module_type -> module_declaration
end
(** Module substitutions *)
module Ms:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str -> lid -> module_substitution
end
(** Module type declarations *)
module Mtd:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?typ:module_type -> str -> module_type_declaration
end
(** Module bindings *)
module Mb:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str_opt -> module_expr -> module_binding
end
(** Opens *)
module Opn:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs ->
?override:override_flag -> 'a -> 'a open_infos
end
(** Includes *)
module Incl:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> 'a -> 'a include_infos
end
(** Value bindings *)
module Vb:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
pattern -> expression -> value_binding
end
(** {1 Class language} *)
(** Class type expressions *)
module Cty:
sig
val mk: ?loc:loc -> ?attrs:attrs -> class_type_desc -> class_type
val attr: class_type -> attribute -> class_type
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_type
val signature: ?loc:loc -> ?attrs:attrs -> class_signature -> class_type
val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type ->
class_type -> class_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type
val open_: ?loc:loc -> ?attrs:attrs -> open_description -> class_type
-> class_type
end
(** Class type fields *)
module Ctf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
class_type_field_desc -> class_type_field
val attr: class_type_field -> attribute -> class_type_field
val inherit_: ?loc:loc -> ?attrs:attrs -> class_type -> class_type_field
val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag ->
virtual_flag -> core_type -> class_type_field
val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag ->
virtual_flag -> core_type -> class_type_field
val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type ->
class_type_field
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type_field
val attribute: ?loc:loc -> attribute -> class_type_field
val text: text -> class_type_field list
end
(** Class expressions *)
module Cl:
sig
val mk: ?loc:loc -> ?attrs:attrs -> class_expr_desc -> class_expr
val attr: class_expr -> attribute -> class_expr
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_expr
val structure: ?loc:loc -> ?attrs:attrs -> class_structure -> class_expr
val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option ->
pattern -> class_expr -> class_expr
val apply: ?loc:loc -> ?attrs:attrs -> class_expr ->
(arg_label * expression) list -> class_expr
val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list ->
class_expr -> class_expr
val constraint_: ?loc:loc -> ?attrs:attrs -> class_expr -> class_type ->
class_expr
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_expr
val open_: ?loc:loc -> ?attrs:attrs -> open_description -> class_expr
-> class_expr
end
(** Class fields *)
module Cf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> class_field_desc ->
class_field
val attr: class_field -> attribute -> class_field
val inherit_: ?loc:loc -> ?attrs:attrs -> override_flag -> class_expr ->
str option -> class_field
val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag ->
class_field_kind -> class_field
val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag ->
class_field_kind -> class_field
val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type ->
class_field
val initializer_: ?loc:loc -> ?attrs:attrs -> expression -> class_field
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_field
val attribute: ?loc:loc -> attribute -> class_field
val text: text -> class_field list
val virtual_: core_type -> class_field_kind
val concrete: override_flag -> expression -> class_field_kind
end
(** Classes *)
module Ci:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?virt:virtual_flag -> ?params:(core_type * variance) list ->
str -> 'a -> 'a class_infos
end
(** Class signatures *)
module Csig:
sig
val mk: core_type -> class_type_field list -> class_signature
end
(** Class structures *)
module Cstr:
sig
val mk: pattern -> class_field list -> class_structure
end
(** Row fields *)
module Rf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> row_field_desc -> row_field
val tag: ?loc:loc -> ?attrs:attrs ->
label with_loc -> bool -> core_type list -> row_field
val inherit_: ?loc:loc -> core_type -> row_field
end
(** Object fields *)
module Of:
sig
val mk: ?loc:loc -> ?attrs:attrs ->
object_field_desc -> object_field
val tag: ?loc:loc -> ?attrs:attrs ->
label with_loc -> core_type -> object_field
val inherit_: ?loc:loc -> core_type -> object_field
end
| null | https://raw.githubusercontent.com/let-def/ocaml-recovery-parser/87a87a25c9436f2e3187b82ec59a5f75c0e2446b/lib/ast_helper.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Default value for all optional location arguments.
* Set the [default_loc] within the scope of the execution
of the provided function.
* {1 Constants}
* {1 Attributes}
* Type expressions
* Patterns
* Expressions
* Value declarations
* Type declarations
* Type extensions
* {1 Module language}
* Module type expressions
* Module expressions
* Signature items
* Structure items
* Module declarations
* Module substitutions
* Module type declarations
* Module bindings
* Opens
* Includes
* Value bindings
* {1 Class language}
* Class type expressions
* Class type fields
* Class expressions
* Class fields
* Classes
* Class signatures
* Class structures
* Row fields
* Object fields | , LexiFi
Copyright 2012 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* Helpers to produce Parsetree fragments
{ b Warning } This module is unstable and part of
{ { ! Compiler_libs}compiler - libs } .
{b Warning} This module is unstable and part of
{{!Compiler_libs}compiler-libs}.
*)
open Asttypes
open Docstrings
open Parsetree
type let_binding =
{ lb_pattern: pattern;
lb_expression: expression;
lb_attributes: attributes;
lb_docs: docs Lazy.t;
lb_text: text Lazy.t;
lb_loc: Location.t; }
type let_bindings =
{ lbs_bindings: let_binding list;
lbs_rec: rec_flag;
lbs_extension: string Asttypes.loc option;
lbs_loc: Location.t }
type 'a with_loc = 'a Location.loc
type loc = Location.t
type lid = Longident.t with_loc
type str = string with_loc
type str_opt = string option with_loc
type attrs = attribute list
* { 1 Default locations }
val default_loc: loc ref
with_default_loc : loc - > ( unit - > ' a ) - > ' a
module Const : sig
val char : char -> constant
val string : ?quotation_delimiter:string -> string -> constant
val integer : ?suffix:char -> string -> constant
val int : ?suffix:char -> int -> constant
val int32 : ?suffix:char -> int32 -> constant
val int64 : ?suffix:char -> int64 -> constant
val nativeint : ?suffix:char -> nativeint -> constant
val float : ?suffix:char -> string -> constant
end
module Attr : sig
val mk: ?loc:loc -> str -> payload -> attribute
end
* { 1 Core language }
module Typ :
sig
val mk: ?loc:loc -> ?attrs:attrs -> core_type_desc -> core_type
val attr: core_type -> attribute -> core_type
val any: ?loc:loc -> ?attrs:attrs -> unit -> core_type
val var: ?loc:loc -> ?attrs:attrs -> string -> core_type
val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type -> core_type
-> core_type
val tuple: ?loc:loc -> ?attrs:attrs -> core_type list -> core_type
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type
val object_: ?loc:loc -> ?attrs:attrs -> object_field list
-> closed_flag -> core_type
val class_: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> core_type
val alias: ?loc:loc -> ?attrs:attrs -> core_type -> string -> core_type
val variant: ?loc:loc -> ?attrs:attrs -> row_field list -> closed_flag
-> label list option -> core_type
val poly: ?loc:loc -> ?attrs:attrs -> str list -> core_type -> core_type
val package: ?loc:loc -> ?attrs:attrs -> lid -> (lid * core_type) list
-> core_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> core_type
val force_poly: core_type -> core_type
val varify_constructors: str list -> core_type -> core_type
* [ varify_constructors newtypes te ] is type expression [ te ] , of which
any of nullary type constructor [ tc ] is replaced by type variable of
the same name , if [ tc ] 's name appears in [ newtypes ] .
Raise [ Syntaxerr . Variable_in_scope ] if any type variable inside [ te ]
appears in [ newtypes ] .
@since 4.05
any of nullary type constructor [tc] is replaced by type variable of
the same name, if [tc]'s name appears in [newtypes].
Raise [Syntaxerr.Variable_in_scope] if any type variable inside [te]
appears in [newtypes].
@since 4.05
*)
end
module Pat:
sig
val mk: ?loc:loc -> ?attrs:attrs -> pattern_desc -> pattern
val attr:pattern -> attribute -> pattern
val any: ?loc:loc -> ?attrs:attrs -> unit -> pattern
val var: ?loc:loc -> ?attrs:attrs -> str -> pattern
val alias: ?loc:loc -> ?attrs:attrs -> pattern -> str -> pattern
val constant: ?loc:loc -> ?attrs:attrs -> constant -> pattern
val interval: ?loc:loc -> ?attrs:attrs -> constant -> constant -> pattern
val tuple: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern
val construct: ?loc:loc -> ?attrs:attrs -> lid -> pattern option -> pattern
val variant: ?loc:loc -> ?attrs:attrs -> label -> pattern option -> pattern
val record: ?loc:loc -> ?attrs:attrs -> (lid * pattern) list -> closed_flag
-> pattern
val array: ?loc:loc -> ?attrs:attrs -> pattern list -> pattern
val or_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern -> pattern
val constraint_: ?loc:loc -> ?attrs:attrs -> pattern -> core_type -> pattern
val type_: ?loc:loc -> ?attrs:attrs -> lid -> pattern
val lazy_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern
val unpack: ?loc:loc -> ?attrs:attrs -> str_opt -> pattern
val open_: ?loc:loc -> ?attrs:attrs -> lid -> pattern -> pattern
val exception_: ?loc:loc -> ?attrs:attrs -> pattern -> pattern
val extension: ?loc:loc -> ?attrs:attrs -> extension -> pattern
end
module Exp:
sig
val mk: ?loc:loc -> ?attrs:attrs -> expression_desc -> expression
val attr: expression -> attribute -> expression
val ident: ?loc:loc -> ?attrs:attrs -> lid -> expression
val constant: ?loc:loc -> ?attrs:attrs -> constant -> expression
val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list
-> expression -> expression
val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option
-> pattern -> expression -> expression
val function_: ?loc:loc -> ?attrs:attrs -> case list -> expression
val apply: ?loc:loc -> ?attrs:attrs -> expression
-> (arg_label * expression) list -> expression
val match_: ?loc:loc -> ?attrs:attrs -> expression -> case list
-> expression
val try_: ?loc:loc -> ?attrs:attrs -> expression -> case list -> expression
val tuple: ?loc:loc -> ?attrs:attrs -> expression list -> expression
val construct: ?loc:loc -> ?attrs:attrs -> lid -> expression option
-> expression
val variant: ?loc:loc -> ?attrs:attrs -> label -> expression option
-> expression
val record: ?loc:loc -> ?attrs:attrs -> (lid * expression) list
-> expression option -> expression
val field: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression
val setfield: ?loc:loc -> ?attrs:attrs -> expression -> lid -> expression
-> expression
val array: ?loc:loc -> ?attrs:attrs -> expression list -> expression
val ifthenelse: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression option -> expression
val sequence: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression
val while_: ?loc:loc -> ?attrs:attrs -> expression -> expression
-> expression
val for_: ?loc:loc -> ?attrs:attrs -> pattern -> expression -> expression
-> direction_flag -> expression -> expression
val coerce: ?loc:loc -> ?attrs:attrs -> expression -> core_type option
-> core_type -> expression
val constraint_: ?loc:loc -> ?attrs:attrs -> expression -> core_type
-> expression
val send: ?loc:loc -> ?attrs:attrs -> expression -> str -> expression
val new_: ?loc:loc -> ?attrs:attrs -> lid -> expression
val setinstvar: ?loc:loc -> ?attrs:attrs -> str -> expression -> expression
val override: ?loc:loc -> ?attrs:attrs -> (str * expression) list
-> expression
val letmodule: ?loc:loc -> ?attrs:attrs -> str_opt -> module_expr
-> expression -> expression
val letexception:
?loc:loc -> ?attrs:attrs -> extension_constructor -> expression
-> expression
val assert_: ?loc:loc -> ?attrs:attrs -> expression -> expression
val lazy_: ?loc:loc -> ?attrs:attrs -> expression -> expression
val poly: ?loc:loc -> ?attrs:attrs -> expression -> core_type option
-> expression
val object_: ?loc:loc -> ?attrs:attrs -> class_structure -> expression
val newtype: ?loc:loc -> ?attrs:attrs -> str -> expression -> expression
val pack: ?loc:loc -> ?attrs:attrs -> module_expr -> expression
val open_: ?loc:loc -> ?attrs:attrs -> open_declaration -> expression
-> expression
val letop: ?loc:loc -> ?attrs:attrs -> binding_op
-> binding_op list -> expression -> expression
val extension: ?loc:loc -> ?attrs:attrs -> extension -> expression
val unreachable: ?loc:loc -> ?attrs:attrs -> unit -> expression
val case: pattern -> ?guard:expression -> expression -> case
val binding_op: str -> pattern -> expression -> loc -> binding_op
end
module Val:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
?prim:string list -> str -> core_type -> value_description
end
module Type:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?params:(core_type * variance) list ->
?cstrs:(core_type * core_type * loc) list ->
?kind:type_kind -> ?priv:private_flag -> ?manifest:core_type -> str ->
type_declaration
val constructor: ?loc:loc -> ?attrs:attrs -> ?info:info ->
?args:constructor_arguments -> ?res:core_type -> str ->
constructor_declaration
val field: ?loc:loc -> ?attrs:attrs -> ?info:info ->
?mut:mutable_flag -> str -> core_type -> label_declaration
end
module Te:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
?params:(core_type * variance) list -> ?priv:private_flag ->
lid -> extension_constructor list -> type_extension
val mk_exception: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
extension_constructor -> type_exception
val constructor: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
str -> extension_constructor_kind -> extension_constructor
val decl: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
?args:constructor_arguments -> ?res:core_type -> str ->
extension_constructor
val rebind: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?info:info ->
str -> lid -> extension_constructor
end
module Mty:
sig
val mk: ?loc:loc -> ?attrs:attrs -> module_type_desc -> module_type
val attr: module_type -> attribute -> module_type
val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_type
val alias: ?loc:loc -> ?attrs:attrs -> lid -> module_type
val signature: ?loc:loc -> ?attrs:attrs -> signature -> module_type
val functor_: ?loc:loc -> ?attrs:attrs ->
functor_parameter -> module_type -> module_type
val with_: ?loc:loc -> ?attrs:attrs -> module_type ->
with_constraint list -> module_type
val typeof_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_type
end
module Mod:
sig
val mk: ?loc:loc -> ?attrs:attrs -> module_expr_desc -> module_expr
val attr: module_expr -> attribute -> module_expr
val ident: ?loc:loc -> ?attrs:attrs -> lid -> module_expr
val structure: ?loc:loc -> ?attrs:attrs -> structure -> module_expr
val functor_: ?loc:loc -> ?attrs:attrs ->
functor_parameter -> module_expr -> module_expr
val apply: ?loc:loc -> ?attrs:attrs -> module_expr -> module_expr ->
module_expr
val constraint_: ?loc:loc -> ?attrs:attrs -> module_expr -> module_type ->
module_expr
val unpack: ?loc:loc -> ?attrs:attrs -> expression -> module_expr
val extension: ?loc:loc -> ?attrs:attrs -> extension -> module_expr
end
module Sig:
sig
val mk: ?loc:loc -> signature_item_desc -> signature_item
val value: ?loc:loc -> value_description -> signature_item
val type_: ?loc:loc -> rec_flag -> type_declaration list -> signature_item
val type_subst: ?loc:loc -> type_declaration list -> signature_item
val type_extension: ?loc:loc -> type_extension -> signature_item
val exception_: ?loc:loc -> type_exception -> signature_item
val module_: ?loc:loc -> module_declaration -> signature_item
val mod_subst: ?loc:loc -> module_substitution -> signature_item
val rec_module: ?loc:loc -> module_declaration list -> signature_item
val modtype: ?loc:loc -> module_type_declaration -> signature_item
val open_: ?loc:loc -> open_description -> signature_item
val include_: ?loc:loc -> include_description -> signature_item
val class_: ?loc:loc -> class_description list -> signature_item
val class_type: ?loc:loc -> class_type_declaration list -> signature_item
val extension: ?loc:loc -> ?attrs:attrs -> extension -> signature_item
val attribute: ?loc:loc -> attribute -> signature_item
val text: text -> signature_item list
end
module Str:
sig
val mk: ?loc:loc -> structure_item_desc -> structure_item
val eval: ?loc:loc -> ?attrs:attributes -> expression -> structure_item
val value: ?loc:loc -> rec_flag -> value_binding list -> structure_item
val primitive: ?loc:loc -> value_description -> structure_item
val type_: ?loc:loc -> rec_flag -> type_declaration list -> structure_item
val type_extension: ?loc:loc -> type_extension -> structure_item
val exception_: ?loc:loc -> type_exception -> structure_item
val module_: ?loc:loc -> module_binding -> structure_item
val rec_module: ?loc:loc -> module_binding list -> structure_item
val modtype: ?loc:loc -> module_type_declaration -> structure_item
val open_: ?loc:loc -> open_declaration -> structure_item
val class_: ?loc:loc -> class_declaration list -> structure_item
val class_type: ?loc:loc -> class_type_declaration list -> structure_item
val include_: ?loc:loc -> include_declaration -> structure_item
val extension: ?loc:loc -> ?attrs:attrs -> extension -> structure_item
val attribute: ?loc:loc -> attribute -> structure_item
val text: text -> structure_item list
end
module Md:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str_opt -> module_type -> module_declaration
end
module Ms:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str -> lid -> module_substitution
end
module Mtd:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?typ:module_type -> str -> module_type_declaration
end
module Mb:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
str_opt -> module_expr -> module_binding
end
module Opn:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs ->
?override:override_flag -> 'a -> 'a open_infos
end
module Incl:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> 'a -> 'a include_infos
end
module Vb:
sig
val mk: ?loc: loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
pattern -> expression -> value_binding
end
module Cty:
sig
val mk: ?loc:loc -> ?attrs:attrs -> class_type_desc -> class_type
val attr: class_type -> attribute -> class_type
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_type
val signature: ?loc:loc -> ?attrs:attrs -> class_signature -> class_type
val arrow: ?loc:loc -> ?attrs:attrs -> arg_label -> core_type ->
class_type -> class_type
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type
val open_: ?loc:loc -> ?attrs:attrs -> open_description -> class_type
-> class_type
end
module Ctf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs ->
class_type_field_desc -> class_type_field
val attr: class_type_field -> attribute -> class_type_field
val inherit_: ?loc:loc -> ?attrs:attrs -> class_type -> class_type_field
val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag ->
virtual_flag -> core_type -> class_type_field
val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag ->
virtual_flag -> core_type -> class_type_field
val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type ->
class_type_field
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_type_field
val attribute: ?loc:loc -> attribute -> class_type_field
val text: text -> class_type_field list
end
module Cl:
sig
val mk: ?loc:loc -> ?attrs:attrs -> class_expr_desc -> class_expr
val attr: class_expr -> attribute -> class_expr
val constr: ?loc:loc -> ?attrs:attrs -> lid -> core_type list -> class_expr
val structure: ?loc:loc -> ?attrs:attrs -> class_structure -> class_expr
val fun_: ?loc:loc -> ?attrs:attrs -> arg_label -> expression option ->
pattern -> class_expr -> class_expr
val apply: ?loc:loc -> ?attrs:attrs -> class_expr ->
(arg_label * expression) list -> class_expr
val let_: ?loc:loc -> ?attrs:attrs -> rec_flag -> value_binding list ->
class_expr -> class_expr
val constraint_: ?loc:loc -> ?attrs:attrs -> class_expr -> class_type ->
class_expr
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_expr
val open_: ?loc:loc -> ?attrs:attrs -> open_description -> class_expr
-> class_expr
end
module Cf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> class_field_desc ->
class_field
val attr: class_field -> attribute -> class_field
val inherit_: ?loc:loc -> ?attrs:attrs -> override_flag -> class_expr ->
str option -> class_field
val val_: ?loc:loc -> ?attrs:attrs -> str -> mutable_flag ->
class_field_kind -> class_field
val method_: ?loc:loc -> ?attrs:attrs -> str -> private_flag ->
class_field_kind -> class_field
val constraint_: ?loc:loc -> ?attrs:attrs -> core_type -> core_type ->
class_field
val initializer_: ?loc:loc -> ?attrs:attrs -> expression -> class_field
val extension: ?loc:loc -> ?attrs:attrs -> extension -> class_field
val attribute: ?loc:loc -> attribute -> class_field
val text: text -> class_field list
val virtual_: core_type -> class_field_kind
val concrete: override_flag -> expression -> class_field_kind
end
module Ci:
sig
val mk: ?loc:loc -> ?attrs:attrs -> ?docs:docs -> ?text:text ->
?virt:virtual_flag -> ?params:(core_type * variance) list ->
str -> 'a -> 'a class_infos
end
module Csig:
sig
val mk: core_type -> class_type_field list -> class_signature
end
module Cstr:
sig
val mk: pattern -> class_field list -> class_structure
end
module Rf:
sig
val mk: ?loc:loc -> ?attrs:attrs -> row_field_desc -> row_field
val tag: ?loc:loc -> ?attrs:attrs ->
label with_loc -> bool -> core_type list -> row_field
val inherit_: ?loc:loc -> core_type -> row_field
end
module Of:
sig
val mk: ?loc:loc -> ?attrs:attrs ->
object_field_desc -> object_field
val tag: ?loc:loc -> ?attrs:attrs ->
label with_loc -> core_type -> object_field
val inherit_: ?loc:loc -> core_type -> object_field
end
|
3ce196aa240413b1d2fe6e953b42ef36e5d5abd7b39d0f9cd65091dd6cc467b6 | brandonbloom/wabt-clj | util.clj | (ns wabt-clj.util)
(defn fail
  "Signal an error by throwing an ex-info exception built from `message`
  and the optional `data` map. The one-argument arity uses an empty map."
  ([message] (fail message {}))
  ([message data] (throw (ex-info message data))))
;; Rebind a settable place in terms of its current value:
;; (change! v f a b) expands to (set! v (f v a b)).
;; `var` must be a target that `set!` accepts (e.g. a thread-bound
;; dynamic var or a mutable field); `f` receives the current value
;; followed by `args`.
(defmacro change! [var f & args]
  `(set! ~var (~f ~var ~@args)))
(defn tap
  "Invoke `f` on `x` purely for its side effects, then return `x`
  unchanged — handy for inserting debugging/logging into a pipeline."
  [x f]
  (doto x f))
| null | https://raw.githubusercontent.com/brandonbloom/wabt-clj/45b80fb05fc49d52ab117a699e9c56582a7078b3/src/wabt_clj/util.clj | clojure | (ns wabt-clj.util)
(defn fail
([msg] (fail msg {}))
([msg data]
(throw (ex-info msg data))))
(defmacro change! [var f & args]
`(set! ~var (~f ~var ~@args)))
(defn tap [x f]
(f x)
x)
| |
f1b8b514430ccc0b00749459138c336727178cfce43edfce78a9921058b841db | cirfi/sicp-my-solutions | 1.07.scm | (define (sqrt-iter guess x)
(if (good-enough? guess x)
guess
(sqrt-iter (improve guess x)
x)))
(define (improve guess x)
(average guess (/ x guess)))
(define (average x y)
(/ (+ x y) 2))
(define (sqrt x)
(sqrt-iter 1.0 x))
;;; new good-enough?
(define (good-enough? guess x)
(< (/ (abs (- (improve guess x)
guess))
guess)
0.001))
| null | https://raw.githubusercontent.com/cirfi/sicp-my-solutions/4b6cc17391aa2c8c033b42b076a663b23aa022de/ch1/1.07.scm | scheme | new good-enough? | (define (sqrt-iter guess x)
(if (good-enough? guess x)
guess
(sqrt-iter (improve guess x)
x)))
(define (improve guess x)
(average guess (/ x guess)))
(define (average x y)
(/ (+ x y) 2))
(define (sqrt x)
(sqrt-iter 1.0 x))
(define (good-enough? guess x)
(< (/ (abs (- (improve guess x)
guess))
guess)
0.001))
|
c13691f9c0f3fdd1ed57b45a505446f327f5d589ad48f9d7478cc6b6e0f704ba | symbiont-io/detsys-testkit | Executor.hs | # LANGUAGE DeriveGeneric #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE NumericUnderscores #
module Scheduler.Executor where
import Control.Applicative ((<|>))
import Data.Aeson
import Data.Aeson.Types
import Data.Aeson.Parser
import Data.ByteString.Lazy.Char8 (ByteString)
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Char (toLower)
import Data.Ratio ((%))
import qualified Data.Time as Time
import GHC.Generics (Generic)
import Scheduler.Event
import StuntDouble hiding (Value)
------------------------------------------------------------------------
-- | Top-level reply from the executor: the events it produced together
-- with the correlation id of the request they answer.
data ExecutorResponse = ExecutorResponse
  { events :: [UnscheduledEvent]
  , corrId :: CorrelationId
  }
  deriving (Generic, Show)
-- Generic instance: JSON keys are expected to match the record field
-- names ("events", "corrId") under aeson's default options.
instance FromJSON ExecutorResponse
-- | An event emitted by the executor that has not yet been given a
-- scheduled time (see 'toScheduled').  The JSON representation is tagged
-- by a "kind" field ("message" \/ "ok" \/ "timer" \/ "delayed") and uses
-- kebab-cased field names without the @ue@ prefix — see the 'FromJSON'
-- instance below.
data UnscheduledEvent = UEMessage
  -- ^ A message sent from one actor to one or more recipients.
  { ueEvent :: String
  , ueArgs :: Value
  , ueTo :: [String]
  , ueFrom :: String
  } |
  -- | An acknowledgement ("ok") reply, same shape as a message.
  UEOk
  { ueEvent :: String
  , ueArgs :: Value
  , ueTo :: [String]
  , ueFrom :: String
  } |
  -- | A timer the sender sets for itself; duration is in nanoseconds.
  UETimer
  { ueArgs :: Value
  , ueFrom :: String
  , ueDuration_ns :: Int
  } |
  -- | An event whose delivery is postponed by the given duration.
  UEDelayed
  { ueInner :: UnscheduledEvent
  , ueDuration :: Time.NominalDiffTime
  }
  deriving (Generic, Eq, Ord, Show)
-- Decode 'UnscheduledEvent' from JSON:
--   * field names drop the "ue" prefix, lower-case the first letter, and
--     turn underscores into dashes (ueDuration_ns -> "duration-ns");
--   * the constructor tag lives in a "kind" field, with the "UE" prefix
--     dropped and the first letter lower-cased (UEMessage -> "message").
-- The 'error' calls are unreachable as long as all field/constructor
-- names keep their "ue"/"UE" prefixes.
instance FromJSON UnscheduledEvent where
  parseJSON = genericParseJSON defaultOptions
    { fieldLabelModifier = \s -> kebabify $ case drop (length ("ue" :: String)) s of
        (x : xs) -> toLower x : xs
        [] -> error "parseJSON: impossible, unless the field names of `UnscheduledEvent` changed"
    , sumEncoding = defaultTaggedObject
        { tagFieldName = "kind"}
    , constructorTagModifier = \s -> case drop (length ("UE" :: String)) s of
        (x : xs) -> toLower x : xs
        [] -> error "parseJSON: impossible, unless the constructor names of `UnscheduledEvent` changed"
    }
    where
      -- Replace underscores with dashes.
      kebabify = map (\x -> if x == '_' then '-' else x)
-- | Is this an acknowledgement ('UEOk') event?  Note that a 'UEDelayed'
-- wrapping an ok is not considered ok by this predicate.
isOk :: UnscheduledEvent -> Bool
isOk ue = case ue of
  UEOk {} -> True
  _       -> False
-- | The originating actor of an event, looking through any 'UEDelayed'
-- wrappers to reach the innermost event's sender.
fromUE :: UnscheduledEvent -> String
fromUE ev = case ev of
  UEDelayed wrapped _delay -> fromUE wrapped
  other                    -> ueFrom other
-- | Turn an executor event into scheduler events relative to the base
-- time @at@.  Messages and oks fan out into one 'SchedulerEvent' per
-- recipient, delivered at @at@ itself; timers and delays are pushed into
-- the future by their duration.
toScheduled :: Time -> UnscheduledEvent -> [SchedulerEvent]
toScheduled at (UEMessage event args tos from)
  = [ SchedulerEvent "message" event args to from at Nothing | to <- tos]
toScheduled at (UEOk event args tos from)
  = [ SchedulerEvent "ok" event args to from at Nothing | to <- tos]
-- A timer is delivered back to its own sender (to == from).
toScheduled at (UETimer args from duration)
  = [ SchedulerEvent "timer" "timer" args from from at' Nothing]
  where
    at' = addTime at duration'
    -- Convert the integral nanosecond count into fractional seconds.
    duration' = fromRational $ fromIntegral duration % 1_000_000_000
-- Delays just shift the base time and recurse on the wrapped event.
toScheduled at (UEDelayed inner duration) = toScheduled at' inner
  where
    at' = addTime at duration
-- | Wire codec for talking to the executor.
--
-- Encoding: an 'InternalMessage' becomes @{"kind": ..., "message": ...}@;
-- any other 'Message' constructor falls back to the generic aeson
-- encoding.  Decoding mirrors this: try the "kind"\/"message" object
-- shape first, then the generic decoding.
executorCodec :: Codec
executorCodec = Codec enc dec
  where
    enc :: Message -> ByteString
    enc (InternalMessage t v) = encode (object ["kind" .= t, "message" .= v])
    enc msg = encode (genericToJSON defaultOptions msg)
    -- XXX: Ideally we want to use:
    -- -1.5.5.1/docs/Data-Aeson-Parser.html#v:eitherDecodeWith
    -- which gives an either, but as far as I can see there's no way to create a
    -- type `(Value -> IResult a)` without `iparse`
    -- ( -1.5.5.1/docs/src/Data.Aeson.Types.Internal.html#iparse )
    -- which is in a hidden module...
    -- Hence the 'maybe'-based decoding below, which loses the parser's
    -- error message and replaces it with a generic one.
    dec :: ByteString -> Either String Message
    dec = maybe
      (Left "executorCodec: failed to decode")
      Right . decodeWith json (parse parseJSON')
      where
        parseJSON' obj = withObject "InternalMessage" (\v -> InternalMessage
          <$> v .: "kind"
          <*> v .: "message") obj
          <|> genericParseJSON defaultOptions obj
| null | https://raw.githubusercontent.com/symbiont-io/detsys-testkit/54ac69babc84d92d3ad21b14cbba00f3d82efbee/src/runtime-prototype/src/Scheduler/Executor.hs | haskell | # LANGUAGE OverloadedStrings #
----------------------------------------------------------------------
XXX: Ideally we want to use:
-1.5.5.1/docs/Data-Aeson-Parser.html#v:eitherDecodeWith
which gives an either, but as far as I can see there's no way to create a
type `(Value -> IResult a)` without `iparse`
which is in a hidden module... | # LANGUAGE DeriveGeneric #
# LANGUAGE NumericUnderscores #
module Scheduler.Executor where
import Control.Applicative ((<|>))
import Data.Aeson
import Data.Aeson.Types
import Data.Aeson.Parser
import Data.ByteString.Lazy.Char8 (ByteString)
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Char (toLower)
import Data.Ratio ((%))
import qualified Data.Time as Time
import GHC.Generics (Generic)
import Scheduler.Event
import StuntDouble hiding (Value)
data ExecutorResponse = ExecutorResponse
{ events :: [UnscheduledEvent]
, corrId :: CorrelationId
}
deriving (Generic, Show)
instance FromJSON ExecutorResponse
data UnscheduledEvent = UEMessage
{ ueEvent :: String
, ueArgs :: Value
, ueTo :: [String]
, ueFrom :: String
} |
UEOk
{ ueEvent :: String
, ueArgs :: Value
, ueTo :: [String]
, ueFrom :: String
} |
UETimer
{ ueArgs :: Value
, ueFrom :: String
, ueDuration_ns :: Int
} |
UEDelayed
{ ueInner :: UnscheduledEvent
, ueDuration :: Time.NominalDiffTime
}
deriving (Generic, Eq, Ord, Show)
instance FromJSON UnscheduledEvent where
parseJSON = genericParseJSON defaultOptions
{ fieldLabelModifier = \s -> kebabify $ case drop (length ("ue" :: String)) s of
(x : xs) -> toLower x : xs
[] -> error "parseJSON: impossible, unless the field names of `UnscheduledEvent` changed"
, sumEncoding = defaultTaggedObject
{ tagFieldName = "kind"}
, constructorTagModifier = \s -> case drop (length ("UE" :: String)) s of
(x : xs) -> toLower x : xs
[] -> error "parseJSON: impossible, unless the constructor names of `UnscheduledEvent` changed"
}
where
kebabify = map (\x -> if x == '_' then '-' else x)
isOk :: UnscheduledEvent -> Bool
isOk UEOk {} = True
isOk _otherwise = False
fromUE :: UnscheduledEvent -> String
fromUE (UEDelayed inner _) = fromUE inner
fromUE ue = ueFrom ue
toScheduled :: Time -> UnscheduledEvent -> [SchedulerEvent]
toScheduled at (UEMessage event args tos from)
= [ SchedulerEvent "message" event args to from at Nothing | to <- tos]
toScheduled at (UEOk event args tos from)
= [ SchedulerEvent "ok" event args to from at Nothing | to <- tos]
toScheduled at (UETimer args from duration)
= [ SchedulerEvent "timer" "timer" args from from at' Nothing]
where
at' = addTime at duration'
duration' = fromRational $ fromIntegral duration % 1_000_000_000
toScheduled at (UEDelayed inner duration) = toScheduled at' inner
where
at' = addTime at duration
executorCodec :: Codec
executorCodec = Codec enc dec
where
enc :: Message -> ByteString
enc (InternalMessage t v) = encode (object ["kind" .= t, "message" .= v])
enc msg = encode (genericToJSON defaultOptions msg)
( -1.5.5.1/docs/src/Data.Aeson.Types.Internal.html#iparse )
dec :: ByteString -> Either String Message
dec = maybe
(Left "executorCodec: failed to decode")
Right . decodeWith json (parse parseJSON')
where
parseJSON' obj = withObject "InternalMessage" (\v -> InternalMessage
<$> v .: "kind"
<*> v .: "message") obj
<|> genericParseJSON defaultOptions obj
|
fe145897a1ff242e0329e7bd8c6068d4ca3501694349c59c9fdc2ef0373687a6 | PascalLG/nubo-hs | CmdList.hs | -----------------------------------------------------------------------------
Nubo Client Application
Copyright ( c ) 2018 ,
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
--
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-- THE SOFTWARE.
-----------------------------------------------------------------------------
module CmdList (
cmdList
, helpList
) where
import Control.Monad (forM, forM_)
import Control.Monad.Trans (liftIO)
import System.Directory (getHomeDirectory, getDirectoryContents, doesDirectoryExist, doesFileExist, pathIsSymbolicLink)
import System.FilePath ((</>))
import System.Info (os)
import System.IO.Error (catchIOError)
import Config
import Misc
import PrettyPrint
import Environment
import Error
import Database
-----------------------------------------------------------------------------
| Parse arguments for the ' list ' command .
--
cmdList :: [String] -> EnvIO ExitStatus
cmdList args = do
result <- parseArgsM args [OptionCSV]
case result of
Left errs -> mapM_ putErr errs >> return StatusInvalidCommand
Right (_, (a:_)) -> putErr (ErrExtraArgument a) >> return StatusInvalidCommand
Right (opts, []) -> doList (OptionCSV `elem` opts)
-- | Execute the 'list' command.
--
-- Scans the user's home directory for nubo synced folders and prints one
-- line per drive: CSV when @csv@ is set, otherwise a column-aligned
-- @path -> url@ listing.  Always succeeds with 'StatusOK'.
doList :: Bool -> EnvIO ExitStatus
doList csv = liftIO $ do
    list <- getHomeDirectory >>= findNuboDrives 5 ""
    -- NOTE: 'maxlen' is a lazy thunk that is only forced while printing,
    -- so 'maximum' is never evaluated on an empty 'list'.
    forM_ list $ if csv then printCsv
                        else let maxlen = maximum $ map (length . fst) list
                             in printStd maxlen
    return StatusOK
    where
        -- Pad the path to 'maxlen' so the URLs line up in one column.
        printStd :: Int -> (String, String) -> IO ()
        printStd maxlen (path, url) = putStrLn (path ++ replicate (maxlen - length path) ' ' ++ " -> " ++ url)
        -- One CSV record per drive: path,url.
        printCsv :: (String, String) -> IO ()
        printCsv (path, url) = putStrF $ toCSV [path, url]
-- | Recursively browse directories to find nubo drives. To save time,
-- only the first sublevels of the user's home folder are explored
-- (@level@ counts the remaining depth). Moreover, some specific folders
-- that are very unlikely to contain a drive are ignored.
--
-- @curr@ is the path currently explored, relative to @home@.  Each found
-- drive is returned as @(absolute path, url)@, where the url comes from
-- the drive's database (see 'isValidNuboDB').
findNuboDrives :: Int -> FilePath -> FilePath -> IO [(FilePath, String)]
findNuboDrives level curr home = do
    let abspath = home </> curr
    -- Unreadable directories are treated as empty rather than aborting.
    names <- getDirectoryContents abspath `catchIOError` (\_ -> return [])
    r <- concat <$> forM names (\name -> do
        let path = curr </> name
        isdir <- doesDirectoryExist (home </> path)
        -- If the symlink check itself fails, err on the side of skipping.
        islink <- pathIsSymbolicLink (home </> path) `catchIOError` (\_ -> return True)
        -- Recurse only into real (non-link) directories while depth
        -- remains; 'any (/= '.') name' filters out "." and "..", and
        -- 'excluded' drops per-OS folders unlikely to hold a drive.
        if isdir &&
           not islink &&
           level > 0 &&
           any (/= '.') name &&
           path `notElem` excluded then findNuboDrives (level - 1) path home
                                    else return [])
    -- A non-empty url marks this very directory as a nubo drive.
    u <- isNuboDrive (abspath </> nuboDatabase)
    return $ if null u then r
                       else (abspath, u):r
    where
        -- Return the url stored in a drive database at 'path', or "" if
        -- the file is missing, unreadable, or not a valid nubo database.
        isNuboDrive :: FilePath -> IO String
        isNuboDrive path = do
            b <- doesFileExist path `catchIOError` (\_ -> return False)
            if b then isValidNuboDB path else return ""
        -- Per-OS list of home-relative folders never explored.
        excluded :: [FilePath]
        excluded = case os of
            "darwin" -> [ ".config", ".cache", ".local", ".Trash", "Applications", "Library", "Pictures/iPhoto Library", "Music/iTunes" ]
            "linux" -> [ ".config", ".cache", ".local" ]
            "mingw32" -> [ "AppData", "MicrosoftEdgeBackups" ]
            _ -> [ ]
-----------------------------------------------------------------------------
-- | Print usage for the 'list' command.
--
helpList :: EnvIO ()
helpList = mapM_ putLine usageText
    where
        -- Markup such as {*:...}} and {y:...}} is interpreted by 'putLine'.
        usageText :: [String]
        usageText =
            [ "{*:USAGE}}"
            , "    {y:nubo list}} [{y:options}}]"
            , ""
            , "{*:DESCRIPTION}}"
            , "    Recursively search the user’s home directory and its descendent folders"
            , "    for all {y:nubo}} synced folders. For each entry it founds, the command"
            , "    prints the absolute path of the folder and the URL of the server it"
            , "    synchonises with."
            , ""
            , "    For performance reasons, only the five first levels of subdirectories"
            , "    are explored. Locations that are very unlikely to contain such a synced"
            , "    folder are ignored as well. The exact list of these ignored locations"
            , "    depends on your system."
            , ""
            , "{*:OPTIONS}}"
            , "    {y:-a}}, {y:--no-ansi}}  Do not use ANSI escape sequences in output messages."
            , "    {y:-c}}, {y:--csv}}      Format the command output as CSV."
            , ""
            ]
-----------------------------------------------------------------------------
| null | https://raw.githubusercontent.com/PascalLG/nubo-hs/390212b73c31746f4ff03a3e341f92d657db0223/Client/src/CmdList.hs | haskell | ---------------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
---------------------------------------------------------------------------
---------------------------------------------------------------------------
| Execute the 'list' command.
| Recursively browse directories to find nubo drives. To save
explored. Moreover, some specifics folders that are very
unlikely to contain a drive are ignored.
---------------------------------------------------------------------------
| Print usage for the remote command.
--------------------------------------------------------------------------- | Nubo Client Application
Copyright ( c ) 2018 ,
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
module CmdList (
cmdList
, helpList
) where
import Control.Monad (forM, forM_)
import Control.Monad.Trans (liftIO)
import System.Directory (getHomeDirectory, getDirectoryContents, doesDirectoryExist, doesFileExist, pathIsSymbolicLink)
import System.FilePath ((</>))
import System.Info (os)
import System.IO.Error (catchIOError)
import Config
import Misc
import PrettyPrint
import Environment
import Error
import Database
| Parse arguments for the ' list ' command .
-- | Entry point for the @nubo list@ command: parse the command line
-- (only the @--csv@ option and no positional arguments are accepted)
-- and run the listing, or report usage errors.
cmdList :: [String] -> EnvIO ExitStatus
cmdList args = parseArgsM args [OptionCSV] >>= dispatch
    where
        -- Valid invocation: no positional arguments left over.
        dispatch (Right (opts, []))     = doList (OptionCSV `elem` opts)
        -- A stray positional argument is an error.
        dispatch (Right (_, (extra:_))) = do
            putErr (ErrExtraArgument extra)
            return StatusInvalidCommand
        -- Option-parsing errors: print them all.
        dispatch (Left errs)            = do
            mapM_ putErr errs
            return StatusInvalidCommand
-- | Execute the 'list' command: search the user's home directory for
-- nubo synced folders and print each one, either as an aligned
-- @path -> url@ line or as a CSV row when @csv@ is set.
doList :: Bool -> EnvIO ExitStatus
doList csv = liftIO $ do
    list <- getHomeDirectory >>= findNuboDrives 5 ""
    forM_ list $ if csv then printCsv
                        else printStd (widest list)
    return StatusOK
    where
        -- Length of the longest drive path.  Total function: yields 0 on
        -- an empty list, unlike the partial 'maximum' it replaces.  (The
        -- old 'maximum' thunk was never forced for an empty list thanks
        -- to laziness, so observable behaviour is unchanged.)
        widest :: [(String, String)] -> Int
        widest = foldr (max . length . fst) 0

        -- One human-readable line, padded so the arrows line up.
        printStd :: Int -> (String, String) -> IO ()
        printStd maxlen (path, url) =
            putStrLn (path ++ replicate (maxlen - length path) ' ' ++ " -> " ++ url)

        -- One CSV row; putStrF / toCSV are project helpers (Misc).
        printCsv :: (String, String) -> IO ()
        printCsv (path, url) = putStrF $ toCSV [path, url]
time , only the first sublevels of the user 's home folder are
-- | Recursively search @home </> curr@ (up to @level@ more levels of
-- nesting) for nubo drives, returning @(absolute path, server url)@
-- pairs.  I/O errors while listing a directory or testing a symlink
-- are swallowed: an unreadable directory yields no entries, and a
-- failed symlink test is treated as "is a symlink" so the entry is
-- skipped rather than followed.
findNuboDrives :: Int -> FilePath -> FilePath -> IO [(FilePath, String)]
findNuboDrives level curr home = do
    let abspath = home </> curr
    names <- getDirectoryContents abspath `catchIOError` (\_ -> return [])
    r <- concat <$> forM names (\name -> do
        let path = curr </> name
        isdir <- doesDirectoryExist (home </> path)
        islink <- pathIsSymbolicLink (home </> path) `catchIOError` (\_ -> return True)
        -- Recurse only into real (non-symlink) directories, while we
        -- still have depth budget, skipping "." and ".." (the
        -- any (/= '.') test rejects all-dot names) and the per-OS
        -- exclusion list below.
        if isdir &&
           not islink &&
           level > 0 &&
           any (/= '.') name &&
           path `notElem` excluded then findNuboDrives (level - 1) path home
                                    else return [])
    -- The current directory itself is a drive iff it holds a valid
    -- nubo database file; prepend it to the recursive results.
    u <- isNuboDrive (abspath </> nuboDatabase)
    return $ if null u then r
                       else (abspath, u):r
    where
        -- Returns the server URL stored in the database file, or ""
        -- when the file is missing/unreadable/invalid.
        -- NOTE(review): assumes isValidNuboDB (project helper) returns
        -- "" for invalid databases — confirm against its definition.
        isNuboDrive :: FilePath -> IO String
        isNuboDrive path = do
            b <- doesFileExist path `catchIOError` (\_ -> return False)
            if b then isValidNuboDB path else return ""

        -- Per-OS folders that are very unlikely to contain a drive;
        -- skipping them keeps the scan fast.
        excluded :: [FilePath]
        excluded = case os of
            "darwin"  -> [ ".config", ".cache", ".local", ".Trash", "Applications", "Library", "Pictures/iPhoto Library", "Music/iTunes" ]
            "linux"   -> [ ".config", ".cache", ".local" ]
            "mingw32" -> [ "AppData", "MicrosoftEdgeBackups" ]
            _         -> [ ]
-- | Print the usage text for the @list@ command.
-- NOTE(review): the help text contains typos ("it founds",
-- "synchonises", "descendent"); strings are kept byte-identical here —
-- fix the wording upstream as a separate, user-visible change.
helpList :: EnvIO ()
helpList = mapM_ putLine
    [ "{*:USAGE}}"
    , " {y:nubo list}} [{y:options}}]"
    , ""
    , "{*:DESCRIPTION}}"
    , " Recursively search the user’s home directory and its descendent folders"
    , " for all {y:nubo}} synced folders. For each entry it founds, the command"
    , " prints the absolute path of the folder and the URL of the server it"
    , " synchonises with."
    , ""
    , " For performance reasons, only the five first levels of subdirectories"
    , " are explored. Locations that are very unlikely to contain such a synced"
    , " folder are ignored as well. The exact list of these ignored locations"
    , " depends on your system."
    , ""
    , "{*:OPTIONS}}"
    , " {y:-a}}, {y:--no-ansi}} Do not use ANSI escape sequences in output messages."
    , " {y:-c}}, {y:--csv}} Format the command output as CSV."
    , ""
    ]
|
3c256cd2dc9e5aad4ddd371a96b29f1c09418cc0c275082af476ba274f4497de | gja/pwa-clojure | api.clj | (ns pwa-clojure.api
(:require [cheshire.core :as json]
[clojure.string :as str]
[pwa-clojure.server.data :as data]))
;; This namespace is super simple
(defn- api-response
  "Wrap body as a Ring JSON response map.
   Fix: the status key was misspelled :satus, so the HTTP status code
   was never actually set on the response."
  [body]
  {:status 200
   :headers {"Content-Type" "application/json"}
   :body (json/encode body)})
(defn- load-data-handler
  "Build a Ring handler: resolve `handler` against the request's
   :route-params via data/load-data and render the result as a JSON
   API response."
  [handler]
  (fn [{:keys [route-params]}]
    (api-response (data/load-data handler route-params))))
(defn handler
  "Public entry point: wrap `h` with the data-loading middleware.
   (Parameter renamed from `handler`, which shadowed the fn name.)"
  [h]
  (load-data-handler h))
| null | https://raw.githubusercontent.com/gja/pwa-clojure/a06450747c6ead439d1a74653a34afe415d8ef02/src-clj/pwa_clojure/api.clj | clojure | This namespace is super simple | (ns pwa-clojure.api
(:require [cheshire.core :as json]
[clojure.string :as str]
[pwa-clojure.server.data :as data]))
(defn- api-response
  "Wrap body as a Ring JSON response map.
   Fix: the status key was misspelled :satus, so the HTTP status code
   was never actually set on the response."
  [body]
  {:status 200
   :headers {"Content-Type" "application/json"}
   :body (json/encode body)})
(defn- load-data-handler [handler]
(fn [{:keys [route-params] :as request}]
(-> handler
(data/load-data route-params)
api-response)))
(defn handler [handler]
(load-data-handler handler))
|
1adfa6d1a5e5c848e3f720720d7e06b7d26e93835d5c7264901051217535704c | Gopiandcode/gopcaml-mode | generic_parser.ml |
(* Parse a signature with the bundled 4.09 parser, then migrate it
   step-by-step up to the 4.14 AST via ocaml-migrate-parsetree. *)
let interface buf =
  let ast = Parse.interface buf in
  let ast = Migrate_parsetree.Migrate_409_410.copy_signature ast in
  let ast = Migrate_parsetree.Migrate_410_411.copy_signature ast in
  let ast = Migrate_parsetree.Migrate_411_412.copy_signature ast in
  let ast = Migrate_parsetree.Migrate_412_413.copy_signature ast in
  Migrate_parsetree.Migrate_413_414.copy_signature ast
(* Parse a structure with the bundled 4.09 parser, then migrate it
   step-by-step up to the 4.14 AST via ocaml-migrate-parsetree. *)
let implementation buf =
  let ast = Parse.implementation buf in
  let ast = Migrate_parsetree.Migrate_409_410.copy_structure ast in
  let ast = Migrate_parsetree.Migrate_410_411.copy_structure ast in
  let ast = Migrate_parsetree.Migrate_411_412.copy_structure ast in
  let ast = Migrate_parsetree.Migrate_412_413.copy_structure ast in
  Migrate_parsetree.Migrate_413_414.copy_structure ast
(* Parse an expression with the bundled 4.09 parser, then migrate it
   step-by-step up to the 4.14 AST via ocaml-migrate-parsetree. *)
let expression buf =
  let ast = Parse.expression buf in
  let ast = Migrate_parsetree.Migrate_409_410.copy_expression ast in
  let ast = Migrate_parsetree.Migrate_410_411.copy_expression ast in
  let ast = Migrate_parsetree.Migrate_411_412.copy_expression ast in
  let ast = Migrate_parsetree.Migrate_412_413.copy_expression ast in
  Migrate_parsetree.Migrate_413_414.copy_expression ast
| null | https://raw.githubusercontent.com/Gopiandcode/gopcaml-mode/9e3327786d2c8b6454e4218a153339a348cea781/parser/409/generic_parser.ml | ocaml |
let interface buf =
(Parse.interface buf)
|> Migrate_parsetree.Migrate_409_410.copy_signature
|> Migrate_parsetree.Migrate_410_411.copy_signature
|> Migrate_parsetree.Migrate_411_412.copy_signature
|> Migrate_parsetree.Migrate_412_413.copy_signature
|> Migrate_parsetree.Migrate_413_414.copy_signature
let implementation buf = (Parse.implementation buf)
|> Migrate_parsetree.Migrate_409_410.copy_structure
|> Migrate_parsetree.Migrate_410_411.copy_structure
|> Migrate_parsetree.Migrate_411_412.copy_structure
|> Migrate_parsetree.Migrate_412_413.copy_structure
|> Migrate_parsetree.Migrate_413_414.copy_structure
let expression buf =
(Parse.expression buf)
|> Migrate_parsetree.Migrate_409_410.copy_expression
|> Migrate_parsetree.Migrate_410_411.copy_expression
|> Migrate_parsetree.Migrate_411_412.copy_expression
|> Migrate_parsetree.Migrate_412_413.copy_expression
|> Migrate_parsetree.Migrate_413_414.copy_expression
| |
8dbea61fd05afbb52e512a7fa0a2f1ec00029c13589b416bf56b228b532ee1d5 | futurice/haskell-mega-repo | H.hs | # LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -freduction-depth=0 #-}
module Futurice.App.GitHubProxy.H (
H, runH,
) where
import Control.Monad.Operational (Program, interpretWithMonad, singleton)
import Data.Aeson (FromJSON, object)
import Futurice.GitHub (requestToJSON)
import Futurice.Integrations.Classes (MonadGitHub (..))
import Futurice.Metrics.RateMeter (mark)
import Futurice.Prelude
import GitHub.Auth (Auth)
import Prelude ()
import qualified GitHub as GH
-- | One GitHub request instruction.  The GADT packs the 'NFData' and
-- 'FromJSON' constraints needed to run and force the response.
data R a where
  R :: (NFData a, FromJSON a) => GH.Request 'GH.RA a -> R a

-- | A program over 'R' instructions ("operational"-style free monad);
-- interpreted by 'runH'.
newtype H a = H { unH :: Program R a }

-- The instances below just lift the underlying 'Program' instances
-- through the newtype wrapper.
instance Functor H where
  fmap f (H x) = H (fmap f x)

instance Applicative H where
  pure = H . pure
  H f <*> H x = H (f <*> x)
  H f *> H x = H (f *> x)

instance Monad H where
  return = pure
  (>>) = (*>)
  H f >>= k = H $ f >>= unH . k

-- | Each GitHub call becomes a single 'R' instruction.
instance MonadGitHub H where
  type MonadGitHubC H = NFData
  githubReq req = H (singleton (R req))
-- | Interpret an 'H' program in 'IO': every 'R' instruction performs
-- the GitHub request with the given auth, logging the request JSON,
-- a rate-meter mark, and the elapsed time.
-- NOTE(review): a fresh TLS 'Manager' is allocated on every 'runH'
-- call — consider sharing one if this is invoked per request.
runH :: Logger -> Auth -> H a -> IO a
runH logger auth (H m) = do
    mgr <- newManager tlsManagerSettings
    interpretWithMonad (interpret mgr) m
  where
    interpret :: Manager -> R x -> IO x
    interpret mgr (R req) = runLogT "github" logger $ do
        logTrace "Request" $ object $ requestToJSON req
        liftIO $ mark "GitHub request"
        -- Failures from executeRequestWithMgr are rethrown as exceptions.
        (dur, res) <- liftIO $ clocked $
            GH.executeRequestWithMgr mgr auth req >>= either throwM pure
        let dur' = timeSpecToSecondsD dur
        -- Message shows seconds (dur'); the raw TimeSpec is the payload.
        logTrace ("GitHub request took " <> textShow dur') dur
        return res
| null | https://raw.githubusercontent.com/futurice/haskell-mega-repo/2647723f12f5435e2edc373f6738386a9668f603/github-proxy/src/Futurice/App/GitHubProxy/H.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -freduction-depth=0 # | # LANGUAGE DataKinds #
module Futurice.App.GitHubProxy.H (
H, runH,
) where
import Control.Monad.Operational (Program, interpretWithMonad, singleton)
import Data.Aeson (FromJSON, object)
import Futurice.GitHub (requestToJSON)
import Futurice.Integrations.Classes (MonadGitHub (..))
import Futurice.Metrics.RateMeter (mark)
import Futurice.Prelude
import GitHub.Auth (Auth)
import Prelude ()
import qualified GitHub as GH
data R a where
R :: (NFData a, FromJSON a) => GH.Request 'GH.RA a -> R a
newtype H a = H { unH :: Program R a }
instance Functor H where
fmap f (H x) = H (fmap f x)
instance Applicative H where
pure = H . pure
H f <*> H x = H (f <*> x)
H f *> H x = H (f *> x)
instance Monad H where
return = pure
(>>) = (*>)
H f >>= k = H $ f >>= unH . k
instance MonadGitHub H where
type MonadGitHubC H = NFData
githubReq req = H (singleton (R req))
runH :: Logger -> Auth -> H a -> IO a
runH logger auth (H m) = do
mgr <- newManager tlsManagerSettings
interpretWithMonad (interpret mgr) m
where
interpret :: Manager -> R x -> IO x
interpret mgr (R req) = runLogT "github" logger $ do
logTrace "Request" $ object $ requestToJSON req
liftIO $ mark "GitHub request"
(dur, res) <- liftIO $ clocked $
GH.executeRequestWithMgr mgr auth req >>= either throwM pure
let dur' = timeSpecToSecondsD dur
logTrace ("GitHub request took " <> textShow dur') dur
return res
|
a6281b87006d07436d25e7080109c96845ce6717f60c3b3aeecdad8a593702cd | softlab-ntua/bencherl | dialyzer_cl_parse.erl | -*- erlang - indent - level : 2 -*-
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2006 - 2011 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
-module(dialyzer_cl_parse).
-export([start/0, get_lib_dir/1]).
-export([collect_args/1]). % used also by typer
-include("dialyzer.hrl").
%%-----------------------------------------------------------------------
-type dial_cl_parse_ret() :: {'check_init', #options{}}
| {'plt_info', #options{}}
| {'cl', #options{}}
| {{'gui', 'gs' | 'wx'}, #options{}}
| {'error', string()}.
-type deep_string() :: string() | [deep_string()].
%%-----------------------------------------------------------------------
-spec start() -> dial_cl_parse_ret().
%% Entry point: initialise the option store (process dictionary),
%% then parse the emulator's plain arguments.  A cl_error/1 throw
%% becomes a clean {error, Msg}; any other exception is formatted
%% together with its stacktrace.
start() ->
  init(),
  Args = init:get_plain_arguments(),
  try
    cl(Args)
  catch
    throw:{dialyzer_cl_parse_error, Msg} -> {error, Msg};
    _:R ->
      %% NOTE(review): erlang:get_stacktrace/0 is deprecated/removed in
      %% modern OTP (use "_:R:Stack ->"); kept as-is for the old OTP
      %% this file targets.
      Msg = io_lib:format("~p\n~p\n", [R, erlang:get_stacktrace()]),
      {error, lists:flatten(Msg)}
  end.
%% Main option-parsing loop.  Each clause consumes one command-line
%% option (plus its arguments, if any), records its effect in the
%% process dictionary, and recurses on the remaining arguments.  The
%% final cl([]) clause assembles everything into an options record via
%% dialyzer_options:build/1.  Exact-match clauses (e.g. ["-D"]) must
%% stay before their prefix-match counterparts (["-D"++Define|T]).
%%
%% Fix: the bare "--plt"/"--plts" clauses previously raised via the
%% erlang:error/1 BIF, which start/0 reports as a crash with a dumped
%% stacktrace instead of a clean usage message.  They now use
%% cl_error/1 like every other bad-usage clause.
cl(["--add_to_plt"|T]) ->
  put(dialyzer_options_analysis_type, plt_add),
  cl(T);
cl(["--apps"|T]) ->
  %% Translate application names to their ebin directories, then
  %% collect everything up to the next option.
  T1 = get_lib_dir(T),
  {Args, T2} = collect_args(T1),
  append_var(dialyzer_options_files_rec, Args),
  cl(T2);
cl(["--build_plt"|T]) ->
  put(dialyzer_options_analysis_type, plt_build),
  cl(T);
cl(["--check_plt"|T]) ->
  put(dialyzer_options_analysis_type, plt_check),
  cl(T);
cl(["-n"|T]) ->
  cl(["--no_check_plt"|T]);
cl(["--no_check_plt"|T]) ->
  put(dialyzer_options_check_plt, false),
  cl(T);
cl(["-nn"|T]) ->
  cl(["--no_native"|T]);
cl(["--no_native"|T]) ->
  put(dialyzer_options_native, false),
  cl(T);
cl(["--plt_info"|T]) ->
  put(dialyzer_options_analysis_type, plt_info),
  cl(T);
cl(["--get_warnings"|T]) ->
  put(dialyzer_options_get_warnings, true),
  cl(T);
cl(["-D"|_]) ->
  cl_error("No defines specified after -D");
cl(["-D"++Define|T]) ->
  %% "-Dname" or "-Dname=value".
  Def = re:split(Define, "=", [{return, list}]),
  append_defines(Def),
  cl(T);
cl(["-h"|_]) ->
  help_message();
cl(["--help"|_]) ->
  help_message();
cl(["-I"]) ->
  cl_error("no include directory specified after -I");
cl(["-I", Dir|T]) ->
  append_include(Dir),
  cl(T);
cl(["-I"++Dir|T]) ->
  append_include(Dir),
  cl(T);
cl(["-c"++_|T]) ->
  NewTail = command_line(T),
  cl(NewTail);
cl(["-r"++_|T0]) ->
  {Args, T} = collect_args(T0),
  append_var(dialyzer_options_files_rec, Args),
  cl(T);
cl(["--remove_from_plt"|T]) ->
  put(dialyzer_options_analysis_type, plt_remove),
  cl(T);
cl(["--com"++_|T]) ->
  NewTail = command_line(T),
  cl(NewTail);
cl(["--output"]) ->
  cl_error("No outfile specified");
cl(["-o"]) ->
  cl_error("No outfile specified");
cl(["--output", Output|T]) ->
  put(dialyzer_output, Output),
  cl(T);
cl(["--output_plt"]) ->
  cl_error("No outfile specified for --output_plt");
cl(["--output_plt", Output|T]) ->
  put(dialyzer_output_plt, Output),
  cl(T);
cl(["-o", Output|T]) ->
  put(dialyzer_output, Output),
  cl(T);
cl(["-o"++Output|T]) ->
  put(dialyzer_output, Output),
  cl(T);
cl(["--raw"|T]) ->
  put(dialyzer_output_format, raw),
  cl(T);
cl(["--fullpath"|T]) ->
  put(dialyzer_filename_opt, fullpath),
  cl(T);
cl(["-pa", Path|T]) ->
  case code:add_patha(Path) of
    true -> cl(T);
    {error, _} -> cl_error("Bad directory for -pa: " ++ Path)
  end;
cl(["--plt"]) ->
  cl_error("No plt specified for --plt");   % was: error/1 (crash-style report)
cl(["--plt", PLT|T]) ->
  put(dialyzer_init_plts, [PLT]),
  cl(T);
cl(["--plts"]) ->
  cl_error("No plts specified for --plts"); % was: error/1 (crash-style report)
cl(["--plts"|T]) ->
  {PLTs, NewT} = get_plts(T, []),
  put(dialyzer_init_plts, PLTs),
  cl(NewT);
cl(["-q"|T]) ->
  put(dialyzer_options_report_mode, quiet),
  cl(T);
cl(["--quiet"|T]) ->
  put(dialyzer_options_report_mode, quiet),
  cl(T);
cl(["--src"|T]) ->
  put(dialyzer_options_from, src_code),
  cl(T);
cl(["--no_spec"|T]) ->
  put(dialyzer_options_use_contracts, false),
  cl(T);
cl(["--statistics"|T]) ->
  put(dialyzer_timing, true),
  cl(T);
cl(["--resources"|T]) ->
  put(dialyzer_options_report_mode, quiet),
  put(dialyzer_timing, debug),
  cl(T);
cl(["-v"|_]) ->
  io:format("Dialyzer version "++?VSN++"\n"),
  erlang:halt(?RET_NOTHING_SUSPICIOUS);
cl(["--version"|_]) ->
  io:format("Dialyzer version "++?VSN++"\n"),
  erlang:halt(?RET_NOTHING_SUSPICIOUS);
cl(["--verbose"|T]) ->
  put(dialyzer_options_report_mode, verbose),
  cl(T);
cl(["-W"|_]) ->
  cl_error("-W given without warning");
cl(["-Whelp"|_]) ->
  help_warnings();
cl(["-W"++Warn|T]) ->
  append_var(dialyzer_warnings, [list_to_atom(Warn)]),
  cl(T);
cl(["--dump_callgraph"]) ->
  cl_error("No outfile specified for --dump_callgraph");
cl(["--dump_callgraph", File|T]) ->
  put(dialyzer_callgraph_file, File),
  cl(T);
cl(["--gui"|T]) ->
  put(dialyzer_options_mode, {gui, gs}),
  cl(T);
cl(["--wx"|T]) ->
  put(dialyzer_options_mode, {gui, wx}),
  cl(T);
cl([H|_] = L) ->
  %% A non-option token: treat the rest as files/dirs if it exists,
  %% otherwise it is an unknown option.
  case filelib:is_file(H) orelse filelib:is_dir(H) of
    true ->
      NewTail = command_line(L),
      cl(NewTail);
    false ->
      cl_error("Unknown option: " ++ H)
  end;
cl([]) ->
  %% All arguments consumed: decide the run mode and build the
  %% final options record.
  {RetTag, Opts} =
    case get(dialyzer_options_analysis_type) =:= plt_info of
      true ->
        put(dialyzer_options_analysis_type, plt_check),
        {plt_info, cl_options()};
      false ->
        case get(dialyzer_options_mode) of
          {gui, _} = GUI -> {GUI, common_options()};
          cl ->
            case get(dialyzer_options_analysis_type) =:= plt_check of
              true -> {check_init, cl_options()};
              false -> {cl, cl_options()}
            end
        end
    end,
  case dialyzer_options:build(Opts) of
    {error, Msg} -> cl_error(Msg);
    OptsRecord -> {RetTag, OptsRecord}
  end.
%%-----------------------------------------------------------------------
%% Collect the file/directory arguments up to the next option, record
%% them, and return the unconsumed tail.  Side effects go through the
%% process dictionary, like the rest of this module.
command_line(T0) ->
  {Args, T} = collect_args(T0),
  append_var(dialyzer_options_files, Args),
  %% if all files specified are ".erl" files, set the 'src' flag automatically
  case lists:all(fun(F) -> filename:extension(F) =:= ".erl" end, Args) of
    true -> put(dialyzer_options_from, src_code);
    false -> ok
  end,
  T.
-spec cl_error(deep_string()) -> no_return().
%% Abort option parsing with a usage error.  The (possibly nested)
%% iolist is flattened to a plain string and thrown; start/0 catches
%% the tagged tuple and turns it into {error, Msg}.
cl_error(Str) ->
  throw({dialyzer_cl_parse_error, lists:flatten(Str)}).
%% Seed the process dictionary with the default option values before
%% parsing starts.  Keys that are never put/2 here or during parsing
%% read back as 'undefined' (standard get/1 behaviour); the defaults
%% for include dirs, defines, files etc. come from the #options{}
%% record in dialyzer.hrl.
init() ->
  put(dialyzer_options_mode, cl),
  put(dialyzer_options_files_rec, []),
  put(dialyzer_options_report_mode, normal),
  put(dialyzer_warnings, []),
  DefaultOpts = #options{},
  put(dialyzer_include, DefaultOpts#options.include_dirs),
  put(dialyzer_options_defines, DefaultOpts#options.defines),
  put(dialyzer_options_files, DefaultOpts#options.files),
  put(dialyzer_output_format, formatted),
  put(dialyzer_filename_opt, basename),
  put(dialyzer_options_check_plt, DefaultOpts#options.check_plt),
  put(dialyzer_timing, DefaultOpts#options.timing),
  ok.
%% Record a -D command-line define.  ["name", "value"] comes from
%% "-Dname=value": the value string is parsed as an Erlang term.
%% A crash here (badmatch) surfaces via start/0's catch-all clause.
append_defines([Def, Val]) ->
  {ok, Tokens, _} = erl_scan:string(Val++"."),
  {ok, ErlVal} = erl_parse:parse_term(Tokens),
  append_var(dialyzer_options_defines, [{list_to_atom(Def), ErlVal}]);
%% Bare "-Dname" defines the macro as 'true'.
append_defines([Def]) ->
  append_var(dialyzer_options_defines, [{list_to_atom(Def), true}]).

%% Record one -I include directory.
append_include(Dir) ->
  append_var(dialyzer_include, [Dir]).

%% Append List to the process-dictionary entry Var.  The key must
%% already hold a list (seeded by init/0), otherwise ++ fails.
append_var(Var, List) when is_list(List) ->
  put(Var, get(Var) ++ List),
  ok.
%%-----------------------------------------------------------------------
-spec collect_args([string()]) -> {[string()], [string()]}.
%% Split the argument list into {PlainArgs, Rest}: PlainArgs is the
%% leading run of arguments that do not start with "-", Rest begins at
%% the first option (or is [] if none follows).  Exported; also used
%% by typer.
collect_args(Args) ->
  lists:splitwith(fun is_plain_arg/1, Args).

is_plain_arg("-" ++ _) -> false;
is_plain_arg(_)        -> true.
%%-----------------------------------------------------------------------
%% Assemble the option proplist for command-line analyses from the
%% process dictionary.  Keys never set during parsing read back as
%% 'undefined' (standard get/1 behaviour); dialyzer_options:build/1
%% is expected to handle that.
cl_options() ->
  [{files, get(dialyzer_options_files)},
   {files_rec, get(dialyzer_options_files_rec)},
   {output_file, get(dialyzer_output)},
   {output_format, get(dialyzer_output_format)},
   {filename_opt, get(dialyzer_filename_opt)},
   {analysis_type, get(dialyzer_options_analysis_type)},
   {get_warnings, get(dialyzer_options_get_warnings)},
   {timing, get(dialyzer_timing)},
   {callgraph_file, get(dialyzer_callgraph_file)}
   |common_options()].

%% Options shared by the GUI and command-line modes.
common_options() ->
  [{defines, get(dialyzer_options_defines)},
   {from, get(dialyzer_options_from)},
   {include_dirs, get(dialyzer_include)},
   {plts, get(dialyzer_init_plts)},
   {output_plt, get(dialyzer_output_plt)},
   {report_mode, get(dialyzer_options_report_mode)},
   {use_spec, get(dialyzer_options_use_contracts)},
   {warnings, get(dialyzer_warnings)},
   {check_plt, get(dialyzer_options_check_plt)}].
%%-----------------------------------------------------------------------
-spec get_lib_dir([string()]) -> [string()].
%% Map each application name to its ebin directory.  Unknown names are
%% passed through unchanged (so plain paths can be mixed in), except
%% for "erts" which gets a special location in un-installed systems.
%%
%% Fix: this region was garbled in the source at hand — the line
%% 'case H =:= "erts" of' had been swallowed together with its
%% trailing comment, leaving invalid Erlang.  Reconstructed to match
%% upstream OTP.
get_lib_dir(Apps) ->
  get_lib_dir(Apps, []).

get_lib_dir([H|T], Acc) ->
  NewElem =
    case code:lib_dir(list_to_atom(H)) of
      {error, bad_name} ->
        %% Hack for including erts in an un-installed system.
        case H =:= "erts" of
          true -> filename:join(code:root_dir(), "erts/preloaded/ebin");
          false -> H
        end;
      LibDir -> LibDir ++ "/ebin"
    end,
  get_lib_dir(T, [NewElem|Acc]);
get_lib_dir([], Acc) ->
  lists:reverse(Acc).
%%-----------------------------------------------------------------------
%% Collect plt file names (arguments after --plts) up to the first
%% "-"-prefixed token; a literal "--" terminator is consumed, any other
%% option is left in the tail.  Acc is prepended (reversed) to the
%% collected names.
get_plts(Args, Acc) ->
  {Plts, Tail} = lists:splitwith(fun("-" ++ _) -> false;
                                    (_)        -> true
                                 end, Args),
  Collected = lists:reverse(Acc, Plts),
  case Tail of
    ["--"|T] -> {Collected, T};
    _ -> {Collected, Tail}
  end.
%%-----------------------------------------------------------------------
-spec help_warnings() -> no_return().
%% Print the -W warning-option help text and terminate the emulator
%% with the "nothing suspicious" exit status.  Never returns.
help_warnings() ->
  S = warning_options_msg(),
  io:put_chars(S),
  erlang:halt(?RET_NOTHING_SUSPICIOUS).
-spec help_message() -> no_return().
help_message() ->
S = "Usage: dialyzer [--help] [--version] [--shell] [--quiet] [--verbose]
[-pa dir]* [--plt plt] [--plts plt*] [-Ddefine]*
[-I include_dir]* [--output_plt file] [-Wwarn]*
[--src] [--gui | --wx] [files_or_dirs] [-r dirs]
[--apps applications] [-o outfile]
[--build_plt] [--add_to_plt] [--remove_from_plt]
[--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
[--no_native] [--fullpath] [--statistics]
Options:
files_or_dirs (for backwards compatibility also as: -c files_or_dirs)
Use Dialyzer from the command line to detect defects in the
specified files or directories containing .erl or .beam files,
depending on the type of the analysis.
-r dirs
Same as the previous but the specified directories are searched
recursively for subdirectories containing .erl or .beam files in
them, depending on the type of analysis.
--apps applications
Option typically used when building or modifying a plt as in:
dialyzer --build_plt --apps erts kernel stdlib mnesia ...
to conveniently refer to library applications corresponding to the
Erlang/OTP installation. However, the option is general and can also
be used during analysis in order to refer to Erlang/OTP applications.
In addition, file or directory names can also be included, as in:
dialyzer --apps inets ssl ./ebin ../other_lib/ebin/my_module.beam
-o outfile (or --output outfile)
When using Dialyzer from the command line, send the analysis
results to the specified outfile rather than to stdout.
--raw
When using Dialyzer from the command line, output the raw analysis
results (Erlang terms) instead of the formatted result.
The raw format is easier to post-process (for instance, to filter
warnings or to output HTML pages).
--src
Override the default, which is to analyze BEAM files, and
analyze starting from Erlang source code instead.
-Dname (or -Dname=value)
When analyzing from source, pass the define to Dialyzer. (**)
-I include_dir
When analyzing from source, pass the include_dir to Dialyzer. (**)
-pa dir
Include dir in the path for Erlang (useful when analyzing files
that have '-include_lib()' directives).
--output_plt file
Store the plt at the specified file after building it.
--plt plt
Use the specified plt as the initial plt (if the plt was built
during setup the files will be checked for consistency).
--plts plt*
Merge the specified plts to create the initial plt -- requires
that the plts are disjoint (i.e., do not have any module
appearing in more than one plt).
The plts are created in the usual way:
dialyzer --build_plt --output_plt plt_1 files_to_include
...
dialyzer --build_plt --output_plt plt_n files_to_include
and then can be used in either of the following ways:
dialyzer files_to_analyze --plts plt_1 ... plt_n
or:
dialyzer --plts plt_1 ... plt_n -- files_to_analyze
(Note the -- delimiter in the second case)
-Wwarn
A family of options which selectively turn on/off warnings
(for help on the names of warnings use dialyzer -Whelp).
--shell
Do not disable the Erlang shell while running the GUI.
--version (or -v)
Print the Dialyzer version and some more information and exit.
--help (or -h)
Print this message and exit.
--quiet (or -q)
Make Dialyzer a bit more quiet.
--verbose
Make Dialyzer a bit more verbose.
--statistics
Prints information about the progress of execution (analysis phases,
time spent in each and size of the relative input).
--build_plt
The analysis starts from an empty plt and creates a new one from the
files specified with -c and -r. Only works for beam files.
Use --plt(s) or --output_plt to override the default plt location.
--add_to_plt
The plt is extended to also include the files specified with -c and -r.
Use --plt(s) to specify which plt to start from, and --output_plt to
specify where to put the plt. Note that the analysis might include
files from the plt if they depend on the new files.
This option only works with beam files.
--remove_from_plt
The information from the files specified with -c and -r is removed
from the plt. Note that this may cause a re-analysis of the remaining
dependent files.
--check_plt
Check the plt for consistency and rebuild it if it is not up-to-date.
Actually, this option is of rare use as it is on by default.
--no_check_plt (or -n)
Skip the plt check when running Dialyzer. Useful when working with
installed plts that never change.
--plt_info
Make Dialyzer print information about the plt and then quit. The plt
can be specified with --plt(s).
--get_warnings
Make Dialyzer emit warnings even when manipulating the plt. Warnings
are only emitted for files that are actually analyzed.
--dump_callgraph file
Dump the call graph into the specified file whose format is determined
by the file name extension. Supported extensions are: raw, dot, and ps.
If something else is used as file name extension, default format '.raw'
will be used.
--no_native (or -nn)
Bypass the native code compilation of some key files that Dialyzer
heuristically performs when dialyzing many files; this avoids the
compilation time but it may result in (much) longer analysis time.
--fullpath
Display the full path names of files for which warnings are emitted.
--gui
Use the gs-based GUI.
--wx
Use the wx-based GUI.
Note:
* denotes that multiple occurrences of these options are possible.
** options -D and -I work both from command-line and in the Dialyzer GUI;
the syntax of defines and includes is the same as that used by \"erlc\".
" ++ warning_options_msg() ++ "
The exit status of the command line version is:
0 - No problems were encountered during the analysis and no
warnings were emitted.
1 - Problems were encountered during the analysis.
2 - No problems were encountered, but warnings were emitted.
",
io:put_chars(S),
erlang:halt(?RET_NOTHING_SUSPICIOUS).
warning_options_msg() ->
"Warning options:
-Wno_return
Suppress warnings for functions that will never return a value.
-Wno_unused
Suppress warnings for unused functions.
-Wno_improper_lists
Suppress warnings for construction of improper lists.
-Wno_tuple_as_fun
Suppress warnings for using tuples instead of funs.
-Wno_fun_app
Suppress warnings for fun applications that will fail.
-Wno_match
Suppress warnings for patterns that are unused or cannot match.
-Wno_opaque
Suppress warnings for violations of opaqueness of data types.
-Wno_behaviours
Suppress warnings about behaviour callbacks which drift from the published
recommended interfaces.
-Wno_undefined_callbacks
Suppress warnings about behaviours that have no -callback attributes for
their callbacks.
-Wunmatched_returns ***
Include warnings for function calls which ignore a structured return
value or do not match against one of many possible return value(s).
-Werror_handling ***
Include warnings for functions that only return by means of an exception.
-Wrace_conditions ***
Include warnings for possible race conditions.
-Wunderspecs ***
Warn about underspecified functions
(those whose -spec is strictly more allowing than the success typing).
The following options are also available but their use is not recommended:
(they are mostly for Dialyzer developers and internal debugging)
-Woverspecs ***
Warn about overspecified functions
(those whose -spec is strictly less allowing than the success typing).
-Wspecdiffs ***
Warn when the -spec is different than the success typing.
*** Identifies options that turn on warnings rather than turning them off.
".
| null | https://raw.githubusercontent.com/softlab-ntua/bencherl/317bdbf348def0b2f9ed32cb6621e21083b7e0ca/app/dialyzer/src/dialyzer_cl_parse.erl | erlang | -----------------------------------------------------------------------
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
used also by typer
-----------------------------------------------------------------------
-----------------------------------------------------------------------
-----------------------------------------------------------------------
if all files specified are ".erl" files, set the 'src' flag automatically
-----------------------------------------------------------------------
-----------------------------------------------------------------------
-----------------------------------------------------------------------
-----------------------------------------------------------------------
----------------------------------------------------------------------- | -*- erlang - indent - level : 2 -*-
Copyright Ericsson AB 2006 - 2011 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(dialyzer_cl_parse).
-export([start/0, get_lib_dir/1]).
-include("dialyzer.hrl").
-type dial_cl_parse_ret() :: {'check_init', #options{}}
| {'plt_info', #options{}}
| {'cl', #options{}}
| {{'gui', 'gs' | 'wx'}, #options{}}
| {'error', string()}.
-type deep_string() :: string() | [deep_string()].
-spec start() -> dial_cl_parse_ret().
start() ->
init(),
Args = init:get_plain_arguments(),
try
cl(Args)
catch
throw:{dialyzer_cl_parse_error, Msg} -> {error, Msg};
_:R ->
Msg = io_lib:format("~p\n~p\n", [R, erlang:get_stacktrace()]),
{error, lists:flatten(Msg)}
end.
cl(["--add_to_plt"|T]) ->
put(dialyzer_options_analysis_type, plt_add),
cl(T);
cl(["--apps"|T]) ->
T1 = get_lib_dir(T),
{Args, T2} = collect_args(T1),
append_var(dialyzer_options_files_rec, Args),
cl(T2);
cl(["--build_plt"|T]) ->
put(dialyzer_options_analysis_type, plt_build),
cl(T);
cl(["--check_plt"|T]) ->
put(dialyzer_options_analysis_type, plt_check),
cl(T);
cl(["-n"|T]) ->
cl(["--no_check_plt"|T]);
cl(["--no_check_plt"|T]) ->
put(dialyzer_options_check_plt, false),
cl(T);
cl(["-nn"|T]) ->
cl(["--no_native"|T]);
cl(["--no_native"|T]) ->
put(dialyzer_options_native, false),
cl(T);
cl(["--plt_info"|T]) ->
put(dialyzer_options_analysis_type, plt_info),
cl(T);
cl(["--get_warnings"|T]) ->
put(dialyzer_options_get_warnings, true),
cl(T);
cl(["-D"|_]) ->
cl_error("No defines specified after -D");
cl(["-D"++Define|T]) ->
Def = re:split(Define, "=", [{return, list}]),
append_defines(Def),
cl(T);
cl(["-h"|_]) ->
help_message();
cl(["--help"|_]) ->
help_message();
cl(["-I"]) ->
cl_error("no include directory specified after -I");
cl(["-I", Dir|T]) ->
append_include(Dir),
cl(T);
cl(["-I"++Dir|T]) ->
append_include(Dir),
cl(T);
cl(["-c"++_|T]) ->
NewTail = command_line(T),
cl(NewTail);
cl(["-r"++_|T0]) ->
{Args, T} = collect_args(T0),
append_var(dialyzer_options_files_rec, Args),
cl(T);
cl(["--remove_from_plt"|T]) ->
put(dialyzer_options_analysis_type, plt_remove),
cl(T);
cl(["--com"++_|T]) ->
NewTail = command_line(T),
cl(NewTail);
cl(["--output"]) ->
cl_error("No outfile specified");
cl(["-o"]) ->
cl_error("No outfile specified");
cl(["--output",Output|T]) ->
put(dialyzer_output, Output),
cl(T);
cl(["--output_plt"]) ->
cl_error("No outfile specified for --output_plt");
cl(["--output_plt",Output|T]) ->
put(dialyzer_output_plt, Output),
cl(T);
cl(["-o", Output|T]) ->
put(dialyzer_output, Output),
cl(T);
cl(["-o"++Output|T]) ->
put(dialyzer_output, Output),
cl(T);
cl(["--raw"|T]) ->
put(dialyzer_output_format, raw),
cl(T);
cl(["--fullpath"|T]) ->
put(dialyzer_filename_opt, fullpath),
cl(T);
cl(["-pa", Path|T]) ->
case code:add_patha(Path) of
true -> cl(T);
{error, _} -> cl_error("Bad directory for -pa: " ++ Path)
end;
cl(["--plt"]) ->
error("No plt specified for --plt");
cl(["--plt", PLT|T]) ->
put(dialyzer_init_plts, [PLT]),
cl(T);
cl(["--plts"]) ->
error("No plts specified for --plts");
cl(["--plts"|T]) ->
{PLTs, NewT} = get_plts(T, []),
put(dialyzer_init_plts, PLTs),
cl(NewT);
cl(["-q"|T]) ->
put(dialyzer_options_report_mode, quiet),
cl(T);
cl(["--quiet"|T]) ->
put(dialyzer_options_report_mode, quiet),
cl(T);
cl(["--src"|T]) ->
put(dialyzer_options_from, src_code),
cl(T);
cl(["--no_spec"|T]) ->
put(dialyzer_options_use_contracts, false),
cl(T);
cl(["--statistics"|T]) ->
put(dialyzer_timing, true),
cl(T);
cl(["--resources"|T]) ->
put(dialyzer_options_report_mode, quiet),
put(dialyzer_timing, debug),
cl(T);
cl(["-v"|_]) ->
io:format("Dialyzer version "++?VSN++"\n"),
erlang:halt(?RET_NOTHING_SUSPICIOUS);
cl(["--version"|_]) ->
io:format("Dialyzer version "++?VSN++"\n"),
erlang:halt(?RET_NOTHING_SUSPICIOUS);
cl(["--verbose"|T]) ->
put(dialyzer_options_report_mode, verbose),
cl(T);
cl(["-W"|_]) ->
cl_error("-W given without warning");
cl(["-Whelp"|_]) ->
help_warnings();
cl(["-W"++Warn|T]) ->
append_var(dialyzer_warnings, [list_to_atom(Warn)]),
cl(T);
cl(["--dump_callgraph"]) ->
cl_error("No outfile specified for --dump_callgraph");
cl(["--dump_callgraph", File|T]) ->
put(dialyzer_callgraph_file, File),
cl(T);
cl(["--gui"|T]) ->
put(dialyzer_options_mode, {gui, gs}),
cl(T);
cl(["--wx"|T]) ->
put(dialyzer_options_mode, {gui, wx}),
cl(T);
cl([H|_] = L) ->
case filelib:is_file(H) orelse filelib:is_dir(H) of
true ->
NewTail = command_line(L),
cl(NewTail);
false ->
cl_error("Unknown option: " ++ H)
end;
cl([]) ->
{RetTag, Opts} =
case get(dialyzer_options_analysis_type) =:= plt_info of
true ->
put(dialyzer_options_analysis_type, plt_check),
{plt_info, cl_options()};
false ->
case get(dialyzer_options_mode) of
{gui, _} = GUI -> {GUI, common_options()};
cl ->
case get(dialyzer_options_analysis_type) =:= plt_check of
true -> {check_init, cl_options()};
false -> {cl, cl_options()}
end
end
end,
case dialyzer_options:build(Opts) of
{error, Msg} -> cl_error(Msg);
OptsRecord -> {RetTag, OptsRecord}
end.
%% Consume a run of non-option arguments (files/dirs to analyze) and record
%% them under dialyzer_options_files.  If every collected argument ends in
%% ".erl", switch the analysis to start from source code.  Returns the
%% remaining, unconsumed arguments.
command_line(T0) ->
  {Args, T} = collect_args(T0),
  append_var(dialyzer_options_files, Args),
  %% Any non-.erl argument means we keep the default (byte-code) origin.
  case lists:all(fun(F) -> filename:extension(F) =:= ".erl" end, Args) of
    true -> put(dialyzer_options_from, src_code);
    false -> ok
  end,
  T.
-spec cl_error(deep_string()) -> no_return().

%% Abort command-line parsing: throw a tagged parse error carrying the
%% message flattened to a plain string.
cl_error(Str) ->
  throw({dialyzer_cl_parse_error, lists:flatten(Str)}).
%% Seed the process dictionary with the default value of every option the
%% parser reads or appends to.  List-valued entries must exist before
%% append_var/2 is used.  Defaults come from the #options{} record where
%% one is defined.
init() ->
  put(dialyzer_options_mode, cl),
  put(dialyzer_options_files_rec, []),
  put(dialyzer_options_report_mode, normal),
  put(dialyzer_warnings, []),
  DefaultOpts = #options{},
  put(dialyzer_include, DefaultOpts#options.include_dirs),
  put(dialyzer_options_defines, DefaultOpts#options.defines),
  put(dialyzer_options_files, DefaultOpts#options.files),
  put(dialyzer_output_format, formatted),
  put(dialyzer_filename_opt, basename),
  put(dialyzer_options_check_plt, DefaultOpts#options.check_plt),
  put(dialyzer_timing, DefaultOpts#options.timing),
  ok.

%% Record a -D macro definition.  "-Dname=value" parses the value text as
%% an Erlang term; a bare "-Dname" defines the macro as 'true'.
append_defines([Def, Val]) ->
  %% The appended "." lets erl_parse treat the value as a complete term.
  {ok, Tokens, _} = erl_scan:string(Val++"."),
  {ok, ErlVal} = erl_parse:parse_term(Tokens),
  append_var(dialyzer_options_defines, [{list_to_atom(Def), ErlVal}]);
append_defines([Def]) ->
  append_var(dialyzer_options_defines, [{list_to_atom(Def), true}]).

%% Record one -I include directory, preserving command-line order.
append_include(Dir) ->
  append_var(dialyzer_include, [Dir]).

%% Append List to the list stored under Var in the process dictionary
%% (the entry must already hold a list, e.g. after init/0).
append_var(Var, List) when is_list(List) ->
  put(Var, get(Var) ++ List),
  ok.
-spec collect_args([string()]) -> {[string()], [string()]}.

%% Split the argument list at the first option (a string starting with
%% "-").  Returns {ArgsBeforeFirstOption, RemainingArgs}.
collect_args(Args) ->
  collect_args_1(Args, []).

collect_args_1(Remaining, Gathered) ->
  case Remaining of
    ["-" ++ _ | _] -> {lists:reverse(Gathered), Remaining};
    [Arg | Rest]   -> collect_args_1(Rest, [Arg | Gathered]);
    []             -> {lists:reverse(Gathered), []}
  end.
%% Option proplist for command-line (non-GUI) runs, read back from the
%% process dictionary; extends common_options/0 with cl-mode-only entries.
cl_options() ->
  [{files, get(dialyzer_options_files)},
   {files_rec, get(dialyzer_options_files_rec)},
   {output_file, get(dialyzer_output)},
   {output_format, get(dialyzer_output_format)},
   {filename_opt, get(dialyzer_filename_opt)},
   {analysis_type, get(dialyzer_options_analysis_type)},
   {get_warnings, get(dialyzer_options_get_warnings)},
   {timing, get(dialyzer_timing)},
   {callgraph_file, get(dialyzer_callgraph_file)}
   |common_options()].

%% Options shared by the GUI and command-line modes.
%% NOTE(review): entries never set during parsing come back as 'undefined';
%% presumably dialyzer_options:build/1 tolerates that -- confirm there.
common_options() ->
  [{defines, get(dialyzer_options_defines)},
   {from, get(dialyzer_options_from)},
   {include_dirs, get(dialyzer_include)},
   {plts, get(dialyzer_init_plts)},
   {output_plt, get(dialyzer_output_plt)},
   {report_mode, get(dialyzer_options_report_mode)},
   {use_spec, get(dialyzer_options_use_contracts)},
   {warnings, get(dialyzer_warnings)},
   {check_plt, get(dialyzer_options_check_plt)}].
-spec get_lib_dir([string()]) -> [string()].

%% Map a list of OTP application names to their ebin directories
%% (see get_lib_dir/2 for the resolution rules).
get_lib_dir(Apps) ->
  get_lib_dir(Apps, []).
%% Resolve one application name per element:
%%   - a known application maps to "<libdir>/ebin";
%%   - "erts" falls back to <root>/erts/preloaded/ebin when it has no lib
%%     dir (an un-installed/source-tree system);
%%   - any other unknown name is kept verbatim (it may be a plain path).
%% Results are accumulated in reverse and flipped at the end.
%% (The dump had lost the inner `case H =:= "erts" of` line and turned the
%% comment into bare text, leaving the function syntactically broken.)
get_lib_dir([H|T], Acc) ->
  NewElem =
    case code:lib_dir(list_to_atom(H)) of
      {error, bad_name} ->
        %% hack for including erts in an un-installed system
        case H =:= "erts" of
          true -> filename:join(code:root_dir(), "erts/preloaded/ebin");
          false -> H
        end;
      LibDir -> LibDir ++ "/ebin"
    end,
  get_lib_dir(T, [NewElem|Acc]);
get_lib_dir([], Acc) ->
  lists:reverse(Acc).
%% Collect plt file names (for --plts) until "--", the next option, or the
%% end of the arguments.  "--" is consumed; a following option is not.
get_plts(Args, Acc) ->
  case Args of
    ["--" | Rest]    -> {lists:reverse(Acc), Rest};
    ["-" ++ _ | _]   -> {lists:reverse(Acc), Args};
    [Plt | Rest]     -> get_plts(Rest, [Plt | Acc]);
    []               -> {lists:reverse(Acc), []}
  end.
-spec help_warnings() -> no_return().

%% Print the -W warning-option help text and exit with a success status.
help_warnings() ->
  S = warning_options_msg(),
  io:put_chars(S),
  erlang:halt(?RET_NOTHING_SUSPICIOUS).
-spec help_message() -> no_return().
help_message() ->
S = "Usage: dialyzer [--help] [--version] [--shell] [--quiet] [--verbose]
[-pa dir]* [--plt plt] [--plts plt*] [-Ddefine]*
[-I include_dir]* [--output_plt file] [-Wwarn]*
[--src] [--gui | --wx] [files_or_dirs] [-r dirs]
[--apps applications] [-o outfile]
[--build_plt] [--add_to_plt] [--remove_from_plt]
[--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
[--no_native] [--fullpath] [--statistics]
Options:
files_or_dirs (for backwards compatibility also as: -c files_or_dirs)
Use Dialyzer from the command line to detect defects in the
specified files or directories containing .erl or .beam files,
depending on the type of the analysis.
-r dirs
Same as the previous but the specified directories are searched
recursively for subdirectories containing .erl or .beam files in
them, depending on the type of analysis.
--apps applications
Option typically used when building or modifying a plt as in:
dialyzer --build_plt --apps erts kernel stdlib mnesia ...
to conveniently refer to library applications corresponding to the
Erlang/OTP installation. However, the option is general and can also
be used during analysis in order to refer to Erlang/OTP applications.
In addition, file or directory names can also be included, as in:
dialyzer --apps inets ssl ./ebin ../other_lib/ebin/my_module.beam
-o outfile (or --output outfile)
When using Dialyzer from the command line, send the analysis
results to the specified outfile rather than to stdout.
--raw
When using Dialyzer from the command line, output the raw analysis
results (Erlang terms) instead of the formatted result.
The raw format is easier to post-process (for instance, to filter
warnings or to output HTML pages).
--src
Override the default, which is to analyze BEAM files, and
analyze starting from Erlang source code instead.
-Dname (or -Dname=value)
When analyzing from source, pass the define to Dialyzer. (**)
-I include_dir
When analyzing from source, pass the include_dir to Dialyzer. (**)
-pa dir
Include dir in the path for Erlang (useful when analyzing files
that have '-include_lib()' directives).
--output_plt file
Store the plt at the specified file after building it.
--plt plt
Use the specified plt as the initial plt (if the plt was built
during setup the files will be checked for consistency).
--plts plt*
Merge the specified plts to create the initial plt -- requires
that the plts are disjoint (i.e., do not have any module
appearing in more than one plt).
The plts are created in the usual way:
dialyzer --build_plt --output_plt plt_1 files_to_include
...
dialyzer --build_plt --output_plt plt_n files_to_include
and then can be used in either of the following ways:
dialyzer files_to_analyze --plts plt_1 ... plt_n
or:
dialyzer --plts plt_1 ... plt_n -- files_to_analyze
(Note the -- delimiter in the second case)
-Wwarn
A family of options which selectively turn on/off warnings
(for help on the names of warnings use dialyzer -Whelp).
--shell
Do not disable the Erlang shell while running the GUI.
--version (or -v)
Print the Dialyzer version and some more information and exit.
--help (or -h)
Print this message and exit.
--quiet (or -q)
Make Dialyzer a bit more quiet.
--verbose
Make Dialyzer a bit more verbose.
--statistics
Prints information about the progress of execution (analysis phases,
time spent in each and size of the relative input).
--build_plt
The analysis starts from an empty plt and creates a new one from the
files specified with -c and -r. Only works for beam files.
Use --plt(s) or --output_plt to override the default plt location.
--add_to_plt
The plt is extended to also include the files specified with -c and -r.
Use --plt(s) to specify which plt to start from, and --output_plt to
specify where to put the plt. Note that the analysis might include
files from the plt if they depend on the new files.
This option only works with beam files.
--remove_from_plt
The information from the files specified with -c and -r is removed
from the plt. Note that this may cause a re-analysis of the remaining
dependent files.
--check_plt
Check the plt for consistency and rebuild it if it is not up-to-date.
Actually, this option is of rare use as it is on by default.
--no_check_plt (or -n)
Skip the plt check when running Dialyzer. Useful when working with
installed plts that never change.
--plt_info
Make Dialyzer print information about the plt and then quit. The plt
can be specified with --plt(s).
--get_warnings
Make Dialyzer emit warnings even when manipulating the plt. Warnings
are only emitted for files that are actually analyzed.
--dump_callgraph file
Dump the call graph into the specified file whose format is determined
by the file name extension. Supported extensions are: raw, dot, and ps.
If something else is used as file name extension, default format '.raw'
will be used.
--no_native (or -nn)
Bypass the native code compilation of some key files that Dialyzer
heuristically performs when dialyzing many files; this avoids the
compilation time but it may result in (much) longer analysis time.
--fullpath
Display the full path names of files for which warnings are emitted.
--gui
Use the gs-based GUI.
--wx
Use the wx-based GUI.
Note:
* denotes that multiple occurrences of these options are possible.
** options -D and -I work both from command-line and in the Dialyzer GUI;
the syntax of defines and includes is the same as that used by \"erlc\".
" ++ warning_options_msg() ++ "
The exit status of the command line version is:
0 - No problems were encountered during the analysis and no
warnings were emitted.
1 - Problems were encountered during the analysis.
2 - No problems were encountered, but warnings were emitted.
",
io:put_chars(S),
erlang:halt(?RET_NOTHING_SUSPICIOUS).
warning_options_msg() ->
"Warning options:
-Wno_return
Suppress warnings for functions that will never return a value.
-Wno_unused
Suppress warnings for unused functions.
-Wno_improper_lists
Suppress warnings for construction of improper lists.
-Wno_tuple_as_fun
Suppress warnings for using tuples instead of funs.
-Wno_fun_app
Suppress warnings for fun applications that will fail.
-Wno_match
Suppress warnings for patterns that are unused or cannot match.
-Wno_opaque
Suppress warnings for violations of opaqueness of data types.
-Wno_behaviours
Suppress warnings about behaviour callbacks which drift from the published
recommended interfaces.
-Wno_undefined_callbacks
Suppress warnings about behaviours that have no -callback attributes for
their callbacks.
-Wunmatched_returns ***
Include warnings for function calls which ignore a structured return
value or do not match against one of many possible return value(s).
-Werror_handling ***
Include warnings for functions that only return by means of an exception.
-Wrace_conditions ***
Include warnings for possible race conditions.
-Wunderspecs ***
Warn about underspecified functions
(those whose -spec is strictly more allowing than the success typing).
The following options are also available but their use is not recommended:
(they are mostly for Dialyzer developers and internal debugging)
-Woverspecs ***
Warn about overspecified functions
(those whose -spec is strictly less allowing than the success typing).
-Wspecdiffs ***
Warn when the -spec is different than the success typing.
*** Identifies options that turn on warnings rather than turning them off.
".
|
c0690359617b69535227e24bd957b7eb26668a29ad17fb2d21df85d95a833ec6 | input-output-hk/marlowe-cardano | Test.hs | -----------------------------------------------------------------------------
--
-- Module      :  $Headers
-- License     :  Apache 2.0
--
-- Stability   :  Experimental
-- Portability :  Portable
--
-- | Contract-testing commands in the CLI tool.
--
-----------------------------------------------------------------------------
{-# LANGUAGE FlexibleContexts  #-}
{-# LANGUAGE OverloadedStrings #-}
module Language.Marlowe.CLI.Command.Test
  ( -- * CLI Commands
    TestCommand
  , parseTestCommand
  , runTestCommand
  ) where
import Cardano.Api (IsShelleyBasedEra, NetworkId)
import Control.Monad.Except (MonadError, MonadIO)
import Data.Maybe (fromMaybe)
import Language.Marlowe.CLI.Command.Parse (parseAddress, parseNetworkId)
import Language.Marlowe.CLI.Test (runTests)
import Language.Marlowe.CLI.Test.Types (ExecutionMode(..), MarloweTests(ScriptTests), Seconds(..))
import Language.Marlowe.CLI.Types (CliEnv, CliError, askEra)
import Control.Monad.Reader.Class (MonadReader)
import qualified Options.Applicative as O
-- | CLI commands and options for testing contracts.
type TestCommand era = MarloweTests era FilePath
-- | Run a contract-testing command.
runTestCommand :: IsShelleyBasedEra era
               => MonadError CliError m
               => MonadIO m
               => MonadReader (CliEnv era) m
               => TestCommand era -- ^ The command.
               -> m ()            -- ^ Action for running the command.
runTestCommand command = askEra >>= \era -> runTests era command
-- | Parser for test commands.
parseTestCommand :: IsShelleyBasedEra era => O.Mod O.OptionFields NetworkId
                 -> O.Mod O.OptionFields FilePath
                 -> O.Parser (TestCommand era)
parseTestCommand network socket =
  let
    group = O.commandGroup "Commands for testing contracts:"
  in
    O.hsubparser (group <> scriptsCommand network socket)
-- | Parser for the "scripts" command.
scriptsCommand :: IsShelleyBasedEra era => O.Mod O.OptionFields NetworkId
               -> O.Mod O.OptionFields FilePath
               -> O.Mod O.CommandFields (TestCommand era)
scriptsCommand network socket =
  let
    commandInfo =
      O.info (scriptsOptions network socket) (O.progDesc "Test Marlowe scripts on-chain.")
  in
    O.command "scripts" commandInfo
-- | Execution mode, defaulting to on-chain mode with a 120-second
-- transaction-submission timeout when no flag is given.
executionModeParser :: O.Parser ExecutionMode
executionModeParser = fromMaybe (OnChainMode (Seconds 120)) <$> simulationModeOpt
-- | Optional flag that switches the test suite into simulation mode.
simulationModeOpt :: O.Parser (Maybe ExecutionMode)
simulationModeOpt =
  let
    flag = O.flag' SimulationMode
         $ O.long "simulation-mode"
        <> O.help "Run test suite in simulation mode by ignoring the transaction submission timeout"
  in
    O.optional flag
-- | Parser for the "scripts" options.
scriptsOptions :: IsShelleyBasedEra era
               => O.Mod O.OptionFields NetworkId
               -> O.Mod O.OptionFields FilePath
               -> O.Parser (TestCommand era)
scriptsOptions network socket =
  ScriptTests
    <$> networkOpt
    <*> socketOpt
    <*> faucetKeyOpt
    <*> faucetAddressOpt
    <*> burnAddressOpt
    <*> executionModeParser
    <*> testFilesArg
  where
    networkOpt       = O.option parseNetworkId (O.long "testnet-magic" <> O.metavar "INTEGER" <> network <> O.help "Network magic. Defaults to the CARDANO_TESTNET_MAGIC environment variable's value.")
    socketOpt        = O.strOption (O.long "socket-path" <> O.metavar "SOCKET_FILE" <> socket <> O.help "Location of the cardano-node socket file. Defaults to the CARDANO_NODE_SOCKET_PATH environment variable's value.")
    faucetKeyOpt     = O.strOption (O.long "faucet-key" <> O.metavar "SIGNING_FILE" <> O.help "The file containing the signing key for the faucet.")
    faucetAddressOpt = O.option parseAddress (O.long "faucet-address" <> O.metavar "ADDRESS" <> O.help "The address of the faucet.")
    burnAddressOpt   = O.option parseAddress (O.long "burn-address" <> O.metavar "ADDRESS" <> O.help "Burn address for discarding used tokens.")
    testFilesArg     = O.some (O.strArgument (O.metavar "TEST_FILE" <> O.help "JSON file containing a test case."))
| null | https://raw.githubusercontent.com/input-output-hk/marlowe-cardano/a64d953eef811081bb83c7055e7c5b8c7b902deb/marlowe-cli/src/Language/Marlowe/CLI/Command/Test.hs | haskell | ---------------------------------------------------------------------------
Module : $Headers
Stability : Experimental
---------------------------------------------------------------------------
# LANGUAGE OverloadedStrings #
| Run a contract-testing command.
^ The command.
^ Action for running the command.
| Parser for test commands.
| Parser for the "scripts" command.
| Parser for the "scripts" options. | License : Apache 2.0
-- Portability :  Portable
--
-- | Contract-testing commands in the CLI tool.
--
-----------------------------------------------------------------------------
{-# LANGUAGE FlexibleContexts  #-}
{-# LANGUAGE OverloadedStrings #-}
module Language.Marlowe.CLI.Command.Test
  ( -- * CLI Commands
    TestCommand
  , parseTestCommand
  , runTestCommand
  ) where
import Cardano.Api (IsShelleyBasedEra, NetworkId)
import Control.Monad.Except (MonadError, MonadIO)
import Data.Maybe (fromMaybe)
import Language.Marlowe.CLI.Command.Parse (parseAddress, parseNetworkId)
import Language.Marlowe.CLI.Test (runTests)
import Language.Marlowe.CLI.Test.Types (ExecutionMode(..), MarloweTests(ScriptTests), Seconds(..))
import Language.Marlowe.CLI.Types (CliEnv, CliError, askEra)
import Control.Monad.Reader.Class (MonadReader)
import qualified Options.Applicative as O
-- | CLI commands and options for testing contracts.
type TestCommand era = MarloweTests era FilePath
-- | Run a contract-testing command.
-- (The comment-stripped dump had dropped the `=> TestCommand era` and
-- `-> m ()` lines together with their `-- ^` comments, leaving the
-- signature ill-typed; restored here.)
runTestCommand :: IsShelleyBasedEra era
               => MonadError CliError m
               => MonadIO m
               => MonadReader (CliEnv era) m
               => TestCommand era -- ^ The command.
               -> m ()            -- ^ Action for running the command.
runTestCommand cmd = do
  era <- askEra
  runTests era cmd
-- | Parser for test commands.
parseTestCommand :: IsShelleyBasedEra era => O.Mod O.OptionFields NetworkId
                 -> O.Mod O.OptionFields FilePath
                 -> O.Parser (TestCommand era)
parseTestCommand network socket =
  O.hsubparser
    $ O.commandGroup "Commands for testing contracts:"
    <> scriptsCommand network socket

-- | Parser for the "scripts" command.
scriptsCommand :: IsShelleyBasedEra era => O.Mod O.OptionFields NetworkId
               -> O.Mod O.OptionFields FilePath
               -> O.Mod O.CommandFields (TestCommand era)
scriptsCommand network socket =
  O.command "scripts"
    $ O.info (scriptsOptions network socket)
    $ O.progDesc "Test Marlowe scripts on-chain."

-- | Execution mode, defaulting to on-chain mode with a 120-second
-- transaction-submission timeout.
executionModeParser :: O.Parser ExecutionMode
executionModeParser = fmap (fromMaybe (OnChainMode (Seconds 120))) simulationModeOpt

-- | Optional flag that switches the test suite into simulation mode.
simulationModeOpt :: O.Parser (Maybe ExecutionMode)
simulationModeOpt = O.optional (O.flag' SimulationMode (O.long "simulation-mode" <> O.help "Run test suite in simulation mode by ignoring the transaction submission timeout"))

-- | Parser for the "scripts" options.
scriptsOptions :: IsShelleyBasedEra era
               => O.Mod O.OptionFields NetworkId
               -> O.Mod O.OptionFields FilePath
               -> O.Parser (TestCommand era)
scriptsOptions network socket =
  ScriptTests
    <$> O.option parseNetworkId (O.long "testnet-magic" <> O.metavar "INTEGER" <> network <> O.help "Network magic. Defaults to the CARDANO_TESTNET_MAGIC environment variable's value." )
    <*> O.strOption (O.long "socket-path" <> O.metavar "SOCKET_FILE" <> socket <> O.help "Location of the cardano-node socket file. Defaults to the CARDANO_NODE_SOCKET_PATH environment variable's value.")
    <*> O.strOption (O.long "faucet-key" <> O.metavar "SIGNING_FILE" <> O.help "The file containing the signing key for the faucet." )
    <*> O.option parseAddress (O.long "faucet-address" <> O.metavar "ADDRESS" <> O.help "The address of the faucet." )
    <*> O.option parseAddress (O.long "burn-address" <> O.metavar "ADDRESS" <> O.help "Burn address for discarding used tokens." )
    <*> executionModeParser
    <*> (O.some . O.strArgument) ( O.metavar "TEST_FILE" <> O.help "JSON file containing a test case." )
|
cb6e7c393e8183463887e1a8104aad9b20443d5045415c3d997044d85d7024e6 | exercism/common-lisp | example.lisp | (in-package :cl-user)
(defpackage :crypto-square
(:use :cl)
(:export :encipher))
(in-package :crypto-square)
(defun normalize (text)
  "Lower-case TEXT and drop every character that is not alphanumeric."
  (remove-if-not #'alphanumericp (string-downcase text)))
(defun matrix-transpose (matrix)
  "Return a fresh 2-D array holding the transpose of MATRIX."
  (destructuring-bind (rows cols) (array-dimensions matrix)
    (let ((transposed (make-array (list cols rows))))
      (dotimes (r rows transposed)
        (dotimes (c cols)
          (setf (aref transposed c r) (aref matrix r c)))))))
(defun text->matrix (text)
  "Pack TEXT row-major into a near-square grid padded with spaces.
Columns = ceiling of sqrt of the length; rows = ceiling of length/columns."
  (let* ((len (length text))
         (cols (ceiling (sqrt len)))
         (rows (if (zerop cols) 0 (ceiling len cols)))
         (grid (make-array (list rows cols) :initial-element #\Space)))
    (dotimes (i len grid)
      (setf (row-major-aref grid i) (char text i)))))
(defun matrix->text (matrix)
  "Concatenate the rows of MATRIX into one string, separated by spaces."
  (let ((rows (array-dimension matrix 0))
        (cols (array-dimension matrix 1)))
    (format nil "~{~A~^ ~}"
            (loop for r below rows
                  collect (coerce (loop for c below cols
                                        collect (aref matrix r c))
                                  'string)))))
(defun encipher (plaintext)
  "Encode PLAINTEXT with the classic crypto-square transposition cipher."
  (let* ((clean (normalize plaintext))
         (square (text->matrix clean))
         (rotated (matrix-transpose square)))
    (matrix->text rotated)))
| null | https://raw.githubusercontent.com/exercism/common-lisp/0bb38d1b126f7dc90d86f04c41479d4eaf35df74/exercises/practice/crypto-square/.meta/example.lisp | lisp | (in-package :cl-user)
(defpackage :crypto-square
(:use :cl)
(:export :encipher))
(in-package :crypto-square)
;;; Crypto-square cipher: write the normalized text row-by-row into a
;;; near-square rectangle, then read it out column-by-column.

;; Strip non-alphanumeric characters and downcase the remainder.
(defun normalize (text)
  (string-downcase (remove-if-not #'alphanumericp text)))

;; Return a fresh array with the rows and columns of MATRIX swapped.
(defun matrix-transpose (matrix)
  (let* ((dimensions (array-dimensions matrix))
         (new-matrix (make-array (reverse dimensions)))
         (x-limit (first dimensions))
         (y-limit (second dimensions)))
    (do ((x 0 (incf x)))
        ((= x x-limit) new-matrix)
      (do ((y 0 (incf y)))
          ((= y y-limit))
        (setf (aref new-matrix y x) (aref matrix x y))))))

;; Pack TEXT row-major into an R x C rectangle (C = ceiling of the square
;; root of the length, R = ceiling of length/C), padding with spaces.
(defun text->matrix (text)
  (let* ((length (length text))
         (c (ceiling (sqrt length)))
         (r (if (zerop c) 0 (ceiling (/ (length text) c))))
         (array (make-array (list r c) :initial-element #\Space)))
    (do ((i 0 (incf i)))
        ((= i (length text)) array)
      (setf (row-major-aref array i) (char text i)))))

;; Join the rows of MATRIX into one string with a single space between
;; rows (BUTLAST drops the trailing separator).
(defun matrix->text (matrix)
  (let ((max-row (array-dimension matrix 0))
        (max-col (array-dimension matrix 1)))
    (do ((rows (list))
         (r 0 (incf r)))
        ((= r max-row)
         (coerce (butlast (nreverse rows)) 'string))
      (do ((c 0 (incf c)))
          ((= c max-col))
        (push (aref matrix r c) rows))
      (push #\Space rows))))

;; Encode PLAINTEXT with the crypto-square cipher.
(defun encipher (plaintext)
  (matrix->text
   (matrix-transpose
    (text->matrix
     (normalize plaintext)))))
| |
449c893856d2d8f5bdca0115cde788d3686fbbbe1ebfdd71c7eb2781a1dde012 | anmonteiro/ocaml-h2 | status.ml | ----------------------------------------------------------------------------
(*----------------------------------------------------------------------------
* Copyright (c) 2017 Inhabited Type LLC.
* Copyright (c) 2019 Antonio N. Monteiro.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the author nor the names of his contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*---------------------------------------------------------------------------*)
(* From RFC7540§8.1.1:
 *   HTTP/2 removes support for the 101 (Switching Protocols) informational
 *   status code ([RFC7231], Section 6.2.2).
 *
 * Note: While the above is true, we don't enforce in this library, as it
 * makes unifying types with http/af much easier. `H2.Status.t` is, thus, a
 * strict superset of `Httpaf.Status.t`. *)
include (
Httpaf.Status :
module type of Httpaf.Status
with type client_error := Httpaf.Status.client_error
and type standard := Httpaf.Status.standard
and type t := Httpaf.Status.t)
type client_error =
[ Httpaf.Status.client_error
(* From RFC7540§9.1.2:
 *   The 421 (Misdirected Request) status code indicates that the request
 *   was directed at a server that is not able to produce a response. This
 *   can be sent by a server that is not configured to produce responses
 *   for the combination of scheme and authority that are included in the
 *   request URI. *)
`Misdirected_request
]
type standard =
[ Httpaf.Status.standard
| client_error
]
type t =
[ standard
| `Code of int
]
(* Note: The value for reason phrases is never actually serialized to the
* input or output channels.
*
* From RFC7540§8.1.2.4:
* HTTP/2 does not define a way to carry the version or reason phrase that is
* included in an HTTP/1.1 status line. *)
(* Conversions between [t] and integer status codes.  [`Misdirected_request]
   (421) is the one constructor H2 adds on top of http/af's status type. *)
let default_reason_phrase status =
  match status with
  | `Misdirected_request -> "Misdirected Request"
  | #Httpaf.Status.standard as std -> Httpaf.Status.default_reason_phrase std

let to_code status =
  match status with
  | `Misdirected_request -> 421
  | #Httpaf.Status.t as s -> Httpaf.Status.to_code s

let unsafe_of_code code =
  if code = 421 then `Misdirected_request
  else (Httpaf.Status.unsafe_of_code code :> t)

let of_code code =
  if code = 421 then `Misdirected_request
  else (Httpaf.Status.of_code code :> t)
(* Status-class predicates.  [`Misdirected_request] (421) counts as a
   client error (and hence an error) and as nothing else; everything else
   is delegated to http/af. *)
let is_informational status =
  match status with
  | `Misdirected_request -> false
  | #Httpaf.Status.t as s -> Httpaf.Status.is_informational s

let is_successful status =
  match status with
  | `Misdirected_request -> false
  | #Httpaf.Status.t as s -> Httpaf.Status.is_successful s

let is_redirection status =
  match status with
  | `Misdirected_request -> false
  | #Httpaf.Status.t as s -> Httpaf.Status.is_redirection s

let is_client_error status =
  match status with
  | `Misdirected_request -> true
  | #Httpaf.Status.t as s -> Httpaf.Status.is_client_error s

let is_server_error status =
  match status with
  | `Misdirected_request -> false
  | #Httpaf.Status.t as s -> Httpaf.Status.is_server_error s

let is_error status =
  match status with
  | `Misdirected_request -> true
  | #Httpaf.Status.t as s -> Httpaf.Status.is_error s
(* Textual conversions.  [of_string] raises (via [int_of_string]) on
   non-numeric input. *)
let to_string status =
  match status with
  | `Misdirected_request -> "421"
  | #Httpaf.Status.t as s -> Httpaf.Status.to_string s

let of_string s = of_code (int_of_string s)

let pp_hum fmt status = Format.fprintf fmt "%u" (to_code status)
| null | https://raw.githubusercontent.com/anmonteiro/ocaml-h2/1aea7e0aebb2c440dc35ef35ca15d87d44799f4e/lib/status.ml | ocaml | Note: The value for reason phrases is never actually serialized to the
* input or output channels.
*
* From RFC7540§8.1.2.4:
* HTTP/2 does not define a way to carry the version or reason phrase that is
* included in an HTTP/1.1 status line. | ----------------------------------------------------------------------------
(*----------------------------------------------------------------------------
* Copyright (c) 2017 Inhabited Type LLC.
* Copyright (c) 2019 Antonio N. Monteiro.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the author nor the names of his contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS ``AS IS'' AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*---------------------------------------------------------------------------*)
(* From RFC7540§8.1.1:
 *   HTTP/2 removes support for the 101 (Switching Protocols) informational
 *   status code ([RFC7231], Section 6.2.2).
 *
 * Note: While the above is true, we don't enforce in this library, as it
 * makes unifying types with http/af much easier. `H2.Status.t` is, thus, a
 * strict superset of `Httpaf.Status.t`. *)
(* Re-export everything from [Httpaf.Status], destructively substituting away
 * its [client_error], [standard] and [t] types so they can be redefined
 * below as supersets that also carry the HTTP/2-only 421 status. *)
include (
  Httpaf.Status :
    module type of Httpaf.Status
    with type client_error := Httpaf.Status.client_error
     and type standard := Httpaf.Status.standard
     and type t := Httpaf.Status.t)
(* http/af's client errors, plus the HTTP/2-only 421. *)
type client_error =
  [ Httpaf.Status.client_error
  | (* From RFC7540§9.1.2:
     *   The 421 (Misdirected Request) status code indicates that the request
     *   was directed at a server that is not able to produce a response. This
     *   can be sent by a server that is not configured to produce responses
     *   for the combination of scheme and authority that are included in the
     *   request URI. *)
    `Misdirected_request
  ]
(* All standard statuses: http/af's standard set widened with the
 * HTTP/2-only client errors defined above (i.e. 421). *)
type standard =
  [ Httpaf.Status.standard
  | client_error
  ]
(* A status is either a standard one or an arbitrary numeric code. *)
type t =
  [ standard
  | `Code of int
  ]
(* Canonical reason phrase for a standard status; 421 is handled here,
 * everything else is delegated to http/af. *)
let default_reason_phrase status =
  match status with
  | `Misdirected_request -> "Misdirected Request"
  | #Httpaf.Status.standard as std -> Httpaf.Status.default_reason_phrase std
(* Numeric code of a status. *)
let to_code status =
  match status with
  | `Misdirected_request -> 421
  | #Httpaf.Status.t as other -> Httpaf.Status.to_code other
(* Convert a numeric code to a status without range validation;
 * 421 maps to [`Misdirected_request], everything else goes through http/af. *)
let unsafe_of_code code =
  match code with
  | 421 -> `Misdirected_request
  | other -> (Httpaf.Status.unsafe_of_code other :> t)
(* Convert a numeric code to a status; 421 is handled here, all other
 * codes are delegated to http/af's [of_code]. *)
let of_code code =
  match code with
  | 421 -> `Misdirected_request
  | other -> (Httpaf.Status.of_code other :> t)
(* Is this a 1xx status? 421 is not. *)
let is_informational status =
  match status with
  | `Misdirected_request -> false
  | #Httpaf.Status.t as other -> Httpaf.Status.is_informational other
(* Is this a 2xx status? 421 is not. *)
let is_successful status =
  match status with
  | `Misdirected_request -> false
  | #Httpaf.Status.t as other -> Httpaf.Status.is_successful other
(* Is this a 3xx status? 421 is not. *)
let is_redirection status =
  match status with
  | `Misdirected_request -> false
  | #Httpaf.Status.t as other -> Httpaf.Status.is_redirection other
(* Is this a 4xx status? 421 is. *)
let is_client_error status =
  match status with
  | `Misdirected_request -> true
  | #Httpaf.Status.t as other -> Httpaf.Status.is_client_error other
(* Is this a 5xx status? 421 is not. *)
let is_server_error status =
  match status with
  | `Misdirected_request -> false
  | #Httpaf.Status.t as other -> Httpaf.Status.is_server_error other
(* Is this a 4xx or 5xx status? 421 is (client error). *)
let is_error status =
  match status with
  | `Misdirected_request -> true
  | #Httpaf.Status.t as other -> Httpaf.Status.is_error other
(* Decimal string form of the status code. *)
let to_string status =
  match status with
  | `Misdirected_request -> "421"
  | #Httpaf.Status.t as other -> Httpaf.Status.to_string other
(* Parse a decimal status string; non-numeric input raises [Failure]
 * via [int_of_string]. *)
let of_string s = s |> int_of_string |> of_code
(* Human-oriented pretty-printer: prints the numeric status code. *)
let pp_hum formatter status = Format.fprintf formatter "%u" (to_code status)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.