_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
64268bbcb62dee5228b441b100c22cdec98f63d7ad48725d75f3f361e0999f23 | heidihoward/ocaml-dns-examples | unikernel.ml | open Lwt
open V1_LWT
module Client (C:CONSOLE) (S:STACKV4) = struct
module U = S.UDPV4
module DNS = Dns_resolver_mirage.Make(OS.Time)(S)
open Dns.Packet
open Dns.Name
let start c s =
Bootvar.create () >>= fun bootvar ->
let domain = Bootvar.get_exn bootvar "domain" in
let server = Ipaddr.V4.of_string_exn (Bootvar.get_exn bootvar "server") in
let t = DNS.create s in
OS.Time.sleep 2.0
>>= fun () ->
C.log_s c ("Resolving " ^ domain)
>>= fun () ->
DNS.resolve (module Dns.Protocol.Client) t server 53 Q_IN Q_A (string_to_domain_name domain)
>>= fun r ->
let ips =
List.fold_left (fun a x ->
match x.rdata with
| A ip -> (Ipaddr.V4 ip) :: a
| _ -> a ) [] r.answers in
Lwt_list.iter_s
(fun r ->
C.log_s c ("Answer " ^ (Ipaddr.to_string r))
) ips
>>= fun () ->
C.log_s c (to_string r)
end | null | https://raw.githubusercontent.com/heidihoward/ocaml-dns-examples/0d49f892079071d6cb3221e90a8c321984745e69/basic_dig/unikernel.ml | ocaml | open Lwt
open V1_LWT
module Client (C:CONSOLE) (S:STACKV4) = struct
module U = S.UDPV4
module DNS = Dns_resolver_mirage.Make(OS.Time)(S)
open Dns.Packet
open Dns.Name
let start c s =
Bootvar.create () >>= fun bootvar ->
let domain = Bootvar.get_exn bootvar "domain" in
let server = Ipaddr.V4.of_string_exn (Bootvar.get_exn bootvar "server") in
let t = DNS.create s in
OS.Time.sleep 2.0
>>= fun () ->
C.log_s c ("Resolving " ^ domain)
>>= fun () ->
DNS.resolve (module Dns.Protocol.Client) t server 53 Q_IN Q_A (string_to_domain_name domain)
>>= fun r ->
let ips =
List.fold_left (fun a x ->
match x.rdata with
| A ip -> (Ipaddr.V4 ip) :: a
| _ -> a ) [] r.answers in
Lwt_list.iter_s
(fun r ->
C.log_s c ("Answer " ^ (Ipaddr.to_string r))
) ips
>>= fun () ->
C.log_s c (to_string r)
end | |
2ac1d7caad311f5caa5d31021eb1beb7d03d7e1a119c9fe650731129120bb7b8 | unison-code/unison | DT.hs | |
Copyright : Copyright ( c ) 2016 , RISE SICS AB
License : BSD3 ( see the LICENSE file )
Maintainer :
.
Copyright : Copyright (c) 2016, RISE SICS AB
License : BSD3 (see the LICENSE file)
Maintainer :
Dominance Tree.
-}
Main authors :
< >
This file is part of Unison , see -code.github.io
Main authors:
Roberto Castaneda Lozano <>
This file is part of Unison, see -code.github.io
-}
module Unison.Graphs.DT (fromCFG, dominators) where
import Data.Graph.Inductive
fromCFG :: Gr a b -> Gr a b
fromCFG cfg =
let nodes = labNodes cfg
edges = if noNodes cfg > 0
then [(p, c, edge cfg p c) | (c, p) <- iDom cfg 0]
else []
in mkGraph nodes edges
label g n =
let Just l = lab g n
in l
dominators dt n =
let ds = reaching n dt
in map (label dt) ds
reaching n = reachable n . grev
edge g n1 n2 =
let [l] = [l | (n1', n2', l) <- labEdges g, (n1, n2) == (n1', n2')]
in l
| null | https://raw.githubusercontent.com/unison-code/unison/9f8caf78230f956a57b50a327f8d1dca5839bf64/src/unison/src/Unison/Graphs/DT.hs | haskell | |
Copyright : Copyright ( c ) 2016 , RISE SICS AB
License : BSD3 ( see the LICENSE file )
Maintainer :
.
Copyright : Copyright (c) 2016, RISE SICS AB
License : BSD3 (see the LICENSE file)
Maintainer :
Dominance Tree.
-}
Main authors :
< >
This file is part of Unison , see -code.github.io
Main authors:
Roberto Castaneda Lozano <>
This file is part of Unison, see -code.github.io
-}
module Unison.Graphs.DT (fromCFG, dominators) where
import Data.Graph.Inductive
fromCFG :: Gr a b -> Gr a b
fromCFG cfg =
let nodes = labNodes cfg
edges = if noNodes cfg > 0
then [(p, c, edge cfg p c) | (c, p) <- iDom cfg 0]
else []
in mkGraph nodes edges
label g n =
let Just l = lab g n
in l
dominators dt n =
let ds = reaching n dt
in map (label dt) ds
reaching n = reachable n . grev
edge g n1 n2 =
let [l] = [l | (n1', n2', l) <- labEdges g, (n1, n2) == (n1', n2')]
in l
| |
4cdc56b259880c4f8b613f2a00010b42a483cc609911378b14a6c573d3d7a5eb | prl-julia/juliette-wa | redex.rkt | #lang racket
(require redex)
; import surface language
(require "../../../src/redex/core/wa-surface.rkt")
; import full language
(require "../../../src/redex/core/wa-full.rkt")
; import optimizations
(require "../../../src/redex/optimizations/wa-optimized.rkt")
(displayln "Test for litmus-wa/paper01:")
(define p
(term
(evalg
(seq
(mdef "g" () 2)
(seq
(mdef
"f"
((:: 1_x Any))
(seq (evalg (mdef "g" () 1_x)) (pcall * 1_x (mcall g))))
(mcall f 42)))))
)
(test-equal (term (run-to-r ,p)) (term 84))
(test-results) | null | https://raw.githubusercontent.com/prl-julia/juliette-wa/1d1a2154e7b4e232ea2166fba485a3bf574ebd88/tests/litmus-wa/paper01/redex.rkt | racket | import surface language
import full language
import optimizations | #lang racket
(require redex)
(require "../../../src/redex/core/wa-surface.rkt")
(require "../../../src/redex/core/wa-full.rkt")
(require "../../../src/redex/optimizations/wa-optimized.rkt")
(displayln "Test for litmus-wa/paper01:")
(define p
(term
(evalg
(seq
(mdef "g" () 2)
(seq
(mdef
"f"
((:: 1_x Any))
(seq (evalg (mdef "g" () 1_x)) (pcall * 1_x (mcall g))))
(mcall f 42)))))
)
(test-equal (term (run-to-r ,p)) (term 84))
(test-results) |
00d6eb5a85b3072806463896a18d761f96bab096f396afe2db76abe055e51870 | odo/revolver | revolver_lease.erl | -module(revolver_lease).
-export([init_state/1, new_pids/2, next_pid/1, release/2, pids/1, pid_down/2, delete_all_pids/1]).
-record(state, {
pids_available :: list(),
pids_leased :: any()
}).
API for revolver
init_state(lease) ->
#state{
pids_available = [],
pids_leased = sets:new()
}.
delete_all_pids(State) ->
{ok, State#state{pids_available = [], pids_leased = sets:mew()}}.
new_pids(Pids, State = #state{ pids_leased = PidsLeased, pids_available = PidsAvailable}) ->
PidsSet = sets:from_list(Pids),
NewPids =sets:to_list(sets:subtract(PidsSet, sets:union(PidsLeased, sets:from_list(PidsAvailable)))),
NextPidsLeased = sets:intersection(PidsSet, PidsLeased),
NextPidsAvailable = sets:to_list(sets:subtract(PidsSet, NextPidsLeased)),
NextState = State#state{ pids_available = NextPidsAvailable, pids_leased = NextPidsLeased },
{ok, NewPids, NextState}.
next_pid(State = #state{pids_available = []}) ->
{{error, overload}, State};
next_pid(State = #state{pids_available = [NextPid | RemainingPids], pids_leased = PidsLeased}) ->
NextState = State#state{ pids_available = RemainingPids, pids_leased = sets:add_element(NextPid, PidsLeased) },
{NextPid, NextState}.
release(Pid, State = #state{pids_available = PidsAvailable, pids_leased = PidsLeased}) ->
case sets:is_element(Pid, PidsLeased) of
true ->
NextState = State#state{pids_available = PidsAvailable ++ [Pid], pids_leased = sets:del_element(Pid, PidsLeased)},
{ok, NextState};
false ->
{ok, State}
end.
pid_down(Pid, State = #state{pids_available = PidsAvailable, pids_leased = PidsLeased}) ->
NextState =
case sets:is_element(Pid, PidsLeased) of
true ->
State#state{pids_leased = sets:del_element(Pid, PidsLeased)};
false ->
State#state{pids_available = lists:delete(Pid, PidsAvailable)}
end,
{ok, NextState}.
pids(State = #state{pids_available = PidsAvailable, pids_leased = PidsLeased}) ->
Pids = lists:flatten([PidsAvailable, sets:to_list(PidsLeased)]),
{{ok, Pids}, State}.
| null | https://raw.githubusercontent.com/odo/revolver/f9378f382d76c0af20081e558af882efa1c3f66d/src/revolver_lease.erl | erlang | -module(revolver_lease).
-export([init_state/1, new_pids/2, next_pid/1, release/2, pids/1, pid_down/2, delete_all_pids/1]).
-record(state, {
pids_available :: list(),
pids_leased :: any()
}).
API for revolver
init_state(lease) ->
#state{
pids_available = [],
pids_leased = sets:new()
}.
delete_all_pids(State) ->
{ok, State#state{pids_available = [], pids_leased = sets:mew()}}.
new_pids(Pids, State = #state{ pids_leased = PidsLeased, pids_available = PidsAvailable}) ->
PidsSet = sets:from_list(Pids),
NewPids =sets:to_list(sets:subtract(PidsSet, sets:union(PidsLeased, sets:from_list(PidsAvailable)))),
NextPidsLeased = sets:intersection(PidsSet, PidsLeased),
NextPidsAvailable = sets:to_list(sets:subtract(PidsSet, NextPidsLeased)),
NextState = State#state{ pids_available = NextPidsAvailable, pids_leased = NextPidsLeased },
{ok, NewPids, NextState}.
next_pid(State = #state{pids_available = []}) ->
{{error, overload}, State};
next_pid(State = #state{pids_available = [NextPid | RemainingPids], pids_leased = PidsLeased}) ->
NextState = State#state{ pids_available = RemainingPids, pids_leased = sets:add_element(NextPid, PidsLeased) },
{NextPid, NextState}.
release(Pid, State = #state{pids_available = PidsAvailable, pids_leased = PidsLeased}) ->
case sets:is_element(Pid, PidsLeased) of
true ->
NextState = State#state{pids_available = PidsAvailable ++ [Pid], pids_leased = sets:del_element(Pid, PidsLeased)},
{ok, NextState};
false ->
{ok, State}
end.
pid_down(Pid, State = #state{pids_available = PidsAvailable, pids_leased = PidsLeased}) ->
NextState =
case sets:is_element(Pid, PidsLeased) of
true ->
State#state{pids_leased = sets:del_element(Pid, PidsLeased)};
false ->
State#state{pids_available = lists:delete(Pid, PidsAvailable)}
end,
{ok, NextState}.
pids(State = #state{pids_available = PidsAvailable, pids_leased = PidsLeased}) ->
Pids = lists:flatten([PidsAvailable, sets:to_list(PidsLeased)]),
{{ok, Pids}, State}.
| |
c0f41aa0a446133f4213fca0044763fecc5d472fa669e0f8017b5f97da1f2c95 | psholtz/MIT-SICP | exercise1-37.scm | ;;
Exercise 1.37
;;
;; An infinite continued fraction is an expression of the form:
;;
;; f = N1
;; ----------------------------------
;; D1 + N2
;; ------------------
;; D2 + N3
;; --------
;; D3 + ...
;;
;; As an example, one can show that the infinite continued fraction expansion with the
N(i ) and the D(i ) all equal to 1 produces 1 / phi , where phi is the golden ration
( descibed in section 1.2.2 ) . One way to approximate an infinite continued fraction
;; is to truncate the expansion after a given number of terms. Such a truncation --
;; a so-called "k-term finite continued fraction" -- has the form:
;;
;; N1
;; --------------------------------
;; D1 + N2
;; ----------------
;; ... + NK
;; --
;; DK
;;
Suppose that n and d are procedures of one argument ( the term index i ) that return
the N(i ) and D(i ) of the terms of the continued fraction . Define a procedure cont - frac
;; such that evaluating (cont-frac n d k) computes the value of the k-term finite
continued fraction . Check your procedure by approximating 1 / phi using
;;
( cont - frac ( lambda ( i ) 1.0 )
( lambda ( i ) 1.0 )
;; k)
;;
for successive values of k. How large must you make k in order to get an approximation
that is accurate to 4 decimal places ?
;;
;;
;; (b) If your cont-frac procedure generates a recursive process, write one that generates an
;; iterative process. if it generates an iterative process, write one that generates a recursive
;; process.
;;
;;
;; Recursive Definition:
;;
(define (cont-frac n d k)
(define (cont-frac-iter i)
(cond ((= i k) (/ (n i) (d i)))
(else
(/ (n i) (+ (d i) (cont-frac-iter (+ i 1)))))))
(cont-frac-iter 1))
;;
Configure numerator and denominator to always return 1.0 , as in " phi " example :
;;
(define n (lambda (x) 1.0))
(define d (lambda (x) 1.0))
;;
;; The following unit tests all assert to true:
;;
(= (/ 1.0 1.0) (cont-frac n d 1))
(= (/ 1.0 2.0) (cont-frac n d 2))
(= (/ 2.0 3.0) (cont-frac n d 3))
(= (/ 3.0 5.0) (cont-frac n d 4))
(= (/ 5.0 8.0) (cont-frac n d 5))
(= (/ 8.0 13.0) (cont-frac n d 6))
(= (/ 13.0 21.0) (cont-frac n d 7))
(= (/ 21.0 34.0) (cont-frac n d 8))
(= (/ 34.0 55.0) (cont-frac n d 9))
(= (/ 55.0 89.0) (cont-frac n d 10))
;;
;; Now let's update the denominator procedure:
;;
(define d (lambda (x) 2.0))
;;
;; The following unit tests all assert to true:
;;
(= (/ 1.0 2.0) (cont-frac n d 1))
(= (/ 2.0 5.0) (cont-frac n d 2))
(= (/ 5.0 12.0) (cont-frac n d 3))
(= (/ 12.0 29.0) (cont-frac n d 4))
(= (/ 29.0 70.0) (cont-frac n d 5))
;;
;; Note the Fibonacci sequence generated in the numerator-to-denominator ratios.
;;
;;
Now let 's update the denominator to always be 1 , and the numerator to always be 2 :
;;
(define n (lambda (x) 2.0))
(define d (lambda (x) 1.0))
;;
;; The following unit tests all assert to true:
;;
(= 2.0 (cont-frac n d 1))
(= (/ 2.0 3.0) (cont-frac n d 2))
(= (/ 6.0 5.0) (cont-frac n d 3))
(= (/ 10.0 11.0) (cont-frac n d 4))
(= (/ 22.0 21.0) (cont-frac n d 5))
;;
To 9 digits of accuracy , the golden is 1.61803399 .
;;
;; Let's define the following test:
;;
(define (test)
4 digits of accuracy
(define phi 1.61803399)
(define target (/ 1.0 phi))
(define (test-iter k)
(let ((value (cont-frac n d k)))
(if (< (abs (- value target)) tolerance)
k
(test-iter (+ k 1)))))
(test-iter 1))
(test)
-- > 10
;;
Hence , we have to expand the continued fraction 10 times to get accuracy to within 4 decimal places :
;;
(/ 1.0 phi)
-- > 0.6180339882723972
(cont-frac n d 10)
-- > 0.6179775280898876
;;
;; Iterative:
;;
(define (cont-frac n d k)
(define (term i t)
(/ (n i) (+ (d i) t)))
(define (cont-frac-iter i t)
(cond ((= i 1) (term i t))
(else
(cont-frac-iter (- i 1) (term i t)))))
(cont-frac-iter k 0))
;;
Let 's try out the unit tests for the sequence again :
;;
(define n (lambda (x) 1.0))
(define d (lambda (x) 1.0))
(= (/ 1.0 1.0) (cont-frac n d 1))
(= (/ 1.0 2.0) (cont-frac n d 2))
(= (/ 2.0 3.0) (cont-frac n d 3))
(= (/ 3.0 5.0) (cont-frac n d 4))
(= (/ 5.0 8.0) (cont-frac n d 5))
(= (/ 8.0 13.0) (cont-frac n d 6))
(= (/ 13.0 21.0) (cont-frac n d 7))
(= (/ 21.0 34.0) (cont-frac n d 8))
(= (/ 34.0 55.0) (cont-frac n d 9))
(= (/ 55.0 89.0) (cont-frac n d 10))
;;
;; Let's run the other set of unit tests:
;;
(define d (lambda (x) 2.0))
(= (/ 1.0 2.0) (cont-frac n d 1))
(= (/ 2.0 5.0) (cont-frac n d 2))
(= (/ 5.0 12.0) (cont-frac n d 3))
(= (/ 12.0 29.0) (cont-frac n d 4))
(= (/ 29.0 70.0) (cont-frac n d 5))
;;
;; and also these unit tests:
;;
(define n (lambda (x) 2.0))
(define d (lambda (x) 1.0))
(= 2.0 (cont-frac n d 1))
(= (/ 2.0 3.0) (cont-frac n d 2))
(= (/ 6.0 5.0) (cont-frac n d 3))
(= (/ 10.0 11.0) (cont-frac n d 4))
(= (/ 22.0 21.0) (cont-frac n d 5))
;;
;; Let's run our same "test" procedure on the iterative "cont-frac" procedure,
;; to determine how many times we have to expand the continued fraction to
get an approximation to 1 / phi that is accurate to 4 decimal places :
;;
(test)
-- > 10
;;
Again , as before , the answer we get is 10 .
;;
(/ 1.0 phi)
-- > 0.6180339882723972
(cont-frac n d 10)
-- > 0.6179775280898876 | null | https://raw.githubusercontent.com/psholtz/MIT-SICP/01e9b722ac5008e26f386624849117ca8fa80906/Section-1.3/mit-scheme/exercise1-37.scm | scheme |
An infinite continued fraction is an expression of the form:
f = N1
----------------------------------
D1 + N2
------------------
D2 + N3
--------
D3 + ...
As an example, one can show that the infinite continued fraction expansion with the
is to truncate the expansion after a given number of terms. Such a truncation --
a so-called "k-term finite continued fraction" -- has the form:
N1
--------------------------------
D1 + N2
----------------
... + NK
--
DK
such that evaluating (cont-frac n d k) computes the value of the k-term finite
k)
(b) If your cont-frac procedure generates a recursive process, write one that generates an
iterative process. if it generates an iterative process, write one that generates a recursive
process.
Recursive Definition:
The following unit tests all assert to true:
Now let's update the denominator procedure:
The following unit tests all assert to true:
Note the Fibonacci sequence generated in the numerator-to-denominator ratios.
The following unit tests all assert to true:
Let's define the following test:
Iterative:
Let's run the other set of unit tests:
and also these unit tests:
Let's run our same "test" procedure on the iterative "cont-frac" procedure,
to determine how many times we have to expand the continued fraction to
| Exercise 1.37
N(i ) and the D(i ) all equal to 1 produces 1 / phi , where phi is the golden ration
( descibed in section 1.2.2 ) . One way to approximate an infinite continued fraction
Suppose that n and d are procedures of one argument ( the term index i ) that return
the N(i ) and D(i ) of the terms of the continued fraction . Define a procedure cont - frac
continued fraction . Check your procedure by approximating 1 / phi using
( cont - frac ( lambda ( i ) 1.0 )
( lambda ( i ) 1.0 )
for successive values of k. How large must you make k in order to get an approximation
that is accurate to 4 decimal places ?
(define (cont-frac n d k)
(define (cont-frac-iter i)
(cond ((= i k) (/ (n i) (d i)))
(else
(/ (n i) (+ (d i) (cont-frac-iter (+ i 1)))))))
(cont-frac-iter 1))
Configure numerator and denominator to always return 1.0 , as in " phi " example :
(define n (lambda (x) 1.0))
(define d (lambda (x) 1.0))
(= (/ 1.0 1.0) (cont-frac n d 1))
(= (/ 1.0 2.0) (cont-frac n d 2))
(= (/ 2.0 3.0) (cont-frac n d 3))
(= (/ 3.0 5.0) (cont-frac n d 4))
(= (/ 5.0 8.0) (cont-frac n d 5))
(= (/ 8.0 13.0) (cont-frac n d 6))
(= (/ 13.0 21.0) (cont-frac n d 7))
(= (/ 21.0 34.0) (cont-frac n d 8))
(= (/ 34.0 55.0) (cont-frac n d 9))
(= (/ 55.0 89.0) (cont-frac n d 10))
(define d (lambda (x) 2.0))
(= (/ 1.0 2.0) (cont-frac n d 1))
(= (/ 2.0 5.0) (cont-frac n d 2))
(= (/ 5.0 12.0) (cont-frac n d 3))
(= (/ 12.0 29.0) (cont-frac n d 4))
(= (/ 29.0 70.0) (cont-frac n d 5))
Now let 's update the denominator to always be 1 , and the numerator to always be 2 :
(define n (lambda (x) 2.0))
(define d (lambda (x) 1.0))
(= 2.0 (cont-frac n d 1))
(= (/ 2.0 3.0) (cont-frac n d 2))
(= (/ 6.0 5.0) (cont-frac n d 3))
(= (/ 10.0 11.0) (cont-frac n d 4))
(= (/ 22.0 21.0) (cont-frac n d 5))
To 9 digits of accuracy , the golden is 1.61803399 .
(define (test)
4 digits of accuracy
(define phi 1.61803399)
(define target (/ 1.0 phi))
(define (test-iter k)
(let ((value (cont-frac n d k)))
(if (< (abs (- value target)) tolerance)
k
(test-iter (+ k 1)))))
(test-iter 1))
(test)
-- > 10
Hence , we have to expand the continued fraction 10 times to get accuracy to within 4 decimal places :
(/ 1.0 phi)
-- > 0.6180339882723972
(cont-frac n d 10)
-- > 0.6179775280898876
(define (cont-frac n d k)
(define (term i t)
(/ (n i) (+ (d i) t)))
(define (cont-frac-iter i t)
(cond ((= i 1) (term i t))
(else
(cont-frac-iter (- i 1) (term i t)))))
(cont-frac-iter k 0))
Let 's try out the unit tests for the sequence again :
(define n (lambda (x) 1.0))
(define d (lambda (x) 1.0))
(= (/ 1.0 1.0) (cont-frac n d 1))
(= (/ 1.0 2.0) (cont-frac n d 2))
(= (/ 2.0 3.0) (cont-frac n d 3))
(= (/ 3.0 5.0) (cont-frac n d 4))
(= (/ 5.0 8.0) (cont-frac n d 5))
(= (/ 8.0 13.0) (cont-frac n d 6))
(= (/ 13.0 21.0) (cont-frac n d 7))
(= (/ 21.0 34.0) (cont-frac n d 8))
(= (/ 34.0 55.0) (cont-frac n d 9))
(= (/ 55.0 89.0) (cont-frac n d 10))
(define d (lambda (x) 2.0))
(= (/ 1.0 2.0) (cont-frac n d 1))
(= (/ 2.0 5.0) (cont-frac n d 2))
(= (/ 5.0 12.0) (cont-frac n d 3))
(= (/ 12.0 29.0) (cont-frac n d 4))
(= (/ 29.0 70.0) (cont-frac n d 5))
(define n (lambda (x) 2.0))
(define d (lambda (x) 1.0))
(= 2.0 (cont-frac n d 1))
(= (/ 2.0 3.0) (cont-frac n d 2))
(= (/ 6.0 5.0) (cont-frac n d 3))
(= (/ 10.0 11.0) (cont-frac n d 4))
(= (/ 22.0 21.0) (cont-frac n d 5))
get an approximation to 1 / phi that is accurate to 4 decimal places :
(test)
-- > 10
Again , as before , the answer we get is 10 .
(/ 1.0 phi)
-- > 0.6180339882723972
(cont-frac n d 10)
-- > 0.6179775280898876 |
814f4ccbe8a0e782cd776892240ef51b288d9b9828ce4bcba823658386a48977 | green-coder/embassy | util.cljc | (ns embassy.client.util
(:refer-clojure :exclude [get-in]))
#?(:cljs
(defn get-in
"Returns a dom element from a sequence of indexes."
[^js dom-element path]
(reduce (fn [dom-element index]
(-> dom-element .-childNodes (.item index)))
dom-element
path)))
#_
(get-in (-> js/document (.getElementById "app"))
[0 0 1 5])
(defn seq-indexed [coll]
(map-indexed vector coll))
(defn replace-subvec
"Returns a vector with a sub-section at position `index` replaced by the vector `sv`."
[v index sv]
(reduce (fn [v [index element]]
(assoc v index element))
v
(mapv vector (range index (+ index (count sv))) sv)))
#_(replace-subvec [:a :b :c] 1 [:x])
#_(replace-subvec [:a :b :c] 3 [:x :y :z])
| null | https://raw.githubusercontent.com/green-coder/embassy/178a866928944e7678c875888246000add503870/src/embassy/client/util.cljc | clojure | (ns embassy.client.util
(:refer-clojure :exclude [get-in]))
#?(:cljs
(defn get-in
"Returns a dom element from a sequence of indexes."
[^js dom-element path]
(reduce (fn [dom-element index]
(-> dom-element .-childNodes (.item index)))
dom-element
path)))
#_
(get-in (-> js/document (.getElementById "app"))
[0 0 1 5])
(defn seq-indexed [coll]
(map-indexed vector coll))
(defn replace-subvec
"Returns a vector with a sub-section at position `index` replaced by the vector `sv`."
[v index sv]
(reduce (fn [v [index element]]
(assoc v index element))
v
(mapv vector (range index (+ index (count sv))) sv)))
#_(replace-subvec [:a :b :c] 1 [:x])
#_(replace-subvec [:a :b :c] 3 [:x :y :z])
| |
505c80934a272d96fd16af05a8a84e6dc7f0e55a710bc86aea052bfc3456f8e3 | gonzojive/elephant | stress-test.lisp |
(in-package :elephant-tests)
(defparameter *spec* '(:bdb "/Users/eslick/Work/db/test"))
(defparameter *names* '("David" "Jim" "Peter" "Thomas"
"Arthur" "Jans" "Klaus" "James" "Martin"))
(defclass person ()
((name :initform (elt *names* (random (length *names*)))
:accessor name
:index t)
;; Actually the index t shouldn't be needed, but since elephant
;; sometimes complained that "person is not an index class", I try if this fixes it.
(age :initform (random 100) :accessor age :index t)
( made - by : initform ( elephant - utils::ele - thread - hash - key ) )
(updated-by :initform nil :accessor updated-by))
(:metaclass elephant:persistent-metaclass))
Should be 10000 , but for me elephant ca n't allocate memory after 3000 .
I think the problem it is becuase the number of locks ( 999 ) is = max 1000 . see db_stat -e
(defparameter +age+ 50)
;; I have tried different places for with-transaction below
(defun make-persons (nr-objects &optional (batch-size 500))
(loop for i from 1 to (/ nr-objects batch-size) do
(elephant:with-transaction ()
(loop for j from 1 to batch-size do
(let ((person (make-instance 'person)))
(when (zerop (mod (+ (* i batch-size) j) 1000))
(format t "~D ~a " (+ (* i batch-size) j) (name person))))))))
(defun ensure-clean-store ()
t)
( let ( ( dir ( cl - fad : pathname - as - directory ( second * spec * ) ) ) )
( when ( cl - fad : directory - exists - )
;; (cl-fad:delete-directory-and-files dir))
( ensure - directories - exist ) ) )
(defun my-test-create ()
(ensure-clean-store)
(elephant:with-open-store (*spec*)
(make-persons *nr-persons*)))
(defun subsets (size list)
(let ((subsets (cons nil nil)))
(loop for elt in list
for i from 0 do
(when (= 0 (mod i size))
(setf (car subsets) (nreverse (car subsets)))
(push nil subsets))
(push elt (car subsets)))
(setf (car subsets) (nreverse (car subsets)))
(cdr (nreverse subsets))))
(defmacro do-subsets ((subset subset-size list) &body body)
`(loop for ,subset in (subsets ,subset-size ,list) do
,@body))
(defun my-test-update (&key (new-age 27))
"Test updating all persons by changing their age."
(elephant:with-open-store (*spec*)
(do-subsets (subset 500 (elephant:get-instances-by-class 'person))
(format t "Doing subset~%")
(elephant:with-transaction ()
(mapcar #'(lambda (person)
(setf (age person) new-age))
subset)))))
(defun my-test-load ()
"Test loading all persons by computing their average age."
(let ((nr-persons 0)
(total-age 0)
(show-first nil))
(elephant:with-open-store (*spec*)
(elephant:with-transaction ()
(mapcar #'(lambda (person)
(incf nr-persons)
(print nr-persons)
(when (and show-first (> show-first))
(format t "Sample person ~a~%F" show-first)
(describe person)
(decf show-first))
(incf total-age (age person)))
(elephant:get-instances-by-class 'person))))
(values (coerce (/ total-age nr-persons) 'float)
nr-persons
total-age)))
(defun check-basic-setup ()
(my-test-update :new-age +age+)
(multiple-value-bind (average nr-persons)
(my-test-load)
(assert (= +age+ average))
(assert (= nr-persons *nr-persons*))))
| null | https://raw.githubusercontent.com/gonzojive/elephant/b29a012ab75ccea2fc7fc4f1e9d5e821f0bd60bf/tests/stress-test.lisp | lisp | Actually the index t shouldn't be needed, but since elephant
sometimes complained that "person is not an index class", I try if this fixes it.
I have tried different places for with-transaction below
(cl-fad:delete-directory-and-files dir)) |
(in-package :elephant-tests)
(defparameter *spec* '(:bdb "/Users/eslick/Work/db/test"))
(defparameter *names* '("David" "Jim" "Peter" "Thomas"
"Arthur" "Jans" "Klaus" "James" "Martin"))
(defclass person ()
((name :initform (elt *names* (random (length *names*)))
:accessor name
:index t)
(age :initform (random 100) :accessor age :index t)
( made - by : initform ( elephant - utils::ele - thread - hash - key ) )
(updated-by :initform nil :accessor updated-by))
(:metaclass elephant:persistent-metaclass))
Should be 10000 , but for me elephant ca n't allocate memory after 3000 .
I think the problem it is becuase the number of locks ( 999 ) is = max 1000 . see db_stat -e
(defparameter +age+ 50)
(defun make-persons (nr-objects &optional (batch-size 500))
(loop for i from 1 to (/ nr-objects batch-size) do
(elephant:with-transaction ()
(loop for j from 1 to batch-size do
(let ((person (make-instance 'person)))
(when (zerop (mod (+ (* i batch-size) j) 1000))
(format t "~D ~a " (+ (* i batch-size) j) (name person))))))))
(defun ensure-clean-store ()
t)
( let ( ( dir ( cl - fad : pathname - as - directory ( second * spec * ) ) ) )
( when ( cl - fad : directory - exists - )
( ensure - directories - exist ) ) )
(defun my-test-create ()
(ensure-clean-store)
(elephant:with-open-store (*spec*)
(make-persons *nr-persons*)))
(defun subsets (size list)
(let ((subsets (cons nil nil)))
(loop for elt in list
for i from 0 do
(when (= 0 (mod i size))
(setf (car subsets) (nreverse (car subsets)))
(push nil subsets))
(push elt (car subsets)))
(setf (car subsets) (nreverse (car subsets)))
(cdr (nreverse subsets))))
(defmacro do-subsets ((subset subset-size list) &body body)
`(loop for ,subset in (subsets ,subset-size ,list) do
,@body))
(defun my-test-update (&key (new-age 27))
"Test updating all persons by changing their age."
(elephant:with-open-store (*spec*)
(do-subsets (subset 500 (elephant:get-instances-by-class 'person))
(format t "Doing subset~%")
(elephant:with-transaction ()
(mapcar #'(lambda (person)
(setf (age person) new-age))
subset)))))
(defun my-test-load ()
"Test loading all persons by computing their average age."
(let ((nr-persons 0)
(total-age 0)
(show-first nil))
(elephant:with-open-store (*spec*)
(elephant:with-transaction ()
(mapcar #'(lambda (person)
(incf nr-persons)
(print nr-persons)
(when (and show-first (> show-first))
(format t "Sample person ~a~%F" show-first)
(describe person)
(decf show-first))
(incf total-age (age person)))
(elephant:get-instances-by-class 'person))))
(values (coerce (/ total-age nr-persons) 'float)
nr-persons
total-age)))
(defun check-basic-setup ()
(my-test-update :new-age +age+)
(multiple-value-bind (average nr-persons)
(my-test-load)
(assert (= +age+ average))
(assert (= nr-persons *nr-persons*))))
|
b57ad3d870b34190a49d45633a5477ea18b3459f195e8331a4732aab05cbae22 | egraphdb/egraphdb | egraph_fuse.erl | %%%-------------------------------------------------------------------
@author neerajsharma
( C ) 2018 ,
%%% @doc
%%%
%%% @end
%%%-------------------------------------------------------------------
-module(egraph_fuse).
-author("neerajsharma").
-export([api_model_to_fuse/1,
circuit_broken/1,
setup/1,
blown/1,
melt/1]).
-spec api_model_to_fuse(atom()) -> undefined | atom().
api_model_to_fuse(_) ->
http_api_latency.
-spec circuit_broken(undefined | http_api_latency) -> boolean().
circuit_broken(undefined) ->
false;
circuit_broken(http_api_latency = Fuse) ->
blown(Fuse).
-spec setup(http_api_latency) -> ok.
setup(http_api_latency = Fuse) ->
Config = egraph_config_util:circuit_breaker_config(Fuse),
MaxR = proplists:get_value(maxr, Config),
MaxT = proplists:get_value(maxt, Config),
ResetMsec = proplists:get_value(reset, Config),
Strategy = {standard, MaxR, MaxT},
Refresh = {reset, ResetMsec},
Opts = {Strategy, Refresh},
fuse:install(Fuse, Opts).
-spec blown(http_api_latency) -> boolean().
blown(http_api_latency = Fuse) ->
%% async querying fuse state is known to have race conditions
%% but is very fast and do not block unlike sync.
%% This implies that it will take a while for fuse to reflect
%% true value, but that is alright for a short while.
fuse:ask(Fuse, async_dirty) == blown.
-spec melt(http_api_latency) -> ok.
melt(http_api_latency = Fuse) ->
fuse:melt(Fuse).
| null | https://raw.githubusercontent.com/egraphdb/egraphdb/41a0131be227f7f0a35ba0e2c1cb23d70cd86b03/src/egraph_fuse.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
async querying fuse state is known to have race conditions
but is very fast and do not block unlike sync.
This implies that it will take a while for fuse to reflect
true value, but that is alright for a short while. | @author neerajsharma
( C ) 2018 ,
-module(egraph_fuse).
-author("neerajsharma").
-export([api_model_to_fuse/1,
circuit_broken/1,
setup/1,
blown/1,
melt/1]).
-spec api_model_to_fuse(atom()) -> undefined | atom().
api_model_to_fuse(_) ->
http_api_latency.
-spec circuit_broken(undefined | http_api_latency) -> boolean().
circuit_broken(undefined) ->
false;
circuit_broken(http_api_latency = Fuse) ->
blown(Fuse).
-spec setup(http_api_latency) -> ok.
setup(http_api_latency = Fuse) ->
Config = egraph_config_util:circuit_breaker_config(Fuse),
MaxR = proplists:get_value(maxr, Config),
MaxT = proplists:get_value(maxt, Config),
ResetMsec = proplists:get_value(reset, Config),
Strategy = {standard, MaxR, MaxT},
Refresh = {reset, ResetMsec},
Opts = {Strategy, Refresh},
fuse:install(Fuse, Opts).
-spec blown(http_api_latency) -> boolean().
blown(http_api_latency = Fuse) ->
fuse:ask(Fuse, async_dirty) == blown.
-spec melt(http_api_latency) -> ok.
melt(http_api_latency = Fuse) ->
fuse:melt(Fuse).
|
fabcf317b25f9d029ab59f59acf593a02f2e9818c4bad122976d978390d51b45 | rbkmoney/hellgate | hg_maybe.erl | -module(hg_maybe).
-export([apply/2]).
-export([apply/3]).
-export([get_defined/1]).
-export([get_defined/2]).
-type maybe(T) ::
undefined | T.
-export_type([maybe/1]).
-spec apply(fun(), Arg :: undefined | term()) -> term().
apply(Fun, Arg) ->
hg_maybe:apply(Fun, Arg, undefined).
-spec apply(fun(), Arg :: undefined | term(), Default :: term()) -> term().
apply(Fun, Arg, _Default) when Arg =/= undefined ->
Fun(Arg);
apply(_Fun, undefined, Default) ->
Default.
-spec get_defined([maybe(T)]) -> T | no_return().
get_defined([]) ->
erlang:error(badarg);
get_defined([Value | _Tail]) when Value =/= undefined ->
Value;
get_defined([undefined | Tail]) ->
get_defined(Tail).
-spec get_defined(maybe(T), maybe(T)) -> T | no_return().
get_defined(V1, V2) ->
get_defined([V1, V2]).
| null | https://raw.githubusercontent.com/rbkmoney/hellgate/c3e7413db06296a72fb64268eca98e63379d2ef5/apps/hellgate/src/hg_maybe.erl | erlang | -module(hg_maybe).
-export([apply/2]).
-export([apply/3]).
-export([get_defined/1]).
-export([get_defined/2]).
-type maybe(T) ::
undefined | T.
-export_type([maybe/1]).
-spec apply(fun(), Arg :: undefined | term()) -> term().
apply(Fun, Arg) ->
hg_maybe:apply(Fun, Arg, undefined).
-spec apply(fun(), Arg :: undefined | term(), Default :: term()) -> term().
apply(Fun, Arg, _Default) when Arg =/= undefined ->
Fun(Arg);
apply(_Fun, undefined, Default) ->
Default.
-spec get_defined([maybe(T)]) -> T | no_return().
get_defined([]) ->
erlang:error(badarg);
get_defined([Value | _Tail]) when Value =/= undefined ->
Value;
get_defined([undefined | Tail]) ->
get_defined(Tail).
-spec get_defined(maybe(T), maybe(T)) -> T | no_return().
get_defined(V1, V2) ->
get_defined([V1, V2]).
| |
e1bd5b6ed8e804ebd7e883e5a0c7b87c507fae62d123c5a1e54d29eeaa4ea6f3 | paurkedal/inhca | pkg.ml | #! /usr/bin/env ocaml
#use "topfind"
#require "topkg"
open Topkg
let licenses = [Pkg.std_file "COPYING"]
let opams = [Pkg.opam_file "inhca.opam"]
let build_cmd c os targets =
let ocamlbuild = Conf.tool "ocamlbuild" os in
let build_dir = Conf.build_dir c in
OS.Cmd.run @@
Cmd.(ocamlbuild
% "-use-ocamlfind"
% "-plugin-tag" % "package(eliom.ocamlbuild)"
% "-build-dir" % build_dir
%% on (Conf.debug c) (of_list ["-tag"; "debug"])
%% of_list targets)
let build = Pkg.build ~cmd:build_cmd ()
let () = Pkg.describe ~licenses ~opams ~build "inhca" @@ fun c ->
Ok [
Pkg.mllib ~api:[] "web/server/inhca.mllib";
Pkg.share ~dst:"static/" "web/static/inhca.css";
Pkg.share ~dst:"static/" "web/client/inhca.js";
]
| null | https://raw.githubusercontent.com/paurkedal/inhca/c2cc4abce931684fb17ac88169822178956f18e3/pkg/pkg.ml | ocaml | #! /usr/bin/env ocaml
#use "topfind"
#require "topkg"
open Topkg
let licenses = [Pkg.std_file "COPYING"]
let opams = [Pkg.opam_file "inhca.opam"]
let build_cmd c os targets =
let ocamlbuild = Conf.tool "ocamlbuild" os in
let build_dir = Conf.build_dir c in
OS.Cmd.run @@
Cmd.(ocamlbuild
% "-use-ocamlfind"
% "-plugin-tag" % "package(eliom.ocamlbuild)"
% "-build-dir" % build_dir
%% on (Conf.debug c) (of_list ["-tag"; "debug"])
%% of_list targets)
let build = Pkg.build ~cmd:build_cmd ()
let () = Pkg.describe ~licenses ~opams ~build "inhca" @@ fun c ->
Ok [
Pkg.mllib ~api:[] "web/server/inhca.mllib";
Pkg.share ~dst:"static/" "web/static/inhca.css";
Pkg.share ~dst:"static/" "web/client/inhca.js";
]
| |
00235939a52ea46f4559960a1f4f4033db239c0e8afd06b98b3246d9dd27372a | gafiatulin/codewars | Arrays.hs | -- Sum of positive
--
module Codewars.Arrays where
positiveSum :: [Int] -> Int
positiveSum = sum . filter (> 0)
| null | https://raw.githubusercontent.com/gafiatulin/codewars/535db608333e854be93ecfc165686a2162264fef/src/8%20kyu/Arrays.hs | haskell | Sum of positive
|
module Codewars.Arrays where
positiveSum :: [Int] -> Int
positiveSum = sum . filter (> 0)
|
3a5adfc394c78c214ab8e4fa4071bec4cef3317c599b97813a630fcea10cb8f4 | ocaml-multicore/tezos | test_preendorsement_functor.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
* Testing
-------
Component : Protocol ( preendorsement ) in Full_construction & Application modes
Invocation : dune exec src / proto_alpha / lib_protocol / test / main.exe -- test " ^preendorsement$ "
Subject : preendorsement inclusion in a block
-------
Component: Protocol (preendorsement) in Full_construction & Application modes
Invocation: dune exec src/proto_alpha/lib_protocol/test/main.exe -- test "^preendorsement$"
Subject: preendorsement inclusion in a block
*)
open Protocol
open Alpha_context
(****************************************************************)
(* Utility functions *)
(****************************************************************)
module type MODE = sig
val name : string
val baking_mode : Block.baking_mode
end
module BakeWithMode (Mode : MODE) : sig
val tests : unit Alcotest_lwt.test_case trace
end = struct
let name = Mode.name
let bake = Block.bake ~baking_mode:Mode.baking_mode
let aux_simple_preendorsement_inclusion ?(payload_round = Some Round.zero)
?(locked_round = Some Round.zero) ?(block_round = 1)
?(preend_round = Round.zero)
?(preend_branch = fun _predpred pred _curr -> pred)
?(preendorsed_block = fun _predpred _pred curr -> curr)
?(mk_ops = fun op -> [op])
?(get_delegate_and_slot =
fun _predpred _pred _curr -> return (None, None))
?(post_process = Ok (fun _ -> return_unit)) ~loc () =
Context.init ~consensus_threshold:1 5 >>=? fun (genesis, _) ->
bake genesis >>=? fun b1 ->
Op.endorsement ~endorsed_block:b1 (B genesis) () >>=? fun endo ->
let endo = Operation.pack endo in
bake b1 ~operations:[endo] >>=? fun b2 ->
let ctxt = Context.B (preend_branch genesis b1 b2) in
let endorsed_block = preendorsed_block genesis b1 b2 in
get_delegate_and_slot genesis b1 b2 >>=? fun (delegate, slot) ->
Op.preendorsement
?delegate
?slot
~round:preend_round
~endorsed_block
ctxt
()
>>=? fun p ->
let operations = endo :: (mk_ops @@ Operation.pack p) in
bake
~payload_round
~locked_round
~policy:(By_round block_round)
~operations
b1
>>= fun res ->
match (res, post_process) with
| (Ok ok, Ok success_fun) -> success_fun ok
| (Error _, Error (error_title, _error_category)) ->
Assert.proto_error_with_info ~loc res error_title
| (Ok _, Error _) -> Assert.error ~loc res (fun _ -> false)
| (Error _, Ok _) -> Assert.error ~loc res (fun _ -> false)
(****************************************************************)
(* Tests *)
(****************************************************************)
* OK : bake a block " _ b2_1 " at round 1 , containing a PQC and a locked
round of round 0
round of round 0 *)
let include_preendorsement_in_block_with_locked_round () =
aux_simple_preendorsement_inclusion ~loc:__LOC__ () >>=? fun _ ->
return_unit
* : bake a block " _ b2_1 " at round 1 , containing a PQC and a locked
round of round 0 . But the preendorsement is on a bad branch
round of round 0. But the preendorsement is on a bad branch *)
let test_preendorsement_with_bad_branch () =
aux_simple_preendorsement_inclusion
(* preendorsement should be on branch _pred to be valid *)
~preend_branch:(fun predpred _pred _curr -> predpred)
~loc:__LOC__
~post_process:(Error ("Wrong consensus operation branch", `Temporary))
()
* : The same preendorsement injected twice in the PQC
let duplicate_preendorsement_in_pqc () =
aux_simple_preendorsement_inclusion (* inject the op twice *)
~mk_ops:(fun op -> [op; op])
~loc:__LOC__
~post_process:(Error ("double inclusion of consensus operation", `Branch))
()
* : locked round declared in the block is not smaller than
that block 's round
that block's round *)
let locked_round_not_before_block_round () =
aux_simple_preendorsement_inclusion
default locked_round = 0 < block_round = 1 for this aux function
~block_round:0
~loc:__LOC__
~post_process:(Error ("Locked round not smaller than round", `Permanent))
()
* : because we announce a locked_round , but we do n't provide the
preendorsement quorum certificate in the operations
preendorsement quorum certificate in the operations *)
let with_locked_round_in_block_but_without_any_pqc () =
(* This test only fails in Application mode. If full_construction mode, the
given locked_round is not used / checked. Moreover, the test succeed in
this case.
*)
let post_process =
if Mode.baking_mode == Block.Application then
Error ("Wrong fitness", `Permanent)
else Ok (fun _ -> return_unit)
in
aux_simple_preendorsement_inclusion
with declared locked_round but without a PQC in the ops
~mk_ops:(fun _p -> [])
~loc:__LOC__
~post_process
()
* : The preendorsed block is the pred one , not the current one
let preendorsement_has_wrong_level () =
aux_simple_preendorsement_inclusion
(* preendorsement should be for _curr block to be valid *)
~preendorsed_block:(fun _predpred pred _curr -> pred)
~loc:__LOC__
~post_process:(Error ("wrong level for consensus operation", `Permanent))
()
(** OK: explicit the correct endorser and preendorsing slot in the test *)
let preendorsement_in_block_with_good_slot () =
aux_simple_preendorsement_inclusion
~get_delegate_and_slot:(fun _predpred _pred curr ->
let module V = Plugin.RPC.Validators in
Context.get_endorsers (B curr) >>=? function
| {V.delegate; slots = s :: _ as slots; _} :: _ ->
return (Some (delegate, slots), Some s)
| _ -> assert false
there is at least one endorser with a slot
~loc:__LOC__
()
* : the used slot for injecting the endorsement is not the canonical one
let preendorsement_in_block_with_wrong_slot () =
aux_simple_preendorsement_inclusion
~get_delegate_and_slot:(fun _predpred _pred curr ->
let module V = Plugin.RPC.Validators in
Context.get_endorsers (B curr) >>=? function
| {V.delegate; V.slots = _ :: non_canonical_slot :: _ as slots; _} :: _
->
return (Some (delegate, slots), Some non_canonical_slot)
| _ -> assert false
there is at least one endorser with a slot
~loc:__LOC__
~post_process:(Error ("wrong slot", `Permanent))
()
* : the delegate tries to injects with a canonical slot of another delegate
let preendorsement_in_block_with_wrong_signature () =
aux_simple_preendorsement_inclusion
~get_delegate_and_slot:(fun _predpred _pred curr ->
let module V = Plugin.RPC.Validators in
Context.get_endorsers (B curr) >>=? function
| {V.delegate; _} :: {V.slots = s :: _ as slots; _} :: _ ->
(* the canonical slot s is not owned by the delegate "delegate" !*)
return (Some (delegate, slots), Some s)
| _ -> assert false
there is at least one endorser with a slot
~loc:__LOC__
~post_process:(Error ("Invalid operation signature", `Permanent))
()
* : can not have a locked_round higher than attached PQC 's round
let locked_round_is_higher_than_pqc_round () =
(* This test only fails in Application mode. If full_construction mode, the
given locked_round is not used / checked. Moreover, the test succeed in
this case.
*)
let post_process =
if Mode.baking_mode == Application then
Error ("wrong round for consensus operation", `Permanent)
else Ok (fun _ -> return_unit)
in
aux_simple_preendorsement_inclusion
~preend_round:Round.zero
~locked_round:(Some (Round.succ Round.zero))
~block_round:2
~loc:__LOC__
~post_process
()
let my_tztest title test =
Tztest.tztest (Format.sprintf "%s: %s" name title) test
let tests =
[
my_tztest
"ok: include_preendorsement_in_block_with_locked_round"
`Quick
include_preendorsement_in_block_with_locked_round;
my_tztest
"ko: test_preendorsement_with_bad_branch"
`Quick
test_preendorsement_with_bad_branch;
my_tztest
"ko: duplicate_preendorsement_in_pqc"
`Quick
duplicate_preendorsement_in_pqc;
my_tztest
"ko:locked_round_not_before_block_round"
`Quick
locked_round_not_before_block_round;
my_tztest
"ko: with_locked_round_in_block_but_without_any_pqc"
`Quick
with_locked_round_in_block_but_without_any_pqc;
my_tztest
"ko: preendorsement_has_wrong_level"
`Quick
preendorsement_has_wrong_level;
my_tztest
"ok: preendorsement_in_block_with_good_slot"
`Quick
preendorsement_in_block_with_good_slot;
my_tztest
"ko: preendorsement_in_block_with_wrong_slot"
`Quick
preendorsement_in_block_with_wrong_slot;
my_tztest
"ko: preendorsement_in_block_with_wrong_signature"
`Quick
preendorsement_in_block_with_wrong_signature;
my_tztest
"ko: locked_round_is_higher_than_pqc_round"
`Quick
locked_round_is_higher_than_pqc_round;
]
end
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/src/proto_012_Psithaca/lib_protocol/test/test_preendorsement_functor.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
**************************************************************
Utility functions
**************************************************************
**************************************************************
Tests
**************************************************************
preendorsement should be on branch _pred to be valid
inject the op twice
This test only fails in Application mode. If full_construction mode, the
given locked_round is not used / checked. Moreover, the test succeed in
this case.
preendorsement should be for _curr block to be valid
* OK: explicit the correct endorser and preendorsing slot in the test
the canonical slot s is not owned by the delegate "delegate" !
This test only fails in Application mode. If full_construction mode, the
given locked_round is not used / checked. Moreover, the test succeed in
this case.
| Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
* Testing
-------
Component : Protocol ( preendorsement ) in Full_construction & Application modes
Invocation : dune exec src / proto_alpha / lib_protocol / test / main.exe -- test " ^preendorsement$ "
Subject : preendorsement inclusion in a block
-------
Component: Protocol (preendorsement) in Full_construction & Application modes
Invocation: dune exec src/proto_alpha/lib_protocol/test/main.exe -- test "^preendorsement$"
Subject: preendorsement inclusion in a block
*)
open Protocol
open Alpha_context
module type MODE = sig
val name : string
val baking_mode : Block.baking_mode
end
module BakeWithMode (Mode : MODE) : sig
val tests : unit Alcotest_lwt.test_case trace
end = struct
let name = Mode.name
let bake = Block.bake ~baking_mode:Mode.baking_mode
let aux_simple_preendorsement_inclusion ?(payload_round = Some Round.zero)
?(locked_round = Some Round.zero) ?(block_round = 1)
?(preend_round = Round.zero)
?(preend_branch = fun _predpred pred _curr -> pred)
?(preendorsed_block = fun _predpred _pred curr -> curr)
?(mk_ops = fun op -> [op])
?(get_delegate_and_slot =
fun _predpred _pred _curr -> return (None, None))
?(post_process = Ok (fun _ -> return_unit)) ~loc () =
Context.init ~consensus_threshold:1 5 >>=? fun (genesis, _) ->
bake genesis >>=? fun b1 ->
Op.endorsement ~endorsed_block:b1 (B genesis) () >>=? fun endo ->
let endo = Operation.pack endo in
bake b1 ~operations:[endo] >>=? fun b2 ->
let ctxt = Context.B (preend_branch genesis b1 b2) in
let endorsed_block = preendorsed_block genesis b1 b2 in
get_delegate_and_slot genesis b1 b2 >>=? fun (delegate, slot) ->
Op.preendorsement
?delegate
?slot
~round:preend_round
~endorsed_block
ctxt
()
>>=? fun p ->
let operations = endo :: (mk_ops @@ Operation.pack p) in
bake
~payload_round
~locked_round
~policy:(By_round block_round)
~operations
b1
>>= fun res ->
match (res, post_process) with
| (Ok ok, Ok success_fun) -> success_fun ok
| (Error _, Error (error_title, _error_category)) ->
Assert.proto_error_with_info ~loc res error_title
| (Ok _, Error _) -> Assert.error ~loc res (fun _ -> false)
| (Error _, Ok _) -> Assert.error ~loc res (fun _ -> false)
* OK : bake a block " _ b2_1 " at round 1 , containing a PQC and a locked
round of round 0
round of round 0 *)
let include_preendorsement_in_block_with_locked_round () =
aux_simple_preendorsement_inclusion ~loc:__LOC__ () >>=? fun _ ->
return_unit
* : bake a block " _ b2_1 " at round 1 , containing a PQC and a locked
round of round 0 . But the preendorsement is on a bad branch
round of round 0. But the preendorsement is on a bad branch *)
let test_preendorsement_with_bad_branch () =
aux_simple_preendorsement_inclusion
~preend_branch:(fun predpred _pred _curr -> predpred)
~loc:__LOC__
~post_process:(Error ("Wrong consensus operation branch", `Temporary))
()
* : The same preendorsement injected twice in the PQC
let duplicate_preendorsement_in_pqc () =
~mk_ops:(fun op -> [op; op])
~loc:__LOC__
~post_process:(Error ("double inclusion of consensus operation", `Branch))
()
* : locked round declared in the block is not smaller than
that block 's round
that block's round *)
let locked_round_not_before_block_round () =
aux_simple_preendorsement_inclusion
default locked_round = 0 < block_round = 1 for this aux function
~block_round:0
~loc:__LOC__
~post_process:(Error ("Locked round not smaller than round", `Permanent))
()
* : because we announce a locked_round , but we do n't provide the
preendorsement quorum certificate in the operations
preendorsement quorum certificate in the operations *)
let with_locked_round_in_block_but_without_any_pqc () =
let post_process =
if Mode.baking_mode == Block.Application then
Error ("Wrong fitness", `Permanent)
else Ok (fun _ -> return_unit)
in
aux_simple_preendorsement_inclusion
with declared locked_round but without a PQC in the ops
~mk_ops:(fun _p -> [])
~loc:__LOC__
~post_process
()
* : The preendorsed block is the pred one , not the current one
let preendorsement_has_wrong_level () =
aux_simple_preendorsement_inclusion
~preendorsed_block:(fun _predpred pred _curr -> pred)
~loc:__LOC__
~post_process:(Error ("wrong level for consensus operation", `Permanent))
()
let preendorsement_in_block_with_good_slot () =
aux_simple_preendorsement_inclusion
~get_delegate_and_slot:(fun _predpred _pred curr ->
let module V = Plugin.RPC.Validators in
Context.get_endorsers (B curr) >>=? function
| {V.delegate; slots = s :: _ as slots; _} :: _ ->
return (Some (delegate, slots), Some s)
| _ -> assert false
there is at least one endorser with a slot
~loc:__LOC__
()
* : the used slot for injecting the endorsement is not the canonical one
let preendorsement_in_block_with_wrong_slot () =
aux_simple_preendorsement_inclusion
~get_delegate_and_slot:(fun _predpred _pred curr ->
let module V = Plugin.RPC.Validators in
Context.get_endorsers (B curr) >>=? function
| {V.delegate; V.slots = _ :: non_canonical_slot :: _ as slots; _} :: _
->
return (Some (delegate, slots), Some non_canonical_slot)
| _ -> assert false
there is at least one endorser with a slot
~loc:__LOC__
~post_process:(Error ("wrong slot", `Permanent))
()
* : the delegate tries to injects with a canonical slot of another delegate
let preendorsement_in_block_with_wrong_signature () =
aux_simple_preendorsement_inclusion
~get_delegate_and_slot:(fun _predpred _pred curr ->
let module V = Plugin.RPC.Validators in
Context.get_endorsers (B curr) >>=? function
| {V.delegate; _} :: {V.slots = s :: _ as slots; _} :: _ ->
return (Some (delegate, slots), Some s)
| _ -> assert false
there is at least one endorser with a slot
~loc:__LOC__
~post_process:(Error ("Invalid operation signature", `Permanent))
()
* : can not have a locked_round higher than attached PQC 's round
let locked_round_is_higher_than_pqc_round () =
let post_process =
if Mode.baking_mode == Application then
Error ("wrong round for consensus operation", `Permanent)
else Ok (fun _ -> return_unit)
in
aux_simple_preendorsement_inclusion
~preend_round:Round.zero
~locked_round:(Some (Round.succ Round.zero))
~block_round:2
~loc:__LOC__
~post_process
()
let my_tztest title test =
Tztest.tztest (Format.sprintf "%s: %s" name title) test
let tests =
[
my_tztest
"ok: include_preendorsement_in_block_with_locked_round"
`Quick
include_preendorsement_in_block_with_locked_round;
my_tztest
"ko: test_preendorsement_with_bad_branch"
`Quick
test_preendorsement_with_bad_branch;
my_tztest
"ko: duplicate_preendorsement_in_pqc"
`Quick
duplicate_preendorsement_in_pqc;
my_tztest
"ko:locked_round_not_before_block_round"
`Quick
locked_round_not_before_block_round;
my_tztest
"ko: with_locked_round_in_block_but_without_any_pqc"
`Quick
with_locked_round_in_block_but_without_any_pqc;
my_tztest
"ko: preendorsement_has_wrong_level"
`Quick
preendorsement_has_wrong_level;
my_tztest
"ok: preendorsement_in_block_with_good_slot"
`Quick
preendorsement_in_block_with_good_slot;
my_tztest
"ko: preendorsement_in_block_with_wrong_slot"
`Quick
preendorsement_in_block_with_wrong_slot;
my_tztest
"ko: preendorsement_in_block_with_wrong_signature"
`Quick
preendorsement_in_block_with_wrong_signature;
my_tztest
"ko: locked_round_is_higher_than_pqc_round"
`Quick
locked_round_is_higher_than_pqc_round;
]
end
|
0125ec0402298c144447a34091307f47363430109fb8e7b4232d222e1fa63e28 | nuvla/api-server | cookies_test.clj | (ns sixsq.nuvla.auth.cookies-test
(:refer-clojure :exclude [update])
(:require
[clojure.test :refer [deftest is use-fixtures]]
[environ.core :as environ]
[ring.util.codec :as codec]
[sixsq.nuvla.auth.cookies :as t]
[sixsq.nuvla.auth.env-fixture :as env-fixture]
[sixsq.nuvla.server.app.params :as p]
[sixsq.nuvla.server.resources.group :as group]
[sixsq.nuvla.server.resources.lifecycle-test-utils :as ltu]))
(use-fixtures :once ltu/with-test-server-fixture)
(def base-uri (str p/service-context group/resource-type))
(defn serialize-cookie-value
"replaces the map cookie value with a serialized string"
[{:keys [value] :as cookie}]
(assoc cookie :value (codec/form-encode value)))
(defn damaged-cookie-value
"replaces the map cookie value with a serialized string, but modifies it to make it invalid"
[{:keys [value] :as cookie}]
(assoc cookie :value (str (codec/form-encode value) "-INVALID")))
(deftest revoked-cookie-ok
(let [revoked (t/revoked-cookie)]
(is (map? revoked))
(is (= "INVALID" (get-in revoked [:value]))))
(let [k "cookie.name"
revoked (t/revoked-cookie k)]
(is (map? revoked))
(is (= "INVALID" (get-in revoked [k :value])))))
(deftest claims-cookie-ok
(with-redefs [environ/env env-fixture/env-map]
(let [claims {:alpha "a", :beta "b", :gamma 3}
cookie (t/create-cookie claims)
k "cookie.name"
named-cookie (t/create-cookie claims k)]
(is (map? cookie))
(is (not= "INVALID" (:value cookie)))
(is (:value cookie))
(is (map? named-cookie))
(is (not= "INVALID" (get-in named-cookie [k :value])))
(is (get-in named-cookie [k :value])))))
(deftest check-extract-cookie-info
(with-redefs [environ/env env-fixture/env-map]
(let [cookie-info {:user-id "user"
:claims "role1 role2"
:session "session"}]
(is (nil? (t/extract-cookie-info nil)))
(is (nil? (-> cookie-info
t/create-cookie
damaged-cookie-value
t/extract-cookie-info)))
(let [cookie-info-extracted (-> cookie-info
t/create-cookie
serialize-cookie-value
t/extract-cookie-info)]
(is (= {:claims "role1 role2"
:session "session"
:user-id "user"} (dissoc cookie-info-extracted :exp)))
(is (some? (:exp cookie-info-extracted)))))))
| null | https://raw.githubusercontent.com/nuvla/api-server/6d06c268b8247372af697d0bb197ac59d9672cb1/code/test/sixsq/nuvla/auth/cookies_test.clj | clojure | (ns sixsq.nuvla.auth.cookies-test
(:refer-clojure :exclude [update])
(:require
[clojure.test :refer [deftest is use-fixtures]]
[environ.core :as environ]
[ring.util.codec :as codec]
[sixsq.nuvla.auth.cookies :as t]
[sixsq.nuvla.auth.env-fixture :as env-fixture]
[sixsq.nuvla.server.app.params :as p]
[sixsq.nuvla.server.resources.group :as group]
[sixsq.nuvla.server.resources.lifecycle-test-utils :as ltu]))
(use-fixtures :once ltu/with-test-server-fixture)
(def base-uri (str p/service-context group/resource-type))
(defn serialize-cookie-value
"replaces the map cookie value with a serialized string"
[{:keys [value] :as cookie}]
(assoc cookie :value (codec/form-encode value)))
(defn damaged-cookie-value
"replaces the map cookie value with a serialized string, but modifies it to make it invalid"
[{:keys [value] :as cookie}]
(assoc cookie :value (str (codec/form-encode value) "-INVALID")))
(deftest revoked-cookie-ok
(let [revoked (t/revoked-cookie)]
(is (map? revoked))
(is (= "INVALID" (get-in revoked [:value]))))
(let [k "cookie.name"
revoked (t/revoked-cookie k)]
(is (map? revoked))
(is (= "INVALID" (get-in revoked [k :value])))))
(deftest claims-cookie-ok
(with-redefs [environ/env env-fixture/env-map]
(let [claims {:alpha "a", :beta "b", :gamma 3}
cookie (t/create-cookie claims)
k "cookie.name"
named-cookie (t/create-cookie claims k)]
(is (map? cookie))
(is (not= "INVALID" (:value cookie)))
(is (:value cookie))
(is (map? named-cookie))
(is (not= "INVALID" (get-in named-cookie [k :value])))
(is (get-in named-cookie [k :value])))))
(deftest check-extract-cookie-info
(with-redefs [environ/env env-fixture/env-map]
(let [cookie-info {:user-id "user"
:claims "role1 role2"
:session "session"}]
(is (nil? (t/extract-cookie-info nil)))
(is (nil? (-> cookie-info
t/create-cookie
damaged-cookie-value
t/extract-cookie-info)))
(let [cookie-info-extracted (-> cookie-info
t/create-cookie
serialize-cookie-value
t/extract-cookie-info)]
(is (= {:claims "role1 role2"
:session "session"
:user-id "user"} (dissoc cookie-info-extracted :exp)))
(is (some? (:exp cookie-info-extracted)))))))
| |
de0ed488bcca8afd4de519f7028aff43a7d1c9f6dd89b9d0db670db49a2e428a | backtracking/astro | planets.mli |
(* Planets positions given by longitude, latitude (in degrees)
and distance from the Sun in AU. *)
type planet_position = Astro.coordonnees_heliocentriques
type julian_day = float
val mercury : julian_day -> planet_position
val venus : julian_day -> planet_position
val earth : julian_day -> planet_position
val mars : julian_day -> planet_position
val jupiter : julian_day -> planet_position
val saturn : julian_day -> planet_position
val uranus : julian_day -> planet_position
val neptune : julian_day -> planet_position
val all_planets : (julian_day -> planet_position) list
| null | https://raw.githubusercontent.com/backtracking/astro/adb8020b8a56692f3d2b50d26dce2ccc442c6f39/planets.mli | ocaml | Planets positions given by longitude, latitude (in degrees)
and distance from the Sun in AU. |
type planet_position = Astro.coordonnees_heliocentriques
type julian_day = float
val mercury : julian_day -> planet_position
val venus : julian_day -> planet_position
val earth : julian_day -> planet_position
val mars : julian_day -> planet_position
val jupiter : julian_day -> planet_position
val saturn : julian_day -> planet_position
val uranus : julian_day -> planet_position
val neptune : julian_day -> planet_position
val all_planets : (julian_day -> planet_position) list
|
7c89699ee5d1356849d5fbf0c764949ffdd6474164eb0c07e934fdf58c9c11f8 | jeromesimeon/Galax | jungle.mli | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
Copyright 2001 - 2007 .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
(* A generic exception for now *)
exception Jungle_Error of string
(* Methods for retrieving profilling information *)
val get_put_count : unit -> int
val get_get_count : unit -> int
Generic open flags for all kinds of index
type jungle_open_flags =
| JDB_CREATE
| JDB_EXCL
| JDB_DIRTY_READ
| JDB_RDONLY
| JDB_THREAD
type jungle_open_btree_comp_flags =
| JDB_BTREE_DUP
| JDB_BTREE_DUPSORT
| JDB_BTREE_RECNUM
| JDB_BTREE_REVSPLITOFF
type jungle_put_btree_excl_flags =
| JDB_BTREE_NOOVERWRITE
| JDB_BTREE_NODUPDATA
type jungle_get_btree_excl_flags =
| JDB_BTREE_GET_BOTH
| JDB_BTREE_SET_RECNO
type jungle_get_btree_comp_flags =
| JDB_BTREE_MULTIPLE
| JDB_BTREE_DIRTY_READ
| JDB_BTREE_RMW
BTREE Cursor Flags
type jungle_curopen_comp_flags =
| JDB_CUR_DIRTY_READ
| JDB_CUR_WRITECURSOR
type jungle_cur_put_btree_excl_flags =
| JDB_BTREE_CURPUT_AFTER
| JDB_BTREE_CURPUT_BEFORE
| JDB_BTREE_CURPUT_CURRENT
| JDB_BTREE_CURPUT_KEYFIRST
| JDB_BTREE_CURPUT_KEYLAST
| JDB_BTREE_CURPUT_NODUPDATA
Generic curget flags
type jungle_cur_get_comp_flags =
| JDB_CURGET_DIRTY_READ
| JDB_CURGET_MULTIPLE
| JDB_CURGET_MULTIPLE_KEY
| JDB_CURGET_RMW
type jungle_cur_put_hash_excl_flags =
| JDB_HASH_CURPUT_AFTER
| JDB_HASH_CURPUT_BEFORE
| JDB_HASH_CURPUT_CURRENT
| JDB_HASH_CURPUT_KEYFIRST
| JDB_HASH_CURPUT_KEYLAST
| JDB_HASH_CURPUT_NODUPDATA
(* Operations on BTrees *)
type jungle_btree
type jungle_btree_key = char array
type jungle_btree_value = char array
val jungle_btree_open :
string -> jungle_open_flags list * jungle_open_btree_comp_flags list -> int -> jungle_btree
(* Takes the name of the file open flags and returns a handle
on a new btree *)
val jungle_btree_put :
jungle_btree -> jungle_btree_key -> jungle_btree_value -> jungle_put_btree_excl_flags option -> unit
val jungle_btree_get :
jungle_btree -> jungle_btree_key -> jungle_get_btree_excl_flags option ->
jungle_get_btree_comp_flags list -> jungle_btree_value option
(* Return a function suitable to be used in a cursor *)
val jungle_btree_getall :
jungle_btree -> jungle_btree_key -> (unit -> jungle_btree_value option)
val jungle_btree_delete : jungle_btree -> jungle_btree_key -> jungle_btree_value -> unit
val jungle_btree_delete_all : jungle_btree -> jungle_btree_key -> unit
val jungle_btree_close :
jungle_btree -> unit
val jungle_btree_sync :
jungle_btree -> unit
type jungle_btree_cursor
val jungle_btree_cursor_open :
jungle_btree -> jungle_curopen_comp_flags list -> jungle_btree_cursor
val jungle_btree_cursor_put :
jungle_btree_cursor -> jungle_btree_key -> jungle_btree_value -> jungle_cur_put_btree_excl_flags option -> unit
val jungle_btree_cursor_get_next :
jungle_btree_cursor -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_prev :
jungle_btree_cursor -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_first :
jungle_btree_cursor -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_last :
jungle_btree_cursor -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
(**************************************)
(* These are the actual get functions *)
(**************************************)
val jungle_btree_cursor_get_set :
jungle_btree_cursor -> jungle_btree_key -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_set_range :
jungle_btree_cursor -> jungle_btree_key -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_both :
jungle_btree_cursor -> jungle_btree_key * jungle_btree_value -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_both_range :
jungle_btree_cursor -> jungle_btree_key * jungle_btree_value -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_del :
jungle_btree_cursor -> unit
val jungle_btree_cursor_close :
jungle_btree_cursor -> unit
Operations on Recno
type jungle_recno
type jungle_recno_key = int
type jungle_recno_value = char array
type jungle_open_recno_comp_flags =
| JDB_RECNO_RENUMBER
| JDB_RECNO_SNAPSHOT
type jungle_put_recno_excl_flags =
| JDB_RECNO_APPEND
| JDB_RECNO_NOOVERWRITE
(* Cannot specify both of above at the same time *)
type jungle_get_recno_excl_flags =
| JDB_RECNO_GET_BOTH
type jungle_get_recno_comp_flags =
| JDB_RECNO_MULTIPLE
| JDB_RECNO_DIRTY_READ
| JDB_RECNO_RMW
type jungle_cur_put_recno_excl_flags =
Check DB_RENUMBER_FLAG in create
Check DB_RENUMBER_FLAG in create
| JDB_RECNO_CURPUT_CURRENT
val jungle_recno_open :
string -> jungle_open_flags list * jungle_open_recno_comp_flags list -> int -> bool -> int -> jungle_recno
(* Takes the name of the file open flags and returns a handle
on a new btree *)
val jungle_recno_put :
jungle_recno -> jungle_recno_key -> jungle_recno_value -> jungle_put_recno_excl_flags option -> unit
val jungle_recno_get :
jungle_recno -> jungle_recno_key -> jungle_get_recno_excl_flags option ->
jungle_get_recno_comp_flags list -> jungle_recno_value option
val jungle_recno_get_unsafe :
jungle_recno -> jungle_recno_key -> jungle_get_recno_excl_flags option ->
jungle_get_recno_comp_flags list -> jungle_recno_value
val jungle_recno_delete :
jungle_recno -> int -> unit
val jungle_recno_close : jungle_recno -> unit
val jungle_recno_close_no_sync : jungle_recno -> unit
val jungle_recno_sync :
jungle_recno -> unit
type jungle_recno_cursor
val jungle_recno_cursor_open :
jungle_recno -> jungle_curopen_comp_flags list -> jungle_recno_cursor
val jungle_recno_cursor_put :
jungle_recno_cursor -> jungle_recno_key -> jungle_recno_value -> jungle_cur_put_recno_excl_flags option -> unit
val jungle_recno_cursor_get_next :
jungle_recno_cursor -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_get_prev :
jungle_recno_cursor -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_get_first:
jungle_recno_cursor -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_get_last :
jungle_recno_cursor -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_get_next_dup :
jungle_recno_cursor -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_get_set :
jungle_recno_cursor -> jungle_recno_key -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_del :
jungle_recno_cursor -> unit
val jungle_recno_cursor_close :
jungle_recno_cursor -> unit
(* Operations on Hash *)
type jungle_hash
type jungle_hash_key = char array
type jungle_hash_value = char array
(* Hashtable Specific Flags *)
type jungle_open_hash_comp_flags =
| JDB_HASH_DUP
| JDB_HASH_DUPSORT
type jungle_put_hash_excl_flags =
| JDB_HASH_NOOVERWRITE
| JDB_HASH_NODUPDATA
(* Cannot specify both of above at the same time *)
type jungle_get_hash_excl_flags =
| JDB_HASH_GET_BOTH
| JDB_HASH_SET_RECNO
(* Cannot specify both of above at the same time *)
type jungle_get_hash_comp_flags =
| JDB_HASH_MULTIPLE
| JDB_HASH_DIRTY_READ
| JDB_HASH_RMW
val jungle_hash_open :
string -> jungle_open_flags list * jungle_open_hash_comp_flags list -> int -> jungle_hash
val jungle_hash_put :
jungle_hash -> jungle_hash_key -> jungle_hash_value -> jungle_put_hash_excl_flags option -> unit
val jungle_hash_get :
jungle_hash -> jungle_hash_key -> jungle_get_hash_excl_flags option -> jungle_get_hash_comp_flags list -> jungle_hash_value option
(* Return a function suitable to be used in a cursor *)
val jungle_hash_getall :
jungle_hash -> jungle_hash_key -> (unit -> jungle_btree_value option)
val jungle_hash_get_unsafe :
jungle_hash -> jungle_hash_key -> jungle_get_hash_excl_flags option -> jungle_get_hash_comp_flags list -> jungle_hash_value
val jungle_hash_delete :
jungle_hash -> jungle_hash_key -> unit
val jungle_hash_close :
jungle_hash -> unit
val jungle_hash_sync :
jungle_hash -> unit
type jungle_hash_cursor
val jungle_hash_cursor_open :
jungle_hash -> jungle_curopen_comp_flags list -> jungle_hash_cursor
val jungle_hash_cursor_put :
jungle_hash_cursor -> char array -> char array -> jungle_cur_put_hash_excl_flags option -> unit
val jungle_hash_cursor_get_next :
jungle_hash_cursor -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_get_prev :
jungle_hash_cursor -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_get_first :
jungle_hash_cursor -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_get_last :
jungle_hash_cursor -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
(* Both of these need key as additional parameter *)
val jungle_hash_cursor_get_set :
jungle_hash_cursor -> jungle_hash_key -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_get_both :
jungle_hash_cursor -> (jungle_hash_key * jungle_hash_value) -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_del :
jungle_hash_cursor -> unit
val jungle_hash_cursor_close :
jungle_hash_cursor -> unit
Create Secondary Index of type BTREE
val jungle_btree_primary_btree_sec_index_open :
jungle_btree-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_btree_comp_flags list -> int -> jungle_btree
Takes the primary btree database pointer , secondary index filename ,
name of the call - back function , open flags and return a handle to
new btree secondary index
name of the call-back function, open flags and return a handle to
new btree secondary index *)
val jungle_recno_primary_btree_sec_index_open :
jungle_recno -> string -> (string -> string) -> jungle_open_flags list * jungle_open_btree_comp_flags list -> int -> jungle_btree
Same as the previous but the primary database pointer is
recno instead of
recno instead of btree *)
val jungle_hash_primary_btree_sec_index_open :
jungle_hash-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_btree_comp_flags list -> int -> jungle_btree
Same as the previous but the primary database pointer is
recno instead of
recno instead of btree *)
(* Create Secondary Index of type Hash *)
val jungle_btree_primary_hash_sec_index_open :
jungle_btree-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_hash_comp_flags list -> int -> jungle_hash
val jungle_recno_primary_hash_sec_index_open :
jungle_recno -> string -> (string -> string) -> jungle_open_flags list * jungle_open_hash_comp_flags list -> int -> jungle_hash
val jungle_hash_primary_hash_sec_index_open :
jungle_hash-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_hash_comp_flags list -> int -> jungle_hash
Create Secondary Index of type Recno
val jungle_btree_primary_recno_sec_index_open :
jungle_btree-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_recno_comp_flags list -> int -> jungle_recno
val jungle_recno_primary_recno_sec_index_open :
jungle_recno -> string -> (string-> string) -> jungle_open_flags list * jungle_open_recno_comp_flags list -> int -> jungle_recno
val jungle_hash_primary_recno_sec_index_open :
jungle_hash-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_recno_comp_flags list -> int -> jungle_recno
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/tools/Jungle/jungle.mli | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
A generic exception for now
Methods for retrieving profilling information
Operations on BTrees
Takes the name of the file open flags and returns a handle
on a new btree
Return a function suitable to be used in a cursor
************************************
These are the actual get functions
************************************
Cannot specify both of above at the same time
Takes the name of the file open flags and returns a handle
on a new btree
Operations on Hash
Hashtable Specific Flags
Cannot specify both of above at the same time
Cannot specify both of above at the same time
Return a function suitable to be used in a cursor
Both of these need key as additional parameter
Create Secondary Index of type Hash | Copyright 2001 - 2007 .
exception Jungle_Error of string
val get_put_count : unit -> int
val get_get_count : unit -> int
Generic open flags for all kinds of index
type jungle_open_flags =
| JDB_CREATE
| JDB_EXCL
| JDB_DIRTY_READ
| JDB_RDONLY
| JDB_THREAD
type jungle_open_btree_comp_flags =
| JDB_BTREE_DUP
| JDB_BTREE_DUPSORT
| JDB_BTREE_RECNUM
| JDB_BTREE_REVSPLITOFF
type jungle_put_btree_excl_flags =
| JDB_BTREE_NOOVERWRITE
| JDB_BTREE_NODUPDATA
type jungle_get_btree_excl_flags =
| JDB_BTREE_GET_BOTH
| JDB_BTREE_SET_RECNO
type jungle_get_btree_comp_flags =
| JDB_BTREE_MULTIPLE
| JDB_BTREE_DIRTY_READ
| JDB_BTREE_RMW
BTREE Cursor Flags
type jungle_curopen_comp_flags =
| JDB_CUR_DIRTY_READ
| JDB_CUR_WRITECURSOR
type jungle_cur_put_btree_excl_flags =
| JDB_BTREE_CURPUT_AFTER
| JDB_BTREE_CURPUT_BEFORE
| JDB_BTREE_CURPUT_CURRENT
| JDB_BTREE_CURPUT_KEYFIRST
| JDB_BTREE_CURPUT_KEYLAST
| JDB_BTREE_CURPUT_NODUPDATA
Generic curget flags
type jungle_cur_get_comp_flags =
| JDB_CURGET_DIRTY_READ
| JDB_CURGET_MULTIPLE
| JDB_CURGET_MULTIPLE_KEY
| JDB_CURGET_RMW
type jungle_cur_put_hash_excl_flags =
| JDB_HASH_CURPUT_AFTER
| JDB_HASH_CURPUT_BEFORE
| JDB_HASH_CURPUT_CURRENT
| JDB_HASH_CURPUT_KEYFIRST
| JDB_HASH_CURPUT_KEYLAST
| JDB_HASH_CURPUT_NODUPDATA
type jungle_btree
type jungle_btree_key = char array
type jungle_btree_value = char array
val jungle_btree_open :
string -> jungle_open_flags list * jungle_open_btree_comp_flags list -> int -> jungle_btree
val jungle_btree_put :
jungle_btree -> jungle_btree_key -> jungle_btree_value -> jungle_put_btree_excl_flags option -> unit
val jungle_btree_get :
jungle_btree -> jungle_btree_key -> jungle_get_btree_excl_flags option ->
jungle_get_btree_comp_flags list -> jungle_btree_value option
val jungle_btree_getall :
jungle_btree -> jungle_btree_key -> (unit -> jungle_btree_value option)
val jungle_btree_delete : jungle_btree -> jungle_btree_key -> jungle_btree_value -> unit
val jungle_btree_delete_all : jungle_btree -> jungle_btree_key -> unit
val jungle_btree_close :
jungle_btree -> unit
val jungle_btree_sync :
jungle_btree -> unit
type jungle_btree_cursor
val jungle_btree_cursor_open :
jungle_btree -> jungle_curopen_comp_flags list -> jungle_btree_cursor
val jungle_btree_cursor_put :
jungle_btree_cursor -> jungle_btree_key -> jungle_btree_value -> jungle_cur_put_btree_excl_flags option -> unit
val jungle_btree_cursor_get_next :
jungle_btree_cursor -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_prev :
jungle_btree_cursor -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_first :
jungle_btree_cursor -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_last :
jungle_btree_cursor -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_set :
jungle_btree_cursor -> jungle_btree_key -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_set_range :
jungle_btree_cursor -> jungle_btree_key -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_both :
jungle_btree_cursor -> jungle_btree_key * jungle_btree_value -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_get_both_range :
jungle_btree_cursor -> jungle_btree_key * jungle_btree_value -> jungle_cur_get_comp_flags list -> (jungle_btree_key * jungle_btree_value) option
val jungle_btree_cursor_del :
jungle_btree_cursor -> unit
val jungle_btree_cursor_close :
jungle_btree_cursor -> unit
Operations on Recno
type jungle_recno
type jungle_recno_key = int
type jungle_recno_value = char array
type jungle_open_recno_comp_flags =
| JDB_RECNO_RENUMBER
| JDB_RECNO_SNAPSHOT
type jungle_put_recno_excl_flags =
| JDB_RECNO_APPEND
| JDB_RECNO_NOOVERWRITE
type jungle_get_recno_excl_flags =
| JDB_RECNO_GET_BOTH
type jungle_get_recno_comp_flags =
| JDB_RECNO_MULTIPLE
| JDB_RECNO_DIRTY_READ
| JDB_RECNO_RMW
type jungle_cur_put_recno_excl_flags =
Check DB_RENUMBER_FLAG in create
Check DB_RENUMBER_FLAG in create
| JDB_RECNO_CURPUT_CURRENT
val jungle_recno_open :
string -> jungle_open_flags list * jungle_open_recno_comp_flags list -> int -> bool -> int -> jungle_recno
val jungle_recno_put :
jungle_recno -> jungle_recno_key -> jungle_recno_value -> jungle_put_recno_excl_flags option -> unit
val jungle_recno_get :
jungle_recno -> jungle_recno_key -> jungle_get_recno_excl_flags option ->
jungle_get_recno_comp_flags list -> jungle_recno_value option
val jungle_recno_get_unsafe :
jungle_recno -> jungle_recno_key -> jungle_get_recno_excl_flags option ->
jungle_get_recno_comp_flags list -> jungle_recno_value
val jungle_recno_delete :
jungle_recno -> int -> unit
val jungle_recno_close : jungle_recno -> unit
val jungle_recno_close_no_sync : jungle_recno -> unit
val jungle_recno_sync :
jungle_recno -> unit
type jungle_recno_cursor
val jungle_recno_cursor_open :
jungle_recno -> jungle_curopen_comp_flags list -> jungle_recno_cursor
val jungle_recno_cursor_put :
jungle_recno_cursor -> jungle_recno_key -> jungle_recno_value -> jungle_cur_put_recno_excl_flags option -> unit
val jungle_recno_cursor_get_next :
jungle_recno_cursor -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_get_prev :
jungle_recno_cursor -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_get_first:
jungle_recno_cursor -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_get_last :
jungle_recno_cursor -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_get_next_dup :
jungle_recno_cursor -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_get_set :
jungle_recno_cursor -> jungle_recno_key -> jungle_cur_get_comp_flags list -> jungle_recno_value option
val jungle_recno_cursor_del :
jungle_recno_cursor -> unit
val jungle_recno_cursor_close :
jungle_recno_cursor -> unit
type jungle_hash
type jungle_hash_key = char array
type jungle_hash_value = char array
type jungle_open_hash_comp_flags =
| JDB_HASH_DUP
| JDB_HASH_DUPSORT
type jungle_put_hash_excl_flags =
| JDB_HASH_NOOVERWRITE
| JDB_HASH_NODUPDATA
type jungle_get_hash_excl_flags =
| JDB_HASH_GET_BOTH
| JDB_HASH_SET_RECNO
type jungle_get_hash_comp_flags =
| JDB_HASH_MULTIPLE
| JDB_HASH_DIRTY_READ
| JDB_HASH_RMW
val jungle_hash_open :
string -> jungle_open_flags list * jungle_open_hash_comp_flags list -> int -> jungle_hash
val jungle_hash_put :
jungle_hash -> jungle_hash_key -> jungle_hash_value -> jungle_put_hash_excl_flags option -> unit
val jungle_hash_get :
jungle_hash -> jungle_hash_key -> jungle_get_hash_excl_flags option -> jungle_get_hash_comp_flags list -> jungle_hash_value option
val jungle_hash_getall :
jungle_hash -> jungle_hash_key -> (unit -> jungle_btree_value option)
val jungle_hash_get_unsafe :
jungle_hash -> jungle_hash_key -> jungle_get_hash_excl_flags option -> jungle_get_hash_comp_flags list -> jungle_hash_value
val jungle_hash_delete :
jungle_hash -> jungle_hash_key -> unit
val jungle_hash_close :
jungle_hash -> unit
val jungle_hash_sync :
jungle_hash -> unit
type jungle_hash_cursor
val jungle_hash_cursor_open :
jungle_hash -> jungle_curopen_comp_flags list -> jungle_hash_cursor
val jungle_hash_cursor_put :
jungle_hash_cursor -> char array -> char array -> jungle_cur_put_hash_excl_flags option -> unit
val jungle_hash_cursor_get_next :
jungle_hash_cursor -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_get_prev :
jungle_hash_cursor -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_get_first :
jungle_hash_cursor -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_get_last :
jungle_hash_cursor -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_get_set :
jungle_hash_cursor -> jungle_hash_key -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_get_both :
jungle_hash_cursor -> (jungle_hash_key * jungle_hash_value) -> jungle_cur_get_comp_flags list -> (jungle_hash_key * jungle_hash_value) option
val jungle_hash_cursor_del :
jungle_hash_cursor -> unit
val jungle_hash_cursor_close :
jungle_hash_cursor -> unit
Create Secondary Index of type BTREE
val jungle_btree_primary_btree_sec_index_open :
jungle_btree-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_btree_comp_flags list -> int -> jungle_btree
Takes the primary btree database pointer , secondary index filename ,
name of the call - back function , open flags and return a handle to
new btree secondary index
name of the call-back function, open flags and return a handle to
new btree secondary index *)
val jungle_recno_primary_btree_sec_index_open :
jungle_recno -> string -> (string -> string) -> jungle_open_flags list * jungle_open_btree_comp_flags list -> int -> jungle_btree
Same as the previous but the primary database pointer is
recno instead of
recno instead of btree *)
val jungle_hash_primary_btree_sec_index_open :
jungle_hash-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_btree_comp_flags list -> int -> jungle_btree
Same as the previous but the primary database pointer is
recno instead of
recno instead of btree *)
val jungle_btree_primary_hash_sec_index_open :
jungle_btree-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_hash_comp_flags list -> int -> jungle_hash
val jungle_recno_primary_hash_sec_index_open :
jungle_recno -> string -> (string -> string) -> jungle_open_flags list * jungle_open_hash_comp_flags list -> int -> jungle_hash
val jungle_hash_primary_hash_sec_index_open :
jungle_hash-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_hash_comp_flags list -> int -> jungle_hash
Create Secondary Index of type Recno
val jungle_btree_primary_recno_sec_index_open :
jungle_btree-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_recno_comp_flags list -> int -> jungle_recno
val jungle_recno_primary_recno_sec_index_open :
jungle_recno -> string -> (string-> string) -> jungle_open_flags list * jungle_open_recno_comp_flags list -> int -> jungle_recno
val jungle_hash_primary_recno_sec_index_open :
jungle_hash-> string -> (string -> string -> string) -> jungle_open_flags list * jungle_open_recno_comp_flags list -> int -> jungle_recno
|
448d061cf9f82ad2c7dbdd687a02d5562b7816b4e804cb38b1e6c125001093c2 | anoma/juvix | Negative.hs | module Scope.Negative (allTests) where
import Base
import Juvix.Compiler.Builtins (iniState)
import Juvix.Compiler.Concrete.Translation.FromParsed.Analysis.Scoping.Error
import Juvix.Compiler.Pipeline
type FailMsg = String
data NegTest a = NegTest
{ _name :: String,
_relDir :: Path Rel Dir,
_file :: Path Rel File,
_checkErr :: a -> Maybe FailMsg
}
root :: Path Abs Dir
root = relToProject $(mkRelDir "tests/negative")
testDescr :: (Typeable a) => NegTest a -> TestDescr
testDescr NegTest {..} =
let tRoot = root <//> _relDir
file' = tRoot <//> _file
in TestDescr
{ _testName = _name,
_testRoot = tRoot,
_testAssertion = Single $ do
let entryPoint = defaultEntryPoint tRoot file'
res <- runIOEither iniState entryPoint upToAbstract
case mapLeft fromJuvixError res of
Left (Just err) -> whenJust (_checkErr err) assertFailure
Left Nothing -> assertFailure "An error ocurred but it was not in the scoper."
Right {} -> assertFailure "The scope checker did not find an error."
}
allTests :: TestTree
allTests =
testGroup
"Scope negative tests"
( map (mkTest . testDescr) scoperErrorTests
)
wrongError :: Maybe FailMsg
wrongError = Just "Incorrect error"
scoperErrorTests :: [NegTest ScoperError]
scoperErrorTests =
[ NegTest
"Not in scope"
$(mkRelDir ".")
$(mkRelFile "NotInScope.juvix")
$ \case
ErrSymNotInScope {} -> Nothing
_ -> wrongError,
NegTest
"Qualified not in scope"
$(mkRelDir ".")
$(mkRelFile "QualSymNotInScope.juvix")
$ \case
ErrQualSymNotInScope {} -> Nothing
_ -> wrongError,
NegTest
"Multiple declarations"
$(mkRelDir ".")
$(mkRelFile "MultipleDeclarations.juvix")
$ \case
ErrMultipleDeclarations {} -> Nothing
_ -> wrongError,
NegTest
"Import cycle"
$(mkRelDir "ImportCycle")
$(mkRelFile "A.juvix")
$ \case
ErrImportCycle {} -> Nothing
_ -> wrongError,
NegTest
"Binding group conflict (function clause)"
$(mkRelDir "BindGroupConflict")
$(mkRelFile "Clause.juvix")
$ \case
ErrMultipleDeclarations {} -> Nothing
_ -> wrongError,
NegTest
"Binding group conflict (lambda clause)"
$(mkRelDir "BindGroupConflict")
$(mkRelFile "Lambda.juvix")
$ \case
ErrMultipleDeclarations {} -> Nothing
_ -> wrongError,
NegTest
"Infix error (expression)"
$(mkRelDir ".")
$(mkRelFile "InfixError.juvix")
$ \case
ErrInfixParser {} -> Nothing
_ -> wrongError,
NegTest
"Infix error (pattern)"
$(mkRelDir ".")
$(mkRelFile "InfixErrorP.juvix")
$ \case
ErrInfixPattern {} -> Nothing
_ -> wrongError,
NegTest
"Duplicate fixity declaration"
$(mkRelDir ".")
$(mkRelFile "DuplicateFixity.juvix")
$ \case
ErrDuplicateFixity {} -> Nothing
_ -> wrongError,
NegTest
"Multiple export conflict"
$(mkRelDir ".")
$(mkRelFile "MultipleExportConflict.juvix")
$ \case
ErrMultipleExport {} -> Nothing
_ -> wrongError,
NegTest
"Module not in scope"
$(mkRelDir ".")
$(mkRelFile "ModuleNotInScope.juvix")
$ \case
ErrModuleNotInScope {} -> Nothing
_ -> wrongError,
NegTest
"Unused operator syntax definition"
$(mkRelDir ".")
$(mkRelFile "UnusedOperatorDef.juvix")
$ \case
ErrUnusedOperatorDef {} -> Nothing
_ -> wrongError,
NegTest
"Ambiguous symbol"
$(mkRelDir ".")
$(mkRelFile "AmbiguousSymbol.juvix")
$ \case
ErrAmbiguousSym {} -> Nothing
_ -> wrongError,
NegTest
"Lacks function clause"
$(mkRelDir ".")
$(mkRelFile "LacksFunctionClause.juvix")
$ \case
ErrLacksFunctionClause {} -> Nothing
_ -> wrongError,
NegTest
"Lacks function clause inside let"
$(mkRelDir ".")
$(mkRelFile "LetMissingClause.juvix")
$ \case
ErrLacksFunctionClause {} -> Nothing
_ -> wrongError,
NegTest
"Ambiguous export"
$(mkRelDir ".")
$(mkRelFile "AmbiguousExport.juvix")
$ \case
ErrMultipleExport {} -> Nothing
_ -> wrongError,
NegTest
"Ambiguous nested modules"
$(mkRelDir ".")
$(mkRelFile "AmbiguousModule.juvix")
$ \case
ErrAmbiguousModuleSym {} -> Nothing
_ -> wrongError,
NegTest
"Ambiguous nested constructors"
$(mkRelDir ".")
$(mkRelFile "AmbiguousConstructor.juvix")
$ \case
ErrAmbiguousSym {} -> Nothing
_ -> wrongError,
NegTest
"Wrong location of a compile block"
$(mkRelDir "CompileBlocks")
$(mkRelFile "WrongLocationCompileBlock.juvix")
$ \case
ErrWrongLocationCompileBlock {} -> Nothing
_ -> wrongError,
NegTest
"Implicit argument on the left of an application"
$(mkRelDir ".")
$(mkRelFile "AppLeftImplicit.juvix")
$ \case
ErrAppLeftImplicit {} -> Nothing
_ -> wrongError,
NegTest
"Multiple compile blocks for the same name"
$(mkRelDir "CompileBlocks")
$(mkRelFile "MultipleCompileBlockSameName.juvix")
$ \case
ErrMultipleCompileBlockSameName {} -> Nothing
_ -> wrongError,
NegTest
"Multiple rules for a backend inside a compile block"
$(mkRelDir "CompileBlocks")
$(mkRelFile "MultipleCompileRuleSameBackend.juvix")
$ \case
ErrMultipleCompileRuleSameBackend {} -> Nothing
_ -> wrongError,
NegTest
"issue 230"
$(mkRelDir "230")
$(mkRelFile "Prod.juvix")
$ \case
ErrQualSymNotInScope {} -> Nothing
_ -> wrongError,
NegTest
"Double braces in pattern"
$(mkRelDir ".")
$(mkRelFile "NestedPatternBraces.juvix")
$ \case
ErrDoubleBracesPattern {} -> Nothing
_ -> wrongError,
NegTest
"As-Pattern aliasing variable"
$(mkRelDir ".")
$(mkRelFile "AsPatternAlias.juvix")
$ \case
ErrAliasBinderPattern {} -> Nothing
_ -> wrongError,
NegTest
"Nested As-Patterns"
$(mkRelDir ".")
$(mkRelFile "NestedAsPatterns.juvix")
$ \case
ErrDoubleBinderPattern {} -> Nothing
_ -> wrongError,
NegTest
"Pattern matching an implicit argument on the left of an application"
$(mkRelDir ".")
$(mkRelFile "ImplicitPatternLeftApplication.juvix")
$ \case
ErrImplicitPatternLeftApplication {} -> Nothing
_ -> wrongError,
NegTest
"Constructor expected on the left of a pattern application"
$(mkRelDir ".")
$(mkRelFile "ConstructorExpectedLeftApplication.juvix")
$ \case
ErrConstructorExpectedLeftApplication {} -> Nothing
_ -> wrongError,
NegTest
"Compile block for a unsupported kind of expression"
$(mkRelDir "CompileBlocks")
$(mkRelFile "WrongKindExpressionCompileBlock.juvix")
$ \case
ErrWrongKindExpressionCompileBlock {} -> Nothing
_ -> wrongError,
NegTest
"A type parameter name occurs twice when declaring an inductive type"
$(mkRelDir ".")
$(mkRelFile "DuplicateInductiveParameterName.juvix")
$ \case
ErrMultipleDeclarations {} -> Nothing
_ -> wrongError,
NegTest
"A function has a duplicate clause"
$(mkRelDir ".")
$(mkRelFile "DuplicateClause.juvix")
$ \case
ErrDuplicateFunctionClause {} -> Nothing
_ -> wrongError,
NegTest
"A function lacks a type signature"
$(mkRelDir ".")
$(mkRelFile "LacksTypeSig.juvix")
$ \case
ErrLacksTypeSig {} -> Nothing
_ -> wrongError,
NegTest
"A function inside a let lacks a type signature that is at the top level"
$(mkRelDir ".")
$(mkRelFile "LacksTypeSig2.juvix")
$ \case
ErrLacksTypeSig {} -> Nothing
_ -> wrongError
]
| null | https://raw.githubusercontent.com/anoma/juvix/098c256da83556f8b32a831468a8d38783a967e8/test/Scope/Negative.hs | haskell | module Scope.Negative (allTests) where
import Base
import Juvix.Compiler.Builtins (iniState)
import Juvix.Compiler.Concrete.Translation.FromParsed.Analysis.Scoping.Error
import Juvix.Compiler.Pipeline
type FailMsg = String
data NegTest a = NegTest
{ _name :: String,
_relDir :: Path Rel Dir,
_file :: Path Rel File,
_checkErr :: a -> Maybe FailMsg
}
root :: Path Abs Dir
root = relToProject $(mkRelDir "tests/negative")
testDescr :: (Typeable a) => NegTest a -> TestDescr
testDescr NegTest {..} =
let tRoot = root <//> _relDir
file' = tRoot <//> _file
in TestDescr
{ _testName = _name,
_testRoot = tRoot,
_testAssertion = Single $ do
let entryPoint = defaultEntryPoint tRoot file'
res <- runIOEither iniState entryPoint upToAbstract
case mapLeft fromJuvixError res of
Left (Just err) -> whenJust (_checkErr err) assertFailure
Left Nothing -> assertFailure "An error ocurred but it was not in the scoper."
Right {} -> assertFailure "The scope checker did not find an error."
}
allTests :: TestTree
allTests =
testGroup
"Scope negative tests"
( map (mkTest . testDescr) scoperErrorTests
)
wrongError :: Maybe FailMsg
wrongError = Just "Incorrect error"
scoperErrorTests :: [NegTest ScoperError]
scoperErrorTests =
[ NegTest
"Not in scope"
$(mkRelDir ".")
$(mkRelFile "NotInScope.juvix")
$ \case
ErrSymNotInScope {} -> Nothing
_ -> wrongError,
NegTest
"Qualified not in scope"
$(mkRelDir ".")
$(mkRelFile "QualSymNotInScope.juvix")
$ \case
ErrQualSymNotInScope {} -> Nothing
_ -> wrongError,
NegTest
"Multiple declarations"
$(mkRelDir ".")
$(mkRelFile "MultipleDeclarations.juvix")
$ \case
ErrMultipleDeclarations {} -> Nothing
_ -> wrongError,
NegTest
"Import cycle"
$(mkRelDir "ImportCycle")
$(mkRelFile "A.juvix")
$ \case
ErrImportCycle {} -> Nothing
_ -> wrongError,
NegTest
"Binding group conflict (function clause)"
$(mkRelDir "BindGroupConflict")
$(mkRelFile "Clause.juvix")
$ \case
ErrMultipleDeclarations {} -> Nothing
_ -> wrongError,
NegTest
"Binding group conflict (lambda clause)"
$(mkRelDir "BindGroupConflict")
$(mkRelFile "Lambda.juvix")
$ \case
ErrMultipleDeclarations {} -> Nothing
_ -> wrongError,
NegTest
"Infix error (expression)"
$(mkRelDir ".")
$(mkRelFile "InfixError.juvix")
$ \case
ErrInfixParser {} -> Nothing
_ -> wrongError,
NegTest
"Infix error (pattern)"
$(mkRelDir ".")
$(mkRelFile "InfixErrorP.juvix")
$ \case
ErrInfixPattern {} -> Nothing
_ -> wrongError,
NegTest
"Duplicate fixity declaration"
$(mkRelDir ".")
$(mkRelFile "DuplicateFixity.juvix")
$ \case
ErrDuplicateFixity {} -> Nothing
_ -> wrongError,
NegTest
"Multiple export conflict"
$(mkRelDir ".")
$(mkRelFile "MultipleExportConflict.juvix")
$ \case
ErrMultipleExport {} -> Nothing
_ -> wrongError,
NegTest
"Module not in scope"
$(mkRelDir ".")
$(mkRelFile "ModuleNotInScope.juvix")
$ \case
ErrModuleNotInScope {} -> Nothing
_ -> wrongError,
NegTest
"Unused operator syntax definition"
$(mkRelDir ".")
$(mkRelFile "UnusedOperatorDef.juvix")
$ \case
ErrUnusedOperatorDef {} -> Nothing
_ -> wrongError,
NegTest
"Ambiguous symbol"
$(mkRelDir ".")
$(mkRelFile "AmbiguousSymbol.juvix")
$ \case
ErrAmbiguousSym {} -> Nothing
_ -> wrongError,
NegTest
"Lacks function clause"
$(mkRelDir ".")
$(mkRelFile "LacksFunctionClause.juvix")
$ \case
ErrLacksFunctionClause {} -> Nothing
_ -> wrongError,
NegTest
"Lacks function clause inside let"
$(mkRelDir ".")
$(mkRelFile "LetMissingClause.juvix")
$ \case
ErrLacksFunctionClause {} -> Nothing
_ -> wrongError,
NegTest
"Ambiguous export"
$(mkRelDir ".")
$(mkRelFile "AmbiguousExport.juvix")
$ \case
ErrMultipleExport {} -> Nothing
_ -> wrongError,
NegTest
"Ambiguous nested modules"
$(mkRelDir ".")
$(mkRelFile "AmbiguousModule.juvix")
$ \case
ErrAmbiguousModuleSym {} -> Nothing
_ -> wrongError,
NegTest
"Ambiguous nested constructors"
$(mkRelDir ".")
$(mkRelFile "AmbiguousConstructor.juvix")
$ \case
ErrAmbiguousSym {} -> Nothing
_ -> wrongError,
NegTest
"Wrong location of a compile block"
$(mkRelDir "CompileBlocks")
$(mkRelFile "WrongLocationCompileBlock.juvix")
$ \case
ErrWrongLocationCompileBlock {} -> Nothing
_ -> wrongError,
NegTest
"Implicit argument on the left of an application"
$(mkRelDir ".")
$(mkRelFile "AppLeftImplicit.juvix")
$ \case
ErrAppLeftImplicit {} -> Nothing
_ -> wrongError,
NegTest
"Multiple compile blocks for the same name"
$(mkRelDir "CompileBlocks")
$(mkRelFile "MultipleCompileBlockSameName.juvix")
$ \case
ErrMultipleCompileBlockSameName {} -> Nothing
_ -> wrongError,
NegTest
"Multiple rules for a backend inside a compile block"
$(mkRelDir "CompileBlocks")
$(mkRelFile "MultipleCompileRuleSameBackend.juvix")
$ \case
ErrMultipleCompileRuleSameBackend {} -> Nothing
_ -> wrongError,
NegTest
"issue 230"
$(mkRelDir "230")
$(mkRelFile "Prod.juvix")
$ \case
ErrQualSymNotInScope {} -> Nothing
_ -> wrongError,
NegTest
"Double braces in pattern"
$(mkRelDir ".")
$(mkRelFile "NestedPatternBraces.juvix")
$ \case
ErrDoubleBracesPattern {} -> Nothing
_ -> wrongError,
NegTest
"As-Pattern aliasing variable"
$(mkRelDir ".")
$(mkRelFile "AsPatternAlias.juvix")
$ \case
ErrAliasBinderPattern {} -> Nothing
_ -> wrongError,
NegTest
"Nested As-Patterns"
$(mkRelDir ".")
$(mkRelFile "NestedAsPatterns.juvix")
$ \case
ErrDoubleBinderPattern {} -> Nothing
_ -> wrongError,
NegTest
"Pattern matching an implicit argument on the left of an application"
$(mkRelDir ".")
$(mkRelFile "ImplicitPatternLeftApplication.juvix")
$ \case
ErrImplicitPatternLeftApplication {} -> Nothing
_ -> wrongError,
NegTest
"Constructor expected on the left of a pattern application"
$(mkRelDir ".")
$(mkRelFile "ConstructorExpectedLeftApplication.juvix")
$ \case
ErrConstructorExpectedLeftApplication {} -> Nothing
_ -> wrongError,
NegTest
"Compile block for a unsupported kind of expression"
$(mkRelDir "CompileBlocks")
$(mkRelFile "WrongKindExpressionCompileBlock.juvix")
$ \case
ErrWrongKindExpressionCompileBlock {} -> Nothing
_ -> wrongError,
NegTest
"A type parameter name occurs twice when declaring an inductive type"
$(mkRelDir ".")
$(mkRelFile "DuplicateInductiveParameterName.juvix")
$ \case
ErrMultipleDeclarations {} -> Nothing
_ -> wrongError,
NegTest
"A function has a duplicate clause"
$(mkRelDir ".")
$(mkRelFile "DuplicateClause.juvix")
$ \case
ErrDuplicateFunctionClause {} -> Nothing
_ -> wrongError,
NegTest
"A function lacks a type signature"
$(mkRelDir ".")
$(mkRelFile "LacksTypeSig.juvix")
$ \case
ErrLacksTypeSig {} -> Nothing
_ -> wrongError,
NegTest
"A function inside a let lacks a type signature that is at the top level"
$(mkRelDir ".")
$(mkRelFile "LacksTypeSig2.juvix")
$ \case
ErrLacksTypeSig {} -> Nothing
_ -> wrongError
]
| |
62b9237c9a3fa6e7902c67f71c009ac19448dc30528a771fd8865871090650f0 | jacekschae/learn-datomic-course-files | db.clj | (ns cheffy.recipe.db
(:require [datomic.client.api :as d]))
(def recipe-pattern
[:recipe/recipe-id
:recipe/prep-time
:recipe/display-name
:recipe/image-url
:recipe/public?
:recipe/favorite-count
{:recipe/owner
[:account/account-id
:account/display-name]}
{:recipe/steps
[:step/step-id
:step/description
:step/sort-order]}
{:recipe/ingredients
[:ingredient/ingredient-id
:ingredient/display-name
:ingredient/amount
:ingredient/measure
:ingredient/sort-order]}])
(defn find-all-recipes
[{:keys [db]} {:keys [account-id]}]
(let [public (mapv first (d/q '[:find (pull ?e pattern)
:in $ pattern
:where [?e :recipe/public? true]]
db recipe-pattern))]
(if account-id
(let [drafts (mapv first (d/q '[:find (pull ?e pattern)
:in $ ?account-id pattern
:where
[?owner :account/account-id ?account-id]
[?e :recipe/owner ?owner]
[?e :recipe/public? false]]
db account-id recipe-pattern))]
{:drafts drafts
:public public})
{:public public})))
(comment
(find-all-recipes
{:db (d/db (:conn user/datomic))}
{:account-id "auth0|5fbf7db6271d5e0076903601"})
; public
(mapv first (let [db (d/db (:conn user/datomic))]
(d/q '[:find (pull ?e pattern)
:in $ pattern
:where [?e :recipe/public? true]]
db recipe-pattern)))
; drafts
(mapv first (let [db (d/db (:conn user/datomic))
account-id "auth0|5fbf7db6271d5e0076903601"]
(d/q '[:find (pull ?e pattern)
:in $ ?account-id pattern
:where
[?owner :account/account-id ?account-id]
[?e :recipe/owner ?owner]
[?e :recipe/public? false]]
db account-id recipe-pattern)))
)
(defn transact-recipe
[{:keys [conn]} {:keys [recipe-id account-id name public prep-time img]}]
(d/transact conn {:tx-data [{:recipe/recipe-id recipe-id
:recipe/display-name name
:recipe/public? (or public false)
:recipe/prep-time prep-time
:recipe/image-url img
:recipe/owner [:account/account-id account-id]}]}))
(defn find-recipe-by-id
[{:keys [db]} {:keys [recipe-id]}]
(ffirst (d/q '[:find (pull ?e pattern)
:in $ ?recipe-id pattern
:where [?e :recipe/recipe-id ?recipe-id]]
db recipe-id recipe-pattern)))
(comment
(ffirst (let [db (d/db (:conn user/datomic))]
(d/q '[:find (pull ?e pattern)
:in $ ?recipe-id pattern
:where [?e :recipe/recipe-id ?recipe-id]]
db #uuid"a1995316-80ea-4a98-939d-7c6295e4bb46" recipe-pattern)))
)
(defn retract-recipe
[])
(defn transact-step
[])
(defn retract-step
[])
(defn transact-ingredient
[])
(defn retract-ingredient
[])
(defn favorite-recipe
[])
(defn unfavorite-recipe
[]) | null | https://raw.githubusercontent.com/jacekschae/learn-datomic-course-files/fe558c573f05697e97cd606add16c8c113678e9e/increments/27-delete-recipe/src/main/cheffy/recipe/db.clj | clojure | public
drafts | (ns cheffy.recipe.db
(:require [datomic.client.api :as d]))
(def recipe-pattern
[:recipe/recipe-id
:recipe/prep-time
:recipe/display-name
:recipe/image-url
:recipe/public?
:recipe/favorite-count
{:recipe/owner
[:account/account-id
:account/display-name]}
{:recipe/steps
[:step/step-id
:step/description
:step/sort-order]}
{:recipe/ingredients
[:ingredient/ingredient-id
:ingredient/display-name
:ingredient/amount
:ingredient/measure
:ingredient/sort-order]}])
(defn find-all-recipes
[{:keys [db]} {:keys [account-id]}]
(let [public (mapv first (d/q '[:find (pull ?e pattern)
:in $ pattern
:where [?e :recipe/public? true]]
db recipe-pattern))]
(if account-id
(let [drafts (mapv first (d/q '[:find (pull ?e pattern)
:in $ ?account-id pattern
:where
[?owner :account/account-id ?account-id]
[?e :recipe/owner ?owner]
[?e :recipe/public? false]]
db account-id recipe-pattern))]
{:drafts drafts
:public public})
{:public public})))
(comment
(find-all-recipes
{:db (d/db (:conn user/datomic))}
{:account-id "auth0|5fbf7db6271d5e0076903601"})
(mapv first (let [db (d/db (:conn user/datomic))]
(d/q '[:find (pull ?e pattern)
:in $ pattern
:where [?e :recipe/public? true]]
db recipe-pattern)))
(mapv first (let [db (d/db (:conn user/datomic))
account-id "auth0|5fbf7db6271d5e0076903601"]
(d/q '[:find (pull ?e pattern)
:in $ ?account-id pattern
:where
[?owner :account/account-id ?account-id]
[?e :recipe/owner ?owner]
[?e :recipe/public? false]]
db account-id recipe-pattern)))
)
(defn transact-recipe
[{:keys [conn]} {:keys [recipe-id account-id name public prep-time img]}]
(d/transact conn {:tx-data [{:recipe/recipe-id recipe-id
:recipe/display-name name
:recipe/public? (or public false)
:recipe/prep-time prep-time
:recipe/image-url img
:recipe/owner [:account/account-id account-id]}]}))
(defn find-recipe-by-id
[{:keys [db]} {:keys [recipe-id]}]
(ffirst (d/q '[:find (pull ?e pattern)
:in $ ?recipe-id pattern
:where [?e :recipe/recipe-id ?recipe-id]]
db recipe-id recipe-pattern)))
(comment
(ffirst (let [db (d/db (:conn user/datomic))]
(d/q '[:find (pull ?e pattern)
:in $ ?recipe-id pattern
:where [?e :recipe/recipe-id ?recipe-id]]
db #uuid"a1995316-80ea-4a98-939d-7c6295e4bb46" recipe-pattern)))
)
(defn retract-recipe
[])
(defn transact-step
[])
(defn retract-step
[])
(defn transact-ingredient
[])
(defn retract-ingredient
[])
(defn favorite-recipe
[])
(defn unfavorite-recipe
[]) |
69162770fa65c6de3e5ac9d5d11aa739c49ea3dce376d36b34e34f83ea6cdc2d | ocaml-multicore/ocaml-tsan | clflags.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2005 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(** Command line flags *)
(** Optimization parameters represented as ints indexed by round number. *)
module Int_arg_helper : sig
type parsed
val parse : string -> string -> parsed ref -> unit
type parse_result =
| Ok
| Parse_failed of exn
val parse_no_error : string -> parsed ref -> parse_result
val get : key:int -> parsed -> int
end
(** Optimization parameters represented as floats indexed by round number. *)
module Float_arg_helper : sig
type parsed
val parse : string -> string -> parsed ref -> unit
type parse_result =
| Ok
| Parse_failed of exn
val parse_no_error : string -> parsed ref -> parse_result
val get : key:int -> parsed -> float
end
type inlining_arguments = {
inline_call_cost : int option;
inline_alloc_cost : int option;
inline_prim_cost : int option;
inline_branch_cost : int option;
inline_indirect_cost : int option;
inline_lifting_benefit : int option;
inline_branch_factor : float option;
inline_max_depth : int option;
inline_max_unroll : int option;
inline_threshold : float option;
inline_toplevel_threshold : int option;
}
val classic_arguments : inlining_arguments
val o1_arguments : inlining_arguments
val o2_arguments : inlining_arguments
val o3_arguments : inlining_arguments
(** Set all the inlining arguments for a round.
The default is set if no round is provided. *)
val use_inlining_arguments_set : ?round:int -> inlining_arguments -> unit
val objfiles : string list ref
val ccobjs : string list ref
val dllibs : string list ref
val cmi_file : string option ref
val compile_only : bool ref
val output_name : string option ref
val include_dirs : string list ref
val no_std_include : bool ref
val no_cwd : bool ref
val print_types : bool ref
val make_archive : bool ref
val debug : bool ref
val debug_full : bool ref
val unsafe : bool ref
val use_linscan : bool ref
val link_everything : bool ref
val custom_runtime : bool ref
val no_check_prims : bool ref
val bytecode_compatible_32 : bool ref
val output_c_object : bool ref
val output_complete_object : bool ref
val output_complete_executable : bool ref
val all_ccopts : string list ref
val classic : bool ref
val nopervasives : bool ref
val match_context_rows : int ref
val safer_matching : bool ref
val open_modules : string list ref
val preprocessor : string option ref
val all_ppx : string list ref
val absname : bool ref
val annotations : bool ref
val binary_annotations : bool ref
val use_threads : bool ref
val noassert : bool ref
val verbose : bool ref
val noprompt : bool ref
val nopromptcont : bool ref
val init_file : string option ref
val noinit : bool ref
val noversion : bool ref
val use_prims : string ref
val use_runtime : string ref
val plugin : bool ref
val principal : bool ref
val real_paths : bool ref
val recursive_types : bool ref
val strict_sequence : bool ref
val strict_formats : bool ref
val applicative_functors : bool ref
val make_runtime : bool ref
val c_compiler : string option ref
val no_auto_link : bool ref
val dllpaths : string list ref
val make_package : bool ref
val for_package : string option ref
val error_size : int ref
val float_const_prop : bool ref
val transparent_modules : bool ref
val unique_ids : bool ref
val locations : bool ref
val dump_source : bool ref
val dump_parsetree : bool ref
val dump_typedtree : bool ref
val dump_shape : bool ref
val dump_rawlambda : bool ref
val dump_lambda : bool ref
val dump_rawclambda : bool ref
val dump_clambda : bool ref
val dump_rawflambda : bool ref
val dump_flambda : bool ref
val dump_flambda_let : int option ref
val dump_instr : bool ref
val keep_camlprimc_file : bool ref
val keep_asm_file : bool ref
val optimize_for_speed : bool ref
val dump_cmm : bool ref
val dump_selection : bool ref
val dump_cse : bool ref
val dump_live : bool ref
val dump_spill : bool ref
val dump_split : bool ref
val dump_interf : bool ref
val dump_prefer : bool ref
val dump_regalloc : bool ref
val dump_reload : bool ref
val dump_scheduling : bool ref
val dump_linear : bool ref
val dump_interval : bool ref
val keep_startup_file : bool ref
val dump_combine : bool ref
val native_code : bool ref
val default_inline_threshold : float
val inline_threshold : Float_arg_helper.parsed ref
val inlining_report : bool ref
val simplify_rounds : int option ref
val default_simplify_rounds : int ref
val rounds : unit -> int
val default_inline_max_unroll : int
val inline_max_unroll : Int_arg_helper.parsed ref
val default_inline_toplevel_threshold : int
val inline_toplevel_threshold : Int_arg_helper.parsed ref
val default_inline_call_cost : int
val default_inline_alloc_cost : int
val default_inline_prim_cost : int
val default_inline_branch_cost : int
val default_inline_indirect_cost : int
val default_inline_lifting_benefit : int
val inline_call_cost : Int_arg_helper.parsed ref
val inline_alloc_cost : Int_arg_helper.parsed ref
val inline_prim_cost : Int_arg_helper.parsed ref
val inline_branch_cost : Int_arg_helper.parsed ref
val inline_indirect_cost : Int_arg_helper.parsed ref
val inline_lifting_benefit : Int_arg_helper.parsed ref
val default_inline_branch_factor : float
val inline_branch_factor : Float_arg_helper.parsed ref
val dont_write_files : bool ref
val std_include_flag : string -> string
val std_include_dir : unit -> string list
val shared : bool ref
val dlcode : bool ref
val pic_code : bool ref
val runtime_variant : string ref
val with_runtime : bool ref
val force_slash : bool ref
val keep_docs : bool ref
val keep_locs : bool ref
val opaque : bool ref
val profile_columns : Profile.column list ref
val flambda_invariant_checks : bool ref
val unbox_closures : bool ref
val unbox_closures_factor : int ref
val default_unbox_closures_factor : int
val unbox_free_vars_of_closures : bool ref
val unbox_specialised_args : bool ref
val clambda_checks : bool ref
val cmm_invariants : bool ref
val default_inline_max_depth : int
val inline_max_depth : Int_arg_helper.parsed ref
val remove_unused_arguments : bool ref
val dump_flambda_verbose : bool ref
val classic_inlining : bool ref
val afl_instrument : bool ref
val afl_inst_ratio : int ref
val function_sections : bool ref
val all_passes : string list ref
val dumped_pass : string -> bool
val set_dumped_pass : string -> bool -> unit
val dump_into_file : bool ref
val dump_dir : string option ref
(* Support for flags that can also be set from an environment variable *)
type 'a env_reader = {
parse : string -> 'a option;
print : 'a -> string;
usage : string;
env_var : string;
}
val color : Misc.Color.setting option ref
val color_reader : Misc.Color.setting env_reader
val error_style : Misc.Error_style.setting option ref
val error_style_reader : Misc.Error_style.setting env_reader
val unboxed_types : bool ref
val insn_sched : bool ref
val insn_sched_default : bool
module Compiler_pass : sig
type t = Parsing | Typing | Scheduling | Emit
val of_string : string -> t option
val to_string : t -> string
val is_compilation_pass : t -> bool
val available_pass_names : filter:(t -> bool) -> native:bool -> string list
val can_save_ir_after : t -> bool
val compare : t -> t -> int
val to_output_filename: t -> prefix:string -> string
val of_input_filename: string -> t option
end
val stop_after : Compiler_pass.t option ref
val should_stop_after : Compiler_pass.t -> bool
val set_save_ir_after : Compiler_pass.t -> bool -> unit
val should_save_ir_after : Compiler_pass.t -> bool
val arg_spec : (string * Arg.spec * string) list ref
(* [add_arguments __LOC__ args] will add the arguments from [args] at
the end of [arg_spec], checking that they have not already been
added by [add_arguments] before. A warning is printed showing the
locations of the function from which the argument was previously
added. *)
val add_arguments : string -> (string * Arg.spec * string) list -> unit
(* [create_usage_msg program] creates a usage message for [program] *)
val create_usage_msg: string -> string
(* [print_arguments usage] print the standard usage message *)
val print_arguments : string -> unit
(* [reset_arguments ()] clear all declared arguments *)
val reset_arguments : unit -> unit
| null | https://raw.githubusercontent.com/ocaml-multicore/ocaml-tsan/ae9c1502103845550162a49fcd3f76276cdfa866/utils/clflags.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Command line flags
* Optimization parameters represented as ints indexed by round number.
* Optimization parameters represented as floats indexed by round number.
* Set all the inlining arguments for a round.
The default is set if no round is provided.
Support for flags that can also be set from an environment variable
[add_arguments __LOC__ args] will add the arguments from [args] at
the end of [arg_spec], checking that they have not already been
added by [add_arguments] before. A warning is printed showing the
locations of the function from which the argument was previously
added.
[create_usage_msg program] creates a usage message for [program]
[print_arguments usage] print the standard usage message
[reset_arguments ()] clear all declared arguments | , projet Cristal , INRIA Rocquencourt
Copyright 2005 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
module Int_arg_helper : sig
type parsed
val parse : string -> string -> parsed ref -> unit
type parse_result =
| Ok
| Parse_failed of exn
val parse_no_error : string -> parsed ref -> parse_result
val get : key:int -> parsed -> int
end
module Float_arg_helper : sig
type parsed
val parse : string -> string -> parsed ref -> unit
type parse_result =
| Ok
| Parse_failed of exn
val parse_no_error : string -> parsed ref -> parse_result
val get : key:int -> parsed -> float
end
type inlining_arguments = {
inline_call_cost : int option;
inline_alloc_cost : int option;
inline_prim_cost : int option;
inline_branch_cost : int option;
inline_indirect_cost : int option;
inline_lifting_benefit : int option;
inline_branch_factor : float option;
inline_max_depth : int option;
inline_max_unroll : int option;
inline_threshold : float option;
inline_toplevel_threshold : int option;
}
val classic_arguments : inlining_arguments
val o1_arguments : inlining_arguments
val o2_arguments : inlining_arguments
val o3_arguments : inlining_arguments
val use_inlining_arguments_set : ?round:int -> inlining_arguments -> unit
val objfiles : string list ref
val ccobjs : string list ref
val dllibs : string list ref
val cmi_file : string option ref
val compile_only : bool ref
val output_name : string option ref
val include_dirs : string list ref
val no_std_include : bool ref
val no_cwd : bool ref
val print_types : bool ref
val make_archive : bool ref
val debug : bool ref
val debug_full : bool ref
val unsafe : bool ref
val use_linscan : bool ref
val link_everything : bool ref
val custom_runtime : bool ref
val no_check_prims : bool ref
val bytecode_compatible_32 : bool ref
val output_c_object : bool ref
val output_complete_object : bool ref
val output_complete_executable : bool ref
val all_ccopts : string list ref
val classic : bool ref
val nopervasives : bool ref
val match_context_rows : int ref
val safer_matching : bool ref
val open_modules : string list ref
val preprocessor : string option ref
val all_ppx : string list ref
val absname : bool ref
val annotations : bool ref
val binary_annotations : bool ref
val use_threads : bool ref
val noassert : bool ref
val verbose : bool ref
val noprompt : bool ref
val nopromptcont : bool ref
val init_file : string option ref
val noinit : bool ref
val noversion : bool ref
val use_prims : string ref
val use_runtime : string ref
val plugin : bool ref
val principal : bool ref
val real_paths : bool ref
val recursive_types : bool ref
val strict_sequence : bool ref
val strict_formats : bool ref
val applicative_functors : bool ref
val make_runtime : bool ref
val c_compiler : string option ref
val no_auto_link : bool ref
val dllpaths : string list ref
val make_package : bool ref
val for_package : string option ref
val error_size : int ref
val float_const_prop : bool ref
val transparent_modules : bool ref
val unique_ids : bool ref
val locations : bool ref
val dump_source : bool ref
val dump_parsetree : bool ref
val dump_typedtree : bool ref
val dump_shape : bool ref
val dump_rawlambda : bool ref
val dump_lambda : bool ref
val dump_rawclambda : bool ref
val dump_clambda : bool ref
val dump_rawflambda : bool ref
val dump_flambda : bool ref
val dump_flambda_let : int option ref
val dump_instr : bool ref
val keep_camlprimc_file : bool ref
val keep_asm_file : bool ref
val optimize_for_speed : bool ref
val dump_cmm : bool ref
val dump_selection : bool ref
val dump_cse : bool ref
val dump_live : bool ref
val dump_spill : bool ref
val dump_split : bool ref
val dump_interf : bool ref
val dump_prefer : bool ref
val dump_regalloc : bool ref
val dump_reload : bool ref
val dump_scheduling : bool ref
val dump_linear : bool ref
val dump_interval : bool ref
val keep_startup_file : bool ref
val dump_combine : bool ref
val native_code : bool ref
val default_inline_threshold : float
val inline_threshold : Float_arg_helper.parsed ref
val inlining_report : bool ref
val simplify_rounds : int option ref
val default_simplify_rounds : int ref
val rounds : unit -> int
val default_inline_max_unroll : int
val inline_max_unroll : Int_arg_helper.parsed ref
val default_inline_toplevel_threshold : int
val inline_toplevel_threshold : Int_arg_helper.parsed ref
val default_inline_call_cost : int
val default_inline_alloc_cost : int
val default_inline_prim_cost : int
val default_inline_branch_cost : int
val default_inline_indirect_cost : int
val default_inline_lifting_benefit : int
val inline_call_cost : Int_arg_helper.parsed ref
val inline_alloc_cost : Int_arg_helper.parsed ref
val inline_prim_cost : Int_arg_helper.parsed ref
val inline_branch_cost : Int_arg_helper.parsed ref
val inline_indirect_cost : Int_arg_helper.parsed ref
val inline_lifting_benefit : Int_arg_helper.parsed ref
val default_inline_branch_factor : float
val inline_branch_factor : Float_arg_helper.parsed ref
val dont_write_files : bool ref
val std_include_flag : string -> string
val std_include_dir : unit -> string list
val shared : bool ref
val dlcode : bool ref
val pic_code : bool ref
val runtime_variant : string ref
val with_runtime : bool ref
val force_slash : bool ref
val keep_docs : bool ref
val keep_locs : bool ref
val opaque : bool ref
val profile_columns : Profile.column list ref
val flambda_invariant_checks : bool ref
val unbox_closures : bool ref
val unbox_closures_factor : int ref
val default_unbox_closures_factor : int
val unbox_free_vars_of_closures : bool ref
val unbox_specialised_args : bool ref
val clambda_checks : bool ref
val cmm_invariants : bool ref
val default_inline_max_depth : int
val inline_max_depth : Int_arg_helper.parsed ref
val remove_unused_arguments : bool ref
val dump_flambda_verbose : bool ref
val classic_inlining : bool ref
val afl_instrument : bool ref
val afl_inst_ratio : int ref
val function_sections : bool ref
val all_passes : string list ref
val dumped_pass : string -> bool
val set_dumped_pass : string -> bool -> unit
val dump_into_file : bool ref
val dump_dir : string option ref
type 'a env_reader = {
parse : string -> 'a option;
print : 'a -> string;
usage : string;
env_var : string;
}
val color : Misc.Color.setting option ref
val color_reader : Misc.Color.setting env_reader
val error_style : Misc.Error_style.setting option ref
val error_style_reader : Misc.Error_style.setting env_reader
val unboxed_types : bool ref
val insn_sched : bool ref
val insn_sched_default : bool
module Compiler_pass : sig
type t = Parsing | Typing | Scheduling | Emit
val of_string : string -> t option
val to_string : t -> string
val is_compilation_pass : t -> bool
val available_pass_names : filter:(t -> bool) -> native:bool -> string list
val can_save_ir_after : t -> bool
val compare : t -> t -> int
val to_output_filename: t -> prefix:string -> string
val of_input_filename: string -> t option
end
val stop_after : Compiler_pass.t option ref
val should_stop_after : Compiler_pass.t -> bool
val set_save_ir_after : Compiler_pass.t -> bool -> unit
val should_save_ir_after : Compiler_pass.t -> bool
val arg_spec : (string * Arg.spec * string) list ref
val add_arguments : string -> (string * Arg.spec * string) list -> unit
val create_usage_msg: string -> string
val print_arguments : string -> unit
val reset_arguments : unit -> unit
|
7b9c9ea131779f8922cc4369c5f244ab463266b08925ffde226eb7333ffd319c | originrose/cortex | cuda_gradient_test.clj | (ns ^:gpu cortex.compute.nn.cuda-gradient-test
(:require [clojure.test :refer :all]
[cortex.compute.verify.utils :refer [def-double-float-test] :as verify-utils]
;[cortex.compute.cuda.backend :as cuda-backend]
[cortex.verify.nn.gradient :as verify-gradient]
[cortex.nn.execute :as execute]))
(use-fixtures :each verify-utils/test-wrapper)
(defn create-context
[]
(execute/compute-context :datatype verify-utils/*datatype* :backend :cuda))
;;The gradient tests are just too sensitive to precision to work well here as the GPU
;;has different precision than the CPU for things. Doubles work fine but
floating point numbers will fail like 1/10 times .
(deftest corn-gradient
(verify-gradient/corn-gradient (create-context)))
(deftest batchnorm-gradient
(verify-gradient/batch-normalization-gradient (create-context)))
(deftest lrn-gradient
(verify-gradient/lrn-gradient (create-context)))
(deftest prelu-gradient
(verify-gradient/prelu-gradient (create-context)))
(deftest concat-gradient
(verify-gradient/concat-gradient (create-context)))
(deftest split-gradient
(verify-gradient/split-gradient (create-context)))
(deftest join-+-gradient
(verify-gradient/join-+-gradient (create-context)))
(deftest join-*-gradient
(verify-gradient/join-*-gradient (create-context)))
(deftest censor-gradient
(verify-gradient/censor-gradient (create-context)))
(deftest yolo-gradient
(verify-gradient/yolo-gradient (create-context)))
| null | https://raw.githubusercontent.com/originrose/cortex/94b1430538e6187f3dfd1697c36ff2c62b475901/test/clj/cortex/compute/nn/cuda_gradient_test.clj | clojure | [cortex.compute.cuda.backend :as cuda-backend]
The gradient tests are just too sensitive to precision to work well here as the GPU
has different precision than the CPU for things. Doubles work fine but | (ns ^:gpu cortex.compute.nn.cuda-gradient-test
(:require [clojure.test :refer :all]
[cortex.compute.verify.utils :refer [def-double-float-test] :as verify-utils]
[cortex.verify.nn.gradient :as verify-gradient]
[cortex.nn.execute :as execute]))
(use-fixtures :each verify-utils/test-wrapper)
(defn create-context
[]
(execute/compute-context :datatype verify-utils/*datatype* :backend :cuda))
floating point numbers will fail like 1/10 times .
(deftest corn-gradient
(verify-gradient/corn-gradient (create-context)))
(deftest batchnorm-gradient
(verify-gradient/batch-normalization-gradient (create-context)))
(deftest lrn-gradient
(verify-gradient/lrn-gradient (create-context)))
(deftest prelu-gradient
(verify-gradient/prelu-gradient (create-context)))
(deftest concat-gradient
(verify-gradient/concat-gradient (create-context)))
(deftest split-gradient
(verify-gradient/split-gradient (create-context)))
(deftest join-+-gradient
(verify-gradient/join-+-gradient (create-context)))
(deftest join-*-gradient
(verify-gradient/join-*-gradient (create-context)))
(deftest censor-gradient
(verify-gradient/censor-gradient (create-context)))
(deftest yolo-gradient
(verify-gradient/yolo-gradient (create-context)))
|
76df4aca8880ac56a5fa3e7f364aba68a5803b7b083dda32e581c75f9b29ab3e | exercism/babashka | project.clj | (defproject space-age "0.1.0-SNAPSHOT"
:description "space-age exercise."
:url "-age"
:dependencies [[org.clojure/clojure "1.10.0"]])
| null | https://raw.githubusercontent.com/exercism/babashka/707356c52e08490e66cb1b2e63e4f4439d91cf08/exercises/practice/space-age/project.clj | clojure | (defproject space-age "0.1.0-SNAPSHOT"
:description "space-age exercise."
:url "-age"
:dependencies [[org.clojure/clojure "1.10.0"]])
| |
6ae1a00fb88a03d0a74e6cf3803275b4d86317792c6162ff1a877006915135b4 | rems-project/lem | pmap.mli | (***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
(* the special exception on linking described in file ../LICENSE. *)
(* *)
(***********************************************************************)
Modified by 2010 - 11 - 30
$ I d : map.mli 10632 2010 - 07 - 24 14:16:58Z garrigue $
(** Association tables over ordered types.
This module implements applicative association tables, also known as
finite maps or dictionaries, given a total ordering function
over the keys.
All operations over maps are purely applicative (no side-effects).
The implementation uses balanced binary trees, and therefore searching
and insertion take time logarithmic in the size of the map.
*)
type ('key,+'a) map
(** The type of maps from type ['key] to type ['a]. *)
val empty: ('key -> 'key -> int) -> ('key,'a) map
(** The empty map. *)
val is_empty: ('key,'a) map -> bool
(** Test whether a map is empty or not. *)
val mem: 'key -> ('key,'a) map -> bool
(** [mem x m] returns [true] if [m] contains a binding for [x],
and [false] otherwise. *)
val add: 'key -> 'a -> ('key,'a) map -> ('key,'a) map
(** [add x y m] returns a map containing the same bindings as
[m], plus a binding of [x] to [y]. If [x] was already bound
in [m], its previous binding disappears. *)
val singleton: ('key -> 'key -> int) -> 'key -> 'a -> ('key,'a) map
* [ singleton x y ] returns the one - element map that contains a binding [ y ]
for [ x ] .
@since 3.12.0
for [x].
@since 3.12.0
*)
val remove: 'key -> ('key,'a) map -> ('key,'a) map
(** [remove x m] returns a map containing the same bindings as
[m], except for [x] which is unbound in the returned map. *)
val merge:
('key -> 'a option -> 'b option -> 'c option) -> ('key,'a) map -> ('key,'b) map -> ('key,'c) map
* [ merge f m1 m2 ] computes a map whose keys is a subset of keys of [ m1 ]
and of [ m2 ] . The presence of each such binding , and the corresponding
value , is determined with the function [ f ] .
@since 3.12.0
and of [m2]. The presence of each such binding, and the corresponding
value, is determined with the function [f].
@since 3.12.0
*)
val union: ('key,'a) map -> ('key,'a) map -> ('key,'a) map
* [ union ] computes a map whose keys is a subset of keys of [ m1 ]
and of [ m2 ] . The bindings in take precedence .
@since 3.12.0
and of [m2]. The bindings in m2 take precedence.
@since 3.12.0
*)
val compare: ('a -> 'a -> int) -> ('key,'a) map -> ('key,'a) map -> int
* Total ordering between maps . The first argument is a total ordering
used to compare data associated with equal keys in the two maps .
used to compare data associated with equal keys in the two maps. *)
val equal: ('a -> 'a -> bool) -> ('key,'a) map -> ('key,'a) map -> bool
* [ equal cmp ] tests whether the maps [ m1 ] and [ m2 ] are
equal , that is , contain equal keys and associate them with
equal data . [ cmp ] is the equality predicate used to compare
the data associated with the keys .
equal, that is, contain equal keys and associate them with
equal data. [cmp] is the equality predicate used to compare
the data associated with the keys. *)
val iter: ('key -> 'a -> unit) -> ('key,'a) map -> unit
* [ iter f m ] applies [ f ] to all bindings in map [ m ] .
[ f ] receives the key as first argument , and the associated value
as second argument . The bindings are passed to [ f ] in increasing
order with respect to the ordering over the type of the keys .
[f] receives the key as first argument, and the associated value
as second argument. The bindings are passed to [f] in increasing
order with respect to the ordering over the type of the keys. *)
val fold: ('key -> 'a -> 'b -> 'b) -> ('key,'a) map -> 'b -> 'b
(** [fold f m a] computes [(f kN dN ... (f k1 d1 a)...)],
where [k1 ... kN] are the keys of all bindings in [m]
(in increasing order), and [d1 ... dN] are the associated data. *)
val for_all: ('key -> 'a -> bool) -> ('key,'a) map -> bool
* [ for_all p m ] checks if all the bindings of the map
satisfy the predicate [ p ] .
@since 3.12.0
satisfy the predicate [p].
@since 3.12.0
*)
val exist: ('key -> 'a -> bool) -> ('key,'a) map -> bool
* [ exists p m ] checks if at least one binding of the map
satisfy the predicate [ p ] .
@since 3.12.0
satisfy the predicate [p].
@since 3.12.0
*)
val filter: ('key -> 'a -> bool) -> ('key,'a) map -> ('key,'a) map
* [ filter p m ] returns the map with all the bindings in [ m ]
that satisfy predicate [ p ] .
@since 3.12.0
that satisfy predicate [p].
@since 3.12.0
*)
val partition: ('key -> 'a -> bool) -> ('key,'a) map -> ('key,'a) map * ('key,'a) map
* [ partition p m ] returns a pair of maps [ ( m1 , m2 ) ] , where
[ m1 ] contains all the bindings of [ s ] that satisfy the
predicate [ p ] , and [ m2 ] is the map with all the bindings of
[ s ] that do not satisfy [ p ] .
@since 3.12.0
[m1] contains all the bindings of [s] that satisfy the
predicate [p], and [m2] is the map with all the bindings of
[s] that do not satisfy [p].
@since 3.12.0
*)
val cardinal: ('key,'a) map -> int
* Return the number of bindings of a map .
@since 3.12.0
@since 3.12.0
*)
val bindings_list: ('key,'a) map -> ('key * 'a) list
* Return the list of all bindings of the given map .
The returned list is sorted in increasing order with respect
to the ordering [ Ord.compare ] , where [ ] is the argument
given to { ! Map . Make } .
@since 3.12.0
The returned list is sorted in increasing order with respect
to the ordering [Ord.compare], where [Ord] is the argument
given to {!Map.Make}.
@since 3.12.0
*)
val bindings: (('key * 'a) -> ('key * 'a) -> int) -> ('key,'a) map -> ('key * 'a) Pset.set
(** Return a set of all bindings of the given map. *)
(** [domain m] returns the domain of the map [m], i.e. the
set of keys of this map. *)
val domain : ('key,'a) map -> 'key Pset.set
(** [range m] returns the range of the map [m], i.e. the
set of all values stored in this map. *)
val range : ('a -> 'a -> int) -> ('key,'a) map -> 'a Pset.set
val min_binding: ('key,'a) map -> ('key * 'a)
* Return the smallest binding of the given map
( with respect to the [ Ord.compare ] ordering ) , or raise
[ Not_found ] if the map is empty .
@since 3.12.0
(with respect to the [Ord.compare] ordering), or raise
[Not_found] if the map is empty.
@since 3.12.0
*)
val max_binding: ('key,'a) map -> ('key * 'a)
* Same as { ! Map . S.min_binding } , but returns the largest binding
of the given map .
@since 3.12.0
of the given map.
@since 3.12.0
*)
val choose: ('key,'a) map -> ('key * 'a)
* Return one binding of the given map , or raise [ Not_found ] if
the map is empty . Which binding is chosen is unspecified ,
but equal bindings will be chosen for equal maps .
@since 3.12.0
the map is empty. Which binding is chosen is unspecified,
but equal bindings will be chosen for equal maps.
@since 3.12.0
*)
val split: 'key -> ('key,'a) map -> ('key,'a) map * 'a option * ('key,'a) map
* [ split x m ] returns a triple [ ( l , data , r ) ] , where
[ l ] is the map with all the bindings of [ m ] whose key
is strictly less than [ x ] ;
[ r ] is the map with all the bindings of [ m ] whose key
is strictly greater than [ x ] ;
[ data ] is [ None ] if [ m ] contains no binding for [ x ] ,
or [ Some v ] if [ m ] binds [ v ] to [ x ] .
@since 3.12.0
[l] is the map with all the bindings of [m] whose key
is strictly less than [x];
[r] is the map with all the bindings of [m] whose key
is strictly greater than [x];
[data] is [None] if [m] contains no binding for [x],
or [Some v] if [m] binds [v] to [x].
@since 3.12.0
*)
val find: 'key -> ('key,'a) map -> 'a
(** [find x m] returns the current binding of [x] in [m],
or raises [Not_found] if no such binding exists. *)
val lookup: 'key -> ('key,'a) map -> 'a option
(** [lookup x m] returns the current binding of [x] in [m]. In contrast to [find],
it returns [None] instead of raising an exception, if no such binding exists. *)
val map: ('a -> 'b) -> ('key,'a) map -> ('key,'b) map
(** [map f m] returns a map with same domain as [m], where the
associated value [a] of all bindings of [m] has been
replaced by the result of the application of [f] to [a].
The bindings are passed to [f] in increasing order
with respect to the ordering over the type of the keys. *)
val mapi: ('key -> 'a -> 'b) -> ('key,'a) map -> ('key,'b) map
(** Same as {!Map.S.map}, but the function receives as arguments both the
key and the associated value for each binding of the map. *)
val from_set : ('key -> 'v) -> ('key Pset.set) -> ('key, 'v) map
| null | https://raw.githubusercontent.com/rems-project/lem/a839114e468119d9ac0868d7dc53eae7f3cc3a6c/ocaml-lib/pmap.mli | ocaml | *********************************************************************
Objective Caml
the special exception on linking described in file ../LICENSE.
*********************************************************************
* Association tables over ordered types.
This module implements applicative association tables, also known as
finite maps or dictionaries, given a total ordering function
over the keys.
All operations over maps are purely applicative (no side-effects).
The implementation uses balanced binary trees, and therefore searching
and insertion take time logarithmic in the size of the map.
* The type of maps from type ['key] to type ['a].
* The empty map.
* Test whether a map is empty or not.
* [mem x m] returns [true] if [m] contains a binding for [x],
and [false] otherwise.
* [add x y m] returns a map containing the same bindings as
[m], plus a binding of [x] to [y]. If [x] was already bound
in [m], its previous binding disappears.
* [remove x m] returns a map containing the same bindings as
[m], except for [x] which is unbound in the returned map.
* [fold f m a] computes [(f kN dN ... (f k1 d1 a)...)],
where [k1 ... kN] are the keys of all bindings in [m]
(in increasing order), and [d1 ... dN] are the associated data.
* Return a set of all bindings of the given map.
* [domain m] returns the domain of the map [m], i.e. the
set of keys of this map.
* [range m] returns the range of the map [m], i.e. the
set of all values stored in this map.
* [find x m] returns the current binding of [x] in [m],
or raises [Not_found] if no such binding exists.
* [lookup x m] returns the current binding of [x] in [m]. In contrast to [find],
it returns [None] instead of raising an exception, if no such binding exists.
* [map f m] returns a map with same domain as [m], where the
associated value [a] of all bindings of [m] has been
replaced by the result of the application of [f] to [a].
The bindings are passed to [f] in increasing order
with respect to the ordering over the type of the keys.
* Same as {!Map.S.map}, but the function receives as arguments both the
key and the associated value for each binding of the map. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
Modified by 2010 - 11 - 30
$ I d : map.mli 10632 2010 - 07 - 24 14:16:58Z garrigue $
type ('key,+'a) map
val empty: ('key -> 'key -> int) -> ('key,'a) map
val is_empty: ('key,'a) map -> bool
val mem: 'key -> ('key,'a) map -> bool
val add: 'key -> 'a -> ('key,'a) map -> ('key,'a) map
val singleton: ('key -> 'key -> int) -> 'key -> 'a -> ('key,'a) map
* [ singleton x y ] returns the one - element map that contains a binding [ y ]
for [ x ] .
@since 3.12.0
for [x].
@since 3.12.0
*)
val remove: 'key -> ('key,'a) map -> ('key,'a) map
val merge:
('key -> 'a option -> 'b option -> 'c option) -> ('key,'a) map -> ('key,'b) map -> ('key,'c) map
* [ merge f m1 m2 ] computes a map whose keys is a subset of keys of [ m1 ]
and of [ m2 ] . The presence of each such binding , and the corresponding
value , is determined with the function [ f ] .
@since 3.12.0
and of [m2]. The presence of each such binding, and the corresponding
value, is determined with the function [f].
@since 3.12.0
*)
val union: ('key,'a) map -> ('key,'a) map -> ('key,'a) map
* [ union ] computes a map whose keys is a subset of keys of [ m1 ]
and of [ m2 ] . The bindings in take precedence .
@since 3.12.0
and of [m2]. The bindings in m2 take precedence.
@since 3.12.0
*)
val compare: ('a -> 'a -> int) -> ('key,'a) map -> ('key,'a) map -> int
* Total ordering between maps . The first argument is a total ordering
used to compare data associated with equal keys in the two maps .
used to compare data associated with equal keys in the two maps. *)
val equal: ('a -> 'a -> bool) -> ('key,'a) map -> ('key,'a) map -> bool
* [ equal cmp ] tests whether the maps [ m1 ] and [ m2 ] are
equal , that is , contain equal keys and associate them with
equal data . [ cmp ] is the equality predicate used to compare
the data associated with the keys .
equal, that is, contain equal keys and associate them with
equal data. [cmp] is the equality predicate used to compare
the data associated with the keys. *)
val iter: ('key -> 'a -> unit) -> ('key,'a) map -> unit
* [ iter f m ] applies [ f ] to all bindings in map [ m ] .
[ f ] receives the key as first argument , and the associated value
as second argument . The bindings are passed to [ f ] in increasing
order with respect to the ordering over the type of the keys .
[f] receives the key as first argument, and the associated value
as second argument. The bindings are passed to [f] in increasing
order with respect to the ordering over the type of the keys. *)
val fold: ('key -> 'a -> 'b -> 'b) -> ('key,'a) map -> 'b -> 'b
val for_all: ('key -> 'a -> bool) -> ('key,'a) map -> bool
* [ for_all p m ] checks if all the bindings of the map
satisfy the predicate [ p ] .
@since 3.12.0
satisfy the predicate [p].
@since 3.12.0
*)
val exist: ('key -> 'a -> bool) -> ('key,'a) map -> bool
* [ exists p m ] checks if at least one binding of the map
satisfy the predicate [ p ] .
@since 3.12.0
satisfy the predicate [p].
@since 3.12.0
*)
val filter: ('key -> 'a -> bool) -> ('key,'a) map -> ('key,'a) map
* [ filter p m ] returns the map with all the bindings in [ m ]
that satisfy predicate [ p ] .
@since 3.12.0
that satisfy predicate [p].
@since 3.12.0
*)
val partition: ('key -> 'a -> bool) -> ('key,'a) map -> ('key,'a) map * ('key,'a) map
* [ partition p m ] returns a pair of maps [ ( m1 , m2 ) ] , where
[ m1 ] contains all the bindings of [ s ] that satisfy the
predicate [ p ] , and [ m2 ] is the map with all the bindings of
[ s ] that do not satisfy [ p ] .
@since 3.12.0
[m1] contains all the bindings of [s] that satisfy the
predicate [p], and [m2] is the map with all the bindings of
[s] that do not satisfy [p].
@since 3.12.0
*)
val cardinal: ('key,'a) map -> int
* Return the number of bindings of a map .
@since 3.12.0
@since 3.12.0
*)
val bindings_list: ('key,'a) map -> ('key * 'a) list
* Return the list of all bindings of the given map .
The returned list is sorted in increasing order with respect
to the ordering [ Ord.compare ] , where [ ] is the argument
given to { ! Map . Make } .
@since 3.12.0
The returned list is sorted in increasing order with respect
to the ordering [Ord.compare], where [Ord] is the argument
given to {!Map.Make}.
@since 3.12.0
*)
val bindings: (('key * 'a) -> ('key * 'a) -> int) -> ('key,'a) map -> ('key * 'a) Pset.set
val domain : ('key,'a) map -> 'key Pset.set
val range : ('a -> 'a -> int) -> ('key,'a) map -> 'a Pset.set
val min_binding: ('key,'a) map -> ('key * 'a)
* Return the smallest binding of the given map
( with respect to the [ Ord.compare ] ordering ) , or raise
[ Not_found ] if the map is empty .
@since 3.12.0
(with respect to the [Ord.compare] ordering), or raise
[Not_found] if the map is empty.
@since 3.12.0
*)
val max_binding: ('key,'a) map -> ('key * 'a)
* Same as { ! Map . S.min_binding } , but returns the largest binding
of the given map .
@since 3.12.0
of the given map.
@since 3.12.0
*)
val choose: ('key,'a) map -> ('key * 'a)
* Return one binding of the given map , or raise [ Not_found ] if
the map is empty . Which binding is chosen is unspecified ,
but equal bindings will be chosen for equal maps .
@since 3.12.0
the map is empty. Which binding is chosen is unspecified,
but equal bindings will be chosen for equal maps.
@since 3.12.0
*)
val split: 'key -> ('key,'a) map -> ('key,'a) map * 'a option * ('key,'a) map
* [ split x m ] returns a triple [ ( l , data , r ) ] , where
[ l ] is the map with all the bindings of [ m ] whose key
is strictly less than [ x ] ;
[ r ] is the map with all the bindings of [ m ] whose key
is strictly greater than [ x ] ;
[ data ] is [ None ] if [ m ] contains no binding for [ x ] ,
or [ Some v ] if [ m ] binds [ v ] to [ x ] .
@since 3.12.0
[l] is the map with all the bindings of [m] whose key
is strictly less than [x];
[r] is the map with all the bindings of [m] whose key
is strictly greater than [x];
[data] is [None] if [m] contains no binding for [x],
or [Some v] if [m] binds [v] to [x].
@since 3.12.0
*)
val find: 'key -> ('key,'a) map -> 'a
val lookup: 'key -> ('key,'a) map -> 'a option
val map: ('a -> 'b) -> ('key,'a) map -> ('key,'b) map
val mapi: ('key -> 'a -> 'b) -> ('key,'a) map -> ('key,'b) map
val from_set : ('key -> 'v) -> ('key Pset.set) -> ('key, 'v) map
|
8f137ef1eeec6592f31c6594fdc6525237c26158c8de0baa862beffd66134075 | B-Lang-org/bsc | ListMap.hs | Copyright ( c ) 1982 - 1999 ,
-- See LICENSE for the full license.
--
module ListMap(
ListMap,
toList, fromList,
length,
null,
lookup, lookupWithDefault, lookupWithDefaultBy, lookupBy
) where
import Prelude
-- @@ Lists as finite mappings.
type ListMap a b = [(a, b)]
toList :: ListMap a b -> [(a, b)]
toList l = l
fromList :: [(a, b)] -> ListMap a b
fromList l = l
lookupWithDefault :: (Eq a) => [(a, b)] -> b -> a -> b
lookupWithDefault [] d _ = d
lookupWithDefault ((x,y):xys) d x' = if x == x' then y else lookupWithDefault xys d x'
lookupWithDefaultBy :: (a -> a -> Bool) -> [(a, b)] -> b -> a -> b
lookupWithDefaultBy _ [] d _ = d
lookupWithDefaultBy match ((x,y):xys) d x' = if (match x x') then y
else lookupWithDefaultBy match xys d x'
lookupBy :: (a -> a -> Bool) -> [(a, b)] -> a -> Maybe b
lookupBy _ [] _ = Nothing
lookupBy match ((x,y):xys) x' = if (match x x') then Just y
else lookupBy match xys x'
| null | https://raw.githubusercontent.com/B-Lang-org/bsc/bd141b505394edc5a4bdd3db442a9b0a8c101f0f/src/comp/Libs/ListMap.hs | haskell | See LICENSE for the full license.
@@ Lists as finite mappings. | Copyright ( c ) 1982 - 1999 ,
module ListMap(
ListMap,
toList, fromList,
length,
null,
lookup, lookupWithDefault, lookupWithDefaultBy, lookupBy
) where
import Prelude
type ListMap a b = [(a, b)]
toList :: ListMap a b -> [(a, b)]
toList l = l
fromList :: [(a, b)] -> ListMap a b
fromList l = l
lookupWithDefault :: (Eq a) => [(a, b)] -> b -> a -> b
lookupWithDefault [] d _ = d
lookupWithDefault ((x,y):xys) d x' = if x == x' then y else lookupWithDefault xys d x'
lookupWithDefaultBy :: (a -> a -> Bool) -> [(a, b)] -> b -> a -> b
lookupWithDefaultBy _ [] d _ = d
lookupWithDefaultBy match ((x,y):xys) d x' = if (match x x') then y
else lookupWithDefaultBy match xys d x'
lookupBy :: (a -> a -> Bool) -> [(a, b)] -> a -> Maybe b
lookupBy _ [] _ = Nothing
lookupBy match ((x,y):xys) x' = if (match x x') then Just y
else lookupBy match xys x'
|
42a59eeaccece85adedb5e48c00889244b7d295c480c3d9bd1f88d95a239741c | aryx/ocamltarzan | pa_sexp_conv.ml | pp camlp4orf
File : pa_sexp_conv.ml
Copyright ( C ) 2005-
Jane Street Holding , LLC
Author :
email : mmottl\@janestcapital.com
WWW :
This file is derived from file " pa_tywith.ml " of version 0.45 of the
library " " .
is Copyright ( C ) 2004 , 2005 by
< >
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
Copyright (C) 2005-
Jane Street Holding, LLC
Author: Markus Mottl
email: mmottl\@janestcapital.com
WWW:
This file is derived from file "pa_tywith.ml" of version 0.45 of the
library "Tywith".
Tywith is Copyright (C) 2004, 2005 by
Martin Sandin <>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
(* Pa_sexp_conv: Preprocessing Module for Automated S-expression Conversions *)
open Printf
open Lexing
open Camlp4
open PreCast
open Ast
open Pa_type_conv
(* Utility functions *)
let ( *** ) f g x = f (g x)
let mk_rev_bindings _loc fps =
let coll (i, bindings, patts, vars) fp =
let name = "v" ^ string_of_int i in
let var_expr = Gen.ide _loc name in
let expr =
match fp with
| `Fun fun_expr -> <:expr< $fun_expr$ $var_expr$ >>
| `Match matchings -> <:expr< match $var_expr$ with [ $matchings$ ] >>
in
let patt = Gen.idp _loc name in
let bindings = <:binding< $patt$ = $expr$ and $bindings$ >> in
i - 1, bindings, patt :: patts, var_expr :: vars
in
let n = List.length fps in
let _, bindings, patts, expr =
List.fold_left coll (n, BiNil _loc, [], []) fps
in
bindings, patts, expr
let mk_bindings _loc fps = mk_rev_bindings _loc (List.rev fps)
let unroll_cnv_fp _loc var = function
| `Fun fun_expr -> <:expr< $fun_expr$ $var$ >>
| `Match matchings -> <:expr< match $var$ with [ $matchings$ ] >>
let unroll_fun_matches _loc fp1 fp2 =
match fp1, fp2 with
| `Fun fun_expr1, `Fun fun_expr2 -> <:expr< $fun_expr1$ $fun_expr2$ >>
| `Fun fun_expr, `Match matching -> <:expr< $fun_expr$ (fun [ $matching$ ]) >>
| _ -> assert false (* impossible *)
let rec sig_of_tds cnv = function
| TyDcl (_loc, type_name, tps, _rhs, _cl) -> cnv _loc type_name tps
| TyAnd (_loc, tp1, tp2) ->
<:sig_item< $sig_of_tds cnv tp1$; $sig_of_tds cnv tp2$ >>
| _ -> assert false (* impossible *)
(* Generators for S-expressions *)
(* Generates the signature for type conversion to S-expressions *)
module Sig_generate_sexp_of = struct
let sig_of_td _loc type_name tps =
let rec loop this_type = function
| [] -> <:ctyp< $this_type$ -> Sexp.t >>
| tp :: tps ->
let tp = Gen.drop_variance_annotations _loc tp in
let sexp_of = loop <:ctyp< $this_type$ $tp$ >> tps in
<:ctyp< ( $tp$ -> Sexp.t ) -> $sexp_of$ >>
in
let sexp_of = loop <:ctyp< $lid:type_name$ >> tps in
<:sig_item< value $lid: "sexp_of_" ^ type_name$ : $sexp_of$ >>
let mk_sig tds = <:sig_item< $sig_of_tds sig_of_td tds$ >>
let () = add_sig_generator "sexp_of" mk_sig
end
(* Generates the signature for type conversion from S-expressions *)
module Sig_generate_of_sexp = struct
let sig_of_td _loc type_name tps =
let rec loop this_tp = function
| [] -> <:ctyp< Sexp.t -> $this_tp$ >>
| tp :: tps ->
let tp = Gen.drop_variance_annotations _loc tp in
let of_sexp = loop <:ctyp< $this_tp$ $tp$ >> tps in
<:ctyp< ( Sexp.t -> $tp$ ) -> $of_sexp$ >>
in
let of_sexp = loop <:ctyp< $lid:type_name$ >> tps in
<:sig_item<
value $lid: type_name ^ "_of_sexp"$ : $of_sexp$;
>>
let mk_sig tds = <:sig_item< $sig_of_tds sig_of_td tds$ >>
let () = add_sig_generator "of_sexp" mk_sig
end
(* Generates the signature for type conversion to S-expressions *)
module Sig_generate = struct
let () =
add_sig_generator "sexp" (fun tds ->
let _loc = Loc.ghost in
<:sig_item<
$Sig_generate_sexp_of.mk_sig tds$; $Sig_generate_of_sexp.mk_sig tds$
>>)
end
(* Generator for converters of OCaml-values to S-expressions *)
module Generate_sexp_of = struct
let mk_abst_call _loc tn rev_path =
<:expr< $id:Gen.ident_of_rev_path _loc (("sexp_of_" ^ tn) :: rev_path)$ >>
(* Conversion of type paths *)
let sexp_of_path_fun _loc id =
match Gen.get_rev_id_path id [] with
| ["unit"] -> <:expr< Conv.sexp_of_unit >>
| ["bool"] -> <:expr< Conv.sexp_of_bool >>
| ["string"] -> <:expr< Conv.sexp_of_string >>
| ["char"] -> <:expr< Conv.sexp_of_char >>
| ["int"] -> <:expr< Conv.sexp_of_int >>
| ["float"] -> <:expr< Conv.sexp_of_float >>
| ["int32"] -> <:expr< Conv.sexp_of_int32 >>
| ["int64"] -> <:expr< Conv.sexp_of_int64 >>
| ["nativeint"] -> <:expr< Conv.sexp_of_nativeint >>
| ["big_int"; "Big_int"] -> <:expr< Conv.sexp_of_big_int >>
| ["nat"; "Nat"] -> <:expr< Conv.sexp_of_nat >>
| ["num"; "Num"] -> <:expr< Conv.sexp_of_num >>
| ["ratio"; "Ratio"] -> <:expr< Conv.sexp_of_ratio >>
| ["ref"] -> <:expr< Conv.sexp_of_ref >>
| ["t"; "Lazy"] | ["lazy_t"] -> <:expr< Conv.sexp_of_lazy >>
| ["option"] -> <:expr< Conv.sexp_of_option >>
| ["list"] -> <:expr< Conv.sexp_of_list >>
| ["array"] -> <:expr< Conv.sexp_of_array >>
| ["t"; "Hashtbl"] -> <:expr< Conv.sexp_of_hashtbl >>
| tn :: rev_path -> mk_abst_call _loc tn rev_path
| [] -> assert false (* impossible *)
(* Conversion of types *)
let rec sexp_of_type _loc = function
| <:ctyp< $tp1$ $tp2$ >> -> `Fun (sexp_of_appl_fun _loc tp1 tp2)
| <:ctyp< ( $tup:tp$ ) >> -> sexp_of_tuple _loc tp
| <:ctyp< '$parm$ >> -> `Fun (Gen.ide _loc ("_of_" ^ parm))
| <:ctyp< $id:id$ >> -> `Fun (sexp_of_path_fun _loc id)
| <:ctyp< $_$ -> $_$ >> -> `Fun <:expr< Conv.sexp_of_fun >>
| <:ctyp< [< $row_fields$ ] >> | <:ctyp< [> $row_fields$ ] >>
| <:ctyp< [= $row_fields$ ] >> -> sexp_of_variant _loc row_fields
| <:ctyp< ! $parms$ . $poly_tp$ >> -> sexp_of_poly _loc parms poly_tp
| _ ->
prerr_endline (get_loc_err _loc "sexp_of_type: unknown type construct");
exit 1
(* Conversion of polymorphic types *)
and sexp_of_appl_fun _loc tp1 tp2 =
match sexp_of_type _loc tp1, sexp_of_type _loc tp2 with
| `Fun fun_expr1, `Fun fun_expr2 -> <:expr< $fun_expr1$ $fun_expr2$ >>
| `Fun fun_expr, `Match matching -> <:expr< $fun_expr$ (fun [ $matching$ ]) >>
| _ -> assert false (* impossible *)
(* Conversion of tuples *)
and sexp_of_tuple _loc tp =
let fps = List.map (sexp_of_type _loc) (list_of_ctyp tp []) in
let bindings, patts, vars = mk_bindings _loc fps in
let in_expr = <:expr< Sexp.List $Gen.mk_expr_lst _loc vars$ >> in
let expr = <:expr< let $bindings$ in $in_expr$ >> in
`Match <:match_case< ( $tup:paCom_of_list patts$ ) -> $expr$ >>
(* Conversion of variant types *)
and sexp_of_variant _loc row_fields =
let rec loop = function
| <:ctyp< $tp1$ | $tp2$ >> -> <:match_case< $loop tp1$ | $loop tp2$ >>
| <:ctyp< `$cnstr$ >> ->
<:match_case< `$cnstr$ -> Sexp.Atom $str:cnstr$ >>
| <:ctyp< `$cnstr$ of $tps$ >> ->
let fps = List.map (sexp_of_type _loc) (list_of_ctyp tps []) in
let bindings, patts, vars = mk_bindings _loc fps in
let cnstr_expr = <:expr< Sexp.Atom $str:cnstr$ >> in
let expr =
<:expr<
let $bindings$ in
Sexp.List $Gen.mk_expr_lst _loc (cnstr_expr :: vars)$
>>
in
<:match_case< `$cnstr$ $paSem_of_list patts$ -> $expr$ >>
| <:ctyp< [= $row_fields$ ] >> | <:ctyp< [> $row_fields$ ] >>
| <:ctyp< [< $row_fields$ ] >> -> loop row_fields
| <:ctyp< $tp1$ $tp2$ >> ->
let id_path = Gen.get_appl_path _loc tp1 in
let call = sexp_of_appl_fun _loc tp1 tp2 in
<:match_case< #$id_path$ as v -> $call$ v >>
| <:ctyp< $id:id$ >> ->
let call =
match Gen.get_rev_id_path id [] with
| tn :: rev_path -> mk_abst_call _loc tn rev_path
| [] -> assert false (* impossible *)
in
<:match_case< #$id$ as v -> $call$ v >>
| _ -> failwith "sexp_of_variant: unknown type"
in
`Match (loop row_fields)
(* Polymorphic record fields *)
and sexp_of_poly _loc parms tp =
let bindings =
let mk_binding parm =
<:binding<
$Gen.idp _loc ("_of_" ^ parm)$ = Conv.sexp_of_abstr
>>
in
List.map mk_binding (Gen.ty_var_list_of_ctyp parms [])
in
match sexp_of_type _loc tp with
| `Fun fun_expr -> `Fun <:expr< let $list:bindings$ in $fun_expr$ >>
| `Match matchings ->
`Match
<:match_case<
arg ->
let $list:bindings$ in
match arg with
[ $matchings$ ]
>>
(* Conversion of sum types *)
let rec branch_sum _loc = function
| <:ctyp< $tp1$ | $tp2$ >> ->
<:match_case< $branch_sum _loc tp1$ | $branch_sum _loc tp2$ >>
| <:ctyp< $uid:cnstr$ >> ->
<:match_case< $uid:cnstr$ -> Sexp.Atom $str:cnstr$ >>
| <:ctyp< $uid:cnstr$ of $tps$ >> ->
let fps = List.map (sexp_of_type _loc) (list_of_ctyp tps []) in
let cnstr_expr = <:expr< Sexp.Atom $str:cnstr$ >> in
let bindings, patts, vars = mk_bindings _loc fps in
let patt =
match patts with
| [patt] -> patt
| _ -> <:patt< ( $tup:paCom_of_list patts$ ) >>
in
<:match_case<
$uid:cnstr$ $patt$ ->
let $bindings$ in
Sexp.List $Gen.mk_expr_lst _loc (cnstr_expr :: vars)$
>>
| _ -> failwith "branch_sum: unknown type"
let sexp_of_sum _loc alts = `Match (branch_sum _loc alts)
(* Conversion of record types *)
let mk_rec_patt _loc patt name =
let p = <:patt< $lid:name$ = $lid:"v_" ^ name$ >> in
<:patt< $patt$; $p$ >>
let mk_cnv_expr _loc tp var =
match sexp_of_type _loc tp with
| `Fun fun_expr -> <:expr< $fun_expr$ $var$ >>
| `Match matchings -> <:expr< match $var$ with [ $matchings$ ] >>
let sexp_of_record _loc flds_ctyp =
let flds = list_of_ctyp flds_ctyp [] in
let rec coll (patt, expr) = function
? ? ? specific
| <:ctyp< $lid:name$ : mutable sexp_option $tp$ >>
| <:ctyp< $lid:name$ : sexp_option $tp$ >> ->
let patt = mk_rec_patt _loc patt name in
let vname = <:expr< v >> in
let cnv_expr = unroll_cnv_fp _loc vname (sexp_of_type _loc tp) in
let expr =
<:expr<
let bnds =
match $lid:"v_" ^ name$ with
[ None -> bnds
| Some v ->
let arg = $cnv_expr$ in
let bnd =
Sexp.List [Sexp.Atom $str:name$; arg]
in
[ bnd :: bnds ] ]
in
$expr$
>>
in
patt, expr
| <:ctyp< $lid:name$ : mutable $tp$ >>
| <:ctyp< $lid:name$ : $tp$ >> ->
let patt = mk_rec_patt _loc patt name in
let vname = <:expr< $lid:"v_" ^ name$ >> in
let cnv_expr = unroll_cnv_fp _loc vname (sexp_of_type _loc tp) in
let expr =
<:expr<
let arg = $cnv_expr$ in
let bnd = Sexp.List [Sexp.Atom $str:name$; arg] in
let bnds = [ bnd :: bnds ] in
$expr$
>>
in
patt, expr
| _ -> assert false (* impossible *)
in
let init_expr = <:expr< Sexp.List bnds >> in
let patt, expr = List.fold_left coll (<:patt<>>, init_expr) flds in
`Match
<:match_case<
{ $patt$ } ->
let bnds = [] in
$expr$
>>
(* Empty type *)
let sexp_of_nil _loc = `Fun <:expr< fun _v -> assert False >>
(* Generate code from type definitions *)
let sexp_of_td _loc type_name tps rhs =
let is_alias_ref = ref false in
let handle_alias _loc tp = is_alias_ref := true; sexp_of_type _loc tp in
let body =
let rec loop _loc =
Gen.switch_tp_def _loc
~alias:handle_alias
~sum:sexp_of_sum
~record:sexp_of_record
~variants:sexp_of_variant
~mani:(fun _loc _tp1 -> loop _loc)
~nil:sexp_of_nil
in
match loop _loc rhs with
| `Fun fun_expr ->
(* Prevent violation of value restriction through eta-expansion *)
if !is_alias_ref && tps = [] then <:expr< fun [ v -> $fun_expr$ v ] >>
else <:expr< $fun_expr$ >>
| `Match matchings -> <:expr< fun [ $matchings$ ] >>
in
let patts =
List.map (Gen.idp _loc *** (^) "_of_" *** Gen.get_tparam_id) tps
in
let bnd = Gen.idp _loc ("sexp_of_" ^ type_name) in
<:binding< $bnd$ = $Gen.abstract _loc patts body$ >>
let rec sexp_of_tds = function
| TyDcl (_loc, type_name, tps, rhs, _cl) ->
sexp_of_td _loc type_name tps rhs
| TyAnd (_loc, tp1, tp2) ->
<:binding< $sexp_of_tds tp1$ and $sexp_of_tds tp2$ >>
| _ -> assert false (* impossible *)
let sexp_of tds =
let binding, recursive, _loc =
match tds with
| TyDcl (_loc, type_name, tps, rhs, _cl) ->
sexp_of_td _loc type_name tps rhs,
Gen.type_is_recursive _loc type_name rhs, _loc
| TyAnd (_loc, _, _) as tds -> sexp_of_tds tds, true, _loc
| _ -> assert false (* impossible *)
in
if recursive then <:str_item< value rec $binding$ >>
else <:str_item< value $binding$ >>
(* Add code generator to the set of known generators *)
let () = add_generator "sexp_of" sexp_of
end
(* Generator for converters of S-expressions to OCaml-values *)
module Generate_of_sexp = struct
let mk_abst_call _loc tn ?(internal = false) rev_path =
let tns = tn ^ "_of_sexp" in
let tns_suff = if internal then tns ^ "__" else tns in
<:expr< $id:Gen.ident_of_rev_path _loc (tns_suff :: rev_path)$ >>
(* Utility functions for polymorphic variants *)
(* Handle backtracking when variants do not match *)
let handle_no_variant_match _loc expr =
<:match_case< Conv_error.No_variant_match _ -> $expr$ >>
let is_wildcard = function [_] -> true | _ -> false
(* Generate code depending on whether to generate a match for the last
case of matching a variant *)
let handle_variant_match_last _loc match_last matches =
if match_last || is_wildcard matches then
match matches with
| <:match_case< $_$ -> $expr$ >> :: _ -> expr
| _ -> assert false (* impossible *)
else <:expr< match atom with [ $list:matches$ ] >>
(* Generate code for matching malformed S-expressions *)
let mk_variant_other_matches _loc rev_els call =
let coll_structs acc (_loc, cnstr) =
<:match_case< $str:cnstr$ -> Conv_error.$lid:call$ _loc sexp >>
:: acc
in
let exc_no_variant_match =
<:match_case< _ -> Conv_error.no_variant_match _loc sexp >>
in
List.fold_left coll_structs [exc_no_variant_match] rev_els
(* Split the row fields of a variant type into lists of atomic variants,
structured variants, atomic variants + included variant types,
and structured variants + included variant types. *)
let rec split_row_field _loc (atoms, structs, ainhs, sinhs as acc) = function
| <:ctyp< `$cnstr$ >> ->
let tpl = _loc, cnstr in
(
tpl :: atoms,
structs,
`A tpl :: ainhs,
sinhs
)
| <:ctyp< `$cnstr$ of $tps$ >> ->
(
atoms,
(_loc, cnstr) :: structs,
ainhs,
`S (_loc, cnstr, tps) :: sinhs
)
| <:ctyp< [= $row_fields$ ] >>
| <:ctyp< [> $row_fields$ ] >>
| <:ctyp< [< $row_fields$ ] >> ->
List.fold_left (split_row_field _loc) acc (list_of_ctyp row_fields [])
| <:ctyp< $_$ $_$ >>
| <:ctyp< $id:_$ >> as inh ->
let iinh = `I (_loc, inh) in
(
atoms,
structs,
iinh :: ainhs,
iinh :: sinhs
)
| _ -> failwith "split_row_field: unknown type"
(* Conversion of type paths *)
let path_of_sexp_fun _loc id =
match Gen.get_rev_id_path id [] with
| ["unit"] -> <:expr< Conv.unit_of_sexp >>
| ["string"] -> <:expr< Conv.string_of_sexp >>
| ["int"] -> <:expr< Conv.int_of_sexp >>
| ["float"] -> <:expr< Conv.float_of_sexp >>
| ["bool"] -> <:expr< Conv.bool_of_sexp >>
| ["int32"] -> <:expr< Conv.int32_of_sexp >>
| ["int64"] -> <:expr< Conv.int64_of_sexp >>
| ["nativeint"] -> <:expr< Conv.nativeint_of_sexp >>
| ["big_int"; "Big_int"] -> <:expr< Conv.big_int_of_sexp >>
| ["nat"; "Nat"] -> <:expr< Conv.nat_of_sexp >>
| ["num"; "Num"] -> <:expr< Conv.num_of_sexp >>
| ["ratio"; "Ratio"] -> <:expr< Conv.ratio_of_sexp >>
| ["list"] -> <:expr< Conv.list_of_sexp >>
| ["array"] -> <:expr< Conv.array_of_sexp >>
| ["option"] -> <:expr< Conv.option_of_sexp >>
| ["char"] -> <:expr< Conv.char_of_sexp >>
| ["t"; "Lazy"] | ["lazy_t"] -> <:expr< Conv.lazy_of_sexp >>
| ["t"; "Hashtbl"] -> <:expr< Conv.hashtbl_of_sexp >>
| ["ref"] -> <:expr< Conv.ref_of_sexp >>
| tn :: rev_path -> mk_abst_call _loc tn rev_path
| [] -> assert false (* no empty paths *)
(* Conversion of types *)
let rec type_of_sexp _loc = function
| <:ctyp< $tp1$ $tp2$ >> ->
let fp1 = type_of_sexp _loc tp1 in
let fp2 = type_of_sexp _loc tp2 in
`Fun (unroll_fun_matches _loc fp1 fp2)
| <:ctyp< ( $tup:tp$ ) >> -> tuple_of_sexp _loc tp
| <:ctyp< '$parm$ >> -> `Fun (Gen.ide _loc ("_of_" ^ parm))
| <:ctyp< $id:id$ >> -> `Fun (path_of_sexp_fun _loc id)
| <:ctyp< $_$ -> $_$ >> -> `Fun <:expr< Conv.fun_of_sexp >>
| <:ctyp< [< $row_fields$ ] >> | <:ctyp< [> $row_fields$ ] >>
| <:ctyp< [= $row_fields$ ] >> ->
variant_of_sexp _loc ?full_type:None row_fields
| <:ctyp< ! $parms$ . $poly_tp$ >> -> poly_of_sexp _loc parms poly_tp
| _ ->
prerr_endline (get_loc_err _loc "type_of_sexp: unknown type construct");
exit 1
(* Conversion of tuples *)
and tuple_of_sexp _loc tps =
let fps = List.map (type_of_sexp _loc) (list_of_ctyp tps []) in
let bindings, patts, vars = mk_bindings _loc fps in
let n = string_of_int (List.length fps) in
`Match
<:match_case<
Sexp.List $Gen.mk_patt_lst _loc patts$ ->
let $bindings$ in
( $tup:exCom_of_list vars$ )
| sexp -> Conv_error.tuple_of_size_n_expected _loc $int:n$ sexp
>>
(* Generate internal call *)
and mk_internal_call _loc = function
| <:ctyp< $id:id$ >> ->
let call =
match Gen.get_rev_id_path id [] with
| tn :: rev_path -> mk_abst_call _loc tn ~internal:true rev_path
| [] -> assert false (* impossible *)
in
call
| <:ctyp< $tp1$ $tp2$ >> ->
let fp1 = `Fun (mk_internal_call _loc tp1) in
let fp2 = type_of_sexp _loc tp2 in
unroll_fun_matches _loc fp1 fp2
| _ -> assert false (* impossible *)
(* Generate code for matching included variant types *)
and handle_variant_inh _loc full_type match_last other_matches inh =
let fun_expr = mk_internal_call _loc inh in
let match_exc =
handle_no_variant_match _loc (
handle_variant_match_last _loc match_last other_matches) in
let new_other_matches =
[
<:match_case<
_ -> try ($fun_expr$ sexp :> $full_type$) with [ $match_exc$ ]
>>
]
in
new_other_matches, true
(* Generate code for matching atomic variants *)
and mk_variant_match_atom _loc full_type rev_atoms_inhs rev_structs =
let coll (other_matches, match_last) = function
| `A (_loc, cnstr) ->
let new_match = <:match_case< $str:cnstr$ -> `$cnstr$ >> in
new_match :: other_matches, false
| `I (_loc, inh) ->
handle_variant_inh _loc full_type match_last other_matches inh
in
let other_matches =
mk_variant_other_matches _loc rev_structs "ptag_no_args"
in
let match_atoms_inhs, match_last =
List.fold_left coll (other_matches, false) rev_atoms_inhs in
handle_variant_match_last _loc match_last match_atoms_inhs
  (* Variant conversions *)

  (* Match arguments of constructors (variants or sum types): binds the
     already-isolated argument list [sexp_args], converts each element, and
     applies the constructor; an arity mismatch reports an error keyed on
     the tag kind ("ptag" = polymorphic variant tag, "stag" = sum tag) *)
  and mk_cnstr_args_match _loc ~is_variant cnstr tps =
    let fps = List.map (type_of_sexp _loc) (list_of_ctyp tps []) in
    let bindings, patts, vars = mk_bindings _loc fps in
    let good_arg_match_expr =
      let vars_expr =
        match vars with
        | [var_expr] -> var_expr
        | _ -> <:expr< ( $tup:exCom_of_list vars$ ) >>
      in
      if is_variant then <:expr< `$cnstr$ $vars_expr$ >>
      else <:expr< $uid:cnstr$ $vars_expr$ >>
    in
    let handle_exc =
      if is_variant then "ptag_incorrect_n_args" else "stag_incorrect_n_args"
    in
    (* NOTE(review): the generated code relies on [sexp_args], [tag] and
       [sexp] being bound by the enclosing generated match *)
    <:expr<
      match sexp_args with
      [ $Gen.mk_patt_lst _loc patts$ -> let $bindings$ in $good_arg_match_expr$
      | _ -> Conv_error.$lid:handle_exc$ _loc tag sexp ]
    >>
  (* Generate code for matching structured variants (a list headed by the
     tag atom).  Also returns whether any structured constructor exists, so
     the caller knows if the argument list needs to be bound at all. *)
  and mk_variant_match_struct _loc full_type rev_structs_inhs rev_atoms =
    let has_structs_ref = ref false in
    let coll (other_matches, match_last) = function
      | `S (_loc, cnstr, tps) ->
          has_structs_ref := true;
          let expr = mk_cnstr_args_match _loc ~is_variant:true cnstr tps in
          let new_match = <:match_case< ($str:cnstr$ as tag) -> $expr$ >> in
          new_match :: other_matches, false
      | `I (_loc, inh) ->
          handle_variant_inh _loc full_type match_last other_matches inh
    in
    (* A list headed by an argument-less tag is reported as "ptag_no_args" *)
    let other_matches =
      mk_variant_other_matches _loc rev_atoms "ptag_no_args"
    in
    let match_structs_inhs, match_last =
      List.fold_left coll (other_matches, false) rev_structs_inhs
    in
    (
      handle_variant_match_last _loc match_last match_structs_inhs,
      !has_structs_ref
    )
  (* Generate code for handling atomic and structured variants (i.e. not
     included variant types): dispatches on the S-expression shape — bare
     atom, tag-headed list, or the two malformed shapes *)
  and handle_variant_tag _loc full_type row_fields =
    let rev_atoms, rev_structs, rev_atoms_inhs, rev_structs_inhs =
      List.fold_left (split_row_field _loc) ([], [], [], []) row_fields
    in
    let match_struct, has_structs =
      mk_variant_match_struct _loc full_type rev_structs_inhs rev_atoms in
    (* Only bind the argument list when some constructor actually takes
       arguments; otherwise use a wildcard to avoid an unused binding *)
    let maybe_sexp_args_patt =
      if has_structs then <:patt< sexp_args >>
      else <:patt< _ >>
    in
    <:match_case<
      Sexp.Atom atom as sexp ->
        $mk_variant_match_atom _loc full_type rev_atoms_inhs rev_structs$
    | Sexp.List
        [Sexp.Atom atom :: $maybe_sexp_args_patt$] as sexp ->
          $match_struct$
    | Sexp.List [Sexp.List _ :: _] as sexp ->
        Conv_error.nested_list_invalid_poly_var _loc sexp
    | Sexp.List [] as sexp ->
        Conv_error.empty_list_invalid_poly_var _loc sexp
    >>
  (* Generate matching code for variants.  When [full_type] is absent we
     are converting a standalone polymorphic variant type (the "contained"
     case) and must translate escaped No_variant_match exceptions into
     regular conversion errors; otherwise this variant is included in a
     larger one and the exception is left for the caller to backtrack on. *)
  and variant_of_sexp _loc ?full_type row_tp =
    let row_fields = list_of_ctyp row_tp [] in
    let is_contained, full_type =
      match full_type with
      | None -> true, <:ctyp< [= $row_tp$ ] >>
      | Some full_type -> false, full_type
    in
    let top_match =
      match row_fields with
      | (<:ctyp< $id:_$ >> | <:ctyp< $_$ $_$ >>) as inh :: rest ->
          (* Leading type inclusions are tried in order; the first
             successful conversion wins, later fields act as fallbacks *)
          let rec loop inh row_fields =
            let call =
              <:expr< ( $mk_internal_call _loc inh$ sexp :> $full_type$ ) >>
            in
            match row_fields with
            | [] -> call
            | h :: t ->
                let expr =
                  match h with
                  | <:ctyp< $id:_$ >> | <:ctyp< $_$ $_$ >> -> loop h t
                  | _ ->
                      let rftag_matches =
                        handle_variant_tag _loc full_type row_fields
                      in
                      <:expr< match sexp with [ $rftag_matches$ ] >>
                in
                <:expr<
                  try $call$ with
                  [ $handle_no_variant_match _loc expr$ ]
                >>
          in
          <:match_case< sexp -> $loop inh rest$ >>
      | _ :: _ -> handle_variant_tag _loc full_type row_fields
      | [] -> assert false (* impossible *)
    in
    if is_contained then
      `Fun
        <:expr<
          fun sexp ->
            try match sexp with [ $top_match$ ]
            with
            [ Conv_error.No_variant_match (msg, sexp) ->
                Conv.of_sexp_error msg sexp ]
        >>
    else `Match top_match
  (* Conversion of explicitly polymorphic (record field) types: the
     quantified type variables cannot be converted at runtime, so each
     "_of_<param>" converter is shadowed by one that raises an error *)
  and poly_of_sexp _loc parms tp =
    let bindings =
      let mk_binding parm =
        <:binding<
          $Gen.idp _loc ("_of_" ^ parm)$ =
            fun sexp -> Conv_error.record_poly_field_value _loc sexp
        >>
      in
      List.map mk_binding (Gen.ty_var_list_of_ctyp parms [])
    in
    match type_of_sexp _loc tp with
    | `Fun fun_expr -> `Fun <:expr< let $list:bindings$ in $fun_expr$ >>
    | `Match matchings ->
        `Match
          <:match_case<
            arg ->
              let $list:bindings$ in
              match arg with
              [ $matchings$ ]
          >>
  (* Sum type conversions *)

  (* Generate matching code for well-formed S-expressions wrt. sum types.
     Both the capitalized and the lowercased constructor name are accepted
     as tags (mutable-string lowercasing is intentional for this OCaml
     era). *)
  let rec mk_good_sum_matches _loc = function
    | <:ctyp< $uid:cnstr$ >> ->
        let lccnstr = String.copy cnstr in
        lccnstr.[0] <- Char.lowercase lccnstr.[0];
        <:match_case<
          Sexp.Atom ($str:lccnstr$ | $str:cnstr$) -> $uid:cnstr$
        >>
    | <:ctyp< $uid:cnstr$ of $tps$ >> ->
        let lccnstr = String.copy cnstr in
        lccnstr.[0] <- Char.lowercase lccnstr.[0];
        <:match_case<
          (Sexp.List
            [Sexp.Atom ($str:lccnstr$ | $str:cnstr$ as tag) ::
              sexp_args] as sexp) ->
            $mk_cnstr_args_match _loc ~is_variant:false cnstr tps$
        >>
    | <:ctyp< $tp1$ | $tp2$ >> ->
        <:match_case<
          $mk_good_sum_matches _loc tp1$
        | $mk_good_sum_matches _loc tp2$
        >>
    | _ -> assert false (* impossible *)
  (* Generate matching code for malformed S-expressions with good tags
     wrt. sum types: a list headed by an argument-less constructor, or a
     bare atom naming a constructor that requires arguments *)
  let rec mk_bad_sum_matches _loc = function
    | <:ctyp< $uid:cnstr$ >> ->
        let lccnstr = String.copy cnstr in
        lccnstr.[0] <- Char.lowercase lccnstr.[0];
        <:match_case<
          Sexp.List
            [Sexp.Atom ($str:lccnstr$ | $str:cnstr$) :: _] as sexp ->
            Conv_error.stag_no_args _loc sexp
        >>
    | <:ctyp< $uid:cnstr$ of $_$ >> ->
        let lccnstr = String.copy cnstr in
        lccnstr.[0] <- Char.lowercase lccnstr.[0];
        <:match_case<
          Sexp.Atom ($str:lccnstr$ | $str:cnstr$) as sexp ->
            Conv_error.stag_takes_args _loc sexp
        >>
    | <:ctyp< $tp1$ | $tp2$ >> ->
        <:match_case<
          $mk_bad_sum_matches _loc tp1$
        | $mk_bad_sum_matches _loc tp2$
        >>
    | _ -> assert false (* impossible *)
  (* Generate matching code for sum types: well-formed constructor shapes
     first, then malformed uses of known tags, then generic error cases *)
  let sum_of_sexp _loc alts =
    `Match
      <:match_case<
        $mk_good_sum_matches _loc alts$
      | $mk_bad_sum_matches _loc alts$
      | Sexp.List [Sexp.List _ :: _] as sexp ->
          Conv_error.nested_list_invalid_sum _loc sexp
      | Sexp.List [] as sexp ->
          Conv_error.empty_list_invalid_sum _loc sexp
      | sexp -> Conv_error.unexpected_stag _loc sexp
      >>
  (* Record conversions *)

  (* Generate code for extracting record fields: each field name dispatches
     to code converting the value and storing it in its "<name>_field" ref;
     a second occurrence of the same name is recorded as a duplicate *)
  let rec mk_extract_fields _loc = function
    | <:ctyp< $tp1$; $tp2$ >> ->
        <:match_case<
          $mk_extract_fields _loc tp1$
        | $mk_extract_fields _loc tp2$
        >>
    | <:ctyp< $lid:nm$ : mutable sexp_option $tp$ >>
    | <:ctyp< $lid:nm$ : sexp_option $tp$ >>
    | <:ctyp< $lid:nm$ : mutable $tp$ >>
    | <:ctyp< $lid:nm$ : $tp$ >> ->
        let unrolled =
          unroll_cnv_fp _loc <:expr< field_sexp >> (type_of_sexp _loc tp)
        in
        <:match_case<
          $str:nm$ ->
            match $lid:nm ^ "_field"$.val with
            [ None ->
                let fvalue = $unrolled$ in
                $lid:nm ^ "_field"$.val := Some fvalue
            | Some _ ->
                duplicates.val := [ field_name :: duplicates.val ] ]
        >>
    | _ -> assert false (* impossible *)
  (* Generate code for handling the result of matching record fields:
     builds a tuple of all field refs, a pattern matching the "everything
     defined" case, and the data used to report undefined mandatory
     fields.  sexp_option fields may legitimately stay None. *)
  let mk_handle_record_match_result _loc has_poly flds =
    let has_nonopt_fields = ref false in
    let res_tpls, bi_lst, good_patts =
      let rec loop (res_tpls, bi_lst, good_patts as acc) = function
        | <:ctyp< $lid:nm$ : sexp_option $_$ >>
        | <:ctyp< $lid:nm$ : mutable sexp_option $_$ >> ->
            let fld = <:expr< $lid:nm ^ "_field"$.val >> in
            (
              <:expr< $fld$ >> :: res_tpls,
              bi_lst,
              <:patt< $lid:nm ^ "_value"$ >> :: good_patts
            )
        | <:ctyp< $lid:nm$ : $_$ >> ->
            has_nonopt_fields := true;
            let fld = <:expr< $lid:nm ^ "_field"$.val >> in
            (
              <:expr< $fld$ >> :: res_tpls,
              (* (is_undefined, field_name) pair used for error reporting *)
              <:expr< ($fld$ = None, $str:nm$) >> :: bi_lst,
              <:patt< Some $lid:nm ^ "_value"$ >> :: good_patts
            )
        | <:ctyp< $tp1$; $tp2$ >> -> loop (loop acc tp2) tp1
        | _ -> assert false (* impossible *)
      in
      loop ([], [], []) flds
    in
    let match_good_expr =
      if has_poly then
        (* Polymorphic fields: return the values as a tuple and let the
           caller construct the record where the quantified types check *)
        let rec loop acc = function
          | <:ctyp< $tp1$; $tp2$ >> -> loop (loop acc tp2) tp1
          | <:ctyp< $lid:nm$ : $_$ >> -> <:expr< $lid:nm ^ "_value"$ >> :: acc
          | _ -> assert false (* impossible *)
        in
        match loop [] flds with
        | [match_good_expr] -> match_good_expr
        | match_good_exprs -> <:expr< $tup:exCom_of_list match_good_exprs$ >>
      else
        let rec loop = function
          | <:ctyp< $tp1$; $tp2$ >> -> <:rec_binding< $loop tp1$; $loop tp2$ >>
          | <:ctyp< $lid:nm$ : $_$ >> ->
              <:rec_binding< $lid:nm$ = $lid:nm ^ "_value"$ >>
          | _ -> assert false (* impossible *)
        in
        <:expr< { $loop flds$ } >>
    in
    let expr, patt =
      match res_tpls, good_patts with
      | [res_expr], [res_patt] -> res_expr, res_patt
      | _ ->
          <:expr< $tup:exCom_of_list res_tpls$ >>,
          <:patt< $tup:paCom_of_list good_patts$ >>
    in
    if !has_nonopt_fields then
      <:expr<
        match $expr$ with
        [ $patt$ -> $match_good_expr$
        | _ ->
            Conv_error.record_undefined_elements _loc sexp
              $Gen.mk_expr_lst _loc bi_lst$
        ]
      >>
    else <:expr< match $expr$ with [ $patt$ -> $match_good_expr$ ] >>
  (* Generate code for converting record fields: sets up one ref per field,
     iterates over the (name value) pairs in [field_sexps] filling the refs
     in, then checks for duplicate and extra fields before building the
     final result.  The generated code relies on [field_sexps] and [sexp]
     being bound by the enclosing generated match. *)
  let mk_cnv_fields has_poly _loc flds =
    let field_refs =
      let rec loop = function
        | <:ctyp< $tp1$; $tp2$ >> -> <:binding< $loop tp1$ and $loop tp2$ >>
        | <:ctyp< $lid:nm$ : $_$ >> ->
            <:binding< $lid:nm ^ "_field"$ = ref None >>
        | _ -> assert false (* impossible *)
      in
      loop flds
    in
    <:expr<
      let $field_refs$ and duplicates = ref [] and extra = ref [] in
      let rec iter = fun
        [ [
            Sexp.List
              [(Sexp.Atom field_name); field_sexp] ::
            tail
          ] ->
            do {
              match field_name with
              [ $mk_extract_fields _loc flds$
              | _ ->
                  if Conv.record_check_extra_fields.val then
                    extra.val := [ field_name :: extra.val ]
                  else () ];
              iter tail }
        | [sexp :: _] -> Conv_error.record_only_pairs_expected _loc sexp
        | [] -> () ]
      in
      do {
        iter field_sexps;
        if duplicates.val <> [] then
          Conv_error.record_duplicate_fields
            _loc duplicates.val sexp
        else if extra.val <> [] then
          Conv_error.record_extra_fields _loc extra.val sexp
        else $mk_handle_record_match_result _loc has_poly flds$
      }
    >>
  (* Whether a record field list contains an explicitly quantified
     (polymorphic) field *)
  let rec is_poly = function
    | <:ctyp< $_$ : ! $_$ . $_$ >> -> true
    | <:ctyp< $flds1$; $flds2$ >> -> is_poly flds1 || is_poly flds2
    | _ -> false
  (* Generate matching code for records.  With polymorphic fields the
     conversion yields the field values as a tuple which is destructured
     here so the record itself is constructed where the quantified types
     check; otherwise the conversion builds the record directly. *)
  let record_of_sexp _loc flds =
    let handle_fields =
      let has_poly = is_poly flds in
      let cnv_fields = mk_cnv_fields has_poly _loc flds in
      if has_poly then
        let is_singleton_ref = ref true in
        let patt =
          let rec loop = function
            | <:ctyp< $tp1$; $tp2$ >> ->
                is_singleton_ref := false;
                <:patt< $loop tp1$, $loop tp2$ >>
            | <:ctyp< $lid:nm$ : $_$ >> -> <:patt< $lid:nm$ >>
            | _ -> assert false (* impossible *)
          in
          let patt = loop flds in
          (* A single field is bound directly, not wrapped in a tuple *)
          if !is_singleton_ref then patt
          else <:patt< $tup:patt$ >>
        in
        let record_def =
          let rec loop = function
            | <:ctyp< $tp1$; $tp2$ >> ->
                <:rec_binding< $loop tp1$; $loop tp2$ >>
            | <:ctyp< $lid:nm$ : $_$ >> -> <:rec_binding< $lid:nm$ = $lid:nm$ >>
            | _ -> assert false (* impossible *)
          in
          loop flds
        in
        <:expr<
          let $patt$ = $cnv_fields$ in
          { $record_def$ }
        >>
      else cnv_fields
    in
    `Match
      <:match_case<
        Sexp.List field_sexps as sexp -> $handle_fields$
      | Sexp.Atom _ as sexp ->
          Conv_error.record_list_instead_atom _loc sexp
      >>
  (* Empty type: no value inhabits it, so conversion always fails *)
  let nil_of_sexp _loc =
    `Fun <:expr< fun sexp -> Conv_error.empty_type _loc sexp >>
  (* Generate code from type definitions *)

  (* Detect whether an expression is (an application of) one of the
     generated "_of_<param>" converters.  NOTE(review): indexing name.[1]
     assumes every matched identifier is at least two characters long;
     generated converter names ("_of_...") always are. *)
  let rec is_poly_call = function
    | <:expr< $f$ $_$ >> -> is_poly_call f
    | <:expr< $lid:name$ >> -> name.[0] = '_' && name.[1] = 'o'
    | _ -> false
  (* Generate the (internal, external) conversion bindings for one type
     definition.  The internal "<name>_of_sexp__" function performs the
     actual conversion; the external "<name>_of_sexp" wraps it, translating
     escaped variant mismatches into regular conversion errors. *)
  let td_of_sexp _loc type_name tps rhs =
    let is_alias_ref = ref false in
    let handle_alias _loc tp =
      is_alias_ref := true;
      type_of_sexp _loc tp
    in
    let coll_args tp param = <:ctyp< $tp$ $param$ >> in
    let full_type = List.fold_left coll_args <:ctyp< $lid:type_name$ >> tps in
    let is_variant_ref = ref false in
    let handle_variant row_fields =
      is_variant_ref := true;
      variant_of_sexp ~full_type row_fields
    in
    let body =
      let rec loop _loc =
        Gen.switch_tp_def _loc
          ~alias:handle_alias
          ~sum:sum_of_sexp
          ~record:record_of_sexp
          ~variants:handle_variant
          ~mani:(fun _loc _tp1 -> loop _loc)
          ~nil:nil_of_sexp
      in
      match loop _loc rhs with
      | `Fun fun_expr ->
          (* Prevent violation of value restriction through eta-expansion *)
          if !is_alias_ref && tps = [] then
            <:expr< fun [ sexp -> $fun_expr$ sexp ] >>
          else <:expr< $fun_expr$ >>
      | `Match matchings -> <:expr< fun [ $matchings$ ] >>
    in
    let internal_name = type_name ^ "_of_sexp" ^ "__" in
    (* One "_of_<param>" converter argument per type parameter *)
    let arg_patts, arg_exprs =
      List.split (
        List.map (function tp ->
            let name = "_of_" ^ Gen.get_tparam_id tp in
            Gen.idp _loc name, Gen.ide _loc name
          )
          tps)
    in
    (* An alias that directly calls a parameter converter would be too
       polymorphic to be useful; generate an error stub instead *)
    let with_poly_call = !is_alias_ref && is_poly_call body in
    let internal_fun_body =
      (* _loc is rebound to the qualified type name so generated conversion
         errors report which type failed *)
      let full_type_name = sprintf "%s.%s" (get_conv_path ()) type_name in
      if with_poly_call then
        Gen.abstract _loc arg_patts
          <:expr<
            fun sexp ->
              Conv_error.silly_type $str:full_type_name$ sexp
          >>
      else
        <:expr<
          let _loc = $str:full_type_name$ in
          $Gen.abstract _loc arg_patts body$
        >>
    in
    let pre_external_fun_body =
      let internal_call =
        let internal_expr = Gen.ide _loc internal_name in
        <:expr< $Gen.apply _loc internal_expr arg_exprs$ sexp >>
      in
      let no_variant_match_mc =
        <:match_case<
          Conv_error.No_variant_match (msg, sexp) ->
            Conv.of_sexp_error msg sexp
        >>
      in
      if with_poly_call then
        <:expr< try $body$ sexp with [ $no_variant_match_mc$ ] >>
      (* Type alias may refer to variant, therefore same handling here! *)
      else if !is_variant_ref || !is_alias_ref then
        <:expr< try $internal_call$ with [ $no_variant_match_mc$ ] >>
      else internal_call
    in
    let internal_binding =
      <:binding< $lid:internal_name$ = $internal_fun_body$ >>
    in
    let external_fun_patt = Gen.idp _loc (type_name ^ "_of_sexp") in
    let external_fun_body =
      Gen.abstract _loc arg_patts <:expr< fun sexp -> $pre_external_fun_body$ >>
    in
    let external_binding =
      <:binding< $external_fun_patt$ = $external_fun_body$ >>
    in
    internal_binding, external_binding
  (* Collect the (internal, external) binding pairs for every definition in
     an "and"-connected group of type declarations *)
  let rec tds_of_sexp _loc acc = function
    | TyDcl (_loc, type_name, tps, rhs, _cl) ->
        td_of_sexp _loc type_name tps rhs :: acc
    | TyAnd (_loc, tp1, tp2) -> tds_of_sexp _loc (tds_of_sexp _loc acc tp2) tp1
    | _ -> assert false (* impossible *)
  (* Generate code from type definitions: the top-level entry point of the
     "of_sexp" generator.  Single definitions use "value rec" only when the
     type actually refers to itself; mutually recursive groups always get
     one combined "value rec". *)
  let of_sexp = function
    | TyDcl (_loc, type_name, tps, rhs, _cl) ->
        let internal_binding, external_binding =
          td_of_sexp _loc type_name tps rhs
        in
        let recursive = Gen.type_is_recursive _loc type_name rhs in
        if recursive then
          <:str_item<
            value rec $internal_binding$
            and $external_binding$
          >>
        else
          <:str_item<
            value $internal_binding$;
            value $external_binding$
          >>
    | TyAnd (_loc, _, _) as tds ->
        let two_bindings = tds_of_sexp _loc [] tds in
        let bindings =
          List.map (fun (b1, b2) -> <:binding< $b1$ and $b2$ >>) two_bindings
        in
        <:str_item< value rec $list:bindings$ >>
    | _ -> assert false (* impossible *)
  (* Register the "of_sexp" generator with Pa_type_conv so it can be
     requested via "with of_sexp" on type definitions *)
  let () = add_generator "of_sexp" of_sexp
end
(* Add "of_sexp" and "sexp_of" as "sexp" to the set of generators: the
   combined "sexp" generator derives both conversion directions at once *)
let () =
  add_generator
    "sexp"
    (fun tds ->
      let _loc = Loc.ghost in
      <:str_item<
        $Generate_of_sexp.of_sexp tds$; $Generate_sexp_of.sexp_of tds$
      >>
    )
| null | https://raw.githubusercontent.com/aryx/ocamltarzan/4140f5102cee83a2ca7be996ca2d92e9cb035f9c/pa/pa_sexp_conv.ml | ocaml | Pa_sexp_conv: Preprocessing Module for Automated S-expression Conversions
Utility functions
impossible
impossible
Generators for S-expressions
Generates the signature for type conversion to S-expressions
Generates the signature for type conversion from S-expressions
Generates the signature for type conversion to S-expressions
Generator for converters of OCaml-values to S-expressions
Conversion of type paths
impossible
Conversion of types
Conversion of polymorphic types
impossible
Conversion of tuples
Conversion of variant types
impossible
Polymorphic record fields
Conversion of sum types
Conversion of record types
impossible
Empty type
Generate code from type definitions
Prevent violation of value restriction through eta-expansion
impossible
impossible
Add code generator to the set of known generators
Generator for converters of S-expressions to OCaml-values
Utility functions for polymorphic variants
Handle backtracking when variants do not match
Generate code depending on whether to generate a match for the last
case of matching a variant
impossible
Generate code for matching malformed S-expressions
Split the row fields of a variant type into lists of atomic variants,
structured variants, atomic variants + included variant types,
and structured variants + included variant types.
Conversion of type paths
no empty paths
Conversion of types
Conversion of tuples
Generate internal call
impossible
impossible
Generate code for matching included variant types
Generate code for matching atomic variants
Variant conversions
Match arguments of constructors (variants or sum types)
Generate code for matching structured variants
Generate code for handling atomic and structured variants (i.e. not
included variant types)
Generate matching code for variants
impossible
Sum type conversions
Generate matching code for well-formed S-expressions wrt. sum types
impossible
Generate matching code for malformed S-expressions with good tags
wrt. sum types
impossible
Generate matching code for sum types
Record conversions
Generate code for extracting record fields
impossible
Generate code for handling the result of matching record fields
impossible
impossible
impossible
Generate code for converting record fields
impossible
Generate matching code for records
impossible
impossible
Empty type
Generate code from type definitions
Prevent violation of value restriction through eta-expansion
Type alias may refer to variant, therefore same handling here!
impossible
Generate code from type definitions
impossible
Add code generator to the set of known generators
Add "of_sexp" and "sexp_of" as "sexp" to the set of generators | pp camlp4orf
File : pa_sexp_conv.ml
Copyright ( C ) 2005-
Jane Street Holding , LLC
Author :
email : mmottl\@janestcapital.com
WWW :
This file is derived from file " pa_tywith.ml " of version 0.45 of the
library " " .
is Copyright ( C ) 2004 , 2005 by
< >
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
Copyright (C) 2005-
Jane Street Holding, LLC
Author: Markus Mottl
email: mmottl\@janestcapital.com
WWW:
This file is derived from file "pa_tywith.ml" of version 0.45 of the
library "Tywith".
Tywith is Copyright (C) 2004, 2005 by
Martin Sandin <>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
open Printf
open Lexing
open Camlp4
open PreCast
open Ast
open Pa_type_conv
let ( *** ) f g x = f (g x)
let mk_rev_bindings _loc fps =
let coll (i, bindings, patts, vars) fp =
let name = "v" ^ string_of_int i in
let var_expr = Gen.ide _loc name in
let expr =
match fp with
| `Fun fun_expr -> <:expr< $fun_expr$ $var_expr$ >>
| `Match matchings -> <:expr< match $var_expr$ with [ $matchings$ ] >>
in
let patt = Gen.idp _loc name in
let bindings = <:binding< $patt$ = $expr$ and $bindings$ >> in
i - 1, bindings, patt :: patts, var_expr :: vars
in
let n = List.length fps in
let _, bindings, patts, expr =
List.fold_left coll (n, BiNil _loc, [], []) fps
in
bindings, patts, expr
let mk_bindings _loc fps = mk_rev_bindings _loc (List.rev fps)
let unroll_cnv_fp _loc var = function
| `Fun fun_expr -> <:expr< $fun_expr$ $var$ >>
| `Match matchings -> <:expr< match $var$ with [ $matchings$ ] >>
let unroll_fun_matches _loc fp1 fp2 =
match fp1, fp2 with
| `Fun fun_expr1, `Fun fun_expr2 -> <:expr< $fun_expr1$ $fun_expr2$ >>
| `Fun fun_expr, `Match matching -> <:expr< $fun_expr$ (fun [ $matching$ ]) >>
let rec sig_of_tds cnv = function
| TyDcl (_loc, type_name, tps, _rhs, _cl) -> cnv _loc type_name tps
| TyAnd (_loc, tp1, tp2) ->
<:sig_item< $sig_of_tds cnv tp1$; $sig_of_tds cnv tp2$ >>
module Sig_generate_sexp_of = struct
let sig_of_td _loc type_name tps =
let rec loop this_type = function
| [] -> <:ctyp< $this_type$ -> Sexp.t >>
| tp :: tps ->
let tp = Gen.drop_variance_annotations _loc tp in
let sexp_of = loop <:ctyp< $this_type$ $tp$ >> tps in
<:ctyp< ( $tp$ -> Sexp.t ) -> $sexp_of$ >>
in
let sexp_of = loop <:ctyp< $lid:type_name$ >> tps in
<:sig_item< value $lid: "sexp_of_" ^ type_name$ : $sexp_of$ >>
let mk_sig tds = <:sig_item< $sig_of_tds sig_of_td tds$ >>
let () = add_sig_generator "sexp_of" mk_sig
end
module Sig_generate_of_sexp = struct
let sig_of_td _loc type_name tps =
let rec loop this_tp = function
| [] -> <:ctyp< Sexp.t -> $this_tp$ >>
| tp :: tps ->
let tp = Gen.drop_variance_annotations _loc tp in
let of_sexp = loop <:ctyp< $this_tp$ $tp$ >> tps in
<:ctyp< ( Sexp.t -> $tp$ ) -> $of_sexp$ >>
in
let of_sexp = loop <:ctyp< $lid:type_name$ >> tps in
<:sig_item<
value $lid: type_name ^ "_of_sexp"$ : $of_sexp$;
>>
let mk_sig tds = <:sig_item< $sig_of_tds sig_of_td tds$ >>
let () = add_sig_generator "of_sexp" mk_sig
end
module Sig_generate = struct
let () =
add_sig_generator "sexp" (fun tds ->
let _loc = Loc.ghost in
<:sig_item<
$Sig_generate_sexp_of.mk_sig tds$; $Sig_generate_of_sexp.mk_sig tds$
>>)
end
module Generate_sexp_of = struct
let mk_abst_call _loc tn rev_path =
<:expr< $id:Gen.ident_of_rev_path _loc (("sexp_of_" ^ tn) :: rev_path)$ >>
let sexp_of_path_fun _loc id =
match Gen.get_rev_id_path id [] with
| ["unit"] -> <:expr< Conv.sexp_of_unit >>
| ["bool"] -> <:expr< Conv.sexp_of_bool >>
| ["string"] -> <:expr< Conv.sexp_of_string >>
| ["char"] -> <:expr< Conv.sexp_of_char >>
| ["int"] -> <:expr< Conv.sexp_of_int >>
| ["float"] -> <:expr< Conv.sexp_of_float >>
| ["int32"] -> <:expr< Conv.sexp_of_int32 >>
| ["int64"] -> <:expr< Conv.sexp_of_int64 >>
| ["nativeint"] -> <:expr< Conv.sexp_of_nativeint >>
| ["big_int"; "Big_int"] -> <:expr< Conv.sexp_of_big_int >>
| ["nat"; "Nat"] -> <:expr< Conv.sexp_of_nat >>
| ["num"; "Num"] -> <:expr< Conv.sexp_of_num >>
| ["ratio"; "Ratio"] -> <:expr< Conv.sexp_of_ratio >>
| ["ref"] -> <:expr< Conv.sexp_of_ref >>
| ["t"; "Lazy"] | ["lazy_t"] -> <:expr< Conv.sexp_of_lazy >>
| ["option"] -> <:expr< Conv.sexp_of_option >>
| ["list"] -> <:expr< Conv.sexp_of_list >>
| ["array"] -> <:expr< Conv.sexp_of_array >>
| ["t"; "Hashtbl"] -> <:expr< Conv.sexp_of_hashtbl >>
| tn :: rev_path -> mk_abst_call _loc tn rev_path
let rec sexp_of_type _loc = function
| <:ctyp< $tp1$ $tp2$ >> -> `Fun (sexp_of_appl_fun _loc tp1 tp2)
| <:ctyp< ( $tup:tp$ ) >> -> sexp_of_tuple _loc tp
| <:ctyp< '$parm$ >> -> `Fun (Gen.ide _loc ("_of_" ^ parm))
| <:ctyp< $id:id$ >> -> `Fun (sexp_of_path_fun _loc id)
| <:ctyp< $_$ -> $_$ >> -> `Fun <:expr< Conv.sexp_of_fun >>
| <:ctyp< [< $row_fields$ ] >> | <:ctyp< [> $row_fields$ ] >>
| <:ctyp< [= $row_fields$ ] >> -> sexp_of_variant _loc row_fields
| <:ctyp< ! $parms$ . $poly_tp$ >> -> sexp_of_poly _loc parms poly_tp
| _ ->
prerr_endline (get_loc_err _loc "sexp_of_type: unknown type construct");
exit 1
and sexp_of_appl_fun _loc tp1 tp2 =
match sexp_of_type _loc tp1, sexp_of_type _loc tp2 with
| `Fun fun_expr1, `Fun fun_expr2 -> <:expr< $fun_expr1$ $fun_expr2$ >>
| `Fun fun_expr, `Match matching -> <:expr< $fun_expr$ (fun [ $matching$ ]) >>
and sexp_of_tuple _loc tp =
let fps = List.map (sexp_of_type _loc) (list_of_ctyp tp []) in
let bindings, patts, vars = mk_bindings _loc fps in
let in_expr = <:expr< Sexp.List $Gen.mk_expr_lst _loc vars$ >> in
let expr = <:expr< let $bindings$ in $in_expr$ >> in
`Match <:match_case< ( $tup:paCom_of_list patts$ ) -> $expr$ >>
and sexp_of_variant _loc row_fields =
let rec loop = function
| <:ctyp< $tp1$ | $tp2$ >> -> <:match_case< $loop tp1$ | $loop tp2$ >>
| <:ctyp< `$cnstr$ >> ->
<:match_case< `$cnstr$ -> Sexp.Atom $str:cnstr$ >>
| <:ctyp< `$cnstr$ of $tps$ >> ->
let fps = List.map (sexp_of_type _loc) (list_of_ctyp tps []) in
let bindings, patts, vars = mk_bindings _loc fps in
let cnstr_expr = <:expr< Sexp.Atom $str:cnstr$ >> in
let expr =
<:expr<
let $bindings$ in
Sexp.List $Gen.mk_expr_lst _loc (cnstr_expr :: vars)$
>>
in
<:match_case< `$cnstr$ $paSem_of_list patts$ -> $expr$ >>
| <:ctyp< [= $row_fields$ ] >> | <:ctyp< [> $row_fields$ ] >>
| <:ctyp< [< $row_fields$ ] >> -> loop row_fields
| <:ctyp< $tp1$ $tp2$ >> ->
let id_path = Gen.get_appl_path _loc tp1 in
let call = sexp_of_appl_fun _loc tp1 tp2 in
<:match_case< #$id_path$ as v -> $call$ v >>
| <:ctyp< $id:id$ >> ->
let call =
match Gen.get_rev_id_path id [] with
| tn :: rev_path -> mk_abst_call _loc tn rev_path
in
<:match_case< #$id$ as v -> $call$ v >>
| _ -> failwith "sexp_of_variant: unknown type"
in
`Match (loop row_fields)
and sexp_of_poly _loc parms tp =
let bindings =
let mk_binding parm =
<:binding<
$Gen.idp _loc ("_of_" ^ parm)$ = Conv.sexp_of_abstr
>>
in
List.map mk_binding (Gen.ty_var_list_of_ctyp parms [])
in
match sexp_of_type _loc tp with
| `Fun fun_expr -> `Fun <:expr< let $list:bindings$ in $fun_expr$ >>
| `Match matchings ->
`Match
<:match_case<
arg ->
let $list:bindings$ in
match arg with
[ $matchings$ ]
>>
let rec branch_sum _loc = function
| <:ctyp< $tp1$ | $tp2$ >> ->
<:match_case< $branch_sum _loc tp1$ | $branch_sum _loc tp2$ >>
| <:ctyp< $uid:cnstr$ >> ->
<:match_case< $uid:cnstr$ -> Sexp.Atom $str:cnstr$ >>
| <:ctyp< $uid:cnstr$ of $tps$ >> ->
let fps = List.map (sexp_of_type _loc) (list_of_ctyp tps []) in
let cnstr_expr = <:expr< Sexp.Atom $str:cnstr$ >> in
let bindings, patts, vars = mk_bindings _loc fps in
let patt =
match patts with
| [patt] -> patt
| _ -> <:patt< ( $tup:paCom_of_list patts$ ) >>
in
<:match_case<
$uid:cnstr$ $patt$ ->
let $bindings$ in
Sexp.List $Gen.mk_expr_lst _loc (cnstr_expr :: vars)$
>>
| _ -> failwith "branch_sum: unknown type"
let sexp_of_sum _loc alts = `Match (branch_sum _loc alts)
let mk_rec_patt _loc patt name =
let p = <:patt< $lid:name$ = $lid:"v_" ^ name$ >> in
<:patt< $patt$; $p$ >>
let mk_cnv_expr _loc tp var =
match sexp_of_type _loc tp with
| `Fun fun_expr -> <:expr< $fun_expr$ $var$ >>
| `Match matchings -> <:expr< match $var$ with [ $matchings$ ] >>
let sexp_of_record _loc flds_ctyp =
let flds = list_of_ctyp flds_ctyp [] in
let rec coll (patt, expr) = function
? ? ? specific
| <:ctyp< $lid:name$ : mutable sexp_option $tp$ >>
| <:ctyp< $lid:name$ : sexp_option $tp$ >> ->
let patt = mk_rec_patt _loc patt name in
let vname = <:expr< v >> in
let cnv_expr = unroll_cnv_fp _loc vname (sexp_of_type _loc tp) in
let expr =
<:expr<
let bnds =
match $lid:"v_" ^ name$ with
[ None -> bnds
| Some v ->
let arg = $cnv_expr$ in
let bnd =
Sexp.List [Sexp.Atom $str:name$; arg]
in
[ bnd :: bnds ] ]
in
$expr$
>>
in
patt, expr
| <:ctyp< $lid:name$ : mutable $tp$ >>
| <:ctyp< $lid:name$ : $tp$ >> ->
let patt = mk_rec_patt _loc patt name in
let vname = <:expr< $lid:"v_" ^ name$ >> in
let cnv_expr = unroll_cnv_fp _loc vname (sexp_of_type _loc tp) in
let expr =
<:expr<
let arg = $cnv_expr$ in
let bnd = Sexp.List [Sexp.Atom $str:name$; arg] in
let bnds = [ bnd :: bnds ] in
$expr$
>>
in
patt, expr
in
let init_expr = <:expr< Sexp.List bnds >> in
let patt, expr = List.fold_left coll (<:patt<>>, init_expr) flds in
`Match
<:match_case<
{ $patt$ } ->
let bnds = [] in
$expr$
>>
let sexp_of_nil _loc = `Fun <:expr< fun _v -> assert False >>
let sexp_of_td _loc type_name tps rhs =
let is_alias_ref = ref false in
let handle_alias _loc tp = is_alias_ref := true; sexp_of_type _loc tp in
let body =
let rec loop _loc =
Gen.switch_tp_def _loc
~alias:handle_alias
~sum:sexp_of_sum
~record:sexp_of_record
~variants:sexp_of_variant
~mani:(fun _loc _tp1 -> loop _loc)
~nil:sexp_of_nil
in
match loop _loc rhs with
| `Fun fun_expr ->
if !is_alias_ref && tps = [] then <:expr< fun [ v -> $fun_expr$ v ] >>
else <:expr< $fun_expr$ >>
| `Match matchings -> <:expr< fun [ $matchings$ ] >>
in
let patts =
List.map (Gen.idp _loc *** (^) "_of_" *** Gen.get_tparam_id) tps
in
let bnd = Gen.idp _loc ("sexp_of_" ^ type_name) in
<:binding< $bnd$ = $Gen.abstract _loc patts body$ >>
let rec sexp_of_tds = function
| TyDcl (_loc, type_name, tps, rhs, _cl) ->
sexp_of_td _loc type_name tps rhs
| TyAnd (_loc, tp1, tp2) ->
<:binding< $sexp_of_tds tp1$ and $sexp_of_tds tp2$ >>
let sexp_of tds =
let binding, recursive, _loc =
match tds with
| TyDcl (_loc, type_name, tps, rhs, _cl) ->
sexp_of_td _loc type_name tps rhs,
Gen.type_is_recursive _loc type_name rhs, _loc
| TyAnd (_loc, _, _) as tds -> sexp_of_tds tds, true, _loc
in
if recursive then <:str_item< value rec $binding$ >>
else <:str_item< value $binding$ >>
let () = add_generator "sexp_of" sexp_of
end
module Generate_of_sexp = struct
let mk_abst_call _loc tn ?(internal = false) rev_path =
let tns = tn ^ "_of_sexp" in
let tns_suff = if internal then tns ^ "__" else tns in
<:expr< $id:Gen.ident_of_rev_path _loc (tns_suff :: rev_path)$ >>
let handle_no_variant_match _loc expr =
<:match_case< Conv_error.No_variant_match _ -> $expr$ >>
let is_wildcard = function [_] -> true | _ -> false
let handle_variant_match_last _loc match_last matches =
if match_last || is_wildcard matches then
match matches with
| <:match_case< $_$ -> $expr$ >> :: _ -> expr
else <:expr< match atom with [ $list:matches$ ] >>
let mk_variant_other_matches _loc rev_els call =
let coll_structs acc (_loc, cnstr) =
<:match_case< $str:cnstr$ -> Conv_error.$lid:call$ _loc sexp >>
:: acc
in
let exc_no_variant_match =
<:match_case< _ -> Conv_error.no_variant_match _loc sexp >>
in
List.fold_left coll_structs [exc_no_variant_match] rev_els
let rec split_row_field _loc (atoms, structs, ainhs, sinhs as acc) = function
| <:ctyp< `$cnstr$ >> ->
let tpl = _loc, cnstr in
(
tpl :: atoms,
structs,
`A tpl :: ainhs,
sinhs
)
| <:ctyp< `$cnstr$ of $tps$ >> ->
(
atoms,
(_loc, cnstr) :: structs,
ainhs,
`S (_loc, cnstr, tps) :: sinhs
)
| <:ctyp< [= $row_fields$ ] >>
| <:ctyp< [> $row_fields$ ] >>
| <:ctyp< [< $row_fields$ ] >> ->
List.fold_left (split_row_field _loc) acc (list_of_ctyp row_fields [])
| <:ctyp< $_$ $_$ >>
| <:ctyp< $id:_$ >> as inh ->
let iinh = `I (_loc, inh) in
(
atoms,
structs,
iinh :: ainhs,
iinh :: sinhs
)
| _ -> failwith "split_row_field: unknown type"
let path_of_sexp_fun _loc id =
match Gen.get_rev_id_path id [] with
| ["unit"] -> <:expr< Conv.unit_of_sexp >>
| ["string"] -> <:expr< Conv.string_of_sexp >>
| ["int"] -> <:expr< Conv.int_of_sexp >>
| ["float"] -> <:expr< Conv.float_of_sexp >>
| ["bool"] -> <:expr< Conv.bool_of_sexp >>
| ["int32"] -> <:expr< Conv.int32_of_sexp >>
| ["int64"] -> <:expr< Conv.int64_of_sexp >>
| ["nativeint"] -> <:expr< Conv.nativeint_of_sexp >>
| ["big_int"; "Big_int"] -> <:expr< Conv.big_int_of_sexp >>
| ["nat"; "Nat"] -> <:expr< Conv.nat_of_sexp >>
| ["num"; "Num"] -> <:expr< Conv.num_of_sexp >>
| ["ratio"; "Ratio"] -> <:expr< Conv.ratio_of_sexp >>
| ["list"] -> <:expr< Conv.list_of_sexp >>
| ["array"] -> <:expr< Conv.array_of_sexp >>
| ["option"] -> <:expr< Conv.option_of_sexp >>
| ["char"] -> <:expr< Conv.char_of_sexp >>
| ["t"; "Lazy"] | ["lazy_t"] -> <:expr< Conv.lazy_of_sexp >>
| ["t"; "Hashtbl"] -> <:expr< Conv.hashtbl_of_sexp >>
| ["ref"] -> <:expr< Conv.ref_of_sexp >>
| tn :: rev_path -> mk_abst_call _loc tn rev_path
let rec type_of_sexp _loc = function
| <:ctyp< $tp1$ $tp2$ >> ->
let fp1 = type_of_sexp _loc tp1 in
let fp2 = type_of_sexp _loc tp2 in
`Fun (unroll_fun_matches _loc fp1 fp2)
| <:ctyp< ( $tup:tp$ ) >> -> tuple_of_sexp _loc tp
| <:ctyp< '$parm$ >> -> `Fun (Gen.ide _loc ("_of_" ^ parm))
| <:ctyp< $id:id$ >> -> `Fun (path_of_sexp_fun _loc id)
| <:ctyp< $_$ -> $_$ >> -> `Fun <:expr< Conv.fun_of_sexp >>
| <:ctyp< [< $row_fields$ ] >> | <:ctyp< [> $row_fields$ ] >>
| <:ctyp< [= $row_fields$ ] >> ->
variant_of_sexp _loc ?full_type:None row_fields
| <:ctyp< ! $parms$ . $poly_tp$ >> -> poly_of_sexp _loc parms poly_tp
| _ ->
prerr_endline (get_loc_err _loc "type_of_sexp: unknown type construct");
exit 1
(* Generates the match case converting [Sexp.List [e1; ...; en]] into an
   n-ary tuple, parsing each element with the converter derived from its
   component type; any other shape reports a size mismatch. *)
and tuple_of_sexp _loc tps =
  let fps = List.map (type_of_sexp _loc) (list_of_ctyp tps []) in
  let bindings, patts, vars = mk_bindings _loc fps in
  (* expected arity, embedded into the error message as a literal *)
  let n = string_of_int (List.length fps) in
  `Match
    <:match_case<
      Sexp.List $Gen.mk_patt_lst _loc patts$ ->
        let $bindings$ in
        ( $tup:exCom_of_list vars$ )
      | sexp -> Conv_error.tuple_of_size_n_expected _loc $int:n$ sexp
    >>
and mk_internal_call _loc = function
| <:ctyp< $id:id$ >> ->
let call =
match Gen.get_rev_id_path id [] with
| tn :: rev_path -> mk_abst_call _loc tn ~internal:true rev_path
in
call
| <:ctyp< $tp1$ $tp2$ >> ->
let fp1 = `Fun (mk_internal_call _loc tp1) in
let fp2 = type_of_sexp _loc tp2 in
unroll_fun_matches _loc fp1 fp2
and handle_variant_inh _loc full_type match_last other_matches inh =
let fun_expr = mk_internal_call _loc inh in
let match_exc =
handle_no_variant_match _loc (
handle_variant_match_last _loc match_last other_matches) in
let new_other_matches =
[
<:match_case<
_ -> try ($fun_expr$ sexp :> $full_type$) with [ $match_exc$ ]
>>
]
in
new_other_matches, true
and mk_variant_match_atom _loc full_type rev_atoms_inhs rev_structs =
let coll (other_matches, match_last) = function
| `A (_loc, cnstr) ->
let new_match = <:match_case< $str:cnstr$ -> `$cnstr$ >> in
new_match :: other_matches, false
| `I (_loc, inh) ->
handle_variant_inh _loc full_type match_last other_matches inh
in
let other_matches =
mk_variant_other_matches _loc rev_structs "ptag_no_args"
in
let match_atoms_inhs, match_last =
List.fold_left coll (other_matches, false) rev_atoms_inhs in
handle_variant_match_last _loc match_last match_atoms_inhs
and mk_cnstr_args_match _loc ~is_variant cnstr tps =
let fps = List.map (type_of_sexp _loc) (list_of_ctyp tps []) in
let bindings, patts, vars = mk_bindings _loc fps in
let good_arg_match_expr =
let vars_expr =
match vars with
| [var_expr] -> var_expr
| _ -> <:expr< ( $tup:exCom_of_list vars$ ) >>
in
if is_variant then <:expr< `$cnstr$ $vars_expr$ >>
else <:expr< $uid:cnstr$ $vars_expr$ >>
in
let handle_exc =
if is_variant then "ptag_incorrect_n_args" else "stag_incorrect_n_args"
in
<:expr<
match sexp_args with
[ $Gen.mk_patt_lst _loc patts$ -> let $bindings$ in $good_arg_match_expr$
| _ -> Conv_error.$lid:handle_exc$ _loc tag sexp ]
>>
and mk_variant_match_struct _loc full_type rev_structs_inhs rev_atoms =
let has_structs_ref = ref false in
let coll (other_matches, match_last) = function
| `S (_loc, cnstr, tps) ->
has_structs_ref := true;
let expr = mk_cnstr_args_match _loc ~is_variant:true cnstr tps in
let new_match = <:match_case< ($str:cnstr$ as tag) -> $expr$ >> in
new_match :: other_matches, false
| `I (_loc, inh) ->
handle_variant_inh _loc full_type match_last other_matches inh
in
let other_matches =
mk_variant_other_matches _loc rev_atoms "ptag_no_args"
in
let match_structs_inhs, match_last =
List.fold_left coll (other_matches, false) rev_structs_inhs
in
(
handle_variant_match_last _loc match_last match_structs_inhs,
!has_structs_ref
)
and handle_variant_tag _loc full_type row_fields =
let rev_atoms, rev_structs, rev_atoms_inhs, rev_structs_inhs =
List.fold_left (split_row_field _loc) ([], [], [], []) row_fields
in
let match_struct, has_structs =
mk_variant_match_struct _loc full_type rev_structs_inhs rev_atoms in
let maybe_sexp_args_patt =
if has_structs then <:patt< sexp_args >>
else <:patt< _ >>
in
<:match_case<
Sexp.Atom atom as sexp ->
$mk_variant_match_atom _loc full_type rev_atoms_inhs rev_structs$
| Sexp.List
[Sexp.Atom atom :: $maybe_sexp_args_patt$] as sexp ->
$match_struct$
| Sexp.List [Sexp.List _ :: _] as sexp ->
Conv_error.nested_list_invalid_poly_var _loc sexp
| Sexp.List [] as sexp ->
Conv_error.empty_list_invalid_poly_var _loc sexp
>>
and variant_of_sexp _loc ?full_type row_tp =
let row_fields = list_of_ctyp row_tp [] in
let is_contained, full_type =
match full_type with
| None -> true, <:ctyp< [= $row_tp$ ] >>
| Some full_type -> false, full_type
in
let top_match =
match row_fields with
| (<:ctyp< $id:_$ >> | <:ctyp< $_$ $_$ >>) as inh :: rest ->
let rec loop inh row_fields =
let call =
<:expr< ( $mk_internal_call _loc inh$ sexp :> $full_type$ ) >>
in
match row_fields with
| [] -> call
| h :: t ->
let expr =
match h with
| <:ctyp< $id:_$ >> | <:ctyp< $_$ $_$ >> -> loop h t
| _ ->
let rftag_matches =
handle_variant_tag _loc full_type row_fields
in
<:expr< match sexp with [ $rftag_matches$ ] >>
in
<:expr<
try $call$ with
[ $handle_no_variant_match _loc expr$ ]
>>
in
<:match_case< sexp -> $loop inh rest$ >>
| _ :: _ -> handle_variant_tag _loc full_type row_fields
in
if is_contained then
`Fun
<:expr<
fun sexp ->
try match sexp with [ $top_match$ ]
with
[ Conv_error.No_variant_match (msg, sexp) ->
Conv.of_sexp_error msg sexp ]
>>
else `Match top_match
and poly_of_sexp _loc parms tp =
let bindings =
let mk_binding parm =
<:binding<
$Gen.idp _loc ("_of_" ^ parm)$ =
fun sexp -> Conv_error.record_poly_field_value _loc sexp
>>
in
List.map mk_binding (Gen.ty_var_list_of_ctyp parms [])
in
match type_of_sexp _loc tp with
| `Fun fun_expr -> `Fun <:expr< let $list:bindings$ in $fun_expr$ >>
| `Match matchings ->
`Match
<:match_case<
arg ->
let $list:bindings$ in
match arg with
[ $matchings$ ]
>>
(* Generates the "successful" match cases for a sum type: each
   constructor is accepted both capitalized and with its first letter
   lowercased. An atom maps to an argument-less constructor; a list whose
   head is the constructor name dispatches to [mk_cnstr_args_match].
   NOTE(review): [String.copy] plus the in-place [lccnstr.[0] <- ...]
   assignment requires mutable strings (pre-safe-strings OCaml) —
   confirm the supported compiler range. *)
let rec mk_good_sum_matches _loc = function
  | <:ctyp< $uid:cnstr$ >> ->
    (* accept both "Cnstr" and "cnstr" spellings *)
    let lccnstr = String.copy cnstr in
    lccnstr.[0] <- Char.lowercase lccnstr.[0];
    <:match_case<
      Sexp.Atom ($str:lccnstr$ | $str:cnstr$) -> $uid:cnstr$
    >>
  | <:ctyp< $uid:cnstr$ of $tps$ >> ->
    let lccnstr = String.copy cnstr in
    lccnstr.[0] <- Char.lowercase lccnstr.[0];
    <:match_case<
      (Sexp.List
        [Sexp.Atom ($str:lccnstr$ | $str:cnstr$ as tag) ::
          sexp_args] as sexp) ->
        $mk_cnstr_args_match _loc ~is_variant:false cnstr tps$
    >>
  (* alternatives: concatenate the cases generated for both sides *)
  | <:ctyp< $tp1$ | $tp2$ >> ->
    <:match_case<
      $mk_good_sum_matches _loc tp1$
      | $mk_good_sum_matches _loc tp2$
    >>
let rec mk_bad_sum_matches _loc = function
| <:ctyp< $uid:cnstr$ >> ->
let lccnstr = String.copy cnstr in
lccnstr.[0] <- Char.lowercase lccnstr.[0];
<:match_case<
Sexp.List
[Sexp.Atom ($str:lccnstr$ | $str:cnstr$) :: _] as sexp ->
Conv_error.stag_no_args _loc sexp
>>
| <:ctyp< $uid:cnstr$ of $_$ >> ->
let lccnstr = String.copy cnstr in
lccnstr.[0] <- Char.lowercase lccnstr.[0];
<:match_case<
Sexp.Atom ($str:lccnstr$ | $str:cnstr$) as sexp ->
Conv_error.stag_takes_args _loc sexp
>>
| <:ctyp< $tp1$ | $tp2$ >> ->
<:match_case<
$mk_bad_sum_matches _loc tp1$
| $mk_bad_sum_matches _loc tp2$
>>
(* Builds the full parser body for a sum type: the well-formed
   constructor cases first, then the mismatched-shape cases (an atom
   where a list is required and vice versa), then catch-alls for
   structurally invalid s-expressions. *)
let sum_of_sexp _loc alts =
  `Match
    <:match_case<
      $mk_good_sum_matches _loc alts$
      | $mk_bad_sum_matches _loc alts$
      | Sexp.List [Sexp.List _ :: _] as sexp ->
        Conv_error.nested_list_invalid_sum _loc sexp
      | Sexp.List [] as sexp ->
        Conv_error.empty_list_invalid_sum _loc sexp
      | sexp -> Conv_error.unexpected_stag _loc sexp
    >>
let rec mk_extract_fields _loc = function
| <:ctyp< $tp1$; $tp2$ >> ->
<:match_case<
$mk_extract_fields _loc tp1$
| $mk_extract_fields _loc tp2$
>>
| <:ctyp< $lid:nm$ : mutable sexp_option $tp$ >>
| <:ctyp< $lid:nm$ : sexp_option $tp$ >>
| <:ctyp< $lid:nm$ : mutable $tp$ >>
| <:ctyp< $lid:nm$ : $tp$ >> ->
let unrolled =
unroll_cnv_fp _loc <:expr< field_sexp >> (type_of_sexp _loc tp)
in
<:match_case<
$str:nm$ ->
match $lid:nm ^ "_field"$.val with
[ None ->
let fvalue = $unrolled$ in
$lid:nm ^ "_field"$.val := Some fvalue
| Some _ ->
duplicates.val := [ field_name :: duplicates.val ] ]
>>
let mk_handle_record_match_result _loc has_poly flds =
let has_nonopt_fields = ref false in
let res_tpls, bi_lst, good_patts =
let rec loop (res_tpls, bi_lst, good_patts as acc) = function
| <:ctyp< $lid:nm$ : sexp_option $_$ >>
| <:ctyp< $lid:nm$ : mutable sexp_option $_$ >> ->
let fld = <:expr< $lid:nm ^ "_field"$.val >> in
(
<:expr< $fld$ >> :: res_tpls,
bi_lst,
<:patt< $lid:nm ^ "_value"$ >> :: good_patts
)
| <:ctyp< $lid:nm$ : $_$ >> ->
has_nonopt_fields := true;
let fld = <:expr< $lid:nm ^ "_field"$.val >> in
(
<:expr< $fld$ >> :: res_tpls,
<:expr< ($fld$ = None, $str:nm$) >> :: bi_lst,
<:patt< Some $lid:nm ^ "_value"$ >> :: good_patts
)
| <:ctyp< $tp1$; $tp2$ >> -> loop (loop acc tp2) tp1
in
loop ([], [], []) flds
in
let match_good_expr =
if has_poly then
let rec loop acc = function
| <:ctyp< $tp1$; $tp2$ >> -> loop (loop acc tp2) tp1
| <:ctyp< $lid:nm$ : $_$ >> -> <:expr< $lid:nm ^ "_value"$ >> :: acc
in
match loop [] flds with
| [match_good_expr] -> match_good_expr
| match_good_exprs -> <:expr< $tup:exCom_of_list match_good_exprs$ >>
else
let rec loop = function
| <:ctyp< $tp1$; $tp2$ >> -> <:rec_binding< $loop tp1$; $loop tp2$ >>
| <:ctyp< $lid:nm$ : $_$ >> ->
<:rec_binding< $lid:nm$ = $lid:nm ^ "_value"$ >>
in
<:expr< { $loop flds$ } >>
in
let expr, patt =
match res_tpls, good_patts with
| [res_expr], [res_patt] -> res_expr, res_patt
| _ ->
<:expr< $tup:exCom_of_list res_tpls$ >>,
<:patt< $tup:paCom_of_list good_patts$ >>
in
if !has_nonopt_fields then
<:expr<
match $expr$ with
[ $patt$ -> $match_good_expr$
| _ ->
Conv_error.record_undefined_elements _loc sexp
$Gen.mk_expr_lst _loc bi_lst$
]
>>
else <:expr< match $expr$ with [ $patt$ -> $match_good_expr$ ] >>
let mk_cnv_fields has_poly _loc flds =
let field_refs =
let rec loop = function
| <:ctyp< $tp1$; $tp2$ >> -> <:binding< $loop tp1$ and $loop tp2$ >>
| <:ctyp< $lid:nm$ : $_$ >> ->
<:binding< $lid:nm ^ "_field"$ = ref None >>
in
loop flds
in
<:expr<
let $field_refs$ and duplicates = ref [] and extra = ref [] in
let rec iter = fun
[ [
Sexp.List
[(Sexp.Atom field_name); field_sexp] ::
tail
] ->
do {
match field_name with
[ $mk_extract_fields _loc flds$
| _ ->
if Conv.record_check_extra_fields.val then
extra.val := [ field_name :: extra.val ]
else () ];
iter tail }
| [sexp :: _] -> Conv_error.record_only_pairs_expected _loc sexp
| [] -> () ]
in
do {
iter field_sexps;
if duplicates.val <> [] then
Conv_error.record_duplicate_fields
_loc duplicates.val sexp
else if extra.val <> [] then
Conv_error.record_extra_fields _loc extra.val sexp
else $mk_handle_record_match_result _loc has_poly flds$
}
>>
(* Whether any field of a record type carries an explicitly polymorphic
   ("! 'a . t") type, in which case the generated code must bind fields
   positionally rather than through a record literal. *)
let rec is_poly = function
  | <:ctyp< $_$ : ! $_$ . $_$ >> -> true
  | <:ctyp< $flds1$; $flds2$ >> -> is_poly flds1 || is_poly flds2
  | _ -> false
let record_of_sexp _loc flds =
let handle_fields =
let has_poly = is_poly flds in
let cnv_fields = mk_cnv_fields has_poly _loc flds in
if has_poly then
let is_singleton_ref = ref true in
let patt =
let rec loop = function
| <:ctyp< $tp1$; $tp2$ >> ->
is_singleton_ref := false;
<:patt< $loop tp1$, $loop tp2$ >>
| <:ctyp< $lid:nm$ : $_$ >> -> <:patt< $lid:nm$ >>
in
let patt = loop flds in
if !is_singleton_ref then patt
else <:patt< $tup:patt$ >>
in
let record_def =
let rec loop = function
| <:ctyp< $tp1$; $tp2$ >> ->
<:rec_binding< $loop tp1$; $loop tp2$ >>
| <:ctyp< $lid:nm$ : $_$ >> -> <:rec_binding< $lid:nm$ = $lid:nm$ >>
in
loop flds
in
<:expr<
let $patt$ = $cnv_fields$ in
{ $record_def$ }
>>
else cnv_fields
in
`Match
<:match_case<
Sexp.List field_sexps as sexp -> $handle_fields$
| Sexp.Atom _ as sexp ->
Conv_error.record_list_instead_atom _loc sexp
>>
(* Parser for an empty (uninhabited) type: any input is an error. *)
let nil_of_sexp _loc =
  `Fun <:expr< fun sexp -> Conv_error.empty_type _loc sexp >>
(* Detects whether an expression is (an application of) a type-variable
   converter, i.e. an identifier following the "_of_<var>" naming scheme.
   NOTE(review): [name.[0] = '_' && name.[1] = 'o'] assumes the
   identifier has at least two characters; a one-character name would
   raise [Invalid_argument]. Presumably generated names always look like
   "_of_*" — confirm. *)
let rec is_poly_call = function
  | <:expr< $f$ $_$ >> -> is_poly_call f
  | <:expr< $lid:name$ >> -> name.[0] = '_' && name.[1] = 'o'
  | _ -> false
let td_of_sexp _loc type_name tps rhs =
let is_alias_ref = ref false in
let handle_alias _loc tp =
is_alias_ref := true;
type_of_sexp _loc tp
in
let coll_args tp param = <:ctyp< $tp$ $param$ >> in
let full_type = List.fold_left coll_args <:ctyp< $lid:type_name$ >> tps in
let is_variant_ref = ref false in
let handle_variant row_fields =
is_variant_ref := true;
variant_of_sexp ~full_type row_fields
in
let body =
let rec loop _loc =
Gen.switch_tp_def _loc
~alias:handle_alias
~sum:sum_of_sexp
~record:record_of_sexp
~variants:handle_variant
~mani:(fun _loc _tp1 -> loop _loc)
~nil:nil_of_sexp
in
match loop _loc rhs with
| `Fun fun_expr ->
if !is_alias_ref && tps = [] then
<:expr< fun [ sexp -> $fun_expr$ sexp ] >>
else <:expr< $fun_expr$ >>
| `Match matchings -> <:expr< fun [ $matchings$ ] >>
in
let internal_name = type_name ^ "_of_sexp" ^ "__" in
let arg_patts, arg_exprs =
List.split (
List.map (function tp ->
let name = "_of_" ^ Gen.get_tparam_id tp in
Gen.idp _loc name, Gen.ide _loc name
)
tps)
in
let with_poly_call = !is_alias_ref && is_poly_call body in
let internal_fun_body =
let full_type_name = sprintf "%s.%s" (get_conv_path ()) type_name in
if with_poly_call then
Gen.abstract _loc arg_patts
<:expr<
fun sexp ->
Conv_error.silly_type $str:full_type_name$ sexp
>>
else
<:expr<
let _loc = $str:full_type_name$ in
$Gen.abstract _loc arg_patts body$
>>
in
let pre_external_fun_body =
let internal_call =
let internal_expr = Gen.ide _loc internal_name in
<:expr< $Gen.apply _loc internal_expr arg_exprs$ sexp >>
in
let no_variant_match_mc =
<:match_case<
Conv_error.No_variant_match (msg, sexp) ->
Conv.of_sexp_error msg sexp
>>
in
if with_poly_call then
<:expr< try $body$ sexp with [ $no_variant_match_mc$ ] >>
else if !is_variant_ref || !is_alias_ref then
<:expr< try $internal_call$ with [ $no_variant_match_mc$ ] >>
else internal_call
in
let internal_binding =
<:binding< $lid:internal_name$ = $internal_fun_body$ >>
in
let external_fun_patt = Gen.idp _loc (type_name ^ "_of_sexp") in
let external_fun_body =
Gen.abstract _loc arg_patts <:expr< fun sexp -> $pre_external_fun_body$ >>
in
let external_binding =
<:binding< $external_fun_patt$ = $external_fun_body$ >>
in
internal_binding, external_binding
let rec tds_of_sexp _loc acc = function
| TyDcl (_loc, type_name, tps, rhs, _cl) ->
td_of_sexp _loc type_name tps rhs :: acc
| TyAnd (_loc, tp1, tp2) -> tds_of_sexp _loc (tds_of_sexp _loc acc tp2) tp1
let of_sexp = function
| TyDcl (_loc, type_name, tps, rhs, _cl) ->
let internal_binding, external_binding =
td_of_sexp _loc type_name tps rhs
in
let recursive = Gen.type_is_recursive _loc type_name rhs in
if recursive then
<:str_item<
value rec $internal_binding$
and $external_binding$
>>
else
<:str_item<
value $internal_binding$;
value $external_binding$
>>
| TyAnd (_loc, _, _) as tds ->
let two_bindings = tds_of_sexp _loc [] tds in
let bindings =
List.map (fun (b1, b2) -> <:binding< $b1$ and $b2$ >>) two_bindings
in
<:str_item< value rec $list:bindings$ >>
let () = add_generator "of_sexp" of_sexp
end
let () =
add_generator
"sexp"
(fun tds ->
let _loc = Loc.ghost in
<:str_item<
$Generate_of_sexp.of_sexp tds$; $Generate_sexp_of.sexp_of tds$
>>
)
|
9f5212be96a86d5d2fc4651340f0e3821d238d864e9f940b9716bb21b44775b5 | WorksHub/client | url.cljc | (ns wh.common.url
(:require #?(:clj [ring.util.codec :as codec])
#?(:clj [taoensso.timbre :refer [error]])
[#?(:clj clojure.spec.alpha
:cljs cljs.spec.alpha) :as s]
[#?(:clj clojure.spec.gen.alpha
:cljs cljs.spec.gen.alpha) :as gen]
#?(:cljs [goog.Uri :as uri])
[bidi.bidi :as bidi]
[clojure.string :as str]
[wh.common.text :as text]
[wh.util :as util]))
(def wh-cookie-names {:auth "auth_token"
:tracking-consent "wh_tracking_consent"
:tracking-id "wh_aid"})
(defn sanitize-url [url]
(if (str/includes? url "://")
url
(str "http://" url)))
(defn websites-domain? [website domain]
(when domain
(str/includes? domain website)))
(defn strip-query-params
[uri]
(if-let [?-index (str/last-index-of uri "?")]
(subs uri 0 ?-index)
uri))
(s/def ::string-or-uri
(s/spec (s/or :str string?
:uri (partial instance? #?(:clj java.net.URI
:cljs goog.Uri)))
:gen (fn []
(gen/elements
(->> [""
"?"
"?hey"
"?hey="
"?hey=jude"
"?hey=jude&sergeant=pepper"
"?hey=jude&sergeant=pepper&color=yellow%20submarine"]
(mapcat (juxt identity (partial str "-url.com")))
(mapcat (juxt identity #?(:clj #(java.net.URI. %)
:cljs uri/parse)))
(concat [" " " " "=" "? " "? " "?="]))))))
(s/def ::query-params
(s/map-of (s/or :keyword keyword?
:string string?)
(s/or :string string?
:strings (s/coll-of string?))))
(s/def ::query-params-without-vector-values
(s/map-of (s/or :keyword keyword?
:string string?)
(s/or :string string?
:strings string?)))
(defn concat-vector-values
"Concatenates vector values of the `query-params` map to ';'-separated
strings while deduplicating their constituent parts.
`{\"tags\" [\"clojure\" \"scala\"], \"remote\" \"true\"}` ->
`{\"tags\" \"clojure;scala\", \"remote\" \"true\"}`
Returns the same `query-params` in case there are no vectored values."
[query-params]
(if (some (comp vector? second) query-params)
(reduce (fn [m [k v]]
(assoc m k (if (vector? v)
(->> v (distinct) (str/join ";"))
v)))
{}
query-params)
query-params))
(s/fdef concat-vector-values
:args (s/cat :query-params ::query-params)
:ret ::query-params-without-vector-values)
#?(:cljs (declare uri->query-params))
(defn parse-query-string
"Parses supplied 'www-form-urlencoded' string using UTF-8. For `nil` or
empty string returns an empty map, otherwise — a \"query params\" map."
[query-string]
#?(:clj
(if-not (empty? query-string)
(let [parsed (codec/form-decode query-string)]
(if (string? parsed)
matches ` goog .
matches ` goog .
{}))
#?(:cljs
(let [uri (uri/create nil nil nil nil nil query-string nil false)]
(uri->query-params uri))))
(s/fdef parse-query-string
:args (s/cat :query-string (s/nilable string?))
:ret ::query-params)
(defn uri->query-params
"Transforms query string of some URI into a \"query params\" map.
Accepts strings and platform native URI objects as its argument."
[uri]
#?(:clj
(if (string? uri)
(cond
(str/blank? uri) (parse-query-string "")
(= (.charAt ^String uri 0) \?) (parse-query-string (subs uri 1))
:else (-> (java.net.URI. uri) .getRawQuery parse-query-string))
(parse-query-string (when uri (.getRawQuery ^java.net.URI uri)))))
#?(:cljs
(let [params (-> uri uri/parse .getQueryData)]
(->> (interleave (.getKeys params) (.getValues params))
(partition 2)
(reduce (fn [a [k v]]
(if (contains? a k)
(update a k #(if (coll? %) (conj % v) [% v]))
(assoc a k v)))
{})))))
(s/fdef uri->query-params
:args (s/cat :uri (s/nilable ::string-or-uri))
:ret ::query-params)
(defn serialize-query-params
"Serializes a \"query params\" map into URI's query string.
Filters out query params with no value, i.e. `nil`.
Results in `nil` for `nil` or empty map."
[m]
#?(:clj
(if (map? m) (-> m (util/remove-nils) (codec/form-encode)) ""))
#?(:cljs
(let [usp (js/URLSearchParams.)]
(run! (fn [[k v]]
(if (coll? v)
(run! (fn [v'] (.append usp (name k) v')) v)
(.append usp (name k) v)))
(js->clj m))
(.toString usp))))
(s/fdef serialize-query-params
:args (s/cat :m (s/nilable ::query-params))
:ret string?)
(defn uri->domain [uri]
(let [uri (str/trim uri)]
#?(:cljs
(try
(text/not-blank (.getDomain (uri/parse uri)))
(catch js/Error _)))
#?(:clj
(try
(.getHost (java.net.URI. uri))
(catch Exception _e
(error "Failed to parse URI:" uri))))))
(defn strip-path [uri]
(let [uri (str/trim uri)]
#?(:cljs
(try
(let [u (uri/parse uri)
port (.getPort u)]
(str (.getScheme u) "://" (.getDomain u) (when (and port (pos-int? port)) (str ":" port))))
(catch js/Error _)))
#?(:clj
(try
(let [u (java.net.URI. uri)
port (.getPort u)]
(str (.getScheme u) "://" (.getHost u) (when (and port (pos-int? port)) (str ":" port))))
(catch Exception _e
(error "Failed to parse URI:" uri))))))
(defn has-domain? [uri]
(not (nil? (uri->domain uri))))
(defn detect-page-type [url]
(when url
(let [url (sanitize-url url)
[_ _ domain & remaining] (str/split url #"/")
handle (last remaining)]
(merge {:url url}
(condp websites-domain? domain
"github.com" {:type :github, :display handle}
"twitter.com" {:type :twitter, :display handle}
"facebook.com" {:type :facebook, :display handle}
"linkedin.com" {:type :linkedin, :display handle}
"stackoverflow.com" {:type :stackoverflow, :display handle}
{:type :web, :display (uri->domain url)})))))
(defn detect-urls-type [urls]
(mapv (comp detect-page-type :url) urls))
(defn vertical-homepage-href
"Server-side callers should consider wh.url/base-url as a more
robust - but less portable - version of ths fn"
[env vertical]
(case (name env)
"prod" (str "https://" (name vertical) ".works-hub.com")
"stage" (str "/?vertical=" (name vertical))
;;else
(str "http://" (name vertical) ".localdomain:8080")))
(defn share-urls [args]
(let [{:keys [text text-twitter text-linkedin url]} (util/map-vals bidi/url-encode args)]
{:twitter (str "="
(or text-twitter text) "&url=" url)
:facebook (str "=" url)
:linkedin (str "="
url "&title=" (or text-linkedin text) "&summary=&origin=")}))
(def company-landing-page "-hub.com")
(def demo-link "-off-demo")
| null | https://raw.githubusercontent.com/WorksHub/client/370a0be7890ed909e05238f3264f57e3cf2d9e8f/common/src/wh/common/url.cljc | clojure | '-separated
else | (ns wh.common.url
(:require #?(:clj [ring.util.codec :as codec])
#?(:clj [taoensso.timbre :refer [error]])
[#?(:clj clojure.spec.alpha
:cljs cljs.spec.alpha) :as s]
[#?(:clj clojure.spec.gen.alpha
:cljs cljs.spec.gen.alpha) :as gen]
#?(:cljs [goog.Uri :as uri])
[bidi.bidi :as bidi]
[clojure.string :as str]
[wh.common.text :as text]
[wh.util :as util]))
(def wh-cookie-names {:auth "auth_token"
:tracking-consent "wh_tracking_consent"
:tracking-id "wh_aid"})
(defn sanitize-url [url]
(if (str/includes? url "://")
url
(str "http://" url)))
(defn websites-domain? [website domain]
(when domain
(str/includes? domain website)))
(defn strip-query-params
[uri]
(if-let [?-index (str/last-index-of uri "?")]
(subs uri 0 ?-index)
uri))
(s/def ::string-or-uri
(s/spec (s/or :str string?
:uri (partial instance? #?(:clj java.net.URI
:cljs goog.Uri)))
:gen (fn []
(gen/elements
(->> [""
"?"
"?hey"
"?hey="
"?hey=jude"
"?hey=jude&sergeant=pepper"
"?hey=jude&sergeant=pepper&color=yellow%20submarine"]
(mapcat (juxt identity (partial str "-url.com")))
(mapcat (juxt identity #?(:clj #(java.net.URI. %)
:cljs uri/parse)))
(concat [" " " " "=" "? " "? " "?="]))))))
(s/def ::query-params
(s/map-of (s/or :keyword keyword?
:string string?)
(s/or :string string?
:strings (s/coll-of string?))))
(s/def ::query-params-without-vector-values
(s/map-of (s/or :keyword keyword?
:string string?)
(s/or :string string?
:strings string?)))
(defn concat-vector-values
strings while deduplicating their constituent parts.
`{\"tags\" [\"clojure\" \"scala\"], \"remote\" \"true\"}` ->
`{\"tags\" \"clojure;scala\", \"remote\" \"true\"}`
Returns the same `query-params` in case there are no vectored values."
[query-params]
(if (some (comp vector? second) query-params)
(reduce (fn [m [k v]]
(assoc m k (if (vector? v)
(->> v (distinct) (str/join ";"))
v)))
{}
query-params)
query-params))
(s/fdef concat-vector-values
:args (s/cat :query-params ::query-params)
:ret ::query-params-without-vector-values)
#?(:cljs (declare uri->query-params))
(defn parse-query-string
"Parses supplied 'www-form-urlencoded' string using UTF-8. For `nil` or
empty string returns an empty map, otherwise — a \"query params\" map."
[query-string]
#?(:clj
(if-not (empty? query-string)
(let [parsed (codec/form-decode query-string)]
(if (string? parsed)
matches ` goog .
matches ` goog .
{}))
#?(:cljs
(let [uri (uri/create nil nil nil nil nil query-string nil false)]
(uri->query-params uri))))
(s/fdef parse-query-string
:args (s/cat :query-string (s/nilable string?))
:ret ::query-params)
(defn uri->query-params
"Transforms query string of some URI into a \"query params\" map.
Accepts strings and platform native URI objects as its argument."
[uri]
#?(:clj
(if (string? uri)
(cond
(str/blank? uri) (parse-query-string "")
(= (.charAt ^String uri 0) \?) (parse-query-string (subs uri 1))
:else (-> (java.net.URI. uri) .getRawQuery parse-query-string))
(parse-query-string (when uri (.getRawQuery ^java.net.URI uri)))))
#?(:cljs
(let [params (-> uri uri/parse .getQueryData)]
(->> (interleave (.getKeys params) (.getValues params))
(partition 2)
(reduce (fn [a [k v]]
(if (contains? a k)
(update a k #(if (coll? %) (conj % v) [% v]))
(assoc a k v)))
{})))))
(s/fdef uri->query-params
:args (s/cat :uri (s/nilable ::string-or-uri))
:ret ::query-params)
(defn serialize-query-params
"Serializes a \"query params\" map into URI's query string.
Filters out query params with no value, i.e. `nil`.
Results in `nil` for `nil` or empty map."
[m]
#?(:clj
(if (map? m) (-> m (util/remove-nils) (codec/form-encode)) ""))
#?(:cljs
(let [usp (js/URLSearchParams.)]
(run! (fn [[k v]]
(if (coll? v)
(run! (fn [v'] (.append usp (name k) v')) v)
(.append usp (name k) v)))
(js->clj m))
(.toString usp))))
(s/fdef serialize-query-params
:args (s/cat :m (s/nilable ::query-params))
:ret string?)
(defn uri->domain [uri]
(let [uri (str/trim uri)]
#?(:cljs
(try
(text/not-blank (.getDomain (uri/parse uri)))
(catch js/Error _)))
#?(:clj
(try
(.getHost (java.net.URI. uri))
(catch Exception _e
(error "Failed to parse URI:" uri))))))
(defn strip-path [uri]
(let [uri (str/trim uri)]
#?(:cljs
(try
(let [u (uri/parse uri)
port (.getPort u)]
(str (.getScheme u) "://" (.getDomain u) (when (and port (pos-int? port)) (str ":" port))))
(catch js/Error _)))
#?(:clj
(try
(let [u (java.net.URI. uri)
port (.getPort u)]
(str (.getScheme u) "://" (.getHost u) (when (and port (pos-int? port)) (str ":" port))))
(catch Exception _e
(error "Failed to parse URI:" uri))))))
(defn has-domain? [uri]
(not (nil? (uri->domain uri))))
(defn detect-page-type [url]
(when url
(let [url (sanitize-url url)
[_ _ domain & remaining] (str/split url #"/")
handle (last remaining)]
(merge {:url url}
(condp websites-domain? domain
"github.com" {:type :github, :display handle}
"twitter.com" {:type :twitter, :display handle}
"facebook.com" {:type :facebook, :display handle}
"linkedin.com" {:type :linkedin, :display handle}
"stackoverflow.com" {:type :stackoverflow, :display handle}
{:type :web, :display (uri->domain url)})))))
(defn detect-urls-type [urls]
(mapv (comp detect-page-type :url) urls))
(defn vertical-homepage-href
"Server-side callers should consider wh.url/base-url as a more
robust - but less portable - version of ths fn"
[env vertical]
(case (name env)
"prod" (str "https://" (name vertical) ".works-hub.com")
"stage" (str "/?vertical=" (name vertical))
(str "http://" (name vertical) ".localdomain:8080")))
(defn share-urls [args]
(let [{:keys [text text-twitter text-linkedin url]} (util/map-vals bidi/url-encode args)]
{:twitter (str "="
(or text-twitter text) "&url=" url)
:facebook (str "=" url)
:linkedin (str "="
url "&title=" (or text-linkedin text) "&summary=&origin=")}))
(def company-landing-page "-hub.com")
(def demo-link "-off-demo")
|
bd40092c4c8b0aca6c4c707bc1fb5eb37a0d2cfafa98d575f5bf7abad38f8ee2 | mtgred/netrunner | rewrite.clj | (ns game.rewrite
(:require
[clojure.java.io :as io]
[jinteki.utils :refer [slugify]]
[rewrite-clj.node :as n]
[rewrite-clj.zip :as z]
[clojure.string :as str]))
(defn deftest? [zloc]
(= "deftest" (z/string zloc)))
(defn testing? [zloc]
(= "testing" (z/string zloc)))
(defn basic-test? [zloc]
(and (testing? zloc)
(re-matches #"(?i).*basic .*test.*" (-> zloc z/right z/string))))
(defn get-node-symbol
"handles `(deftest X ...)` and `(deftest ^{...} X ...)
metadata is a list and the final item in the list is the symbol"
[zloc]
(let [zloc (z/right zloc)]
(if-let [zloc (z/down zloc)]
(->> zloc
(z/right)
(z/string))
(z/string zloc))))
(defn get-testing-branches [zloc]
(->> zloc
(z/down)
(iterate z/right)
(take 100)
(remove #(basic-test? (z/down %)))
(filter #(testing? (z/down %)))
doall))
(defn slurp-and-spit [zloc]
(-> zloc
z/remove ; testing
z/down ; re-enter list
z/remove ; "asdf"
z/down ; re-enter list
z/splice ; remove outer list
z/up))
(defn slurp-and-spit-basic-test [zloc]
(z/replace
zloc
(let [zloc (z/subzip zloc)]
(loop [zloc (z/down zloc)]
(cond
;; basic test
(-> zloc z/down basic-test?)
(z/root (slurp-and-spit (-> zloc z/down)))
;; at the end?
(not (-> zloc z/right))
(-> zloc z/root)
:else
(recur (-> zloc z/right)))))))
(defn remove-testing-branches [zloc]
(z/replace
zloc
(loop [zloc (z/down (z/subzip zloc))]
(let [final-position? (not (-> zloc z/right))]
(cond
;; final position testing branch
(and final-position?
(-> zloc z/down testing?)
(not (-> zloc z/down basic-test?)))
(-> zloc z/remove z/root)
;; final position all else
final-position?
(-> zloc z/root)
;; non-final testing branch
(and (-> zloc z/down testing?)
(not (-> zloc z/down basic-test?)))
(recur (-> zloc z/remove*))
:else
(recur (-> zloc z/right)))))))
(defn build-deftest-name [deftest-name string]
(symbol (str deftest-name "-" (slugify string))))
(defn prepend-deftest [deftest-name branch]
(let [string (-> branch z/down z/right z/string)
new-deftest-name (build-deftest-name deftest-name string)
comment (try (n/comment-node
(->> string
(str/split-lines)
(str/join " ")
(#(subs % 1 (dec (count %))))
(#(str "; " % "\n"))))
(catch Exception _
(prn "exception" string)
"; TODO \n")
(catch java.lang.AssertionError _
(prn "assertion" string)
(n/comment-node "; TODO \n"))
)
]
(-> branch
z/down
(z/replace 'deftest)
z/right
(z/replace new-deftest-name)
z/right
(z/insert-left comment)
(z/insert-space-left 3)
z/up
z/node)))
(defn create-new-deftest [zloc deftest-name branch]
(-> zloc
z/insert-newline-right
z/right*
z/insert-newline-right
z/right*
(z/insert-right (prepend-deftest deftest-name branch))
z/right))
(defn convert-tests-to-deftests [zloc testing-branches]
(let [deftest-name (get-node-symbol (z/down zloc))]
(loop [zloc zloc
branches testing-branches]
(if-let [branch (first branches)]
(recur
(create-new-deftest zloc deftest-name branch)
(next branches))
zloc))))
(defn process-deftest [zloc]
(if (deftest? (z/down zloc))
(let [testing-branches (get-testing-branches zloc)
zloc (-> zloc
remove-testing-branches
slurp-and-spit-basic-test
(convert-tests-to-deftests testing-branches))]
zloc)
zloc))
(defn rewrite-file [zloc]
(let [zloc (process-deftest zloc)]
(if (not (z/right zloc))
zloc
(recur (z/right zloc)))))
(defn process-file [file]
(->> file
(z/of-file)
(rewrite-file)
(z/root-string)
(spit file)))
(defn apply-fn-to-file [f]
(let [dir (io/file "test/clj/game/cards")]
(doseq [file (file-seq dir)
:when (.isFile file)]
(f file))))
(defn rewrite-card-tests []
(apply-fn-to-file process-file))
(defn clean-file [zloc]
(z/prewalk
(z/up zloc)
(fn [zloc]
(and (z/list? zloc)
(-> zloc z/down deftest?)
(not (-> zloc z/down z/right z/right))))
z/remove))
(defn process-file-to-clean [file]
(->> file
(z/of-file)
(clean-file)
(z/root-string)
(spit file)))
(defn clean-card-tests []
(apply-fn-to-file process-file-to-clean))
(defn load-test [& [path]]
(->> (io/file (str "test/clj/game/cards/" (or path "agendas") "_test.clj"))
(z/of-file)))
| null | https://raw.githubusercontent.com/mtgred/netrunner/4f8e61fef533e2838f6dcd2bfad28e71ee5de2d5/test/clj/game/rewrite.clj | clojure | testing
re-enter list
"asdf"
re-enter list
remove outer list
basic test
at the end?
final position testing branch
final position all else
non-final testing branch | (ns game.rewrite
(:require
[clojure.java.io :as io]
[jinteki.utils :refer [slugify]]
[rewrite-clj.node :as n]
[rewrite-clj.zip :as z]
[clojure.string :as str]))
(defn deftest? [zloc]
(= "deftest" (z/string zloc)))
(defn testing? [zloc]
(= "testing" (z/string zloc)))
(defn basic-test? [zloc]
(and (testing? zloc)
(re-matches #"(?i).*basic .*test.*" (-> zloc z/right z/string))))
(defn get-node-symbol
"handles `(deftest X ...)` and `(deftest ^{...} X ...)
metadata is a list and the final item in the list is the symbol"
[zloc]
(let [zloc (z/right zloc)]
(if-let [zloc (z/down zloc)]
(->> zloc
(z/right)
(z/string))
(z/string zloc))))
(defn get-testing-branches [zloc]
(->> zloc
(z/down)
(iterate z/right)
(take 100)
(remove #(basic-test? (z/down %)))
(filter #(testing? (z/down %)))
doall))
(defn slurp-and-spit [zloc]
(-> zloc
z/up))
(defn slurp-and-spit-basic-test [zloc]
(z/replace
zloc
(let [zloc (z/subzip zloc)]
(loop [zloc (z/down zloc)]
(cond
(-> zloc z/down basic-test?)
(z/root (slurp-and-spit (-> zloc z/down)))
(not (-> zloc z/right))
(-> zloc z/root)
:else
(recur (-> zloc z/right)))))))
(defn remove-testing-branches [zloc]
(z/replace
zloc
(loop [zloc (z/down (z/subzip zloc))]
(let [final-position? (not (-> zloc z/right))]
(cond
(and final-position?
(-> zloc z/down testing?)
(not (-> zloc z/down basic-test?)))
(-> zloc z/remove z/root)
final-position?
(-> zloc z/root)
(and (-> zloc z/down testing?)
(not (-> zloc z/down basic-test?)))
(recur (-> zloc z/remove*))
:else
(recur (-> zloc z/right)))))))
(defn build-deftest-name [deftest-name string]
(symbol (str deftest-name "-" (slugify string))))
(defn prepend-deftest [deftest-name branch]
(let [string (-> branch z/down z/right z/string)
new-deftest-name (build-deftest-name deftest-name string)
comment (try (n/comment-node
(->> string
(str/split-lines)
(str/join " ")
(#(subs % 1 (dec (count %))))
(#(str "; " % "\n"))))
(catch Exception _
(prn "exception" string)
"; TODO \n")
(catch java.lang.AssertionError _
(prn "assertion" string)
(n/comment-node "; TODO \n"))
)
]
(-> branch
z/down
(z/replace 'deftest)
z/right
(z/replace new-deftest-name)
z/right
(z/insert-left comment)
(z/insert-space-left 3)
z/up
z/node)))
(defn create-new-deftest [zloc deftest-name branch]
(-> zloc
z/insert-newline-right
z/right*
z/insert-newline-right
z/right*
(z/insert-right (prepend-deftest deftest-name branch))
z/right))
(defn convert-tests-to-deftests [zloc testing-branches]
(let [deftest-name (get-node-symbol (z/down zloc))]
(loop [zloc zloc
branches testing-branches]
(if-let [branch (first branches)]
(recur
(create-new-deftest zloc deftest-name branch)
(next branches))
zloc))))
(defn process-deftest [zloc]
(if (deftest? (z/down zloc))
(let [testing-branches (get-testing-branches zloc)
zloc (-> zloc
remove-testing-branches
slurp-and-spit-basic-test
(convert-tests-to-deftests testing-branches))]
zloc)
zloc))
(defn rewrite-file [zloc]
(let [zloc (process-deftest zloc)]
(if (not (z/right zloc))
zloc
(recur (z/right zloc)))))
(defn process-file [file]
(->> file
(z/of-file)
(rewrite-file)
(z/root-string)
(spit file)))
(defn apply-fn-to-file [f]
(let [dir (io/file "test/clj/game/cards")]
(doseq [file (file-seq dir)
:when (.isFile file)]
(f file))))
(defn rewrite-card-tests []
(apply-fn-to-file process-file))
(defn clean-file [zloc]
(z/prewalk
(z/up zloc)
(fn [zloc]
(and (z/list? zloc)
(-> zloc z/down deftest?)
(not (-> zloc z/down z/right z/right))))
z/remove))
(defn process-file-to-clean [file]
(->> file
(z/of-file)
(clean-file)
(z/root-string)
(spit file)))
(defn clean-card-tests []
(apply-fn-to-file process-file-to-clean))
(defn load-test [& [path]]
(->> (io/file (str "test/clj/game/cards/" (or path "agendas") "_test.clj"))
(z/of-file)))
|
b44655d46f20941cbb14febd0d5693f287794f0c928300b8b26b1e85f55c9c04 | chetant/LibClang | TranslationUnit.hs | {-# LANGUAGE ConstraintKinds #-}
# LANGUAGE FlexibleContexts #
{-# LANGUAGE RankNTypes #-}
-- | Functions for manipulating translation units.
--
-- To start analyzing a translation unit, call 'Index.withNew' to create a new index and
-- call 'withParsed' in the callback. Inside the callback for 'withParsed', you'll have
access to a ' FFI.TranslationUnit ' value ; you can call ' getCursor ' to turn that into an
-- AST cursor which can be traversed using the functions in "Clang.Cursor".
--
-- This module is intended to be imported qualified.
module Clang.TranslationUnit
(
-- * Creating a translation unit
withParsed
, withLoaded
, withReparsing
, FFI.TranslationUnitFlags
, FFI.ReparseFlags
, ReparsingCallback
, ParseContinuation(..)
-- * Saving
, save
, FFI.SaveTranslationUnitFlags
AST traversal and metadata
, getCursor
, getDiagnosticSet
, getSpelling
) where
import Data.Traversable
import Control.Monad.IO.Class
import Data.Maybe (fromMaybe)
import qualified Data.Vector as DV
import System . FilePath ( ( < / > ) )
import Clang.Internal.BitFlags
import Clang.Internal.Monad
import qualified Clang.Internal.FFI as FFI
import ( getDataFileName )
-- | Creates a translation unit by parsing source code.
--
-- Note that not all command line arguments which are accepted by the \'clang\' frontend can
be used here . You should avoid passing \'-c\ ' , \'-o\ ' , \'-fpch - deps\ ' , and the various
-- \'-M\' options. If you provide a 'FilePath' when calling 'withParsed', also be sure not
-- to provide the filename in the command line arguments as well.
withParsed :: ClangBase m
=> FFI.Index s' -- ^ The index into which the translation unit should be loaded.
-> Maybe FilePath -- ^ The file to load, or 'Nothing' if the file is specified in
-- the command line arguments.
-> [String] -- ^ The command line arguments libclang should use when compiling this
-- file. Most arguments which you'd use with the \'clang\' frontend
-- are accepted.
-> DV.Vector FFI.UnsavedFile -- ^ Unsaved files which may be needed to parse this
-- translation unit. This may include the source
-- file itself or any file it includes.
-> [FFI.TranslationUnitFlags] -- ^ Flags that affect the processing of this
-- translation unit.
-> (forall s. FFI.TranslationUnit s -> ClangT s m a) -- ^ A callback.
-> ClangT s' m (Maybe a) -- ^ The return value of the callback, or 'Nothing'
-- if the file couldn't be parsed.
withParsed idx mayPath args ufs flags f = do
-- liftIO $ FFI.setClangResourcesPath idx =<< clangResourcesPath
mayTU <- FFI.parseTranslationUnit idx mayPath args ufs (orFlags flags)
traverse go mayTU
where
go tu = clangScope $ f =<< fromOuterScope tu
| Creates a translation unit by loading a saved AST file .
--
-- Such an AST file can be created using 'save'.
withLoaded :: ClangBase m
=> FFI.Index s' -- ^ The index into which the translation unit should be loaded.
-> FilePath -- ^ The file to load.
-> (forall s. FFI.TranslationUnit s -> ClangT s m a) -- ^ A callback.
-> ClangT s' m a
withLoaded idx path f = do
-- liftIO $ FFI.setClangResourcesPath idx =<< clangResourcesPath
f =<< FFI.createTranslationUnit idx path
-- | Creates a translation unit by parsing source code.
--
-- This works like 'withParsed', except that the translation unit can be reparsed over and over
-- again by returning a 'Reparse' value from the callback. This is useful for interactive
-- analyses like code completion. Processing can be stopped by returning a 'ParseComplete' value.
withReparsing :: ClangBase m
=> FFI.Index s' -- ^ The index into which the translation unit should be loaded.
-> Maybe FilePath -- ^ The file to load, or 'Nothing' if the file is specified in
-- the command line arguments.
-> [String] -- ^ The command line arguments libclang should use when compiling
-- this file. Most arguments which you'd use with the \'clang\'
-- frontend are accepted.
-> DV.Vector FFI.UnsavedFile -- ^ Unsaved files which may be needed to parse this
-- translation unit. This may include the source
-- file itself or any file it includes.
-> [FFI.TranslationUnitFlags] -- ^ Flags that affect the processing of this
-- translation unit.
^ A callback which uses the translation unit . May be
-- called many times depending on the return value.
-- See 'ParseContinuation' for more information.
-> ClangT s' m (Maybe r) -- ^ The return value of the callback, as passed to
-- 'ParseComplete', or 'Nothing' if the file could
-- not be parsed.
withReparsing idx mayPath args ufs flags f = do
-- liftIO $ FFI.setClangResourcesPath idx =<< clangResourcesPath
mayTU <- FFI.parseTranslationUnit idx mayPath args ufs (orFlags flags)
case mayTU of
Nothing -> return Nothing
Just tu -> iterReparse f tu
-- Drives the reparse loop: run the user callback in a fresh scope; on
-- 'Reparse', reparse the translation unit and recurse, on 'ParseComplete'
-- return the final value. Yields 'Nothing' when a reparse fails.
iterReparse :: ClangBase m
            => ReparsingCallback m r
            -> FFI.TranslationUnit s'
            -> ClangT s' m (Maybe r)
iterReparse f tu = do
  continuation <- clangScope $ fromOuterScope tu >>= f
  case continuation of
    ParseComplete finalVal -> return (Just finalVal)
    Reparse nextF ufs mayFlags -> do
      let flags = orFlags (fromMaybe [FFI.DefaultReparseFlags] mayFlags)
      ok <- FFI.reparseTranslationUnit tu ufs flags
      if ok then iterReparse nextF tu else return Nothing
-- | A callback for use with 'withReparsing'.
type ReparsingCallback m r = forall s. FFI.TranslationUnit s
-> ClangT s m (ParseContinuation m r)
-- | A continuation returned by a 'ReparsingCallback'.
data ParseContinuation m r
-- | 'Reparse' signals that the translation unit should be reparsed. It contains a callback
-- which will be called with the updated translation unit after reparsing, a 'DV.Vector' of
-- unsaved files which may be needed to reparse, and a set of flags affecting reparsing. The
-- default reparsing flags can be requested by specifying 'Nothing'.
= Reparse (ReparsingCallback m r) (DV.Vector FFI.UnsavedFile) (Maybe [FFI.ReparseFlags])
-- | 'ParseComplete' signals that processing is finished. It contains a final result which
-- will be returned by 'withReparsing'.
| ParseComplete r
-- clangResourcesPath :: IO FilePath
-- clangResourcesPath =
getDataFileName $ " build " < / > " out " < / > " lib " < / > " clang " < / > " 3.4 "
| Saves a translation unit as an AST file with can be loaded later using ' withLoaded ' .
save :: ClangBase m
=> FFI.TranslationUnit s' -- ^ The translation unit to save.
-> FilePath -- ^ The filename to save to.
-> Maybe [FFI.SaveTranslationUnitFlags] -- ^ Flags that affect saving, or 'Nothing' for
-- the default set of flags.
-> ClangT s m Bool
save t fname mayFlags = liftIO $ FFI.saveTranslationUnit t fname (orFlags flags)
where flags = fromMaybe [FFI.DefaultSaveTranslationUnitFlags] mayFlags
-- | Reparses the provided translation unit using the same command line arguments
-- that were originally used to parse it . If the file has changed on disk , or if
-- the unsaved files have changed , those changes will become visible .
--
-- Note that ' reparse ' invalidates all cursors and source locations that refer into
-- the reparsed translation unit . This makes it unsafe . However , ' reparse ' can be
-- more efficient than calling ' withParsed ' a second time .
reparse : : m
= > FFI.TranslationUnit s ' -- ^ The translation unit to reparse .
- > DV.Vector FFI.UnsavedFile -- ^ Unsaved files which may be needed to reparse
-- this translation unit .
- > Maybe [ FFI.ReparseFlags ] -- ^ Flags that affect reparsing , or ' Nothing ' for the
-- default set of flags .
- > ClangT s m Bool
reparse t ufs = FFI.reparseTranslationUnit t ufs ( orFlags flags )
where flags = fromMaybe [ FFI.DefaultReparseFlags ]
-- | Reparses the provided translation unit using the same command line arguments
-- that were originally used to parse it. If the file has changed on disk, or if
-- the unsaved files have changed, those changes will become visible.
--
-- Note that 'reparse' invalidates all cursors and source locations that refer into
-- the reparsed translation unit. This makes it unsafe. However, 'reparse' can be
-- more efficient than calling 'withParsed' a second time.
reparse :: ClangBase m
=> FFI.TranslationUnit s' -- ^ The translation unit to reparse.
-> DV.Vector FFI.UnsavedFile -- ^ Unsaved files which may be needed to reparse
-- this translation unit.
-> Maybe [FFI.ReparseFlags] -- ^ Flags that affect reparsing, or 'Nothing' for the
-- default set of flags.
-> ClangT s m Bool
reparse t ufs mayFlags = FFI.reparseTranslationUnit t ufs (orFlags flags)
where flags = fromMaybe [FFI.DefaultReparseFlags] mayFlags
-}
-- | Retrieve the cursor associated with this translation unit. This cursor is the root of
this translation unit 's AST ; you can begin exploring the AST further using the functions
-- in "Clang.Cursor".
getCursor :: ClangBase m => FFI.TranslationUnit s' -> ClangT s m (FFI.Cursor s)
getCursor tu = liftIO $ FFI.getTranslationUnitCursor mkProxy tu
-- | Retrieve the complete set of diagnostics associated with the given translation unit.
getDiagnosticSet :: ClangBase m => FFI.TranslationUnit s'-> ClangT s m (FFI.DiagnosticSet s)
getDiagnosticSet = FFI.getDiagnosticSetFromTU
-- | Retrieve a textual representation of this translation unit.
getSpelling :: ClangBase m => FFI.TranslationUnit s' -> ClangT s m (FFI.ClangString s)
getSpelling = FFI.getTranslationUnitSpelling
| null | https://raw.githubusercontent.com/chetant/LibClang/5e6d245024f7005e1a1dd1076031954b7cd8702f/src/Clang/TranslationUnit.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE RankNTypes #
| Functions for manipulating translation units.
To start analyzing a translation unit, call 'Index.withNew' to create a new index and
call 'withParsed' in the callback. Inside the callback for 'withParsed', you'll have
AST cursor which can be traversed using the functions in "Clang.Cursor".
This module is intended to be imported qualified.
* Creating a translation unit
* Saving
| Creates a translation unit by parsing source code.
Note that not all command line arguments which are accepted by the \'clang\' frontend can
\'-M\' options. If you provide a 'FilePath' when calling 'withParsed', also be sure not
to provide the filename in the command line arguments as well.
^ The index into which the translation unit should be loaded.
^ The file to load, or 'Nothing' if the file is specified in
the command line arguments.
^ The command line arguments libclang should use when compiling this
file. Most arguments which you'd use with the \'clang\' frontend
are accepted.
^ Unsaved files which may be needed to parse this
translation unit. This may include the source
file itself or any file it includes.
^ Flags that affect the processing of this
translation unit.
^ A callback.
^ The return value of the callback, or 'Nothing'
if the file couldn't be parsed.
liftIO $ FFI.setClangResourcesPath idx =<< clangResourcesPath
Such an AST file can be created using 'save'.
^ The index into which the translation unit should be loaded.
^ The file to load.
^ A callback.
liftIO $ FFI.setClangResourcesPath idx =<< clangResourcesPath
| Creates a translation unit by parsing source code.
This works like 'withParsed', except that the translation unit can be reparsed over and over
again by returning a 'Reparse' value from the callback. This is useful for interactive
analyses like code completion. Processing can be stopped by returning a 'ParseComplete' value.
^ The index into which the translation unit should be loaded.
^ The file to load, or 'Nothing' if the file is specified in
the command line arguments.
^ The command line arguments libclang should use when compiling
this file. Most arguments which you'd use with the \'clang\'
frontend are accepted.
^ Unsaved files which may be needed to parse this
translation unit. This may include the source
file itself or any file it includes.
^ Flags that affect the processing of this
translation unit.
called many times depending on the return value.
See 'ParseContinuation' for more information.
^ The return value of the callback, as passed to
'ParseComplete', or 'Nothing' if the file could
not be parsed.
liftIO $ FFI.setClangResourcesPath idx =<< clangResourcesPath
| A callback for use with 'withReparsing'.
| A continuation returned by a 'ReparsingCallback'.
| 'Reparse' signals that the translation unit should be reparsed. It contains a callback
which will be called with the updated translation unit after reparsing, a 'DV.Vector' of
unsaved files which may be needed to reparse, and a set of flags affecting reparsing. The
default reparsing flags can be requested by specifying 'Nothing'.
| 'ParseComplete' signals that processing is finished. It contains a final result which
will be returned by 'withReparsing'.
clangResourcesPath :: IO FilePath
clangResourcesPath =
^ The translation unit to save.
^ The filename to save to.
^ Flags that affect saving, or 'Nothing' for
the default set of flags.
| Reparses the provided translation unit using the same command line arguments
that were originally used to parse it . If the file has changed on disk , or if
the unsaved files have changed , those changes will become visible .
Note that ' reparse ' invalidates all cursors and source locations that refer into
the reparsed translation unit . This makes it unsafe . However , ' reparse ' can be
more efficient than calling ' withParsed ' a second time .
^ The translation unit to reparse .
^ Unsaved files which may be needed to reparse
this translation unit .
^ Flags that affect reparsing , or ' Nothing ' for the
default set of flags .
| Reparses the provided translation unit using the same command line arguments
that were originally used to parse it. If the file has changed on disk, or if
the unsaved files have changed, those changes will become visible.
Note that 'reparse' invalidates all cursors and source locations that refer into
the reparsed translation unit. This makes it unsafe. However, 'reparse' can be
more efficient than calling 'withParsed' a second time.
^ The translation unit to reparse.
^ Unsaved files which may be needed to reparse
this translation unit.
^ Flags that affect reparsing, or 'Nothing' for the
default set of flags.
| Retrieve the cursor associated with this translation unit. This cursor is the root of
in "Clang.Cursor".
| Retrieve the complete set of diagnostics associated with the given translation unit.
| Retrieve a textual representation of this translation unit. | # LANGUAGE FlexibleContexts #
access to a ' FFI.TranslationUnit ' value ; you can call ' getCursor ' to turn that into an
module Clang.TranslationUnit
(
withParsed
, withLoaded
, withReparsing
, FFI.TranslationUnitFlags
, FFI.ReparseFlags
, ReparsingCallback
, ParseContinuation(..)
, save
, FFI.SaveTranslationUnitFlags
AST traversal and metadata
, getCursor
, getDiagnosticSet
, getSpelling
) where
import Data.Traversable
import Control.Monad.IO.Class
import Data.Maybe (fromMaybe)
import qualified Data.Vector as DV
import System . FilePath ( ( < / > ) )
import Clang.Internal.BitFlags
import Clang.Internal.Monad
import qualified Clang.Internal.FFI as FFI
import ( getDataFileName )
be used here . You should avoid passing \'-c\ ' , \'-o\ ' , \'-fpch - deps\ ' , and the various
withParsed :: ClangBase m
withParsed idx mayPath args ufs flags f = do
mayTU <- FFI.parseTranslationUnit idx mayPath args ufs (orFlags flags)
traverse go mayTU
where
go tu = clangScope $ f =<< fromOuterScope tu
| Creates a translation unit by loading a saved AST file .
withLoaded :: ClangBase m
-> ClangT s' m a
withLoaded idx path f = do
f =<< FFI.createTranslationUnit idx path
withReparsing :: ClangBase m
^ A callback which uses the translation unit . May be
withReparsing idx mayPath args ufs flags f = do
mayTU <- FFI.parseTranslationUnit idx mayPath args ufs (orFlags flags)
case mayTU of
Nothing -> return Nothing
Just tu -> iterReparse f tu
iterReparse :: ClangBase m
=> ReparsingCallback m r
-> FFI.TranslationUnit s'
-> ClangT s' m (Maybe r)
iterReparse f tu = do
cont <- clangScope $ f =<< fromOuterScope tu
case cont of
Reparse nextF ufs mayFlags ->
do res <- FFI.reparseTranslationUnit tu ufs (makeFlags mayFlags)
if res then iterReparse nextF tu
else return Nothing
ParseComplete finalVal -> return $ Just finalVal
where
makeFlags = orFlags . (fromMaybe [FFI.DefaultReparseFlags])
type ReparsingCallback m r = forall s. FFI.TranslationUnit s
-> ClangT s m (ParseContinuation m r)
data ParseContinuation m r
= Reparse (ReparsingCallback m r) (DV.Vector FFI.UnsavedFile) (Maybe [FFI.ReparseFlags])
| ParseComplete r
getDataFileName $ " build " < / > " out " < / > " lib " < / > " clang " < / > " 3.4 "
| Saves a translation unit as an AST file with can be loaded later using ' withLoaded ' .
save :: ClangBase m
-> ClangT s m Bool
save t fname mayFlags = liftIO $ FFI.saveTranslationUnit t fname (orFlags flags)
where flags = fromMaybe [FFI.DefaultSaveTranslationUnitFlags] mayFlags
reparse : : m
- > ClangT s m Bool
reparse t ufs = FFI.reparseTranslationUnit t ufs ( orFlags flags )
where flags = fromMaybe [ FFI.DefaultReparseFlags ]
reparse :: ClangBase m
-> ClangT s m Bool
reparse t ufs mayFlags = FFI.reparseTranslationUnit t ufs (orFlags flags)
where flags = fromMaybe [FFI.DefaultReparseFlags] mayFlags
-}
this translation unit 's AST ; you can begin exploring the AST further using the functions
getCursor :: ClangBase m => FFI.TranslationUnit s' -> ClangT s m (FFI.Cursor s)
getCursor tu = liftIO $ FFI.getTranslationUnitCursor mkProxy tu
getDiagnosticSet :: ClangBase m => FFI.TranslationUnit s'-> ClangT s m (FFI.DiagnosticSet s)
getDiagnosticSet = FFI.getDiagnosticSetFromTU
getSpelling :: ClangBase m => FFI.TranslationUnit s' -> ClangT s m (FFI.ClangString s)
getSpelling = FFI.getTranslationUnitSpelling
|
2c4b53eefc4e7e4b0b999a19deca4396134a52ffe36cacb42764f46aca90251a | wavejumper/boonmee | cli.clj | (ns boonmee.cli
(:require [boonmee.client.stdio :as client.stdio]
[clojure.java.io :as io]
[clojure.tools.cli :as tools.cli]
[integrant.core :as ig])
(:import (java.util.concurrent CountDownLatch))
(:gen-class))
;; tools.cli option specs; :validate messages are printed verbatim when a
;; supplied value fails validation.
(def cli-options
  [["-c" "--client client" "Specify client"
    :default "stdio"
    ;; fixed: message string was unbalanced ("Must be either #{stdio")
    :validate [#{"stdio"} "Must be either #{stdio}"]]
   ["-e" "--env env" "JS Environment"
    :default "browser"
    :validate [#{"browser" "node"} "Must be either #{browser node}"]]
   ["-T" "--tsserver tsserver" "tsserver"
    :default "tsserver"]
   ["-L" "--log-file log-file" "Log file location"
    :default "boonmee.log"]
   ["-v" "--version"]
   ["-h" "--help"]])
(defn -main
  "CLI entry point: parses options, handles errors/help/version (each path
  exits the JVM), then boots the integrant system and blocks until JVM
  shutdown."
  [& args]
  ;; latch keeps the main thread alive until the shutdown hook has run
  (let [opts (tools.cli/parse-opts args cli-options)
        latch (CountDownLatch. 1)]
    (when-let [errors (seq (:errors opts))]
      (doseq [error errors]
        (println error))
      (System/exit 1))
    (when (-> opts :options :help)
      (println (:summary opts))
      (System/exit 0))
    (when (-> opts :options :version)
      (println (slurp (io/resource "version")))
      (System/exit 0))
    (let [config (client.stdio/config (:options opts))]
      (try
        (let [system (ig/init config)]
          ;; halt the running system from a JVM shutdown hook, then release
          ;; the latch so the blocked main thread can exit cleanly
          (.addShutdownHook
            (Runtime/getRuntime)
            (Thread. ^Runnable (fn []
                                 (try
                                   (ig/halt! system)
                                   (catch Throwable e
                                     (.printStackTrace e)))
                                 (.countDown latch)))))
        ;; block until the shutdown hook has completed
        (.await latch)
        (System/exit 0)
        (catch Throwable e
          (.printStackTrace e)
          (System/exit 1))))))
| null | https://raw.githubusercontent.com/wavejumper/boonmee/fc0946568bfc53830717d2b68982872972bd532d/src/boonmee/cli.clj | clojure | (ns boonmee.cli
(:require [boonmee.client.stdio :as client.stdio]
[clojure.java.io :as io]
[clojure.tools.cli :as tools.cli]
[integrant.core :as ig])
(:import (java.util.concurrent CountDownLatch))
(:gen-class))
(def cli-options
[["-c" "--client client" "Specify client"
:default "stdio"
:validate [#{"stdio"} "Must be either #{stdio"]]
["-e" "--env env" "JS Environment"
:default "browser"
:validate [#{"browser" "node"} "Must be either #{browser node}"]]
["-T" "--tsserver tsserver" "tsserver"
:default "tsserver"]
["-L" "--log-file log-file" "Log file location"
:default "boonmee.log"]
["-v" "--version"]
["-h" "--help"]])
(defn -main
[& args]
(let [opts (tools.cli/parse-opts args cli-options)
latch (CountDownLatch. 1)]
(when-let [errors (seq (:errors opts))]
(doseq [error errors]
(println error))
(System/exit 1))
(when (-> opts :options :help)
(println (:summary opts))
(System/exit 0))
(when (-> opts :options :version)
(println (slurp (io/resource "version")))
(System/exit 0))
(let [config (client.stdio/config (:options opts))]
(try
(let [system (ig/init config)]
(.addShutdownHook
(Runtime/getRuntime)
(Thread. ^Runnable (fn []
(try
(ig/halt! system)
(catch Throwable e
(.printStackTrace e)))
(.countDown latch)))))
(.await latch)
(System/exit 0)
(catch Throwable e
(.printStackTrace e)
(System/exit 1))))))
| |
aba2af7152ae626eb5253dc6f40b46856201ca374f74996bad6738e5b0fae70b | Gabriella439/nix-diff | Main.hs | module Main where
import Test.Tasty
import Golden.Utils (initSimpleDerivations, initComplexDerivations)
import Golden.Tests (goldenTests)
import Properties (properties)
-- | Test-suite entry point: builds both derivation fixtures, then runs the
-- golden tests and property tests under a single Tasty tree.
main :: IO ()
main = do
  simpleTd  <- initSimpleDerivations
  complexTd <- initComplexDerivations
  let allTests = testGroup "Tests" [goldenTests simpleTd complexTd, properties]
  defaultMain allTests
| null | https://raw.githubusercontent.com/Gabriella439/nix-diff/8d365e5e8fe9d86add59aab07f79d05ca51d6ade/test/Main.hs | haskell | module Main where
import Test.Tasty
import Golden.Utils (initSimpleDerivations, initComplexDerivations)
import Golden.Tests (goldenTests)
import Properties (properties)
main :: IO ()
main = do
simpleTd <- initSimpleDerivations
complexTd <- initComplexDerivations
defaultMain $
testGroup "Tests" [goldenTests simpleTd complexTd, properties]
| |
45aad42e70de2c2a129395e61b5936ece5defc3820852726273a71902339df82 | ChillkroeteTTS/fischer | utils_test.clj | (ns fischer.utils-test
(:require [clojure.test :refer :all]
[fischer.utils :as utils]
[clojure.java.io :as io]))
(deftest sorted-keyset-test
  ;; sorted-kv-list should return the kv pairs of the second map ordered by
  ;; the index each key is mapped to in the first map
  (is (= [[{:key1 :key1} :val1]
          [{:key2 :key2} :val2]]
         (utils/sorted-kv-list {{:key1 :key1} 0 {:key2 :key2} 1}
                               {{:key1 :key1} :val1 {:key2 :key2} :val2}))))
;; Fixture: three raw feature series keyed by metric key.
(def X-trans-map {:key1 [1 2 3]
                  :key2 [4 5 6]
                  :key3 [7 8 9]})
;; Fixture: per-key extraction properties. :idx fixes the column order and
;; :train-sample-complete? marks whether the key's series may be used;
;; :key3 is deliberately incomplete so the tests below expect it dropped.
(def key->props {:key1 {:idx 0 :train-sample-complete? true}
                 :key2 {:idx 1 :train-sample-complete? true}
                 :key3 {:idx 3 :train-sample-complete? false}})
(deftest extract-bare-features-test
  (testing "if it extracts simple features"
    (is (= [[1 4]
            [2 5]
            [3 6]]
           (utils/extract-bare-features X-trans-map key->props))))
  (testing "if it extracts features in the correct order"
    ;; keys are fed in reverse; the :idx values in key->props must restore
    ;; the column order
    (is (= [[1 4]
            [2 5]
            [3 6]]
           (utils/extract-bare-features (into (sorted-map) (reverse X-trans-map)) key->props))))
  (testing "if it removes features with incomplete training sample"
    ;; :key3 is marked :train-sample-complete? false, so its series [7 8 9]
    ;; must not appear in the output.
    ;; NOTE(review): this assertion is byte-identical to the ordering test
    ;; above; it still covers removal (the input contains :key3), but a
    ;; distinct input was probably intended — confirm.
    (is (= [[1 4]
            [2 5]
            [3 6]]
           (utils/extract-bare-features (into (sorted-map) (reverse X-trans-map)) key->props)))))
(:require [clojure.test :refer :all]
[fischer.utils :as utils]
[clojure.java.io :as io]))
(deftest sorted-keyset-test
(is (= [[{:key1 :key1} :val1]
[{:key2 :key2} :val2]]
(utils/sorted-kv-list {{:key1 :key1} 0 {:key2 :key2} 1}
{{:key1 :key1} :val1 {:key2 :key2} :val2}))))
(def X-trans-map {:key1 [1 2 3]
:key2 [4 5 6]
:key3 [7 8 9]})
(def key->props {:key1 {:idx 0 :train-sample-complete? true}
:key2 {:idx 1 :train-sample-complete? true}
:key3 {:idx 3 :train-sample-complete? false}})
(deftest extract-bare-features-test
(testing "if it extracts simple features"
(is (= [[1 4]
[2 5]
[3 6]]
(utils/extract-bare-features X-trans-map key->props))))
(testing "if it extracts features in the correct order"
(is (= [[1 4]
[2 5]
[3 6]]
(utils/extract-bare-features (into (sorted-map) (reverse X-trans-map)) key->props))))
(testing "if it removes features with incomplete training sample"
(is (= [[1 4]
[2 5]
[3 6]]
(utils/extract-bare-features (into (sorted-map) (reverse X-trans-map)) key->props))))) | |
1e7a3dc3494ca57145760dabbc955e69e821b436a27d35688c58e799bdebd423 | Bodigrim/arithmoi | Cbrt.hs | -- |
Module : Math . . Moduli . Cbrt
Copyright : ( c ) 2020
Licence : MIT
Maintainer : < >
--
-- <#Cubic_residue_character Cubic symbol>
of two Eisenstein Integers .
# LANGUAGE LambdaCase #
module Math.NumberTheory.Moduli.Cbrt
( CubicSymbol(..)
, cubicSymbol
, symbolToNum
) where
import Math.NumberTheory.Quadratic.EisensteinIntegers
import Math.NumberTheory.Utils.FromIntegral
import qualified Data.Euclidean as A
import Math.NumberTheory.Utils
import Data.Semigroup
-- | Represents the
-- <#Cubic_residue_character cubic residue character>
It is either @0@ , @ω@ , @ω²@ or @1@.
data CubicSymbol = Zero | Omega | OmegaSquare | One deriving (Eq)
-- | The set of cubic symbols form a semigroup. Note `stimes`
-- is allowed to take non-positive values. In other words, the set
of non - zero cubic symbols is regarded as a group .
--
> > > import Data . Semigroup
> > > stimes ( -1 ) Omega
-- ω²
> > > stimes 0 Zero
1
-- Zero is absorbing, One is the identity, and the two nontrivial symbols
-- multiply like cube roots of unity: ω·ω = ω², ω·ω² = 1.
instance Semigroup CubicSymbol where
  Zero <> _ = Zero
  _ <> Zero = Zero
  One <> y = y
  x <> One = x
  Omega <> Omega = OmegaSquare
  OmegaSquare <> OmegaSquare = Omega
  -- remaining cases: ω·ω² and ω²·ω
  _ <> _ = One
  -- Exponentiation reduces mod 3; negative exponents are allowed since the
  -- nonzero symbols form a group. (k `mod` 3) is always 0, 1 or 2 here.
  stimes k n
    | r == 0 = One
    | r == 1 = n
    | otherwise = case n of
        Omega       -> OmegaSquare
        OmegaSquare -> Omega
        symbol      -> symbol
    where
      r = k `mod` 3
-- Render each symbol with its conventional mathematical notation.
instance Show CubicSymbol where
  show Zero        = "0"
  show Omega       = "ω"
  show OmegaSquare = "ω²"
  show One         = "1"
-- | Converts a cubic symbol to the Eisenstein integer it denotes.
symbolToNum :: CubicSymbol -> EisensteinInteger
symbolToNum Zero        = 0
symbolToNum Omega       = ω
symbolToNum OmegaSquare = -1 - ω
symbolToNum One         = 1
-- The algorithm `cubicSymbol` is adapted from
-- </~gudmund/Documents/cubicres.pdf here>.
-- It is divided in the following steps.
--
( 1 ) Check whether @beta@ is coprime to 3 .
( 2 ) Replace @alpha@ by the remainder of @alpha@ mod @beta@
-- This does not affect the cubic symbol.
( 3 ) Replace @alpha@ and @beta@ by their associated primary
-- divisors and keep track of how their cubic residue changes.
( 4 ) Check if any of the two numbers is a zero or a unit . In this
-- case, return their cubic residue.
( 5 ) Otherwise , invoke cubic reciprocity by swapping @alpha@ and
-- @beta@. Note both numbers have to be primary.
Return to Step 2 .
-- | Cubic symbol (cubic residue character) of two Eisenstein integers.
--
-- The first argument is the numerator and the second argument is the
-- denominator. The latter must be coprime to @3@; this condition is checked.
--
-- If the arguments have a common factor, the result is 'Zero'; otherwise it
-- is one of 'Omega', 'OmegaSquare' or 'One'.
--
-- >>> cubicSymbol (45 + 23*ω) (11 - 30*ω)
-- 0
-- >>> cubicSymbol (31 - ω) (1 + 10*ω)
-- ω
cubicSymbol :: EisensteinInteger -> EisensteinInteger -> CubicSymbol
cubicSymbol alpha beta = case beta `A.rem` (1 - ω) of
  -- beta must be coprime to 3, i.e. not divisible by the prime (1 - ω);
  -- in particular this rejects @beta == 0@
  0 -> error "Math.NumberTheory.Moduli.Cbrt: denominator is not coprime to 3."
  _ -> cubicSymbolHelper alpha beta
-- Reduces the numerator modulo the denominator, converts both operands to
-- their primary associates, and combines the bookkeeping symbol with the
-- reciprocity result.
cubicSymbolHelper :: EisensteinInteger -> EisensteinInteger -> CubicSymbol
cubicSymbolHelper alpha beta =
  let (primaryRemainder, primaryBeta, newSymbol) =
        extractPrimaryContributions (alpha `A.rem` beta) beta
  in cubicReciprocity primaryRemainder primaryBeta <> newSymbol
cubicReciprocity :: EisensteinInteger -> EisensteinInteger -> CubicSymbol
-- Note @cubicReciprocity 0 1 = One@. It is better to adopt this convention.
cubicReciprocity _ 1 = One
-- Checks if the first argument is zero. Note the second argument is never zero.
cubicReciprocity 0 _ = Zero
-- This checks if the first argument is a unit. Because it's primary,
-- it is enough to pattern match with 1.
cubicReciprocity 1 _ = One
-- Otherwise, invoke cubic reciprocity proper by swapping the arguments.
cubicReciprocity alpha beta = cubicSymbolHelper beta alpha
-- | Takes two Eisenstein integers @alpha@ and @beta@ and returns
-- @(gamma, delta, newSymbol)@, where @gamma@ and @delta@ are the associated
-- primary numbers of @alpha@ and @beta@ respectively, and @newSymbol@ is
-- the cubic symbol measuring the discrepancy between the cubic residue of
-- @alpha@ and @beta@, and the cubic residue of @gamma@ and @delta@.
extractPrimaryContributions :: EisensteinInteger -> EisensteinInteger -> (EisensteinInteger, EisensteinInteger, CubicSymbol)
extractPrimaryContributions alpha beta = (gamma, delta, newSymbol)
  where
    newSymbol = stimes (j * m) Omega <> stimes (- m - n) i
    m :+ n = A.quot (delta - 1) 3
    (i, gamma) = getPrimaryDecomposition alphaThreeFree
    (_, delta) = getPrimaryDecomposition beta
    j = wordToInteger jIntWord
    -- These bindings satisfy @(1 - ω)^jIntWord * alphaThreeFree == alpha@.
    (jIntWord, alphaThreeFree) = splitOff (1 - ω) alpha
-- | Takes an Eisenstein integer @e@ and returns @(symbol, delta)@ where
-- @delta@ is its associated primary integer and @symbol@ is the cubic
-- symbol discrepancy between @e@ and @delta@. @delta@ is defined to be the
-- unique associate of @e@ congruent to 1 modulo 3, and it exists if and
-- only if @e@ is coprime to 3.
getPrimaryDecomposition :: EisensteinInteger -> (CubicSymbol, EisensteinInteger)
-- This is the case where a common factor between @alpha@ and @beta@ is
-- detected. In this instance @cubicReciprocity@ will return 'Zero'.
-- Strictly speaking, this is not a primary decomposition.
getPrimaryDecomposition 0 = (Zero, 0)
getPrimaryDecomposition e = case e `A.rem` 3 of
  1            -> (One, e)
  1 :+ 1       -> (OmegaSquare, -ω * e)
  0 :+ 1       -> (Omega, (-1 - ω) * e)
  (-1) :+ 0    -> (One, -e)
  (-1) :+ (-1) -> (OmegaSquare, ω * e)
  0 :+ (-1)    -> (Omega, (1 + ω) * e)
  _ -> error "Math.NumberTheory.Moduli.Cbrt: primary decomposition failed."
| null | https://raw.githubusercontent.com/Bodigrim/arithmoi/6bc5e70417c87f67a0d87720902d4c89bc187007/Math/NumberTheory/Moduli/Cbrt.hs | haskell | |
<#Cubic_residue_character Cubic symbol>
| Represents the
<#Cubic_residue_character cubic residue character>
| The set of cubic symbols form a semigroup. Note `stimes`
is allowed to take non-positive values. In other words, the set
ω²
| Converts a
<#Cubic_residue_character cubic symbol>
The algorithm `cubicSymbol` is adapted from
</~gudmund/Documents/cubicres.pdf here>.
It is divided in the following steps.
This does not affect the cubic symbol.
divisors and keep track of how their cubic residue changes.
case, return their cubic residue.
@beta@. Note both numbers have to be primary.
| <#Cubic_residue_character Cubic symbol>
is the denominator. The latter must be coprime to @3@.
This condition is checked.
If the arguments have a common factor, the result
ω
In particular, it returns an error if @beta == 0@
Otherwise, cubic reciprocity is called.
associated primary numbers of alpha and beta respectively. @newSymbol@
is the cubic symbol measuring the discrepancy between the cubic residue
This function outputs data such that
@(1 - ω)^jIntWord * alphaThreeFree = alpha@.
where @delta@ is its associated primary integer and @symbol@ is the
case, an error message is displayed.
Strictly speaking, this is not a primary decomposition. | Module : Math . . Moduli . Cbrt
Copyright : ( c ) 2020
Licence : MIT
Maintainer : < >
of two Eisenstein Integers .
# LANGUAGE LambdaCase #
module Math.NumberTheory.Moduli.Cbrt
( CubicSymbol(..)
, cubicSymbol
, symbolToNum
) where
import Math.NumberTheory.Quadratic.EisensteinIntegers
import Math.NumberTheory.Utils.FromIntegral
import qualified Data.Euclidean as A
import Math.NumberTheory.Utils
import Data.Semigroup
It is either @0@ , @ω@ , @ω²@ or @1@.
data CubicSymbol = Zero | Omega | OmegaSquare | One deriving (Eq)
of non - zero cubic symbols is regarded as a group .
> > > import Data . Semigroup
> > > stimes ( -1 ) Omega
> > > stimes 0 Zero
1
instance Semigroup CubicSymbol where
Zero <> _ = Zero
_ <> Zero = Zero
One <> y = y
x <> One = x
Omega <> Omega = OmegaSquare
Omega <> OmegaSquare = One
OmegaSquare <> Omega = One
OmegaSquare <> OmegaSquare = Omega
stimes k n = case (k `mod` 3, n) of
(0, _) -> One
(1, symbol) -> symbol
(2, Omega) -> OmegaSquare
(2, OmegaSquare) -> Omega
(2, symbol) -> symbol
_ -> error "Math.NumberTheory.Moduli.Cbrt: exponentiation undefined."
instance Show CubicSymbol where
show = \case
Zero -> "0"
Omega -> "ω"
OmegaSquare -> "ω²"
One -> "1"
to .
symbolToNum :: CubicSymbol -> EisensteinInteger
symbolToNum = \case
Zero -> 0
Omega -> ω
OmegaSquare -> -1 - ω
One -> 1
( 1 ) Check whether @beta@ is coprime to 3 .
( 2 ) Replace @alpha@ by the remainder of @alpha@ mod @beta@
( 3 ) Replace @alpha@ and @beta@ by their associated primary
( 4 ) Check if any of the two numbers is a zero or a unit . In this
( 5 ) Otherwise , invoke cubic reciprocity by swapping @alpha@ and
Return to Step 2 .
of two Eisenstein Integers .
The first argument is the numerator and the second argument
is ' Zero ' , otherwise it is either ' Omega ' , ' ' or ' One ' .
> > > cubicSymbol ( 45 + 23*ω ) ( 11 - 30*ω )
0
> > > cubicSymbol ( 31 - ω ) ( 1 +10*ω )
cubicSymbol :: EisensteinInteger -> EisensteinInteger -> CubicSymbol
cubicSymbol alpha beta = case beta `A.rem` (1 - ω) of
This checks whether beta is coprime to 3 , i.e. divisible by
0 -> error "Math.NumberTheory.Moduli.Cbrt: denominator is not coprime to 3."
_ -> cubicSymbolHelper alpha beta
cubicSymbolHelper :: EisensteinInteger -> EisensteinInteger -> CubicSymbol
cubicSymbolHelper alpha beta = cubicReciprocity primaryRemainder primaryBeta <> newSymbol
where
(primaryRemainder, primaryBeta, newSymbol) = extractPrimaryContributions remainder beta
remainder = A.rem alpha beta
cubicReciprocity :: EisensteinInteger -> EisensteinInteger -> CubicSymbol
Note @cubicReciprocity 0 1 = One@. It is better to adopt this convention .
cubicReciprocity _ 1 = One
Checks if first argument is zero . Note the second argument is never zero .
cubicReciprocity 0 _ = Zero
This checks if the first argument is a unit . Because it 's primary ,
it is enough to pattern match with 1 .
cubicReciprocity 1 _ = One
cubicReciprocity alpha beta = cubicSymbolHelper beta alpha
| This function takes two intgers @alpha@ and @beta@ and returns
three arguments @(gamma , delta , newSymbol)@. @gamma@ and @delta@ are the
of @alpha@ and @beta@ , and the cubic residue of @gamma@ and @delta@.
extractPrimaryContributions :: EisensteinInteger -> EisensteinInteger -> (EisensteinInteger, EisensteinInteger, CubicSymbol)
extractPrimaryContributions alpha beta = (gamma, delta, newSymbol)
where
newSymbol = stimes (j * m) Omega <> stimes (- m - n) i
m :+ n = A.quot (delta - 1) 3
(i, gamma) = getPrimaryDecomposition alphaThreeFree
(_, delta) = getPrimaryDecomposition beta
j = wordToInteger jIntWord
(jIntWord, alphaThreeFree) = splitOff (1 - ω) alpha
| This function takes an number and returns @(symbol , delta)@
cubic symbol discrepancy between @e@ and @delta@. @delta@ is defined to be
the unique associated to such that
\ ( \textrm{delta } \equiv 1 ( \textrm{mod } 3 ) \ ) .
Note that @delta@ exists if and only if is coprime to 3 . In this
getPrimaryDecomposition :: EisensteinInteger -> (CubicSymbol, EisensteinInteger)
This is the case where a common factor between @alpha@ and @beta@ is detected .
In this instance @cubicReciprocity@ will return ` Zero ` .
getPrimaryDecomposition 0 = (Zero, 0)
getPrimaryDecomposition e = case e `A.rem` 3 of
1 -> (One, e)
1 :+ 1 -> (OmegaSquare, -ω * e)
0 :+ 1 -> (Omega, (-1 - ω) * e)
(-1) :+ 0 -> (One, -e)
(-1) :+ (-1) -> (OmegaSquare, ω * e)
0 :+ (-1) -> (Omega, (1 + ω) * e)
_ -> error "Math.NumberTheory.Moduli.Cbrt: primary decomposition failed."
|
1c619695f8e24707c4e4c1707ebe0e26ea3b75564880bf9a9ed0bdfb61fdf0fc | gvolpe/haskell-book-exercises | foldable.hs | import Data.Monoid
data Identity a = Identity a
instance Foldable Identity where
foldr f z (Identity x) = f x z
foldl f z (Identity x) = f z x
foldMap f (Identity x) = f x
data Optional a = Yep a | Nada
instance Foldable Optional where
foldr _ z Nada = z
foldr f z (Yep x) = f x z
foldl _ z Nada = z
foldl f z (Yep x) = f z x
foldMap _ Nada = mempty
foldMap f (Yep a) = f a
| null | https://raw.githubusercontent.com/gvolpe/haskell-book-exercises/5c1b9d8dc729ee5a90c8709b9c889cbacb30a2cb/chapter20/foldable.hs | haskell | import Data.Monoid
data Identity a = Identity a
instance Foldable Identity where
foldr f z (Identity x) = f x z
foldl f z (Identity x) = f z x
foldMap f (Identity x) = f x
data Optional a = Yep a | Nada
instance Foldable Optional where
foldr _ z Nada = z
foldr f z (Yep x) = f x z
foldl _ z Nada = z
foldl f z (Yep x) = f z x
foldMap _ Nada = mempty
foldMap f (Yep a) = f a
| |
71ea36fd8ee861d5d4925d2490d67544937dfdc22bf42fb77cd9e93e3794780f | falsetru/htdp | 17.8.6.scm | (define wp? cons?)
(define (web=? a-wp another-wp)
(cond
[(empty? a-wp) (empty? another-wp)]
[(symbol? (first a-wp))
(and (and (cons? another-wp) (symbol? (first another-wp)))
(and (symbol=? (first a-wp) (first another-wp))
(web=? (rest a-wp) (rest another-wp))))]
[else
(and (and (cons? another-wp) (wp? (first another-wp)))
(and (web=? (first a-wp) (first another-wp))
(web=? (rest a-wp) (rest another-wp))))]))
; +----------------+--------+---------------+-------------------+
; | | empty? | (cons s wp) | (cons ewp wp) |
; +----------------+--------+---------------+-------------------+
; | empty? | O | X | X |
; +----------------+--------+---------------+-------------------+
; | (cons s' wp) | X | s=s' & wp=wp' | X |
; +----------------+--------+---------------+-------------------+
| ( cons ewp ' wp ) | X | X | ewp = ewp ' & wp = wp ' |
; +----------------+--------+---------------+-------------------+
(require rackunit)
(require rackunit/text-ui)
; Unit tests: identical pages must compare true, structurally different
; pairs false.
(define web-page-equals-tests
  (test-suite
   "Test for web-page-equals"
   (test-case "web=?"
     (check-equal? 1 1)
     ; sample pages: flat, and with embedded pages in various positions
     (define a '(a a b))
     (define b '(a b c))
     (define c '((a b) x y z))
     (define d '(a b (x y)))
     (define e '(a (x y) z))
     ; every page equals itself
     (check-equal? (web=? a a) true)
     (check-equal? (web=? b b) true)
     (check-equal? (web=? c c) true)
     (check-equal? (web=? d d) true)
     (check-equal? (web=? e e) true)
     ; distinct pages are never web=?
     (check-equal? (web=? a b) false)
     (check-equal? (web=? a c) false)
     (check-equal? (web=? a d) false)
     (check-equal? (web=? a e) false)
     (check-equal? (web=? c d) false)
     (check-equal? (web=? c e) false)
     (check-equal? (web=? e d) false)
     )
   ))
(run-tests web-page-equals-tests)
| null | https://raw.githubusercontent.com/falsetru/htdp/4cdad3b999f19b89ff4fa7561839cbcbaad274df/17/17.8.6.scm | scheme | +----------------+--------+---------------+-------------------+
| | empty? | (cons s wp) | (cons ewp wp) |
+----------------+--------+---------------+-------------------+
| empty? | O | X | X |
+----------------+--------+---------------+-------------------+
| (cons s' wp) | X | s=s' & wp=wp' | X |
+----------------+--------+---------------+-------------------+
+----------------+--------+---------------+-------------------+ | (define wp? cons?)
(define (web=? a-wp another-wp)
(cond
[(empty? a-wp) (empty? another-wp)]
[(symbol? (first a-wp))
(and (and (cons? another-wp) (symbol? (first another-wp)))
(and (symbol=? (first a-wp) (first another-wp))
(web=? (rest a-wp) (rest another-wp))))]
[else
(and (and (cons? another-wp) (wp? (first another-wp)))
(and (web=? (first a-wp) (first another-wp))
(web=? (rest a-wp) (rest another-wp))))]))
| ( cons ewp ' wp ) | X | X | ewp = ewp ' & wp = wp ' |
(require rackunit)
(require rackunit/text-ui)
(define web-page-equals-tests
(test-suite
"Test for web-page-equals"
(test-case "web=?"
(check-equal? 1 1)
(define a '(a a b))
(define b '(a b c))
(define c '((a b) x y z))
(define d '(a b (x y)))
(define e '(a (x y) z))
(check-equal? (web=? a a) true)
(check-equal? (web=? b b) true)
(check-equal? (web=? c c) true)
(check-equal? (web=? d d) true)
(check-equal? (web=? e e) true)
(check-equal? (web=? a b) false)
(check-equal? (web=? a c) false)
(check-equal? (web=? a d) false)
(check-equal? (web=? a e) false)
(check-equal? (web=? c d) false)
(check-equal? (web=? c e) false)
(check-equal? (web=? e d) false)
)
))
(run-tests web-page-equals-tests)
|
63ef1922c6ab7597074c9e22bbc3e1c9aebcf57dab0f33d4f0fd7979e919ea17 | SecPriv/webspec | cspcheck.mli | (********************************************************************************)
Copyright ( c ) 2022
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"), *)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included in *)
(* all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(********************************************************************************)
(** Interface of a CSP verification pass.  Each function threads the
    verifier state through, returning it extended with the facts this
    pass contributes. *)
module type S =
  sig
    (** Add this pass's assertions to the verifier state. *)
    val generate_assertions : States.VerifierState.t -> States.VerifierState.t
    (** Add this pass's checks to the verifier state. *)
    val generate_checks : States.VerifierState.t -> States.VerifierState.t
  end
(** Concrete CSP checker implementing {!S}. *)
module CSPChecker : S
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
****************************************************************************** | Copyright ( c ) 2022
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
module type S =
sig
val generate_assertions : States.VerifierState.t -> States.VerifierState.t
val generate_checks : States.VerifierState.t -> States.VerifierState.t
end
module CSPChecker : S |
9f9679ff927292d5240c2a27a5c3a10a54a8240f77829a3ee09c7ba2ba058fb5 | Incubaid/baardskeerder | crc32c.mli |
* This file is part of Baardskeerder .
*
* Copyright ( C ) 2011 Incubaid BVBA
*
* Baardskeerder is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* Baardskeerder is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with Baardskeerder . If not , see < / > .
* This file is part of Baardskeerder.
*
* Copyright (C) 2011 Incubaid BVBA
*
* Baardskeerder is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Baardskeerder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Baardskeerder. If not, see </>.
*)
(* C stubs computing the CRC-32C (Castagnoli) checksum. *)
(* calculate_crc32c buffer offset length -> crc32 *)
external calculate_crc32c : string -> int -> int -> int32 = "calculate_crc32c"
(* update_crc32c crc buffer offset length -> crc32
   The first argument is presumably a previously computed checksum to be
   extended with another chunk (incremental CRC) -- confirm against the
   C stub. *)
external update_crc32c : int32 -> string -> int -> int -> int32 = "update_crc32c"
| null | https://raw.githubusercontent.com/Incubaid/baardskeerder/3975cb7f0e92e1f35eeab17beeb906344e43dae0/src/crc32c.mli | ocaml | calculate crc32_c buffer offset length -> crc32 |
* This file is part of Baardskeerder .
*
* Copyright ( C ) 2011 Incubaid BVBA
*
* Baardskeerder is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* Baardskeerder is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with Baardskeerder . If not , see < / > .
* This file is part of Baardskeerder.
*
* Copyright (C) 2011 Incubaid BVBA
*
* Baardskeerder is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Baardskeerder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Baardskeerder. If not, see </>.
*)
external calculate_crc32c : string -> int -> int -> int32 = "calculate_crc32c"
external update_crc32c : int32 -> string -> int -> int -> int32 = "update_crc32c"
|
aa43c63682d365ce24cb0485689aeb3dd8d9d54677d33a0bc03f593f3b40d19a | jepst/CloudHaskell | KMeans.hs | # LANGUAGE TemplateHaskell , DeriveDataTypeable , BangPatterns #
module Main where
import Remote
import Remote.Process (roundtripResponse,setRemoteNodeLogConfig,getConfig,PayloadDisposition(..),roundtripQuery,roundtripQueryMulti)
import KMeansCommon
import Control.Exception (try,SomeException,evaluate)
import Control.Monad (liftM)
import Control.Monad.Trans (liftIO)
import System.Random (randomR,getStdRandom)
import Data.Typeable (Typeable)
import Data.Data (Data)
import Control.Exception (IOException)
import Data.Binary (Binary,get,put,encode,decode)
import Data.Maybe (fromJust)
import Data.List (minimumBy,sortBy)
import Data.Time
import Data.Either (rights)
import qualified Data.ByteString.Lazy as B
import qualified Data.Map as Map
import System.IO
import Debug.Trace
-- | Partition a list into chunks of (near-)equal size so that there are
-- at most @numChunks@ of them.  The chunk size is
-- @ceiling (length l / numChunks)@, so the final chunk may be shorter.
--
-- Fixed: the original recursion had no base case; once the input was
-- exhausted, @splitAt i []@ keeps yielding @([],[])@ and the result was
-- an infinite tail of empty lists.
split :: Int -> [a] -> [[a]]
split numChunks l = splitSize (ceiling $ fromIntegral (length l) / fromIntegral numChunks) l
   where
      splitSize _ [] = []          -- input exhausted: stop
      splitSize i v  = let (first,second) = splitAt i v
                       in first : splitSize i second
-- | Send the same message to every process in the list, in order.
broadcast :: (Serializable a) => [ProcessId] -> a -> ProcessM ()
broadcast pids dat = mapM_ (`send` dat) pids
-- | Spawn (and link to) the given closure on every node, returning the
-- new process ids in node order.
-- (An unused local helper that also forwarded the remote node's log to
-- this node was dead code and has been removed.)
multiSpawn :: [NodeId] -> Closure (ProcessM ()) -> ProcessM [ProcessId]
multiSpawn nodes f = mapM (\node -> spawnLink node f) nodes
-- | Worker loop for MAPPER nodes.
--
-- A state machine over @(points, reducers, last result)@ reacting to:
--
--   * a 'String': the path of a data file whose vectors are loaded;
--   * a @['ProcessId']@: installs the reducer processes;
--   * a round-trip @()@ query: returns the most recent result table;
--   * a round-trip @[Cluster]@ query: assigns every loaded point to its
--     nearest cluster, streams each cluster's (count, vector-sum)
--     contribution to the reducer owning that cluster id, and remembers
--     the table.
--
-- Fix: the commented-out @condtrace@ debug line inside 'analyze' had
-- lost its @--@ marker, breaking the let-block syntax; it is restored
-- as a comment below.
mapperProcess :: ProcessM ()
mapperProcess =
  let mapProcess :: (Maybe [Vector], Maybe [ProcessId], Map.Map Int (Int,Vector)) -> ProcessM ()
      mapProcess (mvecs, mreducers, mresult) =
        receiveWait
          [ -- load the points named by the incoming file path
            match (\vec -> do vecs <- liftIO $ readf vec
                              say $ "Mapper read data file"
                              return (Just vecs, mreducers, mresult))
            -- remember the reducer processes
          , match (\reducers -> return (mvecs, Just reducers, mresult))
            -- hand the last result table back to the caller
          , roundtripResponse (\() -> return (mresult, (mvecs, mreducers, mresult)))
            -- one k-means step: classify all points against the given
            -- clusters and stream the partial sums to the reducers
          , roundtripResponse
              (\clusters -> let tbl = analyze (fromJust mvecs) clustersandcenters Map.empty
                                clustersandcenters = map (\x -> (x, clusterCenter x)) clusters
                                reducers = fromJust mreducers
                                -- clusters are distributed over reducers by id
                                target clust = reducers !! (clust `mod` length reducers)
                                sendout (clustid, (count, sum)) =
                                  send (target clustid) Cluster {clId = clustid, clCount = count, clSum = sum}
                            in do say $ "calculating: " ++ show (length reducers) ++ " reducers"
                                  mapM_ sendout (Map.toList tbl)
                                  return ((), (mvecs, mreducers, tbl)))
          , matchUnknownThrow
          ] >>= mapProcess

      -- one vector per line, read lazily via hGetContents
      getit :: Handle -> IO [Vector]
      getit h = do l <- liftM lines $ hGetContents h
                   return (map read l) -- evaluate or return?

      readf fn = do h <- openFile fn ReadMode
                    getit h

      -- conditional Debug.Trace; only referenced by the disabled trace below
      condtrace cond s val = if cond
                             then trace s val
                             else val

      -- fold the points into a map from cluster id to (size, vector sum);
      -- seq / insertWith' keep the accumulator evaluated as we go
      analyze :: [Vector] -> [(Cluster,Vector)] -> Map.Map Int (Int,Vector) -> Map.Map Int (Int,Vector)
      analyze [] _ ht = ht
      analyze (v:vectors) clusters ht =
        let theclust = assignToCluster clusters v
            newh = ht `seq` theclust `seq`
                   Map.insertWith' (\(a,v1) (b,v2) -> let av = addVector v1 v2 in av `seq` (a+b, av))
                                   theclust (1,v) ht
            --condtrace (blarg `mod` 1000 == 0) (show blarg) $
        in newh `seq` analyze vectors clusters newh

      -- id of the cluster whose center is nearest (by squared distance)
      assignToCluster :: [(Cluster,Vector)] -> Vector -> Int
      assignToCluster clusters vector =
        let distances = map (\(x,center) -> (clId x, sqDistance center vector)) clusters
        in fst $ minimumBy (\(_,a) (_,b) -> compare a b) distances

      doit = mapProcess (Nothing, Nothing, Map.empty)
  in doit >> return ()
-- | Worker loop for REDUCER nodes.  Accumulates the partial 'Cluster'
-- sums sent by the mappers; a round-trip @()@ query publishes everything
-- gathered since the previous query and starts a fresh accumulation.
reducerProcess :: ProcessM ()
reducerProcess = go ([], [])
  where
    go :: ([Cluster], [Cluster]) -> ProcessM ()
    go (previous, current) =
      receiveWait
        [ -- the master asks for this round's result: publish and reset
          roundtripResponse (\() -> return (current, (current, [])))
          -- a mapper sent one partial cluster: merge it into the batch
        , match (\partial -> return (previous, mergeOne current partial))
        , matchUnknownThrow
        ] >>= go

    -- insert a partial cluster, combining it with an existing entry of
    -- the same id (summing counts and vector sums)
    mergeOne :: [Cluster] -> Cluster -> [Cluster]
    mergeOne [] c = [c]
    mergeOne (k:ks) c
      | clId k == clId c = Cluster { clId = clId k
                                   , clCount = clCount k + clCount c
                                   , clSum = addVector (clSum k) (clSum c) } : ks
      | otherwise = k : mergeOne ks c
$( remotable ['mapperProcess, 'reducerProcess] )
-- | Per-role entry point.  The MASTER discovers MAPPER/REDUCER peers,
-- spawns the worker processes, distributes the input file name, then
-- iterates k-means until the clusters stop changing (or 4 iterations
-- have run), timing the whole computation.  MAPPER and REDUCER nodes
-- block waiting for spawned work.
--
-- Fix: two disabled statements near the end of the MASTER branch had
-- lost their @--@ comment markers (mangled into invalid syntax); they
-- are restored as comments below.
initialProcess "MASTER" =
  do peers <- getPeers
     -- say $ "Got peers: " ++ show peers
     cfg <- getConfig   -- currently unused; kept for debugging
     let mappers = findPeerByRole peers "MAPPER"
     let reducers = findPeerByRole peers "REDUCER"
     let numreducers = length reducers
     let nummappers = length mappers
     say $ "Got " ++ show nummappers ++ " mappers and " ++ show numreducers ++ " reducers"
     clusters <- liftIO $ getClusters "kmeans-clusters"
     say $ "Got " ++ show (length clusters) ++ " clusters"
     mypid <- getSelfPid
     mapperPids <- multiSpawn mappers mapperProcess__closure
     reducerPids <- multiSpawn reducers reducerProcess__closure
     -- every mapper needs the reducer list and the data file to read
     broadcast mapperPids reducerPids
     mapM_ (\(pid,chunk) -> send pid chunk) (zip (mapperPids) (repeat "kmeans-points"))
     say "Starting iteration"
     starttime <- liftIO $ getCurrentTime
     let loop howmany clusters = do
           liftIO $ putStrLn $ show howmany
           -- push the current clusters to the mappers ...
           roundtripQueryMulti PldUser mapperPids clusters :: ProcessM [Either TransmitStatus ()]
           -- ... then collect the reduced per-cluster sums
           res <- roundtripQueryMulti PldUser reducerPids () :: ProcessM [Either TransmitStatus [Cluster]]
           let newclusters = rights res
           let newclusters2 = sortBy (\a b -> compare (clId a) (clId b)) (concat newclusters)
           if newclusters2 == clusters || howmany >= 4
             then do
               donetime <- liftIO $ getCurrentTime
               say $ "Converged in " ++ show howmany ++ " iterations and " ++ (show $ diffUTCTime donetime starttime)
               -- sanity check: ask each mapper for its table, count points
               pointmaps <- mapM (\pid -> do (Right m) <- roundtripQuery PldUser pid ()
                                             return (m :: Map.Map Int (Int,Vector))) mapperPids
               let pointmap = map (\x -> sum $ map fst (Map.elems x)) pointmaps
               say $ "Total points: " ++ (show $ sum pointmap)
               --liftIO $ writeFile "kmeans-converged" $ readableShow (Map.toList pointmap)
               --respoints <- roundtripQueryAsync mapperPids () :: ProcessM [Either TransmitStatus (Map.Map Int [Vector])]
               --liftIO $ B.writeFile "kmeans-converged" $ encode $ Map.toList $ Map.unionsWith (++) (rights respoints)
             else
               loop (howmany+1) newclusters2
     loop 0 clusters
initialProcess "MAPPER" = receiveWait []
initialProcess "REDUCER" = receiveWait []
initialProcess _ = error "Role must be MAPPER or REDUCER or MASTER"
-- Entry point: 'remoteInit' reads settings from the "config" file,
-- registers the metadata generated by the 'remotable' splice above, and
-- runs 'initialProcess' with this node's configured role.
main = remoteInit (Just "config") [Main.__remoteCallMetaData] initialProcess
| null | https://raw.githubusercontent.com/jepst/CloudHaskell/6acbcb6b2fc6bb6789cce2083cdd4747ef427627/examples/kmeans/KMeans.hs | haskell | evaluate or return?
say $ "Got peers: " ++ show peers
liftIO $ B.writeFile "kmeans-converged" $ encode $ Map.toList $ Map.unionsWith (++) (rights respoints) | # LANGUAGE TemplateHaskell , DeriveDataTypeable , BangPatterns #
module Main where
import Remote
import Remote.Process (roundtripResponse,setRemoteNodeLogConfig,getConfig,PayloadDisposition(..),roundtripQuery,roundtripQueryMulti)
import KMeansCommon
import Control.Exception (try,SomeException,evaluate)
import Control.Monad (liftM)
import Control.Monad.Trans (liftIO)
import System.Random (randomR,getStdRandom)
import Data.Typeable (Typeable)
import Data.Data (Data)
import Control.Exception (IOException)
import Data.Binary (Binary,get,put,encode,decode)
import Data.Maybe (fromJust)
import Data.List (minimumBy,sortBy)
import Data.Time
import Data.Either (rights)
import qualified Data.ByteString.Lazy as B
import qualified Data.Map as Map
import System.IO
import Debug.Trace
split :: Int -> [a] -> [[a]]
split numChunks l = splitSize (ceiling $ fromIntegral (length l) / fromIntegral numChunks) l
where
splitSize i v = let (first,second) = splitAt i v
in first : splitSize i second
broadcast :: (Serializable a) => [ProcessId] -> a -> ProcessM ()
broadcast pids dat = mapM_ (\pid -> send pid dat) pids
multiSpawn :: [NodeId] -> Closure (ProcessM ()) -> ProcessM [ProcessId]
multiSpawn nodes f = mapM (\node -> spawnLink node f) nodes
where s n = do mypid <- getSelfNode
setRemoteNodeLogConfig n (LogConfig LoTrivial (LtForward mypid) LfAll)
spawnLink n f
mapperProcess :: ProcessM ()
mapperProcess =
let mapProcess :: (Maybe [Vector],Maybe [ProcessId],Map.Map Int (Int,Vector)) -> ProcessM ()
mapProcess (mvecs,mreducers,mresult) =
receiveWait
[
match (\vec -> do vecs<-liftIO $ readf vec
say $ "Mapper read data file"
return (Just vecs,mreducers,mresult)),
match (\reducers -> return (mvecs,Just reducers,mresult)),
roundtripResponse (\() -> return (mresult,(mvecs,mreducers,mresult))),
roundtripResponse
(\clusters -> let tbl = analyze (fromJust mvecs) clustersandcenters Map.empty
clustersandcenters = map (\x -> (x,clusterCenter x)) clusters
reducers = fromJust mreducers
target clust = reducers !! (clust `mod` length reducers)
sendout (clustid,(count,sum)) = send (target clustid) Cluster {clId = clustid,clCount=count, clSum=sum}
in do say $ "calculating: "++show (length reducers)++" reducers"
mapM_ sendout (Map.toList tbl)
return ((),(mvecs,mreducers,tbl))),
matchUnknownThrow
] >>= mapProcess
getit :: Handle -> IO [Vector]
getit h = do l <- liftM lines $ hGetContents h
readf fn = do h <- openFile fn ReadMode
getit h
condtrace cond s val = if cond
then trace s val
else val
analyze :: [Vector] -> [(Cluster,Vector)] -> Map.Map Int (Int,Vector) -> Map.Map Int (Int,Vector)
analyze [] _ ht = ht
analyze (v:vectors) clusters ht =
let theclust = assignToCluster clusters v
newh = ht `seq` theclust `seq` Map.insertWith' (\(a,v1) (b,v2) -> let av = addVector v1 v2 in av `seq` (a+b,av) ) theclust (1,v) ht
condtrace ( blarg ` mod ` 1000 = = 0 ) ( show blarg ) $
in newh `seq` analyze vectors clusters newh
assignToCluster :: [(Cluster,Vector)] -> Vector -> Int
assignToCluster clusters vector =
let distances = map (\(x,center) -> (clId x,sqDistance center vector)) clusters
in fst $ minimumBy (\(_,a) (_,b) -> compare a b) distances
doit = mapProcess (Nothing,Nothing,Map.empty)
in doit >> return ()
reducerProcess :: ProcessM ()
reducerProcess = let reduceProcess :: ([Cluster],[Cluster]) -> ProcessM ()
reduceProcess (oldclusters,clusters) =
receiveWait [
roundtripResponse (\() -> return (clusters,(clusters,[]))),
match (\x -> return (oldclusters,combineClusters clusters x)),
matchUnknownThrow] >>= reduceProcess
combineClusters :: [Cluster] -> Cluster -> [Cluster]
combineClusters [] a = [a]
combineClusters (fstclst:rest) clust | clId fstclst == clId clust = (Cluster {clId = clId fstclst,
clCount = clCount fstclst + clCount clust,
clSum = addVector (clSum fstclst) (clSum clust)}):rest
combineClusters (fstclst:res) clust = fstclst:(combineClusters res clust)
in reduceProcess ([],[]) >> return ()
$( remotable ['mapperProcess, 'reducerProcess] )
initialProcess "MASTER" =
do peers <- getPeers
cfg <- getConfig
let mappers = findPeerByRole peers "MAPPER"
let reducers = findPeerByRole peers "REDUCER"
let numreducers = length reducers
let nummappers = length mappers
say $ "Got " ++ show nummappers ++ " mappers and " ++ show numreducers ++ " reducers"
clusters <- liftIO $ getClusters "kmeans-clusters"
say $ "Got "++show (length clusters)++" clusters"
mypid <- getSelfPid
mapperPids <- multiSpawn mappers mapperProcess__closure
reducerPids <- multiSpawn reducers reducerProcess__closure
broadcast mapperPids reducerPids
mapM_ (\(pid,chunk) -> send pid chunk) (zip (mapperPids) (repeat "kmeans-points"))
say "Starting iteration"
starttime <- liftIO $ getCurrentTime
let loop howmany clusters = do
liftIO $ putStrLn $ show howmany
roundtripQueryMulti PldUser mapperPids clusters :: ProcessM [Either TransmitStatus ()]
res <- roundtripQueryMulti PldUser reducerPids () :: ProcessM [Either TransmitStatus [Cluster]]
let newclusters = rights res
let newclusters2 = (sortBy (\a b -> compare (clId a) (clId b)) (concat newclusters))
if newclusters2 == clusters || howmany >= 4
then do
donetime <- liftIO $ getCurrentTime
say $ "Converged in " ++ show howmany ++ " iterations and " ++ (show $ diffUTCTime donetime starttime)
pointmaps <- mapM (\pid -> do (Right m) <- roundtripQuery PldUser pid ()
return (m::Map.Map Int (Int,Vector))) mapperPids
let pointmap = map (\x -> sum $ map fst (Map.elems x)) pointmaps
say $ "Total points: " ++ (show $ sum pointmap)
liftIO $ writeFile " kmeans - converged " $ readableShow ( Map.toList pointmap )
respoints < - roundtripQueryAsync mapperPids ( ) : : ProcessM [ Either TransmitStatus ( Map . Map Int [ Vector ] ) ]
else
loop (howmany+1) newclusters2
loop 0 clusters
initialProcess "MAPPER" = receiveWait []
initialProcess "REDUCER" = receiveWait []
initialProcess _ = error "Role must be MAPPER or REDUCER or MASTER"
main = remoteInit (Just "config") [Main.__remoteCallMetaData] initialProcess
|
ac2d3c3599ede0f0636010226c63fa2f33ef03d0b16f0de3dc9a824603ada81b | serokell/haskell-crypto | Main.hs | SPDX - FileCopyrightText : 2021 >
--
SPDX - License - Identifier : MPL-2.0
module Main (main) where
import Data.ByteArray (constEq)
import Data.SensitiveBytes (withSecureMemory)
import Data.SensitiveBytes.IO (withUserPassword)
-- | Prompt for a password twice and report whether the entries match.
-- 'withSecureMemory' initialises the secure-memory machinery that
-- 'withUserPassword' allocates its buffers from (see Data.SensitiveBytes).
main :: IO ()
main = withSecureMemory $ do
  -- 128: presumably the maximum password length in bytes -- confirm
  -- against the 'withUserPassword' documentation.
  withUserPassword 128 (Just "Password: ") $ \pw1 ->
    withUserPassword 128 (Just "Repeat password: ") $ \pw2 ->
      -- 'constEq' (Data.ByteArray) is the timing-safe comparison, so the
      -- check does not leak where the two passwords first differ.
      if pw1 `constEq` pw2
      then putStrLn "You are super!"
      else putStrLn "Passwords do not match."
| null | https://raw.githubusercontent.com/serokell/haskell-crypto/94045832c184ff2519dccb975b982f7825ae02f4/secure-memory/app/checkpw/Main.hs | haskell | SPDX - FileCopyrightText : 2021 >
SPDX - License - Identifier : MPL-2.0
module Main (main) where
import Data.ByteArray (constEq)
import Data.SensitiveBytes (withSecureMemory)
import Data.SensitiveBytes.IO (withUserPassword)
main :: IO ()
main = withSecureMemory $ do
withUserPassword 128 (Just "Password: ") $ \pw1 ->
withUserPassword 128 (Just "Repeat password: ") $ \pw2 ->
if pw1 `constEq` pw2
then putStrLn "You are super!"
else putStrLn "Passwords do not match."
| |
8814605d527f4cabceb6aa72ce1dec12bc1dc5abcd27786411adfe4e0d6a74a2 | mzp/coq-ruby | coq_micromega.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(* *)
: A reflexive tactic using the
(* *)
( / ) 2006 - 2008
(* *)
(************************************************************************)
open Mutils
let debug = false
(* Run [f x], print the user CPU time it consumed on stdout (labelled
   with [str]), and return the result unchanged. *)
let time str f x =
  let cpu_now () = (Unix.times ()).Unix.tms_utime in
  let started = cpu_now () in
  let result = f x in
  let elapsed = cpu_now () -. started in
  Printf.printf "time %s %f\n" str elapsed;
  flush stdout;
  result
(* Propositional skeleton of a goal as seen by the tactic: ['a] is the
   type of atoms (arithmetic formulae), ['b] the type of subterms the
   tactic treats as opaque.  The Names.name on each node tags the
   corresponding subformula. *)
type ('a,'b) formula =
  | TT                                                  (* True *)
  | FF                                                  (* False *)
  | X of 'b                                             (* opaque subterm *)
  | A of 'a * Names.name                                (* atom *)
  | C of ('a,'b) formula * ('a,'b) formula * Names.name (* conjunction *)
  | D of ('a,'b) formula * ('a,'b) formula * Names.name (* disjunction *)
  | N of ('a,'b) formula * Names.name                   (* negation *)
  | I of ('a,'b) formula * ('a,'b) formula * Names.name (* implication *)
(* Default (empty) tag. *)
let none = Names.Anonymous
(* [tag_formula t f] re-tags the root node of [f] with [t]; TT, FF and X
   carry no tag and are returned unchanged. *)
let tag_formula t f =
  match f with
    | A(x,_) -> A(x,t)
    | C(x,y,_) -> C(x,y,t)
    | D(x,y,_) -> D(x,y,t)
    | N(x,_) -> N(x,t)
    | I(x,y,_) -> I(x,y,t)
    | _ -> f
(* CNF as a list of clauses: [tt] is the empty conjunction (true), [ff]
   the conjunction holding the empty clause (false). *)
let tt = []
let ff = [ [] ]
(* A sentence instantiates the skeleton with Micromega's formula type as
   the atoms. *)
type ('constant,'contr) sentence =
    ('constant Micromega.formula, 'contr) formula
(* [cnf negate normalise f] converts [f] to conjunctive normal form,
   represented as a list of clauses (each clause a list of atoms).
   [normalise] maps an atom to the clauses it asserts; [negate] maps an
   atom to the clauses asserting its negation.  Both produce Coq lists,
   converted to OCaml lists here. *)
let cnf negate normalise f =
  let negate a =
    CoqToCaml.list (fun cl -> CoqToCaml.list (fun x -> x) cl) (negate a) in
  let normalise a =
    CoqToCaml.list (fun cl -> CoqToCaml.list (fun x -> x) cl) (normalise a) in
  (* conjunction of two CNFs: concatenate their clause lists *)
  let and_cnf x y = x @ y in
  (* disjoin clause [t] into every clause of [f] *)
  let or_clause_cnf t f = List.map (fun x -> t@x ) f in
  (* disjunction of two CNFs: pairwise union of clauses *)
  let rec or_cnf f f' =
    match f with
      | [] -> tt
      | e :: rst -> (or_cnf rst f') @ (or_clause_cnf e f') in
  (* [xcnf pol f] is the CNF of [f] when [pol] holds, and of the negation
     of [f] otherwise (polarity is flipped under N and on the left of I). *)
  let rec xcnf (pol : bool) f =
    match f with
      | TT -> if pol then tt else ff (* ?? *)
      | FF -> if pol then ff else tt (* ?? *)
      (* NOTE(review): both polarities yield [ff] for an opaque subterm --
         plausibly deliberate (an unknown subterm proves nothing), but the
         original "??" suggests it was never double-checked. *)
      | X p -> if pol then ff else ff (* ?? *)
      | A(x,t) -> if pol then normalise x else negate x
      | N(e,t) -> xcnf (not pol) e
      | C(e1,e2,t) ->
	  (if pol then and_cnf else or_cnf) (xcnf pol e1) (xcnf pol e2)
      | D(e1,e2,t) ->
	  (if pol then or_cnf else and_cnf) (xcnf pol e1) (xcnf pol e2)
      | I(e1,e2,t) ->
	  (if pol then or_cnf else and_cnf) (xcnf (not pol) e1) (xcnf pol e2) in
  xcnf true f
module M =
struct
open Coqlib
open Term
let constant = gen_constant_in_modules " Omicron " coq_modules
let logic_dir = ["Coq";"Logic";"Decidable"]
let coq_modules =
init_modules @
[logic_dir] @ arith_modules @ zarith_base_modules @
[ ["Coq";"Lists";"List"];
["ZMicromega"];
["Tauto"];
["RingMicromega"];
["EnvRing"];
["Coq"; "micromega"; "ZMicromega"];
["Coq" ; "micromega" ; "Tauto"];
["Coq" ; "micromega" ; "RingMicromega"];
["Coq" ; "micromega" ; "EnvRing"];
["Coq";"QArith"; "QArith_base"];
["Coq";"Reals" ; "Rdefinitions"];
["Coq";"Reals" ; "Rpow_def"];
["LRing_normalise"]]
let constant = gen_constant_in_modules "ZMicromega" coq_modules
let coq_and = lazy (constant "and")
let coq_or = lazy (constant "or")
let coq_not = lazy (constant "not")
let coq_iff = lazy (constant "iff")
let coq_True = lazy (constant "True")
let coq_False = lazy (constant "False")
let coq_cons = lazy (constant "cons")
let coq_nil = lazy (constant "nil")
let coq_list = lazy (constant "list")
let coq_O = lazy (constant "O")
let coq_S = lazy (constant "S")
let coq_nat = lazy (constant "nat")
let coq_NO = lazy
(gen_constant_in_modules "N" [ ["Coq";"NArith";"BinNat" ]] "N0")
let coq_Npos = lazy
(gen_constant_in_modules "N" [ ["Coq";"NArith"; "BinNat"]] "Npos")
(* let coq_n = lazy (constant "N")*)
let coq_pair = lazy (constant "pair")
let coq_None = lazy (constant "None")
let coq_option = lazy (constant "option")
let coq_positive = lazy (constant "positive")
let coq_xH = lazy (constant "xH")
let coq_xO = lazy (constant "xO")
let coq_xI = lazy (constant "xI")
let coq_N0 = lazy (constant "N0")
(* Was mistakenly a second [let coq_N0 = lazy (constant "Npos")]: it
   shadowed the correct binding above with the "Npos" constructor,
   which would make any user of [coq_N0] dump the wrong constructor. *)
let coq_Npos = lazy (constant "Npos")
let coq_Z = lazy (constant "Z")
let coq_Q = lazy (constant "Q")
let coq_R = lazy (constant "R")
let coq_ZERO = lazy (constant "Z0")
let coq_POS = lazy (constant "Zpos")
let coq_NEG = lazy (constant "Zneg")
let coq_QWitness = lazy
(gen_constant_in_modules "QMicromega"
[["Coq"; "micromega"; "QMicromega"]] "QWitness")
let coq_ZWitness = lazy
(gen_constant_in_modules "QMicromega"
[["Coq"; "micromega"; "ZMicromega"]] "ZWitness")
let coq_Build_Witness = lazy (constant "Build_Witness")
let coq_Qmake = lazy (constant "Qmake")
let coq_R0 = lazy (constant "R0")
let coq_R1 = lazy (constant "R1")
let coq_proofTerm = lazy (constant "ProofTerm")
let coq_ratProof = lazy (constant "RatProof")
let coq_cutProof = lazy (constant "CutProof")
let coq_enumProof = lazy (constant "EnumProof")
let coq_Zgt = lazy (constant "Zgt")
let coq_Zge = lazy (constant "Zge")
let coq_Zle = lazy (constant "Zle")
let coq_Zlt = lazy (constant "Zlt")
let coq_Eq = lazy (constant "eq")
let coq_Zplus = lazy (constant "Zplus")
let coq_Zminus = lazy (constant "Zminus")
let coq_Zopp = lazy (constant "Zopp")
let coq_Zmult = lazy (constant "Zmult")
let coq_Zpower = lazy (constant "Zpower")
let coq_N_of_Z = lazy
(gen_constant_in_modules "ZArithRing"
[["Coq";"setoid_ring";"ZArithRing"]] "N_of_Z")
let coq_Qgt = lazy (constant "Qgt")
let coq_Qge = lazy (constant "Qge")
let coq_Qle = lazy (constant "Qle")
let coq_Qlt = lazy (constant "Qlt")
let coq_Qeq = lazy (constant "Qeq")
let coq_Qplus = lazy (constant "Qplus")
let coq_Qminus = lazy (constant "Qminus")
let coq_Qopp = lazy (constant "Qopp")
let coq_Qmult = lazy (constant "Qmult")
let coq_Qpower = lazy (constant "Qpower")
let coq_Rgt = lazy (constant "Rgt")
let coq_Rge = lazy (constant "Rge")
let coq_Rle = lazy (constant "Rle")
let coq_Rlt = lazy (constant "Rlt")
let coq_Rplus = lazy (constant "Rplus")
let coq_Rminus = lazy (constant "Rminus")
let coq_Ropp = lazy (constant "Ropp")
let coq_Rmult = lazy (constant "Rmult")
let coq_Rpower = lazy (constant "pow")
let coq_PEX = lazy (constant "PEX" )
let coq_PEc = lazy (constant"PEc")
let coq_PEadd = lazy (constant "PEadd")
let coq_PEopp = lazy (constant "PEopp")
let coq_PEmul = lazy (constant "PEmul")
let coq_PEsub = lazy (constant "PEsub")
let coq_PEpow = lazy (constant "PEpow")
let coq_OpEq = lazy (constant "OpEq")
let coq_OpNEq = lazy (constant "OpNEq")
let coq_OpLe = lazy (constant "OpLe")
let coq_OpLt = lazy (constant "OpLt")
let coq_OpGe = lazy (constant "OpGe")
let coq_OpGt = lazy (constant "OpGt")
let coq_S_In = lazy (constant "S_In")
let coq_S_Square = lazy (constant "S_Square")
let coq_S_Monoid = lazy (constant "S_Monoid")
let coq_S_Ideal = lazy (constant "S_Ideal")
let coq_S_Mult = lazy (constant "S_Mult")
let coq_S_Add = lazy (constant "S_Add")
let coq_S_Pos = lazy (constant "S_Pos")
let coq_S_Z = lazy (constant "S_Z")
let coq_coneMember = lazy (constant "coneMember")
let coq_make_impl = lazy
(gen_constant_in_modules "Zmicromega" [["Refl"]] "make_impl")
let coq_make_conj = lazy
(gen_constant_in_modules "Zmicromega" [["Refl"]] "make_conj")
let coq_Build = lazy
(gen_constant_in_modules "RingMicromega"
[["Coq" ; "micromega" ; "RingMicromega"] ; ["RingMicromega"] ]
"Build_Formula")
let coq_Cstr = lazy
(gen_constant_in_modules "RingMicromega"
[["Coq" ; "micromega" ; "RingMicromega"] ; ["RingMicromega"] ] "Formula")
type parse_error =
| Ukn
| BadStr of string
| BadNum of int
| BadTerm of Term.constr
| Msg of string
| Goal of (Term.constr list ) * Term.constr * parse_error
let string_of_error = function
| Ukn -> "ukn"
| BadStr s -> s
| BadNum i -> string_of_int i
| BadTerm _ -> "BadTerm"
| Msg s -> s
| Goal _ -> "Goal"
exception ParseError
(* Decompose [term] into (constructor index, argument array).  Accepts
   either a bare constructor (empty argument array) or a constructor
   application; anything else raises [ParseError]. *)
let get_left_construct term =
  match Term.kind_of_term term with
  | Term.Construct(_,i) -> (i,[| |])
  | Term.App(l,rst) ->
      (match Term.kind_of_term l with
       | Term.Construct(_,i) -> (i,rst)
       | _ -> raise ParseError
      )
  | _ -> raise ParseError
module Mc = Micromega
let rec parse_nat term =
let (i,c) = get_left_construct term in
match i with
| 1 -> Mc.O
| 2 -> Mc.S (parse_nat (c.(0)))
| i -> raise ParseError
let pp_nat o n = Printf.fprintf o "%i" (CoqToCaml.nat n)
let rec dump_nat x =
match x with
| Mc.O -> Lazy.force coq_O
| Mc.S p -> Term.mkApp(Lazy.force coq_S,[| dump_nat p |])
let rec parse_positive term =
let (i,c) = get_left_construct term in
match i with
| 1 -> Mc.XI (parse_positive c.(0))
| 2 -> Mc.XO (parse_positive c.(0))
| 3 -> Mc.XH
| i -> raise ParseError
let rec dump_positive x =
match x with
| Mc.XH -> Lazy.force coq_xH
| Mc.XO p -> Term.mkApp(Lazy.force coq_xO,[| dump_positive p |])
| Mc.XI p -> Term.mkApp(Lazy.force coq_xI,[| dump_positive p |])
let pp_positive o x = Printf.fprintf o "%i" (CoqToCaml.positive x)
let rec dump_n x =
match x with
| Mc.N0 -> Lazy.force coq_N0
| Mc.Npos p -> Term.mkApp(Lazy.force coq_Npos,[| dump_positive p|])
let rec dump_index x =
match x with
| Mc.XH -> Lazy.force coq_xH
| Mc.XO p -> Term.mkApp(Lazy.force coq_xO,[| dump_index p |])
| Mc.XI p -> Term.mkApp(Lazy.force coq_xI,[| dump_index p |])
let pp_index o x = Printf.fprintf o "%i" (CoqToCaml.index x)
let rec dump_n x =
match x with
| Mc.N0 -> Lazy.force coq_NO
| Mc.Npos p -> Term.mkApp(Lazy.force coq_Npos,[| dump_positive p |])
let rec pp_n o x = output_string o (string_of_int (CoqToCaml.n x))
let dump_pair t1 t2 dump_t1 dump_t2 (Mc.Pair (x,y)) =
Term.mkApp(Lazy.force coq_pair,[| t1 ; t2 ; dump_t1 x ; dump_t2 y|])
let rec parse_z term =
let (i,c) = get_left_construct term in
match i with
| 1 -> Mc.Z0
| 2 -> Mc.Zpos (parse_positive c.(0))
| 3 -> Mc.Zneg (parse_positive c.(0))
| i -> raise ParseError
let dump_z x =
match x with
| Mc.Z0 ->Lazy.force coq_ZERO
| Mc.Zpos p -> Term.mkApp(Lazy.force coq_POS,[| dump_positive p|])
| Mc.Zneg p -> Term.mkApp(Lazy.force coq_NEG,[| dump_positive p|])
let pp_z o x = Printf.fprintf o "%i" (CoqToCaml.z x)
let dump_num bd1 =
Term.mkApp(Lazy.force coq_Qmake,
[|dump_z (CamlToCoq.bigint (numerator bd1)) ;
dump_positive (CamlToCoq.positive_big_int (denominator bd1)) |])
let dump_q q =
Term.mkApp(Lazy.force coq_Qmake,
[| dump_z q.Micromega.qnum ; dump_positive q.Micromega.qden|])
let parse_q term =
match Term.kind_of_term term with
| Term.App(c, args) -> if c = Lazy.force coq_Qmake then
{Mc.qnum = parse_z args.(0) ; Mc.qden = parse_positive args.(1) }
else raise ParseError
| _ -> raise ParseError
let rec parse_list parse_elt term =
let (i,c) = get_left_construct term in
match i with
| 1 -> Mc.Nil
| 2 -> Mc.Cons(parse_elt c.(1), parse_list parse_elt c.(2))
| i -> raise ParseError
let rec dump_list typ dump_elt l =
match l with
| Mc.Nil -> Term.mkApp(Lazy.force coq_nil,[| typ |])
| Mc.Cons(e,l) -> Term.mkApp(Lazy.force coq_cons,
[| typ; dump_elt e;dump_list typ dump_elt l|])
let rec dump_ml_list typ dump_elt l =
match l with
| [] -> Term.mkApp(Lazy.force coq_nil,[| typ |])
| e::l -> Term.mkApp(Lazy.force coq_cons,
[| typ; dump_elt e;dump_ml_list typ dump_elt l|])
let pp_list op cl elt o l =
let rec _pp o l =
match l with
| Mc.Nil -> ()
| Mc.Cons(e,Mc.Nil) -> Printf.fprintf o "%a" elt e
| Mc.Cons(e,l) -> Printf.fprintf o "%a ,%a" elt e _pp l in
Printf.fprintf o "%s%a%s" op _pp l cl
let pp_var = pp_positive
let dump_var = dump_positive
let rec pp_expr o e =
match e with
| Mc.PEX n -> Printf.fprintf o "V %a" pp_var n
| Mc.PEc z -> pp_z o z
| Mc.PEadd(e1,e2) -> Printf.fprintf o "(%a)+(%a)" pp_expr e1 pp_expr e2
| Mc.PEmul(e1,e2) -> Printf.fprintf o "%a*(%a)" pp_expr e1 pp_expr e2
| Mc.PEopp e -> Printf.fprintf o "-(%a)" pp_expr e
| Mc.PEsub(e1,e2) -> Printf.fprintf o "(%a)-(%a)" pp_expr e1 pp_expr e2
| Mc.PEpow(e,n) -> Printf.fprintf o "(%a)^(%a)" pp_expr e pp_n n
let dump_expr typ dump_z e =
let rec dump_expr e =
match e with
| Mc.PEX n -> mkApp(Lazy.force coq_PEX,[| typ; dump_var n |])
| Mc.PEc z -> mkApp(Lazy.force coq_PEc,[| typ ; dump_z z |])
| Mc.PEadd(e1,e2) -> mkApp(Lazy.force coq_PEadd,
[| typ; dump_expr e1;dump_expr e2|])
| Mc.PEsub(e1,e2) -> mkApp(Lazy.force coq_PEsub,
[| typ; dump_expr e1;dump_expr e2|])
| Mc.PEopp e -> mkApp(Lazy.force coq_PEopp,
[| typ; dump_expr e|])
| Mc.PEmul(e1,e2) -> mkApp(Lazy.force coq_PEmul,
[| typ; dump_expr e1;dump_expr e2|])
| Mc.PEpow(e,n) -> mkApp(Lazy.force coq_PEpow,
[| typ; dump_expr e; dump_n n|])
in
dump_expr e
let rec dump_monoid l = dump_list (Lazy.force coq_nat) dump_nat l
let rec dump_cone typ dump_z e =
let z = Lazy.force typ in
let rec dump_cone e =
match e with
| Mc.S_In n -> mkApp(Lazy.force coq_S_In,[| z; dump_nat n |])
| Mc.S_Ideal(e,c) -> mkApp(Lazy.force coq_S_Ideal,
[| z; dump_expr z dump_z e ; dump_cone c |])
| Mc.S_Square e -> mkApp(Lazy.force coq_S_Square,
[| z;dump_expr z dump_z e|])
| Mc.S_Monoid l -> mkApp (Lazy.force coq_S_Monoid,
[|z; dump_monoid l|])
| Mc.S_Add(e1,e2) -> mkApp(Lazy.force coq_S_Add,
[| z; dump_cone e1; dump_cone e2|])
| Mc.S_Mult(e1,e2) -> mkApp(Lazy.force coq_S_Mult,
[| z; dump_cone e1; dump_cone e2|])
| Mc.S_Pos p -> mkApp(Lazy.force coq_S_Pos,[| z; dump_z p|])
| Mc.S_Z -> mkApp( Lazy.force coq_S_Z,[| z|]) in
dump_cone e
let pp_cone pp_z o e =
let rec pp_cone o e =
match e with
| Mc.S_In n ->
Printf.fprintf o "(S_In %a)%%nat" pp_nat n
| Mc.S_Ideal(e,c) ->
Printf.fprintf o "(S_Ideal %a %a)" pp_expr e pp_cone c
| Mc.S_Square e ->
Printf.fprintf o "(S_Square %a)" pp_expr e
| Mc.S_Monoid l ->
Printf.fprintf o "(S_Monoid %a)" (pp_list "[" "]" pp_nat) l
| Mc.S_Add(e1,e2) ->
Printf.fprintf o "(S_Add %a %a)" pp_cone e1 pp_cone e2
| Mc.S_Mult(e1,e2) ->
Printf.fprintf o "(S_Mult %a %a)" pp_cone e1 pp_cone e2
| Mc.S_Pos p ->
Printf.fprintf o "(S_Pos %a)%%positive" pp_z p
| Mc.S_Z ->
Printf.fprintf o "S_Z" in
pp_cone o e
let rec dump_op = function
| Mc.OpEq-> Lazy.force coq_OpEq
| Mc.OpNEq-> Lazy.force coq_OpNEq
| Mc.OpLe -> Lazy.force coq_OpLe
| Mc.OpGe -> Lazy.force coq_OpGe
| Mc.OpGt-> Lazy.force coq_OpGt
| Mc.OpLt-> Lazy.force coq_OpLt
let pp_op o e=
match e with
| Mc.OpEq-> Printf.fprintf o "="
| Mc.OpNEq-> Printf.fprintf o "<>"
| Mc.OpLe -> Printf.fprintf o "=<"
| Mc.OpGe -> Printf.fprintf o ">="
| Mc.OpGt-> Printf.fprintf o ">"
| Mc.OpLt-> Printf.fprintf o "<"
let pp_cstr o {Mc.flhs = l ; Mc.fop = op ; Mc.frhs = r } =
Printf.fprintf o"(%a %a %a)" pp_expr l pp_op op pp_expr r
let dump_cstr typ dump_constant {Mc.flhs = e1 ; Mc.fop = o ; Mc.frhs = e2} =
Term.mkApp(Lazy.force coq_Build,
[| typ; dump_expr typ dump_constant e1 ;
dump_op o ;
dump_expr typ dump_constant e2|])
(* Look [x] up in an association list whose keys are lazily-forced Coq
   constants; raise [ParseError] when no key matches.  Keys are forced
   one at a time, in list order, exactly as needed. *)
let assoc_const x l =
  let rec lookup = function
    | [] -> raise ParseError
    | (x', y) :: rest -> if x = Lazy.force x' then y else lookup rest in
  lookup l
let zop_table = [
coq_Zgt, Mc.OpGt ;
coq_Zge, Mc.OpGe ;
coq_Zlt, Mc.OpLt ;
coq_Zle, Mc.OpLe ]
let rop_table = [
coq_Rgt, Mc.OpGt ;
coq_Rge, Mc.OpGe ;
coq_Rlt, Mc.OpLt ;
coq_Rle, Mc.OpLe ]
let qop_table = [
coq_Qlt, Mc.OpLt ;
coq_Qle, Mc.OpLe ;
coq_Qeq, Mc.OpEq
]
let parse_zop (op,args) =
match kind_of_term op with
| Const x -> (assoc_const op zop_table, args.(0) , args.(1))
| Ind(n,0) ->
if op = Lazy.force coq_Eq && args.(0) = Lazy.force coq_Z
then (Mc.OpEq, args.(1), args.(2))
else raise ParseError
| _ -> failwith "parse_zop"
(* Decompose an application [op args] into a relation over R.
   Ordered comparisons go through [rop_table]; Leibniz equality at type
   R is handled specially (it is an inductive, not a constant). *)
let parse_rop (op,args) =
  match kind_of_term op with
  | Const x -> (assoc_const op rop_table, args.(0) , args.(1))
  | Ind(n,0) ->
      if op = Lazy.force coq_Eq && args.(0) = Lazy.force coq_R
      then (Mc.OpEq, args.(1), args.(2))
      else raise ParseError
  (* was: failwith "parse_zop" — copy-paste slip in the error message *)
  | _ -> failwith "parse_rop"
let parse_qop (op,args) =
(assoc_const op qop_table, args.(0) , args.(1))
module Env =
struct
  (* An environment is the list of distinct Coq terms abstracted as
     polynomial variables; a variable's index is its 1-based position. *)
  type t = constr list

  (* Return the index of [v] in [env], appending [v] first if absent.
     Returns the (possibly extended) environment and the index as a Coq
     positive. *)
  let compute_rank_add env v =
    let rec _add env n v =
      match env with
      | [] -> ([v],n)
      | e::l ->
          if eq_constr e v
          then (env,n)
          else
            let (env,n) = _add l ( n+1) v in
            (e::env,n) in
    let (env, n) = _add env 1 v in
    (env, CamlToCoq.idx n)

  let empty = []

  (* The terms, in index order. *)
  let elements env = env
end
(* Crude constant test: only constructor applications count.
   (An approximation, as the original comment already noted.) *)
let is_constant t =
  match kind_of_term t with
  | Construct _ -> true
  | _ -> false
type 'a op =
| Binop of ('a Mc.pExpr -> 'a Mc.pExpr -> 'a Mc.pExpr)
| Opp
| Power
| Ukn of string
let assoc_ops x l =
try
snd (List.find (fun (x',y) -> x = Lazy.force x') l)
with
Not_found -> Ukn "Oups"
(* Parse a Coq arithmetic term into an [Mc.pExpr] polynomial expression.
   [parse_constant] parses leaf constants (raising [ParseError] when the
   term is not one), [parse_exp] parses exponents, and [ops_spec] maps
   operator constants to their [op] descriptor.  Any subterm that is
   neither a known operator application nor a constant is abstracted as
   a fresh variable via [Env.compute_rank_add]. *)
let parse_expr parse_constant parse_exp ops_spec env term =
  if debug
  then (Pp.pp (Pp.str "parse_expr: ");
        Pp.pp_flush ();Pp.pp (Printer.prterm term); Pp.pp_flush ());
  (* A leaf: a parsable constant, otherwise a variable of the env. *)
  let constant_or_variable env term =
    try
      ( Mc.PEc (parse_constant term) , env)
    with ParseError ->
      let (env,n) = Env.compute_rank_add env term in
      (Mc.PEX n , env) in
  let rec parse_expr env term =
    (* Parse both operands, threading the environment. *)
    let combine env op (t1,t2) =
      let (expr1,env) = parse_expr env t1 in
      let (expr2,env) = parse_expr env t2 in
      (op expr1 expr2,env) in
    match kind_of_term term with
    | App(t,args) ->
        (
          match kind_of_term t with
          | Const c ->
              ( match assoc_ops t ops_spec with
                | Binop f -> combine env f (args.(0),args.(1))
                | Opp -> let (expr,env) = parse_expr env args.(0) in
                    (Mc.PEopp expr, env)
                | Power ->
                    let (expr,env) = parse_expr env args.(0) in
                    let exp = (parse_exp args.(1)) in
                    (Mc.PEpow(expr, exp) , env)
                | Ukn s ->
                    (* Unknown operator: abstract the whole application
                       as an opaque variable. *)
                    if debug
                    then (Printf.printf "unknown op: %s\n" s; flush stdout;);
                    let (env,n) = Env.compute_rank_add env term in (Mc.PEX n, env)
              )
          | _ -> constant_or_variable env term
        )
    | _ -> constant_or_variable env term in
  parse_expr env term
let zop_spec =
[
coq_Zplus , Binop (fun x y -> Mc.PEadd(x,y)) ;
coq_Zminus , Binop (fun x y -> Mc.PEsub(x,y)) ;
coq_Zmult , Binop (fun x y -> Mc.PEmul (x,y)) ;
coq_Zopp , Opp ;
coq_Zpower , Power]
let qop_spec =
[
coq_Qplus , Binop (fun x y -> Mc.PEadd(x,y)) ;
coq_Qminus , Binop (fun x y -> Mc.PEsub(x,y)) ;
coq_Qmult , Binop (fun x y -> Mc.PEmul (x,y)) ;
coq_Qopp , Opp ;
coq_Qpower , Power]
let rop_spec =
[
coq_Rplus , Binop (fun x y -> Mc.PEadd(x,y)) ;
coq_Rminus , Binop (fun x y -> Mc.PEsub(x,y)) ;
coq_Rmult , Binop (fun x y -> Mc.PEmul (x,y)) ;
coq_Ropp , Opp ;
coq_Rpower , Power]
let zconstant = parse_z
let qconstant = parse_q
let rconstant term =
if debug
then (Pp.pp_flush ();
Pp.pp (Pp.str "rconstant: ");
Pp.pp (Printer.prterm term); Pp.pp_flush ());
match Term.kind_of_term term with
| Const x ->
if term = Lazy.force coq_R0
then Mc.Z0
else if term = Lazy.force coq_R1
then Mc.Zpos Mc.XH
else raise ParseError
| _ -> raise ParseError
let parse_zexpr =
parse_expr zconstant (fun x -> Mc.n_of_Z (parse_z x)) zop_spec
let parse_qexpr =
parse_expr qconstant (fun x -> Mc.n_of_Z (parse_z x)) qop_spec
let parse_rexpr =
parse_expr rconstant (fun x -> Mc.n_of_nat (parse_nat x)) rop_spec
let parse_arith parse_op parse_expr env cstr =
if debug
then (Pp.pp_flush ();
Pp.pp (Pp.str "parse_arith: ");
Pp.pp (Printer.prterm cstr);
Pp.pp_flush ());
match kind_of_term cstr with
| App(op,args) ->
let (op,lhs,rhs) = parse_op (op,args) in
let (e1,env) = parse_expr env lhs in
let (e2,env) = parse_expr env rhs in
({Mc.flhs = e1; Mc.fop = op;Mc.frhs = e2},env)
| _ -> failwith "error : parse_arith(2)"
let parse_zarith = parse_arith parse_zop parse_zexpr
let parse_qarith = parse_arith parse_qop parse_qexpr
let parse_rarith = parse_arith parse_rop parse_rexpr
(* generic parsing of arithmetic expressions *)
let rec f2f = function
| TT -> Mc.TT
| FF -> Mc.FF
| X _ -> Mc.X
| A (x,_) -> Mc.A x
| C (a,b,_) -> Mc.Cj(f2f a,f2f b)
| D (a,b,_) -> Mc.D(f2f a,f2f b)
| N (a,_) -> Mc.N(f2f a)
| I(a,b,_) -> Mc.I(f2f a,f2f b)
let is_prop t =
match t with
| Names.Anonymous -> true (* Not quite right *)
| Names.Name x -> false
(* Smart constructors for the untagged connectives. *)
let mkC f1 f2 = C(f1,f2,none)
let mkD f1 f2 = D(f1,f2,none)
(* "iff" is encoded as the conjunction of the two implications.  The
   second conjunct was previously I(f2,f2,_) — a tautology — which
   silently dropped the backward implication; fixed to I(f2,f1,_). *)
let mkIff f1 f2 = C(I(f1,f2,none),I(f2,f1,none),none)
let mkI f1 f2 = I(f1,f2,none)
let mkformula_binary g term f1 f2 =
match f1 , f2 with
| X _ , X _ -> X(term)
| _ -> g f1 f2
(* Parse a Coq proposition into a tagged [formula].  Subterms outside
   the fragment are kept opaque as [X t]; [mkformula_binary] collapses a
   connective whose two sides are both opaque back into a single opaque
   term, so only the arithmetically relevant skeleton is kept. *)
let parse_formula parse_atom env term =
  (* An atom that fails to parse becomes an opaque [X] leaf. *)
  let parse_atom env t = try let (at,env) = parse_atom env t in (A(at,none), env) with _ -> (X(t),env) in
  let rec xparse_formula env term =
    match kind_of_term term with
    | App(l,rst) ->
        (match rst with
         | [|a;b|] when l = Lazy.force coq_and ->
             let f,env = xparse_formula env a in
             let g,env = xparse_formula env b in
             mkformula_binary mkC term f g,env
         | [|a;b|] when l = Lazy.force coq_or ->
             let f,env = xparse_formula env a in
             let g,env = xparse_formula env b in
             mkformula_binary mkD term f g,env
         | [|a|] when l = Lazy.force coq_not ->
             let (f,env) = xparse_formula env a in (N(f,none), env)
         | [|a;b|] when l = Lazy.force coq_iff ->
             let f,env = xparse_formula env a in
             let g,env = xparse_formula env b in
             mkformula_binary mkIff term f g,env
         | _ -> parse_atom env term)
    (* A non-dependent product is an implication. *)
    | Prod(typ,a,b) when not (Termops.dependent (mkRel 1) b) ->
        let f,env = xparse_formula env a in
        let g,env = xparse_formula env b in
        mkformula_binary mkI term f g,env
    | _ when term = Lazy.force coq_True -> (TT,env)
    | _ when term = Lazy.force coq_False -> (FF,env)
    | _ -> X(term),env in
  xparse_formula env term
let coq_TT = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "TT")
let coq_FF = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "FF")
let coq_And = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "Cj")
let coq_Or = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "D")
let coq_Neg = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "N")
let coq_Atom = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "A")
let coq_X = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "X")
let coq_Impl = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "I")
let coq_Formula = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "BFormula")
(* Reify an OCaml tagged formula back into the Coq [BFormula] datatype,
   dropping the name tags.  [dump_atom] reifies atoms; opaque [X]
   subterms are injected verbatim. *)
let dump_formula typ dump_atom f =
  let rec xdump f =
    match f with
    | TT -> mkApp(Lazy.force coq_TT,[| typ|])
    | FF -> mkApp(Lazy.force coq_FF,[| typ|])
    | C(x,y,_) -> mkApp(Lazy.force coq_And,[| typ ; xdump x ; xdump y|])
    | D(x,y,_) -> mkApp(Lazy.force coq_Or,[| typ ; xdump x ; xdump y|])
    | I(x,y,_) -> mkApp(Lazy.force coq_Impl,[| typ ; xdump x ; xdump y|])
    | N(x,_) -> mkApp(Lazy.force coq_Neg,[| typ ; xdump x|])
    | A(x,_) -> mkApp(Lazy.force coq_Atom,[| typ ; dump_atom x|])
    | X(t) -> mkApp(Lazy.force coq_X,[| typ ; t|]) in
  xdump f
(* ! reverse the list of bindings *)
(* Wrap [concl] in one named let-in per (name, expr, typ) binding.
   NB: as the original "! reverse" comment warned, the accumulator makes
   the bindings appear in reverse list order in the produced term. *)
let set l concl =
  let rec _set acc = function
    | [] -> acc
    | (e::l) ->
        let (name,expr,typ) = e in
        _set (Term.mkNamedLetIn
                (Names.id_of_string name)
                expr typ acc) l in
  _set concl l
end
open M
let rec sig_of_cone = function
| Mc.S_In n -> [CoqToCaml.nat n]
| Mc.S_Ideal(e,w) -> sig_of_cone w
| Mc.S_Mult(w1,w2) ->
(sig_of_cone w1)@(sig_of_cone w2)
| Mc.S_Add(w1,w2) -> (sig_of_cone w1)@(sig_of_cone w2)
| _ -> []
let same_proof sg cl1 cl2 =
let cl1 = CoqToCaml.list (fun x -> x) cl1 in
let cl2 = CoqToCaml.list (fun x -> x) cl2 in
let rec xsame_proof sg =
match sg with
| [] -> true
| n::sg -> (try List.nth cl1 n = List.nth cl2 n with _ -> false)
&& (xsame_proof sg ) in
xsame_proof sg
let tags_of_clause tgs wit clause =
let rec xtags tgs = function
| Mc.S_In n -> Names.Idset.union tgs
(snd (List.nth clause (CoqToCaml.nat n) ))
| Mc.S_Ideal(e,w) -> xtags tgs w
| Mc.S_Mult (w1,w2) | Mc.S_Add(w1,w2) -> xtags (xtags tgs w1) w2
| _ -> tgs in
xtags tgs wit
let tags_of_cnf wits cnf =
List.fold_left2 (fun acc w cl -> tags_of_clause acc w cl)
Names.Idset.empty wits cnf
let find_witness prover polys1 =
let l = CoqToCaml.list (fun x -> x) polys1 in
try_any prover l
let rec witness prover l1 l2 =
match l2 with
| Micromega.Nil -> Some (Micromega.Nil)
| Micromega.Cons(e,l2) ->
match find_witness prover (Micromega.Cons( e,l1)) with
| None -> None
| Some w ->
(match witness prover l1 l2 with
| None -> None
| Some l -> Some (Micromega.Cons (w,l))
)
(* Apply term [t] to the variables [ids], left to right:
   [apply_ids t [i1;...;in]] builds [((t i1) ... in)]. *)
let apply_ids t ids =
  List.fold_left (fun acc i -> Term.mkApp (acc, [| Term.mkVar i |])) t ids
let coq_Node = lazy
(Coqlib.gen_constant_in_modules "VarMap"
[["Coq" ; "micromega" ; "VarMap"];["VarMap"]] "Node")
let coq_Leaf = lazy
(Coqlib.gen_constant_in_modules "VarMap"
[["Coq" ; "micromega" ; "VarMap"];["VarMap"]] "Leaf")
let coq_Empty = lazy
(Coqlib.gen_constant_in_modules "VarMap"
[["Coq" ; "micromega" ;"VarMap"];["VarMap"]] "Empty")
(* Build a balanced binary tree (a Coq VarMap) from array [a], heap
   style: node [n] owns children [2n] and [2n+1]; indices in the upper
   half of the array become leaves.  When [n > semi_size_of_a] we have
   [2n > size_of_a], so a leaf never hides existing children. *)
let btree_of_array typ a =
  let size_of_a = Array.length a in
  let semi_size_of_a = size_of_a lsr 1 in
  let node = Lazy.force coq_Node
  and leaf = Lazy.force coq_Leaf
  and empty = Term.mkApp (Lazy.force coq_Empty, [| typ |]) in
  let rec aux n =
    if n > size_of_a
    then empty
    else if n > semi_size_of_a
    then Term.mkApp (leaf, [| typ; a.(n-1) |])
    else Term.mkApp (node, [| typ; aux (2*n); a.(n-1); aux (2*n+1) |])
  in
  aux 1

(* Same, but turning any internal exception into a [failwith] carrying a
   readable message. *)
let btree_of_array typ a =
  try
    btree_of_array typ a
  with x ->
    failwith (Printf.sprintf "btree of array : %s" (Printexc.to_string x))

(* The variable map sent to Coq: environment list -> balanced tree. *)
let dump_varmap typ env =
  btree_of_array typ (Array.of_list env)
let rec pp_varmap o vm =
match vm with
| Mc.Empty -> output_string o "[]"
| Mc.Leaf z -> Printf.fprintf o "[%a]" pp_z z
| Mc.Node(l,z,r) -> Printf.fprintf o "[%a, %a, %a]" pp_varmap l pp_z z pp_varmap r
let rec dump_proof_term = function
| Micromega.RatProof cone ->
Term.mkApp(Lazy.force coq_ratProof, [|dump_cone coq_Z dump_z cone|])
| Micromega.CutProof(e,q,cone,prf) ->
Term.mkApp(Lazy.force coq_cutProof,
[| dump_expr (Lazy.force coq_Z) dump_z e ;
dump_q q ;
dump_cone coq_Z dump_z cone ;
dump_proof_term prf|])
| Micromega.EnumProof( q1,e1,q2,c1,c2,prfs) ->
Term.mkApp (Lazy.force coq_enumProof,
[| dump_q q1 ; dump_expr (Lazy.force coq_Z) dump_z e1 ; dump_q q2;
dump_cone coq_Z dump_z c1 ; dump_cone coq_Z dump_z c2 ;
dump_list (Lazy.force coq_proofTerm) dump_proof_term prfs |])
let pp_q o q = Printf.fprintf o "%a/%a" pp_z q.Micromega.qnum pp_positive q.Micromega.qden
let rec pp_proof_term o = function
| Micromega.RatProof cone -> Printf.fprintf o "R[%a]" (pp_cone pp_z) cone
| Micromega.CutProof(e,q,_,p) -> failwith "not implemented"
| Micromega.EnumProof(q1,e1,q2,c1,c2,rst) ->
Printf.fprintf o "EP[%a,%a,%a,%a,%a,%a]"
pp_q q1 pp_expr e1 pp_q q2 (pp_cone pp_z) c1 (pp_cone pp_z) c2
(pp_list "[" "]" pp_proof_term) rst
(* Parse each hypothesis (id, statement) into a tagged formula.
   Hypotheses whose statement is outside the fragment simply fail to
   parse and are silently dropped from the result. *)
let rec parse_hyps parse_arith env hyps =
  match hyps with
  | [] -> ([],env)
  | (i,t)::l ->
      let (lhyps,env) = parse_hyps parse_arith env l in
      try
        let (c,env) = parse_formula parse_arith env t in
        ((i,c)::lhyps, env)
      with _ -> (lhyps,env)
(*(if debug then Printf.printf "parse_arith : %s\n" x);*)
exception ParseError

(* Parse the goal's conclusion and hypotheses into micromega formulas.
   Unparsable hypotheses are dropped by [parse_hyps]; failures on the
   conclusion escape as exceptions handled by [micromega_gen].
   NOTE(review): line "with Failure x -> raise ParseError" below had
   lost its comment markers during transcription (matching the stray
   "(* try *)" above) and could not parse; restored as a comment. *)
let parse_goal parse_arith env hyps term =
  (* try *)
  let (f,env) = parse_formula parse_arith env term in
  let (lhyps,env) = parse_hyps parse_arith env hyps in
  (lhyps,f,env)
  (* with Failure x -> raise ParseError *)
type ('a, 'b) domain_spec = {
typ : Term.constr; (* Z, Q , R *)
coeff : Term.constr ; (* Z, Q *)
dump_coeff : 'a -> Term.constr ;
proof_typ : Term.constr ;
dump_proof : 'b -> Term.constr
}
let zz_domain_spec = lazy {
typ = Lazy.force coq_Z;
coeff = Lazy.force coq_Z;
dump_coeff = dump_z ;
proof_typ = Lazy.force coq_proofTerm ;
dump_proof = dump_proof_term
}
let qq_domain_spec = lazy {
typ = Lazy.force coq_Q;
coeff = Lazy.force coq_Q;
dump_coeff = dump_q ;
proof_typ = Lazy.force coq_QWitness ;
dump_proof = dump_cone coq_Q dump_q
}
let rz_domain_spec = lazy {
typ = Lazy.force coq_R;
coeff = Lazy.force coq_Z;
dump_coeff = dump_z;
proof_typ = Lazy.force coq_ZWitness ;
dump_proof = dump_cone coq_Z dump_z
}
let micromega_order_change spec cert cert_typ env ff gl =
let formula_typ = (Term.mkApp( Lazy.force coq_Cstr,[| spec.coeff|])) in
let ff = dump_formula formula_typ (dump_cstr spec.coeff spec.dump_coeff) ff in
let vm = dump_varmap ( spec.typ) env in
Tactics.change_in_concl None
(set
[
("__ff", ff, Term.mkApp(Lazy.force coq_Formula ,[| formula_typ |]));
("__varmap", vm , Term.mkApp
(Coqlib.gen_constant_in_modules "VarMap"
[["Coq" ; "micromega" ; "VarMap"];["VarMap"]] "t", [| spec.typ|]));
("__wit", cert,cert_typ)
]
(Tacmach.pf_concl gl )
)
gl
(* Walk [cnf] and [wit] in lock-step; for each witness, collect the
   later clauses of [cnf] that the same witness signature would prove
   (so duplicated proof obligations can be discharged only once). *)
let detect_duplicates cnf wit =
  let cnf = CoqToCaml.list (fun x -> x) cnf in
  let wit = CoqToCaml.list (fun x -> x) wit in
  let rec xdup cnf wit =
    match wit with
    | [] -> []
    | w :: wit ->
        (* Positions of the hypotheses the witness actually uses. *)
        let sg = sig_of_cone w in
        match cnf with
        | [] -> []
        | e::cnf ->
            let (dups,cnf) = (List.partition (fun x -> same_proof sg e x) cnf) in
            dups@(xdup cnf wit) in
  xdup cnf wit
let find_witness prover polys1 =
try_any prover polys1
let witness_list_with_tags prover l =
let rec xwitness_list l =
match l with
| [] -> Some([])
| e::l ->
match find_witness prover (List.map fst e) with
| None -> None
| Some w ->
(match xwitness_list l with
| None -> None
| Some l -> Some (w::l)
) in
xwitness_list l
let witness_list_without_tags prover l =
let rec xwitness_list l =
match l with
| [] -> Some([])
| e::l ->
match find_witness prover e with
| None -> None
| Some w ->
(match xwitness_list l with
| None -> None
| Some l -> Some (w::l)
) in
xwitness_list l
let witness_list prover l =
let rec xwitness_list l =
match l with
| Micromega.Nil -> Some(Micromega.Nil)
| Micromega.Cons(e,l) ->
match find_witness prover e with
| None -> None
| Some w ->
(match xwitness_list l with
| None -> None
| Some l -> Some (Micromega.Cons(w,l))
) in
xwitness_list l
(* True for the empty and one-element lists. *)
let is_singleton l = match l with [] | [_] -> true | _ -> false
(* Core of the tactic: fold hypotheses [polys1] and conclusion [polys2]
   into one implication, put it in CNF, find a witness for every clause
   with [prover], then change the goal into the reflexive form
   (formula + variable map + witness list).
   NOTE(review): the "| Some res ->" arm below had been stripped
   together with its adjoining debug-printf comment during
   transcription, leaving an unparsable match; restored. *)
let micromega_tauto negate normalise spec prover env polys1 polys2 gl =
  let spec = Lazy.force spec in
  (* Build H1 -> ... -> Hn -> concl, remembering hypothesis ids so they
     can be generalized before changing the goal; opaque hypotheses are
     skipped. *)
  let (ff,ids) =
    List.fold_right
      (fun (id,f) (cc,ids) ->
        match f with
          X _ -> (cc,ids)
        | _ -> (I(tag_formula (Names.Name id) f,cc,none), id::ids))
      polys1 (polys2,[]) in
  let cnf_ff = cnf negate normalise ff in
  if debug then
    (Pp.pp (Pp.str "Formula....\n") ;
     let formula_typ = (Term.mkApp( Lazy.force coq_Cstr,[| spec.coeff|])) in
     (* was [dump_cstr spec.typ ...]: made consistent with
        [micromega_order_change], which dumps over [spec.coeff] *)
     let ff = dump_formula formula_typ
         (dump_cstr spec.coeff spec.dump_coeff) ff in
     Pp.pp (Printer.prterm ff) ; Pp.pp_flush ()) ;
  match witness_list_without_tags prover cnf_ff with
  | None -> Tacticals.tclFAIL 0 (Pp.str "Cannot find witness") gl
  | Some res ->
      (* Printf.printf "\nList %i" (List.length res); *)
      let (ff,res,ids) = (ff,res,List.map Term.mkVar ids) in
      let res' = dump_ml_list (spec.proof_typ) spec.dump_proof res in
      (Tacticals.tclTHENSEQ
         [
           Tactics.generalize ids;
           micromega_order_change spec res'
             (Term.mkApp(Lazy.force coq_list,[| spec.proof_typ|])) env ff ;
         ]) gl
(* Entry point shared by all the tactics: parse the goal's conclusion
   and hypotheses with [parse_arith], then run [micromega_tauto].
   Parse failures become tactic failures rather than anomalies. *)
let micromega_gen parse_arith negate normalise spec prover gl =
  let concl = Tacmach.pf_concl gl in
  let hyps = Tacmach.pf_hyps_types gl in
  try
    let (hyps,concl,env) = parse_goal parse_arith Env.empty hyps concl in
    let env = Env.elements env in
    micromega_tauto negate normalise spec prover env hyps concl gl
  with
  | Failure x -> flush stdout ; Pp.pp_flush () ;
      Tacticals.tclFAIL 0 (Pp.str x) gl
  | ParseError -> Tacticals.tclFAIL 0 (Pp.str "Bad logical fragment") gl
let lift_ratproof prover l =
match prover l with
| None -> None
| Some c -> Some (Mc.RatProof c)
(* Answer of the external certificate generator. *)
type csdpcert = Sos.positivstellensatz option
(* Polynomial system shipped to the generator. *)
type micromega_polys = (Micromega.q Mc.pExpr, Mc.op1) Micromega.prod list
(* Prover name, plus an optional degree bound. *)
type provername = string * int option

(* Run the external "csdpcert" certificate generator.  The query is
   marshalled to a temp file and the answer unmarshalled from a second
   one; both temp files are removed even when the command fails. *)
let call_csdpcert provername poly =
  let tmp_to,ch_to = Filename.open_temp_file "csdpcert" ".in" in
  let tmp_from = Filename.temp_file "csdpcert" ".out" in
  output_value ch_to (provername,poly : provername * micromega_polys);
  close_out ch_to;
  let cmdname =
    List.fold_left Filename.concat (Envars.coqlib ())
      ["contrib"; "micromega"; "csdpcert" ^ Coq_config.exec_extension] in
  let c = Sys.command (cmdname ^" "^ tmp_to ^" "^ tmp_from) in
  (try Sys.remove tmp_to with _ -> ());
  if c <> 0 then begin
    (* previously leaked tmp_from when the external command failed *)
    (try Sys.remove tmp_from with _ -> ());
    Util.error ("Failed to call csdp certificate generator")
  end;
  let ch_from = open_in tmp_from in
  let cert = (input_value ch_from : csdpcert) in
  close_in ch_from; Sys.remove tmp_from;
  cert
let rec z_to_q_expr e =
match e with
| Mc.PEc z -> Mc.PEc {Mc.qnum = z ; Mc.qden = Mc.XH}
| Mc.PEX x -> Mc.PEX x
| Mc.PEadd(e1,e2) -> Mc.PEadd(z_to_q_expr e1, z_to_q_expr e2)
| Mc.PEsub(e1,e2) -> Mc.PEsub(z_to_q_expr e1, z_to_q_expr e2)
| Mc.PEmul(e1,e2) -> Mc.PEmul(z_to_q_expr e1, z_to_q_expr e2)
| Mc.PEopp(e) -> Mc.PEopp(z_to_q_expr e)
| Mc.PEpow(e,n) -> Mc.PEpow(z_to_q_expr e,n)
let call_csdpcert_q provername poly =
match call_csdpcert provername poly with
| None -> None
| Some cert ->
let cert = Certificate.q_cert_of_pos cert in
match Mc.qWeakChecker (CamlToCoq.list (fun x -> x) poly) cert with
| Mc.True -> Some cert
| Mc.False -> (print_string "buggy certificate" ; flush stdout) ;None
let call_csdpcert_z provername poly =
let l = List.map (fun (Mc.Pair(e,o)) -> (Mc.Pair(z_to_q_expr e,o))) poly in
match call_csdpcert provername l with
| None -> None
| Some cert ->
let cert = Certificate.z_cert_of_pos cert in
match Mc.zWeakChecker (CamlToCoq.list (fun x -> x) poly) cert with
| Mc.True -> Some cert
| Mc.False -> (print_string "buggy certificate" ; flush stdout) ;None
let psatzl_Z gl =
micromega_gen parse_zarith Mc.negate Mc.normalise zz_domain_spec
[lift_ratproof
(Certificate.linear_prover Certificate.z_spec), "fourier refutation" ] gl
let psatzl_Q gl =
micromega_gen parse_qarith Mc.cnf_negate Mc.cnf_normalise qq_domain_spec
[ Certificate.linear_prover Certificate.q_spec, "fourier refutation" ] gl
let psatz_Q i gl =
micromega_gen parse_qarith Mc.cnf_negate Mc.cnf_normalise qq_domain_spec
[ call_csdpcert_q ("real_nonlinear_prover", Some i), "fourier refutation" ] gl
let psatzl_R gl =
micromega_gen parse_rarith Mc.cnf_negate Mc.cnf_normalise rz_domain_spec
[ Certificate.linear_prover Certificate.z_spec, "fourier refutation" ] gl
let psatz_R i gl =
micromega_gen parse_rarith Mc.cnf_negate Mc.cnf_normalise rz_domain_spec
[ call_csdpcert_z ("real_nonlinear_prover", Some i), "fourier refutation" ] gl
let psatz_Z i gl =
micromega_gen parse_zarith Mc.negate Mc.normalise zz_domain_spec
[lift_ratproof (call_csdpcert_z ("real_nonlinear_prover",Some i)),
"fourier refutation" ] gl
let sos_Z gl =
micromega_gen parse_zarith Mc.negate Mc.normalise zz_domain_spec
[lift_ratproof (call_csdpcert_z ("pure_sos", None)), "pure sos refutation"] gl
let sos_Q gl =
micromega_gen parse_qarith Mc.cnf_negate Mc.cnf_normalise qq_domain_spec
[call_csdpcert_q ("pure_sos", None), "pure sos refutation"] gl
let sos_R gl =
micromega_gen parse_rarith Mc.cnf_negate Mc.cnf_normalise rz_domain_spec
[call_csdpcert_z ("pure_sos", None), "pure sos refutation"] gl
let xlia gl =
micromega_gen parse_zarith Mc.negate Mc.normalise zz_domain_spec
[Certificate.zlinear_prover, "zprover"] gl
| null | https://raw.githubusercontent.com/mzp/coq-ruby/99b9f87c4397f705d1210702416176b13f8769c1/contrib/micromega/coq_micromega.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
**********************************************************************
if debug then
??
??
??
let coq_n = lazy (constant "N")
This is an approx
generic parsing of arithmetic expressions
Not quite right
! reverse the list of bindings
(if debug then Printf.printf "parse_arith : %s\n" x);
try
Z, Q , R
Z, Q | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
: A reflexive tactic using the
( / ) 2006 - 2008
open Mutils
let debug = false
let time str f x =
let t0 = (Unix.times()).Unix.tms_utime in
let res = f x in
let t1 = (Unix.times()).Unix.tms_utime in
flush stdout);
res
type ('a,'b) formula =
| TT
| FF
| X of 'b
| A of 'a * Names.name
| C of ('a,'b) formula * ('a,'b) formula * Names.name
| D of ('a,'b) formula * ('a,'b) formula * Names.name
| N of ('a,'b) formula * Names.name
| I of ('a,'b) formula * ('a,'b) formula * Names.name
let none = Names.Anonymous
let tag_formula t f =
match f with
| A(x,_) -> A(x,t)
| C(x,y,_) -> C(x,y,t)
| D(x,y,_) -> D(x,y,t)
| N(x,_) -> N(x,t)
| I(x,y,_) -> I(x,y,t)
| _ -> f
let tt = []
let ff = [ [] ]
type ('constant,'contr) sentence =
('constant Micromega.formula, 'contr) formula
let cnf negate normalise f =
let negate a =
CoqToCaml.list (fun cl -> CoqToCaml.list (fun x -> x) cl) (negate a) in
let normalise a =
CoqToCaml.list (fun cl -> CoqToCaml.list (fun x -> x) cl) (normalise a) in
let and_cnf x y = x @ y in
let or_clause_cnf t f = List.map (fun x -> t@x ) f in
let rec or_cnf f f' =
match f with
| [] -> tt
| e :: rst -> (or_cnf rst f') @ (or_clause_cnf e f') in
let rec xcnf (pol : bool) f =
match f with
| A(x,t) -> if pol then normalise x else negate x
| N(e,t) -> xcnf (not pol) e
| C(e1,e2,t) ->
(if pol then and_cnf else or_cnf) (xcnf pol e1) (xcnf pol e2)
| D(e1,e2,t) ->
(if pol then or_cnf else and_cnf) (xcnf pol e1) (xcnf pol e2)
| I(e1,e2,t) ->
(if pol then or_cnf else and_cnf) (xcnf (not pol) e1) (xcnf pol e2) in
xcnf true f
module M =
struct
open Coqlib
open Term
let constant = gen_constant_in_modules " Omicron " coq_modules
let logic_dir = ["Coq";"Logic";"Decidable"]
let coq_modules =
init_modules @
[logic_dir] @ arith_modules @ zarith_base_modules @
[ ["Coq";"Lists";"List"];
["ZMicromega"];
["Tauto"];
["RingMicromega"];
["EnvRing"];
["Coq"; "micromega"; "ZMicromega"];
["Coq" ; "micromega" ; "Tauto"];
["Coq" ; "micromega" ; "RingMicromega"];
["Coq" ; "micromega" ; "EnvRing"];
["Coq";"QArith"; "QArith_base"];
["Coq";"Reals" ; "Rdefinitions"];
["Coq";"Reals" ; "Rpow_def"];
["LRing_normalise"]]
let constant = gen_constant_in_modules "ZMicromega" coq_modules
let coq_and = lazy (constant "and")
let coq_or = lazy (constant "or")
let coq_not = lazy (constant "not")
let coq_iff = lazy (constant "iff")
let coq_True = lazy (constant "True")
let coq_False = lazy (constant "False")
let coq_cons = lazy (constant "cons")
let coq_nil = lazy (constant "nil")
let coq_list = lazy (constant "list")
let coq_O = lazy (constant "O")
let coq_S = lazy (constant "S")
let coq_nat = lazy (constant "nat")
let coq_NO = lazy
(gen_constant_in_modules "N" [ ["Coq";"NArith";"BinNat" ]] "N0")
let coq_Npos = lazy
(gen_constant_in_modules "N" [ ["Coq";"NArith"; "BinNat"]] "Npos")
let coq_pair = lazy (constant "pair")
let coq_None = lazy (constant "None")
let coq_option = lazy (constant "option")
let coq_positive = lazy (constant "positive")
let coq_xH = lazy (constant "xH")
let coq_xO = lazy (constant "xO")
let coq_xI = lazy (constant "xI")
let coq_N0 = lazy (constant "N0")
let coq_N0 = lazy (constant "Npos")
let coq_Z = lazy (constant "Z")
let coq_Q = lazy (constant "Q")
let coq_R = lazy (constant "R")
let coq_ZERO = lazy (constant "Z0")
let coq_POS = lazy (constant "Zpos")
let coq_NEG = lazy (constant "Zneg")
let coq_QWitness = lazy
(gen_constant_in_modules "QMicromega"
[["Coq"; "micromega"; "QMicromega"]] "QWitness")
let coq_ZWitness = lazy
(gen_constant_in_modules "QMicromega"
[["Coq"; "micromega"; "ZMicromega"]] "ZWitness")
let coq_Build_Witness = lazy (constant "Build_Witness")
let coq_Qmake = lazy (constant "Qmake")
let coq_R0 = lazy (constant "R0")
let coq_R1 = lazy (constant "R1")
let coq_proofTerm = lazy (constant "ProofTerm")
let coq_ratProof = lazy (constant "RatProof")
let coq_cutProof = lazy (constant "CutProof")
let coq_enumProof = lazy (constant "EnumProof")
let coq_Zgt = lazy (constant "Zgt")
let coq_Zge = lazy (constant "Zge")
let coq_Zle = lazy (constant "Zle")
let coq_Zlt = lazy (constant "Zlt")
let coq_Eq = lazy (constant "eq")
let coq_Zplus = lazy (constant "Zplus")
let coq_Zminus = lazy (constant "Zminus")
let coq_Zopp = lazy (constant "Zopp")
let coq_Zmult = lazy (constant "Zmult")
let coq_Zpower = lazy (constant "Zpower")
let coq_N_of_Z = lazy
(gen_constant_in_modules "ZArithRing"
[["Coq";"setoid_ring";"ZArithRing"]] "N_of_Z")
let coq_Qgt = lazy (constant "Qgt")
let coq_Qge = lazy (constant "Qge")
let coq_Qle = lazy (constant "Qle")
let coq_Qlt = lazy (constant "Qlt")
let coq_Qeq = lazy (constant "Qeq")
let coq_Qplus = lazy (constant "Qplus")
let coq_Qminus = lazy (constant "Qminus")
let coq_Qopp = lazy (constant "Qopp")
let coq_Qmult = lazy (constant "Qmult")
let coq_Qpower = lazy (constant "Qpower")
let coq_Rgt = lazy (constant "Rgt")
let coq_Rge = lazy (constant "Rge")
let coq_Rle = lazy (constant "Rle")
let coq_Rlt = lazy (constant "Rlt")
let coq_Rplus = lazy (constant "Rplus")
let coq_Rminus = lazy (constant "Rminus")
let coq_Ropp = lazy (constant "Ropp")
let coq_Rmult = lazy (constant "Rmult")
let coq_Rpower = lazy (constant "pow")
let coq_PEX = lazy (constant "PEX" )
let coq_PEc = lazy (constant"PEc")
let coq_PEadd = lazy (constant "PEadd")
let coq_PEopp = lazy (constant "PEopp")
let coq_PEmul = lazy (constant "PEmul")
let coq_PEsub = lazy (constant "PEsub")
let coq_PEpow = lazy (constant "PEpow")
let coq_OpEq = lazy (constant "OpEq")
let coq_OpNEq = lazy (constant "OpNEq")
let coq_OpLe = lazy (constant "OpLe")
let coq_OpLt = lazy (constant "OpLt")
let coq_OpGe = lazy (constant "OpGe")
let coq_OpGt = lazy (constant "OpGt")
let coq_S_In = lazy (constant "S_In")
let coq_S_Square = lazy (constant "S_Square")
let coq_S_Monoid = lazy (constant "S_Monoid")
let coq_S_Ideal = lazy (constant "S_Ideal")
let coq_S_Mult = lazy (constant "S_Mult")
let coq_S_Add = lazy (constant "S_Add")
let coq_S_Pos = lazy (constant "S_Pos")
let coq_S_Z = lazy (constant "S_Z")
let coq_coneMember = lazy (constant "coneMember")
let coq_make_impl = lazy
(gen_constant_in_modules "Zmicromega" [["Refl"]] "make_impl")
let coq_make_conj = lazy
(gen_constant_in_modules "Zmicromega" [["Refl"]] "make_conj")
let coq_Build = lazy
(gen_constant_in_modules "RingMicromega"
[["Coq" ; "micromega" ; "RingMicromega"] ; ["RingMicromega"] ]
"Build_Formula")
let coq_Cstr = lazy
(gen_constant_in_modules "RingMicromega"
[["Coq" ; "micromega" ; "RingMicromega"] ; ["RingMicromega"] ] "Formula")
type parse_error =
| Ukn
| BadStr of string
| BadNum of int
| BadTerm of Term.constr
| Msg of string
| Goal of (Term.constr list ) * Term.constr * parse_error
let string_of_error = function
| Ukn -> "ukn"
| BadStr s -> s
| BadNum i -> string_of_int i
| BadTerm _ -> "BadTerm"
| Msg s -> s
| Goal _ -> "Goal"
exception ParseError
let get_left_construct term =
match Term.kind_of_term term with
| Term.Construct(_,i) -> (i,[| |])
| Term.App(l,rst) ->
(match Term.kind_of_term l with
| Term.Construct(_,i) -> (i,rst)
| _ -> raise ParseError
)
| _ -> raise ParseError
module Mc = Micromega
let rec parse_nat term =
let (i,c) = get_left_construct term in
match i with
| 1 -> Mc.O
| 2 -> Mc.S (parse_nat (c.(0)))
| i -> raise ParseError
let pp_nat o n = Printf.fprintf o "%i" (CoqToCaml.nat n)
let rec dump_nat x =
match x with
| Mc.O -> Lazy.force coq_O
| Mc.S p -> Term.mkApp(Lazy.force coq_S,[| dump_nat p |])
let rec parse_positive term =
let (i,c) = get_left_construct term in
match i with
| 1 -> Mc.XI (parse_positive c.(0))
| 2 -> Mc.XO (parse_positive c.(0))
| 3 -> Mc.XH
| i -> raise ParseError
let rec dump_positive x =
match x with
| Mc.XH -> Lazy.force coq_xH
| Mc.XO p -> Term.mkApp(Lazy.force coq_xO,[| dump_positive p |])
| Mc.XI p -> Term.mkApp(Lazy.force coq_xI,[| dump_positive p |])
let pp_positive o x = Printf.fprintf o "%i" (CoqToCaml.positive x)
let rec dump_n x =
match x with
| Mc.N0 -> Lazy.force coq_N0
| Mc.Npos p -> Term.mkApp(Lazy.force coq_Npos,[| dump_positive p|])
let rec dump_index x =
match x with
| Mc.XH -> Lazy.force coq_xH
| Mc.XO p -> Term.mkApp(Lazy.force coq_xO,[| dump_index p |])
| Mc.XI p -> Term.mkApp(Lazy.force coq_xI,[| dump_index p |])
let pp_index o x = Printf.fprintf o "%i" (CoqToCaml.index x)
let rec dump_n x =
match x with
| Mc.N0 -> Lazy.force coq_NO
| Mc.Npos p -> Term.mkApp(Lazy.force coq_Npos,[| dump_positive p |])
let rec pp_n o x = output_string o (string_of_int (CoqToCaml.n x))
let dump_pair t1 t2 dump_t1 dump_t2 (Mc.Pair (x,y)) =
Term.mkApp(Lazy.force coq_pair,[| t1 ; t2 ; dump_t1 x ; dump_t2 y|])
let rec parse_z term =
let (i,c) = get_left_construct term in
match i with
| 1 -> Mc.Z0
| 2 -> Mc.Zpos (parse_positive c.(0))
| 3 -> Mc.Zneg (parse_positive c.(0))
| i -> raise ParseError
let dump_z x =
match x with
| Mc.Z0 ->Lazy.force coq_ZERO
| Mc.Zpos p -> Term.mkApp(Lazy.force coq_POS,[| dump_positive p|])
| Mc.Zneg p -> Term.mkApp(Lazy.force coq_NEG,[| dump_positive p|])
let pp_z o x = Printf.fprintf o "%i" (CoqToCaml.z x)
let dump_num bd1 =
Term.mkApp(Lazy.force coq_Qmake,
[|dump_z (CamlToCoq.bigint (numerator bd1)) ;
dump_positive (CamlToCoq.positive_big_int (denominator bd1)) |])
let dump_q q =
Term.mkApp(Lazy.force coq_Qmake,
[| dump_z q.Micromega.qnum ; dump_positive q.Micromega.qden|])
let parse_q term =
match Term.kind_of_term term with
| Term.App(c, args) -> if c = Lazy.force coq_Qmake then
{Mc.qnum = parse_z args.(0) ; Mc.qden = parse_positive args.(1) }
else raise ParseError
| _ -> raise ParseError
let rec parse_list parse_elt term =
let (i,c) = get_left_construct term in
match i with
| 1 -> Mc.Nil
| 2 -> Mc.Cons(parse_elt c.(1), parse_list parse_elt c.(2))
| i -> raise ParseError
let rec dump_list typ dump_elt l =
match l with
| Mc.Nil -> Term.mkApp(Lazy.force coq_nil,[| typ |])
| Mc.Cons(e,l) -> Term.mkApp(Lazy.force coq_cons,
[| typ; dump_elt e;dump_list typ dump_elt l|])
let rec dump_ml_list typ dump_elt l =
match l with
| [] -> Term.mkApp(Lazy.force coq_nil,[| typ |])
| e::l -> Term.mkApp(Lazy.force coq_cons,
[| typ; dump_elt e;dump_ml_list typ dump_elt l|])
let pp_list op cl elt o l =
let rec _pp o l =
match l with
| Mc.Nil -> ()
| Mc.Cons(e,Mc.Nil) -> Printf.fprintf o "%a" elt e
| Mc.Cons(e,l) -> Printf.fprintf o "%a ,%a" elt e _pp l in
Printf.fprintf o "%s%a%s" op _pp l cl
let pp_var = pp_positive
let dump_var = dump_positive
let rec pp_expr o e =
match e with
| Mc.PEX n -> Printf.fprintf o "V %a" pp_var n
| Mc.PEc z -> pp_z o z
| Mc.PEadd(e1,e2) -> Printf.fprintf o "(%a)+(%a)" pp_expr e1 pp_expr e2
| Mc.PEmul(e1,e2) -> Printf.fprintf o "%a*(%a)" pp_expr e1 pp_expr e2
| Mc.PEopp e -> Printf.fprintf o "-(%a)" pp_expr e
| Mc.PEsub(e1,e2) -> Printf.fprintf o "(%a)-(%a)" pp_expr e1 pp_expr e2
| Mc.PEpow(e,n) -> Printf.fprintf o "(%a)^(%a)" pp_expr e pp_n n
let dump_expr typ dump_z e =
let rec dump_expr e =
match e with
| Mc.PEX n -> mkApp(Lazy.force coq_PEX,[| typ; dump_var n |])
| Mc.PEc z -> mkApp(Lazy.force coq_PEc,[| typ ; dump_z z |])
| Mc.PEadd(e1,e2) -> mkApp(Lazy.force coq_PEadd,
[| typ; dump_expr e1;dump_expr e2|])
| Mc.PEsub(e1,e2) -> mkApp(Lazy.force coq_PEsub,
[| typ; dump_expr e1;dump_expr e2|])
| Mc.PEopp e -> mkApp(Lazy.force coq_PEopp,
[| typ; dump_expr e|])
| Mc.PEmul(e1,e2) -> mkApp(Lazy.force coq_PEmul,
[| typ; dump_expr e1;dump_expr e2|])
| Mc.PEpow(e,n) -> mkApp(Lazy.force coq_PEpow,
[| typ; dump_expr e; dump_n n|])
in
dump_expr e
let rec dump_monoid l = dump_list (Lazy.force coq_nat) dump_nat l
let rec dump_cone typ dump_z e =
let z = Lazy.force typ in
let rec dump_cone e =
match e with
| Mc.S_In n -> mkApp(Lazy.force coq_S_In,[| z; dump_nat n |])
| Mc.S_Ideal(e,c) -> mkApp(Lazy.force coq_S_Ideal,
[| z; dump_expr z dump_z e ; dump_cone c |])
| Mc.S_Square e -> mkApp(Lazy.force coq_S_Square,
[| z;dump_expr z dump_z e|])
| Mc.S_Monoid l -> mkApp (Lazy.force coq_S_Monoid,
[|z; dump_monoid l|])
| Mc.S_Add(e1,e2) -> mkApp(Lazy.force coq_S_Add,
[| z; dump_cone e1; dump_cone e2|])
| Mc.S_Mult(e1,e2) -> mkApp(Lazy.force coq_S_Mult,
[| z; dump_cone e1; dump_cone e2|])
| Mc.S_Pos p -> mkApp(Lazy.force coq_S_Pos,[| z; dump_z p|])
| Mc.S_Z -> mkApp( Lazy.force coq_S_Z,[| z|]) in
dump_cone e
let pp_cone pp_z o e =
let rec pp_cone o e =
match e with
| Mc.S_In n ->
Printf.fprintf o "(S_In %a)%%nat" pp_nat n
| Mc.S_Ideal(e,c) ->
Printf.fprintf o "(S_Ideal %a %a)" pp_expr e pp_cone c
| Mc.S_Square e ->
Printf.fprintf o "(S_Square %a)" pp_expr e
| Mc.S_Monoid l ->
Printf.fprintf o "(S_Monoid %a)" (pp_list "[" "]" pp_nat) l
| Mc.S_Add(e1,e2) ->
Printf.fprintf o "(S_Add %a %a)" pp_cone e1 pp_cone e2
| Mc.S_Mult(e1,e2) ->
Printf.fprintf o "(S_Mult %a %a)" pp_cone e1 pp_cone e2
| Mc.S_Pos p ->
Printf.fprintf o "(S_Pos %a)%%positive" pp_z p
| Mc.S_Z ->
Printf.fprintf o "S_Z" in
pp_cone o e
let rec dump_op = function
| Mc.OpEq-> Lazy.force coq_OpEq
| Mc.OpNEq-> Lazy.force coq_OpNEq
| Mc.OpLe -> Lazy.force coq_OpLe
| Mc.OpGe -> Lazy.force coq_OpGe
| Mc.OpGt-> Lazy.force coq_OpGt
| Mc.OpLt-> Lazy.force coq_OpLt
let pp_op o e=
match e with
| Mc.OpEq-> Printf.fprintf o "="
| Mc.OpNEq-> Printf.fprintf o "<>"
| Mc.OpLe -> Printf.fprintf o "=<"
| Mc.OpGe -> Printf.fprintf o ">="
| Mc.OpGt-> Printf.fprintf o ">"
| Mc.OpLt-> Printf.fprintf o "<"
let pp_cstr o {Mc.flhs = l ; Mc.fop = op ; Mc.frhs = r } =
Printf.fprintf o"(%a %a %a)" pp_expr l pp_op op pp_expr r
let dump_cstr typ dump_constant {Mc.flhs = e1 ; Mc.fop = o ; Mc.frhs = e2} =
Term.mkApp(Lazy.force coq_Build,
[| typ; dump_expr typ dump_constant e1 ;
dump_op o ;
dump_expr typ dump_constant e2|])
let assoc_const x l =
try
snd (List.find (fun (x',y) -> x = Lazy.force x') l)
with
Not_found -> raise ParseError
let zop_table = [
coq_Zgt, Mc.OpGt ;
coq_Zge, Mc.OpGe ;
coq_Zlt, Mc.OpLt ;
coq_Zle, Mc.OpLe ]
let rop_table = [
coq_Rgt, Mc.OpGt ;
coq_Rge, Mc.OpGe ;
coq_Rlt, Mc.OpLt ;
coq_Rle, Mc.OpLe ]
let qop_table = [
coq_Qlt, Mc.OpLt ;
coq_Qle, Mc.OpLe ;
coq_Qeq, Mc.OpEq
]
let parse_zop (op,args) =
match kind_of_term op with
| Const x -> (assoc_const op zop_table, args.(0) , args.(1))
| Ind(n,0) ->
if op = Lazy.force coq_Eq && args.(0) = Lazy.force coq_Z
then (Mc.OpEq, args.(1), args.(2))
else raise ParseError
| _ -> failwith "parse_zop"
let parse_rop (op,args) =
match kind_of_term op with
| Const x -> (assoc_const op rop_table, args.(0) , args.(1))
| Ind(n,0) ->
if op = Lazy.force coq_Eq && args.(0) = Lazy.force coq_R
then (Mc.OpEq, args.(1), args.(2))
else raise ParseError
| _ -> failwith "parse_zop"
let parse_qop (op,args) =
(assoc_const op qop_table, args.(0) , args.(1))
module Env =
struct
type t = constr list
let compute_rank_add env v =
let rec _add env n v =
match env with
| [] -> ([v],n)
| e::l ->
if eq_constr e v
then (env,n)
else
let (env,n) = _add l ( n+1) v in
(e::env,n) in
let (env, n) = _add env 1 v in
(env, CamlToCoq.idx n)
let empty = []
let elements env = env
end
match kind_of_term t with
| Construct(i,_) -> true
| _ -> false
type 'a op =
| Binop of ('a Mc.pExpr -> 'a Mc.pExpr -> 'a Mc.pExpr)
| Opp
| Power
| Ukn of string
let assoc_ops x l =
try
snd (List.find (fun (x',y) -> x = Lazy.force x') l)
with
Not_found -> Ukn "Oups"
let parse_expr parse_constant parse_exp ops_spec env term =
if debug
then (Pp.pp (Pp.str "parse_expr: ");
Pp.pp_flush ();Pp.pp (Printer.prterm term); Pp.pp_flush ());
let constant_or_variable env term =
try
( Mc.PEc (parse_constant term) , env)
with ParseError ->
let (env,n) = Env.compute_rank_add env term in
(Mc.PEX n , env) in
let rec parse_expr env term =
let combine env op (t1,t2) =
let (expr1,env) = parse_expr env t1 in
let (expr2,env) = parse_expr env t2 in
(op expr1 expr2,env) in
match kind_of_term term with
| App(t,args) ->
(
match kind_of_term t with
| Const c ->
( match assoc_ops t ops_spec with
| Binop f -> combine env f (args.(0),args.(1))
| Opp -> let (expr,env) = parse_expr env args.(0) in
(Mc.PEopp expr, env)
| Power ->
let (expr,env) = parse_expr env args.(0) in
let exp = (parse_exp args.(1)) in
(Mc.PEpow(expr, exp) , env)
| Ukn s ->
if debug
then (Printf.printf "unknown op: %s\n" s; flush stdout;);
let (env,n) = Env.compute_rank_add env term in (Mc.PEX n, env)
)
| _ -> constant_or_variable env term
)
| _ -> constant_or_variable env term in
parse_expr env term
let zop_spec =
[
coq_Zplus , Binop (fun x y -> Mc.PEadd(x,y)) ;
coq_Zminus , Binop (fun x y -> Mc.PEsub(x,y)) ;
coq_Zmult , Binop (fun x y -> Mc.PEmul (x,y)) ;
coq_Zopp , Opp ;
coq_Zpower , Power]
let qop_spec =
[
coq_Qplus , Binop (fun x y -> Mc.PEadd(x,y)) ;
coq_Qminus , Binop (fun x y -> Mc.PEsub(x,y)) ;
coq_Qmult , Binop (fun x y -> Mc.PEmul (x,y)) ;
coq_Qopp , Opp ;
coq_Qpower , Power]
let rop_spec =
[
coq_Rplus , Binop (fun x y -> Mc.PEadd(x,y)) ;
coq_Rminus , Binop (fun x y -> Mc.PEsub(x,y)) ;
coq_Rmult , Binop (fun x y -> Mc.PEmul (x,y)) ;
coq_Ropp , Opp ;
coq_Rpower , Power]
let zconstant = parse_z
let qconstant = parse_q
let rconstant term =
if debug
then (Pp.pp_flush ();
Pp.pp (Pp.str "rconstant: ");
Pp.pp (Printer.prterm term); Pp.pp_flush ());
match Term.kind_of_term term with
| Const x ->
if term = Lazy.force coq_R0
then Mc.Z0
else if term = Lazy.force coq_R1
then Mc.Zpos Mc.XH
else raise ParseError
| _ -> raise ParseError
let parse_zexpr =
parse_expr zconstant (fun x -> Mc.n_of_Z (parse_z x)) zop_spec
let parse_qexpr =
parse_expr qconstant (fun x -> Mc.n_of_Z (parse_z x)) qop_spec
let parse_rexpr =
parse_expr rconstant (fun x -> Mc.n_of_nat (parse_nat x)) rop_spec
let parse_arith parse_op parse_expr env cstr =
if debug
then (Pp.pp_flush ();
Pp.pp (Pp.str "parse_arith: ");
Pp.pp (Printer.prterm cstr);
Pp.pp_flush ());
match kind_of_term cstr with
| App(op,args) ->
let (op,lhs,rhs) = parse_op (op,args) in
let (e1,env) = parse_expr env lhs in
let (e2,env) = parse_expr env rhs in
({Mc.flhs = e1; Mc.fop = op;Mc.frhs = e2},env)
| _ -> failwith "error : parse_arith(2)"
let parse_zarith = parse_arith parse_zop parse_zexpr
let parse_qarith = parse_arith parse_qop parse_qexpr
let parse_rarith = parse_arith parse_rop parse_rexpr
let rec f2f = function
| TT -> Mc.TT
| FF -> Mc.FF
| X _ -> Mc.X
| A (x,_) -> Mc.A x
| C (a,b,_) -> Mc.Cj(f2f a,f2f b)
| D (a,b,_) -> Mc.D(f2f a,f2f b)
| N (a,_) -> Mc.N(f2f a)
| I(a,b,_) -> Mc.I(f2f a,f2f b)
let is_prop t =
match t with
| Names.Name x -> false
let mkC f1 f2 = C(f1,f2,none)
let mkD f1 f2 = D(f1,f2,none)
let mkIff f1 f2 = C(I(f1,f2,none),I(f2,f2,none),none)
let mkI f1 f2 = I(f1,f2,none)
let mkformula_binary g term f1 f2 =
match f1 , f2 with
| X _ , X _ -> X(term)
| _ -> g f1 f2
let parse_formula parse_atom env term =
let parse_atom env t = try let (at,env) = parse_atom env t in (A(at,none), env) with _ -> (X(t),env) in
let rec xparse_formula env term =
match kind_of_term term with
| App(l,rst) ->
(match rst with
| [|a;b|] when l = Lazy.force coq_and ->
let f,env = xparse_formula env a in
let g,env = xparse_formula env b in
mkformula_binary mkC term f g,env
| [|a;b|] when l = Lazy.force coq_or ->
let f,env = xparse_formula env a in
let g,env = xparse_formula env b in
mkformula_binary mkD term f g,env
| [|a|] when l = Lazy.force coq_not ->
let (f,env) = xparse_formula env a in (N(f,none), env)
| [|a;b|] when l = Lazy.force coq_iff ->
let f,env = xparse_formula env a in
let g,env = xparse_formula env b in
mkformula_binary mkIff term f g,env
| _ -> parse_atom env term)
| Prod(typ,a,b) when not (Termops.dependent (mkRel 1) b) ->
let f,env = xparse_formula env a in
let g,env = xparse_formula env b in
mkformula_binary mkI term f g,env
| _ when term = Lazy.force coq_True -> (TT,env)
| _ when term = Lazy.force coq_False -> (FF,env)
| _ -> X(term),env in
xparse_formula env term
let coq_TT = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "TT")
let coq_FF = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "FF")
let coq_And = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "Cj")
let coq_Or = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "D")
let coq_Neg = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "N")
let coq_Atom = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "A")
let coq_X = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "X")
let coq_Impl = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "I")
let coq_Formula = lazy
(gen_constant_in_modules "ZMicromega"
[["Coq" ; "micromega" ; "Tauto"];["Tauto"]] "BFormula")
let dump_formula typ dump_atom f =
let rec xdump f =
match f with
| TT -> mkApp(Lazy.force coq_TT,[| typ|])
| FF -> mkApp(Lazy.force coq_FF,[| typ|])
| C(x,y,_) -> mkApp(Lazy.force coq_And,[| typ ; xdump x ; xdump y|])
| D(x,y,_) -> mkApp(Lazy.force coq_Or,[| typ ; xdump x ; xdump y|])
| I(x,y,_) -> mkApp(Lazy.force coq_Impl,[| typ ; xdump x ; xdump y|])
| N(x,_) -> mkApp(Lazy.force coq_Neg,[| typ ; xdump x|])
| A(x,_) -> mkApp(Lazy.force coq_Atom,[| typ ; dump_atom x|])
| X(t) -> mkApp(Lazy.force coq_X,[| typ ; t|]) in
xdump f
let set l concl =
let rec _set acc = function
| [] -> acc
| (e::l) ->
let (name,expr,typ) = e in
_set (Term.mkNamedLetIn
(Names.id_of_string name)
expr typ acc) l in
_set concl l
end
open M
let rec sig_of_cone = function
| Mc.S_In n -> [CoqToCaml.nat n]
| Mc.S_Ideal(e,w) -> sig_of_cone w
| Mc.S_Mult(w1,w2) ->
(sig_of_cone w1)@(sig_of_cone w2)
| Mc.S_Add(w1,w2) -> (sig_of_cone w1)@(sig_of_cone w2)
| _ -> []
let same_proof sg cl1 cl2 =
let cl1 = CoqToCaml.list (fun x -> x) cl1 in
let cl2 = CoqToCaml.list (fun x -> x) cl2 in
let rec xsame_proof sg =
match sg with
| [] -> true
| n::sg -> (try List.nth cl1 n = List.nth cl2 n with _ -> false)
&& (xsame_proof sg ) in
xsame_proof sg
let tags_of_clause tgs wit clause =
let rec xtags tgs = function
| Mc.S_In n -> Names.Idset.union tgs
(snd (List.nth clause (CoqToCaml.nat n) ))
| Mc.S_Ideal(e,w) -> xtags tgs w
| Mc.S_Mult (w1,w2) | Mc.S_Add(w1,w2) -> xtags (xtags tgs w1) w2
| _ -> tgs in
xtags tgs wit
let tags_of_cnf wits cnf =
List.fold_left2 (fun acc w cl -> tags_of_clause acc w cl)
Names.Idset.empty wits cnf
let find_witness prover polys1 =
let l = CoqToCaml.list (fun x -> x) polys1 in
try_any prover l
let rec witness prover l1 l2 =
match l2 with
| Micromega.Nil -> Some (Micromega.Nil)
| Micromega.Cons(e,l2) ->
match find_witness prover (Micromega.Cons( e,l1)) with
| None -> None
| Some w ->
(match witness prover l1 l2 with
| None -> None
| Some l -> Some (Micromega.Cons (w,l))
)
let rec apply_ids t ids =
match ids with
| [] -> t
| i::ids -> apply_ids (Term.mkApp(t,[| Term.mkVar i |])) ids
let coq_Node = lazy
(Coqlib.gen_constant_in_modules "VarMap"
[["Coq" ; "micromega" ; "VarMap"];["VarMap"]] "Node")
let coq_Leaf = lazy
(Coqlib.gen_constant_in_modules "VarMap"
[["Coq" ; "micromega" ; "VarMap"];["VarMap"]] "Leaf")
let coq_Empty = lazy
(Coqlib.gen_constant_in_modules "VarMap"
[["Coq" ; "micromega" ;"VarMap"];["VarMap"]] "Empty")
let btree_of_array typ a =
let size_of_a = Array.length a in
let semi_size_of_a = size_of_a lsr 1 in
let node = Lazy.force coq_Node
and leaf = Lazy.force coq_Leaf
and empty = Term.mkApp (Lazy.force coq_Empty, [| typ |]) in
let rec aux n =
if n > size_of_a
then empty
else if n > semi_size_of_a
then Term.mkApp (leaf, [| typ; a.(n-1) |])
else Term.mkApp (node, [| typ; aux (2*n); a.(n-1); aux (2*n+1) |])
in
aux 1
let btree_of_array typ a =
try
btree_of_array typ a
with x ->
failwith (Printf.sprintf "btree of array : %s" (Printexc.to_string x))
let dump_varmap typ env =
btree_of_array typ (Array.of_list env)
let rec pp_varmap o vm =
match vm with
| Mc.Empty -> output_string o "[]"
| Mc.Leaf z -> Printf.fprintf o "[%a]" pp_z z
| Mc.Node(l,z,r) -> Printf.fprintf o "[%a, %a, %a]" pp_varmap l pp_z z pp_varmap r
let rec dump_proof_term = function
| Micromega.RatProof cone ->
Term.mkApp(Lazy.force coq_ratProof, [|dump_cone coq_Z dump_z cone|])
| Micromega.CutProof(e,q,cone,prf) ->
Term.mkApp(Lazy.force coq_cutProof,
[| dump_expr (Lazy.force coq_Z) dump_z e ;
dump_q q ;
dump_cone coq_Z dump_z cone ;
dump_proof_term prf|])
| Micromega.EnumProof( q1,e1,q2,c1,c2,prfs) ->
Term.mkApp (Lazy.force coq_enumProof,
[| dump_q q1 ; dump_expr (Lazy.force coq_Z) dump_z e1 ; dump_q q2;
dump_cone coq_Z dump_z c1 ; dump_cone coq_Z dump_z c2 ;
dump_list (Lazy.force coq_proofTerm) dump_proof_term prfs |])
let pp_q o q = Printf.fprintf o "%a/%a" pp_z q.Micromega.qnum pp_positive q.Micromega.qden
let rec pp_proof_term o = function
| Micromega.RatProof cone -> Printf.fprintf o "R[%a]" (pp_cone pp_z) cone
| Micromega.CutProof(e,q,_,p) -> failwith "not implemented"
| Micromega.EnumProof(q1,e1,q2,c1,c2,rst) ->
Printf.fprintf o "EP[%a,%a,%a,%a,%a,%a]"
pp_q q1 pp_expr e1 pp_q q2 (pp_cone pp_z) c1 (pp_cone pp_z) c2
(pp_list "[" "]" pp_proof_term) rst
let rec parse_hyps parse_arith env hyps =
match hyps with
| [] -> ([],env)
| (i,t)::l ->
let (lhyps,env) = parse_hyps parse_arith env l in
try
let (c,env) = parse_formula parse_arith env t in
((i,c)::lhyps, env)
with _ -> (lhyps,env)
exception ParseError
let parse_goal parse_arith env hyps term =
let (f,env) = parse_formula parse_arith env term in
let (lhyps,env) = parse_hyps parse_arith env hyps in
(lhyps,f,env)
with Failure x - > raise ParseError
type ('a, 'b) domain_spec = {
dump_coeff : 'a -> Term.constr ;
proof_typ : Term.constr ;
dump_proof : 'b -> Term.constr
}
let zz_domain_spec = lazy {
typ = Lazy.force coq_Z;
coeff = Lazy.force coq_Z;
dump_coeff = dump_z ;
proof_typ = Lazy.force coq_proofTerm ;
dump_proof = dump_proof_term
}
let qq_domain_spec = lazy {
typ = Lazy.force coq_Q;
coeff = Lazy.force coq_Q;
dump_coeff = dump_q ;
proof_typ = Lazy.force coq_QWitness ;
dump_proof = dump_cone coq_Q dump_q
}
let rz_domain_spec = lazy {
typ = Lazy.force coq_R;
coeff = Lazy.force coq_Z;
dump_coeff = dump_z;
proof_typ = Lazy.force coq_ZWitness ;
dump_proof = dump_cone coq_Z dump_z
}
let micromega_order_change spec cert cert_typ env ff gl =
let formula_typ = (Term.mkApp( Lazy.force coq_Cstr,[| spec.coeff|])) in
let ff = dump_formula formula_typ (dump_cstr spec.coeff spec.dump_coeff) ff in
let vm = dump_varmap ( spec.typ) env in
Tactics.change_in_concl None
(set
[
("__ff", ff, Term.mkApp(Lazy.force coq_Formula ,[| formula_typ |]));
("__varmap", vm , Term.mkApp
(Coqlib.gen_constant_in_modules "VarMap"
[["Coq" ; "micromega" ; "VarMap"];["VarMap"]] "t", [| spec.typ|]));
("__wit", cert,cert_typ)
]
(Tacmach.pf_concl gl )
)
gl
let detect_duplicates cnf wit =
let cnf = CoqToCaml.list (fun x -> x) cnf in
let wit = CoqToCaml.list (fun x -> x) wit in
let rec xdup cnf wit =
match wit with
| [] -> []
| w :: wit ->
let sg = sig_of_cone w in
match cnf with
| [] -> []
| e::cnf ->
let (dups,cnf) = (List.partition (fun x -> same_proof sg e x) cnf) in
dups@(xdup cnf wit) in
xdup cnf wit
let find_witness prover polys1 =
try_any prover polys1
let witness_list_with_tags prover l =
let rec xwitness_list l =
match l with
| [] -> Some([])
| e::l ->
match find_witness prover (List.map fst e) with
| None -> None
| Some w ->
(match xwitness_list l with
| None -> None
| Some l -> Some (w::l)
) in
xwitness_list l
let witness_list_without_tags prover l =
let rec xwitness_list l =
match l with
| [] -> Some([])
| e::l ->
match find_witness prover e with
| None -> None
| Some w ->
(match xwitness_list l with
| None -> None
| Some l -> Some (w::l)
) in
xwitness_list l
let witness_list prover l =
let rec xwitness_list l =
match l with
| Micromega.Nil -> Some(Micromega.Nil)
| Micromega.Cons(e,l) ->
match find_witness prover e with
| None -> None
| Some w ->
(match xwitness_list l with
| None -> None
| Some l -> Some (Micromega.Cons(w,l))
) in
xwitness_list l
let is_singleton = function [] -> true | [e] -> true | _ -> false
let micromega_tauto negate normalise spec prover env polys1 polys2 gl =
let spec = Lazy.force spec in
let (ff,ids) =
List.fold_right
(fun (id,f) (cc,ids) ->
match f with
X _ -> (cc,ids)
| _ -> (I(tag_formula (Names.Name id) f,cc,none), id::ids))
polys1 (polys2,[]) in
let cnf_ff = cnf negate normalise ff in
if debug then
(Pp.pp (Pp.str "Formula....\n") ;
let formula_typ = (Term.mkApp( Lazy.force coq_Cstr,[| spec.coeff|])) in
let ff = dump_formula formula_typ
(dump_cstr spec.typ spec.dump_coeff) ff in
Pp.pp (Printer.prterm ff) ; Pp.pp_flush ()) ;
match witness_list_without_tags prover cnf_ff with
| None -> Tacticals.tclFAIL 0 (Pp.str "Cannot find witness") gl
Printf.printf " \nList % i " ( res ) ;
let (ff,res,ids) = (ff,res,List.map Term.mkVar ids) in
let res' = dump_ml_list (spec.proof_typ) spec.dump_proof res in
(Tacticals.tclTHENSEQ
[
Tactics.generalize ids;
micromega_order_change spec res'
(Term.mkApp(Lazy.force coq_list,[| spec.proof_typ|])) env ff ;
]) gl
let micromega_gen parse_arith negate normalise spec prover gl =
let concl = Tacmach.pf_concl gl in
let hyps = Tacmach.pf_hyps_types gl in
try
let (hyps,concl,env) = parse_goal parse_arith Env.empty hyps concl in
let env = Env.elements env in
micromega_tauto negate normalise spec prover env hyps concl gl
with
| Failure x -> flush stdout ; Pp.pp_flush () ;
Tacticals.tclFAIL 0 (Pp.str x) gl
| ParseError -> Tacticals.tclFAIL 0 (Pp.str "Bad logical fragment") gl
let lift_ratproof prover l =
match prover l with
| None -> None
| Some c -> Some (Mc.RatProof c)
type csdpcert = Sos.positivstellensatz option
type micromega_polys = (Micromega.q Mc.pExpr, Mc.op1) Micromega.prod list
type provername = string * int option
let call_csdpcert provername poly =
let tmp_to,ch_to = Filename.open_temp_file "csdpcert" ".in" in
let tmp_from = Filename.temp_file "csdpcert" ".out" in
output_value ch_to (provername,poly : provername * micromega_polys);
close_out ch_to;
let cmdname =
List.fold_left Filename.concat (Envars.coqlib ())
["contrib"; "micromega"; "csdpcert" ^ Coq_config.exec_extension] in
let c = Sys.command (cmdname ^" "^ tmp_to ^" "^ tmp_from) in
(try Sys.remove tmp_to with _ -> ());
if c <> 0 then Util.error ("Failed to call csdp certificate generator");
let ch_from = open_in tmp_from in
let cert = (input_value ch_from : csdpcert) in
close_in ch_from; Sys.remove tmp_from;
cert
let rec z_to_q_expr e =
match e with
| Mc.PEc z -> Mc.PEc {Mc.qnum = z ; Mc.qden = Mc.XH}
| Mc.PEX x -> Mc.PEX x
| Mc.PEadd(e1,e2) -> Mc.PEadd(z_to_q_expr e1, z_to_q_expr e2)
| Mc.PEsub(e1,e2) -> Mc.PEsub(z_to_q_expr e1, z_to_q_expr e2)
| Mc.PEmul(e1,e2) -> Mc.PEmul(z_to_q_expr e1, z_to_q_expr e2)
| Mc.PEopp(e) -> Mc.PEopp(z_to_q_expr e)
| Mc.PEpow(e,n) -> Mc.PEpow(z_to_q_expr e,n)
let call_csdpcert_q provername poly =
match call_csdpcert provername poly with
| None -> None
| Some cert ->
let cert = Certificate.q_cert_of_pos cert in
match Mc.qWeakChecker (CamlToCoq.list (fun x -> x) poly) cert with
| Mc.True -> Some cert
| Mc.False -> (print_string "buggy certificate" ; flush stdout) ;None
let call_csdpcert_z provername poly =
let l = List.map (fun (Mc.Pair(e,o)) -> (Mc.Pair(z_to_q_expr e,o))) poly in
match call_csdpcert provername l with
| None -> None
| Some cert ->
let cert = Certificate.z_cert_of_pos cert in
match Mc.zWeakChecker (CamlToCoq.list (fun x -> x) poly) cert with
| Mc.True -> Some cert
| Mc.False -> (print_string "buggy certificate" ; flush stdout) ;None
let psatzl_Z gl =
micromega_gen parse_zarith Mc.negate Mc.normalise zz_domain_spec
[lift_ratproof
(Certificate.linear_prover Certificate.z_spec), "fourier refutation" ] gl
let psatzl_Q gl =
micromega_gen parse_qarith Mc.cnf_negate Mc.cnf_normalise qq_domain_spec
[ Certificate.linear_prover Certificate.q_spec, "fourier refutation" ] gl
let psatz_Q i gl =
micromega_gen parse_qarith Mc.cnf_negate Mc.cnf_normalise qq_domain_spec
[ call_csdpcert_q ("real_nonlinear_prover", Some i), "fourier refutation" ] gl
let psatzl_R gl =
micromega_gen parse_rarith Mc.cnf_negate Mc.cnf_normalise rz_domain_spec
[ Certificate.linear_prover Certificate.z_spec, "fourier refutation" ] gl
let psatz_R i gl =
micromega_gen parse_rarith Mc.cnf_negate Mc.cnf_normalise rz_domain_spec
[ call_csdpcert_z ("real_nonlinear_prover", Some i), "fourier refutation" ] gl
let psatz_Z i gl =
micromega_gen parse_zarith Mc.negate Mc.normalise zz_domain_spec
[lift_ratproof (call_csdpcert_z ("real_nonlinear_prover",Some i)),
"fourier refutation" ] gl
let sos_Z gl =
micromega_gen parse_zarith Mc.negate Mc.normalise zz_domain_spec
[lift_ratproof (call_csdpcert_z ("pure_sos", None)), "pure sos refutation"] gl
let sos_Q gl =
micromega_gen parse_qarith Mc.cnf_negate Mc.cnf_normalise qq_domain_spec
[call_csdpcert_q ("pure_sos", None), "pure sos refutation"] gl
let sos_R gl =
micromega_gen parse_rarith Mc.cnf_negate Mc.cnf_normalise rz_domain_spec
[call_csdpcert_z ("pure_sos", None), "pure sos refutation"] gl
let xlia gl =
micromega_gen parse_zarith Mc.negate Mc.normalise zz_domain_spec
[Certificate.zlinear_prover, "zprover"] gl
|
60451302a301ccf745466e9897dc563304a6371cf15f33560d8758a553653482 | privet-kitty/cl-competitive | rolling-hash62.lisp | (defpackage :cp/test/rolling-hash62
(:use :cl :fiveam :cp/rolling-hash62)
(:import-from :cp/test/base #:base-suite))
(in-package :cp/test/rolling-hash62)
(in-suite base-suite)
(test rolling-hash62
(declare (notinline make-rhash rhash-query rhash-concat rhash-get-lcp rhash-vector-hash))
(let ((rhash1 (make-rhash "asddfddfd" :key (lambda (x) (+ 1 (char-code x))))))
(is (= (rhash-query rhash1 2 6) (rhash-query rhash1 5 9)))
(is (= (rhash-query rhash1 2 2) (rhash-query rhash1 5 5)))
(is (/= (rhash-query rhash1 2 6) (rhash-query rhash1 3 7)))
(is (= (rhash-concat rhash1 (rhash-query rhash1 0 2) (rhash-query rhash1 5 8) 3)
(rhash-query rhash1 0 5)))
(is (= (position +rhash-mod1+ *moduli-table*)
(position +rhash-base1+ *base-table*)))
(is (= (position +rhash-mod2+ *moduli-table*)
(position +rhash-base2+ *base-table*)))
(is (rhash-query (make-rhash "") 0 0))
;; hash code of a given sequence
(is (= (rhash-vector-hash "sddf" :key (lambda (x) (+ 1 (char-code x))))
(rhash-query rhash1 1 5)))
(is (/= (rhash-vector-hash "sddf")
(rhash-query rhash1 1 5)))
(is (zerop (rhash-vector-hash "" :key (lambda (x) (+ 1 (char-code x))))))
;; longest common prefix
(is (= 0 (rhash-get-lcp rhash1 0 rhash1 3)))
(is (= 1 (rhash-get-lcp rhash1 2 rhash1 3)))
(is (= 0 (rhash-get-lcp rhash1 2 rhash1 4)))
(is (= 4 (rhash-get-lcp rhash1 2 rhash1 5)))
(is (= 7 (rhash-get-lcp rhash1 2 rhash1 2))))
zero
(let ((rhash (make-rhash #*00000 :key #'identity)))
(loop for l from 0 to 5
do (loop for r from l to 5
do (is (zerop (rhash-query rhash l r)))))
(is (zerop (rhash-vector-hash #*0000 :key #'identity)))))
| null | https://raw.githubusercontent.com/privet-kitty/cl-competitive/4d1c601ff42b10773a5d0c5989b1234da5bb98b6/module/test/rolling-hash62.lisp | lisp | hash code of a given sequence
longest common prefix | (defpackage :cp/test/rolling-hash62
(:use :cl :fiveam :cp/rolling-hash62)
(:import-from :cp/test/base #:base-suite))
(in-package :cp/test/rolling-hash62)
(in-suite base-suite)
(test rolling-hash62
(declare (notinline make-rhash rhash-query rhash-concat rhash-get-lcp rhash-vector-hash))
(let ((rhash1 (make-rhash "asddfddfd" :key (lambda (x) (+ 1 (char-code x))))))
(is (= (rhash-query rhash1 2 6) (rhash-query rhash1 5 9)))
(is (= (rhash-query rhash1 2 2) (rhash-query rhash1 5 5)))
(is (/= (rhash-query rhash1 2 6) (rhash-query rhash1 3 7)))
(is (= (rhash-concat rhash1 (rhash-query rhash1 0 2) (rhash-query rhash1 5 8) 3)
(rhash-query rhash1 0 5)))
(is (= (position +rhash-mod1+ *moduli-table*)
(position +rhash-base1+ *base-table*)))
(is (= (position +rhash-mod2+ *moduli-table*)
(position +rhash-base2+ *base-table*)))
(is (rhash-query (make-rhash "") 0 0))
(is (= (rhash-vector-hash "sddf" :key (lambda (x) (+ 1 (char-code x))))
(rhash-query rhash1 1 5)))
(is (/= (rhash-vector-hash "sddf")
(rhash-query rhash1 1 5)))
(is (zerop (rhash-vector-hash "" :key (lambda (x) (+ 1 (char-code x))))))
(is (= 0 (rhash-get-lcp rhash1 0 rhash1 3)))
(is (= 1 (rhash-get-lcp rhash1 2 rhash1 3)))
(is (= 0 (rhash-get-lcp rhash1 2 rhash1 4)))
(is (= 4 (rhash-get-lcp rhash1 2 rhash1 5)))
(is (= 7 (rhash-get-lcp rhash1 2 rhash1 2))))
zero
(let ((rhash (make-rhash #*00000 :key #'identity)))
(loop for l from 0 to 5
do (loop for r from l to 5
do (is (zerop (rhash-query rhash l r)))))
(is (zerop (rhash-vector-hash #*0000 :key #'identity)))))
|
76cd49b1a3429e174745cdc91cf5ab1e1612d86be7799742c3b68f0e1d731d5e | rickeyski/slack-api | Utils.hs | module Web.Slack.Utils where
import Data.Char
toSnake :: String -> String
toSnake (a:b:c)
| isAlpha a && (isUpper b || isDigit b) = toLower a : '_' : toSnake (toLower b : c)
| otherwise = toLower a : toSnake (b:c)
toSnake [x] = [toLower x]
toSnake [] = []
toCamel :: String -> String
toCamel ('_':x:xs) = toUpper x : toCamel xs
toCamel (x:xs) = x : toCamel xs
toCamel [] = []
| null | https://raw.githubusercontent.com/rickeyski/slack-api/5f6659e09bce19fe0ca9dfce8743bec7de518d77/src/Web/Slack/Utils.hs | haskell | module Web.Slack.Utils where
import Data.Char
toSnake :: String -> String
toSnake (a:b:c)
| isAlpha a && (isUpper b || isDigit b) = toLower a : '_' : toSnake (toLower b : c)
| otherwise = toLower a : toSnake (b:c)
toSnake [x] = [toLower x]
toSnake [] = []
toCamel :: String -> String
toCamel ('_':x:xs) = toUpper x : toCamel xs
toCamel (x:xs) = x : toCamel xs
toCamel [] = []
| |
9c48bb7c1dc4f13799432e041955adb0ca05f5af5eadbb5286419721b3d92dec | rescript-lang/rescript-compiler | ext_pervasives.mli | Copyright ( C ) 2015 - 2016 Bloomberg Finance L.P.
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
* Extension to standard library [ ] module , safe to open
*)
external reraise : exn -> 'a = "%reraise"
val finally : 'a -> clean:('a -> unit) -> ('a -> 'b) -> 'b
(* val try_it : (unit -> 'a) -> unit *)
val with_file_as_chan : string -> (out_channel -> 'a) -> 'a
val max_int : int -> int -> int
val min_int : int -> int -> int
val max_int_option : int option -> int option -> int option
(* external id : 'a -> 'a = "%identity" *)
(** Copied from {!Btype.hash_variant}:
need sync up and add test case
*)
(* val hash_variant : string -> int *)
todo : string - > ' a
val nat_of_string_exn : string -> int
val parse_nat_of_string : string -> int ref -> int
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/e60482c6f6a69994907b9bd56e58ce87052e3659/jscomp/ext/ext_pervasives.mli | ocaml | val try_it : (unit -> 'a) -> unit
external id : 'a -> 'a = "%identity"
* Copied from {!Btype.hash_variant}:
need sync up and add test case
val hash_variant : string -> int | Copyright ( C ) 2015 - 2016 Bloomberg Finance L.P.
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
* Extension to standard library [ ] module , safe to open
*)
external reraise : exn -> 'a = "%reraise"
val finally : 'a -> clean:('a -> unit) -> ('a -> 'b) -> 'b
val with_file_as_chan : string -> (out_channel -> 'a) -> 'a
val max_int : int -> int -> int
val min_int : int -> int -> int
val max_int_option : int option -> int option -> int option
todo : string - > ' a
val nat_of_string_exn : string -> int
val parse_nat_of_string : string -> int ref -> int
|
d03db57ab1f65c35fc69f2f5929f4139e9e12426c00e4e5aec3823832532b5c5 | sullyj3/adventofcode2022 | Day14.hs | # OPTIONS_GHC -Wno - missing - signatures #
module Day14 (main) where
import AOC
import AOC.Parse
import AOC.Parsers
import PyF ( str )
-- |~) _ _ _. _ _
|~ ( _ || _ |(_|
-- _|
-- >>> parseInput $ exampleInput
parseInput :: Text -> [()]
parseInput = unsafeParse $ linesOf $ pure ()
-- |~) _ __|_ /~\ _ _
-- |~ (_|| | \_/| |(/_
--
> > > part1 . parseInput $ exampleInput
part1 :: a -> a
part1 = id
-- |~) _ __|_ ~|~ _
-- |~ (_|| | |VV(_)
--
> > > part2 . parseInput $ exampleInput
part2 :: a -> a
part2 = id
-- |\/| _ . _
-- | |(_||| |
--
main ∷ IO ()
main = do
-- other testing here
aocSinglePartMain "inputs/14.txt" exampleInput parseInput part1
-- aocMain "inputs/14.txt" Solution { parse=parseInput, part1=part1, part2=part2 }
-- (~ _ _ _ _ | _ . _ _ _|_
( _ > < ( _ || | ||_)|(/ ||_)|_||
-- | |
exampleInput :: Text
exampleInput = toText @String [str||]
| null | https://raw.githubusercontent.com/sullyj3/adventofcode2022/1df7ca92d6651753f1df5e98da1ed449a0858bf0/src/Day14.hs | haskell | |~) _ _ _. _ _
_|
>>> parseInput $ exampleInput
|~) _ __|_ /~\ _ _
|~ (_|| | \_/| |(/_
|~) _ __|_ ~|~ _
|~ (_|| | |VV(_)
|\/| _ . _
| |(_||| |
other testing here
aocMain "inputs/14.txt" Solution { parse=parseInput, part1=part1, part2=part2 }
(~ _ _ _ _ | _ . _ _ _|_
| | | # OPTIONS_GHC -Wno - missing - signatures #
module Day14 (main) where
import AOC
import AOC.Parse
import AOC.Parsers
import PyF ( str )
|~ ( _ || _ |(_|
parseInput :: Text -> [()]
parseInput = unsafeParse $ linesOf $ pure ()
> > > part1 . parseInput $ exampleInput
part1 :: a -> a
part1 = id
> > > part2 . parseInput $ exampleInput
part2 :: a -> a
part2 = id
main ∷ IO ()
main = do
aocSinglePartMain "inputs/14.txt" exampleInput parseInput part1
( _ > < ( _ || | ||_)|(/ ||_)|_||
exampleInput :: Text
exampleInput = toText @String [str||]
|
c842d13864f6019ff07884976a35289dc38eab64b90b78ee13cf75807bdd4040 | kowainik/tomland | Di.hs | module Test.Toml.Codec.Di
( diSpec
) where
import Control.Applicative ((<|>))
import Data.Text (Text)
import Hedgehog (Gen)
import Test.Hspec (Spec, describe)
import Test.Toml.Codec.Combinator.Common (codecRoundtrip)
import Toml.Codec.Di (dimatch)
import Toml.Codec.Types (TomlCodec)
import Toml.Type.Key (Key)
import qualified Hedgehog.Gen as Gen
import qualified Test.Toml.Gen as Gen
import qualified Toml.Codec as Toml
diSpec :: Spec
diSpec = describe "Codec.Di functions tests" $
describe "dimatch" $
codecRoundtrip "SumType" sumTypeExampleCodec genSumTypeExample
data SumType
= One Bool
| Two Int Text
| Three [Int]
deriving stock (Eq, Show)
matchOne :: SumType -> Maybe Bool
matchOne = \case
One b -> Just b
_ -> Nothing
matchTwo :: SumType -> Maybe (Int, Text)
matchTwo = \case
Two i t -> Just (i, t)
_ -> Nothing
matchThree :: SumType -> Maybe [Int]
matchThree = \case
Three l -> Just l
_ -> Nothing
sumTypeExampleCodec :: Key -> TomlCodec SumType
sumTypeExampleCodec _ =
dimatch matchOne One (Toml.bool "one")
<|> dimatch matchTwo (uncurry Two) (Toml.pair (Toml.int "two.a") (Toml.text "two.b"))
<|> dimatch matchThree Three (Toml.arrayOf Toml._Int "three")
genSumTypeExample :: Gen SumType
genSumTypeExample = Gen.choice
[ One <$> Gen.genBool
, Two <$> Gen.genInt <*> Gen.genText
, Three <$> Gen.genSmallList Gen.genInt
]
| null | https://raw.githubusercontent.com/kowainik/tomland/2b4bcc465b79873a61bccfc7131d423a9a0aec1d/test/Test/Toml/Codec/Di.hs | haskell | module Test.Toml.Codec.Di
( diSpec
) where
import Control.Applicative ((<|>))
import Data.Text (Text)
import Hedgehog (Gen)
import Test.Hspec (Spec, describe)
import Test.Toml.Codec.Combinator.Common (codecRoundtrip)
import Toml.Codec.Di (dimatch)
import Toml.Codec.Types (TomlCodec)
import Toml.Type.Key (Key)
import qualified Hedgehog.Gen as Gen
import qualified Test.Toml.Gen as Gen
import qualified Toml.Codec as Toml
diSpec :: Spec
diSpec = describe "Codec.Di functions tests" $
describe "dimatch" $
codecRoundtrip "SumType" sumTypeExampleCodec genSumTypeExample
data SumType
= One Bool
| Two Int Text
| Three [Int]
deriving stock (Eq, Show)
matchOne :: SumType -> Maybe Bool
matchOne = \case
One b -> Just b
_ -> Nothing
matchTwo :: SumType -> Maybe (Int, Text)
matchTwo = \case
Two i t -> Just (i, t)
_ -> Nothing
matchThree :: SumType -> Maybe [Int]
matchThree = \case
Three l -> Just l
_ -> Nothing
sumTypeExampleCodec :: Key -> TomlCodec SumType
sumTypeExampleCodec _ =
dimatch matchOne One (Toml.bool "one")
<|> dimatch matchTwo (uncurry Two) (Toml.pair (Toml.int "two.a") (Toml.text "two.b"))
<|> dimatch matchThree Three (Toml.arrayOf Toml._Int "three")
genSumTypeExample :: Gen SumType
genSumTypeExample = Gen.choice
[ One <$> Gen.genBool
, Two <$> Gen.genInt <*> Gen.genText
, Three <$> Gen.genSmallList Gen.genInt
]
| |
f5e999c43ed64b33b228248ba097ac7d137746a2d8a31d692e52b3c7709b98b8 | mfoemmel/erlang-otp | wxPaletteChangedEvent.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2008 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%% This file is generated DO NOT EDIT
%% @doc See external documentation: <a href="">wxPaletteChangedEvent</a>.
%% <dl><dt>Use {@link wxEvtHandler:connect/3.} with EventType:</dt>
%% <dd><em>palette_changed</em></dd></dl>
%% See also the message variant {@link wxEvtHandler:wxPaletteChanged(). #wxPaletteChanged{}} event record type.
%%
%% <p>This class is derived (and can use functions) from:
%% <br />{@link wxEvent}
%% </p>
%% @type wxPaletteChangedEvent(). An object reference, The representation is internal
%% and can be changed without notice. It can't be used for comparsion
%% stored on disc or distributed for use on other nodes.
-module(wxPaletteChangedEvent).
-include("wxe.hrl").
-export([getChangedWindow/1,setChangedWindow/2]).
%% inherited exports
-export([getId/1,getSkipped/1,getTimestamp/1,isCommandEvent/1,parent_class/1,
resumePropagation/2,shouldPropagate/1,skip/1,skip/2,stopPropagation/1]).
%% @hidden
parent_class(wxEvent) -> true;
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
%% @spec (This::wxPaletteChangedEvent(), Win::wxWindow:wxWindow()) -> ok
%% @doc See <a href="#wxpalettechangedeventsetchangedwindow">external documentation</a>.
setChangedWindow(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=WinT,ref=WinRef}) ->
?CLASS(ThisT,wxPaletteChangedEvent),
?CLASS(WinT,wxWindow),
wxe_util:cast(?wxPaletteChangedEvent_SetChangedWindow,
<<ThisRef:32/?UI,WinRef:32/?UI>>).
%% @spec (This::wxPaletteChangedEvent()) -> wxWindow:wxWindow()
%% @doc See <a href="#wxpalettechangedeventgetchangedwindow">external documentation</a>.
getChangedWindow(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxPaletteChangedEvent),
wxe_util:call(?wxPaletteChangedEvent_GetChangedWindow,
<<ThisRef:32/?UI>>).
%% From wxEvent
%% @hidden
stopPropagation(This) -> wxEvent:stopPropagation(This).
%% @hidden
skip(This, Options) -> wxEvent:skip(This, Options).
%% @hidden
skip(This) -> wxEvent:skip(This).
%% @hidden
shouldPropagate(This) -> wxEvent:shouldPropagate(This).
%% @hidden
resumePropagation(This,PropagationLevel) -> wxEvent:resumePropagation(This,PropagationLevel).
%% @hidden
isCommandEvent(This) -> wxEvent:isCommandEvent(This).
%% @hidden
getTimestamp(This) -> wxEvent:getTimestamp(This).
%% @hidden
getSkipped(This) -> wxEvent:getSkipped(This).
%% @hidden
getId(This) -> wxEvent:getId(This).
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/wx/src/gen/wxPaletteChangedEvent.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
This file is generated DO NOT EDIT
@doc See external documentation: <a href="">wxPaletteChangedEvent</a>.
<dl><dt>Use {@link wxEvtHandler:connect/3.} with EventType:</dt>
<dd><em>palette_changed</em></dd></dl>
See also the message variant {@link wxEvtHandler:wxPaletteChanged(). #wxPaletteChanged{}} event record type.
<p>This class is derived (and can use functions) from:
<br />{@link wxEvent}
</p>
@type wxPaletteChangedEvent(). An object reference, The representation is internal
and can be changed without notice. It can't be used for comparsion
stored on disc or distributed for use on other nodes.
inherited exports
@hidden
@spec (This::wxPaletteChangedEvent(), Win::wxWindow:wxWindow()) -> ok
@doc See <a href="#wxpalettechangedeventsetchangedwindow">external documentation</a>.
@spec (This::wxPaletteChangedEvent()) -> wxWindow:wxWindow()
@doc See <a href="#wxpalettechangedeventgetchangedwindow">external documentation</a>.
From wxEvent
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden | Copyright Ericsson AB 2008 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(wxPaletteChangedEvent).
-include("wxe.hrl").
-export([getChangedWindow/1,setChangedWindow/2]).
-export([getId/1,getSkipped/1,getTimestamp/1,isCommandEvent/1,parent_class/1,
resumePropagation/2,shouldPropagate/1,skip/1,skip/2,stopPropagation/1]).
parent_class(wxEvent) -> true;
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
setChangedWindow(#wx_ref{type=ThisT,ref=ThisRef},#wx_ref{type=WinT,ref=WinRef}) ->
?CLASS(ThisT,wxPaletteChangedEvent),
?CLASS(WinT,wxWindow),
wxe_util:cast(?wxPaletteChangedEvent_SetChangedWindow,
<<ThisRef:32/?UI,WinRef:32/?UI>>).
getChangedWindow(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxPaletteChangedEvent),
wxe_util:call(?wxPaletteChangedEvent_GetChangedWindow,
<<ThisRef:32/?UI>>).
stopPropagation(This) -> wxEvent:stopPropagation(This).
skip(This, Options) -> wxEvent:skip(This, Options).
skip(This) -> wxEvent:skip(This).
shouldPropagate(This) -> wxEvent:shouldPropagate(This).
resumePropagation(This,PropagationLevel) -> wxEvent:resumePropagation(This,PropagationLevel).
isCommandEvent(This) -> wxEvent:isCommandEvent(This).
getTimestamp(This) -> wxEvent:getTimestamp(This).
getSkipped(This) -> wxEvent:getSkipped(This).
getId(This) -> wxEvent:getId(This).
|
47725758a71b83b9420ed84c96aaf57b2e67369be4bb488df3ce035d4802c83b | cyga/real-world-haskell | MonadHandleIO.hs | file : ch15 / MonadHandleIO.hs
# LANGUAGE FunctionalDependencies , MultiParamTypeClasses #
import MonadHandle
import qualified System.IO
import System.IO (IOMode(..))
import Control.Monad.Trans (MonadIO(..), MonadTrans(..))
import System.Directory (removeFile)
import SafeHello
instance MonadHandle System.IO.Handle IO where
openFile = System.IO.openFile
hPutStr = System.IO.hPutStr
hClose = System.IO.hClose
hGetContents = System.IO.hGetContents
hPutStrLn = System.IO.hPutStrLn
file : ch15 / MonadHandleIO.hs
class (MonadHandle h m, MonadIO m) => MonadHandleIO h m | m -> h
instance MonadHandleIO System.IO.Handle IO
tidierHello :: (MonadHandleIO h m) => FilePath -> m ()
tidierHello path = do
safeHello path
liftIO (removeFile path)
file : ch15 / MonadHandleIO.hs
tidyHello :: (MonadIO m, MonadHandle h m) => FilePath -> m ()
tidyHello path = do
safeHello path
liftIO (removeFile path)
| null | https://raw.githubusercontent.com/cyga/real-world-haskell/4ed581af5b96c6ef03f20d763b8de26be69d43d9/ch15/MonadHandleIO.hs | haskell | file : ch15 / MonadHandleIO.hs
# LANGUAGE FunctionalDependencies , MultiParamTypeClasses #
import MonadHandle
import qualified System.IO
import System.IO (IOMode(..))
import Control.Monad.Trans (MonadIO(..), MonadTrans(..))
import System.Directory (removeFile)
import SafeHello
instance MonadHandle System.IO.Handle IO where
openFile = System.IO.openFile
hPutStr = System.IO.hPutStr
hClose = System.IO.hClose
hGetContents = System.IO.hGetContents
hPutStrLn = System.IO.hPutStrLn
file : ch15 / MonadHandleIO.hs
class (MonadHandle h m, MonadIO m) => MonadHandleIO h m | m -> h
instance MonadHandleIO System.IO.Handle IO
tidierHello :: (MonadHandleIO h m) => FilePath -> m ()
tidierHello path = do
safeHello path
liftIO (removeFile path)
file : ch15 / MonadHandleIO.hs
tidyHello :: (MonadIO m, MonadHandle h m) => FilePath -> m ()
tidyHello path = do
safeHello path
liftIO (removeFile path)
| |
790287c506dfc42afdec679f7e6e9bd149401c8b97fdd9178bdb3fa8bbd2b7ab | grin-compiler/ghc-wpc-sample-programs | Regressions.hs | module Main where
import Control.Applicative ((<$>))
import Control.Monad (replicateM)
import qualified Data.HashMap.Strict as HM
import Data.List (delete)
import Data.Maybe
import Test.HUnit (Assertion, assert)
import Test.Framework (Test, defaultMain)
import Test.Framework.Providers.HUnit (testCase)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck
issue32 :: Assertion
issue32 = assert $ isJust $ HM.lookup 7 m'
where
ns = [0..16] :: [Int]
m = HM.fromList (zip ns (repeat []))
m' = HM.delete 10 m
------------------------------------------------------------------------
Issue # 39
First regression
issue39 :: Assertion
issue39 = assert $ hm1 == hm2
where
hm1 = HM.fromList ([a, b] `zip` [1, 1 :: Int ..])
hm2 = HM.fromList ([b, a] `zip` [1, 1 :: Int ..])
a = (1, -1) :: (Int, Int)
b = (-1, 1) :: (Int, Int)
Second regression
newtype Keys = Keys [Int]
deriving Show
instance Arbitrary Keys where
arbitrary = sized $ \l -> do
pis <- replicateM (l+1) positiveInt
return (Keys $ prefixSum pis)
shrink (Keys ls) =
let l = length ls
in if l == 1
then []
else [ Keys (dropAt i ls) | i <- [0..l-1] ]
positiveInt :: Gen Int
positiveInt = (+1) . abs <$> arbitrary
prefixSum :: [Int] -> [Int]
prefixSum = loop 0
where
loop _ [] = []
loop prefix (l:ls) = let n = l + prefix
in n : loop n ls
dropAt :: Int -> [a] -> [a]
dropAt _ [] = []
dropAt i (l:ls) | i == 0 = ls
| otherwise = l : dropAt (i-1) ls
propEqAfterDelete :: Keys -> Bool
propEqAfterDelete (Keys keys) =
let keyMap = mapFromKeys keys
k = head keys
in HM.delete k keyMap == mapFromKeys (delete k keys)
mapFromKeys :: [Int] -> HM.HashMap Int ()
mapFromKeys keys = HM.fromList (zip keys (repeat ()))
------------------------------------------------------------------------
-- * Test list
tests :: [Test]
tests =
[
testCase "issue32" issue32
, testCase "issue39a" issue39
, testProperty "issue39b" propEqAfterDelete
]
------------------------------------------------------------------------
-- * Test harness
main :: IO ()
main = defaultMain tests
| null | https://raw.githubusercontent.com/grin-compiler/ghc-wpc-sample-programs/0e3a9b8b7cc3fa0da7c77fb7588dd4830fb087f7/unordered-containers-0.2.10.0/tests/Regressions.hs | haskell | ----------------------------------------------------------------------
----------------------------------------------------------------------
* Test list
----------------------------------------------------------------------
* Test harness | module Main where
import Control.Applicative ((<$>))
import Control.Monad (replicateM)
import qualified Data.HashMap.Strict as HM
import Data.List (delete)
import Data.Maybe
import Test.HUnit (Assertion, assert)
import Test.Framework (Test, defaultMain)
import Test.Framework.Providers.HUnit (testCase)
import Test.Framework.Providers.QuickCheck2 (testProperty)
import Test.QuickCheck
issue32 :: Assertion
issue32 = assert $ isJust $ HM.lookup 7 m'
where
ns = [0..16] :: [Int]
m = HM.fromList (zip ns (repeat []))
m' = HM.delete 10 m
Issue # 39
First regression
issue39 :: Assertion
issue39 = assert $ hm1 == hm2
where
hm1 = HM.fromList ([a, b] `zip` [1, 1 :: Int ..])
hm2 = HM.fromList ([b, a] `zip` [1, 1 :: Int ..])
a = (1, -1) :: (Int, Int)
b = (-1, 1) :: (Int, Int)
Second regression
newtype Keys = Keys [Int]
deriving Show
instance Arbitrary Keys where
arbitrary = sized $ \l -> do
pis <- replicateM (l+1) positiveInt
return (Keys $ prefixSum pis)
shrink (Keys ls) =
let l = length ls
in if l == 1
then []
else [ Keys (dropAt i ls) | i <- [0..l-1] ]
positiveInt :: Gen Int
positiveInt = (+1) . abs <$> arbitrary
prefixSum :: [Int] -> [Int]
prefixSum = loop 0
where
loop _ [] = []
loop prefix (l:ls) = let n = l + prefix
in n : loop n ls
dropAt :: Int -> [a] -> [a]
dropAt _ [] = []
dropAt i (l:ls) | i == 0 = ls
| otherwise = l : dropAt (i-1) ls
propEqAfterDelete :: Keys -> Bool
propEqAfterDelete (Keys keys) =
let keyMap = mapFromKeys keys
k = head keys
in HM.delete k keyMap == mapFromKeys (delete k keys)
mapFromKeys :: [Int] -> HM.HashMap Int ()
mapFromKeys keys = HM.fromList (zip keys (repeat ()))
tests :: [Test]
tests =
[
testCase "issue32" issue32
, testCase "issue39a" issue39
, testProperty "issue39b" propEqAfterDelete
]
main :: IO ()
main = defaultMain tests
|
d4b288b6ed7074b8aa7ab49576b74ca065cde2bd77fe5f4c524d00dba133a2e5 | NicklasBoto/funQ | Gates.hs |
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE BlockArguments #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE DataKinds #-}
# OPTIONS_GHC -fplugin GHC.TypeLits . KnownNat . Solver #
# OPTIONS_HADDOCK not - home #
{-|
Module : Gates
Description : Gate library
Stability : experimental
Module containing unitary gates and their matrix representations.
-}
module Gates where
import QData
import Numeric.LinearAlgebra.Static as V hiding ( outer )
import Numeric.LinearAlgebra ( flatten, outer, kronecker, ident, toList )
import qualified Numeric.LinearAlgebra as LA ( (><) )
import GHC.TypeLits ( Nat, type (+), type (^), KnownNat, natVal )
import Data.Proxy ( Proxy(..) )
import Prelude hiding ( id )
|
--
\ [ \text{X } = \begin{bmatrix }
-- 0 & 1 \\
-- 1 & 0
-- \end{bmatrix} \]
--
-- 
pauliX :: Gate 1
pauliX = fromMatrix $ fromList
[ 0 , 1
, 1 , 0 ]
-- | Pauli-Y gate
--
\ [ \text{Y } = \begin{bmatrix }
-- 0 & -i \\
-- i & 0
-- \end{bmatrix} \]
--
! [ pauliY](images / y. PNG )
pauliY :: Gate 1
pauliY = fromMatrix $ fromList
[ 0 , -i
, i , 0 ]
-- | Pauli-Z gate
--
\ [ \text{Z } = \begin{bmatrix }
-- 1 & 0 \\
-- 0 & -1
-- \end{bmatrix} \]
--
-- 
pauliZ :: Gate 1
pauliZ = fromMatrix $ fromList
[ 1 , 0
, 0 , -1 ]
-- | Hadamard gate
--
\ [ \text{X } = \frac1{\sqrt2 } \begin{bmatrix }
-- 0 & 1 \\
-- 1 & 0
-- \end{bmatrix} \]
--
-- 
hadamard :: Gate 1
hadamard = fromMatrix $ sqrt 0.5 * fromList
[ 1 , 1
, 1 , -1 ]
-- | Phase gate
--
\ [ \text{S } = \begin{bmatrix }
-- 1 & 0 \\
-- 0 & i
-- \end{bmatrix} \]
--
-- 
phase :: Gate 1
phase = fromMatrix $ fromList
[ 1 , 0
, 0 , i ]
-- | Pi/8 gate (T gate)
--
\ [ \text{T } = \begin{bmatrix }
-- 1 & 0 \\
-- 0 & e^{i\pi/4}
-- \end{bmatrix} \]
--
-- 
phasePi8 :: Gate 1
phasePi8 = fromMatrix $ fromList
[ 1 , 0
, 0 , p ]
where p = exp (i * pi / 8)
-- | CNOT gate
--
\ [ \text{CNOT } = \begin{bmatrix }
1 & 0 & 0 & 0 \\
0 & 1 & 0 & 0 \\
0 & 0 & 0 & 1 \\
0 & 0 & 1 & 0
-- \end{bmatrix}
-- \]
--
-- 
cnot :: Gate 2
cnot = fromMatrix $ fromList
[ 1, 0, 0, 0
, 0, 1, 0, 0
, 0, 0, 0, 1
, 0, 0, 1, 0 ]
-- | SWAP gate
--
\ [ \text{SWAP } = \begin{bmatrix }
1 & 0 & 0 & 0 \\
0 & 0 & 1 & 0 \\
0 & 1 & 0 & 0 \\
0 & 0 & 0 & 1
-- \end{bmatrix}
-- \]
--
! [ swap](images / swap . PNG )
swap :: Gate 2
swap = fromMatrix $ fromList
[ 1, 0, 0, 0
, 0, 0, 1, 0
, 0, 1, 0, 0
, 0, 0, 0, 1 ]
-- | Toffoli gate
--
\ [ \begin{bmatrix }
1 & 0 & 0 & 0 & 0 & 0 & 0 & 0 \\
0 & 1 & 0 & 0 & 0 & 0 & 0 & 0 \\
0 & 0 & 1 & 0 & 0 & 0 & 0 & 0 \\
0 & 0 & 0 & 1 & 0 & 0 & 0 & 0 \\
0 & 0 & 0 & 0 & 1 & 0 & 0 & 0 \\
0 & 0 & 0 & 0 & 0 & 1 & 0 & 0 \\
0 & 0 & 0 & 0 & 0 & 0 & 0 & 1 \\
0 & 0 & 0 & 0 & 0 & 0 & 1 & 0
-- \end{bmatrix} \]
--
-- 
toffoli :: Gate 3
toffoli = fromMatrix $ fromList
[ 1, 0, 0, 0, 0, 0, 0, 0
, 0, 1, 0, 0, 0, 0, 0, 0
, 0, 0, 1, 0, 0, 0, 0, 0
, 0, 0, 0, 1, 0, 0, 0, 0
, 0, 0, 0, 0, 1, 0, 0, 0
, 0, 0, 0, 0, 0, 1, 0, 0
, 0, 0, 0, 0, 0, 0, 0, 1
, 0, 0, 0, 0, 0, 0, 1, 0 ]
-- | The identity gate
identity :: forall (n :: Nat) . KnownNat n => Gate n
identity = fromMatrix let dim = natVal (Proxy :: Proxy n)
in case create $ ident $ fromInteger $ 2^dim of
Just i -> i
Nothing -> errorWithoutStackTrace
"Could not deduce matrix dimensions"
-- | Control a gate with a classical bit
controlbit :: KnownNat n => Gate n -> Bit 1 -> Gate n
controlbit g 1 = g
controlbit g 0 = identity
beamsplitter :: Gate 1
beamsplitter = fromMatrix $ sqrt 0.5 * fromList
[ 1 , i
, i , 1 ] | null | https://raw.githubusercontent.com/NicklasBoto/funQ/9444da05273be215a66044b976d031229d8832fc/legacy/Gates.hs | haskell | # LANGUAGE ScopedTypeVariables #
# LANGUAGE BlockArguments #
# LANGUAGE TypeFamilies #
# LANGUAGE Rank2Types #
# LANGUAGE DataKinds #
|
Module : Gates
Description : Gate library
Stability : experimental
Module containing unitary gates and their matrix representations.
0 & 1 \\
1 & 0
\end{bmatrix} \]

| Pauli-Y gate
0 & -i \\
i & 0
\end{bmatrix} \]
| Pauli-Z gate
1 & 0 \\
0 & -1
\end{bmatrix} \]

| Hadamard gate
0 & 1 \\
1 & 0
\end{bmatrix} \]

| Phase gate
1 & 0 \\
0 & i
\end{bmatrix} \]

| Pi/8 gate (T gate)
1 & 0 \\
0 & e^{i\pi/4}
\end{bmatrix} \]

| CNOT gate
\end{bmatrix}
\]

| SWAP gate
\end{bmatrix}
\]
| Toffoli gate
\end{bmatrix} \]

| The identity gate
| Control a gate with a classical bit |
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fplugin GHC.TypeLits . KnownNat . Solver #
# OPTIONS_HADDOCK not - home #
module Gates where
import QData
import Numeric.LinearAlgebra.Static as V hiding ( outer )
import Numeric.LinearAlgebra ( flatten, outer, kronecker, ident, toList )
import qualified Numeric.LinearAlgebra as LA ( (><) )
import GHC.TypeLits ( Nat, type (+), type (^), KnownNat, natVal )
import Data.Proxy ( Proxy(..) )
import Prelude hiding ( id )
|
\ [ \text{X } = \begin{bmatrix }
pauliX :: Gate 1
pauliX = fromMatrix $ fromList
[ 0 , 1
, 1 , 0 ]
\ [ \text{Y } = \begin{bmatrix }
! [ pauliY](images / y. PNG )
pauliY :: Gate 1
pauliY = fromMatrix $ fromList
[ 0 , -i
, i , 0 ]
\ [ \text{Z } = \begin{bmatrix }
pauliZ :: Gate 1
pauliZ = fromMatrix $ fromList
[ 1 , 0
, 0 , -1 ]
\ [ \text{X } = \frac1{\sqrt2 } \begin{bmatrix }
hadamard :: Gate 1
hadamard = fromMatrix $ sqrt 0.5 * fromList
[ 1 , 1
, 1 , -1 ]
\ [ \text{S } = \begin{bmatrix }
phase :: Gate 1
phase = fromMatrix $ fromList
[ 1 , 0
, 0 , i ]
\ [ \text{T } = \begin{bmatrix }
phasePi8 :: Gate 1
phasePi8 = fromMatrix $ fromList
[ 1 , 0
, 0 , p ]
where p = exp (i * pi / 8)
\ [ \text{CNOT } = \begin{bmatrix }
1 & 0 & 0 & 0 \\
0 & 1 & 0 & 0 \\
0 & 0 & 0 & 1 \\
0 & 0 & 1 & 0
cnot :: Gate 2
cnot = fromMatrix $ fromList
[ 1, 0, 0, 0
, 0, 1, 0, 0
, 0, 0, 0, 1
, 0, 0, 1, 0 ]
\ [ \text{SWAP } = \begin{bmatrix }
1 & 0 & 0 & 0 \\
0 & 0 & 1 & 0 \\
0 & 1 & 0 & 0 \\
0 & 0 & 0 & 1
! [ swap](images / swap . PNG )
swap :: Gate 2
swap = fromMatrix $ fromList
[ 1, 0, 0, 0
, 0, 0, 1, 0
, 0, 1, 0, 0
, 0, 0, 0, 1 ]
\ [ \begin{bmatrix }
1 & 0 & 0 & 0 & 0 & 0 & 0 & 0 \\
0 & 1 & 0 & 0 & 0 & 0 & 0 & 0 \\
0 & 0 & 1 & 0 & 0 & 0 & 0 & 0 \\
0 & 0 & 0 & 1 & 0 & 0 & 0 & 0 \\
0 & 0 & 0 & 0 & 1 & 0 & 0 & 0 \\
0 & 0 & 0 & 0 & 0 & 1 & 0 & 0 \\
0 & 0 & 0 & 0 & 0 & 0 & 0 & 1 \\
0 & 0 & 0 & 0 & 0 & 0 & 1 & 0
toffoli :: Gate 3
toffoli = fromMatrix $ fromList
[ 1, 0, 0, 0, 0, 0, 0, 0
, 0, 1, 0, 0, 0, 0, 0, 0
, 0, 0, 1, 0, 0, 0, 0, 0
, 0, 0, 0, 1, 0, 0, 0, 0
, 0, 0, 0, 0, 1, 0, 0, 0
, 0, 0, 0, 0, 0, 1, 0, 0
, 0, 0, 0, 0, 0, 0, 0, 1
, 0, 0, 0, 0, 0, 0, 1, 0 ]
identity :: forall (n :: Nat) . KnownNat n => Gate n
identity = fromMatrix let dim = natVal (Proxy :: Proxy n)
in case create $ ident $ fromInteger $ 2^dim of
Just i -> i
Nothing -> errorWithoutStackTrace
"Could not deduce matrix dimensions"
controlbit :: KnownNat n => Gate n -> Bit 1 -> Gate n
controlbit g 1 = g
controlbit g 0 = identity
beamsplitter :: Gate 1
beamsplitter = fromMatrix $ sqrt 0.5 * fromList
[ 1 , i
, i , 1 ] |
cf8faf36d717616e0003ef81b9f6ccd219db455c20051c9184cf6a059ea197aa | fredlund/McErlang | stop_after_relevant_order.erl | @author
2006 - 2009
%% @doc
@private
-module(stop_after_relevant_order).
-language(erlang).
-export([init/1,stateChange/3,monitorType/0]).
-include("state.hrl").
-include("process.hrl").
-include("node.hrl").
-include("stackEntry.hrl").
-behaviour(mce_behav_monitor).
%% @doc Kind of monitor: a safety monitor (property violations are detected
%% on finite prefixes of the execution).
monitorType() ->
  safety.
%% @doc Initialise the monitor state: an orddict mapping a floor to the list
%% of outstanding stop orders for that floor. The initial program state is
%% not needed, hence the underscore (fixes an unused-variable warning).
init(_State) ->
  {ok, orddict:new()}.
%% @doc Process every action of the last transition, threading the order book
%% through the fold. Bug fix: the fold previously updated the *initial*
%% MonState in each branch instead of the accumulator CollectedState, so
%% all but the last order recorded within a single transition were dropped.
stateChange(_, MonState, Stack) ->
  try
    lists:foldl
      (fun (Action, CollectedState) ->
         case interpret_action(Action) of
           {f_button, Floor} ->
             %% Floor button: any elevator may serve this order.
             orddict:append(Floor, any, CollectedState);
           {e_button, Elevator, Floor} ->
             %% In-elevator button: only that elevator may serve it.
             orddict:append(Floor, Elevator, CollectedState);
           {stopped_at, Elevator, Floor} ->
             %% Throws {no_stop_order, ...} when the stop was not ordered.
             has_permission_to_stop(Elevator, Floor, CollectedState);
           _ ->
             CollectedState
         end
       end, MonState, actions(Stack)) of
    NewMonState -> {ok, NewMonState}
  catch Error -> {failed_monitor, Error} end.
%% @doc Check that [Elevator] stopping at [Floor] was ordered: consume a
%% matching order (a specific order for this elevator is preferred over a
%% generic 'any' order) or throw {no_stop_order, ...} when none exists.
has_permission_to_stop(Elevator, Floor, MonState) ->
  case orddict:find(Floor, MonState) of
    {ok, Orders} ->
      ConsumedOrder =
        case {lists:member(Elevator, Orders), lists:member(any, Orders)} of
          {true, _} -> Elevator;
          {false, true} -> any;
          {false, false} -> throw({no_stop_order, Elevator, Floor, MonState})
        end,
      orddict:store(Floor, lists:delete(ConsumedOrder, Orders), MonState);
    error ->
      throw({no_stop_order, Elevator, Floor, MonState})
  end.
%% @doc Classify a McErlang action: for a send action carrying a
%% {notify, Msg} message, return Msg (callers match on the
%% f_button/e_button/stopped_at tuples); anything else maps to 'unknown'.
interpret_action(Action) ->
  try
    %% The two pattern matches act as assertions; any mismatch (or a
    %% failure inside mce_erl_actions) falls through to 'unknown'.
    send = mce_erl_actions:type(Action),
    {notify,Msg} = mce_erl_actions:get_send_msg(Action),
    Msg
  catch _:_ -> unknown end.
%% @doc Actions of the most recent transition: read from the top entry of
%% the McErlang execution stack.
actions(Stack) ->
  {Entry,_} = mce_behav_stackOps:pop(Stack),
  Entry#stackEntry.actions.
| null | https://raw.githubusercontent.com/fredlund/McErlang/25b38a38a729fdb3c3d2afb9be016bbb14237792/examples/Elevator/src/stop_after_relevant_order.erl | erlang | @doc | @author
2006 - 2009
@private
-module(stop_after_relevant_order).
-language(erlang).
-export([init/1,stateChange/3,monitorType/0]).
-include("state.hrl").
-include("process.hrl").
-include("node.hrl").
-include("stackEntry.hrl").
-behaviour(mce_behav_monitor).
monitorType() ->
safety.
init(State) ->
{ok,orddict:new()}.
stateChange(_,MonState,Stack) ->
try
lists:foldl
(fun (Action,CollectedState) ->
case interpret_action(Action) of
{f_button,Floor} ->
orddict:append(Floor,any,MonState);
{e_button,Elevator,Floor} ->
orddict:append(Floor,Elevator,MonState);
{stopped_at,Elevator,Floor} ->
has_permission_to_stop(Elevator,Floor,MonState);
_ ->
CollectedState
end
end, MonState, actions(Stack)) of NewMonState -> {ok,NewMonState}
catch Error -> {failed_monitor,Error} end.
has_permission_to_stop(Elevator,Floor,MonState) ->
case orddict:find(Floor,MonState) of
error -> throw({no_stop_order,Elevator,Floor,MonState});
{ok,Orders} ->
case lists:member(Elevator,Orders) of
true -> orddict:store(Floor,lists:delete(Elevator,Orders),MonState);
false ->
case lists:member(any,Orders) of
true -> orddict:store(Floor,lists:delete(any,Orders),MonState);
false -> throw({no_stop_order,Elevator,Floor,MonState})
end
end
end.
interpret_action(Action) ->
try
send = mce_erl_actions:type(Action),
{notify,Msg} = mce_erl_actions:get_send_msg(Action),
Msg
catch _:_ -> unknown end.
actions(Stack) ->
{Entry,_} = mce_behav_stackOps:pop(Stack),
Entry#stackEntry.actions.
|
b9a99a56730e1fa18f730d9f998dd4f92a9d65ee3744850033072dcb254b9d35 | florence/cover | provide.rkt | #lang racket/base
;; Export the constant and the tt struct (constructor, predicate, accessors).
(provide test
         (struct-out tt))
;; Sample binding.
(define test 5)
;; Three-field transparent struct: prints its fields and supports equal?.
(struct tt (a b c) #:transparent)
| null | https://raw.githubusercontent.com/florence/cover/bc17e4e22d47b1da91ddaa5eafefe28f4675e85c/cover-test/cover/tests/provide.rkt | racket | #lang racket/base
(provide test
(struct-out tt))
(define test 5)
(struct tt (a b c) #:transparent)
| |
7b5b3fdb0988efa17fda6f3108b45a72b3060ecc6dd437b548c3a9bf5e655264 | anurudhp/CPHaskell | 1070.hs | -- /
import Control.Arrow ((>>>))
import Data.Maybe (fromMaybe)
-- | Read n (first whitespace-separated token of stdin) and print the
-- permutation space-separated, or "NO SOLUTION" when none exists.
main :: IO ()
main =
  interact $
    words >>>
    head >>>
    read >>> solve >>> fmap (unwords . map show) >>> fromMaybe "NO SOLUTION"
-- | Build a "beautiful" permutation of 1..n: no two adjacent values differ
-- by exactly 1. Impossible for n = 2 and n = 3; for n >= 4 the fixed core
-- [3,1,4,2] is sandwiched between the remaining odds and evens.
solve :: Int -> Maybe [Int]
solve n
  | n == 1    = Just [1]
  | n <= 3    = Nothing
  | otherwise = Just (highOdds ++ [3, 1, 4, 2] ++ highEvens)
  where
    highOdds  = [k | k <- [5 .. n], odd k]
    highEvens = [k | k <- [5 .. n], even k]
| null | https://raw.githubusercontent.com/anurudhp/CPHaskell/01ae8dde6aab4f6ddfebd122ded0b42779dd16f1/contests/cses/1070.hs | haskell | / | import Control.Arrow ((>>>))
import Data.Maybe (fromMaybe)
main :: IO ()
main =
interact $
words >>>
head >>>
read >>> solve >>> fmap (unwords . map show) >>> fromMaybe "NO SOLUTION"
solve :: Int -> Maybe [Int]
solve n
| n == 1 = Just [1]
| n <= 3 = Nothing
| otherwise =
Just $ filter odd [5 .. n] ++ [3, 1, 4, 2] ++ filter even [5 .. n]
|
05d7f24aee5ffc8e5612fefbc80e271fb52ee91de5b8b3a0a8526cae7caad7b5 | Calsign/p5ml | math.ml |
(** Math utilities modelled on the Processing API; functions come in [int]
    and float ([*f]-suffixed) flavours. *)
module Math = struct
  (* Constants. *)
  let pi = 2. *. Stdlib.asin 1.
  let half_pi = pi /. 2.
  let two_pi = pi *. 2.
  let e = Stdlib.exp 1.

  (* Absolute value and rounding towards +/- infinity. *)
  let abs = Stdlib.abs
  let absf = Stdlib.abs_float
  let ceil v = Stdlib.ceil v |> int_of_float
  let ceilf = Stdlib.ceil
  let floor v = Stdlib.floor v |> int_of_float
  let floorf = Stdlib.floor

  (** Clamp [v] into the inclusive range [lower, upper]. *)
  let constrain (v : int) lower upper =
    Stdlib.max v lower |> Stdlib.min upper
  let constrainf (v : float) lower upper =
    Stdlib.max v lower |> Stdlib.min upper

  (** Euclidean distance between (x1, y1) and (x2, y2). *)
  let distf x1 y1 x2 y2 = sqrt ((x2 -. x1) ** 2. +. (y2 -. y1) ** 2.)
  let dist x1 y1 x2 y2 =
    distf (float_of_int x1) (float_of_int y1) (float_of_int x2) (float_of_int y2)
  let mag x y = dist x y 0 0
  let magf x y = distf x y 0. 0.

  (** Linear interpolation: [lower] at [amt = 0.], [upper] at [amt = 1.].
      Fix: the previous version omitted the [lower +.] offset and returned
      only the scaled delta. *)
  let lerpf lower upper amt = lower +. ((upper -. lower) *. amt)
  let lerp lower upper amt =
    lerpf (float_of_int lower) (float_of_int upper) amt |> int_of_float

  (** Logarithm of [v] in base [base]. *)
  let log base v = Stdlib.log v /. Stdlib.log base

  (** Re-map [v] from [from_lower, from_upper] to [to_lower, to_upper].
      Fix: the previous version normalised with [v /. (from_lower -. from_upper)]
      — wrong sign and missing [-. from_lower] offset. *)
  let mapf v from_lower from_upper to_lower to_upper =
    lerpf to_lower to_upper ((v -. from_lower) /. (from_upper -. from_lower))
  let map v from_lower from_upper to_lower to_upper =
    mapf (float_of_int v) (float_of_int from_lower) (float_of_int from_upper)
      (float_of_int to_lower) (float_of_int to_upper)
    |> int_of_float

  (* Monomorphic min/max. *)
  let max (a : int) (b : int) = Stdlib.max a b
  let maxf (a : float) (b : float) = Stdlib.max a b
  let min (a : int) (b : int) = Stdlib.min a b
  let minf (a : float) (b : float) = Stdlib.min a b

  (** Normalise [v] from [lower, upper] into [0., 1.]. *)
  let normf v lower upper = mapf v lower upper 0. 1.
  let norm v lower upper =
    normf (float_of_int v) (float_of_int lower) (float_of_int upper)

  (** Round half away from zero. *)
  let round v =
    Stdlib.floor (v +. (if v >= 0. then 0.5 else -0.5)) |> int_of_float

  (* Re-exported float functions. *)
  let sqrt = Stdlib.sqrt
  let acos = Stdlib.acos
  let asin = Stdlib.asin
  let atan = Stdlib.atan
  let atan2 = Stdlib.atan2
  let cos = Stdlib.cos
  let sin = Stdlib.sin
  let tan = Stdlib.tan

  (* Angle conversions and arithmetic on angles, in radians. *)
  let degrees r = r *. 180. /. pi
  let radians d = d *. pi /. 180.
  let angle_avg a b = atan2 (sin a +. sin b) (cos a +. cos b)
  let angle_sum a b = mod_float (a +. b) (pi *. 2.)
  let angle_diff a b = atan2 (sin (a -. b)) (cos (a -. b))

  (* Randomness; the generator is seeded once at module initialisation. *)
  let () = Random.self_init ()
  let random_int ?(lower_bound = 0) bound =
    Random.int (bound - lower_bound) + lower_bound
  let random_float ?(lower_bound = 0.) bound =
    Random.float (bound -. lower_bound) +. lower_bound
  let random_bool () = Random.bool ()
end
(** Prefix shorthand for [float_of_int]. *)
let (~.) = float_of_int
(** Minimal 2D vector library (Processing-style PVector API) over float pairs. *)
module Vector = struct
  (* A vector is an immutable (x, y) pair. *)
  type t = float * float

  (* Constructors. *)
  let create x y = (x, y)
  let of_tuple (x, y) = create x y
  (* Unit vector pointing in direction [theta] (radians). *)
  let of_angle theta = create (Math.cos theta) (Math.sin theta)

  (* Component accessors: [~<] is x, [~>] is y. *)
  let (~<) (x, y) = x
  let (~>) (x, y) = y

  (* Magnitude, and its square (avoids the sqrt when comparing lengths). *)
  let mag_sq (x, y) = x ** 2. +. y ** 2.
  let mag vec = mag_sq vec |> Math.sqrt

  (* Componentwise arithmetic and scalar scaling. *)
  let add (x1, y1) (x2, y2) = create (x1 +. x2) (y1 +. y2)
  let sub (x1, y1) (x2, y2) = create (x1 -. x2) (y1 -. y2)
  let mult (x, y) scalar = create (x *. scalar) (y *. scalar)
  let div vec scalar = mult vec (1. /. scalar)

  let dist (x1, y1) (x2, y2) = Math.distf x1 y1 x2 y2
  let dot (x1, y1) (x2, y2) = x1 *. x2 +. y1 *. y2

  (* Unit vector in the same direction; ill-defined for the zero vector. *)
  let norm vec = div vec (mag vec)
  let with_mag vec scalar = mult (norm vec) scalar
  (* Cap the magnitude at [lim], keeping the direction. *)
  let limit vec lim =
    let curr_mag = mag vec
    in if curr_mag > lim then with_mag vec lim else vec

  (* Angle w.r.t. the positive x axis, in radians. *)
  let heading (x, y) = Math.atan2 y x
  (* Counter-clockwise rotation by [theta] radians. *)
  let rotate (x, y) theta =
    create (x *. (Math.cos theta) -. y *. (Math.sin theta))
      (x *. (Math.sin theta) +. y *. (Math.cos theta))
  (* Componentwise interpolation; delegates to Math.lerpf. *)
  let lerp (x1, y1) (x2, y2) amt =
    create (Math.lerpf x1 x2 amt) (Math.lerpf y1 y2 amt)
  let angle_between vec1 vec2 =
    Math.acos ((dot vec1 vec2) /. ((mag vec1) *. (mag vec2)))
  (* Projection of [vec] onto [onto]. *)
  let project vec onto = mult onto ((dot vec onto) /. (mag_sq onto))
  let to_string (x, y) = Printf.sprintf "(%f,%f)" x y

  (* Operator aliases. *)
  let (++) = add
  let (--) = sub
  let ( *** ) = mult
  let (//) = div
  let ( **. ) = dot
  let (~||) = mag
end
type vector = Vector.t
| null | https://raw.githubusercontent.com/Calsign/p5ml/65a379a941a7c1c99e6d43fa4a171cd4d68d4e88/core/math.ml | ocaml |
module Math = struct
let pi = 2. *. Stdlib.asin 1.
let half_pi = pi /. 2.
let two_pi = pi *. 2.
let e = Stdlib.exp 1.
let abs = Stdlib.abs
let absf = Stdlib.abs_float
let ceil v = Stdlib.ceil v |> int_of_float
let ceilf = Stdlib.ceil
let floor v = Stdlib.floor v |> int_of_float
let floorf = Stdlib.floor
let constrain (v : int) lower upper =
Stdlib.max v lower |> Stdlib.min upper
let constrainf (v : float) lower upper =
Stdlib.max v lower |> Stdlib.min upper
let distf x1 y1 x2 y2 = sqrt ((x2 -. x1) ** 2. +. (y2 -. y1) ** 2.)
let dist x1 y1 x2 y2 = distf (float_of_int x1) (float_of_int y1)
(float_of_int x2) (float_of_int y2)
let mag x y = dist x y 0 0
let magf x y = distf x y 0. 0.
let lerpf lower upper amt = (upper -. lower) *. amt
let lerp lower upper amt =
lerpf (float_of_int lower) (float_of_int upper) amt |> int_of_float
let log base v = (log v) /. (log base)
let mapf v from_lower from_upper to_lower to_upper =
lerpf to_lower to_upper (v /. (from_lower -. from_upper))
let map v from_lower from_upper to_lower to_upper =
mapf (float_of_int v) (float_of_int from_lower) (float_of_int from_upper)
(float_of_int to_lower) (float_of_int to_upper) |> int_of_float
let max (a : int) (b : int) = Stdlib.max a b
let maxf (a : float) (b : float) = Stdlib.max a b
let min (a : int) (b : int) = Stdlib.min a b
let minf (a : float) (b : float) = Stdlib.min a b
let normf v lower upper = mapf v lower upper 0. 1.
let norm v lower upper = normf (float_of_int v)
(float_of_int lower) (float_of_int upper)
let round v = Stdlib.floor (v +. (if v >= 0. then 0.5 else -0.5))
|> int_of_float
let sqrt = Stdlib.sqrt
let acos = Stdlib.acos
let asin = Stdlib.asin
let atan = Stdlib.atan
let atan2 = Stdlib.atan2
let cos = Stdlib.cos
let sin = Stdlib.sin
let tan = Stdlib.tan
let degrees r = r *. 180. /. pi
let radians d = d *. pi /. 180.
let angle_avg a b = atan2 (sin a +. sin b) (cos a +. cos b)
let angle_sum a b = mod_float (a +. b) (pi *. 2.)
let angle_diff a b = atan2 (sin (a -. b)) (cos (a -. b))
let () = Random.self_init ()
let random_int ?(lower_bound = 0) bound =
(Random.int (bound - lower_bound)) + lower_bound
let random_float ?(lower_bound = 0.) bound =
(Random.float (bound -. lower_bound)) +. lower_bound
let random_bool () = Random.bool ()
end
let (~.) = float_of_int
module Vector = struct
type t = float * float
let create x y = (x, y)
let of_tuple (x, y) = create x y
let of_angle theta = create (Math.cos theta) (Math.sin theta)
let (~<) (x, y) = x
let (~>) (x, y) = y
let mag_sq (x, y) = x ** 2. +. y ** 2.
let mag vec = mag_sq vec |> Math.sqrt
let add (x1, y1) (x2, y2) = create (x1 +. x2) (y1 +. y2)
let sub (x1, y1) (x2, y2) = create (x1 -. x2) (y1 -. y2)
let mult (x, y) scalar = create (x *. scalar) (y *. scalar)
let div vec scalar = mult vec (1. /. scalar)
let dist (x1, y1) (x2, y2) = Math.distf x1 y1 x2 y2
let dot (x1, y1) (x2, y2) = x1 *. x2 +. y1 *. y2
let norm vec = div vec (mag vec)
let with_mag vec scalar = mult (norm vec) scalar
let limit vec lim =
let curr_mag = mag vec
in if curr_mag > lim then with_mag vec lim else vec
let heading (x, y) = Math.atan2 y x
let rotate (x, y) theta =
create (x *. (Math.cos theta) -. y *. (Math.sin theta))
(x *. (Math.sin theta) +. y *. (Math.cos theta))
let lerp (x1, y1) (x2, y2) amt =
create (Math.lerpf x1 x2 amt) (Math.lerpf y1 y2 amt)
let angle_between vec1 vec2 =
Math.acos ((dot vec1 vec2) /. ((mag vec1) *. (mag vec2)))
let project vec onto = mult onto ((dot vec onto) /. (mag_sq onto))
let to_string (x, y) = Printf.sprintf "(%f,%f)" x y
let (++) = add
let (--) = sub
let ( *** ) = mult
let (//) = div
let ( **. ) = dot
let (~||) = mag
end
type vector = Vector.t
| |
381d389bb5f1bb184c36d55fe4e5429d91d0fc6ae7e129bb181837c168ab4f75 | snoyberg/why-you-should-use-stm | exercise.hs | #!/usr/bin/env stack
-- stack --resolver lts-13.21 script
-- This code doesn't compile, fix it!
main :: IO ()
main = do
putStrLn "Hello World"
putStrLn "Goodbye!
| null | https://raw.githubusercontent.com/snoyberg/why-you-should-use-stm/adf3366aebd6daf1dd702ed4cad1c2303d296afc/exercises/00-setup/exercise.hs | haskell | stack --resolver lts-13.21 script
This code doesn't compile, fix it! | #!/usr/bin/env stack
main :: IO ()
main = do
putStrLn "Hello World"
putStrLn "Goodbye!
|
33ef518176c87c2020e758df4c02f8b29cd58ef22d5ea6ca45d20676c5dc590f | paurkedal/iplogic | iplogic_diag.mli | Copyright ( C ) 2017 < >
(* Copyright (C) 2017
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 *)
(** Prints a source location span (start and end positions) to stderr. *)
val eprint_loc : Lexing.position * Lexing.position -> unit

(** [errf ?loc fmt ...] reports a message built from the printf-style format,
    optionally prefixed by [loc]; evaluates to [unit] (see the [format4]
    result type). *)
val errf : ?loc: Lexing.position * Lexing.position ->
           ('a, unit, string, unit) format4 -> 'a

(** Like {!errf}, but the free result type ['b] means it does not return
    normally — presumably raises or exits; confirm in the implementation. *)
val failf : ?loc: Lexing.position * Lexing.position ->
           ('a, unit, string, 'b) format4 -> 'a
| null | https://raw.githubusercontent.com/paurkedal/iplogic/7c56b5c55c03a681381fafb80220a5c0eba9f212/lib/iplogic_diag.mli | ocaml | Copyright ( C ) 2017 < >
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program . If not , see < / > .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see </>.
*)
val eprint_loc : Lexing.position * Lexing.position -> unit
val errf : ?loc: Lexing.position * Lexing.position ->
('a, unit, string, unit) format4 -> 'a
val failf : ?loc: Lexing.position * Lexing.position ->
('a, unit, string, 'b) format4 -> 'a
| |
5daa18099850ebd3056e80f223055be52f487b9d325e515c14d9c7e173b04c46 | facebook/infer | PulseModelsErlang.ml |
(*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *)
open! IStd
module L = Logging
open PulseBasicInterface
open PulseDomainInterface
open PulseOperationResult.Import
open PulseModelsImport
(** Represents the result of a transfer function that may (a) nondeterministically split the state,
and (b) some of the nondeterministic branches may be errors. Goes well with [let>] defined later
in this file. *)
type 'ok result = 'ok AccessResult.t list
(** A type for transfer functions that make an object, add it to abstract state
    ([AbductiveDomain.t]), and return a handle to it ([(AbstractValue.t * ValueHistory.t)]). Note
    that the type is similar to that of [PulseOperations.eval]. *)
type maker =
AbductiveDomain.t
-> (AbductiveDomain.t * (AbstractValue.t * ValueHistory.t)) AccessResult.t SatUnsat.t
(** special case of {!maker} when the result is known to be satisfiable *)
type sat_maker =
AbductiveDomain.t -> (AbductiveDomain.t * (AbstractValue.t * ValueHistory.t)) AccessResult.t
(** Similar to {!maker} but can return a disjunction of results. *)
type disjunction_maker =
AbductiveDomain.t -> (AbductiveDomain.t * (AbstractValue.t * ValueHistory.t)) AccessResult.t list
(** A type similar to {!maker} for transfer functions that only return an abstract value without any
history attached to it. *)
type value_maker =
AbductiveDomain.t -> (AbductiveDomain.t * AbstractValue.t) AccessResult.t SatUnsat.t
(** Writes field [field_name] of the object at [struct_addr]: links the field
    cell [field_addr] into the struct, then makes that cell point to
    [field_val]. *)
let write_field_and_deref path location ~struct_addr ~field_addr ~field_val field_name astate =
  let* astate =
    PulseOperations.write_field path location ~ref:struct_addr field_name ~obj:field_addr astate
  in
  PulseOperations.write_deref path location ~ref:field_addr ~obj:field_val astate
(** Returns the erlang type of an abstract value, extracted from its dynamic type (pulse) attribute.
    Returns [Any] if the value has no dynamic type, or if no erlang type can be extracted from it.
    Note that it may be the case for some encoded Erlang values (such as strings, floats or closures
    at the first implementation time). *)
let get_erlang_type_or_any val_ astate =
let open IOption.Let_syntax in
let typename =
let* typ_ = AbductiveDomain.AddressAttributes.get_dynamic_type val_ astate in
Typ.name typ_
in
match typename with Some (Typ.ErlangType erlang_type) -> erlang_type | _ -> ErlangTypeName.Any
(** Attaches the Erlang dynamic type [typ] to [addr_val] and binds the value
    to the return identifier [ret_id]. *)
let write_dynamic_type_and_return (addr_val, hist) typ ret_id astate =
  let typ = Typ.mk_struct (ErlangType typ) in
  let astate = PulseOperations.add_dynamic_type typ addr_val astate in
  PulseOperations.write_id ret_id (addr_val, hist) astate
(** A simple helper that wraps destination-passing-style evaluation functions that also return a
handler to their result into a function that allocates the destination under the hood and simply
return that handler.
This allows to transform this (somehow recurring) pattern:
[let dest = AbstractValue.mk_fresh () in let (astate, dest) = eval dest arg1 ... argN in ....]
into the simpler: [let (astate, dest) = eval_into_fresh eval arg1 ... argN in ...] *)
let eval_into_fresh eval =
let symbol = AbstractValue.mk_fresh () in
eval symbol
(** Use for chaining functions of the type ['a -> ('b,'err) result list]. The idea of such functions
    is that they can both fan-out into a (possibly empty) disjunction *and* signal errors. For
    example, consider [f] of type ['a -> ('b,'err) result list] and [g] of type
    ['b -> ('c,'err) result list] and [a] is some value of type ['a]. Note that the type of error is
    the same, so they can be propagated forward. To chain the application of these functions, you
    can write [let> x = f a in let> y = g x in \[Ok y\]].

    In several places, we have to compose with functions of the type ['a -> ('b,'err) result], which
    don't produce a list. One way to handle this is to wrap those functions in a list. For example,
    if [f] and [a] have the same type as before but [g] has type ['b -> ('c,'err) result], then we
    can write [let> x = f a in let> y = \[g x\] in \[Ok y\]]. *)
let ( let> ) x f =
List.concat_map
~f:(function
| FatalError _ as error ->
[error]
| Ok ok ->
f ok
| Recoverable (ok, errors) ->
f ok |> List.map ~f:(fun result -> PulseResult.append_errors errors result) )
x
(** Like [List.fold], but the accumulator is threaded through the disjunctive
    error monad of [let>]: fatal disjuncts are propagated, [f] is applied to
    the [Ok]/[Recoverable] ones. *)
let result_fold list ~init ~f =
  List.fold list ~init:[Ok init] ~f:(fun acc x ->
      let> acc_ok = acc in
      f acc_ok x )
(** Indexed variant of [result_fold]: [f] also receives the element's index. *)
let result_foldi list ~init ~f =
  List.foldi list ~init:[Ok init] ~f:(fun index acc x ->
      let> acc_ok = acc in
      f index acc_ok x )
(** Builds, as an abstract value, the truth value of the predicate "the value given as an argument
    has the erlang type given as the other argument". *)
let has_erlang_type value typ : value_maker =
fun astate ->
let instanceof_val = AbstractValue.mk_fresh () in
let sil_type = Typ.mk_struct (ErlangType typ) in
let++ astate = PulseArithmetic.and_equal_instanceof instanceof_val value sil_type astate in
(astate, instanceof_val)
(** Restricts [astate] to the branch where [value] has Erlang type [typ].
    Yields no disjunct (empty list) when the address check fails or the
    instanceof constraint is unsatisfiable. *)
let prune_type path location (value, hist) typ astate : AbductiveDomain.t result =
  (let open SatUnsat.Import in
   let* astate =
     (* If check_addr_access fails, we stop exploring this path by marking it [Unsat] *)
     PulseOperations.check_addr_access path Read location (value, hist) astate
     |> PulseResult.ok |> SatUnsat.of_option
   in
   let** astate, instanceof_val = has_erlang_type value typ astate in
   PulseArithmetic.prune_positive instanceof_val astate)
  |> SatUnsat.to_list
(** Loads a field from a struct, assuming that it has the correct type (should be checked by
[prune_type]). *)
let load_field path field location obj astate =
match PulseModelsJava.load_field path field location obj astate with
| Recoverable _ | FatalError _ ->
L.die InternalError "@[<v>@[%s@]@;@[%a@]@;@]"
"Could not load field. Did you call this function without calling prune_type?"
AbductiveDomain.pp astate
| Ok result ->
result
(** Models of Erlang runtime errors: each one aborts the current disjunct
    with a fatal [ReportableError] carrying the corresponding [ErlangError]
    diagnostic. *)
module Errors = struct
  (* Wrap an Erlang error as the single (fatal) outcome of a transfer function. *)
  let error err astate = [FatalError (ReportableError {astate; diagnostic= ErlangError err}, [])]

  let badarg : model =
   fun {location} astate -> error (Badarg {calling_context= []; location}) astate

  let badkey : model =
   fun {location} astate -> error (Badkey {calling_context= []; location}) astate

  let badmap : model =
   fun {location} astate -> error (Badmap {calling_context= []; location}) astate

  let badmatch : model =
   fun {location} astate -> error (Badmatch {calling_context= []; location}) astate

  let badrecord : model =
   fun {location} astate -> error (Badrecord {calling_context= []; location}) astate

  let badreturn : model =
   fun {location} astate -> error (Badreturn {calling_context= []; location}) astate

  let case_clause : model =
   fun {location} astate -> error (Case_clause {calling_context= []; location}) astate

  let function_clause : model =
   fun {location} astate -> error (Function_clause {calling_context= []; location}) astate

  let if_clause : model =
   fun {location} astate -> error (If_clause {calling_context= []; location}) astate

  let try_clause : model =
   fun {location} astate -> error (Try_clause {calling_context= []; location}) astate
end
(** Construction of Erlang atoms. An atom is encoded as a struct with two
    fields: its name (a string) and a precomputed hash of that name. *)
module Atoms = struct
  let value_field = Fieldname.make (ErlangType Atom) ErlangTypeName.atom_value

  let hash_field = Fieldname.make (ErlangType Atom) ErlangTypeName.atom_hash

  (* Allocate an atom struct, write its [value]/[hash] fields and tag it with
     the Atom dynamic type. *)
  let make_raw location path value hash : sat_maker =
   fun astate ->
    let hist = Hist.single_alloc path location "atom" in
    let addr_atom = (AbstractValue.mk_fresh (), hist) in
    let* astate =
      write_field_and_deref path location ~struct_addr:addr_atom
        ~field_addr:(AbstractValue.mk_fresh (), hist)
        ~field_val:value value_field astate
    in
    let+ astate =
      write_field_and_deref path location ~struct_addr:addr_atom
        ~field_addr:(AbstractValue.mk_fresh (), hist)
        ~field_val:hash hash_field astate
    in
    ( PulseOperations.add_dynamic_type (Typ.mk_struct (ErlangType Atom)) (fst addr_atom) astate
    , addr_atom )

  (* Model wrapper around [make_raw]: binds the new atom to the return id. *)
  let make value hash : model =
   fun {location; path; ret= ret_id, _} astate ->
    let<+> astate, ret = make_raw location path value hash astate in
    PulseOperations.write_id ret_id ret astate

  (* Build an atom from its literal name, computing the hash statically. *)
  let of_string location path (name : string) : maker =
   fun astate ->
    (* Note: This should correspond to [ErlangTranslator.mk_atom_call]. *)
    let** astate, hash =
      let hash_exp : Exp.t = Const (Cint (IntLit.of_int (ErlangTypeName.calculate_hash name))) in
      PulseOperations.eval path Read location hash_exp astate
    in
    let+* astate, name =
      let name_exp : Exp.t = Const (Cstr name) in
      PulseOperations.eval path Read location name_exp astate
    in
    make_raw location path name hash astate

  (* Converts [bool_value] into the 'true'/'false' atom: splits into the
     branch where it is non-zero (true) and the one where it is zero (false). *)
  let of_bool path location bool_value astate =
    let astate_true =
      let** astate = PulseArithmetic.prune_positive bool_value astate in
      of_string location path ErlangTypeName.atom_true astate
    in
    let astate_false :
        (AbductiveDomain.t * (AbstractValue.t * ValueHistory.t)) AccessResult.t SatUnsat.t =
      let** astate = PulseArithmetic.prune_eq_zero bool_value astate in
      of_string location path ErlangTypeName.atom_false astate
    in
    let> astate, (addr, hist) = SatUnsat.to_list astate_true @ SatUnsat.to_list astate_false in
    let typ = Typ.mk_struct (ErlangType Atom) in
    [Ok (PulseOperations.add_dynamic_type typ addr astate, (addr, hist))]

  (** Takes a boolean value, converts it to true/false atom and writes to return value. *)
  let write_return_from_bool path location bool_value ret_id astate =
    let> astate, ret_val = of_bool path location bool_value astate in
    PulseOperations.write_id ret_id ret_val astate |> Basic.ok_continue
end
(** Construction of Erlang integers, encoded as a struct with a single
    [value] field and the Integer dynamic type. *)
module Integers = struct
  let value_field = Fieldname.make (ErlangType Integer) ErlangTypeName.integer_value

  let typ = Typ.mk_struct (ErlangType Integer)

  (* Allocate an integer struct holding [value] and tag its dynamic type. *)
  let make_raw location path value : sat_maker =
   fun astate ->
    let hist = Hist.single_alloc path location "integer" in
    let addr = (AbstractValue.mk_fresh (), hist) in
    let+ astate =
      write_field_and_deref path location ~struct_addr:addr
        ~field_addr:(AbstractValue.mk_fresh (), hist)
        ~field_val:value value_field astate
    in
    (PulseOperations.add_dynamic_type typ (fst addr) astate, addr)

  (* Model wrapper around [make_raw]: binds the new integer to the return id. *)
  let make value : model =
   fun {location; path; ret= ret_id, _} astate ->
    let<+> astate, ret = make_raw location path value astate in
    PulseOperations.write_id ret_id ret astate

  (* Build an integer object from a literal. *)
  let of_intlit location path (intlit : IntLit.t) : maker =
   fun astate ->
    let+* astate, name =
      let intlit_exp : Exp.t = Const (Cint intlit) in
      PulseOperations.eval path Read location intlit_exp astate
    in
    make_raw location path name astate

  let of_string location path (intlit : string) : maker =
    of_intlit location path (IntLit.of_string intlit)
end
module Comparison = struct
module Comparator = struct
(** Records that define how to compare values according to their types.

        These records define a few functions that correspond to a comparison on values that have a
        specific type. For instance, the [integer] function can assume that both compared values are
        indeed of the integer type, and that function will be called by the global comparison
        function on these cases.

        Comparators must also define an [unsupported] function, that the dispatching function will
        call on types that it does not support or cannot determine, and an [incompatible] function
        that will be called when both compared values are known to be of a different type. *)
(** {1 Helper functions} *)
(** These functions are provided as helpers to define the comparator functions. *)
(** Compares two objects by comparing one specific field. No type checking is made and the user
        should take care that the field is indeed a valid one for both arguments. *)
let from_fields sil_op field x y location path : value_maker =
fun astate ->
let astate, _addr, (x_field, _) = load_field path field location x astate in
let astate, _addr, (y_field, _) = load_field path field location y astate in
eval_into_fresh PulseArithmetic.eval_binop_absval sil_op x_field y_field astate
(** A trivial comparison that is always false. Can be used eg. for equality on incompatible
types. *)
let const_false _x _y _location _path : value_maker =
fun astate ->
let const_false = AbstractValue.mk_fresh () in
let++ astate = PulseArithmetic.prune_eq_zero const_false astate in
(astate, const_false)
(** A trivial comparison that is always true. Can be used eg. for inequality on incompatible
types. *)
let const_true _x _y _location _path : value_maker =
fun astate ->
let const_true = AbstractValue.mk_fresh () in
let++ astate = PulseArithmetic.prune_positive const_true astate in
(astate, const_true)
(** Returns an unconstrained value. Can be used eg. for overapproximation or for unsupported
comparisons.
Note that, as an over-approximation, it can lead to some false positives, that we generally
try to avoid. However, the only solution for comparisons that we do not support (such as
ordering on atoms) would then be to consider the result as unreachable (that is, return an
empty abstract state). This would lead to code depending on such comparisons not being
analysed at all, which may be less desirable than analysing it without gaining information
from the comparison itself. *)
let unknown _x _y _location _path : value_maker =
fun astate ->
let result = AbstractValue.mk_fresh () in
Sat (Ok (astate, result))
* Takes as parameters the types of two values [ x ] and [ y ] known to be incompatible and returns
a comparator for [ x < y ] based on this type . Currently , this only supports comparisons with
at least one integer : namely , when [ x ] is known to be an integer , then the comparison is
always true , and when [ y ] is , the comparison is always false . Otherwise it is unknown .
Reference : { : #term-comparisons } .
a comparator for [x < y] based on this type. Currently, this only supports comparisons with
at least one integer: namely, when [x] is known to be an integer, then the comparison is
always true, and when [y] is, the comparison is always false. Otherwise it is unknown.
Reference: {:#term-comparisons}. *)
let incompatible_lt ty_x ty_y x y location path : value_maker =
fun astate ->
match (ty_x, ty_y) with
| ErlangTypeName.Integer, _ ->
const_true x y location path astate
| _, ErlangTypeName.Integer ->
const_false x y location path astate
| _ ->
unknown x y location path astate
* Takes as parameters the types of two values [ x ] and [ y ] known to be incompatible and returns
a comparator for [ x > y ] based on this type .
See also { ! incompatible_lt } .
a comparator for [x > y] based on this type.
See also {!incompatible_lt}. *)
let incompatible_gt ty_x ty_y x y location path : value_maker =
fun astate ->
match (ty_x, ty_y) with
| ErlangTypeName.Integer, _ ->
const_false x y location path astate
| _, ErlangTypeName.Integer ->
const_true x y location path astate
| _ ->
unknown x y location path astate
* Adapt { ! const_false } to have the expected type for the equality of incompatible values ( cf
{ ! type : t } , { ! : t.incompatible } ) .
{!type:t}, {!recfield:t.incompatible}). *)
let incompatible_eq _ty_x _ty_y = const_false
* Cf . { ! }
let incompatible_exactly_not_eq _ty_x _ty_y = const_true
(** {1 Comparators as records of functions} *)
* The type of the functions that compare two values based on a specific type combination . They
take the two values as parameters and build the abstract result that holds the comparison
value .
take the two values as parameters and build the abstract result that holds the comparison
value. *)
type monotyped_comparison =
AbstractValue.t * ValueHistory.t
-> AbstractValue.t * ValueHistory.t
-> Location.t
-> PathContext.t
-> value_maker
type t =
{ unsupported: monotyped_comparison
; incompatible: ErlangTypeName.t -> ErlangTypeName.t -> monotyped_comparison
* [ incompatible ] takes as first parameters the types of the values being compared in
order to implement type - based ordering
order to implement type-based ordering *)
; integer: monotyped_comparison
; atom: monotyped_comparison }
let eq =
{ unsupported= unknown
; incompatible= incompatible_eq
; integer= from_fields Binop.Eq Integers.value_field
; atom= from_fields Binop.Eq Atoms.hash_field }
let xne =
(* exactly_not_equal. *)
{ unsupported= unknown
; incompatible= incompatible_exactly_not_eq
; integer= from_fields Binop.Ne Integers.value_field
; atom= from_fields Binop.Ne Atoms.hash_field }
(** Makes an ordering comparator given an operator to be used for comparing integer values and a
function to compare incompatible values. *)
let ordering int_binop incompatible =
{ unsupported= unknown
; incompatible
; integer= from_fields int_binop Integers.value_field
; atom= unknown }
let gt = ordering Gt incompatible_gt
let ge = ordering Ge incompatible_gt
let lt = ordering Lt incompatible_lt
let le = ordering Le incompatible_lt
* Compare two abstract values , when one of them might be an integer , by disjuncting on the
case whether it is ( an integer ) or not .
Parameters ( not in order ):
- Two abstract values [ x ] and [ y ] . One of them is expected to ba a known integer , and the
other one to have an undetermined dynamic type .
- The ( erlang ) types of [ x ] and [ y ] as determined by the caller . Corresponding to the
expectation mentioned above , [ ( ty_x , ty_y ) ] is expected to be either [ ( Integer , Any ) ] or
[ ( Any , Integer ) ] ( this is not checked by the function and is the caller responsibility ) .
- An [ are_compatible ] ( boolean ) abstract value that witnesses if the types of [ x ] and [ y ]
are both integers or if one of them is not ( this is typically obtained by using
{ ! has_erlang_type } on the Any - typed argument ) .
Returns : a disjunction built on top of [ are_compatible ] , that compares [ x ] and [ y ] as
integers when they both are , and as incompatible values when the any - typed one is not an
integer .
case whether it is (an integer) or not.
Parameters (not in order):
- Two abstract values [x] and [y]. One of them is expected to ba a known integer, and the
other one to have an undetermined dynamic type.
- The (erlang) types of [x] and [y] as determined by the caller. Corresponding to the
expectation mentioned above, [(ty_x, ty_y)] is expected to be either [(Integer, Any)] or
[(Any, Integer)] (this is not checked by the function and is the caller responsibility).
- An [are_compatible] (boolean) abstract value that witnesses if the types of [x] and [y]
are both integers or if one of them is not (this is typically obtained by using
{!has_erlang_type} on the Any-typed argument).
Returns: a disjunction built on top of [are_compatible], that compares [x] and [y] as
integers when they both are, and as incompatible values when the any-typed one is not an
integer. *)
let any_with_integer_split cmp location path ~are_compatible ty_x ty_y x y : disjunction_maker =
fun astate ->
let int_result =
let** astate_int = PulseArithmetic.prune_positive are_compatible astate in
let++ astate_int, int_comparison = cmp.integer x y location path astate_int in
let int_hist = Hist.single_alloc path location "any_int_comparison" in
(astate_int, (int_comparison, int_hist))
in
let incompatible_result =
let** astate_incompatible = PulseArithmetic.prune_eq_zero are_compatible astate in
let++ astate_incompatible, incompatible_comparison =
cmp.incompatible ty_x ty_y x y location path astate_incompatible
in
let incompatible_hist = Hist.single_alloc path location "any_incompatible_comparison" in
(astate_incompatible, (incompatible_comparison, incompatible_hist))
in
SatUnsat.to_list int_result @ SatUnsat.to_list incompatible_result
end
* Makes an abstract value holding the comparison result of two parameters . We perform a case
analysis of the dynamic type of these parameters .
See the documentation of { ! Comparator } values for the meaning of the [ cmp ] parameter . It will
be given as an argument by specific comparisons functions and should define a few functions
that return a result for comparisons on specific types .
Note that we here say that two values are " incompatible " if they have separate types . That
does not mean that the comparison is invalid , as in erlang all comparisons are properly
defined even on differently - typed values :
{ : #term-comparisons } .
We say that two values have an " unsupported " type if they both share a type on which we do n't
do any precise comparison . Not that two values of * distinct * unsupported types are still
incompatible , and therefore might be precisely compared .
Current supported types are integers and atoms .
The final result is computed as follows :
- If the parameters are both of a supported type , integers , then we compare them accordingly
( eg . the [ cmp.integer ] function will then typically compare their value fields ) .
- If the parameters have incompatible types , then we return the result of a comparison of
incompatible types ( eg . equality would be false , and inequality would be true ) .
- If both parameters have the same unsupported type , then the comparison is unsupported and we
use the [ cmp.unsupported ] function ( that could for instance return an - overapproximating -
unconstrained result ) .
- If at least one parameter has no known dynamic type ( or , equivalently , its type is [ Any ] ) ,
then the comparison is also unsupported .
Note that , on supported types ( eg . integers ) , it is important that the [ cmp ] functions decide
themselves if they should compare some specific fields or not , instead of getting these fields
in the global function and have the methods work on the field values . This is because , when we
extend this code to work on other more complex types , which field is used or not may depend on
the actual comparison operator that we 're computing . For instance the equality of atoms can be
decided on their hash , but their relative ordering should check their names as
lexicographically ordered strings .
analysis of the dynamic type of these parameters.
See the documentation of {!Comparator} values for the meaning of the [cmp] parameter. It will
be given as an argument by specific comparisons functions and should define a few functions
that return a result for comparisons on specific types.
Note that we here say that two values are "incompatible" if they have separate types. That
does not mean that the comparison is invalid, as in erlang all comparisons are properly
defined even on differently-typed values:
{:#term-comparisons}.
We say that two values have an "unsupported" type if they both share a type on which we don't
do any precise comparison. Not that two values of *distinct* unsupported types are still
incompatible, and therefore might be precisely compared.
Current supported types are integers and atoms.
The final result is computed as follows:
- If the parameters are both of a supported type, integers, then we compare them accordingly
(eg. the [cmp.integer] function will then typically compare their value fields).
- If the parameters have incompatible types, then we return the result of a comparison of
incompatible types (eg. equality would be false, and inequality would be true).
- If both parameters have the same unsupported type, then the comparison is unsupported and we
use the [cmp.unsupported] function (that could for instance return an - overapproximating -
unconstrained result).
- If at least one parameter has no known dynamic type (or, equivalently, its type is [Any]),
then the comparison is also unsupported.
Note that, on supported types (eg. integers), it is important that the [cmp] functions decide
themselves if they should compare some specific fields or not, instead of getting these fields
in the global function and have the methods work on the field values. This is because, when we
extend this code to work on other more complex types, which field is used or not may depend on
the actual comparison operator that we're computing. For instance the equality of atoms can be
decided on their hash, but their relative ordering should check their names as
lexicographically ordered strings. *)
let make_raw (cmp : Comparator.t) location path ((x_val, _) as x) ((y_val, _) as y) :
disjunction_maker =
fun astate ->
let x_typ = get_erlang_type_or_any x_val astate in
let y_typ = get_erlang_type_or_any y_val astate in
match (x_typ, y_typ) with
| Integer, Integer ->
let<**> astate, result = cmp.integer x y location path astate in
let hist = Hist.single_alloc path location "integer_comparison" in
[Ok (astate, (result, hist))]
| Atom, Atom ->
let<**> astate, result = cmp.atom x y location path astate in
let hist = Hist.single_alloc path location "atom_comparison" in
[Ok (astate, (result, hist))]
| Integer, Any ->
let<**> astate, are_compatible = has_erlang_type y_val Integer astate in
Comparator.any_with_integer_split cmp location path ~are_compatible Integer Any x y astate
| Any, Integer ->
let<**> astate, are_compatible = has_erlang_type x_val Integer astate in
Comparator.any_with_integer_split cmp location path ~are_compatible Any Integer x y astate
| Nil, Nil | Cons, Cons | Tuple _, Tuple _ | Map, Map ->
let<**> astate, result = cmp.unsupported x y location path astate in
let hist = Hist.single_alloc path location "unsupported_comparison" in
[Ok (astate, (result, hist))]
| _ ->
let<**> astate, result = cmp.incompatible x_typ y_typ x y location path astate in
let hist = Hist.single_alloc path location "incompatible_comparison" in
[Ok (astate, (result, hist))]
* A model of comparison operators where we store in the destination the result of comparing two
parameters .
parameters. *)
let make cmp x y : model =
fun {location; path; ret= ret_id, _} astate ->
let> astate, (result, _hist) = make_raw cmp location path x y astate in
Atoms.write_return_from_bool path location result ret_id astate
(** Returns an abstract state that has been pruned on the comparison result being true. *)
let prune cmp location path x y astate : AbductiveDomain.t AccessResult.t list =
let> astate, (comparison, _hist) = make_raw cmp location path x y astate in
PulseArithmetic.prune_positive comparison astate |> SatUnsat.to_list
* { 1 Specific comparison operators }
let equal = make Comparator.eq
let prune_equal = prune Comparator.eq
let exactly_not_equal = make Comparator.xne
let greater = make Comparator.gt
let greater_or_equal = make Comparator.ge
let lesser = make Comparator.lt
let lesser_or_equal = make Comparator.le
end
module Lists = struct
  let head_field = Fieldname.make (ErlangType Cons) ErlangTypeName.cons_head

  let tail_field = Fieldname.make (ErlangType Cons) ErlangTypeName.cons_tail

  (** Helper function to create a Nil structure without assigning it to return value *)
  let make_nil_raw location path : sat_maker =
   fun astate ->
    let event = Hist.alloc_event path location "[]" in
    let addr_nil_val = AbstractValue.mk_fresh () in
    let addr_nil = (addr_nil_val, Hist.single_event path event) in
    let astate =
      PulseOperations.add_dynamic_type (Typ.mk_struct (ErlangType Nil)) addr_nil_val astate
    in
    Ok (astate, addr_nil)

  (** Create a Nil structure and assign it to return value *)
  let make_nil : model =
   fun {location; path; ret= ret_id, _} astate ->
    let<+> astate, addr_nil = make_nil_raw location path astate in
    PulseOperations.write_id ret_id addr_nil astate

  (** Helper function to create a Cons structure without assigning it to return value *)
  let make_cons_raw path location hd tl : sat_maker =
   fun astate ->
    let hist = Hist.single_alloc path location "[X|Xs]" in
    let addr_cons_val = AbstractValue.mk_fresh () in
    let addr_cons = (addr_cons_val, hist) in
    let* astate =
      write_field_and_deref path location ~struct_addr:addr_cons
        ~field_addr:(AbstractValue.mk_fresh (), hist)
        ~field_val:hd head_field astate
    in
    let+ astate =
      write_field_and_deref path location ~struct_addr:addr_cons
        ~field_addr:(AbstractValue.mk_fresh (), hist)
        ~field_val:tl tail_field astate
    in
    let astate =
      PulseOperations.add_dynamic_type (Typ.mk_struct (ErlangType Cons)) addr_cons_val astate
    in
    (astate, addr_cons)

  (** Create a Cons structure and assign it to return value *)
  let make_cons head tail : model =
   fun {location; path; ret= ret_id, _} astate ->
    let<+> astate, addr_cons = make_cons_raw path location head tail astate in
    PulseOperations.write_id ret_id addr_cons astate

  (** Assumes that the argument is a Cons and loads the head and tail *)
  let load_head_tail cons astate path location =
    let> astate = prune_type path location cons Cons astate in
    let astate, _, head = load_field path head_field location cons astate in
    let astate, _, tail = load_field path tail_field location cons astate in
    [Ok (head, tail, astate)]

  (** Assumes that a list is of given length and reads the elements *)
  let rec assume_and_deconstruct list length astate path location =
    match length with
    | 0 ->
        let> astate = prune_type path location list Nil astate in
        [Ok ([], astate)]
    | _ ->
        let> hd, tl, astate = load_head_tail list astate path location in
        let> elems, astate = assume_and_deconstruct tl (length - 1) astate path location in
        [Ok (hd :: elems, astate)]

  (** Builds the error state in which the argument is not a list: its dynamic type is assumed to
      be neither Cons nor Nil, and a [badarg] error is reported. *)
  let make_astate_badarg (list_val, _list_hist) data astate =
    (* arg is not a list if its type is neither Cons nor Nil *)
    let typ_cons = Typ.mk_struct (ErlangType Cons) in
    let typ_nil = Typ.mk_struct (ErlangType Nil) in
    let instanceof_val_cons = AbstractValue.mk_fresh () in
    let instanceof_val_nil = AbstractValue.mk_fresh () in
    let<**> astate =
      PulseArithmetic.and_equal_instanceof instanceof_val_cons list_val typ_cons astate
    in
    let<**> astate =
      PulseArithmetic.and_equal_instanceof instanceof_val_nil list_val typ_nil astate
    in
    let<**> astate = PulseArithmetic.prune_eq_zero instanceof_val_cons astate in
    let<**> astate = PulseArithmetic.prune_eq_zero instanceof_val_nil astate in
    Errors.badarg data astate

  (** Models prepending the elements of [list1] onto [list2], deconstructing [list1] up to
      [Config.erlang_list_unfold_depth] elements (one disjunct per assumed length). With
      [~reverse:false] this models list append; with [~reverse:true] the elements are consed in
      deconstruction order, i.e. [list1] is reversed onto [list2]. A [badarg] disjunct covers the
      case where [list1] is not a list. *)
  let append2 ~reverse list1 list2 : model =
   fun ({location; path; ret= ret_id, _} as data) astate ->
    let mk_astate_badarg = make_astate_badarg list1 data astate in
    (* Makes an abstract state corresponding to appending to a list of given length *)
    let mk_good_astate_concat length =
      let> elems, astate = assume_and_deconstruct list1 length astate path location in
      let elems = if reverse then elems else List.rev elems in
      let> astate, result_list =
        [ PulseResult.list_fold ~init:(astate, list2)
            ~f:(fun (astate, tl) hd -> make_cons_raw path location hd tl astate)
            elems ]
      in
      [Ok (PulseOperations.write_id ret_id result_list astate)]
    in
    mk_astate_badarg
    @ ( List.concat (List.init Config.erlang_list_unfold_depth ~f:mk_good_astate_concat)
      |> List.map ~f:Basic.map_continue )

  (** Models list reversal as appending-with-reverse onto a fresh Nil. *)
  let reverse list : model =
   fun ({location; path; _} as data) astate ->
    let<*> astate, nil = make_nil_raw location path astate in
    append2 ~reverse:true list nil data astate

  (** Builds an Erlang list value from the given element values, consing from the last element
      onto Nil. *)
  let rec make_raw location path elements : sat_maker =
   fun astate ->
    match elements with
    | [] ->
        make_nil_raw location path astate
    | head :: tail ->
        let* astate, tail_val = make_raw location path tail astate in
        make_cons_raw path location head tail_val astate

  (** Approximation: we don't actually do the side-effect, just assume the return value. *)
  let foreach _fun _list : model =
   fun {location; path; ret= ret_id, _} astate ->
    let<**> astate, ret = Atoms.of_string location path "ok" astate in
    PulseOperations.write_id ret_id ret astate |> Basic.ok_continue
end
module Tuples = struct
  (* The Erlang type name of a tuple depends on its arity. *)
  let typ size = Typ.ErlangType (Tuple size)

  (** Field name of the [index]-th element of a tuple of the given [size]. Erlang tuples are
      1-indexed; dies on out-of-range indices. *)
  let field_name size index =
    if not (1 <= index && index <= size) then L.die InternalError "Erlang tuples are 1-indexed"
    else Fieldname.make (typ size) (ErlangTypeName.tuple_elem index)

  (** Helper: Like [Tuples.make] but with a more precise/composable type. *)
  let make_raw (location : Location.t) (path : PathContext.t)
      (args : (AbstractValue.t * ValueHistory.t) list) : sat_maker =
   fun astate ->
    let tuple_size = List.length args in
    let tuple_typ_name : Typ.name = ErlangType (Tuple tuple_size) in
    let hist = Hist.single_alloc path location "{}" in
    let addr_tuple = (AbstractValue.mk_fresh (), hist) in
    let addr_elems = List.map ~f:(function _ -> (AbstractValue.mk_fresh (), hist)) args in
    let mk_field = Fieldname.make tuple_typ_name in
    let field_names = ErlangTypeName.tuple_field_names tuple_size in
    (* Pair each fresh element address with its field name and the value to store. *)
    let addr_elems_fields_payloads = List.zip_exn addr_elems (List.zip_exn field_names args) in
    let write_tuple_field astate (addr_elem, (field_name, payload)) =
      write_field_and_deref path location ~struct_addr:addr_tuple ~field_addr:addr_elem
        ~field_val:payload (mk_field field_name) astate
    in
    let+ astate =
      PulseResult.list_fold addr_elems_fields_payloads ~init:astate ~f:write_tuple_field
    in
    ( PulseOperations.add_dynamic_type (Typ.mk_struct tuple_typ_name) (fst addr_tuple) astate
    , addr_tuple )

  (** Model of tuple construction: builds a tuple from the call arguments and writes it to the
      return identifier. *)
  let make (args : 'a ProcnameDispatcher.Call.FuncArg.t list) : model =
   fun {location; path; ret= ret_id, _} astate ->
    let get_payload (arg : 'a ProcnameDispatcher.Call.FuncArg.t) = arg.arg_payload in
    let arg_payloads = List.map ~f:get_payload args in
    let<+> astate, ret = make_raw location path arg_payloads astate in
    PulseOperations.write_id ret_id ret astate
end
(** Maps are currently approximated to store only the latest key/value *)
module Maps = struct
  let mk_field = Fieldname.make (ErlangType Map)

  let key_field = mk_field "__infer_model_backing_map_key"

  let value_field = mk_field "__infer_model_backing_map_value"

  let is_empty_field = mk_field "__infer_model_backing_map_is_empty"

  (** Model of map construction from a flat [key1; value1; ...] argument list. Only the last
      key/value pair is tracked (approximation); the emptiness flag is set iff there are no
      arguments. Dies on an odd number of arguments. *)
  let make (args : 'a ProcnameDispatcher.Call.FuncArg.t list) : model =
   fun {location; path; ret= ret_id, _} astate ->
    let hist = Hist.single_alloc path location "#{}" in
    let addr_map = (AbstractValue.mk_fresh (), hist) in
    let addr_is_empty = (AbstractValue.mk_fresh (), hist) in
    let is_empty_value = AbstractValue.mk_fresh () in
    let fresh_val = (is_empty_value, hist) in
    let is_empty_lit = match args with [] -> IntLit.one | _ -> IntLit.zero in
    (* Reverse the list so we can get last key/value *)
    let<*> astate =
      match List.rev args with
      (* Non-empty map: we just consider the last key/value, rest is ignored (approximation) *)
      | value_arg :: key_arg :: _ ->
          let addr_key = (AbstractValue.mk_fresh (), hist) in
          let addr_value = (AbstractValue.mk_fresh (), hist) in
          write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_key
            ~field_val:key_arg.arg_payload key_field astate
          >>= write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_value
                ~field_val:value_arg.arg_payload value_field
      | _ :: _ ->
          L.die InternalError "Map create got one argument (requires even number)"
      (* Empty map *)
      | [] ->
          Ok astate
    in
    let<*> astate =
      write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_is_empty
        ~field_val:fresh_val is_empty_field astate
    in
    let<++> astate = PulseArithmetic.and_eq_int is_empty_value is_empty_lit astate in
    write_dynamic_type_and_return addr_map Map ret_id astate

  (* The empty map. *)
  let new_ : model = make []

  (** Builds the error state in which the argument is not a map ([instanceof Map] is assumed
      false) and a [badmap] error is reported. *)
  let make_astate_badmap (map_val, _map_hist) data astate =
    let typ = Typ.mk_struct (ErlangType Map) in
    let instanceof_val = AbstractValue.mk_fresh () in
    let<**> astate = PulseArithmetic.and_equal_instanceof instanceof_val map_val typ astate in
    let<**> astate = PulseArithmetic.prune_eq_zero instanceof_val astate in
    Errors.badmap data astate

  let make_astate_goodmap path location map astate = prune_type path location map Map astate

  let is_key key map : model =
   fun ({location; path; ret= ret_id, _} as data) astate ->
    (* Return 3 cases:
     * - Error & assume not map
     * - Ok & assume map & assume empty & return false
     * - Ok & assume map & assume not empty & assume key is the tracked key & return true
     *)
    let astate_badmap = make_astate_badmap map data astate in
    let astate_empty =
      let ret_val_false = AbstractValue.mk_fresh () in
      let> astate = make_astate_goodmap path location map astate in
      let astate, _isempty_addr, (is_empty, _isempty_hist) =
        load_field path is_empty_field location map astate
      in
      let> astate = PulseArithmetic.prune_positive is_empty astate |> SatUnsat.to_list in
      let> astate =
        PulseArithmetic.and_eq_int ret_val_false IntLit.zero astate |> SatUnsat.to_list
      in
      Atoms.write_return_from_bool path location ret_val_false ret_id astate
    in
    let astate_haskey =
      let ret_val_true = AbstractValue.mk_fresh () in
      let> astate = make_astate_goodmap path location map astate in
      let astate, _isempty_addr, (is_empty, _isempty_hist) =
        load_field path is_empty_field location map astate
      in
      let> astate = PulseArithmetic.prune_eq_zero is_empty astate |> SatUnsat.to_list in
      let astate, _key_addr, tracked_key = load_field path key_field location map astate in
      let> astate = Comparison.prune_equal location path key tracked_key astate in
      let> astate = PulseArithmetic.and_eq_int ret_val_true IntLit.one astate |> SatUnsat.to_list in
      Atoms.write_return_from_bool path location ret_val_true ret_id astate
    in
    astate_empty @ astate_haskey @ astate_badmap

  let get (key, key_history) map : model =
   fun ({location; path; ret= ret_id, _} as data) astate ->
    (* Return 3 cases:
     * - Error & assume not map
     * - Error & assume map & assume empty;
     * - Ok & assume map & assume nonempty & assume key is the tracked key & return tracked value
     *)
    let astate_badmap = make_astate_badmap map data astate in
    let astate_ok =
      let> astate = make_astate_goodmap path location map astate in
      let astate, _isempty_addr, (is_empty, _isempty_hist) =
        load_field path is_empty_field location map astate
      in
      let> astate = PulseArithmetic.prune_eq_zero is_empty astate |> SatUnsat.to_list in
      let astate, _key_addr, tracked_key = load_field path key_field location map astate in
      let> astate = Comparison.prune_equal location path (key, key_history) tracked_key astate in
      let astate, _value_addr, tracked_value = load_field path value_field location map astate in
      [Ok (PulseOperations.write_id ret_id tracked_value astate)]
    in
    let astate_badkey =
      let> astate = make_astate_goodmap path location map astate in
      let astate, _isempty_addr, (is_empty, _isempty_hist) =
        load_field path is_empty_field location map astate
      in
      let> astate = PulseArithmetic.prune_positive is_empty astate |> SatUnsat.to_list in
      Errors.badkey data astate
    in
    List.map ~f:Basic.map_continue astate_ok @ astate_badkey @ astate_badmap

  let put key value map : model =
   fun ({location; path; ret= ret_id, _} as data) astate ->
    (* Ignore old map. We only store one key/value so we can simply create a new map.
     * Return 2 cases:
     * - Error & assume not map
     * - Ok & assume map & return new map
     *)
    let hist = Hist.single_alloc path location "maps_put" in
    let astate_badmap = make_astate_badmap map data astate in
    let astate_ok =
      let addr_map = (AbstractValue.mk_fresh (), hist) in
      let addr_is_empty = (AbstractValue.mk_fresh (), hist) in
      let is_empty_value = AbstractValue.mk_fresh () in
      let fresh_val = (is_empty_value, hist) in
      let addr_key = (AbstractValue.mk_fresh (), hist) in
      let addr_value = (AbstractValue.mk_fresh (), hist) in
      let> astate = make_astate_goodmap path location map astate in
      let> astate =
        [ write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_key
            ~field_val:key key_field astate ]
      in
      let> astate =
        [ write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_value
            ~field_val:value value_field astate ]
      in
      let> astate =
        write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_is_empty
          ~field_val:fresh_val is_empty_field astate
        >>>= PulseArithmetic.and_eq_int is_empty_value IntLit.zero
        |> SatUnsat.to_list
      in
      [Ok (write_dynamic_type_and_return addr_map Map ret_id astate)]
    in
    List.map ~f:Basic.map_continue astate_ok @ astate_badmap
end
module Strings = struct
  (** This is a temporary solution for strings to avoid false positives. For now, we consider
      that the type of strings is list and compute this information whenever a string is created.
      Strings should be fully supported in future. T93361792 *)

  let value_field = Fieldname.make (ErlangType Integer) ErlangTypeName.cons_tail

  (** Builds a string value as a disjunction over the underlying integer [value]: either
      [value = 0] and the result is Nil (empty string as empty list), or [value > 0] and the
      result is a fresh Cons whose [value_field] stores [value]. *)
  let make_raw location path (value, _) : disjunction_maker =
   fun astate ->
    let new_hist = Hist.single_alloc path location "string" in
    let astate_nil =
      let+* astate_nil = PulseArithmetic.and_eq_int value IntLit.zero astate in
      let+ astate_nil, addr_nil = Lists.make_nil_raw location path astate_nil in
      (astate_nil, addr_nil)
    in
    let astate_not_nil =
      let val_not_nil = AbstractValue.mk_fresh () in
      let+* astate_not_nil = PulseArithmetic.and_positive value astate in
      let astate_not_nil =
        PulseOperations.add_dynamic_type (Typ.mk_struct (ErlangType Cons)) val_not_nil
          astate_not_nil
      in
      let+ astate_not_nil =
        write_field_and_deref path location ~struct_addr:(val_not_nil, new_hist)
          ~field_addr:(AbstractValue.mk_fresh (), new_hist)
          ~field_val:(value, new_hist) value_field astate_not_nil
      in
      (astate_not_nil, (val_not_nil, new_hist))
    in
    SatUnsat.to_list astate_nil @ SatUnsat.to_list astate_not_nil

  (** Model of string creation: builds the string value and writes it to the return identifier. *)
  let make value : model =
   fun {location; path; ret= ret_id, _} astate ->
    let> astate, (result, _hist) = make_raw location path value astate in
    PulseOperations.write_id ret_id (result, _hist) astate |> Basic.ok_continue
end
module BIF = struct
  (** Generic type-test BIF model: writes to the return identifier an Erlang boolean that is the
      [instanceof] witness of [value] having the Erlang type [type_]. *)
  let has_type (value, _hist) type_ : model =
   fun {location; path; ret= ret_id, _} astate ->
    let typ = Typ.mk_struct (ErlangType type_) in
    let is_typ = AbstractValue.mk_fresh () in
    let<**> astate = PulseArithmetic.and_equal_instanceof is_typ value typ astate in
    Atoms.write_return_from_bool path location is_typ ret_id astate

  let is_atom x : model = has_type x Atom

  (** Model of [is_boolean]: disjunction over whether the argument is an atom at all, and if so
      whether its hash matches the hash of the [true] or [false] atom. *)
  let is_boolean ((atom_val, _atom_hist) as atom) : model =
   fun {location; path; ret= ret_id, _} astate ->
    let astate_not_atom =
      (* Assume not atom: just return false *)
      let typ = Typ.mk_struct (ErlangType Atom) in
      let is_atom = AbstractValue.mk_fresh () in
      let<**> astate = PulseArithmetic.and_equal_instanceof is_atom atom_val typ astate in
      let<**> astate = PulseArithmetic.prune_eq_zero is_atom astate in
      Atoms.write_return_from_bool path location is_atom ret_id astate
    in
    let astate_is_atom =
      (* Assume atom: return hash==hashof(true) or hash==hashof(false) *)
      let> astate = prune_type path location atom Atom astate in
      let astate, _hash_addr, (hash, _hash_hist) =
        load_field path
          (Fieldname.make (ErlangType Atom) ErlangTypeName.atom_hash)
          location atom astate
      in
      let is_true = AbstractValue.mk_fresh () in
      let is_false = AbstractValue.mk_fresh () in
      let is_bool = AbstractValue.mk_fresh () in
      let<**> astate, is_true =
        PulseArithmetic.eval_binop is_true Binop.Eq (AbstractValueOperand hash)
          (ConstOperand
             (Cint (IntLit.of_int (ErlangTypeName.calculate_hash ErlangTypeName.atom_true))) )
          astate
      in
      let<**> astate, is_false =
        PulseArithmetic.eval_binop is_false Binop.Eq (AbstractValueOperand hash)
          (ConstOperand
             (Cint (IntLit.of_int (ErlangTypeName.calculate_hash ErlangTypeName.atom_false))) )
          astate
      in
      let<**> astate, is_bool =
        PulseArithmetic.eval_binop is_bool Binop.LOr (AbstractValueOperand is_true)
          (AbstractValueOperand is_false) astate
      in
      Atoms.write_return_from_bool path location is_bool ret_id astate
    in
    astate_not_atom @ astate_is_atom

  let is_integer x : model = has_type x Integer

  (** Model of [is_list]: a value is a list when its type is Cons or Nil, so return three
      disjuncts (is Cons / is Nil / neither). *)
  let is_list (list_val, _list_hist) : model =
   fun {location; path; ret= ret_id, _} astate ->
    let cons_typ = Typ.mk_struct (ErlangType Cons) in
    let nil_typ = Typ.mk_struct (ErlangType Nil) in
    let astate_is_cons =
      let is_cons = AbstractValue.mk_fresh () in
      let<**> astate = PulseArithmetic.and_equal_instanceof is_cons list_val cons_typ astate in
      let<**> astate = PulseArithmetic.prune_positive is_cons astate in
      Atoms.write_return_from_bool path location is_cons ret_id astate
    in
    let astate_is_nil =
      let is_nil = AbstractValue.mk_fresh () in
      let<**> astate = PulseArithmetic.and_equal_instanceof is_nil list_val nil_typ astate in
      let<**> astate = PulseArithmetic.prune_positive is_nil astate in
      Atoms.write_return_from_bool path location is_nil ret_id astate
    in
    let astate_not_list =
      let is_cons = AbstractValue.mk_fresh () in
      let is_nil = AbstractValue.mk_fresh () in
      let<**> astate = PulseArithmetic.and_equal_instanceof is_cons list_val cons_typ astate in
      let<**> astate = PulseArithmetic.prune_eq_zero is_cons astate in
      let<**> astate = PulseArithmetic.and_equal_instanceof is_nil list_val nil_typ astate in
      let<**> astate = PulseArithmetic.prune_eq_zero is_nil astate in
      (* At this point, both [is_cons] and [is_nil] are false so we can return any of them. *)
      Atoms.write_return_from_bool path location is_nil ret_id astate
    in
    astate_is_cons @ astate_is_nil @ astate_not_list

  let is_map x : model = has_type x Map
end
(** Custom models, specified by Config.pulse_models_for_erlang. *)
module Custom = struct
(* TODO: see T110841433 *)
(** Note: [None] means unknown/nondeterministic. *)
type erlang_value = known_erlang_value option [@@deriving of_yojson]
and known_erlang_value =
| Atom of string option
| IntLit of string
| List of erlang_value list
| Tuple of erlang_value list
type selector =
| ModuleFunctionArity of
{module_: string [@key "module"]; function_: string [@key "function"]; arity: int}
[@name "MFA"]
[@@deriving of_yojson]
type arguments_return = {arguments: erlang_value list; return: erlang_value}
[@@deriving of_yojson]
type behavior = ReturnValue of erlang_value | ArgumentsReturnList of arguments_return list
[@@deriving of_yojson]
type rule = {selector: selector; behavior: behavior} [@@deriving of_yojson]
type spec = rule list [@@deriving of_yojson]
let make_selector selector model =
let l0 f = f [] in
let l1 f x0 = f [x0] in
let l2 f x0 x1 = f [x0; x1] in
let l3 f x0 x1 x2 = f [x0; x1; x2] in
let open ProcnameDispatcher.Call in
match selector with
| ModuleFunctionArity {module_; function_; arity= 0} ->
-module_ &:: function_ <>$$--> l0 model
| ModuleFunctionArity {module_; function_; arity= 1} ->
-module_ &:: function_ <>$ capt_arg $--> l1 model
| ModuleFunctionArity {module_; function_; arity= 2} ->
-module_ &:: function_ <>$ capt_arg $+ capt_arg $--> l2 model
| ModuleFunctionArity {module_; function_; arity= 3} ->
-module_ &:: function_ <>$ capt_arg $+ capt_arg $+ capt_arg $--> l3 model
| ModuleFunctionArity {module_; function_; arity} ->
L.user_warning "@[<v>@[model for %s:%s/%d may match other arities (tool limitation)@]@;@]"
module_ function_ arity ;
-module_ &:: function_ &++> model
let return_value_helper location path =
Implementation note : [ return_value_helper ] groups two mutually recursive functions , [ one ] and
[ many ] , both of which may access [ location ] and [ path ] .
[many], both of which may access [location] and [path]. *)
let rec one (ret_val : erlang_value) : maker =
fun astate ->
match ret_val with
| None ->
let ret_addr = AbstractValue.mk_fresh () in
let ret_hist = Hist.single_alloc path location "nondet" in
Sat (Ok (astate, (ret_addr, ret_hist)))
| Some (Atom None) ->
let ret_addr = AbstractValue.mk_fresh () in
let ret_hist = Hist.single_alloc path location "nondet_atom" in
Sat
(Ok
( PulseOperations.add_dynamic_type (Typ.mk_struct (ErlangType Atom)) ret_addr astate
, (ret_addr, ret_hist) ) )
| Some (Atom (Some name)) ->
Atoms.of_string location path name astate
| Some (IntLit intlit) ->
Integers.of_string location path intlit astate
| Some (List elements) ->
let mk = Lists.make_raw location path in
many mk elements astate
| Some (Tuple elements) ->
let mk = Tuples.make_raw location path in
many mk elements astate
and many (mk : (AbstractValue.t * ValueHistory.t) list -> sat_maker)
(elements : erlang_value list) : maker =
fun astate ->
let mk_arg (args, astate) element =
let++ astate, arg = one element astate in
(arg :: args, astate)
in
let+* args, astate = PulseOperationResult.list_fold ~init:([], astate) ~f:mk_arg elements in
mk (List.rev args) astate
in
fun ret_val astate -> one ret_val astate
let return_value_model (ret_val : erlang_value) : model =
fun {location; path; ret= ret_id, _} astate ->
let<++> astate, ret = return_value_helper location path ret_val astate in
PulseOperations.write_id ret_id ret astate
let rec argument_value_helper path location actual_arg (pre_arg : erlang_value) astate :
AbductiveDomain.t result =
match pre_arg with
| None ->
[Ok astate]
| Some (Atom None) ->
prune_type path location actual_arg Atom astate
| Some (Atom (Some name)) ->
let> astate = prune_type path location actual_arg Atom astate in
let astate, _, (arg_hash, _hist) =
load_field path Atoms.hash_field location actual_arg astate
in
let name_hash : Const.t = Cint (IntLit.of_int (ErlangTypeName.calculate_hash name)) in
PulseArithmetic.prune_binop ~negated:false Eq (AbstractValueOperand arg_hash)
(ConstOperand name_hash) astate
|> SatUnsat.to_list
| Some (IntLit intlit) ->
let> astate = prune_type path location actual_arg Integer astate in
let astate, _, (value, _hist) =
load_field path Integers.value_field location actual_arg astate
in
PulseArithmetic.prune_binop ~negated:false Eq (AbstractValueOperand value)
(ConstOperand (Cint (IntLit.of_string intlit)))
astate
|> SatUnsat.to_list
| Some (Tuple elements) ->
let size = List.length elements in
let> astate = prune_type path location actual_arg (Tuple size) astate in
let one_element index astate pattern =
let field = Tuples.field_name size (index + 1) in
let astate, _, argi = load_field path field location actual_arg astate in
argument_value_helper path location argi pattern astate
in
result_foldi elements ~init:astate ~f:one_element
| Some (List elements) ->
let rec go value elements astate =
match elements with
| [] ->
prune_type path location value Nil astate
| element :: rest ->
let> head, tail, astate = Lists.load_head_tail value astate path location in
let> astate = argument_value_helper path location head element astate in
go tail rest astate
in
go actual_arg elements astate
let arguments_return_model args (summaries : arguments_return list) : model =
fun {location; path; ret= ret_id, _} astate ->
let get_payload (arg : 'a ProcnameDispatcher.Call.FuncArg.t) = arg.arg_payload in
let actual_arguments = args |> List.map ~f:get_payload in
let one_summary {arguments; return} =
let one_arg astate (actual_arg, pre_arg) =
argument_value_helper path location actual_arg pre_arg astate
in
let paired =
match List.zip actual_arguments arguments with
| Unequal_lengths ->
L.internal_error "Matched wrong arity (or model has wrong arity)." ;
[]
| Ok result ->
result
in
let> astate = result_fold paired ~init:astate ~f:one_arg in
let> astate, ret = return_value_helper location path return astate |> SatUnsat.to_list in
[Ok (PulseOperations.write_id ret_id ret astate)]
in
List.concat_map ~f:one_summary summaries |> List.map ~f:Basic.map_continue
let make_model behavior args : model =
match behavior with
| ReturnValue ret_val ->
return_value_model ret_val
| ArgumentsReturnList arguments_return_list ->
arguments_return_model args arguments_return_list
let matcher_of_rule {selector; behavior} = make_selector selector (make_model behavior)
let matchers () : matcher list =
let load_spec path =
try spec_of_yojson (Yojson.Safe.from_file path) with
| Yojson.Json_error what ->
L.user_error
"@[<v>Failed to parse json from %s: %s@;\
Continuing with no custom models imported from this file.@;\
@]"
path what ;
[]
| Ppx_yojson_conv_lib__Yojson_conv.Of_yojson_error (what, json) ->
let details = match what with Failure what -> Printf.sprintf " (%s)" what | _ -> "" in
L.user_error
"@[<v>Failed to parse --pulse-models-for-erlang from %s %s:@;\
%a@;\
Continuing with no custom models imported from this file.@;\
@]"
path details Yojson.Safe.pp json ;
[]
in
let spec =
List.fold Config.pulse_models_for_erlang ~init:[] ~f:(fun spec path ->
match (Unix.stat path).st_kind with
| S_DIR ->
Utils.fold_files ~init:spec
~f:(fun spec filepath ->
if Filename.check_suffix filepath "json" then
List.append (load_spec filepath) spec
else spec )
~path
| S_REG ->
List.append (load_spec path) spec
| _ ->
spec
| exception Unix.Unix_error (ENOENT, _, _) ->
spec )
in
List.map ~f:matcher_of_rule spec
end
let matchers : matcher list =
let open ProcnameDispatcher.Call in
let arg = capt_arg_payload in
let erlang_ns = ErlangTypeName.erlang_namespace in
Custom.matchers ()
@ [ +BuiltinDecl.(match_builtin __erlang_error_badkey) <>--> Errors.badkey
; +BuiltinDecl.(match_builtin __erlang_error_badmap) <>--> Errors.badmap
; +BuiltinDecl.(match_builtin __erlang_error_badmatch) <>--> Errors.badmatch
; +BuiltinDecl.(match_builtin __erlang_error_badrecord) <>--> Errors.badrecord
; +BuiltinDecl.(match_builtin __erlang_error_badreturn) <>--> Errors.badreturn
; +BuiltinDecl.(match_builtin __erlang_error_case_clause) <>--> Errors.case_clause
; +BuiltinDecl.(match_builtin __erlang_error_function_clause) <>--> Errors.function_clause
; +BuiltinDecl.(match_builtin __erlang_error_if_clause) <>--> Errors.if_clause
; +BuiltinDecl.(match_builtin __erlang_error_try_clause) <>--> Errors.try_clause
; +BuiltinDecl.(match_builtin __erlang_make_atom) <>$ arg $+ arg $--> Atoms.make
; +BuiltinDecl.(match_builtin __erlang_make_integer) <>$ arg $--> Integers.make
; +BuiltinDecl.(match_builtin __erlang_make_nil) <>--> Lists.make_nil
; +BuiltinDecl.(match_builtin __erlang_make_cons) <>$ arg $+ arg $--> Lists.make_cons
; +BuiltinDecl.(match_builtin __erlang_make_str_const) <>$ arg $--> Strings.make
; +BuiltinDecl.(match_builtin __erlang_equal) <>$ arg $+ arg $--> Comparison.equal
; +BuiltinDecl.(match_builtin __erlang_exactly_equal) <>$ arg $+ arg $--> Comparison.equal
TODO : proper modeling of equal vs exactly equal
; +BuiltinDecl.(match_builtin __erlang_not_equal)
<>$ arg $+ arg $--> Comparison.exactly_not_equal
TODO : proper modeling of equal vs exactly equal
; +BuiltinDecl.(match_builtin __erlang_exactly_not_equal)
<>$ arg $+ arg $--> Comparison.exactly_not_equal
; +BuiltinDecl.(match_builtin __erlang_greater) <>$ arg $+ arg $--> Comparison.greater
; +BuiltinDecl.(match_builtin __erlang_greater_or_equal)
<>$ arg $+ arg $--> Comparison.greater_or_equal
; +BuiltinDecl.(match_builtin __erlang_lesser) <>$ arg $+ arg $--> Comparison.lesser
; +BuiltinDecl.(match_builtin __erlang_lesser_or_equal)
<>$ arg $+ arg $--> Comparison.lesser_or_equal
; -"lists" &:: "append" <>$ arg $+ arg $--> Lists.append2 ~reverse:false
; -"lists" &:: "foreach" <>$ arg $+ arg $--> Lists.foreach
; -"lists" &:: "reverse" <>$ arg $--> Lists.reverse
; +BuiltinDecl.(match_builtin __erlang_make_map) &++> Maps.make
; -"maps" &:: "is_key" <>$ arg $+ arg $--> Maps.is_key
; -"maps" &:: "get" <>$ arg $+ arg $--> Maps.get
; -"maps" &:: "put" <>$ arg $+ arg $+ arg $--> Maps.put
; -"maps" &:: "new" <>$$--> Maps.new_
; +BuiltinDecl.(match_builtin __erlang_make_tuple) &++> Tuples.make
; -erlang_ns &:: "is_atom" <>$ arg $--> BIF.is_atom
; -erlang_ns &:: "is_boolean" <>$ arg $--> BIF.is_boolean
; -erlang_ns &:: "is_integer" <>$ arg $--> BIF.is_integer
; -erlang_ns &:: "is_list" <>$ arg $--> BIF.is_list
; -erlang_ns &:: "is_map" <>$ arg $--> BIF.is_map ]
| null | https://raw.githubusercontent.com/facebook/infer/ca296e96a71aa96ed8b67f65219dd160ef99fbf7/infer/src/pulse/PulseModelsErlang.ml | ocaml | * Represents the result of a transfer function that may (a) nondeterministically split the state,
and (b) some of the nondeterministic branches may be errors. Goes well with [let>] defined later
in this file.
* special case of {!maker} when the result is known to be satisfiable
* Similar to {!maker} but can return a disjunction of results.
* A type similar to {!maker} for transfer functions that only return an abstract value without any
history attached to it.
* A simple helper that wraps destination-passing-style evaluation functions that also return a
handler to their result into a function that allocates the destination under the hood and simply
return that handler.
This allows to transform this (somehow recurring) pattern:
[let dest = AbstractValue.mk_fresh () in let (astate, dest) = eval dest arg1 ... argN in ....]
into the simpler: [let (astate, dest) = eval_into_fresh eval arg1 ... argN in ...]
If check_addr_access fails, we stop exploring this path by marking it [Unsat]
* Loads a field from a struct, assuming that it has the correct type (should be checked by
[prune_type]).
Note: This should correspond to [ErlangTranslator.mk_atom_call].
Converts [bool_value] into true/false, and write it to [addr_atom].
* Takes a boolean value, converts it to true/false atom and writes to return value.
* {1 Helper functions}
* These functions are provided as helpers to define the comparator functions.
* A trivial comparison that is always false. Can be used eg. for equality on incompatible
types.
* A trivial comparison that is always true. Can be used eg. for inequality on incompatible
types.
* Returns an unconstrained value. Can be used eg. for overapproximation or for unsupported
comparisons.
Note that, as an over-approximation, it can lead to some false positives, that we generally
try to avoid. However, the only solution for comparisons that we do not support (such as
ordering on atoms) would then be to consider the result as unreachable (that is, return an
empty abstract state). This would lead to code depending on such comparisons not being
analysed at all, which may be less desirable than analysing it without gaining information
from the comparison itself.
* {1 Comparators as records of functions}
exactly_not_equal.
* Makes an ordering comparator given an operator to be used for comparing integer values and a
function to compare incompatible values.
* Returns an abstract state that has been pruned on the comparison result being true.
* Helper function to create a Nil structure without assigning it to return value
* Create a Nil structure and assign it to return value
* Helper function to create a Cons structure without assigning it to return value
* Create a Cons structure and assign it to return value
* Assumes that the argument is a Cons and loads the head and tail
* Assumes that a list is of given length and reads the elements
arg is not a list if its type is neither Cons nor Nil
Makes an abstract state corresponding to appending to a list of given length
* Approximation: we don't actually do the side-effect, just assume the return value.
* Helper: Like [Tuples.make] but with a more precise/composable type.
* Maps are currently approximated to store only the latest key/value
Reverse the list so we can get last key/value
Non-empty map: we just consider the last key/value, rest is ignored (approximation)
Empty map
* This is a temporary solution for strings to avoid false positives. For now, we consider that
the type of strings is list and compute this information whenever a string is created. Strings
should be fully supported in future. T93361792 *
Assume not atom: just return false
Assume atom: return hash==hashof(true) or hash==hashof(false)
At this point, both [is_cons] and [is_nil] are false so we can return any of them.
* Custom models, specified by Config.pulse_models_for_erlang.
TODO: see T110841433
* Note: [None] means unknown/nondeterministic. |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module L = Logging
open PulseBasicInterface
open PulseDomainInterface
open PulseOperationResult.Import
open PulseModelsImport
type 'ok result = 'ok AccessResult.t list
* A type for transfer functions that make an object , add it to abstract state
( [ AbductiveDomain.t ] ) , and return a handle to it ( [ ( AbstractValue.t * ValueHistory.t ) ] ) . Note
that the type is simlar to that of [ PulseOperations.eval ] .
([AbductiveDomain.t]), and return a handle to it ([(AbstractValue.t * ValueHistory.t)]). Note
that the type is simlar to that of [PulseOperations.eval]. *)
type maker =
AbductiveDomain.t
-> (AbductiveDomain.t * (AbstractValue.t * ValueHistory.t)) AccessResult.t SatUnsat.t
type sat_maker =
AbductiveDomain.t -> (AbductiveDomain.t * (AbstractValue.t * ValueHistory.t)) AccessResult.t
type disjunction_maker =
AbductiveDomain.t -> (AbductiveDomain.t * (AbstractValue.t * ValueHistory.t)) AccessResult.t list
type value_maker =
AbductiveDomain.t -> (AbductiveDomain.t * AbstractValue.t) AccessResult.t SatUnsat.t
let write_field_and_deref path location ~struct_addr ~field_addr ~field_val field_name astate =
let* astate =
PulseOperations.write_field path location ~ref:struct_addr field_name ~obj:field_addr astate
in
PulseOperations.write_deref path location ~ref:field_addr ~obj:field_val astate
* Returns the erlang type of an abstract value , extracted from its dynamic type ( pulse ) attribute .
Returns [ Any ] if the value has no dynamic type , or if no erlang type can be extracted from it .
Note that it may be the case for some encoded Erlang values ( such as strings , floats or closures
at the first implementation time ) .
Returns [Any] if the value has no dynamic type, or if no erlang type can be extracted from it.
Note that it may be the case for some encoded Erlang values (such as strings, floats or closures
at the first implementation time). *)
let get_erlang_type_or_any val_ astate =
let open IOption.Let_syntax in
let typename =
let* typ_ = AbductiveDomain.AddressAttributes.get_dynamic_type val_ astate in
Typ.name typ_
in
match typename with Some (Typ.ErlangType erlang_type) -> erlang_type | _ -> ErlangTypeName.Any
let write_dynamic_type_and_return (addr_val, hist) typ ret_id astate =
let typ = Typ.mk_struct (ErlangType typ) in
let astate = PulseOperations.add_dynamic_type typ addr_val astate in
PulseOperations.write_id ret_id (addr_val, hist) astate
let eval_into_fresh eval =
let symbol = AbstractValue.mk_fresh () in
eval symbol
* Use for chaining functions of the type ( ' a->('b,'err ) result list ) . The idea of such functions
is that they can both fan - out into a ( possibly empty ) disjunction * and * signal errors . For
example , consider [ f ] of type [ ' a->('b,'err ) result list ] and [ g ] of type
[ ' b->('c,'err ) result list ] and [ a ] is some value of type [ ' a ] . Note that the type of error is
the same , so they can be propagated forward . To chain the application of these functions , you
can write [ let > x = f a in let > y = g x in \[Ok y\ ] ] .
In several places , we have to compose with functions of the type [ ' a->('b,'err ) result ] , which
do n't produce a list . One way to handle this is to wrap those functions in a list . For example ,
if [ f ] and [ a ] have the same type as before but [ g ] has type [ ' b->('c,'err ) result ] , then we can
write [ let > = f a in let > y=\[g x\ ] in \[Ok y\ ] . ]
is that they can both fan-out into a (possibly empty) disjunction *and* signal errors. For
example, consider [f] of type ['a->('b,'err) result list] and [g] of type
['b->('c,'err) result list] and [a] is some value of type ['a]. Note that the type of error is
the same, so they can be propagated forward. To chain the application of these functions, you
can write [let> x=f a in let> y=g x in \[Ok y\]].
In several places, we have to compose with functions of the type ['a->('b,'err) result], which
don't produce a list. One way to handle this is to wrap those functions in a list. For example,
if [f] and [a] have the same type as before but [g] has type ['b->('c,'err) result], then we can
write [let> =f a in let> y=\[g x\] in \[Ok y\].] *)
let ( let> ) x f =
List.concat_map
~f:(function
| FatalError _ as error ->
[error]
| Ok ok ->
f ok
| Recoverable (ok, errors) ->
f ok |> List.map ~f:(fun result -> PulseResult.append_errors errors result) )
x
let result_fold list ~init ~f =
let init = [Ok init] in
let f result x =
let> ok = result in
f ok x
in
List.fold list ~init ~f
let result_foldi list ~init ~f =
let init = [Ok init] in
let f index result x =
let> ok = result in
f index ok x
in
List.foldi list ~init ~f
* Builds as an abstract value the truth value of the predicate " The value given as an argument as
the erlang type given as the other argument "
the erlang type given as the other argument" *)
let has_erlang_type value typ : value_maker =
fun astate ->
let instanceof_val = AbstractValue.mk_fresh () in
let sil_type = Typ.mk_struct (ErlangType typ) in
let++ astate = PulseArithmetic.and_equal_instanceof instanceof_val value sil_type astate in
(astate, instanceof_val)
let prune_type path location (value, hist) typ astate : AbductiveDomain.t result =
(let open SatUnsat.Import in
let* astate =
PulseOperations.check_addr_access path Read location (value, hist) astate
|> PulseResult.ok |> SatUnsat.of_option
in
let** astate, instanceof_val = has_erlang_type value typ astate in
PulseArithmetic.prune_positive instanceof_val astate)
|> SatUnsat.to_list
let load_field path field location obj astate =
match PulseModelsJava.load_field path field location obj astate with
| Recoverable _ | FatalError _ ->
L.die InternalError "@[<v>@[%s@]@;@[%a@]@;@]"
"Could not load field. Did you call this function without calling prune_type?"
AbductiveDomain.pp astate
| Ok result ->
result
module Errors = struct
let error err astate = [FatalError (ReportableError {astate; diagnostic= ErlangError err}, [])]
let badarg : model =
fun {location} astate -> error (Badarg {calling_context= []; location}) astate
let badkey : model =
fun {location} astate -> error (Badkey {calling_context= []; location}) astate
let badmap : model =
fun {location} astate -> error (Badmap {calling_context= []; location}) astate
let badmatch : model =
fun {location} astate -> error (Badmatch {calling_context= []; location}) astate
let badrecord : model =
fun {location} astate -> error (Badrecord {calling_context= []; location}) astate
let badreturn : model =
fun {location} astate -> error (Badreturn {calling_context= []; location}) astate
let case_clause : model =
fun {location} astate -> error (Case_clause {calling_context= []; location}) astate
let function_clause : model =
fun {location} astate -> error (Function_clause {calling_context= []; location}) astate
let if_clause : model =
fun {location} astate -> error (If_clause {calling_context= []; location}) astate
let try_clause : model =
fun {location} astate -> error (Try_clause {calling_context= []; location}) astate
end
module Atoms = struct
let value_field = Fieldname.make (ErlangType Atom) ErlangTypeName.atom_value
let hash_field = Fieldname.make (ErlangType Atom) ErlangTypeName.atom_hash
let make_raw location path value hash : sat_maker =
fun astate ->
let hist = Hist.single_alloc path location "atom" in
let addr_atom = (AbstractValue.mk_fresh (), hist) in
let* astate =
write_field_and_deref path location ~struct_addr:addr_atom
~field_addr:(AbstractValue.mk_fresh (), hist)
~field_val:value value_field astate
in
let+ astate =
write_field_and_deref path location ~struct_addr:addr_atom
~field_addr:(AbstractValue.mk_fresh (), hist)
~field_val:hash hash_field astate
in
( PulseOperations.add_dynamic_type (Typ.mk_struct (ErlangType Atom)) (fst addr_atom) astate
, addr_atom )
let make value hash : model =
fun {location; path; ret= ret_id, _} astate ->
let<+> astate, ret = make_raw location path value hash astate in
PulseOperations.write_id ret_id ret astate
let of_string location path (name : string) : maker =
fun astate ->
let** astate, hash =
let hash_exp : Exp.t = Const (Cint (IntLit.of_int (ErlangTypeName.calculate_hash name))) in
PulseOperations.eval path Read location hash_exp astate
in
let+* astate, name =
let name_exp : Exp.t = Const (Cstr name) in
PulseOperations.eval path Read location name_exp astate
in
make_raw location path name hash astate
let of_bool path location bool_value astate =
let astate_true =
let** astate = PulseArithmetic.prune_positive bool_value astate in
of_string location path ErlangTypeName.atom_true astate
in
let astate_false :
(AbductiveDomain.t * (AbstractValue.t * ValueHistory.t)) AccessResult.t SatUnsat.t =
let** astate = PulseArithmetic.prune_eq_zero bool_value astate in
of_string location path ErlangTypeName.atom_false astate
in
let> astate, (addr, hist) = SatUnsat.to_list astate_true @ SatUnsat.to_list astate_false in
let typ = Typ.mk_struct (ErlangType Atom) in
[Ok (PulseOperations.add_dynamic_type typ addr astate, (addr, hist))]
let write_return_from_bool path location bool_value ret_id astate =
let> astate, ret_val = of_bool path location bool_value astate in
PulseOperations.write_id ret_id ret_val astate |> Basic.ok_continue
end
module Integers = struct
let value_field = Fieldname.make (ErlangType Integer) ErlangTypeName.integer_value
let typ = Typ.mk_struct (ErlangType Integer)
let make_raw location path value : sat_maker =
fun astate ->
let hist = Hist.single_alloc path location "integer" in
let addr = (AbstractValue.mk_fresh (), hist) in
let+ astate =
write_field_and_deref path location ~struct_addr:addr
~field_addr:(AbstractValue.mk_fresh (), hist)
~field_val:value value_field astate
in
(PulseOperations.add_dynamic_type typ (fst addr) astate, addr)
let make value : model =
fun {location; path; ret= ret_id, _} astate ->
let<+> astate, ret = make_raw location path value astate in
PulseOperations.write_id ret_id ret astate
let of_intlit location path (intlit : IntLit.t) : maker =
fun astate ->
let+* astate, name =
let intlit_exp : Exp.t = Const (Cint intlit) in
PulseOperations.eval path Read location intlit_exp astate
in
make_raw location path name astate
let of_string location path (intlit : string) : maker =
of_intlit location path (IntLit.of_string intlit)
end
module Comparison = struct
module Comparator = struct
* Records that define how to compare values according to their types .
These records define a few functions that correspond to a comparison on values that have a
specific type . For instance , the [ integer ] function can assume that both compared values are
indeed of the integer type , and that function will be called by the global comparison
function on these cases .
Comparators must also define an [ unsupported ] function , that the dispatching function will
call on types that it does not support or can not determine , and an [ incompatible ] function
that will be called when both compared values are known to be of a different type .
These records define a few functions that correspond to a comparison on values that have a
specific type. For instance, the [integer] function can assume that both compared values are
indeed of the integer type, and that function will be called by the global comparison
function on these cases.
Comparators must also define an [unsupported] function, that the dispatching function will
call on types that it does not support or cannot determine, and an [incompatible] function
that will be called when both compared values are known to be of a different type. *)
* Compares two objects by comparing one specific field . No type checking is made and the user
should take care that the field is indeed a valid one for both arguments .
should take care that the field is indeed a valid one for both arguments. *)
let from_fields sil_op field x y location path : value_maker =
fun astate ->
let astate, _addr, (x_field, _) = load_field path field location x astate in
let astate, _addr, (y_field, _) = load_field path field location y astate in
eval_into_fresh PulseArithmetic.eval_binop_absval sil_op x_field y_field astate
let const_false _x _y _location _path : value_maker =
fun astate ->
let const_false = AbstractValue.mk_fresh () in
let++ astate = PulseArithmetic.prune_eq_zero const_false astate in
(astate, const_false)
let const_true _x _y _location _path : value_maker =
fun astate ->
let const_true = AbstractValue.mk_fresh () in
let++ astate = PulseArithmetic.prune_positive const_true astate in
(astate, const_true)
let unknown _x _y _location _path : value_maker =
fun astate ->
let result = AbstractValue.mk_fresh () in
Sat (Ok (astate, result))
* Takes as parameters the types of two values [ x ] and [ y ] known to be incompatible and returns
a comparator for [ x < y ] based on this type . Currently , this only supports comparisons with
at least one integer : namely , when [ x ] is known to be an integer , then the comparison is
always true , and when [ y ] is , the comparison is always false . Otherwise it is unknown .
Reference : { : #term-comparisons } .
a comparator for [x < y] based on this type. Currently, this only supports comparisons with
at least one integer: namely, when [x] is known to be an integer, then the comparison is
always true, and when [y] is, the comparison is always false. Otherwise it is unknown.
Reference: {:#term-comparisons}. *)
let incompatible_lt ty_x ty_y x y location path : value_maker =
fun astate ->
match (ty_x, ty_y) with
| ErlangTypeName.Integer, _ ->
const_true x y location path astate
| _, ErlangTypeName.Integer ->
const_false x y location path astate
| _ ->
unknown x y location path astate
* Takes as parameters the types of two values [ x ] and [ y ] known to be incompatible and returns
a comparator for [ x > y ] based on this type .
See also { ! incompatible_lt } .
a comparator for [x > y] based on this type.
See also {!incompatible_lt}. *)
let incompatible_gt ty_x ty_y x y location path : value_maker =
fun astate ->
match (ty_x, ty_y) with
| ErlangTypeName.Integer, _ ->
const_false x y location path astate
| _, ErlangTypeName.Integer ->
const_true x y location path astate
| _ ->
unknown x y location path astate
* Adapt { ! const_false } to have the expected type for the equality of incompatible values ( cf
{ ! type : t } , { ! : t.incompatible } ) .
{!type:t}, {!recfield:t.incompatible}). *)
let incompatible_eq _ty_x _ty_y = const_false
* Cf . { ! }
let incompatible_exactly_not_eq _ty_x _ty_y = const_true
* The type of the functions that compare two values based on a specific type combination . They
take the two values as parameters and build the abstract result that holds the comparison
value .
take the two values as parameters and build the abstract result that holds the comparison
value. *)
type monotyped_comparison =
AbstractValue.t * ValueHistory.t
-> AbstractValue.t * ValueHistory.t
-> Location.t
-> PathContext.t
-> value_maker
type t =
{ unsupported: monotyped_comparison
; incompatible: ErlangTypeName.t -> ErlangTypeName.t -> monotyped_comparison
* [ incompatible ] takes as first parameters the types of the values being compared in
order to implement type - based ordering
order to implement type-based ordering *)
; integer: monotyped_comparison
; atom: monotyped_comparison }
let eq =
{ unsupported= unknown
; incompatible= incompatible_eq
; integer= from_fields Binop.Eq Integers.value_field
; atom= from_fields Binop.Eq Atoms.hash_field }
let xne =
{ unsupported= unknown
; incompatible= incompatible_exactly_not_eq
; integer= from_fields Binop.Ne Integers.value_field
; atom= from_fields Binop.Ne Atoms.hash_field }
let ordering int_binop incompatible =
{ unsupported= unknown
; incompatible
; integer= from_fields int_binop Integers.value_field
; atom= unknown }
(* The four ordering comparators; [>]/[>=] and [<]/[<=] share their incompatible-type rule. *)
let gt = ordering Gt incompatible_gt

let ge = ordering Ge incompatible_gt

let lt = ordering Lt incompatible_lt

let le = ordering Le incompatible_lt
(** Compare two abstract values, when one of them might be an integer, by disjuncting on the
    case whether it is (an integer) or not.

    Parameters (not in order):

    - Two abstract values [x] and [y]. One of them is expected to be a known integer, and the
      other one to have an undetermined dynamic type.
    - The (erlang) types of [x] and [y] as determined by the caller. Corresponding to the
      expectation mentioned above, [(ty_x, ty_y)] is expected to be either [(Integer, Any)] or
      [(Any, Integer)] (this is not checked by the function and is the caller responsibility).
    - An [are_compatible] (boolean) abstract value that witnesses if the types of [x] and [y]
      are both integers or if one of them is not (this is typically obtained by using
      {!has_erlang_type} on the Any-typed argument).

    Returns: a disjunction built on top of [are_compatible], that compares [x] and [y] as
    integers when they both are, and as incompatible values when the any-typed one is not an
    integer. *)
let any_with_integer_split cmp location path ~are_compatible ty_x ty_y x y : disjunction_maker =
 fun astate ->
  (* Disjunct 1: [are_compatible] holds, so both values are integers. *)
  let int_result =
    let** astate_int = PulseArithmetic.prune_positive are_compatible astate in
    let++ astate_int, int_comparison = cmp.integer x y location path astate_int in
    let int_hist = Hist.single_alloc path location "any_int_comparison" in
    (astate_int, (int_comparison, int_hist))
  in
  (* Disjunct 2: [are_compatible] is zero, so the values have incompatible types. *)
  let incompatible_result =
    let** astate_incompatible = PulseArithmetic.prune_eq_zero are_compatible astate in
    let++ astate_incompatible, incompatible_comparison =
      cmp.incompatible ty_x ty_y x y location path astate_incompatible
    in
    let incompatible_hist = Hist.single_alloc path location "any_incompatible_comparison" in
    (astate_incompatible, (incompatible_comparison, incompatible_hist))
  in
  (* Unsat disjuncts are silently dropped here. *)
  SatUnsat.to_list int_result @ SatUnsat.to_list incompatible_result
end
(** Makes an abstract value holding the comparison result of two parameters. We perform a case
    analysis of the dynamic type of these parameters.

    See the documentation of {!Comparator} values for the meaning of the [cmp] parameter. It will
    be given as an argument by specific comparisons functions and should define a few functions
    that return a result for comparisons on specific types.

    Note that we here say that two values are "incompatible" if they have separate types. That
    does not mean that the comparison is invalid, as in erlang all comparisons are properly
    defined even on differently-typed values:
    {:https://www.erlang.org/doc/reference_manual/expressions.html#term-comparisons}.

    We say that two values have an "unsupported" type if they both share a type on which we don't
    do any precise comparison. Note that two values of *distinct* unsupported types are still
    incompatible, and therefore might be precisely compared.

    Current supported types are integers and atoms.

    The final result is computed as follows:

    - If the parameters are both of a supported type, integers, then we compare them accordingly
      (eg. the [cmp.integer] function will then typically compare their value fields).
    - If the parameters have incompatible types, then we return the result of a comparison of
      incompatible types (eg. equality would be false, and inequality would be true).
    - If both parameters have the same unsupported type, then the comparison is unsupported and we
      use the [cmp.unsupported] function (that could for instance return an - overapproximating -
      unconstrained result).
    - If at least one parameter has no known dynamic type (or, equivalently, its type is [Any]),
      then the comparison is also unsupported.

    Note that, on supported types (eg. integers), it is important that the [cmp] functions decide
    themselves if they should compare some specific fields or not, instead of getting these fields
    in the global function and have the methods work on the field values. This is because, when we
    extend this code to work on other more complex types, which field is used or not may depend on
    the actual comparison operator that we're computing. For instance the equality of atoms can be
    decided on their hash, but their relative ordering should check their names as
    lexicographically ordered strings. *)
let make_raw (cmp : Comparator.t) location path ((x_val, _) as x) ((y_val, _) as y) :
    disjunction_maker =
 fun astate ->
  let x_typ = get_erlang_type_or_any x_val astate in
  let y_typ = get_erlang_type_or_any y_val astate in
  match (x_typ, y_typ) with
  | Integer, Integer ->
      (* Both known integers: delegate to the integer comparison. *)
      let<**> astate, result = cmp.integer x y location path astate in
      let hist = Hist.single_alloc path location "integer_comparison" in
      [Ok (astate, (result, hist))]
  | Atom, Atom ->
      (* Both known atoms: delegate to the atom comparison. *)
      let<**> astate, result = cmp.atom x y location path astate in
      let hist = Hist.single_alloc path location "atom_comparison" in
      [Ok (astate, (result, hist))]
  | Integer, Any ->
      (* One known integer: disjunct on whether the other value is an integer too. *)
      let<**> astate, are_compatible = has_erlang_type y_val Integer astate in
      Comparator.any_with_integer_split cmp location path ~are_compatible Integer Any x y astate
  | Any, Integer ->
      let<**> astate, are_compatible = has_erlang_type x_val Integer astate in
      Comparator.any_with_integer_split cmp location path ~are_compatible Any Integer x y astate
  | Nil, Nil | Cons, Cons | Tuple _, Tuple _ | Map, Map ->
      (* Same type, but one on which we do no precise comparison. *)
      let<**> astate, result = cmp.unsupported x y location path astate in
      let hist = Hist.single_alloc path location "unsupported_comparison" in
      [Ok (astate, (result, hist))]
  | _ ->
      (* Distinct types: compare as incompatible values.
         NOTE(review): this catch-all also routes pairs involving [Any] other than with
         [Integer] (eg. [(Any, Atom)]) to [cmp.incompatible], while the doc comment above says
         comparisons involving [Any] are unsupported -- confirm this is intentional. *)
      let<**> astate, result = cmp.incompatible x_typ y_typ x y location path astate in
      let hist = Hist.single_alloc path location "incompatible_comparison" in
      [Ok (astate, (result, hist))]
(** A model of comparison operators where we store in the destination the result of comparing two
    parameters. *)
let make cmp x y : model =
 fun {location; path; ret= ret_id, _} astate ->
  let> astate, (result, _hist) = make_raw cmp location path x y astate in
  (* The boolean comparison result is materialized as a [true]/[false] atom. *)
  Atoms.write_return_from_bool path location result ret_id astate
(* Keep only the states in which the comparison of [x] and [y] holds. *)
let prune cmp location path x y astate : AbductiveDomain.t AccessResult.t list =
  let> astate, (comparison, _hist) = make_raw cmp location path x y astate in
  PulseArithmetic.prune_positive comparison astate |> SatUnsat.to_list
(** {1 Specific comparison operators} *)
(* Instantiations of {!make} and {!prune} with the comparators defined in {!Comparator}. *)
let equal = make Comparator.eq

let prune_equal = prune Comparator.eq

let exactly_not_equal = make Comparator.xne

let greater = make Comparator.gt

let greater_or_equal = make Comparator.ge

let lesser = make Comparator.lt

let lesser_or_equal = make Comparator.le
end
module Lists = struct
  (* Erlang lists are modeled as [Cons] cells with a head and a tail field, terminated by a
     [Nil]-typed value. *)

  let head_field = Fieldname.make (ErlangType Cons) ErlangTypeName.cons_head

  let tail_field = Fieldname.make (ErlangType Cons) ErlangTypeName.cons_tail

  (* Allocate a fresh abstract value with dynamic type [Nil], modeling the empty list. *)
  let make_nil_raw location path : sat_maker =
   fun astate ->
    let event = Hist.alloc_event path location "[]" in
    let addr_nil_val = AbstractValue.mk_fresh () in
    let addr_nil = (addr_nil_val, Hist.single_event path event) in
    let astate =
      PulseOperations.add_dynamic_type (Typ.mk_struct (ErlangType Nil)) addr_nil_val astate
    in
    Ok (astate, addr_nil)

  (* Model returning the empty list. *)
  let make_nil : model =
   fun {location; path; ret= ret_id, _} astate ->
    let<+> astate, addr_nil = make_nil_raw location path astate in
    PulseOperations.write_id ret_id addr_nil astate

  (* Allocate a fresh [Cons] cell and set its head and tail fields to [hd] and [tl]. *)
  let make_cons_raw path location hd tl : sat_maker =
   fun astate ->
    let hist = Hist.single_alloc path location "[X|Xs]" in
    let addr_cons_val = AbstractValue.mk_fresh () in
    let addr_cons = (addr_cons_val, hist) in
    let* astate =
      write_field_and_deref path location ~struct_addr:addr_cons
        ~field_addr:(AbstractValue.mk_fresh (), hist)
        ~field_val:hd head_field astate
    in
    let+ astate =
      write_field_and_deref path location ~struct_addr:addr_cons
        ~field_addr:(AbstractValue.mk_fresh (), hist)
        ~field_val:tl tail_field astate
    in
    let astate =
      PulseOperations.add_dynamic_type (Typ.mk_struct (ErlangType Cons)) addr_cons_val astate
    in
    (astate, addr_cons)

  (* Model of cons construction [X|Xs]. *)
  let make_cons head tail : model =
   fun {location; path; ret= ret_id, _} astate ->
    let<+> astate, addr_cons = make_cons_raw path location head tail astate in
    PulseOperations.write_id ret_id addr_cons astate

  (* Assume that [cons] has type [Cons], then read its head and tail fields. *)
  let load_head_tail cons astate path location =
    let> astate = prune_type path location cons Cons astate in
    let astate, _, head = load_field path head_field location cons astate in
    let astate, _, tail = load_field path tail_field location cons astate in
    [Ok (head, tail, astate)]

  (* Assume that [list] has exactly [length] elements and return them, in order, together with
     the updated state. *)
  let rec assume_and_deconstruct list length astate path location =
    match length with
    | 0 ->
        let> astate = prune_type path location list Nil astate in
        [Ok ([], astate)]
    | _ ->
        let> hd, tl, astate = load_head_tail list astate path location in
        let> elems, astate = assume_and_deconstruct tl (length - 1) astate path location in
        [Ok (hd :: elems, astate)]

  (* Report a [badarg] in the states where the argument is neither [Cons] nor [Nil]. *)
  let make_astate_badarg (list_val, _list_hist) data astate =
    let typ_cons = Typ.mk_struct (ErlangType Cons) in
    let typ_nil = Typ.mk_struct (ErlangType Nil) in
    let instanceof_val_cons = AbstractValue.mk_fresh () in
    let instanceof_val_nil = AbstractValue.mk_fresh () in
    let<**> astate =
      PulseArithmetic.and_equal_instanceof instanceof_val_cons list_val typ_cons astate
    in
    let<**> astate =
      PulseArithmetic.and_equal_instanceof instanceof_val_nil list_val typ_nil astate
    in
    let<**> astate = PulseArithmetic.prune_eq_zero instanceof_val_cons astate in
    let<**> astate = PulseArithmetic.prune_eq_zero instanceof_val_nil astate in
    Errors.badarg data astate

  (* Prepend the elements of [list1] onto [list2], deconstructing [list1] for up to
     [Config.erlang_list_unfold_depth] elements; when [reverse] is set, the elements of [list1]
     are prepended in reverse order. *)
  let append2 ~reverse list1 list2 : model =
   fun ({location; path; ret= ret_id, _} as data) astate ->
    let mk_astate_badarg = make_astate_badarg list1 data astate in
    let mk_good_astate_concat length =
      let> elems, astate = assume_and_deconstruct list1 length astate path location in
      let elems = if reverse then elems else List.rev elems in
      let> astate, result_list =
        [ PulseResult.list_fold ~init:(astate, list2)
            ~f:(fun (astate, tl) hd -> make_cons_raw path location hd tl astate)
            elems ]
      in
      [Ok (PulseOperations.write_id ret_id result_list astate)]
    in
    mk_astate_badarg
    @ ( List.concat (List.init Config.erlang_list_unfold_depth ~f:mk_good_astate_concat)
      |> List.map ~f:Basic.map_continue )

  (* Reversal, modeled as appending onto the empty list in reverse order. *)
  let reverse list : model =
   fun ({location; path; _} as data) astate ->
    let<*> astate, nil = make_nil_raw location path astate in
    append2 ~reverse:true list nil data astate

  (* Build an abstract list holding the given [elements], in order. *)
  let rec make_raw location path elements : sat_maker =
   fun astate ->
    match elements with
    | [] ->
        make_nil_raw location path astate
    | head :: tail ->
        let* astate, tail_val = make_raw location path tail astate in
        make_cons_raw path location head tail_val astate

  (* Over-approximating model of foreach: the function and list arguments are ignored and the
     atom [ok] is returned. *)
  let foreach _fun _list : model =
   fun {location; path; ret= ret_id, _} astate ->
    let<**> astate, ret = Atoms.of_string location path "ok" astate in
    PulseOperations.write_id ret_id ret astate |> Basic.ok_continue
end
module Tuples = struct
  (* Tuples are modeled as structs whose type name records the tuple size and whose fields are
     the (1-indexed) elements. *)

  let typ size = Typ.ErlangType (Tuple size)

  (* Field name of element [index] of a tuple of size [size]; dies if [index] is outside
     [1..size]. *)
  let field_name size index =
    if not (1 <= index && index <= size) then L.die InternalError "Erlang tuples are 1-indexed"
    else Fieldname.make (typ size) (ErlangTypeName.tuple_elem index)

  (* Allocate a tuple holding [args] as its elements. *)
  let make_raw (location : Location.t) (path : PathContext.t)
      (args : (AbstractValue.t * ValueHistory.t) list) : sat_maker =
   fun astate ->
    let tuple_size = List.length args in
    let tuple_typ_name : Typ.name = ErlangType (Tuple tuple_size) in
    let hist = Hist.single_alloc path location "{}" in
    let addr_tuple = (AbstractValue.mk_fresh (), hist) in
    let addr_elems = List.map ~f:(function _ -> (AbstractValue.mk_fresh (), hist)) args in
    let mk_field = Fieldname.make tuple_typ_name in
    let field_names = ErlangTypeName.tuple_field_names tuple_size in
    (* [zip_exn] is safe: both lists are derived from [args] and have the same length. *)
    let addr_elems_fields_payloads = List.zip_exn addr_elems (List.zip_exn field_names args) in
    let write_tuple_field astate (addr_elem, (field_name, payload)) =
      write_field_and_deref path location ~struct_addr:addr_tuple ~field_addr:addr_elem
        ~field_val:payload (mk_field field_name) astate
    in
    let+ astate =
      PulseResult.list_fold addr_elems_fields_payloads ~init:astate ~f:write_tuple_field
    in
    ( PulseOperations.add_dynamic_type (Typ.mk_struct tuple_typ_name) (fst addr_tuple) astate
    , addr_tuple )

  (* Model of tuple construction. *)
  let make (args : 'a ProcnameDispatcher.Call.FuncArg.t list) : model =
   fun {location; path; ret= ret_id, _} astate ->
    let get_payload (arg : 'a ProcnameDispatcher.Call.FuncArg.t) = arg.arg_payload in
    let arg_payloads = List.map ~f:get_payload args in
    let<+> astate, ret = make_raw location path arg_payloads astate in
    PulseOperations.write_id ret_id ret astate
end
module Maps = struct
  (* Maps are over-approximated: the model tracks only one key/value pair and an emptiness
     flag, stored in the backing fields below. *)

  let mk_field = Fieldname.make (ErlangType Map)

  let key_field = mk_field "__infer_model_backing_map_key"

  let value_field = mk_field "__infer_model_backing_map_value"

  let is_empty_field = mk_field "__infer_model_backing_map_is_empty"

  (* Create a map from [args], which alternate keys and values; only the last key/value pair
     is tracked by the model. *)
  let make (args : 'a ProcnameDispatcher.Call.FuncArg.t list) : model =
   fun {location; path; ret= ret_id, _} astate ->
    let hist = Hist.single_alloc path location "#{}" in
    let addr_map = (AbstractValue.mk_fresh (), hist) in
    let addr_is_empty = (AbstractValue.mk_fresh (), hist) in
    let is_empty_value = AbstractValue.mk_fresh () in
    let fresh_val = (is_empty_value, hist) in
    let is_empty_lit = match args with [] -> IntLit.one | _ -> IntLit.zero in
    (* Reverse [args] to access the last key/value pair. *)
    let<*> astate =
      match List.rev args with
      | value_arg :: key_arg :: _ ->
          let addr_key = (AbstractValue.mk_fresh (), hist) in
          let addr_value = (AbstractValue.mk_fresh (), hist) in
          write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_key
            ~field_val:key_arg.arg_payload key_field astate
          >>= write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_value
                ~field_val:value_arg.arg_payload value_field
      | _ :: _ ->
          L.die InternalError "Map create got one argument (requires even number)"
      | [] ->
          Ok astate
    in
    let<*> astate =
      write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_is_empty
        ~field_val:fresh_val is_empty_field astate
    in
    let<++> astate = PulseArithmetic.and_eq_int is_empty_value is_empty_lit astate in
    write_dynamic_type_and_return addr_map Map ret_id astate

  (* The empty map. *)
  let new_ : model = make []

  (* Report a [badmap] in the states where the argument is not [Map]-typed. *)
  let make_astate_badmap (map_val, _map_hist) data astate =
    let typ = Typ.mk_struct (ErlangType Map) in
    let instanceof_val = AbstractValue.mk_fresh () in
    let<**> astate = PulseArithmetic.and_equal_instanceof instanceof_val map_val typ astate in
    let<**> astate = PulseArithmetic.prune_eq_zero instanceof_val astate in
    Errors.badmap data astate

  (* Assume that [map] has type [Map]. *)
  let make_astate_goodmap path location map astate = prune_type path location map Map astate

  let is_key key map : model =
   fun ({location; path; ret= ret_id, _} as data) astate ->
    (* Return 3 cases:
     * - Error & assume not map
     * - Ok & assume map & assume empty & return false
     * - Ok & assume map & assume not empty & assume key is the tracked key & return true
     *)
    let astate_badmap = make_astate_badmap map data astate in
    let astate_empty =
      let ret_val_false = AbstractValue.mk_fresh () in
      let> astate = make_astate_goodmap path location map astate in
      let astate, _isempty_addr, (is_empty, _isempty_hist) =
        load_field path is_empty_field location map astate
      in
      let> astate = PulseArithmetic.prune_positive is_empty astate |> SatUnsat.to_list in
      let> astate =
        PulseArithmetic.and_eq_int ret_val_false IntLit.zero astate |> SatUnsat.to_list
      in
      Atoms.write_return_from_bool path location ret_val_false ret_id astate
    in
    let astate_haskey =
      let ret_val_true = AbstractValue.mk_fresh () in
      let> astate = make_astate_goodmap path location map astate in
      let astate, _isempty_addr, (is_empty, _isempty_hist) =
        load_field path is_empty_field location map astate
      in
      let> astate = PulseArithmetic.prune_eq_zero is_empty astate |> SatUnsat.to_list in
      let astate, _key_addr, tracked_key = load_field path key_field location map astate in
      let> astate = Comparison.prune_equal location path key tracked_key astate in
      let> astate = PulseArithmetic.and_eq_int ret_val_true IntLit.one astate |> SatUnsat.to_list in
      Atoms.write_return_from_bool path location ret_val_true ret_id astate
    in
    astate_empty @ astate_haskey @ astate_badmap

  let get (key, key_history) map : model =
   fun ({location; path; ret= ret_id, _} as data) astate ->
    (* Return 3 cases:
     * - Error & assume not map
     * - Error & assume map & assume empty;
     * - Ok & assume map & assume nonempty & assume key is the tracked key & return tracked value
     *)
    let astate_badmap = make_astate_badmap map data astate in
    let astate_ok =
      let> astate = make_astate_goodmap path location map astate in
      let astate, _isempty_addr, (is_empty, _isempty_hist) =
        load_field path is_empty_field location map astate
      in
      let> astate = PulseArithmetic.prune_eq_zero is_empty astate |> SatUnsat.to_list in
      let astate, _key_addr, tracked_key = load_field path key_field location map astate in
      let> astate = Comparison.prune_equal location path (key, key_history) tracked_key astate in
      let astate, _value_addr, tracked_value = load_field path value_field location map astate in
      [Ok (PulseOperations.write_id ret_id tracked_value astate)]
    in
    let astate_badkey =
      let> astate = make_astate_goodmap path location map astate in
      let astate, _isempty_addr, (is_empty, _isempty_hist) =
        load_field path is_empty_field location map astate
      in
      let> astate = PulseArithmetic.prune_positive is_empty astate |> SatUnsat.to_list in
      Errors.badkey data astate
    in
    List.map ~f:Basic.map_continue astate_ok @ astate_badkey @ astate_badmap

  let put key value map : model =
   fun ({location; path; ret= ret_id, _} as data) astate ->
    (* Ignore old map. We only store one key/value so we can simply create a new map.
     * Return 2 cases:
     * - Error & assume not map
     * - Ok & assume map & return new map
     *)
    let hist = Hist.single_alloc path location "maps_put" in
    let astate_badmap = make_astate_badmap map data astate in
    let astate_ok =
      let addr_map = (AbstractValue.mk_fresh (), hist) in
      let addr_is_empty = (AbstractValue.mk_fresh (), hist) in
      let is_empty_value = AbstractValue.mk_fresh () in
      let fresh_val = (is_empty_value, hist) in
      let addr_key = (AbstractValue.mk_fresh (), hist) in
      let addr_value = (AbstractValue.mk_fresh (), hist) in
      let> astate = make_astate_goodmap path location map astate in
      let> astate =
        [ write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_key
            ~field_val:key key_field astate ]
      in
      let> astate =
        [ write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_value
            ~field_val:value value_field astate ]
      in
      let> astate =
        write_field_and_deref path location ~struct_addr:addr_map ~field_addr:addr_is_empty
          ~field_val:fresh_val is_empty_field astate
        >>>= PulseArithmetic.and_eq_int is_empty_value IntLit.zero
        |> SatUnsat.to_list
      in
      [Ok (write_dynamic_type_and_return addr_map Map ret_id astate)]
    in
    List.map ~f:Basic.map_continue astate_ok @ astate_badmap
end
module Strings = struct
  (* NOTE(review): this field is made with owner type [Integer] and the [cons_tail] field name,
     yet it is written below on a value whose dynamic type is set to [Cons] -- confirm this is
     intentional. *)
  let value_field = Fieldname.make (ErlangType Integer) ErlangTypeName.cons_tail

  (* Build a disjunction modeling a string from the abstract [value]: either [value] is zero
     and the string is the empty list, or [value] is positive and the string is a [Cons]-typed
     value whose backing field holds [value]. *)
  let make_raw location path (value, _) : disjunction_maker =
   fun astate ->
    let new_hist = Hist.single_alloc path location "string" in
    (* Case 1: [value] is zero, model the string as the empty list. *)
    let astate_nil =
      let+* astate_nil = PulseArithmetic.and_eq_int value IntLit.zero astate in
      let+ astate_nil, addr_nil = Lists.make_nil_raw location path astate_nil in
      (astate_nil, addr_nil)
    in
    (* Case 2: [value] is positive, model the string as a nonempty list. *)
    let astate_not_nil =
      let val_not_nil = AbstractValue.mk_fresh () in
      let+* astate_not_nil = PulseArithmetic.and_positive value astate in
      let astate_not_nil =
        PulseOperations.add_dynamic_type (Typ.mk_struct (ErlangType Cons)) val_not_nil
          astate_not_nil
      in
      let+ astate_not_nil =
        write_field_and_deref path location ~struct_addr:(val_not_nil, new_hist)
          ~field_addr:(AbstractValue.mk_fresh (), new_hist)
          ~field_val:(value, new_hist) value_field astate_not_nil
      in
      (astate_not_nil, (val_not_nil, new_hist))
    in
    SatUnsat.to_list astate_nil @ SatUnsat.to_list astate_not_nil

  (* Model of string construction: write each disjunct of [make_raw] to the return id. *)
  let make value : model =
   fun {location; path; ret= ret_id, _} astate ->
    (* Bind the result pair as a whole instead of destructuring into an underscore-prefixed
       [_hist] binding that was then actually used, which is misleading (the underscore prefix
       conventionally marks an unused name). *)
    let> astate, result = make_raw location path value astate in
    PulseOperations.write_id ret_id result astate |> Basic.ok_continue
end
module BIF = struct
  (* Models of type-test built-in functions. *)

  (* Generic type test: return the boolean atom witnessing that [value] is an instance of
     [type_]. *)
  let has_type (value, _hist) type_ : model =
   fun {location; path; ret= ret_id, _} astate ->
    let typ = Typ.mk_struct (ErlangType type_) in
    let is_typ = AbstractValue.mk_fresh () in
    let<**> astate = PulseArithmetic.and_equal_instanceof is_typ value typ astate in
    Atoms.write_return_from_bool path location is_typ ret_id astate

  let is_atom x : model = has_type x Atom

  (* Disjunct on whether the argument is an atom; if so, return whether its hash is the hash
     of the [true] or [false] atom. *)
  let is_boolean ((atom_val, _atom_hist) as atom) : model =
   fun {location; path; ret= ret_id, _} astate ->
    let astate_not_atom =
      (* Assume not atom: return false (the pruned-to-zero [is_atom] witness). *)
      let typ = Typ.mk_struct (ErlangType Atom) in
      let is_atom = AbstractValue.mk_fresh () in
      let<**> astate = PulseArithmetic.and_equal_instanceof is_atom atom_val typ astate in
      let<**> astate = PulseArithmetic.prune_eq_zero is_atom astate in
      Atoms.write_return_from_bool path location is_atom ret_id astate
    in
    let astate_is_atom =
      (* Assume atom: compare the hash field against the hashes of [true] and [false]. *)
      let> astate = prune_type path location atom Atom astate in
      let astate, _hash_addr, (hash, _hash_hist) =
        load_field path
          (Fieldname.make (ErlangType Atom) ErlangTypeName.atom_hash)
          location atom astate
      in
      let is_true = AbstractValue.mk_fresh () in
      let is_false = AbstractValue.mk_fresh () in
      let is_bool = AbstractValue.mk_fresh () in
      let<**> astate, is_true =
        PulseArithmetic.eval_binop is_true Binop.Eq (AbstractValueOperand hash)
          (ConstOperand
             (Cint (IntLit.of_int (ErlangTypeName.calculate_hash ErlangTypeName.atom_true))) )
          astate
      in
      let<**> astate, is_false =
        PulseArithmetic.eval_binop is_false Binop.Eq (AbstractValueOperand hash)
          (ConstOperand
             (Cint (IntLit.of_int (ErlangTypeName.calculate_hash ErlangTypeName.atom_false))) )
          astate
      in
      let<**> astate, is_bool =
        PulseArithmetic.eval_binop is_bool Binop.LOr (AbstractValueOperand is_true)
          (AbstractValueOperand is_false) astate
      in
      Atoms.write_return_from_bool path location is_bool ret_id astate
    in
    astate_not_atom @ astate_is_atom

  let is_integer x : model = has_type x Integer

  (* True when the argument is [Cons] or [Nil], false otherwise; implemented as three
     disjuncts. *)
  let is_list (list_val, _list_hist) : model =
   fun {location; path; ret= ret_id, _} astate ->
    let cons_typ = Typ.mk_struct (ErlangType Cons) in
    let nil_typ = Typ.mk_struct (ErlangType Nil) in
    let astate_is_cons =
      let is_cons = AbstractValue.mk_fresh () in
      let<**> astate = PulseArithmetic.and_equal_instanceof is_cons list_val cons_typ astate in
      let<**> astate = PulseArithmetic.prune_positive is_cons astate in
      Atoms.write_return_from_bool path location is_cons ret_id astate
    in
    let astate_is_nil =
      let is_nil = AbstractValue.mk_fresh () in
      let<**> astate = PulseArithmetic.and_equal_instanceof is_nil list_val nil_typ astate in
      let<**> astate = PulseArithmetic.prune_positive is_nil astate in
      Atoms.write_return_from_bool path location is_nil ret_id astate
    in
    let astate_not_list =
      (* Neither [Cons] nor [Nil]: return false (the pruned-to-zero [is_nil] witness). *)
      let is_cons = AbstractValue.mk_fresh () in
      let is_nil = AbstractValue.mk_fresh () in
      let<**> astate = PulseArithmetic.and_equal_instanceof is_cons list_val cons_typ astate in
      let<**> astate = PulseArithmetic.prune_eq_zero is_cons astate in
      let<**> astate = PulseArithmetic.and_equal_instanceof is_nil list_val nil_typ astate in
      let<**> astate = PulseArithmetic.prune_eq_zero is_nil astate in
      Atoms.write_return_from_bool path location is_nil ret_id astate
    in
    astate_is_cons @ astate_is_nil @ astate_not_list

  let is_map x : model = has_type x Map
end
module Custom = struct
  (* Custom models, loaded as JSON from the files and directories given by
     [Config.pulse_models_for_erlang]. Each rule pairs a selector (which procedure to model)
     with a behavior (what the model returns / assumes about arguments). *)

  (* [None] stands for an unconstrained ("nondet") value; [Atom None] for an atom with an
     unspecified name. *)
  type erlang_value = known_erlang_value option [@@deriving of_yojson]

  and known_erlang_value =
    | Atom of string option
    | IntLit of string
    | List of erlang_value list
    | Tuple of erlang_value list

  type selector =
    | ModuleFunctionArity of
        {module_: string [@key "module"]; function_: string [@key "function"]; arity: int}
        [@name "MFA"]
  [@@deriving of_yojson]

  (* One summary: patterns the arguments are assumed to match, and the associated return. *)
  type arguments_return = {arguments: erlang_value list; return: erlang_value}
  [@@deriving of_yojson]

  type behavior = ReturnValue of erlang_value | ArgumentsReturnList of arguments_return list
  [@@deriving of_yojson]

  type rule = {selector: selector; behavior: behavior} [@@deriving of_yojson]

  type spec = rule list [@@deriving of_yojson]

  (* Build a ProcnameDispatcher matcher from a selector. Arities up to 3 capture their
     arguments precisely; larger arities fall back to a matcher that may also match other
     arities (tool limitation, reported as a user warning). *)
  let make_selector selector model =
    let l0 f = f [] in
    let l1 f x0 = f [x0] in
    let l2 f x0 x1 = f [x0; x1] in
    let l3 f x0 x1 x2 = f [x0; x1; x2] in
    let open ProcnameDispatcher.Call in
    match selector with
    | ModuleFunctionArity {module_; function_; arity= 0} ->
        -module_ &:: function_ <>$$--> l0 model
    | ModuleFunctionArity {module_; function_; arity= 1} ->
        -module_ &:: function_ <>$ capt_arg $--> l1 model
    | ModuleFunctionArity {module_; function_; arity= 2} ->
        -module_ &:: function_ <>$ capt_arg $+ capt_arg $--> l2 model
    | ModuleFunctionArity {module_; function_; arity= 3} ->
        -module_ &:: function_ <>$ capt_arg $+ capt_arg $+ capt_arg $--> l3 model
    | ModuleFunctionArity {module_; function_; arity} ->
        L.user_warning "@[<v>@[model for %s:%s/%d may match other arities (tool limitation)@]@;@]"
          module_ function_ arity ;
        -module_ &:: function_ &++> model

  (* Build the abstract value described by an [erlang_value]. *)
  let return_value_helper location path =
    (* Implementation note: [return_value_helper] groups two mutually recursive functions,
       [one] and [many], both of which may access [location] and [path]. *)
    let rec one (ret_val : erlang_value) : maker =
     fun astate ->
      match ret_val with
      | None ->
          (* Unconstrained fresh value. *)
          let ret_addr = AbstractValue.mk_fresh () in
          let ret_hist = Hist.single_alloc path location "nondet" in
          Sat (Ok (astate, (ret_addr, ret_hist)))
      | Some (Atom None) ->
          (* Atom with unspecified name: only the dynamic type is constrained. *)
          let ret_addr = AbstractValue.mk_fresh () in
          let ret_hist = Hist.single_alloc path location "nondet_atom" in
          Sat
            (Ok
               ( PulseOperations.add_dynamic_type (Typ.mk_struct (ErlangType Atom)) ret_addr astate
               , (ret_addr, ret_hist) ) )
      | Some (Atom (Some name)) ->
          Atoms.of_string location path name astate
      | Some (IntLit intlit) ->
          Integers.of_string location path intlit astate
      | Some (List elements) ->
          let mk = Lists.make_raw location path in
          many mk elements astate
      | Some (Tuple elements) ->
          let mk = Tuples.make_raw location path in
          many mk elements astate
    and many (mk : (AbstractValue.t * ValueHistory.t) list -> sat_maker)
        (elements : erlang_value list) : maker =
     fun astate ->
      (* Build each element in turn, then assemble with [mk] (list or tuple constructor). *)
      let mk_arg (args, astate) element =
        let++ astate, arg = one element astate in
        (arg :: args, astate)
      in
      let+* args, astate = PulseOperationResult.list_fold ~init:([], astate) ~f:mk_arg elements in
      mk (List.rev args) astate
    in
    fun ret_val astate -> one ret_val astate

  (* Model that just returns the described value. *)
  let return_value_model (ret_val : erlang_value) : model =
   fun {location; path; ret= ret_id, _} astate ->
    let<++> astate, ret = return_value_helper location path ret_val astate in
    PulseOperations.write_id ret_id ret astate

  (* Prune [astate] so that [actual_arg] matches the pattern [pre_arg]. *)
  let rec argument_value_helper path location actual_arg (pre_arg : erlang_value) astate :
      AbductiveDomain.t result =
    match pre_arg with
    | None ->
        (* No constraint on this argument. *)
        [Ok astate]
    | Some (Atom None) ->
        prune_type path location actual_arg Atom astate
    | Some (Atom (Some name)) ->
        (* Atom equality is checked on the hash field. *)
        let> astate = prune_type path location actual_arg Atom astate in
        let astate, _, (arg_hash, _hist) =
          load_field path Atoms.hash_field location actual_arg astate
        in
        let name_hash : Const.t = Cint (IntLit.of_int (ErlangTypeName.calculate_hash name)) in
        PulseArithmetic.prune_binop ~negated:false Eq (AbstractValueOperand arg_hash)
          (ConstOperand name_hash) astate
        |> SatUnsat.to_list
    | Some (IntLit intlit) ->
        let> astate = prune_type path location actual_arg Integer astate in
        let astate, _, (value, _hist) =
          load_field path Integers.value_field location actual_arg astate
        in
        PulseArithmetic.prune_binop ~negated:false Eq (AbstractValueOperand value)
          (ConstOperand (Cint (IntLit.of_string intlit)))
          astate
        |> SatUnsat.to_list
    | Some (Tuple elements) ->
        (* Constrain the tuple size, then recursively constrain each element. *)
        let size = List.length elements in
        let> astate = prune_type path location actual_arg (Tuple size) astate in
        let one_element index astate pattern =
          let field = Tuples.field_name size (index + 1) in
          let astate, _, argi = load_field path field location actual_arg astate in
          argument_value_helper path location argi pattern astate
        in
        result_foldi elements ~init:astate ~f:one_element
    | Some (List elements) ->
        (* Walk the cons cells, constraining each head; the end must be [Nil]. *)
        let rec go value elements astate =
          match elements with
          | [] ->
              prune_type path location value Nil astate
          | element :: rest ->
              let> head, tail, astate = Lists.load_head_tail value astate path location in
              let> astate = argument_value_helper path location head element astate in
              go tail rest astate
        in
        go actual_arg elements astate

  (* Model from a list of summaries: for each summary, constrain the actual arguments with the
     argument patterns and return the described value. *)
  let arguments_return_model args (summaries : arguments_return list) : model =
   fun {location; path; ret= ret_id, _} astate ->
    let get_payload (arg : 'a ProcnameDispatcher.Call.FuncArg.t) = arg.arg_payload in
    let actual_arguments = args |> List.map ~f:get_payload in
    let one_summary {arguments; return} =
      let one_arg astate (actual_arg, pre_arg) =
        argument_value_helper path location actual_arg pre_arg astate
      in
      let paired =
        match List.zip actual_arguments arguments with
        | Unequal_lengths ->
            L.internal_error "Matched wrong arity (or model has wrong arity)." ;
            []
        | Ok result ->
            result
      in
      let> astate = result_fold paired ~init:astate ~f:one_arg in
      let> astate, ret = return_value_helper location path return astate |> SatUnsat.to_list in
      [Ok (PulseOperations.write_id ret_id ret astate)]
    in
    List.concat_map ~f:one_summary summaries |> List.map ~f:Basic.map_continue

  let make_model behavior args : model =
    match behavior with
    | ReturnValue ret_val ->
        return_value_model ret_val
    | ArgumentsReturnList arguments_return_list ->
        arguments_return_model args arguments_return_list

  let matcher_of_rule {selector; behavior} = make_selector selector (make_model behavior)

  (* Load all custom model specs from [Config.pulse_models_for_erlang] (regular json files, or
     directories scanned for json files) and turn them into matchers. Parse errors are reported
     to the user and the offending file contributes no models; missing paths are skipped. *)
  let matchers () : matcher list =
    let load_spec path =
      try spec_of_yojson (Yojson.Safe.from_file path) with
      | Yojson.Json_error what ->
          L.user_error
            "@[<v>Failed to parse json from %s: %s@;\
             Continuing with no custom models imported from this file.@;\
             @]"
            path what ;
          []
      | Ppx_yojson_conv_lib__Yojson_conv.Of_yojson_error (what, json) ->
          let details = match what with Failure what -> Printf.sprintf " (%s)" what | _ -> "" in
          L.user_error
            "@[<v>Failed to parse --pulse-models-for-erlang from %s %s:@;\
             %a@;\
             Continuing with no custom models imported from this file.@;\
             @]"
            path details Yojson.Safe.pp json ;
          []
    in
    let spec =
      List.fold Config.pulse_models_for_erlang ~init:[] ~f:(fun spec path ->
          match (Unix.stat path).st_kind with
          | S_DIR ->
              Utils.fold_files ~init:spec
                ~f:(fun spec filepath ->
                  if Filename.check_suffix filepath "json" then
                    List.append (load_spec filepath) spec
                  else spec )
                ~path
          | S_REG ->
              List.append (load_spec path) spec
          | _ ->
              spec
          | exception Unix.Unix_error (ENOENT, _, _) ->
              spec )
    in
    List.map ~f:matcher_of_rule spec
end
let matchers : matcher list =
let open ProcnameDispatcher.Call in
let arg = capt_arg_payload in
let erlang_ns = ErlangTypeName.erlang_namespace in
Custom.matchers ()
@ [ +BuiltinDecl.(match_builtin __erlang_error_badkey) <>--> Errors.badkey
; +BuiltinDecl.(match_builtin __erlang_error_badmap) <>--> Errors.badmap
; +BuiltinDecl.(match_builtin __erlang_error_badmatch) <>--> Errors.badmatch
; +BuiltinDecl.(match_builtin __erlang_error_badrecord) <>--> Errors.badrecord
; +BuiltinDecl.(match_builtin __erlang_error_badreturn) <>--> Errors.badreturn
; +BuiltinDecl.(match_builtin __erlang_error_case_clause) <>--> Errors.case_clause
; +BuiltinDecl.(match_builtin __erlang_error_function_clause) <>--> Errors.function_clause
; +BuiltinDecl.(match_builtin __erlang_error_if_clause) <>--> Errors.if_clause
; +BuiltinDecl.(match_builtin __erlang_error_try_clause) <>--> Errors.try_clause
; +BuiltinDecl.(match_builtin __erlang_make_atom) <>$ arg $+ arg $--> Atoms.make
; +BuiltinDecl.(match_builtin __erlang_make_integer) <>$ arg $--> Integers.make
; +BuiltinDecl.(match_builtin __erlang_make_nil) <>--> Lists.make_nil
; +BuiltinDecl.(match_builtin __erlang_make_cons) <>$ arg $+ arg $--> Lists.make_cons
; +BuiltinDecl.(match_builtin __erlang_make_str_const) <>$ arg $--> Strings.make
; +BuiltinDecl.(match_builtin __erlang_equal) <>$ arg $+ arg $--> Comparison.equal
; +BuiltinDecl.(match_builtin __erlang_exactly_equal) <>$ arg $+ arg $--> Comparison.equal
TODO : proper modeling of equal vs exactly equal
; +BuiltinDecl.(match_builtin __erlang_not_equal)
<>$ arg $+ arg $--> Comparison.exactly_not_equal
TODO : proper modeling of equal vs exactly equal
; +BuiltinDecl.(match_builtin __erlang_exactly_not_equal)
<>$ arg $+ arg $--> Comparison.exactly_not_equal
; +BuiltinDecl.(match_builtin __erlang_greater) <>$ arg $+ arg $--> Comparison.greater
; +BuiltinDecl.(match_builtin __erlang_greater_or_equal)
<>$ arg $+ arg $--> Comparison.greater_or_equal
; +BuiltinDecl.(match_builtin __erlang_lesser) <>$ arg $+ arg $--> Comparison.lesser
; +BuiltinDecl.(match_builtin __erlang_lesser_or_equal)
<>$ arg $+ arg $--> Comparison.lesser_or_equal
; -"lists" &:: "append" <>$ arg $+ arg $--> Lists.append2 ~reverse:false
; -"lists" &:: "foreach" <>$ arg $+ arg $--> Lists.foreach
; -"lists" &:: "reverse" <>$ arg $--> Lists.reverse
; +BuiltinDecl.(match_builtin __erlang_make_map) &++> Maps.make
; -"maps" &:: "is_key" <>$ arg $+ arg $--> Maps.is_key
; -"maps" &:: "get" <>$ arg $+ arg $--> Maps.get
; -"maps" &:: "put" <>$ arg $+ arg $+ arg $--> Maps.put
; -"maps" &:: "new" <>$$--> Maps.new_
; +BuiltinDecl.(match_builtin __erlang_make_tuple) &++> Tuples.make
; -erlang_ns &:: "is_atom" <>$ arg $--> BIF.is_atom
; -erlang_ns &:: "is_boolean" <>$ arg $--> BIF.is_boolean
; -erlang_ns &:: "is_integer" <>$ arg $--> BIF.is_integer
; -erlang_ns &:: "is_list" <>$ arg $--> BIF.is_list
; -erlang_ns &:: "is_map" <>$ arg $--> BIF.is_map ]
|
831d14d01d6b00fa3977da745b6f552b736d9a2ece53453c80f44993173edcdb | racket/web-server | cookie.rkt | #lang racket/base
(require net/cookies/common
net/cookies/server
web-server/http/request-structs
racket/contract
racket/match
racket/date
)
(provide (contract-out
[cookie->header (-> cookie?
header?)]
[rename make-cookie* make-cookie
(->* (cookie-name?
cookie-value?)
(#:comment any/c
#:domain (or/c domain-value? #f)
#:max-age (or/c (and/c integer? positive?) #f)
#:path (or/c path/extension-value? #f)
#:expires (or/c date? string? #f)
#:secure? any/c
#:http-only? any/c
#:extension (or/c path/extension-value? #f))
cookie?)]
))
;; cookie->header : cookie -> header
;; gets the header that will set the given cookie
(define (cookie->header cookie)
(header #"Set-Cookie" (cookie->set-cookie-header cookie)))
(define exp-date-pregexp
(pregexp (string-append "(\\d\\d)\\s+";day
"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\\s+";month
"(\\d\\d\\d\\d)\\s+";year
hr : min : sec
)))
(define (make-cookie* name
value
#:comment [_ #f]
#:domain [domain #f]
#:max-age [max-age #f]
#:path [path #f]
#:expires [exp-date/raw #f]
#:secure? [secure? #f]
#:http-only? [http-only? #f]
#:extension [extension #f])
(make-cookie name
value
#:domain domain
#:max-age max-age
#:path path
#:secure? (not (not secure?))
#:http-only? (not (not http-only?))
#:extension extension
#:expires (cond [(string? exp-date/raw)
(match exp-date/raw
[(pregexp exp-date-pregexp
(list _
(app string->number day)
month-str
(app string->number year)
(app string->number hour)
(app string->number min)
(app string->number sec)))
(with-handlers ([exn:fail? (λ (e) (failure-cont))])
(seconds->date
(find-seconds sec min hour day
(case month-str
[("Jan") 1]
[("Feb") 2]
[("Mar") 3]
[("Apr") 4]
[("May") 5]
[("Jun") 6]
[("Jul") 7]
[("Aug") 8]
[("Sep") 9]
[("Oct") 10]
[("Nov") 11]
[("Dec") 12])
year
#f)
#f))]
[_ (raise-arguments-error
'make-cookie*
"invalid #:expires string"
'expected
"#f, a date?, or a string conforming to RFC 7231 Section 7.1.1.2"
'given exp-date/raw)])]
[else exp-date/raw])))
| null | https://raw.githubusercontent.com/racket/web-server/f718800b5b3f407f7935adf85dfa663c4bba1651/web-server-lib/web-server/http/cookie.rkt | racket | cookie->header : cookie -> header
gets the header that will set the given cookie
day
month
year | #lang racket/base
(require net/cookies/common
net/cookies/server
web-server/http/request-structs
racket/contract
racket/match
racket/date
)
(provide (contract-out
[cookie->header (-> cookie?
header?)]
[rename make-cookie* make-cookie
(->* (cookie-name?
cookie-value?)
(#:comment any/c
#:domain (or/c domain-value? #f)
#:max-age (or/c (and/c integer? positive?) #f)
#:path (or/c path/extension-value? #f)
#:expires (or/c date? string? #f)
#:secure? any/c
#:http-only? any/c
#:extension (or/c path/extension-value? #f))
cookie?)]
))
(define (cookie->header cookie)
(header #"Set-Cookie" (cookie->set-cookie-header cookie)))
(define exp-date-pregexp
hr : min : sec
)))
(define (make-cookie* name
value
#:comment [_ #f]
#:domain [domain #f]
#:max-age [max-age #f]
#:path [path #f]
#:expires [exp-date/raw #f]
#:secure? [secure? #f]
#:http-only? [http-only? #f]
#:extension [extension #f])
(make-cookie name
value
#:domain domain
#:max-age max-age
#:path path
#:secure? (not (not secure?))
#:http-only? (not (not http-only?))
#:extension extension
#:expires (cond [(string? exp-date/raw)
(match exp-date/raw
[(pregexp exp-date-pregexp
(list _
(app string->number day)
month-str
(app string->number year)
(app string->number hour)
(app string->number min)
(app string->number sec)))
(with-handlers ([exn:fail? (λ (e) (failure-cont))])
(seconds->date
(find-seconds sec min hour day
(case month-str
[("Jan") 1]
[("Feb") 2]
[("Mar") 3]
[("Apr") 4]
[("May") 5]
[("Jun") 6]
[("Jul") 7]
[("Aug") 8]
[("Sep") 9]
[("Oct") 10]
[("Nov") 11]
[("Dec") 12])
year
#f)
#f))]
[_ (raise-arguments-error
'make-cookie*
"invalid #:expires string"
'expected
"#f, a date?, or a string conforming to RFC 7231 Section 7.1.1.2"
'given exp-date/raw)])]
[else exp-date/raw])))
|
9de007878020ca1353fa63bb6d0cea23d130de0b8c75a82cd466da2de1ccd8fa | manetu/temporal-clojure-sdk | codec.clj | Copyright © 2022 , Inc. All rights reserved
(ns temporal.codec
"Methods for managing codecs between a client and the Temporal backend"
(:require [clojure.core.protocols :as p]
[clojure.datafy :as d]
[medley.core :as m])
(:import [io.temporal.common.converter DefaultDataConverter CodecDataConverter]
[io.temporal.payload.codec PayloadCodec]
[io.temporal.api.common.v1 Payload]
[com.google.protobuf ByteString]
[java.util Collections]))
(defprotocol Codec
"A protocol for encoding/decoding of 'payload' maps, suitable for use with [[create]].
'payload' is a map consisting of :metadata and :data, where :metadata is a map of string/bytes pairs
and :data is bytes. The codec may choose to transform or encapsulate the input payload and return
a new payload, potentially with different data/metadata."
(decode [this payload])
(encode [this payload]))
(extend-protocol p/Datafiable
Payload
(datafy [d]
{:metadata (->> (.getMetadataMap d)
(into {})
(m/map-vals #(.toByteArray %)))
:data (-> (.getData d)
(.toByteArray))}))
(defn- ^:no-doc payload->
[x]
(d/datafy x))
(defn- ^:no-doc ->payload
[{:keys [metadata data] :as payload}]
(let [builder (Payload/newBuilder)]
(run! (fn [[k v]] (.putMetadata builder k (ByteString/copyFrom (bytes v)))) metadata)
(when (some? data)
(.setData builder (ByteString/copyFrom (bytes data))))
(.build builder)))
(defn- ^:no-doc codec-map
[f payloads]
(map (fn [payload]
(-> payload
(payload->)
(f)
(->payload)))
payloads))
(defn- ^:no-doc -encode [codec payloads]
(codec-map (partial encode codec) payloads))
(defn- ^:no-doc -decode [codec payloads]
(codec-map (partial decode codec) payloads))
(defn create
"Creates an instance of a [DataConverter](-sdk/latest/io/temporal/common/converter/DataConverter.html)
that accepts a [[Codec]]"
^CodecDataConverter [codec]
(CodecDataConverter.
(DefaultDataConverter/newDefaultInstance)
(Collections/singletonList
(reify PayloadCodec
(encode [_ payloads]
(-encode codec payloads))
(decode [_ payloads]
(-decode codec payloads))))))
| null | https://raw.githubusercontent.com/manetu/temporal-clojure-sdk/4a349878922e7b4083b5bb90eb474a18ffd48200/src/temporal/codec.clj | clojure | Copyright © 2022 , Inc. All rights reserved
(ns temporal.codec
"Methods for managing codecs between a client and the Temporal backend"
(:require [clojure.core.protocols :as p]
[clojure.datafy :as d]
[medley.core :as m])
(:import [io.temporal.common.converter DefaultDataConverter CodecDataConverter]
[io.temporal.payload.codec PayloadCodec]
[io.temporal.api.common.v1 Payload]
[com.google.protobuf ByteString]
[java.util Collections]))
(defprotocol Codec
"A protocol for encoding/decoding of 'payload' maps, suitable for use with [[create]].
'payload' is a map consisting of :metadata and :data, where :metadata is a map of string/bytes pairs
and :data is bytes. The codec may choose to transform or encapsulate the input payload and return
a new payload, potentially with different data/metadata."
(decode [this payload])
(encode [this payload]))
(extend-protocol p/Datafiable
Payload
(datafy [d]
{:metadata (->> (.getMetadataMap d)
(into {})
(m/map-vals #(.toByteArray %)))
:data (-> (.getData d)
(.toByteArray))}))
(defn- ^:no-doc payload->
[x]
(d/datafy x))
(defn- ^:no-doc ->payload
[{:keys [metadata data] :as payload}]
(let [builder (Payload/newBuilder)]
(run! (fn [[k v]] (.putMetadata builder k (ByteString/copyFrom (bytes v)))) metadata)
(when (some? data)
(.setData builder (ByteString/copyFrom (bytes data))))
(.build builder)))
(defn- ^:no-doc codec-map
[f payloads]
(map (fn [payload]
(-> payload
(payload->)
(f)
(->payload)))
payloads))
(defn- ^:no-doc -encode [codec payloads]
(codec-map (partial encode codec) payloads))
(defn- ^:no-doc -decode [codec payloads]
(codec-map (partial decode codec) payloads))
(defn create
"Creates an instance of a [DataConverter](-sdk/latest/io/temporal/common/converter/DataConverter.html)
that accepts a [[Codec]]"
^CodecDataConverter [codec]
(CodecDataConverter.
(DefaultDataConverter/newDefaultInstance)
(Collections/singletonList
(reify PayloadCodec
(encode [_ payloads]
(-encode codec payloads))
(decode [_ payloads]
(-decode codec payloads))))))
| |
decb35ef94142da5fde3e6c6b1fd1129cf487ac02e889e989e2c7eb9a47705a0 | SimulaVR/godot-haskell | VisualShaderNodeCustom.hs | # LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving ,
TypeFamilies , TypeOperators , FlexibleContexts , DataKinds ,
MultiParamTypeClasses #
TypeFamilies, TypeOperators, FlexibleContexts, DataKinds,
MultiParamTypeClasses #-}
module Godot.Core.VisualShaderNodeCustom
(Godot.Core.VisualShaderNodeCustom._get_category,
Godot.Core.VisualShaderNodeCustom._get_code,
Godot.Core.VisualShaderNodeCustom._get_description,
Godot.Core.VisualShaderNodeCustom._get_global_code,
Godot.Core.VisualShaderNodeCustom._get_input_port_count,
Godot.Core.VisualShaderNodeCustom._get_input_port_name,
Godot.Core.VisualShaderNodeCustom._get_input_port_type,
Godot.Core.VisualShaderNodeCustom._get_name,
Godot.Core.VisualShaderNodeCustom._get_output_port_count,
Godot.Core.VisualShaderNodeCustom._get_output_port_name,
Godot.Core.VisualShaderNodeCustom._get_output_port_type,
Godot.Core.VisualShaderNodeCustom._get_return_icon_type,
Godot.Core.VisualShaderNodeCustom._get_subcategory)
where
import Data.Coerce
import Foreign.C
import Godot.Internal.Dispatch
import qualified Data.Vector as V
import Linear(V2(..),V3(..),M22)
import Data.Colour(withOpacity)
import Data.Colour.SRGB(sRGB)
import System.IO.Unsafe
import Godot.Gdnative.Internal
import Godot.Api.Types
import Godot.Core.VisualShaderNode()
# NOINLINE bindVisualShaderNodeCustom__get_category #
-- | Override this method to define the category of the associated custom node in the Visual Shader Editor's members dialog.
Defining this method is _ _ optional _ _ . If not overridden , the node will be filed under the " Custom " category .
bindVisualShaderNodeCustom__get_category :: MethodBind
bindVisualShaderNodeCustom__get_category
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_category" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
-- | Override this method to define the category of the associated custom node in the Visual Shader Editor's members dialog.
Defining this method is _ _ optional _ _ . If not overridden , the node will be filed under the " Custom " category .
_get_category ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> IO GodotString
_get_category cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_category
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_category" '[]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_category
# NOINLINE bindVisualShaderNodeCustom__get_code #
| Override this method to define the actual shader code of the associated custom node . The shader code should be returned as a string , which can have multiple lines ( the @"""@ multiline string construct can be used for convenience ) .
The @input_vars@ and @output_vars@ arrays contain the string names of the various input and output variables , as defined by @_get_input_*@ and virtual methods in this class .
The output ports can be assigned values in the shader code . For example , @return output_vars@0@ + " = " + + " ; " @.
You can customize the generated code based on the shader @mode@ ( see @enum Shader . Mode@ ) and/or @type@ ( see @enum VisualShader . Type@ ) .
-- Defining this method is __required__.
bindVisualShaderNodeCustom__get_code :: MethodBind
bindVisualShaderNodeCustom__get_code
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_code" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
| Override this method to define the actual shader code of the associated custom node . The shader code should be returned as a string , which can have multiple lines ( the @"""@ multiline string construct can be used for convenience ) .
The @input_vars@ and @output_vars@ arrays contain the string names of the various input and output variables , as defined by @_get_input_*@ and virtual methods in this class .
The output ports can be assigned values in the shader code . For example , @return output_vars@0@ + " = " + + " ; " @.
You can customize the generated code based on the shader @mode@ ( see @enum Shader . Mode@ ) and/or @type@ ( see @enum VisualShader . Type@ ) .
-- Defining this method is __required__.
_get_code ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Array -> Array -> Int -> Int -> IO GodotString
_get_code cls arg1 arg2 arg3 arg4
= withVariantArray
[toVariant arg1, toVariant arg2, toVariant arg3, toVariant arg4]
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_code
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_code"
'[Array, Array, Int, Int]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_code
# NOINLINE bindVisualShaderNodeCustom__get_description #
-- | Override this method to define the description of the associated custom node in the Visual Shader Editor's members dialog.
-- Defining this method is __optional__.
bindVisualShaderNodeCustom__get_description :: MethodBind
bindVisualShaderNodeCustom__get_description
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_description" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
-- | Override this method to define the description of the associated custom node in the Visual Shader Editor's members dialog.
-- Defining this method is __optional__.
_get_description ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> IO GodotString
_get_description cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_description
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_description" '[]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_description
# NOINLINE bindVisualShaderNodeCustom__get_global_code #
| Override this method to add shader code on top of the global shader , to define your own standard library of reusable methods , varyings , constants , uniforms , etc . The shader code should be returned as a string , which can have multiple lines ( the @"""@ multiline string construct can be used for convenience ) .
-- Be careful with this functionality as it can cause name conflicts with other custom nodes, so be sure to give the defined entities unique names.
You can customize the generated code based on the shader @mode@ ( see @enum Shader . Mode@ ) .
-- Defining this method is __optional__.
bindVisualShaderNodeCustom__get_global_code :: MethodBind
bindVisualShaderNodeCustom__get_global_code
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_global_code" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
| Override this method to add shader code on top of the global shader , to define your own standard library of reusable methods , varyings , constants , uniforms , etc . The shader code should be returned as a string , which can have multiple lines ( the @"""@ multiline string construct can be used for convenience ) .
-- Be careful with this functionality as it can cause name conflicts with other custom nodes, so be sure to give the defined entities unique names.
You can customize the generated code based on the shader @mode@ ( see @enum Shader . Mode@ ) .
-- Defining this method is __optional__.
_get_global_code ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Int -> IO GodotString
_get_global_code cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_global_code
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_global_code"
'[Int]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_global_code
# NOINLINE bindVisualShaderNodeCustom__get_input_port_count #
-- | Override this method to define the amount of input ports of the associated custom node.
-- Defining this method is __required__. If not overridden, the node has no input ports.
bindVisualShaderNodeCustom__get_input_port_count :: MethodBind
bindVisualShaderNodeCustom__get_input_port_count
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_input_port_count" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
-- | Override this method to define the amount of input ports of the associated custom node.
-- Defining this method is __required__. If not overridden, the node has no input ports.
_get_input_port_count ::
(VisualShaderNodeCustom :< cls, Object :< cls) => cls -> IO Int
_get_input_port_count cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_input_port_count
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_input_port_count"
'[]
(IO Int)
where
nodeMethod
= Godot.Core.VisualShaderNodeCustom._get_input_port_count
# NOINLINE bindVisualShaderNodeCustom__get_input_port_name #
-- | Override this method to define the names of input ports of the associated custom node. The names are used both for the input slots in the editor and as identifiers in the shader code, and are passed in the @input_vars@ array in @method _get_code@.
Defining this method is _ _ optional _ _ , but recommended . If not overridden , input ports are named as @"in " + str(port)@.
bindVisualShaderNodeCustom__get_input_port_name :: MethodBind
bindVisualShaderNodeCustom__get_input_port_name
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_input_port_name" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
-- | Override this method to define the names of input ports of the associated custom node. The names are used both for the input slots in the editor and as identifiers in the shader code, and are passed in the @input_vars@ array in @method _get_code@.
Defining this method is _ _ optional _ _ , but recommended . If not overridden , input ports are named as @"in " + str(port)@.
_get_input_port_name ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Int -> IO GodotString
_get_input_port_name cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_input_port_name
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_input_port_name"
'[Int]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_input_port_name
# NOINLINE bindVisualShaderNodeCustom__get_input_port_type #
| Override this method to define the returned type of each input port of the associated custom node ( see @enum VisualShaderNode . PortType@ for possible types ) .
Defining this method is _ _ optional _ _ , but recommended . If not overridden , input ports will return the @VisualShaderNode . PORT_TYPE_SCALAR@ type .
bindVisualShaderNodeCustom__get_input_port_type :: MethodBind
bindVisualShaderNodeCustom__get_input_port_type
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_input_port_type" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
| Override this method to define the returned type of each input port of the associated custom node ( see @enum VisualShaderNode . PortType@ for possible types ) .
Defining this method is _ _ optional _ _ , but recommended . If not overridden , input ports will return the @VisualShaderNode . PORT_TYPE_SCALAR@ type .
_get_input_port_type ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Int -> IO Int
_get_input_port_type cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_input_port_type
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_input_port_type"
'[Int]
(IO Int)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_input_port_type
# NOINLINE bindVisualShaderNodeCustom__get_name #
-- | Override this method to define the name of the associated custom node in the Visual Shader Editor's members dialog and graph.
-- Defining this method is __optional__, but recommended. If not overridden, the node will be named as "Unnamed".
bindVisualShaderNodeCustom__get_name :: MethodBind
bindVisualShaderNodeCustom__get_name
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_name" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
-- | Override this method to define the name of the associated custom node in the Visual Shader Editor's members dialog and graph.
-- Defining this method is __optional__, but recommended. If not overridden, the node will be named as "Unnamed".
_get_name ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> IO GodotString
_get_name cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_name
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_name" '[]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_name
# NOINLINE bindVisualShaderNodeCustom__get_output_port_count #
-- | Override this method to define the amount of output ports of the associated custom node.
-- Defining this method is __required__. If not overridden, the node has no output ports.
bindVisualShaderNodeCustom__get_output_port_count :: MethodBind
bindVisualShaderNodeCustom__get_output_port_count
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_output_port_count" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
-- | Override this method to define the amount of output ports of the associated custom node.
-- Defining this method is __required__. If not overridden, the node has no output ports.
_get_output_port_count ::
(VisualShaderNodeCustom :< cls, Object :< cls) => cls -> IO Int
_get_output_port_count cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_output_port_count
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_output_port_count"
'[]
(IO Int)
where
nodeMethod
= Godot.Core.VisualShaderNodeCustom._get_output_port_count
# NOINLINE bindVisualShaderNodeCustom__get_output_port_name #
-- | Override this method to define the names of output ports of the associated custom node. The names are used both for the output slots in the editor and as identifiers in the shader code, and are passed in the @output_vars@ array in @method _get_code@.
-- Defining this method is __optional__, but recommended. If not overridden, output ports are named as @"out" + str(port)@.
bindVisualShaderNodeCustom__get_output_port_name :: MethodBind
bindVisualShaderNodeCustom__get_output_port_name
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_output_port_name" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
-- | Override this method to define the names of output ports of the associated custom node. The names are used both for the output slots in the editor and as identifiers in the shader code, and are passed in the @output_vars@ array in @method _get_code@.
-- Defining this method is __optional__, but recommended. If not overridden, output ports are named as @"out" + str(port)@.
_get_output_port_name ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Int -> IO GodotString
_get_output_port_name cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_output_port_name
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_output_port_name"
'[Int]
(IO GodotString)
where
nodeMethod
= Godot.Core.VisualShaderNodeCustom._get_output_port_name
# NOINLINE bindVisualShaderNodeCustom__get_output_port_type #
| Override this method to define the returned type of each output port of the associated custom node ( see @enum VisualShaderNode . PortType@ for possible types ) .
Defining this method is _ _ optional _ _ , but recommended . If not overridden , output ports will return the @VisualShaderNode . PORT_TYPE_SCALAR@ type .
bindVisualShaderNodeCustom__get_output_port_type :: MethodBind
bindVisualShaderNodeCustom__get_output_port_type
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_output_port_type" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
| Override this method to define the returned type of each output port of the associated custom node ( see @enum VisualShaderNode . PortType@ for possible types ) .
Defining this method is _ _ optional _ _ , but recommended . If not overridden , output ports will return the @VisualShaderNode . PORT_TYPE_SCALAR@ type .
_get_output_port_type ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Int -> IO Int
_get_output_port_type cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_output_port_type
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_output_port_type"
'[Int]
(IO Int)
where
nodeMethod
= Godot.Core.VisualShaderNodeCustom._get_output_port_type
# NOINLINE bindVisualShaderNodeCustom__get_return_icon_type #
-- | Override this method to define the return icon of the associated custom node in the Visual Shader Editor's members dialog.
-- Defining this method is __optional__. If not overridden, no return icon is shown.
bindVisualShaderNodeCustom__get_return_icon_type :: MethodBind
bindVisualShaderNodeCustom__get_return_icon_type
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_return_icon_type" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
-- | Override this method to define the return icon of the associated custom node in the Visual Shader Editor's members dialog.
-- Defining this method is __optional__. If not overridden, no return icon is shown.
_get_return_icon_type ::
(VisualShaderNodeCustom :< cls, Object :< cls) => cls -> IO Int
_get_return_icon_type cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_return_icon_type
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_return_icon_type"
'[]
(IO Int)
where
nodeMethod
= Godot.Core.VisualShaderNodeCustom._get_return_icon_type
-- Restored pragma braces: the source had a mangled "# NOINLINE ... #",
-- which is not valid GHC pragma syntax. NOINLINE keeps the
-- 'unsafePerformIO' method-bind lookup shared across calls.
{-# NOINLINE bindVisualShaderNodeCustom__get_subcategory #-}

-- | Override this method to define the subcategory of the associated custom node in the Visual Shader Editor's members dialog.
-- Defining this method is __optional__. If not overridden, the node will be filed under the root of the main category (see @method _get_category@).
bindVisualShaderNodeCustom__get_subcategory :: MethodBind
bindVisualShaderNodeCustom__get_subcategory
  = unsafePerformIO $
      withCString "VisualShaderNodeCustom" $
        \ clsNamePtr ->
          withCString "_get_subcategory" $
            \ methodNamePtr ->
              godot_method_bind_get_method clsNamePtr methodNamePtr

-- | Override this method to define the subcategory of the associated custom node in the Visual Shader Editor's members dialog.
-- Defining this method is __optional__. If not overridden, the node will be filed under the root of the main category (see @method _get_category@).
_get_subcategory ::
                 (VisualShaderNodeCustom :< cls, Object :< cls) =>
                 cls -> IO GodotString
_get_subcategory cls
  = withVariantArray []
      (\ (arrPtr, len) ->
         godot_method_bind_call bindVisualShaderNodeCustom__get_subcategory
           (upcast cls)
           arrPtr
           len
           >>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)

-- Dispatch instance for generic, name-based invocation.
instance NodeMethod VisualShaderNodeCustom "_get_subcategory" '[]
           (IO GodotString)
         where
        nodeMethod = Godot.Core.VisualShaderNodeCustom._get_subcategory
| Override this method to define the category of the associated custom node in the Visual Shader Editor's members dialog.
Defining this method is __required__.
Defining this method is __required__.
| Override this method to define the description of the associated custom node in the Visual Shader Editor's members dialog.
Defining this method is __optional__.
| Override this method to define the description of the associated custom node in the Visual Shader Editor's members dialog.
Defining this method is __optional__.
Be careful with this functionality as it can cause name conflicts with other custom nodes, so be sure to give the defined entities unique names.
Defining this method is __optional__.
Be careful with this functionality as it can cause name conflicts with other custom nodes, so be sure to give the defined entities unique names.
Defining this method is __optional__.
| Override this method to define the amount of input ports of the associated custom node.
Defining this method is __required__. If not overridden, the node has no input ports.
| Override this method to define the amount of input ports of the associated custom node.
Defining this method is __required__. If not overridden, the node has no input ports.
| Override this method to define the names of input ports of the associated custom node. The names are used both for the input slots in the editor and as identifiers in the shader code, and are passed in the @input_vars@ array in @method _get_code@.
| Override this method to define the names of input ports of the associated custom node. The names are used both for the input slots in the editor and as identifiers in the shader code, and are passed in the @input_vars@ array in @method _get_code@.
| Override this method to define the name of the associated custom node in the Visual Shader Editor's members dialog and graph.
Defining this method is __optional__, but recommended. If not overridden, the node will be named as "Unnamed".
| Override this method to define the name of the associated custom node in the Visual Shader Editor's members dialog and graph.
Defining this method is __optional__, but recommended. If not overridden, the node will be named as "Unnamed".
| Override this method to define the amount of output ports of the associated custom node.
Defining this method is __required__. If not overridden, the node has no output ports.
| Override this method to define the amount of output ports of the associated custom node.
Defining this method is __required__. If not overridden, the node has no output ports.
| Override this method to define the names of output ports of the associated custom node. The names are used both for the output slots in the editor and as identifiers in the shader code, and are passed in the @output_vars@ array in @method _get_code@.
Defining this method is __optional__, but recommended. If not overridden, output ports are named as @"out" + str(port)@.
| Override this method to define the names of output ports of the associated custom node. The names are used both for the output slots in the editor and as identifiers in the shader code, and are passed in the @output_vars@ array in @method _get_code@.
Defining this method is __optional__, but recommended. If not overridden, output ports are named as @"out" + str(port)@.
| Override this method to define the return icon of the associated custom node in the Visual Shader Editor's members dialog.
Defining this method is __optional__. If not overridden, no return icon is shown.
| Override this method to define the return icon of the associated custom node in the Visual Shader Editor's members dialog.
Defining this method is __optional__. If not overridden, no return icon is shown.
| Override this method to define the subcategory of the associated custom node in the Visual Shader Editor's members dialog.
Defining this method is __optional__. If not overridden, the node will be filed under the root of the main category (see @method _get_category@).
| Override this method to define the subcategory of the associated custom node in the Visual Shader Editor's members dialog.
Defining this method is __optional__. If not overridden, the node will be filed under the root of the main category (see @method _get_category@). | # LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving ,
TypeFamilies , TypeOperators , FlexibleContexts , DataKinds ,
MultiParamTypeClasses #
TypeFamilies, TypeOperators, FlexibleContexts, DataKinds,
MultiParamTypeClasses #-}
module Godot.Core.VisualShaderNodeCustom
(Godot.Core.VisualShaderNodeCustom._get_category,
Godot.Core.VisualShaderNodeCustom._get_code,
Godot.Core.VisualShaderNodeCustom._get_description,
Godot.Core.VisualShaderNodeCustom._get_global_code,
Godot.Core.VisualShaderNodeCustom._get_input_port_count,
Godot.Core.VisualShaderNodeCustom._get_input_port_name,
Godot.Core.VisualShaderNodeCustom._get_input_port_type,
Godot.Core.VisualShaderNodeCustom._get_name,
Godot.Core.VisualShaderNodeCustom._get_output_port_count,
Godot.Core.VisualShaderNodeCustom._get_output_port_name,
Godot.Core.VisualShaderNodeCustom._get_output_port_type,
Godot.Core.VisualShaderNodeCustom._get_return_icon_type,
Godot.Core.VisualShaderNodeCustom._get_subcategory)
where
import Data.Coerce
import Foreign.C
import Godot.Internal.Dispatch
import qualified Data.Vector as V
import Linear(V2(..),V3(..),M22)
import Data.Colour(withOpacity)
import Data.Colour.SRGB(sRGB)
import System.IO.Unsafe
import Godot.Gdnative.Internal
import Godot.Api.Types
import Godot.Core.VisualShaderNode()
# NOINLINE bindVisualShaderNodeCustom__get_category #
Defining this method is _ _ optional _ _ . If not overridden , the node will be filed under the " Custom " category .
bindVisualShaderNodeCustom__get_category :: MethodBind
bindVisualShaderNodeCustom__get_category
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_category" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
Defining this method is _ _ optional _ _ . If not overridden , the node will be filed under the " Custom " category .
_get_category ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> IO GodotString
_get_category cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_category
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_category" '[]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_category
# NOINLINE bindVisualShaderNodeCustom__get_code #
| Override this method to define the actual shader code of the associated custom node . The shader code should be returned as a string , which can have multiple lines ( the @"""@ multiline string construct can be used for convenience ) .
The @input_vars@ and @output_vars@ arrays contain the string names of the various input and output variables , as defined by @_get_input_*@ and virtual methods in this class .
The output ports can be assigned values in the shader code . For example , @return output_vars@0@ + " = " + + " ; " @.
You can customize the generated code based on the shader @mode@ ( see @enum Shader . Mode@ ) and/or @type@ ( see @enum VisualShader . Type@ ) .
bindVisualShaderNodeCustom__get_code :: MethodBind
bindVisualShaderNodeCustom__get_code
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_code" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
| Override this method to define the actual shader code of the associated custom node . The shader code should be returned as a string , which can have multiple lines ( the @"""@ multiline string construct can be used for convenience ) .
The @input_vars@ and @output_vars@ arrays contain the string names of the various input and output variables , as defined by @_get_input_*@ and virtual methods in this class .
The output ports can be assigned values in the shader code . For example , @return output_vars@0@ + " = " + + " ; " @.
You can customize the generated code based on the shader @mode@ ( see @enum Shader . Mode@ ) and/or @type@ ( see @enum VisualShader . Type@ ) .
_get_code ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Array -> Array -> Int -> Int -> IO GodotString
_get_code cls arg1 arg2 arg3 arg4
= withVariantArray
[toVariant arg1, toVariant arg2, toVariant arg3, toVariant arg4]
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_code
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_code"
'[Array, Array, Int, Int]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_code
# NOINLINE bindVisualShaderNodeCustom__get_description #
bindVisualShaderNodeCustom__get_description :: MethodBind
bindVisualShaderNodeCustom__get_description
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_description" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
_get_description ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> IO GodotString
_get_description cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_description
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_description" '[]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_description
# NOINLINE bindVisualShaderNodeCustom__get_global_code #
| Override this method to add shader code on top of the global shader , to define your own standard library of reusable methods , varyings , constants , uniforms , etc . The shader code should be returned as a string , which can have multiple lines ( the @"""@ multiline string construct can be used for convenience ) .
You can customize the generated code based on the shader @mode@ ( see @enum Shader . Mode@ ) .
bindVisualShaderNodeCustom__get_global_code :: MethodBind
bindVisualShaderNodeCustom__get_global_code
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_global_code" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
| Override this method to add shader code on top of the global shader , to define your own standard library of reusable methods , varyings , constants , uniforms , etc . The shader code should be returned as a string , which can have multiple lines ( the @"""@ multiline string construct can be used for convenience ) .
You can customize the generated code based on the shader @mode@ ( see @enum Shader . Mode@ ) .
_get_global_code ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Int -> IO GodotString
_get_global_code cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_global_code
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_global_code"
'[Int]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_global_code
# NOINLINE bindVisualShaderNodeCustom__get_input_port_count #
bindVisualShaderNodeCustom__get_input_port_count :: MethodBind
bindVisualShaderNodeCustom__get_input_port_count
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_input_port_count" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
_get_input_port_count ::
(VisualShaderNodeCustom :< cls, Object :< cls) => cls -> IO Int
_get_input_port_count cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_input_port_count
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_input_port_count"
'[]
(IO Int)
where
nodeMethod
= Godot.Core.VisualShaderNodeCustom._get_input_port_count
# NOINLINE bindVisualShaderNodeCustom__get_input_port_name #
Defining this method is _ _ optional _ _ , but recommended . If not overridden , input ports are named as @"in " + str(port)@.
bindVisualShaderNodeCustom__get_input_port_name :: MethodBind
bindVisualShaderNodeCustom__get_input_port_name
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_input_port_name" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
Defining this method is _ _ optional _ _ , but recommended . If not overridden , input ports are named as @"in " + str(port)@.
_get_input_port_name ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Int -> IO GodotString
_get_input_port_name cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_input_port_name
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_input_port_name"
'[Int]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_input_port_name
# NOINLINE bindVisualShaderNodeCustom__get_input_port_type #
| Override this method to define the returned type of each input port of the associated custom node ( see @enum VisualShaderNode . PortType@ for possible types ) .
Defining this method is _ _ optional _ _ , but recommended . If not overridden , input ports will return the @VisualShaderNode . PORT_TYPE_SCALAR@ type .
bindVisualShaderNodeCustom__get_input_port_type :: MethodBind
bindVisualShaderNodeCustom__get_input_port_type
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_input_port_type" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
| Override this method to define the returned type of each input port of the associated custom node ( see @enum VisualShaderNode . PortType@ for possible types ) .
Defining this method is _ _ optional _ _ , but recommended . If not overridden , input ports will return the @VisualShaderNode . PORT_TYPE_SCALAR@ type .
_get_input_port_type ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Int -> IO Int
_get_input_port_type cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_input_port_type
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_input_port_type"
'[Int]
(IO Int)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_input_port_type
# NOINLINE bindVisualShaderNodeCustom__get_name #
bindVisualShaderNodeCustom__get_name :: MethodBind
bindVisualShaderNodeCustom__get_name
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_name" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
_get_name ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> IO GodotString
_get_name cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_name
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_name" '[]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_name
# NOINLINE bindVisualShaderNodeCustom__get_output_port_count #
bindVisualShaderNodeCustom__get_output_port_count :: MethodBind
bindVisualShaderNodeCustom__get_output_port_count
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_output_port_count" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
_get_output_port_count ::
(VisualShaderNodeCustom :< cls, Object :< cls) => cls -> IO Int
_get_output_port_count cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_output_port_count
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_output_port_count"
'[]
(IO Int)
where
nodeMethod
= Godot.Core.VisualShaderNodeCustom._get_output_port_count
# NOINLINE bindVisualShaderNodeCustom__get_output_port_name #
bindVisualShaderNodeCustom__get_output_port_name :: MethodBind
bindVisualShaderNodeCustom__get_output_port_name
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_output_port_name" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
_get_output_port_name ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Int -> IO GodotString
_get_output_port_name cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_output_port_name
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_output_port_name"
'[Int]
(IO GodotString)
where
nodeMethod
= Godot.Core.VisualShaderNodeCustom._get_output_port_name
# NOINLINE bindVisualShaderNodeCustom__get_output_port_type #
| Override this method to define the returned type of each output port of the associated custom node ( see @enum VisualShaderNode . PortType@ for possible types ) .
Defining this method is _ _ optional _ _ , but recommended . If not overridden , output ports will return the @VisualShaderNode . PORT_TYPE_SCALAR@ type .
bindVisualShaderNodeCustom__get_output_port_type :: MethodBind
bindVisualShaderNodeCustom__get_output_port_type
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_output_port_type" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
| Override this method to define the returned type of each output port of the associated custom node ( see @enum VisualShaderNode . PortType@ for possible types ) .
Defining this method is _ _ optional _ _ , but recommended . If not overridden , output ports will return the @VisualShaderNode . PORT_TYPE_SCALAR@ type .
_get_output_port_type ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> Int -> IO Int
_get_output_port_type cls arg1
= withVariantArray [toVariant arg1]
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_output_port_type
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_output_port_type"
'[Int]
(IO Int)
where
nodeMethod
= Godot.Core.VisualShaderNodeCustom._get_output_port_type
# NOINLINE bindVisualShaderNodeCustom__get_return_icon_type #
bindVisualShaderNodeCustom__get_return_icon_type :: MethodBind
bindVisualShaderNodeCustom__get_return_icon_type
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_return_icon_type" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
_get_return_icon_type ::
(VisualShaderNodeCustom :< cls, Object :< cls) => cls -> IO Int
_get_return_icon_type cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call
bindVisualShaderNodeCustom__get_return_icon_type
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_return_icon_type"
'[]
(IO Int)
where
nodeMethod
= Godot.Core.VisualShaderNodeCustom._get_return_icon_type
# NOINLINE bindVisualShaderNodeCustom__get_subcategory #
bindVisualShaderNodeCustom__get_subcategory :: MethodBind
bindVisualShaderNodeCustom__get_subcategory
= unsafePerformIO $
withCString "VisualShaderNodeCustom" $
\ clsNamePtr ->
withCString "_get_subcategory" $
\ methodNamePtr ->
godot_method_bind_get_method clsNamePtr methodNamePtr
_get_subcategory ::
(VisualShaderNodeCustom :< cls, Object :< cls) =>
cls -> IO GodotString
_get_subcategory cls
= withVariantArray []
(\ (arrPtr, len) ->
godot_method_bind_call bindVisualShaderNodeCustom__get_subcategory
(upcast cls)
arrPtr
len
>>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
instance NodeMethod VisualShaderNodeCustom "_get_subcategory" '[]
(IO GodotString)
where
nodeMethod = Godot.Core.VisualShaderNodeCustom._get_subcategory |
04cfd9ab045fe072384fed49abc2427ff8a5b814352e2b71979d82526ed985af | tqtezos/lorentz-contract-oracle | CmdLnArgs.hs | {-# OPTIONS -Wno-missing-export-lists -Wno-unused-do-bind -Wno-partial-fields -Wno-orphans #-}
module Lorentz.Contracts.Oracle.CmdLnArgs where
import Data.Char
import Control.Applicative
import Control.Monad
import Text.Show (Show(..))
import Data.List
import Data.Either
import Data.Function (id, flip, const)
import Prelude (FilePath, runReaderT)
import Data.String (String)
import Data.Maybe
import Data.Typeable
import Text.ParserCombinators.ReadP (ReadP)
import Text.Read
import qualified Text.ParserCombinators.ReadP as P
import Lorentz
import Michelson.Macro
import Michelson.Parser
import Michelson.TypeCheck.Instr
import Michelson.TypeCheck.TypeCheck
import Michelson.Typed.Annotation
import Michelson.Typed.Instr
import Michelson.Typed.Scope
import Michelson.Typed.Sing
import Michelson.Typed.T
import Michelson.Typed.Value
import qualified Michelson.Untyped.Type as U
import qualified Tezos.Address as Tezos
import qualified Options.Applicative as Opt
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Data.Constraint
import Data.Singletons
import Text.Megaparsec (eof)
import qualified Lorentz.Contracts.Oracle as Oracle
-- | Identity coercion: a typed Michelson 'Value'' already is its own
-- Michelson representation. NOTE(review): orphan instance, local to this CLI.
instance IsoValue (Value' Instr t) where
  type ToT (Value' Instr t) = t
  toVal = id
  fromVal = id

-- | No `Notes` — annotate typed values with bare (star) notes only.
instance SingI t => HasTypeAnn (Value' Instr t) where
  getTypeAnn = starNotes
-- | Run a parser between an opening and a closing delimiter character,
-- discarding both delimiters and returning the inner result.
betweenChars :: Char -> Char -> ReadP a -> ReadP a
betweenChars open close inner =
  P.between (P.char open) (P.char close) inner
-- | Parse something in parentheses: @(something)@
inParensP :: ReadP a -> ReadP a
inParensP = '(' `betweenChars` ')'

-- | Parse something in double-quotes: @"something"@
inQuotesP :: ReadP a -> ReadP a
inQuotesP = '"' `betweenChars` '"'
-- | Attempt the parser with the given modifier applied; if that
-- alternative fails, fall back to the unmodified parser.
maybeLiftP :: (ReadP a -> ReadP a) -> ReadP a -> ReadP a
maybeLiftP wrap p = wrap p <|> p
-- | Attempt to parse with `inParensP`, else parse without parentheses
maybeInParensP :: ReadP a -> ReadP a
maybeInParensP = maybeLiftP inParensP

-- | Attempt to parse with `inQuotesP`, else parse without quotes
maybeInQuotesP :: ReadP a -> ReadP a
maybeInQuotesP = maybeLiftP inQuotesP
-- | Read an `Address`, inside or outside of @""@'s and/or parentheses.
-- Recognizes the implicit-account prefixes @tz1@, @tz2@, @tz3@ as well as
-- the originated-contract prefix @KT1@; full base58check validation is
-- delegated to `Tezos.parseAddress`.
readAddressP :: ReadP Address
readAddressP =
  maybeInParensP . maybeInQuotesP $ do
    ensureAddressPrefix
    addressStr <- P.munch1 isAlphaNum
    case Tezos.parseAddress $ T.pack addressStr of
      Left err -> fail $ show err
      Right address' -> return address'
  where
    -- Peek (without consuming input) and require a known address prefix.
    -- The previous version only accepted @tz1@ and @KT1@, wrongly
    -- rejecting valid @tz2@/@tz3@ implicit addresses.
    ensureAddressPrefix = tzPrefix <|> ktPrefix
    tzPrefix = do
      ('t':'z':c:_) <- P.look
      if c == '1' || c == '2' || c == '3'
        then return ()
        else fail "unsupported tz address prefix"
    ktPrefix = do
      ('K':'T':'1':_) <- P.look
      return ()
-- | `Read` by delegating to `readAddressP`; operator precedence is ignored.
instance Read Address where
  readPrec = readP_to_Prec $ const readAddressP
-- | Parse an `Address` command-line argument, given its long-option name.
-- Uses the `Read Address` instance above via `Opt.auto`.
parseAddress :: String -> Opt.Parser Address
parseAddress name =
  Opt.option Opt.auto $
    Opt.long name <>
    Opt.metavar "ADDRESS" <>
    Opt.help ("Address of the " ++ name ++ ".")
-- | Flag selecting whether to force the printed contract onto one line.
onelineOption :: Opt.Parser Bool
onelineOption =
  Opt.switch $ mconcat
    [ Opt.long "oneline"
    , Opt.help "Force single line output"
    ]
-- | Optional output `FilePath`; `Nothing` means write to stdout.
outputOptions :: Opt.Parser (Maybe FilePath)
outputOptions =
  optional $ Opt.strOption $
    Opt.short 'o' <>
    Opt.long "output" <>
    Opt.metavar "FILEPATH" <>
    Opt.help "File to use as output. If not specified, stdout is used."
-- | Discharge the `HasNoOp` constraint for @t@, or crash if @t@
-- contains the @operation@ type. Used to lift runtime type inspection
-- into a compile-time-style constraint.
assertOpAbsense :: forall (t :: T) a. SingI t => (HasNoOp t => a) -> a
assertOpAbsense f =
  case opAbsense (sing @t) of
    Nothing -> error "assertOpAbsense"
    Just Dict -> forbiddenOp @t f

-- | Discharge `HasNoBigMap` for @t@, or crash if @t@ contains @big_map@.
assertBigMapAbsense :: forall (t :: T) a. SingI t => (HasNoBigMap t => a) -> a
assertBigMapAbsense f =
  case bigMapAbsense (sing @t) of
    Nothing -> error "assertBigMapAbsense"
    Just Dict -> forbiddenBigMap @t f

-- | Discharge `HasNoNestedBigMaps` for @t@, or crash on nested @big_map@s.
assertNestedBigMapsAbsense :: forall (t :: T) a. SingI t => (HasNoNestedBigMaps t => a) -> a
assertNestedBigMapsAbsense f =
  case nestedBigMapsAbsense (sing @t) of
    Nothing -> error "assertNestedBigMapsAbsense"
    Just Dict -> forbiddenNestedBigMaps @t f

-- | Discharge `HasNoContract` for @t@, or crash if @t@ contains
-- a @contract@ type.
assertContractAbsense :: forall (t :: T) a. SingI t => (HasNoContract t => a) -> a
assertContractAbsense f =
  case contractTypeAbsense (sing @t) of
    Nothing -> error "assertContractAbsense"
    Just Dict -> forbiddenContractType @t f
-- | Every comparable-type singleton carries a 'Typeable' witness;
-- enumerate all constructors explicitly.
singTypeableCT :: forall (t :: CT). Sing t -> Dict (Typeable t)
singTypeableCT s =
  case s of
    SCInt -> Dict
    SCNat -> Dict
    SCString -> Dict
    SCBytes -> Dict
    SCMutez -> Dict
    SCBool -> Dict
    SCKeyHash -> Dict
    SCTimestamp -> Dict
    SCAddress -> Dict
-- | Every Michelson type singleton carries a 'Typeable' witness;
-- compound types recurse into their components and combine the
-- resulting dictionaries with 'withDict'.
singTypeableT :: forall (t :: T). Sing t -> Dict (Typeable t)
singTypeableT (STc ct) =
  withDict (singTypeableCT ct) $
  Dict
singTypeableT STKey = Dict
singTypeableT STUnit = Dict
singTypeableT STSignature = Dict
singTypeableT STChainId = Dict
singTypeableT (STOption st) =
  withDict (singTypeableT st) $
  Dict
singTypeableT (STList st) =
  withDict (singTypeableT st) $
  Dict
singTypeableT (STSet st) =
  withDict (singTypeableCT st) $
  Dict
singTypeableT STOperation = Dict
singTypeableT (STContract st) =
  withDict (singTypeableT st) $
  Dict
singTypeableT (STPair st su) =
  withDict (singTypeableT st) $
  withDict (singTypeableT su) $
  Dict
singTypeableT (STOr st su) =
  withDict (singTypeableT st) $
  withDict (singTypeableT su) $
  Dict
singTypeableT (STLambda st su) =
  withDict (singTypeableT st) $
  withDict (singTypeableT su) $
  Dict
singTypeableT (STMap st su) =
  withDict (singTypeableCT st) $
  withDict (singTypeableT su) $
  Dict
singTypeableT (STBigMap st su) =
  withDict (singTypeableCT st) $
  withDict (singTypeableT su) $
  Dict
-- | Recover an implicit 'SingI' dictionary from an explicit
-- comparable-type singleton; enumerate all constructors explicitly.
singICT :: forall (t :: CT). Sing t -> Dict (SingI t)
singICT s =
  case s of
    SCInt -> Dict
    SCNat -> Dict
    SCString -> Dict
    SCBytes -> Dict
    SCMutez -> Dict
    SCBool -> Dict
    SCKeyHash -> Dict
    SCTimestamp -> Dict
    SCAddress -> Dict
-- | Recover an implicit 'SingI' dictionary from an explicit type
-- singleton; compound types recurse into their components.
singIT :: forall (t :: T). Sing t -> Dict (SingI t)
singIT (STc ct) =
  withDict (singICT ct) $
  Dict
singIT STKey = Dict
singIT STUnit = Dict
singIT STSignature = Dict
singIT STChainId = Dict
singIT (STOption st) =
  withDict (singIT st) $
  Dict
singIT (STList st) =
  withDict (singIT st) $
  Dict
singIT (STSet st) =
  withDict (singICT st) $
  Dict
singIT STOperation = Dict
singIT (STContract st) =
  withDict (singIT st) $
  Dict
singIT (STPair st su) =
  withDict (singIT st) $
  withDict (singIT su) $
  Dict
singIT (STOr st su) =
  withDict (singIT st) $
  withDict (singIT su) $
  Dict
singIT (STLambda st su) =
  withDict (singIT st) $
  withDict (singIT su) $
  Dict
singIT (STMap st su) =
  withDict (singICT st) $
  withDict (singIT su) $
  Dict
singIT (STBigMap st su) =
  withDict (singICT st) $
  withDict (singIT su) $
  Dict
-- | Top-level CLI commands understood by this executable.
data CmdLnArgs
  = Print (SomeSing T) (Maybe FilePath) Bool
    -- ^ Dump the Oracle contract for the given value type
    -- (output path, force-single-line flag)
  | PrintTimeStamped (SomeSing T) (Maybe FilePath) Bool
    -- ^ Dump the timestamped variant of the Oracle contract
  | Init
      { currentValue :: SomeContractParam -- ^ initial oracle value
      , admin :: Address -- ^ initial administrator address
      }
  | GetValue
      { callbackContract :: Address -- ^ contract receiving the value
      }
  | UpdateValue
      { newValue :: SomeContractParam -- ^ replacement oracle value
      }
  | UpdateAdmin
      { newAdmin :: Address -- ^ replacement administrator address
      }
-- | Strip the annotation wrapper from an untyped Michelson `U.Type`.
unExplicitType :: U.Type -> U.T
unExplicitType =
  \case
    U.Type t _ -> t

-- | Drop the annotation from an untyped comparable type.
fromUntypedComparable :: U.Comparable -> CT
fromUntypedComparable (U.Comparable ct _) = ct

-- | `fromUntypedT` after discarding the annotation wrapper.
fromUntypedT' :: U.Type -> T
fromUntypedT' = fromUntypedT . unExplicitType
-- | Convert an untyped Michelson type to its typed (kind-level)
-- representation, discarding all field/type annotations.
fromUntypedT :: U.T -> T
fromUntypedT (U.Tc ct) = Tc ct
fromUntypedT U.TKey = TKey
fromUntypedT U.TUnit = TUnit
fromUntypedT U.TChainId = TChainId
fromUntypedT U.TSignature = TSignature
fromUntypedT (U.TOption x) = TOption $ fromUntypedT' x
fromUntypedT (U.TList x) = TList $ fromUntypedT' x
fromUntypedT (U.TSet ct) = TSet $ fromUntypedComparable ct
fromUntypedT U.TOperation = TOperation
fromUntypedT (U.TContract x) = TContract $ fromUntypedT' x
fromUntypedT (U.TPair _ _ x y) = TPair (fromUntypedT' x) (fromUntypedT' y)
fromUntypedT (U.TOr _ _ x y) = TOr (fromUntypedT' x) (fromUntypedT' y)
fromUntypedT (U.TLambda x y) = TLambda (fromUntypedT' x) (fromUntypedT' y)
fromUntypedT (U.TMap ct x) = TMap (fromUntypedComparable ct) $ fromUntypedT' x
fromUntypedT (U.TBigMap ct x) = TBigMap (fromUntypedComparable ct) $ fromUntypedT' x
-- | Parse some `T`: reads a @--&lt;name&gt;Type@ option containing Michelson
-- type syntax and converts it to an existentially-wrapped singleton.
-- NOTE(review): parse failure calls `error` (aborts the program) rather
-- than producing an option-parser error.
parseSomeT :: String -> Opt.Parser (SomeSing T)
parseSomeT name =
  (\typeStr ->
    let parsedType = parseNoEnv
          type_
          name
          typeStr
     in let type' = either (error . T.pack . show) unExplicitType parsedType
         in withSomeSingT (fromUntypedT type') SomeSing
  ) <$>
  Opt.strOption @Text
    (mconcat
      [ Opt.long $ name ++ "Type"
      , Opt.metavar "Michelson Type"
      , Opt.help $ "The Michelson Type of " ++ name
      ])
-- | A contract parameter with some type: packs a typed Michelson value
-- together with its singleton, notes, and proofs that the type contains
-- no @operation@ and no @big_map@.
data SomeContractParam where
  SomeContractParam
    :: (SingI t, Typeable t)
    => Value t
    -> (Sing t, Notes t)
    -> (Dict (HasNoOp t), Dict (HasNoBigMap t))
    -> SomeContractParam

-- | Consume `SomeContractParam` with a rank-2 continuation; pattern
-- matching on the `Dict`s brings the constraints into scope for @f@.
fromSomeContractParam ::
     SomeContractParam
  -> (forall t. (SingI t, Typeable t, HasNoOp t, HasNoBigMap t) =>
        Value t -> r)
  -> r
fromSomeContractParam (SomeContractParam xs (_, _) (Dict, Dict)) f = f xs
-- | Parse and typecheck a Michelson value of the expected type @t@.
-- Typechecking failures are turned into parser failures via `fail`.
parseTypeCheckValue ::
     forall t. (SingI t)
  => Parser (Value t)
parseTypeCheckValue =
  (>>= either (fail . show) return) $
  runTypeCheckIsolated . flip runReaderT def . typeCheckValue . expandValue <$>
  (value <* eof)
-- | Parse a typed Michelson value option: the type comes from
-- @--&lt;name&gt;Type@ (via `parseSomeT`) and the value from @--&lt;name&gt;@.
-- The type is asserted free of @operation@/@big_map@ before the value
-- is typechecked against it.
-- NOTE(review): parse/typecheck failure calls `error` (aborts) rather
-- than producing an option-parser error.
parseSomeContractParam :: String -> Opt.Parser SomeContractParam
parseSomeContractParam name =
  (\(SomeSing (st :: Sing t)) paramStr ->
    withDict (singIT st) $
    withDict (singTypeableT st) $
    assertOpAbsense @t $
    assertBigMapAbsense @t $
    let parsedParam = parseNoEnv
          (parseTypeCheckValue @t)
          name
          paramStr
     in let param = either (error . T.pack . show) id parsedParam
         in SomeContractParam param (st, starNotes) (Dict, Dict)
  ) <$>
  parseSomeT name <*>
  Opt.strOption @Text
    (mconcat
      [ Opt.long name
      , Opt.metavar "Michelson Value"
      , Opt.help $ "The Michelson Value: " ++ name
      ])
-- | Top-level command-line parser: one subcommand per 'CmdLnArgs'
-- constructor.
argParser :: Opt.Parser CmdLnArgs
argParser = Opt.hsubparser $ mconcat
  [ printSubCmd
  , printTimestampedSubCmd
  , initSubCmd
  , getValueSubCmd
  , updateValueSubCmd
  , updateAdminSubCmd
  ]
  where
    -- Shared glue: wrap a parser as a named subcommand with --help text.
    mkCommandParser commandName parser desc =
      Opt.command commandName $
      Opt.info (Opt.helper <*> parser) $
      Opt.progDesc desc
    printSubCmd =
      mkCommandParser "print"
      (Print <$> parseSomeT "value" <*> outputOptions <*> onelineOption)
      "Dump the Oracle contract in form of Michelson code"
    printTimestampedSubCmd =
      mkCommandParser "print-timestamped"
      (PrintTimeStamped <$> parseSomeT "value" <*> outputOptions <*> onelineOption)
      "Dump the Timestamped Oracle contract in form of Michelson code"
    initSubCmd =
      mkCommandParser "init"
      (Init <$>
        parseSomeContractParam "initialValue" <*>
        parseAddress "admin"
      )
      "Initial storage for the Oracle contract"
    getValueSubCmd =
      mkCommandParser "get-value"
      (GetValue <$> parseAddress "callbackContract")
      "get value"
    updateValueSubCmd =
      mkCommandParser "update-value"
      (UpdateValue <$> parseSomeContractParam "newValue")
      "update value"
    updateAdminSubCmd =
      mkCommandParser "update-admin"
      (UpdateAdmin <$> parseAddress "admin")
      "update admin"
-- | Top-level @--help@ metadata for the CLI.
infoMod :: Opt.InfoMod CmdLnArgs
infoMod =
  Opt.fullDesc <>
  Opt.progDesc "Oracle contract CLI interface"
-- | Execute a 'CmdLnArgs' command. The caller supplies the rendering
-- callback @f@, which receives an optional output path and the text to
-- emit; the contract-printing commands honor @-o@, while value-printing
-- commands always pass 'Nothing' (stdout).
runCmdLnArgs :: (Maybe FilePath -> TL.Text -> r) -> CmdLnArgs -> r
runCmdLnArgs f = \case
  Print (SomeSing (st :: Sing t)) mOutput forceOneLine ->
    -- Bring SingI/Typeable into scope and prove the storage type is
    -- printable (no ops, big_maps, nested big_maps, or contract values).
    withDict (singIT st) $
    withDict (singTypeableT st) $
    assertOpAbsense @t $
    assertBigMapAbsense @t $
    assertNestedBigMapsAbsense @t $
    assertContractAbsense @t $
    f mOutput $
    printLorentzContract forceOneLine (Oracle.uncheckedOracleContract @(Value t))
  PrintTimeStamped (SomeSing (st :: Sing t)) mOutput forceOneLine ->
    withDict (singIT st) $
    withDict (singTypeableT st) $
    assertOpAbsense @t $
    assertBigMapAbsense @t $
    assertNestedBigMapsAbsense @t $
    assertContractAbsense @t $
    f mOutput $
    printLorentzContract forceOneLine (Oracle.timestampedOracleContract @(Value t))
  Init {..} ->
    fromSomeContractParam currentValue $ \currentValue' ->
    f Nothing . printLorentzValue forceSingleLine $
    Oracle.Storage currentValue' admin
  GetValue {..} ->
    f Nothing . printLorentzValue forceSingleLine $
    Oracle.GetValue @() $
    mkView () $ callingDefTAddress @() $ toTAddress callbackContract
  UpdateValue {..} ->
    fromSomeContractParam newValue $ \newValue' ->
    f Nothing . printLorentzValue forceSingleLine $
    Oracle.UpdateValue newValue'
  UpdateAdmin {..} ->
    f Nothing . printLorentzValue forceSingleLine $
    Oracle.UpdateAdmin @() newAdmin
  where
    -- Printed Michelson values are always rendered on a single line.
    forceSingleLine = True
| null | https://raw.githubusercontent.com/tqtezos/lorentz-contract-oracle/bad8d949bde433d5de594955d4d0253611a3312f/src/Lorentz/Contracts/Oracle/CmdLnArgs.hs | haskell | # OPTIONS -Wno-missing-export-lists -Wno-unused-do-bind -Wno-partial-fields -Wno-orphans #
| No `Notes`
| Parse something in parentheses
| Attempt to parse with given modifier, otherwise parse without
| Attempt to parse `inParensP`, else parse without
| Read an `Address`, inside or outside of @""@'s
| Parse an `Address` argument, given its field name
| Parse the output `FilePath`
| Parse some `T`
| A contract parameter with some type
| Consume `SomeContractParam` |
module Lorentz.Contracts.Oracle.CmdLnArgs where
import Data.Char
import Control.Applicative
import Control.Monad
import Text.Show (Show(..))
import Data.List
import Data.Either
import Data.Function (id, flip, const)
import Prelude (FilePath, runReaderT)
import Data.String (String)
import Data.Maybe
import Data.Typeable
import Text.ParserCombinators.ReadP (ReadP)
import Text.Read
import qualified Text.ParserCombinators.ReadP as P
import Lorentz
import Michelson.Macro
import Michelson.Parser
import Michelson.TypeCheck.Instr
import Michelson.TypeCheck.TypeCheck
import Michelson.Typed.Annotation
import Michelson.Typed.Instr
import Michelson.Typed.Scope
import Michelson.Typed.Sing
import Michelson.Typed.T
import Michelson.Typed.Value
import qualified Michelson.Untyped.Type as U
import qualified Tezos.Address as Tezos
import qualified Options.Applicative as Opt
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import Data.Constraint
import Data.Singletons
import Text.Megaparsec (eof)
import qualified Lorentz.Contracts.Oracle as Oracle
-- | Orphan instance: a typed Michelson 'Value'' is its own Michelson
-- representation, so conversion in both directions is 'id'.
instance IsoValue (Value' Instr t) where
  type ToT (Value' Instr t) = t
  toVal = id
  fromVal = id
-- | Orphan instance: a bare typed 'Value'' carries only star (empty)
-- annotations.
instance SingI t => HasTypeAnn (Value' Instr t) where
  getTypeAnn = starNotes
-- | Run a parser between two delimiter characters, discarding both
-- delimiters and returning only the inner result.
betweenChars :: Char -> Char -> ReadP a -> ReadP a
betweenChars openChar closeChar p =
  P.between (P.char openChar) (P.char closeChar) p
-- | Parse something in parentheses: @(something)@
inParensP :: ReadP a -> ReadP a
inParensP = '(' `betweenChars` ')'
-- | Parse something in double-quotes: @"something"@
inQuotesP :: ReadP a -> ReadP a
inQuotesP = '"' `betweenChars` '"'
-- | Attempt to parse with the given modifier applied, otherwise parse
-- without it (the two alternatives are tried in parallel).
maybeLiftP :: (ReadP a -> ReadP a) -> ReadP a -> ReadP a
maybeLiftP liftP = liftM2 (<|>) liftP id
-- | Attempt to parse with 'inParensP', else parse without parentheses.
maybeInParensP :: ReadP a -> ReadP a
maybeInParensP = maybeLiftP inParensP
-- | Attempt to parse with 'inQuotesP', else parse without quotes.
maybeInQuotesP :: ReadP a -> ReadP a
maybeInQuotesP = maybeLiftP inQuotesP
-- | Read an 'Address', optionally wrapped in quotes and/or parentheses.
-- The alphanumeric run is handed to 'Tezos.parseAddress' for validation.
readAddressP :: ReadP Address
readAddressP =
  maybeInParensP . maybeInQuotesP $ do
    ensureAddressPrefix
    addressStr <- P.munch1 isAlphaNum
    case Tezos.parseAddress $ T.pack addressStr of
      Left err -> fail $ show err
      Right address' -> return address'
  where
    -- Look ahead (without consuming) for a "tz1" or "KT1" prefix.
    -- NOTE(review): tz2/tz3 addresses are rejected here — confirm intended.
    ensureAddressPrefix =
      (do {('t':'z':'1':_) <- P.look; return ()}) <|>
      (do {('K':'T':'1':_) <- P.look; return ()})
-- | Orphan 'Read' instance in terms of 'readAddressP' (precedence ignored).
instance Read Address where
  readPrec = readP_to_Prec $ const readAddressP
-- | Parse an 'Address' argument, given its field name (used for the
-- option's long name and help text). Relies on the 'Read' instance above.
parseAddress :: String -> Opt.Parser Address
parseAddress name =
  Opt.option Opt.auto $
  mconcat
    [ Opt.long name
    , Opt.metavar "ADDRESS"
    , Opt.help $ "Address of the " ++ name ++ "."
    ]
-- | Parse whether to force output onto one line (@--oneline@ switch).
onelineOption :: Opt.Parser Bool
onelineOption = Opt.switch (
  Opt.long "oneline" <>
  Opt.help "Force single line output")
-- | Parse the optional output 'FilePath' (@-o@/@--output@); 'Nothing'
-- means stdout.
outputOptions :: Opt.Parser (Maybe FilePath)
outputOptions = optional . Opt.strOption $ mconcat
  [ Opt.short 'o'
  , Opt.long "output"
  , Opt.metavar "FILEPATH"
  , Opt.help "File to use as output. If not specified, stdout is used."
  ]
-- | Run a computation requiring @'HasNoOp' t@, or 'error' if the type
-- @t@ actually contains operations.
assertOpAbsense :: forall (t :: T) a. SingI t => (HasNoOp t => a) -> a
assertOpAbsense f =
  case opAbsense (sing @t) of
    Nothing -> error "assertOpAbsense"
    Just Dict -> forbiddenOp @t f
-- | Run a computation requiring @'HasNoBigMap' t@, or 'error' if @t@
-- contains big_maps.
assertBigMapAbsense :: forall (t :: T) a. SingI t => (HasNoBigMap t => a) -> a
assertBigMapAbsense f =
  case bigMapAbsense (sing @t) of
    Nothing -> error "assertBigMapAbsense"
    Just Dict -> forbiddenBigMap @t f
-- | Run a computation requiring @'HasNoNestedBigMaps' t@, or 'error' if
-- @t@ contains nested big_maps.
assertNestedBigMapsAbsense :: forall (t :: T) a. SingI t => (HasNoNestedBigMaps t => a) -> a
assertNestedBigMapsAbsense f =
  case nestedBigMapsAbsense (sing @t) of
    Nothing -> error "assertNestedBigMapsAbsense"
    Just Dict -> forbiddenNestedBigMaps @t f
-- | Run a computation requiring @'HasNoContract' t@, or 'error' if @t@
-- contains contract values.
assertContractAbsense :: forall (t :: T) a. SingI t => (HasNoContract t => a) -> a
assertContractAbsense f =
  case contractTypeAbsense (sing @t) of
    Nothing -> error "assertContractAbsense"
    Just Dict -> forbiddenContractType @t f
-- | Recover a 'Typeable' dictionary for a comparable Michelson type
-- from its singleton, by exhaustive case analysis.
singTypeableCT :: forall (t :: CT). Sing t -> Dict (Typeable t)
singTypeableCT SCInt = Dict
singTypeableCT SCNat = Dict
singTypeableCT SCString = Dict
singTypeableCT SCBytes = Dict
singTypeableCT SCMutez = Dict
singTypeableCT SCBool = Dict
singTypeableCT SCKeyHash = Dict
singTypeableCT SCTimestamp = Dict
singTypeableCT SCAddress = Dict
-- | Recover a 'Typeable' dictionary for any Michelson type from its
-- singleton, recursing structurally through compound types.
singTypeableT :: forall (t :: T). Sing t -> Dict (Typeable t)
singTypeableT (STc ct) =
  withDict (singTypeableCT ct) $
  Dict
singTypeableT STKey = Dict
singTypeableT STUnit = Dict
singTypeableT STSignature = Dict
singTypeableT STChainId = Dict
singTypeableT (STOption st) =
  withDict (singTypeableT st) $
  Dict
singTypeableT (STList st) =
  withDict (singTypeableT st) $
  Dict
singTypeableT (STSet st) =
  withDict (singTypeableCT st) $
  Dict
singTypeableT STOperation = Dict
singTypeableT (STContract st) =
  withDict (singTypeableT st) $
  Dict
singTypeableT (STPair st su) =
  withDict (singTypeableT st) $
  withDict (singTypeableT su) $
  Dict
singTypeableT (STOr st su) =
  withDict (singTypeableT st) $
  withDict (singTypeableT su) $
  Dict
singTypeableT (STLambda st su) =
  withDict (singTypeableT st) $
  withDict (singTypeableT su) $
  Dict
singTypeableT (STMap st su) =
  withDict (singTypeableCT st) $
  withDict (singTypeableT su) $
  Dict
singTypeableT (STBigMap st su) =
  withDict (singTypeableCT st) $
  withDict (singTypeableT su) $
  Dict
-- | Recover a 'SingI' dictionary for a comparable Michelson type from
-- its singleton, by exhaustive case analysis.
singICT :: forall (t :: CT). Sing t -> Dict (SingI t)
singICT SCInt = Dict
singICT SCNat = Dict
singICT SCString = Dict
singICT SCBytes = Dict
singICT SCMutez = Dict
singICT SCBool = Dict
singICT SCKeyHash = Dict
singICT SCTimestamp = Dict
singICT SCAddress = Dict
-- | Recover a 'SingI' dictionary for any Michelson type from its
-- singleton, recursing structurally through compound types.
singIT :: forall (t :: T). Sing t -> Dict (SingI t)
singIT (STc ct) =
  withDict (singICT ct) $
  Dict
singIT STKey = Dict
singIT STUnit = Dict
singIT STSignature = Dict
singIT STChainId = Dict
singIT (STOption st) =
  withDict (singIT st) $
  Dict
singIT (STList st) =
  withDict (singIT st) $
  Dict
singIT (STSet st) =
  withDict (singICT st) $
  Dict
singIT STOperation = Dict
singIT (STContract st) =
  withDict (singIT st) $
  Dict
singIT (STPair st su) =
  withDict (singIT st) $
  withDict (singIT su) $
  Dict
singIT (STOr st su) =
  withDict (singIT st) $
  withDict (singIT su) $
  Dict
singIT (STLambda st su) =
  withDict (singIT st) $
  withDict (singIT su) $
  Dict
singIT (STMap st su) =
  withDict (singICT st) $
  withDict (singIT su) $
  Dict
singIT (STBigMap st su) =
  withDict (singICT st) $
  withDict (singIT su) $
  Dict
-- | One constructor per CLI subcommand.
data CmdLnArgs
  -- | Print the Oracle contract: value type, output path, oneline flag.
  = Print (SomeSing T) (Maybe FilePath) Bool
  -- | Print the timestamped Oracle contract: same arguments as 'Print'.
  | PrintTimeStamped (SomeSing T) (Maybe FilePath) Bool
  -- | Print initial storage from a starting value and an admin address.
  | Init
      { currentValue :: SomeContractParam
      , admin :: Address
      }
  -- | Print a GetValue parameter targeting the given callback contract.
  | GetValue
      { callbackContract :: Address
      }
  -- | Print an UpdateValue parameter carrying the new value.
  | UpdateValue
      { newValue :: SomeContractParam
      }
  -- | Print an UpdateAdmin parameter carrying the new admin address.
  | UpdateAdmin
      { newAdmin :: Address
      }
-- | Strip the annotations from an untyped Michelson 'U.Type', keeping
-- only the bare type.
unExplicitType :: U.Type -> U.T
unExplicitType =
  \case
    U.Type t _ -> t
-- | Strip the annotations from an untyped comparable type.
fromUntypedComparable :: U.Comparable -> CT
fromUntypedComparable (U.Comparable ct _) = ct
-- | Convert an annotated untyped 'U.Type' to the typed-level 'T'
-- (annotations are discarded).
fromUntypedT' :: U.Type -> T
fromUntypedT' = fromUntypedT . unExplicitType
-- | Convert a bare untyped Michelson type to its typed-level 'T'
-- counterpart, recursing structurally; pair/or annotations are dropped.
fromUntypedT :: U.T -> T
fromUntypedT (U.Tc ct) = Tc ct
fromUntypedT U.TKey = TKey
fromUntypedT U.TUnit = TUnit
fromUntypedT U.TChainId = TChainId
fromUntypedT U.TSignature = TSignature
fromUntypedT (U.TOption t) = TOption (fromUntypedT' t)
fromUntypedT (U.TList t) = TList (fromUntypedT' t)
fromUntypedT (U.TSet c) = TSet (fromUntypedComparable c)
fromUntypedT U.TOperation = TOperation
fromUntypedT (U.TContract t) = TContract (fromUntypedT' t)
fromUntypedT (U.TPair _ _ l r) = TPair (fromUntypedT' l) (fromUntypedT' r)
fromUntypedT (U.TOr _ _ l r) = TOr (fromUntypedT' l) (fromUntypedT' r)
fromUntypedT (U.TLambda a b) = TLambda (fromUntypedT' a) (fromUntypedT' b)
fromUntypedT (U.TMap c v) = TMap (fromUntypedComparable c) (fromUntypedT' v)
fromUntypedT (U.TBigMap c v) = TBigMap (fromUntypedComparable c) (fromUntypedT' v)
parseSomeT :: String -> Opt.Parser (SomeSing T)
parseSomeT name =
(\typeStr ->
let parsedType = parseNoEnv
type_
name
typeStr
in let type' = either (error . T.pack . show) unExplicitType parsedType
in withSomeSingT (fromUntypedT type') SomeSing
) <$>
Opt.strOption @Text
(mconcat
[ Opt.long $ name ++ "Type"
, Opt.metavar "Michelson Type"
, Opt.help $ "The Michelson Type of " ++ name
])
data SomeContractParam where
SomeContractParam
:: (SingI t, Typeable t)
=> Value t
-> (Sing t, Notes t)
-> (Dict (HasNoOp t), Dict (HasNoBigMap t))
-> SomeContractParam
fromSomeContractParam ::
SomeContractParam
-> (forall t. (SingI t, Typeable t, HasNoOp t, HasNoBigMap t) =>
Value t -> r)
-> r
fromSomeContractParam (SomeContractParam xs (_, _) (Dict, Dict)) f = f xs
| Parse and a value
parseTypeCheckValue ::
forall t. (SingI t)
=> Parser (Value t)
parseTypeCheckValue =
(>>= either (fail . show) return) $
runTypeCheckIsolated . flip runReaderT def . typeCheckValue . expandValue <$>
(value <* eof)
parseSomeContractParam :: String -> Opt.Parser SomeContractParam
parseSomeContractParam name =
(\(SomeSing (st :: Sing t)) paramStr ->
withDict (singIT st) $
withDict (singTypeableT st) $
assertOpAbsense @t $
assertBigMapAbsense @t $
let parsedParam = parseNoEnv
(parseTypeCheckValue @t)
name
paramStr
in let param = either (error . T.pack . show) id parsedParam
in SomeContractParam param (st, starNotes) (Dict, Dict)
) <$>
parseSomeT name <*>
Opt.strOption @Text
(mconcat
[ Opt.long name
, Opt.metavar "Michelson Value"
, Opt.help $ "The Michelson Value: " ++ name
])
argParser :: Opt.Parser CmdLnArgs
argParser = Opt.hsubparser $ mconcat
[ printSubCmd
, printTimestampedSubCmd
, initSubCmd
, getValueSubCmd
, updateValueSubCmd
, updateAdminSubCmd
]
where
mkCommandParser commandName parser desc =
Opt.command commandName $
Opt.info (Opt.helper <*> parser) $
Opt.progDesc desc
printSubCmd =
mkCommandParser "print"
(Print <$> parseSomeT "value" <*> outputOptions <*> onelineOption)
"Dump the Oracle contract in form of Michelson code"
printTimestampedSubCmd =
mkCommandParser "print-timestamped"
(PrintTimeStamped <$> parseSomeT "value" <*> outputOptions <*> onelineOption)
"Dump the Timestamped Oracle contract in form of Michelson code"
initSubCmd =
mkCommandParser "init"
(Init <$>
parseSomeContractParam "initialValue" <*>
parseAddress "admin"
)
"Initial storage for the Oracle contract"
getValueSubCmd =
mkCommandParser "get-value"
(GetValue <$> parseAddress "callbackContract")
"get value"
updateValueSubCmd =
mkCommandParser "update-value"
(UpdateValue <$> parseSomeContractParam "newValue")
"update value"
updateAdminSubCmd =
mkCommandParser "update-admin"
(UpdateAdmin <$> parseAddress "admin")
"update admin"
infoMod :: Opt.InfoMod CmdLnArgs
infoMod = mconcat
[ Opt.fullDesc
, Opt.progDesc "Oracle contract CLI interface"
]
| f = maybe mOutput
runCmdLnArgs :: (Maybe FilePath -> TL.Text -> r) -> CmdLnArgs -> r
runCmdLnArgs f = \case
Print (SomeSing (st :: Sing t)) mOutput forceOneLine ->
withDict (singIT st) $
withDict (singTypeableT st) $
assertOpAbsense @t $
assertBigMapAbsense @t $
assertNestedBigMapsAbsense @t $
assertContractAbsense @t $
f mOutput $
printLorentzContract forceOneLine (Oracle.uncheckedOracleContract @(Value t))
PrintTimeStamped (SomeSing (st :: Sing t)) mOutput forceOneLine ->
withDict (singIT st) $
withDict (singTypeableT st) $
assertOpAbsense @t $
assertBigMapAbsense @t $
assertNestedBigMapsAbsense @t $
assertContractAbsense @t $
f mOutput $
printLorentzContract forceOneLine (Oracle.timestampedOracleContract @(Value t))
Init {..} ->
fromSomeContractParam currentValue $ \currentValue' ->
f Nothing . printLorentzValue forceSingleLine $
Oracle.Storage currentValue' admin
GetValue {..} ->
f Nothing . printLorentzValue forceSingleLine $
Oracle.GetValue @() $
mkView () $ callingDefTAddress @() $ toTAddress callbackContract
UpdateValue {..} ->
fromSomeContractParam newValue $ \newValue' ->
f Nothing . printLorentzValue forceSingleLine $
Oracle.UpdateValue newValue'
UpdateAdmin {..} ->
f Nothing . printLorentzValue forceSingleLine $
Oracle.UpdateAdmin @() newAdmin
where
forceSingleLine = True
|
26496e5c37fdd16aef29b71a550e381c96525324fc7b1556d6b2a6c065c18316 | diku-dk/pfp-e2019-pub | sudoku3-final.hs | import Sudoku
import Control.Exception
import System.Environment
import Control.Parallel.Strategies hiding (parMap)
import Data.Maybe
-- | Read the puzzle file named on the command line, solve every puzzle
-- in parallel, and print how many were solvable.
main :: IO ()
main = do
  [f] <- getArgs
  file <- readFile f
  let puzzles = lines file
  -- force IO before parallel solving
  -- NOTE(review): the comment above likely referred to an `evaluate`
  -- call in an earlier revision; nothing here forces the read eagerly.
  let solutions = runEval (parMap solve puzzles)
  print (length (filter isJust solutions))
-- | Spark the application of the function to every list element,
-- collecting the (lazily evaluated) results in order in 'Eval'.
parMap :: (a -> b) -> [a] -> Eval [b]
parMap g = go
  where
    go [] = return []
    go (x:xs) = do
      y <- rpar (g x)
      ys <- go xs
      return (y : ys)
| null | https://raw.githubusercontent.com/diku-dk/pfp-e2019-pub/4b6ddcc73099708c0bfa4082f5e4f2a45484b2b2/slides/L5-parallel-haskell-code/sudoku3-final.hs | haskell | import Sudoku
import Control.Exception
import System.Environment
import Control.Parallel.Strategies hiding (parMap)
import Data.Maybe
-- | Read the puzzle file named on the command line, solve every puzzle
-- in parallel, and print how many were solvable.
main :: IO ()
main = do
  [f] <- getArgs
  file <- readFile f
  let puzzles = lines file
  -- force IO before parallel solving
  let solutions = runEval (parMap solve puzzles)
  print (length (filter isJust solutions))
parMap :: (a -> b) -> [a] -> Eval [b]
parMap _ [] = return []
parMap f (a:as) = do
b <- rpar (f a)
bs <- parMap f as
return (b:bs)
| |
7a67f8d29390269d2c0e2cd65c3b3843f9acc4bfc26a302446a1af1bddecaf04 | SilentCircle/scpf | sc_push_top.erl | -module(sc_push_top).
%% Public API: info/0.
-export([info/0]).
%% @doc Returns a fixed descriptive string; this module exists only so
%% the release tooling has something to include.
info() ->
    "This is just a placeholder to keep the release system happy.".
| null | https://raw.githubusercontent.com/SilentCircle/scpf/68d46626a056cfd8234d3b6f4661d7d037758619/src/sc_push_top.erl | erlang | -module(sc_push_top).
-export([info/0]).
info() ->
"This is just a placeholder to keep the release system happy.".
| |
c4f33fe6c8e29f2c668cca2bb07dfb8beeee5c8a70e2f61b770138991e73f396 | Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library | CheckoutAcssDebitPaymentMethodOptions.hs | {-# LANGUAGE MultiWayIf #-}
-- CHANGE WITH CAUTION: This is a generated code file generated by Haskell-OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator.
{-# LANGUAGE OverloadedStrings #-}
-- | Contains the types generated from the schema CheckoutAcssDebitPaymentMethodOptions
module StripeAPI.Types.CheckoutAcssDebitPaymentMethodOptions where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import {-# SOURCE #-} StripeAPI.Types.CheckoutAcssDebitMandateOptions
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
-- | Defines the object schema located at @components.schemas.checkout_acss_debit_payment_method_options@ in the specification.
data CheckoutAcssDebitPaymentMethodOptions = CheckoutAcssDebitPaymentMethodOptions
  { -- | currency: Currency supported by the bank account. Returned when the Session is in \`setup\` mode.
    checkoutAcssDebitPaymentMethodOptionsCurrency :: (GHC.Maybe.Maybe CheckoutAcssDebitPaymentMethodOptionsCurrency'),
    -- | mandate_options:
    checkoutAcssDebitPaymentMethodOptionsMandateOptions :: (GHC.Maybe.Maybe CheckoutAcssDebitMandateOptions),
    -- | setup_future_usage: Indicates that you intend to make future payments with this PaymentIntent\'s payment method.
    --
    -- Providing this parameter will [attach the payment method](https:\/\/stripe.com\/docs\/payments\/save-during-payment) to the PaymentIntent\'s Customer, if present, after the PaymentIntent is confirmed and any required actions from the user are complete. If no Customer was provided, the payment method can still be [attached](https:\/\/stripe.com\/docs\/api\/payment_methods\/attach) to a Customer after the transaction completes.
    --
    -- When processing card payments, Stripe also uses \`setup_future_usage\` to dynamically optimize your payment flow and comply with regional legislation and network rules, such as [SCA](https:\/\/stripe.com\/docs\/strong-customer-authentication).
    checkoutAcssDebitPaymentMethodOptionsSetupFutureUsage :: (GHC.Maybe.Maybe CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'),
    -- | verification_method: Bank account verification method.
    checkoutAcssDebitPaymentMethodOptionsVerificationMethod :: (GHC.Maybe.Maybe CheckoutAcssDebitPaymentMethodOptionsVerificationMethod')
  }
  deriving
    ( GHC.Show.Show,
      GHC.Classes.Eq
    )
-- Serializes to a JSON object; fields that are 'GHC.Maybe.Nothing' are
-- omitted entirely rather than emitted as @null@.
instance Data.Aeson.Types.ToJSON.ToJSON CheckoutAcssDebitPaymentMethodOptions where
  toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("currency" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsCurrency obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("mandate_options" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsMandateOptions obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("setup_future_usage" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsSetupFutureUsage obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("verification_method" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsVerificationMethod obj) : GHC.Base.mempty))
  toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("currency" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsCurrency obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("mandate_options" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsMandateOptions obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("setup_future_usage" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsSetupFutureUsage obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("verification_method" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsVerificationMethod obj) : GHC.Base.mempty)))
-- Parses from a JSON object; every field is optional (.:!), so absent
-- keys decode to 'GHC.Maybe.Nothing'.
instance Data.Aeson.Types.FromJSON.FromJSON CheckoutAcssDebitPaymentMethodOptions where
  parseJSON = Data.Aeson.Types.FromJSON.withObject "CheckoutAcssDebitPaymentMethodOptions" (\obj -> (((GHC.Base.pure CheckoutAcssDebitPaymentMethodOptions GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "currency")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "mandate_options")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "setup_future_usage")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "verification_method"))
-- | Create a new 'CheckoutAcssDebitPaymentMethodOptions' with all required fields.
-- (All fields of this schema are optional, so every field starts as
-- 'GHC.Maybe.Nothing'; set individual fields via record update.)
mkCheckoutAcssDebitPaymentMethodOptions :: CheckoutAcssDebitPaymentMethodOptions
mkCheckoutAcssDebitPaymentMethodOptions =
  CheckoutAcssDebitPaymentMethodOptions
    { checkoutAcssDebitPaymentMethodOptionsCurrency = GHC.Maybe.Nothing,
      checkoutAcssDebitPaymentMethodOptionsMandateOptions = GHC.Maybe.Nothing,
      checkoutAcssDebitPaymentMethodOptionsSetupFutureUsage = GHC.Maybe.Nothing,
      checkoutAcssDebitPaymentMethodOptionsVerificationMethod = GHC.Maybe.Nothing
    }
-- | Defines the enum schema located at @components.schemas.checkout_acss_debit_payment_method_options.properties.currency@ in the specification.
--
-- Currency supported by the bank account. Returned when the Session is in \`setup\` mode.
data CheckoutAcssDebitPaymentMethodOptionsCurrency'
  = -- | This case is used if the value encountered during decoding does not match any of the provided cases in the specification.
    CheckoutAcssDebitPaymentMethodOptionsCurrency'Other Data.Aeson.Types.Internal.Value
  | -- | This constructor can be used to send values to the server which are not present in the specification yet.
    CheckoutAcssDebitPaymentMethodOptionsCurrency'Typed Data.Text.Internal.Text
  | -- | Represents the JSON value @"cad"@
    CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumCad
  | -- | Represents the JSON value @"usd"@
    CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumUsd
  deriving (GHC.Show.Show, GHC.Classes.Eq)
-- Enum constructors serialize to their literal JSON strings; 'Other'
-- re-emits the captured raw JSON value unchanged.
instance Data.Aeson.Types.ToJSON.ToJSON CheckoutAcssDebitPaymentMethodOptionsCurrency' where
  toJSON (CheckoutAcssDebitPaymentMethodOptionsCurrency'Other val) = val
  toJSON (CheckoutAcssDebitPaymentMethodOptionsCurrency'Typed val) = Data.Aeson.Types.ToJSON.toJSON val
  toJSON (CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumCad) = "cad"
  toJSON (CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumUsd) = "usd"
-- Decoding never fails: unrecognized values fall back to 'Other'.
instance Data.Aeson.Types.FromJSON.FromJSON CheckoutAcssDebitPaymentMethodOptionsCurrency' where
  parseJSON val =
    GHC.Base.pure
      ( if
          | val GHC.Classes.== "cad" -> CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumCad
          | val GHC.Classes.== "usd" -> CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumUsd
          | GHC.Base.otherwise -> CheckoutAcssDebitPaymentMethodOptionsCurrency'Other val
      )
-- | Defines the enum schema located at @components.schemas.checkout_acss_debit_payment_method_options.properties.setup_future_usage@ in the specification.
--
-- Indicates that you intend to make future payments with this PaymentIntent\'s payment method.
--
-- Providing this parameter will [attach the payment method](https:\/\/stripe.com\/docs\/payments\/save-during-payment) to the PaymentIntent\'s Customer, if present, after the PaymentIntent is confirmed and any required actions from the user are complete. If no Customer was provided, the payment method can still be [attached](https:\/\/stripe.com\/docs\/api\/payment_methods\/attach) to a Customer after the transaction completes.
--
-- When processing card payments, Stripe also uses \`setup_future_usage\` to dynamically optimize your payment flow and comply with regional legislation and network rules, such as [SCA](https:\/\/stripe.com\/docs\/strong-customer-authentication).
data CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'
  = -- | This case is used if the value encountered during decoding does not match any of the provided cases in the specification.
    CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'Other Data.Aeson.Types.Internal.Value
  | -- | This constructor can be used to send values to the server which are not present in the specification yet.
    CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'Typed Data.Text.Internal.Text
  | -- | Represents the JSON value @"none"@
    CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumNone
  | -- | Represents the JSON value @"off_session"@
    CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOffSession
  | -- | Represents the JSON value @"on_session"@
    CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOnSession
  deriving (GHC.Show.Show, GHC.Classes.Eq)
-- Enum constructors serialize to their literal JSON strings; 'Other'
-- re-emits the captured raw JSON value unchanged.
instance Data.Aeson.Types.ToJSON.ToJSON CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage' where
  toJSON (CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'Other val) = val
  toJSON (CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'Typed val) = Data.Aeson.Types.ToJSON.toJSON val
  toJSON (CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumNone) = "none"
  toJSON (CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOffSession) = "off_session"
  toJSON (CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOnSession) = "on_session"
-- Decoding never fails: unrecognized values fall back to 'Other'.
instance Data.Aeson.Types.FromJSON.FromJSON CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage' where
  parseJSON val =
    GHC.Base.pure
      ( if
          | val GHC.Classes.== "none" -> CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumNone
          | val GHC.Classes.== "off_session" -> CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOffSession
          | val GHC.Classes.== "on_session" -> CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOnSession
          | GHC.Base.otherwise -> CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'Other val
      )
-- | Defines the enum schema located at @components.schemas.checkout_acss_debit_payment_method_options.properties.verification_method@ in the specification.
--
-- Bank account verification method.
data CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'
  = -- | This case is used if the value encountered during decoding does not match any of the provided cases in the specification.
    CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'Other Data.Aeson.Types.Internal.Value
  | -- | This constructor can be used to send values to the server which are not present in the specification yet.
    CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'Typed Data.Text.Internal.Text
  | -- | Represents the JSON value @"automatic"@
    CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumAutomatic
  | -- | Represents the JSON value @"instant"@
    CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumInstant
  | -- | Represents the JSON value @"microdeposits"@
    CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumMicrodeposits
  deriving (GHC.Show.Show, GHC.Classes.Eq)
-- Enum constructors serialize to their literal JSON strings; 'Other'
-- re-emits the captured raw JSON value unchanged.
instance Data.Aeson.Types.ToJSON.ToJSON CheckoutAcssDebitPaymentMethodOptionsVerificationMethod' where
  toJSON (CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'Other val) = val
  toJSON (CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'Typed val) = Data.Aeson.Types.ToJSON.toJSON val
  toJSON (CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumAutomatic) = "automatic"
  toJSON (CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumInstant) = "instant"
  toJSON (CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumMicrodeposits) = "microdeposits"
-- Decoding never fails: unrecognized values fall back to 'Other'.
instance Data.Aeson.Types.FromJSON.FromJSON CheckoutAcssDebitPaymentMethodOptionsVerificationMethod' where
  parseJSON val =
    GHC.Base.pure
      ( if
          | val GHC.Classes.== "automatic" -> CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumAutomatic
          | val GHC.Classes.== "instant" -> CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumInstant
          | val GHC.Classes.== "microdeposits" -> CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumMicrodeposits
          | GHC.Base.otherwise -> CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'Other val
      )
| null | https://raw.githubusercontent.com/Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library/ba4401f083ff054f8da68c741f762407919de42f/src/StripeAPI/Types/CheckoutAcssDebitPaymentMethodOptions.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
| Contains the types generated from the schema CheckoutAcssDebitPaymentMethodOptions
# SOURCE #
| Defines the object schema located at @components.schemas.checkout_acss_debit_payment_method_options@ in the specification.
| mandate_options:
| setup_future_usage: Indicates that you intend to make future payments with this PaymentIntent\'s payment method.
| verification_method: Bank account verification method.
| Create a new 'CheckoutAcssDebitPaymentMethodOptions' with all required fields.
| Defines the enum schema located at @components.schemas.checkout_acss_debit_payment_method_options.properties.currency@ in the specification.
| This case is used if the value encountered during decoding does not match any of the provided cases in the specification.
| This constructor can be used to send values to the server which are not present in the specification yet.
| Represents the JSON value @"cad"@
| Represents the JSON value @"usd"@
| Defines the enum schema located at @components.schemas.checkout_acss_debit_payment_method_options.properties.setup_future_usage@ in the specification.
Indicates that you intend to make future payments with this PaymentIntent\'s payment method.
| This case is used if the value encountered during decoding does not match any of the provided cases in the specification.
| This constructor can be used to send values to the server which are not present in the specification yet.
| Represents the JSON value @"none"@
| Represents the JSON value @"on_session"@
| This case is used if the value encountered during decoding does not match any of the provided cases in the specification.
| This constructor can be used to send values to the server which are not present in the specification yet.
| Represents the JSON value @"automatic"@
| Represents the JSON value @"microdeposits"@ | CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
module StripeAPI.Types.CheckoutAcssDebitPaymentMethodOptions where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
data CheckoutAcssDebitPaymentMethodOptions = CheckoutAcssDebitPaymentMethodOptions
| currency : Currency supported by the bank account . Returned when the Session is in \`setup\ ` mode .
checkoutAcssDebitPaymentMethodOptionsCurrency :: (GHC.Maybe.Maybe CheckoutAcssDebitPaymentMethodOptionsCurrency'),
checkoutAcssDebitPaymentMethodOptionsMandateOptions :: (GHC.Maybe.Maybe CheckoutAcssDebitMandateOptions),
Providing this parameter will [ attach the payment method](https:\/\/stripe.com\/docs\/payments\/save - during - payment ) to the PaymentIntent\ 's Customer , if present , after the PaymentIntent is confirmed and any required actions from the user are complete . If no Customer was provided , the payment method can still be [ ) to a Customer after the transaction completes .
When processing card payments , Stripe also uses \`setup_future_usage\ ` to dynamically optimize your payment flow and comply with regional legislation and network rules , such as [ SCA](https:\/\/stripe.com\/docs\/strong - customer - authentication ) .
checkoutAcssDebitPaymentMethodOptionsSetupFutureUsage :: (GHC.Maybe.Maybe CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'),
checkoutAcssDebitPaymentMethodOptionsVerificationMethod :: (GHC.Maybe.Maybe CheckoutAcssDebitPaymentMethodOptionsVerificationMethod')
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON CheckoutAcssDebitPaymentMethodOptions where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("currency" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsCurrency obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("mandate_options" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsMandateOptions obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("setup_future_usage" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsSetupFutureUsage obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("verification_method" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsVerificationMethod obj) : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("currency" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsCurrency obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("mandate_options" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsMandateOptions obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("setup_future_usage" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsSetupFutureUsage obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("verification_method" Data.Aeson.Types.ToJSON..=)) (checkoutAcssDebitPaymentMethodOptionsVerificationMethod obj) : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON CheckoutAcssDebitPaymentMethodOptions where
parseJSON = Data.Aeson.Types.FromJSON.withObject "CheckoutAcssDebitPaymentMethodOptions" (\obj -> (((GHC.Base.pure CheckoutAcssDebitPaymentMethodOptions GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "currency")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "mandate_options")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "setup_future_usage")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "verification_method"))
mkCheckoutAcssDebitPaymentMethodOptions :: CheckoutAcssDebitPaymentMethodOptions
mkCheckoutAcssDebitPaymentMethodOptions =
CheckoutAcssDebitPaymentMethodOptions
{ checkoutAcssDebitPaymentMethodOptionsCurrency = GHC.Maybe.Nothing,
checkoutAcssDebitPaymentMethodOptionsMandateOptions = GHC.Maybe.Nothing,
checkoutAcssDebitPaymentMethodOptionsSetupFutureUsage = GHC.Maybe.Nothing,
checkoutAcssDebitPaymentMethodOptionsVerificationMethod = GHC.Maybe.Nothing
}
Currency supported by the bank account . Returned when the Session is in \`setup\ ` mode .
data CheckoutAcssDebitPaymentMethodOptionsCurrency'
CheckoutAcssDebitPaymentMethodOptionsCurrency'Other Data.Aeson.Types.Internal.Value
CheckoutAcssDebitPaymentMethodOptionsCurrency'Typed Data.Text.Internal.Text
CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumCad
CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumUsd
deriving (GHC.Show.Show, GHC.Classes.Eq)
instance Data.Aeson.Types.ToJSON.ToJSON CheckoutAcssDebitPaymentMethodOptionsCurrency' where
toJSON (CheckoutAcssDebitPaymentMethodOptionsCurrency'Other val) = val
toJSON (CheckoutAcssDebitPaymentMethodOptionsCurrency'Typed val) = Data.Aeson.Types.ToJSON.toJSON val
toJSON (CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumCad) = "cad"
toJSON (CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumUsd) = "usd"
instance Data.Aeson.Types.FromJSON.FromJSON CheckoutAcssDebitPaymentMethodOptionsCurrency' where
parseJSON val =
GHC.Base.pure
( if
| val GHC.Classes.== "cad" -> CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumCad
| val GHC.Classes.== "usd" -> CheckoutAcssDebitPaymentMethodOptionsCurrency'EnumUsd
| GHC.Base.otherwise -> CheckoutAcssDebitPaymentMethodOptionsCurrency'Other val
)
Providing this parameter will [ attach the payment method](https:\/\/stripe.com\/docs\/payments\/save - during - payment ) to the PaymentIntent\ 's Customer , if present , after the PaymentIntent is confirmed and any required actions from the user are complete . If no Customer was provided , the payment method can still be [ ) to a Customer after the transaction completes .
When processing card payments , Stripe also uses \`setup_future_usage\ ` to dynamically optimize your payment flow and comply with regional legislation and network rules , such as [ SCA](https:\/\/stripe.com\/docs\/strong - customer - authentication ) .
data CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'
CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'Other Data.Aeson.Types.Internal.Value
CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'Typed Data.Text.Internal.Text
CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumNone
| Represents the JSON value
CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOffSession
CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOnSession
deriving (GHC.Show.Show, GHC.Classes.Eq)
instance Data.Aeson.Types.ToJSON.ToJSON CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage' where
toJSON (CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'Other val) = val
toJSON (CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'Typed val) = Data.Aeson.Types.ToJSON.toJSON val
toJSON (CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumNone) = "none"
toJSON (CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOffSession) = "off_session"
toJSON (CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOnSession) = "on_session"
instance Data.Aeson.Types.FromJSON.FromJSON CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage' where
parseJSON val =
GHC.Base.pure
( if
| val GHC.Classes.== "none" -> CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumNone
| val GHC.Classes.== "off_session" -> CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOffSession
| val GHC.Classes.== "on_session" -> CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'EnumOnSession
| GHC.Base.otherwise -> CheckoutAcssDebitPaymentMethodOptionsSetupFutureUsage'Other val
)
| Defines the enum schema located at in the specification .
Bank account verification method .
data CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'
CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'Other Data.Aeson.Types.Internal.Value
CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'Typed Data.Text.Internal.Text
CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumAutomatic
| Represents the JSON value
CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumInstant
CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumMicrodeposits
deriving (GHC.Show.Show, GHC.Classes.Eq)
instance Data.Aeson.Types.ToJSON.ToJSON CheckoutAcssDebitPaymentMethodOptionsVerificationMethod' where
toJSON (CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'Other val) = val
toJSON (CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'Typed val) = Data.Aeson.Types.ToJSON.toJSON val
toJSON (CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumAutomatic) = "automatic"
toJSON (CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumInstant) = "instant"
toJSON (CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumMicrodeposits) = "microdeposits"
instance Data.Aeson.Types.FromJSON.FromJSON CheckoutAcssDebitPaymentMethodOptionsVerificationMethod' where
parseJSON val =
GHC.Base.pure
( if
| val GHC.Classes.== "automatic" -> CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumAutomatic
| val GHC.Classes.== "instant" -> CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumInstant
| val GHC.Classes.== "microdeposits" -> CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'EnumMicrodeposits
| GHC.Base.otherwise -> CheckoutAcssDebitPaymentMethodOptionsVerificationMethod'Other val
)
|
b3d6e3cf0c4c8c1c23efdcbafafef20e2a2033c42d57de2645b67afcce6e5332 | geophf/1HaskellADay | Exercise.hs | {-# LANGUAGE OverloadedStrings #-}
module Y2020.M10.D13.Exercise where
-
Yesterday , we translated the results of a SPARQL query into a set of AirBase
values . Today , let 's examine these data in a graph database .
Let 's look at the relationship between airbases and countries today .
... no .
I 'm still unhappy that I had to write a parser for the point - type in the
point - as - string encoded into the JSON , so today , we 're going to do something
different . We 're going to write out the data as JSON , but we 're really going
to write out JSON , and not types - as - strings JSON that really gets my goat .
Moo .
... or something like that .
Then we 'll look at countries - continents - as - a - service , but tomorrow , not today .
-
Yesterday, we translated the results of a SPARQL query into a set of AirBase
values. Today, let's examine these data in a graph database.
Let's look at the relationship between airbases and countries today.
... no.
I'm still unhappy that I had to write a parser for the point-type in the
point-as-string encoded into the JSON, so today, we're going to do something
different. We're going to write out the data as JSON, but we're really going
to write out JSON, and not types-as-strings JSON that really gets my goat.
Moo.
... or something like that.
Then we'll look at countries-continents-as-a-service, but tomorrow, not today.
--}
import Y2020.M10.D12.Exercise
import Data.Aeson
-- TODO(exercise): serialise an 'AirBase' record as structured JSON.
-- 'undefined' crashes at first use; replace with a real implementation.
instance ToJSON AirBase where
  toJSON = undefined
-- TODO(exercise): serialise a 'LongLat' as a real JSON value (e.g. an
-- object or pair), NOT as a "Point(lon lat)" string -- that string
-- encoding is exactly what the surrounding commentary complains about.
instance ToJSON LongLat where
  toJSON = undefined
and do n't get me started on why " Point(lon lat ) " has longitude first , unlike
-- the rest of the Milky Way galaxy but Wikidata? NOOOOOOOOO! ... and if it
-- were formatted as JSON (and not as JSON-as-a-string) it wouldn't be a
-- problem ... AT ALL ... but was it? I ASK YOU!
-- TODO(exercise): write the list of bases to the given file as genuine
-- JSON (using a ToJSON instance for 'AirBase'), not types-as-strings.
writeJSON :: FilePath -> [AirBase] -> IO ()
writeJSON = undefined
-- whew! I feel much better now. Thank you.
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2020/M10/D13/Exercise.hs | haskell | # LANGUAGE OverloadedStrings #
}
the rest of the Milky Way galaxy but Wikidata? NOOOOOOOOO! ... and if it
were formatted as JSON (and not as JSON-as-a-string) it wouldn't be a
problem ... AT ALL ... but was it? I ASK YOU!
whew! I feel much better now. Thank you. |
module Y2020.M10.D13.Exercise where
-
Yesterday , we translated the results of a SPARQL query into a set of AirBase
values . Today , let 's examine these data in a graph database .
Let 's look at the relationship between airbases and countries today .
... no .
I 'm still unhappy that I had to write a parser for the point - type in the
point - as - string encoded into the JSON , so today , we 're going to do something
different . We 're going to write out the data as JSON , but we 're really going
to write out JSON , and not types - as - strings JSON that really gets my goat .
Moo .
... or something like that .
Then we 'll look at countries - continents - as - a - service , but tomorrow , not today .
-
Yesterday, we translated the results of a SPARQL query into a set of AirBase
values. Today, let's examine these data in a graph database.
Let's look at the relationship between airbases and countries today.
... no.
I'm still unhappy that I had to write a parser for the point-type in the
point-as-string encoded into the JSON, so today, we're going to do something
different. We're going to write out the data as JSON, but we're really going
to write out JSON, and not types-as-strings JSON that really gets my goat.
Moo.
... or something like that.
Then we'll look at countries-continents-as-a-service, but tomorrow, not today.
import Y2020.M10.D12.Exercise
import Data.Aeson
instance ToJSON AirBase where
toJSON = undefined
instance ToJSON LongLat where
toJSON = undefined
and do n't get me started on why " Point(lon lat ) " has longitude first , unlike
writeJSON :: FilePath -> [AirBase] -> IO ()
writeJSON = undefined
|
b7084530bc98e14157c55d5f26b2194cdb24e2ee55ff7c0e71b441e935e30844 | walmartlabs/lacinia-pedestal | pedestal_test.clj | Copyright ( c ) 2017 - present Walmart , Inc.
;
Licensed under the Apache License , Version 2.0 ( the " License " )
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
; -2.0
;
; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
(ns com.walmartlabs.lacinia.pedestal-test
(:require
[clojure.test :refer [deftest is use-fixtures]]
[com.walmartlabs.lacinia.pedestal :refer [inject]]
[clj-http.client :as client]
[clojure.string :as str]
[com.walmartlabs.lacinia.test-utils :refer [test-server-fixture
send-request
send-json-request
send-json-string-request]]
[clojure.spec.test.alpha :as stest])
(:import (clojure.lang ExceptionInfo)))
;; Instrument all spec'd functions so bad arguments fail loudly during tests.
(stest/instrument)
;; Boot the test Pedestal server once for the whole namespace, with the
;; GraphiQL IDE enabled (exercised by can-access-graphiql below).
(use-fixtures :once (test-server-fixture {:graphiql true}))
(deftest simple-get-query
  ;; A GET request executes the query; the echo resolver reports :method "get".
  (let [{:keys [status headers body]}
        (send-request "{ echo(value: \"hello\") { value method }}")]
    (is (= 200 status))
    (is (= "application/json" (get headers "Content-Type")))
    (is (= {:data {:echo {:method "get"
                          :value "hello"}}}
           body))))
(deftest simple-post-query
  ;; The same query sent via POST resolves with :method "post".
  (let [{:keys [status body]}
        (send-request :post "{ echo(value: \"hello\") { value method }}")]
    (is (= 200 status))
    (is (= {:data {:echo {:method "post"
                          :value "hello"}}}
           body))))
(deftest invalid-json-post-query
  ;; A body that is not parseable JSON is rejected with a 400.
  (is (= 400
         (-> (send-json-string-request :post "f" "application/json")
             :status))))
(deftest includes-content-type-check-on-post
  ;; POSTs with an unsupported content type are rejected before execution.
  (let [resp (send-json-request :post
                                {:query "{ echo(value: \"hello\") { value method }}"}
                                "text/plain")]
    (is (= {:status 400
            :body {:errors [{:message "Request content type must be application/graphql or application/json."}]}}
           (select-keys resp [:status :body])))))
(deftest missing-query
  ;; A GET without a query parameter at all yields a 400 with an explanation.
  (let [resp (send-json-request :get nil nil)]
    (is (= {:status 400
            :body {:errors [{:message "Query parameter 'query' is missing or blank."}]}}
           (select-keys resp [:status :body])))))
(deftest empty-body
  ;; A POST with no body at all yields a 400 with an explanation.
  (let [resp (send-json-request :post nil "application/json")]
    (is (= {:status 400
            :body {:errors [{:message "Request body is empty."}]}}
           (select-keys resp [:status :body])))))
(deftest can-handle-json
  ;; A query document wrapped in a JSON body is accepted and executed.
  (let [{:keys [status body]}
        (send-json-request :post
                           {:query "{ echo(value: \"hello\") { value method }}"})]
    (is (= 200 status))
    (is (= {:data {:echo {:method "post"
                          :value "hello"}}}
           body))))
;; Variables supplied in the JSON body under :variables are bound to the
;; query's declared variables ($v here).  NOTE: the query string literal
;; spans multiple lines; its embedded whitespace is part of the request body.
(deftest can-handle-vars-json
  (let [response
        (send-json-request :post
                           {:query "query ($v: String) {
                                      echo(value: $v) { value }
                                    }"
                            :variables {:v "Calculon"}})]
    (is (= {:body {:data {:echo {:value "Calculon"}}}
            :status 200}
           (select-keys response [:status :body])))))
;; An :operationName in the JSON body selects which named operation to run
;; ("stuff" here, matching the query document's operation name).
(deftest can-handle-operation-name-json
  (let [response
        (send-json-request :post
                           {:query "query stuff($v: String) {
                                      echo(value: $v) { value }
                                    }"
                            :variables {:v "Calculon"}
                            :operationName "stuff"})]
    (is (= {:body {:data {:echo {:value "Calculon"}}}
            :status 200}
           (select-keys response [:status :body])))))
(deftest status-set-by-error
  ;; A resolver error that requests a status (420 here) overrides the HTTP
  ;; status even though partial data is still returned.
  (let [resp (send-request "{ echo(value: \"Baked.\", error: 420) { value }}")]
    (is (= {:status 420
            :body {:data {:echo {:value "Baked."}}
                   :errors [{:message "Forced error."
                             :locations [{:line 1
                                          :column 3}]
                             :path ["echo"]
                             :extensions {:arguments {:value "Baked."
                                                      :error 420}}}]}}
           (select-keys resp [:status :body])))))
;; Variables also work for application/graphql POSTs, passed as a separate
;; map argument rather than inside a JSON envelope.
(deftest can-handle-vars
  (let [response (send-request :post "query ($v: String) {
    echo(value: $v) { value }
  }" {:v "Calculon"})]
    (is (= {:body {:data {:echo {:value "Calculon"}}}
            :status 200}
           (select-keys response [:status :body])))))
(deftest can-access-graphiql
  ;; With :graphiql enabled in the fixture, the IDE page is served as HTML.
  (let [{:keys [status body]} (client/get ":8888/" {:throw-exceptions false})]
    (is (= 200 status))
    (is (str/includes? body "<html>"))))
(deftest forbids-subscriptions
  ;; Subscription operations are only valid over WebSockets, never plain HTTP.
  (let [resp (send-request :post "subscription { ping(message: \"gnip\") { message }}")]
    (is (= {:status 400
            :body {:errors [{:message "Subscription queries must be processed by the WebSockets endpoint."}]}}
           (select-keys resp [:status :body])))))
(deftest can-return-failure-response
  ;; An exception thrown inside a resolver surfaces as a 500 whose error
  ;; entry carries the field name, location, and path.
  (let [resp (send-request "{ fail }")]
    (is (= {:status 500
            :body {:errors [{:message "Exception in resolver for `Query/fail': resolver exception"
                             :extensions {:arguments nil
                                          :field-name "Query/fail"
                                          :location {:line 1
                                                     :column 3}
                                          :path ["fail"]}}]}}
           (select-keys resp [:status :body])))))
(deftest inject-not-found
  ;; Injecting relative to a nonexistent interceptor name must throw.
  (let [chain [{:name :fred}]]
    (is (thrown-with-msg? ExceptionInfo #"Could not find existing interceptor"
          (inject chain {:name :barney} :before :bam-bam)))))
(deftest inject-before
  ;; :before places the new interceptor immediately ahead of the named one.
  (is (= [{:name :fred} {:name :wilma} {:name :barney}]
         (inject [{:name :fred} {:name :barney}] {:name :wilma} :before :barney))))
(deftest inject-after
  ;; :after places the new interceptor immediately behind the named one.
  (is (= [{:name :fred} {:name :barney} {:name :wilma}]
         (inject [{:name :fred} {:name :barney}] {:name :wilma} :after :barney))))
(deftest inject-replace
  ;; :replace swaps the named interceptor for the new one in place.
  (is (= [{:name :fred} {:name :wilma}]
         (inject [{:name :fred} {:name :barney}] {:name :wilma} :replace :barney))))
(deftest inject-remove
  ;; Replacing with nil removes the named interceptor from the chain.
  (is (= [{:name :fred} {:name :wilma}]
         (inject [{:name :fred} {:name :barney} {:name :wilma}] nil :replace :barney))))
(deftest inject-skips-fns
  ;; Bare-function interceptors (which have no :name) are passed over
  ;; unchanged when matching by name.
  (is (= [identity {:name :wilma}]
         (inject [identity {:name :barney}] {:name :wilma} :replace :barney))))
(deftest query-missing-from-request
  ;; A well-formed JSON body that lacks a "query" key is rejected with a 400.
  (let [resp (send-request :post-json "{}")]
    (is (= {:status 400
            :body {:errors [{:message "GraphQL query not supplied in request body."}]}}
           (select-keys resp [:status :body])))))
| null | https://raw.githubusercontent.com/walmartlabs/lacinia-pedestal/e70f853ff96ac2c8f315bc5a6d429213df0c5d04/test/com/walmartlabs/lacinia/pedestal_test.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | Copyright ( c ) 2017 - present Walmart , Inc.
Licensed under the Apache License , Version 2.0 ( the " License " )
distributed under the License is distributed on an " AS IS " BASIS ,
(ns com.walmartlabs.lacinia.pedestal-test
(:require
[clojure.test :refer [deftest is use-fixtures]]
[com.walmartlabs.lacinia.pedestal :refer [inject]]
[clj-http.client :as client]
[clojure.string :as str]
[com.walmartlabs.lacinia.test-utils :refer [test-server-fixture
send-request
send-json-request
send-json-string-request]]
[clojure.spec.test.alpha :as stest])
(:import (clojure.lang ExceptionInfo)))
(stest/instrument)
(use-fixtures :once (test-server-fixture {:graphiql true}))
(deftest simple-get-query
(let [response (send-request "{ echo(value: \"hello\") { value method }}")]
(is (= 200 (:status response)))
(is (= "application/json"
(get-in response [:headers "Content-Type"])))
(is (= {:data {:echo {:method "get"
:value "hello"}}}
(:body response)))))
(deftest simple-post-query
(let [response (send-request :post "{ echo(value: \"hello\") { value method }}")]
(is (= 200 (:status response)))
(is (= {:data {:echo {:method "post"
:value "hello"}}}
(:body response)))))
(deftest invalid-json-post-query
(let [response (send-json-string-request :post "f" "application/json")]
(is (= 400 (:status response)))))
(deftest includes-content-type-check-on-post
(let [response
(send-json-request :post
{:query "{ echo(value: \"hello\") { value method }}"}
"text/plain")]
(is (= {:body {:errors [{:message "Request content type must be application/graphql or application/json."}]}
:status 400}
(select-keys response [:status :body])))))
(deftest missing-query
(let [response (send-json-request :get nil nil)]
(is (= {:body {:errors [{:message "Query parameter 'query' is missing or blank."}]}
:status 400}
(select-keys response [:status :body])))))
(deftest empty-body
(let [response (send-json-request :post nil "application/json")]
(is (= {:body {:errors [{:message "Request body is empty."}]}
:status 400}
(select-keys response [:status :body])))))
(deftest can-handle-json
(let [response
(send-json-request :post
{:query "{ echo(value: \"hello\") { value method }}"})]
(is (= 200 (:status response)))
(is (= {:data {:echo {:method "post"
:value "hello"}}}
(:body response)))))
(deftest can-handle-vars-json
(let [response
(send-json-request :post
{:query "query ($v: String) {
echo(value: $v) { value }
}"
:variables {:v "Calculon"}})]
(is (= {:body {:data {:echo {:value "Calculon"}}}
:status 200}
(select-keys response [:status :body])))))
(deftest can-handle-operation-name-json
(let [response
(send-json-request :post
{:query "query stuff($v: String) {
echo(value: $v) { value }
}"
:variables {:v "Calculon"}
:operationName "stuff"})]
(is (= {:body {:data {:echo {:value "Calculon"}}}
:status 200}
(select-keys response [:status :body])))))
(deftest status-set-by-error
(let [response (send-request "{ echo(value: \"Baked.\", error: 420) { value }}")]
(is (= {:body
{:data
{:echo {:value "Baked."}}
:errors [{:extensions {:arguments {:error 420
:value "Baked."}}
:locations [{:column 3
:line 1}]
:message "Forced error."
:path ["echo"]}]}
:status 420}
(select-keys response [:status :body])))))
(deftest can-handle-vars
(let [response (send-request :post "query ($v: String) {
echo(value: $v) { value }
}" {:v "Calculon"})]
(is (= {:body {:data {:echo {:value "Calculon"}}}
:status 200}
(select-keys response [:status :body])))))
(deftest can-access-graphiql
(let [response (client/get ":8888/" {:throw-exceptions false})]
(is (= 200 (:status response)))
(is (str/includes? (:body response) "<html>"))))
(deftest forbids-subscriptions
(let [response (send-request :post "subscription { ping(message: \"gnip\") { message }}")]
(is (= {:body {:errors [{:message "Subscription queries must be processed by the WebSockets endpoint."}]}
:status 400}
(select-keys response [:status :body])))))
(deftest can-return-failure-response
(let [response (send-request "{ fail }")]
(is (= {:status 500
:body {:errors [{:extensions {:arguments nil
:field-name "Query/fail"
:location {:column 3
:line 1}
:path ["fail"]}
:message "Exception in resolver for `Query/fail': resolver exception"}]}}
(select-keys response [:status :body])))))
(deftest inject-not-found
(is (thrown-with-msg? ExceptionInfo #"Could not find existing interceptor"
(inject [{:name :fred}] {:name :barney} :before :bam-bam))))
(deftest inject-before
(let [fred {:name :fred}
barney {:name :barney}
wilma {:name :wilma}]
(is (= [fred wilma barney]
(inject [fred barney] wilma :before :barney)))))
(deftest inject-after
(let [fred {:name :fred}
barney {:name :barney}
wilma {:name :wilma}]
(is (= [fred barney wilma]
(inject [fred barney] wilma :after :barney)))))
(deftest inject-replace
(let [fred {:name :fred}
barney {:name :barney}
wilma {:name :wilma}]
(is (= [fred wilma]
(inject [fred barney] wilma :replace :barney)))))
(deftest inject-remove
(let [fred {:name :fred}
barney {:name :barney}
wilma {:name :wilma}]
(is (= [fred wilma]
(inject [fred barney wilma] nil :replace :barney)))))
(deftest inject-skips-fns
(let [fred identity
barney {:name :barney}
wilma {:name :wilma}]
(is (= [fred wilma]
(inject [fred barney] wilma :replace :barney)))))
(deftest query-missing-from-request
(is (= {:body {:errors [{:message "GraphQL query not supplied in request body."}]}
:status 400}
(select-keys (send-request :post-json "{}")
[:status :body]))))
|
8f8725847f6068ca3ae73ee6b74b5f08f35cf6d41f4d91126b782acaf88ea00e | pgj/mirage-kfreebsd | clock.mli |
* Copyright ( c ) 2010 - 2011 Anil Madhavapeddy < >
* Copyright ( c ) 2012
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2010-2011 Anil Madhavapeddy <>
* Copyright (c) 2012 Gabor Pali
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
(** Broken-down calendar time.  The field names mirror the C [struct tm];
    presumably the values follow the same conventions (e.g. [tm_year] as
    years since 1900, [tm_mon] 0-based, [tm_wday] with 0 = Sunday) --
    TODO confirm against the kernel stubs below. *)
type tm = {
  tm_sec : int;    (* seconds *)
  tm_min : int;    (* minutes *)
  tm_hour : int;   (* hours *)
  tm_mday : int;   (* day of the month *)
  tm_mon : int;    (* month *)
  tm_year : int;   (* year *)
  tm_wday : int;   (* day of the week *)
  tm_yday : int;   (* day of the year *)
  tm_isdst : bool; (* daylight-saving time in effect *)
}
(* Current wall-clock time from the kernel; presumably seconds since the
   Unix epoch -- confirm against the C stub [kern_gettimeofday]. *)
external time : unit -> int = "kern_gettimeofday"
(* Convert a [time]-style timestamp into broken-down time via the kernel
   stub; no timezone adjustment, per the [gmtime] naming. *)
external gmtime : int -> tm = "kern_gmtime"
| null | https://raw.githubusercontent.com/pgj/mirage-kfreebsd/0ff5b2cd7ab0975e3f2ee1bd89f8e5dbf028b102/packages/mirage-platform/lib/clock.mli | ocaml |
* Copyright ( c ) 2010 - 2011 Anil Madhavapeddy < >
* Copyright ( c ) 2012
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2010-2011 Anil Madhavapeddy <>
* Copyright (c) 2012 Gabor Pali
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
type tm = {
tm_sec : int;
tm_min : int;
tm_hour : int;
tm_mday : int;
tm_mon : int;
tm_year : int;
tm_wday : int;
tm_yday : int;
tm_isdst : bool;
}
external time : unit -> int = "kern_gettimeofday"
external gmtime : int -> tm = "kern_gmtime"
| |
784cba170da104d8589ea7468a99f7a6d5fa4a4255c6571930da61dc44c6491b | ocsigen/js_of_ocaml | fannkuch_redux_2.ml | The Computer Language Benchmarks Game
/
contributed by , transliterated from Lua program
/
contributed by Isaac Gouy, transliterated from Mike Pall's Lua program
*)
(* Fannkuch-redux benchmark kernel (transliterated from Mike Pall's Lua
   version, per the header comment).  Enumerates the permutations of
   [0 .. n-1] in-place; for each permutation counts the prefix reversals
   ("flips") needed to bring element 0 to the front, accumulating a signed
   checksum [sum] and the maximum flip count [maxflips].
   NOTE: this function never returns normally -- once all permutations are
   exhausted it calls [exit 0] (the eprintf of the results is disabled by
   [if false]). *)
let fannkuch n =
  (* p: current permutation; q: scratch copy flipped in place;
     s: per-position rotation counters driving permutation generation. *)
  let p = Array.make n 0 in
  let q = Array.make n 0 in
  let s = Array.make n 0 in
  let sign = ref 1 in
  let maxflips = ref 0 in
  let sum = ref 0 in
  for i = 0 to n - 1 do
    p.(i) <- i;
    q.(i) <- i;
    s.(i) <- i
  done;
  while true do
    let q0 = ref p.(0) in
    (* If p.(0) = 0 the permutation needs zero flips; skip the flip loop. *)
    if !q0 <> 0
    then (
      for i = 1 to n - 1 do
        q.(i) <- p.(i)
      done;
      let flips = ref 1 in
      (* Repeatedly reverse the prefix q[0..q0] until a 0 reaches the front;
         the [while <cond> do () done] shape does all the work in the
         condition, updating sum/maxflips just before terminating. *)
      while
        let qq = q.(!q0) in
        if qq = 0
        then (
          sum := !sum + (!sign * !flips);
          if !flips > !maxflips then maxflips := !flips;
          false)
        else true
      do
        let qq = q.(!q0) in
        q.(!q0) <- !q0;
        (* Reverse the interior q[1..q0-1] with a two-index swap loop
           (only needed when the prefix has 4+ elements). *)
        (if !q0 >= 3
        then
          let i = ref 1 in
          let j = ref (!q0 - 1) in
          while
            let t = q.(!i) in
            q.(!i) <- q.(!j);
            q.(!j) <- t;
            incr i;
            decr j;
            !i < !j
          do
            ()
          done);
        q0 := qq;
        incr flips
      done);
    (* Advance p to the next permutation, alternating the checksum sign:
       swap p0/p1 on +1, rotate p0..p2 on -1. *)
    if !sign = 1
    then (
      let t = p.(1) in
      p.(1) <- p.(0);
      p.(0) <- t;
      sign := -1)
    else
      let t = p.(1) in
      p.(1) <- p.(2);
      p.(2) <- t;
      sign := 1;
      (* Carry into higher positions: decrement the first live counter
         (raise Exit to resume), or terminate the whole run at the top. *)
      try
        for i = 2 to n - 1 do
          let sx = s.(i) in
          if sx <> 0
          then (
            s.(i) <- sx - 1;
            raise Exit);
          (* All counters exhausted: every permutation has been visited. *)
          if i = n - 1
          then (
            if false then Format.eprintf "%d %d@." !sum !maxflips;
            exit 0);
          s.(i) <- i;
          (* Rotate p[0..i+1] left by one. *)
          let t = p.(0) in
          for j = 0 to i do
            p.(j) <- p.(j + 1)
          done;
          p.(i + 1) <- t
        done
      with Exit -> ()
  done
(* Benchmark problem size. *)
let n = 10
(* Evaluating this binding runs the whole benchmark; [fannkuch] only
   terminates via [exit 0], so [pf] is never actually used. *)
let pf = fannkuch n
//print(pf[0 ] + " \n " + " Pfannkuchen ( " + n + " ) = " + pf[1 ] ) ;
//print(pf[0] + "\n" + "Pfannkuchen(" + n + ") = " + pf[1]);
*)
| null | https://raw.githubusercontent.com/ocsigen/js_of_ocaml/58210fabc947c4839b6e71ffbbf353a4ede0dbb7/benchmarks/sources/ml/fannkuch_redux_2.ml | ocaml | The Computer Language Benchmarks Game
/
contributed by , transliterated from Lua program
/
contributed by Isaac Gouy, transliterated from Mike Pall's Lua program
*)
let fannkuch n =
let p = Array.make n 0 in
let q = Array.make n 0 in
let s = Array.make n 0 in
let sign = ref 1 in
let maxflips = ref 0 in
let sum = ref 0 in
for i = 0 to n - 1 do
p.(i) <- i;
q.(i) <- i;
s.(i) <- i
done;
while true do
let q0 = ref p.(0) in
if !q0 <> 0
then (
for i = 1 to n - 1 do
q.(i) <- p.(i)
done;
let flips = ref 1 in
while
let qq = q.(!q0) in
if qq = 0
then (
sum := !sum + (!sign * !flips);
if !flips > !maxflips then maxflips := !flips;
false)
else true
do
let qq = q.(!q0) in
q.(!q0) <- !q0;
(if !q0 >= 3
then
let i = ref 1 in
let j = ref (!q0 - 1) in
while
let t = q.(!i) in
q.(!i) <- q.(!j);
q.(!j) <- t;
incr i;
decr j;
!i < !j
do
()
done);
q0 := qq;
incr flips
done);
if !sign = 1
then (
let t = p.(1) in
p.(1) <- p.(0);
p.(0) <- t;
sign := -1)
else
let t = p.(1) in
p.(1) <- p.(2);
p.(2) <- t;
sign := 1;
try
for i = 2 to n - 1 do
let sx = s.(i) in
if sx <> 0
then (
s.(i) <- sx - 1;
raise Exit);
if i = n - 1
then (
if false then Format.eprintf "%d %d@." !sum !maxflips;
exit 0);
s.(i) <- i;
let t = p.(0) in
for j = 0 to i do
p.(j) <- p.(j + 1)
done;
p.(i + 1) <- t
done
with Exit -> ()
done
let n = 10
let pf = fannkuch n
//print(pf[0 ] + " \n " + " Pfannkuchen ( " + n + " ) = " + pf[1 ] ) ;
//print(pf[0] + "\n" + "Pfannkuchen(" + n + ") = " + pf[1]);
*)
| |
71fcc3d0d56bfefbcf219d47a7fb06d42c4ceda0d1d9c3a222d7cf2baebb3b78 | rescript-lang/rescript-compiler | js_closure.ml | Copyright ( C ) 2015 - 2016 Bloomberg Finance L.P.
* Copyright ( C ) 2016 - , Authors of ReScript
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2016 - Hongbo Zhang, Authors of ReScript
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
(* Mutable record holding a set of identifiers (a [Set_ident.t]) for
   mutable values coming from an outer loop.  NOTE(review): judging by
   the module and accessor names, this drives the decision of when a
   lexical scope (closure) must be introduced for loop variables —
   confirm against the callers. *)
type t = { mutable outer_loop_mutable_values : Set_ident.t }
(* A fresh tracker with an empty identifier set. *)
let empty () = { outer_loop_mutable_values = Set_ident.empty }
(* Overwrite the recorded set of outer-loop mutable values. *)
let set_lexical_scope t v = t.outer_loop_mutable_values <- v
(* Read back the recorded set of outer-loop mutable values. *)
let get_lexical_scope t = t.outer_loop_mutable_values
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/e60482c6f6a69994907b9bd56e58ce87052e3659/jscomp/core/js_closure.ml | ocaml | Copyright ( C ) 2015 - 2016 Bloomberg Finance L.P.
* Copyright ( C ) 2016 - , Authors of ReScript
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2016 - Hongbo Zhang, Authors of ReScript
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
type t = { mutable outer_loop_mutable_values : Set_ident.t }
let empty () = { outer_loop_mutable_values = Set_ident.empty }
let set_lexical_scope t v = t.outer_loop_mutable_values <- v
let get_lexical_scope t = t.outer_loop_mutable_values
| |
8df3f28e85c197b886a5b5d3089717b05e467e8233ffebe507f23457886d629d | Deep-Symmetry/afterglow | channels.clj | (ns afterglow.channels
"Functions for modeling DMX channels"
{:author "James Elliott"}
(:require [afterglow.fixtures.qxf :refer [sanitize-name]]
[camel-snake-kebab.core :as csk]
[com.evocomputing.colors :as colors]))
(defn channel
  "Builds the smallest possible channel specification: just the
  1-based address offset of the channel within its fixture's channel
  list (by convention the fixture's first channel has offset 1).
  Most fixture definitions should prefer [[fine-channel]], which also
  sets up the channel's function specification and optional
  fine-resolution pairing; use this only when none of that is wanted."
  [offset]
  (hash-map :offset offset))
(defn- assign-channel
  "Resolves one raw channel description for a fixture being patched at
  `start-address` on `universe`: fills in the concrete DMX `:address`
  and zero-based `:index` (plus `:fine-address`/`:fine-index` when the
  channel is a two-byte pair), and records the owning `head` so that
  dynamic and spatial parameter resolution can reach head information
  later."
  [universe start-address head raw-channel]
  (merge raw-channel
         (when-let [offset (:offset raw-channel)]
           {:address (+ offset (dec start-address))
            :index (+ (dec offset) (dec start-address))
            :universe universe})
         (when-let [fine (:fine-offset raw-channel)]
           {:fine-address (+ fine (dec start-address))
            :fine-index (+ (dec fine) (dec start-address))})
         (when head
           {:head head})))
(defn- patch-head
  "Resolves the channel assignments of a single head by running
  `channel-assigner` (already closed over the universe and start
  address) across its channels, then tags the head with its owning
  `fixture` and a freshly generated id obtained from `id-fn`."
  [channel-assigner fixture id-fn raw-head]
  (let [resolve-channels (fn [chans]
                           (map (partial channel-assigner raw-head) chans))]
    (-> raw-head
        (update-in [:channels] resolve-channels)
        (assoc :fixture fixture :id (id-fn)))))
(defn- patch-heads
  "Resolves the channel assignments of every head attached to
  `fixture`, giving each head a back pointer to the fixture and a
  fresh id generated by `id-fn`."
  [fixture channel-assigner id-fn]
  (update-in fixture [:heads]
             (fn [heads]
               (map (partial patch-head channel-assigner fixture id-fn)
                    heads))))
(defn patch-fixture
  "Places `fixture` at `start-address` on DMX `universe`: resolves the
  concrete addresses of the fixture's own channels as well as those of
  all of its heads. `id-fn` generates the unique id assigned to each
  head."
  [fixture universe start-address id-fn]
  (let [assigner (partial assign-channel universe start-address)
        patched (patch-heads fixture assigner id-fn)]
    (update-in patched [:channels]
               (fn [chans] (map (partial assigner fixture) chans)))))
(defn extract-channels
  "Walks every channel of every fixture in `fixtures` and returns the
  lazy sequence of those for which `pred` is truthy."
  [fixtures pred]
  (for [ch (mapcat :channels fixtures)
        :when (pred ch)]
    ch))
(defn expand-heads
  "Expands a list of fixtures into a sequence containing each fixture
  followed immediately by its heads (if any)."
  [fixtures]
  (mapcat (fn [fixture] (cons fixture (:heads fixture))) fixtures))
(defn all-addresses
  "Returns every DMX address in use by the supplied patched fixtures:
  the coarse and fine addresses of all their channels, including the
  channels belonging to any fixture heads."
  [fixtures]
  (->> (expand-heads fixtures)
       (mapcat :channels)
       (mapcat (fn [channel]
                 (vals (select-keys channel [:address :fine-address]))))))
(defn extract-heads-with-some-matching-channel
  "Given a fixture list, returns the heads (which may be top-level
  fixtures too) having at least one channel for which `pred` is
  truthy."
  [fixtures pred]
  (for [head (expand-heads fixtures)
        :when (some pred (:channels head))]
    head))
(defn find-rgb-heads
  "Returns all heads of the supplied fixtures which are capable of
  mixing RGB color, in other words they have at least one red, one
  green, and one blue color channel. If the second argument is
  present and `true`, also returns heads with color wheels."
  ([fixtures]
   (find-rgb-heads fixtures false))
  ([fixtures include-color-wheels?]
   (filter (fn [head]
             ;; Require each of the three primaries to appear among the
             ;; head's distinct channel colors. The previous version
             ;; counted matching channels and compared the count to 3,
             ;; which broke on heads carrying a duplicated primary
             ;; channel (red+red+green passed; red+green+blue+blue was
             ;; excluded).
             (let [head-colors (set (map :color (:channels head)))]
               (or (every? head-colors [:red :green :blue])
                   (and include-color-wheels?
                        (seq (:color-wheel-hue-map head))))))
           (expand-heads fixtures))))
(defn has-rgb-heads?
  "Given a fixture, returns a truthy value when any of its heads can
  mix RGB color. If the second argument is present and `true`, a head
  with a color wheel is also good enough."
  ([fixture]
   (has-rgb-heads? fixture false))
  ([fixture include-color-wheels?]
   (-> [fixture]
       (find-rgb-heads include-color-wheels?)
       seq)))
(defn build-function
  "Builds a function specification covering DMX values `start` through
  `end` for a channel; when neither is supplied the spec spans the
  channel's full 0-255 range. `range-type` distinguishes fixed from
  variable ranges, `function-type` keys the function, and `label` is
  its display name. An optional `var-label` names the variable exposed
  for adjusting the function's value."
  [range-type function-type label & {:keys [start end var-label]
                                     :or {start 0 end 255}}]
  (cond-> {:start start
           :end end
           :range range-type
           :type function-type
           :label label}
    var-label (assoc :var-label var-label)))
(defn fine-channel
  "Defines a channel of type `chan-type` which may be paired with a
  second channel to carry a multi-byte value: when `:fine-offset` is
  supplied, the channel at `offset` holds the most-significant byte
  and the channel at `:fine-offset` the least-significant byte.

  A [function
  specification]({{guide-url}}fixture_definitions.html#function-specifications)
  spanning every legal DMX value for the channel is created
  automatically. Its type defaults to `chan-type` but can be
  overridden with `:function-type`.

  The function's name defaults to a capitalized version of the
  function type (minus its leading colon). Since this name appears in
  the [web interface]({{guide-url}}README.html#web-ui) as the text
  label of cue grid cells for [Function
  Cues]({{guide-url}}cues.html#creating-function-cues) created for the
  function, a more readable name can be supplied with
  `:function-name`.

  Finally, `:var-label` names the variable offered for adjusting the
  value associated with this function: [Function
  Cues]({{guide-url}}cues.html#creating-function-cues) use it to label
  the cue-local variable they create, and it shows up in places like
  the [Ableton Push Effect Control
  interface]({{guide-url}}push2.html#effect-control). When omitted,
  the generic label `Level` is used."
  [chan-type offset & {:keys [fine-offset function-type function-name var-label]
                       :or {function-type chan-type}}]
  {:pre [(some? chan-type) (integer? offset) (<= 1 offset 512)]}
  (let [chan-type (keyword chan-type)
        function-type (keyword function-type)
        label (or function-name
                  (clojure.string/capitalize (name function-type)))]
    (cond-> (assoc (channel offset)
                   :type chan-type
                   :functions [(build-function :variable function-type label
                                               :var-label var-label)])
      fine-offset (assoc :fine-offset fine-offset))))
(defn- expand-function-spec
  "Normalizes one function specification paired with its starting DMX
  value. A keyword becomes a variable-range function of that type; a
  string becomes a fixed-range function labeled with the string; `nil`
  becomes an explicit no-function range; and a map is filled in with
  defaults for any missing keys. In every case the resulting spec
  carries the supplied `start` value."
  [[start spec]]
  (let [humanize #(clojure.string/replace (csk/->Camel_Snake_Case (name %)) "_" " ")]
    (cond (keyword? spec)
          {:start start
           :range :variable
           :type spec
           :label (humanize spec)}

          (string? spec)
          {:start start
           :range :fixed
           :type (keyword (sanitize-name spec))
           :label spec}

          (nil? spec)
          {:start start
           :range :fixed
           :type :no-function
           :label "No function"}

          (map? spec)
          (merge {:range :variable
                  :label (humanize (:type spec))}
                 spec
                 {:start start})

          :else
          (throw (IllegalArgumentException.
                  (str "Don't know how to build a function specification from " spec))))))
(defn- assign-ends
  "Given a pair of consecutive function range specs, closes the first
  one: its `:end` becomes one less than where the next range starts,
  or 255 when the sentinel `:end` marks the final range. A range
  squeezed down to a single value is demoted to `:fixed`; impossible
  layouts raise IllegalArgumentException."
  [[current next]]
  (if (= :end next)
    (assoc current :end 255)
    (let [end (dec (:start next))
          start (:start current)]
      (cond
        (not (<= 0 end 255))
        (throw (IllegalArgumentException.
                (str "Function ends outside of legal DMX range: " end)))

        (< start end)
        (assoc current :end end)

        (= start end)
        (assoc current :end end :range :fixed)

        :else
        (throw (IllegalArgumentException.
                (str "No range values available for function " (:type current))))))))
(defn- expand-function-range
  "Expands a sequence of function ranges. If a sequence was passed for
  the starting point, expands it, either appending a sequential index
  to the spec (if it is a keyword or string), or pairing up successive
  values if the spec is itself a sequence. The starting value and spec
  pairs are then expanded in turn by calling expand-function-spec. If
  start is not a sequence, expand-function-range simply delegates to
  expand-function-spec."
  [[start spec]]
  (if (sequential? start)
    (if (sequential? spec)
      ;; Parallel sequences: pair each start value with its own spec.
      (map #(expand-function-spec [%1 %2]) start spec)
      ;; One spec shared by several start values: derive a numbered
      ;; variant for each ("Label 1", "Label 2", ... for strings, or
      ;; :type-1, :type-2, ... for keywords).
      (map-indexed (fn [index start]
                     (cond (string? spec)
                           (expand-function-spec [start (str spec " " (inc index))])
                           (keyword? spec)
                           (expand-function-spec [start (keyword (str (name spec) "-" (inc index)))])
                           :else
                           (throw (IllegalArgumentException.
                                   (str "Don't know how to expand function range for spec " spec)))))
                   start))
    ;; Single start value: wrap the one expanded spec in a vector so
    ;; callers can always mapcat over the result.
    [(expand-function-spec [start spec])]))
(defn functions
  "Defines a channel whose DMX value range is carved into sub-ranges
  that perform different functions. After the channel type and offset,
  supply alternating starting values and function specifications, in
  increasing order of starting value; each range's end is inferred
  from where the next range begins.

  A keyword specification expands to a variable-range function of
  that type, a string to a fixed-range function, and `nil` skips the
  range. For more complex functions pass a map containing the `:type`
  keyword plus any other settings you need (e.g. `:label`, `:range`,
  `:var-label`), and the rest will be filled in for you. For example:

  ```
  (functions :strobe 40
             0 nil
             10 \"Strobe Random\"
             20 :strobe)
  ```

  See the [Developer
  Guide]({{guide-url}}fixture_definitions.html#function-channels) for
  more details and examples."
  [chan-type offset & functions]
  {:pre [(some? chan-type) (integer? offset) (<= 1 offset 512)]}
  (assoc (channel offset)
         :type (keyword chan-type)
         :functions (->> functions
                         (partition 2)
                         (mapcat expand-function-range)
                         (partition 2 1 [:end])
                         (mapv assign-ends))))
(defn color-wheel-hue
  "Creates a function specification which identifies a color wheel
  position with a particular hue, so it can participate in Afterglow's
  color effects. The hue can be specified as a number, a
  [jolby/colors]() object, or a
  string which is passed to the jolby/colors `create-color` function.

  The resulting spec is a fixed range unless `:range :variable` is
  supplied, and its label is inferred from the hue value unless
  overridden with the `:label` optional keyword argument.

  If hue is a sequence, returns the sequence obtained by calling
  `color-wheel-hue` on each of its elements with the same optional
  arguments."
  [hue & {:keys [range label] :or {range :fixed}}]
  {:pre [(some? hue)]}
  (if (sequential? hue)
    ;; Recursively build a spec for every hue in the sequence.
    (if (some? label)
      (map #(color-wheel-hue % :range range :label label) hue)
      (map #(color-wheel-hue % :range range) hue))
    (let [[wheel-hue label-part type-part]
          (cond (number? hue)
                (let [clamped (colors/clamp-hue hue)]
                  [clamped clamped clamped])

                (string? hue)
                [(colors/hue (colors/create-color hue)) hue hue]

                (instance? com.evocomputing.colors/color hue)
                [(colors/hue hue) hue (colors/hue hue)]

                :else
                (throw (IllegalArgumentException.
                        (str "Don't know how to create hue from " hue))))]
      {:color-wheel-hue wheel-hue
       :label (or label (str "Color wheel hue " label-part))
       :type (keyword (str "color-wheel-hue-" type-part))
       :range range})))
(defn dimmer
  "A channel which controls a dimmer.

  Normal dimmers are dark at zero, and get brighter as the channel
  value increases, to a maximum brightness at 255. However, some
  fixtures have inverted dimmers. If that is the case, pass the DMX
  value at which the inversion takes place with `:inverted-from`. For
  example, fixtures which are brightest at zero and darken as the
  value approaches 255 would be specified as `:inverted-from 0`, while
  fixtures which are dark at zero, jump to maximum brightness at 1,
  then dim as the value grows towards 255 would be specified as
  `:inverted-from 1`.

  If the fixture uses two-byte values for the dimmer level, pass
  the offset of the channel containing the most-significant byte in
  `offset`, and specify the offset of the channel containing the
  least-significant byte with `:fine-offset`."
  [offset & {:keys [inverted-from fine-offset]}]
  ;; Pass :var-label (the option fine-channel actually destructures) so
  ;; the adjustment variable is labeled "Intensity"; the previous
  ;; version passed :range-label, which fine-channel silently ignored,
  ;; leaving the generic "Level" label.
  (cond-> (fine-channel :dimmer offset :fine-offset fine-offset :var-label "Intensity")
    inverted-from (assoc :inverted-from inverted-from)))
(defn color
  "A channel which controls a color component. If `:hue` is supplied
  along with a hue value, this channel will participate in color
  mixing even if `color` is not one of the standard values `:red`,
  `:green`, `:blue`, or `:white` whose hues and contributions to color
  mixing are automatically understood.

  By default, the function created for the channel uses the name of
  the `color` keyword as its function label. Since this label is
  displayed in the [web interface]({{guide-url}}README.html#web-ui) as
  the text label in the cue grid cell for [Function
  Cues]({{guide-url}}cues.html#creating-function-cues) created for the
  function, you may wish to specify a more readable name, which you
  can do by passing it in with `:function-label`.

  If the fixture uses two-byte values for this color component, pass
  the offset of the channel containing the most-significant byte in
  `offset`, and specify the offset of the channel containing the
  least-significant byte with `:fine-offset`."
  [offset color & {:keys [hue function-label fine-offset]}]
  {:pre [(some? color)]}
  (let [color (keyword color)]
    ;; fine-channel's option is :function-name; the previous version
    ;; passed :function-label, so a caller-supplied label was silently
    ;; dropped and the default capitalized color name always won.
    (-> (fine-channel :color offset :fine-offset fine-offset
                      :function-type color
                      :function-name (or function-label
                                         (clojure.string/capitalize (name color))))
        (assoc :color color)
        (cond-> hue (assoc :hue hue)))))
(defn pan
  "A channel which pans a moving head, with an optional second channel
  for fine control."
  ([offset]
   ;; Single-channel (8-bit) variant: no fine channel.
   (pan offset nil))
  ([offset fine-offset]
   ;; `fine-offset`, when non-nil, holds the least-significant byte for
   ;; 16-bit pan positioning.
   (fine-channel :pan offset :fine-offset fine-offset :var-label "Pan")))
(defn tilt
  "A channel which tilts a moving head, with an optional second channel
  for fine control."
  ([offset]
   ;; Single-channel (8-bit) variant: no fine channel.
   (tilt offset nil))
  ([offset fine-offset]
   ;; `fine-offset`, when non-nil, holds the least-significant byte for
   ;; 16-bit tilt positioning.
   (fine-channel :tilt offset :fine-offset fine-offset :var-label "Tilt")))
(defn focus
  "A channel which adjusts focus, with an optional second channel for
  fine control."
  ([offset]
   ;; Single-channel (8-bit) variant: no fine channel.
   (focus offset nil))
  ([offset fine-offset]
   ;; `fine-offset`, when non-nil, holds the least-significant byte for
   ;; 16-bit focus adjustment.
   (fine-channel :focus offset :fine-offset fine-offset :var-label "Focus")))
(defn iris
  "A channel which controls an iris, with an optional second channel
  for fine control."
  ([offset]
   ;; Single-channel (8-bit) variant: no fine channel.
   (iris offset nil))
  ([offset fine-offset]
   ;; `fine-offset`, when non-nil, holds the least-significant byte for
   ;; 16-bit iris control.
   (fine-channel :iris offset :fine-offset fine-offset :var-label "Iris")))
(defn zoom
  "A channel which adjusts zoom, with an optional second channel for
  fine control."
  ([offset]
   ;; Single-channel (8-bit) variant: no fine channel.
   (zoom offset nil))
  ([offset fine-offset]
   ;; `fine-offset`, when non-nil, holds the least-significant byte for
   ;; 16-bit zoom adjustment.
   (fine-channel :zoom offset :fine-offset fine-offset :var-label "Zoom")))
(defn frost
  "A channel which adjusts frost, with an optional second channel for
  fine control."
  ([offset]
   ;; Single-channel (8-bit) variant: no fine channel.
   (frost offset nil))
  ([offset fine-offset]
   ;; `fine-offset`, when non-nil, holds the least-significant byte for
   ;; 16-bit frost adjustment.
   (fine-channel :frost offset :fine-offset fine-offset :var-label "Frost")))
| null | https://raw.githubusercontent.com/Deep-Symmetry/afterglow/12b8c90f22c591549adc4d89b8a19853f5e9e3a5/src/afterglow/channels.clj | clojure | resolves all of its
resolves all of their channel assignments."
resolves
if you do not, the generic label `Level` will be
this will be expanded into a
Was not a sequence, so return a single function spec | (ns afterglow.channels
"Functions for modeling DMX channels"
{:author "James Elliott"}
(:require [afterglow.fixtures.qxf :refer [sanitize-name]]
[camel-snake-kebab.core :as csk]
[com.evocomputing.colors :as colors]))
(defn channel
"Creates a minimal channel specification, containing just the
address offset within the fixture's list of channels. The first
channel used by a fixture is, by convention, given offset 1.
You probably want to use [[fine-channel]] rather than this function
to create even channels which do not have a `:fine-offset` because
of the other helpful features it offers, such as setting up the
channel function specification for you."
[offset]
{:offset offset})
(defn- assign-channel
"Given a universe and DMX address at which a fixture is being
patched, and a raw channel description from the fixture definition,
calculate and assign the channel's actual DMX address and universe
that it will have in a show. Also adds the back pointer to the head
that owns the channel, so dynamic and spatial parameter resolution
can access head information."
[universe start-address head raw-channel]
(cond-> raw-channel
(:offset raw-channel) (assoc :address (+ (:offset raw-channel) (dec start-address))
:index (+ (dec (:offset raw-channel)) (dec start-address))
:universe universe)
(:fine-offset raw-channel) (assoc :fine-address (+ (:fine-offset raw-channel) (dec start-address))
:fine-index (+ (dec (:fine-offset raw-channel)) (dec start-address)))
head (assoc :head head)))
(defn- patch-head
channel assignments."
[channel-assigner fixture id-fn raw-head]
(assoc (update-in raw-head [:channels] #(map (partial channel-assigner raw-head) %))
:fixture fixture :id (id-fn)))
(defn- patch-heads
"Assigns the heads of a fixture to a DMX universe and starting
[fixture channel-assigner id-fn]
(let [assigner (partial patch-head channel-assigner fixture id-fn)]
(update-in fixture [:heads] #(map assigner %))))
(defn patch-fixture
all of its channel assignments."
[fixture universe start-address id-fn]
(let [assigner (partial assign-channel universe start-address)]
(update-in (patch-heads fixture assigner id-fn) [:channels] #(map (partial assigner fixture) %))))
(defn extract-channels
"Given a fixture list, returns the channels matching the specified predicate."
[fixtures pred]
(filter pred (mapcat :channels fixtures)))
(defn expand-heads
"Given a list of fixtures, expands it to include the heads."
[fixtures]
(mapcat #(concat [%] (:heads %)) fixtures))
(defn all-addresses
"Returns all the addresses being used by a list of patched fixtures,
including those used by any fixture heads."
[fixtures]
(mapcat #(vals (select-keys % [:address :fine-address]))
(mapcat :channels (expand-heads fixtures))))
(defn extract-heads-with-some-matching-channel
"Given a fixture list, returns all heads (which may be top-level fixtures too)
whose channels contain a match for the specified predicate."
[fixtures pred]
(filter #(some pred (:channels %)) (expand-heads fixtures)))
(defn find-rgb-heads
  "Returns all heads of the supplied fixtures which are capable of
  mixing RGB color, in other words they have at least one red, one
  green, and one blue color channel. If the second argument is
  present and `true`, also returns heads with color wheels."
  ([fixtures]
   (find-rgb-heads fixtures false))
  ([fixtures include-color-wheels?]
   (filter (fn [head]
             ;; Require each of the three primaries to appear among the
             ;; head's distinct channel colors. The previous version
             ;; counted matching channels and compared the count to 3,
             ;; which broke on heads carrying a duplicated primary
             ;; channel (red+red+green passed; red+green+blue+blue was
             ;; excluded).
             (let [head-colors (set (map :color (:channels head)))]
               (or (every? head-colors [:red :green :blue])
                   (and include-color-wheels?
                        (seq (:color-wheel-hue-map head))))))
           (expand-heads fixtures))))
(defn has-rgb-heads?
"Given a fixture, returns a truthy value if it has any heads capable
of mixing RGB color. If the second argument is present and `true`,
having a head with a color wheel is good enough."
([fixture]
(has-rgb-heads? fixture false))
([fixture include-color-wheels?]
(seq (find-rgb-heads [fixture] include-color-wheels?))))
(defn build-function
"Returns a function spefication that encompasses a range of possible
DMX values for a channel. If start and end are not specified, the
function uses the full range of the channel."
[range-type function-type label & {:keys [start end var-label] :or {start 0 end 255}}]
(merge {:start start
:end end
:range range-type
:type function-type
:label label}
(when var-label {:var-label var-label})))
(defn fine-channel
  "Defines a channel of type `chan-type` which may be paired with a
  second channel in order to support multi-byte values. When a value
  is passed in with `:fine-offset`, the channel specified by `offset`
  is understood as containing the most-significant byte of a two-byte
  value, with the least-significant byte carried in the channel whose
  offset followed `:fine-offset`.

  Automatically creates a [function
  specification]({{guide-url}}fixture_definitions.html#function-specifications)
  which spans all the legal DMX values for the channel. By default,
  the function type is taken to be the same as `chan-type`, but this
  can be changed by passing in a different keyword with
  `:function-type`.

  Similarly, the name of the function created is, by default, a
  capitalized version of the function type (without its leading
  colon). Since this name is displayed in the [web
  interface]({{guide-url}}README.html#web-ui) as the text label in the
  cue grid cell for [Function
  Cues]({{guide-url}}cues.html#creating-function-cues) created for the
  function, you may wish to specify a more readable name, which you
  can do by passing it with `:function-name`.

  Finally, you may specify a label to be used when creating a user
  interface for adjusting the value associated with this
  function. [Function
  Cues]({{guide-url}}cues.html#creating-function-cues)
  will use this as the label for the cue-local variable they create,
  and it will appear in places like the [Ableton Push Effect Control
  interface]({{guide-url}}push2.html#effect-control).
  You can specify what this variable label should be with
  `:var-label`; if none is supplied, the function name is used."
  [chan-type offset & {:keys [fine-offset function-type function-name var-label] :or {function-type chan-type}}]
  {:pre [(some? chan-type) (integer? offset) (<= 1 offset 512)]}
  (let [chan-type (keyword chan-type)
        function-type (keyword function-type)
        ;; Default the UI name to e.g. :strobe -> "Strobe".
        function-name (or function-name (clojure.string/capitalize (name function-type)))
        base (assoc (channel offset)
                    :type chan-type
                    :functions [(build-function :variable function-type function-name :var-label var-label)])]
    (merge base
           ;; Only record :fine-offset when a fine channel actually exists.
           (when fine-offset
             {:fine-offset fine-offset}))))
(defn- expand-function-spec
  "Expands the specification for a function at a particular starting
  address. If a simple keyword was given for it, creates a map with
  default contents for a variable-value range. A string is turned into
  a keyword, but creates a fixed-value range. A nil specification is
  expanded into a no-function range. Otherwise, adds any missing
  pieces to the supplied map. In either case, assigns the range's
  starting value."
  [[start spec]]
  (cond (keyword? spec)
        ;; Keyword: variable range, label derived from the keyword name,
        ;; e.g. :strobe-random -> "Strobe Random".
        {:start start
         :range :variable
         :type spec
         :label (clojure.string/replace (csk/->Camel_Snake_Case (name spec)) "_" " ")}

        (string? spec)
        ;; String: fixed range, the string itself becomes the label and a
        ;; sanitized version of it becomes the type keyword.
        {:start start
         :range :fixed
         :type (keyword (sanitize-name spec))
         :label spec}

        (nil? spec)
        ;; nil: an explicit "this range does nothing" placeholder.
        {:start start
         :range :fixed
         :type :no-function
         :label "No function"}

        (map? spec)
        ;; Map: fill in defaults, but never let the map override :start.
        (merge {:range :variable
                :label (clojure.string/replace (csk/->Camel_Snake_Case (name (:type spec))) "_" " ")}
               spec
               {:start start})

        :else
        (throw (IllegalArgumentException.
                (str "Don't know how to build a function specification from " spec)))))
(defn- assign-ends
  "Used to figure out the ends of the ranges that make up a function
  channel, by ending each range at the value one less than where the
  next range begins."
  [[current next]]
  (if (= :end next)
    ;; The sentinel :end marks the last range; it runs to the DMX maximum.
    (assoc current :end 255)
    (let [end (dec (:start next))]
      (when-not (<= 0 end 255)
        (throw (IllegalArgumentException.
                (str "Function ends outside of legal DMX range: " end))))
      (if (< (:start current) end)
        (assoc current :end end)
        (if (= (:start current) end)
          ;; A single-value range cannot be variable; collapse it to fixed.
          (assoc current :end end :range :fixed)
          ;; Ranges out of order: the next range starts at or before this one.
          (throw (IllegalArgumentException.
                  (str "No range values available for function " (:type current)))))))))
(defn- expand-function-range
  "Expands a sequence of function ranges. If a sequence was passed for
  the starting point, expands it, either appending a sequential index
  to the spec (if it is a keyword or string), or pairing up successive
  values if the spec is itself a sequence. The starting value and spec
  pairs are then expanded in turn by calling expand-function-spec. If
  start is not a sequence, expand-function-range simply delegates to
  expand-function-spec."
  [[start spec]]
  (if (sequential? start)
    (if (sequential? spec)
      ;; Parallel sequences: pair each start with its own spec.
      (map #(expand-function-spec [%1 %2]) start spec)
      ;; One spec shared by many starts: derive numbered variants,
      ;; e.g. :gobo -> :gobo-1, :gobo-2, ... or "Gobo" -> "Gobo 1", ...
      (map-indexed (fn [index start]
                     (cond (string? spec)
                           (expand-function-spec [start (str spec " " (inc index))])

                           (keyword? spec)
                           (expand-function-spec [start (keyword (str (name spec) "-" (inc index)))])

                           :else
                           (throw (IllegalArgumentException.
                                   (str "Don't know how to expand function range for spec " spec)))))
                   start))
    ;; Simple case: a single start/spec pair, wrapped for mapcat callers.
    [(expand-function-spec [start spec])]))
(defn functions
  "Defines a channel whose values are divided up into different ranges
  which perform different functions. After the channel type and DMX
  offset, pass a list of starting values and function specifications.
  The simplest form of specification is a keyword or string
  variable-range (for keywords) or fixed-range (for strings) function
  of that type.

  For more complex functions, pass in a map containing the `:type`
  keyword and any other settings you need to make (e.g. `:label`,
  `:range`, `:var-label`), and the rest will be filled in for you.
  To skip a range, pass `nil` for its specification.

  The ranges need to be in order of increasing starting
  values, and the ending values for each will be figured out by
  context, e.g.

  ```
  (functions :strobe 40
             0 nil
             10 \"Strobe Random\"
             20 :strobe)
  ```

  See the [Developer
  Guide]({{guide-url}}fixture_definitions.html#function-channels) for
  more details and examples."
  [chan-type offset & functions]
  {:pre [(some? chan-type) (integer? offset) (<= 1 offset 512)]}
  (let [chan-type (keyword chan-type)]
    (assoc (channel offset)
           :type chan-type
           ;; Pair (start spec) values, expand each into range maps, then
           ;; walk consecutive pairs (with an :end sentinel) to fill in the
           ;; end of each range from the start of the next.
           :functions (vec (map assign-ends
                                (partition 2 1 [:end] (mapcat expand-function-range
                                                              (partition 2 functions))))))))
(defn color-wheel-hue
  "Creates a function specification which identifies a color wheel
  position with a particular hue, so it can participate in Afterglow's
  color effects. The hue can be specified as a number,
  a [jolby/colors]() object, or a
  string which is passed to the jolby/colors `create-color` function.

  The label to assign the function spec can be passed via the `:label`
  optional keyword argument, or it will be inferred from the hue value
  supplied. The function spec will be considered a fixed range unless
  you specify `:range :variable`.

  If hue is a sequence, then returns a sequence of the results of
  calling `color-wheel-hue` on each of the elements in that sequence,
  with the same optional arguments."
  [hue & {:keys [range label] :or {range :fixed}}]
  {:pre [(some? hue)]}
  (if (sequential? hue)
    ;; Map over the sequence, forwarding only the options actually given.
    (if (some? label)
      (map #(color-wheel-hue % :range range :label label) hue)
      (map #(color-wheel-hue % :range range) hue))
    (assoc (cond (number? hue)
                 ;; Numeric hues are clamped into the legal hue circle.
                 {:color-wheel-hue (colors/clamp-hue hue)
                  :label (or label (str "Color wheel hue " (colors/clamp-hue hue)))
                  :type (keyword (str "color-wheel-hue-" (colors/clamp-hue hue)))}

                 (string? hue)
                 ;; Strings are resolved through create-color; note the raw
                 ;; string (not the numeric hue) is embedded in the type keyword.
                 {:color-wheel-hue (colors/hue (colors/create-color hue))
                  :label (or label (str "Color wheel hue " hue))
                  :type (keyword (str "color-wheel-hue-" hue))}

                 ;; NOTE(review): `instance?` expects a Class; confirm that
                 ;; com.evocomputing.colors/color resolves to the color record
                 ;; class here rather than a var.
                 (instance? com.evocomputing.colors/color hue)
                 {:color-wheel-hue (colors/hue hue)
                  :label (or label (str "Color wheel hue " hue))
                  :type (keyword (str "color-wheel-hue-" (colors/hue hue)))}

                 :else
                 (throw (IllegalArgumentException.
                         (str "Don't know how to create hue from " hue))))
           :range range)))
(defn dimmer
  "A channel which controls a dimmer.

  Normal dimmers are dark at zero, and get brighter as the channel
  value increases, to a maximum brightness at 255. However, some
  fixtures have inverted dimmers. If that is the case, pass the DMX
  value at which the inversion takes place with `:inverted-from`. For
  example, fixtures which are brightest at zero and darken as the
  value approaches 255 would be specified as `:inverted-from 0`, while
  fixtures which are dark at zero, jump to maximum brightness at 1,
  then dim as the value grows towards 255 would be specified as
  `:inverted-from 1`.

  If the fixture uses two-byte values for the dimmer level, pass
  the offset of the channel containing the most-significant byte in
  `offset`, and specify the offset of the channel containing the
  least-significant byte with `:fine-offset`."
  [offset & {:keys [inverted-from fine-offset]}]
  ;; Fixed: this previously passed :range-label, which fine-channel does not
  ;; destructure, so the "Intensity" variable label was silently dropped.
  ;; fine-channel expects the UI variable label under :var-label.
  (merge (fine-channel :dimmer offset :fine-offset fine-offset :var-label "Intensity")
         (when inverted-from
           {:inverted-from inverted-from})))
(defn color
  "A channel which controls a color component. If `:hue` is supplied
  along with a hue value, this channel will participate in color
  mixing even if `color` is not one of the standard values `:red`,
  `:green`, `:blue`, or `:white` whose hues and contributions to color
  mixing are automatically understood.

  By default, the function created for the channel uses the name of
  the `color` keyword as its function label. Since this label is
  displayed in the [web interface]({{guide-url}}README.html#web-ui) as
  the text label in the cue grid cell for [Function
  Cues]({{guide-url}}cues.html#creating-function-cues) created for the
  function, you may wish to specify a more readable name, which you
  can do by passing it in with `:function-label`.

  If the fixture uses two-byte values for this color component, pass
  the offset of the channel containing the most-significant byte in
  `offset`, and specify the offset of the channel containing the
  least-significant byte with `:fine-offset`."
  [offset color & {:keys [hue function-label fine-offset]}]
  {:pre [(some? color)]}
  (let [color (keyword color)]
    ;; Fixed: fine-channel destructures the UI name as :function-name, not
    ;; :function-label; forwarding under the wrong key silently discarded a
    ;; user-supplied label (the default happened to coincide, masking this).
    (-> (fine-channel :color offset :fine-offset fine-offset
                      :function-type color
                      :function-name (or function-label
                                         (clojure.string/capitalize (name color))))
        (assoc :color color)  ; already a keyword from the binding above
        (cond-> hue (assoc :hue hue)))))
;; The following six constructors are a family of identical shape: each
;; defines a single continuously-variable channel via `fine-channel`, with
;; an optional second DMX channel for 16-bit fine control, and a fixed UI
;; variable label matching the channel's purpose.

(defn pan
  "A channel which pans a moving head, with an optional second channel
  for fine control."
  ([offset]
   (pan offset nil))
  ([offset fine-offset]
   (fine-channel :pan offset :fine-offset fine-offset :var-label "Pan")))

(defn tilt
  "A channel which tilts a moving head, with an optional second channel
  for fine control."
  ([offset]
   (tilt offset nil))
  ([offset fine-offset]
   (fine-channel :tilt offset :fine-offset fine-offset :var-label "Tilt")))

(defn focus
  "A channel which adjusts focus, with an optional second channel for
  fine control."
  ([offset]
   (focus offset nil))
  ([offset fine-offset]
   (fine-channel :focus offset :fine-offset fine-offset :var-label "Focus")))

(defn iris
  "A channel which controls an iris, with an optional second channel
  for fine control."
  ([offset]
   (iris offset nil))
  ([offset fine-offset]
   (fine-channel :iris offset :fine-offset fine-offset :var-label "Iris")))

(defn zoom
  "A channel which adjusts zoom, with an optional second channel for
  fine control."
  ([offset]
   (zoom offset nil))
  ([offset fine-offset]
   (fine-channel :zoom offset :fine-offset fine-offset :var-label "Zoom")))

(defn frost
  "A channel which adjusts frost, with an optional second channel for
  fine control."
  ([offset]
   (frost offset nil))
  ([offset fine-offset]
   (fine-channel :frost offset :fine-offset fine-offset :var-label "Frost")))
|
437f6108a21976f8305e8a51d0718bd82ab5486ffc46952376b8872c56e36526 | bsaleil/lc | etape1-println-int.scm | (println 31415926)
(println 11)
(println 10)
(println 9)
(println 1)
(println 0)
(println -1)
(println -9)
(println -10)
(println -11)
(println -31415926)
31415926
11
10
9
1
0
;-1
-9
;-10
;-11
;-31415926
| null | https://raw.githubusercontent.com/bsaleil/lc/ee7867fd2bdbbe88924300e10b14ea717ee6434b/unit-tests/IFT3065/1/etape1-println-int.scm | scheme | -1
-10
-11
-31415926 | (println 31415926)
(println 11)
(println 10)
(println 9)
(println 1)
(println 0)
(println -1)
(println -9)
(println -10)
(println -11)
(println -31415926)
31415926
11
10
9
1
0
-9
|
e34ebf3034f183dba814aa8b99d8da428343f0c5e5e20d0a6753b00dc094a11c | camllight/camllight | ouster-f16.2.ml | #open "tk";;
let top = OpenTk() in
let lb = label__create top
[Bitmap (BitmapFile "/usr/local/lib/tk/scripts/demos/bitmaps/flagdown")]
and ll = label__create top
[Text "No new mail"] in
pack [lb; ll] [];
MainLoop()
;;
| null | https://raw.githubusercontent.com/camllight/camllight/0cc537de0846393322058dbb26449427bfc76786/sources/contrib/camltk/books-examples/ouster-f16.2.ml | ocaml | #open "tk";;
let top = OpenTk() in
let lb = label__create top
[Bitmap (BitmapFile "/usr/local/lib/tk/scripts/demos/bitmaps/flagdown")]
and ll = label__create top
[Text "No new mail"] in
pack [lb; ll] [];
MainLoop()
;;
| |
7d9685ca3789ad3d3796e65692c79af18ee71a00575a83827b73cffb797f3f75 | CrossRef/cayenne | prefix.clj | (ns cayenne.tasks.prefix
(:require [clj-http.client :as client]
[cayenne.xml :as xml]
[cayenne.conf :as conf]
[clojure.core.memoize :as memoize])
(:use [cayenne.util])
(:import [java.io StringReader]))
;; todo unixref oai parser should extract owner prefix.
(defn parse-prefix-info
  "Parse an XML prefix-info document into a map holding the publisher
  :name, plus :location when a non-empty publisher_location element is
  present. NOTE(review): `.trim` will throw if the location element is
  missing (xselect1 returning nil) -- confirm upstream always emits it."
  [xml]
  (let [location (.trim (xml/xselect1 xml :> "publisher" "publisher_location" :text))]
    (-> {:name (xml/xselect1 xml :> "publisher" "publisher_name" :text)}
        (?> (not (empty? location)) assoc :location location))))
(defn get-prefix-info
  "Fetch and parse publisher information for OWNER-PREFIX from the
  configured upstream prefix-info URL. Returns the parsed map, or nil
  when the HTTP request does not succeed (exceptions are suppressed
  via :throw-exceptions false)."
  [owner-prefix]
  (let [url (str (conf/get-param [:upstream :prefix-info-url]) owner-prefix)
        resp (client/get url {:throw-exceptions false})]
    (when (client/success? resp)
      (-> (:body resp)
          (StringReader.)
          (xml/read-xml)
          (parse-prefix-info)))))
;; LRU-memoized variant of get-prefix-info, capped at 100 cached prefixes.
(def get-prefix-info-memo (memoize/lru get-prefix-info :lru/threshold 100))
(defn clear!
  "Empty the get-prefix-info-memo cache."
  []
  (memoize/memo-clear! get-prefix-info-memo))
(defn apply-to
  "Attach member/publisher information to an item via calls to
  get-prefix-info-memo (responses are cached).
  NOTE(review): currently an unimplemented stub returning (); the
  original docstring referenced a non-existent `gerPrefixPublisher`."
  [item]
  ())
| null | https://raw.githubusercontent.com/CrossRef/cayenne/02321ad23dbb1edd3f203a415f4a4b11ebf810d7/src/cayenne/tasks/prefix.clj | clojure | todo unixref oai parser should extract owner prefix. | (ns cayenne.tasks.prefix
(:require [clj-http.client :as client]
[cayenne.xml :as xml]
[cayenne.conf :as conf]
[clojure.core.memoize :as memoize])
(:use [cayenne.util])
(:import [java.io StringReader]))
(defn parse-prefix-info [xml]
(let [location (.trim (xml/xselect1 xml :> "publisher" "publisher_location" :text))]
(-> {:name (xml/xselect1 xml :> "publisher" "publisher_name" :text)}
(?> (not (empty? location)) assoc :location location))))
(defn get-prefix-info [owner-prefix]
(let [url (str (conf/get-param [:upstream :prefix-info-url]) owner-prefix)
resp (client/get url {:throw-exceptions false})]
(when (client/success? resp)
(-> (:body resp)
(StringReader.)
(xml/read-xml)
(parse-prefix-info)))))
(def get-prefix-info-memo (memoize/lru get-prefix-info :lru/threshold 100))
(defn clear! []
(memoize/memo-clear! get-prefix-info-memo))
(defn apply-to
"Attach member information to an item via calls to gerPrefixPublisher. Responses
are cached."
[item]
())
|
7446f8cfac2a559313be350f697cadc56bfe175cd730a6b7fe559ee9bc28dbc4 | fiddlerwoaroof/cl-git | model.lisp | (in-package :fwoar.cl-git)
;; Lens focusing the :object-data entry of an alist-shaped git object.
(defparameter *object-data-lens*
  (data-lens.lenses:make-alist-lens :object-data))
;; A packfile: the on-disk .pack data, its .idx index, and the owning repo.
(defclass pack ()
  ((%pack :initarg :pack :reader pack-file)
   (%index :initarg :index :reader index-file)
   (%repository :initarg :repository :reader repository)))

;; Abstract repository rooted at a directory.
(defclass repository ()
  ((%root :initarg :root :reader root)))

;; A repository backed by a .git directory.
(defclass git-repository (repository)
  ())

;; Marker base class for git objects.
(defclass git-object ()
  ())
(defgeneric object-type->sym (object-type)
  (:documentation "Canonicalizes different representations of an
object type to their symbol representation."))

;; Symbols are already canonical.
(defmethod object-type->sym ((o-t symbol))
  o-t)

;; Numeric type codes, as found in pack entries.  ECASE signals an
;; error for any code outside the handled set (note 5 has no mapping).
(defmethod object-type->sym ((object-type number))
  (ecase object-type
    (1 :commit)
    (2 :tree)
    (3 :blob)
    (4 :tag)
    (6 :ofs-delta)
    (7 :ref-delta)))

;; Textual type names; matching is case-insensitive via STRING-DOWNCASE.
(defmethod object-type->sym ((object-type string))
  (string-case:string-case ((string-downcase object-type))
    ("commit" :commit)
    ("tree" :tree)
    ("blob" :blob)
    ("tag" :tag)
    ("ofs-delta" :ofs-delta)
    ("ref-delta" :ref-delta)))
;; Signaled by the ALTS method combination when every alternative
;; method returned NIL; carries the configured message and the
;; arguments the generic function was called with.
(define-condition alts-fallthrough (error)
  ((%fallthrough-message :initarg :fallthrough-message :reader fallthrough-message)
   (%args :initarg :args :reader args))
  (:report (lambda (c s)
             (format s "~a ~s"
                     (fallthrough-message c)
                     (args c)))))
;; TODO: figure out how to handle ambiguity? restarts?
;; Method combination where each method is an "alternative": methods are
;; tried in order and the first non-NIL result wins; if all return NIL,
;; ALTS-FALLTHROUGH is signaled with FALLTHROUGH-MESSAGE and the call's
;; arguments.
(define-method-combination alts (&key fallthrough-message) ((methods *))
  (:arguments arg)
  (progn
    ;; Validate qualifiers: each method must be qualified `alts`,
    ;; optionally followed by exactly one symbol naming the alternative
    ;; (e.g. (defmethod foo alts :git ...)).
    (mapc (serapeum:op
            (let ((qualifiers (method-qualifiers _1)))
              (unless (and (eql 'alts (car qualifiers))
                           (if (null (cdr qualifiers))
                               t
                               (and (symbolp (cadr qualifiers))
                                    (null (cddr qualifiers)))))
                (invalid-method-error _1 "invalid qualifiers: ~s" qualifiers))))
          methods)
    ;; OR over all methods implements "first non-NIL wins".
    `(or ,@(mapcar (serapeum:op `(call-method ,_1))
                   methods)
         (error 'alts-fallthrough
                :fallthrough-message ,fallthrough-message
                :args ,arg))))
(defgeneric resolve-repository (object)
  (:documentation "resolve an OBJECT to a repository implementation")
  (:method-combination alts :fallthrough-message "failed to resolve repository"))

;; Git alternative: ROOT names an existing directory that contains a
;; .git subdirectory.  Returns NIL (falling through to the next
;; alternative) otherwise.
(defmethod resolve-repository alts :git ((root pathname))
  (alexandria:when-let ((root (probe-file root)))
    (let* ((git-dir (merge-pathnames (make-pathname :directory '(:relative ".git"))
                                     root)))
      (when (probe-file git-dir)
        (fw.lu:new 'git-repository root)))))
(defgeneric repository (object)
  (:documentation "get the repository for an object")
  (:method ((root pathname))
    (resolve-repository root))
  (:method ((root string))
    ;; Strings are parsed as namestrings and re-dispatched on pathname.
    (let ((root (parse-namestring root)))
      (repository root))))
(defun get-local-branches (root)
  "All local branches of the repository at ROOT: the unpacked (loose)
refs followed by the packed refs.  Both helpers are defined elsewhere
in this file."
  (append (get-local-unpacked-branches root)
          (get-local-packed-branches root)))
(defun loose-object-path (sha)
  "Relative path of the loose object named by the hex digest SHA: a
slash is inserted after the first two characters, yielding e.g.
.git/objects/ab/cdef... for SHA \"abcdef...\"."
  (let ((obj-path (fwoar.string-utils:insert-at 2 #\/ sha)))
    (merge-pathnames obj-path ".git/objects/")))
(defun pack (index pack repository)
  "Construct a PACK instance from INDEX (.idx), PACK (.pack) and the
owning REPOSITORY; see the PACK class for the slots these populate."
  (fw.lu:new 'pack index pack repository))
(defgeneric pack-files (repo)
  (:method ((repo git-repository))
    ;; One PACK per .idx under .git/objects/pack/, paired with the .pack
    ;; file of the same name (same pathname with type "pack").
    (mapcar (serapeum:op
              (pack _1
                    (merge-pathnames
                     (make-pathname :type "pack") _1)
                    repo))
            (uiop:directory*
             (merge-pathnames ".git/objects/pack/*.idx"
                              ;; NOTE(review): ROOT-OF is not defined in this
                              ;; file; the REPOSITORY class reader is ROOT.
                              ;; Confirm ROOT-OF is provided elsewhere.
                              (root-of repo))))))
(defgeneric loose-object (repository id)
  (:documentation "Pathname of the loose object whose hash begins with ID
(ID may be an abbreviated hash), or NIL when none matches.")
  ;; String and pathname designators are only accepted when they contain
  ;; a .git directory; they re-dispatch via the REPOSITORY generic.
  (:method ((repository string) id)
    (when (probe-file (merge-pathnames ".git" repository))
      (loose-object (repository repository) id)))
  (:method ((repository pathname) id)
    (when (probe-file (merge-pathnames ".git" repository))
      (loose-object (repository repository) id)))
  (:method ((repository repository) id)
    ;; Glob for the possibly-abbreviated id; first match wins.
    (car
     (uiop:directory*
      (merge-pathnames (loose-object-path (serapeum:concat id "*"))
                       (root repository))))))
(defun loose-object-p (repository id)
  "Is ID an ID of a loose object?  The truthy value is the object's
pathname, as returned by LOOSE-OBJECT."
  (loose-object repository id))
;; A reference into a repository: the repo it belongs to plus the hash
;; it designates.
(defclass git-ref ()
  ((%repo :initarg :repo :reader ref-repo)
   (%hash :initarg :hash :reader ref-hash)))

;; A ref stored as a loose object file on disk.
(defclass loose-ref (git-ref)
  ((%file :initarg :file :reader loose-ref-file)))

;; A ref stored inside a packfile, at a byte offset within that pack.
(defclass packed-ref (git-ref)
  ((%pack :initarg :pack :reader packed-ref-pack)
   (%offset :initarg :offset :reader packed-ref-offset)))
;; Print refs as #<GIT-REF abcdef1 of #<GIT-REPOSITORY ...>> using the
;; first seven characters of the hash.
(defmethod print-object ((obj git-ref) s)
  (print-unreadable-object (obj s :type t)
    (format s "~a of ~a"
            (subseq (ref-hash obj) 0 7)
            (ref-repo obj)
            ;; Disabled alternative: render the repo root with the home
            ;; directory abbreviated to ~/ (kept for reference; also uses
            ;; ROOT-OF, which is not defined in this file).
            #+(or)
            (serapeum:string-replace (namestring (user-homedir-pathname))
                                     (root-of (ref-repo obj))
                                     "~/"))))
| null | https://raw.githubusercontent.com/fiddlerwoaroof/cl-git/fb9deed060d49dbb52f390f67b1783a13edf71b6/model.lisp | lisp | TODO: figure out how to handle ambiguity? restarts? | (in-package :fwoar.cl-git)
(defparameter *object-data-lens*
(data-lens.lenses:make-alist-lens :object-data))
(defclass pack ()
((%pack :initarg :pack :reader pack-file)
(%index :initarg :index :reader index-file)
(%repository :initarg :repository :reader repository)))
(defclass repository ()
((%root :initarg :root :reader root)))
(defclass git-repository (repository)
())
(defclass git-object ()
())
(defgeneric object-type->sym (object-type)
(:documentation "Canonicalizes different representations of an
object type to their symbol representation."))
(defmethod object-type->sym ((o-t symbol))
o-t)
(defmethod object-type->sym ((object-type number))
(ecase object-type
(1 :commit)
(2 :tree)
(3 :blob)
(4 :tag)
(6 :ofs-delta)
(7 :ref-delta)))
(defmethod object-type->sym ((object-type string))
(string-case:string-case ((string-downcase object-type))
("commit" :commit)
("tree" :tree)
("blob" :blob)
("tag" :tag)
("ofs-delta" :ofs-delta)
("ref-delta" :ref-delta)))
(define-condition alts-fallthrough (error)
((%fallthrough-message :initarg :fallthrough-message :reader fallthrough-message)
(%args :initarg :args :reader args))
(:report (lambda (c s)
(format s "~a ~s"
(fallthrough-message c)
(args c)))))
(define-method-combination alts (&key fallthrough-message) ((methods *))
(:arguments arg)
(progn
(mapc (serapeum:op
(let ((qualifiers (method-qualifiers _1)))
(unless (and (eql 'alts (car qualifiers))
(if (null (cdr qualifiers))
t
(and (symbolp (cadr qualifiers))
(null (cddr qualifiers)))))
(invalid-method-error _1 "invalid qualifiers: ~s" qualifiers))))
methods)
`(or ,@(mapcar (serapeum:op `(call-method ,_1))
methods)
(error 'alts-fallthrough
:fallthrough-message ,fallthrough-message
:args ,arg))))
(defgeneric resolve-repository (object)
(:documentation "resolve an OBJECT to a repository implementation")
(:method-combination alts :fallthrough-message "failed to resolve repository"))
(defmethod resolve-repository alts :git ((root pathname))
(alexandria:when-let ((root (probe-file root)))
(let* ((git-dir (merge-pathnames (make-pathname :directory '(:relative ".git"))
root)))
(when (probe-file git-dir)
(fw.lu:new 'git-repository root)))))
(defgeneric repository (object)
(:documentation "get the repository for an object")
(:method ((root pathname))
(resolve-repository root))
(:method ((root string))
(let ((root (parse-namestring root)))
(repository root))))
(defun get-local-branches (root)
(append (get-local-unpacked-branches root)
(get-local-packed-branches root)))
(defun loose-object-path (sha)
(let ((obj-path (fwoar.string-utils:insert-at 2 #\/ sha)))
(merge-pathnames obj-path ".git/objects/")))
(defun pack (index pack repository)
(fw.lu:new 'pack index pack repository))
(defgeneric pack-files (repo)
(:method ((repo git-repository))
(mapcar (serapeum:op
(pack _1
(merge-pathnames
(make-pathname :type "pack") _1)
repo))
(uiop:directory*
(merge-pathnames ".git/objects/pack/*.idx"
(root-of repo))))))
(defgeneric loose-object (repository id)
(:method ((repository string) id)
(when (probe-file (merge-pathnames ".git" repository))
(loose-object (repository repository) id)))
(:method ((repository pathname) id)
(when (probe-file (merge-pathnames ".git" repository))
(loose-object (repository repository) id)))
(:method ((repository repository) id)
(car
(uiop:directory*
(merge-pathnames (loose-object-path (serapeum:concat id "*"))
(root repository))))))
(defun loose-object-p (repository id)
"Is ID an ID of a loose object?"
(loose-object repository id))
(defclass git-ref ()
((%repo :initarg :repo :reader ref-repo)
(%hash :initarg :hash :reader ref-hash)))
(defclass loose-ref (git-ref)
((%file :initarg :file :reader loose-ref-file)))
(defclass packed-ref (git-ref)
((%pack :initarg :pack :reader packed-ref-pack)
(%offset :initarg :offset :reader packed-ref-offset)))
(defmethod print-object ((obj git-ref) s)
(print-unreadable-object (obj s :type t)
(format s "~a of ~a"
(subseq (ref-hash obj) 0 7)
(ref-repo obj)
#+(or)
(serapeum:string-replace (namestring (user-homedir-pathname))
(root-of (ref-repo obj))
"~/"))))
|
08d6f948bfd15a830070e575e702f2c5f846bfcfb09cb4952d689c2c04f9f0f9 | antono/guix-debian | sync-descriptions.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2013 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
;;;
;;; Report package synopses and descriptions that defer from those found in
;;; the GNU Womb.
;;;
(use-modules (guix gnu-maintenance)
(guix packages)
(guix utils)
(guix ui)
(gnu packages)
(srfi srfi-1)
(srfi srfi-26)
(ice-9 match))
(define official
  ;; GNU package descriptors fetched from the Womb, the upstream
  ;; maintenance repository of package metadata.
  (official-gnu-packages))
(define gnus
  ;; GNU packages available in the distro, as an alist mapping each
  ;; distro package object to its Womb descriptor.  Packages without a
  ;; matching descriptor are omitted.  `lookup' scans OFFICIAL linearly
  ;; for a descriptor whose name equals the package's name.
  (let ((lookup (lambda (p)
                  (find (lambda (descriptor)
                          (equal? (gnu-package-name descriptor)
                                  (package-name p)))
                        official))))
    (fold-packages (lambda (package result)
                     (or (and=> (lookup package)
                                (cut alist-cons package <> result))
                         result))
                   '())))
;; Iterate over GNU packages.  Report those whose synopsis defers from that
;; found upstream, then those whose description differs.
(for-each (match-lambda
           ((package . descriptor)
            ;; Synopses must match the Womb's exactly; a missing upstream
            ;; synopsis also triggers a report (with #f printed).
            (let ((upstream (gnu-package-doc-summary descriptor))
                  (downstream (package-synopsis package))
                  (loc (or (package-field-location package 'synopsis)
                           (package-location package))))
              (unless (and upstream (string=? upstream downstream))
                (format (guix-warning-port)
                        "~a: ~a: proposed synopsis: ~s~%"
                        (location->string loc) (package-name package)
                        upstream)))

            ;; Descriptions are compared after re-filling both to 100
            ;; columns, so differences in line breaking are ignored; the
            ;; proposal is printed re-filled to 77 columns, indented 7.
            (let ((upstream (gnu-package-doc-description descriptor))
                  (downstream (package-description package))
                  (loc (or (package-field-location package 'description)
                           (package-location package))))
              (when (and upstream
                         (not (string=? (fill-paragraph upstream 100)
                                        (fill-paragraph downstream 100))))
                (format (guix-warning-port)
                        "~a: ~a: proposed description:~% ~a~%"
                        (location->string loc) (package-name package)
                        (fill-paragraph upstream 77 7))))))
          gnus)
| null | https://raw.githubusercontent.com/antono/guix-debian/85ef443788f0788a62010a942973d4f7714d10b4/build-aux/sync-descriptions.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Report package synopses and descriptions that defer from those found in
the GNU Womb.
GNU package descriptors from the Womb.
GNU packages available in the distro.
Iterate over GNU packages. Report those whose synopsis defers from that
found upstream. | Copyright © 2013 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(use-modules (guix gnu-maintenance)
(guix packages)
(guix utils)
(guix ui)
(gnu packages)
(srfi srfi-1)
(srfi srfi-26)
(ice-9 match))
(define official
(official-gnu-packages))
(define gnus
(let ((lookup (lambda (p)
(find (lambda (descriptor)
(equal? (gnu-package-name descriptor)
(package-name p)))
official))))
(fold-packages (lambda (package result)
(or (and=> (lookup package)
(cut alist-cons package <> result))
result))
'())))
(for-each (match-lambda
((package . descriptor)
(let ((upstream (gnu-package-doc-summary descriptor))
(downstream (package-synopsis package))
(loc (or (package-field-location package 'synopsis)
(package-location package))))
(unless (and upstream (string=? upstream downstream))
(format (guix-warning-port)
"~a: ~a: proposed synopsis: ~s~%"
(location->string loc) (package-name package)
upstream)))
(let ((upstream (gnu-package-doc-description descriptor))
(downstream (package-description package))
(loc (or (package-field-location package 'description)
(package-location package))))
(when (and upstream
(not (string=? (fill-paragraph upstream 100)
(fill-paragraph downstream 100))))
(format (guix-warning-port)
"~a: ~a: proposed description:~% ~a~%"
(location->string loc) (package-name package)
(fill-paragraph upstream 77 7))))))
gnus)
|
46cf9d3bf2730834934e04728ff663a76477e1d58357896e5f9d65e27fef6c04 | gentoo-haskell/hackport | Utils.hs | {-|
Module : Merge.Utils
License : GPL-3+
Maintainer :
Internal helper functions for "Merge".
-}
module Merge.Utils
( readPackageString
, getPreviousPackageId
, first_just_of
, drop_prefix
, squash_debug
, convert_underscores
, mangle_iuse
, to_unstable
, metaFlags
, dropIfUseExpands
    -- hspec exports
, dropIfUseExpand
) where
import qualified Control.Applicative as A
import qualified Control.Monad as M
import qualified Data.Char as C
import Data.Maybe (catMaybes, mapMaybe)
import qualified Data.List as L
import qualified Data.Map.Strict as Map
import qualified System.Directory as SD
import qualified System.FilePath as SF
import System.FilePath ((</>))
import System.Process (readCreateProcess, shell)
import Error
import qualified Portage.PackageId as Portage
import qualified Distribution.Package as Cabal
import qualified Distribution.PackageDescription as Cabal
-- | Parse a 'String' as a valid package string. E.g. @category\/name-1.0.0@.
-- Return 'HackPortError' if the string to parse is invalid.
--
-- When the 'String' is valid:
--
-- >>> readPackageString "dev-haskell/packagename1-1.0.0"
-- Right (Just (Category {unCategory = "dev-haskell"}),PackageName "packagename1",Just (Version {versionNumber = [1,0,0], versionChar = Nothing, versionSuffix = [], versionRevision = 0}))
readPackageString :: String
                  -> Either HackPortError ( Maybe Portage.Category
                                          , Cabal.PackageName
                                          , Maybe Portage.Version
                                          )
readPackageString packageString = do
  case Portage.parseFriendlyPackage packageString of
    -- No version supplied: always acceptable.
    Right v@(_,_,Nothing) -> return v
    -- we only allow versions we can convert into cabal versions,
    -- i.e. no version character, no suffixes, and revision 0
    Right v@(_,_,Just (Portage.Version _ Nothing [] 0)) -> return v
    Left e -> Left $ ArgumentError $ "Could not parse [category/]package[-version]: "
              ++ packageString ++ "\nParsec error: " ++ e
    -- Parsed, but the version carries Portage-only detail: reject.
    _ -> Left $ ArgumentError $ "Could not parse [category/]package[-version]: "
         ++ packageString
-- | Maybe return the 'Portage.PackageId' of the next-highest existing
-- version for a package, relative to the provided 'Portage.PackageId'.
--
-- For example:
--
-- >>> let ebuildDir = ["foo-bar2-3.0.1.ebuild","metadata.xml"]
-- >>> let newPkgId = Portage.PackageId (Portage.PackageName (Portage.Category "dev-haskell") (Cabal.mkPackageName "foo-bar2")) (Portage.Version [3,0,2] Nothing [] 0 )
-- >>> getPreviousPackageId ebuildDir newPkgId
-- Just (PackageId {packageId = PackageName {category = Category {unCategory = "dev-haskell"}, cabalPkgName = PackageName "foo-bar2"}, pkgVersion = Version {versionNumber = [3,0,1], versionChar = Nothing, versionSuffix = [], versionRevision = 0}})
getPreviousPackageId :: [FilePath]        -- ^ list of files in the package's ebuild directory
                     -> Portage.PackageId -- ^ new PackageId
                     -> Maybe Portage.PackageId -- ^ maybe PackageId of previous version
getPreviousPackageId pkgDir newPkgId =
  case olderIdsDescending of
    previous:_ -> Just previous
    []         -> Nothing
  where
    cat = Portage.category (Portage.packageId newPkgId)
    -- Ebuild file names (extension stripped) found in the directory.
    ebuildNames = [ SF.dropExtension fp
                  | fp <- pkgDir
                  , SF.takeExtension fp == ".ebuild" ]
    -- Parseable package ids strictly older than the new one, newest first.
    olderIdsDescending =
      reverse
      . L.sortOn Portage.pkgVersion
      . filter (< newPkgId)
      $ mapMaybe (Portage.filePathToPackageId cat) ebuildNames
-- | Return the first 'Just' in the list (or 'Nothing' if there is none);
-- an alias for 'M.msum' specialised to 'Maybe'.
--
-- prop> \a -> first_just_of a == M.msum a
first_just_of :: [Maybe a] -> Maybe a
first_just_of = foldr (A.<|>) Nothing
-- | Remove @with@ or @use@ prefixes (followed by @-@ or @_@) from flag
-- names; any other name is returned unchanged.
--
-- >>> drop_prefix "with_conduit"
-- "conduit"
-- >>> drop_prefix "use-https"
-- "https"
-- >>> drop_prefix "no_examples"
-- "no_examples"
drop_prefix :: String -> String
drop_prefix flag =
  case mapMaybe (`L.stripPrefix` flag) prefixes of
    [stripped] -> stripped
    _          -> flag
  where
    -- "with-", "with_", "use-", "use_"
    prefixes = [ p ++ sep | p <- ["with", "use"], sep <- ["-", "_"] ]
-- | Squash debug-related @USE@ flags under the @debug@ global @USE@
-- flag: any flag whose name contains \"debug\" (case-insensitively)
-- becomes @debug@.
--
-- >>> squash_debug "use-debug-foo"
-- "debug"
-- >>> squash_debug "foo-bar"
-- "foo-bar"
squash_debug :: String -> String
squash_debug flag
  | "debug" `L.isInfixOf` lowered = "debug"
  | otherwise                     = flag
  where
    lowered = map C.toLower flag
-- | Gentoo allows underscore ('_') names in @IUSE@ only for
-- @USE_EXPAND@ values. If it's not a user-specified rename, mangle
-- every underscore into a hyphen ('-').
--
-- >>> convert_underscores "remove_my_underscores"
-- "remove-my-underscores"
convert_underscores :: String -> String
convert_underscores =
  map (\c -> if c == '_' then '-' else c)
-- | Perform all @IUSE@ mangling: underscores to hyphens, then prefix
-- stripping, then squashing debug flags.
--
-- >>> mangle_iuse "use_foo-bar_debug"
-- "debug"
-- >>> mangle_iuse "with-bar_quux"
-- "bar-quux"
mangle_iuse :: String -> String
mangle_iuse flag = squash_debug (drop_prefix (convert_underscores flag))
-- | Convert a stable keyword to its testing (unstable) form by
-- prepending @~@. Already-unstable keywords and arch masks (@-@) are
-- left untouched.
--
-- >>> to_unstable "amd64"
-- "~amd64"
-- >>> to_unstable "~amd64"
-- "~amd64"
-- >>> to_unstable "-amd64"
-- "-amd64"
to_unstable :: String -> String
to_unstable kw
  | "~" `L.isPrefixOf` kw = kw
  | "-" `L.isPrefixOf` kw = kw
  | otherwise             = '~' : kw
-- | Generate a 'Map.Map' from mangled 'Cabal.PackageFlag' names to
-- their descriptions.
--
-- For example, if we construct a singleton list holding a
-- 'Cabal.PackageFlag' with 'Cabal.FlagName' @foo@ and description
-- @bar@, we should get a 'Map.Map' containing those values:
--
-- >>> let flags = [(Cabal.emptyFlag (Cabal.mkFlagName "foo")) { Cabal.flagDescription = "bar" }]
-- >>> metaFlags flags
-- fromList [("foo","bar")]
metaFlags :: [Cabal.PackageFlag] -> Map.Map String String
metaFlags flags =
  Map.fromList
    [ ( mangle_iuse (Cabal.unFlagName (Cabal.flagName flag))
      , Cabal.flagDescription flag )
    | flag <- flags
    ]
-- | Return a list of @USE_EXPAND@s maintained by ::gentoo.
--
-- First, 'getUseExpands' runs @portageq@ to determine the 'FilePath' of the
-- directory containing valid @USE_EXPAND@s. If the 'FilePath' exists,
-- it drops the filename extensions to return a list of @USE_EXPAND@s
-- as Portage understands them. If the 'FilePath' does not exist,
-- 'getUseExpands' supplies a bare-bones list of @USE_EXPAND@s.
getUseExpands :: IO [String]
getUseExpands = do
  portDir <- readCreateProcess (shell "portageq get_repo_path / gentoo") ""
  -- portageq's output ends in a newline; strip trailing whitespace
  -- before building the path.
  let use_expands_dir = (L.dropWhileEnd C.isSpace portDir) </> "profiles" </> "desc"
  path_exists <- SD.doesPathExist use_expands_dir
  if path_exists
    then do use_expands_contents <- SD.listDirectory use_expands_dir
            -- e.g. "cpu_flags_x86.desc" -> "cpu_flags_x86"
            return (SF.dropExtension <$> use_expands_contents)
    -- Provide some sensible defaults if hackport cannot find ::gentoo
    else let use_expands_contents = ["cpu_flags_arm","cpu_flags_ppc","cpu_flags_x86"]
         in return use_expands_contents
| Return a ' Cabal . PackageFlag ' if it is not a @USE_EXPAND@.
--
-- If the 'Cabal.flagName' has a prefix matching any valid @USE_EXPAND@,
then return ' Nothing ' . Otherwise return ' Just ' ' Cabal . PackageFlag ' .
dropIfUseExpand :: [String] -> Cabal.PackageFlag -> Maybe Cabal.PackageFlag
dropIfUseExpand use_expands flag =
if or (A.liftA2 L.isPrefixOf use_expands [Cabal.unFlagName . Cabal.flagName $ flag])
then Nothing else Just flag
| Strip @USE_EXPAND@s from a [ ' . PackageFlag ' ] .
dropIfUseExpands :: [Cabal.PackageFlag] -> IO [Cabal.PackageFlag]
dropIfUseExpands flags = do
use_expands <- getUseExpands
return $ catMaybes (dropIfUseExpand use_expands <$> flags)
| null | https://raw.githubusercontent.com/gentoo-haskell/hackport/d0e57e8dd86731fa416422fe68720f7a994694f1/src/Merge/Utils.hs | haskell | |
Module : Merge.Utils
License : GPL-3+
Maintainer :
Internal helper functions for "Merge".
| Parse a 'String' as a valid package string. E.g. @category\/name-1.0.0@.
Return 'HackPortError' if the string to parse is invalid.
When the 'String' is valid:
>>> readPackageString "dev-haskell/packagename1-1.0.0"
Right (Just (Category {unCategory = "dev-haskell"}),PackageName "packagename1",Just (Version {versionNumber = [1,0,0], versionChar = Nothing, versionSuffix = [], versionRevision = 0}))
we only allow versions we can convert into cabal versions
| Maybe return a 'Portage.PackageId' of the next highest version for a given
For example:
>>> let ebuildDir = ["foo-bar2-3.0.1.ebuild","metadata.xml"]
>>> let newPkgId = Portage.PackageId (Portage.PackageName (Portage.Category "dev-haskell") (Cabal.mkPackageName "foo-bar2")) (Portage.Version [3,0,2] Nothing [] 0 )
^ list of ebuilds for given package
prop> \a -> first_just_of a == M.msum a
| Remove @with@ or @use@ prefixes from flag names.
>>> drop_prefix "with_conduit"
"conduit"
>>> drop_prefix "use-https"
"https"
>>> drop_prefix "no_examples"
"no_examples"
| Squash debug-related @USE@ flags under the @debug@ global
@USE@ flag.
>>> squash_debug "use-debug-foo"
"debug"
>>> squash_debug "foo-bar"
"foo-bar"
| Gentoo allows underscore ('_') names in @IUSE@ only for
@USE_EXPAND@ values. If it's not a user-specified rename mangle
it into a hyphen ('-').
>>> convert_underscores "remove_my_underscores"
"remove-my-underscores"
| Perform all @IUSE@ mangling.
>>> mangle_iuse "use_foo-bar_debug"
"debug"
>>> mangle_iuse "with-bar_quux"
"bar-quux"
| Convert all stable keywords to testing (unstable) keywords.
Preserve arch masks (-).
>>> to_unstable "amd64"
>>> to_unstable "-amd64"
"-amd64"
a 'Map.Map' containing those values:
>>> metaFlags flags
| Return a list of @USE_EXPAND@s maintained by ::gentoo.
First, 'getUseExpands' runs @portageq@ to determine the 'FilePath' of the
directory containing valid @USE_EXPAND@s. If the 'FilePath' exists,
it drops the filename extensions to return a list of @USE_EXPAND@s
supplies a bare-bones list of @USE_EXPAND@s.
Provide some sensible defaults if hackport cannot find ::gentoo
If the 'Cabal.flagName' has a prefix matching any valid @USE_EXPAND@, | module Merge.Utils
( readPackageString
, getPreviousPackageId
, first_just_of
, drop_prefix
, squash_debug
, convert_underscores
, mangle_iuse
, to_unstable
, metaFlags
, dropIfUseExpands
hspec exports
, dropIfUseExpand
) where
import qualified Control.Applicative as A
import qualified Control.Monad as M
import qualified Data.Char as C
import Data.Maybe (catMaybes, mapMaybe)
import qualified Data.List as L
import qualified Data.Map.Strict as Map
import qualified System.Directory as SD
import qualified System.FilePath as SF
import System.FilePath ((</>))
import System.Process (readCreateProcess, shell)
import Error
import qualified Portage.PackageId as Portage
import qualified Distribution.Package as Cabal
import qualified Distribution.PackageDescription as Cabal
readPackageString :: String
-> Either HackPortError ( Maybe Portage.Category
, Cabal.PackageName
, Maybe Portage.Version
)
readPackageString packageString = do
case Portage.parseFriendlyPackage packageString of
Right v@(_,_,Nothing) -> return v
Right v@(_,_,Just (Portage.Version _ Nothing [] 0)) -> return v
Left e -> Left $ ArgumentError $ "Could not parse [category/]package[-version]: "
++ packageString ++ "\nParsec error: " ++ e
_ -> Left $ ArgumentError $ "Could not parse [category/]package[-version]: "
++ packageString
package , relative to the provided ' Portage . PackageId ' .
> > > getPreviousPackageId ebuildDir newPkgId
Just ( PackageId { packageId = PackageName { category = Category { unCategory = " dev - haskell " } , cabalPkgName = PackageName " foo - bar2 " } , pkgVersion = Version { versionNumber = [ 3,0,1 ] , versionChar = Nothing , versionSuffix = [ ] , versionRevision = 0 } } )
^ new PackageId
^ maybe PackageId of previous version
getPreviousPackageId pkgDir newPkgId = do
let pkgIds = reverse
. L.sortOn (Portage.pkgVersion)
. filter (<newPkgId)
$ mapMaybe (Portage.filePathToPackageId (Portage.category . Portage.packageId $ newPkgId))
$ SF.dropExtension <$> filter (\fp -> SF.takeExtension fp == ".ebuild") pkgDir
case pkgIds of
x:_ -> Just x
_ -> Nothing
| for ' msum ' .
first_just_of :: [Maybe a] -> Maybe a
first_just_of = M.msum
drop_prefix :: String -> String
drop_prefix x =
let prefixes = ["with","use"]
separators = ["-","_"]
combinations = A.liftA2 (++) prefixes separators
in case catMaybes (A.liftA2 L.stripPrefix combinations [x]) of
[z] -> z
_ -> x
squash_debug :: String -> String
squash_debug flag = if "debug" `L.isInfixOf` (C.toLower <$> flag)
then "debug"
else flag
convert_underscores :: String -> String
convert_underscores = map f
where f '_' = '-'
f c = c
mangle_iuse :: String -> String
mangle_iuse = squash_debug . drop_prefix . convert_underscores
" ~amd64 "
> > > to_unstable " ~amd64 "
" ~amd64 "
to_unstable :: String -> String
to_unstable kw =
case kw of
'~':_ -> kw
'-':_ -> kw
_ -> '~':kw
| Generate a ' Map . Map ' of ' . PackageFlag ' names and their descriptions .
For example , if we construct a singleton list holding a ' Cabal . PackageFlag ' with
' Cabal . FlagName ' @foo@ and ' Cabal . FlagDescription ' @bar@ , we should get
> > > let flags = [ ( Cabal.emptyFlag ( Cabal.mkFlagName " foo " ) ) { Cabal.flagDescription = " bar " } ]
fromList [ ( " " ) ]
metaFlags :: [Cabal.PackageFlag] -> Map.Map String String
metaFlags flags =
Map.fromList $
zip (mangle_iuse . Cabal.unFlagName . Cabal.flagName <$> flags)
(Cabal.flagDescription <$> flags)
as Portage understands them . If the ' FilePath ' does not exist , ' getUseExpands '
getUseExpands :: IO [String]
getUseExpands = do
portDir <- readCreateProcess (shell "portageq get_repo_path / gentoo") ""
let use_expands_dir = (L.dropWhileEnd C.isSpace portDir) </> "profiles" </> "desc"
path_exists <- SD.doesPathExist use_expands_dir
if path_exists
then do use_expands_contents <- SD.listDirectory use_expands_dir
return (SF.dropExtension <$> use_expands_contents)
else let use_expands_contents = ["cpu_flags_arm","cpu_flags_ppc","cpu_flags_x86"]
in return use_expands_contents
| Return a ' Cabal . PackageFlag ' if it is not a @USE_EXPAND@.
then return ' Nothing ' . Otherwise return ' Just ' ' Cabal . PackageFlag ' .
dropIfUseExpand :: [String] -> Cabal.PackageFlag -> Maybe Cabal.PackageFlag
dropIfUseExpand use_expands flag =
if or (A.liftA2 L.isPrefixOf use_expands [Cabal.unFlagName . Cabal.flagName $ flag])
then Nothing else Just flag
| Strip @USE_EXPAND@s from a [ ' . PackageFlag ' ] .
dropIfUseExpands :: [Cabal.PackageFlag] -> IO [Cabal.PackageFlag]
dropIfUseExpands flags = do
use_expands <- getUseExpands
return $ catMaybes (dropIfUseExpand use_expands <$> flags)
|
07cba01eba431c4e19937ff2f38e50c88b906ebb053fa93a9cf29cb5b5a9c58c | incoherentsoftware/defect-process | Level.hs | module Configs.All.Level
( LevelConfig(..)
) where
import Data.Aeson.Types (FromJSON, genericParseJSON, parseJSON)
import GHC.Generics (Generic)
import qualified Data.List.NonEmpty as NE
import Enemy.LockOnReticleData
import Level.Room.ArenaWalls.EnemySpawn.Types
import Level.Room.ArenaWalls.JSON
import Level.Room.Event.SlotMachine.Util
import Util
import World.Util
data LevelConfig = LevelConfig
{ _maxNumArenas :: Int
, _runProgressScreenSecs :: Secs
, _endBossGoldValue :: GoldValue
, _endBossSpawnWaitSecs :: Secs
, _endWarpOutWaitSecs :: Secs
, _itemPickupWeaponGoldValue :: GoldValue
, _itemPickupGunGoldValue :: GoldValue
, _itemPickupMovementSkillGoldValue :: GoldValue
, _itemPickupStoneFormSkillGoldValue :: GoldValue
, _itemPickupFlightSkillGoldValue :: GoldValue
, _itemPickupFastFallSkillGoldValue :: GoldValue
, _itemPickupStasisBlastSkillGoldValue :: GoldValue
, _itemPickupMarkRecallSkillGoldValue :: GoldValue
, _itemPickupSummonPlatformSkillGoldValue :: GoldValue
, _itemPickupMeterUpgradeGoldValue :: GoldValue
, _itemPickupDoubleJumpUpgradeGoldValue :: GoldValue
, _itemPickupMovementSkillUpgradeGoldValue :: GoldValue
, _itemPickupHealthGoldValue :: GoldValue
, _itemPickupHealthMultiplicandGoldValue :: GoldValue
, _arenaWallsGoldDrops :: NE.NonEmpty RoomArenaWallsGoldDropJSON
, _arenaWallsMaxWidths :: [RoomArenaWallsMaxWidthJSON]
, _speedRailAcceleration :: Acceleration
, _speedRailMaxSpeed :: Speed
, _speedRailMaxPlayerTurnaroundSpeed :: Speed
, _speedRailSlowSpeedThreshold :: Speed
, _springLauncherVelY :: VelY
, _springLauncherWidth :: Float
, _springLauncherHeight :: Float
, _springLauncherSurfaceWidth :: Float
, _springLauncherSurfaceHeight :: Float
, _eventLightningNumWaves :: Int
, _eventLightningGoldValue :: GoldValue
, _eventLightningPerHitPenaltyGoldValue :: GoldValue
, _eventBouncingBallAliveSecs :: Secs
, _eventBouncingBallDropCooldownSecs :: Secs
, _eventBouncingBallMinSpeed :: Speed
, _eventBouncingBallMaxSpeed :: Speed
, _eventBouncingBallDropMeleeGoldValue :: GoldValue
, _eventBouncingBallDropRangedGoldValue :: GoldValue
, _eventBouncingBallLockOnReticleData :: EnemyLockOnReticleData
, _eventSlotMachineSelectionIntervalSecs :: Secs
, _eventSlotMachineSelectionActivateIntervalSecs :: Secs
, _eventSlotMachineSelectionActivateIntervalMultiplier :: Float
, _eventSlotMachineSelectionMinIntervalSecs :: Secs
, _eventSlotMachineSelectionTextOffset :: Pos2
, _eventSlotMachineSelectionOffsets :: NE.NonEmpty Pos2
, _eventSlotMachineSlotsChoices :: NE.NonEmpty SlotsChoice
, _enemySpawnWaves :: NE.NonEmpty EnemySpawnWaveJSON
}
deriving Generic
instance FromJSON LevelConfig where
parseJSON = genericParseJSON aesonFieldDropUnderscore
| null | https://raw.githubusercontent.com/incoherentsoftware/defect-process/14ec46dec2c48135bc4e5965b7b75532ef19268e/src/Configs/All/Level.hs | haskell | module Configs.All.Level
( LevelConfig(..)
) where
import Data.Aeson.Types (FromJSON, genericParseJSON, parseJSON)
import GHC.Generics (Generic)
import qualified Data.List.NonEmpty as NE
import Enemy.LockOnReticleData
import Level.Room.ArenaWalls.EnemySpawn.Types
import Level.Room.ArenaWalls.JSON
import Level.Room.Event.SlotMachine.Util
import Util
import World.Util
data LevelConfig = LevelConfig
{ _maxNumArenas :: Int
, _runProgressScreenSecs :: Secs
, _endBossGoldValue :: GoldValue
, _endBossSpawnWaitSecs :: Secs
, _endWarpOutWaitSecs :: Secs
, _itemPickupWeaponGoldValue :: GoldValue
, _itemPickupGunGoldValue :: GoldValue
, _itemPickupMovementSkillGoldValue :: GoldValue
, _itemPickupStoneFormSkillGoldValue :: GoldValue
, _itemPickupFlightSkillGoldValue :: GoldValue
, _itemPickupFastFallSkillGoldValue :: GoldValue
, _itemPickupStasisBlastSkillGoldValue :: GoldValue
, _itemPickupMarkRecallSkillGoldValue :: GoldValue
, _itemPickupSummonPlatformSkillGoldValue :: GoldValue
, _itemPickupMeterUpgradeGoldValue :: GoldValue
, _itemPickupDoubleJumpUpgradeGoldValue :: GoldValue
, _itemPickupMovementSkillUpgradeGoldValue :: GoldValue
, _itemPickupHealthGoldValue :: GoldValue
, _itemPickupHealthMultiplicandGoldValue :: GoldValue
, _arenaWallsGoldDrops :: NE.NonEmpty RoomArenaWallsGoldDropJSON
, _arenaWallsMaxWidths :: [RoomArenaWallsMaxWidthJSON]
, _speedRailAcceleration :: Acceleration
, _speedRailMaxSpeed :: Speed
, _speedRailMaxPlayerTurnaroundSpeed :: Speed
, _speedRailSlowSpeedThreshold :: Speed
, _springLauncherVelY :: VelY
, _springLauncherWidth :: Float
, _springLauncherHeight :: Float
, _springLauncherSurfaceWidth :: Float
, _springLauncherSurfaceHeight :: Float
, _eventLightningNumWaves :: Int
, _eventLightningGoldValue :: GoldValue
, _eventLightningPerHitPenaltyGoldValue :: GoldValue
, _eventBouncingBallAliveSecs :: Secs
, _eventBouncingBallDropCooldownSecs :: Secs
, _eventBouncingBallMinSpeed :: Speed
, _eventBouncingBallMaxSpeed :: Speed
, _eventBouncingBallDropMeleeGoldValue :: GoldValue
, _eventBouncingBallDropRangedGoldValue :: GoldValue
, _eventBouncingBallLockOnReticleData :: EnemyLockOnReticleData
, _eventSlotMachineSelectionIntervalSecs :: Secs
, _eventSlotMachineSelectionActivateIntervalSecs :: Secs
, _eventSlotMachineSelectionActivateIntervalMultiplier :: Float
, _eventSlotMachineSelectionMinIntervalSecs :: Secs
, _eventSlotMachineSelectionTextOffset :: Pos2
, _eventSlotMachineSelectionOffsets :: NE.NonEmpty Pos2
, _eventSlotMachineSlotsChoices :: NE.NonEmpty SlotsChoice
, _enemySpawnWaves :: NE.NonEmpty EnemySpawnWaveJSON
}
deriving Generic
instance FromJSON LevelConfig where
parseJSON = genericParseJSON aesonFieldDropUnderscore
| |
bd9c793429493a9751f6cc134774e047e075f8c5b143b83ac16cbb1b29573e0b | NorfairKing/sydtest | HspecSpec.hs | module Test.Syd.HspecSpec (spec) where
import Control.Concurrent.STM
import Test.Hspec as Hspec
import Test.Hspec.QuickCheck as Hspec
import qualified Test.Syd as Syd
import qualified Test.Syd.Hspec as Syd
spec :: Syd.Spec
spec = Syd.fromHspec exampleHspecSpec
exampleHspecSpec :: Hspec.Spec
exampleHspecSpec = do
it "adds 3 and 5 together purely" $ 3 + 5 == (8 :: Int)
it "adds 3 and 5 together in io" $ 3 + 5 `shouldBe` (8 :: Int)
prop "works for a property as well" $ \ls -> reverse (reverse ls) `shouldBe` (ls :: [Int])
describe "before" $ do
var <- runIO $ newTVarIO (1 :: Int)
let readAndIncrement = atomically $ stateTVar var $ \i -> (i + 1, i + 1)
before readAndIncrement $ do
it "reads 2" $ \i ->
i `shouldBe` 2
it "reads 3" $ \i ->
i `shouldBe` 3
it "reads 4" $ \i ->
i `shouldBe` 4
| null | https://raw.githubusercontent.com/NorfairKing/sydtest/1ecd8084b5a7eb0266a654ed0e5e90b6ba170c58/sydtest-hspec/test/Test/Syd/HspecSpec.hs | haskell | module Test.Syd.HspecSpec (spec) where
import Control.Concurrent.STM
import Test.Hspec as Hspec
import Test.Hspec.QuickCheck as Hspec
import qualified Test.Syd as Syd
import qualified Test.Syd.Hspec as Syd
spec :: Syd.Spec
spec = Syd.fromHspec exampleHspecSpec
exampleHspecSpec :: Hspec.Spec
exampleHspecSpec = do
it "adds 3 and 5 together purely" $ 3 + 5 == (8 :: Int)
it "adds 3 and 5 together in io" $ 3 + 5 `shouldBe` (8 :: Int)
prop "works for a property as well" $ \ls -> reverse (reverse ls) `shouldBe` (ls :: [Int])
describe "before" $ do
var <- runIO $ newTVarIO (1 :: Int)
let readAndIncrement = atomically $ stateTVar var $ \i -> (i + 1, i + 1)
before readAndIncrement $ do
it "reads 2" $ \i ->
i `shouldBe` 2
it "reads 3" $ \i ->
i `shouldBe` 3
it "reads 4" $ \i ->
i `shouldBe` 4
| |
702851701baba5a05f9bb8be39468c9df90c733302d7723c775e73e6d37f3d89 | erlangonrails/devdb | core_vnode_eqc.erl | %% -------------------------------------------------------------------
%%
core_vnode_eqc : QuickCheck tests for riak_core_vnode code
%%
Copyright ( c ) 2007 - 2010 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
@doc QuickCheck tests for riak_core_vnode code
%% Things to test...
%% riak_core_vnode_master:command gets delivered to the right node
%% riak_core_vnode_master:sync_command works
-module(core_vnode_eqc).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-include_lib("eqc/include/eqc_fsm.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("riak_core/include/riak_core_vnode.hrl").
-compile([export_all]).
-record(qcst, {started,
counters, % Dict of counters for each index
indices}).
simple_test() ->
simple_test(100).
simple_test(N) ->
?assertEqual(true, quickcheck(numtests(N, prop_simple()))).
prop_simple() ->
?FORALL(Cmds, commands(?MODULE, {stopped, initial_state_data()}),
aggregate(command_names(Cmds),
begin
start_servers(),
{H,S,Res} = run_commands(?MODULE, Cmds),
stop_servers(),
?WHENFAIL(
begin
io:format("History: ~p\n", [H]),
io:format("State: ~p\n", [S]),
io:format("Result: ~p\n", [Res])
end,
Res =:= ok)
end)).
active_index(#qcst{started=Started}) ->
elements(Started).
%% Generate a preflist element
active_preflist1(S) ->
{active_index(S), node()}.
%% Generate a preflist - making sure the partitions are unique
active_preflist(S) ->
?SUCHTHAT(Xs,list(active_preflist1(S)),lists:sort(Xs)==lists:usort(Xs)).
initial_state() ->
stopped.
index(S) ->
oneof(S#qcst.indices).
initial_state_data() ->
Ring = riak_core_ring:fresh(8, node()),
riak_core_ring_manager:set_ring_global(Ring),
#qcst{started=[],
counters=orddict:new(),
indices=[I || {I,_N} <- riak_core_ring:all_owners(Ring)]
}.
%% Mark the vnode as started
next_state_data(_From,_To,S=#qcst{started=Started,
counters=Counters},_R,
{call,?MODULE,start_vnode,[Index]}) ->
S#qcst{started=[Index|Started],
counters=orddict:store(Index, 0, Counters)};
next_state_data(_From,_To,S=#qcst{counters=Counters},_R,
{call,mock_vnode,stop,[{Index,_Node}]}) ->
%% If a node is stopped, reset the counter ready for next
%% time it is called which should start it
S#qcst{counters=orddict:store(Index, 0, Counters)};
%% Update the counters for the index if a command that changes them
next_state_data(_From,_To,S=#qcst{counters=Counters},_R,
{call,_Mod,Func,[Preflist]})
when Func =:= neverreply; Func =:= returnreply; Func =:= latereply ->
S#qcst{counters=lists:foldl(fun({I, _N}, C) ->
orddict:update_counter(I, 1, C)
end, Counters, Preflist)};
next_state_data(_From,_To,S,_R,_C) ->
S.
%
stopped(S) ->
[{running, {call,?MODULE,start_vnode,[index(S)]}}].
running(S) ->
[
{history, {call,?MODULE,start_vnode,[index(S)]}},
{history, {call,mock_vnode,get_index,[active_preflist1(S)]}},
{history, {call,mock_vnode,get_counter,[active_preflist1(S)]}},
{history, {call,mock_vnode,neverreply,[active_preflist(S)]}},
{history, {call,?MODULE,returnreply,[active_preflist(S)]}},
{history, {call,?MODULE,latereply,[active_preflist(S)]}},
{history, {call,?MODULE,restart_master,[]}},
{history, {call,mock_vnode,stop,[active_preflist1(S)]}},
{history, {call,riak_core_vnode_master,all_nodes,[mock_vnode]}}
].
precondition(_From,_To,#qcst{started=Started},{call,?MODULE,start_vnode,[Index]}) ->
not lists:member(Index, Started);
precondition(_From,_To,#qcst{started=Started},{call,_Mod,Func,[Preflist]})
when Func =:= get_index; Func =:= get_counter; Func =:= neverreply; Func =:= returnreply;
Func =:= latereply ->
preflist_is_active(Preflist, Started);
precondition(_From,_To,_S,_C) ->
true.
postcondition(_From,_To,_S,
{call,mock_vnode,get_index,[{Index,_Node}]},{ok,ReplyIndex}) ->
Index =:= ReplyIndex;
postcondition(_From,_To,#qcst{counters=Counters},
{call,mock_vnode,get_counter,[{Index,_Node}]},{ok,ReplyCount}) ->
orddict:fetch(Index, Counters) =:= ReplyCount;
postcondition(_From,_To,_S,
{call,_Mod,Func,[]},Result)
when Func =:= neverreply; Func =:= returnreply; Func =:= latereply ->
Result =:= ok;
postcondition(_From,_To,_S,
{call,riak_core_vnode_master,all_nodes,[mock_vnode]},Result) ->
Pids = [Pid || {_,Pid,_,_} <- supervisor:which_children(riak_core_vnode_sup)],
lists:sort(Result) =:= lists:sort(Pids);
postcondition(_From,_To,_S,_C,_R) ->
true.
%% Pre/post condition helpers
preflist_is_active({Index,_Node}, Started) ->
lists:member(Index, Started);
preflist_is_active(Preflist, Started) ->
lists:all(fun({Index,_Node}) -> lists:member(Index, Started) end, Preflist).
%% Local versions of commands
start_vnode(I) ->
ok = mock_vnode:start_vnode(I).
returnreply(Preflist) ->
{ok, Ref} = mock_vnode:returnreply(Preflist),
check_receive(length(Preflist), returnreply, Ref).
latereply(Preflist) ->
{ok, Ref} = mock_vnode:latereply(Preflist),
check_receive(length(Preflist), latereply, Ref).
check_receive(0, _Msg, _Ref) ->
ok;
check_receive(Replies, Msg, Ref) ->
receive
{Ref, Msg} ->
check_receive(Replies-1, Msg, Ref);
{Ref, OtherMsg} ->
{error, {bad_msg, Msg, OtherMsg}}
after
1000 ->
{error, timeout}
end.
%% Server start/stop infrastructure
start_servers() ->
stop_servers(),
{ok, _Sup} = riak_core_vnode_sup:start_link(),
{ok, _VMaster} = riak_core_vnode_master:start_link(mock_vnode).
stop_servers() ->
Make sure is killed before sup as start_vnode is a cast
%% and there may be a pending request to start the vnode.
stop_pid(whereis(mock_vnode_master)),
stop_pid(whereis(riak_core_vnode_sup)).
restart_master() ->
%% Call get status to make sure the riak_core_vnode_master
%% has processed any commands that were cast to it. Otherwise
commands like are not cast on to the vnode and the
%% counters are not updated correctly.
sys:get_status(mock_vnode_master),
stop_pid(whereis(mock_vnode_master)),
{ok, _VMaster} = riak_core_vnode_master:start_link(mock_vnode).
stop_pid(undefined) ->
ok;
stop_pid(Pid) ->
unlink(Pid),
exit(Pid, shutdown),
ok = wait_for_pid(Pid).
wait_for_pid(Pid) ->
Mref = erlang:monitor(process, Pid),
receive
{'DOWN',Mref,process,_,_} ->
ok
after
5000 ->
{error, didnotexit}
end.
-endif.
| null | https://raw.githubusercontent.com/erlangonrails/devdb/0e7eaa6bd810ec3892bfc3d933439560620d0941/dev/riak_core/test/core_vnode_eqc.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
Things to test...
riak_core_vnode_master:command gets delivered to the right node
riak_core_vnode_master:sync_command works
Dict of counters for each index
Generate a preflist element
Generate a preflist - making sure the partitions are unique
Mark the vnode as started
If a node is stopped, reset the counter ready for next
time it is called which should start it
Update the counters for the index if a command that changes them
Pre/post condition helpers
Local versions of commands
Server start/stop infrastructure
and there may be a pending request to start the vnode.
Call get status to make sure the riak_core_vnode_master
has processed any commands that were cast to it. Otherwise
counters are not updated correctly. | core_vnode_eqc : QuickCheck tests for riak_core_vnode code
Copyright ( c ) 2007 - 2010 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@doc QuickCheck tests for riak_core_vnode code
-module(core_vnode_eqc).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-include_lib("eqc/include/eqc_fsm.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("riak_core/include/riak_core_vnode.hrl").
-compile([export_all]).
-record(qcst, {started,
indices}).
simple_test() ->
simple_test(100).
simple_test(N) ->
?assertEqual(true, quickcheck(numtests(N, prop_simple()))).
prop_simple() ->
?FORALL(Cmds, commands(?MODULE, {stopped, initial_state_data()}),
aggregate(command_names(Cmds),
begin
start_servers(),
{H,S,Res} = run_commands(?MODULE, Cmds),
stop_servers(),
?WHENFAIL(
begin
io:format("History: ~p\n", [H]),
io:format("State: ~p\n", [S]),
io:format("Result: ~p\n", [Res])
end,
Res =:= ok)
end)).
active_index(#qcst{started=Started}) ->
elements(Started).
active_preflist1(S) ->
{active_index(S), node()}.
active_preflist(S) ->
?SUCHTHAT(Xs,list(active_preflist1(S)),lists:sort(Xs)==lists:usort(Xs)).
initial_state() ->
stopped.
index(S) ->
oneof(S#qcst.indices).
initial_state_data() ->
Ring = riak_core_ring:fresh(8, node()),
riak_core_ring_manager:set_ring_global(Ring),
#qcst{started=[],
counters=orddict:new(),
indices=[I || {I,_N} <- riak_core_ring:all_owners(Ring)]
}.
next_state_data(_From,_To,S=#qcst{started=Started,
counters=Counters},_R,
{call,?MODULE,start_vnode,[Index]}) ->
S#qcst{started=[Index|Started],
counters=orddict:store(Index, 0, Counters)};
next_state_data(_From,_To,S=#qcst{counters=Counters},_R,
{call,mock_vnode,stop,[{Index,_Node}]}) ->
S#qcst{counters=orddict:store(Index, 0, Counters)};
next_state_data(_From,_To,S=#qcst{counters=Counters},_R,
{call,_Mod,Func,[Preflist]})
when Func =:= neverreply; Func =:= returnreply; Func =:= latereply ->
S#qcst{counters=lists:foldl(fun({I, _N}, C) ->
orddict:update_counter(I, 1, C)
end, Counters, Preflist)};
next_state_data(_From,_To,S,_R,_C) ->
S.
stopped(S) ->
[{running, {call,?MODULE,start_vnode,[index(S)]}}].
running(S) ->
[
{history, {call,?MODULE,start_vnode,[index(S)]}},
{history, {call,mock_vnode,get_index,[active_preflist1(S)]}},
{history, {call,mock_vnode,get_counter,[active_preflist1(S)]}},
{history, {call,mock_vnode,neverreply,[active_preflist(S)]}},
{history, {call,?MODULE,returnreply,[active_preflist(S)]}},
{history, {call,?MODULE,latereply,[active_preflist(S)]}},
{history, {call,?MODULE,restart_master,[]}},
{history, {call,mock_vnode,stop,[active_preflist1(S)]}},
{history, {call,riak_core_vnode_master,all_nodes,[mock_vnode]}}
].
precondition(_From,_To,#qcst{started=Started},{call,?MODULE,start_vnode,[Index]}) ->
not lists:member(Index, Started);
precondition(_From,_To,#qcst{started=Started},{call,_Mod,Func,[Preflist]})
when Func =:= get_index; Func =:= get_counter; Func =:= neverreply; Func =:= returnreply;
Func =:= latereply ->
preflist_is_active(Preflist, Started);
precondition(_From,_To,_S,_C) ->
true.
postcondition(_From,_To,_S,
{call,mock_vnode,get_index,[{Index,_Node}]},{ok,ReplyIndex}) ->
Index =:= ReplyIndex;
postcondition(_From,_To,#qcst{counters=Counters},
{call,mock_vnode,get_counter,[{Index,_Node}]},{ok,ReplyCount}) ->
orddict:fetch(Index, Counters) =:= ReplyCount;
postcondition(_From,_To,_S,
{call,_Mod,Func,[]},Result)
when Func =:= neverreply; Func =:= returnreply; Func =:= latereply ->
Result =:= ok;
postcondition(_From,_To,_S,
{call,riak_core_vnode_master,all_nodes,[mock_vnode]},Result) ->
Pids = [Pid || {_,Pid,_,_} <- supervisor:which_children(riak_core_vnode_sup)],
lists:sort(Result) =:= lists:sort(Pids);
postcondition(_From,_To,_S,_C,_R) ->
true.
preflist_is_active({Index,_Node}, Started) ->
lists:member(Index, Started);
preflist_is_active(Preflist, Started) ->
lists:all(fun({Index,_Node}) -> lists:member(Index, Started) end, Preflist).
start_vnode(I) ->
ok = mock_vnode:start_vnode(I).
returnreply(Preflist) ->
{ok, Ref} = mock_vnode:returnreply(Preflist),
check_receive(length(Preflist), returnreply, Ref).
latereply(Preflist) ->
{ok, Ref} = mock_vnode:latereply(Preflist),
check_receive(length(Preflist), latereply, Ref).
check_receive(0, _Msg, _Ref) ->
ok;
check_receive(Replies, Msg, Ref) ->
receive
{Ref, Msg} ->
check_receive(Replies-1, Msg, Ref);
{Ref, OtherMsg} ->
{error, {bad_msg, Msg, OtherMsg}}
after
1000 ->
{error, timeout}
end.
start_servers() ->
stop_servers(),
{ok, _Sup} = riak_core_vnode_sup:start_link(),
{ok, _VMaster} = riak_core_vnode_master:start_link(mock_vnode).
stop_servers() ->
Make sure is killed before sup as start_vnode is a cast
stop_pid(whereis(mock_vnode_master)),
stop_pid(whereis(riak_core_vnode_sup)).
restart_master() ->
commands like are not cast on to the vnode and the
sys:get_status(mock_vnode_master),
stop_pid(whereis(mock_vnode_master)),
{ok, _VMaster} = riak_core_vnode_master:start_link(mock_vnode).
stop_pid(undefined) ->
ok;
stop_pid(Pid) ->
unlink(Pid),
exit(Pid, shutdown),
ok = wait_for_pid(Pid).
wait_for_pid(Pid) ->
Mref = erlang:monitor(process, Pid),
receive
{'DOWN',Mref,process,_,_} ->
ok
after
5000 ->
{error, didnotexit}
end.
-endif.
|
f240430651f3688ffc90fdef709fcaa5d9c8fb3391e566135f0357f018e3325a | Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library | PortalBusinessProfile.hs | {-# LANGUAGE MultiWayIf #-}
CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
{-# LANGUAGE OverloadedStrings #-}
-- | Contains the types generated from the schema PortalBusinessProfile
module StripeAPI.Types.PortalBusinessProfile where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
| Defines the object schema located at @components.schemas.portal_business_profile@ in the specification .
data PortalBusinessProfile = PortalBusinessProfile
{ -- | headline: The messaging shown to customers in the portal.
--
-- Constraints:
--
* Maximum length of 5000
portalBusinessProfileHeadline :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text)),
-- | privacy_policy_url: A link to the business’s publicly available privacy policy.
--
-- Constraints:
--
* Maximum length of 5000
portalBusinessProfilePrivacyPolicyUrl :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text)),
-- | terms_of_service_url: A link to the business’s publicly available terms of service.
--
-- Constraints:
--
* Maximum length of 5000
portalBusinessProfileTermsOfServiceUrl :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text))
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON PortalBusinessProfile where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("headline" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfileHeadline obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("privacy_policy_url" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfilePrivacyPolicyUrl obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("terms_of_service_url" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfileTermsOfServiceUrl obj) : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("headline" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfileHeadline obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("privacy_policy_url" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfilePrivacyPolicyUrl obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("terms_of_service_url" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfileTermsOfServiceUrl obj) : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON PortalBusinessProfile where
parseJSON = Data.Aeson.Types.FromJSON.withObject "PortalBusinessProfile" (\obj -> ((GHC.Base.pure PortalBusinessProfile GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "headline")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "privacy_policy_url")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "terms_of_service_url"))
-- | Create a new 'PortalBusinessProfile' with all required fields.
mkPortalBusinessProfile :: PortalBusinessProfile
mkPortalBusinessProfile =
PortalBusinessProfile
{ portalBusinessProfileHeadline = GHC.Maybe.Nothing,
portalBusinessProfilePrivacyPolicyUrl = GHC.Maybe.Nothing,
portalBusinessProfileTermsOfServiceUrl = GHC.Maybe.Nothing
}
| null | https://raw.githubusercontent.com/Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library/ba4401f083ff054f8da68c741f762407919de42f/src/StripeAPI/Types/PortalBusinessProfile.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
| Contains the types generated from the schema PortalBusinessProfile
| headline: The messaging shown to customers in the portal.
Constraints:
| privacy_policy_url: A link to the business’s publicly available privacy policy.
Constraints:
| terms_of_service_url: A link to the business’s publicly available terms of service.
Constraints:
| Create a new 'PortalBusinessProfile' with all required fields. | CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
module StripeAPI.Types.PortalBusinessProfile where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
-- | Defines the object schema located at @components.schemas.portal_business_profile@ in the specification.
data PortalBusinessProfile = PortalBusinessProfile
-- * Maximum length of 5000
portalBusinessProfileHeadline :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text)),
-- * Maximum length of 5000
portalBusinessProfilePrivacyPolicyUrl :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text)),
-- * Maximum length of 5000
portalBusinessProfileTermsOfServiceUrl :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text))
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON PortalBusinessProfile where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("headline" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfileHeadline obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("privacy_policy_url" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfilePrivacyPolicyUrl obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("terms_of_service_url" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfileTermsOfServiceUrl obj) : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("headline" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfileHeadline obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("privacy_policy_url" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfilePrivacyPolicyUrl obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("terms_of_service_url" Data.Aeson.Types.ToJSON..=)) (portalBusinessProfileTermsOfServiceUrl obj) : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON PortalBusinessProfile where
parseJSON = Data.Aeson.Types.FromJSON.withObject "PortalBusinessProfile" (\obj -> ((GHC.Base.pure PortalBusinessProfile GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "headline")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "privacy_policy_url")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "terms_of_service_url"))
mkPortalBusinessProfile :: PortalBusinessProfile
mkPortalBusinessProfile =
PortalBusinessProfile
{ portalBusinessProfileHeadline = GHC.Maybe.Nothing,
portalBusinessProfilePrivacyPolicyUrl = GHC.Maybe.Nothing,
portalBusinessProfileTermsOfServiceUrl = GHC.Maybe.Nothing
}
|
88b96bb23603508b4ab26f4e7224651e1f0b23b758bb25991065d0a17452a6ab | wdebeaum/step | rpm.lisp | ;;;;
;;;; W::RPM
;;;;
;; Lexicon entry for the noun W::RPM, mapped onto the ONT::frequency-unit
;; LF type via the other-reln template.  META-DATA records provenance:
;; CALO origin, entry date 2004-02-04, WordNet sense rpm%1:28:00.
(define-words :pos W::n :templ COUNT-PRED-TEMPL
 :words (
  (W::RPM
   (SENSES
    ((LF-PARENT ONT::frequency-unit) (TEMPL other-reln-TEMPL)
     (META-DATA :ORIGIN CALO :ENTRY-DATE 20040204 :CHANGE-DATE NIL :wn ("rpm%1:28:00")
      :COMMENTS HTML-PURCHASING-CORPUS))))
  ))
| null | https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/Data/new/rpm.lisp | lisp |
W::RPM
|
(define-words :pos W::n :templ COUNT-PRED-TEMPL
:words (
(W::RPM
(SENSES
((LF-PARENT ONT::frequency-unit) (TEMPL other-reln-TEMPL)
(META-DATA :ORIGIN CALO :ENTRY-DATE 20040204 :CHANGE-DATE NIL :wn ("rpm%1:28:00")
:COMMENTS HTML-PURCHASING-CORPUS))))
))
|
4a35b7ae9bff49767cac7d70203f923070fe90e39f475d65a3e9ef48faa86d00 | screenshotbot/screenshotbot-oss | test-populate.lisp | ;;;; Copyright 2018-Present Modern Interpreters Inc.
;;;;
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
;; Test package: fiveam test framework plus the code under test
;; (POPULATE-COMPANY), the test datastore helper, and the COMPANY model.
(defpackage :screenshotbot/login/test-populate
  (:use #:cl
        #:fiveam)
  (:import-from #:screenshotbot/login/populate
                #:populate-company)
  (:import-from #:util/store
                #:with-test-store)
  (:import-from #:screenshotbot/model/company
                #:company))
(in-package :screenshotbot/login/test-populate)
;; Define the default fiveam suite for this package.
(util/fiveam:def-suite)
;; Fixture: run the body against a fresh test store holding a single
;; COMPANY instance, bound to the variable COMPANY.
(def-fixture state ()
  (with-test-store ()
    (let ((company (make-instance 'company)))
      (&body))))
;; Smoke test: populating a brand-new company should complete without
;; signalling an error.
(test simple-populate
  (with-fixture state ()
    (populate-company company)))
| null | https://raw.githubusercontent.com/screenshotbot/screenshotbot-oss/02348d8e1a621e68be2d9b2410076575dc77b64f/src/screenshotbot/login/test-populate.lisp | lisp | Copyright 2018-Present Modern Interpreters Inc.
| This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(defpackage :screenshotbot/login/test-populate
(:use #:cl
#:fiveam)
(:import-from #:screenshotbot/login/populate
#:populate-company)
(:import-from #:util/store
#:with-test-store)
(:import-from #:screenshotbot/model/company
#:company))
(in-package :screenshotbot/login/test-populate)
(util/fiveam:def-suite)
(def-fixture state ()
(with-test-store ()
(let ((company (make-instance 'company)))
(&body))))
(test simple-populate
(with-fixture state ()
(populate-company company)))
|
8d8677fbe2311fa09419994148613107b1f31942a2d93c7df9ecd7a086cddd75 | kazzmir/master-of-magic | windows.ml | module WindowManager = struct
  (* Immutable 2D position, in pixels. *)
  class point (_x:int) (_y:int) = object(self)
    val x = _x;
    val y = _y;
    method getX = x;
    method getY = y;
  end;;
  (* Immutable width/height pair, in pixels. *)
  class dimension (_width:int) (_height:int) = object(self)
    val width = _width;
    val height = _height;
    method getWidth = width;
    method getHeight = height;
  end;;
  (* Immutable RGB color; channels are expected in 0..255 (see usages
     like `new color 255 255 0`) -- confirm before relying on it. *)
  class color (_red:int) (_green:int) (_blue:int) = object(self)
    val red = _red;
    val green = _green;
    val blue = _blue;
    method getRed = red;
    method getGreen = green;
    method getBlue = blue;
    (* Pack the three channels into a single 0xRRGGBB integer. *)
    method getRgb = (red lsl 16) + (green lsl 8) + (blue);
  end;;
  (* Identifiers for the screens the manager can switch between. *)
  type whichWindow =
    IntroTitle |
    IntroCredits |
    MainMenu;;
  (* Events delivered to windows/widgets via receiveEvent. *)
  type whichEvent =
    PaintEvent
    (* time, button, x, y *)
    | TimerEvent of float * Graphics.mouseButton * float * float
    (* time, button, x, y *)
    | MouseUpEvent of float * Graphics.mouseButton * float * float
    (* time, button, x, y *)
    | MouseDownEvent of float * Graphics.mouseButton * float * float
    (* key code *)
    | KeyUpEvent of Graphics.key
    (* key code *)
    | KeyDownEvent of Graphics.key
    (* time, button, x, y *)
    | MouseClickEvent of float * Graphics.mouseButton * float * float
    (* time, x, y *)
    | MouseHoverEvent of float * float * float
    (* time, x, y *)
    | MouseMoveEvent of float * float * float
    (* key code *)
    | KeypressEvent of int
    ;;
  (* Base drawable element: a colored rectangle that can paint itself
     and react to events. *)
  class widget = object(self)
    val mutable position = (new point 0 0)
    val mutable size = (new dimension 0 0)
    val mutable backgroundColor = (new color 255 255 0)
    (* implements event handler functions *)
    (* model *)
    (* view *)
    method paint (graphics:Graphics.AllegroGraphics.graphics) =
      graphics#fillBox position#getX position#getY size#getWidth size#getHeight backgroundColor#getRgb;
    (* controller *)
    (* Only PaintEvent is handled; anything else is logged to stdout. *)
    method receiveEvent (m:manager) (e:whichEvent) =
      match e with
      | PaintEvent -> self#paint m#getGraphics;
      | _ -> Printf.printf "What was that?\n";
  end
  (* using `and' here makes the types mutually recursive *)
  and window = object(self)
    inherit widget
    (*val mutable widgets : widget list = []
    val mutable currentlyFocusedWidget : widget;*)
    (* implements event handler functions, may pass some to "focused widget" *)
    (* list of widgets *)
    (* widget with focus *)
    (* draw: draws all widgets *)
    (* background image *)
    (* background color *)
    (* transition in *)
    (* transition out *)
    (* mouse cursor stuff *)
  end
  (* Top-level dispatcher: owns the window table and forwards every
     Graphics.eventHandler callback to the current window as a
     whichEvent value. *)
  and manager (_graphics : Graphics.AllegroGraphics.graphics) = object(self)
    inherit Graphics.eventHandler
    val graphics = _graphics
    val mutable windows = Hashtbl.create 10
    val mutable currentWindow:(window option) = None
    (* On construction: install the IntroTitle window (which becomes the
       current window) and paint it. *)
    initializer
      self#addWindow IntroTitle (new window);
      self#paint;
    (* Forward an event to the current window; Failure if none is set. *)
    method sendEvent (event:whichEvent) =
      match currentWindow with
      | None -> raise (Failure "No window set")
      | Some window -> window#receiveEvent (self :> manager) event
    method mouse_down time button x y =
      self#sendEvent (MouseDownEvent (time, button, x, y))
    method mouse_up time button x y =
      self#sendEvent (MouseUpEvent (time, button, x, y))
    method key_down a =
      self#sendEvent (KeyDownEvent a)
    method key_up a =
      self#sendEvent (KeyUpEvent a)
    method mouse_click time button x y =
      self#sendEvent (MouseClickEvent (time, button, x, y))
    method mouse_hover time x y =
      self#sendEvent (MouseHoverEvent (time, x, y))
    method mouse_move time x y =
      self#sendEvent (MouseMoveEvent (time, x, y))
    method keypress =
      self#sendEvent (KeypressEvent 0)
    (* implements event handler functions, pass to "current window" *)
    (* hash of string names to windows *)
    (* Register w under wh and make it the current window. *)
    method addWindow (wh:whichWindow) (w:window) =
      Hashtbl.add windows wh w;
      currentWindow <- Some w;
    method paint =
      self#sendEvent PaintEvent;
    method getGraphics =
      graphics;
  end;;
end;;
| null | https://raw.githubusercontent.com/kazzmir/master-of-magic/830bfd1c549a5ac7370fa6a72bb06be5d3435fa0/ocaml/windows.ml | ocaml | time, button, x, y
time, button, x, y
time, button, x, y
key code
key code
time, button, x, y
time, x, y
time, x, y
key code
implements event handler functions
model
view
controller
using `and' here makes the types mutually recursive
val mutable widgets : widget list = []
val mutable currentlyFocusedWidget : widget;
implements event handler functions, may pass some to "focused widget"
list of widgets
widget with focus
draw: draws all widgets
background image
background color
transition in
transition out
mouse cursor stuff
implements event handler functions, pass to "current window"
hash of string names to windows | module WindowManager = struct
class point (_x:int) (_y:int) = object(self)
val x = _x;
val y = _y;
method getX = x;
method getY = y;
end;;
class dimension (_width:int) (_height:int) = object(self)
val width = _width;
val height = _height;
method getWidth = width;
method getHeight = height;
end;;
class color (_red:int) (_green:int) (_blue:int) = object(self)
val red = _red;
val green = _green;
val blue = _blue;
method getRed = red;
method getGreen = green;
method getBlue = blue;
method getRgb = (red lsl 16) + (green lsl 8) + (blue);
end;;
type whichWindow =
IntroTitle |
IntroCredits |
MainMenu;;
type whichEvent =
PaintEvent
| TimerEvent of float * Graphics.mouseButton * float * float
| MouseUpEvent of float * Graphics.mouseButton * float * float
| MouseDownEvent of float * Graphics.mouseButton * float * float
| KeyUpEvent of Graphics.key
| KeyDownEvent of Graphics.key
| MouseClickEvent of float * Graphics.mouseButton * float * float
| MouseHoverEvent of float * float * float
| MouseMoveEvent of float * float * float
| KeypressEvent of int
;;
class widget = object(self)
val mutable position = (new point 0 0)
val mutable size = (new dimension 0 0)
val mutable backgroundColor = (new color 255 255 0)
method paint (graphics:Graphics.AllegroGraphics.graphics) =
graphics#fillBox position#getX position#getY size#getWidth size#getHeight backgroundColor#getRgb;
method receiveEvent (m:manager) (e:whichEvent) =
match e with
| PaintEvent -> self#paint m#getGraphics;
| _ -> Printf.printf "What was that?\n";
end
and window = object(self)
inherit widget
end
and manager (_graphics : Graphics.AllegroGraphics.graphics) = object(self)
inherit Graphics.eventHandler
val graphics = _graphics
val mutable windows = Hashtbl.create 10
val mutable currentWindow:(window option) = None
initializer
self#addWindow IntroTitle (new window);
self#paint;
method sendEvent (event:whichEvent) =
match currentWindow with
| None -> raise (Failure "No window set")
| Some window -> window#receiveEvent (self :> manager) event
method mouse_down time button x y =
self#sendEvent (MouseDownEvent (time, button, x, y))
method mouse_up time button x y =
self#sendEvent (MouseUpEvent (time, button, x, y))
method key_down a =
self#sendEvent (KeyDownEvent a)
method key_up a =
self#sendEvent (KeyUpEvent a)
method mouse_click time button x y =
self#sendEvent (MouseClickEvent (time, button, x, y))
method mouse_hover time x y =
self#sendEvent (MouseHoverEvent (time, x, y))
method mouse_move time x y =
self#sendEvent (MouseMoveEvent (time, x, y))
method keypress =
self#sendEvent (KeypressEvent 0)
method addWindow (wh:whichWindow) (w:window) =
Hashtbl.add windows wh w;
currentWindow <- Some w;
method paint =
self#sendEvent PaintEvent;
method getGraphics =
graphics;
end;;
end;;
|
8436734a6c0ba997187e53efbd7efa6f5fdc6ff30b717f4f2c63a4b8af8647f0 | malcolmreynolds/GSLL | error-functions.lisp | Regression test ERROR - FUNCTIONS for GSLL , automatically generated
(in-package :gsl)
;; Each assertion compares the (value error-estimate) pair returned by
;; the GSLL special function against reference values at x = 1.0.
(LISP-UNIT:DEFINE-TEST ERROR-FUNCTIONS
 ;; erf(1)
 (LISP-UNIT::ASSERT-NUMERICAL-EQUAL
  (LIST 0.8427007929497149d0 7.789237746491556d-16)
  (MULTIPLE-VALUE-LIST (ERF 1.0d0)))
 ;; erfc(1) = 1 - erf(1)
 (LISP-UNIT::ASSERT-NUMERICAL-EQUAL
  (LIST 0.1572992070502851d0 4.0468944536809554d-16)
  (MULTIPLE-VALUE-LIST (ERFC 1.0d0)))
 ;; log(erfc(1))
 (LISP-UNIT::ASSERT-NUMERICAL-EQUAL
  (LIST -1.8496055099332485d0 3.394126565390616d-15)
  (MULTIPLE-VALUE-LIST (LOG-ERFC 1.0d0)))
 ;; Gaussian probability density Z(1)
 (LISP-UNIT::ASSERT-NUMERICAL-EQUAL
  (LIST 0.24197072451914334d0 1.611848817878303d-16)
  (MULTIPLE-VALUE-LIST (ERF-Z 1.0d0)))
 ;; upper-tail probability Q(1)
 (LISP-UNIT::ASSERT-NUMERICAL-EQUAL
  (LIST 0.15865525393145707d0 2.832400331480832d-16)
  (MULTIPLE-VALUE-LIST (ERF-Q 1.0d0)))
 ;; hazard function Z(1)/Q(1)
 (LISP-UNIT::ASSERT-NUMERICAL-EQUAL
  (LIST 1.5251352761609807d0 5.532094155354489d-15)
  (MULTIPLE-VALUE-LIST (HAZARD 1.0d0))))
| null | https://raw.githubusercontent.com/malcolmreynolds/GSLL/2f722f12f1d08e1b9550a46e2a22adba8e1e52c4/tests/error-functions.lisp | lisp | Regression test ERROR - FUNCTIONS for GSLL , automatically generated
(in-package :gsl)
(LISP-UNIT:DEFINE-TEST ERROR-FUNCTIONS
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.8427007929497149d0 7.789237746491556d-16)
(MULTIPLE-VALUE-LIST (ERF 1.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.1572992070502851d0 4.0468944536809554d-16)
(MULTIPLE-VALUE-LIST (ERFC 1.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST -1.8496055099332485d0 3.394126565390616d-15)
(MULTIPLE-VALUE-LIST (LOG-ERFC 1.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.24197072451914334d0 1.611848817878303d-16)
(MULTIPLE-VALUE-LIST (ERF-Z 1.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 0.15865525393145707d0 2.832400331480832d-16)
(MULTIPLE-VALUE-LIST (ERF-Q 1.0d0)))
(LISP-UNIT::ASSERT-NUMERICAL-EQUAL
(LIST 1.5251352761609807d0 5.532094155354489d-15)
(MULTIPLE-VALUE-LIST (HAZARD 1.0d0))))
| |
ecbcd0c482312bcf1407866766d1ea3850aacee371f4e4900df68a1e7258f35f | danielholmes/wolf3d-haskell | Data.hs | module Wolf3D.Display.Data (
RenderData (..),
WallRayHit (..),
WallData,
HitDirection (..),
CIntRectangle,
fieldOfView,
screenHeight,
screenWidth,
actionAreaX,
actionAreaY,
actionWidth,
actionHeight,
halfActionWidth,
halfActionHeight,
actionArea,
intRectPos,
intRectX,
intRectY
) where
import qualified Data.Map as M
import Foreign.C.Types (CInt)
import qualified SDL
import Wolf3D.Animation
import Wolf3D.WorldData
-- | Texture sheet for each wall material.
type WallData = M.Map Wall SpriteSheet
-- | Environment item type -> (texture, source clip rectangle).
type EnvItemData = M.Map EnvItemType (SDL.Texture, SDL.Rectangle CInt)
-- | Weapon name -> its animation.
type WeaponData = M.Map String Animation
-- | All pre-loaded textures/animations needed to draw a frame.
data RenderData = RenderData {wallTextures :: WallData
                             , itemTextures :: EnvItemData
                             , weaponTextures :: WeaponData
                             , hudBase :: (SDL.Texture, SDL.Rectangle CInt)
                             , bjFace :: Animation
                             , numbers :: SpriteSheet
                             , hudWeapons :: SpriteSheet }
-- | Axis-aligned rectangle: (x, y) position plus (width, height).
data CIntRectangle = CIntRectangle (CInt, CInt) (CInt, CInt)
-- | Orientation of the wall face a ray hit.
data HitDirection = Horizontal | Vertical
  deriving (Show, Eq, Ord)
-- | Result of casting a ray until it hits a wall.
data WallRayHit = WallRayHit {material :: Wall
                             , direction :: HitDirection
                             , distance :: Int
                             , tilePosition :: Int}
  deriving (Eq, Show)
-- | Horizontal field of view used for rendering; the value 75 suggests
-- degrees, but confirm against the 'Angle' type in Wolf3D.WorldData.
fieldOfView :: Angle
fieldOfView = 75
-- | X coordinate of a rectangle's position.
intRectX :: CIntRectangle -> CInt
intRectX (CIntRectangle (x, _) _) = x
-- | Y coordinate of a rectangle's position.
intRectY :: CIntRectangle -> CInt
intRectY (CIntRectangle (_, y) _) = y
-- | Position (x, y) of a rectangle.
intRectPos :: CIntRectangle -> (CInt, CInt)
intRectPos (CIntRectangle pos _) = pos
-- | Full screen size in pixels (classic 320x200 mode).
screenWidth :: CInt
screenWidth = 320 :: CInt
screenHeight :: CInt
screenHeight = 200 :: CInt
-- | Border between screen edge and action viewport:
-- 8 px horizontally, 4 px vertically (see actionWidth/actionAreaX).
hudBorderTop :: (CInt, CInt)
hudBorderTop = (8, 4)
-- | Height of the HUD status bar at the bottom of the screen.
hudBarHeight :: CInt
hudBarHeight = 40
-- | Action-viewport size: the screen minus the border on both sides
-- (and, vertically, minus the HUD bar).
actionWidth :: CInt
actionWidth = screenWidth - 2 * (fst hudBorderTop)
actionHeight :: CInt
actionHeight = screenHeight - 2 * (snd hudBorderTop) - hudBarHeight
-- | Top-left corner of the action viewport.
actionAreaY :: CInt
actionAreaY = snd hudBorderTop
actionAreaX :: CInt
actionAreaX = fst hudBorderTop
-- | The action viewport as a rectangle.
actionArea :: CIntRectangle
actionArea = CIntRectangle hudBorderTop (actionWidth, actionHeight)
-- | Half the viewport dimensions ('fromIntegral' is effectively a no-op
-- here, since both sides are already 'CInt').
halfActionHeight :: CInt
halfActionHeight = fromIntegral (actionHeight `div` 2)
halfActionWidth :: CInt
halfActionWidth = fromIntegral (actionWidth `div` 2)
| null | https://raw.githubusercontent.com/danielholmes/wolf3d-haskell/de934f657f1fb4351591448bb4e25aaa4923571f/src/Wolf3D/Display/Data.hs | haskell | module Wolf3D.Display.Data (
RenderData (..),
WallRayHit (..),
WallData,
HitDirection (..),
CIntRectangle,
fieldOfView,
screenHeight,
screenWidth,
actionAreaX,
actionAreaY,
actionWidth,
actionHeight,
halfActionWidth,
halfActionHeight,
actionArea,
intRectPos,
intRectX,
intRectY
) where
import qualified Data.Map as M
import Foreign.C.Types (CInt)
import qualified SDL
import Wolf3D.Animation
import Wolf3D.WorldData
type WallData = M.Map Wall SpriteSheet
type EnvItemData = M.Map EnvItemType (SDL.Texture, SDL.Rectangle CInt)
type WeaponData = M.Map String Animation
data RenderData = RenderData {wallTextures :: WallData
, itemTextures :: EnvItemData
, weaponTextures :: WeaponData
, hudBase :: (SDL.Texture, SDL.Rectangle CInt)
, bjFace :: Animation
, numbers :: SpriteSheet
, hudWeapons :: SpriteSheet }
data CIntRectangle = CIntRectangle (CInt, CInt) (CInt, CInt)
data HitDirection = Horizontal | Vertical
deriving (Show, Eq, Ord)
data WallRayHit = WallRayHit {material :: Wall
, direction :: HitDirection
, distance :: Int
, tilePosition :: Int}
deriving (Eq, Show)
fieldOfView :: Angle
fieldOfView = 75
intRectX :: CIntRectangle -> CInt
intRectX (CIntRectangle (x, _) _) = x
intRectY :: CIntRectangle -> CInt
intRectY (CIntRectangle (_, y) _) = y
intRectPos :: CIntRectangle -> (CInt, CInt)
intRectPos (CIntRectangle pos _) = pos
screenWidth :: CInt
screenWidth = 320 :: CInt
screenHeight :: CInt
screenHeight = 200 :: CInt
hudBorderTop :: (CInt, CInt)
hudBorderTop = (8, 4)
hudBarHeight :: CInt
hudBarHeight = 40
actionWidth :: CInt
actionWidth = screenWidth - 2 * (fst hudBorderTop)
actionHeight :: CInt
actionHeight = screenHeight - 2 * (snd hudBorderTop) - hudBarHeight
actionAreaY :: CInt
actionAreaY = snd hudBorderTop
actionAreaX :: CInt
actionAreaX = fst hudBorderTop
actionArea :: CIntRectangle
actionArea = CIntRectangle hudBorderTop (actionWidth, actionHeight)
halfActionHeight :: CInt
halfActionHeight = fromIntegral (actionHeight `div` 2)
halfActionWidth :: CInt
halfActionWidth = fromIntegral (actionWidth `div` 2)
| |
96c5d88eba7f386a012cca3f2a9b03d10a1a06c060672485b3b18f6b12bac3c9 | bmeurer/ocaml-experimental | fileevent.ml | (***********************************************************************)
(* *)
MLTk , Tcl / Tk interface of Objective Caml
(* *)
, , and
projet Cristal , INRIA Rocquencourt
, Kyoto University RIMS
(* *)
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
General Public License , with the special exception on linking
(* described in file LICENSE found in the Objective Caml source tree. *)
(* *)
(***********************************************************************)
$ Id$
open Unix
open Support
open Protocol
external add_file_input : file_descr -> cbid -> unit
= "camltk_add_file_input"
external rem_file_input : file_descr -> cbid -> unit
= "camltk_rem_file_input"
external add_file_output : file_descr -> cbid -> unit
= "camltk_add_file_output"
external rem_file_output : file_descr -> cbid -> unit
= "camltk_rem_file_output"
(* File input handlers *)
let fd_table = Hashtbl.create 37 (* Avoid space leak in callback table *)
(* Register callback f to run when fd becomes readable.  A fresh
   callback id is allocated, recorded in both the global callback table
   and fd_table (keyed by (fd, 'r')), then passed to the C primitive
   add_file_input. *)
let add_fileinput ~fd ~callback:f =
  let id = new_function_id () in
  Hashtbl.add callback_naming_table id (fun _ -> f());
  Hashtbl.add fd_table (fd, 'r') id;
  if !Protocol.debug then begin
    Protocol.prerr_cbid id; prerr_endline " for fileinput"
  end;
  add_file_input fd id
(* Undo add_fileinput: clear and forget the callback registered for fd,
   then stop watching via rem_file_input.  No-op if fd was never
   registered (Not_found is swallowed). *)
let remove_fileinput ~fd =
  try
    let id = Hashtbl.find fd_table (fd, 'r') in
    clear_callback id;
    Hashtbl.remove fd_table (fd, 'r');
    if !Protocol.debug then begin
      prerr_string "clear ";
      Protocol.prerr_cbid id;
      prerr_endline " for fileinput"
    end;
    rem_file_input fd id
  with
    Not_found -> ()
(* Same as add_fileinput, but for writability ('w') notifications. *)
let add_fileoutput ~fd ~callback:f =
  let id = new_function_id () in
  Hashtbl.add callback_naming_table id (fun _ -> f());
  Hashtbl.add fd_table (fd, 'w') id;
  if !Protocol.debug then begin
    Protocol.prerr_cbid id; prerr_endline " for fileoutput"
  end;
  add_file_output fd id
(* Same as remove_fileinput, but for writability ('w') notifications. *)
let remove_fileoutput ~fd =
  try
    let id = Hashtbl.find fd_table (fd, 'w') in
    clear_callback id;
    Hashtbl.remove fd_table (fd, 'w');
    if !Protocol.debug then begin
      prerr_string "clear ";
      Protocol.prerr_cbid id;
      prerr_endline " for fileoutput"
    end;
    rem_file_output fd id
  with
    Not_found -> ()
| null | https://raw.githubusercontent.com/bmeurer/ocaml-experimental/fe5c10cdb0499e43af4b08f35a3248e5c1a8b541/otherlibs/labltk/support/fileevent.ml | ocaml | *********************************************************************
described in file LICENSE found in the Objective Caml source tree.
*********************************************************************
File input handlers
Avoid space leak in callback table | MLTk , Tcl / Tk interface of Objective Caml
, , and
projet Cristal , INRIA Rocquencourt
, Kyoto University RIMS
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
General Public License , with the special exception on linking
$ Id$
open Unix
open Support
open Protocol
external add_file_input : file_descr -> cbid -> unit
= "camltk_add_file_input"
external rem_file_input : file_descr -> cbid -> unit
= "camltk_rem_file_input"
external add_file_output : file_descr -> cbid -> unit
= "camltk_add_file_output"
external rem_file_output : file_descr -> cbid -> unit
= "camltk_rem_file_output"
let add_fileinput ~fd ~callback:f =
let id = new_function_id () in
Hashtbl.add callback_naming_table id (fun _ -> f());
Hashtbl.add fd_table (fd, 'r') id;
if !Protocol.debug then begin
Protocol.prerr_cbid id; prerr_endline " for fileinput"
end;
add_file_input fd id
let remove_fileinput ~fd =
try
let id = Hashtbl.find fd_table (fd, 'r') in
clear_callback id;
Hashtbl.remove fd_table (fd, 'r');
if !Protocol.debug then begin
prerr_string "clear ";
Protocol.prerr_cbid id;
prerr_endline " for fileinput"
end;
rem_file_input fd id
with
Not_found -> ()
let add_fileoutput ~fd ~callback:f =
let id = new_function_id () in
Hashtbl.add callback_naming_table id (fun _ -> f());
Hashtbl.add fd_table (fd, 'w') id;
if !Protocol.debug then begin
Protocol.prerr_cbid id; prerr_endline " for fileoutput"
end;
add_file_output fd id
let remove_fileoutput ~fd =
try
let id = Hashtbl.find fd_table (fd, 'w') in
clear_callback id;
Hashtbl.remove fd_table (fd, 'w');
if !Protocol.debug then begin
prerr_string "clear ";
Protocol.prerr_cbid id;
prerr_endline " for fileoutput"
end;
rem_file_output fd id
with
Not_found -> ()
|
a761aaef814c354b9bb71c6781609345f6bb58faebd662f0f1012bc034593a3f | rmculpepper/scriblogify | run.rkt | Copyright 2011 - 2012
Released under the terms of the LGPL version 3 or later .
;; See the file COPYRIGHT for details.
#lang racket/base
(require racket/cmdline
"main.rkt"
(prefix-in setup: "run-setup.rkt"))
;; Parse command-line flags from args and run scriblogify on the given
;; file.  The mutable locals accumulate option values as command-line
;; processes each flag; --setup instead launches the setup servlet and
;; exits before any file argument is required.
(define (post args)
  (define pre? #f)
  (define build-dir #f)
  (define upload-profile #f)
  (define overwrite? #f)
  (define v? #f)
  (command-line
   #:argv args
   #:once-each
   (("-d" "--dir") build-directory
    "Put temporary files in <build-directory>"
    (set! build-dir build-directory))
   (("-n" "--nightly")
    "Link to nightly build documentation pages"
    (set! pre? #t))
   (("-p" "--profile") profile
    "Upload blog according to <profile>"
    (set! upload-profile (string->symbol profile)))
   (("-f" "--force")
    "Overwrite existing blog post with same title"
    (set! overwrite? #t))
   (("-v" "--verbose")
    "Verbose mode"
    (set! v? #t))
   ("--setup"
    "Run setup servlet (all other flags are ignored)"
    (begin (setup:main)
           (exit 0)))
   #:args (file)
   (scriblogify file
                #:profile upload-profile
                #:link-to-pre? pre?
                #:overwrite? overwrite?
                #:verbose? v?
                #:temp-dir build-dir)))
;; ----
;; Entry point: forward the actual process arguments.
(post (vector->list (current-command-line-arguments)))
| null | https://raw.githubusercontent.com/rmculpepper/scriblogify/7771d00ce6101bd5d415b54134eb79c42b92f1ef/scriblogify/run.rkt | racket | See the file COPYRIGHT for details.
---- | Copyright 2011 - 2012
Released under the terms of the LGPL version 3 or later .
#lang racket/base
(require racket/cmdline
"main.rkt"
(prefix-in setup: "run-setup.rkt"))
(define (post args)
(define pre? #f)
(define build-dir #f)
(define upload-profile #f)
(define overwrite? #f)
(define v? #f)
(command-line
#:argv args
#:once-each
(("-d" "--dir") build-directory
"Put temporary files in <build-directory>"
(set! build-dir build-directory))
(("-n" "--nightly")
"Link to nightly build documentation pages"
(set! pre? #t))
(("-p" "--profile") profile
"Upload blog according to <profile>"
(set! upload-profile (string->symbol profile)))
(("-f" "--force")
"Overwrite existing blog post with same title"
(set! overwrite? #t))
(("-v" "--verbose")
"Verbose mode"
(set! v? #t))
("--setup"
"Run setup servlet (all other flags are ignored)"
(begin (setup:main)
(exit 0)))
#:args (file)
(scriblogify file
#:profile upload-profile
#:link-to-pre? pre?
#:overwrite? overwrite?
#:verbose? v?
#:temp-dir build-dir)))
(post (vector->list (current-command-line-arguments)))
|
4e512b63948024bd1270b434851f28d899adf376d77ee5189a8167ddb5a3bde2 | jimcrayne/jhc | tc215.hs | # OPTIONS_GHC -fwarn - incomplete - patterns
-- Test for trac #366
-- The C2 case is impossible due to the types
module ShouldCompile where
-- | A GADT indexed by its payload type: 'C1' only inhabits @T Char@,
-- 'C2' only @T Float@.
data T a where
  C1 :: T Char
  C2 :: T Float
-- | Matching 'C1' alone is exhaustive for @T Char@, because the 'C2'
-- constructor cannot produce a @T Char@.
exhaustive :: T Char -> Char
exhaustive C1 = ' '
| null | https://raw.githubusercontent.com/jimcrayne/jhc/1ff035af3d697f9175f8761c8d08edbffde03b4e/regress/tests/1_typecheck/2_pass/ghc/uncat/tc215.hs | haskell | # OPTIONS_GHC -fwarn - incomplete - patterns
Test for trac # 366
The C2 case is impossible due to the types
module ShouldCompile where
data T a where
C1 :: T Char
C2 :: T Float
exhaustive :: T Char -> Char
exhaustive C1 = ' '
| |
d0f24f78311371cc43a6879cfa79fc1258157a8e92e442cac2cc10c0a07c4539 | AccelerateHS/accelerate-llvm | State.hs | # LANGUAGE CPP #
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# OPTIONS_HADDOCK hide #-}
-- |
-- Module : Data.Array.Accelerate.LLVM.State
-- Copyright   : [2014..2020] The Accelerate Team
-- License : BSD3
--
Maintainer : < >
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
module Data.Array.Accelerate.LLVM.State
where
-- library
import Control.Concurrent ( forkIO, threadDelay )
import Control.Monad.Catch ( MonadCatch, MonadThrow, MonadMask )
import Control.Monad.State ( StateT, MonadState, evalStateT )
import Control.Monad.Trans ( MonadIO )
import Prelude
-- Execution state
-- ===============
-- | The LLVM monad, for executing array computations. This consists of a stack
-- for the execution context as well as the per-execution target specific
-- state 'target'.
--
newtype LLVM target a = LLVM { runLLVM :: StateT target IO a }
deriving (Functor, Applicative, Monad, MonadIO, MonadState target, MonadThrow, MonadCatch, MonadMask)
-- | Extract the execution state: 'gets llvmTarget'
--
llvmTarget :: t -> t
llvmTarget = id
-- | Evaluate the given target with an LLVM context
--
evalLLVM :: t -> LLVM t a -> IO a
evalLLVM target acc =
evalStateT (runLLVM acc) target
-- | Make sure the GC knows that we want to keep this thing alive forever.
--
-- We may want to introduce some way to actually shut this down if, for example,
-- the object has not been accessed in a while (whatever that means).
--
-- Broken in ghci-7.6.1 due to bug #7299.
--
-- Fork a thread that holds a reference to 'x' forever, so the garbage
-- collector never reclaims it, and return 'x' unchanged.
--
-- NOTE(review): the extracted source had lost the defining equation of
-- 'caffeine' (a stray "microseconds = 5 seconds" remnant and a dangling
-- "caffeine hit"); the delay loop below is reconstructed from those
-- remnants and the 'forkIO'/'threadDelay' imports.
keepAlive :: a -> IO a
keepAlive x = forkIO (caffeine x) >> return x
  where
    -- Loop forever, sleeping 5 seconds (5e6 microseconds) per wake-up.
    caffeine hit = do
      threadDelay (5 * 1000 * 1000)
      caffeine hit
| null | https://raw.githubusercontent.com/AccelerateHS/accelerate-llvm/cf081587fecec23a19f68bfbd31334166868405e/accelerate-llvm/src/Data/Array/Accelerate/LLVM/State.hs | haskell | # OPTIONS_HADDOCK hide #
|
Module : Data.Array.Accelerate.LLVM.State
License : BSD3
Stability : experimental
library
Execution state
===============
state 'target'.
| Extract the execution state: 'gets llvmTarget'
We may want to introduce some way to actually shut this down if, for example,
the object has not been accessed in a while (whatever that means).
| # LANGUAGE CPP #
# LANGUAGE GeneralizedNewtypeDeriving #
Copyright : [ 2014 .. 2020 ] The Accelerate Team
Maintainer : < >
Portability : non - portable ( GHC extensions )
module Data.Array.Accelerate.LLVM.State
where
import Control.Concurrent ( forkIO, threadDelay )
import Control.Monad.Catch ( MonadCatch, MonadThrow, MonadMask )
import Control.Monad.State ( StateT, MonadState, evalStateT )
import Control.Monad.Trans ( MonadIO )
import Prelude
| The monad , for executing array computations . This consists of a stack
for the execution context as well as the per - execution target specific
newtype LLVM target a = LLVM { runLLVM :: StateT target IO a }
deriving (Functor, Applicative, Monad, MonadIO, MonadState target, MonadThrow, MonadCatch, MonadMask)
llvmTarget :: t -> t
llvmTarget = id
| Evaluate the given target with an context
evalLLVM :: t -> LLVM t a -> IO a
evalLLVM target acc =
evalStateT (runLLVM acc) target
| Make sure the GC knows that we want to keep this thing alive forever .
Broken in ghci-7.6.1 due to bug # 7299 .
keepAlive :: a -> IO a
keepAlive x = forkIO (caffeine x) >> return x
where
microseconds = 5 seconds
caffeine hit
|
310478522407a36636100efb7d9d0f619c36951523e93081f22c357f593a6a9b | locusmath/locus | object.clj | (ns locus.algebra.abelian.group.object
(:require [locus.set.logic.core.set :refer :all]
[locus.set.logic.limit.product :refer :all]
[locus.set.logic.structure.protocols :refer :all]
[locus.set.mapping.general.core.object :refer :all]
[locus.set.copresheaf.structure.core.protocols :refer :all]
[locus.set.quiver.relation.binary.sr :refer :all]
[locus.set.quiver.relation.binary.product :refer :all]
[locus.set.copresheaf.quiver.unital.object :refer :all]
[locus.set.quiver.structure.core.protocols :refer :all]
[locus.set.quiver.binary.core.object :refer :all]
[locus.set.copresheaf.quiver.unital.object :refer :all]
[locus.set.copresheaf.quiver.permutable.object :refer :all]
[locus.set.copresheaf.quiver.dependency.object :refer :all]
[locus.con.core.object :refer [projection]]
[locus.con.core.setpart :refer :all]
[locus.order.general.core.object :refer :all]
[locus.order.lattice.core.object :refer :all]
[locus.algebra.commutative.semigroup.object :refer :all]
[locus.algebra.semigroup.core.object :refer :all]
[locus.algebra.commutative.monoid.object :refer :all]
[locus.algebra.semigroup.monoid.object :refer :all]
[locus.algebra.group.core.object :refer :all])
(:import (locus.algebra.commutative.semigroup.object CommutativeSemigroup)))
; Commutative groups are Z-modules. Once all relevant module related libraries have been loaded
; abelian groups can be converted into Z-modules by using the to-module command.
(deftype CommutativeGroup [elems op id inv]
ConcreteObject
(underlying-set [this] elems)
StructuredDiset
(first-set [this] elems)
(second-set [this] #{0})
StructuredQuiver
(underlying-quiver [this] (singular-quiver elems 0))
(source-fn [this] (constantly 0))
(target-fn [this] (constantly 0))
(transition [this obj] (list 0 0))
StructuredUnitalQuiver
(underlying-unital-quiver [this] (singular-unital-quiver elems 0 id))
(identity-morphism-of [this obj] id)
StructuredPermutableQuiver
(invert-morphism [this x] (inv x))
(underlying-permutable-quiver [this] (singular-permutable-quiver elems 0 inv))
StructuredDependencyQuiver
(underlying-dependency-quiver [this] (singular-dependency-quiver elems 0 id inv))
ConcreteMorphism
(inputs [this] (complete-relation elems))
(outputs [this] elems)
clojure.lang.IFn
(invoke [this obj] (op obj))
(applyTo [this args] (clojure.lang.AFn/applyToHelper this args)))
(derive CommutativeGroup :locus.set.copresheaf.structure.core.protocols/commutative-group)
; Identity and inverse elements
(defmethod invert-element CommutativeGroup
[^CommutativeGroup group, x] ((.inv group) x))
(defmethod identity-elements CommutativeGroup
[^CommutativeGroup group] #{(.id group)})
; The natural preorder on a commutative group is trivial and its condensation is the trivial monoid
(defmethod natural-preorder CommutativeGroup
[^CommutativeGroup group] (fn [[a b]] true))
(defmethod natural-condensation CommutativeGroup
[^CommutativeGroup group] trivial-monoid)
(defmethod to-commutative-monoid CommutativeGroup
[^CommutativeGroup group]
(->CommutativeMonoid
(.-elems group)
(fn [[a b]]
true)
(.-op group)
(.-id group)))
; Products of objects in the concrete category Ab of abelian groups
(defmethod product CommutativeGroup
[& groups]
(CommutativeGroup.
(apply cartesian-product (map underlying-set groups))
(apply semigroup-product-function groups)
(map identity-element groups)
(fn [obj]
(map-indexed
(fn [i v]
((.inv (nth groups i)) v))
obj))))
; Convert other objects into commutative groups whenever possible
(defmulti to-commutative-group type)
(defmethod to-commutative-group CommutativeGroup
[^CommutativeGroup group] group)
; The group of units of a commutative group is the entire group itself
(defmethod group-of-units CommutativeGroup
[^CommutativeGroup group] group)
| null | https://raw.githubusercontent.com/locusmath/locus/b94caabdbec71294c8fb320887646ddf0312ef24/src/clojure/locus/algebra/abelian/group/object.clj | clojure | Commutative groups are Z-modules. Once all relevant module related libraries have been loaded
abelian groups can be converted into Z-modules by using the to-module command.
Identity and inverse elements
The natural preorder on a commutative group is trivial and its condensation is the trivial monoid
Products of objects in the concrete category Ab of abelian groups
Convert other objects into commutative groups whenever possible
The group of units of a commutative group is the entire group itself | (ns locus.algebra.abelian.group.object
(:require [locus.set.logic.core.set :refer :all]
[locus.set.logic.limit.product :refer :all]
[locus.set.logic.structure.protocols :refer :all]
[locus.set.mapping.general.core.object :refer :all]
[locus.set.copresheaf.structure.core.protocols :refer :all]
[locus.set.quiver.relation.binary.sr :refer :all]
[locus.set.quiver.relation.binary.product :refer :all]
[locus.set.copresheaf.quiver.unital.object :refer :all]
[locus.set.quiver.structure.core.protocols :refer :all]
[locus.set.quiver.binary.core.object :refer :all]
[locus.set.copresheaf.quiver.unital.object :refer :all]
[locus.set.copresheaf.quiver.permutable.object :refer :all]
[locus.set.copresheaf.quiver.dependency.object :refer :all]
[locus.con.core.object :refer [projection]]
[locus.con.core.setpart :refer :all]
[locus.order.general.core.object :refer :all]
[locus.order.lattice.core.object :refer :all]
[locus.algebra.commutative.semigroup.object :refer :all]
[locus.algebra.semigroup.core.object :refer :all]
[locus.algebra.commutative.monoid.object :refer :all]
[locus.algebra.semigroup.monoid.object :refer :all]
[locus.algebra.group.core.object :refer :all])
(:import (locus.algebra.commutative.semigroup.object CommutativeSemigroup)))
(deftype CommutativeGroup [elems op id inv]
ConcreteObject
(underlying-set [this] elems)
StructuredDiset
(first-set [this] elems)
(second-set [this] #{0})
StructuredQuiver
(underlying-quiver [this] (singular-quiver elems 0))
(source-fn [this] (constantly 0))
(target-fn [this] (constantly 0))
(transition [this obj] (list 0 0))
StructuredUnitalQuiver
(underlying-unital-quiver [this] (singular-unital-quiver elems 0 id))
(identity-morphism-of [this obj] id)
StructuredPermutableQuiver
(invert-morphism [this x] (inv x))
(underlying-permutable-quiver [this] (singular-permutable-quiver elems 0 inv))
StructuredDependencyQuiver
(underlying-dependency-quiver [this] (singular-dependency-quiver elems 0 id inv))
ConcreteMorphism
(inputs [this] (complete-relation elems))
(outputs [this] elems)
clojure.lang.IFn
(invoke [this obj] (op obj))
(applyTo [this args] (clojure.lang.AFn/applyToHelper this args)))
(derive CommutativeGroup :locus.set.copresheaf.structure.core.protocols/commutative-group)
(defmethod invert-element CommutativeGroup
[^CommutativeGroup group, x] ((.inv group) x))
(defmethod identity-elements CommutativeGroup
[^CommutativeGroup group] #{(.id group)})
(defmethod natural-preorder CommutativeGroup
[^CommutativeGroup group] (fn [[a b]] true))
(defmethod natural-condensation CommutativeGroup
[^CommutativeGroup group] trivial-monoid)
(defmethod to-commutative-monoid CommutativeGroup
[^CommutativeGroup group]
(->CommutativeMonoid
(.-elems group)
(fn [[a b]]
true)
(.-op group)
(.-id group)))
(defmethod product CommutativeGroup
[& groups]
(CommutativeGroup.
(apply cartesian-product (map underlying-set groups))
(apply semigroup-product-function groups)
(map identity-element groups)
(fn [obj]
(map-indexed
(fn [i v]
((.inv (nth groups i)) v))
obj))))
(defmulti to-commutative-group type)
(defmethod to-commutative-group CommutativeGroup
[^CommutativeGroup group] group)
(defmethod group-of-units CommutativeGroup
[^CommutativeGroup group] group)
|
f7b2a6d4bc5a0e31709546d9c0e536e558a1a35ee79bdbc2c201e51576daa119 | kyleburton/sandbox | net.cljs | Copyright ( c ) . All rights reserved .
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns ^{:doc "Network communication library, wrapping goog.net.
Includes a common API over XhrIo, CrossPageChannel, and Websockets."
:author "Bobby Calderwood and Alex Redington"}
clojure.browser.net
(:require [clojure.browser.event :as event]
[goog.net.XhrIo :as gxhrio]
[goog.net.EventType :as gnet-event-type]
[goog.net.xpc.CfgFields :as gxpc-config-fields]
[goog.net.xpc.CrossPageChannel :as xpc]
#_[goog.net.WebSocket :as gwebsocket]
[goog.json :as gjson]))
(def *timeout* 10000)
(def event-types
(into {}
(map
(fn [[k v]]
[(keyword (. k (toLowerCase)))
v])
(merge
(js->clj goog.net.EventType)))))
(defprotocol IConnection
(connect
[this]
[this opt1]
[this opt1 opt2]
[this opt1 opt2 opt3])
(transmit
[this opt]
[this opt opt2]
[this opt opt2 opt3]
[this opt opt2 opt3 opt4]
[this opt opt2 opt3 opt4 opt5])
(close [this]))
(extend-type goog.net.XhrIo
IConnection
(transmit
([this uri]
(transmit this uri "GET" nil nil *timeout*))
([this uri method]
(transmit this uri method nil nil *timeout*))
([this uri method content]
(transmit this uri method content nil *timeout*))
([this uri method content headers]
(transmit this uri method content headers *timeout*))
([this uri method content headers timeout]
(.setTimeoutInterval this timeout)
(.send this uri method content headers)))
event/EventType
(event-types [this]
(into {}
(map
(fn [[k v]]
[(keyword (. k (toLowerCase)))
v])
(merge
(js->clj goog.net.EventType))))))
;; TODO jQuery/sinatra/RestClient style API: (get [uri]), (post [uri payload]), (put [uri payload]), (delete [uri])
(def xpc-config-fields
(into {}
(map
(fn [[k v]]
[(keyword (. k (toLowerCase)))
v])
(js->clj goog.net.xpc.CfgFields))))
(defn xhr-connection
"Returns an XhrIo connection"
[]
(goog.net.XhrIo.))
(defprotocol ICrossPageChannel
(register-service [this service-name fn] [this service-name fn encode-json?]))
(extend-type goog.net.xpc.CrossPageChannel
ICrossPageChannel
(register-service
([this service-name fn]
(register-service this service-name fn false))
([this service-name fn encode-json?]
(.registerService this (name service-name) fn encode-json?)))
IConnection
(connect
([this]
(connect this nil))
([this on-connect-fn]
(.connect this on-connect-fn))
([this on-connect-fn config-iframe-fn]
(connect this on-connect-fn config-iframe-fn (.-body js/document)))
([this on-connect-fn config-iframe-fn iframe-parent]
(.createPeerIframe this iframe-parent config-iframe-fn)
(.connect this on-connect-fn)))
(transmit [this service-name payload]
(.send this (name service-name) payload))
(close [this]
(.close this ())))
(defn xpc-connection
"When passed with a config hash-map, returns a parent
CrossPageChannel object. Keys in the config hash map are downcased
versions of the goog.net.xpc.CfgFields enum keys,
e.g. goog.net.xpc.CfgFields.PEER_URI becomes :peer_uri in the config
hash.
When passed with no args, creates a child CrossPageChannel object,
and the config is automatically taken from the URL param 'xpc', as
per the CrossPageChannel API."
([]
(when-let [config (.getParameterValue
(goog.Uri. (.-href (.-location js/window)))
"xpc")]
(goog.net.xpc.CrossPageChannel. (gjson/parse config))))
([config]
(goog.net.xpc.CrossPageChannel.
(reduce (fn [sum [k v]]
(if-let [field (get xpc-config-fields k)]
(doto sum (aset field v))
sum))
(js-obj)
config))))
WebSocket is not supported in the 3/23/11 release of Google
;; Closure, but will be included in the next release.
#_(defprotocol IWebSocket
(open? [this]))
#_(extend-type goog.net.WebSocket
IWebSocket
(open? [this]
(.isOpen this ()))
IConnection
(connect
([this url]
(connect this url nil))
([this url protocol]
(.open this url protocol)))
(transmit [this message]
(.send this message))
(close [this]
(.close this ()))
event/EventType
(event-types [this]
(into {}
(map
(fn [[k v]]
[(keyword (. k (toLowerCase)))
v])
(merge
(js->clj goog.net.WebSocket/EventType))))))
#_(defn websocket-connection
([]
(websocket-connection nil nil))
([auto-reconnect?]
(websocket-connection auto-reconnect? nil))
([auto-reconnect? next-reconnect-fn]
(goog.net.WebSocket. auto-reconnect? next-reconnect-fn))) | null | https://raw.githubusercontent.com/kyleburton/sandbox/cccbcc9a97026336691063a0a7eb59293a35c31a/examples/clojurescript/om-tut/.repl/6045/clojure/browser/net.cljs | clojure | The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
TODO jQuery/sinatra/RestClient style API: (get [uri]), (post [uri payload]), (put [uri payload]), (delete [uri])
Closure, but will be included in the next release. | Copyright ( c ) . All rights reserved .
Eclipse Public License 1.0 ( -1.0.php )
(ns ^{:doc "Network communication library, wrapping goog.net.
Includes a common API over XhrIo, CrossPageChannel, and Websockets."
:author "Bobby Calderwood and Alex Redington"}
clojure.browser.net
(:require [clojure.browser.event :as event]
[goog.net.XhrIo :as gxhrio]
[goog.net.EventType :as gnet-event-type]
[goog.net.xpc.CfgFields :as gxpc-config-fields]
[goog.net.xpc.CrossPageChannel :as xpc]
#_[goog.net.WebSocket :as gwebsocket]
[goog.json :as gjson]))
(def *timeout* 10000)
(def event-types
(into {}
(map
(fn [[k v]]
[(keyword (. k (toLowerCase)))
v])
(merge
(js->clj goog.net.EventType)))))
(defprotocol IConnection
(connect
[this]
[this opt1]
[this opt1 opt2]
[this opt1 opt2 opt3])
(transmit
[this opt]
[this opt opt2]
[this opt opt2 opt3]
[this opt opt2 opt3 opt4]
[this opt opt2 opt3 opt4 opt5])
(close [this]))
(extend-type goog.net.XhrIo
IConnection
(transmit
([this uri]
(transmit this uri "GET" nil nil *timeout*))
([this uri method]
(transmit this uri method nil nil *timeout*))
([this uri method content]
(transmit this uri method content nil *timeout*))
([this uri method content headers]
(transmit this uri method content headers *timeout*))
([this uri method content headers timeout]
(.setTimeoutInterval this timeout)
(.send this uri method content headers)))
event/EventType
(event-types [this]
(into {}
(map
(fn [[k v]]
[(keyword (. k (toLowerCase)))
v])
(merge
(js->clj goog.net.EventType))))))
(def xpc-config-fields
(into {}
(map
(fn [[k v]]
[(keyword (. k (toLowerCase)))
v])
(js->clj goog.net.xpc.CfgFields))))
(defn xhr-connection
"Returns an XhrIo connection"
[]
(goog.net.XhrIo.))
(defprotocol ICrossPageChannel
(register-service [this service-name fn] [this service-name fn encode-json?]))
(extend-type goog.net.xpc.CrossPageChannel
ICrossPageChannel
(register-service
([this service-name fn]
(register-service this service-name fn false))
([this service-name fn encode-json?]
(.registerService this (name service-name) fn encode-json?)))
IConnection
(connect
([this]
(connect this nil))
([this on-connect-fn]
(.connect this on-connect-fn))
([this on-connect-fn config-iframe-fn]
(connect this on-connect-fn config-iframe-fn (.-body js/document)))
([this on-connect-fn config-iframe-fn iframe-parent]
(.createPeerIframe this iframe-parent config-iframe-fn)
(.connect this on-connect-fn)))
(transmit [this service-name payload]
(.send this (name service-name) payload))
(close [this]
(.close this ())))
(defn xpc-connection
"When passed with a config hash-map, returns a parent
CrossPageChannel object. Keys in the config hash map are downcased
versions of the goog.net.xpc.CfgFields enum keys,
e.g. goog.net.xpc.CfgFields.PEER_URI becomes :peer_uri in the config
hash.
When passed with no args, creates a child CrossPageChannel object,
and the config is automatically taken from the URL param 'xpc', as
per the CrossPageChannel API."
([]
(when-let [config (.getParameterValue
(goog.Uri. (.-href (.-location js/window)))
"xpc")]
(goog.net.xpc.CrossPageChannel. (gjson/parse config))))
([config]
(goog.net.xpc.CrossPageChannel.
(reduce (fn [sum [k v]]
(if-let [field (get xpc-config-fields k)]
(doto sum (aset field v))
sum))
(js-obj)
config))))
WebSocket is not supported in the 3/23/11 release of Google
#_(defprotocol IWebSocket
(open? [this]))
#_(extend-type goog.net.WebSocket
IWebSocket
(open? [this]
(.isOpen this ()))
IConnection
(connect
([this url]
(connect this url nil))
([this url protocol]
(.open this url protocol)))
(transmit [this message]
(.send this message))
(close [this]
(.close this ()))
event/EventType
(event-types [this]
(into {}
(map
(fn [[k v]]
[(keyword (. k (toLowerCase)))
v])
(merge
(js->clj goog.net.WebSocket/EventType))))))
#_(defn websocket-connection
([]
(websocket-connection nil nil))
([auto-reconnect?]
(websocket-connection auto-reconnect? nil))
([auto-reconnect? next-reconnect-fn]
(goog.net.WebSocket. auto-reconnect? next-reconnect-fn))) |
5064f27738192a9acd008aa4017211bdb3f4ad03f0d0cca997f0c181713bcf7b | takikawa/tr-pfds | hood-melville.rkt | #lang typed/racket
(provide filter remove head+tail build-queue
Queue queue enqueue head tail empty empty? queue->list
(rename-out [qmap map]
[queue-andmap andmap]
[queue-ormap ormap]) fold)
(require scheme/match)
(struct: (A) Reversing ([count : Integer]
[first : (Listof A)]
[second : (Listof A)]
[third : (Listof A)]
[fourth : (Listof A)]))
(struct: (A) Appending ([count : Integer]
[first : (Listof A)]
[second : (Listof A)]))
(struct: (A) Done ([first : (Listof A)]))
(define-type (RotationState A) (U Null (Reversing A) (Appending A) (Done A)))
(struct: (A) Queue ([lenf : Integer]
[front : (Listof A)]
[state : (RotationState A)]
[lenr : Integer]
[rear : (Listof A)]))
(: exec : (All (A) ((RotationState A) -> (RotationState A))))
(define (exec state)
(match state
[(struct Reversing (cnt (cons x first) second (cons y third) fourth))
(Reversing (add1 cnt) first (cons x second) third (cons y fourth))]
[(struct Reversing (cnt null second (list y) fourth))
(Appending cnt second (cons y fourth))]
[(struct Appending (0 first second)) (Done second)]
[(struct Appending (cnt (cons x first) second))
(Appending (sub1 cnt) first (cons x second))]
[else state]))
(: invalidate : (All (A) ((RotationState A) -> (RotationState A))))
(define (invalidate state)
(match state
[(struct Reversing (cnt first second third fourth))
(Reversing (sub1 cnt) first second third fourth)]
[(struct Appending (0 first (cons x second))) (Done second)]
[(struct Appending (cnt first second))
(Appending (sub1 cnt) first second)]
[else state]))
(: exec2 :
(All (A) (Integer (Listof A) (RotationState A) Integer (Listof A) ->
(Queue A))))
(define (exec2 lenf front state lenr rear)
(let ([newstate (exec (exec state))])
(match newstate
[(struct Done (newf)) (Queue lenf newf null lenr rear)]
[else (Queue lenf front newstate lenr rear)])))
(: check :
(All (A) (Integer (Listof A) (RotationState A) Integer (Listof A) ->
(Queue A))))
(define (check lenf front state lenr rear)
(if (<= lenr lenf)
(exec2 lenf front state lenr rear)
(exec2 (+ lenf lenr) front
(Reversing 0 front null rear null) 0 null)))
;; Check for empty queue
(: empty? : (All (A) ((Queue A) -> Boolean)))
(define (empty? que)
(zero? (Queue-lenf que)))
;; An empty queue
(define empty (Queue 0 null null 0 null))
;; Inserts an element into the queue
(: enqueue : (All (A) (A (Queue A) -> (Queue A))))
(define (enqueue elem que)
(check (Queue-lenf que)
(Queue-front que)
(Queue-state que)
(add1 (Queue-lenr que))
(cons elem (Queue-rear que))))
Returns the first element of the queue
(: head : (All (A) ((Queue A) -> A)))
(define (head que)
(let ([fr (Queue-front que)])
(if (null? fr)
(error 'head "given queue is empty")
(car fr))))
;; Returns the rest of the queue
(: tail : (All (A) ((Queue A) -> (Queue A))))
(define (tail que)
(let ([fr (Queue-front que)])
(if (null? fr)
(error 'tail "given queue is empty")
(check (sub1 (Queue-lenf que))
(cdr fr)
(invalidate (Queue-state que))
(Queue-lenr que)
(Queue-rear que)))))
;; similar to list map function
;; similar to list map function. apply is expensive so using case-lambda
;; in order to saperate the more common case
(: qmap :
(All (A C B ...)
(case-lambda
((A -> C) (Queue A) -> (Queue C))
((A B ... B -> C) (Queue A) (Queue B) ... B -> (Queue C)))))
(define qmap
(pcase-lambda: (A C B ...)
[([func : (A -> C)]
[deq : (Queue A)])
(map-single empty func deq)]
[([func : (A B ... B -> C)]
[deq : (Queue A)] . [deqs : (Queue B) ... B])
(apply map-multiple empty func deq deqs)]))
(: map-single : (All (A C) ((Queue C) (A -> C) (Queue A) -> (Queue C))))
(define (map-single accum func que)
(if (empty? que)
accum
(map-single (enqueue (func (head que)) accum) func (tail que))))
(: map-multiple :
(All (A C B ...)
((Queue C) (A B ... B -> C) (Queue A) (Queue B) ... B -> (Queue C))))
(define (map-multiple accum func que . ques)
(if (or (empty? que) (ormap empty? ques))
accum
(apply map-multiple
(enqueue (apply func (head que) (map head ques)) accum)
func
(tail que)
(map tail ques))))
;; similar to list foldr or foldl
(: fold :
(All (A C B ...)
(case-lambda ((C A -> C) C (Queue A) -> C)
((C A B ... B -> C) C (Queue A) (Queue B) ... B -> C))))
(define fold
(pcase-lambda: (A C B ...)
[([func : (C A -> C)]
[base : C]
[que : (Queue A)])
(if (empty? que)
base
(fold func (func base (head que)) (tail que)))]
[([func : (C A B ... B -> C)]
[base : C]
[que : (Queue A)] . [ques : (Queue B) ... B])
(if (or (empty? que) (ormap empty? ques))
base
(apply fold
func
(apply func base (head que) (map head ques))
(tail que)
(map tail ques)))]))
;; Queue constructor function
(: queue : (All (A) (A * -> (Queue A))))
(define (queue . lst)
(foldl (inst enqueue A) empty lst))
(: queue->list (All (A) ((Queue A) -> (Listof A))))
(define (queue->list que)
(if (empty? que)
null
(cons (head que) (queue->list (tail que)))))
;; similar to list filter function
(: filter : (All (A) ((A -> Boolean) (Queue A) -> (Queue A))))
(define (filter func que)
(: inner : (All (A) ((A -> Boolean) (Queue A) (Queue A) -> (Queue A))))
(define (inner func que accum)
(if (empty? que)
accum
(let ([head (head que)]
[tail (tail que)])
(if (func head)
(inner func tail (enqueue head accum))
(inner func tail accum)))))
(inner func que empty))
;; similar to list remove function
(: remove : (All (A) ((A -> Boolean) (Queue A) -> (Queue A))))
(define (remove func que)
(: inner : (All (A) ((A -> Boolean) (Queue A) (Queue A) -> (Queue A))))
(define (inner func que accum)
(if (empty? que)
accum
(let ([head (head que)]
[tail (tail que)])
(if (func head)
(inner func tail accum)
(inner func tail (enqueue head accum))))))
(inner func que empty))
(: head+tail : (All (A) ((Queue A) -> (Pair A (Queue A)))))
(define (head+tail que)
(let ([fr (Queue-front que)])
(if (null? fr)
(error 'head+tail "given queue is empty")
(cons (car fr)
(check (sub1 (Queue-lenf que))
(cdr fr)
(invalidate (Queue-state que))
(Queue-lenr que)
(Queue-rear que))))))
;; Similar to build-list function
(: build-queue : (All (A) (Natural (Natural -> A) -> (Queue A))))
(define (build-queue size func)
(let: loop : (Queue A) ([n : Natural size])
(if (zero? n)
empty
(let ([nsub1 (sub1 n)])
(enqueue (func nsub1) (loop nsub1))))))
similar to list andmap function
(: queue-andmap :
(All (A B ...)
(case-lambda ((A -> Boolean) (Queue A) -> Boolean)
((A B ... B -> Boolean) (Queue A) (Queue B) ... B -> Boolean))))
(define queue-andmap
(pcase-lambda: (A B ... )
[([func : (A -> Boolean)]
[queue : (Queue A)])
(or (empty? queue)
(and (func (head queue))
(queue-andmap func (tail queue))))]
[([func : (A B ... B -> Boolean)]
[queue : (Queue A)] . [queues : (Queue B) ... B])
(or (empty? queue) (ormap empty? queues)
(and (apply func (head queue) (map head queues))
(apply queue-andmap func (tail queue)
(map tail queues))))]))
;; Similar to ormap
(: queue-ormap :
(All (A B ...)
(case-lambda ((A -> Boolean) (Queue A) -> Boolean)
((A B ... B -> Boolean) (Queue A) (Queue B) ... B -> Boolean))))
(define queue-ormap
(pcase-lambda: (A B ... )
[([func : (A -> Boolean)]
[queue : (Queue A)])
(and (not (empty? queue))
(or (func (head queue))
(queue-ormap func (tail queue))))]
[([func : (A B ... B -> Boolean)]
[queue : (Queue A)] . [queues : (Queue B) ... B])
(and (not (or (empty? queue) (ormap empty? queues)))
(or (apply func (head queue) (map head queues))
(apply queue-ormap func (tail queue)
(map tail queues))))])) | null | https://raw.githubusercontent.com/takikawa/tr-pfds/a08810bdfc760bb9ed68d08ea222a59135d9a203/pfds/queue/hood-melville.rkt | racket | Check for empty queue
An empty queue
Inserts an element into the queue
Returns the rest of the queue
similar to list map function
similar to list map function. apply is expensive so using case-lambda
in order to saperate the more common case
similar to list foldr or foldl
Queue constructor function
similar to list filter function
similar to list remove function
Similar to build-list function
Similar to ormap | #lang typed/racket
(provide filter remove head+tail build-queue
Queue queue enqueue head tail empty empty? queue->list
(rename-out [qmap map]
[queue-andmap andmap]
[queue-ormap ormap]) fold)
(require scheme/match)
(struct: (A) Reversing ([count : Integer]
[first : (Listof A)]
[second : (Listof A)]
[third : (Listof A)]
[fourth : (Listof A)]))
(struct: (A) Appending ([count : Integer]
[first : (Listof A)]
[second : (Listof A)]))
(struct: (A) Done ([first : (Listof A)]))
(define-type (RotationState A) (U Null (Reversing A) (Appending A) (Done A)))
(struct: (A) Queue ([lenf : Integer]
[front : (Listof A)]
[state : (RotationState A)]
[lenr : Integer]
[rear : (Listof A)]))
(: exec : (All (A) ((RotationState A) -> (RotationState A))))
(define (exec state)
(match state
[(struct Reversing (cnt (cons x first) second (cons y third) fourth))
(Reversing (add1 cnt) first (cons x second) third (cons y fourth))]
[(struct Reversing (cnt null second (list y) fourth))
(Appending cnt second (cons y fourth))]
[(struct Appending (0 first second)) (Done second)]
[(struct Appending (cnt (cons x first) second))
(Appending (sub1 cnt) first (cons x second))]
[else state]))
(: invalidate : (All (A) ((RotationState A) -> (RotationState A))))
(define (invalidate state)
(match state
[(struct Reversing (cnt first second third fourth))
(Reversing (sub1 cnt) first second third fourth)]
[(struct Appending (0 first (cons x second))) (Done second)]
[(struct Appending (cnt first second))
(Appending (sub1 cnt) first second)]
[else state]))
(: exec2 :
(All (A) (Integer (Listof A) (RotationState A) Integer (Listof A) ->
(Queue A))))
(define (exec2 lenf front state lenr rear)
(let ([newstate (exec (exec state))])
(match newstate
[(struct Done (newf)) (Queue lenf newf null lenr rear)]
[else (Queue lenf front newstate lenr rear)])))
(: check :
(All (A) (Integer (Listof A) (RotationState A) Integer (Listof A) ->
(Queue A))))
(define (check lenf front state lenr rear)
(if (<= lenr lenf)
(exec2 lenf front state lenr rear)
(exec2 (+ lenf lenr) front
(Reversing 0 front null rear null) 0 null)))
(: empty? : (All (A) ((Queue A) -> Boolean)))
(define (empty? que)
(zero? (Queue-lenf que)))
(define empty (Queue 0 null null 0 null))
(: enqueue : (All (A) (A (Queue A) -> (Queue A))))
(define (enqueue elem que)
(check (Queue-lenf que)
(Queue-front que)
(Queue-state que)
(add1 (Queue-lenr que))
(cons elem (Queue-rear que))))
Returns the first element of the queue
(: head : (All (A) ((Queue A) -> A)))
(define (head que)
(let ([fr (Queue-front que)])
(if (null? fr)
(error 'head "given queue is empty")
(car fr))))
(: tail : (All (A) ((Queue A) -> (Queue A))))
(define (tail que)
(let ([fr (Queue-front que)])
(if (null? fr)
(error 'tail "given queue is empty")
(check (sub1 (Queue-lenf que))
(cdr fr)
(invalidate (Queue-state que))
(Queue-lenr que)
(Queue-rear que)))))
(: qmap :
(All (A C B ...)
(case-lambda
((A -> C) (Queue A) -> (Queue C))
((A B ... B -> C) (Queue A) (Queue B) ... B -> (Queue C)))))
(define qmap
(pcase-lambda: (A C B ...)
[([func : (A -> C)]
[deq : (Queue A)])
(map-single empty func deq)]
[([func : (A B ... B -> C)]
[deq : (Queue A)] . [deqs : (Queue B) ... B])
(apply map-multiple empty func deq deqs)]))
(: map-single : (All (A C) ((Queue C) (A -> C) (Queue A) -> (Queue C))))
(define (map-single accum func que)
(if (empty? que)
accum
(map-single (enqueue (func (head que)) accum) func (tail que))))
(: map-multiple :
(All (A C B ...)
((Queue C) (A B ... B -> C) (Queue A) (Queue B) ... B -> (Queue C))))
(define (map-multiple accum func que . ques)
(if (or (empty? que) (ormap empty? ques))
accum
(apply map-multiple
(enqueue (apply func (head que) (map head ques)) accum)
func
(tail que)
(map tail ques))))
(: fold :
(All (A C B ...)
(case-lambda ((C A -> C) C (Queue A) -> C)
((C A B ... B -> C) C (Queue A) (Queue B) ... B -> C))))
(define fold
(pcase-lambda: (A C B ...)
[([func : (C A -> C)]
[base : C]
[que : (Queue A)])
(if (empty? que)
base
(fold func (func base (head que)) (tail que)))]
[([func : (C A B ... B -> C)]
[base : C]
[que : (Queue A)] . [ques : (Queue B) ... B])
(if (or (empty? que) (ormap empty? ques))
base
(apply fold
func
(apply func base (head que) (map head ques))
(tail que)
(map tail ques)))]))
(: queue : (All (A) (A * -> (Queue A))))
(define (queue . lst)
(foldl (inst enqueue A) empty lst))
(: queue->list (All (A) ((Queue A) -> (Listof A))))
(define (queue->list que)
(if (empty? que)
null
(cons (head que) (queue->list (tail que)))))
(: filter : (All (A) ((A -> Boolean) (Queue A) -> (Queue A))))
(define (filter func que)
(: inner : (All (A) ((A -> Boolean) (Queue A) (Queue A) -> (Queue A))))
(define (inner func que accum)
(if (empty? que)
accum
(let ([head (head que)]
[tail (tail que)])
(if (func head)
(inner func tail (enqueue head accum))
(inner func tail accum)))))
(inner func que empty))
(: remove : (All (A) ((A -> Boolean) (Queue A) -> (Queue A))))
(define (remove func que)
(: inner : (All (A) ((A -> Boolean) (Queue A) (Queue A) -> (Queue A))))
(define (inner func que accum)
(if (empty? que)
accum
(let ([head (head que)]
[tail (tail que)])
(if (func head)
(inner func tail accum)
(inner func tail (enqueue head accum))))))
(inner func que empty))
(: head+tail : (All (A) ((Queue A) -> (Pair A (Queue A)))))
(define (head+tail que)
(let ([fr (Queue-front que)])
(if (null? fr)
(error 'head+tail "given queue is empty")
(cons (car fr)
(check (sub1 (Queue-lenf que))
(cdr fr)
(invalidate (Queue-state que))
(Queue-lenr que)
(Queue-rear que))))))
(: build-queue : (All (A) (Natural (Natural -> A) -> (Queue A))))
(define (build-queue size func)
(let: loop : (Queue A) ([n : Natural size])
(if (zero? n)
empty
(let ([nsub1 (sub1 n)])
(enqueue (func nsub1) (loop nsub1))))))
similar to list andmap function
(: queue-andmap :
(All (A B ...)
(case-lambda ((A -> Boolean) (Queue A) -> Boolean)
((A B ... B -> Boolean) (Queue A) (Queue B) ... B -> Boolean))))
(define queue-andmap
(pcase-lambda: (A B ... )
[([func : (A -> Boolean)]
[queue : (Queue A)])
(or (empty? queue)
(and (func (head queue))
(queue-andmap func (tail queue))))]
[([func : (A B ... B -> Boolean)]
[queue : (Queue A)] . [queues : (Queue B) ... B])
(or (empty? queue) (ormap empty? queues)
(and (apply func (head queue) (map head queues))
(apply queue-andmap func (tail queue)
(map tail queues))))]))
(: queue-ormap :
(All (A B ...)
(case-lambda ((A -> Boolean) (Queue A) -> Boolean)
((A B ... B -> Boolean) (Queue A) (Queue B) ... B -> Boolean))))
(define queue-ormap
(pcase-lambda: (A B ... )
[([func : (A -> Boolean)]
[queue : (Queue A)])
(and (not (empty? queue))
(or (func (head queue))
(queue-ormap func (tail queue))))]
[([func : (A B ... B -> Boolean)]
[queue : (Queue A)] . [queues : (Queue B) ... B])
(and (not (or (empty? queue) (ormap empty? queues)))
(or (apply func (head queue) (map head queues))
(apply queue-ormap func (tail queue)
(map tail queues))))])) |
2a4db8c48214f4a4b2467b9af63287b9396763e9b2f964823330b90692f8bba5 | racket/racket7 | unconstrained-domain-arrow.rkt | #lang racket/base
(require (for-syntax racket/base)
"arrow-common.rkt"
"blame.rkt"
"guts.rkt"
"prop.rkt"
"misc.rkt")
(provide (rename-out [_unconstrained-domain-> unconstrained-domain->]))
(define-syntax (_unconstrained-domain-> stx)
(syntax-case stx ()
[(_ rngs ...)
(with-syntax ([(res-x ...) (generate-temporaries #'(rngs ...))]
[(p-app-x ...) (generate-temporaries #'(rngs ...))])
#`(build-unconstrained-domain->
(list rngs ...)
(λ (val blame+neg-party rngs-list blame-party-info neg-party p-app-x ...)
(define res-checker
(case-lambda
[(res-x ...) (values/drop (p-app-x res-x neg-party) ...)]
[results
(bad-number-of-results (car blame+neg-party)
val
#,(length (syntax->list #'(rngs ...)))
results
#:missing-party neg-party)]))
(make-keyword-procedure
(λ (kwds kwd-vals . args)
(with-contract-continuation-mark
blame+neg-party
#,(check-tail-contract
#'rngs-list
#'blame-party-info
#'neg-party
(list #'res-checker)
(λ (s) #`(apply values #,@s kwd-vals args))
#'blame+neg-party)))
(λ args
(with-contract-continuation-mark
blame+neg-party
#,(check-tail-contract
#'rngs-list
#'blame-party-info
#'neg-party
(list #'res-checker)
(λ (s) #`(apply values #,@s args))
#'blame+neg-party)))))))]))
(define (build-unconstrained-domain-> range-maybe-contracts wrapper-proc)
(define range-contracts (coerce-contracts 'unconstrained-domain-> range-maybe-contracts))
(define chaperone? (andmap chaperone-contract? range-contracts))
(cond
[chaperone?
(make-chaperone-unconstrained-domain-> range-contracts wrapper-proc)]
[else
(make-impersonator-unconstrained-domain-> range-contracts wrapper-proc)]))
(define (unconstrained-domain->-projection ctc)
(define range-contracts (unconstrained-domain->-ranges ctc))
(define make-wrapper-proc (unconstrained-domain->-make-wrapper-proc ctc))
(define late-neg-projections (map get/build-late-neg-projection range-contracts))
(define can-check-procedure-result-arity? (andmap any/c? range-contracts))
(define desired-procedure-result-arity (length range-contracts))
(define chaperone-or-impersonate-procedure (if (chaperone-unconstrained-domain->? ctc)
chaperone-procedure
impersonate-procedure))
(λ (orig-blame)
(define blame-party-info (get-blame-party-info orig-blame))
(define range-blame (blame-add-range-context orig-blame))
(define projs (for/list ([late-neg-projection (in-list late-neg-projections)])
(late-neg-projection range-blame)))
(λ (val neg-party)
(check-is-a-procedure orig-blame neg-party val)
(define blame+neg-party (cons orig-blame neg-party))
(if (and can-check-procedure-result-arity?
(equal? desired-procedure-result-arity
(procedure-result-arity val)))
val
(chaperone-or-impersonate-procedure
val
(apply make-wrapper-proc
val
blame+neg-party
range-contracts
blame-party-info
neg-party
projs)
impersonator-prop:contracted ctc
impersonator-prop:blame (blame-add-missing-party orig-blame neg-party)
impersonator-prop:application-mark
(cons tail-contract-key (list* neg-party blame-party-info range-contracts)))))))
(define (unconstrained-domain->-name ud)
(apply build-compound-type-name 'unconstrained-domain->
(map contract-name (unconstrained-domain->-ranges ud))))
(define (unconstrained-domain->-first-order ud)
(λ (val)
(procedure? val)))
(define (unconstrained-domain->-stronger this that)
(and (unconstrained-domain->? that)
(pairwise-stronger-contracts? (unconstrained-domain->-ranges this)
(unconstrained-domain->-ranges that))))
(define-struct unconstrained-domain-> (ranges make-wrapper-proc)
#:property prop:custom-write custom-write-property-proc)
(define-struct (chaperone-unconstrained-domain-> unconstrained-domain->) ()
#:property
prop:chaperone-contract
(build-chaperone-contract-property
#:name unconstrained-domain->-name
#:first-order unconstrained-domain->-first-order
#:late-neg-projection unconstrained-domain->-projection
#:stronger unconstrained-domain->-stronger))
(define-struct (impersonator-unconstrained-domain-> unconstrained-domain->) ()
#:property
prop:chaperone-contract
(build-chaperone-contract-property
#:name unconstrained-domain->-name
#:first-order unconstrained-domain->-first-order
#:late-neg-projection unconstrained-domain->-projection
#:stronger unconstrained-domain->-stronger))
(define (check-is-a-procedure orig-blame neg-party val)
(unless (procedure? val)
(raise-blame-error orig-blame #:missing-party neg-party
val
'(expected: "a procedure" given: "~v")
val)))
| null | https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/racket/collects/racket/contract/private/unconstrained-domain-arrow.rkt | racket | #lang racket/base
(require (for-syntax racket/base)
"arrow-common.rkt"
"blame.rkt"
"guts.rkt"
"prop.rkt"
"misc.rkt")
(provide (rename-out [_unconstrained-domain-> unconstrained-domain->]))
(define-syntax (_unconstrained-domain-> stx)
(syntax-case stx ()
[(_ rngs ...)
(with-syntax ([(res-x ...) (generate-temporaries #'(rngs ...))]
[(p-app-x ...) (generate-temporaries #'(rngs ...))])
#`(build-unconstrained-domain->
(list rngs ...)
(λ (val blame+neg-party rngs-list blame-party-info neg-party p-app-x ...)
(define res-checker
(case-lambda
[(res-x ...) (values/drop (p-app-x res-x neg-party) ...)]
[results
(bad-number-of-results (car blame+neg-party)
val
#,(length (syntax->list #'(rngs ...)))
results
#:missing-party neg-party)]))
(make-keyword-procedure
(λ (kwds kwd-vals . args)
(with-contract-continuation-mark
blame+neg-party
#,(check-tail-contract
#'rngs-list
#'blame-party-info
#'neg-party
(list #'res-checker)
(λ (s) #`(apply values #,@s kwd-vals args))
#'blame+neg-party)))
(λ args
(with-contract-continuation-mark
blame+neg-party
#,(check-tail-contract
#'rngs-list
#'blame-party-info
#'neg-party
(list #'res-checker)
(λ (s) #`(apply values #,@s args))
#'blame+neg-party)))))))]))
(define (build-unconstrained-domain-> range-maybe-contracts wrapper-proc)
(define range-contracts (coerce-contracts 'unconstrained-domain-> range-maybe-contracts))
(define chaperone? (andmap chaperone-contract? range-contracts))
(cond
[chaperone?
(make-chaperone-unconstrained-domain-> range-contracts wrapper-proc)]
[else
(make-impersonator-unconstrained-domain-> range-contracts wrapper-proc)]))
(define (unconstrained-domain->-projection ctc)
(define range-contracts (unconstrained-domain->-ranges ctc))
(define make-wrapper-proc (unconstrained-domain->-make-wrapper-proc ctc))
(define late-neg-projections (map get/build-late-neg-projection range-contracts))
(define can-check-procedure-result-arity? (andmap any/c? range-contracts))
(define desired-procedure-result-arity (length range-contracts))
(define chaperone-or-impersonate-procedure (if (chaperone-unconstrained-domain->? ctc)
chaperone-procedure
impersonate-procedure))
(λ (orig-blame)
(define blame-party-info (get-blame-party-info orig-blame))
(define range-blame (blame-add-range-context orig-blame))
(define projs (for/list ([late-neg-projection (in-list late-neg-projections)])
(late-neg-projection range-blame)))
(λ (val neg-party)
(check-is-a-procedure orig-blame neg-party val)
(define blame+neg-party (cons orig-blame neg-party))
(if (and can-check-procedure-result-arity?
(equal? desired-procedure-result-arity
(procedure-result-arity val)))
val
(chaperone-or-impersonate-procedure
val
(apply make-wrapper-proc
val
blame+neg-party
range-contracts
blame-party-info
neg-party
projs)
impersonator-prop:contracted ctc
impersonator-prop:blame (blame-add-missing-party orig-blame neg-party)
impersonator-prop:application-mark
(cons tail-contract-key (list* neg-party blame-party-info range-contracts)))))))
(define (unconstrained-domain->-name ud)
(apply build-compound-type-name 'unconstrained-domain->
(map contract-name (unconstrained-domain->-ranges ud))))
(define (unconstrained-domain->-first-order ud)
(λ (val)
(procedure? val)))
(define (unconstrained-domain->-stronger this that)
(and (unconstrained-domain->? that)
(pairwise-stronger-contracts? (unconstrained-domain->-ranges this)
(unconstrained-domain->-ranges that))))
(define-struct unconstrained-domain-> (ranges make-wrapper-proc)
#:property prop:custom-write custom-write-property-proc)
(define-struct (chaperone-unconstrained-domain-> unconstrained-domain->) ()
#:property
prop:chaperone-contract
(build-chaperone-contract-property
#:name unconstrained-domain->-name
#:first-order unconstrained-domain->-first-order
#:late-neg-projection unconstrained-domain->-projection
#:stronger unconstrained-domain->-stronger))
(define-struct (impersonator-unconstrained-domain-> unconstrained-domain->) ()
#:property
prop:chaperone-contract
(build-chaperone-contract-property
#:name unconstrained-domain->-name
#:first-order unconstrained-domain->-first-order
#:late-neg-projection unconstrained-domain->-projection
#:stronger unconstrained-domain->-stronger))
(define (check-is-a-procedure orig-blame neg-party val)
(unless (procedure? val)
(raise-blame-error orig-blame #:missing-party neg-party
val
'(expected: "a procedure" given: "~v")
val)))
| |
eb8733416b0d11d591c9f4a7d4e2550a4fd9199dc2f1237e5055334ad874d573 | Eduap-com/WordMat | dsterf.lisp | ;;; Compiled by f2cl version:
( " f2cl1.l , v 95098eb54f13 2013/04/01 00:45:16 toy $ "
" f2cl2.l , v 95098eb54f13 2013/04/01 00:45:16 toy $ "
" f2cl3.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl4.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl5.l , v 95098eb54f13 2013/04/01 00:45:16 toy $ "
" f2cl6.l , v 1d5cbacbb977 2008/08/24 00:56:27 rtoy $ "
" macros.l , v 1409c1352feb 2013/03/24 20:44:50 toy $ " )
;;; Using Lisp CMU Common Lisp snapshot-2013-11 (20E Unicode)
;;;
;;; Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
;;; (:coerce-assigns :as-needed) (:array-type ':array)
;;; (:array-slicing t) (:declare-common nil)
;;; (:float-format single-float))
(in-package "LAPACK")
(let* ((zero 0.0d0) (one 1.0d0) (two 2.0d0) (three 3.0d0) (maxit 30))
(declare (type (double-float 0.0d0 0.0d0) zero)
(type (double-float 1.0d0 1.0d0) one)
(type (double-float 2.0d0 2.0d0) two)
(type (double-float 3.0d0 3.0d0) three)
(type (f2cl-lib:integer4 30 30) maxit)
(ignorable zero one two three maxit))
(defun dsterf (n d e info)
(declare (type (array double-float (*)) e d)
(type (f2cl-lib:integer4) info n))
(f2cl-lib:with-multi-array-data
((d double-float d-%data% d-%offset%)
(e double-float e-%data% e-%offset%))
(prog ((alpha 0.0d0) (anorm 0.0d0) (bb 0.0d0) (c 0.0d0) (eps 0.0d0)
(eps2 0.0d0) (gamma 0.0d0) (oldc 0.0d0) (oldgam 0.0d0) (p 0.0d0)
(r 0.0d0) (rt1 0.0d0) (rt2 0.0d0) (rte 0.0d0) (s 0.0d0)
(safmax 0.0d0) (safmin 0.0d0) (sigma 0.0d0) (ssfmax 0.0d0)
(ssfmin 0.0d0) (i 0) (iscale 0) (jtot 0) (l 0) (l1 0) (lend 0)
(lendsv 0) (lsv 0) (m 0) (nmaxit 0))
(declare (type (double-float) alpha anorm bb c eps eps2 gamma oldc
oldgam p r rt1 rt2 rte s safmax safmin
sigma ssfmax ssfmin)
(type (f2cl-lib:integer4) i iscale jtot l l1 lend lendsv lsv m
nmaxit))
(setf info 0)
(cond
((< n 0)
(setf info -1)
(xerbla "DSTERF" (f2cl-lib:int-sub info))
(go end_label)))
(if (<= n 1) (go end_label))
(setf eps (dlamch "E"))
(setf eps2 (expt eps 2))
(setf safmin (dlamch "S"))
(setf safmax (/ one safmin))
(setf ssfmax (/ (f2cl-lib:fsqrt safmax) three))
(setf ssfmin (/ (f2cl-lib:fsqrt safmin) eps2))
(setf nmaxit (f2cl-lib:int-mul n maxit))
(setf sigma zero)
(setf jtot 0)
(setf l1 1)
label10
(if (> l1 n) (go label170))
(if (> l1 1)
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-sub l1 1))
((1 *))
e-%offset%)
zero))
(f2cl-lib:fdo (m l1 (f2cl-lib:int-add m 1))
((> m (f2cl-lib:int-add n (f2cl-lib:int-sub 1))) nil)
(tagbody
(cond
((<= (abs (f2cl-lib:fref e (m) ((1 *))))
(* (f2cl-lib:fsqrt (abs (f2cl-lib:fref d (m) ((1 *)))))
(f2cl-lib:fsqrt
(abs
(f2cl-lib:fref d ((f2cl-lib:int-add m 1)) ((1 *)))))
eps))
(setf (f2cl-lib:fref e-%data% (m) ((1 *)) e-%offset%) zero)
(go label30)))
label20))
(setf m n)
label30
(setf l l1)
(setf lsv l)
(setf lend m)
(setf lendsv lend)
(setf l1 (f2cl-lib:int-add m 1))
(if (= lend l) (go label10))
(setf anorm
(dlanst "I" (f2cl-lib:int-add (f2cl-lib:int-sub lend l) 1)
(f2cl-lib:array-slice d-%data%
double-float
(l)
((1 *))
d-%offset%)
(f2cl-lib:array-slice e-%data%
double-float
(l)
((1 *))
e-%offset%)))
(setf iscale 0)
(cond
((> anorm ssfmax)
(setf iscale 1)
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 anorm ssfmax
(f2cl-lib:int-add (f2cl-lib:int-sub lend l) 1) 1
(f2cl-lib:array-slice d-%data%
double-float
(l)
((1 *))
d-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9))
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 anorm ssfmax (f2cl-lib:int-sub lend l) 1
(f2cl-lib:array-slice e-%data%
double-float
(l)
((1 *))
e-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9)))
((< anorm ssfmin)
(setf iscale 2)
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 anorm ssfmin
(f2cl-lib:int-add (f2cl-lib:int-sub lend l) 1) 1
(f2cl-lib:array-slice d-%data%
double-float
(l)
((1 *))
d-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9))
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 anorm ssfmin (f2cl-lib:int-sub lend l) 1
(f2cl-lib:array-slice e-%data%
double-float
(l)
((1 *))
e-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9))))
(f2cl-lib:fdo (i l (f2cl-lib:int-add i 1))
((> i (f2cl-lib:int-add lend (f2cl-lib:int-sub 1))) nil)
(tagbody
(setf (f2cl-lib:fref e-%data% (i) ((1 *)) e-%offset%)
(expt (f2cl-lib:fref e-%data% (i) ((1 *)) e-%offset%) 2))
label40))
(cond
((< (abs (f2cl-lib:fref d (lend) ((1 *))))
(abs (f2cl-lib:fref d (l) ((1 *)))))
(setf lend lsv)
(setf l lendsv)))
(cond
((>= lend l)
(tagbody
label50
(cond
((/= l lend)
(f2cl-lib:fdo (m l (f2cl-lib:int-add m 1))
((> m
(f2cl-lib:int-add lend (f2cl-lib:int-sub 1)))
nil)
(tagbody
(if
(<= (abs (f2cl-lib:fref e-%data% (m) ((1 *)) e-%offset%))
(* eps2
(abs
(* (f2cl-lib:fref d-%data% (m) ((1 *)) d-%offset%)
(f2cl-lib:fref d-%data%
((f2cl-lib:int-add m 1))
((1 *))
d-%offset%)))))
(go label70))
label60))))
(setf m lend)
label70
(if (< m lend)
(setf (f2cl-lib:fref e-%data% (m) ((1 *)) e-%offset%) zero))
(setf p (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%))
(if (= m l) (go label90))
(cond
((= m (f2cl-lib:int-add l 1))
(setf rte
(f2cl-lib:fsqrt
(f2cl-lib:fref e-%data% (l) ((1 *)) e-%offset%)))
(multiple-value-bind (var-0 var-1 var-2 var-3 var-4)
(dlae2 (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) rte
(f2cl-lib:fref d-%data%
((f2cl-lib:int-add l 1))
((1 *))
d-%offset%)
rt1 rt2)
(declare (ignore var-0 var-1 var-2))
(setf rt1 var-3)
(setf rt2 var-4))
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) rt1)
(setf (f2cl-lib:fref d-%data%
((f2cl-lib:int-add l 1))
((1 *))
d-%offset%)
rt2)
(setf (f2cl-lib:fref e-%data% (l) ((1 *)) e-%offset%) zero)
(setf l (f2cl-lib:int-add l 2))
(if (<= l lend) (go label50))
(go label150)))
(if (= jtot nmaxit) (go label150))
(setf jtot (f2cl-lib:int-add jtot 1))
(setf rte
(f2cl-lib:fsqrt
(f2cl-lib:fref e-%data% (l) ((1 *)) e-%offset%)))
(setf sigma
(/
(-
(f2cl-lib:fref d-%data%
((f2cl-lib:int-add l 1))
((1 *))
d-%offset%)
p)
(* two rte)))
(setf r (dlapy2 sigma one))
(setf sigma (- p (/ rte (+ sigma (f2cl-lib:sign r sigma)))))
(setf c one)
(setf s zero)
(setf gamma
(- (f2cl-lib:fref d-%data% (m) ((1 *)) d-%offset%) sigma))
(setf p (* gamma gamma))
(f2cl-lib:fdo (i (f2cl-lib:int-add m (f2cl-lib:int-sub 1))
(f2cl-lib:int-add i (f2cl-lib:int-sub 1)))
((> i l) nil)
(tagbody
(setf bb (f2cl-lib:fref e-%data% (i) ((1 *)) e-%offset%))
(setf r (+ p bb))
(if (/= i (f2cl-lib:int-sub m 1))
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-add i 1))
((1 *))
e-%offset%)
(* s r)))
(setf oldc c)
(setf c (/ p r))
(setf s (/ bb r))
(setf oldgam gamma)
(setf alpha (f2cl-lib:fref d-%data% (i) ((1 *)) d-%offset%))
(setf gamma (- (* c (- alpha sigma)) (* s oldgam)))
(setf (f2cl-lib:fref d-%data%
((f2cl-lib:int-add i 1))
((1 *))
d-%offset%)
(+ oldgam (- alpha gamma)))
(cond
((/= c zero)
(setf p (/ (* gamma gamma) c)))
(t
(setf p (* oldc bb))))
label80))
(setf (f2cl-lib:fref e-%data% (l) ((1 *)) e-%offset%) (* s p))
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%)
(+ sigma gamma))
(go label50)
label90
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) p)
(setf l (f2cl-lib:int-add l 1))
(if (<= l lend) (go label50))
(go label150)))
(t
(tagbody
label100
(f2cl-lib:fdo (m l (f2cl-lib:int-add m (f2cl-lib:int-sub 1)))
((> m (f2cl-lib:int-add lend 1)) nil)
(tagbody
(if
(<=
(abs
(f2cl-lib:fref e-%data%
((f2cl-lib:int-sub m 1))
((1 *))
e-%offset%))
(* eps2
(abs
(* (f2cl-lib:fref d-%data% (m) ((1 *)) d-%offset%)
(f2cl-lib:fref d-%data%
((f2cl-lib:int-sub m 1))
((1 *))
d-%offset%)))))
(go label120))
label110))
(setf m lend)
label120
(if (> m lend)
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-sub m 1))
((1 *))
e-%offset%)
zero))
(setf p (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%))
(if (= m l) (go label140))
(cond
((= m (f2cl-lib:int-add l (f2cl-lib:int-sub 1)))
(setf rte
(f2cl-lib:fsqrt
(f2cl-lib:fref e-%data%
((f2cl-lib:int-sub l 1))
((1 *))
e-%offset%)))
(multiple-value-bind (var-0 var-1 var-2 var-3 var-4)
(dlae2 (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) rte
(f2cl-lib:fref d-%data%
((f2cl-lib:int-sub l 1))
((1 *))
d-%offset%)
rt1 rt2)
(declare (ignore var-0 var-1 var-2))
(setf rt1 var-3)
(setf rt2 var-4))
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) rt1)
(setf (f2cl-lib:fref d-%data%
((f2cl-lib:int-sub l 1))
((1 *))
d-%offset%)
rt2)
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-sub l 1))
((1 *))
e-%offset%)
zero)
(setf l (f2cl-lib:int-sub l 2))
(if (>= l lend) (go label100))
(go label150)))
(if (= jtot nmaxit) (go label150))
(setf jtot (f2cl-lib:int-add jtot 1))
(setf rte
(f2cl-lib:fsqrt
(f2cl-lib:fref e-%data%
((f2cl-lib:int-sub l 1))
((1 *))
e-%offset%)))
(setf sigma
(/
(-
(f2cl-lib:fref d-%data%
((f2cl-lib:int-sub l 1))
((1 *))
d-%offset%)
p)
(* two rte)))
(setf r (dlapy2 sigma one))
(setf sigma (- p (/ rte (+ sigma (f2cl-lib:sign r sigma)))))
(setf c one)
(setf s zero)
(setf gamma
(- (f2cl-lib:fref d-%data% (m) ((1 *)) d-%offset%) sigma))
(setf p (* gamma gamma))
(f2cl-lib:fdo (i m (f2cl-lib:int-add i 1))
((> i (f2cl-lib:int-add l (f2cl-lib:int-sub 1))) nil)
(tagbody
(setf bb (f2cl-lib:fref e-%data% (i) ((1 *)) e-%offset%))
(setf r (+ p bb))
(if (/= i m)
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-sub i 1))
((1 *))
e-%offset%)
(* s r)))
(setf oldc c)
(setf c (/ p r))
(setf s (/ bb r))
(setf oldgam gamma)
(setf alpha
(f2cl-lib:fref d-%data%
((f2cl-lib:int-add i 1))
((1 *))
d-%offset%))
(setf gamma (- (* c (- alpha sigma)) (* s oldgam)))
(setf (f2cl-lib:fref d-%data% (i) ((1 *)) d-%offset%)
(+ oldgam (- alpha gamma)))
(cond
((/= c zero)
(setf p (/ (* gamma gamma) c)))
(t
(setf p (* oldc bb))))
label130))
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-sub l 1))
((1 *))
e-%offset%)
(* s p))
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%)
(+ sigma gamma))
(go label100)
label140
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) p)
(setf l (f2cl-lib:int-sub l 1))
(if (>= l lend) (go label100))
(go label150))))
label150
(if (= iscale 1)
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 ssfmax anorm
(f2cl-lib:int-add (f2cl-lib:int-sub lendsv lsv) 1) 1
(f2cl-lib:array-slice d-%data%
double-float
(lsv)
((1 *))
d-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9)))
(if (= iscale 2)
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 ssfmin anorm
(f2cl-lib:int-add (f2cl-lib:int-sub lendsv lsv) 1) 1
(f2cl-lib:array-slice d-%data%
double-float
(lsv)
((1 *))
d-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9)))
(if (< jtot nmaxit) (go label10))
(f2cl-lib:fdo (i 1 (f2cl-lib:int-add i 1))
((> i (f2cl-lib:int-add n (f2cl-lib:int-sub 1))) nil)
(tagbody
(if (/= (f2cl-lib:fref e-%data% (i) ((1 *)) e-%offset%) zero)
(setf info (f2cl-lib:int-add info 1)))
label160))
(go label180)
label170
(multiple-value-bind (var-0 var-1 var-2 var-3)
(dlasrt "I" n d info)
(declare (ignore var-0 var-1 var-2))
(setf info var-3))
label180
(go end_label)
end_label
(return (values nil nil nil info))))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::dsterf
fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((fortran-to-lisp::integer4) (array double-float (*))
(array double-float (*)) (fortran-to-lisp::integer4))
:return-values '(nil nil nil fortran-to-lisp::info)
:calls '(fortran-to-lisp::dlasrt fortran-to-lisp::dlapy2
fortran-to-lisp::dlae2 fortran-to-lisp::dlascl
fortran-to-lisp::dlanst fortran-to-lisp::dlamch
fortran-to-lisp::xerbla))))
| null | https://raw.githubusercontent.com/Eduap-com/WordMat/83c9336770067f54431cc42c7147dc6ed640a339/Windows/ExternalPrograms/maxima-5.45.1/share/maxima/5.45.1/share/lapack/lapack/dsterf.lisp | lisp | Compiled by f2cl version:
Using Lisp CMU Common Lisp snapshot-2013-11 (20E Unicode)
Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
(:coerce-assigns :as-needed) (:array-type ':array)
(:array-slicing t) (:declare-common nil)
(:float-format single-float)) | ( " f2cl1.l , v 95098eb54f13 2013/04/01 00:45:16 toy $ "
" f2cl2.l , v 95098eb54f13 2013/04/01 00:45:16 toy $ "
" f2cl3.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl4.l , v 96616d88fb7e 2008/02/22 22:19:34 rtoy $ "
" f2cl5.l , v 95098eb54f13 2013/04/01 00:45:16 toy $ "
" f2cl6.l , v 1d5cbacbb977 2008/08/24 00:56:27 rtoy $ "
" macros.l , v 1409c1352feb 2013/03/24 20:44:50 toy $ " )
(in-package "LAPACK")
(let* ((zero 0.0d0) (one 1.0d0) (two 2.0d0) (three 3.0d0) (maxit 30))
(declare (type (double-float 0.0d0 0.0d0) zero)
(type (double-float 1.0d0 1.0d0) one)
(type (double-float 2.0d0 2.0d0) two)
(type (double-float 3.0d0 3.0d0) three)
(type (f2cl-lib:integer4 30 30) maxit)
(ignorable zero one two three maxit))
(defun dsterf (n d e info)
(declare (type (array double-float (*)) e d)
(type (f2cl-lib:integer4) info n))
(f2cl-lib:with-multi-array-data
((d double-float d-%data% d-%offset%)
(e double-float e-%data% e-%offset%))
(prog ((alpha 0.0d0) (anorm 0.0d0) (bb 0.0d0) (c 0.0d0) (eps 0.0d0)
(eps2 0.0d0) (gamma 0.0d0) (oldc 0.0d0) (oldgam 0.0d0) (p 0.0d0)
(r 0.0d0) (rt1 0.0d0) (rt2 0.0d0) (rte 0.0d0) (s 0.0d0)
(safmax 0.0d0) (safmin 0.0d0) (sigma 0.0d0) (ssfmax 0.0d0)
(ssfmin 0.0d0) (i 0) (iscale 0) (jtot 0) (l 0) (l1 0) (lend 0)
(lendsv 0) (lsv 0) (m 0) (nmaxit 0))
(declare (type (double-float) alpha anorm bb c eps eps2 gamma oldc
oldgam p r rt1 rt2 rte s safmax safmin
sigma ssfmax ssfmin)
(type (f2cl-lib:integer4) i iscale jtot l l1 lend lendsv lsv m
nmaxit))
(setf info 0)
(cond
((< n 0)
(setf info -1)
(xerbla "DSTERF" (f2cl-lib:int-sub info))
(go end_label)))
(if (<= n 1) (go end_label))
(setf eps (dlamch "E"))
(setf eps2 (expt eps 2))
(setf safmin (dlamch "S"))
(setf safmax (/ one safmin))
(setf ssfmax (/ (f2cl-lib:fsqrt safmax) three))
(setf ssfmin (/ (f2cl-lib:fsqrt safmin) eps2))
(setf nmaxit (f2cl-lib:int-mul n maxit))
(setf sigma zero)
(setf jtot 0)
(setf l1 1)
label10
(if (> l1 n) (go label170))
(if (> l1 1)
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-sub l1 1))
((1 *))
e-%offset%)
zero))
(f2cl-lib:fdo (m l1 (f2cl-lib:int-add m 1))
((> m (f2cl-lib:int-add n (f2cl-lib:int-sub 1))) nil)
(tagbody
(cond
((<= (abs (f2cl-lib:fref e (m) ((1 *))))
(* (f2cl-lib:fsqrt (abs (f2cl-lib:fref d (m) ((1 *)))))
(f2cl-lib:fsqrt
(abs
(f2cl-lib:fref d ((f2cl-lib:int-add m 1)) ((1 *)))))
eps))
(setf (f2cl-lib:fref e-%data% (m) ((1 *)) e-%offset%) zero)
(go label30)))
label20))
(setf m n)
label30
(setf l l1)
(setf lsv l)
(setf lend m)
(setf lendsv lend)
(setf l1 (f2cl-lib:int-add m 1))
(if (= lend l) (go label10))
(setf anorm
(dlanst "I" (f2cl-lib:int-add (f2cl-lib:int-sub lend l) 1)
(f2cl-lib:array-slice d-%data%
double-float
(l)
((1 *))
d-%offset%)
(f2cl-lib:array-slice e-%data%
double-float
(l)
((1 *))
e-%offset%)))
(setf iscale 0)
(cond
((> anorm ssfmax)
(setf iscale 1)
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 anorm ssfmax
(f2cl-lib:int-add (f2cl-lib:int-sub lend l) 1) 1
(f2cl-lib:array-slice d-%data%
double-float
(l)
((1 *))
d-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9))
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 anorm ssfmax (f2cl-lib:int-sub lend l) 1
(f2cl-lib:array-slice e-%data%
double-float
(l)
((1 *))
e-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9)))
((< anorm ssfmin)
(setf iscale 2)
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 anorm ssfmin
(f2cl-lib:int-add (f2cl-lib:int-sub lend l) 1) 1
(f2cl-lib:array-slice d-%data%
double-float
(l)
((1 *))
d-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9))
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 anorm ssfmin (f2cl-lib:int-sub lend l) 1
(f2cl-lib:array-slice e-%data%
double-float
(l)
((1 *))
e-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9))))
(f2cl-lib:fdo (i l (f2cl-lib:int-add i 1))
((> i (f2cl-lib:int-add lend (f2cl-lib:int-sub 1))) nil)
(tagbody
(setf (f2cl-lib:fref e-%data% (i) ((1 *)) e-%offset%)
(expt (f2cl-lib:fref e-%data% (i) ((1 *)) e-%offset%) 2))
label40))
(cond
((< (abs (f2cl-lib:fref d (lend) ((1 *))))
(abs (f2cl-lib:fref d (l) ((1 *)))))
(setf lend lsv)
(setf l lendsv)))
(cond
((>= lend l)
(tagbody
label50
(cond
((/= l lend)
(f2cl-lib:fdo (m l (f2cl-lib:int-add m 1))
((> m
(f2cl-lib:int-add lend (f2cl-lib:int-sub 1)))
nil)
(tagbody
(if
(<= (abs (f2cl-lib:fref e-%data% (m) ((1 *)) e-%offset%))
(* eps2
(abs
(* (f2cl-lib:fref d-%data% (m) ((1 *)) d-%offset%)
(f2cl-lib:fref d-%data%
((f2cl-lib:int-add m 1))
((1 *))
d-%offset%)))))
(go label70))
label60))))
(setf m lend)
label70
(if (< m lend)
(setf (f2cl-lib:fref e-%data% (m) ((1 *)) e-%offset%) zero))
(setf p (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%))
(if (= m l) (go label90))
(cond
((= m (f2cl-lib:int-add l 1))
(setf rte
(f2cl-lib:fsqrt
(f2cl-lib:fref e-%data% (l) ((1 *)) e-%offset%)))
(multiple-value-bind (var-0 var-1 var-2 var-3 var-4)
(dlae2 (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) rte
(f2cl-lib:fref d-%data%
((f2cl-lib:int-add l 1))
((1 *))
d-%offset%)
rt1 rt2)
(declare (ignore var-0 var-1 var-2))
(setf rt1 var-3)
(setf rt2 var-4))
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) rt1)
(setf (f2cl-lib:fref d-%data%
((f2cl-lib:int-add l 1))
((1 *))
d-%offset%)
rt2)
(setf (f2cl-lib:fref e-%data% (l) ((1 *)) e-%offset%) zero)
(setf l (f2cl-lib:int-add l 2))
(if (<= l lend) (go label50))
(go label150)))
(if (= jtot nmaxit) (go label150))
(setf jtot (f2cl-lib:int-add jtot 1))
(setf rte
(f2cl-lib:fsqrt
(f2cl-lib:fref e-%data% (l) ((1 *)) e-%offset%)))
(setf sigma
(/
(-
(f2cl-lib:fref d-%data%
((f2cl-lib:int-add l 1))
((1 *))
d-%offset%)
p)
(* two rte)))
(setf r (dlapy2 sigma one))
(setf sigma (- p (/ rte (+ sigma (f2cl-lib:sign r sigma)))))
(setf c one)
(setf s zero)
(setf gamma
(- (f2cl-lib:fref d-%data% (m) ((1 *)) d-%offset%) sigma))
(setf p (* gamma gamma))
(f2cl-lib:fdo (i (f2cl-lib:int-add m (f2cl-lib:int-sub 1))
(f2cl-lib:int-add i (f2cl-lib:int-sub 1)))
((> i l) nil)
(tagbody
(setf bb (f2cl-lib:fref e-%data% (i) ((1 *)) e-%offset%))
(setf r (+ p bb))
(if (/= i (f2cl-lib:int-sub m 1))
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-add i 1))
((1 *))
e-%offset%)
(* s r)))
(setf oldc c)
(setf c (/ p r))
(setf s (/ bb r))
(setf oldgam gamma)
(setf alpha (f2cl-lib:fref d-%data% (i) ((1 *)) d-%offset%))
(setf gamma (- (* c (- alpha sigma)) (* s oldgam)))
(setf (f2cl-lib:fref d-%data%
((f2cl-lib:int-add i 1))
((1 *))
d-%offset%)
(+ oldgam (- alpha gamma)))
(cond
((/= c zero)
(setf p (/ (* gamma gamma) c)))
(t
(setf p (* oldc bb))))
label80))
(setf (f2cl-lib:fref e-%data% (l) ((1 *)) e-%offset%) (* s p))
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%)
(+ sigma gamma))
(go label50)
label90
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) p)
(setf l (f2cl-lib:int-add l 1))
(if (<= l lend) (go label50))
(go label150)))
(t
(tagbody
label100
(f2cl-lib:fdo (m l (f2cl-lib:int-add m (f2cl-lib:int-sub 1)))
((> m (f2cl-lib:int-add lend 1)) nil)
(tagbody
(if
(<=
(abs
(f2cl-lib:fref e-%data%
((f2cl-lib:int-sub m 1))
((1 *))
e-%offset%))
(* eps2
(abs
(* (f2cl-lib:fref d-%data% (m) ((1 *)) d-%offset%)
(f2cl-lib:fref d-%data%
((f2cl-lib:int-sub m 1))
((1 *))
d-%offset%)))))
(go label120))
label110))
(setf m lend)
label120
(if (> m lend)
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-sub m 1))
((1 *))
e-%offset%)
zero))
(setf p (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%))
(if (= m l) (go label140))
(cond
((= m (f2cl-lib:int-add l (f2cl-lib:int-sub 1)))
(setf rte
(f2cl-lib:fsqrt
(f2cl-lib:fref e-%data%
((f2cl-lib:int-sub l 1))
((1 *))
e-%offset%)))
(multiple-value-bind (var-0 var-1 var-2 var-3 var-4)
(dlae2 (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) rte
(f2cl-lib:fref d-%data%
((f2cl-lib:int-sub l 1))
((1 *))
d-%offset%)
rt1 rt2)
(declare (ignore var-0 var-1 var-2))
(setf rt1 var-3)
(setf rt2 var-4))
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) rt1)
(setf (f2cl-lib:fref d-%data%
((f2cl-lib:int-sub l 1))
((1 *))
d-%offset%)
rt2)
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-sub l 1))
((1 *))
e-%offset%)
zero)
(setf l (f2cl-lib:int-sub l 2))
(if (>= l lend) (go label100))
(go label150)))
(if (= jtot nmaxit) (go label150))
(setf jtot (f2cl-lib:int-add jtot 1))
(setf rte
(f2cl-lib:fsqrt
(f2cl-lib:fref e-%data%
((f2cl-lib:int-sub l 1))
((1 *))
e-%offset%)))
(setf sigma
(/
(-
(f2cl-lib:fref d-%data%
((f2cl-lib:int-sub l 1))
((1 *))
d-%offset%)
p)
(* two rte)))
(setf r (dlapy2 sigma one))
(setf sigma (- p (/ rte (+ sigma (f2cl-lib:sign r sigma)))))
(setf c one)
(setf s zero)
(setf gamma
(- (f2cl-lib:fref d-%data% (m) ((1 *)) d-%offset%) sigma))
(setf p (* gamma gamma))
(f2cl-lib:fdo (i m (f2cl-lib:int-add i 1))
((> i (f2cl-lib:int-add l (f2cl-lib:int-sub 1))) nil)
(tagbody
(setf bb (f2cl-lib:fref e-%data% (i) ((1 *)) e-%offset%))
(setf r (+ p bb))
(if (/= i m)
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-sub i 1))
((1 *))
e-%offset%)
(* s r)))
(setf oldc c)
(setf c (/ p r))
(setf s (/ bb r))
(setf oldgam gamma)
(setf alpha
(f2cl-lib:fref d-%data%
((f2cl-lib:int-add i 1))
((1 *))
d-%offset%))
(setf gamma (- (* c (- alpha sigma)) (* s oldgam)))
(setf (f2cl-lib:fref d-%data% (i) ((1 *)) d-%offset%)
(+ oldgam (- alpha gamma)))
(cond
((/= c zero)
(setf p (/ (* gamma gamma) c)))
(t
(setf p (* oldc bb))))
label130))
(setf (f2cl-lib:fref e-%data%
((f2cl-lib:int-sub l 1))
((1 *))
e-%offset%)
(* s p))
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%)
(+ sigma gamma))
(go label100)
label140
(setf (f2cl-lib:fref d-%data% (l) ((1 *)) d-%offset%) p)
(setf l (f2cl-lib:int-sub l 1))
(if (>= l lend) (go label100))
(go label150))))
label150
(if (= iscale 1)
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 ssfmax anorm
(f2cl-lib:int-add (f2cl-lib:int-sub lendsv lsv) 1) 1
(f2cl-lib:array-slice d-%data%
double-float
(lsv)
((1 *))
d-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9)))
(if (= iscale 2)
(multiple-value-bind
(var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7 var-8 var-9)
(dlascl "G" 0 0 ssfmin anorm
(f2cl-lib:int-add (f2cl-lib:int-sub lendsv lsv) 1) 1
(f2cl-lib:array-slice d-%data%
double-float
(lsv)
((1 *))
d-%offset%)
n info)
(declare (ignore var-0 var-1 var-2 var-3 var-4 var-5 var-6 var-7
var-8))
(setf info var-9)))
(if (< jtot nmaxit) (go label10))
(f2cl-lib:fdo (i 1 (f2cl-lib:int-add i 1))
((> i (f2cl-lib:int-add n (f2cl-lib:int-sub 1))) nil)
(tagbody
(if (/= (f2cl-lib:fref e-%data% (i) ((1 *)) e-%offset%) zero)
(setf info (f2cl-lib:int-add info 1)))
label160))
(go label180)
label170
(multiple-value-bind (var-0 var-1 var-2 var-3)
(dlasrt "I" n d info)
(declare (ignore var-0 var-1 var-2))
(setf info var-3))
label180
(go end_label)
end_label
(return (values nil nil nil info))))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::dsterf
fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((fortran-to-lisp::integer4) (array double-float (*))
(array double-float (*)) (fortran-to-lisp::integer4))
:return-values '(nil nil nil fortran-to-lisp::info)
:calls '(fortran-to-lisp::dlasrt fortran-to-lisp::dlapy2
fortran-to-lisp::dlae2 fortran-to-lisp::dlascl
fortran-to-lisp::dlanst fortran-to-lisp::dlamch
fortran-to-lisp::xerbla))))
|
0458e9dbc3153f9a12d20bea5d1dc1296281ab14b25f8f0cdc3e394ee7217cb7 | tamarit/edd | roman_ok.erl | -module(roman_ok).
-compile([export_all]).
to_roman(0) -> [];
to_roman(X) when X >= 1000 -> [$M | to_roman(X - 1000)];
to_roman(X) when X >= 100 ->
digit(X div 100, $C, $D, $M) ++ to_roman(X rem 100);
to_roman(X) when X >= 10 ->
digit(X div 10, $X, $L, $C) ++ to_roman(X rem 10);
to_roman(X) when X >= 1 -> digit(X, $I, $V, $X).
digit(1, X, _, _) -> [X];
digit(2, X, _, _) -> [X, X];
digit(3, X, _, _) -> [X, X, X];
digit(4, X, Y, _) -> [X, Y];
digit(5, _, Y, _) -> [Y];
digit(6, X, Y, _) -> [Y, X];
digit(7, X, Y, _) -> [Y, X, X];
digit(8, X, Y, _) -> [Y, X, X, X];
digit(9, X, _, Z) -> [X, Z].
main() ->
to_roman(2489).
| null | https://raw.githubusercontent.com/tamarit/edd/867f287efe951bec6a8213743a218b86e4f5bbf7/examples/roman/roman_ok.erl | erlang | -module(roman_ok).
-compile([export_all]).
to_roman(0) -> [];
to_roman(X) when X >= 1000 -> [$M | to_roman(X - 1000)];
to_roman(X) when X >= 100 ->
digit(X div 100, $C, $D, $M) ++ to_roman(X rem 100);
to_roman(X) when X >= 10 ->
digit(X div 10, $X, $L, $C) ++ to_roman(X rem 10);
to_roman(X) when X >= 1 -> digit(X, $I, $V, $X).
digit(1, X, _, _) -> [X];
digit(2, X, _, _) -> [X, X];
digit(3, X, _, _) -> [X, X, X];
digit(4, X, Y, _) -> [X, Y];
digit(5, _, Y, _) -> [Y];
digit(6, X, Y, _) -> [Y, X];
digit(7, X, Y, _) -> [Y, X, X];
digit(8, X, Y, _) -> [Y, X, X, X];
digit(9, X, _, Z) -> [X, Z].
main() ->
to_roman(2489).
| |
f06b960659bb85b986a6ad752261f380daea6bc25ff0d365f024022b09ceb17c | tweag/ormolu | proc-forms1.hs | {-# LANGUAGE Arrows #-}
foo0 f g x y = proc _ -> (| f (g -< (x, y)) |)
foo1 f g h x =
proc (y, z) -> (|
test ( h f
. h g
-<
y x
. y z
)
( h g
. h f
-<
y z
. y x)
|)
| null | https://raw.githubusercontent.com/tweag/ormolu/34bdf62429768f24b70d0f8ba7730fc4d8ae73ba/data/examples/declaration/value/function/arrow/proc-forms1.hs | haskell | # LANGUAGE Arrows # |
foo0 f g x y = proc _ -> (| f (g -< (x, y)) |)
foo1 f g h x =
proc (y, z) -> (|
test ( h f
. h g
-<
y x
. y z
)
( h g
. h f
-<
y z
. y x)
|)
|
4723864dc9636d13b183354b427bfcee8b3b40d84ac67c9e85361595a764ecf9 | ghcjs/ghcjs | gadt2.hs | # LANGUAGE ExplicitForAll , GADTs #
-- Pattern match uses dictionaries bound higher up in the pattern
module Main where
data T = forall a. Integral a => T a
f :: T -> Bool
f (T 0) = True
f (T n) = False
g :: T -> Ordering
g (T n) | n >= 3 = if n>3 then GT else EQ
g (T n) = LT
main = do print [f (T 0), f (T 1)]
print [g (T 2), g (T 3), g (T 4)]
| null | https://raw.githubusercontent.com/ghcjs/ghcjs/e4cd4232a31f6371c761acd93853702f4c7ca74c/test/ghc/gadt/gadt2.hs | haskell | Pattern match uses dictionaries bound higher up in the pattern | # LANGUAGE ExplicitForAll , GADTs #
module Main where
data T = forall a. Integral a => T a
f :: T -> Bool
f (T 0) = True
f (T n) = False
g :: T -> Ordering
g (T n) | n >= 3 = if n>3 then GT else EQ
g (T n) = LT
main = do print [f (T 0), f (T 1)]
print [g (T 2), g (T 3), g (T 4)]
|
f4d81deea6606557e01df712dbfe2174eb5f218aac40206e3f8bf689da1d872f | kowainik/stan | FileInfo.hs | |
Copyright : ( c ) 2020 Kowainik
SPDX - License - Identifier : MPL-2.0
Maintainer : < >
File ( or module ) specific information .
Copyright: (c) 2020 Kowainik
SPDX-License-Identifier: MPL-2.0
Maintainer: Kowainik <>
File (or module) specific information.
-}
module Stan.FileInfo
( FileMap
, FileInfo (..)
, extensionsToText
, isExtensionDisabled
) where
import Data.Aeson.Micro (ToJSON (..), object, (.=))
import Extensions (Extensions (..), ExtensionsError, ExtensionsResult, OnOffExtension (..),
ParsedExtensions (..), showOnOffExtension)
import GHC.LanguageExtensions.Type (Extension)
import Stan.Core.ModuleName (ModuleName)
import Stan.Observation (Observations)
import qualified Data.Set as Set
-- | File specific information.
data FileInfo = FileInfo
{ fileInfoPath :: !FilePath
, fileInfoModuleName :: !ModuleName
, fileInfoLoc :: !Int
, fileInfoCabalExtensions :: !(Either ExtensionsError ParsedExtensions)
, fileInfoExtensions :: !(Either ExtensionsError ParsedExtensions)
, fileInfoMergedExtensions :: !ExtensionsResult
, fileInfoObservations :: !Observations
} deriving stock (Show, Eq)
instance ToJSON FileInfo where
toJSON FileInfo{..} = object
[ "path" .= toText fileInfoPath
, "moduleName" .= fileInfoModuleName
, "loc" .= fileInfoLoc
, "cabalExtensions" .= extensionsToText fileInfoCabalExtensions
, "extensions" .= extensionsToText fileInfoExtensions
, "observations" .= toList fileInfoObservations
]
type FileMap = Map FilePath FileInfo
-- | Return the list of pretty-printed extensions.
extensionsToText :: Either ExtensionsError ParsedExtensions -> [Text]
extensionsToText = \case
Left _ -> ["Unable to extract extensions"]
Right ParsedExtensions{..} ->
let exts = map showOnOffExtension parsedExtensionsAll in
case parsedExtensionsSafe of
Just s -> show s : exts
Nothing -> exts
{- | Check whether the given extension is disabled
-}
isExtensionDisabled :: Extension -> ExtensionsResult -> Bool
isExtensionDisabled ext = \case
Left _ -> True -- no info about extensions, consider it disabled
Right Extensions{..} ->
Set.notMember (On ext) extensionsAll
|| Set.member (Off ext) extensionsAll
| null | https://raw.githubusercontent.com/kowainik/stan/da36eac741466fe6f46dc3e56fca7806f8b41816/src/Stan/FileInfo.hs | haskell | | File specific information.
| Return the list of pretty-printed extensions.
| Check whether the given extension is disabled
no info about extensions, consider it disabled | |
Copyright : ( c ) 2020 Kowainik
SPDX - License - Identifier : MPL-2.0
Maintainer : < >
File ( or module ) specific information .
Copyright: (c) 2020 Kowainik
SPDX-License-Identifier: MPL-2.0
Maintainer: Kowainik <>
File (or module) specific information.
-}
module Stan.FileInfo
( FileMap
, FileInfo (..)
, extensionsToText
, isExtensionDisabled
) where
import Data.Aeson.Micro (ToJSON (..), object, (.=))
import Extensions (Extensions (..), ExtensionsError, ExtensionsResult, OnOffExtension (..),
ParsedExtensions (..), showOnOffExtension)
import GHC.LanguageExtensions.Type (Extension)
import Stan.Core.ModuleName (ModuleName)
import Stan.Observation (Observations)
import qualified Data.Set as Set
data FileInfo = FileInfo
{ fileInfoPath :: !FilePath
, fileInfoModuleName :: !ModuleName
, fileInfoLoc :: !Int
, fileInfoCabalExtensions :: !(Either ExtensionsError ParsedExtensions)
, fileInfoExtensions :: !(Either ExtensionsError ParsedExtensions)
, fileInfoMergedExtensions :: !ExtensionsResult
, fileInfoObservations :: !Observations
} deriving stock (Show, Eq)
instance ToJSON FileInfo where
toJSON FileInfo{..} = object
[ "path" .= toText fileInfoPath
, "moduleName" .= fileInfoModuleName
, "loc" .= fileInfoLoc
, "cabalExtensions" .= extensionsToText fileInfoCabalExtensions
, "extensions" .= extensionsToText fileInfoExtensions
, "observations" .= toList fileInfoObservations
]
type FileMap = Map FilePath FileInfo
extensionsToText :: Either ExtensionsError ParsedExtensions -> [Text]
extensionsToText = \case
Left _ -> ["Unable to extract extensions"]
Right ParsedExtensions{..} ->
let exts = map showOnOffExtension parsedExtensionsAll in
case parsedExtensionsSafe of
Just s -> show s : exts
Nothing -> exts
isExtensionDisabled :: Extension -> ExtensionsResult -> Bool
isExtensionDisabled ext = \case
Right Extensions{..} ->
Set.notMember (On ext) extensionsAll
|| Set.member (Off ext) extensionsAll
|
14de0a86ddbd6290ce9acf14a820c82a52eb8c6295f7511ff40d1b6579497f0f | dbuenzli/rresult | rresult.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2015 The rresult programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2015 The rresult programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
type ('a, 'b) result = ('a, 'b) Stdlib.result = Ok of 'a | Error of 'b
module R = struct
let err_error = "result value is (Error _)"
let err_ok = "result value is (Ok _)"
(* Results *)
type ('a, 'b) t = ('a, 'b) result
let ok v = Ok v
let error e = Error e
let get_ok = function Ok v -> v | Error _ -> invalid_arg err_error
let get_error = function Error e -> e | Ok _ -> invalid_arg err_ok
let reword_error reword = function
| Ok _ as r -> r
| Error e -> Error (reword e)
let return = ok
let fail = error
(* Composing results *)
let bind v f = match v with Ok v -> f v | Error _ as e -> e
let map f v = match v with Ok v -> Ok (f v) | Error _ as e -> e
let join r = match r with Ok v -> v | Error _ as e -> e
let ( >>= ) = bind
let ( >>| ) v f = match v with Ok v -> Ok (f v) | Error _ as e -> e
module Infix = struct
let ( >>= ) = ( >>= )
let ( >>| ) = ( >>| )
end
(* Error messages *)
let pp_lines ppf s = (* hints new lines *)
let left = ref 0 and right = ref 0 and len = String.length s in
let flush () =
Format.pp_print_string ppf (String.sub s !left (!right - !left));
incr right; left := !right;
in
while (!right <> len) do
if s.[!right] = '\n' then (flush (); Format.pp_force_newline ppf ()) else
incr right;
done;
if !left <> len then flush ()
type msg = [ `Msg of string ]
let msg s = `Msg s
let msgf fmt =
let kmsg _ = `Msg (Format.flush_str_formatter ()) in
Format.kfprintf kmsg Format.str_formatter fmt
let pp_msg ppf (`Msg msg) = pp_lines ppf msg
let error_msg s = Error (`Msg s)
let error_msgf fmt =
let kerr _ = Error (`Msg (Format.flush_str_formatter ())) in
Format.kfprintf kerr Format.str_formatter fmt
let reword_error_msg ?(replace = false) reword = function
| Ok _ as r -> r
| Error (`Msg e) ->
let (`Msg e' as v) = reword e in
if replace then Error v else error_msgf "%s\n%s" e e'
let error_to_msg ~pp_error = function
| Ok _ as r -> r
| Error e -> error_msgf "%a" pp_error e
let error_msg_to_invalid_arg = function
| Ok v -> v
| Error (`Msg m) -> invalid_arg m
let open_error_msg = function Ok _ as r -> r | Error (`Msg _) as r -> r
let failwith_error_msg = function Ok v -> v | Error (`Msg m) -> failwith m
(* Trapping unexpected exceptions *)
type exn_trap = [ `Exn_trap of exn * Printexc.raw_backtrace ]
let pp_exn_trap ppf (`Exn_trap (exn, bt)) =
Format.fprintf ppf "%s@\n" (Printexc.to_string exn);
pp_lines ppf (Printexc.raw_backtrace_to_string bt)
let trap_exn f v = try Ok (f v) with
| e ->
let bt = Printexc.get_raw_backtrace () in
Error (`Exn_trap (e, bt))
let error_exn_trap_to_msg = function
| Ok _ as r -> r
| Error trap ->
error_msgf "Unexpected exception:@\n%a" pp_exn_trap trap
let open_error_exn_trap = function
| Ok _ as r -> r | Error (`Exn_trap _) as r -> r
(* Pretty-printing *)
let pp ~ok ~error ppf = function Ok v -> ok ppf v | Error e -> error ppf e
let dump ~ok ~error ppf = function
| Ok v -> Format.fprintf ppf "@[<2>Ok@ @[%a@]@]" ok v
| Error e -> Format.fprintf ppf "@[<2>Error@ @[%a@]@]" error e
(* Predicates *)
let is_ok = function Ok _ -> true | Error _ -> false
let is_error = function Ok _ -> false | Error _ -> true
let equal ~ok ~error r r' = match r, r' with
| Ok v, Ok v' -> ok v v'
| Error e, Error e' -> error e e'
| _ -> false
let compare ~ok ~error r r' = match r, r' with
| Ok v, Ok v' -> ok v v'
| Error v, Error v' -> error v v'
| Ok _, Error _ -> -1
| Error _, Ok _ -> 1
(* Converting *)
let to_option = function Ok v -> Some v | Error e -> None
let of_option ~none = function None -> none () | Some v -> Ok v
let to_presult = function Ok v -> `Ok v | Error e -> `Error e
let of_presult = function `Ok v -> Ok v | `Error e -> Error e
(* Ignoring errors *)
let ignore_error ~use = function Ok v -> v | Error e -> use e
let kignore_error ~use = function Ok _ as r -> r | Error e -> use e
end
include R.Infix
---------------------------------------------------------------------------
Copyright ( c ) 2015 The rresult programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2015 The rresult programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/rresult/5324558067a391bf8827ee76d413399887030c2f/src/rresult.ml | ocaml | Results
Composing results
Error messages
hints new lines
Trapping unexpected exceptions
Pretty-printing
Predicates
Converting
Ignoring errors | ---------------------------------------------------------------------------
Copyright ( c ) 2015 The rresult programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2015 The rresult programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
type ('a, 'b) result = ('a, 'b) Stdlib.result = Ok of 'a | Error of 'b
module R = struct
let err_error = "result value is (Error _)"
let err_ok = "result value is (Ok _)"
type ('a, 'b) t = ('a, 'b) result
let ok v = Ok v
let error e = Error e
let get_ok = function Ok v -> v | Error _ -> invalid_arg err_error
let get_error = function Error e -> e | Ok _ -> invalid_arg err_ok
let reword_error reword = function
| Ok _ as r -> r
| Error e -> Error (reword e)
let return = ok
let fail = error
let bind v f = match v with Ok v -> f v | Error _ as e -> e
let map f v = match v with Ok v -> Ok (f v) | Error _ as e -> e
let join r = match r with Ok v -> v | Error _ as e -> e
let ( >>= ) = bind
let ( >>| ) v f = match v with Ok v -> Ok (f v) | Error _ as e -> e
module Infix = struct
let ( >>= ) = ( >>= )
let ( >>| ) = ( >>| )
end
let left = ref 0 and right = ref 0 and len = String.length s in
let flush () =
Format.pp_print_string ppf (String.sub s !left (!right - !left));
incr right; left := !right;
in
while (!right <> len) do
if s.[!right] = '\n' then (flush (); Format.pp_force_newline ppf ()) else
incr right;
done;
if !left <> len then flush ()
type msg = [ `Msg of string ]
let msg s = `Msg s
let msgf fmt =
let kmsg _ = `Msg (Format.flush_str_formatter ()) in
Format.kfprintf kmsg Format.str_formatter fmt
let pp_msg ppf (`Msg msg) = pp_lines ppf msg
let error_msg s = Error (`Msg s)
let error_msgf fmt =
let kerr _ = Error (`Msg (Format.flush_str_formatter ())) in
Format.kfprintf kerr Format.str_formatter fmt
let reword_error_msg ?(replace = false) reword = function
| Ok _ as r -> r
| Error (`Msg e) ->
let (`Msg e' as v) = reword e in
if replace then Error v else error_msgf "%s\n%s" e e'
let error_to_msg ~pp_error = function
| Ok _ as r -> r
| Error e -> error_msgf "%a" pp_error e
let error_msg_to_invalid_arg = function
| Ok v -> v
| Error (`Msg m) -> invalid_arg m
let open_error_msg = function Ok _ as r -> r | Error (`Msg _) as r -> r
let failwith_error_msg = function Ok v -> v | Error (`Msg m) -> failwith m
type exn_trap = [ `Exn_trap of exn * Printexc.raw_backtrace ]
let pp_exn_trap ppf (`Exn_trap (exn, bt)) =
Format.fprintf ppf "%s@\n" (Printexc.to_string exn);
pp_lines ppf (Printexc.raw_backtrace_to_string bt)
let trap_exn f v = try Ok (f v) with
| e ->
let bt = Printexc.get_raw_backtrace () in
Error (`Exn_trap (e, bt))
let error_exn_trap_to_msg = function
| Ok _ as r -> r
| Error trap ->
error_msgf "Unexpected exception:@\n%a" pp_exn_trap trap
let open_error_exn_trap = function
| Ok _ as r -> r | Error (`Exn_trap _) as r -> r
let pp ~ok ~error ppf = function Ok v -> ok ppf v | Error e -> error ppf e
let dump ~ok ~error ppf = function
| Ok v -> Format.fprintf ppf "@[<2>Ok@ @[%a@]@]" ok v
| Error e -> Format.fprintf ppf "@[<2>Error@ @[%a@]@]" error e
let is_ok = function Ok _ -> true | Error _ -> false
let is_error = function Ok _ -> false | Error _ -> true
let equal ~ok ~error r r' = match r, r' with
| Ok v, Ok v' -> ok v v'
| Error e, Error e' -> error e e'
| _ -> false
let compare ~ok ~error r r' = match r, r' with
| Ok v, Ok v' -> ok v v'
| Error v, Error v' -> error v v'
| Ok _, Error _ -> -1
| Error _, Ok _ -> 1
let to_option = function Ok v -> Some v | Error e -> None
let of_option ~none = function None -> none () | Some v -> Ok v
let to_presult = function Ok v -> `Ok v | Error e -> `Error e
let of_presult = function `Ok v -> Ok v | `Error e -> Error e
let ignore_error ~use = function Ok v -> v | Error e -> use e
let kignore_error ~use = function Ok _ as r -> r | Error e -> use e
end
include R.Infix
---------------------------------------------------------------------------
Copyright ( c ) 2015 The rresult programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2015 The rresult programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
3d32a7071222a0e1c31cfbeeb73dd121dba2844073e12efd30500eb746e82e96 | byorgey/AoC | 06.hs | import Control.Arrow
import Data.List
import Data.Ord
main = interact
( (++"\n")
. map pickCommon
. transpose
. lines
)
For part 1 , use
pickCommon = fst . minimumBy (comparing snd) . map (head &&& length) . group . sort
| null | https://raw.githubusercontent.com/byorgey/AoC/30eb51eb41af9ca86b05de598a3a96d25bd428e3/2016/06/06.hs | haskell | import Control.Arrow
import Data.List
import Data.Ord
main = interact
( (++"\n")
. map pickCommon
. transpose
. lines
)
For part 1 , use
pickCommon = fst . minimumBy (comparing snd) . map (head &&& length) . group . sort
| |
03e9cebdd222401b54f0279d204d532e07da4b0a74e7e1d2f2133f42710b9ffe | jayrbolton/coursework | 2.6.hs | -- J Bolton
Assignment 6 , quarter 2
import Control.Monad.State
import Control.Monad.Writer
import Data.List (unfoldr)
1 .
fib n = take (n+1) $ f 0 1 where f y z = y : f z (y+z)
fib' n = take (n+1) $ unfoldr (\(a,b) -> Just(a,(b,a+b))) (0,1) --sweet!
2 .
fib'' n = 0:[evalState (f x) (0,1) | x <- [0..n-1]]
where f c = get >>= \(x,y) ->
if (c > 0) then (put (y,x+y) >> (f (c-1)))
else return y
fib''' n = fst $ execState (replicateM n $
get >>= \(x,y) ->
put (y,x+y)) (0,1)
test_fib = let (x,y,z) = (fib 999, fib' 999, fib'' 999)
in x == y && x == z && y == z
takes a second to run ...
3 .
-- I really want to try heapsort sometime (I've never done it), but don't think I have time
qsort_count ls = qsort 0 ls
qsort n [] = (n,[])
qsort n (x:xs) = let l0 = filter (<x) xs
l1 = filter (>=x) xs
c0 = n+(length (xs))*2
(c1, l2) = qsort 0 l0
(c2, l3) = qsort 0 l1
in (c0+c1+c2, l2 ++ [x] ++ l3)
-- Wow, that was much more of a brain bender than I expected
4 .
qsortM n [] = return (n,[])
qsortM n (x:xs) = qsortM 0 (filter (<x) xs) >>= \(c1, l1) ->
qsortM 0 (filter (>=x) xs) >>= \(c2, l2) ->
let c0 = n+(length (xs))*2
in return (c0+c1+c2, l1++[x]++l2)
| null | https://raw.githubusercontent.com/jayrbolton/coursework/f0da276527d42a6751fb8d29c76de35ce358fe65/computability_and_formal_languages/Haskell/hws/q2/2.6.hs | haskell | J Bolton
sweet!
I really want to try heapsort sometime (I've never done it), but don't think I have time
Wow, that was much more of a brain bender than I expected | Assignment 6 , quarter 2
import Control.Monad.State
import Control.Monad.Writer
import Data.List (unfoldr)
1 .
fib n = take (n+1) $ f 0 1 where f y z = y : f z (y+z)
2 .
fib'' n = 0:[evalState (f x) (0,1) | x <- [0..n-1]]
where f c = get >>= \(x,y) ->
if (c > 0) then (put (y,x+y) >> (f (c-1)))
else return y
fib''' n = fst $ execState (replicateM n $
get >>= \(x,y) ->
put (y,x+y)) (0,1)
test_fib = let (x,y,z) = (fib 999, fib' 999, fib'' 999)
in x == y && x == z && y == z
takes a second to run ...
3 .
qsort_count ls = qsort 0 ls
qsort n [] = (n,[])
qsort n (x:xs) = let l0 = filter (<x) xs
l1 = filter (>=x) xs
c0 = n+(length (xs))*2
(c1, l2) = qsort 0 l0
(c2, l3) = qsort 0 l1
in (c0+c1+c2, l2 ++ [x] ++ l3)
4 .
qsortM n [] = return (n,[])
qsortM n (x:xs) = qsortM 0 (filter (<x) xs) >>= \(c1, l1) ->
qsortM 0 (filter (>=x) xs) >>= \(c2, l2) ->
let c0 = n+(length (xs))*2
in return (c0+c1+c2, l1++[x]++l2)
|
6cf4f4bd4ef6d92f088298df79fbabd40581a3977d9dd7b684905005c5be08fd | triffon/fp-2019-20 | get last member.rkt | (define (list-ref2 list i)
(cond
((< i 0) "not a valid i")
(else (if (= i 0) (car list)
(list-ref2 (cdr list) (- i 1))))))
(define (last xs)
(list-ref2 xs (- (length xs) 1)))
(last '(5 9 2))
(last '(1 8 6 2 3))
(last '(1)) | null | https://raw.githubusercontent.com/triffon/fp-2019-20/7efb13ff4de3ea13baa2c5c59eb57341fac15641/exercises/computer-science-3/exercises/04.lists/solutions/get%20last%20member.rkt | racket | (define (list-ref2 list i)
(cond
((< i 0) "not a valid i")
(else (if (= i 0) (car list)
(list-ref2 (cdr list) (- i 1))))))
(define (last xs)
(list-ref2 xs (- (length xs) 1)))
(last '(5 9 2))
(last '(1 8 6 2 3))
(last '(1)) | |
1addeeaf06c6579cb5f7c4c3e9b740d9c62271c2184d077de30689c2134b1fa1 | KestrelInstitute/Specware | Tests.lisp | (test-directories ".")
(test
("Bug 0107 : Bogus Nil prints as []"
:show "BogusNil"
:output '(";;; Elaborating spec at $TESTDIR/BogusNil"
(:optional "")
"spec"
(:optional "")
"type NotList(a) = | Cons a * NotList(a) | Nil"
(:optional "")
"op bogus_nil: NotList(Nat) = Nil"
(:optional "")
"op bogus_cons: NotList(Nat) = 4 :: 5 :: 6 :: bogus_nil"
(:optional "")
"op true_nil: List(Nat) = []"
(:optional "")
"op true_cons: List(Nat) = [1, 2, 3]"
(:alternatives "endspec" "end-spec")
(:optional "")
(:optional "")))
)
| null | https://raw.githubusercontent.com/KestrelInstitute/Specware/2be6411c55f26432bf5c9e2f7778128898220c24/TestSuite/Bugs/Bug_0107/Tests.lisp | lisp | (test-directories ".")
(test
("Bug 0107 : Bogus Nil prints as []"
:show "BogusNil"
:output '(";;; Elaborating spec at $TESTDIR/BogusNil"
(:optional "")
"spec"
(:optional "")
"type NotList(a) = | Cons a * NotList(a) | Nil"
(:optional "")
"op bogus_nil: NotList(Nat) = Nil"
(:optional "")
"op bogus_cons: NotList(Nat) = 4 :: 5 :: 6 :: bogus_nil"
(:optional "")
"op true_nil: List(Nat) = []"
(:optional "")
"op true_cons: List(Nat) = [1, 2, 3]"
(:alternatives "endspec" "end-spec")
(:optional "")
(:optional "")))
)
| |
3f6ec0c66748748ce9a3ff56f40a6f28b0f751e03e69119f093cbbb8a380ca97 | zelark/AoC-2020 | day_01.clj | (ns zelark.aoc-2020.day-01
(:require [clojure.java.io :as io]
[clojure.string :as str]))
;; --- Day 1: Report Repair ---
;;
(def input (slurp (io/resource "input_01.txt")))
(defn parse-input [input]
(->> input (str/split-lines) (map #(Long/parseLong %))))
(defn part1 [numbers target]
(first
(for [[a & a-rest] (iterate next numbers) :while a
b a-rest :when (== (+ a b) target)]
(* a b))))
(defn part2 [numbers target]
(first
(for [[a & a-rest] (iterate next numbers) :while a
[b & b-rest] (iterate next a-rest) :while b
c b-rest :when (== (+ a b c) target)]
(* a b c))))
440979
(part2 (parse-input input) 2020) ; 82498112
| null | https://raw.githubusercontent.com/zelark/AoC-2020/5417c3514889eb02efc23f6be7d69e29fdfa0376/src/zelark/aoc_2020/day_01.clj | clojure | --- Day 1: Report Repair ---
82498112 | (ns zelark.aoc-2020.day-01
(:require [clojure.java.io :as io]
[clojure.string :as str]))
(def input (slurp (io/resource "input_01.txt")))
(defn parse-input [input]
(->> input (str/split-lines) (map #(Long/parseLong %))))
(defn part1 [numbers target]
(first
(for [[a & a-rest] (iterate next numbers) :while a
b a-rest :when (== (+ a b) target)]
(* a b))))
(defn part2 [numbers target]
(first
(for [[a & a-rest] (iterate next numbers) :while a
[b & b-rest] (iterate next a-rest) :while b
c b-rest :when (== (+ a b c) target)]
(* a b c))))
440979
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.