_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7 values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
97bad2bf405a24d7f663f2705394537fe3fce74e6abb9b89e2e8d3c09f408ddd | bgusach/exercises-htdp2e | ex-252.rkt | #lang htdp/isl
(require 2htdp/image)
(require test-engine/racket-tests)
# # # Constants
(define emt (empty-scene 100 100))
(define dot (circle 3 "solid" "red"))
# # # Data Definitions
# # # Functions
; [List-of Number] -> Number
(check-expect (product '(1 2 3 4)) (* 1 2 3 4))
(define (product l)
(fold2 l * 1)
)
; [List-of Posn] -> Image
; Places a dot for each posn on top of an empty image
(check-expect (image* '()) emt)
(check-expect
(image* (list (make-posn 0 0) (make-posn 2 3)))
(place-dot (make-posn 2 3) (place-dot (make-posn 0 0) emt))
)
(define (image* l)
(fold2 l place-dot emt)
)
; [List-of A] [A B -> B] -> B
(define (fold2 items fold-fn acc)
(cond
[(empty? items) acc]
[else
(fold2
(rest items)
fold-fn
(fold-fn (first items) acc)
)]))
; Posn Image -> Image
(define (place-dot p img)
(place-image
dot
(posn-x p) (posn-y p)
img
))
(test)
| null | https://raw.githubusercontent.com/bgusach/exercises-htdp2e/c4fd33f28fb0427862a2777a1fde8bf6432a7690/3-abstraction/ex-252.rkt | racket | [List-of Number] -> Number
[List-of Posn] -> Image
Places a dot for each posn on top of an empty image
[List-of A] [A B -> B] -> B
Posn Image -> Image | #lang htdp/isl
(require 2htdp/image)
(require test-engine/racket-tests)
# # # Constants
(define emt (empty-scene 100 100))
(define dot (circle 3 "solid" "red"))
# # # Data Definitions
# # # Functions
(check-expect (product '(1 2 3 4)) (* 1 2 3 4))
(define (product l)
(fold2 l * 1)
)
(check-expect (image* '()) emt)
(check-expect
(image* (list (make-posn 0 0) (make-posn 2 3)))
(place-dot (make-posn 2 3) (place-dot (make-posn 0 0) emt))
)
(define (image* l)
(fold2 l place-dot emt)
)
(define (fold2 items fold-fn acc)
(cond
[(empty? items) acc]
[else
(fold2
(rest items)
fold-fn
(fold-fn (first items) acc)
)]))
(define (place-dot p img)
(place-image
dot
(posn-x p) (posn-y p)
img
))
(test)
|
4a35fa51c7f1f6a9da537f5f32bd12db895a89025e27ea9d898bf01ce203b5fd | janestreet/merlin-jst | pprintast.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, OCamlPro
(* Fabrice Le Fessant, INRIA Saclay *)
, University of Pennsylvania
(* *)
Copyright 2007 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
Original Code from Ber - metaocaml , modified for 3.12.0 and fixed
(* Printing code expressions *)
Authors : ,
Extensive Rewrite : : University of Pennsylvania
TODO more fine - grained precedence pretty - printing
open Asttypes
open Format
open Location
open Longident
open Parsetree
open Ast_helper
let prefix_symbols = [ '!'; '?'; '~' ] ;;
let infix_symbols = [ '='; '<'; '>'; '@'; '^'; '|'; '&'; '+'; '-'; '*'; '/';
'$'; '%'; '#' ]
(* type fixity = Infix| Prefix *)
let special_infix_strings =
["asr"; "land"; "lor"; "lsl"; "lsr"; "lxor"; "mod"; "or"; ":="; "!="; "::" ]
let letop s =
String.length s > 3
&& s.[0] = 'l'
&& s.[1] = 'e'
&& s.[2] = 't'
&& List.mem s.[3] infix_symbols
let andop s =
String.length s > 3
&& s.[0] = 'a'
&& s.[1] = 'n'
&& s.[2] = 'd'
&& List.mem s.[3] infix_symbols
determines if the string is an infix string .
checks backwards , first allowing a renaming postfix ( " _ 102 " ) which
may have resulted from - > Texp - > translation , then checking
if all the characters in the beginning of the string are valid infix
characters .
checks backwards, first allowing a renaming postfix ("_102") which
may have resulted from Pexp -> Texp -> Pexp translation, then checking
if all the characters in the beginning of the string are valid infix
characters. *)
let fixity_of_string = function
| "" -> `Normal
| s when List.mem s special_infix_strings -> `Infix s
| s when List.mem s.[0] infix_symbols -> `Infix s
| s when List.mem s.[0] prefix_symbols -> `Prefix s
| s when s.[0] = '.' -> `Mixfix s
| s when letop s -> `Letop s
| s when andop s -> `Andop s
| _ -> `Normal
let view_fixity_of_exp = function
| {pexp_desc = Pexp_ident {txt=Lident l;_}; pexp_attributes = []} ->
fixity_of_string l
| _ -> `Normal
let is_infix = function `Infix _ -> true | _ -> false
let is_mixfix = function `Mixfix _ -> true | _ -> false
let is_kwdop = function `Letop _ | `Andop _ -> true | _ -> false
let first_is c str =
str <> "" && str.[0] = c
let last_is c str =
str <> "" && str.[String.length str - 1] = c
let first_is_in cs str =
str <> "" && List.mem str.[0] cs
(* which identifiers are in fact operators needing parentheses *)
let needs_parens txt =
let fix = fixity_of_string txt in
is_infix fix
|| is_mixfix fix
|| is_kwdop fix
|| first_is_in prefix_symbols txt
(* some infixes need spaces around parens to avoid clashes with comment
syntax *)
let needs_spaces txt =
first_is '*' txt || last_is '*' txt
let string_loc ppf x = fprintf ppf "%s" x.txt
(* add parentheses to binders when they are in fact infix or prefix operators *)
let protect_ident ppf txt =
let format : (_, _, _) format =
if not (needs_parens txt) then "%s"
else if needs_spaces txt then "(@;%s@;)"
else "(%s)"
in fprintf ppf format txt
let protect_longident ppf print_longident longprefix txt =
let format : (_, _, _) format =
if not (needs_parens txt) then "%a.%s"
else if needs_spaces txt then "%a.(@;%s@;)"
else "%a.(%s)" in
fprintf ppf format print_longident longprefix txt
let is_curry_attr attr =
match attr.attr_name.txt with
| "extension.curry" -> true
| _ -> false
let filter_curry_attrs attrs =
List.filter (fun attr -> not (is_curry_attr attr)) attrs
let has_non_curry_attr attrs =
List.exists (fun attr -> not (is_curry_attr attr)) attrs
let check_local_attr attrs =
match
List.partition (fun attr ->
attr.attr_name.txt = "extension.local") attrs
with
| [], _ -> attrs, false
| _::_, rest -> rest, true
let check_include_functor_attr attrs =
match
List.partition (fun attr ->
attr.attr_name.txt = "extension.include_functor") attrs
with
| [], _ -> attrs, false
| _::_, rest -> rest, true
type space_formatter = (unit, Format.formatter, unit) format
let override = function
| Override -> "!"
| Fresh -> ""
(* variance encoding: need to sync up with the [parser.mly] *)
let type_variance = function
| NoVariance -> ""
| Covariant -> "+"
| Contravariant -> "-"
let type_injectivity = function
| NoInjectivity -> ""
| Injective -> "!"
type construct =
[ `cons of expression list
| `list of expression list
| `nil
| `normal
| `simple of Longident.t
| `tuple ]
let view_expr x =
match x.pexp_desc with
| Pexp_construct ( {txt= Lident "()"; _},_) -> `tuple
| Pexp_construct ( {txt= Lident "[]";_},_) -> `nil
| Pexp_construct ( {txt= Lident"::";_},Some _) ->
let rec loop exp acc = match exp with
| {pexp_desc=Pexp_construct ({txt=Lident "[]";_},_);
pexp_attributes = []} ->
(List.rev acc,true)
| {pexp_desc=
Pexp_construct ({txt=Lident "::";_},
Some ({pexp_desc= Pexp_tuple([e1;e2]);
pexp_attributes = []}));
pexp_attributes = []}
->
loop e2 (e1::acc)
| e -> (List.rev (e::acc),false) in
let (ls,b) = loop x [] in
if b then
`list ls
else `cons ls
| Pexp_construct (x,None) -> `simple (x.txt)
| _ -> `normal
let is_simple_construct :construct -> bool = function
| `nil | `tuple | `list _ | `simple _ -> true
| `cons _ | `normal -> false
let pp = fprintf
type ctxt = {
pipe : bool;
semi : bool;
ifthenelse : bool;
}
let reset_ctxt = { pipe=false; semi=false; ifthenelse=false }
let under_pipe ctxt = { ctxt with pipe=true }
let under_semi ctxt = { ctxt with semi=true }
let under_ifthenelse ctxt = { ctxt with ifthenelse=true }
let reset_semi = with semi = false }
let reset_ifthenelse = with ifthenelse = false }
let = with pipe = false }
let reset_semi ctxt = { ctxt with semi=false }
let reset_ifthenelse ctxt = { ctxt with ifthenelse=false }
let reset_pipe ctxt = { ctxt with pipe=false }
*)
let list : 'a . ?sep:space_formatter -> ?first:space_formatter ->
?last:space_formatter -> (Format.formatter -> 'a -> unit) ->
Format.formatter -> 'a list -> unit
= fun ?sep ?first ?last fu f xs ->
let first = match first with Some x -> x |None -> ("": _ format6)
and last = match last with Some x -> x |None -> ("": _ format6)
and sep = match sep with Some x -> x |None -> ("@ ": _ format6) in
let aux f = function
| [] -> ()
| [x] -> fu f x
| xs ->
let rec loop f = function
| [x] -> fu f x
| x::xs -> fu f x; pp f sep; loop f xs;
| _ -> assert false in begin
pp f first; loop f xs; pp f last;
end in
aux f xs
let option : 'a. ?first:space_formatter -> ?last:space_formatter ->
(Format.formatter -> 'a -> unit) -> Format.formatter -> 'a option -> unit
= fun ?first ?last fu f a ->
let first = match first with Some x -> x | None -> ("": _ format6)
and last = match last with Some x -> x | None -> ("": _ format6) in
match a with
| None -> ()
| Some x -> pp f first; fu f x; pp f last
let paren: 'a . ?first:space_formatter -> ?last:space_formatter ->
bool -> (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a -> unit
= fun ?(first=("": _ format6)) ?(last=("": _ format6)) b fu f x ->
if b then (pp f "("; pp f first; fu f x; pp f last; pp f ")")
else fu f x
let rec longident f = function
| Lident s -> protect_ident f s
| Ldot(y,s) -> protect_longident f longident y s
| Lapply (y,s) ->
pp f "%a(%a)" longident y longident s
let longident_loc f x = pp f "%a" longident x.txt
let constant f = function
| Pconst_char i ->
pp f "%C" i
| Pconst_string (i, _, None) ->
pp f "%S" i
| Pconst_string (i, _, Some delim) ->
pp f "{%s|%s|%s}" delim i delim
| Pconst_integer (i, None) ->
paren (first_is '-' i) (fun f -> pp f "%s") f i
| Pconst_integer (i, Some m) ->
paren (first_is '-' i) (fun f (i, m) -> pp f "%s%c" i m) f (i,m)
| Pconst_float (i, None) ->
paren (first_is '-' i) (fun f -> pp f "%s") f i
| Pconst_float (i, Some m) ->
paren (first_is '-' i) (fun f (i,m) -> pp f "%s%c" i m) f (i,m)
(* trailing space*)
let mutable_flag f = function
| Immutable -> ()
| Mutable -> pp f "mutable@;"
let virtual_flag f = function
| Concrete -> ()
| Virtual -> pp f "virtual@;"
(* trailing space added *)
let rec_flag f rf =
match rf with
| Nonrecursive -> ()
| Recursive -> pp f "rec "
let nonrec_flag f rf =
match rf with
| Nonrecursive -> pp f "nonrec "
| Recursive -> ()
let direction_flag f = function
| Upto -> pp f "to@ "
| Downto -> pp f "downto@ "
let private_flag f = function
| Public -> ()
| Private -> pp f "private@ "
let iter_loc f ctxt {txt; loc = _} = f ctxt txt
let constant_string f s = pp f "%S" s
let tyvar ppf s =
if String.length s >= 2 && s.[1] = '\'' then
(* without the space, this would be parsed as
a character literal *)
Format.fprintf ppf "' %s" s
else
Format.fprintf ppf "'%s" s
let tyvar_loc f str = tyvar f str.txt
let string_quot f x = pp f "`%s" x
let maybe_local_type pty ctxt f c =
let cattrs, is_local = check_local_attr c.ptyp_attributes in
let c = { c with ptyp_attributes = cattrs } in
if is_local then
pp f "local_ %a" (pty ctxt) c
else
pty ctxt f c
let maybe_functor f has_functor_attr =
if has_functor_attr then pp f "@ functor" else ()
(* c ['a,'b] *)
let rec class_params_def ctxt f = function
| [] -> ()
| l ->
pp f "[%a] " (* space *)
(list (type_param ctxt) ~sep:",") l
and type_with_label ctxt f (label, c) =
match label with
| Nolabel -> maybe_local_type core_type1 ctxt f c (* otherwise parenthesize *)
| Labelled s -> pp f "%s:%a" s (maybe_local_type core_type1 ctxt) c
| Optional s -> pp f "?%s:%a" s (maybe_local_type core_type1 ctxt) c
and core_type ctxt f x =
let filtered_attrs = filter_curry_attrs x.ptyp_attributes in
if filtered_attrs <> [] then begin
pp f "((%a)%a)" (core_type ctxt) {x with ptyp_attributes=[]}
(attributes ctxt) filtered_attrs
end
else match x.ptyp_desc with
| Ptyp_arrow (l, ct1, ct2) ->
FIXME remove parens later
(type_with_label ctxt) (l,ct1) (return_type ctxt) ct2
| Ptyp_alias (ct, s) ->
pp f "@[<2>%a@;as@;%a@]" (core_type1 ctxt) ct tyvar s
| Ptyp_poly ([], ct) ->
core_type ctxt f ct
| Ptyp_poly (sl, ct) ->
pp f "@[<2>%a%a@]"
(fun f l -> match l with
| [] -> ()
| _ ->
pp f "%a@;.@;"
(list tyvar_loc ~sep:"@;") l)
sl (core_type ctxt) ct
| _ -> pp f "@[<2>%a@]" (core_type1 ctxt) x
and core_type1 ctxt f x =
if has_non_curry_attr x.ptyp_attributes then core_type ctxt f x
else match x.ptyp_desc with
| Ptyp_any -> pp f "_";
| Ptyp_var s -> tyvar f s;
| Ptyp_tuple l -> pp f "(%a)" (list (core_type1 ctxt) ~sep:"@;*@;") l
| Ptyp_constr (li, l) ->
pp f (* "%a%a@;" *) "%a%a"
(fun f l -> match l with
|[] -> ()
|[x]-> pp f "%a@;" (core_type1 ctxt) x
| _ -> list ~first:"(" ~last:")@;" (core_type ctxt) ~sep:",@;" f l)
l longident_loc li
| Ptyp_variant (l, closed, low) ->
let first_is_inherit = match l with
| {Parsetree.prf_desc = Rinherit _}::_ -> true
| _ -> false in
let type_variant_helper f x =
match x.prf_desc with
| Rtag (l, _, ctl) ->
pp f "@[<2>%a%a@;%a@]" (iter_loc string_quot) l
(fun f l -> match l with
|[] -> ()
| _ -> pp f "@;of@;%a"
(list (core_type ctxt) ~sep:"&") ctl) ctl
(attributes ctxt) x.prf_attributes
| Rinherit ct -> core_type ctxt f ct in
pp f "@[<2>[%a%a]@]"
(fun f l ->
match l, closed with
| [], Closed -> ()
| [], Open -> pp f ">" (* Cf #7200: print [>] correctly *)
| _ ->
pp f "%s@;%a"
(match (closed,low) with
| (Closed,None) -> if first_is_inherit then " |" else ""
| (Closed,Some _) -> "<" (* FIXME desugar the syntax sugar*)
| (Open,_) -> ">")
(list type_variant_helper ~sep:"@;<1 -2>| ") l) l
(fun f low -> match low with
|Some [] |None -> ()
|Some xs ->
pp f ">@ %a"
(list string_quot) xs) low
| Ptyp_object (l, o) ->
let core_field_type f x = match x.pof_desc with
| Otag (l, ct) ->
Cf # 7200
pp f "@[<hov2>%s: %a@ %a@ @]" l.txt
(core_type ctxt) ct (attributes ctxt) x.pof_attributes
| Oinherit ct ->
pp f "@[<hov2>%a@ @]" (core_type ctxt) ct
in
let field_var f = function
| Asttypes.Closed -> ()
| Asttypes.Open ->
match l with
| [] -> pp f ".."
| _ -> pp f " ;.."
in
pp f "@[<hov2><@ %a%a@ > @]"
(list core_field_type ~sep:";") l
Cf # 7200
FIXME
pp f "@[<hov2>%a#%a@]"
(list (core_type ctxt) ~sep:"," ~first:"(" ~last:")") l
longident_loc li
| Ptyp_package (lid, cstrs) ->
let aux f (s, ct) =
pp f "type %a@ =@ %a" longident_loc s (core_type ctxt) ct in
(match cstrs with
|[] -> pp f "@[<hov2>(module@ %a)@]" longident_loc lid
|_ ->
pp f "@[<hov2>(module@ %a@ with@ %a)@]" longident_loc lid
(list aux ~sep:"@ and@ ") cstrs)
| Ptyp_extension e -> extension ctxt f e
| _ -> paren true (core_type ctxt) f x
and return_type ctxt f x =
if x.ptyp_attributes <> [] then maybe_local_type core_type1 ctxt f x
else maybe_local_type core_type ctxt f x
(********************pattern********************)
be cautious when use [ pattern ] , [ ] is preferred
and pattern ctxt f x =
if x.ppat_attributes <> [] then begin
pp f "((%a)%a)" (pattern ctxt) {x with ppat_attributes=[]}
(attributes ctxt) x.ppat_attributes
end
else match x.ppat_desc with
| Ppat_alias (p, s) ->
pp f "@[<2>%a@;as@;%a@]" (pattern ctxt) p protect_ident s.txt
| _ -> pattern_or ctxt f x
and pattern_or ctxt f x =
let rec left_associative x acc = match x with
| {ppat_desc=Ppat_or (p1,p2); ppat_attributes = []} ->
left_associative p1 (p2 :: acc)
| x -> x :: acc
in
match left_associative x [] with
| [] -> assert false
| [x] -> pattern1 ctxt f x
| orpats ->
pp f "@[<hov0>%a@]" (list ~sep:"@ | " (pattern1 ctxt)) orpats
and pattern1 ctxt (f:Format.formatter) (x:pattern) : unit =
let rec pattern_list_helper f = function
| {ppat_desc =
Ppat_construct
({ txt = Lident("::") ;_},
Some ([], {ppat_desc = Ppat_tuple([pat1; pat2]);_}));
ppat_attributes = []}
->
pp f "%a::%a" (simple_pattern ctxt) pat1 pattern_list_helper pat2 (*RA*)
| p -> pattern1 ctxt f p
in
if x.ppat_attributes <> [] then pattern ctxt f x
else match x.ppat_desc with
| Ppat_variant (l, Some p) ->
pp f "@[<2>`%s@;%a@]" l (simple_pattern ctxt) p
| Ppat_construct (({txt=Lident("()"|"[]");_}), _) ->
simple_pattern ctxt f x
| Ppat_construct (({txt;_} as li), po) ->
FIXME The third field always false
if txt = Lident "::" then
pp f "%a" pattern_list_helper x
else
(match po with
| Some ([], x) ->
pp f "%a@;%a" longident_loc li (simple_pattern ctxt) x
| Some (vl, x) ->
pp f "%a@ (type %a)@;%a" longident_loc li
(list ~sep:"@ " string_loc) vl
(simple_pattern ctxt) x
| None -> pp f "%a" longident_loc li)
| _ -> simple_pattern ctxt f x
and simple_pattern ctxt (f:Format.formatter) (x:pattern) : unit =
if x.ppat_attributes <> [] then pattern ctxt f x
else match x.ppat_desc with
| Ppat_construct (({txt=Lident ("()"|"[]" as x);_}), None) ->
pp f "%s" x
| Ppat_any -> pp f "_";
| Ppat_var ({txt = txt;_}) -> protect_ident f txt
| Ppat_array l ->
pp f "@[<2>[|%a|]@]" (list (pattern1 ctxt) ~sep:";") l
| Ppat_unpack { txt = None } ->
pp f "(module@ _)@ "
| Ppat_unpack { txt = Some s } ->
pp f "(module@ %s)@ " s
| Ppat_type li ->
pp f "#%a" longident_loc li
| Ppat_record (l, closed) ->
let longident_x_pattern f (li, p) =
match (li,p) with
| ({txt=Lident s;_ },
{ppat_desc=Ppat_var {txt;_};
ppat_attributes=[]; _})
when s = txt ->
pp f "@[<2>%a@]" longident_loc li
| _ ->
pp f "@[<2>%a@;=@;%a@]" longident_loc li (pattern1 ctxt) p
in
begin match closed with
| Closed ->
pp f "@[<2>{@;%a@;}@]" (list longident_x_pattern ~sep:";@;") l
| _ ->
pp f "@[<2>{@;%a;_}@]" (list longident_x_pattern ~sep:";@;") l
end
| Ppat_tuple l ->
| Ppat_constant (c) -> pp f "%a" constant c
| Ppat_interval (c1, c2) -> pp f "%a..%a" constant c1 constant c2
| Ppat_variant (l,None) -> pp f "`%s" l
| Ppat_constraint (p, ct) ->
pp f "@[<2>(%a@;:@;%a)@]" (pattern1 ctxt) p (core_type ctxt) ct
| Ppat_lazy p ->
pp f "@[<2>(lazy@;%a)@]" (simple_pattern ctxt) p
| Ppat_exception p ->
pp f "@[<2>exception@;%a@]" (pattern1 ctxt) p
| Ppat_extension e -> extension ctxt f e
| Ppat_open (lid, p) ->
let with_paren =
match p.ppat_desc with
| Ppat_array _ | Ppat_record _
| Ppat_construct (({txt=Lident ("()"|"[]");_}), None) -> false
| _ -> true in
pp f "@[<2>%a.%a @]" longident_loc lid
(paren with_paren @@ pattern1 ctxt) p
| _ -> paren true (pattern ctxt) f x
and maybe_local_pat ctxt is_local f p =
if is_local then
pp f "(local_ %a)" (simple_pattern ctxt) p
else
pp f "%a" (simple_pattern ctxt) p
and label_exp ctxt f (l,opt,p) =
let pattrs, is_local = check_local_attr p.ppat_attributes in
let p = { p with ppat_attributes = pattrs } in
match l with
| Nolabel ->
(* single case pattern parens needed here *)
pp f "%a" (maybe_local_pat ctxt is_local) p
| Optional rest ->
begin match p with
| {ppat_desc = Ppat_var {txt;_}; ppat_attributes = []}
when txt = rest && not is_local ->
(match opt with
| Some o -> pp f "?(%s=@;%a)" rest (expression ctxt) o
| None -> pp f "?%s" rest)
| _ ->
(match opt with
| Some o ->
pp f "?%s:(%s%a=@;%a)"
rest
(if is_local then "local_ " else "")
(pattern1 ctxt) p (expression ctxt) o
| None -> pp f "?%s:%a" rest (maybe_local_pat ctxt is_local) p)
end
| Labelled l -> match p with
| {ppat_desc = Ppat_var {txt;_}; ppat_attributes = []}
when txt = l ->
if is_local then
pp f "~(local_ %s)" l
else
pp f "~%s" l
| _ -> pp f "~%s:%a" l (maybe_local_pat ctxt is_local) p
and sugar_expr ctxt f e =
if e.pexp_attributes <> [] then false
else match e.pexp_desc with
| Pexp_apply ({ pexp_desc = Pexp_ident {txt = id; _};
pexp_attributes=[]; _}, args)
when List.for_all (fun (lab, _) -> lab = Nolabel) args -> begin
let print_indexop a path_prefix assign left sep right print_index indices
rem_args =
let print_path ppf = function
| None -> ()
| Some m -> pp ppf ".%a" longident m in
match assign, rem_args with
| false, [] ->
pp f "@[%a%a%s%a%s@]"
(simple_expr ctxt) a print_path path_prefix
left (list ~sep print_index) indices right; true
| true, [v] ->
pp f "@[%a%a%s%a%s@ <-@;<1 2>%a@]"
(simple_expr ctxt) a print_path path_prefix
left (list ~sep print_index) indices right
(simple_expr ctxt) v; true
| _ -> false in
match id, List.map snd args with
| Lident "!", [e] ->
pp f "@[<hov>!%a@]" (simple_expr ctxt) e; true
| Ldot (path, ("get"|"set" as func)), a :: other_args -> begin
let assign = func = "set" in
let print = print_indexop a None assign in
match path, other_args with
| Lident "Array", i :: rest ->
print ".(" "" ")" (expression ctxt) [i] rest
| Lident "String", i :: rest ->
print ".[" "" "]" (expression ctxt) [i] rest
| Ldot (Lident "Bigarray", "Array1"), i1 :: rest ->
print ".{" "," "}" (simple_expr ctxt) [i1] rest
| Ldot (Lident "Bigarray", "Array2"), i1 :: i2 :: rest ->
print ".{" "," "}" (simple_expr ctxt) [i1; i2] rest
| Ldot (Lident "Bigarray", "Array3"), i1 :: i2 :: i3 :: rest ->
print ".{" "," "}" (simple_expr ctxt) [i1; i2; i3] rest
| Ldot (Lident "Bigarray", "Genarray"),
{pexp_desc = Pexp_array indexes; pexp_attributes = []} :: rest ->
print ".{" "," "}" (simple_expr ctxt) indexes rest
| _ -> false
end
| (Lident s | Ldot(_,s)) , a :: i :: rest
when first_is '.' s ->
(* extract operator:
assignment operators end with [right_bracket ^ "<-"],
access operators end with [right_bracket] directly
*)
let multi_indices = String.contains s ';' in
let i =
match i.pexp_desc with
| Pexp_array l when multi_indices -> l
| _ -> [ i ] in
let assign = last_is '-' s in
let kind =
(* extract the right end bracket *)
let n = String.length s in
if assign then s.[n - 3] else s.[n - 1] in
let left, right = match kind with
| ')' -> '(', ")"
| ']' -> '[', "]"
| '}' -> '{', "}"
| _ -> assert false in
let path_prefix = match id with
| Ldot(m,_) -> Some m
| _ -> None in
let left = String.sub s 0 (1+String.index s left) in
print_indexop a path_prefix assign left ";" right
(if multi_indices then expression ctxt else simple_expr ctxt)
i rest
| _ -> false
end
| _ -> false
and expression ctxt f x =
if x.pexp_attributes <> [] then
pp f "((%a)@,%a)" (expression ctxt) {x with pexp_attributes=[]}
(attributes ctxt) x.pexp_attributes
else match x.pexp_desc with
| Pexp_function _ | Pexp_fun _ | Pexp_match _ | Pexp_try _ | Pexp_sequence _
| Pexp_newtype _
when ctxt.pipe || ctxt.semi ->
paren true (expression reset_ctxt) f x
| Pexp_ifthenelse _ | Pexp_sequence _ when ctxt.ifthenelse ->
paren true (expression reset_ctxt) f x
| Pexp_let _ | Pexp_letmodule _ | Pexp_open _
| Pexp_letexception _ | Pexp_letop _
when ctxt.semi ->
paren true (expression reset_ctxt) f x
| Pexp_fun (l, e0, p, e) ->
pp f "@[<2>fun@;%a@;%a@]"
(label_exp ctxt) (l, e0, p)
(pp_print_pexp_function ctxt "->") e
| Pexp_newtype (lid, e) ->
pp f "@[<2>fun@;(type@;%s)@;%a@]" lid.txt
(pp_print_pexp_function ctxt "->") e
| Pexp_function l ->
pp f "@[<hv>function%a@]" (case_list ctxt) l
| Pexp_match (e, l) ->
pp f "@[<hv0>@[<hv0>@[<2>match %a@]@ with@]%a@]"
(expression reset_ctxt) e (case_list ctxt) l
| Pexp_try (e, l) ->
pp f "@[<0>@[<hv2>try@ %a@]@ @[<0>with%a@]@]"
(* "try@;@[<2>%a@]@\nwith@\n%a"*)
(expression reset_ctxt) e (case_list ctxt) l
| Pexp_let (rf, l, e) ->
(* pp f "@[<2>let %a%a in@;<1 -2>%a@]"
(*no indentation here, a new line*) *)
rec_flag rf
pp f "@[<2>%a in@;<1 -2>%a@]"
(bindings reset_ctxt) (rf,l)
(expression ctxt) e
| Pexp_apply
({ pexp_desc = Pexp_extension({txt = "extension.local"}, PStr []) },
[Nolabel, sbody]) ->
pp f "@[<2>local_ %a@]" (expression ctxt) sbody
| Pexp_apply (e, l) ->
begin if not (sugar_expr ctxt f x) then
match view_fixity_of_exp e with
| `Infix s ->
begin match l with
| [ (Nolabel, _) as arg1; (Nolabel, _) as arg2 ] ->
FIXME associativity label_x_expression_param
pp f "@[<2>%a@;%s@;%a@]"
(label_x_expression_param reset_ctxt) arg1 s
(label_x_expression_param ctxt) arg2
| _ ->
pp f "@[<2>%a %a@]"
(simple_expr ctxt) e
(list (label_x_expression_param ctxt)) l
end
| `Prefix s ->
let s =
if List.mem s ["~+";"~-";"~+.";"~-."] &&
(match l with
(* See #7200: avoid turning (~- 1) into (- 1) which is
parsed as an int literal *)
|[(_,{pexp_desc=Pexp_constant _})] -> false
| _ -> true)
then String.sub s 1 (String.length s -1)
else s in
begin match l with
| [(Nolabel, x)] ->
pp f "@[<2>%s@;%a@]" s (simple_expr ctxt) x
| _ ->
pp f "@[<2>%a %a@]" (simple_expr ctxt) e
(list (label_x_expression_param ctxt)) l
end
| _ ->
pp f "@[<hov2>%a@]" begin fun f (e,l) ->
pp f "%a@ %a" (expression2 ctxt) e
(list (label_x_expression_param reset_ctxt)) l
(* reset here only because [function,match,try,sequence]
are lower priority *)
end (e,l)
end
| Pexp_construct (li, Some eo)
when not (is_simple_construct (view_expr x))-> (* Not efficient FIXME*)
(match view_expr x with
| `cons ls -> list (simple_expr ctxt) f ls ~sep:"@;::@;"
| `normal ->
pp f "@[<2>%a@;%a@]" longident_loc li
(simple_expr ctxt) eo
| _ -> assert false)
| Pexp_setfield (e1, li, e2) ->
pp f "@[<2>%a.%a@ <-@ %a@]"
(simple_expr ctxt) e1 longident_loc li (simple_expr ctxt) e2
| Pexp_ifthenelse (e1, e2, eo) ->
(* @;@[<2>else@ %a@]@] *)
let fmt:(_,_,_)format ="@[<hv0>@[<2>if@ %a@]@;@[<2>then@ %a@]%a@]" in
let expression_under_ifthenelse = expression (under_ifthenelse ctxt) in
pp f fmt expression_under_ifthenelse e1 expression_under_ifthenelse e2
(fun f eo -> match eo with
| Some x ->
pp f "@;@[<2>else@;%a@]" (expression (under_semi ctxt)) x
| None -> () (* pp f "()" *)) eo
| Pexp_sequence _ ->
let rec sequence_helper acc = function
| {pexp_desc=Pexp_sequence(e1,e2); pexp_attributes = []} ->
sequence_helper (e1::acc) e2
| v -> List.rev (v::acc) in
let lst = sequence_helper [] x in
pp f "@[<hv>%a@]"
(list (expression (under_semi ctxt)) ~sep:";@;") lst
| Pexp_new (li) ->
pp f "@[<hov2>new@ %a@]" longident_loc li;
| Pexp_setinstvar (s, e) ->
pp f "@[<hov2>%s@ <-@ %a@]" s.txt (expression ctxt) e
FIXME
let string_x_expression f (s, e) =
pp f "@[<hov2>%s@ =@ %a@]" s.txt (expression ctxt) e in
pp f "@[<hov2>{<%a>}@]"
(list string_x_expression ~sep:";" ) l;
| Pexp_letmodule (s, me, e) ->
pp f "@[<hov2>let@ module@ %s@ =@ %a@ in@ %a@]"
(Option.value s.txt ~default:"_")
(module_expr reset_ctxt) me (expression ctxt) e
| Pexp_letexception (cd, e) ->
pp f "@[<hov2>let@ exception@ %a@ in@ %a@]"
(extension_constructor ctxt) cd
(expression ctxt) e
| Pexp_assert e ->
pp f "@[<hov2>assert@ %a@]" (simple_expr ctxt) e
| Pexp_lazy (e) ->
pp f "@[<hov2>lazy@ %a@]" (simple_expr ctxt) e
(* Pexp_poly: impossible but we should print it anyway, rather than
assert false *)
| Pexp_poly (e, None) ->
pp f "@[<hov2>!poly!@ %a@]" (simple_expr ctxt) e
| Pexp_poly (e, Some ct) ->
pp f "@[<hov2>(!poly!@ %a@ : %a)@]"
(simple_expr ctxt) e (core_type ctxt) ct
| Pexp_open (o, e) ->
pp f "@[<2>let open%s %a in@;%a@]"
(override o.popen_override) (module_expr ctxt) o.popen_expr
(expression ctxt) e
| Pexp_variant (l,Some eo) ->
pp f "@[<2>`%s@;%a@]" l (simple_expr ctxt) eo
| Pexp_letop {let_; ands; body} ->
pp f "@[<2>@[<v>%a@,%a@] in@;<1 -2>%a@]"
(binding_op ctxt) let_
(list ~sep:"@," (binding_op ctxt)) ands
(expression ctxt) body
| Pexp_extension e -> extension ctxt f e
| Pexp_unreachable -> pp f "."
| _ -> expression1 ctxt f x
and expression1 ctxt f x =
if x.pexp_attributes <> [] then expression ctxt f x
else match x.pexp_desc with
| Pexp_object cs -> pp f "%a" (class_structure ctxt) cs
| _ -> expression2 ctxt f x
(* used in [Pexp_apply] *)
and expression2 ctxt f x =
if x.pexp_attributes <> [] then expression ctxt f x
else match x.pexp_desc with
| Pexp_field (e, li) ->
pp f "@[<hov2>%a.%a@]" (simple_expr ctxt) e longident_loc li
| Pexp_send (e, s) -> pp f "@[<hov2>%a#%s@]" (simple_expr ctxt) e s.txt
| _ -> simple_expr ctxt f x
and simple_expr ctxt f x =
if x.pexp_attributes <> [] then expression ctxt f x
else match x.pexp_desc with
| Pexp_construct _ when is_simple_construct (view_expr x) ->
(match view_expr x with
| `nil -> pp f "[]"
| `tuple -> pp f "()"
| `list xs ->
pp f "@[<hv0>[%a]@]"
(list (expression (under_semi ctxt)) ~sep:";@;") xs
| `simple x -> longident f x
| _ -> assert false)
| Pexp_ident li ->
longident_loc f li
(* (match view_fixity_of_exp x with *)
(* |`Normal -> longident_loc f li *)
(* | `Prefix _ | `Infix _ -> pp f "( %a )" longident_loc li) *)
| Pexp_constant c -> constant f c;
| Pexp_pack me ->
pp f "(module@;%a)" (module_expr ctxt) me
| Pexp_tuple l ->
pp f "@[<hov2>(%a)@]" (list (simple_expr ctxt) ~sep:",@;") l
| Pexp_constraint (e, ct) ->
pp f "(%a : %a)" (expression ctxt) e (core_type ctxt) ct
| Pexp_coerce (e, cto1, ct) ->
pp f "(%a%a :> %a)" (expression ctxt) e
(option (core_type ctxt) ~first:" : " ~last:" ") cto1 (* no sep hint*)
(core_type ctxt) ct
| Pexp_variant (l, None) -> pp f "`%s" l
| Pexp_record (l, eo) ->
let longident_x_expression f ( li, e) =
match e with
| {pexp_desc=Pexp_ident {txt;_};
pexp_attributes=[]; _} when li.txt = txt ->
pp f "@[<hov2>%a@]" longident_loc li
| _ ->
pp f "@[<hov2>%a@;=@;%a@]" longident_loc li (simple_expr ctxt) e
in
pp f "@[<hv0>@[<hv2>{@;%a%a@]@;}@]"(* "@[<hov2>{%a%a}@]" *)
(option ~last:" with@;" (simple_expr ctxt)) eo
(list longident_x_expression ~sep:";@;") l
| Pexp_array (l) ->
pp f "@[<0>@[<2>[|%a|]@]@]"
(list (simple_expr (under_semi ctxt)) ~sep:";") l
| Pexp_while (e1, e2) ->
let fmt : (_,_,_) format = "@[<2>while@;%a@;do@;%a@;done@]" in
pp f fmt (expression ctxt) e1 (expression ctxt) e2
| Pexp_for (s, e1, e2, df, e3) ->
let fmt:(_,_,_)format =
"@[<hv0>@[<hv2>@[<2>for %a =@;%a@;%a%a@;do@]@;%a@]@;done@]" in
let expression = expression ctxt in
pp f fmt (pattern ctxt) s expression e1 direction_flag
df expression e2 expression e3
| _ -> paren true (expression ctxt) f x
and attributes ctxt f l =
List.iter (attribute ctxt f) l
and item_attributes ctxt f l =
List.iter (item_attribute ctxt f) l
and attribute ctxt f a =
pp f "@[<2>[@@%s@ %a]@]" a.attr_name.txt (payload ctxt) a.attr_payload
and item_attribute ctxt f a =
pp f "@[<2>[@@@@%s@ %a]@]" a.attr_name.txt (payload ctxt) a.attr_payload
and floating_attribute ctxt f a =
pp f "@[<2>[@@@@@@%s@ %a]@]" a.attr_name.txt (payload ctxt) a.attr_payload
and value_description ctxt f x =
(* note: value_description has an attribute field,
but they're already printed by the callers this method *)
pp f "@[<hov2>%a%a@]" (core_type ctxt) x.pval_type
(fun f x ->
if x.pval_prim <> []
then pp f "@ =@ %a" (list constant_string) x.pval_prim
) x
and extension ctxt f (s, e) =
pp f "@[<2>[%%%s@ %a]@]" s.txt (payload ctxt) e
and item_extension ctxt f (s, e) =
pp f "@[<2>[%%%%%s@ %a]@]" s.txt (payload ctxt) e
and exception_declaration ctxt f x =
pp f "@[<hov2>exception@ %a@]%a"
(extension_constructor ctxt) x.ptyexn_constructor
(item_attributes ctxt) x.ptyexn_attributes
and class_type_field ctxt f x =
match x.pctf_desc with
| Pctf_inherit (ct) ->
pp f "@[<2>inherit@ %a@]%a" (class_type ctxt) ct
(item_attributes ctxt) x.pctf_attributes
| Pctf_val (s, mf, vf, ct) ->
pp f "@[<2>val @ %a%a%s@ :@ %a@]%a"
mutable_flag mf virtual_flag vf s.txt (core_type ctxt) ct
(item_attributes ctxt) x.pctf_attributes
| Pctf_method (s, pf, vf, ct) ->
pp f "@[<2>method %a %a%s :@;%a@]%a"
private_flag pf virtual_flag vf s.txt (core_type ctxt) ct
(item_attributes ctxt) x.pctf_attributes
| Pctf_constraint (ct1, ct2) ->
pp f "@[<2>constraint@ %a@ =@ %a@]%a"
(core_type ctxt) ct1 (core_type ctxt) ct2
(item_attributes ctxt) x.pctf_attributes
| Pctf_attribute a -> floating_attribute ctxt f a
| Pctf_extension e ->
item_extension ctxt f e;
item_attributes ctxt f x.pctf_attributes
and class_signature ctxt f { pcsig_self = ct; pcsig_fields = l ;_} =
pp f "@[<hv0>@[<hv2>object@[<1>%a@]@ %a@]@ end@]"
(fun f -> function
{ptyp_desc=Ptyp_any; ptyp_attributes=[]; _} -> ()
| ct -> pp f " (%a)" (core_type ctxt) ct) ct
(list (class_type_field ctxt) ~sep:"@;") l
(* call [class_signature] called by [class_signature] *)
and class_type ctxt f x =
match x.pcty_desc with
| Pcty_signature cs ->
class_signature ctxt f cs;
attributes ctxt f x.pcty_attributes
| Pcty_constr (li, l) ->
pp f "%a%a%a"
(fun f l -> match l with
| [] -> ()
| _ -> pp f "[%a]@ " (list (core_type ctxt) ~sep:"," ) l) l
longident_loc li
(attributes ctxt) x.pcty_attributes
| Pcty_arrow (l, co, cl) ->
FIXME remove parens later
(type_with_label ctxt) (l,co)
(class_type ctxt) cl
| Pcty_extension e ->
extension ctxt f e;
attributes ctxt f x.pcty_attributes
| Pcty_open (o, e) ->
pp f "@[<2>let open%s %a in@;%a@]"
(override o.popen_override) longident_loc o.popen_expr
(class_type ctxt) e
(* [class type a = object end] *)
(* Print a group of class-type declarations: the first is introduced with
   ["class type"], the rest with ["and"]. An empty list prints nothing. *)
and class_type_declaration_list ctxt f l =
  let class_type_declaration kwd f x =
    let { pci_params=ls; pci_name={ txt; _ }; _ } = x in
    pp f "@[<2>%s %a%a%s@ =@ %a@]%a" kwd
      virtual_flag x.pci_virt
      (class_params_def ctxt) ls txt
      (class_type ctxt) x.pci_expr
      (item_attributes ctxt) x.pci_attributes
  in
  match l with
  | [] -> ()
  | [x] -> class_type_declaration "class type" f x
  | x :: xs ->
      pp f "@[<v>%a@,%a@]"
        (class_type_declaration "class type") x
        (list ~sep:"@," (class_type_declaration "and")) xs
(* Print one field of a class body ([object ... end]): inherit, val,
   method (virtual or concrete), constraint, initializer, floating
   attribute, or item extension. *)
and class_field ctxt f x =
  match x.pcf_desc with
  | Pcf_inherit (ovf, ce, so) ->
      pp f "@[<2>inherit@ %s@ %a%a@]%a" (override ovf)
        (class_expr ctxt) ce
        (fun f so -> match so with
           | None -> ();
           | Some (s) -> pp f "@ as %s" s.txt ) so
        (item_attributes ctxt) x.pcf_attributes
  | Pcf_val (s, mf, Cfk_concrete (ovf, e)) ->
      pp f "@[<2>val%s %a%s =@;%a@]%a" (override ovf)
        mutable_flag mf s.txt
        (expression ctxt) e
        (item_attributes ctxt) x.pcf_attributes
  | Pcf_method (s, pf, Cfk_virtual ct) ->
      pp f "@[<2>method virtual %a %s :@;%a@]%a"
        private_flag pf s.txt
        (core_type ctxt) ct
        (item_attributes ctxt) x.pcf_attributes
  | Pcf_val (s, mf, Cfk_virtual ct) ->
      pp f "@[<2>val virtual %a%s :@ %a@]%a"
        mutable_flag mf s.txt
        (core_type ctxt) ct
        (item_attributes ctxt) x.pcf_attributes
  | Pcf_method (s, pf, Cfk_concrete (ovf, e)) ->
      (* [bind e] prints [name = e] by reusing the let-binding printer on a
         synthetic value binding whose pattern is the method name. *)
      let bind e =
        binding ctxt f
          {pvb_pat=
             {ppat_desc=Ppat_var s;
              ppat_loc=Location.none;
              ppat_loc_stack=[];
              ppat_attributes=[]};
           pvb_expr=e;
           pvb_attributes=[];
           pvb_loc=Location.none;
          }
      in
      pp f "@[<2>method%s %a%a@]%a"
        (override ovf)
        private_flag pf
        (fun f -> function
           (* [Pexp_poly] wraps a method body with an optional polymorphic
              type annotation; only annotated bodies print [name : ty = e]. *)
           | {pexp_desc=Pexp_poly (e, Some ct); pexp_attributes=[]; _} ->
               pp f "%s :@;%a=@;%a"
                 s.txt (core_type ctxt) ct (expression ctxt) e
           | {pexp_desc=Pexp_poly (e, None); pexp_attributes=[]; _} ->
               bind e
           | _ -> bind e) e
        (item_attributes ctxt) x.pcf_attributes
  | Pcf_constraint (ct1, ct2) ->
      pp f "@[<2>constraint %a =@;%a@]%a"
        (core_type ctxt) ct1
        (core_type ctxt) ct2
        (item_attributes ctxt) x.pcf_attributes
  | Pcf_initializer (e) ->
      pp f "@[<2>initializer@ %a@]%a"
        (expression ctxt) e
        (item_attributes ctxt) x.pcf_attributes
  | Pcf_attribute a -> floating_attribute ctxt f a
  | Pcf_extension e ->
      item_extension ctxt f e;
      item_attributes ctxt f x.pcf_attributes
(* Print [object (self) ... end] for a class body. The self pattern is
   omitted for [_], printed without parens when it is a constraint pattern
   (which brings its own parens), and parenthesized otherwise. *)
and class_structure ctxt f { pcstr_self = p; pcstr_fields = l } =
  pp f "@[<hv0>@[<hv2>object%a@;%a@]@;end@]"
    (fun f p -> match p.ppat_desc with
       | Ppat_any -> ()
       | Ppat_constraint _ -> pp f " %a" (pattern ctxt) p
       | _ -> pp f " (%a)" (pattern ctxt) p) p
    (list (class_field ctxt)) l
(* Print a class expression. Attributes on the node force a parenthesized
   form [((ce) [@attr])].
   NOTE(review): the Pcl_apply format string was lost in a comment-stripping
   pass; restored from upstream pprintast (the parens work around MPR#7200). *)
and class_expr ctxt f x =
  if x.pcl_attributes <> [] then begin
    pp f "((%a)%a)" (class_expr ctxt) {x with pcl_attributes=[]}
      (attributes ctxt) x.pcl_attributes
  end else
    match x.pcl_desc with
    | Pcl_structure (cs) -> class_structure ctxt f cs
    | Pcl_fun (l, eo, p, e) ->
        pp f "fun@ %a@ ->@ %a"
          (label_exp ctxt) (l,eo,p)
          (class_expr ctxt) e
    | Pcl_let (rf, l, ce) ->
        pp f "%a@ in@ %a"
          (bindings ctxt) (rf,l)
          (class_expr ctxt) ce
    | Pcl_apply (ce, l) ->
        pp f "((%a)@ %a)" (* Cf: #7200 *)
          (class_expr ctxt) ce
          (list (label_x_expression_param ctxt)) l
    | Pcl_constr (li, l) ->
        (* type arguments, if any, precede the class path: [args] path *)
        pp f "%a%a"
          (fun f l-> if l <>[] then
              pp f "[%a]@ "
                (list (core_type ctxt) ~sep:",") l) l
          longident_loc li
    | Pcl_constraint (ce, ct) ->
        pp f "(%a@ :@ %a)"
          (class_expr ctxt) ce
          (class_type ctxt) ct
    | Pcl_extension e -> extension ctxt f e
    | Pcl_open (o, e) ->
        pp f "@[<2>let open%s %a in@;%a@]"
          (override o.popen_override) longident_loc o.popen_expr
          (class_expr ctxt) e
(* Print a module type at the lowest precedence level: functor arrows and
   [with] constraints. Everything else is delegated to [module_type1].
   Attributes on the node force a parenthesized form. *)
and module_type ctxt f x =
  if x.pmty_attributes <> [] then begin
    pp f "((%a)%a)" (module_type ctxt) {x with pmty_attributes=[]}
      (attributes ctxt) x.pmty_attributes
  end else
    match x.pmty_desc with
    | Pmty_functor (Unit, mt2) ->
        pp f "@[<hov2>functor () ->@ %a@]" (module_type ctxt) mt2
    | Pmty_functor (Named (s, mt1), mt2) ->
        begin match s.txt with
        | None ->
            (* anonymous parameter: print the arrow form [mt1 -> mt2] *)
            pp f "@[<hov2>%a@ ->@ %a@]"
              (module_type1 ctxt) mt1 (module_type ctxt) mt2
        | Some name ->
            pp f "@[<hov2>functor@ (%s@ :@ %a)@ ->@ %a@]" name
              (module_type ctxt) mt1 (module_type ctxt) mt2
        end
    | Pmty_with (mt, []) -> module_type ctxt f mt
    | Pmty_with (mt, l) ->
        pp f "@[<hov2>%a@ with@ %a@]"
          (module_type1 ctxt) mt
          (list (with_constraint ctxt) ~sep:"@ and@ ") l
    | _ -> module_type1 ctxt f x
(* Print one [with ...] constraint of a module type: a type or module
   equation ([=]) or a destructive substitution ([:=]), for types, modules,
   and module types. *)
and with_constraint ctxt f = function
  | Pwith_type (li, ({ptype_params= ls ;_} as td)) ->
      let ls = List.map fst ls in
      pp f "type@ %a %a =@ %a"
        (list (core_type ctxt) ~sep:"," ~first:"(" ~last:")")
        ls longident_loc li (type_declaration ctxt) td
  | Pwith_module (li, li2) ->
      pp f "module %a =@ %a" longident_loc li longident_loc li2;
  | Pwith_modtype (li, mty) ->
      pp f "module type %a =@ %a" longident_loc li (module_type ctxt) mty;
  | Pwith_typesubst (li, ({ptype_params=ls;_} as td)) ->
      let ls = List.map fst ls in
      pp f "type@ %a %a :=@ %a"
        (list (core_type ctxt) ~sep:"," ~first:"(" ~last:")")
        ls longident_loc li
        (type_declaration ctxt) td
  | Pwith_modsubst (li, li2) ->
      pp f "module %a :=@ %a" longident_loc li longident_loc li2
  | Pwith_modtypesubst (li, mty) ->
      pp f "module type %a :=@ %a" longident_loc li (module_type ctxt) mty;
(* Print a module type at the highest precedence level (atoms): identifiers,
   aliases, signatures, [module type of], extensions. Anything lower
   precedence is re-dispatched to [module_type] in parentheses. *)
and module_type1 ctxt f x =
  if x.pmty_attributes <> [] then module_type ctxt f x
  else match x.pmty_desc with
    | Pmty_ident li ->
        pp f "%a" longident_loc li;
    | Pmty_alias li ->
        pp f "(module %a)" longident_loc li;
    | Pmty_signature (s) ->
        pp f "@[<hv0>@[<hv2>sig@ %a@]@ end@]" (* "@[<hov>sig@ %a@ end@]" *)
          (list (signature_item ctxt)) s (* FIXME wrong indentation*)
    | Pmty_typeof me ->
        pp f "@[<hov2>module@ type@ of@ %a@]" (module_expr ctxt) me
    | Pmty_extension e -> extension ctxt f e
    | _ -> paren true (module_type ctxt) f x
and signature ctxt f x = list ~sep:"@\n" (signature_item ctxt) f x
(* Print one signature item: values/externals, type declarations and
   substitutions, extensions, exceptions, classes and class types, modules
   (including aliases, substitutions and recursive groups), module types,
   opens, includes, floating attributes, and item extensions. *)
and signature_item ctxt f x : unit =
  match x.psig_desc with
  | Psig_type (rf, l) ->
      type_def_list ctxt f (rf, true, l)
  | Psig_typesubst l ->
      (* Psig_typesubst is never recursive, but we specify [Recursive] here to
         avoid printing a [nonrec] flag, which would be rejected by the parser.
      *)
      type_def_list ctxt f (Recursive, false, l)
  | Psig_value vd ->
      (* a value with primitive names is an [external] declaration *)
      let intro = if vd.pval_prim = [] then "val" else "external" in
      pp f "@[<2>%s@ %a@ :@ %a@]%a" intro
        protect_ident vd.pval_name.txt
        (value_description ctxt) vd
        (item_attributes ctxt) vd.pval_attributes
  | Psig_typext te ->
      type_extension ctxt f te
  | Psig_exception ed ->
      exception_declaration ctxt f ed
  | Psig_class l ->
      let class_description kwd f ({pci_params=ls;pci_name={txt;_};_} as x) =
        pp f "@[<2>%s %a%a%s@;:@;%a@]%a" kwd
          virtual_flag x.pci_virt
          (class_params_def ctxt) ls txt
          (class_type ctxt) x.pci_expr
          (item_attributes ctxt) x.pci_attributes
      in begin
        match l with
        | [] -> ()
        | [x] -> class_description "class" f x
        | x :: xs ->
            pp f "@[<v>%a@,%a@]"
              (class_description "class") x
              (list ~sep:"@," (class_description "and")) xs
      end
  | Psig_module ({pmd_type={pmty_desc=Pmty_alias alias;
                            pmty_attributes=[]; _};_} as pmd) ->
      (* [module M = Path] alias form *)
      pp f "@[<hov>module@ %s@ =@ %a@]%a"
        (Option.value pmd.pmd_name.txt ~default:"_")
        longident_loc alias
        (item_attributes ctxt) pmd.pmd_attributes
  | Psig_module pmd ->
      pp f "@[<hov>module@ %s@ :@ %a@]%a"
        (Option.value pmd.pmd_name.txt ~default:"_")
        (module_type ctxt) pmd.pmd_type
        (item_attributes ctxt) pmd.pmd_attributes
  | Psig_modsubst pms ->
      pp f "@[<hov>module@ %s@ :=@ %a@]%a" pms.pms_name.txt
        longident_loc pms.pms_manifest
        (item_attributes ctxt) pms.pms_attributes
  | Psig_open od ->
      pp f "@[<hov2>open%s@ %a@]%a"
        (override od.popen_override)
        longident_loc od.popen_expr
        (item_attributes ctxt) od.popen_attributes
  | Psig_include incl ->
      (* Print "include functor" rather than attribute *)
      let attrs, incl_fun = check_include_functor_attr incl.pincl_attributes in
      pp f "@[<hov2>include%a@ %a@]%a"
        maybe_functor incl_fun
        (module_type ctxt) incl.pincl_mod
        (item_attributes ctxt) attrs
  | Psig_modtype {pmtd_name=s; pmtd_type=md; pmtd_attributes=attrs} ->
      pp f "@[<hov2>module@ type@ %s%a@]%a"
        s.txt
        (fun f md -> match md with
           | None -> ()
           | Some mt ->
               pp_print_space f () ;
               pp f "@ =@ %a" (module_type ctxt) mt
        ) md
        (item_attributes ctxt) attrs
  | Psig_modtypesubst {pmtd_name=s; pmtd_type=md; pmtd_attributes=attrs} ->
      let md = match md with
        | None -> assert false (* ast invariant *)
        | Some mt -> mt in
      pp f "@[<hov2>module@ type@ %s@ :=@ %a@]%a"
        s.txt (module_type ctxt) md
        (item_attributes ctxt) attrs
  | Psig_class_type (l) -> class_type_declaration_list ctxt f l
  | Psig_recmodule decls ->
      (* first declaration prints [module rec], the rest print [and] *)
      let rec string_x_module_type_list f ?(first=true) l =
        match l with
        | [] -> () ;
        | pmd :: tl ->
            if not first then
              pp f "@ @[<hov2>and@ %s:@ %a@]%a"
                (Option.value pmd.pmd_name.txt ~default:"_")
                (module_type1 ctxt) pmd.pmd_type
                (item_attributes ctxt) pmd.pmd_attributes
            else
              pp f "@[<hov2>module@ rec@ %s:@ %a@]%a"
                (Option.value pmd.pmd_name.txt ~default:"_")
                (module_type1 ctxt) pmd.pmd_type
                (item_attributes ctxt) pmd.pmd_attributes;
            string_x_module_type_list f ~first:false tl
      in
      string_x_module_type_list f decls
  | Psig_attribute a -> floating_attribute ctxt f a
  | Psig_extension(e, a) ->
      item_extension ctxt f e;
      item_attributes ctxt f a
(* Print a module expression: structure, constraint, path, functor,
   application, first-class-module unpack, or extension. Attributes on the
   node force a parenthesized form.
   NOTE(review): the stray [Cf: #7200] line after Pmod_apply had lost its
   comment markers (a syntax error); restored as the original comment. *)
and module_expr ctxt f x =
  if x.pmod_attributes <> [] then
    pp f "((%a)%a)" (module_expr ctxt) {x with pmod_attributes=[]}
      (attributes ctxt) x.pmod_attributes
  else match x.pmod_desc with
    | Pmod_structure (s) ->
        pp f "@[<hv2>struct@;@[<0>%a@]@;<1 -2>end@]"
          (list (structure_item ctxt) ~sep:"@\n") s;
    | Pmod_constraint (me, mt) ->
        pp f "@[<hov2>(%a@ :@ %a)@]"
          (module_expr ctxt) me
          (module_type ctxt) mt
    | Pmod_ident (li) ->
        pp f "%a" longident_loc li;
    | Pmod_functor (Unit, me) ->
        pp f "functor ()@;->@;%a" (module_expr ctxt) me
    | Pmod_functor (Named (s, mt), me) ->
        pp f "functor@ (%s@ :@ %a)@;->@;%a"
          (Option.value s.txt ~default:"_")
          (module_type ctxt) mt (module_expr ctxt) me
    | Pmod_apply (me1, me2) ->
        pp f "(%a)(%a)" (module_expr ctxt) me1 (module_expr ctxt) me2
        (* Cf: #7200 *)
    | Pmod_unpack e ->
        pp f "(val@ %a)" (expression ctxt) e
    | Pmod_extension e -> extension ctxt f e
and structure ctxt f x = list ~sep:"@\n" (structure_item ctxt) f x
(* Print the payload of an attribute/extension node: a structure (with a
   compact form for a single evaluated expression), a type ([: ty]),
   a signature ([: sig]), or a pattern with optional guard ([? pat when e]). *)
and payload ctxt f = function
  | PStr [{pstr_desc = Pstr_eval (e, attrs)}] ->
      pp f "@[<2>%a@]%a"
        (expression ctxt) e
        (item_attributes ctxt) attrs
  | PStr x -> structure ctxt f x
  | PTyp x -> pp f ":@ "; core_type ctxt f x
  | PSig x -> pp f ":@ "; signature ctxt f x
  | PPat (x, None) -> pp f "?@ "; pattern ctxt f x
  | PPat (x, Some e) ->
      pp f "?@ "; pattern ctxt f x;
      pp f " when "; expression ctxt f e
(* Print the tail of a curried function definition: remaining [fun]
   parameters and [(type t)] newtypes, then [sep] (e.g. "=" or "->")
   followed by the body expression. *)
and pp_print_pexp_function ctxt sep f x =
  (* do not print [@extension.local] on expressions *)
  let attrs, _ = check_local_attr x.pexp_attributes in
  let x = { x with pexp_attributes = attrs } in
  (* any remaining attribute stops parameter accumulation: print the body *)
  if x.pexp_attributes <> [] then pp f "%s@;%a" sep (expression ctxt) x
  else match x.pexp_desc with
    | Pexp_fun (label, eo, p, e) ->
        pp f "%a@ %a"
          (label_exp ctxt) (label,eo,p) (pp_print_pexp_function ctxt sep) e
    | Pexp_newtype (str,e) ->
        pp f "(type@ %s)@ %a" str.txt (pp_print_pexp_function ctxt sep) e
    | _ ->
        pp f "%s@;%a" sep (expression ctxt) x
(* transform [f = fun g h -> ..] to [f g h = ... ] could be improved *)
(* transform [f = fun g h -> ..] to [f g h = ... ] could be improved *)
(* Print one value binding [pat = expr], recognising two sugared forms:
   - polymorphic annotations  [let f : ty = e]
   - desugared GADT syntax    [let f : type a. ty = e]
   NOTE(review): the pattern line [ppat_attributes=[]} ->] in the
   Ppat_constraint case was lost in a comment-stripping pass; restored
   from upstream pprintast. *)
and binding ctxt f {pvb_pat=p; pvb_expr=x; _} =
  (* .pvb_attributes have already been printed by the caller, #bindings *)
  let tyvars_str tyvars = List.map (fun v -> v.txt) tyvars in
  (* Detect the parser's encoding of [let f : type a b. ty = e]:
     a Ptyp_poly-constrained pattern whose body is a chain of Pexp_newtype
     ending in a Pexp_constraint, with matching type variables. *)
  let is_desugared_gadt p e =
    let gadt_pattern =
      match p with
      | {ppat_desc=Ppat_constraint({ppat_desc=Ppat_var _} as pat,
                                   {ptyp_desc=Ptyp_poly (args_tyvars, rt)});
         ppat_attributes=[]}->
          Some (pat, args_tyvars, rt)
      | _ -> None in
    let rec gadt_exp tyvars e =
      match e with
      | {pexp_desc=Pexp_newtype (tyvar, e); pexp_attributes=[]} ->
          gadt_exp (tyvar :: tyvars) e
      | {pexp_desc=Pexp_constraint (e, ct); pexp_attributes=[]} ->
          Some (List.rev tyvars, e, ct)
      | _ -> None in
    let gadt_exp = gadt_exp [] e in
    match gadt_pattern, gadt_exp with
    | Some (p, pt_tyvars, pt_ct), Some (e_tyvars, e, e_ct)
      when tyvars_str pt_tyvars = tyvars_str e_tyvars ->
        let ety = Typ.varify_constructors e_tyvars e_ct in
        if ety = pt_ct then
          Some (p, pt_tyvars, e_ct, e) else None
    | _ -> None in
  if x.pexp_attributes <> []
  then
    match p with
    | {ppat_desc=Ppat_constraint({ppat_desc=Ppat_var _; _} as pat,
                                 ({ptyp_desc=Ptyp_poly _; _} as typ));
       ppat_attributes=[]; _} ->
        pp f "%a@;: %a@;=@;%a"
          (simple_pattern ctxt) pat (core_type ctxt) typ (expression ctxt) x
    | _ ->
        pp f "%a@;=@;%a" (pattern ctxt) p (expression ctxt) x
  else
    match is_desugared_gadt p x with
    | Some (p, [], ct, e) ->
        pp f "%a@;: %a@;=@;%a"
          (simple_pattern ctxt) p (core_type ctxt) ct (expression ctxt) e
    | Some (p, tyvars, ct, e) -> begin
        pp f "%a@;: type@;%a.@;%a@;=@;%a"
          (simple_pattern ctxt) p (list pp_print_string ~sep:"@;")
          (tyvars_str tyvars) (core_type ctxt) ct (expression ctxt) e
      end
    | None -> begin
        match p with
        | {ppat_desc=Ppat_constraint(p ,ty);
           ppat_attributes=[]} -> (* special case for the first *)
            begin match ty with
            | {ptyp_desc=Ptyp_poly _; ptyp_attributes=[]} ->
                pp f "%a@;:@;%a@;=@;%a" (simple_pattern ctxt) p
                  (core_type ctxt) ty (expression ctxt) x
            | _ ->
                pp f "(%a@;:@;%a)@;=@;%a" (simple_pattern ctxt) p
                  (core_type ctxt) ty (expression ctxt) x
            end
        | {ppat_desc=Ppat_var _; ppat_attributes=[]} ->
            (* [let f = fun a b -> e] printed as [let f a b = e] *)
            pp f "%a@ %a" (simple_pattern ctxt) p
              (pp_print_pexp_function ctxt "=") x
        | _ ->
            pp f "%a@;=@;%a" (pattern ctxt) p (expression ctxt) x
      end
(* [in] is not printed *)
(* [in] is not printed *)
(* Print a binding group [let [rec] b1 and b2 ...]. The encoding of
   [let local_ p = e] — an application of the ["extension.local"] extension
   node — is detected and printed back as the [local_] keyword. *)
and bindings ctxt f (rf,l) =
  (* NOTE: this local [binding] shadows the top-level [binding] printer,
     which it calls as [(binding ctxt)] after stripping local attributes. *)
  let binding kwd rf f x =
    let x, is_local =
      match x.pvb_expr.pexp_desc with
      | Pexp_apply
          ({ pexp_desc = Pexp_extension({txt = "extension.local"}, PStr []) },
           [Nolabel, sbody]) ->
          let sattrs, _ = check_local_attr sbody.pexp_attributes in
          let sbody = {sbody with pexp_attributes = sattrs} in
          let pattrs, _ = check_local_attr x.pvb_pat.ppat_attributes in
          let pat = {x.pvb_pat with ppat_attributes = pattrs} in
          {x with pvb_pat = pat; pvb_expr = sbody}, "local_ "
      | _ -> x, ""
    in
    pp f "@[<2>%s %a%s%a@]%a" kwd rec_flag rf is_local
      (binding ctxt) x (item_attributes ctxt) x.pvb_attributes
  in
  match l with
  | [] -> ()
  | [x] -> binding "let" rf f x
  | x::xs ->
      pp f "@[<v>%a@,%a@]"
        (binding "let" rf) x
        (list ~sep:"@," (binding "and" Nonrecursive)) xs
(* Print one [let*]/[and*]-style binding. The punned form [let* x] is used
   when the pattern and expression are the same simple identifier. *)
and binding_op ctxt f x =
  match x.pbop_pat, x.pbop_exp with
  | {ppat_desc = Ppat_var { txt=pvar; _ }; ppat_attributes = []; _},
    {pexp_desc = Pexp_ident { txt=Lident evar; _}; pexp_attributes = []; _}
    when pvar = evar ->
      pp f "@[<2>%s %s@]" x.pbop_op.txt evar
  | pat, exp ->
      pp f "@[<2>%s %a@;=@;%a@]"
        x.pbop_op.txt (pattern ctxt) pat (expression ctxt) exp
(* Print one structure item: toplevel evaluation, type declarations,
   let-bindings, extensions, exceptions, modules (simple and recursive),
   opens, module types, classes and class types, externals, includes,
   floating attributes, and item extensions.
   NOTE(review): two lines had been mangled by a comment-stripping pass —
   a commented-out alternative for Pstr_value had lost its markers, and the
   [| Pstr_recmodule decls ->] case head had been dropped, leaving only its
   trailing ["3.07"] comment text. Both restored from upstream pprintast. *)
and structure_item ctxt f x =
  match x.pstr_desc with
  | Pstr_eval (e, attrs) ->
      pp f "@[<hov2>;;%a@]%a"
        (expression ctxt) e
        (item_attributes ctxt) attrs
  | Pstr_type (_, []) -> assert false
  | Pstr_type (rf, l) -> type_def_list ctxt f (rf, true, l)
  | Pstr_value (rf, l) ->
      (* pp f "@[<hov2>let %a%a@]"  rec_flag rf bindings l *)
      pp f "@[<2>%a@]" (bindings ctxt) (rf,l)
  | Pstr_typext te -> type_extension ctxt f te
  | Pstr_exception ed -> exception_declaration ctxt f ed
  | Pstr_module x ->
      (* print curried functor parameters, returning the innermost body *)
      let rec module_helper = function
        | {pmod_desc=Pmod_functor(arg_opt,me'); pmod_attributes = []} ->
            begin match arg_opt with
            | Unit -> pp f "()"
            | Named (s, mt) ->
                pp f "(%s:%a)" (Option.value s.txt ~default:"_")
                  (module_type ctxt) mt
            end;
            module_helper me'
        | me -> me
      in
      pp f "@[<hov2>module %s%a@]%a"
        (Option.value x.pmb_name.txt ~default:"_")
        (fun f me ->
           let me = module_helper me in
           match me with
           | {pmod_desc=
                Pmod_constraint
                  (me',
                   ({pmty_desc=(Pmty_ident (_)
                               | Pmty_signature (_));_} as mt));
              pmod_attributes = []} ->
               pp f " :@;%a@;=@;%a@;"
                 (module_type ctxt) mt (module_expr ctxt) me'
           | _ -> pp f " =@ %a" (module_expr ctxt) me
        ) x.pmb_expr
        (item_attributes ctxt) x.pmb_attributes
  | Pstr_open od ->
      pp f "@[<2>open%s@;%a@]%a"
        (override od.popen_override)
        (module_expr ctxt) od.popen_expr
        (item_attributes ctxt) od.popen_attributes
  | Pstr_modtype {pmtd_name=s; pmtd_type=md; pmtd_attributes=attrs} ->
      pp f "@[<hov2>module@ type@ %s%a@]%a"
        s.txt
        (fun f md -> match md with
           | None -> ()
           | Some mt ->
               pp_print_space f () ;
               pp f "@ =@ %a" (module_type ctxt) mt
        ) md
        (item_attributes ctxt) attrs
  | Pstr_class l ->
      (* split a class expression into fun-parameters, an optional
         constraint, and the remaining body *)
      let extract_class_args cl =
        let rec loop acc = function
          | {pcl_desc=Pcl_fun (l, eo, p, cl'); pcl_attributes = []} ->
              loop ((l,eo,p) :: acc) cl'
          | cl -> List.rev acc, cl
        in
        let args, cl = loop [] cl in
        let constr, cl =
          match cl with
          | {pcl_desc=Pcl_constraint (cl', ct); pcl_attributes = []} ->
              Some ct, cl'
          | _ -> None, cl
        in
        args, constr, cl
      in
      let class_constraint f ct = pp f ": @[%a@] " (class_type ctxt) ct in
      let class_declaration kwd f
          ({pci_params=ls; pci_name={txt;_}; _} as x) =
        let args, constr, cl = extract_class_args x.pci_expr in
        pp f "@[<2>%s %a%a%s %a%a=@;%a@]%a" kwd
          virtual_flag x.pci_virt
          (class_params_def ctxt) ls txt
          (list (label_exp ctxt) ~last:"@ ") args
          (option class_constraint) constr
          (class_expr ctxt) cl
          (item_attributes ctxt) x.pci_attributes
      in begin
        match l with
        | [] -> ()
        | [x] -> class_declaration "class" f x
        | x :: xs ->
            pp f "@[<v>%a@,%a@]"
              (class_declaration "class") x
              (list ~sep:"@," (class_declaration "and")) xs
      end
  | Pstr_class_type l -> class_type_declaration_list ctxt f l
  | Pstr_primitive vd ->
      pp f "@[<hov2>external@ %a@ :@ %a@]%a"
        protect_ident vd.pval_name.txt
        (value_description ctxt) vd
        (item_attributes ctxt) vd.pval_attributes
  | Pstr_include incl ->
      (* Print "include functor" rather than attribute *)
      let attrs, incl_fun = check_include_functor_attr incl.pincl_attributes in
      pp f "@[<hov2>include%a@ %a@]%a"
        maybe_functor incl_fun
        (module_expr ctxt) incl.pincl_mod
        (item_attributes ctxt) attrs
  | Pstr_recmodule decls -> (* 3.07 *)
      let aux f = function
        | ({pmb_expr={pmod_desc=Pmod_constraint (expr, typ)}} as pmb) ->
            pp f "@[<hov2>@ and@ %s:%a@ =@ %a@]%a"
              (Option.value pmb.pmb_name.txt ~default:"_")
              (module_type ctxt) typ
              (module_expr ctxt) expr
              (item_attributes ctxt) pmb.pmb_attributes
        | pmb ->
            pp f "@[<hov2>@ and@ %s@ =@ %a@]%a"
              (Option.value pmb.pmb_name.txt ~default:"_")
              (module_expr ctxt) pmb.pmb_expr
              (item_attributes ctxt) pmb.pmb_attributes
      in
      begin match decls with
      | ({pmb_expr={pmod_desc=Pmod_constraint (expr, typ)}} as pmb) :: l2 ->
          pp f "@[<hv>@[<hov2>module@ rec@ %s:%a@ =@ %a@]%a@ %a@]"
            (Option.value pmb.pmb_name.txt ~default:"_")
            (module_type ctxt) typ
            (module_expr ctxt) expr
            (item_attributes ctxt) pmb.pmb_attributes
            (fun f l2 -> List.iter (aux f) l2) l2
      | pmb :: l2 ->
          pp f "@[<hv>@[<hov2>module@ rec@ %s@ =@ %a@]%a@ %a@]"
            (Option.value pmb.pmb_name.txt ~default:"_")
            (module_expr ctxt) pmb.pmb_expr
            (item_attributes ctxt) pmb.pmb_attributes
            (fun f l2 -> List.iter (aux f) l2) l2
      | _ -> assert false
      end
  | Pstr_attribute a -> floating_attribute ctxt f a
  | Pstr_extension(e, a) ->
      item_extension ctxt f e;
      item_attributes ctxt f a
(* Print one type parameter with its variance (+/-) and injectivity (!)
   annotations, e.g. [+!'a]. *)
and type_param ctxt f (ct, (a,b)) =
  pp f "%s%s%a" (type_variance a) (type_injectivity b) (core_type ctxt) ct
(* Print a (possibly empty) type-parameter list as [('a, 'b) ] — with a
   trailing space — or nothing at all. *)
and type_params ctxt f = function
  | [] -> ()
  | l -> pp f "%a " (list (type_param ctxt) ~first:"(" ~last:")" ~sep:",@;") l
(* Print a group of type declarations introduced by ["type"] then ["and"].
   [exported] selects between an equation ([=], for declarations) and a
   destructive substitution ([:=], for Psig_typesubst); purely abstract
   types print no separator at all. *)
and type_def_list ctxt f (rf, exported, l) =
  let type_decl kwd rf f x =
    let eq =
      if (x.ptype_kind = Ptype_abstract)
      && (x.ptype_manifest = None) then ""
      else if exported then " ="
      else " :="
    in
    pp f "@[<2>%s %a%a%s%s%a@]%a" kwd
      nonrec_flag rf
      (type_params ctxt) x.ptype_params
      x.ptype_name.txt eq
      (type_declaration ctxt) x
      (item_attributes ctxt) x.ptype_attributes
  in
  match l with
  | [] -> assert false
  | [x] -> type_decl "type" rf f x
  | x :: xs -> pp f "@[<v>%a@,%a@]"
                 (type_decl "type" rf) x
                 (list ~sep:"@," (type_decl "and" Recursive)) xs
(* Print the label declarations of a record type, [{ l1 : t1; ... }].
   The [extension.nonlocal]/[extension.global] attribute encodings are
   printed back as the [nonlocal_]/[global_] modifier keywords and removed
   from the attribute list. *)
and record_declaration ctxt f lbls =
  let has_attr pld name =
    List.exists (fun attr -> attr.attr_name.txt = name) pld.pld_attributes
  in
  let field_flag f pld =
    pp f "%a" mutable_flag pld.pld_mutable;
    if has_attr pld "extension.nonlocal" then pp f "nonlocal_ ";
    if has_attr pld "extension.global" then pp f "global_ "
  in
  let type_record_field f pld =
    (* drop the modifier-encoding attributes already printed as keywords *)
    let pld_attributes =
      List.filter (fun attr ->
        match attr.attr_name.txt with
        | "extension.nonlocal" | "extension.global" -> false
        | _ -> true) pld.pld_attributes
    in
    pp f "@[<2>%a%s:@;%a@;%a@]"
      field_flag pld
      pld.pld_name.txt
      (core_type ctxt) pld.pld_type
      (attributes ctxt) pld_attributes
  in
  pp f "{@\n%a}"
    (list type_record_field ~sep:";@\n" ) lbls
(* Print the right-hand side of a type declaration: the optional private
   flag and manifest, the representation (variant / record / open /
   abstract), and trailing [constraint] clauses. The name, parameters and
   [=] separator are printed by the caller ([type_def_list]). *)
and type_declaration ctxt f x =
  (* type_declaration has an attribute field,
     but it's been printed by the caller of this method *)
  let priv f =
    match x.ptype_private with
    | Public -> ()
    | Private -> pp f "@;private"
  in
  let manifest f =
    match x.ptype_manifest with
    | None -> ()
    | Some y ->
        if x.ptype_kind = Ptype_abstract then
          pp f "%t@;%a" priv (core_type ctxt) y
        else
          pp f "@;%a" (core_type ctxt) y
  in
  (* NOTE: shadows the top-level [constructor_declaration], which it calls *)
  let constructor_declaration f pcd =
    pp f "|@;";
    constructor_declaration ctxt f
      (pcd.pcd_name.txt, pcd.pcd_vars,
       pcd.pcd_args, pcd.pcd_res, pcd.pcd_attributes)
  in
  let repr f =
    let intro f =
      if x.ptype_manifest = None then ()
      else pp f "@;="
    in
    match x.ptype_kind with
    | Ptype_variant xs ->
        (* an empty constructor list still needs a [|] to stay parseable *)
        let variants fmt xs =
          if xs = [] then pp fmt " |" else
            pp fmt "@\n%a" (list ~sep:"@\n" constructor_declaration) xs
        in pp f "%t%t%a" intro priv variants xs
    | Ptype_abstract -> ()
    | Ptype_record l ->
        pp f "%t%t@;%a" intro priv (record_declaration ctxt) l
    | Ptype_open -> pp f "%t%t@;.." intro priv
  in
  let constraints f =
    List.iter
      (fun (ct1,ct2,_) ->
         pp f "@[<hov2>@ constraint@ %a@ =@ %a@]"
           (core_type ctxt) ct1 (core_type ctxt) ct2)
      x.ptype_cstrs
  in
  pp f "%t%t%t" manifest repr constraints
(* Print [type ('a) path += private C1 | C2 ...] for an extensible-type
   extension, followed by its item attributes.
   NOTE(review): the [private_flag x.ptyext_private] argument line was lost
   in a comment-stripping pass (only its trailing [Cf: #7200] comment text
   survived); restored from upstream pprintast. *)
and type_extension ctxt f x =
  let extension_constructor f x =
    pp f "@\n|@;%a" (extension_constructor ctxt) x
  in
  pp f "@[<2>type %a%a += %a@ %a@]%a"
    (fun f -> function
       | [] -> ()
       | l ->
           pp f "%a@;" (list (type_param ctxt) ~first:"(" ~last:")" ~sep:",") l)
    x.ptyext_params
    longident_loc x.ptyext_path
    private_flag x.ptyext_private (* Cf: #7200 *)
    (list ~sep:"" extension_constructor)
    x.ptyext_constructors
    (item_attributes ctxt) x.ptyext_attributes
(* Print one constructor declaration, with optional existential variables,
   tuple or inline-record arguments, and an optional (GADT) result type:
   [C], [C of t1 * t2], [C : t], [C : 'a. t1 -> t], [C of {l : t}], ... *)
and constructor_declaration ctxt f (name, vars, args, res, attrs) =
  (* [::] must be parenthesized to be a valid constructor name *)
  let name =
    match name with
    | "::" -> "(::)"
    | s -> s in
  let pp_vars f vs =
    match vs with
    | [] -> ()
    | vs -> pp f "%a@;.@;" (list tyvar_loc ~sep:"@;") vs in
  match res with
  | None ->
      pp f "%s%a@;%a" name
        (fun f -> function
           | Pcstr_tuple [] -> ()
           | Pcstr_tuple l ->
               pp f "@;of@;%a" (list (core_type1 ctxt) ~sep:"@;*@;") l
           | Pcstr_record l -> pp f "@;of@;%a" (record_declaration ctxt) l
        ) args
        (attributes ctxt) attrs
  | Some r ->
      (* GADT form: arguments become an arrow into the result type *)
      pp f "%s:@;%a%a@;%a" name
        pp_vars vars
        (fun f -> function
           | Pcstr_tuple [] -> core_type1 ctxt f r
           | Pcstr_tuple l -> pp f "%a@;->@;%a"
                                (list (core_type1 ctxt) ~sep:"@;*@;") l
                                (core_type1 ctxt) r
           | Pcstr_record l ->
               pp f "%a@;->@;%a" (record_declaration ctxt) l (core_type1 ctxt) r
        )
        args
        (attributes ctxt) attrs
(* Print one extension constructor: either a full declaration (shared with
   ordinary constructor printing) or a rebinding [C = Path.D].
   NOTE(review): the stray [Cf: #7200] line had lost its comment markers
   (a syntax error); restored as the original comment. *)
and extension_constructor ctxt f x =
  (* Cf: #7200 *)
  match x.pext_kind with
  | Pext_decl(v, l, r) ->
      constructor_declaration ctxt f
        (x.pext_name.txt, v, l, r, x.pext_attributes)
  | Pext_rebind li ->
      pp f "%s@;=@;%a%a" x.pext_name.txt
        longident_loc li
        (attributes ctxt) x.pext_attributes
(* Print match/function/try cases, each as [| pat [when guard] -> rhs].
   The rhs is printed [under_pipe] so lower-priority constructs get
   parenthesized correctly. *)
and case_list ctxt f l : unit =
  let aux f {pc_lhs; pc_guard; pc_rhs} =
    pp f "@;| @[<2>%a%a@;->@;%a@]"
      (pattern ctxt) pc_lhs (option (expression ctxt) ~first:"@;when@;")
      pc_guard (expression (under_pipe ctxt)) pc_rhs
  in
  list aux f l ~sep:""
(* Print one application argument with its label: [e], [~l:e], [?l:e], or
   the punned forms [~x] / [?x] when the argument is the same identifier
   as its label.
   NOTE(review): the [| Nolabel -> expression2 ctxt f e] case was lost in a
   comment-stripping pass (only its trailing [level 2] comment survived);
   restored from upstream pprintast. *)
and label_x_expression_param ctxt f (l,e) =
  let simple_name = match e with
    | {pexp_desc=Pexp_ident {txt=Lident l;_};
       pexp_attributes=[]} -> Some l
    | _ -> None
  in match l with
  | Nolabel  -> expression2 ctxt f e (* level 2*)
  | Optional str ->
      if Some str = simple_name then
        pp f "?%s" str
      else
        pp f "?%s:%a" str (simple_expr ctxt) e
  | Labelled lbl ->
      if Some lbl = simple_name then
        pp f "~%s" lbl
      else
        pp f "~%s:%a" lbl (simple_expr ctxt) e
(* Print the argument of a toplevel directive: a string literal, an
   integer (with optional suffix character, kept as the parsed string),
   an identifier, or a boolean. *)
and directive_argument f x =
  match x.pdira_desc with
  | Pdir_string (s) -> pp f "@ %S" s
  | Pdir_int (n, None) -> pp f "@ %s" n
  | Pdir_int (n, Some m) -> pp f "@ %s%c" n m
  | Pdir_ident (li) -> pp f "@ %a" longident li
  | Pdir_bool (b) -> pp f "@ %s" (string_of_bool b)
(* Print a toplevel phrase: either a structure or a [#directive] with an
   optional argument.
   NOTE(review): one of the three commented-out lines after Ptop_def had
   lost its comment markers (a syntax error); restored as a comment. *)
let toplevel_phrase f x =
  match x with
  | Ptop_def (s) ->pp f "@[<hov0>%a@]" (list (structure_item reset_ctxt)) s
  (* pp_open_hvbox f 0; *)
  (* pp_print_list structure_item f s ; *)
  (* pp_close_box f (); *)
  | Ptop_dir {pdir_name; pdir_arg = None; _} ->
      pp f "@[<hov2>#%s@]" pdir_name.txt
  | Ptop_dir {pdir_name; pdir_arg = Some pdir_arg; _} ->
      pp f "@[<hov2>#%s@ %a@]" pdir_name.txt directive_argument pdir_arg
(* Public entry point: print an expression with a fresh (reset) context. *)
let expression f x =
  pp f "@[%a@]" (expression reset_ctxt) x
(* Render an expression to a string via [Format.str_formatter].
   The formatter's buffer is flushed first to discard any stale output. *)
let string_of_expression x =
  ignore (flush_str_formatter ()) ;
  let f = str_formatter in
  expression f x;
  flush_str_formatter ()
(* Render a structure to a string via [Format.str_formatter].
   The formatter's buffer is flushed first to discard any stale output. *)
let string_of_structure x =
  ignore (flush_str_formatter ());
  let f = str_formatter in
  structure reset_ctxt f x;
  flush_str_formatter ()
(* Print a toplevel phrase terminated by [;;], surrounded by newlines. *)
let top_phrase f x =
  pp_print_newline f ();
  toplevel_phrase f x;
  pp f ";;";
  pp_print_newline f ()
(* Public API: re-export the context-threaded printers with the context
   pre-applied to [reset_ctxt], matching the .mli signatures. *)
let core_type = core_type reset_ctxt
let pattern = pattern reset_ctxt
let signature = signature reset_ctxt
let structure = structure reset_ctxt
let module_expr = module_expr reset_ctxt
let module_type = module_type reset_ctxt
let class_field = class_field reset_ctxt
let class_type_field = class_type_field reset_ctxt
let class_expr = class_expr reset_ctxt
let class_type = class_type reset_ctxt
let structure_item = structure_item reset_ctxt
let signature_item = signature_item reset_ctxt
let binding = binding reset_ctxt
let payload = payload reset_ctxt
| null | https://raw.githubusercontent.com/janestreet/merlin-jst/00f0a2c961fbf5a968125b33612d60224a573f40/upstream/ocaml_flambda/parsing/pprintast.ml | ocaml | ************************************************************************
OCaml
Fabrice Le Fessant, INRIA Saclay
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Printing code expressions
type fixity = Infix| Prefix
which identifiers are in fact operators needing parentheses
some infixes need spaces around parens to avoid clashes with comment
syntax
add parentheses to binders when they are in fact infix or prefix operators
variance encoding: need to sync up with the [parser.mly]
trailing space
trailing space added
without the space, this would be parsed as
a character literal
c ['a,'b]
space
otherwise parenthesize
"%a%a@;"
Cf #7200: print [>] correctly
FIXME desugar the syntax sugar
*******************pattern*******************
RA
single case pattern parens needed here
extract operator:
assignment operators end with [right_bracket ^ "<-"],
access operators end with [right_bracket] directly
extract the right end bracket
"try@;@[<2>%a@]@\nwith@\n%a"
pp f "@[<2>let %a%a in@;<1 -2>%a@]"
(*no indentation here, a new line
See #7200: avoid turning (~- 1) into (- 1) which is
parsed as an int literal
reset here only because [function,match,try,sequence]
are lower priority
Not efficient FIXME
@;@[<2>else@ %a@]@]
pp f "()"
Pexp_poly: impossible but we should print it anyway, rather than
assert false
used in [Pexp_apply]
(match view_fixity_of_exp x with
|`Normal -> longident_loc f li
| `Prefix _ | `Infix _ -> pp f "( %a )" longident_loc li)
no sep hint
"@[<hov2>{%a%a}@]"
note: value_description has an attribute field,
but they're already printed by the callers this method
call [class_signature] called by [class_signature]
[class type a = object end]
"@[<hov>sig@ %a@ end@]"
FIXME wrong indentation
Psig_typesubst is never recursive, but we specify [Recursive] here to
avoid printing a [nonrec] flag, which would be rejected by the parser.
Print "include functor" rather than attribute
ast invariant
do not print [@extension.local] on expressions
transform [f = fun g h -> ..] to [f g h = ... ] could be improved
.pvb_attributes have already been printed by the caller, #bindings
[in] is not printed
Print "include functor" rather than attribute
type_declaration has an attribute field,
but it's been printed by the caller of this method
pp_open_hvbox f 0;
pp_print_list structure_item f s ; | , OCamlPro
, University of Pennsylvania
Copyright 2007 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
Original Code from Ber - metaocaml , modified for 3.12.0 and fixed
Authors : ,
Extensive Rewrite : : University of Pennsylvania
TODO more fine - grained precedence pretty - printing
open Asttypes
open Format
open Location
open Longident
open Parsetree
open Ast_helper
let prefix_symbols = [ '!'; '?'; '~' ] ;;
let infix_symbols = [ '='; '<'; '>'; '@'; '^'; '|'; '&'; '+'; '-'; '*'; '/';
'$'; '%'; '#' ]
let special_infix_strings =
["asr"; "land"; "lor"; "lsl"; "lsr"; "lxor"; "mod"; "or"; ":="; "!="; "::" ]
let letop s =
String.length s > 3
&& s.[0] = 'l'
&& s.[1] = 'e'
&& s.[2] = 't'
&& List.mem s.[3] infix_symbols
let andop s =
String.length s > 3
&& s.[0] = 'a'
&& s.[1] = 'n'
&& s.[2] = 'd'
&& List.mem s.[3] infix_symbols
determines if the string is an infix string .
checks backwards , first allowing a renaming postfix ( " _ 102 " ) which
may have resulted from - > Texp - > translation , then checking
if all the characters in the beginning of the string are valid infix
characters .
checks backwards, first allowing a renaming postfix ("_102") which
may have resulted from Pexp -> Texp -> Pexp translation, then checking
if all the characters in the beginning of the string are valid infix
characters. *)
let fixity_of_string = function
| "" -> `Normal
| s when List.mem s special_infix_strings -> `Infix s
| s when List.mem s.[0] infix_symbols -> `Infix s
| s when List.mem s.[0] prefix_symbols -> `Prefix s
| s when s.[0] = '.' -> `Mixfix s
| s when letop s -> `Letop s
| s when andop s -> `Andop s
| _ -> `Normal
let view_fixity_of_exp = function
| {pexp_desc = Pexp_ident {txt=Lident l;_}; pexp_attributes = []} ->
fixity_of_string l
| _ -> `Normal
let is_infix = function `Infix _ -> true | _ -> false
let is_mixfix = function `Mixfix _ -> true | _ -> false
let is_kwdop = function `Letop _ | `Andop _ -> true | _ -> false
let first_is c str =
str <> "" && str.[0] = c
let last_is c str =
str <> "" && str.[String.length str - 1] = c
let first_is_in cs str =
str <> "" && List.mem str.[0] cs
let needs_parens txt =
let fix = fixity_of_string txt in
is_infix fix
|| is_mixfix fix
|| is_kwdop fix
|| first_is_in prefix_symbols txt
let needs_spaces txt =
first_is '*' txt || last_is '*' txt
let string_loc ppf x = fprintf ppf "%s" x.txt
let protect_ident ppf txt =
let format : (_, _, _) format =
if not (needs_parens txt) then "%s"
else if needs_spaces txt then "(@;%s@;)"
else "(%s)"
in fprintf ppf format txt
let protect_longident ppf print_longident longprefix txt =
let format : (_, _, _) format =
if not (needs_parens txt) then "%a.%s"
else if needs_spaces txt then "%a.(@;%s@;)"
else "%a.(%s)" in
fprintf ppf format print_longident longprefix txt
let is_curry_attr attr =
match attr.attr_name.txt with
| "extension.curry" -> true
| _ -> false
let filter_curry_attrs attrs =
List.filter (fun attr -> not (is_curry_attr attr)) attrs
let has_non_curry_attr attrs =
List.exists (fun attr -> not (is_curry_attr attr)) attrs
let check_local_attr attrs =
match
List.partition (fun attr ->
attr.attr_name.txt = "extension.local") attrs
with
| [], _ -> attrs, false
| _::_, rest -> rest, true
let check_include_functor_attr attrs =
match
List.partition (fun attr ->
attr.attr_name.txt = "extension.include_functor") attrs
with
| [], _ -> attrs, false
| _::_, rest -> rest, true
type space_formatter = (unit, Format.formatter, unit) format
let override = function
| Override -> "!"
| Fresh -> ""
let type_variance = function
| NoVariance -> ""
| Covariant -> "+"
| Contravariant -> "-"
let type_injectivity = function
| NoInjectivity -> ""
| Injective -> "!"
type construct =
[ `cons of expression list
| `list of expression list
| `nil
| `normal
| `simple of Longident.t
| `tuple ]
let view_expr x =
match x.pexp_desc with
| Pexp_construct ( {txt= Lident "()"; _},_) -> `tuple
| Pexp_construct ( {txt= Lident "[]";_},_) -> `nil
| Pexp_construct ( {txt= Lident"::";_},Some _) ->
let rec loop exp acc = match exp with
| {pexp_desc=Pexp_construct ({txt=Lident "[]";_},_);
pexp_attributes = []} ->
(List.rev acc,true)
| {pexp_desc=
Pexp_construct ({txt=Lident "::";_},
Some ({pexp_desc= Pexp_tuple([e1;e2]);
pexp_attributes = []}));
pexp_attributes = []}
->
loop e2 (e1::acc)
| e -> (List.rev (e::acc),false) in
let (ls,b) = loop x [] in
if b then
`list ls
else `cons ls
| Pexp_construct (x,None) -> `simple (x.txt)
| _ -> `normal
let is_simple_construct :construct -> bool = function
| `nil | `tuple | `list _ | `simple _ -> true
| `cons _ | `normal -> false
let pp = fprintf
(* Printing context: records whether we are under a match/function pipe,
   inside a sequence ([semi]), or in an if-then-else branch, so that
   ambiguous sub-expressions get parenthesized. *)
type ctxt = {
  pipe : bool;
  semi : bool;
  ifthenelse : bool;
}

let reset_ctxt = { pipe=false; semi=false; ifthenelse=false }
let under_pipe ctxt = { ctxt with pipe=true }
let under_semi ctxt = { ctxt with semi=true }
let under_ifthenelse ctxt = { ctxt with ifthenelse=true }
(* Currently unused helpers, kept for reference:
let reset_semi ctxt = { ctxt with semi=false }
let reset_ifthenelse ctxt = { ctxt with ifthenelse=false }
let reset_pipe ctxt = { ctxt with pipe=false }
*)
(* [list ~sep ~first ~last fu f xs] prints [xs] element-wise with [fu],
   separated by [sep] (default "@ "); [first]/[last] are printed only
   when the list is non-empty. *)
let list : 'a . ?sep:space_formatter -> ?first:space_formatter ->
  ?last:space_formatter -> (Format.formatter -> 'a -> unit) ->
  Format.formatter -> 'a list -> unit
  = fun ?sep ?first ?last fu f xs ->
    let first = match first with Some x -> x |None -> ("": _ format6)
    and last = match last with Some x -> x |None -> ("": _ format6)
    and sep = match sep with Some x -> x |None -> ("@ ": _ format6) in
    let aux f = function
      | [] -> ()
      | [x] -> fu f x
      | xs ->
        let rec loop f = function
          | [x] -> fu f x
          | x::xs -> fu f x; pp f sep; loop f xs;
          | _ -> assert false in begin
          pp f first; loop f xs; pp f last;
        end in
    aux f xs
(* [option ~first ~last fu f a] prints [Some x] as [first], [x], [last];
   prints nothing for [None]. *)
let option : 'a. ?first:space_formatter -> ?last:space_formatter ->
  (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a option -> unit
  = fun ?first ?last fu f a ->
    let first = match first with Some x -> x | None -> ("": _ format6)
    and last = match last with Some x -> x | None -> ("": _ format6) in
    match a with
    | None -> ()
    | Some x -> pp f first; fu f x; pp f last
(* [paren b fu f x] prints [x] with [fu], wrapped in parentheses when [b]. *)
let paren: 'a . ?first:space_formatter -> ?last:space_formatter ->
  bool -> (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a -> unit
  = fun ?(first=("": _ format6)) ?(last=("": _ format6)) b fu f x ->
    if b then (pp f "("; pp f first; fu f x; pp f last; pp f ")")
    else fu f x
(* Print a [Longident.t], protecting identifiers that need quoting
   (via [protect_ident]/[protect_longident] defined above). *)
let rec longident f = function
  | Lident s -> protect_ident f s
  | Ldot(y,s) -> protect_longident f longident y s
  | Lapply (y,s) ->
    pp f "%a(%a)" longident y longident s

let longident_loc f x = pp f "%a" longident x.txt
(* Print a literal constant.  Numeric literals starting with '-' are
   parenthesized so they round-trip through the parser. *)
let constant f = function
  | Pconst_char i ->
    pp f "%C" i
  | Pconst_string (i, _, None) ->
    pp f "%S" i
  | Pconst_string (i, _, Some delim) ->
    pp f "{%s|%s|%s}" delim i delim
  | Pconst_integer (i, None) ->
    paren (first_is '-' i) (fun f -> pp f "%s") f i
  | Pconst_integer (i, Some m) ->
    (* [m] is the literal suffix, e.g. 'l', 'L', 'n'. *)
    paren (first_is '-' i) (fun f (i, m) -> pp f "%s%c" i m) f (i,m)
  | Pconst_float (i, None) ->
    paren (first_is '-' i) (fun f -> pp f "%s") f i
  | Pconst_float (i, Some m) ->
    paren (first_is '-' i) (fun f (i,m) -> pp f "%s%c" i m) f (i,m)
(* Printers for AST flags: each prints the keyword (with its trailing
   spacing hint) or nothing. *)
let mutable_flag f = function
  | Immutable -> ()
  | Mutable -> pp f "mutable@;"
let virtual_flag f = function
  | Concrete -> ()
  | Virtual -> pp f "virtual@;"

(* trailing space *)
let rec_flag f rf =
  match rf with
  | Nonrecursive -> ()
  | Recursive -> pp f "rec "

(* trailing space *)
let nonrec_flag f rf =
  match rf with
  | Nonrecursive -> pp f "nonrec "
  | Recursive -> ()

let direction_flag f = function
  | Upto -> pp f "to@ "
  | Downto -> pp f "downto@ "

let private_flag f = function
  | Public -> ()
  | Private -> pp f "private@ "

(* Apply a printer to the payload of a located value, ignoring the loc. *)
let iter_loc f ctxt {txt; loc = _} = f ctxt txt

let constant_string f s = pp f "%S" s

(* Print a type variable; a space is inserted after the leading quote
   when the name's second character is itself a quote, so the output is
   not mistaken for a character literal. *)
let tyvar ppf s =
  if String.length s >= 2 && s.[1] = '\'' then
    Format.fprintf ppf "' %s" s
  else
    Format.fprintf ppf "'%s" s

let tyvar_loc f str = tyvar f str.txt
let string_quot f x = pp f "`%s" x
(* Print [local_ ty] when the core type carries the "extension.local"
   attribute (which is stripped before recursing); otherwise print the
   type plainly with [pty]. *)
let maybe_local_type pty ctxt f c =
  let cattrs, is_local = check_local_attr c.ptyp_attributes in
  let c = { c with ptyp_attributes = cattrs } in
  if is_local then
    pp f "local_ %a" (pty ctxt) c
  else
    pty ctxt f c
(* Print the "functor" keyword (after [include]) when the
   include-functor extension attribute was present. *)
let maybe_functor f has_functor_attr =
  match has_functor_attr with
  | true -> pp f "@ functor"
  | false -> ()
(* Print class type parameters, e.g. "['a,'b] " (note trailing space). *)
let rec class_params_def ctxt f = function
  | [] -> ()
  | l ->
    pp f "[%a] " (* space *)
      (list (type_param ctxt) ~sep:",") l
(* Print a function-argument type together with its (possibly absent)
   label.  The [Nolabel] case was missing, which made this function raise
   [Match_failure] on plain arrows. *)
and type_with_label ctxt f (label, c) =
  match label with
  | Nolabel -> maybe_local_type core_type1 ctxt f c (* otherwise parenthesize *)
  | Labelled s -> pp f "%s:%a" s (maybe_local_type core_type1 ctxt) c
  | Optional s -> pp f "?%s:%a" s (maybe_local_type core_type1 ctxt) c
(* Print a core type at the loosest precedence level (arrows, aliases,
   polytypes); anything else is delegated to [core_type1].  Types with
   (non-curry) attributes are wrapped as ((ty)[@attr]). *)
and core_type ctxt f x =
  let filtered_attrs = filter_curry_attrs x.ptyp_attributes in
  if filtered_attrs <> [] then begin
    pp f "((%a)%a)" (core_type ctxt) {x with ptyp_attributes=[]}
      (attributes ctxt) filtered_attrs
  end
  else match x.ptyp_desc with
    | Ptyp_arrow (l, ct1, ct2) ->
      pp f "@[<2>%a@;->@;%a@]" (* FIXME remove parens later *)
        (type_with_label ctxt) (l,ct1) (return_type ctxt) ct2
    | Ptyp_alias (ct, s) ->
      pp f "@[<2>%a@;as@;%a@]" (core_type1 ctxt) ct tyvar s
    | Ptyp_poly ([], ct) ->
      core_type ctxt f ct
    | Ptyp_poly (sl, ct) ->
      pp f "@[<2>%a%a@]"
        (fun f l -> match l with
           | [] -> ()
           | _ ->
             pp f "%a@;.@;"
               (list tyvar_loc ~sep:"@;") l)
        sl (core_type ctxt) ct
    | _ -> pp f "@[<2>%a@]" (core_type1 ctxt) x
(* Print a core type at the tighter precedence level: variables, tuples,
   constructors, polymorphic variants, objects, classes, packages.
   Restored several lines that were lost together with their trailing
   comments: the [pp f "%a%a"] head of [Ptyp_constr], the [[], Open] and
   [(Closed, Some _)] variant cases, [field_var o] in [Ptyp_object], and
   the [Ptyp_class] arm's head. *)
and core_type1 ctxt f x =
  if has_non_curry_attr x.ptyp_attributes then core_type ctxt f x
  else match x.ptyp_desc with
    | Ptyp_any -> pp f "_";
    | Ptyp_var s -> tyvar f s;
    | Ptyp_tuple l -> pp f "(%a)" (list (core_type1 ctxt) ~sep:"@;*@;") l
    | Ptyp_constr (li, l) ->
      pp f "%a%a" (* RA *)
        (fun f l -> match l with
           |[] -> ()
           |[x]-> pp f "%a@;" (core_type1 ctxt) x
           | _ -> list ~first:"(" ~last:")@;" (core_type ctxt) ~sep:",@;" f l)
        l longident_loc li
    | Ptyp_variant (l, closed, low) ->
      let first_is_inherit = match l with
        | {Parsetree.prf_desc = Rinherit _}::_ -> true
        | _ -> false in
      let type_variant_helper f x =
        match x.prf_desc with
        | Rtag (l, _, ctl) ->
          pp f "@[<2>%a%a@;%a@]" (iter_loc string_quot) l
            (fun f l -> match l with
               |[] -> ()
               | _ -> pp f "@;of@;%a"
                        (list (core_type ctxt) ~sep:"&") ctl) ctl
            (attributes ctxt) x.prf_attributes
        | Rinherit ct -> core_type ctxt f ct in
      pp f "@[<2>[%a%a]@]"
        (fun f l ->
           match l, closed with
           | [], Closed -> ()
           | [], Open -> pp f ">" (* Cf #7200: print [>] correctly *)
           | _ ->
             pp f "%s@;%a"
               (match (closed,low) with
                | (Closed,None) -> if first_is_inherit then " |" else ""
                | (Closed,Some _) -> "<" (* FIXME desugar the syntax sugar *)
                | (Open,_) -> ">")
               (list type_variant_helper ~sep:"@;<1 -2>| ") l) l
        (fun f low -> match low with
           |Some [] |None -> ()
           |Some xs ->
             pp f ">@ %a"
               (list string_quot) xs) low
    | Ptyp_object (l, o) ->
      let core_field_type f x = match x.pof_desc with
        | Otag (l, ct) ->
          (* Cf #7200 *)
          pp f "@[<hov2>%s: %a@ %a@ @]" l.txt
            (core_type ctxt) ct (attributes ctxt) x.pof_attributes
        | Oinherit ct ->
          pp f "@[<hov2>%a@ @]" (core_type ctxt) ct
      in
      let field_var f = function
        | Asttypes.Closed -> ()
        | Asttypes.Open ->
          match l with
          | [] -> pp f ".."
          | _ -> pp f " ;.."
      in
      pp f "@[<hov2><@ %a%a@ > @]"
        (list core_field_type ~sep:";") l
        field_var o (* Cf #7200 *)
    | Ptyp_class (li, l) -> (* FIXME *)
      pp f "@[<hov2>%a#%a@]"
        (list (core_type ctxt) ~sep:"," ~first:"(" ~last:")") l
        longident_loc li
    | Ptyp_package (lid, cstrs) ->
      let aux f (s, ct) =
        pp f "type %a@ =@ %a" longident_loc s (core_type ctxt) ct in
      (match cstrs with
       |[] -> pp f "@[<hov2>(module@ %a)@]" longident_loc lid
       |_ ->
         pp f "@[<hov2>(module@ %a@ with@ %a)@]" longident_loc lid
           (list aux ~sep:"@ and@ ") cstrs)
    | Ptyp_extension e -> extension ctxt f e
    | _ -> paren true (core_type ctxt) f x
(* Print the result type of an arrow, with optional [local_]; attributed
   types go through [core_type1] (so they get parenthesized). *)
and return_type ctxt f x =
  if x.ptyp_attributes <> [] then maybe_local_type core_type1 ctxt f x
  else maybe_local_type core_type ctxt f x
(* be cautious when using [pattern]; [pattern1] is preferred *)
(* Print a pattern; attributed patterns are wrapped as ((p)[@attr]).
   Aliases are handled here; everything else goes through [pattern_or]. *)
and pattern ctxt f x =
  if x.ppat_attributes <> [] then begin
    pp f "((%a)%a)" (pattern ctxt) {x with ppat_attributes=[]}
      (attributes ctxt) x.ppat_attributes
  end
  else match x.ppat_desc with
    | Ppat_alias (p, s) ->
      pp f "@[<2>%a@;as@;%a@]" (pattern ctxt) p protect_ident s.txt
    | _ -> pattern_or ctxt f x
(* Flatten a left-nested or-pattern into its list of alternatives and
   print them separated by " | ". *)
and pattern_or ctxt f x =
  let rec left_associative x acc = match x with
    | {ppat_desc=Ppat_or (p1,p2); ppat_attributes = []} ->
      left_associative p1 (p2 :: acc)
    | x -> x :: acc
  in
  match left_associative x [] with
  | [] -> assert false
  | [x] -> pattern1 ctxt f x
  | orpats ->
    pp f "@[<hov0>%a@]" (list ~sep:"@ | " (pattern1 ctxt)) orpats
(* Print constructor-application patterns, with sugar for "::" chains.
   Restored the cons-printing line in [pattern_list_helper], which had
   been lost together with its trailing comment. *)
and pattern1 ctxt (f:Format.formatter) (x:pattern) : unit =
  let rec pattern_list_helper f = function
    | {ppat_desc =
         Ppat_construct
           ({ txt = Lident("::") ;_},
            Some ([], {ppat_desc = Ppat_tuple([pat1; pat2]);_}));
       ppat_attributes = []}
      ->
        pp f "%a::%a" (simple_pattern ctxt) pat1 pattern_list_helper pat2 (* RA *)
    | p -> pattern1 ctxt f p
  in
  if x.ppat_attributes <> [] then pattern ctxt f x
  else match x.ppat_desc with
    | Ppat_variant (l, Some p) ->
      pp f "@[<2>`%s@;%a@]" l (simple_pattern ctxt) p
    | Ppat_construct (({txt=Lident("()"|"[]");_}), _) ->
      simple_pattern ctxt f x
    | Ppat_construct (({txt;_} as li), po) ->
      (* FIXME The third field always false *)
      if txt = Lident "::" then
        pp f "%a" pattern_list_helper x
      else
        (match po with
         | Some ([], x) ->
           pp f "%a@;%a" longident_loc li (simple_pattern ctxt) x
         | Some (vl, x) ->
           pp f "%a@ (type %a)@;%a" longident_loc li
             (list ~sep:"@ " string_loc) vl
             (simple_pattern ctxt) x
         | None -> pp f "%a" longident_loc li)
    | _ -> simple_pattern ctxt f x
(* Print "simple" (highest-precedence) patterns: constants, variables,
   records, tuples, etc.  Restored the [Ppat_tuple] arm's body, which
   had been lost together with its trailing comment. *)
and simple_pattern ctxt (f:Format.formatter) (x:pattern) : unit =
  if x.ppat_attributes <> [] then pattern ctxt f x
  else match x.ppat_desc with
    | Ppat_construct (({txt=Lident ("()"|"[]" as x);_}), None) ->
      pp f "%s" x
    | Ppat_any -> pp f "_";
    | Ppat_var ({txt = txt;_}) -> protect_ident f txt
    | Ppat_array l ->
      pp f "@[<2>[|%a|]@]" (list (pattern1 ctxt) ~sep:";") l
    | Ppat_unpack { txt = None } ->
      pp f "(module@ _)@ "
    | Ppat_unpack { txt = Some s } ->
      pp f "(module@ %s)@ " s
    | Ppat_type li ->
      pp f "#%a" longident_loc li
    | Ppat_record (l, closed) ->
      (* A field [x = x] is abbreviated to just [x] (record punning). *)
      let longident_x_pattern f (li, p) =
        match (li,p) with
        | ({txt=Lident s;_ },
           {ppat_desc=Ppat_var {txt;_};
            ppat_attributes=[]; _})
          when s = txt ->
          pp f "@[<2>%a@]" longident_loc li
        | _ ->
          pp f "@[<2>%a@;=@;%a@]" longident_loc li (pattern1 ctxt) p
      in
      begin match closed with
        | Closed ->
          pp f "@[<2>{@;%a@;}@]" (list longident_x_pattern ~sep:";@;") l
        | _ ->
          pp f "@[<2>{@;%a;_}@]" (list longident_x_pattern ~sep:";@;") l
      end
    | Ppat_tuple l ->
      pp f "@[<1>(%a)@]" (list ~sep:",@;" (pattern1 ctxt)) l (* level1 *)
    | Ppat_constant (c) -> pp f "%a" constant c
    | Ppat_interval (c1, c2) -> pp f "%a..%a" constant c1 constant c2
    | Ppat_variant (l,None) -> pp f "`%s" l
    | Ppat_constraint (p, ct) ->
      pp f "@[<2>(%a@;:@;%a)@]" (pattern1 ctxt) p (core_type ctxt) ct
    | Ppat_lazy p ->
      pp f "@[<2>(lazy@;%a)@]" (simple_pattern ctxt) p
    | Ppat_exception p ->
      pp f "@[<2>exception@;%a@]" (pattern1 ctxt) p
    | Ppat_extension e -> extension ctxt f e
    | Ppat_open (lid, p) ->
      let with_paren =
        match p.ppat_desc with
        | Ppat_array _ | Ppat_record _
        | Ppat_construct (({txt=Lident ("()"|"[]");_}), None) -> false
        | _ -> true in
      pp f "@[<2>%a.%a @]" longident_loc lid
        (paren with_paren @@ pattern1 ctxt) p
    | _ -> paren true (pattern ctxt) f x
(* Print a pattern, prefixed with a parenthesized [local_] when needed. *)
and maybe_local_pat ctxt is_local f p =
  if is_local then
    pp f "(local_ %a)" (simple_pattern ctxt) p
  else
    pp f "%a" (simple_pattern ctxt) p
(* Print one function parameter: its label (plain, labelled, or optional
   with a possible default value) and its pattern, handling the
   "extension.local" attribute on the pattern.  [?x] and [~x] punning is
   used when the pattern is a variable with the same name as the label. *)
and label_exp ctxt f (l,opt,p) =
  let pattrs, is_local = check_local_attr p.ppat_attributes in
  let p = { p with ppat_attributes = pattrs } in
  match l with
  | Nolabel ->
    pp f "%a" (maybe_local_pat ctxt is_local) p
  | Optional rest ->
    begin match p with
      | {ppat_desc = Ppat_var {txt;_}; ppat_attributes = []}
        when txt = rest && not is_local ->
        (match opt with
         | Some o -> pp f "?(%s=@;%a)" rest (expression ctxt) o
         | None -> pp f "?%s" rest)
      | _ ->
        (match opt with
         | Some o ->
           pp f "?%s:(%s%a=@;%a)"
             rest
             (if is_local then "local_ " else "")
             (pattern1 ctxt) p (expression ctxt) o
         | None -> pp f "?%s:%a" rest (maybe_local_pat ctxt is_local) p)
    end
  | Labelled l -> match p with
    | {ppat_desc = Ppat_var {txt;_}; ppat_attributes = []}
      when txt = l ->
      if is_local then
        pp f "~(local_ %s)" l
      else
        pp f "~%s" l
    | _ -> pp f "~%s:%a" l (maybe_local_pat ctxt is_local) p
(* Try to print an application with indexing sugar: [!e], [a.(i)],
   [a.[i]], Bigarray access [a.{i,...}], and user-defined index
   operators such as [a.%(i)] (with the [<-] forms for assignment).
   Returns [true] when sugar was emitted, [false] to let the caller
   print a plain application. *)
and sugar_expr ctxt f e =
  if e.pexp_attributes <> [] then false
  else match e.pexp_desc with
    | Pexp_apply ({ pexp_desc = Pexp_ident {txt = id; _};
                    pexp_attributes=[]; _}, args)
      when List.for_all (fun (lab, _) -> lab = Nolabel) args -> begin
        let print_indexop a path_prefix assign left sep right print_index indices
            rem_args =
          let print_path ppf = function
            | None -> ()
            | Some m -> pp ppf ".%a" longident m in
          match assign, rem_args with
          | false, [] ->
            pp f "@[%a%a%s%a%s@]"
              (simple_expr ctxt) a print_path path_prefix
              left (list ~sep print_index) indices right; true
          | true, [v] ->
            pp f "@[%a%a%s%a%s@ <-@;<1 2>%a@]"
              (simple_expr ctxt) a print_path path_prefix
              left (list ~sep print_index) indices right
              (simple_expr ctxt) v; true
          | _ -> false in
        match id, List.map snd args with
        | Lident "!", [e] ->
          pp f "@[<hov>!%a@]" (simple_expr ctxt) e; true
        | Ldot (path, ("get"|"set" as func)), a :: other_args -> begin
            let assign = func = "set" in
            let print = print_indexop a None assign in
            match path, other_args with
            | Lident "Array", i :: rest ->
              print ".(" "" ")" (expression ctxt) [i] rest
            | Lident "String", i :: rest ->
              print ".[" "" "]" (expression ctxt) [i] rest
            | Ldot (Lident "Bigarray", "Array1"), i1 :: rest ->
              print ".{" "," "}" (simple_expr ctxt) [i1] rest
            | Ldot (Lident "Bigarray", "Array2"), i1 :: i2 :: rest ->
              print ".{" "," "}" (simple_expr ctxt) [i1; i2] rest
            | Ldot (Lident "Bigarray", "Array3"), i1 :: i2 :: i3 :: rest ->
              print ".{" "," "}" (simple_expr ctxt) [i1; i2; i3] rest
            | Ldot (Lident "Bigarray", "Genarray"),
              {pexp_desc = Pexp_array indexes; pexp_attributes = []} :: rest ->
              print ".{" "," "}" (simple_expr ctxt) indexes rest
            | _ -> false
          end
        | (Lident s | Ldot(_,s)) , a :: i :: rest
          when first_is '.' s ->
          (* User-defined index operator: assignment operators end in
             "<-"; the bracket kind is recovered from the operator name. *)
          let multi_indices = String.contains s ';' in
          let i =
            match i.pexp_desc with
            | Pexp_array l when multi_indices -> l
            | _ -> [ i ] in
          let assign = last_is '-' s in
          let kind =
            (* extract the right-end bracket *)
            let n = String.length s in
            if assign then s.[n - 3] else s.[n - 1] in
          let left, right = match kind with
            | ')' -> '(', ")"
            | ']' -> '[', "]"
            | '}' -> '{', "}"
            | _ -> assert false in
          let path_prefix = match id with
            | Ldot(m,_) -> Some m
            | _ -> None in
          let left = String.sub s 0 (1+String.index s left) in
          print_indexop a path_prefix assign left ";" right
            (if multi_indices then expression ctxt else simple_expr ctxt)
            i rest
        | _ -> false
      end
    | _ -> false
(* Print an expression at the loosest precedence level, parenthesizing
   according to [ctxt].  Restored several lines lost with their trailing
   comments: the [when not (is_simple_construct ...)] guard on
   [Pexp_construct], the [None -> ()) eo] closing of the if-then-else
   printer, and the [Pexp_override l ->] arm head; the stray
   [rec_flag rf] residue is re-commented. *)
and expression ctxt f x =
  if x.pexp_attributes <> [] then
    pp f "((%a)@,%a)" (expression ctxt) {x with pexp_attributes=[]}
      (attributes ctxt) x.pexp_attributes
  else match x.pexp_desc with
    | Pexp_function _ | Pexp_fun _ | Pexp_match _ | Pexp_try _ | Pexp_sequence _
    | Pexp_newtype _
      when ctxt.pipe || ctxt.semi ->
      paren true (expression reset_ctxt) f x
    | Pexp_ifthenelse _ | Pexp_sequence _ when ctxt.ifthenelse ->
      paren true (expression reset_ctxt) f x
    | Pexp_let _ | Pexp_letmodule _ | Pexp_open _
    | Pexp_letexception _ | Pexp_letop _
      when ctxt.semi ->
      paren true (expression reset_ctxt) f x
    | Pexp_fun (l, e0, p, e) ->
      pp f "@[<2>fun@;%a@;%a@]"
        (label_exp ctxt) (l, e0, p)
        (pp_print_pexp_function ctxt "->") e
    | Pexp_newtype (lid, e) ->
      pp f "@[<2>fun@;(type@;%s)@;%a@]" lid.txt
        (pp_print_pexp_function ctxt "->") e
    | Pexp_function l ->
      pp f "@[<hv>function%a@]" (case_list ctxt) l
    | Pexp_match (e, l) ->
      pp f "@[<hv0>@[<hv0>@[<2>match %a@]@ with@]%a@]"
        (expression reset_ctxt) e (case_list ctxt) l
    | Pexp_try (e, l) ->
      pp f "@[<0>@[<hv2>try@ %a@]@ @[<0>with%a@]@]"
        (expression reset_ctxt) e (case_list ctxt) l
    | Pexp_let (rf, l, e) ->
      (* rec_flag rf *)
      pp f "@[<2>%a in@;<1 -2>%a@]"
        (bindings reset_ctxt) (rf,l)
        (expression ctxt) e
    | Pexp_apply
        ({ pexp_desc = Pexp_extension({txt = "extension.local"}, PStr []) },
         [Nolabel, sbody]) ->
      pp f "@[<2>local_ %a@]" (expression ctxt) sbody
    | Pexp_apply (e, l) ->
      begin if not (sugar_expr ctxt f x) then
          match view_fixity_of_exp e with
          | `Infix s ->
            begin match l with
              | [ (Nolabel, _) as arg1; (Nolabel, _) as arg2 ] ->
                (* FIXME associativity label_x_expression_param *)
                pp f "@[<2>%a@;%s@;%a@]"
                  (label_x_expression_param reset_ctxt) arg1 s
                  (label_x_expression_param ctxt) arg2
              | _ ->
                pp f "@[<2>%a %a@]"
                  (simple_expr ctxt) e
                  (list (label_x_expression_param ctxt)) l
            end
          | `Prefix s ->
            let s =
              if List.mem s ["~+";"~-";"~+.";"~-."] &&
                 (match l with
                  |[(_,{pexp_desc=Pexp_constant _})] -> false
                  | _ -> true)
              then String.sub s 1 (String.length s -1)
              else s in
            begin match l with
              | [(Nolabel, x)] ->
                pp f "@[<2>%s@;%a@]" s (simple_expr ctxt) x
              | _ ->
                pp f "@[<2>%a %a@]" (simple_expr ctxt) e
                  (list (label_x_expression_param ctxt)) l
            end
          | _ ->
            pp f "@[<hov2>%a@]" begin fun f (e,l) ->
              pp f "%a@ %a" (expression2 ctxt) e
                (list (label_x_expression_param reset_ctxt)) l
            end (e,l)
      end
    | Pexp_construct (li, Some eo)
      when not (is_simple_construct (view_expr x)) -> (* Not efficient FIXME *)
      (match view_expr x with
       | `cons ls -> list (simple_expr ctxt) f ls ~sep:"@;::@;"
       | `normal ->
         pp f "@[<2>%a@;%a@]" longident_loc li
           (simple_expr ctxt) eo
       | _ -> assert false)
    | Pexp_setfield (e1, li, e2) ->
      pp f "@[<2>%a.%a@ <-@ %a@]"
        (simple_expr ctxt) e1 longident_loc li (simple_expr ctxt) e2
    | Pexp_ifthenelse (e1, e2, eo) ->
      (* @;@[<2>else@ %a@]@] *)
      let fmt:(_,_,_)format ="@[<hv0>@[<2>if@ %a@]@;@[<2>then@ %a@]%a@]" in
      let expression_under_ifthenelse = expression (under_ifthenelse ctxt) in
      pp f fmt expression_under_ifthenelse e1 expression_under_ifthenelse e2
        (fun f eo -> match eo with
           | Some x ->
             pp f "@;@[<2>else@;%a@]" (expression (under_semi ctxt)) x
           | None -> () (* pp f "()" *)) eo
    | Pexp_sequence _ ->
      let rec sequence_helper acc = function
        | {pexp_desc=Pexp_sequence(e1,e2); pexp_attributes = []} ->
          sequence_helper (e1::acc) e2
        | v -> List.rev (v::acc) in
      let lst = sequence_helper [] x in
      pp f "@[<hv>%a@]"
        (list (expression (under_semi ctxt)) ~sep:";@;") lst
    | Pexp_new (li) ->
      pp f "@[<hov2>new@ %a@]" longident_loc li;
    | Pexp_setinstvar (s, e) ->
      pp f "@[<hov2>%s@ <-@ %a@]" s.txt (expression ctxt) e
    | Pexp_override l -> (* FIXME *)
      let string_x_expression f (s, e) =
        pp f "@[<hov2>%s@ =@ %a@]" s.txt (expression ctxt) e in
      pp f "@[<hov2>{<%a>}@]"
        (list string_x_expression ~sep:";" ) l;
    | Pexp_letmodule (s, me, e) ->
      pp f "@[<hov2>let@ module@ %s@ =@ %a@ in@ %a@]"
        (Option.value s.txt ~default:"_")
        (module_expr reset_ctxt) me (expression ctxt) e
    | Pexp_letexception (cd, e) ->
      pp f "@[<hov2>let@ exception@ %a@ in@ %a@]"
        (extension_constructor ctxt) cd
        (expression ctxt) e
    | Pexp_assert e ->
      pp f "@[<hov2>assert@ %a@]" (simple_expr ctxt) e
    | Pexp_lazy (e) ->
      pp f "@[<hov2>lazy@ %a@]" (simple_expr ctxt) e
    | Pexp_poly (e, None) ->
      pp f "@[<hov2>!poly!@ %a@]" (simple_expr ctxt) e
    | Pexp_poly (e, Some ct) ->
      pp f "@[<hov2>(!poly!@ %a@ : %a)@]"
        (simple_expr ctxt) e (core_type ctxt) ct
    | Pexp_open (o, e) ->
      pp f "@[<2>let open%s %a in@;%a@]"
        (override o.popen_override) (module_expr ctxt) o.popen_expr
        (expression ctxt) e
    | Pexp_variant (l,Some eo) ->
      pp f "@[<2>`%s@;%a@]" l (simple_expr ctxt) eo
    | Pexp_letop {let_; ands; body} ->
      pp f "@[<2>@[<v>%a@,%a@] in@;<1 -2>%a@]"
        (binding_op ctxt) let_
        (list ~sep:"@," (binding_op ctxt)) ands
        (expression ctxt) body
    | Pexp_extension e -> extension ctxt f e
    | Pexp_unreachable -> pp f "."
    | _ -> expression1 ctxt f x
(* Tighter precedence levels: [expression1] handles object literals,
   [expression2] handles field access and method send. *)
and expression1 ctxt f x =
  if x.pexp_attributes <> [] then expression ctxt f x
  else match x.pexp_desc with
    | Pexp_object cs -> pp f "%a" (class_structure ctxt) cs
    | _ -> expression2 ctxt f x

(* used in [Pexp_apply] *)
and expression2 ctxt f x =
  if x.pexp_attributes <> [] then expression ctxt f x
  else match x.pexp_desc with
    | Pexp_field (e, li) ->
      pp f "@[<hov2>%a.%a@]" (simple_expr ctxt) e longident_loc li
    | Pexp_send (e, s) -> pp f "@[<hov2>%a#%s@]" (simple_expr ctxt) e s.txt
    | _ -> simple_expr ctxt f x
(* Print "simple" (highest-precedence) expressions: constants,
   identifiers, tuples, records, arrays, loops, ...  Restored the
   [(option ...) cto1] argument in [Pexp_coerce] and the record-braces
   [pp f] line in [Pexp_record], both lost with their trailing comments. *)
and simple_expr ctxt f x =
  if x.pexp_attributes <> [] then expression ctxt f x
  else match x.pexp_desc with
    | Pexp_construct _ when is_simple_construct (view_expr x) ->
      (match view_expr x with
       | `nil -> pp f "[]"
       | `tuple -> pp f "()"
       | `list xs ->
         pp f "@[<hv0>[%a]@]"
           (list (expression (under_semi ctxt)) ~sep:";@;") xs
       | `simple x -> longident f x
       | _ -> assert false)
    | Pexp_ident li ->
      longident_loc f li
    | Pexp_constant c -> constant f c;
    | Pexp_pack me ->
      pp f "(module@;%a)" (module_expr ctxt) me
    | Pexp_tuple l ->
      pp f "@[<hov2>(%a)@]" (list (simple_expr ctxt) ~sep:",@;") l
    | Pexp_constraint (e, ct) ->
      pp f "(%a : %a)" (expression ctxt) e (core_type ctxt) ct
    | Pexp_coerce (e, cto1, ct) ->
      pp f "(%a%a :> %a)" (expression ctxt) e
        (option (core_type ctxt) ~first:" : " ~last:" ") cto1 (* no sep hint *)
        (core_type ctxt) ct
    | Pexp_variant (l, None) -> pp f "`%s" l
    | Pexp_record (l, eo) ->
      (* A field [x = x] is abbreviated to just [x] (record punning). *)
      let longident_x_expression f ( li, e) =
        match e with
        | {pexp_desc=Pexp_ident {txt;_};
           pexp_attributes=[]; _} when li.txt = txt ->
          pp f "@[<hov2>%a@]" longident_loc li
        | _ ->
          pp f "@[<hov2>%a@;=@;%a@]" longident_loc li (simple_expr ctxt) e
      in
      pp f "@[<hv0>@[<hv2>{@;%a%a@]@;}@]" (* holy @ ! *)
        (option ~last:" with@;" (simple_expr ctxt)) eo
        (list longident_x_expression ~sep:";@;") l
    | Pexp_array (l) ->
      pp f "@[<0>@[<2>[|%a|]@]@]"
        (list (simple_expr (under_semi ctxt)) ~sep:";") l
    | Pexp_while (e1, e2) ->
      let fmt : (_,_,_) format = "@[<2>while@;%a@;do@;%a@;done@]" in
      pp f fmt (expression ctxt) e1 (expression ctxt) e2
    | Pexp_for (s, e1, e2, df, e3) ->
      let fmt:(_,_,_)format =
        "@[<hv0>@[<hv2>@[<2>for %a =@;%a@;%a%a@;do@]@;%a@]@;done@]" in
      let expression = expression ctxt in
      pp f fmt (pattern ctxt) s expression e1 direction_flag
        df expression e2 expression e3
    | _ -> paren true (expression ctxt) f x
(* Attribute printers: [@attr] (expression-level), [@@attr] (item-level),
   and [@@@attr] (floating). *)
and attributes ctxt f l =
  List.iter (attribute ctxt f) l

and item_attributes ctxt f l =
  List.iter (item_attribute ctxt f) l

and attribute ctxt f a =
  pp f "@[<2>[@@%s@ %a]@]" a.attr_name.txt (payload ctxt) a.attr_payload

and item_attribute ctxt f a =
  pp f "@[<2>[@@@@%s@ %a]@]" a.attr_name.txt (payload ctxt) a.attr_payload

and floating_attribute ctxt f a =
  pp f "@[<2>[@@@@@@%s@ %a]@]" a.attr_name.txt (payload ctxt) a.attr_payload
(* Print the body of a value description: its type, plus the primitive
   name(s) for [external] declarations.  Its attributes are printed by
   the caller (see [Psig_value]). *)
and value_description ctxt f x =
  pp f "@[<hov2>%a%a@]" (core_type ctxt) x.pval_type
    (fun f x ->
       if x.pval_prim <> []
       then pp f "@ =@ %a" (list constant_string) x.pval_prim
    ) x

(* Extension printers: [%ext ...] and item-level [%%ext ...]. *)
and extension ctxt f (s, e) =
  pp f "@[<2>[%%%s@ %a]@]" s.txt (payload ctxt) e

and item_extension ctxt f (s, e) =
  pp f "@[<2>[%%%%%s@ %a]@]" s.txt (payload ctxt) e

and exception_declaration ctxt f x =
  pp f "@[<hov2>exception@ %a@]%a"
    (extension_constructor ctxt) x.ptyexn_constructor
    (item_attributes ctxt) x.ptyexn_attributes
(* Print one class-signature item: inherit / val / method / constraint /
   attribute / extension. *)
and class_type_field ctxt f x =
  match x.pctf_desc with
  | Pctf_inherit (ct) ->
    pp f "@[<2>inherit@ %a@]%a" (class_type ctxt) ct
      (item_attributes ctxt) x.pctf_attributes
  | Pctf_val (s, mf, vf, ct) ->
    pp f "@[<2>val @ %a%a%s@ :@ %a@]%a"
      mutable_flag mf virtual_flag vf s.txt (core_type ctxt) ct
      (item_attributes ctxt) x.pctf_attributes
  | Pctf_method (s, pf, vf, ct) ->
    pp f "@[<2>method %a %a%s :@;%a@]%a"
      private_flag pf virtual_flag vf s.txt (core_type ctxt) ct
      (item_attributes ctxt) x.pctf_attributes
  | Pctf_constraint (ct1, ct2) ->
    pp f "@[<2>constraint@ %a@ =@ %a@]%a"
      (core_type ctxt) ct1 (core_type ctxt) ct2
      (item_attributes ctxt) x.pctf_attributes
  | Pctf_attribute a -> floating_attribute ctxt f a
  | Pctf_extension e ->
    item_extension ctxt f e;
    item_attributes ctxt f x.pctf_attributes

(* Print a class signature: [object ('self_ty) fields end]; the self
   type is omitted when it is [_] without attributes. *)
and class_signature ctxt f { pcsig_self = ct; pcsig_fields = l ;_} =
  pp f "@[<hv0>@[<hv2>object@[<1>%a@]@ %a@]@ end@]"
    (fun f -> function
         {ptyp_desc=Ptyp_any; ptyp_attributes=[]; _} -> ()
       | ct -> pp f " (%a)" (core_type ctxt) ct) ct
    (list (class_type_field ctxt) ~sep:"@;") l
(* Print a class type.  Restored the [Pcty_arrow] printing line, which
   had been lost together with its trailing comment. *)
and class_type ctxt f x =
  match x.pcty_desc with
  | Pcty_signature cs ->
    class_signature ctxt f cs;
    attributes ctxt f x.pcty_attributes
  | Pcty_constr (li, l) ->
    pp f "%a%a%a"
      (fun f l -> match l with
         | [] -> ()
         | _ -> pp f "[%a]@ " (list (core_type ctxt) ~sep:"," ) l) l
      longident_loc li
      (attributes ctxt) x.pcty_attributes
  | Pcty_arrow (l, co, cl) ->
    pp f "@[<2>%a@;->@;%a@]" (* FIXME remove parens later *)
      (type_with_label ctxt) (l,co)
      (class_type ctxt) cl
  | Pcty_extension e ->
    extension ctxt f e;
    attributes ctxt f x.pcty_attributes
  | Pcty_open (o, e) ->
    pp f "@[<2>let open%s %a in@;%a@]"
      (override o.popen_override) longident_loc o.popen_expr
      (class_type ctxt) e
(* Print a group of class-type declarations: the first is introduced by
   "class type", the following ones by "and". *)
and class_type_declaration_list ctxt f l =
  let class_type_declaration kwd f x =
    let { pci_params=ls; pci_name={ txt; _ }; _ } = x in
    pp f "@[<2>%s %a%a%s@ =@ %a@]%a" kwd
      virtual_flag x.pci_virt
      (class_params_def ctxt) ls txt
      (class_type ctxt) x.pci_expr
      (item_attributes ctxt) x.pci_attributes
  in
  match l with
  | [] -> ()
  | [x] -> class_type_declaration "class type" f x
  | x :: xs ->
    pp f "@[<v>%a@,%a@]"
      (class_type_declaration "class type") x
      (list ~sep:"@," (class_type_declaration "and")) xs
(* Print one class field: inherit / val / method (concrete or virtual) /
   constraint / initializer / attribute / extension. *)
and class_field ctxt f x =
  match x.pcf_desc with
  | Pcf_inherit (ovf, ce, so) ->
    pp f "@[<2>inherit@ %s@ %a%a@]%a" (override ovf)
      (class_expr ctxt) ce
      (fun f so -> match so with
         | None -> ();
         | Some (s) -> pp f "@ as %s" s.txt ) so
      (item_attributes ctxt) x.pcf_attributes
  | Pcf_val (s, mf, Cfk_concrete (ovf, e)) ->
    pp f "@[<2>val%s %a%s =@;%a@]%a" (override ovf)
      mutable_flag mf s.txt
      (expression ctxt) e
      (item_attributes ctxt) x.pcf_attributes
  | Pcf_method (s, pf, Cfk_virtual ct) ->
    pp f "@[<2>method virtual %a %s :@;%a@]%a"
      private_flag pf s.txt
      (core_type ctxt) ct
      (item_attributes ctxt) x.pcf_attributes
  | Pcf_val (s, mf, Cfk_virtual ct) ->
    pp f "@[<2>val virtual %a%s :@ %a@]%a"
      mutable_flag mf s.txt
      (core_type ctxt) ct
      (item_attributes ctxt) x.pcf_attributes
  | Pcf_method (s, pf, Cfk_concrete (ovf, e)) ->
    (* Concrete methods are re-printed through [binding], by wrapping the
       method body in a synthetic value binding on the method's name. *)
    let bind e =
      binding ctxt f
        {pvb_pat=
           {ppat_desc=Ppat_var s;
            ppat_loc=Location.none;
            ppat_loc_stack=[];
            ppat_attributes=[]};
         pvb_expr=e;
         pvb_attributes=[];
         pvb_loc=Location.none;
        }
    in
    pp f "@[<2>method%s %a%a@]%a"
      (override ovf)
      private_flag pf
      (fun f -> function
         | {pexp_desc=Pexp_poly (e, Some ct); pexp_attributes=[]; _} ->
           pp f "%s :@;%a=@;%a"
             s.txt (core_type ctxt) ct (expression ctxt) e
         | {pexp_desc=Pexp_poly (e, None); pexp_attributes=[]; _} ->
           bind e
         | _ -> bind e) e
      (item_attributes ctxt) x.pcf_attributes
  | Pcf_constraint (ct1, ct2) ->
    pp f "@[<2>constraint %a =@;%a@]%a"
      (core_type ctxt) ct1
      (core_type ctxt) ct2
      (item_attributes ctxt) x.pcf_attributes
  | Pcf_initializer (e) ->
    pp f "@[<2>initializer@ %a@]%a"
      (expression ctxt) e
      (item_attributes ctxt) x.pcf_attributes
  | Pcf_attribute a -> floating_attribute ctxt f a
  | Pcf_extension e ->
    item_extension ctxt f e;
    item_attributes ctxt f x.pcf_attributes

(* Print a class body: [object (self-pattern) fields end]; the self
   pattern is omitted when it is [_]. *)
and class_structure ctxt f { pcstr_self = p; pcstr_fields = l } =
  pp f "@[<hv0>@[<hv2>object%a@;%a@]@;end@]"
    (fun f p -> match p.ppat_desc with
       | Ppat_any -> ()
       | Ppat_constraint _ -> pp f " %a" (pattern ctxt) p
       | _ -> pp f " (%a)" (pattern ctxt) p) p
    (list (class_field ctxt)) l
(* Print a class expression; attributed ones are wrapped in parens.
   Restored the [Pcl_apply] printing line, lost with its trailing
   comment. *)
and class_expr ctxt f x =
  if x.pcl_attributes <> [] then begin
    pp f "((%a)%a)" (class_expr ctxt) {x with pcl_attributes=[]}
      (attributes ctxt) x.pcl_attributes
  end else
    match x.pcl_desc with
    | Pcl_structure (cs) -> class_structure ctxt f cs
    | Pcl_fun (l, eo, p, e) ->
      pp f "fun@ %a@ ->@ %a"
        (label_exp ctxt) (l,eo,p)
        (class_expr ctxt) e
    | Pcl_let (rf, l, ce) ->
      pp f "%a@ in@ %a"
        (bindings ctxt) (rf,l)
        (class_expr ctxt) ce
    | Pcl_apply (ce, l) ->
      pp f "((%a)@ %a)" (* Cf: #7200 *)
        (class_expr ctxt) ce
        (list (label_x_expression_param ctxt)) l
    | Pcl_constr (li, l) ->
      pp f "%a%a"
        (fun f l-> if l <>[] then
            pp f "[%a]@ "
              (list (core_type ctxt) ~sep:",") l) l
        longident_loc li
    | Pcl_constraint (ce, ct) ->
      pp f "(%a@ :@ %a)"
        (class_expr ctxt) ce
        (class_type ctxt) ct
    | Pcl_extension e -> extension ctxt f e
    | Pcl_open (o, e) ->
      pp f "@[<2>let open%s %a in@;%a@]"
        (override o.popen_override) longident_loc o.popen_expr
        (class_expr ctxt) e
(* Print a module type at the loosest precedence level (functors and
   [with] constraints); the rest is delegated to [module_type1].
   Attributed module types are wrapped in parens. *)
and module_type ctxt f x =
  if x.pmty_attributes <> [] then begin
    pp f "((%a)%a)" (module_type ctxt) {x with pmty_attributes=[]}
      (attributes ctxt) x.pmty_attributes
  end else
    match x.pmty_desc with
    | Pmty_functor (Unit, mt2) ->
      pp f "@[<hov2>functor () ->@ %a@]" (module_type ctxt) mt2
    | Pmty_functor (Named (s, mt1), mt2) ->
      begin match s.txt with
        | None ->
          pp f "@[<hov2>%a@ ->@ %a@]"
            (module_type1 ctxt) mt1 (module_type ctxt) mt2
        | Some name ->
          pp f "@[<hov2>functor@ (%s@ :@ %a)@ ->@ %a@]" name
            (module_type ctxt) mt1 (module_type ctxt) mt2
      end
    | Pmty_with (mt, []) -> module_type ctxt f mt
    | Pmty_with (mt, l) ->
      pp f "@[<hov2>%a@ with@ %a@]"
        (module_type1 ctxt) mt
        (list (with_constraint ctxt) ~sep:"@ and@ ") l
    | _ -> module_type1 ctxt f x
(* Print one [with] constraint: [type t = ...], [module M = N], and
   their [:=] (destructive substitution) and module-type variants. *)
and with_constraint ctxt f = function
  | Pwith_type (li, ({ptype_params= ls ;_} as td)) ->
    let ls = List.map fst ls in
    pp f "type@ %a %a =@ %a"
      (list (core_type ctxt) ~sep:"," ~first:"(" ~last:")")
      ls longident_loc li (type_declaration ctxt) td
  | Pwith_module (li, li2) ->
    pp f "module %a =@ %a" longident_loc li longident_loc li2;
  | Pwith_modtype (li, mty) ->
    pp f "module type %a =@ %a" longident_loc li (module_type ctxt) mty;
  | Pwith_typesubst (li, ({ptype_params=ls;_} as td)) ->
    let ls = List.map fst ls in
    pp f "type@ %a %a :=@ %a"
      (list (core_type ctxt) ~sep:"," ~first:"(" ~last:")")
      ls longident_loc li
      (type_declaration ctxt) td
  | Pwith_modsubst (li, li2) ->
    pp f "module %a :=@ %a" longident_loc li longident_loc li2
  | Pwith_modtypesubst (li, mty) ->
    pp f "module type %a :=@ %a" longident_loc li (module_type ctxt) mty;
(* Print a "simple" (highest-precedence) module type.  Restored the
   [Pmty_signature] arm's body, whose two lines were lost together with
   their trailing comments. *)
and module_type1 ctxt f x =
  if x.pmty_attributes <> [] then module_type ctxt f x
  else match x.pmty_desc with
    | Pmty_ident li ->
      pp f "%a" longident_loc li;
    | Pmty_alias li ->
      pp f "(module %a)" longident_loc li;
    | Pmty_signature (s) ->
      pp f "@[<hv0>@[<hv2>sig@ %a@]@ end@]" (* "@[<hov>sig@ %a@ end@]" *)
        (list (signature_item ctxt)) s (* FIXME wrong indentation *)
    | Pmty_typeof me ->
      pp f "@[<hov2>module@ type@ of@ %a@]" (module_expr ctxt) me
    | Pmty_extension e -> extension ctxt f e
    | _ -> paren true (module_type ctxt) f x
and signature ctxt f x = list ~sep:"@\n" (signature_item ctxt) f x

(* Print one signature item.  Restored the [None -> assert false] arm in
   the [Psig_modtypesubst] inner match, which had been lost together
   with its trailing comment, leaving the match non-exhaustive. *)
and signature_item ctxt f x : unit =
  match x.psig_desc with
  | Psig_type (rf, l) ->
    type_def_list ctxt f (rf, true, l)
  | Psig_typesubst l ->
    (* Psig_typesubst is never recursive, but we pass [Recursive] here to
       avoid printing a [nonrec] flag. *)
    type_def_list ctxt f (Recursive, false, l)
  | Psig_value vd ->
    let intro = if vd.pval_prim = [] then "val" else "external" in
    pp f "@[<2>%s@ %a@ :@ %a@]%a" intro
      protect_ident vd.pval_name.txt
      (value_description ctxt) vd
      (item_attributes ctxt) vd.pval_attributes
  | Psig_typext te ->
    type_extension ctxt f te
  | Psig_exception ed ->
    exception_declaration ctxt f ed
  | Psig_class l ->
    let class_description kwd f ({pci_params=ls;pci_name={txt;_};_} as x) =
      pp f "@[<2>%s %a%a%s@;:@;%a@]%a" kwd
        virtual_flag x.pci_virt
        (class_params_def ctxt) ls txt
        (class_type ctxt) x.pci_expr
        (item_attributes ctxt) x.pci_attributes
    in begin
      match l with
      | [] -> ()
      | [x] -> class_description "class" f x
      | x :: xs ->
        pp f "@[<v>%a@,%a@]"
          (class_description "class") x
          (list ~sep:"@," (class_description "and")) xs
    end
  | Psig_module ({pmd_type={pmty_desc=Pmty_alias alias;
                            pmty_attributes=[]; _};_} as pmd) ->
    pp f "@[<hov>module@ %s@ =@ %a@]%a"
      (Option.value pmd.pmd_name.txt ~default:"_")
      longident_loc alias
      (item_attributes ctxt) pmd.pmd_attributes
  | Psig_module pmd ->
    pp f "@[<hov>module@ %s@ :@ %a@]%a"
      (Option.value pmd.pmd_name.txt ~default:"_")
      (module_type ctxt) pmd.pmd_type
      (item_attributes ctxt) pmd.pmd_attributes
  | Psig_modsubst pms ->
    pp f "@[<hov>module@ %s@ :=@ %a@]%a" pms.pms_name.txt
      longident_loc pms.pms_manifest
      (item_attributes ctxt) pms.pms_attributes
  | Psig_open od ->
    pp f "@[<hov2>open%s@ %a@]%a"
      (override od.popen_override)
      longident_loc od.popen_expr
      (item_attributes ctxt) od.popen_attributes
  | Psig_include incl ->
    let attrs, incl_fun = check_include_functor_attr incl.pincl_attributes in
    pp f "@[<hov2>include%a@ %a@]%a"
      maybe_functor incl_fun
      (module_type ctxt) incl.pincl_mod
      (item_attributes ctxt) attrs
  | Psig_modtype {pmtd_name=s; pmtd_type=md; pmtd_attributes=attrs} ->
    pp f "@[<hov2>module@ type@ %s%a@]%a"
      s.txt
      (fun f md -> match md with
         | None -> ()
         | Some mt ->
           pp_print_space f () ;
           pp f "@ =@ %a" (module_type ctxt) mt
      ) md
      (item_attributes ctxt) attrs
  | Psig_modtypesubst {pmtd_name=s; pmtd_type=md; pmtd_attributes=attrs} ->
    let md = match md with
      | None -> assert false (* ast invariant *)
      | Some mt -> mt in
    pp f "@[<hov2>module@ type@ %s@ :=@ %a@]%a"
      s.txt (module_type ctxt) md
      (item_attributes ctxt) attrs
  | Psig_class_type (l) -> class_type_declaration_list ctxt f l
  | Psig_recmodule decls ->
    let rec string_x_module_type_list f ?(first=true) l =
      match l with
      | [] -> () ;
      | pmd :: tl ->
        if not first then
          pp f "@ @[<hov2>and@ %s:@ %a@]%a"
            (Option.value pmd.pmd_name.txt ~default:"_")
            (module_type1 ctxt) pmd.pmd_type
            (item_attributes ctxt) pmd.pmd_attributes
        else
          pp f "@[<hov2>module@ rec@ %s:@ %a@]%a"
            (Option.value pmd.pmd_name.txt ~default:"_")
            (module_type1 ctxt) pmd.pmd_type
            (item_attributes ctxt) pmd.pmd_attributes;
        string_x_module_type_list f ~first:false tl
    in
    string_x_module_type_list f decls
  | Psig_attribute a -> floating_attribute ctxt f a
  | Psig_extension(e, a) ->
    item_extension ctxt f e;
    item_attributes ctxt f a
(* Print a module expression; attributed ones are wrapped in parens.
   Re-commented the "Cf: #7200" text, which had lost its comment
   delimiters. *)
and module_expr ctxt f x =
  if x.pmod_attributes <> [] then
    pp f "((%a)%a)" (module_expr ctxt) {x with pmod_attributes=[]}
      (attributes ctxt) x.pmod_attributes
  else match x.pmod_desc with
    | Pmod_structure (s) ->
      pp f "@[<hv2>struct@;@[<0>%a@]@;<1 -2>end@]"
        (list (structure_item ctxt) ~sep:"@\n") s;
    | Pmod_constraint (me, mt) ->
      pp f "@[<hov2>(%a@ :@ %a)@]"
        (module_expr ctxt) me
        (module_type ctxt) mt
    | Pmod_ident (li) ->
      pp f "%a" longident_loc li;
    | Pmod_functor (Unit, me) ->
      pp f "functor ()@;->@;%a" (module_expr ctxt) me
    | Pmod_functor (Named (s, mt), me) ->
      pp f "functor@ (%s@ :@ %a)@;->@;%a"
        (Option.value s.txt ~default:"_")
        (module_type ctxt) mt (module_expr ctxt) me
    | Pmod_apply (me1, me2) ->
      pp f "(%a)(%a)" (module_expr ctxt) me1 (module_expr ctxt) me2
      (* Cf: #7200 *)
    | Pmod_unpack e ->
      pp f "(val@ %a)" (expression ctxt) e
    | Pmod_extension e -> extension ctxt f e
and structure ctxt f x = list ~sep:"@\n" (structure_item ctxt) f x

(* Print an attribute/extension payload: a structure, a type (": t"),
   a signature (": sig..."), or a pattern with optional guard
   ("? p when e"). *)
and payload ctxt f = function
  | PStr [{pstr_desc = Pstr_eval (e, attrs)}] ->
    pp f "@[<2>%a@]%a"
      (expression ctxt) e
      (item_attributes ctxt) attrs
  | PStr x -> structure ctxt f x
  | PTyp x -> pp f ":@ "; core_type ctxt f x
  | PSig x -> pp f ":@ "; signature ctxt f x
  | PPat (x, None) -> pp f "?@ "; pattern ctxt f x
  | PPat (x, Some e) ->
    pp f "?@ "; pattern ctxt f x;
    pp f " when "; expression ctxt f e
(* Print the remaining parameters of a [fun]/[let] chain, then [sep]
   (e.g. "->" or "=") followed by the body.  The "extension.local"
   attribute is stripped first; its flag is deliberately ignored here. *)
and pp_print_pexp_function ctxt sep f x =
  let attrs, _ = check_local_attr x.pexp_attributes in
  let x = { x with pexp_attributes = attrs } in
  if x.pexp_attributes <> [] then pp f "%s@;%a" sep (expression ctxt) x
  else match x.pexp_desc with
    | Pexp_fun (label, eo, p, e) ->
      pp f "%a@ %a"
        (label_exp ctxt) (label,eo,p) (pp_print_pexp_function ctxt sep) e
    | Pexp_newtype (str,e) ->
      pp f "(type@ %s)@ %a" str.txt (pp_print_pexp_function ctxt sep) e
    | _ ->
      pp f "%s@;%a" sep (expression ctxt) x
and binding ctxt f {pvb_pat=p; pvb_expr=x; _} =
let tyvars_str tyvars = List.map (fun v -> v.txt) tyvars in
let is_desugared_gadt p e =
let gadt_pattern =
match p with
| {ppat_desc=Ppat_constraint({ppat_desc=Ppat_var _} as pat,
{ptyp_desc=Ptyp_poly (args_tyvars, rt)});
ppat_attributes=[]}->
Some (pat, args_tyvars, rt)
| _ -> None in
let rec gadt_exp tyvars e =
match e with
| {pexp_desc=Pexp_newtype (tyvar, e); pexp_attributes=[]} ->
gadt_exp (tyvar :: tyvars) e
| {pexp_desc=Pexp_constraint (e, ct); pexp_attributes=[]} ->
Some (List.rev tyvars, e, ct)
| _ -> None in
let gadt_exp = gadt_exp [] e in
match gadt_pattern, gadt_exp with
| Some (p, pt_tyvars, pt_ct), Some (e_tyvars, e, e_ct)
when tyvars_str pt_tyvars = tyvars_str e_tyvars ->
let ety = Typ.varify_constructors e_tyvars e_ct in
if ety = pt_ct then
Some (p, pt_tyvars, e_ct, e) else None
| _ -> None in
if x.pexp_attributes <> []
then
match p with
| {ppat_desc=Ppat_constraint({ppat_desc=Ppat_var _; _} as pat,
({ptyp_desc=Ptyp_poly _; _} as typ));
ppat_attributes=[]; _} ->
pp f "%a@;: %a@;=@;%a"
(simple_pattern ctxt) pat (core_type ctxt) typ (expression ctxt) x
| _ ->
pp f "%a@;=@;%a" (pattern ctxt) p (expression ctxt) x
else
match is_desugared_gadt p x with
| Some (p, [], ct, e) ->
pp f "%a@;: %a@;=@;%a"
(simple_pattern ctxt) p (core_type ctxt) ct (expression ctxt) e
| Some (p, tyvars, ct, e) -> begin
pp f "%a@;: type@;%a.@;%a@;=@;%a"
(simple_pattern ctxt) p (list pp_print_string ~sep:"@;")
(tyvars_str tyvars) (core_type ctxt) ct (expression ctxt) e
end
| None -> begin
match p with
| {ppat_desc=Ppat_constraint(p ,ty);
special case for the first
begin match ty with
| {ptyp_desc=Ptyp_poly _; ptyp_attributes=[]} ->
pp f "%a@;:@;%a@;=@;%a" (simple_pattern ctxt) p
(core_type ctxt) ty (expression ctxt) x
| _ ->
pp f "(%a@;:@;%a)@;=@;%a" (simple_pattern ctxt) p
(core_type ctxt) ty (expression ctxt) x
end
| {ppat_desc=Ppat_var _; ppat_attributes=[]} ->
pp f "%a@ %a" (simple_pattern ctxt) p
(pp_print_pexp_function ctxt "=") x
| _ ->
pp f "%a@;=@;%a" (pattern ctxt) p (expression ctxt) x
end
and bindings ctxt f (rf,l) =
let binding kwd rf f x =
let x, is_local =
match x.pvb_expr.pexp_desc with
| Pexp_apply
({ pexp_desc = Pexp_extension({txt = "extension.local"}, PStr []) },
[Nolabel, sbody]) ->
let sattrs, _ = check_local_attr sbody.pexp_attributes in
let sbody = {sbody with pexp_attributes = sattrs} in
let pattrs, _ = check_local_attr x.pvb_pat.ppat_attributes in
let pat = {x.pvb_pat with ppat_attributes = pattrs} in
{x with pvb_pat = pat; pvb_expr = sbody}, "local_ "
| _ -> x, ""
in
pp f "@[<2>%s %a%s%a@]%a" kwd rec_flag rf is_local
(binding ctxt) x (item_attributes ctxt) x.pvb_attributes
in
match l with
| [] -> ()
| [x] -> binding "let" rf f x
| x::xs ->
pp f "@[<v>%a@,%a@]"
(binding "let" rf) x
(list ~sep:"@," (binding "and" Nonrecursive)) xs
and binding_op ctxt f x =
match x.pbop_pat, x.pbop_exp with
| {ppat_desc = Ppat_var { txt=pvar; _ }; ppat_attributes = []; _},
{pexp_desc = Pexp_ident { txt=Lident evar; _}; pexp_attributes = []; _}
when pvar = evar ->
pp f "@[<2>%s %s@]" x.pbop_op.txt evar
| pat, exp ->
pp f "@[<2>%s %a@;=@;%a@]"
x.pbop_op.txt (pattern ctxt) pat (expression ctxt) exp
and structure_item ctxt f x =
match x.pstr_desc with
| Pstr_eval (e, attrs) ->
pp f "@[<hov2>;;%a@]%a"
(expression ctxt) e
(item_attributes ctxt) attrs
| Pstr_type (_, []) -> assert false
| Pstr_type (rf, l) -> type_def_list ctxt f (rf, true, l)
| Pstr_value (rf, l) ->
pp f " @[<hov2 > let % a%a@ ] " rec_flag rf bindings l
pp f "@[<2>%a@]" (bindings ctxt) (rf,l)
| Pstr_typext te -> type_extension ctxt f te
| Pstr_exception ed -> exception_declaration ctxt f ed
| Pstr_module x ->
let rec module_helper = function
| {pmod_desc=Pmod_functor(arg_opt,me'); pmod_attributes = []} ->
begin match arg_opt with
| Unit -> pp f "()"
| Named (s, mt) ->
pp f "(%s:%a)" (Option.value s.txt ~default:"_")
(module_type ctxt) mt
end;
module_helper me'
| me -> me
in
pp f "@[<hov2>module %s%a@]%a"
(Option.value x.pmb_name.txt ~default:"_")
(fun f me ->
let me = module_helper me in
match me with
| {pmod_desc=
Pmod_constraint
(me',
({pmty_desc=(Pmty_ident (_)
| Pmty_signature (_));_} as mt));
pmod_attributes = []} ->
pp f " :@;%a@;=@;%a@;"
(module_type ctxt) mt (module_expr ctxt) me'
| _ -> pp f " =@ %a" (module_expr ctxt) me
) x.pmb_expr
(item_attributes ctxt) x.pmb_attributes
| Pstr_open od ->
pp f "@[<2>open%s@;%a@]%a"
(override od.popen_override)
(module_expr ctxt) od.popen_expr
(item_attributes ctxt) od.popen_attributes
| Pstr_modtype {pmtd_name=s; pmtd_type=md; pmtd_attributes=attrs} ->
pp f "@[<hov2>module@ type@ %s%a@]%a"
s.txt
(fun f md -> match md with
| None -> ()
| Some mt ->
pp_print_space f () ;
pp f "@ =@ %a" (module_type ctxt) mt
) md
(item_attributes ctxt) attrs
| Pstr_class l ->
let extract_class_args cl =
let rec loop acc = function
| {pcl_desc=Pcl_fun (l, eo, p, cl'); pcl_attributes = []} ->
loop ((l,eo,p) :: acc) cl'
| cl -> List.rev acc, cl
in
let args, cl = loop [] cl in
let constr, cl =
match cl with
| {pcl_desc=Pcl_constraint (cl', ct); pcl_attributes = []} ->
Some ct, cl'
| _ -> None, cl
in
args, constr, cl
in
let class_constraint f ct = pp f ": @[%a@] " (class_type ctxt) ct in
let class_declaration kwd f
({pci_params=ls; pci_name={txt;_}; _} as x) =
let args, constr, cl = extract_class_args x.pci_expr in
pp f "@[<2>%s %a%a%s %a%a=@;%a@]%a" kwd
virtual_flag x.pci_virt
(class_params_def ctxt) ls txt
(list (label_exp ctxt) ~last:"@ ") args
(option class_constraint) constr
(class_expr ctxt) cl
(item_attributes ctxt) x.pci_attributes
in begin
match l with
| [] -> ()
| [x] -> class_declaration "class" f x
| x :: xs ->
pp f "@[<v>%a@,%a@]"
(class_declaration "class") x
(list ~sep:"@," (class_declaration "and")) xs
end
| Pstr_class_type l -> class_type_declaration_list ctxt f l
| Pstr_primitive vd ->
pp f "@[<hov2>external@ %a@ :@ %a@]%a"
protect_ident vd.pval_name.txt
(value_description ctxt) vd
(item_attributes ctxt) vd.pval_attributes
| Pstr_include incl ->
let attrs, incl_fun = check_include_functor_attr incl.pincl_attributes in
pp f "@[<hov2>include%a@ %a@]%a"
maybe_functor incl_fun
(module_expr ctxt) incl.pincl_mod
(item_attributes ctxt) attrs
3.07
let aux f = function
| ({pmb_expr={pmod_desc=Pmod_constraint (expr, typ)}} as pmb) ->
pp f "@[<hov2>@ and@ %s:%a@ =@ %a@]%a"
(Option.value pmb.pmb_name.txt ~default:"_")
(module_type ctxt) typ
(module_expr ctxt) expr
(item_attributes ctxt) pmb.pmb_attributes
| pmb ->
pp f "@[<hov2>@ and@ %s@ =@ %a@]%a"
(Option.value pmb.pmb_name.txt ~default:"_")
(module_expr ctxt) pmb.pmb_expr
(item_attributes ctxt) pmb.pmb_attributes
in
begin match decls with
| ({pmb_expr={pmod_desc=Pmod_constraint (expr, typ)}} as pmb) :: l2 ->
pp f "@[<hv>@[<hov2>module@ rec@ %s:%a@ =@ %a@]%a@ %a@]"
(Option.value pmb.pmb_name.txt ~default:"_")
(module_type ctxt) typ
(module_expr ctxt) expr
(item_attributes ctxt) pmb.pmb_attributes
(fun f l2 -> List.iter (aux f) l2) l2
| pmb :: l2 ->
pp f "@[<hv>@[<hov2>module@ rec@ %s@ =@ %a@]%a@ %a@]"
(Option.value pmb.pmb_name.txt ~default:"_")
(module_expr ctxt) pmb.pmb_expr
(item_attributes ctxt) pmb.pmb_attributes
(fun f l2 -> List.iter (aux f) l2) l2
| _ -> assert false
end
| Pstr_attribute a -> floating_attribute ctxt f a
| Pstr_extension(e, a) ->
item_extension ctxt f e;
item_attributes ctxt f a
and type_param ctxt f (ct, (a,b)) =
pp f "%s%s%a" (type_variance a) (type_injectivity b) (core_type ctxt) ct
and type_params ctxt f = function
| [] -> ()
| l -> pp f "%a " (list (type_param ctxt) ~first:"(" ~last:")" ~sep:",@;") l
and type_def_list ctxt f (rf, exported, l) =
let type_decl kwd rf f x =
let eq =
if (x.ptype_kind = Ptype_abstract)
&& (x.ptype_manifest = None) then ""
else if exported then " ="
else " :="
in
pp f "@[<2>%s %a%a%s%s%a@]%a" kwd
nonrec_flag rf
(type_params ctxt) x.ptype_params
x.ptype_name.txt eq
(type_declaration ctxt) x
(item_attributes ctxt) x.ptype_attributes
in
match l with
| [] -> assert false
| [x] -> type_decl "type" rf f x
| x :: xs -> pp f "@[<v>%a@,%a@]"
(type_decl "type" rf) x
(list ~sep:"@," (type_decl "and" Recursive)) xs
and record_declaration ctxt f lbls =
let has_attr pld name =
List.exists (fun attr -> attr.attr_name.txt = name) pld.pld_attributes
in
let field_flag f pld =
pp f "%a" mutable_flag pld.pld_mutable;
if has_attr pld "extension.nonlocal" then pp f "nonlocal_ ";
if has_attr pld "extension.global" then pp f "global_ "
in
let type_record_field f pld =
let pld_attributes =
List.filter (fun attr ->
match attr.attr_name.txt with
| "extension.nonlocal" | "extension.global" -> false
| _ -> true) pld.pld_attributes
in
pp f "@[<2>%a%s:@;%a@;%a@]"
field_flag pld
pld.pld_name.txt
(core_type ctxt) pld.pld_type
(attributes ctxt) pld_attributes
in
pp f "{@\n%a}"
(list type_record_field ~sep:";@\n" ) lbls
and type_declaration ctxt f x =
let priv f =
match x.ptype_private with
| Public -> ()
| Private -> pp f "@;private"
in
let manifest f =
match x.ptype_manifest with
| None -> ()
| Some y ->
if x.ptype_kind = Ptype_abstract then
pp f "%t@;%a" priv (core_type ctxt) y
else
pp f "@;%a" (core_type ctxt) y
in
let constructor_declaration f pcd =
pp f "|@;";
constructor_declaration ctxt f
(pcd.pcd_name.txt, pcd.pcd_vars,
pcd.pcd_args, pcd.pcd_res, pcd.pcd_attributes)
in
let repr f =
let intro f =
if x.ptype_manifest = None then ()
else pp f "@;="
in
match x.ptype_kind with
| Ptype_variant xs ->
let variants fmt xs =
if xs = [] then pp fmt " |" else
pp fmt "@\n%a" (list ~sep:"@\n" constructor_declaration) xs
in pp f "%t%t%a" intro priv variants xs
| Ptype_abstract -> ()
| Ptype_record l ->
pp f "%t%t@;%a" intro priv (record_declaration ctxt) l
| Ptype_open -> pp f "%t%t@;.." intro priv
in
let constraints f =
List.iter
(fun (ct1,ct2,_) ->
pp f "@[<hov2>@ constraint@ %a@ =@ %a@]"
(core_type ctxt) ct1 (core_type ctxt) ct2)
x.ptype_cstrs
in
pp f "%t%t%t" manifest repr constraints
and type_extension ctxt f x =
let extension_constructor f x =
pp f "@\n|@;%a" (extension_constructor ctxt) x
in
pp f "@[<2>type %a%a += %a@ %a@]%a"
(fun f -> function
| [] -> ()
| l ->
pp f "%a@;" (list (type_param ctxt) ~first:"(" ~last:")" ~sep:",") l)
x.ptyext_params
longident_loc x.ptyext_path
Cf : # 7200
(list ~sep:"" extension_constructor)
x.ptyext_constructors
(item_attributes ctxt) x.ptyext_attributes
and constructor_declaration ctxt f (name, vars, args, res, attrs) =
let name =
match name with
| "::" -> "(::)"
| s -> s in
let pp_vars f vs =
match vs with
| [] -> ()
| vs -> pp f "%a@;.@;" (list tyvar_loc ~sep:"@;") vs in
match res with
| None ->
pp f "%s%a@;%a" name
(fun f -> function
| Pcstr_tuple [] -> ()
| Pcstr_tuple l ->
pp f "@;of@;%a" (list (core_type1 ctxt) ~sep:"@;*@;") l
| Pcstr_record l -> pp f "@;of@;%a" (record_declaration ctxt) l
) args
(attributes ctxt) attrs
| Some r ->
pp f "%s:@;%a%a@;%a" name
pp_vars vars
(fun f -> function
| Pcstr_tuple [] -> core_type1 ctxt f r
| Pcstr_tuple l -> pp f "%a@;->@;%a"
(list (core_type1 ctxt) ~sep:"@;*@;") l
(core_type1 ctxt) r
| Pcstr_record l ->
pp f "%a@;->@;%a" (record_declaration ctxt) l (core_type1 ctxt) r
)
args
(attributes ctxt) attrs
and extension_constructor ctxt f x =
Cf : # 7200
match x.pext_kind with
| Pext_decl(v, l, r) ->
constructor_declaration ctxt f
(x.pext_name.txt, v, l, r, x.pext_attributes)
| Pext_rebind li ->
pp f "%s@;=@;%a%a" x.pext_name.txt
longident_loc li
(attributes ctxt) x.pext_attributes
and case_list ctxt f l : unit =
let aux f {pc_lhs; pc_guard; pc_rhs} =
pp f "@;| @[<2>%a%a@;->@;%a@]"
(pattern ctxt) pc_lhs (option (expression ctxt) ~first:"@;when@;")
pc_guard (expression (under_pipe ctxt)) pc_rhs
in
list aux f l ~sep:""
and label_x_expression_param ctxt f (l,e) =
let simple_name = match e with
| {pexp_desc=Pexp_ident {txt=Lident l;_};
pexp_attributes=[]} -> Some l
| _ -> None
in match l with
level 2
| Optional str ->
if Some str = simple_name then
pp f "?%s" str
else
pp f "?%s:%a" str (simple_expr ctxt) e
| Labelled lbl ->
if Some lbl = simple_name then
pp f "~%s" lbl
else
pp f "~%s:%a" lbl (simple_expr ctxt) e
and directive_argument f x =
match x.pdira_desc with
| Pdir_string (s) -> pp f "@ %S" s
| Pdir_int (n, None) -> pp f "@ %s" n
| Pdir_int (n, Some m) -> pp f "@ %s%c" n m
| Pdir_ident (li) -> pp f "@ %a" longident li
| Pdir_bool (b) -> pp f "@ %s" (string_of_bool b)
let toplevel_phrase f x =
match x with
| Ptop_def (s) ->pp f "@[<hov0>%a@]" (list (structure_item reset_ctxt)) s
pp_close_box f ( ) ;
| Ptop_dir {pdir_name; pdir_arg = None; _} ->
pp f "@[<hov2>#%s@]" pdir_name.txt
| Ptop_dir {pdir_name; pdir_arg = Some pdir_arg; _} ->
pp f "@[<hov2>#%s@ %a@]" pdir_name.txt directive_argument pdir_arg
let expression f x =
pp f "@[%a@]" (expression reset_ctxt) x
let string_of_expression x =
ignore (flush_str_formatter ()) ;
let f = str_formatter in
expression f x;
flush_str_formatter ()
let string_of_structure x =
ignore (flush_str_formatter ());
let f = str_formatter in
structure reset_ctxt f x;
flush_str_formatter ()
let top_phrase f x =
pp_print_newline f ();
toplevel_phrase f x;
pp f ";;";
pp_print_newline f ()
let core_type = core_type reset_ctxt
let pattern = pattern reset_ctxt
let signature = signature reset_ctxt
let structure = structure reset_ctxt
let module_expr = module_expr reset_ctxt
let module_type = module_type reset_ctxt
let class_field = class_field reset_ctxt
let class_type_field = class_type_field reset_ctxt
let class_expr = class_expr reset_ctxt
let class_type = class_type reset_ctxt
let structure_item = structure_item reset_ctxt
let signature_item = signature_item reset_ctxt
let binding = binding reset_ctxt
let payload = payload reset_ctxt
|
80ea79873cd5a99de3be342b50a8dd03278b56aa18295ddcfe935f431a27236d | psilord/option-9 | sparks.lisp | (in-package :option-9)
#+option-9-debug (declaim (optimize (safety 3) (space 0) (speed 0) (debug 3)))
(defmethod sparks ((ent entity))
(+ (initial-sparks ent) (random (additional-sparks ent))))
| null | https://raw.githubusercontent.com/psilord/option-9/44d96cbc5543ee2acbdcf45d300207ef175462bc/sparks.lisp | lisp | (in-package :option-9)
#+option-9-debug (declaim (optimize (safety 3) (space 0) (speed 0) (debug 3)))
(defmethod sparks ((ent entity))
(+ (initial-sparks ent) (random (additional-sparks ent))))
| |
3d8b64e46877d0e61cd110dbab2cd4a2ad0c0d103134dc2a4f26cb0df0fc64e9 | srdqty/talc-3.0 | coff.mli | (**********************************************************************)
( c ) , ,
September 1998 , all rights reserved .
(**********************************************************************)
(* Coff
* Outputs coff format object files.
*)
open Objfile;;
type coff_file;;
make_coff outfilename objfile
Creates a COFF file object from the given objfile .
Creates a COFF file object from the given objfile. *)
val make_coff : string -> objfile -> coff_file;;
(* write_coff outfile coff_file
Writes the data from the coff_file structure to outfile. *)
val write_coff : out_channel -> coff_file -> unit;;
create_coff srcfilename outfilename objfile
Opens a file with the given name , produces a COFF file structure from the
object file , and writes it .
Opens a file with the given name, produces a COFF file structure from the
object file, and writes it. *)
val create_coff : string -> string -> objfile -> unit;;
EOF : coff.mli
| null | https://raw.githubusercontent.com/srdqty/talc-3.0/df83dd5ff0e2b189b13280ddae233d8277199350/talx86/coff.mli | ocaml | ********************************************************************
********************************************************************
Coff
* Outputs coff format object files.
write_coff outfile coff_file
Writes the data from the coff_file structure to outfile. | ( c ) , ,
September 1998 , all rights reserved .
open Objfile;;
type coff_file;;
make_coff outfilename objfile
Creates a COFF file object from the given objfile .
Creates a COFF file object from the given objfile. *)
val make_coff : string -> objfile -> coff_file;;
val write_coff : out_channel -> coff_file -> unit;;
create_coff srcfilename outfilename objfile
Opens a file with the given name , produces a COFF file structure from the
object file , and writes it .
Opens a file with the given name, produces a COFF file structure from the
object file, and writes it. *)
val create_coff : string -> string -> objfile -> unit;;
EOF : coff.mli
|
c31f2779c21449cd6bfec5ae5ffaa4f937693171080b5b4d6a2b0db2e3a76d4c | tcsprojects/ocaml-sat-solvers | minisatwrapper.mli | open Satwrapper;;
open Minisat;;
class minisatSolverFactory: object inherit solverFactory
method description: string
method identifier: string
method short_identifier: string
method copyright: string
method url: string
method new_timed_instance: Timing.timetable -> abstractSolver
end
| null | https://raw.githubusercontent.com/tcsprojects/ocaml-sat-solvers/2c36605fb3e38a1bee41e079031ab5b173794910/src/minisat/minisatwrapper.mli | ocaml | open Satwrapper;;
open Minisat;;
class minisatSolverFactory: object inherit solverFactory
method description: string
method identifier: string
method short_identifier: string
method copyright: string
method url: string
method new_timed_instance: Timing.timetable -> abstractSolver
end
| |
0b1aaf6a2bf898a964eb63a65a5870b2d73896f5c3ffde96bf2736f12f5d03ef | fortytools/holumbus | DocIdMap.hs | # LANGUAGE CPP #
{-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE GeneralizedNewtypeDeriving #
-- ----------------------------------------------------------------------------
|
Module : Holumbus . Index . Common . DocIdMap
Copyright : Copyright ( C ) 2013 , ,
License : MIT
Maintainer : ( )
Stability : experimental
Portability : none portable
DocId maps
Module : Holumbus.Index.Common.DocIdMap
Copyright : Copyright (C) 2013 Sebastian M. Schlatt, Timo B. Huebel, Uwe Schmidt
License : MIT
Maintainer : Timo B. Huebel ()
Stability : experimental
Portability: none portable
DocId maps
-}
-- ----------------------------------------------------------------------------
module Holumbus.Index.Common.DocIdMap
( DocIdMap
, emptyDocIdMap
, singletonDocIdMap
, nullDocIdMap
, memberDocIdMap
, lookupDocIdMap
, insertDocIdMap
, deleteDocIdMap
, insertWithDocIdMap
, sizeDocIdMap
, minKeyDocIdMap
, maxKeyDocIdMap
, isIntervallDocIdMap
, unionDocIdMap
, intersectionDocIdMap
, differenceDocIdMap
, unionWithDocIdMap
, intersectionWithDocIdMap
, differenceWithDocIdMap
, unionsWithDocIdMap
, mapDocIdMap
, filterDocIdMap
, filterWithKeyDocIdMap
, mapWithKeyDocIdMap
, foldDocIdMap
, foldWithKeyDocIdMap
, fromListDocIdMap
, toListDocIdMap
, keysDocIdMap
, elemsDocIdMap
)
where
import Control.Arrow
import Control.DeepSeq
import Data.Binary (Binary (..))
import qualified Data.Binary as B
import Data.Foldable
import qualified Data.IntMap.Strict as IM
import Data.Typeable
import Holumbus.Index.Common.DocId
#if sizeable == 1
import Data.Size
#endif
-- ------------------------------------------------------------
newtype DocIdMap v = DIM { unDIM :: IM.IntMap v }
deriving (Eq, Show, Foldable, NFData, Typeable)
liftDIM :: (IM.IntMap v -> IM.IntMap r) ->
(DocIdMap v -> DocIdMap r)
liftDIM f = DIM . f . unDIM
liftDIM2 :: (IM.IntMap v -> IM.IntMap v -> IM.IntMap v) ->
(DocIdMap v -> DocIdMap v -> DocIdMap v)
liftDIM2 f x y = DIM $ f (unDIM x) (unDIM y)
emptyDocIdMap :: DocIdMap v
emptyDocIdMap = DIM $ IM.empty
singletonDocIdMap :: DocId -> v -> DocIdMap v
singletonDocIdMap d v = insertDocIdMap d v emptyDocIdMap
nullDocIdMap :: DocIdMap v -> Bool
nullDocIdMap = IM.null . unDIM
memberDocIdMap :: DocId -> DocIdMap v -> Bool
memberDocIdMap x = IM.member (theDocId x) . unDIM
lookupDocIdMap :: DocId -> DocIdMap v -> Maybe v
lookupDocIdMap x = IM.lookup (theDocId x) . unDIM
insertDocIdMap :: DocId -> v -> DocIdMap v -> DocIdMap v
insertDocIdMap x y = liftDIM $ IM.insert (theDocId x) y
deleteDocIdMap :: DocId -> DocIdMap v -> DocIdMap v
deleteDocIdMap x = liftDIM $ IM.delete (theDocId x)
insertWithDocIdMap :: (v -> v -> v) -> DocId -> v -> DocIdMap v -> DocIdMap v
insertWithDocIdMap f x y = liftDIM $ IM.insertWith f (theDocId x) y
sizeDocIdMap :: DocIdMap v -> Int
sizeDocIdMap = IM.size . unDIM
minKeyDocIdMap :: DocIdMap v -> DocId
minKeyDocIdMap = maybe nullDocId (DocId . fst . fst) . IM.minViewWithKey . unDIM
maxKeyDocIdMap :: DocIdMap v -> DocId
maxKeyDocIdMap = maybe nullDocId (DocId . fst . fst) . IM.maxViewWithKey . unDIM
isIntervallDocIdMap :: DocIdMap v -> Bool
isIntervallDocIdMap m = nullDocIdMap m
||
( fromEnum (theDocId (maxKeyDocIdMap m)) - fromEnum (theDocId (minKeyDocIdMap m))
== sizeDocIdMap m - 1
)
unionDocIdMap :: DocIdMap v -> DocIdMap v -> DocIdMap v
unionDocIdMap = liftDIM2 $ IM.union
intersectionDocIdMap :: DocIdMap v -> DocIdMap v -> DocIdMap v
intersectionDocIdMap = liftDIM2 $ IM.intersection
differenceDocIdMap :: DocIdMap v -> DocIdMap v -> DocIdMap v
differenceDocIdMap = liftDIM2 $ IM.difference
unionWithDocIdMap :: (v -> v -> v) -> DocIdMap v -> DocIdMap v -> DocIdMap v
unionWithDocIdMap f = liftDIM2 $ IM.unionWith f
intersectionWithDocIdMap :: (v -> v -> v) -> DocIdMap v -> DocIdMap v -> DocIdMap v
intersectionWithDocIdMap f = liftDIM2 $ IM.intersectionWith f
differenceWithDocIdMap :: (v -> v -> Maybe v) -> DocIdMap v -> DocIdMap v -> DocIdMap v
differenceWithDocIdMap f = liftDIM2 $ IM.differenceWith f
unionsWithDocIdMap :: (v -> v -> v) -> [DocIdMap v] -> DocIdMap v
unionsWithDocIdMap f = DIM . IM.unionsWith f . map unDIM
mapDocIdMap :: (v -> r) -> DocIdMap v -> DocIdMap r
mapDocIdMap f = liftDIM $ IM.map f
filterDocIdMap :: (v -> Bool) -> DocIdMap v -> DocIdMap v
filterDocIdMap p = liftDIM $ IM.filter p
filterWithKeyDocIdMap :: (DocId -> v -> Bool) -> DocIdMap v -> DocIdMap v
filterWithKeyDocIdMap p = liftDIM $ IM.filterWithKey (p . DocId)
mapWithKeyDocIdMap :: (DocId -> v -> r) -> DocIdMap v -> DocIdMap r
mapWithKeyDocIdMap f = liftDIM $ IM.mapWithKey (f . DocId)
foldDocIdMap :: (v -> b -> b) -> b -> DocIdMap v -> b
foldDocIdMap f u = IM.foldr f u . unDIM
foldWithKeyDocIdMap :: (DocId -> v -> b -> b) -> b -> DocIdMap v -> b
foldWithKeyDocIdMap f u = IM.foldrWithKey (f . DocId) u . unDIM
fromListDocIdMap :: [(DocId, v)] -> DocIdMap v
fromListDocIdMap = DIM . IM.fromList . map (first theDocId)
toListDocIdMap :: DocIdMap v -> [(DocId, v)]
toListDocIdMap = map (first DocId) . IM.toList . unDIM
keysDocIdMap :: DocIdMap v -> [DocId]
keysDocIdMap = map DocId . IM.keys . unDIM
elemsDocIdMap :: DocIdMap v -> [v]
elemsDocIdMap = IM.elems . unDIM
instance Binary v => Binary (DocIdMap v) where
put = B.put . toListDocIdMap
get = B.get >>= return . fromListDocIdMap
-- ------------------------------------------------------------
#if sizeable == 1
instance (Typeable v, Sizeable v) => Sizeable (DocIdMap v) where
dataOf = dataOf . unDIM
bytesOf = bytesOf . unDIM
statsOf = statsOf . unDIM
#endif
-- ------------------------------------------------------------
# INLINE liftDIM #
# INLINE liftDIM2 #
# INLINE emptyDocIdMap #
{-# INLINE singletonDocIdMap #-}
# INLINE nullDocIdMap #
# INLINE memberDocIdMap #
# INLINE lookupDocIdMap #
# INLINE insertDocIdMap #
# INLINE deleteDocIdMap #
{-# INLINE insertWithDocIdMap #-}
# INLINE sizeDocIdMap #
# INLINE minKeyDocIdMap #
{-# INLINE maxKeyDocIdMap #-}
# INLINE isIntervallDocIdMap #
# INLINE unionDocIdMap #
# INLINE differenceDocIdMap #
# INLINE unionWithDocIdMap #
{-# INLINE intersectionWithDocIdMap #-}
# INLINE differenceWithDocIdMap #
# INLINE unionsWithDocIdMap #
# INLINE mapDocIdMap #
# INLINE filterDocIdMap #
# INLINE filterWithKeyDocIdMap #
# INLINE mapWithKeyDocIdMap #
# INLINE foldDocIdMap #
# INLINE foldWithKeyDocIdMap #
# INLINE fromListDocIdMap #
# INLINE toListDocIdMap #
# INLINE keysDocIdMap #
# INLINE elemsDocIdMap #
-- ------------------------------------------------------------
| null | https://raw.githubusercontent.com/fortytools/holumbus/4b2f7b832feab2715a4d48be0b07dca018eaa8e8/Holumbus-Searchengine/src/Holumbus/Index/Common/DocIdMap.hs | haskell | # LANGUAGE DeriveDataTypeable #
----------------------------------------------------------------------------
----------------------------------------------------------------------------
------------------------------------------------------------
------------------------------------------------------------
------------------------------------------------------------
# INLINE singletonDocIdMap #
# INLINE insertWithDocIdMap #
# INLINE maxKeyDocIdMap #
# INLINE intersectionWithDocIdMap #
------------------------------------------------------------ | # LANGUAGE CPP #
# LANGUAGE GeneralizedNewtypeDeriving #
|
Module : Holumbus . Index . Common . DocIdMap
Copyright : Copyright ( C ) 2013 , ,
License : MIT
Maintainer : ( )
Stability : experimental
Portability : none portable
DocId maps
Module : Holumbus.Index.Common.DocIdMap
Copyright : Copyright (C) 2013 Sebastian M. Schlatt, Timo B. Huebel, Uwe Schmidt
License : MIT
Maintainer : Timo B. Huebel ()
Stability : experimental
Portability: none portable
DocId maps
-}
module Holumbus.Index.Common.DocIdMap
( DocIdMap
, emptyDocIdMap
, singletonDocIdMap
, nullDocIdMap
, memberDocIdMap
, lookupDocIdMap
, insertDocIdMap
, deleteDocIdMap
, insertWithDocIdMap
, sizeDocIdMap
, minKeyDocIdMap
, maxKeyDocIdMap
, isIntervallDocIdMap
, unionDocIdMap
, intersectionDocIdMap
, differenceDocIdMap
, unionWithDocIdMap
, intersectionWithDocIdMap
, differenceWithDocIdMap
, unionsWithDocIdMap
, mapDocIdMap
, filterDocIdMap
, filterWithKeyDocIdMap
, mapWithKeyDocIdMap
, foldDocIdMap
, foldWithKeyDocIdMap
, fromListDocIdMap
, toListDocIdMap
, keysDocIdMap
, elemsDocIdMap
)
where
import Control.Arrow
import Control.DeepSeq
import Data.Binary (Binary (..))
import qualified Data.Binary as B
import Data.Foldable
import qualified Data.IntMap.Strict as IM
import Data.Typeable
import Holumbus.Index.Common.DocId
#if sizeable == 1
import Data.Size
#endif
newtype DocIdMap v = DIM { unDIM :: IM.IntMap v }
deriving (Eq, Show, Foldable, NFData, Typeable)
liftDIM :: (IM.IntMap v -> IM.IntMap r) ->
(DocIdMap v -> DocIdMap r)
liftDIM f = DIM . f . unDIM
liftDIM2 :: (IM.IntMap v -> IM.IntMap v -> IM.IntMap v) ->
(DocIdMap v -> DocIdMap v -> DocIdMap v)
liftDIM2 f x y = DIM $ f (unDIM x) (unDIM y)
emptyDocIdMap :: DocIdMap v
emptyDocIdMap = DIM $ IM.empty
singletonDocIdMap :: DocId -> v -> DocIdMap v
singletonDocIdMap d v = insertDocIdMap d v emptyDocIdMap
nullDocIdMap :: DocIdMap v -> Bool
nullDocIdMap = IM.null . unDIM
memberDocIdMap :: DocId -> DocIdMap v -> Bool
memberDocIdMap x = IM.member (theDocId x) . unDIM
lookupDocIdMap :: DocId -> DocIdMap v -> Maybe v
lookupDocIdMap x = IM.lookup (theDocId x) . unDIM
insertDocIdMap :: DocId -> v -> DocIdMap v -> DocIdMap v
insertDocIdMap x y = liftDIM $ IM.insert (theDocId x) y
deleteDocIdMap :: DocId -> DocIdMap v -> DocIdMap v
deleteDocIdMap x = liftDIM $ IM.delete (theDocId x)
insertWithDocIdMap :: (v -> v -> v) -> DocId -> v -> DocIdMap v -> DocIdMap v
insertWithDocIdMap f x y = liftDIM $ IM.insertWith f (theDocId x) y
sizeDocIdMap :: DocIdMap v -> Int
sizeDocIdMap = IM.size . unDIM
minKeyDocIdMap :: DocIdMap v -> DocId
minKeyDocIdMap = maybe nullDocId (DocId . fst . fst) . IM.minViewWithKey . unDIM
maxKeyDocIdMap :: DocIdMap v -> DocId
maxKeyDocIdMap = maybe nullDocId (DocId . fst . fst) . IM.maxViewWithKey . unDIM
isIntervallDocIdMap :: DocIdMap v -> Bool
isIntervallDocIdMap m = nullDocIdMap m
||
( fromEnum (theDocId (maxKeyDocIdMap m)) - fromEnum (theDocId (minKeyDocIdMap m))
== sizeDocIdMap m - 1
)
unionDocIdMap :: DocIdMap v -> DocIdMap v -> DocIdMap v
unionDocIdMap = liftDIM2 $ IM.union
intersectionDocIdMap :: DocIdMap v -> DocIdMap v -> DocIdMap v
intersectionDocIdMap = liftDIM2 $ IM.intersection
differenceDocIdMap :: DocIdMap v -> DocIdMap v -> DocIdMap v
differenceDocIdMap = liftDIM2 $ IM.difference
unionWithDocIdMap :: (v -> v -> v) -> DocIdMap v -> DocIdMap v -> DocIdMap v
unionWithDocIdMap f = liftDIM2 $ IM.unionWith f
intersectionWithDocIdMap :: (v -> v -> v) -> DocIdMap v -> DocIdMap v -> DocIdMap v
intersectionWithDocIdMap f = liftDIM2 $ IM.intersectionWith f
differenceWithDocIdMap :: (v -> v -> Maybe v) -> DocIdMap v -> DocIdMap v -> DocIdMap v
differenceWithDocIdMap f = liftDIM2 $ IM.differenceWith f
unionsWithDocIdMap :: (v -> v -> v) -> [DocIdMap v] -> DocIdMap v
unionsWithDocIdMap f = DIM . IM.unionsWith f . map unDIM
mapDocIdMap :: (v -> r) -> DocIdMap v -> DocIdMap r
mapDocIdMap f = liftDIM $ IM.map f
filterDocIdMap :: (v -> Bool) -> DocIdMap v -> DocIdMap v
filterDocIdMap p = liftDIM $ IM.filter p
filterWithKeyDocIdMap :: (DocId -> v -> Bool) -> DocIdMap v -> DocIdMap v
filterWithKeyDocIdMap p = liftDIM $ IM.filterWithKey (p . DocId)
mapWithKeyDocIdMap :: (DocId -> v -> r) -> DocIdMap v -> DocIdMap r
mapWithKeyDocIdMap f = liftDIM $ IM.mapWithKey (f . DocId)
foldDocIdMap :: (v -> b -> b) -> b -> DocIdMap v -> b
foldDocIdMap f u = IM.foldr f u . unDIM
foldWithKeyDocIdMap :: (DocId -> v -> b -> b) -> b -> DocIdMap v -> b
foldWithKeyDocIdMap f u = IM.foldrWithKey (f . DocId) u . unDIM
fromListDocIdMap :: [(DocId, v)] -> DocIdMap v
fromListDocIdMap = DIM . IM.fromList . map (first theDocId)
toListDocIdMap :: DocIdMap v -> [(DocId, v)]
toListDocIdMap = map (first DocId) . IM.toList . unDIM
keysDocIdMap :: DocIdMap v -> [DocId]
keysDocIdMap = map DocId . IM.keys . unDIM
elemsDocIdMap :: DocIdMap v -> [v]
elemsDocIdMap = IM.elems . unDIM
instance Binary v => Binary (DocIdMap v) where
put = B.put . toListDocIdMap
get = B.get >>= return . fromListDocIdMap
#if sizeable == 1
instance (Typeable v, Sizeable v) => Sizeable (DocIdMap v) where
dataOf = dataOf . unDIM
bytesOf = bytesOf . unDIM
statsOf = statsOf . unDIM
#endif
# INLINE liftDIM #
# INLINE liftDIM2 #
# INLINE emptyDocIdMap #
# INLINE nullDocIdMap #
# INLINE memberDocIdMap #
# INLINE lookupDocIdMap #
# INLINE insertDocIdMap #
# INLINE deleteDocIdMap #
# INLINE sizeDocIdMap #
# INLINE minKeyDocIdMap #
# INLINE isIntervallDocIdMap #
# INLINE unionDocIdMap #
# INLINE differenceDocIdMap #
# INLINE unionWithDocIdMap #
# INLINE differenceWithDocIdMap #
# INLINE unionsWithDocIdMap #
# INLINE mapDocIdMap #
# INLINE filterDocIdMap #
# INLINE filterWithKeyDocIdMap #
# INLINE mapWithKeyDocIdMap #
# INLINE foldDocIdMap #
# INLINE foldWithKeyDocIdMap #
# INLINE fromListDocIdMap #
# INLINE toListDocIdMap #
# INLINE keysDocIdMap #
# INLINE elemsDocIdMap #
|
4d2abdf243daeb50792bc8a64722a20a5f0b15aa938cec4e32eeb1bada5fad12 | thepower/tpnode | httpapi_playground.erl | -module(httpapi_playground).
-include("include/tplog.hrl").
-export([h/3]).
-import(tpnode_httpapi,[answer/1, answer/2]).
h(<<"OPTIONS">>, _, _Req) ->
{200, [], ""};
h(<<"GET">>, [<<"tx">>,<<"construct">>], _Req) ->
answer(#{
result => <<"ok">>,
text => <<"POST here tx">>,
example => #{
kind => generic,
from => naddress:encode(naddress:construct_public(1,2,3)),
payload =>
[#{amount => 10,cur => <<"TEST">>,purpose => transfer},
#{amount => 1,cur => <<"TEST">>,purpose => srcfee}],
seq => 1,
t => 1512450000,
to => naddress:encode(naddress:construct_public(1,2,3)),
txext => #{
message=><<"preved12345678901234567890123456789123456789">>
},
ver => 2
}
});
h(<<"POST">>, [<<"tx">>,<<"validate">>], Req) ->
#{<<"tx">>:=B64Tx}=apixiom:bodyjs(Req),
Bin=case B64Tx of
<<"0x",Hex/binary>> -> hex:decode(Hex);
_ -> base64:decode(B64Tx)
end,
Res0=#{
dcontainer => tx_visualizer:show(Bin)
},
Res1=try
{ok,#{"body":=Body}}=msgpack:unpack(Bin),
Res0#{
dtx => tx_visualizer:show(Body)
}
catch Ec:Ee ->
Res0#{
dtx_error=>iolist_to_binary(io_lib:format("body can't be parsed ~p:~p",[Ec,Ee]))
}
end,
Res2=try
#{body:=_}=Tx=tx:unpack(Bin),
Res1#{
tx=>Tx
}
catch Ec1:Ee1 ->
Res1#{
tx_error=><<"transaction can't be parsed">>,
ec=>Ec1,
ee=>iolist_to_binary(io_lib:format("~p",[Ee1]))
}
end,
BinPacker=tpnode_httpapi:packer(Req,hex),
Res3=try
T=maps:get(tx,Res2),
case tx:verify(T, ['nocheck_ledger']) of
{ok, V} ->
Res2#{
verify=>tpnode_httpapi:prettify_tx(V,BinPacker)
};
{error, Any} ->
Res2#{
verify_error=>true,
verify=>tpnode_httpapi:prettify_tx(Any,BinPacker)
}
end
catch _:_ ->
Res2#{
verify_error=><<"transaction can't be verified">>
}
end,
Res=maps:put(tx,tpnode_httpapi:prettify_tx(maps:get(tx,Res3,#{}),BinPacker),Res3),
EHF=fun([{Type, Str}|Tokens],{parser, State, Handler, Stack}, Conf) ->
Conf1=jsx_config:list_to_config(Conf),
jsx_parser:resume([{Type, hex:encode(Str)}|Tokens],
State, Handler, Stack, Conf1)
end,
maps:fold(
fun(K,V,_) ->
?LOG_INFO("~s Res ~p",[K,V]),
?LOG_INFO("~s Res ~s",[K,jsx:encode(V)])
end, [], Res),
tpnode_httpapi:answer(Res,
#{jsx=>[ strict, {error_handler, EHF} ]}
);
h(<<"POST">>, [<<"tx">>,<<"construct">>], Req) ->
Body=apixiom:bodyjs(Req),
Packer=fun(Bin) -> base64:encode(Bin) end,
try
Body1=maps:fold(
fun(<<"from">>,Addr,Acc) ->
maps:put(from,naddress:decode(Addr),Acc);
(<<"to">>,Addr,Acc) ->
maps:put(to,naddress:decode(Addr),Acc);
(<<"kind">>,Kind,Acc) ->
case lists:member(Kind,[<<"generic">>,<<"register">>]) of
true ->
maps:put(kind,erlang:binary_to_atom(Kind,utf8),Acc);
false ->
throw({tx,<<"Bad kind">>})
end;
(<<"payload">>,Val,Acc) ->
maps:put(payload,
lists:map(
fun(Purpose) ->
maps:fold(
fun(<<"purpose">>,V,A) ->
maps:put(purpose,b2a(V,
[
<<"srcfee">>,
<<"transfer">>
]
),A);
(K,V,A) ->
maps:put(b2a(K),V,A)
end,#{}, Purpose)
end, Val),Acc);
(Key,Val,Acc) ->
maps:put(b2a(Key),Val,Acc)
end, #{}, Body),
#{body:=TxBody}=Tx=tx:construct_tx(Body1),
answer(#{
result => <<"ok">>,
dtx =>tx_visualizer:show(TxBody),
tx=>tpnode_httpapi:prettify_tx(
Tx,
Packer),
ptx=>base64:encode(tx:pack(Tx))
})
catch throw:{tx,Reason} ->
answer(#{
result => <<"error">>,
reason => Reason
})
end;
h(<<"GET">>, [<<"miner">>, TAddr], _Req) ->
answer(
#{
result => <<"ok">>,
mined => naddress:mine(binary_to_integer(TAddr))
}).
b2a(Bin) ->
Known=[
<<"seq">>,
<<"t">>,
<<"amount">>,
<<"register">>,
<<"generic">>,
<<"cur">>,
<<"ver">>
],
b2a(Bin,Known).
b2a(Bin,Known) ->
case lists:member(Bin,Known) of
true ->
erlang:binary_to_atom(Bin,utf8);
false ->
Bin
end.
| null | https://raw.githubusercontent.com/thepower/tpnode/6212422bc9061d44d150ef2d777d6e694396ed15/apps/tpnode/src/httpapi_playground.erl | erlang | -module(httpapi_playground).
-include("include/tplog.hrl").
-export([h/3]).
-import(tpnode_httpapi,[answer/1, answer/2]).
h(<<"OPTIONS">>, _, _Req) ->
{200, [], ""};
h(<<"GET">>, [<<"tx">>,<<"construct">>], _Req) ->
answer(#{
result => <<"ok">>,
text => <<"POST here tx">>,
example => #{
kind => generic,
from => naddress:encode(naddress:construct_public(1,2,3)),
payload =>
[#{amount => 10,cur => <<"TEST">>,purpose => transfer},
#{amount => 1,cur => <<"TEST">>,purpose => srcfee}],
seq => 1,
t => 1512450000,
to => naddress:encode(naddress:construct_public(1,2,3)),
txext => #{
message=><<"preved12345678901234567890123456789123456789">>
},
ver => 2
}
});
h(<<"POST">>, [<<"tx">>,<<"validate">>], Req) ->
#{<<"tx">>:=B64Tx}=apixiom:bodyjs(Req),
Bin=case B64Tx of
<<"0x",Hex/binary>> -> hex:decode(Hex);
_ -> base64:decode(B64Tx)
end,
Res0=#{
dcontainer => tx_visualizer:show(Bin)
},
Res1=try
{ok,#{"body":=Body}}=msgpack:unpack(Bin),
Res0#{
dtx => tx_visualizer:show(Body)
}
catch Ec:Ee ->
Res0#{
dtx_error=>iolist_to_binary(io_lib:format("body can't be parsed ~p:~p",[Ec,Ee]))
}
end,
Res2=try
#{body:=_}=Tx=tx:unpack(Bin),
Res1#{
tx=>Tx
}
catch Ec1:Ee1 ->
Res1#{
tx_error=><<"transaction can't be parsed">>,
ec=>Ec1,
ee=>iolist_to_binary(io_lib:format("~p",[Ee1]))
}
end,
BinPacker=tpnode_httpapi:packer(Req,hex),
Res3=try
T=maps:get(tx,Res2),
case tx:verify(T, ['nocheck_ledger']) of
{ok, V} ->
Res2#{
verify=>tpnode_httpapi:prettify_tx(V,BinPacker)
};
{error, Any} ->
Res2#{
verify_error=>true,
verify=>tpnode_httpapi:prettify_tx(Any,BinPacker)
}
end
catch _:_ ->
Res2#{
verify_error=><<"transaction can't be verified">>
}
end,
Res=maps:put(tx,tpnode_httpapi:prettify_tx(maps:get(tx,Res3,#{}),BinPacker),Res3),
EHF=fun([{Type, Str}|Tokens],{parser, State, Handler, Stack}, Conf) ->
Conf1=jsx_config:list_to_config(Conf),
jsx_parser:resume([{Type, hex:encode(Str)}|Tokens],
State, Handler, Stack, Conf1)
end,
maps:fold(
fun(K,V,_) ->
?LOG_INFO("~s Res ~p",[K,V]),
?LOG_INFO("~s Res ~s",[K,jsx:encode(V)])
end, [], Res),
tpnode_httpapi:answer(Res,
#{jsx=>[ strict, {error_handler, EHF} ]}
);
h(<<"POST">>, [<<"tx">>,<<"construct">>], Req) ->
Body=apixiom:bodyjs(Req),
Packer=fun(Bin) -> base64:encode(Bin) end,
try
Body1=maps:fold(
fun(<<"from">>,Addr,Acc) ->
maps:put(from,naddress:decode(Addr),Acc);
(<<"to">>,Addr,Acc) ->
maps:put(to,naddress:decode(Addr),Acc);
(<<"kind">>,Kind,Acc) ->
case lists:member(Kind,[<<"generic">>,<<"register">>]) of
true ->
maps:put(kind,erlang:binary_to_atom(Kind,utf8),Acc);
false ->
throw({tx,<<"Bad kind">>})
end;
(<<"payload">>,Val,Acc) ->
maps:put(payload,
lists:map(
fun(Purpose) ->
maps:fold(
fun(<<"purpose">>,V,A) ->
maps:put(purpose,b2a(V,
[
<<"srcfee">>,
<<"transfer">>
]
),A);
(K,V,A) ->
maps:put(b2a(K),V,A)
end,#{}, Purpose)
end, Val),Acc);
(Key,Val,Acc) ->
maps:put(b2a(Key),Val,Acc)
end, #{}, Body),
#{body:=TxBody}=Tx=tx:construct_tx(Body1),
answer(#{
result => <<"ok">>,
dtx =>tx_visualizer:show(TxBody),
tx=>tpnode_httpapi:prettify_tx(
Tx,
Packer),
ptx=>base64:encode(tx:pack(Tx))
})
catch throw:{tx,Reason} ->
answer(#{
result => <<"error">>,
reason => Reason
})
end;
h(<<"GET">>, [<<"miner">>, TAddr], _Req) ->
answer(
#{
result => <<"ok">>,
mined => naddress:mine(binary_to_integer(TAddr))
}).
b2a(Bin) ->
Known=[
<<"seq">>,
<<"t">>,
<<"amount">>,
<<"register">>,
<<"generic">>,
<<"cur">>,
<<"ver">>
],
b2a(Bin,Known).
b2a(Bin,Known) ->
case lists:member(Bin,Known) of
true ->
erlang:binary_to_atom(Bin,utf8);
false ->
Bin
end.
| |
12289ea98c111bcb00d6bf61a347d3e05f77b9a97e3c644c054a65325d781286 | LambdaHack/LambdaHack | KeyBindings.hs | {-# LANGUAGE RankNTypes #-}
-- | Verifying, aggregating and displaying binding of keys to commands.
module Game.LambdaHack.Client.UI.KeyBindings
( keyHelp, okxsN
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.EnumMap.Strict as EM
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Game.LambdaHack.Client.UI.Content.Input
import Game.LambdaHack.Client.UI.Content.Screen
import Game.LambdaHack.Client.UI.ContentClientUI
import Game.LambdaHack.Client.UI.HumanCmd
import qualified Game.LambdaHack.Client.UI.Key as K
import Game.LambdaHack.Client.UI.Overlay
import Game.LambdaHack.Client.UI.PointUI
import Game.LambdaHack.Client.UI.Slideshow
import qualified Game.LambdaHack.Definition.Color as Color
-- | Produce a set of help/menu screens from the key bindings.
--
-- When the intro screen mentions KP_5, this really is KP_Begin,
-- but since that is harder to understand we assume a different, non-default
-- state of NumLock in the help text than in the code that handles keys.
keyHelp :: CCUI -> FontSetup -> [(Text, OKX)]
keyHelp CCUI{ coinput=coinput@InputContent{..}
, coscreen=ScreenContent{rwidth, rheight} } FontSetup{..} =
let
movBlurb1 =
[ "Walk throughout a level with mouse or numeric keypad (right diagram below)"
, "or the Vi editor keys (middle) or the left-hand movement keys (left). Run until"
, "disturbed with Shift or Control. Go-to a position with LMB (left mouse button)."
, "In aiming mode, the same keys (and mouse) move the aiming crosshair."
]
movSchema =
[ " q w e y k u 7 8 9"
, " \\|/ \\|/ \\|/"
, " a-s-d h-.-l 4-5-6"
, " /|\\ /|\\ /|\\"
, " z x c b j n 1 2 3"
]
movBlurb2 =
[ "Press `KP_5` (`5` on keypad) to wait, bracing for impact, which reduces any"
, "damage taken and prevents displacement by foes. Press `S-KP_5` or `C-KP_5`"
, "(the same key with Shift or Control) to lurk 0.1 of a turn, without bracing."
, ""
, "Displace enemies by running into them with Shift/Control or S-LMB. Search,"
, "open, descend and melee by bumping into walls, doors, stairs and enemies."
, "The best, and not on cooldown, melee weapon is automatically chosen"
, "for attack from your equipment and from among your body parts."
]
minimalBlurb =
[ "The following few commands, joined with the movement and running keys,"
, "let you accomplish almost anything in the game, though not necessarily"
, "with the fewest keystrokes. You can also play the game exclusively"
, "with a mouse, or both mouse and keyboard (e.g., mouse for go-to"
, "and terrain inspection and keyboard for everything else). Lastly,"
, "you can select a command with arrows or mouse directly from the help"
, "screen or the dashboard and execute it on the spot."
]
itemAllEnding =
[ "Note how lower case item commands (stash item, equip item) place items"
, "into a particular item store, while upper case item commands (manage Inventory,"
, "manage Outfit) open management menu for a store. Once a store menu is opened,"
, "you can switch stores with `<` and `>`, so the multiple commands only determine"
, "the starting item store. Each store is accessible from the dashboard as well."
]
mouseBasicsBlurb =
[ "Screen area and UI mode (exploration/aiming) determine mouse click"
, "effects. Here we give an overview of effects of each button over"
, "the game map area. The list includes not only left and right buttons,"
, "but also the optional middle mouse button (MMB) and the mouse wheel,"
, "which is also used over menus to move selection. For mice without RMB,"
, "one can use Control key with LMB and for mice without MMB, one can use"
, "C-RMB or C-S-LMB."
]
mouseAreasBlurb =
[ "Next we show mouse button effects per screen area, in exploration and"
, "(if different) aiming mode. Note that mouse is optional. Keyboard suffices,"
, "occasionally requiring a lookup for an obscure command key in help screens."
]
mouseAreasMini =
[ "Mouse button effects per screen area, in exploration and in aiming modes"
]
movTextEnd = "Press SPACE or PGDN to advance or ESC to see the map again."
lastHelpEnd = "Use PGUP to go back and ESC to see the map again."
seeAlso = "For more playing instructions see file PLAYING.md."
offsetCol2 = 12
pickLeaderDescription =
[ fmt offsetCol2 "0, 1 ... 9"
"pick a particular actor as the new pointman"
]
casualDescription = "Minimal cheat sheet for casual play"
fmt0 n k h = T.justifyLeft n ' ' k <> " " <> h
fmt n k h = " " <> fmt0 n k h
keyCaption = fmt offsetCol2 "keys" "command"
mouseOverviewCaption = fmt offsetCol2 "keys" "command (exploration/aiming)"
spLen = textSize monoFont " "
okxs cat headers footers = xytranslateOKX spLen 0 $
okxsN coinput monoFont propFont offsetCol2 (const False)
True cat headers footers
mergeOKX :: OKX -> OKX -> OKX
mergeOKX okx1 okx2 =
let off = 1 + maxYofFontOverlayMap (fst okx1)
in sideBySideOKX 0 off okx1 okx2
catLength cat = length $ filter (\(_, (cats, desc, _)) ->
cat `elem` cats && (desc /= "" || CmdInternal `elem` cats)) bcmdList
keyM = 13
keyB = 31
truncatem b = if T.length b > keyB
then T.take (keyB - 1) b <> "$"
else b
fmm a b c = fmt (keyM + 1) a $ fmt0 keyB (truncatem b) (truncatem c)
areaCaption t = fmm t "LMB (left mouse button)" "RMB (right mouse button)"
keySel :: (forall a. (a, a) -> a) -> K.KM
-> [(CmdArea, KeyOrSlot, Text)]
keySel sel key =
let cmd = case M.lookup key bcmdMap of
Just (_, _, cmd2) -> cmd2
Nothing -> error $ "" `showFailure` key
caCmds = case cmd of
ByAimMode AimModeCmd{exploration=ByArea lexp, aiming=ByArea laim} ->
sort $ sel (lexp, laim \\ lexp)
_ -> error $ "" `showFailure` cmd
caMakeChoice (ca, cmd2) =
let (km, desc) = case M.lookup cmd2 brevMap of
Just ks ->
let descOfKM km2 = case M.lookup km2 bcmdMap of
Just (_, "", _) -> Nothing
Just (_, desc2, _) -> Just (km2, desc2)
Nothing -> error $ "" `showFailure` km2
in case mapMaybe descOfKM ks of
[] -> error $ "" `showFailure` (ks, cmd2)
kmdesc3 : _ -> kmdesc3
Nothing -> (key, "(not described:" <+> tshow cmd2 <> ")")
in (ca, Left km, desc)
in map caMakeChoice caCmds
doubleIfSquare n | isSquareFont monoFont = 2 * n
| otherwise = n
okm :: (forall a. (a, a) -> a) -> K.KM -> K.KM -> [Text] -> OKX
okm sel key1 key2 header =
let kst1 = keySel sel key1
kst2 = keySel sel key2
f (ca1, Left km1, _) (ca2, Left km2, _) y =
assert (ca1 == ca2 `blame` (ca1, ca2, km1, km2, kst1, kst2))
[ (Left km1, ( PointUI (doubleIfSquare $ keyM + 4) y
, ButtonWidth monoFont keyB ))
, (Left km2, ( PointUI (doubleIfSquare $ keyB + keyM + 5) y
, ButtonWidth monoFont keyB )) ]
f c d e = error $ "" `showFailure` (c, d, e)
kxs = concat $ zipWith3 f kst1 kst2 [1 + length header..]
menuLeft = map (\(ca1, _, _) -> textToAL $ areaDescription ca1) kst1
menuMiddle = map (\(_, _, desc) -> textToAL desc) kst1
menuRight = map (\(_, _, desc) -> textToAL desc) kst2
y0 = 1 + length header
in ( EM.unionsWith (++)
[ typesetInMono $ "" : header
, EM.singleton monoFont
$ typesetXY (doubleIfSquare 2, y0) menuLeft
, EM.singleton propFont
$ typesetXY (doubleIfSquare $ keyM + 4, y0) menuMiddle
, EM.singleton propFont
$ typesetXY (doubleIfSquare $ keyB + keyM + 5, y0) menuRight ]
, kxs )
typesetInSquare :: [Text] -> FontOverlayMap
typesetInSquare =
EM.singleton squareFont . typesetXY (spLen, 0) . map textToAL
typesetInMono :: [Text] -> FontOverlayMap
typesetInMono =
EM.singleton monoFont . typesetXY (spLen, 0) . map textToAL
typesetInProp :: [Text] -> FontOverlayMap
typesetInProp =
EM.singleton propFont . typesetXY (spLen, 0) . map textToAL
sideBySide :: [(Text, OKX)] -> [(Text, OKX)]
sideBySide ((_t1, okx1) : (t2, okx2) : rest) | not (isSquareFont propFont) =
(t2, sideBySideOKX rwidth 0 okx1 okx2) : sideBySide rest
sideBySide l = l
in sideBySide $ concat
[ if catLength CmdMinimal
+ length movBlurb1 + length movSchema + length movBlurb2
+ length minimalBlurb
+ 6 > rheight then
[ ( movTextEnd
, mergeOKX
(mergeOKX ( typesetInMono ["", casualDescription <+> "(1/2)", ""]
, [] )
(mergeOKX (typesetInProp movBlurb1, [])
(typesetInSquare $ "" : movSchema, [])))
(typesetInProp $ "" : movBlurb2, []) )
, ( movTextEnd
, okxs CmdMinimal
( ["", casualDescription <+> "(2/2)", ""]
, minimalBlurb ++ [""]
, [keyCaption] )
([], []) ) ]
else
[ ( movTextEnd
, mergeOKX
(mergeOKX ( typesetInMono ["", casualDescription, ""]
, [] )
(mergeOKX (typesetInProp movBlurb1, [])
(typesetInSquare $ "" : movSchema, [])))
(okxs CmdMinimal
( []
, [""] ++ movBlurb2 ++ [""]
++ minimalBlurb ++ [""]
, [keyCaption] )
([], [""])) ) ]
, if 45 > rheight then
[ ( movTextEnd
, let (ls, _) = okxs CmdMouse
( ["", "Optional mouse commands", ""]
, mouseBasicsBlurb ++ [""]
, [mouseOverviewCaption] )
([], [])
in (ls, []) ) -- don't capture mouse wheel, etc.
, ( movTextEnd
, mergeOKX
(typesetInMono $ "" : mouseAreasMini, [])
(mergeOKX
(okm fst K.leftButtonReleaseKM K.rightButtonReleaseKM
[areaCaption "Exploration"])
(okm snd K.leftButtonReleaseKM K.rightButtonReleaseKM
[areaCaption "Aiming Mode"])) ) ]
else
[ ( movTextEnd
, let (ls, _) = okxs CmdMouse
( ["", "Optional mouse commands", ""]
, mouseBasicsBlurb ++ [""]
, [mouseOverviewCaption] )
([], [])
okx0 = (ls, []) -- don't capture mouse wheel, etc.
in mergeOKX
(mergeOKX
okx0
(typesetInProp $ "" : mouseAreasBlurb, []))
(mergeOKX
(okm fst K.leftButtonReleaseKM K.rightButtonReleaseKM
[areaCaption "Exploration"])
(okm snd K.leftButtonReleaseKM K.rightButtonReleaseKM
[areaCaption "Aiming Mode"] )) ) ]
, if catLength CmdItem + catLength CmdMove + 9 + 9 > rheight then
[ ( movTextEnd
, okxs CmdItem
(["", categoryDescription CmdItem], [], ["", keyCaption])
([], "" : itemAllEnding) )
, ( movTextEnd
, okxs CmdMove
(["", categoryDescription CmdMove], [], ["", keyCaption])
(pickLeaderDescription, []) ) ]
else
[ ( movTextEnd
, mergeOKX
(okxs CmdItem
(["", categoryDescription CmdItem], [], ["", keyCaption])
([], "" : itemAllEnding))
(okxs CmdMove
( ["", "", categoryDescription CmdMove]
, []
, ["", keyCaption] )
(pickLeaderDescription, [""])) ) ]
, if catLength CmdAim + catLength CmdMeta + 9 > rheight then
[ ( movTextEnd
, okxs CmdAim
(["", categoryDescription CmdAim], [], ["", keyCaption])
([], []) )
, ( lastHelpEnd
, okxs CmdMeta
(["", categoryDescription CmdMeta], [], ["", keyCaption])
([], ["", seeAlso]) ) ]
else
[ ( lastHelpEnd
, mergeOKX
(okxs CmdAim
(["", categoryDescription CmdAim], [], ["", keyCaption])
([], []))
(okxs CmdMeta
( ["", "", categoryDescription CmdMeta]
, []
, ["", keyCaption] )
([], ["", seeAlso, ""])) ) ]
]
-- | Turn the specified portion of bindings into a menu.
--
The length of the button may be wrong if the two supplied fonts
-- have very different widths.
okxsN :: InputContent -> DisplayFont -> DisplayFont -> Int -> (HumanCmd -> Bool)
-> Bool -> CmdCategory -> ([Text], [Text], [Text]) -> ([Text], [Text])
-> OKX
okxsN InputContent{..} labFont descFont offsetCol2 greyedOut
showManyKeys cat (headerMono1, headerProp, headerMono2)
(footerMono, footerProp) =
let fmt k h = (T.singleton '\x00a0' <> k, h)
coImage :: HumanCmd -> [K.KM]
coImage cmd = M.findWithDefault (error $ "" `showFailure` cmd) cmd brevMap
disp = T.intercalate " or " . map (T.pack . K.showKM)
keyKnown km = case K.key km of
K.Unknown{} -> False
_ -> True
keys :: [(KeyOrSlot, (Bool, (Text, Text)))]
keys = [ (Left km, (greyedOut cmd, fmt keyNames desc))
| (_, (cats, desc, cmd)) <- bcmdList
, let kms = coImage cmd
knownKeys = filter keyKnown kms
keyNames =
disp $ (if showManyKeys then id else take 1) knownKeys
kmsRes = if desc == "" then knownKeys else kms
km = case kmsRes of
[] -> K.escKM
km1 : _ -> km1
, cat `elem` cats
, desc /= "" || CmdInternal `elem` cats]
spLen = textSize labFont " "
f (ks, (_, (_, t2))) y =
(ks, ( PointUI spLen y
, ButtonWidth labFont (offsetCol2 + 2 + T.length t2 - 1)))
kxs = zipWith f keys
[length headerMono1 + length headerProp + length headerMono2 ..]
ts = map (\t -> (False, (t, ""))) headerMono1
++ map (\t -> (False, ("", t))) headerProp
++ map (\t -> (False, (t, ""))) headerMono2
++ map snd keys
++ map (\t -> (False, (t, ""))) footerMono
++ map (\t -> (False, ("", t))) footerProp
greyToAL (b, (t1, t2)) =
if b
then let al1 = textFgToAL Color.BrBlack t1
in (al1, ( if T.null t1 then 0 else spLen * (offsetCol2 + 2)
, textFgToAL Color.BrBlack t2 ))
else let al1 = textToAL t1
in (al1, ( if T.null t1 then 0 else spLen * (offsetCol2 + 2)
, textToAL t2 ))
(greyLab, greyDesc) = unzip $ map greyToAL ts
in ( EM.insertWith (++) descFont (offsetOverlayX greyDesc)
$ EM.singleton labFont (offsetOverlay greyLab)
, kxs )
| null | https://raw.githubusercontent.com/LambdaHack/LambdaHack/84cda1ca86056bbda0ca2dfc10f63260a82b73c5/engine-src/Game/LambdaHack/Client/UI/KeyBindings.hs | haskell | # LANGUAGE RankNTypes #
| Verifying, aggregating and displaying binding of keys to commands.
| Produce a set of help/menu screens from the key bindings.
When the intro screen mentions KP_5, this really is KP_Begin,
but since that is harder to understand we assume a different, non-default
state of NumLock in the help text than in the code that handles keys.
don't capture mouse wheel, etc.
don't capture mouse wheel, etc.
| Turn the specified portion of bindings into a menu.
have very different widths. | module Game.LambdaHack.Client.UI.KeyBindings
( keyHelp, okxsN
) where
import Prelude ()
import Game.LambdaHack.Core.Prelude
import qualified Data.EnumMap.Strict as EM
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Game.LambdaHack.Client.UI.Content.Input
import Game.LambdaHack.Client.UI.Content.Screen
import Game.LambdaHack.Client.UI.ContentClientUI
import Game.LambdaHack.Client.UI.HumanCmd
import qualified Game.LambdaHack.Client.UI.Key as K
import Game.LambdaHack.Client.UI.Overlay
import Game.LambdaHack.Client.UI.PointUI
import Game.LambdaHack.Client.UI.Slideshow
import qualified Game.LambdaHack.Definition.Color as Color
keyHelp :: CCUI -> FontSetup -> [(Text, OKX)]
keyHelp CCUI{ coinput=coinput@InputContent{..}
, coscreen=ScreenContent{rwidth, rheight} } FontSetup{..} =
let
movBlurb1 =
[ "Walk throughout a level with mouse or numeric keypad (right diagram below)"
, "or the Vi editor keys (middle) or the left-hand movement keys (left). Run until"
, "disturbed with Shift or Control. Go-to a position with LMB (left mouse button)."
, "In aiming mode, the same keys (and mouse) move the aiming crosshair."
]
movSchema =
[ " q w e y k u 7 8 9"
, " \\|/ \\|/ \\|/"
, " a-s-d h-.-l 4-5-6"
, " /|\\ /|\\ /|\\"
, " z x c b j n 1 2 3"
]
movBlurb2 =
[ "Press `KP_5` (`5` on keypad) to wait, bracing for impact, which reduces any"
, "damage taken and prevents displacement by foes. Press `S-KP_5` or `C-KP_5`"
, "(the same key with Shift or Control) to lurk 0.1 of a turn, without bracing."
, ""
, "Displace enemies by running into them with Shift/Control or S-LMB. Search,"
, "open, descend and melee by bumping into walls, doors, stairs and enemies."
, "The best, and not on cooldown, melee weapon is automatically chosen"
, "for attack from your equipment and from among your body parts."
]
minimalBlurb =
[ "The following few commands, joined with the movement and running keys,"
, "let you accomplish almost anything in the game, though not necessarily"
, "with the fewest keystrokes. You can also play the game exclusively"
, "with a mouse, or both mouse and keyboard (e.g., mouse for go-to"
, "and terrain inspection and keyboard for everything else). Lastly,"
, "you can select a command with arrows or mouse directly from the help"
, "screen or the dashboard and execute it on the spot."
]
itemAllEnding =
[ "Note how lower case item commands (stash item, equip item) place items"
, "into a particular item store, while upper case item commands (manage Inventory,"
, "manage Outfit) open management menu for a store. Once a store menu is opened,"
, "you can switch stores with `<` and `>`, so the multiple commands only determine"
, "the starting item store. Each store is accessible from the dashboard as well."
]
mouseBasicsBlurb =
[ "Screen area and UI mode (exploration/aiming) determine mouse click"
, "effects. Here we give an overview of effects of each button over"
, "the game map area. The list includes not only left and right buttons,"
, "but also the optional middle mouse button (MMB) and the mouse wheel,"
, "which is also used over menus to move selection. For mice without RMB,"
, "one can use Control key with LMB and for mice without MMB, one can use"
, "C-RMB or C-S-LMB."
]
mouseAreasBlurb =
[ "Next we show mouse button effects per screen area, in exploration and"
, "(if different) aiming mode. Note that mouse is optional. Keyboard suffices,"
, "occasionally requiring a lookup for an obscure command key in help screens."
]
mouseAreasMini =
[ "Mouse button effects per screen area, in exploration and in aiming modes"
]
movTextEnd = "Press SPACE or PGDN to advance or ESC to see the map again."
lastHelpEnd = "Use PGUP to go back and ESC to see the map again."
seeAlso = "For more playing instructions see file PLAYING.md."
offsetCol2 = 12
pickLeaderDescription =
[ fmt offsetCol2 "0, 1 ... 9"
"pick a particular actor as the new pointman"
]
casualDescription = "Minimal cheat sheet for casual play"
fmt0 n k h = T.justifyLeft n ' ' k <> " " <> h
fmt n k h = " " <> fmt0 n k h
keyCaption = fmt offsetCol2 "keys" "command"
mouseOverviewCaption = fmt offsetCol2 "keys" "command (exploration/aiming)"
spLen = textSize monoFont " "
okxs cat headers footers = xytranslateOKX spLen 0 $
okxsN coinput monoFont propFont offsetCol2 (const False)
True cat headers footers
mergeOKX :: OKX -> OKX -> OKX
mergeOKX okx1 okx2 =
let off = 1 + maxYofFontOverlayMap (fst okx1)
in sideBySideOKX 0 off okx1 okx2
catLength cat = length $ filter (\(_, (cats, desc, _)) ->
cat `elem` cats && (desc /= "" || CmdInternal `elem` cats)) bcmdList
keyM = 13
keyB = 31
truncatem b = if T.length b > keyB
then T.take (keyB - 1) b <> "$"
else b
fmm a b c = fmt (keyM + 1) a $ fmt0 keyB (truncatem b) (truncatem c)
areaCaption t = fmm t "LMB (left mouse button)" "RMB (right mouse button)"
keySel :: (forall a. (a, a) -> a) -> K.KM
-> [(CmdArea, KeyOrSlot, Text)]
keySel sel key =
let cmd = case M.lookup key bcmdMap of
Just (_, _, cmd2) -> cmd2
Nothing -> error $ "" `showFailure` key
caCmds = case cmd of
ByAimMode AimModeCmd{exploration=ByArea lexp, aiming=ByArea laim} ->
sort $ sel (lexp, laim \\ lexp)
_ -> error $ "" `showFailure` cmd
caMakeChoice (ca, cmd2) =
let (km, desc) = case M.lookup cmd2 brevMap of
Just ks ->
let descOfKM km2 = case M.lookup km2 bcmdMap of
Just (_, "", _) -> Nothing
Just (_, desc2, _) -> Just (km2, desc2)
Nothing -> error $ "" `showFailure` km2
in case mapMaybe descOfKM ks of
[] -> error $ "" `showFailure` (ks, cmd2)
kmdesc3 : _ -> kmdesc3
Nothing -> (key, "(not described:" <+> tshow cmd2 <> ")")
in (ca, Left km, desc)
in map caMakeChoice caCmds
doubleIfSquare n | isSquareFont monoFont = 2 * n
| otherwise = n
okm :: (forall a. (a, a) -> a) -> K.KM -> K.KM -> [Text] -> OKX
okm sel key1 key2 header =
let kst1 = keySel sel key1
kst2 = keySel sel key2
f (ca1, Left km1, _) (ca2, Left km2, _) y =
assert (ca1 == ca2 `blame` (ca1, ca2, km1, km2, kst1, kst2))
[ (Left km1, ( PointUI (doubleIfSquare $ keyM + 4) y
, ButtonWidth monoFont keyB ))
, (Left km2, ( PointUI (doubleIfSquare $ keyB + keyM + 5) y
, ButtonWidth monoFont keyB )) ]
f c d e = error $ "" `showFailure` (c, d, e)
kxs = concat $ zipWith3 f kst1 kst2 [1 + length header..]
menuLeft = map (\(ca1, _, _) -> textToAL $ areaDescription ca1) kst1
menuMiddle = map (\(_, _, desc) -> textToAL desc) kst1
menuRight = map (\(_, _, desc) -> textToAL desc) kst2
y0 = 1 + length header
in ( EM.unionsWith (++)
[ typesetInMono $ "" : header
, EM.singleton monoFont
$ typesetXY (doubleIfSquare 2, y0) menuLeft
, EM.singleton propFont
$ typesetXY (doubleIfSquare $ keyM + 4, y0) menuMiddle
, EM.singleton propFont
$ typesetXY (doubleIfSquare $ keyB + keyM + 5, y0) menuRight ]
, kxs )
typesetInSquare :: [Text] -> FontOverlayMap
typesetInSquare =
EM.singleton squareFont . typesetXY (spLen, 0) . map textToAL
typesetInMono :: [Text] -> FontOverlayMap
typesetInMono =
EM.singleton monoFont . typesetXY (spLen, 0) . map textToAL
typesetInProp :: [Text] -> FontOverlayMap
typesetInProp =
EM.singleton propFont . typesetXY (spLen, 0) . map textToAL
sideBySide :: [(Text, OKX)] -> [(Text, OKX)]
sideBySide ((_t1, okx1) : (t2, okx2) : rest) | not (isSquareFont propFont) =
(t2, sideBySideOKX rwidth 0 okx1 okx2) : sideBySide rest
sideBySide l = l
in sideBySide $ concat
[ if catLength CmdMinimal
+ length movBlurb1 + length movSchema + length movBlurb2
+ length minimalBlurb
+ 6 > rheight then
[ ( movTextEnd
, mergeOKX
(mergeOKX ( typesetInMono ["", casualDescription <+> "(1/2)", ""]
, [] )
(mergeOKX (typesetInProp movBlurb1, [])
(typesetInSquare $ "" : movSchema, [])))
(typesetInProp $ "" : movBlurb2, []) )
, ( movTextEnd
, okxs CmdMinimal
( ["", casualDescription <+> "(2/2)", ""]
, minimalBlurb ++ [""]
, [keyCaption] )
([], []) ) ]
else
[ ( movTextEnd
, mergeOKX
(mergeOKX ( typesetInMono ["", casualDescription, ""]
, [] )
(mergeOKX (typesetInProp movBlurb1, [])
(typesetInSquare $ "" : movSchema, [])))
(okxs CmdMinimal
( []
, [""] ++ movBlurb2 ++ [""]
++ minimalBlurb ++ [""]
, [keyCaption] )
([], [""])) ) ]
, if 45 > rheight then
[ ( movTextEnd
, let (ls, _) = okxs CmdMouse
( ["", "Optional mouse commands", ""]
, mouseBasicsBlurb ++ [""]
, [mouseOverviewCaption] )
([], [])
, ( movTextEnd
, mergeOKX
(typesetInMono $ "" : mouseAreasMini, [])
(mergeOKX
(okm fst K.leftButtonReleaseKM K.rightButtonReleaseKM
[areaCaption "Exploration"])
(okm snd K.leftButtonReleaseKM K.rightButtonReleaseKM
[areaCaption "Aiming Mode"])) ) ]
else
[ ( movTextEnd
, let (ls, _) = okxs CmdMouse
( ["", "Optional mouse commands", ""]
, mouseBasicsBlurb ++ [""]
, [mouseOverviewCaption] )
([], [])
in mergeOKX
(mergeOKX
okx0
(typesetInProp $ "" : mouseAreasBlurb, []))
(mergeOKX
(okm fst K.leftButtonReleaseKM K.rightButtonReleaseKM
[areaCaption "Exploration"])
(okm snd K.leftButtonReleaseKM K.rightButtonReleaseKM
[areaCaption "Aiming Mode"] )) ) ]
, if catLength CmdItem + catLength CmdMove + 9 + 9 > rheight then
[ ( movTextEnd
, okxs CmdItem
(["", categoryDescription CmdItem], [], ["", keyCaption])
([], "" : itemAllEnding) )
, ( movTextEnd
, okxs CmdMove
(["", categoryDescription CmdMove], [], ["", keyCaption])
(pickLeaderDescription, []) ) ]
else
[ ( movTextEnd
, mergeOKX
(okxs CmdItem
(["", categoryDescription CmdItem], [], ["", keyCaption])
([], "" : itemAllEnding))
(okxs CmdMove
( ["", "", categoryDescription CmdMove]
, []
, ["", keyCaption] )
(pickLeaderDescription, [""])) ) ]
, if catLength CmdAim + catLength CmdMeta + 9 > rheight then
[ ( movTextEnd
, okxs CmdAim
(["", categoryDescription CmdAim], [], ["", keyCaption])
([], []) )
, ( lastHelpEnd
, okxs CmdMeta
(["", categoryDescription CmdMeta], [], ["", keyCaption])
([], ["", seeAlso]) ) ]
else
[ ( lastHelpEnd
, mergeOKX
(okxs CmdAim
(["", categoryDescription CmdAim], [], ["", keyCaption])
([], []))
(okxs CmdMeta
( ["", "", categoryDescription CmdMeta]
, []
, ["", keyCaption] )
([], ["", seeAlso, ""])) ) ]
]
The length of the button may be wrong if the two supplied fonts
-- Builds one help-screen table: a key-label column (rendered in @labFont@)
-- and a description column (rendered in @descFont@), surrounded by
-- mono/prop header and footer lines, and returns the overlays together
-- with the clickable key regions.
-- NOTE(review): column-2 offset semantics (offsetCol2) inferred from use
-- in ButtonWidth/greyToAL below — confirm against callers.
okxsN :: InputContent -> DisplayFont -> DisplayFont -> Int -> (HumanCmd -> Bool)
      -> Bool -> CmdCategory -> ([Text], [Text], [Text]) -> ([Text], [Text])
      -> OKX
okxsN InputContent{..} labFont descFont offsetCol2 greyedOut
      showManyKeys cat (headerMono1, headerProp, headerMono2)
      (footerMono, footerProp) =
  let -- Pad the key label with a non-breaking space so it never starts
      -- flush against the margin.
      fmt k h = (T.singleton '\x00a0' <> k, h)
      coImage :: HumanCmd -> [K.KM]
      -- All keys bound to a command; a command missing from brevMap is a
      -- programmer error, hence the hard 'error'.
      coImage cmd = M.findWithDefault (error $ "" `showFailure` cmd) cmd brevMap
      disp = T.intercalate " or " . map (T.pack . K.showKM)
      keyKnown km = case K.key km of
        K.Unknown{} -> False
        _ -> True
      keys :: [(KeyOrSlot, (Bool, (Text, Text)))]
      -- One row per listed command in this category: the key(s) shown,
      -- whether the row is greyed out, and the label/description pair.
      keys = [ (Left km, (greyedOut cmd, fmt keyNames desc))
             | (_, (cats, desc, cmd)) <- bcmdList
             , let kms = coImage cmd
                   knownKeys = filter keyKnown kms
                   keyNames =
                     disp $ (if showManyKeys then id else take 1) knownKeys
                   kmsRes = if desc == "" then knownKeys else kms
                   -- Key used for the clickable region; ESC as a fallback
                   -- when the command has no binding at all.
                   km = case kmsRes of
                     [] -> K.escKM
                     km1 : _ -> km1
             , cat `elem` cats
             , desc /= "" || CmdInternal `elem` cats]
      spLen = textSize labFont " "
      -- Attach a screen position and button width to each row; y is the
      -- row index counted past all three header sections.
      f (ks, (_, (_, t2))) y =
        (ks, ( PointUI spLen y
             , ButtonWidth labFont (offsetCol2 + 2 + T.length t2 - 1)))
      kxs = zipWith f keys
                    [length headerMono1 + length headerProp + length headerMono2 ..]
      -- Full table contents: headers, key rows, footers, each tagged with
      -- its greyed-out flag and (label, description) texts.
      ts = map (\t -> (False, (t, ""))) headerMono1
           ++ map (\t -> (False, ("", t))) headerProp
           ++ map (\t -> (False, (t, ""))) headerMono2
           ++ map snd keys
           ++ map (\t -> (False, (t, ""))) footerMono
           ++ map (\t -> (False, ("", t))) footerProp
      -- Render one row into the two columns; greyed-out rows use BrBlack.
      -- The description column is shifted by (offsetCol2 + 2) spaces
      -- unless the label column is empty.
      greyToAL (b, (t1, t2)) =
        if b
        then let al1 = textFgToAL Color.BrBlack t1
             in (al1, ( if T.null t1 then 0 else spLen * (offsetCol2 + 2)
                      , textFgToAL Color.BrBlack t2 ))
        else let al1 = textToAL t1
             in (al1, ( if T.null t1 then 0 else spLen * (offsetCol2 + 2)
                      , textToAL t2 ))
      (greyLab, greyDesc) = unzip $ map greyToAL ts
  in ( EM.insertWith (++) descFont (offsetOverlayX greyDesc)
       $ EM.singleton labFont (offsetOverlay greyLab)
     , kxs )
|
f528578baa3c8fcafe53c2edfa12c3394686366879ac2a166e03b5df14dfe512 | poroh/ersip | ersip_sdp_bandwidth.erl | %%%
%%% Copyright (c) 2018, 2021 Dmitry Poroh
%%% All rights reserved.
%%% Distributed under the terms of the MIT License. See the LICENSE file.
%%%
%%% SDP bandwidth
%%%
-module(ersip_sdp_bandwidth).
-export([new/0]).
-export([tias/1,
ct/1,
as/1,
experimental/2,
parse/1,
assemble/1
]).
-export_type([bandwidth/0]).
%%===================================================================
%% Types
%%===================================================================
-type bandwidth() :: {bandwidth, bw_list()}.
-type bw_list() :: [bw_item()].
-type bw_item() :: {bw_type(), bw_value()}.
-type bw_type() :: ct | as | tias | {bw_type, binary()}.
-type bw_value() :: non_neg_integer().
-type parse_result() :: ersip_parser_aux:parse_result(bandwidth()).
%%===================================================================
%% API
%%===================================================================
%% @doc Creates an empty bandwidth container with no b= entries.
-spec new() -> bandwidth().
new() ->
    {bandwidth, []}.
%% @doc Value of the TIAS (Transport Independent Application Specific,
%% RFC 3890) bandwidth entry, or undefined when not present.
-spec tias(bandwidth()) -> non_neg_integer() | undefined.
tias({bandwidth, BWList}) ->
    proplists:get_value(tias, BWList, undefined).

%% @doc Value of the AS (Application Specific) bandwidth entry,
%% or undefined when not present.
-spec as(bandwidth()) -> non_neg_integer() | undefined.
as({bandwidth, BWList}) ->
    proplists:get_value(as, BWList, undefined).

%% @doc Value of the CT (Conference Total) bandwidth entry,
%% or undefined when not present.
-spec ct(bandwidth()) -> non_neg_integer() | undefined.
ct({bandwidth, BWList}) ->
    proplists:get_value(ct, BWList, undefined).
%% @doc Looks up an experimental/extension bandwidth entry by its type
%% name, matching case-insensitively. Returns the value of the first
%% matching entry, or undefined when no entry with that name exists.
-spec experimental(binary(), bandwidth()) -> non_neg_integer() | undefined.
experimental(Name, {bandwidth, BWList}) ->
    Wanted = ersip_bin:to_lower(Name),
    Matching = [Value || {{bw_type, TypeName}, Value} <- BWList,
                         ersip_bin:to_lower(TypeName) == Wanted],
    case Matching of
        [First | _] -> First;
        [] -> undefined
    end.
%% @doc Parses consecutive b=<type>:<value> lines from the head of Bin.
%% Parsing stops at the first line that does not start with "b=" and
%% returns it as the unparsed rest (see do_bw_parse/2).
-spec parse(binary()) -> parse_result().
parse(Bin) ->
    do_bw_parse(Bin, {bandwidth, []}).
%% @doc Renders every stored entry back into a "b=<type>:<value>\r\n"
%% line, preserving the order of the entries.
-spec assemble(bandwidth()) -> iolist().
assemble({bandwidth, BWList}) ->
    lists:map(
      fun({BWType, BWValue}) ->
              [<<"b=">>, bw_type_to_binary(BWType), <<":">>,
               integer_to_binary(BWValue), <<"\r\n">>]
      end, BWList).
%%%===================================================================
%%% Internal Implementation
%%%===================================================================
-define(crlf, "\r\n").
%% bandwidth-fields = *(%x62 "=" bwtype ":" bandwidth CRLF)
%% @doc Accumulates "b=" lines one by one; any input that does not start
%% with "b=" terminates the loop and is returned as the remainder.
-spec do_bw_parse(binary(), bandwidth()) -> ersip_parser_aux:parse_result(bandwidth()).
do_bw_parse(<<"b=", Line/binary>>, {bandwidth, Items}) ->
    case binary:split(Line, <<?crlf>>) of
        [Field, Tail] ->
            case parse_bw(Field) of
                {ok, Item} ->
                    do_bw_parse(Tail, {bandwidth, [Item | Items]});
                {error, _} = Error ->
                    Error
            end;
        [_NoCrLf] ->
            %% A bandwidth line must be CRLF-terminated.
            {error, {invalid_bandwidth, {no_crlf, Line}}}
    end;
do_bw_parse(Other, {bandwidth, Items}) ->
    %% First non-bandwidth line: emit accumulated items in source order.
    {ok, {bandwidth, lists:reverse(Items)}, Other}.
%% @doc Parses a single "<type>:<value>" bandwidth field (without the
%% leading "b=" and the trailing CRLF). The type must be a valid SDP
%% token; the value must be a non-negative integer.
-spec parse_bw(binary()) -> {ok, bw_item()} | {error, term()}.
parse_bw(Field) ->
    case binary:split(Field, <<":">>) of
        [TypeBin, ValueBin] ->
            case ersip_sdp_aux:check_token(TypeBin) of
                true ->
                    parse_bw_value(convert_bw_type(TypeBin), ValueBin);
                false ->
                    {error, {invalid_bandwidth_type, TypeBin}}
            end;
        [_] ->
            %% No ":" separator at all.
            {error, {invalid_bandwidth, Field}}
    end.

%% Parses the numeric part; the entire binary must be consumed by the
%% non-negative integer, otherwise the value is rejected.
-spec parse_bw_value(bw_type(), binary()) -> {ok, bw_item()} | {error, term()}.
parse_bw_value(Type, ValueBin) ->
    case ersip_parser_aux:parse_non_neg_int(ValueBin) of
        {ok, Value, <<>>} ->
            {ok, {Type, Value}};
        {ok, _, _Garbage} ->
            {error, {invalid_bandwidth_value, ValueBin}};
        {error, Reason} ->
            {error, {invalid_bandwidth_value, Reason}}
    end.
%% @doc Maps a bandwidth type token to its canonical atom form; the
%% comparison is case-insensitive. Unknown (extension) types are kept
%% as {bw_type, Original}, preserving the original spelling.
-spec convert_bw_type(binary()) -> bw_type().
convert_bw_type(BWType) ->
    case ersip_bin:to_lower(BWType) of
        <<"ct">> -> ct;
        <<"as">> -> as;
        <<"tias">> -> tias; %% RFC 3890
        _ -> {bw_type, BWType}
    end.
%% @doc Inverse of convert_bw_type/1: known types are rendered in their
%% canonical upper-case spelling; extension types keep the original
%% binary unchanged.
-spec bw_type_to_binary(bw_type()) -> binary().
bw_type_to_binary(ct) ->
    <<"CT">>;
bw_type_to_binary(as) ->
    <<"AS">>;
bw_type_to_binary(tias) ->
    <<"TIAS">>;
bw_type_to_binary({bw_type, T}) ->
    T.
| null | https://raw.githubusercontent.com/poroh/ersip/1a406433c80fb21bb15ec45bfec3ce3d98aaaa52/src/sdp/ersip_sdp_bandwidth.erl | erlang |
All rights reserved.
===================================================================
Types
===================================================================
===================================================================
API
===================================================================
===================================================================
Internal Implementation
===================================================================
x62 " = " " : " bandwidth CRLF )
RFC 3890 | Copyright ( c ) 2018 , 2021 Dmitry Poroh
Distributed under the terms of the MIT License . See the LICENSE file .
SDP bandwidth
-module(ersip_sdp_bandwidth).
-export([new/0]).
-export([tias/1,
ct/1,
as/1,
experimental/2,
parse/1,
assemble/1
]).
-export_type([bandwidth/0]).
-type bandwidth() :: {bandwidth, bw_list()}.
-type bw_list() :: [bw_item()].
-type bw_item() :: {bw_type(), bw_value()}.
-type bw_type() :: ct | as | tias | {bw_type, binary()}.
-type bw_value() :: non_neg_integer().
-type parse_result() :: ersip_parser_aux:parse_result(bandwidth()).
-spec new() -> bandwidth().
new() ->
{bandwidth, []}.
-spec tias(bandwidth()) -> non_neg_integer() | undefined.
tias({bandwidth, BWList}) ->
proplists:get_value(tias, BWList, undefined).
-spec as(bandwidth()) -> non_neg_integer() | undefined.
as({bandwidth, BWList}) ->
proplists:get_value(as, BWList, undefined).
-spec ct(bandwidth()) -> non_neg_integer() | undefined.
ct({bandwidth, BWList}) ->
proplists:get_value(ct, BWList, undefined).
-spec experimental(binary(), bandwidth()) -> non_neg_integer() | undefined.
experimental(Name, {bandwidth, BWList}) ->
LName = ersip_bin:to_lower(Name),
Vals = [Val || {{bw_type, N}, Val} <- BWList,
ersip_bin:to_lower(N) == LName],
case Vals of
[] ->
undefined;
[V|_] -> V
end.
-spec parse(binary()) -> parse_result().
parse(Bin) ->
do_bw_parse(Bin, {bandwidth, []}).
-spec assemble(bandwidth()) -> iolist().
assemble({bandwidth, BWList}) ->
[[<<"b=">>, bw_type_to_binary(BWType), <<":">>, integer_to_binary(BWValue), <<"\r\n">>]
|| {BWType, BWValue} <- BWList].
-define(crlf, "\r\n").
-spec do_bw_parse(binary(), bandwidth()) -> ersip_parser_aux:parse_result(bandwidth()).
do_bw_parse(<<"b=", Rest/binary>>, {bandwidth, Acc}) ->
case binary:split(Rest, <<?crlf>>) of
[_] ->
{error, {invalid_bandwidth, {no_crlf, Rest}}};
[Band, Rest1] ->
case parse_bw(Band) of
{ok, BWItem} ->
do_bw_parse(Rest1, {bandwidth, [BWItem | Acc]});
{error, _} = Error ->
Error
end
end;
do_bw_parse(Bin, {bandwidth, Acc}) ->
{ok, {bandwidth, lists:reverse(Acc)}, Bin}.
-spec parse_bw(binary()) -> {ok, bw_item()} | {error, term()}.
parse_bw(Item) ->
case binary:split(Item, <<":">>) of
[_] ->
{error, {invalid_bandwidth, Item}};
[BWTypeBin, BWValueBin] ->
case ersip_sdp_aux:check_token(BWTypeBin) of
false ->
{error, {invalid_bandwidth_type, BWTypeBin}};
true ->
BWType = convert_bw_type(BWTypeBin),
case ersip_parser_aux:parse_non_neg_int(BWValueBin) of
{ok, BWValue, <<>>} ->
{ok, {BWType, BWValue}};
{ok, _, _} ->
{error, {invalid_bandwidth_value, BWValueBin}};
{error, Reason} ->
{error, {invalid_bandwidth_value, Reason}}
end
end
end.
-spec convert_bw_type(binary()) -> bw_type().
convert_bw_type(BWType) ->
case ersip_bin:to_lower(BWType) of
<<"ct">> -> ct;
<<"as">> -> as;
_ -> {bw_type, BWType}
end.
-spec bw_type_to_binary(bw_type()) -> binary().
bw_type_to_binary(ct) ->
<<"CT">>;
bw_type_to_binary(as) ->
<<"AS">>;
bw_type_to_binary(tias) ->
<<"TIAS">>;
bw_type_to_binary({bw_type, T}) ->
T.
|
42edf787c3868ef198868e086cbff10693b1b7d3a92ece8212afb4becf425a4e | basho/riak_sysmon | riak_sysmon_app.erl | Copyright ( c ) 2011 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
@doc Riak_sysmon : a rate - limiting , gen_event - based mechanism to
%% allow multiple parties/processes/applications to share
erlang : system_monitor/2 system events safely .
-module(riak_sysmon_app).
-behaviour(application).
%% Application callbacks
-export([start/2, stop/1]).
%%%===================================================================
%%% Application callbacks
%%%===================================================================
%%--------------------------------------------------------------------
@private
%% @doc
%% This function is called whenever an application is started using
application : start/[1,2 ] , and should start the processes of the
%% application. If the application is structured according to the OTP
%% design principles as a supervision tree, this means starting the
%% top supervisor of the tree.
%%
@spec start(StartType , ) - > { ok , Pid } |
{ ok , Pid , State } |
%% {error, Reason}
%% StartType = normal | {takeover, Node} | {failover, Node}
= term ( )
%% @end
%%--------------------------------------------------------------------
%% @doc Starts the top supervisor of the riak_sysmon application.
%% Returns whatever riak_sysmon_sup:start_link/0 returns ({ok, Pid} on
%% success, an error term otherwise).
start(_StartType, _StartArgs) ->
    %% The case statement this replaced only rebound and returned the
    %% supervisor's result unchanged ({ok, _Pid} and errors alike), so
    %% the supervisor's return value is propagated directly.
    riak_sysmon_sup:start_link().
%%--------------------------------------------------------------------
@private
%% @doc
%% This function is called whenever an application has stopped. It
%% is intended to be the opposite of Module:start/2 and should do
%% any necessary cleaning up. The return value is ignored.
%%
%% @spec stop(State) -> void()
%% @end
%%--------------------------------------------------------------------
%% Nothing to clean up here; the supervision tree teardown is handled
%% by OTP when the application stops.
stop(_State) ->
    ok.
%%%===================================================================
Internal functions
%%%===================================================================
| null | https://raw.githubusercontent.com/basho/riak_sysmon/0ab94b3186ff67501b833543f0c3f52be5b7ca7a/src/riak_sysmon_app.erl | erlang |
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
allow multiple parties/processes/applications to share
Application callbacks
===================================================================
Application callbacks
===================================================================
--------------------------------------------------------------------
@doc
This function is called whenever an application is started using
application. If the application is structured according to the OTP
design principles as a supervision tree, this means starting the
top supervisor of the tree.
{error, Reason}
StartType = normal | {takeover, Node} | {failover, Node}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function is called whenever an application has stopped. It
is intended to be the opposite of Module:start/2 and should do
any necessary cleaning up. The return value is ignored.
@spec stop(State) -> void()
@end
--------------------------------------------------------------------
===================================================================
=================================================================== | Copyright ( c ) 2011 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@doc Riak_sysmon : a rate - limiting , gen_event - based mechanism to
erlang : system_monitor/2 system events safely .
-module(riak_sysmon_app).
-behaviour(application).
-export([start/2, stop/1]).
@private
application : start/[1,2 ] , and should start the processes of the
@spec start(StartType , ) - > { ok , Pid } |
{ ok , Pid , State } |
= term ( )
start(_StartType, _StartArgs) ->
case riak_sysmon_sup:start_link() of
{ok, _Pid} = Ok ->
Ok;
Error ->
Error
end.
@private
stop(_State) ->
ok.
Internal functions
|
1ee3b4bb56b27e5bd39cf833c41c96b931f441d3b143732f7a4b4f289c148dc5 | skanev/playground | 19-tests.scm | (require rackunit rackunit/text-ui)
(load "helpers/simulator.scm")
(load "../19.scm")
;; A ten-instruction register machine that assigns 1..10 to register a,
;; with a label at the start and one just before the sixth assignment,
;; used to exercise breakpoints at known instruction offsets.
(define (test-machine)
  (make-machine '(a)
                '()
                '(start
                  (assign a (const 1))
                  (assign a (const 2))
                  (assign a (const 3))
                  (assign a (const 4))
                  (assign a (const 5))
                  before-six
                  (assign a (const 6))
                  (assign a (const 7))
                  (assign a (const 8))
                  (assign a (const 9))
                  (assign a (const 10)))))
(define sicp-5.19-tests
  (test-suite
   "Tests for SICP exercise 5.19"

   (test-case "Checking breakpoints"
     (define machine (test-machine))
     ;; Breakpoints count instructions after the label: stopping at
     ;; (start 3) leaves a = 2, i.e. before the 3rd instruction runs.
     (set-breakpoint machine 'start 3)
     (set-breakpoint machine 'start 5)
     (set-breakpoint machine 'before-six 3)
     (start machine)
     (check-eq? (get-register-contents machine 'a) 2)
     (proceed-machine machine)
     (check-eq? (get-register-contents machine 'a) 4)
     (proceed-machine machine)
     (check-eq? (get-register-contents machine 'a) 7))

   (test-case "Canceling breakpoints"
     (define machine (test-machine))
     (set-breakpoint machine 'start 3)
     (set-breakpoint machine 'start 5)
     (set-breakpoint machine 'before-six 3)
     ;; A cancelled breakpoint must not trigger; the first stop is (start 5).
     (cancel-breakpoint machine 'start 3)
     (start machine)
     (check-eq? (get-register-contents machine 'a) 4)
     ;; With every remaining breakpoint removed, the machine runs to the end.
     (cancel-all-breakpoints machine)
     (proceed-machine machine)
     (check-eq? (get-register-contents machine 'a) 10))
   ))
(run-tests sicp-5.19-tests)
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/sicp/05/tests/19-tests.scm | scheme | (require rackunit rackunit/text-ui)
(load "helpers/simulator.scm")
(load "../19.scm")
(define (test-machine)
(make-machine '(a)
'()
'(start
(assign a (const 1))
(assign a (const 2))
(assign a (const 3))
(assign a (const 4))
(assign a (const 5))
before-six
(assign a (const 6))
(assign a (const 7))
(assign a (const 8))
(assign a (const 9))
(assign a (const 10)))))
(define sicp-5.19-tests
(test-suite
"Tests for SICP exercise 5.19"
(test-case "Checking breakpoints"
(define machine (test-machine))
(set-breakpoint machine 'start 3)
(set-breakpoint machine 'start 5)
(set-breakpoint machine 'before-six 3)
(start machine)
(check-eq? (get-register-contents machine 'a) 2)
(proceed-machine machine)
(check-eq? (get-register-contents machine 'a) 4)
(proceed-machine machine)
(check-eq? (get-register-contents machine 'a) 7))
(test-case "Canceling breakpoints"
(define machine (test-machine))
(set-breakpoint machine 'start 3)
(set-breakpoint machine 'start 5)
(set-breakpoint machine 'before-six 3)
(cancel-breakpoint machine 'start 3)
(start machine)
(check-eq? (get-register-contents machine 'a) 4)
(cancel-all-breakpoints machine)
(proceed-machine machine)
(check-eq? (get-register-contents machine 'a) 10))
))
(run-tests sicp-5.19-tests)
| |
35133433d52304d7e055516ae0da140bab4af72e20e167d229046d0b0644fb77 | emqx/ekka | ekka_guid_SUITE.erl | %%--------------------------------------------------------------------
%% Copyright (c) 2019 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(ekka_guid_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
%% Run every test case discovered by ekka_ct in this module.
all() -> ekka_ct:all(?MODULE).
%% A generated GUID is a 128-bit binary, and a later GUID compares
%% greater than an earlier one (generation order is preserved).
t_gen(_) ->
    <<_:128>> = Guid1 = ekka_guid:gen(),
    <<_:128>> = Guid2 = ekka_guid:gen(),
    ?assert(Guid2 > Guid1).
%% new/0 returns a 3-tuple whose middle element is stable across calls
%% (NPid is bound by the first match and then pattern-matched by the
%% second) while the first element (a timestamp) strictly increases.
t_new(_) ->
    {Ts1, NPid, 0} = ekka_guid:new(),
    {Ts2, NPid, 0} = ekka_guid:new(),
    ?assert(Ts2 > Ts1).
%% The timestamp extracted from a later GUID strictly increases.
t_timestamp(_) ->
    Ts1 = ekka_guid:timestamp(ekka_guid:gen()),
    Ts2 = ekka_guid:timestamp(ekka_guid:gen()),
    ?assert(Ts2 > Ts1).
%% to_hexstr/1 and from_hexstr/1 round-trip a generated GUID.
t_to_from_hexstr(_) ->
    %% Bind the GUID before invoking the macro: binding a variable inside
    %% an ?assertEqual argument and reusing it in the other argument only
    %% works because of the current macro expansion order and is fragile
    %% across EUnit versions.
    Guid = ekka_guid:gen(),
    ?assertEqual(Guid, ekka_guid:from_hexstr(ekka_guid:to_hexstr(Guid))).
| null | https://raw.githubusercontent.com/emqx/ekka/70f2250e5e968e0c1da64e5b4733c5eb0eb402de/test/ekka_guid_SUITE.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-------------------------------------------------------------------- | Copyright ( c ) 2019 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(ekka_guid_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
all() -> ekka_ct:all(?MODULE).
t_gen(_) ->
<<_:128>> = Guid1 = ekka_guid:gen(),
<<_:128>> = Guid2 = ekka_guid:gen(),
?assert(Guid2 > Guid1).
t_new(_) ->
{Ts1, NPid, 0} = ekka_guid:new(),
{Ts2, NPid, 0} = ekka_guid:new(),
?assert(Ts2 > Ts1).
t_timestamp(_) ->
Ts1 = ekka_guid:timestamp(ekka_guid:gen()),
Ts2 = ekka_guid:timestamp(ekka_guid:gen()),
?assert(Ts2 > Ts1).
t_to_from_hexstr(_) ->
?assertEqual(Guid = ekka_guid:gen(), ekka_guid:from_hexstr(ekka_guid:to_hexstr(Guid))).
|
6eb1e41cc725d5eece9c4addf37a5ceaa07a34862937760e0e6f48647822644d | spurious/sagittarius-scheme-mirror | uri.scm | (import (rnrs)
(sagittarius)
(rfc uri)
(srfi :64 testing))
(test-begin "RFC URI tests")
;; Test cases are from Gauche
(test-equal "encode" "abc%3C%20%3E%20%22%20%23%25%7B%7C%7D%5C%5E"
(uri-encode-string "abc< > \" #%{|}\\^"))
(test-equal "encode (noescape)" ".a%21%2Ap"
(uri-encode-string ".a!*p" :noescape *rfc3986-unreserved-char-set*))
(test-equal "encode" "abc%3c%20%3e%20%22%20%23%25%7b%7c%7d%5c%5e"
(uri-encode-string "abc< > \" #%{|}\\^" :upper-case #f))
(test-equal "encode (noescape)" ".a%21%2ap"
(uri-encode-string ".a!*p" :noescape *rfc3986-unreserved-char-set*
:upper-case #f))
(test-equal "decode" "abc< > \" #%?{|}\\^"
(uri-decode-string "abc%3c%20%3e%20%22%20%23%25%3f%7b%7c%7d%5c%5e"))
(test-equal "decode" "abc<+>+\"+#%?{|}\\^"
(uri-decode-string "abc%3c+%3e+%22+%23%25%3f%7b%7c%7d%5c%5e"))
(test-equal "decode" "abc< > \" #%?{|}\\^"
(uri-decode-string "abc%3c+%3e+%22+%23%25%3f%7b%7c%7d%5c%5e"
:cgi-decode #t))
(test-equal "decode" "%" (uri-decode-string "%"))
(test-equal "decode" "a%" (uri-decode-string "a%"))
(test-equal "decode" "a%y" (uri-decode-string "a%y"))
(test-equal "decode" "a%ay" (uri-decode-string "a%ay"))
(test-equal "decode" "" (uri-decode-string ""))
(test-equal "uri-scheme&specific" '("http" "//practical-scheme.net/gauche/")
(receive r
(uri-scheme&specific "-scheme.net/gauche/")
r))
(test-equal "uri-scheme&specific" '(#f "/dev/tty")
(receive r
(uri-scheme&specific "/dev/tty")
r))
(test-equal "uri-decompose-hierarchical" '("www.example.com:8080"
"/about/company"
"abc=def&ghi%20"
"zzz")
(receive r
(uri-decompose-hierarchical
"//www.example.com:8080/about/company?abc=def&ghi%20#zzz")
r))
(test-equal "uri-decompose-hierarchical" '("www.example.com:8080"
"/about/company"
#f
"zzz")
(receive r
(uri-decompose-hierarchical
"//www.example.com:8080/about/company#zzz")
r))
(test-equal "uri-decompose-hierarchical" '("www.example.com:8080"
"/"
"abc"
#f)
(receive r
(uri-decompose-hierarchical
"//www.example.com:8080/?abc")
r))
(test-equal "uri-decompose-hierarchical" '("www.example.com:8080"
#f
#f
#f)
(receive r (uri-decompose-hierarchical "//www.example.com:8080") r))
(test-equal "uri-decompose-hierarchical" '((#f #f #f #f)
("" #f #f #f)
(#f "/" #f #f)
("" "/" #f #f))
(map (lambda (specific)
(receive r (uri-decompose-hierarchical specific) r))
'("" "//" "/" "///")))
(test-equal "uri-decompose-authority" '(#f "www.example.com" #f)
(receive r (uri-decompose-authority "www.example.com") r))
(test-equal "uri-decompose-authority" '(#f "www.example.com" "8080")
(receive r (uri-decompose-authority "www.example.com:8080") r))
(test-equal "uri-decompose-authority" '("foo:bar" "www.example.com" #f)
(receive r (uri-decompose-authority "foo:") r))
(test-equal "uri-parse" '("https" "shiro" "www.example.com" 443 "/login" "abc" "def")
(receive r (uri-parse "https://:443/login?abc#def")
r))
(test-equal "uri-parse" '("ftp" "anonymous:anonymous" "ftp.example.com" #f
"/pub/foo" #f #f)
(receive r (uri-parse "ftp:/pub/foo")
r))
(test-equal "uri-parse" '("file" #f #f #f "/usr/local/lib/abc" #f #f)
(receive r (uri-parse "file:/usr/local/lib/abc")
r))
(test-equal "uri-parse" '(#f #f #f #f "/usr/local/lib" #f #f)
(receive r (uri-parse "/usr/local/lib") r))
(test-equal "uri-parse" '("mailto" #f #f #f "" #f #f)
(receive r (uri-parse "mailto:") r))
(let ([base0 ";p?q"])
(define (t base rel expect)
(test-equal (format "merging ~s onto ~s" rel base)
expect (uri-merge base rel)))
(define t0 (lambda args (apply t base0 args)))
  ;; examples given in RFC3986 section 5.4
;; normal path
(t0 "g:h" "g:h")
(t0 "g" "")
(t0 "./g" "")
(t0 "g/" "/")
(t0 "/g" "")
(t0 "//g" "")
(t0 "?y" ";p?y")
(t0 "g?y" "")
(t0 "#s" ";p?q#s")
(t0 "g#s" "#s")
(t0 "g?y#s" "#s")
(t0 ";x" "/;x")
(t0 "g;x" ";x")
(t0 "g;x?y#s" ";x?y#s")
(t0 "" ";p?q")
(t0 "." "/")
(t0 "./" "/")
(t0 ".." "/")
(t0 "../" "/")
(t0 "../g" "")
(t0 "../.." "/")
(t0 "../../" "/")
(t0 "../../g" "")
;; failure path
(t0 "../../../g" "")
(t0 "../../../../g" "")
(t0 "/./g" "")
(t0 "/../g" "")
(t0 "g." ".")
(t0 ".g" "")
(t0 "g.." "..")
(t0 "..g" "")
(t0 "./../g" "")
(t0 "./g/." "/")
(t0 "g/./h" "")
(t0 "g/../h" "")
(t0 "g;x=1/./y" ";x=1/y")
(t0 "g;x=1/../y" "")
(t0 "g?y/./x" "")
(t0 "g?y/../x" "")
(t0 "g#s/./x" "#s/./x")
(t0 "g#s/../x" "#s/../x")
(t0 "http:g" "http:g") ;; for strict parser
  ;; some edge cases. the first case works since we do pre-normalization
  ;; of the base URI (RFC 3986), which is optional.
(t "/.." "./" "/")
(t "/" "./foo/bar/.." "/")
;; empty base-path case
(t "" "foo" "")
(t "" "./foo" "")
(t "" "../foo" "")
)
(test-equal "+%25%26%2B%C2%A3%E2%82%AC"
(uri-encode-string " %&+\xA3;\x20AC;" :cgi-encode #t))
(test-end)
| null | https://raw.githubusercontent.com/spurious/sagittarius-scheme-mirror/53f104188934109227c01b1e9a9af5312f9ce997/test/tests/rfc/uri.scm | scheme | normal path
failure path
for strict parser
empty base-path case | (import (rnrs)
(sagittarius)
(rfc uri)
(srfi :64 testing))
(test-begin "RFC URI tests")
Test cases are from Gauche
(test-equal "encode" "abc%3C%20%3E%20%22%20%23%25%7B%7C%7D%5C%5E"
(uri-encode-string "abc< > \" #%{|}\\^"))
(test-equal "encode (noescape)" ".a%21%2Ap"
(uri-encode-string ".a!*p" :noescape *rfc3986-unreserved-char-set*))
(test-equal "encode" "abc%3c%20%3e%20%22%20%23%25%7b%7c%7d%5c%5e"
(uri-encode-string "abc< > \" #%{|}\\^" :upper-case #f))
(test-equal "encode (noescape)" ".a%21%2ap"
(uri-encode-string ".a!*p" :noescape *rfc3986-unreserved-char-set*
:upper-case #f))
(test-equal "decode" "abc< > \" #%?{|}\\^"
(uri-decode-string "abc%3c%20%3e%20%22%20%23%25%3f%7b%7c%7d%5c%5e"))
(test-equal "decode" "abc<+>+\"+#%?{|}\\^"
(uri-decode-string "abc%3c+%3e+%22+%23%25%3f%7b%7c%7d%5c%5e"))
(test-equal "decode" "abc< > \" #%?{|}\\^"
(uri-decode-string "abc%3c+%3e+%22+%23%25%3f%7b%7c%7d%5c%5e"
:cgi-decode #t))
(test-equal "decode" "%" (uri-decode-string "%"))
(test-equal "decode" "a%" (uri-decode-string "a%"))
(test-equal "decode" "a%y" (uri-decode-string "a%y"))
(test-equal "decode" "a%ay" (uri-decode-string "a%ay"))
(test-equal "decode" "" (uri-decode-string ""))
(test-equal "uri-scheme&specific" '("http" "//practical-scheme.net/gauche/")
(receive r
(uri-scheme&specific "-scheme.net/gauche/")
r))
(test-equal "uri-scheme&specific" '(#f "/dev/tty")
(receive r
(uri-scheme&specific "/dev/tty")
r))
(test-equal "uri-decompose-hierarchical" '("www.example.com:8080"
"/about/company"
"abc=def&ghi%20"
"zzz")
(receive r
(uri-decompose-hierarchical
"//www.example.com:8080/about/company?abc=def&ghi%20#zzz")
r))
(test-equal "uri-decompose-hierarchical" '("www.example.com:8080"
"/about/company"
#f
"zzz")
(receive r
(uri-decompose-hierarchical
"//www.example.com:8080/about/company#zzz")
r))
(test-equal "uri-decompose-hierarchical" '("www.example.com:8080"
"/"
"abc"
#f)
(receive r
(uri-decompose-hierarchical
"//www.example.com:8080/?abc")
r))
(test-equal "uri-decompose-hierarchical" '("www.example.com:8080"
#f
#f
#f)
(receive r (uri-decompose-hierarchical "//www.example.com:8080") r))
(test-equal "uri-decompose-hierarchical" '((#f #f #f #f)
("" #f #f #f)
(#f "/" #f #f)
("" "/" #f #f))
(map (lambda (specific)
(receive r (uri-decompose-hierarchical specific) r))
'("" "//" "/" "///")))
(test-equal "uri-decompose-authority" '(#f "www.example.com" #f)
(receive r (uri-decompose-authority "www.example.com") r))
(test-equal "uri-decompose-authority" '(#f "www.example.com" "8080")
(receive r (uri-decompose-authority "www.example.com:8080") r))
(test-equal "uri-decompose-authority" '("foo:bar" "www.example.com" #f)
(receive r (uri-decompose-authority "foo:") r))
(test-equal "uri-parse" '("https" "shiro" "www.example.com" 443 "/login" "abc" "def")
(receive r (uri-parse "https://:443/login?abc#def")
r))
(test-equal "uri-parse" '("ftp" "anonymous:anonymous" "ftp.example.com" #f
"/pub/foo" #f #f)
(receive r (uri-parse "ftp:/pub/foo")
r))
(test-equal "uri-parse" '("file" #f #f #f "/usr/local/lib/abc" #f #f)
(receive r (uri-parse "file:/usr/local/lib/abc")
r))
(test-equal "uri-parse" '(#f #f #f #f "/usr/local/lib" #f #f)
(receive r (uri-parse "/usr/local/lib") r))
(test-equal "uri-parse" '("mailto" #f #f #f "" #f #f)
(receive r (uri-parse "mailto:") r))
(let ([base0 ";p?q"])
(define (t base rel expect)
(test-equal (format "merging ~s onto ~s" rel base)
expect (uri-merge base rel)))
(define t0 (lambda args (apply t base0 args)))
examples given in RFC3986 section 5.4
(t0 "g:h" "g:h")
(t0 "g" "")
(t0 "./g" "")
(t0 "g/" "/")
(t0 "/g" "")
(t0 "//g" "")
(t0 "?y" ";p?y")
(t0 "g?y" "")
(t0 "#s" ";p?q#s")
(t0 "g#s" "#s")
(t0 "g?y#s" "#s")
(t0 ";x" "/;x")
(t0 "g;x" ";x")
(t0 "g;x?y#s" ";x?y#s")
(t0 "" ";p?q")
(t0 "." "/")
(t0 "./" "/")
(t0 ".." "/")
(t0 "../" "/")
(t0 "../g" "")
(t0 "../.." "/")
(t0 "../../" "/")
(t0 "../../g" "")
(t0 "../../../g" "")
(t0 "../../../../g" "")
(t0 "/./g" "")
(t0 "/../g" "")
(t0 "g." ".")
(t0 ".g" "")
(t0 "g.." "..")
(t0 "..g" "")
(t0 "./../g" "")
(t0 "./g/." "/")
(t0 "g/./h" "")
(t0 "g/../h" "")
(t0 "g;x=1/./y" ";x=1/y")
(t0 "g;x=1/../y" "")
(t0 "g?y/./x" "")
(t0 "g?y/../x" "")
(t0 "g#s/./x" "#s/./x")
(t0 "g#s/../x" "#s/../x")
some edge cases . the first case works since we do pre - normalization
of the base URI ( ) , which is optional .
(t "/.." "./" "/")
(t "/" "./foo/bar/.." "/")
(t "" "foo" "")
(t "" "./foo" "")
(t "" "../foo" "")
)
(test-equal "+%25%26%2B%C2%A3%E2%82%AC"
(uri-encode-string " %&+\xA3;\x20AC;" :cgi-encode #t))
(test-end)
|
ed0833918d05942df5bdec030ae39b1de96fc8d30eee0b4216e2348b57d4b37e | rtoy/ansi-cl-tests | call-next-method.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Sat May 31 11:18:15 2003
;;;; Contains: Tests of CALL-NEXT-METHOD
(in-package :cl-test)
Tests where there is no next method are in no-next-method.lsp
(defgeneric cnm-gf-01 (x)
(:method ((x integer)) (cons 'a (call-next-method)))
(:method ((x rational)) (cons 'b (call-next-method)))
(:method ((x real)) (cons 'c (call-next-method)))
(:method ((x number)) (cons 'd (call-next-method)))
(:method ((x t)) nil))
(deftest call-next-method.1
(mapcar #'cnm-gf-01 '(0 2/3 1.3 #c(1 1) a))
((a b c d) (b c d) (c d) (d) nil))
;; Check that call-next-method passes along multiple values correctly
(defgeneric cnm-gf-02 (x)
(:method ((x integer)) (call-next-method))
(:method ((x number)) (values))
(:method ((x (eql 'a))) (call-next-method))
(:method ((x symbol)) (values 1 2 3 4 5 6)))
(deftest call-next-method.2
(cnm-gf-02 0))
(deftest call-next-method.3
(cnm-gf-02 'a)
1 2 3 4 5 6)
;;; Call next method has indefinite extent
(defgeneric cnm-gf-03 (x)
(:method ((x integer)) #'call-next-method)
(:method ((x t)) t))
(deftest call-next-method.4
(funcall (cnm-gf-03 0))
t)
The arguments to c - n - m can be changed
(defgeneric cnm-gf-04 (x)
(:method ((x integer)) (call-next-method (+ x 10)))
(:method ((x number)) (1+ x)))
(deftest call-next-method.5
(mapcar #'cnm-gf-04 '(0 1 2 5/3 9/2 1.0 #c(1 1)))
(11 12 13 8/3 11/2 2.0 #c(2 1)))
;;; call-next-method goes up the list of applicable methods
which may be to a method with specializers incomparable to
;;; the current method
(defgeneric cnm-gf-05 (x y)
(:method ((x integer) (y integer)) (cons 'a (call-next-method)))
(:method ((x integer) (y t)) (cons 'b (call-next-method)))
(:method ((x t) (y integer)) (cons 'c (call-next-method)))
(:method ((x t) (y t)) (list 'd)))
(deftest call-next-method.6
(mapcar #'cnm-gf-05 '(0 0 t t) '(0 t 0 t))
((a b c d)
(b d)
(c d)
(d)))
(defclass cnm-class-01a () ())
(defclass cnm-class-01b (cnm-class-01a) ())
(defclass cnm-class-01c (cnm-class-01a) ())
(defclass cnm-class-01d (cnm-class-01c cnm-class-01b) ())
(defgeneric cnm-gf-06 (x)
(:method ((x cnm-class-01d)) (cons 1 (call-next-method)))
(:method ((x cnm-class-01c)) (cons 2 (call-next-method)))
(:method ((x cnm-class-01b)) (cons 3 (call-next-method)))
(:method ((x cnm-class-01a)) (cons 4 (call-next-method)))
(:method ((x t)) nil))
(deftest call-next-method.7
(values
(cnm-gf-06 (make-instance 'cnm-class-01d))
(cnm-gf-06 (make-instance 'cnm-class-01c))
(cnm-gf-06 (make-instance 'cnm-class-01b))
(cnm-gf-06 (make-instance 'cnm-class-01a))
(cnm-gf-06 nil))
(1 2 3 4)
(2 4)
(3 4)
(4)
nil)
;;; Neither rebinding nor setq affects the arguments passed by
;;; (call-next-method)
(defgeneric cnm-gf-07 (x)
(:method ((x integer)) (list (incf x) (call-next-method)))
(:method ((x symbol)) (list (setq x 'a) x (call-next-method)))
(:method ((x cons)) (list x (let ((x :bad))
(declare (ignorable x))
(call-next-method))))
(:method ((x t)) x))
(deftest call-next-method.8
(mapcar #'cnm-gf-07 '(0 z (x) #\a))
((1 0) (a a z) ((x) (x)) #\a))
;; Nor does argument defaulting
(defgeneric cnm-gf-08 (x &optional y)
(:method ((x integer) &optional y) (list* x y (call-next-method)))
(:method ((x t) &optional y) (list x y)))
(deftest call-next-method.9
(values
(cnm-gf-08 0)
(cnm-gf-08 0 t)
(cnm-gf-08 'a)
(cnm-gf-08 'a 'b))
(0 nil 0 nil)
(0 t 0 t)
(a nil)
(a b))
;;; When c-n-m is called with arguments but omits optionals, those
;;; optionals are defaulted
(defgeneric cnm-gf-09 (x &optional y)
(:method ((x integer) &optional y) (list* x y (call-next-method (1+ x))))
(:method ((x t) &optional y) (list x y)))
(deftest call-next-method.10
(values
(cnm-gf-09 5)
(cnm-gf-09 8 'a)
(cnm-gf-09 'x)
(cnm-gf-09 'x 'y))
(5 nil 6 nil)
(8 a 9 nil)
(x nil)
(x y))
(defgeneric cnm-gf-10 (x &optional y z)
(:method ((x integer) &optional (y 'a y-p) (z 'b z-p))
(list* x y (notnot y-p) z (notnot z-p) (call-next-method (1+ x))))
(:method ((x t) &optional (y 'c y-p) (z 'd z-p))
(list x y (notnot y-p) z (notnot z-p))))
(deftest call-next-method.11
(values
(cnm-gf-10 5)
(cnm-gf-10 8 'p)
(cnm-gf-10 8 'p 'q)
(cnm-gf-10 'x)
(cnm-gf-10 'x 'u)
(cnm-gf-10 'x 'u 'v))
(5 a nil b nil 6 c nil d nil)
(8 p t b nil 9 c nil d nil)
(8 p t q t 9 c nil d nil)
(x c nil d nil)
(x u t d nil)
(x u t v t))
;;; "When providing arguments to call-next-method, the following
;;; rule must be satisfied or an error of type error should be signaled:
;;; the ordered set of applicable methods for a changed set of arguments
;;; for call-next-method must be the same as the ordered set of applicable
;;; methods for the original arguments to the generic function."
(defgeneric cnm-order-error-gf-01 (x)
(declare (optimize (safety 3)))
(:method ((x (eql 0)))
(declare (optimize (safety 3)))
no longer EQL to 0
(:method ((x t)) nil))
(deftest call-next-method.error.1
(locally
(declare (optimize (safety 3)))
(handler-case
(eval '(locally (declare (optimize (safety 3)))
(cnm-order-error-gf-01 0)))
(error () :error)))
:error)
(defgeneric cnm-order-error-gf-02 (x)
(declare (optimize (safety 3)))
(:method ((x integer))
(declare (optimize (safety 3)))
(call-next-method :bad))
(:method ((x t)) x))
(deftest call-next-method.error.2
(locally
(declare (optimize (safety 3)))
(handler-case
(eval '(locally (declare (optimize (safety 3)))
(cnm-order-error-gf-02 0)))
(error () :error)))
:error)
| null | https://raw.githubusercontent.com/rtoy/ansi-cl-tests/9708f3977220c46def29f43bb237e97d62033c1d/call-next-method.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests of CALL-NEXT-METHOD
Check that call-next-method passes along multiple values correctly
Call next method has indefinite extent
call-next-method goes up the list of applicable methods
the current method
Neither rebinding nor setq affects the arguments passed by
(call-next-method)
Nor does argument defaulting
When c-n-m is called with arguments but omits optionals, those
optionals are defaulted
"When providing arguments to call-next-method, the following
rule must be satisfied or an error of type error should be signaled:
the ordered set of applicable methods for a changed set of arguments
for call-next-method must be the same as the ordered set of applicable
methods for the original arguments to the generic function." | Author :
Created : Sat May 31 11:18:15 2003
(in-package :cl-test)
Tests where there is no next method are in no-next-method.lsp
(defgeneric cnm-gf-01 (x)
(:method ((x integer)) (cons 'a (call-next-method)))
(:method ((x rational)) (cons 'b (call-next-method)))
(:method ((x real)) (cons 'c (call-next-method)))
(:method ((x number)) (cons 'd (call-next-method)))
(:method ((x t)) nil))
(deftest call-next-method.1
(mapcar #'cnm-gf-01 '(0 2/3 1.3 #c(1 1) a))
((a b c d) (b c d) (c d) (d) nil))
(defgeneric cnm-gf-02 (x)
(:method ((x integer)) (call-next-method))
(:method ((x number)) (values))
(:method ((x (eql 'a))) (call-next-method))
(:method ((x symbol)) (values 1 2 3 4 5 6)))
(deftest call-next-method.2
(cnm-gf-02 0))
(deftest call-next-method.3
(cnm-gf-02 'a)
1 2 3 4 5 6)
(defgeneric cnm-gf-03 (x)
(:method ((x integer)) #'call-next-method)
(:method ((x t)) t))
(deftest call-next-method.4
(funcall (cnm-gf-03 0))
t)
The arguments to c - n - m can be changed
(defgeneric cnm-gf-04 (x)
(:method ((x integer)) (call-next-method (+ x 10)))
(:method ((x number)) (1+ x)))
(deftest call-next-method.5
(mapcar #'cnm-gf-04 '(0 1 2 5/3 9/2 1.0 #c(1 1)))
(11 12 13 8/3 11/2 2.0 #c(2 1)))
which may be to a method with specializers incomparable to
(defgeneric cnm-gf-05 (x y)
(:method ((x integer) (y integer)) (cons 'a (call-next-method)))
(:method ((x integer) (y t)) (cons 'b (call-next-method)))
(:method ((x t) (y integer)) (cons 'c (call-next-method)))
(:method ((x t) (y t)) (list 'd)))
(deftest call-next-method.6
(mapcar #'cnm-gf-05 '(0 0 t t) '(0 t 0 t))
((a b c d)
(b d)
(c d)
(d)))
(defclass cnm-class-01a () ())
(defclass cnm-class-01b (cnm-class-01a) ())
(defclass cnm-class-01c (cnm-class-01a) ())
(defclass cnm-class-01d (cnm-class-01c cnm-class-01b) ())
(defgeneric cnm-gf-06 (x)
(:method ((x cnm-class-01d)) (cons 1 (call-next-method)))
(:method ((x cnm-class-01c)) (cons 2 (call-next-method)))
(:method ((x cnm-class-01b)) (cons 3 (call-next-method)))
(:method ((x cnm-class-01a)) (cons 4 (call-next-method)))
(:method ((x t)) nil))
(deftest call-next-method.7
(values
(cnm-gf-06 (make-instance 'cnm-class-01d))
(cnm-gf-06 (make-instance 'cnm-class-01c))
(cnm-gf-06 (make-instance 'cnm-class-01b))
(cnm-gf-06 (make-instance 'cnm-class-01a))
(cnm-gf-06 nil))
(1 2 3 4)
(2 4)
(3 4)
(4)
nil)
(defgeneric cnm-gf-07 (x)
(:method ((x integer)) (list (incf x) (call-next-method)))
(:method ((x symbol)) (list (setq x 'a) x (call-next-method)))
(:method ((x cons)) (list x (let ((x :bad))
(declare (ignorable x))
(call-next-method))))
(:method ((x t)) x))
(deftest call-next-method.8
(mapcar #'cnm-gf-07 '(0 z (x) #\a))
((1 0) (a a z) ((x) (x)) #\a))
(defgeneric cnm-gf-08 (x &optional y)
(:method ((x integer) &optional y) (list* x y (call-next-method)))
(:method ((x t) &optional y) (list x y)))
(deftest call-next-method.9
(values
(cnm-gf-08 0)
(cnm-gf-08 0 t)
(cnm-gf-08 'a)
(cnm-gf-08 'a 'b))
(0 nil 0 nil)
(0 t 0 t)
(a nil)
(a b))
(defgeneric cnm-gf-09 (x &optional y)
(:method ((x integer) &optional y) (list* x y (call-next-method (1+ x))))
(:method ((x t) &optional y) (list x y)))
(deftest call-next-method.10
(values
(cnm-gf-09 5)
(cnm-gf-09 8 'a)
(cnm-gf-09 'x)
(cnm-gf-09 'x 'y))
(5 nil 6 nil)
(8 a 9 nil)
(x nil)
(x y))
(defgeneric cnm-gf-10 (x &optional y z)
(:method ((x integer) &optional (y 'a y-p) (z 'b z-p))
(list* x y (notnot y-p) z (notnot z-p) (call-next-method (1+ x))))
(:method ((x t) &optional (y 'c y-p) (z 'd z-p))
(list x y (notnot y-p) z (notnot z-p))))
(deftest call-next-method.11
(values
(cnm-gf-10 5)
(cnm-gf-10 8 'p)
(cnm-gf-10 8 'p 'q)
(cnm-gf-10 'x)
(cnm-gf-10 'x 'u)
(cnm-gf-10 'x 'u 'v))
(5 a nil b nil 6 c nil d nil)
(8 p t b nil 9 c nil d nil)
(8 p t q t 9 c nil d nil)
(x c nil d nil)
(x u t d nil)
(x u t v t))
(defgeneric cnm-order-error-gf-01 (x)
(declare (optimize (safety 3)))
(:method ((x (eql 0)))
(declare (optimize (safety 3)))
no longer EQL to 0
(:method ((x t)) nil))
(deftest call-next-method.error.1
(locally
(declare (optimize (safety 3)))
(handler-case
(eval '(locally (declare (optimize (safety 3)))
(cnm-order-error-gf-01 0)))
(error () :error)))
:error)
(defgeneric cnm-order-error-gf-02 (x)
(declare (optimize (safety 3)))
(:method ((x integer))
(declare (optimize (safety 3)))
(call-next-method :bad))
(:method ((x t)) x))
(deftest call-next-method.error.2
(locally
(declare (optimize (safety 3)))
(handler-case
(eval '(locally (declare (optimize (safety 3)))
(cnm-order-error-gf-02 0)))
(error () :error)))
:error)
|
175d4bd9b0b40ed2d2daaf735594f76c438f3c202faf8021cf903beb32cc1a10 | rzezeski/try-try-try | rts.erl | %% @doc Interface into the Real Time Statistics application.
-module(rts).
-include("rts.hrl").
-include_lib("riak_core/include/riak_core_vnode.hrl").
-export([
ping/0,
entry/2,
get/2,
set/3,
append/3,
incr/2,
incrby/3,
sadd/3
]).
%%%===================================================================
%%% API
%%%===================================================================
% @doc Pings a random vnode to make sure communication is functional
ping() ->
DocIdx = riak_core_util:chash_key({<<"ping">>, term_to_binary(now())}),
PrefList = riak_core_apl:get_primary_apl(DocIdx, 1, rts),
[{IndexNode, _Type}] = PrefList,
riak_core_vnode_master:sync_spawn_command(IndexNode, ping, rts_vnode_master).
%% @doc Process an entry.
%%
%% TODO: Coordinator to provide N/R/W
entry(Client, Entry) ->
DocIdx = riak_core_util:chash_key({list_to_binary(Client),
term_to_binary(now())}),
PrefList = riak_core_apl:get_apl(DocIdx, 1, rts_entry),
[IdxNode] = PrefList,
rts_entry_vnode:entry(IdxNode, Client, Entry).
%% @doc Get a stat's value.
get(Client, StatName) ->
rts_stat_vnode:get(get_idxnode(Client, StatName), StatName).
%% @doc Set a stat's value, replacing the current value.
set(Client, StatName, Val) ->
rts_stat_vnode:set(get_idxnode(Client, StatName), StatName, Val).
%% @doc Append to a stat's value.
append(Client, StatName, Val) ->
rts_state_vnode:append(get_idxnode(Client, StatName), StatName, Val).
@doc Increment the stat 's value by 1 .
incr(Client, StatName) ->
rts_stat_vnode:incr(get_idxnode(Client, StatName), StatName).
@doc Increment the stat 's value by .
incrby(Client, StatName, Val) ->
rts_stat_vnode:incrby(get_idxnode(Client, StatName), StatName, Val).
%% @doc Add a memeber to the stat's set.
sadd(Client, StatName, Val) ->
rts_stat_vnode:sadd(get_idxnode(Client, StatName), StatName, Val).
%%%===================================================================
%%% Internal Functions
%%%===================================================================
get_idxnode(Client, StatName) ->
DocIdx = riak_core_util:chash_key({list_to_binary(Client), list_to_binary(StatName)}),
hd(riak_core_apl:get_apl(DocIdx, 1, rts_stat)).
| null | https://raw.githubusercontent.com/rzezeski/try-try-try/c5d99f29fb3380f8653efdd1aa6a8f52143a9717/2011/riak-core-the-vnode/rts/src/rts.erl | erlang | @doc Interface into the Real Time Statistics application.
===================================================================
API
===================================================================
@doc Pings a random vnode to make sure communication is functional
@doc Process an entry.
TODO: Coordinator to provide N/R/W
@doc Get a stat's value.
@doc Set a stat's value, replacing the current value.
@doc Append to a stat's value.
@doc Add a memeber to the stat's set.
===================================================================
Internal Functions
=================================================================== | -module(rts).
-include("rts.hrl").
-include_lib("riak_core/include/riak_core_vnode.hrl").
-export([
ping/0,
entry/2,
get/2,
set/3,
append/3,
incr/2,
incrby/3,
sadd/3
]).
ping() ->
DocIdx = riak_core_util:chash_key({<<"ping">>, term_to_binary(now())}),
PrefList = riak_core_apl:get_primary_apl(DocIdx, 1, rts),
[{IndexNode, _Type}] = PrefList,
riak_core_vnode_master:sync_spawn_command(IndexNode, ping, rts_vnode_master).
entry(Client, Entry) ->
DocIdx = riak_core_util:chash_key({list_to_binary(Client),
term_to_binary(now())}),
PrefList = riak_core_apl:get_apl(DocIdx, 1, rts_entry),
[IdxNode] = PrefList,
rts_entry_vnode:entry(IdxNode, Client, Entry).
get(Client, StatName) ->
rts_stat_vnode:get(get_idxnode(Client, StatName), StatName).
set(Client, StatName, Val) ->
rts_stat_vnode:set(get_idxnode(Client, StatName), StatName, Val).
append(Client, StatName, Val) ->
rts_state_vnode:append(get_idxnode(Client, StatName), StatName, Val).
@doc Increment the stat 's value by 1 .
incr(Client, StatName) ->
rts_stat_vnode:incr(get_idxnode(Client, StatName), StatName).
@doc Increment the stat 's value by .
incrby(Client, StatName, Val) ->
rts_stat_vnode:incrby(get_idxnode(Client, StatName), StatName, Val).
sadd(Client, StatName, Val) ->
rts_stat_vnode:sadd(get_idxnode(Client, StatName), StatName, Val).
get_idxnode(Client, StatName) ->
DocIdx = riak_core_util:chash_key({list_to_binary(Client), list_to_binary(StatName)}),
hd(riak_core_apl:get_apl(DocIdx, 1, rts_stat)).
|
5cbf4525cd3c76f2fede4d9cc2d378911a85bbbaf1135ae491d2f3e224108190 | bennguvaye/flu | dss.ml | let pi = 4. *. atan 1.;;
let a_incr a i =
a.(i) <- a.(i) + 1
let a_decr a i =
a.(i) <- a.(i) - 1
let foi = float_of_int
module type PARS =
sig
val n : float
val r0 : float
val e : float
val etaN1 : float
val etaN2 : float
val g1 : float
val g2 : float
val nu : float
val q : float
end
module Sys (Pars : PARS) : (Gill.SYSTEM with type state = int array
and type aux = float * float) =
struct
open Pars
type state = int array;;
type aux = float * float;;
let eta1 = etaN1 *. n
let eta2 = etaN2 *. n
let bet0 = r0 *. nu
let cnt_incr = ref 0
let cnt_decr = ref 0
let beta t =
bet0 *. (1. +. e *. cos (2. *. pi *. t /. 365.))
let gi j = (* get i *)
match j with
| 10 -> 4
| 20 -> 5
| 12 -> 6
| 21 -> 7
| _ -> invalid_arg "expects 10, 20, 12 or 21"
let gr j =
match j with
| 0 -> 0
| 1 -> 1
| 2 -> 2
| 12 -> 3
| _ -> invalid_arg "expects 0, 1, 2 or 12"
let gq j =
match j with
| 0 -> 8
| 1 -> 9
| 2 -> 10
| 12 -> 11
| _ -> invalid_arg "expects 0, 1, 2, or 12"
let fi snk st =
match snk with
| 10 -> foi (st.(gi 10) + st.(gi 12)) +. eta1
| 20 -> foi (st.(gi 20) + st.(gi 21)) +. eta2
| 12 -> foi (st.(gi 10) + st.(gi 12)) +. eta1
| 21 -> foi (st.(gi 20) + st.(gi 21)) +. eta2
| _ -> invalid_arg "expects 10, 20, 12 or 21"
let imloss1_rate src snk t st =
ignore (gr snk) ;
g1 *. foi st.(gr src)
let imloss2_rate src snk t st =
ignore (gr snk) ;
g2 *. foi st.(gr src)
let imloss_modif src snk st =
let nst = Array.copy st in
a_decr nst (gr src) ;
a_incr nst (gr snk) ;
nst
let imlossi1_rate src snk t st =
g1 *. foi st.(gi src)
let imlossi2_rate src snk t st =
g2 *. foi st.(gi src)
let imlossi_modif src snk st =
let nst = Array.copy st in
a_decr nst (gi src) ;
a_incr nst (gi snk) ;
nst
let qloss_rate src snk t st =
ignore (gr snk) ;
q *. foi st.(gq src)
let qloss_modif src snk st =
let nst = Array.copy st in
a_decr nst (gq src) ;
a_incr nst (gr snk) ;
nst
let inf_rate src snk t st =
beta t *. foi st.(gr src) /. n *. (fi snk st)
let inf_modif src snk st =
let nst = Array.copy st in
a_decr nst (gr src) ;
a_incr nst (gi snk) ;
nst
let recov_rate src snk t st =
ignore (gq snk) ; (* in case bad sink *)
nu *. foi st.(gi src)
let recov_modif src snk st =
let nst = Array.copy st in
a_decr nst (gi src) ;
a_incr nst (gq snk) ;
nst
let min_step = 1.
let aux_fun t st =
let infct1 = (inf_rate 0 10 t st) +. (inf_rate 2 12 t st)
and infct2 = (inf_rate 0 20 t st) +. (inf_rate 1 21 t st)
in
(infct1 *. 7. *. 100000. /. n, infct2 *. 7. *. 100000. /. n)
let fl = [imloss1_rate 1 0; imloss2_rate 2 0; imloss1_rate 12 2;
imloss2_rate 12 1;
imlossi1_rate 21 20 ; imlossi2_rate 12 10 ;
qloss_rate 0 0; qloss_rate 1 1 ; qloss_rate 2 2 ;
qloss_rate 12 12 ;
inf_rate 0 10 ; inf_rate 0 20; inf_rate 2 12 ; inf_rate 1 21 ;
recov_rate 10 1 ; recov_rate 20 2 ; recov_rate 21 12 ;
recov_rate 12 12]
let ml = [imloss_modif 1 0 ; imloss_modif 2 0 ; imloss_modif 12 2;
imloss_modif 12 1 ;
imlossi_modif 21 20 ; imlossi_modif 12 10 ;
qloss_modif 0 0 ; qloss_modif 1 1 ; qloss_modif 2 2 ;
qloss_modif 12 12 ;
inf_modif 0 10 ; inf_modif 0 20 ; inf_modif 2 12 ; inf_modif 1 21 ;
recov_modif 10 1 ; recov_modif 20 2 ; recov_modif 21 12 ;
recov_modif 12 12]
let csv_init () =
[
["n=12" ; "m=2"] ;
["t" ;
"inc1" ; "inc2" ;
"R0" ; "R1" ; "R2" ; "R12" ;
"I10" ; "I20" ; "I12" ; "I21" ;
"Q0" ; "Q1" ; "Q2" ; "Q12"]
]
let csv_line t au st =
let au1, au2 = au in
(string_of_float t) ::
(string_of_float au1) :: (string_of_float au2) ::
Array.to_list (Array.map string_of_int st)
end;;
module Default_Algp =
struct
let min_step = 1.
end
| null | https://raw.githubusercontent.com/bennguvaye/flu/5dbae212405b74917ee91392684fd2ee10ab1e58/src/dss.ml | ocaml | get i
in case bad sink | let pi = 4. *. atan 1.;;
let a_incr a i =
a.(i) <- a.(i) + 1
let a_decr a i =
a.(i) <- a.(i) - 1
let foi = float_of_int
module type PARS =
sig
val n : float
val r0 : float
val e : float
val etaN1 : float
val etaN2 : float
val g1 : float
val g2 : float
val nu : float
val q : float
end
module Sys (Pars : PARS) : (Gill.SYSTEM with type state = int array
and type aux = float * float) =
struct
open Pars
type state = int array;;
type aux = float * float;;
let eta1 = etaN1 *. n
let eta2 = etaN2 *. n
let bet0 = r0 *. nu
let cnt_incr = ref 0
let cnt_decr = ref 0
let beta t =
bet0 *. (1. +. e *. cos (2. *. pi *. t /. 365.))
match j with
| 10 -> 4
| 20 -> 5
| 12 -> 6
| 21 -> 7
| _ -> invalid_arg "expects 10, 20, 12 or 21"
let gr j =
match j with
| 0 -> 0
| 1 -> 1
| 2 -> 2
| 12 -> 3
| _ -> invalid_arg "expects 0, 1, 2 or 12"
let gq j =
match j with
| 0 -> 8
| 1 -> 9
| 2 -> 10
| 12 -> 11
| _ -> invalid_arg "expects 0, 1, 2, or 12"
let fi snk st =
match snk with
| 10 -> foi (st.(gi 10) + st.(gi 12)) +. eta1
| 20 -> foi (st.(gi 20) + st.(gi 21)) +. eta2
| 12 -> foi (st.(gi 10) + st.(gi 12)) +. eta1
| 21 -> foi (st.(gi 20) + st.(gi 21)) +. eta2
| _ -> invalid_arg "expects 10, 20, 12 or 21"
let imloss1_rate src snk t st =
ignore (gr snk) ;
g1 *. foi st.(gr src)
let imloss2_rate src snk t st =
ignore (gr snk) ;
g2 *. foi st.(gr src)
let imloss_modif src snk st =
let nst = Array.copy st in
a_decr nst (gr src) ;
a_incr nst (gr snk) ;
nst
let imlossi1_rate src snk t st =
g1 *. foi st.(gi src)
let imlossi2_rate src snk t st =
g2 *. foi st.(gi src)
let imlossi_modif src snk st =
let nst = Array.copy st in
a_decr nst (gi src) ;
a_incr nst (gi snk) ;
nst
let qloss_rate src snk t st =
ignore (gr snk) ;
q *. foi st.(gq src)
let qloss_modif src snk st =
let nst = Array.copy st in
a_decr nst (gq src) ;
a_incr nst (gr snk) ;
nst
let inf_rate src snk t st =
beta t *. foi st.(gr src) /. n *. (fi snk st)
let inf_modif src snk st =
let nst = Array.copy st in
a_decr nst (gr src) ;
a_incr nst (gi snk) ;
nst
let recov_rate src snk t st =
nu *. foi st.(gi src)
let recov_modif src snk st =
let nst = Array.copy st in
a_decr nst (gi src) ;
a_incr nst (gq snk) ;
nst
let min_step = 1.
let aux_fun t st =
let infct1 = (inf_rate 0 10 t st) +. (inf_rate 2 12 t st)
and infct2 = (inf_rate 0 20 t st) +. (inf_rate 1 21 t st)
in
(infct1 *. 7. *. 100000. /. n, infct2 *. 7. *. 100000. /. n)
let fl = [imloss1_rate 1 0; imloss2_rate 2 0; imloss1_rate 12 2;
imloss2_rate 12 1;
imlossi1_rate 21 20 ; imlossi2_rate 12 10 ;
qloss_rate 0 0; qloss_rate 1 1 ; qloss_rate 2 2 ;
qloss_rate 12 12 ;
inf_rate 0 10 ; inf_rate 0 20; inf_rate 2 12 ; inf_rate 1 21 ;
recov_rate 10 1 ; recov_rate 20 2 ; recov_rate 21 12 ;
recov_rate 12 12]
let ml = [imloss_modif 1 0 ; imloss_modif 2 0 ; imloss_modif 12 2;
imloss_modif 12 1 ;
imlossi_modif 21 20 ; imlossi_modif 12 10 ;
qloss_modif 0 0 ; qloss_modif 1 1 ; qloss_modif 2 2 ;
qloss_modif 12 12 ;
inf_modif 0 10 ; inf_modif 0 20 ; inf_modif 2 12 ; inf_modif 1 21 ;
recov_modif 10 1 ; recov_modif 20 2 ; recov_modif 21 12 ;
recov_modif 12 12]
let csv_init () =
[
["n=12" ; "m=2"] ;
["t" ;
"inc1" ; "inc2" ;
"R0" ; "R1" ; "R2" ; "R12" ;
"I10" ; "I20" ; "I12" ; "I21" ;
"Q0" ; "Q1" ; "Q2" ; "Q12"]
]
let csv_line t au st =
let au1, au2 = au in
(string_of_float t) ::
(string_of_float au1) :: (string_of_float au2) ::
Array.to_list (Array.map string_of_int st)
end;;
module Default_Algp =
struct
let min_step = 1.
end
|
18b9ec8cc0bc75d946d753fb3bc83813a613575343c90bfde51c2fa4f191441f | rsnikhil/Forvis_RISCV-ISA-Spec | ReadHexFile.hs | module ReadHexFile (readHexFile) where
-- ================================================================
This code is adapted from MIT 's riscv - semantics repo
-- This module implements a function that reads a hex-memory file
-- and returns a memory (i.e., list of (addr, byte)).
-- ================================================================
Standard Haskell imports
import System.IO
import Data.Word
import Data.Bits
import Numeric (showHex, readHex)
-- Project imports
-- None
-- ================================================================
Read a Mem - Hex file ( each datum should represent one byte )
-- and return a memory (list of (addr,byte))
readHexFile :: FilePath -> IO [(Int, Word8)]
readHexFile f = do
h <- openFile f ReadMode
helper h 0 0 []
where helper h line_num next_addr mem = do
s <- hGetLine h
if (null s)
then (do
putStrLn ("Finished reading hex file (" ++ show line_num ++ " lines)")
return (reverse mem))
else (do
let (next_addr', mem') = processLine s next_addr mem
done <- hIsEOF h
if done
then return (reverse mem')
else helper h (line_num + 1) next_addr' mem')
-- Process a line from a Mem-Hex file, which is
-- either an address line ('@hex-address')
-- or a data line (a hex byte in memory)
processLine :: String -> Int -> [(Int, Word8)] -> (Int, [(Int, Word8)])
processLine ('@':xs) next_addr mem = (fst $ head $ readHex xs, mem)
processLine s next_addr mem = (next_addr + 1,
(next_addr, fst $ head $ readHex s): mem)
-- ================================================================
| null | https://raw.githubusercontent.com/rsnikhil/Forvis_RISCV-ISA-Spec/0c5590a12f4b39644d0497fa6285ad5e33003dfc/ZZ_OLD/v1/src/ReadHexFile.hs | haskell | ================================================================
This module implements a function that reads a hex-memory file
and returns a memory (i.e., list of (addr, byte)).
================================================================
Project imports
None
================================================================
and return a memory (list of (addr,byte))
Process a line from a Mem-Hex file, which is
either an address line ('@hex-address')
or a data line (a hex byte in memory)
================================================================ | module ReadHexFile (readHexFile) where
This code is adapted from MIT 's riscv - semantics repo
Standard Haskell imports
import System.IO
import Data.Word
import Data.Bits
import Numeric (showHex, readHex)
Read a Mem - Hex file ( each datum should represent one byte )
readHexFile :: FilePath -> IO [(Int, Word8)]
readHexFile f = do
h <- openFile f ReadMode
helper h 0 0 []
where helper h line_num next_addr mem = do
s <- hGetLine h
if (null s)
then (do
putStrLn ("Finished reading hex file (" ++ show line_num ++ " lines)")
return (reverse mem))
else (do
let (next_addr', mem') = processLine s next_addr mem
done <- hIsEOF h
if done
then return (reverse mem')
else helper h (line_num + 1) next_addr' mem')
processLine :: String -> Int -> [(Int, Word8)] -> (Int, [(Int, Word8)])
processLine ('@':xs) next_addr mem = (fst $ head $ readHex xs, mem)
processLine s next_addr mem = (next_addr + 1,
(next_addr, fst $ head $ readHex s): mem)
|
5e8f4d52dda0d4f5cd42307d038906d4a9287d03de6598956ad7136604b7e909 | fccm/glMLite | lesson2.ml |
This code was created by 99
If you 've found this code useful , please let me know .
The full tutorial associated with this file is available here :
( OCaml version by )
This code was created by Jeff Molofee '99
If you've found this code useful, please let me know.
The full tutorial associated with this file is available here:
(OCaml version by Florent Monnier)
*)
open GL (* Module For The OpenGL Library *)
open Glu (* Module For The GLu Library *)
open Glut (* Module For The GLUT Library *)
(* A general OpenGL initialization function. Sets all of the initial parameters. *)
let initGL ~width ~height = (* We call this right after our OpenGL window is created. *)
This Will Clear The Background Color To Black
glClearDepth 1.0; (* Enables Clearing Of The Depth Buffer *)
The Type Of Depth Test To Do
glEnable GL_DEPTH_TEST; (* Enables Depth Testing *)
glShadeModel GL_SMOOTH; (* Enables Smooth Color Shading *)
glMatrixMode GL_PROJECTION;
glLoadIdentity(); (* Reset The Projection Matrix *)
gluPerspective 45.0 ((float width)/.(float height)) 0.1 100.0; (* Calculate The Aspect Ratio Of The Window *)
glMatrixMode GL_MODELVIEW;
;;
(* The function called when our window is resized (which shouldn't happen, because we're fullscreen) *)
let reshape ~width ~height =
let height =
Prevent A Divide By Zero If The Window Is Too Small
then 1
else height
in
glViewport 0 0 width height; (* Reset The Current Viewport And Perspective Transformation *)
glMatrixMode GL_PROJECTION;
glLoadIdentity();
gluPerspective 45.0 ((float width)/.(float height)) 0.1 100.0;
glMatrixMode GL_MODELVIEW;
;;
(* The main drawing function. *)
let display() =
glClear [GL_COLOR_BUFFER_BIT; GL_DEPTH_BUFFER_BIT]; (* Clear The Screen And The Depth Buffer *)
glLoadIdentity(); (* Reset The View *)
Move Left 1.5 Units And Into The Screen 6.0
(* draw a triangle *)
glBegin GL_POLYGON; (* start drawing a polygon *)
glVertex3 ( 0.0) ( 1.0) (0.0); (* Top *)
glVertex3 ( 1.0) (-1.0) (0.0); (* Bottom Right *)
glVertex3 (-1.0) (-1.0) (0.0); (* Bottom Left *)
glEnd(); (* we're done with the polygon *)
Move Right 3 Units
(* draw a square (quadrilateral) *)
start drawing a polygon ( 4 sided )
glVertex3 (-1.0) ( 1.0) (0.0); (* Top Left *)
glVertex3 ( 1.0) ( 1.0) (0.0); (* Top Right *)
glVertex3 ( 1.0) (-1.0) (0.0); (* Bottom Right *)
glVertex3 (-1.0) (-1.0) (0.0); (* Bottom Left *)
glEnd(); (* done with the polygon *)
(* swap buffers to display, since we're double buffered. *)
glutSwapBuffers();
;;
(* The function called whenever a key is pressed. *)
let keyPressed ~window ~key ~x ~y =
(* If escape or 'q' are pressed, kill everything. *)
match key with
| 'q' | '\027' ->
(* shut down our window *)
glutDestroyWindow window;
(* exit the program...normal termination. *)
exit(0);
| _ -> ()
;;
let () =
Initialize GLUT state - glut will take any command line arguments that
pertain to it or X Windows - look at its documentation at :
pertain to it or X Windows - look at its documentation at:
*)
ignore(glutInit Sys.argv);
(* Select type of Display mode:
Double buffer
RGBA color
Alpha components supported
Depth buffer *)
glutInitDisplayMode [GLUT_RGBA; GLUT_DOUBLE; GLUT_ALPHA; GLUT_DEPTH];
get a 640 x 480 window
glutInitWindowSize 640 480;
(* the window starts at the upper left corner of the screen *)
glutInitWindowPosition 0 0;
(* Open a window *)
The number of our GLUT window
glutCreateWindow "Jeff Molofee's GL Code Tutorial ... NeHe '99"
in
(* Register the function to do all our OpenGL drawing. *)
glutDisplayFunc ~display;
(* Go fullscreen. This is as soon as possible. *)
glutFullScreen();
(* Even if there are no events, redraw our gl scene. *)
glutIdleFunc ~idle:display;
(* Register the function called when our window is resized. *)
glutReshapeFunc ~reshape;
(* Register the function called when the keyboard is pressed. *)
glutKeyboardFunc ~keyboard:(keyPressed ~window);
Initialize our window .
initGL 640 480;
Start Event Processing Engine
glutMainLoop();
;;
| null | https://raw.githubusercontent.com/fccm/glMLite/c52cd806909581e49d9b660195576c8a932f6d33/nehe-examples/lesson2.ml | ocaml | Module For The OpenGL Library
Module For The GLu Library
Module For The GLUT Library
A general OpenGL initialization function. Sets all of the initial parameters.
We call this right after our OpenGL window is created.
Enables Clearing Of The Depth Buffer
Enables Depth Testing
Enables Smooth Color Shading
Reset The Projection Matrix
Calculate The Aspect Ratio Of The Window
The function called when our window is resized (which shouldn't happen, because we're fullscreen)
Reset The Current Viewport And Perspective Transformation
The main drawing function.
Clear The Screen And The Depth Buffer
Reset The View
draw a triangle
start drawing a polygon
Top
Bottom Right
Bottom Left
we're done with the polygon
draw a square (quadrilateral)
Top Left
Top Right
Bottom Right
Bottom Left
done with the polygon
swap buffers to display, since we're double buffered.
The function called whenever a key is pressed.
If escape or 'q' are pressed, kill everything.
shut down our window
exit the program...normal termination.
Select type of Display mode:
Double buffer
RGBA color
Alpha components supported
Depth buffer
the window starts at the upper left corner of the screen
Open a window
Register the function to do all our OpenGL drawing.
Go fullscreen. This is as soon as possible.
Even if there are no events, redraw our gl scene.
Register the function called when our window is resized.
Register the function called when the keyboard is pressed. |
This code was created by 99
If you 've found this code useful , please let me know .
The full tutorial associated with this file is available here :
( OCaml version by )
This code was created by Jeff Molofee '99
If you've found this code useful, please let me know.
The full tutorial associated with this file is available here:
(OCaml version by Florent Monnier)
*)
This Will Clear The Background Color To Black
The Type Of Depth Test To Do
glMatrixMode GL_PROJECTION;
glMatrixMode GL_MODELVIEW;
;;
let reshape ~width ~height =
let height =
Prevent A Divide By Zero If The Window Is Too Small
then 1
else height
in
glMatrixMode GL_PROJECTION;
glLoadIdentity();
gluPerspective 45.0 ((float width)/.(float height)) 0.1 100.0;
glMatrixMode GL_MODELVIEW;
;;
let display() =
Move Left 1.5 Units And Into The Screen 6.0
Move Right 3 Units
start drawing a polygon ( 4 sided )
glutSwapBuffers();
;;
let keyPressed ~window ~key ~x ~y =
match key with
| 'q' | '\027' ->
glutDestroyWindow window;
exit(0);
| _ -> ()
;;
let () =
Initialize GLUT state - glut will take any command line arguments that
pertain to it or X Windows - look at its documentation at :
pertain to it or X Windows - look at its documentation at:
*)
ignore(glutInit Sys.argv);
glutInitDisplayMode [GLUT_RGBA; GLUT_DOUBLE; GLUT_ALPHA; GLUT_DEPTH];
get a 640 x 480 window
glutInitWindowSize 640 480;
glutInitWindowPosition 0 0;
The number of our GLUT window
glutCreateWindow "Jeff Molofee's GL Code Tutorial ... NeHe '99"
in
glutDisplayFunc ~display;
glutFullScreen();
glutIdleFunc ~idle:display;
glutReshapeFunc ~reshape;
glutKeyboardFunc ~keyboard:(keyPressed ~window);
Initialize our window .
initGL 640 480;
Start Event Processing Engine
glutMainLoop();
;;
|
411e8517a088d10c04abbc663bae49a5c86875c32349924dbce0686afa6f08e8 | slepher/astranaut | astranaut_traverse_SUITE.erl | %%%-------------------------------------------------------------------
@author < >
( C ) 2020 ,
%%% @doc
%%%
%%% @end
Created : 6 Jul 2020 by < >
%%%-------------------------------------------------------------------
-module(astranaut_traverse_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include("do.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
%%--------------------------------------------------------------------
@spec suite ( ) - > Info
%% Info = [tuple()]
%% @end
%%--------------------------------------------------------------------
suite() ->
[{timetrap,{seconds,30}}].
%%--------------------------------------------------------------------
@spec init_per_suite(Config0 ) - >
Config1 | { skip , Reason } | { skip_and_save , Reason , Config1 }
%% Config0 = Config1 = [tuple()]
%% Reason = term()
%% @end
%%--------------------------------------------------------------------
init_per_suite(Config) ->
Config.
%%--------------------------------------------------------------------
) - > term ( ) | { save_config , Config1 }
%% Config0 = Config1 = [tuple()]
%% @end
%%--------------------------------------------------------------------
end_per_suite(_Config) ->
ok.
%%--------------------------------------------------------------------
@spec init_per_group(GroupName , Config0 ) - >
Config1 | { skip , Reason } | { skip_and_save , Reason , Config1 }
%% GroupName = atom()
%% Config0 = Config1 = [tuple()]
%% Reason = term()
%% @end
%%--------------------------------------------------------------------
init_per_group(_GroupName, Config) ->
Config.
%%--------------------------------------------------------------------
, Config0 ) - >
term ( ) | { save_config , Config1 }
%% GroupName = atom()
%% Config0 = Config1 = [tuple()]
%% @end
%%--------------------------------------------------------------------
end_per_group(_GroupName, _Config) ->
ok.
%%--------------------------------------------------------------------
@spec init_per_testcase(TestCase , Config0 ) - >
Config1 | { skip , Reason } | { skip_and_save , Reason , Config1 }
TestCase = atom ( )
%% Config0 = Config1 = [tuple()]
%% Reason = term()
%% @end
%%--------------------------------------------------------------------
init_per_testcase(_TestCase, Config) ->
Config.
%%--------------------------------------------------------------------
, Config0 ) - >
term ( ) | { save_config , Config1 } | { fail , Reason }
TestCase = atom ( )
%% Config0 = Config1 = [tuple()]
%% Reason = term()
%% @end
%%--------------------------------------------------------------------
end_per_testcase(_TestCase, _Config) ->
ok.
%%--------------------------------------------------------------------
@spec groups ( ) - > [ Group ]
Group = { GroupName , Properties , }
%% GroupName = atom()
Properties = [ parallel | sequence | Shuffle | } ]
= [ Group | { group , GroupName } | TestCase ]
TestCase = atom ( )
Shuffle = shuffle | { shuffle,{integer(),integer(),integer ( ) } }
%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail |
%% repeat_until_any_ok | repeat_until_any_fail
%% N = integer() | forever
%% @end
%%--------------------------------------------------------------------
groups() ->
[].
%%--------------------------------------------------------------------
@spec all ( ) - > GroupsAndTestCases | { skip , Reason }
= [ { group , GroupName } | TestCase ]
%% GroupName = atom()
TestCase = atom ( )
%% Reason = term()
%% @end
%%--------------------------------------------------------------------
all() ->
[test_return, test_bind, test_error_0, test_state,
test_pos, test_pos_2, test_file_pos, test_fail].
%%--------------------------------------------------------------------
( ) - > Info
%% Info = [tuple()]
%% @end
%%--------------------------------------------------------------------
test_return() ->
[].
%%--------------------------------------------------------------------
) - >
%% ok | exit() | {skip,Reason} | {comment,Comment} |
{ save_config , Config1 } | { skip_and_save , Reason , Config1 }
%% Config0 = Config1 = [tuple()]
%% Reason = term()
%% Comment = term()
%% @end
%%--------------------------------------------------------------------
test_return(_Config) ->
MA = astranaut_traverse:return(10),
Result = astranaut_return:ok({10, ok}),
?assertEqual(Result, astranaut_traverse:run(MA, undefined, #{}, ok)),
ok.
test_bind(_Config) ->
MA =
do([ traverse ||
A <- astranaut_traverse:return(10),
return(A + 10)
]),
Result = astranaut_return:ok({20, ok}),
?assertEqual(Result, astranaut_traverse:run(MA, undefined, #{}, ok)),
ok.
test_error_0(_Config) ->
MA =
do([ traverse ||
A <- astranaut_traverse:return(10),
astranaut_traverse:update_pos(
10, astranaut_traverse:error(error_0)),
return(A + 10)
]),
ErrorState = astranaut_error:new(),
ErrorState1 = astranaut_error:append_formatted_errors([{10, formatter_0, error_0}], ErrorState),
ErrorState2 = astranaut_error:printable(ErrorState1),
ErrorStateM0 = astranaut_return:run_error(astranaut_traverse:run(MA, formatter_0, #{}, ok)),
ErrorStateM1 = astranaut_error:printable(ErrorStateM0),
?assertEqual(ErrorState2, ErrorStateM1),
ok.
test_state(_Config) ->
MA =
do([ traverse ||
astranaut_traverse:put(10),
astranaut_traverse:state(
fun(A) ->
{A + 10, A + 20}
end)
]),
Result = astranaut_return:ok({20, 30}),
?assertEqual(Result, astranaut_traverse:run(MA, undefined, #{}, ok)),
ok.
test_pos(_Config) ->
MA =
do([ traverse ||
astranaut_traverse:put(10),
astranaut_traverse:update_pos(
20, astranaut_traverse:error(error_0)),
astranaut_traverse:state(
fun(A) ->
{A + 10, A + 20}
end)
]),
Errors = [{20, formatter_0, error_0}],
#{return := Return, error := Error} = astranaut_traverse:run(MA, formatter_0, #{}, ok),
?assertEqual({{20, 30}, Errors}, {Return, astranaut_error:formatted_errors(Error)}),
ok.
test_pos_2(_Config) ->
MA =
do([ traverse ||
astranaut_traverse:update_pos(
20, astranaut_traverse:error(error_0)),
return(10)
]),
Errors = [{20, formatter_0, error_0}],
#{return := Return, error := Error} = astranaut_traverse:run(MA, formatter_0, #{}, ok),
?assertEqual({{10, ok}, Errors}, {Return, astranaut_error:formatted_errors(Error)}),
ok.
test_file_pos(_Config) ->
MA =
do([ traverse ||
astranaut_traverse:update_file(?FILE),
astranaut_traverse:put(10),
astranaut_traverse:with_formatter(
astranaut_traverse,
astranaut_traverse:update_pos(
20,
astranaut_traverse:error(error_0)
)),
astranaut_traverse:update_pos(
25, astranaut_traverse:warning(warning_0)),
B <- astranaut_traverse:get(),
astranaut_traverse:modify(
fun(A) ->
A + 20
end),
astranaut_traverse:eof(),
return(B + 10)
]),
FileErrors = [{?FILE, [{20, astranaut_traverse, error_0}]}],
FileWarnings = [{?FILE, [{25, ?MODULE, warning_0}]}],
#{return := Result, error := Error} = astranaut_traverse:run(MA, ?MODULE, #{}, ok),
?assertEqual({{20, 30}, {FileErrors, FileWarnings}}, {Result, astranaut_error:realize(Error)}),
ok.
test_fail(_Config) ->
MA =
do([ traverse ||
astranaut_traverse:put(10),
astranaut_traverse:with_formatter(
astranaut_traverse,
astranaut_traverse:update_pos(
20,
astranaut_traverse:error(error_0))
),
astranaut_traverse:update_pos(
25, astranaut_traverse:warning(warning_0)),
B <- astranaut_traverse:get(),
astranaut_traverse:modify(
fun(A) ->
A + 20
end),
return(B)
]),
MB = do([ traverse ||
astranaut_traverse:fail_on_error(MA),
astranaut_traverse:put(30),
astranaut_traverse:update_pos(
30, astranaut_traverse:error(error_1))
]),
Errors = [{20, astranaut_traverse, error_0}],
Warnings = [{25, ?MODULE, warning_0}],
#{error := Error} = astranaut_traverse:run(MB, ?MODULE, #{}, ok),
?assertEqual({Errors, Warnings}, {astranaut_error:formatted_errors(Error),
astranaut_error:formatted_warnings(Error)}),
ok.
%% test_bind_node(_Config) ->
NodeA = { atom , 10 , ' A ' } ,
%% Walk = astranaut_walk_return:new(#{}),
%% Return =
%% bind_pre(NodeA, Walk,
%% fun({atom, _Pos, A}) ->
NodeC = { atom , 20 , A } ,
%% astranaut_walk_return:new(#{node => NodeC})
%% end),
? assertEqual({atom , 20 , ' A ' } , Return ) ,
%% ok.
%% test_bind_node_continue(_Config) ->
NodeA = { atom , 10 , ' A ' } ,
Walk = astranaut_walk_return : new(#{continue = > true , node = > { atom , 10 , ' B ' } } ) ,
%% Return =
%% bind_pre(NodeA, Walk,
%% fun({atom, _Pos, A}) ->
NodeC = { atom , 20 , A } ,
%% astranaut_walk_return:new(#{node => NodeC})
%% end),
? assertEqual({atom , 10 , ' B ' } , Return ) ,
%% ok.
%% test_bind_node_update(_Config) ->
NodeA = { atom , 10 , ' A ' } ,
Walk = astranaut_walk_return : new(#{node = > { atom , 10 , ' B ' } } ) ,
%% Return =
%% bind_pre(NodeA, Walk,
%% fun({atom, _Pos, A}) ->
NodeC = { atom , 20 , A } ,
%% astranaut_walk_return:new(#{node => NodeC})
%% end),
? assertEqual({atom , 20 , ' B ' } , Return ) ,
%% ok.
| null | https://raw.githubusercontent.com/slepher/astranaut/95445ee8de492ead2cd9d9671095e251e902986b/test/astranaut_traverse_SUITE.erl | erlang | -------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
--------------------------------------------------------------------
Info = [tuple()]
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
Config0 = Config1 = [tuple()]
Reason = term()
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
Config0 = Config1 = [tuple()]
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
GroupName = atom()
Config0 = Config1 = [tuple()]
Reason = term()
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
GroupName = atom()
Config0 = Config1 = [tuple()]
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
Config0 = Config1 = [tuple()]
Reason = term()
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
Config0 = Config1 = [tuple()]
Reason = term()
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
GroupName = atom()
RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail |
repeat_until_any_ok | repeat_until_any_fail
N = integer() | forever
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
GroupName = atom()
Reason = term()
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
Info = [tuple()]
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
ok | exit() | {skip,Reason} | {comment,Comment} |
Config0 = Config1 = [tuple()]
Reason = term()
Comment = term()
@end
--------------------------------------------------------------------
test_bind_node(_Config) ->
Walk = astranaut_walk_return:new(#{}),
Return =
bind_pre(NodeA, Walk,
fun({atom, _Pos, A}) ->
astranaut_walk_return:new(#{node => NodeC})
end),
ok.
test_bind_node_continue(_Config) ->
Return =
bind_pre(NodeA, Walk,
fun({atom, _Pos, A}) ->
astranaut_walk_return:new(#{node => NodeC})
end),
ok.
test_bind_node_update(_Config) ->
Return =
bind_pre(NodeA, Walk,
fun({atom, _Pos, A}) ->
astranaut_walk_return:new(#{node => NodeC})
end),
ok. | @author < >
( C ) 2020 ,
Created : 6 Jul 2020 by < >
-module(astranaut_traverse_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include("do.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
@spec suite ( ) - > Info
suite() ->
[{timetrap,{seconds,30}}].
@spec init_per_suite(Config0 ) - >
Config1 | { skip , Reason } | { skip_and_save , Reason , Config1 }
init_per_suite(Config) ->
Config.
) - > term ( ) | { save_config , Config1 }
end_per_suite(_Config) ->
ok.
@spec init_per_group(GroupName , Config0 ) - >
Config1 | { skip , Reason } | { skip_and_save , Reason , Config1 }
init_per_group(_GroupName, Config) ->
Config.
, Config0 ) - >
term ( ) | { save_config , Config1 }
end_per_group(_GroupName, _Config) ->
ok.
@spec init_per_testcase(TestCase , Config0 ) - >
Config1 | { skip , Reason } | { skip_and_save , Reason , Config1 }
TestCase = atom ( )
init_per_testcase(_TestCase, Config) ->
Config.
, Config0 ) - >
term ( ) | { save_config , Config1 } | { fail , Reason }
TestCase = atom ( )
end_per_testcase(_TestCase, _Config) ->
ok.
@spec groups ( ) - > [ Group ]
Group = { GroupName , Properties , }
Properties = [ parallel | sequence | Shuffle | } ]
= [ Group | { group , GroupName } | TestCase ]
TestCase = atom ( )
Shuffle = shuffle | { shuffle,{integer(),integer(),integer ( ) } }
groups() ->
[].
@spec all ( ) - > GroupsAndTestCases | { skip , Reason }
= [ { group , GroupName } | TestCase ]
TestCase = atom ( )
all() ->
[test_return, test_bind, test_error_0, test_state,
test_pos, test_pos_2, test_file_pos, test_fail].
( ) - > Info
test_return() ->
[].
) - >
{ save_config , Config1 } | { skip_and_save , Reason , Config1 }
test_return(_Config) ->
MA = astranaut_traverse:return(10),
Result = astranaut_return:ok({10, ok}),
?assertEqual(Result, astranaut_traverse:run(MA, undefined, #{}, ok)),
ok.
test_bind(_Config) ->
MA =
do([ traverse ||
A <- astranaut_traverse:return(10),
return(A + 10)
]),
Result = astranaut_return:ok({20, ok}),
?assertEqual(Result, astranaut_traverse:run(MA, undefined, #{}, ok)),
ok.
test_error_0(_Config) ->
MA =
do([ traverse ||
A <- astranaut_traverse:return(10),
astranaut_traverse:update_pos(
10, astranaut_traverse:error(error_0)),
return(A + 10)
]),
ErrorState = astranaut_error:new(),
ErrorState1 = astranaut_error:append_formatted_errors([{10, formatter_0, error_0}], ErrorState),
ErrorState2 = astranaut_error:printable(ErrorState1),
ErrorStateM0 = astranaut_return:run_error(astranaut_traverse:run(MA, formatter_0, #{}, ok)),
ErrorStateM1 = astranaut_error:printable(ErrorStateM0),
?assertEqual(ErrorState2, ErrorStateM1),
ok.
test_state(_Config) ->
MA =
do([ traverse ||
astranaut_traverse:put(10),
astranaut_traverse:state(
fun(A) ->
{A + 10, A + 20}
end)
]),
Result = astranaut_return:ok({20, 30}),
?assertEqual(Result, astranaut_traverse:run(MA, undefined, #{}, ok)),
ok.
test_pos(_Config) ->
MA =
do([ traverse ||
astranaut_traverse:put(10),
astranaut_traverse:update_pos(
20, astranaut_traverse:error(error_0)),
astranaut_traverse:state(
fun(A) ->
{A + 10, A + 20}
end)
]),
Errors = [{20, formatter_0, error_0}],
#{return := Return, error := Error} = astranaut_traverse:run(MA, formatter_0, #{}, ok),
?assertEqual({{20, 30}, Errors}, {Return, astranaut_error:formatted_errors(Error)}),
ok.
test_pos_2(_Config) ->
MA =
do([ traverse ||
astranaut_traverse:update_pos(
20, astranaut_traverse:error(error_0)),
return(10)
]),
Errors = [{20, formatter_0, error_0}],
#{return := Return, error := Error} = astranaut_traverse:run(MA, formatter_0, #{}, ok),
?assertEqual({{10, ok}, Errors}, {Return, astranaut_error:formatted_errors(Error)}),
ok.
test_file_pos(_Config) ->
MA =
do([ traverse ||
astranaut_traverse:update_file(?FILE),
astranaut_traverse:put(10),
astranaut_traverse:with_formatter(
astranaut_traverse,
astranaut_traverse:update_pos(
20,
astranaut_traverse:error(error_0)
)),
astranaut_traverse:update_pos(
25, astranaut_traverse:warning(warning_0)),
B <- astranaut_traverse:get(),
astranaut_traverse:modify(
fun(A) ->
A + 20
end),
astranaut_traverse:eof(),
return(B + 10)
]),
FileErrors = [{?FILE, [{20, astranaut_traverse, error_0}]}],
FileWarnings = [{?FILE, [{25, ?MODULE, warning_0}]}],
#{return := Result, error := Error} = astranaut_traverse:run(MA, ?MODULE, #{}, ok),
?assertEqual({{20, 30}, {FileErrors, FileWarnings}}, {Result, astranaut_error:realize(Error)}),
ok.
test_fail(_Config) ->
MA =
do([ traverse ||
astranaut_traverse:put(10),
astranaut_traverse:with_formatter(
astranaut_traverse,
astranaut_traverse:update_pos(
20,
astranaut_traverse:error(error_0))
),
astranaut_traverse:update_pos(
25, astranaut_traverse:warning(warning_0)),
B <- astranaut_traverse:get(),
astranaut_traverse:modify(
fun(A) ->
A + 20
end),
return(B)
]),
MB = do([ traverse ||
astranaut_traverse:fail_on_error(MA),
astranaut_traverse:put(30),
astranaut_traverse:update_pos(
30, astranaut_traverse:error(error_1))
]),
Errors = [{20, astranaut_traverse, error_0}],
Warnings = [{25, ?MODULE, warning_0}],
#{error := Error} = astranaut_traverse:run(MB, ?MODULE, #{}, ok),
?assertEqual({Errors, Warnings}, {astranaut_error:formatted_errors(Error),
astranaut_error:formatted_warnings(Error)}),
ok.
NodeA = { atom , 10 , ' A ' } ,
NodeC = { atom , 20 , A } ,
? assertEqual({atom , 20 , ' A ' } , Return ) ,
NodeA = { atom , 10 , ' A ' } ,
Walk = astranaut_walk_return : new(#{continue = > true , node = > { atom , 10 , ' B ' } } ) ,
NodeC = { atom , 20 , A } ,
? assertEqual({atom , 10 , ' B ' } , Return ) ,
NodeA = { atom , 10 , ' A ' } ,
Walk = astranaut_walk_return : new(#{node = > { atom , 10 , ' B ' } } ) ,
NodeC = { atom , 20 , A } ,
? assertEqual({atom , 20 , ' B ' } , Return ) ,
|
e98cfd0e408bb2ccc5b971fd49c64d7b43024438bc2cf06897207566866500a4 | tolysz/prepare-ghcjs | Buffer.hs | # LANGUAGE Trustworthy #
# LANGUAGE CPP , NoImplicitPrelude #
{-# OPTIONS_GHC -funbox-strict-fields #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.IO.Buffer
Copyright : ( c ) The University of Glasgow 2008
-- License : see libraries/base/LICENSE
--
-- Maintainer :
-- Stability : internal
Portability : non - portable ( GHC Extensions )
--
-- Buffers used in the IO system
--
-----------------------------------------------------------------------------
module GHC.IO.Buffer (
-- * Buffers of any element
Buffer(..), BufferState(..), CharBuffer, CharBufElem,
-- ** Creation
newByteBuffer,
newCharBuffer,
newBuffer,
emptyBuffer,
-- ** Insertion/removal
bufferRemove,
bufferAdd,
slideContents,
bufferAdjustL,
-- ** Inspecting
isEmptyBuffer,
isFullBuffer,
isFullCharBuffer,
isWriteBuffer,
bufferElems,
bufferAvailable,
summaryBuffer,
-- ** Operating on the raw buffer as a Ptr
withBuffer,
withRawBuffer,
-- ** Assertions
checkBuffer,
-- * Raw buffers
RawBuffer,
readWord8Buf,
writeWord8Buf,
RawCharBuffer,
peekCharBuf,
readCharBuf,
writeCharBuf,
readCharBufPtr,
writeCharBufPtr,
charSize,
) where
import GHC.Base
import GHC.IO
import GHC.Num
import GHC.Ptr
import GHC.Word
import GHC.Show
import GHC.Real
import Foreign.C.Types
import Foreign.ForeignPtr
import Foreign.Storable
buffers use either UTF-16 or UTF-32 , with the endianness matching
-- the endianness of the host.
--
-- Invariants:
* a buffer consists of * valid * UTF-16 or UTF-32
-- * only whole characters: no partial surrogate pairs
#define CHARBUF_UTF32
-- #define CHARBUF_UTF16
--
NB . it wo n't work to just change this to CHARBUF_UTF16 . Some of
-- the code to make this work is there, and it has been tested with
the Iconv codec , but there are some pieces that are known to be
-- broken. In particular, the built-in codecs
e.g. GHC.IO.Encoding . } need to use isFullCharBuffer or
-- similar in place of the ow >= os comparisons.
-- ---------------------------------------------------------------------------
-- Raw blocks of data
type RawBuffer e = ForeignPtr e
readWord8Buf :: RawBuffer Word8 -> Int -> IO Word8
readWord8Buf arr ix = withForeignPtr arr $ \p -> peekByteOff p ix
writeWord8Buf :: RawBuffer Word8 -> Int -> Word8 -> IO ()
writeWord8Buf arr ix w = withForeignPtr arr $ \p -> pokeByteOff p ix w
#ifdef CHARBUF_UTF16
type CharBufElem = Word16
#else
type CharBufElem = Char
#endif
type RawCharBuffer = RawBuffer CharBufElem
peekCharBuf :: RawCharBuffer -> Int -> IO Char
peekCharBuf arr ix = withForeignPtr arr $ \p -> do
(c,_) <- readCharBufPtr p ix
return c
# INLINE readCharBuf #
readCharBuf :: RawCharBuffer -> Int -> IO (Char, Int)
readCharBuf arr ix = withForeignPtr arr $ \p -> readCharBufPtr p ix
# INLINE writeCharBuf #
writeCharBuf :: RawCharBuffer -> Int -> Char -> IO Int
writeCharBuf arr ix c = withForeignPtr arr $ \p -> writeCharBufPtr p ix c
# INLINE readCharBufPtr #
readCharBufPtr :: Ptr CharBufElem -> Int -> IO (Char, Int)
#ifdef CHARBUF_UTF16
readCharBufPtr p ix = do
c1 <- peekElemOff p ix
if (c1 < 0xd800 || c1 > 0xdbff)
then return (chr (fromIntegral c1), ix+1)
else do c2 <- peekElemOff p (ix+1)
return (unsafeChr ((fromIntegral c1 - 0xd800)*0x400 +
(fromIntegral c2 - 0xdc00) + 0x10000), ix+2)
#else
readCharBufPtr p ix = do c <- peekElemOff (castPtr p) ix; return (c, ix+1)
#endif
# INLINE writeCharBufPtr #
writeCharBufPtr :: Ptr CharBufElem -> Int -> Char -> IO Int
#ifdef CHARBUF_UTF16
writeCharBufPtr p ix ch
| c < 0x10000 = do pokeElemOff p ix (fromIntegral c)
return (ix+1)
| otherwise = do let c' = c - 0x10000
pokeElemOff p ix (fromIntegral (c' `div` 0x400 + 0xd800))
pokeElemOff p (ix+1) (fromIntegral (c' `mod` 0x400 + 0xdc00))
return (ix+2)
where
c = ord ch
#else
writeCharBufPtr p ix ch = do pokeElemOff (castPtr p) ix ch; return (ix+1)
#endif
charSize :: Int
#ifdef CHARBUF_UTF16
charSize = 2
#else
charSize = 4
#endif
-- ---------------------------------------------------------------------------
-- Buffers
-- | A mutable array of bytes that can be passed to foreign functions.
--
-- The buffer is represented by a record, where the record contains
-- the raw buffer and the start/end points of the filled portion. The
-- buffer contents itself is mutable, but the rest of the record is
-- immutable. This is a slightly odd mix, but it turns out to be
-- quite practical: by making all the buffer metadata immutable, we
can have operations on buffer metadata outside of the IO monad .
--
-- The "live" elements of the buffer are those between the 'bufL' and
-- 'bufR' offsets. In an empty buffer, 'bufL' is equal to 'bufR', but
they might not be zero : for exmaple , the buffer might correspond to
-- a memory-mapped file and in which case 'bufL' will point to the
-- next location to be written, which is not necessarily the beginning
-- of the file.
data Buffer e
= Buffer {
bufRaw :: !(RawBuffer e),
bufState :: BufferState,
bufSize :: !Int, -- in elements, not bytes
offset of first item in the buffer
bufR :: !Int -- offset of last item + 1
}
#ifdef CHARBUF_UTF16
type CharBuffer = Buffer Word16
#else
type CharBuffer = Buffer Char
#endif
data BufferState = ReadBuffer | WriteBuffer deriving (Eq)
withBuffer :: Buffer e -> (Ptr e -> IO a) -> IO a
withBuffer Buffer{ bufRaw=raw } f = withForeignPtr (castForeignPtr raw) f
withRawBuffer :: RawBuffer e -> (Ptr e -> IO a) -> IO a
withRawBuffer raw f = withForeignPtr (castForeignPtr raw) f
isEmptyBuffer :: Buffer e -> Bool
isEmptyBuffer Buffer{ bufL=l, bufR=r } = l == r
isFullBuffer :: Buffer e -> Bool
isFullBuffer Buffer{ bufR=w, bufSize=s } = s == w
if a buffer does not have room for a surrogate pair , it is " full "
isFullCharBuffer :: Buffer e -> Bool
#ifdef CHARBUF_UTF16
isFullCharBuffer buf = bufferAvailable buf < 2
#else
isFullCharBuffer = isFullBuffer
#endif
isWriteBuffer :: Buffer e -> Bool
isWriteBuffer buf = case bufState buf of
WriteBuffer -> True
ReadBuffer -> False
bufferElems :: Buffer e -> Int
bufferElems Buffer{ bufR=w, bufL=r } = w - r
bufferAvailable :: Buffer e -> Int
bufferAvailable Buffer{ bufR=w, bufSize=s } = s - w
bufferRemove :: Int -> Buffer e -> Buffer e
bufferRemove i buf@Buffer{ bufL=r } = bufferAdjustL (r+i) buf
bufferAdjustL :: Int -> Buffer e -> Buffer e
bufferAdjustL l buf@Buffer{ bufR=w }
| l == w = buf{ bufL=0, bufR=0 }
| otherwise = buf{ bufL=l, bufR=w }
bufferAdd :: Int -> Buffer e -> Buffer e
bufferAdd i buf@Buffer{ bufR=w } = buf{ bufR=w+i }
emptyBuffer :: RawBuffer e -> Int -> BufferState -> Buffer e
emptyBuffer raw sz state =
Buffer{ bufRaw=raw, bufState=state, bufR=0, bufL=0, bufSize=sz }
newByteBuffer :: Int -> BufferState -> IO (Buffer Word8)
newByteBuffer c st = newBuffer c c st
newCharBuffer :: Int -> BufferState -> IO CharBuffer
newCharBuffer c st = newBuffer (c * charSize) c st
newBuffer :: Int -> Int -> BufferState -> IO (Buffer e)
newBuffer bytes sz state = do
fp <- mallocForeignPtrBytes bytes
return (emptyBuffer fp sz state)
-- | slides the contents of the buffer to the beginning
slideContents :: Buffer Word8 -> IO (Buffer Word8)
slideContents buf@Buffer{ bufL=l, bufR=r, bufRaw=raw } = do
let elems = r - l
withRawBuffer raw $ \p ->
do _ <- memmove p (p `plusPtr` l) (fromIntegral elems)
return ()
return buf{ bufL=0, bufR=elems }
foreign import ccall unsafe "memmove"
memmove :: Ptr a -> Ptr a -> CSize -> IO (Ptr a)
summaryBuffer :: Buffer a -> String
summaryBuffer buf = "buf" ++ show (bufSize buf) ++ "(" ++ show (bufL buf) ++ "-" ++ show (bufR buf) ++ ")"
-- INVARIANTS on Buffers:
-- * r <= w
-- * if r == w, and the buffer is for reading, then r == 0 && w == 0
-- * a write buffer is never full. If an operation
-- fills up the buffer, it will always flush it before
-- returning.
* a read buffer may be full as a result of hLookAhead . In normal
operation , a read buffer always has at least one character of space .
checkBuffer :: Buffer a -> IO ()
checkBuffer buf@Buffer{ bufState = state, bufL=r, bufR=w, bufSize=size } = do
check buf (
size > 0
&& r <= w
&& w <= size
&& ( r /= w || state == WriteBuffer || (r == 0 && w == 0) )
&& ( state /= WriteBuffer || w < size ) -- write buffer is never full
)
check :: Buffer a -> Bool -> IO ()
check _ True = return ()
check buf False = errorWithoutStackTrace ("buffer invariant violation: " ++ summaryBuffer buf)
| null | https://raw.githubusercontent.com/tolysz/prepare-ghcjs/8499e14e27854a366e98f89fab0af355056cf055/spec-lts8/base/GHC/IO/Buffer.hs | haskell | # OPTIONS_GHC -funbox-strict-fields #
---------------------------------------------------------------------------
|
Module : GHC.IO.Buffer
License : see libraries/base/LICENSE
Maintainer :
Stability : internal
Buffers used in the IO system
---------------------------------------------------------------------------
* Buffers of any element
** Creation
** Insertion/removal
** Inspecting
** Operating on the raw buffer as a Ptr
** Assertions
* Raw buffers
the endianness of the host.
Invariants:
* only whole characters: no partial surrogate pairs
#define CHARBUF_UTF16
the code to make this work is there, and it has been tested with
broken. In particular, the built-in codecs
similar in place of the ow >= os comparisons.
---------------------------------------------------------------------------
Raw blocks of data
---------------------------------------------------------------------------
Buffers
| A mutable array of bytes that can be passed to foreign functions.
The buffer is represented by a record, where the record contains
the raw buffer and the start/end points of the filled portion. The
buffer contents itself is mutable, but the rest of the record is
immutable. This is a slightly odd mix, but it turns out to be
quite practical: by making all the buffer metadata immutable, we
The "live" elements of the buffer are those between the 'bufL' and
'bufR' offsets. In an empty buffer, 'bufL' is equal to 'bufR', but
a memory-mapped file and in which case 'bufL' will point to the
next location to be written, which is not necessarily the beginning
of the file.
in elements, not bytes
offset of last item + 1
| slides the contents of the buffer to the beginning
INVARIANTS on Buffers:
* r <= w
* if r == w, and the buffer is for reading, then r == 0 && w == 0
* a write buffer is never full. If an operation
fills up the buffer, it will always flush it before
returning.
write buffer is never full | # LANGUAGE Trustworthy #
# LANGUAGE CPP , NoImplicitPrelude #
Copyright : ( c ) The University of Glasgow 2008
Portability : non - portable ( GHC Extensions )
module GHC.IO.Buffer (
Buffer(..), BufferState(..), CharBuffer, CharBufElem,
newByteBuffer,
newCharBuffer,
newBuffer,
emptyBuffer,
bufferRemove,
bufferAdd,
slideContents,
bufferAdjustL,
isEmptyBuffer,
isFullBuffer,
isFullCharBuffer,
isWriteBuffer,
bufferElems,
bufferAvailable,
summaryBuffer,
withBuffer,
withRawBuffer,
checkBuffer,
RawBuffer,
readWord8Buf,
writeWord8Buf,
RawCharBuffer,
peekCharBuf,
readCharBuf,
writeCharBuf,
readCharBufPtr,
writeCharBufPtr,
charSize,
) where
import GHC.Base
import GHC.IO
import GHC.Num
import GHC.Ptr
import GHC.Word
import GHC.Show
import GHC.Real
import Foreign.C.Types
import Foreign.ForeignPtr
import Foreign.Storable
buffers use either UTF-16 or UTF-32 , with the endianness matching
* a buffer consists of * valid * UTF-16 or UTF-32
#define CHARBUF_UTF32
NB . it wo n't work to just change this to CHARBUF_UTF16 . Some of
the Iconv codec , but there are some pieces that are known to be
e.g. GHC.IO.Encoding . } need to use isFullCharBuffer or
type RawBuffer e = ForeignPtr e
readWord8Buf :: RawBuffer Word8 -> Int -> IO Word8
readWord8Buf arr ix = withForeignPtr arr $ \p -> peekByteOff p ix
writeWord8Buf :: RawBuffer Word8 -> Int -> Word8 -> IO ()
writeWord8Buf arr ix w = withForeignPtr arr $ \p -> pokeByteOff p ix w
#ifdef CHARBUF_UTF16
type CharBufElem = Word16
#else
type CharBufElem = Char
#endif
type RawCharBuffer = RawBuffer CharBufElem
peekCharBuf :: RawCharBuffer -> Int -> IO Char
peekCharBuf arr ix = withForeignPtr arr $ \p -> do
(c,_) <- readCharBufPtr p ix
return c
# INLINE readCharBuf #
readCharBuf :: RawCharBuffer -> Int -> IO (Char, Int)
readCharBuf arr ix = withForeignPtr arr $ \p -> readCharBufPtr p ix
# INLINE writeCharBuf #
writeCharBuf :: RawCharBuffer -> Int -> Char -> IO Int
writeCharBuf arr ix c = withForeignPtr arr $ \p -> writeCharBufPtr p ix c
# INLINE readCharBufPtr #
readCharBufPtr :: Ptr CharBufElem -> Int -> IO (Char, Int)
#ifdef CHARBUF_UTF16
readCharBufPtr p ix = do
c1 <- peekElemOff p ix
if (c1 < 0xd800 || c1 > 0xdbff)
then return (chr (fromIntegral c1), ix+1)
else do c2 <- peekElemOff p (ix+1)
return (unsafeChr ((fromIntegral c1 - 0xd800)*0x400 +
(fromIntegral c2 - 0xdc00) + 0x10000), ix+2)
#else
readCharBufPtr p ix = do c <- peekElemOff (castPtr p) ix; return (c, ix+1)
#endif
# INLINE writeCharBufPtr #
writeCharBufPtr :: Ptr CharBufElem -> Int -> Char -> IO Int
#ifdef CHARBUF_UTF16
writeCharBufPtr p ix ch
| c < 0x10000 = do pokeElemOff p ix (fromIntegral c)
return (ix+1)
| otherwise = do let c' = c - 0x10000
pokeElemOff p ix (fromIntegral (c' `div` 0x400 + 0xd800))
pokeElemOff p (ix+1) (fromIntegral (c' `mod` 0x400 + 0xdc00))
return (ix+2)
where
c = ord ch
#else
writeCharBufPtr p ix ch = do pokeElemOff (castPtr p) ix ch; return (ix+1)
#endif
charSize :: Int
#ifdef CHARBUF_UTF16
charSize = 2
#else
charSize = 4
#endif
can have operations on buffer metadata outside of the IO monad .
they might not be zero : for exmaple , the buffer might correspond to
data Buffer e
= Buffer {
bufRaw :: !(RawBuffer e),
bufState :: BufferState,
offset of first item in the buffer
}
#ifdef CHARBUF_UTF16
type CharBuffer = Buffer Word16
#else
type CharBuffer = Buffer Char
#endif
data BufferState = ReadBuffer | WriteBuffer deriving (Eq)
withBuffer :: Buffer e -> (Ptr e -> IO a) -> IO a
withBuffer Buffer{ bufRaw=raw } f = withForeignPtr (castForeignPtr raw) f
withRawBuffer :: RawBuffer e -> (Ptr e -> IO a) -> IO a
withRawBuffer raw f = withForeignPtr (castForeignPtr raw) f
isEmptyBuffer :: Buffer e -> Bool
isEmptyBuffer Buffer{ bufL=l, bufR=r } = l == r
isFullBuffer :: Buffer e -> Bool
isFullBuffer Buffer{ bufR=w, bufSize=s } = s == w
if a buffer does not have room for a surrogate pair , it is " full "
isFullCharBuffer :: Buffer e -> Bool
#ifdef CHARBUF_UTF16
isFullCharBuffer buf = bufferAvailable buf < 2
#else
isFullCharBuffer = isFullBuffer
#endif
isWriteBuffer :: Buffer e -> Bool
isWriteBuffer buf = case bufState buf of
WriteBuffer -> True
ReadBuffer -> False
bufferElems :: Buffer e -> Int
bufferElems Buffer{ bufR=w, bufL=r } = w - r
bufferAvailable :: Buffer e -> Int
bufferAvailable Buffer{ bufR=w, bufSize=s } = s - w
bufferRemove :: Int -> Buffer e -> Buffer e
bufferRemove i buf@Buffer{ bufL=r } = bufferAdjustL (r+i) buf
bufferAdjustL :: Int -> Buffer e -> Buffer e
bufferAdjustL l buf@Buffer{ bufR=w }
| l == w = buf{ bufL=0, bufR=0 }
| otherwise = buf{ bufL=l, bufR=w }
bufferAdd :: Int -> Buffer e -> Buffer e
bufferAdd i buf@Buffer{ bufR=w } = buf{ bufR=w+i }
emptyBuffer :: RawBuffer e -> Int -> BufferState -> Buffer e
emptyBuffer raw sz state =
Buffer{ bufRaw=raw, bufState=state, bufR=0, bufL=0, bufSize=sz }
newByteBuffer :: Int -> BufferState -> IO (Buffer Word8)
newByteBuffer c st = newBuffer c c st
newCharBuffer :: Int -> BufferState -> IO CharBuffer
newCharBuffer c st = newBuffer (c * charSize) c st
newBuffer :: Int -> Int -> BufferState -> IO (Buffer e)
newBuffer bytes sz state = do
fp <- mallocForeignPtrBytes bytes
return (emptyBuffer fp sz state)
slideContents :: Buffer Word8 -> IO (Buffer Word8)
slideContents buf@Buffer{ bufL=l, bufR=r, bufRaw=raw } = do
let elems = r - l
withRawBuffer raw $ \p ->
do _ <- memmove p (p `plusPtr` l) (fromIntegral elems)
return ()
return buf{ bufL=0, bufR=elems }
foreign import ccall unsafe "memmove"
memmove :: Ptr a -> Ptr a -> CSize -> IO (Ptr a)
summaryBuffer :: Buffer a -> String
summaryBuffer buf = "buf" ++ show (bufSize buf) ++ "(" ++ show (bufL buf) ++ "-" ++ show (bufR buf) ++ ")"
* a read buffer may be full as a result of hLookAhead . In normal
operation , a read buffer always has at least one character of space .
checkBuffer :: Buffer a -> IO ()
checkBuffer buf@Buffer{ bufState = state, bufL=r, bufR=w, bufSize=size } = do
check buf (
size > 0
&& r <= w
&& w <= size
&& ( r /= w || state == WriteBuffer || (r == 0 && w == 0) )
)
check :: Buffer a -> Bool -> IO ()
check _ True = return ()
check buf False = errorWithoutStackTrace ("buffer invariant violation: " ++ summaryBuffer buf)
|
ad6e56db0d94b8ed6c4d2fd462de20beb62d084b596d2c8c3640b3844cc8423c | scrintal/heroicons-reagent | globe_asia_australia.cljs | (ns com.scrintal.heroicons.mini.globe-asia-australia)
(defn render []
[:svg {:xmlns ""
:viewBox "0 0 20 20"
:fill "currentColor"
:aria-hidden "true"}
[:path {:fillRule "evenodd"
:d "M18 10a8 8 0 11-16 0 8 8 0 0116 0zm-6.5 6.326a6.52 6.52 0 01-1.5.174 6.487 6.487 0 01-5.011-2.36l.49-.98a.423.423 0 01.614-.164l.294.196a.992.992 0 001.491-1.139l-.197-.593a.252.252 0 01.126-.304l1.973-.987a.938.938 0 00.361-1.359.375.375 0 01.239-.576l.125-.025A2.421 2.421 0 0012.327 6.6l.05-.149a1 1 0 00-.242-1.023l-1.489-1.489a.5.5 0 01-.146-.353v-.067a6.5 6.5 0 015.392 9.23 1.398 1.398 0 00-.68-.244l-.566-.566a1.5 1.5 0 00-1.06-.439h-.172a1.5 1.5 0 00-1.06.44l-.593.592a.501.501 0 01-.13.093l-1.578.79a1 1 0 00-.553.894v.191a1 1 0 001 1h.5a.5.5 0 01.5.5v.326z"
:clipRule "evenodd"}]]) | null | https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/mini/globe_asia_australia.cljs | clojure | (ns com.scrintal.heroicons.mini.globe-asia-australia)
(defn render []
[:svg {:xmlns ""
:viewBox "0 0 20 20"
:fill "currentColor"
:aria-hidden "true"}
[:path {:fillRule "evenodd"
:d "M18 10a8 8 0 11-16 0 8 8 0 0116 0zm-6.5 6.326a6.52 6.52 0 01-1.5.174 6.487 6.487 0 01-5.011-2.36l.49-.98a.423.423 0 01.614-.164l.294.196a.992.992 0 001.491-1.139l-.197-.593a.252.252 0 01.126-.304l1.973-.987a.938.938 0 00.361-1.359.375.375 0 01.239-.576l.125-.025A2.421 2.421 0 0012.327 6.6l.05-.149a1 1 0 00-.242-1.023l-1.489-1.489a.5.5 0 01-.146-.353v-.067a6.5 6.5 0 015.392 9.23 1.398 1.398 0 00-.68-.244l-.566-.566a1.5 1.5 0 00-1.06-.439h-.172a1.5 1.5 0 00-1.06.44l-.593.592a.501.501 0 01-.13.093l-1.578.79a1 1 0 00-.553.894v.191a1 1 0 001 1h.5a.5.5 0 01.5.5v.326z"
:clipRule "evenodd"}]]) | |
fd72238bd6cc9d93b4db52d69670fde4b70f3a867b68af778ef58b55a6f1fc6e | hammerlab/prohlatype | distances.ml | (** Measure distances between different alleles from (at the moment) parsed
Multiple Sequence Alignment files. *)
open Util
module Trie_distances = struct
let init_trie elems =
let open Nomenclature in
list_fold_ok elems ~init:Trie.empty ~f:(fun trie s ->
parse s >>= fun (_gene, (allele_resolution, suffix_opt)) ->
Ok (Trie.add allele_resolution suffix_opt trie))
let f ~targets ~candidates =
let open Nomenclature in
let just_candidates = StringMap.bindings candidates |> List.map ~f:fst in
init_trie just_candidates >>= fun trie ->
StringMap.bindings targets |> list_fold_ok ~init:StringMap.empty ~f:(fun m (ta, _) ->
parse ta >>= fun (locus, (allele_resolution, _suffix_opt)) ->
let closest_allele_res = Trie.nearest allele_resolution trie in
let closest_allele_str = to_string ~locus closest_allele_res in
Ok (StringMap.add ~key:ta ~data:[(closest_allele_str, 1.)] m))
end (* Trie_distances *)
module Weighted_per_segment = struct
let debug = ref false
let against_mask ~init ~f =
List.fold_left ~init ~f:(fun a -> function
| None -> a
| Some (mismatches, ref_len) -> f a ~mismatches ~ref_len)
let apply_mask ~init ~f =
let open Option in
List.fold_left2 ~init:(Some init) ~f:(fun aopt b c ->
aopt >>= fun a ->
match b, c with
| None, None -> Some a
| None, _
| _, None -> None
| Some _, Some (mismatches, ref_len) ->
Some (f a ~mismatches ~ref_len))
let dist ~normalize tlen =
0.,
(fun a ~mismatches ~ref_len ->
let w = ref_len /. tlen in
if normalize then
a +. mismatches *. w
else
a +. mismatches)
let one ~reference ~reference_sequence ~candidates ~allele ~allele_name =
let open MSA.Segments in
distances ~reference:reference_sequence ~allele >>= fun dist_to_ref ->
let ref_mask =
List.map dist_to_ref ~f:(fun s ->
match s.relationship with
| Full _ -> Some (float s.mismatches, float s.seq_length)
| _ -> None)
in
let tlen = against_mask ref_mask ~init:0.
~f:(fun [@warning "-27"] a ~mismatches ~ref_len -> a +. ref_len)
in
let dist_init, dist_f = dist ~normalize:true tlen in
let ref_diff = against_mask ~init:dist_init ~f:dist_f ref_mask in
StringMap.bindings candidates
|> list_fold_ok ~init:[] ~f:(fun acc (al2, allele2) ->
if !debug then
printf "Calculating weighted differences for %s vs %s\n" allele_name al2;
distances_between ~reference:reference_sequence ~allele1:allele ~allele2
>>| List.map ~f:(fun s ->
match s.relationship with
| (Full _), (Full _) ->
Some (float s.mismatches, float s.seq_length)
| _ ->
None)
>>= fun dlst ->
match apply_mask ~init:dist_init ~f:dist_f ref_mask dlst with
| None -> Ok acc
| Some dist -> Ok ((al2, dist) :: acc))
>>= fun all_distances ->
let with_reference = (reference, ref_diff) :: all_distances in
Ok (List.sort with_reference
~cmp:(fun (_,d1) (_,(d2:float)) -> compare d1 d2))
let f ~reference ~reference_sequence ~targets ~candidates =
let c = one ~reference ~reference_sequence ~candidates in
StringMap.bindings targets
|> list_fold_ok ~init:[] ~f:(fun acc (allele_name, allele) ->
c ~allele_name ~allele >>= fun d -> Ok ((allele_name, d) :: acc))
>>| string_map_of_assoc
end (* Weighted_per_segment *)
module Reference = struct
let [@warning "-27"] one ~reference ~reference_sequence ~candidates ~allele =
let is_ref, isn't =
StringMap.bindings candidates
|> List.partition ~f:(fun (al, _seq) -> al = reference)
in
List.map is_ref ~f:(fun _ -> reference, 0.0)
@ List.map isn't ~f:(fun (a, _) -> a, infinity)
let f ~reference ~reference_sequence ~targets ~candidates =
StringMap.map targets ~f:(fun _s ->
one ~reference ~reference_sequence ~candidates
~allele:("Allele sequence doesn't matter", []))
end (* Reference *)
type logic =
| Reference
| Trie
| WeightedPerSegment
[@@deriving show]
type alignment_sequence = string MSA.alignment_sequence
let one ~reference ~reference_sequence ~allele ~candidates = function
| Reference ->
let _aname, aseq = allele in
Ok (Reference.one ~reference ~reference_sequence ~candidates
~allele:aseq)
| Trie ->
let aname, aseq = allele in
let targets = StringMap.singleton aname aseq in
Trie_distances.f ~targets ~candidates >>= fun m ->
Ok (StringMap.find aname m)
| WeightedPerSegment ->
let allele_name, aseq = allele in
Weighted_per_segment.one ~reference ~reference_sequence ~candidates
~allele_name ~allele:aseq
type arg =
{ reference : string
; reference_sequence : alignment_sequence
; targets : alignment_sequence StringMap.t
; candidates : alignment_sequence StringMap.t
}
let compute { reference; reference_sequence; targets; candidates } = function
| Reference ->
Ok (Reference.f ~reference ~reference_sequence ~targets ~candidates)
| Trie ->
Trie_distances.f ~targets ~candidates
| WeightedPerSegment ->
Weighted_per_segment.f ~reference ~reference_sequence ~targets ~candidates
| null | https://raw.githubusercontent.com/hammerlab/prohlatype/3acaf7154f93675fc729971d4c76c2b133e90ce6/src/lib/distances.ml | ocaml | * Measure distances between different alleles from (at the moment) parsed
Multiple Sequence Alignment files.
Trie_distances
Weighted_per_segment
Reference |
open Util
module Trie_distances = struct
let init_trie elems =
let open Nomenclature in
list_fold_ok elems ~init:Trie.empty ~f:(fun trie s ->
parse s >>= fun (_gene, (allele_resolution, suffix_opt)) ->
Ok (Trie.add allele_resolution suffix_opt trie))
let f ~targets ~candidates =
let open Nomenclature in
let just_candidates = StringMap.bindings candidates |> List.map ~f:fst in
init_trie just_candidates >>= fun trie ->
StringMap.bindings targets |> list_fold_ok ~init:StringMap.empty ~f:(fun m (ta, _) ->
parse ta >>= fun (locus, (allele_resolution, _suffix_opt)) ->
let closest_allele_res = Trie.nearest allele_resolution trie in
let closest_allele_str = to_string ~locus closest_allele_res in
Ok (StringMap.add ~key:ta ~data:[(closest_allele_str, 1.)] m))
module Weighted_per_segment = struct
let debug = ref false
let against_mask ~init ~f =
List.fold_left ~init ~f:(fun a -> function
| None -> a
| Some (mismatches, ref_len) -> f a ~mismatches ~ref_len)
let apply_mask ~init ~f =
let open Option in
List.fold_left2 ~init:(Some init) ~f:(fun aopt b c ->
aopt >>= fun a ->
match b, c with
| None, None -> Some a
| None, _
| _, None -> None
| Some _, Some (mismatches, ref_len) ->
Some (f a ~mismatches ~ref_len))
let dist ~normalize tlen =
0.,
(fun a ~mismatches ~ref_len ->
let w = ref_len /. tlen in
if normalize then
a +. mismatches *. w
else
a +. mismatches)
let one ~reference ~reference_sequence ~candidates ~allele ~allele_name =
let open MSA.Segments in
distances ~reference:reference_sequence ~allele >>= fun dist_to_ref ->
let ref_mask =
List.map dist_to_ref ~f:(fun s ->
match s.relationship with
| Full _ -> Some (float s.mismatches, float s.seq_length)
| _ -> None)
in
let tlen = against_mask ref_mask ~init:0.
~f:(fun [@warning "-27"] a ~mismatches ~ref_len -> a +. ref_len)
in
let dist_init, dist_f = dist ~normalize:true tlen in
let ref_diff = against_mask ~init:dist_init ~f:dist_f ref_mask in
StringMap.bindings candidates
|> list_fold_ok ~init:[] ~f:(fun acc (al2, allele2) ->
if !debug then
printf "Calculating weighted differences for %s vs %s\n" allele_name al2;
distances_between ~reference:reference_sequence ~allele1:allele ~allele2
>>| List.map ~f:(fun s ->
match s.relationship with
| (Full _), (Full _) ->
Some (float s.mismatches, float s.seq_length)
| _ ->
None)
>>= fun dlst ->
match apply_mask ~init:dist_init ~f:dist_f ref_mask dlst with
| None -> Ok acc
| Some dist -> Ok ((al2, dist) :: acc))
>>= fun all_distances ->
let with_reference = (reference, ref_diff) :: all_distances in
Ok (List.sort with_reference
~cmp:(fun (_,d1) (_,(d2:float)) -> compare d1 d2))
let f ~reference ~reference_sequence ~targets ~candidates =
let c = one ~reference ~reference_sequence ~candidates in
StringMap.bindings targets
|> list_fold_ok ~init:[] ~f:(fun acc (allele_name, allele) ->
c ~allele_name ~allele >>= fun d -> Ok ((allele_name, d) :: acc))
>>| string_map_of_assoc
module Reference = struct
let [@warning "-27"] one ~reference ~reference_sequence ~candidates ~allele =
let is_ref, isn't =
StringMap.bindings candidates
|> List.partition ~f:(fun (al, _seq) -> al = reference)
in
List.map is_ref ~f:(fun _ -> reference, 0.0)
@ List.map isn't ~f:(fun (a, _) -> a, infinity)
let f ~reference ~reference_sequence ~targets ~candidates =
StringMap.map targets ~f:(fun _s ->
one ~reference ~reference_sequence ~candidates
~allele:("Allele sequence doesn't matter", []))
type logic =
| Reference
| Trie
| WeightedPerSegment
[@@deriving show]
type alignment_sequence = string MSA.alignment_sequence
let one ~reference ~reference_sequence ~allele ~candidates = function
| Reference ->
let _aname, aseq = allele in
Ok (Reference.one ~reference ~reference_sequence ~candidates
~allele:aseq)
| Trie ->
let aname, aseq = allele in
let targets = StringMap.singleton aname aseq in
Trie_distances.f ~targets ~candidates >>= fun m ->
Ok (StringMap.find aname m)
| WeightedPerSegment ->
let allele_name, aseq = allele in
Weighted_per_segment.one ~reference ~reference_sequence ~candidates
~allele_name ~allele:aseq
type arg =
{ reference : string
; reference_sequence : alignment_sequence
; targets : alignment_sequence StringMap.t
; candidates : alignment_sequence StringMap.t
}
let compute { reference; reference_sequence; targets; candidates } = function
| Reference ->
Ok (Reference.f ~reference ~reference_sequence ~targets ~candidates)
| Trie ->
Trie_distances.f ~targets ~candidates
| WeightedPerSegment ->
Weighted_per_segment.f ~reference ~reference_sequence ~targets ~candidates
|
8ae45ce718e902d0c2a6cc963a1c60144dc9bb3f034b0fbd5a12e4c43ae15ef0 | ocurrent/ocurrent | s.ml | * The subset of the Current API that the RPC system needs .
This is duplicated here to avoid making RPC clients depend on
the " current " service implementation package .
This is duplicated here to avoid making RPC clients depend on
the "current" service implementation package. *)
module type CURRENT = sig
class type actions = object
method pp : Format.formatter -> unit
method rebuild : (unit -> string) option
end
module Job : sig
type t
module Map : Map.S with type key = string
val log_path : string -> (Fpath.t, [`Msg of string]) result
val lookup_running : string -> t option
val wait_for_log_data : t -> unit Lwt.t
val approve_early_start : t -> unit
val cancel : t -> string -> unit
val cancelled_state : t -> (unit, [`Msg of string]) result
end
module Engine : sig
type t
type results
val state : t -> results
val jobs : results -> actions Job.Map.t
end
end
| null | https://raw.githubusercontent.com/ocurrent/ocurrent/344af83279e9ba17f5f32d0a0351c228a6f42863/lib_rpc/s.ml | ocaml | * The subset of the Current API that the RPC system needs .
This is duplicated here to avoid making RPC clients depend on
the " current " service implementation package .
This is duplicated here to avoid making RPC clients depend on
the "current" service implementation package. *)
module type CURRENT = sig
class type actions = object
method pp : Format.formatter -> unit
method rebuild : (unit -> string) option
end
module Job : sig
type t
module Map : Map.S with type key = string
val log_path : string -> (Fpath.t, [`Msg of string]) result
val lookup_running : string -> t option
val wait_for_log_data : t -> unit Lwt.t
val approve_early_start : t -> unit
val cancel : t -> string -> unit
val cancelled_state : t -> (unit, [`Msg of string]) result
end
module Engine : sig
type t
type results
val state : t -> results
val jobs : results -> actions Job.Map.t
end
end
| |
d4a304f84210ef91bca345be91257f7380316aade6e786bcb84d0fb21ce7f1b3 | v-kolesnikov/sicp | 2_51.clj | (ns sicp.chapter02.2-51
(:require [sicp.chapter02.2-46 :refer [make-vect]]
[sicp.chapter02.2-50 :refer [rotate-counterclockwise-180
rotate-counterclockwise-270
transform-painter]]))
(defn beside
[left-painter right-painter]
(let [split-point (make-vect 0.5 0.0)
paint-left (transform-painter left-painter
(make-vect 0.0 0.0)
split-point
(make-vect 0.0 1.0))
paint-right (transform-painter right-painter
split-point
(make-vect 1.0 0.0)
(make-vect 0.5 1.0))]
(fn [frame]
(paint-left frame)
(paint-right frame))))
(defn below
[bottom-painter top-painter]
(let [split-point (make-vect 0.0 0.5)
paint-bottom (transform-painter bottom-painter
(make-vect 0.0 0.0)
(make-vect 1.0 0.0)
split-point)
paint-top (transform-painter top-painter
split-point
(make-vect 1.0 0.5)
(make-vect 0.0 1.0))]
(fn [frame]
(paint-bottom frame)
(paint-top frame))))
(defn below-rotate
[bottom-painter top-painter]
(let [bottom-rotated (rotate-counterclockwise-270 bottom-painter)
top-rotated (rotate-counterclockwise-270 top-painter)]
(->> (beside bottom-rotated top-rotated)
(rotate-counterclockwise-270)
(rotate-counterclockwise-180))))
| null | https://raw.githubusercontent.com/v-kolesnikov/sicp/4298de6083440a75898e97aad658025a8cecb631/src/sicp/chapter02/2_51.clj | clojure | (ns sicp.chapter02.2-51
(:require [sicp.chapter02.2-46 :refer [make-vect]]
[sicp.chapter02.2-50 :refer [rotate-counterclockwise-180
rotate-counterclockwise-270
transform-painter]]))
(defn beside
[left-painter right-painter]
(let [split-point (make-vect 0.5 0.0)
paint-left (transform-painter left-painter
(make-vect 0.0 0.0)
split-point
(make-vect 0.0 1.0))
paint-right (transform-painter right-painter
split-point
(make-vect 1.0 0.0)
(make-vect 0.5 1.0))]
(fn [frame]
(paint-left frame)
(paint-right frame))))
(defn below
[bottom-painter top-painter]
(let [split-point (make-vect 0.0 0.5)
paint-bottom (transform-painter bottom-painter
(make-vect 0.0 0.0)
(make-vect 1.0 0.0)
split-point)
paint-top (transform-painter top-painter
split-point
(make-vect 1.0 0.5)
(make-vect 0.0 1.0))]
(fn [frame]
(paint-bottom frame)
(paint-top frame))))
(defn below-rotate
[bottom-painter top-painter]
(let [bottom-rotated (rotate-counterclockwise-270 bottom-painter)
top-rotated (rotate-counterclockwise-270 top-painter)]
(->> (beside bottom-rotated top-rotated)
(rotate-counterclockwise-270)
(rotate-counterclockwise-180))))
| |
2bee169a398816cfb9e6aa12ad6fcf3e708b16924bd44abc2455b66cfb1b01af | uber/queryparser | Parser.hs | Copyright ( c ) 2017 Uber Technologies , Inc.
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
--
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-- THE SOFTWARE.
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeFamilies #
module Database.Sql.Vertica.Parser where
import Database.Sql.Type
import Database.Sql.Info
import Database.Sql.Helpers
import Database.Sql.Vertica.Type
import Database.Sql.Vertica.Scanner
import Database.Sql.Vertica.Parser.Internal
import Database.Sql.Position
import qualified Database.Sql.Vertica.Parser.Token as Tok
import Database.Sql.Vertica.Parser.IngestionOptions
import Database.Sql.Vertica.Parser.Shared
import Data.Char (isDigit)
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TL
import qualified Data.List as L
import Data.Maybe (catMaybes, fromMaybe)
import Data.Monoid (Endo (..))
import Data.Semigroup (Option (..))
import qualified Text.Parsec as P
import Text.Parsec ( chainl1, choice, many, many1
, option, optional, optionMaybe
, sepBy, sepBy1, try, (<|>), (<?>))
import Control.Arrow (first)
import Control.Monad (void, (>=>), when)
import Data.Semigroup (Semigroup (..), sconcat)
import Data.List.NonEmpty (NonEmpty ((:|)))
import qualified Data.List.NonEmpty as NE (last, fromList)
import Data.Foldable (fold)
statementParser :: Parser (VerticaStatement RawNames Range)
statementParser = do
maybeStmt <- optionMaybe $ choice
[ try $ VerticaStandardSqlStatement <$> statementP
, do
_ <- try $ P.lookAhead createProjectionPrefixP
VerticaCreateProjectionStatement <$> createProjectionP
, try $ VerticaMultipleRenameStatement <$> multipleRenameP
, try $ VerticaSetSchemaStatement <$> setSchemaP
, try $ VerticaUnhandledStatement <$> renameProjectionP
, do
_ <- try $ P.lookAhead alterResourcePoolPrefixP
VerticaUnhandledStatement <$> alterResourcePoolP
, do
_ <- try $ P.lookAhead createResourcePoolPrefixP
VerticaUnhandledStatement <$> createResourcePoolP
, do
_ <- try $ P.lookAhead dropResourcePoolPrefixP
VerticaUnhandledStatement <$> dropResourcePoolP
, do
_ <- try $ P.lookAhead createFunctionPrefixP
VerticaUnhandledStatement <$> createFunctionP
, VerticaUnhandledStatement <$> alterTableAddConstraintP
, VerticaUnhandledStatement <$> exportToStdoutP
, do
_ <- try $ P.lookAhead setSessionPrefixP
VerticaUnhandledStatement <$> setSessionP
, VerticaUnhandledStatement <$> setTimeZoneP
, VerticaUnhandledStatement <$> connectP
, VerticaUnhandledStatement <$> disconnectP
, VerticaUnhandledStatement <$> createAccessPolicyP
, VerticaUnhandledStatement <$> copyFromP
, VerticaUnhandledStatement <$> showP
, VerticaMergeStatement <$> mergeP
]
case maybeStmt of
Just stmt -> terminator >> return stmt
Nothing -> VerticaStandardSqlStatement <$> emptyStatementP
where
normal statements may be terminated by ` ; ` or eof
emptyStatementP = EmptyStmt <$> Tok.semicolonP -- but we don't allow eof here. `;` is the
-- only way to write the empty statement, i.e. `` (empty string) is not allowed.
-- | parse consumes a statement, or fails
parse :: Text -> Either P.ParseError (VerticaStatement RawNames Range)
parse = P.runParser statementParser 0 "-" . tokenize
-- | parseAll consumes all input as a single statement, or fails
parseAll :: Text -> Either P.ParseError (VerticaStatement RawNames Range)
parseAll = P.runParser (statementParser <* P.eof) 0 "-" . tokenize
-- | parseMany consumes multiple statements, or fails
parseMany :: Text -> Either P.ParseError [VerticaStatement RawNames Range]
parseMany = P.runParser (P.many1 statementParser) 0 "-" . tokenize
-- | parseManyAll consumes all input multiple statements, or fails
parseManyAll :: Text -> Either P.ParseError [VerticaStatement RawNames Range]
parseManyAll text = P.runParser (P.many1 statementParser <* P.eof) 0 "-" . tokenize $ text
-- | parseManyEithers consumes all input as multiple (statements or failures)
-- it should never fail
parseManyEithers :: Text -> Either P.ParseError [Either (Unparsed Range) (VerticaStatement RawNames Range)]
parseManyEithers text = P.runParser parser 0 "-" . tokenize $ text
where
parser = do
statements <- P.many1 $ P.setState 0 >> choice
[ try $ Right <$> statementParser
, try $ Left <$> do
ss <- many Tok.notSemicolonP
e <- Tok.semicolonP
pure $ case ss of
[] -> Unparsed e
s:_ -> Unparsed (s <> e)
]
locs <- many Tok.notSemicolonP
P.eof
pure $ case locs of
[] -> statements
s:es -> statements ++ [Left $ Unparsed $ sconcat (s:|es)]
optionBool :: Parser a -> Parser Bool
optionBool p = option False $ p >> pure True
statementP :: Parser (Statement Vertica RawNames Range)
statementP = choice
[ InsertStmt <$> insertP
, DeleteStmt <$> deleteP
, QueryStmt <$> queryP
, explainP
, TruncateStmt <$> truncateP
, AlterTableStmt <$> alterTableP
, do
_ <- try $ P.lookAhead createSchemaPrefixP
CreateSchemaStmt <$> createSchemaP
, do
_ <- try $ P.lookAhead createExternalTablePrefixP
CreateTableStmt <$> createExternalTableP
, do
_ <- try $ P.lookAhead createViewPrefixP
CreateViewStmt <$> createViewP
, CreateTableStmt <$> createTableP
, do
_ <- try $ P.lookAhead dropViewPrefixP
DropViewStmt <$> dropViewP
, DropTableStmt <$> dropTableP
, GrantStmt <$> grantP
, RevokeStmt <$> revokeP
, BeginStmt <$> beginP
, CommitStmt <$> commitP
, RollbackStmt <$> rollbackP
]
oqColumnNameP :: Parser (OQColumnName Range)
oqColumnNameP = (\ (c, r') -> QColumnName r' Nothing c) <$> Tok.columnNameP
insertP :: Parser (Insert RawNames Range)
insertP = do
r <- Tok.insertP
insertBehavior <- InsertAppend <$> Tok.intoP
insertTable <- tableNameP
insertColumns <- optionMaybe $ try $ do
_ <- Tok.openP
c:cs <- oqColumnNameP `sepBy1` Tok.commaP
_ <- Tok.closeP
pure (c :| cs)
insertValues <- choice
[ do
s <- Tok.defaultP
e <- Tok.valuesP
pure $ InsertDefaultValues (s <> e)
, do
s <- Tok.valuesP
_ <- Tok.openP
x:xs <- defaultExprP `sepBy1` Tok.commaP
e <- Tok.closeP
let row = x :| xs
there can only be one
pure $ InsertExprValues (s <> e) rows
, InsertSelectValues <$> queryP
]
let insertInfo = r <> getInfo insertValues
pure Insert{..}
defaultExprP :: Parser (DefaultExpr RawNames Range)
defaultExprP = choice
[ DefaultValue <$> Tok.defaultP
, ExprValue <$> exprP
]
deleteP :: Parser (Delete RawNames Range)
deleteP = do
r <- Tok.deleteP
_ <- Tok.fromP
table <- tableNameP
maybeExpr <- optionMaybe $ do
_ <- Tok.whereP
exprP
let r' = case maybeExpr of
Nothing -> getInfo table
Just expr -> getInfo expr
info = r <> r'
pure $ Delete info table maybeExpr
truncateP :: Parser (Truncate RawNames Range)
truncateP = do
s <- Tok.truncateP
_ <- Tok.tableP
table <- tableNameP
pure $ Truncate (s <> getInfo table) table
querySelectP :: Parser (Query RawNames Range)
querySelectP = do
select <- selectP
return $ QuerySelect (selectInfo select) select
queryP :: Parser (Query RawNames Range)
queryP = manyParensP $ do
with <- option id withP
query <- ((querySelectP <|> P.between Tok.openP Tok.closeP queryP) `chainl1` (exceptP <|> unionP))
`chainl1` intersectP
order <- option id orderP
limit <- option id limitP
offset <- option id offsetP
return $ with $ limit $ offset $ order $ query
where
exceptP = do
r <- Tok.exceptP
return $ QueryExcept r Unused
unionP = do
r <- Tok.unionP
distinct <- option (Distinct True) distinctP
return $ QueryUnion r distinct Unused
intersectP = do
r <- Tok.intersectP
return $ QueryIntersect r Unused
withP = do
r <- Tok.withP
withs <- cteP `sepBy1` Tok.commaP
return $ \ query ->
let r' = sconcat $ r :| getInfo query : map cteInfo withs
in QueryWith r' withs query
cteP = do
(name, r) <- Tok.tableNameP
alias <- makeTableAlias r name
columns <- option []
$ P.between Tok.openP Tok.closeP $ columnAliasP `sepBy1` Tok.commaP
_ <- Tok.asP
(query, r') <- do
_ <- Tok.openP
q <- queryP
r' <- Tok.closeP
return (q, r')
return $ CTE (r <> r') alias columns query
orderP = do
(r, orders) <- orderTopLevelP
return $ \ query -> QueryOrder (getInfo query <> r) orders query
limitP = do
r <- Tok.limitP
choice
[ Tok.numberP >>= \ (v, r') ->
let limit = Limit (r <> r') v
in return $ \ query -> QueryLimit (getInfo query <> r') limit query
, Tok.nullP >> return id
]
offsetP = do
r <- Tok.offsetP
Tok.numberP >>= \ (v, r') ->
let offset = Offset (r <> r') v
in return $ \ query -> QueryOffset (getInfo query <> r') offset query
distinctP :: Parser Distinct
distinctP = choice $
[ Tok.allP >> return (Distinct False)
, Tok.distinctP >> return (Distinct True)
]
explainP :: Parser (Statement Vertica RawNames Range)
explainP = do
s <- Tok.explainP
stmt <- choice
[ InsertStmt <$> insertP
, DeleteStmt <$> deleteP
, QueryStmt <$> queryP
]
pure $ ExplainStmt (s <> getInfo stmt) stmt
columnAliasP :: Parser (ColumnAlias Range)
columnAliasP = do
(name, r) <- Tok.columnNameP
makeColumnAlias r name
-- Parses the only supported ALTER TABLE form:
-- ALTER TABLE <from> RENAME TO <to>.
alterTableP :: Parser (AlterTable RawNames Range)
alterTableP = do
    s <- Tok.alterP
    _ <- Tok.tableP
    from <- tableNameP
    _ <- Tok.renameP
    _ <- Tok.toP
    -- the rename target's schema field is forcibly cleared
    to <- (\ uqtn -> uqtn { tableNameSchema = Nothing }) <$> unqualifiedTableNameP
    pure $ AlterTableRenameTable (s <> getInfo to) from to
-- Parses "CREATE SCHEMA", returning the covered range.
createSchemaPrefixP :: Parser Range
createSchemaPrefixP = (<>) <$> Tok.createP <*> Tok.schemaP
-- Optionally parses "IF NOT EXISTS", returning its range when present.
ifNotExistsP :: Parser (Maybe Range)
ifNotExistsP = optionMaybe $ (<>) <$> Tok.ifP <*> (Tok.notP *> Tok.existsP)
-- Parses "IF EXISTS", returning the covered range.
ifExistsP :: Parser Range
ifExistsP = (<>) <$> Tok.ifP <*> Tok.existsP
-- Parses CREATE SCHEMA [IF NOT EXISTS] <name> [AUTHORIZATION <user>].
createSchemaP :: Parser (CreateSchema RawNames Range)
createSchemaP = do
    s <- createSchemaPrefixP
    createSchemaIfNotExists <- ifNotExistsP
    (name, r) <- Tok.schemaNameP
    let createSchemaName = mkNormalSchema name r
    -- the statement ends at the schema name unless an AUTHORIZATION
    -- clause follows, in which case it ends at the user name
    e <- option r (Tok.authorizationP >> snd <$> Tok.userNameP)
    let createSchemaInfo = s <> e
    return $ CreateSchema{..}
-- Parses a parenthesized table-definition list: one or more column
-- definitions and/or table constraints, separated by commas.
createTableColumnsP :: Parser (TableDefinition Vertica RawNames Range)
createTableColumnsP = do
    s <- Tok.openP
    c:cs <- columnOrConstraintP `sepBy1` Tok.commaP
    e <- Tok.closeP
    pure $ TableColumns (s <> e) (c:|cs)
  where
    columnOrConstraintP :: Parser (ColumnOrConstraint Vertica RawNames Range)
    columnOrConstraintP = choice
        [ try $ ColumnOrConstraintColumn <$> columnDefinitionP
        , ColumnOrConstraintConstraint <$> constraintDefinitionP
        ]

    -- a column definition: name, data type, then any number of
    -- NULL / NOT NULL / DEFAULT modifiers, applied left to right
    columnDefinitionP = do
        (name, s) <- Tok.columnNameP
        columnDefinitionType <- dataTypeP
        updates <- many $ choice [ notNullUpdateP, nullUpdateP, defaultUpdateP ]
        let columnDefinitionInfo = s <> getInfo columnDefinitionType
            -- TODO: set when applying updates
            columnDefinitionNull = Nothing
            columnDefinitionDefault = Nothing
            columnDefinitionName = QColumnName s None name
        foldr (>=>) pure updates ColumnDefinition{..}

    notNullUpdateP :: Parser (ColumnDefinition d r Range -> Parser (ColumnDefinition d r Range))
    notNullUpdateP = do
        r <- (<>) <$> Tok.notP <*> Tok.nullP
        pure $ \ d -> case columnDefinitionNull d of
            Nothing -> pure $ d { columnDefinitionNull = Just $ NotNull r }
            Just (Nullable _) -> fail "conflicting NULL/NOT NULL specifications on column"
            Just (NotNull _) -> pure d  -- repeated NOT NULL is harmless

    nullUpdateP :: Parser (ColumnDefinition d r Range -> Parser (ColumnDefinition d r Range))
    nullUpdateP = do
        r <- Tok.nullP
        pure $ \ d -> case columnDefinitionNull d of
            Nothing -> pure $ d { columnDefinitionNull = Just $ Nullable r }
            Just (NotNull _) -> fail "conflicting NULL/NOT NULL specifications on column"
            Just (Nullable _) -> pure d  -- repeated NULL is harmless

    defaultUpdateP :: Parser (ColumnDefinition d RawNames Range -> Parser (ColumnDefinition d RawNames Range))
    defaultUpdateP = do
        _ <- Tok.defaultP
        expr <- exprP
        pure $ \ d -> case columnDefinitionDefault d of
            Nothing -> pure $ d { columnDefinitionDefault = Just expr }
            Just _ -> fail "multiple defaults for column"

    constraintDefinitionP :: Parser (ConstraintDefinition Range)
    constraintDefinitionP = ConstraintDefinition <$> tableConstraintP
-- Parses "CREATE EXTERNAL TABLE", returning the start range and the
-- externality marker built from the EXTERNAL keyword's range.
createExternalTablePrefixP :: Parser (Range, Externality Range)
createExternalTablePrefixP = do
    s <- Tok.createP
    externality <- External <$> Tok.externalP
    _ <- Tok.tableP
    return (s, externality)
-- Parses CREATE EXTERNAL TABLE ... AS COPY ... with its many optional
-- ingestion clauses.  External tables are always persistent.
createExternalTableP :: Parser (CreateTable Vertica RawNames Range)
createExternalTableP = do
    (s, createTableExternality) <- createExternalTablePrefixP
    let createTablePersistence = Persistent
    createTableIfNotExists <- ifNotExistsP
    createTableName <- tableNameP
    -- TODO allow for column-name-list syntax
    _ <- optional $ do
        _ <- optional $ Tok.includeP <|> Tok.excludeP
        _ <- Tok.schemaP
        Tok.privilegesP
    _ <- Tok.asP
    e <- Tok.copyP
    -- clauses that must appear in this order...
    e' <- consumeOrderedOptions e $
        [ ingestionColumnListP (getInfo <$> exprP)
        , ingestionColumnOptionP
        , fromP -- you need **either** a FROM or a SOURCE clause, but let's not be fussy
        , fileStorageFormatP
        ]
    -- ...followed by clauses that may appear in any order
    e'' <- consumeUnorderedOptions e' $
        [ Tok.withP
        , abortOnErrorP
        , delimiterAsP
        , enclosedByP
        , Tok.enforceLengthP
        , errorToleranceP
        , escapeFormatP
        , exceptionsOnNodeP
        , fileFilterP
        , nullAsP
        , fileParserP
        , recordTerminatorP
        , rejectedDataOnNodeP
        , rejectMaxP
        , skipRecordsP
        , skipBytesP
        , fileSourceP
        , trailingNullColsP
        , trimByteP
        ]
    let createTableInfo = s <> e''
        createTableExtra = Nothing
    pure CreateTable{..}
  where
    stringP :: Parser Range
    stringP = snd <$> Tok.stringP

    -- FROM 'file' [ON node|ON ANY NODE] [compression] [, ...]
    fromP :: Parser Range
    fromP = do
        s <- Tok.fromP
        let fileP = do
                r <- stringP
                consumeOrderedOptions r [nodeLocationP, compressionP]
        rs <- fileP `sepBy1` Tok.commaP
        return $ s <> last rs

    nodeLocationP = choice $
        [ Tok.onP >> snd <$> Tok.nodeNameP
        , Tok.onP >> Tok.anyP >> Tok.nodeP
        ]
-- Parses the prefix of CREATE [OR REPLACE] [LOCAL TEMPORARY] VIEW,
-- returning the covered range, the OR REPLACE range (if present), and
-- the view's persistence.
createViewPrefixP :: Parser (Range, Maybe Range, Persistence Range)
createViewPrefixP = do
    s <- Tok.createP
    -- NOTE(review): this Maybe Range covers "OR REPLACE", though the
    -- caller binds it into a field named createViewIfNotExists
    ifNotExists <- optionMaybe $ do
        s' <- Tok.orP
        e' <- Tok.replaceP
        pure $ s' <> e'
    persistence <- option Persistent $ Temporary <$> do
        s' <- Tok.localP
        e' <- Tok.temporaryP
        pure $ s' <> e'
    e <- Tok.viewP
    pure (s <> e, ifNotExists, persistence)
-- Parses {INCLUDE | EXCLUDE} [SCHEMA] PRIVILEGES, returning its range.
schemaPrivilegesP :: Parser Range
schemaPrivilegesP = do
    s <- Tok.includeP <|> Tok.excludeP
    _ <- optional Tok.schemaP
    e <- Tok.privilegesP
    return $ s <> e
-- Parses CREATE VIEW.  Local temporary views are forced into the
-- "<session>" schema and may not be schema-qualified; then an optional
-- column list, optional schema privileges (persistent views only),
-- and the defining query.
createViewP :: Parser (CreateView RawNames Range)
createViewP = do
    (s, createViewIfNotExists, createViewPersistence) <- createViewPrefixP
    createViewName <- tableNameP >>= \case
        QTableName info Nothing view ->
            case createViewPersistence of
                Persistent -> pure $ QTableName info Nothing view
                Temporary _ -> pure $ QTableName info (pure $ QSchemaName info Nothing "<session>" SessionSchema) view
        qualifiedTableName ->
            case createViewPersistence of
                Persistent -> pure $ qualifiedTableName
                Temporary _ -> fail $ "cannot specify schema on a local temporary view"
    createViewColumns <- optionMaybe $ do
        _ <- Tok.openP
        c:cs <- unqualifiedColumnNameP `sepBy1` Tok.commaP
        _ <- Tok.closeP
        return (c:|cs)
    case createViewPersistence of
        Persistent -> optional schemaPrivilegesP
        Temporary _ -> pure ()
    _ <- Tok.asP
    createViewQuery <- queryP
    let createViewInfo = s <> getInfo createViewQuery
    pure CreateView{..}
  where
    unqualifiedColumnNameP = do
        (name, r) <- Tok.columnNameP
        pure $ QColumnName r None name
-- Parses CREATE [[LOCAL|GLOBAL] TEMPORARY] TABLE with its definition
-- (column list, AS query, or LIKE table) and trailing table options.
createTableP :: Parser (CreateTable Vertica RawNames Range)
createTableP = do
    s <- Tok.createP
    (createTablePersistence, isLocal) <- option (Persistent, False) $ do
        isLocal <- option False $ choice
            [ Tok.localP >> pure True
            , Tok.globalP >> pure False
            ]
        createTablePersistence <- Temporary <$> Tok.temporaryP
        pure (createTablePersistence, isLocal)
    let createTableExternality = Internal
    _ <- Tok.tableP
    createTableIfNotExists <- ifNotExistsP
    -- local temp tables live in the session schema and may not be
    -- schema-qualified; unqualified permanent tables default to public
    createTableName <- tableNameP >>= \case
        QTableName info Nothing table ->
            if isLocal
            then pure $ QTableName info (pure $ QSchemaName info Nothing "<session>" SessionSchema) table
            else pure $ QTableName info (pure $ QSchemaName info Nothing "public" NormalSchema) table
        qualifiedTableName ->
            if isLocal
            then fail "cannot specify schema on a local temporary table"
            else pure $ qualifiedTableName
    -- ON COMMIT {DELETE|PRESERVE} ROWS is only legal on temp tables
    let onCommitP = case createTablePersistence of
            Persistent -> pure ()
            Temporary _ -> do
                -- TODO (T374141): do something with this
                _ <- Tok.onP
                _ <- Tok.commitP
                _ <- Tok.deleteP <|> Tok.preserveP
                void Tok.rowsP
    createTableDefinition <- choice
        [ createTableColumnsP <* optional onCommitP <* optional schemaPrivilegesP
        , try $ optional onCommitP *> optional schemaPrivilegesP *> createTableAsP
        , optional schemaPrivilegesP *> createTableLikeP
        ]
    createTableExtra <- tableInfoP
    case createTablePersistence of
        Persistent -> pure ()
        Temporary _ -> optional $ do
            _ <- Tok.noP
            void Tok.projectionP
    let e = maybe (getInfo createTableDefinition) getInfo createTableExtra
        createTableInfo = s <> e
    pure CreateTable{..}
  where
    columnListP :: Parser (NonEmpty (UQColumnName Range))
    columnListP = do
        _ <- Tok.openP
        c:cs <- (`sepBy1` Tok.commaP) $ do
            (name, r) <- Tok.columnNameP
            pure $ QColumnName r None name
        _ <- Tok.closeP
        pure (c:|cs)

    createTableLikeP = do
        s <- Tok.likeP
        table <- tableNameP
        e <- option (getInfo table) $ do
            -- TODO - include projection info in createTableExtra
            _ <- Tok.includingP <|> Tok.excludingP
            Tok.projectionsP
        pure $ TableLike (s <> e) table

    createTableAsP = do
        s <- Tok.asP
        columns <- optionMaybe $ try columnListP
        query <- optionalParensP $ queryP
        pure $ TableAs (s <> getInfo query) columns query
-- Parses the optional trailing table options (ORDER BY, segmentation,
-- KSAFE, PARTITION BY); yields Nothing when no option is present.
tableInfoP :: Parser (Maybe (TableInfo RawNames Range))
tableInfoP = do
    mOrdering <- optionMaybe orderTopLevelP
    let tableInfoOrdering = snd <$> mOrdering
    let tableInfoEncoding :: Maybe (TableEncoding RawNames Range)
        tableInfoEncoding = Nothing  -- TODO: encoding clauses are not yet parsed
    tableInfoSegmentation <- optionMaybe $ choice
        [ do
            s <- Tok.unsegmentedP
            choice
                [ do
                    _ <- Tok.nodeP
                    node <- nodeNameP
                    let e = getInfo node
                    pure $ UnsegmentedOneNode (s <> e) node
                , do
                    _ <- Tok.allP
                    e <- Tok.nodesP
                    pure $ UnsegmentedAllNodes (s <> e)
                ]
        , do
            s <- Tok.segmentedP
            _ <- Tok.byP
            expr <- exprP
            list <- nodeListP
            pure $ SegmentedBy (s <> getInfo list) expr list
        ]
    tableInfoKSafety <- optionMaybe $ do
        s <- Tok.ksafeP
        choice
            [ do
                (n, e) <- integerP
                pure $ KSafety (s <> e) (Just n)
            , pure $ KSafety s Nothing
            ]
    tableInfoPartitioning <- optionMaybe $ do
        s <- Tok.partitionP
        _ <- Tok.byP
        expr <- exprP
        pure $ Partitioning (s <> getInfo expr) expr
    let infos = [ fst <$> mOrdering
                , getInfo <$> tableInfoEncoding
                , getInfo <$> tableInfoSegmentation
                , getInfo <$> tableInfoKSafety
                , getInfo <$> tableInfoPartitioning
                ]
    -- build a TableInfo only when at least one clause actually appeared
    case getOption $ mconcat $ map Option infos of
        Nothing -> pure Nothing
        Just tableInfoInfo -> pure $ Just TableInfo{..}
-- Parses "DROP VIEW", returning the covered range.
dropViewPrefixP :: Parser Range
dropViewPrefixP = (<>) <$> Tok.dropP <*> Tok.viewP
-- Parses DROP VIEW [IF EXISTS] <name>.
dropViewP :: Parser (DropView RawNames Range)
dropViewP = do
    s <- dropViewPrefixP
    dropViewIfExists <- optionMaybe ifExistsP
    dropViewName <- tableNameP
    let dropViewInfo = s <> getInfo dropViewName
    pure DropView{..}
-- Parses DROP TABLE [IF EXISTS] <name> [, <name> ...] [CASCADE].
dropTableP :: Parser (DropTable RawNames Range)
dropTableP = do
    s <- Tok.dropP
    _ <- Tok.tableP
    dropTableIfExists <- optionMaybe ifExistsP
    (dropTableName:rest) <- tableNameP `sepBy1` Tok.commaP
    cascade <- optionMaybe Tok.cascadeP
    let dropTableNames = dropTableName :| rest
        -- statement ends at CASCADE if present, else at the last name
        dropTableInfo = s <> (fromMaybe (getInfo $ NE.last dropTableNames) cascade)
    pure DropTable{..}
-- Parses GRANT and swallows everything up to the statement terminator;
-- only the overall source range is retained.
grantP :: Parser (Grant Range)
grantP = do
    s <- Tok.grantP
    toks <- many1 Tok.notSemicolonP
    return $ Grant $ s <> last toks
-- Parses REVOKE and swallows everything up to the statement terminator;
-- only the overall source range is retained.
revokeP :: Parser (Revoke Range)
revokeP = do
    s <- Tok.revokeP
    toks <- many1 Tok.notSemicolonP
    return $ Revoke $ s <> last toks
-- Parses BEGIN [WORK|TRANSACTION] or START TRANSACTION, followed by an
-- optional isolation-level clause and/or transaction-mode clause
-- (consumed in that order when both appear).
beginP :: Parser Range
beginP = do
    s <- choice [ do
                    s <- Tok.beginP
                    e <- option s (Tok.workP <|> Tok.transactionP)
                    return $ s <> e
                , do
                    s <- Tok.startP
                    e <- Tok.transactionP
                    return $ s <> e
                ]
    e <- consumeOrderedOptions s [isolationLevelP, transactionModeP]
    return $ s <> e
  where
    -- ISOLATION LEVEL {SERIALIZABLE | REPEATABLE READ | READ {COMMITTED|UNCOMMITTED}}
    isolationLevelP :: Parser Range
    isolationLevelP = do
        s <- Tok.isolationP
        _ <- Tok.levelP
        e <- choice [ Tok.serializableP
                    , Tok.repeatableP >> Tok.readP
                    , Tok.readP >> (Tok.committedP <|> Tok.uncommittedP)
                    ]
        return $ s <> e

    -- READ {ONLY | WRITE}
    transactionModeP :: Parser Range
    transactionModeP = do
        s <- Tok.readP
        e <- Tok.onlyP <|> Tok.writeP
        return $ s <> e
-- Parses COMMIT or END, optionally followed by WORK or TRANSACTION.
commitP :: Parser Range
commitP = do
    start <- Tok.commitP <|> Tok.endP
    end <- option start $ Tok.workP <|> Tok.transactionP
    return $ start <> end
-- Parses ROLLBACK or ABORT, optionally followed by WORK or TRANSACTION.
rollbackP :: Parser Range
rollbackP = do
    start <- Tok.rollbackP <|> Tok.abortP
    end <- option start $ Tok.workP <|> Tok.transactionP
    return $ start <> end
-- Parses a node list: either ALL NODES [OFFSET n] or
-- NODES <name> [, <name> ...].
nodeListP :: Parser (NodeList Range)
nodeListP = choice
    [ do
        s <- Tok.allP
        e <- Tok.nodesP
        offset <- optionMaybe nodeListOffsetP
        let e' = maybe e getInfo offset
        pure $ AllNodes (s <> e') offset
    , do
        s <- Tok.nodesP
        n:ns <- nodeNameP `sepBy1` Tok.commaP
        let e = getInfo $ last (n:ns)
        pure $ Nodes (s <> e) (n:|ns)
    ]
-- Parses OFFSET <n> within a node list.
nodeListOffsetP :: Parser (NodeListOffset Range)
nodeListOffsetP = do
    s <- Tok.offsetP
    (n, e) <- integerP
    pure $ NodeListOffset (s <> e) n
-- Parses a node-name token into a Node annotated with its range.
nodeNameP :: Parser (Node Range)
nodeNameP = (\ (node, r) -> Node r node) <$> Tok.nodeNameP
-- Parses a number token and requires it to read cleanly as an Int;
-- fails the parse (rather than crashing) on non-integral input.
integerP :: Parser (Int, Range)
integerP = do
    (n, e) <- Tok.numberP
    case reads $ TL.unpack n of
        [(n', "")] -> pure (n', e)
        _ -> fail $ unwords ["unable to parse", show n, "as integer"]
-- Parses a SELECT statement: distinctness, the selection list, then
-- the optional FROM, WHERE, TIMESERIES, GROUP BY, HAVING, and WINDOW
-- clauses, in that order.
selectP :: Parser (Select RawNames Range)
selectP = do
    r <- Tok.selectP
    selectDistinct <- option notDistinct distinctP
    selectCols <- do
        selections <- selectionP `sepBy1` Tok.commaP
        let r' = foldl1 (<>) $ map getInfo selections
        return $ SelectColumns r' selections
    selectFrom <- optionMaybe fromP
    selectWhere <- optionMaybe whereP
    selectTimeseries <- optionMaybe timeseriesP
    selectGroup <- optionMaybe groupP
    selectHaving <- optionMaybe havingP
    selectNamedWindow <- optionMaybe namedWindowP
    -- the irrefutable Just is safe: the first list element is (Just r),
    -- so sconcat over the Maybe semigroup can never produce Nothing
    let (Just selectInfo) = sconcat $ Just r :|
            [ Just $ getInfo selectCols
            , getInfo <$> selectFrom
            , getInfo <$> selectWhere
            , getInfo <$> selectTimeseries
            , getInfo <$> selectGroup
            , getInfo <$> selectHaving
            , getInfo <$> selectNamedWindow
            ]
    return Select{..}
  where
    fromP = do
        r <- Tok.fromP
        tablishes <- tablishP `sepBy1` Tok.commaP
        let r' = foldl (<>) r $ fmap getInfo tablishes
        return $ SelectFrom r' tablishes

    whereP = do
        r <- Tok.whereP
        condition <- exprP
        return $ SelectWhere (r <> getInfo condition) condition

    -- TIMESERIES alias AS 'interval' OVER ([PARTITION ...] ORDER BY expr)
    timeseriesP = do
        s <- Tok.timeseriesP
        selectTimeseriesSliceName <- columnAliasP
        _ <- Tok.asP
        selectTimeseriesInterval <- do
            (c, r) <- Tok.stringP
            pure $ StringConstant r c
        _ <- Tok.overP
        _ <- Tok.openP
        selectTimeseriesPartition <- optionMaybe partitionP
        selectTimeseriesOrder <- do
            _ <- Tok.orderP
            _ <- Tok.byP
            exprP
        e <- Tok.closeP
        let selectTimeseriesInfo = s <> e
        pure $ SelectTimeseries {..}

    toGroupingElement :: PositionOrExpr RawNames Range -> GroupingElement RawNames Range
    toGroupingElement posOrExpr = GroupingElementExpr (getInfo posOrExpr) posOrExpr

    groupP = do
        r <- Tok.groupP
        _ <- Tok.byP
        exprs <- exprP `sepBy1` Tok.commaP
        -- bare integer literals become positional references here
        let selectGroupGroupingElements = map (toGroupingElement . handlePositionalReferences) exprs
            selectGroupInfo = foldl (<>) r $ fmap getInfo selectGroupGroupingElements
        return SelectGroup{..}

    havingP = do
        r <- Tok.havingP
        conditions <- exprP `sepBy1` Tok.commaP
        let r' = foldl (<>) r $ fmap getInfo conditions
        return $ SelectHaving r' conditions

    -- WINDOW name AS (spec) [, ...]: each spec is either a full window
    -- expression (starting with PARTITION BY) or a partial one that
    -- inherits from a previously named window
    namedWindowP =
      do
        r <- Tok.windowP
        windows <- (flip sepBy1) Tok.commaP $ do
            name <- windowNameP
            _ <- Tok.asP
            _ <- Tok.openP
            window <- choice
                [ do
                    partition@(Just p) <- Just <$> partitionP
                    order <- option [] orderInWindowClauseP
                    let orderInfos = map getInfo order -- better way?
                        info = L.foldl' (<>) (getInfo p) orderInfos
                    return $ Left $ WindowExpr info partition order Nothing
                , do
                    inherit <- windowNameP
                    order <- option [] orderInWindowClauseP
                    let orderInfo = map getInfo order -- better way?
                        info = L.foldl' (<>) (getInfo inherit) orderInfo
                    return $ Right $ PartialWindowExpr info inherit Nothing order Nothing
                ]
            e <- Tok.closeP
            let info = getInfo name <> e
            return $ case window of
                Left w -> NamedWindowExpr info name w
                Right pw -> NamedPartialWindowExpr info name pw
        let info = L.foldl' (<>) r $ fmap getInfo windows
        return $ SelectNamedWindow info windows
-- In contexts that permit them (e.g. GROUP BY), a bare all-digit
-- numeric literal is a positional reference to a select column rather
-- than a constant expression.
handlePositionalReferences :: Expr RawNames Range -> PositionOrExpr RawNames Range
handlePositionalReferences e = case e of
    ConstantExpr _ (NumericConstant _ n) | TL.all isDigit n -> PositionOrExprPosition (getInfo e) (read $ TL.unpack n) Unused
    _ -> PositionOrExprExpr e
-- Parses a star selection in one of three forms:
-- *, table.*, or schema.table.*.
selectStarP :: Parser (Selection RawNames Range)
selectStarP = choice
    [ do
        r <- Tok.starP
        return $ SelectStar r Nothing Unused
    , try $ do
        (t, r) <- Tok.tableNameP
        _ <- Tok.dotP
        r' <- Tok.starP
        return $ SelectStar (r <> r') (Just $ QTableName r Nothing t) Unused
    , try $ do
        (s, t, r, r') <- qualifiedTableNameP
        _ <- Tok.dotP
        r'' <- Tok.starP
        return $ SelectStar (r <> r'')
            (Just $ QTableName r' (Just $ mkNormalSchema s r) t) Unused
    ]
-- Parses one item of the selection list: a star form, or an expression
-- with its (explicit or derived) alias.
selectionP :: Parser (Selection RawNames Range)
selectionP = try selectStarP <|> do
    expr <- exprP
    alias <- aliasP expr
    return $ SelectExpr (getInfo alias <> getInfo expr) [alias] expr
-- Builds a column alias carrying a fresh unique id from the counter.
makeColumnAlias :: Range -> Text -> Parser (ColumnAlias Range)
makeColumnAlias r alias = ColumnAlias r alias . ColumnAliasId <$> getNextCounter

-- Builds a table alias carrying a fresh unique id from the counter.
makeTableAlias :: Range -> Text -> Parser (TableAlias Range)
makeTableAlias r alias = TableAlias r alias . TableAliasId <$> getNextCounter

-- Alias for expressions with no natural name ("?column?").
makeDummyAlias :: Range -> Parser (ColumnAlias Range)
makeDummyAlias r = makeColumnAlias r "?column?"
-- Derives the default output-column alias for an expression when no
-- explicit alias is given.  Most forms get the dummy "?column?" alias;
-- column references keep their name, casts defer to the inner
-- expression, and a few forms are unsupported in this dialect.
makeExprAlias :: Expr RawNames Range -> Parser (ColumnAlias Range)
makeExprAlias (BinOpExpr info _ _ _) = makeDummyAlias info
makeExprAlias (UnOpExpr info _ _) = makeDummyAlias info
makeExprAlias (LikeExpr info _ _ _ _) = makeDummyAlias info
makeExprAlias (CaseExpr info _ _) = makeDummyAlias info
makeExprAlias (ColumnExpr info (QColumnName _ _ name)) = makeColumnAlias info name
makeExprAlias (ConstantExpr info _) = makeDummyAlias info
makeExprAlias (InListExpr info _ _) = makeDummyAlias info
makeExprAlias (InSubqueryExpr info _ _) = makeDummyAlias info
makeExprAlias (BetweenExpr info _ _ _) = makeDummyAlias info
makeExprAlias (OverlapsExpr info _ _) = makeDummyAlias info
makeExprAlias (AtTimeZoneExpr info _ _) = makeColumnAlias info "timezone" -- because reasons
-- function expressions get the name of the function
makeExprAlias (FunctionExpr info (QFunctionName _ _ name) _ _ _ _ _) = makeColumnAlias info name
makeExprAlias (SubqueryExpr info _) = makeDummyAlias info
makeExprAlias (ArrayExpr info _) = makeDummyAlias info -- might actually be "array", but I'm not sure how to check
makeExprAlias (ExistsExpr info _) = makeDummyAlias info
makeExprAlias (FieldAccessExpr _ _ _) = fail "Unsupported struct access in Vertica: unused datatype in this dialect"
makeExprAlias (ArrayAccessExpr _ _ _) = fail "Unsupported array access in Vertica: unused datatype in this dialect"
makeExprAlias (TypeCastExpr _ _ expr _) = makeExprAlias expr
makeExprAlias (VariableSubstitutionExpr _) = fail "Unsupported variable substitution in Vertica: unused datatype in this dialect"
-- Parses an expression's alias: an explicit [AS] name-or-string, an
-- "AS (col, ...)" list (which is parsed but discarded in favor of the
-- derived alias), or no alias at all (derived alias only).
aliasP :: Expr RawNames Range -> Parser (ColumnAlias Range)
aliasP expr = choice
    [ try $ do
        optional Tok.asP
        (name, r) <- choice
            [ Tok.columnNameP
            , first TL.decodeUtf8 <$> Tok.stringP
            ]
        makeColumnAlias r name
    , do
        _ <- Tok.asP
        _ <- P.between Tok.openP Tok.closeP $ Tok.columnNameP `sepBy1` Tok.commaP
        makeExprAlias expr
    , makeExprAlias expr
    ]
-- Entry point for expressions: delegates to the lowest-precedence
-- level of the operator chain (orExprP, defined elsewhere).
exprP :: Parser (Expr RawNames Range)
exprP = orExprP
-- Parses a parenthesized subquery or expression; the subquery form is
-- tried first with backtracking.
parenExprP :: Parser (Expr RawNames Range)
parenExprP = P.between Tok.openP Tok.closeP $ choice
    [ try subqueryExprP
    , exprP
    ]
-- Parses a query and wraps it as a subquery expression.
subqueryExprP :: Parser (Expr RawNames Range)
subqueryExprP = (\ query -> SubqueryExpr (getInfo query) query) <$> queryP
-- Parses both CASE forms.  Searched CASE keeps the WHEN conditions as
-- given; simple CASE (CASE expr WHEN value ...) is desugared into
-- equality comparisons against the scrutinee, using "<=>" when a
-- NULLSEQUAL token is present.
caseExprP :: Parser (Expr RawNames Range)
caseExprP = do
    r <- Tok.caseP
    whens <- choice
        [ P.many1 $ do
            _ <- Tok.whenP
            condition <- exprP
            _ <- Tok.thenP
            result <- exprP
            return (condition, result)
        , do
            expr <- exprP
            P.many1 $ do
                whenr <- Tok.whenP
                nullseq <- optionMaybe Tok.nullsequalP
                condition <- case nullseq of
                    Nothing -> BinOpExpr whenr "=" expr <$> exprP
                    Just nullseqr -> BinOpExpr (whenr <> nullseqr) "<=>" expr <$> exprP
                _ <- Tok.thenP
                result <- exprP
                return (condition, result)
        ]
    melse <- optionMaybe $ do
        _ <- Tok.elseP
        exprP
    r' <- Tok.endP
    return $ CaseExpr (r <> r') whens melse
-- Parses a field-type keyword (as used by EXTRACT) and represents it
-- as a string-constant expression.
fieldTypeP :: Parser (Expr RawNames Range)
fieldTypeP = do
    (ftype, r) <- Tok.fieldTypeP
    return $ ConstantExpr r $ StringConstant r $ TL.encodeUtf8 ftype
-- Parses function-call-like expressions.  Special syntactic forms
-- (CAST, DATEDIFF, EXTRACT) come first, then ordinary calls (with
-- backtracking), then argumentless keyword builtins.
functionExprP :: Parser (Expr RawNames Range)
functionExprP = choice
    [ castFuncP
    , dateDiffFuncP
    , extractFuncP
    , try regularFuncP
    , bareFuncP
    ]
  where
    -- CAST(expr AS type); INTERVAL types take a date-part qualifier
    castFuncP = do
        r <- Tok.castP
        _ <- Tok.openP
        e <- exprP
        _ <- Tok.asP
        t <- choice
            [ try $ do
                i <- Tok.intervalP
                (unit, u) <- Tok.datePartP
                pure $ PrimitiveDataType (i <> u) ("INTERVAL " <> TL.toUpper unit) []
            , dataTypeP
            ]
        r' <- Tok.closeP
        return $ TypeCastExpr (r <> r') CastFailureError e t

    -- DATEDIFF(datepart, start, end): the datepart may be a
    -- parenthesized expression, a quoted string, or a date-part keyword
    dateDiffFuncP = do
        r <- Tok.dateDiffP
        _ <- Tok.openP
        datepart <- choice
            [ do
                _ <- Tok.openP
                expr <- exprP
                _ <- Tok.closeP
                pure expr
            , do
                (string, r') <- Tok.stringP
                pure $ ConstantExpr r' $ StringConstant r' string
            , do
                (string, r') <- Tok.datePartP
                pure $ ConstantExpr r' $ StringConstant r' $ TL.encodeUtf8 string
            ]
        _ <- Tok.commaP
        startExp <- exprP
        _ <- Tok.commaP
        endExp <- exprP
        r' <- Tok.closeP
        return $ FunctionExpr (r <> r') (QFunctionName r Nothing "datediff") notDistinct [datepart, startExp, endExp] [] Nothing Nothing

    -- EXTRACT(field FROM expr)
    extractFuncP = do
        r <- Tok.extractP
        _ <- Tok.openP
        ftype <- fieldTypeP
        _ <- Tok.fromP
        expr <- exprP
        r' <- Tok.closeP
        return $ FunctionExpr (r <> r') (QFunctionName r Nothing "extract") notDistinct [ftype, expr] [] Nothing Nothing

    -- an ordinary (possibly schema-qualified) call with an argument
    -- list, optional USING PARAMETERS, and an optional OVER clause
    regularFuncP = do
        name <- choice
            [ try $ do
                (s, r) <- Tok.schemaNameP
                _ <- Tok.dotP
                (f, r') <- Tok.functionNameP
                return $ QFunctionName (r <> r') (Just $ mkNormalSchema s r) f
            , do
                (f, r) <- Tok.functionNameP
                return $ QFunctionName r Nothing f
            ]
        (distinct, arguments, parameters, r') <- do
            _ <- Tok.openP
            (distinct, arguments) <- choice
                [ case name of
                    -- count(*) is rewritten to count(1)
                    QFunctionName _ Nothing "count" -> do
                        r' <- Tok.starP
                        return ( notDistinct
                               , [ConstantExpr r' $ NumericConstant r' "1"]
                               )
                    -- substring accepts FROM/FOR or comma separators,
                    -- but not a mix of the two
                    QFunctionName _ Nothing "substring" -> do
                        arg1 <- exprP
                        word <- (const True <$> Tok.fromP)
                            <|> (const False <$> Tok.commaP)
                        arg2 <- exprP
                        arg3 <- optionMaybe $ do
                            _ <- if word then Tok.forP else Tok.commaP
                            exprP
                        return ( notDistinct
                               , arg1 : arg2 : maybe [] pure arg3
                               )
                    _ -> fail "no special case for function"
                , do
                    -- DISTINCT (or ALL) permits exactly one argument
                    isDistinct <- distinctP
                    (isDistinct,) . (:[]) <$> exprP
                , (notDistinct,) <$> exprP `sepBy` Tok.commaP
                ]
            parameters <- option [] $ do
                _ <- Tok.usingP
                _ <- Tok.parametersP
                flip sepBy1 Tok.commaP $ do
                    (param, paramr) <- Tok.paramNameP
                    _ <- Tok.equalP
                    expr <- exprP
                    pure (ParamName paramr param, expr)
            optional $ Tok.ignoreP >> Tok.nullsP
            r' <- Tok.closeP
            return (distinct, arguments, parameters, r')
        over <- optionMaybe $ try $ overP
        let r'' = maybe r' getInfo over <> getInfo name
        return $ FunctionExpr r'' name distinct arguments parameters Nothing over

    -- keyword builtins callable without parentheses (CURRENT_DATE, ...)
    bareFuncP = do
        (v, r) <- choice
            [ Tok.currentDatabaseP
            , Tok.currentSchemaP
            , Tok.userP
            , Tok.currentUserP
            , Tok.sessionUserP
            , Tok.currentDateP
            , Tok.currentTimeP
            , Tok.currentTimestampP
            , Tok.localTimeP
            , Tok.localTimestampP
            , Tok.sysDateP
            ]
        pure $ FunctionExpr r (QFunctionName r Nothing v) notDistinct [] [] Nothing Nothing
-- ORDER BY at statement level: NULLS clauses are not parsed here, and
-- positional references ARE permitted.
orderTopLevelP :: Parser (Range, [Order RawNames Range])
orderTopLevelP = orderExprP False True

-- ORDER BY inside a window clause: NULLS clauses ARE parsed, and
-- positional references are not permitted.
orderInWindowClauseP :: Parser [Order RawNames Range]
orderInWindowClauseP = snd <$> orderExprP True False
-- Parses an ORDER BY clause.  The two flags control whether a NULLS
-- FIRST/LAST/AUTO clause may follow each item and whether bare integer
-- literals are treated as positional references.
orderExprP :: Bool -> Bool -> Parser (Range, [Order RawNames Range])
orderExprP nullsClausePermitted positionalReferencesPermitted = do
    r <- Tok.orderP
    _ <- Tok.byP
    orders <- helperP `sepBy1` Tok.commaP
    let r' = getInfo $ last orders
    return (r <> r', orders)
  where
    helperP :: Parser (Order RawNames Range)
    helperP = do
        expr <- exprP
        let posOrExpr = if positionalReferencesPermitted
                        then handlePositionalReferences expr
                        else PositionOrExprExpr expr
        dir <- directionP
        -- defaults when no NULLS clause appears: ASC -> NULLS LAST,
        -- DESC -> NULLS FIRST
        nulls <- case (nullsClausePermitted, dir) of
            (False, _) -> return $ NullsAuto Nothing
            (True, OrderAsc _) -> option (NullsLast Nothing) nullsP
            (True, OrderDesc _) -> option (NullsFirst Nothing) nullsP
        let info = getInfo expr ?<> getInfo dir <> getInfo nulls
        return $ Order info posOrExpr dir nulls

    directionP :: Parser (OrderDirection (Maybe Range))
    directionP = option (OrderAsc Nothing) $ choice
        [ OrderAsc . Just <$> Tok.ascP
        , OrderDesc . Just <$> Tok.descP
        ]

    nullsP :: Parser (NullPosition (Maybe Range))
    nullsP = do
        r <- Tok.nullsP
        choice
            [ Tok.firstP >>= \ r' -> return $ NullsFirst $ Just $ r <> r'
            , Tok.lastP >>= \ r' -> return $ NullsLast $ Just $ r <> r'
            , Tok.autoP >>= \ r' -> return $ NullsAuto $ Just $ r <> r'
            ]
-- Parses a window frame clause: {ROWS|RANGE} followed by either
-- BETWEEN <bound> AND <bound> or a single start bound.
frameP :: Parser (Frame Range)
frameP = do
    ftype <- choice
        [ RowFrame <$> Tok.rowsP
        , RangeFrame <$> Tok.rangeP
        ]
    choice
        [ do
            _ <- Tok.betweenP
            start <- frameBoundP
            _ <- Tok.andP
            end <- frameBoundP
            let r = getInfo ftype <> getInfo end
            return $ Frame r ftype start (Just end)
        , do
            start <- frameBoundP
            let r = getInfo ftype <> getInfo start
            return $ Frame r ftype start Nothing
        ]
-- Parses one frame bound: UNBOUNDED {PRECEDING|FOLLOWING},
-- CURRENT ROW, or <constant> {PRECEDING|FOLLOWING}.
frameBoundP :: Parser (FrameBound Range)
frameBoundP = choice
    [ fmap Unbounded $ (<>)
        <$> Tok.unboundedP
        <*> choice [ Tok.precedingP, Tok.followingP ]
    , fmap CurrentRow $ (<>) <$> Tok.currentP <*> Tok.rowP
    , constantP >>= \ expr -> choice
        [ Tok.precedingP >>= \ r ->
            return $ Preceding (getInfo expr <> r) expr
        , Tok.followingP >>= \ r ->
            return $ Following (getInfo expr <> r) expr
        ]
    ]
-- Parses an OVER clause: either a parenthesized window specification
-- (full or partial) or a reference to a named window.
overP :: Parser (OverSubExpr RawNames Range)
overP = do
    start <- Tok.overP
    subExpr <- choice
        [ Left <$> windowP
        , Right <$> windowNameP
        ]
    return $ case subExpr of
        Left w -> mergeWindowInfo start w
        Right wn -> OverWindowName (start <> getInfo wn) wn
  where
    windowP :: Parser (OverSubExpr RawNames Range)
    windowP = do
        start' <- Tok.openP
        expr <- choice
            [ Left <$> windowExprP start'
            , Right <$> partialWindowExprP start'
            ]
        return $ case expr of
            Left w -> OverWindowExpr (start' <> getInfo w) w
            Right pw -> OverPartialWindowExpr (start' <> getInfo pw) pw

    -- widen both the wrapper's and the inner window's ranges so they
    -- include the OVER keyword
    mergeWindowInfo :: Range -> OverSubExpr RawNames Range -> OverSubExpr RawNames Range
    mergeWindowInfo r = \case
        OverWindowExpr r' WindowExpr{..} ->
            OverWindowExpr (r <> r') $ WindowExpr { windowExprInfo = windowExprInfo <> r , ..}
        OverWindowName r' n -> OverWindowName (r <> r') n
        OverPartialWindowExpr r' PartialWindowExpr{..} ->
            OverPartialWindowExpr (r <> r') $ PartialWindowExpr { partWindowExprInfo = partWindowExprInfo <> r , ..}
-- Parses the body of a full window specification; the caller has
-- already consumed the open paren and passes its start range in.
-- Consumes through the closing paren.
windowExprP :: Range -> Parser (WindowExpr RawNames Range)
windowExprP start =
  do
    partition <- optionMaybe partitionP
    order <- option [] orderInWindowClauseP
    frame <- optionMaybe frameP
    end <- Tok.closeP
    let info = start <> end
    return (WindowExpr info partition order frame)
-- Parses the body of a partial window specification that inherits from
-- a named window; the caller has already consumed the open paren.
-- Consumes through the closing paren.
partialWindowExprP :: Range -> Parser (PartialWindowExpr RawNames Range)
partialWindowExprP start =
  do
    inherit <- windowNameP
    order <- option [] orderInWindowClauseP
    frame <- optionMaybe frameP
    end <- Tok.closeP
    let info = start <> end
    return (PartialWindowExpr info inherit Nothing order frame)
-- Parses a window-name token into a WindowName with its range.
windowNameP :: Parser (WindowName Range)
windowNameP = (\ (name, r) -> WindowName r name) <$> Tok.windowNameP
-- Parses a PARTITION clause: PARTITION BY <exprs>, PARTITION BEST, or
-- PARTITION NODES.
partitionP :: Parser (Partition RawNames Range)
partitionP = do
    r <- Tok.partitionP
    choice
        [ Tok.byP >> (exprP `sepBy1` Tok.commaP) >>= \ exprs ->
            return $ PartitionBy
                (sconcat $ r :| map getInfo exprs) exprs
        , Tok.bestP >>= \ r' -> return $ PartitionBest (r <> r')
        , Tok.nodesP >>= \ r' -> return $ PartitionNodes (r <> r')
        ]
-- Parses EXISTS (query).
existsExprP :: Parser (Expr RawNames Range)
existsExprP = do
    r <- Tok.existsP
    _ <- Tok.openP
    query <- queryP
    r' <- Tok.closeP
    return $ ExistsExpr (r <> r') query
-- Parses an ARRAY[e1, e2, ...] literal (the element list may be empty).
arrayExprP :: Parser (Expr RawNames Range)
arrayExprP = do
    s <- Tok.arrayP
    _ <- Tok.openBracketP
    cs <- exprP `sepBy` Tok.commaP
    e <- Tok.closeBracketP
    pure $ ArrayExpr (s <> e) cs
-- Parses a primary expression followed by zero or more postfix
-- ::type casts, applied left to right.
castExprP :: Parser (Expr RawNames Range)
castExprP = foldl (flip ($)) <$> castedP <*> many castP
  where
    -- primary expressions, most specific first (backtracking where a
    -- prefix is shared with a later alternative)
    castedP :: Parser (Expr RawNames Range)
    castedP = choice
        [ try parenExprP
        , try existsExprP
        , try arrayExprP
        , try functionExprP
        , caseExprP
        , try $ do
            constant <- constantP
            return $ ConstantExpr (getInfo constant) constant
        , do
            name <- columnNameP
            return $ ColumnExpr (getInfo name) name
        ]

    castP :: Parser (Expr RawNames Range -> Expr RawNames Range)
    castP = do
        _ <- Tok.castOpP
        typeName <- dataTypeP
        let r expr = getInfo expr <> getInfo typeName
        return (\ expr -> TypeCastExpr (r expr) CastFailureError expr typeName)
-- Parses a cast expression followed by zero or more AT TIMEZONE
-- suffixes, applied left to right.
atTimeZoneExprP :: Parser (Expr RawNames Range)
atTimeZoneExprP = foldl (flip ($)) <$> castExprP <*> many atTimeZoneP
  where
    atTimeZoneP :: Parser (Expr RawNames Range -> Expr RawNames Range)
    atTimeZoneP = do
        _ <- Tok.atP
        _ <- Tok.timezoneP
        tz <- castExprP
        return $ \ expr ->
            AtTimeZoneExpr (getInfo expr <> getInfo tz) expr tz
-- Parses the given symbol as a prefix unary operator, yielding a
-- wrapper that builds the UnOpExpr around its operand.
unOpP :: Text -> Parser (Expr RawNames Range -> Expr RawNames Range)
unOpP op = do
    r <- Tok.symbolP op
    return $ \ expr -> UnOpExpr (r <> getInfo expr) (Operator op) expr
-- Parses at most one prefix sign/bitwise operator (+, -, @, ~) before
-- an AT TIMEZONE-level expression.
negateExprP :: Parser (Expr RawNames Range)
negateExprP = do
    neg <- option id $ choice $ map unOpP [ "+", "-", "@", "~" ]
    expr <- atTimeZoneExprP
    return $ neg expr
-- Parses the given symbol as an infix binary operator, yielding the
-- combining function used with `chainl1`.
binOpP :: Text -> Parser (Expr RawNames Range -> Expr RawNames Range -> Expr RawNames Range)
binOpP op = do
    r <- Tok.symbolP op
    let r' lhs rhs = sconcat $ r :| map getInfo [lhs, rhs]
    return $ \ lhs rhs -> BinOpExpr (r' lhs rhs) (Operator op) lhs rhs
-- Arithmetic/bitwise precedence ladder, tightest binding first:
-- ^ then * // / % then + - then & | # then << >>.
-- All levels are left-associative via `chainl1`.
exponentExprP :: Parser (Expr RawNames Range)
exponentExprP = negateExprP `chainl1` binOpP "^"

productExprP :: Parser (Expr RawNames Range)
productExprP = exponentExprP `chainl1` opP
  where
    -- "//" is listed before "/" — presumably so the longer operator
    -- wins; depends on how Tok.symbolP matches (verify lexer behavior)
    opP = choice $ map binOpP [ "*", "//", "/", "%" ]

sumExprP :: Parser (Expr RawNames Range)
sumExprP = productExprP `chainl1` opP
  where
    opP = choice $ map binOpP [ "+", "-" ]

bitwiseExprP :: Parser (Expr RawNames Range)
bitwiseExprP = sumExprP `chainl1` opP
  where
    opP = choice $ map binOpP [ "&", "|", "#" ]

bitShiftExprP :: Parser (Expr RawNames Range)
bitShiftExprP = bitwiseExprP `chainl1` opP
  where
    opP = choice $ map binOpP [ "<<", ">>" ]
-- Parses the NOT keyword, yielding a wrapper that negates an
-- expression.
notP :: Parser (Expr RawNames Range -> Expr RawNames Range)
notP = (\ r -> UnOpExpr r "NOT") <$> Tok.notP
-- Parses IS [NOT] {TRUE|FALSE|NULL|UNKNOWN} suffixes as well as the
-- ISNULL/NOTNULL shorthands; multiple suffixes compose left to right.
isExprP :: Parser (Expr RawNames Range)
isExprP = do
    expr <- bitShiftExprP
    is <- fmap (foldl (.) id) $ many $ choice
        [ do
            _ <- Tok.isP
            not_ <- option id notP
            (not_ .) <$> choice
                [ Tok.trueP >>= \ r -> return (UnOpExpr r "ISTRUE")
                , Tok.falseP >>= \ r -> return (UnOpExpr r "ISFALSE")
                , Tok.nullP >>= \ r -> return (UnOpExpr r "ISNULL")
                , Tok.unknownP >>= \ r -> return (UnOpExpr r "ISUNKNOWN")
                ]
        , Tok.isnullP >>= \ r -> return (UnOpExpr r "ISNULL")
        -- NOTNULL desugars to NOT(ISNULL ...)
        , Tok.notnullP >>= \ r -> return (UnOpExpr r "NOT" . UnOpExpr r "ISNULL")
        ]
    return $ is expr
-- Parses the string-concatenation operator ||, left-associative.
appendExprP :: Parser (Expr RawNames Range)
appendExprP = isExprP `chainl1` binOpP "||"
-- Parses [NOT] IN (query | expr-list) suffixes; the optional NOT wraps
-- the whole composed chain of IN clauses.
inExprP :: Parser (Expr RawNames Range)
inExprP = do
    expr <- appendExprP
    not_ <- option id notP
    in_ <- foldl (.) id <$> many inP
    return $ not_ $ in_ expr
  where
    inP = do
        _ <- Tok.inP
        _ <- Tok.openP
        -- a subquery and an expression list are distinguished by which
        -- alternative parses
        list <- choice
            [ Left <$> queryP
            , Right <$> exprP `sepBy1` Tok.commaP
            ]
        r <- Tok.closeP
        return $ case list of
            Left query ->
                \ expr -> InSubqueryExpr (getInfo expr <> r) query expr
            Right constants ->
                \ expr -> InListExpr (getInfo expr <> r) constants expr
-- Parses BETWEEN <start> AND <end> suffixes.  The bounds are parsed at
-- the bit-shift precedence level so the AND separator is not swallowed
-- as part of a boolean expression.
betweenExprP :: Parser (Expr RawNames Range)
betweenExprP = do
    expr <- inExprP
    between <- foldl (.) id <$> many betweenP
    return $ between expr
  where
    betweenP = do
        _ <- Tok.betweenP
        start <- bitShiftExprP
        _ <- Tok.andP
        end <- bitShiftExprP
        let r expr = getInfo expr <> getInfo end
        return $ \ expr -> BetweenExpr (r expr) start end expr
-- Parses (a, b) OVERLAPS (c, d), falling back (with backtracking) to
-- an ordinary BETWEEN-level expression.
overlapsExprP :: Parser (Expr RawNames Range)
overlapsExprP = try overlapsP <|> betweenExprP
  where
    overlapsP = do
        -- parses a parenthesized pair of expressions and its range
        let pair :: Parser a -> Parser ((a, a), Range)
            pair p = do
                r <- Tok.openP
                s <- p
                _ <- Tok.commaP
                e <- p
                r' <- Tok.closeP
                return ((s, e), r <> r')
        (lhs, r) <- pair exprP
        _ <- Tok.overlapsP
        (rhs, r') <- pair exprP
        return $ OverlapsExpr (r <> r') lhs rhs
likeExprP :: Parser (Expr RawNames Range)
likeExprP = do
expr <- overlapsExprP
like <- option id comparisonP
return $ like expr
where
comparisonP :: Parser (Expr RawNames Range -> Expr RawNames Range)
comparisonP = choice
[ do
comparison <- symbolComparisonP
pattern <- Pattern <$> overlapsExprP
return $ comparison pattern
, do
comparison <- textComparisonP
pattern <- Pattern <$> overlapsExprP
escape <- optionMaybe $ do
_ <- Tok.escapeP
Escape <$> exprP
return $ comparison escape pattern
]
-- | Parse operator-style pattern comparisons: LIKE/ILIKE and their
-- negations spelled as operators, plus the regex-match operator family.
-- Each alternative yields a function attaching the pattern to the left
-- operand; NOT variants wrap the result in a NOT node.
symbolComparisonP :: Parser (Pattern RawNames Range -> Expr RawNames Range -> Expr RawNames Range)
symbolComparisonP = choice $
    -- the result node spans both the operand and the pattern
    let r expr pattern = getInfo expr <> getInfo pattern
    in [ do
            _ <- Tok.likeOpP
            return $ \ pattern expr -> LikeExpr (r pattern expr) "LIKE" Nothing pattern expr
       , do
            _ <- Tok.iLikeOpP
            return $ \ pattern expr -> LikeExpr (r pattern expr) "ILIKE" Nothing pattern expr
       , do
            _ <- Tok.notLikeOpP
            return $ \ pattern expr ->
                UnOpExpr (r pattern expr) "NOT" $ LikeExpr (r pattern expr) "LIKE" Nothing pattern expr
       , do
            _ <- Tok.notILikeOpP
            return $ \ pattern expr ->
                UnOpExpr (r pattern expr) "NOT" $ LikeExpr (r pattern expr) "ILIKE" Nothing pattern expr
       , do
            _ <- Tok.regexMatchesP
            return $ \ pattern expr ->
                BinOpExpr (r pattern expr) "REGEX MATCHES" expr $ patternExpr pattern
       , do
            _ <- Tok.regexIgnoreCaseMatchesP
            return $ \ pattern expr ->
                BinOpExpr (r pattern expr) "REGEX IGNORE-CASE MATCHES" expr $ patternExpr pattern
       , do
            _ <- Tok.notRegexMatchesP
            return $ \ pattern expr ->
                UnOpExpr (r pattern expr) "NOT" $
                    BinOpExpr (r pattern expr) "REGEX MATCHES" expr $ patternExpr pattern
       , do
            _ <- Tok.notRegexIgnoreCaseMatchesP
            return $ \ pattern expr ->
                UnOpExpr (r pattern expr) "NOT" $
                    BinOpExpr (r pattern expr) "REGEX IGNORE-CASE MATCHES" expr $ patternExpr pattern
       ]
-- | Parse keyword-style LIKE/ILIKE (optionally preceded by NOT); the
-- caller supplies the optional ESCAPE clause and the pattern.  The
-- LIKEB/ILIKEB spellings are parsed as plain LIKE/ILIKE.
textComparisonP :: Parser (Maybe (Escape RawNames Range) -> Pattern RawNames Range -> Expr RawNames Range -> Expr RawNames Range)
textComparisonP = do
    not_ <- option id notP
    like <- choice
        [ Tok.likeP >>= \ r -> return $ LikeExpr r "LIKE"
        , Tok.iLikeP >>= \ r -> return $ LikeExpr r "ILIKE"
        , Tok.likeBP >>= \ r -> return $ LikeExpr r "LIKE"
        , Tok.iLikeBP >>= \ r -> return $ LikeExpr r "ILIKE"
        ]
    return $ \ escape pattern expr -> not_ $ like escape pattern expr
-- | Build a binary-operator expression node from an operator name paired
-- with the operator token's annotation.
mkBinOp :: (Text, a) -> Expr r a -> Expr r a -> Expr r a
mkBinOp (op, r) lhs rhs = BinOpExpr r (Operator op) lhs rhs
-- | Parse left-associative chains of inequality operators.
inequalityExprP :: Parser (Expr RawNames Range)
inequalityExprP = chainl1 likeExprP (fmap mkBinOp Tok.inequalityOpP)
-- | Parse left-associative chains of equality operators.
equalityExprP :: Parser (Expr RawNames Range)
equalityExprP = chainl1 inequalityExprP (fmap mkBinOp Tok.equalityOpP)
-- | Parse an expression with any number of leading NOT operators.
notExprP :: Parser (Expr RawNames Range)
notExprP = do
    nots <- many notP
    expr <- equalityExprP
    -- apply the NOT wrappers so the first one parsed is applied first
    return $ foldl (\ acc f -> f . acc) id nots expr
-- | Parse left-associative AND expressions.
andExprP :: Parser (Expr RawNames Range)
andExprP = notExprP `chainl1` fmap (\ r -> BinOpExpr r "AND") Tok.andP
-- | Parse left-associative OR expressions.
orExprP :: Parser (Expr RawNames Range)
orExprP = andExprP `chainl1` fmap (\ r -> BinOpExpr r "OR") Tok.orP
-- | Parse a single table reference: an aliased subquery, a (possibly
-- aliased) table name, or a parenthesized join.
singleTableP :: Parser (Tablish RawNames Range)
singleTableP = try subqueryP <|> try tableP <|> parenthesizedJoinP
  where
    subqueryP = do
        r <- Tok.openP
        query <- queryP
        _ <- Tok.closeP
        -- subqueries require an alias; the AS keyword itself is optional
        optional Tok.asP
        (name, r') <- Tok.tableNameP
        alias <- makeTableAlias r' name
        return $ TablishSubQuery (r <> r')
                                 (TablishAliasesT alias)
                                 query
    tableP = do
        name <- tableNameP
        maybe_alias <- optionMaybe $ do
            optional Tok.asP
            (alias, r) <- Tok.tableNameP
            makeTableAlias r alias
        let r = case maybe_alias of
                    Nothing -> getInfo name
                    Just alias -> getInfo alias <> getInfo name
            aliases = maybe TablishAliasesNone TablishAliasesT maybe_alias
        return $ TablishTable r aliases name
    parenthesizedJoinP = do
        tablish <- P.between Tok.openP Tok.closeP $ do
            table <- singleTableP
            joins <- fmap (appEndo . fold . reverse) $ many1 $ Endo <$> joinP
            return $ joins table
        -- a trailing alias on a parenthesized join is accepted but dropped
        optional $ do
            optional Tok.asP
            void Tok.tableNameP
        pure tablish
-- | Accept @p@ either bare or wrapped in one pair of parentheses.
optionalParensP :: Parser a -> Parser a
optionalParensP p = choice [try p, P.between Tok.openP Tok.closeP p]
-- | Accept @p@ wrapped in any number of balanced parentheses, including
-- none.
manyParensP :: Parser a -> Parser a
manyParensP p = choice [try p, P.between Tok.openP Tok.closeP (manyParensP p)]
-- | Parse a table expression: one table followed by zero or more joins.
tablishP :: Parser (Tablish RawNames Range)
tablishP = do
    table <- singleTableP
    -- joins parse left to right; reversing before folding applies them in
    -- that same order around the leftmost table
    joins <- fmap (appEndo . fold . reverse) $ many $ Endo <$> joinP
    return $ joins table
-- | Parse one join clause of any flavor.
joinP :: Parser (Tablish RawNames Range -> Tablish RawNames Range)
joinP = choice [regularJoinP, naturalJoinP, crossJoinP]
-- | Parse a join with an explicit ON or USING condition; the join type
-- defaults to INNER when no LEFT/RIGHT/FULL/INNER keyword is present.
regularJoinP :: Parser (Tablish RawNames Range -> Tablish RawNames Range)
regularJoinP = do
    maybeJoinType <- optionMaybe $ innerJoinTypeP <|> outerJoinTypeP
    -- extend the join type's range to include the JOIN keyword
    joinType <- Tok.joinP >>= \ r -> return $ case maybeJoinType of
        Nothing -> JoinInner r
        Just joinType -> (<> r) <$> joinType
    rhs <- singleTableP
    condition <- choice
        [ do
            _ <- Tok.onP <?> "condition in join clause"
            JoinOn <$> exprP
        , do
            s <- Tok.usingP <?> "using in join clause"
            _ <- Tok.openP
            names <- flip sepBy1 Tok.commaP $ do
                (name, r) <- Tok.columnNameP
                pure $ QColumnName r None name
            e <- Tok.closeP
            return $ JoinUsing (s <> e) names
        ]
    let r lhs = getInfo lhs <> getInfo rhs <> getInfo condition
    return $ \ lhs ->
        TablishJoin (r lhs) joinType condition lhs rhs
-- | Parse LEFT/RIGHT/FULL join types; a trailing OUTER keyword is
-- accepted and ignored.
outerJoinTypeP :: Parser (JoinType Range)
outerJoinTypeP =
    choice
        [ JoinLeft <$> Tok.leftP
        , JoinRight <$> Tok.rightP
        , JoinFull <$> Tok.fullP
        ]
    <* optional Tok.outerP
-- | Parse the INNER join-type keyword.
innerJoinTypeP :: Parser (JoinType Range)
innerJoinTypeP = JoinInner <$> Tok.innerP
-- | Parse a NATURAL join; the join condition is implicit.
naturalJoinP :: Parser (Tablish RawNames Range -> Tablish RawNames Range)
naturalJoinP = do
    r <- Tok.naturalP
    maybeJoinType <- optionMaybe $ innerJoinTypeP <|> outerJoinTypeP
    -- NOTE(review): when no explicit join type is given, only the NATURAL
    -- keyword's range is recorded (the JOIN keyword's range is dropped) --
    -- confirm this asymmetry with regularJoinP is intended
    joinType <- Tok.joinP >>= \ r' -> return $ case maybeJoinType of
        Nothing -> JoinInner r
        Just joinType -> (const $ r <> r') <$> joinType
    rhs <- singleTableP
    let r' lhs = getInfo lhs <> getInfo rhs
    return $ \ lhs -> TablishJoin (r' lhs) joinType (JoinNatural r Unused) lhs rhs
-- | Parse a CROSS JOIN, desugaring it into an INNER JOIN ON TRUE.
crossJoinP :: Parser (Tablish RawNames Range -> Tablish RawNames Range)
crossJoinP = do
    r <- Tok.crossP
    r' <- Tok.joinP
    rhs <- singleTableP
    let r'' lhs = getInfo lhs <> getInfo rhs
        joinInfo = r <> r'
        -- synthesized TRUE condition carries the CROSS JOIN keywords' range
        true' = JoinOn $ ConstantExpr joinInfo $ BooleanConstant joinInfo True
    return $ \ lhs ->
        TablishJoin (r'' lhs) (JoinInner joinInfo) true' lhs rhs
-- | Parse the @CREATE PROJECTION@ keywords, returning their combined
-- range.
createProjectionPrefixP :: Parser Range
createProjectionPrefixP = (<>) <$> Tok.createP <*> Tok.projectionP
-- | Parse @CREATE PROJECTION@: optional IF NOT EXISTS, a projection name,
-- an optional column list, the defining query, and optional segmentation
-- and K-safety clauses.
createProjectionP :: Parser (CreateProjection RawNames Range)
createProjectionP = do
    s <- createProjectionPrefixP
    createProjectionIfNotExists <- ifNotExistsP
    createProjectionName <- projectionNameP
    createProjectionColumns <- optionMaybe $ try columnListP
    _ <- Tok.asP
    createProjectionQuery <- queryP
    createProjectionSegmentation <- optionMaybe $ choice
        [ do
            s' <- Tok.unsegmentedP
            choice
                [ do
                    _ <- Tok.nodeP
                    node <- nodeNameP
                    let e' = getInfo node
                    pure $ UnsegmentedOneNode (s' <> e') node
                , do
                    _ <- Tok.allP
                    e' <- Tok.nodesP
                    pure $ UnsegmentedAllNodes (s' <> e')
                ]
        , do
            s' <- Tok.segmentedP
            _ <- Tok.byP
            expr <- exprP
            list <- nodeListP
            pure $ SegmentedBy (s' <> getInfo list) expr list
        ]
    -- KSAFE may appear with or without an explicit safety level
    createProjectionKSafety <- optionMaybe $ do
        s' <- Tok.ksafeP
        choice
            [ do
                (n, e') <- integerP
                pure $ KSafety (s' <> e') (Just n)
            , pure $ KSafety s' Nothing
            ]
    let createProjectionInfo =
            sconcat $ s :| catMaybes [ Just $ getInfo createProjectionQuery
                                     , getInfo <$> createProjectionSegmentation
                                     , getInfo <$> createProjectionKSafety
                                     ]
    pure CreateProjection{..}
  where
    columnListP :: Parser (NonEmpty (ProjectionColumn Range))
    columnListP = do
        _ <- Tok.openP
        c:cs <- flip sepBy1 Tok.commaP $ do
            (projectionColumnName, s) <- Tok.columnNameP
            projectionColumnAccessRank <- optionMaybe $ do
                s' <- Tok.accessRankP
                (n, e') <- integerP
                pure $ AccessRank (s' <> e') n
            projectionColumnEncoding <- optionMaybe $ do
                _ <- Tok.encodingP
                Tok.encodingTypeP
            let projectionColumnInfo =
                    sconcat $ s :| catMaybes [ getInfo <$> projectionColumnAccessRank
                                             , getInfo <$> projectionColumnEncoding ]
            pure ProjectionColumn{..}
        _ <- Tok.closeP
        pure (c:|cs)
-- | Parse @ALTER TABLE a, b RENAME TO x, y@; sources and targets are
-- paired positionally and their counts must match.
multipleRenameP :: Parser (MultipleRename RawNames Range)
multipleRenameP = do
    s <- Tok.alterP
    _ <- Tok.tableP
    sources <- tableNameP `sepBy1` Tok.commaP
    _ <- Tok.renameP
    _ <- Tok.toP
    -- rename targets may not be schema-qualified
    targets <- map (\ uqtn -> uqtn { tableNameSchema = Nothing }) <$> unqualifiedTableNameP `sepBy1` Tok.commaP
    when (length sources /= length targets) $ fail "multi-renames require the same number of sources and targets"
    let e = getInfo $ last targets
        pairs = zip sources targets
        toAlterTableRename = \ (from, to) ->
            AlterTableRenameTable (getInfo from <> getInfo to) from to
        renames = map toAlterTableRename pairs
    pure $ MultipleRename (s <> e) renames
-- | Parse @ALTER TABLE t SET SCHEMA s [RESTRICT | CASCADE]@.
setSchemaP :: Parser (SetSchema RawNames Range)
setSchemaP = do
    s <- Tok.alterP
    _ <- Tok.tableP
    table <- tableNameP
    _ <- Tok.setP
    _ <- Tok.schemaP
    (schema, r) <- Tok.schemaNameP
    e <- option r $ choice [Tok.restrictP, Tok.cascadeP]
    pure $ SetSchema (s <> e) table $ mkNormalSchema schema r
-- | Parse @ALTER PROJECTION p RENAME TO q@; only the source range is
-- kept (the statement itself is unhandled downstream).
renameProjectionP :: Parser Range
renameProjectionP = do
    s <- Tok.alterP
    _ <- Tok.projectionP
    _ <- projectionNameP
    _ <- Tok.renameP
    _ <- Tok.toP
    to <- projectionNameP
    pure $ s <> getInfo to
-- | Parse the @ALTER RESOURCE POOL@ keywords, returning their combined
-- range.
alterResourcePoolPrefixP :: Parser Range
alterResourcePoolPrefixP = (<>) <$> (Tok.alterP <* Tok.resourceP) <*> Tok.poolP
-- | Parse @ALTER RESOURCE POOL ...@, consuming everything up to the
-- terminating semicolon; the statement is not modeled further.
alterResourcePoolP :: Parser Range
alterResourcePoolP = do
    s <- alterResourcePoolPrefixP
    ts <- P.many Tok.notSemicolonP
    pure $ if null ts then s else s <> last ts
-- | Parse the @CREATE RESOURCE POOL@ keywords, returning their combined
-- range.
createResourcePoolPrefixP :: Parser Range
createResourcePoolPrefixP = (<>) <$> (Tok.createP <* Tok.resourceP) <*> Tok.poolP
-- | Parse @CREATE RESOURCE POOL ...@, consuming everything up to the
-- terminating semicolon; the statement is not modeled further.
createResourcePoolP :: Parser Range
createResourcePoolP = do
    s <- createResourcePoolPrefixP
    ts <- P.many Tok.notSemicolonP
    pure $ if null ts then s else s <> last ts
-- | Parse the @DROP RESOURCE POOL@ keywords, returning their combined
-- range.
dropResourcePoolPrefixP :: Parser Range
dropResourcePoolPrefixP = (<>) <$> (Tok.dropP <* Tok.resourceP) <*> Tok.poolP
-- | Parse @DROP RESOURCE POOL <name>@; the single trailing token is the
-- pool's name.
dropResourcePoolP :: Parser Range
dropResourcePoolP = (<>) <$> dropResourcePoolPrefixP <*> Tok.notSemicolonP
-- | Parse the prefix of @CREATE [OR REPLACE] {[TRANSFORM | ANALYTIC |
-- AGGREGATE] FUNCTION | FILTER | PARSER | SOURCE}@, returning its range.
createFunctionPrefixP :: Parser Range
createFunctionPrefixP = do
    s <- Tok.createP
    _ <- optional $ Tok.orP >> Tok.replaceP
    e <- choice
        [ do
            _ <- optional $ Tok.transformP <|> Tok.analyticP <|> Tok.aggregateP
            Tok.functionP
        , Tok.filterP
        , Tok.parserP
        , Tok.sourceP
        ]
    pure $ s <> e
-- | Parse a CREATE FUNCTION-family statement, consuming everything up to
-- the terminating semicolon; the statement is not modeled further.
createFunctionP :: Parser Range
createFunctionP = do
    s <- createFunctionPrefixP
    ts <- P.many Tok.notSemicolonP
    pure $ if null ts then s else s <> last ts
-- | Parse @ALTER TABLE t ADD <table-constraint>@; only the range is kept.
alterTableAddConstraintP :: Parser Range
alterTableAddConstraintP = do
    s <- Tok.alterP
    _ <- Tok.tableP
    _ <- tableNameP
    _ <- Tok.addP
    e <- tableConstraintP
    pure $ s <> e
-- | Parse a table constraint (PRIMARY KEY, UNIQUE, FOREIGN KEY, or
-- CHECK), optionally preceded by @CONSTRAINT <name>@; returns the
-- covered range.
tableConstraintP :: Parser Range
tableConstraintP = do
    s <- optionMaybe $ do
        s <- Tok.constraintP
        _ <- Tok.constraintNameP
        return s
    e <- choice
        [ do
            _ <- Tok.primaryP
            _ <- Tok.keyP
            e <- columnListP
            option e (Tok.enabledP <|> Tok.disabledP)
        , do
            _ <- Tok.uniqueP
            e <- columnListP
            option e (Tok.enabledP <|> Tok.disabledP)
        , do
            _ <- Tok.foreignP
            _ <- Tok.keyP
            _ <- columnListP
            _ <- Tok.referencesP
            e <- getInfo <$> tableNameP
            option e columnListP
        , do
            _ <- Tok.checkP
            e <- getInfo <$> exprP
            option e (Tok.enabledP <|> Tok.disabledP)
        ]
    -- with no CONSTRAINT prefix, e <> e collapses to just e
    return (maybe e id s <> e)
  where
    columnListP :: Parser Range
    columnListP = do
        s <- Tok.openP
        _ <- Tok.columnNameP `sepBy1` Tok.commaP
        e <- Tok.closeP
        return (s <> e)
-- | Parse @EXPORT TO STDOUT FROM t (c1, ...)@; only the range is kept.
exportToStdoutP :: Parser Range
exportToStdoutP = do
    s <- Tok.exportP
    _ <- Tok.toP
    _ <- Tok.stdoutP
    _ <- Tok.fromP
    _ <- tableNameP
    _ <- Tok.openP
    _ <- Tok.columnNameP `sepBy1` Tok.commaP
    e <- Tok.closeP
    pure $ s <> e
-- | Parse the @SET SESSION@ keywords, returning their combined range.
setSessionPrefixP :: Parser Range
setSessionPrefixP = (<>) <$> Tok.setP <*> Tok.sessionP
-- | Parse @SET SESSION ...@, consuming everything up to the terminating
-- semicolon; the statement is not modeled further.
setSessionP :: Parser Range
setSessionP = do
    s <- setSessionPrefixP
    ts <- P.many Tok.notSemicolonP
    pure $ if null ts then s else s <> last ts
-- | Parse @SET TIMEZONE TO {DEFAULT | 'tz' | INTERVAL 'offset'}@.
setTimeZoneP :: Parser Range
setTimeZoneP = do
    s <- Tok.setP
    _ <- Tok.timezoneP
    _ <- Tok.toP
    e <- choice [ Tok.defaultP
                , snd <$> Tok.stringP
                , Tok.intervalP >> snd <$> Tok.stringP
                ]
    return $ s <> e
-- | Parse @CONNECT TO VERTICA db USER u PASSWORD {'...' | ***}
-- [ON 'host', port]@; only the range is kept.
connectP :: Parser Range
connectP = do
    s <- Tok.connectP
    _ <- Tok.toP
    _ <- Tok.verticaP
    _ <- Tok.databaseNameP
    _ <- Tok.userP
    _ <- Tok.userNameP
    _ <- Tok.passwordP
    -- passwords may appear redacted as a run of asterisks
    e <- snd <$> Tok.stringP <|> snd <$> starsP
    e' <- option e $ do
        _ <- Tok.onP
        _ <- Tok.stringP
        _ <- Tok.commaP
        snd <$> Tok.numberP
    pure $ s <> e'
  where
    -- reassemble consecutive star tokens into one pseudo-string token
    starsP = do
        rs <- P.many1 Tok.starP
        let text = TL.take (fromIntegral $ length rs) $ TL.repeat '*'
            r = head rs <> last rs
        pure (text, r)
-- | Parse @DISCONNECT <database>@, returning the covered range.
disconnectP :: Parser Range
disconnectP = (<>) <$> Tok.disconnectP <*> fmap snd Tok.databaseNameP
-- | Parse @CREATE ACCESS POLICY ON t FOR COLUMN c <expr>
-- {ENABLE | DISABLE}@; only the range is kept.
createAccessPolicyP :: Parser Range
createAccessPolicyP = do
    s <- Tok.createP
    _ <- Tok.accessP
    _ <- Tok.policyP
    _ <- Tok.onP
    _ <- tableNameP
    _ <- Tok.forP
    _ <- Tok.columnP
    _ <- Tok.columnNameP
    _ <- exprP
    e <- choice [ Tok.enableP, Tok.disableP ]
    pure $ s <> e
-- | Parse @COPY t ... FROM ...@ bulk-load statements; the many ingestion
-- options are recognized but only the overall source range is kept.
copyFromP :: Parser Range
copyFromP = do
    s <- Tok.copyP
    e <- getInfo <$> tableNameP
    -- these options, when present, must appear in this order
    e' <- consumeOrderedOptions e $
        [ ingestionColumnListP (getInfo <$> exprP)
        , ingestionColumnOptionP
        , fromP -- you need **either** a FROM or a SOURCE clause, but let's not be fussy
        , fileStorageFormatP
        ]
    -- the remaining options may appear in any order
    e'' <- consumeUnorderedOptions e' $
        [ do
            _ <- optional Tok.withP
            choice [ fileSourceP
                   , fileFilterP
                   , fileParserP
                   ]
        , delimiterAsP
        , trailingNullColsP
        , nullAsP
        , escapeFormatP
        , enclosedByP
        , recordTerminatorP
        , try $ skipRecordsP
        , try $ skipBytesP
        , trimByteP
        , rejectMaxP
        , rejectedDataOnNodeP
        , exceptionsOnNodeP
        , Tok.enforceLengthP
        , errorToleranceP
        , abortOnErrorP
        , optional Tok.storageP >> loadMethodP
        , streamNameP
        , noCommitP
        ]
    return $ s <> e''
  where
    -- optional ON <node> / ON ANY NODE suffix, then optional compression
    onNodeP :: Range -> Parser Range
    onNodeP r = do
        s <- option r $ choice
            [ try $ Tok.onP >> snd <$> Tok.nodeNameP
            , Tok.onP >> Tok.anyP >> Tok.nodeP
            ]
        e <- option s compressionP
        return $ s <> e
    -- FROM {STDIN | 'path'... | LOCAL ... | VERTICA db.table}
    fromP :: Parser Range
    fromP = do
        outerS <- Tok.fromP
        outerE <- choice $
            [ do
                s <- Tok.stdinP
                e <- option s compressionP
                return $ s <> e
            , do
                (_, s) <- Tok.stringP
                e <- last <$> ((onNodeP s) `sepBy1` Tok.commaP)
                return $ s <> e
            , do
                s <- Tok.localP
                e' <- choice [ do
                                   e <- Tok.stdinP
                                   option e compressionP
                             , let pathToDataP = do
                                       e <- snd <$> Tok.stringP
                                       option e compressionP
                               in last <$> (pathToDataP `sepBy1` Tok.commaP)
                             ]
                return $ s <> e'
            , do
                s <- Tok.verticaP
                _ <- Tok.databaseNameP
                _ <- Tok.dotP
                e <- getInfo <$> tableNameP
                e' <- option e $ do
                    _ <- Tok.openP
                    _ <- Tok.columnNameP `sepBy1` Tok.commaP
                    Tok.closeP
                return $ s <> e'
            ]
        return $ outerS <> outerE
-- | Parse @SHOW ...@, consuming everything up to the terminating
-- semicolon; at least one token must follow SHOW.
showP :: Parser Range
showP = (\ s es -> s <> last es) <$> Tok.showP <*> many1 Tok.notSemicolonP
-- | Parse a MERGE statement: target and source tables (each optionally
-- aliased), the join condition, and optional WHEN MATCHED / WHEN NOT
-- MATCHED directives.  At least one directive is required.
mergeP :: Parser (Merge RawNames Range)
mergeP = do
    r1 <- Tok.mergeP
    _ <- Tok.intoP
    mergeTargetTable <- tableNameP
    mergeTargetAlias <- optionMaybe tableAliasP
    _ <- Tok.usingP
    mergeSourceTable <- tableNameP
    mergeSourceAlias <- optionMaybe tableAliasP
    _ <- Tok.onP
    mergeCondition <- exprP
    -- lookahead: commit to WHEN MATCHED before consuming WHEN, since the
    -- insert directive below also begins with WHEN
    mergeUpdateDirective <- optionMaybe $ do
        _ <- try $ P.lookAhead $ Tok.whenP >> Tok.matchedP
        _ <- Tok.whenP
        _ <- Tok.matchedP
        _ <- Tok.thenP
        _ <- Tok.updateP
        _ <- Tok.setP
        NE.fromList <$> colValP `sepBy1` Tok.commaP
    (mergeInsertDirectiveColumns, mergeInsertDirectiveValues, r2) <- option (Nothing, Nothing, Just r1) $ do
        _ <- Tok.whenP
        _ <- Tok.notP
        _ <- Tok.matchedP
        _ <- Tok.thenP
        _ <- Tok.insertP
        cols <- optionMaybe $ NE.fromList <$> P.between Tok.openP Tok.closeP (oqColumnNameP `sepBy1` Tok.commaP)
        _ <- Tok.valuesP
        _ <- Tok.openP
        vals <- NE.fromList <$> defaultExprP `sepBy1` Tok.commaP
        e <- Tok.closeP
        return (cols, Just vals, Just e)
    when ((mergeUpdateDirective, mergeInsertDirectiveValues) == (Nothing, Nothing)) $
        fail "MERGE requires at least one of UPDATE and INSERT"
    let mLastUpdate = fmap (getInfo . snd . NE.last) mergeUpdateDirective
        mLastInsert = r2
        r3 = sconcat $ NE.fromList $ catMaybes [mLastUpdate, mLastInsert]
        mergeInfo = r1 <> r3
    return Merge{..}
  where
    tableAliasP :: Parser (TableAlias Range)
    tableAliasP = do
        (name, r) <- Tok.tableNameP
        makeTableAlias r name
    -- one `col = value` pair in the UPDATE SET list
    colValP :: Parser (ColumnRef RawNames Range, DefaultExpr RawNames Range)
    colValP = do
        col <- oqColumnNameP
        _ <- Tok.equalP
        val <- defaultExprP
        return (col { columnNameTable = Nothing }, val)
| null | https://raw.githubusercontent.com/uber/queryparser/6015e8f273f4498326fec0315ac5580d7036f8a4/dialects/vertica/src/Database/Sql/Vertica/Parser.hs | haskell |
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
but we don't allow eof here. `;` is the
only way to write the empty statement, i.e. `` (empty string) is not allowed.
| parse consumes a statement, or fails
| parseAll consumes all input as a single statement, or fails
| parseMany consumes multiple statements, or fails
| parseManyAll consumes all input multiple statements, or fails
| parseManyEithers consumes all input as multiple (statements or failures)
it should never fail
set when applying updates
you need **either** a FROM or a SOURCE clause, but let's not be fussy
better way?
better way?
because reasons
function expressions get the name of the function
might actually be "array", but I'm not sure how to check
the pool's name
you need **either** a FROM or a SOURCE clause, but let's not be fussy
lookahead | Copyright ( c ) 2017 Uber Technologies , Inc.
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Database.Sql.Vertica.Parser where
import Database.Sql.Type
import Database.Sql.Info
import Database.Sql.Helpers
import Database.Sql.Vertica.Type
import Database.Sql.Vertica.Scanner
import Database.Sql.Vertica.Parser.Internal
import Database.Sql.Position
import qualified Database.Sql.Vertica.Parser.Token as Tok
import Database.Sql.Vertica.Parser.IngestionOptions
import Database.Sql.Vertica.Parser.Shared
import Data.Char (isDigit)
import Data.Text.Lazy (Text)
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Encoding as TL
import qualified Data.List as L
import Data.Maybe (catMaybes, fromMaybe)
import Data.Monoid (Endo (..))
import Data.Semigroup (Option (..))
import qualified Text.Parsec as P
import Text.Parsec ( chainl1, choice, many, many1
, option, optional, optionMaybe
, sepBy, sepBy1, try, (<|>), (<?>))
import Control.Arrow (first)
import Control.Monad (void, (>=>), when)
import Data.Semigroup (Semigroup (..), sconcat)
import Data.List.NonEmpty (NonEmpty ((:|)))
import qualified Data.List.NonEmpty as NE (last, fromList)
import Data.Foldable (fold)
-- | Parse one Vertica statement: standard SQL is tried first, then each
-- Vertica-specific form; several prefixes use explicit lookahead to
-- commit to an alternative before consuming input.
statementParser :: Parser (VerticaStatement RawNames Range)
statementParser = do
    maybeStmt <- optionMaybe $ choice
        [ try $ VerticaStandardSqlStatement <$> statementP
        , do
            _ <- try $ P.lookAhead createProjectionPrefixP
            VerticaCreateProjectionStatement <$> createProjectionP
        , try $ VerticaMultipleRenameStatement <$> multipleRenameP
        , try $ VerticaSetSchemaStatement <$> setSchemaP
        , try $ VerticaUnhandledStatement <$> renameProjectionP
        , do
            _ <- try $ P.lookAhead alterResourcePoolPrefixP
            VerticaUnhandledStatement <$> alterResourcePoolP
        , do
            _ <- try $ P.lookAhead createResourcePoolPrefixP
            VerticaUnhandledStatement <$> createResourcePoolP
        , do
            _ <- try $ P.lookAhead dropResourcePoolPrefixP
            VerticaUnhandledStatement <$> dropResourcePoolP
        , do
            _ <- try $ P.lookAhead createFunctionPrefixP
            VerticaUnhandledStatement <$> createFunctionP
        , VerticaUnhandledStatement <$> alterTableAddConstraintP
        , VerticaUnhandledStatement <$> exportToStdoutP
        , do
            _ <- try $ P.lookAhead setSessionPrefixP
            VerticaUnhandledStatement <$> setSessionP
        , VerticaUnhandledStatement <$> setTimeZoneP
        , VerticaUnhandledStatement <$> connectP
        , VerticaUnhandledStatement <$> disconnectP
        , VerticaUnhandledStatement <$> createAccessPolicyP
        , VerticaUnhandledStatement <$> copyFromP
        , VerticaUnhandledStatement <$> showP
        , VerticaMergeStatement <$> mergeP
        ]
    case maybeStmt of
        Just stmt -> terminator >> return stmt
        Nothing -> VerticaStandardSqlStatement <$> emptyStatementP
  where
    -- normal statements may be terminated by `;` or eof
    -- NOTE(review): the definition of `terminator` used above appears to
    -- have been lost from this where-clause -- TODO restore it.
-- | Parse a single statement from the input text, or fail.
parse :: Text -> Either P.ParseError (VerticaStatement RawNames Range)
parse text = P.runParser statementParser 0 "-" (tokenize text)
-- | Parse exactly one statement consuming all input, or fail.
parseAll :: Text -> Either P.ParseError (VerticaStatement RawNames Range)
parseAll text = P.runParser (statementParser <* P.eof) 0 "-" (tokenize text)
-- | Parse one or more statements, or fail.
parseMany :: Text -> Either P.ParseError [VerticaStatement RawNames Range]
parseMany text = P.runParser (P.many1 statementParser) 0 "-" (tokenize text)
-- | Parse one or more statements consuming all input, or fail.
parseManyAll :: Text -> Either P.ParseError [VerticaStatement RawNames Range]
parseManyAll = P.runParser (P.many1 statementParser <* P.eof) 0 "-" . tokenize
-- | Parse all input as a sequence of statements, collecting each span
-- that fails to parse as an 'Unparsed' range instead of aborting; the
-- parser itself should never fail.
parseManyEithers :: Text -> Either P.ParseError [Either (Unparsed Range) (VerticaStatement RawNames Range)]
parseManyEithers text = P.runParser parser 0 "-" . tokenize $ text
  where
    parser = do
        statements <- P.many1 $ P.setState 0 >> choice
            [ try $ Right <$> statementParser
            , try $ Left <$> do
                -- skip to the next semicolon, recording the skipped span
                ss <- many Tok.notSemicolonP
                e <- Tok.semicolonP
                pure $ case ss of
                    [] -> Unparsed e
                    s:_ -> Unparsed (s <> e)
            ]
        -- any trailing tokens after the last semicolon are unparsed too
        locs <- many Tok.notSemicolonP
        P.eof
        pure $ case locs of
            [] -> statements
            s:es -> statements ++ [Left $ Unparsed $ sconcat (s:|es)]
-- | Run @p@ for its effects and report whether it succeeded.
optionBool :: Parser a -> Parser Bool
optionBool p = option False (True <$ p)
-- | Parse a standard-SQL statement; ordering matters, and several
-- CREATE/DROP forms use lookahead on their keyword prefixes.
statementP :: Parser (Statement Vertica RawNames Range)
statementP = choice
    [ InsertStmt <$> insertP
    , DeleteStmt <$> deleteP
    , QueryStmt <$> queryP
    , explainP
    , TruncateStmt <$> truncateP
    , AlterTableStmt <$> alterTableP
    , do
        _ <- try $ P.lookAhead createSchemaPrefixP
        CreateSchemaStmt <$> createSchemaP
    , do
        _ <- try $ P.lookAhead createExternalTablePrefixP
        CreateTableStmt <$> createExternalTableP
    , do
        _ <- try $ P.lookAhead createViewPrefixP
        CreateViewStmt <$> createViewP
    , CreateTableStmt <$> createTableP
    , do
        _ <- try $ P.lookAhead dropViewPrefixP
        DropViewStmt <$> dropViewP
    , DropTableStmt <$> dropTableP
    , GrantStmt <$> grantP
    , RevokeStmt <$> revokeP
    , BeginStmt <$> beginP
    , CommitStmt <$> commitP
    , RollbackStmt <$> rollbackP
    ]
-- | Parse an optionally-qualified column name with no table qualifier.
oqColumnNameP :: Parser (OQColumnName Range)
oqColumnNameP = do
    (name, r) <- Tok.columnNameP
    pure $ QColumnName r Nothing name
-- | Parse @INSERT INTO t [(cols)] {DEFAULT VALUES | VALUES (...) |
-- <query>}@.
insertP :: Parser (Insert RawNames Range)
insertP = do
    r <- Tok.insertP
    insertBehavior <- InsertAppend <$> Tok.intoP
    insertTable <- tableNameP
    insertColumns <- optionMaybe $ try $ do
        _ <- Tok.openP
        c:cs <- oqColumnNameP `sepBy1` Tok.commaP
        _ <- Tok.closeP
        pure (c :| cs)
    insertValues <- choice
        [ do
            s <- Tok.defaultP
            e <- Tok.valuesP
            pure $ InsertDefaultValues (s <> e)
        , do
            s <- Tok.valuesP
            _ <- Tok.openP
            x:xs <- defaultExprP `sepBy1` Tok.commaP
            e <- Tok.closeP
            let row = x :| xs
            -- there can only be one
            -- NOTE(review): a binding for `rows` (presumably
            -- `rows = row :| []`) appears to have been lost here -- TODO
            -- restore it.
            pure $ InsertExprValues (s <> e) rows
        , InsertSelectValues <$> queryP
        ]
    let insertInfo = r <> getInfo insertValues
    pure Insert{..}
-- | Parse either the DEFAULT keyword or an ordinary expression value.
defaultExprP :: Parser (DefaultExpr RawNames Range)
defaultExprP = (DefaultValue <$> Tok.defaultP) <|> (ExprValue <$> exprP)
-- | Parse @DELETE FROM t [WHERE expr]@.
deleteP :: Parser (Delete RawNames Range)
deleteP = do
    r <- Tok.deleteP
    _ <- Tok.fromP
    table <- tableNameP
    maybeExpr <- optionMaybe $ do
        _ <- Tok.whereP
        exprP
    -- range ends at the WHERE expression when present, else the table
    let r' = case maybeExpr of
            Nothing -> getInfo table
            Just expr -> getInfo expr
        info = r <> r'
    pure $ Delete info table maybeExpr
-- | Parse @TRUNCATE TABLE <name>@.
truncateP :: Parser (Truncate RawNames Range)
truncateP = do
    start <- Tok.truncateP <* Tok.tableP
    table <- tableNameP
    pure $ Truncate (start <> getInfo table) table
-- | Wrap a bare SELECT in a 'Query' node, reusing the select's range.
querySelectP :: Parser (Query RawNames Range)
querySelectP = (\ select -> QuerySelect (selectInfo select) select) <$> selectP
-- | Parse a full query: optional WITH clause, SELECTs (possibly
-- parenthesized) combined with UNION/EXCEPT at one precedence level and
-- INTERSECT binding tighter, plus optional ORDER BY / LIMIT / OFFSET.
queryP :: Parser (Query RawNames Range)
queryP = manyParensP $ do
    with <- option id withP
    query <- ((querySelectP <|> P.between Tok.openP Tok.closeP queryP) `chainl1` (exceptP <|> unionP))
                 `chainl1` intersectP
    order <- option id orderP
    limit <- option id limitP
    offset <- option id offsetP
    return $ with $ limit $ offset $ order $ query
  where
    exceptP = do
        r <- Tok.exceptP
        return $ QueryExcept r Unused
    unionP = do
        r <- Tok.unionP
        -- UNION removes duplicates unless ALL is given
        distinct <- option (Distinct True) distinctP
        return $ QueryUnion r distinct Unused
    intersectP = do
        r <- Tok.intersectP
        return $ QueryIntersect r Unused
    withP = do
        r <- Tok.withP
        withs <- cteP `sepBy1` Tok.commaP
        return $ \ query ->
            let r' = sconcat $ r :| getInfo query : map cteInfo withs
            in QueryWith r' withs query
    -- one common-table-expression: name [(col aliases)] AS (query)
    cteP = do
        (name, r) <- Tok.tableNameP
        alias <- makeTableAlias r name
        columns <- option []
            $ P.between Tok.openP Tok.closeP $ columnAliasP `sepBy1` Tok.commaP
        _ <- Tok.asP
        (query, r') <- do
            _ <- Tok.openP
            q <- queryP
            r' <- Tok.closeP
            return (q, r')
        return $ CTE (r <> r') alias columns query
    orderP = do
        (r, orders) <- orderTopLevelP
        return $ \ query -> QueryOrder (getInfo query <> r) orders query
    limitP = do
        r <- Tok.limitP
        choice
            [ Tok.numberP >>= \ (v, r') ->
                let limit = Limit (r <> r') v
                in return $ \ query -> QueryLimit (getInfo query <> r') limit query
            -- LIMIT NULL means no limit at all
            , Tok.nullP >> return id
            ]
    offsetP = do
        r <- Tok.offsetP
        Tok.numberP >>= \ (v, r') ->
            let offset = Offset (r <> r') v
            in return $ \ query -> QueryOffset (getInfo query <> r') offset query
-- | Parse an ALL/DISTINCT modifier; ALL means duplicates are kept.
distinctP :: Parser Distinct
distinctP = choice
    [ Distinct False <$ Tok.allP
    , Distinct True <$ Tok.distinctP
    ]
-- | Parse @EXPLAIN <statement>@ for INSERT, DELETE, or query statements.
explainP :: Parser (Statement Vertica RawNames Range)
explainP = do
    s <- Tok.explainP
    stmt <- choice
        [ InsertStmt <$> insertP
        , DeleteStmt <$> deleteP
        , QueryStmt <$> queryP
        ]
    pure $ ExplainStmt (s <> getInfo stmt) stmt
-- | Parse a column name and register it as a fresh column alias.
columnAliasP :: Parser (ColumnAlias Range)
columnAliasP = Tok.columnNameP >>= \ (name, r) -> makeColumnAlias r name
-- | Parse @ALTER TABLE a RENAME TO b@; the target may not be
-- schema-qualified.
alterTableP :: Parser (AlterTable RawNames Range)
alterTableP = do
    s <- Tok.alterP
    _ <- Tok.tableP
    from <- tableNameP
    _ <- Tok.renameP
    _ <- Tok.toP
    to <- (\ uqtn -> uqtn { tableNameSchema = Nothing }) <$> unqualifiedTableNameP
    pure $ AlterTableRenameTable (s <> getInfo to) from to
-- | Parse the @CREATE SCHEMA@ keywords, returning their combined range.
createSchemaPrefixP :: Parser Range
createSchemaPrefixP = (<>) <$> Tok.createP <*> Tok.schemaP
-- | Parse an optional @IF NOT EXISTS@ clause, returning its range when
-- present.
ifNotExistsP :: Parser (Maybe Range)
ifNotExistsP = optionMaybe $ do
    start <- Tok.ifP <* Tok.notP
    end <- Tok.existsP
    pure $ start <> end
-- | Parse an @IF EXISTS@ clause, returning its full range.
ifExistsP :: Parser Range
ifExistsP = (<>) <$> Tok.ifP <*> Tok.existsP
-- | Parse @CREATE SCHEMA [IF NOT EXISTS] s [AUTHORIZATION user]@.
createSchemaP :: Parser (CreateSchema RawNames Range)
createSchemaP = do
    s <- createSchemaPrefixP
    createSchemaIfNotExists <- ifNotExistsP
    (name, r) <- Tok.schemaNameP
    let createSchemaName = mkNormalSchema name r
    e <- option r (Tok.authorizationP >> snd <$> Tok.userNameP)
    let createSchemaInfo = s <> e
    return $ CreateSchema{..}
-- | Parse a parenthesized list of column definitions and/or table
-- constraints for CREATE TABLE.
createTableColumnsP :: Parser (TableDefinition Vertica RawNames Range)
createTableColumnsP = do
    s <- Tok.openP
    c:cs <- columnOrConstraintP `sepBy1` Tok.commaP
    e <- Tok.closeP
    pure $ TableColumns (s <> e) (c:|cs)
  where
    columnOrConstraintP :: Parser (ColumnOrConstraint Vertica RawNames Range)
    columnOrConstraintP = choice
        [ try $ ColumnOrConstraintColumn <$> columnDefinitionP
        , ColumnOrConstraintConstraint <$> constraintDefinitionP
        ]
    columnDefinitionP = do
        (name, s) <- Tok.columnNameP
        columnDefinitionType <- dataTypeP
        -- NULL/NOT NULL/DEFAULT modifiers may appear in any order; each
        -- update monadically rewrites the definition and may fail on
        -- conflicting settings
        updates <- many $ choice [ notNullUpdateP, nullUpdateP, defaultUpdateP ]
        let columnDefinitionInfo = s <> getInfo columnDefinitionType
            -- TODO
            columnDefinitionNull = Nothing
            columnDefinitionDefault = Nothing
            columnDefinitionName = QColumnName s None name
        foldr (>=>) pure updates ColumnDefinition{..}
    notNullUpdateP :: Parser (ColumnDefinition d r Range -> Parser (ColumnDefinition d r Range))
    notNullUpdateP = do
        r <- (<>) <$> Tok.notP <*> Tok.nullP
        pure $ \ d -> case columnDefinitionNull d of
            Nothing -> pure $ d { columnDefinitionNull = Just $ NotNull r }
            Just (Nullable _) -> fail "conflicting NULL/NOT NULL specifications on column"
            Just (NotNull _) -> pure d
    nullUpdateP :: Parser (ColumnDefinition d r Range -> Parser (ColumnDefinition d r Range))
    nullUpdateP = do
        r <- Tok.nullP
        pure $ \ d -> case columnDefinitionNull d of
            Nothing -> pure $ d { columnDefinitionNull = Just $ Nullable r }
            Just (NotNull _) -> fail "conflicting NULL/NOT NULL specifications on column"
            Just (Nullable _) -> pure d
    defaultUpdateP :: Parser (ColumnDefinition d RawNames Range -> Parser (ColumnDefinition d RawNames Range))
    defaultUpdateP = do
        _ <- Tok.defaultP
        expr <- exprP
        pure $ \ d -> case columnDefinitionDefault d of
            Nothing -> pure $ d { columnDefinitionDefault = Just expr }
            Just _ -> fail "multiple defaults for column"
    constraintDefinitionP :: Parser (ConstraintDefinition Range)
    constraintDefinitionP = ConstraintDefinition <$> tableConstraintP
-- | Parse @CREATE EXTERNAL TABLE@, returning the CREATE keyword's range
-- and the externality marker built from the EXTERNAL keyword.
createExternalTablePrefixP :: Parser (Range, Externality Range)
createExternalTablePrefixP =
    (\ s r -> (s, External r)) <$> Tok.createP <*> (Tok.externalP <* Tok.tableP)
-- | Parse @CREATE EXTERNAL TABLE ... AS COPY ...@; ingestion options are
-- recognized but only the overall range is recorded.
createExternalTableP :: Parser (CreateTable Vertica RawNames Range)
createExternalTableP = do
    (s, createTableExternality) <- createExternalTablePrefixP
    let createTablePersistence = Persistent
    createTableIfNotExists <- ifNotExistsP
    createTableName <- tableNameP
    -- TODO allow for column-name-list syntax
    _ <- optional $ do
        _ <- optional $ Tok.includeP <|> Tok.excludeP
        _ <- Tok.schemaP
        Tok.privilegesP
    _ <- Tok.asP
    e <- Tok.copyP
    -- NOTE(review): the local `fromP` below is defined but not listed in
    -- the ordered options; a `fromP` entry appears to have been lost from
    -- this list -- TODO restore it.
    e' <- consumeOrderedOptions e $
        [ ingestionColumnListP (getInfo <$> exprP)
        , ingestionColumnOptionP
        , fileStorageFormatP
        ]
    e'' <- consumeUnorderedOptions e' $
        [ Tok.withP
        , abortOnErrorP
        , delimiterAsP
        , enclosedByP
        , Tok.enforceLengthP
        , errorToleranceP
        , escapeFormatP
        , exceptionsOnNodeP
        , fileFilterP
        , nullAsP
        , fileParserP
        , recordTerminatorP
        , rejectedDataOnNodeP
        , rejectMaxP
        , skipRecordsP
        , skipBytesP
        , fileSourceP
        , trailingNullColsP
        , trimByteP
        ]
    let createTableInfo = s <> e''
        createTableExtra = Nothing
    pure CreateTable{..}
  where
    stringP :: Parser Range
    stringP = snd <$> Tok.stringP
    fromP :: Parser Range
    fromP = do
        s <- Tok.fromP
        let fileP = do
                r <- stringP
                consumeOrderedOptions r [nodeLocationP, compressionP]
        rs <- fileP `sepBy1` Tok.commaP
        return $ s <> last rs
    nodeLocationP = choice $
        [ Tok.onP >> snd <$> Tok.nodeNameP
        , Tok.onP >> Tok.anyP >> Tok.nodeP
        ]
-- | Parse @CREATE [OR REPLACE] [LOCAL TEMPORARY] VIEW@, returning the
-- covered range, the OR REPLACE range (if any), and the persistence.
createViewPrefixP :: Parser (Range, Maybe Range, Persistence Range)
createViewPrefixP = do
    s <- Tok.createP
    -- NOTE(review): despite the name, this binding captures the
    -- OR REPLACE clause, not IF NOT EXISTS
    ifNotExists <- optionMaybe $ do
        s' <- Tok.orP
        e' <- Tok.replaceP
        pure $ s' <> e'
    persistence <- option Persistent $ Temporary <$> do
        s' <- Tok.localP
        e' <- Tok.temporaryP
        pure $ s' <> e'
    e <- Tok.viewP
    pure (s <> e, ifNotExists, persistence)
-- | Parse an @{INCLUDE | EXCLUDE} [SCHEMA] PRIVILEGES@ clause.
schemaPrivilegesP :: Parser Range
schemaPrivilegesP = do
    s <- choice [ Tok.includeP, Tok.excludeP ]
    optional Tok.schemaP
    e <- Tok.privilegesP
    return $ s <> e
-- | Parse CREATE VIEW; local temporary views are placed in the session
-- schema and may not be schema-qualified.
createViewP :: Parser (CreateView RawNames Range)
createViewP = do
    (s, createViewIfNotExists, createViewPersistence) <- createViewPrefixP
    createViewName <- tableNameP >>= \case
        QTableName info Nothing view ->
            case createViewPersistence of
                Persistent -> pure $ QTableName info Nothing view
                Temporary _ -> pure $ QTableName info (pure $ QSchemaName info Nothing "<session>" SessionSchema) view
        qualifiedTableName ->
            case createViewPersistence of
                Persistent -> pure $ qualifiedTableName
                Temporary _ -> fail $ "cannot specify schema on a local temporary view"
    createViewColumns <- optionMaybe $ do
        _ <- Tok.openP
        c:cs <- unqualifiedColumnNameP `sepBy1` Tok.commaP
        _ <- Tok.closeP
        return (c:|cs)
    -- the privileges clause only applies to persistent views
    case createViewPersistence of
        Persistent -> optional schemaPrivilegesP
        Temporary _ -> pure ()
    _ <- Tok.asP
    createViewQuery <- queryP
    let createViewInfo = s <> getInfo createViewQuery
    pure CreateView{..}
  where
    unqualifiedColumnNameP = do
        (name, r) <- Tok.columnNameP
        pure $ QColumnName r None name
-- | Parse a @CREATE [[GLOBAL|LOCAL] TEMPORARY] TABLE@ statement, including
-- the column list \/ @AS query@ \/ @LIKE table@ definition, the trailing
-- table info (ordering, segmentation, k-safety, partitioning), and
-- Vertica-specific clauses (@ON COMMIT@, schema privileges, @NO PROJECTION@).
createTableP :: Parser (CreateTable Vertica RawNames Range)
createTableP = do
    s <- Tok.createP
    (createTablePersistence, isLocal) <- option (Persistent, False) $ do
        isLocal <- option False $ choice
            [ Tok.localP >> pure True
            , Tok.globalP >> pure False
            ]
        createTablePersistence <- Temporary <$> Tok.temporaryP
        pure (createTablePersistence, isLocal)
    let createTableExternality = Internal
    _ <- Tok.tableP
    createTableIfNotExists <- ifNotExistsP
    -- Local temporary tables live in the session schema and may not carry an
    -- explicit schema qualifier; unqualified names otherwise default to the
    -- @public@ schema.
    createTableName <- tableNameP >>= \case
        QTableName info Nothing table ->
            if isLocal
            then pure $ QTableName info (pure $ QSchemaName info Nothing "<session>" SessionSchema) table
            else pure $ QTableName info (pure $ QSchemaName info Nothing "public" NormalSchema) table
        qualifiedTableName ->
            if isLocal
            then fail "cannot specify schema on a local temporary table"
            else pure $ qualifiedTableName
    -- @ON COMMIT {DELETE|PRESERVE} ROWS@ is only legal for temporary tables;
    -- Parsec's `optional` discards the result either way.
    let onCommitP = case createTablePersistence of
            Persistent -> pure ()
            Temporary _ -> do
                -- TODO (T374141): do something with this
                _ <- Tok.onP
                _ <- Tok.commitP
                _ <- Tok.deleteP <|> Tok.preserveP
                void Tok.rowsP
    createTableDefinition <- choice
        [ createTableColumnsP <* optional onCommitP <* optional schemaPrivilegesP
        , try $ optional onCommitP *> optional schemaPrivilegesP *> createTableAsP
        , optional schemaPrivilegesP *> createTableLikeP
        ]
    createTableExtra <- tableInfoP
    case createTablePersistence of
        Persistent -> pure ()
        Temporary _ -> optional $ do
            _ <- Tok.noP
            void Tok.projectionP
    -- The statement range ends at the table info when present, otherwise at
    -- the table definition.
    let e = maybe (getInfo createTableDefinition) getInfo createTableExtra
        createTableInfo = s <> e
    pure CreateTable{..}
where
columnListP :: Parser (NonEmpty (UQColumnName Range))
columnListP = do
_ <- Tok.openP
c:cs <- (`sepBy1` Tok.commaP) $ do
(name, r) <- Tok.columnNameP
pure $ QColumnName r None name
_ <- Tok.closeP
pure (c:|cs)
-- Parse @LIKE table [{INCLUDING|EXCLUDING} PROJECTIONS]@ as a table
-- definition. The range extends to the PROJECTIONS keyword when present.
createTableLikeP = do
    s <- Tok.likeP
    table <- tableNameP
    e <- option (getInfo table) $ do
        -- TODO - include projection info in createTableExtra
        _ <- Tok.includingP <|> Tok.excludingP
        Tok.projectionsP
    pure $ TableLike (s <> e) table
-- Parse @AS [(col, ...)] query@ as a table definition; the query may be
-- wrapped in an extra set of parentheses.
createTableAsP = do
    start <- Tok.asP
    mColumns <- optionMaybe $ try columnListP
    query <- optionalParensP queryP
    pure $ TableAs (start <> getInfo query) mColumns query
-- | Parse the optional trailing table metadata: top-level ORDER BY,
-- segmentation, k-safety, and partitioning. Returns 'Nothing' when no
-- clause is present at all.
tableInfoP :: Parser (Maybe (TableInfo RawNames Range))
tableInfoP = do
    mOrdering <- optionMaybe orderTopLevelP
    let tableInfoOrdering = snd <$> mOrdering
    -- BUG FIX: the binding body for tableInfoEncoding was missing (only its
    -- type signature remained), which does not compile. Encodings are not
    -- parsed yet.
    let tableInfoEncoding :: Maybe (TableEncoding RawNames Range)
        tableInfoEncoding = Nothing -- TODO: parse column encodings
    tableInfoSegmentation <- optionMaybe $ choice
        [ do
            s <- Tok.unsegmentedP
            choice
                [ do
                    _ <- Tok.nodeP
                    node <- nodeNameP
                    let e = getInfo node
                    pure $ UnsegmentedOneNode (s <> e) node
                , do
                    _ <- Tok.allP
                    e <- Tok.nodesP
                    pure $ UnsegmentedAllNodes (s <> e)
                ]
        , do
            s <- Tok.segmentedP
            _ <- Tok.byP
            expr <- exprP
            list <- nodeListP
            pure $ SegmentedBy (s <> getInfo list) expr list
        ]
    tableInfoKSafety <- optionMaybe $ do
        s <- Tok.ksafeP
        choice
            [ do
                (n, e) <- integerP
                pure $ KSafety (s <> e) (Just n)
            , pure $ KSafety s Nothing
            ]
    tableInfoPartitioning <- optionMaybe $ do
        s <- Tok.partitionP
        _ <- Tok.byP
        expr <- exprP
        pure $ Partitioning (s <> getInfo expr) expr
    -- Merge the ranges of whichever clauses appeared; if none did, there is
    -- no table info.
    let infos = [ fst <$> mOrdering
                , getInfo <$> tableInfoEncoding
                , getInfo <$> tableInfoSegmentation
                , getInfo <$> tableInfoKSafety
                , getInfo <$> tableInfoPartitioning
                ]
    case getOption $ mconcat $ map Option infos of
        Nothing -> pure Nothing
        Just tableInfoInfo -> pure $ Just TableInfo{..}
dropViewPrefixP :: Parser Range
dropViewPrefixP = do
s <- Tok.dropP
e <- Tok.viewP
pure $ s <> e
dropViewP :: Parser (DropView RawNames Range)
dropViewP = do
s <- dropViewPrefixP
dropViewIfExists <- optionMaybe ifExistsP
dropViewName <- tableNameP
let dropViewInfo = s <> getInfo dropViewName
pure DropView{..}
-- | Parse @DROP TABLE [IF EXISTS] name [, name ...] [CASCADE]@. The
-- statement range extends to CASCADE when present, otherwise to the last
-- table name.
dropTableP :: Parser (DropTable RawNames Range)
dropTableP = do
    start <- Tok.dropP
    _ <- Tok.tableP
    dropTableIfExists <- optionMaybe ifExistsP
    names <- tableNameP `sepBy1` Tok.commaP
    mCascade <- optionMaybe Tok.cascadeP
    case names of
        [] -> fail "impossible: sepBy1 produced no table names"
        n : ns -> do
            let dropTableNames = n :| ns
                dropTableInfo = start <> fromMaybe (getInfo $ NE.last dropTableNames) mCascade
            pure DropTable{..}
-- | Parse a @GRANT ...@ statement by consuming every token up to the next
-- semicolon; only the covered source range is retained.
grantP :: Parser (Grant Range)
grantP = do
    start <- Tok.grantP
    rest <- many1 Tok.notSemicolonP
    pure $ Grant $ start <> last rest
-- | Parse a @REVOKE ...@ statement by consuming every token up to the next
-- semicolon; only the covered source range is retained.
revokeP :: Parser (Revoke Range)
revokeP = do
    start <- Tok.revokeP
    rest <- many1 Tok.notSemicolonP
    pure $ Revoke $ start <> last rest
beginP :: Parser Range
beginP = do
s <- choice [ do
s <- Tok.beginP
e <- option s (Tok.workP <|> Tok.transactionP)
return $ s <> e
, do
s <- Tok.startP
e <- Tok.transactionP
return $ s <> e
]
e <- consumeOrderedOptions s [isolationLevelP, transactionModeP]
return $ s <> e
where
isolationLevelP :: Parser Range
isolationLevelP = do
s <- Tok.isolationP
_ <- Tok.levelP
e <- choice [ Tok.serializableP
, Tok.repeatableP >> Tok.readP
, Tok.readP >> (Tok.committedP <|> Tok.uncommittedP)
]
return $ s <> e
transactionModeP :: Parser Range
transactionModeP = do
s <- Tok.readP
e <- Tok.onlyP <|> Tok.writeP
return $ s <> e
commitP :: Parser Range
commitP = do
s <- Tok.commitP <|> Tok.endP
e <- option s (Tok.workP <|> Tok.transactionP)
return $ s <> e
rollbackP :: Parser Range
rollbackP = do
s <- Tok.rollbackP <|> Tok.abortP
e <- option s (Tok.workP <|> Tok.transactionP)
return $ s <> e
nodeListP :: Parser (NodeList Range)
nodeListP = choice
[ do
s <- Tok.allP
e <- Tok.nodesP
offset <- optionMaybe nodeListOffsetP
let e' = maybe e getInfo offset
pure $ AllNodes (s <> e') offset
, do
s <- Tok.nodesP
n:ns <- nodeNameP `sepBy1` Tok.commaP
let e = getInfo $ last (n:ns)
pure $ Nodes (s <> e) (n:|ns)
]
nodeListOffsetP :: Parser (NodeListOffset Range)
nodeListOffsetP = do
s <- Tok.offsetP
(n, e) <- integerP
pure $ NodeListOffset (s <> e) n
nodeNameP :: Parser (Node Range)
nodeNameP = do
(node, e) <- Tok.nodeNameP
pure $ Node e node
-- | Parse a numeric token as an 'Int' together with its range, failing when
-- the token is not a plain integer literal (e.g. has a fractional part).
integerP :: Parser (Int, Range)
integerP = do
    (text, r) <- Tok.numberP
    -- `reads` must consume the entire token for it to count as an integer.
    case reads $ TL.unpack text of
        [(value, "")] -> pure (value, r)
        _ -> fail $ unwords ["unable to parse", show text, "as integer"]
selectP :: Parser (Select RawNames Range)
selectP = do
r <- Tok.selectP
selectDistinct <- option notDistinct distinctP
selectCols <- do
selections <- selectionP `sepBy1` Tok.commaP
let r' = foldl1 (<>) $ map getInfo selections
return $ SelectColumns r' selections
selectFrom <- optionMaybe fromP
selectWhere <- optionMaybe whereP
selectTimeseries <- optionMaybe timeseriesP
selectGroup <- optionMaybe groupP
selectHaving <- optionMaybe havingP
selectNamedWindow <- optionMaybe namedWindowP
let (Just selectInfo) = sconcat $ Just r :|
[ Just $ getInfo selectCols
, getInfo <$> selectFrom
, getInfo <$> selectWhere
, getInfo <$> selectTimeseries
, getInfo <$> selectGroup
, getInfo <$> selectHaving
, getInfo <$> selectNamedWindow
]
return Select{..}
where
fromP = do
r <- Tok.fromP
tablishes <- tablishP `sepBy1` Tok.commaP
let r' = foldl (<>) r $ fmap getInfo tablishes
return $ SelectFrom r' tablishes
whereP = do
r <- Tok.whereP
condition <- exprP
return $ SelectWhere (r <> getInfo condition) condition
timeseriesP = do
s <- Tok.timeseriesP
selectTimeseriesSliceName <- columnAliasP
_ <- Tok.asP
selectTimeseriesInterval <- do
(c, r) <- Tok.stringP
pure $ StringConstant r c
_ <- Tok.overP
_ <- Tok.openP
selectTimeseriesPartition <- optionMaybe partitionP
selectTimeseriesOrder <- do
_ <- Tok.orderP
_ <- Tok.byP
exprP
e <- Tok.closeP
let selectTimeseriesInfo = s <> e
pure $ SelectTimeseries {..}
toGroupingElement :: PositionOrExpr RawNames Range -> GroupingElement RawNames Range
toGroupingElement posOrExpr = GroupingElementExpr (getInfo posOrExpr) posOrExpr
groupP = do
r <- Tok.groupP
_ <- Tok.byP
exprs <- exprP `sepBy1` Tok.commaP
let selectGroupGroupingElements = map (toGroupingElement . handlePositionalReferences) exprs
selectGroupInfo = foldl (<>) r $ fmap getInfo selectGroupGroupingElements
return SelectGroup{..}
havingP = do
r <- Tok.havingP
conditions <- exprP `sepBy1` Tok.commaP
let r' = foldl (<>) r $ fmap getInfo conditions
return $ SelectHaving r' conditions
-- Parse a @WINDOW name AS (...) [, name AS (...) ...]@ clause. Each named
-- window body is either a full window expression (starting with PARTITION
-- BY) or a partial one that inherits from a previously named window.
namedWindowP =
  do
    r <- Tok.windowP
    windows <- (`sepBy1` Tok.commaP) $ do
        name <- windowNameP
        _ <- Tok.asP
        _ <- Tok.openP
        window <- choice
            [ do
                partition@(Just p) <- Just <$> partitionP
                order <- option [] orderInWindowClauseP
                -- BUG FIX: these bindings were garbled (no `let`, and they
                -- referenced undefined names); fold the order-clause ranges
                -- onto the partition range.
                let info = L.foldl' (<>) (getInfo p) (map getInfo order)
                return $ Left $ WindowExpr info partition order Nothing
            , do
                inherit <- windowNameP
                order <- option [] orderInWindowClauseP
                let info = L.foldl' (<>) (getInfo inherit) (map getInfo order)
                return $ Right $ PartialWindowExpr info inherit Nothing order Nothing
            ]
        e <- Tok.closeP
        let info = getInfo name <> e
        return $ case window of
            Left w -> NamedWindowExpr info name w
            Right pw -> NamedPartialWindowExpr info name pw
    let info = L.foldl' (<>) r $ fmap getInfo windows
    return $ SelectNamedWindow info windows
-- | Interpret a bare integer literal as a positional reference (as in
-- @GROUP BY 1@ / @ORDER BY 2@); any other expression is kept as an ordinary
-- expression.
handlePositionalReferences :: Expr RawNames Range -> PositionOrExpr RawNames Range
handlePositionalReferences e = case e of
    -- `read` cannot fail here: the guard requires the literal to be all digits.
    ConstantExpr _ (NumericConstant _ n) | TL.all isDigit n -> PositionOrExprPosition (getInfo e) (read $ TL.unpack n) Unused
    _ -> PositionOrExprExpr e
selectStarP :: Parser (Selection RawNames Range)
selectStarP = choice
[ do
r <- Tok.starP
return $ SelectStar r Nothing Unused
, try $ do
(t, r) <- Tok.tableNameP
_ <- Tok.dotP
r' <- Tok.starP
return $ SelectStar (r <> r') (Just $ QTableName r Nothing t) Unused
, try $ do
(s, t, r, r') <- qualifiedTableNameP
_ <- Tok.dotP
r'' <- Tok.starP
return $ SelectStar (r <> r'')
(Just $ QTableName r' (Just $ mkNormalSchema s r) t) Unused
]
selectionP :: Parser (Selection RawNames Range)
selectionP = try selectStarP <|> do
expr <- exprP
alias <- aliasP expr
return $ SelectExpr (getInfo alias <> getInfo expr) [alias] expr
makeColumnAlias :: Range -> Text -> Parser (ColumnAlias Range)
makeColumnAlias r alias = ColumnAlias r alias . ColumnAliasId <$> getNextCounter
makeTableAlias :: Range -> Text -> Parser (TableAlias Range)
makeTableAlias r alias = TableAlias r alias . TableAliasId <$> getNextCounter
makeDummyAlias :: Range -> Parser (ColumnAlias Range)
makeDummyAlias r = makeColumnAlias r "?column?"
-- | Derive the implicit output-column alias for an expression: column and
-- function expressions take their own name, type casts delegate to the
-- casted expression, and everything else gets the Vertica placeholder
-- @?column?@.
makeExprAlias :: Expr RawNames Range -> Parser (ColumnAlias Range)
makeExprAlias (BinOpExpr info _ _ _) = makeDummyAlias info
makeExprAlias (UnOpExpr info _ _) = makeDummyAlias info
makeExprAlias (LikeExpr info _ _ _ _) = makeDummyAlias info
makeExprAlias (CaseExpr info _ _) = makeDummyAlias info
makeExprAlias (ColumnExpr info (QColumnName _ _ name)) = makeColumnAlias info name
makeExprAlias (ConstantExpr info _) = makeDummyAlias info
makeExprAlias (InListExpr info _ _) = makeDummyAlias info
makeExprAlias (InSubqueryExpr info _ _) = makeDummyAlias info
makeExprAlias (BetweenExpr info _ _ _) = makeDummyAlias info
makeExprAlias (OverlapsExpr info _ _) = makeDummyAlias info
makeExprAlias (FunctionExpr info (QFunctionName _ _ name) _ _ _ _ _) = makeColumnAlias info name
makeExprAlias (SubqueryExpr info _) = makeDummyAlias info
makeExprAlias (ExistsExpr info _) = makeDummyAlias info
-- BUG FIX: arrayExprP and atTimeZoneExprP in this dialect can produce these
-- constructors, so they need dummy aliases rather than falling through to a
-- non-exhaustive-pattern crash. NOTE(review): arities taken from the
-- construction sites in this file — confirm against the AST declaration.
makeExprAlias (ArrayExpr info _) = makeDummyAlias info
makeExprAlias (AtTimeZoneExpr info _ _) = makeDummyAlias info
makeExprAlias (FieldAccessExpr _ _ _) = fail "Unsupported struct access in Vertica: unused datatype in this dialect"
makeExprAlias (ArrayAccessExpr _ _ _) = fail "Unsupported array access in Vertica: unused datatype in this dialect"
makeExprAlias (TypeCastExpr _ _ expr _) = makeExprAlias expr
makeExprAlias (VariableSubstitutionExpr _) = fail "Unsupported variable substitution in Vertica: unused datatype in this dialect"
aliasP :: Expr RawNames Range -> Parser (ColumnAlias Range)
aliasP expr = choice
[ try $ do
optional Tok.asP
(name, r) <- choice
[ Tok.columnNameP
, first TL.decodeUtf8 <$> Tok.stringP
]
makeColumnAlias r name
, do
_ <- Tok.asP
_ <- P.between Tok.openP Tok.closeP $ Tok.columnNameP `sepBy1` Tok.commaP
makeExprAlias expr
, makeExprAlias expr
]
exprP :: Parser (Expr RawNames Range)
exprP = orExprP
parenExprP :: Parser (Expr RawNames Range)
parenExprP = P.between Tok.openP Tok.closeP $ choice
[ try subqueryExprP
, exprP
]
subqueryExprP :: Parser (Expr RawNames Range)
subqueryExprP = do
query <- queryP
return $ SubqueryExpr (getInfo query) query
caseExprP :: Parser (Expr RawNames Range)
caseExprP = do
r <- Tok.caseP
whens <- choice
[ P.many1 $ do
_ <- Tok.whenP
condition <- exprP
_ <- Tok.thenP
result <- exprP
return (condition, result)
, do
expr <- exprP
P.many1 $ do
whenr <- Tok.whenP
nullseq <- optionMaybe Tok.nullsequalP
condition <- case nullseq of
Nothing -> BinOpExpr whenr "=" expr <$> exprP
Just nullseqr -> BinOpExpr (whenr <> nullseqr) "<=>" expr <$> exprP
_ <- Tok.thenP
result <- exprP
return (condition, result)
]
melse <- optionMaybe $ do
_ <- Tok.elseP
exprP
r' <- Tok.endP
return $ CaseExpr (r <> r') whens melse
fieldTypeP :: Parser (Expr RawNames Range)
fieldTypeP = do
(ftype, r) <- Tok.fieldTypeP
return $ ConstantExpr r $ StringConstant r $ TL.encodeUtf8 ftype
functionExprP :: Parser (Expr RawNames Range)
functionExprP = choice
[ castFuncP
, dateDiffFuncP
, extractFuncP
, try regularFuncP
, bareFuncP
]
where
castFuncP = do
r <- Tok.castP
_ <- Tok.openP
e <- exprP
_ <- Tok.asP
t <- choice
[ try $ do
i <- Tok.intervalP
(unit, u) <- Tok.datePartP
pure $ PrimitiveDataType (i <> u) ("INTERVAL " <> TL.toUpper unit) []
, dataTypeP
]
r' <- Tok.closeP
return $ TypeCastExpr (r <> r') CastFailureError e t
dateDiffFuncP = do
r <- Tok.dateDiffP
_ <- Tok.openP
datepart <- choice
[ do
_ <- Tok.openP
expr <- exprP
_ <- Tok.closeP
pure expr
, do
(string, r') <- Tok.stringP
pure $ ConstantExpr r' $ StringConstant r' string
, do
(string, r') <- Tok.datePartP
pure $ ConstantExpr r' $ StringConstant r' $ TL.encodeUtf8 string
]
_ <- Tok.commaP
startExp <- exprP
_ <- Tok.commaP
endExp <- exprP
r' <- Tok.closeP
return $ FunctionExpr (r <> r') (QFunctionName r Nothing "datediff") notDistinct [datepart, startExp, endExp] [] Nothing Nothing
extractFuncP = do
r <- Tok.extractP
_ <- Tok.openP
ftype <- fieldTypeP
_ <- Tok.fromP
expr <- exprP
r' <- Tok.closeP
return $ FunctionExpr (r <> r') (QFunctionName r Nothing "extract") notDistinct [ftype, expr] [] Nothing Nothing
regularFuncP = do
name <- choice
[ try $ do
(s, r) <- Tok.schemaNameP
_ <- Tok.dotP
(f, r') <- Tok.functionNameP
return $ QFunctionName (r <> r') (Just $ mkNormalSchema s r) f
, do
(f, r) <- Tok.functionNameP
return $ QFunctionName r Nothing f
]
(distinct, arguments, parameters, r') <- do
_ <- Tok.openP
(distinct, arguments) <- choice
[ case name of
QFunctionName _ Nothing "count" -> do
r' <- Tok.starP
return ( notDistinct
, [ConstantExpr r' $ NumericConstant r' "1"]
)
QFunctionName _ Nothing "substring" -> do
arg1 <- exprP
word <- (const True <$> Tok.fromP)
<|> (const False <$> Tok.commaP)
arg2 <- exprP
arg3 <- optionMaybe $ do
_ <- if word then Tok.forP else Tok.commaP
exprP
return ( notDistinct
, arg1 : arg2 : maybe [] pure arg3
)
_ -> fail "no special case for function"
, do
isDistinct <- distinctP
(isDistinct,) . (:[]) <$> exprP
, (notDistinct,) <$> exprP `sepBy` Tok.commaP
]
parameters <- option [] $ do
_ <- Tok.usingP
_ <- Tok.parametersP
flip sepBy1 Tok.commaP $ do
(param, paramr) <- Tok.paramNameP
_ <- Tok.equalP
expr <- exprP
pure (ParamName paramr param, expr)
optional $ Tok.ignoreP >> Tok.nullsP
r' <- Tok.closeP
return (distinct, arguments, parameters, r')
over <- optionMaybe $ try $ overP
let r'' = maybe r' getInfo over <> getInfo name
return $ FunctionExpr r'' name distinct arguments parameters Nothing over
bareFuncP = do
(v, r) <- choice
[ Tok.currentDatabaseP
, Tok.currentSchemaP
, Tok.userP
, Tok.currentUserP
, Tok.sessionUserP
, Tok.currentDateP
, Tok.currentTimeP
, Tok.currentTimestampP
, Tok.localTimeP
, Tok.localTimestampP
, Tok.sysDateP
]
pure $ FunctionExpr r (QFunctionName r Nothing v) notDistinct [] [] Nothing Nothing
orderTopLevelP :: Parser (Range, [Order RawNames Range])
orderTopLevelP = orderExprP False True
orderInWindowClauseP :: Parser [Order RawNames Range]
orderInWindowClauseP = snd <$> orderExprP True False
orderExprP :: Bool -> Bool -> Parser (Range, [Order RawNames Range])
orderExprP nullsClausePermitted positionalReferencesPermitted = do
r <- Tok.orderP
_ <- Tok.byP
orders <- helperP `sepBy1` Tok.commaP
let r' = getInfo $ last orders
return (r <> r', orders)
where
helperP :: Parser (Order RawNames Range)
helperP = do
expr <- exprP
let posOrExpr = if positionalReferencesPermitted
then handlePositionalReferences expr
else PositionOrExprExpr expr
dir <- directionP
nulls <- case (nullsClausePermitted, dir) of
(False, _) -> return $ NullsAuto Nothing
(True, OrderAsc _) -> option (NullsLast Nothing) nullsP
(True, OrderDesc _) -> option (NullsFirst Nothing) nullsP
let info = getInfo expr ?<> getInfo dir <> getInfo nulls
return $ Order info posOrExpr dir nulls
directionP :: Parser (OrderDirection (Maybe Range))
directionP = option (OrderAsc Nothing) $ choice
[ OrderAsc . Just <$> Tok.ascP
, OrderDesc . Just <$> Tok.descP
]
nullsP :: Parser (NullPosition (Maybe Range))
nullsP = do
r <- Tok.nullsP
choice
[ Tok.firstP >>= \ r' -> return $ NullsFirst $ Just $ r <> r'
, Tok.lastP >>= \ r' -> return $ NullsLast $ Just $ r <> r'
, Tok.autoP >>= \ r' -> return $ NullsAuto $ Just $ r <> r'
]
frameP :: Parser (Frame Range)
frameP = do
ftype <- choice
[ RowFrame <$> Tok.rowsP
, RangeFrame <$> Tok.rangeP
]
choice
[ do
_ <- Tok.betweenP
start <- frameBoundP
_ <- Tok.andP
end <- frameBoundP
let r = getInfo ftype <> getInfo end
return $ Frame r ftype start (Just end)
, do
start <- frameBoundP
let r = getInfo ftype <> getInfo start
return $ Frame r ftype start Nothing
]
frameBoundP :: Parser (FrameBound Range)
frameBoundP = choice
[ fmap Unbounded $ (<>)
<$> Tok.unboundedP
<*> choice [ Tok.precedingP, Tok.followingP ]
, fmap CurrentRow $ (<>) <$> Tok.currentP <*> Tok.rowP
, constantP >>= \ expr -> choice
[ Tok.precedingP >>= \ r ->
return $ Preceding (getInfo expr <> r) expr
, Tok.followingP >>= \ r ->
return $ Following (getInfo expr <> r) expr
]
]
overP :: Parser (OverSubExpr RawNames Range)
overP = do
start <- Tok.overP
subExpr <- choice
[ Left <$> windowP
, Right <$> windowNameP
]
return $ case subExpr of
Left w -> mergeWindowInfo start w
Right wn -> OverWindowName (start <> getInfo wn) wn
where
windowP :: Parser (OverSubExpr RawNames Range)
windowP = do
start' <- Tok.openP
expr <- choice
[ Left <$> windowExprP start'
, Right <$> partialWindowExprP start'
]
return $ case expr of
Left w -> OverWindowExpr (start' <> getInfo w) w
Right pw -> OverPartialWindowExpr (start' <> getInfo pw) pw
mergeWindowInfo :: Range -> OverSubExpr RawNames Range -> OverSubExpr RawNames Range
mergeWindowInfo r = \case
OverWindowExpr r' WindowExpr{..} ->
OverWindowExpr (r <> r') $ WindowExpr { windowExprInfo = windowExprInfo <> r , ..}
OverWindowName r' n -> OverWindowName (r <> r') n
OverPartialWindowExpr r' PartialWindowExpr{..} ->
OverPartialWindowExpr (r <> r') $ PartialWindowExpr { partWindowExprInfo = partWindowExprInfo <> r , ..}
windowExprP :: Range -> Parser (WindowExpr RawNames Range)
windowExprP start =
do
partition <- optionMaybe partitionP
order <- option [] orderInWindowClauseP
frame <- optionMaybe frameP
end <- Tok.closeP
let info = start <> end
return (WindowExpr info partition order frame)
partialWindowExprP :: Range -> Parser (PartialWindowExpr RawNames Range)
partialWindowExprP start =
do
inherit <- windowNameP
order <- option [] orderInWindowClauseP
frame <- optionMaybe frameP
end <- Tok.closeP
let info = start <> end
return (PartialWindowExpr info inherit Nothing order frame)
windowNameP :: Parser (WindowName Range)
windowNameP =
do
(name, r) <- Tok.windowNameP
return $ WindowName r name
partitionP :: Parser (Partition RawNames Range)
partitionP = do
r <- Tok.partitionP
choice
[ Tok.byP >> (exprP `sepBy1` Tok.commaP) >>= \ exprs ->
return $ PartitionBy
(sconcat $ r :| map getInfo exprs) exprs
, Tok.bestP >>= \ r' -> return $ PartitionBest (r <> r')
, Tok.nodesP >>= \ r' -> return $ PartitionNodes (r <> r')
]
existsExprP :: Parser (Expr RawNames Range)
existsExprP = do
r <- Tok.existsP
_ <- Tok.openP
query <- queryP
r' <- Tok.closeP
return $ ExistsExpr (r <> r') query
-- | Parse an @ARRAY[e1, e2, ...]@ literal; the element list may be empty.
arrayExprP :: Parser (Expr RawNames Range)
arrayExprP = do
    start <- Tok.arrayP
    _ <- Tok.openBracketP
    elems <- exprP `sepBy` Tok.commaP
    end <- Tok.closeBracketP
    pure $ ArrayExpr (start <> end) elems
castExprP :: Parser (Expr RawNames Range)
castExprP = foldl (flip ($)) <$> castedP <*> many castP
where
castedP :: Parser (Expr RawNames Range)
castedP = choice
[ try parenExprP
, try existsExprP
, try arrayExprP
, try functionExprP
, caseExprP
, try $ do
constant <- constantP
return $ ConstantExpr (getInfo constant) constant
, do
name <- columnNameP
return $ ColumnExpr (getInfo name) name
]
castP :: Parser (Expr RawNames Range -> Expr RawNames Range)
castP = do
_ <- Tok.castOpP
typeName <- dataTypeP
let r expr = getInfo expr <> getInfo typeName
return (\ expr -> TypeCastExpr (r expr) CastFailureError expr typeName)
atTimeZoneExprP :: Parser (Expr RawNames Range)
atTimeZoneExprP = foldl (flip ($)) <$> castExprP <*> many atTimeZoneP
where
atTimeZoneP :: Parser (Expr RawNames Range -> Expr RawNames Range)
atTimeZoneP = do
_ <- Tok.atP
_ <- Tok.timezoneP
tz <- castExprP
return $ \ expr ->
AtTimeZoneExpr (getInfo expr <> getInfo tz) expr tz
unOpP :: Text -> Parser (Expr RawNames Range -> Expr RawNames Range)
unOpP op = do
r <- Tok.symbolP op
return $ \ expr -> UnOpExpr (r <> getInfo expr) (Operator op) expr
negateExprP :: Parser (Expr RawNames Range)
negateExprP = do
neg <- option id $ choice $ map unOpP [ "+", "-", "@", "~" ]
expr <- atTimeZoneExprP
return $ neg expr
binOpP :: Text -> Parser (Expr RawNames Range -> Expr RawNames Range -> Expr RawNames Range)
binOpP op = do
r <- Tok.symbolP op
let r' lhs rhs = sconcat $ r :| map getInfo [lhs, rhs]
return $ \ lhs rhs -> BinOpExpr (r' lhs rhs) (Operator op) lhs rhs
exponentExprP :: Parser (Expr RawNames Range)
exponentExprP = negateExprP `chainl1` binOpP "^"
productExprP :: Parser (Expr RawNames Range)
productExprP = exponentExprP `chainl1` opP
where
opP = choice $ map binOpP [ "*", "//", "/", "%" ]
sumExprP :: Parser (Expr RawNames Range)
sumExprP = productExprP `chainl1` opP
where
opP = choice $ map binOpP [ "+", "-" ]
bitwiseExprP :: Parser (Expr RawNames Range)
bitwiseExprP = sumExprP `chainl1` opP
where
opP = choice $ map binOpP [ "&", "|", "#" ]
bitShiftExprP :: Parser (Expr RawNames Range)
bitShiftExprP = bitwiseExprP `chainl1` opP
where
opP = choice $ map binOpP [ "<<", ">>" ]
notP :: Parser (Expr RawNames Range -> Expr RawNames Range)
notP = (\ r -> UnOpExpr r "NOT") <$> Tok.notP
isExprP :: Parser (Expr RawNames Range)
isExprP = do
expr <- bitShiftExprP
is <- fmap (foldl (.) id) $ many $ choice
[ do
_ <- Tok.isP
not_ <- option id notP
(not_ .) <$> choice
[ Tok.trueP >>= \ r -> return (UnOpExpr r "ISTRUE")
, Tok.falseP >>= \ r -> return (UnOpExpr r "ISFALSE")
, Tok.nullP >>= \ r -> return (UnOpExpr r "ISNULL")
, Tok.unknownP >>= \ r -> return (UnOpExpr r "ISUNKNOWN")
]
, Tok.isnullP >>= \ r -> return (UnOpExpr r "ISNULL")
, Tok.notnullP >>= \ r -> return (UnOpExpr r "NOT" . UnOpExpr r "ISNULL")
]
return $ is expr
appendExprP :: Parser (Expr RawNames Range)
appendExprP = isExprP `chainl1` binOpP "||"
inExprP :: Parser (Expr RawNames Range)
inExprP = do
expr <- appendExprP
not_ <- option id notP
in_ <- foldl (.) id <$> many inP
return $ not_ $ in_ expr
where
inP = do
_ <- Tok.inP
_ <- Tok.openP
list <- choice
[ Left <$> queryP
, Right <$> exprP `sepBy1` Tok.commaP
]
r <- Tok.closeP
return $ case list of
Left query ->
\ expr -> InSubqueryExpr (getInfo expr <> r) query expr
Right constants ->
\ expr -> InListExpr (getInfo expr <> r) constants expr
betweenExprP :: Parser (Expr RawNames Range)
betweenExprP = do
expr <- inExprP
between <- foldl (.) id <$> many betweenP
return $ between expr
where
betweenP = do
_ <- Tok.betweenP
start <- bitShiftExprP
_ <- Tok.andP
end <- bitShiftExprP
let r expr = getInfo expr <> getInfo end
return $ \ expr -> BetweenExpr (r expr) start end expr
overlapsExprP :: Parser (Expr RawNames Range)
overlapsExprP = try overlapsP <|> betweenExprP
where
overlapsP = do
let pair :: Parser a -> Parser ((a, a), Range)
pair p = do
r <- Tok.openP
s <- p
_ <- Tok.commaP
e <- p
r' <- Tok.closeP
return ((s, e), r <> r')
(lhs, r) <- pair exprP
_ <- Tok.overlapsP
(rhs, r') <- pair exprP
return $ OverlapsExpr (r <> r') lhs rhs
likeExprP :: Parser (Expr RawNames Range)
likeExprP = do
expr <- overlapsExprP
like <- option id comparisonP
return $ like expr
where
comparisonP :: Parser (Expr RawNames Range -> Expr RawNames Range)
comparisonP = choice
[ do
comparison <- symbolComparisonP
pattern <- Pattern <$> overlapsExprP
return $ comparison pattern
, do
comparison <- textComparisonP
pattern <- Pattern <$> overlapsExprP
escape <- optionMaybe $ do
_ <- Tok.escapeP
Escape <$> exprP
return $ comparison escape pattern
]
symbolComparisonP :: Parser (Pattern RawNames Range -> Expr RawNames Range -> Expr RawNames Range)
symbolComparisonP = choice $
let r expr pattern = getInfo expr <> getInfo pattern
in [ do
_ <- Tok.likeOpP
return $ \ pattern expr -> LikeExpr (r pattern expr) "LIKE" Nothing pattern expr
, do
_ <- Tok.iLikeOpP
return $ \ pattern expr -> LikeExpr (r pattern expr) "ILIKE" Nothing pattern expr
, do
_ <- Tok.notLikeOpP
return $ \ pattern expr ->
UnOpExpr (r pattern expr) "NOT" $ LikeExpr (r pattern expr) "LIKE" Nothing pattern expr
, do
_ <- Tok.notILikeOpP
return $ \ pattern expr ->
UnOpExpr (r pattern expr) "NOT" $ LikeExpr (r pattern expr) "ILIKE" Nothing pattern expr
, do
_ <- Tok.regexMatchesP
return $ \ pattern expr ->
BinOpExpr (r pattern expr) "REGEX MATCHES" expr $ patternExpr pattern
, do
_ <- Tok.regexIgnoreCaseMatchesP
return $ \ pattern expr ->
BinOpExpr (r pattern expr) "REGEX IGNORE-CASE MATCHES" expr $ patternExpr pattern
, do
_ <- Tok.notRegexMatchesP
return $ \ pattern expr ->
UnOpExpr (r pattern expr) "NOT" $
BinOpExpr (r pattern expr) "REGEX MATCHES" expr $ patternExpr pattern
, do
_ <- Tok.notRegexIgnoreCaseMatchesP
return $ \ pattern expr ->
UnOpExpr (r pattern expr) "NOT" $
BinOpExpr (r pattern expr) "REGEX IGNORE-CASE MATCHES" expr $ patternExpr pattern
]
textComparisonP :: Parser (Maybe (Escape RawNames Range) -> Pattern RawNames Range -> Expr RawNames Range -> Expr RawNames Range)
textComparisonP = do
not_ <- option id notP
like <- choice
[ Tok.likeP >>= \ r -> return $ LikeExpr r "LIKE"
, Tok.iLikeP >>= \ r -> return $ LikeExpr r "ILIKE"
, Tok.likeBP >>= \ r -> return $ LikeExpr r "LIKE"
, Tok.iLikeBP >>= \ r -> return $ LikeExpr r "ILIKE"
]
return $ \ escape pattern expr -> not_ $ like escape pattern expr
mkBinOp :: (Text, a) -> Expr r a -> Expr r a -> Expr r a
mkBinOp (op, r) = BinOpExpr r (Operator op)
inequalityExprP :: Parser (Expr RawNames Range)
inequalityExprP = likeExprP `chainl1` (mkBinOp <$> Tok.inequalityOpP)
equalityExprP :: Parser (Expr RawNames Range)
equalityExprP = inequalityExprP `chainl1` (mkBinOp <$> Tok.equalityOpP)
notExprP :: Parser (Expr RawNames Range)
notExprP = do
nots <- appEndo . fold . reverse . map Endo <$> many notP
expr <- equalityExprP
return $ nots expr
andExprP :: Parser (Expr RawNames Range)
andExprP = notExprP `chainl1`
(Tok.andP >>= \ r -> return $ BinOpExpr r "AND")
orExprP :: Parser (Expr RawNames Range)
orExprP = andExprP `chainl1` (Tok.orP >>= \ r -> return (BinOpExpr r "OR"))
singleTableP :: Parser (Tablish RawNames Range)
singleTableP = try subqueryP <|> try tableP <|> parenthesizedJoinP
where
subqueryP = do
r <- Tok.openP
query <- queryP
_ <- Tok.closeP
optional Tok.asP
(name, r') <- Tok.tableNameP
alias <- makeTableAlias r' name
return $ TablishSubQuery (r <> r')
(TablishAliasesT alias)
query
tableP = do
name <- tableNameP
maybe_alias <- optionMaybe $ do
optional Tok.asP
(alias, r) <- Tok.tableNameP
makeTableAlias r alias
let r = case maybe_alias of
Nothing -> getInfo name
Just alias -> getInfo alias <> getInfo name
aliases = maybe TablishAliasesNone TablishAliasesT maybe_alias
return $ TablishTable r aliases name
parenthesizedJoinP = do
tablish <- P.between Tok.openP Tok.closeP $ do
table <- singleTableP
joins <- fmap (appEndo . fold . reverse) $ many1 $ Endo <$> joinP
return $ joins table
optional $ do
optional Tok.asP
void Tok.tableNameP
pure tablish
optionalParensP :: Parser a -> Parser a
optionalParensP p = try p <|> P.between Tok.openP Tok.closeP p
manyParensP :: Parser a -> Parser a
manyParensP p = try p <|> P.between Tok.openP Tok.closeP (manyParensP p)
tablishP :: Parser (Tablish RawNames Range)
tablishP = do
table <- singleTableP
joins <- fmap (appEndo . fold . reverse) $ many $ Endo <$> joinP
return $ joins table
joinP :: Parser (Tablish RawNames Range -> Tablish RawNames Range)
joinP = regularJoinP <|> naturalJoinP <|> crossJoinP
regularJoinP :: Parser (Tablish RawNames Range -> Tablish RawNames Range)
regularJoinP = do
maybeJoinType <- optionMaybe $ innerJoinTypeP <|> outerJoinTypeP
joinType <- Tok.joinP >>= \ r -> return $ case maybeJoinType of
Nothing -> JoinInner r
Just joinType -> (<> r) <$> joinType
rhs <- singleTableP
condition <- choice
[ do
_ <- Tok.onP <?> "condition in join clause"
JoinOn <$> exprP
, do
s <- Tok.usingP <?> "using in join clause"
_ <- Tok.openP
names <- flip sepBy1 Tok.commaP $ do
(name, r) <- Tok.columnNameP
pure $ QColumnName r None name
e <- Tok.closeP
return $ JoinUsing (s <> e) names
]
let r lhs = getInfo lhs <> getInfo rhs <> getInfo condition
return $ \ lhs ->
TablishJoin (r lhs) joinType condition lhs rhs
outerJoinTypeP :: Parser (JoinType Range)
outerJoinTypeP = do
joinType <- choice
[ Tok.leftP >>= \ r -> return $ JoinLeft r
, Tok.rightP >>= \ r -> return $ JoinRight r
, Tok.fullP >>= \ r -> return $ JoinFull r
]
optional Tok.outerP
return joinType
innerJoinTypeP :: Parser (JoinType Range)
innerJoinTypeP = Tok.innerP >>= \ r -> return $ JoinInner r
naturalJoinP :: Parser (Tablish RawNames Range -> Tablish RawNames Range)
naturalJoinP = do
r <- Tok.naturalP
maybeJoinType <- optionMaybe $ innerJoinTypeP <|> outerJoinTypeP
joinType <- Tok.joinP >>= \ r' -> return $ case maybeJoinType of
Nothing -> JoinInner r
Just joinType -> (const $ r <> r') <$> joinType
rhs <- singleTableP
let r' lhs = getInfo lhs <> getInfo rhs
return $ \ lhs -> TablishJoin (r' lhs) joinType (JoinNatural r Unused) lhs rhs
-- | Parse @CROSS JOIN rhs@, returning a function that attaches the parsed
-- right-hand side to a left-hand tablish. A cross join is modeled as an
-- inner join whose condition is the constant TRUE, so downstream passes
-- need no special case.
crossJoinP :: Parser (Tablish RawNames Range -> Tablish RawNames Range)
crossJoinP = do
    r <- Tok.crossP
    r'<- Tok.joinP
    rhs <- singleTableP
    -- The join node's range spans both operands; the synthetic TRUE
    -- condition carries the range of the CROSS JOIN keywords themselves.
    let r'' lhs = getInfo lhs <> getInfo rhs
        joinInfo = r <> r'
        true' = JoinOn $ ConstantExpr joinInfo $ BooleanConstant joinInfo True
    return $ \ lhs ->
        TablishJoin (r'' lhs) (JoinInner joinInfo) true' lhs rhs
createProjectionPrefixP :: Parser Range
createProjectionPrefixP = do
s <- Tok.createP
e <- Tok.projectionP
pure $ s <> e
createProjectionP :: Parser (CreateProjection RawNames Range)
createProjectionP = do
s <- createProjectionPrefixP
createProjectionIfNotExists <- ifNotExistsP
createProjectionName <- projectionNameP
createProjectionColumns <- optionMaybe $ try columnListP
_ <- Tok.asP
createProjectionQuery <- queryP
createProjectionSegmentation <- optionMaybe $ choice
[ do
s' <- Tok.unsegmentedP
choice
[ do
_ <- Tok.nodeP
node <- nodeNameP
let e' = getInfo node
pure $ UnsegmentedOneNode (s' <> e') node
, do
_ <- Tok.allP
e' <- Tok.nodesP
pure $ UnsegmentedAllNodes (s' <> e')
]
, do
s' <- Tok.segmentedP
_ <- Tok.byP
expr <- exprP
list <- nodeListP
pure $ SegmentedBy (s' <> getInfo list) expr list
]
createProjectionKSafety <- optionMaybe $ do
s' <- Tok.ksafeP
choice
[ do
(n, e') <- integerP
pure $ KSafety (s' <> e') (Just n)
, pure $ KSafety s' Nothing
]
let createProjectionInfo =
sconcat $ s :| catMaybes [ Just $ getInfo createProjectionQuery
, getInfo <$> createProjectionSegmentation
, getInfo <$> createProjectionKSafety
]
pure CreateProjection{..}
where
columnListP :: Parser (NonEmpty (ProjectionColumn Range))
columnListP = do
_ <- Tok.openP
c:cs <- flip sepBy1 Tok.commaP $ do
(projectionColumnName, s) <- Tok.columnNameP
projectionColumnAccessRank <- optionMaybe $ do
s' <- Tok.accessRankP
(n, e') <- integerP
pure $ AccessRank (s' <> e') n
projectionColumnEncoding <- optionMaybe $ do
_ <- Tok.encodingP
Tok.encodingTypeP
let projectionColumnInfo =
sconcat $ s :| catMaybes [ getInfo <$> projectionColumnAccessRank
, getInfo <$> projectionColumnEncoding ]
pure ProjectionColumn{..}
_ <- Tok.closeP
pure (c:|cs)
multipleRenameP :: Parser (MultipleRename RawNames Range)
multipleRenameP = do
s <- Tok.alterP
_ <- Tok.tableP
sources <- tableNameP `sepBy1` Tok.commaP
_ <- Tok.renameP
_ <- Tok.toP
targets <- map (\ uqtn -> uqtn { tableNameSchema = Nothing }) <$> unqualifiedTableNameP `sepBy1` Tok.commaP
when (length sources /= length targets) $ fail "multi-renames require the same number of sources and targets"
let e = getInfo $ last targets
pairs = zip sources targets
toAlterTableRename = \ (from, to) ->
AlterTableRenameTable (getInfo from <> getInfo to) from to
renames = map toAlterTableRename pairs
pure $ MultipleRename (s <> e) renames
setSchemaP :: Parser (SetSchema RawNames Range)
setSchemaP = do
s <- Tok.alterP
_ <- Tok.tableP
table <- tableNameP
_ <- Tok.setP
_ <- Tok.schemaP
(schema, r) <- Tok.schemaNameP
e <- option r $ choice [Tok.restrictP, Tok.cascadeP]
pure $ SetSchema (s <> e) table $ mkNormalSchema schema r
renameProjectionP :: Parser Range
renameProjectionP = do
s <- Tok.alterP
_ <- Tok.projectionP
_ <- projectionNameP
_ <- Tok.renameP
_ <- Tok.toP
to <- projectionNameP
pure $ s <> getInfo to
alterResourcePoolPrefixP :: Parser Range
alterResourcePoolPrefixP = do
s <- Tok.alterP
_ <- Tok.resourceP
e <- Tok.poolP
pure $ s <> e
alterResourcePoolP :: Parser Range
alterResourcePoolP = do
s <- alterResourcePoolPrefixP
ts <- P.many Tok.notSemicolonP
pure $ case reverse ts of
[] -> s
e:_ -> s <> e
createResourcePoolPrefixP :: Parser Range
createResourcePoolPrefixP = do
s <- Tok.createP
_ <- Tok.resourceP
e <- Tok.poolP
pure $ s <> e
createResourcePoolP :: Parser Range
createResourcePoolP = do
s <- createResourcePoolPrefixP
ts <- P.many Tok.notSemicolonP
pure $ case reverse ts of
[] -> s
e:_ -> s <> e
dropResourcePoolPrefixP :: Parser Range
dropResourcePoolPrefixP = do
s <- Tok.dropP
_ <- Tok.resourceP
e <- Tok.poolP
pure $ s <> e
dropResourcePoolP :: Parser Range
dropResourcePoolP = do
s <- dropResourcePoolPrefixP
pure $ s <> e
createFunctionPrefixP :: Parser Range
createFunctionPrefixP = do
s <- Tok.createP
_ <- optional $ Tok.orP >> Tok.replaceP
e <- choice
[ do
_ <- optional $ Tok.transformP <|> Tok.analyticP <|> Tok.aggregateP
Tok.functionP
, Tok.filterP
, Tok.parserP
, Tok.sourceP
]
pure $ s <> e
createFunctionP :: Parser Range
createFunctionP = do
s <- createFunctionPrefixP
ts <- P.many Tok.notSemicolonP
pure $ case reverse ts of
[] -> s
e:_ -> s <> e
alterTableAddConstraintP :: Parser Range
alterTableAddConstraintP = do
s <- Tok.alterP
_ <- Tok.tableP
_ <- tableNameP
_ <- Tok.addP
e <- tableConstraintP
pure $ s <> e
tableConstraintP :: Parser Range
tableConstraintP = do
s <- optionMaybe $ do
s <- Tok.constraintP
_ <- Tok.constraintNameP
return s
e <- choice
[ do
_ <- Tok.primaryP
_ <- Tok.keyP
e <- columnListP
option e (Tok.enabledP <|> Tok.disabledP)
, do
_ <- Tok.uniqueP
e <- columnListP
option e (Tok.enabledP <|> Tok.disabledP)
, do
_ <- Tok.foreignP
_ <- Tok.keyP
_ <- columnListP
_ <- Tok.referencesP
e <- getInfo <$> tableNameP
option e columnListP
, do
_ <- Tok.checkP
e <- getInfo <$> exprP
option e (Tok.enabledP <|> Tok.disabledP)
]
return (maybe e id s <> e)
where
columnListP :: Parser Range
columnListP = do
s <- Tok.openP
_ <- Tok.columnNameP `sepBy1` Tok.commaP
e <- Tok.closeP
return (s <> e)
exportToStdoutP :: Parser Range
exportToStdoutP = do
s <- Tok.exportP
_ <- Tok.toP
_ <- Tok.stdoutP
_ <- Tok.fromP
_ <- tableNameP
_ <- Tok.openP
_ <- Tok.columnNameP `sepBy1` Tok.commaP
e <- Tok.closeP
pure $ s <> e
setSessionPrefixP :: Parser Range
setSessionPrefixP = do
s <- Tok.setP
e <- Tok.sessionP
return $ s <> e
setSessionP :: Parser Range
setSessionP = do
s <- setSessionPrefixP
ts <- P.many Tok.notSemicolonP
pure $ case reverse ts of
[] -> s
e:_ -> s <> e
setTimeZoneP :: Parser Range
setTimeZoneP = do
s <- Tok.setP
_ <- Tok.timezoneP
_ <- Tok.toP
e <- choice [ Tok.defaultP
, snd <$> Tok.stringP
, Tok.intervalP >> snd <$> Tok.stringP
]
return $ s <> e
connectP :: Parser Range
connectP = do
s <- Tok.connectP
_ <- Tok.toP
_ <- Tok.verticaP
_ <- Tok.databaseNameP
_ <- Tok.userP
_ <- Tok.userNameP
_ <- Tok.passwordP
e <- snd <$> Tok.stringP <|> snd <$> starsP
e' <- option e $ do
_ <- Tok.onP
_ <- Tok.stringP
_ <- Tok.commaP
snd <$> Tok.numberP
pure $ s <> e'
where
starsP = do
rs <- P.many1 Tok.starP
let text = TL.take (fromIntegral $ length rs) $ TL.repeat '*'
r = head rs <> last rs
pure (text, r)
disconnectP :: Parser Range
disconnectP = do
s <- Tok.disconnectP
(_, e) <- Tok.databaseNameP
pure $ s <> e
createAccessPolicyP :: Parser Range
createAccessPolicyP = do
s <- Tok.createP
_ <- Tok.accessP
_ <- Tok.policyP
_ <- Tok.onP
_ <- tableNameP
_ <- Tok.forP
_ <- Tok.columnP
_ <- Tok.columnNameP
_ <- exprP
e <- choice [ Tok.enableP, Tok.disableP ]
pure $ s <> e
copyFromP :: Parser Range
copyFromP = do
s <- Tok.copyP
e <- getInfo <$> tableNameP
e' <- consumeOrderedOptions e $
[ ingestionColumnListP (getInfo <$> exprP)
, ingestionColumnOptionP
, fileStorageFormatP
]
e'' <- consumeUnorderedOptions e' $
[ do
_ <- optional Tok.withP
choice [ fileSourceP
, fileFilterP
, fileParserP
]
, delimiterAsP
, trailingNullColsP
, nullAsP
, escapeFormatP
, enclosedByP
, recordTerminatorP
, try $ skipRecordsP
, try $ skipBytesP
, trimByteP
, rejectMaxP
, rejectedDataOnNodeP
, exceptionsOnNodeP
, Tok.enforceLengthP
, errorToleranceP
, abortOnErrorP
, optional Tok.storageP >> loadMethodP
, streamNameP
, noCommitP
]
return $ s <> e''
where
onNodeP :: Range -> Parser Range
onNodeP r = do
s <- option r $ choice
[ try $ Tok.onP >> snd <$> Tok.nodeNameP
, Tok.onP >> Tok.anyP >> Tok.nodeP
]
e <- option s compressionP
return $ s <> e
fromP :: Parser Range
fromP = do
outerS <- Tok.fromP
outerE <- choice $
[ do
s <- Tok.stdinP
e <- option s compressionP
return $ s <> e
, do
(_, s) <- Tok.stringP
e <- last <$> ((onNodeP s) `sepBy1` Tok.commaP)
return $ s <> e
, do
s <- Tok.localP
e' <- choice [ do
e <- Tok.stdinP
option e compressionP
, let pathToDataP = do
e <- snd <$> Tok.stringP
option e compressionP
in last <$> (pathToDataP `sepBy1` Tok.commaP)
]
return $ s <> e'
, do
s <- Tok.verticaP
_ <- Tok.databaseNameP
_ <- Tok.dotP
e <- getInfo <$> tableNameP
e' <- option e $ do
_ <- Tok.openP
_ <- Tok.columnNameP `sepBy1` Tok.commaP
Tok.closeP
return $ s <> e'
]
return $ outerS <> outerE
showP :: Parser Range
showP = do
s <- Tok.showP
es <- many1 Tok.notSemicolonP
return $ s <> last es
mergeP :: Parser (Merge RawNames Range)
mergeP = do
r1 <- Tok.mergeP
_ <- Tok.intoP
mergeTargetTable <- tableNameP
mergeTargetAlias <- optionMaybe tableAliasP
_ <- Tok.usingP
mergeSourceTable <- tableNameP
mergeSourceAlias <- optionMaybe tableAliasP
_ <- Tok.onP
mergeCondition <- exprP
mergeUpdateDirective <- optionMaybe $ do
_ <- try $ P.lookAhead $ Tok.whenP >> Tok.matchedP
_ <- Tok.whenP
_ <- Tok.matchedP
_ <- Tok.thenP
_ <- Tok.updateP
_ <- Tok.setP
NE.fromList <$> colValP `sepBy1` Tok.commaP
(mergeInsertDirectiveColumns, mergeInsertDirectiveValues, r2) <- option (Nothing, Nothing, Just r1) $ do
_ <- Tok.whenP
_ <- Tok.notP
_ <- Tok.matchedP
_ <- Tok.thenP
_ <- Tok.insertP
cols <- optionMaybe $ NE.fromList <$> P.between Tok.openP Tok.closeP (oqColumnNameP `sepBy1` Tok.commaP)
_ <- Tok.valuesP
_ <- Tok.openP
vals <- NE.fromList <$> defaultExprP `sepBy1` Tok.commaP
e <- Tok.closeP
return (cols, Just vals, Just e)
when ((mergeUpdateDirective, mergeInsertDirectiveValues) == (Nothing, Nothing)) $
fail "MERGE requires at least one of UPDATE and INSERT"
let mLastUpdate = fmap (getInfo . snd . NE.last) mergeUpdateDirective
mLastInsert = r2
r3 = sconcat $ NE.fromList $ catMaybes [mLastUpdate, mLastInsert]
mergeInfo = r1 <> r3
return Merge{..}
where
tableAliasP :: Parser (TableAlias Range)
tableAliasP = do
(name, r) <- Tok.tableNameP
makeTableAlias r name
colValP :: Parser (ColumnRef RawNames Range, DefaultExpr RawNames Range)
colValP = do
col <- oqColumnNameP
_ <- Tok.equalP
val <- defaultExprP
return (col { columnNameTable = Nothing }, val)
|
6bfcaa23d5a9d7944177db076f28b242c8b1d3a4e8dee96b85ba502d35c3ca74 | kolmodin/binary | Get.hs | # LANGUAGE CPP , OverloadedStrings , ExistentialQuantification , BangPatterns #
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
#include "MachDeps.h"
#endif
module Main where
import Control.DeepSeq
import Control.Exception (evaluate)
import Criterion.Main
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString.Lazy as L
import Data.Bits
import Data.Char (ord)
import Data.List (foldl')
import Control.Applicative
import Data.Binary
import Data.Binary.Get
import qualified Data.Serialize.Get as Cereal
import qualified Data.Attoparsec.ByteString as A
import qualified Data.Attoparsec.ByteString.Lazy as AL
#if !MIN_VERSION_bytestring(0,10,0)
instance NFData S.ByteString
instance NFData L.ByteString where
rnf = rnf . L.toChunks
#endif
main :: IO ()
main = do
evaluate $ rnf [
rnf brackets,
rnf bracketsInChunks,
rnf bracketCount,
rnf oneMegabyte,
rnf oneMegabyteLBS,
rnf manyBytes,
rnf encodedBigInteger
]
defaultMain
[ bgroup "brackets"
[ bench "Binary 100kb, one chunk" $
whnf (checkBracket . runTest bracketParser) brackets
, bench "Binary 100kb, 100 byte chunks" $
whnf (checkBracket . runTest bracketParser) bracketsInChunks
, bench "Attoparsec lazy-bs 100kb, one chunk" $
whnf (checkBracket . runAttoL bracketParser_atto) brackets
, bench "Attoparsec lazy-bs 100kb, 100 byte chunks" $
whnf (checkBracket . runAttoL bracketParser_atto) bracketsInChunks
, bench "Attoparsec strict-bs 100kb" $
whnf (checkBracket . runAtto bracketParser_atto) $ S.concat (L.toChunks brackets)
, bench "Cereal strict-bs 100kb" $
whnf (checkBracket . runCereal bracketParser_cereal) $ S.concat (L.toChunks brackets)
]
, bgroup "comparison getStruct4, 1MB of struct of 4 Word8s"
[ bench "Attoparsec" $
whnf (runAtto (getStruct4_atto mega)) oneMegabyte
, bench "Binary" $
whnf (runTest (getStruct4 mega)) oneMegabyteLBS
, bench "Cereal" $
whnf (runCereal (getStruct4_cereal mega)) oneMegabyte
]
, bgroup "comparison getWord8, 1MB"
[ bench "Attoparsec" $
whnf (runAtto (getWord8N1_atto mega)) oneMegabyte
, bench "Binary" $
whnf (runTest (getWord8N1 mega)) oneMegabyteLBS
, bench "Cereal" $
whnf (runCereal (getWord8N1_cereal mega)) oneMegabyte
]
, bgroup "getWord8 1MB"
[ bench "chunk size 2 bytes" $
whnf (runTest (getWord8N2 mega)) oneMegabyteLBS
, bench "chunk size 4 bytes" $
whnf (runTest (getWord8N4 mega)) oneMegabyteLBS
, bench "chunk size 8 bytes" $
whnf (runTest (getWord8N8 mega)) oneMegabyteLBS
, bench "chunk size 16 bytes" $
whnf (runTest (getWord8N16 mega)) oneMegabyteLBS
]
, bgroup "getWord8 1MB Applicative"
[ bench "chunk size 2 bytes" $
whnf (runTest (getWord8N2A mega)) oneMegabyteLBS
, bench "chunk size 4 bytes" $
whnf (runTest (getWord8N4A mega)) oneMegabyteLBS
, bench "chunk size 8 bytes" $
whnf (runTest (getWord8N8A mega)) oneMegabyteLBS
, bench "chunk size 16 bytes" $
whnf (runTest (getWord8N16A mega)) oneMegabyteLBS
]
, bgroup "roll"
[ bench "foldr" $ nf (roll_foldr :: [Word8] -> Integer) manyBytes
, bench "foldl'" $ nf (roll_foldl' :: [Word8] -> Integer) manyBytes
]
, bgroup "Integer"
[ bench "decode" $ nf (decode :: L.ByteString -> Integer) encodedBigInteger
]
]
checkBracket :: Int -> Int
checkBracket x | x == bracketCount = x
| otherwise = error "argh!"
runTest :: Get a -> L.ByteString -> a
runTest decoder inp = runGet decoder inp
runCereal :: Cereal.Get a -> C8.ByteString -> a
runCereal decoder inp = case Cereal.runGet decoder inp of
Right a -> a
Left err -> error err
runAtto :: AL.Parser a -> C8.ByteString -> a
runAtto decoder inp = case A.parseOnly decoder inp of
Right a -> a
Left err -> error err
runAttoL :: Show a => AL.Parser a -> L.ByteString -> a
runAttoL decoder inp = case AL.parse decoder inp of
AL.Done _ r -> r
a -> error (show a)
-- Defs.
oneMegabyte :: S.ByteString
oneMegabyte = S.replicate mega $ fromIntegral $ ord 'a'
oneMegabyteLBS :: L.ByteString
oneMegabyteLBS = L.fromChunks [oneMegabyte]
mega :: Int
mega = 1024 * 1024
100k of brackets
bracketTest :: L.ByteString -> Int
bracketTest inp = runTest bracketParser inp
bracketCount :: Int
bracketCount = fromIntegral $ L.length brackets `div` 2
brackets :: L.ByteString
brackets = L.fromChunks [C8.concat (L.toChunks bracketsInChunks)]
bracketsInChunks :: L.ByteString
bracketsInChunks = L.fromChunks (replicate chunksOfBrackets oneChunk)
where
oneChunk = "((()((()()))((()(()()()()()()()(((()()()()(()()(()(()())))))()((())())))()())(((())())(()))))()(()))"
chunksOfBrackets = 102400 `div` S.length oneChunk
bracketParser :: Get Int
bracketParser = cont <|> return 0
where
cont = do v <- some ( do 40 <- getWord8
n <- many cont
41 <- getWord8
return $! sum n + 1)
return $! sum v
bracketParser_cereal :: Cereal.Get Int
bracketParser_cereal = cont <|> return 0
where
cont = do v <- some ( do 40 <- Cereal.getWord8
n <- many cont
41 <- Cereal.getWord8
return $! sum n + 1)
return $! sum v
bracketParser_atto :: A.Parser Int
bracketParser_atto = cont <|> return 0
where
cont = do v <- some ( do _ <- A.word8 40
n <- bracketParser_atto
_ <- A.word8 41
return $! n + 1)
return $! sum v
Strict struct of 4 Word8s
data S2 = S2 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
data S4 = S4 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
data S8 = S8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
{-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
data S16 = S16 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
{-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
{-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
{-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8 {-# UNPACK #-} !Word8
getStruct4 :: Int -> Get [S4]
getStruct4 = loop []
where loop acc 0 = return acc
loop acc n = do
!w0 <- getWord8
!w1 <- getWord8
!w2 <- getWord8
!w3 <- getWord8
let !s = S4 w0 w1 w2 w3
loop (s : acc) (n - 4)
getStruct4_cereal :: Int -> Cereal.Get [S4]
getStruct4_cereal = loop []
where loop acc 0 = return acc
loop acc n = do
!w0 <- Cereal.getWord8
!w1 <- Cereal.getWord8
!w2 <- Cereal.getWord8
!w3 <- Cereal.getWord8
let !s = S4 w0 w1 w2 w3
loop (s : acc) (n - 4)
getStruct4_atto :: Int -> A.Parser [S4]
getStruct4_atto = loop []
where loop acc 0 = return acc
loop acc n = do
!w0 <- A.anyWord8
!w1 <- A.anyWord8
!w2 <- A.anyWord8
!w3 <- A.anyWord8
let !s = S4 w0 w1 w2 w3
loop (s : acc) (n - 4)
getWord8N1 :: Int -> Get [Word8]
getWord8N1 = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- getWord8
loop (s0:s) (n-1)
getWord8N1_cereal :: Int -> Cereal.Get [Word8]
getWord8N1_cereal = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- Cereal.getWord8
loop (s0:s) (n-1)
getWord8N1_atto :: Int -> A.Parser [Word8]
getWord8N1_atto = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- A.anyWord8
loop (s0:s) (n-1)
getWord8N2 :: Int -> Get [S2]
getWord8N2 = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- getWord8
s1 <- getWord8
let !v = S2 s0 s1
loop (v:s) (n-2)
getWord8N2A :: Int -> Get [S2]
getWord8N2A = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
!v <- S2 <$> getWord8 <*> getWord8
loop (v:s) (n-2)
getWord8N4 :: Int -> Get [S4]
getWord8N4 = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- getWord8
s1 <- getWord8
s2 <- getWord8
s3 <- getWord8
let !v = S4 s0 s1 s2 s3
loop (v:s) (n-4)
getWord8N4A :: Int -> Get [S4]
getWord8N4A = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
!v <- S4 <$> getWord8 <*> getWord8 <*> getWord8 <*> getWord8
loop (v:s) (n-4)
getWord8N8 :: Int -> Get [S8]
getWord8N8 = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- getWord8
s1 <- getWord8
s2 <- getWord8
s3 <- getWord8
s4 <- getWord8
s5 <- getWord8
s6 <- getWord8
s7 <- getWord8
let !v = S8 s0 s1 s2 s3 s4 s5 s6 s7
loop (v:s) (n-8)
getWord8N8A :: Int -> Get [S8]
getWord8N8A = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
!v <- S8 <$> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
loop (v:s) (n-8)
getWord8N16 :: Int -> Get [S16]
getWord8N16 = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- getWord8
s1 <- getWord8
s2 <- getWord8
s3 <- getWord8
s4 <- getWord8
s5 <- getWord8
s6 <- getWord8
s7 <- getWord8
s8 <- getWord8
s9 <- getWord8
s10 <- getWord8
s11 <- getWord8
s12 <- getWord8
s13 <- getWord8
s14 <- getWord8
s15 <- getWord8
let !v = S16 s0 s1 s2 s3 s4 s5 s6 s7 s8 s9 s10 s11 s12 s13 s14 s15
loop (v:s) (n-16)
getWord8N16A :: Int -> Get [S16]
getWord8N16A = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
!v <- S16 <$> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
loop (v:s) (n-16)
manyBytes :: [Word8]
manyBytes = concat $ replicate 256 [0..255]
bigInteger :: Integer
bigInteger = roll_foldl' manyBytes
encodedBigInteger :: L.ByteString
encodedBigInteger = encode bigInteger
roll_foldr :: (Integral a, Bits a) => [Word8] -> a
roll_foldr = foldr unstep 0
where
unstep b a = a `shiftL` 8 .|. fromIntegral b
roll_foldl' :: (Integral a, Bits a) => [Word8] -> a
roll_foldl' = foldl' unstep 0 . reverse
where
unstep a b = a `shiftL` 8 .|. fromIntegral b
| null | https://raw.githubusercontent.com/kolmodin/binary/bccbece2e254813f86e0a04f71d9ca3cea68b3bf/benchmarks/Get.hs | haskell | Defs.
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK #
# UNPACK # | # LANGUAGE CPP , OverloadedStrings , ExistentialQuantification , BangPatterns #
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
#include "MachDeps.h"
#endif
module Main where
import Control.DeepSeq
import Control.Exception (evaluate)
import Criterion.Main
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString.Lazy as L
import Data.Bits
import Data.Char (ord)
import Data.List (foldl')
import Control.Applicative
import Data.Binary
import Data.Binary.Get
import qualified Data.Serialize.Get as Cereal
import qualified Data.Attoparsec.ByteString as A
import qualified Data.Attoparsec.ByteString.Lazy as AL
#if !MIN_VERSION_bytestring(0,10,0)
instance NFData S.ByteString
instance NFData L.ByteString where
rnf = rnf . L.toChunks
#endif
main :: IO ()
main = do
evaluate $ rnf [
rnf brackets,
rnf bracketsInChunks,
rnf bracketCount,
rnf oneMegabyte,
rnf oneMegabyteLBS,
rnf manyBytes,
rnf encodedBigInteger
]
defaultMain
[ bgroup "brackets"
[ bench "Binary 100kb, one chunk" $
whnf (checkBracket . runTest bracketParser) brackets
, bench "Binary 100kb, 100 byte chunks" $
whnf (checkBracket . runTest bracketParser) bracketsInChunks
, bench "Attoparsec lazy-bs 100kb, one chunk" $
whnf (checkBracket . runAttoL bracketParser_atto) brackets
, bench "Attoparsec lazy-bs 100kb, 100 byte chunks" $
whnf (checkBracket . runAttoL bracketParser_atto) bracketsInChunks
, bench "Attoparsec strict-bs 100kb" $
whnf (checkBracket . runAtto bracketParser_atto) $ S.concat (L.toChunks brackets)
, bench "Cereal strict-bs 100kb" $
whnf (checkBracket . runCereal bracketParser_cereal) $ S.concat (L.toChunks brackets)
]
, bgroup "comparison getStruct4, 1MB of struct of 4 Word8s"
[ bench "Attoparsec" $
whnf (runAtto (getStruct4_atto mega)) oneMegabyte
, bench "Binary" $
whnf (runTest (getStruct4 mega)) oneMegabyteLBS
, bench "Cereal" $
whnf (runCereal (getStruct4_cereal mega)) oneMegabyte
]
, bgroup "comparison getWord8, 1MB"
[ bench "Attoparsec" $
whnf (runAtto (getWord8N1_atto mega)) oneMegabyte
, bench "Binary" $
whnf (runTest (getWord8N1 mega)) oneMegabyteLBS
, bench "Cereal" $
whnf (runCereal (getWord8N1_cereal mega)) oneMegabyte
]
, bgroup "getWord8 1MB"
[ bench "chunk size 2 bytes" $
whnf (runTest (getWord8N2 mega)) oneMegabyteLBS
, bench "chunk size 4 bytes" $
whnf (runTest (getWord8N4 mega)) oneMegabyteLBS
, bench "chunk size 8 bytes" $
whnf (runTest (getWord8N8 mega)) oneMegabyteLBS
, bench "chunk size 16 bytes" $
whnf (runTest (getWord8N16 mega)) oneMegabyteLBS
]
, bgroup "getWord8 1MB Applicative"
[ bench "chunk size 2 bytes" $
whnf (runTest (getWord8N2A mega)) oneMegabyteLBS
, bench "chunk size 4 bytes" $
whnf (runTest (getWord8N4A mega)) oneMegabyteLBS
, bench "chunk size 8 bytes" $
whnf (runTest (getWord8N8A mega)) oneMegabyteLBS
, bench "chunk size 16 bytes" $
whnf (runTest (getWord8N16A mega)) oneMegabyteLBS
]
, bgroup "roll"
[ bench "foldr" $ nf (roll_foldr :: [Word8] -> Integer) manyBytes
, bench "foldl'" $ nf (roll_foldl' :: [Word8] -> Integer) manyBytes
]
, bgroup "Integer"
[ bench "decode" $ nf (decode :: L.ByteString -> Integer) encodedBigInteger
]
]
checkBracket :: Int -> Int
checkBracket x | x == bracketCount = x
| otherwise = error "argh!"
runTest :: Get a -> L.ByteString -> a
runTest decoder inp = runGet decoder inp
runCereal :: Cereal.Get a -> C8.ByteString -> a
runCereal decoder inp = case Cereal.runGet decoder inp of
Right a -> a
Left err -> error err
runAtto :: AL.Parser a -> C8.ByteString -> a
runAtto decoder inp = case A.parseOnly decoder inp of
Right a -> a
Left err -> error err
runAttoL :: Show a => AL.Parser a -> L.ByteString -> a
runAttoL decoder inp = case AL.parse decoder inp of
AL.Done _ r -> r
a -> error (show a)
oneMegabyte :: S.ByteString
oneMegabyte = S.replicate mega $ fromIntegral $ ord 'a'
oneMegabyteLBS :: L.ByteString
oneMegabyteLBS = L.fromChunks [oneMegabyte]
mega :: Int
mega = 1024 * 1024
100k of brackets
bracketTest :: L.ByteString -> Int
bracketTest inp = runTest bracketParser inp
bracketCount :: Int
bracketCount = fromIntegral $ L.length brackets `div` 2
brackets :: L.ByteString
brackets = L.fromChunks [C8.concat (L.toChunks bracketsInChunks)]
bracketsInChunks :: L.ByteString
bracketsInChunks = L.fromChunks (replicate chunksOfBrackets oneChunk)
where
oneChunk = "((()((()()))((()(()()()()()()()(((()()()()(()()(()(()())))))()((())())))()())(((())())(()))))()(()))"
chunksOfBrackets = 102400 `div` S.length oneChunk
bracketParser :: Get Int
bracketParser = cont <|> return 0
where
cont = do v <- some ( do 40 <- getWord8
n <- many cont
41 <- getWord8
return $! sum n + 1)
return $! sum v
bracketParser_cereal :: Cereal.Get Int
bracketParser_cereal = cont <|> return 0
where
cont = do v <- some ( do 40 <- Cereal.getWord8
n <- many cont
41 <- Cereal.getWord8
return $! sum n + 1)
return $! sum v
bracketParser_atto :: A.Parser Int
bracketParser_atto = cont <|> return 0
where
cont = do v <- some ( do _ <- A.word8 40
n <- bracketParser_atto
_ <- A.word8 41
return $! n + 1)
return $! sum v
Strict struct of 4 Word8s
getStruct4 :: Int -> Get [S4]
getStruct4 = loop []
where loop acc 0 = return acc
loop acc n = do
!w0 <- getWord8
!w1 <- getWord8
!w2 <- getWord8
!w3 <- getWord8
let !s = S4 w0 w1 w2 w3
loop (s : acc) (n - 4)
getStruct4_cereal :: Int -> Cereal.Get [S4]
getStruct4_cereal = loop []
where loop acc 0 = return acc
loop acc n = do
!w0 <- Cereal.getWord8
!w1 <- Cereal.getWord8
!w2 <- Cereal.getWord8
!w3 <- Cereal.getWord8
let !s = S4 w0 w1 w2 w3
loop (s : acc) (n - 4)
getStruct4_atto :: Int -> A.Parser [S4]
getStruct4_atto = loop []
where loop acc 0 = return acc
loop acc n = do
!w0 <- A.anyWord8
!w1 <- A.anyWord8
!w2 <- A.anyWord8
!w3 <- A.anyWord8
let !s = S4 w0 w1 w2 w3
loop (s : acc) (n - 4)
getWord8N1 :: Int -> Get [Word8]
getWord8N1 = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- getWord8
loop (s0:s) (n-1)
getWord8N1_cereal :: Int -> Cereal.Get [Word8]
getWord8N1_cereal = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- Cereal.getWord8
loop (s0:s) (n-1)
getWord8N1_atto :: Int -> A.Parser [Word8]
getWord8N1_atto = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- A.anyWord8
loop (s0:s) (n-1)
getWord8N2 :: Int -> Get [S2]
getWord8N2 = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- getWord8
s1 <- getWord8
let !v = S2 s0 s1
loop (v:s) (n-2)
getWord8N2A :: Int -> Get [S2]
getWord8N2A = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
!v <- S2 <$> getWord8 <*> getWord8
loop (v:s) (n-2)
getWord8N4 :: Int -> Get [S4]
getWord8N4 = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- getWord8
s1 <- getWord8
s2 <- getWord8
s3 <- getWord8
let !v = S4 s0 s1 s2 s3
loop (v:s) (n-4)
getWord8N4A :: Int -> Get [S4]
getWord8N4A = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
!v <- S4 <$> getWord8 <*> getWord8 <*> getWord8 <*> getWord8
loop (v:s) (n-4)
getWord8N8 :: Int -> Get [S8]
getWord8N8 = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- getWord8
s1 <- getWord8
s2 <- getWord8
s3 <- getWord8
s4 <- getWord8
s5 <- getWord8
s6 <- getWord8
s7 <- getWord8
let !v = S8 s0 s1 s2 s3 s4 s5 s6 s7
loop (v:s) (n-8)
getWord8N8A :: Int -> Get [S8]
getWord8N8A = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
!v <- S8 <$> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
loop (v:s) (n-8)
getWord8N16 :: Int -> Get [S16]
getWord8N16 = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
s0 <- getWord8
s1 <- getWord8
s2 <- getWord8
s3 <- getWord8
s4 <- getWord8
s5 <- getWord8
s6 <- getWord8
s7 <- getWord8
s8 <- getWord8
s9 <- getWord8
s10 <- getWord8
s11 <- getWord8
s12 <- getWord8
s13 <- getWord8
s14 <- getWord8
s15 <- getWord8
let !v = S16 s0 s1 s2 s3 s4 s5 s6 s7 s8 s9 s10 s11 s12 s13 s14 s15
loop (v:s) (n-16)
getWord8N16A :: Int -> Get [S16]
getWord8N16A = loop []
where loop s n | s `seq` n `seq` False = undefined
loop s 0 = return s
loop s n = do
!v <- S16 <$> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
<*> getWord8
loop (v:s) (n-16)
manyBytes :: [Word8]
manyBytes = concat $ replicate 256 [0..255]
bigInteger :: Integer
bigInteger = roll_foldl' manyBytes
encodedBigInteger :: L.ByteString
encodedBigInteger = encode bigInteger
roll_foldr :: (Integral a, Bits a) => [Word8] -> a
roll_foldr = foldr unstep 0
where
unstep b a = a `shiftL` 8 .|. fromIntegral b
roll_foldl' :: (Integral a, Bits a) => [Word8] -> a
roll_foldl' = foldl' unstep 0 . reverse
where
unstep a b = a `shiftL` 8 .|. fromIntegral b
|
c15d8b1e5930d1f8d26c963b84c3e141ac54cb061aad4b966d9269d5c389ddf8 | rems-project/cerberus | location_ocaml.ml | open Lexing
type cursor =
| NoCursor
| PointCursor of Lexing.position
| RegionCursor of Lexing.position * Lexing.position
type raw =
| Loc_unknown
| Loc_other of string
| Loc_point of Lexing.position
(* start, end, cursor *)
| Loc_region of Lexing.position * Lexing.position * cursor
| Loc_regions of (Lexing.position * Lexing.position) list * cursor
type t = raw
let to_raw t = t
let unknown =
Loc_unknown
let is_unknown_location = function
| Loc_unknown -> true
| _ -> false
let other str =
Loc_other str
let point pos =
Loc_point pos
let region (b, e) cur =
Loc_region (b, e, cur)
let regions xs cur =
match xs with
| [] ->
failwith "Location_ocaml.region, xs must not be []"
| _ ->
(* TODO: need to sort the regions *)
Loc_regions (xs, cur)
let with_cursor = function
| Loc_unknown
| Loc_other _
| Loc_regions ([], NoCursor) ->
Loc_unknown
| Loc_point z
| Loc_region (_, _, PointCursor z)
| Loc_region (z, _, NoCursor)
| Loc_regions (_, PointCursor z)
| Loc_regions ((z,_)::_, NoCursor) ->
Loc_point z
| Loc_region (_, _, RegionCursor (b, e))
| Loc_regions (_, RegionCursor (b, e)) ->
Loc_region (b, e, NoCursor)
(* [with_cursor_from loc1 loc2] makes a new (region location) with the region from loc1
and the cursor from loc2 if there is one, otherwise uses the beginning of loc2 as the cursor (if possible) *)
let with_cursor_from loc1 loc2 =
let cursor = match loc2 with
| Loc_unknown
| Loc_other _ ->
NoCursor
| Loc_point z ->
PointCursor z
| Loc_region (start_p, end_p, NoCursor) ->
RegionCursor (start_p, end_p)
| Loc_region (_, _, cur) ->
cur
| Loc_regions (_, z) ->
not putting a cursor because it seems arbitrary to use the first region
z in
match loc1 with
| Loc_unknown ->
begin match cursor with
| NoCursor ->
Loc_unknown
| PointCursor pos ->
Loc_point pos
| RegionCursor (b, e) ->
Loc_region (b, e, NoCursor)
end
| Loc_other str ->
Loc_other str
| Loc_point z ->
Loc_region (z, z, cursor)
| Loc_region (begin_loc, end_loc, _) ->
Loc_region (begin_loc, end_loc, cursor)
| Loc_regions (regions, _) ->
Loc_regions (regions, cursor)
let from_main_file = function
| Loc_unknown
| Loc_other _
| Loc_regions ([], _) -> false
| Loc_point pos
| Loc_region (pos, _, _)
| Loc_regions ((pos,_)::_, _) ->
let ext = Filename.extension pos.pos_fname in
ext = ".c" || ext = ".core"
let outer_bbox xs =
let (b0, e0) = match xs with
| [] ->
assert false
| (b,e) :: _ ->
(b,e) in
let pos_lt pos1 pos2 =
(* assuming pos_fname are the same *)
if pos1.pos_lnum = pos2.pos_lnum then
pos1.pos_cnum < pos2.pos_cnum
else
pos1.pos_lnum < pos2.pos_lnum in
List.fold_left (fun (bAcc, eAcc) (b, e) ->
((if pos_lt b bAcc then b else bAcc), (if pos_lt e eAcc then eAcc else e))
) (b0, e0) xs
let bbox_location = function
| [] ->
Loc_unknown
| xs ->
match begin
List.fold_left (fun (def_loc, acc) loc ->
match loc with
| Loc_unknown ->
(def_loc, acc)
| Loc_other _ ->
(loc, acc)
| Loc_point pos ->
(def_loc, (pos, pos) :: acc)
| Loc_region (pos1, pos2, _) ->
invariant from Loc_region is that < = pos2
(def_loc, (pos1, pos2) :: acc)
| Loc_regions (xs, _) ->
(def_loc, xs @ acc)
) (Loc_unknown, []) xs
end with
| (loc, []) ->
loc
| (_, xs') ->
let (b, e) = outer_bbox xs' in
Loc_region (b, e, NoCursor)
let with_regions_and_cursor locs loc_opt =
let cursor_opt = match loc_opt with
| Some (Loc_point z) -> PointCursor z
| Some (Loc_region (_, _, z))
| Some (Loc_regions (_, z)) -> z
| _ -> NoCursor
in
let pos_of_region = function
| Loc_point p -> Some (p, p)
| Loc_region (p1, p2, _) -> Some (p1, p2)
| _ -> None
in
let rec the acc = function
| Some x::xs -> the (x::acc) xs
| [] -> Some acc
| None::_ -> None
in
match the [] (List.map pos_of_region locs) with
| Some regs -> Loc_regions (regs, cursor_opt)
| None -> Loc_unknown
let to_cartesian loc =
let point_of_pos pos = Lexing.(pos.pos_lnum-1, pos.pos_cnum-pos.pos_bol) in
match loc with
| Loc_point p -> Some (point_of_pos p, (0,0))
| Loc_region (p1, p2, _) -> Some (point_of_pos p1, point_of_pos p2)
| _ -> None
let location_to_string ?(charon=false) loc =
let string_of_pos ?(shrink=false) pos =
if shrink || (charon && from_main_file loc) then
Printf.sprintf "%d:%d" pos.pos_lnum (1+pos.pos_cnum-pos.pos_bol)
else
Printf.sprintf "%s:%d:%d" pos.pos_fname pos.pos_lnum (1+pos.pos_cnum-pos.pos_bol) in
let shrink z =
if charon && from_main_file loc then
""
else
z in
match loc with
| Loc_unknown ->
"unknown location"
| Loc_other str ->
"other_location(" ^ str ^ ")"
| Loc_point pos ->
string_of_pos pos ^ ":"
| Loc_region (pos1, pos2, pos_opt) ->
string_of_pos pos1 ^ "-" ^
begin if pos1.pos_fname = pos2.pos_fname then
""
else
shrink pos2.pos_fname
end ^
begin if pos1.pos_lnum = pos2.pos_lnum then
""
else
string_of_int pos2.pos_lnum ^ ":"
end ^
string_of_int (1+pos2.pos_cnum-pos2.pos_bol)
^ begin match pos_opt with
| NoCursor -> ""
| PointCursor pos -> " (cursor: " ^ string_of_pos ~shrink:true pos ^ ")"
| RegionCursor (b, e) -> " (cursor: " ^ string_of_pos ~shrink:true b ^ " - " ^ string_of_pos ~shrink:true e ^ ")"
end
| Loc_regions (xs, _) ->
let (pos1, pos2) = outer_bbox xs in
string_of_pos pos1 ^ "-" ^
begin if pos1.pos_fname = pos2.pos_fname then
""
else
shrink pos2.pos_fname
end ^
begin if pos1.pos_lnum = pos2.pos_lnum then
""
else
string_of_int pos2.pos_lnum ^ ":"
end ^
string_of_int (1+pos2.pos_cnum-pos2.pos_bol)
module P = PPrint
open Pp_prelude
let print_location loc =
let print_lex pos =
!^"RT.position" ^^^ P.dquotes !^(pos.Lexing.pos_fname)
^^^ !^(string_of_int pos.Lexing.pos_lnum)
^^^ !^(string_of_int pos.Lexing.pos_bol)
^^^ !^(string_of_int pos.Lexing.pos_cnum)
in
let print_cursor = function
| NoCursor ->
!^ "Location_ocaml.NoCursor"
| PointCursor pos ->
!^ "Location_ocaml.PointCursor" ^^^ P.parens (print_lex pos)
| RegionCursor (b, e) ->
!^ "Location_ocaml.RegionCursor"
^^^ P.parens (print_lex b)
^^^ P.parens (print_lex e)
in
match loc with
| Loc_unknown ->
!^"Location_ocaml.unknown"
| Loc_other str ->
!^ "Location_ocaml.other" ^^ P.parens (P.dquotes !^ (String.escaped str))
| Loc_point pos ->
!^"Location_ocaml.point" ^^^ P.parens (print_lex pos)
| Loc_region (pos1, pos2, cur) ->
!^"Location_ocaml.region"
^^^ P.parens (print_lex pos1)
^^^ P.parens (print_lex pos2)
^^^ P.parens (print_cursor cur)
| Loc_regions (xs, cur) ->
let print_pair pp (x, y) = P.parens (pp x ^^ P.comma ^^^ pp y) in
let print_list pp xs = P.brackets (P.separate_map (P.semi ^^ P.space) pp xs) in
!^"Location_ocaml.regions"
^^^ P.parens (print_list (print_pair print_lex) xs)
^^^ P.parens (print_cursor cur)
open Lexing
let to_json loc =
let of_pos p =
`Assoc [("line", `Int (p.pos_lnum-1));
("ch", `Int (p.pos_cnum-p.pos_bol))] in
match loc with
| Loc_unknown ->
`Null
| Loc_other _str ->
`Null (* `String str *)
| Loc_point p ->
`Assoc [("begin", of_pos p); ("end", of_pos p)]
| Loc_region (p1, p2, _) ->
`Assoc [("begin", of_pos p1); ("end", of_pos p2)]
| Loc_regions (xs, _) ->
let (pos1, pos2) = outer_bbox xs in
`Assoc [("begin", of_pos pos1); ("end", of_pos pos2)]
open Colour
let pp_location =
let last_pos = ref Lexing.dummy_pos in
fun ?(clever = false) loc ->
let string_of_pos p =
let open Lexing in
let ret =
if !last_pos.pos_fname <> p.pos_fname then
p.pos_fname ^ ":" ^ string_of_int p.pos_lnum ^ ":" ^ string_of_int (p.pos_cnum - p.pos_bol)
else if !last_pos.pos_lnum <> p.pos_lnum then
"line:" ^ string_of_int p.pos_lnum ^ ":" ^ string_of_int (p.pos_cnum - p.pos_bol)
else
"col:" ^ string_of_int (p.pos_cnum - p.pos_bol) in
begin if clever then last_pos := p end;
ret in
let aux_region start_p end_p cur =
let mk_cursor_str () =
match cur with
| NoCursor -> ""
| PointCursor cursor_p -> " " ^ string_of_pos cursor_p
| RegionCursor (b, e) -> " " ^ string_of_pos b ^ " - " ^ string_of_pos e in
if !last_pos.pos_fname = start_p.pos_fname &&
start_p.pos_fname = end_p.pos_fname &&
start_p.pos_lnum = end_p.pos_lnum
then
let start_p_str = string_of_pos start_p in
P.angles (
!^ (ansi_format ~err:true [Yellow] (start_p_str ^ " - " ^ string_of_int (end_p.pos_cnum - end_p.pos_bol)))
) ^^ !^ (ansi_format ~err:true [Yellow] (mk_cursor_str ()))
else
let start_p_str = string_of_pos start_p in
let end_p_str = string_of_pos end_p in
P.angles (
!^ (ansi_format ~err:true [Yellow] start_p_str) ^^ P.comma ^^^
!^ (ansi_format ~err:true [Yellow] end_p_str)
) ^^ !^ (ansi_format ~err:true [Yellow] (mk_cursor_str ())) in
match loc with
| Loc_unknown ->
P.angles !^ (ansi_format ~err:true [Yellow] "unknown location")
| Loc_other str ->
P.angles !^ (ansi_format ~err:true [Yellow] ("other location (" ^ str ^ ")"))
| Loc_point pos ->
let pos_str = string_of_pos pos in
P.angles !^ (ansi_format ~err:true [Yellow] pos_str)
| Loc_region (start_p, end_p, cur) ->
aux_region start_p end_p cur
| Loc_regions (xs, cur) ->
let (start_p, end_p) = outer_bbox xs in
aux_region start_p end_p cur
let string_of_pos pos =
ansi_format ~err:true [Bold] (
Printf.sprintf "%s:%d:%d:" pos.pos_fname pos.pos_lnum (1 + pos.pos_cnum - pos.pos_bol)
)
let get_line n ic =
seek_in ic 0;
let rec aux = function
| 1 -> input_line ic
| n -> let _ = input_line ic in
aux (n-1) in
aux n
external terminal_size: unit -> (int * int) option = "terminal_size"
let string_at_line fname lnum cpos =
try
if Sys.file_exists fname then
let ic = open_in fname in
let sub l start n =
if start + n < String.length l then String.sub l start n
else Printf.sprintf "(?error: Location_ocaml.string_at_line with %S, %i-%i)"
l start n
in
let l =
let l_ = get_line lnum ic in
match terminal_size () with
| None ->
(None, l_)
| Some (_, term_col) ->
if cpos >= term_col then begin
(* The cursor position is beyond the width of the terminal *)
let mid = term_col / 2 in
let start = max 0 (cpos - mid) in
let n = String.length l_ - start in
( Some (cpos - start + 5)
, if n + 5 <= term_col then
" ..." ^ sub l_ start n
else
" ..." ^ String.sub l_ start (term_col - 5 - 3) ^ "..." )
end else if String.length l_ > term_col then
(* The cursor is within the terminal width, but the line needs
to be truncated *)
(None, sub l_ 0 (term_col - 3) ^ "...")
else
(None, l_) in
close_in ic;
Some l
else
None
with
End_of_file ->
TODO
None
let head_pos_of_location = function
| Loc_unknown ->
( "unknown location "
, "" )
| Loc_other str ->
( "other location (" ^ str ^ ") "
, "" )
| Loc_point pos ->
( string_of_pos pos
, let cpos = pos.pos_cnum - pos.pos_bol in
match string_at_line pos.pos_fname pos.pos_lnum cpos with
| Some (cpos'_opt, l) ->
let cpos = match cpos'_opt with
| Some cpos' -> cpos'
| None -> cpos in
l ^ "\n" ^
ansi_format ~err:true [Bold; Green] (String.init (cpos + 1) (fun n -> if n < cpos then ' ' else '^'))
| None ->
"" )
| Loc_region (start_p, end_p, cursor) ->
( string_of_pos start_p
, let cpos1 = start_p.pos_cnum - start_p.pos_bol in
match string_at_line start_p.pos_fname start_p.pos_lnum cpos1 with
| Some (_, l) ->
let cpos2 =
if start_p.pos_lnum = end_p.pos_lnum then
end_p.pos_cnum - end_p.pos_bol
else
String.length l in
let cursor_n = match cursor with
| PointCursor cursor_p
| RegionCursor (cursor_p, _) ->
cursor_p.pos_cnum - cursor_p.pos_bol
| NoCursor ->
cpos1 in
l ^ "\n" ^
ansi_format ~err:true [Bold; Green] (
String.init ((max cursor_n cpos2) + 1)
(fun n -> if n = cursor_n then '^' else if n >= cpos1 && n < cpos2 then '~' else if n < String.length l && l.[n] = '\t' then '\t' else ' ')
)
| None ->
"" )
| Loc_regions (xs, cursor) ->
let pos = match cursor with
| NoCursor -> fst (List.hd xs)
| PointCursor p
| RegionCursor (p, _) -> p
in
( string_of_pos pos
, let cursor_p = pos.pos_cnum - pos.pos_bol in
match string_at_line pos.pos_fname pos.pos_lnum cursor_p with
| Some (_, l) ->
let ps = List.map (fun (s, e) -> (s.pos_cnum - s.pos_bol, e.pos_cnum - e.pos_bol)) xs in
l ^ "\n" ^ ansi_format ~err:true [Bold; Green]
(String.init (String.length l)
(fun n -> if n = cursor_p then '^'
else if List.exists (fun (p1, p2) -> n >= p1 && n < p2) ps then '~'
else ' ')
)
| None -> "" )
let simple_location =
let string_of_pos pos =
Printf.sprintf "%d:%d" pos.pos_lnum (1 + pos.pos_cnum - pos.pos_bol)
in
function
| Loc_unknown ->
"<unknown location>"
| Loc_other str ->
"<other location: " ^ str ^ ">"
| Loc_point pos ->
string_of_pos pos
| Loc_region (start_p, end_p, _) ->
Printf.sprintf "<%s--%s>" (string_of_pos start_p) (string_of_pos end_p)
| Loc_regions (xs, _) ->
let (start_p, end_p) = List.hd xs in
Printf.sprintf "<%s--%s>" (string_of_pos start_p) (string_of_pos end_p)
let get_filename = function
| Loc_unknown
| Loc_regions ([], _) ->
None
| Loc_other _ ->
Some "<internal>"
| Loc_point pos
| Loc_region (pos, _, _)
| Loc_regions ((pos, _) :: _, _) ->
Some pos.pos_fname
let is_unknown = function
| Loc_unknown -> true
| _ -> false
let is_other = function
| Loc_other str -> Some str
| _ -> None
let is_library_location loc =
let excluded =
let tbl = Hashtbl.create 3 in
Hashtbl.add tbl (Cerb_runtime.in_runtime "libc/include") ();
Hashtbl.add tbl (Cerb_runtime.in_runtime "libcore") ();
Hashtbl.add tbl (Cerb_runtime.in_runtime "libcore/impls") ();
tbl in
match get_filename loc with
| Some path ->
Hashtbl.mem excluded (Filename.dirname path)
| None ->
false
(* following simple_location *)
let line_numbers = function
| Loc_unknown -> None
| Loc_other _ -> None
| Loc_point p -> Some (p.pos_lnum, p.pos_lnum)
| Loc_region (p1, p2, _) -> Some (p1.pos_lnum, p2.pos_lnum)
| Loc_regions ((p1,p2) :: _, _) -> Some (p1.pos_lnum, p2.pos_lnum)
| Loc_regions ([], _) -> None
| null | https://raw.githubusercontent.com/rems-project/cerberus/55581472e8225630c70e3e819f4cb6e39357c768/util/location_ocaml.ml | ocaml | start, end, cursor
TODO: need to sort the regions
[with_cursor_from loc1 loc2] makes a new (region location) with the region from loc1
and the cursor from loc2 if there is one, otherwise uses the beginning of loc2 as the cursor (if possible)
assuming pos_fname are the same
`String str
The cursor position is beyond the width of the terminal
The cursor is within the terminal width, but the line needs
to be truncated
following simple_location | open Lexing
type cursor =
| NoCursor
| PointCursor of Lexing.position
| RegionCursor of Lexing.position * Lexing.position
type raw =
| Loc_unknown
| Loc_other of string
| Loc_point of Lexing.position
| Loc_region of Lexing.position * Lexing.position * cursor
| Loc_regions of (Lexing.position * Lexing.position) list * cursor
type t = raw
let to_raw t = t
let unknown =
Loc_unknown
let is_unknown_location = function
| Loc_unknown -> true
| _ -> false
let other str =
Loc_other str
let point pos =
Loc_point pos
let region (b, e) cur =
Loc_region (b, e, cur)
let regions xs cur =
match xs with
| [] ->
failwith "Location_ocaml.region, xs must not be []"
| _ ->
Loc_regions (xs, cur)
let with_cursor = function
| Loc_unknown
| Loc_other _
| Loc_regions ([], NoCursor) ->
Loc_unknown
| Loc_point z
| Loc_region (_, _, PointCursor z)
| Loc_region (z, _, NoCursor)
| Loc_regions (_, PointCursor z)
| Loc_regions ((z,_)::_, NoCursor) ->
Loc_point z
| Loc_region (_, _, RegionCursor (b, e))
| Loc_regions (_, RegionCursor (b, e)) ->
Loc_region (b, e, NoCursor)
let with_cursor_from loc1 loc2 =
let cursor = match loc2 with
| Loc_unknown
| Loc_other _ ->
NoCursor
| Loc_point z ->
PointCursor z
| Loc_region (start_p, end_p, NoCursor) ->
RegionCursor (start_p, end_p)
| Loc_region (_, _, cur) ->
cur
| Loc_regions (_, z) ->
not putting a cursor because it seems arbitrary to use the first region
z in
match loc1 with
| Loc_unknown ->
begin match cursor with
| NoCursor ->
Loc_unknown
| PointCursor pos ->
Loc_point pos
| RegionCursor (b, e) ->
Loc_region (b, e, NoCursor)
end
| Loc_other str ->
Loc_other str
| Loc_point z ->
Loc_region (z, z, cursor)
| Loc_region (begin_loc, end_loc, _) ->
Loc_region (begin_loc, end_loc, cursor)
| Loc_regions (regions, _) ->
Loc_regions (regions, cursor)
let from_main_file = function
| Loc_unknown
| Loc_other _
| Loc_regions ([], _) -> false
| Loc_point pos
| Loc_region (pos, _, _)
| Loc_regions ((pos,_)::_, _) ->
let ext = Filename.extension pos.pos_fname in
ext = ".c" || ext = ".core"
let outer_bbox xs =
let (b0, e0) = match xs with
| [] ->
assert false
| (b,e) :: _ ->
(b,e) in
let pos_lt pos1 pos2 =
if pos1.pos_lnum = pos2.pos_lnum then
pos1.pos_cnum < pos2.pos_cnum
else
pos1.pos_lnum < pos2.pos_lnum in
List.fold_left (fun (bAcc, eAcc) (b, e) ->
((if pos_lt b bAcc then b else bAcc), (if pos_lt e eAcc then eAcc else e))
) (b0, e0) xs
let bbox_location = function
| [] ->
Loc_unknown
| xs ->
match begin
List.fold_left (fun (def_loc, acc) loc ->
match loc with
| Loc_unknown ->
(def_loc, acc)
| Loc_other _ ->
(loc, acc)
| Loc_point pos ->
(def_loc, (pos, pos) :: acc)
| Loc_region (pos1, pos2, _) ->
invariant from Loc_region is that < = pos2
(def_loc, (pos1, pos2) :: acc)
| Loc_regions (xs, _) ->
(def_loc, xs @ acc)
) (Loc_unknown, []) xs
end with
| (loc, []) ->
loc
| (_, xs') ->
let (b, e) = outer_bbox xs' in
Loc_region (b, e, NoCursor)
let with_regions_and_cursor locs loc_opt =
let cursor_opt = match loc_opt with
| Some (Loc_point z) -> PointCursor z
| Some (Loc_region (_, _, z))
| Some (Loc_regions (_, z)) -> z
| _ -> NoCursor
in
let pos_of_region = function
| Loc_point p -> Some (p, p)
| Loc_region (p1, p2, _) -> Some (p1, p2)
| _ -> None
in
let rec the acc = function
| Some x::xs -> the (x::acc) xs
| [] -> Some acc
| None::_ -> None
in
match the [] (List.map pos_of_region locs) with
| Some regs -> Loc_regions (regs, cursor_opt)
| None -> Loc_unknown
let to_cartesian loc =
let point_of_pos pos = Lexing.(pos.pos_lnum-1, pos.pos_cnum-pos.pos_bol) in
match loc with
| Loc_point p -> Some (point_of_pos p, (0,0))
| Loc_region (p1, p2, _) -> Some (point_of_pos p1, point_of_pos p2)
| _ -> None
let location_to_string ?(charon=false) loc =
let string_of_pos ?(shrink=false) pos =
if shrink || (charon && from_main_file loc) then
Printf.sprintf "%d:%d" pos.pos_lnum (1+pos.pos_cnum-pos.pos_bol)
else
Printf.sprintf "%s:%d:%d" pos.pos_fname pos.pos_lnum (1+pos.pos_cnum-pos.pos_bol) in
let shrink z =
if charon && from_main_file loc then
""
else
z in
match loc with
| Loc_unknown ->
"unknown location"
| Loc_other str ->
"other_location(" ^ str ^ ")"
| Loc_point pos ->
string_of_pos pos ^ ":"
| Loc_region (pos1, pos2, pos_opt) ->
string_of_pos pos1 ^ "-" ^
begin if pos1.pos_fname = pos2.pos_fname then
""
else
shrink pos2.pos_fname
end ^
begin if pos1.pos_lnum = pos2.pos_lnum then
""
else
string_of_int pos2.pos_lnum ^ ":"
end ^
string_of_int (1+pos2.pos_cnum-pos2.pos_bol)
^ begin match pos_opt with
| NoCursor -> ""
| PointCursor pos -> " (cursor: " ^ string_of_pos ~shrink:true pos ^ ")"
| RegionCursor (b, e) -> " (cursor: " ^ string_of_pos ~shrink:true b ^ " - " ^ string_of_pos ~shrink:true e ^ ")"
end
| Loc_regions (xs, _) ->
let (pos1, pos2) = outer_bbox xs in
string_of_pos pos1 ^ "-" ^
begin if pos1.pos_fname = pos2.pos_fname then
""
else
shrink pos2.pos_fname
end ^
begin if pos1.pos_lnum = pos2.pos_lnum then
""
else
string_of_int pos2.pos_lnum ^ ":"
end ^
string_of_int (1+pos2.pos_cnum-pos2.pos_bol)
module P = PPrint
open Pp_prelude
let print_location loc =
let print_lex pos =
!^"RT.position" ^^^ P.dquotes !^(pos.Lexing.pos_fname)
^^^ !^(string_of_int pos.Lexing.pos_lnum)
^^^ !^(string_of_int pos.Lexing.pos_bol)
^^^ !^(string_of_int pos.Lexing.pos_cnum)
in
let print_cursor = function
| NoCursor ->
!^ "Location_ocaml.NoCursor"
| PointCursor pos ->
!^ "Location_ocaml.PointCursor" ^^^ P.parens (print_lex pos)
| RegionCursor (b, e) ->
!^ "Location_ocaml.RegionCursor"
^^^ P.parens (print_lex b)
^^^ P.parens (print_lex e)
in
match loc with
| Loc_unknown ->
!^"Location_ocaml.unknown"
| Loc_other str ->
!^ "Location_ocaml.other" ^^ P.parens (P.dquotes !^ (String.escaped str))
| Loc_point pos ->
!^"Location_ocaml.point" ^^^ P.parens (print_lex pos)
| Loc_region (pos1, pos2, cur) ->
!^"Location_ocaml.region"
^^^ P.parens (print_lex pos1)
^^^ P.parens (print_lex pos2)
^^^ P.parens (print_cursor cur)
| Loc_regions (xs, cur) ->
let print_pair pp (x, y) = P.parens (pp x ^^ P.comma ^^^ pp y) in
let print_list pp xs = P.brackets (P.separate_map (P.semi ^^ P.space) pp xs) in
!^"Location_ocaml.regions"
^^^ P.parens (print_list (print_pair print_lex) xs)
^^^ P.parens (print_cursor cur)
open Lexing
let to_json loc =
let of_pos p =
`Assoc [("line", `Int (p.pos_lnum-1));
("ch", `Int (p.pos_cnum-p.pos_bol))] in
match loc with
| Loc_unknown ->
`Null
| Loc_other _str ->
| Loc_point p ->
`Assoc [("begin", of_pos p); ("end", of_pos p)]
| Loc_region (p1, p2, _) ->
`Assoc [("begin", of_pos p1); ("end", of_pos p2)]
| Loc_regions (xs, _) ->
let (pos1, pos2) = outer_bbox xs in
`Assoc [("begin", of_pos pos1); ("end", of_pos pos2)]
open Colour
let pp_location =
let last_pos = ref Lexing.dummy_pos in
fun ?(clever = false) loc ->
let string_of_pos p =
let open Lexing in
let ret =
if !last_pos.pos_fname <> p.pos_fname then
p.pos_fname ^ ":" ^ string_of_int p.pos_lnum ^ ":" ^ string_of_int (p.pos_cnum - p.pos_bol)
else if !last_pos.pos_lnum <> p.pos_lnum then
"line:" ^ string_of_int p.pos_lnum ^ ":" ^ string_of_int (p.pos_cnum - p.pos_bol)
else
"col:" ^ string_of_int (p.pos_cnum - p.pos_bol) in
begin if clever then last_pos := p end;
ret in
let aux_region start_p end_p cur =
let mk_cursor_str () =
match cur with
| NoCursor -> ""
| PointCursor cursor_p -> " " ^ string_of_pos cursor_p
| RegionCursor (b, e) -> " " ^ string_of_pos b ^ " - " ^ string_of_pos e in
if !last_pos.pos_fname = start_p.pos_fname &&
start_p.pos_fname = end_p.pos_fname &&
start_p.pos_lnum = end_p.pos_lnum
then
let start_p_str = string_of_pos start_p in
P.angles (
!^ (ansi_format ~err:true [Yellow] (start_p_str ^ " - " ^ string_of_int (end_p.pos_cnum - end_p.pos_bol)))
) ^^ !^ (ansi_format ~err:true [Yellow] (mk_cursor_str ()))
else
let start_p_str = string_of_pos start_p in
let end_p_str = string_of_pos end_p in
P.angles (
!^ (ansi_format ~err:true [Yellow] start_p_str) ^^ P.comma ^^^
!^ (ansi_format ~err:true [Yellow] end_p_str)
) ^^ !^ (ansi_format ~err:true [Yellow] (mk_cursor_str ())) in
match loc with
| Loc_unknown ->
P.angles !^ (ansi_format ~err:true [Yellow] "unknown location")
| Loc_other str ->
P.angles !^ (ansi_format ~err:true [Yellow] ("other location (" ^ str ^ ")"))
| Loc_point pos ->
let pos_str = string_of_pos pos in
P.angles !^ (ansi_format ~err:true [Yellow] pos_str)
| Loc_region (start_p, end_p, cur) ->
aux_region start_p end_p cur
| Loc_regions (xs, cur) ->
let (start_p, end_p) = outer_bbox xs in
aux_region start_p end_p cur
let string_of_pos pos =
ansi_format ~err:true [Bold] (
Printf.sprintf "%s:%d:%d:" pos.pos_fname pos.pos_lnum (1 + pos.pos_cnum - pos.pos_bol)
)
let get_line n ic =
seek_in ic 0;
let rec aux = function
| 1 -> input_line ic
| n -> let _ = input_line ic in
aux (n-1) in
aux n
external terminal_size: unit -> (int * int) option = "terminal_size"
let string_at_line fname lnum cpos =
try
if Sys.file_exists fname then
let ic = open_in fname in
let sub l start n =
if start + n < String.length l then String.sub l start n
else Printf.sprintf "(?error: Location_ocaml.string_at_line with %S, %i-%i)"
l start n
in
let l =
let l_ = get_line lnum ic in
match terminal_size () with
| None ->
(None, l_)
| Some (_, term_col) ->
if cpos >= term_col then begin
let mid = term_col / 2 in
let start = max 0 (cpos - mid) in
let n = String.length l_ - start in
( Some (cpos - start + 5)
, if n + 5 <= term_col then
" ..." ^ sub l_ start n
else
" ..." ^ String.sub l_ start (term_col - 5 - 3) ^ "..." )
end else if String.length l_ > term_col then
(None, sub l_ 0 (term_col - 3) ^ "...")
else
(None, l_) in
close_in ic;
Some l
else
None
with
End_of_file ->
TODO
None
let head_pos_of_location = function
| Loc_unknown ->
( "unknown location "
, "" )
| Loc_other str ->
( "other location (" ^ str ^ ") "
, "" )
| Loc_point pos ->
( string_of_pos pos
, let cpos = pos.pos_cnum - pos.pos_bol in
match string_at_line pos.pos_fname pos.pos_lnum cpos with
| Some (cpos'_opt, l) ->
let cpos = match cpos'_opt with
| Some cpos' -> cpos'
| None -> cpos in
l ^ "\n" ^
ansi_format ~err:true [Bold; Green] (String.init (cpos + 1) (fun n -> if n < cpos then ' ' else '^'))
| None ->
"" )
| Loc_region (start_p, end_p, cursor) ->
( string_of_pos start_p
, let cpos1 = start_p.pos_cnum - start_p.pos_bol in
match string_at_line start_p.pos_fname start_p.pos_lnum cpos1 with
| Some (_, l) ->
let cpos2 =
if start_p.pos_lnum = end_p.pos_lnum then
end_p.pos_cnum - end_p.pos_bol
else
String.length l in
let cursor_n = match cursor with
| PointCursor cursor_p
| RegionCursor (cursor_p, _) ->
cursor_p.pos_cnum - cursor_p.pos_bol
| NoCursor ->
cpos1 in
l ^ "\n" ^
ansi_format ~err:true [Bold; Green] (
String.init ((max cursor_n cpos2) + 1)
(fun n -> if n = cursor_n then '^' else if n >= cpos1 && n < cpos2 then '~' else if n < String.length l && l.[n] = '\t' then '\t' else ' ')
)
| None ->
"" )
| Loc_regions (xs, cursor) ->
let pos = match cursor with
| NoCursor -> fst (List.hd xs)
| PointCursor p
| RegionCursor (p, _) -> p
in
( string_of_pos pos
, let cursor_p = pos.pos_cnum - pos.pos_bol in
match string_at_line pos.pos_fname pos.pos_lnum cursor_p with
| Some (_, l) ->
let ps = List.map (fun (s, e) -> (s.pos_cnum - s.pos_bol, e.pos_cnum - e.pos_bol)) xs in
l ^ "\n" ^ ansi_format ~err:true [Bold; Green]
(String.init (String.length l)
(fun n -> if n = cursor_p then '^'
else if List.exists (fun (p1, p2) -> n >= p1 && n < p2) ps then '~'
else ' ')
)
| None -> "" )
let simple_location =
let string_of_pos pos =
Printf.sprintf "%d:%d" pos.pos_lnum (1 + pos.pos_cnum - pos.pos_bol)
in
function
| Loc_unknown ->
"<unknown location>"
| Loc_other str ->
"<other location: " ^ str ^ ">"
| Loc_point pos ->
string_of_pos pos
| Loc_region (start_p, end_p, _) ->
Printf.sprintf "<%s--%s>" (string_of_pos start_p) (string_of_pos end_p)
| Loc_regions (xs, _) ->
let (start_p, end_p) = List.hd xs in
Printf.sprintf "<%s--%s>" (string_of_pos start_p) (string_of_pos end_p)
let get_filename = function
| Loc_unknown
| Loc_regions ([], _) ->
None
| Loc_other _ ->
Some "<internal>"
| Loc_point pos
| Loc_region (pos, _, _)
| Loc_regions ((pos, _) :: _, _) ->
Some pos.pos_fname
let is_unknown = function
| Loc_unknown -> true
| _ -> false
let is_other = function
| Loc_other str -> Some str
| _ -> None
let is_library_location loc =
let excluded =
let tbl = Hashtbl.create 3 in
Hashtbl.add tbl (Cerb_runtime.in_runtime "libc/include") ();
Hashtbl.add tbl (Cerb_runtime.in_runtime "libcore") ();
Hashtbl.add tbl (Cerb_runtime.in_runtime "libcore/impls") ();
tbl in
match get_filename loc with
| Some path ->
Hashtbl.mem excluded (Filename.dirname path)
| None ->
false
let line_numbers = function
| Loc_unknown -> None
| Loc_other _ -> None
| Loc_point p -> Some (p.pos_lnum, p.pos_lnum)
| Loc_region (p1, p2, _) -> Some (p1.pos_lnum, p2.pos_lnum)
| Loc_regions ((p1,p2) :: _, _) -> Some (p1.pos_lnum, p2.pos_lnum)
| Loc_regions ([], _) -> None
|
23059dbc39474dca079a6a54b65e7a30175088dc881c6f2c34615d3cc0c40321 | mfoemmel/erlang-otp | escript.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2007 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
-module(escript).
%% Useful functions that can be called from scripts.
-export([script_name/0, foldl/3]).
%% Internal API.
-export([start/0, start/1]).
-record(state, {file,
module,
forms_or_bin,
source,
n_errors,
mode,
exports_main,
has_records}).
script_name() ->
[ScriptName|_] = init:get_plain_arguments(),
ScriptName.
Apply Fun(Name , , GetBin , Acc ) for each file in the escript .
%%
Fun/2 must return a new accumulator which is passed to the next call .
%% The function returns the final value of the accumulator. Acc0 is
%% returned if the escript contain an empty archive.
%%
%% GetInfo/0 is a fun that returns a #file_info{} record for the file.
%% GetBin/0 is a fun that returns a the contents of the file as a binary.
%%
An escript may contain erlang code , beam code or an archive :
%%
archive - the Fun/2 will be applied for each file in the archive
beam - the Fun/2 will be applied once and returns the file
%% info for the (entire) escript file
erl - the Fun/2 will be applied once , returns the file
info for the ( entire ) escript file and the returns
%% the compiled beam code
%%-spec foldl(fun((string(),
%% fun(() -> #file_info()),
%% fun(() -> binary() -> term()),
%% term()) -> term()),
%% term(),
%% string()).
foldl(Fun, Acc0, File) when is_function(Fun, 4) ->
case parse_file(File, false) of
{text, _, Forms, _Mode} when is_list(Forms) ->
GetInfo = fun() -> file:read_file_info(File) end,
GetBin =
fun() ->
case compile:forms(Forms, [return_errors, debug_info]) of
{ok, _, BeamBin} ->
BeamBin;
{error, _Errors, _Warnings} ->
fatal("There were compilation errors.")
end
end,
try
{ok, Fun(".", GetInfo, GetBin, Acc0)}
catch
throw:Reason ->
{error, Reason}
end;
{beam, _, BeamBin, _Mode} when is_binary(BeamBin) ->
GetInfo = fun() -> file:read_file_info(File) end,
GetBin = fun() -> BeamBin end,
try
{ok, Fun(".", GetInfo, GetBin, Acc0)}
catch
throw:Reason ->
{error, Reason}
end;
{archive, _, ArchiveBin, _Mode} when is_binary(ArchiveBin) ->
ZipFun =
fun({Name, GetInfo, GetBin}, A) ->
A2 = Fun(Name, GetInfo, GetBin, A),
{true, false, A2}
end,
case prim_zip:open(ZipFun, Acc0, {File, ArchiveBin}) of
{ok, PrimZip, Res} ->
ok = prim_zip:close(PrimZip),
{ok, Res};
{error, bad_eocd} ->
{error, "Not an archive file"};
{error, Reason} ->
{error, Reason}
end
end.
%%
%% Internal API.
%%
start() ->
start([]).
start(EscriptOptions) ->
try
%% Commands run using -run or -s are run in a process
%% trap_exit set to false. Because this behaviour is
surprising for users of escript , make sure to reset
%% trap_exit to false.
process_flag(trap_exit, false),
case init:get_plain_arguments() of
[File|Args] ->
parse_and_run(File, Args, EscriptOptions);
[] ->
io:format("escript: Missing filename\n", []),
my_halt(127)
end
catch
throw:Str ->
io:format("escript: ~s\n", [Str]),
my_halt(127);
_:Reason ->
io:format("escript: Internal error: ~p\n", [Reason]),
io:format("~p\n", [erlang:get_stacktrace()]),
my_halt(127)
end.
parse_and_run(File, Args, Options) ->
CheckOnly = lists:member("s", Options),
{Source, Module, FormsOrBin, Mode} = parse_file(File, CheckOnly),
Mode2 =
case lists:member("d", Options) of
true ->
debug;
false ->
case lists:member("c", Options) of
true ->
compile;
false ->
case lists:member("i", Options) of
true -> interpret;
false -> Mode
end
end
end,
if
is_list(FormsOrBin) ->
case Mode2 of
interpret ->
interpret(FormsOrBin, File, Args);
compile ->
case compile:forms(FormsOrBin, [report]) of
{ok, Module, BeamBin} ->
{module, Module} = code:load_binary(Module, File, BeamBin),
run(Module, Args);
_Other ->
fatal("There were compilation errors.")
end;
debug ->
case compile:forms(FormsOrBin, [report, debug_info]) of
{ok,Module,BeamBin} ->
{module, Module} = code:load_binary(Module, File, BeamBin),
debug(Module, {Module, File, File, BeamBin}, Args);
_Other ->
fatal("There were compilation errors.")
end
end;
is_binary(FormsOrBin) ->
case Source of
archive ->
case code:set_primary_archive(File, FormsOrBin) of
ok when CheckOnly ->
case code:load_file(Module) of
{module, _} ->
case erlang:function_exported(Module, main, 1) of
true ->
my_halt(0);
false ->
Text = lists:concat(["Function ", Module, ":main/1 is not exported"]),
fatal(Text)
end;
_ ->
Text = lists:concat(["Cannot load module ", Module, " from archive"]),
fatal(Text)
end;
ok ->
case Mode2 of
run -> run(Module, Args);
debug -> debug(Module, Module, Args)
end;
{error, bad_eocd} ->
fatal("Not an archive file");
{error, Reason} ->
fatal(Reason)
end;
beam ->
case Mode2 of
run ->
{module, Module} = code:load_binary(Module, File, FormsOrBin),
run(Module, Args);
debug ->
[Base | Rest] = lists:reverse(filename:split(File)),
Base2 = filename:basename(Base, code:objfile_extension()),
Rest2 =
case Rest of
["ebin" | Top] -> ["src" | Top];
_ -> Rest
end,
SrcFile = filename:join(lists:reverse([Base2 ++ ".erl" | Rest2])),
debug(Module, {Module, SrcFile, File, FormsOrBin}, Args)
end
end
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Parse script
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
parse_file(File, CheckOnly) ->
S = #state{file = File,
n_errors = 0,
mode = interpret,
exports_main = false,
has_records = false},
{ok, Fd} =
case file:open(File, [read]) of
{ok, Fd0} ->
{ok, Fd0};
{error, R} ->
fatal(lists:concat([file:format_error(R), ": '", File, "'"]))
end,
{HeaderSz, StartLine, FirstBodyLine} = skip_header(Fd, 1),
#state{mode = Mode,
source = Source,
module = Module,
forms_or_bin = FormsOrBin} =
case FirstBodyLine of
[$P, $K | _] ->
%% Archive file
ok = file:close(Fd),
parse_archive(S, File, HeaderSz);
[$F, $O, $R, $1 | _] ->
%% Beam file
ok = file:close(Fd),
parse_beam(S, File, HeaderSz, CheckOnly);
_ ->
%% Source code
parse_source(S, File, Fd, StartLine, HeaderSz, CheckOnly)
end,
{Source, Module, FormsOrBin, Mode}.
%% Skip header and return first body line
skip_header(P, LineNo) ->
Skip shebang on first line
{ok, HeaderSz0} = file:position(P, cur),
Line1 = get_line(P),
case Line1 of
[$\#, $\! | _] ->
%% Shebang
{ok, HeaderSz1} = file:position(P, cur),
Look for special comment on second line
Line2 = get_line(P),
{ok, HeaderSz2} = file:position(P, cur),
case Line2 of
[$\%, $\%, $\! | _] ->
Skip special comment on second line
Line3 = get_line(P),
{HeaderSz2, LineNo + 2, Line3};
_ ->
Look for special comment on third line
Line3 = get_line(P),
{ok, HeaderSz3} = file:position(P, cur),
case Line3 of
[$\%, $\%, $\! | _] ->
Skip special comment on third line
Line4 = get_line(P),
{HeaderSz3, LineNo + 3, Line4};
_ ->
Just skip shebang on first line
{HeaderSz1, LineNo + 1, Line2}
end
end;
_ ->
%% No shebang. Assume that there is no header.
{HeaderSz0, LineNo, Line1}
end.
get_line(P) ->
case io:get_line(P, '') of
eof ->
fatal("Premature end of file reached");
Line ->
Line
end.
parse_archive(S, File, HeaderSz) ->
case file:read_file(File) of
{ok, <<_FirstLine:HeaderSz/binary, Bin/binary>>} ->
Mod =
case init:get_argument(escript) of
{ok, [["main", M]]} ->
%% Use explicit module name
list_to_atom(M);
_ ->
%% Use escript name without extension as module name
RevBase = lists:reverse(filename:basename(File)),
RevBase2 =
case lists:dropwhile(fun(X) -> X =/= $. end, RevBase) of
[$. | Rest] -> Rest;
[] -> RevBase
end,
list_to_atom(lists:reverse(RevBase2))
end,
S#state{source = archive,
mode = run,
module = Mod,
forms_or_bin = Bin};
{ok, _} ->
fatal("Illegal archive format");
{error, Reason} ->
fatal(file:format_error(Reason))
end.
parse_beam(S, File, HeaderSz, CheckOnly) ->
{ok, <<_FirstLine:HeaderSz/binary, Bin/binary>>} =
file:read_file(File),
case beam_lib:chunks(Bin, [exports]) of
{ok, {Module, [{exports, Exports}]}} ->
case CheckOnly of
true ->
case lists:member({main, 1}, Exports) of
true ->
my_halt(0);
false ->
Text = lists:concat(["Function ", Module, ":main/1 is not exported"]),
fatal(Text)
end;
false ->
S#state{source = beam,
mode = run,
module = Module,
forms_or_bin = Bin}
end;
{error, beam_lib, Reason} when is_tuple(Reason) ->
fatal(element(1, Reason));
{error, beam_lib, Reason} ->
fatal(Reason)
end.
parse_source(S, File, Fd, StartLine, HeaderSz, CheckOnly) ->
{PreDefMacros, Module} = pre_def_macros(File),
IncludePath = [],
{ok, _} = file:position(Fd, {bof, HeaderSz}),
case epp:open(File, Fd, StartLine, IncludePath, PreDefMacros) of
{ok, Epp} ->
{ok, FileForm} = epp:parse_erl_form(Epp),
OptModRes = epp:parse_erl_form(Epp),
S2 = S#state{source = text, module = Module},
S3 =
case OptModRes of
{ok, {attribute,_, module, M} = Form} ->
epp_parse_file(Epp, S2#state{module = M}, [Form, FileForm]);
{ok, _} ->
ModForm = {attribute,1,module, Module},
epp_parse_file2(Epp, S2, [ModForm, FileForm], OptModRes);
{error, _} ->
epp_parse_file2(Epp, S2, [FileForm], OptModRes);
{eof,LastLine} ->
S#state{forms_or_bin = [FileForm, {eof,LastLine}]}
end,
ok = epp:close(Epp),
ok = file:close(Fd),
check_source(S3, CheckOnly);
{error, Reason} ->
io:format("escript: ~p\n", [Reason]),
fatal("Preprocessor error")
end.
%% Validate the collected forms. An export of main/1 is added unless
%% the script declared one itself. With CheckOnly =:= true the forms
%% are compiled with strong_validation only and the emulator halts;
%% otherwise the forms are linted and, if the script declares records,
%% record-expanded so that the interpreter (erl_eval) can handle them.
check_source(S, CheckOnly) ->
    case S of
        #state{n_errors = Nerrs} when Nerrs =/= 0 ->
            fatal("There were compilation errors.");
        #state{exports_main = ExpMain,
               has_records = HasRecs,
               forms_or_bin = [FileForm2, ModForm2 | Forms]} ->
            %% Optionally add export of main/1
            Forms2 =
                case ExpMain of
                    false -> [{attribute,0,export, [{main,1}]} | Forms];
                    true -> Forms
                end,
            Forms3 = [FileForm2, ModForm2 | Forms2],
            case CheckOnly of
                true ->
                    %% Optionally expand records
                    Forms4 =
                        case HasRecs of
                            false -> Forms3;
                            true -> erl_expand_records:module(Forms3, [])
                        end,
                    %% Strong validation and halt
                    case compile:forms(Forms4, [report,strong_validation]) of
                        {ok,_} ->
                            my_halt(0);
                        _Other ->
                            fatal("There were compilation errors.")
                    end;
                false ->
                    %% Basic validation before execution
                    case erl_lint:module(Forms3) of
                        {ok,Ws} ->
                            report_warnings(Ws);
                        {error,Es,Ws} ->
                            report_errors(Es),
                            report_warnings(Ws),
                            fatal("There were compilation errors.")
                    end,
                    %% Optionally expand records
                    Forms4 =
                        case HasRecs of
                            false -> Forms3;
                            true -> erl_expand_records:module(Forms3, [])
                        end,
                    S#state{forms_or_bin = Forms4}
            end
    end.
%% Build the predefined ?MODULE / ?MODULE_STRING macros for a script.
%% The module name is derived from the script's base name (with dots
%% mapped to underscores) plus the current timestamp, so that every
%% invocation gets its own unique module name.
pre_def_macros(File) ->
    {Mega, Sec, Micro} = erlang:now(),
    Base = [case C of
                $. -> $_;
                _ -> C
            end || C <- filename:basename(File)],
    ModuleStr =
        lists:concat([Base, "__escript__",
                      Mega, "__", Sec, "__", Micro]),
    Module = list_to_atom(ModuleStr),
    Macros = [{'MODULE', Module, redefine},
              {'MODULE_STRING', ModuleStr, redefine}],
    {Macros, Module}.
%% Fetch the next form from the preprocessor and dispatch on it.
epp_parse_file(Epp, S, Forms) ->
    epp_parse_file2(Epp, S, Forms, epp:parse_erl_form(Epp)).
%% Work through the forms delivered by the preprocessor, accumulating
%% them in reverse order. Special handling for:
%%   - record declarations: typed fields are normalized and the state
%%     notes that records must be expanded before interpretation,
%%   - the escript-specific -mode() attribute (compile | interpret |
%%     debug),
%%   - export lists, to detect whether main/1 is already exported.
%% Parse errors are printed and counted; at eof the accumulated forms
%% are stored (in source order) in the state.
epp_parse_file2(Epp, S, Forms, Parsed) ->
    %% NOTE: a stray debug statement used to live here with a "~p"
    %% format but an empty argument list, which would crash on the
    %% very first form. Kept only as a comment:
    %% io:format("~p\n", [Parsed]),
    case Parsed of
        {ok, Form} ->
            case Form of
                {attribute,Ln,record,{Record,Fields}} ->
                    S2 = S#state{has_records = true},
                    case epp:normalize_typed_record_fields(Fields) of
                        {typed, NewFields} ->
                            epp_parse_file(Epp, S2,
                                           [{attribute, Ln, record, {Record, NewFields}},
                                            {attribute, Ln, type,
                                             {{record, Record}, Fields, []}} | Forms]);
                        not_typed ->
                            epp_parse_file(Epp, S2, [Form | Forms])
                    end;
                {attribute,Ln,mode,NewMode} ->
                    S2 = S#state{mode = NewMode},
                    if
                        NewMode =:= compile; NewMode =:= interpret; NewMode =:= debug ->
                            epp_parse_file(Epp, S2, [Form | Forms]);
                        true ->
                            %% Unknown mode: report and count as an error.
                            Args = lists:flatten(io_lib:format("illegal mode attribute: ~p", [NewMode])),
                            io:format("~s:~w ~s\n", [S#state.file,Ln,Args]),
                            Error = {error,{Ln,erl_parse,Args}},
                            Nerrs= S#state.n_errors + 1,
                            epp_parse_file(Epp, S2#state{n_errors = Nerrs}, [Error | Forms])
                    end;
                {attribute,_,export,Fs} ->
                    case lists:member({main,1}, Fs) of
                        false ->
                            epp_parse_file(Epp, S, [Form | Forms]);
                        true ->
                            epp_parse_file(Epp, S#state{exports_main = true}, [Form | Forms])
                    end;
                _ ->
                    epp_parse_file(Epp, S, [Form | Forms])
            end;
        {error,{Ln,Mod,Args}} = Form ->
            io:format("~s:~w: ~s\n",
                      [S#state.file,Ln,Mod:format_error(Args)]),
            epp_parse_file(Epp, S#state{n_errors = S#state.n_errors + 1}, [Form | Forms]);
        {eof,LastLine} ->
            S#state{forms_or_bin = lists:reverse([{eof, LastLine} | Forms])}
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Evaluate script
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Run the script under the debugger. The debugger application is
%% started and the module is loaded into the interpreter via
%% hidden_apply/4 (so that this module has no compile-time dependency
%% on the debugger application); then the script is run as usual.
debug(Module, AbsMod, Args) ->
    case hidden_apply(debugger, debugger, start, []) of
        {ok, _} ->
            case hidden_apply(debugger, int, i, [AbsMod]) of
                {module, _} ->
                    %% Attach a debugger window as soon as a process starts.
                    hidden_apply(debugger, debugger, auto_attach, [[init]]),
                    run(Module, Args);
                error ->
                    Text = lists:concat(["Cannot load the code for ", Module, " into the debugger"]),
                    fatal(Text)
            end;
        _ ->
            fatal("Cannot start the debugger")
    end.
%% Invoke the script's entry point and halt with exit status 0 when it
%% returns. Any exception raised by main/1 is rendered and reported as
%% a fatal error.
run(Module, Args) ->
    try
        apply(Module, main, [Args]),
        my_halt(0)
    catch
        Cls:Rsn ->
            fatal(format_exception(Cls, Rsn))
    end.
%% Run the script without compiling it: build an abstract call to
%% main(Args) and evaluate it with erl_eval. Local function calls are
%% resolved through code_handler/4 against a dict built from the
%% script's own forms. Halts with status 0 on success; exceptions are
%% rendered and reported as fatal errors.
interpret(Forms, File, Args) ->
    Dict  = parse_to_dict(Forms),
    ArgsA = erl_parse:abstract(Args, 0),
    Call = {call,0,{atom,0,main},[ArgsA]},
    try
        erl_eval:expr(Call,
                      erl_eval:new_bindings(),
                      {value,fun(I, J) -> code_handler(I, J, Dict, File) end}),
        my_halt(0)
    catch
        Class:Reason ->
            fatal(format_exception(Class, Reason))
    end.
%% Print each compiler error, grouped per file. An entry is either
%% {{FileName, _Line}, ErrorDescriptors} or {FileName, ErrorDescriptors}.
report_errors(Errors) ->
    lists:foreach(fun(Entry) ->
                          case Entry of
                              {{File, _Line}, Eds} -> list_errors(File, Eds);
                              {File, Eds} -> list_errors(File, Eds)
                          end
                  end,
                  Errors).
%% Print one line per error descriptor, prefixed with the file name
%% (and line number, when one is available).
list_errors(_File, []) ->
    ok;
list_errors(File, [Ed | Rest]) ->
    case Ed of
        {Line, Mod, E} ->
            io:format("~s:~w: ~s\n", [File, Line, Mod:format_error(E)]);
        {Mod, E} ->
            io:format("~s: ~s\n", [File, Mod:format_error(E)])
    end,
    list_errors(File, Rest).
%% Format, deduplicate and print compiler warnings. ordsets keeps a
%% single copy of each message, ordered by its {File, Line} sort tag.
report_warnings(Warnings) ->
    Messages = lists:flatmap(fun(W) ->
                                     case W of
                                         {{File, _Line}, Eds} -> format_message(File, Eds);
                                         {File, Eds} -> format_message(File, Eds)
                                     end
                             end,
                             Warnings),
    lists:foreach(fun({_Tag, Str}) -> io:put_chars(Str) end,
                  ordsets:from_list(Messages)).
%% Turn warning descriptors into {SortTag, IoData} pairs. The tag is
%% {File, Line} when a line number is known, otherwise the atom none.
format_message(_File, []) ->
    [];
format_message(File, [Ed | Rest]) ->
    Msg = case Ed of
              {Line, Mod, E} ->
                  {{File, Line},
                   io_lib:format("~s:~w: Warning: ~s\n",
                                 [File, Line, Mod:format_error(E)])};
              {Mod, E} ->
                  {none,
                   io_lib:format("~s: Warning: ~s\n",
                                 [File, Mod:format_error(E)])}
          end,
    [Msg | format_message(File, Rest)].
%% Build a dispatch dict from the parsed forms: locally defined
%% functions map {local, Name, Arity} to their clauses, and imported
%% functions map {remote, {Name, Arity}} to the source module. All
%% other forms are ignored.
parse_to_dict(L) -> parse_to_dict(L, dict:new()).

parse_to_dict([H | T], Dict0) ->
    Dict = case H of
               {function, _, Name, Arity, Clauses} ->
                   dict:store({local, Name, Arity}, Clauses, Dict0);
               {attribute, _, import, {Mod, Funcs}} ->
                   lists:foldl(fun(Func, D) ->
                                       dict:store({remote, Func}, Mod, D)
                               end, Dict0, Funcs);
               _ ->
                   Dict0
           end,
    parse_to_dict(T, Dict);
parse_to_dict([], Dict) ->
    Dict.
%% Local-call handler given to erl_eval while interpreting a script.
%% Calls to the script's own functions are looked up in Dict and
%% evaluated clause by clause; calls to imported functions are
%% forwarded with apply/3. The pseudo-call file() returns the script
%% name. Unknown functions print an error and terminate with exit
%% status 127.
code_handler(local, [file], _, File) ->
    File;
code_handler(Name, Args, Dict, File) ->
    %% NOTE: two stray debug statements used to live in this function
    %% with malformed tuple arguments ({Name, } / {Mod, Name, }) that
    %% do not parse. Restored as comments:
    %% io:format("code handler=~p~n", [{Name, Args}]),
    Arity = length(Args),
    case dict:find({local,Name,Arity}, Dict) of
        {ok, Cs} ->
            LF = {value,fun(I, J) -> code_handler(I, J, Dict, File) end},
            case erl_eval:match_clause(Cs, Args,erl_eval:new_bindings(),LF) of
                {Body, Bs} ->
                    eval_exprs(Body, Bs, LF, none, none);
                nomatch ->
                    erlang:error({function_clause,[{local,Name,Args}]})
            end;
        error ->
            case dict:find({remote,{Name,Arity}}, Dict) of
                {ok, Mod} ->
                    %% io:format("Calling: ~p~n", [{Mod, Name, Args}]),
                    apply(Mod, Name, Args);
                error ->
                    io:format("Script does not export ~w/~w\n", [Name,Arity]),
                    my_halt(127)
            end
    end.
%% Evaluate a function body expression by expression, threading the
%% bindings through. The final expression is evaluated with RBs set to
%% 'value' so that erl_eval:expr/5 returns its value directly.
eval_exprs([Last], Bindings, LocalFun, ExternalFun, _RBs) ->
    erl_eval:expr(Last, Bindings, LocalFun, ExternalFun, value);
eval_exprs([Expr | Rest], Bindings, LocalFun, ExternalFun, RBs) ->
    {value, _, NewBindings} =
        erl_eval:expr(Expr, Bindings, LocalFun, ExternalFun, none),
    eval_exprs(Rest, NewBindings, LocalFun, ExternalFun, RBs).
%% Render an exception (class + reason + stacktrace) in the style of
%% the Erlang shell. Must be called from within the catch clause that
%% caught the exception, since erlang:get_stacktrace/0 returns the
%% stacktrace of the calling process's most recent exception.
%% NOTE(review): erlang:get_stacktrace/0 is deprecated/removed in
%% modern OTP; this code predates try ... catch Class:Reason:Stk.
format_exception(Class, Reason) ->
    %% Limit the depth of printed terms to keep messages readable.
    PF = fun(Term, I) ->
                 io_lib:format("~." ++ integer_to_list(I) ++ "P", [Term, 50])
         end,
    StackTrace = erlang:get_stacktrace(),
    %% Prune frames belonging to the evaluator and to this module.
    StackFun = fun(M, _F, _A) -> (M =:= erl_eval) or (M =:= ?MODULE) end,
    lib:format_exception(1, Class, Reason, StackTrace, StackFun, PF).
%% Abort escript processing with an error message. The string is
%% thrown; the top-level driver catches it, prints "escript: <msg>"
%% and halts with a non-zero exit status.
fatal(Str) ->
    throw(Str).
%% Halt the emulator with the given exit status, but only once the
%% group leader has gone idle, so that pending standard I/O is flushed
%% before the VM goes down.
my_halt(Reason) ->
    case process_info(group_leader(), status) of
        {_,waiting} ->
            %% Now all output data is down in the driver.
            %% Give the driver some extra time before halting.
            receive after 1 -> ok end,
            halt(Reason);
        _ ->
            %% Probably still processing I/O requests.
            erlang:yield(),
            my_halt(Reason)
    end.
%% Apply M:F(Args) without creating a compile-time/xref dependency on
%% application App: the module name is produced by evaluating
%% fun() -> M end(), which hides it from the compiler. If the call
%% fails with undef for exactly this MFA (i.e. the application is not
%% available), a friendly fatal message is produced; any other undef
%% is re-raised with its original stacktrace.
hidden_apply(App, M, F, Args) ->
    try
        apply(fun() -> M end(), F, Args)
    catch
        error:undef ->
            case erlang:get_stacktrace() of
                [{M,F,Args} | _] ->
                    Arity = length(Args),
                    Text = io_lib:format("Call to ~w:~w/~w in application ~w failed.\n",
                                         [M, F, Arity, App]),
                    fatal(Text);
                Stk ->
                    erlang:raise(error, undef, Stk)
            end
    end.
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/stdlib/src/escript.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
Useful functions that can be called from scripts.
Internal API.
The function returns the final value of the accumulator. Acc0 is
returned if the escript contain an empty archive.
GetInfo/0 is a fun that returns a #file_info{} record for the file.
GetBin/0 is a fun that returns a the contents of the file as a binary.
info for the (entire) escript file
the compiled beam code
-spec foldl(fun((string(),
fun(() -> #file_info()),
fun(() -> binary() -> term()),
term()) -> term()),
term(),
string()).
Internal API.
Commands run using -run or -s are run in a process
trap_exit set to false. Because this behaviour is
trap_exit to false.
Archive file
Beam file
Source code
Skip header and return first body line
Shebang
, $\%, $\! | _] ->
, $\%, $\! | _] ->
No shebang. Assume that there is no header.
Use explicit module name
Use escript name without extension as module name
Optionally add export of main/1
Optionally expand records
Strong validation and halt
Basic validation before execution
Optionally expand records
Evaluate script
Now all output data is down in the driver.
Give the driver some extra time before halting.
Probably still processing I/O requests. | Copyright Ericsson AB 2007 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(escript).
-export([script_name/0, foldl/3]).
-export([start/0, start/1]).
-record(state, {file,
module,
forms_or_bin,
source,
n_errors,
mode,
exports_main,
has_records}).
script_name() ->
[ScriptName|_] = init:get_plain_arguments(),
ScriptName.
Apply Fun(Name , , GetBin , Acc ) for each file in the escript .
Fun/2 must return a new accumulator which is passed to the next call .
An escript may contain erlang code , beam code or an archive :
archive - the Fun/2 will be applied for each file in the archive
beam - the Fun/2 will be applied once and returns the file
erl - the Fun/2 will be applied once , returns the file
info for the ( entire ) escript file and the returns
foldl(Fun, Acc0, File) when is_function(Fun, 4) ->
case parse_file(File, false) of
{text, _, Forms, _Mode} when is_list(Forms) ->
GetInfo = fun() -> file:read_file_info(File) end,
GetBin =
fun() ->
case compile:forms(Forms, [return_errors, debug_info]) of
{ok, _, BeamBin} ->
BeamBin;
{error, _Errors, _Warnings} ->
fatal("There were compilation errors.")
end
end,
try
{ok, Fun(".", GetInfo, GetBin, Acc0)}
catch
throw:Reason ->
{error, Reason}
end;
{beam, _, BeamBin, _Mode} when is_binary(BeamBin) ->
GetInfo = fun() -> file:read_file_info(File) end,
GetBin = fun() -> BeamBin end,
try
{ok, Fun(".", GetInfo, GetBin, Acc0)}
catch
throw:Reason ->
{error, Reason}
end;
{archive, _, ArchiveBin, _Mode} when is_binary(ArchiveBin) ->
ZipFun =
fun({Name, GetInfo, GetBin}, A) ->
A2 = Fun(Name, GetInfo, GetBin, A),
{true, false, A2}
end,
case prim_zip:open(ZipFun, Acc0, {File, ArchiveBin}) of
{ok, PrimZip, Res} ->
ok = prim_zip:close(PrimZip),
{ok, Res};
{error, bad_eocd} ->
{error, "Not an archive file"};
{error, Reason} ->
{error, Reason}
end
end.
start() ->
start([]).
start(EscriptOptions) ->
try
surprising for users of escript , make sure to reset
process_flag(trap_exit, false),
case init:get_plain_arguments() of
[File|Args] ->
parse_and_run(File, Args, EscriptOptions);
[] ->
io:format("escript: Missing filename\n", []),
my_halt(127)
end
catch
throw:Str ->
io:format("escript: ~s\n", [Str]),
my_halt(127);
_:Reason ->
io:format("escript: Internal error: ~p\n", [Reason]),
io:format("~p\n", [erlang:get_stacktrace()]),
my_halt(127)
end.
parse_and_run(File, Args, Options) ->
CheckOnly = lists:member("s", Options),
{Source, Module, FormsOrBin, Mode} = parse_file(File, CheckOnly),
Mode2 =
case lists:member("d", Options) of
true ->
debug;
false ->
case lists:member("c", Options) of
true ->
compile;
false ->
case lists:member("i", Options) of
true -> interpret;
false -> Mode
end
end
end,
if
is_list(FormsOrBin) ->
case Mode2 of
interpret ->
interpret(FormsOrBin, File, Args);
compile ->
case compile:forms(FormsOrBin, [report]) of
{ok, Module, BeamBin} ->
{module, Module} = code:load_binary(Module, File, BeamBin),
run(Module, Args);
_Other ->
fatal("There were compilation errors.")
end;
debug ->
case compile:forms(FormsOrBin, [report, debug_info]) of
{ok,Module,BeamBin} ->
{module, Module} = code:load_binary(Module, File, BeamBin),
debug(Module, {Module, File, File, BeamBin}, Args);
_Other ->
fatal("There were compilation errors.")
end
end;
is_binary(FormsOrBin) ->
case Source of
archive ->
case code:set_primary_archive(File, FormsOrBin) of
ok when CheckOnly ->
case code:load_file(Module) of
{module, _} ->
case erlang:function_exported(Module, main, 1) of
true ->
my_halt(0);
false ->
Text = lists:concat(["Function ", Module, ":main/1 is not exported"]),
fatal(Text)
end;
_ ->
Text = lists:concat(["Cannot load module ", Module, " from archive"]),
fatal(Text)
end;
ok ->
case Mode2 of
run -> run(Module, Args);
debug -> debug(Module, Module, Args)
end;
{error, bad_eocd} ->
fatal("Not an archive file");
{error, Reason} ->
fatal(Reason)
end;
beam ->
case Mode2 of
run ->
{module, Module} = code:load_binary(Module, File, FormsOrBin),
run(Module, Args);
debug ->
[Base | Rest] = lists:reverse(filename:split(File)),
Base2 = filename:basename(Base, code:objfile_extension()),
Rest2 =
case Rest of
["ebin" | Top] -> ["src" | Top];
_ -> Rest
end,
SrcFile = filename:join(lists:reverse([Base2 ++ ".erl" | Rest2])),
debug(Module, {Module, SrcFile, File, FormsOrBin}, Args)
end
end
end.
Parse script
parse_file(File, CheckOnly) ->
S = #state{file = File,
n_errors = 0,
mode = interpret,
exports_main = false,
has_records = false},
{ok, Fd} =
case file:open(File, [read]) of
{ok, Fd0} ->
{ok, Fd0};
{error, R} ->
fatal(lists:concat([file:format_error(R), ": '", File, "'"]))
end,
{HeaderSz, StartLine, FirstBodyLine} = skip_header(Fd, 1),
#state{mode = Mode,
source = Source,
module = Module,
forms_or_bin = FormsOrBin} =
case FirstBodyLine of
[$P, $K | _] ->
ok = file:close(Fd),
parse_archive(S, File, HeaderSz);
[$F, $O, $R, $1 | _] ->
ok = file:close(Fd),
parse_beam(S, File, HeaderSz, CheckOnly);
_ ->
parse_source(S, File, Fd, StartLine, HeaderSz, CheckOnly)
end,
{Source, Module, FormsOrBin, Mode}.
skip_header(P, LineNo) ->
Skip shebang on first line
{ok, HeaderSz0} = file:position(P, cur),
Line1 = get_line(P),
case Line1 of
[$\#, $\! | _] ->
{ok, HeaderSz1} = file:position(P, cur),
Look for special comment on second line
Line2 = get_line(P),
{ok, HeaderSz2} = file:position(P, cur),
case Line2 of
Skip special comment on second line
Line3 = get_line(P),
{HeaderSz2, LineNo + 2, Line3};
_ ->
Look for special comment on third line
Line3 = get_line(P),
{ok, HeaderSz3} = file:position(P, cur),
case Line3 of
Skip special comment on third line
Line4 = get_line(P),
{HeaderSz3, LineNo + 3, Line4};
_ ->
Just skip shebang on first line
{HeaderSz1, LineNo + 1, Line2}
end
end;
_ ->
{HeaderSz0, LineNo, Line1}
end.
get_line(P) ->
case io:get_line(P, '') of
eof ->
fatal("Premature end of file reached");
Line ->
Line
end.
parse_archive(S, File, HeaderSz) ->
case file:read_file(File) of
{ok, <<_FirstLine:HeaderSz/binary, Bin/binary>>} ->
Mod =
case init:get_argument(escript) of
{ok, [["main", M]]} ->
list_to_atom(M);
_ ->
RevBase = lists:reverse(filename:basename(File)),
RevBase2 =
case lists:dropwhile(fun(X) -> X =/= $. end, RevBase) of
[$. | Rest] -> Rest;
[] -> RevBase
end,
list_to_atom(lists:reverse(RevBase2))
end,
S#state{source = archive,
mode = run,
module = Mod,
forms_or_bin = Bin};
{ok, _} ->
fatal("Illegal archive format");
{error, Reason} ->
fatal(file:format_error(Reason))
end.
parse_beam(S, File, HeaderSz, CheckOnly) ->
{ok, <<_FirstLine:HeaderSz/binary, Bin/binary>>} =
file:read_file(File),
case beam_lib:chunks(Bin, [exports]) of
{ok, {Module, [{exports, Exports}]}} ->
case CheckOnly of
true ->
case lists:member({main, 1}, Exports) of
true ->
my_halt(0);
false ->
Text = lists:concat(["Function ", Module, ":main/1 is not exported"]),
fatal(Text)
end;
false ->
S#state{source = beam,
mode = run,
module = Module,
forms_or_bin = Bin}
end;
{error, beam_lib, Reason} when is_tuple(Reason) ->
fatal(element(1, Reason));
{error, beam_lib, Reason} ->
fatal(Reason)
end.
parse_source(S, File, Fd, StartLine, HeaderSz, CheckOnly) ->
{PreDefMacros, Module} = pre_def_macros(File),
IncludePath = [],
{ok, _} = file:position(Fd, {bof, HeaderSz}),
case epp:open(File, Fd, StartLine, IncludePath, PreDefMacros) of
{ok, Epp} ->
{ok, FileForm} = epp:parse_erl_form(Epp),
OptModRes = epp:parse_erl_form(Epp),
S2 = S#state{source = text, module = Module},
S3 =
case OptModRes of
{ok, {attribute,_, module, M} = Form} ->
epp_parse_file(Epp, S2#state{module = M}, [Form, FileForm]);
{ok, _} ->
ModForm = {attribute,1,module, Module},
epp_parse_file2(Epp, S2, [ModForm, FileForm], OptModRes);
{error, _} ->
epp_parse_file2(Epp, S2, [FileForm], OptModRes);
{eof,LastLine} ->
S#state{forms_or_bin = [FileForm, {eof,LastLine}]}
end,
ok = epp:close(Epp),
ok = file:close(Fd),
check_source(S3, CheckOnly);
{error, Reason} ->
io:format("escript: ~p\n", [Reason]),
fatal("Preprocessor error")
end.
check_source(S, CheckOnly) ->
case S of
#state{n_errors = Nerrs} when Nerrs =/= 0 ->
fatal("There were compilation errors.");
#state{exports_main = ExpMain,
has_records = HasRecs,
forms_or_bin = [FileForm2, ModForm2 | Forms]} ->
Forms2 =
case ExpMain of
false -> [{attribute,0,export, [{main,1}]} | Forms];
true -> Forms
end,
Forms3 = [FileForm2, ModForm2 | Forms2],
case CheckOnly of
true ->
Forms4 =
case HasRecs of
false -> Forms3;
true -> erl_expand_records:module(Forms3, [])
end,
case compile:forms(Forms4, [report,strong_validation]) of
{ok,_} ->
my_halt(0);
_Other ->
fatal("There were compilation errors.")
end;
false ->
case erl_lint:module(Forms3) of
{ok,Ws} ->
report_warnings(Ws);
{error,Es,Ws} ->
report_errors(Es),
report_warnings(Ws),
fatal("There were compilation errors.")
end,
Forms4 =
case HasRecs of
false -> Forms3;
true -> erl_expand_records:module(Forms3, [])
end,
S#state{forms_or_bin = Forms4}
end
end.
pre_def_macros(File) ->
{MegaSecs, Secs, MicroSecs} = erlang:now(),
Replace = fun(Char) ->
case Char of
$\. -> $\_;
_ -> Char
end
end,
CleanBase = lists:map(Replace, filename:basename(File)),
ModuleStr =
CleanBase ++ "__" ++
"escript__" ++
integer_to_list(MegaSecs) ++ "__" ++
integer_to_list(Secs) ++ "__" ++
integer_to_list(MicroSecs),
Module = list_to_atom(ModuleStr),
PreDefMacros = [{'MODULE', Module, redefine},
{'MODULE_STRING', ModuleStr, redefine}],
{PreDefMacros, Module}.
epp_parse_file(Epp, S, Forms) ->
Parsed = epp:parse_erl_form(Epp),
epp_parse_file2(Epp, S, Forms, Parsed).
epp_parse_file2(Epp, S, Forms, Parsed) ->
io : format("~p\n " , [ ] ) ,
case Parsed of
{ok, Form} ->
case Form of
{attribute,Ln,record,{Record,Fields}} ->
S2 = S#state{has_records = true},
case epp:normalize_typed_record_fields(Fields) of
{typed, NewFields} ->
epp_parse_file(Epp, S2,
[{attribute, Ln, record, {Record, NewFields}},
{attribute, Ln, type,
{{record, Record}, Fields, []}} | Forms]);
not_typed ->
epp_parse_file(Epp, S2, [Form | Forms])
end;
{attribute,Ln,mode,NewMode} ->
S2 = S#state{mode = NewMode},
if
NewMode =:= compile; NewMode =:= interpret; NewMode =:= debug ->
epp_parse_file(Epp, S2, [Form | Forms]);
true ->
Args = lists:flatten(io_lib:format("illegal mode attribute: ~p", [NewMode])),
io:format("~s:~w ~s\n", [S#state.file,Ln,Args]),
Error = {error,{Ln,erl_parse,Args}},
Nerrs= S#state.n_errors + 1,
epp_parse_file(Epp, S2#state{n_errors = Nerrs}, [Error | Forms])
end;
{attribute,_,export,Fs} ->
case lists:member({main,1}, Fs) of
false ->
epp_parse_file(Epp, S, [Form | Forms]);
true ->
epp_parse_file(Epp, S#state{exports_main = true}, [Form | Forms])
end;
_ ->
epp_parse_file(Epp, S, [Form | Forms])
end;
{error,{Ln,Mod,Args}} = Form ->
io:format("~s:~w: ~s\n",
[S#state.file,Ln,Mod:format_error(Args)]),
epp_parse_file(Epp, S#state{n_errors = S#state.n_errors + 1}, [Form | Forms]);
{eof,LastLine} ->
S#state{forms_or_bin = lists:reverse([{eof, LastLine} | Forms])}
end.
debug(Module, AbsMod, Args) ->
case hidden_apply(debugger, debugger, start, []) of
{ok, _} ->
case hidden_apply(debugger, int, i, [AbsMod]) of
{module, _} ->
hidden_apply(debugger, debugger, auto_attach, [[init]]),
run(Module, Args);
error ->
Text = lists:concat(["Cannot load the code for ", Module, " into the debugger"]),
fatal(Text)
end;
_ ->
fatal("Cannot start the debugger")
end.
run(Module, Args) ->
try
Module:main(Args),
my_halt(0)
catch
Class:Reason ->
fatal(format_exception(Class, Reason))
end.
interpret(Forms, File, Args) ->
Dict = parse_to_dict(Forms),
ArgsA = erl_parse:abstract(Args, 0),
Call = {call,0,{atom,0,main},[ArgsA]},
try
erl_eval:expr(Call,
erl_eval:new_bindings(),
{value,fun(I, J) -> code_handler(I, J, Dict, File) end}),
my_halt(0)
catch
Class:Reason ->
fatal(format_exception(Class, Reason))
end.
report_errors(Errors) ->
lists:foreach(fun ({{F,_L},Eds}) -> list_errors(F, Eds);
({F,Eds}) -> list_errors(F, Eds) end,
Errors).
list_errors(F, [{Line,Mod,E}|Es]) ->
io:fwrite("~s:~w: ~s\n", [F,Line,Mod:format_error(E)]),
list_errors(F, Es);
list_errors(F, [{Mod,E}|Es]) ->
io:fwrite("~s: ~s\n", [F,Mod:format_error(E)]),
list_errors(F, Es);
list_errors(_F, []) -> ok.
report_warnings(Ws0) ->
Ws1 = lists:flatmap(fun({{F,_L},Eds}) -> format_message(F, Eds);
({F,Eds}) -> format_message(F, Eds) end,
Ws0),
Ws = ordsets:from_list(Ws1),
lists:foreach(fun({_,Str}) -> io:put_chars(Str) end, Ws).
format_message(F, [{Line,Mod,E}|Es]) ->
M = {{F,Line},io_lib:format("~s:~w: Warning: ~s\n", [F,Line,Mod:format_error(E)])},
[M|format_message(F, Es)];
format_message(F, [{Mod,E}|Es]) ->
M = {none,io_lib:format("~s: Warning: ~s\n", [F,Mod:format_error(E)])},
[M|format_message(F, Es)];
format_message(_, []) -> [].
parse_to_dict(L) -> parse_to_dict(L, dict:new()).
parse_to_dict([{function,_,Name,Arity,Clauses}|T], Dict0) ->
Dict = dict:store({local, Name,Arity}, Clauses, Dict0),
parse_to_dict(T, Dict);
parse_to_dict([{attribute,_,import,{Mod,Funcs}}|T], Dict0) ->
Dict = lists:foldl(fun(I, D) ->
dict:store({remote,I}, Mod, D)
end, Dict0, Funcs),
parse_to_dict(T, Dict);
parse_to_dict([_|T], Dict) ->
parse_to_dict(T, Dict);
parse_to_dict([], Dict) ->
Dict.
code_handler(local, [file], _, File) ->
File;
code_handler(Name, Args, Dict, File) ->
io : format("code ~ n",[{Name , } ] ) ,
Arity = length(Args),
case dict:find({local,Name,Arity}, Dict) of
{ok, Cs} ->
LF = {value,fun(I, J) -> code_handler(I, J, Dict, File) end},
case erl_eval:match_clause(Cs, Args,erl_eval:new_bindings(),LF) of
{Body, Bs} ->
eval_exprs(Body, Bs, LF, none, none);
nomatch ->
erlang:error({function_clause,[{local,Name,Args}]})
end;
error ->
case dict:find({remote,{Name,Arity}}, Dict) of
{ok, Mod} ->
io : format("Calling:~p ~ n",[{Mod , Name , } ] ) ,
apply(Mod, Name, Args);
error ->
io:format("Script does not export ~w/~w\n", [Name,Arity]),
my_halt(127)
end
end.
eval_exprs([E], Bs0, Lf, Ef, _RBs) ->
RBs1 = value,
erl_eval:expr(E, Bs0, Lf, Ef, RBs1);
eval_exprs([E|Es], Bs0, Lf, Ef, RBs) ->
RBs1 = none,
{value,_V,Bs} = erl_eval:expr(E, Bs0, Lf, Ef, RBs1),
eval_exprs(Es, Bs, Lf, Ef, RBs).
format_exception(Class, Reason) ->
PF = fun(Term, I) ->
io_lib:format("~." ++ integer_to_list(I) ++ "P", [Term, 50])
end,
StackTrace = erlang:get_stacktrace(),
StackFun = fun(M, _F, _A) -> (M =:= erl_eval) or (M =:= ?MODULE) end,
lib:format_exception(1, Class, Reason, StackTrace, StackFun, PF).
fatal(Str) ->
throw(Str).
my_halt(Reason) ->
case process_info(group_leader(), status) of
{_,waiting} ->
receive after 1 -> ok end,
halt(Reason);
_ ->
erlang:yield(),
my_halt(Reason)
end.
hidden_apply(App, M, F, Args) ->
try
apply(fun() -> M end(), F, Args)
catch
error:undef ->
case erlang:get_stacktrace() of
[{M,F,Args} | _] ->
Arity = length(Args),
Text = io_lib:format("Call to ~w:~w/~w in application ~w failed.\n",
[M, F, Arity, App]),
fatal(Text);
Stk ->
erlang:raise(error, undef, Stk)
end
end.
|
2a166e7bf7b16a05c6335926f1e0b16a23823517601ecf3086b08fc1a47adbca | HugoPeters1024/hs-sleuth | Unsafe.hs | # LANGUAGE CPP , MagicHash , UnboxedTuples #
-- |
-- Module : Data.Text.Unsafe
Copyright : ( c ) 2009 , 2010 , 2011
-- License : BSD-style
-- Maintainer :
-- Portability : portable
--
-- A module containing unsafe 'Text' operations, for very very careful
-- use in heavily tested code.
module Data.Text.Unsafe
(
inlineInterleaveST
, inlinePerformIO
, unsafeDupablePerformIO
, Iter(..)
, iter
, iter_
, reverseIter
, reverseIter_
, unsafeHead
, unsafeTail
, lengthWord16
, takeWord16
, dropWord16
) where
#if defined(ASSERTS)
import Control.Exception (assert)
#endif
import Data.Text.Internal.Encoding.Utf16 (chr2)
import Data.Text.Internal (Text(..))
import Data.Text.Internal.Unsafe (inlineInterleaveST, inlinePerformIO)
import Data.Text.Internal.Unsafe.Char (unsafeChr)
import qualified Data.Text.Array as A
import GHC.IO (unsafeDupablePerformIO)
-- | /O(1)/ A variant of 'head' for non-empty 'Text'. 'unsafeHead'
-- omits the check for the empty case, so there is an obligation on
-- the programmer to provide a proof that the 'Text' is non-empty.
unsafeHead :: Text -> Char
unsafeHead (Text arr off _len)
| m < 0xD800 || m > 0xDBFF = unsafeChr m
| otherwise = chr2 m n
where m = A.unsafeIndex arr off
n = A.unsafeIndex arr (off+1)
# INLINE unsafeHead #
-- | /O(1)/ A variant of 'tail' for non-empty 'Text'. 'unsafeTail'
-- omits the check for the empty case, so there is an obligation on
-- the programmer to provide a proof that the 'Text' is non-empty.
unsafeTail :: Text -> Text
unsafeTail t@(Text arr off len) =
#if defined(ASSERTS)
assert (d <= len) $
#endif
Text arr (off+d) (len-d)
where d = iter_ t 0
# INLINE unsafeTail #
data Iter = Iter {-# UNPACK #-} !Char {-# UNPACK #-} !Int
| /O(1)/ Iterate ( unsafely ) one step forwards through a UTF-16
-- array, returning the current character and the delta to add to give
-- the next offset to iterate at.
iter :: Text -> Int -> Iter
iter (Text arr off _len) i
| m < 0xD800 || m > 0xDBFF = Iter (unsafeChr m) 1
| otherwise = Iter (chr2 m n) 2
where m = A.unsafeIndex arr j
n = A.unsafeIndex arr k
j = off + i
k = j + 1
# INLINE iter #
| /O(1)/ Iterate one step through a UTF-16 array , returning the
-- delta to add to give the next offset to iterate at.
iter_ :: Text -> Int -> Int
iter_ (Text arr off _len) i | m < 0xD800 || m > 0xDBFF = 1
| otherwise = 2
where m = A.unsafeIndex arr (off+i)
{-# INLINE iter_ #-}
| /O(1)/ Iterate one step backwards through a UTF-16 array ,
-- returning the current character and the delta to add (i.e. a
-- negative number) to give the next offset to iterate at.
reverseIter :: Text -> Int -> (Char,Int)
reverseIter (Text arr off _len) i
| m < 0xDC00 || m > 0xDFFF = (unsafeChr m, -1)
| otherwise = (chr2 n m, -2)
where m = A.unsafeIndex arr j
n = A.unsafeIndex arr k
j = off + i
k = j - 1
# INLINE reverseIter #
| /O(1)/ Iterate one step backwards through a UTF-16 array ,
-- returning the delta to add (i.e. a negative number) to give the
-- next offset to iterate at.
--
@since 1.1.1.0
reverseIter_ :: Text -> Int -> Int
reverseIter_ (Text arr off _len) i
| m < 0xDC00 || m > 0xDFFF = -1
| otherwise = -2
where m = A.unsafeIndex arr (off+i)
# INLINE reverseIter _ #
| /O(1)/ Return the length of a ' Text ' in units of ' Word16 ' . This
-- is useful for sizing a target array appropriately before using
-- 'unsafeCopyToPtr'.
lengthWord16 :: Text -> Int
lengthWord16 (Text _arr _off len) = len
# INLINE lengthWord16 #
| /O(1)/ Unchecked take of ' k ' ' Word16 's from the front of a ' Text ' .
takeWord16 :: Int -> Text -> Text
takeWord16 k (Text arr off _len) = Text arr off k
# INLINE takeWord16 #
| /O(1)/ Unchecked drop of ' k ' ' Word16 's from the front of a ' Text ' .
dropWord16 :: Int -> Text -> Text
dropWord16 k (Text arr off len) = Text arr (off+k) (len-k)
# INLINE dropWord16 #
| null | https://raw.githubusercontent.com/HugoPeters1024/hs-sleuth/385655e62031959a14a3bac5e9ccd1c42c045f0c/test-project/text-1.2.3.2/Data/Text/Unsafe.hs | haskell | |
Module : Data.Text.Unsafe
License : BSD-style
Maintainer :
Portability : portable
A module containing unsafe 'Text' operations, for very very careful
use in heavily tested code.
| /O(1)/ A variant of 'head' for non-empty 'Text'. 'unsafeHead'
omits the check for the empty case, so there is an obligation on
the programmer to provide a proof that the 'Text' is non-empty.
| /O(1)/ A variant of 'tail' for non-empty 'Text'. 'unsafeTail'
omits the check for the empty case, so there is an obligation on
the programmer to provide a proof that the 'Text' is non-empty.
# UNPACK #
# UNPACK #
array, returning the current character and the delta to add to give
the next offset to iterate at.
delta to add to give the next offset to iterate at.
# INLINE iter_ #
returning the current character and the delta to add (i.e. a
negative number) to give the next offset to iterate at.
returning the delta to add (i.e. a negative number) to give the
next offset to iterate at.
is useful for sizing a target array appropriately before using
'unsafeCopyToPtr'. | # LANGUAGE CPP , MagicHash , UnboxedTuples #
Copyright : ( c ) 2009 , 2010 , 2011
module Data.Text.Unsafe
(
inlineInterleaveST
, inlinePerformIO
, unsafeDupablePerformIO
, Iter(..)
, iter
, iter_
, reverseIter
, reverseIter_
, unsafeHead
, unsafeTail
, lengthWord16
, takeWord16
, dropWord16
) where
#if defined(ASSERTS)
import Control.Exception (assert)
#endif
import Data.Text.Internal.Encoding.Utf16 (chr2)
import Data.Text.Internal (Text(..))
import Data.Text.Internal.Unsafe (inlineInterleaveST, inlinePerformIO)
import Data.Text.Internal.Unsafe.Char (unsafeChr)
import qualified Data.Text.Array as A
import GHC.IO (unsafeDupablePerformIO)
unsafeHead :: Text -> Char
unsafeHead (Text arr off _len)
| m < 0xD800 || m > 0xDBFF = unsafeChr m
| otherwise = chr2 m n
where m = A.unsafeIndex arr off
n = A.unsafeIndex arr (off+1)
# INLINE unsafeHead #
unsafeTail :: Text -> Text
unsafeTail t@(Text arr off len) =
#if defined(ASSERTS)
assert (d <= len) $
#endif
Text arr (off+d) (len-d)
where d = iter_ t 0
# INLINE unsafeTail #
| /O(1)/ Iterate ( unsafely ) one step forwards through a UTF-16
iter :: Text -> Int -> Iter
iter (Text arr off _len) i
| m < 0xD800 || m > 0xDBFF = Iter (unsafeChr m) 1
| otherwise = Iter (chr2 m n) 2
where m = A.unsafeIndex arr j
n = A.unsafeIndex arr k
j = off + i
k = j + 1
# INLINE iter #
| /O(1)/ Iterate one step through a UTF-16 array , returning the
iter_ :: Text -> Int -> Int
iter_ (Text arr off _len) i | m < 0xD800 || m > 0xDBFF = 1
| otherwise = 2
where m = A.unsafeIndex arr (off+i)
| /O(1)/ Iterate one step backwards through a UTF-16 array ,
reverseIter :: Text -> Int -> (Char,Int)
reverseIter (Text arr off _len) i
| m < 0xDC00 || m > 0xDFFF = (unsafeChr m, -1)
| otherwise = (chr2 n m, -2)
where m = A.unsafeIndex arr j
n = A.unsafeIndex arr k
j = off + i
k = j - 1
# INLINE reverseIter #
| /O(1)/ Iterate one step backwards through a UTF-16 array ,
@since 1.1.1.0
reverseIter_ :: Text -> Int -> Int
reverseIter_ (Text arr off _len) i
| m < 0xDC00 || m > 0xDFFF = -1
| otherwise = -2
where m = A.unsafeIndex arr (off+i)
# INLINE reverseIter _ #
| /O(1)/ Return the length of a ' Text ' in units of ' Word16 ' . This
lengthWord16 :: Text -> Int
lengthWord16 (Text _arr _off len) = len
# INLINE lengthWord16 #
| /O(1)/ Unchecked take of ' k ' ' Word16 's from the front of a ' Text ' .
takeWord16 :: Int -> Text -> Text
takeWord16 k (Text arr off _len) = Text arr off k
# INLINE takeWord16 #
| /O(1)/ Unchecked drop of ' k ' ' Word16 's from the front of a ' Text ' .
dropWord16 :: Int -> Text -> Text
dropWord16 k (Text arr off len) = Text arr (off+k) (len-k)
# INLINE dropWord16 #
|
28d98bb70dabbe33ada1aff6e0e5e13811916fac16381f4af2a811db547b18e7 | antono/guix-debian | srfi-64.upstream.scm | Copyright ( c ) 2005 , 2006 , 2007 , 2012 , 2013 Per Bothner
Added " full " support for Chicken , Gauche , and SISC .
, Copyright ( c ) 2005 .
Modified for Scheme Spheres by , Copyright ( c ) 2012 .
Support for Guile 2 by < > , Copyright ( c ) 2014 .
;;
;; Permission is hereby granted, free of charge, to any person
;; obtaining a copy of this software and associated documentation
files ( the " Software " ) , to deal in the Software without
;; restriction, including without limitation the rights to use, copy,
;; modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software , and to permit persons to whom the Software is
;; furnished to do so, subject to the following conditions:
;;
;; The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN
;; ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
;; CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
;; SOFTWARE.
(cond-expand
(chicken
(require-extension syntax-case))
(guile-2
(use-modules (srfi srfi-9)
In 2.0.9 , srfi-34 and srfi-35 are not well integrated
with either 's native exceptions or R6RS exceptions .
( srfi srfi-34 ) ( srfi srfi-35 )
(srfi srfi-39)))
(guile
(use-modules (ice-9 syncase) (srfi srfi-9)
( srfi srfi-34 ) ( srfi srfi-35 ) - not in 1.6.7
(srfi srfi-39)))
(sisc
(require-extension (srfi 9 34 35 39)))
(kawa
(module-compile-options warn-undefined-variable: #t
warn-invoke-unknown-method: #t)
(provide 'srfi-64)
(provide 'testing)
(require 'srfi-34)
(require 'srfi-35))
(else ()
))
(cond-expand
(kawa
(define-syntax %test-export
(syntax-rules ()
((%test-export test-begin . other-names)
(module-export %test-begin . other-names)))))
(else
(define-syntax %test-export
(syntax-rules ()
((%test-export . names) (if #f #f))))))
;; List of exported names
(%test-export
must be listed first , since in ( at least ) it is " magic " .
test-end test-assert test-eqv test-eq test-equal
test-approximate test-assert test-error test-apply test-with-runner
test-match-nth test-match-all test-match-any test-match-name
test-skip test-expect-fail test-read-eval-string
test-runner-group-path test-group test-group-with-cleanup
test-result-ref test-result-set! test-result-clear test-result-remove
test-result-kind test-passed?
test-log-to-file
; Misc test-runner functions
test-runner? test-runner-reset test-runner-null
test-runner-simple test-runner-current test-runner-factory test-runner-get
test-runner-create test-runner-test-name
;; test-runner field setter and getter functions - see %test-record-define:
test-runner-pass-count test-runner-pass-count!
test-runner-fail-count test-runner-fail-count!
test-runner-xpass-count test-runner-xpass-count!
test-runner-xfail-count test-runner-xfail-count!
test-runner-skip-count test-runner-skip-count!
test-runner-group-stack test-runner-group-stack!
test-runner-on-test-begin test-runner-on-test-begin!
test-runner-on-test-end test-runner-on-test-end!
test-runner-on-group-begin test-runner-on-group-begin!
test-runner-on-group-end test-runner-on-group-end!
test-runner-on-final test-runner-on-final!
test-runner-on-bad-count test-runner-on-bad-count!
test-runner-on-bad-end-name test-runner-on-bad-end-name!
test-result-alist test-result-alist!
test-runner-aux-value test-runner-aux-value!
;; default/simple call-back functions, used in default test-runner,
;; but can be called to construct more complex ones.
test-on-group-begin-simple test-on-group-end-simple
test-on-bad-count-simple test-on-bad-end-name-simple
test-on-final-simple test-on-test-end-simple
test-on-final-simple)
(cond-expand
(srfi-9
(define-syntax %test-record-define
(syntax-rules ()
((%test-record-define alloc runner? (name index setter getter) ...)
(define-record-type test-runner
(alloc)
runner?
(name setter getter) ...)))))
(else
(define %test-runner-cookie (list "test-runner"))
(define-syntax %test-record-define
(syntax-rules ()
((%test-record-define alloc runner? (name index getter setter) ...)
(begin
(define (runner? obj)
(and (vector? obj)
(> (vector-length obj) 1)
(eq (vector-ref obj 0) %test-runner-cookie)))
(define (alloc)
(let ((runner (make-vector 23)))
(vector-set! runner 0 %test-runner-cookie)
runner))
(begin
(define (getter runner)
(vector-ref runner index)) ...)
(begin
(define (setter runner value)
(vector-set! runner index value)) ...)))))))
(%test-record-define
%test-runner-alloc test-runner?
Cumulate count of all tests that have passed and were expected to .
(pass-count 1 test-runner-pass-count test-runner-pass-count!)
(fail-count 2 test-runner-fail-count test-runner-fail-count!)
(xpass-count 3 test-runner-xpass-count test-runner-xpass-count!)
(xfail-count 4 test-runner-xfail-count test-runner-xfail-count!)
(skip-count 5 test-runner-skip-count test-runner-skip-count!)
(skip-list 6 %test-runner-skip-list %test-runner-skip-list!)
(fail-list 7 %test-runner-fail-list %test-runner-fail-list!)
;; Normally #t, except when in a test-apply.
(run-list 8 %test-runner-run-list %test-runner-run-list!)
(skip-save 9 %test-runner-skip-save %test-runner-skip-save!)
(fail-save 10 %test-runner-fail-save %test-runner-fail-save!)
(group-stack 11 test-runner-group-stack test-runner-group-stack!)
(on-test-begin 12 test-runner-on-test-begin test-runner-on-test-begin!)
(on-test-end 13 test-runner-on-test-end test-runner-on-test-end!)
;; Call-back when entering a group. Takes (runner suite-name count).
(on-group-begin 14 test-runner-on-group-begin test-runner-on-group-begin!)
;; Call-back when leaving a group.
(on-group-end 15 test-runner-on-group-end test-runner-on-group-end!)
;; Call-back when leaving the outermost group.
(on-final 16 test-runner-on-final test-runner-on-final!)
;; Call-back when expected number of tests was wrong.
(on-bad-count 17 test-runner-on-bad-count test-runner-on-bad-count!)
;; Call-back when name in test=end doesn't match test-begin.
(on-bad-end-name 18 test-runner-on-bad-end-name test-runner-on-bad-end-name!)
Cumulate count of all tests that have been done .
(total-count 19 %test-runner-total-count %test-runner-total-count!)
Stack ( list ) of ( count - at - start . expected - count ):
(count-list 20 %test-runner-count-list %test-runner-count-list!)
(result-alist 21 test-result-alist test-result-alist!)
Field can be used by test - runner for any purpose .
;; test-runner-simple uses it for a log file.
(aux-value 22 test-runner-aux-value test-runner-aux-value!)
)
(define (test-runner-reset runner)
(test-result-alist! runner '())
(test-runner-pass-count! runner 0)
(test-runner-fail-count! runner 0)
(test-runner-xpass-count! runner 0)
(test-runner-xfail-count! runner 0)
(test-runner-skip-count! runner 0)
(%test-runner-total-count! runner 0)
(%test-runner-count-list! runner '())
(%test-runner-run-list! runner #t)
(%test-runner-skip-list! runner '())
(%test-runner-fail-list! runner '())
(%test-runner-skip-save! runner '())
(%test-runner-fail-save! runner '())
(test-runner-group-stack! runner '()))
(define (test-runner-group-path runner)
(reverse (test-runner-group-stack runner)))
(define (%test-null-callback runner) #f)
(define (test-runner-null)
(let ((runner (%test-runner-alloc)))
(test-runner-reset runner)
(test-runner-on-group-begin! runner (lambda (runner name count) #f))
(test-runner-on-group-end! runner %test-null-callback)
(test-runner-on-final! runner %test-null-callback)
(test-runner-on-test-begin! runner %test-null-callback)
(test-runner-on-test-end! runner %test-null-callback)
(test-runner-on-bad-count! runner (lambda (runner count expected) #f))
(test-runner-on-bad-end-name! runner (lambda (runner begin end) #f))
runner))
Not part of the specification . FIXME
;; Controls whether a log file is generated.
(define test-log-to-file #t)
(define (test-runner-simple)
(let ((runner (%test-runner-alloc)))
(test-runner-reset runner)
(test-runner-on-group-begin! runner test-on-group-begin-simple)
(test-runner-on-group-end! runner test-on-group-end-simple)
(test-runner-on-final! runner test-on-final-simple)
(test-runner-on-test-begin! runner test-on-test-begin-simple)
(test-runner-on-test-end! runner test-on-test-end-simple)
(test-runner-on-bad-count! runner test-on-bad-count-simple)
(test-runner-on-bad-end-name! runner test-on-bad-end-name-simple)
runner))
(cond-expand
(srfi-39
(define test-runner-current (make-parameter #f))
(define test-runner-factory (make-parameter test-runner-simple)))
(else
(define %test-runner-current #f)
(define-syntax test-runner-current
(syntax-rules ()
((test-runner-current)
%test-runner-current)
((test-runner-current runner)
(set! %test-runner-current runner))))
(define %test-runner-factory test-runner-simple)
(define-syntax test-runner-factory
(syntax-rules ()
((test-runner-factory)
%test-runner-factory)
((test-runner-factory runner)
(set! %test-runner-factory runner))))))
;; A safer wrapper to test-runner-current.
(define (test-runner-get)
(let ((r (test-runner-current)))
(if (not r)
(cond-expand
(srfi-23 (error "test-runner not initialized - test-begin missing?"))
(else #t)))
r))
(define (%test-specifier-matches spec runner)
(spec runner))
(define (test-runner-create)
((test-runner-factory)))
(define (%test-any-specifier-matches list runner)
(let ((result #f))
(let loop ((l list))
(cond ((null? l) result)
(else
(if (%test-specifier-matches (car l) runner)
(set! result #t))
(loop (cdr l)))))))
;; Returns #f, #t, or 'xfail.
(define (%test-should-execute runner)
(let ((run (%test-runner-run-list runner)))
(cond ((or
(not (or (eqv? run #t)
(%test-any-specifier-matches run runner)))
(%test-any-specifier-matches
(%test-runner-skip-list runner)
runner))
(test-result-set! runner 'result-kind 'skip)
#f)
((%test-any-specifier-matches
(%test-runner-fail-list runner)
runner)
(test-result-set! runner 'result-kind 'xfail)
'xfail)
(else #t))))
(define (%test-begin suite-name count)
(if (not (test-runner-current))
(test-runner-current (test-runner-create)))
(let ((runner (test-runner-current)))
((test-runner-on-group-begin runner) runner suite-name count)
(%test-runner-skip-save! runner
(cons (%test-runner-skip-list runner)
(%test-runner-skip-save runner)))
(%test-runner-fail-save! runner
(cons (%test-runner-fail-list runner)
(%test-runner-fail-save runner)))
(%test-runner-count-list! runner
(cons (cons (%test-runner-total-count runner)
count)
(%test-runner-count-list runner)))
(test-runner-group-stack! runner (cons suite-name
(test-runner-group-stack runner)))))
(cond-expand
(kawa
Kawa has test - begin built in , implemented as :
;; (begin
( cond - expand ( srfi-64 # ! void ) ( else ( require ' srfi-64 ) ) )
;; (%test-begin suite-name [count]))
;; This puts test-begin but only test-begin in the default environment.,
;; which makes normal test suites loadable without non-portable commands.
)
(else
(define-syntax test-begin
(syntax-rules ()
((test-begin suite-name)
(%test-begin suite-name #f))
((test-begin suite-name count)
(%test-begin suite-name count))))))
(define (test-on-group-begin-simple runner suite-name count)
(if (null? (test-runner-group-stack runner))
(begin
(display "%%%% Starting test ")
(display suite-name)
(if test-log-to-file
(let* ((log-file-name
(if (string? test-log-to-file) test-log-to-file
(string-append suite-name ".log")))
(log-file
(cond-expand (mzscheme
(open-output-file log-file-name 'truncate/replace))
(else (open-output-file log-file-name)))))
(display "%%%% Starting test " log-file)
(display suite-name log-file)
(newline log-file)
(test-runner-aux-value! runner log-file)
(display " (Writing full log to \"")
(display log-file-name)
(display "\")")))
(newline)))
(let ((log (test-runner-aux-value runner)))
(if (output-port? log)
(begin
(display "Group begin: " log)
(display suite-name log)
(newline log))))
#f)
(define (test-on-group-end-simple runner)
(let ((log (test-runner-aux-value runner)))
(if (output-port? log)
(begin
(display "Group end: " log)
(display (car (test-runner-group-stack runner)) log)
(newline log))))
#f)
(define (%test-on-bad-count-write runner count expected-count port)
(display "*** Total number of tests was " port)
(display count port)
(display " but should be " port)
(display expected-count port)
(display ". ***" port)
(newline port)
(display "*** Discrepancy indicates testsuite error or exceptions. ***" port)
(newline port))
(define (test-on-bad-count-simple runner count expected-count)
(%test-on-bad-count-write runner count expected-count (current-output-port))
(let ((log (test-runner-aux-value runner)))
(if (output-port? log)
(%test-on-bad-count-write runner count expected-count log))))
(define (test-on-bad-end-name-simple runner begin-name end-name)
(let ((msg (string-append (%test-format-line runner) "test-end " begin-name
" does not match test-begin " end-name)))
(cond-expand
(srfi-23 (error msg))
(else (display msg) (newline)))))
(define (%test-final-report1 value label port)
(if (> value 0)
(begin
(display label port)
(display value port)
(newline port))))
(define (%test-final-report-simple runner port)
(%test-final-report1 (test-runner-pass-count runner)
"# of expected passes " port)
(%test-final-report1 (test-runner-xfail-count runner)
"# of expected failures " port)
(%test-final-report1 (test-runner-xpass-count runner)
"# of unexpected successes " port)
(%test-final-report1 (test-runner-fail-count runner)
"# of unexpected failures " port)
(%test-final-report1 (test-runner-skip-count runner)
"# of skipped tests " port))
(define (test-on-final-simple runner)
(%test-final-report-simple runner (current-output-port))
(let ((log (test-runner-aux-value runner)))
(if (output-port? log)
(%test-final-report-simple runner log))))
(define (%test-format-line runner)
(let* ((line-info (test-result-alist runner))
(source-file (assq 'source-file line-info))
(source-line (assq 'source-line line-info))
(file (if source-file (cdr source-file) "")))
(if source-line
(string-append file ":"
(number->string (cdr source-line)) ": ")
"")))
(define (%test-end suite-name line-info)
(let* ((r (test-runner-get))
(groups (test-runner-group-stack r))
(line (%test-format-line r)))
(test-result-alist! r line-info)
(if (null? groups)
(let ((msg (string-append line "test-end not in a group")))
(cond-expand
(srfi-23 (error msg))
(else (display msg) (newline)))))
(if (and suite-name (not (equal? suite-name (car groups))))
((test-runner-on-bad-end-name r) r suite-name (car groups)))
(let* ((count-list (%test-runner-count-list r))
(expected-count (cdar count-list))
(saved-count (caar count-list))
(group-count (- (%test-runner-total-count r) saved-count)))
(if (and expected-count
(not (= expected-count group-count)))
((test-runner-on-bad-count r) r group-count expected-count))
((test-runner-on-group-end r) r)
(test-runner-group-stack! r (cdr (test-runner-group-stack r)))
(%test-runner-skip-list! r (car (%test-runner-skip-save r)))
(%test-runner-skip-save! r (cdr (%test-runner-skip-save r)))
(%test-runner-fail-list! r (car (%test-runner-fail-save r)))
(%test-runner-fail-save! r (cdr (%test-runner-fail-save r)))
(%test-runner-count-list! r (cdr count-list))
(if (null? (test-runner-group-stack r))
((test-runner-on-final r) r)))))
(define-syntax test-group
(syntax-rules ()
((test-group suite-name . body)
(let ((r (test-runner-current)))
;; Ideally should also set line-number, if available.
(test-result-alist! r (list (cons 'test-name suite-name)))
(if (%test-should-execute r)
(dynamic-wind
(lambda () (test-begin suite-name))
(lambda () . body)
(lambda () (test-end suite-name))))))))
(define-syntax test-group-with-cleanup
(syntax-rules ()
((test-group-with-cleanup suite-name form cleanup-form)
(test-group suite-name
(dynamic-wind
(lambda () #f)
(lambda () form)
(lambda () cleanup-form))))
((test-group-with-cleanup suite-name cleanup-form)
(test-group-with-cleanup suite-name #f cleanup-form))
((test-group-with-cleanup suite-name form1 form2 form3 . rest)
(test-group-with-cleanup suite-name (begin form1 form2) form3 . rest))))
(define (test-on-test-begin-simple runner)
(let ((log (test-runner-aux-value runner)))
(if (output-port? log)
(let* ((results (test-result-alist runner))
(source-file (assq 'source-file results))
(source-line (assq 'source-line results))
(source-form (assq 'source-form results))
(test-name (assq 'test-name results)))
(display "Test begin:" log)
(newline log)
(if test-name (%test-write-result1 test-name log))
(if source-file (%test-write-result1 source-file log))
(if source-line (%test-write-result1 source-line log))
(if source-form (%test-write-result1 source-form log))))))
(define-syntax test-result-ref
(syntax-rules ()
((test-result-ref runner pname)
(test-result-ref runner pname #f))
((test-result-ref runner pname default)
(let ((p (assq pname (test-result-alist runner))))
(if p (cdr p) default)))))
(define (test-on-test-end-simple runner)
(let ((log (test-runner-aux-value runner))
(kind (test-result-ref runner 'result-kind)))
(if (memq kind '(fail xpass))
(let* ((results (test-result-alist runner))
(source-file (assq 'source-file results))
(source-line (assq 'source-line results))
(test-name (assq 'test-name results)))
(if (or source-file source-line)
(begin
(if source-file (display (cdr source-file)))
(display ":")
(if source-line (display (cdr source-line)))
(display ": ")))
(display (if (eq? kind 'xpass) "XPASS" "FAIL"))
(if test-name
(begin
(display " ")
(display (cdr test-name))))
(newline)))
(if (output-port? log)
(begin
(display "Test end:" log)
(newline log)
(let loop ((list (test-result-alist runner)))
(if (pair? list)
(let ((pair (car list)))
;; Write out properties not written out by on-test-begin.
(if (not (memq (car pair)
'(test-name source-file source-line source-form)))
(%test-write-result1 pair log))
(loop (cdr list)))))))))
(define (%test-write-result1 pair port)
(display " " port)
(display (car pair) port)
(display ": " port)
(write (cdr pair) port)
(newline port))
(define (test-result-set! runner pname value)
(let* ((alist (test-result-alist runner))
(p (assq pname alist)))
(if p
(set-cdr! p value)
(test-result-alist! runner (cons (cons pname value) alist)))))
(define (test-result-clear runner)
(test-result-alist! runner '()))
(define (test-result-remove runner pname)
(let* ((alist (test-result-alist runner))
(p (assq pname alist)))
(if p
(test-result-alist! runner
(let loop ((r alist))
(if (eq? r p) (cdr r)
(cons (car r) (loop (cdr r)))))))))
(define (test-result-kind . rest)
(let ((runner (if (pair? rest) (car rest) (test-runner-current))))
(test-result-ref runner 'result-kind)))
(define (test-passed? . rest)
(let ((runner (if (pair? rest) (car rest) (test-runner-get))))
(memq (test-result-ref runner 'result-kind) '(pass xpass))))
(define (%test-report-result)
(let* ((r (test-runner-get))
(result-kind (test-result-kind r)))
(case result-kind
((pass)
(test-runner-pass-count! r (+ 1 (test-runner-pass-count r))))
((fail)
(test-runner-fail-count! r (+ 1 (test-runner-fail-count r))))
((xpass)
(test-runner-xpass-count! r (+ 1 (test-runner-xpass-count r))))
((xfail)
(test-runner-xfail-count! r (+ 1 (test-runner-xfail-count r))))
(else
(test-runner-skip-count! r (+ 1 (test-runner-skip-count r)))))
(%test-runner-total-count! r (+ 1 (%test-runner-total-count r)))
((test-runner-on-test-end r) r)))
(cond-expand
(guile
(define-syntax %test-evaluate-with-catch
(syntax-rules ()
((%test-evaluate-with-catch test-expression)
(catch #t
(lambda () test-expression)
(lambda (key . args)
(test-result-set! (test-runner-current) 'actual-error
(cons key args))
#f))))))
(kawa
(define-syntax %test-evaluate-with-catch
(syntax-rules ()
((%test-evaluate-with-catch test-expression)
(try-catch test-expression
(ex <java.lang.Throwable>
(test-result-set! (test-runner-current) 'actual-error ex)
#f))))))
(srfi-34
(define-syntax %test-evaluate-with-catch
(syntax-rules ()
((%test-evaluate-with-catch test-expression)
(guard (err (else #f)) test-expression)))))
(chicken
(define-syntax %test-evaluate-with-catch
(syntax-rules ()
((%test-evaluate-with-catch test-expression)
(condition-case test-expression (ex () #f))))))
(else
(define-syntax %test-evaluate-with-catch
(syntax-rules ()
((%test-evaluate-with-catch test-expression)
test-expression)))))
(cond-expand
((or kawa mzscheme)
(cond-expand
(mzscheme
(define-for-syntax (%test-syntax-file form)
(let ((source (syntax-source form)))
(cond ((string? source) file)
((path? source) (path->string source))
(else #f)))))
(kawa
(define (%test-syntax-file form)
(syntax-source form))))
(define (%test-source-line2 form)
(let* ((line (syntax-line form))
(file (%test-syntax-file form))
(line-pair (if line (list (cons 'source-line line)) '())))
(cons (cons 'source-form (syntax-object->datum form))
(if file (cons (cons 'source-file file) line-pair) line-pair)))))
(guile-2
(define (%test-source-line2 form)
(let* ((src-props (syntax-source form))
(file (and src-props (assq-ref src-props 'filename)))
(line (and src-props (assq-ref src-props 'line)))
(file-alist (if file
`((source-file . ,file))
'()))
(line-alist (if line
`((source-line . ,(+ line 1)))
'())))
(datum->syntax (syntax here)
`((source-form . ,(syntax->datum form))
,@file-alist
,@line-alist)))))
(else
(define (%test-source-line2 form)
'())))
(define (%test-on-test-begin r)
(%test-should-execute r)
((test-runner-on-test-begin r) r)
(not (eq? 'skip (test-result-ref r 'result-kind))))
(define (%test-on-test-end r result)
(test-result-set! r 'result-kind
(if (eq? (test-result-ref r 'result-kind) 'xfail)
(if result 'xpass 'xfail)
(if result 'pass 'fail))))
(define (test-runner-test-name runner)
(test-result-ref runner 'test-name ""))
(define-syntax %test-comp2body
(syntax-rules ()
((%test-comp2body r comp expected expr)
(let ()
(if (%test-on-test-begin r)
(let ((exp expected))
(test-result-set! r 'expected-value exp)
(let ((res (%test-evaluate-with-catch expr)))
(test-result-set! r 'actual-value res)
(%test-on-test-end r (comp exp res)))))
(%test-report-result)))))
(define (%test-approximate= error)
(lambda (value expected)
(let ((rval (real-part value))
(ival (imag-part value))
(rexp (real-part expected))
(iexp (imag-part expected)))
(and (>= rval (- rexp error))
(>= ival (- iexp error))
(<= rval (+ rexp error))
(<= ival (+ iexp error))))))
(define-syntax %test-comp1body
(syntax-rules ()
((%test-comp1body r expr)
(let ()
(if (%test-on-test-begin r)
(let ()
(let ((res (%test-evaluate-with-catch expr)))
(test-result-set! r 'actual-value res)
(%test-on-test-end r res))))
(%test-report-result)))))
(cond-expand
((or kawa mzscheme guile-2)
;; Should be made to work for any Scheme with syntax-case
However , I have n't gotten the quoting working . FIXME .
(define-syntax test-end
(lambda (x)
(syntax-case (list x (list (syntax quote) (%test-source-line2 x))) ()
(((mac suite-name) line)
(syntax
(%test-end suite-name line)))
(((mac) line)
(syntax
(%test-end #f line))))))
(define-syntax test-assert
(lambda (x)
(syntax-case (list x (list (syntax quote) (%test-source-line2 x))) ()
(((mac tname expr) line)
(syntax
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r (cons (cons 'test-name tname) line))
(%test-comp1body r expr))))
(((mac expr) line)
(syntax
(let* ((r (test-runner-get)))
(test-result-alist! r line)
(%test-comp1body r expr)))))))
(define (%test-comp2 comp x)
(syntax-case (list x (list (syntax quote) (%test-source-line2 x)) comp) ()
(((mac tname expected expr) line comp)
(syntax
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r (cons (cons 'test-name tname) line))
(%test-comp2body r comp expected expr))))
(((mac expected expr) line comp)
(syntax
(let* ((r (test-runner-get)))
(test-result-alist! r line)
(%test-comp2body r comp expected expr))))))
(define-syntax test-eqv
(lambda (x) (%test-comp2 (syntax eqv?) x)))
(define-syntax test-eq
(lambda (x) (%test-comp2 (syntax eq?) x)))
(define-syntax test-equal
(lambda (x) (%test-comp2 (syntax equal?) x)))
FIXME - needed for
(lambda (x)
(syntax-case (list x (list (syntax quote) (%test-source-line2 x))) ()
(((mac tname expected expr error) line)
(syntax
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r (cons (cons 'test-name tname) line))
(%test-comp2body r (%test-approximate= error) expected expr))))
(((mac expected expr error) line)
(syntax
(let* ((r (test-runner-get)))
(test-result-alist! r line)
(%test-comp2body r (%test-approximate= error) expected expr))))))))
(else
(define-syntax test-end
(syntax-rules ()
((test-end)
(%test-end #f '()))
((test-end suite-name)
(%test-end suite-name '()))))
(define-syntax test-assert
(syntax-rules ()
((test-assert tname test-expression)
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r '((test-name . tname)))
(%test-comp1body r test-expression)))
((test-assert test-expression)
(let* ((r (test-runner-get)))
(test-result-alist! r '())
(%test-comp1body r test-expression)))))
(define-syntax %test-comp2
(syntax-rules ()
((%test-comp2 comp tname expected expr)
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r (list (cons 'test-name tname)))
(%test-comp2body r comp expected expr)))
((%test-comp2 comp expected expr)
(let* ((r (test-runner-get)))
(test-result-alist! r '())
(%test-comp2body r comp expected expr)))))
(define-syntax test-equal
(syntax-rules ()
((test-equal . rest)
(%test-comp2 equal? . rest))))
(define-syntax test-eqv
(syntax-rules ()
((test-eqv . rest)
(%test-comp2 eqv? . rest))))
(define-syntax test-eq
(syntax-rules ()
((test-eq . rest)
(%test-comp2 eq? . rest))))
(define-syntax test-approximate
(syntax-rules ()
((test-approximate tname expected expr error)
(%test-comp2 (%test-approximate= error) tname expected expr))
((test-approximate expected expr error)
(%test-comp2 (%test-approximate= error) expected expr))))))
(cond-expand
(guile
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(cond ((%test-on-test-begin r)
(let ((et etype))
(test-result-set! r 'expected-error et)
(%test-on-test-end r
(catch #t
(lambda ()
(test-result-set! r 'actual-value expr)
#f)
(lambda (key . args)
;; TODO: decide how to specify expected
error types for .
(test-result-set! r 'actual-error
(cons key args))
#t)))
(%test-report-result))))))))
(mzscheme
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(%test-comp1body r (with-handlers (((lambda (h) #t) (lambda (h) #t)))
(let ()
(test-result-set! r 'actual-value expr)
#f)))))))
(chicken
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(%test-comp1body r (condition-case expr (ex () #t)))))))
(kawa
(define-syntax %test-error
(syntax-rules ()
((%test-error r #t expr)
(cond ((%test-on-test-begin r)
(test-result-set! r 'expected-error #t)
(%test-on-test-end r
(try-catch
(let ()
(test-result-set! r 'actual-value expr)
#f)
(ex <java.lang.Throwable>
(test-result-set! r 'actual-error ex)
#t)))
(%test-report-result))))
((%test-error r etype expr)
(if (%test-on-test-begin r)
(let ((et etype))
(test-result-set! r 'expected-error et)
(%test-on-test-end r
(try-catch
(let ()
(test-result-set! r 'actual-value expr)
#f)
(ex <java.lang.Throwable>
(test-result-set! r 'actual-error ex)
(cond ((and (instance? et <gnu.bytecode.ClassType>)
(gnu.bytecode.ClassType:isSubclass et <java.lang.Throwable>))
(instance? ex et))
(else #t)))))
(%test-report-result)))))))
((and srfi-34 srfi-35)
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(%test-comp1body r (guard (ex ((condition-type? etype)
(and (condition? ex) (condition-has-type? ex etype)))
((procedure? etype)
(etype ex))
((equal? etype #t)
#t)
(else #t))
expr #f))))))
(srfi-34
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(%test-comp1body r (guard (ex (else #t)) expr #f))))))
(else
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(begin
((test-runner-on-test-begin r) r)
(test-result-set! r 'result-kind 'skip)
(%test-report-result)))))))
(cond-expand
((or kawa mzscheme guile-2)
(define-syntax test-error
(lambda (x)
(syntax-case (list x (list (syntax quote) (%test-source-line2 x))) ()
(((mac tname etype expr) line)
(syntax
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r (cons (cons 'test-name tname) line))
(%test-error r etype expr))))
(((mac etype expr) line)
(syntax
(let* ((r (test-runner-get)))
(test-result-alist! r line)
(%test-error r etype expr))))
(((mac expr) line)
(syntax
(let* ((r (test-runner-get)))
(test-result-alist! r line)
(%test-error r #t expr))))))))
(else
(define-syntax test-error
(syntax-rules ()
((test-error name etype expr)
(let ((r (test-runner-get)))
(test-result-alist! r `((test-name . ,name)))
(%test-error r etype expr)))
((test-error etype expr)
(let ((r (test-runner-get)))
(test-result-alist! r '())
(%test-error r etype expr)))
((test-error expr)
(let ((r (test-runner-get)))
(test-result-alist! r '())
(%test-error r #t expr)))))))
;; test-apply: (test-apply [runner] specifier ... procedure)
;; Run PROCEDURE with the runner's run-list restricted to the given
;; specifiers.  A leading runner argument is installed for the duration.
(define (test-apply first . rest)
  (if (test-runner? first)
      (test-with-runner first (apply test-apply rest))
      (let ((r (test-runner-current)))
	(if r
	    (let ((run-list (%test-runner-run-list r)))
	      (cond ((null? rest)
		     ;; FIRST is the thunk: activate the accumulated
		     ;; specifiers (reversed back to given order), call it.
		     (%test-runner-run-list! r (reverse run-list))
		     (first)) ;; actually apply procedure thunk
		    (else
		     ;; FIRST is a specifier: push it, recurse over the rest,
		     ;; then restore the previous run-list.
		     (%test-runner-run-list!
		      r
		      (if (eq? run-list #t) (list first) (cons first run-list)))
		     (apply test-apply rest)
		     (%test-runner-run-list! r run-list))))
	    ;; No current runner: create one and report at the end.
	    (let ((r (test-runner-create)))
	      (test-with-runner r (apply test-apply first rest))
	      ((test-runner-on-final r) r))))))
;; test-with-runner: evaluate the FORMs with RUNNER installed as the
;; current test-runner; the previous runner is restored on exit, even on
;; a non-local exit (dynamic-wind).
(define-syntax test-with-runner
  (syntax-rules ()
    ((test-with-runner runner form ...)
     (let ((saved-runner (test-runner-current)))
       (dynamic-wind
	   (lambda () (test-runner-current runner))
	   (lambda () form ...)
	   (lambda () (test-runner-current saved-runner)))))))
;;; Predicates
(define (%test-match-nth n count)
  ;; Stateful specifier: matches the Nth test seen by this predicate and
  ;; the COUNT-1 tests after it.  Each invocation advances the counter.
  (let ((seen 0))
    (lambda (runner)
      (set! seen (+ seen 1))
      (and (>= seen n)
           (< seen (+ n count))))))
;; test-match-nth: specifier matching the Nth test (and the COUNT-1
;; tests after it).  Each evaluation yields a fresh stateful predicate.
(define-syntax test-match-nth
  (syntax-rules ()
    ((test-match-nth n)
     (test-match-nth n 1))
    ((test-match-nth n count)
     (%test-match-nth n count))))
(define (%test-match-all . pred-list)
  ;; Specifier matching when ALL predicates match.  Every predicate is
  ;; invoked (deliberately no short-circuit), since specifiers may be
  ;; stateful counters that must see each test.
  (lambda (runner)
    (let loop ((preds pred-list) (all-matched #t))
      (if (null? preds)
          all-matched
          (loop (cdr preds)
                (if ((car preds) runner) all-matched #f))))))
;; test-match-all: specifier matching only when every PRED matches;
;; each PRED is first coerced to a predicate via %test-as-specifier.
(define-syntax test-match-all
  (syntax-rules ()
    ((test-match-all pred ...)
     (%test-match-all (%test-as-specifier pred) ...))))
(define (%test-match-any . pred-list)
  ;; Specifier matching when ANY predicate matches.  As with
  ;; %test-match-all, every predicate is invoked (no short-circuit)
  ;; because specifiers may be stateful.
  (lambda (runner)
    (let loop ((preds pred-list) (matched #f))
      (if (null? preds)
          matched
          (loop (cdr preds)
                (if ((car preds) runner) #t matched))))))
;; test-match-any: specifier matching when at least one PRED matches;
;; each PRED is first coerced to a predicate via %test-as-specifier.
(define-syntax test-match-any
  (syntax-rules ()
    ((test-match-any pred ...)
     (%test-match-any (%test-as-specifier pred) ...))))
;; Coerce to a predicate function:
(define (%test-as-specifier specifier)
  ;; Coerce a specifier to a predicate on runners: a procedure is used
  ;; as-is, an integer N means "the first N tests", a string means
  ;; "tests with this name".
  (if (procedure? specifier)
      specifier
      (if (integer? specifier)
          (test-match-nth 1 specifier)
          (if (string? specifier)
              (test-match-name specifier)
              (error "not a valid test specifier")))))
;; test-skip: arrange for tests matching ALL the given specifiers to be
;; skipped.  The skip-list is saved/restored around test groups.
(define-syntax test-skip
  (syntax-rules ()
    ((test-skip pred ...)
     (let ((runner (test-runner-get)))
       (%test-runner-skip-list! runner
				(cons (test-match-all (%test-as-specifier pred) ...)
				      (%test-runner-skip-list runner)))))))
;; test-expect-fail: mark tests matching ALL the given specifiers as
;; expected failures (their results become xfail/xpass rather than
;; fail/pass).  The fail-list is saved/restored around test groups.
(define-syntax test-expect-fail
  (syntax-rules ()
    ((test-expect-fail pred ...)
     (let ((runner (test-runner-get)))
       (%test-runner-fail-list! runner
				(cons (test-match-all (%test-as-specifier pred) ...)
				      (%test-runner-fail-list runner)))))))
(define (test-match-name name)
  ;; Specifier matching tests whose name is NAME (compared with equal?).
  (lambda (runner)
    (equal? (test-runner-test-name runner) name)))
;; test-read-eval-string: read a single datum from STRING and evaluate
;; it.  It is an error if STRING contains more than one datum; without
;; srfi-23 the string "error" is returned instead of raising.
(define (test-read-eval-string string)
  (let* ((port (open-input-string string))
	 (form (read port)))
    (if (eof-object? (read-char port))
	(cond-expand
	 ;; Guile's eval requires an explicit module argument.
	 (guile (eval form (current-module)))
	 (else (eval form)))
	(cond-expand
	 (srfi-23 (error "(not at eof)"))
	 (else "error")))))
| null | https://raw.githubusercontent.com/antono/guix-debian/85ef443788f0788a62010a942973d4f7714d10b4/srfi/srfi-64.upstream.scm | scheme |
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
List of exported names
Misc test-runner functions
test-runner field setter and getter functions - see %test-record-define:
default/simple call-back functions, used in default test-runner,
but can be called to construct more complex ones.
Normally #t, except when in a test-apply.
Call-back when entering a group. Takes (runner suite-name count).
Call-back when leaving a group.
Call-back when leaving the outermost group.
Call-back when expected number of tests was wrong.
Call-back when name in test=end doesn't match test-begin.
test-runner-simple uses it for a log file.
Controls whether a log file is generated.
A safer wrapper to test-runner-current.
Returns #f, #t, or 'xfail.
(begin
(%test-begin suite-name [count]))
This puts test-begin but only test-begin in the default environment.,
which makes normal test suites loadable without non-portable commands.
Ideally should also set line-number, if available.
Write out properties not written out by on-test-begin.
Should be made to work for any Scheme with syntax-case
TODO: decide how to specify expected
actually apply procedure thunk
Predicates | Copyright ( c ) 2005 , 2006 , 2007 , 2012 , 2013 Per Bothner
Added " full " support for Chicken , Gauche , and SISC .
, Copyright ( c ) 2005 .
Modified for Scheme Spheres by , Copyright ( c ) 2012 .
Support for Guile 2 by < > , Copyright ( c ) 2014 .
files ( the " Software " ) , to deal in the Software without
of the Software , and to permit persons to whom the Software is
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN
;; Per-implementation module/feature setup.  The prose lines inside the
;; guile branches had lost their comment markers, which broke the reader;
;; they are restored as comments below.
(cond-expand
 (chicken
  (require-extension syntax-case))
 (guile-2
  (use-modules (srfi srfi-9)
	       ;; In Guile 2.0.9, srfi-34 and srfi-35 are not well integrated
	       ;; with either Guile's native exceptions or R6RS exceptions.
	       ;; (srfi srfi-34) (srfi srfi-35)
	       (srfi srfi-39)))
 (guile
  (use-modules (ice-9 syncase) (srfi srfi-9)
	       ;; (srfi srfi-34) (srfi srfi-35) - not in Guile 1.6.7
	       (srfi srfi-39)))
 (sisc
  (require-extension (srfi 9 34 35 39)))
 (kawa
  (module-compile-options warn-undefined-variable: #t
			  warn-invoke-unknown-method: #t)
  (provide 'srfi-64)
  (provide 'testing)
  (require 'srfi-34)
  (require 'srfi-35))
 (else ()))
(cond-expand
(kawa
(define-syntax %test-export
(syntax-rules ()
((%test-export test-begin . other-names)
(module-export %test-begin . other-names)))))
(else
(define-syntax %test-export
(syntax-rules ()
((%test-export . names) (if #f #f))))))
;; Export the public SRFI-64 API.  The `test-begin` entry had been
;; swallowed along with its trailing comment; restored here.
(%test-export
 test-begin ;; must be listed first, since in Kawa (at least) it is "magic"
 test-end test-assert test-eqv test-eq test-equal
 test-approximate test-assert test-error test-apply test-with-runner
 test-match-nth test-match-all test-match-any test-match-name
 test-skip test-expect-fail test-read-eval-string
 test-runner-group-path test-group test-group-with-cleanup
 test-result-ref test-result-set! test-result-clear test-result-remove
 test-result-kind test-passed?
 test-log-to-file
 ;; Misc test-runner functions:
 test-runner? test-runner-reset test-runner-null
 test-runner-simple test-runner-current test-runner-factory test-runner-get
 test-runner-create test-runner-test-name
 ;; test-runner field setter and getter functions - see %test-record-define:
 test-runner-pass-count test-runner-pass-count!
 test-runner-fail-count test-runner-fail-count!
 test-runner-xpass-count test-runner-xpass-count!
 test-runner-xfail-count test-runner-xfail-count!
 test-runner-skip-count test-runner-skip-count!
 test-runner-group-stack test-runner-group-stack!
 test-runner-on-test-begin test-runner-on-test-begin!
 test-runner-on-test-end test-runner-on-test-end!
 test-runner-on-group-begin test-runner-on-group-begin!
 test-runner-on-group-end test-runner-on-group-end!
 test-runner-on-final test-runner-on-final!
 test-runner-on-bad-count test-runner-on-bad-count!
 test-runner-on-bad-end-name test-runner-on-bad-end-name!
 test-result-alist test-result-alist!
 test-runner-aux-value test-runner-aux-value!
 ;; default/simple call-back functions, used in default test-runner,
 ;; but can be called to construct more complex ones:
 test-on-group-begin-simple test-on-group-end-simple
 test-on-bad-count-simple test-on-bad-end-name-simple
 test-on-final-simple test-on-test-end-simple
 test-on-final-simple)
;; %test-record-define: define the test-runner record type.  With srfi-9
;; it expands to define-record-type; otherwise the runner is a tagged
;; vector and per-field accessors/modifiers are generated by index.
(cond-expand
 (srfi-9
  (define-syntax %test-record-define
    (syntax-rules ()
      ((%test-record-define alloc runner? (name index setter getter) ...)
       (define-record-type test-runner
	 (alloc)
	 runner?
	 (name setter getter) ...)))))
 (else
  ;; Unique tag stored in slot 0 so runner? can recognize our vectors.
  (define %test-runner-cookie (list "test-runner"))
  (define-syntax %test-record-define
    (syntax-rules ()
      ((%test-record-define alloc runner? (name index getter setter) ...)
       (begin
	 (define (runner? obj)
	   (and (vector? obj)
		(> (vector-length obj) 1)
		;; BUG FIX: was `eq`, which is unbound in standard Scheme.
		(eq? (vector-ref obj 0) %test-runner-cookie)))
	 (define (alloc)
	   (let ((runner (make-vector 23)))
	     (vector-set! runner 0 %test-runner-cookie)
	     runner))
	 (begin
	   (define (getter runner)
	     (vector-ref runner index)) ...)
	 (begin
	   (define (setter runner value)
	     (vector-set! runner index value)) ...)))))))
;; The test-runner record's fields.  Several field comments had lost
;; their comment markers (breaking the reader); restored below.
(%test-record-define
 %test-runner-alloc test-runner?
 ;; Cumulate count of all tests that have passed and were expected to.
 (pass-count 1 test-runner-pass-count test-runner-pass-count!)
 (fail-count 2 test-runner-fail-count test-runner-fail-count!)
 (xpass-count 3 test-runner-xpass-count test-runner-xpass-count!)
 (xfail-count 4 test-runner-xfail-count test-runner-xfail-count!)
 (skip-count 5 test-runner-skip-count test-runner-skip-count!)
 (skip-list 6 %test-runner-skip-list %test-runner-skip-list!)
 (fail-list 7 %test-runner-fail-list %test-runner-fail-list!)
 (run-list 8 %test-runner-run-list %test-runner-run-list!)
 (skip-save 9 %test-runner-skip-save %test-runner-skip-save!)
 (fail-save 10 %test-runner-fail-save %test-runner-fail-save!)
 (group-stack 11 test-runner-group-stack test-runner-group-stack!)
 (on-test-begin 12 test-runner-on-test-begin test-runner-on-test-begin!)
 (on-test-end 13 test-runner-on-test-end test-runner-on-test-end!)
 (on-group-begin 14 test-runner-on-group-begin test-runner-on-group-begin!)
 (on-group-end 15 test-runner-on-group-end test-runner-on-group-end!)
 (on-final 16 test-runner-on-final test-runner-on-final!)
 (on-bad-count 17 test-runner-on-bad-count test-runner-on-bad-count!)
 (on-bad-end-name 18 test-runner-on-bad-end-name test-runner-on-bad-end-name!)
 ;; Cumulate count of all tests that have been done.
 (total-count 19 %test-runner-total-count %test-runner-total-count!)
 ;; Stack (list) of (count-at-start . expected-count):
 (count-list 20 %test-runner-count-list %test-runner-count-list!)
 (result-alist 21 test-result-alist test-result-alist!)
 ;; Field can be used by test-runner for any purpose.
 (aux-value 22 test-runner-aux-value test-runner-aux-value!)
 )
(define (test-runner-reset runner)
  ;; Restore RUNNER's mutable state to its initial configuration:
  ;; zero every counter, clear every list, and reset the run-list to #t
  ;; (meaning "run every test").
  (for-each (lambda (zero!) (zero! runner 0))
            (list test-runner-pass-count!
                  test-runner-fail-count!
                  test-runner-xpass-count!
                  test-runner-xfail-count!
                  test-runner-skip-count!
                  %test-runner-total-count!))
  (for-each (lambda (clear!) (clear! runner '()))
            (list test-result-alist!
                  %test-runner-count-list!
                  %test-runner-skip-list!
                  %test-runner-fail-list!
                  %test-runner-skip-save!
                  %test-runner-fail-save!
                  test-runner-group-stack!))
  (%test-runner-run-list! runner #t))
;; test-runner-group-path: list of active group names, outermost first
;; (the group-stack is kept innermost-first).
(define (test-runner-group-path runner)
  (reverse (test-runner-group-stack runner)))
(define (%test-null-callback runner) #f)
(define (test-runner-null)
(let ((runner (%test-runner-alloc)))
(test-runner-reset runner)
(test-runner-on-group-begin! runner (lambda (runner name count) #f))
(test-runner-on-group-end! runner %test-null-callback)
(test-runner-on-final! runner %test-null-callback)
(test-runner-on-test-begin! runner %test-null-callback)
(test-runner-on-test-end! runner %test-null-callback)
(test-runner-on-bad-count! runner (lambda (runner count expected) #f))
(test-runner-on-bad-end-name! runner (lambda (runner begin end) #f))
runner))
;; Not part of the specification.  FIXME
(define test-log-to-file #t)
;; test-runner-simple: a runner wired to the default "simple" callbacks,
;; which print results to (current-output-port) and, when
;; test-log-to-file is set, to a log file as well.
(define (test-runner-simple)
  (let ((runner (%test-runner-alloc)))
    (test-runner-reset runner)
    (test-runner-on-group-begin! runner test-on-group-begin-simple)
    (test-runner-on-group-end! runner test-on-group-end-simple)
    (test-runner-on-final! runner test-on-final-simple)
    (test-runner-on-test-begin! runner test-on-test-begin-simple)
    (test-runner-on-test-end! runner test-on-test-end-simple)
    (test-runner-on-bad-count! runner test-on-bad-count-simple)
    (test-runner-on-bad-end-name! runner test-on-bad-end-name-simple)
    runner))
(cond-expand
(srfi-39
(define test-runner-current (make-parameter #f))
(define test-runner-factory (make-parameter test-runner-simple)))
(else
(define %test-runner-current #f)
(define-syntax test-runner-current
(syntax-rules ()
((test-runner-current)
%test-runner-current)
((test-runner-current runner)
(set! %test-runner-current runner))))
(define %test-runner-factory test-runner-simple)
(define-syntax test-runner-factory
(syntax-rules ()
((test-runner-factory)
%test-runner-factory)
((test-runner-factory runner)
(set! %test-runner-factory runner))))))
;; test-runner-get: return the current runner, raising an error (when
;; srfi-23 is available) if none has been installed by test-begin.
;; Without srfi-23 it falls through and returns #f.
(define (test-runner-get)
  (let ((r (test-runner-current)))
    (if (not r)
	(cond-expand
	 (srfi-23 (error "test-runner not initialized - test-begin missing?"))
	 (else #t)))
    r))
(define (%test-specifier-matches spec runner)
(spec runner))
(define (test-runner-create)
((test-runner-factory)))
(define (%test-any-specifier-matches list runner)
  ;; Apply every specifier in LIST to RUNNER and return #t if at least
  ;; one matched.  Each specifier is invoked (no short-circuit), because
  ;; specifiers such as test-match-nth are stateful and must see every
  ;; test.
  (let loop ((specs list) (matched #f))
    (if (null? specs)
        matched
        (let ((hit (%test-specifier-matches (car specs) runner)))
          (loop (cdr specs) (if hit #t matched))))))
;; %test-should-execute: decide the fate of the upcoming test.
;; Returns #f (skip), 'xfail (run but expect failure), or #t (run
;; normally), recording 'skip / 'xfail in the runner's result-kind.
(define (%test-should-execute runner)
  (let ((run (%test-runner-run-list runner)))
    (cond ((or
	    ;; Not selected by the run-list (#t means "run everything")...
	    (not (or (eqv? run #t)
		     (%test-any-specifier-matches run runner)))
	    ;; ...or explicitly on the skip-list.
	    (%test-any-specifier-matches
	     (%test-runner-skip-list runner)
	     runner))
	   (test-result-set! runner 'result-kind 'skip)
	   #f)
	  ((%test-any-specifier-matches
	    (%test-runner-fail-list runner)
	    runner)
	   (test-result-set! runner 'result-kind 'xfail)
	   'xfail)
	  (else #t))))
(define (%test-begin suite-name count)
(if (not (test-runner-current))
(test-runner-current (test-runner-create)))
(let ((runner (test-runner-current)))
((test-runner-on-group-begin runner) runner suite-name count)
(%test-runner-skip-save! runner
(cons (%test-runner-skip-list runner)
(%test-runner-skip-save runner)))
(%test-runner-fail-save! runner
(cons (%test-runner-fail-list runner)
(%test-runner-fail-save runner)))
(%test-runner-count-list! runner
(cons (cons (%test-runner-total-count runner)
count)
(%test-runner-count-list runner)))
(test-runner-group-stack! runner (cons suite-name
(test-runner-group-stack runner)))))
;; test-begin: start a test group (Kawa supplies its own magic version).
;; The prose in the kawa branch had lost its comment markers, which
;; broke the reader; restored as comments.
(cond-expand
 (kawa
  ;; Kawa has test-begin built in, implemented as:
  ;; (cond-expand (srfi-64 #!void) (else (require 'srfi-64)))
  )
 (else
  (define-syntax test-begin
    (syntax-rules ()
      ((test-begin suite-name)
       (%test-begin suite-name #f))
      ((test-begin suite-name count)
       (%test-begin suite-name count))))))
(define (test-on-group-begin-simple runner suite-name count)
(if (null? (test-runner-group-stack runner))
(begin
(display "%%%% Starting test ")
(display suite-name)
(if test-log-to-file
(let* ((log-file-name
(if (string? test-log-to-file) test-log-to-file
(string-append suite-name ".log")))
(log-file
(cond-expand (mzscheme
(open-output-file log-file-name 'truncate/replace))
(else (open-output-file log-file-name)))))
(display "%%%% Starting test " log-file)
(display suite-name log-file)
(newline log-file)
(test-runner-aux-value! runner log-file)
(display " (Writing full log to \"")
(display log-file-name)
(display "\")")))
(newline)))
(let ((log (test-runner-aux-value runner)))
(if (output-port? log)
(begin
(display "Group begin: " log)
(display suite-name log)
(newline log))))
#f)
(define (test-on-group-end-simple runner)
(let ((log (test-runner-aux-value runner)))
(if (output-port? log)
(begin
(display "Group end: " log)
(display (car (test-runner-group-stack runner)) log)
(newline log))))
#f)
(define (%test-on-bad-count-write runner count expected-count port)
  ;; Write the "wrong total test count" diagnostic to PORT.
  ;; (RUNNER is unused but kept for callback-signature symmetry.)
  (for-each (lambda (piece) (display piece port))
            (list "*** Total number of tests was " count
                  " but should be " expected-count ". ***"))
  (newline port)
  (display "*** Discrepancy indicates testsuite error or exceptions. ***" port)
  (newline port))
(define (test-on-bad-count-simple runner count expected-count)
(%test-on-bad-count-write runner count expected-count (current-output-port))
(let ((log (test-runner-aux-value runner)))
(if (output-port? log)
(%test-on-bad-count-write runner count expected-count log))))
(define (test-on-bad-end-name-simple runner begin-name end-name)
(let ((msg (string-append (%test-format-line runner) "test-end " begin-name
" does not match test-begin " end-name)))
(cond-expand
(srfi-23 (error msg))
(else (display msg) (newline)))))
(define (%test-final-report1 value label port)
  ;; Print "LABELVALUE" followed by a newline when VALUE is positive;
  ;; zero counts are elided from the final report.
  (cond ((> value 0)
         (display label port)
         (display value port)
         (newline port))))
(define (%test-final-report-simple runner port)
(%test-final-report1 (test-runner-pass-count runner)
"# of expected passes " port)
(%test-final-report1 (test-runner-xfail-count runner)
"# of expected failures " port)
(%test-final-report1 (test-runner-xpass-count runner)
"# of unexpected successes " port)
(%test-final-report1 (test-runner-fail-count runner)
"# of unexpected failures " port)
(%test-final-report1 (test-runner-skip-count runner)
"# of skipped tests " port))
(define (test-on-final-simple runner)
(%test-final-report-simple runner (current-output-port))
(let ((log (test-runner-aux-value runner)))
(if (output-port? log)
(%test-final-report-simple runner log))))
(define (%test-format-line runner)
  ;; "FILE:LINE: " prefix for diagnostics, or "" when no source-line
  ;; information has been recorded for the current test.
  (let* ((alist (test-result-alist runner))
         (file-entry (assq 'source-file alist))
         (line-entry (assq 'source-line alist)))
    (if line-entry
        (string-append (if file-entry (cdr file-entry) "")
                       ":"
                       (number->string (cdr line-entry))
                       ": ")
        "")))
(define (%test-end suite-name line-info)
(let* ((r (test-runner-get))
(groups (test-runner-group-stack r))
(line (%test-format-line r)))
(test-result-alist! r line-info)
(if (null? groups)
(let ((msg (string-append line "test-end not in a group")))
(cond-expand
(srfi-23 (error msg))
(else (display msg) (newline)))))
(if (and suite-name (not (equal? suite-name (car groups))))
((test-runner-on-bad-end-name r) r suite-name (car groups)))
(let* ((count-list (%test-runner-count-list r))
(expected-count (cdar count-list))
(saved-count (caar count-list))
(group-count (- (%test-runner-total-count r) saved-count)))
(if (and expected-count
(not (= expected-count group-count)))
((test-runner-on-bad-count r) r group-count expected-count))
((test-runner-on-group-end r) r)
(test-runner-group-stack! r (cdr (test-runner-group-stack r)))
(%test-runner-skip-list! r (car (%test-runner-skip-save r)))
(%test-runner-skip-save! r (cdr (%test-runner-skip-save r)))
(%test-runner-fail-list! r (car (%test-runner-fail-save r)))
(%test-runner-fail-save! r (cdr (%test-runner-fail-save r)))
(%test-runner-count-list! r (cdr count-list))
(if (null? (test-runner-group-stack r))
((test-runner-on-final r) r)))))
(define-syntax test-group
(syntax-rules ()
((test-group suite-name . body)
(let ((r (test-runner-current)))
(test-result-alist! r (list (cons 'test-name suite-name)))
(if (%test-should-execute r)
(dynamic-wind
(lambda () (test-begin suite-name))
(lambda () . body)
(lambda () (test-end suite-name))))))))
(define-syntax test-group-with-cleanup
(syntax-rules ()
((test-group-with-cleanup suite-name form cleanup-form)
(test-group suite-name
(dynamic-wind
(lambda () #f)
(lambda () form)
(lambda () cleanup-form))))
((test-group-with-cleanup suite-name cleanup-form)
(test-group-with-cleanup suite-name #f cleanup-form))
((test-group-with-cleanup suite-name form1 form2 form3 . rest)
(test-group-with-cleanup suite-name (begin form1 form2) form3 . rest))))
(define (test-on-test-begin-simple runner)
(let ((log (test-runner-aux-value runner)))
(if (output-port? log)
(let* ((results (test-result-alist runner))
(source-file (assq 'source-file results))
(source-line (assq 'source-line results))
(source-form (assq 'source-form results))
(test-name (assq 'test-name results)))
(display "Test begin:" log)
(newline log)
(if test-name (%test-write-result1 test-name log))
(if source-file (%test-write-result1 source-file log))
(if source-line (%test-write-result1 source-line log))
(if source-form (%test-write-result1 source-form log))))))
(define-syntax test-result-ref
(syntax-rules ()
((test-result-ref runner pname)
(test-result-ref runner pname #f))
((test-result-ref runner pname default)
(let ((p (assq pname (test-result-alist runner))))
(if p (cdr p) default)))))
(define (test-on-test-end-simple runner)
(let ((log (test-runner-aux-value runner))
(kind (test-result-ref runner 'result-kind)))
(if (memq kind '(fail xpass))
(let* ((results (test-result-alist runner))
(source-file (assq 'source-file results))
(source-line (assq 'source-line results))
(test-name (assq 'test-name results)))
(if (or source-file source-line)
(begin
(if source-file (display (cdr source-file)))
(display ":")
(if source-line (display (cdr source-line)))
(display ": ")))
(display (if (eq? kind 'xpass) "XPASS" "FAIL"))
(if test-name
(begin
(display " ")
(display (cdr test-name))))
(newline)))
(if (output-port? log)
(begin
(display "Test end:" log)
(newline log)
(let loop ((list (test-result-alist runner)))
(if (pair? list)
(let ((pair (car list)))
(if (not (memq (car pair)
'(test-name source-file source-line source-form)))
(%test-write-result1 pair log))
(loop (cdr list)))))))))
(define (%test-write-result1 pair port)
(display " " port)
(display (car pair) port)
(display ": " port)
(write (cdr pair) port)
(newline port))
(define (test-result-set! runner pname value)
  ;; Update property PNAME in RUNNER's result alist in place, or prepend
  ;; a new entry when the property is not yet present.
  (let ((entry (assq pname (test-result-alist runner))))
    (if entry
        (set-cdr! entry value)
        (test-result-alist! runner
                            (cons (cons pname value)
                                  (test-result-alist runner))))))
(define (test-result-clear runner)
(test-result-alist! runner '()))
(define (test-result-remove runner pname)
(let* ((alist (test-result-alist runner))
(p (assq pname alist)))
(if p
(test-result-alist! runner
(let loop ((r alist))
(if (eq? r p) (cdr r)
(cons (car r) (loop (cdr r)))))))))
(define (test-result-kind . rest)
  ;; Result kind ('pass, 'fail, 'xpass, 'xfail, 'skip, or #f) of the
  ;; current test, for an explicit runner or the current one.
  (let ((runner (if (null? rest) (test-runner-current) (car rest))))
    (test-result-ref runner 'result-kind)))
(define (test-passed? . rest)
  ;; True when the current test's result kind is 'pass or 'xpass.
  (let ((runner (if (null? rest) (test-runner-get) (car rest))))
    (memq (test-result-ref runner 'result-kind) '(pass xpass))))
(define (%test-report-result)
  ;; Bump the counter matching the finished test's result kind, bump the
  ;; total, then invoke the runner's on-test-end callback.
  (let ((r (test-runner-get)))
    (let ((kind (test-result-kind r)))
      (cond ((eq? kind 'pass)
             (test-runner-pass-count! r (+ 1 (test-runner-pass-count r))))
            ((eq? kind 'fail)
             (test-runner-fail-count! r (+ 1 (test-runner-fail-count r))))
            ((eq? kind 'xpass)
             (test-runner-xpass-count! r (+ 1 (test-runner-xpass-count r))))
            ((eq? kind 'xfail)
             (test-runner-xfail-count! r (+ 1 (test-runner-xfail-count r))))
            (else
             ;; Anything else (notably 'skip) counts as skipped.
             (test-runner-skip-count! r (+ 1 (test-runner-skip-count r))))))
    (%test-runner-total-count! r (+ 1 (%test-runner-total-count r)))
    ((test-runner-on-test-end r) r)))
(cond-expand
(guile
(define-syntax %test-evaluate-with-catch
(syntax-rules ()
((%test-evaluate-with-catch test-expression)
(catch #t
(lambda () test-expression)
(lambda (key . args)
(test-result-set! (test-runner-current) 'actual-error
(cons key args))
#f))))))
(kawa
(define-syntax %test-evaluate-with-catch
(syntax-rules ()
((%test-evaluate-with-catch test-expression)
(try-catch test-expression
(ex <java.lang.Throwable>
(test-result-set! (test-runner-current) 'actual-error ex)
#f))))))
(srfi-34
(define-syntax %test-evaluate-with-catch
(syntax-rules ()
((%test-evaluate-with-catch test-expression)
(guard (err (else #f)) test-expression)))))
(chicken
(define-syntax %test-evaluate-with-catch
(syntax-rules ()
((%test-evaluate-with-catch test-expression)
(condition-case test-expression (ex () #f))))))
(else
(define-syntax %test-evaluate-with-catch
(syntax-rules ()
((%test-evaluate-with-catch test-expression)
test-expression)))))
(cond-expand
((or kawa mzscheme)
(cond-expand
(mzscheme
(define-for-syntax (%test-syntax-file form)
(let ((source (syntax-source form)))
(cond ((string? source) file)
((path? source) (path->string source))
(else #f)))))
(kawa
(define (%test-syntax-file form)
(syntax-source form))))
(define (%test-source-line2 form)
(let* ((line (syntax-line form))
(file (%test-syntax-file form))
(line-pair (if line (list (cons 'source-line line)) '())))
(cons (cons 'source-form (syntax-object->datum form))
(if file (cons (cons 'source-file file) line-pair) line-pair)))))
(guile-2
(define (%test-source-line2 form)
(let* ((src-props (syntax-source form))
(file (and src-props (assq-ref src-props 'filename)))
(line (and src-props (assq-ref src-props 'line)))
(file-alist (if file
`((source-file . ,file))
'()))
(line-alist (if line
`((source-line . ,(+ line 1)))
'())))
(datum->syntax (syntax here)
`((source-form . ,(syntax->datum form))
,@file-alist
,@line-alist)))))
(else
(define (%test-source-line2 form)
'())))
(define (%test-on-test-begin r)
(%test-should-execute r)
((test-runner-on-test-begin r) r)
(not (eq? 'skip (test-result-ref r 'result-kind))))
(define (%test-on-test-end r result)
(test-result-set! r 'result-kind
(if (eq? (test-result-ref r 'result-kind) 'xfail)
(if result 'xpass 'xfail)
(if result 'pass 'fail))))
(define (test-runner-test-name runner)
(test-result-ref runner 'test-name ""))
(define-syntax %test-comp2body
(syntax-rules ()
((%test-comp2body r comp expected expr)
(let ()
(if (%test-on-test-begin r)
(let ((exp expected))
(test-result-set! r 'expected-value exp)
(let ((res (%test-evaluate-with-catch expr)))
(test-result-set! r 'actual-value res)
(%test-on-test-end r (comp exp res)))))
(%test-report-result)))))
(define (%test-approximate= error)
  ;; Comparator for test-approximate: true when VALUE is within ERROR of
  ;; EXPECTED, checked component-wise for complex numbers.
  (lambda (value expected)
    (define (close? actual wanted)
      (and (>= actual (- wanted error))
           (<= actual (+ wanted error))))
    (and (close? (real-part value) (real-part expected))
         (close? (imag-part value) (imag-part expected)))))
(define-syntax %test-comp1body
(syntax-rules ()
((%test-comp1body r expr)
(let ()
(if (%test-on-test-begin r)
(let ()
(let ((res (%test-evaluate-with-catch expr)))
(test-result-set! r 'actual-value res)
(%test-on-test-end r res))))
(%test-report-result)))))
(cond-expand
((or kawa mzscheme guile-2)
  ;; However, I haven't gotten the quoting working.  FIXME.
(define-syntax test-end
(lambda (x)
(syntax-case (list x (list (syntax quote) (%test-source-line2 x))) ()
(((mac suite-name) line)
(syntax
(%test-end suite-name line)))
(((mac) line)
(syntax
(%test-end #f line))))))
(define-syntax test-assert
(lambda (x)
(syntax-case (list x (list (syntax quote) (%test-source-line2 x))) ()
(((mac tname expr) line)
(syntax
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r (cons (cons 'test-name tname) line))
(%test-comp1body r expr))))
(((mac expr) line)
(syntax
(let* ((r (test-runner-get)))
(test-result-alist! r line)
(%test-comp1body r expr)))))))
(define (%test-comp2 comp x)
(syntax-case (list x (list (syntax quote) (%test-source-line2 x)) comp) ()
(((mac tname expected expr) line comp)
(syntax
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r (cons (cons 'test-name tname) line))
(%test-comp2body r comp expected expr))))
(((mac expected expr) line comp)
(syntax
(let* ((r (test-runner-get)))
(test-result-alist! r line)
(%test-comp2body r comp expected expr))))))
(define-syntax test-eqv
(lambda (x) (%test-comp2 (syntax eqv?) x)))
(define-syntax test-eq
(lambda (x) (%test-comp2 (syntax eq?) x)))
(define-syntax test-equal
(lambda (x) (%test-comp2 (syntax equal?) x)))
FIXME - needed for
(lambda (x)
(syntax-case (list x (list (syntax quote) (%test-source-line2 x))) ()
(((mac tname expected expr error) line)
(syntax
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r (cons (cons 'test-name tname) line))
(%test-comp2body r (%test-approximate= error) expected expr))))
(((mac expected expr error) line)
(syntax
(let* ((r (test-runner-get)))
(test-result-alist! r line)
(%test-comp2body r (%test-approximate= error) expected expr))))))))
(else
(define-syntax test-end
(syntax-rules ()
((test-end)
(%test-end #f '()))
((test-end suite-name)
(%test-end suite-name '()))))
(define-syntax test-assert
(syntax-rules ()
((test-assert tname test-expression)
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r '((test-name . tname)))
(%test-comp1body r test-expression)))
((test-assert test-expression)
(let* ((r (test-runner-get)))
(test-result-alist! r '())
(%test-comp1body r test-expression)))))
(define-syntax %test-comp2
(syntax-rules ()
((%test-comp2 comp tname expected expr)
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r (list (cons 'test-name tname)))
(%test-comp2body r comp expected expr)))
((%test-comp2 comp expected expr)
(let* ((r (test-runner-get)))
(test-result-alist! r '())
(%test-comp2body r comp expected expr)))))
(define-syntax test-equal
(syntax-rules ()
((test-equal . rest)
(%test-comp2 equal? . rest))))
(define-syntax test-eqv
(syntax-rules ()
((test-eqv . rest)
(%test-comp2 eqv? . rest))))
(define-syntax test-eq
(syntax-rules ()
((test-eq . rest)
(%test-comp2 eq? . rest))))
(define-syntax test-approximate
(syntax-rules ()
((test-approximate tname expected expr error)
(%test-comp2 (%test-approximate= error) tname expected expr))
((test-approximate expected expr error)
(%test-comp2 (%test-approximate= error) expected expr))))))
(cond-expand
(guile
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(cond ((%test-on-test-begin r)
(let ((et etype))
(test-result-set! r 'expected-error et)
(%test-on-test-end r
(catch #t
(lambda ()
(test-result-set! r 'actual-value expr)
#f)
(lambda (key . args)
                         ;; TODO: decide how to specify expected
                         ;; error types for Guile.
(test-result-set! r 'actual-error
(cons key args))
#t)))
(%test-report-result))))))))
(mzscheme
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(%test-comp1body r (with-handlers (((lambda (h) #t) (lambda (h) #t)))
(let ()
(test-result-set! r 'actual-value expr)
#f)))))))
(chicken
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(%test-comp1body r (condition-case expr (ex () #t)))))))
(kawa
(define-syntax %test-error
(syntax-rules ()
((%test-error r #t expr)
(cond ((%test-on-test-begin r)
(test-result-set! r 'expected-error #t)
(%test-on-test-end r
(try-catch
(let ()
(test-result-set! r 'actual-value expr)
#f)
(ex <java.lang.Throwable>
(test-result-set! r 'actual-error ex)
#t)))
(%test-report-result))))
((%test-error r etype expr)
(if (%test-on-test-begin r)
(let ((et etype))
(test-result-set! r 'expected-error et)
(%test-on-test-end r
(try-catch
(let ()
(test-result-set! r 'actual-value expr)
#f)
(ex <java.lang.Throwable>
(test-result-set! r 'actual-error ex)
(cond ((and (instance? et <gnu.bytecode.ClassType>)
(gnu.bytecode.ClassType:isSubclass et <java.lang.Throwable>))
(instance? ex et))
(else #t)))))
(%test-report-result)))))))
((and srfi-34 srfi-35)
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(%test-comp1body r (guard (ex ((condition-type? etype)
(and (condition? ex) (condition-has-type? ex etype)))
((procedure? etype)
(etype ex))
((equal? etype #t)
#t)
(else #t))
expr #f))))))
(srfi-34
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(%test-comp1body r (guard (ex (else #t)) expr #f))))))
(else
(define-syntax %test-error
(syntax-rules ()
((%test-error r etype expr)
(begin
((test-runner-on-test-begin r) r)
(test-result-set! r 'result-kind 'skip)
(%test-report-result)))))))
(cond-expand
((or kawa mzscheme guile-2)
(define-syntax test-error
(lambda (x)
(syntax-case (list x (list (syntax quote) (%test-source-line2 x))) ()
(((mac tname etype expr) line)
(syntax
(let* ((r (test-runner-get))
(name tname))
(test-result-alist! r (cons (cons 'test-name tname) line))
(%test-error r etype expr))))
(((mac etype expr) line)
(syntax
(let* ((r (test-runner-get)))
(test-result-alist! r line)
(%test-error r etype expr))))
(((mac expr) line)
(syntax
(let* ((r (test-runner-get)))
(test-result-alist! r line)
(%test-error r #t expr))))))))
(else
(define-syntax test-error
(syntax-rules ()
((test-error name etype expr)
(let ((r (test-runner-get)))
(test-result-alist! r `((test-name . ,name)))
(%test-error r etype expr)))
((test-error etype expr)
(let ((r (test-runner-get)))
(test-result-alist! r '())
(%test-error r etype expr)))
((test-error expr)
(let ((r (test-runner-get)))
(test-result-alist! r '())
(%test-error r #t expr)))))))
(define (test-apply first . rest)
(if (test-runner? first)
(test-with-runner first (apply test-apply rest))
(let ((r (test-runner-current)))
(if r
(let ((run-list (%test-runner-run-list r)))
(cond ((null? rest)
(%test-runner-run-list! r (reverse run-list))
(else
(%test-runner-run-list!
r
(if (eq? run-list #t) (list first) (cons first run-list)))
(apply test-apply rest)
(%test-runner-run-list! r run-list))))
(let ((r (test-runner-create)))
(test-with-runner r (apply test-apply first rest))
((test-runner-on-final r) r))))))
(define-syntax test-with-runner
(syntax-rules ()
((test-with-runner runner form ...)
(let ((saved-runner (test-runner-current)))
(dynamic-wind
(lambda () (test-runner-current runner))
(lambda () form ...)
(lambda () (test-runner-current saved-runner)))))))
(define (%test-match-nth n count)
(let ((i 0))
(lambda (runner)
(set! i (+ i 1))
(and (>= i n) (< i (+ n count))))))
(define-syntax test-match-nth
(syntax-rules ()
((test-match-nth n)
(test-match-nth n 1))
((test-match-nth n count)
(%test-match-nth n count))))
(define (%test-match-all . pred-list)
(lambda (runner)
(let ((result #t))
(let loop ((l pred-list))
(if (null? l)
result
(begin
(if (not ((car l) runner))
(set! result #f))
(loop (cdr l))))))))
(define-syntax test-match-all
(syntax-rules ()
((test-match-all pred ...)
(%test-match-all (%test-as-specifier pred) ...))))
(define (%test-match-any . pred-list)
(lambda (runner)
(let ((result #f))
(let loop ((l pred-list))
(if (null? l)
result
(begin
(if ((car l) runner)
(set! result #t))
(loop (cdr l))))))))
(define-syntax test-match-any
(syntax-rules ()
((test-match-any pred ...)
(%test-match-any (%test-as-specifier pred) ...))))
Coerce to a predicate function :
(define (%test-as-specifier specifier)
(cond ((procedure? specifier) specifier)
((integer? specifier) (test-match-nth 1 specifier))
((string? specifier) (test-match-name specifier))
(else
(error "not a valid test specifier"))))
(define-syntax test-skip
(syntax-rules ()
((test-skip pred ...)
(let ((runner (test-runner-get)))
(%test-runner-skip-list! runner
(cons (test-match-all (%test-as-specifier pred) ...)
(%test-runner-skip-list runner)))))))
(define-syntax test-expect-fail
(syntax-rules ()
((test-expect-fail pred ...)
(let ((runner (test-runner-get)))
(%test-runner-fail-list! runner
(cons (test-match-all (%test-as-specifier pred) ...)
(%test-runner-fail-list runner)))))))
(define (test-match-name name)
(lambda (runner)
(equal? name (test-runner-test-name runner))))
(define (test-read-eval-string string)
(let* ((port (open-input-string string))
(form (read port)))
(if (eof-object? (read-char port))
(cond-expand
(guile (eval form (current-module)))
(else (eval form)))
(cond-expand
(srfi-23 (error "(not at eof)"))
(else "error")))))
|
2e4dce4b5ff37eeb86a03ab858f0720fc99b4319e8327f8e9466fc7cc1556c77 | LexiFi/dead_code_analyzer | hidden_opt_use.ml | let f ?a ?b x = x
let apply (f: ?a:'a -> ?b:'b -> 'c -> 'd) ?a x = f ?a ~b:0 x
let () = apply f 2 |> ignore
| null | https://raw.githubusercontent.com/LexiFi/dead_code_analyzer/c44dc2ea5ccb13df2145e9316e21c39f09dad506/examples/dir/hidden_opt_use.ml | ocaml | let f ?a ?b x = x
let apply (f: ?a:'a -> ?b:'b -> 'c -> 'd) ?a x = f ?a ~b:0 x
let () = apply f 2 |> ignore
| |
2441c977beab2f62d9677567f822364439da5d3408d9b03104e32727a160ec7c | SquidDev/illuaminate | illuaminateSemantics.ml | module Control = Control
module Global = Global
module Pure = Pure
module Reference = Reference
module Resolve = Resolve
module Module_resolve = Module_resolve
module Namespace = Namespace
module Doc = struct
module AbstractSyntax = Doc_abstract_syntax
module Comment = Doc_comment
module Parser = Doc_parser
module Syntax = Doc_syntax
module Extract = Doc_extract
end
module Type = struct
module Syntax = Type_syntax
end
module Stringlib = struct
module Format = struct
include String_format
type t = specifier list
let parse str = Lexing.from_string str |> format []
end
module Literal = struct
open IlluaminateCore
include String_escape
type t = component list
let parse node = Lexing.from_string (Node.contents.get node) |> string_of (Node.span node)
end
end
module Ident = struct
module StringSet = Set.Make (String)
let keywords =
StringSet.of_list
[ "and";
"break";
"do";
"else";
"elseif";
"end";
"false";
"for";
"function";
"if";
"in";
"local";
"nil";
"not";
"or";
"repeat";
"return";
"then";
"true";
"until";
"while"
]
let is_start = function
| '_' | 'A' .. 'Z' | 'a' .. 'z' -> true
| _ -> false
let is_rest = function
| '_' | 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' -> true
| _ -> false
let is x =
String.length x > 0
&& is_start x.[0]
&& CCString.for_all is_rest x
&& not (StringSet.mem x keywords)
end
| null | https://raw.githubusercontent.com/SquidDev/illuaminate/da18b101b4710881b71c42554d70a3a7d17c3cd6/src/semantics/illuaminateSemantics.ml | ocaml | module Control = Control
module Global = Global
module Pure = Pure
module Reference = Reference
module Resolve = Resolve
module Module_resolve = Module_resolve
module Namespace = Namespace
module Doc = struct
module AbstractSyntax = Doc_abstract_syntax
module Comment = Doc_comment
module Parser = Doc_parser
module Syntax = Doc_syntax
module Extract = Doc_extract
end
module Type = struct
module Syntax = Type_syntax
end
module Stringlib = struct
module Format = struct
include String_format
type t = specifier list
let parse str = Lexing.from_string str |> format []
end
module Literal = struct
open IlluaminateCore
include String_escape
type t = component list
let parse node = Lexing.from_string (Node.contents.get node) |> string_of (Node.span node)
end
end
module Ident = struct
module StringSet = Set.Make (String)
let keywords =
StringSet.of_list
[ "and";
"break";
"do";
"else";
"elseif";
"end";
"false";
"for";
"function";
"if";
"in";
"local";
"nil";
"not";
"or";
"repeat";
"return";
"then";
"true";
"until";
"while"
]
let is_start = function
| '_' | 'A' .. 'Z' | 'a' .. 'z' -> true
| _ -> false
let is_rest = function
| '_' | 'A' .. 'Z' | 'a' .. 'z' | '0' .. '9' -> true
| _ -> false
let is x =
String.length x > 0
&& is_start x.[0]
&& CCString.for_all is_rest x
&& not (StringSet.mem x keywords)
end
| |
0ec92abff261d72651cc7f39657b53d57e03fddfdddd4b8cfeac95a8f0669ba3 | input-output-hk/voting-tools | Registration.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
-- | Handles configuration, which involves parsing command line
-- arguments and reading key files.
module Config.Registration
( Config(Config)
, ConfigError
, opts
, mkConfig
, Opts(Opts)
, parseOpts
, MetadataOutFormat(..)
) where
import Cardano.Catalyst.Registration (DelegationWeight, Delegations (..))
import Control.Exception.Safe (try)
import Control.Lens ((#))
import Control.Lens.TH
import Control.Monad.Except (ExceptT, MonadError, throwError)
import Control.Monad.IO.Class (MonadIO, liftIO)
import qualified Data.Attoparsec.ByteString.Char8 as Atto
import qualified Data.ByteString.Char8 as BC
import Data.Char (isSpace)
import Data.Foldable (asum)
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Data.Traversable (forM)
import Data.Word (Word32)
import Options.Applicative
import Cardano.Api (Bech32DecodeError, StakeAddress)
import qualified Cardano.Api as Api
import Cardano.CLI.Shelley.Key (InputDecodeError)
import Cardano.CLI.Types (SigningKeyFile (..))
import Cardano.Catalyst.Crypto (StakeSigningKey, readStakeSigningKeyFile)
import Config.Common (versionOption)
import Cardano.API.Extended (AsBech32DecodeError (_Bech32DecodeError),
AsFileError (_FileIOError, __FileError), AsInputDecodeError (_InputDecodeError),
AsType (AsVotingKeyPublic), VotingKeyPublic, deserialiseFromBech32',
parseStakeAddress, readerFromAttoParser)
data Config = Config
{ cfgRewardsAddress :: StakeAddress
, cfgStakeSigningKey :: StakeSigningKey
, cfgDelegations :: Delegations VotingKeyPublic
, cfgSlotNo :: Api.SlotNo
, cfgOutFormat :: MetadataOutFormat
}
deriving (Show)
data MetadataOutFormat = MetadataOutFormatJSON
| MetadataOutFormatCBOR
deriving (Eq, Show)
data FileErrors = FileErrorInputDecode InputDecodeError
deriving (Show)
makePrisms ''FileErrors
instance AsInputDecodeError FileErrors where
_InputDecodeError = _FileErrorInputDecode
data ConfigError = ConfigFailedToReadFile (Api.FileError FileErrors)
| ConfigFailedToDecodeBech32 Bech32DecodeError
deriving (Show)
makePrisms ''ConfigError
instance AsFileError ConfigError FileErrors where
__FileError = _ConfigFailedToReadFile
instance AsBech32DecodeError ConfigError where
_Bech32DecodeError = _ConfigFailedToDecodeBech32
mkConfig
:: Opts
-> ExceptT ConfigError IO Config
mkConfig (Opts rewardsAddr delegations vskf slotNo outFormat) = do
stkSign <- readStakeSigningKeyFile (SigningKeyFile vskf)
delegations' <- case delegations of
LegacyDelegationCLI vpkf -> do
votepk <- readVotePublicKey vpkf
pure $ Delegations $ (votepk, 1) NE.:| []
DelegationsCLI keyWeights ->
fmap Delegations . forM keyWeights $ \(vpkf, weight) -> do
votepk <- readVotePublicKey vpkf
pure $ (votepk, weight)
pure $ Config rewardsAddr stkSign delegations' slotNo outFormat
data Opts = Opts
{ optRewardsAddress :: StakeAddress
, optVotePublicKeyFile :: DelegationsCLI
, optStakeSigningKeyFile :: FilePath
, optSlotNo :: Api.SlotNo
, optOutFormat :: MetadataOutFormat
}
deriving (Show)
parseOpts :: Parser Opts
parseOpts = Opts
<$> option (readerFromAttoParser parseStakeAddress) (long "rewards-address" <> metavar "STRING" <> help "address associated with rewards (Must be a stake address for MIR Certificate)")
<*> pDelegationsCLI
<*> strOption (long "stake-signing-key-file" <> metavar "FILE" <> help "stake authorizing vote key")
<*> pSlotNo
<*> pOutFormat
data DelegationsCLI
= LegacyDelegationCLI FilePath
| DelegationsCLI (NonEmpty (FilePath, DelegationWeight))
deriving Show
pDelegationLegacy :: Parser DelegationsCLI
pDelegationLegacy =
LegacyDelegationCLI
<$> strOption (
long "vote-public-key-file"
<> metavar "FILE"
<> help "vote key generated by jcli (corresponding private key must be ed25519extended)"
)
pDelegationCIP36 :: Parser (FilePath, Word32)
pDelegationCIP36 =
option
(readerFromAttoParser pSingleDelegationCIP36)
( long "delegate"
<> metavar "FILE,WEIGHT"
<> help "ED25519Extended public voting key and delegation weight"
)
pSingleDelegationCIP36 :: Atto.Parser (FilePath, Word32)
pSingleDelegationCIP36 = do
_ <- pSpace
vpkf <- pVotePublicKeyFile
_ <- Atto.string ","
weight <- pKeyWeight
pure $ (vpkf, weight)
where
sep = ','
isSep = (== sep)
pSpace = Atto.skipWhile isSpace
pVotePublicKeyFile = BC.unpack <$> Atto.takeWhile1 (not . isSep)
pKeyWeight = Atto.decimal
pDelegationsCLI :: Parser DelegationsCLI
pDelegationsCLI =
pDelegationLegacy
<|> ((DelegationsCLI . NE.fromList) <$> (some pDelegationCIP36))
opts :: ParserInfo Opts
opts =
info
( parseOpts <**> versionOption "0.3.0.0" <**> helper )
( fullDesc
<> progDesc "Create vote registration metadata"
<> header "voter-registration - a tool to create vote registration metadata suitable for attaching to a transaction"
)
stripTrailingNewlines :: Text -> Text
stripTrailingNewlines = T.intercalate "\n" . filter (not . T.null) . T.lines
readVotePublicKey
:: ( MonadIO m
, MonadError e m
, AsFileError e d
, AsBech32DecodeError e
)
=> FilePath
-> m VotingKeyPublic
readVotePublicKey path = do
result <- liftIO . try $ TIO.readFile path
raw <- either (\e -> throwError . (_FileIOError #) $ (path, e)) pure result
let publicKeyBech32 = stripTrailingNewlines raw
either (throwError . (_Bech32DecodeError #)) pure $ deserialiseFromBech32' AsVotingKeyPublic publicKeyBech32
pOutFormat :: Parser MetadataOutFormat
pOutFormat = asum
[ flag' MetadataOutFormatJSON
( long "json"
<> help "Output metadata in JSON format (using the 'NoSchema' TxMetadata JSON format - the default for cardano-cli)"
)
, flag' MetadataOutFormatCBOR
( long "cbor"
<> help "Output metadata in binary CBOR format"
)
]
pSlotNo :: Parser Api.SlotNo
pSlotNo = Api.SlotNo
<$> option auto
( long "slot-no"
<> metavar "WORD64"
<> help "Slot number to encode in vote registration. Used to prevent replay attacks. Use the chain tip if you're unsure."
)
| null | https://raw.githubusercontent.com/input-output-hk/voting-tools/4566028444a44518b05e42b5c06da4d6bda1f34c/registration/src/Config/Registration.hs | haskell | # LANGUAGE OverloadedStrings #
| Handles configuration, which involves parsing command line
arguments and reading key files. | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TemplateHaskell #
module Config.Registration
( Config(Config)
, ConfigError
, opts
, mkConfig
, Opts(Opts)
, parseOpts
, MetadataOutFormat(..)
) where
import Cardano.Catalyst.Registration (DelegationWeight, Delegations (..))
import Control.Exception.Safe (try)
import Control.Lens ((#))
import Control.Lens.TH
import Control.Monad.Except (ExceptT, MonadError, throwError)
import Control.Monad.IO.Class (MonadIO, liftIO)
import qualified Data.Attoparsec.ByteString.Char8 as Atto
import qualified Data.ByteString.Char8 as BC
import Data.Char (isSpace)
import Data.Foldable (asum)
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.IO as TIO
import Data.Traversable (forM)
import Data.Word (Word32)
import Options.Applicative
import Cardano.Api (Bech32DecodeError, StakeAddress)
import qualified Cardano.Api as Api
import Cardano.CLI.Shelley.Key (InputDecodeError)
import Cardano.CLI.Types (SigningKeyFile (..))
import Cardano.Catalyst.Crypto (StakeSigningKey, readStakeSigningKeyFile)
import Config.Common (versionOption)
import Cardano.API.Extended (AsBech32DecodeError (_Bech32DecodeError),
AsFileError (_FileIOError, __FileError), AsInputDecodeError (_InputDecodeError),
AsType (AsVotingKeyPublic), VotingKeyPublic, deserialiseFromBech32',
parseStakeAddress, readerFromAttoParser)
data Config = Config
{ cfgRewardsAddress :: StakeAddress
, cfgStakeSigningKey :: StakeSigningKey
, cfgDelegations :: Delegations VotingKeyPublic
, cfgSlotNo :: Api.SlotNo
, cfgOutFormat :: MetadataOutFormat
}
deriving (Show)
data MetadataOutFormat = MetadataOutFormatJSON
| MetadataOutFormatCBOR
deriving (Eq, Show)
data FileErrors = FileErrorInputDecode InputDecodeError
deriving (Show)
makePrisms ''FileErrors
instance AsInputDecodeError FileErrors where
_InputDecodeError = _FileErrorInputDecode
data ConfigError = ConfigFailedToReadFile (Api.FileError FileErrors)
| ConfigFailedToDecodeBech32 Bech32DecodeError
deriving (Show)
makePrisms ''ConfigError
instance AsFileError ConfigError FileErrors where
__FileError = _ConfigFailedToReadFile
instance AsBech32DecodeError ConfigError where
_Bech32DecodeError = _ConfigFailedToDecodeBech32
mkConfig
:: Opts
-> ExceptT ConfigError IO Config
mkConfig (Opts rewardsAddr delegations vskf slotNo outFormat) = do
stkSign <- readStakeSigningKeyFile (SigningKeyFile vskf)
delegations' <- case delegations of
LegacyDelegationCLI vpkf -> do
votepk <- readVotePublicKey vpkf
pure $ Delegations $ (votepk, 1) NE.:| []
DelegationsCLI keyWeights ->
fmap Delegations . forM keyWeights $ \(vpkf, weight) -> do
votepk <- readVotePublicKey vpkf
pure $ (votepk, weight)
pure $ Config rewardsAddr stkSign delegations' slotNo outFormat
data Opts = Opts
{ optRewardsAddress :: StakeAddress
, optVotePublicKeyFile :: DelegationsCLI
, optStakeSigningKeyFile :: FilePath
, optSlotNo :: Api.SlotNo
, optOutFormat :: MetadataOutFormat
}
deriving (Show)
parseOpts :: Parser Opts
parseOpts = Opts
<$> option (readerFromAttoParser parseStakeAddress) (long "rewards-address" <> metavar "STRING" <> help "address associated with rewards (Must be a stake address for MIR Certificate)")
<*> pDelegationsCLI
<*> strOption (long "stake-signing-key-file" <> metavar "FILE" <> help "stake authorizing vote key")
<*> pSlotNo
<*> pOutFormat
data DelegationsCLI
= LegacyDelegationCLI FilePath
| DelegationsCLI (NonEmpty (FilePath, DelegationWeight))
deriving Show
pDelegationLegacy :: Parser DelegationsCLI
pDelegationLegacy =
LegacyDelegationCLI
<$> strOption (
long "vote-public-key-file"
<> metavar "FILE"
<> help "vote key generated by jcli (corresponding private key must be ed25519extended)"
)
pDelegationCIP36 :: Parser (FilePath, Word32)
pDelegationCIP36 =
option
(readerFromAttoParser pSingleDelegationCIP36)
( long "delegate"
<> metavar "FILE,WEIGHT"
<> help "ED25519Extended public voting key and delegation weight"
)
pSingleDelegationCIP36 :: Atto.Parser (FilePath, Word32)
pSingleDelegationCIP36 = do
_ <- pSpace
vpkf <- pVotePublicKeyFile
_ <- Atto.string ","
weight <- pKeyWeight
pure $ (vpkf, weight)
where
sep = ','
isSep = (== sep)
pSpace = Atto.skipWhile isSpace
pVotePublicKeyFile = BC.unpack <$> Atto.takeWhile1 (not . isSep)
pKeyWeight = Atto.decimal
pDelegationsCLI :: Parser DelegationsCLI
pDelegationsCLI =
pDelegationLegacy
<|> ((DelegationsCLI . NE.fromList) <$> (some pDelegationCIP36))
opts :: ParserInfo Opts
opts =
info
( parseOpts <**> versionOption "0.3.0.0" <**> helper )
( fullDesc
<> progDesc "Create vote registration metadata"
<> header "voter-registration - a tool to create vote registration metadata suitable for attaching to a transaction"
)
stripTrailingNewlines :: Text -> Text
stripTrailingNewlines = T.intercalate "\n" . filter (not . T.null) . T.lines
readVotePublicKey
:: ( MonadIO m
, MonadError e m
, AsFileError e d
, AsBech32DecodeError e
)
=> FilePath
-> m VotingKeyPublic
readVotePublicKey path = do
result <- liftIO . try $ TIO.readFile path
raw <- either (\e -> throwError . (_FileIOError #) $ (path, e)) pure result
let publicKeyBech32 = stripTrailingNewlines raw
either (throwError . (_Bech32DecodeError #)) pure $ deserialiseFromBech32' AsVotingKeyPublic publicKeyBech32
pOutFormat :: Parser MetadataOutFormat
pOutFormat = asum
[ flag' MetadataOutFormatJSON
( long "json"
<> help "Output metadata in JSON format (using the 'NoSchema' TxMetadata JSON format - the default for cardano-cli)"
)
, flag' MetadataOutFormatCBOR
( long "cbor"
<> help "Output metadata in binary CBOR format"
)
]
pSlotNo :: Parser Api.SlotNo
pSlotNo = Api.SlotNo
<$> option auto
( long "slot-no"
<> metavar "WORD64"
<> help "Slot number to encode in vote registration. Used to prevent replay attacks. Use the chain tip if you're unsure."
)
|
3a8ba61070657e0612447a99ced11f6f097290930b49dd658f8b641def674202 | mstewartgallus/hs-callbypushvalue | AsPorcelain.hs | # LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
module AsPorcelain (Porcelain, extract) where
import Common
import Constant
import Cps
import Data.Text
import HasCode
import HasConstants
import HasData
import HasLet
import HasStack
import HasTerminal
import HasTuple
import TextShow
import qualified Unique
extract :: Data Porcelain a -> Text
extract (D val) = toText (Unique.run val)
ws :: Builder
ws = fromString " "
lp :: Builder
lp = fromString "("
rp :: Builder
rp = fromString ")"
atom :: String -> Builder
atom = fromString
node :: [Builder] -> Builder
node x = lp <> mconcat (interleave ws x) <> rp
interleave :: a -> [a] -> [a]
interleave x = w
where
w [] = []
w l@[_] = l
w (h : t) = h : x : w t
fresh :: Unique.State Builder
fresh = do
v <- Unique.uniqueId
pure $ fromString "v" <> showb v
pType :: SSet a -> Builder
pType = showb
pAction :: SAlgebra a -> Builder
pAction = showb
data Porcelain
instance HasData Porcelain where
newtype Data Porcelain a = D (Unique.State Builder)
instance HasCode Porcelain where
newtype Code Porcelain a = C (Unique.State Builder)
instance HasStack Porcelain where
newtype Stack Porcelain a = S (Unique.State Builder)
instance HasConstants Porcelain where
constant (U64Constant x) = D $ pure $ node [atom "u64", showb x]
instance HasTuple Porcelain
instance HasLet Porcelain where
letBe (D x) f = C $ do
x' <- x
v <- fresh
let C body = f (D $ pure v)
body' <- body
pure $ node [atom "be", x', body']
instance HasTerminal Porcelain where
terminal = D $ do
pure $ atom "terminal"
instance HasLabel Porcelain where
label (S x) f = C $ do
x' <- x
v <- fresh
let C body = f (S $ pure v)
body' <- body
pure $ node [atom "label", x', body']
instance HasThunk Porcelain where
force (D th) (S k) = C $ do
thunk' <- th
k' <- k
pure $ node [atom "force", thunk', k']
thunk t f = D $ do
v <- fresh
let C body = f (S $ pure v)
body' <- body
pure $ node [atom "thunk", v, pAction t, body']
instance HasReturn Porcelain where
returns (D value) (S k) = C $ do
k' <- k
value' <- value
pure $ node [atom "return", value', k']
letTo t f = S $ do
v <- fresh
let C body = f (D $ pure v)
body' <- body
pure $ node [atom "to", v, pType t, body']
instance HasFn Porcelain where
D h <*> S t = S $ do
h' <- h
t' <- t
pure $ node [atom "apply", h', t']
lambda (S k) f = C $ do
k' <- k
x <- fresh
n <- fresh
let C body = f (D $ pure x) (S $ pure n)
body' <- body
pure $ node [atom "lambda", k', x, n, body']
instance HasCall Porcelain where
call g = D $ do
pure $ node [atom "call", showb g]
| null | https://raw.githubusercontent.com/mstewartgallus/hs-callbypushvalue/d8770b7e9e444e1261901f5ee435fcefb0f7ad75/src/AsPorcelain.hs | haskell | # LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
module AsPorcelain (Porcelain, extract) where
import Common
import Constant
import Cps
import Data.Text
import HasCode
import HasConstants
import HasData
import HasLet
import HasStack
import HasTerminal
import HasTuple
import TextShow
import qualified Unique
extract :: Data Porcelain a -> Text
extract (D val) = toText (Unique.run val)
ws :: Builder
ws = fromString " "
lp :: Builder
lp = fromString "("
rp :: Builder
rp = fromString ")"
atom :: String -> Builder
atom = fromString
node :: [Builder] -> Builder
node x = lp <> mconcat (interleave ws x) <> rp
interleave :: a -> [a] -> [a]
interleave x = w
where
w [] = []
w l@[_] = l
w (h : t) = h : x : w t
fresh :: Unique.State Builder
fresh = do
v <- Unique.uniqueId
pure $ fromString "v" <> showb v
pType :: SSet a -> Builder
pType = showb
pAction :: SAlgebra a -> Builder
pAction = showb
data Porcelain
instance HasData Porcelain where
newtype Data Porcelain a = D (Unique.State Builder)
instance HasCode Porcelain where
newtype Code Porcelain a = C (Unique.State Builder)
instance HasStack Porcelain where
newtype Stack Porcelain a = S (Unique.State Builder)
instance HasConstants Porcelain where
constant (U64Constant x) = D $ pure $ node [atom "u64", showb x]
instance HasTuple Porcelain
instance HasLet Porcelain where
letBe (D x) f = C $ do
x' <- x
v <- fresh
let C body = f (D $ pure v)
body' <- body
pure $ node [atom "be", x', body']
instance HasTerminal Porcelain where
terminal = D $ do
pure $ atom "terminal"
instance HasLabel Porcelain where
label (S x) f = C $ do
x' <- x
v <- fresh
let C body = f (S $ pure v)
body' <- body
pure $ node [atom "label", x', body']
instance HasThunk Porcelain where
force (D th) (S k) = C $ do
thunk' <- th
k' <- k
pure $ node [atom "force", thunk', k']
thunk t f = D $ do
v <- fresh
let C body = f (S $ pure v)
body' <- body
pure $ node [atom "thunk", v, pAction t, body']
instance HasReturn Porcelain where
returns (D value) (S k) = C $ do
k' <- k
value' <- value
pure $ node [atom "return", value', k']
letTo t f = S $ do
v <- fresh
let C body = f (D $ pure v)
body' <- body
pure $ node [atom "to", v, pType t, body']
instance HasFn Porcelain where
D h <*> S t = S $ do
h' <- h
t' <- t
pure $ node [atom "apply", h', t']
lambda (S k) f = C $ do
k' <- k
x <- fresh
n <- fresh
let C body = f (D $ pure x) (S $ pure n)
body' <- body
pure $ node [atom "lambda", k', x, n, body']
instance HasCall Porcelain where
call g = D $ do
pure $ node [atom "call", showb g]
| |
6601167a03f2e19c1d50d35db86b3405bcf25d195bf77ce9afe8cd9b37598198 | haskell-opengl/OpenGLRaw | AsyncPixel.hs | # LANGUAGE PatternSynonyms #
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.SGIX.AsyncPixel
Copyright : ( c ) 2019
-- License : BSD3
--
Maintainer : < >
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.SGIX.AsyncPixel (
-- * Extension Support
glGetSGIXAsyncPixel,
gl_SGIX_async_pixel,
-- * Enums
pattern GL_ASYNC_DRAW_PIXELS_SGIX,
pattern GL_ASYNC_READ_PIXELS_SGIX,
pattern GL_ASYNC_TEX_IMAGE_SGIX,
pattern GL_MAX_ASYNC_DRAW_PIXELS_SGIX,
pattern GL_MAX_ASYNC_READ_PIXELS_SGIX,
pattern GL_MAX_ASYNC_TEX_IMAGE_SGIX
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
| null | https://raw.githubusercontent.com/haskell-opengl/OpenGLRaw/57e50c9d28dfa62d6a87ae9b561af28f64ce32a0/src/Graphics/GL/SGIX/AsyncPixel.hs | haskell | ------------------------------------------------------------------------------
|
Module : Graphics.GL.SGIX.AsyncPixel
License : BSD3
Stability : stable
Portability : portable
------------------------------------------------------------------------------
* Extension Support
* Enums | # LANGUAGE PatternSynonyms #
Copyright : ( c ) 2019
Maintainer : < >
module Graphics.GL.SGIX.AsyncPixel (
glGetSGIXAsyncPixel,
gl_SGIX_async_pixel,
pattern GL_ASYNC_DRAW_PIXELS_SGIX,
pattern GL_ASYNC_READ_PIXELS_SGIX,
pattern GL_ASYNC_TEX_IMAGE_SGIX,
pattern GL_MAX_ASYNC_DRAW_PIXELS_SGIX,
pattern GL_MAX_ASYNC_READ_PIXELS_SGIX,
pattern GL_MAX_ASYNC_TEX_IMAGE_SGIX
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
|
92306a203268d343ebb7e592629047271cd6fa423cced2d237a755bbcdfdcbcb | RefactoringTools/wrangler | refac_bug_cond.erl | %% @hidden
@private
-module(refac_bug_cond).
-behaviour(gen_refac).
-export([input_par_prompts/0, select_focus/1,
check_pre_cond/1, selective/0,
transform/1, refac_bug_cond/3]).
-include("wrangler.hrl").
-spec input_par_prompts() -> [string()].
input_par_prompts() -> [].
-spec select_focus(#args{}) -> {ok, syntaxTree()} | {ok, none}.
select_focus(_Args) ->
{ok, none}.
-spec check_pre_cond(#args{}) -> ok.
check_pre_cond(_Args) ->
ok.
-spec selective() -> boolean().
selective() ->
false.
-spec transform(#args{}) ->
{ok, [{{filename(), filename()}, syntaxTree()}]}
| {error, term()}.
transform(_Args=#args{current_file_name=File})->
?FULL_BU_TP(rules(),[File]).
rules() ->
[replace_bug_cond_macro_rule(),
logic_rule_1(),
logic_rule_2(),
logic_rule_3(),
list_rule_1(),
list_rule_2(),
list_rule_3(),
list_rule_4(),
imply_rule_1(),
if_rule_1(),
if_rule_2(),
case_rule_1(),
case_rule_2(),
case_rule_3(),
guard_rule_1(),
guard_rule_2()
].
replace_bug_cond_macro_rule() ->
?RULE(?T("Expr@"),
?TO_AST("false"),
is_bug_cond_macro(Expr@)).
logic_rule_1() ->
?RULE(?T("not false"),?TO_AST("true"),true).
logic_rule_2() ->
?RULE(?T("Expr1@ orelse Expr2@"),
eval_expr('orelse', Expr1@, Expr2@),
is_bool_literal(Expr1@) orelse is_bool_literal(Expr2@)).
logic_rule_3() ->
?RULE(?T("Expr1@ andalso Expr2@"),
eval_expr('andalso', Expr1@, Expr2@),
is_bool_literal(Expr1@) orelse is_bool_literal(Expr2@)).
list_rule_1() ->
?RULE(?T("[Expr@||Es@@,true]"),
if Es@@==[] ->
?TO_AST("[Expr@]");
true ->?TO_AST("[Expr@||Es@@]")
end,
true).
list_rule_2() ->
?RULE(?T("[Expr@||Es@@,false]"),
?TO_AST("[]"), true).
list_rule_3() ->
?RULE(?T("Expr@++[]"), Expr@, true).
list_rule_4() ->
?RULE(?T("[]++Expr@"), Expr@, true).
if_rule_1() ->
?RULE(?T("if false,Conds@@ -> Body1@@;
true -> Body2@@
end"), Body2@@, true).
if_rule_2() ->
?RULE(?T("if Pats1@@@ -> Body1@@@;
false, Cond@@ -> Body2@@;
Pats3@@@ -> Body3@@@
end"),
?TO_AST("if Pats1@@@ -> Body1@@@;
Pats3@@@ -> Body3@@@
end"),
true).
guard_rule_1()->
?RULE(?T("f@(Args@@) when true, Guards@@ -> Body@@;"),
?TO_AST("f@(Args@@) when Guards@@ -> Body@@;"), true).
guard_rule_2()->
?RULE(?T("f@(Args@@) when false, Guards@@ -> Body@@;"),
?TO_AST(""), true).
imply_rule_1() ->
?RULE(?T("?IMPL(false, Expr@)"),
?TO_AST("true"), true).
case_rule_1() ->
?RULE(?T("case true of
Pats1@@@ when Guards1@@@ ->
Body1@@@;
true ->
Body1@@;
Pats2@@@ when Guards2@@@ ->
Body2@@@
end"),
Body1@@, true).
case_rule_2() ->
?RULE(?T("case false of
Pats1@@@ when Guards1@@@ ->
Body1@@@;
false ->
Body1@@;
Pats2@@@ when Guards2@@@ ->
Body2@@@
end"),
Body1@@, true).
case_rule_3() ->
?RULE(?T("case Expr@ of
Pats@@@ when Guard@@@ -> Body@@@;
Pats@@ when false -> Body@@;
Pats1@@@ when Guard1@@@ -> Body1@@@
end"),
?TO_AST(
"case Expr@ of
Pats@@@ when Guard@@@ -> Body@@@;
Pats1@@@ when Guard1@@@ -> Body1@@@
end"),
true).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
is_bug_cond_macro(Expr) ->
api_refac:type(Expr) == macro andalso
is_bug_cond_name(?PP(wrangler_misc:reset_attrs(Expr))).
is_bug_cond_name(Str) ->
Len = length(Str),
{match, [{0, Len}]} ==re:run(Str, "\\?[a-z]+_bug_[0-9]+").
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
is_bool_literal(E) ->
%% only use ?PP when E is a small literal.
alternatively , you can write = wrangler_syntax : atom_value(E )
Str = ?PP(E),
Str == "true" orelse Str=="false".
eval_expr(Op, E1, E2) ->
eval_expr_1(Op, {E1, ?PP(E1)}, {E2, ?PP(E2)}).
eval_expr_1('orelse', {E1, "true"}, _) -> E1;
eval_expr_1('orelse', {_, "false"}, {E2, _}) -> E2;
eval_expr_1('orelse', _, {E2, "true"}) -> E2;
eval_expr_1('orelse', {E1, _}, {_, "false"}) -> E1;
eval_expr_1('andalso', {_, "true"}, {E2, _}) -> E2;
eval_expr_1('andalso', {E1, "false"}, _) -> E1;
eval_expr_1('andalso', {E1,_}, {_, "true"}) -> E1;
eval_expr_1('andalso', _, {E2, "false"}) -> E2.
-spec refac_bug_cond(FileOrDirs, Editor, TabWidth) -> {ok, string()}
when FileOrDirs :: file:filename() | dir(),
Editor :: editor(),
TabWidth :: integer().
refac_bug_cond(FileOrDirs, Editor, TabWidth) ->
Files = wrangler_misc:expand_files(FileOrDirs, ".erl"),
{ok, Res}=?FULL_BU_TP(rules(), Files),
wrangler_write_file:write_refactored_files(Res,Editor,TabWidth,"").
| null | https://raw.githubusercontent.com/RefactoringTools/wrangler/1c33ad0e923bb7bcebb6fd75347638def91e50a8/src/refac_bug_cond.erl | erlang | @hidden
only use ?PP when E is a small literal. | @private
-module(refac_bug_cond).
-behaviour(gen_refac).
-export([input_par_prompts/0, select_focus/1,
check_pre_cond/1, selective/0,
transform/1, refac_bug_cond/3]).
-include("wrangler.hrl").
-spec input_par_prompts() -> [string()].
input_par_prompts() -> [].
-spec select_focus(#args{}) -> {ok, syntaxTree()} | {ok, none}.
select_focus(_Args) ->
{ok, none}.
-spec check_pre_cond(#args{}) -> ok.
check_pre_cond(_Args) ->
ok.
-spec selective() -> boolean().
selective() ->
false.
-spec transform(#args{}) ->
{ok, [{{filename(), filename()}, syntaxTree()}]}
| {error, term()}.
transform(_Args=#args{current_file_name=File})->
?FULL_BU_TP(rules(),[File]).
rules() ->
[replace_bug_cond_macro_rule(),
logic_rule_1(),
logic_rule_2(),
logic_rule_3(),
list_rule_1(),
list_rule_2(),
list_rule_3(),
list_rule_4(),
imply_rule_1(),
if_rule_1(),
if_rule_2(),
case_rule_1(),
case_rule_2(),
case_rule_3(),
guard_rule_1(),
guard_rule_2()
].
replace_bug_cond_macro_rule() ->
?RULE(?T("Expr@"),
?TO_AST("false"),
is_bug_cond_macro(Expr@)).
logic_rule_1() ->
?RULE(?T("not false"),?TO_AST("true"),true).
logic_rule_2() ->
?RULE(?T("Expr1@ orelse Expr2@"),
eval_expr('orelse', Expr1@, Expr2@),
is_bool_literal(Expr1@) orelse is_bool_literal(Expr2@)).
logic_rule_3() ->
?RULE(?T("Expr1@ andalso Expr2@"),
eval_expr('andalso', Expr1@, Expr2@),
is_bool_literal(Expr1@) orelse is_bool_literal(Expr2@)).
list_rule_1() ->
?RULE(?T("[Expr@||Es@@,true]"),
if Es@@==[] ->
?TO_AST("[Expr@]");
true ->?TO_AST("[Expr@||Es@@]")
end,
true).
list_rule_2() ->
?RULE(?T("[Expr@||Es@@,false]"),
?TO_AST("[]"), true).
list_rule_3() ->
?RULE(?T("Expr@++[]"), Expr@, true).
list_rule_4() ->
?RULE(?T("[]++Expr@"), Expr@, true).
if_rule_1() ->
?RULE(?T("if false,Conds@@ -> Body1@@;
true -> Body2@@
end"), Body2@@, true).
if_rule_2() ->
?RULE(?T("if Pats1@@@ -> Body1@@@;
false, Cond@@ -> Body2@@;
Pats3@@@ -> Body3@@@
end"),
?TO_AST("if Pats1@@@ -> Body1@@@;
Pats3@@@ -> Body3@@@
end"),
true).
guard_rule_1()->
?RULE(?T("f@(Args@@) when true, Guards@@ -> Body@@;"),
?TO_AST("f@(Args@@) when Guards@@ -> Body@@;"), true).
guard_rule_2()->
?RULE(?T("f@(Args@@) when false, Guards@@ -> Body@@;"),
?TO_AST(""), true).
imply_rule_1() ->
?RULE(?T("?IMPL(false, Expr@)"),
?TO_AST("true"), true).
case_rule_1() ->
?RULE(?T("case true of
Pats1@@@ when Guards1@@@ ->
Body1@@@;
true ->
Body1@@;
Pats2@@@ when Guards2@@@ ->
Body2@@@
end"),
Body1@@, true).
case_rule_2() ->
?RULE(?T("case false of
Pats1@@@ when Guards1@@@ ->
Body1@@@;
false ->
Body1@@;
Pats2@@@ when Guards2@@@ ->
Body2@@@
end"),
Body1@@, true).
case_rule_3() ->
?RULE(?T("case Expr@ of
Pats@@@ when Guard@@@ -> Body@@@;
Pats@@ when false -> Body@@;
Pats1@@@ when Guard1@@@ -> Body1@@@
end"),
?TO_AST(
"case Expr@ of
Pats@@@ when Guard@@@ -> Body@@@;
Pats1@@@ when Guard1@@@ -> Body1@@@
end"),
true).
is_bug_cond_macro(Expr) ->
api_refac:type(Expr) == macro andalso
is_bug_cond_name(?PP(wrangler_misc:reset_attrs(Expr))).
is_bug_cond_name(Str) ->
Len = length(Str),
{match, [{0, Len}]} ==re:run(Str, "\\?[a-z]+_bug_[0-9]+").
is_bool_literal(E) ->
alternatively , you can write = wrangler_syntax : atom_value(E )
Str = ?PP(E),
Str == "true" orelse Str=="false".
eval_expr(Op, E1, E2) ->
eval_expr_1(Op, {E1, ?PP(E1)}, {E2, ?PP(E2)}).
eval_expr_1('orelse', {E1, "true"}, _) -> E1;
eval_expr_1('orelse', {_, "false"}, {E2, _}) -> E2;
eval_expr_1('orelse', _, {E2, "true"}) -> E2;
eval_expr_1('orelse', {E1, _}, {_, "false"}) -> E1;
eval_expr_1('andalso', {_, "true"}, {E2, _}) -> E2;
eval_expr_1('andalso', {E1, "false"}, _) -> E1;
eval_expr_1('andalso', {E1,_}, {_, "true"}) -> E1;
eval_expr_1('andalso', _, {E2, "false"}) -> E2.
-spec refac_bug_cond(FileOrDirs, Editor, TabWidth) -> {ok, string()}
when FileOrDirs :: file:filename() | dir(),
Editor :: editor(),
TabWidth :: integer().
refac_bug_cond(FileOrDirs, Editor, TabWidth) ->
Files = wrangler_misc:expand_files(FileOrDirs, ".erl"),
{ok, Res}=?FULL_BU_TP(rules(), Files),
wrangler_write_file:write_refactored_files(Res,Editor,TabWidth,"").
|
f2d65d24b4933041eb11f198a67265a43a29e180685ede6cc4baeba3df0725b9 | rmloveland/scheme48-0.53 | dump.scm | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
; Dump and restore
; Unix has special meanings for
ETX , FS , DEL , ETB , NAK , DC2 , EOT , EM ( or SUB ? ) , DC3 , , SI , SYN ,
3 28 127 23 21 18 4 25 26 19 17 15 22
; so avoid using them.
(define type/null #\n)
(define type/true #\t)
(define type/false #\f)
(define type/unspecific #\u)
obj1 obj2
(define type/string #\s) ;length chars
# chars rep
(define type/symbol #\y) ;length chars
(define type/char #\c) ;char
(define type/vector #\v) ;length objects
(define type/template #\a) ;length objects
length bytes ( each byte is 2 hex digits ? )
uid
(define type/closure #\q) ;template-info
(define type/ellipsis #\e)
(define type/random #\r)
; Recursive entry
(define (dump obj write-char depth)
(cond ((null? obj) (dump-type type/null write-char))
((eq? obj #t) (dump-type type/true write-char))
((eq? obj #f) (dump-type type/false write-char))
((pair? obj) (dump-pair obj write-char depth))
;; Template case needs to precede vector case
((template? obj) (dump-template obj write-char depth))
((vector? obj) (dump-vector obj write-char depth))
((symbol? obj) (dump-symbol obj write-char))
((number? obj) (dump-number obj write-char))
((string? obj) (dump-string obj write-char))
((char? obj) (dump-char-literal obj write-char))
((code-vector? obj) (dump-code-vector obj write-char))
((location? obj) (dump-location obj write-char))
((unspecific? obj) (dump-type type/unspecific write-char))
((closure? obj) (dump-closure obj write-char))
(else (dump-random obj write-char depth))))
(define (restore read-char)
(let ((type (restore-type read-char)))
((vector-ref restorers (char->ascii type)) type read-char)))
(define restorers
(make-vector 256 (lambda (type read-char)
Invalid type
(error "invalid type code" type))))
(define (define-restorer! type proc)
(vector-set! restorers (char->ascii type) proc))
; Particular dumpers & restorers
(define-restorer! type/null (lambda (c read-char) '()))
(define-restorer! type/false (lambda (c read-char) #f))
(define-restorer! type/true (lambda (c read-char) #t))
(define-restorer! type/unspecific (lambda (c read-char) (if #f #f)))
; Pairs
(define (dump-pair obj write-char depth)
(if (= depth 0)
(dump-ellipsis obj write-char)
(let ((depth (- depth 1)))
(dump-type type/pair write-char)
(dump (car obj) write-char depth)
(dump (cdr obj) write-char depth))))
(define-restorer! type/pair
(lambda (c write-char)
c ;ignored
(let ((the-car (restore write-char)))
(cons the-car (restore write-char)))))
; Symbols
(define (dump-symbol obj write-char)
(dump-type type/symbol write-char)
(dump-a-string (symbol-case-converter (symbol->string obj)) write-char))
(define-restorer! type/symbol
(lambda (c read-char)
c ;ignored
(string->symbol (symbol-case-converter (restore-a-string read-char)))))
; Numbers
<space> ... _ represent 0 ... 63 ,
{ <space> ... { _ represent 64 ... 127 , -- { is ascii 123
|<space > ... | _ represent 128 ... 191 , -- | is ascii 124
} <space> ... } _ represent 192 ... 256 . -- } is ascii 125
(define (dump-number n write-char)
(if (not (communicable-number? n))
(error "can't dump this number" n))
(if (and (integer? n)
(>= n 0)
(< n 256))
(dump-byte n write-char)
(begin (dump-type type/number write-char)
;; Note logarithmic recursion
(dump-a-string (number->string n comm-radix) write-char))))
(define (communicable-number? n) #t) ;this gets redefined in client
Dump a number between 0 and 255
(if (< n 64)
(write-char (ascii->char (+ n ascii-space)))
(begin (write-char (ascii->char (+ (arithmetic-shift n -6)
122)))
(write-char (ascii->char (+ (bitwise-and n 63)
ascii-space))))))
32
(define (restore-small-integer c read-char)
(- (char->ascii c) ascii-space))
(do ((i (+ ascii-space 63) (- i 1)))
((< i ascii-space))
(define-restorer! (ascii->char i) restore-small-integer))
(define (restore-medium-integer c read-char)
(+ (arithmetic-shift (- (char->ascii c) 122) 6)
(- (char->ascii (read-char)) ascii-space)))
(do ((i 123 (+ i 1)))
((> i 125))
(define-restorer! (ascii->char i) restore-medium-integer))
(define (restore-number read-char)
(let ((c (read-char)))
(if (char=? c type/number)
(string->number (restore-a-string read-char) comm-radix)
(let ((n (char->ascii c)))
(if (> n 122)
(restore-medium-integer c read-char)
(- n ascii-space))))))
(define-restorer! type/number
(lambda (c read-char)
c ;ignored
(string->number (restore-a-string read-char) comm-radix)))
(define comm-radix 16)
; String literals
(define (dump-string obj write-char)
(dump-type type/string write-char)
(dump-a-string obj write-char))
(define-restorer! type/string
(lambda (c read-char)
c ;ignored
(restore-a-string read-char)))
; Characters
(define (dump-char-literal obj write-char)
(dump-type type/char write-char)
(dump-a-char obj write-char))
(define-restorer! type/char
(lambda (c read-char)
c ;ignored
(restore-a-char read-char)))
; Vectors
(define (dump-vector obj write-char depth)
(dump-vector-like obj write-char depth
type/vector vector-length vector-ref))
(define (dump-template obj write-char depth)
(dump-vector-like obj write-char depth
type/template template-length template-ref))
(define (dump-vector-like obj write-char depth type vector-length vector-ref)
(if (= depth 0)
(dump-ellipsis obj write-char)
(let ((depth (- depth 1))
(len (vector-length obj)))
(dump-type type write-char)
(dump-length len write-char)
(do ((i 0 (+ i 1)))
((= i len) 'done)
(dump (vector-ref obj i) write-char depth)))))
(define (restore-vector-like make-vector vector-set!)
(lambda (c read-char)
c ;ignored
(let* ((len (restore-length read-char))
(v (make-vector len #\?)))
(do ((i 0 (+ i 1)))
((= i len) v)
(vector-set! v i (restore read-char))))))
(define-restorer! type/vector
(restore-vector-like make-vector vector-set!))
(define-restorer! type/template
(restore-vector-like make-template template-set!))
; Code vectors
(define (dump-code-vector obj write-char)
(dump-type type/code-vector write-char)
(let ((len (code-vector-length obj)))
(dump-length len write-char)
(do ((i 0 (+ i 1)))
((= i len) 'done)
(dump-byte (code-vector-ref obj i) write-char))))
(define-restorer! type/code-vector
(lambda (c read-char)
c ;ignored
(let* ((len (restore-length read-char))
(cv (make-code-vector len 0)))
(do ((i 0 (+ i 1)))
((= i len) cv)
(code-vector-set! cv i
(restore-number read-char))))))
; Locations
(define (dump-location obj write-char)
(dump-type type/location write-char)
(dump-number (location->uid obj) write-char))
(define (location->uid obj)
(or ((fluid $dump-index) obj)
(location-id obj)))
(define-restorer! type/location
(lambda (c read-char)
c ;ignored
(uid->location (restore-number read-char))))
(define (uid->location uid)
(or ((fluid $restore-index) uid)
(table-ref uid->location-table uid)
(let ((loc (make-undefined-location uid)))
(note-location! loc)
loc)))
(define $restore-index (make-fluid (lambda (uid) #f)))
(define uid->location-table (make-table))
(define (note-location! den)
(table-set! uid->location-table
(location-id den)
den))
(define $dump-index (make-fluid (lambda (loc) #f)))
For simulation purposes , it 's better for location uid 's not to
conflict with any that might be in the base Scheme 48 system . ( In the
real server system there is n't any base Scheme 48 system , so there 's
; no danger of conflict.)
( define location - uid - origin 5000 )
; Closure
(define (dump-closure obj write-char)
(dump-type type/closure write-char)
(let ((id (template-info (closure-template obj))))
(dump-number (if (integer? id) id 0) write-char)))
(define-restorer! type/closure
(lambda (c read-char)
c ;ignored
(make-random (list 'closure (restore-number read-char)))))
; Random
(define random-type (make-record-type 'random '(disclosure)))
(define make-random (record-constructor random-type '(disclosure)))
(define-record-discloser random-type
(let ((d (record-accessor random-type 'disclosure)))
(lambda (r) (cons "Remote" (d r)))))
(define (dump-random obj write-char depth)
(dump-type type/random write-char)
(dump (or (disclose obj) (list '?))
write-char
depth))
(define-restorer! type/random
(lambda (c read-char)
(make-random (restore read-char))))
Ellipsis
(define (dump-ellipsis obj write-char)
(dump-type type/ellipsis write-char))
(define-restorer! type/ellipsis
(lambda (c read-char) (make-random (list (string->symbol "---")))))
Auxiliaries :
; Strings (not necessarily preceded by type code)
(define (dump-a-string obj write-char)
(let ((len (string-length obj)))
(dump-length len write-char)
(do ((i 0 (+ i 1)))
((= i len) 'done)
(dump-a-char (string-ref obj i) write-char))))
(define (restore-a-string read-char)
(let* ((len (restore-length read-char))
(str (make-string len #\?)))
(do ((i 0 (+ i 1)))
((= i len) str)
(string-set! str i (restore-a-char read-char)))))
(define (dump-a-char c write-char)
(write-char c))
(define (restore-a-char read-char)
(read-char))
; Type characters
(define (dump-type c write-char)
(write-char c))
(define (restore-type read-char)
(read-char))
(define dump-length dump-number)
(define restore-length restore-number)
( define ) -- defined in p-features.scm
( define ) -- ditto
; Miscellaneous support
(define (unspecific? obj)
(eq? obj *unspecific*))
(define *unspecific* (if #f #f)) ;foo
( define ( integer->digit - char n )
; (ascii->char (+ n (if (< n 10) ascii-zero a-minus-ten))))
;
;(define (digit-char->integer c)
; (cond ((char-numeric? c)
; (- (char->ascii c) ascii-zero))
; ((char=? c #\#) 0)
; (else
( - ( char->ascii ( char - downcase c ) ) a - minus - ten ) ) ) )
;
;(define ascii-zero (char->ascii #\0))
;
( define a - minus - ten ( - ( char->integer # \a ) 10 ) )
; These modified from s48/boot/transport.scm
(define (string-case-converter string)
(let ((new (make-string (string-length string) #\?)))
(do ((i 0 (+ i 1)))
((>= i (string-length new)) new)
(string-set! new i (invert-case (string-ref string i))))))
(define (invert-case c)
(cond ((char-upper-case? c) (char-downcase c))
((char-lower-case? c) (char-upcase c))
(else c)))
(define symbol-case-converter
(if (char=? (string-ref (symbol->string 't) 0) #\t)
(lambda (string) string)
string-case-converter))
; ASCII
! " # $ % & ' ( ) * + , -./0123456789:;<= > ?
; @ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_
` abcdefghijklmnopqrstuvwxyz{|}~
;(define (tst x)
; (let ((l '()))
; (dump x (lambda (c) (set! l (cons c l))) -1)
; (let ((l (reverse l)))
; (restore (lambda ()
; (let ((c (car l)))
; (set! l (cdr l))
; c))))))
;(define cwcc call-with-current-continuation)
;
;(define (tst x)
; (letrec ((write-cont (lambda (ignore)
; (dump x
; (lambda (c)
; (cwcc (lambda (k)
; (set! write-cont k)
; (read-cont c))))
; -1)))
; (read-cont #f))
; (restore (lambda ()
; (cwcc (lambda (k)
; (set! read-cont k)
; (write-cont 'ignore)))))))
| null | https://raw.githubusercontent.com/rmloveland/scheme48-0.53/1ae4531fac7150bd2af42d124da9b50dd1b89ec1/scheme/big/dump.scm | scheme | Dump and restore
Unix has special meanings for
so avoid using them.
length chars
length chars
char
length objects
length objects
template-info
Recursive entry
Template case needs to precede vector case
Particular dumpers & restorers
Pairs
ignored
Symbols
ignored
Numbers
Note logarithmic recursion
this gets redefined in client
ignored
String literals
ignored
Characters
ignored
Vectors
ignored
Code vectors
ignored
Locations
ignored
no danger of conflict.)
Closure
ignored
Random
Strings (not necessarily preceded by type code)
Type characters
Miscellaneous support
foo
(ascii->char (+ n (if (< n 10) ascii-zero a-minus-ten))))
(define (digit-char->integer c)
(cond ((char-numeric? c)
(- (char->ascii c) ascii-zero))
((char=? c #\#) 0)
(else
(define ascii-zero (char->ascii #\0))
These modified from s48/boot/transport.scm
ASCII
<= > ?
@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_
(define (tst x)
(let ((l '()))
(dump x (lambda (c) (set! l (cons c l))) -1)
(let ((l (reverse l)))
(restore (lambda ()
(let ((c (car l)))
(set! l (cdr l))
c))))))
(define cwcc call-with-current-continuation)
(define (tst x)
(letrec ((write-cont (lambda (ignore)
(dump x
(lambda (c)
(cwcc (lambda (k)
(set! write-cont k)
(read-cont c))))
-1)))
(read-cont #f))
(restore (lambda ()
(cwcc (lambda (k)
(set! read-cont k)
(write-cont 'ignore))))))) | Copyright ( c ) 1993 - 1999 by and . See file COPYING .
ETX , FS , DEL , ETB , NAK , DC2 , EOT , EM ( or SUB ? ) , DC3 , , SI , SYN ,
3 28 127 23 21 18 4 25 26 19 17 15 22
(define type/null #\n)
(define type/true #\t)
(define type/false #\f)
(define type/unspecific #\u)
obj1 obj2
# chars rep
length bytes ( each byte is 2 hex digits ? )
uid
(define type/ellipsis #\e)
(define type/random #\r)
(define (dump obj write-char depth)
(cond ((null? obj) (dump-type type/null write-char))
((eq? obj #t) (dump-type type/true write-char))
((eq? obj #f) (dump-type type/false write-char))
((pair? obj) (dump-pair obj write-char depth))
((template? obj) (dump-template obj write-char depth))
((vector? obj) (dump-vector obj write-char depth))
((symbol? obj) (dump-symbol obj write-char))
((number? obj) (dump-number obj write-char))
((string? obj) (dump-string obj write-char))
((char? obj) (dump-char-literal obj write-char))
((code-vector? obj) (dump-code-vector obj write-char))
((location? obj) (dump-location obj write-char))
((unspecific? obj) (dump-type type/unspecific write-char))
((closure? obj) (dump-closure obj write-char))
(else (dump-random obj write-char depth))))
(define (restore read-char)
(let ((type (restore-type read-char)))
((vector-ref restorers (char->ascii type)) type read-char)))
(define restorers
(make-vector 256 (lambda (type read-char)
Invalid type
(error "invalid type code" type))))
(define (define-restorer! type proc)
(vector-set! restorers (char->ascii type) proc))
(define-restorer! type/null (lambda (c read-char) '()))
(define-restorer! type/false (lambda (c read-char) #f))
(define-restorer! type/true (lambda (c read-char) #t))
(define-restorer! type/unspecific (lambda (c read-char) (if #f #f)))
(define (dump-pair obj write-char depth)
(if (= depth 0)
(dump-ellipsis obj write-char)
(let ((depth (- depth 1)))
(dump-type type/pair write-char)
(dump (car obj) write-char depth)
(dump (cdr obj) write-char depth))))
(define-restorer! type/pair
(lambda (c write-char)
(let ((the-car (restore write-char)))
(cons the-car (restore write-char)))))
(define (dump-symbol obj write-char)
(dump-type type/symbol write-char)
(dump-a-string (symbol-case-converter (symbol->string obj)) write-char))
(define-restorer! type/symbol
(lambda (c read-char)
(string->symbol (symbol-case-converter (restore-a-string read-char)))))
<space> ... _ represent 0 ... 63 ,
{ <space> ... { _ represent 64 ... 127 , -- { is ascii 123
|<space > ... | _ represent 128 ... 191 , -- | is ascii 124
} <space> ... } _ represent 192 ... 256 . -- } is ascii 125
(define (dump-number n write-char)
(if (not (communicable-number? n))
(error "can't dump this number" n))
(if (and (integer? n)
(>= n 0)
(< n 256))
(dump-byte n write-char)
(begin (dump-type type/number write-char)
(dump-a-string (number->string n comm-radix) write-char))))
Dump a number between 0 and 255
(if (< n 64)
(write-char (ascii->char (+ n ascii-space)))
(begin (write-char (ascii->char (+ (arithmetic-shift n -6)
122)))
(write-char (ascii->char (+ (bitwise-and n 63)
ascii-space))))))
32
(define (restore-small-integer c read-char)
(- (char->ascii c) ascii-space))
(do ((i (+ ascii-space 63) (- i 1)))
((< i ascii-space))
(define-restorer! (ascii->char i) restore-small-integer))
(define (restore-medium-integer c read-char)
(+ (arithmetic-shift (- (char->ascii c) 122) 6)
(- (char->ascii (read-char)) ascii-space)))
(do ((i 123 (+ i 1)))
((> i 125))
(define-restorer! (ascii->char i) restore-medium-integer))
(define (restore-number read-char)
(let ((c (read-char)))
(if (char=? c type/number)
(string->number (restore-a-string read-char) comm-radix)
(let ((n (char->ascii c)))
(if (> n 122)
(restore-medium-integer c read-char)
(- n ascii-space))))))
(define-restorer! type/number
(lambda (c read-char)
(string->number (restore-a-string read-char) comm-radix)))
(define comm-radix 16)
(define (dump-string obj write-char)
(dump-type type/string write-char)
(dump-a-string obj write-char))
(define-restorer! type/string
(lambda (c read-char)
(restore-a-string read-char)))
(define (dump-char-literal obj write-char)
(dump-type type/char write-char)
(dump-a-char obj write-char))
(define-restorer! type/char
(lambda (c read-char)
(restore-a-char read-char)))
(define (dump-vector obj write-char depth)
(dump-vector-like obj write-char depth
type/vector vector-length vector-ref))
(define (dump-template obj write-char depth)
(dump-vector-like obj write-char depth
type/template template-length template-ref))
(define (dump-vector-like obj write-char depth type vector-length vector-ref)
(if (= depth 0)
(dump-ellipsis obj write-char)
(let ((depth (- depth 1))
(len (vector-length obj)))
(dump-type type write-char)
(dump-length len write-char)
(do ((i 0 (+ i 1)))
((= i len) 'done)
(dump (vector-ref obj i) write-char depth)))))
(define (restore-vector-like make-vector vector-set!)
(lambda (c read-char)
(let* ((len (restore-length read-char))
(v (make-vector len #\?)))
(do ((i 0 (+ i 1)))
((= i len) v)
(vector-set! v i (restore read-char))))))
(define-restorer! type/vector
(restore-vector-like make-vector vector-set!))
(define-restorer! type/template
(restore-vector-like make-template template-set!))
(define (dump-code-vector obj write-char)
(dump-type type/code-vector write-char)
(let ((len (code-vector-length obj)))
(dump-length len write-char)
(do ((i 0 (+ i 1)))
((= i len) 'done)
(dump-byte (code-vector-ref obj i) write-char))))
(define-restorer! type/code-vector
(lambda (c read-char)
(let* ((len (restore-length read-char))
(cv (make-code-vector len 0)))
(do ((i 0 (+ i 1)))
((= i len) cv)
(code-vector-set! cv i
(restore-number read-char))))))
(define (dump-location obj write-char)
(dump-type type/location write-char)
(dump-number (location->uid obj) write-char))
(define (location->uid obj)
(or ((fluid $dump-index) obj)
(location-id obj)))
(define-restorer! type/location
(lambda (c read-char)
(uid->location (restore-number read-char))))
(define (uid->location uid)
(or ((fluid $restore-index) uid)
(table-ref uid->location-table uid)
(let ((loc (make-undefined-location uid)))
(note-location! loc)
loc)))
(define $restore-index (make-fluid (lambda (uid) #f)))
(define uid->location-table (make-table))
(define (note-location! den)
(table-set! uid->location-table
(location-id den)
den))
(define $dump-index (make-fluid (lambda (loc) #f)))
For simulation purposes , it 's better for location uid 's not to
conflict with any that might be in the base Scheme 48 system . ( In the
real server system there is n't any base Scheme 48 system , so there 's
( define location - uid - origin 5000 )
(define (dump-closure obj write-char)
(dump-type type/closure write-char)
(let ((id (template-info (closure-template obj))))
(dump-number (if (integer? id) id 0) write-char)))
(define-restorer! type/closure
(lambda (c read-char)
(make-random (list 'closure (restore-number read-char)))))
(define random-type (make-record-type 'random '(disclosure)))
(define make-random (record-constructor random-type '(disclosure)))
(define-record-discloser random-type
(let ((d (record-accessor random-type 'disclosure)))
(lambda (r) (cons "Remote" (d r)))))
(define (dump-random obj write-char depth)
(dump-type type/random write-char)
(dump (or (disclose obj) (list '?))
write-char
depth))
(define-restorer! type/random
(lambda (c read-char)
(make-random (restore read-char))))
Ellipsis
(define (dump-ellipsis obj write-char)
(dump-type type/ellipsis write-char))
(define-restorer! type/ellipsis
(lambda (c read-char) (make-random (list (string->symbol "---")))))
Auxiliaries :
(define (dump-a-string obj write-char)
(let ((len (string-length obj)))
(dump-length len write-char)
(do ((i 0 (+ i 1)))
((= i len) 'done)
(dump-a-char (string-ref obj i) write-char))))
(define (restore-a-string read-char)
(let* ((len (restore-length read-char))
(str (make-string len #\?)))
(do ((i 0 (+ i 1)))
((= i len) str)
(string-set! str i (restore-a-char read-char)))))
(define (dump-a-char c write-char)
(write-char c))
(define (restore-a-char read-char)
(read-char))
(define (dump-type c write-char)
(write-char c))
(define (restore-type read-char)
(read-char))
(define dump-length dump-number)
(define restore-length restore-number)
( define ) -- defined in p-features.scm
( define ) -- ditto
(define (unspecific? obj)
(eq? obj *unspecific*))
( define ( integer->digit - char n )
( - ( char->ascii ( char - downcase c ) ) a - minus - ten ) ) ) )
( define a - minus - ten ( - ( char->integer # \a ) 10 ) )
(define (string-case-converter string)
(let ((new (make-string (string-length string) #\?)))
(do ((i 0 (+ i 1)))
((>= i (string-length new)) new)
(string-set! new i (invert-case (string-ref string i))))))
(define (invert-case c)
(cond ((char-upper-case? c) (char-downcase c))
((char-lower-case? c) (char-upcase c))
(else c)))
(define symbol-case-converter
(if (char=? (string-ref (symbol->string 't) 0) #\t)
(lambda (string) string)
string-case-converter))
` abcdefghijklmnopqrstuvwxyz{|}~
|
8dd1f67b36eed116bcaf08ef14f83afc718bdaa7ca9b138351302c4fbe6ce977 | aeolus-project/zephyrus | incompatibilities_of.ml | (****************************************************************************)
(* *)
This file is part of Zephyrus .
(* *)
Zephyrus is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
(* (at your option) any later version. *)
(* *)
Zephyrus is distributed in the hope that it will be useful ,
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
You should have received a copy of the GNU General Public License
along with Zephyrus . If not , see < / > .
(* *)
(****************************************************************************)
Depends on
- datatypes / Data_common
- datatypes / Data_model
- datatypes/Data_common
- datatypes/Data_model
*)
open Data_common
open Data_model
(* TODO: Very temporary simplified ad-hoc file handling and external program execution in this module! *)
type real_or_pseudo_package =
| Real_package of package_id
| Pseudo_package of component_type_id
module Real_or_pseudo_package = struct type t = real_or_pseudo_package let compare = compare end
module Real_or_pseudo_package_set = Set.Make(Real_or_pseudo_package)
module Real_or_pseudo_package_map = Map.Make(Real_or_pseudo_package)
module Real_or_pseudo_package_catalog =
Data_common.Catalog(Fresh_integer)(Int_set)(Real_or_pseudo_package_set)(Int_map)(Real_or_pseudo_package_map)
let repository universe repository_id =
let repository = universe#get_repository repository_id in
let package_list : (real_or_pseudo_package * package) list =
1 . Prepare the packages coming from the repository .
let repository_packages_list : (package_id * package) list =
Package_id_set.map_to_list (fun package_id ->
(package_id, universe#get_package package_id)
) repository#package_ids in
2 . Prepare the pseudo packages encoding
the implementation of component types .
the implementation of component types. *)
let implementation_pseudo_packages_list : (component_type_id * package) list =
Component_type_id_set.filter_map_to_list (fun component_type_id ->
(* Filter the packages implementing the component type
and keep only these from the current repository. *)
let package_ids = Package_id_set.inter
(universe#get_implementation component_type_id)
repository#package_ids in
Check if the component type is installable using the current repository ,
i.e. if there is at least one package which implements it .
i.e. if there is at least one package which implements it. *)
if Package_id_set.is_empty package_ids
(* If the component type is not installable - skip it. *)
then None
(* If it is installable, then prepare a pseudo package, which is installable
if-and-only-if the component type is implemented correctly. *)
else
let pseudo_package =
(* The pseudo package simply depends on a disjunction of the packages
implementing its corresponding component type. *)
let depend = Package_id_set_set.singleton package_ids in
new package ~depend () in
Some (component_type_id, pseudo_package)
) universe#get_implementation_domain in
(List.map (fun (package_id, package) ->
(Real_package package_id, package)
) repository_packages_list)
@
(List.map (fun (component_type_id, package) ->
(Pseudo_package component_type_id, package)
) implementation_pseudo_packages_list)
in
3 . Prepare a catalog of string < - > package_id / component_type_id .
let catalog = new Real_or_pseudo_package_catalog.obj_catalog in
List.iter (fun (real_or_pseudo_package, _) ->
catalog#add real_or_pseudo_package
) package_list;
let cudf_package_name_of_real_or_pseudo_package real_or_pseudo_package =
let id = catalog#id_of_obj real_or_pseudo_package in
string_of_int id in
let real_or_pseudo_package_of_cudf_package_name cudf_package_name =
let id = int_of_string cudf_package_name in
catalog#obj_of_id id in
4 . Convert the repository and the pseudo - packages to the CUDF form .
let cudf_string =
let package_strings =
List.map (fun (real_or_pseudo_package, package) ->
Cudf_of.package
(fun package_id -> cudf_package_name_of_real_or_pseudo_package (Real_package package_id))
(cudf_package_name_of_real_or_pseudo_package real_or_pseudo_package)
package
) package_list in
Printf.sprintf "%s\n" (String.concat "\n\n" package_strings) in
5 . Print the packages in CUDF form to a temporary file .
let repository_name = Name_of.repository_id repository_id in
let cudf_filepath = Printf.sprintf "tmp/%s.cudf" repository_name in
Output_helper.print_output cudf_filepath cudf_string;
6 . Use coinst to generate the conflicts file .
let conflicts_filepath = Printf.sprintf "tmp/%s-conflicts.json" repository_name in
let coinst_program = Engine_helper.coinst in
if not (Engine_helper.program_is_available coinst_program)
then Zephyrus_log.log_panic "The coinst external tool cannot be found. Aborting execution.\n";
let coinst_exit_code = Engine_helper.program_sync_exec coinst_program [cudf_filepath; conflicts_filepath] in
if not (Engine_helper.did_program_exit_ok coinst_exit_code) then
Zephyrus_log.log_panic "The coinst external tool exited abnormally. Aborting execution.\n";
7 . the generated conflicts file to a conflicts structure .
match Input_helper.parse_json Coinst_conflicts_j.read_coinst_conflicts conflicts_filepath with
| None -> Component_type_id_set_set.empty
| Some coinst_conflicts ->
8 . Use the catalog to dereference the package_ids / component_type_ids ( in the same time filtering out the real packages ) in the " classes " structure .
let classes : (string * (component_type_id list)) list =
List.map (fun (class_name, class_members) ->
let class_members : component_type_id list =
List.filter_map (fun cudf_package_name ->
match real_or_pseudo_package_of_cudf_package_name cudf_package_name with
| Real_package _ -> None
| Pseudo_package component_type_id -> Some component_type_id
) class_members in
(class_name, class_members)
) coinst_conflicts.Coinst_conflicts_t.classes in
9 . Substitute the classes in conflict groups by component_type_ids , computing the cartesian product where needed .
let incompatibilities : component_type_id list list =
List.flatten (
List.map (fun (conflict_group : Coinst_conflicts_t.incompatibility) ->
List.cartesian_product (
List.map (fun class_name ->
List.assoc class_name classes
) conflict_group)
) coinst_conflicts.Coinst_conflicts_t.incompatibilities) in
10 . Make the list of lists a set of sets .
let incompatibilities_set : Component_type_id_set_set.t =
Component_type_id_set_set.of_list Component_type_id_set.of_list_directly incompatibilities in
11 . Return the result .
incompatibilities_set
let universe universe =
let incompatibilities_list : (repository_id * Component_type_id_set_set.t) list =
Repository_id_set.map_to_list (fun repository_id ->
let incompatibilities = repository universe repository_id in
(repository_id, incompatibilities)
) universe#get_repository_ids in
Repository_id_map.of_assoc_list incompatibilities_list
| null | https://raw.githubusercontent.com/aeolus-project/zephyrus/0b52de4038bbab724e6a9628430165a7f09f77ae/src/engine/preprocess/incompatibilities_of.ml | ocaml | **************************************************************************
(at your option) any later version.
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
**************************************************************************
TODO: Very temporary simplified ad-hoc file handling and external program execution in this module!
Filter the packages implementing the component type
and keep only these from the current repository.
If the component type is not installable - skip it.
If it is installable, then prepare a pseudo package, which is installable
if-and-only-if the component type is implemented correctly.
The pseudo package simply depends on a disjunction of the packages
implementing its corresponding component type. | This file is part of Zephyrus .
Zephyrus is free software : you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
Zephyrus is distributed in the hope that it will be useful ,
You should have received a copy of the GNU General Public License
along with Zephyrus . If not , see < / > .
Depends on
- datatypes / Data_common
- datatypes / Data_model
- datatypes/Data_common
- datatypes/Data_model
*)
open Data_common
open Data_model
type real_or_pseudo_package =
| Real_package of package_id
| Pseudo_package of component_type_id
module Real_or_pseudo_package = struct type t = real_or_pseudo_package let compare = compare end
module Real_or_pseudo_package_set = Set.Make(Real_or_pseudo_package)
module Real_or_pseudo_package_map = Map.Make(Real_or_pseudo_package)
module Real_or_pseudo_package_catalog =
Data_common.Catalog(Fresh_integer)(Int_set)(Real_or_pseudo_package_set)(Int_map)(Real_or_pseudo_package_map)
let repository universe repository_id =
let repository = universe#get_repository repository_id in
let package_list : (real_or_pseudo_package * package) list =
1 . Prepare the packages coming from the repository .
let repository_packages_list : (package_id * package) list =
Package_id_set.map_to_list (fun package_id ->
(package_id, universe#get_package package_id)
) repository#package_ids in
2 . Prepare the pseudo packages encoding
the implementation of component types .
the implementation of component types. *)
let implementation_pseudo_packages_list : (component_type_id * package) list =
Component_type_id_set.filter_map_to_list (fun component_type_id ->
let package_ids = Package_id_set.inter
(universe#get_implementation component_type_id)
repository#package_ids in
Check if the component type is installable using the current repository ,
i.e. if there is at least one package which implements it .
i.e. if there is at least one package which implements it. *)
if Package_id_set.is_empty package_ids
then None
else
let pseudo_package =
let depend = Package_id_set_set.singleton package_ids in
new package ~depend () in
Some (component_type_id, pseudo_package)
) universe#get_implementation_domain in
(List.map (fun (package_id, package) ->
(Real_package package_id, package)
) repository_packages_list)
@
(List.map (fun (component_type_id, package) ->
(Pseudo_package component_type_id, package)
) implementation_pseudo_packages_list)
in
3 . Prepare a catalog of string < - > package_id / component_type_id .
let catalog = new Real_or_pseudo_package_catalog.obj_catalog in
List.iter (fun (real_or_pseudo_package, _) ->
catalog#add real_or_pseudo_package
) package_list;
let cudf_package_name_of_real_or_pseudo_package real_or_pseudo_package =
let id = catalog#id_of_obj real_or_pseudo_package in
string_of_int id in
let real_or_pseudo_package_of_cudf_package_name cudf_package_name =
let id = int_of_string cudf_package_name in
catalog#obj_of_id id in
4 . Convert the repository and the pseudo - packages to the CUDF form .
let cudf_string =
let package_strings =
List.map (fun (real_or_pseudo_package, package) ->
Cudf_of.package
(fun package_id -> cudf_package_name_of_real_or_pseudo_package (Real_package package_id))
(cudf_package_name_of_real_or_pseudo_package real_or_pseudo_package)
package
) package_list in
Printf.sprintf "%s\n" (String.concat "\n\n" package_strings) in
5 . Print the packages in CUDF form to a temporary file .
let repository_name = Name_of.repository_id repository_id in
let cudf_filepath = Printf.sprintf "tmp/%s.cudf" repository_name in
Output_helper.print_output cudf_filepath cudf_string;
6 . Use coinst to generate the conflicts file .
let conflicts_filepath = Printf.sprintf "tmp/%s-conflicts.json" repository_name in
let coinst_program = Engine_helper.coinst in
if not (Engine_helper.program_is_available coinst_program)
then Zephyrus_log.log_panic "The coinst external tool cannot be found. Aborting execution.\n";
let coinst_exit_code = Engine_helper.program_sync_exec coinst_program [cudf_filepath; conflicts_filepath] in
if not (Engine_helper.did_program_exit_ok coinst_exit_code) then
Zephyrus_log.log_panic "The coinst external tool exited abnormally. Aborting execution.\n";
7 . the generated conflicts file to a conflicts structure .
match Input_helper.parse_json Coinst_conflicts_j.read_coinst_conflicts conflicts_filepath with
| None -> Component_type_id_set_set.empty
| Some coinst_conflicts ->
8 . Use the catalog to dereference the package_ids / component_type_ids ( in the same time filtering out the real packages ) in the " classes " structure .
let classes : (string * (component_type_id list)) list =
List.map (fun (class_name, class_members) ->
let class_members : component_type_id list =
List.filter_map (fun cudf_package_name ->
match real_or_pseudo_package_of_cudf_package_name cudf_package_name with
| Real_package _ -> None
| Pseudo_package component_type_id -> Some component_type_id
) class_members in
(class_name, class_members)
) coinst_conflicts.Coinst_conflicts_t.classes in
9 . Substitute the classes in conflict groups by component_type_ids , computing the cartesian product where needed .
let incompatibilities : component_type_id list list =
List.flatten (
List.map (fun (conflict_group : Coinst_conflicts_t.incompatibility) ->
List.cartesian_product (
List.map (fun class_name ->
List.assoc class_name classes
) conflict_group)
) coinst_conflicts.Coinst_conflicts_t.incompatibilities) in
10 . Make the list of lists a set of sets .
let incompatibilities_set : Component_type_id_set_set.t =
Component_type_id_set_set.of_list Component_type_id_set.of_list_directly incompatibilities in
11 . Return the result .
incompatibilities_set
let universe universe =
let incompatibilities_list : (repository_id * Component_type_id_set_set.t) list =
Repository_id_set.map_to_list (fun repository_id ->
let incompatibilities = repository universe repository_id in
(repository_id, incompatibilities)
) universe#get_repository_ids in
Repository_id_map.of_assoc_list incompatibilities_list
|
ebc6910b81676b044c64ee6c43118e5096a48a5d23b2ef5da020f3757b755ab6 | bdeket/rktsicm | fpf.rkt | #lang racket/base
(provide (all-defined-out))
(require (only-in "../rkt/glue.rkt" if make-initialized-list
fix:= fix:> fix:< fix:+
int:zero? int:quotient int:-)
(only-in "../rkt/define.rkt" define default-object?)
"../general/list-utils.rkt"
"../general/logic-utils.rkt"
"../general/sets.rkt"
"../kernel-intr.rkt"
)
bdk ; ; start original file
Flat Polynomial Form , for Commutative Rings
(define fpf:coeff? number?)
(define fpf:coeff-zero? zero?)
(define fpf:coeff-add +)
(define fpf:coeff-sub -)
(define fpf:coeff-mul *)
(define fpf:coeff-div /)
(define fpf:coeff-negate -)
(define fpf:coeff-expt expt)
(define fpf:coeff-divide scheme-number-divide)
;;; An fpf is a sorted list of terms.
;;; Each term has exponents and a coefficient.
(define (fpf? x)
(or (fpf:coeff? x) (explicit-fpf? x)))
(define (explicit-fpf? x)
(and (pair? x)
(eq? (car x) '*fpf*)))
(define (fpf:arity fpf)
(if (fpf:coeff? fpf)
0
(fpf:number-of-vars (fpf:terms fpf))))
(define (fpf:number-of-vars termlist)
(length (fpf:exponents (car termlist))))
(define (fpf:make terms)
(cond ((null? terms) :zero)
((and (null? (cdr terms))
(fpf:constant-term? (car terms)))
(fpf:coefficient (car terms)))
(else
(cons '*fpf* terms))))
(define (fpf:terms fpf)
(if (and (fpf:coeff? fpf) (fpf:coeff-zero? fpf))
'()
(cdr fpf)))
(define (fpf:make-term exponents coeff)
(cons exponents coeff))
(define (fpf:exponents term)
(car term))
(define (fpf:coefficient term)
(cdr term))
(define (fpf:constant-term? term)
(all-zeros? (fpf:exponents term)))
(define (all-zeros? exponents)
(or (null? exponents)
(and (fix:= 0 (car exponents))
(all-zeros? (cdr exponents)))))
(define (fpf:make-constant c arity)
(list '*fpf*
(fpf:make-term (make-list arity 0)
c)))
(define fpf:zero :zero)
(define fpf:one :one)
(define fpf:-one :-one)
(define fpf:identity
(fpf:make (list (fpf:make-term (list 1) :one))))
(define (fpf:new-variables n)
(make-initialized-list n
(lambda (i)
(fpf:make (list (fpf:make-term
(make-initialized-list n
(lambda (j) (if (fix:= i j) 1 0)))
:one))))))
(define (fpf:same-exponents? fs1 fs2)
(equal? fs1 fs2))
(define (fpf:>exponents? fs1 fs2)
(fpf:graded> fs1 fs2))
(define (fpf:graded> fs1 fs2) ;Graded lexicographical order
(let ((o1 (reduce fix:+ 0 fs1))
(o2 (reduce fix:+ 0 fs2)))
(cond ((fix:> o1 o2) #t)
((fix:< o1 o2) #f)
(else
(fpf:lexicographical> fs1 fs2)))))
(define (fpf:lexicographical> fs1 fs2) ;Lexicographical order
(let lp ((l1 fs1) (l2 fs2))
(cond ((null? l1) #f)
((null? l2) #t)
((fix:> (car l1) (car l2)) #t)
((fix:< (car l1) (car l2)) #f)
(else (lp (cdr l1) (cdr l2))))))
(define (fpf:map-coefficients proc terms)
(if (null? terms)
'()
(let ((ncoeff (proc (fpf:coefficient (car terms)))))
(if (fpf:coeff-zero? ncoeff)
(fpf:map-coefficients proc (cdr terms))
(cons (fpf:make-term (fpf:exponents (car terms))
ncoeff)
(fpf:map-coefficients proc (cdr terms)))))))
(define (fpf:binary-combine a1 a2 coeff-op terms-op opname)
(define (wta)
(error "Wrong type argument -- FPF" opname a1 a2))
(if (fpf:coeff? a1)
(if (fpf:coeff? a2)
(coeff-op a1 a2)
(if (explicit-fpf? a2)
(fpf:make
(terms-op (fpf:terms (fpf:make-constant a1 (fpf:arity a2)))
(fpf:terms a2)))
(wta)))
(if (fpf:coeff? a2)
(if (explicit-fpf? a1)
(fpf:make
(terms-op (fpf:terms a1)
(fpf:terms (fpf:make-constant a2 (fpf:arity a1)))))
(wta))
(if (and (explicit-fpf? a1)
(explicit-fpf? a2)
(fix:= (fpf:arity a1) (fpf:arity a2)))
(fpf:make (terms-op (fpf:terms a1) (fpf:terms a2)))
(wta)))))
(define (fpf:+ a1 a2)
(fpf:binary-combine a1 a2 fpf:coeff-add fpf:add-terms 'add))
(define (fpf:add-terms xlist ylist)
(fpf:add-terms-general xlist ylist fpf:coeff-add))
(define (fpf:add-terms-general xlist ylist coeff-add)
(let tloop ((xlist xlist) (ylist ylist))
(cond ((null? xlist) ylist)
((null? ylist) xlist)
(else
(let ((f1 (fpf:exponents (car xlist)))
(f2 (fpf:exponents (car ylist))))
(cond ((fpf:same-exponents? f1 f2)
(let ((ncoeff
(coeff-add (fpf:coefficient (car xlist))
(fpf:coefficient (car ylist)))))
(if (fpf:coeff-zero? ncoeff)
(tloop (cdr xlist) (cdr ylist))
(cons (fpf:make-term f1 ncoeff)
(tloop (cdr xlist) (cdr ylist))))))
((fpf:>exponents? f1 f2)
(cons (car xlist) (tloop (cdr xlist) ylist)))
(else
(cons (car ylist)
(tloop xlist (cdr ylist))))))))))
(define (fpf:- minuend subtrahend)
(fpf:+ minuend (fpf:negate subtrahend)))
(define (fpf:scale scale-factor p)
(if (fpf:coeff? p)
(fpf:coeff-mul scale-factor p)
(fpf:make (fpf:scale-terms scale-factor (fpf:terms p)))))
(define (fpf:scale-terms scale-factor terms)
(fpf:scale-terms-general scale-factor terms fpf:coeff-mul))
(define (fpf:scale-terms-general scale-factor terms coeff-mul)
(fpf:map-coefficients
(lambda (coefficient)
(coeff-mul scale-factor coefficient))
terms))
(define (fpf:negate p)
(if (fpf:coeff? p)
(fpf:coeff-negate p)
(fpf:make (fpf:negate-terms (fpf:terms p)))))
(define (fpf:negate-terms terms)
(fpf:negate-terms-general terms fpf:coeff-negate))
(define (fpf:negate-terms-general terms neg)
(fpf:map-coefficients neg terms))
(define (fpf:* m1 m2)
(fpf:binary-combine m1 m2 fpf:coeff-mul fpf:mul-terms 'mul))
(define (fpf:mul-terms xlist ylist)
(fpf:mul-terms-general xlist ylist fpf:coeff-add fpf:coeff-mul))
(define (fpf:mul-terms-general xlist ylist add mul)
(let xloop ((xlist xlist))
(if (null? xlist)
'()
(fpf:add-terms-general (fpf:term*terms-general (car xlist) ylist mul)
(xloop (cdr xlist))
add))))
(define (fpf:term*terms-general term terms coeff-mul)
(let ((exponents (fpf:exponents term))
(coeff (fpf:coefficient term)))
(let lp ((terms terms))
(if (null? terms)
'()
(cons (fpf:make-term
(fpf:combine-exponents exponents
(fpf:exponents (car terms)))
(coeff-mul coeff (fpf:coefficient (car terms))))
(lp (cdr terms)))))))
(define (fpf:combine-exponents exponents1 exponents2)
(cond ((null? exponents1) exponents2)
((null? exponents2) exponents1)
(else
(map fix:+ exponents1 exponents2))))
(define (fpf:square p)
(fpf:* p p))
(define (fpf:expt base exponent)
(define (expt-iter x count answer)
(if (int:zero? count)
answer
(if (even? count)
(expt-iter (fpf:square x) (int:quotient count 2) answer)
(expt-iter x (int:- count 1) (fpf:* x answer)))))
(cond ((fpf:coeff? base) (fpf:coeff-expt base exponent))
((not (explicit-fpf? base))
(error "Wrong type -- FPF:EXPT:" base exponent))
((not (exact-integer? exponent))
(error "Can only raise an FPF to an exact integer power" base exponent))
((negative? exponent)
(error "No inverse -- FPF:EXPT:" base exponent))
(else
(expt-iter base exponent :one))))
(define (fpf:divide x y #:optional continue)
(let ((cont
(if (default-object? continue)
(lambda (q r) (list (fpf:make q) (fpf:make r)))
(lambda (q r) (continue (fpf:make q) (fpf:make r))))))
(if (and (fpf:coeff? x) (fpf:coeff? y))
(fpf:coeff-divide x y cont)
(cond ((and (fpf:coeff? x) (explicit-fpf? y))
(fpf:divide-terms (fpf:terms (fpf:make-constant x (fpf:arity y)))
(fpf:terms y)
cont))
((and (fpf:coeff? y) (explicit-fpf? x))
(fpf:divide-terms (fpf:terms x)
(fpf:terms (fpf:make-constant y (fpf:arity x)))
cont))
((and (explicit-fpf? x)
(explicit-fpf? y)
(fix:= (fpf:arity x) (fpf:arity y)))
(fpf:divide-terms (fpf:terms x) (fpf:terms y) cont))
(else (error "Bad arguments -- FPF:DIVIDE" x y))))))
(define (fpf:divide-terms termlist1 termlist2 #:optional continue)
(if (default-object? continue) (set! continue list))
(fpf:divide-terms-general termlist1 termlist2
fpf:coeff-add fpf:coeff-mul fpf:coeff-div fpf:coeff-negate continue))
(define (fpf:divide-terms-general numerator-terms denominator-terms add mul div neg cont)
(let ((dexps (fpf:exponents (car denominator-terms)))
(dcoeff (fpf:coefficient (car denominator-terms))))
(define (dloop nterms cont)
(if (null? nterms)
(cont '() '())
(let ((nexps (fpf:exponents (car nterms))))
(cond ((*and (map >= nexps dexps))
(let ((qt
(fpf:make-term (map int:- nexps dexps)
(div (fpf:coefficient (car nterms)) dcoeff))))
(dloop (fpf:add-terms-general nterms
(fpf:negate-terms-general
(fpf:term*terms-general
qt denominator-terms mul)
neg)
add)
(lambda (q r)
(cont (fpf:add-terms-general
(list qt) q add) r)))))
(else
(dloop (cdr nterms)
(lambda (q r)
(cont q
(fpf:add-terms-general
(list (car nterms)) r add)))))))))
(dloop numerator-terms cont)))
(define (fpf:horner-eval poly args)
(if (fpf:coeff? poly) poly (fpf:horner-eval-terms (fpf:terms poly) args)))
(define (fpf:horner-eval-terms terms args)
(fpf:horner-eval-general terms args
fpf:coeff-add fpf:coeff-sub fpf:coeff-mul fpf:coeff-expt))
(define (fpf:horner-eval-general terms args add sub mul expt)
(if (null? terms)
:zero
(let hloop ((terms (cdr terms))
(exponents (fpf:exponents (car terms)))
(sum (fpf:coefficient (car terms))))
(if (null? terms)
(mul sum (a-reduce mul (map expt args exponents)))
(let ((new-exponents (fpf:exponents (car terms))))
(hloop (cdr terms)
new-exponents
(add (fpf:coefficient (car terms))
(mul sum
(a-reduce mul
(map expt
args
(map int:-
exponents
new-exponents)))))))))))
;;; Converting between flat polynomials and other kinds of expressions
(define (fpf:->expression p vars)
(cond ((fpf:coeff? p) p)
((explicit-fpf? p)
(a-reduce symb:+
(map (lambda (term)
(symb:* (fpf:coefficient term)
(a-reduce symb:*
(map (lambda (exponent var)
(symb:expt var exponent))
(fpf:exponents term)
vars))))
(fpf:terms p))))
(else
(error "Bad fpf -- ->EXPRESSION" p vars))))
(define (fpf:expression-> expr cont #:optional less?)
;; cont = (lambda (poly vars) ... )
(let ((evars
(sort (list-difference (variables-in expr)
fpf:operators-known)
(if (default-object? less?) variable<? less?))))
(cont ((expression-walker
(pair-up evars
(fpf:new-variables (length evars))
fpf:operator-table))
expr)
evars)))
(define +$fpf (accumulation fpf:+ fpf:zero))
(define -$fpf (inverse-accumulation fpf:- fpf:+ fpf:negate fpf:zero))
(define *$fpf (accumulation fpf:* fpf:one))
(define fpf:operator-table
`((+ ,+$fpf)
(- ,-$fpf)
(* ,*$fpf)
(negate ,fpf:negate)
(square ,fpf:square)
(expt ,fpf:expt)))
(define fpf:operators-known (map car fpf:operator-table))
| null | https://raw.githubusercontent.com/bdeket/rktsicm/225a43bc3d9953f9dbbdbfb2fa4a50028a7a41ce/rktsicm/sicm/simplify/fpf.rkt | racket | ; start original file
An fpf is a sorted list of terms.
Each term has exponents and a coefficient.
Graded lexicographical order
Lexicographical order
Converting between flat polynomials and other kinds of expressions
cont = (lambda (poly vars) ... ) | #lang racket/base
(provide (all-defined-out))
(require (only-in "../rkt/glue.rkt" if make-initialized-list
fix:= fix:> fix:< fix:+
int:zero? int:quotient int:-)
(only-in "../rkt/define.rkt" define default-object?)
"../general/list-utils.rkt"
"../general/logic-utils.rkt"
"../general/sets.rkt"
"../kernel-intr.rkt"
)
Flat Polynomial Form , for Commutative Rings
(define fpf:coeff? number?)
(define fpf:coeff-zero? zero?)
(define fpf:coeff-add +)
(define fpf:coeff-sub -)
(define fpf:coeff-mul *)
(define fpf:coeff-div /)
(define fpf:coeff-negate -)
(define fpf:coeff-expt expt)
(define fpf:coeff-divide scheme-number-divide)
(define (fpf? x)
(or (fpf:coeff? x) (explicit-fpf? x)))
(define (explicit-fpf? x)
(and (pair? x)
(eq? (car x) '*fpf*)))
(define (fpf:arity fpf)
(if (fpf:coeff? fpf)
0
(fpf:number-of-vars (fpf:terms fpf))))
(define (fpf:number-of-vars termlist)
(length (fpf:exponents (car termlist))))
(define (fpf:make terms)
(cond ((null? terms) :zero)
((and (null? (cdr terms))
(fpf:constant-term? (car terms)))
(fpf:coefficient (car terms)))
(else
(cons '*fpf* terms))))
(define (fpf:terms fpf)
(if (and (fpf:coeff? fpf) (fpf:coeff-zero? fpf))
'()
(cdr fpf)))
(define (fpf:make-term exponents coeff)
(cons exponents coeff))
(define (fpf:exponents term)
(car term))
(define (fpf:coefficient term)
(cdr term))
(define (fpf:constant-term? term)
(all-zeros? (fpf:exponents term)))
(define (all-zeros? exponents)
(or (null? exponents)
(and (fix:= 0 (car exponents))
(all-zeros? (cdr exponents)))))
(define (fpf:make-constant c arity)
(list '*fpf*
(fpf:make-term (make-list arity 0)
c)))
(define fpf:zero :zero)
(define fpf:one :one)
(define fpf:-one :-one)
(define fpf:identity
(fpf:make (list (fpf:make-term (list 1) :one))))
(define (fpf:new-variables n)
(make-initialized-list n
(lambda (i)
(fpf:make (list (fpf:make-term
(make-initialized-list n
(lambda (j) (if (fix:= i j) 1 0)))
:one))))))
(define (fpf:same-exponents? fs1 fs2)
(equal? fs1 fs2))
(define (fpf:>exponents? fs1 fs2)
(fpf:graded> fs1 fs2))
(let ((o1 (reduce fix:+ 0 fs1))
(o2 (reduce fix:+ 0 fs2)))
(cond ((fix:> o1 o2) #t)
((fix:< o1 o2) #f)
(else
(fpf:lexicographical> fs1 fs2)))))
(let lp ((l1 fs1) (l2 fs2))
(cond ((null? l1) #f)
((null? l2) #t)
((fix:> (car l1) (car l2)) #t)
((fix:< (car l1) (car l2)) #f)
(else (lp (cdr l1) (cdr l2))))))
(define (fpf:map-coefficients proc terms)
(if (null? terms)
'()
(let ((ncoeff (proc (fpf:coefficient (car terms)))))
(if (fpf:coeff-zero? ncoeff)
(fpf:map-coefficients proc (cdr terms))
(cons (fpf:make-term (fpf:exponents (car terms))
ncoeff)
(fpf:map-coefficients proc (cdr terms)))))))
(define (fpf:binary-combine a1 a2 coeff-op terms-op opname)
(define (wta)
(error "Wrong type argument -- FPF" opname a1 a2))
(if (fpf:coeff? a1)
(if (fpf:coeff? a2)
(coeff-op a1 a2)
(if (explicit-fpf? a2)
(fpf:make
(terms-op (fpf:terms (fpf:make-constant a1 (fpf:arity a2)))
(fpf:terms a2)))
(wta)))
(if (fpf:coeff? a2)
(if (explicit-fpf? a1)
(fpf:make
(terms-op (fpf:terms a1)
(fpf:terms (fpf:make-constant a2 (fpf:arity a1)))))
(wta))
(if (and (explicit-fpf? a1)
(explicit-fpf? a2)
(fix:= (fpf:arity a1) (fpf:arity a2)))
(fpf:make (terms-op (fpf:terms a1) (fpf:terms a2)))
(wta)))))
(define (fpf:+ a1 a2)
(fpf:binary-combine a1 a2 fpf:coeff-add fpf:add-terms 'add))
(define (fpf:add-terms xlist ylist)
(fpf:add-terms-general xlist ylist fpf:coeff-add))
(define (fpf:add-terms-general xlist ylist coeff-add)
(let tloop ((xlist xlist) (ylist ylist))
(cond ((null? xlist) ylist)
((null? ylist) xlist)
(else
(let ((f1 (fpf:exponents (car xlist)))
(f2 (fpf:exponents (car ylist))))
(cond ((fpf:same-exponents? f1 f2)
(let ((ncoeff
(coeff-add (fpf:coefficient (car xlist))
(fpf:coefficient (car ylist)))))
(if (fpf:coeff-zero? ncoeff)
(tloop (cdr xlist) (cdr ylist))
(cons (fpf:make-term f1 ncoeff)
(tloop (cdr xlist) (cdr ylist))))))
((fpf:>exponents? f1 f2)
(cons (car xlist) (tloop (cdr xlist) ylist)))
(else
(cons (car ylist)
(tloop xlist (cdr ylist))))))))))
(define (fpf:- minuend subtrahend)
(fpf:+ minuend (fpf:negate subtrahend)))
(define (fpf:scale scale-factor p)
(if (fpf:coeff? p)
(fpf:coeff-mul scale-factor p)
(fpf:make (fpf:scale-terms scale-factor (fpf:terms p)))))
(define (fpf:scale-terms scale-factor terms)
(fpf:scale-terms-general scale-factor terms fpf:coeff-mul))
(define (fpf:scale-terms-general scale-factor terms coeff-mul)
(fpf:map-coefficients
(lambda (coefficient)
(coeff-mul scale-factor coefficient))
terms))
(define (fpf:negate p)
(if (fpf:coeff? p)
(fpf:coeff-negate p)
(fpf:make (fpf:negate-terms (fpf:terms p)))))
(define (fpf:negate-terms terms)
(fpf:negate-terms-general terms fpf:coeff-negate))
(define (fpf:negate-terms-general terms neg)
(fpf:map-coefficients neg terms))
(define (fpf:* m1 m2)
(fpf:binary-combine m1 m2 fpf:coeff-mul fpf:mul-terms 'mul))
(define (fpf:mul-terms xlist ylist)
(fpf:mul-terms-general xlist ylist fpf:coeff-add fpf:coeff-mul))
(define (fpf:mul-terms-general xlist ylist add mul)
(let xloop ((xlist xlist))
(if (null? xlist)
'()
(fpf:add-terms-general (fpf:term*terms-general (car xlist) ylist mul)
(xloop (cdr xlist))
add))))
(define (fpf:term*terms-general term terms coeff-mul)
(let ((exponents (fpf:exponents term))
(coeff (fpf:coefficient term)))
(let lp ((terms terms))
(if (null? terms)
'()
(cons (fpf:make-term
(fpf:combine-exponents exponents
(fpf:exponents (car terms)))
(coeff-mul coeff (fpf:coefficient (car terms))))
(lp (cdr terms)))))))
(define (fpf:combine-exponents exponents1 exponents2)
(cond ((null? exponents1) exponents2)
((null? exponents2) exponents1)
(else
(map fix:+ exponents1 exponents2))))
(define (fpf:square p)
(fpf:* p p))
(define (fpf:expt base exponent)
(define (expt-iter x count answer)
(if (int:zero? count)
answer
(if (even? count)
(expt-iter (fpf:square x) (int:quotient count 2) answer)
(expt-iter x (int:- count 1) (fpf:* x answer)))))
(cond ((fpf:coeff? base) (fpf:coeff-expt base exponent))
((not (explicit-fpf? base))
(error "Wrong type -- FPF:EXPT:" base exponent))
((not (exact-integer? exponent))
(error "Can only raise an FPF to an exact integer power" base exponent))
((negative? exponent)
(error "No inverse -- FPF:EXPT:" base exponent))
(else
(expt-iter base exponent :one))))
(define (fpf:divide x y #:optional continue)
(let ((cont
(if (default-object? continue)
(lambda (q r) (list (fpf:make q) (fpf:make r)))
(lambda (q r) (continue (fpf:make q) (fpf:make r))))))
(if (and (fpf:coeff? x) (fpf:coeff? y))
(fpf:coeff-divide x y cont)
(cond ((and (fpf:coeff? x) (explicit-fpf? y))
(fpf:divide-terms (fpf:terms (fpf:make-constant x (fpf:arity y)))
(fpf:terms y)
cont))
((and (fpf:coeff? y) (explicit-fpf? x))
(fpf:divide-terms (fpf:terms x)
(fpf:terms (fpf:make-constant y (fpf:arity x)))
cont))
((and (explicit-fpf? x)
(explicit-fpf? y)
(fix:= (fpf:arity x) (fpf:arity y)))
(fpf:divide-terms (fpf:terms x) (fpf:terms y) cont))
(else (error "Bad arguments -- FPF:DIVIDE" x y))))))
(define (fpf:divide-terms termlist1 termlist2 #:optional continue)
(if (default-object? continue) (set! continue list))
(fpf:divide-terms-general termlist1 termlist2
fpf:coeff-add fpf:coeff-mul fpf:coeff-div fpf:coeff-negate continue))
(define (fpf:divide-terms-general numerator-terms denominator-terms add mul div neg cont)
(let ((dexps (fpf:exponents (car denominator-terms)))
(dcoeff (fpf:coefficient (car denominator-terms))))
(define (dloop nterms cont)
(if (null? nterms)
(cont '() '())
(let ((nexps (fpf:exponents (car nterms))))
(cond ((*and (map >= nexps dexps))
(let ((qt
(fpf:make-term (map int:- nexps dexps)
(div (fpf:coefficient (car nterms)) dcoeff))))
(dloop (fpf:add-terms-general nterms
(fpf:negate-terms-general
(fpf:term*terms-general
qt denominator-terms mul)
neg)
add)
(lambda (q r)
(cont (fpf:add-terms-general
(list qt) q add) r)))))
(else
(dloop (cdr nterms)
(lambda (q r)
(cont q
(fpf:add-terms-general
(list (car nterms)) r add)))))))))
(dloop numerator-terms cont)))
(define (fpf:horner-eval poly args)
(if (fpf:coeff? poly) poly (fpf:horner-eval-terms (fpf:terms poly) args)))
(define (fpf:horner-eval-terms terms args)
(fpf:horner-eval-general terms args
fpf:coeff-add fpf:coeff-sub fpf:coeff-mul fpf:coeff-expt))
(define (fpf:horner-eval-general terms args add sub mul expt)
(if (null? terms)
:zero
(let hloop ((terms (cdr terms))
(exponents (fpf:exponents (car terms)))
(sum (fpf:coefficient (car terms))))
(if (null? terms)
(mul sum (a-reduce mul (map expt args exponents)))
(let ((new-exponents (fpf:exponents (car terms))))
(hloop (cdr terms)
new-exponents
(add (fpf:coefficient (car terms))
(mul sum
(a-reduce mul
(map expt
args
(map int:-
exponents
new-exponents)))))))))))
(define (fpf:->expression p vars)
(cond ((fpf:coeff? p) p)
((explicit-fpf? p)
(a-reduce symb:+
(map (lambda (term)
(symb:* (fpf:coefficient term)
(a-reduce symb:*
(map (lambda (exponent var)
(symb:expt var exponent))
(fpf:exponents term)
vars))))
(fpf:terms p))))
(else
(error "Bad fpf -- ->EXPRESSION" p vars))))
(define (fpf:expression-> expr cont #:optional less?)
(let ((evars
(sort (list-difference (variables-in expr)
fpf:operators-known)
(if (default-object? less?) variable<? less?))))
(cont ((expression-walker
(pair-up evars
(fpf:new-variables (length evars))
fpf:operator-table))
expr)
evars)))
(define +$fpf (accumulation fpf:+ fpf:zero))
(define -$fpf (inverse-accumulation fpf:- fpf:+ fpf:negate fpf:zero))
(define *$fpf (accumulation fpf:* fpf:one))
(define fpf:operator-table
`((+ ,+$fpf)
(- ,-$fpf)
(* ,*$fpf)
(negate ,fpf:negate)
(square ,fpf:square)
(expt ,fpf:expt)))
(define fpf:operators-known (map car fpf:operator-table))
|
f5327632a7f85e56814136f10f3bd72301e21675fe5e7bdaae106b1cb08e4d57 | kupl/LearnML | patch.ml | let iter ((n : int), (f : 'a -> 'a)) : 'b -> 'a =
let rec iter_sub ((m : int), (g : 'b -> 'a), (f' : 'a -> 'a)) : 'b -> 'a =
match m with
| 0 -> fun (__s8 : int) -> __s8
| 1 -> g
| _ -> iter_sub (m - 1, fun __s9 -> (g (f' __s9), f'))
in
iter_sub (n, f, f)
| null | https://raw.githubusercontent.com/kupl/LearnML/c98ef2b95ef67e657b8158a2c504330e9cfb7700/result/cafe2/iter/sub14/patch.ml | ocaml | let iter ((n : int), (f : 'a -> 'a)) : 'b -> 'a =
let rec iter_sub ((m : int), (g : 'b -> 'a), (f' : 'a -> 'a)) : 'b -> 'a =
match m with
| 0 -> fun (__s8 : int) -> __s8
| 1 -> g
| _ -> iter_sub (m - 1, fun __s9 -> (g (f' __s9), f'))
in
iter_sub (n, f, f)
| |
72204113119e2414a9c13f683aba7bb37de409d189c12ab38322f497230b468b | tonyg/racket-abnf | arithmetic-rules.rkt | #lang abnf
expr = term "+" expr / term "-" expr / term
term = factor "*" term / factor "/" term / factor
factor = "(" expr ")" / num
num = *SP *DIGIT *SP
SP = %x20 / %x09 / %x0d / %x0a
DIGIT = %x30-39
| null | https://raw.githubusercontent.com/tonyg/racket-abnf/1079bc5b30a227f52ac00a84dc3fcd539da5f8db/abnf/scribblings/arithmetic-rules.rkt | racket | #lang abnf
expr = term "+" expr / term "-" expr / term
term = factor "*" term / factor "/" term / factor
factor = "(" expr ")" / num
num = *SP *DIGIT *SP
SP = %x20 / %x09 / %x0d / %x0a
DIGIT = %x30-39
| |
27e05e6b60ccc1cf1011c751298d03982888c57d465e1cc4875267c5b61a0cf7 | fujita-y/ypsilon | parameters.scm | #!core
Copyright ( c ) 2004 - 2022 Yoshikatsu Fujita / LittleWing Company Limited .
;;; See LICENSE file for terms and conditions of use.
(library (core parameters)
(export make-parameter parameterize)
(import (core intrinsics)
(only (core primitives) make-parameter subr?))
(define-syntax parameterize-aux
(syntax-rules ()
((_ () ((save new param value) ...) body ...)
(let ((save #f) ... (new value) ...)
(dynamic-wind
(lambda () (set! save (param)) ... (param new) ...)
(lambda () body ...)
(lambda () (begin (if (subr? param) (param save) (param save #f))) ...))))
((_ ((e1 e2) . more) (stash ...) body ...)
(parameterize-aux more (stash ... (tmp1 tmp2 e1 e2)) body ...))))
(define-syntax parameterize
(syntax-rules ()
((_ ((e1 e2) ...) body ...)
(parameterize-aux ((e1 e2) ...) () body ...))))
)
| null | https://raw.githubusercontent.com/fujita-y/ypsilon/cbd38be6b2a021706cdf93d0e6a2daf1c61d6a8b/stdlib/core/parameters.scm | scheme | See LICENSE file for terms and conditions of use. | #!core
Copyright ( c ) 2004 - 2022 Yoshikatsu Fujita / LittleWing Company Limited .
(library (core parameters)
(export make-parameter parameterize)
(import (core intrinsics)
(only (core primitives) make-parameter subr?))
(define-syntax parameterize-aux
(syntax-rules ()
((_ () ((save new param value) ...) body ...)
(let ((save #f) ... (new value) ...)
(dynamic-wind
(lambda () (set! save (param)) ... (param new) ...)
(lambda () body ...)
(lambda () (begin (if (subr? param) (param save) (param save #f))) ...))))
((_ ((e1 e2) . more) (stash ...) body ...)
(parameterize-aux more (stash ... (tmp1 tmp2 e1 e2)) body ...))))
(define-syntax parameterize
(syntax-rules ()
((_ ((e1 e2) ...) body ...)
(parameterize-aux ((e1 e2) ...) () body ...))))
)
|
c84369e6ef210c532cbd6f62ed55719cddd286952fc0cb680856791c1fe12099 | seandepagnier/cruisingplot | config.scm | Copyright ( C ) 2010 < >
;;
This Program is free software ; you can redistribute it and/or
;; modify it under the terms of the GNU General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
(declare (unit config))
(use args glut)
(define config-file #f)
(define (usage)
(print "Usage: " (car (argv)) " [options...]")
(newline)
(print (args:usage opts))
(print "Report bugs to sean at depagnier dot com")
(exit))
(define output-file #f)
(define opts
(list
(args:make-option (a autopilot) (optional: "OPTIONS") "Create an autopilot"
(create-autopilot arg))
(args:make-option (A ahrs) (optional: "OPTIONS") "Create an ahrs which runs kalman filter algorithms. Invoke -a help for more information."
(make-ahrs-from-string arg))
(args:make-option (C config) (required: "FILENAME") "include a config file which contentss specifies command line options"
(with-input-from-file arg
(lambda ()
(let loop ()
(let ((line (read-line)))
(if (not (eof-object? line))
(let ((space (string-index line #\space)))
(if space
(args:parse
(list (string-append "-" (string-take line space))
(string-drop line (+ space 1)))
opts)
(error "invalid configuration line" line))
(loop))))))))
(args:make-option (c client) (required: "HOST") "Connect to a remote host to obtain sensor data"
(sensor-net-client arg))
(args:make-option (d debug) (required: "EXPRESSION1[,EXPESSION2...]")
"Perform the given computation at 1hz and print to stdout, help for info"
(debug arg))
(args:make-option (f filter) (required: "EXPRESSION1[,options]")
"Filter this computation, help for info"
(create-filter arg))
(args:make-option (g gps) (optional: "DEVICE") "device or url for gps data, if none is specified, then the default gpsd location (localhost:2947) is tried."
(gps-setup arg))
(args:make-option (h help) #:none "Display this text" (usage))
(args:make-option (i input) (required: "FILENAME,options")
"Use a file with sensor data as input (replay), -i help for info"
(sensor-replay-logfile arg))
(args:make-option (m magnetometer) (optional: "OPTIONS") "automatically perform calibration on the magnetic sensors. -m help for info"
(magnetometer-setup arg))
(args:make-option (o output) (required: "FILENAME")
"Write all input sensor data to a log file log for future replay"
(verbose "adding output log file: " (if (equal? arg "-") "<stdout>" arg))
(if (equal? arg "-")
(sensor-log-to-port (current-output-port))
(sensor-log-to-file arg)))
(args:make-option (p plot) (required: "PLOT") "Plot computations. Invoke with -p help for more information."
(set! setup (lambda ()
(sleep 1)
(plots-setup plots)))
(set! plots (append plots `(,(lambda () (create-plot-from-string arg))))))
(args:make-option (s serial) (optional: "DEVICE")
"read from a serial port for sensor input"
(generic-serial-sensor-reader arg))
(args:make-option (S server) (optional: "PORT")
(string-append "Run a server listening on the specified port or "
(number->string net-default-port)
" by default")
(net-server arg))
(args:make-option (t testdata) (required: "FILENAME")
"from test data"
(magnetometer-test-data arg))
(args:make-option (v verbose) #:none "Print debugging info, -vv for extra info"
(cond ((eq? verbose nice-print)
(verbose "Extra Debugging output "
(if (eq? very-verbose verbose) "already " "")
"enabled")
(set! very-verbose verbose))
(else
(set! verbose nice-print)
(verbose "Debugging output enabled"))))
(args:make-option (w weather) (required: "DEVICE") "device or url for weather data"
(weather-setup arg))
(args:make-option (2 gps-plot) (optional: "ARGS") "draw gps plot display."
(set! plots (append plots `(,(lambda () (create-gps-plot-from-string arg))))))
(args:make-option (3 relay) (required: "RELAY") "Device to use for relay control"
(relay-setup arg))
(args:make-option (4 wifiaimer) (required: "options")
"Device to control servo to aim antenna, -wifiaimer help for more info"
(create-wifi-aimer arg))
(args:make-option (6 tiltcompensate) #:none "Enable tilt-compensation routines"
(tilt-compensation-setup))
(args:make-option (9 9DOF) (required: "DEVICE") "device to use for sparkfun 9DOF"
(sensor-9dof-setup arg))
))
(define (config-setup)
(very-verbose "Configuration setup")
(args:parse (command-line-arguments) opts)
(verbose "Configuration complete."))
| null | https://raw.githubusercontent.com/seandepagnier/cruisingplot/d3d83e7372e2c5ce1a8e8071286e30c2028088cf/config.scm | scheme |
you can redistribute it and/or
modify it under the terms of the GNU General Public
either | Copyright ( C ) 2010 < >
version 3 of the License , or ( at your option ) any later version .
(declare (unit config))
(use args glut)
(define config-file #f)
(define (usage)
(print "Usage: " (car (argv)) " [options...]")
(newline)
(print (args:usage opts))
(print "Report bugs to sean at depagnier dot com")
(exit))
(define output-file #f)
(define opts
(list
(args:make-option (a autopilot) (optional: "OPTIONS") "Create an autopilot"
(create-autopilot arg))
(args:make-option (A ahrs) (optional: "OPTIONS") "Create an ahrs which runs kalman filter algorithms. Invoke -a help for more information."
(make-ahrs-from-string arg))
(args:make-option (C config) (required: "FILENAME") "include a config file which contentss specifies command line options"
(with-input-from-file arg
(lambda ()
(let loop ()
(let ((line (read-line)))
(if (not (eof-object? line))
(let ((space (string-index line #\space)))
(if space
(args:parse
(list (string-append "-" (string-take line space))
(string-drop line (+ space 1)))
opts)
(error "invalid configuration line" line))
(loop))))))))
(args:make-option (c client) (required: "HOST") "Connect to a remote host to obtain sensor data"
(sensor-net-client arg))
(args:make-option (d debug) (required: "EXPRESSION1[,EXPESSION2...]")
"Perform the given computation at 1hz and print to stdout, help for info"
(debug arg))
(args:make-option (f filter) (required: "EXPRESSION1[,options]")
"Filter this computation, help for info"
(create-filter arg))
(args:make-option (g gps) (optional: "DEVICE") "device or url for gps data, if none is specified, then the default gpsd location (localhost:2947) is tried."
(gps-setup arg))
(args:make-option (h help) #:none "Display this text" (usage))
(args:make-option (i input) (required: "FILENAME,options")
"Use a file with sensor data as input (replay), -i help for info"
(sensor-replay-logfile arg))
(args:make-option (m magnetometer) (optional: "OPTIONS") "automatically perform calibration on the magnetic sensors. -m help for info"
(magnetometer-setup arg))
(args:make-option (o output) (required: "FILENAME")
"Write all input sensor data to a log file log for future replay"
(verbose "adding output log file: " (if (equal? arg "-") "<stdout>" arg))
(if (equal? arg "-")
(sensor-log-to-port (current-output-port))
(sensor-log-to-file arg)))
(args:make-option (p plot) (required: "PLOT") "Plot computations. Invoke with -p help for more information."
(set! setup (lambda ()
(sleep 1)
(plots-setup plots)))
(set! plots (append plots `(,(lambda () (create-plot-from-string arg))))))
(args:make-option (s serial) (optional: "DEVICE")
"read from a serial port for sensor input"
(generic-serial-sensor-reader arg))
(args:make-option (S server) (optional: "PORT")
(string-append "Run a server listening on the specified port or "
(number->string net-default-port)
" by default")
(net-server arg))
(args:make-option (t testdata) (required: "FILENAME")
"from test data"
(magnetometer-test-data arg))
(args:make-option (v verbose) #:none "Print debugging info, -vv for extra info"
(cond ((eq? verbose nice-print)
(verbose "Extra Debugging output "
(if (eq? very-verbose verbose) "already " "")
"enabled")
(set! very-verbose verbose))
(else
(set! verbose nice-print)
(verbose "Debugging output enabled"))))
(args:make-option (w weather) (required: "DEVICE") "device or url for weather data"
(weather-setup arg))
(args:make-option (2 gps-plot) (optional: "ARGS") "draw gps plot display."
(set! plots (append plots `(,(lambda () (create-gps-plot-from-string arg))))))
(args:make-option (3 relay) (required: "RELAY") "Device to use for relay control"
(relay-setup arg))
(args:make-option (4 wifiaimer) (required: "options")
"Device to control servo to aim antenna, -wifiaimer help for more info"
(create-wifi-aimer arg))
(args:make-option (6 tiltcompensate) #:none "Enable tilt-compensation routines"
(tilt-compensation-setup))
(args:make-option (9 9DOF) (required: "DEVICE") "device to use for sparkfun 9DOF"
(sensor-9dof-setup arg))
))
(define (config-setup)
(very-verbose "Configuration setup")
(args:parse (command-line-arguments) opts)
(verbose "Configuration complete."))
|
9762a05b8f866c0b60d591ed68864928a3c005da3e55bc4ebcc3ec83f35f8f94 | ngrunwald/datasplash | examples.clj | (ns datasplash.examples
(:require [clojure.string :as str]
[clojure.tools.logging :as log]
[datasplash
[api :as ds]
[bq :as bq]
[datastore :as dts]
[pubsub :as ps]
[options :as options :refer [defoptions]]]
[clojure.edn :as edn])
(:import (java.util UUID)
(com.google.datastore.v1 Query PropertyFilter$Operator)
(com.google.datastore.v1.client DatastoreHelper)
(org.apache.beam.sdk.options PipelineOptionsFactory))
(:gen-class))
;;;;;;;;;;;;;;;
WordCount ; ;
;;;;;;;;;;;;;;;
;; Port of
(defn tokenize
[l]
(remove empty? (.split (str/trim l) "[^a-zA-Z']+")))
(defn count-words
[p]
(ds/->> :count-words p
(ds/mapcat tokenize {:name :tokenize})
(ds/frequencies)))
(defn format-count
[[k v]]
(format "%s: %d" k v))
(defoptions WordCountOptions
{:input {:default "gs-beam-samples/shakespeare/kinglear.txt"
:type String}
:output {:default "kinglear-freqs.txt"
:type String}
:numShards {:default 0
:type Long}})
(defn run-word-count
[str-args]
(let [p (ds/make-pipeline WordCountOptions str-args)
{:keys [input output numShards]} (ds/get-pipeline-options p)]
(->> p
(ds/read-text-file input {:name "King-Lear"})
(count-words)
(ds/map format-count {:name :format-count})
(ds/write-text-file output {:num-shards numShards}))))
;;;;;;;;;;;
DeDup ; ;
;;;;;;;;;;;
;; Port of
(defoptions DeDupOptions
{:input {:default "gs-beam-samples/shakespeare/*"
:type String}
:output {:default "shakespeare-dedup.txt"
:type String}})
(defn run-dedup
[str-args]
(let [p (ds/make-pipeline DeDupOptions str-args)
{:keys [input output]} (ds/get-pipeline-options p)]
(->> p
(ds/read-text-file input {:name "ReadLines"})
(ds/distinct {:name "dedup"})
(ds/write-text-file output {:name "DedupedShakespeare"}))))
;;;;;;;;;;;;
;; Filter ;;
;;;;;;;;;;;;
;; Port of
(defoptions FilterOptions
{:input {:default "clouddataflow-readonly:samples.weather_stations"
:type String}
:output {:default "youproject:yourdataset.weather_stations_new"
:type String}
:monthFilter {:default 7
:type String}})
(defn run-filter
[str-args]
(let [p (ds/make-pipeline FilterOptions str-args)
{:keys [input output monthFilter]} (ds/get-pipeline-options p)
all-rows (->> p
(bq/read-bq {:table input})
(ds/map (fn [row]
(->>
(select-keys row [:year :month :day :mean_temp])
(map (fn [[k v]] (if (string? v) [k (edn/read-string v)] [k v])))
(into {})))
{:name "Projection"}))
global-mean-temp (->> all-rows
(ds/combine (ds/mean-fn :mapper :mean_temp))
(ds/view))
filtered-results (->> all-rows
(ds/filter (fn [{:keys [month]}] (= month monthFilter)))
(ds/filter (fn [{:keys [mean_temp]}]
(let [gtemp (:global-mean (ds/side-inputs))]
(< mean_temp gtemp)))
{:name "ParseAndFilter" :side-inputs {:global-mean global-mean-temp}}))]
(if (re-find #":[^/]" output)
(bq/write-bq-table (bq/custom-output-fn (fn [x]
(str output "_" (:year (.getValue x)))))
{:schema [{:name "year" :type "INTEGER"}
{:name "month":type "INTEGER"}
{:name "day" :type "INTEGER"}
{:name "mean_temp" :type "FLOAT"}]
:create-disposition :if-needed
:write-disposition :truncate}
filtered-results)
(ds/write-edn-file output filtered-results))))
;;;;;;;;;;;;;;;;;;;
CombinePerKey ; ;
;;;;;;;;;;;;;;;;;;;
;; Port of
(defoptions CombinePerKeyOptions
{:input {:default "publicdata:samples.shakespeare"
:type String}
:output {:default "combinePerKeyRes.edn"
:type String}
:minWordLength {:default 8
:type Long}})
(defn run-combine-per-key
[str-args]
(let [p (ds/make-pipeline CombinePerKeyOptions str-args)
{:keys [input output minWordLength]} (ds/get-pipeline-options p)
results (->> p
(bq/read-bq {:table input})
(ds/filter (fn [{:keys [word]}] (> (count word) minWordLength)))
(ds/map-kv (fn [{:keys [word corpus]}] [word corpus]))
(ds/combine
(ds/sfn (fn [words] (str/join "," words)))
{:scope :per-key})
(ds/map (fn [[word plays]] {:word word :all_plays plays})))]
(if (re-find #":[^/]" output)
(bq/write-bq-table output {:schema [{:name "word" :type "STRING"}
{:name "all_plays" :type "STRING"}]
:create-disposition :if-needed
:write-disposition :truncate}
results)
(ds/write-edn-file output results))))
;;;;;;;;;;;;;;;
MaxPerKey ; ;
;;;;;;;;;;;;;;;
Port of
(defoptions MaxPerKeyOptions
{:input {:default "clouddataflow-readonly:samples.weather_stations"
:type String}
:output {:default "maxperKeyRes.edn"
:type String}})
(defn run-max-per-key
[str-args]
(let [p (ds/make-pipeline MaxPerKeyOptions str-args)
{:keys [input output]} (ds/get-pipeline-options p)
results (->> p
(bq/read-bq {:table input})
(ds/map-kv (fn [{:keys [month mean_temp]}]
[(edn/read-string month) (double mean_temp)]))
(ds/combine (ds/max-fn) {:scope :per-key})
(ds/map (fn [[k v]]
{:month k :max_mean_temp v})))]
(if (re-find #":[^/]" output)
(bq/write-bq-table output {:schema [{:name "month" :type "INTEGER"}
{:name "max_mean_temp" :type "FLOAT"}]
:create-disposition :if-needed
:write-disposition :truncate}
results)
(ds/write-edn-file output results))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
StandardSQL WordCount > 500 ; ;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Example showing how to enable support for StandardSQL in your queries querying for words in the
shakespeare dataset that has more than 500 words
Test calling run standard - sql --stagingLocation = gs://[your - bucket]/jars --output gs://[your - bucket]/
(def StandardSQLOptions
{:input {:default "bigquery-public-data.samples.shakespeare"
:type String}
:output {:default "project:dataset.table"
:type String}
:tempLocation {:default "gs"
:type String}})
(defn run-standard-sql-query
[str-args]
the DirectPipelineRunner does n't support standardSql yet
(let [p (ds/make-pipeline StandardSQLOptions str-args {:runner "DataflowRunner"})
{:keys [_input output]} (ds/get-pipeline-options p)
query "SELECT * from `bigquery-public-data.samples.shakespeare` LIMIT 100"
results (->> p
(bq/read-bq {:query query
:standard-sql? true}))]
(ds/write-edn-file output results)))
;;;;;;;;;;;;;;;;;;;;;;;;
;; DatastoreWordCount ;;
;;;;;;;;;;;;;;;;;;;;;;;;
;; Port of
(defoptions DatastoreWordCountOptions
{:input {:default "gs-beam-samples/shakespeare/kinglear.txt"
:type String}
:output {:default "kinglear-freqs.txt"
:type String}
:dataset {:default "yourdataset"
:type String}
:kind {:default "yourkind"
:type String}
:namespace {:default "yournamespace"
:type String}
:isReadOnly {:default false
:type Boolean}
:numShards {:default 0
:type Long}})
(defn make-ancestor-key
[{:keys [kind namespace]}]
(dts/make-ds-key {:kind kind :namespace namespace :key "root"}))
Query is not wrapped yet , use Interop
;; PR welcome :)
(defn make-ancestor-kind-query
[{:keys [kind namespace] :as opts}]
(let [qb (Query/newBuilder)]
(-> qb (.addKindBuilder) (.setName kind))
(.setFilter qb (DatastoreHelper/makeFilter
"__key__"
(PropertyFilter$Operator/valueOf "HAS_ANCESTOR")
(dts/make-ds-value (make-ancestor-key opts))))
(.build qb)))
(defn run-datastore-word-count
[str-args]
(let [p (ds/make-pipeline DatastoreWordCountOptions str-args)
{:keys [input output dataset kind
namespace isReadOnly numShards] :as opts} (ds/get-pipeline-options p)
root (make-ancestor-key opts)]
(when-not isReadOnly
(->> p
(ds/read-text-file input {:name "King-Lear"})
(ds/map (fn [content]
(dts/make-ds-entity
{:content content}
{:namespace namespace
:key (-> (UUID/randomUUID) (.toString))
:kind kind
:path [root]}))
{:name "create-entities"})
(dts/write-datastore-raw
{:project-id dataset :name :write-datastore})))
(->> p
(dts/read-datastore-raw {:project-id dataset
:query (make-ancestor-kind-query opts)
:namespace namespace})
(ds/map dts/entity->clj {:name "convert-clj"})
(ds/map :content) {:name "get-content"}
(count-words)
(ds/map format-count {:name :format-count})
(ds/write-text-file output {:num-shards numShards}))
p))
;;;;;;;;;;;;;
Pub / Sub ; ;
;;;;;;;;;;;;;
Run using : run pub - sub --pubsubProject=[your google cloud project ] --tempLocation = gs:/[your - bucket]/tmp/ --stagingLocation = gs://[your - bucket]/jars
;; You must create the my-subscription and my-transformed-subscription subscriptions, and the my-transformed-topic topics
;; before you run this
(defoptions PubSubOptions
{:pubsubProject {:default "yourproject"
:type String}})
(defn stream-interactions-from-pubsub
[pipeline read-topic write-transformed-topic]
(->> pipeline
(ps/read-from-pubsub read-topic {:name "read-interactions-from-pubsub" :kind :topic})
(ds/map (fn [message]
(log/info (str "Got message:\n" message))
(str/reverse message))
{:name "log-message"})
(ps/write-to-pubsub write-transformed-topic {:name "write-forwarded-interactions-to-pubsub"})))
(defn stream-forwarded-interactions-from-pubsub
[pipeline read-transformed-subscription]
(->> pipeline
(ps/read-from-pubsub read-transformed-subscription {:name "read-transformed-interactions-from-pubsub"})
(ds/map (fn [message]
(log/info (str "Got transformed message:\n" message))
message)
{:name "log-transformed-message"})))
(defn run-pub-sub
[str-args]
(let [pipeline (ds/make-pipeline
PubSubOptions
str-args
{:runner "DataflowRunner"
:streaming true})
{:keys [pubsubProject]} (ds/get-pipeline-options pipeline)
read-topic (format "projects/%s/topics/my-topic" pubsubProject)
write-transformed-topic (format "projects/%s/topics/my-transformed-topic" pubsubProject)
read-transformed-subscription (format "projects/%s/subscriptions/my-transformed-subscription" pubsubProject)]
(stream-interactions-from-pubsub pipeline read-topic write-transformed-topic)
(stream-forwarded-interactions-from-pubsub pipeline read-transformed-subscription)))
;;;;;;;;;;
Main ; ;
;;;;;;;;;;
(defn -main
[job & args]
(compile 'datasplash.examples)
(some-> (cond
(= job "word-count") (run-word-count args)
(= job "dedup") (run-dedup args)
(= job "filter") (run-filter args)
(= job "combine-per-key") (run-combine-per-key args)
(= job "max-per-key") (run-max-per-key args)
(= job "standard-sql") (run-standard-sql-query args)
(= job "datastore-word-count") (run-datastore-word-count args)
(= job "pub-sub") (run-pub-sub args)
(re-find #"help" job)
(do
(doseq [klass [WordCountOptions]]
(PipelineOptionsFactory/register (Class/forName (name klass))))
(-> (PipelineOptionsFactory/fromArgs
(into-array String (concat [job] args)))
(.create)
(.run))))
(ds/run-pipeline)))
| null | https://raw.githubusercontent.com/ngrunwald/datasplash/2275ea15b6c906a234b4455cd94926d8b2d54b41/src/clj/datasplash/examples.clj | clojure |
;
Port of
;
Port of
Filter ;;
Port of
;
Port of
;
;
Example showing how to enable support for StandardSQL in your queries querying for words in the
DatastoreWordCount ;;
Port of
PR welcome :)
;
You must create the my-subscription and my-transformed-subscription subscriptions, and the my-transformed-topic topics
before you run this
;
| (ns datasplash.examples
(:require [clojure.string :as str]
[clojure.tools.logging :as log]
[datasplash
[api :as ds]
[bq :as bq]
[datastore :as dts]
[pubsub :as ps]
[options :as options :refer [defoptions]]]
[clojure.edn :as edn])
(:import (java.util UUID)
(com.google.datastore.v1 Query PropertyFilter$Operator)
(com.google.datastore.v1.client DatastoreHelper)
(org.apache.beam.sdk.options PipelineOptionsFactory))
(:gen-class))
(defn tokenize
[l]
(remove empty? (.split (str/trim l) "[^a-zA-Z']+")))
(defn count-words
[p]
(ds/->> :count-words p
(ds/mapcat tokenize {:name :tokenize})
(ds/frequencies)))
(defn format-count
[[k v]]
(format "%s: %d" k v))
(defoptions WordCountOptions
{:input {:default "gs-beam-samples/shakespeare/kinglear.txt"
:type String}
:output {:default "kinglear-freqs.txt"
:type String}
:numShards {:default 0
:type Long}})
(defn run-word-count
[str-args]
(let [p (ds/make-pipeline WordCountOptions str-args)
{:keys [input output numShards]} (ds/get-pipeline-options p)]
(->> p
(ds/read-text-file input {:name "King-Lear"})
(count-words)
(ds/map format-count {:name :format-count})
(ds/write-text-file output {:num-shards numShards}))))
(defoptions DeDupOptions
{:input {:default "gs-beam-samples/shakespeare/*"
:type String}
:output {:default "shakespeare-dedup.txt"
:type String}})
(defn run-dedup
[str-args]
(let [p (ds/make-pipeline DeDupOptions str-args)
{:keys [input output]} (ds/get-pipeline-options p)]
(->> p
(ds/read-text-file input {:name "ReadLines"})
(ds/distinct {:name "dedup"})
(ds/write-text-file output {:name "DedupedShakespeare"}))))
(defoptions FilterOptions
{:input {:default "clouddataflow-readonly:samples.weather_stations"
:type String}
:output {:default "youproject:yourdataset.weather_stations_new"
:type String}
:monthFilter {:default 7
:type String}})
(defn run-filter
[str-args]
(let [p (ds/make-pipeline FilterOptions str-args)
{:keys [input output monthFilter]} (ds/get-pipeline-options p)
all-rows (->> p
(bq/read-bq {:table input})
(ds/map (fn [row]
(->>
(select-keys row [:year :month :day :mean_temp])
(map (fn [[k v]] (if (string? v) [k (edn/read-string v)] [k v])))
(into {})))
{:name "Projection"}))
global-mean-temp (->> all-rows
(ds/combine (ds/mean-fn :mapper :mean_temp))
(ds/view))
filtered-results (->> all-rows
(ds/filter (fn [{:keys [month]}] (= month monthFilter)))
(ds/filter (fn [{:keys [mean_temp]}]
(let [gtemp (:global-mean (ds/side-inputs))]
(< mean_temp gtemp)))
{:name "ParseAndFilter" :side-inputs {:global-mean global-mean-temp}}))]
(if (re-find #":[^/]" output)
(bq/write-bq-table (bq/custom-output-fn (fn [x]
(str output "_" (:year (.getValue x)))))
{:schema [{:name "year" :type "INTEGER"}
{:name "month":type "INTEGER"}
{:name "day" :type "INTEGER"}
{:name "mean_temp" :type "FLOAT"}]
:create-disposition :if-needed
:write-disposition :truncate}
filtered-results)
(ds/write-edn-file output filtered-results))))
(defoptions CombinePerKeyOptions
{:input {:default "publicdata:samples.shakespeare"
:type String}
:output {:default "combinePerKeyRes.edn"
:type String}
:minWordLength {:default 8
:type Long}})
(defn run-combine-per-key
[str-args]
(let [p (ds/make-pipeline CombinePerKeyOptions str-args)
{:keys [input output minWordLength]} (ds/get-pipeline-options p)
results (->> p
(bq/read-bq {:table input})
(ds/filter (fn [{:keys [word]}] (> (count word) minWordLength)))
(ds/map-kv (fn [{:keys [word corpus]}] [word corpus]))
(ds/combine
(ds/sfn (fn [words] (str/join "," words)))
{:scope :per-key})
(ds/map (fn [[word plays]] {:word word :all_plays plays})))]
(if (re-find #":[^/]" output)
(bq/write-bq-table output {:schema [{:name "word" :type "STRING"}
{:name "all_plays" :type "STRING"}]
:create-disposition :if-needed
:write-disposition :truncate}
results)
(ds/write-edn-file output results))))
Port of
(defoptions MaxPerKeyOptions
{:input {:default "clouddataflow-readonly:samples.weather_stations"
:type String}
:output {:default "maxperKeyRes.edn"
:type String}})
(defn run-max-per-key
[str-args]
(let [p (ds/make-pipeline MaxPerKeyOptions str-args)
{:keys [input output]} (ds/get-pipeline-options p)
results (->> p
(bq/read-bq {:table input})
(ds/map-kv (fn [{:keys [month mean_temp]}]
[(edn/read-string month) (double mean_temp)]))
(ds/combine (ds/max-fn) {:scope :per-key})
(ds/map (fn [[k v]]
{:month k :max_mean_temp v})))]
(if (re-find #":[^/]" output)
(bq/write-bq-table output {:schema [{:name "month" :type "INTEGER"}
{:name "max_mean_temp" :type "FLOAT"}]
:create-disposition :if-needed
:write-disposition :truncate}
results)
(ds/write-edn-file output results))))
shakespeare dataset that has more than 500 words
Test calling run standard - sql --stagingLocation = gs://[your - bucket]/jars --output gs://[your - bucket]/
(def StandardSQLOptions
{:input {:default "bigquery-public-data.samples.shakespeare"
:type String}
:output {:default "project:dataset.table"
:type String}
:tempLocation {:default "gs"
:type String}})
(defn run-standard-sql-query
[str-args]
the DirectPipelineRunner does n't support standardSql yet
(let [p (ds/make-pipeline StandardSQLOptions str-args {:runner "DataflowRunner"})
{:keys [_input output]} (ds/get-pipeline-options p)
query "SELECT * from `bigquery-public-data.samples.shakespeare` LIMIT 100"
results (->> p
(bq/read-bq {:query query
:standard-sql? true}))]
(ds/write-edn-file output results)))
(defoptions DatastoreWordCountOptions
{:input {:default "gs-beam-samples/shakespeare/kinglear.txt"
:type String}
:output {:default "kinglear-freqs.txt"
:type String}
:dataset {:default "yourdataset"
:type String}
:kind {:default "yourkind"
:type String}
:namespace {:default "yournamespace"
:type String}
:isReadOnly {:default false
:type Boolean}
:numShards {:default 0
:type Long}})
(defn make-ancestor-key
[{:keys [kind namespace]}]
(dts/make-ds-key {:kind kind :namespace namespace :key "root"}))
Query is not wrapped yet , use Interop
(defn make-ancestor-kind-query
[{:keys [kind namespace] :as opts}]
(let [qb (Query/newBuilder)]
(-> qb (.addKindBuilder) (.setName kind))
(.setFilter qb (DatastoreHelper/makeFilter
"__key__"
(PropertyFilter$Operator/valueOf "HAS_ANCESTOR")
(dts/make-ds-value (make-ancestor-key opts))))
(.build qb)))
(defn run-datastore-word-count
[str-args]
(let [p (ds/make-pipeline DatastoreWordCountOptions str-args)
{:keys [input output dataset kind
namespace isReadOnly numShards] :as opts} (ds/get-pipeline-options p)
root (make-ancestor-key opts)]
(when-not isReadOnly
(->> p
(ds/read-text-file input {:name "King-Lear"})
(ds/map (fn [content]
(dts/make-ds-entity
{:content content}
{:namespace namespace
:key (-> (UUID/randomUUID) (.toString))
:kind kind
:path [root]}))
{:name "create-entities"})
(dts/write-datastore-raw
{:project-id dataset :name :write-datastore})))
(->> p
(dts/read-datastore-raw {:project-id dataset
:query (make-ancestor-kind-query opts)
:namespace namespace})
(ds/map dts/entity->clj {:name "convert-clj"})
(ds/map :content) {:name "get-content"}
(count-words)
(ds/map format-count {:name :format-count})
(ds/write-text-file output {:num-shards numShards}))
p))
Run using : run pub - sub --pubsubProject=[your google cloud project ] --tempLocation = gs:/[your - bucket]/tmp/ --stagingLocation = gs://[your - bucket]/jars
(defoptions PubSubOptions
{:pubsubProject {:default "yourproject"
:type String}})
(defn stream-interactions-from-pubsub
[pipeline read-topic write-transformed-topic]
(->> pipeline
(ps/read-from-pubsub read-topic {:name "read-interactions-from-pubsub" :kind :topic})
(ds/map (fn [message]
(log/info (str "Got message:\n" message))
(str/reverse message))
{:name "log-message"})
(ps/write-to-pubsub write-transformed-topic {:name "write-forwarded-interactions-to-pubsub"})))
(defn stream-forwarded-interactions-from-pubsub
[pipeline read-transformed-subscription]
(->> pipeline
(ps/read-from-pubsub read-transformed-subscription {:name "read-transformed-interactions-from-pubsub"})
(ds/map (fn [message]
(log/info (str "Got transformed message:\n" message))
message)
{:name "log-transformed-message"})))
(defn run-pub-sub
[str-args]
(let [pipeline (ds/make-pipeline
PubSubOptions
str-args
{:runner "DataflowRunner"
:streaming true})
{:keys [pubsubProject]} (ds/get-pipeline-options pipeline)
read-topic (format "projects/%s/topics/my-topic" pubsubProject)
write-transformed-topic (format "projects/%s/topics/my-transformed-topic" pubsubProject)
read-transformed-subscription (format "projects/%s/subscriptions/my-transformed-subscription" pubsubProject)]
(stream-interactions-from-pubsub pipeline read-topic write-transformed-topic)
(stream-forwarded-interactions-from-pubsub pipeline read-transformed-subscription)))
(defn -main
[job & args]
(compile 'datasplash.examples)
(some-> (cond
(= job "word-count") (run-word-count args)
(= job "dedup") (run-dedup args)
(= job "filter") (run-filter args)
(= job "combine-per-key") (run-combine-per-key args)
(= job "max-per-key") (run-max-per-key args)
(= job "standard-sql") (run-standard-sql-query args)
(= job "datastore-word-count") (run-datastore-word-count args)
(= job "pub-sub") (run-pub-sub args)
(re-find #"help" job)
(do
(doseq [klass [WordCountOptions]]
(PipelineOptionsFactory/register (Class/forName (name klass))))
(-> (PipelineOptionsFactory/fromArgs
(into-array String (concat [job] args)))
(.create)
(.run))))
(ds/run-pipeline)))
|
477f63a2067011f925b2e1eb835410242f509a2c6bd6714c6a8dca1cac6eb1b6 | csabahruska/jhc-components | DataP.hs | module DerivingDrift.DataP where
import Name.Name(Name)
import FrontEnd.HsSyn
data Statement = DataStmt | NewTypeStmt
deriving (Eq,Show)
data Data = D {
name :: Name, -- type name
constraints :: [(Class,Var)],
vars :: [Var], -- Parameters
body :: [Body],
derives :: [Class], -- derived classes
statement :: Statement
} deriving (Eq,Show)
data Body = Body {
constructor :: Constructor,
labels :: [Name],
types :: [HsBangType]
} deriving (Eq,Show)
type Var = String
type Class = String
type Constructor = String
| null | https://raw.githubusercontent.com/csabahruska/jhc-components/a7dace481d017f5a83fbfc062bdd2d099133adf1/jhc-frontend/src/DerivingDrift/DataP.hs | haskell | type name
Parameters
derived classes | module DerivingDrift.DataP where
import Name.Name(Name)
import FrontEnd.HsSyn
data Statement = DataStmt | NewTypeStmt
deriving (Eq,Show)
data Data = D {
constraints :: [(Class,Var)],
body :: [Body],
statement :: Statement
} deriving (Eq,Show)
data Body = Body {
constructor :: Constructor,
labels :: [Name],
types :: [HsBangType]
} deriving (Eq,Show)
type Var = String
type Class = String
type Constructor = String
|
04f4914d89c5fac206c1d375fcaddbe5dacb7d7d447ceb7c3caa0d6a1f123912 | janestreet/bonsai | main.ml | open! Core
open! Bonsai_web
open Bonsai.Let_syntax
module Animation = Bonsai_experimental_animation
module Form = Bonsai_web_ui_form
let component =
let%sub interpolator_form =
Form.Elements.Dropdown.enumerable (module Animation.Interpolator) ~init:`First_item
in
let%sub text_picker = Form.Elements.Textbox.string () in
let%sub text_picker =
text_picker |> Form.Dynamic.with_default (Bonsai.Value.return "Hello Animation!")
in
let interpolator =
interpolator_form >>| Form.value_or_default ~default:Animation.Interpolator.Linear
in
let%sub { value; animate } =
Animation.Advanced.make
~fallback:(Value.return 0.0)
~interpolate:Animation.Interpolatable.float
in
let%sub forward, set_forward = Bonsai.state (module Bool) ~default_model:true in
let%sub get_forward = Bonsai.yoink forward in
let%sub get_interpolator = Bonsai.yoink interpolator in
let%sub get_things_started =
let%arr animate = animate
and get_forward = get_forward
and get_interpolator = get_interpolator
and set_forward = set_forward in
let rec switch_directions () =
let%bind.Effect forward =
match%bind.Effect get_forward with
| Active forward -> Effect.return forward
| Inactive ->
Effect.never
in
let%bind.Effect interpolator =
match%bind.Effect get_interpolator with
| Active interpolator -> Effect.return interpolator
| Inactive ->
Effect.never
in
let%bind.Effect () = set_forward (not forward) in
let target = if forward then 100.0 else 0.0 in
let duration = `For (Time_ns.Span.of_sec 0.5) in
animate ~with_:interpolator ~after_finished:(switch_directions ()) duration target
in
switch_directions ()
in
let%sub () = Bonsai.Edge.lifecycle ~on_activate:get_things_started () in
let%arr value = value
and text_picker = text_picker
and interpolator_form = interpolator_form in
let margin = Vdom.Attr.style (Css_gen.margin_left (`Px_float value)) in
let color =
let v = Float.to_int (value /. 100.0 *. 255.0) in
Vdom.Attr.style (Css_gen.color (`RGBA (Css_gen.Color.RGBA.create ~r:v ~g:v ~b:v ())))
in
let text = Form.value_or_default text_picker ~default:"Marquee" in
Vdom.Node.div
[ Form.view_as_vdom text_picker
; Form.view_as_vdom interpolator_form
; Vdom.Node.h1 ~attr:margin [ Vdom.Node.text text ]
; Vdom.Node.h1 ~attr:color [ Vdom.Node.text text ]
]
;;
let () = Bonsai_web.Start.start component
| null | https://raw.githubusercontent.com/janestreet/bonsai/782fecd000a1f97b143a3f24b76efec96e36a398/examples/animation/main.ml | ocaml | open! Core
open! Bonsai_web
open Bonsai.Let_syntax
module Animation = Bonsai_experimental_animation
module Form = Bonsai_web_ui_form
let component =
let%sub interpolator_form =
Form.Elements.Dropdown.enumerable (module Animation.Interpolator) ~init:`First_item
in
let%sub text_picker = Form.Elements.Textbox.string () in
let%sub text_picker =
text_picker |> Form.Dynamic.with_default (Bonsai.Value.return "Hello Animation!")
in
let interpolator =
interpolator_form >>| Form.value_or_default ~default:Animation.Interpolator.Linear
in
let%sub { value; animate } =
Animation.Advanced.make
~fallback:(Value.return 0.0)
~interpolate:Animation.Interpolatable.float
in
let%sub forward, set_forward = Bonsai.state (module Bool) ~default_model:true in
let%sub get_forward = Bonsai.yoink forward in
let%sub get_interpolator = Bonsai.yoink interpolator in
let%sub get_things_started =
let%arr animate = animate
and get_forward = get_forward
and get_interpolator = get_interpolator
and set_forward = set_forward in
let rec switch_directions () =
let%bind.Effect forward =
match%bind.Effect get_forward with
| Active forward -> Effect.return forward
| Inactive ->
Effect.never
in
let%bind.Effect interpolator =
match%bind.Effect get_interpolator with
| Active interpolator -> Effect.return interpolator
| Inactive ->
Effect.never
in
let%bind.Effect () = set_forward (not forward) in
let target = if forward then 100.0 else 0.0 in
let duration = `For (Time_ns.Span.of_sec 0.5) in
animate ~with_:interpolator ~after_finished:(switch_directions ()) duration target
in
switch_directions ()
in
let%sub () = Bonsai.Edge.lifecycle ~on_activate:get_things_started () in
let%arr value = value
and text_picker = text_picker
and interpolator_form = interpolator_form in
let margin = Vdom.Attr.style (Css_gen.margin_left (`Px_float value)) in
let color =
let v = Float.to_int (value /. 100.0 *. 255.0) in
Vdom.Attr.style (Css_gen.color (`RGBA (Css_gen.Color.RGBA.create ~r:v ~g:v ~b:v ())))
in
let text = Form.value_or_default text_picker ~default:"Marquee" in
Vdom.Node.div
[ Form.view_as_vdom text_picker
; Form.view_as_vdom interpolator_form
; Vdom.Node.h1 ~attr:margin [ Vdom.Node.text text ]
; Vdom.Node.h1 ~attr:color [ Vdom.Node.text text ]
]
;;
let () = Bonsai_web.Start.start component
| |
76284201567c714dd596c80db633fbca572ab0157ac285d4cc9c26ea4c4f7640 | agda/agda | PartialOrd.hs | {-# LANGUAGE TemplateHaskell #-}
module Internal.Utils.PartialOrd
( ISet(ISet)
, tests
) where
import Agda.Utils.PartialOrd
import Data.List ( (\\) )
import Data.Set (Set)
import qualified Data.Set as Set
import Internal.Helpers
------------------------------------------------------------------------------
-- * Properties
instance Arbitrary PartialOrdering where
arbitrary = arbitraryBoundedEnum
-- | We test our properties on integer sets ordered by inclusion.
newtype ISet = ISet { iset :: Inclusion (Set Int) }
deriving (Eq, Ord, PartialOrd, Show)
instance Arbitrary ISet where
arbitrary = ISet . Inclusion . Set.fromList <$> listOf (choose (0, 8))
| Any two elements are ' related ' in the way ' comparable ' computes .
prop_comparable_related :: ISet -> ISet -> Bool
prop_comparable_related (ISet a) (ISet b) =
related a o b where o = comparable a b
| @flip comparable a b = = oppPO ( comparable a b)@
prop_oppPO :: ISet -> ISet -> Bool
prop_oppPO (ISet a) (ISet b) =
comparable a b == oppPO (comparable b a)
-- | Auxiliary function: lists to sets = sorted duplicate-free lists.
sortUniq :: [Ordering] -> [Ordering]
sortUniq = Set.toAscList . Set.fromList
-- | 'leqPO' is inclusion of the associated 'Ordering' sets.
prop_leqPO_sound :: PartialOrdering -> PartialOrdering -> Bool
prop_leqPO_sound p q =
(p `leqPO` q) == null (toOrderings p \\ toOrderings q)
-- | 'orPO' amounts to the union of the associated 'Ordering' sets.
Except that ' orPO POLT POGT = = ' which should also include ' POEQ ' .
prop_orPO_sound :: PartialOrdering -> PartialOrdering -> Bool
prop_orPO_sound p q =
(p `orPO` q) == fromOrderings (toOrderings p ++ toOrderings q)
-- | 'orPO' is associative.
prop_associative_orPO :: PartialOrdering -> PartialOrdering ->
PartialOrdering -> Bool
prop_associative_orPO = isAssociative orPO
-- | 'orPO' is commutative.
prop_commutative_orPO :: PartialOrdering -> PartialOrdering -> Bool
prop_commutative_orPO = isCommutative orPO
-- | 'orPO' is idempotent.
prop_idempotent_orPO :: PartialOrdering -> Bool
prop_idempotent_orPO = isIdempotent orPO
| The dominant element wrt . ' orPO ' is ' ' .
prop_zero_orPO :: PartialOrdering -> Bool
prop_zero_orPO = isZero POAny orPO
-- | Soundness of 'seqPO'.
--
As QuickCheck test , this property is inefficient , see ' prop_seqPO ' .
property_seqPO :: ISet -> PartialOrdering -> ISet -> PartialOrdering ->
ISet -> Property
property_seqPO (ISet a) o (ISet b) p (ISet c) =
related a o b && related b p c ==> related a (seqPO o p) c
-- | A more efficient way of stating soundness of 'seqPO'.
prop_seqPO :: ISet -> ISet -> ISet -> Bool
prop_seqPO (ISet a) (ISet b) (ISet c) = related a o c
where o = comparable a b `seqPO` comparable b c
-- | 'PartialOrdering' is a monoid, i.e. 'seqPO' is associative and
the unit of ' seqPO ' is ' POEQ ' .
prop_monoid_seqPO :: Property3 PartialOrdering
prop_monoid_seqPO = isMonoid
| The zero of ' seqPO ' is ' ' .
prop_zero_seqPO :: PartialOrdering -> Bool
prop_zero_seqPO = isZero POAny seqPO
-- | 'seqPO' is also commutative.
prop_commutative_seqPO :: PartialOrdering -> PartialOrdering -> Bool
prop_commutative_seqPO = isCommutative seqPO
-- | 'seqPO' is idempotent.
prop_idempotent_seqPO :: PartialOrdering -> Bool
prop_idempotent_seqPO = isIdempotent seqPO
-- | 'seqPO' distributes over 'orPO'.
prop_distributive_seqPO_orPO :: PartialOrdering -> PartialOrdering ->
PartialOrdering -> Bool
prop_distributive_seqPO_orPO = isDistributive seqPO orPO
-- | The result of 'toOrderings' is a sorted list without duplicates.
prop_sorted_toOrderings :: PartialOrdering -> Bool
prop_sorted_toOrderings p =
sortUniq os == os where os = toOrderings p
-- | From 'Ordering' to 'PartialOrdering' and back is the identity.
prop_toOrderings_after_fromOrdering :: Ordering -> Bool
prop_toOrderings_after_fromOrdering o =
toOrderings (fromOrdering o) == [o]
| From ' PartialOrdering ' to ' ' and back is the identity .
prop_fromOrderings_after_toOrderings :: PartialOrdering -> Bool
prop_fromOrderings_after_toOrderings p =
fromOrderings (toOrderings p) == p
| From ' ' to ' PartialOrdering ' and back is the identity .
Except for @[LT , which is a non - canonical representative of ' ' .
prop_toOrderings_after_fromOrderings :: NonEmptyList Ordering -> Bool
prop_toOrderings_after_fromOrderings (NonEmpty os) =
Set.fromList os `Set.isSubsetOf`
Set.fromList (toOrderings (fromOrderings os))
-- | Pairs are related iff both components are related.
prop_related_pair :: ISet -> ISet -> ISet -> ISet -> PartialOrdering -> Bool
prop_related_pair (ISet x1) (ISet x2) (ISet y1) (ISet y2) o =
related (x1,x2) o (y1,y2) == (related x1 o y1 && related x2 o y2)
| Comparing ' PartialOrdering 's amounts to compare their representation as
-- 'Ordering' sets.
prop_comparable_PartialOrdering :: PartialOrdering -> PartialOrdering -> Bool
prop_comparable_PartialOrdering p q =
comparable p q == comparable (to p) (to q)
where to = Inclusion . toOrderings
------------------------------------------------------------------------
-- * All tests
------------------------------------------------------------------------
Template Haskell hack to make the following $ allProperties work
under ghc-7.8 .
return [] -- KEEP!
| All tests as collected by ' allProperties ' .
--
Using ' allProperties ' is convenient and superior to the manual
-- enumeration of tests, since the name of the property is added
-- automatically.
tests :: TestTree
tests = testProperties "Internal.Utils.PartialOrd" $allProperties
| null | https://raw.githubusercontent.com/agda/agda/3543ef3df19228012a1ac5be766cc38fd2f65f6a/test/Internal/Utils/PartialOrd.hs | haskell | # LANGUAGE TemplateHaskell #
----------------------------------------------------------------------------
* Properties
| We test our properties on integer sets ordered by inclusion.
| Auxiliary function: lists to sets = sorted duplicate-free lists.
| 'leqPO' is inclusion of the associated 'Ordering' sets.
| 'orPO' amounts to the union of the associated 'Ordering' sets.
| 'orPO' is associative.
| 'orPO' is commutative.
| 'orPO' is idempotent.
| Soundness of 'seqPO'.
| A more efficient way of stating soundness of 'seqPO'.
| 'PartialOrdering' is a monoid, i.e. 'seqPO' is associative and
| 'seqPO' is also commutative.
| 'seqPO' is idempotent.
| 'seqPO' distributes over 'orPO'.
| The result of 'toOrderings' is a sorted list without duplicates.
| From 'Ordering' to 'PartialOrdering' and back is the identity.
| Pairs are related iff both components are related.
'Ordering' sets.
----------------------------------------------------------------------
* All tests
----------------------------------------------------------------------
KEEP!
enumeration of tests, since the name of the property is added
automatically. |
module Internal.Utils.PartialOrd
( ISet(ISet)
, tests
) where
import Agda.Utils.PartialOrd
import Data.List ( (\\) )
import Data.Set (Set)
import qualified Data.Set as Set
import Internal.Helpers
instance Arbitrary PartialOrdering where
arbitrary = arbitraryBoundedEnum
newtype ISet = ISet { iset :: Inclusion (Set Int) }
deriving (Eq, Ord, PartialOrd, Show)
instance Arbitrary ISet where
arbitrary = ISet . Inclusion . Set.fromList <$> listOf (choose (0, 8))
| Any two elements are ' related ' in the way ' comparable ' computes .
prop_comparable_related :: ISet -> ISet -> Bool
prop_comparable_related (ISet a) (ISet b) =
related a o b where o = comparable a b
| @flip comparable a b = = oppPO ( comparable a b)@
prop_oppPO :: ISet -> ISet -> Bool
prop_oppPO (ISet a) (ISet b) =
comparable a b == oppPO (comparable b a)
sortUniq :: [Ordering] -> [Ordering]
sortUniq = Set.toAscList . Set.fromList
prop_leqPO_sound :: PartialOrdering -> PartialOrdering -> Bool
prop_leqPO_sound p q =
(p `leqPO` q) == null (toOrderings p \\ toOrderings q)
Except that ' orPO POLT POGT = = ' which should also include ' POEQ ' .
prop_orPO_sound :: PartialOrdering -> PartialOrdering -> Bool
prop_orPO_sound p q =
(p `orPO` q) == fromOrderings (toOrderings p ++ toOrderings q)
prop_associative_orPO :: PartialOrdering -> PartialOrdering ->
PartialOrdering -> Bool
prop_associative_orPO = isAssociative orPO
prop_commutative_orPO :: PartialOrdering -> PartialOrdering -> Bool
prop_commutative_orPO = isCommutative orPO
prop_idempotent_orPO :: PartialOrdering -> Bool
prop_idempotent_orPO = isIdempotent orPO
| The dominant element wrt . ' orPO ' is ' ' .
prop_zero_orPO :: PartialOrdering -> Bool
prop_zero_orPO = isZero POAny orPO
As QuickCheck test , this property is inefficient , see ' prop_seqPO ' .
property_seqPO :: ISet -> PartialOrdering -> ISet -> PartialOrdering ->
ISet -> Property
property_seqPO (ISet a) o (ISet b) p (ISet c) =
related a o b && related b p c ==> related a (seqPO o p) c
prop_seqPO :: ISet -> ISet -> ISet -> Bool
prop_seqPO (ISet a) (ISet b) (ISet c) = related a o c
where o = comparable a b `seqPO` comparable b c
the unit of ' seqPO ' is ' POEQ ' .
prop_monoid_seqPO :: Property3 PartialOrdering
prop_monoid_seqPO = isMonoid
| The zero of ' seqPO ' is ' ' .
prop_zero_seqPO :: PartialOrdering -> Bool
prop_zero_seqPO = isZero POAny seqPO
prop_commutative_seqPO :: PartialOrdering -> PartialOrdering -> Bool
prop_commutative_seqPO = isCommutative seqPO
prop_idempotent_seqPO :: PartialOrdering -> Bool
prop_idempotent_seqPO = isIdempotent seqPO
prop_distributive_seqPO_orPO :: PartialOrdering -> PartialOrdering ->
PartialOrdering -> Bool
prop_distributive_seqPO_orPO = isDistributive seqPO orPO
prop_sorted_toOrderings :: PartialOrdering -> Bool
prop_sorted_toOrderings p =
sortUniq os == os where os = toOrderings p
prop_toOrderings_after_fromOrdering :: Ordering -> Bool
prop_toOrderings_after_fromOrdering o =
toOrderings (fromOrdering o) == [o]
| From ' PartialOrdering ' to ' ' and back is the identity .
prop_fromOrderings_after_toOrderings :: PartialOrdering -> Bool
prop_fromOrderings_after_toOrderings p =
fromOrderings (toOrderings p) == p
| From ' ' to ' PartialOrdering ' and back is the identity .
Except for @[LT , which is a non - canonical representative of ' ' .
prop_toOrderings_after_fromOrderings :: NonEmptyList Ordering -> Bool
prop_toOrderings_after_fromOrderings (NonEmpty os) =
Set.fromList os `Set.isSubsetOf`
Set.fromList (toOrderings (fromOrderings os))
prop_related_pair :: ISet -> ISet -> ISet -> ISet -> PartialOrdering -> Bool
prop_related_pair (ISet x1) (ISet x2) (ISet y1) (ISet y2) o =
related (x1,x2) o (y1,y2) == (related x1 o y1 && related x2 o y2)
| Comparing ' PartialOrdering 's amounts to compare their representation as
prop_comparable_PartialOrdering :: PartialOrdering -> PartialOrdering -> Bool
prop_comparable_PartialOrdering p q =
comparable p q == comparable (to p) (to q)
where to = Inclusion . toOrderings
Template Haskell hack to make the following $ allProperties work
under ghc-7.8 .
| All tests as collected by ' allProperties ' .
Using ' allProperties ' is convenient and superior to the manual
tests :: TestTree
tests = testProperties "Internal.Utils.PartialOrd" $allProperties
|
7e7a01352c847affa264899bfdc7e976eafe7b9e3b91c56825b6848586f574a8 | yutopp/rill | os.ml |
* Copyright 2020 - .
*
* Distributed under the Boost Software License , Version 1.0 .
* ( See accompanying file LICENSE_1_0.txt or copy at
* )
* Copyright yutopp 2020 - .
*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* )
*)
open! Base
module Option = struct
include Option
let map_or_else opt ~default ~f =
match opt with Some v -> f v | None -> default ()
let ok_or_else opt ~err = match opt with Some v -> v | None -> err ()
end
exception Unexpected_result of string
exception Not_exited_with_code_zero of string * Unix.process_status
module Fd = struct
type t = { mutable fd : Unix.file_descr option }
let from fd = { fd = Some fd }
let close desc =
match desc.fd with
| Some fd ->
Unix.close fd;
desc.fd <- None
| None -> ()
let into_in_ch desc =
let ch = Unix.in_channel_of_descr (Option.value_exn desc.fd) in
desc.fd <- None;
ch
let raw desc = Option.value_exn desc.fd
end
module Pipe = struct
let create () =
let (in_r, in_w) = Unix.pipe ~cloexec:true () in
(Fd.from in_r, Fd.from in_w)
end
let current_exe () = Unix.readlink "/proc/self/exe"
let read_dir_names dir =
let handle = Unix.opendir dir in
Exn.protectx
~f:(fun handle ->
let rec f (acc : string list) =
try
let name = Unix.readdir handle in
f (name :: acc)
with End_of_file -> acc
in
f [])
handle ~finally:Unix.closedir
(* TODO: check is_file/is_dir *)
let grob_dir dir pattern =
let reg = Str.regexp pattern in
let names = read_dir_names dir in
List.filter names ~f:(fun name -> Str.string_match reg name 0)
(* TODO: fix *)
let join_path paths =
match paths with
| [] -> ""
| x :: xs -> List.fold_left xs ~init:x ~f:Stdlib.Filename.concat
let exec args ~f =
let () =
let s = String.concat ~sep:" " args in
[%loga.debug "exec = %s" s]
in
let (in_r, in_w) = Pipe.create () in
[%defer Fd.close in_w];
[%defer Fd.close in_r];
let (out_r, out_w) = Pipe.create () in
[%defer Fd.close out_w];
let out_ch = Fd.into_in_ch out_r in
[%defer Stdlib.close_in_noerr out_ch];
let (err_r, err_w) = Pipe.create () in
[%defer Fd.close err_r];
let err_ch = Fd.into_in_ch err_r in
[%defer Stdlib.close_in_noerr err_ch];
let pid =
let args = Array.of_list args in
Unix.create_process args.(0) args (Fd.raw in_r) (Fd.raw out_w)
(Fd.raw err_w)
in
let (_pid, status) = Unix.waitpid [] pid in
Fd.close out_w;
Fd.close err_w;
f out_ch err_ch status (String.concat ~sep:" " args)
let assume_exit_successfully ~status ~args =
match status with
| Unix.WEXITED 0 -> Ok ()
| s -> Error (Not_exited_with_code_zero (args, s))
let assume_exit_successfully_with_out ~status ~args ~out_ch ~err_ch =
let ret = assume_exit_successfully ~status ~args in
Result.iter_error ret ~f:(fun _ ->
let out = Stdio.In_channel.input_all out_ch in
Stdio.Out_channel.printf "%s" out;
let err = Stdio.In_channel.input_all err_ch in
Stdio.Out_channel.eprintf "%s" err);
ret
let mktemp_dir prefix =
let open Result.Let_syntax in
let tmp = Caml.Filename.get_temp_dir_name () in
exec [ "mktemp"; "-d"; "-p"; tmp; "-t"; prefix ]
~f:(fun out_ch err_ch status args ->
let%bind () = assume_exit_successfully ~status ~args in
let out =
Stdio.In_channel.input_all out_ch |> String.chop_suffix_exn ~suffix:"\n"
in
let%bind () =
if Stdlib.Filename.is_relative out then Error (Unexpected_result "")
else Ok ()
in
Ok out)
module Spec_env = struct
let cc ~spec =
Sys.getenv "RILL_CC"
|> Option.ok_or_else ~err:(fun () -> Target_spec.(spec.cc))
let cc_sysroot ~spec =
Sys.getenv "RILL_CC_SYSROOT"
|> Option.bind ~f:(fun path ->
if String.equal path "" then None else Some path)
|> Option.map_or_else
~default:(fun () -> Target_spec.(spec.cc_sysroot))
~f:Option.return
let ar ~spec =
Sys.getenv "RILL_AR"
|> Option.ok_or_else ~err:(fun () -> Target_spec.(spec.ar))
let ranlib ~spec =
Sys.getenv "RILL_RANLIB"
|> Option.ok_or_else ~err:(fun () -> Target_spec.(spec.ranlib))
end
let cc_obj src out =
let open Result.Let_syntax in
let args = [ [ "gcc" ] ] in
let args = [ Printf.sprintf "-o%s" out; "-c"; src ] :: args in
let args = args |> List.rev |> List.concat in
exec args ~f:(fun out_ch err_ch status args ->
let%bind () =
assume_exit_successfully_with_out ~status ~args ~out_ch ~err_ch
in
Ok ())
let cc_exe ~spec ?(only_pp = false) ?(only_comp = false)
?(only_comp_asm = false) ?(linker_flags = []) ~lib_dirs ~lib_names ~objs
~out () =
let open Result.Let_syntax in
let args = [ [ Spec_env.cc ~spec ] ] in
let args =
Spec_env.cc_sysroot ~spec
|> Option.value_map ~default:args ~f:(fun path ->
[ Printf.sprintf "--sysroot=%s" path ] :: args)
in
let args = objs :: args in
let args =
match (only_pp, only_comp, only_comp_asm) with
(* with linking *)
| (false, false, false) ->
let args = List.map lib_dirs ~f:(Printf.sprintf "-L%s") :: args in
let args = [ "-static" ] :: args in
let args = linker_flags :: args in
let args = List.map lib_names ~f:(Printf.sprintf "-l%s") :: args in
args
(* without linking *)
| (_, _, _) ->
let args = (if only_pp then [ "-E" ] else []) :: args in
let args = (if only_comp then [ "-S" ] else []) :: args in
let args = (if only_comp_asm then [ "-c" ] else []) :: args in
args
in
let args = [ Printf.sprintf "-o%s" out ] :: args in
let args = args |> List.rev |> List.concat in
exec args ~f:(fun out_ch err_ch status args ->
let%bind () =
assume_exit_successfully_with_out ~status ~args ~out_ch ~err_ch
in
Ok ())
let ar ~spec ~objs ~out () =
let open Result.Let_syntax in
let args = [ [ Spec_env.ar ~spec; "qc"; out ] ] in
let args = objs :: args in
let args = args |> List.rev |> List.concat in
exec args ~f:(fun _out_ch _err_ch status args ->
let%bind () = assume_exit_successfully ~status ~args in
Ok ())
let ranlib ~spec ~out () =
let open Result.Let_syntax in
let args = [ [ Spec_env.ranlib ~spec; out ] ] in
let args = args |> List.rev |> List.concat in
exec args ~f:(fun _out_ch _err_ch status args ->
let%bind () = assume_exit_successfully ~status ~args in
Ok ())
let cp ~src ~dst =
let open Result.Let_syntax in
let args = [ [ "cp"; src; dst ] ] in
let args = args |> List.rev |> List.concat in
exec args ~f:(fun _out_ch _err_ch status args ->
let%bind () = assume_exit_successfully ~status ~args in
Ok ())
| null | https://raw.githubusercontent.com/yutopp/rill/375b67c03ab2087d0a2a833bd9e80f3e51e2694f/rillc/lib/common/os.ml | ocaml | TODO: check is_file/is_dir
TODO: fix
with linking
without linking |
* Copyright 2020 - .
*
* Distributed under the Boost Software License , Version 1.0 .
* ( See accompanying file LICENSE_1_0.txt or copy at
* )
* Copyright yutopp 2020 - .
*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* )
*)
open! Base
module Option = struct
include Option
let map_or_else opt ~default ~f =
match opt with Some v -> f v | None -> default ()
let ok_or_else opt ~err = match opt with Some v -> v | None -> err ()
end
exception Unexpected_result of string
exception Not_exited_with_code_zero of string * Unix.process_status
module Fd = struct
type t = { mutable fd : Unix.file_descr option }
let from fd = { fd = Some fd }
let close desc =
match desc.fd with
| Some fd ->
Unix.close fd;
desc.fd <- None
| None -> ()
let into_in_ch desc =
let ch = Unix.in_channel_of_descr (Option.value_exn desc.fd) in
desc.fd <- None;
ch
let raw desc = Option.value_exn desc.fd
end
module Pipe = struct
let create () =
let (in_r, in_w) = Unix.pipe ~cloexec:true () in
(Fd.from in_r, Fd.from in_w)
end
let current_exe () = Unix.readlink "/proc/self/exe"
let read_dir_names dir =
let handle = Unix.opendir dir in
Exn.protectx
~f:(fun handle ->
let rec f (acc : string list) =
try
let name = Unix.readdir handle in
f (name :: acc)
with End_of_file -> acc
in
f [])
handle ~finally:Unix.closedir
let grob_dir dir pattern =
let reg = Str.regexp pattern in
let names = read_dir_names dir in
List.filter names ~f:(fun name -> Str.string_match reg name 0)
let join_path paths =
match paths with
| [] -> ""
| x :: xs -> List.fold_left xs ~init:x ~f:Stdlib.Filename.concat
let exec args ~f =
let () =
let s = String.concat ~sep:" " args in
[%loga.debug "exec = %s" s]
in
let (in_r, in_w) = Pipe.create () in
[%defer Fd.close in_w];
[%defer Fd.close in_r];
let (out_r, out_w) = Pipe.create () in
[%defer Fd.close out_w];
let out_ch = Fd.into_in_ch out_r in
[%defer Stdlib.close_in_noerr out_ch];
let (err_r, err_w) = Pipe.create () in
[%defer Fd.close err_r];
let err_ch = Fd.into_in_ch err_r in
[%defer Stdlib.close_in_noerr err_ch];
let pid =
let args = Array.of_list args in
Unix.create_process args.(0) args (Fd.raw in_r) (Fd.raw out_w)
(Fd.raw err_w)
in
let (_pid, status) = Unix.waitpid [] pid in
Fd.close out_w;
Fd.close err_w;
f out_ch err_ch status (String.concat ~sep:" " args)
let assume_exit_successfully ~status ~args =
match status with
| Unix.WEXITED 0 -> Ok ()
| s -> Error (Not_exited_with_code_zero (args, s))
let assume_exit_successfully_with_out ~status ~args ~out_ch ~err_ch =
let ret = assume_exit_successfully ~status ~args in
Result.iter_error ret ~f:(fun _ ->
let out = Stdio.In_channel.input_all out_ch in
Stdio.Out_channel.printf "%s" out;
let err = Stdio.In_channel.input_all err_ch in
Stdio.Out_channel.eprintf "%s" err);
ret
let mktemp_dir prefix =
let open Result.Let_syntax in
let tmp = Caml.Filename.get_temp_dir_name () in
exec [ "mktemp"; "-d"; "-p"; tmp; "-t"; prefix ]
~f:(fun out_ch err_ch status args ->
let%bind () = assume_exit_successfully ~status ~args in
let out =
Stdio.In_channel.input_all out_ch |> String.chop_suffix_exn ~suffix:"\n"
in
let%bind () =
if Stdlib.Filename.is_relative out then Error (Unexpected_result "")
else Ok ()
in
Ok out)
module Spec_env = struct
let cc ~spec =
Sys.getenv "RILL_CC"
|> Option.ok_or_else ~err:(fun () -> Target_spec.(spec.cc))
let cc_sysroot ~spec =
Sys.getenv "RILL_CC_SYSROOT"
|> Option.bind ~f:(fun path ->
if String.equal path "" then None else Some path)
|> Option.map_or_else
~default:(fun () -> Target_spec.(spec.cc_sysroot))
~f:Option.return
let ar ~spec =
Sys.getenv "RILL_AR"
|> Option.ok_or_else ~err:(fun () -> Target_spec.(spec.ar))
let ranlib ~spec =
Sys.getenv "RILL_RANLIB"
|> Option.ok_or_else ~err:(fun () -> Target_spec.(spec.ranlib))
end
let cc_obj src out =
let open Result.Let_syntax in
let args = [ [ "gcc" ] ] in
let args = [ Printf.sprintf "-o%s" out; "-c"; src ] :: args in
let args = args |> List.rev |> List.concat in
exec args ~f:(fun out_ch err_ch status args ->
let%bind () =
assume_exit_successfully_with_out ~status ~args ~out_ch ~err_ch
in
Ok ())
let cc_exe ~spec ?(only_pp = false) ?(only_comp = false)
?(only_comp_asm = false) ?(linker_flags = []) ~lib_dirs ~lib_names ~objs
~out () =
let open Result.Let_syntax in
let args = [ [ Spec_env.cc ~spec ] ] in
let args =
Spec_env.cc_sysroot ~spec
|> Option.value_map ~default:args ~f:(fun path ->
[ Printf.sprintf "--sysroot=%s" path ] :: args)
in
let args = objs :: args in
let args =
match (only_pp, only_comp, only_comp_asm) with
| (false, false, false) ->
let args = List.map lib_dirs ~f:(Printf.sprintf "-L%s") :: args in
let args = [ "-static" ] :: args in
let args = linker_flags :: args in
let args = List.map lib_names ~f:(Printf.sprintf "-l%s") :: args in
args
| (_, _, _) ->
let args = (if only_pp then [ "-E" ] else []) :: args in
let args = (if only_comp then [ "-S" ] else []) :: args in
let args = (if only_comp_asm then [ "-c" ] else []) :: args in
args
in
let args = [ Printf.sprintf "-o%s" out ] :: args in
let args = args |> List.rev |> List.concat in
exec args ~f:(fun out_ch err_ch status args ->
let%bind () =
assume_exit_successfully_with_out ~status ~args ~out_ch ~err_ch
in
Ok ())
let ar ~spec ~objs ~out () =
let open Result.Let_syntax in
let args = [ [ Spec_env.ar ~spec; "qc"; out ] ] in
let args = objs :: args in
let args = args |> List.rev |> List.concat in
exec args ~f:(fun _out_ch _err_ch status args ->
let%bind () = assume_exit_successfully ~status ~args in
Ok ())
let ranlib ~spec ~out () =
let open Result.Let_syntax in
let args = [ [ Spec_env.ranlib ~spec; out ] ] in
let args = args |> List.rev |> List.concat in
exec args ~f:(fun _out_ch _err_ch status args ->
let%bind () = assume_exit_successfully ~status ~args in
Ok ())
let cp ~src ~dst =
let open Result.Let_syntax in
let args = [ [ "cp"; src; dst ] ] in
let args = args |> List.rev |> List.concat in
exec args ~f:(fun _out_ch _err_ch status args ->
let%bind () = assume_exit_successfully ~status ~args in
Ok ())
|
7a69a3fe82c1bfdd37673e8c74b482946c348d47aea3988900094697ce8e12fd | zcaudate-me/lein-repack | file_info.clj | (ns leiningen.repack.data.file-info
(:require [clojure.string :as string]))
(defrecord FileInfo []
Object
(toString [this] (-> this :path)))
(defmethod print-method FileInfo [v w]
(.write w (str v)))
| null | https://raw.githubusercontent.com/zcaudate-me/lein-repack/1eb542d66a77f55c4b5625783027c31fd2dddfe5/src/leiningen/repack/data/file_info.clj | clojure | (ns leiningen.repack.data.file-info
(:require [clojure.string :as string]))
(defrecord FileInfo []
Object
(toString [this] (-> this :path)))
(defmethod print-method FileInfo [v w]
(.write w (str v)))
| |
962f320c0ea88184df7466373a1e5db6aca0d88f879815793e9e633981f8b689 | lasp-lang/partisan | skeen_3pc.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2019 . All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% NOTE: This protocol doesn't cover recovery. It's merely here for
%% demonstration purposes.
-module(skeen_3pc).
-author("Christopher S. Meiklejohn <>").
-include("partisan.hrl").
-include("partisan_logger.hrl").
%% API
-export([start_link/0,
broadcast/2,
update/1,
stop/0]).
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-record(state, {next_id, membership}).
-record(transaction, {id,
coordinator,
from,
participants,
coordinator_status,
participant_status,
prepared,
precommitted,
committed,
aborted,
uncertain,
server_ref,
message}).
-define(COORDINATING_TRANSACTIONS, coordinating_transactions_table).
-define(PARTICIPATING_TRANSACTIONS, participating_transactions_table).
%%%===================================================================
%%% API
%%%===================================================================
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
stop() ->
gen_server:stop(?MODULE, normal, infinity).
%% @doc Broadcast.
%% Avoid using call by sending a message and waiting for a response.
broadcast(ServerRef, Message) ->
%% TODO: Bit of a hack just to get this working.
true = erlang:register(txn_coordinator, self()),
From = partisan_remote_ref:from_term(txn_coordinator),
gen_server:cast(?MODULE, {broadcast, From, ServerRef, Message}),
receive
Response ->
Response
end.
%% @doc Membership update.
update(LocalState0) ->
LocalState = partisan_peer_service:decode(LocalState0),
gen_server:cast(?MODULE, {update, LocalState}).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
@private
init([]) ->
%% Seed the random number generator.
partisan_config:seed(),
Register membership update callback .
partisan_peer_service:add_sup_callback(fun ?MODULE:update/1),
%% Open ETS table to track coordinated transactions.
?COORDINATING_TRANSACTIONS = ets:new(?COORDINATING_TRANSACTIONS, [set, named_table, public]),
%% Open ETS table to track participating transactions.
?PARTICIPATING_TRANSACTIONS = ets:new(?PARTICIPATING_TRANSACTIONS, [set, named_table, public]),
%% Start with initial membership.
{ok, Membership} = partisan_peer_service:members(),
?LOG_INFO("Starting with membership: ~p", [Membership]),
{ok, #state{next_id=0, membership=membership(Membership)}}.
@private
handle_call(Msg, _From, State) ->
?LOG_WARNING("Unhandled call messages at module ~p: ~p", [?MODULE, Msg]),
{reply, ok, State}.
@private
handle_cast({broadcast, From, ServerRef, Message}, #state{next_id=NextId, membership=Membership}=State) ->
%% Generate unique transaction id.
MyNode = partisan:node(),
Id = {MyNode, NextId},
%% Set transaction timer.
erlang:send_after(1000, self(), {coordinator_timeout, Id}),
%% Create transaction in a preparing state.
Transaction = #transaction{
id=Id,
coordinator=MyNode,
from=From,
participants=Membership,
coordinator_status=preparing,
participant_status=unknown,
prepared=[],
precommitted=[],
committed=[],
aborted=[],
uncertain=[],
server_ref=ServerRef,
message=Message
},
%% Store transaction.
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
%% Send prepare message to all participants including ourself.
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending prepare message to node ~p: ~p", [node(), N, Message]),
partisan:forward_message(
N,
?MODULE,
{prepare, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Membership)),
{noreply, State#state{next_id=NextId}};
handle_cast({update, Membership0}, State) ->
Membership = membership(Membership0),
{noreply, State#state{membership=Membership}};
handle_cast(Msg, State) ->
?LOG_WARNING("Unhandled cast messages at module ~p: ~p", [?MODULE, Msg]),
{noreply, State}.
@private
%% Incoming messages.
handle_info({participant_timeout, Id}, State) ->
%% Find transaction record.
case ets:lookup(?PARTICIPATING_TRANSACTIONS, Id) of
[{_Id, #transaction{participants=_Participants, participant_status=ParticipantStatus, server_ref=ServerRef, message=Message} = Transaction}] ->
?LOG_INFO("Participant timeout when participant ~p was in the ~p state.", [node(), ParticipantStatus]),
case ParticipantStatus of
prepared ->
?LOG_INFO("Participant: ~p moving from ~p to abort state.", [node(), ParticipantStatus]),
%% Write log record showing abort occurred.
true = ets:insert(?PARTICIPATING_TRANSACTIONS, {Id, Transaction#transaction{participant_status=abort}});
precommit ->
?LOG_INFO("Participant: ~p moving from precommit to commit state.", [node()]),
%% Proceed with the commit.
%% Write log record showing commit occurred.
true = ets:insert(?PARTICIPATING_TRANSACTIONS, {Id, Transaction#transaction{participant_status=commit}}),
%% Forward to process.
partisan_peer_service_manager:process_forward(ServerRef, Message);
commit ->
?LOG_INFO("Participant: ~p already committed.", [node()]),
ok
end;
[] ->
?LOG_ERROR("Notification for participant timeout message but no transaction found: abort or commit already occurred!", [])
end,
{noreply, State};
handle_info({coordinator_timeout, Id}, State) ->
%% Find transaction record.
case ets:lookup(?COORDINATING_TRANSACTIONS, Id) of
[{_Id, #transaction{coordinator_status=CoordinatorStatus, participants=Participants, precommitted=Precommitted, from=From} = Transaction0}] ->
?LOG_INFO("Coordinator timeout when participant ~p was in the ~p state.", [node(), CoordinatorStatus]),
case CoordinatorStatus of
commit_authorized ->
?LOG_INFO("Coordinator ~p in commit_authorized state, moving to abort.", [node()]),
%% Update local state.
Transaction = Transaction0#transaction{coordinator_status=aborting},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
%% Reply to caller.
?LOG_INFO("Aborting transaction: ~p", [Id]),
partisan:forward_message(From, error),
%% Send notification to abort.
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending abort message to node ~p: ~p", [node(), N, Id]),
partisan:forward_message(
N,
?MODULE,
{abort, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Participants)),
ok;
commit_finalizing ->
?LOG_INFO("Coordinator ~p in commit_finalizing state, moving to abort.", [node()]),
%% Have we made a decision?
case lists:usort(Participants) =:= lists:usort(Precommitted) of
true ->
%% Decision has already been made, participants have been told to commit.
ok;
false ->
%% Update local state.
Transaction = Transaction0#transaction{coordinator_status=aborting},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
%% Reply to caller.
?LOG_INFO("Aborting transaction: ~p", [Id]),
partisan:forward_message(From, error),
%% Send notification to abort.
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending abort message to node ~p: ~p", [node(), N, Id]),
partisan:forward_message(
N,
?MODULE,
{abort, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Participants)),
ok
end,
%% Can't do anything; block.
ok;
aborting ->
?LOG_INFO("Coordinator ~p in abort state already.", [node()]),
%% Can't do anything; block.
ok;
preparing ->
?LOG_INFO("Coordinator: ~p moving from preparing to abort state.", [node()]),
%% Update local state.
Transaction = Transaction0#transaction{coordinator_status=aborting},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
%% Reply to caller.
?LOG_INFO("Aborting transaction: ~p", [Id]),
partisan:forward_message(From, error),
%% Send notification to abort.
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending abort message to node ~p: ~p", [node(), N, Id]),
partisan:forward_message(
N,
?MODULE,
{abort, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Participants))
end;
[] ->
?LOG_ERROR("Notification for coordinator timeout message but no transaction found!", [])
end,
{noreply, State};
handle_info({abort_ack, FromNode, Id}, State) ->
%% Find transaction record.
case ets:lookup(?COORDINATING_TRANSACTIONS, Id) of
[{_Id, #transaction{participants=Participants, aborted=Aborted0} = Transaction}] ->
?LOG_INFO("Received abort_ack from node ~p", [FromNode]),
%% Update aborted.
Aborted = lists:usort(Aborted0 ++ [FromNode]),
%% Are we all committed?
case lists:usort(Participants) =:= lists:usort(Aborted) of
true ->
%% Remove record from storage.
true = ets:delete(?COORDINATING_TRANSACTIONS, Id),
ok;
false ->
?LOG_INFO("Not all participants have aborted yet: ~p != ~p", [Aborted, Participants]),
%% Update local state.
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction#transaction{aborted=Aborted}}),
ok
end;
[] ->
?LOG_ERROR("Notification for abort_ack message but no transaction found!", [])
end,
{noreply, State};
handle_info({commit_ack, FromNode, Id}, State) ->
%% Find transaction record.
case ets:lookup(?COORDINATING_TRANSACTIONS, Id) of
[{_Id, #transaction{participants=Participants, committed=Committed0} = Transaction}] ->
?LOG_INFO("Received commit_ack from node ~p at node: ~p", [FromNode, node()]),
%% Update committed.
Committed = lists:usort(Committed0 ++ [FromNode]),
%% Are we all committed?
case lists:usort(Participants) =:= lists:usort(Committed) of
true ->
%% Remove record from storage.
true = ets:delete(?COORDINATING_TRANSACTIONS, Id),
ok;
false ->
?LOG_INFO("Not all participants have committed yet: ~p != ~p", [Committed, Participants]),
%% Update local state.
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction#transaction{committed=Committed}}),
ok
end;
[] ->
?LOG_ERROR("Notification for commit_ack message but no transaction found!", [])
end,
{noreply, State};
handle_info({abort, #transaction{id=Id, coordinator=Coordinator}}, State) ->
true = ets:delete(?PARTICIPATING_TRANSACTIONS, Id),
MyNode = partisan:node(),
?LOG_INFO("~p: sending abort ack message to node ~p: ~p", [node(), Coordinator, Id]),
partisan:forward_message(
Coordinator,
?MODULE,
{abort_ack, MyNode, Id},
#{channel => ?DEFAULT_CHANNEL}
),
{noreply, State};
handle_info({commit, #transaction{id=Id, coordinator=Coordinator, server_ref=ServerRef, message=Message} = Transaction}, State) ->
?LOG_INFO("Commit received at node: ~p", [node()]),
%% Write log record showing commit occurred.
true = ets:insert(?PARTICIPATING_TRANSACTIONS, {Id, Transaction#transaction{participant_status=commit}}),
%% Forward to process.
partisan_peer_service_manager:process_forward(ServerRef, Message),
%% Repond to coordinator that we are now committed.
MyNode = partisan:node(),
?LOG_INFO("~p: sending commit ack message to node ~p: ~p", [node(), Coordinator, Id]),
partisan:forward_message(
Coordinator,
?MODULE,
{commit_ack, MyNode, Id},
#{channel => ?DEFAULT_CHANNEL}
),
{noreply, State};
handle_info({precommit_ack, FromNode, Id}, State) ->
%% Find transaction record.
case ets:lookup(?COORDINATING_TRANSACTIONS, Id) of
[{_Id, #transaction{from=From, participants=Participants, precommitted=Precommitted0} = Transaction0}] ->
%% Update prepared.
Precommitted = lists:usort(Precommitted0 ++ [FromNode]),
%% Are we all prepared?
case lists:usort(Participants) =:= lists:usort(Precommitted) of
true ->
%% Change state to committing.
CoordinatorStatus = commit_finalizing,
%% Reply to caller.
?LOG_INFO("all precommit_acks received, replying to the caller: ~p", [From]),
partisan:forward_message(From, ok),
%% Update local state before sending decision to participants.
Transaction = Transaction0#transaction{coordinator_status=CoordinatorStatus, precommitted=Precommitted},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
%% Send notification to commit.
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending commit message to node ~p: ~p", [node(), N, Id]),
partisan:forward_message(
N,
?MODULE,
{commit, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Participants));
false ->
%% Update local state before sending decision to participants.
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction0#transaction{precommitted=Precommitted}})
end;
[] ->
?LOG_ERROR("Notification for precommit_ack message but no transaction found!")
end,
{noreply, State};
handle_info({precommit, #transaction{id=Id, coordinator=Coordinator} = Transaction}, State) ->
%% Write log record showing commit occurred.
true = ets:insert(?PARTICIPATING_TRANSACTIONS, {Id, Transaction#transaction{participant_status=precommit}}),
%% Repond to coordinator that we are now committed.
?LOG_INFO("~p: sending precommit_ack message to node ~p: ~p", [node(), Coordinator, Id]),
MyNode = partisan:node(),
partisan:forward_message(
Coordinator,
?MODULE,
{precommit_ack, MyNode, Id},
#{channel => ?DEFAULT_CHANNEL}
),
{noreply, State};
handle_info({prepared, FromNode, Id}, State) ->
%% Find transaction record.
case ets:lookup(?COORDINATING_TRANSACTIONS, Id) of
[{_Id, #transaction{participants=Participants, prepared=Prepared0} = Transaction0}] ->
%% Update prepared.
Prepared = lists:usort(Prepared0 ++ [FromNode]),
%% Are we all prepared?
case lists:usort(Participants) =:= lists:usort(Prepared) of
true ->
%% Change state to committing.
CoordinatorStatus = commit_authorized,
%% Update local state before sending decision to participants.
Transaction = Transaction0#transaction{coordinator_status=CoordinatorStatus, prepared=Prepared},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
%% Send notification to commit.
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending precommit message to node ~p: ~p", [node(), N, Id]),
partisan:forward_message(
N,
?MODULE,
{precommit, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Participants));
false ->
%% Update local state before sending decision to participants.
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction0#transaction{prepared=Prepared}})
end;
[] ->
?LOG_ERROR("Notification for prepared message but no transaction found!")
end,
{noreply, State};
handle_info({prepare, #transaction{coordinator=Coordinator, id=Id}=Transaction}, State) ->
%% Durably store the message for recovery.
true = ets:insert(?PARTICIPATING_TRANSACTIONS, {Id, Transaction#transaction{participant_status=prepared}}),
%% Set a timeout to hear about a decision.
erlang:send_after(2000, self(), {participant_timeout, Id}),
%% Repond to coordinator that we are now prepared.
MyNode = partisan:node(),
?LOG_INFO("~p: sending prepared message to node ~p: ~p", [node(), Coordinator, Id]),
partisan:forward_message(
Coordinator,
?MODULE,
{prepared, MyNode, Id},
#{channel => ?DEFAULT_CHANNEL}
),
{noreply, State};
handle_info(Msg, State) ->
?LOG_INFO("~p received unhandled message: ~p", [node(), Msg]),
{noreply, State}.
@private
terminate(_Reason, _State) ->
ok.
@private
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
Internal functions
%%%===================================================================
@private -- sort to remove nondeterminism in node selection .
membership(Membership) ->
lists:usort(Membership). | null | https://raw.githubusercontent.com/lasp-lang/partisan/fd048fc1b34309d9fa41450434a7e7b3b2fa1fb8/protocols/skeen_3pc.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
NOTE: This protocol doesn't cover recovery. It's merely here for
demonstration purposes.
API
gen_server callbacks
===================================================================
API
===================================================================
@doc Broadcast.
Avoid using call by sending a message and waiting for a response.
TODO: Bit of a hack just to get this working.
@doc Membership update.
===================================================================
gen_server callbacks
===================================================================
Seed the random number generator.
Open ETS table to track coordinated transactions.
Open ETS table to track participating transactions.
Start with initial membership.
Generate unique transaction id.
Set transaction timer.
Create transaction in a preparing state.
Store transaction.
Send prepare message to all participants including ourself.
Incoming messages.
Find transaction record.
Write log record showing abort occurred.
Proceed with the commit.
Write log record showing commit occurred.
Forward to process.
Find transaction record.
Update local state.
Reply to caller.
Send notification to abort.
Have we made a decision?
Decision has already been made, participants have been told to commit.
Update local state.
Reply to caller.
Send notification to abort.
Can't do anything; block.
Can't do anything; block.
Update local state.
Reply to caller.
Send notification to abort.
Find transaction record.
Update aborted.
Are we all committed?
Remove record from storage.
Update local state.
Find transaction record.
Update committed.
Are we all committed?
Remove record from storage.
Update local state.
Write log record showing commit occurred.
Forward to process.
Repond to coordinator that we are now committed.
Find transaction record.
Update prepared.
Are we all prepared?
Change state to committing.
Reply to caller.
Update local state before sending decision to participants.
Send notification to commit.
Update local state before sending decision to participants.
Write log record showing commit occurred.
Repond to coordinator that we are now committed.
Find transaction record.
Update prepared.
Are we all prepared?
Change state to committing.
Update local state before sending decision to participants.
Send notification to commit.
Update local state before sending decision to participants.
Durably store the message for recovery.
Set a timeout to hear about a decision.
Repond to coordinator that we are now prepared.
===================================================================
=================================================================== | Copyright ( c ) 2019 . All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(skeen_3pc).
-author("Christopher S. Meiklejohn <>").
-include("partisan.hrl").
-include("partisan_logger.hrl").
-export([start_link/0,
broadcast/2,
update/1,
stop/0]).
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-record(state, {next_id, membership}).
-record(transaction, {id,
coordinator,
from,
participants,
coordinator_status,
participant_status,
prepared,
precommitted,
committed,
aborted,
uncertain,
server_ref,
message}).
-define(COORDINATING_TRANSACTIONS, coordinating_transactions_table).
-define(PARTICIPATING_TRANSACTIONS, participating_transactions_table).
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
stop() ->
gen_server:stop(?MODULE, normal, infinity).
broadcast(ServerRef, Message) ->
true = erlang:register(txn_coordinator, self()),
From = partisan_remote_ref:from_term(txn_coordinator),
gen_server:cast(?MODULE, {broadcast, From, ServerRef, Message}),
receive
Response ->
Response
end.
update(LocalState0) ->
LocalState = partisan_peer_service:decode(LocalState0),
gen_server:cast(?MODULE, {update, LocalState}).
@private
init([]) ->
partisan_config:seed(),
Register membership update callback .
partisan_peer_service:add_sup_callback(fun ?MODULE:update/1),
?COORDINATING_TRANSACTIONS = ets:new(?COORDINATING_TRANSACTIONS, [set, named_table, public]),
?PARTICIPATING_TRANSACTIONS = ets:new(?PARTICIPATING_TRANSACTIONS, [set, named_table, public]),
{ok, Membership} = partisan_peer_service:members(),
?LOG_INFO("Starting with membership: ~p", [Membership]),
{ok, #state{next_id=0, membership=membership(Membership)}}.
@private
handle_call(Msg, _From, State) ->
?LOG_WARNING("Unhandled call messages at module ~p: ~p", [?MODULE, Msg]),
{reply, ok, State}.
@private
handle_cast({broadcast, From, ServerRef, Message}, #state{next_id=NextId, membership=Membership}=State) ->
MyNode = partisan:node(),
Id = {MyNode, NextId},
erlang:send_after(1000, self(), {coordinator_timeout, Id}),
Transaction = #transaction{
id=Id,
coordinator=MyNode,
from=From,
participants=Membership,
coordinator_status=preparing,
participant_status=unknown,
prepared=[],
precommitted=[],
committed=[],
aborted=[],
uncertain=[],
server_ref=ServerRef,
message=Message
},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending prepare message to node ~p: ~p", [node(), N, Message]),
partisan:forward_message(
N,
?MODULE,
{prepare, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Membership)),
{noreply, State#state{next_id=NextId}};
handle_cast({update, Membership0}, State) ->
Membership = membership(Membership0),
{noreply, State#state{membership=Membership}};
handle_cast(Msg, State) ->
?LOG_WARNING("Unhandled cast messages at module ~p: ~p", [?MODULE, Msg]),
{noreply, State}.
@private
handle_info({participant_timeout, Id}, State) ->
case ets:lookup(?PARTICIPATING_TRANSACTIONS, Id) of
[{_Id, #transaction{participants=_Participants, participant_status=ParticipantStatus, server_ref=ServerRef, message=Message} = Transaction}] ->
?LOG_INFO("Participant timeout when participant ~p was in the ~p state.", [node(), ParticipantStatus]),
case ParticipantStatus of
prepared ->
?LOG_INFO("Participant: ~p moving from ~p to abort state.", [node(), ParticipantStatus]),
true = ets:insert(?PARTICIPATING_TRANSACTIONS, {Id, Transaction#transaction{participant_status=abort}});
precommit ->
?LOG_INFO("Participant: ~p moving from precommit to commit state.", [node()]),
true = ets:insert(?PARTICIPATING_TRANSACTIONS, {Id, Transaction#transaction{participant_status=commit}}),
partisan_peer_service_manager:process_forward(ServerRef, Message);
commit ->
?LOG_INFO("Participant: ~p already committed.", [node()]),
ok
end;
[] ->
?LOG_ERROR("Notification for participant timeout message but no transaction found: abort or commit already occurred!", [])
end,
{noreply, State};
handle_info({coordinator_timeout, Id}, State) ->
case ets:lookup(?COORDINATING_TRANSACTIONS, Id) of
[{_Id, #transaction{coordinator_status=CoordinatorStatus, participants=Participants, precommitted=Precommitted, from=From} = Transaction0}] ->
?LOG_INFO("Coordinator timeout when participant ~p was in the ~p state.", [node(), CoordinatorStatus]),
case CoordinatorStatus of
commit_authorized ->
?LOG_INFO("Coordinator ~p in commit_authorized state, moving to abort.", [node()]),
Transaction = Transaction0#transaction{coordinator_status=aborting},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
?LOG_INFO("Aborting transaction: ~p", [Id]),
partisan:forward_message(From, error),
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending abort message to node ~p: ~p", [node(), N, Id]),
partisan:forward_message(
N,
?MODULE,
{abort, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Participants)),
ok;
commit_finalizing ->
?LOG_INFO("Coordinator ~p in commit_finalizing state, moving to abort.", [node()]),
case lists:usort(Participants) =:= lists:usort(Precommitted) of
true ->
ok;
false ->
Transaction = Transaction0#transaction{coordinator_status=aborting},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
?LOG_INFO("Aborting transaction: ~p", [Id]),
partisan:forward_message(From, error),
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending abort message to node ~p: ~p", [node(), N, Id]),
partisan:forward_message(
N,
?MODULE,
{abort, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Participants)),
ok
end,
ok;
aborting ->
?LOG_INFO("Coordinator ~p in abort state already.", [node()]),
ok;
preparing ->
?LOG_INFO("Coordinator: ~p moving from preparing to abort state.", [node()]),
Transaction = Transaction0#transaction{coordinator_status=aborting},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
?LOG_INFO("Aborting transaction: ~p", [Id]),
partisan:forward_message(From, error),
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending abort message to node ~p: ~p", [node(), N, Id]),
partisan:forward_message(
N,
?MODULE,
{abort, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Participants))
end;
[] ->
?LOG_ERROR("Notification for coordinator timeout message but no transaction found!", [])
end,
{noreply, State};
handle_info({abort_ack, FromNode, Id}, State) ->
case ets:lookup(?COORDINATING_TRANSACTIONS, Id) of
[{_Id, #transaction{participants=Participants, aborted=Aborted0} = Transaction}] ->
?LOG_INFO("Received abort_ack from node ~p", [FromNode]),
Aborted = lists:usort(Aborted0 ++ [FromNode]),
case lists:usort(Participants) =:= lists:usort(Aborted) of
true ->
true = ets:delete(?COORDINATING_TRANSACTIONS, Id),
ok;
false ->
?LOG_INFO("Not all participants have aborted yet: ~p != ~p", [Aborted, Participants]),
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction#transaction{aborted=Aborted}}),
ok
end;
[] ->
?LOG_ERROR("Notification for abort_ack message but no transaction found!", [])
end,
{noreply, State};
handle_info({commit_ack, FromNode, Id}, State) ->
case ets:lookup(?COORDINATING_TRANSACTIONS, Id) of
[{_Id, #transaction{participants=Participants, committed=Committed0} = Transaction}] ->
?LOG_INFO("Received commit_ack from node ~p at node: ~p", [FromNode, node()]),
Committed = lists:usort(Committed0 ++ [FromNode]),
case lists:usort(Participants) =:= lists:usort(Committed) of
true ->
true = ets:delete(?COORDINATING_TRANSACTIONS, Id),
ok;
false ->
?LOG_INFO("Not all participants have committed yet: ~p != ~p", [Committed, Participants]),
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction#transaction{committed=Committed}}),
ok
end;
[] ->
?LOG_ERROR("Notification for commit_ack message but no transaction found!", [])
end,
{noreply, State};
handle_info({abort, #transaction{id=Id, coordinator=Coordinator}}, State) ->
true = ets:delete(?PARTICIPATING_TRANSACTIONS, Id),
MyNode = partisan:node(),
?LOG_INFO("~p: sending abort ack message to node ~p: ~p", [node(), Coordinator, Id]),
partisan:forward_message(
Coordinator,
?MODULE,
{abort_ack, MyNode, Id},
#{channel => ?DEFAULT_CHANNEL}
),
{noreply, State};
handle_info({commit, #transaction{id=Id, coordinator=Coordinator, server_ref=ServerRef, message=Message} = Transaction}, State) ->
?LOG_INFO("Commit received at node: ~p", [node()]),
true = ets:insert(?PARTICIPATING_TRANSACTIONS, {Id, Transaction#transaction{participant_status=commit}}),
partisan_peer_service_manager:process_forward(ServerRef, Message),
MyNode = partisan:node(),
?LOG_INFO("~p: sending commit ack message to node ~p: ~p", [node(), Coordinator, Id]),
partisan:forward_message(
Coordinator,
?MODULE,
{commit_ack, MyNode, Id},
#{channel => ?DEFAULT_CHANNEL}
),
{noreply, State};
handle_info({precommit_ack, FromNode, Id}, State) ->
case ets:lookup(?COORDINATING_TRANSACTIONS, Id) of
[{_Id, #transaction{from=From, participants=Participants, precommitted=Precommitted0} = Transaction0}] ->
Precommitted = lists:usort(Precommitted0 ++ [FromNode]),
case lists:usort(Participants) =:= lists:usort(Precommitted) of
true ->
CoordinatorStatus = commit_finalizing,
?LOG_INFO("all precommit_acks received, replying to the caller: ~p", [From]),
partisan:forward_message(From, ok),
Transaction = Transaction0#transaction{coordinator_status=CoordinatorStatus, precommitted=Precommitted},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending commit message to node ~p: ~p", [node(), N, Id]),
partisan:forward_message(
N,
?MODULE,
{commit, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Participants));
false ->
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction0#transaction{precommitted=Precommitted}})
end;
[] ->
?LOG_ERROR("Notification for precommit_ack message but no transaction found!")
end,
{noreply, State};
handle_info({precommit, #transaction{id=Id, coordinator=Coordinator} = Transaction}, State) ->
true = ets:insert(?PARTICIPATING_TRANSACTIONS, {Id, Transaction#transaction{participant_status=precommit}}),
?LOG_INFO("~p: sending precommit_ack message to node ~p: ~p", [node(), Coordinator, Id]),
MyNode = partisan:node(),
partisan:forward_message(
Coordinator,
?MODULE,
{precommit_ack, MyNode, Id},
#{channel => ?DEFAULT_CHANNEL}
),
{noreply, State};
handle_info({prepared, FromNode, Id}, State) ->
case ets:lookup(?COORDINATING_TRANSACTIONS, Id) of
[{_Id, #transaction{participants=Participants, prepared=Prepared0} = Transaction0}] ->
Prepared = lists:usort(Prepared0 ++ [FromNode]),
case lists:usort(Participants) =:= lists:usort(Prepared) of
true ->
CoordinatorStatus = commit_authorized,
Transaction = Transaction0#transaction{coordinator_status=CoordinatorStatus, prepared=Prepared},
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction}),
lists:foreach(fun(N) ->
?LOG_INFO("~p: sending precommit message to node ~p: ~p", [node(), N, Id]),
partisan:forward_message(
N,
?MODULE,
{precommit, Transaction},
#{channel => ?DEFAULT_CHANNEL}
)
end, membership(Participants));
false ->
true = ets:insert(?COORDINATING_TRANSACTIONS, {Id, Transaction0#transaction{prepared=Prepared}})
end;
[] ->
?LOG_ERROR("Notification for prepared message but no transaction found!")
end,
{noreply, State};
handle_info({prepare, #transaction{coordinator=Coordinator, id=Id}=Transaction}, State) ->
true = ets:insert(?PARTICIPATING_TRANSACTIONS, {Id, Transaction#transaction{participant_status=prepared}}),
erlang:send_after(2000, self(), {participant_timeout, Id}),
MyNode = partisan:node(),
?LOG_INFO("~p: sending prepared message to node ~p: ~p", [node(), Coordinator, Id]),
partisan:forward_message(
Coordinator,
?MODULE,
{prepared, MyNode, Id},
#{channel => ?DEFAULT_CHANNEL}
),
{noreply, State};
handle_info(Msg, State) ->
?LOG_INFO("~p received unhandled message: ~p", [node(), Msg]),
{noreply, State}.
@private
terminate(_Reason, _State) ->
ok.
@private
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
@private -- sort to remove nondeterminism in node selection .
membership(Membership) ->
lists:usort(Membership). |
433736a0863d58f9bda5a965c1d39bb8de22cef3a6d4cd9dd376bc73ccb9a01e | juji-io/datalevin | timeout.cljc | (ns ^:no-doc datalevin.timeout)
(def ^:dynamic *deadline*
"When non nil, query pr pull will throw if its not done before *deadline* -- as returned by (System/currentTimeMillis) or (.now js/Date)"
nil)
(defn to-deadline
"Converts a timeout in milliseconds (or nil) to a deadline (or nil)."
[timeout-in-ms]
(some-> timeout-in-ms
(#(+ ^long %
#?(:clj ^long (System/currentTimeMillis)
:cljs (.now js/Date))))))
(defn assert-time-left
"Throws if timeout exceeded"
[]
(when (some-> *deadline*
(#(< ^long %
#?(:clj ^long (System/currentTimeMillis)
:cljs (.now js/Date)))))
(throw
(ex-info "Query and/or pull expression took too long to run."
{}))))
| null | https://raw.githubusercontent.com/juji-io/datalevin/5fc778fa31a63ad09635b41622d3337275d57efa/src/datalevin/timeout.cljc | clojure | (ns ^:no-doc datalevin.timeout)
(def ^:dynamic *deadline*
"When non nil, query pr pull will throw if its not done before *deadline* -- as returned by (System/currentTimeMillis) or (.now js/Date)"
nil)
(defn to-deadline
"Converts a timeout in milliseconds (or nil) to a deadline (or nil)."
[timeout-in-ms]
(some-> timeout-in-ms
(#(+ ^long %
#?(:clj ^long (System/currentTimeMillis)
:cljs (.now js/Date))))))
(defn assert-time-left
"Throws if timeout exceeded"
[]
(when (some-> *deadline*
(#(< ^long %
#?(:clj ^long (System/currentTimeMillis)
:cljs (.now js/Date)))))
(throw
(ex-info "Query and/or pull expression took too long to run."
{}))))
| |
24f3d9a42ec63ccc535e72ece718df5006978c629dd3b2a07a65e39967f649a8 | lemmaandrew/CodingBatHaskell | make2.hs | From
Given 2 int arrays , a and b , return a new array length 2 containing , as much as will
fit , the elements from a followed by the elements from b. The arrays may be any length ,
including 0 , but there will be 2 or more elements available between the 2 arrays .
Given 2 int arrays, a and b, return a new array length 2 containing, as much as will
fit, the elements from a followed by the elements from b. The arrays may be any length,
including 0, but there will be 2 or more elements available between the 2 arrays.
-}
import Test.Hspec ( hspec, describe, it, shouldBe )
make2 :: [Int] -> [Int] -> [Int]
make2 a b = undefined
main :: IO ()
main = hspec $ describe "Tests:" $ do
it "[4,5]" $
make2 [4,5] [1,2,3] `shouldBe` [4,5]
it "[4,1]" $
make2 [4] [1,2,3] `shouldBe` [4,1]
it "[1,2]" $
make2 [] [1,2] `shouldBe` [1,2]
it "[1,2]" $
make2 [1,2] [] `shouldBe` [1,2]
it "[3,1]" $
make2 [3] [1,2,3] `shouldBe` [3,1]
it "[3,1]" $
make2 [3] [1] `shouldBe` [3,1]
it "[3,1]" $
make2 [3,1,4] [] `shouldBe` [3,1]
it "[1,1]" $
make2 [1] [1] `shouldBe` [1,1]
it "[1,2]" $
make2 [1,2,3] [7,8] `shouldBe` [1,2]
it "[7,8]" $
make2 [7,8] [1,2,3] `shouldBe` [7,8]
it "[7,1]" $
make2 [7] [1,2,3] `shouldBe` [7,1]
it "[5,4]" $
make2 [5,4] [2,3,7] `shouldBe` [5,4]
| null | https://raw.githubusercontent.com/lemmaandrew/CodingBatHaskell/d839118be02e1867504206657a0664fd79d04736/CodingBat/Array-1/make2.hs | haskell | From
Given 2 int arrays , a and b , return a new array length 2 containing , as much as will
fit , the elements from a followed by the elements from b. The arrays may be any length ,
including 0 , but there will be 2 or more elements available between the 2 arrays .
Given 2 int arrays, a and b, return a new array length 2 containing, as much as will
fit, the elements from a followed by the elements from b. The arrays may be any length,
including 0, but there will be 2 or more elements available between the 2 arrays.
-}
import Test.Hspec ( hspec, describe, it, shouldBe )
make2 :: [Int] -> [Int] -> [Int]
make2 a b = undefined
main :: IO ()
main = hspec $ describe "Tests:" $ do
it "[4,5]" $
make2 [4,5] [1,2,3] `shouldBe` [4,5]
it "[4,1]" $
make2 [4] [1,2,3] `shouldBe` [4,1]
it "[1,2]" $
make2 [] [1,2] `shouldBe` [1,2]
it "[1,2]" $
make2 [1,2] [] `shouldBe` [1,2]
it "[3,1]" $
make2 [3] [1,2,3] `shouldBe` [3,1]
it "[3,1]" $
make2 [3] [1] `shouldBe` [3,1]
it "[3,1]" $
make2 [3,1,4] [] `shouldBe` [3,1]
it "[1,1]" $
make2 [1] [1] `shouldBe` [1,1]
it "[1,2]" $
make2 [1,2,3] [7,8] `shouldBe` [1,2]
it "[7,8]" $
make2 [7,8] [1,2,3] `shouldBe` [7,8]
it "[7,1]" $
make2 [7] [1,2,3] `shouldBe` [7,1]
it "[5,4]" $
make2 [5,4] [2,3,7] `shouldBe` [5,4]
| |
8d339d02bbabfd27a61376cf9aed449df99244f79e253efab17e6d5d486e0378 | ocamllabs/ocaml-modular-implicits | typedecl.mli | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
(* Typing of type definitions and primitive definitions *)
open Types
open Format
val transl_type_decl:
Env.t -> Parsetree.type_declaration list ->
Typedtree.type_declaration list * Env.t
val transl_exception:
Env.t ->
Parsetree.extension_constructor -> Typedtree.extension_constructor * Env.t
val transl_type_extension:
bool -> Env.t -> Location.t -> Parsetree.type_extension ->
Typedtree.type_extension * Env.t
val transl_value_decl:
Env.t -> Location.t ->
Parsetree.value_description -> Typedtree.value_description * Env.t
val transl_with_constraint:
Env.t -> Ident.t -> Path.t option -> Types.type_declaration ->
Parsetree.type_declaration -> Typedtree.type_declaration
val abstract_type_decl: int -> type_declaration
val approx_type_decl:
Env.t -> Parsetree.type_declaration list ->
(Ident.t * type_declaration) list
val check_recmod_typedecl:
Env.t -> Location.t -> Ident.t list -> Path.t -> type_declaration -> unit
val check_coherence:
Env.t -> Location.t -> Ident.t -> type_declaration -> unit
(* for fixed types *)
val is_fixed_type : Parsetree.type_declaration -> bool
(* for typeclass.ml *)
val compute_variance_decls:
Env.t ->
(Ident.t * Types.type_declaration * Types.type_declaration *
Types.class_declaration * Types.class_type_declaration *
'a Typedtree.class_infos) list ->
(Types.type_declaration * Types.type_declaration *
Types.class_declaration * Types.class_type_declaration) list
type error =
Repeated_parameter
| Duplicate_constructor of string
| Too_many_constructors
| Duplicate_label of string
| Recursive_abbrev of string
| Cycle_in_def of string * type_expr
| Definition_mismatch of type_expr * Includecore.type_mismatch list
| Constraint_failed of type_expr * type_expr
| Inconsistent_constraint of Env.t * (type_expr * type_expr) list
| Type_clash of Env.t * (type_expr * type_expr) list
| Parameters_differ of Path.t * type_expr * type_expr
| Null_arity_external
| Missing_native_external
| Unbound_type_var of type_expr * type_declaration
| Not_open_type of Path.t
| Not_extensible_type of Path.t
| Extension_mismatch of Path.t * Includecore.type_mismatch list
| Rebind_wrong_type of Longident.t * Env.t * (type_expr * type_expr) list
| Rebind_mismatch of Longident.t * Path.t * Path.t
| Rebind_private of Longident.t
| Bad_variance of int * (bool*bool*bool) * (bool*bool*bool)
| Unavailable_type_constructor of Path.t
| Bad_fixed_type of string
| Unbound_type_var_ext of type_expr * extension_constructor
| Varying_anonymous
exception Error of Location.t * error
val report_error: formatter -> error -> unit
| null | https://raw.githubusercontent.com/ocamllabs/ocaml-modular-implicits/92e45da5c8a4c2db8b2cd5be28a5bec2ac2181f1/typing/typedecl.mli | ocaml | *********************************************************************
OCaml
*********************************************************************
Typing of type definitions and primitive definitions
for fixed types
for typeclass.ml | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
open Types
open Format
val transl_type_decl:
Env.t -> Parsetree.type_declaration list ->
Typedtree.type_declaration list * Env.t
val transl_exception:
Env.t ->
Parsetree.extension_constructor -> Typedtree.extension_constructor * Env.t
val transl_type_extension:
bool -> Env.t -> Location.t -> Parsetree.type_extension ->
Typedtree.type_extension * Env.t
val transl_value_decl:
Env.t -> Location.t ->
Parsetree.value_description -> Typedtree.value_description * Env.t
val transl_with_constraint:
Env.t -> Ident.t -> Path.t option -> Types.type_declaration ->
Parsetree.type_declaration -> Typedtree.type_declaration
val abstract_type_decl: int -> type_declaration
val approx_type_decl:
Env.t -> Parsetree.type_declaration list ->
(Ident.t * type_declaration) list
val check_recmod_typedecl:
Env.t -> Location.t -> Ident.t list -> Path.t -> type_declaration -> unit
val check_coherence:
Env.t -> Location.t -> Ident.t -> type_declaration -> unit
val is_fixed_type : Parsetree.type_declaration -> bool
val compute_variance_decls:
Env.t ->
(Ident.t * Types.type_declaration * Types.type_declaration *
Types.class_declaration * Types.class_type_declaration *
'a Typedtree.class_infos) list ->
(Types.type_declaration * Types.type_declaration *
Types.class_declaration * Types.class_type_declaration) list
type error =
Repeated_parameter
| Duplicate_constructor of string
| Too_many_constructors
| Duplicate_label of string
| Recursive_abbrev of string
| Cycle_in_def of string * type_expr
| Definition_mismatch of type_expr * Includecore.type_mismatch list
| Constraint_failed of type_expr * type_expr
| Inconsistent_constraint of Env.t * (type_expr * type_expr) list
| Type_clash of Env.t * (type_expr * type_expr) list
| Parameters_differ of Path.t * type_expr * type_expr
| Null_arity_external
| Missing_native_external
| Unbound_type_var of type_expr * type_declaration
| Not_open_type of Path.t
| Not_extensible_type of Path.t
| Extension_mismatch of Path.t * Includecore.type_mismatch list
| Rebind_wrong_type of Longident.t * Env.t * (type_expr * type_expr) list
| Rebind_mismatch of Longident.t * Path.t * Path.t
| Rebind_private of Longident.t
| Bad_variance of int * (bool*bool*bool) * (bool*bool*bool)
| Unavailable_type_constructor of Path.t
| Bad_fixed_type of string
| Unbound_type_var_ext of type_expr * extension_constructor
| Varying_anonymous
exception Error of Location.t * error
val report_error: formatter -> error -> unit
|
b06cd7b8db44183b04de6a59f7a54d0592b2e0a553c99e2b4e7075be071ccdfc | brendanhay/amazonka | TimeRangeType.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
{-# LANGUAGE StrictData #-}
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
Module : Amazonka . . Types . TimeRangeType
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
module Amazonka.XRay.Types.TimeRangeType
( TimeRangeType
( ..,
TimeRangeType_Event,
TimeRangeType_TraceId
),
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
newtype TimeRangeType = TimeRangeType'
{ fromTimeRangeType ::
Data.Text
}
deriving stock
( Prelude.Show,
Prelude.Read,
Prelude.Eq,
Prelude.Ord,
Prelude.Generic
)
deriving newtype
( Prelude.Hashable,
Prelude.NFData,
Data.FromText,
Data.ToText,
Data.ToByteString,
Data.ToLog,
Data.ToHeader,
Data.ToQuery,
Data.FromJSON,
Data.FromJSONKey,
Data.ToJSON,
Data.ToJSONKey,
Data.FromXML,
Data.ToXML
)
pattern TimeRangeType_Event :: TimeRangeType
pattern TimeRangeType_Event = TimeRangeType' "Event"
pattern TimeRangeType_TraceId :: TimeRangeType
pattern TimeRangeType_TraceId = TimeRangeType' "TraceId"
{-# COMPLETE
TimeRangeType_Event,
TimeRangeType_TraceId,
TimeRangeType'
#-}
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-xray/gen/Amazonka/XRay/Types/TimeRangeType.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
# COMPLETE
TimeRangeType_Event,
TimeRangeType_TraceId,
TimeRangeType'
# | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Module : Amazonka . . Types . TimeRangeType
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Amazonka.XRay.Types.TimeRangeType
( TimeRangeType
( ..,
TimeRangeType_Event,
TimeRangeType_TraceId
),
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
newtype TimeRangeType = TimeRangeType'
{ fromTimeRangeType ::
Data.Text
}
deriving stock
( Prelude.Show,
Prelude.Read,
Prelude.Eq,
Prelude.Ord,
Prelude.Generic
)
deriving newtype
( Prelude.Hashable,
Prelude.NFData,
Data.FromText,
Data.ToText,
Data.ToByteString,
Data.ToLog,
Data.ToHeader,
Data.ToQuery,
Data.FromJSON,
Data.FromJSONKey,
Data.ToJSON,
Data.ToJSONKey,
Data.FromXML,
Data.ToXML
)
pattern TimeRangeType_Event :: TimeRangeType
pattern TimeRangeType_Event = TimeRangeType' "Event"
pattern TimeRangeType_TraceId :: TimeRangeType
pattern TimeRangeType_TraceId = TimeRangeType' "TraceId"
|
5beb63dcdcdf784f23883842b8dcc965b3e05f40a03228747aff9959cee2080f | ekmett/parsec-parsers | doctests.hs | module Main where
import Build_doctests (deps)
import Control.Applicative
import Control.Monad
import Data.List
import System.Directory
import System.FilePath
import Test.DocTest
main :: IO ()
main = getSources >>= \sources -> doctest $
"-isrc"
: "-idist/build/autogen"
: "-optP-include"
: "-optPdist/build/autogen/cabal_macros.h"
: "-hide-all-packages"
: map ("-package="++) deps ++ sources
getSources :: IO [FilePath]
getSources = filter (isSuffixOf ".hs") <$> go "src"
where
go dir = do
(dirs, files) <- getFilesAndDirectories dir
(files ++) . concat <$> mapM go dirs
getFilesAndDirectories :: FilePath -> IO ([FilePath], [FilePath])
getFilesAndDirectories dir = do
c <- map (dir </>) . filter (`notElem` ["..", "."]) <$> getDirectoryContents dir
(,) <$> filterM doesDirectoryExist c <*> filterM doesFileExist c
| null | https://raw.githubusercontent.com/ekmett/parsec-parsers/b8a53db11df0bbdd2da953e59a9a8abeb893ce0e/tests/doctests.hs | haskell | module Main where
import Build_doctests (deps)
import Control.Applicative
import Control.Monad
import Data.List
import System.Directory
import System.FilePath
import Test.DocTest
main :: IO ()
main = getSources >>= \sources -> doctest $
"-isrc"
: "-idist/build/autogen"
: "-optP-include"
: "-optPdist/build/autogen/cabal_macros.h"
: "-hide-all-packages"
: map ("-package="++) deps ++ sources
getSources :: IO [FilePath]
getSources = filter (isSuffixOf ".hs") <$> go "src"
where
go dir = do
(dirs, files) <- getFilesAndDirectories dir
(files ++) . concat <$> mapM go dirs
getFilesAndDirectories :: FilePath -> IO ([FilePath], [FilePath])
getFilesAndDirectories dir = do
c <- map (dir </>) . filter (`notElem` ["..", "."]) <$> getDirectoryContents dir
(,) <$> filterM doesDirectoryExist c <*> filterM doesFileExist c
| |
d9842becdbb4d93f43d40cb8d8e600d40088f6da1452611f710e80ef1fef02f2 | airalab/habit | Internal.hs | # LANGUAGE TemplateHaskell #
-- |
-- Module : Web.Bot.Story.Internal
Copyright : 2016 - 2017
-- License : BSD3
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- Story bot implementation.
--
module Web.Bot.Story.Internal (storyBot) where
import Control.Concurrent (killThread, ThreadId)
import Data.IntMap.Strict as I
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import Control.Monad (forever)
import qualified Data.Text as T
import Data.Map.Strict as M
import Data.Monoid ((<>))
import Data.Text (Text)
import Pipes
import Web.Bot.Platform
import Web.Bot.Metrics
import Web.Bot.Persist
import Web.Bot.Message
import Web.Bot.Story
import Web.Bot.User
import Web.Bot.Log
| ' Producer ' from ' ' creator
fromChan :: MonadIO m => Chan b -> Producer b m ()
fromChan c = forever $ liftIO (readChan c) >>= yield
-- | Incoming messages will be sended
toSender :: (APIToken a, Persist a)
=> User
-> (User -> Message -> Bot a ())
-> Consumer Message (Bot a) ()
toSender u sender = forever $ do
await >>= lift . sender u
-- Metrics
lift $ runDB $ upsertBy (StatUser $ userIdent u)
(UserStat (userIdent u) 0 1)
[UserStatMessageOut +=. 1]
-- | Chat ID based message splitter
storyHandler :: (Persist a, APIToken a, ToMessage help)
=> MVar (IntMap (Chan Message, ThreadId))
-> Map Message (Story a)
-> help
-> User -> Message -> Bot a ()
storyHandler chats stories help user msg = do
-- Get a chat id
let newStory item = modifyMVar_ chats
(return . I.insert (userChat user) item)
deleteStory = modifyMVar_ chats
(return . I.delete (userChat user))
-- Metrics
runDB $ do
upsertBy (StatUser $ userIdent user)
(UserStat (userIdent user) 0 1)
[UserStatMessageIn +=. 1]
upsertBy (UserIdentity $ userIdent user)
user
[ UserName =. userName user
, UserChat =. userChat user ]
chatMap <- liftIO (readMVar chats)
-- Lookup chat id in the map
case I.lookup (userChat user) chatMap of
-- Chat exist => story is run now
Just (chan, tid) ->
-- Want to cancel it?
case msg of
"/cancel" -> do
$logDebugS "Story" ("Cancel request, story "
<> T.pack (show tid) <> " killed.")
liftIO (killThread tid)
sendMessage user help
_ -> liftIO (writeChan chan msg)
-- Is no runned stories
Nothing ->
case M.lookup msg stories of
-- Unknown story, try to help
Nothing -> do
sendMessage user help
$logDebugS "Story" ("Unknown story "
<> T.pack (show msg) <> ".")
-- Story exist
Just story -> do
-- Create chan
chan <- liftIO newChan
-- Story pipeline
let pipeline = fromChan chan
>-> (story user >>= yield)
>-> toSender user sendMessage
-- Run story in separate thread
tid <- forkFinallyBot (runEffect pipeline)
(const deleteStory)
-- Update userMap
liftIO (newStory (chan, tid))
-- Log and update metrics
let sname = T.pack (show msg)
runDB $ upsertBy (StatStory sname)
(StoryStat sname 1)
[StoryStatCalls +=. 1]
$logDebugS "Story" ("Story " <> sname
<> " spawned at "
<> T.pack (show tid) <> ".")
-- | User story handler
storyBot :: (Persist a, APIToken a, ToMessage help)
=> help -> Map Message (Story a) -> Bot a ()
storyBot help stories = do
-- Create map from user chat to it story
chats <- liftIO (newMVar I.empty)
-- Run update loop
$logDebugS "Story" "Init success."
messageHandler $ storyHandler chats stories help
| null | https://raw.githubusercontent.com/airalab/habit/a3fd757cc5a8dd9086df98ed8658535d063cd49e/src/Web/Bot/Story/Internal.hs | haskell | |
Module : Web.Bot.Story.Internal
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Story bot implementation.
| Incoming messages will be sended
Metrics
| Chat ID based message splitter
Get a chat id
Metrics
Lookup chat id in the map
Chat exist => story is run now
Want to cancel it?
Is no runned stories
Unknown story, try to help
Story exist
Create chan
Story pipeline
Run story in separate thread
Update userMap
Log and update metrics
| User story handler
Create map from user chat to it story
Run update loop | # LANGUAGE TemplateHaskell #
Copyright : 2016 - 2017
module Web.Bot.Story.Internal (storyBot) where
import Control.Concurrent (killThread, ThreadId)
import Data.IntMap.Strict as I
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import Control.Monad (forever)
import qualified Data.Text as T
import Data.Map.Strict as M
import Data.Monoid ((<>))
import Data.Text (Text)
import Pipes
import Web.Bot.Platform
import Web.Bot.Metrics
import Web.Bot.Persist
import Web.Bot.Message
import Web.Bot.Story
import Web.Bot.User
import Web.Bot.Log
| ' Producer ' from ' ' creator
fromChan :: MonadIO m => Chan b -> Producer b m ()
fromChan c = forever $ liftIO (readChan c) >>= yield
toSender :: (APIToken a, Persist a)
=> User
-> (User -> Message -> Bot a ())
-> Consumer Message (Bot a) ()
toSender u sender = forever $ do
await >>= lift . sender u
lift $ runDB $ upsertBy (StatUser $ userIdent u)
(UserStat (userIdent u) 0 1)
[UserStatMessageOut +=. 1]
storyHandler :: (Persist a, APIToken a, ToMessage help)
=> MVar (IntMap (Chan Message, ThreadId))
-> Map Message (Story a)
-> help
-> User -> Message -> Bot a ()
storyHandler chats stories help user msg = do
let newStory item = modifyMVar_ chats
(return . I.insert (userChat user) item)
deleteStory = modifyMVar_ chats
(return . I.delete (userChat user))
runDB $ do
upsertBy (StatUser $ userIdent user)
(UserStat (userIdent user) 0 1)
[UserStatMessageIn +=. 1]
upsertBy (UserIdentity $ userIdent user)
user
[ UserName =. userName user
, UserChat =. userChat user ]
chatMap <- liftIO (readMVar chats)
case I.lookup (userChat user) chatMap of
Just (chan, tid) ->
case msg of
"/cancel" -> do
$logDebugS "Story" ("Cancel request, story "
<> T.pack (show tid) <> " killed.")
liftIO (killThread tid)
sendMessage user help
_ -> liftIO (writeChan chan msg)
Nothing ->
case M.lookup msg stories of
Nothing -> do
sendMessage user help
$logDebugS "Story" ("Unknown story "
<> T.pack (show msg) <> ".")
Just story -> do
chan <- liftIO newChan
let pipeline = fromChan chan
>-> (story user >>= yield)
>-> toSender user sendMessage
tid <- forkFinallyBot (runEffect pipeline)
(const deleteStory)
liftIO (newStory (chan, tid))
let sname = T.pack (show msg)
runDB $ upsertBy (StatStory sname)
(StoryStat sname 1)
[StoryStatCalls +=. 1]
$logDebugS "Story" ("Story " <> sname
<> " spawned at "
<> T.pack (show tid) <> ".")
storyBot :: (Persist a, APIToken a, ToMessage help)
=> help -> Map Message (Story a) -> Bot a ()
storyBot help stories = do
chats <- liftIO (newMVar I.empty)
$logDebugS "Story" "Init success."
messageHandler $ storyHandler chats stories help
|
836f45c9e97bc95bffc21e04964a979ea489c6181ade11b9e131d41bc1f5061d | alesaccoia/festival_flinger | pauses.scm | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; ;;
Centre for Speech Technology Research ; ;
University of Edinburgh , UK ; ;
;;; Copyright (c) 1996,1997 ;;
All Rights Reserved . ; ;
;;; ;;
;;; Permission is hereby granted, free of charge, to use and distribute ;;
;;; this software and its documentation without restriction, including ;;
;;; without limitation the rights to use, copy, modify, merge, publish, ;;
;;; distribute, sublicense, and/or sell copies of this work, and to ;;
;;; permit persons to whom this work is furnished to do so, subject to ;;
;;; the following conditions: ;;
;;; 1. The code must retain the above copyright notice, this list of ;;
;;; conditions and the following disclaimer. ;;
;;; 2. Any modifications must be clearly marked as such. ;;
3 . Original authors ' names are not deleted . ; ;
;;; 4. The authors' names are not used to endorse or promote products ;;
;;; derived from this software without specific prior written ;;
;;; permission. ;;
;;; ;;
;;; THE UNIVERSITY OF EDINBURGH AND THE CONTRIBUTORS TO THIS WORK ;;
;;; DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;
;;; ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;
;;; SHALL THE UNIVERSITY OF EDINBURGH NOR THE CONTRIBUTORS BE LIABLE ;;
;;; FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , IN ; ;
;;; AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;
;;; ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;
;;; THIS SOFTWARE. ;;
;;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Predicting pause insertion
(define (Pauses utt)
"(Pauses utt)
Insert pauses where required."
(let ((rval (apply_method 'Pause_Method utt)))
(cond
(rval rval) ;; new style
(t
(Classic_Pauses utt))))
(Pause_optional_deleting_B_X utt))
(define (Classic_Pauses utt)
"(Pauses UTT)
Predict pause insertion."
(let ((words (utt.relation.items utt 'Word)) lastword tpname)
(if words
(begin
(insert_initial_pause utt) ;; always have a start pause
(set! lastword (car (last words)))
(mapcar
(lambda (w)
(let ((pbreak (item.feat w "pbreak"))
(emph (item.feat w "R:Token.parent.EMPH")))
(cond
((or (string-equal "B" pbreak)
(string-equal "BB" pbreak))
(insert_pause utt w))
; ((string-equal emph "1")
; (insert_pause utt w))
((equal? w lastword)
(insert_pause utt w)))))
words)
;; The embarrassing bit. Remove any words labelled as punc or fpunc
(mapcar
(lambda (w)
(let ((pos (item.feat w "pos")))
(if (or (string-equal "punc" pos)
(string-equal "fpunc" pos))
(let ((pbreak (item.feat w "pbreak"))
(wp (item.relation w 'Phrase)))
(if (and (string-matches pbreak "BB?")
(item.relation.prev w 'Word))
(item.set_feat
(item.relation.prev w 'Word) "pbreak" pbreak))
(item.relation.remove w 'Word)
;; can't refer to w as we've just deleted it
(item.relation.remove wp 'Phrase)))))
words)
12/01/2006 V.Strom : Even more embarrasing : Delete all silences
;; that are followed by a silence. These silence sequences
emerge if ' punc of phrase - final words consists of more than one
;; character, e.g. period+quote. That in turn causes problems in
build_utts : the 2nd silence ends up with no features but its name ,
because there is no corresponding 2nd silence in the phone
;; segmentation to align with.
;; This schould be fixed in the functions below, but it is easier for
;; me to clean up at the end:
(set! sil (car (car (cdr (car (PhoneSet.description '(silences)))))))
(set! seg (item.next(utt.relation.first utt 'Segment)))
(while seg
(if(and(equal? sil (item.name seg))
(equal? sil (item.name (item.prev seg))))
(item.delete (item.prev seg)))
(set! seg (item.next seg)))))
utt))
(define (insert_pause utt word)
"(insert_pause UTT WORDITEM)
Insert a silence segment after the last segment in WORDITEM in UTT."
(let ((lastseg (find_last_seg word))
(silence (car (car (cdr (car (PhoneSet.description '(silences))))))))
(if lastseg
(item.relation.insert
lastseg 'Segment (list silence) 'after))))
(define (insert_initial_pause utt)
"(insert_initial_pause UTT)
Always have an initial silence if the utterance is non-empty.
Insert a silence segment after the last segment in WORDITEM in UTT."
(let ((firstseg (car (utt.relation.items utt 'Segment)))
(silence (car (car (cdr (car (PhoneSet.description '(silences))))))))
(if firstseg
(item.relation.insert
firstseg 'Segment (list silence) 'before))))
(define (insert_final_pause utt)
"(insert_final_pause UTT)
Always have a final silence if the utterance is non-empty."
(let ((lastseg (utt.relation.last utt 'Segment))
(silence (car (car (cdr (car (PhoneSet.description '(silences))))))))
(set! silence (format nil "%l" silence)) ; to make the symbol a string
( format t " silence is % " silence )
( format t " lastseg is % " ( item.name lastseg ) )
(if lastseg
(if (not(equal? (item.name lastseg) silence))
(begin
(format t "iserted final pause %s\n" silence)
(item.relation.insert lastseg 'Segment (list silence) 'after))))))
(define (find_last_seg word)
;;; Find the segment that is immediately at this end of this word
;;; If this word is punctuation it might not have any segments
;;; so we have to check back until we find a word with a segment in it
(cond
((null word)
nil) ;; there are no segs (don't think this can happen)
(t
(let ((lsyl (item.relation.daughtern word 'SylStructure)))
(if lsyl
(item.relation.daughtern lsyl 'SylStructure)
(find_last_seg (item.relation.prev word 'Word)))))))
(define (Unisyn_Pauses utt)
"(Unisyn_Pauses UTT)
Predict pause insertion in a Unisyn utterance structure."
(let ((words (utt.relation.items utt 'Word)) lastword tpname)
(if words
(begin
(us_insert_initial_pause utt) ;; always have a start pause
(set! lastword (car (last words)))
(mapcar
(lambda (w)
(let ((pbreak (item.feat w "pbreak"))
(emph (item.feat w "R:Token.parent.EMPH")))
(cond
((or (string-equal "B" pbreak)
(string-equal "BB" pbreak))
(us_insert_pause utt w))
; ((string-equal emph "1")
( us_insert_pause utt w ) )
((equal? w lastword)
(us_insert_pause utt w)))))
words)
;; The embarrassing bit. Remove any words labelled as punc or fpunc
(mapcar
(lambda (w)
(let ((pos (item.feat w "pos")))
(if (or (string-equal "punc" pos)
(string-equal "fpunc" pos))
(let ((pbreak (item.feat w "pbreak"))
(wp (item.relation w 'Phrase)))
(if (and (string-matches pbreak "BB?")
(item.relation.prev w 'Word))
(item.set_feat
(item.relation.prev w 'Word) "pbreak" pbreak))
(item.relation.remove w 'Word)
;; can't refer to w as we've just deleted it
(item.relation.remove wp 'Phrase)))))
words)))
utt))
(define (us_insert_pause utt word)
"(us_insert_pause UTT WORDITEM)
Insert a silence segment after the last segment in WORDITEM in UTT."
(let ((lastseg (us_find_last_seg word))
(silence "pau"))
(if lastseg
(item.relation.insert
lastseg 'Segment (list silence) 'after))))
(define (us_insert_initial_pause utt)
"(us_insert_initial_pause UTT)
Always have an initial silence if the utterance is non-empty.
Insert a silence segment after the last segment in WORDITEM in UTT."
(let ((firstseg (utt.relation.first utt 'Segment))
(silence "pau"))
(if firstseg
(item.relation.insert
firstseg 'Segment (list silence) 'before))))
(define (us_find_last_seg word)
;;; Find the segment that is immediately at this end of this word
;;; If this word is punctuation it might not have any segments
;;; so we have to check back until we find a word with a segment in it
(cond
((null word)
nil) ;; there are no segs (don't think this can happen)
(t
(if (item.daughtern_to (item.relation word 'WordStructure) 'Syllable)
(item.daughtern_to
(item.relation
(item.daughtern_to (item.relation word 'WordStructure) 'Syllable)
'SylStructure)
'Segment)
(us_find_last_seg (item.relation.prev word 'Word))))))
(define (Pause_optional_deleting_B_X utt)
"(Pause_optional_deleting_B_X utt)
Delete all phone symbols starting with 'B_' from the segemt relation
(a B_150 e.g. is a 150ms pause) if symbol 'Pause_delete_B_X is defined.
"
; The B_X never occur in the phone segmentation but are predicted by
; some pause methods, in particular the default I used to produce the
.utt files for the 2009 test sentences for the Blizzard challange .
; Some participants complained about them and I had to fix it quickly.
(if (symbol-bound? 'Pause_delete_B_X)
(let(seg )
(set! seg (item.next(utt.relation.first utt 'Segment)))
(while seg
(set! next_seg (item.next seg))
( format t " segment % " ( item.name seg ) )
(if(string-matches (item.name seg) "B_[0-9]*")
(item.delete seg))
(set! seg next_seg)))))
(provide 'pauses)
| null | https://raw.githubusercontent.com/alesaccoia/festival_flinger/87345aad3a3230751a8ff479f74ba1676217accd/lib/pauses.scm | scheme |
;;
;
;
Copyright (c) 1996,1997 ;;
;
;;
Permission is hereby granted, free of charge, to use and distribute ;;
this software and its documentation without restriction, including ;;
without limitation the rights to use, copy, modify, merge, publish, ;;
distribute, sublicense, and/or sell copies of this work, and to ;;
permit persons to whom this work is furnished to do so, subject to ;;
the following conditions: ;;
1. The code must retain the above copyright notice, this list of ;;
conditions and the following disclaimer. ;;
2. Any modifications must be clearly marked as such. ;;
;
4. The authors' names are not used to endorse or promote products ;;
derived from this software without specific prior written ;;
permission. ;;
;;
THE UNIVERSITY OF EDINBURGH AND THE CONTRIBUTORS TO THIS WORK ;;
DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ;;
ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT ;;
SHALL THE UNIVERSITY OF EDINBURGH NOR THE CONTRIBUTORS BE LIABLE ;;
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES ;;
;
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ;;
ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF ;;
THIS SOFTWARE. ;;
;;
Predicting pause insertion
new style
always have a start pause
((string-equal emph "1")
(insert_pause utt w))
The embarrassing bit. Remove any words labelled as punc or fpunc
can't refer to w as we've just deleted it
that are followed by a silence. These silence sequences
character, e.g. period+quote. That in turn causes problems in
segmentation to align with.
This schould be fixed in the functions below, but it is easier for
me to clean up at the end:
to make the symbol a string
Find the segment that is immediately at this end of this word
If this word is punctuation it might not have any segments
so we have to check back until we find a word with a segment in it
there are no segs (don't think this can happen)
always have a start pause
((string-equal emph "1")
The embarrassing bit. Remove any words labelled as punc or fpunc
can't refer to w as we've just deleted it
Find the segment that is immediately at this end of this word
If this word is punctuation it might not have any segments
so we have to check back until we find a word with a segment in it
there are no segs (don't think this can happen)
The B_X never occur in the phone segmentation but are predicted by
some pause methods, in particular the default I used to produce the
Some participants complained about them and I had to fix it quickly. |
(define (Pauses utt)
"(Pauses utt)
Insert pauses where required."
(let ((rval (apply_method 'Pause_Method utt)))
(cond
(t
(Classic_Pauses utt))))
(Pause_optional_deleting_B_X utt))
(define (Classic_Pauses utt)
"(Pauses UTT)
Predict pause insertion."
(let ((words (utt.relation.items utt 'Word)) lastword tpname)
(if words
(begin
(set! lastword (car (last words)))
(mapcar
(lambda (w)
(let ((pbreak (item.feat w "pbreak"))
(emph (item.feat w "R:Token.parent.EMPH")))
(cond
((or (string-equal "B" pbreak)
(string-equal "BB" pbreak))
(insert_pause utt w))
((equal? w lastword)
(insert_pause utt w)))))
words)
(mapcar
(lambda (w)
(let ((pos (item.feat w "pos")))
(if (or (string-equal "punc" pos)
(string-equal "fpunc" pos))
(let ((pbreak (item.feat w "pbreak"))
(wp (item.relation w 'Phrase)))
(if (and (string-matches pbreak "BB?")
(item.relation.prev w 'Word))
(item.set_feat
(item.relation.prev w 'Word) "pbreak" pbreak))
(item.relation.remove w 'Word)
(item.relation.remove wp 'Phrase)))))
words)
12/01/2006 V.Strom : Even more embarrasing : Delete all silences
emerge if ' punc of phrase - final words consists of more than one
build_utts : the 2nd silence ends up with no features but its name ,
because there is no corresponding 2nd silence in the phone
(set! sil (car (car (cdr (car (PhoneSet.description '(silences)))))))
(set! seg (item.next(utt.relation.first utt 'Segment)))
(while seg
(if(and(equal? sil (item.name seg))
(equal? sil (item.name (item.prev seg))))
(item.delete (item.prev seg)))
(set! seg (item.next seg)))))
utt))
(define (insert_pause utt word)
"(insert_pause UTT WORDITEM)
Insert a silence segment after the last segment in WORDITEM in UTT."
(let ((lastseg (find_last_seg word))
(silence (car (car (cdr (car (PhoneSet.description '(silences))))))))
(if lastseg
(item.relation.insert
lastseg 'Segment (list silence) 'after))))
(define (insert_initial_pause utt)
"(insert_initial_pause UTT)
Always have an initial silence if the utterance is non-empty.
Insert a silence segment after the last segment in WORDITEM in UTT."
(let ((firstseg (car (utt.relation.items utt 'Segment)))
(silence (car (car (cdr (car (PhoneSet.description '(silences))))))))
(if firstseg
(item.relation.insert
firstseg 'Segment (list silence) 'before))))
(define (insert_final_pause utt)
"(insert_final_pause UTT)
Always have a final silence if the utterance is non-empty."
(let ((lastseg (utt.relation.last utt 'Segment))
(silence (car (car (cdr (car (PhoneSet.description '(silences))))))))
( format t " silence is % " silence )
( format t " lastseg is % " ( item.name lastseg ) )
(if lastseg
(if (not(equal? (item.name lastseg) silence))
(begin
(format t "iserted final pause %s\n" silence)
(item.relation.insert lastseg 'Segment (list silence) 'after))))))
(define (find_last_seg word)
(cond
((null word)
(t
(let ((lsyl (item.relation.daughtern word 'SylStructure)))
(if lsyl
(item.relation.daughtern lsyl 'SylStructure)
(find_last_seg (item.relation.prev word 'Word)))))))
(define (Unisyn_Pauses utt)
"(Unisyn_Pauses UTT)
Predict pause insertion in a Unisyn utterance structure."
(let ((words (utt.relation.items utt 'Word)) lastword tpname)
(if words
(begin
(set! lastword (car (last words)))
(mapcar
(lambda (w)
(let ((pbreak (item.feat w "pbreak"))
(emph (item.feat w "R:Token.parent.EMPH")))
(cond
((or (string-equal "B" pbreak)
(string-equal "BB" pbreak))
(us_insert_pause utt w))
( us_insert_pause utt w ) )
((equal? w lastword)
(us_insert_pause utt w)))))
words)
(mapcar
(lambda (w)
(let ((pos (item.feat w "pos")))
(if (or (string-equal "punc" pos)
(string-equal "fpunc" pos))
(let ((pbreak (item.feat w "pbreak"))
(wp (item.relation w 'Phrase)))
(if (and (string-matches pbreak "BB?")
(item.relation.prev w 'Word))
(item.set_feat
(item.relation.prev w 'Word) "pbreak" pbreak))
(item.relation.remove w 'Word)
(item.relation.remove wp 'Phrase)))))
words)))
utt))
(define (us_insert_pause utt word)
"(us_insert_pause UTT WORDITEM)
Insert a silence segment after the last segment in WORDITEM in UTT."
(let ((lastseg (us_find_last_seg word))
(silence "pau"))
(if lastseg
(item.relation.insert
lastseg 'Segment (list silence) 'after))))
(define (us_insert_initial_pause utt)
"(us_insert_initial_pause UTT)
Always have an initial silence if the utterance is non-empty.
Insert a silence segment after the last segment in WORDITEM in UTT."
(let ((firstseg (utt.relation.first utt 'Segment))
(silence "pau"))
(if firstseg
(item.relation.insert
firstseg 'Segment (list silence) 'before))))
(define (us_find_last_seg word)
(cond
((null word)
(t
(if (item.daughtern_to (item.relation word 'WordStructure) 'Syllable)
(item.daughtern_to
(item.relation
(item.daughtern_to (item.relation word 'WordStructure) 'Syllable)
'SylStructure)
'Segment)
(us_find_last_seg (item.relation.prev word 'Word))))))
(define (Pause_optional_deleting_B_X utt)
"(Pause_optional_deleting_B_X utt)
Delete all phone symbols starting with 'B_' from the segemt relation
(a B_150 e.g. is a 150ms pause) if symbol 'Pause_delete_B_X is defined.
"
.utt files for the 2009 test sentences for the Blizzard challange .
(if (symbol-bound? 'Pause_delete_B_X)
(let(seg )
(set! seg (item.next(utt.relation.first utt 'Segment)))
(while seg
(set! next_seg (item.next seg))
( format t " segment % " ( item.name seg ) )
(if(string-matches (item.name seg) "B_[0-9]*")
(item.delete seg))
(set! seg next_seg)))))
(provide 'pauses)
|
1e70a52bc434cdca464a3ba8ddc20824079aee1ab651187d3459dc91edcc170a | sebsheep/elm2node | Declaration.hs | # OPTIONS_GHC -Wall -fno - warn - unused - do - bind #
{-# LANGUAGE OverloadedStrings #-}
module Parse.Declaration
( Decl(..)
, declaration
, infix_
)
where
import qualified Data.Name as Name
import qualified AST.Source as Src
import qualified AST.Utils.Binop as Binop
import qualified Parse.Expression as Expr
import qualified Parse.Pattern as Pattern
import qualified Parse.Keyword as Keyword
import qualified Parse.Number as Number
import qualified Parse.Space as Space
import qualified Parse.Symbol as Symbol
import qualified Parse.Type as Type
import qualified Parse.Variable as Var
import Parse.Primitives hiding (State)
import qualified Parse.Primitives as P
import qualified Reporting.Annotation as A
import qualified Reporting.Error.Syntax as E
-- DECLARATION
-- | A single top-level declaration, paired with the doc comment (if
-- any) that immediately preceded it.
data Decl
  = Value (Maybe Src.Comment) (A.Located Src.Value)   -- ^ @x = ...@, possibly type-annotated
  | Union (Maybe Src.Comment) (A.Located Src.Union)   -- ^ @type X = A | B | ...@
  | Alias (Maybe Src.Comment) (A.Located Src.Alias)   -- ^ @type alias X = ...@
  | Port (Maybe Src.Comment) Src.Port                 -- ^ @port x : ...@
-- | Parse one top-level declaration: an optional doc comment followed
-- by a type declaration, a port declaration, or a value definition.
-- The alternatives are tried in that order via 'oneOf'.
declaration :: Space.Parser E.Decl Decl
declaration =
  do  maybeDocs <- chompDocComment
      start <- getPosition
      oneOf E.DeclStart
        [ typeDecl maybeDocs start
        , portDecl maybeDocs
        , valueDecl maybeDocs start
        ]
-- DOC COMMENT
-- | Try to consume a @{-| ... -}@ doc comment (plus trailing
-- whitespace up to a fresh line).  Falls back to 'Nothing' without
-- consuming anything when no doc comment is present.
chompDocComment :: Parser E.Decl (Maybe Src.Comment)
chompDocComment =
  oneOfWithFallback
    [
      do  docComment <- Space.docComment E.DeclStart E.DeclSpace
          Space.chomp E.DeclSpace
          Space.checkFreshLine E.DeclFreshLineAfterDocComment
          return (Just docComment)
    ]
    Nothing
-- DEFINITION and ANNOTATION
{-# INLINE valueDecl #-}
-- | Parse a value definition, optionally preceded by a type
-- annotation.  After the lowercase name we either see a ':' (an
-- annotation, which must be followed on a fresh line by a definition
-- of the very same name) or go straight to the arguments and body.
valueDecl :: Maybe Src.Comment -> A.Position -> Space.Parser E.Decl Decl
valueDecl maybeDocs start =
  do  name <- Var.lower E.DeclStart
      end <- getPosition
      specialize (E.DeclDef name) $
        do  Space.chompAndCheckIndent E.DeclDefSpace E.DeclDefIndentEquals
            oneOf E.DeclDefEquals
              [
                -- "name : tipe" then "name args = body" on a fresh line
                do  word1 0x3A {-:-} E.DeclDefEquals
                    Space.chompAndCheckIndent E.DeclDefSpace E.DeclDefIndentType
                    (tipe, _) <- specialize E.DeclDefType Type.expression
                    Space.checkFreshLine E.DeclDefNameRepeat
                    defName <- chompMatchingName name
                    Space.chompAndCheckIndent E.DeclDefSpace E.DeclDefIndentEquals
                    chompDefArgsAndBody maybeDocs start defName (Just tipe) []
              ,
                -- no annotation: "name args = body"
                chompDefArgsAndBody maybeDocs start (A.at start end name) Nothing []
              ]
-- | Accumulate argument patterns (in reverse) until '=' is reached,
-- then parse the body expression and build the final 'Value'.
chompDefArgsAndBody :: Maybe Src.Comment -> A.Position -> A.Located Name.Name -> Maybe Src.Type -> [Src.Pattern] -> Space.Parser E.DeclDef Decl
chompDefArgsAndBody maybeDocs start name tipe revArgs =
  oneOf E.DeclDefEquals
    [ do  arg <- specialize E.DeclDefArg Pattern.term
          Space.chompAndCheckIndent E.DeclDefSpace E.DeclDefIndentEquals
          chompDefArgsAndBody maybeDocs start name tipe (arg : revArgs)
    , do  word1 0x3D {-=-} E.DeclDefEquals
          Space.chompAndCheckIndent E.DeclDefSpace E.DeclDefIndentBody
          (body, end) <- specialize E.DeclDefBody Expr.expression
          let value = Src.Value name (reverse revArgs) body tipe
          let avalue = A.at start end value
          return (Value maybeDocs avalue, end)
    ]
-- | Parse a lowercase name and require it to equal the name from the
-- preceding type annotation.  Implemented against the raw parser
-- representation so the success continuations can be intercepted: on
-- a mismatch we fail with 'E.DeclDefNameMatch' instead of succeeding.
chompMatchingName :: Name.Name -> Parser E.DeclDef (A.Located Name.Name)
chompMatchingName expectedName =
  let
    (P.Parser parserL) = Var.lower E.DeclDefNameRepeat
  in
  P.Parser $ \state@(P.State _ _ _ _ sr sc) cok eok cerr eerr ->
    let
      -- wrap the "consumed input, ok" continuation
      cokL name newState@(P.State _ _ _ _ er ec) =
        if expectedName == name
        then cok (A.At (A.Region (A.Position sr sc) (A.Position er ec)) name) newState
        else cerr sr sc (E.DeclDefNameMatch name)
      -- wrap the "consumed nothing, ok" continuation
      eokL name newState@(P.State _ _ _ _ er ec) =
        if expectedName == name
        then eok (A.At (A.Region (A.Position sr sc) (A.Position er ec)) name) newState
        else eerr sr sc (E.DeclDefNameMatch name)
    in
    parserL state cokL eokL cerr eerr
-- TYPE DECLARATIONS
{-# INLINE typeDecl #-}
-- | Parse a type declaration.  After the 'type' keyword this is
-- either a type alias ("type alias Name args = tipe") or a custom
-- (union) type with one or more variants separated by '|'.
typeDecl :: Maybe Src.Comment -> A.Position -> Space.Parser E.Decl Decl
typeDecl maybeDocs start =
  inContext E.DeclType (Keyword.type_ E.DeclStart) $
    do  Space.chompAndCheckIndent E.DT_Space E.DT_IndentName
        oneOf E.DT_Name
          [
            -- type alias
            inContext E.DT_Alias (Keyword.alias_ E.DT_Name) $
              do  Space.chompAndCheckIndent E.AliasSpace E.AliasIndentEquals
                  (name, args) <- chompAliasNameToEquals
                  (tipe, end) <- specialize E.AliasBody Type.expression
                  let alias = A.at start end (Src.Alias name args tipe)
                  return (Alias maybeDocs alias, end)
          ,
            -- custom (union) type
            specialize E.DT_Union $
              do  (name, args) <- chompCustomNameToEquals
                  (firstVariant, firstEnd) <- Type.variant
                  (variants, end) <- chompVariants [firstVariant] firstEnd
                  let union = A.at start end (Src.Union name args variants)
                  return (Union maybeDocs union, end)
          ]
-- TYPE ALIASES
-- | Parse the alias name (an uppercase identifier) and then its type
-- variables up to and including the '='.
chompAliasNameToEquals :: Parser E.TypeAlias (A.Located Name.Name, [A.Located Name.Name])
chompAliasNameToEquals =
  do  name <- addLocation (Var.upper E.AliasName)
      Space.chompAndCheckIndent E.AliasSpace E.AliasIndentEquals
      chompAliasNameToEqualsHelp name []
-- | Collect lowercase type variables (accumulated in reverse) until
-- the '=' is reached; the list is re-reversed on return.
chompAliasNameToEqualsHelp :: A.Located Name.Name -> [A.Located Name.Name] -> Parser E.TypeAlias (A.Located Name.Name, [A.Located Name.Name])
chompAliasNameToEqualsHelp name args =
  oneOf E.AliasEquals
    [ do  arg <- addLocation (Var.lower E.AliasEquals)
          Space.chompAndCheckIndent E.AliasSpace E.AliasIndentEquals
          chompAliasNameToEqualsHelp name (arg:args)
    , do  word1 0x3D {-=-} E.AliasEquals
          Space.chompAndCheckIndent E.AliasSpace E.AliasIndentBody
          return ( name, reverse args )
    ]
-- CUSTOM TYPES
-- | Parse a custom type's name (uppercase) and then its type
-- variables up to and including the '='.
chompCustomNameToEquals :: Parser E.CustomType (A.Located Name.Name, [A.Located Name.Name])
chompCustomNameToEquals =
  do  name <- addLocation (Var.upper E.CT_Name)
      Space.chompAndCheckIndent E.CT_Space E.CT_IndentEquals
      chompCustomNameToEqualsHelp name []
-- | Collect lowercase type variables (accumulated in reverse) until
-- the '=' is reached; mirrors 'chompAliasNameToEqualsHelp' but with
-- custom-type error tags.
chompCustomNameToEqualsHelp :: A.Located Name.Name -> [A.Located Name.Name] -> Parser E.CustomType (A.Located Name.Name, [A.Located Name.Name])
chompCustomNameToEqualsHelp name args =
  oneOf E.CT_Equals
    [ do  arg <- addLocation (Var.lower E.CT_Equals)
          Space.chompAndCheckIndent E.CT_Space E.CT_IndentEquals
          chompCustomNameToEqualsHelp name (arg:args)
    , do  word1 0x3D {-=-} E.CT_Equals
          Space.chompAndCheckIndent E.CT_Space E.CT_IndentAfterEquals
          return ( name, reverse args )
    ]
-- | Parse additional '|'-separated variants after the first one.
-- Variants are accumulated in reverse; when no further '|' is found
-- at the expected indentation we fall back to returning what we
-- have, re-reversed, together with the end position.
chompVariants :: [(A.Located Name.Name, [Src.Type])] -> A.Position -> Space.Parser E.CustomType [(A.Located Name.Name, [Src.Type])]
chompVariants variants end =
  oneOfWithFallback
    [ do  Space.checkIndent end E.CT_IndentBar
          word1 0x7C {-|-} E.CT_Bar
          Space.chompAndCheckIndent E.CT_Space E.CT_IndentAfterBar
          (variant, newEnd) <- Type.variant
          chompVariants (variant:variants) newEnd
    ]
    (reverse variants, end)
-- PORT
{-# INLINE portDecl #-}
-- | Parse a port declaration: "port name : tipe".  Ports carry only
-- a name and a type; they never have a definition body.
portDecl :: Maybe Src.Comment -> Space.Parser E.Decl Decl
portDecl maybeDocs =
  inContext E.Port (Keyword.port_ E.DeclStart) $
    do  Space.chompAndCheckIndent E.PortSpace E.PortIndentName
        name <- addLocation (Var.lower E.PortName)
        Space.chompAndCheckIndent E.PortSpace E.PortIndentColon
        word1 0x3A {-:-} E.PortColon
        Space.chompAndCheckIndent E.PortSpace E.PortIndentType
        (tipe, end) <- specialize E.PortType Type.expression
        return
          ( Port maybeDocs (Src.Port name tipe)
          , end
          )
-- INFIX
-- INVARIANT: always chomps to a freshline
--
-- | Parse an infix operator declaration, e.g.
-- "infix right 5 (++) = append".  All failures share the single
-- 'E.Infix' error, so 'err' and its ignore-argument variant '_err'
-- are defined once up front.
infix_ :: Parser E.Module (A.Located Src.Infix)
infix_ =
  let
    err = E.Infix
    _err = \_ -> E.Infix
  in
  do  start <- getPosition
      Keyword.infix_ err
      Space.chompAndCheckIndent _err err
      associativity <-
        oneOf err
          [ Keyword.left_ err >> return Binop.Left
          , Keyword.right_ err >> return Binop.Right
          , Keyword.non_ err >> return Binop.Non
          ]
      Space.chompAndCheckIndent _err err
      precedence <- Number.precedence err
      Space.chompAndCheckIndent _err err
      word1 0x28 {-(-} err
      op <- Symbol.operator err _err
      word1 0x29 {-)-} err
      Space.chompAndCheckIndent _err err
      word1 0x3D {-=-} err
      Space.chompAndCheckIndent _err err
      name <- Var.lower err
      end <- getPosition
      Space.chomp _err
      Space.checkFreshLine err
      return (A.at start end (Src.Infix op associativity precedence name))
| null | https://raw.githubusercontent.com/sebsheep/elm2node/602a64f48e39edcdfa6d99793cc2827b677d650d/compiler/src/Parse/Declaration.hs | haskell | # LANGUAGE OverloadedStrings #
DECLARATION
:
=
TYPE DECLARATIONS
TYPE ALIASES
=
CUSTOM TYPES
=
|
PORT
:
INVARIANT: always chomps to a freshline
(
)
= | # OPTIONS_GHC -Wall -fno - warn - unused - do - bind #
module Parse.Declaration
( Decl(..)
, declaration
, infix_
)
where
import qualified Data.Name as Name
import qualified AST.Source as Src
import qualified AST.Utils.Binop as Binop
import qualified Parse.Expression as Expr
import qualified Parse.Pattern as Pattern
import qualified Parse.Keyword as Keyword
import qualified Parse.Number as Number
import qualified Parse.Space as Space
import qualified Parse.Symbol as Symbol
import qualified Parse.Type as Type
import qualified Parse.Variable as Var
import Parse.Primitives hiding (State)
import qualified Parse.Primitives as P
import qualified Reporting.Annotation as A
import qualified Reporting.Error.Syntax as E
data Decl
= Value (Maybe Src.Comment) (A.Located Src.Value)
| Union (Maybe Src.Comment) (A.Located Src.Union)
| Alias (Maybe Src.Comment) (A.Located Src.Alias)
| Port (Maybe Src.Comment) Src.Port
declaration :: Space.Parser E.Decl Decl
declaration =
do maybeDocs <- chompDocComment
start <- getPosition
oneOf E.DeclStart
[ typeDecl maybeDocs start
, portDecl maybeDocs
, valueDecl maybeDocs start
]
DOC COMMENT
chompDocComment :: Parser E.Decl (Maybe Src.Comment)
chompDocComment =
oneOfWithFallback
[
do docComment <- Space.docComment E.DeclStart E.DeclSpace
Space.chomp E.DeclSpace
Space.checkFreshLine E.DeclFreshLineAfterDocComment
return (Just docComment)
]
Nothing
DEFINITION and ANNOTATION
# INLINE valueDecl #
valueDecl :: Maybe Src.Comment -> A.Position -> Space.Parser E.Decl Decl
valueDecl maybeDocs start =
do name <- Var.lower E.DeclStart
end <- getPosition
specialize (E.DeclDef name) $
do Space.chompAndCheckIndent E.DeclDefSpace E.DeclDefIndentEquals
oneOf E.DeclDefEquals
[
Space.chompAndCheckIndent E.DeclDefSpace E.DeclDefIndentType
(tipe, _) <- specialize E.DeclDefType Type.expression
Space.checkFreshLine E.DeclDefNameRepeat
defName <- chompMatchingName name
Space.chompAndCheckIndent E.DeclDefSpace E.DeclDefIndentEquals
chompDefArgsAndBody maybeDocs start defName (Just tipe) []
,
chompDefArgsAndBody maybeDocs start (A.at start end name) Nothing []
]
chompDefArgsAndBody :: Maybe Src.Comment -> A.Position -> A.Located Name.Name -> Maybe Src.Type -> [Src.Pattern] -> Space.Parser E.DeclDef Decl
chompDefArgsAndBody maybeDocs start name tipe revArgs =
oneOf E.DeclDefEquals
[ do arg <- specialize E.DeclDefArg Pattern.term
Space.chompAndCheckIndent E.DeclDefSpace E.DeclDefIndentEquals
chompDefArgsAndBody maybeDocs start name tipe (arg : revArgs)
Space.chompAndCheckIndent E.DeclDefSpace E.DeclDefIndentBody
(body, end) <- specialize E.DeclDefBody Expr.expression
let value = Src.Value name (reverse revArgs) body tipe
let avalue = A.at start end value
return (Value maybeDocs avalue, end)
]
chompMatchingName :: Name.Name -> Parser E.DeclDef (A.Located Name.Name)
chompMatchingName expectedName =
let
(P.Parser parserL) = Var.lower E.DeclDefNameRepeat
in
P.Parser $ \state@(P.State _ _ _ _ sr sc) cok eok cerr eerr ->
let
cokL name newState@(P.State _ _ _ _ er ec) =
if expectedName == name
then cok (A.At (A.Region (A.Position sr sc) (A.Position er ec)) name) newState
else cerr sr sc (E.DeclDefNameMatch name)
eokL name newState@(P.State _ _ _ _ er ec) =
if expectedName == name
then eok (A.At (A.Region (A.Position sr sc) (A.Position er ec)) name) newState
else eerr sr sc (E.DeclDefNameMatch name)
in
parserL state cokL eokL cerr eerr
# INLINE typeDecl #
typeDecl :: Maybe Src.Comment -> A.Position -> Space.Parser E.Decl Decl
typeDecl maybeDocs start =
inContext E.DeclType (Keyword.type_ E.DeclStart) $
do Space.chompAndCheckIndent E.DT_Space E.DT_IndentName
oneOf E.DT_Name
[
inContext E.DT_Alias (Keyword.alias_ E.DT_Name) $
do Space.chompAndCheckIndent E.AliasSpace E.AliasIndentEquals
(name, args) <- chompAliasNameToEquals
(tipe, end) <- specialize E.AliasBody Type.expression
let alias = A.at start end (Src.Alias name args tipe)
return (Alias maybeDocs alias, end)
,
specialize E.DT_Union $
do (name, args) <- chompCustomNameToEquals
(firstVariant, firstEnd) <- Type.variant
(variants, end) <- chompVariants [firstVariant] firstEnd
let union = A.at start end (Src.Union name args variants)
return (Union maybeDocs union, end)
]
chompAliasNameToEquals :: Parser E.TypeAlias (A.Located Name.Name, [A.Located Name.Name])
chompAliasNameToEquals =
do name <- addLocation (Var.upper E.AliasName)
Space.chompAndCheckIndent E.AliasSpace E.AliasIndentEquals
chompAliasNameToEqualsHelp name []
chompAliasNameToEqualsHelp :: A.Located Name.Name -> [A.Located Name.Name] -> Parser E.TypeAlias (A.Located Name.Name, [A.Located Name.Name])
chompAliasNameToEqualsHelp name args =
oneOf E.AliasEquals
[ do arg <- addLocation (Var.lower E.AliasEquals)
Space.chompAndCheckIndent E.AliasSpace E.AliasIndentEquals
chompAliasNameToEqualsHelp name (arg:args)
Space.chompAndCheckIndent E.AliasSpace E.AliasIndentBody
return ( name, reverse args )
]
chompCustomNameToEquals :: Parser E.CustomType (A.Located Name.Name, [A.Located Name.Name])
chompCustomNameToEquals =
do name <- addLocation (Var.upper E.CT_Name)
Space.chompAndCheckIndent E.CT_Space E.CT_IndentEquals
chompCustomNameToEqualsHelp name []
chompCustomNameToEqualsHelp :: A.Located Name.Name -> [A.Located Name.Name] -> Parser E.CustomType (A.Located Name.Name, [A.Located Name.Name])
chompCustomNameToEqualsHelp name args =
oneOf E.CT_Equals
[ do arg <- addLocation (Var.lower E.CT_Equals)
Space.chompAndCheckIndent E.CT_Space E.CT_IndentEquals
chompCustomNameToEqualsHelp name (arg:args)
Space.chompAndCheckIndent E.CT_Space E.CT_IndentAfterEquals
return ( name, reverse args )
]
chompVariants :: [(A.Located Name.Name, [Src.Type])] -> A.Position -> Space.Parser E.CustomType [(A.Located Name.Name, [Src.Type])]
chompVariants variants end =
oneOfWithFallback
[ do Space.checkIndent end E.CT_IndentBar
Space.chompAndCheckIndent E.CT_Space E.CT_IndentAfterBar
(variant, newEnd) <- Type.variant
chompVariants (variant:variants) newEnd
]
(reverse variants, end)
# INLINE portDecl #
portDecl :: Maybe Src.Comment -> Space.Parser E.Decl Decl
portDecl maybeDocs =
inContext E.Port (Keyword.port_ E.DeclStart) $
do Space.chompAndCheckIndent E.PortSpace E.PortIndentName
name <- addLocation (Var.lower E.PortName)
Space.chompAndCheckIndent E.PortSpace E.PortIndentColon
Space.chompAndCheckIndent E.PortSpace E.PortIndentType
(tipe, end) <- specialize E.PortType Type.expression
return
( Port maybeDocs (Src.Port name tipe)
, end
)
INFIX
infix_ :: Parser E.Module (A.Located Src.Infix)
infix_ =
let
err = E.Infix
_err = \_ -> E.Infix
in
do start <- getPosition
Keyword.infix_ err
Space.chompAndCheckIndent _err err
associativity <-
oneOf err
[ Keyword.left_ err >> return Binop.Left
, Keyword.right_ err >> return Binop.Right
, Keyword.non_ err >> return Binop.Non
]
Space.chompAndCheckIndent _err err
precedence <- Number.precedence err
Space.chompAndCheckIndent _err err
op <- Symbol.operator err _err
Space.chompAndCheckIndent _err err
Space.chompAndCheckIndent _err err
name <- Var.lower err
end <- getPosition
Space.chomp _err
Space.checkFreshLine err
return (A.at start end (Src.Infix op associativity precedence name))
|
ca887003b576eb10c67fd1723097b5aeb77cf269e99dd336f3aa94ef357f42b6 | tonyg/racket-operational-transformation | main.rkt | #lang racket/base
(require "operation.rkt")
(require "client.rkt")
(require "server.rkt")
(provide (all-from-out "operation.rkt"))
(provide (all-from-out "client.rkt"))
(provide (all-from-out "server.rkt"))
| null | https://raw.githubusercontent.com/tonyg/racket-operational-transformation/1960b7f70138a9de6e3ceb2943b8ca46c83d94ae/operational-transformation-lib/operational-transformation/main.rkt | racket | #lang racket/base
(require "operation.rkt")
(require "client.rkt")
(require "server.rkt")
(provide (all-from-out "operation.rkt"))
(provide (all-from-out "client.rkt"))
(provide (all-from-out "server.rkt"))
| |
071ea78fba4dc946bd6df1c30a1030c0220d24c47cd22afda23f5ff79f178b25 | nedap/formatting-stack | valid_syntax.clj | (ns valid-syntax)
| null | https://raw.githubusercontent.com/nedap/formatting-stack/c43e74d5409e9338f208457bb8928ce437381a3f/test-resources/valid_syntax.clj | clojure | (ns valid-syntax)
| |
b08c8d9d0e3f9054b0873a75674f7cee11de12b1c0ed61b481481e73d60f80ef | silky/quipper | Classical.hs | This file is part of Quipper . Copyright ( C ) 2011 - 2016 . Please see the
-- file COPYRIGHT for a list of authors, copyright holders, licensing,
-- and other details. All rights reserved.
--
-- ======================================================================
# LANGUAGE FlexibleContexts #
-- | This module provides some operations for low-level manipulation
-- of classical circuits. It is built directly on top of
-- "Quipper.Circuit".
module Quipper.Classical where
import other Quipper stuff
import Quipper.Generic
import Quipper.QData
import Quipper.Monad
import Quipper.Control
import Quipper.Transformer
-- import other stuff
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.IntMap as IntMap
-- ======================================================================
-- * Manipulation of classical circuits
-- ----------------------------------------------------------------------
-- ** Eliminating CGate
-- | A 'Transformer' to eliminate all 'CGate' style gates, such as
\"and\ " , " , \"not\ " , \"xor\ " , \"eq\ " , and \"if - then - else\ "
gates , and replace them by equivalent ' CInit ' and ' CNot ' gates .
cgate_to_cnot_transformer :: Transformer Circ Qubit Bit
cgate_to_cnot_transformer (T_CGate name ncf f) = f $
\qs -> without_controls_if ncf $ do
q <- cinit False
translate_cgate name q qs
return (q, qs)
cgate_to_cnot_transformer (T_CGateInv name ncf f) = f $
\q qs -> without_controls_if ncf $ do
reverse_generic_imp (translate_cgate name) q qs
cterm False q
return qs
cgate_to_cnot_transformer gate = identity_transformer gate
-- | Auxiliary function: compute the reversible circuit corresponding
-- to a 'CGate' of the given name, using only controlled-not gates.
translate_cgate :: String -> Bit -> [Bit] -> Circ ()
translate_cgate "if" q [a,b,c] = do
cnot_at q `controlled` a .==. True .&&. b .==. True
cnot_at q `controlled` a .==. False .&&. c .==. True
translate_cgate "if" q list = do
error ("translate_cgate: \"if\" needs 3 arguments, not " ++ show (length list))
translate_cgate "and" q list = do
cnot_at q `controlled` list
translate_cgate "or" q list = do
cnot_at q `controlled` [ x .==. 0 | x <- list]
cnot_at q
translate_cgate "xor" q list = do
sequence_ [cnot_at q `controlled` c | c <- list]
translate_cgate "eq" q [a,b] = do
cnot_at q `controlled` a .==. True
cnot_at q `controlled` b .==. False
translate_cgate "eq" q list = do
error ("translate_cgate: \"eq\" needs 2 arguments, not " ++ show (length list))
translate_cgate "not" q [a] = do
cnot_at q `controlled` a .==. False
translate_cgate "not" q list = do
error ("translate_cgate: \"not\" needs 1 argument, not " ++ show (length list))
translate_cgate name q list = do
error ("translate_cgate: gate \"" ++ name ++ "\" not known")
-- | Translate all classical gates in a circuit into equivalent
-- controlled-not gates.
--
-- The type of this overloaded function is difficult to read. In more
-- readable form, it has all of the following types:
--
> classical_to_cnot : : ( QCData qa ) = > Circ qa - > Circ qa
> classical_to_cnot : : ( QCData qa , QCData qb ) = > ( qa - > Circ qb ) - > ( qa - > Circ qb )
> classical_to_cnot : : ( QCData qa , QCData qb , QCData qc ) = > ( qa - > qb - > Circ qc ) - > ( qa - > qb - > Circ qc )
--
-- and so forth.
classical_to_cnot :: (QCData qa, QCData qb, QCurry qfun qa qb) => qfun -> qfun
classical_to_cnot = transform_generic cgate_to_cnot_transformer
-- ----------------------------------------------------------------------
-- ** Classical to quantum
-- | Map an endpoint to the underlying 'Qubit' in the trivial
-- case. Auxiliary function.
trivial_endpoint :: B_Endpoint Qubit Qubit -> Qubit
trivial_endpoint (Endpoint_Qubit q) = q
trivial_endpoint (Endpoint_Bit q) = q
-- | A 'Transformer' to replace all classical gates in a circuit by
-- equivalent quantum gates.
classical_to_quantum_transformer :: Transformer Circ Qubit Qubit
Classical gates .
classical_to_quantum_transformer (T_CNot ncf f) = f $
\q c -> without_controls_if ncf $ do
q' <- qnot q `controlled` c
return (q', c)
classical_to_quantum_transformer (T_CSwap ncf f) = f $
\w v c -> without_controls_if ncf $ do
(w',v') <- swap w v `controlled` c
return (w',v',c)
classical_to_quantum_transformer (T_CInit b ncf f) = f $
without_controls_if ncf $ do
w <- qinit b
return w
classical_to_quantum_transformer (T_CTerm b ncf f) = f $
\w -> without_controls_if ncf $ do
qterm b w
return ()
classical_to_quantum_transformer (T_CDiscard f) = f $
\w -> do
qdiscard w
return ()
classical_to_quantum_transformer (T_DTerm b f) = f $
\w -> do
qdiscard w
return ()
classical_to_quantum_transformer (T_CGate name ncf f) = f $
-- This case is recursive. The well-foundedness rests on the fact
-- that the output of classical_to_cnot contains no CGate.
classical_to_quantum . classical_to_cnot $
\ws -> without_controls_if ncf $ do
v <- cgate name ws
return (v, ws)
classical_to_quantum_transformer (T_CGateInv name ncf f) = f $
-- This case is recursive. The well-foundedness rests on the fact
-- that the output of classical_to_cnot contains no CGate.
classical_to_quantum . classical_to_cnot $
\v ws -> without_controls_if ncf $ do
cgateinv name v ws
return ws
-- Preparation, unpreparation, and measurement. These become no-ops.
classical_to_quantum_transformer (T_QPrep ncf f) = f $
\w -> return w
classical_to_quantum_transformer (T_QUnprep ncf f) = f $
\w -> return w
classical_to_quantum_transformer (T_QMeas f) = f $
\w -> return w
Quantum gates . These are similar to the identity transformer .
-- However, we cannot explicitly call the identity transformer,
-- because its typing does not correctly translate 'Bit' to
-- 'Qubit'. This matters because a pure quantum gate may have
-- classical controls that need to be translated to quantum controls.
classical_to_quantum_transformer (T_QGate name _ _ inv ncf f) = f $
\ws vs c -> without_controls_if ncf $ do
(ws', vs') <- named_gate_qulist name inv ws vs `controlled` c
return (ws', vs', c)
classical_to_quantum_transformer (T_QRot name _ _ inv t ncf f) = f $
\ws vs c -> without_controls_if ncf $ do
(ws', vs') <- named_rotation_qulist name inv t ws vs `controlled` c
return (ws', vs', c)
classical_to_quantum_transformer (T_GPhase t ncf f) = f $
\q c -> without_controls_if ncf $ do
global_phase_anchored_list t (map fix_endpoint q) `controlled` c
return c
where
fix_endpoint (Endpoint_Qubit q) = (Endpoint_Qubit q)
fix_endpoint (Endpoint_Bit q) = (Endpoint_Qubit q)
classical_to_quantum_transformer (T_QInit b ncf f) = f $
without_controls_if ncf $ do
w <- qinit_qubit b
return w
classical_to_quantum_transformer (T_QTerm b ncf f) = f $
\w -> without_controls_if ncf $ do
qterm_qubit b w
return ()
classical_to_quantum_transformer (T_QDiscard f) = f $
\w -> do
qdiscard_qubit w
return ()
classical_to_quantum_transformer (T_Subroutine n inv ncf scf ws_pat a1_pat vs_pat a2_pat repeat f) = f $
\namespace ws c -> without_controls_if ncf $ do
provide_subroutines namespace
v <- subroutine n inv scf repeat ws_pat a1_pat vs_pat a2_pat (map fix_endpoint ws) `controlled` c
return (map fix_endpoint v,c)
where
fix_endpoint (Endpoint_Qubit q) = Endpoint_Qubit q
fix_endpoint (Endpoint_Bit q) =
error "classical_to_quantum: classical subroutine not permitted"
classical_to_quantum_transformer (T_Comment s inv f) = f $
\ws -> do
comment_label s inv [ (fix_endpoint e, s) | (e,s) <- ws ]
return ()
where
fix_endpoint (Endpoint_Qubit q) = wire_of_qubit q
fix_endpoint (Endpoint_Bit q) = wire_of_qubit q
-- | Replace all classical gates in a circuit by equivalent quantum gates.
classical_to_quantum_unary :: (QCData qa, QCData qb) => (qa -> Circ qb) -> (QType qa -> Circ (QType qb))
classical_to_quantum_unary f x = transform_unary_shape classical_to_quantum_transformer f shape x
where
shape = qcdata_makeshape (dummy :: qa) qubit qubit x
-- | Replace all classical gates in a circuit by equivalent quantum gates.
--
-- The type of this overloaded function is difficult to read. In more
-- readable form, it has all of the following types:
--
> classical_to_quantum : : ( QCData qa ) = > Circ qa - > Circ ( QType qa )
> classical_to_quantum : : ( QCData qa , QCData qb ) = > ( qa - > Circ qb ) - > ( QType qa - > Circ ( QType qb ) )
> classical_to_quantum : : ( QCData qa , QCData qb , QCData qc ) = > ( qa - > qb - > Circ qc ) - > ( QType qa - > QType qb - > Circ ( QType qc ) )
--
-- and so forth.
classical_to_quantum :: (QCData qa, QCData qb, QCurry qfun qa qb, QCurry qfun' (QType qa) (QType qb)) => qfun -> qfun'
classical_to_quantum f = g where
f1 = quncurry f
g1 = classical_to_quantum_unary f1
g = qcurry g1
-- ======================================================================
-- * Classical to reversible
-- | Generic function for turning a classical (or pseudo-classical)
-- circuit into a reversible circuit. The input is a classical boolean
-- function /x/ ↦ /f/(/x/), given as a not necessarily reversible
circuit ( however , the circuit should be one - to - one , i.e. , no
-- \"garbage\" should be explicitly erased). The output is the
-- corresponding reversible function (/x/,/y/) ↦ (/x/,/y/ ⊕
-- /f/(/x/)). /qa/ and /qb/ can be any quantum data types. The
-- function 'classical_to_reversible' does not itself change
-- classical bits to qubits; use 'classical_to_quantum' for that.
classical_to_reversible :: (QCData qa, QCData qb) => (qa -> Circ qb) -> ((qa,qb) -> Circ (qa,qb))
classical_to_reversible f (input, target) = do
with_computed (f input) $ \output -> do
controlled_not target output
return (input, target)
| null | https://raw.githubusercontent.com/silky/quipper/1ef6d031984923d8b7ded1c14f05db0995791633/quipper/Quipper/Classical.hs | haskell | file COPYRIGHT for a list of authors, copyright holders, licensing,
and other details. All rights reserved.
======================================================================
| This module provides some operations for low-level manipulation
of classical circuits. It is built directly on top of
"Quipper.Circuit".
import other stuff
======================================================================
* Manipulation of classical circuits
----------------------------------------------------------------------
** Eliminating CGate
| A 'Transformer' to eliminate all 'CGate' style gates, such as
| Auxiliary function: compute the reversible circuit corresponding
to a 'CGate' of the given name, using only controlled-not gates.
| Translate all classical gates in a circuit into equivalent
controlled-not gates.
The type of this overloaded function is difficult to read. In more
readable form, it has all of the following types:
and so forth.
----------------------------------------------------------------------
** Classical to quantum
| Map an endpoint to the underlying 'Qubit' in the trivial
case. Auxiliary function.
| A 'Transformer' to replace all classical gates in a circuit by
equivalent quantum gates.
This case is recursive. The well-foundedness rests on the fact
that the output of classical_to_cnot contains no CGate.
This case is recursive. The well-foundedness rests on the fact
that the output of classical_to_cnot contains no CGate.
Preparation, unpreparation, and measurement. These become no-ops.
However, we cannot explicitly call the identity transformer,
because its typing does not correctly translate 'Bit' to
'Qubit'. This matters because a pure quantum gate may have
classical controls that need to be translated to quantum controls.
| Replace all classical gates in a circuit by equivalent quantum gates.
| Replace all classical gates in a circuit by equivalent quantum gates.
The type of this overloaded function is difficult to read. In more
readable form, it has all of the following types:
and so forth.
======================================================================
* Classical to reversible
| Generic function for turning a classical (or pseudo-classical)
circuit into a reversible circuit. The input is a classical boolean
function /x/ ↦ /f/(/x/), given as a not necessarily reversible
\"garbage\" should be explicitly erased). The output is the
corresponding reversible function (/x/,/y/) ↦ (/x/,/y/ ⊕
/f/(/x/)). /qa/ and /qb/ can be any quantum data types. The
function 'classical_to_reversible' does not itself change
classical bits to qubits; use 'classical_to_quantum' for that. | This file is part of Quipper . Copyright ( C ) 2011 - 2016 . Please see the
# LANGUAGE FlexibleContexts #
module Quipper.Classical where
import other Quipper stuff
import Quipper.Generic
import Quipper.QData
import Quipper.Monad
import Quipper.Control
import Quipper.Transformer
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.IntMap as IntMap
\"and\ " , " , \"not\ " , \"xor\ " , \"eq\ " , and \"if - then - else\ "
gates , and replace them by equivalent ' CInit ' and ' CNot ' gates .
cgate_to_cnot_transformer :: Transformer Circ Qubit Bit
cgate_to_cnot_transformer (T_CGate name ncf f) = f $
\qs -> without_controls_if ncf $ do
q <- cinit False
translate_cgate name q qs
return (q, qs)
cgate_to_cnot_transformer (T_CGateInv name ncf f) = f $
\q qs -> without_controls_if ncf $ do
reverse_generic_imp (translate_cgate name) q qs
cterm False q
return qs
cgate_to_cnot_transformer gate = identity_transformer gate
translate_cgate :: String -> Bit -> [Bit] -> Circ ()
translate_cgate "if" q [a,b,c] = do
cnot_at q `controlled` a .==. True .&&. b .==. True
cnot_at q `controlled` a .==. False .&&. c .==. True
translate_cgate "if" q list = do
error ("translate_cgate: \"if\" needs 3 arguments, not " ++ show (length list))
translate_cgate "and" q list = do
cnot_at q `controlled` list
translate_cgate "or" q list = do
cnot_at q `controlled` [ x .==. 0 | x <- list]
cnot_at q
translate_cgate "xor" q list = do
sequence_ [cnot_at q `controlled` c | c <- list]
translate_cgate "eq" q [a,b] = do
cnot_at q `controlled` a .==. True
cnot_at q `controlled` b .==. False
translate_cgate "eq" q list = do
error ("translate_cgate: \"eq\" needs 2 arguments, not " ++ show (length list))
translate_cgate "not" q [a] = do
cnot_at q `controlled` a .==. False
translate_cgate "not" q list = do
error ("translate_cgate: \"not\" needs 1 argument, not " ++ show (length list))
translate_cgate name q list = do
error ("translate_cgate: gate \"" ++ name ++ "\" not known")
> classical_to_cnot : : ( QCData qa ) = > Circ qa - > Circ qa
> classical_to_cnot : : ( QCData qa , QCData qb ) = > ( qa - > Circ qb ) - > ( qa - > Circ qb )
> classical_to_cnot : : ( QCData qa , QCData qb , QCData qc ) = > ( qa - > qb - > Circ qc ) - > ( qa - > qb - > Circ qc )
classical_to_cnot :: (QCData qa, QCData qb, QCurry qfun qa qb) => qfun -> qfun
classical_to_cnot = transform_generic cgate_to_cnot_transformer
trivial_endpoint :: B_Endpoint Qubit Qubit -> Qubit
trivial_endpoint (Endpoint_Qubit q) = q
trivial_endpoint (Endpoint_Bit q) = q
classical_to_quantum_transformer :: Transformer Circ Qubit Qubit
Classical gates .
classical_to_quantum_transformer (T_CNot ncf f) = f $
\q c -> without_controls_if ncf $ do
q' <- qnot q `controlled` c
return (q', c)
classical_to_quantum_transformer (T_CSwap ncf f) = f $
\w v c -> without_controls_if ncf $ do
(w',v') <- swap w v `controlled` c
return (w',v',c)
classical_to_quantum_transformer (T_CInit b ncf f) = f $
without_controls_if ncf $ do
w <- qinit b
return w
classical_to_quantum_transformer (T_CTerm b ncf f) = f $
\w -> without_controls_if ncf $ do
qterm b w
return ()
classical_to_quantum_transformer (T_CDiscard f) = f $
\w -> do
qdiscard w
return ()
classical_to_quantum_transformer (T_DTerm b f) = f $
\w -> do
qdiscard w
return ()
classical_to_quantum_transformer (T_CGate name ncf f) = f $
classical_to_quantum . classical_to_cnot $
\ws -> without_controls_if ncf $ do
v <- cgate name ws
return (v, ws)
classical_to_quantum_transformer (T_CGateInv name ncf f) = f $
classical_to_quantum . classical_to_cnot $
\v ws -> without_controls_if ncf $ do
cgateinv name v ws
return ws
classical_to_quantum_transformer (T_QPrep ncf f) = f $
\w -> return w
classical_to_quantum_transformer (T_QUnprep ncf f) = f $
\w -> return w
classical_to_quantum_transformer (T_QMeas f) = f $
\w -> return w
Quantum gates . These are similar to the identity transformer .
classical_to_quantum_transformer (T_QGate name _ _ inv ncf f) = f $
\ws vs c -> without_controls_if ncf $ do
(ws', vs') <- named_gate_qulist name inv ws vs `controlled` c
return (ws', vs', c)
classical_to_quantum_transformer (T_QRot name _ _ inv t ncf f) = f $
\ws vs c -> without_controls_if ncf $ do
(ws', vs') <- named_rotation_qulist name inv t ws vs `controlled` c
return (ws', vs', c)
classical_to_quantum_transformer (T_GPhase t ncf f) = f $
\q c -> without_controls_if ncf $ do
global_phase_anchored_list t (map fix_endpoint q) `controlled` c
return c
where
fix_endpoint (Endpoint_Qubit q) = (Endpoint_Qubit q)
fix_endpoint (Endpoint_Bit q) = (Endpoint_Qubit q)
classical_to_quantum_transformer (T_QInit b ncf f) = f $
without_controls_if ncf $ do
w <- qinit_qubit b
return w
classical_to_quantum_transformer (T_QTerm b ncf f) = f $
\w -> without_controls_if ncf $ do
qterm_qubit b w
return ()
classical_to_quantum_transformer (T_QDiscard f) = f $
\w -> do
qdiscard_qubit w
return ()
classical_to_quantum_transformer (T_Subroutine n inv ncf scf ws_pat a1_pat vs_pat a2_pat repeat f) = f $
\namespace ws c -> without_controls_if ncf $ do
provide_subroutines namespace
v <- subroutine n inv scf repeat ws_pat a1_pat vs_pat a2_pat (map fix_endpoint ws) `controlled` c
return (map fix_endpoint v,c)
where
fix_endpoint (Endpoint_Qubit q) = Endpoint_Qubit q
fix_endpoint (Endpoint_Bit q) =
error "classical_to_quantum: classical subroutine not permitted"
classical_to_quantum_transformer (T_Comment s inv f) = f $
\ws -> do
comment_label s inv [ (fix_endpoint e, s) | (e,s) <- ws ]
return ()
where
fix_endpoint (Endpoint_Qubit q) = wire_of_qubit q
fix_endpoint (Endpoint_Bit q) = wire_of_qubit q
classical_to_quantum_unary :: (QCData qa, QCData qb) => (qa -> Circ qb) -> (QType qa -> Circ (QType qb))
classical_to_quantum_unary f x = transform_unary_shape classical_to_quantum_transformer f shape x
where
shape = qcdata_makeshape (dummy :: qa) qubit qubit x
> classical_to_quantum : : ( QCData qa ) = > Circ qa - > Circ ( QType qa )
> classical_to_quantum : : ( QCData qa , QCData qb ) = > ( qa - > Circ qb ) - > ( QType qa - > Circ ( QType qb ) )
> classical_to_quantum : : ( QCData qa , QCData qb , QCData qc ) = > ( qa - > qb - > Circ qc ) - > ( QType qa - > QType qb - > Circ ( QType qc ) )
classical_to_quantum :: (QCData qa, QCData qb, QCurry qfun qa qb, QCurry qfun' (QType qa) (QType qb)) => qfun -> qfun'
classical_to_quantum f = g where
f1 = quncurry f
g1 = classical_to_quantum_unary f1
g = qcurry g1
circuit ( however , the circuit should be one - to - one , i.e. , no
classical_to_reversible :: (QCData qa, QCData qb) => (qa -> Circ qb) -> ((qa,qb) -> Circ (qa,qb))
classical_to_reversible f (input, target) = do
with_computed (f input) $ \output -> do
controlled_not target output
return (input, target)
|
601a9be222c518686bc094bc4cccca753effda55186e8f9da2446c6b655b8b70 | digital-asset/ghc | T13075.hs | {-# LANGUAGE BangPatterns #-}
module Main where
!(Just x) = Nothing
main = putStrLn "hi there!"
| null | https://raw.githubusercontent.com/digital-asset/ghc/323dc6fcb127f77c08423873efc0a088c071440a/testsuite/tests/typecheck/should_fail/T13075.hs | haskell | # LANGUAGE BangPatterns # |
module Main where
!(Just x) = Nothing
main = putStrLn "hi there!"
|
b697940926d109c2a088a9d4a2d16f58cdd87465ab81b3e8162f61fe50329fe8 | tweag/ormolu | multiple-guards.hs | foo :: Int -> Int
foo x | x == 5 = 10
| otherwise = 12
bar :: Int -> Int
bar x
| x == 5 = foo x
+ foo 10
| x == 6 = foo x
+ foo 20
| otherwise = foo 100
| null | https://raw.githubusercontent.com/tweag/ormolu/714b98e7f27c7d14ecfd5d2c19443f14d6b9bfc9/data/examples/declaration/value/function/multiple-guards.hs | haskell | foo :: Int -> Int
foo x | x == 5 = 10
| otherwise = 12
bar :: Int -> Int
bar x
| x == 5 = foo x
+ foo 10
| x == 6 = foo x
+ foo 20
| otherwise = foo 100
| |
d24b85e4391b37018341052fc8e777dd1f935aca5e02ec2fe46febce0a9550f4 | janestreet/krb | krb_result.ml | open! Core
type 'a t = ('a, Krb_error.t) Result.t
let to_or_error ?context ~info result =
Result.map_error result ~f:(fun code ->
let krb_error = Krb_error.to_string code ?context ~info in
Error.create_s [%message "" ~_:(krb_error : string) (code : int32)])
;;
| null | https://raw.githubusercontent.com/janestreet/krb/1105ba1e8b836f80f09e663bc1b4233cf2607e7b/internal/krb_result.ml | ocaml | open! Core
type 'a t = ('a, Krb_error.t) Result.t
let to_or_error ?context ~info result =
Result.map_error result ~f:(fun code ->
let krb_error = Krb_error.to_string code ?context ~info in
Error.create_s [%message "" ~_:(krb_error : string) (code : int32)])
;;
| |
144717e5fde40cd0c17c17cdd35b8b86777fcfbe907ec6af7766d7671f7e87ab | metabase/metabase | http_action.clj | (ns metabase.actions.http-action
(:require
[cheshire.core :as json]
[clj-http.client :as http]
[clojure.string :as str]
[metabase.driver.common.parameters :as params]
[metabase.driver.common.parameters.parse :as params.parse]
[metabase.query-processor.error-type :as qp.error-type]
[metabase.util :as u]
[metabase.util.i18n :refer [tru]]
[metabase.util.log :as log])
(:import
(com.fasterxml.jackson.databind ObjectMapper)
(net.thisptr.jackson.jq BuiltinFunctionLoader JsonQuery Output Scope Versions)))
(set! *warn-on-reflection* true)
(defonce ^:private root-scope
(delay
(let [scope (Scope/newEmptyScope)]
(.loadFunctions (BuiltinFunctionLoader/getInstance) Versions/JQ_1_6 scope))))
(defonce ^:private object-mapper
(delay (ObjectMapper.)))
Largely copied from sql drivers param substitute .
May go away if parameters substitution is taken out of query - processing / db dependency
(declare substitute*)
(defn- substitute-param [param->value [sql missing] _in-optional? {:keys [k]}]
(if-not (contains? param->value k)
[sql (conj missing k)]
(let [v (get param->value k)]
(cond
(= params/no-value v)
[sql (conj missing k)]
:else
[(str sql v) missing]))))
(defn- substitute-optional [param->value [sql missing] {subclauses :args}]
(let [[opt-sql opt-missing] (substitute* param->value subclauses true)]
(if (seq opt-missing)
[sql missing]
[(str sql opt-sql) missing])))
(defn- substitute*
"Returns a sequence of `[replaced-sql-string jdbc-args missing-parameters]`."
[param->value parsed in-optional?]
(reduce
(fn [[sql missing] x]
(cond
(string? x)
[(str sql x) missing]
(params/Param? x)
(substitute-param param->value [sql missing] in-optional? x)
(params/Optional? x)
(substitute-optional param->value [sql missing] x)))
nil
parsed))
(defn substitute
"Substitute `Optional` and `Param` objects in a `parsed-template`, a sequence of parsed string fragments and tokens, with
the values from the map `param->value` (using logic from `substitution` to decide what replacement SQL should be
generated).
(substitute [\"=\" (param \"bird_type\")]
{\"bird_type\" \"Steller's Jay\"})
- > \" = 's Jay\ " "
[parsed-template param->value]
(log/tracef "Substituting params\n%s\nin template\n%s" (u/pprint-to-str param->value) (u/pprint-to-str parsed-template))
(let [[sql missing] (try
(substitute* param->value parsed-template false)
(catch Throwable e
(throw (ex-info (tru "Unable to substitute parameters: {0}" (ex-message e))
{:type (or (:type (ex-data e)) qp.error-type/qp)
:params param->value
:parsed-query parsed-template}
e))))]
(log/tracef "=>%s" sql)
(when (seq missing)
(throw (ex-info (tru "Cannot call the service: missing required parameters: {0}" (set missing))
{:type qp.error-type/missing-required-parameter
:missing missing})))
(str/trim sql)))
(defn- parse-and-substitute [s params->value]
(when s
(-> s
params.parse/parse
(substitute params->value))))
;;
(deftype ActionOutput [results]
Output
(emit [_ x]
(vswap! results conj (str x))))
(defn apply-json-query
"Executes a jq query on [[object]]."
[object jq-query]
TODO this is pretty ineficient . We parse with ` : as : , then reencode within a response
I could n't find a way to get JSONNode out of cheshire , so we fall back to .
Should be added explicitly to deps.edn ?
(let [json-node (.readTree ^ObjectMapper @object-mapper (json/generate-string object))
vresults (volatile! [])
output (ActionOutput. vresults)
expr (JsonQuery/compile jq-query Versions/JQ_1_6)
;; might need to Scope childScope = Scope.newChildScope(rootScope); if root-scope can be modified by expression
_ (.apply expr @root-scope json-node output)
results @vresults]
(if (<= (count results) 1)
(first results)
(throw (ex-info (tru "Too many results returned: {0}" (pr-str results)) {:jq-query jq-query :results results})))))
(defn execute-http-action!
"Calls an http endpoint based on action and params"
[action params->value]
(try
(let [{:keys [method url body headers]} (:template action)
request {:method (keyword method)
:url (parse-and-substitute url params->value)
:accept :json
:content-type :json
:throw-exceptions false
:headers (merge
;; TODO maybe we want to default Agent here? Maybe Origin/Referer?
{"X-Metabase-Action" (:name action)}
(-> headers
(parse-and-substitute params->value)
(json/decode)))
:body (parse-and-substitute body params->value)}
response (-> (http/request request)
(select-keys [:body :headers :status])
(update :body json/decode))
error (json/parse-string (apply-json-query response (or (:error_handle action) ".status >= 400")))]
(log/trace "Response before handle:" response)
(if error
{:status 400
:headers {"Content-Type" "application/json"}
:body (if (boolean? error)
{:remote-status (:status response)}
error)}
(if-some [response (some->> action :response_handle (apply-json-query response))]
{:status 200
:headers {"Content-Type" "application/json"}
:body response}
{:status 204
:body nil})))
(catch Exception e
(throw (ex-info (str "Problem building request: " (ex-message e))
{:template (:template action)}
e)))))
| null | https://raw.githubusercontent.com/metabase/metabase/7e3048bf73f6cb7527579446166d054292166163/src/metabase/actions/http_action.clj | clojure |
might need to Scope childScope = Scope.newChildScope(rootScope); if root-scope can be modified by expression
TODO maybe we want to default Agent here? Maybe Origin/Referer? | (ns metabase.actions.http-action
(:require
[cheshire.core :as json]
[clj-http.client :as http]
[clojure.string :as str]
[metabase.driver.common.parameters :as params]
[metabase.driver.common.parameters.parse :as params.parse]
[metabase.query-processor.error-type :as qp.error-type]
[metabase.util :as u]
[metabase.util.i18n :refer [tru]]
[metabase.util.log :as log])
(:import
(com.fasterxml.jackson.databind ObjectMapper)
(net.thisptr.jackson.jq BuiltinFunctionLoader JsonQuery Output Scope Versions)))
(set! *warn-on-reflection* true)
(defonce ^:private root-scope
(delay
(let [scope (Scope/newEmptyScope)]
(.loadFunctions (BuiltinFunctionLoader/getInstance) Versions/JQ_1_6 scope))))
(defonce ^:private object-mapper
(delay (ObjectMapper.)))
Largely copied from sql drivers param substitute .
May go away if parameters substitution is taken out of query - processing / db dependency
(declare substitute*)
(defn- substitute-param [param->value [sql missing] _in-optional? {:keys [k]}]
(if-not (contains? param->value k)
[sql (conj missing k)]
(let [v (get param->value k)]
(cond
(= params/no-value v)
[sql (conj missing k)]
:else
[(str sql v) missing]))))
(defn- substitute-optional [param->value [sql missing] {subclauses :args}]
(let [[opt-sql opt-missing] (substitute* param->value subclauses true)]
(if (seq opt-missing)
[sql missing]
[(str sql opt-sql) missing])))
(defn- substitute*
"Returns a sequence of `[replaced-sql-string jdbc-args missing-parameters]`."
[param->value parsed in-optional?]
(reduce
(fn [[sql missing] x]
(cond
(string? x)
[(str sql x) missing]
(params/Param? x)
(substitute-param param->value [sql missing] in-optional? x)
(params/Optional? x)
(substitute-optional param->value [sql missing] x)))
nil
parsed))
(defn substitute
"Substitute `Optional` and `Param` objects in a `parsed-template`, a sequence of parsed string fragments and tokens, with
the values from the map `param->value` (using logic from `substitution` to decide what replacement SQL should be
generated).
(substitute [\"=\" (param \"bird_type\")]
{\"bird_type\" \"Steller's Jay\"})
- > \" = 's Jay\ " "
[parsed-template param->value]
(log/tracef "Substituting params\n%s\nin template\n%s" (u/pprint-to-str param->value) (u/pprint-to-str parsed-template))
(let [[sql missing] (try
(substitute* param->value parsed-template false)
(catch Throwable e
(throw (ex-info (tru "Unable to substitute parameters: {0}" (ex-message e))
{:type (or (:type (ex-data e)) qp.error-type/qp)
:params param->value
:parsed-query parsed-template}
e))))]
(log/tracef "=>%s" sql)
(when (seq missing)
(throw (ex-info (tru "Cannot call the service: missing required parameters: {0}" (set missing))
{:type qp.error-type/missing-required-parameter
:missing missing})))
(str/trim sql)))
(defn- parse-and-substitute [s params->value]
(when s
(-> s
params.parse/parse
(substitute params->value))))
(deftype ActionOutput [results]
Output
(emit [_ x]
(vswap! results conj (str x))))
(defn apply-json-query
"Executes a jq query on [[object]]."
[object jq-query]
TODO this is pretty ineficient . We parse with ` : as : , then reencode within a response
I could n't find a way to get JSONNode out of cheshire , so we fall back to .
Should be added explicitly to deps.edn ?
(let [json-node (.readTree ^ObjectMapper @object-mapper (json/generate-string object))
vresults (volatile! [])
output (ActionOutput. vresults)
expr (JsonQuery/compile jq-query Versions/JQ_1_6)
_ (.apply expr @root-scope json-node output)
results @vresults]
(if (<= (count results) 1)
(first results)
(throw (ex-info (tru "Too many results returned: {0}" (pr-str results)) {:jq-query jq-query :results results})))))
(defn execute-http-action!
"Calls an http endpoint based on action and params"
[action params->value]
(try
(let [{:keys [method url body headers]} (:template action)
request {:method (keyword method)
:url (parse-and-substitute url params->value)
:accept :json
:content-type :json
:throw-exceptions false
:headers (merge
{"X-Metabase-Action" (:name action)}
(-> headers
(parse-and-substitute params->value)
(json/decode)))
:body (parse-and-substitute body params->value)}
response (-> (http/request request)
(select-keys [:body :headers :status])
(update :body json/decode))
error (json/parse-string (apply-json-query response (or (:error_handle action) ".status >= 400")))]
(log/trace "Response before handle:" response)
(if error
{:status 400
:headers {"Content-Type" "application/json"}
:body (if (boolean? error)
{:remote-status (:status response)}
error)}
(if-some [response (some->> action :response_handle (apply-json-query response))]
{:status 200
:headers {"Content-Type" "application/json"}
:body response}
{:status 204
:body nil})))
(catch Exception e
(throw (ex-info (str "Problem building request: " (ex-message e))
{:template (:template action)}
e)))))
|
b8059efeef83d21780526a04a55ff0ee34edf1de755384027ea99f2726027bd9 | liquidz/iced-nrepl | transport.clj | (ns iced.nrepl.transport)
(if (find-ns 'clojure.tools.nrepl)
(require
'[clojure.tools.nrepl.misc :refer [response-for]]
'[clojure.tools.nrepl.transport :as transport])
(require
'[nrepl.misc :refer [response-for]]
'[nrepl.transport :as transport]))
(def send-list-limit 50)
(defn send!
[msg m]
(let [{:keys [transport]} msg]
(transport/send transport (response-for msg m))))
| null | https://raw.githubusercontent.com/liquidz/iced-nrepl/f63ae00385be912196fceb9d26e174cc5d68295a/src/iced/nrepl/transport.clj | clojure | (ns iced.nrepl.transport)
(if (find-ns 'clojure.tools.nrepl)
(require
'[clojure.tools.nrepl.misc :refer [response-for]]
'[clojure.tools.nrepl.transport :as transport])
(require
'[nrepl.misc :refer [response-for]]
'[nrepl.transport :as transport]))
(def send-list-limit 50)
(defn send!
[msg m]
(let [{:keys [transport]} msg]
(transport/send transport (response-for msg m))))
| |
ed9b827f49439a59475f8901588ad1763ff550b2977e6a9ab05e2b129895cdae | valderman/haste-compiler | IntOverflow.hs | # LANGUAGE CPP #
module Tests.IntOverflow where
import Data.Int
#ifdef __HASTE__
type Intish = Int
#else
type Intish = Int32
#endif
# NOINLINE int_max #
int_max :: Intish
int_max = 0x7fffffff
# NOINLINE int_min #
int_min :: Intish
int_min = -2147483648
# NOINLINE num #
num :: IO Intish
num = return 1999999999
runTest :: IO [Intish]
runTest = do
num' <- num
return [
int_max + 27,
int_min - 1,
num' * num']
| null | https://raw.githubusercontent.com/valderman/haste-compiler/47d942521570eb4b8b6828b0aa38e1f6b9c3e8a8/Tests/IntOverflow.hs | haskell | # LANGUAGE CPP #
module Tests.IntOverflow where
import Data.Int
#ifdef __HASTE__
type Intish = Int
#else
type Intish = Int32
#endif
# NOINLINE int_max #
int_max :: Intish
int_max = 0x7fffffff
# NOINLINE int_min #
int_min :: Intish
int_min = -2147483648
# NOINLINE num #
num :: IO Intish
num = return 1999999999
runTest :: IO [Intish]
runTest = do
num' <- num
return [
int_max + 27,
int_min - 1,
num' * num']
| |
1e49fc748cb7e58d0a560d59d1b257cad834d88c4831efe12736338055a3b9dd | aphyr/clj-antlr | static.clj | (ns clj-antlr.static
"Interacts with statically compiled antlr classes."
(:require [clojure.reflect :refer [reflect]])
(:import (org.antlr.v4.runtime.tree ParseTreeVisitor)))
(defmacro parser
"Constructs a parser over a token stream."
[parser-class tokens]
`(new ~parser-class ~tokens))
(defmacro lexer
"Given a lexer class, returns a lexer over a string or stream."
[lexer-class s]
`(new ~lexer-class (char-stream ~s)))
(defn signature
"The signature of a reflected method."
[meth]
(select-keys meth [:name :parameter-types :return-type]))
(defn method?
"Is a reflection member a method?"
[member]
(boolean (:return-type member)))
(defn method-signatures
"Returns a list of method signatures for a class."
[class]
(->> class
reflect
:members
(filter method?)
(map signature)))
(defn unique-methods
"Given a class, returns specs for methods which are defined in that class and
are *not* defined in its supers."
[class]
; Compute all superclass/interface method signatures
(let [supes (->> class
supers
(mapcat method-signatures)
set)]
; Take methods on this class
(->> class reflect :members (filter method?)
; And drop any which have corresponding signatures in supers
(remove (comp supes signature)))))
(defn ->class
"Converts symbols and strings into classes."
[sym]
(Class/forName (str sym)))
(defn visitor-method-children
"Given a visitor method, returns a map of children to forms which visit
those children, e.g.
{:kittens (map (partial visit this (.kittens ctx)))}"
[sig]
(->> sig
; Find the parser context this method accepts
:parameter-types
first
->class
; Figure out what methods that context uniquely defines
unique-methods
; Select zero-arities
(filter (comp empty? :parameter-types))
Three possibilities forms :
- Returns a TerminalNode : ( text ( .FOO ctx ) )
- Returns a List : ( map ( partial visit this ) ( .foo ) )
- Returns a thing : ( visit this ( .foo ) )
(map (fn [meth]
(let [child (:name meth)
acc (symbol (str "." child))
value (list acc 'ctx)]
[(keyword child)
(list `when-let `[~'v ~value]
(condp = (:return-type meth)
; Multiple children
'java.util.List
`(map (∂ visit ~'this) ~'v)
; Terminal node
'org.antlr.v4.runtime.tree.TerminalNode
`(text ~'v)
; Descend
`(visit ~'this ~'v)))])))
; Make a map out of it.
(into {})))
(defn degenerate-visitor-spec
"A reify spec for a particular visitor method. Returns code which, when
used in a visitor, handles that node by returning a hashmap of its children.
When a node has only one children, returns that child's value instead."
[sig]
(let [children (visitor-method-children sig)]
Construct a reify spec for this method
(list (:name sig)
'[this ctx]
(condp = (count children)
; When there are no children, return the text at this node.
0 `(text ~'ctx)
With one child , proxy directly to the child node .
1 (first (vals children))
; Otherwise, make a map of our children
children))))
(defn visitor-spec
"Helps compile reify functions specs for a particular visitor method. In its
two-arity form, generates one of a few common parse helpers. In its n-arity
form, passes through user-specified code."
([sig & args]
(if (vector? (first args))
; By default, pass through user code unchanged.
`(~(:name sig) ~@args)
(case (first args)
This builtin chooses the first non - nil branch .
:first-alternative
(let [_children (vals (visitor-method-children sig))]
`(~(:name sig) [~'this ~'ctx] (visit ~'this (child ~'ctx 0))))))))
(defmacro visitor
"Helps compile a visitor for an antlr grammar. Takes the name of a visitor
interface, followed by several method bodies. Given a grammar with a node
like
json: object # JsonObject
| array # JsonArray
;
Antlr will compile an interface to traverse the parse tree like:
public interface JsonVisitor<T> extends ParseTreeVisitor<T> {
T visitJsonObject(JsonParser.JsonObjectContext ctx);
T visitJsonArray(JsonParser.JsonArrayContext ctx);
}
You might want to define a function to handle each of these branches. Here
'this refers to the visitor itself, and 'context refers to the parser context
at the current node. You can traverse the tree by calling (.visit this node).
(visitor JsonVisitor
(JsonObject [this context] (prn :got (.getText .object c)))
(JsonArray [this context] (.visit this (.array context))))"
[interface-name & specs]
(let [interface (eval interface-name)
; Reflect on the visitor interface
reflection (reflect interface)
; Index the interface methods by name
iface-methods (->> reflection
:members
(map (fn [m] [(:name m) m]))
(into {}))
; Translate provided specs into reify specs
reify-specs (->> specs
(map (fn [[node- & args]]
(let [spec-name (symbol (str "visit" node-))
method (get iface-methods spec-name)]
; Pull up red leader!
(when-not method
(throw (IllegalArgumentException.
(str "Can't override nonexistent method "
spec-name
" of interface "
interface-name))))
; Reify method spec
(apply visitor-spec method args)))))
; Fill in unfulfilled methods for the interface
provided-spec-names (set (map first reify-specs))
default-specs (->> reflection
:members
(remove (comp provided-spec-names :name))
; Sort for convenience in reading code
(sort-by :name)
(map degenerate-visitor-spec))]
`(reify ~interface-name
~@reify-specs
~@default-specs
Adapted from
(visit [~'this ~'tree] (.accept ~'tree ~'this))
(visitChildren [~'this ~'node]
(map (∂ visit ~'this) (children ~'node)))
(visitTerminal [~'this ~'node])
(visitErrorNode [~'this ~'node]))))
(defmacro visit-string
"Given a lexer class, parser class, a visitor, and a string, tokenizes,
parses, and visits the string, returning the result of the visitor.
Example:
(visit-string JsonLexer JsonParser (JsonInitBaseVisitor.) .json \"[1,2,3]\""
[lexer-class parser-class visitor root-node string]
`(->> ~string
(lexer ~lexer-class)
tokens
(parser ~parser-class)
~root-node
(.visit ~(vary-meta visitor assoc :tag `ParseTreeVisitor))))
| null | https://raw.githubusercontent.com/aphyr/clj-antlr/d82f80e4d63341d4e04c81dc01a23321e2a2fc21/src/clj_antlr/static.clj | clojure | Compute all superclass/interface method signatures
Take methods on this class
And drop any which have corresponding signatures in supers
Find the parser context this method accepts
Figure out what methods that context uniquely defines
Select zero-arities
Multiple children
Terminal node
Descend
Make a map out of it.
When there are no children, return the text at this node.
Otherwise, make a map of our children
By default, pass through user code unchanged.
Reflect on the visitor interface
Index the interface methods by name
Translate provided specs into reify specs
Pull up red leader!
Reify method spec
Fill in unfulfilled methods for the interface
Sort for convenience in reading code | (ns clj-antlr.static
"Interacts with statically compiled antlr classes."
(:require [clojure.reflect :refer [reflect]])
(:import (org.antlr.v4.runtime.tree ParseTreeVisitor)))
(defmacro parser
"Constructs a parser over a token stream."
[parser-class tokens]
`(new ~parser-class ~tokens))
(defmacro lexer
"Given a lexer class, returns a lexer over a string or stream."
[lexer-class s]
`(new ~lexer-class (char-stream ~s)))
(defn signature
"The signature of a reflected method."
[meth]
(select-keys meth [:name :parameter-types :return-type]))
(defn method?
"Is a reflection member a method?"
[member]
(boolean (:return-type member)))
(defn method-signatures
"Returns a list of method signatures for a class."
[class]
(->> class
reflect
:members
(filter method?)
(map signature)))
(defn unique-methods
"Given a class, returns specs for methods which are defined in that class and
are *not* defined in its supers."
[class]
(let [supes (->> class
supers
(mapcat method-signatures)
set)]
(->> class reflect :members (filter method?)
(remove (comp supes signature)))))
(defn ->class
"Converts symbols and strings into classes."
[sym]
(Class/forName (str sym)))
(defn visitor-method-children
"Given a visitor method, returns a map of children to forms which visit
those children, e.g.
{:kittens (map (partial visit this (.kittens ctx)))}"
[sig]
(->> sig
:parameter-types
first
->class
unique-methods
(filter (comp empty? :parameter-types))
Three possibilities forms :
- Returns a TerminalNode : ( text ( .FOO ctx ) )
- Returns a List : ( map ( partial visit this ) ( .foo ) )
- Returns a thing : ( visit this ( .foo ) )
(map (fn [meth]
(let [child (:name meth)
acc (symbol (str "." child))
value (list acc 'ctx)]
[(keyword child)
(list `when-let `[~'v ~value]
(condp = (:return-type meth)
'java.util.List
`(map (∂ visit ~'this) ~'v)
'org.antlr.v4.runtime.tree.TerminalNode
`(text ~'v)
`(visit ~'this ~'v)))])))
(into {})))
(defn degenerate-visitor-spec
"A reify spec for a particular visitor method. Returns code which, when
used in a visitor, handles that node by returning a hashmap of its children.
When a node has only one children, returns that child's value instead."
[sig]
(let [children (visitor-method-children sig)]
Construct a reify spec for this method
(list (:name sig)
'[this ctx]
(condp = (count children)
0 `(text ~'ctx)
With one child , proxy directly to the child node .
1 (first (vals children))
children))))
(defn visitor-spec
"Helps compile reify functions specs for a particular visitor method. In its
two-arity form, generates one of a few common parse helpers. In its n-arity
form, passes through user-specified code."
([sig & args]
(if (vector? (first args))
`(~(:name sig) ~@args)
(case (first args)
This builtin chooses the first non - nil branch .
:first-alternative
(let [_children (vals (visitor-method-children sig))]
`(~(:name sig) [~'this ~'ctx] (visit ~'this (child ~'ctx 0))))))))
(defmacro visitor
"Helps compile a visitor for an antlr grammar. Takes the name of a visitor
interface, followed by several method bodies. Given a grammar with a node
like
json: object # JsonObject
| array # JsonArray
Antlr will compile an interface to traverse the parse tree like:
public interface JsonVisitor<T> extends ParseTreeVisitor<T> {
}
You might want to define a function to handle each of these branches. Here
'this refers to the visitor itself, and 'context refers to the parser context
at the current node. You can traverse the tree by calling (.visit this node).
(visitor JsonVisitor
(JsonObject [this context] (prn :got (.getText .object c)))
(JsonArray [this context] (.visit this (.array context))))"
[interface-name & specs]
(let [interface (eval interface-name)
reflection (reflect interface)
iface-methods (->> reflection
:members
(map (fn [m] [(:name m) m]))
(into {}))
reify-specs (->> specs
(map (fn [[node- & args]]
(let [spec-name (symbol (str "visit" node-))
method (get iface-methods spec-name)]
(when-not method
(throw (IllegalArgumentException.
(str "Can't override nonexistent method "
spec-name
" of interface "
interface-name))))
(apply visitor-spec method args)))))
provided-spec-names (set (map first reify-specs))
default-specs (->> reflection
:members
(remove (comp provided-spec-names :name))
(sort-by :name)
(map degenerate-visitor-spec))]
`(reify ~interface-name
~@reify-specs
~@default-specs
Adapted from
(visit [~'this ~'tree] (.accept ~'tree ~'this))
(visitChildren [~'this ~'node]
(map (∂ visit ~'this) (children ~'node)))
(visitTerminal [~'this ~'node])
(visitErrorNode [~'this ~'node]))))
(defmacro visit-string
"Given a lexer class, parser class, a visitor, and a string, tokenizes,
parses, and visits the string, returning the result of the visitor.
Example:
(visit-string JsonLexer JsonParser (JsonInitBaseVisitor.) .json \"[1,2,3]\""
[lexer-class parser-class visitor root-node string]
`(->> ~string
(lexer ~lexer-class)
tokens
(parser ~parser-class)
~root-node
(.visit ~(vary-meta visitor assoc :tag `ParseTreeVisitor))))
|
d5e99ebace4447400f9411bf10edd7a1a3b2780600053d34f8d831c4334cc262 | schemeorg-community/index.scheme.org | srfi.63.scm | (((name . "array?") (signature lambda (obj) boolean?) (tags pure predicate))
((name . "equal?") (signature lambda (obj1 obj2) boolean?) (tags pure))
((name . "array-rank") (signature lambda (obj) integer?) (tags pure))
((name . "array-dimnesions")
(signature lambda ((array? array)) list?)
(tags pure))
((name . "make-array")
(signature lambda ((array? prototype) (integer? k1) ...) array?)
(tags pure))
((name . "make-shared-array")
(signature
lambda
((array? array) (procedure? mapper) (integer? k1) ...)
array?)
(subsigs (mapper (lambda ((integer? i1) ...) list?)))
(tags pure))
((name . "list->array")
(signature lambda ((integer? rank) (array? proto) (list? list)) array?)
(tags pure))
((name . "array->list") (signature lambda ((array? array)) *) (tags pure))
((name . "vector->array")
(signature lambda ((vector? vect) (array? proto) (integer? dim1) ...) array?)
(tags pure))
((name . "array->vector")
(signature lambda ((array? array)) vector?)
(tags pure))
((name . "array-in-bounds?")
(signature lambda ((array? array) (integer? index1) ...) boolean?)
(tags pure))
((name . "array-ref")
(signature lambda ((array? array) (integer? index1) ...) *)
(tags pure))
((name . "array-set!")
(signature lambda ((array? array) obj (integer? index1) ...) undefined))
((name . "A:floC128b")
(signature case-lambda (() array?) (((complex? z)) array?))
(tags pure))
((name . "A:floC64b")
(signature case-lambda (() array?) (((complex? z)) array?))
(tags pure))
((name . "A:floC32b")
(signature case-lambda (() array?) (((complex? z)) array?))
(tags pure))
((name . "A:floC16b")
(signature case-lambda (() array?) (((complex? z)) array?))
(tags pure))
((name . "A:floR128b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floR64b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floR32b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floR16b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floQ128b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floQ64b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floQ32b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:fixZ64b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixZ32b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixZ16b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixZ8b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixN64b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixN32b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixN16b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixN8b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:bool")
(signature case-lambda (() array?) (((boolean? b)) array?))
(tags pure)))
| null | https://raw.githubusercontent.com/schemeorg-community/index.scheme.org/32e1afcfe423a158ac8ce014f5c0b8399d12a1ea/types/srfi.63.scm | scheme | (((name . "array?") (signature lambda (obj) boolean?) (tags pure predicate))
((name . "equal?") (signature lambda (obj1 obj2) boolean?) (tags pure))
((name . "array-rank") (signature lambda (obj) integer?) (tags pure))
((name . "array-dimnesions")
(signature lambda ((array? array)) list?)
(tags pure))
((name . "make-array")
(signature lambda ((array? prototype) (integer? k1) ...) array?)
(tags pure))
((name . "make-shared-array")
(signature
lambda
((array? array) (procedure? mapper) (integer? k1) ...)
array?)
(subsigs (mapper (lambda ((integer? i1) ...) list?)))
(tags pure))
((name . "list->array")
(signature lambda ((integer? rank) (array? proto) (list? list)) array?)
(tags pure))
((name . "array->list") (signature lambda ((array? array)) *) (tags pure))
((name . "vector->array")
(signature lambda ((vector? vect) (array? proto) (integer? dim1) ...) array?)
(tags pure))
((name . "array->vector")
(signature lambda ((array? array)) vector?)
(tags pure))
((name . "array-in-bounds?")
(signature lambda ((array? array) (integer? index1) ...) boolean?)
(tags pure))
((name . "array-ref")
(signature lambda ((array? array) (integer? index1) ...) *)
(tags pure))
((name . "array-set!")
(signature lambda ((array? array) obj (integer? index1) ...) undefined))
((name . "A:floC128b")
(signature case-lambda (() array?) (((complex? z)) array?))
(tags pure))
((name . "A:floC64b")
(signature case-lambda (() array?) (((complex? z)) array?))
(tags pure))
((name . "A:floC32b")
(signature case-lambda (() array?) (((complex? z)) array?))
(tags pure))
((name . "A:floC16b")
(signature case-lambda (() array?) (((complex? z)) array?))
(tags pure))
((name . "A:floR128b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floR64b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floR32b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floR16b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floQ128b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floQ64b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:floQ32b")
(signature case-lambda (() array?) (((real? z)) array?))
(tags pure))
((name . "A:fixZ64b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixZ32b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixZ16b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixZ8b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixN64b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixN32b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixN16b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:fixN8b")
(signature case-lambda (() array?) (((integer? z)) array?))
(tags pure))
((name . "A:bool")
(signature case-lambda (() array?) (((boolean? b)) array?))
(tags pure)))
| |
ee00ee31a9cbe66d170002d80c74d28cb41a585380c32fd52508b6733a0b29e7 | lymar/hastache | function.hs | #!/usr/local/bin/runhaskell
import Text.Hastache
import Text.Hastache.Context
import qualified Data.Text.Lazy.IO as TL
main = hastacheStr defaultConfig (encodeStr template) (mkStrContext context)
>>= TL.putStrLn
-- begin example
template = "Hello, {{#reverse}}world{{/reverse}}!"
context "reverse" = MuLambda (reverse . decodeStr)
| null | https://raw.githubusercontent.com/lymar/hastache/cd299ff1ac4c35259fbd333ea7fa9b3c280b9ff9/examples/function.hs | haskell | begin example | #!/usr/local/bin/runhaskell
import Text.Hastache
import Text.Hastache.Context
import qualified Data.Text.Lazy.IO as TL
main = hastacheStr defaultConfig (encodeStr template) (mkStrContext context)
>>= TL.putStrLn
template = "Hello, {{#reverse}}world{{/reverse}}!"
context "reverse" = MuLambda (reverse . decodeStr)
|
a3d5ab754a95755cb2c0934f9388852f620f8f5eecc9bde5b3e1f232aca837af | jafingerhut/dolly | track.clj | Copyright ( c ) , 2012 . All rights reserved . The use and
distribution terms for this software are covered by the Eclipse
;; Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this
;; distribution. By using this software in any fashion, you are
;; agreeing to be bound by the terms of this license. You must not
;; remove this notice, or any other, from this software.
(ns ^{:author "Stuart Sierra"
:doc "Dependency tracker which can compute which namespaces need to be
reloaded after files have changed. This is the low-level
implementation that requires you to find the namespace dependencies
yourself: most uses will interact with the wrappers in
dolly.copieddeps.tns.clojure.tools.namespace.file and dolly.copieddeps.tns.clojure.tools.namespace.dir or the
public API in clojure.tools.namespace.repl."}
dolly.copieddeps.tns.clojure.tools.namespace.track
(:refer-clojure :exclude (remove))
(:require [clojure.set :as set]
[dolly.copieddeps.tns.clojure.tools.namespace.dependency :as dep]))
(defn- remove-deps [deps names]
(reduce dep/remove-node deps names))
(defn- add-deps [deps depmap]
(reduce (fn [ds [name dependencies]]
(reduce (fn [g dep] (dep/depend g name dep))
ds dependencies))
deps depmap))
(defn- update-deps [deps depmap]
(-> deps
(remove-deps (keys depmap))
(add-deps depmap)))
(defn- affected-namespaces [deps names]
(set/union (set names)
(dep/transitive-dependents-set deps names)))
(defn add
"Returns an updated dependency tracker with new/updated namespaces.
Depmap is a map describing the new or modified namespaces. Keys in
the map are namespace names (symbols). Values in the map are sets of
symbols naming the direct dependencies of each namespace. For
example, assuming these ns declarations:
(ns alpha (:require beta))
(ns beta (:require gamma delta))
the depmap would look like this:
{alpha #{beta}
beta #{gamma delta}}
After adding new/updated namespaces, the dependency tracker will
have two lists associated with the following keys:
:dolly.copieddeps.tns.clojure.tools.namespace.track/unload
is the list of namespaces that need to be removed
:dolly.copieddeps.tns.clojure.tools.namespace.track/load
is the list of namespaces that need to be reloaded
To reload namespaces in the correct order, first remove/unload all
namespaces in the 'unload' list, then (re)load all namespaces in the
'load' list. The dolly.copieddeps.tns.clojure.tools.namespace.reload namespace has
functions to do this."
[tracker depmap]
(let [{load ::load
unload ::unload
deps ::deps
:or {load (), unload (), deps (dep/graph)}} tracker
new-deps (update-deps deps depmap)
changed (affected-namespaces new-deps (keys depmap))]
(assoc tracker
::deps new-deps
::unload (distinct
(concat (reverse (sort (dep/topo-comparator deps) changed))
unload))
::load (distinct
(concat (sort (dep/topo-comparator new-deps) changed)
load)))))
(defn remove
"Returns an updated dependency tracker from which the namespaces
(symbols) have been removed. The ::unload and ::load lists are
populated as with 'add'."
[tracker names]
(let [{load ::load
unload ::unload
deps ::deps
:or {load (), unload (), deps (dep/graph)}} tracker
known (set (dep/nodes deps))
removed-names (filter known names)
new-deps (remove-deps deps removed-names)
changed (affected-namespaces deps removed-names)]
(assoc tracker
::deps new-deps
::unload (distinct
(concat (reverse (sort (dep/topo-comparator deps) changed))
unload))
::load (distinct
(filter (complement (set removed-names))
(concat (sort (dep/topo-comparator new-deps) changed)
load))))))
(defn tracker
"Returns a new, empty dependency tracker"
[]
{})
(comment
;; Structure of the namespace tracker map
{;; Dependency graph of namespace names (symbols) as defined in
;; dolly.copieddeps.tns.clojure.tools.namespace.dependency/graph
:dolly.copieddeps.tns.clojure.tools.namespace.track/deps {}
;; Ordered list of namespace names (symbols) that need to be
;; removed to bring the running system into agreement with the
;; source files.
:dolly.copieddeps.tns.clojure.tools.namespace.track/unload ()
;; Ordered list of namespace names (symbols) that need to be
;; (re)loaded to bring the running system into agreement with the
;; source files.
:dolly.copieddeps.tns.clojure.tools.namespace.track/load ()
;; Added by dolly.copieddeps.tns.clojure.tools.namespace.file: Map from source files
;; (java.io.File) to the names (symbols) of namespaces they
;; represent.
:dolly.copieddeps.tns.clojure.tools.namespace.file/filemap {}
Added by dolly.copieddeps.tns.clojure.tools.namespace.dir : Set of source files
;; (java.io.File) which have been seen by this dependency tracker;
;; used to determine when files have been deleted.
:dolly.copieddeps.tns.clojure.tools.namespace.dir/files #{}
Added by dolly.copieddeps.tns.clojure.tools.namespace.dir : Instant when the
;; directories were last scanned, as returned by
System / currentTimeMillis .
:dolly.copieddeps.tns.clojure.tools.namespace.dir/time 1405201862262})
| null | https://raw.githubusercontent.com/jafingerhut/dolly/6dfe7f3bcd58d81fba7793d214230792b6140ffd/src/dolly/copieddeps/tns/clojure/tools/namespace/track.clj | clojure | Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this
distribution. By using this software in any fashion, you are
agreeing to be bound by the terms of this license. You must not
remove this notice, or any other, from this software.
Structure of the namespace tracker map
Dependency graph of namespace names (symbols) as defined in
dolly.copieddeps.tns.clojure.tools.namespace.dependency/graph
Ordered list of namespace names (symbols) that need to be
removed to bring the running system into agreement with the
source files.
Ordered list of namespace names (symbols) that need to be
(re)loaded to bring the running system into agreement with the
source files.
Added by dolly.copieddeps.tns.clojure.tools.namespace.file: Map from source files
(java.io.File) to the names (symbols) of namespaces they
represent.
(java.io.File) which have been seen by this dependency tracker;
used to determine when files have been deleted.
directories were last scanned, as returned by | Copyright ( c ) , 2012 . All rights reserved . The use and
distribution terms for this software are covered by the Eclipse
(ns ^{:author "Stuart Sierra"
:doc "Dependency tracker which can compute which namespaces need to be
reloaded after files have changed. This is the low-level
implementation that requires you to find the namespace dependencies
yourself: most uses will interact with the wrappers in
dolly.copieddeps.tns.clojure.tools.namespace.file and dolly.copieddeps.tns.clojure.tools.namespace.dir or the
public API in clojure.tools.namespace.repl."}
dolly.copieddeps.tns.clojure.tools.namespace.track
(:refer-clojure :exclude (remove))
(:require [clojure.set :as set]
[dolly.copieddeps.tns.clojure.tools.namespace.dependency :as dep]))
(defn- remove-deps [deps names]
(reduce dep/remove-node deps names))
(defn- add-deps [deps depmap]
(reduce (fn [ds [name dependencies]]
(reduce (fn [g dep] (dep/depend g name dep))
ds dependencies))
deps depmap))
(defn- update-deps [deps depmap]
(-> deps
(remove-deps (keys depmap))
(add-deps depmap)))
(defn- affected-namespaces [deps names]
(set/union (set names)
(dep/transitive-dependents-set deps names)))
(defn add
"Returns an updated dependency tracker with new/updated namespaces.
Depmap is a map describing the new or modified namespaces. Keys in
the map are namespace names (symbols). Values in the map are sets of
symbols naming the direct dependencies of each namespace. For
example, assuming these ns declarations:
(ns alpha (:require beta))
(ns beta (:require gamma delta))
the depmap would look like this:
{alpha #{beta}
beta #{gamma delta}}
After adding new/updated namespaces, the dependency tracker will
have two lists associated with the following keys:
:dolly.copieddeps.tns.clojure.tools.namespace.track/unload
is the list of namespaces that need to be removed
:dolly.copieddeps.tns.clojure.tools.namespace.track/load
is the list of namespaces that need to be reloaded
To reload namespaces in the correct order, first remove/unload all
namespaces in the 'unload' list, then (re)load all namespaces in the
'load' list. The dolly.copieddeps.tns.clojure.tools.namespace.reload namespace has
functions to do this."
[tracker depmap]
(let [{load ::load
unload ::unload
deps ::deps
:or {load (), unload (), deps (dep/graph)}} tracker
new-deps (update-deps deps depmap)
changed (affected-namespaces new-deps (keys depmap))]
(assoc tracker
::deps new-deps
::unload (distinct
(concat (reverse (sort (dep/topo-comparator deps) changed))
unload))
::load (distinct
(concat (sort (dep/topo-comparator new-deps) changed)
load)))))
(defn remove
"Returns an updated dependency tracker from which the namespaces
(symbols) have been removed. The ::unload and ::load lists are
populated as with 'add'."
[tracker names]
(let [{load ::load
unload ::unload
deps ::deps
:or {load (), unload (), deps (dep/graph)}} tracker
known (set (dep/nodes deps))
removed-names (filter known names)
new-deps (remove-deps deps removed-names)
changed (affected-namespaces deps removed-names)]
(assoc tracker
::deps new-deps
::unload (distinct
(concat (reverse (sort (dep/topo-comparator deps) changed))
unload))
::load (distinct
(filter (complement (set removed-names))
(concat (sort (dep/topo-comparator new-deps) changed)
load))))))
(defn tracker
"Returns a new, empty dependency tracker"
[]
{})
(comment
:dolly.copieddeps.tns.clojure.tools.namespace.track/deps {}
:dolly.copieddeps.tns.clojure.tools.namespace.track/unload ()
:dolly.copieddeps.tns.clojure.tools.namespace.track/load ()
:dolly.copieddeps.tns.clojure.tools.namespace.file/filemap {}
Added by dolly.copieddeps.tns.clojure.tools.namespace.dir : Set of source files
:dolly.copieddeps.tns.clojure.tools.namespace.dir/files #{}
Added by dolly.copieddeps.tns.clojure.tools.namespace.dir : Instant when the
System / currentTimeMillis .
:dolly.copieddeps.tns.clojure.tools.namespace.dir/time 1405201862262})
|
af707e57f5cd706eecdc96eeba38f512b8b85b51b2cd712f327b13cc0909e16c | esl/escalus | connection_SUITE.erl | %% @doc Tests for escalus_connection
-module(connection_SUITE).
-include_lib("exml/include/exml_stream.hrl").
-include_lib("escalus/include/escalus.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-export([all/0,
groups/0]).
-export([wait/1,
wait_with_handler/1,
receive_stanza_timeout/1,
receive_stanza_timeout_safe/1,
receive_stanza_with_metadata/1,
receive_stanza_pred/1,
receive_stanza_pred_with_handler/1,
receive_stanza_assert/1
]).
%% Common Test callbacks
all() ->
[{group, basic}].
groups() ->
[{basic, [parallel], [wait,
wait_with_handler,
receive_stanza_timeout,
receive_stanza_timeout_safe,
receive_stanza_with_metadata,
receive_stanza_pred,
receive_stanza_pred_with_handler,
receive_stanza_assert]}].
%% Test cases
wait(_Config) ->
Wait for 10 ms , no handlers , no incoming stanzas
ok = escalus_connection:wait(client(), 10).
wait_with_handler(_Config) ->
Handler = handler(fun(Msg) -> escalus_pred:is_chat_message(msg(), Msg) end,
fun(Msg) -> self() ! {handled, Msg} end),
Client = set_received_handlers(client(), [Handler]),
HandledStanza = escalus_stanza:chat_to(my_jid(), msg()),
OtherStanza = escalus_stanza:chat_to(my_jid(), msg2()),
self() ! escalus_connection:stanza_msg(OtherStanza, metadata()),
self() ! escalus_connection:stanza_msg(HandledStanza, metadata()),
ok = escalus_connection:wait(Client, 10),
receive {handled, HandledStanza} -> ok after 0 -> ct:fail("not handled: ~p", [HandledStanza]) end.
receive_stanza_timeout(_Config) ->
Client = client(),
Stanza = escalus_stanza:chat_to(my_jid(), msg()),
erlang:send_after(1000, self(), escalus_connection:stanza_msg(Stanza, metadata())),
Waits for 10 ms , but the stanza is sent later
?assertException(throw, timeout, escalus_connection:receive_stanza(Client, #{timeout => 10})),
The same but with a named timeout message , the two calls below are identical
?assertException(throw, {timeout, msg}, escalus_connection:get_stanza(Client, msg, 10)),
?assertException(throw, {timeout, msg},
escalus_connection:receive_stanza(Client, #{timeout => 10, name => msg})),
Stanza = escalus_connection:receive_stanza(Client, #{}).
receive_stanza_timeout_safe(_Config) ->
Client = client(),
Stanza = escalus_stanza:chat_to(my_jid(), msg()),
erlang:send_after(1000, self(), escalus_connection:stanza_msg(Stanza, metadata())),
Waits for 10 ms , but the stanza is sent later , the two calls below are identical
{error, timeout} = escalus_connection:receive_stanza(Client, #{timeout => 10, safe => true}),
{error, timeout} = escalus_connection:get_stanza_safe(Client, 10),
Stanza = escalus_connection:receive_stanza(Client, #{}).
receive_stanza_with_metadata(_Config) ->
Client = client(),
Stanza = escalus_stanza:chat_to(my_jid(), msg()),
Metadata1 = metadata(),
self() ! escalus_connection:stanza_msg(Stanza, Metadata1),
{Stanza, Metadata1} = escalus_connection:receive_stanza(Client, #{with_metadata => true}),
%% The same test but with a helper function
Metadata2 = metadata(),
self() ! escalus_connection:stanza_msg(Stanza, Metadata2),
{Stanza, Metadata2} = escalus_connection:get_stanza_with_metadata(Client, msg, 1000).
receive_stanza_pred(_Config) ->
Client = client(),
SkippedStanza = escalus_stanza:chat_to(my_jid(), msg()),
Stanza = escalus_stanza:chat_to(my_jid(), msg2()),
Pred = fun(S) -> escalus_pred:is_chat_message(msg2(), S) end,
self() ! escalus_connection:stanza_msg(SkippedStanza, metadata()),
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
The predicate should filter out the first stanza , so we receive only the second one
Stanza = escalus_connection:receive_stanza(Client, #{pred => Pred}),
?assertException(throw, timeout, escalus_connection:receive_stanza(Client, #{timeout => 10})).
receive_stanza_pred_with_handler(_Config) ->
Handler = handler(fun(_) -> true end, fun(Msg) -> self() ! {skipped, Msg} end),
Client = set_received_handlers(client(), [Handler]),
SkippedStanza = escalus_stanza:chat_to(my_jid(), msg()),
Stanza = escalus_stanza:chat_to(my_jid(), msg2()),
Pred = fun(S) -> escalus_pred:is_chat_message(msg2(), S) end,
%% Filtered out by Pred, but handled by Handler
self() ! escalus_connection:stanza_msg(SkippedStanza, metadata()),
?assertException(throw, timeout, escalus_connection:receive_stanza(Client, #{timeout => 10, pred => Pred})),
receive {skipped, SkippedStanza} -> ok after 0 -> ct:fail("not skipped: ~p", [SkippedStanza]) end,
%% Accepted by Pred, so not handled
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
Stanza = escalus_connection:receive_stanza(Client, #{timeout => 10, pred => Pred}),
receive {skipped, Stanza} -> ct:fail("skipped: ~p", [SkippedStanza]) after 0 -> ok end.
receive_stanza_assert(_Config) ->
Client = client(),
Stanza = escalus_stanza:chat_to(my_jid(), msg2()),
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
?assertException(error, {assertion_failed, assert, is_chat_message, _, _, _},
escalus_connection:receive_stanza(Client, #{assert => {is_chat_message, [msg()]}})),
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
Stanza = escalus_connection:receive_stanza(Client, #{assert => {is_chat_message, [msg2()]}}),
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
Stanza = escalus_connection:receive_stanza(Client, #{assert => fun(S) -> S =:= Stanza end}),
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
Stanza = escalus_connection:receive_stanza(Client, #{assert => is_chat_message}).
%% Helpers
handler(Pred, Action) ->
fun(_Client, Msg) ->
case Pred(Msg) of
true -> Action(Msg), true;
false -> false
end
end.
my_jid() -> <<"alice@localhost">>.
msg() -> <<"Message">>.
msg2() -> <<"Message 2">>.
metadata() -> #{recv_timestamp => os:system_time(micro_seconds)}.
client() -> #client{jid = my_jid(), rcv_pid = self(), props = []}.
set_received_handlers(Client, Handlers) ->
Client#client{props = [{received_stanza_handlers, Handlers}]}.
| null | https://raw.githubusercontent.com/esl/escalus/ac5e813ac96c0cdb5d5ac738d63d992f5f948585/test/connection_SUITE.erl | erlang | @doc Tests for escalus_connection
Common Test callbacks
Test cases
The same test but with a helper function
Filtered out by Pred, but handled by Handler
Accepted by Pred, so not handled
Helpers |
-module(connection_SUITE).
-include_lib("exml/include/exml_stream.hrl").
-include_lib("escalus/include/escalus.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-export([all/0,
groups/0]).
-export([wait/1,
wait_with_handler/1,
receive_stanza_timeout/1,
receive_stanza_timeout_safe/1,
receive_stanza_with_metadata/1,
receive_stanza_pred/1,
receive_stanza_pred_with_handler/1,
receive_stanza_assert/1
]).
all() ->
[{group, basic}].
groups() ->
[{basic, [parallel], [wait,
wait_with_handler,
receive_stanza_timeout,
receive_stanza_timeout_safe,
receive_stanza_with_metadata,
receive_stanza_pred,
receive_stanza_pred_with_handler,
receive_stanza_assert]}].
wait(_Config) ->
Wait for 10 ms , no handlers , no incoming stanzas
ok = escalus_connection:wait(client(), 10).
wait_with_handler(_Config) ->
Handler = handler(fun(Msg) -> escalus_pred:is_chat_message(msg(), Msg) end,
fun(Msg) -> self() ! {handled, Msg} end),
Client = set_received_handlers(client(), [Handler]),
HandledStanza = escalus_stanza:chat_to(my_jid(), msg()),
OtherStanza = escalus_stanza:chat_to(my_jid(), msg2()),
self() ! escalus_connection:stanza_msg(OtherStanza, metadata()),
self() ! escalus_connection:stanza_msg(HandledStanza, metadata()),
ok = escalus_connection:wait(Client, 10),
receive {handled, HandledStanza} -> ok after 0 -> ct:fail("not handled: ~p", [HandledStanza]) end.
receive_stanza_timeout(_Config) ->
Client = client(),
Stanza = escalus_stanza:chat_to(my_jid(), msg()),
erlang:send_after(1000, self(), escalus_connection:stanza_msg(Stanza, metadata())),
Waits for 10 ms , but the stanza is sent later
?assertException(throw, timeout, escalus_connection:receive_stanza(Client, #{timeout => 10})),
The same but with a named timeout message , the two calls below are identical
?assertException(throw, {timeout, msg}, escalus_connection:get_stanza(Client, msg, 10)),
?assertException(throw, {timeout, msg},
escalus_connection:receive_stanza(Client, #{timeout => 10, name => msg})),
Stanza = escalus_connection:receive_stanza(Client, #{}).
receive_stanza_timeout_safe(_Config) ->
Client = client(),
Stanza = escalus_stanza:chat_to(my_jid(), msg()),
erlang:send_after(1000, self(), escalus_connection:stanza_msg(Stanza, metadata())),
Waits for 10 ms , but the stanza is sent later , the two calls below are identical
{error, timeout} = escalus_connection:receive_stanza(Client, #{timeout => 10, safe => true}),
{error, timeout} = escalus_connection:get_stanza_safe(Client, 10),
Stanza = escalus_connection:receive_stanza(Client, #{}).
receive_stanza_with_metadata(_Config) ->
Client = client(),
Stanza = escalus_stanza:chat_to(my_jid(), msg()),
Metadata1 = metadata(),
self() ! escalus_connection:stanza_msg(Stanza, Metadata1),
{Stanza, Metadata1} = escalus_connection:receive_stanza(Client, #{with_metadata => true}),
Metadata2 = metadata(),
self() ! escalus_connection:stanza_msg(Stanza, Metadata2),
{Stanza, Metadata2} = escalus_connection:get_stanza_with_metadata(Client, msg, 1000).
receive_stanza_pred(_Config) ->
Client = client(),
SkippedStanza = escalus_stanza:chat_to(my_jid(), msg()),
Stanza = escalus_stanza:chat_to(my_jid(), msg2()),
Pred = fun(S) -> escalus_pred:is_chat_message(msg2(), S) end,
self() ! escalus_connection:stanza_msg(SkippedStanza, metadata()),
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
The predicate should filter out the first stanza , so we receive only the second one
Stanza = escalus_connection:receive_stanza(Client, #{pred => Pred}),
?assertException(throw, timeout, escalus_connection:receive_stanza(Client, #{timeout => 10})).
receive_stanza_pred_with_handler(_Config) ->
Handler = handler(fun(_) -> true end, fun(Msg) -> self() ! {skipped, Msg} end),
Client = set_received_handlers(client(), [Handler]),
SkippedStanza = escalus_stanza:chat_to(my_jid(), msg()),
Stanza = escalus_stanza:chat_to(my_jid(), msg2()),
Pred = fun(S) -> escalus_pred:is_chat_message(msg2(), S) end,
self() ! escalus_connection:stanza_msg(SkippedStanza, metadata()),
?assertException(throw, timeout, escalus_connection:receive_stanza(Client, #{timeout => 10, pred => Pred})),
receive {skipped, SkippedStanza} -> ok after 0 -> ct:fail("not skipped: ~p", [SkippedStanza]) end,
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
Stanza = escalus_connection:receive_stanza(Client, #{timeout => 10, pred => Pred}),
receive {skipped, Stanza} -> ct:fail("skipped: ~p", [SkippedStanza]) after 0 -> ok end.
receive_stanza_assert(_Config) ->
Client = client(),
Stanza = escalus_stanza:chat_to(my_jid(), msg2()),
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
?assertException(error, {assertion_failed, assert, is_chat_message, _, _, _},
escalus_connection:receive_stanza(Client, #{assert => {is_chat_message, [msg()]}})),
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
Stanza = escalus_connection:receive_stanza(Client, #{assert => {is_chat_message, [msg2()]}}),
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
Stanza = escalus_connection:receive_stanza(Client, #{assert => fun(S) -> S =:= Stanza end}),
self() ! escalus_connection:stanza_msg(Stanza, metadata()),
Stanza = escalus_connection:receive_stanza(Client, #{assert => is_chat_message}).
handler(Pred, Action) ->
fun(_Client, Msg) ->
case Pred(Msg) of
true -> Action(Msg), true;
false -> false
end
end.
my_jid() -> <<"alice@localhost">>.
msg() -> <<"Message">>.
msg2() -> <<"Message 2">>.
metadata() -> #{recv_timestamp => os:system_time(micro_seconds)}.
client() -> #client{jid = my_jid(), rcv_pid = self(), props = []}.
set_received_handlers(Client, Handlers) ->
Client#client{props = [{received_stanza_handlers, Handlers}]}.
|
106537d25cb03c91146c47e57c4c2547476f8f4bdd2fe402f780db169437a5db | biscuit-auth/biscuit-haskell | Executor.hs | # LANGUAGE OverloadedLists #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
module Spec.Executor (specs) where
import Data.Map.Strict as Map
import Data.Set as Set
import Data.Text (Text, unpack)
import Numeric.Natural (Natural)
import Test.Tasty
import Test.Tasty.HUnit
import Auth.Biscuit.Datalog.AST
import Auth.Biscuit.Datalog.Executor (ExecutionError (..),
Limits (..),
defaultLimits,
evaluateExpression)
import Auth.Biscuit.Datalog.Parser (expressionParser, fact,
rule)
import Auth.Biscuit.Datalog.ScopedExecutor hiding (limits)
import Spec.Parser (parseExpression)
specs :: TestTree
specs = testGroup "Datalog evaluation"
[ grandparent
, ancestor
, scopedRules
, exprEval
, exprEvalError
, rulesWithConstraints
, ruleHeadWithNoVars
, limits
]
authGroup :: Set Fact -> FactGroup
authGroup = FactGroup . Map.singleton (Set.singleton 0)
authRulesGroup :: Set Rule -> Map Natural (Set EvalRule)
authRulesGroup = Map.singleton 0 . adaptRules
adaptRules :: Set Rule -> Set EvalRule
adaptRules = Set.map (toEvaluation [])
grandparent :: TestTree
grandparent = testCase "Basic grandparent rule" $
let rules = authRulesGroup $ Set.fromList
[ [rule|grandparent($a,$b) <- parent($a,$c), parent($c,$b)|]
]
facts = authGroup $ Set.fromList
[ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "jean-pierre")|]
, [fact|parent("alice", "toto")|]
]
in runFactGeneration defaultLimits 1 rules facts @?= Right (authGroup $ Set.fromList
[ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "jean-pierre")|]
, [fact|parent("alice", "toto")|]
, [fact|grandparent("alice", "jean-pierre")|]
])
ancestor :: TestTree
ancestor = testCase "Ancestor rule" $
let rules = authRulesGroup $ Set.fromList
[ [rule|ancestor($a,$b) <- parent($a,$c), ancestor($c,$b)|]
, [rule|ancestor($a,$b) <- parent($a,$b)|]
]
facts = authGroup $ Set.fromList
[ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "jean-pierre")|]
, [fact|parent("alice", "toto")|]
]
in runFactGeneration defaultLimits 1 rules facts @?= Right (authGroup $ Set.fromList
[ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "jean-pierre")|]
, [fact|parent("alice", "toto")|]
, [fact|ancestor("alice", "bob")|]
, [fact|ancestor("bob", "jean-pierre")|]
, [fact|ancestor("alice", "toto")|]
, [fact|ancestor("alice", "jean-pierre")|]
])
expr :: Text -> Expression
expr = either error id . parseExpression
exprEval :: TestTree
exprEval = do
let bindings = Map.fromList
[ ("var1", LInteger 0)
, ("topDomain", LString "example.com")
, ("domain", LString "test.example.com")
]
eval (e, r) = testCase (unpack e) $
evaluateExpression defaultLimits bindings (expr e) @?= Right r
-- ("1 / 0") @?= Left "Divide by 0"
testGroup "Expressions evaluation" $ eval <$>
[ ("!(1 < $var1)", LBool True)
, ("[0].contains($var1)", LBool True)
, ("1 + 2 * 3", LInteger 7)
, ("!(1 + 2 * 3 > 4)", LBool False)
, ("!true", LBool False)
, ("!false", LBool True)
, ("(true)", LBool True)
, ("\"test\".length()", LInteger 4)
, ("hex:ababab.length()", LInteger 3)
, ("[].length()", LInteger 0)
, ("[\"test\", \"test\"].length()", LInteger 1)
, ("1 == 1", LBool True)
, ("2 == 1", LBool False)
, ("\"toto\" == \"toto\"", LBool True)
, ("\"toto\" == \"truc\"", LBool False)
, ("\"toto\".matches(\"to(to)?\")", LBool True)
, ("\"toto\".matches(\"^to$\")", LBool False)
, ("2021-05-07T18:00:00Z == 2021-05-07T18:00:00Z", LBool True)
, ("2021-05-07T18:00:00Z == 2021-05-07T19:00:00Z", LBool False)
, ("hex:ababab == hex:ababab", LBool True)
, ("hex:ababab == hex:ababac", LBool False)
, ("true == true", LBool True)
, ("true == false", LBool False)
, ("[1,2,3] == [1,2,3]", LBool True)
, ("[1,2,3] == [1,2,4]", LBool False)
, ("1 < 2", LBool True)
, ("2 < 1", LBool False)
, ("2021-05-07T18:00:00Z < 2021-05-07T19:00:00Z", LBool True)
, ("2021-05-07T19:00:00Z < 2021-05-07T18:00:00Z", LBool False)
, ("2 > 1", LBool True)
, ("1 > 2", LBool False)
, ("2021-05-07T19:00:00Z > 2021-05-07T18:00:00Z", LBool True)
, ("2021-05-07T18:00:00Z > 2021-05-07T19:00:00Z", LBool False)
, ("1 <= 2", LBool True)
, ("1 <= 1", LBool True)
, ("2 <= 1", LBool False)
, ("2021-05-07T18:00:00Z <= 2021-05-07T19:00:00Z", LBool True)
, ("2021-05-07T18:00:00Z <= 2021-05-07T18:00:00Z", LBool True)
, ("2021-05-07T19:00:00Z <= 2021-05-07T18:00:00Z", LBool False)
, ("2 >= 1", LBool True)
, ("2 >= 2", LBool True)
, ("1 >= 2", LBool False)
, ("2021-05-07T19:00:00Z >= 2021-05-07T18:00:00Z", LBool True)
, ("2021-05-07T18:00:00Z >= 2021-05-07T18:00:00Z", LBool True)
, ("2021-05-07T18:00:00Z >= 2021-05-07T19:00:00Z", LBool False)
, ("\"my string\".starts_with(\"my\")", LBool True)
, ("\"my string\".starts_with(\"string\")", LBool False)
, ("\"my string\".ends_with(\"string\")", LBool True)
, ("\"my string\".ends_with(\"my\")", LBool False)
, ("$domain.ends_with(\".\" + $topDomain)", LBool True)
, ("2 + 1", LInteger 3)
, ("2 - 1", LInteger 1)
, ("5 / 2", LInteger 2)
, ("2 * 1", LInteger 2)
, ("true && true", LBool True)
, ("true && false", LBool False)
, ("false && true", LBool False)
, ("false && false", LBool False)
, ("true || true", LBool True)
, ("true || false", LBool True)
, ("false || true", LBool True)
, ("false || false", LBool False)
, ("[1].contains([1])", LBool True)
, ("[1].contains(1)", LBool True)
, ("[].contains(1)", LBool False)
, ("[\"test\"].contains(2)", LBool False)
, ("[1].intersection([1])", TermSet (Set.fromList [LInteger 1]))
, ("[1].intersection([\"test\"])", TermSet (Set.fromList []))
, ("[1].union([1])", TermSet (Set.fromList [LInteger 1]))
, ("[1].union([\"test\"])", TermSet (Set.fromList [LInteger 1, LString "test"]))
]
exprEvalError :: TestTree
exprEvalError = do
let bindings = Map.fromList
[ ("var1", LInteger 0)
]
l = defaultLimits { allowRegexes = False }
evalFail (e, r) = testCase (unpack e) $
evaluateExpression l bindings (expr e) @?= Left r
testGroup "Expressions evaluation (expected errors)" $ evalFail <$>
[ ("1 / 0", "Divide by 0")
, ("\"toto\".matches(\"to\")", "Regex evaluation is disabled")
, ("9223372036854775807 + 1", "integer overflow")
, ("-9223372036854775808 - 1", "integer underflow")
]
rulesWithConstraints :: TestTree
rulesWithConstraints = testCase "Rule with constraints" $
let rules = authRulesGroup $ Set.fromList
[ [rule|valid_date("file1") <- time($0), resource("file1"), $0 <= 2019-12-04T09:46:41+00:00|]
, [rule|valid_date("file2") <- time($0), resource("file2"), $0 <= 2010-12-04T09:46:41+00:00|]
]
facts = authGroup $ Set.fromList
[ [fact|time(2019-12-04T01:00:00Z)|]
, [fact|resource("file1")|]
, [fact|resource("file2")|]
]
in runFactGeneration defaultLimits 1 rules facts @?= Right (authGroup $ Set.fromList
[ [fact|time(2019-12-04T01:00:00Z)|]
, [fact|resource("file1")|]
, [fact|resource("file2")|]
, [fact|valid_date("file1")|]
])
ruleHeadWithNoVars :: TestTree
ruleHeadWithNoVars = testCase "Rule head with no variables" $
let rules = authRulesGroup $ Set.fromList
[ [rule|operation("authority", "read") <- test($yolo, "nothing")|]
]
facts = authGroup $ Set.fromList
[ [fact|test("whatever", "notNothing")|]
]
in runFactGeneration defaultLimits 1 rules facts @?= Right (authGroup $ Set.fromList
[ [fact|test("whatever", "notNothing")|]
])
limits :: TestTree
limits =
let rules = authRulesGroup $ Set.fromList
[ [rule|ancestor($a,$b) <- parent($a,$c), ancestor($c,$b)|]
, [rule|ancestor($a,$b) <- parent($a,$b)|]
]
facts = authGroup $ Set.fromList
[ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "jean-pierre")|]
, [fact|parent("bob", "marielle")|]
, [fact|parent("alice", "toto")|]
]
factLimits = defaultLimits { maxFacts = 10 }
iterLimits = defaultLimits { maxIterations = 2 }
in testGroup "Facts generation limits"
[ testCase "max facts" $
runFactGeneration factLimits 1 rules facts @?= Left Facts
, testCase "max iterations" $
runFactGeneration iterLimits 1 rules facts @?= Left Iterations
]
scopedRules :: TestTree
scopedRules = testGroup "Rules and facts in different scopes"
[ testCase "with default scoping for rules" $
let rules :: Map Natural (Set Rule)
rules = [ (0, [ [rule|ancestor($a,$b) <- parent($a,$b)|] ])
, (1, [ [rule|ancestor($a,$b) <- parent($a,$c), ancestor($c,$b)|] ])
]
facts :: FactGroup
facts = FactGroup
[ ([0], [ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "trudy")|]
])
, ([1], [ [fact|parent("bob", "jean-pierre")|]
])
, ([2], [ [fact|parent("toto", "toto")|]
])
]
in runFactGeneration defaultLimits 3 (adaptRules <$> rules) facts @?= Right (FactGroup
[ ([0], [ [fact|parent("alice", "bob")|]
, [fact|ancestor("alice", "bob")|]
, [fact|parent("bob", "trudy")|]
, [fact|ancestor("bob", "trudy")|]
])
, ([1], [ [fact|parent("bob", "jean-pierre")|]
])
, ([0,1], [ [fact|ancestor("alice", "trudy")|]
])
, ([2], [ [fact|parent("toto", "toto")|] ])
])
, testCase "with explicit scoping for rules (authority)" $
let rules :: Map Natural (Set Rule)
rules = [ (0, [ [rule|ancestor($a,$b) <- parent($a,$b) trusting authority |] ])
, (1, [ [rule|ancestor($a,$b) <- parent($a,$c), ancestor($c,$b) trusting authority |] ])
, (2, [ [rule|family($a,$b) <- parent($a,$b) trusting authority |] ])
]
facts :: FactGroup
facts = FactGroup
[ ([0], [ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "trudy")|]
])
, ([1], [ [fact|parent("bob", "jean-pierre")|]
])
, ([2], [ [fact|parent("toto", "toto")|]
])
]
in runFactGeneration defaultLimits 3 (adaptRules <$> rules) facts @?= Right (FactGroup
[ ([0], [ [fact|parent("alice", "bob")|]
, [fact|ancestor("alice", "bob")|]
, [fact|parent("bob", "trudy")|]
, [fact|ancestor("bob", "trudy")|]
])
, ([1], [ [fact|parent("bob", "jean-pierre")|]
])
, ([0,1], [ [fact|ancestor("alice", "trudy")|]
])
, ([2], [ [fact|parent("toto", "toto")|]
, [fact|family("toto", "toto")|]
])
, ([0,2], [ [fact|family("alice", "bob")|]
, [fact|family("bob", "trudy")|]
])
])
]
| null | https://raw.githubusercontent.com/biscuit-auth/biscuit-haskell/b1863e3e2ce5c431742effb52c9cfeba8433c01a/biscuit/test/Spec/Executor.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE QuasiQuotes #
("1 / 0") @?= Left "Divide by 0" | # LANGUAGE OverloadedLists #
module Spec.Executor (specs) where
import Data.Map.Strict as Map
import Data.Set as Set
import Data.Text (Text, unpack)
import Numeric.Natural (Natural)
import Test.Tasty
import Test.Tasty.HUnit
import Auth.Biscuit.Datalog.AST
import Auth.Biscuit.Datalog.Executor (ExecutionError (..),
Limits (..),
defaultLimits,
evaluateExpression)
import Auth.Biscuit.Datalog.Parser (expressionParser, fact,
rule)
import Auth.Biscuit.Datalog.ScopedExecutor hiding (limits)
import Spec.Parser (parseExpression)
specs :: TestTree
specs = testGroup "Datalog evaluation"
[ grandparent
, ancestor
, scopedRules
, exprEval
, exprEvalError
, rulesWithConstraints
, ruleHeadWithNoVars
, limits
]
authGroup :: Set Fact -> FactGroup
authGroup = FactGroup . Map.singleton (Set.singleton 0)
authRulesGroup :: Set Rule -> Map Natural (Set EvalRule)
authRulesGroup = Map.singleton 0 . adaptRules
adaptRules :: Set Rule -> Set EvalRule
adaptRules = Set.map (toEvaluation [])
grandparent :: TestTree
grandparent = testCase "Basic grandparent rule" $
let rules = authRulesGroup $ Set.fromList
[ [rule|grandparent($a,$b) <- parent($a,$c), parent($c,$b)|]
]
facts = authGroup $ Set.fromList
[ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "jean-pierre")|]
, [fact|parent("alice", "toto")|]
]
in runFactGeneration defaultLimits 1 rules facts @?= Right (authGroup $ Set.fromList
[ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "jean-pierre")|]
, [fact|parent("alice", "toto")|]
, [fact|grandparent("alice", "jean-pierre")|]
])
ancestor :: TestTree
ancestor = testCase "Ancestor rule" $
let rules = authRulesGroup $ Set.fromList
[ [rule|ancestor($a,$b) <- parent($a,$c), ancestor($c,$b)|]
, [rule|ancestor($a,$b) <- parent($a,$b)|]
]
facts = authGroup $ Set.fromList
[ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "jean-pierre")|]
, [fact|parent("alice", "toto")|]
]
in runFactGeneration defaultLimits 1 rules facts @?= Right (authGroup $ Set.fromList
[ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "jean-pierre")|]
, [fact|parent("alice", "toto")|]
, [fact|ancestor("alice", "bob")|]
, [fact|ancestor("bob", "jean-pierre")|]
, [fact|ancestor("alice", "toto")|]
, [fact|ancestor("alice", "jean-pierre")|]
])
expr :: Text -> Expression
expr = either error id . parseExpression
exprEval :: TestTree
exprEval = do
let bindings = Map.fromList
[ ("var1", LInteger 0)
, ("topDomain", LString "example.com")
, ("domain", LString "test.example.com")
]
eval (e, r) = testCase (unpack e) $
evaluateExpression defaultLimits bindings (expr e) @?= Right r
testGroup "Expressions evaluation" $ eval <$>
[ ("!(1 < $var1)", LBool True)
, ("[0].contains($var1)", LBool True)
, ("1 + 2 * 3", LInteger 7)
, ("!(1 + 2 * 3 > 4)", LBool False)
, ("!true", LBool False)
, ("!false", LBool True)
, ("(true)", LBool True)
, ("\"test\".length()", LInteger 4)
, ("hex:ababab.length()", LInteger 3)
, ("[].length()", LInteger 0)
, ("[\"test\", \"test\"].length()", LInteger 1)
, ("1 == 1", LBool True)
, ("2 == 1", LBool False)
, ("\"toto\" == \"toto\"", LBool True)
, ("\"toto\" == \"truc\"", LBool False)
, ("\"toto\".matches(\"to(to)?\")", LBool True)
, ("\"toto\".matches(\"^to$\")", LBool False)
, ("2021-05-07T18:00:00Z == 2021-05-07T18:00:00Z", LBool True)
, ("2021-05-07T18:00:00Z == 2021-05-07T19:00:00Z", LBool False)
, ("hex:ababab == hex:ababab", LBool True)
, ("hex:ababab == hex:ababac", LBool False)
, ("true == true", LBool True)
, ("true == false", LBool False)
, ("[1,2,3] == [1,2,3]", LBool True)
, ("[1,2,3] == [1,2,4]", LBool False)
, ("1 < 2", LBool True)
, ("2 < 1", LBool False)
, ("2021-05-07T18:00:00Z < 2021-05-07T19:00:00Z", LBool True)
, ("2021-05-07T19:00:00Z < 2021-05-07T18:00:00Z", LBool False)
, ("2 > 1", LBool True)
, ("1 > 2", LBool False)
, ("2021-05-07T19:00:00Z > 2021-05-07T18:00:00Z", LBool True)
, ("2021-05-07T18:00:00Z > 2021-05-07T19:00:00Z", LBool False)
, ("1 <= 2", LBool True)
, ("1 <= 1", LBool True)
, ("2 <= 1", LBool False)
, ("2021-05-07T18:00:00Z <= 2021-05-07T19:00:00Z", LBool True)
, ("2021-05-07T18:00:00Z <= 2021-05-07T18:00:00Z", LBool True)
, ("2021-05-07T19:00:00Z <= 2021-05-07T18:00:00Z", LBool False)
, ("2 >= 1", LBool True)
, ("2 >= 2", LBool True)
, ("1 >= 2", LBool False)
, ("2021-05-07T19:00:00Z >= 2021-05-07T18:00:00Z", LBool True)
, ("2021-05-07T18:00:00Z >= 2021-05-07T18:00:00Z", LBool True)
, ("2021-05-07T18:00:00Z >= 2021-05-07T19:00:00Z", LBool False)
, ("\"my string\".starts_with(\"my\")", LBool True)
, ("\"my string\".starts_with(\"string\")", LBool False)
, ("\"my string\".ends_with(\"string\")", LBool True)
, ("\"my string\".ends_with(\"my\")", LBool False)
, ("$domain.ends_with(\".\" + $topDomain)", LBool True)
, ("2 + 1", LInteger 3)
, ("2 - 1", LInteger 1)
, ("5 / 2", LInteger 2)
, ("2 * 1", LInteger 2)
, ("true && true", LBool True)
, ("true && false", LBool False)
, ("false && true", LBool False)
, ("false && false", LBool False)
, ("true || true", LBool True)
, ("true || false", LBool True)
, ("false || true", LBool True)
, ("false || false", LBool False)
, ("[1].contains([1])", LBool True)
, ("[1].contains(1)", LBool True)
, ("[].contains(1)", LBool False)
, ("[\"test\"].contains(2)", LBool False)
, ("[1].intersection([1])", TermSet (Set.fromList [LInteger 1]))
, ("[1].intersection([\"test\"])", TermSet (Set.fromList []))
, ("[1].union([1])", TermSet (Set.fromList [LInteger 1]))
, ("[1].union([\"test\"])", TermSet (Set.fromList [LInteger 1, LString "test"]))
]
exprEvalError :: TestTree
exprEvalError = do
let bindings = Map.fromList
[ ("var1", LInteger 0)
]
l = defaultLimits { allowRegexes = False }
evalFail (e, r) = testCase (unpack e) $
evaluateExpression l bindings (expr e) @?= Left r
testGroup "Expressions evaluation (expected errors)" $ evalFail <$>
[ ("1 / 0", "Divide by 0")
, ("\"toto\".matches(\"to\")", "Regex evaluation is disabled")
, ("9223372036854775807 + 1", "integer overflow")
, ("-9223372036854775808 - 1", "integer underflow")
]
rulesWithConstraints :: TestTree
rulesWithConstraints = testCase "Rule with constraints" $
let rules = authRulesGroup $ Set.fromList
[ [rule|valid_date("file1") <- time($0), resource("file1"), $0 <= 2019-12-04T09:46:41+00:00|]
, [rule|valid_date("file2") <- time($0), resource("file2"), $0 <= 2010-12-04T09:46:41+00:00|]
]
facts = authGroup $ Set.fromList
[ [fact|time(2019-12-04T01:00:00Z)|]
, [fact|resource("file1")|]
, [fact|resource("file2")|]
]
in runFactGeneration defaultLimits 1 rules facts @?= Right (authGroup $ Set.fromList
[ [fact|time(2019-12-04T01:00:00Z)|]
, [fact|resource("file1")|]
, [fact|resource("file2")|]
, [fact|valid_date("file1")|]
])
ruleHeadWithNoVars :: TestTree
ruleHeadWithNoVars = testCase "Rule head with no variables" $
let rules = authRulesGroup $ Set.fromList
[ [rule|operation("authority", "read") <- test($yolo, "nothing")|]
]
facts = authGroup $ Set.fromList
[ [fact|test("whatever", "notNothing")|]
]
in runFactGeneration defaultLimits 1 rules facts @?= Right (authGroup $ Set.fromList
[ [fact|test("whatever", "notNothing")|]
])
limits :: TestTree
limits =
let rules = authRulesGroup $ Set.fromList
[ [rule|ancestor($a,$b) <- parent($a,$c), ancestor($c,$b)|]
, [rule|ancestor($a,$b) <- parent($a,$b)|]
]
facts = authGroup $ Set.fromList
[ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "jean-pierre")|]
, [fact|parent("bob", "marielle")|]
, [fact|parent("alice", "toto")|]
]
factLimits = defaultLimits { maxFacts = 10 }
iterLimits = defaultLimits { maxIterations = 2 }
in testGroup "Facts generation limits"
[ testCase "max facts" $
runFactGeneration factLimits 1 rules facts @?= Left Facts
, testCase "max iterations" $
runFactGeneration iterLimits 1 rules facts @?= Left Iterations
]
scopedRules :: TestTree
scopedRules = testGroup "Rules and facts in different scopes"
[ testCase "with default scoping for rules" $
let rules :: Map Natural (Set Rule)
rules = [ (0, [ [rule|ancestor($a,$b) <- parent($a,$b)|] ])
, (1, [ [rule|ancestor($a,$b) <- parent($a,$c), ancestor($c,$b)|] ])
]
facts :: FactGroup
facts = FactGroup
[ ([0], [ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "trudy")|]
])
, ([1], [ [fact|parent("bob", "jean-pierre")|]
])
, ([2], [ [fact|parent("toto", "toto")|]
])
]
in runFactGeneration defaultLimits 3 (adaptRules <$> rules) facts @?= Right (FactGroup
[ ([0], [ [fact|parent("alice", "bob")|]
, [fact|ancestor("alice", "bob")|]
, [fact|parent("bob", "trudy")|]
, [fact|ancestor("bob", "trudy")|]
])
, ([1], [ [fact|parent("bob", "jean-pierre")|]
])
, ([0,1], [ [fact|ancestor("alice", "trudy")|]
])
, ([2], [ [fact|parent("toto", "toto")|] ])
])
, testCase "with explicit scoping for rules (authority)" $
let rules :: Map Natural (Set Rule)
rules = [ (0, [ [rule|ancestor($a,$b) <- parent($a,$b) trusting authority |] ])
, (1, [ [rule|ancestor($a,$b) <- parent($a,$c), ancestor($c,$b) trusting authority |] ])
, (2, [ [rule|family($a,$b) <- parent($a,$b) trusting authority |] ])
]
facts :: FactGroup
facts = FactGroup
[ ([0], [ [fact|parent("alice", "bob")|]
, [fact|parent("bob", "trudy")|]
])
, ([1], [ [fact|parent("bob", "jean-pierre")|]
])
, ([2], [ [fact|parent("toto", "toto")|]
])
]
in runFactGeneration defaultLimits 3 (adaptRules <$> rules) facts @?= Right (FactGroup
[ ([0], [ [fact|parent("alice", "bob")|]
, [fact|ancestor("alice", "bob")|]
, [fact|parent("bob", "trudy")|]
, [fact|ancestor("bob", "trudy")|]
])
, ([1], [ [fact|parent("bob", "jean-pierre")|]
])
, ([0,1], [ [fact|ancestor("alice", "trudy")|]
])
, ([2], [ [fact|parent("toto", "toto")|]
, [fact|family("toto", "toto")|]
])
, ([0,2], [ [fact|family("alice", "bob")|]
, [fact|family("bob", "trudy")|]
])
])
]
|
e93d154786c3536fa8643202beaf141c01d2300efc661769a0d09fdbc70056ee | racket/typed-racket | pr11686.rkt | #;
(exn-pred exn:fail:contract? #rx"shape-check")
#lang racket/load
(module T typed/racket/shallow
(struct: [X] doll ([contents : X]))
(define-type RussianDoll
(Rec RD (U 'center (doll RD))))
(: f (RussianDoll -> RussianDoll))
(define (f rd)
(let ((v (doll-contents (assert rd doll?))))
(assert v doll?)))
(: md (All (x) (x -> (doll x))))
(define md doll)
(provide (all-defined-out)))
(module U racket
(require 'T)
(f (md 3)))
(require 'U)
| null | https://raw.githubusercontent.com/racket/typed-racket/1dde78d165472d67ae682b68622d2b7ee3e15e1e/typed-racket-test/fail/shallow/pr11686.rkt | racket | (exn-pred exn:fail:contract? #rx"shape-check")
#lang racket/load
(module T typed/racket/shallow
(struct: [X] doll ([contents : X]))
(define-type RussianDoll
(Rec RD (U 'center (doll RD))))
(: f (RussianDoll -> RussianDoll))
(define (f rd)
(let ((v (doll-contents (assert rd doll?))))
(assert v doll?)))
(: md (All (x) (x -> (doll x))))
(define md doll)
(provide (all-defined-out)))
(module U racket
(require 'T)
(f (md 3)))
(require 'U)
| |
924bcef360eb128900cd3b0f0d5661d46dec7658ffffa0ebc981166b1c40b06c | pflanze/chj-schemelib | vectorlib-2.scm | Copyright 2013 - 2017 by < >
;;; This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
;;; (at your option) any later version.
(require easy
parallel
math/vectorlib)
XXXX aufraumen nur
(define. (Mc.mirror0 m)
(letv ((s0 s1) (.sizes m))
(let ((res (@make-Mc s0 s1)))
(parallel-for-all
2000
m
(s0 s1)
(i0 i1)
(.set! res i0 i1
(.ref m (- (dec s0) i0) i1)))
res)))
;;COPYPASTE
(define. (Mr.mirror0 m)
(letv ((s0 s1) (.sizes m))
(let ((res (@make-Mr s0 s1)))
(parallel-for-all
2000
m
(s0 s1)
(i0 i1)
(.set! res i0 i1
(.ref m (- (dec s0) i0) i1)))
res)))
(TEST
> (show (.mirror0 (Mc (Vc 1 2) (Vc 3 4))))
(Mc (Vc 3.+0.i 4.+0.i) (Vc 1.+0.i 2.+0.i))
)
| null | https://raw.githubusercontent.com/pflanze/chj-schemelib/59ff8476e39f207c2f1d807cfc9670581c8cedd3/math/vectorlib-2.scm | scheme | This file is free software; you can redistribute it and/or modify
(at your option) any later version.
COPYPASTE | Copyright 2013 - 2017 by < >
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
(require easy
parallel
math/vectorlib)
XXXX aufraumen nur
(define. (Mc.mirror0 m)
(letv ((s0 s1) (.sizes m))
(let ((res (@make-Mc s0 s1)))
(parallel-for-all
2000
m
(s0 s1)
(i0 i1)
(.set! res i0 i1
(.ref m (- (dec s0) i0) i1)))
res)))
(define. (Mr.mirror0 m)
(letv ((s0 s1) (.sizes m))
(let ((res (@make-Mr s0 s1)))
(parallel-for-all
2000
m
(s0 s1)
(i0 i1)
(.set! res i0 i1
(.ref m (- (dec s0) i0) i1)))
res)))
(TEST
> (show (.mirror0 (Mc (Vc 1 2) (Vc 3 4))))
(Mc (Vc 3.+0.i 4.+0.i) (Vc 1.+0.i 2.+0.i))
)
|
eb23367b022bd069e4d09330cef30a3e0e85de4ec5aa8d8f94203d3bed6e11a5 | fragnix/fragnix | Data.IntSet.Base.hs | # LANGUAGE Haskell98 #
{-# LINE 1 "Data/IntSet/Base.hs" #-}
# LANGUAGE CPP #
# LANGUAGE MagicHash , BangPatterns , DeriveDataTypeable , StandaloneDeriving #
# LANGUAGE Trustworthy #
# LANGUAGE TypeFamilies #
-----------------------------------------------------------------------------
-- |
-- Module : Data.IntSet.Base
Copyright : ( c ) 2002
( c ) 2011
-- License : BSD-style
-- Maintainer :
-- Stability : provisional
-- Portability : portable
--
-- An efficient implementation of integer sets.
--
-- These modules are intended to be imported qualified, to avoid name
-- clashes with Prelude functions, e.g.
--
> import Data . IntSet ( )
> import qualified Data . IntSet as
--
-- The implementation is based on /big-endian patricia trees/. This data
-- structure performs especially well on binary operations like 'union'
-- and 'intersection'. However, my benchmarks show that it is also
-- (much) faster on insertions and deletions when compared to a generic
-- size-balanced set implementation (see "Data.Set").
--
* and , \"/Fast Maps/\ " ,
Workshop on ML , September 1998 , pages 77 - 86 ,
-- <>
--
* , -- Practical Algorithm To Retrieve
Information Coded In Alphanumeric/\ " , Journal of the ACM , 15(4 ) ,
October 1968 , pages 514 - 534 .
--
-- Additionally, this implementation places bitmaps in the leaves of the tree.
Their size is the natural size of a machine word ( 32 or 64 bits ) and greatly
-- reduce memory footprint and execution times for dense sets, e.g. sets where
-- it is likely that many values lie close to each other. The asymptotics are
-- not affected by this optimization.
--
Many operations have a worst - case complexity of /O(min(n , W))/.
-- This means that the operation can become linear in the number of
-- elements with a maximum of /W/ -- the number of bits in an 'Int'
( 32 or 64 ) .
-----------------------------------------------------------------------------
-- [Note: INLINE bit fiddling]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is essential that the bit fiddling functions like mask , zero , branchMask
etc are inlined . If they do not , the memory allocation skyrockets . The GHC
-- usually gets it right, but it is disastrous if it does not. Therefore we
-- explicitly mark these functions INLINE.
-- [Note: Local 'go' functions and capturing]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-- Care must be taken when using 'go' function which captures an argument.
-- Sometimes (for example when the argument is passed to a data constructor,
as in insert ) , GHC heap - allocates more than necessary . Therefore C-- code
-- must be checked for increased allocation when creating and modifying such
-- functions.
-- [Note: Order of constructors]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The order of constructors of matters when considering performance .
Currently in GHC 7.0 , when type has 3 constructors , they are matched from
the first to the last -- the best performance is achieved when the
-- constructors are ordered by frequency.
On GHC 7.0 , reordering constructors from Nil | Tip | to Bin | Tip | Nil
improves the benchmark by circa 10 % .
module Data.IntSet.Base (
-- * Set type
instance Eq , Show
-- * Operators
, (\\)
-- * Query
, null
, size
, member
, notMember
, lookupLT
, lookupGT
, lookupLE
, lookupGE
, isSubsetOf
, isProperSubsetOf
-- * Construction
, empty
, singleton
, insert
, delete
-- * Combine
, union
, unions
, difference
, intersection
-- * Filter
, filter
, partition
, split
, splitMember
, splitRoot
-- * Map
, map
-- * Folds
, foldr
, foldl
-- ** Strict folds
, foldr'
, foldl'
-- ** Legacy folds
, fold
*
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, maxView
, minView
-- * Conversion
-- ** List
, elems
, toList
, fromList
-- ** Ordered list
, toAscList
, toDescList
, fromAscList
, fromDistinctAscList
-- * Debugging
, showTree
, showTreeWith
-- * Internals
, match
, suffixBitMask
, prefixBitMask
, bitmapOf
) where
import Control.DeepSeq (NFData(rnf))
import Data.Bits
import qualified Data.List as List
import Data.Maybe (fromMaybe)
import Data.Monoid (Monoid(..))
import Data.Word (Word)
import Data.Typeable
import Prelude hiding (filter, foldr, foldl, null, map)
import Data.Utils.BitUtil
import Data.Utils.StrictFold
import Data.Utils.StrictPair
import Data.Data (Data(..), Constr, mkConstr, constrIndex, Fixity(Prefix), DataType, mkDataType)
import Text.Read
import GHC.Exts (Int(..), build)
import qualified GHC.Exts as GHCExts
import GHC.Prim (indexInt8OffAddr#)
This comment teaches CPP correct behaviour
A " " is a natural machine word ( an unsigned Int )
type Nat = Word
natFromInt :: Int -> Nat
natFromInt i = fromIntegral i
# INLINE natFromInt #
intFromNat :: Nat -> Int
intFromNat w = fromIntegral w
# INLINE intFromNat #
-------------------------------------------------------------------
Operators
-------------------------------------------------------------------
Operators
--------------------------------------------------------------------}
-- | /O(n+m)/. See 'difference'.
(\\) :: IntSet -> IntSet -> IntSet
m1 \\ m2 = difference m1 m2
{--------------------------------------------------------------------
Types
--------------------------------------------------------------------}
-- | A set of integers.
-- See Note: Order of constructors
data IntSet = Bin {-# UNPACK #-} !Prefix {-# UNPACK #-} !Mask !IntSet !IntSet
Invariant : Nil is never found as a child of .
Invariant : The Mask is a power of 2 . It is the largest bit position at which
two elements of the set differ .
-- Invariant: Prefix is the common high-order bits that all elements share to
-- the left of the Mask bit.
Invariant : In prefix mask left right , left consists of the elements that
-- don't have the mask bit set; right is all the elements that do.
| Tip {-# UNPACK #-} !Prefix {-# UNPACK #-} !BitMap
Invariant : The Prefix is zero for all but the last 5 ( on 32 bit arches ) or 6
bits ( on 64 bit arches ) . The values of the map represented by a tip
-- are the prefix plus the indices of the set bits in the bit map.
| Nil
-- A number stored in a set is stored as
* Prefix ( all but last 5 - 6 bits ) and
* BitMap ( last 5 - 6 bits stored as a bitmask )
Last 5 - 6 bits are called a Suffix .
type Prefix = Int
type Mask = Int
type BitMap = Word
type Key = Int
instance Monoid IntSet where
mempty = empty
mappend = union
mconcat = unions
{--------------------------------------------------------------------
A Data instance
--------------------------------------------------------------------}
-- This instance preserves data abstraction at the cost of inefficiency.
-- We provide limited reflection services for the sake of data abstraction.
instance Data IntSet where
gfoldl f z is = z fromList `f` (toList is)
toConstr _ = fromListConstr
gunfold k z c = case constrIndex c of
1 -> k (z fromList)
_ -> error "gunfold"
dataTypeOf _ = intSetDataType
fromListConstr :: Constr
fromListConstr = mkConstr intSetDataType "fromList" [] Prefix
intSetDataType :: DataType
intSetDataType = mkDataType "Data.IntSet.Base.IntSet" [fromListConstr]
{--------------------------------------------------------------------
Query
--------------------------------------------------------------------}
-- | /O(1)/. Is the set empty?
null :: IntSet -> Bool
null Nil = True
null _ = False
# INLINE null #
-- | /O(n)/. Cardinality of the set.
size :: IntSet -> Int
size t
= case t of
Bin _ _ l r -> size l + size r
Tip _ bm -> bitcount 0 bm
Nil -> 0
-- | /O(min(n,W))/. Is the value a member of the set?
-- See Note: Local 'go' functions and capturing]
member :: Key -> IntSet -> Bool
member x = x `seq` go
where
go (Bin p m l r)
| nomatch x p m = False
| zero x m = go l
| otherwise = go r
go (Tip y bm) = prefixOf x == y && bitmapOf x .&. bm /= 0
go Nil = False
-- | /O(min(n,W))/. Is the element not in the set?
notMember :: Key -> IntSet -> Bool
notMember k = not . member k
-- | /O(log n)/. Find largest element smaller than the given one.
--
> lookupLT 3 ( fromList [ 3 , 5 ] ) = = Nothing
> lookupLT 5 ( fromList [ 3 , 5 ] ) = = Just 3
-- See Note: Local 'go' functions and capturing.
lookupLT :: Key -> IntSet -> Maybe Key
lookupLT x t = x `seq` case t of
Bin _ m l r | m < 0 -> if x >= 0 then go r l else go Nil r
_ -> go Nil t
where
go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMax def else unsafeFindMax r
| zero x m = go def l
| otherwise = go l r
go def (Tip kx bm) | prefixOf x > kx = Just $ kx + highestBitSet bm
| prefixOf x == kx && maskLT /= 0 = Just $ kx + highestBitSet maskLT
| otherwise = unsafeFindMax def
where maskLT = (bitmapOf x - 1) .&. bm
go def Nil = unsafeFindMax def
-- | /O(log n)/. Find smallest element greater than the given one.
--
> lookupGT 4 ( fromList [ 3 , 5 ] ) = = Just 5
> lookupGT 5 ( fromList [ 3 , 5 ] ) = = Nothing
-- See Note: Local 'go' functions and capturing.
lookupGT :: Key -> IntSet -> Maybe Key
lookupGT x t = x `seq` case t of
Bin _ m l r | m < 0 -> if x >= 0 then go Nil l else go l r
_ -> go Nil t
where
go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMin l else unsafeFindMin def
| zero x m = go r l
| otherwise = go def r
go def (Tip kx bm) | prefixOf x < kx = Just $ kx + lowestBitSet bm
| prefixOf x == kx && maskGT /= 0 = Just $ kx + lowestBitSet maskGT
| otherwise = unsafeFindMin def
where maskGT = (- ((bitmapOf x) `shiftLL` 1)) .&. bm
go def Nil = unsafeFindMin def
-- | /O(log n)/. Find largest element smaller or equal to the given one.
--
> lookupLE 2 ( fromList [ 3 , 5 ] ) = = Nothing
> lookupLE 4 ( fromList [ 3 , 5 ] ) = = Just 3
> lookupLE 5 ( fromList [ 3 , 5 ] ) = = Just 5
-- See Note: Local 'go' functions and capturing.
lookupLE :: Key -> IntSet -> Maybe Key
lookupLE x t = x `seq` case t of
Bin _ m l r | m < 0 -> if x >= 0 then go r l else go Nil r
_ -> go Nil t
where
go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMax def else unsafeFindMax r
| zero x m = go def l
| otherwise = go l r
go def (Tip kx bm) | prefixOf x > kx = Just $ kx + highestBitSet bm
| prefixOf x == kx && maskLE /= 0 = Just $ kx + highestBitSet maskLE
| otherwise = unsafeFindMax def
where maskLE = (((bitmapOf x) `shiftLL` 1) - 1) .&. bm
go def Nil = unsafeFindMax def
-- | /O(log n)/. Find smallest element greater or equal to the given one.
--
> lookupGE 3 ( fromList [ 3 , 5 ] ) = = Just 3
> lookupGE 4 ( fromList [ 3 , 5 ] ) = = Just 5
> lookupGE 6 ( fromList [ 3 , 5 ] ) = = Nothing
-- See Note: Local 'go' functions and capturing.
lookupGE :: Key -> IntSet -> Maybe Key
lookupGE x t = x `seq` case t of
Bin _ m l r | m < 0 -> if x >= 0 then go Nil l else go l r
_ -> go Nil t
where
go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMin l else unsafeFindMin def
| zero x m = go r l
| otherwise = go def r
go def (Tip kx bm) | prefixOf x < kx = Just $ kx + lowestBitSet bm
| prefixOf x == kx && maskGE /= 0 = Just $ kx + lowestBitSet maskGE
| otherwise = unsafeFindMin def
where maskGE = (- (bitmapOf x)) .&. bm
go def Nil = unsafeFindMin def
Helper function for lookupGE and . It assumes that if a node is
-- given, it has m > 0.
unsafeFindMin :: IntSet -> Maybe Key
unsafeFindMin Nil = Nothing
unsafeFindMin (Tip kx bm) = Just $ kx + lowestBitSet bm
unsafeFindMin (Bin _ _ l _) = unsafeFindMin l
Helper function for and lookupLT . It assumes that if a node is
-- given, it has m > 0.
unsafeFindMax :: IntSet -> Maybe Key
unsafeFindMax Nil = Nothing
unsafeFindMax (Tip kx bm) = Just $ kx + highestBitSet bm
unsafeFindMax (Bin _ _ _ r) = unsafeFindMax r
{--------------------------------------------------------------------
Construction
--------------------------------------------------------------------}
-- | /O(1)/. The empty set.
empty :: IntSet
empty
= Nil
{-# INLINE empty #-}
| /O(1)/. A set of one element .
singleton :: Key -> IntSet
singleton x
= Tip (prefixOf x) (bitmapOf x)
# INLINE singleton #
{--------------------------------------------------------------------
Insert
--------------------------------------------------------------------}
-- | /O(min(n,W))/. Add a value to the set. There is no left- or right bias for
-- IntSets.
insert :: Key -> IntSet -> IntSet
insert x = x `seq` insertBM (prefixOf x) (bitmapOf x)
-- Helper function for insert and union.
insertBM :: Prefix -> BitMap -> IntSet -> IntSet
insertBM kx bm t = kx `seq` bm `seq`
case t of
Bin p m l r
| nomatch kx p m -> link kx (Tip kx bm) p t
| zero kx m -> Bin p m (insertBM kx bm l) r
| otherwise -> Bin p m l (insertBM kx bm r)
Tip kx' bm'
| kx' == kx -> Tip kx' (bm .|. bm')
| otherwise -> link kx (Tip kx bm) kx' t
Nil -> Tip kx bm
-- | /O(min(n,W))/. Delete a value in the set. Returns the
-- original set when the value was not present.
delete :: Key -> IntSet -> IntSet
delete x = x `seq` deleteBM (prefixOf x) (bitmapOf x)
Deletes all values mentioned in the BitMap from the set .
-- Helper function for delete and difference.
deleteBM :: Prefix -> BitMap -> IntSet -> IntSet
deleteBM kx bm t = kx `seq` bm `seq`
case t of
Bin p m l r
| nomatch kx p m -> t
| zero kx m -> bin p m (deleteBM kx bm l) r
| otherwise -> bin p m l (deleteBM kx bm r)
Tip kx' bm'
| kx' == kx -> tip kx (bm' .&. complement bm)
| otherwise -> t
Nil -> Nil
{--------------------------------------------------------------------
Union
--------------------------------------------------------------------}
-- | The union of a list of sets.
unions :: [IntSet] -> IntSet
unions xs
= foldlStrict union empty xs
| /O(n+m)/. The union of two sets .
union :: IntSet -> IntSet -> IntSet
union t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
| shorter m1 m2 = union1
| shorter m2 m1 = union2
| p1 == p2 = Bin p1 m1 (union l1 l2) (union r1 r2)
| otherwise = link p1 t1 p2 t2
where
union1 | nomatch p2 p1 m1 = link p1 t1 p2 t2
| zero p2 m1 = Bin p1 m1 (union l1 t2) r1
| otherwise = Bin p1 m1 l1 (union r1 t2)
union2 | nomatch p1 p2 m2 = link p1 t1 p2 t2
| zero p1 m2 = Bin p2 m2 (union t1 l2) r2
| otherwise = Bin p2 m2 l2 (union t1 r2)
union t@(Bin _ _ _ _) (Tip kx bm) = insertBM kx bm t
union t@(Bin _ _ _ _) Nil = t
union (Tip kx bm) t = insertBM kx bm t
union Nil t = t
{--------------------------------------------------------------------
Difference
--------------------------------------------------------------------}
| /O(n+m)/. Difference between two sets .
difference :: IntSet -> IntSet -> IntSet
difference t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
| shorter m1 m2 = difference1
| shorter m2 m1 = difference2
| p1 == p2 = bin p1 m1 (difference l1 l2) (difference r1 r2)
| otherwise = t1
where
difference1 | nomatch p2 p1 m1 = t1
| zero p2 m1 = bin p1 m1 (difference l1 t2) r1
| otherwise = bin p1 m1 l1 (difference r1 t2)
difference2 | nomatch p1 p2 m2 = t1
| zero p1 m2 = difference t1 l2
| otherwise = difference t1 r2
difference t@(Bin _ _ _ _) (Tip kx bm) = deleteBM kx bm t
difference t@(Bin _ _ _ _) Nil = t
difference t1@(Tip kx bm) t2 = differenceTip t2
where differenceTip (Bin p2 m2 l2 r2) | nomatch kx p2 m2 = t1
| zero kx m2 = differenceTip l2
| otherwise = differenceTip r2
differenceTip (Tip kx2 bm2) | kx == kx2 = tip kx (bm .&. complement bm2)
| otherwise = t1
differenceTip Nil = t1
difference Nil _ = Nil
{--------------------------------------------------------------------
Intersection
--------------------------------------------------------------------}
| /O(n+m)/. The intersection of two sets .
intersection :: IntSet -> IntSet -> IntSet
intersection t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
| shorter m1 m2 = intersection1
| shorter m2 m1 = intersection2
| p1 == p2 = bin p1 m1 (intersection l1 l2) (intersection r1 r2)
| otherwise = Nil
where
intersection1 | nomatch p2 p1 m1 = Nil
| zero p2 m1 = intersection l1 t2
| otherwise = intersection r1 t2
intersection2 | nomatch p1 p2 m2 = Nil
| zero p1 m2 = intersection t1 l2
| otherwise = intersection t1 r2
intersection t1@(Bin _ _ _ _) (Tip kx2 bm2) = intersectBM t1
where intersectBM (Bin p1 m1 l1 r1) | nomatch kx2 p1 m1 = Nil
| zero kx2 m1 = intersectBM l1
| otherwise = intersectBM r1
intersectBM (Tip kx1 bm1) | kx1 == kx2 = tip kx1 (bm1 .&. bm2)
| otherwise = Nil
intersectBM Nil = Nil
intersection (Bin _ _ _ _) Nil = Nil
intersection (Tip kx1 bm1) t2 = intersectBM t2
where intersectBM (Bin p2 m2 l2 r2) | nomatch kx1 p2 m2 = Nil
| zero kx1 m2 = intersectBM l2
| otherwise = intersectBM r2
intersectBM (Tip kx2 bm2) | kx1 == kx2 = tip kx1 (bm1 .&. bm2)
| otherwise = Nil
intersectBM Nil = Nil
intersection Nil _ = Nil
{--------------------------------------------------------------------
Subset
--------------------------------------------------------------------}
| /O(n+m)/. Is this a proper subset ? ( ie . a subset but not equal ) .
isProperSubsetOf :: IntSet -> IntSet -> Bool
isProperSubsetOf t1 t2
= case subsetCmp t1 t2 of
LT -> True
_ -> False
subsetCmp :: IntSet -> IntSet -> Ordering
subsetCmp t1@(Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
| shorter m1 m2 = GT
| shorter m2 m1 = case subsetCmpLt of
GT -> GT
_ -> LT
| p1 == p2 = subsetCmpEq
| otherwise = GT -- disjoint
where
subsetCmpLt | nomatch p1 p2 m2 = GT
| zero p1 m2 = subsetCmp t1 l2
| otherwise = subsetCmp t1 r2
subsetCmpEq = case (subsetCmp l1 l2, subsetCmp r1 r2) of
(GT,_ ) -> GT
(_ ,GT) -> GT
(EQ,EQ) -> EQ
_ -> LT
subsetCmp (Bin _ _ _ _) _ = GT
subsetCmp (Tip kx1 bm1) (Tip kx2 bm2)
| kx1 /= kx2 = GT -- disjoint
| bm1 == bm2 = EQ
| bm1 .&. complement bm2 == 0 = LT
| otherwise = GT
subsetCmp t1@(Tip kx _) (Bin p m l r)
| nomatch kx p m = GT
| zero kx m = case subsetCmp t1 l of GT -> GT ; _ -> LT
| otherwise = case subsetCmp t1 r of GT -> GT ; _ -> LT
subsetCmp (Tip _ _) Nil = GT -- disjoint
subsetCmp Nil Nil = EQ
subsetCmp Nil _ = LT
-- | /O(n+m)/. Is this a subset?
-- @(s1 `isSubsetOf` s2)@ tells whether @s1@ is a subset of @s2@.
isSubsetOf :: IntSet -> IntSet -> Bool
isSubsetOf t1@(Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
  -- t1 spans a wider range than t2: it cannot fit inside t2.
  | shorter m1 m2  = False
  -- t2 spans a wider range: descend into the half of t2 that covers p1.
  | shorter m2 m1  = match p1 p2 m2 && (if zero p1 m2 then isSubsetOf t1 l2
                                                      else isSubsetOf t1 r2)
  -- Equal masks: prefixes must agree, and both halves must be subsets.
  | otherwise      = (p1==p2) && isSubsetOf l1 l2 && isSubsetOf r1 r2
isSubsetOf (Bin _ _ _ _) _  = False
-- Tip vs Tip: same prefix and bm1 has no bits outside bm2.
isSubsetOf (Tip kx1 bm1) (Tip kx2 bm2) = kx1 == kx2 && bm1 .&. complement bm2 == 0
isSubsetOf t1@(Tip kx _) (Bin p m l r)
  | nomatch kx p m = False
  | zero kx m      = isSubsetOf t1 l
  | otherwise      = isSubsetOf t1 r
isSubsetOf (Tip _ _) Nil = False
isSubsetOf Nil _         = True
{--------------------------------------------------------------------
Filter
--------------------------------------------------------------------}
-- | /O(n)/. Filter all elements that satisfy some predicate.
filter :: (Key -> Bool) -> IntSet -> IntSet
filter predicate t
  = case t of
      Bin p m l r
        -> bin p m (filter predicate l) (filter predicate r)
      Tip kx bm
        -- Rebuild the leaf bitmap by testing every member (prefix + bit).
        -> tip kx (foldl'Bits 0 (bitPred kx) 0 bm)
      Nil -> Nil
  where bitPred kx bm bi | predicate (kx + bi) = bm .|. bitmapOfSuffix bi
                         | otherwise           = bm
        {-# INLINE bitPred #-}
-- | /O(n)/. partition the set according to some predicate.
partition :: (Key -> Bool) -> IntSet -> (IntSet,IntSet)
partition predicate0 t0 = toPair $ go predicate0 t0
  where
    go predicate t
      = case t of
          Bin p m l r
            -> let (l1 :*: l2) = go predicate l
                   (r1 :*: r2) = go predicate r
               in bin p m l1 r1 :*: bin p m l2 r2
          Tip kx bm
            -- bm1 collects the satisfying bits; the rest is bm `xor` bm1.
            -> let bm1 = foldl'Bits 0 (bitPred kx) 0 bm
               in  tip kx bm1 :*: tip kx (bm `xor` bm1)
          Nil -> (Nil :*: Nil)
      where bitPred kx bm bi | predicate (kx + bi) = bm .|. bitmapOfSuffix bi
                             | otherwise           = bm
            {-# INLINE bitPred #-}
-- | /O(min(n,W))/. The expression (@'split' x set@) is a pair @(set1,set2)@
-- where @set1@ comprises the elements of @set@ less than @x@ and @set2@
-- comprises the elements of @set@ greater than @x@.
--
-- > split 3 (fromList [1..5]) == (fromList [1,2], fromList [4,5])
split :: Key -> IntSet -> (IntSet,IntSet)
split x t =
  case t of
      Bin _ m l r
          -- A negative root mask means r holds the negative keys and l the
          -- non-negative ones (cf. 'findMin'), so signed splitting must
          -- merge the untouched half back in.
          | m < 0 -> if x >= 0 -- handle negative numbers.
                     then case go x l of (lt :*: gt) -> let lt' = union lt r
                                                        in lt' `seq` (lt', gt)
                     else case go x r of (lt :*: gt) -> let gt' = union gt l
                                                        in gt' `seq` (lt, gt')
      _ -> case go x t of
          (lt :*: gt) -> (lt, gt)
  where
    go !x' t'@(Bin p m l r)
        | match x' p m = if zero x' m
                         then case go x' l of
                             (lt :*: gt) -> lt :*: union gt r
                         else case go x' r of
                             (lt :*: gt) -> union lt l :*: gt
        -- Pivot lies outside this subtree: it goes wholly to one side.
        | otherwise   = if x' < p then (Nil :*: t')
                        else (t' :*: Nil)
    go x' t'@(Tip kx' bm)
        | kx' > x'          = (Nil :*: t')
          -- equivalent to kx' > prefixOf x'
        | kx' < prefixOf x' = (t' :*: Nil)
        -- Pivot falls inside this tip: split its bitmap around x'.
        | otherwise = tip kx' (bm .&. lowerBitmap) :*: tip kx' (bm .&. higherBitmap)
            where lowerBitmap = bitmapOf x' - 1
                  higherBitmap = complement (lowerBitmap + bitmapOf x')
    go _ Nil = (Nil :*: Nil)
-- | /O(min(n,W))/. Performs a 'split' but also returns whether the pivot
-- element was found in the original set.
splitMember :: Key -> IntSet -> (IntSet,Bool,IntSet)
splitMember x t =
  case t of
      -- Negative root mask: r holds the negative keys (cf. 'findMin'),
      -- so recombine the untouched half after splitting the other.
      Bin _ m l r | m < 0 -> if x >= 0
                             then case go x l of
                                 (lt, fnd, gt) -> let lt' = union lt r
                                                  in lt' `seq` (lt', fnd, gt)
                             else case go x r of
                                 (lt, fnd, gt) -> let gt' = union gt l
                                                  in gt' `seq` (lt, fnd, gt')
      _ -> go x t
  where
    go x' t'@(Bin p m l r)
        | match x' p m = if zero x' m
                         then case go x' l of
                             (lt, fnd, gt) -> (lt, fnd, union gt r)
                         else case go x' r of
                             (lt, fnd, gt) -> (union lt l, fnd, gt)
        | otherwise = if x' < p then (Nil, False, t') else (t', False, Nil)
    go x' t'@(Tip kx' bm)
        | kx' > x'          = (Nil, False, t')
          -- equivalent to kx' > prefixOf x'
        | kx' < prefixOf x' = (t', False, Nil)
        -- Pivot falls inside this tip: split the bitmap and test the pivot bit.
        | otherwise = let lt = tip kx' (bm .&. lowerBitmap)
                          found = (bm .&. bitmapOfx') /= 0
                          gt = tip kx' (bm .&. higherBitmap)
                      in lt `seq` found `seq` gt `seq` (lt, found, gt)
            where bitmapOfx' = bitmapOf x'
                  lowerBitmap = bitmapOfx' - 1
                  higherBitmap = complement (lowerBitmap + bitmapOfx')
    go _ Nil = (Nil, False, Nil)
{--------------------------------------------------------------------
  Min/Max
--------------------------------------------------------------------}
-- | /O(min(n,W))/. Retrieves the maximal key of the set, and the set
-- stripped of that element, or 'Nothing' if passed an empty set.
maxView :: IntSet -> Maybe (Key, IntSet)
maxView t =
  case t of Nil -> Nothing
            -- Negative root mask: the maximum is non-negative and lives in l
            -- (r holds the negative keys, cf. 'findMax').
            Bin p m l r | m < 0 -> case go l of (result, l') -> Just (result, bin p m l' r)
            _ -> Just (go t)
  where
    -- Walk to the rightmost tip, clear its highest bit, rebuild on the way up.
    go (Bin p m l r) = case go r of (result, r') -> (result, bin p m l r')
    go (Tip kx bm) = case highestBitSet bm of bi -> (kx + bi, tip kx (bm .&. complement (bitmapOfSuffix bi)))
    go Nil = error "maxView Nil"
-- | /O(min(n,W))/. Retrieves the minimal key of the set, and the set
-- stripped of that element, or 'Nothing' if passed an empty set.
minView :: IntSet -> Maybe (Key, IntSet)
minView t =
  case t of Nil -> Nothing
            -- Negative root mask: the minimum is negative and lives in r
            -- (cf. 'findMin').
            Bin p m l r | m < 0 -> case go r of (result, r') -> Just (result, bin p m l r')
            _ -> Just (go t)
  where
    -- Walk to the leftmost tip, clear its lowest bit, rebuild on the way up.
    go (Bin p m l r) = case go l of (result, l') -> (result, bin p m l' r)
    go (Tip kx bm) = case lowestBitSet bm of bi -> (kx + bi, tip kx (bm .&. complement (bitmapOfSuffix bi)))
    go Nil = error "minView Nil"
-- | /O(min(n,W))/. Delete and find the minimal element.
--
-- > deleteFindMin set = (findMin set, deleteMin set)
deleteFindMin :: IntSet -> (Key, IntSet)
deleteFindMin s =
  case minView s of
    Just r  -> r
    Nothing -> error "deleteFindMin: empty set has no minimal element"
-- | /O(min(n,W))/. Delete and find the maximal element.
--
-- > deleteFindMax set = (findMax set, deleteMax set)
deleteFindMax :: IntSet -> (Key, IntSet)
deleteFindMax s =
  case maxView s of
    Just r  -> r
    Nothing -> error "deleteFindMax: empty set has no maximal element"
-- | /O(min(n,W))/. The minimal element of the set.
findMin :: IntSet -> Key
findMin Nil = error "findMin: empty set has no minimal element"
findMin (Tip kx bm) = kx + lowestBitSet bm
findMin (Bin _ m l r)
    -- Negative mask: the negative keys live in r, so the minimum is there.
  | m < 0 = find r
  | otherwise = find l
    where find (Tip kx bm) = kx + lowestBitSet bm
          find (Bin _ _ l' _) = find l'
          find Nil = error "findMin Nil"
-- | /O(min(n,W))/. The maximal element of a set.
findMax :: IntSet -> Key
findMax Nil = error "findMax: empty set has no maximal element"
findMax (Tip kx bm) = kx + highestBitSet bm
findMax (Bin _ m l r)
    -- Negative mask: the non-negative keys live in l, so the maximum is there.
  | m < 0 = find l
  | otherwise = find r
    where find (Tip kx bm) = kx + highestBitSet bm
          find (Bin _ _ _ r') = find r'
          find Nil = error "findMax Nil"
-- | /O(min(n,W))/. Delete the minimal element. Returns an empty set if the set is empty.
--
-- Note that this is a change of behaviour for consistency with 'Data.Set.Set' &#8211;
-- versions prior to 0.5 threw an error if the 'IntSet' was already empty.
deleteMin :: IntSet -> IntSet
deleteMin s =
  case minView s of
    Nothing      -> Nil
    Just (_, s') -> s'
-- | /O(min(n,W))/. Delete the maximal element. Returns an empty set if the set is empty.
--
-- Note that this is a change of behaviour for consistency with 'Data.Set.Set' &#8211;
-- versions prior to 0.5 threw an error if the 'IntSet' was already empty.
deleteMax :: IntSet -> IntSet
deleteMax s =
  case maxView s of
    Nothing      -> Nil
    Just (_, s') -> s'
{----------------------------------------------------------------------
Map
----------------------------------------------------------------------}
-- | /O(n*min(n,W))/.
-- @'map' f s@ is the set obtained by applying @f@ to each element of @s@.
--
-- It's worth noting that the size of the result may be smaller if,
-- for some @(x,y)@, @x \/= y && f x == f y@
map :: (Key -> Key) -> IntSet -> IntSet
map f s = fromList (List.map f (toList s))
{--------------------------------------------------------------------
  Fold
--------------------------------------------------------------------}
-- | /O(n)/. Fold the elements in the set using the given right-associative
-- binary operator. This function is an equivalent of 'foldr' and is present
-- for compatibility only.
--
-- /Please note that fold will be deprecated in the future and removed./
-- Legacy alias of 'foldr'.
fold :: (Key -> b -> b) -> b -> IntSet -> b
fold = foldr
{-# INLINE fold #-}
-- | /O(n)/. Fold the elements in the set using the given right-associative
-- binary operator, such that @'foldr' f z == 'Prelude.foldr' f z . 'toList'@.
--
-- For example,
--
-- > toAscList set = foldr (:) [] set
foldr :: (Key -> b -> b) -> b -> IntSet -> b
foldr f z = \t ->      -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z l) r -- put negative numbers before
                        | otherwise -> go (go z r) l
            _ -> go z t
  where
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldrBits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' r) l
{-# INLINE foldr #-}
-- | /O(n)/. A strict version of 'foldr'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldr' :: (Key -> b -> b) -> b -> IntSet -> b
foldr' f z = \t ->      -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z l) r -- put negative numbers before
                        | otherwise -> go (go z r) l
            _ -> go z t
  where
    -- The first guard forces the accumulator, making the fold strict.
    go arg _ | arg `seq` False = undefined
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldr'Bits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' r) l
{-# INLINE foldr' #-}
-- | /O(n)/. Fold the elements in the set using the given left-associative
-- binary operator, such that @'foldl' f z == 'Prelude.foldl' f z . 'toList'@.
--
-- For example,
--
-- > toDescList set = foldl (flip (:)) [] set
foldl :: (a -> Key -> a) -> a -> IntSet -> a
foldl f z = \t ->      -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z r) l -- put negative numbers before
                        | otherwise -> go (go z l) r
            _ -> go z t
  where
    go arg _ | arg `seq` False = undefined
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldlBits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' l) r
{-# INLINE foldl #-}
-- | /O(n)/. A strict version of 'foldl'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldl' :: (a -> Key -> a) -> a -> IntSet -> a
foldl' f z = \t ->      -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z r) l -- put negative numbers before
                        | otherwise -> go (go z l) r
            _ -> go z t
  where
    -- The first guard forces the accumulator, making the fold strict.
    go arg _ | arg `seq` False = undefined
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldl'Bits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' l) r
{-# INLINE foldl' #-}
{--------------------------------------------------------------------
List variations
--------------------------------------------------------------------}
-- | /O(n)/. An alias of 'toAscList'. The elements of a set in ascending order.
-- Subject to list fusion.
elems :: IntSet -> [Key]
elems
= toAscList
{--------------------------------------------------------------------
Lists
--------------------------------------------------------------------}
instance GHCExts.IsList IntSet where
type Item IntSet = Key
fromList = fromList
toList = toList
-- | /O(n)/. Convert the set to a list of elements. Subject to list fusion.
toList :: IntSet -> [Key]
toList
= toAscList
-- | /O(n)/. Convert the set to an ascending list of elements. Subject to list
-- fusion.
toAscList :: IntSet -> [Key]
toAscList = foldr (:) []
-- | /O(n)/. Convert the set to a descending list of elements. Subject to list
-- fusion.
toDescList :: IntSet -> [Key]
toDescList = foldl (flip (:)) []
-- List fusion for the list generating functions.
-- The foldrFB and foldlFB are foldr and foldl equivalents, used for list fusion.
-- They are important to convert unfused to{Asc,Desc}List back, see mapFB in prelude.
-- 'foldr' equivalent used for list fusion (see the RULES pragmas in this
-- file); the phase-0 INLINE lets the back-conversion rules fire first.
foldrFB :: (Key -> b -> b) -> b -> IntSet -> b
foldrFB = foldr
{-# INLINE[0] foldrFB #-}
-- 'foldl' equivalent used for list fusion (see the RULES pragmas in this
-- file); the phase-0 INLINE lets the back-conversion rules fire first.
foldlFB :: (a -> Key -> a) -> a -> IntSet -> a
foldlFB = foldl
{-# INLINE[0] foldlFB #-}
-- Inline elems and toList, so that we need to fuse only toAscList.
{-# INLINE elems #-}
{-# INLINE toList #-}
-- The fusion is enabled up to phase 2 included. If it does not succeed,
-- convert in phase 1 the expanded to{Asc,Desc}List calls back to
-- to{Asc,Desc}List. In phase 0, we inline fold{lr}FB (which were used in
-- a list fusion, otherwise it would go away in phase 1), and let compiler do
-- whatever it wants with to{Asc,Desc}List -- it was forbidden to inline it
-- before phase 0, otherwise the fusion rules would not fire at all.
{-# NOINLINE[0] toAscList #-}
{-# NOINLINE[0] toDescList #-}
{-# RULES "IntSet.toAscList" [~1] forall s . toAscList s = build (\c n -> foldrFB c n s) #-}
{-# RULES "IntSet.toAscListBack" [1] foldrFB (:) [] = toAscList #-}
{-# RULES "IntSet.toDescList" [~1] forall s . toDescList s = build (\c n -> foldlFB (\xs x -> x : xs) n s) #-}
{-# RULES "IntSet.toDescListBack" [1] foldlFB (\xs x -> x : xs) [] = toDescList #-}
-- | /O(n*min(n,W))/. Create a set from a list of integers.
fromList :: [Key] -> IntSet
fromList xs = foldlStrict (flip insert) empty xs
-- | /O(n)/. Build a set from an ascending list of elements.
-- /The precondition (input list is ascending) is not checked./
fromAscList :: [Key] -> IntSet
fromAscList []        = Nil
fromAscList (x0:xs0)  = fromDistinctAscList (dedup x0 xs0)
  where
    -- Collapse runs of equal adjacent elements (the input is ascending,
    -- so duplicates are always adjacent).
    dedup prev []     = [prev]
    dedup prev (y:ys)
      | prev == y     = dedup prev ys
      | otherwise     = prev : dedup y ys
-- | /O(n)/. Build a set from an ascending list of distinct elements.
-- /The precondition (input list is strictly ascending) is not checked./
fromDistinctAscList :: [Key] -> IntSet
fromDistinctAscList []         = Nil
fromDistinctAscList (z0 : zs0) = work (prefixOf z0) (bitmapOf z0) zs0 Nada
  where
    -- 'work' accumulates all values that go into one tip, before passing this Tip
    -- to 'reduce'
    work kx bm []     stk = finish kx (Tip kx bm) stk
    work kx bm (z:zs) stk | kx == prefixOf z = work kx (bm .|. bitmapOf z) zs stk
    work kx bm (z:zs) stk = reduce z zs (branchMask z kx) kx (Tip kx bm) stk

    -- Merge completed tips from the stack while their branch masks are
    -- shorter than the new branch point, then resume 'work'.
    reduce z zs _ px tx Nada = work (prefixOf z) (bitmapOf z) zs (Push px tx Nada)
    reduce z zs m px tx stk@(Push py ty stk') =
        let mxy = branchMask px py
            pxy = mask px mxy
        in if shorter m mxy
               then reduce z zs m pxy (Bin pxy mxy ty tx) stk'
               else work (prefixOf z) (bitmapOf z) zs (Push px tx stk)

    -- Collapse the remaining stack into the final tree.
    finish _  t  Nada             = t
    finish px tx (Push py ty stk) = finish p (link py ty px tx) stk
        where m = branchMask px py
              p = mask px m
data Stack = Push {-# UNPACK #-} !Prefix !IntSet !Stack | Nada
{--------------------------------------------------------------------
  Eq
--------------------------------------------------------------------}
instance Eq IntSet where
t1 == t2 = equal t1 t2
t1 /= t2 = nequal t1 t2
equal :: IntSet -> IntSet -> Bool
equal (Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
= (m1 == m2) && (p1 == p2) && (equal l1 l2) && (equal r1 r2)
equal (Tip kx1 bm1) (Tip kx2 bm2)
= kx1 == kx2 && bm1 == bm2
equal Nil Nil = True
equal _ _ = False
nequal :: IntSet -> IntSet -> Bool
nequal (Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
= (m1 /= m2) || (p1 /= p2) || (nequal l1 l2) || (nequal r1 r2)
nequal (Tip kx1 bm1) (Tip kx2 bm2)
= kx1 /= kx2 || bm1 /= bm2
nequal Nil Nil = False
nequal _ _ = True
{--------------------------------------------------------------------
  Ord
--------------------------------------------------------------------}
instance Ord IntSet where
compare s1 s2 = compare (toAscList s1) (toAscList s2)
-- tentative implementation. See if more efficient exists.
{--------------------------------------------------------------------
Show
--------------------------------------------------------------------}
instance Show IntSet where
showsPrec p xs = showParen (p > 10) $
showString "fromList " . shows (toList xs)
{--------------------------------------------------------------------
Read
--------------------------------------------------------------------}
instance Read IntSet where
readPrec = parens $ prec 10 $ do
Ident "fromList" <- lexP
xs <- readPrec
return (fromList xs)
readListPrec = readListPrecDefault
{--------------------------------------------------------------------
  Typeable
--------------------------------------------------------------------}
deriving instance Typeable IntSet
{--------------------------------------------------------------------
NFData
--------------------------------------------------------------------}
-- The constructors consist only of strict fields of Ints and
-- IntSets, thus the default NFData instance which evaluates to whnf
-- should suffice
instance NFData IntSet where rnf x = seq x ()
{--------------------------------------------------------------------
Debugging
--------------------------------------------------------------------}
-- | /O(n)/. Show the tree that implements the set. The tree is shown
-- in a compressed, hanging format.
showTree :: IntSet -> String
showTree = showTreeWith True False
{- | /O(n)/. The expression (@'showTreeWith' hang wide map@) shows
 the tree that implements the set. If @hang@ is
 'True', a /hanging/ tree is shown otherwise a rotated tree is shown. If
 @wide@ is 'True', an extra wide version is shown.
-}
showTreeWith :: Bool -> Bool -> IntSet -> String
showTreeWith hang wide t
| hang = (showsTreeHang wide [] t) ""
| otherwise = (showsTree wide [] [] t) ""
showsTree :: Bool -> [String] -> [String] -> IntSet -> ShowS
showsTree wide lbars rbars t
= case t of
Bin p m l r
-> showsTree wide (withBar rbars) (withEmpty rbars) r .
showWide wide rbars .
showsBars lbars . showString (showBin p m) . showString "\n" .
showWide wide lbars .
showsTree wide (withEmpty lbars) (withBar lbars) l
Tip kx bm
-> showsBars lbars . showString " " . shows kx . showString " + " .
showsBitMap bm . showString "\n"
Nil -> showsBars lbars . showString "|\n"
showsTreeHang :: Bool -> [String] -> IntSet -> ShowS
showsTreeHang wide bars t
= case t of
Bin p m l r
-> showsBars bars . showString (showBin p m) . showString "\n" .
showWide wide bars .
showsTreeHang wide (withBar bars) l .
showWide wide bars .
showsTreeHang wide (withEmpty bars) r
Tip kx bm
-> showsBars bars . showString " " . shows kx . showString " + " .
showsBitMap bm . showString "\n"
Nil -> showsBars bars . showString "|\n"
showBin :: Prefix -> Mask -> String
showBin _ _
= "*" -- ++ show (p,m)
showWide :: Bool -> [String] -> String -> String
showWide wide bars
| wide = showString (concat (reverse bars)) . showString "|\n"
| otherwise = id
showsBars :: [String] -> ShowS
showsBars bars
= case bars of
[] -> id
_ -> showString (concat (reverse (tail bars))) . showString node
showsBitMap :: Word -> ShowS
showsBitMap = showString . showBitMap
showBitMap :: Word -> String
showBitMap w = show $ foldrBits 0 (:) [] w
node :: String
node = "+--"
withBar, withEmpty :: [String] -> [String]
withBar bars = "| ":bars
withEmpty bars = " ":bars
{--------------------------------------------------------------------
Helpers
--------------------------------------------------------------------}
{--------------------------------------------------------------------
Link
--------------------------------------------------------------------}
-- Join two subtrees with distinct prefixes under a fresh branch node,
-- ordering them by the bit at which the prefixes first disagree.
link :: Prefix -> IntSet -> Prefix -> IntSet -> IntSet
link p1 t1 p2 t2
  | zero p1 m = Bin p m t1 t2
  | otherwise = Bin p m t2 t1
  where
    m = branchMask p1 p2
    p = mask p1 m
{-# INLINE link #-}
{--------------------------------------------------------------------
@bin@ assures that we never have empty trees within a tree.
--------------------------------------------------------------------}
bin :: Prefix -> Mask -> IntSet -> IntSet -> IntSet
bin _ _ l Nil = l
bin _ _ Nil r = r
bin p m l r   = Bin p m l r
{-# INLINE bin #-}
{--------------------------------------------------------------------
@tip@ assures that we never have empty bitmaps within a tree.
--------------------------------------------------------------------}
tip :: Prefix -> BitMap -> IntSet
tip _ 0   = Nil
tip kx bm = Tip kx bm
{-# INLINE tip #-}
{--------------------------------------------------------------------
  Functions that generate Prefix and BitMap of a Key or a Suffix.
--------------------------------------------------------------------}
-- Mask selecting the low bits of a key that index into a tip's bitmap
-- (W-1, i.e. 63 on a 64-bit platform).
suffixBitMask :: Int
suffixBitMask = finiteBitSize (undefined::Word) - 1
{-# INLINE suffixBitMask #-}
-- Mask selecting the high bits of a key shared by all members of a tip.
prefixBitMask :: Int
prefixBitMask = complement suffixBitMask
{-# INLINE prefixBitMask #-}
prefixOf :: Int -> Prefix
prefixOf x = x .&. prefixBitMask
{-# INLINE prefixOf #-}
suffixOf :: Int -> Int
suffixOf x = x .&. suffixBitMask
{-# INLINE suffixOf #-}
bitmapOfSuffix :: Int -> BitMap
bitmapOfSuffix s = 1 `shiftLL` s
{-# INLINE bitmapOfSuffix #-}
bitmapOf :: Int -> BitMap
bitmapOf x = bitmapOfSuffix (suffixOf x)
{-# INLINE bitmapOf #-}
{--------------------------------------------------------------------
  Endian independent bit twiddling
--------------------------------------------------------------------}
-- True when i's mask bit is clear, i.e. i belongs in the left subtree.
zero :: Int -> Mask -> Bool
zero i m
  = (natFromInt i) .&. (natFromInt m) == 0
{-# INLINE zero #-}
nomatch,match :: Int -> Prefix -> Mask -> Bool
-- True when i does NOT share the prefix p (up to the mask bit m).
nomatch i p m
  = (mask i m) /= p
{-# INLINE nomatch #-}

-- True when i shares the prefix p (up to the mask bit m).
match i p m
  = (mask i m) == p
{-# INLINE match #-}
-- Suppose a is largest such that 2^a divides 2*m.
-- Then mask i m is i with the low a bits zeroed out.
mask :: Int -> Mask -> Prefix
mask i m
  = maskW (natFromInt i) (natFromInt m)
{-# INLINE mask #-}
{--------------------------------------------------------------------
Big endian operations
--------------------------------------------------------------------}
-- Clear all bits of i at and below the mask bit m.
maskW :: Nat -> Nat -> Prefix
maskW i m
  = intFromNat (i .&. (complement (m-1) `xor` m))
{-# INLINE maskW #-}
-- m1 denotes a shorter prefix than m2 when its (unsigned) mask bit is higher.
shorter :: Mask -> Mask -> Bool
shorter m1 m2
  = (natFromInt m1) > (natFromInt m2)
{-# INLINE shorter #-}
-- The highest bit on which the two prefixes disagree.
branchMask :: Prefix -> Prefix -> Mask
branchMask p1 p2
  = intFromNat (highestBitMask (natFromInt p1 `xor` natFromInt p2))
{-# INLINE branchMask #-}
{--------------------------------------------------------------------
  To get best performance, we provide fast implementations of
  lowestBitSet, highestBitSet and fold[lr][l]Bits for GHC.
  If the intel bsf and bsr instructions ever become GHC primops,
  this code should be reimplemented using these.

  Performance of this code is crucial for folds, toList, filter, partition.

  The signatures of methods in question are placed after this comment.
--------------------------------------------------------------------}
lowestBitSet :: Nat -> Int
highestBitSet :: Nat -> Int
foldlBits :: Int -> (a -> Int -> a) -> a -> Nat -> a
foldl'Bits :: Int -> (a -> Int -> a) -> a -> Nat -> a
foldrBits :: Int -> (Int -> a -> a) -> a -> Nat -> a
foldr'Bits :: Int -> (Int -> a -> a) -> a -> Nat -> a
{-# INLINE lowestBitSet #-}
{-# INLINE highestBitSet #-}
{-# INLINE foldlBits #-}
{-# INLINE foldl'Bits #-}
{-# INLINE foldrBits #-}
{-# INLINE foldr'Bits #-}
{--------------------------------------------------------------------
  For lowestBitSet we use wordsize-dependant implementation based on
  multiplication and DeBrujn indeces, which was proposed by Edward Kmett
  <http://haskell.org/pipermail/libraries/2011-September/016749.html>

  The core of this implementation is fast indexOfTheOnlyBit,
  which is given a Nat with exactly one bit set, and returns
  its index.

  Lot of effort was put in these implementations, please benchmark carefully
  before changing this code.
--------------------------------------------------------------------}
-- Given a Nat with exactly one bit set, return that bit's index using a
-- DeBruijn-style multiply-and-lookup: the magic multiplier and the byte
-- table together map each single-bit word to its bit position.
indexOfTheOnlyBit :: Nat -> Int
{-# INLINE indexOfTheOnlyBit #-}
indexOfTheOnlyBit bitmask =
  I# (lsbArray `indexInt8OffAddr#` unboxInt (intFromNat ((bitmask * magic) `shiftRL` offset)))
  where unboxInt (I# i) = i
        -- 64-bit DeBruijn constant; shifting by 58 keeps the top 6 bits.
        magic = 0x07EDD5E59A4E28C2
        offset = 58
        !lsbArray = "\63\0\58\1\59\47\53\2\60\39\48\27\54\33\42\3\61\51\37\40\49\18\28\20\55\30\34\11\43\14\22\4\62\57\46\52\38\26\32\41\50\36\17\19\29\10\13\21\56\45\25\31\35\16\9\12\44\24\15\8\23\7\6\5"#
-- The lsbArray gets inlined to every call site of indexOfTheOnlyBit.
-- That cannot be easily avoided, as GHC forbids top-level Addr# literal.
-- One could go around that by supplying getLsbArray :: () -> Addr# marked
-- as NOINLINE. But the code size of calling it and processing the result
-- is 48B on 32-bit and 56B on 64-bit architectures -- so the 32B and 64B array
-- is actually improvement on 32-bit and only a 8B size increase on 64-bit.
-- Isolate the lowest set bit of x (two's-complement trick: x .&. (-x)).
lowestBitMask :: Nat -> Nat
lowestBitMask x = x .&. negate x
{-# INLINE lowestBitMask #-}
-- Reverse the order of bits in the Nat.
-- Reverse the bit order of a 64-bit word by swapping progressively larger
-- groups: single bits, pairs, nibbles, bytes, 16-bit halves, 32-bit halves.
revNat :: Nat -> Nat
revNat x1 = case ((x1 `shiftRL` 1) .&. 0x5555555555555555) .|. ((x1 .&. 0x5555555555555555) `shiftLL` 1) of
              x2 -> case ((x2 `shiftRL` 2) .&. 0x3333333333333333) .|. ((x2 .&. 0x3333333333333333) `shiftLL` 2) of
                 x3 -> case ((x3 `shiftRL` 4) .&. 0x0F0F0F0F0F0F0F0F) .|. ((x3 .&. 0x0F0F0F0F0F0F0F0F) `shiftLL` 4) of
                   x4 -> case ((x4 `shiftRL` 8) .&. 0x00FF00FF00FF00FF) .|. ((x4 .&. 0x00FF00FF00FF00FF) `shiftLL` 8) of
                     x5 -> case ((x5 `shiftRL` 16) .&. 0x0000FFFF0000FFFF) .|. ((x5 .&. 0x0000FFFF0000FFFF) `shiftLL` 16) of
                       x6 -> ( x6 `shiftRL` 32 ) .|. ( x6 `shiftLL` 32);
lowestBitSet x = indexOfTheOnlyBit (lowestBitMask x)

highestBitSet x = indexOfTheOnlyBit (highestBitMask x)

-- Left fold over the set bits of a tip bitmap, lowest bit first; each
-- visited element is (prefix + bit index).
foldlBits prefix f z bitmap = go bitmap z
  where go bm acc | bm == 0 = acc
                  | otherwise = case lowestBitMask bm of
                      bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
                        bi -> bi `seq` go (bm `xor` bitmask) ((f acc) $! (prefix+bi))

-- Strict variant of 'foldlBits': the first guard forces the accumulator.
foldl'Bits prefix f z bitmap = go bitmap z
  where go _ arg | arg `seq` False = undefined
        go bm acc | bm == 0 = acc
                  | otherwise = case lowestBitMask bm of
                      bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
                        bi -> bi `seq` go (bm `xor` bitmask) ((f $! (prefix+bi)) acc)

-- Right fold: runs over the bit-reversed bitmap so bits are consumed
-- lowest-first, mapping each index bi back to (prefix + 63 - bi).
foldrBits prefix f z bitmap = go (revNat bitmap) z
  where go bm acc | bm == 0 = acc
                  | otherwise = case lowestBitMask bm of
                      bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
                        bi -> bi `seq` go (bm `xor` bitmask) ((f $! (prefix+(64-1)-bi)) acc)

-- Strict variant of 'foldrBits'.
foldr'Bits prefix f z bitmap = go (revNat bitmap) z
  where go _ arg | arg `seq` False = undefined
        go bm acc | bm == 0 = acc
                  | otherwise = case lowestBitMask bm of
                      bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
                        bi -> bi `seq` go (bm `xor` bitmask) ((f $! (prefix+(64-1)-bi)) acc)
{--------------------------------------------------------------------
  [bitcount] as posted by David F. Place to haskell-cafe on April 11, 2006,
  based on the code on
  http://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetKernighan,
  where the following source is given:
    Published in 1988, the C Programming Language 2nd Ed. (by Brian W.
    Kernighan and Dennis M. Ritchie) mentions this in exercise 2-9. On April
    19, 2006 Don Knuth pointed out to me that this method "was first published
    by Peter Wegner in CACM 3 (1960), 322. (Also discovered independently by
    Derrick Lehmer and published in 1964 in a book edited by Beckenbach.)"
--------------------------------------------------------------------}
-- Add the population count of x to the accumulator a.
bitcount :: Int -> Word -> Int
bitcount a x = a + popCount x
{-# INLINE bitcount #-}
{--------------------------------------------------------------------
  Utilities
--------------------------------------------------------------------}
-- | /O(1)/. Decompose a set into pieces based on the structure of the underlying
-- tree. This function is useful for consuming a set in parallel.
--
-- No guarantee is made as to the sizes of the pieces; an internal, but
-- deterministic process determines this. However, it is guaranteed that the
-- pieces returned will be in ascending order (all elements in the first submap
-- less than all elements in the second, and so on).
--
-- Examples:
--
-- > splitRoot (fromList [1..120]) == [fromList [1..63],fromList [64..120]]
-- > splitRoot empty == []
--
-- Note that the current implementation does not return more than two subsets,
-- but you should not depend on this behaviour because it can change in the
-- future without notice. Also, the current version does not continue
-- splitting all the way to individual singleton sets -- it stops at some
-- point.
splitRoot :: IntSet -> [IntSet]
splitRoot orig =
  case orig of
    Nil -> []
    -- NOTE: we don't currently split below Tip, but we could.
    x@(Tip _ _) -> [x]
    -- A negative mask means r holds the negative keys, which come first
    -- in ascending order.
    Bin _ m l r | m < 0 -> [r, l]
                | otherwise -> [l, r]
{-# INLINE splitRoot #-}
| null | https://raw.githubusercontent.com/fragnix/fragnix/b9969e9c6366e2917a782f3ac4e77cce0835448b/benchmarks/containers/Data.IntSet.Base.hs | haskell | # LINE 1 "Data/IntSet/Base.hs" #
---------------------------------------------------------------------------
|
Module : Data.IntSet.Base
License : BSD-style
Maintainer :
Stability : provisional
Portability : portable
An efficient implementation of integer sets.
These modules are intended to be imported qualified, to avoid name
clashes with Prelude functions, e.g.
The implementation is based on /big-endian patricia trees/. This data
structure performs especially well on binary operations like 'union'
and 'intersection'. However, my benchmarks show that it is also
(much) faster on insertions and deletions when compared to a generic
size-balanced set implementation (see "Data.Set").
<>
Practical Algorithm To Retrieve
Additionally, this implementation places bitmaps in the leaves of the tree.
reduce memory footprint and execution times for dense sets, e.g. sets where
it is likely that many values lie close to each other. The asymptotics are
not affected by this optimization.
This means that the operation can become linear in the number of
elements with a maximum of /W/ -- the number of bits in an 'Int'
---------------------------------------------------------------------------
[Note: INLINE bit fiddling]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
usually gets it right, but it is disastrous if it does not. Therefore we
explicitly mark these functions INLINE.
[Note: Local 'go' functions and capturing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Care must be taken when using 'go' function which captures an argument.
Sometimes (for example when the argument is passed to a data constructor,
code
must be checked for increased allocation when creating and modifying such
functions.
[Note: Order of constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
the best performance is achieved when the
constructors are ordered by frequency.
* Set type
* Operators
* Query
* Construction
* Combine
* Filter
* Map
* Folds
** Strict folds
** Legacy folds
* Conversion
** List
** Ordered list
* Debugging
* Internals
-----------------------------------------------------------------
-----------------------------------------------------------------
------------------------------------------------------------------}
| /O(n+m)/. See 'difference'.
-------------------------------------------------------------------
Types
-------------------------------------------------------------------
| A set of integers.
See Note: Order of constructors
# UNPACK #
# UNPACK #
Invariant: Prefix is the common high-order bits that all elements share to
the left of the Mask bit.
don't have the mask bit set; right is all the elements that do.
# UNPACK #
# UNPACK #
are the prefix plus the indices of the set bits in the bit map.
A number stored in a set is stored as
-------------------------------------------------------------------
A Data instance
-------------------------------------------------------------------
This instance preserves data abstraction at the cost of inefficiency.
We provide limited reflection services for the sake of data abstraction.
-------------------------------------------------------------------
Query
-------------------------------------------------------------------
| /O(1)/. Is the set empty?
| /O(n)/. Cardinality of the set.
| /O(min(n,W))/. Is the value a member of the set?
See Note: Local 'go' functions and capturing]
| /O(min(n,W))/. Is the element not in the set?
| /O(log n)/. Find largest element smaller than the given one.
See Note: Local 'go' functions and capturing.
| /O(log n)/. Find smallest element greater than the given one.
See Note: Local 'go' functions and capturing.
| /O(log n)/. Find largest element smaller or equal to the given one.
See Note: Local 'go' functions and capturing.
| /O(log n)/. Find smallest element greater or equal to the given one.
See Note: Local 'go' functions and capturing.
given, it has m > 0.
given, it has m > 0.
-------------------------------------------------------------------
Construction
-------------------------------------------------------------------
| /O(1)/. The empty set.
# INLINE empty #
-------------------------------------------------------------------
Insert
-------------------------------------------------------------------
| /O(min(n,W))/. Add a value to the set. There is no left- or right bias for
IntSets.
Helper function for insert and union.
| /O(min(n,W))/. Delete a value in the set. Returns the
original set when the value was not present.
Helper function for delete and difference.
-------------------------------------------------------------------
Union
-------------------------------------------------------------------
| The union of a list of sets.
-------------------------------------------------------------------
Difference
-------------------------------------------------------------------
-------------------------------------------------------------------
Intersection
-------------------------------------------------------------------
-------------------------------------------------------------------
Subset
-------------------------------------------------------------------
disjoint
disjoint
disjoint
-------------------------------------------------------------------
Filter
-------------------------------------------------------------------
| /O(n)/. Filter all elements that satisfy some predicate.
| /O(n)/. partition the set according to some predicate.
| /O(min(n,W))/. The expression (@'split' x set@) is a pair @(set1,set2)@
handle negative numbers.
equivalent to kx' > prefixOf x'
| /O(min(n,W))/. Performs a 'split' but also returns whether the pivot
element was found in the original set.
equivalent to kx' > prefixOf x'
-------------------------------------------------------------------
-------------------------------------------------------------------
--------------------------------------------------------------------}
| /O(min(n,W))/. Retrieves the maximal key of the set, and the set
stripped of that element, or 'Nothing' if passed an empty set.
| /O(min(n,W))/. Retrieves the minimal key of the set, and the set
stripped of that element, or 'Nothing' if passed an empty set.
| /O(min(n,W))/. Delete and find the minimal element.
> deleteFindMin set = (findMin set, deleteMin set)
| /O(min(n,W))/. Delete and find the maximal element.
| /O(min(n,W))/. The minimal element of the set.
| /O(min(n,W))/. The maximal element of a set.
| /O(min(n,W))/. Delete the minimal element. Returns an empty set if the set is empty.
| /O(min(n,W))/. Delete the maximal element. Returns an empty set if the set is empty.
---------------------------------------------------------------------
Map
---------------------------------------------------------------------
| /O(n*min(n,W))/.
It's worth noting that the size of the result may be smaller if,
-----------------------------------------------------------------
-----------------------------------------------------------------
------------------------------------------------------------------}
| /O(n)/. Fold the elements in the set using the given right-associative
binary operator. This function is an equivalent of 'foldr' and is present
for compatibility only.
| /O(n)/. Fold the elements in the set using the given right-associative
For example,
> toAscList set = foldr (:) [] set
put negative numbers before
| /O(n)/. A strict version of 'foldr'. Each application of the operator is
evaluated before using the result in the next application. This
function is strict in the starting value.
put negative numbers before
| /O(n)/. Fold the elements in the set using the given left-associative
For example,
> toDescList set = foldl (flip (:)) [] set
put negative numbers before
# INLINE foldl #
| /O(n)/. A strict version of 'foldl'. Each application of the operator is
evaluated before using the result in the next application. This
function is strict in the starting value.
put negative numbers before
# INLINE foldl' #
-------------------------------------------------------------------
List variations
-------------------------------------------------------------------
| /O(n)/. An alias of 'toAscList'. The elements of a set in ascending order.
Subject to list fusion.
-------------------------------------------------------------------
Lists
-------------------------------------------------------------------
| /O(n)/. Convert the set to a list of elements. Subject to list fusion.
| /O(n)/. Convert the set to an ascending list of elements. Subject to list
fusion.
| /O(n)/. Convert the set to a descending list of elements. Subject to list
fusion.
List fusion for the list generating functions.
# INLINE[0] foldrFB #
to{Asc,Desc}List. In phase 0, we inline fold{lr}FB (which were used in
whatever it wants with to{Asc,Desc}List -- it was forbidden to inline it
before phase 0, otherwise the fusion rules would not fire at all.
# NOINLINE[0] toAscList #
# NOINLINE[0] toDescList #
| /O(n)/. Build a set from an ascending list of elements.
| /O(n)/. Build a set from an ascending list of distinct elements.
to 'reduce'
# UNPACK #
-----------------------------------------------------------------
-----------------------------------------------------------------
------------------------------------------------------------------}
-----------------------------------------------------------------
-----------------------------------------------------------------
------------------------------------------------------------------}
tentative implementation. See if more efficient exists.
-------------------------------------------------------------------
Show
-------------------------------------------------------------------
-------------------------------------------------------------------
Read
-------------------------------------------------------------------
-----------------------------------------------------------------
-----------------------------------------------------------------
------------------------------------------------------------------}
-------------------------------------------------------------------
NFData
-------------------------------------------------------------------
IntSets, thus the default NFData instance which evaluates to whnf
should suffice
-------------------------------------------------------------------
Debugging
-------------------------------------------------------------------
| /O(n)/. Show the tree that implements the set. The tree is shown
in a compressed, hanging format.
++ show (p,m)
-------------------------------------------------------------------
Helpers
-------------------------------------------------------------------
-------------------------------------------------------------------
Link
-------------------------------------------------------------------
-------------------------------------------------------------------
@bin@ assures that we never have empty trees within a tree.
-------------------------------------------------------------------
-------------------------------------------------------------------
@tip@ assures that we never have empty bitmaps within a tree.
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
--------------------------------------------------------------------}
-----------------------------------------------------------------
-----------------------------------------------------------------
------------------------------------------------------------------}
Then mask i m is i with the low a bits zeroed out.
-------------------------------------------------------------------
Big endian operations
-------------------------------------------------------------------
-------------------------------------------------------------------
-------------------------------------------------------------------
--------------------------------------------------------------------}
-------------------------------------------------------------------
-------------------------------------------------------------------
--------------------------------------------------------------------}
# INLINE indexOfTheOnlyBit #
The lsbArray gets inlined to every call site of indexOfTheOnlyBit.
One could go around that by supplying getLsbArray :: () -> Addr# marked
so the 32B and 64B array
-------------------------------------------------------------------
-------------------------------------------------------------------
--------------------------------------------------------------------}
-----------------------------------------------------------------
-----------------------------------------------------------------
------------------------------------------------------------------}
tree. This function is useful for consuming a set in parallel.
No guarantee is made as to the sizes of the pieces; an internal, but
deterministic process determines this. However, it is guaranteed that the
Examples:
> splitRoot empty == []
but you should not depend on this behaviour because it can change in the
future without notice. Also, the current version does not continue
splitting all the way to individual singleton sets -- it stops at some
point.
NOTE: we don't currently split below Tip, but we could. | # LANGUAGE Haskell98 #
# LANGUAGE CPP #
# LANGUAGE MagicHash , BangPatterns , DeriveDataTypeable , StandaloneDeriving #
# LANGUAGE Trustworthy #
# LANGUAGE TypeFamilies #
Copyright : ( c ) 2002
( c ) 2011
> import Data . IntSet ( )
> import qualified Data . IntSet as
    * Chris Okasaki and Andy Gill,  \"/Fast Mergeable Integer Maps/\",
      Workshop on ML, September 1998, pages 77-86,
    * D.R. Morrison, \"/PATRICIA -- Practical Algorithm To Retrieve
      Information Coded In Alphanumeric/\", Journal of the ACM, 15(4),
      October 1968, pages 514-534.
Their size is the natural size of a machine word ( 32 or 64 bits ) and greatly
Many operations have a worst - case complexity of /O(min(n , W))/.
( 32 or 64 ) .
It is essential that the bit fiddling functions like mask , zero , branchMask
etc are inlined . If they do not , the memory allocation skyrockets . The GHC
The order of constructors of matters when considering performance .
Currently in GHC 7.0 , when type has 3 constructors , they are matched from
On GHC 7.0 , reordering constructors from Nil | Tip | to Bin | Tip | Nil
improves the benchmark by circa 10 % .
module Data.IntSet.Base (
instance Eq , Show
, (\\)
, null
, size
, member
, notMember
, lookupLT
, lookupGT
, lookupLE
, lookupGE
, isSubsetOf
, isProperSubsetOf
, empty
, singleton
, insert
, delete
, union
, unions
, difference
, intersection
, filter
, partition
, split
, splitMember
, splitRoot
, map
, foldr
, foldl
, foldr'
, foldl'
, fold
*
, findMin
, findMax
, deleteMin
, deleteMax
, deleteFindMin
, deleteFindMax
, maxView
, minView
, elems
, toList
, fromList
, toAscList
, toDescList
, fromAscList
, fromDistinctAscList
, showTree
, showTreeWith
, match
, suffixBitMask
, prefixBitMask
, bitmapOf
) where
import Control.DeepSeq (NFData(rnf))
import Data.Bits
import qualified Data.List as List
import Data.Maybe (fromMaybe)
import Data.Monoid (Monoid(..))
import Data.Word (Word)
import Data.Typeable
import Prelude hiding (filter, foldr, foldl, null, map)
import Data.Utils.BitUtil
import Data.Utils.StrictFold
import Data.Utils.StrictPair
import Data.Data (Data(..), Constr, mkConstr, constrIndex, Fixity(Prefix), DataType, mkDataType)
import Text.Read
import GHC.Exts (Int(..), build)
import qualified GHC.Exts as GHCExts
import GHC.Prim (indexInt8OffAddr#)
This comment teaches CPP correct behaviour
A " " is a natural machine word ( an unsigned Int )
-- A "Nat" is a natural machine word (an unsigned Int), used so that
-- prefix/mask comparisons can be done as unsigned big-endian comparisons.
type Nat = Word

-- | Reinterpret an 'Int' as an unsigned 'Nat' (bit pattern preserved).
natFromInt :: Int -> Nat
natFromInt i = fromIntegral i
{-# INLINE natFromInt #-}

-- | Inverse of 'natFromInt': reinterpret a 'Nat' as a signed 'Int'.
intFromNat :: Nat -> Int
intFromNat w = fromIntegral w
{-# INLINE intFromNat #-}
Operators
Operators
-- | /O(n+m)/. See 'difference'.
(\\) :: IntSet -> IntSet -> IntSet
(\\) = difference
Invariant : Nil is never found as a child of .
Invariant : The Mask is a power of 2 . It is the largest bit position at which
two elements of the set differ .
Invariant : In prefix mask left right , left consists of the elements that
Invariant : The Prefix is zero for all but the last 5 ( on 32 bit arches ) or 6
bits ( on 64 bit arches ) . The values of the map represented by a tip
| Nil
* Prefix ( all but last 5 - 6 bits ) and
* BitMap ( last 5 - 6 bits stored as a bitmask )
Last 5 - 6 bits are called a Suffix .
type Prefix = Int  -- the shared high-order bits of every element below a 'Bin'
type Mask = Int    -- a power of two: the bit at which a 'Bin' branches
type BitMap = Word -- bitmap of element suffixes stored in a 'Tip'
type Key = Int     -- the element type of the set
-- 'IntSet' forms a monoid under 'union', with 'empty' as the identity.
instance Monoid IntSet where
    mempty = empty
    mappend = union
    mconcat = unions
-- This instance preserves data abstraction at the cost of inefficiency:
-- the set is presented to generic traversals as a single pseudo-constructor
-- @fromList@ applied to the element list.
instance Data IntSet where
  gfoldl f z is = z fromList `f` (toList is)
  toConstr _ = fromListConstr
  gunfold k z c = case constrIndex c of
    1 -> k (z fromList)
    _ -> error "gunfold"
  dataTypeOf _ = intSetDataType
-- The single pseudo-constructor exposed through the 'Data' instance.
fromListConstr :: Constr
fromListConstr = mkConstr intSetDataType "fromList" [] Prefix

-- The reflected type representation used by the 'Data' instance.
intSetDataType :: DataType
intSetDataType = mkDataType "Data.IntSet.Base.IntSet" [fromListConstr]
-- | /O(1)/. Is the set empty?
null :: IntSet -> Bool
null Nil = True
null _   = False
{-# INLINE null #-}
-- | /O(n)/. Cardinality of the set.
size :: IntSet -> Int
size (Bin _ _ l r) = size l + size r
size (Tip _ bm)    = bitcount 0 bm
size Nil           = 0
-- | /O(min(n,W))/. Is the value a member of the set?
-- See Note: Local 'go' functions and capturing.
member :: Key -> IntSet -> Bool
member x = x `seq` go   -- force x once; 'go' captures it
  where
    go (Bin p m l r)
      | nomatch x p m = False      -- x cannot be in this subtree at all
      | zero x m      = go l
      | otherwise     = go r
    go (Tip y bm) = prefixOf x == y && bitmapOf x .&. bm /= 0
    go Nil = False
-- | /O(min(n,W))/. Is the element not in the set?
notMember :: Key -> IntSet -> Bool
notMember k s = not (member k s)
> lookupLT 3 ( fromList [ 3 , 5 ] ) = = Nothing
> lookupLT 5 ( fromList [ 3 , 5 ] ) = = Just 3
-- | /O(log n)/. Find largest element smaller than the given one.
--
-- > lookupLT 3 (fromList [3, 5]) == Nothing
-- > lookupLT 5 (fromList [3, 5]) == Just 3
lookupLT :: Key -> IntSet -> Maybe Key
lookupLT x t = x `seq` case t of
    -- A negative root mask means the set holds both signs; the negative
    -- keys live in the *right* subtree.
    Bin _ m l r | m < 0 -> if x >= 0 then go r l else go Nil r
    _ -> go Nil t
  where
    -- 'def' is the nearest subtree whose elements are all < x.
    go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMax def else unsafeFindMax r
                         | zero x m  = go def l
                         | otherwise = go l r
    go def (Tip kx bm) | prefixOf x > kx = Just $ kx + highestBitSet bm
                       | prefixOf x == kx && maskLT /= 0 = Just $ kx + highestBitSet maskLT
                       | otherwise = unsafeFindMax def
      where maskLT = (bitmapOf x - 1) .&. bm   -- bits strictly below x
    go def Nil = unsafeFindMax def
> lookupGT 4 ( fromList [ 3 , 5 ] ) = = Just 5
> lookupGT 5 ( fromList [ 3 , 5 ] ) = = Nothing
-- | /O(log n)/. Find smallest element greater than the given one.
--
-- > lookupGT 4 (fromList [3, 5]) == Just 5
-- > lookupGT 5 (fromList [3, 5]) == Nothing
lookupGT :: Key -> IntSet -> Maybe Key
lookupGT x t = x `seq` case t of
    -- Negative root mask: negatives are in the right subtree.
    Bin _ m l r | m < 0 -> if x >= 0 then go Nil l else go l r
    _ -> go Nil t
  where
    -- 'def' is the nearest subtree whose elements are all > x.
    go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMin l else unsafeFindMin def
                         | zero x m  = go r l
                         | otherwise = go def r
    go def (Tip kx bm) | prefixOf x < kx = Just $ kx + lowestBitSet bm
                       | prefixOf x == kx && maskGT /= 0 = Just $ kx + lowestBitSet maskGT
                       | otherwise = unsafeFindMin def
      where maskGT = (- ((bitmapOf x) `shiftLL` 1)) .&. bm   -- bits strictly above x
    go def Nil = unsafeFindMin def
> lookupLE 2 ( fromList [ 3 , 5 ] ) = = Nothing
> lookupLE 4 ( fromList [ 3 , 5 ] ) = = Just 3
> lookupLE 5 ( fromList [ 3 , 5 ] ) = = Just 5
-- | /O(log n)/. Find largest element smaller or equal to the given one.
--
-- > lookupLE 2 (fromList [3, 5]) == Nothing
-- > lookupLE 4 (fromList [3, 5]) == Just 3
-- > lookupLE 5 (fromList [3, 5]) == Just 5
lookupLE :: Key -> IntSet -> Maybe Key
lookupLE x t = x `seq` case t of
    -- Negative root mask: negatives are in the right subtree.
    Bin _ m l r | m < 0 -> if x >= 0 then go r l else go Nil r
    _ -> go Nil t
  where
    -- 'def' is the nearest subtree whose elements are all <= x.
    go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMax def else unsafeFindMax r
                         | zero x m  = go def l
                         | otherwise = go l r
    go def (Tip kx bm) | prefixOf x > kx = Just $ kx + highestBitSet bm
                       | prefixOf x == kx && maskLE /= 0 = Just $ kx + highestBitSet maskLE
                       | otherwise = unsafeFindMax def
      where maskLE = (((bitmapOf x) `shiftLL` 1) - 1) .&. bm   -- bits at or below x
    go def Nil = unsafeFindMax def
> lookupGE 3 ( fromList [ 3 , 5 ] ) = = Just 3
> lookupGE 4 ( fromList [ 3 , 5 ] ) = = Just 5
> lookupGE 6 ( fromList [ 3 , 5 ] ) = = Nothing
-- | /O(log n)/. Find smallest element greater or equal to the given one.
--
-- > lookupGE 3 (fromList [3, 5]) == Just 3
-- > lookupGE 4 (fromList [3, 5]) == Just 5
-- > lookupGE 6 (fromList [3, 5]) == Nothing
lookupGE :: Key -> IntSet -> Maybe Key
lookupGE x t = x `seq` case t of
    -- Negative root mask: negatives are in the right subtree.
    Bin _ m l r | m < 0 -> if x >= 0 then go Nil l else go l r
    _ -> go Nil t
  where
    -- 'def' is the nearest subtree whose elements are all >= x.
    go def (Bin p m l r) | nomatch x p m = if x < p then unsafeFindMin l else unsafeFindMin def
                         | zero x m  = go r l
                         | otherwise = go def r
    go def (Tip kx bm) | prefixOf x < kx = Just $ kx + lowestBitSet bm
                       | prefixOf x == kx && maskGE /= 0 = Just $ kx + lowestBitSet maskGE
                       | otherwise = unsafeFindMin def
      where maskGE = (- (bitmapOf x)) .&. bm   -- bits at or above x
    go def Nil = unsafeFindMin def
Helper function for lookupGE and . It assumes that if a node is
-- Helper for 'lookupGE' and 'lookupGT': leftmost element of a subtree.
-- It assumes that if a 'Bin' node is given, it has m > 0.
unsafeFindMin :: IntSet -> Maybe Key
unsafeFindMin t =
  case t of
    Nil         -> Nothing
    Tip kx bm   -> Just (kx + lowestBitSet bm)
    Bin _ _ l _ -> unsafeFindMin l
Helper function for and lookupLT . It assumes that if a node is
-- Helper for 'lookupLE' and 'lookupLT': rightmost element of a subtree.
-- It assumes that if a 'Bin' node is given, it has m > 0.
unsafeFindMax :: IntSet -> Maybe Key
unsafeFindMax t =
  case t of
    Nil         -> Nothing
    Tip kx bm   -> Just (kx + highestBitSet bm)
    Bin _ _ _ r -> unsafeFindMax r
-- | /O(1)/. The empty set.
empty :: IntSet
empty = Nil
| /O(1)/. A set of one element .
-- | /O(1)/. A set of one element.
singleton :: Key -> IntSet
singleton x
  = Tip (prefixOf x) (bitmapOf x)
{-# INLINE singleton #-}
-- | /O(min(n,W))/. Add a value to the set. There is no left- or right bias for
-- IntSets.
insert :: Key -> IntSet -> IntSet
-- NB: the `seq` forces x as soon as the first argument is supplied;
-- keep the partial application form so that strictness is preserved.
insert x = x `seq` insertBM (prefixOf x) (bitmapOf x)
-- Helper function for 'insert' and 'union': inserts the keys described by a
-- single prefix/bitmap pair (i.e. the contents of one 'Tip') into a tree.
insertBM :: Prefix -> BitMap -> IntSet -> IntSet
insertBM kx bm t = kx `seq` bm `seq`
  case t of
    Bin p m l r
      | nomatch kx p m -> link kx (Tip kx bm) p t   -- new branch point
      | zero kx m      -> Bin p m (insertBM kx bm l) r
      | otherwise      -> Bin p m l (insertBM kx bm r)
    Tip kx' bm'
      | kx' == kx -> Tip kx' (bm .|. bm')           -- same tip: merge bitmaps
      | otherwise -> link kx (Tip kx bm) kx' t
    Nil -> Tip kx bm
-- | /O(min(n,W))/. Delete a value in the set. Returns the
-- original set when the value was not present.
delete :: Key -> IntSet -> IntSet
-- NB: as with 'insert', keep the partial application so x is forced early.
delete x = x `seq` deleteBM (prefixOf x) (bitmapOf x)
Deletes all values mentioned in the BitMap from the set .
-- Helper function for 'delete' and 'difference': deletes all values mentioned
-- in the BitMap from the set.
deleteBM :: Prefix -> BitMap -> IntSet -> IntSet
deleteBM kx bm t = kx `seq` bm `seq`
  case t of
    Bin p m l r
      | nomatch kx p m -> t
      -- 'bin' (not 'Bin') so emptied subtrees collapse away
      | zero kx m      -> bin p m (deleteBM kx bm l) r
      | otherwise      -> bin p m l (deleteBM kx bm r)
    Tip kx' bm'
      | kx' == kx -> tip kx (bm' .&. complement bm)  -- 'tip' drops empty bitmaps
      | otherwise -> t
    Nil -> Nil
-- | The union of a list of sets.
unions :: [IntSet] -> IntSet
unions = foldlStrict union empty
| /O(n+m)/. The union of two sets .
-- | /O(n+m)/. The union of two sets.
union :: IntSet -> IntSet -> IntSet
union t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  | shorter m1 m2 = union1      -- t1 branches higher: merge t2 into one side of t1
  | shorter m2 m1 = union2      -- t2 branches higher: merge t1 into one side of t2
  | p1 == p2      = Bin p1 m1 (union l1 l2) (union r1 r2)  -- same branch point
  | otherwise     = link p1 t1 p2 t2                       -- disjoint prefixes
  where
    union1 | nomatch p2 p1 m1 = link p1 t1 p2 t2
           | zero p2 m1       = Bin p1 m1 (union l1 t2) r1
           | otherwise        = Bin p1 m1 l1 (union r1 t2)
    union2 | nomatch p1 p2 m2 = link p1 t1 p2 t2
           | zero p1 m2       = Bin p2 m2 (union t1 l2) r2
           | otherwise        = Bin p2 m2 l2 (union t1 r2)
union t@(Bin _ _ _ _) (Tip kx bm) = insertBM kx bm t
union t@(Bin _ _ _ _) Nil = t
union (Tip kx bm) t = insertBM kx bm t
union Nil t = t
| /O(n+m)/. Difference between two sets .
-- | /O(n+m)/. Difference between two sets.
difference :: IntSet -> IntSet -> IntSet
difference t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  | shorter m1 m2 = difference1
  | shorter m2 m1 = difference2
  | p1 == p2      = bin p1 m1 (difference l1 l2) (difference r1 r2)
  | otherwise     = t1   -- disjoint: nothing to remove
  where
    difference1 | nomatch p2 p1 m1 = t1
                | zero p2 m1       = bin p1 m1 (difference l1 t2) r1
                | otherwise        = bin p1 m1 l1 (difference r1 t2)
    difference2 | nomatch p1 p2 m2 = t1
                | zero p1 m2       = difference t1 l2
                | otherwise        = difference t1 r2
difference t@(Bin _ _ _ _) (Tip kx bm) = deleteBM kx bm t
difference t@(Bin _ _ _ _) Nil = t
-- A lone tip: walk t2 to find the (at most one) tip with the same prefix.
difference t1@(Tip kx bm) t2 = differenceTip t2
  where differenceTip (Bin p2 m2 l2 r2) | nomatch kx p2 m2 = t1
                                        | zero kx m2 = differenceTip l2
                                        | otherwise = differenceTip r2
        differenceTip (Tip kx2 bm2) | kx == kx2 = tip kx (bm .&. complement bm2)
                                    | otherwise = t1
        differenceTip Nil = t1
difference Nil _ = Nil
| /O(n+m)/. The intersection of two sets .
-- | /O(n+m)/. The intersection of two sets.
intersection :: IntSet -> IntSet -> IntSet
intersection t1@(Bin p1 m1 l1 r1) t2@(Bin p2 m2 l2 r2)
  | shorter m1 m2 = intersection1
  | shorter m2 m1 = intersection2
  | p1 == p2      = bin p1 m1 (intersection l1 l2) (intersection r1 r2)
  | otherwise     = Nil   -- disjoint prefixes: empty intersection
  where
    intersection1 | nomatch p2 p1 m1 = Nil
                  | zero p2 m1       = intersection l1 t2
                  | otherwise        = intersection r1 t2
    intersection2 | nomatch p1 p2 m2 = Nil
                  | zero p1 m2       = intersection t1 l2
                  | otherwise        = intersection t1 r2
-- One side is a single tip: find the matching tip (if any) in the other tree.
intersection t1@(Bin _ _ _ _) (Tip kx2 bm2) = intersectBM t1
  where intersectBM (Bin p1 m1 l1 r1) | nomatch kx2 p1 m1 = Nil
                                      | zero kx2 m1 = intersectBM l1
                                      | otherwise = intersectBM r1
        intersectBM (Tip kx1 bm1) | kx1 == kx2 = tip kx1 (bm1 .&. bm2)
                                  | otherwise = Nil
        intersectBM Nil = Nil
intersection (Bin _ _ _ _) Nil = Nil
intersection (Tip kx1 bm1) t2 = intersectBM t2
  where intersectBM (Bin p2 m2 l2 r2) | nomatch kx1 p2 m2 = Nil
                                      | zero kx1 m2 = intersectBM l2
                                      | otherwise = intersectBM r2
        intersectBM (Tip kx2 bm2) | kx1 == kx2 = tip kx1 (bm1 .&. bm2)
                                  | otherwise = Nil
        intersectBM Nil = Nil
intersection Nil _ = Nil
| /O(n+m)/. Is this a proper subset ? ( ie . a subset but not equal ) .
-- | /O(n+m)/. Is this a proper subset? (ie. a subset but not equal).
isProperSubsetOf :: IntSet -> IntSet -> Bool
isProperSubsetOf t1 t2 = subsetCmp t1 t2 == LT
-- Compare two sets under the subset ordering:
--   LT: the first set is a proper subset of the second,
--   EQ: the sets are equal,
--   GT: the first set is /not/ a subset of the second.
subsetCmp :: IntSet -> IntSet -> Ordering
subsetCmp t1@(Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
  | shorter m1 m2 = GT
  | shorter m2 m1 = case subsetCmpLt of
                      GT -> GT
                      _  -> LT
  | p1 == p2      = subsetCmpEq
  | otherwise     = GT  -- disjoint
  where
    subsetCmpLt | nomatch p1 p2 m2 = GT
                | zero p1 m2       = subsetCmp t1 l2
                | otherwise        = subsetCmp t1 r2
    subsetCmpEq = case (subsetCmp l1 l2, subsetCmp r1 r2) of
                    (GT,_ ) -> GT
                    (_ ,GT) -> GT
                    (EQ,EQ) -> EQ
                    _       -> LT

subsetCmp (Bin _ _ _ _) _ = GT

subsetCmp (Tip kx1 bm1) (Tip kx2 bm2)
  -- BUGFIX: this guard was lost in comment-stripping; without it two tips
  -- with different prefixes but equal bitmaps would wrongly compare EQ
  -- (note kx1/kx2 were bound but unused in the corrupted version).
  | kx1 /= kx2                  = GT  -- disjoint
  | bm1 == bm2                  = EQ
  | bm1 .&. complement bm2 == 0 = LT
  | otherwise                   = GT

subsetCmp t1@(Tip kx _) (Bin p m l r)
  | nomatch kx p m = GT  -- disjoint
  | zero kx m      = case subsetCmp t1 l of GT -> GT ; _ -> LT
  | otherwise      = case subsetCmp t1 r of GT -> GT ; _ -> LT

subsetCmp Nil Nil = EQ
subsetCmp Nil _   = LT
| /O(n+m)/. Is this a subset ?
@(s1 ` isSubsetOf ` tells whether @s1@ is a subset of @s2@.
-- | /O(n+m)/. Is this a subset?
-- @(s1 `isSubsetOf` s2)@ tells whether @s1@ is a subset of @s2@.
isSubsetOf :: IntSet -> IntSet -> Bool
isSubsetOf t1@(Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
  -- t1 branches higher than t2, so t1 spans keys t2 cannot contain.
  | shorter m1 m2 = False
  | shorter m2 m1 = match p1 p2 m2 && (if zero p1 m2 then isSubsetOf t1 l2
                                                     else isSubsetOf t1 r2)
  | otherwise     = (p1==p2) && isSubsetOf l1 l2 && isSubsetOf r1 r2
isSubsetOf (Bin _ _ _ _) _ = False
isSubsetOf (Tip kx1 bm1) (Tip kx2 bm2) = kx1 == kx2 && bm1 .&. complement bm2 == 0
isSubsetOf t1@(Tip kx _) (Bin p m l r)
  | nomatch kx p m = False
  | zero kx m      = isSubsetOf t1 l
  | otherwise      = isSubsetOf t1 r
isSubsetOf (Tip _ _) Nil = False
isSubsetOf Nil _ = True
-- | /O(n)/. Filter all elements that satisfy some predicate.
filter :: (Key -> Bool) -> IntSet -> IntSet
filter predicate t
  = case t of
      Bin p m l r
        -> bin p m (filter predicate l) (filter predicate r)
      Tip kx bm
        -- Rebuild the tip's bitmap, keeping only suffixes whose key passes.
        -> tip kx (foldl'Bits 0 (bitPred kx) 0 bm)
      Nil -> Nil
  where bitPred kx bm bi | predicate (kx + bi) = bm .|. bitmapOfSuffix bi
                         | otherwise           = bm
        {-# INLINE bitPred #-}
-- | /O(n)/. Partition the set according to some predicate: elements
-- satisfying it on the left, the rest on the right.
partition :: (Key -> Bool) -> IntSet -> (IntSet,IntSet)
partition predicate0 t0 = toPair $ go predicate0 t0
  where
    go predicate t
      = case t of
          Bin p m l r
            -> let (l1 :*: l2) = go predicate l
                   (r1 :*: r2) = go predicate r
               in bin p m l1 r1 :*: bin p m l2 r2
          Tip kx bm
            -- bm1 keeps the passing suffixes; the xor yields the rest.
            -> let bm1 = foldl'Bits 0 (bitPred kx) 0 bm
               in tip kx bm1 :*: tip kx (bm `xor` bm1)
          Nil -> (Nil :*: Nil)
      where bitPred kx bm bi | predicate (kx + bi) = bm .|. bitmapOfSuffix bi
                             | otherwise           = bm
            {-# INLINE bitPred #-}
where @set1@ comprises the elements of @set@ less than @x@ and @set2@
comprises the elements of @set@ greater than @x@.
> split 3 ( fromList [ 1 .. 5 ] ) = = ( fromList [ 1,2 ] , fromList [ 4,5 ] )
-- | /O(min(n,W))/. The expression (@'split' x set@) is a pair @(set1,set2)@
-- where @set1@ comprises the elements of @set@ less than @x@ and @set2@
-- comprises the elements of @set@ greater than @x@.
--
-- > split 3 (fromList [1..5]) == (fromList [1,2], fromList [4,5])
split :: Key -> IntSet -> (IntSet,IntSet)
split x t =
  case t of
      Bin _ m l r
          -- BUGFIX: this guard line was lost in comment-stripping, leaving
          -- dangling then/else branches. A negative root mask means both
          -- signs are present (negatives in the right subtree).
          | m < 0 -> if x >= 0  -- handle negative numbers.
                     then case go x l of (lt :*: gt) -> let lt' = union lt r
                                                        in lt' `seq` (lt', gt)
                     else case go x r of (lt :*: gt) -> let gt' = union gt l
                                                        in gt' `seq` (lt, gt')
      _ -> case go x t of
          (lt :*: gt) -> (lt, gt)
  where
    go !x' t'@(Bin p m l r)
        | match x' p m = if zero x' m
                         then case go x' l of
                             (lt :*: gt) -> lt :*: union gt r
                         else case go x' r of
                             (lt :*: gt) -> union lt l :*: gt
        | otherwise = if x' < p then (Nil :*: t')
                      else (t' :*: Nil)
    go x' t'@(Tip kx' bm)
        | kx' > x' = (Nil :*: t')
          -- equivalent to kx' > prefixOf x'
        | kx' < prefixOf x' = (t' :*: Nil)
        | otherwise = tip kx' (bm .&. lowerBitmap) :*: tip kx' (bm .&. higherBitmap)
            where lowerBitmap = bitmapOf x' - 1
                  higherBitmap = complement (lowerBitmap + bitmapOf x')
    go _ Nil = (Nil :*: Nil)
-- | /O(min(n,W))/. Performs a 'split' but also returns whether the pivot
-- element was found in the original set.
splitMember :: Key -> IntSet -> (IntSet,Bool,IntSet)
splitMember x t =
  case t of
      -- Negative root mask: split the sign-homogeneous subtree containing x
      -- and re-attach the other subtree on the appropriate side.
      Bin _ m l r | m < 0 -> if x >= 0
                            then case go x l of
                                (lt, fnd, gt) -> let lt' = union lt r
                                                 in lt' `seq` (lt', fnd, gt)
                            else case go x r of
                                (lt, fnd, gt) -> let gt' = union gt l
                                                 in gt' `seq` (lt, fnd, gt')
      _ -> go x t
  where
    go x' t'@(Bin p m l r)
        | match x' p m = if zero x' m
                         then case go x' l of
                             (lt, fnd, gt) -> (lt, fnd, union gt r)
                         else case go x' r of
                             (lt, fnd, gt) -> (union lt l, fnd, gt)
        | otherwise = if x' < p then (Nil, False, t') else (t', False, Nil)
    go x' t'@(Tip kx' bm)
        | kx' > x' = (Nil, False, t')
          -- equivalent to kx' > prefixOf x'
        | kx' < prefixOf x' = (t', False, Nil)
        | otherwise = let lt = tip kx' (bm .&. lowerBitmap)
                          found = (bm .&. bitmapOfx') /= 0
                          gt = tip kx' (bm .&. higherBitmap)
                      in lt `seq` found `seq` gt `seq` (lt, found, gt)
        where bitmapOfx' = bitmapOf x'
              lowerBitmap = bitmapOfx' - 1
              higherBitmap = complement (lowerBitmap + bitmapOfx')
    go _ Nil = (Nil, False, Nil)
Min /
Min/Max
-- | /O(min(n,W))/. Retrieves the maximal key of the set, and the set
-- stripped of that element, or 'Nothing' if passed an empty set.
maxView :: IntSet -> Maybe (Key, IntSet)
maxView t =
  case t of Nil -> Nothing
            -- Negative root mask: the maximum is the largest non-negative,
            -- which lives in the *left* subtree.
            Bin p m l r | m < 0 -> case go l of (result, l') -> Just (result, bin p m l' r)
            _ -> Just (go t)
  where
    go (Bin p m l r) = case go r of (result, r') -> (result, bin p m l r')
    -- Clear the highest set bit of the tip's bitmap; 'tip' drops it if empty.
    go (Tip kx bm) = case highestBitSet bm of bi -> (kx + bi, tip kx (bm .&. complement (bitmapOfSuffix bi)))
    go Nil = error "maxView Nil"
-- | /O(min(n,W))/. Retrieves the minimal key of the set, and the set
-- stripped of that element, or 'Nothing' if passed an empty set.
minView :: IntSet -> Maybe (Key, IntSet)
minView t =
  case t of Nil -> Nothing
            -- Negative root mask: the minimum is the smallest negative,
            -- which lives in the *right* subtree.
            Bin p m l r | m < 0 -> case go r of (result, r') -> Just (result, bin p m l r')
            _ -> Just (go t)
  where
    go (Bin p m l r) = case go l of (result, l') -> (result, bin p m l' r)
    -- Clear the lowest set bit of the tip's bitmap; 'tip' drops it if empty.
    go (Tip kx bm) = case lowestBitSet bm of bi -> (kx + bi, tip kx (bm .&. complement (bitmapOfSuffix bi)))
    go Nil = error "minView Nil"
-- | /O(min(n,W))/. Delete and find the minimal element.
--
-- > deleteFindMin set = (findMin set, deleteMin set)
deleteFindMin :: IntSet -> (Key, IntSet)
deleteFindMin t =
  case minView t of
    Just r  -> r
    Nothing -> error "deleteFindMin: empty set has no minimal element"
> set = ( set , deleteMax set )
-- | /O(min(n,W))/. Delete and find the maximal element.
--
-- > deleteFindMax set = (findMax set, deleteMax set)
deleteFindMax :: IntSet -> (Key, IntSet)
deleteFindMax t =
  case maxView t of
    Just r  -> r
    Nothing -> error "deleteFindMax: empty set has no maximal element"
-- | /O(min(n,W))/. The minimal element of the set.
findMin :: IntSet -> Key
findMin Nil = error "findMin: empty set has no minimal element"
findMin (Tip kx bm) = kx + lowestBitSet bm
findMin (Bin _ m l r)
  -- Negative root mask: negatives sort first and live in the right subtree.
  | m < 0     = find r
  | otherwise = find l
    where find (Tip kx bm) = kx + lowestBitSet bm
          find (Bin _ _ l' _) = find l'
          find Nil = error "findMin Nil"
-- | /O(min(n,W))/. The maximal element of a set.
findMax :: IntSet -> Key
findMax Nil = error "findMax: empty set has no maximal element"
findMax (Tip kx bm) = kx + highestBitSet bm
findMax (Bin _ m l r)
  -- Negative root mask: the maximum is non-negative, in the left subtree.
  | m < 0     = find l
  | otherwise = find r
    where find (Tip kx bm) = kx + highestBitSet bm
          find (Bin _ _ _ r') = find r'
          find Nil = error "findMax Nil"
Note that this is a change of behaviour for consistency with ' Data . Set . Set ' & # 8211 ;
versions prior to 0.5 threw an error if the ' ' was already empty .
-- | /O(min(n,W))/. Delete the minimal element. Returns an empty set if the
-- set is empty.
deleteMin :: IntSet -> IntSet
deleteMin t =
  case minView t of
    Just (_, t') -> t'
    Nothing      -> Nil
Note that this is a change of behaviour for consistency with ' Data . Set . Set ' & # 8211 ;
versions prior to 0.5 threw an error if the ' ' was already empty .
-- | /O(min(n,W))/. Delete the maximal element. Returns an empty set if the
-- set is empty.
deleteMax :: IntSet -> IntSet
deleteMax t =
  case maxView t of
    Just (_, t') -> t'
    Nothing      -> Nil
@'map ' f s@ is the set obtained by applying @f@ to each element of @s@.
for some @(x , y)@ , @x \/= y & & f x = = f y@
-- | /O(n*min(n,W))/.
-- @'map' f s@ is the set obtained by applying @f@ to each element of @s@.
-- The size of the result may be smaller if @f@ maps two or more distinct
-- elements to the same value.
map :: (Key -> Key) -> IntSet -> IntSet
map f s = fromList (List.map f (toList s))
Fold
Fold
/Please note that fold will be deprecated in the future and removed./
-- | /O(n)/. Fold the elements in the set using the given right-associative
-- binary operator. This function is an equivalent of 'foldr' and is present
-- for compatibility only.
--
-- /Please note that fold will be deprecated in the future and removed./
fold :: (Key -> b -> b) -> b -> IntSet -> b
fold = foldr
{-# INLINE fold #-}
binary operator , such that @'foldr ' f z = = ' Prelude.foldr ' f z . '
-- | /O(n)/. Fold the elements in the set using the given right-associative
-- binary operator, such that @'foldr' f z == 'Prelude.foldr' f z . 'toAscList'@.
--
-- For example,
--
-- > toAscList set = foldr (:) [] set
foldr :: (Key -> b -> b) -> b -> IntSet -> b
-- BUGFIX: the lambda/case header carried trailing comments and was lost in
-- comment-stripping; restored here.
foldr f z = \t ->      -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z l) r -- put negative numbers before.
                        | otherwise -> go (go z r) l
            _ -> go z t
  where
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldrBits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' r) l
{-# INLINE foldr #-}
-- | /O(n)/. A strict version of 'foldr'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldr' :: (Key -> b -> b) -> b -> IntSet -> b
-- BUGFIX: restored lambda/case header lost in comment-stripping.
foldr' f z = \t ->     -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z l) r -- put negative numbers before.
                        | otherwise -> go (go z r) l
            _ -> go z t
  where
    go arg _ | arg `seq` False = undefined  -- forces the accumulator
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldr'Bits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' r) l
{-# INLINE foldr' #-}
binary operator , such that @'foldl ' f z = = ' Prelude.foldl ' f z . '
-- | /O(n)/. Fold the elements in the set using the given left-associative
-- binary operator, such that @'foldl' f z == 'Prelude.foldl' f z . 'toAscList'@.
--
-- For example,
--
-- > toDescList set = foldl (flip (:)) [] set
foldl :: (a -> Key -> a) -> a -> IntSet -> a
-- BUGFIX: restored lambda/case header lost in comment-stripping.
foldl f z = \t ->      -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z r) l -- put negative numbers before.
                        | otherwise -> go (go z l) r
            _ -> go z t
  where
    go arg _ | arg `seq` False = undefined
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldlBits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' l) r
{-# INLINE foldl #-}
-- | /O(n)/. A strict version of 'foldl'. Each application of the operator is
-- evaluated before using the result in the next application. This
-- function is strict in the starting value.
foldl' :: (a -> Key -> a) -> a -> IntSet -> a
-- BUGFIX: restored lambda/case header lost in comment-stripping.
foldl' f z = \t ->     -- Use lambda t to be inlinable with two arguments only.
  case t of Bin _ m l r | m < 0 -> go (go z r) l -- put negative numbers before.
                        | otherwise -> go (go z l) r
            _ -> go z t
  where
    go arg _ | arg `seq` False = undefined  -- forces the accumulator
    go z' Nil           = z'
    go z' (Tip kx bm)   = foldl'Bits kx f z' bm
    go z' (Bin _ _ l r) = go (go z' l) r
{-# INLINE foldl' #-}
-- | /O(n)/. An alias of 'toAscList'. The elements of a set in ascending
-- order. Subject to list fusion.
elems :: IntSet -> [Key]
elems = toAscList
-- Enables @OverloadedLists@: list literals may denote 'IntSet's.
instance GHCExts.IsList IntSet where
  type Item IntSet = Key
  fromList = fromList
  toList = toList
-- | /O(n)/. Convert the set to a list of elements. Subject to list fusion.
toList :: IntSet -> [Key]
toList = toAscList
-- | /O(n)/. Convert the set to an ascending list of elements. Subject to list
-- fusion (see the rewrite rules for foldrFB below).
toAscList :: IntSet -> [Key]
toAscList = foldr (:) []
-- | /O(n)/. Convert the set to a descending list of elements. Subject to list
-- fusion (see the rewrite rules for foldlFB below).
toDescList :: IntSet -> [Key]
toDescList = foldl (flip (:)) []
The foldrFB and foldlFB are foldr and foldl equivalents , used for list fusion .
They are important to convert unfused to{Asc , Desc}List back , see mapFB in prelude .
-- List-fusion helpers: aliases of 'foldr'/'foldl' used by the rewrite rules
-- so that to{Asc,Desc}List act as good producers and can be converted back
-- if fusion does not fire.
foldrFB :: (Key -> b -> b) -> b -> IntSet -> b
foldrFB = foldr
foldlFB :: (a -> Key -> a) -> a -> IntSet -> a
foldlFB = foldl
# INLINE[0 ] foldlFB #
and toList , so that we need to fuse only toAscList .
# INLINE elems #
# INLINE toList #
The fusion is enabled up to phase 2 included . If it does not succeed ,
convert in phase 1 the expanded to{Asc , Desc}List calls back to
a list fusion , otherwise it would go away in phase 1 ) , and let compiler do
# RULES " IntSet.toAscList " [ ~1 ] forall s . toAscList s = build ( \c n - > foldrFB c n s ) #
# RULES " IntSet.toAscListBack " [ 1 ] foldrFB ( :) [ ] = toAscList #
# RULES " IntSet.toDescList " [ ~1 ] forall s . toDescList s = build ( \c n - > foldlFB ( ) n s ) #
# RULES " IntSet.toDescListBack " [ 1 ] foldlFB ( - > x : xs ) [ ] = toDescList #
| /O(n*min(n , W))/. Create a set from a list of integers .
-- | /O(n*min(n,W))/. Create a set from a list of integers.
fromList :: [Key] -> IntSet
fromList = foldlStrict (\acc x -> insert x acc) empty
/The precondition ( input list is ascending ) is not checked./
-- | /O(n)/. Build a set from an ascending list of elements.
-- /The precondition (input list is ascending) is not checked./
fromAscList :: [Key] -> IntSet
fromAscList [] = Nil
fromAscList (x0 : xs0) = fromDistinctAscList (combineEq x0 xs0)
  where
    -- Squash runs of equal elements so the distinct-list builder applies.
    combineEq x' [] = [x']
    combineEq x' (x:xs)
      | x==x'     = combineEq x' xs
      | otherwise = x' : combineEq x xs
/The precondition ( input list is strictly ascending ) is not checked./
-- | /O(n)/. Build a set from an ascending list of distinct elements.
-- /The precondition (input list is strictly ascending) is not checked./
fromDistinctAscList :: [Key] -> IntSet
fromDistinctAscList []          = Nil
fromDistinctAscList (z0 : zs0)  = work (prefixOf z0) (bitmapOf z0) zs0 Nada
  where
    -- BUGFIX: this comment lost its "--" prefix in comment-stripping and sat
    -- in the body as a parse error; restored as a comment.
    -- 'work' accumulates all values that go into one tip, before passing
    -- this Tip to 'reduce'.
    work kx bm []     stk = finish kx (Tip kx bm) stk
    work kx bm (z:zs) stk | kx == prefixOf z = work kx (bm .|. bitmapOf z) zs stk
    work kx bm (z:zs) stk = reduce z zs (branchMask z kx) kx (Tip kx bm) stk

    reduce z zs _ px tx Nada = work (prefixOf z) (bitmapOf z) zs (Push px tx Nada)
    reduce z zs m px tx stk@(Push py ty stk') =
        let mxy = branchMask px py
            pxy = mask px mxy
        in if shorter m mxy
             then reduce z zs m pxy (Bin pxy mxy ty tx) stk'
             else work (prefixOf z) (bitmapOf z) zs (Push px tx stk)

    finish _ t Nada = t
    finish px tx (Push py ty stk) = finish p (link py ty px tx) stk
        where m = branchMask px py
              p = mask px m
Eq
Eq
instance Eq IntSet where
t1 == t2 = equal t1 t2
t1 /= t2 = nequal t1 t2
equal :: IntSet -> IntSet -> Bool
equal (Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
= (m1 == m2) && (p1 == p2) && (equal l1 l2) && (equal r1 r2)
equal (Tip kx1 bm1) (Tip kx2 bm2)
= kx1 == kx2 && bm1 == bm2
equal Nil Nil = True
equal _ _ = False
nequal :: IntSet -> IntSet -> Bool
nequal (Bin p1 m1 l1 r1) (Bin p2 m2 l2 r2)
= (m1 /= m2) || (p1 /= p2) || (nequal l1 l2) || (nequal r1 r2)
nequal (Tip kx1 bm1) (Tip kx2 bm2)
= kx1 /= kx2 || bm1 /= bm2
nequal Nil Nil = False
nequal _ _ = True
Ord
instance Ord IntSet where
compare s1 s2 = compare (toAscList s1) (toAscList s2)
instance Show IntSet where
showsPrec p xs = showParen (p > 10) $
showString "fromList " . shows (toList xs)
instance Read IntSet where
readPrec = parens $ prec 10 $ do
Ident "fromList" <- lexP
xs <- readPrec
return (fromList xs)
readListPrec = readListPrecDefault
Typeable
Typeable
deriving instance Typeable IntSet
The constructors consist only of strict fields of Ints and
instance NFData IntSet where rnf x = seq x ()
showTree :: IntSet -> String
showTree s
= showTreeWith True False s
| /O(n)/. The expression ( @'showTreeWith ' hang wide map@ ) shows
the tree that implements the set . If @hang@ is
' True ' , a /hanging/ tree is shown otherwise a rotated tree is shown . If
@wide@ is ' True ' , an extra wide version is shown .
the tree that implements the set. If @hang@ is
'True', a /hanging/ tree is shown otherwise a rotated tree is shown. If
@wide@ is 'True', an extra wide version is shown.
-}
showTreeWith :: Bool -> Bool -> IntSet -> String
showTreeWith hang wide t
| hang = (showsTreeHang wide [] t) ""
| otherwise = (showsTree wide [] [] t) ""
showsTree :: Bool -> [String] -> [String] -> IntSet -> ShowS
showsTree wide lbars rbars t
= case t of
Bin p m l r
-> showsTree wide (withBar rbars) (withEmpty rbars) r .
showWide wide rbars .
showsBars lbars . showString (showBin p m) . showString "\n" .
showWide wide lbars .
showsTree wide (withEmpty lbars) (withBar lbars) l
Tip kx bm
-> showsBars lbars . showString " " . shows kx . showString " + " .
showsBitMap bm . showString "\n"
Nil -> showsBars lbars . showString "|\n"
showsTreeHang :: Bool -> [String] -> IntSet -> ShowS
showsTreeHang wide bars t
= case t of
Bin p m l r
-> showsBars bars . showString (showBin p m) . showString "\n" .
showWide wide bars .
showsTreeHang wide (withBar bars) l .
showWide wide bars .
showsTreeHang wide (withEmpty bars) r
Tip kx bm
-> showsBars bars . showString " " . shows kx . showString " + " .
showsBitMap bm . showString "\n"
Nil -> showsBars bars . showString "|\n"
showBin :: Prefix -> Mask -> String
showBin _ _
showWide :: Bool -> [String] -> String -> String
showWide wide bars
| wide = showString (concat (reverse bars)) . showString "|\n"
| otherwise = id
showsBars :: [String] -> ShowS
showsBars bars
= case bars of
[] -> id
_ -> showString (concat (reverse (tail bars))) . showString node
showsBitMap :: Word -> ShowS
showsBitMap = showString . showBitMap
showBitMap :: Word -> String
showBitMap w = show $ foldrBits 0 (:) [] w
node :: String
node = "+--"
withBar, withEmpty :: [String] -> [String]
withBar bars = "| ":bars
withEmpty bars = " ":bars
link :: Prefix -> IntSet -> Prefix -> IntSet -> IntSet
link p1 t1 p2 t2
| zero p1 m = Bin p m t1 t2
| otherwise = Bin p m t2 t1
where
m = branchMask p1 p2
p = mask p1 m
# INLINE link #
bin :: Prefix -> Mask -> IntSet -> IntSet -> IntSet
bin _ _ l Nil = l
bin _ _ Nil r = r
bin p m l r = Bin p m l r
# INLINE bin #
tip :: Prefix -> BitMap -> IntSet
tip _ 0 = Nil
tip kx bm = Tip kx bm
# INLINE tip #
Functions that generate Prefix and BitMap of a Key or a Suffix .
Functions that generate Prefix and BitMap of a Key or a Suffix.
suffixBitMask :: Int
suffixBitMask = finiteBitSize (undefined::Word) - 1
# INLINE suffixBitMask #
prefixBitMask :: Int
prefixBitMask = complement suffixBitMask
# INLINE prefixBitMask #
prefixOf :: Int -> Prefix
prefixOf x = x .&. prefixBitMask
# INLINE prefixOf #
suffixOf :: Int -> Int
suffixOf x = x .&. suffixBitMask
# INLINE suffixOf #
bitmapOfSuffix :: Int -> BitMap
bitmapOfSuffix s = 1 `shiftLL` s
# INLINE bitmapOfSuffix #
bitmapOf :: Int -> BitMap
bitmapOf x = bitmapOfSuffix (suffixOf x)
# INLINE bitmapOf #
Endian independent bit twiddling
Endian independent bit twiddling
zero :: Int -> Mask -> Bool
zero i m
= (natFromInt i) .&. (natFromInt m) == 0
# INLINE zero #
nomatch,match :: Int -> Prefix -> Mask -> Bool
nomatch i p m
= (mask i m) /= p
# INLINE nomatch #
match i p m
= (mask i m) == p
# INLINE match #
Suppose a is largest such that 2^a divides 2*m .
mask :: Int -> Mask -> Prefix
mask i m
= maskW (natFromInt i) (natFromInt m)
# INLINE mask #
maskW :: Nat -> Nat -> Prefix
maskW i m
= intFromNat (i .&. (complement (m-1) `xor` m))
# INLINE maskW #
shorter :: Mask -> Mask -> Bool
shorter m1 m2
= (natFromInt m1) > (natFromInt m2)
# INLINE shorter #
branchMask :: Prefix -> Prefix -> Mask
branchMask p1 p2
= intFromNat (highestBitMask (natFromInt p1 `xor` natFromInt p2))
# INLINE branchMask #
To get best performance , we provide fast implementations of
lowestBitSet , highestBitSet and fold[lr][l]Bits for GHC .
If the intel bsf and bsr instructions ever become GHC primops ,
this code should be reimplemented using these .
Performance of this code is crucial for folds , toList , filter , partition .
The signatures of methods in question are placed after this comment .
To get best performance, we provide fast implementations of
lowestBitSet, highestBitSet and fold[lr][l]Bits for GHC.
If the intel bsf and bsr instructions ever become GHC primops,
this code should be reimplemented using these.
Performance of this code is crucial for folds, toList, filter, partition.
The signatures of methods in question are placed after this comment.
lowestBitSet :: Nat -> Int
highestBitSet :: Nat -> Int
foldlBits :: Int -> (a -> Int -> a) -> a -> Nat -> a
foldl'Bits :: Int -> (a -> Int -> a) -> a -> Nat -> a
foldrBits :: Int -> (Int -> a -> a) -> a -> Nat -> a
foldr'Bits :: Int -> (Int -> a -> a) -> a -> Nat -> a
# INLINE lowestBitSet #
# INLINE highestBitSet #
# INLINE foldlBits #
# INLINE foldl'Bits #
# INLINE foldrBits #
# INLINE foldr'Bits #
For lowestBitSet we use wordsize - dependant implementation based on
multiplication and , which was proposed by
< -September/016749.html >
The core of this implementation is fast indexOfTheOnlyBit ,
which is given a with exactly one bit set , and returns
its index .
Lot of effort was put in these implementations , please benchmark carefully
before changing this code .
For lowestBitSet we use wordsize-dependant implementation based on
multiplication and DeBrujn indeces, which was proposed by Edward Kmett
<-September/016749.html>
The core of this implementation is fast indexOfTheOnlyBit,
which is given a Nat with exactly one bit set, and returns
its index.
Lot of effort was put in these implementations, please benchmark carefully
before changing this code.
indexOfTheOnlyBit :: Nat -> Int
indexOfTheOnlyBit bitmask =
I# (lsbArray `indexInt8OffAddr#` unboxInt (intFromNat ((bitmask * magic) `shiftRL` offset)))
where unboxInt (I# i) = i
magic = 0x07EDD5E59A4E28C2
offset = 58
!lsbArray = "\63\0\58\1\59\47\53\2\60\39\48\27\54\33\42\3\61\51\37\40\49\18\28\20\55\30\34\11\43\14\22\4\62\57\46\52\38\26\32\41\50\36\17\19\29\10\13\21\56\45\25\31\35\16\9\12\44\24\15\8\23\7\6\5"#
That can not be easily avoided , as GHC forbids top - level literal .
as NOINLINE . But the code size of calling it and processing the result
is actually improvement on 32 - bit and only a 8B size increase on 64 - bit .
lowestBitMask :: Nat -> Nat
lowestBitMask x = x .&. negate x
# INLINE lowestBitMask #
Reverse the order of bits in the .
revNat :: Nat -> Nat
revNat x1 = case ((x1 `shiftRL` 1) .&. 0x5555555555555555) .|. ((x1 .&. 0x5555555555555555) `shiftLL` 1) of
x2 -> case ((x2 `shiftRL` 2) .&. 0x3333333333333333) .|. ((x2 .&. 0x3333333333333333) `shiftLL` 2) of
x3 -> case ((x3 `shiftRL` 4) .&. 0x0F0F0F0F0F0F0F0F) .|. ((x3 .&. 0x0F0F0F0F0F0F0F0F) `shiftLL` 4) of
x4 -> case ((x4 `shiftRL` 8) .&. 0x00FF00FF00FF00FF) .|. ((x4 .&. 0x00FF00FF00FF00FF) `shiftLL` 8) of
x5 -> case ((x5 `shiftRL` 16) .&. 0x0000FFFF0000FFFF) .|. ((x5 .&. 0x0000FFFF0000FFFF) `shiftLL` 16) of
x6 -> ( x6 `shiftRL` 32 ) .|. ( x6 `shiftLL` 32);
lowestBitSet x = indexOfTheOnlyBit (lowestBitMask x)
highestBitSet x = indexOfTheOnlyBit (highestBitMask x)
foldlBits prefix f z bitmap = go bitmap z
where go bm acc | bm == 0 = acc
| otherwise = case lowestBitMask bm of
bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
bi -> bi `seq` go (bm `xor` bitmask) ((f acc) $! (prefix+bi))
foldl'Bits prefix f z bitmap = go bitmap z
where go _ arg | arg `seq` False = undefined
go bm acc | bm == 0 = acc
| otherwise = case lowestBitMask bm of
bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
bi -> bi `seq` go (bm `xor` bitmask) ((f acc) $! (prefix+bi))
foldrBits prefix f z bitmap = go (revNat bitmap) z
where go bm acc | bm == 0 = acc
| otherwise = case lowestBitMask bm of
bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
bi -> bi `seq` go (bm `xor` bitmask) ((f $! (prefix+(64-1)-bi)) acc)
foldr'Bits prefix f z bitmap = go (revNat bitmap) z
where go _ arg | arg `seq` False = undefined
go bm acc | bm == 0 = acc
| otherwise = case lowestBitMask bm of
bitmask -> bitmask `seq` case indexOfTheOnlyBit bitmask of
bi -> bi `seq` go (bm `xor` bitmask) ((f $! (prefix+(64-1)-bi)) acc)
[ bitcount ] as posted by to haskell - cafe on April 11 , 2006 ,
based on the code on
/~seander/bithacks.html#CountBitsSetKernighan ,
where the following source is given :
Published in 1988 , the C Programming Language 2nd Ed . ( by and ) mentions this in exercise 2 - 9 . On April
19 , 2006 pointed out to me that this method " was first published
by in CACM 3 ( 1960 ) , 322 . ( Also discovered independently by
and published in 1964 in a book edited by Beckenbach . ) "
[bitcount] as posted by David F. Place to haskell-cafe on April 11, 2006,
based on the code on
/~seander/bithacks.html#CountBitsSetKernighan,
where the following source is given:
Published in 1988, the C Programming Language 2nd Ed. (by Brian W.
Kernighan and Dennis M. Ritchie) mentions this in exercise 2-9. On April
19, 2006 Don Knuth pointed out to me that this method "was first published
by Peter Wegner in CACM 3 (1960), 322. (Also discovered independently by
Derrick Lehmer and published in 1964 in a book edited by Beckenbach.)"
bitcount :: Int -> Word -> Int
bitcount a x = a + popCount x
# INLINE bitcount #
Utilities
Utilities
| /O(1)/. Decompose a set into pieces based on the structure of the underlying
pieces returned will be in ascending order ( all elements in the first submap
less than all elements in the second , and so on ) .
> splitRoot ( fromList [ 1 .. 120 ] ) = = [ fromList [ 1 .. 63],fromList [ 64 .. 120 ] ]
Note that the current implementation does not return more than two subsets ,
splitRoot :: IntSet -> [IntSet]
splitRoot orig =
case orig of
Nil -> []
x@(Tip _ _) -> [x]
Bin _ m l r | m < 0 -> [r, l]
| otherwise -> [l, r]
# INLINE splitRoot #
|
14115617e088cf3534aaf001a8c7fa5e2da6b6c9b9adb56fa059229c1184e12f | curaai/H-R-Tracing | Metal.hs | module Material.Metal where
import Hit (HitRecord (HitRecord, hitNormal, hitPoint),
Scatterable (..),
Scattered (Scattered, scatteredRay))
import Ray (Ray (Ray, direction))
import Sampling (sampleUnitSphere)
import Vector (Color, vDot, vReflect, vUnit)
data Metal =
Metal
{ albedo :: Color Float
, fuzz :: Float
}
whenMaybe :: Bool -> a -> Maybe a
whenMaybe False _ = Nothing
whenMaybe True a = Just a
instance Scatterable Metal where
scatter (Metal color f) (Ray _ dir) HitRecord { hitPoint = p
, hitNormal = normal
} g =
(whenMaybe (0 < vDot (direction sctRay) normal) res, g')
where
(randUnitSphere, g') = sampleUnitSphere g
sctRay =
let reflected = vReflect (vUnit dir) normal
in Ray p (reflected + (pure . min 1 $ f) * randUnitSphere)
res = Scattered sctRay color
| null | https://raw.githubusercontent.com/curaai/H-R-Tracing/ad4867763083994c0d482a3e73e5b3de97e20d5c/src/Material/Metal.hs | haskell | module Material.Metal where
import Hit (HitRecord (HitRecord, hitNormal, hitPoint),
Scatterable (..),
Scattered (Scattered, scatteredRay))
import Ray (Ray (Ray, direction))
import Sampling (sampleUnitSphere)
import Vector (Color, vDot, vReflect, vUnit)
data Metal =
Metal
{ albedo :: Color Float
, fuzz :: Float
}
whenMaybe :: Bool -> a -> Maybe a
whenMaybe False _ = Nothing
whenMaybe True a = Just a
instance Scatterable Metal where
scatter (Metal color f) (Ray _ dir) HitRecord { hitPoint = p
, hitNormal = normal
} g =
(whenMaybe (0 < vDot (direction sctRay) normal) res, g')
where
(randUnitSphere, g') = sampleUnitSphere g
sctRay =
let reflected = vReflect (vUnit dir) normal
in Ray p (reflected + (pure . min 1 $ f) * randUnitSphere)
res = Scattered sctRay color
| |
ab48adb207428f23c02d2df7091f0c22ff3da4167a8d84f517c78194af1f4811 | ropas/sparrow | arrayBlk.mli | (***********************************************************************)
(* *)
Copyright ( c ) 2007 - present .
Programming Research Laboratory ( ROPAS ) , Seoul National University .
(* All rights reserved. *)
(* *)
This software is distributed under the term of the BSD license .
(* See the LICENSE file for details. *)
(* *)
(***********************************************************************)
(** Array domain *)
module ArrInfo :
sig
type t = {
offset : Itv.t;
size : Itv.t;
stride : Itv.t;
null_pos : Itv.t;
structure : StructBlk.PowStruct.t;
}
val top : t
val input : t
end
include MapDom.LAT with type A.t = BasicDom.Allocsite.t and type B.t = ArrInfo.t
val make : BasicDom.Allocsite.t -> Itv.t -> Itv.t -> Itv.t -> Itv.t -> t
val offsetof : t -> Itv.t
val sizeof : t -> Itv.t
val nullof : t -> Itv.t
val extern : BasicDom.Allocsite.t -> t
val input : BasicDom.Allocsite.t -> t
val weak_plus_size : t -> Itv.t -> t
val plus_offset : t -> Itv.t -> t
val minus_offset : t -> Itv.t -> t
val set_null_pos : t -> Itv.t -> t
val plus_null_pos : t -> Itv.t -> t
val cast_array : Cil.typ -> t -> t
val allocsites_of_array : t -> BasicDom.Allocsite.t BatSet.t
val pow_loc_of_array : t -> BasicDom.PowLoc.t
val append_field : t -> Cil.fieldinfo -> BasicDom.PowLoc.t
| null | https://raw.githubusercontent.com/ropas/sparrow/3ec055b8c87b5c8340ef3ed6cde34f5835865b31/src/domain/arrayBlk.mli | ocaml | *********************************************************************
All rights reserved.
See the LICENSE file for details.
*********************************************************************
* Array domain | Copyright ( c ) 2007 - present .
Programming Research Laboratory ( ROPAS ) , Seoul National University .
This software is distributed under the term of the BSD license .
module ArrInfo :
sig
type t = {
offset : Itv.t;
size : Itv.t;
stride : Itv.t;
null_pos : Itv.t;
structure : StructBlk.PowStruct.t;
}
val top : t
val input : t
end
include MapDom.LAT with type A.t = BasicDom.Allocsite.t and type B.t = ArrInfo.t
val make : BasicDom.Allocsite.t -> Itv.t -> Itv.t -> Itv.t -> Itv.t -> t
val offsetof : t -> Itv.t
val sizeof : t -> Itv.t
val nullof : t -> Itv.t
val extern : BasicDom.Allocsite.t -> t
val input : BasicDom.Allocsite.t -> t
val weak_plus_size : t -> Itv.t -> t
val plus_offset : t -> Itv.t -> t
val minus_offset : t -> Itv.t -> t
val set_null_pos : t -> Itv.t -> t
val plus_null_pos : t -> Itv.t -> t
val cast_array : Cil.typ -> t -> t
val allocsites_of_array : t -> BasicDom.Allocsite.t BatSet.t
val pow_loc_of_array : t -> BasicDom.PowLoc.t
val append_field : t -> Cil.fieldinfo -> BasicDom.PowLoc.t
|
425246e99253fb25cb105f49e73c149fec490834454e5daae5491f01891278a8 | SimulaVR/Simula | Simula.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE TypeFamilies #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
module Plugin.Simula (GodotSimula(..)) where
import Plugin.Imports
import Data.Maybe
import Plugin.Input
import Plugin.Input.Grab
import Plugin.SimulaController
import Plugin.SimulaViewSprite
import Plugin.VR
import Plugin.Types
import Plugin.PancakeCamera
import Godot.Core.GodotVisualServer as G
import Godot.Core.GodotGlobalConstants as G
import Godot.Nativescript
import qualified Godot.Gdnative.Internal.Api as Api
import qualified Godot.Methods as G
import Godot.Gdnative.Types
import Godot.Api
import qualified Godot.Gdnative.Internal.Api as Api
import qualified Godot.Methods as G
import Godot.Internal.Dispatch ( (:<)
, safeCast
)
import Godot.Gdnative.Internal ( GodotNodePath
, GodotObject
)
import System.Environment
data GodotSimula = GodotSimula
{ _sObj :: GodotObject
, _sGrabState :: TVar GrabState
}
instance NativeScript GodotSimula where
-- className = "Simula"
classInit node = GodotSimula (safeCast node)
<$> newTVarIO NoGrab
classExtends = " Node "
classMethods =
[ func NoRPC "_ready" (catchGodot Plugin.Simula.ready)
, func NoRPC "_process" (catchGodot Plugin.Simula.process)
, func NoRPC "on_button_signal" (catchGodot Plugin.Simula.on_button_signal)
]
classSignals = []
instance HasBaseClass GodotSimula where
type BaseClass GodotSimula = GodotNode
super (GodotSimula obj _) = GodotNode obj
ready :: GodotSimula -> [GodotVariant] -> IO ()
ready self _ = do
-- OpenHMD is unfortunately not yet a working substitute for OpenVR
--
gssSpatial <- addSimulaServerNode :: IO GodotSpatial
maybeGSS <- asNativeScript (safeCast gssSpatial) :: IO (Maybe GodotSimulaServer)
xrRuntimeJson <- lookupEnv "XR_RUNTIME_JSON"
openBackend <- case (maybeGSS, xrRuntimeJson) of
(Just gss, Nothing) -> do gssConf <- readTVarIO (gss ^. gssConfiguration)
let backend = _backend gssConf :: String
case backend of
"OpenVR" -> return openVR
"OpenXR" -> return openXR
_ -> do putStrLn "Unable to parse backend; defaulting to OpenVR"
return openVR
(Just gss, _) -> return openXR
(Nothing, _) -> do return openVR
debugModeMaybe <- lookupEnv "DEBUG"
rrModeMaybe <- lookupEnv "RUNNING_UNDER_RR"
case (rrModeMaybe, debugModeMaybe) of
(Just rrModeVal, _) -> putStrLn "RUNNING_UNDER_RR detected: not launching VR"
(_, Just debugModeVal) -> putStrLn "DEBUG mode detected: not launching VR"
_ ->
do openBackend >>= initVR (safeCast self) >>= \case
InitVRSuccess -> do
putStrLn "InitVRSuccess"
vrViewport <- unsafeInstance GodotViewport "Viewport"
G.set_name vrViewport =<< toLowLevel "VRViewport"
G.set_update_mode vrViewport 3 -- UPDATE_ALWAYS
G.set_use_arvr vrViewport True
vrViewportSize <- toLowLevel (V2 100 100) :: IO GodotVector2 -- Godot requires us to set a default size
G.set_size vrViewport vrViewportSize
G.add_child self (safeCast vrViewport) True
orig <- unsafeInstance GodotARVROrigin "ARVROrigin"
G.add_child vrViewport (safeCast orig) True
-- Add the HMD as a child of the origin node
hmd <- unsafeInstance GodotARVRCamera "ARVRCamera"
G.add_child orig (safeCast hmd) True
Add two controllers and connect their button presses to the Simula
-- node.
let addCt = addSimulaController orig
addCt "LeftController" 1 >>= connectController
addCt "RightController" 2 >>= connectController
return ()
InitVRFailed -> putStrLn "InitVRFailed"
gpcObj <- "res-haskell-plugin/PancakeCamera.gdns"
& newNS' [] :: IO GodotObject
maybeGPC <- asNativeScript gpcObj :: IO (Maybe GodotPancakeCamera)
let gpc = Data.Maybe.fromJust maybeGPC
G.set_current gpc True
G.add_child self (safeCast gpc) True
return ()
where
-- Helper function for black texture debugging.
-- From the internet:
-- var img = Image()
-- var tex = ImageTexture.new()
img.load("image.png " )
-- tex.create_from_image(img)
getTextureFromURL :: String -> IO (GodotTexture)
getTextureFromURL urlStr = do
-- instance new types
godotImage <- unsafeInstance GodotImage "Image" :: IO GodotImage
godotImageTexture <- unsafeInstance GodotImageTexture "ImageTexture"
-- Get image from URL
pngUrl <- toLowLevel (pack urlStr) :: IO GodotString
exitCode <- G.load godotImageTexture pngUrl -- load :: GodotImageTexture -> GodotString -> IO Int
-- Load image into texture
G.create_from_image godotImageTexture godotImage G.TEXTURE_FLAGS_DEFAULT
return (safeCast godotImageTexture) -- NOTE: This [probably] leaks godotImage?
addSimulaServerNode :: IO GodotSpatial
addSimulaServerNode = do
gss <- "res-haskell-plugin/SimulaServer.gdns"
& newNS'' GodotSpatial "Spatial" []
G.set_name gss =<< toLowLevel "SimulaServer"
self ( ) True
G.add_child self ((safeCast gss) :: GodotNode) True
G.print_tree ( ( safeCast ) : : ) -- Print tree for debugging
-- Test
return gss
connectController :: GodotSimulaController -> IO ()
connectController ct = do
-- putStrLn "connectController"
argsPressed <- Api.godot_array_new
ctA <- toLowLevel $ toVariant $ asObj ct :: IO GodotVariant
ctB <- toLowLevel $ toVariant $ asObj ct :: IO GodotVariant
trueGV <- toLowLevel $ toVariant True :: IO GodotVariant
falseGV <- toLowLevel $ toVariant False :: IO GodotVariant
Api.godot_array_append argsPressed ctA
Api.godot_array_append argsPressed trueGV
argsReleased <- Api.godot_array_new
Api.godot_array_append argsReleased ctB
Api.godot_array_append argsReleased falseGV
btnSignal <- toLowLevel "on_button_signal"
btnPressed <- toLowLevel "button_pressed"
btnReleased <- toLowLevel "button_release"
G.connect ct btnPressed (safeCast self) btnSignal argsPressed 0
G.connect ct btnReleased (safeCast self) btnSignal argsReleased 0
mapM_ Api.godot_variant_destroy [ctA, ctB, trueGV, falseGV]
mapM_ Api.godot_string_destroy [btnSignal, btnPressed, btnReleased]
mapM_ Api.godot_array_destroy [argsPressed]
return ()
on_button_signal :: GodotSimula -> [GodotVariant] -> IO ()
on_button_signal self [buttonVar, controllerVar, pressedVar] = do
-- putStrLn "on_button_signal in Simula.hs"
button <- fromGodotVariant buttonVar
controllerObj <- fromGodotVariant controllerVar
tryObjectCast controllerObj
let controller = Data.Maybe.fromJust maybeController
--Just controller <- asNativeScript controllerObj -- tryObjectCast controllerObj
pressed <- fromGodotVariant pressedVar
onButton self controller button pressed
return ()
onButton :: GodotSimula -> GodotSimulaController -> Int -> Bool -> IO ()
onButton self gsc button pressed = do
-- putStrLn "onButton in Simula.hs"
case (button, pressed) of
(OVR_Button_Grip, False) -> -- Release grabbed
readTVarIO gst
>>= processGrabEvent gsc Nothing pressed
>>= atomically
. writeTVar gst
_ -> do
let rc = _gscRayCast gsc
G.force_raycast_update rc
whenM (G.is_colliding rc) $ do
maybeSprite <- G.get_collider rc >>= asNativeScript :: IO (Maybe GodotSimulaViewSprite) --fromNativeScript
let sprite = Data . Maybe.fromJust
maybe (return ()) (onSpriteInput rc) maybeSprite
-- >>= maybe (return ()) (onSpriteInput rc)
where
gst = _sGrabState self
onSpriteInput rc sprite =
G.get_collision_point rc >>= case button of
OVR_Button_Trigger -> processClickEvent sprite (Button pressed G.BUTTON_LEFT)
OVR_Button_AppMenu -> processClickEvent sprite (Button pressed G.BUTTON_RIGHT)
OVR_Button_Grip -> const $
readTVarIO gst
>>= processGrabEvent gsc (Just sprite) pressed
>>= atomically
. writeTVar gst
_ -> const $ return ()
process :: GodotSimula -> [GodotVariant] -> IO ()
process self _ = do
-- putStrLn "process in Simula.hs"
let gst = _sGrabState self
atomically (readTVar gst)
>>= handleState
>>= atomically . writeTVar gst
return ()
| null | https://raw.githubusercontent.com/SimulaVR/Simula/0a6041c73c419a35fc45c028191ac1c32d4c419f/addons/godot-haskell-plugin/src/Plugin/Simula.hs | haskell | # LANGUAGE BangPatterns #
className = "Simula"
OpenHMD is unfortunately not yet a working substitute for OpenVR
UPDATE_ALWAYS
Godot requires us to set a default size
Add the HMD as a child of the origin node
node.
Helper function for black texture debugging.
From the internet:
var img = Image()
var tex = ImageTexture.new()
tex.create_from_image(img)
instance new types
Get image from URL
load :: GodotImageTexture -> GodotString -> IO Int
Load image into texture
NOTE: This [probably] leaks godotImage?
Print tree for debugging
Test
putStrLn "connectController"
putStrLn "on_button_signal in Simula.hs"
Just controller <- asNativeScript controllerObj -- tryObjectCast controllerObj
putStrLn "onButton in Simula.hs"
Release grabbed
fromNativeScript
>>= maybe (return ()) (onSpriteInput rc)
putStrLn "process in Simula.hs" | # LANGUAGE TypeFamilies #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
module Plugin.Simula (GodotSimula(..)) where
import Plugin.Imports
import Data.Maybe
import Plugin.Input
import Plugin.Input.Grab
import Plugin.SimulaController
import Plugin.SimulaViewSprite
import Plugin.VR
import Plugin.Types
import Plugin.PancakeCamera
import Godot.Core.GodotVisualServer as G
import Godot.Core.GodotGlobalConstants as G
import Godot.Nativescript
import qualified Godot.Gdnative.Internal.Api as Api
import qualified Godot.Methods as G
import Godot.Gdnative.Types
import Godot.Api
import qualified Godot.Gdnative.Internal.Api as Api
import qualified Godot.Methods as G
import Godot.Internal.Dispatch ( (:<)
, safeCast
)
import Godot.Gdnative.Internal ( GodotNodePath
, GodotObject
)
import System.Environment
data GodotSimula = GodotSimula
{ _sObj :: GodotObject
, _sGrabState :: TVar GrabState
}
instance NativeScript GodotSimula where
classInit node = GodotSimula (safeCast node)
<$> newTVarIO NoGrab
classExtends = " Node "
classMethods =
[ func NoRPC "_ready" (catchGodot Plugin.Simula.ready)
, func NoRPC "_process" (catchGodot Plugin.Simula.process)
, func NoRPC "on_button_signal" (catchGodot Plugin.Simula.on_button_signal)
]
classSignals = []
instance HasBaseClass GodotSimula where
type BaseClass GodotSimula = GodotNode
super (GodotSimula obj _) = GodotNode obj
ready :: GodotSimula -> [GodotVariant] -> IO ()
ready self _ = do
gssSpatial <- addSimulaServerNode :: IO GodotSpatial
maybeGSS <- asNativeScript (safeCast gssSpatial) :: IO (Maybe GodotSimulaServer)
xrRuntimeJson <- lookupEnv "XR_RUNTIME_JSON"
openBackend <- case (maybeGSS, xrRuntimeJson) of
(Just gss, Nothing) -> do gssConf <- readTVarIO (gss ^. gssConfiguration)
let backend = _backend gssConf :: String
case backend of
"OpenVR" -> return openVR
"OpenXR" -> return openXR
_ -> do putStrLn "Unable to parse backend; defaulting to OpenVR"
return openVR
(Just gss, _) -> return openXR
(Nothing, _) -> do return openVR
debugModeMaybe <- lookupEnv "DEBUG"
rrModeMaybe <- lookupEnv "RUNNING_UNDER_RR"
case (rrModeMaybe, debugModeMaybe) of
(Just rrModeVal, _) -> putStrLn "RUNNING_UNDER_RR detected: not launching VR"
(_, Just debugModeVal) -> putStrLn "DEBUG mode detected: not launching VR"
_ ->
do openBackend >>= initVR (safeCast self) >>= \case
InitVRSuccess -> do
putStrLn "InitVRSuccess"
vrViewport <- unsafeInstance GodotViewport "Viewport"
G.set_name vrViewport =<< toLowLevel "VRViewport"
G.set_use_arvr vrViewport True
G.set_size vrViewport vrViewportSize
G.add_child self (safeCast vrViewport) True
orig <- unsafeInstance GodotARVROrigin "ARVROrigin"
G.add_child vrViewport (safeCast orig) True
hmd <- unsafeInstance GodotARVRCamera "ARVRCamera"
G.add_child orig (safeCast hmd) True
Add two controllers and connect their button presses to the Simula
let addCt = addSimulaController orig
addCt "LeftController" 1 >>= connectController
addCt "RightController" 2 >>= connectController
return ()
InitVRFailed -> putStrLn "InitVRFailed"
gpcObj <- "res-haskell-plugin/PancakeCamera.gdns"
& newNS' [] :: IO GodotObject
maybeGPC <- asNativeScript gpcObj :: IO (Maybe GodotPancakeCamera)
let gpc = Data.Maybe.fromJust maybeGPC
G.set_current gpc True
G.add_child self (safeCast gpc) True
return ()
where
img.load("image.png " )
getTextureFromURL :: String -> IO (GodotTexture)
getTextureFromURL urlStr = do
godotImage <- unsafeInstance GodotImage "Image" :: IO GodotImage
godotImageTexture <- unsafeInstance GodotImageTexture "ImageTexture"
pngUrl <- toLowLevel (pack urlStr) :: IO GodotString
G.create_from_image godotImageTexture godotImage G.TEXTURE_FLAGS_DEFAULT
addSimulaServerNode :: IO GodotSpatial
addSimulaServerNode = do
gss <- "res-haskell-plugin/SimulaServer.gdns"
& newNS'' GodotSpatial "Spatial" []
G.set_name gss =<< toLowLevel "SimulaServer"
self ( ) True
G.add_child self ((safeCast gss) :: GodotNode) True
return gss
connectController :: GodotSimulaController -> IO ()
connectController ct = do
argsPressed <- Api.godot_array_new
ctA <- toLowLevel $ toVariant $ asObj ct :: IO GodotVariant
ctB <- toLowLevel $ toVariant $ asObj ct :: IO GodotVariant
trueGV <- toLowLevel $ toVariant True :: IO GodotVariant
falseGV <- toLowLevel $ toVariant False :: IO GodotVariant
Api.godot_array_append argsPressed ctA
Api.godot_array_append argsPressed trueGV
argsReleased <- Api.godot_array_new
Api.godot_array_append argsReleased ctB
Api.godot_array_append argsReleased falseGV
btnSignal <- toLowLevel "on_button_signal"
btnPressed <- toLowLevel "button_pressed"
btnReleased <- toLowLevel "button_release"
G.connect ct btnPressed (safeCast self) btnSignal argsPressed 0
G.connect ct btnReleased (safeCast self) btnSignal argsReleased 0
mapM_ Api.godot_variant_destroy [ctA, ctB, trueGV, falseGV]
mapM_ Api.godot_string_destroy [btnSignal, btnPressed, btnReleased]
mapM_ Api.godot_array_destroy [argsPressed]
return ()
on_button_signal :: GodotSimula -> [GodotVariant] -> IO ()
on_button_signal self [buttonVar, controllerVar, pressedVar] = do
button <- fromGodotVariant buttonVar
controllerObj <- fromGodotVariant controllerVar
tryObjectCast controllerObj
let controller = Data.Maybe.fromJust maybeController
pressed <- fromGodotVariant pressedVar
onButton self controller button pressed
return ()
onButton :: GodotSimula -> GodotSimulaController -> Int -> Bool -> IO ()
onButton self gsc button pressed = do
case (button, pressed) of
readTVarIO gst
>>= processGrabEvent gsc Nothing pressed
>>= atomically
. writeTVar gst
_ -> do
let rc = _gscRayCast gsc
G.force_raycast_update rc
whenM (G.is_colliding rc) $ do
let sprite = Data . Maybe.fromJust
maybe (return ()) (onSpriteInput rc) maybeSprite
where
gst = _sGrabState self
onSpriteInput rc sprite =
G.get_collision_point rc >>= case button of
OVR_Button_Trigger -> processClickEvent sprite (Button pressed G.BUTTON_LEFT)
OVR_Button_AppMenu -> processClickEvent sprite (Button pressed G.BUTTON_RIGHT)
OVR_Button_Grip -> const $
readTVarIO gst
>>= processGrabEvent gsc (Just sprite) pressed
>>= atomically
. writeTVar gst
_ -> const $ return ()
process :: GodotSimula -> [GodotVariant] -> IO ()
process self _ = do
let gst = _sGrabState self
atomically (readTVar gst)
>>= handleState
>>= atomically . writeTVar gst
return ()
|
a0f1a2087df90a6cc15faca1bf7923192d2a05d4cefd441e2c85d2985cca4564 | input-output-hk/ouroboros-network | MockEnvironment.hs | {-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE RecordWildCards #-}
# LANGUAGE ScopedTypeVariables #
# OPTIONS_GHC -Wno - orphans #
{-# OPTIONS_GHC -Wno-deferred-out-of-scope-variables #-}
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
module Test.Ouroboros.Network.PeerSelection.MockEnvironment
( PeerGraph (..)
, GovernorMockEnvironment (..)
, GovernorMockEnvironmentWithoutAsyncDemotion (..)
, runGovernorInMockEnvironment
, exploreGovernorInMockEnvironment
, TraceMockEnv (..)
, TestTraceEvent (..)
, selectGovernorEvents
, selectPeerSelectionTraceEvents
, selectPeerSelectionTraceEventsUntil
, firstGossipReachablePeers
, module Ouroboros.Network.Testing.Data.Script
, module Ouroboros.Network.PeerSelection.Types
, tests
, prop_shrinkCarefully_GovernorMockEnvironment
) where
import Data.Dynamic (fromDynamic)
import Data.List (nub)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Proxy (Proxy (..))
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Typeable (Typeable)
import Data.Void (Void)
import System.Random (mkStdGen)
import Control.Concurrent.Class.MonadSTM
import Control.Exception (throw)
import Control.Monad.Class.MonadAsync
import Control.Monad.Class.MonadFork
import Control.Monad.Class.MonadSay
import Control.Monad.Class.MonadTest
import Control.Monad.Class.MonadThrow
import Control.Monad.Class.MonadTime
import Control.Monad.Class.MonadTimer hiding (timeout)
import qualified Control.Monad.Fail as Fail
import Control.Monad.IOSim
import Control.Tracer (Tracer (..), contramap, traceWith)
import Ouroboros.Network.ExitPolicy
import Ouroboros.Network.PeerSelection.Governor hiding
(PeerSelectionState (..))
import qualified Ouroboros.Network.PeerSelection.LocalRootPeers as LocalRootPeers
import Ouroboros.Network.PeerSelection.Types
import Ouroboros.Network.Testing.Data.Script (PickScript,
ScriptDelay (..), TimedScript, arbitraryPickScript,
initScript', interpretPickScript, playTimedScript,
prop_shrink_Script, singletonScript, stepScript)
import Ouroboros.Network.Testing.Utils (arbitrarySubset,
prop_shrink_nonequal, prop_shrink_valid)
import Test.Ouroboros.Network.PeerSelection.Instances
import Test.Ouroboros.Network.PeerSelection.LocalRootPeers as LocalRootPeers hiding
(tests)
import Test.Ouroboros.Network.PeerSelection.PeerGraph
import Test.Ouroboros.Network.ShrinkCarefully
import Test.QuickCheck
import Test.Tasty (TestTree, localOption, testGroup)
import Test.Tasty.QuickCheck (QuickCheckMaxSize (..), testProperty)
-- | Properties exercising the QuickCheck generators and shrinkers that back
-- the mock environment: 'Script', 'GovernorScripts', 'PeerSelectionTargets',
-- 'PeerGraph' and 'GovernorMockEnvironment' itself.
tests :: TestTree
tests = testGroup "Ouroboros.Network.PeerSelection" [mockEnvironmentTests]
  where
    mockEnvironmentTests =
      testGroup "MockEnvironment"
        [ testProperty "shrink for Script"
            prop_shrink_Script
        , testProperty "shrink for GovernorScripts"
            prop_shrink_GovernorScripts
        , testProperty "arbitrary for PeerSelectionTargets"
            prop_arbitrary_PeerSelectionTargets
        , testProperty "shrink for PeerSelectionTargets"
            prop_shrink_PeerSelectionTargets
        , testProperty "arbitrary for PeerGraph"
            prop_arbitrary_PeerGraph
        , localOption (QuickCheckMaxSize 30) $
            testProperty "shrink for PeerGraph" prop_shrink_PeerGraph
        , testProperty "arbitrary for GovernorMockEnvironment"
            prop_arbitrary_GovernorMockEnvironment
        , localOption (QuickCheckMaxSize 30) $
            testProperty "shrink for GovernorMockEnvironment"
              prop_shrink_GovernorMockEnvironment
        , testProperty "shrink GovernorMockEnvironment carefully"
            prop_shrinkCarefully_GovernorMockEnvironment
        ]
--
-- Mock environment types
--
-- | The data needed to execute the peer selection governor in a test with a
-- mock network environment. It contains the data needed to provide the
-- 'PeerSelectionActions' and 'PeerSelectionPolicy' to run the governor.
--
-- The representations are chosen to be easily shrinkable. See the @Arbitrary@
-- instances.
--
data GovernorMockEnvironment = GovernorMockEnvironment {
       -- | Static topology: every peer, and per peer its gossip and
       -- connection scripts.
       peerGraph               :: PeerGraph,
       -- | Locally configured root peers (must be nodes of 'peerGraph').
       localRootPeers          :: LocalRootPeers PeerAddr,
       -- | Publicly discovered root peers (must be nodes of 'peerGraph').
       publicRootPeers         :: Set PeerAddr,
       -- | Time-scripted sequence of governor targets.
       targets                 :: TimedScript PeerSelectionTargets,
       -- | Scripted answers for the corresponding policy pick functions.
       pickKnownPeersForGossip :: PickScript PeerAddr,
       pickColdPeersToPromote  :: PickScript PeerAddr,
       pickWarmPeersToPromote  :: PickScript PeerAddr,
       pickHotPeersToDemote    :: PickScript PeerAddr,
       pickWarmPeersToDemote   :: PickScript PeerAddr,
       pickColdPeersToForget   :: PickScript PeerAddr
     }
  deriving (Show, Eq)
-- | Mock connection handle handed to the governor: the peer's address plus
-- the TVar holding the connection's current 'PeerStatus'.
data PeerConn m = PeerConn !PeerAddr !(TVar m PeerStatus)

-- | The status TVar has no 'Show'; print only the address.
instance Show (PeerConn m) where
    show (PeerConn peeraddr _) = "PeerConn " ++ show peeraddr
-- | 'GovernorMockEnvironment' which does not do any asynchronous demotions.
--
-- The generator wraps an arbitrary environment but rewrites every node's
-- connection script to a constant @('Noop', 'ShortDelay')@, so the mock
-- network never demotes peers behind the governor's back.
newtype GovernorMockEnvironmentWithoutAsyncDemotion =
    GovernorMockEnvironmentWAD GovernorMockEnvironment
  deriving Show

instance Arbitrary GovernorMockEnvironmentWithoutAsyncDemotion where
    arbitrary = GovernorMockEnvironmentWAD . fixGraph <$> arbitrary
      where
        -- Replace each node's connection script; everything else unchanged.
        fixGraph g@GovernorMockEnvironment { peerGraph = PeerGraph peerGraph } =
            g { peerGraph = PeerGraph (map fixNode peerGraph) }
        fixNode (addr, addrs, peerInfo) =
            (addr, addrs, peerInfo { connectionScript = singletonScript (Noop, ShortDelay) })
    shrink (GovernorMockEnvironmentWAD env) = map GovernorMockEnvironmentWAD (shrink env)
-- | Invariant used to check the QC generator and shrinker: the peer graph is
-- itself valid, every root peer (local or public) is a node of the graph, and
-- every scripted target set is sane.
--
-- NOTE: local and public root peer sets should be disjoint, but we do not
-- check that here — keeping them disjoint is the governor's job and the
-- point is to test that it actually enforces the invariant.
validGovernorMockEnvironment :: GovernorMockEnvironment -> Bool
validGovernorMockEnvironment GovernorMockEnvironment {
                               peerGraph,
                               localRootPeers,
                               publicRootPeers,
                               targets
                             } =
    and [ validPeerGraph peerGraph
        , LocalRootPeers.keysSet localRootPeers `Set.isSubsetOf` nodes
        , publicRootPeers `Set.isSubsetOf` nodes
        , all (sanePeerSelectionTargets . fst) targets
        ]
  where
    nodes = allPeers peerGraph
--
-- Execution in the mock environment
--
-- | Run the 'peerSelectionGovernor' against the mock environment described
-- by a 'GovernorMockEnvironment' and return the resulting simulation trace.
runGovernorInMockEnvironment :: GovernorMockEnvironment -> SimTrace Void
runGovernorInMockEnvironment = runSimTrace . governorAction
-- | The action run inside the simulator: build the policy and actions from
-- the mock environment, fork the governor (so IOSimPOR can explore schedules
-- that race with it), and block the main thread forever on 'retry'.  The
-- resulting deadlock is an expected trace outcome, see
-- 'selectPeerSelectionTraceEvents'.
governorAction :: GovernorMockEnvironment -> IOSim s Void
governorAction mockEnv = do
    policy  <- mockPeerSelectionPolicy mockEnv
    actions <- mockPeerSelectionActions tracerMockEnv mockEnv policy
    exploreRaces -- explore races within the governor
    _ <- forkIO $ do -- races with the governor should be explored
      _ <- peerSelectionGovernor
             tracerTracePeerSelection
             tracerDebugPeerSelection
             tracerTracePeerSelectionCounters
             (mkStdGen 42)
             actions
             policy
      atomically retry
    atomically retry -- block to allow the governor to run
-- | Race-exploring variant of 'runGovernorInMockEnvironment': run
-- 'governorAction' under 'exploreSimTrace' with the given tweaks to the
-- exploration options, handing each discovered schedule to the continuation.
exploreGovernorInMockEnvironment :: Testable test
                                 => (ExplorationOptions -> ExplorationOptions)
                                 -> GovernorMockEnvironment
                                 -> (Maybe (SimTrace Void) -> SimTrace Void -> test)
                                 -> Property
exploreGovernorInMockEnvironment optsf mockEnv =
    exploreSimTrace optsf (governorAction mockEnv)
-- | Trace events emitted by the mock environment itself (as opposed to the
-- governor under test): root-peer configuration, gossip requests and results,
-- the connection lifecycle and the scripted asynchronous demotions.
data TraceMockEnv = TraceEnvAddPeers PeerGraph
                  | TraceEnvSetLocalRoots (LocalRootPeers PeerAddr)
                  | TraceEnvRequestPublicRootPeers
                  | TraceEnvSetPublicRoots (Set PeerAddr)
                  | TraceEnvPublicRootTTL
                  | TraceEnvGossipTTL PeerAddr
                  | TraceEnvSetTargets PeerSelectionTargets
                  | TraceEnvPeersDemote AsyncDemotion PeerAddr
                  | TraceEnvEstablishConn PeerAddr
                  | TraceEnvActivatePeer PeerAddr
                  | TraceEnvDeactivatePeer PeerAddr
                  | TraceEnvCloseConn PeerAddr
                  | TraceEnvRootsResult [PeerAddr]
                  | TraceEnvGossipRequest PeerAddr (Maybe ([PeerAddr], GossipTime))
                  | TraceEnvGossipResult PeerAddr [PeerAddr]
                  | TraceEnvPeersStatus (Map PeerAddr PeerStatus)
  deriving Show
-- | Build the 'PeerSelectionActions' for the governor from the mock
-- environment: one pair of (gossip script, connection script) TVars per node
-- of the graph, a targets TVar driven by the timed script, and a shared map
-- of per-peer status TVars whose every update is mirrored into the dynamic
-- trace as 'TraceEnvPeersStatus'.
mockPeerSelectionActions :: forall m.
                            (MonadAsync m, MonadTimer m, Fail.MonadFail m,
                             MonadThrow (STM m), MonadTraceSTM m)
                         => Tracer m TraceMockEnv
                         -> GovernorMockEnvironment
                         -> PeerSelectionPolicy PeerAddr m
                         -> m (PeerSelectionActions PeerAddr (PeerConn m) m)
mockPeerSelectionActions tracer
                         env@GovernorMockEnvironment {
                           peerGraph,
                           localRootPeers,
                           publicRootPeers,
                           targets
                         }
                         policy = do
    -- One (gossip, connection) script TVar pair per node in the graph.
    scripts <- Map.fromList <$>
                     sequence
                       [ (\a b -> (addr, (a, b)))
                           <$> initScript' gossipScript
                           <*> initScript' connectionScript
                       | let PeerGraph adjacency = peerGraph
                       , (addr, _, GovernorScripts {
                                     gossipScript,
                                     connectionScript
                                   }) <- adjacency
                       ]
    targetsVar <- playTimedScript (contramap TraceEnvSetTargets tracer) targets
    -- Shared map of per-peer status TVars; 'traceTVar' snapshots every write.
    peerConns <- atomically $ do
      v <- newTVar Map.empty
      traceTVar proxy
                v (\_ a -> TraceDynamic . TraceEnvPeersStatus
                       <$> snapshotPeersStatus proxy a)
      return v
    traceWith tracer (TraceEnvAddPeers peerGraph)
    traceWith tracer (TraceEnvSetLocalRoots localRootPeers)   --TODO: make dynamic
    traceWith tracer (TraceEnvSetPublicRoots publicRootPeers) --TODO: make dynamic
    return $ mockPeerSelectionActions'
               tracer env policy
               scripts targetsVar peerConns
  where
    proxy :: Proxy m
    proxy = Proxy
-- | Thrown by the mock 'PeerStateActions' when a peer's status TVar turns
-- out to be 'PeerCold' in the middle of a promotion or demotion.
data TransitionError = ActivationError | DeactivationError
  deriving (Show, Typeable)

instance Exception TransitionError
-- | The pure part of 'mockPeerSelectionActions': assemble the record of
-- actions from the already-initialised script TVars, the targets TVar and
-- the shared per-peer status map.  All "network" operations are simulated
-- with 'threadDelay's and scripted responses.
mockPeerSelectionActions' :: forall m.
                             (MonadAsync m, MonadSTM m, MonadTimer m, Fail.MonadFail m,
                              MonadThrow (STM m))
                          => Tracer m TraceMockEnv
                          -> GovernorMockEnvironment
                          -> PeerSelectionPolicy PeerAddr m
                          -> Map PeerAddr (TVar m GossipScript, TVar m ConnectionScript)
                          -> TVar m PeerSelectionTargets
                          -> TVar m (Map PeerAddr (TVar m PeerStatus))
                          -> PeerSelectionActions PeerAddr (PeerConn m) m
mockPeerSelectionActions' tracer
                          GovernorMockEnvironment {
                            localRootPeers,
                            publicRootPeers
                          }
                          PeerSelectionPolicy {
                            policyGossipRetryTime
                          }
                          scripts
                          targetsVar
                          connsVar =
    PeerSelectionActions {
      readLocalRootPeers       = return (LocalRootPeers.toGroups localRootPeers),
      requestPublicRootPeers,
      readPeerSelectionTargets = readTVar targetsVar,
      requestPeerGossip,
      peerStateActions         = PeerStateActions {
        establishPeerConnection,
        monitorPeerConnection,
        activatePeerConnection,
        deactivatePeerConnection,
        closePeerConnection
      }
    }
  where
    -- TODO: make this dynamic
    -- Answer with the static public root set; a forked thread traces the
    -- TTL expiry after 60 (simulated) seconds.
    requestPublicRootPeers _n = do
      traceWith tracer TraceEnvRequestPublicRootPeers
      let ttl :: Num n => n
          ttl = 60
      _ <- async $ do
        threadDelay ttl
        traceWith tracer TraceEnvPublicRootTTL
      traceWith tracer (TraceEnvRootsResult (Set.toList publicRootPeers))
      return (publicRootPeers, ttl)

    -- Answer a gossip request from the peer's gossip script: 'Nothing'
    -- simulates a failed request, 'Just' returns the scripted peers after
    -- the scripted delay.  A forked thread traces the retry-time expiry.
    requestPeerGossip addr = do
      let Just (gossipScript, _) = Map.lookup addr scripts
      mgossip <- stepScript gossipScript
      traceWith tracer (TraceEnvGossipRequest addr mgossip)
      _ <- async $ do
        threadDelay policyGossipRetryTime
        traceWith tracer (TraceEnvGossipTTL addr)
      case mgossip of
        Nothing -> do
          threadDelay 1
          traceWith tracer (TraceEnvGossipResult addr [])
          fail "no peers"
        Just (peeraddrs, time) -> do
          threadDelay (interpretGossipTime time)
          traceWith tracer (TraceEnvGossipResult addr peeraddrs)
          return peeraddrs

    -- Create a new 'PeerWarm' status TVar, register it in the shared map,
    -- and fork the scripted monitoring loop that performs asynchronous
    -- demotions for this peer.
    establishPeerConnection :: PeerAddr -> m (PeerConn m)
    establishPeerConnection peeraddr = do
      --TODO: add support for variable delays and synchronous failure
      traceWith tracer (TraceEnvEstablishConn peeraddr)
      threadDelay 1
      conn@(PeerConn _ v) <- atomically $ do
        conn  <- newTVar PeerWarm
        conns <- readTVar connsVar
        let !conns' = Map.insert peeraddr conn conns
        writeTVar connsVar conns'
        return (PeerConn peeraddr conn)
      let Just (_, connectScript) = Map.lookup peeraddr scripts
      _ <- async $
        -- monitoring loop which does asynchronous demotions. It will terminate
        -- as soon as either of the events:
        --
        -- + the script returns 'Noop'
        -- + peer demoted to 'PeerCold'
        --
        let loop = do
              (demotion, delay) <- stepScript connectScript
              let interpretScriptDelay NoDelay    = 1
                  interpretScriptDelay ShortDelay = 60
                  interpretScriptDelay LongDelay  = 600
                  interpretScriptDelay (Delay a)  = a -- not used by the generator
              done <-
                case demotion of
                  Noop   -> return True
                  ToWarm -> do
                    threadDelay (interpretScriptDelay delay)
                    atomically $ do
                      s <- readTVar v
                      case s of
                        PeerHot  -> writeTVar v PeerWarm
                                 >> return False
                        PeerWarm -> return False
                        PeerCold -> return True
                  ToCold -> do
                    threadDelay (interpretScriptDelay delay)
                    atomically $ do
                      s <- readTVar v
                      case s of
                        PeerCold -> return True
                        _        -> writeTVar v PeerCold
                                 >> return True
              traceWith tracer (TraceEnvPeersDemote demotion peeraddr)
              if done
                then return ()
                else loop
        in loop
      return conn

    activatePeerConnection :: PeerConn m -> m ()
    activatePeerConnection (PeerConn peeraddr conn) = do
      traceWith tracer (TraceEnvActivatePeer peeraddr)
      threadDelay 1
      atomically $ do
        status <- readTVar conn
        case status of
          PeerHot  -> error "activatePeerConnection of hot peer"
          PeerWarm -> writeTVar conn PeerHot
          --TODO: check it's just a race condition and not just wrong:
          --
          -- We throw 'ActivationError' for the following reason:
          -- 'PeerCold' can be set by the monitoring loop started by
          -- 'establishedPeerConnection' above. However if that happens we
          -- want to signal the governor that the warm -> hot transition
          -- errored. Otherwise 'jobPromoteWarmPeer' will try to update the
          -- state as if the transition went fine which will violate
          -- 'invariantPeerSelectionState'.
          PeerCold -> throwIO ActivationError

    deactivatePeerConnection :: PeerConn m -> m ()
    deactivatePeerConnection (PeerConn peeraddr conn) = do
      traceWith tracer (TraceEnvDeactivatePeer peeraddr)
      atomically $ do
        status <- readTVar conn
        case status of
          PeerHot  -> writeTVar conn PeerWarm
          --TODO: check it's just a race condition and not just wrong:
          PeerWarm -> return ()
          -- See the note in 'activatePeerConnection' why we throw an exception
          -- here.
          PeerCold -> throwIO DeactivationError

    -- Demote to 'PeerCold' (from any state) and drop the peer from the
    -- shared status map, in a single STM transaction.
    closePeerConnection :: PeerConn m -> m ()
    closePeerConnection (PeerConn peeraddr conn) = do
      traceWith tracer (TraceEnvCloseConn peeraddr)
      atomically $ do
        status <- readTVar conn
        case status of
          PeerHot  -> writeTVar conn PeerCold
          --TODO: check it's just a race condition and not just wrong:
          PeerWarm -> writeTVar conn PeerCold
          PeerCold -> return ()
        conns <- readTVar connsVar
        let !conns' = Map.delete peeraddr conns
        writeTVar connsVar conns'

    monitorPeerConnection :: PeerConn m -> STM m (PeerStatus, Maybe ReconnectDelay)
    monitorPeerConnection (PeerConn _peeraddr conn) = (,) <$> readTVar conn
                                                         <*> pure Nothing
-- | Read every peer's current status out of the map of status TVars.  Runs
-- in 'InspectMonad' so it can be used from a 'traceTVar' callback.
snapshotPeersStatus :: MonadInspectSTM m
                    => proxy m
                    -> Map PeerAddr (TVar m PeerStatus)
                    -> InspectMonad m (Map PeerAddr PeerStatus)
snapshotPeersStatus p = traverse (inspectTVar p)
-- | Build the governor's 'PeerSelectionPolicy' from the environment's pick
-- scripts: each policy pick function ignores its arguments and answers from
-- the corresponding script TVar.  Timeouts and retry times are fixed
-- constants (in simulated seconds).
mockPeerSelectionPolicy :: MonadSTM m
                        => GovernorMockEnvironment
                        -> m (PeerSelectionPolicy PeerAddr m)
mockPeerSelectionPolicy GovernorMockEnvironment {
                          pickKnownPeersForGossip,
                          pickColdPeersToPromote,
                          pickWarmPeersToPromote,
                          pickHotPeersToDemote,
                          pickWarmPeersToDemote,
                          pickColdPeersToForget
                        } = do
    pickKnownPeersForGossipVar <- initScript' pickKnownPeersForGossip
    pickColdPeersToPromoteVar  <- initScript' pickColdPeersToPromote
    pickWarmPeersToPromoteVar  <- initScript' pickWarmPeersToPromote
    pickHotPeersToDemoteVar    <- initScript' pickHotPeersToDemote
    pickWarmPeersToDemoteVar   <- initScript' pickWarmPeersToDemote
    pickColdPeersToForgetVar   <- initScript' pickColdPeersToForget
    return PeerSelectionPolicy {
      policyPickKnownPeersForGossip = \_ _ _ -> interpretPickScript pickKnownPeersForGossipVar,
      policyPickColdPeersToPromote  = \_ _ _ -> interpretPickScript pickColdPeersToPromoteVar,
      policyPickWarmPeersToPromote  = \_ _ _ -> interpretPickScript pickWarmPeersToPromoteVar,
      policyPickHotPeersToDemote    = \_ _ _ -> interpretPickScript pickHotPeersToDemoteVar,
      policyPickWarmPeersToDemote   = \_ _ _ -> interpretPickScript pickWarmPeersToDemoteVar,
      policyPickColdPeersToForget   = \_ _ _ -> interpretPickScript pickColdPeersToForgetVar,
      policyFindPublicRootTimeout   = 5,    -- seconds
      policyMaxInProgressGossipReqs = 2,
      policyGossipRetryTime         = 3600, -- seconds
      policyGossipBatchWaitTime     = 3,    -- seconds
      policyGossipOverallTimeout    = 10,   -- seconds
      policyErrorDelay              = 10    -- seconds
    }
--
-- Utils for properties
--
-- | Events selected out of an 'IOSim' trace when writing properties:
-- the governor's trace/debug/counter events plus the mock environment's
-- own 'TraceMockEnv' events.
data TestTraceEvent = GovernorDebug (DebugPeerSelection PeerAddr)
                    | GovernorEvent (TracePeerSelection PeerAddr)
                    | GovernorCounters PeerSelectionCounters
                    | MockEnvEvent TraceMockEnv
  -- Warning: be careful with writing properties that rely
  -- on trace events from both the governor and from the
  -- environment. These events typically occur in separate
  -- threads and so are not causally ordered. It is ok to use
  -- them for timeout/eventually properties, but not for
  -- properties that check conditions synchronously.
  -- The governor debug vs other events are fully ordered.
  deriving Show
-- | Governor trace events, wrapped as 'GovernorEvent' in the test trace.
tracerTracePeerSelection :: Tracer (IOSim s) (TracePeerSelection PeerAddr)
tracerTracePeerSelection = contramap GovernorEvent tracerTestTraceEvent
-- | Governor debug events, wrapped as 'GovernorDebug'.  The governor state
-- carried inside 'TraceGovernorState' is parameterised by the connection
-- handle type; we blank the handles out to @()@ before tracing.
tracerDebugPeerSelection :: Tracer (IOSim s) (DebugPeerSelection PeerAddr)
tracerDebugPeerSelection =
    contramap (GovernorDebug . blankPeerConns) tracerTestTraceEvent
  where
    blankPeerConns :: DebugPeerSelection peeraddr -> DebugPeerSelection peeraddr
    blankPeerConns (TraceGovernorState btime wtime st) =
      TraceGovernorState btime wtime (() <$ st)
-- | Governor counters, wrapped as 'GovernorCounters'.
tracerTracePeerSelectionCounters :: Tracer (IOSim s) PeerSelectionCounters
tracerTracePeerSelectionCounters = contramap GovernorCounters tracerTestTraceEvent

-- | Mock environment events, wrapped as 'MockEnvEvent'.
tracerMockEnv :: Tracer (IOSim s) TraceMockEnv
tracerMockEnv = contramap MockEnvEvent tracerTestTraceEvent

-- | Sink for all test trace events: recorded dynamically (for the selectors
-- below) and also printed via 'say'.
tracerTestTraceEvent :: Tracer (IOSim s) TestTraceEvent
tracerTestTraceEvent = dynamicTracer <> Tracer (say . show)

-- | Record a value in the simulation trace as a 'Data.Dynamic.Dynamic'.
dynamicTracer :: Typeable a => Tracer (IOSim s) a
dynamicTracer = Tracer traceM
-- | Extract all dynamically-traced 'TestTraceEvent's (with their timestamps)
-- from a simulation trace; handles both plain and IOSimPOR trace
-- constructors.  A deadlock ends the trace normally (the mock main thread
-- blocks on 'retry' by design); a main-thread exception is re-thrown.
selectPeerSelectionTraceEvents :: SimTrace a -> [(Time, TestTraceEvent)]
selectPeerSelectionTraceEvents = go
  where
    go (SimTrace t _ _ (EventLog e) trace)
     | Just x <- fromDynamic e       = (t,x) : go trace
    go (SimPORTrace t _ _ _ (EventLog e) trace)
     | Just x <- fromDynamic e       = (t,x) : go trace
    go (SimTrace _ _ _ _ trace)      = go trace
    go (SimPORTrace _ _ _ _ _ trace) = go trace
    go (TraceRacesFound _ trace)     = go trace
    go (TraceMainException _ e _)    = throw e
    go (TraceDeadlock _ _)           = [] -- expected result in many cases
    go (TraceMainReturn _ _ _)       = []
    go TraceLoop                     = error "Step time limit exceeded"
-- | Like 'selectPeerSelectionTraceEvents' but stops at the first trace entry
-- whose timestamp is later than the given time bound.
selectPeerSelectionTraceEventsUntil :: Time -> SimTrace a -> [(Time, TestTraceEvent)]
selectPeerSelectionTraceEventsUntil tmax = go
  where
    go (SimTrace t _ _ _ _)
     | t > tmax                      = []
    go (SimTrace t _ _ (EventLog e) trace)
     | Just x <- fromDynamic e       = (t,x) : go trace
    go (SimPORTrace t _ _ _ _ _)
     | t > tmax                      = []
    go (SimPORTrace t _ _ _ (EventLog e) trace)
     | Just x <- fromDynamic e       = (t,x) : go trace
    go (SimTrace _ _ _ _ trace)      = go trace
    go (SimPORTrace _ _ _ _ _ trace) = go trace
    go (TraceRacesFound _ trace)     = go trace
    go (TraceMainException _ e _)    = throw e
    go (TraceDeadlock _ _)           = [] -- expected result in many cases
    go (TraceMainReturn _ _ _)       = []
    go TraceLoop                     = error "Step time limit exceeded"
-- | Keep only the governor's 'TracePeerSelection' events, unwrapping them
-- from 'GovernorEvent'.
selectGovernorEvents :: [(Time, TestTraceEvent)]
                     -> [(Time, TracePeerSelection PeerAddr)]
selectGovernorEvents = concatMap govEvent
  where
    govEvent (t, GovernorEvent e) = [(t, e)]
    govEvent _                    = []
--
QuickCheck instances
--
-- Generator: first the graph, then root peers drawn from its nodes (so the
-- root sets depend on the graph); everything else is independent.
instance Arbitrary GovernorMockEnvironment where
  arbitrary = do
      -- Dependency of the root set on the graph
      peerGraph <- arbitrary
      let peersSet = allPeers peerGraph
      (localRootPeers,
       publicRootPeers) <- arbitraryRootPeers peersSet
      -- But the others are independent
      targets <- arbitrary
      let arbitrarySubsetOfPeers = arbitrarySubset peersSet
      pickKnownPeersForGossip <- arbitraryPickScript arbitrarySubsetOfPeers
      pickColdPeersToPromote  <- arbitraryPickScript arbitrarySubsetOfPeers
      pickWarmPeersToPromote  <- arbitraryPickScript arbitrarySubsetOfPeers
      pickHotPeersToDemote    <- arbitraryPickScript arbitrarySubsetOfPeers
      pickWarmPeersToDemote   <- arbitraryPickScript arbitrarySubsetOfPeers
      pickColdPeersToForget   <- arbitraryPickScript arbitrarySubsetOfPeers
      return GovernorMockEnvironment{..}
    where
      arbitraryRootPeers :: Set PeerAddr
                         -> Gen (LocalRootPeers PeerAddr, Set PeerAddr)
      arbitraryRootPeers peers | Set.null peers =
        return (LocalRootPeers.empty, Set.empty)
      arbitraryRootPeers peers = do
        -- We decide how many we want and then pick randomly.
        sz <- getSize
        let minroots
              | sz >= 10  = 1
              | otherwise = 0
            maxroots = ceiling
                     . sqrt
                     . (fromIntegral :: Int -> Double)
                     . length
                     $ peers
        numroots <- choose (minroots, maxroots)
        ixs <- vectorOf numroots (getNonNegative <$> arbitrary)
        let pick n = Set.elemAt i peers where i = n `mod` Set.size peers
            rootPeers = nub (map pick ixs)
        -- divide into local and public, but with a bit of overlap:
        local <- vectorOf (length rootPeers) (choose (0, 10 :: Int))
        -- Deliberately asking for a small intersection in order to test if
        -- the Governor actually takes care of this invariant
        let localRootsSet  = Set.fromList [ x | (x, v) <- zip rootPeers local
                                              , v <= 5 ]
            publicRootsSet = Set.fromList [ x | (x, v) <- zip rootPeers local
                                              , v >= 5 ]
        localRoots <- arbitraryLocalRootPeers localRootsSet
        return (localRoots, publicRootsSet)

  shrink env@GovernorMockEnvironment {
           peerGraph,
           localRootPeers,
           publicRootPeers,
           targets,
           pickKnownPeersForGossip,
           pickColdPeersToPromote,
           pickWarmPeersToPromote,
           pickHotPeersToDemote,
           pickWarmPeersToDemote,
           pickColdPeersToForget
         } =
      -- Special rule for shrinking the peerGraph because the localRootPeers
      -- depends on it so has to be updated too.
      [ env {
          peerGraph       = peerGraph',
          localRootPeers  = LocalRootPeers.restrictKeys localRootPeers nodes',
          publicRootPeers = publicRootPeers `Set.intersection` nodes'
        }
      | peerGraph' <- shrink peerGraph
      , let nodes' = allPeers peerGraph' ]
      -- All the others are generic.
   ++ [ GovernorMockEnvironment {
          peerGraph,
          localRootPeers          = localRootPeers',
          publicRootPeers         = publicRootPeers',
          targets                 = targets',
          pickKnownPeersForGossip = pickKnownPeersForGossip',
          pickColdPeersToPromote  = pickColdPeersToPromote',
          pickWarmPeersToPromote  = pickWarmPeersToPromote',
          pickHotPeersToDemote    = pickHotPeersToDemote',
          pickWarmPeersToDemote   = pickWarmPeersToDemote',
          pickColdPeersToForget   = pickColdPeersToForget'
        }
      | (localRootPeers', publicRootPeers', targets',
         pickKnownPeersForGossip',
         pickColdPeersToPromote',
         pickWarmPeersToPromote',
         pickHotPeersToDemote',
         pickWarmPeersToDemote',
         pickColdPeersToForget')
          <- shrink (localRootPeers, publicRootPeers, targets,
                     pickKnownPeersForGossip,
                     pickColdPeersToPromote,
                     pickWarmPeersToPromote,
                     pickHotPeersToDemote,
                     pickWarmPeersToDemote,
                     pickColdPeersToForget)
      ]
--
-- Tests for the QC Arbitrary instances
--
-- | Sanity/distribution check for the generator: tabulate root-peer counts
-- and local/public overlap, and require 'validGovernorMockEnvironment'.
prop_arbitrary_GovernorMockEnvironment :: GovernorMockEnvironment -> Property
prop_arbitrary_GovernorMockEnvironment env =
    tabulate "num root peers" [show (LocalRootPeers.size (localRootPeers env)
                                   + Set.size (publicRootPeers env))] $
    tabulate "num local root peers" [show (LocalRootPeers.size (localRootPeers env))] $
    tabulate "num public root peers" [show (Set.size (publicRootPeers env))] $
    tabulate "empty root peers" [show $ not emptyGraph && emptyRootPeers] $
    tabulate "overlapping local/public roots" [show overlappingRootPeers] $

    validGovernorMockEnvironment env
  where
    emptyGraph     = null g where PeerGraph g = peerGraph env
    emptyRootPeers = LocalRootPeers.null (localRootPeers env)
                  && Set.null (publicRootPeers env)
    overlappingRootPeers =
      not $ Set.null $
        Set.intersection
          (LocalRootPeers.keysSet (localRootPeers env))
          (publicRootPeers env)
-- | The shrinker only produces valid environments and never reproduces its
-- input.
prop_shrink_GovernorMockEnvironment :: Fixed GovernorMockEnvironment -> Property
prop_shrink_GovernorMockEnvironment env =
         prop_shrink_valid validGovernorMockEnvironment env
    .&&. prop_shrink_nonequal env
-- | Generic shrinker sanity check via
-- 'Test.Ouroboros.Network.ShrinkCarefully.prop_shrinkCarefully',
-- specialised to 'GovernorMockEnvironment'.
prop_shrinkCarefully_GovernorMockEnvironment ::
  ShrinkCarefully GovernorMockEnvironment -> Property
prop_shrinkCarefully_GovernorMockEnvironment = prop_shrinkCarefully
| null | https://raw.githubusercontent.com/input-output-hk/ouroboros-network/679c7da2079a5e9972a1c502b6a4d6af3eb76945/ouroboros-network/test/Test/Ouroboros/Network/PeerSelection/MockEnvironment.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE FlexibleContexts #
# LANGUAGE RecordWildCards #
# OPTIONS_GHC -Wno-deferred-out-of-scope-variables #
Mock environment types
| The data needed to execute the peer selection governor in a test with a
mock network environment. It contains the data needed to provide the
The representations are chosen to be easily shrinkable. See the @Arbitrary@
instances.
| 'GovernorMockEnvironment' which does not do any asynchronous demotions.
| Invariant. Used to check the QC generator and shrinker.
NOTE: Local and Public Root Peers sets should be disjoint.
However we do not check for that invariant here. The goal
is to check if the actual Governor takes care of this and enforces
the invariant.
Execution in the mock environment
| Run the 'peerSelectionGovernor' in the mock environment dictated by the
data in the 'GovernorMockEnvironment'.
The result is an execution trace.
explore races within the governor
races with the governor should be explored
block to allow the governor to run
TODO: make dynamic
TODO: make dynamic
TODO: make this dynamic
TODO: add support for variable delays and synchronous failure
monitoring loop which does asynchronous demotions. It will terminate
as soon as either of the events:
+ peer demoted to 'PeerCold'
not used by the generator
TODO: check it's just a race condition and not just wrong:
We throw 'ActivationError' for the following reason:
'PeerCold' can be set by the monitoring loop started by
'establishedPeerConnection' above. However if that happens we
want to signal the governor that the warm -> hot transition
errored. Otherwise 'jobPromoteWarmPeer' will try to update the
state as if the transition went fine which will violate
TODO: check it's just a race condition and not just wrong:
See the note in 'activatePeerConnection' why we throw an exception
here.
TODO: check it's just a race condition and not just wrong:
seconds
seconds
seconds
seconds
seconds
Utils for properties
Warning: be careful with writing properties that rely
on trace events from both the governor and from the
environment. These events typically occur in separate
threads and so are not casually ordered. It is ok to use
them for timeout/eventually properties, but not for
properties that check conditions synchronously.
The governor debug vs other events are fully ordered.
expected result in many cases
expected result in many cases
Dependency of the root set on the graph
But the others are independent
We decide how many we want and then pick randomly.
divide into local and public, but with a bit of overlap:
Deliberatly asking for a small intersection in order to test if
the Governor actually takes care of this invariant
Special rule for shrinking the peerGraph because the localRootPeers
depends on it so has to be updated too.
All the others are generic.
Tests for the QC Arbitrary instances
| # LANGUAGE NamedFieldPuns #
# LANGUAGE ScopedTypeVariables #
# OPTIONS_GHC -Wno - orphans #
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
module Test.Ouroboros.Network.PeerSelection.MockEnvironment
( PeerGraph (..)
, GovernorMockEnvironment (..)
, GovernorMockEnvironmentWithoutAsyncDemotion (..)
, runGovernorInMockEnvironment
, exploreGovernorInMockEnvironment
, TraceMockEnv (..)
, TestTraceEvent (..)
, selectGovernorEvents
, selectPeerSelectionTraceEvents
, selectPeerSelectionTraceEventsUntil
, firstGossipReachablePeers
, module Ouroboros.Network.Testing.Data.Script
, module Ouroboros.Network.PeerSelection.Types
, tests
, prop_shrinkCarefully_GovernorMockEnvironment
) where
import Data.Dynamic (fromDynamic)
import Data.List (nub)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Proxy (Proxy (..))
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Typeable (Typeable)
import Data.Void (Void)
import System.Random (mkStdGen)
import Control.Concurrent.Class.MonadSTM
import Control.Exception (throw)
import Control.Monad.Class.MonadAsync
import Control.Monad.Class.MonadFork
import Control.Monad.Class.MonadSay
import Control.Monad.Class.MonadTest
import Control.Monad.Class.MonadThrow
import Control.Monad.Class.MonadTime
import Control.Monad.Class.MonadTimer hiding (timeout)
import qualified Control.Monad.Fail as Fail
import Control.Monad.IOSim
import Control.Tracer (Tracer (..), contramap, traceWith)
import Ouroboros.Network.ExitPolicy
import Ouroboros.Network.PeerSelection.Governor hiding
(PeerSelectionState (..))
import qualified Ouroboros.Network.PeerSelection.LocalRootPeers as LocalRootPeers
import Ouroboros.Network.PeerSelection.Types
import Ouroboros.Network.Testing.Data.Script (PickScript,
ScriptDelay (..), TimedScript, arbitraryPickScript,
initScript', interpretPickScript, playTimedScript,
prop_shrink_Script, singletonScript, stepScript)
import Ouroboros.Network.Testing.Utils (arbitrarySubset,
prop_shrink_nonequal, prop_shrink_valid)
import Test.Ouroboros.Network.PeerSelection.Instances
import Test.Ouroboros.Network.PeerSelection.LocalRootPeers as LocalRootPeers hiding
(tests)
import Test.Ouroboros.Network.PeerSelection.PeerGraph
import Test.Ouroboros.Network.ShrinkCarefully
import Test.QuickCheck
import Test.Tasty (TestTree, localOption, testGroup)
import Test.Tasty.QuickCheck (QuickCheckMaxSize (..), testProperty)
tests :: TestTree
tests =
testGroup "Ouroboros.Network.PeerSelection"
[ testGroup "MockEnvironment"
[ testProperty "shrink for Script" prop_shrink_Script
, testProperty "shrink for GovernorScripts" prop_shrink_GovernorScripts
, testProperty "arbitrary for PeerSelectionTargets" prop_arbitrary_PeerSelectionTargets
, testProperty "shrink for PeerSelectionTargets" prop_shrink_PeerSelectionTargets
, testProperty "arbitrary for PeerGraph" prop_arbitrary_PeerGraph
, localOption (QuickCheckMaxSize 30) $
testProperty "shrink for PeerGraph" prop_shrink_PeerGraph
, testProperty "arbitrary for GovernorMockEnvironment" prop_arbitrary_GovernorMockEnvironment
, localOption (QuickCheckMaxSize 30) $
testProperty "shrink for GovernorMockEnvironment" prop_shrink_GovernorMockEnvironment
, testProperty "shrink GovernorMockEnvironment carefully"
prop_shrinkCarefully_GovernorMockEnvironment
]
]
' PeerSelectionActions ' and ' PeerSelectionPolicy ' to run the governor .
data GovernorMockEnvironment = GovernorMockEnvironment {
peerGraph :: PeerGraph,
localRootPeers :: LocalRootPeers PeerAddr,
publicRootPeers :: Set PeerAddr,
targets :: TimedScript PeerSelectionTargets,
pickKnownPeersForGossip :: PickScript PeerAddr,
pickColdPeersToPromote :: PickScript PeerAddr,
pickWarmPeersToPromote :: PickScript PeerAddr,
pickHotPeersToDemote :: PickScript PeerAddr,
pickWarmPeersToDemote :: PickScript PeerAddr,
pickColdPeersToForget :: PickScript PeerAddr
}
deriving (Show, Eq)
data PeerConn m = PeerConn !PeerAddr !(TVar m PeerStatus)
instance Show (PeerConn m) where
show (PeerConn peeraddr _) = "PeerConn " ++ show peeraddr
newtype GovernorMockEnvironmentWithoutAsyncDemotion =
GovernorMockEnvironmentWAD GovernorMockEnvironment
deriving Show
instance Arbitrary GovernorMockEnvironmentWithoutAsyncDemotion where
arbitrary = GovernorMockEnvironmentWAD . fixGraph <$> arbitrary
where
fixGraph g@GovernorMockEnvironment { peerGraph = PeerGraph peerGraph } =
g { peerGraph = PeerGraph (map fixNode peerGraph) }
fixNode (addr, addrs, peerInfo) =
(addr, addrs, peerInfo { connectionScript = singletonScript (Noop, ShortDelay) })
shrink (GovernorMockEnvironmentWAD env) = map GovernorMockEnvironmentWAD (shrink env)
validGovernorMockEnvironment :: GovernorMockEnvironment -> Bool
validGovernorMockEnvironment GovernorMockEnvironment {
peerGraph,
localRootPeers,
publicRootPeers,
targets
} =
validPeerGraph peerGraph
&& LocalRootPeers.keysSet localRootPeers `Set.isSubsetOf` allPeersSet
&& publicRootPeers `Set.isSubsetOf` allPeersSet
&& all (sanePeerSelectionTargets . fst) targets
where
allPeersSet = allPeers peerGraph
runGovernorInMockEnvironment :: GovernorMockEnvironment -> SimTrace Void
runGovernorInMockEnvironment mockEnv =
runSimTrace $ governorAction mockEnv
governorAction :: GovernorMockEnvironment -> IOSim s Void
governorAction mockEnv = do
policy <- mockPeerSelectionPolicy mockEnv
actions <- mockPeerSelectionActions tracerMockEnv mockEnv policy
_ <- peerSelectionGovernor
tracerTracePeerSelection
tracerDebugPeerSelection
tracerTracePeerSelectionCounters
(mkStdGen 42)
actions
policy
atomically retry
exploreGovernorInMockEnvironment :: Testable test
=> (ExplorationOptions->ExplorationOptions)
-> GovernorMockEnvironment
-> (Maybe (SimTrace Void) -> SimTrace Void -> test)
-> Property
exploreGovernorInMockEnvironment optsf mockEnv k =
exploreSimTrace optsf (governorAction mockEnv) k
data TraceMockEnv = TraceEnvAddPeers PeerGraph
| TraceEnvSetLocalRoots (LocalRootPeers PeerAddr)
| TraceEnvRequestPublicRootPeers
| TraceEnvSetPublicRoots (Set PeerAddr)
| TraceEnvPublicRootTTL
| TraceEnvGossipTTL PeerAddr
| TraceEnvSetTargets PeerSelectionTargets
| TraceEnvPeersDemote AsyncDemotion PeerAddr
| TraceEnvEstablishConn PeerAddr
| TraceEnvActivatePeer PeerAddr
| TraceEnvDeactivatePeer PeerAddr
| TraceEnvCloseConn PeerAddr
| TraceEnvRootsResult [PeerAddr]
| TraceEnvGossipRequest PeerAddr (Maybe ([PeerAddr], GossipTime))
| TraceEnvGossipResult PeerAddr [PeerAddr]
| TraceEnvPeersStatus (Map PeerAddr PeerStatus)
deriving Show
mockPeerSelectionActions :: forall m.
(MonadAsync m, MonadTimer m, Fail.MonadFail m,
MonadThrow (STM m), MonadTraceSTM m)
=> Tracer m TraceMockEnv
-> GovernorMockEnvironment
-> PeerSelectionPolicy PeerAddr m
-> m (PeerSelectionActions PeerAddr (PeerConn m) m)
mockPeerSelectionActions tracer
env@GovernorMockEnvironment {
peerGraph,
localRootPeers,
publicRootPeers,
targets
}
policy = do
scripts <- Map.fromList <$>
sequence
[ (\a b -> (addr, (a, b)))
<$> initScript' gossipScript
<*> initScript' connectionScript
| let PeerGraph adjacency = peerGraph
, (addr, _, GovernorScripts {
gossipScript,
connectionScript
}) <- adjacency
]
targetsVar <- playTimedScript (contramap TraceEnvSetTargets tracer) targets
peerConns <- atomically $ do
v <- newTVar Map.empty
traceTVar proxy
v (\_ a -> TraceDynamic . TraceEnvPeersStatus
<$> snapshotPeersStatus proxy a)
return v
traceWith tracer (TraceEnvAddPeers peerGraph)
return $ mockPeerSelectionActions'
tracer env policy
scripts targetsVar peerConns
where
proxy :: Proxy m
proxy = Proxy
data TransitionError
= ActivationError
| DeactivationError
deriving (Show, Typeable)
instance Exception TransitionError where
mockPeerSelectionActions' :: forall m.
(MonadAsync m, MonadSTM m, MonadTimer m, Fail.MonadFail m,
MonadThrow (STM m))
=> Tracer m TraceMockEnv
-> GovernorMockEnvironment
-> PeerSelectionPolicy PeerAddr m
-> Map PeerAddr (TVar m GossipScript, TVar m ConnectionScript)
-> TVar m PeerSelectionTargets
-> TVar m (Map PeerAddr (TVar m PeerStatus))
-> PeerSelectionActions PeerAddr (PeerConn m) m
mockPeerSelectionActions' tracer
GovernorMockEnvironment {
localRootPeers,
publicRootPeers
}
PeerSelectionPolicy {
policyGossipRetryTime
}
scripts
targetsVar
connsVar =
PeerSelectionActions {
readLocalRootPeers = return (LocalRootPeers.toGroups localRootPeers),
requestPublicRootPeers,
readPeerSelectionTargets = readTVar targetsVar,
requestPeerGossip,
peerStateActions = PeerStateActions {
establishPeerConnection,
monitorPeerConnection,
activatePeerConnection,
deactivatePeerConnection,
closePeerConnection
}
}
where
requestPublicRootPeers _n = do
traceWith tracer TraceEnvRequestPublicRootPeers
let ttl :: Num n => n
ttl = 60
_ <- async $ do
threadDelay ttl
traceWith tracer TraceEnvPublicRootTTL
traceWith tracer (TraceEnvRootsResult (Set.toList publicRootPeers))
return (publicRootPeers, ttl)
requestPeerGossip addr = do
let Just (gossipScript, _) = Map.lookup addr scripts
mgossip <- stepScript gossipScript
traceWith tracer (TraceEnvGossipRequest addr mgossip)
_ <- async $ do
threadDelay policyGossipRetryTime
traceWith tracer (TraceEnvGossipTTL addr)
case mgossip of
Nothing -> do
threadDelay 1
traceWith tracer (TraceEnvGossipResult addr [])
fail "no peers"
Just (peeraddrs, time) -> do
threadDelay (interpretGossipTime time)
traceWith tracer (TraceEnvGossipResult addr peeraddrs)
return peeraddrs
establishPeerConnection :: PeerAddr -> m (PeerConn m)
establishPeerConnection peeraddr = do
traceWith tracer (TraceEnvEstablishConn peeraddr)
threadDelay 1
conn@(PeerConn _ v) <- atomically $ do
conn <- newTVar PeerWarm
conns <- readTVar connsVar
let !conns' = Map.insert peeraddr conn conns
writeTVar connsVar conns'
return (PeerConn peeraddr conn)
let Just (_, connectScript) = Map.lookup peeraddr scripts
_ <- async $
+ the script returns ' Noop '
let loop = do
(demotion, delay) <- stepScript connectScript
let interpretScriptDelay NoDelay = 1
interpretScriptDelay ShortDelay = 60
interpretScriptDelay LongDelay = 600
done <-
case demotion of
Noop -> return True
ToWarm -> do
threadDelay (interpretScriptDelay delay)
atomically $ do
s <- readTVar v
case s of
PeerHot -> writeTVar v PeerWarm
>> return False
PeerWarm -> return False
PeerCold -> return True
ToCold -> do
threadDelay (interpretScriptDelay delay)
atomically $ do
s <- readTVar v
case s of
PeerCold -> return True
_ -> writeTVar v PeerCold
>> return True
traceWith tracer (TraceEnvPeersDemote demotion peeraddr)
if done
then return ()
else loop
in loop
return conn
activatePeerConnection :: PeerConn m -> m ()
activatePeerConnection (PeerConn peeraddr conn) = do
traceWith tracer (TraceEnvActivatePeer peeraddr)
threadDelay 1
atomically $ do
status <- readTVar conn
case status of
PeerHot -> error "activatePeerConnection of hot peer"
PeerWarm -> writeTVar conn PeerHot
' invariantPeerSelectionState ' .
PeerCold -> throwIO ActivationError
deactivatePeerConnection :: PeerConn m -> m ()
deactivatePeerConnection (PeerConn peeraddr conn) = do
traceWith tracer (TraceEnvDeactivatePeer peeraddr)
atomically $ do
status <- readTVar conn
case status of
PeerHot -> writeTVar conn PeerWarm
PeerWarm -> return ()
PeerCold -> throwIO DeactivationError
closePeerConnection :: PeerConn m -> m ()
closePeerConnection (PeerConn peeraddr conn) = do
traceWith tracer (TraceEnvCloseConn peeraddr)
atomically $ do
status <- readTVar conn
case status of
PeerHot -> writeTVar conn PeerCold
PeerWarm -> writeTVar conn PeerCold
PeerCold -> return ()
conns <- readTVar connsVar
let !conns' = Map.delete peeraddr conns
writeTVar connsVar conns'
monitorPeerConnection :: PeerConn m -> STM m (PeerStatus, Maybe ReconnectDelay)
monitorPeerConnection (PeerConn _peeraddr conn) = (,) <$> readTVar conn
<*> pure Nothing
snapshotPeersStatus :: MonadInspectSTM m
=> proxy m
-> Map PeerAddr (TVar m PeerStatus)
-> InspectMonad m (Map PeerAddr PeerStatus)
snapshotPeersStatus p conns = traverse (inspectTVar p) conns
mockPeerSelectionPolicy :: MonadSTM m
=> GovernorMockEnvironment
-> m (PeerSelectionPolicy PeerAddr m)
mockPeerSelectionPolicy GovernorMockEnvironment {
pickKnownPeersForGossip,
pickColdPeersToPromote,
pickWarmPeersToPromote,
pickHotPeersToDemote,
pickWarmPeersToDemote,
pickColdPeersToForget
} = do
pickKnownPeersForGossipVar <- initScript' pickKnownPeersForGossip
pickColdPeersToPromoteVar <- initScript' pickColdPeersToPromote
pickWarmPeersToPromoteVar <- initScript' pickWarmPeersToPromote
pickHotPeersToDemoteVar <- initScript' pickHotPeersToDemote
pickWarmPeersToDemoteVar <- initScript' pickWarmPeersToDemote
pickColdPeersToForgetVar <- initScript' pickColdPeersToForget
return PeerSelectionPolicy {
policyPickKnownPeersForGossip = \_ _ _ -> interpretPickScript pickKnownPeersForGossipVar,
policyPickColdPeersToPromote = \_ _ _ -> interpretPickScript pickColdPeersToPromoteVar,
policyPickWarmPeersToPromote = \_ _ _ -> interpretPickScript pickWarmPeersToPromoteVar,
policyPickHotPeersToDemote = \_ _ _ -> interpretPickScript pickHotPeersToDemoteVar,
policyPickWarmPeersToDemote = \_ _ _ -> interpretPickScript pickWarmPeersToDemoteVar,
policyPickColdPeersToForget = \_ _ _ -> interpretPickScript pickColdPeersToForgetVar,
policyMaxInProgressGossipReqs = 2,
}
data TestTraceEvent = GovernorDebug (DebugPeerSelection PeerAddr)
| GovernorEvent (TracePeerSelection PeerAddr)
| GovernorCounters PeerSelectionCounters
| MockEnvEvent TraceMockEnv
deriving Show
tracerTracePeerSelection :: Tracer (IOSim s) (TracePeerSelection PeerAddr)
tracerTracePeerSelection = contramap GovernorEvent tracerTestTraceEvent
tracerDebugPeerSelection :: Tracer (IOSim s) (DebugPeerSelection PeerAddr)
tracerDebugPeerSelection = contramap (GovernorDebug . voidDebugPeerSelection)
tracerTestTraceEvent
where
voidDebugPeerSelection :: DebugPeerSelection peeraddr -> DebugPeerSelection peeraddr
voidDebugPeerSelection (TraceGovernorState btime wtime state) =
TraceGovernorState btime wtime (const () <$> state)
tracerTracePeerSelectionCounters :: Tracer (IOSim s) PeerSelectionCounters
tracerTracePeerSelectionCounters = contramap GovernorCounters tracerTestTraceEvent
tracerMockEnv :: Tracer (IOSim s) TraceMockEnv
tracerMockEnv = contramap MockEnvEvent tracerTestTraceEvent
tracerTestTraceEvent :: Tracer (IOSim s) TestTraceEvent
tracerTestTraceEvent = dynamicTracer <> Tracer (say . show)
dynamicTracer :: Typeable a => Tracer (IOSim s) a
dynamicTracer = Tracer traceM
selectPeerSelectionTraceEvents :: SimTrace a -> [(Time, TestTraceEvent)]
selectPeerSelectionTraceEvents = go
where
go (SimTrace t _ _ (EventLog e) trace)
| Just x <- fromDynamic e = (t,x) : go trace
go (SimPORTrace t _ _ _ (EventLog e) trace)
| Just x <- fromDynamic e = (t,x) : go trace
go (SimTrace _ _ _ _ trace) = go trace
go (SimPORTrace _ _ _ _ _ trace) = go trace
go (TraceRacesFound _ trace) = go trace
go (TraceMainException _ e _) = throw e
go (TraceMainReturn _ _ _) = []
go TraceLoop = error "Step time limit exceeded"
selectPeerSelectionTraceEventsUntil :: Time -> SimTrace a -> [(Time, TestTraceEvent)]
selectPeerSelectionTraceEventsUntil tmax = go
where
go (SimTrace t _ _ _ _)
| t > tmax = []
go (SimTrace t _ _ (EventLog e) trace)
| Just x <- fromDynamic e = (t,x) : go trace
go (SimPORTrace t _ _ _ _ _)
| t > tmax = []
go (SimPORTrace t _ _ _ (EventLog e) trace)
| Just x <- fromDynamic e = (t,x) : go trace
go (SimTrace _ _ _ _ trace) = go trace
go (SimPORTrace _ _ _ _ _ trace) = go trace
go (TraceRacesFound _ trace) = go trace
go (TraceMainException _ e _) = throw e
go (TraceMainReturn _ _ _) = []
go TraceLoop = error "Step time limit exceeded"
selectGovernorEvents :: [(Time, TestTraceEvent)]
-> [(Time, TracePeerSelection PeerAddr)]
selectGovernorEvents trace = [ (t, e) | (t, GovernorEvent e) <- trace ]
QuickCheck instances
instance Arbitrary GovernorMockEnvironment where
arbitrary = do
peerGraph <- arbitrary
let peersSet = allPeers peerGraph
(localRootPeers,
publicRootPeers) <- arbitraryRootPeers peersSet
targets <- arbitrary
let arbitrarySubsetOfPeers = arbitrarySubset peersSet
pickKnownPeersForGossip <- arbitraryPickScript arbitrarySubsetOfPeers
pickColdPeersToPromote <- arbitraryPickScript arbitrarySubsetOfPeers
pickWarmPeersToPromote <- arbitraryPickScript arbitrarySubsetOfPeers
pickHotPeersToDemote <- arbitraryPickScript arbitrarySubsetOfPeers
pickWarmPeersToDemote <- arbitraryPickScript arbitrarySubsetOfPeers
pickColdPeersToForget <- arbitraryPickScript arbitrarySubsetOfPeers
return GovernorMockEnvironment{..}
where
arbitraryRootPeers :: Set PeerAddr
-> Gen (LocalRootPeers PeerAddr, Set PeerAddr)
arbitraryRootPeers peers | Set.null peers =
return (LocalRootPeers.empty, Set.empty)
arbitraryRootPeers peers = do
sz <- getSize
let minroots
| sz >= 10 = 1
| otherwise = 0
maxroots = ceiling
. sqrt
. (fromIntegral :: Int -> Double)
. length
$ peers
numroots <- choose (minroots, maxroots)
ixs <- vectorOf numroots (getNonNegative <$> arbitrary)
let pick n = Set.elemAt i peers where i = n `mod` Set.size peers
rootPeers = nub (map pick ixs)
local <- vectorOf (length rootPeers) (choose (0, 10 :: Int))
let localRootsSet = Set.fromList [ x | (x, v) <- zip rootPeers local
, v <= 5 ]
publicRootsSet = Set.fromList [ x | (x, v) <- zip rootPeers local
, v >= 5 ]
localRoots <- arbitraryLocalRootPeers localRootsSet
return (localRoots, publicRootsSet)
shrink env@GovernorMockEnvironment {
peerGraph,
localRootPeers,
publicRootPeers,
targets,
pickKnownPeersForGossip,
pickColdPeersToPromote,
pickWarmPeersToPromote,
pickHotPeersToDemote,
pickWarmPeersToDemote,
pickColdPeersToForget
} =
[ env {
peerGraph = peerGraph',
localRootPeers = LocalRootPeers.restrictKeys localRootPeers nodes',
publicRootPeers = publicRootPeers `Set.intersection` nodes'
}
| peerGraph' <- shrink peerGraph
, let nodes' = allPeers peerGraph' ]
++ [ GovernorMockEnvironment {
peerGraph,
localRootPeers = localRootPeers',
publicRootPeers = publicRootPeers',
targets = targets',
pickKnownPeersForGossip = pickKnownPeersForGossip',
pickColdPeersToPromote = pickColdPeersToPromote',
pickWarmPeersToPromote = pickWarmPeersToPromote',
pickHotPeersToDemote = pickHotPeersToDemote',
pickWarmPeersToDemote = pickWarmPeersToDemote',
pickColdPeersToForget = pickColdPeersToForget'
}
| (localRootPeers', publicRootPeers', targets',
pickKnownPeersForGossip',
pickColdPeersToPromote',
pickWarmPeersToPromote',
pickHotPeersToDemote',
pickWarmPeersToDemote',
pickColdPeersToForget')
<- shrink (localRootPeers, publicRootPeers, targets,
pickKnownPeersForGossip,
pickColdPeersToPromote,
pickWarmPeersToPromote,
pickHotPeersToDemote,
pickWarmPeersToDemote,
pickColdPeersToForget)
]
prop_arbitrary_GovernorMockEnvironment :: GovernorMockEnvironment -> Property
prop_arbitrary_GovernorMockEnvironment env =
tabulate "num root peers" [show (LocalRootPeers.size (localRootPeers env)
+ Set.size (publicRootPeers env))] $
tabulate "num local root peers" [show (LocalRootPeers.size (localRootPeers env))] $
tabulate "num public root peers" [show (Set.size (publicRootPeers env))] $
tabulate "empty root peers" [show $ not emptyGraph && emptyRootPeers] $
tabulate "overlapping local/public roots" [show overlappingRootPeers] $
validGovernorMockEnvironment env
where
emptyGraph = null g where PeerGraph g = peerGraph env
emptyRootPeers = LocalRootPeers.null (localRootPeers env)
&& Set.null (publicRootPeers env)
overlappingRootPeers =
not $ Set.null $
Set.intersection
(LocalRootPeers.keysSet (localRootPeers env))
(publicRootPeers env)
prop_shrink_GovernorMockEnvironment :: Fixed GovernorMockEnvironment -> Property
prop_shrink_GovernorMockEnvironment x =
prop_shrink_valid validGovernorMockEnvironment x
.&&. prop_shrink_nonequal x
prop_shrinkCarefully_GovernorMockEnvironment ::
ShrinkCarefully GovernorMockEnvironment -> Property
prop_shrinkCarefully_GovernorMockEnvironment = prop_shrinkCarefully
|
aa51c81079ffc166a0fb3a04fe16e21063375706011490074caca49363c854e0 | xvw/preface | fun.ml | type ('a, 'b) t = 'a -> 'b
include Preface_core.Fun
module Profunctor = Preface_make.Profunctor.Via_dimap (struct
type nonrec ('a, 'b) t = ('a, 'b) t
let dimap x y z = y % z % x
end)
module Strong =
Preface_make.Strong.Over_profunctor_via_fst
(Profunctor)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let fst x (y, z) = (x y, z)
end)
module Choice =
Preface_make.Choice.Over_profunctor_via_left
(Profunctor)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let left f = function
| Either.Left x -> Either.Left (f x)
| Either.Right x -> Either.Right x
;;
end)
module Closed =
Preface_make.Closed.Over_profunctor_via_closed
(Profunctor)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let closed = Preface_core.Fun.compose_right_to_left
end)
module Semigroupoid = Preface_make.Semigroupoid.Via_compose (struct
type nonrec ('a, 'b) t = ('a, 'b) t
let compose = compose_right_to_left
end)
module Category =
Preface_make.Category.Over_semigroupoid
(Semigroupoid)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let id x = x
end)
module Arrow = Preface_make.Arrow.From_strong_and_category (Strong) (Category)
module Arrow_choice =
Preface_make.Arrow_choice.Over_arrow_with_choose
(Arrow)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let case f g = Stdlib.Either.fold ~left:f ~right:g
let choose f g = case (Stdlib.Either.left % f) (Stdlib.Either.right % g)
end)
module Arrow_apply =
Preface_make.Arrow_apply.Over_arrow
(Arrow)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let apply (f, x) = f x
end)
| null | https://raw.githubusercontent.com/xvw/preface/ece0829c9ede3e5a4975df3864edb657b96fe82c/lib/preface_stdlib/fun.ml | ocaml | type ('a, 'b) t = 'a -> 'b
include Preface_core.Fun
module Profunctor = Preface_make.Profunctor.Via_dimap (struct
type nonrec ('a, 'b) t = ('a, 'b) t
let dimap x y z = y % z % x
end)
module Strong =
Preface_make.Strong.Over_profunctor_via_fst
(Profunctor)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let fst x (y, z) = (x y, z)
end)
module Choice =
Preface_make.Choice.Over_profunctor_via_left
(Profunctor)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let left f = function
| Either.Left x -> Either.Left (f x)
| Either.Right x -> Either.Right x
;;
end)
module Closed =
Preface_make.Closed.Over_profunctor_via_closed
(Profunctor)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let closed = Preface_core.Fun.compose_right_to_left
end)
module Semigroupoid = Preface_make.Semigroupoid.Via_compose (struct
type nonrec ('a, 'b) t = ('a, 'b) t
let compose = compose_right_to_left
end)
module Category =
Preface_make.Category.Over_semigroupoid
(Semigroupoid)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let id x = x
end)
module Arrow = Preface_make.Arrow.From_strong_and_category (Strong) (Category)
module Arrow_choice =
Preface_make.Arrow_choice.Over_arrow_with_choose
(Arrow)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let case f g = Stdlib.Either.fold ~left:f ~right:g
let choose f g = case (Stdlib.Either.left % f) (Stdlib.Either.right % g)
end)
module Arrow_apply =
Preface_make.Arrow_apply.Over_arrow
(Arrow)
(struct
type nonrec ('a, 'b) t = ('a, 'b) t
let apply (f, x) = f x
end)
| |
cde03d1243c14b2837cb3942e0b68a9582ce185773fcd70cebb3b250e4cfbde3 | serokell/fift-asm-dsl | FiftAsm.hs | SPDX - FileCopyrightText : 2019 >
--
SPDX - License - Identifier : MPL-2.0
# OPTIONS_GHC -F -pgmF autoexporter #
# OPTIONS_GHC -Wno - dodgy - exports #
{-# OPTIONS_GHC -Wno-unused-imports #-} | null | https://raw.githubusercontent.com/serokell/fift-asm-dsl/d710e076d4212aa2cce4aa8bb15277d3b09c48af/src/FiftAsm.hs | haskell |
# OPTIONS_GHC -Wno-unused-imports # | SPDX - FileCopyrightText : 2019 >
SPDX - License - Identifier : MPL-2.0
# OPTIONS_GHC -F -pgmF autoexporter #
# OPTIONS_GHC -Wno - dodgy - exports # |
3d0dcfdb0e2a985a01622857774e8d64558ba47c233de36a465c9aa656a20b56 | vicampo/riposte | cart.rkt | #lang racket/base
(require racket/match
web-server/http/request-structs
web-server/dispatch
json
(file "common.rkt"))
(define (view-cart req cart-id)
(response/jsexpr (hasheq 'grand_total 34.58)
#:code 200))
(define (add-item-to-cart req cart-id)
(match (request-post-data/raw req)
[#f
(response/empty #:code 400)]
[(? bytes? bs)
(with-handlers ([exn:fail? (lambda (err)
(response/empty #:code 400))])
(match (bytes->jsexpr bs)
[(hash-table ('product_id (? exact-positive-integer? product-id))
('campaign_id (? exact-positive-integer? campaign-id))
('qty (? exact-positive-integer? qty)))
(define item (hasheq 'product_id product-id
'campaign_id campaign-id
'qty qty
'cart_item_id 42))
(define price (* qty (+ product-id campaign-id (random 1 25))))
(response/jsexpr (hasheq 'grand_total price
'items (list item))
#:code 200)]
[else
(response/empty #:code 400)]))]))
(define-values (start url-generator)
(dispatch-rules
[("cart" (string-arg)) #:method "get" view-cart]
[("cart" (string-arg) "items") #:method "post" add-item-to-cart]))
(module+ main
(run start))
| null | https://raw.githubusercontent.com/vicampo/riposte/0a71e54539cb40b574f84674769792444691a8cf/examples/cart.rkt | racket | #lang racket/base
(require racket/match
web-server/http/request-structs
web-server/dispatch
json
(file "common.rkt"))
(define (view-cart req cart-id)
(response/jsexpr (hasheq 'grand_total 34.58)
#:code 200))
(define (add-item-to-cart req cart-id)
(match (request-post-data/raw req)
[#f
(response/empty #:code 400)]
[(? bytes? bs)
(with-handlers ([exn:fail? (lambda (err)
(response/empty #:code 400))])
(match (bytes->jsexpr bs)
[(hash-table ('product_id (? exact-positive-integer? product-id))
('campaign_id (? exact-positive-integer? campaign-id))
('qty (? exact-positive-integer? qty)))
(define item (hasheq 'product_id product-id
'campaign_id campaign-id
'qty qty
'cart_item_id 42))
(define price (* qty (+ product-id campaign-id (random 1 25))))
(response/jsexpr (hasheq 'grand_total price
'items (list item))
#:code 200)]
[else
(response/empty #:code 400)]))]))
(define-values (start url-generator)
(dispatch-rules
[("cart" (string-arg)) #:method "get" view-cart]
[("cart" (string-arg) "items") #:method "post" add-item-to-cart]))
(module+ main
(run start))
| |
3a8034204da8375f11d4eaeb023f2ddbf60fa758ab71f6902a91f4420284313c | Clojure2D/clojure2d-examples | interval.clj | (ns rt4.in-one-weekend.ch08b.interval
(:refer-clojure :exclude [empty])
(:require [fastmath.core :as m]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defprotocol IntervalProto
(contains [interval x]) ;; a <= x <= b
;; introduced due to the bug in the book (in the time of writing this code), a < x <= b
(contains- [interval x])
(clamp [interval x]))
(defrecord Interval [^double mn ^double mx]
IntervalProto
(contains [_ x] (m/between? mn mx ^double x))
(contains- [_ x] (m/between-? mn mx ^double x))
(clamp [_ x] (m/constrain ^double x mn mx)))
(defn interval
([] (->Interval ##Inf ##-Inf))
([m] (map->Interval m))
([^double mn ^double mx] (->Interval mn mx)))
(def empty (interval))
(def universe (interval ##-Inf ##Inf))
| null | https://raw.githubusercontent.com/Clojure2D/clojure2d-examples/ead92d6f17744b91070e6308157364ad4eab8a1b/src/rt4/in_one_weekend/ch08b/interval.clj | clojure | a <= x <= b
introduced due to the bug in the book (in the time of writing this code), a < x <= b | (ns rt4.in-one-weekend.ch08b.interval
(:refer-clojure :exclude [empty])
(:require [fastmath.core :as m]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defprotocol IntervalProto
(contains- [interval x])
(clamp [interval x]))
(defrecord Interval [^double mn ^double mx]
IntervalProto
(contains [_ x] (m/between? mn mx ^double x))
(contains- [_ x] (m/between-? mn mx ^double x))
(clamp [_ x] (m/constrain ^double x mn mx)))
(defn interval
([] (->Interval ##Inf ##-Inf))
([m] (map->Interval m))
([^double mn ^double mx] (->Interval mn mx)))
(def empty (interval))
(def universe (interval ##-Inf ##Inf))
|
f4caba84bdae5ad55a5b75d70f03ec4edaad9089b5d218e3d714d2fd0f4a8c44 | jacekschae/learn-reitit-course-files | handlers.clj | (ns cheffy.recipe.handlers
(:require [cheffy.recipe.db :as recipe-db]
[ring.util.response :as rr]
[cheffy.responses :as responses])
(:import (java.util UUID)))
(defn list-all-recipes
[db]
(fn [request]
(let [uid "auth0|5ef440986e8fbb001355fd9c"
recipes (recipe-db/find-all-recipes db uid)]
(rr/response recipes))))
(defn create-recipe!
[db]
(fn [request]
(let [recipe-id (str (UUID/randomUUID))
uid "auth0|5ef440986e8fbb001355fd9c"
recipe (-> request :parameters :body)]
(recipe-db/insert-recipe! db (assoc recipe :recipe-id recipe-id :uid uid))
(rr/created (str responses/base-url "/recipes/" recipe-id) {:recipe-id recipe-id}))))
(defn retrieve-recipe
[db]
(fn [request]
(let [recipe-id "a3dde84c-4a33-45aa-b0f3-4bf9ac997680"
recipe (recipe-db/find-recipe-by-id db recipe-id)]
(if recipe
(rr/response recipe)
(rr/not-found {:type "recipe-not-found"
:message "Recipe not found"
:data (str "recipe-id " recipe-id)})))))
(defn update-recipe!
[db]
(fn [request]
(let [recipe-id "a3dde84c-4a33-45aa-b0f3-4bf9ac997680"
recipe (-> request :parameters :body)
update-successful? (recipe-db/update-recipe! db (assoc recipe :recipe-id recipe-id))]
(if update-successful?
(rr/status 204)
(rr/not-found {:recipe-id recipe-id}))))) | null | https://raw.githubusercontent.com/jacekschae/learn-reitit-course-files/c13a8eb622a371ad719d3d9023f1b4eff9392e4c/increments/25-delete-recipe/src/cheffy/recipe/handlers.clj | clojure | (ns cheffy.recipe.handlers
(:require [cheffy.recipe.db :as recipe-db]
[ring.util.response :as rr]
[cheffy.responses :as responses])
(:import (java.util UUID)))
(defn list-all-recipes
[db]
(fn [request]
(let [uid "auth0|5ef440986e8fbb001355fd9c"
recipes (recipe-db/find-all-recipes db uid)]
(rr/response recipes))))
(defn create-recipe!
[db]
(fn [request]
(let [recipe-id (str (UUID/randomUUID))
uid "auth0|5ef440986e8fbb001355fd9c"
recipe (-> request :parameters :body)]
(recipe-db/insert-recipe! db (assoc recipe :recipe-id recipe-id :uid uid))
(rr/created (str responses/base-url "/recipes/" recipe-id) {:recipe-id recipe-id}))))
(defn retrieve-recipe
[db]
(fn [request]
(let [recipe-id "a3dde84c-4a33-45aa-b0f3-4bf9ac997680"
recipe (recipe-db/find-recipe-by-id db recipe-id)]
(if recipe
(rr/response recipe)
(rr/not-found {:type "recipe-not-found"
:message "Recipe not found"
:data (str "recipe-id " recipe-id)})))))
(defn update-recipe!
[db]
(fn [request]
(let [recipe-id "a3dde84c-4a33-45aa-b0f3-4bf9ac997680"
recipe (-> request :parameters :body)
update-successful? (recipe-db/update-recipe! db (assoc recipe :recipe-id recipe-id))]
(if update-successful?
(rr/status 204)
(rr/not-found {:recipe-id recipe-id}))))) | |
eb84aa791add6cef2a6c1ea8406917cd7a24d1bb53f0eb7f9c9b312006b19d77 | AppsFlyer/pronto | utils.clj | (ns pronto.utils
(:require [clojure.string :as s]
[pronto.protos :refer [global-ns]])
(:import
[pronto ProtoMap ProtoMapper]
[com.google.protobuf
Descriptors$FieldDescriptor
Descriptors$GenericDescriptor
Descriptors$FieldDescriptor$Type
GeneratedMessageV3]))
(defn javaify [s] (s/replace s "-" "_"))
(defn normalize-path [s]
(-> s
(s/replace "." "_")
(s/replace "$" "__")))
(defn sanitized-class-name [^Class clazz]
(normalize-path (.getName clazz)))
(defn class->map-class-name [^Class clazz]
(symbol (str (sanitized-class-name clazz) "ProtoMap")))
(defn class->abstract-map-class-name [^Class clazz]
(symbol (str (sanitized-class-name clazz) "AbstractMap")))
(defn class->abstract-persistent-map-class-name [^Class clazz]
(symbol (str (sanitized-class-name clazz) "AbstractPersistentMap")))
(defn class->transient-class-name [^Class clazz]
(symbol (str 'transient_ (sanitized-class-name clazz))))
(defn ->kebab-case
"Converts `s`, assumed to be in snake_case, to kebab-case"
[^String s]
(when s
(s/lower-case (.replace s \_ \-))))
(defn with-type-hint [sym ^Class clazz]
(with-meta sym {:tag (symbol (.getName clazz))}))
(defn ctor-name [prefix ^Class clazz]
(symbol (str prefix '-> (class->map-class-name clazz))))
(defn ->camel-case
"Implements protobuf's camel case conversion for Java. See: #L157"
[^String s]
(when-let [length (some-> s .length)]
(loop [i 0
^StringBuilder sb (StringBuilder.)
cap-next-letter? true]
(if (= i length)
(.toString sb)
(let [x (.charAt s i)]
(cond
(Character/isLowerCase x)
(recur (inc i)
(.append sb (if cap-next-letter? (Character/toUpperCase x) x))
false)
(Character/isUpperCase x)
(recur (inc i) (.append sb x) false)
(Character/isDigit x)
(recur (inc i) (.append sb x) true)
:else
(recur (inc i) sb true)))))))
(defn field->camel-case [^Descriptors$GenericDescriptor field]
(->camel-case (.getName field)))
(defn field->kebab-case [^Descriptors$GenericDescriptor field]
(->kebab-case (.getName field)))
(defn message? [^Descriptors$FieldDescriptor fd]
(= (.getType fd)
Descriptors$FieldDescriptor$Type/MESSAGE))
(defn struct? [^Descriptors$FieldDescriptor fd]
(and (message? fd)
(not (.isMapField fd))
(not (.isRepeated fd))))
(defn enum? [^Descriptors$FieldDescriptor fd]
(= (.getType fd)
Descriptors$FieldDescriptor$Type/ENUM))
(defn static-call [^Class class method-name]
(symbol (str (.getName class) "/" method-name)))
(defn type-error-info [clazz field-name expected-type value]
{:class clazz
:field field-name
:expected-type expected-type
:value value})
(defn make-type-error
([clazz field-name expected-type value]
(make-type-error clazz field-name expected-type value nil))
([clazz field-name expected-type value cause]
;; return as code so this frame isn't included in the stack trace
`(ex-info "Invalid type" ~(type-error-info clazz field-name expected-type value) ~cause)))
(defmacro with-ns [new-ns & body]
(let [orig-ns *ns*
orig-ns-name (ns-name orig-ns)
ns-name-sym (symbol new-ns)
existing-classes (set (when-let [n (find-ns ns-name-sym)]
(vals (ns-imports n))))]
(if (or (nil? new-ns)
(= new-ns (str *ns*)))
body
`(do
(create-ns (quote ~ns-name-sym))
(in-ns (quote ~ns-name-sym))
~@(for [[_ ^Class clazz]
(ns-imports orig-ns)
:let [class-name (.getName clazz)]
:when (not (get existing-classes clazz))
No point to import POJO classes , and this can also
lead to conflicts if 2 namespaces import 2 classes
;; with the same name but different packages.
:when (not= (.getSuperclass clazz) GeneratedMessageV3)
;; don't import generated classes created by the lib, as this might
;; lead to collision between different mappers when importing
;; these classes into the global ns
:when (not (s/starts-with? class-name (javaify global-ns)))]
`(import ~(symbol (.getName clazz))))
;; clojure.core is not auto-loaded so load it explicitly
;; in order for any of its vars to be resolvable
(use '[clojure.core])
~@body
#_(finally)
(in-ns (quote ~(symbol orig-ns-name)))))))
(defn- split' [f coll]
(loop [[x & xs :as c] coll
res []]
(if-not x
res
(if (f x)
(recur
xs
(conj res x))
(let [[a b] (split-with (complement f) c)]
(recur
b
(conj res a)))))))
(def leaf-val :val)
(defn leaf [x] (with-meta {:val x} {::leaf? true}))
(def leaf? (comp boolean ::leaf? meta))
(defn kv-forest [kvs]
(loop [[kv-partition & ps] (partition-by ffirst kvs)
res []]
(if-not kv-partition
res
(let [leader-key (first (ffirst kv-partition))
follower-kvs (->> kv-partition
(map
(fn [[ks v]]
(let [rks (rest ks)]
(if (seq rks)
(vector rks v)
(leaf v)))))
(split' leaf?))]
(recur
ps
(conj
res
[leader-key
(mapcat
(fn [g]
(if (leaf? g)
[g]
(kv-forest g)))
follower-kvs)]))))))
(defn- flatten-forest* [forest]
(if-not (seq forest)
[]
(for [[k tree] forest
v tree]
(if (leaf? v)
[[k] (leaf-val v)]
(mapcat
(fn [[k' v']]
[(cons k k') v'])
(flatten-forest* [v]))))))
(defn flatten-forest [forest]
(partition 2 (apply concat (flatten-forest* forest))))
(defn safe-resolve [x]
(try
(resolve x)
(catch Exception _)))
(defn proto-map? [m]
(instance? ProtoMap m))
(defn proto-map->proto
"Returns the protobuf instance associated with the proto-map"
[^ProtoMap m]
(.pmap_getProto m))
(defn mapper? [m]
(instance? ProtoMapper m))
| null | https://raw.githubusercontent.com/AppsFlyer/pronto/b7bbbd0a85194073aefd0cf1e8c3189eeae07e01/src/clj/pronto/utils.clj | clojure | return as code so this frame isn't included in the stack trace
with the same name but different packages.
don't import generated classes created by the lib, as this might
lead to collision between different mappers when importing
these classes into the global ns
clojure.core is not auto-loaded so load it explicitly
in order for any of its vars to be resolvable | (ns pronto.utils
(:require [clojure.string :as s]
[pronto.protos :refer [global-ns]])
(:import
[pronto ProtoMap ProtoMapper]
[com.google.protobuf
Descriptors$FieldDescriptor
Descriptors$GenericDescriptor
Descriptors$FieldDescriptor$Type
GeneratedMessageV3]))
(defn javaify [s] (s/replace s "-" "_"))
(defn normalize-path [s]
(-> s
(s/replace "." "_")
(s/replace "$" "__")))
(defn sanitized-class-name [^Class clazz]
(normalize-path (.getName clazz)))
(defn class->map-class-name [^Class clazz]
(symbol (str (sanitized-class-name clazz) "ProtoMap")))
(defn class->abstract-map-class-name [^Class clazz]
(symbol (str (sanitized-class-name clazz) "AbstractMap")))
(defn class->abstract-persistent-map-class-name [^Class clazz]
(symbol (str (sanitized-class-name clazz) "AbstractPersistentMap")))
(defn class->transient-class-name [^Class clazz]
(symbol (str 'transient_ (sanitized-class-name clazz))))
(defn ->kebab-case
"Converts `s`, assumed to be in snake_case, to kebab-case"
[^String s]
(when s
(s/lower-case (.replace s \_ \-))))
(defn with-type-hint [sym ^Class clazz]
(with-meta sym {:tag (symbol (.getName clazz))}))
(defn ctor-name [prefix ^Class clazz]
(symbol (str prefix '-> (class->map-class-name clazz))))
(defn ->camel-case
"Implements protobuf's camel case conversion for Java. See: #L157"
[^String s]
(when-let [length (some-> s .length)]
(loop [i 0
^StringBuilder sb (StringBuilder.)
cap-next-letter? true]
(if (= i length)
(.toString sb)
(let [x (.charAt s i)]
(cond
(Character/isLowerCase x)
(recur (inc i)
(.append sb (if cap-next-letter? (Character/toUpperCase x) x))
false)
(Character/isUpperCase x)
(recur (inc i) (.append sb x) false)
(Character/isDigit x)
(recur (inc i) (.append sb x) true)
:else
(recur (inc i) sb true)))))))
(defn field->camel-case [^Descriptors$GenericDescriptor field]
(->camel-case (.getName field)))
(defn field->kebab-case [^Descriptors$GenericDescriptor field]
(->kebab-case (.getName field)))
(defn message? [^Descriptors$FieldDescriptor fd]
(= (.getType fd)
Descriptors$FieldDescriptor$Type/MESSAGE))
(defn struct? [^Descriptors$FieldDescriptor fd]
(and (message? fd)
(not (.isMapField fd))
(not (.isRepeated fd))))
(defn enum? [^Descriptors$FieldDescriptor fd]
(= (.getType fd)
Descriptors$FieldDescriptor$Type/ENUM))
(defn static-call [^Class class method-name]
(symbol (str (.getName class) "/" method-name)))
(defn type-error-info [clazz field-name expected-type value]
{:class clazz
:field field-name
:expected-type expected-type
:value value})
(defn make-type-error
([clazz field-name expected-type value]
(make-type-error clazz field-name expected-type value nil))
([clazz field-name expected-type value cause]
`(ex-info "Invalid type" ~(type-error-info clazz field-name expected-type value) ~cause)))
(defmacro with-ns [new-ns & body]
(let [orig-ns *ns*
orig-ns-name (ns-name orig-ns)
ns-name-sym (symbol new-ns)
existing-classes (set (when-let [n (find-ns ns-name-sym)]
(vals (ns-imports n))))]
(if (or (nil? new-ns)
(= new-ns (str *ns*)))
body
`(do
(create-ns (quote ~ns-name-sym))
(in-ns (quote ~ns-name-sym))
~@(for [[_ ^Class clazz]
(ns-imports orig-ns)
:let [class-name (.getName clazz)]
:when (not (get existing-classes clazz))
No point to import POJO classes , and this can also
lead to conflicts if 2 namespaces import 2 classes
:when (not= (.getSuperclass clazz) GeneratedMessageV3)
:when (not (s/starts-with? class-name (javaify global-ns)))]
`(import ~(symbol (.getName clazz))))
(use '[clojure.core])
~@body
#_(finally)
(in-ns (quote ~(symbol orig-ns-name)))))))
(defn- split' [f coll]
(loop [[x & xs :as c] coll
res []]
(if-not x
res
(if (f x)
(recur
xs
(conj res x))
(let [[a b] (split-with (complement f) c)]
(recur
b
(conj res a)))))))
(def leaf-val :val)
(defn leaf [x] (with-meta {:val x} {::leaf? true}))
(def leaf? (comp boolean ::leaf? meta))
(defn kv-forest [kvs]
(loop [[kv-partition & ps] (partition-by ffirst kvs)
res []]
(if-not kv-partition
res
(let [leader-key (first (ffirst kv-partition))
follower-kvs (->> kv-partition
(map
(fn [[ks v]]
(let [rks (rest ks)]
(if (seq rks)
(vector rks v)
(leaf v)))))
(split' leaf?))]
(recur
ps
(conj
res
[leader-key
(mapcat
(fn [g]
(if (leaf? g)
[g]
(kv-forest g)))
follower-kvs)]))))))
(defn- flatten-forest* [forest]
(if-not (seq forest)
[]
(for [[k tree] forest
v tree]
(if (leaf? v)
[[k] (leaf-val v)]
(mapcat
(fn [[k' v']]
[(cons k k') v'])
(flatten-forest* [v]))))))
(defn flatten-forest [forest]
(partition 2 (apply concat (flatten-forest* forest))))
(defn safe-resolve [x]
(try
(resolve x)
(catch Exception _)))
(defn proto-map? [m]
(instance? ProtoMap m))
(defn proto-map->proto
"Returns the protobuf instance associated with the proto-map"
[^ProtoMap m]
(.pmap_getProto m))
(defn mapper? [m]
(instance? ProtoMapper m))
|
f25cfb813494921d96dc0d24ebf686c670b334fdcaa07c11eacba6357571bba5 | haskell/ghcide | IdeConfiguration.hs | # LANGUAGE DuplicateRecordFields #
module Development.IDE.Core.IdeConfiguration
( IdeConfiguration(..)
, registerIdeConfiguration
, getIdeConfiguration
, parseConfiguration
, parseWorkspaceFolder
, isWorkspaceFile
, modifyWorkspaceFolders
, modifyClientSettings
, getClientSettings
)
where
import Control.Concurrent.Extra
import Control.Monad
import Data.Hashable (Hashed, hashed, unhashed)
import Data.HashSet (HashSet, singleton)
import Data.Text (Text, isPrefixOf)
import Data.Aeson.Types (Value)
import Development.IDE.Core.Shake
import Development.IDE.Types.Location
import Development.Shake
import Language.Haskell.LSP.Types
import System.FilePath (isRelative)
-- | Lsp client relevant configuration details
data IdeConfiguration = IdeConfiguration
{ workspaceFolders :: HashSet NormalizedUri
, clientSettings :: Hashed (Maybe Value)
}
deriving (Show)
newtype IdeConfigurationVar = IdeConfigurationVar {unIdeConfigurationRef :: Var IdeConfiguration}
instance IsIdeGlobal IdeConfigurationVar
registerIdeConfiguration :: ShakeExtras -> IdeConfiguration -> IO ()
registerIdeConfiguration extras =
addIdeGlobalExtras extras . IdeConfigurationVar <=< newVar
getIdeConfiguration :: Action IdeConfiguration
getIdeConfiguration =
getIdeGlobalAction >>= liftIO . readVar . unIdeConfigurationRef
parseConfiguration :: InitializeParams -> IdeConfiguration
parseConfiguration InitializeParams {..} =
IdeConfiguration {..}
where
workspaceFolders =
foldMap (singleton . toNormalizedUri) _rootUri
<> (foldMap . foldMap)
(singleton . parseWorkspaceFolder)
_workspaceFolders
clientSettings = hashed _initializationOptions
parseWorkspaceFolder :: WorkspaceFolder -> NormalizedUri
parseWorkspaceFolder =
toNormalizedUri . Uri . (_uri :: WorkspaceFolder -> Text)
modifyWorkspaceFolders
:: IdeState -> (HashSet NormalizedUri -> HashSet NormalizedUri) -> IO ()
modifyWorkspaceFolders ide f = modifyIdeConfiguration ide f'
where f' (IdeConfiguration ws initOpts) = IdeConfiguration (f ws) initOpts
modifyClientSettings
:: IdeState -> (Maybe Value -> Maybe Value) -> IO ()
modifyClientSettings ide f = modifyIdeConfiguration ide f'
where f' (IdeConfiguration ws clientSettings) =
IdeConfiguration ws (hashed . f . unhashed $ clientSettings)
modifyIdeConfiguration
:: IdeState -> (IdeConfiguration -> IdeConfiguration) -> IO ()
modifyIdeConfiguration ide f = do
IdeConfigurationVar var <- getIdeGlobalState ide
modifyVar_ var (pure . f)
isWorkspaceFile :: NormalizedFilePath -> Action Bool
isWorkspaceFile file =
if isRelative (fromNormalizedFilePath file)
then return True
else do
IdeConfiguration {..} <- getIdeConfiguration
let toText = getUri . fromNormalizedUri
return $
any
(\root -> toText root `isPrefixOf` toText (filePathToUri' file))
workspaceFolders
getClientSettings :: Action (Maybe Value)
getClientSettings = unhashed . clientSettings <$> getIdeConfiguration | null | https://raw.githubusercontent.com/haskell/ghcide/3ef4ef99c4b9cde867d29180c32586947df64b9e/src/Development/IDE/Core/IdeConfiguration.hs | haskell | | Lsp client relevant configuration details | # LANGUAGE DuplicateRecordFields #
module Development.IDE.Core.IdeConfiguration
( IdeConfiguration(..)
, registerIdeConfiguration
, getIdeConfiguration
, parseConfiguration
, parseWorkspaceFolder
, isWorkspaceFile
, modifyWorkspaceFolders
, modifyClientSettings
, getClientSettings
)
where
import Control.Concurrent.Extra
import Control.Monad
import Data.Hashable (Hashed, hashed, unhashed)
import Data.HashSet (HashSet, singleton)
import Data.Text (Text, isPrefixOf)
import Data.Aeson.Types (Value)
import Development.IDE.Core.Shake
import Development.IDE.Types.Location
import Development.Shake
import Language.Haskell.LSP.Types
import System.FilePath (isRelative)
data IdeConfiguration = IdeConfiguration
{ workspaceFolders :: HashSet NormalizedUri
, clientSettings :: Hashed (Maybe Value)
}
deriving (Show)
newtype IdeConfigurationVar = IdeConfigurationVar {unIdeConfigurationRef :: Var IdeConfiguration}
instance IsIdeGlobal IdeConfigurationVar
registerIdeConfiguration :: ShakeExtras -> IdeConfiguration -> IO ()
registerIdeConfiguration extras =
addIdeGlobalExtras extras . IdeConfigurationVar <=< newVar
getIdeConfiguration :: Action IdeConfiguration
getIdeConfiguration =
getIdeGlobalAction >>= liftIO . readVar . unIdeConfigurationRef
parseConfiguration :: InitializeParams -> IdeConfiguration
parseConfiguration InitializeParams {..} =
IdeConfiguration {..}
where
workspaceFolders =
foldMap (singleton . toNormalizedUri) _rootUri
<> (foldMap . foldMap)
(singleton . parseWorkspaceFolder)
_workspaceFolders
clientSettings = hashed _initializationOptions
parseWorkspaceFolder :: WorkspaceFolder -> NormalizedUri
parseWorkspaceFolder =
toNormalizedUri . Uri . (_uri :: WorkspaceFolder -> Text)
modifyWorkspaceFolders
:: IdeState -> (HashSet NormalizedUri -> HashSet NormalizedUri) -> IO ()
modifyWorkspaceFolders ide f = modifyIdeConfiguration ide f'
where f' (IdeConfiguration ws initOpts) = IdeConfiguration (f ws) initOpts
modifyClientSettings
:: IdeState -> (Maybe Value -> Maybe Value) -> IO ()
modifyClientSettings ide f = modifyIdeConfiguration ide f'
where f' (IdeConfiguration ws clientSettings) =
IdeConfiguration ws (hashed . f . unhashed $ clientSettings)
modifyIdeConfiguration
:: IdeState -> (IdeConfiguration -> IdeConfiguration) -> IO ()
modifyIdeConfiguration ide f = do
IdeConfigurationVar var <- getIdeGlobalState ide
modifyVar_ var (pure . f)
isWorkspaceFile :: NormalizedFilePath -> Action Bool
isWorkspaceFile file =
if isRelative (fromNormalizedFilePath file)
then return True
else do
IdeConfiguration {..} <- getIdeConfiguration
let toText = getUri . fromNormalizedUri
return $
any
(\root -> toText root `isPrefixOf` toText (filePathToUri' file))
workspaceFolders
getClientSettings :: Action (Maybe Value)
getClientSettings = unhashed . clientSettings <$> getIdeConfiguration |
1affc2c876b3b0b0059dd61fa3ea6debce3a26f1392ab7b7267b4dc39c26396a | janestreet/accessor_base | accessor_staged.ml | open! Base
open! Import
let staged = [%accessor Accessor.isomorphism ~get:Staged.stage ~construct:Staged.unstage]
let unstaged = [%accessor Accessor.invert staged]
| null | https://raw.githubusercontent.com/janestreet/accessor_base/8384c29a37e557168ae8a43b2a5a531f0ffc16e4/src/accessor_staged.ml | ocaml | open! Base
open! Import
let staged = [%accessor Accessor.isomorphism ~get:Staged.stage ~construct:Staged.unstage]
let unstaged = [%accessor Accessor.invert staged]
| |
f97c935f7736b38eb1ca89516e8dd98b837d5eb520bbe6df11530ec27fb69a0d | marigold-dev/deku | handlers.ml | open Ocaml_wasm_vm
type params = (string * string list) list
module Operation_payload = struct
include Operation_payload
let enc = Data_encoding.Json.convert Operation_payload.encoding
end
module type HANDLERS = sig
type path
type body
val body_encoding : body Json_encoding.encoding
type response
val response_encoding : response Json_encoding.encoding
val meth : [> `GET | `POST ]
val route : path Routes.route
val handler :
env:Eio.Stdenv.t ->
path:path ->
params:params ->
body:body ->
(response, string) result
end
module type NO_BODY_HANDLERS = sig
type path
type response
val response_encoding : response Data_encoding.t
val meth : [> `GET | `POST ]
val route : path Routes.route
val handler :
env:Eio.Stdenv.t -> path:path -> params:params -> (response, string) result
end
let version p = Routes.(s "api" / s "v1") p
type lang = Jsligo | Cameligo | PascalLigo | Michelson
let lang_enc =
let open Json_encoding in
conv
(fun lang ->
match lang with
| Jsligo -> "jsligo"
| Cameligo -> "mligo"
| PascalLigo -> "ligo"
| Michelson -> "michelson")
(fun str ->
match str with
| "jsligo" -> Jsligo
| "mligo" -> Cameligo
| "ligo" -> PascalLigo
| "michelson" -> Michelson
| _ -> failwith "unknown lang string")
string
let lang_to_string = function
| Jsligo -> "jsligo"
| Cameligo -> "mligo"
| PascalLigo -> "ligo"
| Michelson -> "michelson"
type compilation_target = Michelson_target | Wasm_target
let rec get_compilation_target = function
| [] -> Wasm_target
| ("target", [ "michelson" ]) :: _ -> Michelson_target
| ("target", [ "wasm" ]) :: _ -> Wasm_target
| _ :: tl -> get_compilation_target tl
module Compile_contract : HANDLERS = struct
type path = unit
type body = { source : string; lang : lang; storage : string }
[@@deriving encoding]
let body_encoding = body_enc
type response =
| Michelson_result of { code : string; storage : string }
| Wasm_result of Operation_payload.t
[@@deriving encoding]
let response_encoding = response_enc
let%expect_test "encodings" =
let show_json kind encoding result =
let json = Json_encoding.construct encoding result in
Format.printf "%s:\n%a\n%!" kind Data_encoding.Json.pp json
in
show_json "Body JSON" body_encoding
{
source = "const add = ([x, y] : [int, int]) : int => { x + y }";
lang = Jsligo;
storage = "7";
};
show_json "Michelson Response JSON" response_encoding
(Michelson_result
{ code = "some michelson code"; storage = "some michelson expression" });
let operation =
let open Ocaml_wasm_vm in
let open Deku_tezos in
let open Deku_ledger in
let open Deku_concepts in
let contract_address =
Contract_hash.of_b58 "KT1LiabSxPyVUmVZCqHneCFLJrqQcLHkmX9d"
|> Option.get
in
let ticketer = Ticket_id.Tezos contract_address in
let ticket_id = Ticket_id.make ticketer (Bytes.of_string "hello") in
let address =
Address.of_b58 "tz1UAxwRXXDvpZ5sAanbbP8tjKBoa2dxKUHE" |> Option.get
in
let argument = Value.(Union (Left (Union (Right (Int (Z.of_int 5)))))) in
let operation = Operation.Call { address; argument } in
(* Currently we don't support passing tickets to this endpoint so
tickets will always be an emtpy array, but we'll include them
in this test in case we want to add them in the future *)
Operation_payload.{ operation; tickets = [ (ticket_id, Amount.zero) ] }
in
show_json "WASM Result Response JSON" response_encoding
(Wasm_result operation);
[%expect
{|
Body JSON:
{ "source": "const add = ([x, y] : [int, int]) : int => { x + y }",
"lang": "jsligo", "storage": "7" }
Michelson Response JSON:
{ "code": "some michelson code",
"storage": "some michelson expression" }
WASM Result Response JSON:
{ "operation":
{ "address":
"tz1UAxwRXXDvpZ5sAanbbP8tjKBoa2dxKUHE",
"argument":
[ "Union",
[ "Left",
[ "Union",
[ "Right", [ "Int", "5" ] ] ] ] ] },
"tickets":
[ [ [ "KT1LiabSxPyVUmVZCqHneCFLJrqQcLHkmX9d",
"68656c6c6f" ], "0" ] ] } |}]
let meth = `POST
let path = Routes.(version / s "compile-contract" /? nil)
let route = Routes.(path @--> ())
let handler ~env ~path:_ ~params ~body:{ source; lang; storage } =
let michelson_code, michelson_storage =
match lang with
| Michelson -> (source, storage)
| _ -> (
let lang = lang_to_string lang in
let hash = Hash.make source in
let filename_ligo = Printf.sprintf "%s.%s" hash lang in
Logs.info (fun m -> m "filename_ligo: %s" filename_ligo);
let filename_tz = Printf.sprintf "%s.tz" hash in
Logs.info (fun m -> m "filename_tz: %s" filename_tz);
Logs.info (fun m -> m "storage: %s" storage);
let ligo_path = Eio.Path.(Eio.Stdenv.cwd env / filename_ligo) in
let tz_path = Eio.Path.(Eio.Stdenv.cwd env / filename_tz) in
let tz_already_exists =
try Some (Eio.Path.load tz_path) |> Option.is_some with _ -> false
in
match tz_already_exists with
| false ->
let () =
try Eio.Path.save ~create:(`Exclusive 0o600) ligo_path source
with _ -> ()
in
let () =
Ligo_commands.compile_contract ~env ~lang ~filename_ligo
~filename_tz ()
in
let code = Eio.Path.load tz_path in
let storage =
Ligo_commands.compile_storage ~lang ~filename_ligo
~expression:storage ()
in
(code, storage)
| true ->
let code = Eio.Path.load tz_path in
let storage =
Ligo_commands.compile_storage ~lang ~filename_ligo
~expression:storage ()
|> String.trim
in
(code, storage))
in
(* TODO: better error messages in Tuna *)
let show_tuna_error = function
| `Parsing_error _ -> "Tuna failed to parse the expression"
| `Prim_parsing_error error -> Tunac.Michelson_primitives.show_error error
| `Unexpected_error -> "Tuna encountered an unexpected error"
in
match get_compilation_target params with
| Michelson_target ->
Ok (Michelson_result { code = michelson_code; storage })
| Wasm_target -> (
Logs.info (fun m ->
m "Compiling michelson storage: %s" michelson_storage);
match Tunac.Compiler.compile_value michelson_storage with
| Ok (tickets, init) -> (
Logs.info (fun m ->
m "Compiling michelson source:\n%s" michelson_code);
match Tunac.Compiler.compile michelson_code with
| Ok (wat, constants, entrypoints) ->
let out = Tunac.Output.make wat constants |> Result.get_ok in
let entrypoints = entrypoints |> Option.value ~default:[] in
Ok
(Wasm_result
Operation_payload.
{
tickets;
operation =
Operation.Originate
{
module_ = out.module_;
entrypoints = Entrypoints.of_assoc entrypoints;
constants;
initial_storage = init;
};
})
| Error err -> Error (show_tuna_error err))
| Error err -> Error (show_tuna_error err))
end
module Compile_invocation : HANDLERS = struct
type path = unit
type body = {
source : string;
lang : lang;
expression : string;
Users may omit the address if they just want
address : string; [@ddft ""]
}
[@@deriving encoding]
let body_encoding = body_enc
type response =
| Michelson_expression of string
| Wasm_payload of Operation_payload.t
[@@deriving encoding]
let response_encoding = response_enc
let meth = `POST
let path = Routes.(version / s "compile-invocation" /? nil)
let route = Routes.(path @--> ())
let handler ~env ~path:_ ~params ~body:{ source; lang; expression; address } =
let expression =
match lang with
| Michelson -> expression
| _ ->
let lang = lang_to_string lang in
let hash = Hash.make source in
let filename_ligo = Printf.sprintf "%s.%s" hash lang in
let ligo_path = Eio.Path.(Eio.Stdenv.cwd env / filename_ligo) in
let ligo_already_exists =
try Some (Eio.Path.load ligo_path) |> Option.is_some
with _ -> false
in
(if not ligo_already_exists then
try Eio.Path.save ~create:(`Exclusive 0o600) ligo_path source
with _ -> ());
Ligo_commands.compile_parameter ~lang ~filename_ligo ~expression ()
in
match get_compilation_target params with
| Michelson_target -> Ok (Michelson_expression expression)
| Wasm_target -> (
let tickets, init =
Tunac.Compiler.compile_value expression |> Result.get_ok
in
match Deku_ledger.Address.of_b58 address with
| Some address ->
Ok
(Wasm_payload
Operation_payload.
{
tickets;
operation = Operation.Call { address; argument = init };
})
| None ->
Error (Format.sprintf "Unable to parse '%s' as an address" address))
end
module Health : NO_BODY_HANDLERS = struct
type path = unit
type response = unit
let response_encoding = Data_encoding.unit
let meth = `GET
let path = Routes.(s "health" /? nil)
let route = Routes.(path @--> ())
let handler ~env:_ ~path:_ ~params:_ = Ok ()
end
| null | https://raw.githubusercontent.com/marigold-dev/deku/5d578d6a6124ade1deff4ed88eac71de17a065fd/deku-c/ligo-deku-rpc/handlers.ml | ocaml | Currently we don't support passing tickets to this endpoint so
tickets will always be an emtpy array, but we'll include them
in this test in case we want to add them in the future
TODO: better error messages in Tuna | open Ocaml_wasm_vm
type params = (string * string list) list
module Operation_payload = struct
include Operation_payload
let enc = Data_encoding.Json.convert Operation_payload.encoding
end
module type HANDLERS = sig
type path
type body
val body_encoding : body Json_encoding.encoding
type response
val response_encoding : response Json_encoding.encoding
val meth : [> `GET | `POST ]
val route : path Routes.route
val handler :
env:Eio.Stdenv.t ->
path:path ->
params:params ->
body:body ->
(response, string) result
end
module type NO_BODY_HANDLERS = sig
type path
type response
val response_encoding : response Data_encoding.t
val meth : [> `GET | `POST ]
val route : path Routes.route
val handler :
env:Eio.Stdenv.t -> path:path -> params:params -> (response, string) result
end
let version p = Routes.(s "api" / s "v1") p
type lang = Jsligo | Cameligo | PascalLigo | Michelson
let lang_enc =
let open Json_encoding in
conv
(fun lang ->
match lang with
| Jsligo -> "jsligo"
| Cameligo -> "mligo"
| PascalLigo -> "ligo"
| Michelson -> "michelson")
(fun str ->
match str with
| "jsligo" -> Jsligo
| "mligo" -> Cameligo
| "ligo" -> PascalLigo
| "michelson" -> Michelson
| _ -> failwith "unknown lang string")
string
let lang_to_string = function
| Jsligo -> "jsligo"
| Cameligo -> "mligo"
| PascalLigo -> "ligo"
| Michelson -> "michelson"
type compilation_target = Michelson_target | Wasm_target
let rec get_compilation_target = function
| [] -> Wasm_target
| ("target", [ "michelson" ]) :: _ -> Michelson_target
| ("target", [ "wasm" ]) :: _ -> Wasm_target
| _ :: tl -> get_compilation_target tl
module Compile_contract : HANDLERS = struct
type path = unit
type body = { source : string; lang : lang; storage : string }
[@@deriving encoding]
let body_encoding = body_enc
type response =
| Michelson_result of { code : string; storage : string }
| Wasm_result of Operation_payload.t
[@@deriving encoding]
let response_encoding = response_enc
let%expect_test "encodings" =
let show_json kind encoding result =
let json = Json_encoding.construct encoding result in
Format.printf "%s:\n%a\n%!" kind Data_encoding.Json.pp json
in
show_json "Body JSON" body_encoding
{
source = "const add = ([x, y] : [int, int]) : int => { x + y }";
lang = Jsligo;
storage = "7";
};
show_json "Michelson Response JSON" response_encoding
(Michelson_result
{ code = "some michelson code"; storage = "some michelson expression" });
let operation =
let open Ocaml_wasm_vm in
let open Deku_tezos in
let open Deku_ledger in
let open Deku_concepts in
let contract_address =
Contract_hash.of_b58 "KT1LiabSxPyVUmVZCqHneCFLJrqQcLHkmX9d"
|> Option.get
in
let ticketer = Ticket_id.Tezos contract_address in
let ticket_id = Ticket_id.make ticketer (Bytes.of_string "hello") in
let address =
Address.of_b58 "tz1UAxwRXXDvpZ5sAanbbP8tjKBoa2dxKUHE" |> Option.get
in
let argument = Value.(Union (Left (Union (Right (Int (Z.of_int 5)))))) in
let operation = Operation.Call { address; argument } in
Operation_payload.{ operation; tickets = [ (ticket_id, Amount.zero) ] }
in
show_json "WASM Result Response JSON" response_encoding
(Wasm_result operation);
[%expect
{|
Body JSON:
{ "source": "const add = ([x, y] : [int, int]) : int => { x + y }",
"lang": "jsligo", "storage": "7" }
Michelson Response JSON:
{ "code": "some michelson code",
"storage": "some michelson expression" }
WASM Result Response JSON:
{ "operation":
{ "address":
"tz1UAxwRXXDvpZ5sAanbbP8tjKBoa2dxKUHE",
"argument":
[ "Union",
[ "Left",
[ "Union",
[ "Right", [ "Int", "5" ] ] ] ] ] },
"tickets":
[ [ [ "KT1LiabSxPyVUmVZCqHneCFLJrqQcLHkmX9d",
"68656c6c6f" ], "0" ] ] } |}]
let meth = `POST
let path = Routes.(version / s "compile-contract" /? nil)
let route = Routes.(path @--> ())
let handler ~env ~path:_ ~params ~body:{ source; lang; storage } =
let michelson_code, michelson_storage =
match lang with
| Michelson -> (source, storage)
| _ -> (
let lang = lang_to_string lang in
let hash = Hash.make source in
let filename_ligo = Printf.sprintf "%s.%s" hash lang in
Logs.info (fun m -> m "filename_ligo: %s" filename_ligo);
let filename_tz = Printf.sprintf "%s.tz" hash in
Logs.info (fun m -> m "filename_tz: %s" filename_tz);
Logs.info (fun m -> m "storage: %s" storage);
let ligo_path = Eio.Path.(Eio.Stdenv.cwd env / filename_ligo) in
let tz_path = Eio.Path.(Eio.Stdenv.cwd env / filename_tz) in
let tz_already_exists =
try Some (Eio.Path.load tz_path) |> Option.is_some with _ -> false
in
match tz_already_exists with
| false ->
let () =
try Eio.Path.save ~create:(`Exclusive 0o600) ligo_path source
with _ -> ()
in
let () =
Ligo_commands.compile_contract ~env ~lang ~filename_ligo
~filename_tz ()
in
let code = Eio.Path.load tz_path in
let storage =
Ligo_commands.compile_storage ~lang ~filename_ligo
~expression:storage ()
in
(code, storage)
| true ->
let code = Eio.Path.load tz_path in
let storage =
Ligo_commands.compile_storage ~lang ~filename_ligo
~expression:storage ()
|> String.trim
in
(code, storage))
in
let show_tuna_error = function
| `Parsing_error _ -> "Tuna failed to parse the expression"
| `Prim_parsing_error error -> Tunac.Michelson_primitives.show_error error
| `Unexpected_error -> "Tuna encountered an unexpected error"
in
match get_compilation_target params with
| Michelson_target ->
Ok (Michelson_result { code = michelson_code; storage })
| Wasm_target -> (
Logs.info (fun m ->
m "Compiling michelson storage: %s" michelson_storage);
match Tunac.Compiler.compile_value michelson_storage with
| Ok (tickets, init) -> (
Logs.info (fun m ->
m "Compiling michelson source:\n%s" michelson_code);
match Tunac.Compiler.compile michelson_code with
| Ok (wat, constants, entrypoints) ->
let out = Tunac.Output.make wat constants |> Result.get_ok in
let entrypoints = entrypoints |> Option.value ~default:[] in
Ok
(Wasm_result
Operation_payload.
{
tickets;
operation =
Operation.Originate
{
module_ = out.module_;
entrypoints = Entrypoints.of_assoc entrypoints;
constants;
initial_storage = init;
};
})
| Error err -> Error (show_tuna_error err))
| Error err -> Error (show_tuna_error err))
end
module Compile_invocation : HANDLERS = struct
type path = unit
type body = {
source : string;
lang : lang;
expression : string;
Users may omit the address if they just want
address : string; [@ddft ""]
}
[@@deriving encoding]
let body_encoding = body_enc
type response =
| Michelson_expression of string
| Wasm_payload of Operation_payload.t
[@@deriving encoding]
let response_encoding = response_enc
let meth = `POST
let path = Routes.(version / s "compile-invocation" /? nil)
let route = Routes.(path @--> ())
let handler ~env ~path:_ ~params ~body:{ source; lang; expression; address } =
let expression =
match lang with
| Michelson -> expression
| _ ->
let lang = lang_to_string lang in
let hash = Hash.make source in
let filename_ligo = Printf.sprintf "%s.%s" hash lang in
let ligo_path = Eio.Path.(Eio.Stdenv.cwd env / filename_ligo) in
let ligo_already_exists =
try Some (Eio.Path.load ligo_path) |> Option.is_some
with _ -> false
in
(if not ligo_already_exists then
try Eio.Path.save ~create:(`Exclusive 0o600) ligo_path source
with _ -> ());
Ligo_commands.compile_parameter ~lang ~filename_ligo ~expression ()
in
match get_compilation_target params with
| Michelson_target -> Ok (Michelson_expression expression)
| Wasm_target -> (
let tickets, init =
Tunac.Compiler.compile_value expression |> Result.get_ok
in
match Deku_ledger.Address.of_b58 address with
| Some address ->
Ok
(Wasm_payload
Operation_payload.
{
tickets;
operation = Operation.Call { address; argument = init };
})
| None ->
Error (Format.sprintf "Unable to parse '%s' as an address" address))
end
module Health : NO_BODY_HANDLERS = struct
type path = unit
type response = unit
let response_encoding = Data_encoding.unit
let meth = `GET
let path = Routes.(s "health" /? nil)
let route = Routes.(path @--> ())
let handler ~env:_ ~path:_ ~params:_ = Ok ()
end
|
3f85d26e1e2a294b5610618e43f4d3a4ed5648927a249ce02d24654f9607798c | chris-taylor/aima-haskell | MapColoring.hs | {-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module AI.Search.Example.MapColoring where
import Data.Map (Map, (!))
import qualified Data.Map as M
import AI.Search.CSP
import AI.Util.Graph (Graph)
import AI.Util.Util
import qualified AI.Util.Graph as G
----------------------
-- Map Coloring CSP --
----------------------
data MapColoringCSP v a = MCP
{ neighboursMC :: Graph String
, colorsMC :: [Char] } deriving (Show)
instance CSP MapColoringCSP String Char where
vars (MCP nbrs _) = M.keys nbrs
domains csp = mkUniversalMap (vars csp) (colorsMC csp)
neighbours (MCP nbrs _) = nbrs
constraints csp x xv y yv = xv /=yv || not (y `elem` neighbours csp ! x)
-----------------------------------
Map Coloring Problems in AIMA --
-----------------------------------
australia :: MapColoringCSP String Char
australia = MCP territories "RGB"
where
territories = G.toGraph $
[ ("SA", ["WA","NT","Q","NSW","V"])
, ("NT", ["WA","Q","SA"])
, ("NSW", ["Q","V","SA"])
, ("T", [])
, ("WA", ["SA","NT"])
, ("Q", ["SA","NT","NSW"])
, ("V", ["SA","NSW"]) ]
usa :: MapColoringCSP String Char
usa = MCP states "RGBY"
where states = G.parseGraph
"WA: OR ID; OR: ID NV CA; CA: NV AZ; NV: ID UT AZ; ID: MT WY UT;\
\UT: WY CO AZ; MT: ND SD WY; WY: SD NE CO; CO: NE KA OK NM; NM: OK TX;\
\ND: MN SD; SD: MN IA NE; NE: IA MO KA; KA: MO OK; OK: MO AR TX;\
\TX: AR LA; MN: WI IA; IA: WI IL MO; MO: IL KY TN AR; AR: MS TN LA;\
\LA: MS; WI: MI IL; IL: IN; IN: KY; MS: TN AL; AL: TN GA FL; MI: OH;\
\OH: PA WV KY; KY: WV VA TN; TN: VA NC GA; GA: NC SC FL;\
\PA: NY NJ DE MD WV; WV: MD VA; VA: MD DC NC; NC: SC; NY: VT MA CA NJ;\
\NJ: DE; DE: MD; MD: DC; VT: NH MA; MA: NH RI CT; CT: RI; ME: NH;\
\HI: ; AK: "
-----------
-- Demos --
-----------
demo1 :: IO ()
demo1 = case backtrackingSearch australia fastOpts of
Nothing -> putStrLn "No solution found."
Just a -> putStrLn "Solution found:" >> print a
demo2 :: IO ()
demo2 = case backtrackingSearch usa fastOpts of
Nothing -> putStrLn "No solution found."
Just a -> putStrLn "Solution found:" >> print a
| null | https://raw.githubusercontent.com/chris-taylor/aima-haskell/538dcfe82a57a623e45174e911ce68974d8aa839/src/AI/Search/Example/MapColoring.hs | haskell | # LANGUAGE TypeSynonymInstances, FlexibleInstances #
--------------------
Map Coloring CSP --
--------------------
---------------------------------
---------------------------------
---------
Demos --
--------- |
module AI.Search.Example.MapColoring where
import Data.Map (Map, (!))
import qualified Data.Map as M
import AI.Search.CSP
import AI.Util.Graph (Graph)
import AI.Util.Util
import qualified AI.Util.Graph as G
data MapColoringCSP v a = MCP
{ neighboursMC :: Graph String
, colorsMC :: [Char] } deriving (Show)
instance CSP MapColoringCSP String Char where
vars (MCP nbrs _) = M.keys nbrs
domains csp = mkUniversalMap (vars csp) (colorsMC csp)
neighbours (MCP nbrs _) = nbrs
constraints csp x xv y yv = xv /=yv || not (y `elem` neighbours csp ! x)
australia :: MapColoringCSP String Char
australia = MCP territories "RGB"
where
territories = G.toGraph $
[ ("SA", ["WA","NT","Q","NSW","V"])
, ("NT", ["WA","Q","SA"])
, ("NSW", ["Q","V","SA"])
, ("T", [])
, ("WA", ["SA","NT"])
, ("Q", ["SA","NT","NSW"])
, ("V", ["SA","NSW"]) ]
usa :: MapColoringCSP String Char
usa = MCP states "RGBY"
where states = G.parseGraph
"WA: OR ID; OR: ID NV CA; CA: NV AZ; NV: ID UT AZ; ID: MT WY UT;\
\UT: WY CO AZ; MT: ND SD WY; WY: SD NE CO; CO: NE KA OK NM; NM: OK TX;\
\ND: MN SD; SD: MN IA NE; NE: IA MO KA; KA: MO OK; OK: MO AR TX;\
\TX: AR LA; MN: WI IA; IA: WI IL MO; MO: IL KY TN AR; AR: MS TN LA;\
\LA: MS; WI: MI IL; IL: IN; IN: KY; MS: TN AL; AL: TN GA FL; MI: OH;\
\OH: PA WV KY; KY: WV VA TN; TN: VA NC GA; GA: NC SC FL;\
\PA: NY NJ DE MD WV; WV: MD VA; VA: MD DC NC; NC: SC; NY: VT MA CA NJ;\
\NJ: DE; DE: MD; MD: DC; VT: NH MA; MA: NH RI CT; CT: RI; ME: NH;\
\HI: ; AK: "
demo1 :: IO ()
demo1 = case backtrackingSearch australia fastOpts of
Nothing -> putStrLn "No solution found."
Just a -> putStrLn "Solution found:" >> print a
demo2 :: IO ()
demo2 = case backtrackingSearch usa fastOpts of
Nothing -> putStrLn "No solution found."
Just a -> putStrLn "Solution found:" >> print a
|
71feaee06b7373c1aaf96b8c4dfa7104972169b224cf4930c66dd48e97009193 | reflex-frp/reflex-native | Style.hs | # LANGUAGE ExplicitNamespaces #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
|Functions for applying " Reflex . Native " style structures to views .
module Reflex.UIKit.Style
(
-- * Per-view-type functionality
applyLabelStyle, applyViewStyle
-- * Per-phase functionality (initial / modify)
, initialStyle, modifyStyle
) where
import Control.Monad (Monad, (<=<))
import Data.Functor.Identity (Identity(..))
import ObjC (ObjPtr, SafeObjCoerce)
import Reflex (Event, Requester(type Request), requesting_)
import Reflex.Native (TextStyle(..), ViewStyle(..))
import Reflex.UIKit.Conversions (makeUIColor, makeUIFont)
import UIKit.Types (MainThread, UILabel, UIViewType)
import qualified UIKit.UILabel as UILabel
import qualified UIKit.UIView as UIView
{-# INLINABLE applyLabelStyle #-}
applyLabelStyle :: Monad m => (forall a. (a -> MainThread ()) -> f a -> m ()) -> UILabel -> TextStyle f -> m ()
applyLabelStyle f l (TextStyle {..}) = do
f (UILabel.setTextColor l <=< makeUIColor) _textStyle_textColor
f (UILabel.setFont l <=< makeUIFont) _textStyle_font
# INLINABLE applyViewStyle #
applyViewStyle :: (SafeObjCoerce v UIViewType, Monad m) => (forall a. (a -> MainThread ()) -> f a -> m ()) -> ObjPtr v -> ViewStyle f -> m ()
applyViewStyle f l (ViewStyle {..}) = do
f (UIView.setBackgroundColor l <=< makeUIColor) _viewStyle_backgroundColor
# INLINABLE initialStyle #
initialStyle :: (a -> MainThread ()) -> Identity a -> MainThread ()
initialStyle action (Identity a) = action a
# INLINABLE modifyStyle #
modifyStyle :: (Requester t m, Request m ~ MainThread) => (a -> MainThread ()) -> Event t a -> m ()
modifyStyle action = requesting_ . fmap action
| null | https://raw.githubusercontent.com/reflex-frp/reflex-native/5fb6a07845e4f7c51f97e9c8ce1a48009f341246/reflex-native-uikit/src/Reflex/UIKit/Style.hs | haskell | # LANGUAGE RankNTypes #
* Per-view-type functionality
* Per-phase functionality (initial / modify)
# INLINABLE applyLabelStyle # | # LANGUAGE ExplicitNamespaces #
# LANGUAGE FlexibleContexts #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
|Functions for applying " Reflex . Native " style structures to views .
module Reflex.UIKit.Style
(
applyLabelStyle, applyViewStyle
, initialStyle, modifyStyle
) where
import Control.Monad (Monad, (<=<))
import Data.Functor.Identity (Identity(..))
import ObjC (ObjPtr, SafeObjCoerce)
import Reflex (Event, Requester(type Request), requesting_)
import Reflex.Native (TextStyle(..), ViewStyle(..))
import Reflex.UIKit.Conversions (makeUIColor, makeUIFont)
import UIKit.Types (MainThread, UILabel, UIViewType)
import qualified UIKit.UILabel as UILabel
import qualified UIKit.UIView as UIView
applyLabelStyle :: Monad m => (forall a. (a -> MainThread ()) -> f a -> m ()) -> UILabel -> TextStyle f -> m ()
applyLabelStyle f l (TextStyle {..}) = do
f (UILabel.setTextColor l <=< makeUIColor) _textStyle_textColor
f (UILabel.setFont l <=< makeUIFont) _textStyle_font
# INLINABLE applyViewStyle #
applyViewStyle :: (SafeObjCoerce v UIViewType, Monad m) => (forall a. (a -> MainThread ()) -> f a -> m ()) -> ObjPtr v -> ViewStyle f -> m ()
applyViewStyle f l (ViewStyle {..}) = do
f (UIView.setBackgroundColor l <=< makeUIColor) _viewStyle_backgroundColor
# INLINABLE initialStyle #
initialStyle :: (a -> MainThread ()) -> Identity a -> MainThread ()
initialStyle action (Identity a) = action a
# INLINABLE modifyStyle #
modifyStyle :: (Requester t m, Request m ~ MainThread) => (a -> MainThread ()) -> Event t a -> m ()
modifyStyle action = requesting_ . fmap action
|
c1df14633a79c5491a89b1845ff1c1d65c5c54c37523ea7409510858ac1bd123 | ekmett/unboxed | Int8.hs | # Language NoImplicitPrelude #
{-# Language RebindableSyntax #-}
{-# Language MagicHash #-}
{-# Language KindSignatures #-}
{-# Language PolyKinds #-}
# Language UnboxedTuples #
{-# Language BangPatterns #-}
{-# Language DataKinds #-}
{-# Language RankNTypes #-}
{-# Language TypeSynonymInstances #-}
{-# Language ImportQualifiedPost #-}
# OPTIONS_GHC -Wno - orphans #
-- | exposes detailed names that can be used for RULES
module Unboxed.Rep.Int8
( module Def.Int8
, eqInt8, neInt8, ltInt8, leInt8, gtInt8, geInt8
, Int8#
) where
import Unboxed.Internal.Class
import ( succError , predError )
import GHC.Exception
import GHC.Int (Int8(..))
import GHC.Prim
import GHC.Real ((%))
import GHC.Num.Integer
import GHC.Types
import Prelude (otherwise, (&&), String, (++), errorWithoutStackTrace, ($))
import Def.Int8
eqInt8, neInt8, ltInt8, leInt8, gtInt8, geInt8 :: Int8# -> Int8# -> Bool
eqInt8 x y = isTrue# (int8ToInt# x ==# int8ToInt# y)
{-# INLINE [1] eqInt8 #-}
neInt8 x y = isTrue# (int8ToInt# x /=# int8ToInt# y)
{-# INLINE [1] neInt8 #-}
ltInt8 x y = isTrue# (int8ToInt# x <# int8ToInt# y)
# INLINE [ 1 ] ltInt8 #
gtInt8 x y = isTrue# (int8ToInt# x ># int8ToInt# y)
{-# INLINE [1] gtInt8 #-}
leInt8 x y = isTrue# (int8ToInt# x <=# int8ToInt# y)
{-# INLINE [1] leInt8 #-}
geInt8 x y = isTrue# (int8ToInt# x >=# int8ToInt# y)
{-# INLINE [1] geInt8 #-}
instance Eq Int8# where
(==) = eqInt8
(/=) = neInt8
instance Ord Int8# where
(<=) = leInt8
(>=) = geInt8
(<) = ltInt8
(>) = gtInt8
instance Bounded Int8# where
minBound = 127
maxBound = -128
instance Num Int8# where
(+) = plusInt8#
(-) = subInt8#
(*) = timesInt8#
negate = negateInt8#
abs x
| x >= 0 = x
| otherwise = negate x
signum x | x > 0 = 1
signum 0 = 0
signum _ = -1
fromInteger i = intToInt8# (integerToInt# i)
# INLINE fromInteger #
instance Show Int8# where
showsPrec d a = showsPrec d (I8# a)
{-# INLINE showsPrec #-}
{-# INLINE [0] divInt8# #-}
divInt8# :: Int8# -> Int8# -> Int8#
divInt8# x# y# = ((x# `plusInt8#` bias#) `quotInt8#` y#) `subInt8#` hard# where
zero# = intToInt8# 0#
x `andInt8#` y = word8ToInt8# (int8ToWord8# x `andWord8#` int8ToWord8# y)
x `orInt8#` y = word8ToInt8# (int8ToWord8# x `orWord8#` int8ToWord8# y)
notInt8# x = word8ToInt8# (notWord8# (int8ToWord8# x))
-- See Note [divInt# implementation]
!yn# = intToInt8# (y# `ltInt8#` zero#)
!c0# = intToInt8# (x# `ltInt8#` zero#) `andInt8#` (notInt8# yn#)
!c1# = intToInt8# (x# `gtInt8#` zero#) `andInt8#` yn#
!bias# = c0# `subInt8#` c1#
!hard# = c0# `orInt8#` c1#
{-# INLINE [0] modInt8# #-}
modInt8# :: Int8# -> Int8# -> Int8#
modInt8# x# y# = r# `plusInt8#` k# where
zero# = intToInt8# 0#
x `andInt8#` y = word8ToInt8# (int8ToWord8# x `andWord8#` int8ToWord8# y)
x `orInt8#` y = word8ToInt8# (int8ToWord8# x `orWord8#` int8ToWord8# y)
notInt8# x = word8ToInt8# (notWord8# (int8ToWord8# x))
-- See Note [modInt# implementation]
!yn# = intToInt8# (y# `ltInt8#` zero#)
!c0# = intToInt8# (x# `ltInt8#` zero#) `andInt8#` (notInt8# yn#)
!c1# = intToInt8# (x# `gtInt8#` zero#) `andInt8#` yn#
!s# = zero# `subInt8#` ((c0# `orInt8#` c1#) `andInt8#` (intToInt8# (r# `neInt8#` zero#)))
!k# = s# `andInt8#` y#
!r# = x# `remInt8#` y#
{-# INLINE [0] divModInt8# #-}
divModInt8# :: Int8# -> Int8# -> (# Int8#, Int8# #)
divModInt8# x# y# = case (x# `plusInt8#` bias#) `quotRemInt8#` y# of
(# q#, r# #) -> (# q# `subInt8#` hard#, r# `plusInt8#` k# #)
where
zero# = intToInt8# 0#
x `andInt8#` y = word8ToInt8# (int8ToWord8# x `andWord8#` int8ToWord8# y)
x `orInt8#` y = word8ToInt8# (int8ToWord8# x `orWord8#` int8ToWord8# y)
notInt8# x = word8ToInt8# (notWord8# (int8ToWord8# x))
-- See Note [divModInt# implementation]
!yn# = intToInt8# (y# `ltInt8#` zero#)
!c0# = intToInt8# (x# `ltInt8#` zero#) `andInt8#` (notInt8# yn#)
!c1# = intToInt8# (x# `gtInt8#` zero#) `andInt8#` yn#
!bias# = c0# `subInt8#` c1#
!hard# = c0# `orInt8#` c1#
!s# = zero# `subInt8#` hard#
!k# = (s# `andInt8#` y#) `subInt8#` bias#
# NOINLINE succError # #
succError# :: forall r (a :: TYPE r). String -> a
succError# inst_ty =
errorWithoutStackTrace $ "Enum.succ{" ++ inst_ty ++ "}: tried to take `succ' of maxBound"
# NOINLINE predError # #
predError# :: forall r (a :: TYPE r). String -> a
predError# inst_ty =
errorWithoutStackTrace $ "Enum.pred{" ++ inst_ty ++ "}: tried to take `pred' of minBound"
instance Enum Int8# where
succ x
| x /= maxBound = x + 1
| otherwise = succError# "Int8#"
pred x
| x /= minBound = x - 1
| otherwise = predError# "Int8#"
toEnum i@(I# i#)
| isTrue# (i# >=# int8ToInt# minBound) && isTrue# (i# <=# int8ToInt# maxBound) = intToInt8# i#
| otherwise = errorWithoutStackTrace $
"Enum.toEnum{Int8#}: tag (" ++ show i ++ ") is outside of bounds (# " ++
show (minBound :: Int8) ++ ", " ++ show (maxBound :: Int8) ++ " #)"
fromEnum x# = I# (int8ToInt# x#)
instance Real Int8# where
toRational x = toInteger x % 1
instance Integral Int8# where
quot x# y#
| y# == 0 = throw divZeroException
| y# == (-1) && x# == minBound = throw overflowException
| otherwise = quotInt8# x# y#
rem x# y#
| y# == 0 = throw divZeroException
| y# == (-1) = 0
| otherwise = remInt8# x# y#
div x# y#
| y# == 0 = throw divZeroException
| y# == (-1) && x# == minBound = throw overflowException -- Note [Order of tests]
| otherwise = divInt8# x# y#
mod x# y#
| y# == 0 = throw divZeroException
| y# == (-1) = 0
| otherwise = modInt8# x# y#
quotRem x# y#
| y# == 0 = throw divZeroException
| y# == (-1) && x# == minBound = throw overflowException
| otherwise = quotRemInt8# x# y#
divMod x# y#
| y# == 0 = throw divZeroException
| y# == (-1) && x# == minBound = throw overflowException
| otherwise = divModInt8# x# y#
toInteger x# = IS (int8ToInt# x#)
| null | https://raw.githubusercontent.com/ekmett/unboxed/2f8bf0c92f3d584c8d154c037ca4c7747b6d9f65/src/Unboxed/Rep/Int8.hs | haskell | # Language RebindableSyntax #
# Language MagicHash #
# Language KindSignatures #
# Language PolyKinds #
# Language BangPatterns #
# Language DataKinds #
# Language RankNTypes #
# Language TypeSynonymInstances #
# Language ImportQualifiedPost #
| exposes detailed names that can be used for RULES
# INLINE [1] eqInt8 #
# INLINE [1] neInt8 #
# INLINE [1] gtInt8 #
# INLINE [1] leInt8 #
# INLINE [1] geInt8 #
# INLINE showsPrec #
# INLINE [0] divInt8# #
See Note [divInt# implementation]
# INLINE [0] modInt8# #
See Note [modInt# implementation]
# INLINE [0] divModInt8# #
See Note [divModInt# implementation]
Note [Order of tests] | # Language NoImplicitPrelude #
# Language UnboxedTuples #
# OPTIONS_GHC -Wno - orphans #
module Unboxed.Rep.Int8
( module Def.Int8
, eqInt8, neInt8, ltInt8, leInt8, gtInt8, geInt8
, Int8#
) where
import Unboxed.Internal.Class
import ( succError , predError )
import GHC.Exception
import GHC.Int (Int8(..))
import GHC.Prim
import GHC.Real ((%))
import GHC.Num.Integer
import GHC.Types
import Prelude (otherwise, (&&), String, (++), errorWithoutStackTrace, ($))
import Def.Int8
eqInt8, neInt8, ltInt8, leInt8, gtInt8, geInt8 :: Int8# -> Int8# -> Bool
eqInt8 x y = isTrue# (int8ToInt# x ==# int8ToInt# y)
neInt8 x y = isTrue# (int8ToInt# x /=# int8ToInt# y)
ltInt8 x y = isTrue# (int8ToInt# x <# int8ToInt# y)
# INLINE [ 1 ] ltInt8 #
gtInt8 x y = isTrue# (int8ToInt# x ># int8ToInt# y)
leInt8 x y = isTrue# (int8ToInt# x <=# int8ToInt# y)
geInt8 x y = isTrue# (int8ToInt# x >=# int8ToInt# y)
instance Eq Int8# where
(==) = eqInt8
(/=) = neInt8
instance Ord Int8# where
(<=) = leInt8
(>=) = geInt8
(<) = ltInt8
(>) = gtInt8
instance Bounded Int8# where
minBound = 127
maxBound = -128
instance Num Int8# where
(+) = plusInt8#
(-) = subInt8#
(*) = timesInt8#
negate = negateInt8#
abs x
| x >= 0 = x
| otherwise = negate x
signum x | x > 0 = 1
signum 0 = 0
signum _ = -1
fromInteger i = intToInt8# (integerToInt# i)
# INLINE fromInteger #
instance Show Int8# where
showsPrec d a = showsPrec d (I8# a)
divInt8# :: Int8# -> Int8# -> Int8#
divInt8# x# y# = ((x# `plusInt8#` bias#) `quotInt8#` y#) `subInt8#` hard# where
zero# = intToInt8# 0#
x `andInt8#` y = word8ToInt8# (int8ToWord8# x `andWord8#` int8ToWord8# y)
x `orInt8#` y = word8ToInt8# (int8ToWord8# x `orWord8#` int8ToWord8# y)
notInt8# x = word8ToInt8# (notWord8# (int8ToWord8# x))
!yn# = intToInt8# (y# `ltInt8#` zero#)
!c0# = intToInt8# (x# `ltInt8#` zero#) `andInt8#` (notInt8# yn#)
!c1# = intToInt8# (x# `gtInt8#` zero#) `andInt8#` yn#
!bias# = c0# `subInt8#` c1#
!hard# = c0# `orInt8#` c1#
modInt8# :: Int8# -> Int8# -> Int8#
modInt8# x# y# = r# `plusInt8#` k# where
zero# = intToInt8# 0#
x `andInt8#` y = word8ToInt8# (int8ToWord8# x `andWord8#` int8ToWord8# y)
x `orInt8#` y = word8ToInt8# (int8ToWord8# x `orWord8#` int8ToWord8# y)
notInt8# x = word8ToInt8# (notWord8# (int8ToWord8# x))
!yn# = intToInt8# (y# `ltInt8#` zero#)
!c0# = intToInt8# (x# `ltInt8#` zero#) `andInt8#` (notInt8# yn#)
!c1# = intToInt8# (x# `gtInt8#` zero#) `andInt8#` yn#
!s# = zero# `subInt8#` ((c0# `orInt8#` c1#) `andInt8#` (intToInt8# (r# `neInt8#` zero#)))
!k# = s# `andInt8#` y#
!r# = x# `remInt8#` y#
divModInt8# :: Int8# -> Int8# -> (# Int8#, Int8# #)
divModInt8# x# y# = case (x# `plusInt8#` bias#) `quotRemInt8#` y# of
(# q#, r# #) -> (# q# `subInt8#` hard#, r# `plusInt8#` k# #)
where
zero# = intToInt8# 0#
x `andInt8#` y = word8ToInt8# (int8ToWord8# x `andWord8#` int8ToWord8# y)
x `orInt8#` y = word8ToInt8# (int8ToWord8# x `orWord8#` int8ToWord8# y)
notInt8# x = word8ToInt8# (notWord8# (int8ToWord8# x))
!yn# = intToInt8# (y# `ltInt8#` zero#)
!c0# = intToInt8# (x# `ltInt8#` zero#) `andInt8#` (notInt8# yn#)
!c1# = intToInt8# (x# `gtInt8#` zero#) `andInt8#` yn#
!bias# = c0# `subInt8#` c1#
!hard# = c0# `orInt8#` c1#
!s# = zero# `subInt8#` hard#
!k# = (s# `andInt8#` y#) `subInt8#` bias#
# NOINLINE succError # #
succError# :: forall r (a :: TYPE r). String -> a
succError# inst_ty =
errorWithoutStackTrace $ "Enum.succ{" ++ inst_ty ++ "}: tried to take `succ' of maxBound"
# NOINLINE predError # #
predError# :: forall r (a :: TYPE r). String -> a
predError# inst_ty =
errorWithoutStackTrace $ "Enum.pred{" ++ inst_ty ++ "}: tried to take `pred' of minBound"
instance Enum Int8# where
succ x
| x /= maxBound = x + 1
| otherwise = succError# "Int8#"
pred x
| x /= minBound = x - 1
| otherwise = predError# "Int8#"
toEnum i@(I# i#)
| isTrue# (i# >=# int8ToInt# minBound) && isTrue# (i# <=# int8ToInt# maxBound) = intToInt8# i#
| otherwise = errorWithoutStackTrace $
"Enum.toEnum{Int8#}: tag (" ++ show i ++ ") is outside of bounds (# " ++
show (minBound :: Int8) ++ ", " ++ show (maxBound :: Int8) ++ " #)"
fromEnum x# = I# (int8ToInt# x#)
instance Real Int8# where
toRational x = toInteger x % 1
instance Integral Int8# where
quot x# y#
| y# == 0 = throw divZeroException
| y# == (-1) && x# == minBound = throw overflowException
| otherwise = quotInt8# x# y#
rem x# y#
| y# == 0 = throw divZeroException
| y# == (-1) = 0
| otherwise = remInt8# x# y#
div x# y#
| y# == 0 = throw divZeroException
| otherwise = divInt8# x# y#
mod x# y#
| y# == 0 = throw divZeroException
| y# == (-1) = 0
| otherwise = modInt8# x# y#
quotRem x# y#
| y# == 0 = throw divZeroException
| y# == (-1) && x# == minBound = throw overflowException
| otherwise = quotRemInt8# x# y#
divMod x# y#
| y# == 0 = throw divZeroException
| y# == (-1) && x# == minBound = throw overflowException
| otherwise = divModInt8# x# y#
toInteger x# = IS (int8ToInt# x#)
|
3d3962d3d7292e176f676a5f8bf36d45ef60a4dc06097b77e691c7ff73617fe5 | ocaml-ppx/ppx | builder_unstable_for_testing.ml | $ Ppx_ast_cinaps.print_builder_ml ( Astlib . Version.of_string " unstable_for_testing " )
open Versions.Unstable_for_testing
let module_binding ~loc ~expr ~name =
Module_binding.create ~pmb_loc:loc ~pmb_attributes:(Attributes.of_concrete []) ~pmb_expr:expr ~pmb_name:name
let value_binding ~loc ~expr ~pat =
Value_binding.create ~pvb_loc:loc ~pvb_attributes:(Attributes.of_concrete []) ~pvb_expr:expr ~pvb_pat:pat
let pstr_extension ~loc a1 a2 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_extension a1 a2) ~pstr_loc:loc
let pstr_attribute ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_attribute a1) ~pstr_loc:loc
let pstr_include ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_include a1) ~pstr_loc:loc
let pstr_class_type ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_class_type a1) ~pstr_loc:loc
let pstr_class ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_class a1) ~pstr_loc:loc
let pstr_open ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_open a1) ~pstr_loc:loc
let pstr_modtype ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_modtype a1) ~pstr_loc:loc
let pstr_recmodule ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_recmodule a1) ~pstr_loc:loc
let pstr_module ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_module a1) ~pstr_loc:loc
let pstr_exception ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_exception a1) ~pstr_loc:loc
let pstr_typext ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_typext a1) ~pstr_loc:loc
let pstr_type ~loc a1 a2 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_type a1 a2) ~pstr_loc:loc
let pstr_primitive ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_primitive a1) ~pstr_loc:loc
let pstr_value ~loc a1 a2 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_value a1 a2) ~pstr_loc:loc
let pstr_eval ~loc a1 a2 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_eval a1 a2) ~pstr_loc:loc
let pmod_extension ~loc a1 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_extension a1) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_unpack ~loc a1 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_unpack a1) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_constraint ~loc a1 a2 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_constraint a1 a2) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_apply ~loc a1 a2 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_apply a1 a2) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_functor ~loc a1 a2 a3 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_functor a1 a2 a3) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_structure ~loc a1 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_structure a1) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_ident ~loc a1 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_ident a1) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let open_description ~loc ~lid ~override =
Open_description.create ~popen_attributes:(Attributes.of_concrete []) ~popen_loc:loc ~popen_override:override ~popen_lid:lid
let module_type_declaration ~loc ~name ~type_ =
Module_type_declaration.create ~pmtd_loc:loc ~pmtd_attributes:(Attributes.of_concrete []) ~pmtd_type:type_ ~pmtd_name:name
let module_declaration ~loc ~name ~type_ =
Module_declaration.create ~pmd_loc:loc ~pmd_attributes:(Attributes.of_concrete []) ~pmd_type:type_ ~pmd_name:name
let psig_extension ~loc a1 a2 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_extension a1 a2) ~psig_loc:loc
let psig_attribute ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_attribute a1) ~psig_loc:loc
let psig_class_type ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_class_type a1) ~psig_loc:loc
let psig_class ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_class a1) ~psig_loc:loc
let psig_include ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_include a1) ~psig_loc:loc
let psig_open ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_open a1) ~psig_loc:loc
let psig_modtype ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_modtype a1) ~psig_loc:loc
let psig_recmodule ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_recmodule a1) ~psig_loc:loc
let psig_module ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_module a1) ~psig_loc:loc
let psig_exception ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_exception a1) ~psig_loc:loc
let psig_typext ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_typext a1) ~psig_loc:loc
let psig_type ~loc a1 a2 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_type a1 a2) ~psig_loc:loc
let psig_value ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_value a1) ~psig_loc:loc
let pmty_alias ~loc a1 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_alias a1) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_extension ~loc a1 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_extension a1) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_typeof ~loc a1 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_typeof a1) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_with ~loc a1 a2 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_with a1 a2) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_functor ~loc a1 a2 a3 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_functor a1 a2 a3) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_signature ~loc a1 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_signature a1) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_ident ~loc a1 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_ident a1) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pcf_extension ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_extension a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_attribute ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_attribute a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_initializer ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_initializer a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_constraint ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_constraint a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_method ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_method a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_val ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_val a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_inherit ~loc a1 a2 a3 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_inherit a1 a2 a3) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let class_structure ~fields ~self =
Class_structure.create ~pcstr_fields:fields ~pcstr_self:self
let pcl_open ~loc a1 a2 a3 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_open a1 a2 a3) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_extension ~loc a1 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_extension a1) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_constraint ~loc a1 a2 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_constraint a1 a2) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_let ~loc a1 a2 a3 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_let a1 a2 a3) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_apply ~loc a1 a2 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_apply a1 a2) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_fun ~loc a1 a2 a3 a4 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_fun a1 a2 a3 a4) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_structure ~loc a1 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_structure a1) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_constr ~loc a1 a2 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_constr a1 a2) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pctf_extension ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_extension a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let pctf_attribute ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_attribute a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let pctf_constraint ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_constraint a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let pctf_method ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_method a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let pctf_val ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_val a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let pctf_inherit ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_inherit a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let class_signature ~fields ~self =
Class_signature.create ~pcsig_fields:fields ~pcsig_self:self
let pcty_open ~loc a1 a2 a3 =
Class_type.create ~pcty_desc:(Class_type_desc.pcty_open a1 a2 a3) ~pcty_loc:loc ~pcty_attributes:(Attributes.of_concrete [])
let pcty_extension ~loc a1 =
Class_type.create ~pcty_desc:(Class_type_desc.pcty_extension a1) ~pcty_loc:loc ~pcty_attributes:(Attributes.of_concrete [])
let pcty_arrow ~loc a1 a2 a3 =
Class_type.create ~pcty_desc:(Class_type_desc.pcty_arrow a1 a2 a3) ~pcty_loc:loc ~pcty_attributes:(Attributes.of_concrete [])
let pcty_signature ~loc a1 =
Class_type.create ~pcty_desc:(Class_type_desc.pcty_signature a1) ~pcty_loc:loc ~pcty_attributes:(Attributes.of_concrete [])
let pcty_constr ~loc a1 a2 =
Class_type.create ~pcty_desc:(Class_type_desc.pcty_constr a1 a2) ~pcty_loc:loc ~pcty_attributes:(Attributes.of_concrete [])
let extension_constructor ~loc ~kind ~name =
Extension_constructor.create ~pext_attributes:(Attributes.of_concrete []) ~pext_loc:loc ~pext_kind:kind ~pext_name:name
let type_extension ~constructors ~params ~path ~private_ =
Type_extension.create ~ptyext_attributes:(Attributes.of_concrete []) ~ptyext_private:private_ ~ptyext_constructors:constructors ~ptyext_params:params ~ptyext_path:path
let constructor_declaration ~loc ~args ~name ~res =
Constructor_declaration.create ~pcd_attributes:(Attributes.of_concrete []) ~pcd_loc:loc ~pcd_res:res ~pcd_args:args ~pcd_name:name
let label_declaration ~loc ~mutable_ ~name ~type_ =
Label_declaration.create ~pld_attributes:(Attributes.of_concrete []) ~pld_loc:loc ~pld_type:type_ ~pld_mutable:mutable_ ~pld_name:name
let type_declaration ~loc ~cstrs ~kind ~manifest ~name ~params ~private_ =
Type_declaration.create ~ptype_loc:loc ~ptype_attributes:(Attributes.of_concrete []) ~ptype_manifest:manifest ~ptype_private:private_ ~ptype_kind:kind ~ptype_cstrs:cstrs ~ptype_params:params ~ptype_name:name
let value_description ~loc ~name ~prim ~type_ =
Value_description.create ~pval_loc:loc ~pval_attributes:(Attributes.of_concrete []) ~pval_prim:prim ~pval_type:type_ ~pval_name:name
let case ~guard ~lhs ~rhs =
Case.create ~pc_rhs:rhs ~pc_guard:guard ~pc_lhs:lhs
let pexp_unreachable ~loc =
Expression.create ~pexp_desc:(Expression_desc.pexp_unreachable) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_extension ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_extension a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_open ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_open a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_pack ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_pack a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_newtype ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_newtype a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_object ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_object a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_poly ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_poly a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_lazy ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_lazy a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_assert ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_assert a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_letexception ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_letexception a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_letmodule ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_letmodule a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_override ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_override a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_setinstvar ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_setinstvar a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_new ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_new a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_send ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_send a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_coerce ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_coerce a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_constraint ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_constraint a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_for ~loc a1 a2 a3 a4 a5 =
Expression.create ~pexp_desc:(Expression_desc.pexp_for a1 a2 a3 a4 a5) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_while ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_while a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_sequence ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_sequence a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_ifthenelse ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_ifthenelse a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_array ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_array a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_setfield ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_setfield a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_field ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_field a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_record ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_record a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_variant ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_variant a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_construct ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_construct a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_tuple ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_tuple a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_try ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_try a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_match ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_match a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_apply ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_apply a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_fun ~loc a1 a2 a3 a4 =
Expression.create ~pexp_desc:(Expression_desc.pexp_fun a1 a2 a3 a4) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_function ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_function a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_let ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_let a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_constant ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_constant a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_ident ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_ident a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let ppat_open ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_open a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_extension ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_extension a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_exception ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_exception a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_unpack ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_unpack a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_lazy ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_lazy a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_type ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_type a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_constraint ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_constraint a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_or ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_or a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_array ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_array a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_record ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_record a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_variant ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_variant a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_construct ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_construct a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_tuple ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_tuple a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_interval ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_interval a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_constant ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_constant a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_alias ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_alias a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_var ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_var a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_any ~loc =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_any) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ptyp_extension ~loc a1 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_extension a1) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_package ~loc a1 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_package a1) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_poly ~loc a1 a2 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_poly a1 a2) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_variant ~loc a1 a2 a3 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_variant a1 a2 a3) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_alias ~loc a1 a2 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_alias a1 a2) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_class ~loc a1 a2 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_class a1 a2) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_object ~loc a1 a2 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_object a1 a2) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_constr ~loc a1 a2 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_constr a1 a2) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_tuple ~loc a1 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_tuple a1) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_arrow ~loc a1 a2 a3 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_arrow a1 a2 a3) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_var ~loc a1 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_var a1) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_any ~loc =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_any) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
(*$*)
| null | https://raw.githubusercontent.com/ocaml-ppx/ppx/40e5a35a4386d969effaf428078c900bd03b78ec/ast/builder_unstable_for_testing.ml | ocaml | $ | $ Ppx_ast_cinaps.print_builder_ml ( Astlib . Version.of_string " unstable_for_testing " )
open Versions.Unstable_for_testing
let module_binding ~loc ~expr ~name =
Module_binding.create ~pmb_loc:loc ~pmb_attributes:(Attributes.of_concrete []) ~pmb_expr:expr ~pmb_name:name
let value_binding ~loc ~expr ~pat =
Value_binding.create ~pvb_loc:loc ~pvb_attributes:(Attributes.of_concrete []) ~pvb_expr:expr ~pvb_pat:pat
let pstr_extension ~loc a1 a2 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_extension a1 a2) ~pstr_loc:loc
let pstr_attribute ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_attribute a1) ~pstr_loc:loc
let pstr_include ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_include a1) ~pstr_loc:loc
let pstr_class_type ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_class_type a1) ~pstr_loc:loc
let pstr_class ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_class a1) ~pstr_loc:loc
let pstr_open ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_open a1) ~pstr_loc:loc
let pstr_modtype ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_modtype a1) ~pstr_loc:loc
let pstr_recmodule ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_recmodule a1) ~pstr_loc:loc
let pstr_module ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_module a1) ~pstr_loc:loc
let pstr_exception ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_exception a1) ~pstr_loc:loc
let pstr_typext ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_typext a1) ~pstr_loc:loc
let pstr_type ~loc a1 a2 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_type a1 a2) ~pstr_loc:loc
let pstr_primitive ~loc a1 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_primitive a1) ~pstr_loc:loc
let pstr_value ~loc a1 a2 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_value a1 a2) ~pstr_loc:loc
let pstr_eval ~loc a1 a2 =
Structure_item.create ~pstr_desc:(Structure_item_desc.pstr_eval a1 a2) ~pstr_loc:loc
let pmod_extension ~loc a1 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_extension a1) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_unpack ~loc a1 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_unpack a1) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_constraint ~loc a1 a2 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_constraint a1 a2) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_apply ~loc a1 a2 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_apply a1 a2) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_functor ~loc a1 a2 a3 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_functor a1 a2 a3) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_structure ~loc a1 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_structure a1) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let pmod_ident ~loc a1 =
Module_expr.create ~pmod_desc:(Module_expr_desc.pmod_ident a1) ~pmod_loc:loc ~pmod_attributes:(Attributes.of_concrete [])
let open_description ~loc ~lid ~override =
Open_description.create ~popen_attributes:(Attributes.of_concrete []) ~popen_loc:loc ~popen_override:override ~popen_lid:lid
let module_type_declaration ~loc ~name ~type_ =
Module_type_declaration.create ~pmtd_loc:loc ~pmtd_attributes:(Attributes.of_concrete []) ~pmtd_type:type_ ~pmtd_name:name
let module_declaration ~loc ~name ~type_ =
Module_declaration.create ~pmd_loc:loc ~pmd_attributes:(Attributes.of_concrete []) ~pmd_type:type_ ~pmd_name:name
let psig_extension ~loc a1 a2 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_extension a1 a2) ~psig_loc:loc
let psig_attribute ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_attribute a1) ~psig_loc:loc
let psig_class_type ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_class_type a1) ~psig_loc:loc
let psig_class ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_class a1) ~psig_loc:loc
let psig_include ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_include a1) ~psig_loc:loc
let psig_open ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_open a1) ~psig_loc:loc
let psig_modtype ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_modtype a1) ~psig_loc:loc
let psig_recmodule ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_recmodule a1) ~psig_loc:loc
let psig_module ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_module a1) ~psig_loc:loc
let psig_exception ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_exception a1) ~psig_loc:loc
let psig_typext ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_typext a1) ~psig_loc:loc
let psig_type ~loc a1 a2 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_type a1 a2) ~psig_loc:loc
let psig_value ~loc a1 =
Signature_item.create ~psig_desc:(Signature_item_desc.psig_value a1) ~psig_loc:loc
let pmty_alias ~loc a1 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_alias a1) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_extension ~loc a1 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_extension a1) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_typeof ~loc a1 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_typeof a1) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_with ~loc a1 a2 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_with a1 a2) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_functor ~loc a1 a2 a3 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_functor a1 a2 a3) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_signature ~loc a1 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_signature a1) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pmty_ident ~loc a1 =
Module_type.create ~pmty_desc:(Module_type_desc.pmty_ident a1) ~pmty_loc:loc ~pmty_attributes:(Attributes.of_concrete [])
let pcf_extension ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_extension a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_attribute ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_attribute a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_initializer ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_initializer a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_constraint ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_constraint a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_method ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_method a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_val ~loc a1 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_val a1) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let pcf_inherit ~loc a1 a2 a3 =
Class_field.create ~pcf_desc:(Class_field_desc.pcf_inherit a1 a2 a3) ~pcf_loc:loc ~pcf_attributes:(Attributes.of_concrete [])
let class_structure ~fields ~self =
Class_structure.create ~pcstr_fields:fields ~pcstr_self:self
let pcl_open ~loc a1 a2 a3 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_open a1 a2 a3) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_extension ~loc a1 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_extension a1) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_constraint ~loc a1 a2 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_constraint a1 a2) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_let ~loc a1 a2 a3 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_let a1 a2 a3) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_apply ~loc a1 a2 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_apply a1 a2) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_fun ~loc a1 a2 a3 a4 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_fun a1 a2 a3 a4) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_structure ~loc a1 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_structure a1) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pcl_constr ~loc a1 a2 =
Class_expr.create ~pcl_desc:(Class_expr_desc.pcl_constr a1 a2) ~pcl_loc:loc ~pcl_attributes:(Attributes.of_concrete [])
let pctf_extension ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_extension a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let pctf_attribute ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_attribute a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let pctf_constraint ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_constraint a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let pctf_method ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_method a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let pctf_val ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_val a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let pctf_inherit ~loc a1 =
Class_type_field.create ~pctf_desc:(Class_type_field_desc.pctf_inherit a1) ~pctf_loc:loc ~pctf_attributes:(Attributes.of_concrete [])
let class_signature ~fields ~self =
Class_signature.create ~pcsig_fields:fields ~pcsig_self:self
let pcty_open ~loc a1 a2 a3 =
Class_type.create ~pcty_desc:(Class_type_desc.pcty_open a1 a2 a3) ~pcty_loc:loc ~pcty_attributes:(Attributes.of_concrete [])
let pcty_extension ~loc a1 =
Class_type.create ~pcty_desc:(Class_type_desc.pcty_extension a1) ~pcty_loc:loc ~pcty_attributes:(Attributes.of_concrete [])
let pcty_arrow ~loc a1 a2 a3 =
Class_type.create ~pcty_desc:(Class_type_desc.pcty_arrow a1 a2 a3) ~pcty_loc:loc ~pcty_attributes:(Attributes.of_concrete [])
let pcty_signature ~loc a1 =
Class_type.create ~pcty_desc:(Class_type_desc.pcty_signature a1) ~pcty_loc:loc ~pcty_attributes:(Attributes.of_concrete [])
let pcty_constr ~loc a1 a2 =
Class_type.create ~pcty_desc:(Class_type_desc.pcty_constr a1 a2) ~pcty_loc:loc ~pcty_attributes:(Attributes.of_concrete [])
let extension_constructor ~loc ~kind ~name =
Extension_constructor.create ~pext_attributes:(Attributes.of_concrete []) ~pext_loc:loc ~pext_kind:kind ~pext_name:name
let type_extension ~constructors ~params ~path ~private_ =
Type_extension.create ~ptyext_attributes:(Attributes.of_concrete []) ~ptyext_private:private_ ~ptyext_constructors:constructors ~ptyext_params:params ~ptyext_path:path
let constructor_declaration ~loc ~args ~name ~res =
Constructor_declaration.create ~pcd_attributes:(Attributes.of_concrete []) ~pcd_loc:loc ~pcd_res:res ~pcd_args:args ~pcd_name:name
let label_declaration ~loc ~mutable_ ~name ~type_ =
Label_declaration.create ~pld_attributes:(Attributes.of_concrete []) ~pld_loc:loc ~pld_type:type_ ~pld_mutable:mutable_ ~pld_name:name
let type_declaration ~loc ~cstrs ~kind ~manifest ~name ~params ~private_ =
Type_declaration.create ~ptype_loc:loc ~ptype_attributes:(Attributes.of_concrete []) ~ptype_manifest:manifest ~ptype_private:private_ ~ptype_kind:kind ~ptype_cstrs:cstrs ~ptype_params:params ~ptype_name:name
let value_description ~loc ~name ~prim ~type_ =
Value_description.create ~pval_loc:loc ~pval_attributes:(Attributes.of_concrete []) ~pval_prim:prim ~pval_type:type_ ~pval_name:name
let case ~guard ~lhs ~rhs =
Case.create ~pc_rhs:rhs ~pc_guard:guard ~pc_lhs:lhs
let pexp_unreachable ~loc =
Expression.create ~pexp_desc:(Expression_desc.pexp_unreachable) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_extension ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_extension a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_open ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_open a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_pack ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_pack a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_newtype ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_newtype a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_object ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_object a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_poly ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_poly a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_lazy ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_lazy a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_assert ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_assert a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_letexception ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_letexception a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_letmodule ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_letmodule a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_override ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_override a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_setinstvar ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_setinstvar a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_new ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_new a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_send ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_send a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_coerce ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_coerce a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_constraint ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_constraint a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_for ~loc a1 a2 a3 a4 a5 =
Expression.create ~pexp_desc:(Expression_desc.pexp_for a1 a2 a3 a4 a5) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_while ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_while a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_sequence ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_sequence a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_ifthenelse ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_ifthenelse a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_array ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_array a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_setfield ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_setfield a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_field ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_field a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_record ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_record a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_variant ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_variant a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_construct ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_construct a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_tuple ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_tuple a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_try ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_try a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_match ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_match a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_apply ~loc a1 a2 =
Expression.create ~pexp_desc:(Expression_desc.pexp_apply a1 a2) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_fun ~loc a1 a2 a3 a4 =
Expression.create ~pexp_desc:(Expression_desc.pexp_fun a1 a2 a3 a4) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_function ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_function a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_let ~loc a1 a2 a3 =
Expression.create ~pexp_desc:(Expression_desc.pexp_let a1 a2 a3) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_constant ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_constant a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let pexp_ident ~loc a1 =
Expression.create ~pexp_desc:(Expression_desc.pexp_ident a1) ~pexp_loc:loc ~pexp_attributes:(Attributes.of_concrete [])
let ppat_open ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_open a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_extension ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_extension a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_exception ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_exception a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_unpack ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_unpack a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_lazy ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_lazy a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_type ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_type a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_constraint ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_constraint a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_or ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_or a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_array ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_array a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_record ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_record a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_variant ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_variant a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_construct ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_construct a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_tuple ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_tuple a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_interval ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_interval a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_constant ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_constant a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_alias ~loc a1 a2 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_alias a1 a2) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_var ~loc a1 =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_var a1) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ppat_any ~loc =
Pattern.create ~ppat_desc:(Pattern_desc.ppat_any) ~ppat_loc:loc ~ppat_attributes:(Attributes.of_concrete [])
let ptyp_extension ~loc a1 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_extension a1) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_package ~loc a1 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_package a1) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_poly ~loc a1 a2 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_poly a1 a2) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_variant ~loc a1 a2 a3 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_variant a1 a2 a3) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_alias ~loc a1 a2 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_alias a1 a2) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_class ~loc a1 a2 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_class a1 a2) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_object ~loc a1 a2 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_object a1 a2) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_constr ~loc a1 a2 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_constr a1 a2) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_tuple ~loc a1 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_tuple a1) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_arrow ~loc a1 a2 a3 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_arrow a1 a2 a3) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_var ~loc a1 =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_var a1) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
let ptyp_any ~loc =
Core_type.create ~ptyp_desc:(Core_type_desc.ptyp_any) ~ptyp_loc:loc ~ptyp_attributes:(Attributes.of_concrete [])
|
3ea8d70ab5b724403db1bec45fadf22a131bf783c2a49835b49248267ef23da6 | cl21/cl21 | cl21.lisp | (defpackage cl21)
(cl:in-package :cl21)
(cl:eval-when (:compile-toplevel :load-toplevel :execute)
(cl:dolist (#1=#:package-name '(:cl21.core))
(cl:let ((#2=#:package (cl:find-package #1#)))
(cl:unless #2#
(cl:error "Package \"~A\" doesn't exist." #1#))
(cl:do-external-symbols (#3=#:symbol #2#)
(cl:shadowing-import (cl:list #3#))
(cl:export (cl:list #3#))))))
(export-syntax :cl21)
(cl:in-package :cl-user)
(cl21::defpackage cl21-user
(:use :cl21))
(cl21::in-package :cl21-user)
#+(or sbcl ccl clisp allegro ecl)
(cl:do-external-symbols (#1=#:symb
#+sbcl :sb-ext
#+ccl :ccl
#+clisp :ext
#+allegro :excl
#+ecl :quit)
(cl:shadowing-import (cl:list #1#)))
| null | https://raw.githubusercontent.com/cl21/cl21/c36644f3b6ea4975174c8ce72de43a4524dd0696/src/cl21.lisp | lisp | (defpackage cl21)
(cl:in-package :cl21)
(cl:eval-when (:compile-toplevel :load-toplevel :execute)
(cl:dolist (#1=#:package-name '(:cl21.core))
(cl:let ((#2=#:package (cl:find-package #1#)))
(cl:unless #2#
(cl:error "Package \"~A\" doesn't exist." #1#))
(cl:do-external-symbols (#3=#:symbol #2#)
(cl:shadowing-import (cl:list #3#))
(cl:export (cl:list #3#))))))
(export-syntax :cl21)
(cl:in-package :cl-user)
(cl21::defpackage cl21-user
(:use :cl21))
(cl21::in-package :cl21-user)
#+(or sbcl ccl clisp allegro ecl)
(cl:do-external-symbols (#1=#:symb
#+sbcl :sb-ext
#+ccl :ccl
#+clisp :ext
#+allegro :excl
#+ecl :quit)
(cl:shadowing-import (cl:list #1#)))
| |
9dee93cc62d590b2f3c46470a6e165f8a5dc28e342148f6b2957abea2b5d5ff6 | tqtezos/minter-sdk | Util.hs | {-# OPTIONS_GHC -Wno-redundant-constraints #-}
# OPTIONS_GHC -Wno - orphans #
module Test.Util
( (-:)
, type (:#)
, pattern (::<)
, pattern SNil
, FA2Setup (..)
, doFA2Setup
, originateFA2
, originateFA2WithGlobalOperators
, assertingBalanceDeltas
, assertingBalanceDeltas'
, balanceOf
, mkAllowlistSimpleParam
, originateWithAdmin
-- * Property-based tests
, clevelandProp
, iterateM
-- Re-exports
, Sized
) where
import qualified Data.Foldable as F
import qualified Data.Map as Map
import Data.Maybe
import Data.Sized (Sized)
import qualified Data.Sized as Sized
import Data.Type.Natural.Lemma.Order (type (<))
import Data.Type.Ordinal (ordToNatural)
import Fmt (build, indentF, unlinesF, (+|), (|+))
import GHC.TypeLits (Symbol)
import GHC.TypeNats (Nat, type (+))
import Hedgehog (Gen, MonadTest)
import Lorentz.Test.Consumer
import Lorentz.Value
import qualified Indigo.Contracts.FA2Sample as FA2
import Lorentz.Contracts.FA2
import qualified Lorentz.Contracts.Spec.FA2Interface as FA2
import qualified Lorentz.Contracts.MinterCollection.Ft.Asset as FtAsset
import qualified Lorentz.Contracts.MinterCollection.Ft.Token as FtToken
import qualified Lorentz.Contracts.PausableAdminOption as PausableAdminOption
import Morley.Nettest
import Morley.Nettest.Pure (PureM, runEmulated)
-- | An alias for pair constructor.
infix 0 -:
(-:) :: a -> b -> (a, b)
(-:) = (,)
-- | Helper type that attaches a name to a numeric type literal.
data (:#) :: Symbol -> Nat -> Type
-- | Pattern-match on a list of fixed size that has some elements in it.
--
-- Unlike 'Sized.:<', this pattern requires the list to be non-empty via
-- the type-system and thus is total.
infixr 2 ::<
pattern (::<) :: (((1 + n) ~ m), (0 < m), KnownNat m) => a -> Sized [] n a -> Sized [] m a
pattern a ::< l <- ((Sized.head &&& Sized.tail) -> (a, l))
{-# COMPLETE (::<) #-}
-- | Pattern-match on an empty list of fixed size.
--
-- Unlike 'Sized.:<', this pattern requires the list to be empty via the
-- type-system and thus is total.
pattern SNil :: Sized [] 0 a
pattern SNil <- _
{-# COMPLETE SNil #-}
-- | Test setup.
--
-- We remember sizes of all entries lists in types because that facilitates
-- allocating exactly as many entities as necessary for the given test suite.
data FA2Setup addrsNum tokensNum = FA2Setup
{ sAddresses :: Sized [] addrsNum Address
, sTokens :: Sized [] tokensNum FA2.TokenId
} deriving stock (Show)
| Prepare all the operated entities .
Note that number of addresses and other entities may be inferred automatically ,
so you should bind all the fields of returned ' FA2Setup ' . For instance :
@
scenario = do
setup < - doFA2Setup
let addr1 : : < addr2 : : < SNil = sAddresses setup
-- ↑ Here compiler figures out that exactly 2 addresses should be allocated
-- during setup ...
let tokenId : : < SNil = sTokens setup
-- ↑ ... and only one token .
...
@
Another option is to explicitly annotate the ' doFA2Setup ' call :
@
scenario = do
setup < - doFA2Setup @("addresses " : # 2 ) @("tokens " : # 1 )
...
@
Note that number of addresses and other entities may be inferred automatically,
so you should bind all the fields of returned 'FA2Setup'. For instance:
@
scenario = do
setup <- doFA2Setup
let addr1 ::< addr2 ::< SNil = sAddresses setup
-- ↑ Here compiler figures out that exactly 2 addresses should be allocated
-- during setup...
let tokenId ::< SNil = sTokens setup
-- ↑ ...and only one token.
...
@
Another option is to explicitly annotate the 'doFA2Setup' call:
@
scenario = do
setup <- doFA2Setup @("addresses" :# 2) @("tokens" :# 1)
...
@
-}
doFA2Setup
:: forall addrsArg tokensArg addrsNum tokensNum caps base m.
( MonadNettest caps base m
, KnownNat addrsNum, addrsArg ~ ("addresses" :# addrsNum)
, KnownNat tokensNum, tokensArg ~ ("tokens" :# tokensNum)
)
=> m (FA2Setup addrsNum tokensNum)
doFA2Setup = do
let aliases = Sized.generate' $ \i -> fromString ("fa2-addr-" <> show (ordToNatural i))
sAddresses <- mapM newAddress aliases
let sTokens = Sized.generate' $ \i -> FA2.TokenId (ordToNatural i)
return FA2Setup{..}
-- | Originate a trivial FA2 contract suitable for testing the provided swaps
-- contract:
-- * Some money will be put on the addresses from setup, the swaps contract
-- will be made operator of those addresses.
-- * The tokenIds from setup will be supported by the originated contract.
originateFA2
:: MonadNettest caps base m
=> AliasHint
-> FA2Setup addrsNum tokensNum
-> [ContractHandler contractParam contractStorage]
-> m (ContractHandler FA2.FA2SampleParameter FA2.Storage)
originateFA2 name FA2Setup{..} contracts = do
fa2 <- originateSimple name
FA2.Storage
{ sLedger = BigMap $ Map.fromList do
-- put money on several tokenIds for each given address
addr <- F.toList sAddresses
tokenId <- F.toList sTokens
pure ((addr, tokenId), 1000)
, sOperators = BigMap $ Map.fromList do
owner <- F.toList sAddresses
operator <- contracts
pure ((owner, toAddress operator), ())
, sTokenMetadata = mempty
}
(FA2.fa2Contract def
{ FA2.cAllowedTokenIds = F.toList sTokens
}
)
return fa2
originateFA2WithGlobalOperators
:: MonadNettest caps base m
=> AliasHint
-> FA2Setup addrsNum tokensNum
-> Set Address
-> Address
-> [ContractHandler contractParam contractStorage]
-> m (ContractHandler FtAsset.LimitedWithGlobalOperatorsEntrypoints FtAsset.LimitedStorageWithGlobalOperators)
originateFA2WithGlobalOperators name FA2Setup{..} globalOperators admin operatorContracts = do
fa2 <- originateTypedSimple name
FtAsset.LimitedStorageWithGlobalOperators
{
assets = FtToken.LimitedStorageWithGlobalOperators
{
ledger = BigMap $ Map.fromList do
-- put money on several tokenIds for each given address
addr <- F.toList sAddresses
tokenId <- F.toList sTokens
pure ((addr, tokenId), 1000)
, operators = BigMap $ Map.fromList do
owner <- F.toList sAddresses
operator <- operatorContracts
tokenId <- F.toList sTokens
pure ((OperatorKey owner (toAddress operator) tokenId), ())
, tokenMetadata = BigMap $ Map.fromList do
tokenId <- F.toList sTokens
pure (tokenId, (TokenMetadata tokenId mempty))
, globalOperators = globalOperators
, nextTokenId = 0
, totalTokenSupply = mempty
},
metadata = mempty,
admin = fromJust $ PausableAdminOption.initAdminStorage admin
}
(FtAsset.limitedWithGlobalOperatorsContract)
return fa2
-- | Given a FA2 contract address, checks that balances of the given
-- address/token_ids change by the specified delta values.
assertingBalanceDeltas
:: (MonadNettest caps base m, HasCallStack)
=> ContractHandler FA2.FA2SampleParameter storage
-> [((Address, FA2.TokenId), Integer)]
-> m a
-> m a
assertingBalanceDeltas fa2 indicedDeltas action = do
consumer <- originateSimple "consumer" [] contractConsumer
pullBalance consumer
res <- action
pullBalance consumer
balancesRes <- map (map FA2.briBalance) <$> getStorage consumer
(balancesAfter, balancesBefore) <- case balancesRes of
[balancesAfter, balancesBefore] ->
return (balancesAfter, balancesBefore)
other -> failure $ "Unexpected consumer storage: " +| other |+ ""
forM_ (zip3 indicedDeltas balancesBefore balancesAfter) $
\(((addr, tokenId), expected), actualBefore, actualAfter) -> do
let actual = toInteger actualAfter - toInteger actualBefore
assert (expected == actual) $
"For address " +| addr |+ "\n(token id = " +| tokenId |+ ")\n\
\got unexpected balance delta: \
\expected " +| expected |+ ", got " +| actual |+ ""
return res
where
pullBalance
:: MonadNettest base caps m
=> ContractHandler [FA2.BalanceResponseItem] storage -> m ()
pullBalance consumer = do
let tokenRefs = map fst indicedDeltas
call fa2 (Call @"Balance_of") $
FA2.mkFA2View
(uncurry FA2.BalanceRequestItem <$> tokenRefs)
consumer
-- | Given a FA2 contract address, checks that balances of the given
-- address/token_ids change by the specified delta values.
assertingBalanceDeltas'
:: (MonadNettest caps base m, HasCallStack)
=> ContractHandler FtAsset.LimitedWithGlobalOperatorsEntrypoints st
-> [((Address, FA2.TokenId), Integer)]
-> m a
-> m a
assertingBalanceDeltas' fa2 indicedDeltas action = do
consumer <- originateSimple "consumer" [] contractConsumer
pullBalance consumer
res <- action
pullBalance consumer
balancesRes <- map (map FA2.briBalance) <$>
getStorage consumer
(balancesAfter, balancesBefore) <- case balancesRes of
[balancesAfter, balancesBefore] ->
return (balancesAfter, balancesBefore)
other -> failure $ "Unexpected consumer storage: " +| other |+ ""
forM_ (zip3 indicedDeltas balancesBefore balancesAfter) $
\(((addr, tokenId), expected), actualBefore, actualAfter) -> do
let actual = toInteger actualAfter - toInteger actualBefore
assert (expected == actual) $
"For address " +| addr |+ "\n(token id = " +| tokenId |+ ")\n\
\got unexpected balance delta: \
\expected " +| expected |+ ", got " +| actual |+ ""
return res
where
pullBalance
:: MonadNettest base caps m
=> ContractHandler [FA2.BalanceResponseItem] st -> m ()
pullBalance consumer = do
let tokenRefs = map fst indicedDeltas
call fa2 (Call @"Balance_of") $
FA2.mkFA2View
(uncurry FA2.BalanceRequestItem <$> tokenRefs)
consumer
-- | Retrieve the FA2 balance for a given account.
balanceOf
:: (HasCallStack, MonadNettest caps base m, ToAddress addr)
=> ContractHandler FA2.FA2SampleParameter storage -> FA2.TokenId -> addr -> m Natural
balanceOf fa2 tokenId account = do
consumer <- originateSimple "balance-response-consumer" [] (contractConsumer @[FA2.BalanceResponseItem])
call fa2 (Call @"Balance_of") (FA2.mkFA2View [FA2.BalanceRequestItem (toAddress account) tokenId] consumer)
consumerStorage <- getStorage consumer
case consumerStorage of
[[balanceResponseItem]] -> pure $ FA2.briBalance balanceResponseItem
_ -> failure $ unlinesF
[ "Expected consumer storage to have exactly 1 balance response, with exactly 1 item."
, "Consumer storage:"
, indentF 2 $ build consumerStorage
]
| Construct allowlist for passing to allowlist overriding entrypoint .
mkAllowlistSimpleParam :: [ContractHandler p s] -> BigMap Address ()
mkAllowlistSimpleParam = mconcat . map (\a -> one (toAddress a, ()))
-- | Originate the a contract and admin for it.
originateWithAdmin
:: MonadNettest caps base m
=> (Address -> m (ContractHandler param storage))
-> m (ContractHandler param storage, Address)
originateWithAdmin originateFn = do
admin <- newAddress "admin"
swaps <- originateFn admin
return (swaps, admin)
| Create a hedgehog property - based test from a cleveland scenario .
clevelandProp :: (MonadIO m, MonadTest m) => EmulatedT PureM () -> m ()
clevelandProp = nettestTestProp . runEmulated . uncapsNettestEmulated
-- | Given a generator of values of type @a@ and an initial value,
-- repeatedly uses the generator to create a list of the given length,
-- feeding it the previously generated value at each iteration.
iterateM :: forall a. Int -> (a -> Gen a) -> a -> Gen [a]
iterateM 0 _ _ = pure []
iterateM len gen previous = do
current <- gen previous
(current :) <$> iterateM (len - 1) gen current
| null | https://raw.githubusercontent.com/tqtezos/minter-sdk/6239f6ee8435977085c00c194224d4223386841a/packages/minter-contracts/test-hs/Test/Util.hs | haskell | # OPTIONS_GHC -Wno-redundant-constraints #
* Property-based tests
Re-exports
| An alias for pair constructor.
| Helper type that attaches a name to a numeric type literal.
| Pattern-match on a list of fixed size that has some elements in it.
Unlike 'Sized.:<', this pattern requires the list to be non-empty via
the type-system and thus is total.
# COMPLETE (::<) #
| Pattern-match on an empty list of fixed size.
Unlike 'Sized.:<', this pattern requires the list to be empty via the
type-system and thus is total.
# COMPLETE SNil #
| Test setup.
We remember sizes of all entries lists in types because that facilitates
allocating exactly as many entities as necessary for the given test suite.
↑ Here compiler figures out that exactly 2 addresses should be allocated
during setup ...
↑ ... and only one token .
↑ Here compiler figures out that exactly 2 addresses should be allocated
during setup...
↑ ...and only one token.
| Originate a trivial FA2 contract suitable for testing the provided swaps
contract:
* Some money will be put on the addresses from setup, the swaps contract
will be made operator of those addresses.
* The tokenIds from setup will be supported by the originated contract.
put money on several tokenIds for each given address
put money on several tokenIds for each given address
| Given a FA2 contract address, checks that balances of the given
address/token_ids change by the specified delta values.
| Given a FA2 contract address, checks that balances of the given
address/token_ids change by the specified delta values.
| Retrieve the FA2 balance for a given account.
| Originate the a contract and admin for it.
| Given a generator of values of type @a@ and an initial value,
repeatedly uses the generator to create a list of the given length,
feeding it the previously generated value at each iteration. | # OPTIONS_GHC -Wno - orphans #
module Test.Util
( (-:)
, type (:#)
, pattern (::<)
, pattern SNil
, FA2Setup (..)
, doFA2Setup
, originateFA2
, originateFA2WithGlobalOperators
, assertingBalanceDeltas
, assertingBalanceDeltas'
, balanceOf
, mkAllowlistSimpleParam
, originateWithAdmin
, clevelandProp
, iterateM
, Sized
) where
import qualified Data.Foldable as F
import qualified Data.Map as Map
import Data.Maybe
import Data.Sized (Sized)
import qualified Data.Sized as Sized
import Data.Type.Natural.Lemma.Order (type (<))
import Data.Type.Ordinal (ordToNatural)
import Fmt (build, indentF, unlinesF, (+|), (|+))
import GHC.TypeLits (Symbol)
import GHC.TypeNats (Nat, type (+))
import Hedgehog (Gen, MonadTest)
import Lorentz.Test.Consumer
import Lorentz.Value
import qualified Indigo.Contracts.FA2Sample as FA2
import Lorentz.Contracts.FA2
import qualified Lorentz.Contracts.Spec.FA2Interface as FA2
import qualified Lorentz.Contracts.MinterCollection.Ft.Asset as FtAsset
import qualified Lorentz.Contracts.MinterCollection.Ft.Token as FtToken
import qualified Lorentz.Contracts.PausableAdminOption as PausableAdminOption
import Morley.Nettest
import Morley.Nettest.Pure (PureM, runEmulated)
infix 0 -:
(-:) :: a -> b -> (a, b)
(-:) = (,)
data (:#) :: Symbol -> Nat -> Type
infixr 2 ::<
pattern (::<) :: (((1 + n) ~ m), (0 < m), KnownNat m) => a -> Sized [] n a -> Sized [] m a
pattern a ::< l <- ((Sized.head &&& Sized.tail) -> (a, l))
pattern SNil :: Sized [] 0 a
pattern SNil <- _
data FA2Setup addrsNum tokensNum = FA2Setup
{ sAddresses :: Sized [] addrsNum Address
, sTokens :: Sized [] tokensNum FA2.TokenId
} deriving stock (Show)
| Prepare all the operated entities .
Note that number of addresses and other entities may be inferred automatically ,
so you should bind all the fields of returned ' FA2Setup ' . For instance :
@
scenario = do
setup < - doFA2Setup
let addr1 : : < addr2 : : < SNil = sAddresses setup
let tokenId : : < SNil = sTokens setup
...
@
Another option is to explicitly annotate the ' doFA2Setup ' call :
@
scenario = do
setup < - doFA2Setup @("addresses " : # 2 ) @("tokens " : # 1 )
...
@
Note that number of addresses and other entities may be inferred automatically,
so you should bind all the fields of returned 'FA2Setup'. For instance:
@
scenario = do
setup <- doFA2Setup
let addr1 ::< addr2 ::< SNil = sAddresses setup
let tokenId ::< SNil = sTokens setup
...
@
Another option is to explicitly annotate the 'doFA2Setup' call:
@
scenario = do
setup <- doFA2Setup @("addresses" :# 2) @("tokens" :# 1)
...
@
-}
doFA2Setup
:: forall addrsArg tokensArg addrsNum tokensNum caps base m.
( MonadNettest caps base m
, KnownNat addrsNum, addrsArg ~ ("addresses" :# addrsNum)
, KnownNat tokensNum, tokensArg ~ ("tokens" :# tokensNum)
)
=> m (FA2Setup addrsNum tokensNum)
doFA2Setup = do
let aliases = Sized.generate' $ \i -> fromString ("fa2-addr-" <> show (ordToNatural i))
sAddresses <- mapM newAddress aliases
let sTokens = Sized.generate' $ \i -> FA2.TokenId (ordToNatural i)
return FA2Setup{..}
originateFA2
:: MonadNettest caps base m
=> AliasHint
-> FA2Setup addrsNum tokensNum
-> [ContractHandler contractParam contractStorage]
-> m (ContractHandler FA2.FA2SampleParameter FA2.Storage)
originateFA2 name FA2Setup{..} contracts = do
fa2 <- originateSimple name
FA2.Storage
{ sLedger = BigMap $ Map.fromList do
addr <- F.toList sAddresses
tokenId <- F.toList sTokens
pure ((addr, tokenId), 1000)
, sOperators = BigMap $ Map.fromList do
owner <- F.toList sAddresses
operator <- contracts
pure ((owner, toAddress operator), ())
, sTokenMetadata = mempty
}
(FA2.fa2Contract def
{ FA2.cAllowedTokenIds = F.toList sTokens
}
)
return fa2
originateFA2WithGlobalOperators
:: MonadNettest caps base m
=> AliasHint
-> FA2Setup addrsNum tokensNum
-> Set Address
-> Address
-> [ContractHandler contractParam contractStorage]
-> m (ContractHandler FtAsset.LimitedWithGlobalOperatorsEntrypoints FtAsset.LimitedStorageWithGlobalOperators)
originateFA2WithGlobalOperators name FA2Setup{..} globalOperators admin operatorContracts = do
fa2 <- originateTypedSimple name
FtAsset.LimitedStorageWithGlobalOperators
{
assets = FtToken.LimitedStorageWithGlobalOperators
{
ledger = BigMap $ Map.fromList do
addr <- F.toList sAddresses
tokenId <- F.toList sTokens
pure ((addr, tokenId), 1000)
, operators = BigMap $ Map.fromList do
owner <- F.toList sAddresses
operator <- operatorContracts
tokenId <- F.toList sTokens
pure ((OperatorKey owner (toAddress operator) tokenId), ())
, tokenMetadata = BigMap $ Map.fromList do
tokenId <- F.toList sTokens
pure (tokenId, (TokenMetadata tokenId mempty))
, globalOperators = globalOperators
, nextTokenId = 0
, totalTokenSupply = mempty
},
metadata = mempty,
admin = fromJust $ PausableAdminOption.initAdminStorage admin
}
(FtAsset.limitedWithGlobalOperatorsContract)
return fa2
assertingBalanceDeltas
:: (MonadNettest caps base m, HasCallStack)
=> ContractHandler FA2.FA2SampleParameter storage
-> [((Address, FA2.TokenId), Integer)]
-> m a
-> m a
assertingBalanceDeltas fa2 indicedDeltas action = do
consumer <- originateSimple "consumer" [] contractConsumer
pullBalance consumer
res <- action
pullBalance consumer
balancesRes <- map (map FA2.briBalance) <$> getStorage consumer
(balancesAfter, balancesBefore) <- case balancesRes of
[balancesAfter, balancesBefore] ->
return (balancesAfter, balancesBefore)
other -> failure $ "Unexpected consumer storage: " +| other |+ ""
forM_ (zip3 indicedDeltas balancesBefore balancesAfter) $
\(((addr, tokenId), expected), actualBefore, actualAfter) -> do
let actual = toInteger actualAfter - toInteger actualBefore
assert (expected == actual) $
"For address " +| addr |+ "\n(token id = " +| tokenId |+ ")\n\
\got unexpected balance delta: \
\expected " +| expected |+ ", got " +| actual |+ ""
return res
where
pullBalance
:: MonadNettest base caps m
=> ContractHandler [FA2.BalanceResponseItem] storage -> m ()
pullBalance consumer = do
let tokenRefs = map fst indicedDeltas
call fa2 (Call @"Balance_of") $
FA2.mkFA2View
(uncurry FA2.BalanceRequestItem <$> tokenRefs)
consumer
assertingBalanceDeltas'
:: (MonadNettest caps base m, HasCallStack)
=> ContractHandler FtAsset.LimitedWithGlobalOperatorsEntrypoints st
-> [((Address, FA2.TokenId), Integer)]
-> m a
-> m a
assertingBalanceDeltas' fa2 indicedDeltas action = do
consumer <- originateSimple "consumer" [] contractConsumer
pullBalance consumer
res <- action
pullBalance consumer
balancesRes <- map (map FA2.briBalance) <$>
getStorage consumer
(balancesAfter, balancesBefore) <- case balancesRes of
[balancesAfter, balancesBefore] ->
return (balancesAfter, balancesBefore)
other -> failure $ "Unexpected consumer storage: " +| other |+ ""
forM_ (zip3 indicedDeltas balancesBefore balancesAfter) $
\(((addr, tokenId), expected), actualBefore, actualAfter) -> do
let actual = toInteger actualAfter - toInteger actualBefore
assert (expected == actual) $
"For address " +| addr |+ "\n(token id = " +| tokenId |+ ")\n\
\got unexpected balance delta: \
\expected " +| expected |+ ", got " +| actual |+ ""
return res
where
pullBalance
:: MonadNettest base caps m
=> ContractHandler [FA2.BalanceResponseItem] st -> m ()
pullBalance consumer = do
let tokenRefs = map fst indicedDeltas
call fa2 (Call @"Balance_of") $
FA2.mkFA2View
(uncurry FA2.BalanceRequestItem <$> tokenRefs)
consumer
balanceOf
:: (HasCallStack, MonadNettest caps base m, ToAddress addr)
=> ContractHandler FA2.FA2SampleParameter storage -> FA2.TokenId -> addr -> m Natural
balanceOf fa2 tokenId account = do
consumer <- originateSimple "balance-response-consumer" [] (contractConsumer @[FA2.BalanceResponseItem])
call fa2 (Call @"Balance_of") (FA2.mkFA2View [FA2.BalanceRequestItem (toAddress account) tokenId] consumer)
consumerStorage <- getStorage consumer
case consumerStorage of
[[balanceResponseItem]] -> pure $ FA2.briBalance balanceResponseItem
_ -> failure $ unlinesF
[ "Expected consumer storage to have exactly 1 balance response, with exactly 1 item."
, "Consumer storage:"
, indentF 2 $ build consumerStorage
]
| Construct allowlist for passing to allowlist overriding entrypoint .
mkAllowlistSimpleParam :: [ContractHandler p s] -> BigMap Address ()
mkAllowlistSimpleParam = mconcat . map (\a -> one (toAddress a, ()))
originateWithAdmin
:: MonadNettest caps base m
=> (Address -> m (ContractHandler param storage))
-> m (ContractHandler param storage, Address)
originateWithAdmin originateFn = do
admin <- newAddress "admin"
swaps <- originateFn admin
return (swaps, admin)
| Create a hedgehog property - based test from a cleveland scenario .
clevelandProp :: (MonadIO m, MonadTest m) => EmulatedT PureM () -> m ()
clevelandProp = nettestTestProp . runEmulated . uncapsNettestEmulated
iterateM :: forall a. Int -> (a -> Gen a) -> a -> Gen [a]
iterateM 0 _ _ = pure []
iterateM len gen previous = do
current <- gen previous
(current :) <$> iterateM (len - 1) gen current
|
d0430f901b25722771b1baa38ddd92bc76c7c91a90c0c6e99cd5631773892adc | mariari/Misc-Lisp-Scripts | chapter5.lisp | (load "chapter1.lisp")
(load "chapter3.lisp")
(defmacro! defunits% (quantity base-unit &rest units)
`(defmacro ,(symb 'unit-of- quantity) (,g!val ,g!un)
`(* ,,g!val
,(case ,g!un
((,base-unit) 1)
,@(mapcar (lambda (x)
`((,(car x)) ,(cadr x)))
(group units 2))))))
(defun defunits-chaining% (u units)
(let ((spec (find u units :key #'car)))
(if (null spec)
(error "Unknown unit ~a" u)
(let ((chain (cadr spec)))
(if (listp chain)
(* (car chain)
(defunits-chaining%
(cadr chain)
units))
chain)))))
(defmacro! defunits%% (quantity base-unit &rest units)
`(defmacro ,(symb 'unit-of- quantity) (,g!val ,g!un)
`(* ,,g!val
,(case ,g!un
((,base-unit) 1)
,@(mapcar (lambda (x)
`((,(car x))
,(defunits-chaining%
(car x)
(cons `(,base-unit 1)
(group units 2)))))
(group units 2))))))
(defunits-chaining%
(car '(H (60 M)))
(cons '(S 1) (group ' (m 60
h (60 m)
d (24 h)
ms (1/1000 s)
us (1/1000 ms)) 2)))
(defunits% time s
m 60
h 3600
d 86400
ms 1/1000
us 1/1000000)
(defunits%% time s
m 60
h (60 m)
d (24 h)
ms (1/1000 s)
us (1/1000 ms))
(defmacro cxr% (x tree)
(if (null x)
tree
`(,(cond
((eq 'a (cadr x)) 'car)
((eq 'd (cadr x)) 'cdr)
(t (error "Non A/D symbol")))
,(if (= 1 (car x))
`(cxr% ,(cddr x) ,tree)
`(cxr% ,(cons (- (car x) 1) (cdr x))
,tree)))))
(defun eleventh (x)
(cxr% (1 a 10 d) x))
;; Has issues with accepting a variable
(defvar cxr-inline-thresh 10)
(lol:defmacro! cxr (x tree)
(if (null x)
tree
(let ((op (cond
((eq 'a (cadr x)) 'car)
((eq 'd (cadr x)) 'cdr)
(t (error "Non A/D symbol")))))
(if (and (integerp (car x))
(<= 1 (car x) cxr-inline-thresh))
(if (= 1 (car x))
`(,op (cxr ,(cddr x) ,tree))
`(,op (cxr ,(cons (- (car x) 1) (cdr x)) ,tree)))
`(lol:nlet-tail
,g!name ((,g!count ,(car x))
(,g!val (cxr ,(cddr x) ,tree)))
(if (>= 0 ,g!count)
,g!val
(,g!name (- ,g!count 1)
(,op ,g!val))))))))
| null | https://raw.githubusercontent.com/mariari/Misc-Lisp-Scripts/acecadc75fcbe15e6b97e084d179aacdbbde06a8/Books/LetOverLambda/chapter5.lisp | lisp | Has issues with accepting a variable | (load "chapter1.lisp")
(load "chapter3.lisp")
(defmacro! defunits% (quantity base-unit &rest units)
`(defmacro ,(symb 'unit-of- quantity) (,g!val ,g!un)
`(* ,,g!val
,(case ,g!un
((,base-unit) 1)
,@(mapcar (lambda (x)
`((,(car x)) ,(cadr x)))
(group units 2))))))
(defun defunits-chaining% (u units)
(let ((spec (find u units :key #'car)))
(if (null spec)
(error "Unknown unit ~a" u)
(let ((chain (cadr spec)))
(if (listp chain)
(* (car chain)
(defunits-chaining%
(cadr chain)
units))
chain)))))
(defmacro! defunits%% (quantity base-unit &rest units)
`(defmacro ,(symb 'unit-of- quantity) (,g!val ,g!un)
`(* ,,g!val
,(case ,g!un
((,base-unit) 1)
,@(mapcar (lambda (x)
`((,(car x))
,(defunits-chaining%
(car x)
(cons `(,base-unit 1)
(group units 2)))))
(group units 2))))))
(defunits-chaining%
(car '(H (60 M)))
(cons '(S 1) (group ' (m 60
h (60 m)
d (24 h)
ms (1/1000 s)
us (1/1000 ms)) 2)))
(defunits% time s
m 60
h 3600
d 86400
ms 1/1000
us 1/1000000)
(defunits%% time s
m 60
h (60 m)
d (24 h)
ms (1/1000 s)
us (1/1000 ms))
(defmacro cxr% (x tree)
(if (null x)
tree
`(,(cond
((eq 'a (cadr x)) 'car)
((eq 'd (cadr x)) 'cdr)
(t (error "Non A/D symbol")))
,(if (= 1 (car x))
`(cxr% ,(cddr x) ,tree)
`(cxr% ,(cons (- (car x) 1) (cdr x))
,tree)))))
(defun eleventh (x)
(cxr% (1 a 10 d) x))
(defvar cxr-inline-thresh 10)
(lol:defmacro! cxr (x tree)
(if (null x)
tree
(let ((op (cond
((eq 'a (cadr x)) 'car)
((eq 'd (cadr x)) 'cdr)
(t (error "Non A/D symbol")))))
(if (and (integerp (car x))
(<= 1 (car x) cxr-inline-thresh))
(if (= 1 (car x))
`(,op (cxr ,(cddr x) ,tree))
`(,op (cxr ,(cons (- (car x) 1) (cdr x)) ,tree)))
`(lol:nlet-tail
,g!name ((,g!count ,(car x))
(,g!val (cxr ,(cddr x) ,tree)))
(if (>= 0 ,g!count)
,g!val
(,g!name (- ,g!count 1)
(,op ,g!val))))))))
|
e68acdaf2dfe74cb8ecf84474d2346d3b4f55983d33819b54d28c20de00e1ca8 | faylang/fay | T190_C.hs | module T190_C where
foo :: String -> Fay ()
foo x = putStrLn x
| null | https://raw.githubusercontent.com/faylang/fay/8455d975f9f0db2ecc922410e43e484fbd134699/tests/T190_C.hs | haskell | module T190_C where
foo :: String -> Fay ()
foo x = putStrLn x
| |
8b7878ee42dbea89085a67c196df37f3180f2fece33ef1babb1f1462485cd52b | iu-parfunc/haskell_dsl_tour | L10ConvertClosures.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE StandaloneDeriving #
module FrameworkHs.GenGrammars.L10ConvertClosures where
import FrameworkHs.Prims
import FrameworkHs.Helpers
import Text.PrettyPrint.HughesPJ (text)
import Blaze.ByteString.Builder (fromByteString)
data Prog
= Expr Expr
data Expr
= If Expr Expr Expr
| Begin [Expr] Expr
| App1 ValPrim [Expr]
| App2 EffectPrim [Expr]
| App3 PredPrim [Expr]
| App4 Expr [Expr]
| UVar UVar
| Quote Immediate
| Let [(UVar,Expr)] Expr
| Letrec [(Label,[UVar],[UVar],Expr)] [(UVar,Label,[UVar])] Expr
| Label Label
instance PP Prog where
pp (Expr e) = (pp e)
ppp (Expr e) = (ppp e)
instance PP Expr where
pp (If e e2 e3) = (ppSexp [fromByteString "if",(pp e),(pp e2),(pp e3)])
pp (Begin l e) = (ppSexp (fromByteString "begin" : ((map pp l) ++ [(pp e)])))
pp (App1 v l) = (ppSexp ((pp v) : (map pp l)))
pp (App2 e l) = (ppSexp ((pp e) : (map pp l)))
pp (App3 p l) = (ppSexp ((pp p) : (map pp l)))
pp (App4 e l) = (ppSexp ((pp e) : (map pp l)))
pp (UVar u) = (pp u)
pp (Quote i) = (ppSexp [fromByteString "quote",(pp i)])
pp (Let l e) = (ppSexp [fromByteString "let",(ppSexp (map (\(u,e) -> (ppSexp [(pp u),(pp e)])) l)),(pp e)])
pp (Letrec l l2 e) = (ppSexp [fromByteString "letrec",(ppSexp (map (\(l,l2,l3,e) -> (ppSexp [(pp l),(ppSexp [fromByteString "lambda",(ppSexp (map pp l2)),(ppSexp [fromByteString "bind-free",(ppSexp (map pp l3)),(pp e)])])])) l)),(ppSexp [fromByteString "closures",(ppSexp (map (\(u,l,l2) -> (ppSexp ((pp u) : ((pp l) : (map pp l2))))) l2)),(pp e)])])
pp (Label l) = (pp l)
ppp (If e e2 e3) = (pppSexp [text "if",(ppp e),(ppp e2),(ppp e3)])
ppp (Begin l e) = (pppSexp (text "begin" : ((map ppp l) ++ [(ppp e)])))
ppp (App1 v l) = (pppSexp ((ppp v) : (map ppp l)))
ppp (App2 e l) = (pppSexp ((ppp e) : (map ppp l)))
ppp (App3 p l) = (pppSexp ((ppp p) : (map ppp l)))
ppp (App4 e l) = (pppSexp ((ppp e) : (map ppp l)))
ppp (UVar u) = (ppp u)
ppp (Quote i) = (pppSexp [text "quote",(ppp i)])
ppp (Let l e) = (pppSexp [text "let",(pppSexp (map (\(u,e) -> (pppSexp [(ppp u),(ppp e)])) l)),(ppp e)])
ppp (Letrec l l2 e) = (pppSexp [text "letrec",(pppSexp (map (\(l,l2,l3,e) -> (pppSexp [(ppp l),(pppSexp [text "lambda",(pppSexp (map ppp l2)),(pppSexp [text "bind-free",(pppSexp (map ppp l3)),(ppp e)])])])) l)),(pppSexp [text "closures",(pppSexp (map (\(u,l,l2) -> (pppSexp ((ppp u) : ((ppp l) : (map ppp l2))))) l2)),(ppp e)])])
ppp (Label l) = (ppp l)
deriving instance Eq Prog
deriving instance Read Prog
deriving instance Show Prog
deriving instance Ord Prog
deriving instance Eq Expr
deriving instance Read Expr
deriving instance Show Expr
deriving instance Ord Expr
| null | https://raw.githubusercontent.com/iu-parfunc/haskell_dsl_tour/f75a7e492a1e5d219a77fb128f70441d54a706eb/middle_end/nanopass/course_example/FrameworkHs/GenGrammars/L10ConvertClosures.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE StandaloneDeriving #
module FrameworkHs.GenGrammars.L10ConvertClosures where
import FrameworkHs.Prims
import FrameworkHs.Helpers
import Text.PrettyPrint.HughesPJ (text)
import Blaze.ByteString.Builder (fromByteString)
data Prog
= Expr Expr
data Expr
= If Expr Expr Expr
| Begin [Expr] Expr
| App1 ValPrim [Expr]
| App2 EffectPrim [Expr]
| App3 PredPrim [Expr]
| App4 Expr [Expr]
| UVar UVar
| Quote Immediate
| Let [(UVar,Expr)] Expr
| Letrec [(Label,[UVar],[UVar],Expr)] [(UVar,Label,[UVar])] Expr
| Label Label
instance PP Prog where
pp (Expr e) = (pp e)
ppp (Expr e) = (ppp e)
instance PP Expr where
pp (If e e2 e3) = (ppSexp [fromByteString "if",(pp e),(pp e2),(pp e3)])
pp (Begin l e) = (ppSexp (fromByteString "begin" : ((map pp l) ++ [(pp e)])))
pp (App1 v l) = (ppSexp ((pp v) : (map pp l)))
pp (App2 e l) = (ppSexp ((pp e) : (map pp l)))
pp (App3 p l) = (ppSexp ((pp p) : (map pp l)))
pp (App4 e l) = (ppSexp ((pp e) : (map pp l)))
pp (UVar u) = (pp u)
pp (Quote i) = (ppSexp [fromByteString "quote",(pp i)])
pp (Let l e) = (ppSexp [fromByteString "let",(ppSexp (map (\(u,e) -> (ppSexp [(pp u),(pp e)])) l)),(pp e)])
pp (Letrec l l2 e) = (ppSexp [fromByteString "letrec",(ppSexp (map (\(l,l2,l3,e) -> (ppSexp [(pp l),(ppSexp [fromByteString "lambda",(ppSexp (map pp l2)),(ppSexp [fromByteString "bind-free",(ppSexp (map pp l3)),(pp e)])])])) l)),(ppSexp [fromByteString "closures",(ppSexp (map (\(u,l,l2) -> (ppSexp ((pp u) : ((pp l) : (map pp l2))))) l2)),(pp e)])])
pp (Label l) = (pp l)
ppp (If e e2 e3) = (pppSexp [text "if",(ppp e),(ppp e2),(ppp e3)])
ppp (Begin l e) = (pppSexp (text "begin" : ((map ppp l) ++ [(ppp e)])))
ppp (App1 v l) = (pppSexp ((ppp v) : (map ppp l)))
ppp (App2 e l) = (pppSexp ((ppp e) : (map ppp l)))
ppp (App3 p l) = (pppSexp ((ppp p) : (map ppp l)))
ppp (App4 e l) = (pppSexp ((ppp e) : (map ppp l)))
ppp (UVar u) = (ppp u)
ppp (Quote i) = (pppSexp [text "quote",(ppp i)])
ppp (Let l e) = (pppSexp [text "let",(pppSexp (map (\(u,e) -> (pppSexp [(ppp u),(ppp e)])) l)),(ppp e)])
ppp (Letrec l l2 e) = (pppSexp [text "letrec",(pppSexp (map (\(l,l2,l3,e) -> (pppSexp [(ppp l),(pppSexp [text "lambda",(pppSexp (map ppp l2)),(pppSexp [text "bind-free",(pppSexp (map ppp l3)),(ppp e)])])])) l)),(pppSexp [text "closures",(pppSexp (map (\(u,l,l2) -> (pppSexp ((ppp u) : ((ppp l) : (map ppp l2))))) l2)),(ppp e)])])
ppp (Label l) = (ppp l)
deriving instance Eq Prog
deriving instance Read Prog
deriving instance Show Prog
deriving instance Ord Prog
deriving instance Eq Expr
deriving instance Read Expr
deriving instance Show Expr
deriving instance Ord Expr
|
f234f6664f888a6d782b86c6e63c40b0c1d2d56aeb435a33546fa4e3db07496d | yallop/ocaml-ctypes | types.ml |
* Copyright ( c ) 2014 .
*
* This file is distributed under the terms of the MIT License .
* See the file LICENSE for details .
* Copyright (c) 2014 Jeremy Yallop.
*
* This file is distributed under the terms of the MIT License.
* See the file LICENSE for details.
*)
open Ctypes
module Struct_stubs(S : Ctypes.TYPE) =
struct
open S
(* missing fields *)
let u1 : [`u1] union typ = union "u1"
let x1 = field u1 "x1" char
let () = seal u1
(* adding fields through views (typedefs) *)
let union_u2 : [`s7] union typ = union ""
let u2 = typedef union_u2 "u2"
let t1 = field u2 "t1" int
let t2 = field u2 "t2" float
let () = seal u2
end
| null | https://raw.githubusercontent.com/yallop/ocaml-ctypes/52ff621f47dbc1ee5a90c30af0ae0474549946b4/tests/test-unions/stubs/types.ml | ocaml | missing fields
adding fields through views (typedefs) |
* Copyright ( c ) 2014 .
*
* This file is distributed under the terms of the MIT License .
* See the file LICENSE for details .
* Copyright (c) 2014 Jeremy Yallop.
*
* This file is distributed under the terms of the MIT License.
* See the file LICENSE for details.
*)
open Ctypes
module Struct_stubs(S : Ctypes.TYPE) =
struct
open S
let u1 : [`u1] union typ = union "u1"
let x1 = field u1 "x1" char
let () = seal u1
let union_u2 : [`s7] union typ = union ""
let u2 = typedef union_u2 "u2"
let t1 = field u2 "t1" int
let t2 = field u2 "t2" float
let () = seal u2
end
|
4660114bb80b99068b093070cc9ff06cfb32884bab4e66db591cb45cfcc2b922 | brendanhay/gogol | Delete.hs | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
-- |
Module : . ShoppingContent . Content . Accounts . Delete
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
--
-- Deletes a Merchant Center sub-account.
--
/See:/ < API for Shopping Reference > for @content.accounts.delete@.
module Gogol.ShoppingContent.Content.Accounts.Delete
( -- * Resource
ContentAccountsDeleteResource,
-- ** Constructing a Request
ContentAccountsDelete (..),
newContentAccountsDelete,
)
where
import qualified Gogol.Prelude as Core
import Gogol.ShoppingContent.Types
| A resource alias for @content.accounts.delete@ method which the
' ' request conforms to .
type ContentAccountsDeleteResource =
"content"
Core.:> "v2.1"
Core.:> Core.Capture "merchantId" Core.Word64
Core.:> "accounts"
Core.:> Core.Capture "accountId" Core.Word64
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "force" Core.Bool
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.Delete '[Core.JSON] ()
-- | Deletes a Merchant Center sub-account.
--
-- /See:/ 'newContentAccountsDelete' smart constructor.
data ContentAccountsDelete = ContentAccountsDelete
{ -- | V1 error format.
xgafv :: (Core.Maybe Xgafv),
-- | OAuth access token.
accessToken :: (Core.Maybe Core.Text),
-- | The ID of the account.
accountId :: Core.Word64,
| JSONP
callback :: (Core.Maybe Core.Text),
-- | Option to delete sub-accounts with products. The default value is false.
force :: Core.Bool,
-- | The ID of the managing account. This must be a multi-client account, and accountId must be the ID of a sub-account of this account.
merchantId :: Core.Word64,
| Legacy upload protocol for media ( e.g. \"media\ " , \"multipart\ " ) .
uploadType :: (Core.Maybe Core.Text),
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
| Creates a value of ' ' with the minimum fields required to make a request .
newContentAccountsDelete ::
-- | The ID of the account. See 'accountId'.
Core.Word64 ->
-- | The ID of the managing account. This must be a multi-client account, and accountId must be the ID of a sub-account of this account. See 'merchantId'.
Core.Word64 ->
ContentAccountsDelete
newContentAccountsDelete accountId merchantId =
ContentAccountsDelete
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
accountId = accountId,
callback = Core.Nothing,
force = Core.False,
merchantId = merchantId,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance Core.GoogleRequest ContentAccountsDelete where
type Rs ContentAccountsDelete = ()
type
Scopes ContentAccountsDelete =
'[Content'FullControl]
requestClient ContentAccountsDelete {..} =
go
merchantId
accountId
xgafv
accessToken
callback
(Core.Just force)
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
shoppingContentService
where
go =
Core.buildClient
( Core.Proxy ::
Core.Proxy ContentAccountsDeleteResource
)
Core.mempty
| null | https://raw.githubusercontent.com/brendanhay/gogol/fffd4d98a1996d0ffd4cf64545c5e8af9c976cda/lib/services/gogol-shopping-content/gen/Gogol/ShoppingContent/Content/Accounts/Delete.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
Deletes a Merchant Center sub-account.
* Resource
** Constructing a Request
| Deletes a Merchant Center sub-account.
/See:/ 'newContentAccountsDelete' smart constructor.
| V1 error format.
| OAuth access token.
| The ID of the account.
| Option to delete sub-accounts with products. The default value is false.
| The ID of the managing account. This must be a multi-client account, and accountId must be the ID of a sub-account of this account.
| Upload protocol for media (e.g. \"raw\", \"multipart\").
| The ID of the account. See 'accountId'.
| The ID of the managing account. This must be a multi-client account, and accountId must be the ID of a sub-account of this account. See 'merchantId'. | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Module : . ShoppingContent . Content . Accounts . Delete
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
/See:/ < API for Shopping Reference > for @content.accounts.delete@.
module Gogol.ShoppingContent.Content.Accounts.Delete
ContentAccountsDeleteResource,
ContentAccountsDelete (..),
newContentAccountsDelete,
)
where
import qualified Gogol.Prelude as Core
import Gogol.ShoppingContent.Types
| A resource alias for @content.accounts.delete@ method which the
' ' request conforms to .
type ContentAccountsDeleteResource =
"content"
Core.:> "v2.1"
Core.:> Core.Capture "merchantId" Core.Word64
Core.:> "accounts"
Core.:> Core.Capture "accountId" Core.Word64
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "force" Core.Bool
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.Delete '[Core.JSON] ()
data ContentAccountsDelete = ContentAccountsDelete
xgafv :: (Core.Maybe Xgafv),
accessToken :: (Core.Maybe Core.Text),
accountId :: Core.Word64,
| JSONP
callback :: (Core.Maybe Core.Text),
force :: Core.Bool,
merchantId :: Core.Word64,
| Legacy upload protocol for media ( e.g. \"media\ " , \"multipart\ " ) .
uploadType :: (Core.Maybe Core.Text),
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
| Creates a value of ' ' with the minimum fields required to make a request .
newContentAccountsDelete ::
Core.Word64 ->
Core.Word64 ->
ContentAccountsDelete
newContentAccountsDelete accountId merchantId =
ContentAccountsDelete
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
accountId = accountId,
callback = Core.Nothing,
force = Core.False,
merchantId = merchantId,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance Core.GoogleRequest ContentAccountsDelete where
type Rs ContentAccountsDelete = ()
type
Scopes ContentAccountsDelete =
'[Content'FullControl]
requestClient ContentAccountsDelete {..} =
go
merchantId
accountId
xgafv
accessToken
callback
(Core.Just force)
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
shoppingContentService
where
go =
Core.buildClient
( Core.Proxy ::
Core.Proxy ContentAccountsDeleteResource
)
Core.mempty
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.